JapariBypass/dmmUpdater.py

import os
import sys
import hashlib
import requests
import urllib.parse
from pathlib import Path
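
# dmmUpdater keeps a locally installed DMM game in sync with the files served
# by the DMM Game Player API: update_game() fetches the server-side file
# manifest, compares MD5 hashes against the local install, and downloads every
# file that is missing or out of date.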

def get_file_list(url):
    """Fetch the download base URL and file manifest for a game from the DMM Game Player API."""
    url = "https://apidgp-gameplayer.games.dmm.com" + url
    print("Retrieving file list from " + url)
    result = requests.get(url)
    result.raise_for_status()
    data = result.json()["data"]
    return data["domain"], data["file_list"]


def get_file_hash(file_path):
    if not os.path.exists(file_path):
        return None
    with open(file_path, "rb") as f:
        file_hash = hashlib.md5()
        while chunk := f.read(8192):
            file_hash.update(chunk)
    return file_hash.hexdigest()


def update_game(game_path, files_url, files_param):
    """Diff the local install against the server manifest and download missing or changed files."""
    print("Updating game")
    server_url, server_files = get_file_list(files_url)
    # Absolute local path of every file in the server manifest -> its manifest entry.
    server_file_dict = {
        str(Path(game_path, file["local_path"].lstrip('/')).resolve()): file
        for file in server_files
    }
    # Absolute paths of every file that currently exists under the game directory.
    local_files = {
        str(Path(dp, f).resolve())
        for dp, dn, filenames in os.walk(game_path)
        for f in filenames
    }
    files_to_download = []
    files_to_delete = []
    for abs_file_path, server_file in server_file_dict.items():
        if abs_file_path in local_files:
            if server_file["force_delete_flg"]:
                files_to_delete.append(abs_file_path)
                continue
            # Skip files whose local MD5 already matches the server manifest.
            if server_file["check_hash_flg"] and get_file_hash(abs_file_path) == server_file["hash"]:
                continue
        download_url = urllib.parse.urljoin(server_url, server_file["path"]) + files_param
        files_to_download.append({"url": download_url, "path": abs_file_path})

    count = len(files_to_download)
    print("Files to download:", count)
    if count > 0:
        index = 0
        max_len = 0

        def show(j, downloaded, total_size):
            # Single-line progress bar, redrawn in place via a carriage return.
            nonlocal max_len
            x = int(40 * j / count)
            string = "Downloading: [{}{}] {}/{} ({}/{})".format(
                "#" * x, "." * (40 - x), j, count, min(downloaded, total_size), total_size)
            max_len = max(len(string), max_len)
            print(string.ljust(max_len, ' '), end='\r', file=sys.stdout, flush=True)

        retries = 3
        show(0, 0, 0)
        while index < len(files_to_download):
            try:
                file = files_to_download[index]
                url, path = file["url"], file["path"]
                response = requests.get(url, timeout=10, stream=True)
                # Fail early on HTTP errors instead of writing an error page to disk.
                response.raise_for_status()
                total_size = int(response.headers.get("content-length", 0))
                block_size = 1024 * 1024
                downloaded = 0
                os.makedirs(os.path.dirname(path), exist_ok=True)
                with open(path, 'wb') as out_file:
                    for data in response.iter_content(block_size):
                        out_file.write(data)
                        downloaded += len(data)
                        show(index + 1, downloaded, total_size)
                index += 1
                retries = 3
            except Exception as e:
                # Retry the current file; give up after three consecutive failures.
                print(e, "retrying")
                retries -= 1
                if retries == 0:
                    print(f'Retry for file {file["url"]} failed 3 times')
                    return False
        print("\n", flush=True, file=sys.stdout)
    # files_to_delete is left unused until the deletion path is fully tested.
    # for file in files_to_delete:
    #     os.remove(file)
    return True
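
# A minimal usage sketch (all values hypothetical): files_url is the API path
# passed to get_file_list() and files_param is the query string appended to
# each download URL; both are presumably supplied by the caller elsewhere in
# JapariBypass.
#
#     update_game("C:/Games/SomeGame", "/hypothetical/filelist/path", "?hypothetical=params")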