diff --git a/gogdl/__init__.py b/gogdl/__init__.py
index efe4616..b8206a3 100644
--- a/gogdl/__init__.py
+++ b/gogdl/__init__.py
@@ -1,12 +1,10 @@
-"""
- _                _                           _ _
-| |__   ___ _ __ ___ (_) ___ ____  __ _  ___   __ _  __| | |
-| '_ \ / _ \ '__/ _ \| |/ __|____ / _` |/ _ \ / _` |/ _` | |
-| | | |  __/ | | (_) | | (_|_____| (_| | (_) | (_| | (_| | |
-|_| |_|\___|_| \___/|_|\___|      \__, |\___/ \__, |\__,_|_|
-                                  |___/       |___/
+# _                _                           _ _
+# | |__   ___ _ __ ___ (_) ___ ____  __ _  ___   __ _  __| | |
+# | '_ \ / _ \ '__/ _ \| |/ __|____ / _` |/ _ \ / _` |/ _` | |
+# | | | |  __/ | | (_) | | (_|_____| (_| | (_) | (_| | (_| | |
+# |_| |_|\___|_| \___/|_|\___|      \__, |\___/ \__, |\__,_|_|
-"""
-version = "0.7.3"
+
+version = "1.0.0"
diff --git a/gogdl/api.py b/gogdl/api.py
index b038295..db7d426 100755
--- a/gogdl/api.py
+++ b/gogdl/api.py
@@ -6,6 +6,7 @@ from gogdl.dl import dl_utils
 from gogdl import version
 import gogdl.constants as constants
 
+
 class ApiHandler:
@@ -27,7 +28,9 @@ def __init__(self, auth_manager):
         self.endpoints = dict()  # Map of secure link endpoints
         self.working_on_ids = list()  # List of products we are waiting for to complete getting the secure link
 
-    def get_item_data(self, id, expanded=[]):
+    def get_item_data(self, id, expanded=None):
+        if expanded is None:
+            expanded = []
         self.logger.info(f"Getting info from products endpoint for id: {id}")
         url = f'{constants.GOG_API}/products/{id}'
         expanded_arg = '?expand='
@@ -48,7 +51,7 @@ def get_game_details(self, id):
         if response.ok:
             return response.json()
 
-    def get_dependenices_list(self, depot_version=2):
+    def get_dependencies_repo(self, depot_version=2):
        self.logger.info("Getting Dependencies repository")
        url = constants.DEPENDENCIES_URL if depot_version == 2 else constants.DEPENDENCIES_V1_URL
        response = self.session.get(url)
@@ -56,9 +59,7 @@ def get_dependenices_list(self, depot_version=2):
             return None
         json_data = json.loads(response.content)
 
-        if 'repository_manifest' in json_data:
-            self.logger.info("Getting repository manifest")
-            return dl_utils.get_zlib_encoded(self, str(json_data['repository_manifest']), self.logger)[0], json_data.get('version')
+        return json_data
 
     def does_user_own(self, id):
         if not self.owned:
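The `expanded=None` change above avoids Python's shared-mutable-default pitfall: a `def f(x, items=[])` default is created once at definition time and reused across calls. A minimal sketch of the failure mode the patch guards against (hypothetical `collect_*` functions, not part of gogdl):

```python
# The shared-default bug the get_item_data() fix guards against.
def collect_bad(item, bucket=[]):      # one list object shared by every call
    bucket.append(item)
    return bucket

def collect_good(item, bucket=None):   # fresh list per call
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(collect_bad("a"), collect_bad("b"))    # ['a', 'b'] ['a', 'b'] - state leaks
print(collect_good("a"), collect_good("b"))  # ['a'] ['b']
```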
dest="client_secret") auth_parser.add_argument("--code", dest="authorization_code", help="Pass authorization code (use for login), when passed client-id and secret are ignored") + # DOWNLOAD + download_parser = subparsers.add_parser( "download", aliases=["repair", "update"], help="Download/update/repair game" ) download_parser.add_argument("id", help="Game id") download_parser.add_argument("--lang", "-l", help="Specify game language") download_parser.add_argument( - "--build", "-b", dest="build", help="Specify buildId (allows repairing)" + "--build", "-b", dest="build", help="Specify buildId" ) download_parser.add_argument( "--path", "-p", dest="path", help="Specify download path", required=True ) + download_parser.add_argument("--support", dest="support_path", help="Specify path where support files should be stored, by default they are put into game dir") download_parser.add_argument( "--platform", "--os", @@ -45,26 +72,53 @@ def init_parser(): choices=["windows", "osx", "linux"], ) download_parser.add_argument( - "--with-dlcs", dest="dlcs", action="store_true", help="Should download dlcs" + "--with-dlcs", dest="dlcs", action="store_true", help="Should download all dlcs" + ) + download_parser.add_argument( + "--skip-dlcs", dest="dlcs", action="store_false", help="Should skip all dlcs" + ) + download_parser.add_argument( + "--dlcs", + dest="dlcs_list", + default=[], + help="List of dlc ids to download (separated by coma)", ) download_parser.add_argument( - "--skip-dlcs", dest="dlcs", action="store_false", help="Should skip dlcs" + "--dlc-only", dest="dlc_only", action="store_true", help="Download only DLC" ) + download_parser.add_argument("--branch", help="Choose build branch to use") + download_parser.add_argument("--password", help="Password to access other branches") + download_parser.add_argument("--force-gen", choices=["1", "2"], dest="force_generation", help="Force specific manifest generation (FOR DEBUGGING)") download_parser.add_argument( "--max-workers", dest="workers_count", - default=0, + default=cpu_count(), help="Specify number of worker threads, by default number of CPU threads", ) - import_parser = subparsers.add_parser( - "import", help="Show data about game in the specified path" - ) - import_parser.add_argument("path") + # SIZE CALCULATING, AND OTHER MANIFEST INFO calculate_size_parser = subparsers.add_parser( "info", help="Calculates estimated download size and list of DLCs" ) + + calculate_size_parser.add_argument( + "--with-dlcs", + dest="dlcs", + action="store_true", + help="Should download all dlcs", + ) + calculate_size_parser.add_argument( + "--skip-dlcs", dest="dlcs", action="store_false", help="Should skip all dlcs" + ) + calculate_size_parser.add_argument( + "--dlcs", + dest="dlcs_list", + help="Coma separated list of dlc ids to download", + ) + calculate_size_parser.add_argument( + "--dlc-only", dest="dlc_only", action="store_true", help="Download only DLC" + ) calculate_size_parser.add_argument("id") calculate_size_parser.add_argument( "--platform", @@ -77,13 +131,18 @@ def init_parser(): "--build", "-b", dest="build", help="Specify buildId" ) calculate_size_parser.add_argument("--lang", "-l", help="Specify game language") + calculate_size_parser.add_argument("--branch", help="Choose build branch to use") + calculate_size_parser.add_argument("--password", help="Password to access other branches") + calculate_size_parser.add_argument("--force-gen", choices=["1", "2"], dest="force_generation", help="Force specific manifest generation (FOR DEBUGGING)") 
diff --git a/gogdl/cli.py b/gogdl/cli.py
index 30725c5..725abdb 100755
--- a/gogdl/cli.py
+++ b/gogdl/cli.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python3
+from multiprocessing import freeze_support
 import gogdl.args as args
-from gogdl.dl import manager
+from gogdl.dl.managers import manager
+from gogdl.dl.managers import dependencies
 import gogdl.api as api
 import gogdl.imports as imports
 import gogdl.launch as launch
@@ -9,8 +11,6 @@ from gogdl import version as gogdl_version
 import logging
 
-logging.basicConfig(format="[%(name)s] %(levelname)s: %(message)s", level=logging.INFO)
-logger = logging.getLogger("MAIN")
 
 
 def display_version():
@@ -19,6 +19,11 @@ def display_version():
 
 def main():
     arguments, unknown_args = args.init_parser()
+    level = logging.INFO
+    if '-d' in unknown_args or '--debug' in unknown_args:
+        level = logging.DEBUG
+    logging.basicConfig(format="[%(name)s] %(levelname)s: %(message)s", level=level)
+    logger = logging.getLogger("MAIN")
     logger.debug(arguments)
     if arguments.display_version:
         display_version()
@@ -28,19 +33,29 @@ def main():
         return
     authorization_manager = auth.AuthorizationManager(arguments.auth_config_path)
     api_handler = api.ApiHandler(authorization_manager)
-    download_manager = manager.DownloadManager(api_handler)
     clouds_storage_manager = saves.CloudStorageManager(api_handler, authorization_manager)
 
-    switcher = {
-        "download": download_manager.download,
-        "repair": download_manager.download,
-        "update": download_manager.download,
-        "import": imports.get_info,
-        "info": download_manager.calculate_download_size,
-        "launch": launch.launch,
-        "save-sync": clouds_storage_manager.sync,
-        "save-clear": clouds_storage_manager.clear,
-        "auth": authorization_manager.handle_cli
-    }
+
+    switcher = {}
+    if arguments.command in ["download", "repair", "update", "info"]:
+        download_manager = manager.Manager(arguments, unknown_args, api_handler)
+        switcher = {
+            "download": download_manager.download,
+            "repair": download_manager.download,
+            "update": download_manager.download,
+            "info": download_manager.calculate_download_size,
+        }
+    elif arguments.command in ["redist", "dependencies"]:
+        dependencies_handler = dependencies.DependenciesManager(arguments.ids.split(","), arguments.path, arguments.workers_count, api_handler, print_manifest=arguments.print_manifest)
+        if not arguments.print_manifest:
+            dependencies_handler.get()
+    else:
+        switcher = {
+            "import": imports.get_info,
+            "launch": launch.launch,
+            "save-sync": clouds_storage_manager.sync,
+            "save-clear": clouds_storage_manager.clear,
+            "auth": authorization_manager.handle_cli
+        }
 
     function = switcher.get(arguments.command)
     if function:
@@ -48,4 +63,5 @@ def main():
 
 if __name__ == "__main__":
+    freeze_support()
     main()
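`main()` now configures logging only after peeking at `unknown_args` for a `-d`/`--debug` flag; `parse_known_args()` makes this possible because it returns unrecognized tokens instead of erroring out. A minimal illustration of that interplay:

```python
import argparse, logging

parser = argparse.ArgumentParser()
parser.add_argument("command")
known, unknown = parser.parse_known_args(["download", "--debug"])

# "--debug" was never declared, so it lands in `unknown`
level = logging.DEBUG if "--debug" in unknown or "-d" in unknown else logging.INFO
logging.basicConfig(format="[%(name)s] %(levelname)s: %(message)s", level=level)
logging.getLogger("MAIN").debug("debug logging enabled")
```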
diff --git a/gogdl/constants.py b/gogdl/constants.py
index 8fa02a6..1e5657b 100755
--- a/gogdl/constants.py
+++ b/gogdl/constants.py
@@ -10,13 +10,22 @@
 DEPENDENCIES_URL = "https://content-system.gog.com/dependencies/repository?generation=2"
 DEPENDENCIES_V1_URL = "https://content-system.gog.com/redists/repository"
 
+NON_NATIVE_SEP = "\\" if os.sep == "/" else "/"
 
-# Use only for Linux
-CACHE_DIR = (
-    os.path.join(
-        os.getenv("XDG_CACHE_HOME", os.path.join(os.path.expanduser("~"), ".cache")),
-        "heroicGOGdl",
+if platform == 'linux':
+    CONFIG_DIR = os.path.join(
+        os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), 'heroic_gogdl'
     )
-    if platform == "linux"
-    else ""
-)
+elif platform == 'win32':
+    CONFIG_DIR = os.path.join(
+        os.getenv("APPDATA"), 'heroic_gogdl'
+    )
+elif platform == 'darwin':
+    CONFIG_DIR = os.path.join(
+        os.path.expanduser("~/Library"), "Application Support", "heroic_gogdl"
+    )
+
+if os.getenv("GOGDL_CONFIG_PATH"):
+    CONFIG_DIR = os.path.join(os.getenv("GOGDL_CONFIG_PATH"), "heroic_gogdl")
+
+MANIFESTS_DIR = os.path.join(CONFIG_DIR, "manifests")
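`CONFIG_DIR` now resolves per platform, with `GOGDL_CONFIG_PATH` acting as an unconditional override. The same resolution order written as a function, for clarity (hypothetical helper, not part of the patch):

```python
import os
from sys import platform

def resolve_config_dir() -> str:
    # Hypothetical helper mirroring the constants.py logic above
    override = os.getenv("GOGDL_CONFIG_PATH")
    if override:
        return os.path.join(override, "heroic_gogdl")
    if platform == "linux":
        base = os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
    elif platform == "win32":
        base = os.getenv("APPDATA")
    elif platform == "darwin":
        base = os.path.join(os.path.expanduser("~/Library"), "Application Support")
    else:
        raise RuntimeError(f"Unsupported platform: {platform}")
    return os.path.join(base, "heroic_gogdl")
```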
secure link response") + if logger: + logger.info("invalid secure link response") time.sleep(0.2) return get_secure_link(api_handler, path, gameId, generation, logger) - js = r.json() - endpoint = classify_cdns(js["urls"], generation) - url_format = endpoint["url_format"] - parameters = endpoint["parameters"] - if generation == 1: - if parameters.get("path"): - parameters["path"] = parameters["path"] + "/main.bin" - - return merge_url_with_params(url_format, parameters) + return js['urls'] - return endpoint - - -def get_dependency_link(api_handler, path): +def get_dependency_link(api_handler): data = get_json( api_handler, - f"{constants.GOG_CONTENT_SYSTEM}/open_link?generation=2&_version=2&path=/dependencies/store/" - + path, + f"{constants.GOG_CONTENT_SYSTEM}/open_link?generation=2&_version=2&path=/dependencies/store/", ) - endpoint = classify_cdns(data["urls"]) - url = endpoint["url"] - return url + if not data: + return None + return data["urls"] def merge_url_with_params(url, parameters): @@ -101,21 +97,7 @@ def merge_url_with_params(url, parameters): def parent_dir(path: str): - return path[0: path.rindex(PATH_SEPARATOR)] - - -def classify_cdns(cdns, generation=2): - best = None - for cdn in cdns: - if generation not in cdn["supports_generation"]: - continue - if not best: - best = cdn - else: - if best["priority"] < cdn["priority"]: - best = cdn - - return best + return os.path.split(path)[0] def calculate_sum(path, function, read_speed_function=None): @@ -142,7 +124,7 @@ def get_readable_size(size): return size, power_labels[n] + "B" -def check_free_space(size, path): +def check_free_space(size: int, path: str): if not os.path.exists(path): os.makedirs(path, exist_ok=True) _, _, available_space = shutil.disk_usage(path) @@ -154,3 +136,44 @@ def get_range_header(offset, size): from_value = offset to_value = (int(offset) + int(size)) - 1 return f"bytes={from_value}-{to_value}" + +# Creates appropriate Manifest class based on provided meta from json +def create_manifest_class(meta: dict, api_handler): + version = meta.get("version") + if version == 1: + return v1.Manifest.from_json(meta, api_handler) + else: + return v2.Manifest.from_json(meta, api_handler) + +def get_case_insensitive_name(path): + if platform == "win32" or os.path.exists(path): + return path + root = path + # Find existing directory + while not os.path.exists(root): + root = os.path.split(root)[0] + + if not root[len(root) - 1] in ["/", "\\"]: + root = root + os.sep + # Separate unknown path from existing one + s_working_dir = path.replace(root, "").split(os.sep) + paths_to_find = len(s_working_dir) + paths_found = 0 + for directory in s_working_dir: + if not os.path.exists(root): + break + dir_list = os.listdir(root) + found = False + for existing_dir in dir_list: + if existing_dir.lower() == directory.lower(): + root = os.path.join(root, existing_dir) + paths_found += 1 + found = True + if not found: + root = os.path.join(root, directory) + paths_found += 1 + + if paths_to_find != paths_found: + root = os.path.join(root, os.sep.join(s_working_dir[paths_found:])) + return root + diff --git a/gogdl/dl/linux_native.py b/gogdl/dl/linux_native.py deleted file mode 100644 index 549e9bf..0000000 --- a/gogdl/dl/linux_native.py +++ /dev/null @@ -1,205 +0,0 @@ -# Linux installers downloader - this handles downloading and unpacking it to provided path -# Untill GOG will allow to download those using content system this is the only way -from gogdl.dl import dl_utils, progressbar -from gogdl import constants -import os -import 
xml.etree.ElementTree as ET -import sys -import subprocess -import logging -import hashlib -import shutil - - -logger = logging.getLogger("LINUX") - - -def get_folder_name_from_windows_manifest(api_handler, id): - builds = dl_utils.get_json( - api_handler, - f"{constants.GOG_CONTENT_SYSTEM}/products/{id}/os/windows/builds?generation=2", - ) - - url = builds["items"][0]["link"] - meta, headers = dl_utils.get_zlib_encoded(api_handler, url) - install_dir = ( - meta["installDirectory"] - if builds["items"][0]["generation"] == 2 - else meta["product"]["installDirectory"] - ) - return install_dir - - -def download(id, api_handler, arguments): - logger.info("Getting folder name from windows manifest") - folder_name = get_folder_name_from_windows_manifest(api_handler, id) - install_path = ( - os.path.join(arguments.path, folder_name) - if arguments.command == "download" - else str(arguments.path) - ) - logger.info("Getting downlad info") - game_details = api_handler.get_item_data(id, ["downloads", "expanded_dlcs"]) - - owned_dlcs = [] - if len(game_details["dlcs"]) > 0: - dlcs = game_details["dlcs"]["products"] - if arguments.dlcs: - for dlc in dlcs: - if api_handler.does_user_own(dlc["id"]): - owned_dlcs.append(dlc) - installers = game_details["downloads"]["installers"] - - if os.path.exists(install_path): - shutil.rmtree(install_path) - linux_installers = filter_linux_installers(installers) - - if len(linux_installers) == 0: - logger.error("Nothing do download") - sys.exit(1) - directory_path, name = os.path.split(install_path) - os.makedirs(directory_path, exist_ok=True) - download_installer(arguments, linux_installers, api_handler, install_path) - - for dlc in owned_dlcs: - response = api_handler.session.get(dlc["expanded_link"]) - details = response.json() - dlc_installers = details["downloads"]["installers"] - dlc_linux_installers = filter_linux_installers(dlc_installers) - download_installer( - arguments, dlc_linux_installers, api_handler, install_path, True - ) - logger.info("Cleaning up") - shutil.rmtree(constants.CACHE_DIR) - - logger.info("Done") - sys.exit(0) - - -def filter_linux_installers(installers): - linux_installers = [] - # Filter out linux installers - for installer in installers: - if installer["os"] == "linux": - linux_installers.append(installer) - return linux_installers - - -def download_installer( - arguments, linux_installers, api_handler, install_path, is_dlc=False -): - found = None - for installer in linux_installers: - if installer["language"] == arguments.lang.split("-")[0]: - found = installer - - if not found: - if len(linux_installers) == 0: - logger.warning("Couldn't find installer to download") - return - else: - found = linux_installers[0] - - if not dl_utils.check_free_space(found["total_size"], constants.CACHE_DIR): - logger.error("Not enough available disk space") - - # There is one file for linux - url = found["files"][0]["downlink"] - download = dl_utils.get_json(api_handler, url) - checksum = api_handler.session.get(download["checksum"]) - md5 = "" - if checksum.ok and checksum.content: - checksum = ET.fromstring(checksum.content) - md5 = checksum.attrib["md5"] - success, path = get_file( - download["downlink"], constants.CACHE_DIR, api_handler, md5 - ) - if success: - if md5 and dl_utils.calculate_sum(path, hashlib.md5) != md5: - logger.warning("Installer integrity invalid, downloading again") - success, path = get_file( - download["downlink"], constants.CACHE_DIR, api_handler, md5 - ) - unpacked_path = os.path.join(constants.CACHE_DIR, "unpacked") - 
logger.info("Checking available disk space") - - if not dl_utils.check_free_space(get_installer_unpack_size(path), unpacked_path): - logger.error("Not enough available disk space") - sys.exit(1) - logger.info("Looks fine continuing") - logger.info("Unpacking game files") - unpack_installer(path, unpacked_path, logger) - - gamefiles_path = os.path.join(unpacked_path, "data", "noarch") - # Move files to destination - # shutil.move(gamefiles_path+'/*', install_path) - command = f'mv -f "{gamefiles_path}" "{install_path}"' - if is_dlc: - command = f'cp -r "{gamefiles_path}"/* "{install_path}"' - logger.info("Moving game files") - subprocess.run(command.encode("utf-8"), shell=True, encoding="utf-8") - - shutil.rmtree(unpacked_path) - - -def get_installer_unpack_size(script_path): - # From sharkwouter's minigalaxy code - var = subprocess.Popen(["unzip", "-v", script_path], stdout=subprocess.PIPE) - output = var.communicate()[0].decode("utf-8") - var.wait() - lines_list = output.split("\n") - if len(lines_list) > 2 and not lines_list[-1].strip(): - last_line = lines_list[-2].strip() - else: - last_line = lines_list[-1].strip() - size_value = int(last_line.split()[0]) - return size_value - - -# Unzips installer to target location -def unpack_installer(script_path, target_path, logger): - logger.info("Unpacking installer using unzip") - if os.path.exists(target_path): - shutil.rmtree(target_path) - command = ["unzip", "-qq", script_path, "-d", target_path] - - process = subprocess.Popen(command) - return_code = process.wait() - return return_code == 1 - - -def get_file(url, path, api_handler, md5): - response = api_handler.session.get(url, stream=True, allow_redirects=True) - total = response.headers.get("Content-Length") - total_readable = dl_utils.get_readable_size(int(total)) - file_name = response.url[response.url.rfind("/") + 1 : response.url.rfind("?")] - path = os.path.join(path, file_name) - - if os.path.exists(path): - if dl_utils.calculate_sum(path, hashlib.md5) == md5: - logger.info("Using existing file") - return True, path - else: - os.remove(path) - - progress_bar = progressbar.ProgressBar( - int(total), dl_utils.get_readable_size(int(total)), 50 - ) - progress_bar.start() - with open(path, "ab") as f: - if total is None: - f.write(response.content) - progress_bar.update_bytes_written(len(response.content)) - progress_bar.update_download_speed(len(response.content)) - else: - total = int(total) - for data in response.iter_content( - chunk_size=max(int(total / 1000), 1024 * 1024) - ): - progress_bar.update_download_speed(len(data)) - written = f.write(data) - progress_bar.update_bytes_written(written) - f.close() - progress_bar.completed = True - progress_bar.join() - return response.ok, path diff --git a/gogdl/dl/manager.py b/gogdl/dl/manager.py deleted file mode 100755 index 6d626da..0000000 --- a/gogdl/dl/manager.py +++ /dev/null @@ -1,429 +0,0 @@ -import json -from sys import platform -from multiprocessing import cpu_count -from gogdl.dl import objects, linux_native -from gogdl.dl.worker import * -from gogdl.dl.progressbar import ProgressBar -from concurrent.futures import ThreadPoolExecutor, as_completed -import gogdl.constants as constants -from sys import exit - - -class DownloadManager: - def __init__(self, api_handler): - self.api_handler = api_handler - self.logger = logging.getLogger('DOWNLOAD_MANAGER') - self.logger.setLevel(logging.INFO) - # This is the default (won't be used in heroic) removed automatic detection since introduces crashes on MacOS - self.lang = 'en' - 
self.cancelled = False - self.dlcs_should_be_downloaded = False - self.dlc_ids = [] - self.threads = [] - self.platform = "windows" if platform == "win32" else "osx" if platform == "darwin" else "linux" - - def download(self, args, unknown_args): - if self.get_download_metadata(args): - if self.perform_download(): - self.logger.info("Done") - exit(0) - else: - exit(2) - else: - exit(1) - - def calculate_download_size(self, args, unknown_args): - if self.get_download_metadata(args): - # Override language to English when checking download size - if self.depot_version == 1: - self.lang = 'English' - download_files, dependency_files = self.collect_depots() - size_data = self.calculate_size(download_files, dependency_files) - download_size = size_data[0] - disk_size = size_data[1] - - dlcs = [] - if self.depot_version == 2: - for product in self.meta['products']: - if product["productId"] != self.meta["baseProductId"]: - if self.api_handler.does_user_own(product["productId"]): - dlcs.append({ - "title": product['name'], - "app_name": product['productId'] - }) - - languages = [] - # Get possible languages - depots_array = self.meta['depots'] if self.depot_version == 2 else self.meta['product']['depots'] - for depot in depots_array: - if 'redist' in depot: - continue - for lang in depot['languages']: - if ((lang != "*") and (lang != "Neutral")) and not (lang in languages): - languages.append(lang) - - build_id = self.builds["items"][0]["build_id"] if self.depot_version == 2 else self.meta["product"][ - "timestamp"] - - print(json.dumps({"download_size": download_size, - "disk_size": disk_size, - "dlcs": dlcs, - "buildId": build_id, - "languages": languages, - "folder_name": self.meta["installDirectory"] if self.depot_version == 2 else - self.meta['product']['installDirectory'], - "versionEtag": self.versionEtag, - "versionName": self.versionName - })) - - def get_download_metadata(self, args): - - if args.platform: - self.platform = args.platform - if int(args.workers_count) > 0: - self.allowed_threads = int(args.workers_count) - else: - self.allowed_threads = cpu_count() - # Getting more data - self.dl_target = self.api_handler.get_item_data(args.id) - self.dl_target['id'] = args.id - - if args.lang: - self.lang = args.lang - try: - self.dlcs_should_be_downloaded = args.dlcs - except AttributeError: - pass - if self.platform == 'linux': - linux_native.download(self.dl_target['id'], self.api_handler, args) - return False - is_compatible = self.check_compatibility() - self.logger.info(f'Game is compatible') if is_compatible else self.logger.error(f'Game is incompatible') - if not is_compatible: - return False - self.logger.debug('Getting Build data') - # Builds data - self.builds = dl_utils.get_json( - self.api_handler, - f'{constants.GOG_CONTENT_SYSTEM}/products/{self.dl_target["id"]}/os/{self.platform}/builds?generation=2') - # Just in case - if self.builds['count'] == 0: - self.logger.error('Nothing to download, exiting') - return False - - target_build = self.builds['items'][0] - for build in self.builds['items']: - if not build['branch']: - target_build = build - break - if args.build: - # Find build - for build in self.builds['items']: - if build['build_id'] == args.build: - target_build = build - break - - # Downloading most recent thing by default - self.depot_version = target_build['generation'] - if self.depot_version == 1 or self.depot_version == 2: - self.logger.info(f"Depot version: {self.depot_version}") - else: - self.logger.error("Unsupported depot version please report this") - 
return False - - meta_url = target_build['link'] - self.logger.info('Getting Meta data') - self.meta, headers = dl_utils.get_zlib_encoded(self.api_handler, meta_url, self.logger) - - self.versionEtag = headers.get("Etag") - self.versionName = target_build['version_name'] - install_directory = self.meta['installDirectory'] if self.depot_version == 2 else self.meta['product'][ - 'installDirectory'] - try: - self.path = args.path - if args.command == 'download': - self.dl_path = os.path.join( - self.path, install_directory) - else: - self.dl_path = self.path - except AttributeError: - pass - - # Redist version is useful for V1 depots - self.dependencies, self.redist_version = self.handle_dependencies() - - return True - - def collect_depots(self): - collected_depots = [] - download_files = [] - dependency_files = [] - - owned_dlcs = [] - if self.depot_version == 2: - if self.meta.get('products'): - for dlc in self.meta['products']: - if dlc['productId'] != self.meta['baseProductId']: - if self.api_handler.does_user_own(dlc['productId']): - owned_dlcs.append(dlc['productId']) - self.dlc_ids.append(dlc['productId']) - - for depot in self.meta['depots']: - if str(depot['productId']) == str(self.dl_target['id']) or self.dlcs_should_be_downloaded and ( - depot['productId'] in owned_dlcs): - # TODO: Respect user language - newObject = objects.Depot(self.lang, depot) - if newObject.check_language(): - collected_depots.append(newObject) - else: - if self.meta['product'].get('gameIDs'): - for dlc in self.meta['product']['gameIDs']: - if dlc['gameID'] != self.meta['product']['rootGameID']: - if self.api_handler.does_user_own(dlc['gameID']): - owned_dlcs.append(dlc['gameID']) - for depot in self.meta['product']['depots']: - if 'redist' not in depot: - depot_object = objects.DepotV1(self.lang, depot) - if depot_object.check_language(): - collected_depots.append(depot_object) - - self.logger.debug( - f"Collected {len(collected_depots)} depots, proceeding to download, Dependencies Depots: {len(self.dependencies)}") - if self.depot_version == 2: - for depot in collected_depots: - manifest = dl_utils.get_zlib_encoded( - self.api_handler, - f'{constants.GOG_CDN}/content-system/v2/meta/{dl_utils.galaxy_path(depot.manifest)}', self.logger)[0] - download_files += self.get_depot_list(manifest, depot.product_id) - for depot in self.dependencies: - manifest = dl_utils.get_zlib_encoded( - self.api_handler, - f'{constants.GOG_CDN}/content-system/v2/dependencies/meta/{dl_utils.galaxy_path(depot["manifest"])}', self.logger)[ - 0] - dependency_files += self.get_depot_list(manifest) - else: - for depot in collected_depots: - url = f'{constants.GOG_CDN}/content-system/v1/manifests/{self.dl_target["id"]}/{self.platform}/{self.builds["items"][0]["legacy_build_id"]}/{depot.manifest}' - manifest = dl_utils.get_json(self.api_handler, url) - download_files += manifest['depot']['files'] - - for depot in self.dependencies: - url = f"{constants.GOG_CDN}/content-system/v1/redists/manifests/{self.redist_version}/{depot['manifest']}" - repo = dl_utils.get_json(self.api_handler, url) - if depot['path'][0] == '/' and len(depot['path']) > 1: - depot['path'] = depot['path'][1:] - - for redist_file in range(len(repo['depot']['files'])): - # This makes path absolute, and appends download link to depot object - if repo['depot']['files'][redist_file]['path'][0] == '/': - repo['depot']['files'][redist_file]['path'] = repo['depot']['files'][redist_file]['path'][1:] - - url = repo['depot']['files'][redist_file].get('url') - - if url: - 
redistributable_id, file_name = url.split('/') - - cdn_json = dl_utils.get_json(self.api_handler, - f"{constants.GOG_CONTENT_SYSTEM}/open_link?_version=2&generation=1&path=redists/{redistributable_id}/{self.redist_version}") - cdn = dl_utils.classify_cdns(cdn_json['urls'], 1) - if 'url' not in cdn: - self.logger.info( - f"Couldn't get cdn url for dependency {redistributable_id}/{self.redist_version}") - break - repo['depot']['files'][redist_file]['link'] = cdn['url'] + '/main.bin' - - repo['depot']['files'][redist_file]['path'] = os.path.join(depot['path'], - repo['depot']['files'][redist_file][ - 'path']) - - dependency_files.extend(repo['depot']['files']) - return download_files, dependency_files - - # V2 downloading - def perform_download(self): - # print(self.meta) - if self.depot_version == 1: - return self.perform_download_V1() - self.logger.debug("Collecting base game depots") - - files = self.collect_depots() - - download_files = files[0] - dependency_files = files[1] - - self.logger.debug( - f"Downloading {len(download_files)} game files, and {len(dependency_files)} dependency files proceeding") - - size_data = self.calculate_size(download_files, dependency_files) - download_size = size_data[0] - disk_size = size_data[1] - - readable_download_size = dl_utils.get_readable_size(download_size) - readable_disk_size = dl_utils.get_readable_size(disk_size) - self.logger.info(f"Download size: {round(readable_download_size[0], 2)}{readable_download_size[1]}") - self.logger.info(f"Size on disk: {round(readable_disk_size[0], 2)}{readable_disk_size[1]}") - self.logger.info("Checking free disk space") - if not dl_utils.check_free_space(disk_size, self.path): - self.logger.error("Not enough available disk space") - return False - allowed_threads = max(1, self.allowed_threads) - self.logger.debug("Spawning progress bar process") - self.progress = ProgressBar(download_size, f"{round(readable_download_size[0], 2)}{readable_download_size[1]}", - 50) - self.progress.start() - - self.thpool = ThreadPoolExecutor(max_workers=allowed_threads) - endpoints = dict() - - self.api_handler.get_new_secure_link(self.dl_target['id']) - for dlc_id in self.dlc_ids: - self.api_handler.get_new_secure_link(dlc_id) - # Main game files - for file in download_files: - thread = DLWorker(file, self.dl_path, self.api_handler, self.dl_target['id'], self.progress) - # thread.do_stuff() - self.threads.append(self.thpool.submit(thread.do_stuff)) - # Dependencies - for file in dependency_files: - thread = DLWorker(file, self.dl_path, self.api_handler, self.dl_target['id'], self.progress) - self.threads.append(self.thpool.submit(thread.do_stuff, (True))) - - # Wait until everything finishes - for thread in as_completed(self.threads): - if thread.cancelled(): - self.cancelled = True - break - - # TODO: Get game icon, for shortcuts - self.progress.completed = True - return not self.cancelled - - def perform_download_V1(self): - self.logger.debug("Redirecting download to V1 handler") - - download_files, dependency_files = self.collect_depots() - - dl_utils.prepare_location(self.dl_path, self.logger) - - self.api_handler.get_new_secure_link(self.dl_target['id'], - f"/{self.platform}/{self.builds['items'][0]['legacy_build_id']}", - 1) - - for dlc_id in self.dlc_ids: - self.api_handler.get_new_secure_link(dlc_id, - f"/{self.platform}/{self.builds['items'][0]['legacy_build_id']}", 1) - - size_data = self.calculate_size(download_files, []) - download_size = size_data[0] - disk_size = size_data[1] - readable_download_size = 
dl_utils.get_readable_size(download_size) - readable_disk_size = dl_utils.get_readable_size(disk_size) - self.logger.info(f"Download size: {round(readable_download_size[0], 2)}{readable_download_size[1]}") - self.logger.info(f"Size on disk: {round(readable_disk_size[0], 2)}{readable_disk_size[1]}") - - allowed_threads = max(1, self.allowed_threads) - self.thpool = ThreadPoolExecutor(max_workers=allowed_threads) - - self.logger.debug("Spawning progress bar process") - self.progress = ProgressBar(download_size, f"{round(readable_download_size[0], 2)}{readable_download_size[1]}", - 50) - self.progress.start() - self.threads = [] - for download_file in download_files: - worker = DLWorkerV1(download_file, self.dl_path, self.api_handler, self.dl_target['id'], - self.progress, self.platform, - self.builds['items'][0]['legacy_build_id']) - thread = self.thpool.submit(worker.do_stuff, False) - self.threads.append(thread) - # worker.do_stuff(False) - - for download_file in dependency_files: - worker = DLWorkerV1(download_file, self.dl_path, self.api_handler, - self.dl_target['id'], self.progress, self.platform, - self.builds['items'][0]['legacy_build_id']) - thread = self.thpool.submit(worker.do_stuff, True) - self.threads.append(thread) - - # worker.do_stuff(True) - - for thread in as_completed(self.threads): - if thread.cancelled(): - self.cancelled = True - break - - self.progress.completed = True - - return True - - def handle_dependencies(self): - dependencies_json, version = self.api_handler.get_dependenices_list(self.depot_version) - dependencies_array = [] - if self.depot_version == 2 and 'dependencies' not in self.meta: - return [], None - old_iterator = [] - if self.depot_version == 1: - if 'dependencies' in self.meta['product']['gameIDs'][0]: - old_iterator.extend(self.meta['product']['gameIDs'][0]['dependencies']) - for depot in self.meta['product']['depots']: - if 'redist' in depot: - old_iterator.append(depot) - - iterator = self.meta['dependencies'] if self.depot_version == 2 else old_iterator - dependencies_depots = dependencies_json['depots'] if self.depot_version == 2 else dependencies_json['product'][ - 'depots'] - for dependency in dependencies_depots: - for game_dep in iterator: - if self.depot_version == 2: - if dependency['dependencyId'] == game_dep: - dependencies_array.append(dependency) - else: - if game_dep['redist'] in dependency['gameIDs']: - if game_dep.get('targetDir'): - dependency['path'] = game_dep['targetDir'] - else: - dependency['path'] = os.path.join("__redist", game_dep['redist']) - dependency['redist'] = game_dep['redist'] - dependencies_array.append(dependency) - return dependencies_array, version - - def get_depot_list(self, manifest, product_id=None): - download_list = list() - for item in manifest['depot']['items']: - obj = None - if item['type'] == 'DepotFile': - obj = objects.DepotFile(item, product_id) - else: - obj = objects.DepotDirectory(item) - download_list.append(obj) - return download_list - - def check_compatibility(self): - self.logger.info(f"Checking compatibility of {self.dl_target['title']} with {self.platform}") - return self.dl_target['content_system_compatibility'][self.platform] - - def unpack_v1(self, download_files): - self.logger.info("Unpacking main.bin (fs intense thing)") - - def calculate_size(self, files, dependencies): - self.logger.info("Calculating download size") - download_size = 0 - disk_size = 0 - for file in files: - if type(file) == objects.DepotFile and self.depot_version == 2: - for chunk in file.chunks: - 
download_size += int(chunk['compressedSize'])
-                    disk_size += int(chunk['size'])
-            elif self.depot_version == 1:
-                if file.get('size'):
-                    disk_size += int(file['size'])
-        for dependency in dependencies:
-            if self.depot_version == 2:
-                for chunk in dependency.chunks:
-                    download_size += int(chunk['compressedSize'])
-                    disk_size += int(chunk['size'])
-            elif self.depot_version == 1:
-                if not dependency.get('directory'):
-                    disk_size += int(dependency['size'])
-        if self.depot_version == 1:
-            download_size = disk_size
-        return (download_size, disk_size)
diff --git a/gogdl/dl/managers/dependencies.py b/gogdl/dl/managers/dependencies.py
new file mode 100644
index 0000000..b39118a
--- /dev/null
+++ b/gogdl/dl/managers/dependencies.py
@@ -0,0 +1,165 @@
+from sys import exit
+import logging
+import os
+import json
+from typing import Optional
+from gogdl.dl import dl_utils
+import gogdl.constants as constants
+from gogdl.dl.managers.task_executor import ExecutingManager
+from gogdl.dl.objects import v2
+from gogdl.dl.objects.generic import BaseDiff
+
+
+def get_depot_list(manifest, product_id=None):
+    download_list = list()
+    for item in manifest["depot"]["items"]:
+        if item["type"] == "DepotFile":
+            download_list.append(v2.DepotFile(item, product_id))
+    return download_list
+
+
+# Looks like we can use V2 dependencies for V1 games too WOAH
+# We are doing that obviously
+class DependenciesManager:
+    def __init__(
+        self, ids, path, workers_count, api_handler, print_manifest=False, download_game_deps_only=False
+    ):
+        self.api = api_handler
+
+        self.logger = logging.getLogger("REDIST")
+
+        self.path = path
+        self.installed_manifest = os.path.join(self.path, '.gogdl-redist-manifest')
+        self.workers_count = int(workers_count)
+        self.build = self.api.get_dependencies_repo()
+        self.repository = dl_utils.get_zlib_encoded(self.api, self.build['repository_manifest'])[0] or {}
+        # Put version for easier serialization
+        self.repository['build_id'] = self.build['build_id']
+
+        self.ids = ids
+        self.download_game_deps_only = download_game_deps_only  # Basically skip all redist with path starting with __redist
+        if self.repository and print_manifest:
+            print(json.dumps(self.repository))
+
+    def get_files_for_depot_manifest(self, manifest):
+        url = f'{constants.GOG_CDN}/content-system/v2/dependencies/meta/{dl_utils.galaxy_path(manifest)}'
+        manifest = dl_utils.get_zlib_encoded(self.api, url)[0]
+
+        return get_depot_list(manifest, 'redist')
+
+    def get(self, return_files=False):
+        old_depots = []
+        new_depots = []
+        if not self.ids:
+            return []
+        installed = set()
+
+        # This will always be None for redist written into the game dir
+        existing_manifest = None
+        if os.path.exists(self.installed_manifest):
+            try:
+                with open(self.installed_manifest, 'r') as f:
+                    existing_manifest = json.load(f)
+            except Exception:
+                existing_manifest = None
+            else:
+                if 'depots' in existing_manifest and 'build_id' in existing_manifest:
+                    already_installed = existing_manifest.get('HGLInstalled') or []
+                    for depot in existing_manifest["depots"]:
+                        if depot["dependencyId"] in already_installed:
+                            old_depots.append(depot)
+
+        for depot in self.repository["depots"]:
+            if depot["dependencyId"] in self.ids:
+                # By default we want to download all redist beginning
+                # with __redist (game installation runs installation of the game's ones)
+                should_download = depot["executable"]["path"].startswith("__redist")
+
+                # If we want to download redist located in game dir we flip the boolean
+                if self.download_game_deps_only:
+                    should_download = not should_download
+
+                if should_download:
+                    installed.add(depot['dependencyId'])
+                    new_depots.append(depot)
+
+        new_files = []
+        old_files = []
+
+        # Collect files for each redistributable
+        for depot in new_depots:
+            new_files += self.get_files_for_depot_manifest(depot["manifest"])
+
+        for depot in old_depots:
+            old_files += self.get_files_for_depot_manifest(depot["manifest"])
+
+        if return_files:
+            return new_files
+
+        diff = DependenciesDiff.compare(new_files, old_files)
+
+        if not len(diff.changed) and not len(diff.deleted) and not len(diff.new):
+            self.logger.info("Nothing to do")
+            return
+
+        secure_link = dl_utils.get_dependency_link(self.api)  # This should never expire
+        executor = ExecutingManager(self.api, self.workers_count, self.path, os.path.join(self.path, 'gog-support'), diff, {'redist': secure_link})
+        success = executor.setup()
+        if not success:
+            print('Unable to proceed, Not enough disk space')
+            exit(2)
+        cancelled = executor.run()
+
+        if cancelled:
+            return
+
+        repository = self.repository
+        repository['HGLInstalled'] = list(installed)
+
+        json_repository = json.dumps(repository)
+        with open(self.installed_manifest, 'w') as f:
+            f.write(json_repository)
+
+
+class DependenciesDiff(BaseDiff):
+    def __init__(self):
+        super().__init__()
+
+    @classmethod
+    def compare(cls, new_files: list, old_files: Optional[list]):
+        comparison = cls()
+
+        if not old_files:
+            comparison.new = new_files
+            return comparison
+
+        new_files_paths = dict()
+        for file in new_files:
+            new_files_paths.update({file.path.lower(): file})
+
+        old_files_paths = dict()
+        for file in old_files:
+            old_files_paths.update({file.path.lower(): file})
+
+        for old_file in old_files_paths.values():
+            if not new_files_paths.get(old_file.path.lower()):
+                comparison.deleted.append(old_file)
+
+        for new_file in new_files_paths.values():
+            old_file = old_files_paths.get(new_file.path.lower())
+            if not old_file:
+                comparison.new.append(new_file)
+            else:
+                if len(new_file.chunks) == 1 and len(old_file.chunks) == 1:
+                    if new_file.chunks[0]["md5"] != old_file.chunks[0]["md5"]:
+                        comparison.changed.append(new_file)
+                else:
+                    if (new_file.md5 and old_file.md5 and new_file.md5 != old_file.md5) or (new_file.sha256 and old_file.sha256 != new_file.sha256):
+                        comparison.changed.append(v2.FileDiff.compare(new_file, old_file))
+                    elif len(new_file.chunks) != len(old_file.chunks):
+                        comparison.changed.append(v2.FileDiff.compare(new_file, old_file))
+        return comparison
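`DependenciesDiff.compare` keys both file lists by lower-cased path, so entries that differ only in case are matched rather than treated as new/deleted pairs. A toy run of the same idea (stub file type, hypothetical, not gogdl's classes):

```python
# Toy model of the case-insensitive diffing in DependenciesDiff.compare
class StubFile:
    def __init__(self, path, md5):
        self.path, self.md5 = path, md5

old = {f.path.lower(): f for f in [StubFile("Bin/x.dll", "aa"), StubFile("gone.txt", "bb")]}
new = {f.path.lower(): f for f in [StubFile("bin/x.dll", "cc"), StubFile("fresh.txt", "dd")]}

deleted = [f for p, f in old.items() if p not in new]
added   = [f for p, f in new.items() if p not in old]
changed = [f for p, f in new.items() if p in old and old[p].md5 != f.md5]
print([f.path for f in deleted], [f.path for f in added], [f.path for f in changed])
# ['gone.txt'] ['fresh.txt'] ['bin/x.dll']
```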
diff --git a/gogdl/dl/managers/linux.py b/gogdl/dl/managers/linux.py
new file mode 100644
index 0000000..6e08b7f
--- /dev/null
+++ b/gogdl/dl/managers/linux.py
@@ -0,0 +1,300 @@
+# Manage downloading of linux native games using new zip method based on Range headers
+import json
+import logging
+import hashlib
+import os.path
+import stat
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from zlib import crc32
+from gogdl.dl import dl_utils
+from gogdl.dl.managers.task_executor import ExecutingManager
+from gogdl.dl.objects.generic import BaseDiff
+from gogdl.dl.objects.v2 import DepotLink
+from gogdl.dl.workers import linux as linux_worker
+from gogdl.dl.objects import linux
+from gogdl import constants
+
+
+def get_folder_name_from_windows_manifest(api_handler, id):
+    builds = dl_utils.get_json(
+        api_handler,
+        f"{constants.GOG_CONTENT_SYSTEM}/products/{id}/os/windows/builds?generation=2",
+    )
+
+    url = builds["items"][0]["link"]
+    meta, headers = dl_utils.get_zlib_encoded(api_handler, url)
+    install_dir = (
+        meta["installDirectory"]
+        if builds["items"][0]["generation"] == 2
+        else meta["product"]["installDirectory"]
+    )
+    return install_dir
+
+
+class Manager:
+    def __init__(self, generic_manager):
+        self.game_id = generic_manager.game_id
+        self.arguments = generic_manager.arguments
+        self.unknown_arguments = generic_manager.unknown_arguments
+        self.is_verifying = generic_manager.is_verifying
+
+        self.api_handler = generic_manager.api_handler
+        self.allowed_threads = generic_manager.allowed_threads
+        self.folder_name = get_folder_name_from_windows_manifest(self.api_handler, self.game_id)
+
+        if "path" in self.arguments:
+            self.path = self.arguments.path
+            if generic_manager.should_append_folder_name:
+                self.path = os.path.join(self.path, self.folder_name)
+        else:
+            self.path = ""
+
+        self.lang = self.arguments.lang
+        self.dlcs_should_be_downloaded = self.arguments.dlcs
+        if self.arguments.dlcs_list:
+            self.dlcs_list = self.arguments.dlcs_list.split(",")
+        else:
+            self.dlcs_list = []
+        self.dlc_only = self.arguments.dlc_only
+
+        self.logger = logging.getLogger("LINUX")
+        self.logger.info("Initialized Linux Download Manager")
+
+        self.game_data = None
+
+        self.languages_codes = list()
+        self.downlink = None
+        self.game_files = list()
+
+        self.installer_handlers = list()
+
+    @staticmethod
+    def filter_linux_installers(installers):
+        return [installer for installer in installers if installer["os"] == "linux"]
+
+    def find_matching_installer(self, installers):
+        if len(installers) == 1:
+            return installers[0]
+        for installer in installers:
+            if installer["language"] == self.lang:
+                return installer
+
+        # English installers should be multilanguage ready
+        for installer in installers:
+            if installer["language"] == "en":
+                return installer
+
+        return None
+
+    def setup(self):
+        self.game_data = self.api_handler.get_item_data(self.game_id, expanded=['downloads', 'expanded_dlcs'])
+
+        # Filter linux installers
+        game_installers = self.filter_linux_installers(self.game_data["downloads"]["installers"])
+
+        self.languages_codes = [installer["language"] for installer in game_installers]
+
+        self.game_installer = self.find_matching_installer(game_installers)
+
+        if not self.dlc_only:
+            installer_data = dl_utils.get_json(self.api_handler, self.game_installer["files"][0]["downlink"])
+            game_install_handler = linux.InstallerHandler(installer_data["downlink"], self.game_id, self.api_handler.session)
+            self.installer_handlers.append(game_install_handler)
+
+        # Create dlc installer handlers
+        if self.dlcs_should_be_downloaded:
+            for dlc in self.game_data["expanded_dlcs"]:
+                if self.dlcs_should_be_downloaded and self.api_handler.does_user_own(dlc["id"]):
+                    if self.dlcs_list and str(dlc["id"]) not in self.dlcs_list:
+                        continue
+
+                    linux_installers = self.filter_linux_installers(dlc["downloads"]["installers"])
+                    installer = self.find_matching_installer(linux_installers)
+                    installer_data = dl_utils.get_json(self.api_handler, installer["files"][0]["downlink"])
+
+                    install_handler = linux.InstallerHandler(installer_data["downlink"],
+                                                             str(dlc["id"]),
+                                                             self.api_handler.session)
+
+                    self.installer_handlers.append(install_handler)
+
+        pool = ThreadPoolExecutor(self.allowed_threads)
+        futures = []
+        for handler in self.installer_handlers:
+            futures.append(pool.submit(handler.setup))
+
+        for future in as_completed(futures):
+            if future.cancelled():
+                break
builds["items"][0]["generation"] == 2 + else meta["product"]["installDirectory"] + ) + return install_dir + + +class Manager: + def __init__(self, generic_manager): + self.game_id = generic_manager.game_id + self.arguments = generic_manager.arguments + self.unknown_arguments = generic_manager.unknown_arguments + self.is_verifying = generic_manager.is_verifying + + self.api_handler = generic_manager.api_handler + self.allowed_threads = generic_manager.allowed_threads + self.folder_name = get_folder_name_from_windows_manifest(self.api_handler, self.game_id) + + if "path" in self.arguments: + self.path = self.arguments.path + if generic_manager.should_append_folder_name: + self.path = os.path.join(self.path, self.folder_name) + else: + self.path = "" + + self.lang = self.arguments.lang + self.dlcs_should_be_downloaded = self.arguments.dlcs + if self.arguments.dlcs_list: + self.dlcs_list = self.arguments.dlcs_list.split(",") + else: + self.dlcs_list = [] + self.dlc_only = self.arguments.dlc_only + + self.logger = logging.getLogger("LINUX") + self.logger.info("Initialized Linux Download Manager") + + self.game_data = None + + self.languages_codes = list() + self.downlink = None + self.game_files = list() + + self.installer_handlers = list() + + @staticmethod + def filter_linux_installers(installers): + return [installer for installer in installers if installer["os"] == "linux"] + + def find_matching_installer(self, installers): + if len(installers) == 1: + return installers[0] + for installer in installers: + if installer["language"] == self.lang: + return installer + + # English installers should be multilanguage ready + for installer in installers: + if installer["language"] == "en": + return installer + + return None + + def setup(self): + self.game_data = self.api_handler.get_item_data(self.game_id, expanded=['downloads', 'expanded_dlcs']) + + # Filter linux installers + game_installers = self.filter_linux_installers(self.game_data["downloads"]["installers"]) + + self.languages_codes = [installer["language"] for installer in game_installers] + + self.game_installer = self.find_matching_installer(game_installers) + + if not self.dlc_only: + installer_data = dl_utils.get_json(self.api_handler, self.game_installer["files"][0]["downlink"]) + game_install_handler = linux.InstallerHandler(installer_data["downlink"],self.game_id,self.api_handler.session) + self.installer_handlers.append(game_install_handler) + + # Create dlc installer handlers + if self.dlcs_should_be_downloaded: + for dlc in self.game_data["expanded_dlcs"]: + if self.dlcs_should_be_downloaded and self.api_handler.does_user_own(dlc["id"]): + if self.dlcs_list and str(dlc["id"]) not in self.dlcs_list: + continue + + linux_installers = self.filter_linux_installers(dlc["downloads"]["installers"]) + installer = self.find_matching_installer(linux_installers) + installer_data = dl_utils.get_json(self.api_handler, installer["files"][0]["downlink"]) + + install_handler = linux.InstallerHandler(installer_data["downlink"], + str(dlc["id"]), + self.api_handler.session) + + self.installer_handlers.append(install_handler) + + pool = ThreadPoolExecutor(self.allowed_threads) + futures = [] + for handler in self.installer_handlers: + futures.append(pool.submit(handler.setup)) + + for future in as_completed(futures): + if future.cancelled(): + break + + def calculate_download_sizes(self): + download_size = 0 + size = 0 + + for handler in self.installer_handlers: + for file in handler.central_directory.files: + if not 
file.file_name.startswith("data/noarch") or file.file_name.endswith("/"): + continue + size += file.uncompressed_size + download_size += file.compressed_size + return download_size, size + + def get_owned_dlcs(self): + dlcs = list() + for dlc in self.game_data["expanded_dlcs"]: + if self.api_handler.does_user_own(dlc["id"]): + if not dlc["downloads"]["installers"]: + continue + dlc_languages = [installer["language"] for installer in + self.filter_linux_installers(dlc["downloads"]["installers"])] + dlcs.append({"title": dlc["title"], "id": dlc["id"], "languages": [dlc_languages]}) + return dlcs + + def get_download_size(self): + self.setup() + + dlcs = self.get_owned_dlcs() + + download_size, disk_size = self.calculate_download_sizes() + + response = { + "download_size": download_size, + "disk_size": disk_size, + "dlcs": dlcs, + "languages": self.languages_codes, + "folder_name": self.folder_name, + "dependencies": [], + "versionName": self.game_installer["version"], + } + + return response + + def download(self): + self.setup() + manifest_path = os.path.join(self.path, '.gogdl-linux-manifest') + + cd_files = dict() + for handler in self.installer_handlers: + for file in handler.central_directory.files: + if not file.file_name.startswith("data/noarch") or file.file_name.endswith("/"): + continue + cd_files.update({file.file_name: file}) + + manifest_data = None + if os.path.exists(manifest_path): + with open(manifest_path, 'r') as f: + manifest_data = json.load(f) + + new: list[linux.CentralDirectoryFile] = list() + deleted: list[str] = list() + if manifest_data and not self.is_verifying: + manifest_files = dict() + for file in manifest_data['files']: + manifest_files.update({file['file_name']: file['crc32']}) + + for file_name in manifest_files: + if file_name in cd_files: + if cd_files[file_name].crc32 != manifest_files[file_name]: + new.append(cd_files[file_name]) + else: + deleted.append(file_name) + + for file_name in cd_files: + if file_name not in manifest_files: + new.append(cd_files[file_name]) + + else: + new = list(cd_files.values()) + + sources = dict() + for handler in self.installer_handlers: + sources.update({handler.product: handler.url}) + + print("New/changed files", len(new)) + print("Deleted", len(deleted)) + print("Total files", len(cd_files)) + + if self.is_verifying: + self.logger.info("Verifying files") + invalid = list() + for file in new: + path = file.file_name.replace("data/noarch", self.path) + + if not os.path.exists(path): + invalid.append(file) + else: + if file.is_symlink(): + continue + with open(path, 'rb') as fh: + sum = 0 + while data := fh.read(1024*1024): + sum = crc32(data, sum) + + if sum != file.crc32: + invalid.append(file) + if not len(invalid): + self.logger.info("All files look good") + return + new = invalid + + + diff = BaseDiff() + + final_files = list() + for file in new: + # Prepare file for download + # Calculate data offsets + handler = None + for ins in self.installer_handlers: + if ins.product == file.product: + handler = ins + break + + if not handler: + print("Orphan file found") + continue + + data_start = handler.start_of_archive_index + file.relative_local_file_offset + 34 + file.file_name_length + file.extra_field_length + c_size = file.compressed_size + size = file.uncompressed_size + method = file.compression_method + checksum = file.crc32 + + path = file.file_name.replace("data/noarch", self.path) + if file.is_symlink(): + data = handler.get_bytes_from_file(from_b=data_start, size=c_size, add_archive_index=False) + 
diff.links.append(DepotLink({"path": path, "target": os.path.normpath(os.path.join(dl_utils.parent_dir(path), data.decode()))})) + continue + file_permissions = int(bin(int.from_bytes(file.ext_file_attrs, "little"))[3:][:9]) + executable = (file_permissions & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)) != 0 + final_files.append(linux.LinuxFile(file.product, path, method, data_start, c_size, size, checksum, executable)) + + diff.new = final_files + + manager = ExecutingManager(self.api_handler, self.allowed_threads, self.path, None, diff, sources) + + manager.setup() + for file in deleted: + path = file.replace('data/noarch', self.path) + if os.path.exists(path): + os.remove(path) + cancelled = manager.run() + + if cancelled: + return + + new_manifest = dict() + + gameinfo_file = os.path.join(self.path, 'gameinfo') + if os.path.exists(gameinfo_file): + checksum = hashlib.md5() + with open(gameinfo_file, 'rb') as f: + checksum.update(f.read()) + new_manifest['info_checksum'] = checksum.hexdigest() + + new_manifest['files'] = [f.as_dict() for f in cd_files.values()] + + + os.makedirs(self.path, exist_ok=True) + with open(manifest_path, 'w') as f: + manifest_data = json.dump(new_manifest,f) diff --git a/gogdl/dl/managers/manager.py b/gogdl/dl/managers/manager.py new file mode 100644 index 0000000..8708b8b --- /dev/null +++ b/gogdl/dl/managers/manager.py @@ -0,0 +1,141 @@ +from dataclasses import dataclass +from multiprocessing import cpu_count +from sys import exit +import os +import logging +import json + +from gogdl import constants +from gogdl.dl.managers import linux, v1, v2 + +@dataclass +class UnsupportedPlatform(Exception): + pass + +class Manager: + def __init__(self, arguments, unknown_arguments, api_handler): + self.arguments = arguments + self.unknown_arguments = unknown_arguments + self.api_handler = api_handler + + self.platform = arguments.platform + self.should_append_folder_name = self.arguments.command == "download" + self.is_verifying = self.arguments.command == "repair" + self.game_id = arguments.id + self.branch = arguments.branch or None + if "workers_count" in arguments: + self.allowed_threads = int(arguments.workers_count) + else: + self.allowed_threads = cpu_count() + + self.logger = logging.getLogger("GENERIC DOWNLOAD_MANAGER") + + self.galaxy_api_data = None + + self.download_manager = None + self.builds = None + self.target_build = None + + def get_builds(self, build_platform): + password = '' if not self.arguments.password else '&password=' + self.arguments.password + generation = self.arguments.force_generation or "2" + response = self.api_handler.session.get( + f"{constants.GOG_CONTENT_SYSTEM}/products/{self.game_id}/os/{build_platform}/builds?&generation={generation}{password}" + ) + + if not response.ok: + raise UnsupportedPlatform() + data = response.json() + + if data['total_count'] == 0: + raise UnsupportedPlatform() + + return data + + def calculate_download_size(self, arguments, unknown_arguments): + self.setup_download_manager() + + download_size_response = self.download_manager.get_download_size() + download_size_response['builds'] = self.builds + + + print(json.dumps(download_size_response)) + + def download(self, arguments, unknown_arguments): + self.setup_download_manager() + + self.download_manager.download() + + def setup_download_manager(self): + # TODO: If content system for linux ever appears remove this if statement + # But keep the one below so we have some sort of fallback + # in case not all games were available in content system + if 
self.platform == "linux": + self.logger.info( + "Platform is Linux, redirecting download to Linux Native installer manager" + ) + + self.download_manager = linux.Manager(self) + + return + + try: + self.builds = self.get_builds(self.platform) + except UnsupportedPlatform: + if self.platform == "linux": + self.logger.info( + "Platform is Linux, redirecting download to Linux Native installer manager" + ) + + self.download_manager = linux.Manager(self) + + return + + self.logger.error(f"Game doesn't support content system api, unable to proceed using platform {self.platform}") + exit(1) + + # If Linux download ever progresses to this point, then it's time for some good party + + if len(self.builds["items"]) == 0: + self.logger.error("No builds found") + exit(1) + self.target_build = self.builds["items"][0] + + for build in self.builds["items"]: + if build["branch"] == None: + self.target_build = build + break + + for build in self.builds["items"]: + if build["branch"] == self.branch: + self.target_build = build + break + + if self.arguments.build: + # Find build + for build in self.builds["items"]: + if build["build_id"] == self.arguments.build: + self.target_build = build + break + self.logger.debug(f'Found build {self.target_build}') + + generation = self.target_build["generation"] + + if self.is_verifying: + manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id) + if os.path.exists(manifest_path): + with open(manifest_path, 'r') as f: + manifest_data = json.load(f) + generation = int(manifest_data['version']) + + # This code shouldn't run at all but it's here just in case GOG decides they will return different generation than requested one + # Of course assuming they will ever change their content system generation (I highly doubt they will) + if generation not in [1, 2]: + raise Exception("Unsupported depot version please report this") + + self.logger.info(f"Depot version: {generation}") + + if generation == 1: + self.download_manager = v1.Manager(self) + elif generation == 2: + self.download_manager = v2.Manager(self) diff --git a/gogdl/dl/managers/task_executor.py b/gogdl/dl/managers/task_executor.py new file mode 100644 index 0000000..e3515f2 --- /dev/null +++ b/gogdl/dl/managers/task_executor.py @@ -0,0 +1,796 @@ +import logging +import os +import signal +import time +from sys import exit +from threading import Thread +from collections import deque, Counter +from multiprocessing import Queue, Manager as ProcessingManager +from threading import Condition +from multiprocessing.shared_memory import SharedMemory +from queue import Empty +from typing import Union +from gogdl.dl import dl_utils + +from gogdl.dl.dl_utils import get_readable_size +from gogdl.dl.progressbar import ProgressBar +from gogdl.dl.workers import task_executor +from gogdl.dl.objects import generic, v2, v1, linux + +class ExecutingManager: + def __init__(self, api_handler, allowed_threads, path, support, diff, secure_links) -> None: + self.api_handler = api_handler + self.allowed_threads = allowed_threads + self.path = path + self.resume_file = os.path.join(path, '.gogdl-resume') + self.support = support or os.path.join(path, 'gog-support') + self.cache = os.path.join(path, '.gogdl-download-cache') + self.diff: generic.BaseDiff = diff + self.secure_links = secure_links + self.logger = logging.getLogger("TASK_EXEC") + + self.download_size = 0 + self.disk_size = 0 + + self.shared_memory = None + self.shm_segments = deque() + self.hash_map = dict() + self.v2_chunks_to_download = deque() + 
self.v1_chunks_to_download = deque() + self.linux_chunks_to_download = deque() + self.tasks = deque() + self.active_tasks = 0 + + self.processed_items = 0 + self.items_to_complete = 0 + + self.download_workers = list() + self.writer_worker = None + self.threads = list() + + self.shm_cond = Condition() + self.task_cond = Condition() + + self.running = True + + def setup(self): + self.logger.debug("Beginning executor manager setup") + self.logger.debug("Initializing queues") + # Queues + self.download_queue = Queue() + self.download_res_queue = Queue() + self.writer_queue = Queue() + self.writer_res_queue = Queue() + + self.download_speed_updates = Queue() + self.writer_speed_updates = Queue() + + self.manager = ProcessingManager() + self.shared_secure_links = self.manager.dict() + self.shared_secure_links.update(self.secure_links) + + # Required space for download to succeed + required_disk_size_delta = 0 + + # This can be either v1 File or v2 DepotFile + for f in self.diff.deleted + self.diff.removed_redist: + support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.DELETE_FILE | support_flag)) + if isinstance(f, v1.File): + required_disk_size_delta -= f.size + elif isinstance(f, v2.DepotFile): + required_disk_size_delta -= sum([ch['size'] for ch in f.chunks]) + + current_tmp_size = required_disk_size_delta + + shared_chunks_counter = Counter() + completed_files = set() + + missing_files = set() + mismatched_files = set() + + downloaded_v1 = dict() + downloaded_linux = dict() + cached = set() + + # Re-use caches + if os.path.exists(self.cache): + for cache_file in os.listdir(self.cache): + cached.add(cache_file) + + self.biggest_chunk = 0 + # Find biggest chunk to optimize how much memory is 'wasted' per chunk + # Also create hashmap for those files + for f in self.diff.new + self.diff.changed + self.diff.redist: + if isinstance(f, v1.File): + self.hash_map.update({f.path.lower(): f.hash}) + + elif isinstance(f, linux.LinuxFile): + self.hash_map.update({f.path.lower(): f.hash}) + + elif isinstance(f, v2.DepotFile): + first_chunk_checksum = f.chunks[0]['md5'] if len(f.chunks) else None + checksum = f.md5 or f.sha256 or first_chunk_checksum + self.hash_map.update({f.path.lower(): checksum}) + for i, chunk in enumerate(f.chunks): + shared_chunks_counter[chunk["compressedMd5"]] += 1 + if self.biggest_chunk < chunk["size"]: + self.biggest_chunk = chunk["size"] + + elif isinstance(f, v2.FileDiff): + first_chunk_checksum = f.file.chunks[0]['md5'] if len(f.file.chunks) else None + checksum = f.file.md5 or f.file.sha256 or first_chunk_checksum + self.hash_map.update({f.file.path.lower(): checksum}) + for i, chunk in enumerate(f.file.chunks): + if chunk.get("old_offset") is None: + shared_chunks_counter[chunk["compressedMd5"]] += 1 + if self.biggest_chunk < chunk["size"]: + self.biggest_chunk = chunk["size"] + + elif isinstance(f, v2.FilePatchDiff): + first_chunk_checksum = f.new_file.chunks[0]['md5'] if len(f.new_file.chunks) else None + checksum = f.new_file.md5 or f.new_file.sha256 or first_chunk_checksum + self.hash_map.update({f.new_file.path.lower(): checksum}) + for chunk in f.chunks: + shared_chunks_counter[chunk["compressedMd5"]] += 1 + if self.biggest_chunk < chunk["size"]: + self.biggest_chunk = chunk["size"] + + + if not self.biggest_chunk: + self.biggest_chunk = 20 * 1024 * 1024 + else: + # Have at least 10 MiB chunk size for V1 downloads + self.biggest_chunk = max(self.biggest_chunk, 
10 * 1024 * 1024) + + if os.path.exists(self.resume_file): + self.logger.info("Attempting to continue the download") + try: + missing = 0 + mismatch = 0 + + with open(self.resume_file, 'r') as f: + for line in f: + stored_hash, support, file_path = line.strip().split(':', 2) + + if support == 'support': + abs_path = os.path.join(self.support, file_path) + else: + abs_path = os.path.join(self.path, file_path) + + if not os.path.exists(dl_utils.get_case_insensitive_name(abs_path)): + missing_files.add(file_path.lower()) + missing += 1 + continue + + current_hash = self.hash_map.get(file_path.lower()) + if current_hash != stored_hash: + mismatched_files.add(file_path.lower()) + mismatch += 1 + continue + + completed_files.add(file_path.lower()) + if missing: + self.logger.warning(f'There are {missing} missing files; they will be re-downloaded') + if mismatch: + self.logger.warning(f'There are {mismatch} files changed since the last download; they will be re-downloaded') + + except Exception as e: + self.logger.error(f"Unable to resume download, continuing as normal: {e}") + + + # Create tasks for each chunk + for f in self.diff.new + self.diff.changed + self.diff.redist: + if isinstance(f, v1.File): + support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE + if f.size == 0: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE | support_flag)) + continue + + if f.path.lower() in completed_files: + downloaded_v1[f.hash] = f + continue + + required_disk_size_delta += f.size + # In case of the same file we can copy it over + if f.hash in downloaded_v1: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.COPY_FILE | support_flag, old_flags=generic.TaskFlag.SUPPORT if 'support' in downloaded_v1[f.hash].flags else generic.TaskFlag.NONE, old_file=downloaded_v1[f.hash].path)) + if 'executable' in f.flags: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag)) + continue + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE | support_flag)) + self.download_size += f.size + self.disk_size += f.size + size_left = f.size + chunk_offset = 0 + i = 0 + # Split the V1 file into chunks, so we can store it in shared memory (see the sketch below) + # This makes the file checksum useless during the download, but so be it...
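Carving one large file into fixed-size pieces is what lets V1 (and Linux installer) downloads fit the shared-memory segments. A minimal standalone sketch of the splitting loop that follows, written as a pure function with a hypothetical name:

def split_file(file_offset: int, file_size: int, max_chunk: int):
    # Yield (absolute offset, chunk size) pairs covering the whole file,
    # each at most max_chunk bytes (one shared-memory segment)
    chunk_offset = 0
    size_left = file_size
    while size_left:
        chunk_size = min(max_chunk, size_left)
        yield file_offset + chunk_offset, chunk_size
        chunk_offset += chunk_size
        size_left -= chunk_size

# Example: list(split_file(100, 25, 10)) == [(100, 10), (110, 10), (120, 5)]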
+ while size_left: + chunk_size = min(self.biggest_chunk, size_left) + offset = f.offset + chunk_offset + + task = generic.V1Task(f.product_id, i, offset, chunk_size, f.hash) + self.tasks.append(task) + self.v1_chunks_to_download.append((f.product_id, task.compressed_md5, offset, chunk_size)) + + chunk_offset += chunk_size + size_left -= chunk_size + i += 1 + + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag)) + if 'executable' in f.flags: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag)) + downloaded_v1[f.hash] = f + + elif isinstance(f, linux.LinuxFile): + if f.size == 0: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE)) + continue + + if f.path.lower() in completed_files: + downloaded_linux[f.hash] = f + continue + + required_disk_size_delta += f.size + if f.hash in downloaded_linux: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.COPY_FILE, old_flags=generic.TaskFlag.NONE, old_file=downloaded_linux[f.hash].path)) + if 'executable' in f.flags: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE)) + continue + + self.tasks.append(generic.FileTask(f.path+'.tmp', flags=generic.TaskFlag.OPEN_FILE)) + self.download_size += f.compressed_size + self.disk_size += f.size + size_left = f.compressed_size + chunk_offset = 0 + i = 0 + # Split V1 file by chunks, so we can store it in shared memory + # This makes checksum useless during the download, but well... + while size_left: + chunk_size = min(self.biggest_chunk, size_left) + offset = f.offset + chunk_offset + + task = generic.V1Task(f.product, i, offset, chunk_size, f.hash) + self.tasks.append(task) + self.linux_chunks_to_download.append((f.product, task.compressed_md5, offset, chunk_size)) + + chunk_offset += chunk_size + size_left -= chunk_size + i += 1 + + self.tasks.append(generic.FileTask(f.path + '.tmp', flags=generic.TaskFlag.CLOSE_FILE)) + if f.compression: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE)) + self.tasks.append(generic.ChunkTask(f.product, 0, f.hash+"_dec", f.hash+"_dec", f.compressed_size, f.compressed_size, True, False, 0, old_flags=generic.TaskFlag.ZIP_DEC, old_file=f.path+'.tmp')) + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE)) + self.tasks.append(generic.FileTask(f.path + '.tmp', flags=generic.TaskFlag.DELETE_FILE)) + else: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.DELETE_FILE | generic.TaskFlag.RENAME_FILE, old_file=f.path+'.tmp')) + + if 'executable' in f.flags: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE)) + downloaded_linux[f.hash] = f + + elif isinstance(f, v2.DepotFile): + support_flag = generic.TaskFlag.SUPPORT if 'support' in f.flags else generic.TaskFlag.NONE + if not len(f.chunks): + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_FILE | support_flag)) + continue + if f.path.lower() in completed_files: + continue + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.OPEN_FILE | support_flag)) + for i, chunk in enumerate(f.chunks): + new_task = generic.ChunkTask(f.product_id, i, chunk["compressedMd5"], chunk["md5"], chunk["size"], chunk["compressedSize"]) + is_cached = chunk["md5"] in cached + if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached: + self.v2_chunks_to_download.append((f.product_id, chunk["compressedMd5"])) + self.download_size += chunk['compressedSize'] + 
new_task.offload_to_cache = True + new_task.cleanup = True + cached.add(chunk["md5"]) + current_tmp_size += chunk['size'] + elif is_cached: + new_task.old_offset = 0 + # This can safely be absolute path, due to + # how os.path.join works in Writer + new_task.old_file = os.path.join(self.cache, chunk["md5"]) + else: + self.v2_chunks_to_download.append((f.product_id, chunk["compressedMd5"])) + self.download_size += chunk['compressedSize'] + self.disk_size += chunk['size'] + current_tmp_size += chunk['size'] + shared_chunks_counter[chunk["compressedMd5"]] -= 1 + new_task.cleanup = True + self.tasks.append(new_task) + if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0: + cached.remove(chunk["md5"]) + self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE)) + current_tmp_size -= chunk['size'] + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag)) + if 'executable' in f.flags: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.MAKE_EXE | support_flag)) + + elif isinstance(f, v2.FileDiff): + chunk_tasks = [] + reused = 0 + file_size = 0 + support_flag = generic.TaskFlag.SUPPORT if 'support' in f.file.flags else generic.TaskFlag.NONE + old_support_flag = generic.TaskFlag.SUPPORT if 'support' in f.old_file_flags else generic.TaskFlag.NONE + if f.file.path.lower() in completed_files: + continue + for i, chunk in enumerate(f.file.chunks): + chunk_task = generic.ChunkTask(f.file.product_id, i, chunk["compressedMd5"], chunk["md5"], chunk["size"], chunk["compressedSize"]) + file_size += chunk['size'] + if chunk.get("old_offset") is not None and f.file.path.lower() not in mismatched_files and f.file.path.lower() not in missing_files: + chunk_task.old_offset = chunk["old_offset"] + chunk_task.old_flags = old_support_flag + chunk_task.old_file = f.file.path + reused += 1 + + chunk_tasks.append(chunk_task) + else: + is_cached = chunk["md5"] in cached + if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached: + self.v2_chunks_to_download.append((f.file.product_id, chunk["compressedMd5"])) + self.download_size += chunk['compressedSize'] + chunk_task.offload_to_cache = True + cached.add(chunk["md5"]) + current_tmp_size += chunk['size'] + elif is_cached: + chunk_task.old_offset = 0 + chunk_task.old_file = os.path.join(self.cache, chunk["md5"]) + else: + self.v2_chunks_to_download.append((f.file.product_id, chunk["compressedMd5"])) + self.download_size += chunk['compressedSize'] + + shared_chunks_counter[chunk["compressedMd5"]] -= 1 + chunk_task.cleanup = True + chunk_tasks.append(chunk_task) + if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0: + cached.remove(chunk["md5"]) + self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE)) + current_tmp_size -= chunk['size'] + current_tmp_size += file_size + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + if reused: + self.tasks.append(generic.FileTask(f.file.path + ".tmp", flags=generic.TaskFlag.OPEN_FILE | support_flag)) + self.tasks.extend(chunk_tasks) + self.tasks.append(generic.FileTask(f.file.path + ".tmp", flags=generic.TaskFlag.CLOSE_FILE | support_flag)) + self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.RENAME_FILE | generic.TaskFlag.DELETE_FILE | support_flag, old_file=f.file.path + ".tmp")) + current_tmp_size -= file_size + else: + self.tasks.append(generic.FileTask(f.file.path, 
flags=generic.TaskFlag.OPEN_FILE | support_flag)) + self.tasks.extend(chunk_tasks) + self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.CLOSE_FILE | support_flag)) + if 'executable' in f.file.flags: + self.tasks.append(generic.FileTask(f.file.path, flags=generic.TaskFlag.MAKE_EXE | support_flag)) + self.disk_size += file_size + + elif isinstance(f, v2.FilePatchDiff): + chunk_tasks = [] + patch_size = 0 + old_file_size = 0 + out_file_size = 0 + if f.target.lower() in completed_files: + continue + + # Calculate output size + for chunk in f.new_file.chunks: + out_file_size += chunk['size'] + + # Calculate old size + for chunk in f.old_file.chunks: + old_file_size += chunk['size'] + + # Make chunk tasks + for i, chunk in enumerate(f.chunks): + chunk_task = generic.ChunkTask(f'{f.new_file.product_id}_patch', i, chunk['compressedMd5'], chunk['md5'], chunk['size'], chunk['compressedSize']) + chunk_task.cleanup = True + patch_size += chunk['size'] + is_cached = chunk["md5"] in cached + if shared_chunks_counter[chunk["compressedMd5"]] > 1 and not is_cached: + self.v2_chunks_to_download.append((f'{f.new_file.product_id}_patch', chunk["compressedMd5"])) + chunk_task.offload_to_cache = True + cached.add(chunk["md5"]) + self.download_size += chunk['compressedSize'] + current_tmp_size += chunk['size'] + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + elif is_cached: + chunk_task.old_offset = 0 + chunk_task.old_file = os.path.join(self.cache, chunk["md5"]) + else: + self.v2_chunks_to_download.append((f'{f.new_file.product_id}_patch', chunk["compressedMd5"])) + self.download_size += chunk['compressedSize'] + shared_chunks_counter[chunk['compressedMd5']] -= 1 + chunk_tasks.append(chunk_task) + if is_cached and shared_chunks_counter[chunk["compressedMd5"]] == 0: + cached.remove(chunk["md5"]) + self.tasks.append(generic.FileTask(os.path.join(self.cache, chunk["md5"]), flags=generic.TaskFlag.DELETE_FILE)) + current_tmp_size -= chunk['size'] + + self.disk_size += patch_size + current_tmp_size += patch_size + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + + # Download patch + self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.OPEN_FILE)) + self.tasks.extend(chunk_tasks) + self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.CLOSE_FILE)) + + current_tmp_size += out_file_size + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + + # Apply patch to .tmp file + self.tasks.append(generic.FileTask(f.target + ".tmp", flags=generic.TaskFlag.PATCH, patch_file=(f.target + '.delta'), old_file=f.source)) + current_tmp_size -= patch_size + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + # Remove patch file + self.tasks.append(generic.FileTask(f.target + ".delta", flags=generic.TaskFlag.DELETE_FILE)) + current_tmp_size -= old_file_size + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + # Move new file to old one's location + self.tasks.append(generic.FileTask(f.target, flags=generic.TaskFlag.RENAME_FILE | generic.TaskFlag.DELETE_FILE, old_file=f.target + ".tmp")) + self.disk_size += out_file_size + + required_disk_size_delta = max(current_tmp_size, required_disk_size_delta) + + + for f in self.diff.links: + self.tasks.append(generic.FileTask(f.path, flags=generic.TaskFlag.CREATE_SYMLINK, old_file=f.target)) + + self.items_to_complete = len(self.tasks) + + print(get_readable_size(self.download_size), 
self.download_size) + print(get_readable_size(required_disk_size_delta), required_disk_size_delta) + + return dl_utils.check_free_space(required_disk_size_delta, self.path) + + + def run(self): + self.shared_memory = SharedMemory(create=True, size=1024*1024*1024) + self.logger.debug(f"Created shared memory {self.shared_memory.size / 1024 / 1024:.02f} MiB") + + chunk_size = self.biggest_chunk + for i in range(int(self.shared_memory.size / chunk_size)): + segment = generic.MemorySegment(offset=i*chunk_size, end=i*chunk_size+chunk_size) + self.shm_segments.append(segment) + self.logger.debug(f"Created shm segments {len(self.shm_segments)}, chunk size = {self.biggest_chunk / 1024 / 1024:.02f} MiB") + interrupted = False + self.fatal_error = False + def handle_sig(num, frame): + nonlocal interrupted + self.interrupt_shutdown() + interrupted = True + exit(-num) + + try: + self.threads.append(Thread(target=self.download_manager, args=(self.task_cond, self.shm_cond))) + self.threads.append(Thread(target=self.process_task_results, args=(self.task_cond,))) + self.threads.append(Thread(target=self.process_writer_task_results, args=(self.shm_cond,))) + self.progress = ProgressBar(self.disk_size, self.download_speed_updates, self.writer_speed_updates) + + # Spawn workers + for _ in range(self.allowed_threads): + worker = task_executor.Download(self.shared_memory.name, self.download_queue, self.download_res_queue, self.download_speed_updates, self.shared_secure_links) + worker.start() + self.download_workers.append(worker) + + self.writer_worker = task_executor.Writer(self.shared_memory.name, self.writer_queue, self.writer_res_queue, self.writer_speed_updates, self.cache) + self.writer_worker.start() + + [th.start() for th in self.threads] + + signal.signal(signal.SIGTERM, handle_sig) + signal.signal(signal.SIGINT, handle_sig) + + if self.disk_size: + self.progress.start() + + while self.processed_items < self.items_to_complete and not interrupted and not self.fatal_error: + time.sleep(1) + if interrupted: + return True + except KeyboardInterrupt: + return True + + self.shutdown() + return self.fatal_error + + def interrupt_shutdown(self): + self.progress.completed = True + self.running = False + + with self.task_cond: + self.task_cond.notify() + + with self.shm_cond: + self.shm_cond.notify() + + for t in self.threads: + t.join(timeout=5.0) + if t.is_alive(): + self.logger.warning(f'Thread did not terminate! 
{repr(t)}') + + for child in self.download_workers: + child.join(timeout=5.0) + if child.exitcode is None: + child.terminate() + + # Clean queues + for queue in [self.writer_res_queue, self.writer_queue, self.download_queue, self.download_res_queue, self.download_speed_updates, self.writer_speed_updates]: + try: + while True: + _ = queue.get_nowait() + except Empty: + queue.close() + queue.join_thread() + + self.shared_memory.close() + self.shared_memory.unlink() + self.shared_memory = None + + + def shutdown(self): + self.logger.debug("Stopping progressbar") + self.progress.completed = True + + + self.logger.debug("Sending terminate instruction to workers") + for _ in range(self.allowed_threads): + self.download_queue.put(generic.TerminateWorker()) + + self.writer_queue.put(generic.TerminateWorker()) + for worker in self.download_workers: + worker.join(timeout=2) + if worker.is_alive(): + self.logger.warning("Forcefully terminating download workers") + worker.terminate() + self.writer_worker.join(timeout=10) + + self.writer_queue.close() + self.writer_res_queue.close() + self.download_queue.close() + self.download_res_queue.close() + self.download_speed_updates.close() + self.writer_speed_updates.close() + + self.logger.debug("Unlinking shared memory") + if self.shared_memory: + self.shared_memory.close() + self.shared_memory.unlink() + self.shared_memory = None + + self.running = False + with self.task_cond: + self.task_cond.notify() + + with self.shm_cond: + self.shm_cond.notify() + + try: + if os.path.exists(self.resume_file): + os.remove(self.resume_file) + except OSError: + self.logger.error("Failed to remove resume file") + + def download_manager(self, task_cond: Condition, shm_cond: Condition): + self.logger.debug("Starting download scheduler") + no_shm = False + while self.running: + while self.active_tasks <= self.allowed_threads * 2 and (self.v2_chunks_to_download or self.v1_chunks_to_download or self.linux_chunks_to_download): + + try: + memory_segment = self.shm_segments.popleft() + no_shm = False + except IndexError: + no_shm = True + break + + if self.v1_chunks_to_download: + product_id, chunk_id, offset, chunk_size = self.v1_chunks_to_download.popleft() + + try: + self.download_queue.put(task_executor.DownloadTask1(product_id, offset, chunk_size, chunk_id, memory_segment)) + self.logger.debug(f"Pushed v1 download to queue {chunk_id} {product_id} {offset} {chunk_size}") + self.active_tasks += 1 + continue + except Exception as e: + self.logger.warning(f"Failed to push v1 task to download {e}") + self.v1_chunks_to_download.appendleft((product_id, chunk_id, offset, chunk_size)) + self.shm_segments.appendleft(memory_segment) + break + elif self.linux_chunks_to_download: + product_id, chunk_id, offset, chunk_size = self.linux_chunks_to_download.popleft() + try: + self.download_queue.put(task_executor.DownloadTask1(product_id, offset, chunk_size, chunk_id, memory_segment)) + self.logger.debug(f"Pushed linux download to queue {chunk_id} {product_id} {offset} {chunk_size}") + self.active_tasks += 1 + continue + except Exception as e: + self.logger.warning(f"Failed to push linux task to download {e}") + self.linux_chunks_to_download.appendleft((product_id, chunk_id, offset, chunk_size)) + self.shm_segments.appendleft(memory_segment) + break + + elif self.v2_chunks_to_download: + product_id, chunk_hash = self.v2_chunks_to_download.popleft() + try: + self.download_queue.put(task_executor.DownloadTask2(product_id, chunk_hash, memory_segment), timeout=1) + self.logger.debug(f"Pushed DownloadTask2 for {chunk_hash}")
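The scheduler above hands each download task a fixed-size window into one big SharedMemory block; the writer returns windows via RELEASE_MEM, and both sides wait on Conditions when they run dry. Conceptually it is a simple segment pool, sketched below in a simplified single-threaded form with hypothetical names:

from collections import deque
from dataclasses import dataclass
from multiprocessing.shared_memory import SharedMemory

@dataclass
class Segment:
    offset: int
    end: int

class SegmentPool:
    def __init__(self, total_size: int, segment_size: int):
        self.shm = SharedMemory(create=True, size=total_size)
        self.free = deque(Segment(i * segment_size, (i + 1) * segment_size)
                          for i in range(total_size // segment_size))

    def acquire(self):
        # None means exhausted; the real scheduler then waits on a Condition
        return self.free.popleft() if self.free else None

    def release(self, segment):
        # Called once the writer has flushed this segment to disk
        self.free.appendleft(segment)

    def close(self):
        self.shm.close()
        self.shm.unlink()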
{chunk_hash}") + self.active_tasks += 1 + except Exception as e: + self.logger.warning(f"Failed to push task to download {e}") + self.v2_chunks_to_download.appendleft((product_id, chunk_hash)) + self.shm_segments.appendleft(memory_segment) + break + + else: + with task_cond: + self.logger.debug("Waiting for more tasks") + task_cond.wait(timeout=1.0) + continue + + if no_shm: + with shm_cond: + self.logger.debug(f"Waiting for more memory") + shm_cond.wait(timeout=1.0) + + self.logger.debug("Download scheduler out..") + + + def process_task_results(self, task_cond: Condition): + self.logger.debug("Download results collector starting") + ready_chunks = dict() + + try: + task = self.tasks.popleft() + except IndexError: + task = None + + current_dest = self.path + current_file = '' + + while task and self.running: + if isinstance(task, generic.FileTask): + try: + task_dest = self.path + old_destination = None + if task.flags & generic.TaskFlag.SUPPORT: + task_dest = self.support + if task.old_flags & generic.TaskFlag.SUPPORT: + old_destination = self.support + + writer_task = task_executor.WriterTask(task_dest, task.path, task.flags, old_destination=old_destination, old_file=task.old_file, patch_file=task.patch_file) + self.writer_queue.put(writer_task, timeout=1) + if task.flags & generic.TaskFlag.OPEN_FILE: + current_file = task.path + current_dest = task_dest + except Exception as e: + self.tasks.appendleft(task) + self.logger.warning(f"Failed to add queue element {e}") + continue + + try: + task: Union[generic.ChunkTask, generic.V1Task] = self.tasks.popleft() + except IndexError: + break + continue + + while ((task.compressed_md5 in ready_chunks) or task.old_file): + shm = None + if not task.old_file: + shm = ready_chunks[task.compressed_md5].task.memory_segment + + try: + self.logger.debug(f"Adding {task.compressed_md5} to writer") + flags = generic.TaskFlag.NONE + old_destination = None + if task.cleanup: + flags |= generic.TaskFlag.RELEASE_MEM + if task.offload_to_cache: + flags |= generic.TaskFlag.OFFLOAD_TO_CACHE + if task.old_flags & generic.TaskFlag.ZIP_DEC: + flags |= generic.TaskFlag.ZIP_DEC + if task.old_flags & generic.TaskFlag.SUPPORT: + old_destination = self.support + self.writer_queue.put(task_executor.WriterTask(current_dest, current_file, flags=flags, shared_memory=shm, old_destination=old_destination, old_file=task.old_file, old_offset=task.old_offset, size=task.size, hash=task.md5), timeout=1) + except Exception as e: + self.logger.error(f"Adding to writer queue failed {e}") + break + + if task.cleanup and not task.old_file: + del ready_chunks[task.compressed_md5] + + try: + task = self.tasks.popleft() + if isinstance(task, generic.FileTask): + break + except IndexError: + task = None + break + + else: + try: + res: task_executor.DownloadTaskResult = self.download_res_queue.get(timeout=1) + if res.success: + self.logger.debug(f"Chunk {res.task.compressed_sum} ready") + ready_chunks[res.task.compressed_sum] = res + self.progress.update_downloaded_size(res.download_size) + self.progress.update_decompressed_size(res.decompressed_size) + self.active_tasks -= 1 + else: + self.logger.warning(f"Chunk download failed, reason {res.fail_reason}") + try: + self.download_queue.put(res.task, timeout=1) + except Exception as e: + self.logger.warning("Failed to resubmit download task, pushing to chunks queue") + + with task_cond: + task_cond.notify() + except Empty: + pass + except Exception as e: + self.logger.warning(f"Unhandled exception {e}") + + self.logger.debug("Download results 
collector exiting...") + + def process_writer_task_results(self, shm_cond: Condition): + self.logger.debug("Starting writer results collector") + while self.running: + try: + res: task_executor.WriterTaskResult = self.writer_res_queue.get(timeout=1) + + if isinstance(res.task, generic.TerminateWorker): + break + + if res.success and res.task.flags & generic.TaskFlag.CLOSE_FILE and not res.task.file_path.endswith('.delta'): + if res.task.file_path.endswith('.tmp'): + res.task.file_path = res.task.file_path[:-4] + + checksum = self.hash_map.get(res.task.file_path.lower()) + if not checksum: + self.logger.warning(f"No checksum for closed file, unable to push to resume file {res.task.file_path}") + else: + if res.task.flags & generic.TaskFlag.SUPPORT: + support = "support" + else: + support = "" + + with open(self.resume_file, 'a') as f: + f.write(f"{checksum}:{support}:{res.task.file_path}\n") + + if res.success and res.task.flags & generic.TaskFlag.PATCH: + if res.task.file_path.endswith('.tmp'): + res.task.file_path = res.task.file_path[:-4] + + checksum = self.hash_map.get(res.task.file_path.lower()) + if not checksum: + self.logger.warning(f"No checksum for patched file, unable to push to resume file {res.task.file_path}") + else: + if res.task.flags & generic.TaskFlag.SUPPORT: + support = "support" + else: + support = "" + + with open(self.resume_file, 'a') as f: + f.write(f"{checksum}:{support}:{res.task.file_path}\n") + + if not res.success: + self.logger.fatal("Task writer failed") + self.fatal_error = True + return + + self.progress.update_bytes_written(res.written) + if res.task.flags & generic.TaskFlag.RELEASE_MEM and res.task.shared_memory: + self.logger.debug(f"Releasing memory {res.task.shared_memory}") + self.shm_segments.appendleft(res.task.shared_memory) + with shm_cond: + shm_cond.notify() + self.processed_items += 1 + + except Empty: + continue + + self.logger.debug("Writer results collector exiting...") + diff --git a/gogdl/dl/managers/v1.py b/gogdl/dl/managers/v1.py new file mode 100644 index 0000000..6955133 --- /dev/null +++ b/gogdl/dl/managers/v1.py @@ -0,0 +1,290 @@ +# Handle old games downloading via V1 depot system +# V1 is there since GOG 1.0 days, it has no compression and relies on downloading chunks from big main.bin file +import hashlib +from sys import exit +import os +import logging +import json +from typing import Union +from gogdl import constants +from gogdl.dl import dl_utils +from gogdl.dl.managers.dependencies import DependenciesManager +from gogdl.dl.managers.task_executor import ExecutingManager +from gogdl.dl.workers.task_executor import DownloadTask1, DownloadTask2, WriterTask +from gogdl.dl.objects import v1 + + +class Manager: + def __init__(self, generic_manager): + self.game_id = generic_manager.game_id + self.arguments = generic_manager.arguments + self.unknown_arguments = generic_manager.unknown_arguments + if "path" in self.arguments: + self.path = self.arguments.path + else: + self.path = "" + + if "support_path" in self.arguments: + self.support = self.arguments.support_path + else: + self.support = "" + + self.api_handler = generic_manager.api_handler + self.should_append_folder_name = generic_manager.should_append_folder_name + self.is_verifying = generic_manager.is_verifying + self.allowed_threads = generic_manager.allowed_threads + + self.platform = generic_manager.platform + + self.builds = generic_manager.builds + self.build = generic_manager.target_build + self.version_name = self.build["version_name"] + + self.lang = 
self.arguments.lang or "English" + self.dlcs_should_be_downloaded = self.arguments.dlcs + if self.arguments.dlcs_list: + self.dlcs_list = self.arguments.dlcs_list.split(",") + + else: + self.dlcs_list = list() + + self.dlc_only = self.arguments.dlc_only + + self.manifest = None + self.meta = None + + self.logger = logging.getLogger("V1") + self.logger.info("Initialized V1 Download Manager") + + # Get manifest of selected build + def get_meta(self): + meta_url = self.build["link"] + self.meta, headers = dl_utils.get_zlib_encoded(self.api_handler, meta_url) + if not self.meta: + raise Exception("There was an error obtaining meta") + if headers: + self.version_etag = headers.get("Etag") + + # Append folder name when downloading + if self.should_append_folder_name: + self.path = os.path.join(self.path, self.meta["product"]["installDirectory"]) + + def get_download_size(self): + self.get_meta() + dlcs = self.get_dlcs_user_owns(True) + self.manifest = v1.Manifest(self.platform, self.meta, self.lang, dlcs, self.api_handler, False) + + build = self.api_handler.get_dependencies_repo() + repository = dl_utils.get_zlib_encoded(self.api_handler, build['repository_manifest'])[0] or {} + + size_data = self.manifest.calculate_download_size() + + for depot in repository.get("depots", []): + if depot["dependencyId"] in self.manifest.dependencies_ids: + if not depot["executable"]["path"].startswith("__redist"): + size_data[self.game_id]['*']["download_size"] += depot["compressedSize"] + size_data[self.game_id]['*']["disk_size"] += depot["size"] + + available_branches = set([build["branch"] for build in self.builds["items"] if build["branch"]]) + available_branches_list = [None] + list(available_branches) + + for dlc in dlcs: + dlc.update({"size": size_data[dlc["id"]]}) + + response = { + "size": size_data[self.game_id], + "dlcs": dlcs, + "buildId": self.build["legacy_build_id"], + "languages": self.manifest.list_languages(), + "folder_name": self.meta["product"]["installDirectory"], + "dependencies": [dep.id for dep in self.manifest.dependencies], + "versionEtag": self.version_etag, + "versionName": self.version_name, + "available_branches": available_branches_list + } + return response + + + def get_dlcs_user_owns(self, info_command=False, requested_dlcs=None): + if requested_dlcs is None: + requested_dlcs = list() + if not self.dlcs_should_be_downloaded and not info_command: + return [] + self.logger.debug("Getting dlcs user owns") + dlcs = [] + if len(requested_dlcs) > 0: + for product in self.meta["product"]["gameIDs"]: + if ( + product["gameID"] != self.game_id # Check if not base game + and product["gameID"] in requested_dlcs # Check if requested by user + and self.api_handler.does_user_own(product["gameID"]) # Check if owned + ): + dlcs.append({"title": product["name"]["en"], "id": product["gameID"]}) + return dlcs + for product in self.meta["product"]["gameIDs"]: + # Check if not base game and if owned + if product["gameID"] != self.game_id and self.api_handler.does_user_own( + product["gameID"] + ): + dlcs.append({"title": product["name"]["en"], "id": product["gameID"]}) + return dlcs + + + def download(self): + manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id) + old_manifest = None + + # Load old manifest + if os.path.exists(manifest_path): + with open(manifest_path, "r") as f_handle: + try: + json_data = json.load(f_handle) + old_manifest = dl_utils.create_manifest_class(json_data, self.api_handler) + except json.JSONDecodeError: + old_manifest = None + + if self.is_verifying: + 
if old_manifest: + self.manifest = old_manifest + old_manifest = None + dlcs_user_owns = self.manifest.dlcs or [] + else: + raise Exception("No manifest stored locally, unable to verify") + else: + self.get_meta() + dlcs_user_owns = self.get_dlcs_user_owns(requested_dlcs=self.dlcs_list) + + if self.arguments.dlcs_list: + self.logger.info(f"Requested dlcs {self.arguments.dlcs_list}") + self.logger.info(f"Owned dlcs {dlcs_user_owns}") + self.logger.debug("Parsing manifest") + self.manifest = v1.Manifest(self.platform, self.meta, self.lang, dlcs_user_owns, self.api_handler, self.dlc_only) + + if self.manifest: + self.manifest.get_files() + + if old_manifest: + old_manifest.get_files() + + diff = v1.ManifestDiff.compare(self.manifest, old_manifest) + + self.logger.info(f"{diff}") + + + has_dependencies = len(self.manifest.dependencies) > 0 + + secure_link_endpoints_ids = [product["id"] for product in dlcs_user_owns] + if not self.dlc_only: + secure_link_endpoints_ids.append(self.game_id) + secure_links = dict() + for product_id in secure_link_endpoints_ids: + secure_links.update( + { + product_id: dl_utils.get_secure_link( + self.api_handler, f"/{self.platform}/{self.manifest.data['product']['timestamp']}/", product_id, generation=1 + ) + } + ) + + dependency_manager = DependenciesManager([dep.id for dep in self.manifest.dependencies], self.path, self.allowed_threads, self.api_handler, download_game_deps_only=True) + + # Find dependencies that are no longer used + if old_manifest: + removed_dependencies = [id for id in old_manifest.dependencies_ids if id not in self.manifest.dependencies_ids] + + for depot in dependency_manager.repository["depots"]: + if depot["dependencyId"] in removed_dependencies and not depot["executable"]["path"].startswith("__redist"): + diff.removed_redist += dependency_manager.get_files_for_depot_manifest(depot['manifest']) + + if has_dependencies: + secure_links.update({'redist': dl_utils.get_dependency_link(self.api_handler)}) + + diff.redist = dependency_manager.get(return_files=True) or [] + + + if not len(diff.changed) and not len(diff.deleted) and not len(diff.new) and not len(diff.redist) and not len(diff.removed_redist): + self.logger.info("Nothing to do") + return + + if self.is_verifying: + new_diff = v1.ManifestDiff() + invalid = 0 + for file in diff.new: + # V1 only files + if not file.size: + continue + + if 'support' in file.flags: + file_path = os.path.join(self.support, file.path) + else: + file_path = os.path.join(self.path, file.path) + file_path = dl_utils.get_case_insensitive_name(file_path) + + if not os.path.exists(file_path): + invalid += 1 + new_diff.new.append(file) + continue + + with open(file_path, 'rb') as fh: + file_sum = hashlib.md5() + + while chunk := fh.read(8 * 1024 * 1024): + file_sum.update(chunk) + + if file_sum.hexdigest() != file.hash: + invalid += 1 + new_diff.new.append(file) + continue + + for file in diff.redist: + if len(file.chunks) == 0: + continue + file_path = dl_utils.get_case_insensitive_name(os.path.join(self.path, file.path)) + if not os.path.exists(file_path): + invalid += 1 + new_diff.redist.append(file) + continue + valid = True + with open(file_path, 'rb') as fh: + for chunk in file.chunks: + chunk_sum = hashlib.md5() + chunk_data = fh.read(chunk['size']) + chunk_sum.update(chunk_data) + + if chunk_sum.hexdigest() != chunk['md5']: + valid = False + break + if not valid: + invalid += 1 + new_diff.redist.append(file) + continue + if not invalid: + self.logger.info("All files look good") + return + + 
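Both repair paths above boil down to the same pattern: stream the file once and compare checksums recorded in the manifest, bailing out on the first mismatch so broken files can be queued for re-download. A minimal sketch of the chunk-wise variant (hypothetical helper name; chunks are dicts with 'size' and 'md5' keys as in the manifests):

import hashlib

def verify_chunked_file(path: str, chunks: list) -> bool:
    # One sequential pass: every manifest chunk stores the MD5 of its bytes
    with open(path, "rb") as fh:
        for chunk in chunks:
            data = fh.read(chunk["size"])
            if hashlib.md5(data).hexdigest() != chunk["md5"]:
                return False
    return True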
self.logger.info(f"Found {invalid} broken files, repairing...") + diff = new_diff + + executor = ExecutingManager(self.api_handler, self.allowed_threads, self.path, self.support, diff, secure_links) + success = executor.setup() + if not success: + print('Unable to proceed, Not enough disk space') + exit(2) + dl_utils.prepare_location(self.path) + + for dir in self.manifest.dirs: + manifest_dir_path = os.path.join(self.path, dir.path) + dl_utils.prepare_location(dl_utils.get_case_insensitive_name(manifest_dir_path)) + + cancelled = executor.run() + + if cancelled: + return + + dl_utils.prepare_location(constants.MANIFESTS_DIR) + if self.manifest: + with open(manifest_path, 'w') as f_handle: + data = self.manifest.serialize_to_json() + f_handle.write(data) + diff --git a/gogdl/dl/managers/v2.py b/gogdl/dl/managers/v2.py new file mode 100644 index 0000000..f5a082b --- /dev/null +++ b/gogdl/dl/managers/v2.py @@ -0,0 +1,306 @@ +# Handle newer depots download +# This was introduced in GOG Galaxy 2.0, it features compression and files split by chunks +import json +from sys import exit +from gogdl.dl import dl_utils +import gogdl.dl.objects.v2 as v2 +import hashlib +from gogdl.dl.managers import dependencies +from gogdl.dl.managers.task_executor import ExecutingManager +from gogdl.dl.workers import task_executor +from gogdl import constants +import os +import logging + + +class Manager: + def __init__(self, generic_manager): + self.game_id = generic_manager.game_id + self.arguments = generic_manager.arguments + self.unknown_arguments = generic_manager.unknown_arguments + if "path" in self.arguments: + self.path = self.arguments.path + else: + self.path = "" + if "support_path" in self.arguments: + self.support = self.arguments.support_path + else: + self.support = "" + + self.allowed_threads = generic_manager.allowed_threads + + self.api_handler = generic_manager.api_handler + self.should_append_folder_name = generic_manager.should_append_folder_name + self.is_verifying = generic_manager.is_verifying + + self.builds = generic_manager.builds + self.build = generic_manager.target_build + self.version_name = self.build["version_name"] + + self.lang = self.arguments.lang or "en-US" + self.dlcs_should_be_downloaded = self.arguments.dlcs + if self.arguments.dlcs_list: + self.dlcs_list = self.arguments.dlcs_list.split(",") + else: + self.dlcs_list = list() + self.dlc_only = self.arguments.dlc_only + + self.manifest = None + self.stop_all_threads = False + + self.logger = logging.getLogger("V2") + self.logger.info("Initialized V2 Download Manager") + + def get_download_size(self): + self.get_meta() + dlcs = self.get_dlcs_user_owns(info_command=True) + self.manifest = v2.Manifest(self.meta, self.lang, dlcs, self.api_handler, False) + + build = self.api_handler.get_dependencies_repo() + repository = dl_utils.get_zlib_encoded(self.api_handler, build['repository_manifest'])[0] or {} + + size_data = self.manifest.calculate_download_size() + + for depot in repository["depots"]: + if depot["dependencyId"] in self.manifest.dependencies_ids: + if not depot["executable"]["path"].startswith("__redist"): + size_data[self.game_id]['*']["download_size"] += depot["compressedSize"] + size_data[self.game_id]['*']["disk_size"] += depot["size"] + + available_branches = set([build["branch"] for build in self.builds["items"] if build["branch"]]) + available_branches_list = [None] + list(available_branches) + + + for dlc in dlcs: + dlc.update({"size": size_data[dlc["id"]]}) + + response = { + "size": size_data[self.game_id], + 
"dlcs": dlcs, + "buildId": self.build["build_id"], + "languages": self.manifest.list_languages(), + "folder_name": self.meta["installDirectory"], + "dependencies": self.manifest.dependencies_ids, + "versionEtag": self.version_etag, + "versionName": self.version_name, + "available_branches": available_branches_list + } + return response + + def download(self): + manifest_path = os.path.join(constants.MANIFESTS_DIR, self.game_id) + old_manifest = None + + # Load old manifest + if os.path.exists(manifest_path): + self.logger.debug(f"Loading existing manifest for game {self.game_id}") + with open(manifest_path, 'r') as f_handle: + try: + json_data = json.load(f_handle) + self.logger.info("Creating Manifest instance from existing manifest") + old_manifest = dl_utils.create_manifest_class(json_data, self.api_handler) + except json.JSONDecodeError: + old_manifest = None + pass + + if self.is_verifying: + if old_manifest: + self.logger.warning("Verifying - ignoring obtained manifest in favor of existing one") + self.manifest = old_manifest + dlcs_user_owns = self.manifest.dlcs or [] + old_manifest = None + else: + raise Exception("No manifest stored locally, unable to verify") + else: + self.get_meta() + dlcs_user_owns = self.get_dlcs_user_owns( + requested_dlcs=self.dlcs_list + ) + + if self.arguments.dlcs_list: + self.logger.info(f"Requested dlcs {self.arguments.dlcs_list}") + self.logger.info(f"Owned dlcs {dlcs_user_owns}") + + self.logger.debug("Parsing manifest") + self.manifest = v2.Manifest( + self.meta, self.lang, dlcs_user_owns, self.api_handler, self.dlc_only + ) + patch = None + if self.manifest: + self.logger.debug("Requesting files of primary manifest") + self.manifest.get_files() + if old_manifest: + self.logger.debug("Requesting files of previous manifest") + old_manifest.get_files() + patch = v2.Patch.get(self.manifest, old_manifest, self.lang, dlcs_user_owns, self.api_handler) + if not patch: + self.logger.info("No patch found, falling back to chunk based updates") + + diff = v2.ManifestDiff.compare(self.manifest, old_manifest, patch) + self.logger.info(diff) + + + dependencies_manager = dependencies.DependenciesManager(self.manifest.dependencies_ids, self.path, + self.arguments.workers_count, self.api_handler, download_game_deps_only=True) + + # Find dependencies that are no longer used + if old_manifest: + removed_dependencies = [id for id in old_manifest.dependencies_ids if id not in self.manifest.dependencies_ids] + + for depot in dependencies_manager.repository["depots"]: + if depot["dependencyId"] in removed_dependencies and not depot["executable"]["path"].startswith("__redist"): + diff.removed_redist += dependencies_manager.get_files_for_depot_manifest(depot['manifest']) + + + diff.redist = dependencies_manager.get(True) or [] + + if not len(diff.changed) and not len(diff.deleted) and not len(diff.new) and not len(diff.redist) and not len(diff.removed_redist): + self.logger.info("Nothing to do") + return + secure_link_endpoints_ids = [product["id"] for product in dlcs_user_owns] + if not self.dlc_only: + secure_link_endpoints_ids.append(self.game_id) + secure_links = dict() + for product_id in secure_link_endpoints_ids: + secure_links.update( + { + product_id: dl_utils.get_secure_link( + self.api_handler, "/", product_id + ) + } + ) + if patch: + secure_links.update( + { + f"{product_id}_patch": dl_utils.get_secure_link( + self.api_handler, "/", product_id, root="/patches/store" + ) + } + ) + + if len(diff.redist) > 0: + secure_links.update( + { + 'redist': 
dl_utils.get_dependency_link(self.api_handler) + } + ) + + if self.is_verifying: + new_diff = v2.ManifestDiff() + invalid = 0 + + for file in diff.new: + if len(file.chunks) == 0: + continue + if 'support' in file.flags: + file_path = os.path.join(self.support, file.path) + else: + file_path = os.path.join(self.path, file.path) + file_path = dl_utils.get_case_insensitive_name(file_path) + if not os.path.exists(file_path): + invalid += 1 + new_diff.new.append(file) + continue + valid = True + with open(file_path, 'rb') as fh: + for chunk in file.chunks: + chunk_sum = hashlib.md5() + chunk_data = fh.read(chunk['size']) + chunk_sum.update(chunk_data) + + if chunk_sum.hexdigest() != chunk['md5']: + valid = False + break + if not valid: + invalid += 1 + new_diff.new.append(file) + continue + + for file in diff.redist: + if len(file.chunks) == 0: + continue + file_path = dl_utils.get_case_insensitive_name(os.path.join(self.path, file.path)) + if not os.path.exists(file_path): + invalid += 1 + new_diff.redist.append(file) + continue + valid = True + with open(file_path, 'rb') as fh: + for chunk in file.chunks: + chunk_sum = hashlib.md5() + chunk_data = fh.read(chunk['size']) + chunk_sum.update(chunk_data) + + if chunk_sum.hexdigest() != chunk['md5']: + valid = False + break + if not valid: + invalid += 1 + new_diff.redist.append(file) + continue + for file in diff.links: + file_path = os.path.join(self.path, file.path) + file_path = dl_utils.get_case_insensitive_name(file_path) + if not os.path.exists(file_path): + new_diff.links.append(file) + + if not invalid: + self.logger.info("All files look good") + return + + self.logger.info(f"Found {invalid} broken files, repairing...") + diff = new_diff + + executor = ExecutingManager(self.api_handler, self.allowed_threads, self.path, self.support, diff, secure_links) + success = executor.setup() + if not success: + print('Unable to proceed, Not enough disk space') + exit(2) + dl_utils.prepare_location(self.path) + + for dir in self.manifest.dirs: + manifest_dir_path = os.path.join(self.path, dir.path) + dl_utils.prepare_location(dl_utils.get_case_insensitive_name(manifest_dir_path)) + cancelled = executor.run() + + if cancelled: + return + + dl_utils.prepare_location(constants.MANIFESTS_DIR) + if self.manifest: + with open(manifest_path, 'w') as f_handle: + data = self.manifest.serialize_to_json() + f_handle.write(data) + + def get_meta(self): + meta_url = self.build["link"] + self.meta, headers = dl_utils.get_zlib_encoded(self.api_handler, meta_url) + self.version_etag = headers.get("Etag") + + # Append folder name when downloading + if self.should_append_folder_name: + self.path = os.path.join(self.path, self.meta["installDirectory"]) + + def get_dlcs_user_owns(self, info_command=False, requested_dlcs=None): + if requested_dlcs is None: + requested_dlcs = list() + if not self.dlcs_should_be_downloaded and not info_command: + return [] + self.logger.debug("Getting dlcs user owns") + dlcs = [] + if len(requested_dlcs) > 0: + for product in self.meta["products"]: + if ( + product["productId"] != self.game_id + and product["productId"] in requested_dlcs + and self.api_handler.does_user_own(product["productId"]) + ): + dlcs.append({"title": product["name"], "id": product["productId"]}) + return dlcs + for product in self.meta["products"]: + if product["productId"] != self.game_id and self.api_handler.does_user_own( + product["productId"] + ): + dlcs.append({"title": product["name"], "id": product["productId"]}) + return dlcs + diff --git 
a/gogdl/dl/objects.py b/gogdl/dl/objects.py deleted file mode 100755 index 3785aec..0000000 --- a/gogdl/dl/objects.py +++ /dev/null @@ -1,59 +0,0 @@ -class DepotFile: - def __init__(self, item_data, product_id): - self.path = item_data['path'].replace('\\', '/') - self.chunks = item_data['chunks'] - self.flags = item_data.get('flags') - self.md5 = item_data.get('md5') - self.sha256 = item_data.get('sha256') - self.product_id = product_id - - -# That exists in some depots, indicates directory to be created, it has only path in it -# Yes that's the thing -class DepotDirectory: - def __init__(self, item_data): - self.path = item_data['path'] - - -class Depot: - def __init__(self, target_lang, depot_data): - self.target_lang = target_lang - self.languages = depot_data['languages'] - self.bitness = depot_data.get('osBitness') - self.product_id = depot_data['productId'] - self.compressed_size = depot_data.get('compressedSize') - self.size = depot_data['size'] - self.manifest = depot_data['manifest'] - - def check_language(self): - status = False - for lang in self.languages: - status = lang == '*' or self.target_lang.lower() == lang.lower() or self.target_lang.split('-')[ - 0].lower() == lang.lower() - if status: - break - return status - - -class DepotV1: - def __init__(self, target_lang, depot_data): - self.target_lang = target_lang - self.languages = depot_data['languages'] - self.game_ids = depot_data['gameIDs'] - self.size = depot_data['size'] - self.manifest = depot_data['manifest'] - - def check_language(self): - status = True - for lang in self.languages: - status = lang == "Neutral" or lang == self.target_lang - if status: - break - return status - - -class DependencyV1: - def __init__(self, data): - self.id = data['redist'] - self.size = data.get('size') - self.target_dir = data['targetDir'] diff --git a/gogdl/dl/objects/generic.py b/gogdl/dl/objects/generic.py new file mode 100644 index 0000000..0a1b799 --- /dev/null +++ b/gogdl/dl/objects/generic.py @@ -0,0 +1,93 @@ +from dataclasses import dataclass +from enum import Flag, auto +from typing import Optional + + +class BaseDiff: + def __init__(self): + self.deleted = [] + self.new = [] + self.changed = [] + self.redist = [] + self.removed_redist = [] + + self.links = [] # Unix only + + def __str__(self): + return f"Deleted: {len(self.deleted)} New: {len(self.new)} Changed: {len(self.changed)}" + +class TaskFlag(Flag): + NONE = 0 + SUPPORT = auto() + OPEN_FILE = auto() + CLOSE_FILE = auto() + CREATE_FILE = auto() + CREATE_SYMLINK = auto() + RENAME_FILE = auto() + COPY_FILE = auto() + DELETE_FILE = auto() + OFFLOAD_TO_CACHE = auto() + MAKE_EXE = auto() + PATCH = auto() + RELEASE_MEM = auto() + ZIP_DEC = auto() + +@dataclass +class MemorySegment: + offset: int + end: int + + @property + def size(self): + return self.end - self.offset + +@dataclass +class ChunkTask: + product: str + index: int + + compressed_md5: str + md5: str + size: int + download_size: int + + cleanup: bool = False + offload_to_cache: bool = False + old_offset: Optional[int] = None + old_flags: TaskFlag = TaskFlag.NONE + old_file: Optional[str] = None + +@dataclass +class V1Task: + product: str + index: int + offset: int + size: int + md5: str + cleanup: Optional[bool] = True + + old_offset: Optional[int] = None + offload_to_cache: Optional[bool] = False + old_flags: TaskFlag = TaskFlag.NONE + old_file: Optional[str] = None + + # This isn't actual sum, but unique id of chunk we use to decide + # if we should push it to writer + @property + def compressed_md5(self): + 
return self.md5 + "_" + str(self.index) + +@dataclass +class FileTask: + path: str + flags: TaskFlag + + old_flags: TaskFlag = TaskFlag.NONE + old_file: Optional[str] = None + + patch_file: Optional[str] = None + + +@dataclass +class TerminateWorker: + pass diff --git a/gogdl/dl/objects/linux.py b/gogdl/dl/objects/linux.py new file mode 100644 index 0000000..3dfeeea --- /dev/null +++ b/gogdl/dl/objects/linux.py @@ -0,0 +1,365 @@ +from io import BytesIO + + +END_OF_CENTRAL_DIRECTORY = b"\x50\x4b\x05\x06" +CENTRAL_DIRECTORY = b"\x50\x4b\x01\x02" +LOCAL_FILE_HEADER = b"\x50\x4b\x03\x04" + +# ZIP64 +ZIP_64_END_OF_CD_LOCATOR = b"\x50\x4b\x06\x07" +ZIP_64_END_OF_CD = b"\x50\x4b\x06\x06" + +class LocalFile: + def __init__(self) -> None: + self.relative_local_file_offset: int + self.version_needed: bytes + self.general_purpose_bit_flag: bytes + self.compression_method: int + self.last_modification_time: bytes + self.last_modification_date: bytes + self.crc32: bytes + self.compressed_size: int + self.uncompressed_size: int + self.file_name_length: int + self.extra_field_length: int + self.file_name: str + self.extra_field: bytes + self.last_byte: int + + def load_data(self, handler): + return handler.get_bytes_from_file( + from_b=self.last_byte + self.relative_local_file_offset, + size=self.compressed_size, + raw_response=True + ) + + @classmethod + def from_bytes(cls, data, offset, handler): + local_file = cls() + local_file.relative_local_file_offset = 0 + local_file.version_needed = data[4:6] + local_file.general_purpose_bit_flag = data[6:8] + local_file.compression_method = int.from_bytes(data[8:10], "little") + local_file.last_modification_time = data[10:12] + local_file.last_modification_date = data[12:14] + local_file.crc32 = data[14:18] + local_file.compressed_size = int.from_bytes(data[18:22], "little") + local_file.uncompressed_size = int.from_bytes(data[22:26], "little") + local_file.file_name_length = int.from_bytes(data[26:28], "little") + local_file.extra_field_length = int.from_bytes(data[28:30], "little") + + extra_data = handler.get_bytes_from_file( + from_b=30 + offset, + size=local_file.file_name_length + local_file.extra_field_length, + ) + + local_file.file_name = bytes( + extra_data[0: local_file.file_name_length] + ).decode() + + # The extra field follows the name inside the fetched name+extra block + local_file.extra_field = extra_data[ + local_file.file_name_length: local_file.file_name_length + + local_file.extra_field_length + ] + local_file.last_byte = ( + local_file.file_name_length + local_file.extra_field_length + 30 + ) + return local_file + + def __str__(self): + return f"\nCompressionMethod: {self.compression_method} \nFileNameLen: {self.file_name_length} \nFileName: {self.file_name} \nCompressedSize: {self.compressed_size} \nUncompressedSize: {self.uncompressed_size}" + + +class CentralDirectoryFile: + def __init__(self, product): + self.product = product + self.version_made_by: bytes + self.version_needed_to_extract: bytes + self.general_purpose_bit_flag: bytes + self.compression_method: int + self.last_modification_time: bytes + self.last_modification_date: bytes + self.crc32: int + self.compressed_size: int + self.uncompressed_size: int + self.file_name_length: int + self.extra_field_length: int + self.file_comment_length: int + self.disk_number_start: bytes + self.int_file_attrs: bytes + self.ext_file_attrs: bytes + self.relative_local_file_offset: int + self.file_name: str + self.extra_field: BytesIO + self.comment: bytes + self.last_byte: int + + @classmethod + def from_bytes(cls, data, product): + cd_file = cls(product) + + 
cd_file.version_made_by = data[4:6] + cd_file.version_needed_to_extract = data[6:8] + cd_file.general_purpose_bit_flag = data[8:10] + cd_file.compression_method = int.from_bytes(data[10:12], "little") + cd_file.last_modification_time = data[12:14] + cd_file.last_modification_date = data[14:16] + cd_file.crc32 = int.from_bytes(data[16:20], "little") + cd_file.compressed_size = int.from_bytes(data[20:24], "little") + cd_file.uncompressed_size = int.from_bytes(data[24:28], "little") + cd_file.file_name_length = int.from_bytes(data[28:30], "little") + cd_file.extra_field_length = int.from_bytes(data[30:32], "little") + cd_file.file_comment_length = int.from_bytes(data[32:34], "little") + cd_file.disk_number_start = data[34:36] + cd_file.int_file_attrs = data[36:38] + cd_file.ext_file_attrs = data[38:42] + cd_file.relative_local_file_offset = int.from_bytes(data[42:46], "little") + + extra_field_start = 46 + cd_file.file_name_length + cd_file.file_name = bytes(data[46:extra_field_start]).decode() + + cd_file.extra_field = BytesIO(data[ + extra_field_start: extra_field_start + cd_file.extra_field_length + ]) + + # Scan the extra fields for the Zip64 extended information record (id 0x0001) + field = None + while True: + field_id = int.from_bytes(cd_file.extra_field.read(2), "little") + size = int.from_bytes(cd_file.extra_field.read(2), "little") + + if field_id == 0x0001: + field = BytesIO(cd_file.extra_field.read(size)) + break + + cd_file.extra_field.seek(size, 1) + + if cd_file.extra_field_length - cd_file.extra_field.tell() <= 0: + break + + if field: + if cd_file.uncompressed_size == 0xFFFFFFFF: + cd_file.uncompressed_size = int.from_bytes(field.read(8), "little") + + if cd_file.compressed_size == 0xFFFFFFFF: + cd_file.compressed_size = int.from_bytes(field.read(8), "little") + + if cd_file.relative_local_file_offset == 0xFFFFFFFF: + cd_file.relative_local_file_offset = int.from_bytes(field.read(8), "little") + + comment_start = extra_field_start + cd_file.extra_field_length + cd_file.comment = data[ + comment_start: comment_start + cd_file.file_comment_length + ] + + cd_file.last_byte = comment_start + cd_file.file_comment_length + + return cd_file, comment_start + cd_file.file_comment_length + + def is_symlink(self): + return (int.from_bytes(self.ext_file_attrs, "little") & 1 << 29) != 0 + + def as_dict(self): + return {'file_name': self.file_name, 'crc32': self.crc32, 'compressed_size': self.compressed_size, 'size': self.uncompressed_size, 'is_symlink': self.is_symlink()} + + def __str__(self): + return f"\nCompressionMethod: {self.compression_method} \nFileNameLen: {self.file_name_length} \nFileName: {self.file_name} \nStartDisk: {self.disk_number_start} \nCompressedSize: {self.compressed_size} \nUncompressedSize: {self.uncompressed_size}" + + def __repr__(self): + return self.file_name + + +class CentralDirectory: + def __init__(self, product): + self.files = [] + self.product = product + + @staticmethod + def create_central_dir_file(data, product): + return CentralDirectoryFile.from_bytes(data, product) + + @classmethod + def from_bytes(cls, data, n, product): + central_dir = cls(product) + for _ in range(n): + cd_file, next_offset = central_dir.create_central_dir_file(data, product) + central_dir.files.append(cd_file) + data = data[next_offset:] + return central_dir + +class Zip64EndOfCentralDirLocator: + def __init__(self): + self.number_of_disk: int + self.zip64_end_of_cd_offset: int + self.total_number_of_disks: int + + @classmethod + def from_bytes(cls, data): + zip64_end_of_cd = cls() + zip64_end_of_cd.number_of_disk = int.from_bytes(data[4:8], "little") + 
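The symlink check above relies on a ZIP convention worth spelling out: for entries written by Unix tools, the high 16 bits of the central directory's external file attributes carry the st_mode bits, which is also how this patch detects executables earlier on. A short sketch of that decoding (standard Info-ZIP layout, hypothetical helper name):

import stat

def decode_ext_attrs(ext_file_attrs: bytes):
    # Info-ZIP convention: low 16 bits are DOS attributes,
    # high 16 bits are the Unix st_mode
    mode = int.from_bytes(ext_file_attrs, "little") >> 16
    return {
        "is_symlink": stat.S_ISLNK(mode),
        "is_dir": stat.S_ISDIR(mode),
        "executable": bool(mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)),
    }

# Example: a 0o120777 symlink entry yields is_symlink=True, executable=True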
zip64_end_of_cd.zip64_end_of_cd_offset = int.from_bytes(data[8:16], "little") + zip64_end_of_cd.total_number_of_disks = int.from_bytes(data[16:20], "little") + return zip64_end_of_cd + + def __str__(self): + return f"\nZIP64EOCDLocator\nDisk Number: {self.number_of_disk}\nZ64_EOCD Offset: {self.zip64_end_of_cd_offset}\nNumber of disks: {self.total_number_of_disks}" + +class Zip64EndOfCentralDir: + def __init__(self): + self.size: int + self.version_made_by: bytes + self.version_needed: bytes + self.number_of_disk: bytes + self.central_directory_start_disk: bytes + self.number_of_entries_on_this_disk: int + self.number_of_entries_total: int + self.size_of_central_directory: int + self.central_directory_offset: int + self.extensible_data = None + + @classmethod + def from_bytes(cls, data): + end_of_cd = cls() + + end_of_cd.size = int.from_bytes(data[4:12], "little") + end_of_cd.version_made_by = data[12:14] + end_of_cd.version_needed = data[14:16] + end_of_cd.number_of_disk = data[16:20] + end_of_cd.central_directory_start_disk = data[20:24] + end_of_cd.number_of_entries_on_this_disk = int.from_bytes(data[24:32], "little") + end_of_cd.number_of_entries_total = int.from_bytes(data[32:40], "little") + end_of_cd.size_of_central_directory = int.from_bytes(data[40:48], "little") + end_of_cd.central_directory_offset = int.from_bytes(data[48:56], "little") + + return end_of_cd + + def __str__(self) -> str: + return f"\nZ64 EndOfCD\nSize: {self.size}\nNumber of disk: {self.number_of_disk}\nEntries on this disk: {self.number_of_entries_on_this_disk}\nEntries total: {self.number_of_entries_total}\nCD offset: {self.central_directory_offset}" + + +class EndOfCentralDir: + def __init__(self): + self.number_of_disk: bytes + self.central_directory_disk: bytes + self.central_directory_records: int + self.size_of_central_directory: int + self.central_directory_offset: int + self.comment_length: bytes + self.comment: bytes + + @classmethod + def from_bytes(cls, data): + central_dir = cls() + central_dir.number_of_disk = data[4:6] + central_dir.central_directory_disk = data[6:8] + central_dir.central_directory_records = int.from_bytes(data[8:10], "little") + central_dir.size_of_central_directory = int.from_bytes(data[12:16], "little") + central_dir.central_directory_offset = int.from_bytes(data[16:20], "little") + central_dir.comment_length = data[20:22] + central_dir.comment = data[ + 22: 22 + int.from_bytes(central_dir.comment_length, "little") + ] + + return central_dir + + def __str__(self): + return f"\nDiskNumber: {self.number_of_disk} \nCentralDirRecords: {self.central_directory_records} \nCentralDirSize: {self.size_of_central_directory} \nCentralDirOffset: {self.central_directory_offset}" + + +class InstallerHandler: + def __init__(self, url, product_id, session): + self.url = url + self.product = product_id + self.session = session + self.file_size = 0 + beginning_of_file = self.get_bytes_from_file( + from_b=1024*512, size=1024*512, add_archive_index=False + ) + + self.start_of_archive_index = beginning_of_file.find(LOCAL_FILE_HEADER) + 1024*512 + + # ZIP contents + self.central_directory_offset: int + self.central_directory_records: int + self.size_of_central_directory: int + self.central_directory: CentralDirectory + + def get_bytes_from_file(self, from_b=-1, size=None, add_archive_index=True, raw_response=False): + if add_archive_index: + from_b += self.start_of_archive_index + + from_b_repr = str(from_b) if from_b > -1 else "" + if size: + end_b = from_b + size - 1 + else: + end_b = "" + 
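+        # HTTP Range ends are inclusive (RFC 7233), hence the size - 1 above.
+        # E.g. requesting 512 KiB starting at offset 1048576 sends
+        #   Range: bytes=1048576-1572863
+        # and a cooperating server answers 206 Partial Content. An empty end
+        # ("bytes=N-") asks for everything from N to EOF.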
range_header = self.get_range_header(from_b_repr, end_b) + + response = self.session.get(self.url, headers={'Range': range_header}, + allow_redirects=False, stream=raw_response) + if response.status_code == 302: + # Skip content-system API + self.url = response.headers.get('Location') or self.url + return self.get_bytes_from_file(from_b, size, add_archive_index, raw_response) + if not self.file_size: + self.file_size = int(response.headers.get("Content-Range").split("/")[-1]) + if raw_response: + return response + else: + data = response.content + return data + + @staticmethod + def get_range_header(from_b="", to_b=""): + return f"bytes={from_b}-{to_b}" + + def setup(self): + self.__find_end_of_cd() + self.__find_central_directory() + + def __find_end_of_cd(self): + end_of_cd_data = self.get_bytes_from_file( + from_b=self.file_size - 100, add_archive_index=False + ) + + end_of_cd_header_data_index = end_of_cd_data.find(END_OF_CENTRAL_DIRECTORY) + zip64_end_of_cd_locator_index = end_of_cd_data.find(ZIP_64_END_OF_CD_LOCATOR) + end_of_cd = EndOfCentralDir.from_bytes(end_of_cd_data[end_of_cd_header_data_index:]) + if end_of_cd.central_directory_offset == 0xFFFFFFFF: + # We need to find zip64 headers + + zip64_end_of_cd_locator = Zip64EndOfCentralDirLocator.from_bytes(end_of_cd_data[zip64_end_of_cd_locator_index:]) + zip64_end_of_cd_data = self.get_bytes_from_file(from_b=zip64_end_of_cd_locator.zip64_end_of_cd_offset, size=200) + zip64_end_of_cd = Zip64EndOfCentralDir.from_bytes(zip64_end_of_cd_data) + + self.central_directory_offset = zip64_end_of_cd.central_directory_offset + self.size_of_central_directory = zip64_end_of_cd.size_of_central_directory + self.central_directory_records = zip64_end_of_cd.number_of_entries_total + else: + self.central_directory_offset = end_of_cd.central_directory_offset + self.size_of_central_directory = end_of_cd.size_of_central_directory + self.central_directory_records = end_of_cd.central_directory_records + + def __find_central_directory(self): + central_directory_data = self.get_bytes_from_file( + from_b=self.central_directory_offset, + size=self.size_of_central_directory, + ) + + self.central_directory = CentralDirectory.from_bytes( + central_directory_data, self.central_directory_records, self.product + ) + + +class LinuxFile: + def __init__(self, product, path, compression, start, compressed_size, size, checksum, executable): + self.product = product + self.path = path + self.compression = compression == 8 + self.offset = start + self.compressed_size = compressed_size + self.size = size + self.hash = str(checksum) + self.flags = [] + if executable: + self.flags.append("executable") + diff --git a/gogdl/dl/objects/v1.py b/gogdl/dl/objects/v1.py new file mode 100644 index 0000000..9436dec --- /dev/null +++ b/gogdl/dl/objects/v1.py @@ -0,0 +1,167 @@ +import json +import os +from gogdl.dl import dl_utils +from gogdl.dl.objects import generic, v2 +from gogdl import constants + + +class Depot: + def __init__(self, target_lang, depot_data): + self.target_lang = target_lang + self.languages = depot_data["languages"] + self.game_ids = depot_data["gameIDs"] + self.size = int(depot_data["size"]) + self.manifest = depot_data["manifest"] + + def check_language(self): + status = True + for lang in self.languages: + status = lang == "Neutral" or lang == self.target_lang + if status: + break + return status + +class Directory: + def __init__(self, item_data): + self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).lstrip(os.sep) + +class Dependency: + 
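+    # A v1 depot entry describing a redistributable. Illustrative shape only
+    # (field names match the lookups below; the values here are invented):
+    #   {"redist": "MSVC2010", "size": "154947", "targetDir": "__redist/MSVC2010"}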
def __init__(self, data):
+        self.id = data["redist"]
+        self.size = data.get("size")
+        self.target_dir = data.get("targetDir")
+
+
+class File:
+    def __init__(self, data, product_id):
+        self.offset = data.get("offset")
+        self.hash = data.get("hash")
+        self.url = data.get("url")
+        self.path = data["path"].lstrip("/")
+        self.size = data["size"]
+        self.flags = []
+        if data.get("support"):
+            self.flags.append("support")
+        if data.get("executable"):
+            self.flags.append("executable")
+
+        self.product_id = product_id
+
+class Manifest:
+    def __init__(self, platform, meta, language, dlcs, api_handler, dlc_only):
+        self.platform = platform
+        self.data = meta
+        self.data['HGLPlatform'] = platform
+        self.data["HGLInstallLanguage"] = language
+        self.data["HGLdlcs"] = dlcs
+        self.product_id = meta["product"]["rootGameID"]
+        self.dlcs = dlcs
+        self.dlc_only = dlc_only
+        self.all_depots = []
+        self.depots = self.parse_depots(language, meta["product"]["depots"])
+        self.dependencies = [Dependency(depot) for depot in meta["product"]["depots"] if depot.get('redist')]
+        self.dependencies_ids = [depot['redist'] for depot in meta["product"]["depots"] if depot.get('redist')]
+
+        self.api_handler = api_handler
+
+        self.files = []
+        self.dirs = []
+
+    @classmethod
+    def from_json(cls, meta, api_handler):
+        manifest = cls(meta['HGLPlatform'], meta, meta['HGLInstallLanguage'], meta["HGLdlcs"], api_handler, False)
+        return manifest
+
+    def serialize_to_json(self):
+        return json.dumps(self.data)
+
+    def parse_depots(self, language, depots):
+        parsed = []
+        dlc_ids = [dlc["id"] for dlc in self.dlcs]
+        for depot in depots:
+            if depot.get("redist"):
+                continue
+
+            for g_id in depot["gameIDs"]:
+                if g_id in dlc_ids or (not self.dlc_only and self.product_id == g_id):
+                    new_depot = Depot(language, depot)
+                    parsed.append(new_depot)
+                    self.all_depots.append(new_depot)
+                    break
+        return list(filter(lambda x: x.check_language(), parsed))
+
+    def list_languages(self):
+        languages_dict = set()
+        for depot in self.all_depots:
+            for language in depot.languages:
+                if language != "Neutral":
+                    languages_dict.add(language)
+
+        return list(languages_dict)
+
+    def calculate_download_size(self):
+        data = dict()
+
+        for depot in self.all_depots:
+            for product_id in depot.game_ids:
+                if product_id not in data:
+                    data[product_id] = dict()
+                product_data = data[product_id]
+                for lang in depot.languages:
+                    if lang == "Neutral":
+                        lang = "*"
+                    if lang not in product_data:
+                        product_data[lang] = {"download_size": 0, "disk_size": 0}
+
+                    product_data[lang]["download_size"] += depot.size
+                    product_data[lang]["disk_size"] += depot.size
+
+        return data
+
+
+    def get_files(self):
+        for depot in self.depots:
+            manifest = dl_utils.get_json(self.api_handler, f"{constants.GOG_CDN}/content-system/v1/manifests/{depot.game_ids[0]}/{self.platform}/{self.data['product']['timestamp']}/{depot.manifest}")
+            for record in manifest["depot"]["files"]:
+                if "directory" in record:
+                    self.dirs.append(Directory(record))
+                else:
+                    self.files.append(File(record, depot.game_ids[0]))
+
+class ManifestDiff(generic.BaseDiff):
+    def __init__(self):
+        super().__init__()
+
+    @classmethod
+    def compare(cls, new_manifest, old_manifest=None):
+        comparison = cls()
+
+        if not old_manifest:
+            comparison.new = new_manifest.files
+            return comparison
+
+        new_files = dict()
+        for file in new_manifest.files:
+            new_files.update({file.path.lower(): file})
+
+        old_files = dict()
+        for file in old_manifest.files:
+            old_files.update({file.path.lower(): file})
+
+        for old_file in
old_files.values(): + if not new_files.get(old_file.path.lower()): + comparison.deleted.append(old_file) + + if type(old_manifest) == v2.Manifest: + comparison.new = new_manifest.files + return comparison + + for new_file in new_files.values(): + old_file = old_files.get(new_file.path.lower()) + if not old_file: + comparison.new.append(new_file) + else: + if new_file.hash != old_file.hash: + comparison.changed.append(new_file) + + return comparison diff --git a/gogdl/dl/objects/v2.py b/gogdl/dl/objects/v2.py new file mode 100644 index 0000000..8c04619 --- /dev/null +++ b/gogdl/dl/objects/v2.py @@ -0,0 +1,292 @@ +import json +import os + +from gogdl.dl import dl_utils +from gogdl.dl.objects import generic, v1 +from gogdl import constants + + +class DepotFile: + def __init__(self, item_data, product_id): + self.flags = item_data.get("flags") or list() + self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).lstrip(os.sep) + self.chunks = item_data["chunks"] + self.md5 = item_data.get("md5") + self.sha256 = item_data.get("sha256") + self.product_id = product_id + + +# That exists in some depots, indicates directory to be created, it has only path in it +# Yes that's the thing +class DepotDirectory: + def __init__(self, item_data): + self.path = item_data["path"].replace(constants.NON_NATIVE_SEP, os.sep).rstrip(os.sep) + +class DepotLink: + def __init__(self, item_data): + self.path = item_data["path"] + self.target = item_data["target"] + + +class Depot: + def __init__(self, target_lang, depot_data): + self.target_lang = target_lang + self.languages = depot_data["languages"] + self.bitness = depot_data.get("osBitness") + self.product_id = depot_data["productId"] + self.compressed_size = depot_data.get("compressedSize") + self.size = depot_data["size"] + self.manifest = depot_data["manifest"] + + def check_language(self): + status = False + for lang in self.languages: + status = ( + lang == "*" + or self.target_lang == lang + or self.target_lang.split("-")[0] == lang + ) + if status: + break + return status + +class Manifest: + def __init__(self, meta, language, dlcs, api_handler, dlc_only): + self.data = meta + self.data["HGLInstallLanguage"] = language + self.data["HGLdlcs"] = dlcs + self.product_id = meta["baseProductId"] + self.dlcs = dlcs + self.dlc_only = dlc_only + self.all_depots = [] + self.depots = self.parse_depots(language, meta["depots"]) + self.dependencies_ids = meta.get("dependencies") + if not self.dependencies_ids: + self.dependencies_ids = list() + self.install_directory = meta["installDirectory"] + + self.api_handler = api_handler + + self.files = [] + self.dirs = [] + + @classmethod + def from_json(cls, meta, api_handler): + manifest = cls(meta, meta["HGLInstallLanguage"], meta["HGLdlcs"], api_handler, False) + return manifest + + def serialize_to_json(self): + return json.dumps(self.data) + + def parse_depots(self, language, depots): + parsed = [] + dlc_ids = [dlc["id"] for dlc in self.dlcs] + for depot in depots: + if depot["productId"] in dlc_ids or ( + not self.dlc_only and self.product_id == depot["productId"] + ): + new_depot = Depot(language, depot) + parsed.append(new_depot) + self.all_depots.append(new_depot) + + + return list(filter(lambda x: x.check_language(), parsed)) + + def list_languages(self): + languages_dict = set() + for depot in self.all_depots: + for language in depot.languages: + if language != "*": + languages_dict.add(language) + + return list(languages_dict) + + def calculate_download_size(self): + data = dict() + + for depot in 
self.all_depots: + if not depot.product_id in data: + data[depot.product_id] = dict() + product_data = data[depot.product_id] + for lang in depot.languages: + if not lang in product_data: + product_data[lang] = {"download_size":0, "disk_size":0} + + product_data[lang]["download_size"] += depot.compressed_size + product_data[lang]["disk_size"] += depot.size + + return data + + def get_files(self): + for depot in self.depots: + manifest = dl_utils.get_zlib_encoded( + self.api_handler, + f"{constants.GOG_CDN}/content-system/v2/meta/{dl_utils.galaxy_path(depot.manifest)}", + )[0] + for item in manifest["depot"]["items"]: + if item["type"] == "DepotFile": + self.files.append(DepotFile(item, depot.product_id)) + elif item["type"] == "DepotLink": + self.files.append(DepotLink(item)) + else: + self.dirs.append(DepotDirectory(item)) + +class FileDiff: + def __init__(self): + self.file: DepotFile + self.old_file_flags: list[str] + self.disk_size_diff: int = 0 + + @classmethod + def compare(cls, new: DepotFile, old: DepotFile): + diff = cls() + diff.disk_size_diff = sum([ch['size'] for ch in new.chunks]) + diff.disk_size_diff -= sum([ch['size'] for ch in old.chunks]) + diff.old_file_flags = old.flags + for new_chunk in new.chunks: + old_offset = 0 + for old_chunk in old.chunks: + if old_chunk["md5"] == new_chunk["md5"]: + new_chunk["old_offset"] = old_offset + old_offset += old_chunk["size"] + diff.file = new + return diff + +# Using xdelta patching +class FilePatchDiff: + def __init__(self, data): + self.md5_source = data['md5_source'] + self.md5_target = data['md5_target'] + self.source = data['path_source'].replace('\\', '/') + self.target = data['path_target'].replace('\\', '/') + self.md5 = data['md5'] + self.chunks = data['chunks'] + + self.old_file: DepotFile + self.new_file: DepotFile + +class ManifestDiff(generic.BaseDiff): + def __init__(self): + super().__init__() + + @classmethod + def compare(cls, manifest, old_manifest=None, patch=None): + comparison = cls() + is_manifest_upgrade = isinstance(old_manifest, v1.Manifest) + + if not old_manifest: + comparison.new = manifest.files + return comparison + + new_files = dict() + for file in manifest.files: + new_files.update({file.path.lower(): file}) + + old_files = dict() + for file in old_manifest.files: + old_files.update({file.path.lower(): file}) + + for old_file in old_files.values(): + if not new_files.get(old_file.path.lower()): + comparison.deleted.append(old_file) + + for new_file in new_files.values(): + old_file = old_files.get(new_file.path.lower()) + if isinstance(new_file, DepotLink): + comparison.links.append(new_file) + continue + if not old_file: + comparison.new.append(new_file) + else: + if is_manifest_upgrade: + if len(new_file.chunks) == 0: + continue + new_final_sum = new_file.md5 or new_file.chunks[0]["md5"] + if new_final_sum: + if old_file.hash != new_final_sum: + comparison.changed.append(new_file) + continue + + patch_file = None + if patch and len(old_file.chunks): + for p_file in patch.files: + old_final_sum = old_file.md5 or old_file.chunks[0]["md5"] + if p_file.md5_source == old_final_sum: + patch_file = p_file + patch_file.old_file = old_file + patch_file.new_file = new_file + + if patch_file: + comparison.changed.append(patch_file) + continue + + if len(new_file.chunks) == 1 and len(old_file.chunks) == 1: + if new_file.chunks[0]["md5"] != old_file.chunks[0]["md5"]: + comparison.changed.append(new_file) + else: + if (new_file.md5 and old_file.md5 and new_file.md5 != old_file.md5) or (new_file.sha256 and 
old_file.sha256 and old_file.sha256 != new_file.sha256):
+                        comparison.changed.append(FileDiff.compare(new_file, old_file))
+                    elif len(new_file.chunks) != len(old_file.chunks):
+                        comparison.changed.append(FileDiff.compare(new_file, old_file))
+        return comparison
+
+class Patch:
+    def __init__(self):
+        self.patch_data = {}
+        self.files = []
+
+    @classmethod
+    def get(cls, manifest, old_manifest, lang: str, dlcs: list, api_handler):
+        if isinstance(manifest, v1.Manifest) or isinstance(old_manifest, v1.Manifest):
+            return None
+        from_build = old_manifest.data.get('buildId')
+        to_build = manifest.data.get('buildId')
+        if not from_build or not to_build:
+            return None
+        dlc_ids = [dlc["id"] for dlc in dlcs]
+        patch_meta = dl_utils.get_zlib_encoded(api_handler, f'{constants.GOG_CONTENT_SYSTEM}/products/{manifest.product_id}/patches?_version=4&from_build_id={from_build}&to_build_id={to_build}')[0]
+        if not patch_meta or patch_meta.get('error'):
+            return None
+        patch_data = dl_utils.get_zlib_encoded(api_handler, patch_meta['link'])[0]
+        if not patch_data:
+            return None
+
+        if patch_data['algorithm'] != 'xdelta3':
+            print("Unsupported patch algorithm")
+            return None
+
+        depots = []
+        # Get depots we need
+        for depot in patch_data['depots']:
+            if depot['productId'] == patch_data['baseProductId'] or depot['productId'] in dlc_ids:
+                if lang in depot['languages']:
+                    depots.append(depot)
+
+        if not depots:
+            return None
+
+        files = []
+        fail = False
+        for depot in depots:
+            depotdiffs = dl_utils.get_zlib_encoded(api_handler, f'{constants.GOG_CDN}/content-system/v2/patches/meta/{dl_utils.galaxy_path(depot["manifest"])}')[0]
+            if not depotdiffs:
+                fail = True
+                break
+            for diff in depotdiffs['depot']['items']:
+                if diff['type'] == 'DepotDiff':
+                    files.append(FilePatchDiff(diff))
+                else:
+                    print('Unknown type in patcher', diff['type'])
+                    return None
+
+        if fail:
+            # TODO: Handle this better
+            # Maybe exception?
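+            # (A hedged alternative to returning None here: raise a dedicated,
+            # hypothetical error such as PatchManifestError so callers can
+            # explicitly fall back to a full download.)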
+ print("Failed to get patch manifests") + return None + + patch = cls() + patch.patch_data = patch_data + patch.files = files + + return patch diff --git a/gogdl/dl/progressbar.py b/gogdl/dl/progressbar.py index 669a451..2df2a5d 100755 --- a/gogdl/dl/progressbar.py +++ b/gogdl/dl/progressbar.py @@ -1,107 +1,112 @@ +import queue +from multiprocessing import Queue import threading import logging from time import sleep, time class ProgressBar(threading.Thread): - def __init__(self, max_val, total_readable_size, length): + def __init__(self, max_val: int, speed_queue: Queue, write_queue: Queue): self.logger = logging.getLogger("PROGRESS") self.downloaded = 0 self.total = max_val - self.length = length + self.speed_queue = speed_queue + self.write_queue = write_queue self.started_at = time() self.last_update = time() - self.total_readable_size = total_readable_size self.completed = False - self.read_total = 0 - self.written_total = 0 + self.decompressed = 0 - self.written_since_last_update = 0 - self.read_since_last_update = 0 self.downloaded_since_last_update = 0 self.decompressed_since_last_update = 0 + self.written_since_last_update = 0 + self.read_since_last_update = 0 - super().__init__(target=self.print_progressbar) + self.written_total = 0 + super().__init__(target=self.loop) + + def loop(self): + while not self.completed: + self.print_progressbar() + self.downloaded_since_last_update = self.decompressed_since_last_update = 0 + self.written_since_last_update = self.read_since_last_update = 0 + timestamp = time() + while not self.completed and (time() - timestamp) < 1: + try: + dl, dec = self.speed_queue.get_nowait() + self.downloaded_since_last_update += dl + self.decompressed_since_last_update += dec + except queue.Empty: + pass + try: + wr, r = self.write_queue.get_nowait() + self.written_since_last_update += wr + self.read_since_last_update += r + except queue.Empty: + pass + + self.print_progressbar() def print_progressbar(self): - done = 0 - - while True: - if self.completed: - break - percentage = (self.downloaded / self.total) * 100 - running_time = time() - self.started_at - runtime_h = int(running_time // 3600) - runtime_m = int((running_time % 3600) // 60) - runtime_s = int((running_time % 3600) % 60) - - time_since_last_update = time() - self.last_update - if time_since_last_update == 0: - time_since_last_update = 1 - size_left = self.total - self.downloaded - - # average_speed = self.downloaded / running_time - - if percentage > 0: - estimated_time = (100 * running_time) / percentage - running_time - else: - estimated_time = 0 - - estimated_h = int(estimated_time // 3600) - estimated_time = estimated_time % 3600 - estimated_m = int(estimated_time // 60) - estimated_s = int(estimated_time % 60) - - write_speed = self.written_since_last_update / time_since_last_update - read_speed = self.read_since_last_update / time_since_last_update - download_speed = self.downloaded_since_last_update / time_since_last_update - decompress_speed = ( - self.decompressed_since_last_update / time_since_last_update - ) - - self.read_total += self.read_since_last_update - self.written_total += self.written_since_last_update - self.downloaded += self.downloaded_since_last_update - - self.read_since_last_update = self.written_since_last_update = 0 - self.decompressed_since_last_update = self.downloaded_since_last_update = 0 - - self.logger.info( - f"= Progress: {percentage:.02f} {self.downloaded}/{self.total}, " - + f"Running for: {runtime_h:02d}:{runtime_m:02d}:{runtime_s:02d}, " - + f"ETA: 
{estimated_h:02d}:{estimated_m:02d}:{estimated_s:02d}" - ) - - self.logger.info( - f"= Downloaded: {self.downloaded / 1024 / 1024:.02f} MiB, " - f"Written: {self.written_total / 1024 / 1024:.02f} MiB" - ) - - self.logger.info( - f" + Download\t- {download_speed / 1024 / 1024:.02f} MiB/s (raw) " - f"/ {decompress_speed / 1024 / 1024:.02f} MiB/s (decompressed)" - ) - - self.logger.info( - f" + Disk\t- {write_speed / 1024 / 1024:.02f} MiB/s (write) / " - f"{read_speed / 1024 / 1024:.02f} MiB/s (read)" - ) - - self.last_update = time() - sleep(1) + percentage = (self.written_total / self.total) * 100 + running_time = time() - self.started_at + runtime_h = int(running_time // 3600) + runtime_m = int((running_time % 3600) // 60) + runtime_s = int((running_time % 3600) % 60) + + print_time_delta = time() - self.last_update + + current_dl_speed = 0 + current_decompress = 0 + if print_time_delta: + current_dl_speed = self.downloaded_since_last_update / print_time_delta + current_decompress = self.decompressed_since_last_update / print_time_delta + current_w_speed = self.written_since_last_update / print_time_delta + current_r_speed = self.read_since_last_update / print_time_delta + else: + current_w_speed = 0 + current_r_speed = 0 + + if percentage > 0: + estimated_time = (100 * running_time) / percentage - running_time + else: + estimated_time = 0 + estimated_time = max(estimated_time, 0) # Cap to 0 + + estimated_h = int(estimated_time // 3600) + estimated_time = estimated_time % 3600 + estimated_m = int(estimated_time // 60) + estimated_s = int(estimated_time % 60) + + self.logger.info( + f"= Progress: {percentage:.02f} {self.written_total}/{self.total}, " + + f"Running for: {runtime_h:02d}:{runtime_m:02d}:{runtime_s:02d}, " + + f"ETA: {estimated_h:02d}:{estimated_m:02d}:{estimated_s:02d}" + ) + + self.logger.info( + f"= Downloaded: {self.downloaded / 1024 / 1024:.02f} MiB, " + f"Written: {self.written_total / 1024 / 1024:.02f} MiB" + ) + + self.logger.info( + f" + Download\t- {current_dl_speed / 1024 / 1024:.02f} MiB/s (raw) " + f"/ {current_decompress / 1024 / 1024:.02f} MiB/s (decompressed)" + ) + + self.logger.info( + f" + Disk\t- {current_w_speed / 1024 / 1024:.02f} MiB/s (write) / " + f"{current_r_speed / 1024 / 1024:.02f} MiB/s (read)" + ) + + self.last_update = time() def update_downloaded_size(self, addition): self.downloaded += addition - def update_download_speed(self, addition): - self.downloaded_since_last_update += addition - - def update_decompressed_speed(self, addition): - self.decompressed_since_last_update += addition - - def update_bytes_read(self, addition): - self.read_since_last_update += addition + def update_decompressed_size(self, addition): + self.decompressed += addition def update_bytes_written(self, addition): - self.written_since_last_update += addition + self.written_total += addition diff --git a/gogdl/dl/worker.py b/gogdl/dl/worker.py deleted file mode 100755 index 93d371c..0000000 --- a/gogdl/dl/worker.py +++ /dev/null @@ -1,310 +0,0 @@ -from gogdl.dl import dl_utils -from gogdl.dl.objects import DepotDirectory -from copy import copy -from sys import platform as os_platform -import shutil -import hashlib -import zlib -import logging -import os -import stat - - -class DLWorker: - def __init__(self, data, path, api_handler, gameId, progress): - self.data = data - self.path = path - self.api_handler = api_handler - self.progress = progress - self.gameId = gameId - self.completed = False - self.logger = logging.getLogger("DOWNLOAD_WORKER") - self.downloaded_size = 0 
- - self.retries = 3 - - def do_stuff(self, is_dependency=False): - self.is_dependency = is_dependency - if os_platform == "win32": - self.data.path = self.data.path.replace("/", "\\") - else: - self.data.path = self.data.path.replace("\\", os.sep) - item_path = os.path.join(self.path, self.data.path.lstrip("/\\")) - if type(self.data) == DepotDirectory: - dl_utils.prepare_location(item_path) - return - if self.data.flags and "support" in self.data.flags: - item_path = os.path.join(self.path, "support", self.gameId, self.data.path) - if type(self.data) == DepotDirectory: - dl_utils.prepare_location(item_path) - return - # Fix for https://github.com/Heroic-Games-Launcher/heroic-gogdl/issues/3 - if len(self.data.chunks) == 0: - directory, file_name = os.path.split(item_path) - dl_utils.prepare_location(directory) - open(item_path, "w").close() - return - - if self.verify_file(item_path): - size = 0 - for chunk in self.data.chunks: - size += chunk["compressedSize"] - self.progress.update_downloaded_size(size) - self.completed = True - return - - if os.path.exists(item_path): - os.remove(item_path) - - for index in range(len(self.data.chunks)): - chunk = self.data.chunks[index] - compressed_md5 = chunk["compressedMd5"] - md5 = chunk["md5"] - self.downloaded_size = chunk["compressedSize"] - download_path = os.path.join(item_path + f".tmp{index}") - dl_utils.prepare_location(dl_utils.parent_dir(download_path), self.logger) - self.get_file(download_path, compressed_md5, md5, index) - - for index in range(len(self.data.chunks)): - self.decompress_file(item_path + f".tmp{index}", item_path) - - if ( - self.data.flags - and ("executable" in self.data.flags) - and os_platform != "win32" - ): - file_stats = os.stat(item_path) - permissions = file_stats.st_mode | stat.S_IEXEC - os.chmod(item_path, permissions) - - if self.data.path.startswith("app"): - file_path = os.path.join(self.path, "support", self.gameId, self.data.path) - - dest_path = self.data.path.replace("app", self.path) - - try: - shutil.copy(file_path, dest_path) - except Exception: - pass - - self.completed = True - - def get_file_url(self, compressed_md5): - endpoint = self.api_handler.get_secure_link(self.data.product_id) - parameters = copy(endpoint["parameters"]) - parameters["path"] += "/" + dl_utils.galaxy_path(compressed_md5) - url = dl_utils.merge_url_with_params(endpoint["url_format"], parameters) - - return url - - def decompress_file(self, compressed, decompressed): - if os.path.exists(compressed): - file = open(compressed, "rb") - - read_data = file.read() - self.progress.update_bytes_read(len(read_data)) - - dc = zlib.decompress(read_data, 15) - f = open(decompressed, "ab") - - f.write(dc) - self.progress.update_bytes_written(len(dc)) - self.progress.update_decompressed_speed(len(dc)) - f.close() - file.close() - os.remove(compressed) - else: - raise Exception("Unable to decompress file, it doesn't exist") - - def get_file(self, path, compressed_sum, decompressed_sum, index=0): - if self.is_dependency: - url = dl_utils.get_dependency_link( - self.api_handler, dl_utils.galaxy_path(compressed_sum) - ) - else: - url = self.get_file_url(compressed_sum) - isExisting = os.path.exists(path) - if isExisting: - if ( - dl_utils.calculate_sum( - path, hashlib.md5, self.progress.update_bytes_read - ) - != compressed_sum - ): - os.remove(path) - else: - return - with open(path, "ab") as f: - response = self.api_handler.session.get( - url, stream=True, allow_redirects=True - ) - if response.status_code == 403: - 
self.api_handler.get_new_secure_link(self.data.product_id) - self.get_file(path, compressed_sum, decompressed_sum, index) - return - - if not response.ok: - if self.retries > 0: - self.retries -= 1 - self.get_file(path, compressed_sum, decompressed_sum, index) - return - total = response.headers.get("Content-Length") - if total is None: - self.progress.update_download_speed(len(response.content)) - written = f.write(response.content) - self.progress.update_bytes_written(written) - else: - total = int(total) - for data in response.iter_content( - chunk_size=max(int(total / 1000), 1024 * 1024) - ): - self.progress.update_download_speed(len(data)) - written = f.write(data) - self.progress.update_bytes_written(written) - f.close() - isExisting = os.path.exists(path) - if isExisting and ( - dl_utils.calculate_sum( - path, hashlib.md5, self.progress.update_bytes_read - ) - != compressed_sum - ): - self.logger.warning( - f"Checksums dismatch for compressed chunk of {path}" - ) - if isExisting: - os.remove(path) - self.get_file(path, compressed_sum, decompressed_sum, index) - - def verify_file(self, item_path): - if os.path.exists(item_path): - calculated = None - should_be = None - if len(self.data.chunks) > 1: - if self.data.md5: - should_be = self.data.md5 - calculated = dl_utils.calculate_sum( - item_path, hashlib.md5, self.progress.update_bytes_read - ) - elif self.data.sha256: - should_be = self.data.sha256 - calculated = dl_utils.calculate_sum( - item_path, hashlib.sha256, self.progress.update_bytes_read - ) - else: - # In case if there are sha256 sums in chunks - if "sha256" in self.data.chunks[0]: - calculated = dl_utils.calculate_sum( - item_path, hashlib.sha256, self.progress.update_bytes_read - ) - should_be = self.data.chunks[0]["sha256"] - elif "md5" in self.data.chunks[0]: - calculated = dl_utils.calculate_sum( - item_path, hashlib.md5, self.progress.update_bytes_read - ) - should_be = self.data.chunks[0]["md5"] - return calculated == should_be - else: - return False - - -class DLWorkerV1: - def __init__(self, data, path, api_handler, game_id, progressbar, platform, build_id): - self.data = data - self.path = path - self.api_handler = api_handler - self.progress = progressbar - self.gameId = game_id - self.platform = platform - self.build_id = build_id - self.completed = False - self.logger = logging.getLogger("DOWNLOAD_WORKER_V1") - self.downloaded_size = 0 - - self.retries = 3 - - def do_stuff(self, is_dependency=False): - if self.data["path"].startswith("/"): - self.data["path"] = self.data["path"][1:] - item_path = os.path.join(self.path, self.data["path"]) - if os_platform == "win32": - item_path = item_path.replace("/", os.sep) - else: - item_path = item_path.replace("\\", os.sep) - - if self.data.get("support"): - item_path = os.path.join(self.path, "support", self.data["path"]) - if self.data.get("directory"): - os.makedirs(item_path, exist_ok=True) - return - if self.data.get("size") == 0: - dl_utils.prepare_location(dl_utils.parent_dir(item_path), self.logger) - open(item_path, 'x').close() - return - if self.verify_file(item_path): - self.completed = True - if not is_dependency: - self.progress.update_downloaded_size(int(self.data["size"])) - return - else: - if os.path.exists(item_path): - os.remove(item_path) - dl_utils.prepare_location(dl_utils.parent_dir(item_path), self.logger) - - self.get_file(item_path) - - def get_file(self, item_path): - headers = { - "Range": dl_utils.get_range_header(self.data["offset"], self.data["size"]) - } - - download_link = 
self.data.get('link') - if not download_link: - download_link = self.api_handler.get_secure_link(self.data["url"].split("/")[0]) - with open(item_path, "ab") as f: - print(download_link) - response = self.api_handler.session.get( - download_link, headers=headers, stream=True, allow_redirects=True - ) - if response.status_code == 403: - self.api_handler.get_new_secure_link(self.data['url'].split("/")[0], - f"/{self.platform}/{self.build_id}", - 1) - - self.get_file(item_path) - return - if not response.ok: - if self.retries > 0: - self.retries -= 1 - self.get_file(item_path) - return - total = response.headers.get("Content-Length") - if total is None: - self.progress.update_download_speed(len(response.content)) - written = f.write(response.content) - self.progress.update_bytes_written(written) - - else: - total = int(total) - for data in response.iter_content( - chunk_size=max(int(total / 1000), 1024 * 1024) - ): - self.progress.update_download_speed(len(data)) - written = f.write(data) - self.progress.update_bytes_written(written) - - f.close() - if os.path.exists(item_path): - if not self.verify_file(item_path): - self.logger.warning(f"Checksums mismatch for file {item_path}") - os.remove(item_path) - self.get_file(item_path) - - def verify_file(self, item_path): - if os.path.exists(item_path): - calculated = dl_utils.calculate_sum( - item_path, hashlib.md5, self.progress.update_bytes_read - ) - should_be = self.data["hash"] - return calculated == should_be - return False diff --git a/gogdl/dl/workers/linux.py b/gogdl/dl/workers/linux.py new file mode 100644 index 0000000..8df3a8a --- /dev/null +++ b/gogdl/dl/workers/linux.py @@ -0,0 +1,87 @@ +import os +import zlib + +from gogdl.dl.objects.linux import LocalFile + + +class DLWorker: + def __init__(self, file_data, path): + self.data = file_data + self.install_path = path + self.file_path = self.data.file_name.replace("data/noarch", self.install_path) + + self.retries = 0 + + def verify(self): + file_handle = open(self.file_path, 'rb') + crc = 0 + while data := file_handle.read(1024 * 1024): + crc = zlib.crc32(data, crc) + + return crc == self.data.crc32 + + def work(self, installer_handler): + if os.path.exists(self.file_path): + if self.verify(): + return + + file_permissions = bin(int.from_bytes(self.data.ext_file_attrs, "little"))[9:][:9] + + # Load local file header + file_data = installer_handler.get_bytes_from_file( + from_b=self.data.relative_local_file_offset, + size=30, + ) + local_file = LocalFile.from_bytes( + file_data, + self.data.relative_local_file_offset, + installer_handler, # Passsing in handler to be able to pull more data + ) + local_file.relative_local_file_offset = self.data.relative_local_file_offset + + directory, name = os.path.split(self.file_path) + os.makedirs(directory, exist_ok=True) + + response = local_file.load_data(installer_handler) + total = response.headers.get("Content-Length") + + with open(self.file_path + ".tmp", "wb") as f: + if total is None: + f.write(response.content) + else: + total = int(total) + for data in response.iter_content( + chunk_size=max(int(total / 1000), 1024 * 1024) + ): + f.write(data) + + f.close() + + with open(self.file_path, "wb") as f: + tmp_handle = open(self.file_path + ".tmp", 'rb') + decompressor = zlib.decompressobj(-15) + + if local_file.compression_method == 8: + while stream := tmp_handle.read(1024 * 1024): + decompressed = decompressor.decompress(stream) + f.write(decompressed) + f.flush() + f.close() + elif local_file.compression_method == 0: + tmp_handle.close() 
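+                # Method 0 means "stored": the payload is already raw bytes,
+                # so the temp file can simply be renamed into place below.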
+ f.close() + os.rename(self.file_path + ".tmp", self.file_path) + else: + print("Unsupported compression method", local_file.compression_method) + + if os.path.exists(self.file_path + ".tmp"): + os.remove(self.file_path + ".tmp") + + if not self.verify(): + if self.retries < 3: + self.retries += 1 + os.remove(self.file_path) + self.work(installer_handler) + return + + os.chmod(self.file_path, int(f"0b{file_permissions}", base=0)) diff --git a/gogdl/dl/workers/task_executor.py b/gogdl/dl/workers/task_executor.py new file mode 100644 index 0000000..2d26fd6 --- /dev/null +++ b/gogdl/dl/workers/task_executor.py @@ -0,0 +1,461 @@ +from multiprocessing.shared_memory import SharedMemory +import os +from queue import Empty +import shutil +import sys +import stat +import traceback +import time +import requests +import zlib +import hashlib +from io import BytesIO +from typing import Optional, Union +from copy import copy +from gogdl.dl import dl_utils +from dataclasses import dataclass +from enum import Enum, auto +from multiprocessing import Process, Queue +from gogdl.dl.objects.generic import MemorySegment, TaskFlag, TerminateWorker +from gogdl.xdelta import patcher + + +class FailReason(Enum): + UNKNOWN = 0 + CHECKSUM = auto() + CONNECTION = auto() + UNAUTHORIZED = auto() + + MISSING_CHUNK = auto() + + +@dataclass +class DownloadTask: + product_id: str + +@dataclass +class DownloadTask1(DownloadTask): + offset: int + size: int + # This sum is not valid MD5 as it contains chunk id too + # V1 doesn't support chunks, this is sort of forceful way to use them + # in this algorithm + compressed_sum: str + memory_segment: MemorySegment + +@dataclass +class DownloadTask2(DownloadTask): + compressed_sum: str + memory_segment: MemorySegment + + +@dataclass +class WriterTask: + # Root directory of game files + destination: str + # File path from manifest + file_path: str + flags: TaskFlag + + hash: Optional[str] = None + size: Optional[int] = None + shared_memory: Optional[MemorySegment] = None + old_destination: Optional[str] = None + old_file: Optional[str] = None + old_offset: Optional[int] = None + + patch_file: Optional[str] = None + +@dataclass +class DownloadTaskResult: + success: bool + fail_reason: Optional[FailReason] + task: Union[DownloadTask2, DownloadTask1] + download_size: Optional[int] = None + decompressed_size: Optional[int] = None + +@dataclass +class WriterTaskResult: + success: bool + task: Union[WriterTask, TerminateWorker] + written: int = 0 + + +class Download(Process): + def __init__(self, shared_memory, download_queue, results_queue, speed_queue, shared_secure_links): + self.shared_memory = SharedMemory(name=shared_memory) + self.download_queue: Queue = download_queue + self.results_queue: Queue = results_queue + self.speed_queue: Queue = speed_queue + self.secure_links: dict = shared_secure_links + self.session = requests.session() + self.early_exit = False + super().__init__() + + def run(self): + while not self.early_exit: + try: + task: Union[DownloadTask1, DownloadTask2, TerminateWorker] = self.download_queue.get(timeout=1) + except Empty: + continue + + if isinstance(task, TerminateWorker): + break + + if type(task) == DownloadTask2: + self.v2(task) + elif type(task) == DownloadTask1: + self.v1(task) + + self.session.close() + self.shared_memory.close() + + def v2(self, task: DownloadTask2): + retries = 5 + urls = self.secure_links[task.product_id] + + compressed_md5 = task.compressed_sum + + endpoint = copy(urls[0]) + if task.product_id != 'redist': + 
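+            # Regular products use a parameterized secure-link template
+            # (url_format filled in from parameters); the 'redist'
+            # pseudo-product is served from a plain base URL in the else branch.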
endpoint["parameters"]["path"] += f"/{dl_utils.galaxy_path(compressed_md5)}" + url = dl_utils.merge_url_with_params( + endpoint["url_format"], endpoint["parameters"] + ) + else: + endpoint["url"] += "/" + dl_utils.galaxy_path(compressed_md5) + url = endpoint["url"] + + buffer = bytes() + compressed_sum = hashlib.md5() + download_size = 0 + response = None + while retries > 0: + buffer = bytes() + compressed_sum = hashlib.md5() + download_size = 0 + decompressor = zlib.decompressobj() + try: + response = self.session.get(url, stream=True, timeout=10) + response.raise_for_status() + for chunk in response.iter_content(1024 * 512): + download_size += len(chunk) + compressed_sum.update(chunk) + decompressed = decompressor.decompress(chunk) + buffer += decompressed + self.speed_queue.put((len(chunk), len(decompressed))) + + except Exception as e: + print("Connection failed", e) + if response and response.status_code == 401: + self.results_queue.put(DownloadTaskResult(False, FailReason.UNAUTHORIZED, task)) + print("Connection failed, unauthorized") + return + retries -= 1 + time.sleep(2) + continue + break + else: + self.results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task)) + return + + decompressed_size = 0 + try: + decompressed_size = len(buffer) + self.shared_memory.buf[task.memory_segment.offset:decompressed_size+task.memory_segment.offset] = buffer + + except Exception as e: + print("ERROR", e) + self.results_queue.put(DownloadTaskResult(False, FailReason.UNKNOWN, task)) + return + + if compressed_sum.hexdigest() != compressed_md5: + self.results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task)) + return + + self.results_queue.put(DownloadTaskResult(True, None, task, download_size=download_size, decompressed_size=decompressed_size)) + + def v1(self, task: DownloadTask1): + retries = 5 + urls = self.secure_links[task.product_id] + + response = None + if type(urls) == str: + url = urls + else: + endpoint = copy(urls[0]) + endpoint["parameters"]["path"] += "/main.bin" + url = dl_utils.merge_url_with_params( + endpoint["url_format"], endpoint["parameters"] + ) + range_header = dl_utils.get_range_header(task.offset, task.size) + + buffer = bytes() + while retries > 0: + buffer = bytes() + try: + response = self.session.get(url, stream=True, timeout=10, headers={'Range': range_header}) + response.raise_for_status() + for chunk in response.iter_content(1024 * 512): + buffer += chunk + self.speed_queue.put((len(chunk), len(chunk))) + except Exception as e: + print("Connection failed", e) + #Handle exception + if response and response.status_code == 401: + self.results_queue.put(DownloadTaskResult(False, FailReason.UNAUTHORIZED, task)) + return + retries -= 1 + time.sleep(2) + continue + break + else: + self.results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task)) + return + + download_size = 0 + try: + download_size = len(buffer) + self.shared_memory.buf[task.memory_segment.offset:download_size + task.memory_segment.offset] = buffer + + except Exception as e: + print("ERROR", e) + self.results_queue.put(DownloadTaskResult(False, FailReason.UNKNOWN, task)) + return + + if len(buffer) != task.size: + self.results_queue.put(DownloadTaskResult(False, FailReason.CHECKSUM, task)) + return + + self.results_queue.put(DownloadTaskResult(True, None, task, download_size=download_size, decompressed_size=download_size)) + +class Writer(Process): + def __init__(self, shared_memory, writer_queue, results_queue, speed_queue, cache): + self.shared_memory = 
SharedMemory(name=shared_memory) + self.cache = cache + self.writer_queue: Queue = writer_queue + self.results_queue: Queue = results_queue + self.speed_queue: Queue = speed_queue + self.early_exit = False + super().__init__() + + def run(self): + file_handle = None + current_file = '' + + while not self.early_exit: + try: + task: Union[WriterTask, TerminateWorker] = self.writer_queue.get(timeout=2) + except Empty: + continue + + if isinstance(task, TerminateWorker): + self.results_queue.put(WriterTaskResult(True, task)) + break + + written = 0 + + task_path = dl_utils.get_case_insensitive_name(os.path.join(task.destination, task.file_path)) + split_path = os.path.split(task_path) + if split_path[0] and not os.path.exists(split_path[0]): + dl_utils.prepare_location(split_path[0]) + + if task.flags & TaskFlag.CREATE_FILE: + open(task_path, 'a').close() + self.results_queue.put(WriterTaskResult(True, task)) + continue + + elif task.flags & TaskFlag.CREATE_SYMLINK: + dest = task.old_destination or task.destination + # Windows will likely not have this ran ever + if os.path.exists(task_path): + os.unlink(task_path) + os.symlink(dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file)), task_path) + self.results_queue.put(WriterTaskResult(True, task)) + continue + + elif task.flags & TaskFlag.OPEN_FILE: + if file_handle: + print("Opening on unclosed file") + file_handle.close() + file_handle = open(task_path, 'wb') + current_file = task_path + + self.results_queue.put(WriterTaskResult(True, task)) + continue + elif task.flags & TaskFlag.CLOSE_FILE: + if file_handle: + file_handle.close() + file_handle = None + self.results_queue.put(WriterTaskResult(True, task)) + continue + + elif task.flags & TaskFlag.COPY_FILE: + if file_handle and task.file_path == current_file: + print("Copy on unclosed file") + file_handle.close() + file_handle = None + + if not task.old_file: + # if this ever happens.... + self.results_queue.put(WriterTaskResult(False, task)) + continue + + dest = task.old_destination or task.destination + try: + shutil.copy(dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file)), task_path) + except shutil.SameFileError: + pass + except Exception: + self.results_queue.put(WriterTaskResult(False, task)) + continue + self.results_queue.put(WriterTaskResult(True, task)) + continue + + elif task.flags & TaskFlag.RENAME_FILE: + if file_handle and task.file_path == current_file: + print("Renaming on unclosed file") + file_handle.close() + file_handle = None + + if not task.old_file: + # if this ever happens.... + self.results_queue.put(WriterTaskResult(False, task)) + continue + + if task.flags & TaskFlag.DELETE_FILE and os.path.exists(task_path): + try: + os.remove(task_path) + except OSError as e: + self.results_queue.put(WriterTaskResult(False, task)) + continue + dest = task.old_destination or task.destination + try: + os.rename(dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file)), task_path) + except OSError as e: + self.results_queue.put(WriterTaskResult(False, task)) + continue + + self.results_queue.put(WriterTaskResult(True, task)) + continue + + elif task.flags & TaskFlag.PATCH: + if file_handle and task.file_path == current_file: + print("Patching on unclosed file") + file_handle.close() + file_handle = None + + if not task.old_file or not task.patch_file: + # if this ever happens.... 
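+                    # (a patch task without a source file or delta is
+                    # malformed, so report failure rather than guessing)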
+ self.results_queue.put(WriterTaskResult(False, task)) + continue + + try: + dest = task.old_destination or task.destination + source = os.path.join(dest, task.old_file) + source = dl_utils.get_case_insensitive_name(source) + patch = os.path.join(task.destination, task.patch_file) + patch = dl_utils.get_case_insensitive_name(patch) + target = task_path + + patcher.patch(source, patch, target, self.speed_queue) + + except Exception as e: + print("Patch failed", e) + print(traceback.format_exc()) + self.results_queue.put(WriterTaskResult(False, task)) + continue + written = 0 + if os.path.exists(target): + written = os.path.getsize(target) + self.results_queue.put(WriterTaskResult(True, task, written=written)) + continue + + elif task.flags & TaskFlag.DELETE_FILE: + if file_handle and task.file_path == current_file: + print("Deleting on unclosed file") + file_handle.close() + file_handle = None + try: + if os.path.exists(task_path): + os.remove(task_path) + except OSError as e: + self.results_queue.put(WriterTaskResult(False, task)) + continue + + elif task.flags & TaskFlag.MAKE_EXE: + if file_handle and task.file_path == current_file: + print("Making exe on unclosed file") + file_handle.close() + file_handle = None + if sys.platform != 'win32': + try: + st = os.stat(task_path) + os.chmod(task_path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) + except Exception as e: + self.results_queue.put(WriterTaskResult(False, task)) + continue + self.results_queue.put(WriterTaskResult(True, task)) + continue + + try: + if task.shared_memory: + if not task.size: + print("No size") + self.results_queue.put(WriterTaskResult(False, task)) + continue + offset = task.shared_memory.offset + end = offset + task.size + left = task.size + buffer = BytesIO(self.shared_memory.buf[offset:end].tobytes()) + while left > 0: + chunk = buffer.read(min(1024 * 1024, left)) + written += file_handle.write(chunk) + self.speed_queue.put((len(chunk), 0)) + left -= len(chunk) + + if task.flags & TaskFlag.OFFLOAD_TO_CACHE and task.hash: + cache_file_path = os.path.join(self.cache, task.hash) + dl_utils.prepare_location(self.cache) + cache_file = open(cache_file_path, 'wb') + cache_file.write(self.shared_memory.buf[offset:end].tobytes()) + self.speed_queue.put((task.size, 0)) + cache_file.close() + elif task.old_file: + if not task.size: + print("No size") + self.results_queue.put(WriterTaskResult(False, task)) + continue + dest = task.old_destination or task.destination + old_file_path = dl_utils.get_case_insensitive_name(os.path.join(dest, task.old_file)) + old_file_handle = open(old_file_path, "rb") + if task.old_offset: + old_file_handle.seek(task.old_offset) + left = task.size + if task.flags & TaskFlag.ZIP_DEC: + decompressor = zlib.decompressobj(-15) + else: + decompressor = None + while left > 0: + chunk = old_file_handle.read(min(1024*1024, left)) + if decompressor: + data = decompressor.decompress(chunk) + else: + data = chunk + written += file_handle.write(data) + self.speed_queue.put((len(data), len(chunk))) + left -= len(chunk) + old_file_handle.close() + if task.flags & TaskFlag.ZIP_DEC: + written = written - task.size + + except Exception as e: + print("Writer exception", e) + self.results_queue.put(WriterTaskResult(False, task)) + else: + self.results_queue.put(WriterTaskResult(True, task, written=written)) + + + self.shared_memory.close() + shutil.rmtree(self.cache, ignore_errors=True) + diff --git a/gogdl/imports.py b/gogdl/imports.py index bdfa9c4..b67d4b1 100755 --- a/gogdl/imports.py +++ 
b/gogdl/imports.py @@ -23,6 +23,9 @@ def get_info(args, unknown_args): installed_language = None info = {} if platform != "linux": + if not info_file: + print("Error importing, no info file") + return f = open(info_file, "r") info = json.loads(f.read()) f.close() @@ -42,22 +45,22 @@ def get_info(args, unknown_args): f.close() build_id = build.get("buildId") - version_name = build_id - if build_id and platform != "linux": - # Get version name - builds_res = requests.get( - f"{constants.GOG_CONTENT_SYSTEM}/products/{game_id}/os/{platform}/builds?generation=2", - headers={ - "User-Agent": "GOGGalaxyCommunicationService/2.0.4.164 (Windows_32bit)" - }, - ) - builds = builds_res.json() - target_build = builds["items"][0] - for build in builds["items"]: - if build["build_id"] == build_id: - target_build = build - break - version_name = target_build["version_name"] + version_name = build_id + if build_id and platform != "linux": + # Get version name + builds_res = requests.get( + f"{constants.GOG_CONTENT_SYSTEM}/products/{game_id}/os/{platform}/builds?generation=2", + headers={ + "User-Agent": "GOGGalaxyCommunicationService/2.0.4.164 (Windows_32bit)" + }, + ) + builds = builds_res.json() + target_build = builds["items"][0] + for build in builds["items"]: + if build["build_id"] == build_id: + target_build = build + break + version_name = target_build["version_name"] if platform == "linux" and os.path.exists(os.path.join(path, "gameinfo")): # Linux version installed using installer gameinfo_file = open(os.path.join(path, "gameinfo"), "r") @@ -82,7 +85,7 @@ def get_info(args, unknown_args): "title": title, "tasks": info["playTasks"] if info and info.get("playTasks") else None, "installedLanguage": installed_language, - "installedWithDlcs": with_dlcs, + "dlcs": with_dlcs, "platform": platform, "versionName": version_name, } @@ -91,20 +94,38 @@ def get_info(args, unknown_args): def load_game_details(path): + base_path = path found = glob.glob(os.path.join(path, "goggame-*.info")) build_id = glob.glob(os.path.join(path, "goggame-*.id")) platform = "windows" if not found: + base_path = os.path.join(path, "Contents", "Resources") found = glob.glob(os.path.join(path, "Contents", "Resources", "goggame-*.info")) build_id = glob.glob( os.path.join(path, "Contents", "Resources", "goggame-*.id") ) platform = "osx" if not found: + base_path = os.path.join(path, "game") found = glob.glob(os.path.join(path, "game", "goggame-*.info")) build_id = glob.glob(os.path.join(path, "game", "goggame-*.id")) platform = "linux" if not found: if os.path.exists(os.path.join(path, "gameinfo")): - return (None, None, "linux", False) - return (found[0], build_id[0] if build_id else None, platform, len(found) > 1) + return (None, None, "linux", []) + + root_id = None + # Array of DLC game ids + dlcs = [] + for info in found: + with open(info) as info_file: + data = json.load(info_file) + if not root_id: + root_id = data.get("rootGameId") + if data["gameId"] == root_id: + continue + + dlcs.append(data["gameId"]) + + return (os.path.join(base_path, f"goggame-{root_id}.info"), os.path.join(base_path, f"goggame-{root_id}.id") if build_id else None, platform, dlcs) + diff --git a/gogdl/launch.py b/gogdl/launch.py index ffc9a61..a034f60 100644 --- a/gogdl/launch.py +++ b/gogdl/launch.py @@ -3,6 +3,7 @@ import sys import subprocess import time +from gogdl.dl.dl_utils import get_case_insensitive_name from ctypes import * from gogdl.process import Process import signal @@ -54,7 +55,7 @@ def launch(arguments, unknown_args): working_dir = 
os.path.join(arguments.path, relative_working_dir) if not os.path.exists(executable): - executable = get_case_insensitive_name(arguments.path, executable) + executable = get_case_insensitive_name(executable) if len(wrapper) > 0 and wrapper[0] is not None: command.extend(wrapper) @@ -93,7 +94,7 @@ def launch(arguments, unknown_args): print("Launch command:", command) # Handle case sensitive file systems if not os.path.exists(working_dir): - working_dir = get_case_insensitive_name(arguments.path, working_dir) + working_dir = get_case_insensitive_name(working_dir) status = None if sys.platform == 'linux': @@ -214,13 +215,3 @@ def load_game_info(path, id, platform): return json.loads(data) -def get_case_insensitive_name(root, path): - if not root[len(root) - 1] in ["/", "\\"]: - root = root + "/" - s_working_dir = path.replace(root, "").split(os.sep) - for directory in s_working_dir: - dir_list = os.listdir(root) - for existing_dir in dir_list: - if existing_dir.lower() == directory.lower(): - root = os.path.join(root, existing_dir) - return root diff --git a/gogdl/saves.py b/gogdl/saves.py index ac8bb94..e505d7f 100644 --- a/gogdl/saves.py +++ b/gogdl/saves.py @@ -231,7 +231,7 @@ def get_auth_ids(self): ) meta_url = builds["items"][0]["link"] - meta, headers = dl_utils.get_zlib_encoded(self.api, meta_url, self.logger) + meta, headers = dl_utils.get_zlib_encoded(self.api, meta_url) return meta["clientId"], meta["clientSecret"] def delete_file(self, file: SyncFile): diff --git a/gogdl/xdelta/__init__.py b/gogdl/xdelta/__init__.py new file mode 100644 index 0000000..6ccc123 --- /dev/null +++ b/gogdl/xdelta/__init__.py @@ -0,0 +1 @@ +# Python implementation of xdelta3 decoding only diff --git a/gogdl/xdelta/objects.py b/gogdl/xdelta/objects.py new file mode 100644 index 0000000..f2bb9b6 --- /dev/null +++ b/gogdl/xdelta/objects.py @@ -0,0 +1,139 @@ +from dataclasses import dataclass +from io import IOBase, BytesIO +from typing import Optional + +@dataclass +class CodeTable: + add_sizes = 17 + near_modes = 4 + same_modes = 3 + + cpy_sizes = 15 + + addcopy_add_max = 4 + addcopy_near_cpy_max = 6 + addcopy_same_cpy_max = 4 + + copyadd_add_max = 1 + copyadd_near_cpy_max = 4 + copyadd_same_cpy_max = 4 + + addcopy_max_sizes = [ [6,163,3],[6,175,3],[6,187,3],[6,199,3],[6,211,3],[6,223,3], + [4,235,1],[4,239,1],[4,243,1]] + copyadd_max_sizes = [[4,247,1],[4,248,1],[4,249,1],[4,250,1],[4,251,1],[4,252,1], + [4,253,1],[4,254,1],[4,255,1]] + +XD3_NOOP = 0 +XD3_ADD = 1 +XD3_RUN = 2 +XD3_CPY = 3 + +@dataclass +class Instruction: + type1:int = 0 + size1:int = 0 + type2:int = 0 + size2:int = 0 + +@dataclass +class HalfInstruction: + type: int = 0 + size: int = 0 + addr: int = 0 + + +@dataclass +class AddressCache: + s_near = CodeTable.near_modes + s_same = CodeTable.same_modes + next_slot = 0 + near_array = [0 for _ in range(s_near)] + same_array = [0 for _ in range(s_same * 256)] + + def update(self, addr): + self.near_array[self.next_slot] = addr + self.next_slot = (self.next_slot + 1) % self.s_near + + self.same_array[addr % (self.s_same*256)] = addr + +@dataclass +class Context: + source: IOBase + target: IOBase + + data_sec: BytesIO + inst_sec: BytesIO + addr_sec: BytesIO + + acache: AddressCache + dec_pos: int = 0 + cpy_len: int = 0 + cpy_off: int = 0 + dec_winoff: int = 0 + + target_buffer: Optional[bytearray] = None + +def build_code_table(): + table: list[Instruction] = [] + for _ in range(256): + table.append(Instruction()) + + cpy_modes = 2 + CodeTable.near_modes + CodeTable.same_modes + i = 0 + + 
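+    # This reproduces the RFC 3284 (VCDIFF) default code table: 256 entries,
+    # each packing one or two instructions. Layout mirrored by the loops below:
+    #   0          RUN (explicit size)
+    #   1..18      ADD (explicit size, then sizes 1..17)
+    #   19..162    CPY per mode (explicit size, then sizes 4..18)
+    #   163..246   ADD+CPY pairs
+    #   247..255   CPY+ADD pairs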
diff --git a/gogdl/xdelta/objects.py b/gogdl/xdelta/objects.py
new file mode 100644
index 0000000..f2bb9b6
--- /dev/null
+++ b/gogdl/xdelta/objects.py
@@ -0,0 +1,136 @@
+from dataclasses import dataclass, field
+from io import IOBase, BytesIO
+from typing import Optional
+
+class CodeTable:
+    # Parameters of the default VCDIFF/xdelta3 code table (RFC 3284, section 5.4)
+    add_sizes = 17
+    near_modes = 4
+    same_modes = 3
+
+    cpy_sizes = 15
+
+    addcopy_add_max = 4
+    addcopy_near_cpy_max = 6
+    addcopy_same_cpy_max = 4
+
+    copyadd_add_max = 1
+    copyadd_near_cpy_max = 4
+    copyadd_same_cpy_max = 4
+
+    addcopy_max_sizes = [[6, 163, 3], [6, 175, 3], [6, 187, 3], [6, 199, 3], [6, 211, 3], [6, 223, 3],
+                         [4, 235, 1], [4, 239, 1], [4, 243, 1]]
+    copyadd_max_sizes = [[4, 247, 1], [4, 248, 1], [4, 249, 1], [4, 250, 1], [4, 251, 1], [4, 252, 1],
+                         [4, 253, 1], [4, 254, 1], [4, 255, 1]]
+
+# Instruction types
+XD3_NOOP = 0
+XD3_ADD = 1
+XD3_RUN = 2
+XD3_CPY = 3
+
+@dataclass
+class Instruction:
+    type1: int = 0
+    size1: int = 0
+    type2: int = 0
+    size2: int = 0
+
+@dataclass
+class HalfInstruction:
+    type: int = 0
+    size: int = 0
+    addr: int = 0
+
+@dataclass
+class AddressCache:
+    # The caches must be per-instance fields; plain class-level lists would be
+    # shared between instances and defeat the per-window cache reset.
+    s_near: int = CodeTable.near_modes
+    s_same: int = CodeTable.same_modes
+    next_slot: int = 0
+    near_array: list = field(default_factory=lambda: [0] * CodeTable.near_modes)
+    same_array: list = field(default_factory=lambda: [0] * (CodeTable.same_modes * 256))
+
+    def update(self, addr):
+        self.near_array[self.next_slot] = addr
+        self.next_slot = (self.next_slot + 1) % self.s_near
+
+        self.same_array[addr % (self.s_same * 256)] = addr
+
+@dataclass
+class Context:
+    source: IOBase
+    target: IOBase
+
+    data_sec: BytesIO
+    inst_sec: BytesIO
+    addr_sec: BytesIO
+
+    acache: AddressCache
+    dec_pos: int = 0
+    cpy_len: int = 0
+    cpy_off: int = 0
+    dec_winoff: int = 0
+
+    target_buffer: Optional[bytearray] = None
+
+def build_code_table():
+    # Build the 256-entry default code table (RFC 3284, section 5.6)
+    table: list[Instruction] = [Instruction() for _ in range(256)]
+
+    cpy_modes = 2 + CodeTable.near_modes + CodeTable.same_modes
+    i = 0
+
+    table[i].type1 = XD3_RUN
+    i += 1
+    table[i].type1 = XD3_ADD
+    i += 1
+
+    for size1 in range(1, CodeTable.add_sizes + 1):
+        table[i].type1 = XD3_ADD
+        table[i].size1 = size1
+        i += 1
+
+    for mode in range(cpy_modes):
+        table[i].type1 = XD3_CPY + mode
+        i += 1
+        for size1 in range(4, 4 + CodeTable.cpy_sizes):
+            table[i].type1 = XD3_CPY + mode
+            table[i].size1 = size1
+            i += 1
+
+    for mode in range(cpy_modes):
+        for size1 in range(1, CodeTable.addcopy_add_max + 1):
+            is_near = mode < (2 + CodeTable.near_modes)
+            if is_near:
+                cpy_max = CodeTable.addcopy_near_cpy_max
+            else:
+                cpy_max = CodeTable.addcopy_same_cpy_max
+            for size2 in range(4, cpy_max + 1):
+                table[i].type1 = XD3_ADD
+                table[i].size1 = size1
+                table[i].type2 = XD3_CPY + mode
+                table[i].size2 = size2
+                i += 1
+
+    for mode in range(cpy_modes):
+        is_near = mode < (2 + CodeTable.near_modes)
+        if is_near:
+            cpy_max = CodeTable.copyadd_near_cpy_max
+        else:
+            cpy_max = CodeTable.copyadd_same_cpy_max
+        for size1 in range(4, cpy_max + 1):
+            for size2 in range(1, CodeTable.copyadd_add_max + 1):
+                table[i].type1 = XD3_CPY + mode
+                table[i].size1 = size1
+                table[i].type2 = XD3_ADD
+                table[i].size2 = size2
+                i += 1
+
+    return table
+
+CODE_TABLE = build_code_table()
+
+class ChecksumMismatch(AssertionError):
+    pass
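patcher.py below opens with read_integer_stream(), the decoder for the variable-length integers VCDIFF uses for all lengths and offsets: seven payload bits per byte, most significant group first, high bit set on every byte but the last. A few sanity-check values, assuming the function as defined below:

    from io import BytesIO
    assert read_integer_stream(BytesIO(b"\x2a")) == 42             # single byte
    assert read_integer_stream(BytesIO(b"\x82\x2c")) == 300        # (2 << 7) | 44
    assert read_integer_stream(BytesIO(b"\x81\x80\x00")) == 16384  # 1 << 14
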
diff --git a/gogdl/xdelta/patcher.py b/gogdl/xdelta/patcher.py
new file mode 100644
index 0000000..19f3a9f
--- /dev/null
+++ b/gogdl/xdelta/patcher.py
@@ -0,0 +1,206 @@
+from io import BytesIO
+import math
+from multiprocessing import Queue
+from zlib import adler32
+from gogdl.xdelta import objects
+
+# Decode a variable-length integer: big-endian base-128, 7 bits per byte,
+# high bit marks continuation (RFC 3284, section 2)
+def read_integer_stream(stream):
+    res = 0
+    while True:
+        res <<= 7
+        integer = stream.read(1)[0]
+        res |= (integer & 0b1111111)
+        if not (integer & 0b10000000):
+            break
+
+    return res
+
+def parse_halfinst(context: objects.Context, halfinst: objects.HalfInstruction):
+    if halfinst.size == 0:
+        halfinst.size = read_integer_stream(context.inst_sec)
+
+    if halfinst.type >= objects.XD3_CPY:
+        # Decode the copy address according to its cache mode
+        mode = halfinst.type - objects.XD3_CPY
+        same_start = 2 + context.acache.s_near
+
+        if mode < same_start:
+            halfinst.addr = read_integer_stream(context.addr_sec)
+
+            if mode == 0:
+                # VCD_SELF: address is used as-is
+                pass
+            elif mode == 1:
+                # VCD_HERE: address is relative to the current decode position
+                halfinst.addr = context.dec_pos - halfinst.addr
+                if halfinst.addr < 0:
+                    halfinst.addr = context.cpy_len + halfinst.addr
+            else:
+                # Near cache: offset from a recently used address
+                halfinst.addr += context.acache.near_array[mode - 2]
+        else:
+            # Same cache: a single byte indexes a previously seen address
+            mode -= same_start
+            addr = context.addr_sec.read(1)[0]
+            halfinst.addr = context.acache.same_array[(mode * 256) + addr]
+        context.acache.update(halfinst.addr)
+
+    context.dec_pos += halfinst.size
+
+def decode_halfinst(context: objects.Context, halfinst: objects.HalfInstruction, speed_queue: Queue):
+    take = halfinst.size
+
+    if halfinst.type == objects.XD3_RUN:
+        # RUN: repeat a single byte `take` times
+        byte = context.data_sec.read(1)
+
+        for _ in range(take):
+            context.target_buffer.extend(byte)
+
+        halfinst.type = objects.XD3_NOOP
+    elif halfinst.type == objects.XD3_ADD:
+        # ADD: copy literal data from the data section
+        buffer = context.data_sec.read(take)
+        assert len(buffer) == take
+        context.target_buffer.extend(buffer)
+        halfinst.type = objects.XD3_NOOP
+    else:  # XD3_CPY and higher
+        if halfinst.addr < (context.cpy_len or 0):
+            # Copy from the source window in 1 MiB chunks
+            context.source.seek(context.cpy_off + halfinst.addr)
+            left = take
+            while left > 0:
+                buffer = context.source.read(min(1024 * 1024, left))
+                size = len(buffer)
+                speed_queue.put((0, size))
+                context.target_buffer.extend(buffer)
+                left -= size
+        else:
+            # Copies referencing the target window itself (overlapping copies)
+            # are not implemented
+            raise NotImplementedError("xdelta3 overlapping copy is not supported")
+        halfinst.type = objects.XD3_NOOP
+
+def patch(source: str, patch: str, out: str, speed_queue: Queue):
+    src_handle = open(source, 'rb')
+    patch_handle = open(patch, 'rb')
+    dst_handle = open(out, 'wb')
+
+    # Verify the xdelta3 magic bytes; headers[3] is the format version,
+    # headers[4] the header indicator
+    headers = patch_handle.read(5)
+    if headers[0:3] != b'\xd6\xc3\xc4':
+        print("Specified patch file is unlikely to be an xdelta patch")
+        return
+
+    HDR_INDICATOR = headers[4]
+    COMPRESSOR_ID = HDR_INDICATOR & (1 << 0) != 0
+    CODE_TABLE = HDR_INDICATOR & (1 << 1) != 0
+    APP_HEADER = HDR_INDICATOR & (1 << 2) != 0
+    app_header_data = bytes()
+
+    if COMPRESSOR_ID or CODE_TABLE:
+        print("Secondary compressors and custom code tables are not yet supported")
+        return
+
+    if APP_HEADER:
+        app_header_size = read_integer_stream(patch_handle)
+        app_header_data = patch_handle.read(app_header_size)
+
+    context = objects.Context(src_handle, dst_handle, BytesIO(), BytesIO(), BytesIO(), objects.AddressCache())
+
+    win_number = 0
+    indicator = patch_handle.read(1)
+    win_indicator = indicator[0] if indicator else None
+    while win_indicator is not None:
+        # Address caches are reset at every window boundary
+        context.acache = objects.AddressCache()
+        source_used = win_indicator & (1 << 0) != 0
+        target_used = win_indicator & (1 << 1) != 0
+        adler32_sum = win_indicator & (1 << 2) != 0
+
+        if source_used:
+            source_segment_length = read_integer_stream(patch_handle)
+            source_segment_position = read_integer_stream(patch_handle)
+        else:
+            source_segment_length = 0
+            source_segment_position = 0
+
+        context.cpy_len = source_segment_length
+        context.cpy_off = source_segment_position
+        context.source.seek(context.cpy_off or 0)
+        context.dec_pos = 0
+
+        # Parse the delta encoding of this window
+        delta_encoding_length = read_integer_stream(patch_handle)
+
+        window_length = read_integer_stream(patch_handle)
+        context.target_buffer = bytearray()
+
+        # Non-zero would mean secondary compression, which is rejected above
+        delta_indicator = patch_handle.read(1)[0]
+
+        add_run_data_length = read_integer_stream(patch_handle)
+        instructions_length = read_integer_stream(patch_handle)
+        addresses_length = read_integer_stream(patch_handle)
+
+        parsed_sum = 0
+        if adler32_sum:
+            checksum = patch_handle.read(4)
+            parsed_sum = int.from_bytes(checksum, 'big')
+
+        context.data_sec = BytesIO(patch_handle.read(add_run_data_length))
+        context.inst_sec = BytesIO(patch_handle.read(instructions_length))
+        context.addr_sec = BytesIO(patch_handle.read(addresses_length))
+
+        current1 = objects.HalfInstruction()
+        current2 = objects.HalfInstruction()
+
+        while context.inst_sec.tell() < instructions_length or current1.type != objects.XD3_NOOP or current2.type != objects.XD3_NOOP:
+            if current1.type == objects.XD3_NOOP and current2.type == objects.XD3_NOOP:
+                # Each code byte expands to up to two half-instructions
+                ins = objects.CODE_TABLE[context.inst_sec.read(1)[0]]
+                current1.type = ins.type1
+                current2.type = ins.type2
+                current1.size = ins.size1
+                current2.size = ins.size2
+
+                if current1.type != objects.XD3_NOOP:
+                    parse_halfinst(context, current1)
+                if current2.type != objects.XD3_NOOP:
+                    parse_halfinst(context, current2)
+
+            while current1.type != objects.XD3_NOOP:
+                decode_halfinst(context, current1, speed_queue)
+
+            while current2.type != objects.XD3_NOOP:
+                decode_halfinst(context, current2, speed_queue)
+
+        if adler32_sum:
+            calculated_sum = adler32(context.target_buffer)
+            if parsed_sum != calculated_sum:
+                raise objects.ChecksumMismatch
+
+        # Flush the decoded window to disk in 1 MiB chunks
+        total_size = len(context.target_buffer)
+        chunk_size = 1024 * 1024
+        for i in range(math.ceil(total_size / chunk_size)):
+            chunk = context.target_buffer[i * chunk_size : (i + 1) * chunk_size]
+            context.target.write(chunk)
+            speed_queue.put((len(chunk), 0))
+
+        context.target.flush()
+
+        indicator = patch_handle.read(1)
+        if not indicator:
+            # No more windows in the patch
+            win_indicator = None
+            continue
+        win_indicator = indicator[0]
+        win_number += 1
+
+    dst_handle.flush()
+    src_handle.close()
+    patch_handle.close()
+    dst_handle.close()
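
For completeness, a hedged sketch of how the new patcher is presumably driven end to end; the paths are invented, and the queue collects the (bytes_written, bytes_read) tuples the decoder emits for progress reporting:

    from multiprocessing import Queue
    from gogdl.xdelta import patcher

    progress = Queue()
    patcher.patch("/tmp/old/game.bin", "/tmp/game.xd3", "/tmp/new/game.bin", progress)
    while not progress.empty():
        written, read = progress.get()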