From 8f5af908270357c34e223b7108cc1f2d62e53f1a Mon Sep 17 00:00:00 2001
From: rix
Date: Sun, 12 Jan 2025 19:21:27 +0100
Subject: [PATCH] Improve process exit condition handling

This should allow for better debugging of #820.

Unified "My-JDownloader" naming across the codebase for consistency.
Enhanced subprocess handling with improved termination logic and added
safeguards for graceful shutdown. Addressed minor code errors and
improved the retry mechanism for My-JDownloader connection attempts.
---
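Reviewer note (placed after the "---" separator, so it is ignored by git am
and is not part of the applied diff): the new shutdown path collects every
child in a module-level `subprocesses` list and tears it down with the usual
terminate-then-join pattern. A minimal, runnable sketch of that pattern; the
worker function and process names are illustrative and do not come from this
patch:

```python
import multiprocessing
import time


def worker():
    # Stand-in child process; loops until it is terminated.
    while True:
        time.sleep(1)


if __name__ == "__main__":
    children = [multiprocessing.Process(name=f"Worker-{i}", target=worker) for i in range(3)]
    for child in children:
        child.start()
    time.sleep(2)
    for child in children:
        if child.is_alive():
            child.terminate()  # Sends SIGTERM to the child process
        child.join()  # Reap the child so no zombie process remains
```

Joining after terminate matters: without it, children may still be shutting
down when the parent exits.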
neu starten!") time.sleep(3) if connect_device(): diff --git a/feedcrawler/providers/version.py b/feedcrawler/providers/version.py index f58cc739..32cf582f 100644 --- a/feedcrawler/providers/version.py +++ b/feedcrawler/providers/version.py @@ -8,7 +8,7 @@ def get_version(): - return "21.0.5" + return "21.0.6" def create_version_file(): diff --git a/feedcrawler/run.py b/feedcrawler/run.py index 6ff59744..01193842 100644 --- a/feedcrawler/run.py +++ b/feedcrawler/run.py @@ -26,187 +26,211 @@ version = f"v.{version.get_version()}" +subprocesses = [] + def main(): - with multiprocessing.Manager() as manager: - shared_state_dict = manager.dict() - shared_state_lock = manager.Lock() - shared_state.set_state(shared_state_dict, shared_state_lock) + try: + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) - parser = argparse.ArgumentParser() - parser.add_argument("--log-level", help="Legt fest, wie genau geloggt wird (INFO, DEBUG)") - parser.add_argument("--port", help="Legt den Port des Webservers fest") - parser.add_argument("--delay", help="Verzögere Suchlauf nach Start um ganze Zahl in Sekunden") - arguments = parser.parse_args() + with multiprocessing.Manager() as manager: + shared_state_dict = manager.dict() + shared_state_lock = manager.Lock() + shared_state.set_state(shared_state_dict, shared_state_lock) - shared_state.set_initial_values() + parser = argparse.ArgumentParser() + parser.add_argument("--log-level", help="Legt fest, wie genau geloggt wird (INFO, DEBUG)") + parser.add_argument("--port", help="Legt den Port des Webservers fest") + parser.add_argument("--delay", help="Verzögere Suchlauf nach Start um ganze Zahl in Sekunden") + arguments = parser.parse_args() - if shared_state.values["gui"]: - window = gui.create_main_window() - sys.stdout = gui.PrintToConsoleAndGui(window) - else: - sys.stdout = Unbuffered(sys.stdout) - - print(f"""┌──────────────────────────────────────────────┐ - FeedCrawler {version} von RiX - https://github.com/rix1337/FeedCrawler -└──────────────────────────────────────────────┘""") - - local_address = f'http://{check_ip()}' - port = int('9090') - if arguments.port: - port = int(arguments.port) - - if os.environ.get('DOCKER'): - config_path = "/config" - local_address = f'http://[HOST_IP]' - elif os.environ.get('GITHUB_ACTION_PR'): - config_path = "/home/runner/work/_temp/feedcrawler" - else: - config_path_file = "FeedCrawler.conf" - if not os.path.exists(config_path_file): - path_config(port, local_address, shared_state) - with open(config_path_file, "r") as f: - config_path = f.readline() + shared_state.set_initial_values() - os.makedirs(config_path, exist_ok=True) + if shared_state.values["gui"]: + window = gui.create_main_window() + sys.stdout = gui.PrintToConsoleAndGui(window) + else: + sys.stdout = Unbuffered(sys.stdout) + + print(f"""┌──────────────────────────────────────────────┐ + FeedCrawler {version} von RiX + https://github.com/rix1337/FeedCrawler + └──────────────────────────────────────────────┘""") + + local_address = f'http://{check_ip()}' + port = int('9090') + if arguments.port: + port = int(arguments.port) + + if os.environ.get('DOCKER'): + config_path = "/config" + local_address = f'http://<<>>]' + elif os.environ.get('GITHUB_ACTION_PR'): + config_path = "/home/runner/work/_temp/feedcrawler" + else: + config_path_file = "FeedCrawler.conf" + if not os.path.exists(config_path_file): + path_config(port, local_address, shared_state) + with open(config_path_file, "r") as f: + config_path = 
+    try:
+        signal.signal(signal.SIGINT, signal_handler)
+        signal.signal(signal.SIGTERM, signal_handler)
+
+        with multiprocessing.Manager() as manager:
+            shared_state_dict = manager.dict()
+            shared_state_lock = manager.Lock()
+            shared_state.set_state(shared_state_dict, shared_state_lock)
+
+            parser = argparse.ArgumentParser()
+            parser.add_argument("--log-level", help="Legt fest, wie genau geloggt wird (INFO, DEBUG)")
+            parser.add_argument("--port", help="Legt den Port des Webservers fest")
+            parser.add_argument("--delay", help="Verzögere Suchlauf nach Start um ganze Zahl in Sekunden")
+            arguments = parser.parse_args()
+
+            shared_state.set_initial_values()
+
+            if shared_state.values["gui"]:
+                window = gui.create_main_window()
+                sys.stdout = gui.PrintToConsoleAndGui(window)
+            else:
+                sys.stdout = Unbuffered(sys.stdout)
+
+            print(f"""┌──────────────────────────────────────────────┐
+  FeedCrawler {version} von RiX
+  https://github.com/rix1337/FeedCrawler
+  └──────────────────────────────────────────────┘""")
+
+            local_address = f'http://{check_ip()}'
+            port = int('9090')
+            if arguments.port:
+                port = int(arguments.port)
+
+            if os.environ.get('DOCKER'):
+                config_path = "/config"
+                local_address = f'http://[HOST_IP]'
+            elif os.environ.get('GITHUB_ACTION_PR'):
+                config_path = "/home/runner/work/_temp/feedcrawler"
+            else:
+                config_path_file = "FeedCrawler.conf"
+                if not os.path.exists(config_path_file):
+                    path_config(port, local_address, shared_state)
+                with open(config_path_file, "r") as f:
+                    config_path = f.readline()
+
+            os.makedirs(config_path, exist_ok=True)
+
+            try:
+                temp_file = tempfile.TemporaryFile(dir=config_path)
+                temp_file.close()
+            except Exception as e:
+                print(f'Auf das Verzeichnis "{config_path}" konnte nicht zugegriffen werden: {e}! '
+                      f'Beende FeedCrawler!')
+                sys.exit(1)
+
+            shared_state.set_files(config_path)
+
+            print(f'Nutze das Verzeichnis "{config_path}" für Einstellungen/Logs')
+
+            log_level = logging.DEBUG if arguments.log_level == "DEBUG" else logging.INFO
+
+            shared_state.update("log_level", log_level)
+            shared_state.set_logger()
+            shared_state.set_sites()
+
+            if not os.environ.get('GITHUB_ACTION_PR') and not get_clean_hostnames(shared_state):
+                hostnames_config(port, local_address, shared_state)
+                get_clean_hostnames(shared_state)
+
+            if not os.environ.get('GITHUB_ACTION_PR'):
+                feedcrawler = CrawlerConfig('FeedCrawler')
+                user = feedcrawler.get('myjd_user')
+                password = feedcrawler.get('myjd_pass')
+                device = feedcrawler.get('myjd_device')
+
+                if user and password and device:
+                    set_device_from_config()
+                else:
+                    myjd_config(port, local_address, shared_state)
+
+                process_jdownloader = multiprocessing.Process(name="JDownloaderConnection", target=jdownloader_connection,
+                                                              args=(shared_state_dict, shared_state_lock))
+                subprocesses.append(process_jdownloader)
+                process_jdownloader.start()
+
+                feedcrawler = CrawlerConfig('FeedCrawler')
+                if not os.environ.get('DOCKER') and not arguments.port:
+                    port = int(feedcrawler.get("port"))
+
+                if feedcrawler.get("prefix"):
+                    prefix = f"/{feedcrawler.get('prefix')}"
+                else:
+                    prefix = ''
+
+                print(f'Der Webserver ist erreichbar unter "{local_address}:{port}{prefix}"')
+
+                shared_state.set_connection_info(local_address, port, prefix)
+
+                CrawlerConfig("FeedCrawler").remove_redundant_entries()
+                remove_redundant_db_tables(shared_state.values["dbfile"])
+
+                process_web_server = multiprocessing.Process(name="WebServer", target=web_server,
+                                                             args=(shared_state_dict, shared_state_lock,))
+                subprocesses.append(process_web_server)
+                process_web_server.start()
+
+                if arguments.delay:
+                    delay = int(arguments.delay)
+                else:
+                    delay = 10
+                if not os.environ.get('GITHUB_ACTION_PR'):
+                    time.sleep(delay)
+                    while not shared_state.values["connected"]:
+                        if shared_state.values["exiting"]:
+                            sys.exit(1)
+                        print(
+                            f"Verbindung zu JDownloader noch nicht hergestellt - verzögere Suchlauf um {delay} Sekunden")
+                        time.sleep(delay)
+
+                process_feed_crawler = multiprocessing.Process(name="FeedCrawler", target=feed_crawler,
+                                                               args=(shared_state_dict, shared_state_lock,))
+                subprocesses.append(process_feed_crawler)
+                process_feed_crawler.start()
+
+                process_watch_packages = multiprocessing.Process(name="PackageWatcher", target=watch_packages,
+                                                                 args=(shared_state_dict, shared_state_lock,))
+                subprocesses.append(process_watch_packages)
+                process_watch_packages.start()
+
+                if shared_state.values["gui"]:
+                    gui.main_gui(window, shared_state_dict, shared_state_lock)
+                    sys.stdout = sys.__stdout__
+
+                else:  # regular console
+                    print('Drücke [Strg] + [C] zum Beenden')
+                    try:
+                        while True:
+                            signal.pause()
+                    except AttributeError:
+                        while True:
+                            time.sleep(1)
+            else:
+                feed_crawler(shared_state_dict, shared_state_lock)
+                process_web_server.terminate()
+
+    except KeyboardInterrupt:
+        print("[Strg] + [C] empfangen!")
+
+    except Exception as e:
+        print(f"Haupt-Thread abgestürzt: {e}")
+
+    finally:
+        print("Haupt-Thread beendet. Beende Sub-Prozesse...")
+        terminate_all_processes()
+        print("Alle Prozesse beendet!")
+        sys.exit(0)
 
 
-def jdownloader_connection(shared_state_dict, shared_state_lock):
-    shared_state.set_state(shared_state_dict, shared_state_lock)
-
-    shared_state.set_device_from_config()
-    connection_established = shared_state.get_device() and shared_state.get_device().name
-
-    try:
-        if not connection_established:
-            i = 0
-            while i < 10:
-                i += 1
-                print(f'Verbindungsversuch {i} mit My JDownloader gescheitert.')
-                time.sleep(60)
-                set_device_from_config()
-                connection_established = shared_state.get_device() and shared_state.get_device().name
-                if connection_established:
-                    break
-    except KeyboardInterrupt:
-        sys.exit(1)
-
-    if connection_established:
-        print(f'Erfolgreich mit My JDownloader verbunden. Gerätename: "{shared_state.get_device().name}"')
-        shared_state.update("connected", True)
-    else:
-        print('My JDownloader Zugangsversuche nicht erfolgreich! Beende FeedCrawler!')
-        sys.exit(1)
+def signal_handler(sig, frame):
+    print(f"Signal {sig} empfangen. Beende Haupt-Thread...")
+    sys.exit(0)
+
+
+def terminate_all_processes():
+    """Terminate all subprocesses."""
+    for process in subprocesses:
+        if process.is_alive():
+            print(f"Beende Sub-Prozess {process.name}...")
+            process.terminate()  # Sends SIGTERM to request shutdown
+            process.join()  # Wait until the process has fully stopped
+        else:
+            print(f"Sub-Prozess {process.name} bereits beendet...")
+
+
+def jdownloader_connection(shared_state_dict, shared_state_lock):
+    try:
+        shared_state.set_state(shared_state_dict, shared_state_lock)
+        shared_state.set_device_from_config()
+        connection_established = shared_state.get_device() and shared_state.get_device().name
+        if not connection_established:
+            for i in range(10):  # Retry up to 10 times
+                print(f'Verbindungsversuch {i + 1} mit My-JDownloader gescheitert.')
+                time.sleep(6)  # Keep sleep short for responsiveness
+                set_device_from_config()
+                connection_established = shared_state.get_device() and shared_state.get_device().name
+                if connection_established:
+                    break
+
+        if connection_established:
+            print(f'Erfolgreich mit My-JDownloader verbunden. Gerätename: "{shared_state.get_device().name}"')
+            shared_state.update("connected", True)
+        else:
+            print('My-JDownloader-Zugangsversuche nicht erfolgreich! Beende FeedCrawler!')
+            shared_state.update("exiting", True)
+
+    except KeyboardInterrupt:
+        print("Breche Verbindungsversuche ab...")
+        shared_state.update("exiting", True)
+    except Exception as e:
+        print(f"Error in JDownloader connection: {e}")
 
 
 if __name__ == "__main__":
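Reviewer note (editorial, not part of the applied diff): the reworked
`jdownloader_connection` bounds its retries and reports failure through the
shared `exiting` flag instead of calling `sys.exit()` inside a child process.
A simplified sketch of that handshake; `connect` is a hypothetical stand-in
for the real `set_device_from_config()` call:

```python
import time

MAX_ATTEMPTS = 10
RETRY_DELAY = 6  # seconds, mirroring the shortened sleep in this patch


def connect_with_retries(state, connect):
    # 'state' is a dict-like object shared with the main process;
    # 'connect' is a hypothetical callable returning True on success.
    for attempt in range(1, MAX_ATTEMPTS + 1):
        if connect():
            state["connected"] = True
            return True
        print(f"Verbindungsversuch {attempt} gescheitert.")
        time.sleep(RETRY_DELAY)
    state["exiting"] = True  # Tell the waiting main process to give up
    return False
```

The main process polls `connected` and bails out as soon as `exiting` is set,
so a dead connection no longer leaves the startup loop waiting forever.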
diff --git a/feedcrawler/web_interface/vuejs_frontend/package-lock.json b/feedcrawler/web_interface/vuejs_frontend/package-lock.json
index d44d2ced..acbb283e 100644
--- a/feedcrawler/web_interface/vuejs_frontend/package-lock.json
+++ b/feedcrawler/web_interface/vuejs_frontend/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "feedcrawler-web",
-  "version": "21.0.5",
+  "version": "21.0.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "feedcrawler-web",
-      "version": "21.0.5",
+      "version": "21.0.6",
       "dependencies": {
         "@formkit/i18n": "^1.6.9",
         "@formkit/vue": "^1.6.9",
diff --git a/feedcrawler/web_interface/vuejs_frontend/package.json b/feedcrawler/web_interface/vuejs_frontend/package.json
index e53ec328..f86889fd 100644
--- a/feedcrawler/web_interface/vuejs_frontend/package.json
+++ b/feedcrawler/web_interface/vuejs_frontend/package.json
@@ -1,6 +1,6 @@
 {
   "name": "feedcrawler-web",
-  "version": "21.0.5",
+  "version": "21.0.6",
   "type": "module",
   "scripts": {
     "dev": "vite",
diff --git a/feedcrawler/web_interface/vuejs_frontend/src/components/MyJD.vue b/feedcrawler/web_interface/vuejs_frontend/src/components/MyJD.vue
index f6434f79..5974aa7f 100644
--- a/feedcrawler/web_interface/vuejs_frontend/src/components/MyJD.vue
+++ b/feedcrawler/web_interface/vuejs_frontend/src/components/MyJD.vue
@@ -462,7 +462,7 @@ function openCaptcha(index) {
           y="0px">
-          My JDownloader
+          My-JDownloader
diff --git a/setup.py b/setup.py
index b1ad65f4..7b3afecd 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@
     version=get_version(),
     author="rix1337",
     author_email="",
-    description="Automate downloads using predefined sites and the My JDownloader API",
+    description="Automate downloads using predefined sites and the My-JDownloader API",
     long_description=long_description,
     long_description_content_type="text/markdown",
     url="https://github.com/rix1337/FeedCrawler",
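Reviewer note (editorial postscript): the whole cleanup now hinges on
`sys.exit()` raising `SystemExit` inside the signal handler, which unwinds
into `main()`'s `finally` block. A compact, runnable sketch of why that works;
the prints are illustrative:

```python
import signal
import sys
import time


def signal_handler(sig, frame):
    # sys.exit() raises SystemExit; it is not caught by "except Exception",
    # but it does trigger any enclosing "finally" blocks on the way out.
    print(f"Signal {sig} empfangen. Beende Haupt-Thread...")
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

try:
    while True:
        time.sleep(1)  # Idle main thread; an incoming signal interrupts the sleep
finally:
    print("Aufräumarbeiten laufen hier...")  # terminate_all_processes() in the real code
```

This is also why the patch registers the handlers before anything else in
main(): a signal that arrives during startup still routes through the same
`finally` cleanup.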