diff --git a/src/module/core/data_migration.py b/src/module/core/data_migration.py
new file mode 100644
index 00000000..b0b7fecf
--- /dev/null
+++ b/src/module/core/data_migration.py
@@ -0,0 +1,32 @@
+import os
+
+from module.models import BangumiData
+from module.utils import json_config
+from module.database import BangumiDatabase
+
+# Location of the legacy JSON store that predates the database backend.
+OLD_DATA_PATH = "data/data.json"
+
+
+def data_migration():
+    """Migrate the legacy JSON store into the database.
+
+    Returns:
+        bool: True when a legacy file existed and was migrated (the file
+        is deleted afterwards); False when there is nothing to migrate.
+    """
+    if not os.path.isfile(OLD_DATA_PATH):
+        return False
+    old_data = json_config.load(OLD_DATA_PATH)
+    rss_link = old_data["rss_link"]
+    # Legacy entries carry no RSS link; attach the global feed to each.
+    new_data = [
+        BangumiData(**info, rss_link=[rss_link])
+        for info in old_data["bangumi_info"]
+    ]
+    with BangumiDatabase() as database:
+        database.update_table()
+        database.insert_list(new_data)
+    # Delete the legacy file only after a successful import.
+    os.remove(OLD_DATA_PATH)
+    return True
diff --git a/src/module/core/sub_thread.py b/src/module/core/sub_thread.py
index 297ac179..b2e640b8 100644
--- a/src/module/core/sub_thread.py
+++ b/src/module/core/sub_thread.py
@@ -3,6 +3,8 @@
 import time
 import logging
 import threading
+from .data_migration import data_migration
+
 from module.rss import RSSAnalyser, add_rules
 from module.manager import Renamer, FullSeasonGet
 from module.database import BangumiDatabase
@@ -83,13 +85,28 @@
     return {"status": "ok"}
 
 
+def first_run():
+    """One-time setup on first launch.
+
+    If the database file is missing, either migrate a legacy
+    data/data.json into the database or, on a genuine first run,
+    initialise the download client and register the configured RSS feed.
+    """
+    if not os.path.exists(DATA_PATH):
+        if data_migration():
+            logger.info("Updated, data migration completed.")
+        else:
+            logger.info("First run, init downloader.")
+            with DownloadClient() as client:
+                client.init_downloader()
+                client.add_rss_feed(settings.rss_link())
+
+
 async def start_program():
     global rss_thread, rename_thread
     start_info()
-    if not os.path.exists(DATA_PATH):
-        with DownloadClient() as client:
-            client.init_downloader()
-            client.add_rss_feed(settings.rss_link())
+    # First init
+    first_run()
     with BangumiDatabase() as database:
         database.update_table()
     rss_thread = threading.Thread(target=rss_loop, args=(stop_event,))