Last active
September 18, 2025 19:43
-
-
Save cooperdk/ab548ea772ec0c940b1e8f1e54c25205 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import os | |
| import json | |
| import sqlite3 | |
| import threading | |
| import requests | |
| import zipfile | |
| import py7zr | |
| import rarfile | |
| import subprocess | |
| import platform | |
| import urllib.parse | |
| from bs4 import BeautifulSoup | |
| import tkinter as tk | |
| from tkinter import ttk, messagebox, filedialog | |
| import socket | |
| import webbrowser | |
# Where persisted state lives: settings.json and the sqlite DB under config/.
CONFIG_DIR = "config"
SETTINGS_FILE = os.path.join(CONFIG_DIR, "settings.json")
DB_FILE = os.path.join(CONFIG_DIR, "data.db")

# f95zone's numeric prefix ids for game engines.  The ids are the site's own
# values — presumably stable, but confirm against the live filter UI.
ENGINE_PREFIXES = {
    1: "QSP",
    2: "RPGM",
    3: "Unity",
    4: "HTML",
    5: "RAGS",
    6: "Java",
    7: "Ren'Py",
    8: "Flash",
    12: "ADRIFT",
    14: "Others",
    17: "Tads",
    30: "Wolf RPG",
    31: "Unreal Engine",
    47: "WebGL"
}

# Prefix ids for release status.
STATUS_PREFIXES = {
    18: "Completed",
    20: "Onhold",
    22: "Abandoned"
}

# Additional non-engine, non-status prefixes (not referenced elsewhere in the
# visible code; kept for completeness).
OTHER_GAME_PREFIXES = {
    13: "VN",
    19: "Collection",
    23: "SiteRip"
}

# Static tag-id -> tag-name table used to build the search UI and the
# name->id lookup in load_tag_prefix_maps().
TAGS = {
    30: "incest",
    44: "slave",
    45: "rpg",
    75: "milf",
    103: "corruption",
    105: "bestiality",
    107: "3dcg",
    111: "mind control",
    130: "big tits",
    141: "sci-fi",
    162: "adventure",
    173: "male protagonist",
    174: "male domination",
    176: "masturbation",
    179: "fantasy",
    181: "lesbian",
    182: "monster",
    191: "futa/trans",
    199: "trainer",
    215: "tentacles",
    216: "bukkake",
    225: "pregnancy",
    237: "oral sex",
    254: "harem",
    258: "netorare",
    259: "handjob",
    264: "bdsm",
    278: "creampie",
    290: "lactation",
    322: "multiple endings",
    324: "no sexual content",
    327: "twins",
    330: "romance",
    339: "blackmail",
    348: "dating sim",
    351: "teasing",
    354: "superpowers",
    360: "gay",
    361: "humor",
    362: "trap",
    374: "prostitution",
    382: "furry",
    384: "exhibitionism",
    392: "female protagonist",
    394: "monster girl",
    408: "paranormal",
    411: "titfuck",
    417: "rape",
    448: "simulator",
    449: "management",
    452: "turn based combat",
    480: "stripping",
    485: "voyeurism",
    498: "group sex",
    505: "parody",
    522: "text based",
    535: "groping",
    547: "school setting",
    550: "combat",
    553: "footjob",
    606: "cosplay",
    628: "strategy",
    639: "loli",
    670: "sexual harassment",
    689: "scat",
    708: "horror",
    728: "graphic violence",
    736: "japanese game",
    749: "shota",
    757: "vore",
    769: "spanking",
    776: "side-scroller",
    783: "animated",
    817: "big ass",
    833: "virgin",
    871: "humiliation",
    875: "transformation",
    894: "interracial",
    895: "virtual reality",
    916: "ahegao",
    924: "cheating",
    1079: "shooter",
    1111: "kinetic novel",
    1254: "urination",
    1305: "sleep sex",
    1407: "dilf",
    1434: "3d game",
    1471: "puzzle",
    1476: "possession",
    1483: "internal view",
    1506: "voiced",
    1507: "2dcg",
    1508: "platformer",
    1525: "point & click",
    1556: "multiple penetration",
    1707: "real porn",
    1766: "pov",
    1828: "necrophilia",
    2209: "vaginal sex",
    2214: "2d game",
    2215: "sissification",
    2216: "sex toys",
    2217: "drugs",
    2218: "religion",
    2219: "asset-clothing",
    2220: "asset-character",
    2221: "asset-environment",
    2222: "asset-hair",
    2223: "asset-light",
    2224: "asset-pose",
    2225: "asset-prop",
    2226: "asset-shader",
    2227: "asset-texture",
    2228: "asset-utility",
    2229: "mobile game",
    2234: "swinging",
    2235: "asset-bundle",
    2236: "asset-script",
    2237: "asset-expression",
    2238: "asset-plugin",
    2239: "asset-animal",
    2240: "asset-addon",
    2241: "anal sex",
    2242: "multiple protagonist",
    2246: "character creation",
    2247: "censored",
    2249: "dystopian setting",
    2250: "asset-vehicle",
    2251: "asset-morph",
    2252: "female domination",
    2254: "asset-animation",
    2255: "futa/trans protagonist",
    2256: "asset-hdri",
    2257: "sandbox",
    2258: "asset-audio",
    2260: "asset-honey-select",
    2261: "asset-ai-shoujo",
    2262: "asset-honey-select2",
    2263: "asset-koikatu",
    2264: "asset-playhome",
    2265: "ai cg",
    2267: "asset-daz-gen2",
    2268: "asset-daz-gen3",
    2269: "asset-daz-gen8",
    2270: "asset-daz-gen81",
    2271: "asset-daz-gen9",
    2272: "asset-male",
    2273: "asset-female",
    2274: "asset-nonbinary",
    2275: "asset-scene",
    2276: "asset-daz-gen1",
    2277: "asset-daz-v4",
    2278: "asset-daz-m4"
}

# Default file-host preference order written into a fresh settings.json.
DEFAULT_HOSTS = [
    "gofile.io", "pixeldrain.com", "anonfiles.com", "workupload.com", "mixdrop.co", "mega.nz",
    "mediafire.com", "racaty.net", "uploadhaven.com", "files.fm", "fileupload.io", "krakenfiles.com",
    "filemoon.sx", "send.cm", "1fichier.com", "zippyshare.com", "rapidgator.net", "fboom.me",
    "katfile.com", "hitfile.net", "dropapk.to", "ddownload.com", "uploadrar.com", "letsupload.io",
    "bayfiles.com", "tusfiles.com"
]
# --- Module-level side effects: everything below runs at import time. ---
os.makedirs(CONFIG_DIR, exist_ok=True)
os.makedirs("banners", exist_ok=True)  # downloaded banner images are saved here

# Create default settings
if not os.path.exists(SETTINGS_FILE):
    with open(SETTINGS_FILE, "w") as f:
        # NOTE(review): credentials are stored as plain text in settings.json.
        json.dump({
            "username": "",
            "password": "",
            "auto_update": False,
            "preferred_os": "Windows",
            "host_priority": DEFAULT_HOSTS
        }, f)

# Create DB tables
# check_same_thread=False because worker threads reuse this single
# connection; sqlite3 does no cross-thread serialization for us, so access
# is presumably expected to be serialized by the callers — TODO confirm.
conn = sqlite3.connect(DB_FILE, check_same_thread=False)
cursor = conn.cursor()
# NOTE(review): games.id is TEXT PRIMARY KEY, yet callers rely on
# cursor.lastrowid after inserting without an id — confirm the intended key.
cursor.execute("""CREATE TABLE IF NOT EXISTS games (
    id TEXT PRIMARY KEY,
    title TEXT,
    version TEXT,
    banner TEXT,
    description TEXT,
    thread_url TEXT,
    watched INTEGER DEFAULT 0,
    game_links TEXT,
    install_path TEXT,
    install_date TEXT,
    last_launched TEXT,
    update_available INTEGER DEFAULT 0
)""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS tags (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT UNIQUE,
    url TEXT
)
""")
# Join table linking games to tags; only these two columns exist.
cursor.execute("""
CREATE TABLE IF NOT EXISTS game_tags (
    game_id INTEGER,
    tag_id INTEGER,
    UNIQUE(game_id, tag_id)
)
""")
cursor.execute("CREATE TABLE IF NOT EXISTS prefixes (id INTEGER PRIMARY KEY, name TEXT UNIQUE)")
conn.commit()

# lower-cased tag name -> numeric tag id; filled by load_tag_prefix_maps().
TAG_MAP = {}
# lower-cased prefix name -> numeric prefix id; filled by load_tag_prefix_maps().
PREFIX_MAP = {}
def extract_os_links(post_content, target_os):
    """Collect download links for one OS from a thread's opening post.

    The post lists links grouped under <span> labels such as "Win", "Linux"
    or "Mac"; every <a href> that follows a label matching *target_os* is
    collected until the next OS label appears.  Unknown OS names fall back
    to the Windows keywords.
    """
    os_keywords = {
        "windows": ["win", "windows"],
        "linux": ["lin", "linux"],
        "mac": ["mac", "macos", "osx"]
    }
    wanted = os_keywords.get(target_os.lower(), ["win"])
    every_keyword = os_keywords["windows"] + os_keywords["linux"] + os_keywords["mac"]
    collected = []
    active_label = None
    for node in post_content.find_all(["span", "a"], recursive=True):
        if node.name == "span":
            span_text = node.get_text().lower()
            # Any OS keyword in a span starts a new platform group.
            if any(word in span_text for word in every_keyword):
                active_label = span_text
        elif node.name == "a" and node.has_attr("href"):
            # Only keep links while inside a group for the wanted OS.
            if active_label and any(word in active_label for word in wanted):
                collected.append(node["href"])
    return collected
def scrape_tags_from_thread(textbody):
    """Extract ``(tag_name, tag_url)`` pairs from a parsed thread page.

    ``textbody`` is an already-parsed BeautifulSoup document (callers pass
    the soup, e.g. scrape_game_details).  Returns an empty list when the
    tag span is missing or anything goes wrong.

    Fixes vs. the original: it referenced an undefined ``session`` and a
    misspelled ``texbody`` (guaranteed NameError), logged an undefined
    ``thread_url`` in the except path, and returned bare strings while
    callers unpack (name, url) pairs.
    """
    try:
        tag_span = textbody.find("span", class_="js-tagList")
        if not tag_span:
            print("[Tag Scrape] No tag span found.")
            return []
        # Each anchor in the tag list carries the tag name and its URL.
        return [
            (a.get_text(strip=True), a.get("href", ""))
            for a in tag_span.find_all("a")
        ]
    except Exception as e:
        print(f"[Tag Scrape Error] {e}")
        return []
def get_game_links_from_search(session, search_url):
    """Return deduplicated absolute thread URLs found on a search page.

    Only anchors of the exact shape ``/threads/<slug>`` (two slashes) are
    kept; order of the result is unspecified because duplicates are removed
    via a set.
    """
    page = session.get(search_url)
    soup = BeautifulSoup(page.text, "html.parser")
    found = []
    for anchor in soup.select("a[href^='/threads/']"):
        target = anchor.get("href")
        if not target:
            continue
        if target.startswith("/threads/") and target.count("/") == 2:
            found.append("https://f95zone.to" + target)
    return list(set(found))
def insert_tags_and_relationships(conn, game_id, tags):
    """Persist ``(name, url)`` tag pairs and link each one to *game_id*.

    Tags already present (UNIQUE name) and existing game/tag links are left
    untouched via INSERT OR IGNORE; the transaction is committed once at
    the end.
    """
    cur = conn.cursor()
    for name, url in tags:
        cur.execute("INSERT OR IGNORE INTO tags (name, url) VALUES (?, ?)", (name, url))
        cur.execute("SELECT id FROM tags WHERE name = ?", (name,))
        (tag_id,) = cur.fetchone()
        cur.execute(
            "INSERT OR IGNORE INTO game_tags (game_id, tag_id) VALUES (?, ?)",
            (game_id, tag_id),
        )
    conn.commit()
def insert_or_get_game_id(conn, thread_url, title="Unknown"):
    """Look up a game row by its thread URL, creating one when absent.

    Returns the stored id for an existing row, otherwise inserts a minimal
    (title, thread_url) row, commits, and returns the new rowid.
    """
    cur = conn.cursor()
    row = cur.execute(
        "SELECT id FROM games WHERE thread_url = ?", (thread_url,)
    ).fetchone()
    if row:
        return row[0]
    cur.execute(
        "INSERT INTO games (title, thread_url) VALUES (?, ?)", (title, thread_url)
    )
    conn.commit()
    return cur.lastrowid
def scrape_tags_and_prefixes(session):
    """Scrape tag and prefix ids from the latest-updates page into the DB.

    Reads ``data-tag-id`` / ``data-prefix-id`` attributes from <li> nodes
    and upserts them through the module-level ``cursor``/``conn``.
    """
    print("Session type inside scrape:", type(session))
    url = "https://f95zone.to/sam/latest_alpha/"
    r = session.get(url)
    soup = BeautifulSoup(r.text, 'html.parser')
    for node in soup.find_all("li", {"data-tag-id": True}):
        cursor.execute(
            "INSERT OR IGNORE INTO tags (id, name) VALUES (?, ?)",
            (int(node["data-tag-id"]), node.get_text(strip=True)),
        )
    for node in soup.find_all("li", {"data-prefix-id": True}):
        cursor.execute(
            "INSERT OR IGNORE INTO prefixes (id, name) VALUES (?, ?)",
            (int(node["data-prefix-id"]), node.get_text(strip=True)),
        )
    conn.commit()
def check_host_status(host):
    """Return "✅" when the host's domain resolves via DNS, else "❌".

    *host* may include a path ("site.com/files") and an optional "www."
    prefix; only the bare domain is resolved.  The original used a bare
    ``except:``, which also swallowed KeyboardInterrupt/SystemExit — narrow
    it to the resolution errors gethostbyname actually raises.
    """
    domain = host.split("/")[0].replace("www.", "")
    try:
        socket.gethostbyname(domain)
        return "✅"
    except (OSError, UnicodeError):
        # socket.gaierror (an OSError subclass) on resolution failure;
        # UnicodeError on malformed IDNA labels.
        return "❌"
def load_tag_prefix_maps():
    """Rebuild the module-level name->id lookup maps from the static tables.

    TAG_MAP maps lower-cased tag names to ids from TAGS; PREFIX_MAP maps
    lower-cased prefix names to ids from STATUS_PREFIXES and
    ENGINE_PREFIXES (engine entries win on a name collision).
    """
    TAG_MAP.clear()
    TAG_MAP.update({name.lower(): tag_id for tag_id, name in TAGS.items()})
    PREFIX_MAP.clear()
    combined = {**STATUS_PREFIXES, **ENGINE_PREFIXES}
    PREFIX_MAP.update({name.lower(): prefix_id for prefix_id, name in combined.items()})
| class GameManagerApp: | |
    def __init__(self, root):
        """Build the main window: HTTP session and a Notebook with the
        Search, Library and Settings tabs; finally load saved settings."""
        self.tabs = {}  # tab-name -> tab-id bookkeeping used by the build_* methods
        self.root = root
        self.session = requests.Session()  # shared HTTP session for all site access
        print("Session initialized:", type(self.session))
        self.root.title("F95Zone Game Manager")
        #self.session = None
        # Browser-like UA so the site serves normal pages.
        self.session.headers.update({
            "User-Agent": (
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/117.0.0.0 Safari/537.36"
            )
        })
        # NOTE(review): dead code — self.tabs was just created empty and
        # self.notebook does not exist yet, so this branch can never run.
        if "search" in self.tabs:
            self.notebook.forget(self.tabs["search"])
            if hasattr(self, "search_frame"):
                self.search_frame.destroy()
        self.conn = conn  # module-level sqlite connection (check_same_thread=False)
        # Settings-tab variables (persisted via load_settings/settings file).
        self.username_var = tk.StringVar()
        self.password_var = tk.StringVar()
        self.auto_update_var = tk.BooleanVar()
        self.os_var = tk.StringVar(value="Windows")
        self.notebook = ttk.Notebook(root)
        self.notebook.pack(fill="both", expand=True)
        # NOTE(review): these three frames are added here, then build_search_tab
        # and build_library_tab create and add their own frames as well —
        # confirm which set is intended to survive.
        self.search_tab = ttk.Frame(self.notebook)
        self.library_tab = ttk.Frame(self.notebook)
        self.settings_tab = ttk.Frame(self.notebook)
        self.notebook.add(self.search_tab, text="Search")
        self.notebook.add(self.library_tab, text="Library")
        self.notebook.add(self.settings_tab, text="Settings")
        self.build_search_tab()
        self.build_library_tab()
        self.build_settings_tab()
        self.load_settings()
| def login_to_f95zone(self, username, password): | |
| login_page_url = "https://f95zone.to/login/" | |
| post_url = "https://f95zone.to/login/login" | |
| # Step 1: GET login page to retrieve CSRF token | |
| response = self.session.get(login_page_url) | |
| soup = BeautifulSoup(response.text, "html.parser") | |
| token_input = soup.find("input", {"name": "_xfToken"}) | |
| xf_token = token_input["value"] if token_input else None | |
| if not xf_token: | |
| raise Exception("CSRF token not found. Cannot log in.") | |
| # Step 2: Build payload with token | |
| payload = { | |
| "login": username, | |
| "password": password, | |
| "remember": 1, | |
| "_xfToken": xf_token | |
| } | |
| headers = { | |
| "User-Agent": ( | |
| "Mozilla/5.0 (Windows NT 10.0; Win64; x64) " | |
| "AppleWebKit/537.36 (KHTML, like Gecko) " | |
| "Chrome/117.0.0.0 Safari/537.36" | |
| ), "Referer": login_page_url} | |
| # Step 3: POST login credentials | |
| response = self.session.post(post_url, data=payload, headers=headers) | |
| # Step 4: Check if login succeeded | |
| if "incorrect" in response.text.lower() or "login" in response.url: | |
| raise Exception("Login failed. Check credentials.") | |
| print("✅ Login successful: ", self.username_var.get()) | |
| return self.session | |
    def build_search_tab(self):
        """(Re)build the Search tab: progress UI, filter widgets (query,
        date, rating, tags, status, engine) and the scrollable results area."""
        print("Running search tab cleanup")
        # Drop any previously-built Search tab before rebuilding.
        for tab_id in self.notebook.tabs():
            tab_text = self.notebook.tab(tab_id, "text")
            print(f"Tab found: {tab_text}")
            if tab_text == "Search":
                print("Removing existing Search tab")
                self.notebook.forget(tab_id)
        if "search" in self.tabs and self.tabs["search"] in self.notebook.tabs():
            print("Search tab already exists. Skipping rebuild.")
            return
        # NOTE(review): redundant — the loop above already forgot every
        # Search tab, so this second sweep finds nothing.
        for tab_id in self.notebook.tabs():
            if self.notebook.tab(tab_id, "text") == "Search":
                self.notebook.forget(tab_id)
        self.search_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.search_frame, text="Search")
        # Define styles for progress bar phases
        style = ttk.Style()
        style.theme_use("default")
        style.configure("Download.Horizontal.TProgressbar", background="#4a90e2")  # Blue
        style.configure("Extract.Horizontal.TProgressbar", background="#f5a623")  # Orange
        # Progress bar + status label
        # NOTE(review): progress_container is created but never pack()ed, so
        # the progress bar and status label below are not visible — confirm.
        progress_container = ttk.Frame(self.search_frame)
        filter_container = ttk.Frame(self.search_frame)
        filter_container.pack(fill="both", expand=True)
        # Canvas + scrollbar pair that hosts the scrollable filter sections.
        canvas = tk.Canvas(filter_container)
        canvas.pack(side="left", fill="both", expand=True)
        scrollbar = ttk.Scrollbar(filter_container, orient="vertical", command=canvas.yview)
        scrollbar.pack(side="right", fill="y")
        self.tag_widgets = []  # every tag Checkbutton, for re-flow on resize
        scrollable_frame = ttk.Frame(canvas)
        canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
        canvas.configure(yscrollcommand=scrollbar.set)

        def resize_canvas(event):
            # Keep the embedded frame as wide as the canvas.
            canvas.itemconfig("all", width=event.width)

        scrollable_frame.bind("<Configure>", lambda e: canvas.configure(scrollregion=canvas.bbox("all")))
        canvas.bind("<Configure>", resize_canvas)
        # Ensure the scrollable frame expands
        # NOTE(review): duplicate of the <Configure> binding two lines up;
        # tkinter keeps only the last bind, so this replaces (not adds to) it.
        scrollable_frame.bind(
            "<Configure>",
            lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
        )
        self.progress_bar = ttk.Progressbar(progress_container, orient="horizontal", length=300, mode="determinate", style="Download.Horizontal.TProgressbar")
        self.progress_bar.pack(pady=2)
        self.status_label = ttk.Label(progress_container, text="Idle")
        self.status_label.pack()
        # Search query
        ttk.Label(self.search_frame, text="Search Query:").pack(anchor="w")
        self.search_query_var = tk.StringVar()
        ttk.Entry(self.search_frame, textvariable=self.search_query_var).pack(fill="x", padx=5)
        # Release date filter
        # NOTE(review): release_date_var and min_rating_var are collected here
        # but not read by perform_search in the visible code — confirm usage.
        ttk.Label(self.search_frame, text="Release Date ≥ (YYYY-MM-DD):").pack(anchor="w")
        self.release_date_var = tk.StringVar()
        ttk.Entry(self.search_frame, textvariable=self.release_date_var).pack(fill="x", padx=5)
        # Minimum rating
        ttk.Label(self.search_frame, text="Minimum Rating (0–5):").pack(anchor="w")
        self.min_rating_var = tk.DoubleVar(value=0.0)
        ttk.Scale(self.search_frame, from_=0.0, to=5.0, variable=self.min_rating_var, orient="horizontal").pack(fill="x", padx=5)
        # --- TAGS ---
        ttk.Label(self.search_frame, text="Tags:").pack(anchor="w", padx=5)
        self.tag_vars = {}  # lower-cased tag name -> BooleanVar
        tag_section = self.create_collapsible_section(scrollable_frame, "Tags")
        # Live text filter over the tag checkbutton grid.
        search_var = tk.StringVar()
        search_entry = ttk.Entry(tag_section, textvariable=search_var)
        search_entry.pack(fill="x", pady=2)
        self.tag_grid = ttk.Frame(tag_section)
        self.tag_grid.pack(fill="x", expand=True)

        def layout_tag_buttons_debounced(event=None):
            # Debounce resizes: re-flow the grid 100 ms after the last
            # <Configure>.  layout_tag_buttons is defined later in this
            # method — legal, since the callback only fires afterwards.
            if hasattr(self, "_layout_after_id"):
                self.tag_grid.after_cancel(self._layout_after_id)
            self._layout_after_id = self.tag_grid.after(100, layout_tag_buttons)

        self.tag_grid.bind("<Configure>", layout_tag_buttons_debounced)

        def update_tag_filter(*args):
            # Hide checkbuttons whose tag name doesn't contain the query.
            query = search_var.get().lower()
            for tag, widget in tag_widgets.items():
                if query in tag:
                    widget.grid()
                else:
                    widget.grid_remove()

        search_var.trace_add("write", update_tag_filter)
        # Build one checkbutton per tag, alphabetically, six per row.
        columns = 6
        row = col = 0
        tag_widgets = {}
        for tag_id, tag_name in sorted(TAGS.items(), key=lambda x: x[1].lower()):
            var = tk.BooleanVar()
            cb = ttk.Checkbutton(self.tag_grid, text=tag_name.title(), variable=var, style="Tag.TCheckbutton")
            self.tag_widgets.append(cb)
            cb.grid(row=row, column=col, sticky="w", padx=2, pady=2)
            self.tag_vars[tag_name.lower()] = var
            tag_widgets[tag_name.lower()] = cb
            col += 1
            if col >= columns:
                col = 0
                row += 1
        self.tag_match_mode = tk.StringVar(value="any")
        ttk.Radiobutton(self.search_frame, text="Match Any Tag", variable=self.tag_match_mode, value="any").pack(anchor="w", padx=5)
        ttk.Radiobutton(self.search_frame, text="Match All Tags", variable=self.tag_match_mode, value="all").pack(anchor="w", padx=5)
        # --- STATUS ---
        ttk.Label(self.search_frame, text="Status:").pack(anchor="w", padx=5)
        self.status_vars = {}  # lower-cased status name -> BooleanVar
        status_section = self.create_collapsible_section(scrollable_frame, "Status")
        for prefix_id, prefix_name in STATUS_PREFIXES.items():
            var = tk.BooleanVar()
            cb = ttk.Checkbutton(status_section, text=prefix_name, variable=var)
            cb.pack(anchor="w", padx=5)
            self.status_vars[prefix_name.lower()] = var
        self.status_match_mode = tk.StringVar(value="any")
        ttk.Radiobutton(self.search_frame, text="Match Any Status", variable=self.status_match_mode, value="any").pack(anchor="w", padx=5)
        ttk.Radiobutton(self.search_frame, text="Match All Statuses", variable=self.status_match_mode, value="all").pack(anchor="w", padx=5)
        # --- ENGINE ---
        ttk.Label(self.search_frame, text="Engine:").pack(anchor="w", padx=5)
        self.engine_vars = {}  # lower-cased engine name -> BooleanVar
        engine_section = self.create_collapsible_section(scrollable_frame, "Engine")
        for prefix_id, prefix_name in ENGINE_PREFIXES.items():
            var = tk.BooleanVar()
            cb = ttk.Checkbutton(engine_section, text=prefix_name, variable=var)
            cb.pack(anchor="w", padx=5)
            self.engine_vars[prefix_name.lower()] = var
        self.engine_match_mode = tk.StringVar(value="any")
        ttk.Radiobutton(self.search_frame, text="Match Any Engine", variable=self.engine_match_mode, value="any").pack(anchor="w", padx=5)
        ttk.Radiobutton(self.search_frame, text="Match All Engines", variable=self.engine_match_mode, value="all").pack(anchor="w", padx=5)
        # Search button
        ttk.Button(self.search_frame, text="Search", command=self.start_search_thread).pack(pady=10)
        # Results container
        results_container = ttk.Frame(self.search_frame)
        results_container.pack(fill="both", expand=True)
        self.results_canvas = tk.Canvas(results_container, height=400)
        self.scrollbar = ttk.Scrollbar(results_container, orient="vertical", command=self.results_canvas.yview)
        self.results_frame = ttk.Frame(self.results_canvas)
        self.results_frame.bind("<Configure>", lambda e: self.results_canvas.configure(scrollregion=self.results_canvas.bbox("all")))
        self.results_canvas.create_window((0, 0), window=self.results_frame, anchor="nw")
        self.results_canvas.configure(yscrollcommand=self.scrollbar.set)
        self.results_canvas.pack(side="left", fill="both", expand=True)
        self.scrollbar.pack(side="right", fill="y")

        def layout_tag_buttons(event=None):
            # Re-flow the tag grid so as many columns fit as the current
            # width allows (~150 px per checkbutton).
            for widget in self.tag_grid.winfo_children():
                widget.grid_forget()
            grid_width = self.tag_grid.winfo_width()
            button_width = 150
            columns = max(1, grid_width // button_width)
            row = col = 0
            for cb in self.tag_widgets:
                cb.grid(row=row, column=col, sticky="w", padx=2, pady=2)
                col += 1
                if col >= columns:
                    col = 0
                    row += 1
| def create_collapsible_section(self, parent, title): | |
| container = ttk.Frame(parent) | |
| header = ttk.Frame(container) | |
| header.pack(fill="x") | |
| toggle_btn = ttk.Button(header, text=f"▼ {title}", style="Toolbutton") | |
| toggle_btn.pack(side="left", anchor="w") | |
| content = ttk.Frame(container) | |
| content.pack(fill="x", padx=5) | |
| def toggle(): | |
| if content.winfo_viewable(): | |
| content.pack_forget() | |
| toggle_btn.config(text=f"► {title}") | |
| else: | |
| content.pack(fill="x", padx=5) | |
| toggle_btn.config(text=f"▼ {title}") | |
| toggle_btn.config(command=toggle) | |
| container.pack(fill="x", pady=5) | |
| return content | |
| def start_search_thread(self): | |
| thread = threading.Thread(target=self.perform_search) | |
| thread.start() | |
| def perform_search(self): | |
| import urllib.parse | |
| import requests | |
| base_url = "https://f95zone.to/sam/latest_alpha/latest_data.php" | |
| query = self.search_query_var.get().strip() | |
| selected_tags = [str(tag_id) for tag_id, var in self.tag_vars.items() if var.get()] | |
| selected_status = [str(prefix_id) for prefix_id, var in self.status_vars.items() if var.get()] | |
| selected_engines = [str(prefix_id) for prefix_id, var in self.engine_vars.items() if var.get()] | |
| tag_mode = self.tag_match_mode.get() | |
| prefix_mode = self.engine_match_mode.get() | |
| combined_prefixes = selected_status + selected_engines | |
| page = 1 | |
| all_results = [] | |
| while True: | |
| params = { | |
| "cmd": "list", | |
| "cat": "games", | |
| "page": str(page), | |
| "sort": "date" | |
| } | |
| if query: | |
| params["search"] = query | |
| if selected_tags: | |
| if tag_mode == "any": | |
| params["tags[]"] = ",".join(selected_tags) | |
| else: | |
| params["tags[]"] = selected_tags # repeated tags[] entries | |
| if combined_prefixes: | |
| if prefix_mode == "any": | |
| params["prefixes[]"] = ",".join(combined_prefixes) | |
| else: | |
| params["prefixes[]"] = combined_prefixes # repeated prefixes[] entries | |
| full_url = f"{base_url}?{urllib.parse.urlencode(params, doseq=True)}" | |
| print(f"Fetching page {page}: {full_url}") | |
| try: | |
| response = requests.get(full_url) | |
| data = response.json() | |
| #print(data) | |
| except Exception as e: | |
| print(f"Error fetching page {page}: {e}") | |
| break | |
| results = data.get("msg", {}).get("data", []) | |
| all_results.extend(results) | |
| print(results) | |
| current_page = int(data.get("msg", {}).get("pagination", {}).get("page", page)) | |
| total_pages = int(data.get("msg", {}).get("pagination", {}).get("total")) | |
| print("Page " + str(current_page) + " out of " + str(total_pages)) | |
| if current_page >= total_pages: | |
| break | |
| page += 1 | |
| print(f"Total results retrieved: {len(all_results)}") | |
| try: | |
| self._update_results_list(all_results) | |
| except Exception as e: | |
| print(f"Error updating results list: {e}") | |
| self._update_page_progress("✅ Search complete.") | |
| self.root.after(0, lambda: self._update_results_list(all_results)) | |
    def _update_results_list(self, results):
        """Rebuild the scrollable results frame from API result dicts.

        Must run on the Tk main thread (callers schedule it via root.after).
        """
        # Clear the previous result rows.
        for widget in self.results_frame.winfo_children():
            widget.destroy()
        for game in results:
            title = game.get("title", "Untitled")
            dev = game.get("creator", "Unknown")
            banner_path = game.get("cover", "")
            thread_url = "https://f95zone.to/threads/" + game.get("thread_url", "#") + "/"
            game_id = game.get("thread_id", "")
            tags = game.get("tags", "")
            # NOTE(review): "installed" is not a field the API is shown to
            # return anywhere in this file — presumably always False; confirm.
            installed = game.get("installed", False)
            display_text = f"{title} | {dev} | {tags}"  # NOTE(review): unused
            row = ttk.Frame(self.results_frame)
            row.pack(fill="x", pady=5, padx=5)
            # Banner image
            # NOTE(review): Image/ImageTk (PIL) are never imported in this
            # file, so this block always raises NameError and the bare except
            # hides it — no banner is ever rendered.  Also "cover" looks like
            # a URL, not a local path; download it first.  TODO confirm.
            try:
                img = Image.open(banner_path).resize((150, 75))
                tk_img = ImageTk.PhotoImage(img)
                banner_label = ttk.Label(row, image=tk_img)
                # Keep a reference on the widget so Tk doesn't GC the image.
                banner_label.image = tk_img
                banner_label.pack(side="left")
            except:
                pass
            # Title and thread link
            info_frame = ttk.Frame(row)
            info_frame.pack(side="left", fill="x", expand=True)
            ttk.Label(info_frame, text=title, font=("Segoe UI", 10, "bold")).pack(anchor="w")
            ttk.Button(info_frame, text="Open Page", command=lambda url=thread_url: webbrowser.open(url)).pack(anchor="w")
            # Install/Remove button
            btn_text = "Remove" if installed else "Install"
            action_btn = ttk.Button(info_frame, text=btn_text)
            action_btn.pack(anchor="w", pady=2)
            if installed:
                action_btn.config(command=lambda gid=game_id: self.confirm_remove_game(gid))
            else:
                # NOTE(review): `title` is late-bound here (gid/btn are frozen
                # as defaults but title is not), so every Install button
                # passes the *last* row's title — confirm and bind it too.
                action_btn.config(command=lambda gid=game_id, btn=action_btn: self.start_download_thread(gid, title, btn))
            # Progress bar and status
            progress = ttk.Progressbar(info_frame, orient="horizontal", length=200, mode="determinate")
            progress.pack(anchor="w", pady=2)
            status = ttk.Label(info_frame, text="Idle")
            status.pack(anchor="w")
| def _update_page_progress(self, message): | |
| self.status_label.after(0, lambda: self.status_label.config(text=message)) | |
| def cancel_scraping(self): | |
| self.cancel_scrape = True | |
| def scrape_tags_from_search_results(self): | |
| self.cancel_scrape = False | |
| self.progress.start() # Start animation | |
| self.status_label.config(text="Scraping in progress...") | |
| threading.Thread(target=self.scrape_tags_worker, daemon=True).start() | |
| thread.start() | |
    def scrape_tags_worker(self):
        """Background worker: scrape and persist tags for every thread found
        at the configured search URL.

        Runs on a daemon thread; all UI updates are marshalled via .after().
        """
        try:
            # NOTE(review): self.search_url_var is not created anywhere in
            # the visible code — presumably built in a settings/search UI
            # section not shown here; confirm.
            search_url = self.search_url_var.get()
            thread_links = get_game_links_from_search(self.session, search_url)
            total = len(thread_links)
            for i, thread_url in enumerate(thread_links, start=1):
                # Cooperative cancellation set by cancel_scraping().
                if self.cancel_scrape:
                    self._update_thread_progress("Scraping canceled.")
                    break
                # NOTE(review): scrape_tags_from_thread is defined with a
                # single parameter; this two-argument call raises TypeError —
                # confirm the intended signature.
                tags = scrape_tags_from_thread(self.session, thread_url)
                game_id = insert_or_get_game_id(self.conn, thread_url)
                insert_tags_and_relationships(self.conn, game_id, tags)
                self._update_thread_progress(f"Scraping thread {i} of {total}…")
            if not self.cancel_scrape:
                self._update_thread_progress("Scraping complete.")
                self.root.after(0, lambda: messagebox.showinfo("Success", "Tags scraped and saved from search results."))
        except Exception as e:
            self._update_thread_progress("Error occurred.")
            self.root.after(0, lambda: messagebox.showerror("Error", str(e)))
        finally:
            # NOTE(review): self.progress is never assigned (build_search_tab
            # creates self.progress_bar), so this stop() raises
            # AttributeError — confirm which attribute is intended.
            self.progress.stop()
| def _update_thread_progress(self, message): | |
| self.thread_progress_label.after(0, lambda: self.thread_progress_label.config(text=message)) | |
| def _update_status(self, message): | |
| self.status_label.after(0, lambda: self.status_label.config(text=message)) | |
| def scrape_game_details(self, game_id, thread_url, title): | |
| #thread_url = id | |
| id = game_id | |
| try: | |
| r = self.session.get(thread_url, timeout=10) | |
| soup = BeautifulSoup(r.text, "html.parser") | |
| # Banner image | |
| banner = soup.find("img", class_="bbImage") | |
| banner_url = banner["src"] if banner else "" | |
| # Tags | |
| #tags = scrape_tags_from_thread(self.session, thread_url) | |
| tags = scrape_tags_from_thread(soup) | |
| tags_json = json.dumps(tags) | |
| matched_tags = [] | |
| for tag_name, tag_url in tags: | |
| normalized = tag_name.strip().lower() | |
| tag_id = TAG_LOOKUP.get(normalized) | |
| if tag_id: | |
| matched_tags.append((tag_id, tag_name, tag_url)) | |
| else: | |
| print(f"[Tag Warning] Unrecognized tag: {tag_name}") | |
| matched_tags_json = json.dumps(tags) | |
| print("TAGS:\n" + matched_tags_json) | |
| # Description and release date | |
| meta_desc = soup.find("meta", property="og:description") | |
| full_desc = meta_desc["content"] if meta_desc else "" | |
| description = full_desc.split("Thread Updated")[0].strip() if "Thread Updated" in full_desc else full_desc.strip() | |
| last_update = "" | |
| if "Release Date:" in full_desc: | |
| last_update = full_desc.split("Release Date:")[1].split("Thread Updated")[0].strip() | |
| # Extract all links from the post content | |
| post_content = soup.find("div", class_="message-content") | |
| if not post_content: | |
| print("[Scrape Error] Could not find post content.") | |
| return | |
| wanted_os = self.os_var.get() | |
| game_links = extract_os_links(post_content, wanted_os) | |
| game_links_json = json.dumps(game_links) | |
| print("Links: " + game_links_json) | |
| # Store in DB | |
| game_id = insert_or_get_game_id(self.conn, thread_url, title) | |
| cursor = self.conn.cursor() | |
| cursor.execute(""" | |
| UPDATE games SET banner = ?, description = ?, install_date = ?, id = ?, game_links = ?, title = ? | |
| WHERE id = ? | |
| """, (banner_url, description, last_update, game_id, game_links_json, title, id)) | |
| self.conn.commit() | |
| insert_tags_and_relationships(self.conn, game_id, tags) | |
| banner_filename = f"{title.replace(' ', '_')}.jpg" | |
| banner_path = os.path.join("banners", banner_filename) | |
| try: | |
| r = requests.get(banner_url, timeout=10) | |
| with open(banner_path, "wb") as f: | |
| f.write(r.content) | |
| except Exception as e: | |
| print(f"Failed to download banner: {e}") | |
| banner_path = "" | |
| # Store local path in DB | |
| cursor.execute(""" | |
| UPDATE games SET banner = ?, description = ?, install_date = ?, watched = 1 | |
| WHERE id = ? | |
| """, (banner_path, description, last_update, game_id)) | |
| for tag_id, tag_name, tag_url in matched_tags: | |
| cursor.execute(""" | |
| INSERT INTO game_tags (game_id, tag_id, tag_name, tag_url) | |
| VALUES (?, ?, ?, ?) | |
| """, (id, tag_id, tag_name, tag_url)) | |
| self.conn.commit() | |
| except Exception as e: | |
| print(f"[Scrape Error] {thread_url}: {e}") | |
| def show_game_preview(self, event): | |
| selection = self.results_list.curselection() | |
| if not selection: | |
| return | |
| index = selection[0] | |
| title, url = self.search_results[index] | |
| try: | |
| r = self.session.get(url, timeout=10) | |
| soup = BeautifulSoup(r.text, "html.parser") | |
| desc = soup.find("div", class_="bbWrapper").text.strip() | |
| self.preview_text.delete("1.0", tk.END) | |
| self.preview_text.insert(tk.END, f"{title}\n\n{desc}") | |
| except Exception as e: | |
| self.preview_text.delete("1.0", tk.END) | |
| self.preview_text.insert(tk.END, f"Error loading preview: {e}") | |
    def build_library_tab(self):
        """(Re)build the Library tab listing installed games with a preview
        pane and launch/update controls."""
        # Drop any previously-built Library tab.
        for tab_id in self.notebook.tabs():
            if self.notebook.tab(tab_id, "text") == "Library":
                self.notebook.forget(tab_id)
        if "library" in self.tabs and self.tabs["library"] in self.notebook.tabs():
            print("Lib tab already exists. Skipping rebuild.")
            return
        if "library" in self.tabs:
            self.notebook.forget(self.tabs["library"])
        self.library_frame = ttk.Frame(self.notebook)
        # NOTE(review): ttk.Notebook.add() returns an empty string, so
        # self.tabs["library"] stores '' rather than a usable tab id —
        # the "in self.notebook.tabs()" check above can never match; confirm.
        tab_id = self.notebook.add(self.library_frame, text="Library")
        self.tabs["library"] = tab_id
        # ... build the UI inside self.library_frame ...
        header = ttk.Frame(self.library_tab)
        header.pack(fill="x", padx=10, pady=5)
        # NOTE(review): a second Library frame is created and added here, so
        # two "Library" tabs exist and the first self.library_frame is lost.
        self.library_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.library_frame, text="Library")
        # Initial population
        # NOTE(review): refresh_library_list clears/rebuilds library_frame
        # before the widgets below are created — confirm intended order.
        self.refresh_library_list()
        ttk.Label(header, text="Installed Games").pack(side="left")
        ttk.Button(header, text="Check for Updates", command=self.start_update_check_thread).pack(side="right")
        self.library_list = tk.Listbox(self.library_tab, height=15)
        self.library_list.pack(fill="both", expand=True, padx=10, pady=5)
        self.library_list.bind("<<ListboxSelect>>", self.show_installed_game)
        self.library_preview = tk.Text(self.library_tab, height=10, wrap="word")
        self.library_preview.pack(fill="x", padx=10, pady=5)
        ttk.Button(self.library_tab, text="Launch Game", command=self.launch_selected_game).pack(pady=5)
        ttk.Button(self.library_frame, text="Check for Updates", command=self.check_for_updates).pack(pady=5)
        self.load_installed_games()
def refresh_library_list(self):
    """Rebuild the scrollable library view from the games table.

    Lists every game that is watched or has an install date, showing its
    banner (if loadable), update flag and last-launched date, with
    per-row Launch/Remove buttons.

    NOTE(review): ``Image``/``ImageTk`` (Pillow) are not imported at the
    top of this file, so the banner branch raises NameError and always
    falls through to the "[No Banner]" placeholder — add
    ``from PIL import Image, ImageTk`` to enable banners. TODO confirm.
    """
    # Tear down the previous view before rebuilding.
    for widget in self.library_frame.winfo_children():
        widget.destroy()
    canvas = tk.Canvas(self.library_frame)
    scrollbar = ttk.Scrollbar(self.library_frame, orient="vertical", command=canvas.yview)
    scroll_frame = ttk.Frame(canvas)
    scroll_frame.bind("<Configure>", lambda e: canvas.configure(scrollregion=canvas.bbox("all")))
    canvas.create_window((0, 0), window=scroll_frame, anchor="nw")
    canvas.configure(yscrollcommand=scrollbar.set)
    canvas.pack(side="left", fill="both", expand=True)
    scrollbar.pack(side="right", fill="y")
    cursor = self.conn.cursor()
    cursor.execute("""
        SELECT id, title, banner, install_path, version, last_launched, update_available
        FROM games WHERE watched = 1 OR install_date IS NOT NULL
    """)
    for game_id, title, banner_path, install_path, version, last_launched, update_flag in cursor.fetchall():
        row = ttk.Frame(scroll_frame)
        row.pack(fill="x", pady=5, padx=5)
        # Banner thumbnail; any failure (missing file, bad image, no PIL)
        # degrades to a text placeholder. Narrowed from a bare except.
        try:
            img = Image.open(banner_path).resize((150, 75))
            tk_img = ImageTk.PhotoImage(img)
            banner_label = ttk.Label(row, image=tk_img)
            banner_label.image = tk_img  # keep a reference or Tk drops the image
        except Exception:
            banner_label = ttk.Label(row, text="[No Banner]")
        banner_label.pack(side="left", padx=5)
        info_frame = ttk.Frame(row)
        info_frame.pack(side="left", fill="x", expand=True)
        display_title = title
        if update_flag:
            display_title += " 🔔 Update Available"
        if last_launched:
            display_title += f" (Last played: {last_launched})"
        ttk.Label(info_frame, text=display_title, font=("Segoe UI", 10, "bold")).pack(anchor="w")
        # Default-arg binding freezes game_id per row (late-binding pitfall).
        ttk.Button(info_frame, text="Launch", command=lambda gid=game_id: self.launch_game(gid)).pack(anchor="w", pady=2)
        ttk.Button(info_frame, text="Remove", command=lambda gid=game_id: self.confirm_remove_game(gid)).pack(anchor="w", pady=2)
def check_for_updates(self):
    """Flag installed games whose thread version differs from the stored one.

    Compares the version scraped from each game's thread against both the
    DB version and the on-disk ``_version.fdm`` marker, then sets the
    ``update_available`` column accordingly and refreshes the library view.

    NOTE(review): a second ``check_for_updates`` is defined later in this
    class (the listbox-based variant); at class-creation time the later
    definition wins, so this DB-flag variant is shadowed dead code unless
    one of them is renamed — confirm which behaviour is intended.
    """
    cursor = self.conn.cursor()
    cursor.execute("SELECT id, title, thread_id, version, install_path FROM games WHERE installed = 1")
    games = cursor.fetchall()
    for game_id, title, thread_id, stored_version, install_path in games:
        thread_url = f"https://f95zone.to/threads/{thread_id}/"
        # scrape_version_from_thread is defined elsewhere in this file.
        latest_version = scrape_version_from_thread(thread_url)
        # Compare with DB and _version.fdm
        version_file = os.path.join(install_path, "_version.fdm")
        file_version = ""
        if os.path.exists(version_file):
            with open(version_file, "r", encoding="utf-8") as f:
                file_version = f.read().strip()
        # Any mismatch between scraped, stored and on-disk versions flags an update.
        if latest_version != stored_version or latest_version != file_version:
            cursor.execute("UPDATE games SET update_available = 1 WHERE id = ?", (game_id,))
        else:
            cursor.execute("UPDATE games SET update_available = 0 WHERE id = ?", (game_id,))
    self.conn.commit()
    self.refresh_library_list()
def load_installed_games(self):
    """Fill the library listbox from the games table.

    Bug fixed: the original referenced an undefined module-level
    ``cursor`` (NameError); a cursor is now obtained from ``self.conn``.
    The fetched ``(title, version, thread_url)`` rows are kept in
    ``self.installed_games`` for the selection and launch handlers.
    """
    self.library_list.delete(0, tk.END)
    cursor = self.conn.cursor()
    cursor.execute("SELECT title, version, thread_url FROM games")
    self.installed_games = cursor.fetchall()
    for title, version, url in self.installed_games:
        self.library_list.insert(tk.END, f"{title} (v{version})")
def start_update_check_thread(self):
    """Run the update check on a background daemon thread so the UI stays responsive."""
    worker = threading.Thread(target=self.check_for_updates, daemon=True)
    worker.start()
def check_for_updates(self):
    """Annotate library rows whose thread page shows a newer version.

    Runs on a worker thread (see start_update_check_thread). Fixes vs.
    the original: the Tk listbox is no longer mutated directly from the
    worker thread (Tk is not thread-safe) — updates are scheduled on the
    Tk event loop via ``after``; a thread page without a version label
    is skipped instead of raising AttributeError into the except block.
    """
    for i, (title, version, url) in enumerate(self.installed_games):
        try:
            r = self.session.get(url, timeout=10)
            soup = BeautifulSoup(r.text, "html.parser")
            label = soup.find("span", class_="label")
            if label is None:
                continue  # page layout changed or no version label present
            latest = label.text.strip()
            if latest != version:
                def mark(i=i, title=title, version=version):
                    self.library_list.delete(i)
                    self.library_list.insert(i, f"{title} (v{version}) ⚠️ Update Available")
                # Marshal the widget update back onto the Tk main loop.
                self.library_list.after(0, mark)
        except Exception as e:
            print(f"[Update Check Error] {title}: {e}")
def show_installed_game(self, event):
    """Show the selected library game's stored description in the preview pane.

    Bug fixed: the original referenced an undefined module-level
    ``cursor``; a cursor is now obtained from ``self.conn``.
    """
    selection = self.library_list.curselection()
    if not selection:
        return
    index = selection[0]
    title, version, url = self.installed_games[index]
    cursor = self.conn.cursor()
    cursor.execute("SELECT description FROM games WHERE title = ?", (title,))
    row = cursor.fetchone()
    self.library_preview.delete("1.0", tk.END)
    if row:
        self.library_preview.insert(tk.END, row[0])
    else:
        self.library_preview.insert(tk.END, "No description available.")
def launch_selected_game(self):
    """Launch the selected library game's executable or HTML entry point.

    Bugs fixed: the original referenced an undefined module-level
    ``cursor`` (now taken from ``self.conn``), and the non-Windows branch
    used macOS-only ``open`` for every platform — Linux now gets
    ``xdg-open``.
    """
    selection = self.library_list.curselection()
    if not selection:
        return
    title, version, url = self.installed_games[selection[0]]
    cursor = self.conn.cursor()
    cursor.execute("SELECT install_path FROM games WHERE title = ?", (title,))
    row = cursor.fetchone()
    if not row:
        messagebox.showerror("Launch Error", "Install path not found.")
        return
    path = row[0]
    # First .exe or .html found in the install folder wins.
    exe = None
    for file in os.listdir(path):
        if file.endswith((".exe", ".html")):
            exe = os.path.join(path, file)
            break
    if not exe:
        messagebox.showerror("Launch Error", "No launchable file found.")
        return
    try:
        system = platform.system()
        if system == "Windows":
            subprocess.Popen(exe)
        elif system == "Darwin":
            subprocess.Popen(["open", exe])
        else:
            subprocess.Popen(["xdg-open", exe])
    except Exception as e:
        messagebox.showerror("Launch Error", str(e))
def build_settings_tab(self):
    """Populate the Settings tab: credentials, preferences, host order and login feedback."""
    container = ttk.Frame(self.settings_tab)
    container.pack(fill="both", expand=True, padx=10, pady=10)
    # Credentials
    ttk.Label(container, text="F95Zone Username:").grid(row=0, column=0, sticky="w")
    ttk.Entry(container, textvariable=self.username_var, width=30).grid(row=0, column=1, sticky="w")
    ttk.Label(container, text="F95Zone Password:").grid(row=1, column=0, sticky="w")
    ttk.Entry(container, textvariable=self.password_var, show="*", width=30).grid(row=1, column=1, sticky="w")
    # Preferences
    ttk.Checkbutton(container, text="Enable Auto-Update Check", variable=self.auto_update_var).grid(row=2, column=0, columnspan=2, sticky="w")
    ttk.Label(container, text="Preferred OS:").grid(row=3, column=0, sticky="w")
    ttk.Combobox(container, textvariable=self.os_var, values=["Windows", "Linux", "Mac"], width=28).grid(row=3, column=1, sticky="w")
    # Action buttons
    ttk.Button(container, text="Save Settings", command=self.save_settings).grid(row=4, column=0, pady=10)
    ttk.Button(container, text="Login & Scrape Tags", command=self.start_login_thread).grid(row=4, column=1, pady=10)
    # Host priority list (rows are dragged to reorder)
    ttk.Label(container, text="File Host Priority (drag to reorder):").grid(row=5, column=0, columnspan=2, sticky="w", pady=(10, 0))
    self.host_listbox = tk.Listbox(container, height=10, selectmode="browse", exportselection=False)
    self.host_listbox.grid(row=6, column=0, columnspan=2, sticky="nsew")
    self.host_listbox.bind("<Button-1>", self.start_drag)
    self.host_listbox.bind("<B1-Motion>", self.do_drag)
    ttk.Button(container, text="Refresh Host Status", command=self.load_settings).grid(row=7, column=0, columnspan=2, pady=5)
    # Login feedback widgets
    self.login_progress = ttk.Progressbar(container, mode="indeterminate")
    self.login_progress.grid(row=8, column=0, columnspan=2, sticky="ew", pady=(5, 0))
    self.login_status_label = ttk.Label(container, text="")
    self.login_status_label.grid(row=9, column=0, columnspan=2, sticky="w")
def save_settings(self):
    """Persist the settings form to SETTINGS_FILE as JSON.

    Host listbox rows look like ``"<status-icon> <host>"``; the icon
    prefix is stripped before saving. Fixes vs. the original: the config
    directory is created on demand (first save on a fresh checkout no
    longer crashes), and an icon-less row no longer raises IndexError
    (``[-1]`` instead of ``[1]`` on the split result).
    """
    reordered_hosts = [
        self.host_listbox.get(i).split(" ", 1)[-1]
        for i in range(self.host_listbox.size())
    ]
    settings = {
        "username": self.username_var.get(),
        "password": self.password_var.get(),
        "auto_update": self.auto_update_var.get(),
        "preferred_os": self.os_var.get(),
        "host_priority": reordered_hosts
    }
    os.makedirs(CONFIG_DIR, exist_ok=True)  # config/ may not exist yet
    with open(SETTINGS_FILE, "w") as f:
        json.dump(settings, f, indent=4)
    messagebox.showinfo("Settings", "Settings saved.")
def start_login_thread(self):
    """Kick off the login/scrape worker on a daemon thread and show progress."""
    self.login_progress.start()
    self.login_status_label.config(text="Logging in and scraping...")
    worker = threading.Thread(target=self._login_and_scrape_worker, daemon=True)
    worker.start()
def _login_and_scrape_worker(self):
    """Background worker: log in to F95Zone and load the tag/prefix maps.

    Runs on a daemon thread (see start_login_thread). Status messages go
    through _update_login_status, which marshals back to the Tk thread.
    """
    try:
        # login_to_f95zone and load_tag_prefix_maps are defined elsewhere in the file.
        self.login_to_f95zone(self.username_var.get(), self.password_var.get())
        #scrape_tags_and_prefixes(self.session)
        load_tag_prefix_maps()
        # Smoke-test the authenticated session against the latest-games endpoint.
        r = self.session.get("https://f95zone.to/sam/latest_alpha/")
        print(r.text[:500])  # Debug output
        self._update_login_status("✅ Login successful and tags loaded.")
        #messagebox.showinfo("Login", "Login successful and tags loaded.")
    except Exception as e:
        self._update_login_status("❌ Login failed.")
        # NOTE(review): messagebox (here) and login_progress.stop (below) are
        # called from this worker thread; Tk is not thread-safe — confirm or
        # route both through widget.after() like _update_login_status does.
        messagebox.showerror("Login Error", str(e))
    finally:
        self.login_progress.stop()
| def _update_login_status(self, message): | |
| self.login_status_label.after(0, lambda: self.login_status_label.config(text=message)) | |
def load_settings(self):
    """Load saved settings (if any) into the UI and refresh the host-status list."""
    if os.path.exists(SETTINGS_FILE):
        with open(SETTINGS_FILE, "r") as f:
            saved = json.load(f)
        self.username_var.set(saved.get("username", ""))
        self.password_var.set(saved.get("password", ""))
        self.auto_update_var.set(saved.get("auto_update", False))
        self.os_var.set(saved.get("preferred_os", "Windows"))
        self.host_priority = saved.get("host_priority", DEFAULT_HOSTS)
        self.host_listbox.delete(0, tk.END)
        # Each row is "<status-icon> <host>" (save_settings strips the icon back off).
        for host_name in self.host_priority:
            status_icon = check_host_status(host_name)
            self.host_listbox.insert(tk.END, f"{status_icon} {host_name}")
def start_drag(self, event):
    """Remember which host row the drag gesture began on."""
    self.drag_index = self.host_listbox.nearest(event.y)
def do_drag(self, event):
    """Move the dragged host row to follow the pointer during <B1-Motion>."""
    target = self.host_listbox.nearest(event.y)
    if target == self.drag_index:
        return
    value = self.host_listbox.get(self.drag_index)
    self.host_listbox.delete(self.drag_index)
    self.host_listbox.insert(target, value)
    self.drag_index = target
def start_download_thread(self, game_id, title, button):
    """Disable *button* and run download_game for *game_id* on a worker thread."""
    self.btn = button
    self.btn.config(state="disabled")
    worker = threading.Thread(target=self.download_game, args=(game_id, title))
    worker.start()
def download_game(self, game_id, title):
    """Download, extract and register a game (runs on a worker thread).

    Scrapes the thread for download links, orders them by the user's
    host priority, downloads the first one that works into the install
    folder, extracts the archive, writes a ``_version.fdm`` marker and
    marks the game installed in the database.

    Bugs fixed: ``datetime`` was never imported at file top (NameError at
    the DB update) — imported locally here; the original passed a bogus
    fixed ``game_archive`` path to download_file/unpack_game — the real
    path returned by download_file is used now.

    NOTE(review): assumes ``self.settings`` dict exists — confirm it is
    populated before downloads can start.
    """
    from datetime import datetime  # local: not imported in the file header

    self.btn.config(state="disabled")
    cursor = self.conn.cursor()
    thread_url = "https://f95zone.to/threads/" + str(game_id) + "/"
    print("OS: " + self.os_var.get())
    game_data = self.scrape_game_details(game_id, thread_url, title)
    if not game_data:
        print(f"[Download] Failed to scrape game details for ID {game_id}")
        return
    download_urls = game_data.get("download_links", [])
    version = game_data.get("version", "0.0")
    if not download_urls:
        print(f"[Download] No download links found for {title}")
        return
    # Re-read the canonical record; DB values win over the scrape.
    cursor.execute("""
        SELECT title, game_links, version FROM games WHERE id = ?
    """, (game_id,))
    row = cursor.fetchone()
    if not row:
        print(f"[Download] Game ID {game_id} not found.")
        return
    title, url_list_json, version = row
    try:
        download_urls = json.loads(url_list_json)  # stored as JSON array
    except Exception as e:
        print(f"[Download] Failed to parse download URLs for {title}: {e}")
        return
    # Order candidate URLs by the user's host priority, then append the rest.
    preferred_hosts = self.settings.get("host_priority", DEFAULT_HOSTS)
    prioritized_urls = []
    for host in preferred_hosts:
        for url in download_urls:
            if host.lower() in url.lower() and url not in prioritized_urls:
                prioritized_urls.append(url)
    remaining_urls = [u for u in download_urls if u not in prioritized_urls]
    all_urls = prioritized_urls + remaining_urls
    # Destination folder
    install_root = self.settings.get("install_path", "Games")
    dest_folder = os.path.join(install_root, title.replace(" ", "_"))
    os.makedirs(dest_folder, exist_ok=True)
    # Try each URL until one succeeds; keep the actual saved file path.
    archive_path = None
    for url in all_urls:
        print(f"[Download] Trying URL: {url}")
        try:
            archive_path = self.download_file(url, dest_folder)
            break  # Success
        except Exception as e:
            print(f"[Download] Failed to download from {url}: {e}")
    if archive_path is None:
        print(f"[Download] All URLs failed for {title}.")
        return
    # Unpack and install
    self.unpack_game(archive_path, dest_folder)
    # Version marker consumed by the update checker.
    version_file = os.path.join(dest_folder, "_version.fdm")
    try:
        with open(version_file, "w", encoding="utf-8") as f:
            f.write(version)
    except Exception as e:
        print(f"[Download] Failed to write version file: {e}")
    # Update DB
    now = datetime.now().strftime("%Y-%m-%d")
    cursor.execute("""
        UPDATE games SET installed = 1, install_path = ?, install_date = ?
        WHERE id = ?
    """, (dest_folder, now, game_id))
    self.conn.commit()
def download_file(self, url, dest_folder):
    """Stream *url* into *dest_folder*, updating the progress bar.

    Returns the path of the downloaded file; raises requests.HTTPError
    on a bad status. Bugs fixed: a response without a Content-Length
    header no longer causes ZeroDivisionError (progress is simply not
    shown), and the request now has a timeout so a dead host cannot
    hang the worker thread forever.
    """
    self.status_label.config(text="Downloading...")
    self.progress_bar.config(style="Download.Horizontal.TProgressbar")
    local_filename = url.split("/")[-1] or "download.bin"  # URL may end with '/'
    file_path = os.path.join(dest_folder, local_filename)
    with requests.get(url, stream=True, timeout=30) as r:
        r.raise_for_status()
        total_size = int(r.headers.get("content-length", 0))
        block_size = 8192
        downloaded = 0
        with open(file_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=block_size):
                if chunk:
                    f.write(chunk)
                    downloaded += len(chunk)
                    if total_size > 0:  # chunked responses send no length
                        self.progress_bar["value"] = int((downloaded / total_size) * 100)
                        self.root.update_idletasks()
    return file_path
def unpack_game(self, file_path, dest_folder):
    """Extract *file_path* (.zip/.7z/.rar) into *dest_folder* with progress feedback.

    Raises Exception for unsupported archive extensions. Bug fixed: py7zr
    cannot reliably extract members one at a time in a loop (the archive
    stream must be reset between ``extract`` calls), so .7z archives are
    now extracted in a single ``extractall`` pass.
    """
    self.status_label.config(text="Extracting...")
    self.progress_bar.config(style="Extract.Horizontal.TProgressbar")
    ext = os.path.splitext(file_path)[1].lower()
    self.progress_bar["value"] = 0
    self.root.update_idletasks()
    if ext == ".zip":
        with zipfile.ZipFile(file_path, 'r') as zip_ref:
            members = zip_ref.infolist()
            total = len(members) or 1  # avoid div-by-zero on empty archives
            for i, member in enumerate(members):
                zip_ref.extract(member, dest_folder)
                self.progress_bar["value"] = int((i + 1) / total * 100)
                self.root.update_idletasks()
    elif ext == ".7z":
        # Single-pass extraction: no per-file progress, but correct output.
        with py7zr.SevenZipFile(file_path, mode='r') as archive:
            archive.extractall(path=dest_folder)
        self.progress_bar["value"] = 100
        self.root.update_idletasks()
    elif ext == ".rar":
        with rarfile.RarFile(file_path) as rf:
            members = rf.infolist()
            total = len(members) or 1
            for i, member in enumerate(members):
                rf.extract(member, dest_folder)
                self.progress_bar["value"] = int((i + 1) / total * 100)
                self.root.update_idletasks()
    else:
        raise Exception(f"Unsupported archive format: {ext}")
    self.progress_bar["value"] = 0
    self.status_label.config(text="Idle")
    self.btn.config(state="normal")
def extract_game_data(thread_url, session):
    """Scrape a thread page for title, banner, description, version and a download link.

    Returns a dict with keys id/title/version/banner/description/download_url.
    Bugs fixed: the original called ``self.session`` inside this plain
    function (NameError) — the ``session`` parameter is used now; the
    thread id was always "" because the URL ends with "/" (``rstrip``
    added); a missing or corrupt settings file no longer crashes the
    scrape (no host preference is applied instead).
    """
    r = session.get(thread_url, timeout=10)
    soup = BeautifulSoup(r.text, "html.parser")
    title = soup.find("h1", class_="p-title-value").text.strip()
    banner_img = soup.find("img", class_="bbImage")
    banner = banner_img["src"] if banner_img else ""
    desc = soup.find("div", class_="bbWrapper").text.strip()
    version = "Unknown"
    for line in desc.splitlines():
        if "Version:" in line:
            version = line.split("Version:", 1)[1].strip()
            break
    # Host priority from settings; missing/corrupt file means no preference.
    host_priority = []
    try:
        with open(SETTINGS_FILE, "r") as f:
            host_priority = json.load(f).get("host_priority", [])
    except (OSError, json.JSONDecodeError):
        pass
    # First link matching the highest-priority host wins.
    all_links = [a["href"] for a in soup.find_all("a", href=True)]
    download_url = ""
    for host in host_priority:
        for link in all_links:
            if host in link:
                download_url = link
                break
        if download_url:
            break
    return {
        "id": thread_url.rstrip("/").split("/")[-1],
        "title": title,
        "version": version,
        "banner": banner,
        "description": desc,
        "download_url": download_url
    }
# Script entry point: load tag/prefix maps, build the GUI and centre the window.
if __name__ == "__main__":
    load_tag_prefix_maps()  # Must be called before GUI is built
    root = tk.Tk()
    app = GameManagerApp(root)
    # NOTE(review): the HTTP session is attached after construction — if
    # GameManagerApp.__init__ already touches self.session this arrives
    # too late; confirm against the constructor.
    app.session=requests.Session()
    root.update_idletasks()
    # Centre a fixed 1200x800 window on the primary screen.
    width = 1200
    height = 800
    x = (root.winfo_screenwidth() // 2) - (width // 2)
    y = (root.winfo_screenheight() // 2) - (height // 2)
    root.geometry(f"{width}x{height}+{x}+{y}")
    root.mainloop()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment