3 Commits

Author SHA1 Message Date
Yuvi9587
56a83195b2 Update readme.md 2025-08-11 09:31:53 -07:00
Yuvi9587
26fa3b9bc1 Commit 2025-08-10 09:16:31 -07:00
Yuvi9587
f7c4d892a8 commit 2025-08-07 21:42:04 -07:00
10 changed files with 968 additions and 249 deletions

View File

@@ -99,7 +99,7 @@ Built with PyQt5, this tool is designed for users who want deep filtering capabi
### Install Dependencies
```bash
pip install PyQt5 requests Pillow mega.py fpdf2 python-docx
pip install PyQt5 requests Pillow mega.py fpdf2 python-docx
```
### Running the Application

View File

@@ -60,6 +60,7 @@ DOWNLOAD_LOCATION_KEY = "downloadLocationV1"
RESOLUTION_KEY = "window_resolution"
UI_SCALE_KEY = "ui_scale_factor"
SAVE_CREATOR_JSON_KEY = "saveCreatorJsonProfile"
FETCH_FIRST_KEY = "fetchAllPostsFirst"
# --- UI Constants and Identifiers ---
HTML_PREFIX = "<!HTML!>"
@@ -111,7 +112,9 @@ CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS = {
"may", "jun", "june", "jul", "july", "aug", "august", "sep", "september",
"oct", "october", "nov", "november", "dec", "december",
"mon", "monday", "tue", "tuesday", "wed", "wednesday", "thu", "thursday",
"fri", "friday", "sat", "saturday", "sun", "sunday"
"fri", "friday", "sat", "saturday", "sun", "sunday", "Pack", "tier", "spoiler",
# add more according to need
}

View File

@@ -1,7 +1,7 @@
import time
import traceback
from urllib.parse import urlparse
import json # Ensure json is imported
import json
import requests
from ..utils.network_utils import extract_post_info, prepare_cookies_for_request
from ..config.constants import (
@@ -120,7 +120,8 @@ def download_from_api(
selected_cookie_file=None,
app_base_dir=None,
manga_filename_style_for_sort_check=None,
processed_post_ids=None
processed_post_ids=None,
fetch_all_first=False
):
headers = {
'User-Agent': 'Mozilla/5.0',
@@ -183,6 +184,7 @@ def download_from_api(
logger("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")
is_manga_mode_fetch_all_and_sort_oldest_first = manga_mode and (manga_filename_style_for_sort_check != STYLE_DATE_POST_TITLE) and not target_post_id
should_fetch_all = fetch_all_first or is_manga_mode_fetch_all_and_sort_oldest_first
api_base_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}"
page_size = 50
if is_manga_mode_fetch_all_and_sort_oldest_first:

View File

@@ -0,0 +1,80 @@
import time
import requests
import json
from urllib.parse import urlparse
def fetch_server_channels(server_id, logger, cookies=None, cancellation_event=None, pause_event=None):
    """
    Fetch the list of channels for a Discord server ID from the Kemono API.

    Tries each known Kemono domain in turn and returns the first successful
    channel list. Honors the optional pause/cancellation events so the UI
    can pause or abort the lookup.

    Args:
        server_id: Discord server ID to look up.
        logger: Callable used for status messages.
        cookies: Optional cookies forwarded to requests.get.
        cancellation_event: Optional threading.Event; when set, abort and return None.
        pause_event: Optional threading.Event; while set, the lookup sleeps.

    Returns:
        A list of channel dicts on success, or None on cancellation/failure.
    """
    domains_to_try = ["kemono.cr", "kemono.su"]
    for domain in domains_to_try:
        if cancellation_event and cancellation_event.is_set():
            logger(" Channel fetching cancelled by user.")
            return None
        while pause_event and pause_event.is_set():
            if cancellation_event and cancellation_event.is_set():
                break
            time.sleep(0.5)
        # BUGFIX: re-check cancellation after the pause wait. Previously a
        # cancel issued while paused still fired one more HTTP request.
        if cancellation_event and cancellation_event.is_set():
            logger(" Channel fetching cancelled by user.")
            return None
        lookup_url = f"https://{domain}/api/v1/discord/channel/lookup/{server_id}"
        logger(f" Attempting to fetch channel list from: {lookup_url}")
        try:
            response = requests.get(lookup_url, cookies=cookies, timeout=15)
            response.raise_for_status()
            channels = response.json()
            if isinstance(channels, list):
                logger(f" ✅ Found {len(channels)} channels for server {server_id}.")
                return channels
        except (requests.exceptions.RequestException, json.JSONDecodeError):
            # Silent failure by design: just try the next domain.
            pass
    logger(f" ❌ Failed to fetch channel list for server {server_id} from all available domains.")
    return None
def fetch_channel_messages(channel_id, logger, cancellation_event, pause_event, cookies=None):
    """
    Generator that yields batches of messages from a Discord channel.

    Pages through the Kemono API via the ``?o=<offset>`` query parameter with
    a page size of 150, yielding each raw page (a list of message dicts) as it
    arrives. Stops on cancellation, an empty or short page, or a request/
    decode error.

    Args:
        channel_id: Kemono Discord channel ID to read.
        logger: Callable for status messages.
        cancellation_event: threading.Event-like; when set, fetching stops.
        pause_event: threading.Event-like; while set, fetching sleeps.
        cookies: Optional cookies forwarded to requests.get.

    Yields:
        Lists of message dicts, one list per API page.
    """
    offset = 0
    page_size = 150  # Maximum batch size served by this endpoint.
    api_base_url = f"https://kemono.cr/api/v1/discord/channel/{channel_id}"
    while not (cancellation_event and cancellation_event.is_set()):
        if pause_event and pause_event.is_set():
            logger(" Message fetching paused...")
            while pause_event.is_set():
                if cancellation_event and cancellation_event.is_set():
                    break
                time.sleep(0.5)
            if cancellation_event and cancellation_event.is_set():
                break
            # BUGFIX: only announce resumption when we actually resumed;
            # previously "resumed" was logged even when the pause loop
            # exited because of a cancellation.
            logger(" Message fetching resumed.")
        paginated_url = f"{api_base_url}?o={offset}"
        logger(f" Fetching messages from API: page starting at offset {offset}")
        try:
            response = requests.get(paginated_url, cookies=cookies, timeout=20)
            response.raise_for_status()
            messages_batch = response.json()
            if not messages_batch:
                logger(f" ✅ Reached end of messages for channel {channel_id}.")
                break
            logger(f" Fetched {len(messages_batch)} messages...")
            yield messages_batch
            if len(messages_batch) < page_size:
                logger(f" ✅ Last page of messages received for channel {channel_id}.")
                break
            offset += page_size
            time.sleep(0.5)  # Small delay between pages to be polite to the API.
        except (requests.exceptions.RequestException, json.JSONDecodeError) as e:
            logger(f" ❌ Error fetching messages at offset {offset}: {e}")
            break

View File

@@ -826,36 +826,59 @@ class PostProcessorWorker:
return 0, 1, filename_to_save_in_main_path, was_original_name_kept_flag, FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER, details_for_failure
def process(self):
# --- START: REFACTORED PROCESS METHOD ---
# 1. DATA MAPPING: Map Discord Message or Creator Post fields to a consistent set of variables.
if self.service == 'discord':
# For Discord, self.post is a MESSAGE object from the API.
post_title = self.post.get('content', '') or f"Message {self.post.get('id', 'N/A')}"
post_id = self.post.get('id', 'unknown_id')
post_main_file_info = {} # Discord messages don't have a single main file
post_attachments = self.post.get('attachments', [])
post_content_html = self.post.get('content', '')
post_data = self.post # Keep a reference to the original message object
log_prefix = "Message"
else:
# Existing logic for standard creator posts
post_title = self.post.get('title', '') or 'untitled_post'
post_id = self.post.get('id', 'unknown_id')
post_main_file_info = self.post.get('file')
post_attachments = self.post.get('attachments', [])
post_content_html = self.post.get('content', '')
post_data = self.post # Reference to the post object
log_prefix = "Post"
# 2. SHARED PROCESSING LOGIC: The rest of the function now uses the consistent variables from above.
result_tuple = (0, 0, [], [], [], None, None)
total_downloaded_this_post = 0
total_skipped_this_post = 0
determined_post_save_path_for_history = self.override_output_dir if self.override_output_dir else self.download_root
try:
if self._check_pause(f"Post processing for ID {self.post.get('id', 'N/A')}"):
result_tuple = (0, 0, [], [], [], None, None)
return result_tuple
if self._check_pause(f"{log_prefix} processing for ID {post_id}"):
return (0, 0, [], [], [], None, None)
if self.check_cancel():
result_tuple = (0, 0, [], [], [], None, None)
return result_tuple
return (0, 0, [], [], [], None, None)
current_character_filters = self._get_current_character_filters()
kept_original_filenames_for_log = []
retryable_failures_this_post = []
permanent_failures_this_post = []
total_downloaded_this_post = 0
total_skipped_this_post = 0
history_data_for_this_post = None
parsed_api_url = urlparse(self.api_url_input)
post_data = self.post
post_id = post_data.get('id', 'unknown_id')
# CONTEXT-AWARE URL for Referer Header
if self.service == 'discord':
server_id = self.user_id
channel_id = self.post.get('channel', 'unknown_channel')
post_page_url = f"https://{parsed_api_url.netloc}/discord/server/{server_id}/{channel_id}"
else:
post_page_url = f"https://{parsed_api_url.netloc}/{self.service}/user/{self.user_id}/post/{post_id}"
headers = {'User-Agent': 'Mozilla/5.0', 'Referer': post_page_url, 'Accept': '*/*'}
link_pattern = re.compile(r"""<a\s+.*?href=["'](https?://[^"']+)["'][^>]*>(.*?)</a>""", re.IGNORECASE | re.DOTALL)
post_data = self.post
post_title = post_data.get('title', '') or 'untitled_post'
post_id = post_data.get('id', 'unknown_id')
post_main_file_info = post_data.get('file')
post_attachments = post_data.get('attachments', [])
effective_unwanted_keywords_for_folder_naming = self.unwanted_keywords.copy()
is_full_creator_download_no_char_filter = not self.target_post_id_from_initial_url and not current_character_filters
@@ -874,9 +897,9 @@ class PostProcessorWorker:
self.logger(f" Applying creator download specific folder ignore words ({len(self.creator_download_folder_ignore_words)} words).")
effective_unwanted_keywords_for_folder_naming.update(self.creator_download_folder_ignore_words)
post_content_html = post_data.get('content', '')
if not self.extract_links_only:
self.logger(f"\n--- Processing Post {post_id} ('{post_title[:50]}...') (Thread: {threading.current_thread().name}) ---")
self.logger(f"\n--- Processing {log_prefix} {post_id} ('{post_title[:50]}...') (Thread: {threading.current_thread().name}) ---")
num_potential_files_in_post = len(post_attachments or []) + (1 if post_main_file_info and post_main_file_info.get('path') else 0)
post_is_candidate_by_title_char_match = False
@@ -920,7 +943,7 @@ class PostProcessorWorker:
if original_api_att_name:
all_files_from_post_api_for_char_check.append({'_original_name_for_log': original_api_att_name})
if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS:
if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS and self.service != 'discord':
self.logger(f" [Char Scope: Comments] Phase 1: Checking post files for matches before comments for post ID '{post_id}'.")
if self._check_pause(f"File check (comments scope) for post {post_id}"):
result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
@@ -943,7 +966,7 @@ class PostProcessorWorker:
if post_is_candidate_by_file_char_match_in_comment_scope: break
self.logger(f" [Char Scope: Comments] Phase 1 Result: post_is_candidate_by_file_char_match_in_comment_scope = {post_is_candidate_by_file_char_match_in_comment_scope}")
if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS:
if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS and self.service != 'discord':
if not post_is_candidate_by_file_char_match_in_comment_scope:
if self._check_pause(f"Comment check for post {post_id}"):
result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
@@ -1143,29 +1166,50 @@ class PostProcessorWorker:
suffix_counter = 0
final_post_subfolder_name = ""
while True:
suffix_counter = 0
folder_creation_successful = False
final_post_subfolder_name = ""
post_id_for_folder = str(self.post.get('id', 'unknown_id'))
while not folder_creation_successful:
if suffix_counter == 0:
name_candidate = original_cleaned_post_title_for_sub
else:
name_candidate = f"{original_cleaned_post_title_for_sub}_{suffix_counter}"
potential_post_subfolder_path = os.path.join(base_path_for_post_subfolder, name_candidate)
id_file_path = os.path.join(potential_post_subfolder_path, f".postid_{post_id_for_folder}")
if not os.path.isdir(potential_post_subfolder_path):
# Folder does not exist, create it and its ID file
try:
os.makedirs(potential_post_subfolder_path, exist_ok=False)
os.makedirs(potential_post_subfolder_path)
with open(id_file_path, 'w') as f:
f.write(post_id_for_folder)
final_post_subfolder_name = name_candidate
folder_creation_successful = True
if suffix_counter > 0:
self.logger(f" Post subfolder name conflict: Using '{final_post_subfolder_name}' instead of '{original_cleaned_post_title_for_sub}' to avoid mixing posts.")
self.logger(f" Post subfolder name conflict: Using '{final_post_subfolder_name}' to avoid mixing posts.")
except OSError as e_mkdir:
self.logger(f" ❌ Error creating directory '{potential_post_subfolder_path}': {e_mkdir}.")
final_post_subfolder_name = original_cleaned_post_title_for_sub
break
except FileExistsError:
else:
# Folder exists, check if it's for this post or a different one
if os.path.exists(id_file_path):
# ID file matches! This is a restore scenario. Reuse the folder.
self.logger(f" Re-using existing post subfolder: '{name_candidate}'")
final_post_subfolder_name = name_candidate
folder_creation_successful = True
else:
# Folder exists but ID file does not match (or is missing). This is a normal name collision.
suffix_counter += 1
if suffix_counter > 100:
self.logger(f" ⚠️ Exceeded 100 attempts to find unique subfolder name for '{original_cleaned_post_title_for_sub}'. Using UUID.")
if suffix_counter > 100: # Safety break
self.logger(f" ⚠️ Exceeded 100 attempts to find unique subfolder for '{original_cleaned_post_title_for_sub}'.")
final_post_subfolder_name = f"{original_cleaned_post_title_for_sub}_{uuid.uuid4().hex[:8]}"
os.makedirs(os.path.join(base_path_for_post_subfolder, final_post_subfolder_name), exist_ok=True)
break
except OSError as e_mkdir:
self.logger(f" ❌ Error creating directory '{potential_post_subfolder_path}': {e_mkdir}. Files for this post might be saved in parent or fail.")
final_post_subfolder_name = original_cleaned_post_title_for_sub
break
determined_post_save_path_for_history = os.path.join(base_path_for_post_subfolder, final_post_subfolder_name)
if self.skip_words_list and (self.skip_words_scope == SKIP_SCOPE_POSTS or self.skip_words_scope == SKIP_SCOPE_BOTH):
@@ -1807,14 +1851,23 @@ class PostProcessorWorker:
permanent_failures_this_post, history_data_for_this_post,
None)
except Exception as main_thread_err:
self.logger(f"\n❌ Critical error within Worker process for {log_prefix} {post_id}: {main_thread_err}")
self.logger(traceback.format_exc())
# Ensure we still return a valid tuple to prevent the app from stalling
result_tuple = (0, 1, [], [], [{'error': str(main_thread_err)}], None, None)
finally:
# This block ALWAYS executes, ensuring that every task signals its completion.
# This is critical for the main thread to know when all work is done.
if not self.extract_links_only and self.use_post_subfolders and total_downloaded_this_post == 0:
path_to_check_for_emptiness = determined_post_save_path_for_history
try:
# Check if the path is a directory and if it's empty
if os.path.isdir(path_to_check_for_emptiness) and not os.listdir(path_to_check_for_emptiness):
self.logger(f" 🗑️ Removing empty post-specific subfolder: '{path_to_check_for_emptiness}'")
os.rmdir(path_to_check_for_emptiness)
except OSError as e_rmdir:
# Log if removal fails for any reason (e.g., permissions)
self.logger(f" ⚠️ Could not remove potentially empty subfolder '{path_to_check_for_emptiness}': {e_rmdir}")
self._emit_signal('worker_finished', result_tuple)
@@ -1881,7 +1934,8 @@ class DownloadThread(QThread):
single_pdf_mode=False,
project_root_dir=None,
processed_post_ids=None,
start_offset=0):
start_offset=0,
fetch_first=False):
super().__init__()
self.api_url_input = api_url_input
self.output_dir = output_dir
@@ -1947,6 +2001,7 @@ class DownloadThread(QThread):
self.project_root_dir = project_root_dir
self.processed_post_ids_set = set(processed_post_ids) if processed_post_ids is not None else set()
self.start_offset = start_offset
self.fetch_first = fetch_first
if self.compress_images and Image is None:
self.logger("⚠️ Image compression disabled: Pillow library not found (DownloadThread).")
@@ -1993,7 +2048,8 @@ class DownloadThread(QThread):
selected_cookie_file=self.selected_cookie_file,
app_base_dir=self.app_base_dir,
manga_filename_style_for_sort_check=self.manga_filename_style if self.manga_mode_active else None,
processed_post_ids=self.processed_post_ids_set
processed_post_ids=self.processed_post_ids_set,
fetch_all_first=self.fetch_first
)
for posts_batch_data in post_generator:

View File

@@ -16,7 +16,8 @@ from ..main_window import get_app_icon_object
from ...config.constants import (
THEME_KEY, LANGUAGE_KEY, DOWNLOAD_LOCATION_KEY,
RESOLUTION_KEY, UI_SCALE_KEY, SAVE_CREATOR_JSON_KEY,
COOKIE_TEXT_KEY, USE_COOKIE_KEY
COOKIE_TEXT_KEY, USE_COOKIE_KEY,
FETCH_FIRST_KEY ### ADDED ###
)
@@ -36,7 +37,7 @@ class FutureSettingsDialog(QDialog):
screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 800
scale_factor = screen_height / 800.0
base_min_w, base_min_h = 420, 360 # Adjusted height for new layout
base_min_w, base_min_h = 420, 390
scaled_min_w = int(base_min_w * scale_factor)
scaled_min_h = int(base_min_h * scale_factor)
self.setMinimumSize(scaled_min_w, scaled_min_h)
@@ -49,7 +50,6 @@ class FutureSettingsDialog(QDialog):
"""Initializes all UI components and layouts for the dialog."""
main_layout = QVBoxLayout(self)
# --- Group 1: Interface Settings ---
self.interface_group_box = QGroupBox()
interface_layout = QGridLayout(self.interface_group_box)
@@ -76,36 +76,32 @@ class FutureSettingsDialog(QDialog):
main_layout.addWidget(self.interface_group_box)
# --- Group 2: Download & Window Settings ---
self.download_window_group_box = QGroupBox()
download_window_layout = QGridLayout(self.download_window_group_box)
# Window Size (Resolution)
self.window_size_label = QLabel()
self.resolution_combo_box = QComboBox()
self.resolution_combo_box.currentIndexChanged.connect(self._display_setting_changed)
download_window_layout.addWidget(self.window_size_label, 0, 0)
download_window_layout.addWidget(self.resolution_combo_box, 0, 1)
# Default Path
self.default_path_label = QLabel()
self.save_path_button = QPushButton()
# --- START: MODIFIED LOGIC ---
self.save_path_button.clicked.connect(self._save_cookie_and_path)
# --- END: MODIFIED LOGIC ---
download_window_layout.addWidget(self.default_path_label, 1, 0)
download_window_layout.addWidget(self.save_path_button, 1, 1)
# Save Creator.json Checkbox
self.save_creator_json_checkbox = QCheckBox()
self.save_creator_json_checkbox.stateChanged.connect(self._creator_json_setting_changed)
download_window_layout.addWidget(self.save_creator_json_checkbox, 2, 0, 1, 2)
self.fetch_first_checkbox = QCheckBox()
self.fetch_first_checkbox.stateChanged.connect(self._fetch_first_setting_changed)
download_window_layout.addWidget(self.fetch_first_checkbox, 3, 0, 1, 2)
main_layout.addWidget(self.download_window_group_box)
main_layout.addStretch(1)
# --- OK Button ---
self.ok_button = QPushButton()
self.ok_button.clicked.connect(self.accept)
main_layout.addWidget(self.ok_button, 0, Qt.AlignRight | Qt.AlignBottom)
@@ -113,17 +109,27 @@ class FutureSettingsDialog(QDialog):
def _load_checkbox_states(self):
    """Initializes every settings-backed checkbox from persisted values."""
    settings = self.parent_app.settings
    # (widget, settings key, default) — defaults: creator-JSON on, fetch-first off.
    checkbox_specs = (
        (self.save_creator_json_checkbox, SAVE_CREATOR_JSON_KEY, True),
        (self.fetch_first_checkbox, FETCH_FIRST_KEY, False),
    )
    for checkbox, settings_key, default_value in checkbox_specs:
        # Block signals so programmatic initialization does not re-save settings.
        checkbox.blockSignals(True)
        checkbox.setChecked(settings.value(settings_key, default_value, type=bool))
        checkbox.blockSignals(False)
def _creator_json_setting_changed(self, state):
    """Persists the 'Save Creator.json' checkbox state immediately."""
    settings = self.parent_app.settings
    settings.setValue(SAVE_CREATOR_JSON_KEY, state == Qt.Checked)
    settings.sync()
def _fetch_first_setting_changed(self, state):
    """Persists the 'Fetch First' checkbox state immediately."""
    settings = self.parent_app.settings
    settings.setValue(FETCH_FIRST_KEY, state == Qt.Checked)
    settings.sync()
def _tr(self, key, default_text=""):
if callable(get_translation) and self.parent_app:
return get_translation(self.parent_app.current_selected_language, key, default_text)
@@ -132,33 +138,31 @@ class FutureSettingsDialog(QDialog):
def _retranslate_ui(self):
    """Applies translated strings to all widgets; called on init and on language change."""
    self.setWindowTitle(self._tr("settings_dialog_title", "Settings"))
    # Group box titles
    self.interface_group_box.setTitle(self._tr("interface_group_title", "Interface Settings"))
    self.download_window_group_box.setTitle(self._tr("download_window_group_title", "Download & Window Settings"))
    # Interface group labels
    self.theme_label.setText(self._tr("theme_label", "Theme:"))
    self.ui_scale_label.setText(self._tr("ui_scale_label", "UI Scale:"))
    self.language_label.setText(self._tr("language_label", "Language:"))
    # Download & window group labels
    self.window_size_label.setText(self._tr("window_size_label", "Window Size:"))
    self.default_path_label.setText(self._tr("default_path_label", "Default Path:"))
    self.save_creator_json_checkbox.setText(self._tr("save_creator_json_label", "Save Creator.json file"))
    # Buttons and controls
    self.fetch_first_checkbox.setText(self._tr("fetch_first_label", "Fetch First (Download after all pages are found)"))
    self.fetch_first_checkbox.setToolTip(self._tr("fetch_first_tooltip", "If checked, the downloader will find all posts from a creator first before starting any downloads.\nThis can be slower to start but provides a more accurate progress bar."))
    self._update_theme_toggle_button_text()
    self.save_path_button.setText(self._tr("settings_save_cookie_path_button", "Save Cookie + Download Path"))
    self.save_path_button.setToolTip(self._tr("settings_save_cookie_path_tooltip", "Save the current 'Download Location' and Cookie settings for future sessions."))
    self.ok_button.setText(self._tr("ok_button", "OK"))
    # Repopulate dropdowns and reload persisted checkbox states so their
    # (possibly translated) entries reflect the current language.
    self._populate_display_combo_boxes()
    self._populate_language_combo_box()
    self._load_checkbox_states()
def _apply_theme(self):
if self.parent_app and self.parent_app.current_theme == "dark":
scale = getattr(self.parent_app, 'scale_factor', 1)
@@ -285,14 +289,12 @@ class FutureSettingsDialog(QDialog):
path_saved = False
cookie_saved = False
# --- Save Download Path Logic ---
if hasattr(self.parent_app, 'dir_input') and self.parent_app.dir_input:
current_path = self.parent_app.dir_input.text().strip()
if current_path and os.path.isdir(current_path):
self.parent_app.settings.setValue(DOWNLOAD_LOCATION_KEY, current_path)
path_saved = True
# --- Save Cookie Logic ---
if hasattr(self.parent_app, 'use_cookie_checkbox'):
use_cookie = self.parent_app.use_cookie_checkbox.isChecked()
cookie_content = self.parent_app.cookie_text_input.text().strip()
@@ -301,7 +303,7 @@ class FutureSettingsDialog(QDialog):
self.parent_app.settings.setValue(USE_COOKIE_KEY, True)
self.parent_app.settings.setValue(COOKIE_TEXT_KEY, cookie_content)
cookie_saved = True
else: # Also save the 'off' state
else:
self.parent_app.settings.setValue(USE_COOKIE_KEY, False)
self.parent_app.settings.setValue(COOKIE_TEXT_KEY, "")

View File

@@ -0,0 +1,146 @@
import os
import re
import datetime
# Import guard: fpdf2 is optional. When it is missing, FPDF_AVAILABLE is
# False and the module still imports cleanly (PDF generation is disabled).
try:
    from fpdf import FPDF  # provided by the fpdf2 package
    FPDF_AVAILABLE = True

    class PDF(FPDF):
        """Custom PDF class for Discord chat logs."""

        def __init__(self, server_name, channel_name, *args, **kwargs):
            # Server/channel names are rendered in the running page header.
            super().__init__(*args, **kwargs)
            self.server_name = server_name
            self.channel_name = channel_name
            self.default_font_family = 'DejaVu'  # Can be changed to Arial if font fails

        def header(self):
            # No header on the title page.
            if self.page_no() == 1:
                return
            self.set_font(self.default_font_family, '', 8)
            # "Server - #channel" on the left, page number on the right.
            self.cell(0, 10, f'{self.server_name} - #{self.channel_name}', 0, 0, 'L')
            self.cell(0, 10, 'Page ' + str(self.page_no()), 0, 0, 'R')
            self.ln(10)

        def footer(self):
            pass  # No footer needed, header has page number
except ImportError:
    FPDF_AVAILABLE = False
    FPDF = None
    PDF = None
def create_pdf_from_discord_messages(messages_data, server_name, channel_name, output_filename, font_path, logger=print) -> bool:
    """
    Creates a single PDF from a list of Discord message objects, formatted as a chat log.

    Renders a title page followed by one entry per message (bold author, grey
    timestamp, message text, and clickable links for attachments and embeds).

    Args:
        messages_data: List of message dicts from the Kemono Discord API.
            NOTE(review): sorted in place below, so the caller's list is reordered.
        server_name: Display name of the Discord server (title page / headers).
        channel_name: Display name of the channel (title page / headers).
        output_filename: Destination path for the generated PDF.
        font_path: Path to DejaVuSans.ttf; the bold face is derived from it.
        logger: Callable for progress/error messages (defaults to print).

    Returns:
        True on success; False if fpdf2 is unavailable, there are no
        messages, or writing the output file fails.
    """
    if not FPDF_AVAILABLE:
        logger("❌ PDF Creation failed: 'fpdf2' library is not installed.")
        return False
    if not messages_data:
        logger(" No messages were found or fetched to create a PDF.")
        return False
    logger(" Sorting messages by date (oldest first)...")
    # In-place sort by the 'published' string; assumes ISO-8601 timestamps,
    # for which lexicographic order matches chronological order — TODO confirm.
    messages_data.sort(key=lambda m: m.get('published', ''))
    pdf = PDF(server_name, channel_name)
    default_font_family = 'DejaVu'
    try:
        # The bold face is expected to live next to the regular face.
        bold_font_path = font_path.replace("DejaVuSans.ttf", "DejaVuSans-Bold.ttf")
        if not os.path.exists(font_path) or not os.path.exists(bold_font_path):
            raise RuntimeError("Font files not found")
        pdf.add_font('DejaVu', '', font_path, uni=True)
        pdf.add_font('DejaVu', 'B', bold_font_path, uni=True)
    except Exception as font_error:
        # Fallback font; NOTE(review): fpdf's built-in Arial lacks full
        # Unicode coverage, so non-Latin message text may render poorly.
        logger(f" ⚠️ Could not load DejaVu font: {font_error}. Falling back to Arial.")
        default_font_family = 'Arial'
        pdf.default_font_family = 'Arial'
    # --- Title Page ---
    pdf.add_page()
    pdf.set_font(default_font_family, 'B', 24)
    pdf.cell(w=0, h=20, text="Discord Chat Log", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.ln(10)
    pdf.set_font(default_font_family, '', 16)
    pdf.cell(w=0, h=10, text=f"Server: {server_name}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.cell(w=0, h=10, text=f"Channel: #{channel_name}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.ln(5)
    pdf.set_font(default_font_family, '', 10)
    pdf.cell(w=0, h=10, text=f"Generated on: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.cell(w=0, h=10, text=f"Total Messages: {len(messages_data)}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.add_page()
    logger(f" Starting PDF creation with {len(messages_data)} messages...")
    for i, message in enumerate(messages_data):
        # Prefer the display name; fall back to the account username.
        author = message.get('author', {}).get('global_name') or message.get('author', {}).get('username', 'Unknown User')
        timestamp_str = message.get('published', '')
        content = message.get('content', '')
        attachments = message.get('attachments', [])
        embeds = message.get('embeds', [])
        try:
            # Handle timezone information correctly: fromisoformat() on
            # Python < 3.11 rejects a trailing 'Z', so rewrite it as +00:00.
            if timestamp_str.endswith('Z'):
                timestamp_str = timestamp_str[:-1] + '+00:00'
            dt_obj = datetime.datetime.fromisoformat(timestamp_str)
            formatted_timestamp = dt_obj.strftime('%Y-%m-%d %H:%M:%S')
        except (ValueError, TypeError):
            # Unparseable timestamp: show the raw string instead of failing.
            formatted_timestamp = timestamp_str
        # Draw a separator line between consecutive messages.
        if i > 0:
            pdf.ln(2)
            pdf.set_draw_color(200, 200, 200)  # Light grey line
            pdf.cell(0, 0, '', border='T')
            pdf.ln(2)
        # Message header: bold author followed by a grey timestamp.
        pdf.set_font(default_font_family, 'B', 11)
        pdf.write(5, f"{author} ")
        pdf.set_font(default_font_family, '', 9)
        pdf.set_text_color(128, 128, 128)
        pdf.write(5, f"({formatted_timestamp})")
        pdf.set_text_color(0, 0, 0)
        pdf.ln(6)
        # Message body; multi_cell wraps long content across lines/pages.
        if content:
            pdf.set_font(default_font_family, '', 10)
            pdf.multi_cell(w=0, h=5, text=content)
        # Attachments and embeds, rendered as clickable blue links.
        if attachments or embeds:
            pdf.ln(1)
            pdf.set_font(default_font_family, '', 9)
            pdf.set_text_color(22, 119, 219)  # A nice blue for links
            for att in attachments:
                file_name = att.get('name', 'untitled')
                file_path = att.get('path', '')
                # Construct the full, clickable URL for the attachment.
                full_url = f"https://kemono.cr/data{file_path}"
                pdf.write(5, text=f"[Attachment: {file_name}]", link=full_url)
                pdf.ln()  # New line after each attachment
            for embed in embeds:
                embed_url = embed.get('url', 'no url')
                # The embed URL is already a full URL.
                pdf.write(5, text=f"[Embed: {embed_url}]", link=embed_url)
                pdf.ln()  # New line after each embed
            pdf.set_text_color(0, 0, 0)  # Reset color to black
    try:
        pdf.output(output_filename)
        logger(f"✅ Successfully created Discord chat log PDF: '{os.path.basename(output_filename)}'")
        return True
    except Exception as e:
        logger(f"❌ A critical error occurred while saving the final PDF: {e}")
        return False

View File

@@ -34,6 +34,7 @@ from ..core.workers import DownloadThread as BackendDownloadThread
from ..core.workers import PostProcessorWorker
from ..core.workers import PostProcessorSignals
from ..core.api_client import download_from_api
from ..core.discord_client import fetch_server_channels, fetch_channel_messages
from ..core.manager import DownloadManager
from .assets import get_app_icon_object
from ..config.constants import *
@@ -56,6 +57,7 @@ from .dialogs.FavoriteArtistsDialog import FavoriteArtistsDialog
from .dialogs.ConfirmAddAllDialog import ConfirmAddAllDialog
from .dialogs.MoreOptionsDialog import MoreOptionsDialog
from .dialogs.SinglePDF import create_single_pdf_from_content
from .dialogs.discord_pdf_generator import create_pdf_from_discord_messages
from .dialogs.SupportDialog import SupportDialog
from .dialogs.KeepDuplicatesDialog import KeepDuplicatesDialog
from .dialogs.MultipartScopeDialog import MultipartScopeDialog
@@ -98,6 +100,7 @@ class DownloaderApp (QWidget ):
finished_signal =pyqtSignal (int ,int ,bool ,list )
external_link_signal =pyqtSignal (str ,str ,str ,str ,str )
file_progress_signal =pyqtSignal (str ,object )
fetch_only_complete_signal = pyqtSignal(list)
def __init__(self):
@@ -150,6 +153,7 @@ class DownloaderApp (QWidget ):
self.final_download_history_entries = []
self.favorite_download_queue = deque()
self.is_processing_favorites_queue = False
self.current_processing_favorite_item_info = None
self.download_counter = 0
self.permanently_failed_files_for_dialog = []
self.last_link_input_text_for_queue_sync = ""
@@ -241,6 +245,11 @@ class DownloaderApp (QWidget ):
self.single_pdf_mode = False
self.save_creator_json_enabled_this_session = True
self.is_single_post_session = False
self.discord_download_scope = 'files'
self.is_fetching_only = False
self.fetched_posts_for_download = []
self.is_ready_to_download_fetched = False
print(f" Known.txt will be loaded/saved at: {self.config_file}")
@@ -535,18 +544,26 @@ class DownloaderApp (QWidget ):
self.log_signal.emit(f"❌ Failed to remove temp session file: {e_rem}")
def _update_button_states_and_connections(self):
"""
Updates the text and click connections of the main action buttons
based on the current application state.
"""
try: self.download_btn.clicked.disconnect()
except TypeError: pass
try: self.pause_btn.clicked.disconnect()
except TypeError: pass
try: self.cancel_btn.clicked.disconnect()
try:
self.download_btn.clicked.disconnect()
self.pause_btn.clicked.disconnect()
self.cancel_btn.clicked.disconnect()
except TypeError: pass
is_download_active = self._is_download_active()
fetch_first_enabled = self.settings.value(FETCH_FIRST_KEY, False, type=bool)
print(f"--- DEBUG: Updating buttons (is_download_active={is_download_active}) ---")
if self.is_ready_to_download_fetched:
num_posts = len(self.fetched_posts_for_download)
self.download_btn.setText(f"⬇️ Start Download ({num_posts} Posts)")
self.download_btn.setEnabled(True)
self.download_btn.clicked.connect(self.start_download)
self.pause_btn.setEnabled(False)
self.cancel_btn.setText("🗑️ Clear Fetch")
self.cancel_btn.setEnabled(True)
self.cancel_btn.clicked.connect(self.reset_application_state)
return # <-- This 'return' is CRITICAL
if self.active_update_profile and self.new_posts_for_update and not is_download_active:
# State: Update confirmation (new posts found, waiting for user to start)
@@ -596,30 +613,139 @@ class DownloaderApp (QWidget ):
self.cancel_btn.setToolTip(self._tr("discard_session_tooltip", "Click to discard the interrupted session and reset the UI."))
elif is_download_active:
print(" --> Button state: ACTIVE DOWNLOAD/FETCH")
if self.is_fetching_only:
self.download_btn.setText("⏳ Fetching Pages...")
self.download_btn.setEnabled(False)
self.pause_btn.setEnabled(False)
else:
# --- START MODIFICATION ---
# Check if we are about to download fetched posts and update text accordingly
if self.is_ready_to_download_fetched:
num_posts = len(self.fetched_posts_for_download)
self.download_btn.setText(f"⬇️ Start Download ({num_posts} Posts)")
self.download_btn.setEnabled(True) # Keep it enabled for the user to click
else:
# Original logic for an active download in other scenarios
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
self.download_btn.setEnabled(False)
self.pause_btn.setText(self._tr("resume_download_button_text", "▶️ Resume Download") if self.is_paused else self._tr("pause_download_button_text", "⏸️ Pause Download"))
self.pause_btn.setEnabled(True)
self.pause_btn.clicked.connect(self._handle_pause_resume_action)
self.pause_btn.setToolTip(self._tr("resume_download_button_tooltip", "Click to resume the download.") if self.is_paused else self._tr("pause_download_button_tooltip", "Click to pause the download."))
print(" --> Button state: IDLE")
self.cancel_btn.setText(self._tr("cancel_button_text", "❌ Cancel & Reset UI"))
self.cancel_btn.setEnabled(True)
self.cancel_btn.clicked.connect(self.cancel_download_button_action)
self.cancel_btn.setToolTip(self._tr("cancel_button_tooltip", "Click to cancel the ongoing download/extraction process and reset the UI fields (preserving URL and Directory)."))
else:
url_text = self.link_input.text().strip()
_, _, post_id = extract_post_info(url_text)
is_single_post = bool(post_id)
if fetch_first_enabled and not is_single_post:
self.download_btn.setText("📄 Fetch Pages")
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
self.download_btn.setEnabled(True)
self.download_btn.clicked.connect(self.start_download)
self.pause_btn.setText(self._tr("pause_download_button_text", "⏸️ Pause Download"))
self.pause_btn.setEnabled(False)
self.pause_btn.setToolTip(self._tr("pause_download_button_tooltip", "Click to pause the ongoing download process."))
self.cancel_btn.setText(self._tr("cancel_button_text", "❌ Cancel & Reset UI"))
self.cancel_btn.setEnabled(False)
self.cancel_btn.setToolTip(self._tr("cancel_button_tooltip", "Click to cancel the ongoing download/extraction process and reset the UI fields (preserving URL and Directory)."))
def _run_fetch_only_thread(self, fetch_args):
    """Background worker: fetch every post page without downloading anything.

    Collects all batches yielded by ``download_from_api`` into one list and
    always reports the result (even a partial one after an error or a
    cancellation) through ``fetch_only_complete_signal``.
    """
    collected = []
    try:
        for batch in download_from_api(**fetch_args):
            if self.cancellation_event.is_set():
                break
            collected.extend(batch)
    except Exception as e:
        self.log_signal.emit(f"❌ Error during fetch-only operation: {e}")
    finally:
        # Always notify the GUI thread so the UI can leave the fetching state.
        self.fetch_only_complete_signal.emit(collected)
def _fetch_only_finished(self, fetched_posts):
    """
    Called on the main thread when the fetch-only operation is complete.

    If the fetch was cancelled, resets the UI back to idle. Otherwise stores
    the fetched posts and switches the UI into the 'ready to download' state.
    """
    self.download_thread = None
    self.is_fetching_only = False
    if self.cancellation_event.is_set():
        self.log_signal.emit(" Page fetching was cancelled.")
        self._update_button_states_and_connections()
        self.set_ui_enabled(True)
        return
    self.fetched_posts_for_download = fetched_posts
    # Flag consumed by _update_button_states_and_connections() and
    # start_download() to offer downloading the already-fetched posts.
    self.is_ready_to_download_fetched = True
    self.log_signal.emit(f"✅ Fetch complete. Found {len(self.fetched_posts_for_download)} posts.")
    self.progress_label.setText(f"Found {len(self.fetched_posts_for_download)} posts. Ready to download.")
    self._update_button_states_and_connections()
    self.set_ui_enabled(True)
def _start_download_of_fetched_posts(self):
    """
    Start downloading the posts collected by a previous 'fetch first' pass.

    Moves the UI into a downloading state, rewires the pause/cancel buttons,
    creates the worker pool and submits every fetched post to it.
    """
    self.is_ready_to_download_fetched = False  # leave the 'ready' state
    self.log_signal.emit(f"🚀 Starting download of {len(self.fetched_posts_for_download)} fetched posts...")
    # Manually set the UI to a "downloading" state for reliability
    self.set_ui_enabled(False)
    self.download_btn.setText("⬇️ Downloading...")
    self.download_btn.setEnabled(False)
    self.pause_btn.setEnabled(True)
    self.cancel_btn.setEnabled(True)
    self.cancel_btn.setText("❌ Cancel & Reset UI")
    try:
        # Ensure signals are connected to the correct actions for this state
        self.cancel_btn.clicked.disconnect()
        self.pause_btn.clicked.disconnect()
    except TypeError:
        pass  # raised by Qt when nothing was connected
    self.cancel_btn.clicked.connect(self.cancel_download_button_action)
    self.pause_btn.clicked.connect(self._handle_pause_resume_action)
    # Copy the saved args so this run does not mutate the stored template
    # (the original dict is reused by later sessions/restores).
    args_template = dict(self.last_start_download_args)
    args_template['fetch_first'] = False
    if self.use_multithreading_checkbox.isChecked():
        try:
            num_threads = max(1, int(self.thread_count_input.text()))
        except ValueError:
            num_threads = 1  # fall back to single-threaded on bad input
    else:
        num_threads = 1
    self.thread_pool = ThreadPoolExecutor(max_workers=num_threads, thread_name_prefix='PostWorker_')
    self.total_posts_to_process = len(self.fetched_posts_for_download)
    self.processed_posts_count = 0
    self.overall_progress_signal.emit(self.total_posts_to_process, 0)
    # PostProcessorWorker.__init__ argument names (minus 'self'), used to
    # filter which template keys the worker accepts.
    ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]
    for post_data in self.fetched_posts_for_download:
        self._submit_post_to_worker_pool(
            post_data, args_template, 1, self.worker_to_gui_queue, ppw_expected_keys, {}
        )
    self.fetched_posts_for_download = []
    self.is_fetcher_thread_running = False
    self._check_if_all_work_is_done()
def update_discord_button_visibility(self, text=""):
    """Show the 'save Discord as PDF' button only when the URL is a Discord one."""
    if not hasattr(self, 'save_discord_as_pdf_btn'):
        return
    service, _, _ = extract_post_info(self.link_input.text().strip())
    self.save_discord_as_pdf_btn.setVisible(service == 'discord')
def _clear_update_selection(self):
"""Clears the loaded creator profile and fully resets the UI to its default state."""
@@ -784,6 +910,7 @@ class DownloaderApp (QWidget ):
self .actual_gui_signals .file_successfully_downloaded_signal .connect (self ._handle_actual_file_downloaded )
self.actual_gui_signals.worker_finished_signal.connect(self._handle_worker_result)
self .actual_gui_signals .file_download_status_signal .connect (lambda status :None )
self.fetch_only_complete_signal.connect(self._fetch_only_finished)
if hasattr (self ,'character_input'):
self .character_input .textChanged .connect (self ._on_character_input_changed_live )
@@ -791,6 +918,10 @@ class DownloaderApp (QWidget ):
self .use_cookie_checkbox .toggled .connect (self ._update_cookie_input_visibility )
if hasattr (self ,'link_input'):
self .link_input .textChanged .connect (self ._sync_queue_with_link_input )
self.link_input.textChanged.connect(self._update_contextual_ui_elements)
self.link_input.textChanged.connect(self._update_button_states_and_connections)
if hasattr(self, 'discord_scope_toggle_button'):
self.discord_scope_toggle_button.clicked.connect(self._cycle_discord_scope)
if hasattr (self ,'cookie_browse_button'):
self .cookie_browse_button .clicked .connect (self ._browse_cookie_file )
if hasattr (self ,'cookie_text_input'):
@@ -954,6 +1085,10 @@ class DownloaderApp (QWidget ):
self ._handle_file_successfully_downloaded (payload [0 ])
elif signal_type == 'worker_finished':
self.actual_gui_signals.worker_finished_signal.emit(payload[0] if payload else tuple())
elif signal_type == 'set_progress_label' and self.progress_label:
self.progress_label.setText(payload[0] if payload else "")
elif signal_type == 'set_ui_enabled':
self.set_ui_enabled(payload[0] if payload else True)
else:
self .log_signal .emit (f"⚠️ Unknown signal type from worker queue: {signal_type }")
self .worker_to_gui_queue .task_done ()
@@ -1020,6 +1155,103 @@ class DownloaderApp (QWidget ):
self .character_list .addItems ([entry ["name"]for entry in KNOWN_NAMES ])
def start_discord_pdf_save(self):
    """Validate the current URL and launch the Discord-log PDF export in the background."""
    if self._is_download_active():
        QMessageBox.warning(self, "Busy", "Another operation is already in progress.")
        return
    api_url = self.link_input.text().strip()
    service, server_id, channel_id = extract_post_info(api_url)
    if service != 'discord':
        QMessageBox.critical(self, "Input Error", "This feature is only for Discord URLs.")
        return
    # Ask the user where to write the PDF before doing any work.
    suggested_name = f"discord_{server_id}_{channel_id or 'server'}.pdf"
    filepath, _ = QFileDialog.getSaveFileName(
        self, "Save Discord Log as PDF", suggested_name, "PDF Files (*.pdf)")
    if not filepath:
        self.log_signal.emit(" Discord PDF save cancelled by user.")
        return
    # Message fetching and PDF rendering happen off the GUI thread.
    worker = threading.Thread(
        target=self._run_discord_pdf_creation_thread,
        args=(api_url, server_id, channel_id, filepath),
        daemon=True,
    )
    worker.start()
def _run_discord_pdf_creation_thread(self, api_url, server_id, channel_id, output_filepath):
    """
    Background thread: fetch Discord messages and render them into a PDF.

    All GUI interaction is marshalled through ``worker_to_gui_queue`` because
    this runs off the main thread; the UI is re-enabled in a ``finally`` so it
    can never be left disabled after an error.
    """
    def queue_logger(message):
        self.worker_to_gui_queue.put({'type': 'progress', 'payload': (message,)})

    def queue_progress_label_update(message):
        self.worker_to_gui_queue.put({'type': 'set_progress_label', 'payload': (message,)})

    # Disable the UI via the queue so the change is applied on the GUI thread
    # (calling set_ui_enabled directly from this worker thread is unsafe).
    self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (False,)})
    try:
        queue_logger("=" * 40)
        queue_logger(f"🚀 Starting Discord PDF export for: {api_url}")
        queue_progress_label_update("Fetching messages...")
        all_messages = []
        cookies = prepare_cookies_for_request(
            self.use_cookie_checkbox.isChecked(), self.cookie_text_input.text(),
            self.selected_cookie_filepath, self.app_base_dir, queue_logger
        )
        channels_to_process = []
        server_name_for_pdf = server_id
        if channel_id:
            channels_to_process.append({'id': channel_id, 'name': channel_id})
        else:
            channels = fetch_server_channels(server_id, queue_logger, cookies)
            if channels:
                channels_to_process = channels
                # TODO: resolve the real server name via the API; use the ID for now.
                server_name_for_pdf = server_id
            else:
                queue_logger(f"❌ Could not find any channels for server {server_id}.")
                return
        # Fetch messages for all required channels.
        for i, channel in enumerate(channels_to_process):
            queue_progress_label_update(f"Fetching from channel {i+1}/{len(channels_to_process)}: #{channel.get('name', '')}")
            message_generator = fetch_channel_messages(channel['id'], queue_logger, self.cancellation_event, self.pause_event, cookies)
            for message_batch in message_generator:
                all_messages.extend(message_batch)
        queue_progress_label_update(f"Collected {len(all_messages)} total messages. Generating PDF...")
        # Determine the bundled font path (PyInstaller bundle vs. source run).
        if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
            base_path = sys._MEIPASS
        else:
            base_path = self.app_base_dir
        font_path = os.path.join(base_path, 'data', 'dejavu-sans', 'DejaVuSans.ttf')
        # Generate the PDF.
        success = create_pdf_from_discord_messages(
            all_messages,
            server_name_for_pdf,
            channels_to_process[0].get('name', channel_id) if len(channels_to_process) == 1 else "All Channels",
            output_filepath,
            font_path,
            logger=queue_logger
        )
        if success:
            queue_progress_label_update("✅ PDF export complete!")
        else:
            queue_progress_label_update("❌ PDF export failed. Check log for details.")
        queue_logger("=" * 40)
    except Exception as e:
        queue_logger(f"❌ Unexpected error during Discord PDF export: {e}")
    finally:
        # Always re-enable the UI from the main thread via the queue.
        self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})
def save_known_names(self):
"""
Saves the current list of known names (KNOWN_NAMES) to the config file.
@@ -1708,15 +1940,27 @@ class DownloaderApp (QWidget ):
scrollbar .setValue (0 )
def _is_download_active (self ):
single_thread_active =self .download_thread and self .download_thread .isRunning ()
single_thread_active = False
if self.download_thread:
if hasattr(self.download_thread, 'isRunning') and self.download_thread.isRunning():
single_thread_active = True
elif hasattr(self.download_thread, 'is_alive') and self.download_thread.is_alive():
single_thread_active = True
fetcher_active =hasattr (self ,'is_fetcher_thread_running')and self .is_fetcher_thread_running
pool_has_active_tasks =self .thread_pool is not None and any (not f .done ()for f in self .active_futures if f is not None )
retry_pool_active =hasattr (self ,'retry_thread_pool')and self .retry_thread_pool is not None and hasattr (self ,'active_retry_futures')and any (not f .done ()for f in self .active_retry_futures if f is not None )
external_dl_thread_active =hasattr (self ,'external_link_download_thread')and self .external_link_download_thread is not None and self .external_link_download_thread .isRunning ()
return single_thread_active or fetcher_active or pool_has_active_tasks or retry_pool_active or external_dl_thread_active
# --- ADD THIS LINE ---
fetching_only_active = hasattr(self, 'is_fetching_only') and self.is_fetching_only
print("--- DEBUG: _is_download_active check ---")
print(f" single_thread_active: {single_thread_active} (thread is {self.download_thread})")
print(f" is_fetcher_thread_running: {self.is_fetcher_thread_running}")
print(f" is_fetching_only: {self.is_fetching_only}")
result = single_thread_active or fetcher_active or pool_has_active_tasks or retry_pool_active or external_dl_thread_active or fetching_only_active
print(f" ==> Returning: {result}")
return result
def handle_external_link_signal (self ,post_title ,link_text ,link_url ,platform ,decryption_key ):
link_data =(post_title ,link_text ,link_url ,platform ,decryption_key )
@@ -1996,7 +2240,11 @@ class DownloaderApp (QWidget ):
self.download_extracted_links_button.setVisible(is_only_links)
self._update_download_extracted_links_button_state()
if self.download_btn:
if self.is_ready_to_download_fetched:
return
if is_only_links:
self.download_btn.setText(self._tr("extract_links_button_text", "🔗 Extract Links"))
else:
@@ -2706,11 +2954,28 @@ class DownloaderApp (QWidget ):
self .favorite_mode_posts_button .setEnabled (False )
def update_ui_for_manga_mode (self ,checked ):
# --- START: NEW DISCORD UI LOGIC ---
url_text =self .link_input .text ().strip ()if self .link_input else ""
service, _, _ = extract_post_info(url_text)
is_discord_url = (service == 'discord')
if is_discord_url:
# When a discord URL is detected, disable incompatible options
if self.manga_mode_checkbox:
self.manga_mode_checkbox.setEnabled(False)
self.manga_mode_checkbox.setChecked(False)
if self.page_range_label: self.page_range_label.setEnabled(False)
if self.start_page_input: self.start_page_input.setEnabled(False)
if self.to_label: self.to_label.setEnabled(False)
if self.end_page_input: self.end_page_input.setEnabled(False)
checked = False # Force manga mode off
# --- END: NEW DISCORD UI LOGIC ---
is_only_links_mode =self .radio_only_links and self .radio_only_links .isChecked ()
is_only_archives_mode =self .radio_only_archives and self .radio_only_archives .isChecked ()
is_only_audio_mode =hasattr (self ,'radio_only_audio')and self .radio_only_audio .isChecked ()
url_text =self .link_input .text ().strip ()if self .link_input else ""
# The rest of the original function continues from here...
_ ,_ ,post_id =extract_post_info (url_text )
is_creator_feed =not post_id if url_text else False
@@ -2720,7 +2985,8 @@ class DownloaderApp (QWidget ):
if self.favorite_download_queue and all(item.get('type') == 'single_post_from_popup' for item in self.favorite_download_queue):
is_single_post = True
can_enable_manga_checkbox = (is_creator_feed or is_single_post) and not is_favorite_mode_on
# --- MODIFIED: Added check for is_discord_url ---
can_enable_manga_checkbox = (is_creator_feed or is_single_post) and not is_favorite_mode_on and not is_discord_url
if self .manga_mode_checkbox :
self .manga_mode_checkbox .setEnabled (can_enable_manga_checkbox)
@@ -2738,6 +3004,8 @@ class DownloaderApp (QWidget ):
if self .manga_rename_toggle_button :
self .manga_rename_toggle_button .setVisible (manga_mode_effectively_on and not (is_only_links_mode or is_only_archives_mode or is_only_audio_mode ))
# --- MODIFIED: Added check for is_discord_url ---
if not is_discord_url:
self .update_page_range_enabled_state ()
current_filename_style =self .manga_filename_style
@@ -2773,7 +3041,6 @@ class DownloaderApp (QWidget ):
self ._update_multithreading_for_date_mode ()
def filter_character_list (self ,search_text ):
search_text_lower =search_text .lower ()
for i in range (self .character_list .count ()):
@@ -2841,7 +3108,38 @@ class DownloaderApp (QWidget ):
if total_posts >0 or processed_posts >0 :
self .file_progress_label .setText ("")
def _update_contextual_ui_elements(self, text=""):
    """Show or hide URL-dependent widgets, such as the Discord scope button."""
    if not hasattr(self, 'discord_scope_toggle_button'):
        return
    service, _, _ = extract_post_info(self.link_input.text().strip())
    showing_discord = service == 'discord'
    self.discord_scope_toggle_button.setVisible(showing_discord)
    if showing_discord:
        self._update_discord_scope_button_text()
    else:
        self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
def _update_discord_scope_button_text(self):
    """Sync the scope-toggle and main download button labels with the Discord scope."""
    downloading_files = self.discord_download_scope == 'files'
    if downloading_files:
        self.discord_scope_toggle_button.setText("Scope: Files")
        self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
    else:
        self.discord_scope_toggle_button.setText("Scope: Messages")
        self.download_btn.setText("📄 Save Messages as PDF")
def _cycle_discord_scope(self):
    """Flip the Discord download scope between 'files' and 'messages'."""
    if self.discord_download_scope == 'files':
        self.discord_download_scope = 'messages'
    else:
        self.discord_download_scope = 'files'
    self._update_discord_scope_button_text()
def start_download(self, direct_api_url=None, override_output_dir=None, is_restore=False, is_continuation=False, item_type_from_queue=None):
from ..utils.file_utils import clean_folder_name
from ..config.constants import FOLDER_NAME_STOP_WORDS
if self.is_ready_to_download_fetched:
self._start_download_of_fetched_posts()
return True
self.finish_lock = threading.Lock()
self.is_finishing = False
if self.active_update_profile:
@@ -2852,7 +3150,7 @@ class DownloaderApp (QWidget ):
self.is_finishing = False
self.downloaded_hash_counts.clear()
global KNOWN_NAMES, BackendDownloadThread, PostProcessorWorker, extract_post_info, clean_folder_name, MAX_FILE_THREADS_PER_POST_OR_WORKER
global KNOWN_NAMES, BackendDownloadThread, PostProcessorWorker, extract_post_info, MAX_FILE_THREADS_PER_POST_OR_WORKER
if not is_restore and not is_continuation:
self.permanently_failed_files_for_dialog.clear()
@@ -2968,7 +3266,152 @@ class DownloaderApp (QWidget ):
self.cancellation_message_logged_this_session = False
service, user_id, post_id_from_url = extract_post_info(api_url)
service, id1, id2 = extract_post_info(api_url)
if not service or not id1:
QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
return False
if service == 'discord':
server_id, channel_id = id1, id2
def discord_processing_task():
# --- FIX: Wrap the entire task in a try...finally block ---
try:
def queue_logger(message):
self.worker_to_gui_queue.put({'type': 'progress', 'payload': (message,)})
def queue_progress_label_update(message):
self.worker_to_gui_queue.put({'type': 'set_progress_label', 'payload': (message,)})
cookies = prepare_cookies_for_request(
self.use_cookie_checkbox.isChecked(), self.cookie_text_input.text(),
self.selected_cookie_filepath, self.app_base_dir, queue_logger
)
# --- SCOPE: MESSAGES (PDF CREATION) ---
if self.discord_download_scope == 'messages':
queue_logger("=" * 40)
queue_logger(f"🚀 Starting Discord PDF export for: {api_url}")
output_dir = self.dir_input.text().strip()
if not output_dir or not os.path.isdir(output_dir):
queue_logger("❌ PDF Save Error: No valid download directory selected in the UI.")
self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})
return
default_filename = f"discord_{server_id}_{channel_id or 'server'}.pdf"
output_filepath = os.path.join(output_dir, default_filename) # We'll save with a default name
all_messages, channels_to_process = [], []
server_name_for_pdf = server_id
if channel_id:
channels_to_process.append({'id': channel_id, 'name': channel_id})
else:
channels = fetch_server_channels(server_id, queue_logger, cookies)
if channels:
channels_to_process = channels
for i, channel in enumerate(channels_to_process):
queue_progress_label_update(f"Fetching from channel {i+1}/{len(channels_to_process)}: #{channel.get('name', '')}")
message_generator = fetch_channel_messages(channel['id'], queue_logger, self.cancellation_event, self.pause_event, cookies)
for message_batch in message_generator:
all_messages.extend(message_batch)
queue_progress_label_update(f"Collected {len(all_messages)} total messages. Generating PDF...")
if getattr(sys, 'frozen', False):
base_path = sys._MEIPASS
else:
base_path = self.app_base_dir
font_path = os.path.join(base_path, 'data', 'dejavu-sans', 'DejaVuSans.ttf')
success = create_pdf_from_discord_messages(
all_messages, server_name_for_pdf,
channels_to_process[0].get('name', channel_id) if len(channels_to_process) == 1 else "All Channels",
output_filepath, font_path, logger=queue_logger
)
if success:
queue_progress_label_update("✅ PDF export complete!")
else:
queue_progress_label_update("❌ PDF export failed.")
self.finished_signal.emit(0, len(all_messages), self.cancellation_event.is_set(), [])
return
# --- SCOPE: FILES (DOWNLOAD) ---
elif self.discord_download_scope == 'files':
worker_args = {
'download_root': effective_output_dir_for_run, 'known_names': list(KNOWN_NAMES),
'filter_character_list': self._parse_character_filters(self.character_input.text().strip()),
'emitter': self.worker_to_gui_queue, 'unwanted_keywords': FOLDER_NAME_STOP_WORDS,
'filter_mode': self.get_filter_mode(), 'skip_zip': self.skip_zip_checkbox.isChecked(),
'use_subfolders': self.use_subfolders_checkbox.isChecked(), 'use_post_subfolders': self.use_subfolder_per_post_checkbox.isChecked(),
'target_post_id_from_initial_url': None, 'custom_folder_name': None,
'compress_images': self.compress_images_checkbox.isChecked(), 'download_thumbnails': self.download_thumbnails_checkbox.isChecked(),
'service': service, 'user_id': server_id, 'api_url_input': api_url,
'pause_event': self.pause_event, 'cancellation_event': self.cancellation_event,
'downloaded_files': self.downloaded_files, 'downloaded_file_hashes': self.downloaded_file_hashes,
'downloaded_files_lock': self.downloaded_files_lock, 'downloaded_file_hashes_lock': self.downloaded_file_hashes_lock,
'skip_words_list': [word.strip().lower() for word in self.skip_words_input.text().strip().split(',') if word.strip()],
'skip_words_scope': self.get_skip_words_scope(), 'char_filter_scope': self.get_char_filter_scope(),
'remove_from_filename_words_list': [word.strip() for word in self.remove_from_filename_input.text().strip().split(',') if word.strip()],
'scan_content_for_images': self.scan_content_images_checkbox.isChecked(),
'manga_mode_active': False,
}
total_dl, total_skip = 0, 0
def process_channel_files(channel_id_to_process, output_directory):
nonlocal total_dl, total_skip
message_generator = fetch_channel_messages(channel_id_to_process, queue_logger, self.cancellation_event, self.pause_event, cookies)
for message_batch in message_generator:
if self.cancellation_event.is_set():
break
for message in message_batch:
if self.cancellation_event.is_set():
break
if not message.get('attachments'):
continue
worker_instance_args = worker_args.copy()
worker_instance_args.update({'post_data': message, 'download_root': output_directory, 'override_output_dir': output_directory})
worker = PostProcessorWorker(**worker_instance_args)
dl_count, skip_count, _, _, _, _, _ = worker.process()
total_dl += dl_count
total_skip += skip_count
if channel_id:
process_channel_files(channel_id, effective_output_dir_for_run)
else:
channels = fetch_server_channels(server_id, queue_logger, cookies)
if channels:
for i, channel in enumerate(channels):
if self.cancellation_event.is_set():
break
chan_id = channel.get('id')
chan_name = channel.get('name', f"channel_{chan_id}")
queue_logger("=" * 40)
queue_logger(f"Processing Channel {i+1}/{len(channels)}: '{chan_name}'")
channel_dir = os.path.join(effective_output_dir_for_run, clean_folder_name(chan_name))
os.makedirs(channel_dir, exist_ok=True)
process_channel_files(chan_id, channel_dir)
self.finished_signal.emit(total_dl, total_skip, self.cancellation_event.is_set(), [])
finally:
# This ensures the flag is reset, allowing the UI to finalize correctly
self.is_fetcher_thread_running = False
# --- FIX: Set the fetcher running flag to prevent premature finalization ---
self.is_fetcher_thread_running = True
self.set_ui_enabled(False)
self.download_thread = threading.Thread(target=discord_processing_task, daemon=True)
self.download_thread.start()
self._update_button_states_and_connections()
return True
user_id, post_id_from_url = id1, id2
if direct_api_url and not post_id_from_url and item_type_from_queue and 'post' in item_type_from_queue:
self.log_signal.emit(f"❌ CRITICAL ERROR: Could not parse post ID from the queued POST URL: {api_url}")
@@ -2981,10 +3424,6 @@ class DownloaderApp (QWidget ):
)
return False
if not service or not user_id:
QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
return False
self.save_creator_json_enabled_this_session = self.settings.value(SAVE_CREATOR_JSON_KEY, True, type=bool)
self.is_single_post_session = bool(post_id_from_url)
@@ -3028,8 +3467,6 @@ class DownloaderApp (QWidget ):
profile_processed_ids = set()
session_processed_ids = set(processed_post_ids_for_restore)
combined_processed_ids = session_processed_ids.union(profile_processed_ids)
processed_post_ids_for_this_run = list(combined_processed_ids)
@@ -3149,6 +3586,8 @@ class DownloaderApp (QWidget ):
compress_images = False;
self.compress_images_checkbox.setChecked(False)
fetch_first_enabled = self.settings.value(FETCH_FIRST_KEY, False, type=bool)
log_messages = ["=" * 40, f"🚀 Starting {'Link Extraction' if extract_links_only else ('Archive Download' if backend_filter_mode == 'archive' else 'Download')} @ {time.strftime('%Y-%m-%d %H:%M:%S')}", f" URL: {api_url}"]
current_mode_log_text = "Download"
@@ -3493,9 +3932,37 @@ class DownloaderApp (QWidget ):
'skip_current_file_flag': None,
'processed_post_ids': processed_post_ids_for_this_run,
'start_offset': start_offset_for_restore,
'fetch_first': fetch_first_enabled,
}
args_template['override_output_dir'] = override_output_dir
fetch_first_enabled = self.settings.value(FETCH_FIRST_KEY, False, type=bool)
self.last_start_download_args = args_template.copy()
if fetch_first_enabled and not post_id_from_url:
self.log_signal.emit("🚀 Starting Stage 1: Fetching all pages...")
self.is_fetching_only = True
self.set_ui_enabled(False)
self._update_button_states_and_connections()
self.progress_label.setText("Progress: Fetching pages...")
fetch_thread_args = {
'api_url_input': api_url, 'logger': self.log_signal.emit,
'start_page': start_page, 'end_page': end_page,
'manga_mode': manga_mode, 'cancellation_event': self.cancellation_event,
'pause_event': self.pause_event, 'use_cookie': use_cookie_for_this_run,
'cookie_text': cookie_text_from_input, 'selected_cookie_file': selected_cookie_file_path_for_backend,
'app_base_dir': app_base_dir_for_cookies,
'manga_filename_style_for_sort_check': self.manga_filename_style,
'processed_post_ids': processed_post_ids_for_this_run,
'fetch_all_first': True
}
self.download_thread = threading.Thread(target=self._run_fetch_only_thread, args=(fetch_thread_args,), daemon=True)
self.download_thread.start()
return True
try:
if should_use_multithreading_for_posts:
self.log_signal.emit(f" Initializing multi-threaded {current_mode_log_text.lower()} with {effective_num_post_workers} post workers...")
@@ -3526,6 +3993,7 @@ class DownloaderApp (QWidget ):
]
args_template['skip_current_file_flag'] = None
single_thread_args = {key: args_template[key] for key in dt_expected_keys if key in args_template}
single_thread_args['fetch_first'] = args_template.get('fetch_first', False)
self.start_single_threaded_download(**single_thread_args)
except Exception as e:
self._update_button_states_and_connections()
@@ -3767,134 +4235,74 @@ class DownloaderApp (QWidget ):
def _fetch_and_queue_posts(self, fetcher_args):
"""
Fetches post data and submits tasks to the pool.
This version is corrected to handle single-post fetches directly
in multi-threaded mode.
Fetches post data using the central `download_from_api` generator
and submits tasks to the worker pool. This respects the 'Fetch First' setting.
"""
global PostProcessorWorker, download_from_api, requests, json, traceback, urlparse
api_url_input_for_fetcher = fetcher_args['api_url']
global PostProcessorWorker, download_from_api
worker_args_template = fetcher_args['worker_args_template']
processed_post_ids_set = set(fetcher_args.get('processed_post_ids', []))
start_page = worker_args_template.get('start_page')
end_page = worker_args_template.get('end_page')
target_post_id = worker_args_template.get('target_post_id_from_initial_url') # Get the target post ID
logger_func = lambda msg: self.log_signal.emit(f"[Fetcher] {msg}")
try:
service = worker_args_template.get('service')
user_id = worker_args_template.get('user_id')
cancellation_event = self.cancellation_event
pause_event = self.pause_event
session_lock = self.session_lock
session_file_path = self.session_file_path
parsed_api_url = urlparse(api_url_input_for_fetcher)
headers = {'User-Agent': 'Mozilla/5.0', 'Referer': f"https://{parsed_api_url.netloc}/"}
cookies = prepare_cookies_for_request(
worker_args_template.get('use_cookie'),
worker_args_template.get('cookie_text'),
worker_args_template.get('selected_cookie_file'),
worker_args_template.get('app_base_dir'),
logger_func
# This single call now handles all fetching logic, including 'Fetch First'.
post_generator = download_from_api(
api_url_input=worker_args_template.get('api_url_input'),
logger=logger_func,
start_page=worker_args_template.get('start_page'),
end_page=worker_args_template.get('end_page'),
manga_mode=worker_args_template.get('manga_mode_active'),
cancellation_event=self.cancellation_event,
pause_event=self.pause_event,
use_cookie=worker_args_template.get('use_cookie'),
cookie_text=worker_args_template.get('cookie_text'),
selected_cookie_file=worker_args_template.get('selected_cookie_file'),
app_base_dir=worker_args_template.get('app_base_dir'),
manga_filename_style_for_sort_check=worker_args_template.get('manga_filename_style'),
processed_post_ids=worker_args_template.get('processed_post_ids', []),
fetch_all_first=worker_args_template.get('fetch_first', False)
)
if target_post_id:
logger_func(f"Mode: Single Post. Attempting direct fetch for post ID: {target_post_id}")
post_api_url = f"https://{parsed_api_url.netloc}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
try:
response = requests.get(post_api_url, headers=headers, cookies=cookies, timeout=(15, 60))
response.raise_for_status()
single_post_data = response.json()
if isinstance(single_post_data, list) and single_post_data:
single_post_data = single_post_data[0]
if not isinstance(single_post_data, dict):
raise ValueError(f"Expected a dictionary for post data, but got {type(single_post_data)}")
self.total_posts_to_process = 1
self.overall_progress_signal.emit(1, 0)
ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]
self._submit_post_to_worker_pool(
single_post_data,
worker_args_template,
worker_args_template.get('num_file_threads_for_worker', 1),
worker_args_template.get('emitter'),
ppw_expected_keys,
{}
)
except (requests.RequestException, json.JSONDecodeError, ValueError) as e:
logger_func(f"❌ Failed to fetch single post directly: {e}. Aborting.")
return
page_size = 50
offset = 0
current_page_num = 1
if start_page and start_page > 1:
offset = (start_page - 1) * page_size
current_page_num = start_page
while not cancellation_event.is_set():
while pause_event.is_set():
time.sleep(0.5)
if cancellation_event.is_set(): break
if cancellation_event.is_set(): break
if end_page and current_page_num > end_page:
logger_func(f"✅ Reached specified end page ({end_page}) for creator feed. Stopping.")
break
api_url = f"https://{parsed_api_url.netloc}/api/v1/{service}/user/{user_id}?o={offset}"
logger_func(f"Fetching post list: {api_url} (Page approx. {offset // page_size + 1})")
try:
response = requests.get(api_url, headers=headers, cookies=cookies, timeout=20)
response.raise_for_status()
posts_batch_from_api = response.json()
except (requests.RequestException, json.JSONDecodeError) as e:
logger_func(f"❌ API Error fetching posts: {e}. Aborting the entire download.")
self.cancellation_event.set()
break
if not posts_batch_from_api:
logger_func("✅ Reached end of posts (API returned no more content).")
break
new_posts_to_process = [
post for post in posts_batch_from_api if post.get('id') not in processed_post_ids_set
]
num_skipped = len(posts_batch_from_api) - len(new_posts_to_process)
if num_skipped > 0:
logger_func(f" Skipped {num_skipped} already processed post(s) from this page.")
if new_posts_to_process:
ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]
num_file_dl_threads = worker_args_template.get('num_file_threads_for_worker', 1)
emitter = worker_args_template.get('emitter')
fetch_first_enabled = worker_args_template.get('fetch_first', False)
if fetch_first_enabled:
# --- FETCH FIRST LOGIC ---
# Exhaust the generator to get all posts into one list before processing.
logger_func(" Fetch First: All posts have been fetched. Now queuing for download...")
all_posts = [post for batch in post_generator for post in batch]
self.total_posts_to_process = len(all_posts)
self.overall_progress_signal.emit(self.total_posts_to_process, self.processed_posts_count)
for post_data in all_posts:
if self.cancellation_event.is_set():
break
self._submit_post_to_worker_pool(post_data, worker_args_template, num_file_dl_threads, emitter, ppw_expected_keys, {})
else:
# --- STANDARD CONCURRENT LOGIC ---
# Iterate over the batches of posts as they are yielded by the generator.
for posts_batch_from_api in post_generator:
if self.cancellation_event.is_set():
break
processed_post_ids_set = set(worker_args_template.get('processed_post_ids', []))
new_posts_to_process = [
post for post in posts_batch_from_api if post.get('id') not in processed_post_ids_set
]
if new_posts_to_process:
for post_data in new_posts_to_process:
if cancellation_event.is_set():
if self.cancellation_event.is_set():
break
self._submit_post_to_worker_pool(post_data, worker_args_template, num_file_dl_threads, emitter, ppw_expected_keys, {})
self.total_posts_to_process += len(new_posts_to_process)
self.overall_progress_signal.emit(self.total_posts_to_process, self.processed_posts_count)
next_offset = offset + page_size
with session_lock:
if os.path.exists(session_file_path):
try:
with open(session_file_path, 'r', encoding='utf-8') as f:
session_data = json.load(f)
session_data['download_state']['last_processed_offset'] = next_offset
self._save_session_file(session_data)
except (json.JSONDecodeError, KeyError, OSError) as e:
logger_func(f"⚠️ Could not update session offset: {e}")
offset = offset + page_size
current_page_num += 1
except Exception as e:
logger_func(f"❌ Critical error during post fetching: {e}\n{traceback.format_exc(limit=2)}")
finally:
@@ -3924,9 +4332,11 @@ class DownloaderApp (QWidget ):
self.permanently_failed_files_for_dialog.extend(permanent)
self._update_error_button_count()
# Only mark the post as processed when no file failed permanently; a post with
# permanent failures must be retried on the next session/update.
if history_data and not permanent:
    # This single call handles both history and profile saving.
    self._add_to_history_candidates(history_data)
elif history_data and permanent:
    post_id = history_data.get('post_id', 'N/A')
    self.log_signal.emit(f"⚠️ Post {post_id} had permanent file failures. It will NOT be marked as processed and will be retried on the next session/update.")
self.overall_progress_signal.emit(self.total_posts_to_process, self.processed_posts_count)
@@ -4298,7 +4708,8 @@ class DownloaderApp (QWidget ):
self.progress_label.setText(self._tr("status_cancelling", "Cancelling... Please wait."))
if self.download_thread and self.download_thread.isRunning():
    # Only call QThread-specific methods if the thread supports interruption;
    # the outer guard already ensures self.download_thread is not None.
    if hasattr(self.download_thread, 'requestInterruption'):
        self.download_thread.requestInterruption()
        self.log_signal.emit(" Signaled single download thread to interrupt.")
@@ -4402,6 +4813,7 @@ class DownloaderApp (QWidget ):
self.log_signal.emit("=" * 40)
if self.download_thread:
if isinstance(self.download_thread, QThread):
try:
if hasattr(self.download_thread, 'progress_signal'): self.download_thread.progress_signal.disconnect(self.handle_main_log)
if hasattr(self.download_thread, 'add_character_prompt_signal'): self.download_thread.add_character_prompt_signal.disconnect(self.add_character_prompt_signal)
@@ -4417,9 +4829,10 @@ class DownloaderApp (QWidget ):
self.log_signal.emit(f" Note during single-thread signal disconnection: {e}")
if not self.download_thread.isRunning():
if self.download_thread:
self.download_thread.deleteLater()
self.download_thread = None
else:
self.download_thread = None
self.progress_label.setText(
f"{status_message}: "
@@ -4435,10 +4848,10 @@ class DownloaderApp (QWidget ):
"Would you like to attempt to download these failed files again?",
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
if reply == QMessageBox.Yes:
self.is_finishing = False # Allow retry session to start
self.finish_lock.release() # Release lock for the retry session
self.is_finishing = False
self.finish_lock.release()
self._start_failed_files_retry_session()
return # Exit to allow retry session to run
return
else:
self.log_signal.emit(" User chose not to retry failed files.")
self.permanently_failed_files_for_dialog.extend(self.retryable_failed_files_info)
@@ -4737,7 +5150,7 @@ class DownloaderApp (QWidget ):
self.is_paused = False
self._clear_session_file()
# Soft reset keeps user settings; a full defaults reset here would clobber them.
self._perform_soft_ui_reset()
self._load_saved_download_location()
self.main_log_output.clear()
self.external_log_output.clear()
@@ -4793,6 +5206,9 @@ class DownloaderApp (QWidget ):
# Persist settings, then refresh manga-mode-dependent UI state.
self.settings.sync()
self._update_manga_filename_style_button_text()
self.update_ui_for_manga_mode(self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False)
# Clear the "fetch first" workflow state so the next run starts clean.
self.is_fetching_only = False
self.fetched_posts_for_download = []
self.is_ready_to_download_fetched = False
self.set_ui_enabled(True)
self.log_signal.emit("✅ Application fully reset. Ready for new download.")

View File

@@ -141,12 +141,15 @@ def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_coo
def extract_post_info(url_string):
    """
    Parses a URL string to extract the service, user ID, and post ID.
    UPDATED to support Discord server/channel URLs.

    Args:
        url_string (str): The URL to parse.

    Returns:
        tuple: A tuple containing (service, id1, id2).
               For posts: (service, user_id, post_id).
               For Discord: ('discord', server_id, channel_id).
               Any element can be None.
    """
    # Guard against non-string or blank input before parsing.
    if not isinstance(url_string, str) or not url_string.strip():
        return None, None, None
@@ -155,7 +158,15 @@ def extract_post_info(url_string):
parsed_url = urlparse(url_string.strip())
path_parts = [part for part in parsed_url.path.strip('/').split('/') if part]
# Standard format: /<service>/user/<user_id>/post/<post_id>
# Check for new Discord URL format first
# e.g., /discord/server/891670433978531850/1252332668805189723
if len(path_parts) >= 3 and path_parts[0].lower() == 'discord' and path_parts[1].lower() == 'server':
service = 'discord'
server_id = path_parts[2]
channel_id = path_parts[3] if len(path_parts) >= 4 else None
return service, server_id, channel_id
# Standard creator/post format: /<service>/user/<user_id>/post/<post_id>
if len(path_parts) >= 3 and path_parts[1].lower() == 'user':
service = path_parts[0]
user_id = path_parts[2]
@@ -174,7 +185,6 @@ def extract_post_info(url_string):
return None, None, None
def get_link_platform(url):
"""
Identifies the platform of a given URL based on its domain.

View File

@@ -391,6 +391,10 @@ def setup_ui(main_app):
main_app.link_search_button.setVisible(False)
main_app.link_search_button.setFixedWidth(int(30 * scale))
log_title_layout.addWidget(main_app.link_search_button)
main_app.discord_scope_toggle_button = QPushButton("Scope: Files")
main_app.discord_scope_toggle_button.setVisible(False) # Hidden by default
main_app.discord_scope_toggle_button.setFixedWidth(int(140 * scale))
log_title_layout.addWidget(main_app.discord_scope_toggle_button)
main_app.manga_rename_toggle_button = QPushButton()
main_app.manga_rename_toggle_button.setVisible(False)
main_app.manga_rename_toggle_button.setFixedWidth(int(140 * scale))