Proxy Support

Yuvi9587 2025-12-23 21:26:49 +05:30
parent efa0abd0f1
commit f9c504b936
3 changed files with 120 additions and 44 deletions

File 1 of 3: download manager module (class DownloadManager)

@@ -3,7 +3,7 @@ import time
 import os
 import json
 import traceback
-from concurrent.futures import ThreadPoolExecutor, as_completed, Future
+from concurrent.futures import ThreadPoolExecutor, as_completed, Future, CancelledError
 from .api_client import download_from_api
 from .workers import PostProcessorWorker
 from ..config.constants import (
@@ -113,6 +113,29 @@ class DownloadManager:
        self.is_running = False  # Allow another session to start if needed
        self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)})

+    def _get_proxies_from_config(self, config):
+        """Constructs the proxy dictionary from the config."""
+        if not config.get('proxy_enabled'):
+            return None
+        host = config.get('proxy_host')
+        port = config.get('proxy_port')
+        if not host or not port:
+            return None
+        proxy_str = f"http://{host}:{port}"
+        # Add auth if provided
+        user = config.get('proxy_username')
+        password = config.get('proxy_password')
+        if user and password:
+            proxy_str = f"http://{user}:{password}@{host}:{port}"
+        return {
+            "http": proxy_str,
+            "https": proxy_str
+        }
+
    def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data):
        """
        Fetches posts from the API in batches and submits them as tasks to a thread pool.
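Note: the helper above produces exactly the mapping shape that requests expects for its proxies= keyword. A minimal standalone sketch with the same config keys; the quote() calls are an addition worth considering on top of this commit, since unescaped special characters in a username or password would otherwise corrupt the proxy URL:

from urllib.parse import quote

def build_proxies(config):
    # Mirrors _get_proxies_from_config, runnable outside the app.
    if not config.get('proxy_enabled'):
        return None
    host, port = config.get('proxy_host'), config.get('proxy_port')
    if not host or not port:
        return None
    user = config.get('proxy_username')
    password = config.get('proxy_password')
    # Percent-encode credentials so characters like '@' or ':' stay unambiguous.
    auth = f"{quote(user, safe='')}:{quote(password, safe='')}@" if user and password else ""
    url = f"http://{auth}{host}:{port}"
    # requests routes each URL scheme through the matching entry.
    return {"http": url, "https": url}

print(build_proxies({'proxy_enabled': True, 'proxy_host': '127.0.0.1', 'proxy_port': '8080'}))
# -> {'http': 'http://127.0.0.1:8080', 'https': 'http://127.0.0.1:8080'}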
@@ -127,6 +150,9 @@ class DownloadManager:
        profile_processed_ids = set(creator_profile_data.get('processed_post_ids', []))
        processed_ids = session_processed_ids.union(profile_processed_ids)

+        # Helper to get proxies
+        proxies = self._get_proxies_from_config(config)
+
        if restore_data and 'all_posts_data' in restore_data:
            # This logic for session restore remains as it relies on a pre-fetched list
            all_posts = restore_data['all_posts_data']
@@ -143,12 +169,20 @@ class DownloadManager:
            for post_data in posts_to_process:
                if self.cancellation_event.is_set():
                    break
-                worker = PostProcessorWorker(post_data, config, self.progress_queue)
+                worker_args = self._map_config_to_worker_args(post_data, config)
+                # Manually inject proxies here if _map_config_to_worker_args didn't catch it (though it should)
+                worker_args['proxies'] = proxies
+                worker = PostProcessorWorker(**worker_args)
                future = self.thread_pool.submit(worker.process)
                future.add_done_callback(self._handle_future_result)
                self.active_futures.append(future)
        else:
            # --- Streaming Logic ---
+            if proxies:
+                self._log(f" 🌐 Using Proxy: {config.get('proxy_host')}:{config.get('proxy_port')}")
+
            post_generator = download_from_api(
                api_url_input=config['api_url'],
                logger=self._log,
@@ -156,7 +190,8 @@ class DownloadManager:
                end_page=config.get('end_page'),
                cancellation_event=self.cancellation_event,
                pause_event=self.pause_event,
-                cookies_dict=None # Cookie handling handled inside client if needed, or update if passed
+                cookies_dict=None,  # Cookie handling handled inside client if needed
+                proxies=proxies  # <--- NEW: Pass proxies to API client
            )

            for post_batch in post_generator:
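The receiving side of the new proxies=proxies argument lives in the api_client module, which is not part of this diff; presumably download_from_api accepts the mapping and forwards it to each outbound request it makes while paginating. A hedged sketch of that end (simplified to a single page; the real generator paginates and honours the cancellation and pause events):

import requests

def download_from_api(api_url_input, logger, proxies=None, **kwargs):
    # Sketch only: the relevant point is that the proxies mapping
    # reaches every requests call the generator makes.
    response = requests.get(api_url_input, timeout=(10, 60), proxies=proxies)
    response.raise_for_status()
    yield response.json()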
@@ -169,23 +204,16 @@ class DownloadManager:
                new_posts_batch = [p for p in post_batch if p.get('id') not in processed_ids]

                if not new_posts_batch:
-                    # Log skipped count for UI feedback if needed, already handled in api_client usually
                    continue

                # Update total posts dynamically as we find them
                self.total_posts += len(new_posts_batch)
-                # Note: total_posts in streaming is a "running total of found posts", not absolute total

                for post_data in new_posts_batch:
                    if self.cancellation_event.is_set():
                        break
-                    # Pass explicit args or config to worker
-                    # Ideally PostProcessorWorker should accept the whole config dict or mapped args
-                    # For now assuming PostProcessorWorker takes (post_data, config_dict, queue)
-                    # OR we map the config to the args expected by PostProcessorWorker.__init__
-                    # MAPPING CONFIG TO WORKER ARGS (Safe wrapper)
+                    # MAPPING CONFIG TO WORKER ARGS
                    worker_args = self._map_config_to_worker_args(post_data, config)
                    worker = PostProcessorWorker(**worker_args)
@@ -193,7 +221,7 @@ class DownloadManager:
                    future.add_done_callback(self._handle_future_result)
                    self.active_futures.append(future)

-                # Small sleep to prevent UI freeze if batches are huge and instant
+                # Small sleep to prevent UI freeze
                time.sleep(0.01)

        except Exception as e:
@@ -205,6 +233,9 @@ class DownloadManager:
    def _map_config_to_worker_args(self, post_data, config):
        """Helper to map the flat config dict to PostProcessorWorker arguments."""
+        # Get proxy dict
+        proxies = self._get_proxies_from_config(config)
+
        # This mirrors the arguments in workers.py PostProcessorWorker.__init__
        return {
            'post_data': post_data,
@@ -221,29 +252,27 @@ class DownloadManager:
            'custom_folder_name': config.get('custom_folder_name'),
            'compress_images': config.get('compress_images'),
            'download_thumbnails': config.get('download_thumbnails'),
-            'service': config.get('service') or 'unknown', # extracted elsewhere
+            'service': config.get('service') or 'unknown',
            'user_id': config.get('user_id') or 'unknown',
            'pause_event': self.pause_event,
            'api_url_input': config.get('api_url'),
            'cancellation_event': self.cancellation_event,
-            'downloaded_files': None, # Managed per worker or global if passed
+            'downloaded_files': None,
            'downloaded_file_hashes': None,
            'downloaded_files_lock': None,
            'downloaded_file_hashes_lock': None,
-            # Add other necessary fields from config...
            'manga_mode_active': config.get('manga_mode_active'),
            'manga_filename_style': config.get('manga_filename_style'),
-            'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"), # Pass custom format
+            'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"),
            'manga_custom_date_format': config.get('manga_custom_date_format', "YYYY-MM-DD"),
            'use_multithreading': config.get('use_multithreading', True),
-            # Ensure defaults for others
+            'proxies': proxies,  # <--- NEW: Pass proxies to worker
        }

    def _setup_creator_profile(self, config):
        """Prepares the path and loads data for the current creator's profile."""
        # Extract name logic here or assume config has it
-        # ... (Same as your existing code)
-        self.current_creator_name_for_profile = "Unknown" # Placeholder
+        self.current_creator_name_for_profile = "Unknown"
        # You should ideally extract name from URL or config here if available
        return {}
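Note: _map_config_to_worker_args now builds and includes 'proxies' itself (see the hunk above), so the manual worker_args['proxies'] = proxies in the session-restore branch overwrites the key with the same value. It is harmless, but one of the two assignments could be dropped.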

File 2 of 3: workers.py (classes PostProcessorWorker and DownloadThread)

@@ -133,7 +133,8 @@ class PostProcessorWorker:
                 sfp_threshold=None,
                 handle_unknown_mode=False,
                 creator_name_cache=None,
-                 add_info_in_pdf=False
+                 add_info_in_pdf=False,
+                 proxies=None
                 ):
        self.post = post_data
@@ -208,9 +209,8 @@ class PostProcessorWorker:
        self.sfp_threshold = sfp_threshold
        self.handle_unknown_mode = handle_unknown_mode
        self.creator_name_cache = creator_name_cache
-        #-- New assign --
        self.add_info_in_pdf = add_info_in_pdf
-        #-- New assign --
+        self.proxies = proxies

        if self.compress_images and Image is None:
@@ -263,7 +263,7 @@ class PostProcessorWorker:
            new_url = parsed_url._replace(netloc=new_domain).geturl()
            try:
-                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True) as resp:
+                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies) as resp:
                    if resp.status_code == 200:
                        return new_url
            except requests.RequestException:
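Threading proxies=self.proxies through each call works, but every future call site has to remember the keyword. An alternative worth weighing (a sketch, not what this commit does) is a per-worker requests.Session whose session-level proxies apply to all requests automatically:

import requests

class ProxiedHttp:
    """Wraps a Session so proxy settings are applied in one place."""
    def __init__(self, proxies=None):
        self.session = requests.Session()
        if proxies:
            self.session.proxies.update(proxies)  # used by every request on this session

    def head(self, url, **kwargs):
        return self.session.head(url, **kwargs)

    def get(self, url, **kwargs):
        return self.session.get(url, **kwargs)

A Session also pools connections, which helps when probing many subdomains as in the hunk above.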
@@ -338,7 +338,8 @@ class PostProcessorWorker:
        api_original_filename_for_size_check = file_info.get('_original_name_for_log', file_info.get('name'))
        try:
            # Use a stream=True HEAD request to get headers without downloading the body
-            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True) as head_response:
+            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies) as head_response:
                head_response.raise_for_status()
                content_length = head_response.headers.get('Content-Length')
                if content_length:
@@ -672,7 +673,7 @@ class PostProcessorWorker:
        current_url_to_try = file_url
-        response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file)
+        response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)

        if response.status_code == 403 and ('kemono.' in current_url_to_try or 'coomer.' in current_url_to_try):
            self.logger(f" ⚠️ Got 403 Forbidden for '{api_original_filename}'. Attempting subdomain rotation...")
@@ -681,8 +682,7 @@ class PostProcessorWorker:
                self.logger(f" Retrying with new URL: {new_url}")
                file_url = new_url
                response.close()  # Close the old response
-                response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file)
+                response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)

        response.raise_for_status()

        # --- REVISED AND MOVED SIZE CHECK LOGIC ---
@@ -1105,7 +1105,7 @@ class PostProcessorWorker:
            'Accept': 'text/css'
        }
        cookies = prepare_cookies_for_request(self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger, target_domain=api_domain)
-        full_post_data = fetch_single_post_data(api_domain, self.service, self.user_id, post_id, headers, self.logger, cookies_dict=cookies)
+        full_post_data = fetch_single_post_data(api_domain, self.service, self.user_id, post_id, headers, self.logger, cookies_dict=cookies, proxies=self.proxies)
        if full_post_data:
            self.logger(" ✅ Full post data fetched successfully.")
            self.post = full_post_data
@@ -1306,13 +1306,17 @@ class PostProcessorWorker:
                    if not any(d in api_domain_for_comments.lower() for d in ['kemono.su', 'kemono.party', 'kemono.cr', 'coomer.su', 'coomer.party', 'coomer.st']):
                        self.logger(f"⚠️ Unrecognized domain '{api_domain_for_comments}' for comment API. Defaulting based on service.")
                        api_domain_for_comments = "kemono.cr" if "kemono" in self.service.lower() else "coomer.st"

+                    # Fetch comments (Indented correctly now)
                    comments_data = fetch_post_comments(
                        api_domain_for_comments, self.service, self.user_id, post_id,
                        headers, self.logger, self.cancellation_event, self.pause_event,
                        cookies_dict=prepare_cookies_for_request(
                            self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger
-                        )
+                        ),
+                        proxies=self.proxies
                    )

                    if comments_data:
                        self.logger(f" Fetched {len(comments_data)} comments for post {post_id}.")
                        for comment_item_idx, comment_item in enumerate(comments_data):
@@ -1340,7 +1344,7 @@ class PostProcessorWorker:
                        self.logger(f" ⚠️ Error fetching or processing comments for post {post_id}: {e_fetch_comment}")
                    except Exception as e_generic_comment:
                        self.logger(f" ❌ Unexpected error during comment processing for post {post_id}: {e_generic_comment}\n{traceback.format_exc(limit=2)}")
+                    self.logger(f" [Char Scope: Comments] Phase 2 Result: post_is_candidate_by_comment_char_match = {post_is_candidate_by_comment_char_match}")
                else:
                    self.logger(f" [Char Scope: Comments] Phase 2: Skipped comment check for post ID '{post_id}' because a file match already made it a candidate.")
@@ -2327,9 +2331,10 @@ class DownloadThread(QThread):
                 manga_custom_filename_format="{published} {title}",
                 manga_custom_date_format="YYYY-MM-DD",
                 sfp_threshold=None,
-                 creator_name_cache=None
+                 creator_name_cache=None,
+                 proxies=None
                 ):
        super().__init__()
        self.api_url_input = api_url_input
        self.output_dir = output_dir
@@ -2404,6 +2409,7 @@ class DownloadThread(QThread):
        self.domain_override = domain_override
        self.sfp_threshold = sfp_threshold
        self.creator_name_cache = creator_name_cache
+        self.proxies = proxies

        if self.compress_images and Image is None:
            self.logger("⚠️ Image compression disabled: Pillow library not found (DownloadThread).")
@@ -2437,6 +2443,7 @@ class DownloadThread(QThread):
            self.logger(" Starting post fetch (single-threaded download process)...")

+            # --- FIX: Removed duplicate proxies argument here ---
            post_generator = download_from_api(
                self.api_url_input,
                logger=self.logger,
@@ -2451,7 +2458,8 @@ class DownloadThread(QThread):
                app_base_dir=self.app_base_dir,
                manga_filename_style_for_sort_check=self.manga_filename_style if self.manga_mode_active else None,
                processed_post_ids=self.processed_post_ids_set,
-                fetch_all_first=self.fetch_first
+                fetch_all_first=self.fetch_first,
+                proxies=self.proxies
            )

            for posts_batch_data in post_generator:
@@ -2464,6 +2472,7 @@ class DownloadThread(QThread):
                        was_process_cancelled = True
                        break

+                    # --- FIX: Ensure 'proxies' is in this dictionary ---
                    worker_args = {
                        'post_data': individual_post_data,
                        'emitter': worker_signals_obj,
@@ -2532,7 +2541,8 @@ class DownloadThread(QThread):
                        'archive_only_mode': self.archive_only_mode,
                        'manga_custom_filename_format': self.manga_custom_filename_format,
                        'manga_custom_date_format': self.manga_custom_date_format,
-                        'sfp_threshold': self.sfp_threshold
+                        'sfp_threshold': self.sfp_threshold,
+                        'proxies': self.proxies
                    }
                    post_processing_worker = PostProcessorWorker(**worker_args)
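Both constructors default the new parameter to proxies=None, and requests falls back to its normal behaviour (including any environment-configured proxies) when proxies is None, so existing call sites that never pass the argument are unaffected. A stand-in check of that contract:

class Worker:
    # Stand-in for PostProcessorWorker; the real __init__ takes many more arguments.
    def __init__(self, proxies=None):
        self.proxies = proxies  # None -> requests default behaviour

assert Worker().proxies is None
assert Worker(proxies={'http': 'http://127.0.0.1:8080'}).proxies is not None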

File 3 of 3: main application GUI (class DownloaderApp)

@@ -844,6 +844,19 @@ class DownloaderApp (QWidget ):
        settings['keep_duplicates_mode'] = self.keep_duplicates_mode
        settings['keep_duplicates_limit'] = self.keep_duplicates_limit

+        settings['proxy_enabled'] = self.settings.value(PROXY_ENABLED_KEY, False, type=bool)
+        settings['proxy_host'] = self.settings.value(PROXY_HOST_KEY, "", type=str)
+        settings['proxy_port'] = self.settings.value(PROXY_PORT_KEY, "", type=str)
+        settings['proxy_username'] = self.settings.value(PROXY_USERNAME_KEY, "", type=str)
+        settings['proxy_password'] = self.settings.value(PROXY_PASSWORD_KEY, "", type=str)
+
+        settings['proxies'] = None
+        if settings['proxy_enabled'] and settings['proxy_host'] and settings['proxy_port']:
+            proxy_str = f"http://{settings['proxy_host']}:{settings['proxy_port']}"
+            if settings['proxy_username'] and settings['proxy_password']:
+                proxy_str = f"http://{settings['proxy_username']}:{settings['proxy_password']}@{settings['proxy_host']}:{settings['proxy_port']}"
+            settings['proxies'] = {'http': proxy_str, 'https': proxy_str}
+
        return settings
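This block rebuilds the proxy URL with the same string logic as DownloadManager._get_proxies_from_config. A shared helper would keep the two paths from drifting apart; a sketch (hypothetical location, e.g. a shared utils module):

def proxies_from_parts(enabled, host, port, username="", password=""):
    # Single source of truth for the proxy mapping used by both the GUI
    # and the download manager.
    if not (enabled and host and port):
        return None
    auth = f"{username}:{password}@" if username and password else ""
    url = f"http://{auth}{host}:{port}"
    return {'http': url, 'https': url}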
@@ -4627,6 +4640,14 @@ class DownloaderApp (QWidget ):
        if should_use_multithreading_for_posts:
            log_messages.append(f" Number of Post Worker Threads: {effective_num_post_workers}")

+        proxy_enabled_log = self.settings.value(PROXY_ENABLED_KEY, False, type=bool)
+        if proxy_enabled_log:
+            p_host = self.settings.value(PROXY_HOST_KEY, "")
+            p_port = self.settings.value(PROXY_PORT_KEY, "")
+            log_messages.append(f" Proxy: Enabled ({p_host}:{p_port})")
+        else:
+            log_messages.append(f" Proxy: Disabled")
+
        if domain_override_command:
            self.log_signal.emit(f" Domain Override Active: Will probe for the correct 'n*' subdomain on the '.{domain_override_command}' domain for each file.")
@@ -4639,7 +4660,7 @@ class DownloaderApp (QWidget ):
        self.set_ui_enabled(False)
        from src.config.constants import FOLDER_NAME_STOP_WORDS

+        current_proxies = self._get_current_ui_settings_as_dict().get('proxies')
        args_template = {
            'api_url_input': api_url,
            'download_root': effective_output_dir_for_run,
@@ -4716,6 +4737,7 @@ class DownloaderApp (QWidget ):
            'sfp_threshold': download_commands.get('sfp_threshold'),
            'handle_unknown_mode': handle_unknown_command,
            'add_info_in_pdf': self.add_info_in_pdf_setting,
+            'proxies': current_proxies
        }
        args_template['override_output_dir'] = override_output_dir
@@ -4741,7 +4763,8 @@ class DownloaderApp (QWidget ):
            'app_base_dir': app_base_dir_for_cookies,
            'manga_filename_style_for_sort_check': self.manga_filename_style,
            'processed_post_ids': processed_post_ids_for_this_run,
-            'fetch_all_first': True
+            'fetch_all_first': True,
+            'proxies': self._get_current_ui_settings_as_dict().get('proxies')
        }
        self.download_thread = threading.Thread(target=self._run_fetch_only_thread, args=(fetch_thread_args,), daemon=True)
@@ -5097,8 +5120,7 @@ class DownloaderApp (QWidget ):
        ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]

-        # 1. Define all LIVE RUNTIME arguments.
-        # These are taken from the current app state and are the same for all workers.
+        current_proxies = self._get_current_ui_settings_as_dict().get('proxies')
        live_runtime_args = {
            'emitter': self.worker_to_gui_queue,
            'creator_name_cache': self.creator_name_cache,
@@ -5129,6 +5151,7 @@ class DownloaderApp (QWidget ):
            'cookie_text': self.cookie_text_input.text(),
            'selected_cookie_file': self.selected_cookie_filepath,
            'add_info_in_pdf': self.add_info_in_pdf_setting,
+            'proxies': current_proxies,
        }

        # 2. Define DEFAULTS for all settings that *should* be in the profile.
@@ -5364,6 +5387,19 @@ class DownloaderApp (QWidget ):
        self._update_manga_filename_style_button_text()
        self._update_multipart_toggle_button_text()

+        if 'proxy_enabled' in settings:
+            self.settings.setValue(PROXY_ENABLED_KEY, settings['proxy_enabled'])
+        if 'proxy_host' in settings:
+            self.settings.setValue(PROXY_HOST_KEY, settings['proxy_host'])
+        if 'proxy_port' in settings:
+            self.settings.setValue(PROXY_PORT_KEY, settings['proxy_port'])
+        if 'proxy_username' in settings:
+            self.settings.setValue(PROXY_USERNAME_KEY, settings['proxy_username'])
+        if 'proxy_password' in settings:
+            self.settings.setValue(PROXY_PASSWORD_KEY, settings['proxy_password'])
+        self.settings.sync()
    def start_multi_threaded_download(self, num_post_workers, **kwargs):
        """
        Initializes and starts the multi-threaded download process.
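One caveat on the persistence above: QSettings stores values unencrypted (the Windows registry or an .ini/.plist file, depending on platform), so the proxy password is readable on disk. If that matters, the system keychain via the third-party keyring package is a common alternative; a sketch, assuming that dependency is acceptable:

import keyring  # pip install keyring

SERVICE = "downloader-proxy"  # hypothetical service identifier

def save_proxy_password(username, password):
    keyring.set_password(SERVICE, username, password)

def load_proxy_password(username):
    return keyring.get_password(SERVICE, username)  # None if nothing stored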
@@ -5424,7 +5460,8 @@ class DownloaderApp (QWidget ):
            app_base_dir=worker_args_template.get('app_base_dir'),
            manga_filename_style_for_sort_check=worker_args_template.get('manga_filename_style'),
            processed_post_ids=worker_args_template.get('processed_post_ids', []),
-            fetch_all_first=worker_args_template.get('fetch_first', False)
+            fetch_all_first=worker_args_template.get('fetch_first', False),
+            proxies=worker_args_template.get('proxies')
        )
        ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]