Mirror of https://github.com/Yuvi9587/Kemono-Downloader.git
Synced 2025-12-29 16:14:44 +00:00

Compare commits: 675646e763...5a8c151c97 (7 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5a8c151c97 | |
| | 50ba60a461 | |
| | 23521e7060 | |
| | f9c504b936 | |
| | efa0abd0f1 | |
| | 7d76d00470 | |
| | 1494d3f456 | |
```diff
@@ -68,6 +68,14 @@ DISCORD_TOKEN_KEY = "discord/token"
 POST_DOWNLOAD_ACTION_KEY = "postDownloadAction"
+
+# --- Proxy / Network Keys ---
+PROXY_ENABLED_KEY = "proxy/enabled"
+PROXY_HOST_KEY = "proxy/host"
+PROXY_PORT_KEY = "proxy/port"
+PROXY_USERNAME_KEY = "proxy/username"
+PROXY_PASSWORD_KEY = "proxy/password"
+
 
 # --- UI Constants and Identifiers ---
 HTML_PREFIX = "<!HTML!>"
 LOG_DISPLAY_LINKS = "links"
```
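For context, these `proxy/*` keys live in the application's QSettings store and are later combined into a `requests`-style proxies mapping (see `_get_proxies_from_config` and `_load_proxy_settings` further down in this compare). A minimal sketch of that round trip, with a hypothetical settings scope:

```python
# Sketch only: how the proxy/* keys map onto a requests proxies dict.
# The QSettings scope below is a placeholder; the real app supplies its own.
from PyQt5.QtCore import QSettings

settings = QSettings("ExampleOrg", "KemonoDownloader")
if settings.value("proxy/enabled", False, type=bool):
    host = settings.value("proxy/host", "", type=str)
    port = settings.value("proxy/port", "", type=str)
    proxy_url = f"http://{host}:{port}"
    proxies = {"http": proxy_url, "https": proxy_url}
else:
    proxies = None  # requests connects directly when proxies is None
```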
```diff
@@ -12,7 +12,7 @@ from ..config.constants import (
 )
 
 
-def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
+def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_event=None, pause_event=None, cookies_dict=None, proxies=None):
     """
     Fetches a single page of posts from the API with robust retry logic.
     """
@@ -41,7 +41,7 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
             logger(log_message)
 
         try:
-            with requests.get(paginated_url, headers=headers, timeout=(15, 60), cookies=cookies_dict) as response:
+            with requests.get(paginated_url, headers=headers, timeout=(15, 60), cookies=cookies_dict, proxies=proxies) as response:
                 response.raise_for_status()
                 response.encoding = 'utf-8'
                 return response.json()
@@ -81,7 +81,7 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
 
    raise RuntimeError(f"Failed to fetch page {paginated_url} after all attempts.")
 
-def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logger, cookies_dict=None):
+def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logger, cookies_dict=None, proxies=None):
     """
     Fetches the full data, including the 'content' field, for a single post using cloudscraper.
     """
@@ -92,7 +92,7 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
     scraper = None
     try:
         scraper = cloudscraper.create_scraper()
-        response = scraper.get(post_api_url, headers=headers, timeout=(15, 300), cookies=cookies_dict)
+        response = scraper.get(post_api_url, headers=headers, timeout=(15, 300), cookies=cookies_dict, proxies=proxies)
         response.raise_for_status()
 
         full_post_data = response.json()
@@ -111,7 +111,7 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
         scraper.close()
 
 
-def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
+def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger, cancellation_event=None, pause_event=None, cookies_dict=None, proxies=None):
     """Fetches all comments for a specific post."""
     if cancellation_event and cancellation_event.is_set():
         raise RuntimeError("Comment fetch operation cancelled by user.")
@@ -120,7 +120,7 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
     logger(f"   Fetching comments: {comments_api_url}")
 
     try:
-        with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict) as response:
+        with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict, proxies=proxies) as response:
             response.raise_for_status()
             response.encoding = 'utf-8'
             return response.json()
@@ -143,7 +143,8 @@ def download_from_api(
     app_base_dir=None,
     manga_filename_style_for_sort_check=None,
     processed_post_ids=None,
-    fetch_all_first=False
+    fetch_all_first=False,
+    proxies=None
 ):
     parsed_input_url_for_domain = urlparse(api_url_input)
     api_domain = parsed_input_url_for_domain.netloc
@@ -179,7 +180,7 @@ def download_from_api(
         direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
         logger(f"   Attempting direct fetch for target post: {direct_post_api_url}")
         try:
-            with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api) as direct_response:
+            with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api, proxies=proxies) as direct_response:
                 direct_response.raise_for_status()
                 direct_response.encoding = 'utf-8'
                 direct_post_data = direct_response.json()
@@ -249,7 +250,7 @@
                 logger(f"   Manga Mode: Reached specified end page ({end_page}). Stopping post fetch.")
                 break
             try:
-                posts_batch_manga = fetch_posts_paginated(api_base_url, headers, current_offset_manga, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
+                posts_batch_manga = fetch_posts_paginated(api_base_url, headers, current_offset_manga, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api, proxies=proxies)
                 if not isinstance(posts_batch_manga, list):
                     logger(f"❌ API Error (Manga Mode): Expected list of posts, got {type(posts_batch_manga)}.")
                     break
@@ -351,7 +352,7 @@
             break
 
         try:
-            raw_posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
+            raw_posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api, proxies=proxies)
            if not isinstance(raw_posts_batch, list):
                logger(f"❌ API Error: Expected list of posts, got {type(raw_posts_batch)} at page {current_page_num} (offset {current_offset}).")
                break
```
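Every change in the API client above follows the same pattern: each fetch helper grows an optional `proxies` parameter that is handed straight to `requests`, so the `None` default preserves the old direct-connection behavior at every existing call site. A standalone sketch of that contract (example URL only):

```python
import requests

def fetch_json(url, proxies=None, timeout=(15, 60)):
    """proxies=None keeps the pre-proxy behavior; a mapping routes via the proxy."""
    with requests.get(url, timeout=timeout, proxies=proxies) as response:
        response.raise_for_status()
        response.encoding = 'utf-8'
        return response.json()

# Direct connection, as before:
#   fetch_json("https://kemono.cr/api/v1/...")
# Through a proxy:
#   fetch_json("https://kemono.cr/api/v1/...",
#              proxies={"http": "http://127.0.0.1:8080",
#                       "https": "http://127.0.0.1:8080"})
```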
```diff
@@ -13,9 +13,17 @@ class DeviantArtClient:
 
     def __init__(self, logger_func=print):
         self.session = requests.Session()
+        # Headers matching 1.py (Firefox)
         self.session.headers.update({
-            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
-            'Accept': '*/*',
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0",
+            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
+            "Accept-Language": "en-US,en;q=0.5",
+            "Connection": "keep-alive",
+            "Upgrade-Insecure-Requests": "1",
+            "Sec-Fetch-Dest": "document",
+            "Sec-Fetch-Mode": "navigate",
+            "Sec-Fetch-Site": "none",
+            "Sec-Fetch-User": "?1",
         })
         self.access_token = None
         self.logger = logger_func
@@ -60,7 +68,19 @@ class DeviantArtClient:
             try:
                 resp = self.session.get(url, params=params, timeout=20)
 
-                # Handle Token Expiration (401)
+                # 429: Rate Limit (Retry infinitely like 1.py)
+                if resp.status_code == 429:
+                    retry_after = resp.headers.get('Retry-After')
+                    if retry_after:
+                        sleep_time = int(retry_after) + 1
+                    else:
+                        sleep_time = 5  # Default sleep from 1.py
+
+                    self._log_once(sleep_time, f" [DeviantArt] ⚠️ Rate limit (429). Sleeping {sleep_time}s...")
+                    time.sleep(sleep_time)
+                    continue
+
+                # 401: Token Expired (Refresh and Retry)
                 if resp.status_code == 401:
                     self.logger("   [DeviantArt] Token expired. Refreshing...")
                     if self.authenticate():
@@ -69,60 +89,45 @@ class DeviantArtClient:
                     else:
                         raise Exception("Failed to refresh token")
 
-                # Handle Rate Limiting (429)
-                if resp.status_code == 429:
-                    if retries < max_retries:
-                        retry_after = resp.headers.get('Retry-After')
-
-                        if retry_after:
-                            sleep_time = int(retry_after) + 1
-                            msg = f" [DeviantArt] ⚠️ Rate limit (Server says wait {sleep_time}s)."
-                        else:
-                            sleep_time = backoff_delay * (2 ** retries)
-                            msg = f" [DeviantArt] ⚠️ Rate limit reached. Retrying in {sleep_time}s..."
-
-                        # --- THREAD-SAFE LOGGING CHECK ---
-                        should_log = False
-                        with self.log_lock:
-                            if sleep_time not in self.logged_waits:
-                                self.logged_waits.add(sleep_time)
-                                should_log = True
-
-                        if should_log:
-                            self.logger(msg)
-
-                        time.sleep(sleep_time)
-                        retries += 1
-                        continue
-                    else:
-                        resp.raise_for_status()
+                if 400 <= resp.status_code < 500:
+                    resp.raise_for_status()  # This raises immediately, breaking the loop
+
+                if 500 <= resp.status_code < 600:
+                    resp.raise_for_status()
 
                 resp.raise_for_status()
 
-                # Clear log history on success so we get warned again if limits return later
                 with self.log_lock:
-                    if self.logged_waits:
-                        self.logged_waits.clear()
+                    self.logged_waits.clear()
 
                 return resp.json()
 
+            except requests.exceptions.HTTPError as e:
+                if e.response is not None and 400 <= e.response.status_code < 500:
+                    raise e
+
+                # Otherwise fall through to general retry logic (for 5xx)
+                pass
+
             except requests.exceptions.RequestException as e:
+                # Network errors / 5xx errors -> Retry
                 if retries < max_retries:
-                    # Using the lock here too to prevent connection error spam
-                    should_log = False
-                    with self.log_lock:
-                        if "conn_error" not in self.logged_waits:
-                            self.logged_waits.add("conn_error")
-                            should_log = True
-
-                    if should_log:
-                        self.logger(f" [DeviantArt] Connection error: {e}. Retrying...")
-
-                    time.sleep(2)
+                    self._log_once("conn_error", f" [DeviantArt] Connection error: {e}. Retrying...")
+                    time.sleep(backoff_delay)
                     retries += 1
                     continue
                 raise e
 
+    def _log_once(self, key, message):
+        """Helper to avoid spamming the same log message during loops."""
+        should_log = False
+        with self.log_lock:
+            if key not in self.logged_waits:
+                self.logged_waits.add(key)
+                should_log = True
+        if should_log:
+            self.logger(message)
+
     def get_deviation_uuid(self, url):
         """Scrapes the deviation page to find the UUID."""
         try:
```
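The new `_log_once` helper is what keeps the retry loops above from flooding the UI log: a message is emitted once per key, and `logged_waits` is cleared again after the next successful response. The same pattern in isolation, as a runnable sketch:

```python
import threading

class OnceLogger:
    """Minimal sketch of the dedup pattern behind DeviantArtClient._log_once."""
    def __init__(self, logger=print):
        self.logger = logger
        self.log_lock = threading.Lock()
        self.logged_waits = set()

    def log_once(self, key, message):
        with self.log_lock:
            if key in self.logged_waits:
                return  # this condition was already reported
            self.logged_waits.add(key)
        self.logger(message)

ol = OnceLogger()
ol.log_once("conn_error", "Connection error. Retrying...")  # printed
ol.log_once("conn_error", "Connection error. Retrying...")  # suppressed
```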
```diff
@@ -139,13 +144,17 @@ class DeviantArtClient:
 
     def get_deviation_content(self, uuid):
         """Fetches download info."""
+        # 1. Try high-res download endpoint
         try:
             data = self._api_call(f"/deviation/download/{uuid}")
             if 'src' in data:
                 return data
         except:
+            # If 400/403 (Not downloadable), we fail silently here
+            # and proceed to step 2 (Metadata fallback)
             pass
 
+        # 2. Fallback to standard content
         try:
             meta = self._api_call(f"/deviation/{uuid}")
             if 'content' in meta:
```
```diff
@@ -3,7 +3,7 @@ import time
 import os
 import json
 import traceback
-from concurrent.futures import ThreadPoolExecutor, as_completed, Future
+from concurrent.futures import ThreadPoolExecutor, as_completed, Future, CancelledError
 from .api_client import download_from_api
 from .workers import PostProcessorWorker
 from ..config.constants import (
@@ -113,6 +113,29 @@ class DownloadManager:
         self.is_running = False  # Allow another session to start if needed
         self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)})
 
+    def _get_proxies_from_config(self, config):
+        """Constructs the proxy dictionary from the config."""
+        if not config.get('proxy_enabled'):
+            return None
+
+        host = config.get('proxy_host')
+        port = config.get('proxy_port')
+        if not host or not port:
+            return None
+
+        proxy_str = f"http://{host}:{port}"
+
+        # Add auth if provided
+        user = config.get('proxy_username')
+        password = config.get('proxy_password')
+        if user and password:
+            proxy_str = f"http://{user}:{password}@{host}:{port}"
+
+        return {
+            "http": proxy_str,
+            "https": proxy_str
+        }
+
     def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data):
         """
         Fetches posts from the API in batches and submits them as tasks to a thread pool.
```
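The mapping returned by `_get_proxies_from_config` is exactly the shape `requests` expects for its `proxies=` parameter, including inline basic-auth credentials; note the helper only attaches credentials when both username and password are set. A quick sketch with made-up values:

```python
import requests

# Made-up host and credentials, matching the shape the helper returns.
proxies = {
    "http": "http://alice:secret@127.0.0.1:8080",
    "https": "http://alice:secret@127.0.0.1:8080",  # HTTPS is tunneled via CONNECT
}
# No per-call changes are needed beyond passing the mapping:
resp = requests.get("https://example.com", proxies=proxies, timeout=10)
```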
```diff
@@ -127,6 +150,9 @@ class DownloadManager:
         profile_processed_ids = set(creator_profile_data.get('processed_post_ids', []))
         processed_ids = session_processed_ids.union(profile_processed_ids)
 
+        # Helper to get proxies
+        proxies = self._get_proxies_from_config(config)
+
         if restore_data and 'all_posts_data' in restore_data:
             # This logic for session restore remains as it relies on a pre-fetched list
             all_posts = restore_data['all_posts_data']
@@ -143,12 +169,20 @@
             for post_data in posts_to_process:
                 if self.cancellation_event.is_set():
                     break
-                worker = PostProcessorWorker(post_data, config, self.progress_queue)
+
+                worker_args = self._map_config_to_worker_args(post_data, config)
+                # Manually inject proxies here if _map_config_to_worker_args didn't catch it (though it should)
+                worker_args['proxies'] = proxies
+
+                worker = PostProcessorWorker(**worker_args)
                 future = self.thread_pool.submit(worker.process)
                 future.add_done_callback(self._handle_future_result)
                 self.active_futures.append(future)
         else:
             # --- Streaming Logic ---
+            if proxies:
+                self._log(f"   🌐 Using Proxy: {config.get('proxy_host')}:{config.get('proxy_port')}")
+
             post_generator = download_from_api(
                 api_url_input=config['api_url'],
                 logger=self._log,
@@ -156,7 +190,8 @@
                 end_page=config.get('end_page'),
                 cancellation_event=self.cancellation_event,
                 pause_event=self.pause_event,
-                cookies_dict=None # Cookie handling handled inside client if needed, or update if passed
+                cookies_dict=None,  # Cookie handling handled inside client if needed
+                proxies=proxies  # <--- NEW: Pass proxies to API client
             )
 
             for post_batch in post_generator:
@@ -169,23 +204,16 @@
                 new_posts_batch = [p for p in post_batch if p.get('id') not in processed_ids]
 
                 if not new_posts_batch:
-                    # Log skipped count for UI feedback if needed, already handled in api_client usually
                     continue
 
                 # Update total posts dynamically as we find them
                 self.total_posts += len(new_posts_batch)
-                # Note: total_posts in streaming is a "running total of found posts", not absolute total
 
                 for post_data in new_posts_batch:
                     if self.cancellation_event.is_set():
                         break
 
-                    # Pass explicit args or config to worker
-                    # Ideally PostProcessorWorker should accept the whole config dict or mapped args
-                    # For now assuming PostProcessorWorker takes (post_data, config_dict, queue)
-                    # OR we map the config to the args expected by PostProcessorWorker.__init__
-
-                    # MAPPING CONFIG TO WORKER ARGS (Safe wrapper)
+                    # MAPPING CONFIG TO WORKER ARGS
                     worker_args = self._map_config_to_worker_args(post_data, config)
                     worker = PostProcessorWorker(**worker_args)
 
@@ -193,7 +221,7 @@
                     future.add_done_callback(self._handle_future_result)
                     self.active_futures.append(future)
 
-                # Small sleep to prevent UI freeze if batches are huge and instant
+                # Small sleep to prevent UI freeze
                 time.sleep(0.01)
 
         except Exception as e:
@@ -205,6 +233,9 @@
 
     def _map_config_to_worker_args(self, post_data, config):
         """Helper to map the flat config dict to PostProcessorWorker arguments."""
+        # Get proxy dict
+        proxies = self._get_proxies_from_config(config)
+
         # This mirrors the arguments in workers.py PostProcessorWorker.__init__
         return {
             'post_data': post_data,
@@ -221,29 +252,27 @@
             'custom_folder_name': config.get('custom_folder_name'),
             'compress_images': config.get('compress_images'),
             'download_thumbnails': config.get('download_thumbnails'),
-            'service': config.get('service') or 'unknown', # extracted elsewhere
+            'service': config.get('service') or 'unknown',
             'user_id': config.get('user_id') or 'unknown',
             'pause_event': self.pause_event,
             'api_url_input': config.get('api_url'),
             'cancellation_event': self.cancellation_event,
-            'downloaded_files': None, # Managed per worker or global if passed
+            'downloaded_files': None,
             'downloaded_file_hashes': None,
             'downloaded_files_lock': None,
             'downloaded_file_hashes_lock': None,
-            # Add other necessary fields from config...
             'manga_mode_active': config.get('manga_mode_active'),
             'manga_filename_style': config.get('manga_filename_style'),
-            'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"), # Pass custom format
+            'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"),
             'manga_custom_date_format': config.get('manga_custom_date_format', "YYYY-MM-DD"),
             'use_multithreading': config.get('use_multithreading', True),
-            # Ensure defaults for others
+            'proxies': proxies,  # <--- NEW: Pass proxies to worker
         }
 
     def _setup_creator_profile(self, config):
         """Prepares the path and loads data for the current creator's profile."""
         # Extract name logic here or assume config has it
-        # ... (Same as your existing code)
-        self.current_creator_name_for_profile = "Unknown" # Placeholder
+        self.current_creator_name_for_profile = "Unknown"
         # You should ideally extract name from URL or config here if available
         return {}
```
```diff
@@ -133,7 +133,8 @@ class PostProcessorWorker:
         sfp_threshold=None,
         handle_unknown_mode=False,
         creator_name_cache=None,
-        add_info_in_pdf=False
+        add_info_in_pdf=False,
+        proxies=None
     ):
         self.post = post_data
@@ -208,9 +209,8 @@
         self.sfp_threshold = sfp_threshold
         self.handle_unknown_mode = handle_unknown_mode
         self.creator_name_cache = creator_name_cache
-        #-- New assign --
         self.add_info_in_pdf = add_info_in_pdf
-        #-- New assign --
+        self.proxies = proxies
 
         if self.compress_images and Image is None:
@@ -263,7 +263,7 @@
             new_url = parsed_url._replace(netloc=new_domain).geturl()
 
             try:
-                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True) as resp:
+                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies) as resp:
                     if resp.status_code == 200:
                         return new_url
             except requests.RequestException:
@@ -338,7 +338,8 @@
         api_original_filename_for_size_check = file_info.get('_original_name_for_log', file_info.get('name'))
         try:
             # Use a stream=True HEAD request to get headers without downloading the body
-            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True) as head_response:
+            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies) as head_response:
+
                 head_response.raise_for_status()
                 content_length = head_response.headers.get('Content-Length')
                 if content_length:
@@ -672,7 +673,7 @@
 
             current_url_to_try = file_url
 
-            response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file)
+            response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
 
             if response.status_code == 403 and ('kemono.' in current_url_to_try or 'coomer.' in current_url_to_try):
                 self.logger(f"   ⚠️ Got 403 Forbidden for '{api_original_filename}'. Attempting subdomain rotation...")
@@ -681,8 +682,7 @@
                     self.logger(f"   Retrying with new URL: {new_url}")
                     file_url = new_url
                     response.close()  # Close the old response
-                    response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file)
-
+                    response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
 
             response.raise_for_status()
 
             # --- REVISED AND MOVED SIZE CHECK LOGIC ---
@@ -1105,7 +1105,7 @@
                 'Accept': 'text/css'
             }
             cookies = prepare_cookies_for_request(self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger, target_domain=api_domain)
-            full_post_data = fetch_single_post_data(api_domain, self.service, self.user_id, post_id, headers, self.logger, cookies_dict=cookies)
+            full_post_data = fetch_single_post_data(api_domain, self.service, self.user_id, post_id, headers, self.logger, cookies_dict=cookies, proxies=self.proxies)
             if full_post_data:
                 self.logger("   ✅ Full post data fetched successfully.")
                 self.post = full_post_data
@@ -1306,13 +1306,17 @@
                 if not any(d in api_domain_for_comments.lower() for d in ['kemono.su', 'kemono.party', 'kemono.cr', 'coomer.su', 'coomer.party', 'coomer.st']):
                     self.logger(f"⚠️ Unrecognized domain '{api_domain_for_comments}' for comment API. Defaulting based on service.")
                     api_domain_for_comments = "kemono.cr" if "kemono" in self.service.lower() else "coomer.st"
 
+                # Fetch comments (Indented correctly now)
                 comments_data = fetch_post_comments(
                     api_domain_for_comments, self.service, self.user_id, post_id,
                     headers, self.logger, self.cancellation_event, self.pause_event,
                     cookies_dict=prepare_cookies_for_request(
                         self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger
-                    )
+                    ),
+                    proxies=self.proxies
                 )
 
                 if comments_data:
                     self.logger(f"   Fetched {len(comments_data)} comments for post {post_id}.")
                     for comment_item_idx, comment_item in enumerate(comments_data):
@@ -1340,7 +1344,7 @@
                     self.logger(f"   ⚠️ Error fetching or processing comments for post {post_id}: {e_fetch_comment}")
                 except Exception as e_generic_comment:
                     self.logger(f"   ❌ Unexpected error during comment processing for post {post_id}: {e_generic_comment}\n{traceback.format_exc(limit=2)}")
-                self.logger(f"   [Char Scope: Comments] Phase 2 Result: post_is_candidate_by_comment_char_match = {post_is_candidate_by_comment_char_match}")
+
             else:
                 self.logger(f"   [Char Scope: Comments] Phase 2: Skipped comment check for post ID '{post_id}' because a file match already made it a candidate.")
@@ -2327,9 +2331,10 @@ class DownloadThread(QThread):
         manga_custom_filename_format="{published} {title}",
         manga_custom_date_format="YYYY-MM-DD",
         sfp_threshold=None,
-        creator_name_cache=None
+        creator_name_cache=None,
+        proxies=None
     ):
 
         super().__init__()
         self.api_url_input = api_url_input
         self.output_dir = output_dir
@@ -2404,6 +2409,7 @@
         self.domain_override = domain_override
         self.sfp_threshold = sfp_threshold
         self.creator_name_cache = creator_name_cache
+        self.proxies = proxies
 
         if self.compress_images and Image is None:
             self.logger("⚠️ Image compression disabled: Pillow library not found (DownloadThread).")
@@ -2437,6 +2443,7 @@
 
         self.logger("   Starting post fetch (single-threaded download process)...")
 
+        # --- FIX: Removed duplicate proxies argument here ---
         post_generator = download_from_api(
             self.api_url_input,
             logger=self.logger,
@@ -2451,7 +2458,8 @@
             app_base_dir=self.app_base_dir,
             manga_filename_style_for_sort_check=self.manga_filename_style if self.manga_mode_active else None,
             processed_post_ids=self.processed_post_ids_set,
-            fetch_all_first=self.fetch_first
+            fetch_all_first=self.fetch_first,
+            proxies=self.proxies
         )
 
         for posts_batch_data in post_generator:
@@ -2464,6 +2472,7 @@
                     was_process_cancelled = True
                     break
 
+                # --- FIX: Ensure 'proxies' is in this dictionary ---
                 worker_args = {
                     'post_data': individual_post_data,
                     'emitter': worker_signals_obj,
@@ -2532,7 +2541,8 @@
                     'archive_only_mode': self.archive_only_mode,
                     'manga_custom_filename_format': self.manga_custom_filename_format,
                     'manga_custom_date_format': self.manga_custom_date_format,
-                    'sfp_threshold': self.sfp_threshold
+                    'sfp_threshold': self.sfp_threshold,
+                    'proxies': self.proxies
                 }
 
                 post_processing_worker = PostProcessorWorker(**worker_args)
```
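With the worker changes above, every outbound call (`head` probes, streamed downloads, post refetches, comment fetches) now threads `self.proxies` through explicitly. One way to sanity-check a proxies mapping before a long run is a cheap round trip; a hypothetical helper, using httpbin as the echo endpoint:

```python
import requests

def proxy_reachable(proxies, test_url="https://httpbin.org/ip", timeout=10):
    """Hypothetical pre-flight check: True if a simple GET succeeds via the proxy."""
    try:
        with requests.get(test_url, proxies=proxies, timeout=timeout) as resp:
            resp.raise_for_status()
            return True
    except requests.RequestException:
        return False
```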
```diff
@@ -21,8 +21,7 @@ class DeviantArtDownloadThread(QThread):
         self.pause_event = pause_event
         self.cancellation_event = cancellation_event
 
-        # --- PASS LOGGER TO CLIENT ---
-        # This ensures client logs go to the UI, not just the black console window
+        # Pass logger to client so we see "Rate Limit" messages in the UI
         self.client = DeviantArtClient(logger_func=self.progress_signal.emit)
 
         self.parent_app = parent
@@ -30,12 +29,13 @@
         self.skip_count = 0
 
         # --- THREAD SETTINGS ---
-        self.max_threads = 10
+        # STRICTLY 1 THREAD (Sequential) to match 1.py and avoid Rate Limits
+        self.max_threads = 1
 
     def run(self):
         self.progress_signal.emit("=" * 40)
         self.progress_signal.emit(f"🚀 Starting DeviantArt download for: {self.url}")
-        self.progress_signal.emit(f"   ℹ️ Using {self.max_threads} parallel threads.")
+        self.progress_signal.emit(f"   ℹ️ Mode: Sequential (1 thread) to prevent 429 errors.")
 
         try:
             if not self.client.authenticate():
```
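Dropping `max_threads` from 10 to 1 turns the existing `ThreadPoolExecutor` into a sequential queue while leaving the submit/wait structure untouched, which is the least invasive way to stay under the rate limit. The equivalent minimal pattern:

```python
from concurrent.futures import ThreadPoolExecutor, wait

# max_workers=1 runs tasks one at a time in submission order,
# so the executor-based code path needs no other changes.
with ThreadPoolExecutor(max_workers=1) as executor:
    futures = [executor.submit(print, i) for i in range(3)]
    wait(futures)  # block until the (sequential) batch finishes
```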
```diff
@@ -108,8 +108,10 @@
                 future = executor.submit(self._process_deviation_task, deviation, base_folder)
                 futures.append(future)
 
+            # Wait for this batch to finish before getting the next page
             wait(futures)
 
+            # Match 1.py: Sleep 1s between pages to be nice to API
             time.sleep(1)
 
     def _process_deviation_task(self, deviation, base_folder):
@@ -119,6 +121,7 @@
         title = deviation.get('title', 'Unknown')
 
         try:
+            # This handles the fallback logic internally
             content = self.client.get_deviation_content(dev_id)
             if content:
                 self._download_file(content['src'], deviation, override_dir=base_folder)
@@ -152,6 +155,7 @@
 
         final_filename = f"{safe_title}{ext}"
 
+        # Naming logic
         if self.parent_app and self.parent_app.manga_mode_checkbox.isChecked():
             try:
                 creator_name = metadata.get('author', {}).get('username', 'Unknown')
```
```diff
@@ -5,10 +5,11 @@ import sys
 
 # --- PyQt5 Imports ---
 from PyQt5.QtCore import Qt, QStandardPaths, QTimer
+from PyQt5.QtGui import QIntValidator  # <--- NEW: Added for Port validation
 from PyQt5.QtWidgets import (
     QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout,
     QGroupBox, QComboBox, QMessageBox, QGridLayout, QCheckBox, QLineEdit,
-    QTabWidget, QWidget, QFileDialog # Added QFileDialog
+    QTabWidget, QWidget, QFileDialog
 )
 # --- Local Application Imports ---
 from ...i18n.translator import get_translation
@@ -21,7 +22,9 @@ from ...config.constants import (
     RESOLUTION_KEY, UI_SCALE_KEY, SAVE_CREATOR_JSON_KEY,
     DATE_PREFIX_FORMAT_KEY,
     COOKIE_TEXT_KEY, USE_COOKIE_KEY,
-    FETCH_FIRST_KEY, DISCORD_TOKEN_KEY, POST_DOWNLOAD_ACTION_KEY
+    FETCH_FIRST_KEY, DISCORD_TOKEN_KEY, POST_DOWNLOAD_ACTION_KEY,
+    PROXY_ENABLED_KEY, PROXY_HOST_KEY, PROXY_PORT_KEY,
+    PROXY_USERNAME_KEY, PROXY_PASSWORD_KEY
 )
 from ...services.updater import UpdateChecker, UpdateDownloader
@@ -118,16 +121,15 @@ class FutureSettingsDialog(QDialog):
         super().__init__(parent)
         self.parent_app = parent_app_ref
         self.setModal(True)
-        self.update_downloader_thread = None # To keep a reference
+        self.update_downloader_thread = None
 
         app_icon = get_app_icon_object()
         if app_icon and not app_icon.isNull():
             self.setWindowIcon(app_icon)
 
         screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 800
-        # Use a more balanced aspect ratio
         scale_factor = screen_height / 1000.0
-        base_min_w, base_min_h = 480, 420 # Wider, less tall
+        base_min_w, base_min_h = 550, 450  # <--- TWEAK: Slightly increased width for better layout
         scaled_min_w = int(base_min_w * scale_factor)
         scaled_min_h = int(base_min_h * scale_factor)
         self.setMinimumSize(scaled_min_w, scaled_min_h)
@@ -136,6 +138,9 @@
         self._retranslate_ui()
         self._apply_theme()
 
+        # <--- NEW: Load proxy settings on init
+        self._load_proxy_settings()
+
     def _init_ui(self):
         """Initializes all UI components and layouts for the dialog."""
         main_layout = QVBoxLayout(self)
@@ -147,14 +152,16 @@
         # --- Create Tabs ---
         self.display_tab = QWidget()
         self.downloads_tab = QWidget()
+        self.network_tab = QWidget()  # <--- NEW: Network Tab
         self.updates_tab = QWidget()
 
         # Add tabs to the widget
         self.tab_widget.addTab(self.display_tab, "Display")
         self.tab_widget.addTab(self.downloads_tab, "Downloads")
+        self.tab_widget.addTab(self.network_tab, "Proxy/Network")  # <--- NEW
         self.tab_widget.addTab(self.updates_tab, "Updates")
 
-        # --- Populate Display Tab ---
+        # [Display Tab Code (Unchanged) ...]
         display_tab_layout = QVBoxLayout(self.display_tab)
         self.display_group_box = QGroupBox()
         display_layout = QGridLayout(self.display_group_box)
@@ -184,9 +191,9 @@
         display_layout.addWidget(self.resolution_combo_box, 3, 1)
 
         display_tab_layout.addWidget(self.display_group_box)
-        display_tab_layout.addStretch(1) # Push content to the top
+        display_tab_layout.addStretch(1)
 
-        # --- Populate Downloads Tab ---
+        # [Downloads Tab Code (Unchanged) ...]
         downloads_tab_layout = QVBoxLayout(self.downloads_tab)
         self.download_settings_group_box = QGroupBox()
         download_settings_layout = QGridLayout(self.download_settings_group_box)
@@ -217,7 +224,6 @@
         self.fetch_first_checkbox.stateChanged.connect(self._fetch_first_setting_changed)
         download_settings_layout.addWidget(self.fetch_first_checkbox, 4, 0, 1, 2)
 
-        # --- START: Add new Load/Save buttons ---
         settings_file_layout = QHBoxLayout()
         self.load_settings_button = QPushButton()
         self.save_settings_button = QPushButton()
@@ -225,18 +231,63 @@
         settings_file_layout.addWidget(self.save_settings_button)
         settings_file_layout.addStretch(1)
 
-        # Add this new layout to the grid
-        download_settings_layout.addLayout(settings_file_layout, 5, 0, 1, 2) # Row 5, span 2 cols
-
-        # Connect signals
+        download_settings_layout.addLayout(settings_file_layout, 5, 0, 1, 2)
+
         self.load_settings_button.clicked.connect(self._handle_load_settings)
         self.save_settings_button.clicked.connect(self._handle_save_settings)
-        # --- END: Add new Load/Save buttons ---
 
         downloads_tab_layout.addWidget(self.download_settings_group_box)
-        downloads_tab_layout.addStretch(1) # Push content to the top
+        downloads_tab_layout.addStretch(1)
 
-        # --- Populate Updates Tab ---
+        # --- START: Network Tab (NEW) ---
+        network_tab_layout = QVBoxLayout(self.network_tab)
+        self.proxy_group_box = QGroupBox()
+        proxy_layout = QGridLayout(self.proxy_group_box)
+
+        # Enable Checkbox
+        self.proxy_enabled_checkbox = QCheckBox()
+        self.proxy_enabled_checkbox.stateChanged.connect(self._proxy_setting_changed)
+        proxy_layout.addWidget(self.proxy_enabled_checkbox, 0, 0, 1, 2)
+
+        # Host / IP
+        self.proxy_host_label = QLabel()
+        self.proxy_host_input = QLineEdit()
+        self.proxy_host_input.setPlaceholderText("127.0.0.1")
+        self.proxy_host_input.editingFinished.connect(self._proxy_setting_changed)
+        proxy_layout.addWidget(self.proxy_host_label, 1, 0)
+        proxy_layout.addWidget(self.proxy_host_input, 1, 1)
+
+        # Port
+        self.proxy_port_label = QLabel()
+        self.proxy_port_input = QLineEdit()
+        self.proxy_port_input.setPlaceholderText("8080")
+        self.proxy_port_input.setValidator(QIntValidator(1, 65535, self))  # Only numbers
+        self.proxy_port_input.editingFinished.connect(self._proxy_setting_changed)
+        proxy_layout.addWidget(self.proxy_port_label, 2, 0)
+        proxy_layout.addWidget(self.proxy_port_input, 2, 1)
+
+        # Username
+        self.proxy_user_label = QLabel()
+        self.proxy_user_input = QLineEdit()
+        self.proxy_user_input.setPlaceholderText("(Optional)")
+        self.proxy_user_input.editingFinished.connect(self._proxy_setting_changed)
+        proxy_layout.addWidget(self.proxy_user_label, 3, 0)
+        proxy_layout.addWidget(self.proxy_user_input, 3, 1)
+
+        # Password
+        self.proxy_pass_label = QLabel()
+        self.proxy_pass_input = QLineEdit()
+        self.proxy_pass_input.setPlaceholderText("(Optional)")
+        self.proxy_pass_input.setEchoMode(QLineEdit.Password)  # Mask input
+        self.proxy_pass_input.editingFinished.connect(self._proxy_setting_changed)
+        proxy_layout.addWidget(self.proxy_pass_label, 4, 0)
+        proxy_layout.addWidget(self.proxy_pass_input, 4, 1)
+
+        network_tab_layout.addWidget(self.proxy_group_box)
+        network_tab_layout.addStretch(1)
+        # --- END: Network Tab (NEW) ---
+
+        # [Updates Tab Code (Unchanged) ...]
         updates_tab_layout = QVBoxLayout(self.updates_tab)
         self.update_group_box = QGroupBox()
         update_layout = QGridLayout(self.update_group_box)
```
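The port field relies on `QIntValidator(1, 65535, self)` to reject non-numeric keystrokes, and since `editingFinished` only fires for validator-accepted text, `_proxy_setting_changed` should never see a malformed port. A self-contained sketch of the same wiring:

```python
# Minimal sketch: a digits-only port field, mirroring the dialog wiring above.
import sys
from PyQt5.QtWidgets import QApplication, QLineEdit
from PyQt5.QtGui import QIntValidator

app = QApplication(sys.argv)
port_input = QLineEdit()
port_input.setPlaceholderText("8080")
port_input.setValidator(QIntValidator(1, 65535, port_input))  # digits only, 1-65535
port_input.editingFinished.connect(lambda: print("port:", port_input.text()))
port_input.show()
app.exec_()  # event loop; close the widget to exit
```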
@ -249,7 +300,7 @@ class FutureSettingsDialog(QDialog):
|
|||||||
update_layout.addWidget(self.check_update_button, 1, 0, 1, 2)
|
update_layout.addWidget(self.check_update_button, 1, 0, 1, 2)
|
||||||
|
|
||||||
updates_tab_layout.addWidget(self.update_group_box)
|
updates_tab_layout.addWidget(self.update_group_box)
|
||||||
updates_tab_layout.addStretch(1) # Push content to the top
|
updates_tab_layout.addStretch(1)
|
||||||
|
|
||||||
# --- OK Button (outside tabs) ---
|
# --- OK Button (outside tabs) ---
|
||||||
button_layout = QHBoxLayout()
|
button_layout = QHBoxLayout()
|
||||||
@ -266,16 +317,17 @@ class FutureSettingsDialog(QDialog):
|
|||||||
# --- Tab Titles ---
|
# --- Tab Titles ---
|
||||||
self.tab_widget.setTabText(0, self._tr("settings_tab_display", "Display"))
|
self.tab_widget.setTabText(0, self._tr("settings_tab_display", "Display"))
|
||||||
self.tab_widget.setTabText(1, self._tr("settings_tab_downloads", "Downloads"))
|
self.tab_widget.setTabText(1, self._tr("settings_tab_downloads", "Downloads"))
|
||||||
self.tab_widget.setTabText(2, self._tr("settings_tab_updates", "Updates"))
|
self.tab_widget.setTabText(2, self._tr("settings_tab_network", "Proxy/Network")) # <--- NEW
|
||||||
|
self.tab_widget.setTabText(3, self._tr("settings_tab_updates", "Updates"))
|
||||||
|
|
||||||
# --- Display Tab ---
|
# [Display Tab (Unchanged) ...]
|
||||||
self.display_group_box.setTitle(self._tr("display_settings_group_title", "Display Settings"))
|
self.display_group_box.setTitle(self._tr("display_settings_group_title", "Display Settings"))
|
||||||
self.theme_label.setText(self._tr("theme_label", "Theme:"))
|
self.theme_label.setText(self._tr("theme_label", "Theme:"))
|
||||||
self.ui_scale_label.setText(self._tr("ui_scale_label", "UI Scale:"))
|
self.ui_scale_label.setText(self._tr("ui_scale_label", "UI Scale:"))
|
||||||
self.language_label.setText(self._tr("language_label", "Language:"))
|
self.language_label.setText(self._tr("language_label", "Language:"))
|
||||||
self.window_size_label.setText(self._tr("window_size_label", "Window Size:"))
|
self.window_size_label.setText(self._tr("window_size_label", "Window Size:"))
|
||||||
|
|
||||||
# --- Downloads Tab ---
|
# [Downloads Tab (Unchanged) ...]
|
||||||
self.download_settings_group_box.setTitle(self._tr("download_settings_group_title", "Download Settings"))
|
self.download_settings_group_box.setTitle(self._tr("download_settings_group_title", "Download Settings"))
|
||||||
self.default_path_label.setText(self._tr("default_path_label", "Default Path:"))
|
self.default_path_label.setText(self._tr("default_path_label", "Default Path:"))
|
||||||
self.date_prefix_format_label.setText(self._tr("date_prefix_format_label", "Post Subfolder Format:"))
|
self.date_prefix_format_label.setText(self._tr("date_prefix_format_label", "Post Subfolder Format:"))
|
||||||
@@ -294,32 +346,93 @@ class FutureSettingsDialog(QDialog):
         self.fetch_first_checkbox.setToolTip(self._tr("fetch_first_tooltip", "If checked, the downloader will find all posts from a creator first before starting any downloads.\nThis can be slower to start but provides a more accurate progress bar."))
         self.save_path_button.setText(self._tr("settings_save_all_button", "Save Path + Cookie + Token"))
         self.save_path_button.setToolTip(self._tr("settings_save_all_tooltip", "Save the current 'Download Location', Cookie, and Discord Token settings for future sessions."))

-        # --- START: Add new button text ---
         self.load_settings_button.setText(self._tr("load_settings_button", "Load Settings..."))
         self.load_settings_button.setToolTip(self._tr("load_settings_tooltip", "Load all download settings from a .json file."))
         self.save_settings_button.setText(self._tr("save_settings_button", "Save Settings..."))
         self.save_settings_button.setToolTip(self._tr("save_settings_tooltip", "Save all current download settings to a .json file."))
-        # --- END: Add new button text ---

-        # --- Updates Tab ---
+        # --- START: Network Tab (NEW) ---
+        self.proxy_group_box.setTitle(self._tr("proxy_settings_group_title", "Proxy Configuration"))
+        self.proxy_enabled_checkbox.setText(self._tr("proxy_enabled_label", "Enable Proxy"))
+        self.proxy_host_label.setText(self._tr("proxy_host_label", "Host / IP:"))
+        self.proxy_port_label.setText(self._tr("proxy_port_label", "Port:"))
+        self.proxy_user_label.setText(self._tr("proxy_user_label", "Username (Optional):"))
+        self.proxy_pass_label.setText(self._tr("proxy_pass_label", "Password (Optional):"))
+        # --- END: Network Tab (NEW) ---

+        # [Updates Tab (Unchanged) ...]
         self.update_group_box.setTitle(self._tr("update_group_title", "Application Updates"))
         current_version = self.parent_app.windowTitle().split(' v')[-1]
         self.version_label.setText(self._tr("current_version_label", f"Current Version: v{current_version}"))
         self.update_status_label.setText(self._tr("update_status_ready", "Ready to check."))
         self.check_update_button.setText(self._tr("check_for_updates_button", "Check for Updates"))

-        # --- General ---
         self._update_theme_toggle_button_text()
         self.ok_button.setText(self._tr("ok_button", "OK"))

-        # --- Load Data ---
         self._populate_display_combo_boxes()
         self._populate_language_combo_box()
         self._populate_post_download_action_combo()
         self._load_date_prefix_format()
         self._load_checkbox_states()

+    # --- START: New Proxy Logic ---
+    def _load_proxy_settings(self):
+        """Loads proxy settings from QSettings into the UI."""
+        self.proxy_enabled_checkbox.blockSignals(True)
+        self.proxy_host_input.blockSignals(True)
+        self.proxy_port_input.blockSignals(True)
+        self.proxy_user_input.blockSignals(True)
+        self.proxy_pass_input.blockSignals(True)
+
+        enabled = self.parent_app.settings.value(PROXY_ENABLED_KEY, False, type=bool)
+        host = self.parent_app.settings.value(PROXY_HOST_KEY, "", type=str)
+        port = self.parent_app.settings.value(PROXY_PORT_KEY, "", type=str)
+        user = self.parent_app.settings.value(PROXY_USERNAME_KEY, "", type=str)
+        password = self.parent_app.settings.value(PROXY_PASSWORD_KEY, "", type=str)
+
+        self.proxy_enabled_checkbox.setChecked(enabled)
+        self.proxy_host_input.setText(host)
+        self.proxy_port_input.setText(port)
+        self.proxy_user_input.setText(user)
+        self.proxy_pass_input.setText(password)
+
+        self._update_proxy_fields_state(enabled)
+
+        self.proxy_enabled_checkbox.blockSignals(False)
+        self.proxy_host_input.blockSignals(False)
+        self.proxy_port_input.blockSignals(False)
+        self.proxy_user_input.blockSignals(False)
+        self.proxy_pass_input.blockSignals(False)
+
+    def _proxy_setting_changed(self):
+        """Saves the current proxy UI state to QSettings."""
+        enabled = self.proxy_enabled_checkbox.isChecked()
+        host = self.proxy_host_input.text().strip()
+        port = self.proxy_port_input.text().strip()
+        user = self.proxy_user_input.text().strip()
+        password = self.proxy_pass_input.text().strip()
+
+        self.parent_app.settings.setValue(PROXY_ENABLED_KEY, enabled)
+        self.parent_app.settings.setValue(PROXY_HOST_KEY, host)
+        self.parent_app.settings.setValue(PROXY_PORT_KEY, port)
+        self.parent_app.settings.setValue(PROXY_USERNAME_KEY, user)
+        self.parent_app.settings.setValue(PROXY_PASSWORD_KEY, password)
+        self.parent_app.settings.sync()
+
+        self._update_proxy_fields_state(enabled)
+
+        # Optional: Notify main app that network settings changed if needed
+        # self.parent_app.reload_proxy_settings()
+
+    def _update_proxy_fields_state(self, enabled):
+        """Enables or disables input fields based on the checkbox."""
+        self.proxy_host_input.setEnabled(enabled)
+        self.proxy_port_input.setEnabled(enabled)
+        self.proxy_user_input.setEnabled(enabled)
+        self.proxy_pass_input.setEnabled(enabled)
+    # --- END: New Proxy Logic ---
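The blockSignals(True) / blockSignals(False) bracketing in _load_proxy_settings exists so that programmatically populating the widgets does not fire their change signals and bounce the freshly loaded values straight back into _proxy_setting_changed. A minimal sketch of the same load-without-feedback pattern for one widget, assuming PyQt5 (which the type=bool keyword in the diff suggests); the function, widget, and key names are illustrative:

from PyQt5.QtCore import QSettings
from PyQt5.QtWidgets import QCheckBox

def load_flag(checkbox: QCheckBox, settings: QSettings, key: str):
    # Suppress stateChanged/toggled while pushing the stored value into
    # the widget, then restore signals so later user edits still persist.
    checkbox.blockSignals(True)
    try:
        checkbox.setChecked(settings.value(key, False, type=bool))
    finally:
        checkbox.blockSignals(False)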
     def _check_for_updates(self):
         self.check_update_button.setEnabled(False)
         self.update_status_label.setText(self._tr("update_status_checking", "Checking..."))
@@ -844,6 +844,19 @@ class DownloaderApp (QWidget ):
         settings['keep_duplicates_mode'] = self.keep_duplicates_mode
         settings['keep_duplicates_limit'] = self.keep_duplicates_limit
+
+        settings['proxy_enabled'] = self.settings.value(PROXY_ENABLED_KEY, False, type=bool)
+        settings['proxy_host'] = self.settings.value(PROXY_HOST_KEY, "", type=str)
+        settings['proxy_port'] = self.settings.value(PROXY_PORT_KEY, "", type=str)
+        settings['proxy_username'] = self.settings.value(PROXY_USERNAME_KEY, "", type=str)
+        settings['proxy_password'] = self.settings.value(PROXY_PASSWORD_KEY, "", type=str)
+
+        settings['proxies'] = None
+        if settings['proxy_enabled'] and settings['proxy_host'] and settings['proxy_port']:
+            proxy_str = f"http://{settings['proxy_host']}:{settings['proxy_port']}"
+            if settings['proxy_username'] and settings['proxy_password']:
+                proxy_str = f"http://{settings['proxy_username']}:{settings['proxy_password']}@{settings['proxy_host']}:{settings['proxy_port']}"
+            settings['proxies'] = {'http': proxy_str, 'https': proxy_str}
         return settings
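The settings['proxies'] mapping built above is the standard shape the requests library expects: one proxy URL per scheme, with optional user:password@ credentials embedded in the URL. One caveat worth flagging: credentials containing reserved characters (@, :, /) must be percent-encoded before being embedded, which the f-string above does not do. A small sketch, with illustrative endpoint and credentials, showing both the encoding and how the dict is consumed:

import requests
from urllib.parse import quote

user, password = "alice", "p@ss:word"      # illustrative credentials
host, port = "127.0.0.1", 8080             # illustrative proxy endpoint

auth = f"{quote(user, safe='')}:{quote(password, safe='')}"
proxy_str = f"http://{auth}@{host}:{port}"
proxies = {'http': proxy_str, 'https': proxy_str}

# requests routes both plain-HTTP and HTTPS traffic through the same proxy.
resp = requests.get("https://example.com", proxies=proxies, timeout=(15, 60))
print(resp.status_code)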
@@ -4627,6 +4640,14 @@ class DownloaderApp (QWidget ):
         if should_use_multithreading_for_posts:
             log_messages.append(f" Number of Post Worker Threads: {effective_num_post_workers}")
+
+        proxy_enabled_log = self.settings.value(PROXY_ENABLED_KEY, False, type=bool)
+        if proxy_enabled_log:
+            p_host = self.settings.value(PROXY_HOST_KEY, "")
+            p_port = self.settings.value(PROXY_PORT_KEY, "")
+            log_messages.append(f" Proxy: Enabled ({p_host}:{p_port})")
+        else:
+            log_messages.append(f" Proxy: Disabled")
+
         if domain_override_command:
             self.log_signal.emit(f"ℹ️ Domain Override Active: Will probe for the correct 'n*' subdomain on the '.{domain_override_command}' domain for each file.")
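Note the type=bool hint on settings.value(PROXY_ENABLED_KEY, False, type=bool): QSettings serializes values in backend-specific formats (INI storage writes the strings "true"/"false"), so a value read back without a type hint can arrive as a non-empty string, which is always truthy. A quick sketch of the pitfall, assuming PyQt5 and an illustrative organization/application scope:

from PyQt5.QtCore import QSettings

settings = QSettings("demo_org", "demo_app")   # illustrative scope
settings.setValue("proxy/enabled", False)

raw = settings.value("proxy/enabled")              # may come back as the str 'false'
safe = settings.value("proxy/enabled", False, type=bool)

print(bool(raw))   # can be True even though False was stored
print(safe)        # False, coerced correctly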
@@ -4639,7 +4660,7 @@ class DownloaderApp (QWidget ):
         self.set_ui_enabled(False)

         from src.config.constants import FOLDER_NAME_STOP_WORDS
+        current_proxies = self._get_current_ui_settings_as_dict().get('proxies')
         args_template = {
             'api_url_input': api_url,
             'download_root': effective_output_dir_for_run,
@@ -4716,6 +4737,7 @@ class DownloaderApp (QWidget ):
             'sfp_threshold': download_commands.get('sfp_threshold'),
             'handle_unknown_mode': handle_unknown_command,
             'add_info_in_pdf': self.add_info_in_pdf_setting,
+            'proxies': current_proxies
         }

         args_template['override_output_dir'] = override_output_dir
@@ -4741,7 +4763,8 @@ class DownloaderApp (QWidget ):
             'app_base_dir': app_base_dir_for_cookies,
             'manga_filename_style_for_sort_check': self.manga_filename_style,
             'processed_post_ids': processed_post_ids_for_this_run,
-            'fetch_all_first': True
+            'fetch_all_first': True,
+            'proxies': self._get_current_ui_settings_as_dict().get('proxies')
         }

         self.download_thread = threading.Thread(target=self._run_fetch_only_thread, args=(fetch_thread_args,), daemon=True)
@@ -5097,8 +5120,7 @@ class DownloaderApp (QWidget ):

         ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]

-        # 1. Define all LIVE RUNTIME arguments.
-        # These are taken from the current app state and are the same for all workers.
+        current_proxies = self._get_current_ui_settings_as_dict().get('proxies')
         live_runtime_args = {
             'emitter': self.worker_to_gui_queue,
             'creator_name_cache': self.creator_name_cache,
@@ -5129,6 +5151,7 @@ class DownloaderApp (QWidget ):
             'cookie_text': self.cookie_text_input.text(),
             'selected_cookie_file': self.selected_cookie_filepath,
             'add_info_in_pdf': self.add_info_in_pdf_setting,
+            'proxies': current_proxies,
         }

         # 2. Define DEFAULTS for all settings that *should* be in the profile.
@@ -5364,6 +5387,19 @@ class DownloaderApp (QWidget ):
         self._update_manga_filename_style_button_text()
         self._update_multipart_toggle_button_text()
+
+        if 'proxy_enabled' in settings:
+            self.settings.setValue(PROXY_ENABLED_KEY, settings['proxy_enabled'])
+        if 'proxy_host' in settings:
+            self.settings.setValue(PROXY_HOST_KEY, settings['proxy_host'])
+        if 'proxy_port' in settings:
+            self.settings.setValue(PROXY_PORT_KEY, settings['proxy_port'])
+        if 'proxy_username' in settings:
+            self.settings.setValue(PROXY_USERNAME_KEY, settings['proxy_username'])
+        if 'proxy_password' in settings:
+            self.settings.setValue(PROXY_PASSWORD_KEY, settings['proxy_password'])
+
+        self.settings.sync()

     def start_multi_threaded_download(self, num_post_workers, **kwargs):
         """
         Initializes and starts the multi-threaded download process.
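The if 'proxy_...' in settings: guards mean a loaded profile only overwrites the proxy keys it actually contains; anything absent from the file keeps its previously stored value, and the password lands in QSettings as plain text. A minimal sketch of that merge step, assuming the profile is a plain JSON object (the real file handling lives elsewhere in this file, so the function and map names here are illustrative):

import json

PROFILE_TO_SETTINGS = {                 # profile field -> QSettings key
    'proxy_enabled': PROXY_ENABLED_KEY,
    'proxy_host': PROXY_HOST_KEY,
    'proxy_port': PROXY_PORT_KEY,
    'proxy_username': PROXY_USERNAME_KEY,
    'proxy_password': PROXY_PASSWORD_KEY,
}

def merge_proxy_profile(qsettings, profile_path):
    with open(profile_path, 'r', encoding='utf-8') as f:
        profile = json.load(f)
    for field, key in PROFILE_TO_SETTINGS.items():
        if field in profile:            # only overwrite keys present in the file
            qsettings.setValue(key, profile[field])
    qsettings.sync()                    # flush to disk immediately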
@@ -5424,7 +5460,8 @@ class DownloaderApp (QWidget ):
             app_base_dir=worker_args_template.get('app_base_dir'),
             manga_filename_style_for_sort_check=worker_args_template.get('manga_filename_style'),
             processed_post_ids=worker_args_template.get('processed_post_ids', []),
-            fetch_all_first=worker_args_template.get('fetch_first', False)
+            fetch_all_first=worker_args_template.get('fetch_first', False),
+            proxies=worker_args_template.get('proxies')
         )

         ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]
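ppw_expected_keys inspects PostProcessorWorker.__init__.__code__.co_varnames (dropping self with [1:]) to learn which keyword arguments the worker constructor accepts, so a broad args dict can be filtered down before the **kwargs call; a newly threaded-through value like proxies is picked up as soon as the parameter exists. One wrinkle: co_varnames also lists the function's local variables, so slicing it is fragile. A sketch of the same filtering with inspect.signature, which sees only real parameters (the class and argument names here are illustrative):

import inspect

class Worker:                                      # stand-in for a worker class
    def __init__(self, url, proxies=None, timeout=30):
        self.url, self.proxies, self.timeout = url, proxies, timeout

args_pool = {'url': 'https://example.com', 'proxies': None,
             'timeout': 60, 'unrelated_flag': True}

# Keep only the kwargs that Worker.__init__ actually declares.
expected = set(inspect.signature(Worker.__init__).parameters) - {'self'}
filtered = {k: v for k, v in args_pool.items() if k in expected}

worker = Worker(**filtered)        # 'unrelated_flag' is silently dropped
print(sorted(filtered))            # ['proxies', 'timeout', 'url']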