Mirror of https://github.com/Yuvi9587/Kemono-Downloader.git (synced 2025-12-29 16:14:44 +00:00)
Commit 77bd428b91 (parent 4bf57eb752)
@@ -5,7 +5,8 @@ import time
import random
from urllib.parse import urlparse

def get_chapter_list(scraper, series_url, logger_func):
# 1. Update arguments to accept proxies=None
def get_chapter_list(scraper, series_url, logger_func, proxies=None):
"""
Checks if a URL is a series page and returns a list of all chapter URLs if it is.
Relies on a passed-in scraper session for connection.
@@ -16,9 +17,13 @@ def get_chapter_list(scraper, series_url, logger_func):
response = None
max_retries = 8

# 2. Define smart timeout logic
req_timeout = (30, 120) if proxies else 30

for attempt in range(max_retries):
try:
response = scraper.get(series_url, headers=headers, timeout=30)
# 3. Add proxies, verify=False, and the new timeout
response = scraper.get(series_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)
response.raise_for_status()
logger_func(f"   [AllComic] Successfully connected to series page on attempt {attempt + 1}.")
break
@@ -53,7 +58,8 @@ def get_chapter_list(scraper, series_url, logger_func):
logger_func(f"   [AllComic] ❌ Error parsing chapters after successful connection: {e}")
return []

def fetch_chapter_data(scraper, chapter_url, logger_func):
# 4. Update arguments here too
def fetch_chapter_data(scraper, chapter_url, logger_func, proxies=None):
"""
Fetches the comic title, chapter title, and image URLs for a single chapter page.
Relies on a passed-in scraper session for connection.
@@ -64,9 +70,14 @@ def fetch_chapter_data(scraper, chapter_url, logger_func):

response = None
max_retries = 8

# 5. Define smart timeout logic again
req_timeout = (30, 120) if proxies else 30

for attempt in range(max_retries):
try:
response = scraper.get(chapter_url, headers=headers, timeout=30)
# 6. Add proxies, verify=False, and timeout
response = scraper.get(chapter_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)
response.raise_for_status()
break
except requests.RequestException as e:
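Note on the pattern introduced above: requests (and cloudscraper, which wraps it) accepts either a single number or a (connect, read) tuple for timeout, so (30, 120) allows 30 seconds to establish the connection and 120 seconds to read the response, while verify=False skips TLS certificate checks so that intercepting proxies with self-signed certificates still work. A minimal sketch of that idea; the pick_timeout helper and the warning suppression are illustrative, not code from this repository:

import urllib3

# verify=False makes urllib3 emit InsecureRequestWarning on every request;
# silencing it explicitly is the usual companion to that trade-off.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

def pick_timeout(proxies, direct=30, proxied=(30, 120)):
    # Longer (connect, read) limits when traffic is tunnelled through a proxy.
    return proxied if proxies else direct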
@@ -40,8 +40,11 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
log_message += f" (Attempt {attempt + 1}/{max_retries})"
logger(log_message)

request_timeout = (30, 120) if proxies else (15, 60)

try:
with requests.get(paginated_url, headers=headers, timeout=(15, 60), cookies=cookies_dict, proxies=proxies) as response:
with requests.get(paginated_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False) as response:

response.raise_for_status()
response.encoding = 'utf-8'
return response.json()
@@ -92,7 +95,11 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
scraper = None
try:
scraper = cloudscraper.create_scraper()
response = scraper.get(post_api_url, headers=headers, timeout=(15, 300), cookies=cookies_dict, proxies=proxies)
# Keep the 300s read timeout for both, but increase connect timeout for proxies
request_timeout = (30, 300) if proxies else (15, 300)

response = scraper.get(post_api_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False)

response.raise_for_status()

full_post_data = response.json()
@@ -120,7 +127,9 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
logger(f"   Fetching comments: {comments_api_url}")

try:
with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict, proxies=proxies) as response:
request_timeout = (30, 60) if proxies else (10, 30)

with requests.get(comments_api_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False) as response:
response.raise_for_status()
response.encoding = 'utf-8'
return response.json()
@@ -180,7 +189,9 @@ def download_from_api(
direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
logger(f"   Attempting direct fetch for target post: {direct_post_api_url}")
try:
with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api, proxies=proxies) as direct_response:
request_timeout = (30, 60) if proxies else (10, 30)

with requests.get(direct_post_api_url, headers=headers, timeout=request_timeout, cookies=cookies_for_api, proxies=proxies, verify=False) as direct_response:
direct_response.raise_for_status()
direct_response.encoding = 'utf-8'
direct_post_data = direct_response.json()
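The API fetchers above all share the same shape: pick a timeout based on whether a proxy is configured, issue the request inside a context manager so the connection is released, and retry on network errors. A generic sketch of that shape, assuming hypothetical names and a simple linear backoff rather than the repository's exact retry code:

import time
import requests

def get_json_with_retries(url, headers=None, proxies=None, max_retries=3):
    # Proxied connections get looser (connect, read) limits, mirroring the diff.
    timeout = (30, 120) if proxies else (15, 60)
    for attempt in range(max_retries):
        try:
            with requests.get(url, headers=headers, timeout=timeout,
                              proxies=proxies, verify=False) as resp:
                resp.raise_for_status()
                resp.encoding = 'utf-8'
                return resp.json()
        except requests.RequestException:
            if attempt == max_retries - 1:
                raise
            time.sleep(2 * (attempt + 1))  # simple linear backoff between attempts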
@@ -11,9 +11,18 @@ class DeviantArtClient:
CLIENT_SECRET = "76b08c69cfb27f26d6161f9ab6d061a1"
BASE_API = "https://www.deviantart.com/api/v1/oauth2"

def __init__(self, logger_func=print):
# 1. Accept proxies in init
def __init__(self, logger_func=print, proxies=None):
self.session = requests.Session()
# Headers matching 1.py (Firefox)

# 2. Configure Session with Proxy & SSL settings immediately
if proxies:
self.session.proxies.update(proxies)
self.session.verify = False  # Ignore SSL for proxies
self.proxies_enabled = True
else:
self.proxies_enabled = False

self.session.headers.update({
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
@@ -41,7 +50,10 @@ class DeviantArtClient:
"client_id": self.CLIENT_ID,
"client_secret": self.CLIENT_SECRET
}
resp = self.session.post(url, data=data, timeout=10)
# 3. Smart timeout (longer if proxy)
req_timeout = 30 if self.proxies_enabled else 10

resp = self.session.post(url, data=data, timeout=req_timeout)
resp.raise_for_status()
data = resp.json()
self.access_token = data.get("access_token")
@@ -64,17 +76,21 @@ class DeviantArtClient:
max_retries = 4
backoff_delay = 2

# 4. Smart timeout
req_timeout = 30 if self.proxies_enabled else 20

while True:
try:
resp = self.session.get(url, params=params, timeout=20)
resp = self.session.get(url, params=params, timeout=req_timeout)

# 429: Rate Limit (Retry infinitely like 1.py)
# 429: Rate Limit
if resp.status_code == 429:
retry_after = resp.headers.get('Retry-After')
if retry_after:
sleep_time = int(retry_after) + 1
sleep_time = int(retry_after) + 2  # Add buffer
else:
sleep_time = 5  # Default sleep from 1.py
# 5. Increase default wait time for 429s
sleep_time = 15

self._log_once(sleep_time, f" [DeviantArt] ⚠️ Rate limit (429). Sleeping {sleep_time}s...")
time.sleep(sleep_time)
@@ -90,7 +106,7 @@ class DeviantArtClient:
raise Exception("Failed to refresh token")

if 400 <= resp.status_code < 500:
resp.raise_for_status()  # This raises immediately, breaking the loop
resp.raise_for_status()

if 500 <= resp.status_code < 600:
resp.raise_for_status()
@@ -105,12 +121,9 @@ class DeviantArtClient:
except requests.exceptions.HTTPError as e:
if e.response is not None and 400 <= e.response.status_code < 500:
raise e

# Otherwise fall through to general retry logic (for 5xx)
pass

except requests.exceptions.RequestException as e:
# Network errors / 5xx errors -> Retry
if retries < max_retries:
self._log_once("conn_error", f" [DeviantArt] Connection error: {e}. Retrying...")
time.sleep(backoff_delay)
@@ -131,7 +144,8 @@ class DeviantArtClient:
def get_deviation_uuid(self, url):
"""Scrapes the deviation page to find the UUID."""
try:
resp = self.session.get(url, timeout=15)
req_timeout = 30 if self.proxies_enabled else 15
resp = self.session.get(url, timeout=req_timeout)
match = re.search(r'"deviationUuid":"([^"]+)"', resp.text)
if match:
return match.group(1)
@@ -144,17 +158,13 @@ class DeviantArtClient:

def get_deviation_content(self, uuid):
"""Fetches download info."""
# 1. Try high-res download endpoint
try:
data = self._api_call(f"/deviation/download/{uuid}")
if 'src' in data:
return data
except:
# If 400/403 (Not downloadable), we fail silently here
# and proceed to step 2 (Metadata fallback)
pass

# 2. Fallback to standard content
try:
meta = self._api_call(f"/deviation/{uuid}")
if 'content' in meta:
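The 429 handling above now prefers the server's Retry-After header and falls back to a 15-second wait. A compact sketch of that pattern (names are illustrative; Retry-After can also arrive as an HTTP date, which this sketch simply treats as the default wait):

import time

def sleep_for_rate_limit(resp, default_wait=15):
    # resp is a requests.Response that came back with status 429.
    retry_after = resp.headers.get("Retry-After")
    try:
        wait = int(retry_after) + 2 if retry_after else default_wait
    except ValueError:
        wait = default_wait  # non-numeric Retry-After: fall back to the default
    time.sleep(wait)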
@@ -1,31 +1,35 @@
import requests
import cloudscraper
import json

def fetch_nhentai_gallery(gallery_id, logger=print):
# 1. Update arguments to accept proxies=None
def fetch_nhentai_gallery(gallery_id, logger=print, proxies=None):
"""
Fetches the metadata for a single nhentai gallery using cloudscraper to bypass Cloudflare.

Args:
gallery_id (str or int): The ID of the nhentai gallery.
logger (function): A function to log progress and error messages.

Returns:
dict: A dictionary containing the gallery's metadata if successful, otherwise None.
Fetches the metadata for a single nhentai gallery.
Switched to standard requests to support proxies with self-signed certs.
"""
api_url = f"https://nhentai.net/api/gallery/{gallery_id}"

scraper = cloudscraper.create_scraper()
# 2. Use a real User-Agent to avoid immediate blocking
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
}

logger(f"   Fetching nhentai gallery metadata from: {api_url}")

# 3. Smart timeout logic
req_timeout = (30, 120) if proxies else 20

try:
# Use the scraper to make the GET request
response = scraper.get(api_url, timeout=20)
# 4. Use requests.get with proxies, verify=False, and timeout
response = requests.get(api_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)

if response.status_code == 404:
logger(f"   ❌ Gallery not found (404): ID {gallery_id}")
return None
elif response.status_code == 403:
logger(f"   ❌ Access Denied (403): Cloudflare blocked the request. Try a different proxy or User-Agent.")
return None

response.raise_for_status()

@@ -36,9 +40,9 @@ def fetch_nhentai_gallery(gallery_id, logger=print):
gallery_data['pages'] = gallery_data.pop('images')['pages']
return gallery_data
else:
logger("   ❌ API response is missing essential keys (id, media_id, or images).")
logger("   ❌ API response is missing essential keys (id, media_id, images).")
return None

except Exception as e:
logger(f"   ❌ An error occurred while fetching gallery {gallery_id}: {e}")
logger(f"   ❌ Error fetching nhentai metadata: {e}")
return None
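Under the new signature the caller decides whether a proxy is used. A minimal usage sketch (the gallery ID and proxy address are placeholders, not values from the repository):

gallery = fetch_nhentai_gallery(
    123456,  # hypothetical gallery ID
    logger=print,
    proxies={"http": "http://127.0.0.1:8080", "https": "http://127.0.0.1:8080"},
)
if gallery:
    print(gallery["id"], gallery["media_id"], len(gallery["pages"]))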
@@ -263,7 +263,7 @@ class PostProcessorWorker:
new_url = parsed_url._replace(netloc=new_domain).geturl()

try:
with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies) as resp:
with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies, verify=False) as resp:
if resp.status_code == 200:
return new_url
except requests.RequestException:
@@ -338,7 +338,7 @@ class PostProcessorWorker:
api_original_filename_for_size_check = file_info.get('_original_name_for_log', file_info.get('name'))
try:
# Use a stream=True HEAD request to get headers without downloading the body
with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies) as head_response:
with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies, verify=False) as head_response:

head_response.raise_for_status()
content_length = head_response.headers.get('Content-Length')
@@ -673,7 +673,7 @@ class PostProcessorWorker:

current_url_to_try = file_url

response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies, verify=False)

if response.status_code == 403 and ('kemono.' in current_url_to_try or 'coomer.' in current_url_to_try):
self.logger(f" ⚠️ Got 403 Forbidden for '{api_original_filename}'. Attempting subdomain rotation...")
@@ -682,7 +682,7 @@ class PostProcessorWorker:
self.logger(f"   Retrying with new URL: {new_url}")
file_url = new_url
response.close()  # Close the old response
response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies, verify=False)
response.raise_for_status()

# --- REVISED AND MOVED SIZE CHECK LOGIC ---
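The HEAD request in the size check returns headers only, so Content-Length can be compared against a local file before committing to the full download. A generic sketch of that check under the same verify=False trade-off (the helper name and defaults are illustrative):

import requests

def remote_size(url, headers=None, proxies=None):
    # HEAD with redirects followed: the final hop carries the real Content-Length.
    with requests.head(url, headers=headers, timeout=15, allow_redirects=True,
                       proxies=proxies, verify=False) as resp:
        resp.raise_for_status()
        length = resp.headers.get("Content-Length")
        return int(length) if length is not None else None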
@@ -19,12 +19,14 @@ class AllcomicDownloadThread(QThread):
finished_signal = pyqtSignal(int, int, bool)
overall_progress_signal = pyqtSignal(int, int)

def __init__(self, url, output_dir, parent=None):
# 1. Update __init__ to accept proxies
def __init__(self, url, output_dir, parent=None, proxies=None):
super().__init__(parent)
self.comic_url = url
self.output_dir = output_dir
self.is_cancelled = False
self.pause_event = parent.pause_event if hasattr(parent, 'pause_event') else threading.Event()
self.proxies = proxies  # Store the proxies

def _check_pause(self):
if self.is_cancelled: return True
@@ -40,13 +42,19 @@ class AllcomicDownloadThread(QThread):
grand_total_dl = 0
grand_total_skip = 0

# Create the scraper session ONCE for the entire job
scraper = cloudscraper.create_scraper(
browser={'browser': 'firefox', 'platform': 'windows', 'desktop': True}
)
if self.proxies:
self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
else:
self.progress_signal.emit(" 🌍 Network: Direct Connection (No Proxy)")

# Pass the scraper to the function
chapters_to_download = allcomic_get_list(scraper, self.comic_url, self.progress_signal.emit)
scraper = requests.Session()
scraper.headers.update({
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
})

# 2. Pass self.proxies to get_chapter_list
chapters_to_download = allcomic_get_list(scraper, self.comic_url, self.progress_signal.emit, proxies=self.proxies)

if not chapters_to_download:
chapters_to_download = [self.comic_url]
@@ -57,8 +65,9 @@ class AllcomicDownloadThread(QThread):
if self._check_pause(): break

self.progress_signal.emit(f"\n-- Processing Chapter {chapter_idx + 1}/{len(chapters_to_download)} --")
# Pass the scraper to the function
comic_title, chapter_title, image_urls = allcomic_fetch_data(scraper, chapter_url, self.progress_signal.emit)

# 3. Pass self.proxies to fetch_chapter_data
comic_title, chapter_title, image_urls = allcomic_fetch_data(scraper, chapter_url, self.progress_signal.emit, proxies=self.proxies)

if not image_urls:
self.progress_signal.emit(f"❌ Failed to get data for chapter. Skipping.")
@@ -80,6 +89,9 @@ class AllcomicDownloadThread(QThread):
self.overall_progress_signal.emit(total_files_in_chapter, 0)
headers = {'Referer': chapter_url}

# 4. Define smart timeout for images
img_timeout = (30, 120) if self.proxies else 60

for i, img_url in enumerate(image_urls):
if self._check_pause(): break

@@ -97,8 +109,9 @@ class AllcomicDownloadThread(QThread):
if self._check_pause(): break
try:
self.progress_signal.emit(f"   Downloading ({i+1}/{total_files_in_chapter}): '{filename}' (Attempt {attempt + 1})...")
# Use the persistent scraper object
response = scraper.get(img_url, stream=True, headers=headers, timeout=60)

# 5. Use proxies, verify=False, and new timeout
response = scraper.get(img_url, stream=True, headers=headers, timeout=img_timeout, proxies=self.proxies, verify=False)
response.raise_for_status()

with open(filepath, 'wb') as f:
@@ -125,7 +138,7 @@ class AllcomicDownloadThread(QThread):
grand_total_skip += 1

self.overall_progress_signal.emit(total_files_in_chapter, i + 1)
time.sleep(0.5)  # Increased delay between images for this site
time.sleep(0.5)

if self._check_pause(): break
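The image request above uses stream=True, so the body is written to disk in chunks rather than held in memory. A generic sketch of the write loop that typically follows such a call (not the repository's exact loop):

import requests

def save_stream(response: requests.Response, filepath: str) -> None:
    # Write the body in chunks so large images never sit fully in memory.
    with open(filepath, "wb") as f:
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:
                f.write(chunk)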
@@ -2,8 +2,8 @@ import os
import time
import requests
import re
import random  # Needed for random delays
from datetime import datetime
# REMOVED: ThreadPoolExecutor, wait (Not needed for sequential speed)
from PyQt5.QtCore import QThread, pyqtSignal
from ...core.deviantart_client import DeviantArtClient
from ...utils.file_utils import clean_folder_name
@@ -14,24 +14,29 @@ class DeviantArtDownloadThread(QThread):
overall_progress_signal = pyqtSignal(int, int)
finished_signal = pyqtSignal(int, int, bool, list)

def __init__(self, url, output_dir, pause_event, cancellation_event, parent=None):
# 1. Accept proxies in init
def __init__(self, url, output_dir, pause_event, cancellation_event, parent=None, proxies=None):
super().__init__(parent)
self.url = url
self.output_dir = output_dir
self.pause_event = pause_event
self.cancellation_event = cancellation_event

# Pass logger to client
self.client = DeviantArtClient(logger_func=self.progress_signal.emit)
self.proxies = proxies  # Store proxies

self.parent_app = parent
self.download_count = 0
self.skip_count = 0

def run(self):
self.client = DeviantArtClient(logger_func=self.progress_signal.emit, proxies=self.proxies)

if self.proxies:
self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
else:
self.progress_signal.emit(" 🌍 Network: Direct Connection")

self.progress_signal.emit("=" * 40)
self.progress_signal.emit(f"🚀 Starting DeviantArt download for: {self.url}")
self.progress_signal.emit(f" ℹ️ Mode: High-Speed Sequential (Matches 1.py)")

try:
if not self.client.authenticate():
@@ -87,7 +92,6 @@ class DeviantArtDownloadThread(QThread):
if not os.path.exists(base_folder):
os.makedirs(base_folder, exist_ok=True)

# --- OPTIMIZED LOOP (Matches 1.py structure) ---
while has_more:
if self._check_pause_cancel(): break

@@ -98,12 +102,14 @@ class DeviantArtDownloadThread(QThread):

if not results: break

# DIRECT LOOP - No ThreadPoolExecutor overhead
for deviation in results:
if self._check_pause_cancel(): break
self._process_deviation_task(deviation, base_folder)

# Be nice to API (1 second sleep per batch of 24)
# 4. FIX 429: Add a small random delay between items
# This prevents hammering the API 24 times in a single second.
time.sleep(random.uniform(0.5, 1.2))

time.sleep(1)

def _process_deviation_task(self, deviation, base_folder):
@@ -113,7 +119,6 @@ class DeviantArtDownloadThread(QThread):
title = deviation.get('title', 'Unknown')

try:
# Try to get content (Handles fallback internally now)
content = self.client.get_deviation_content(dev_id)
if content:
self._download_file(content['src'], deviation, override_dir=base_folder)
@@ -168,7 +173,6 @@ class DeviantArtDownloadThread(QThread):
final_filename = f"{clean_folder_name(new_name)}{ext}"

except Exception as e:
# Reduced logging verbosity slightly for speed
pass

save_dir = override_dir if override_dir else self.output_dir
@@ -185,7 +189,11 @@ class DeviantArtDownloadThread(QThread):
try:
self.progress_signal.emit(f" ⬇️ Downloading: {final_filename}")

with requests.get(file_url, stream=True, timeout=30) as r:
# 5. Determine smart timeout for files
timeout_val = (30, 120) if self.proxies else 30

# 6. Use proxies and verify=False
with requests.get(file_url, stream=True, timeout=timeout_val, proxies=self.proxies, verify=False) as r:
r.raise_for_status()

with open(filepath, 'wb') as f:
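The pacing change replaces one fixed sleep per batch with a small randomized delay per item, which spreads a batch of 24 API calls over roughly 12 to 30 seconds instead of firing them back to back. A trivial sketch of the same idea (helper name is illustrative):

import random
import time

def polite_pause(low=0.5, high=1.2):
    # Jittered per-item delay: keeps request timing irregular and well below
    # the burst rate that tends to trigger 429 responses.
    time.sleep(random.uniform(low, high))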
@@ -1,6 +1,6 @@
import os
import time
import cloudscraper
import requests
from PyQt5.QtCore import QThread, pyqtSignal

from ...utils.file_utils import clean_folder_name
@@ -17,68 +17,78 @@ class NhentaiDownloadThread(QThread):

EXTENSION_MAP = {'j': 'jpg', 'p': 'png', 'g': 'gif', 'w': 'webp' }

# 1. Update init to initialize self.proxies
def __init__(self, gallery_data, output_dir, parent=None):
super().__init__(parent)
self.gallery_data = gallery_data
self.output_dir = output_dir
self.is_cancelled = False
self.proxies = None  # Placeholder, will be injected by main_window

def run(self):
# 2. Log Proxy Usage
if self.proxies:
self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
else:
self.progress_signal.emit(" 🌍 Network: Direct Connection (No Proxy)")

title = self.gallery_data.get("title", {}).get("english", f"gallery_{self.gallery_data.get('id')}")
gallery_id = self.gallery_data.get("id")
media_id = self.gallery_data.get("media_id")
pages_info = self.gallery_data.get("pages", [])

folder_name = clean_folder_name(title)
gallery_path = os.path.join(self.output_dir, folder_name)
save_path = os.path.join(self.output_dir, folder_name)

try:
os.makedirs(gallery_path, exist_ok=True)
except OSError as e:
self.progress_signal.emit(f"❌ Critical error creating directory: {e}")
os.makedirs(save_path, exist_ok=True)
self.progress_signal.emit(f"   Saving to: {folder_name}")
except Exception as e:
self.progress_signal.emit(f"   ❌ Error creating directory: {e}")
self.finished_signal.emit(0, len(pages_info), False)
return

self.progress_signal.emit(f"⬇️ Downloading '{title}' to folder '{folder_name}'...")

scraper = cloudscraper.create_scraper()
download_count = 0
skip_count = 0
total_pages = len(pages_info)

# 3. Use requests.Session instead of cloudscraper
scraper = requests.Session()

# 4. Smart timeout logic
img_timeout = (30, 120) if self.proxies else 60

for i, page_data in enumerate(pages_info):
if self.is_cancelled:
break
if self.is_cancelled: break

page_num = i + 1

ext_char = page_data.get('t', 'j')
extension = self.EXTENSION_MAP.get(ext_char, 'jpg')

relative_path = f"/galleries/{media_id}/{page_num}.{extension}"

local_filename = f"{page_num:03d}.{extension}"
filepath = os.path.join(gallery_path, local_filename)
file_ext = self.EXTENSION_MAP.get(page_data.get('t'), 'jpg')
local_filename = f"{i+1:03d}.{file_ext}"
filepath = os.path.join(save_path, local_filename)

if os.path.exists(filepath):
self.progress_signal.emit(f"   -> Skip (Exists): {local_filename}")
self.progress_signal.emit(f"   Skipping {local_filename} (already exists).")
skip_count += 1
continue

download_successful = False
for server in self.IMAGE_SERVERS:
if self.is_cancelled:
break

full_url = f"{server}{relative_path}"
# Try servers until one works
for server in self.IMAGE_SERVERS:
if self.is_cancelled: break

# Construct URL: server/galleries/media_id/page_num.ext
full_url = f"{server}/galleries/{media_id}/{i+1}.{file_ext}"

try:
self.progress_signal.emit(f"   Downloading page {page_num}/{len(pages_info)} from {server} ...")
self.progress_signal.emit(f"   Downloading page {i+1}/{total_pages}...")

headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Referer': f'https://nhentai.net/g/{gallery_id}/'
}

response = scraper.get(full_url, headers=headers, timeout=60, stream=True)
# 5. Add proxies, verify=False, and timeout
response = scraper.get(full_url, headers=headers, timeout=img_timeout, stream=True, proxies=self.proxies, verify=False)

if response.status_code == 200:
with open(filepath, 'wb') as f:
@@ -86,12 +96,14 @@ class NhentaiDownloadThread(QThread):
f.write(chunk)
download_count += 1
download_successful = True
break
break  # Stop trying servers
else:
self.progress_signal.emit(f"   -> {server} returned status {response.status_code}. Trying next server...")
# self.progress_signal.emit(f"   -> {server} returned status {response.status_code}...")
pass

except Exception as e:
self.progress_signal.emit(f"   -> {server} failed to connect or timed out: {e}. Trying next server...")
# self.progress_signal.emit(f"   -> {server} failed: {e}")
pass

if not download_successful:
self.progress_signal.emit(f"   ❌ Failed to download {local_filename} from all servers.")
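The page loop above tries each image host in turn and stops at the first 200 response, marking the page failed only when every server has been tried. A generic sketch of that fallback, with IMAGE_SERVERS and the fetch callable standing in for the thread's real attributes and session call:

def fetch_from_any(servers, path, fetch):
    # servers: iterable of base URLs; fetch: callable returning a Response or None.
    for server in servers:
        resp = fetch(f"{server}{path}")
        if resp is not None and resp.status_code == 200:
            return resp
    return None  # every server failed; caller records the page as failed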
@@ -73,7 +73,6 @@ class HelpGuideDialog(QDialog):
<li>fap-nation.org/</li>
<li>Discord</li>
<li>allporncomic.com</li>
<li>allporncomic.com</li>
<li>hentai2read.com</li>
<li>mangadex.org</li>
<li>Simpcity</li>
@@ -279,6 +278,46 @@ class HelpGuideDialog(QDialog):
</ul>
"""),

("Add to Queue",
"""
<p>This feature allows you to queue up multiple distinct downloads with different settings and run them all sequentially.</p>

<h3 style='color: #E0E0E0;'>Step 1: Prepare the Download</h3>
<p>Before clicking add, configure the download exactly how you want it processed for this specific link:</p>
<ul>
<li><b>Select Directory:</b> Choose where you want the files to go.</li>
<li><b>Configure Options:</b> Check/uncheck boxes (e.g., "Separate Folders", "Use Cookie", "Manga Mode").</li>
<li><b>Paste URL:</b> Enter the link for the creator or post you want to download.</li>
</ul>

<h3 style='color: #E0E0E0;'>Step 2: Add to Queue</h3>
<ol>
<li>Click the <b>Add to Queue</b> button (located near the Start Download).</li>
<li><b>Confirmation:</b> You will see a popup message and the log will print <code>✅ Job added to queue</code>.</li>
<li>The URL box will clear, allowing you to immediately paste the next link.</li>
</ol>

<h3 style='color: #E0E0E0;'>Step 3: Repeat & Start</h3>
<p>You can repeat steps 1 and 2 as many times as you like. You can even change settings (like the download folder) between adds; the queue remembers the specific settings for each individual link.</p>
<p>To start processing the queue:</p>
<ol>
<li>In the Link Input box, type exactly: <code>start queue</code></li>
<li>The main "Start Download" button will change to <b>"🚀 Execute Queue"</b>.</li>
<li>Click that button to begin.</li>
</ol>

<h3 style='color: #E0E0E0;'>Processing Behavior</h3>
<p>Once started, the app will lock the UI, load the first job, download it until finished, and automatically move to the next until the queue is empty.</p>

<h3 style='color: #E0E0E0;'>Special Case: Creator Selection Popup</h3>
<p>If you use the <b>Creator Selection</b> popup (the 🎨 button):</p>
<ul>
<li>Select multiple creators in that popup and click <b>"Queue Selected"</b>.</li>
<li>The app internally adds them to a temporary list.</li>
<li>When you click the main <b>"Add to Queue"</b> button on the main window, it will detect that list and automatically bulk-create job files for all the creators you selected.</li>
</ul>
"""),

("Special Commands",
"""
<p>You can add special commands to the <b>"Filter by Character(s)"</b> input field to change download behavior for a single task. Commands are keywords wrapped in square brackets <code>[]</code>.</p>
@@ -451,6 +490,15 @@ class HelpGuideDialog(QDialog):
"""
<p>These features provide advanced control over your downloads, sessions, and application settings.</p>

<h3 style='color: #E0E0E0;'>🛡️ Proxy Support</h3>
<p>You can now configure a proxy to bypass region blocks or ISP restrictions (e.g., for AllComic or Nhentai).</p>
<p>Go to <b>Settings ⚙️ > Proxy Tab</b> to set it up:</p>
<ul>
<li><b>Protocols:</b> Full support for <b>HTTP</b>, <b>SOCKS4</b>, and <b>SOCKS5</b>.</li>
<li><b>Authentication:</b> Supports username and password for private proxies.</li>
<li><b>Global Effect:</b> Once enabled, all app connections (including API fetches and file downloads) will route through this proxy.</li>
</ul>

<h3 style='color: #E0E0E0;'>Use Cookie</h3>
<p>This is essential for downloading from sites that require a login (like <b>SimpCity</b> or accessing your <b>favorites</b> on Kemono/Coomer). You can either:</p>
<ul>
@@ -484,6 +532,7 @@ class HelpGuideDialog(QDialog):
<li>Toggle <b>"Fetch First"</b> (to find all posts from a creator before starting any downloads).</li>
</ul>
</li>
<li><b>Proxy Tab:</b> Configure HTTP/SOCKS proxies and authentication.</li>
<li><b>Updates Tab:</b> Check for and install new application updates.</li>
</ul>

@@ -605,7 +654,8 @@ class HelpGuideDialog(QDialog):
main_layout.addLayout(content_layout, 1)

self.nav_list = QListWidget()
self.nav_list.setFixedWidth(int(220 * scale))
# Increased width to prevent scrollbar overlap
self.nav_list.setFixedWidth(int(280 * scale))
# Styles are now set in the __init__ method
content_layout.addWidget(self.nav_list)
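For reference, the three protocols the new help entry mentions map onto the proxy URL schemes that requests understands; the hosts, ports, and credentials below are placeholders, and SOCKS schemes require the optional PySocks dependency (requests[socks]). The socks5h form resolves DNS through the proxy rather than locally.

PROXY_URL_EXAMPLES = {
    "http":   {"http": "http://user:pass@proxy.example.com:8080",
               "https": "http://user:pass@proxy.example.com:8080"},
    "socks4": {"http": "socks4://proxy.example.com:1080",
               "https": "socks4://proxy.example.com:1080"},
    "socks5": {"http": "socks5h://user:pass@proxy.example.com:1080",
               "https": "socks5h://user:pass@proxy.example.com:1080"},
}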
@@ -28,8 +28,8 @@ class UpdateCheckDialog(QDialog):
self.selected_profiles_list = []  # Will store a list of {'name': ..., 'data': ...}

self._default_checkbox_tooltip = (
"If checked, the settings from the selected profile will be loaded into the main window.\n"
"You can then modify them. When you start the download, the new settings will be saved to the profile."
"If checked, the settings fields will be unlocked and editable.\n"
"If unchecked, settings will still load, but in 'Read-Only' mode."
)

self._init_ui()
@@ -65,13 +65,17 @@ class UpdateCheckDialog(QDialog):
self.list_widget.itemChanged.connect(self._handle_item_changed)
layout.addWidget(self.list_widget)

# --- NEW: Checkbox to Load Settings ---
self.load_settings_checkbox = QCheckBox("Load profile settings into UI (Edit Settings)")
self.load_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
layout.addWidget(self.load_settings_checkbox)
# Renamed text to reflect new behavior
self.edit_settings_checkbox = QCheckBox("Enable Editing (Unlock Settings)")
self.edit_settings_checkbox.setToolTip(self._default_checkbox_tooltip)

# Checked by default as requested
self.edit_settings_checkbox.setChecked(True)

layout.addWidget(self.edit_settings_checkbox)
# -------------------------------------

# --- All Buttons in One Horizontal Layout ---

button_layout = QHBoxLayout()
button_layout.setSpacing(6)  # small even spacing between all buttons

@@ -110,7 +114,8 @@ class UpdateCheckDialog(QDialog):
self.deselect_all_button.setText(self._tr("deselect_all_button_text", "Deselect All"))
self.check_button.setText(self._tr("update_check_dialog_check_button", "Check Selected"))
self.close_button.setText(self._tr("update_check_dialog_close_button", "Close"))
self.load_settings_checkbox.setText(self._tr("update_check_load_settings_checkbox", "Load profile settings into UI (Edit Settings)"))
# Updated translation key and default text
self.edit_settings_checkbox.setText(self._tr("update_check_enable_editing_checkbox", "Enable Editing (Unlock Settings)"))

def _load_profiles(self):
"""Loads all .json files from the creator_profiles directory as checkable items."""
@@ -133,7 +138,6 @@ class UpdateCheckDialog(QDialog):
with open(filepath, 'r', encoding='utf-8') as f:
data = json.load(f)

# Basic validation to ensure it's a valid profile
if 'creator_url' in data and 'processed_post_ids' in data:
creator_name = os.path.splitext(filename)[0]
profiles_found.append({'name': creator_name, 'data': data})
@@ -147,7 +151,6 @@ class UpdateCheckDialog(QDialog):
for profile_info in profiles_found:
item = QListWidgetItem(profile_info['name'])
item.setData(Qt.UserRole, profile_info)
# --- Make item checkable ---
item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
item.setCheckState(Qt.Unchecked)
self.list_widget.addItem(item)
@@ -158,14 +161,13 @@ class UpdateCheckDialog(QDialog):
self.check_button.setEnabled(False)
self.select_all_button.setEnabled(False)
self.deselect_all_button.setEnabled(False)
self.load_settings_checkbox.setEnabled(False)
self.edit_settings_checkbox.setEnabled(False)

def _toggle_all_checkboxes(self):
"""Handles Select All and Deselect All button clicks."""
sender = self.sender()
check_state = Qt.Checked if sender == self.select_all_button else Qt.Unchecked

# Block signals to prevent triggering _handle_item_changed repeatedly
self.list_widget.blockSignals(True)
for i in range(self.list_widget.count()):
item = self.list_widget.item(i)
@@ -173,13 +175,12 @@ class UpdateCheckDialog(QDialog):
item.setCheckState(check_state)
self.list_widget.blockSignals(False)

# Manually trigger the update once after batch change
self._handle_item_changed(None)

def _handle_item_changed(self, item):
"""
Monitors how many items are checked.
If more than 1 item is checked, disable the 'Load Settings' checkbox.
If more than 1 item is checked, disable the 'Enable Editing' checkbox.
"""
checked_count = 0
for i in range(self.list_widget.count()):
@@ -187,15 +188,15 @@ class UpdateCheckDialog(QDialog):
checked_count += 1

if checked_count > 1:
self.load_settings_checkbox.setChecked(False)
self.load_settings_checkbox.setEnabled(False)
self.load_settings_checkbox.setToolTip(
self.edit_settings_checkbox.setChecked(False)
self.edit_settings_checkbox.setEnabled(False)
self.edit_settings_checkbox.setToolTip(
self._tr("update_check_multi_selection_warning",
"Editing settings is disabled when multiple profiles are selected.")
)
else:
self.load_settings_checkbox.setEnabled(True)
self.load_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
self.edit_settings_checkbox.setEnabled(True)
self.edit_settings_checkbox.setToolTip(self._default_checkbox_tooltip)

def on_check_selected(self):
"""Handles the 'Check Selected' button click."""
@@ -221,6 +222,18 @@ class UpdateCheckDialog(QDialog):
return self.selected_profiles_list

def should_load_into_ui(self):
"""Returns True if the 'Load settings into UI' checkbox is checked."""
# Only return True if it's enabled and checked (double safety)
return self.load_settings_checkbox.isEnabled() and self.load_settings_checkbox.isChecked()
"""
Returns True if the settings SHOULD be loaded into the UI.

NEW LOGIC: Returns True if exactly ONE profile is selected.
It does NOT care about the checkbox state anymore, because we want
to load settings even if the user can't edit them.
"""
return len(self.selected_profiles_list) == 1

def should_enable_editing(self):
"""
NEW METHOD: Returns True if the user is allowed to edit the settings.
This is linked to the checkbox.
"""
return self.edit_settings_checkbox.isEnabled() and self.edit_settings_checkbox.isChecked()
@@ -346,7 +346,7 @@ class DownloaderApp (QWidget ):
self.download_location_label_widget = None
self.remove_from_filename_label_widget = None
self.skip_words_label_widget = None
self.setWindowTitle("Kemono Downloader v7.8.0")
self.setWindowTitle("Kemono Downloader v7.9.0")
setup_ui(self)
self._connect_signals()
if hasattr(self, 'character_input'):
@@ -366,18 +366,14 @@ class DownloaderApp (QWidget ):
def add_current_settings_to_queue(self):
"""Saves the current UI settings as a JSON job file with creator-specific paths."""

# --- Helper: Append Name to Path safely ---
def get_creator_specific_path(base_dir, folder_name):
if not folder_name:
return base_dir
safe_name = clean_folder_name(folder_name)
# Avoid double pathing (e.g. if base is .../Artist and we append /Artist again)

if base_dir.replace('\\', '/').rstrip('/').endswith(safe_name):
return base_dir
return os.path.join(base_dir, safe_name)
# ------------------------------------------

# --- SCENARIO 1: Items from Creator Selection (Popup) ---
if self.favorite_download_queue:
count = 0
base_settings = self._get_current_ui_settings_as_dict()
@@ -407,7 +403,7 @@ class DownloaderApp (QWidget ):
QMessageBox.warning(self, "Queue Error", "Failed to add selected items to queue.")
return

# --- SCENARIO 2: Manual URL Entry ---

url = self.link_input.text().strip()
if not url:
QMessageBox.warning(self, "Input Error", "Cannot add to queue: URL is empty.")
@@ -416,23 +412,20 @@ class DownloaderApp (QWidget ):
settings = self._get_current_ui_settings_as_dict()
settings['api_url'] = url

# Attempt to resolve name from URL + Cache (creators.json)

service, user_id, post_id = extract_post_info(url)
name_hint = "Job"

if service and user_id:
# Try to find name in your local creators cache

cache_key = (service.lower(), str(user_id))
cached_name = self.creator_name_cache.get(cache_key)

if cached_name:
# CASE A: Creator Found -> Use Creator Name

name_hint = cached_name
settings['output_dir'] = get_creator_specific_path(settings['output_dir'], cached_name)
else:
# CASE B: Creator NOT Found -> Use Post ID or User ID
# If it's a single post link, 'post_id' will have a value.
# If it's a profile link, 'post_id' is None, so we use 'user_id'.
if post_id:
folder_name = str(post_id)
else:
@@ -476,7 +469,7 @@ class DownloaderApp (QWidget ):
QMessageBox.information(self, "Queue Empty", "No job files found in appdata/jobs.")
return

# --- FIX: Clear error log at the start of the entire queue session ---

self.permanently_failed_files_for_dialog.clear()
self._update_error_button_count()
# -------------------------------------------------------------------
@@ -2975,6 +2968,25 @@ class DownloaderApp (QWidget ):
else:
self.log_signal.emit("ℹ️ Link export was cancelled by the user.")

def _set_inputs_read_only(self, read_only):
"""Disables input fields (Read-Only mode) but keeps action buttons enabled."""
# List of widgets to disable in Read-Only mode
widgets_to_lock = [
self.link_input, self.dir_input, self.character_input,
self.skip_words_input, self.remove_from_filename_input,
self.custom_folder_input, self.cookie_text_input,
self.thread_count_input, self.start_page_input, self.end_page_input,
self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
self.skip_zip_checkbox, self.download_thumbnails_checkbox,
self.compress_images_checkbox, self.scan_content_images_checkbox,
self.use_cookie_checkbox, self.manga_mode_checkbox,
self.radio_all, self.radio_images, self.radio_videos,
self.char_filter_scope_toggle_button, self.skip_scope_toggle_button
]

for widget in widgets_to_lock:
if widget:
widget.setEnabled(not read_only)

def get_filter_mode (self ):
if self.radio_more and self.radio_more.isChecked():
@@ -3243,7 +3255,6 @@ class DownloaderApp (QWidget ):
if self.single_pdf_setting:
self.use_subfolder_per_post_checkbox.setChecked(False)

# --- Logging ---
self.log_signal.emit(f"ℹ️ 'More' filter set: {scope_text}, Format: {self.text_export_format.upper()}")
if is_any_pdf_mode:
status_single = "Enabled" if self.single_pdf_setting else "Disabled"
@@ -3252,19 +3263,18 @@ class DownloaderApp (QWidget ):
self.log_signal.emit("   ↳ Multithreading disabled for PDF export.")

else:
# --- User clicked Cancel: Revert to default ---
self.log_signal.emit("ℹ️ 'More' filter selection cancelled. Reverting to 'All'.")
if hasattr(self, 'radio_all'):
self.radio_all.setChecked(True)

# Case 2: Switched AWAY from the "More" button (e.g., clicked 'Images' or 'All')

elif button != self.radio_more and checked:
self.radio_more.setText("More")
self.more_filter_scope = None
self.single_pdf_setting = False
self.add_info_in_pdf_setting = False  # Reset setting

# Restore enabled states for options that PDF mode might have disabled

if hasattr(self, 'use_multithreading_checkbox'):
self.use_multithreading_checkbox.setEnabled(True)
self._update_multithreading_for_date_mode()  # Re-check manga logic
@@ -4183,9 +4193,12 @@ class DownloaderApp (QWidget ):

self.cancellation_message_logged_this_session = False

# START of the new refactored block
service, id1, id2 = extract_post_info(api_url)

# [NEW] Get proxy settings immediately
ui_settings = self._get_current_ui_settings_as_dict()
proxies_to_use = ui_settings.get('proxies')

specialized_thread = create_downloader_thread(
main_app=self,
api_url=api_url,
@@ -4208,15 +4221,15 @@ class DownloaderApp (QWidget ):

self.set_ui_enabled(False)
self.download_thread = specialized_thread

# [NEW] Inject proxies into the thread manually
if hasattr(self.download_thread, 'proxies'):
self.download_thread.proxies = proxies_to_use

self._connect_specialized_thread_signals(self.download_thread)
self.download_thread.start()
self._update_button_states_and_connections()
return True
# END of the new refactored block

if not service or not id1:
QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
return False

user_id, post_id_from_url = id1, id2

@@ -5101,8 +5114,54 @@ class DownloaderApp (QWidget ):
self.is_ready_to_download_batch_update = True

self.progress_label.setText(f"Found {total_posts} new posts. Ready to download.")
self.set_ui_enabled(True)  # Re-enable UI
self._update_button_states_and_connections()  # Update buttons to "Start Download (X)"
self.set_ui_enabled(True)  # Re-enable UI first

# [NEW] Apply Read-Only mode if it was selected in the dialog
if getattr(self, 'update_settings_read_only_mode', False):
self._set_inputs_read_only(True)

self._update_button_states_and_connections()

def _show_update_check_dialog(self):
"""Shows the Update Check Dialog and applies Load/Edit logic."""
if self.is_restore_pending:
QMessageBox.warning(self, "Restore Pending", "Please restore or discard the previous session first.")
return

dialog = UpdateCheckDialog(self.user_data_path, self, self)

if dialog.exec_() == QDialog.Accepted:
profiles = dialog.get_selected_profiles()
if not profiles: return

self.active_update_profiles_list = profiles

# --- LOGIC START ---

# 1. ALWAYS Load Settings if appropriate (e.g. Single Profile selected)
# The dialog now returns True for should_load_into_ui() if count == 1, regardless of checkbox
if dialog.should_load_into_ui():
# Load settings from the FIRST selected profile
first_profile_settings = profiles[0]['data'].get('settings', {})
self._load_ui_from_settings_dict(first_profile_settings)

# 2. Check if Editing is Allowed
if dialog.should_enable_editing():
self.update_settings_read_only_mode = False
self.override_update_profile_settings = True  # Use UI values for download
self.log_signal.emit("ℹ️ Settings loaded in EDITABLE mode.")
else:
self.update_settings_read_only_mode = True
self.override_update_profile_settings = False  # Use original JSON values (safer for Read-Only)
self.log_signal.emit("ℹ️ Settings loaded in READ-ONLY mode.")
else:
# Multiple profiles or load disabled
self.update_settings_read_only_mode = False
self.override_update_profile_settings = False

# --- LOGIC END ---

self._start_batch_update_check(self.active_update_profiles_list)

def _start_download_of_batch_update(self):
"""
@@ -5454,8 +5513,13 @@ class DownloaderApp (QWidget ):
global PostProcessorWorker, download_from_api

worker_args_template = fetcher_args['worker_args_template']
logger_func = lambda msg: self.log_signal.emit(f"[Fetcher] {msg}")

def logger_func(msg):
try:
import sip
if not sip.isdeleted(self):
self.log_signal.emit(f"[Fetcher] {msg}")
except (RuntimeError, ImportError, AttributeError):
pass  # Window is gone, ignore logging
try:
# This single call now handles all fetching logic, including 'Fetch First'.
post_generator = download_from_api(
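The proxy hand-off in the main window relies on duck typing: after creating a specialized downloader thread, the app only assigns the setting if the thread declares a proxies attribute, so older thread classes without that attribute simply stay on a direct connection. A compact illustration of that pattern; the class and function names here are placeholders, not the repository's:

class ExampleDownloadThread:
    def __init__(self):
        self.proxies = None  # injected later by the main window, if configured

def inject_proxies(thread, proxies_to_use):
    # Only threads that expose a 'proxies' attribute receive the setting.
    if hasattr(thread, 'proxies'):
        thread.proxies = proxies_to_use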