Mirror of https://github.com/Yuvi9587/Kemono-Downloader.git
Synced 2025-12-29 16:14:44 +00:00

commit 77bd428b91
parent 4bf57eb752
@@ -5,7 +5,8 @@ import time
 import random
 from urllib.parse import urlparse
 
-def get_chapter_list(scraper, series_url, logger_func):
+# 1. Update arguments to accept proxies=None
+def get_chapter_list(scraper, series_url, logger_func, proxies=None):
     """
     Checks if a URL is a series page and returns a list of all chapter URLs if it is.
     Relies on a passed-in scraper session for connection.
@@ -16,9 +17,13 @@ def get_chapter_list(scraper, series_url, logger_func):
     response = None
     max_retries = 8
 
+    # 2. Define smart timeout logic
+    req_timeout = (30, 120) if proxies else 30
+
     for attempt in range(max_retries):
         try:
-            response = scraper.get(series_url, headers=headers, timeout=30)
+            # 3. Add proxies, verify=False, and the new timeout
+            response = scraper.get(series_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)
             response.raise_for_status()
             logger_func(f" [AllComic] Successfully connected to series page on attempt {attempt + 1}.")
             break
@@ -53,7 +58,8 @@ def get_chapter_list(scraper, series_url, logger_func):
         logger_func(f" [AllComic] ❌ Error parsing chapters after successful connection: {e}")
         return []
 
-def fetch_chapter_data(scraper, chapter_url, logger_func):
+# 4. Update arguments here too
+def fetch_chapter_data(scraper, chapter_url, logger_func, proxies=None):
     """
     Fetches the comic title, chapter title, and image URLs for a single chapter page.
     Relies on a passed-in scraper session for connection.
@@ -64,9 +70,14 @@ def fetch_chapter_data(scraper, chapter_url, logger_func):
 
     response = None
     max_retries = 8
+
+    # 5. Define smart timeout logic again
+    req_timeout = (30, 120) if proxies else 30
+
     for attempt in range(max_retries):
         try:
-            response = scraper.get(chapter_url, headers=headers, timeout=30)
+            # 6. Add proxies, verify=False, and timeout
+            response = scraper.get(chapter_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)
             response.raise_for_status()
             break
         except requests.RequestException as e:
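Both functions now share the same timeout idiom: requests accepts either a single number, applied to the connect and read phases alike, or a (connect, read) tuple that sets them separately. A minimal sketch of the difference; the URL and proxy address are placeholders, not values from this commit:

    import requests

    proxies = {"http": "http://127.0.0.1:8080", "https": "http://127.0.0.1:8080"}  # hypothetical proxy

    # A bare number: 30 s for connecting AND for each read.
    requests.get("https://example.com", timeout=30)

    # A tuple: 30 s to establish the connection, 120 s per read,
    # slack enough for a slow proxy tunnel without hanging forever.
    requests.get("https://example.com", timeout=(30, 120), proxies=proxies, verify=False)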
@@ -40,8 +40,11 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
         log_message += f" (Attempt {attempt + 1}/{max_retries})"
     logger(log_message)
 
+    request_timeout = (30, 120) if proxies else (15, 60)
+
     try:
-        with requests.get(paginated_url, headers=headers, timeout=(15, 60), cookies=cookies_dict, proxies=proxies) as response:
+        with requests.get(paginated_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False) as response:
 
             response.raise_for_status()
             response.encoding = 'utf-8'
             return response.json()
@@ -92,7 +95,11 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
     scraper = None
     try:
         scraper = cloudscraper.create_scraper()
-        response = scraper.get(post_api_url, headers=headers, timeout=(15, 300), cookies=cookies_dict, proxies=proxies)
+        # Keep the 300s read timeout for both, but increase connect timeout for proxies
+        request_timeout = (30, 300) if proxies else (15, 300)
+
+        response = scraper.get(post_api_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False)
+
         response.raise_for_status()
 
         full_post_data = response.json()
@@ -120,7 +127,9 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
     logger(f" Fetching comments: {comments_api_url}")
 
     try:
-        with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict, proxies=proxies) as response:
+        request_timeout = (30, 60) if proxies else (10, 30)
+
+        with requests.get(comments_api_url, headers=headers, timeout=request_timeout, cookies=cookies_dict, proxies=proxies, verify=False) as response:
             response.raise_for_status()
             response.encoding = 'utf-8'
             return response.json()
@@ -180,7 +189,9 @@ def download_from_api(
     direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
     logger(f" Attempting direct fetch for target post: {direct_post_api_url}")
     try:
-        with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api, proxies=proxies) as direct_response:
+        request_timeout = (30, 60) if proxies else (10, 30)
+
+        with requests.get(direct_post_api_url, headers=headers, timeout=request_timeout, cookies=cookies_for_api, proxies=proxies, verify=False) as direct_response:
             direct_response.raise_for_status()
             direct_response.encoding = 'utf-8'
             direct_post_data = direct_response.json()
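Every request in this commit now passes verify=False so that intercepting proxies with self-signed certificates do not abort the job. Stock urllib3 emits an InsecureRequestWarning for each such request; a sketch of how calling code could silence it (this suppression is not shown in the diff, so treat it as an assumption about the surrounding code):

    import requests
    import urllib3

    # verify=False skips TLS certificate validation; urllib3 warns once per request.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    resp = requests.get("https://example.com", verify=False, timeout=(15, 60))
    print(resp.status_code)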
@@ -11,9 +11,18 @@ class DeviantArtClient:
     CLIENT_SECRET = "76b08c69cfb27f26d6161f9ab6d061a1"
     BASE_API = "https://www.deviantart.com/api/v1/oauth2"
 
-    def __init__(self, logger_func=print):
+    # 1. Accept proxies in init
+    def __init__(self, logger_func=print, proxies=None):
         self.session = requests.Session()
-        # Headers matching 1.py (Firefox)
+
+        # 2. Configure Session with Proxy & SSL settings immediately
+        if proxies:
+            self.session.proxies.update(proxies)
+            self.session.verify = False  # Ignore SSL for proxies
+            self.proxies_enabled = True
+        else:
+            self.proxies_enabled = False
+
         self.session.headers.update({
             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0",
             "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
@@ -41,7 +50,10 @@ class DeviantArtClient:
             "client_id": self.CLIENT_ID,
             "client_secret": self.CLIENT_SECRET
         }
-        resp = self.session.post(url, data=data, timeout=10)
+        # 3. Smart timeout (longer if proxy)
+        req_timeout = 30 if self.proxies_enabled else 10
+
+        resp = self.session.post(url, data=data, timeout=req_timeout)
         resp.raise_for_status()
         data = resp.json()
         self.access_token = data.get("access_token")
@@ -63,18 +75,22 @@ class DeviantArtClient:
         retries = 0
         max_retries = 4
         backoff_delay = 2
 
+        # 4. Smart timeout
+        req_timeout = 30 if self.proxies_enabled else 20
+
         while True:
             try:
-                resp = self.session.get(url, params=params, timeout=20)
+                resp = self.session.get(url, params=params, timeout=req_timeout)
 
-                # 429: Rate Limit (Retry infinitely like 1.py)
+                # 429: Rate Limit
                 if resp.status_code == 429:
                     retry_after = resp.headers.get('Retry-After')
                     if retry_after:
-                        sleep_time = int(retry_after) + 1
+                        sleep_time = int(retry_after) + 2  # Add buffer
                     else:
-                        sleep_time = 5  # Default sleep from 1.py
+                        # 5. Increase default wait time for 429s
+                        sleep_time = 15
 
                     self._log_once(sleep_time, f" [DeviantArt] ⚠️ Rate limit (429). Sleeping {sleep_time}s...")
                     time.sleep(sleep_time)
@@ -90,7 +106,7 @@ class DeviantArtClient:
                         raise Exception("Failed to refresh token")
 
                 if 400 <= resp.status_code < 500:
-                    resp.raise_for_status()  # This raises immediately, breaking the loop
+                    resp.raise_for_status()
 
                 if 500 <= resp.status_code < 600:
                     resp.raise_for_status()
@@ -105,12 +121,9 @@ class DeviantArtClient:
             except requests.exceptions.HTTPError as e:
                 if e.response is not None and 400 <= e.response.status_code < 500:
                     raise e
-
-                # Otherwise fall through to general retry logic (for 5xx)
                 pass
 
             except requests.exceptions.RequestException as e:
-                # Network errors / 5xx errors -> Retry
                 if retries < max_retries:
                     self._log_once("conn_error", f" [DeviantArt] Connection error: {e}. Retrying...")
                     time.sleep(backoff_delay)
@@ -131,7 +144,8 @@ class DeviantArtClient:
     def get_deviation_uuid(self, url):
         """Scrapes the deviation page to find the UUID."""
         try:
-            resp = self.session.get(url, timeout=15)
+            req_timeout = 30 if self.proxies_enabled else 15
+            resp = self.session.get(url, timeout=req_timeout)
             match = re.search(r'"deviationUuid":"([^"]+)"', resp.text)
             if match:
                 return match.group(1)
@@ -144,17 +158,13 @@ class DeviantArtClient:
 
     def get_deviation_content(self, uuid):
         """Fetches download info."""
-        # 1. Try high-res download endpoint
         try:
            data = self._api_call(f"/deviation/download/{uuid}")
            if 'src' in data:
                return data
        except:
-            # If 400/403 (Not downloadable), we fail silently here
-            # and proceed to step 2 (Metadata fallback)
            pass
 
-        # 2. Fallback to standard content
        try:
            meta = self._api_call(f"/deviation/{uuid}")
            if 'content' in meta:
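The 429 branch above honours the standard Retry-After header and otherwise falls back to a flat 15 s wait. The same pattern reduced to a standalone helper; the URL is a placeholder, and the sketch assumes Retry-After arrives as seconds rather than an HTTP date:

    import time
    import requests

    def get_with_rate_limit(session: requests.Session, url: str) -> requests.Response:
        """Keep retrying on HTTP 429, sleeping as long as the server asks."""
        while True:
            resp = session.get(url, timeout=30)
            if resp.status_code != 429:
                return resp
            retry_after = resp.headers.get("Retry-After")
            time.sleep(int(retry_after) + 2 if retry_after else 15)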
@@ -1,31 +1,35 @@
 import requests
-import cloudscraper
 import json
 
-def fetch_nhentai_gallery(gallery_id, logger=print):
+# 1. Update arguments to accept proxies=None
+def fetch_nhentai_gallery(gallery_id, logger=print, proxies=None):
     """
-    Fetches the metadata for a single nhentai gallery using cloudscraper to bypass Cloudflare.
-
-    Args:
-        gallery_id (str or int): The ID of the nhentai gallery.
-        logger (function): A function to log progress and error messages.
-
-    Returns:
-        dict: A dictionary containing the gallery's metadata if successful, otherwise None.
+    Fetches the metadata for a single nhentai gallery.
+    Switched to standard requests to support proxies with self-signed certs.
     """
     api_url = f"https://nhentai.net/api/gallery/{gallery_id}"
 
-    scraper = cloudscraper.create_scraper()
+    # 2. Use a real User-Agent to avoid immediate blocking
+    headers = {
+        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
+    }
+
     logger(f" Fetching nhentai gallery metadata from: {api_url}")
 
+    # 3. Smart timeout logic
+    req_timeout = (30, 120) if proxies else 20
+
     try:
-        # Use the scraper to make the GET request
-        response = scraper.get(api_url, timeout=20)
+        # 4. Use requests.get with proxies, verify=False, and timeout
+        response = requests.get(api_url, headers=headers, timeout=req_timeout, proxies=proxies, verify=False)
 
         if response.status_code == 404:
             logger(f" ❌ Gallery not found (404): ID {gallery_id}")
             return None
+        elif response.status_code == 403:
+            logger(f" ❌ Access Denied (403): Cloudflare blocked the request. Try a different proxy or User-Agent.")
+            return None
 
         response.raise_for_status()
 
@@ -36,9 +40,9 @@ def fetch_nhentai_gallery(gallery_id, logger=print):
             gallery_data['pages'] = gallery_data.pop('images')['pages']
             return gallery_data
         else:
-            logger(" ❌ API response is missing essential keys (id, media_id, or images).")
+            logger(" ❌ API response is missing essential keys (id, media_id, images).")
             return None
 
     except Exception as e:
-        logger(f" ❌ An error occurred while fetching gallery {gallery_id}: {e}")
+        logger(f" ❌ Error fetching nhentai metadata: {e}")
        return None
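Since fetch_nhentai_gallery now takes a plain requests-style proxies mapping, callers can route it through any HTTP or SOCKS proxy. A hypothetical call (the gallery ID and proxy address are illustrative only; SOCKS schemes require the requests[socks] extra):

    # Assumes fetch_nhentai_gallery is imported from this module.
    proxies = {
        "http": "socks5://127.0.0.1:1080",   # hypothetical local SOCKS5 proxy
        "https": "socks5://127.0.0.1:1080",
    }
    gallery = fetch_nhentai_gallery(12345, logger=print, proxies=proxies)
    if gallery:
        print(gallery["id"], gallery["media_id"], len(gallery["pages"]))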
@@ -263,7 +263,7 @@ class PostProcessorWorker:
             new_url = parsed_url._replace(netloc=new_domain).geturl()
 
             try:
-                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies) as resp:
+                with requests.head(new_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=5, allow_redirects=True, proxies=self.proxies, verify=False) as resp:
                     if resp.status_code == 200:
                         return new_url
             except requests.RequestException:
@@ -338,7 +338,7 @@ class PostProcessorWorker:
         api_original_filename_for_size_check = file_info.get('_original_name_for_log', file_info.get('name'))
         try:
             # Use a stream=True HEAD request to get headers without downloading the body
-            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies) as head_response:
+            with requests.head(file_url, headers=file_download_headers, timeout=15, cookies=cookies_to_use_for_file, allow_redirects=True, proxies=self.proxies, verify=False) as head_response:
 
                 head_response.raise_for_status()
                 content_length = head_response.headers.get('Content-Length')
@@ -673,7 +673,7 @@ class PostProcessorWorker:
 
         current_url_to_try = file_url
 
-        response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
+        response = requests.get(current_url_to_try, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies, verify=False)
 
         if response.status_code == 403 and ('kemono.' in current_url_to_try or 'coomer.' in current_url_to_try):
             self.logger(f" ⚠️ Got 403 Forbidden for '{api_original_filename}'. Attempting subdomain rotation...")
@@ -682,7 +682,7 @@ class PostProcessorWorker:
                 self.logger(f" Retrying with new URL: {new_url}")
                 file_url = new_url
                 response.close()  # Close the old response
-                response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies)
+                response = requests.get(new_url, headers=file_download_headers, timeout=(30, 300), stream=True, cookies=cookies_to_use_for_file, proxies=self.proxies, verify=False)
         response.raise_for_status()
 
         # --- REVISED AND MOVED SIZE CHECK LOGIC ---
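The worker issues a HEAD request before downloading so Content-Length can be compared without pulling the body. The same idea in isolation, as a minimal helper (the signature is illustrative, not the worker's actual API):

    from typing import Optional
    import requests

    def remote_size(url: str, proxies: Optional[dict] = None) -> Optional[int]:
        """Return the Content-Length a server reports, without downloading the body."""
        with requests.head(url, timeout=15, allow_redirects=True,
                           proxies=proxies, verify=False) as resp:
            resp.raise_for_status()
            length = resp.headers.get("Content-Length")
            return int(length) if length is not None else None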
@@ -19,12 +19,14 @@ class AllcomicDownloadThread(QThread):
     finished_signal = pyqtSignal(int, int, bool)
     overall_progress_signal = pyqtSignal(int, int)
 
-    def __init__(self, url, output_dir, parent=None):
+    # 1. Update __init__ to accept proxies
+    def __init__(self, url, output_dir, parent=None, proxies=None):
         super().__init__(parent)
         self.comic_url = url
         self.output_dir = output_dir
         self.is_cancelled = False
         self.pause_event = parent.pause_event if hasattr(parent, 'pause_event') else threading.Event()
+        self.proxies = proxies  # Store the proxies
 
     def _check_pause(self):
         if self.is_cancelled: return True
@@ -40,13 +42,19 @@ class AllcomicDownloadThread(QThread):
         grand_total_dl = 0
         grand_total_skip = 0
 
-        # Create the scraper session ONCE for the entire job
-        scraper = cloudscraper.create_scraper(
-            browser={'browser': 'firefox', 'platform': 'windows', 'desktop': True}
-        )
+        if self.proxies:
+            self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
+        else:
+            self.progress_signal.emit(" 🌍 Network: Direct Connection (No Proxy)")
 
-        # Pass the scraper to the function
-        chapters_to_download = allcomic_get_list(scraper, self.comic_url, self.progress_signal.emit)
+        scraper = requests.Session()
+        scraper.headers.update({
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
+        })
+
+        # 2. Pass self.proxies to get_chapter_list
+        chapters_to_download = allcomic_get_list(scraper, self.comic_url, self.progress_signal.emit, proxies=self.proxies)
 
         if not chapters_to_download:
             chapters_to_download = [self.comic_url]
@@ -57,8 +65,9 @@ class AllcomicDownloadThread(QThread):
             if self._check_pause(): break
 
             self.progress_signal.emit(f"\n-- Processing Chapter {chapter_idx + 1}/{len(chapters_to_download)} --")
-            # Pass the scraper to the function
-            comic_title, chapter_title, image_urls = allcomic_fetch_data(scraper, chapter_url, self.progress_signal.emit)
+            # 3. Pass self.proxies to fetch_chapter_data
+            comic_title, chapter_title, image_urls = allcomic_fetch_data(scraper, chapter_url, self.progress_signal.emit, proxies=self.proxies)
 
             if not image_urls:
                 self.progress_signal.emit(f"❌ Failed to get data for chapter. Skipping.")
@@ -80,6 +89,9 @@ class AllcomicDownloadThread(QThread):
             self.overall_progress_signal.emit(total_files_in_chapter, 0)
             headers = {'Referer': chapter_url}
 
+            # 4. Define smart timeout for images
+            img_timeout = (30, 120) if self.proxies else 60
+
             for i, img_url in enumerate(image_urls):
                 if self._check_pause(): break
 
@@ -97,8 +109,9 @@ class AllcomicDownloadThread(QThread):
                     if self._check_pause(): break
                     try:
                         self.progress_signal.emit(f" Downloading ({i+1}/{total_files_in_chapter}): '{filename}' (Attempt {attempt + 1})...")
-                        # Use the persistent scraper object
-                        response = scraper.get(img_url, stream=True, headers=headers, timeout=60)
+                        # 5. Use proxies, verify=False, and new timeout
+                        response = scraper.get(img_url, stream=True, headers=headers, timeout=img_timeout, proxies=self.proxies, verify=False)
                         response.raise_for_status()
 
                         with open(filepath, 'wb') as f:
@@ -125,7 +138,7 @@ class AllcomicDownloadThread(QThread):
                     grand_total_skip += 1
 
                 self.overall_progress_signal.emit(total_files_in_chapter, i + 1)
-                time.sleep(0.5)  # Increased delay between images for this site
+                time.sleep(0.5)
 
                 if self._check_pause(): break
 
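The thread replaces cloudscraper with one requests.Session shared across every chapter and image, so the whole job reuses a single connection pool and one set of spoofed headers. The pattern in isolation (URLs are placeholders):

    import requests

    session = requests.Session()
    session.headers.update({"User-Agent": "Mozilla/5.0"})  # headers set once for the job

    # Connections to the same host are pooled and reused,
    # avoiding a fresh TLS handshake per image.
    for url in ["https://example.com/a.jpg", "https://example.com/b.jpg"]:
        with session.get(url, stream=True, timeout=(30, 120)) as resp:
            resp.raise_for_status()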
@@ -2,8 +2,8 @@ import os
 import time
 import requests
 import re
+import random  # Needed for random delays
 from datetime import datetime
-# REMOVED: ThreadPoolExecutor, wait (Not needed for sequential speed)
 from PyQt5.QtCore import QThread, pyqtSignal
 from ...core.deviantart_client import DeviantArtClient
 from ...utils.file_utils import clean_folder_name
@@ -14,24 +14,29 @@ class DeviantArtDownloadThread(QThread):
     overall_progress_signal = pyqtSignal(int, int)
     finished_signal = pyqtSignal(int, int, bool, list)
 
-    def __init__(self, url, output_dir, pause_event, cancellation_event, parent=None):
+    # 1. Accept proxies in init
+    def __init__(self, url, output_dir, pause_event, cancellation_event, parent=None, proxies=None):
         super().__init__(parent)
         self.url = url
         self.output_dir = output_dir
         self.pause_event = pause_event
         self.cancellation_event = cancellation_event
+        self.proxies = proxies  # Store proxies
-        # Pass logger to client
-        self.client = DeviantArtClient(logger_func=self.progress_signal.emit)
 
         self.parent_app = parent
         self.download_count = 0
         self.skip_count = 0
 
     def run(self):
+        self.client = DeviantArtClient(logger_func=self.progress_signal.emit, proxies=self.proxies)
+
+        if self.proxies:
+            self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
+        else:
+            self.progress_signal.emit(" 🌍 Network: Direct Connection")
+
         self.progress_signal.emit("=" * 40)
         self.progress_signal.emit(f"🚀 Starting DeviantArt download for: {self.url}")
-        self.progress_signal.emit(f" ℹ️ Mode: High-Speed Sequential (Matches 1.py)")
 
         try:
             if not self.client.authenticate():
@@ -87,7 +92,6 @@ class DeviantArtDownloadThread(QThread):
         if not os.path.exists(base_folder):
             os.makedirs(base_folder, exist_ok=True)
 
-        # --- OPTIMIZED LOOP (Matches 1.py structure) ---
         while has_more:
             if self._check_pause_cancel(): break
 
@@ -98,12 +102,14 @@ class DeviantArtDownloadThread(QThread):
 
             if not results: break
 
-            # DIRECT LOOP - No ThreadPoolExecutor overhead
             for deviation in results:
                 if self._check_pause_cancel(): break
                 self._process_deviation_task(deviation, base_folder)
 
+                # 4. FIX 429: Add a small random delay between items
+                # This prevents hammering the API 24 times in a single second.
+                time.sleep(random.uniform(0.5, 1.2))
+
-            # Be nice to API (1 second sleep per batch of 24)
             time.sleep(1)
 
     def _process_deviation_task(self, deviation, base_folder):
@@ -113,7 +119,6 @@ class DeviantArtDownloadThread(QThread):
         title = deviation.get('title', 'Unknown')
 
         try:
-            # Try to get content (Handles fallback internally now)
             content = self.client.get_deviation_content(dev_id)
             if content:
                 self._download_file(content['src'], deviation, override_dir=base_folder)
@@ -168,7 +173,6 @@ class DeviantArtDownloadThread(QThread):
             final_filename = f"{clean_folder_name(new_name)}{ext}"
 
         except Exception as e:
-            # Reduced logging verbosity slightly for speed
             pass
 
         save_dir = override_dir if override_dir else self.output_dir
@@ -185,7 +189,11 @@ class DeviantArtDownloadThread(QThread):
         try:
             self.progress_signal.emit(f" ⬇️ Downloading: {final_filename}")
 
-            with requests.get(file_url, stream=True, timeout=30) as r:
+            # 5. Determine smart timeout for files
+            timeout_val = (30, 120) if self.proxies else 30
+
+            # 6. Use proxies and verify=False
+            with requests.get(file_url, stream=True, timeout=timeout_val, proxies=self.proxies, verify=False) as r:
                 r.raise_for_status()
 
                 with open(filepath, 'wb') as f:
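The per-item random.uniform(0.5, 1.2) pause adds jitter so a 24-item API batch is no longer fetched inside a single second, which is what was tripping the 429 limiter. A tiny sketch of the effect:

    import random
    import time

    # Roughly 0.5-1.2 s between items: a 24-item batch now spans
    # about 12-29 seconds instead of landing all at once.
    for item in range(3):  # 3 stands in for a 24-item batch
        delay = random.uniform(0.5, 1.2)
        print(f"item {item}: sleeping {delay:.2f}s")
        time.sleep(delay)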
@@ -1,6 +1,6 @@
 import os
 import time
-import cloudscraper
+import requests
 from PyQt5.QtCore import QThread, pyqtSignal
 
 from ...utils.file_utils import clean_folder_name
@@ -17,68 +17,78 @@ class NhentaiDownloadThread(QThread):
 
     EXTENSION_MAP = {'j': 'jpg', 'p': 'png', 'g': 'gif', 'w': 'webp' }
 
+    # 1. Update init to initialize self.proxies
     def __init__(self, gallery_data, output_dir, parent=None):
         super().__init__(parent)
         self.gallery_data = gallery_data
         self.output_dir = output_dir
         self.is_cancelled = False
+        self.proxies = None  # Placeholder, will be injected by main_window
 
     def run(self):
+        # 2. Log Proxy Usage
+        if self.proxies:
+            self.progress_signal.emit(f" 🌍 Network: Using Proxy {self.proxies}")
+        else:
+            self.progress_signal.emit(" 🌍 Network: Direct Connection (No Proxy)")
+
         title = self.gallery_data.get("title", {}).get("english", f"gallery_{self.gallery_data.get('id')}")
         gallery_id = self.gallery_data.get("id")
         media_id = self.gallery_data.get("media_id")
         pages_info = self.gallery_data.get("pages", [])
 
         folder_name = clean_folder_name(title)
-        gallery_path = os.path.join(self.output_dir, folder_name)
+        save_path = os.path.join(self.output_dir, folder_name)
 
         try:
-            os.makedirs(gallery_path, exist_ok=True)
-        except OSError as e:
-            self.progress_signal.emit(f"❌ Critical error creating directory: {e}")
+            os.makedirs(save_path, exist_ok=True)
+            self.progress_signal.emit(f" Saving to: {folder_name}")
+        except Exception as e:
+            self.progress_signal.emit(f" ❌ Error creating directory: {e}")
             self.finished_signal.emit(0, len(pages_info), False)
             return
 
-        self.progress_signal.emit(f"⬇️ Downloading '{title}' to folder '{folder_name}'...")
-
-        scraper = cloudscraper.create_scraper()
         download_count = 0
         skip_count = 0
+        total_pages = len(pages_info)
+
+        # 3. Use requests.Session instead of cloudscraper
+        scraper = requests.Session()
+
+        # 4. Smart timeout logic
+        img_timeout = (30, 120) if self.proxies else 60
+
         for i, page_data in enumerate(pages_info):
-            if self.is_cancelled:
-                break
+            if self.is_cancelled: break
 
-            page_num = i + 1
-
-            ext_char = page_data.get('t', 'j')
-            extension = self.EXTENSION_MAP.get(ext_char, 'jpg')
-
-            relative_path = f"/galleries/{media_id}/{page_num}.{extension}"
-
-            local_filename = f"{page_num:03d}.{extension}"
-            filepath = os.path.join(gallery_path, local_filename)
+            file_ext = self.EXTENSION_MAP.get(page_data.get('t'), 'jpg')
+            local_filename = f"{i+1:03d}.{file_ext}"
+            filepath = os.path.join(save_path, local_filename)
 
             if os.path.exists(filepath):
-                self.progress_signal.emit(f" -> Skip (Exists): {local_filename}")
+                self.progress_signal.emit(f" Skipping {local_filename} (already exists).")
                 skip_count += 1
                 continue
 
             download_successful = False
 
+            # Try servers until one works
             for server in self.IMAGE_SERVERS:
-                if self.is_cancelled:
-                    break
+                if self.is_cancelled: break
 
-                full_url = f"{server}{relative_path}"
+                # Construct URL: server/galleries/media_id/page_num.ext
+                full_url = f"{server}/galleries/{media_id}/{i+1}.{file_ext}"
 
                 try:
-                    self.progress_signal.emit(f" Downloading page {page_num}/{len(pages_info)} from {server} ...")
+                    self.progress_signal.emit(f" Downloading page {i+1}/{total_pages}...")
 
                     headers = {
-                        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36',
+                        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
                         'Referer': f'https://nhentai.net/g/{gallery_id}/'
                     }
 
-                    response = scraper.get(full_url, headers=headers, timeout=60, stream=True)
+                    # 5. Add proxies, verify=False, and timeout
+                    response = scraper.get(full_url, headers=headers, timeout=img_timeout, stream=True, proxies=self.proxies, verify=False)
 
                     if response.status_code == 200:
                         with open(filepath, 'wb') as f:
@@ -86,12 +96,14 @@ class NhentaiDownloadThread(QThread):
                                 f.write(chunk)
                         download_count += 1
                         download_successful = True
-                        break
+                        break  # Stop trying servers
                     else:
-                        self.progress_signal.emit(f" -> {server} returned status {response.status_code}. Trying next server...")
+                        # self.progress_signal.emit(f" -> {server} returned status {response.status_code}...")
+                        pass
 
                 except Exception as e:
-                    self.progress_signal.emit(f" -> {server} failed to connect or timed out: {e}. Trying next server...")
+                    # self.progress_signal.emit(f" -> {server} failed: {e}")
+                    pass
 
             if not download_successful:
                 self.progress_signal.emit(f" ❌ Failed to download {local_filename} from all servers.")
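Each page is attempted against every mirror in IMAGE_SERVERS until one returns 200, so a single dead host no longer fails the page. The failover loop reduced to a helper; the mirror list and path scheme are placeholders mirroring the code above:

    import requests

    def fetch_first_working(session: requests.Session, servers, path, **kwargs):
        """Return the first HTTP 200 response among mirrors, or None if all fail."""
        for server in servers:
            try:
                resp = session.get(f"{server}{path}", **kwargs)
                if resp.status_code == 200:
                    return resp
            except requests.RequestException:
                continue  # connection error or timeout: try the next mirror
        return None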
@@ -73,7 +73,6 @@ class HelpGuideDialog(QDialog):
                 <li>fap-nation.org/</li>
                 <li>Discord</li>
                 <li>allporncomic.com</li>
-                <li>allporncomic.com</li>
                 <li>hentai2read.com</li>
                 <li>mangadex.org</li>
                 <li>Simpcity</li>
@@ -279,6 +278,46 @@ class HelpGuideDialog(QDialog):
             </ul>
             """),
 
+            ("Add to Queue",
+             """
+            <p>This feature allows you to queue up multiple distinct downloads with different settings and run them all sequentially.</p>
+
+            <h3 style='color: #E0E0E0;'>Step 1: Prepare the Download</h3>
+            <p>Before clicking add, configure the download exactly how you want it processed for this specific link:</p>
+            <ul>
+                <li><b>Select Directory:</b> Choose where you want the files to go.</li>
+                <li><b>Configure Options:</b> Check/uncheck boxes (e.g., "Separate Folders", "Use Cookie", "Manga Mode").</li>
+                <li><b>Paste URL:</b> Enter the link for the creator or post you want to download.</li>
+            </ul>
+
+            <h3 style='color: #E0E0E0;'>Step 2: Add to Queue</h3>
+            <ol>
+                <li>Click the <b>Add to Queue</b> button (located near the Start Download).</li>
+                <li><b>Confirmation:</b> You will see a popup message and the log will print <code>✅ Job added to queue</code>.</li>
+                <li>The URL box will clear, allowing you to immediately paste the next link.</li>
+            </ol>
+
+            <h3 style='color: #E0E0E0;'>Step 3: Repeat & Start</h3>
+            <p>You can repeat steps 1 and 2 as many times as you like. You can even change settings (like the download folder) between adds; the queue remembers the specific settings for each individual link.</p>
+            <p>To start processing the queue:</p>
+            <ol>
+                <li>In the Link Input box, type exactly: <code>start queue</code></li>
+                <li>The main "Start Download" button will change to <b>"🚀 Execute Queue"</b>.</li>
+                <li>Click that button to begin.</li>
+            </ol>
+
+            <h3 style='color: #E0E0E0;'>Processing Behavior</h3>
+            <p>Once started, the app will lock the UI, load the first job, download it until finished, and automatically move to the next until the queue is empty.</p>
+
+            <h3 style='color: #E0E0E0;'>Special Case: Creator Selection Popup</h3>
+            <p>If you use the <b>Creator Selection</b> popup (the 🎨 button):</p>
+            <ul>
+                <li>Select multiple creators in that popup and click <b>"Queue Selected"</b>.</li>
+                <li>The app internally adds them to a temporary list.</li>
+                <li>When you click the main <b>"Add to Queue"</b> button on the main window, it will detect that list and automatically bulk-create job files for all the creators you selected.</li>
+            </ul>
+            """),
+
             ("Special Commands",
              """
             <p>You can add special commands to the <b>"Filter by Character(s)"</b> input field to change download behavior for a single task. Commands are keywords wrapped in square brackets <code>[]</code>.</p>
@@ -450,7 +489,16 @@ class HelpGuideDialog(QDialog):
             ("Utility & Advanced Options",
              """
             <p>These features provide advanced control over your downloads, sessions, and application settings.</p>
+
+            <h3 style='color: #E0E0E0;'>🛡️ Proxy Support</h3>
+            <p>You can now configure a proxy to bypass region blocks or ISP restrictions (e.g., for AllComic or Nhentai).</p>
+            <p>Go to <b>Settings ⚙️ > Proxy Tab</b> to set it up:</p>
+            <ul>
+                <li><b>Protocols:</b> Full support for <b>HTTP</b>, <b>SOCKS4</b>, and <b>SOCKS5</b>.</li>
+                <li><b>Authentication:</b> Supports username and password for private proxies.</li>
+                <li><b>Global Effect:</b> Once enabled, all app connections (including API fetches and file downloads) will route through this proxy.</li>
+            </ul>
 
             <h3 style='color: #E0E0E0;'>Use Cookie</h3>
             <p>This is essential for downloading from sites that require a login (like <b>SimpCity</b> or accessing your <b>favorites</b> on Kemono/Coomer). You can either:</p>
             <ul>
@@ -484,6 +532,7 @@ class HelpGuideDialog(QDialog):
                     <li>Toggle <b>"Fetch First"</b> (to find all posts from a creator before starting any downloads).</li>
                 </ul>
             </li>
+            <li><b>Proxy Tab:</b> Configure HTTP/SOCKS proxies and authentication.</li>
             <li><b>Updates Tab:</b> Check for and install new application updates.</li>
         </ul>
 
@@ -605,7 +654,8 @@ class HelpGuideDialog(QDialog):
         main_layout.addLayout(content_layout, 1)
 
         self.nav_list = QListWidget()
-        self.nav_list.setFixedWidth(int(220 * scale))
+        # Increased width to prevent scrollbar overlap
+        self.nav_list.setFixedWidth(int(280 * scale))
         # Styles are now set in the __init__ method
         content_layout.addWidget(self.nav_list)
 
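The help text above corresponds to the requests proxies mapping threaded through this commit. A hypothetical configuration covering the documented protocols and authentication (host, port, and credentials are placeholders; SOCKS schemes need pip install requests[socks]):

    # HTTP proxy with username/password authentication:
    proxies = {
        "http": "http://user:password@proxy.example.com:8080",
        "https": "http://user:password@proxy.example.com:8080",
    }

    # The same shape works for SOCKS5 (or socks4://) proxies:
    proxies = {
        "http": "socks5://user:password@proxy.example.com:1080",
        "https": "socks5://user:password@proxy.example.com:1080",
    }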
@@ -28,8 +28,8 @@ class UpdateCheckDialog(QDialog):
         self.selected_profiles_list = []  # Will store a list of {'name': ..., 'data': ...}
 
         self._default_checkbox_tooltip = (
-            "If checked, the settings from the selected profile will be loaded into the main window.\n"
-            "You can then modify them. When you start the download, the new settings will be saved to the profile."
+            "If checked, the settings fields will be unlocked and editable.\n"
+            "If unchecked, settings will still load, but in 'Read-Only' mode."
         )
 
         self._init_ui()
@@ -65,13 +65,17 @@ class UpdateCheckDialog(QDialog):
         self.list_widget.itemChanged.connect(self._handle_item_changed)
         layout.addWidget(self.list_widget)
 
-        # --- NEW: Checkbox to Load Settings ---
-        self.load_settings_checkbox = QCheckBox("Load profile settings into UI (Edit Settings)")
-        self.load_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
-        layout.addWidget(self.load_settings_checkbox)
+        # Renamed text to reflect new behavior
+        self.edit_settings_checkbox = QCheckBox("Enable Editing (Unlock Settings)")
+        self.edit_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
+
+        # Checked by default as requested
+        self.edit_settings_checkbox.setChecked(True)
+
+        layout.addWidget(self.edit_settings_checkbox)
         # -------------------------------------
 
-        # --- All Buttons in One Horizontal Layout ---
         button_layout = QHBoxLayout()
         button_layout.setSpacing(6)  # small even spacing between all buttons
 
@@ -110,7 +114,8 @@ class UpdateCheckDialog(QDialog):
         self.deselect_all_button.setText(self._tr("deselect_all_button_text", "Deselect All"))
         self.check_button.setText(self._tr("update_check_dialog_check_button", "Check Selected"))
         self.close_button.setText(self._tr("update_check_dialog_close_button", "Close"))
-        self.load_settings_checkbox.setText(self._tr("update_check_load_settings_checkbox", "Load profile settings into UI (Edit Settings)"))
+        # Updated translation key and default text
+        self.edit_settings_checkbox.setText(self._tr("update_check_enable_editing_checkbox", "Enable Editing (Unlock Settings)"))
 
     def _load_profiles(self):
         """Loads all .json files from the creator_profiles directory as checkable items."""
@@ -133,7 +138,6 @@ class UpdateCheckDialog(QDialog):
                 with open(filepath, 'r', encoding='utf-8') as f:
                     data = json.load(f)
 
-                # Basic validation to ensure it's a valid profile
                 if 'creator_url' in data and 'processed_post_ids' in data:
                     creator_name = os.path.splitext(filename)[0]
                     profiles_found.append({'name': creator_name, 'data': data})
@@ -147,7 +151,6 @@ class UpdateCheckDialog(QDialog):
         for profile_info in profiles_found:
             item = QListWidgetItem(profile_info['name'])
             item.setData(Qt.UserRole, profile_info)
-            # --- Make item checkable ---
             item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
             item.setCheckState(Qt.Unchecked)
             self.list_widget.addItem(item)
@@ -158,14 +161,13 @@ class UpdateCheckDialog(QDialog):
             self.check_button.setEnabled(False)
             self.select_all_button.setEnabled(False)
             self.deselect_all_button.setEnabled(False)
-            self.load_settings_checkbox.setEnabled(False)
+            self.edit_settings_checkbox.setEnabled(False)
 
     def _toggle_all_checkboxes(self):
         """Handles Select All and Deselect All button clicks."""
         sender = self.sender()
         check_state = Qt.Checked if sender == self.select_all_button else Qt.Unchecked
 
-        # Block signals to prevent triggering _handle_item_changed repeatedly
         self.list_widget.blockSignals(True)
         for i in range(self.list_widget.count()):
             item = self.list_widget.item(i)
@@ -173,13 +175,12 @@ class UpdateCheckDialog(QDialog):
             item.setCheckState(check_state)
         self.list_widget.blockSignals(False)
 
-        # Manually trigger the update once after batch change
         self._handle_item_changed(None)
 
     def _handle_item_changed(self, item):
         """
         Monitors how many items are checked.
-        If more than 1 item is checked, disable the 'Load Settings' checkbox.
+        If more than 1 item is checked, disable the 'Enable Editing' checkbox.
         """
         checked_count = 0
         for i in range(self.list_widget.count()):
@@ -187,15 +188,15 @@ class UpdateCheckDialog(QDialog):
                 checked_count += 1
 
         if checked_count > 1:
-            self.load_settings_checkbox.setChecked(False)
-            self.load_settings_checkbox.setEnabled(False)
-            self.load_settings_checkbox.setToolTip(
+            self.edit_settings_checkbox.setChecked(False)
+            self.edit_settings_checkbox.setEnabled(False)
+            self.edit_settings_checkbox.setToolTip(
                 self._tr("update_check_multi_selection_warning",
                          "Editing settings is disabled when multiple profiles are selected.")
             )
         else:
-            self.load_settings_checkbox.setEnabled(True)
-            self.load_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
+            self.edit_settings_checkbox.setEnabled(True)
+            self.edit_settings_checkbox.setToolTip(self._default_checkbox_tooltip)
 
     def on_check_selected(self):
         """Handles the 'Check Selected' button click."""
@@ -221,6 +222,18 @@ class UpdateCheckDialog(QDialog):
         return self.selected_profiles_list
 
     def should_load_into_ui(self):
-        """Returns True if the 'Load settings into UI' checkbox is checked."""
-        # Only return True if it's enabled and checked (double safety)
-        return self.load_settings_checkbox.isEnabled() and self.load_settings_checkbox.isChecked()
+        """
+        Returns True if the settings SHOULD be loaded into the UI.
+
+        NEW LOGIC: Returns True if exactly ONE profile is selected.
+        It does NOT care about the checkbox state anymore, because we want
+        to load settings even if the user can't edit them.
+        """
+        return len(self.selected_profiles_list) == 1
+
+    def should_enable_editing(self):
+        """
+        NEW METHOD: Returns True if the user is allowed to edit the settings.
+        This is linked to the checkbox.
+        """
+        return self.edit_settings_checkbox.isEnabled() and self.edit_settings_checkbox.isChecked()
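A sketch of how a caller might combine the two predicates once the dialog closes; the accessor and helper names here are hypothetical, though the main window does gain a matching _set_inputs_read_only helper later in this commit:

    # dialog is an UpdateCheckDialog the user has just confirmed.
    profiles = dialog.get_selected_profiles()  # hypothetical accessor for selected_profiles_list

    if dialog.should_load_into_ui():           # exactly one profile selected
        load_profile_into_ui(profiles[0])      # hypothetical loader
        # Lock the fields unless editing was explicitly enabled.
        set_inputs_read_only(not dialog.should_enable_editing())  # hypothetical helper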
@ -346,7 +346,7 @@ class DownloaderApp (QWidget ):
|
|||||||
self.download_location_label_widget = None
|
self.download_location_label_widget = None
|
||||||
self.remove_from_filename_label_widget = None
|
self.remove_from_filename_label_widget = None
|
||||||
self.skip_words_label_widget = None
|
self.skip_words_label_widget = None
|
||||||
self.setWindowTitle("Kemono Downloader v7.8.0")
|
self.setWindowTitle("Kemono Downloader v7.9.0")
|
||||||
setup_ui(self)
|
setup_ui(self)
|
||||||
self._connect_signals()
|
self._connect_signals()
|
||||||
if hasattr(self, 'character_input'):
|
if hasattr(self, 'character_input'):
|
||||||
@ -366,18 +366,14 @@ class DownloaderApp (QWidget ):
|
|||||||
def add_current_settings_to_queue(self):
|
def add_current_settings_to_queue(self):
|
||||||
"""Saves the current UI settings as a JSON job file with creator-specific paths."""
|
"""Saves the current UI settings as a JSON job file with creator-specific paths."""
|
||||||
|
|
||||||
# --- Helper: Append Name to Path safely ---
|
|
||||||
def get_creator_specific_path(base_dir, folder_name):
|
def get_creator_specific_path(base_dir, folder_name):
|
||||||
if not folder_name:
|
if not folder_name:
|
||||||
return base_dir
|
return base_dir
|
||||||
safe_name = clean_folder_name(folder_name)
|
safe_name = clean_folder_name(folder_name)
|
||||||
# Avoid double pathing (e.g. if base is .../Artist and we append /Artist again)
|
|
||||||
if base_dir.replace('\\', '/').rstrip('/').endswith(safe_name):
|
if base_dir.replace('\\', '/').rstrip('/').endswith(safe_name):
|
||||||
return base_dir
|
return base_dir
|
||||||
return os.path.join(base_dir, safe_name)
|
return os.path.join(base_dir, safe_name)
|
||||||
# ------------------------------------------
|
|
||||||
|
|
||||||
# --- SCENARIO 1: Items from Creator Selection (Popup) ---
|
|
||||||
if self.favorite_download_queue:
|
if self.favorite_download_queue:
|
||||||
count = 0
|
count = 0
|
||||||
base_settings = self._get_current_ui_settings_as_dict()
|
base_settings = self._get_current_ui_settings_as_dict()
|
||||||
@ -407,7 +403,7 @@ class DownloaderApp (QWidget ):
|
|||||||
QMessageBox.warning(self, "Queue Error", "Failed to add selected items to queue.")
|
QMessageBox.warning(self, "Queue Error", "Failed to add selected items to queue.")
|
||||||
return
|
return
|
||||||
|
|
||||||
# --- SCENARIO 2: Manual URL Entry ---
|
|
||||||
url = self.link_input.text().strip()
|
url = self.link_input.text().strip()
|
||||||
if not url:
|
if not url:
|
||||||
QMessageBox.warning(self, "Input Error", "Cannot add to queue: URL is empty.")
|
QMessageBox.warning(self, "Input Error", "Cannot add to queue: URL is empty.")
|
||||||
@@ -416,23 +412,20 @@ class DownloaderApp (QWidget ):
         settings = self._get_current_ui_settings_as_dict()
         settings['api_url'] = url

-        # Attempt to resolve name from URL + Cache (creators.json)
         service, user_id, post_id = extract_post_info(url)
         name_hint = "Job"

         if service and user_id:
-            # Try to find name in your local creators cache
             cache_key = (service.lower(), str(user_id))
             cached_name = self.creator_name_cache.get(cache_key)

             if cached_name:
-                # CASE A: Creator Found -> Use Creator Name
                 name_hint = cached_name
                 settings['output_dir'] = get_creator_specific_path(settings['output_dir'], cached_name)
             else:
-                # CASE B: Creator NOT Found -> Use Post ID or User ID
-                # If it's a single post link, 'post_id' will have a value.
-                # If it's a profile link, 'post_id' is None, so we use 'user_id'.
                 if post_id:
                     folder_name = str(post_id)
                 else:
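
The CASE A/B comments removed above described a fallback order that can be restated as a tiny pure function. A hedged sketch; the function name and the IDs are illustrative, not part of the app:

def resolve_name_hint(cached_name, post_id, user_id):
    if cached_name:
        return cached_name           # CASE A: creator found in the local cache
    return str(post_id or user_id)   # CASE B: single post -> post id, profile -> user id

assert resolve_name_hint("ArtistName", None, "123") == "ArtistName"
assert resolve_name_hint(None, "987", "123") == "987"   # post link
assert resolve_name_hint(None, None, "123") == "123"    # profile link
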
@@ -476,7 +469,7 @@ class DownloaderApp (QWidget ):
             QMessageBox.information(self, "Queue Empty", "No job files found in appdata/jobs.")
             return

-        # --- FIX: Clear error log at the start of the entire queue session ---
+
         self.permanently_failed_files_for_dialog.clear()
         self._update_error_button_count()
         # -------------------------------------------------------------------
@@ -2975,6 +2968,25 @@ class DownloaderApp (QWidget ):
         else:
             self.log_signal.emit("ℹ️ Link export was cancelled by the user.")

+    def _set_inputs_read_only(self, read_only):
+        """Disables input fields (Read-Only mode) but keeps action buttons enabled."""
+        # List of widgets to disable in Read-Only mode
+        widgets_to_lock = [
+            self.link_input, self.dir_input, self.character_input,
+            self.skip_words_input, self.remove_from_filename_input,
+            self.custom_folder_input, self.cookie_text_input,
+            self.thread_count_input, self.start_page_input, self.end_page_input,
+            self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
+            self.skip_zip_checkbox, self.download_thumbnails_checkbox,
+            self.compress_images_checkbox, self.scan_content_images_checkbox,
+            self.use_cookie_checkbox, self.manga_mode_checkbox,
+            self.radio_all, self.radio_images, self.radio_videos,
+            self.char_filter_scope_toggle_button, self.skip_scope_toggle_button
+        ]
+
+        for widget in widgets_to_lock:
+            if widget:
+                widget.setEnabled(not read_only)
+
     def get_filter_mode (self ):
         if self.radio_more and self.radio_more.isChecked():
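
The new _set_inputs_read_only() can be exercised in isolation. A minimal runnable sketch, assuming PyQt5 and a stand-in widget list rather than the app's real inputs:

import sys
from PyQt5.QtWidgets import QApplication, QLineEdit, QCheckBox

def set_inputs_read_only(widgets, read_only):
    # same idea as the patch: disable inputs wholesale
    for w in widgets:
        if w:  # tolerate widgets that were never created
            w.setEnabled(not read_only)

app = QApplication(sys.argv)
inputs = [QLineEdit(), QCheckBox("skip .zip")]
set_inputs_read_only(inputs, True)
assert not inputs[0].isEnabled() and not inputs[1].isEnabled()
set_inputs_read_only(inputs, False)
assert inputs[0].isEnabled()

Using setEnabled(False) rather than setReadOnly(True) is the simpler choice here: it also covers checkboxes, radio buttons, and toggle buttons, which have no read-only concept, and it greys the controls out so the locked state stays visible.
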
@@ -3243,7 +3255,6 @@ class DownloaderApp (QWidget ):
             if self.single_pdf_setting:
                 self.use_subfolder_per_post_checkbox.setChecked(False)

-            # --- Logging ---
             self.log_signal.emit(f"ℹ️ 'More' filter set: {scope_text}, Format: {self.text_export_format.upper()}")
             if is_any_pdf_mode:
                 status_single = "Enabled" if self.single_pdf_setting else "Disabled"
@@ -3252,19 +3263,18 @@ class DownloaderApp (QWidget ):
                     self.log_signal.emit(" ↳ Multithreading disabled for PDF export.")

             else:
-                # --- User clicked Cancel: Revert to default ---
                 self.log_signal.emit("ℹ️ 'More' filter selection cancelled. Reverting to 'All'.")
                 if hasattr(self, 'radio_all'):
                     self.radio_all.setChecked(True)

-        # Case 2: Switched AWAY from the "More" button (e.g., clicked 'Images' or 'All')
         elif button != self.radio_more and checked:
             self.radio_more.setText("More")
             self.more_filter_scope = None
             self.single_pdf_setting = False
             self.add_info_in_pdf_setting = False # Reset setting

-            # Restore enabled states for options that PDF mode might have disabled
             if hasattr(self, 'use_multithreading_checkbox'):
                 self.use_multithreading_checkbox.setEnabled(True)
             self._update_multithreading_for_date_mode() # Re-check manga logic
@@ -4183,9 +4193,12 @@ class DownloaderApp (QWidget ):

         self.cancellation_message_logged_this_session = False

-        # START of the new refactored block
         service, id1, id2 = extract_post_info(api_url)

+        # [NEW] Get proxy settings immediately
+        ui_settings = self._get_current_ui_settings_as_dict()
+        proxies_to_use = ui_settings.get('proxies')
+
         specialized_thread = create_downloader_thread(
             main_app=self,
             api_url=api_url,
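
ui_settings.get('proxies') presumably yields either None or a requests-style proxy mapping; the exact shape is not shown in this hunk, so the following is an assumption based on how such settings are commonly stored:

# illustrative only: the keys and proxy URL are assumptions
ui_settings = {
    'proxies': {
        'http': 'http://127.0.0.1:8080',
        'https': 'http://127.0.0.1:8080',
    }
}
proxies_to_use = ui_settings.get('proxies')  # None when no proxy is configured
print(proxies_to_use)

Reading the value once, before the thread is constructed, means a download keeps the proxy settings that were active at start time even if the user edits them mid-run.
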
@@ -4208,18 +4221,18 @@ class DownloaderApp (QWidget ):

             self.set_ui_enabled(False)
             self.download_thread = specialized_thread

+            # [NEW] Inject proxies into the thread manually
+            if hasattr(self.download_thread, 'proxies'):
+                self.download_thread.proxies = proxies_to_use
+
             self._connect_specialized_thread_signals(self.download_thread)
             self.download_thread.start()
             self._update_button_states_and_connections()
             return True
-        # END of the new refactored block

-        if not service or not id1:
-            QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
-            return False

         user_id, post_id_from_url = id1, id2

         if direct_api_url and not post_id_from_url and item_type_from_queue and 'post' in item_type_from_queue:
             self.log_signal.emit(f"❌ CRITICAL ERROR: Could not parse post ID from the queued POST URL: {api_url}")
             self.log_signal.emit(" Skipping this item. This might be due to an unsupported URL format or a temporary issue.")
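
The injection above is duck-typed: only thread classes that declare a proxies attribute receive the setting, and anything else is skipped silently. A small sketch with made-up class names:

class ProxyAwareThread:
    proxies = None  # declared at class level so hasattr() finds it

class LegacyThread:
    pass

def inject_proxies(thread, proxies):
    if hasattr(thread, 'proxies'):
        thread.proxies = proxies

aware, legacy = ProxyAwareThread(), LegacyThread()
inject_proxies(aware, {'http': 'http://127.0.0.1:8080'})
inject_proxies(legacy, {'http': 'http://127.0.0.1:8080'})  # no-op
assert aware.proxies is not None
assert not hasattr(legacy, 'proxies')

This keeps the change backward compatible: downloader threads that predate proxy support need no modification.
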
@@ -5101,8 +5114,54 @@ class DownloaderApp (QWidget ):
         self.is_ready_to_download_batch_update = True

         self.progress_label.setText(f"Found {total_posts} new posts. Ready to download.")
-        self.set_ui_enabled(True) # Re-enable UI
-        self._update_button_states_and_connections() # Update buttons to "Start Download (X)"
+        self.set_ui_enabled(True) # Re-enable UI first
+
+        # [NEW] Apply Read-Only mode if it was selected in the dialog
+        if getattr(self, 'update_settings_read_only_mode', False):
+            self._set_inputs_read_only(True)
+
+        self._update_button_states_and_connections()
+
+    def _show_update_check_dialog(self):
+        """Shows the Update Check Dialog and applies Load/Edit logic."""
+        if self.is_restore_pending:
+            QMessageBox.warning(self, "Restore Pending", "Please restore or discard the previous session first.")
+            return
+
+        dialog = UpdateCheckDialog(self.user_data_path, self, self)
+
+        if dialog.exec_() == QDialog.Accepted:
+            profiles = dialog.get_selected_profiles()
+            if not profiles: return
+
+            self.active_update_profiles_list = profiles
+
+            # --- LOGIC START ---
+
+            # 1. ALWAYS Load Settings if appropriate (e.g. Single Profile selected)
+            # The dialog now returns True for should_load_into_ui() if count == 1, regardless of checkbox
+            if dialog.should_load_into_ui():
+                # Load settings from the FIRST selected profile
+                first_profile_settings = profiles[0]['data'].get('settings', {})
+                self._load_ui_from_settings_dict(first_profile_settings)
+
+                # 2. Check if Editing is Allowed
+                if dialog.should_enable_editing():
+                    self.update_settings_read_only_mode = False
+                    self.override_update_profile_settings = True # Use UI values for download
+                    self.log_signal.emit("ℹ️ Settings loaded in EDITABLE mode.")
+                else:
+                    self.update_settings_read_only_mode = True
+                    self.override_update_profile_settings = False # Use original JSON values (safer for Read-Only)
+                    self.log_signal.emit("ℹ️ Settings loaded in READ-ONLY mode.")
+            else:
+                # Multiple profiles or load disabled
+                self.update_settings_read_only_mode = False
+                self.override_update_profile_settings = False
+
+            # --- LOGIC END ---
+
+            self._start_batch_update_check(self.active_update_profiles_list)
+
     def _start_download_of_batch_update(self):
         """
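
The dialog branch above boils down to three flag combinations. Restated as a pure function to make the outcomes explicit (a sketch: the tuple packaging is mine, the flag names come from the patch):

def resolve_update_mode(load_into_ui, enable_editing):
    """Returns (update_settings_read_only_mode, override_update_profile_settings)."""
    if load_into_ui:
        if enable_editing:
            return (False, True)   # EDITABLE: current UI values drive the download
        return (True, False)       # READ-ONLY: original JSON values are used
    return (False, False)          # multiple profiles, or load disabled

assert resolve_update_mode(True, True) == (False, True)
assert resolve_update_mode(True, False) == (True, False)
assert resolve_update_mode(False, False) == (False, False)
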
@@ -5454,8 +5513,13 @@ class DownloaderApp (QWidget ):
         global PostProcessorWorker, download_from_api

         worker_args_template = fetcher_args['worker_args_template']
-        logger_func = lambda msg: self.log_signal.emit(f"[Fetcher] {msg}")
+        def logger_func(msg):
+            try:
+                import sip
+                if not sip.isdeleted(self):
+                    self.log_signal.emit(f"[Fetcher] {msg}")
+            except (RuntimeError, ImportError, AttributeError):
+                pass # Window is gone, ignore logging
         try:
             # This single call now handles all fetching logic, including 'Fetch First'.
             post_generator = download_from_api(
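
Replacing the lambda with a guarded function protects against a classic PyQt crash: a worker logging after the window has been destroyed. A self-contained sketch of the pattern, assuming PyQt5 (the sip import location varies across builds, hence the fallback):

import sys
from PyQt5.QtWidgets import QApplication, QWidget

try:
    from PyQt5 import sip  # bundled location in most modern PyQt5 wheels
except ImportError:
    import sip             # older installs expose it as a top-level module

app = QApplication(sys.argv)
window = QWidget()

def safe_log(msg):
    try:
        if not sip.isdeleted(window):
            print(f"[Fetcher] {msg}")
    except (RuntimeError, AttributeError):
        pass  # the C++ object is gone; drop the message

safe_log("still alive")  # prints, since the window still exists

sip.isdeleted() checks whether the underlying C++ object has been destroyed, a condition that a plain None check on the Python reference cannot detect.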