init
This commit is contained in:
246
app/core/services/preview_thread.py
Normal file
246
app/core/services/preview_thread.py
Normal file
@@ -0,0 +1,246 @@
|
||||
import os
from urllib.parse import urlparse

import requests
from PyQt6.QtCore import QThread, pyqtSignal
from PyQt6.QtGui import QImage, QPixmap

from app.core import miaoStarsBasicApi
|
||||
|
||||
|
||||
class TextLoaderThread(QThread):
    """Background thread that downloads a text file and emits its decoded content.

    Signals:
        textLoaded (str): decoded text content on success.
        errorOccurred (str): human-readable error message on failure.
        progressUpdated (int): download progress 0-100, emitted only when the
            server reports a Content-Length header.
    """

    textLoaded = pyqtSignal(str)
    errorOccurred = pyqtSignal(str)
    progressUpdated = pyqtSignal(int)

    def __init__(self, url):
        """Store the URL to download; the request happens in run().

        Args:
            url: HTTP(S) URL of the text file to fetch.
        """
        super().__init__()
        self.url = url
        # Cooperative cancellation flag checked inside the download loop;
        # safer than relying solely on QThread.terminate() in cancel().
        self._cancelled = False

    def run(self):
        """Download self.url, decode it, and emit textLoaded / errorOccurred."""
        try:
            # Stream the download through the shared project session.
            # NOTE(review): the original also built a local requests.Session
            # with a tuned HTTPAdapter but never used it for the request —
            # removed as dead code.
            response = miaoStarsBasicApi.returnSession().get(
                self.url,
                stream=True,
                timeout=(15, 30),  # 15 s connect, 30 s read
            )
            response.raise_for_status()

            # total_size is 0 when the server sends no Content-Length;
            # in that case no progress is reported.
            total_size = int(response.headers.get("content-length", 0))
            downloaded_size = 0

            # Read in binary chunks; decode once at the end.
            content_chunks = []
            for chunk in response.iter_content(chunk_size=16384):
                if self._cancelled:
                    # Cooperative exit requested via cancel().
                    return
                if chunk:
                    content_chunks.append(chunk)
                    downloaded_size += len(chunk)

                    if total_size > 0:
                        progress = int((downloaded_size / total_size) * 100)
                        self.progressUpdated.emit(progress)

            binary_content = b"".join(content_chunks)

            if not binary_content:
                self.errorOccurred.emit("下载内容为空")
                return

            text_content = self._decode_content(binary_content)
            self.textLoaded.emit(text_content)

        except requests.exceptions.Timeout:
            self.errorOccurred.emit("请求超时,请检查网络连接或尝试重新加载")
        except requests.exceptions.ConnectionError:
            self.errorOccurred.emit("网络连接错误,请检查网络设置")
        except requests.exceptions.RequestException as e:
            self.errorOccurred.emit(f"网络请求错误: {str(e)}")
        except Exception as e:
            self.errorOccurred.emit(f"文本处理错误: {str(e)}")

    def _decode_content(self, binary_content):
        """Decode *binary_content*, trying common encodings UTF-8 first.

        Always returns a str: if every strict decode fails, falls back to
        UTF-8 with replacement characters (which cannot raise).

        NOTE(review): "latin-1" accepts every byte sequence, so the encodings
        listed after it are unreachable; order kept to preserve behavior.
        """
        encodings = ["utf-8", "gbk", "gb2312", "latin-1", "iso-8859-1", "cp1252"]

        for encoding in encodings:
            try:
                return binary_content.decode(encoding)
            except UnicodeDecodeError:
                continue

        # errors="replace" never raises, so no try/except is needed here
        # (the original wrapped this in a bare except:, which was dead code).
        return binary_content.decode("utf-8", errors="replace")

    def cancel(self):
        """Request cancellation of an in-progress download."""
        # Ask run() to exit cleanly at the next chunk boundary first.
        self._cancelled = True
        if self.isRunning():
            self.terminate()  # hard stop as a last resort
            self.wait(1000)  # give the thread up to 1 s to finish
|
||||
|
||||
|
||||
class ImageLoaderThread(QThread):
    """Background thread that downloads an image, with a size-bounded disk cache.

    Signals:
        imageLoaded (QPixmap): the loaded pixmap (from cache or fresh download).
        errorOccurred (str): human-readable error message on failure.
        progressUpdated (int): download progress 0-100, emitted only when the
            server reports a Content-Length header.
    """

    imageLoaded = pyqtSignal(QPixmap)
    errorOccurred = pyqtSignal(str)
    progressUpdated = pyqtSignal(int)

    def __init__(
        self, url, cache_dir="image_cache", max_cache_size=50 * 1024 * 1024
    ):
        """Prepare the loader and ensure the cache directory exists.

        Args:
            url: HTTP(S) URL of the image to fetch.
            cache_dir: directory used for the on-disk image cache.
            max_cache_size: cache size budget in bytes (default 50 MB).
        """
        super().__init__()
        self.url = url
        self.cache_dir = cache_dir
        self.max_cache_size = max_cache_size
        self._setup_cache()

    def _setup_cache(self):
        """Create the cache directory if it does not already exist."""
        # exist_ok avoids a race when several loader threads start at once
        # (the original exists()/makedirs() pair could raise FileExistsError).
        os.makedirs(self.cache_dir, exist_ok=True)

    def _get_cache_filename(self):
        """Build the cache file name: short URL hash + URL basename."""
        import hashlib

        parsed_url = urlparse(self.url)
        filename = os.path.basename(parsed_url.path) or "image"
        # Prefix with a hash of the full URL so distinct URLs sharing a
        # basename do not collide in the cache.
        url_hash = hashlib.md5(self.url.encode()).hexdigest()[:8]
        # BUG FIX: the original returned a literal placeholder instead of the
        # computed basename, leaving `filename` unused and making every URL
        # with the same hash prefix map to the same garbled name.
        return f"{url_hash}_{filename}"

    def _get_cached_image(self):
        """Return the cached QPixmap for this URL, or None if absent/invalid."""
        cache_file = os.path.join(self.cache_dir, self._get_cache_filename())
        if os.path.exists(cache_file):
            try:
                pixmap = QPixmap(cache_file)
                if not pixmap.isNull():
                    return pixmap
            except Exception:
                pass  # corrupt cache entry: fall through and re-download
        return None

    def _save_to_cache(self, pixmap):
        """Persist *pixmap* to the cache; failures are deliberately non-fatal."""
        try:
            cache_file = os.path.join(self.cache_dir, self._get_cache_filename())
            pixmap.save(cache_file, "JPG", 80)  # 80 % JPEG quality
            self._cleanup_cache()
        except Exception:
            pass  # caching is best-effort only

    def _cleanup_cache(self):
        """Evict oldest cache files until total size fits max_cache_size."""
        # noinspection PyBroadException
        try:
            files = []
            for name in os.listdir(self.cache_dir):
                filepath = os.path.join(self.cache_dir, name)
                if os.path.isfile(filepath):
                    files.append((filepath, os.path.getmtime(filepath)))

            # Oldest (smallest mtime) first so eviction starts there.
            files.sort(key=lambda item: item[1])

            total_size = sum(os.path.getsize(path) for path, _ in files)

            while total_size > self.max_cache_size and files:
                oldest_path, _ = files.pop(0)
                total_size -= os.path.getsize(oldest_path)
                os.remove(oldest_path)
        except Exception:
            pass  # cleanup is best-effort; never fail the image load

    def run(self):
        """Serve the image from cache, or download it and emit imageLoaded."""
        try:
            # 1. Serve from disk cache when possible.
            cached_pixmap = self._get_cached_image()
            if cached_pixmap is not None:
                self.imageLoaded.emit(cached_pixmap)
                return

            # 2. Stream the download through the shared project session.
            # NOTE(review): the original also built a local requests.Session
            # with a tuned HTTPAdapter but never used it for the request —
            # removed as dead code. Its comments also claimed 5 s/10 s
            # timeouts while the code used (20, 30); comment now matches.
            response = miaoStarsBasicApi.returnSession().get(
                self.url, stream=True, timeout=(20, 30)  # 20 s connect, 30 s read
            )
            response.raise_for_status()

            # total_size is 0 when the server sends no Content-Length;
            # in that case no progress is reported.
            total_size = int(response.headers.get("content-length", 0))
            downloaded_size = 0

            # 3. Accumulate chunks in a list and join once at the end —
            # avoids the quadratic cost of repeated bytes += concatenation.
            chunks = []
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:
                    chunks.append(chunk)
                    downloaded_size += len(chunk)

                    if total_size > 0:
                        progress = int((downloaded_size / total_size) * 100)
                        self.progressUpdated.emit(progress)
            image_data = b"".join(chunks)

            # 4. Decode via QImage off the GUI thread, then convert.
            image = QImage()
            image.loadFromData(image_data)

            if image.isNull():
                raise Exception("无法加载图片数据")

            pixmap = QPixmap.fromImage(image)

            # 5. Cache for next time (best-effort), then notify.
            self._save_to_cache(pixmap)
            self.imageLoaded.emit(pixmap)

        except requests.exceptions.Timeout:
            self.errorOccurred.emit("请求超时,请检查网络连接")
        except requests.exceptions.ConnectionError:
            self.errorOccurred.emit("网络连接错误")
        except requests.exceptions.RequestException as e:
            self.errorOccurred.emit(f"网络请求错误: {str(e)}")
        except Exception as e:
            self.errorOccurred.emit(f"图片处理错误: {str(e)}")
|
||||
Reference in New Issue
Block a user