Commit cbcbd03e authored by 陈泽健

perf(reservation system - door screen): optimize the file download process

- Add a Connection: keep-alive header to improve connection reuse (see the sketch below the commit header)
- Tune the thread count and connection-pool size to raise download concurrency
- Log download progress once per 1MB instead of on every update
- Lengthen the wait between retries to improve the download success rate
- Remove unnecessary request-info logging to simplify output
Parent a3d350f5
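For reference, a minimal sketch (illustrative only, not the exact code in this commit) of a shared keep-alive session whose connection pool is sized to the worker-thread count:

# Illustrative sketch: one requests.Session shared by all workers, sending
# Connection: keep-alive and pooling at most THREAD_COUNT connections per host.
import requests
from requests.adapters import HTTPAdapter

THREAD_COUNT = 20  # matches the new value in this commit

def build_session() -> requests.Session:
    session = requests.Session()
    session.headers.update({"Connection": "keep-alive"})  # reuse TCP connections
    adapter = HTTPAdapter(
        max_retries=3,
        pool_connections=THREAD_COUNT,  # number of per-host pools to cache
        pool_maxsize=THREAD_COUNT,      # connections kept alive in each pool
        pool_block=True,                # wait for a free connection instead of opening extras
    )
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session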
@@ -21,7 +21,7 @@ logging.basicConfig(
# Global configuration
message_queue = Queue()
THREAD_COUNT = 5 # Reduced thread count to make progress easier to observe
THREAD_COUNT = 20 # Raised to 20 for higher download concurrency
DOWNLOAD_DIR = "downloads"
os.makedirs(DOWNLOAD_DIR, exist_ok=True)
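For context, a sketch of how THREAD_COUNT and message_queue are typically wired together; the worker startup code is assumed here and is not part of this hunk:

# Assumed wiring (not shown in this hunk): THREAD_COUNT daemon workers drain
# message_queue; the producer enqueues UDIDs and waits with queue.join().
import threading

def start_workers():
    for i in range(THREAD_COUNT):
        threading.Thread(target=worker_thread, name=f"worker-{i}", daemon=True).start()

# Producer side: message_queue.put(udid) for each device, then
# message_queue.join() returns once every task has called task_done().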
@@ -148,29 +148,21 @@ def worker_thread():
'RandomCode': 'KuYKL30nDA6QmIKeysgbM6qpali4YB1EqklwuOhAtS5Hzoc4fvkvAiTRtcuz',
'X-SIGN': 'xjcyiFmPtQNNrU6uGxSLeQQWM7+PW9Q445EPAeaU6EH07V3KKvTigeCg5visZaI3IavQhMIhVMLiJEUyPDEfAIyPfNZGPex1C+8fSYoAk9Q=',
'X-TIMESTAMP': '1717749773',
'X-RANDOM': 'TbSyCQyC5efAT5dRNs7RxWMg'
'X-RANDOM': 'TbSyCQyC5efAT5dRNs7RxWMg',
'Connection': 'keep-alive'
}
# Determine the save path
file_name = f"{udid}_downloaded_file.bin" # Adjust the extension as needed
file_name = f"{udid}_downloaded_file.bin"
save_path = os.path.abspath(os.path.join(DOWNLOAD_DIR, file_name))
# Print request information
logging.info("\n" + "=" * 50 + " Request info " + "=" * 50)
logging.info(f"Thread: {threading.current_thread().name}")
logging.info(f"UDID: {udid}")
logging.info(f"Request URL: {url}")
logging.info(f"Save path: {save_path}")
logging.info("Request headers:")
logging.info(json.dumps(headers, indent=2, ensure_ascii=False))
logging.info("=" * 100 + "\n")
# Create a session and configure retry handling
session = requests.Session()
adapter = requests.adapters.HTTPAdapter(
max_retries=3,
pool_connections=THREAD_COUNT,
pool_maxsize=THREAD_COUNT
pool_connections=20, # number of per-host pools to cache
pool_maxsize=20, # sized to match the 20 worker threads
pool_block=True
)
session.mount('http://', adapter)
session.mount('https://', adapter)
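As an aside, the commit keeps the hand-rolled retry loop further down; urllib3's Retry class could express the same growing wait declaratively. A sketch of that alternative, with assumed parameters:

# Alternative sketch (not what this commit does): let urllib3 back off
# automatically instead of using the manual attempt loop below (urllib3 >= 1.26).
from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter

retry = Retry(
    total=3,
    backoff_factor=5,                       # sleep roughly backoff_factor * 2**n between attempts
    status_forcelist=(500, 502, 503, 504),  # also retry on these HTTP status codes
    allowed_methods=None,                   # retry all methods, including POST
)
adapter = HTTPAdapter(max_retries=retry, pool_connections=20, pool_maxsize=20, pool_block=True)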
@@ -178,12 +178,13 @@ def worker_thread():
max_retries = 3
for attempt in range(max_retries):
try:
# Allow a longer timeout
with session.post(
url,
headers=headers,
stream=True, # Key: enable streaming download
stream=True,
verify=False,
timeout=(30, 300)
timeout=(30, 300), # 30s connect timeout, 300s read timeout
) as response:
response.raise_for_status()
@@ -192,62 +192,41 @@ def worker_thread():
if total_size == 0:
raise ValueError("无法获取文件总大小")
logging.info(f"开始下载文件,总大小: {total_size / 1024 / 1024:.2f}MB")
# Create the download directory
os.makedirs(DOWNLOAD_DIR, exist_ok=True)
# Write the file
downloaded = 0
start_time = time.time()
last_update = start_time
last_log = ""
with open(save_path, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
if chunk:
f.write(chunk)
f.flush()
os.fsync(f.fileno())
downloaded += len(chunk)
# Show progress (via logging)
if time.time() - last_update >= 0.1:
    percent = (downloaded / total_size) * 100
    speed = downloaded / (time.time() - start_time) / 1024
    current_log = (
        f"Progress: {percent:.1f}% | "
        f"Speed: {speed:.1f}KB/s | "
        f"{downloaded / 1024 / 1024:.1f}MB/{total_size / 1024 / 1024:.1f}MB"
    )
    if current_log != last_log:
        logging.info(current_log)
        last_log = current_log
    last_update = time.time()
# Print progress once per 1MB
if downloaded % (1024 * 1024) == 0:
    speed = downloaded / (time.time() - start_time) / 1024
    logging.info(
        f"{udid} Progress: {downloaded / 1024 / 1024:.1f}MB/"
        f"{total_size / 1024 / 1024:.1f}MB | "
        f"Speed: {speed:.1f}KB/s"
    )
logging.info("进度: 100.0% | 下载完成!")
logging.info("文件下载完成!")
# Verify the file size
final_size = os.path.getsize(save_path)
if final_size != total_size:
raise ValueError(f"文件大小不匹配: {final_size} != {total_size}")
logging.info(f"文件保存成功: {save_path}")
logging.info(f"文件大小: {final_size} bytes")
logging.info(f"{udid} 下载完成! 大小: {downloaded} bytes")
break
except (requests.exceptions.RequestException, ValueError) as e:
if attempt == max_retries - 1:
logging.error(f"下载最终失败: {str(e)}")
logging.error(f"{udid} 下载最终失败: {str(e)}")
raise
wait_time = (attempt + 1) * 5
logging.warning(f"Download failed, retrying in {wait_time}s... ({attempt + 1}/{max_retries})")
wait_time = (attempt + 1) * 10 # Longer wait between retries
logging.warning(f"{udid} download failed, retrying in {wait_time}s... ({attempt + 1}/{max_retries})")
time.sleep(wait_time)
except Exception as e:
logging.exception(f"线程处理异常: {str(e)}")
logging.exception(f"{udid} 线程处理异常: {str(e)}")
finally:
message_queue.task_done()
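Putting the pieces of this hunk together, a condensed, self-contained sketch of the download pattern (URL, headers and UDID handling omitted; names are simplified):

# Condensed sketch of the pattern above: stream in 8KB chunks, log progress
# once per MB, verify the size, and retry with a linearly growing wait.
import logging
import os
import time
import requests

def download(session: requests.Session, url: str, save_path: str, retries: int = 3) -> None:
    for attempt in range(retries):
        try:
            with session.post(url, stream=True, timeout=(30, 300)) as resp:
                resp.raise_for_status()
                total = int(resp.headers.get("content-length", 0))
                done, start = 0, time.time()
                with open(save_path, "wb") as f:
                    for chunk in resp.iter_content(chunk_size=8192):
                        if not chunk:
                            continue
                        f.write(chunk)
                        done += len(chunk)
                        # 8192 divides 1MB exactly, so full chunks land on this boundary
                        if done % (1024 * 1024) == 0:
                            speed = done / (time.time() - start) / 1024
                            logging.info("%.1fMB/%.1fMB | %.1fKB/s",
                                         done / 1024 / 1024, total / 1024 / 1024, speed)
                if total and os.path.getsize(save_path) != total:
                    raise ValueError("file size mismatch")
                return
        except (requests.exceptions.RequestException, ValueError) as exc:
            if attempt == retries - 1:
                raise
            wait = (attempt + 1) * 10  # 10s, then 20s, before the remaining attempts
            logging.warning("download failed (%s), retrying in %ss", exc, wait)
            time.sleep(wait)

Note that the per-MB check relies on every full chunk being exactly 8192 bytes; a shorter final chunk simply skips the log line.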
@@ -59,7 +59,7 @@ if __name__ == "__main__":
username = "mqtt@cmdb"
password = "mqtt@webpassw0RD"
port = 1883
num_repeats = 200
num_repeats = 2000
interval_between_repeats = 1
num_threads = 100 # Number of threads
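The last hunk raises the repeat count for what appears to be an MQTT load test (username mqtt@cmdb, port 1883). A hypothetical sketch of how such parameters are commonly used with paho-mqtt; the broker host, topic and payload are placeholders, not taken from this repository:

# Hypothetical usage of the parameters above with paho-mqtt 1.x
# (broker host, topic and payload are placeholders).
import threading
import time
import paho.mqtt.client as mqtt

def publisher() -> None:
    client = mqtt.Client()                      # paho-mqtt 1.x constructor
    client.username_pw_set(username, password)
    client.connect("broker.example.local", port)
    for _ in range(num_repeats):                # 2000 messages per thread after this change
        client.publish("test/topic", "ping")
        time.sleep(interval_between_repeats)    # 1 second between publishes
    client.disconnect()

workers = [threading.Thread(target=publisher) for _ in range(num_threads)]  # 100 threads
for t in workers:
    t.start()
for t in workers:
    t.join()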