# -*- coding:utf-8 -*-
"""HTTP utilities: module-wide logging setup and the HttpClient helper class."""

import logging
from typing import Any, Dict, Optional, Union
from urllib.parse import urljoin

import requests

# Configure logging once at import time; all module messages go through `logger`.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger("http_utils")
class HttpClient:
    """HTTP client utility class wrapping common ``requests`` operations.

    All methods are stateless ``@staticmethod``s. Failures are logged and
    reported through the return value (``None`` / ``False``) rather than
    raised, so callers can treat every call as best-effort.
    """

    # Browser-like User-Agent sent whenever the caller provides no headers.
    DEFAULT_HEADERS = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
    }

    @staticmethod
    def get(url: str, headers: Optional[Dict[str, str]] = None,
            params: Optional[Dict[str, Any]] = None, timeout: int = 30,
            verify: bool = False) -> Optional["requests.Response"]:
        """Send a GET request.

        Args:
            url: Request URL.
            headers: Request headers; ``DEFAULT_HEADERS`` is used when ``None``.
            params: Query-string parameters.
            timeout: Timeout in seconds.
            verify: Whether to verify the SSL certificate.
                NOTE(security): the ``False`` default disables certificate
                checking; kept for backward compatibility — pass ``True``
                when talking to hosts you do not control.

        Returns:
            The ``Response`` object (whatever its status code), or ``None``
            if the request itself failed (connection error, timeout, ...).
        """
        _headers = headers or HttpClient.DEFAULT_HEADERS

        try:
            logger.debug("发送GET请求: %s", url)
            response = requests.get(url, headers=_headers, params=params,
                                    timeout=timeout, verify=verify)

            # Non-200 responses are still returned to the caller; only warn.
            if response.status_code != 200:
                logger.warning("请求失败,状态码:%s, URL: %s", response.status_code, url)

            return response
        except requests.RequestException as e:
            # Narrowed from ``except Exception``: network/protocol failures are
            # handled here, while programming errors now propagate instead of
            # being silently converted to ``None``.
            logger.error("请求异常: %s, 错误: %s", url, e)
            return None

    @staticmethod
    def download_file(url: str, save_path: str, headers: Optional[Dict[str, str]] = None,
                      timeout: int = 60, verify: bool = False, chunk_size: int = 8192) -> bool:
        """Download a file to ``save_path`` using a streamed GET request.

        Args:
            url: File URL.
            save_path: Destination path on disk.
            headers: Request headers; ``DEFAULT_HEADERS`` is used when ``None``.
            timeout: Timeout in seconds.
            verify: Whether to verify the SSL certificate (see ``get``).
            chunk_size: Bytes read per chunk while streaming.

        Returns:
            ``True`` on success, ``False`` otherwise. On failure a partially
            written file may remain at ``save_path``.
        """
        _headers = headers or HttpClient.DEFAULT_HEADERS

        try:
            logger.info("正在下载: %s -> %s", url, save_path)
            # ``with`` closes the streamed connection on every path — the
            # original leaked the connection on the non-200 early return.
            with requests.get(url, headers=_headers, timeout=timeout,
                              verify=verify, stream=True) as response:
                if response.status_code != 200:
                    logger.error("下载失败,状态码: %s", response.status_code)
                    return False

                with open(save_path, 'wb') as f:
                    for chunk in response.iter_content(chunk_size=chunk_size):
                        if chunk:  # skip keep-alive chunks
                            f.write(chunk)

            logger.info("下载完成: %s", save_path)
            return True
        except (requests.RequestException, OSError) as e:
            # RequestException covers network failures, OSError covers disk
            # write failures; anything else is a bug and should propagate.
            logger.error("下载文件异常: %s, 错误: %s", url, e)
            return False

    @staticmethod
    def join_url(base_url: str, path: str) -> str:
        """Join a base URL and a path, tolerating stray slashes on either side.

        Args:
            base_url: Base URL (trailing slash optional).
            path: Path to append (leading slash optional).

        Returns:
            The combined URL.
        """
        # Normalizing both sides keeps urljoin from dropping the final segment
        # of ``base_url`` and from treating ``path`` as an absolute path.
        return urljoin(base_url.rstrip('/') + '/', path.lstrip('/'))
|