Files
ProxyPool/app/api/routes/proxies.py
祀梦 b972b64616 refactor: 全面重构核心架构,消除反复修改的根因
- 删除 ValidationQueue 双轨持久化队列,替换为纯内存 AsyncWorkerPool
- 引入统一后台任务框架 JobExecutor(Job/CrawlJob/ValidateAllJob)
- 新增 PluginRunner 统一插件执行(超时、重试、健康检查、统计)
- 重构 SchedulerService 职责收敛为仅定时触发 ValidateAllJob
- 使用 AsyncExitStack 重构 lifespan,安全管理长生命周期资源
- 路由层瘦身 50%+,业务异常上抛由全局中间件统一处理
- 实现设置全热更新(WorkerPool 并发、Validator 超时即时生效)
- 前端 Store 强制写后重新拉取,消除乐观更新数据不同步
- 删除 queue.py / task_repo.py / task_service.py
- 新增 execution 单元测试,全部 85 个测试通过
2026-04-04 22:36:57 +08:00

102 lines
3.4 KiB
Python

"""Proxy-related routes (including pool statistics)."""
from typing import Optional
from fastapi import APIRouter, Depends, Query
from fastapi.responses import StreamingResponse
from app.services.proxy_service import ProxyService
from app.services.scheduler_service import SchedulerService
from app.models.schemas import ProxyListRequest, BatchDeleteRequest
from app.api.deps import get_proxy_service, get_scheduler_service
from app.api.common import success_response, format_proxy
from app.core.exceptions import ProxyPoolException, ProxyNotFoundException
# Every endpoint in this module is mounted under the /api/proxies prefix.
router = APIRouter(prefix="/api/proxies", tags=["proxies"])
@router.get("/stats")
async def get_stats(
    proxy_service: ProxyService = Depends(get_proxy_service),
    scheduler_service: SchedulerService = Depends(get_scheduler_service),
):
    """Return pool statistics, augmented with the scheduler's running flag."""
    data = await proxy_service.get_stats()
    # Annotate the stats payload in place so the UI can show scheduler state.
    data["scheduler_running"] = scheduler_service.running
    return success_response("获取统计信息成功", data)
@router.post("")
async def list_proxies(
    request: ProxyListRequest,
    service: ProxyService = Depends(get_proxy_service),
):
    """Paginated / filtered / sorted proxy listing.

    Uses POST so the query parameters travel in the request body
    (ProxyListRequest) rather than the URL.
    """
    query = {
        "page": request.page,
        "page_size": request.page_size,
        "protocol": request.protocol,
        "min_score": request.min_score,
        "max_score": request.max_score,
        "sort_by": request.sort_by,
        "sort_order": request.sort_order,
    }
    items, total = await service.list_proxies(**query)
    payload = {
        "list": list(map(format_proxy, items)),
        "total": total,
        "page": request.page,
        "page_size": request.page_size,
    }
    return success_response("获取代理列表成功", payload)
@router.get("/random")
async def get_random_proxy(service: ProxyService = Depends(get_proxy_service)):
    """Return one randomly chosen proxy from the pool."""
    candidate = await service.get_random_proxy()
    if not candidate:
        # Placeholder ip/port: the exception here means "pool is empty",
        # not that a specific proxy is missing.
        raise ProxyNotFoundException("", 0)
    return success_response("获取随机代理成功", format_proxy(candidate))
@router.get("/export/{fmt}")
async def export_proxies(
    fmt: str,
    protocol: Optional[str] = None,
    limit: int = Query(default=10000, ge=1, le=100000),
    service: ProxyService = Depends(get_proxy_service),
):
    """Stream the proxy pool as a downloadable csv/txt/json attachment.

    Raises ProxyPoolException(400) for any unsupported format.
    """
    media_by_fmt = {"csv": "text/csv", "txt": "text/plain", "json": "application/json"}
    media_type = media_by_fmt.get(fmt)
    if media_type is None:
        raise ProxyPoolException("不支持的导出格式", 400)
    # service.export_proxies yields chunks lazily; StreamingResponse can
    # consume the async generator directly — no wrapper needed.
    return StreamingResponse(
        service.export_proxies(fmt, protocol, limit),
        media_type=media_type,
        headers={"Content-Disposition": f"attachment; filename=proxies.{fmt}"},
    )
@router.delete("/{ip}/{port}")
async def delete_proxy(
    ip: str,
    port: int,
    service: ProxyService = Depends(get_proxy_service),
):
    """Remove the single proxy identified by ip:port.

    Any "not found" handling is the service's responsibility; domain
    exceptions propagate to the global error middleware.
    """
    await service.delete_proxy(ip, port)
    return success_response("删除代理成功")
@router.post("/batch-delete")
async def batch_delete(
    request: BatchDeleteRequest,
    service: ProxyService = Depends(get_proxy_service),
):
    """Delete many proxies in one request; reports how many were removed."""
    targets = []
    for entry in request.proxies:
        targets.append((entry.ip, entry.port))
    deleted = await service.batch_delete(targets)
    return success_response(f"批量删除 {deleted} 个代理成功", {"deleted_count": deleted})
@router.delete("/clean-invalid")
async def clean_invalid(service: ProxyService = Depends(get_proxy_service)):
    """Purge proxies the validator has flagged as invalid; returns the count."""
    removed = await service.clean_invalid()
    return success_response(f"清理了 {removed} 个无效代理", {"deleted_count": removed})