Files
ProxyPool/app/services/proxy_service.py
祀梦 b972b64616 refactor: comprehensively rework the core architecture to eliminate the root cause of repeated rewrites
- Remove the dual-track persistent ValidationQueue and replace it with a purely in-memory AsyncWorkerPool
- Introduce JobExecutor, a unified background-task framework (Job/CrawlJob/ValidateAllJob)
- Add PluginRunner to unify plugin execution (timeouts, retries, health checks, statistics)
- Refactor SchedulerService, narrowing its responsibility to periodically triggering ValidateAllJob
- Rebuild lifespan with AsyncExitStack to manage long-lived resources safely (sketched below)
- Slim the routing layer by 50%+; business exceptions now propagate up and are handled uniformly by global middleware
- Make all settings hot-reloadable (WorkerPool concurrency and Validator timeouts take effect immediately)
- Force the frontend store to re-fetch after every write, eliminating optimistic-update inconsistencies
- Delete queue.py / task_repo.py / task_service.py
- Add execution unit tests; all 85 tests pass
2026-04-04 22:36:57 +08:00
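
The AsyncExitStack lifespan mentioned in the commit message is worth a minimal sketch. Everything below — the AsyncWorkerPool / SchedulerService stand-ins and their start()/aclose() methods — is an illustrative assumption, not this repository's actual code:

# Minimal sketch of the AsyncExitStack lifespan pattern described above.
# AsyncWorkerPool / SchedulerService and their start()/aclose() methods are
# hypothetical stand-ins, not the project's real classes.
from contextlib import AsyncExitStack, asynccontextmanager

from fastapi import FastAPI


class AsyncWorkerPool:  # hypothetical stand-in
    async def start(self) -> None: ...
    async def aclose(self) -> None: ...


class SchedulerService:  # hypothetical stand-in
    async def start(self) -> None: ...
    async def aclose(self) -> None: ...


@asynccontextmanager
async def lifespan(app: FastAPI):
    async with AsyncExitStack() as stack:
        # Callbacks pushed onto the stack run in reverse order on shutdown,
        # and also if a later startup step raises midway.
        pool = AsyncWorkerPool()
        stack.push_async_callback(pool.aclose)
        await pool.start()

        scheduler = SchedulerService()
        stack.push_async_callback(scheduler.aclose)
        await scheduler.start()

        app.state.worker_pool = pool
        yield  # the application serves requests here; the stack unwinds afterwards


app = FastAPI(lifespan=lifespan)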

108 lines
3.6 KiB
Python

"""代理业务服务"""
import csv
import json
import io
from datetime import datetime
from typing import List, Optional, Tuple, AsyncIterator
from app.core.db import get_db, transaction
from app.repositories.proxy_repo import ProxyRepository
from app.models.domain import Proxy
from app.core.log import logger
class ProxyService:
def __init__(self, proxy_repo: ProxyRepository = ProxyRepository()):
self.proxy_repo = proxy_repo
async def get_stats(self) -> dict:
async with get_db() as db:
stats = await self.proxy_repo.get_stats(db)
stats["today_new"] = await self.proxy_repo.get_today_new_count(db)
return stats
    async def list_proxies(
        self,
        page: int = 1,
        page_size: int = 20,
        protocol: Optional[str] = None,
        min_score: int = 0,
        max_score: Optional[int] = None,
        sort_by: str = "last_check",
        sort_order: str = "DESC",
    ) -> Tuple[List[Proxy], int]:
        async with get_db() as db:
            return await self.proxy_repo.list_paginated(
                db, page, page_size, protocol, min_score, max_score, sort_by, sort_order
            )
    async def get_random_proxy(self) -> Optional[Proxy]:
        async with get_db() as db:
            return await self.proxy_repo.get_random(db)

    async def delete_proxy(self, ip: str, port: int) -> None:
        async with get_db() as db:
            await self.proxy_repo.delete(db, ip, port)

    async def batch_delete(self, proxies: List[Tuple[str, int]]) -> int:
        async with get_db() as db:
            return await self.proxy_repo.batch_delete(db, proxies)

    async def clean_invalid(self) -> int:
        async with get_db() as db:
            return await self.proxy_repo.clean_invalid(db)

    async def clean_expired(self, days: int) -> int:
        async with get_db() as db:
            return await self.proxy_repo.clean_expired(db, days)
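
    # export_proxies below streams the export as an async generator: a
    # format-specific prologue first (CSV header with BOM, or "[" for JSON),
    # then rows fetched in batches of 1000, then a closing "]" for JSON.
    # `limit` caps the number of exported rows.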
    async def export_proxies(
        self,
        fmt: str,
        protocol: Optional[str] = None,
        limit: int = 10000,
    ) -> AsyncIterator[str]:
        if fmt == "csv":
            # BOM prefix so Excel recognizes the stream as UTF-8.
            yield "\ufeffIP,Port,Protocol,Score,Last Check\n"
        elif fmt == "txt":
            pass  # plain text needs no prologue
        elif fmt == "json":
            yield "["
        first = True
        exported = 0
        async with get_db() as db:
            async for batch in self.proxy_repo.iter_batches(db, protocol=protocol, batch_size=1000):
                for p in batch:
                    if exported >= limit:
                        break
                    if fmt == "csv":
                        yield f"{p.ip},{p.port},{p.protocol},{p.score},{self._fmt_time(p.last_check)}\n"
                    elif fmt == "txt":
                        yield f"{p.ip}:{p.port}\n"
                    elif fmt == "json":
                        item = {
                            "ip": p.ip,
                            "port": p.port,
                            "protocol": p.protocol,
                            "score": p.score,
                            "last_check": self._fmt_time(p.last_check),
                        }
                        # Comma-prefix every element after the first to keep
                        # the streamed JSON array valid.
                        prefix = "" if first else ","
                        yield prefix + json.dumps(item, ensure_ascii=False)
                        first = False
                    exported += 1
                if exported >= limit:
                    break
        if fmt == "json":
            yield "]"
    @staticmethod
    def _fmt_time(dt: Optional[datetime]) -> str:
        if not dt:
            return ""
        if isinstance(dt, str):
            return dt
        return dt.isoformat()
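
For reference, a hedged usage sketch of the streaming exporter: the route path, media-type map, and wiring below are illustrative assumptions, not this project's actual (slimmed-down) router.

# Hypothetical route showing how export_proxies' AsyncIterator[str] plugs
# into FastAPI's StreamingResponse; path and parameter names are assumptions.
from fastapi import FastAPI
from fastapi.responses import StreamingResponse

from app.services.proxy_service import ProxyService

app = FastAPI()
service = ProxyService()

MEDIA_TYPES = {"csv": "text/csv", "txt": "text/plain", "json": "application/json"}


@app.get("/api/proxies/export")
async def export(fmt: str = "txt", limit: int = 10000):
    # StreamingResponse consumes the async generator chunk by chunk,
    # so large exports are never fully materialized in memory.
    return StreamingResponse(
        service.export_proxies(fmt, limit=limit),
        media_type=MEDIA_TYPES.get(fmt, "text/plain"),
    )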