- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB (see the sketch below)
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
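A minimal sketch of the test-database isolation noted above, assuming the app resolves its SQLite path from the PROXYPOOL_DB_PATH environment variable at import time; the conftest.py placement and the setdefault call are illustrative, not necessarily the repo's exact test setup:

    # conftest.py: point the app at a throwaway SQLite file before any app
    # module is imported, so tests never touch the production db/proxies.sqlite.
    import os

    os.environ.setdefault("PROXYPOOL_DB_PATH", "db/proxies.test.sqlite")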
"""代理业务服务"""
|
|
import csv
|
|
import json
|
|
import io
|
|
from datetime import datetime
|
|
from typing import List, Optional, Tuple, AsyncIterator
|
|
|
|
from app.core.db import get_db
|
|
from app.repositories.proxy_repo import ProxyRepository
|
|
from app.models.domain import Proxy
|
|
from app.core.log import logger
|
|
|
|
|
|
class ProxyService:
    """Service layer over ProxyRepository: stats, listing, cleanup, export."""

    def __init__(self, proxy_repo: Optional[ProxyRepository] = None):
        # A repository default created in the signature would be instantiated
        # once at import time and shared by every instance; build it here.
        self.proxy_repo = proxy_repo or ProxyRepository()

    async def get_stats(self) -> dict:
        async with get_db() as db:
            stats = await self.proxy_repo.get_stats(db)
            stats["today_new"] = await self.proxy_repo.get_today_new_count(db)
            return stats

    async def list_proxies(
        self,
        page: int = 1,
        page_size: int = 20,
        protocol: Optional[str] = None,
        min_score: int = 0,
        max_score: Optional[int] = None,
        sort_by: str = "last_check",
        sort_order: str = "DESC",
        pool_filter: Optional[str] = None,
    ) -> Tuple[List[Proxy], int]:
        async with get_db() as db:
            return await self.proxy_repo.list_paginated(
                db,
                page,
                page_size,
                protocol,
                min_score,
                max_score,
                sort_by,
                sort_order,
                pool_filter=pool_filter,
            )

    async def get_random_proxy(self) -> Optional[Proxy]:
        async with get_db() as db:
            return await self.proxy_repo.get_random(db)

    async def delete_proxy(self, ip: str, port: int) -> None:
        async with get_db() as db:
            await self.proxy_repo.delete(db, ip, port)

    async def batch_delete(self, proxies: List[Tuple[str, int]]) -> int:
        async with get_db() as db:
            return await self.proxy_repo.batch_delete(db, proxies)

    async def clean_invalid(self) -> int:
        async with get_db() as db:
            return await self.proxy_repo.clean_invalid(db)

    async def clean_expired(self, days: int) -> int:
        async with get_db() as db:
            return await self.proxy_repo.clean_expired(db, days)

    async def export_proxies(
        self,
        fmt: str,
        protocol: Optional[str] = None,
        limit: int = 10000,
    ) -> AsyncIterator[str]:
        # Emit the format-specific preamble first.
        if fmt == "csv":
            # Leading BOM so Excel detects the CSV as UTF-8.
            yield "\ufeffIP,Port,Protocol,Score,Last Check\n"
        elif fmt == "txt":
            pass  # plain ip:port lines, no header
        elif fmt == "json":
            yield "["
        first = True

        exported = 0
        async with get_db() as db:
            # Stream in batches so a large table is never fully materialized.
            async for batch in self.proxy_repo.iter_batches(
                db, protocol=protocol, batch_size=1000, only_usable=True
            ):
                for p in batch:
                    if exported >= limit:
                        break
                    if fmt == "csv":
                        yield f"{p.ip},{p.port},{p.protocol},{p.score},{self._fmt_time(p.last_check)}\n"
                    elif fmt == "txt":
                        yield f"{p.ip}:{p.port}\n"
                    elif fmt == "json":
                        item = {
                            "ip": p.ip,
                            "port": p.port,
                            "protocol": p.protocol,
                            "score": p.score,
                            "last_check": self._fmt_time(p.last_check),
                        }
                        # Comma-prefix every element after the first so the
                        # streamed JSON array stays valid.
                        prefix = "" if first else ","
                        yield prefix + json.dumps(item, ensure_ascii=False)
                        first = False
                    exported += 1
                if exported >= limit:
                    break

        if fmt == "json":
            yield "]"

    @staticmethod
    def _fmt_time(dt: Optional[datetime]) -> str:
        if not dt:
            return ""
        if isinstance(dt, str):
            # SQLite may hand timestamps back as raw strings; pass through.
            return dt
        return dt.isoformat()
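
# --- Usage sketch (illustrative; not part of the module above) --------------
# How this service layer might be wired into FastAPI routes. The app object,
# the export route path, and the DeleteOne model are assumptions for
# illustration; only ProxyService and its methods come from the module. The
# delete route mirrors the commit's "POST /api/proxies/delete-one for
# IPv6-safe deletes".
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

app = FastAPI()
service = ProxyService()


class DeleteOne(BaseModel):
    ip: str   # a plain string field also carries IPv6, e.g. "2001:db8::1"
    port: int


@app.post("/api/proxies/delete-one")
async def delete_one(body: DeleteOne):
    # Carrying ip/port in a JSON body avoids parsing "ip:port" strings,
    # which is ambiguous for colon-laden IPv6 addresses.
    await service.delete_proxy(body.ip, body.port)
    return {"deleted": True}


@app.get("/api/proxies/export")
async def export(fmt: str = "txt"):
    media = {"csv": "text/csv", "json": "application/json"}.get(fmt, "text/plain")
    # export_proxies is an async generator, so StreamingResponse can send
    # rows as they are produced instead of buffering the whole export.
    return StreamingResponse(service.export_proxies(fmt), media_type=media)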