Files
ProxyPool/app/api/routes/plugins.py
祀梦 7bc6d4e4de feat: JSON 配置、质量分与仪表盘,及设置与爬取流程
- 后端改为 config/app.json;pytest 使用 config/app.test.json 与 set_config_file,不再依赖环境变量;移除 pydantic-settings。

- 前端 API/WebSocket 由 config/webui.json 经 Vite define 注入。

- 代理分数按延迟与随机取用次数计算,新增 use_count 与 proxy_scoring;保存设置时同步调度器启停。

- 仪表盘双饼图(可用/待验证协议);设置页去掉调度器启停按钮并移动立即验证;爬取全部结束后自动提交全量验证。

- 删除 script/settings_maintain.py(此前已标记删除)。

Made-with: Cursor
2026-04-05 16:08:32 +08:00

193 lines
6.8 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""插件相关路由"""
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from app.services.plugin_service import PluginService
from app.services.plugin_runner import PluginRunner
from app.core.execution import JobExecutor, CrawlJob, ValidateAllJob
from app.core.log import logger
from app.core.exceptions import PluginNotFoundException
from app.api.deps import get_plugin_service, get_plugin_runner, get_executor
from app.api.common import success_response, format_plugin
# All plugin endpoints in this module are mounted under /api/plugins.
router = APIRouter(prefix="/api/plugins", tags=["plugins"])


class ToggleRequest(BaseModel):
    """Body for PUT /{plugin_id}/toggle: the desired enabled state."""

    enabled: bool


class ConfigRequest(BaseModel):
    """Body for POST /{plugin_id}/config: an opaque plugin config dict."""

    config: dict
@router.get("")
async def list_plugins(service: PluginService = Depends(get_plugin_service)):
    """Return every registered plugin, serialized for the API response."""
    formatted = [format_plugin(plugin) for plugin in await service.list_plugins()]
    return success_response("获取插件列表成功", {"plugins": formatted})
@router.put("/{plugin_id}/toggle")
async def toggle_plugin(
    plugin_id: str,
    request: ToggleRequest,
    service: PluginService = Depends(get_plugin_service),
):
    """Enable or disable a single plugin and echo the resulting state."""
    await service.toggle_plugin(plugin_id, request.enabled)
    state_word = "启用" if request.enabled else "禁用"
    payload = {"plugin_id": plugin_id, "enabled": request.enabled}
    return success_response(f"插件 {plugin_id}{state_word}", payload)
@router.get("/{plugin_id}/config")
async def get_plugin_config(
    plugin_id: str,
    service: PluginService = Depends(get_plugin_service),
):
    """Fetch the stored configuration for one plugin."""
    cfg = await service.get_plugin_config(plugin_id)
    payload = {"plugin_id": plugin_id, "config": cfg}
    return success_response("获取插件配置成功", payload)
@router.post("/{plugin_id}/config")
async def update_plugin_config(
    plugin_id: str,
    request: ConfigRequest,
    service: PluginService = Depends(get_plugin_service),
):
    """Persist a new configuration dict for one plugin and echo it back."""
    await service.update_plugin_config(plugin_id, request.config)
    payload = {"plugin_id": plugin_id, "config": request.config}
    return success_response("保存插件配置成功", payload)
@router.post("/{plugin_id}/crawl")
async def crawl_plugin(
    plugin_id: str,
    plugin_service: PluginService = Depends(get_plugin_service),
    plugin_runner: PluginRunner = Depends(get_plugin_runner),
    executor: JobExecutor = Depends(get_executor),
):
    """Start a background crawl for a single plugin and return its job id."""
    # Validates the id up front; raises for unknown plugins (handled upstream).
    plugin_service.get_plugin_or_raise(plugin_id)
    crawl_job = CrawlJob(
        plugin_id=plugin_id,
        plugin_runner=plugin_runner,
        proxy_service=plugin_service,
        validator_pool=executor.worker_pool,
    )
    task_id = executor.submit_job(crawl_job)
    return success_response(
        "爬取任务已启动", {"task_id": task_id, "plugin_id": plugin_id}
    )
@router.post("/crawl-all")
async def crawl_all(
    plugin_service: PluginService = Depends(get_plugin_service),
    plugin_runner: PluginRunner = Depends(get_plugin_runner),
    executor: JobExecutor = Depends(get_executor),
):
    """Submit a CrawlJob for every enabled plugin.

    Returns a single aggregate task id so existing clients (frontend/tests)
    that expect one pollable task keep working unchanged.
    """
    from app.core.plugin_system.registry import registry

    submitted_ids = [
        executor.submit_job(
            CrawlJob(
                plugin_id=plugin.name,
                plugin_runner=plugin_runner,
                proxy_service=plugin_service,
                validator_pool=executor.worker_pool,
            )
        )
        for plugin in registry.list_plugins()
        if plugin.enabled
    ]
    # Synthetic aggregator job: internally waits for all child crawl jobs,
    # presenting them as one task for status queries.
    aggregate_job = _create_crawl_all_aggregator(submitted_ids, executor)
    aggregate_id = executor.submit_job(aggregate_job)
    return success_response("爬取任务已启动", {"task_id": aggregate_id})
def _create_crawl_all_aggregator(job_ids, executor):
    """Build a synthetic Job that aggregates the status of child crawl jobs.

    The returned (un-submitted) job polls the child jobs until all reach a
    terminal state or ~5 minutes elapse (matching the frontend polling
    window), then produces a summary result.  On a normal, non-cancelled
    finish it also submits a single ValidateAllJob so freshly crawled
    proxies get validated in one pass.

    Args:
        job_ids: ids of the child CrawlJobs already submitted to ``executor``.
        executor: JobExecutor used to look up child jobs and to submit the
            follow-up validation job.

    Returns:
        A Job instance; the caller is responsible for submitting it.
    """
    from app.core.execution.job import Job
    import asyncio

    # Child jobs in any of these states no longer need waiting on.
    _TERMINAL = ("completed", "failed", "cancelled")

    def _summarize(jid):
        """Summarize one child job as (per-plugin dict, proxy_count, failed)."""
        job = executor.get_job(jid)
        if job is None:
            return (
                {
                    "plugin_id": "",
                    "proxy_count": 0,
                    "crawl_failed": True,
                    "error": "任务不存在",
                    "job_status": "missing",
                },
                0,
                True,
            )
        plugin_id = getattr(job, "plugin_id", "")
        proxy_count = 0
        crawl_failed = False
        err_msg = None
        if job.status.value == "failed":
            crawl_failed = True
            err_msg = job.error or "任务失败"
        elif job.result:
            r = job.result
            plugin_id = r.get("plugin_id") or plugin_id
            proxy_count = r.get("proxy_count", 0)
            # A job can complete yet still report crawl-level failures.
            if r.get("crawl_failed") or r.get("failure_count", 0) > 0:
                crawl_failed = True
                err_msg = r.get("error")
        return (
            {
                "plugin_id": plugin_id,
                "proxy_count": proxy_count,
                "crawl_failed": crawl_failed,
                "error": err_msg,
                "job_status": job.status.value,
            },
            proxy_count,
            crawl_failed,
        )

    class CrawlAllAggregator(Job):
        async def run(self):
            self._set_running()
            # Poll until every child job is terminal; 3000 ticks * 0.1 s
            # caps the wait at ~5 minutes, consistent with frontend polling.
            for _ in range(3000):
                if self.is_cancelled:
                    break
                # Fetch each job once per tick (the original fetched twice).
                jobs = [executor.get_job(jid) for jid in job_ids]
                if all(j is not None and j.status.value in _TERMINAL for j in jobs):
                    break
                await asyncio.sleep(0.1)

            total = 0
            plugins_failed = 0
            per_plugin = []
            for jid in job_ids:
                summary, count, failed = _summarize(jid)
                per_plugin.append(summary)
                total += count
                if failed:
                    plugins_failed += 1

            result = {
                "total_crawled": total,
                "plugins_failed": plugins_failed,
                "per_plugin": per_plugin,
            }
            if self.is_cancelled:
                result["cancelled"] = True
            else:
                # Normal finish: kick off one full validation of the pool.
                v_job = ValidateAllJob(validator_pool=executor.worker_pool)
                result["validate_all_task_id"] = executor.submit_job(v_job)
                logger.info(
                    "Crawl-all finished; submitted ValidateAllJob %s",
                    result["validate_all_task_id"],
                )
            return result

    return CrawlAllAggregator()