- Removed the dual-track persistent ValidationQueue; replaced it with a purely in-memory AsyncWorkerPool
- Introduced a unified background-job framework, JobExecutor (Job / CrawlJob / ValidateAllJob)
- Added PluginRunner for unified plugin execution (timeouts, retries, health checks, statistics)
- Refactored SchedulerService; its responsibility narrows to periodically triggering ValidateAllJob
- Rebuilt lifespan on AsyncExitStack for safe management of long-lived resources (see the sketch below)
- Slimmed the route layer by 50%+; business exceptions now propagate up and are handled uniformly by global middleware
- Implemented full hot-reload of settings (WorkerPool concurrency and Validator timeouts take effect immediately)
- Frontend stores now force a re-fetch after every write, eliminating stale data from optimistic updates
- Deleted queue.py / task_repo.py / task_service.py
- Added unit tests for execution; all 85 tests pass
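A minimal sketch of the AsyncExitStack lifespan pattern referenced above; AsyncWorkerPool and JobExecutor here are illustrative stubs rather than the project's real classes, and only the startup/teardown ordering is the point:

from contextlib import AsyncExitStack, asynccontextmanager

from fastapi import FastAPI


class AsyncWorkerPool:
    """Stub standing in for the real in-memory pool."""
    async def start(self) -> None: ...
    async def stop(self) -> None: ...


class JobExecutor:
    """Stub standing in for the real background-job executor."""
    def __init__(self, worker_pool: AsyncWorkerPool) -> None:
        self.worker_pool = worker_pool

    async def shutdown(self) -> None: ...


@asynccontextmanager
async def lifespan(app: FastAPI):
    async with AsyncExitStack() as stack:
        pool = AsyncWorkerPool()
        await pool.start()
        # Callbacks run in reverse registration order on exit, even when a
        # later startup step raises, so partially started resources unwind.
        stack.push_async_callback(pool.stop)

        executor = JobExecutor(worker_pool=pool)
        stack.push_async_callback(executor.shutdown)

        app.state.executor = executor
        yield


app = FastAPI(lifespan=lifespan)

On shutdown, or on a failed startup, the stack closes everything in reverse order without hand-written try/finally chains.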
149 lines · 5.1 KiB · Python
"""插件相关路由"""
|
||
from fastapi import APIRouter, Depends
|
||
from pydantic import BaseModel
|
||
|
||
from app.services.plugin_service import PluginService
|
||
from app.services.plugin_runner import PluginRunner
|
||
from app.core.execution import JobExecutor, CrawlJob
|
||
from app.core.exceptions import PluginNotFoundException
|
||
from app.api.deps import get_plugin_service, get_plugin_runner, get_executor
|
||
from app.api.common import success_response
|
||
|
||
router = APIRouter(prefix="/api/plugins", tags=["plugins"])
|
||
|
||
|
||
class ToggleRequest(BaseModel):
    enabled: bool


class ConfigRequest(BaseModel):
    config: dict


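# Illustrative request bodies for the two models above (key names inside
# "config" are hypothetical; the real schema is plugin-specific):
#   PUT  /api/plugins/{plugin_id}/toggle  {"enabled": true}
#   POST /api/plugins/{plugin_id}/config  {"config": {"timeout": 10}}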
@router.get("")
|
||
async def list_plugins(service: PluginService = Depends(get_plugin_service)):
|
||
plugins = await service.list_plugins()
|
||
return success_response("获取插件列表成功", {"plugins": [format_plugin(p) for p in plugins]})
|
||
|
||
|
||
@router.put("/{plugin_id}/toggle")
|
||
async def toggle_plugin(
|
||
plugin_id: str,
|
||
request: ToggleRequest,
|
||
service: PluginService = Depends(get_plugin_service),
|
||
):
|
||
await service.toggle_plugin(plugin_id, request.enabled)
|
||
return success_response(
|
||
f"插件 {plugin_id} 已{'启用' if request.enabled else '禁用'}",
|
||
{"plugin_id": plugin_id, "enabled": request.enabled},
|
||
)
|
||
|
||
|
||
@router.get("/{plugin_id}/config")
|
||
async def get_plugin_config(
|
||
plugin_id: str,
|
||
service: PluginService = Depends(get_plugin_service),
|
||
):
|
||
config = await service.get_plugin_config(plugin_id)
|
||
return success_response("获取插件配置成功", {"plugin_id": plugin_id, "config": config})
|
||
|
||
|
||
@router.post("/{plugin_id}/config")
|
||
async def update_plugin_config(
|
||
plugin_id: str,
|
||
request: ConfigRequest,
|
||
service: PluginService = Depends(get_plugin_service),
|
||
):
|
||
success = await service.update_plugin_config(plugin_id, request.config)
|
||
if not success:
|
||
raise PluginNotFoundException(plugin_id)
|
||
return success_response("保存插件配置成功", {"plugin_id": plugin_id, "config": request.config})
|
||
|
||
|
||
@router.post("/{plugin_id}/crawl")
|
||
async def crawl_plugin(
|
||
plugin_id: str,
|
||
plugin_service: PluginService = Depends(get_plugin_service),
|
||
plugin_runner: PluginRunner = Depends(get_plugin_runner),
|
||
executor: JobExecutor = Depends(get_executor),
|
||
):
|
||
plugin = plugin_service.get_plugin_or_raise(plugin_id)
|
||
job = CrawlJob(
|
||
plugin_id=plugin_id,
|
||
plugin_runner=plugin_runner,
|
||
proxy_service=plugin_service,
|
||
validator_pool=executor.worker_pool,
|
||
)
|
||
job_id = executor.submit_job(job)
|
||
return success_response(
|
||
"爬取任务已启动", {"task_id": job_id, "plugin_id": plugin_id}
|
||
)
|
||
|
||
|
||
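# Illustrative call to the single-plugin crawl endpoint above (the plugin id
# is hypothetical, and the envelope assumes success_response wraps a message
# plus a data payload):
#   POST /api/plugins/example_source/crawl
#   -> {"message": "爬取任务已启动",
#       "data": {"task_id": "<job-id>", "plugin_id": "example_source"}}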
@router.post("/crawl-all")
|
||
async def crawl_all(
|
||
plugin_service: PluginService = Depends(get_plugin_service),
|
||
plugin_runner: PluginRunner = Depends(get_plugin_runner),
|
||
executor: JobExecutor = Depends(get_executor),
|
||
):
|
||
"""为所有启用插件创建 CrawlJob,返回一个聚合任务 ID 保持 API 兼容"""
|
||
from app.core.plugin_system.registry import registry
|
||
job_ids = []
|
||
for plugin in registry.list_plugins():
|
||
if not plugin.enabled:
|
||
continue
|
||
job = CrawlJob(
|
||
plugin_id=plugin.name,
|
||
plugin_runner=plugin_runner,
|
||
proxy_service=plugin_service,
|
||
validator_pool=executor.worker_pool,
|
||
)
|
||
job_ids.append(executor.submit_job(job))
|
||
|
||
# 为了保持 API 兼容(前端/测试期望单个 task_id),
|
||
# 创建一个虚拟的聚合 Job 用于状态查询,它内部会等待所有 crawl job 完成
|
||
aggregator = _create_crawl_all_aggregator(job_ids, executor)
|
||
agg_id = executor.submit_job(aggregator)
|
||
return success_response("爬取任务已启动", {"task_id": agg_id})
|
||
|
||
|
||
def _create_crawl_all_aggregator(job_ids, executor):
    """Create a simple aggregate Job that summarizes the status of all child jobs."""
    import asyncio

    from app.core.execution.job import Job

    class CrawlAllAggregator(Job):
        async def run(self):
            # Poll until every child job reaches a terminal state
            # (300 iterations x 0.1 s = a 30-second budget at most).
            for _ in range(300):
                jobs = [executor.get_job(jid) for jid in job_ids]
                if all(
                    job is not None
                    and job.status.value in ("completed", "failed", "cancelled")
                    for job in jobs
                ):
                    break
                await asyncio.sleep(0.1)
            total = 0
            valid = 0
            for jid in job_ids:
                job = executor.get_job(jid)
                if job and job.result:
                    total += job.result.get("proxy_count", 0)
                    valid += job.result.get("success_count", 0)
            return {"total_crawled": total, "valid_count": valid, "invalid_count": 0}

    return CrawlAllAggregator()


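# Note on the aggregator above: it polls JobExecutor for child status rather
# than awaiting the child jobs directly, which keeps it decoupled from the
# executor's internals at the cost of up to 0.1 s of extra latency. If Job
# ever exposes an awaitable completion handle, asyncio.gather over the child
# jobs would be a tighter alternative.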
def format_plugin(plugin) -> dict:
    return {
        "id": plugin.id,
        "name": plugin.display_name,
        "display_name": plugin.display_name,
        "description": plugin.description,
        "enabled": plugin.enabled,
        "last_run": plugin.last_run.isoformat() if plugin.last_run else None,
        "success_count": plugin.success_count,
        "failure_count": plugin.failure_count,
    }