Files
ProxyPool/api/routes/plugins.py
祀梦 66943df864 实现插件配置持久化与任务队列持久化
插件配置持久化:
- plugin_settings 表新增 config_json 字段,支持存储每个插件的自定义配置
- BaseCrawlerPlugin 新增 default_config 属性和 update_config 方法
- PluginSettingsRepository 新增 get_config / set_config 方法
- PluginService 新增 get_plugin_config 和 update_plugin_config
- api/routes/plugins.py 新增 GET /{id}/config 和 POST /{id}/config 接口
- 前端 Plugins.vue 增加配置编辑对话框,支持动态渲染数字/布尔/字符串类型配置
- ip3366 插件示例化:增加 max_pages 配置项,验证配置生效后会动态更新爬取 URL

任务队列持久化:
- 新建 validation_tasks 表:id, ip, port, protocol, status, result, response_time_ms, created_at, updated_at
- 新建 ValidationTaskRepository,提供 insert_batch / acquire_pending / complete_task / reset_processing 等方法
- ValidationQueue 重构:
  - submit() 时把任务写入数据库并唤醒 Worker
  - Worker 通过 acquire_pending 原子取任务并验证
  - 验证完成后更新任务状态并入库有效代理
  - 启动时自动恢复之前中断的 processing 任务为 pending
  - 支持 drain() 等待所有 pending 完成
- 调度器验证流程同样自动持久化到任务表

其他适配:
- 更新 api/deps.py 和 api/lifespan.py,移除对已删除 settings_service 的残留引用
- 更新前端 pluginService.js 和 api/index.js 增加配置相关 API
2026-04-02 12:35:06 +08:00

168 lines
5.8 KiB
Python

"""插件相关路由"""
import asyncio

from fastapi import APIRouter, Depends

from api.deps import get_plugin_service, get_scheduler_service
from core.log import logger
from services.plugin_service import PluginService
from services.scheduler_service import SchedulerService
router = APIRouter(prefix="/api/plugins", tags=["plugins"])
def success_response(message: str, data=None):
    """Wrap *message* and *data* in the API's uniform success envelope (code 200)."""
    body = {"code": 200, "message": message}
    body["data"] = data
    return body
def error_response(message: str, code: int = 500):
    """Wrap *message* in the API's uniform error envelope; data is always None."""
    return dict(code=code, message=message, data=None)
@router.get("")
async def list_plugins(service: PluginService = Depends(get_plugin_service)):
plugins = await service.list_plugins()
return success_response(
"获取插件列表成功",
{
"plugins": [
{
"id": p.id,
"name": p.display_name,
"display_name": p.display_name,
"description": p.description,
"enabled": p.enabled,
"last_run": p.last_run.isoformat() if p.last_run else None,
"success_count": p.success_count,
"failure_count": p.failure_count,
}
for p in plugins
]
},
)
@router.put("/{plugin_id}/toggle")
async def toggle_plugin(
plugin_id: str,
request: dict,
service: PluginService = Depends(get_plugin_service),
):
enabled = request.get("enabled")
if enabled is None:
return error_response("缺少 enabled 参数", 400)
success = await service.toggle_plugin(plugin_id, enabled)
if not success:
return error_response("插件不存在", 404)
return success_response(
f"插件 {plugin_id}{'启用' if enabled else '禁用'}",
{"plugin_id": plugin_id, "enabled": enabled},
)
@router.get("/{plugin_id}/config")
async def get_plugin_config(
plugin_id: str,
service: PluginService = Depends(get_plugin_service),
):
config = await service.get_plugin_config(plugin_id)
if config is None:
return error_response("插件不存在", 404)
return success_response("获取插件配置成功", {"plugin_id": plugin_id, "config": config})
@router.post("/{plugin_id}/config")
async def update_plugin_config(
plugin_id: str,
request: dict,
service: PluginService = Depends(get_plugin_service),
):
config = request.get("config", {})
if not isinstance(config, dict):
return error_response("config 必须是对象", 400)
success = await service.update_plugin_config(plugin_id, config)
if not success:
return error_response("插件不存在或配置无效", 404)
return success_response("保存插件配置成功", {"plugin_id": plugin_id, "config": config})
@router.post("/{plugin_id}/crawl")
async def crawl_plugin(
plugin_id: str,
plugin_service: PluginService = Depends(get_plugin_service),
scheduler_service: SchedulerService = Depends(get_scheduler_service),
):
plugin = plugin_service.get_plugin(plugin_id)
if not plugin:
return error_response("插件不存在", 404)
try:
results = await plugin_service.run_plugin(plugin_id)
if not results:
return success_response(
f"插件 {plugin_id} 爬取完成,未获取到代理",
{"plugin_id": plugin_id, "proxy_count": 0, "valid_count": 0},
)
logger.info(f"Plugin {plugin_id} crawled {len(results)} proxies, sending to validation queue")
scheduler_service.validation_queue.reset_stats()
await scheduler_service.validation_queue.submit(results)
# 等待队列排空(最多等 30 秒,避免前端超时)
try:
await asyncio.wait_for(scheduler_service.validation_queue.drain(), timeout=30.0)
except asyncio.TimeoutError:
pass
valid_count = scheduler_service.validation_queue.valid_count
invalid_count = scheduler_service.validation_queue.invalid_count
return success_response(
f"插件 {plugin_id} 爬取并验证完成",
{
"plugin_id": plugin_id,
"proxy_count": len(results),
"valid_count": valid_count,
"invalid_count": invalid_count,
},
)
except Exception as e:
logger.error(f"Crawl plugin {plugin_id} failed: {e}")
return error_response(f"插件爬取失败: {str(e)}")
@router.post("/crawl-all")
async def crawl_all(
plugin_service: PluginService = Depends(get_plugin_service),
scheduler_service: SchedulerService = Depends(get_scheduler_service),
):
try:
results = await plugin_service.run_all_plugins()
if not results:
return success_response(
"所有插件爬取完成,未获取到代理",
{"total_crawled": 0, "valid_count": 0, "invalid_count": 0},
)
logger.info(f"All plugins crawled {len(results)} unique proxies, sending to validation queue")
scheduler_service.validation_queue.reset_stats()
await scheduler_service.validation_queue.submit(results)
try:
await asyncio.wait_for(scheduler_service.validation_queue.drain(), timeout=60.0)
except asyncio.TimeoutError:
pass
valid_count = scheduler_service.validation_queue.valid_count
invalid_count = scheduler_service.validation_queue.invalid_count
return success_response(
"所有插件爬取并验证完成",
{
"total_crawled": len(results),
"valid_count": valid_count,
"invalid_count": invalid_count,
},
)
except Exception as e:
logger.error(f"Crawl all failed: {e}")
return error_response(f"批量爬取失败: {str(e)}")
import asyncio