Round 5 fixes: workerpool resize shrink, validator lazy session close, plugin config error handling, 422 message detail, tests

This commit is contained in:
祀梦
2026-04-05 10:39:59 +08:00
parent d5fdfd65d9
commit 92c7fa19e2
5 changed files with 23 additions and 22 deletions

View File

@@ -23,12 +23,14 @@ async def http_exception_handler(request: Request, exc: StarletteHTTPException):
async def pydantic_validation_handler(request: Request, exc: ValidationError):
    """Return a 422 JSON response for Pydantic validation failures.

    Uses the first error's human-readable ``msg`` as the top-level message
    (falling back to a generic one when the error list is empty) and echoes
    the full error list in ``data`` so clients can see every failed field.
    """
    logger.error(f"Validation error: {exc}")
    errors = exc.errors()
    # The first error's message is the most actionable detail for the client;
    # the generic fallback covers a missing "msg" key or an empty error list.
    message = errors[0].get("msg", "参数验证失败") if errors else "参数验证失败"
    return JSONResponse(
        status_code=422,
        content={
            "code": 422,
            "message": message,
            "data": errors,
        },
    )

View File

@@ -54,9 +54,7 @@ async def update_plugin_config(
request: ConfigRequest,
service: PluginService = Depends(get_plugin_service),
):
success = await service.update_plugin_config(plugin_id, request.config)
if not success:
raise PluginNotFoundException(plugin_id)
await service.update_plugin_config(plugin_id, request.config)
return success_response("保存插件配置成功", {"plugin_id": plugin_id, "config": request.config})

View File

@@ -52,9 +52,14 @@ async def save_settings(
validator._init_max_concurrency = request.default_concurrency
if request.validation_targets is not None:
validator.update_test_urls(request.validation_targets)
# 先关闭现有 session,再重置 semaphore,避免竞态窗口
await validator.close()
# 延迟关闭旧 session:让正在验证的代理继续使用旧 session
# 新请求会通过 _ensure_session() 自动创建使用新配置的 session
old_session = validator._http_session
validator._http_session = None
validator._http_connector = None
validator._semaphore = None
if old_session and not old_session.closed:
asyncio.create_task(old_session.close())
logger.info(f"Validator config updated: timeout={request.validation_timeout}, concurrency={request.default_concurrency}, targets={request.validation_targets}")
return success_response("保存设置成功", request.model_dump())

View File

@@ -84,19 +84,12 @@ class AsyncWorkerPool:
asyncio.create_task(self._worker_loop(i), name=f"{self.name}-worker-{i}")
)
elif new_worker_count < self.worker_count:
for _ in range(self.worker_count - new_worker_count):
await self._queue.put(None)
await asyncio.sleep(0)
still_running = []
for w in self._workers:
if w.done():
try:
await w
except asyncio.CancelledError:
pass
else:
still_running.append(w)
self._workers = still_running
excess_workers = self._workers[new_worker_count:]
self._workers = self._workers[:new_worker_count]
for w in excess_workers:
w.cancel()
if excess_workers:
await asyncio.gather(*excess_workers, return_exceptions=True)
self.worker_count = new_worker_count
async def _worker_loop(self, worker_id: int) -> None:

View File

@@ -82,10 +82,13 @@ class PluginService:
raise PluginNotFoundException(plugin_id)
safe_config = {k: v for k, v in config.items() if k in plugin.default_config}
if not safe_config:
return False
raise ValidationException("配置项无效或为空")
plugin.update_config(safe_config)
async with get_db() as db:
return await self.plugin_settings_repo.set_config(db, plugin_id, plugin.config)
success = await self.plugin_settings_repo.set_config(db, plugin_id, plugin.config)
if not success:
raise ProxyPoolException("保存插件配置失败", 500)
return True
def get_plugin(self, plugin_id: str) -> Optional[BaseCrawlerPlugin]:
    """Return the crawler plugin registered under *plugin_id*, or None if absent."""
    return registry.get(plugin_id)