- Fix SQL injection risks in proxy_repo and task_repo
- Atomic acquire_pending with UPDATE ... RETURNING
- Reuse aiohttp ClientSession in ValidatorService
- Replace polling with asyncio.Event in SchedulerService
- Optimize ValidationQueue.drain with asyncio.Condition
- Concurrent plugin crawling with asyncio.gather
- Unify ProxyRaw model import path
- Fix test baseline and remove tracked __pycache__ files
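The change list mentions running plugin crawls concurrently with asyncio.gather. A minimal sketch of that pattern follows; crawl_all is a hypothetical helper (not taken from the repository), and it assumes each plugin exposes an async crawl() and a display_name like the plugin defined in the file below:

import asyncio


async def crawl_all(plugins):
    # Run every plugin's async crawl() at the same time instead of sequentially.
    outcomes = await asyncio.gather(
        *(plugin.crawl() for plugin in plugins),
        return_exceptions=True,  # one failing source should not cancel the rest
    )
    results = []
    for plugin, outcome in zip(plugins, outcomes):
        if isinstance(outcome, Exception):
            print(f"{plugin.display_name} crawl failed: {outcome!r}")
            continue
        results.extend(outcome)
    return results

Passing return_exceptions=True lets one misbehaving source be logged and skipped while the remaining plugins still contribute results.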
57 lines · 1.9 KiB · Python
from typing import List

from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin
from app.core.log import logger


class ProxyListDownloadPlugin(BaseHTTPPlugin):
    """Plugin that fetches proxy lists from the proxy-list.download API."""

    default_config = {"max_pages": 5}
    name = "proxylist_download"
    display_name = "ProxyListDownload"
    description = "Fetch proxies from the ProxyListDownload API"

    def __init__(self):
        super().__init__()
        self.urls = [
            "https://www.proxy-list.download/api/v1/get?type=http",
            "https://www.proxy-list.download/api/v1/get?type=https",
            "https://www.proxy-list.download/api/v1/get?type=socks4",
            "https://www.proxy-list.download/api/v1/get?type=socks5",
        ]

    async def crawl(self) -> List[ProxyRaw]:
        results = []
        for url in self.urls:
            html = await self.fetch(url, timeout=30)
            if not html:
                continue

            # Determine the protocol from the URL's type parameter
            if "type=socks4" in url:
                protocol = "socks4"
            elif "type=socks5" in url:
                protocol = "socks5"
            elif "type=https" in url:
                protocol = "https"
            else:
                protocol = "http"

            # The API returns one "ip:port" pair per line; fall back to
            # plain "\n" splitting if the response is not CRLF-delimited
            lines = html.split("\r\n")
            if len(lines) <= 1:
                lines = html.split("\n")

            for line in lines:
                line = line.strip()
                if not line or ":" not in line:
                    continue
                parts = line.split(":")
                if len(parts) >= 2:
                    ip = parts[0].strip()
                    port = parts[1].strip()
                    if ip and port.isdigit():
                        results.append(ProxyRaw(ip, int(port), protocol))

        if results:
            logger.info(f"{self.display_name} parsing finished, collected {len(results)} candidate proxies")
        return results
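The change list at the top also notes reusing a single aiohttp ClientSession in ValidatorService. A minimal sketch of that pattern, with illustrative names (SessionHolder, check) that are not taken from the repository:

import asyncio
from typing import Optional

import aiohttp


class SessionHolder:
    """Keep one aiohttp ClientSession alive and reuse it for every check."""

    def __init__(self) -> None:
        self._session: Optional[aiohttp.ClientSession] = None

    async def get_session(self) -> aiohttp.ClientSession:
        # Create the session lazily and reuse it, instead of opening a new
        # session (and connection pool) for every request.
        if self._session is None or self._session.closed:
            self._session = aiohttp.ClientSession(
                timeout=aiohttp.ClientTimeout(total=10)
            )
        return self._session

    async def check(self, url: str, proxy: str) -> bool:
        session = await self.get_session()
        try:
            async with session.get(url, proxy=proxy) as resp:
                return resp.status == 200
        except (aiohttp.ClientError, asyncio.TimeoutError):
            return False

    async def close(self) -> None:
        if self._session is not None and not self._session.closed:
            await self._session.close()

Reusing the session keeps aiohttp's connection pool warm across validations; the session is closed once on service shutdown rather than after each request.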