- Fix SQL injection risks in proxy_repo and task_repo (see sketch below)
- Atomic acquire_pending with UPDATE ... RETURNING (see sketch below)
- Reuse aiohttp ClientSession in ValidatorService (see sketch below)
- Replace polling with asyncio.Event in SchedulerService (see sketch below)
- Optimize ValidationQueue.drain with asyncio.Condition (see sketch below)
- Concurrent plugin crawling with asyncio.gather (see sketch after the plugin file below)
- Unify ProxyRaw model import path
- Fix test baseline and remove tracked __pycache__ files
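The proxy_repo/task_repo fix presumably moves user-supplied values out of interpolated SQL strings and into bound parameters. A minimal sketch of that idea, assuming an aiosqlite-style connection; the method, table, and column names here are hypothetical, not the repository's actual code:

async def get_by_protocol(self, protocol: str):
    # Unsafe pattern the commit removes (illustrative only):
    #   query = f"SELECT * FROM proxies WHERE protocol = '{protocol}'"
    # Safe: bind the value as a parameter so the driver escapes it.
    query = "SELECT ip, port, protocol FROM proxies WHERE protocol = ?"
    async with self.db.execute(query, (protocol,)) as cursor:
        return await cursor.fetchall()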
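For the atomic acquire_pending, UPDATE ... RETURNING lets a worker claim pending rows and read them back in a single statement, removing the race in a separate SELECT followed by UPDATE. A sketch under the same aiosqlite assumption, with hypothetical table and column names:

async def acquire_pending(self, limit: int = 50):
    # Mark a batch of pending tasks as running and return them in one statement,
    # so two concurrent workers can never claim the same rows.
    query = """
        UPDATE tasks
        SET status = 'running'
        WHERE id IN (
            SELECT id FROM tasks WHERE status = 'pending' LIMIT ?
        )
        RETURNING id, payload
    """
    async with self.db.execute(query, (limit,)) as cursor:
        rows = await cursor.fetchall()
    await self.db.commit()
    return rows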
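Reusing one aiohttp ClientSession keeps connection pools warm instead of rebuilding them per request. A sketch of the idea; apart from the ClientSession API itself, the service structure, check URL, and timeout are assumptions:

import aiohttp
from typing import Optional

class ValidatorService:
    def __init__(self) -> None:
        self._session: Optional[aiohttp.ClientSession] = None

    async def _get_session(self) -> aiohttp.ClientSession:
        # Create lazily so the session is bound to the running event loop,
        # then reuse it for every validation request.
        if self._session is None or self._session.closed:
            self._session = aiohttp.ClientSession(
                timeout=aiohttp.ClientTimeout(total=10)
            )
        return self._session

    async def check(self, proxy_url: str) -> bool:
        session = await self._get_session()
        try:
            async with session.get("http://httpbin.org/ip", proxy=proxy_url) as resp:
                return resp.status == 200
        except Exception:
            return False

    async def close(self) -> None:
        if self._session and not self._session.closed:
            await self._session.close()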
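Replacing polling with asyncio.Event means the scheduler sleeps until it is explicitly woken (or a fallback interval elapses) rather than waking repeatedly to check for work. A hedged sketch; the SchedulerService internals shown are assumptions:

import asyncio

class SchedulerService:
    def __init__(self) -> None:
        self._wakeup = asyncio.Event()
        self._stopped = False

    def trigger(self) -> None:
        # Called when new work arrives; wakes the loop immediately
        # instead of waiting for the next poll interval.
        self._wakeup.set()

    async def run(self, interval: float = 60.0) -> None:
        while not self._stopped:
            await self._run_once()
            try:
                # Wait for either an explicit trigger or the normal interval.
                await asyncio.wait_for(self._wakeup.wait(), timeout=interval)
            except asyncio.TimeoutError:
                pass
            self._wakeup.clear()

    async def _run_once(self) -> None:
        ...  # dispatch scheduled crawl/validation jobs (placeholder)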
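An asyncio.Condition lets ValidationQueue.drain block until items actually arrive and then hand them out in batches, again without a sleep-and-retry loop. A sketch with assumed fields and method signatures:

import asyncio
from typing import List

class ValidationQueue:
    def __init__(self) -> None:
        self._items: List[object] = []
        self._cond = asyncio.Condition()

    async def put(self, item: object) -> None:
        async with self._cond:
            self._items.append(item)
            self._cond.notify()  # wake one waiting drain() call

    async def drain(self, max_items: int = 100) -> List[object]:
        async with self._cond:
            # Sleep until there is something to take, rather than polling.
            await self._cond.wait_for(lambda: len(self._items) > 0)
            batch, self._items = self._items[:max_items], self._items[max_items:]
            return batch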
41 lines · 1.3 KiB · Python
import re
from typing import List

from bs4 import BeautifulSoup

from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin
from app.core.log import logger


class Ip89Plugin(BaseHTTPPlugin):
    """Crawls free HTTP proxies from the paginated list at 89ip.cn."""

    default_config = {"max_pages": 5}
    name = "ip89"
    display_name = "89免费代理"
    description = "从 89ip.cn 爬取免费代理"

    def __init__(self):
        super().__init__()
        # Pages 1-5, matching the default max_pages.
        self.urls = [f"https://www.89ip.cn/index_{i}.html" for i in range(1, 6)]

    async def crawl(self) -> List[ProxyRaw]:
        results = []
        for url in self.urls:
            html = await self.fetch(url, timeout=15)
            if not html:
                continue
            soup = BeautifulSoup(html, "lxml")
            table = soup.find("table", class_="layui-table")
            if not table:
                continue

            for row in table.find_all("tr"):
                tds = row.find_all("td")
                if len(tds) >= 2:
                    ip = tds[0].get_text(strip=True)
                    port = tds[1].get_text(strip=True)
                    # Keep only rows that look like an IPv4 address plus a numeric port.
                    if re.match(r"^\d+\.\d+\.\d+\.\d+$", ip) and port.isdigit():
                        results.append(ProxyRaw(ip, int(port), "http"))

        if results:
            # "Parsing complete, got N candidate proxies"
            logger.info(f"{self.display_name} 解析完成,获取 {len(results)} 个潜在代理")
        return results
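The commit's "concurrent plugin crawling with asyncio.gather" would run plugins like Ip89Plugin side by side instead of sequentially. A sketch reusing this module's imports; the crawl_all helper and its shape are assumptions, not code from the repository:

import asyncio
from typing import List

from app.core.log import logger
from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin


async def crawl_all(plugins: List[BaseHTTPPlugin]) -> List[ProxyRaw]:
    # Run every plugin's crawl() concurrently; return_exceptions=True keeps
    # one failing source from cancelling the rest of the batch.
    results = await asyncio.gather(
        *(plugin.crawl() for plugin in plugins),
        return_exceptions=True,
    )
    proxies: List[ProxyRaw] = []
    for plugin, result in zip(plugins, results):
        if isinstance(result, Exception):
            logger.warning(f"{plugin.display_name} crawl failed: {result}")
            continue
        proxies.extend(result)
    return proxies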