import sys
import os

# Make the project root importable when this file is run directly as a script.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import re
import asyncio

from bs4 import BeautifulSoup

from core.crawler import BasePlugin
from core.log import logger

# Protocols accepted verbatim from the site; anything else falls back to 'http'.
VALID_PROTOCOLS = ['http', 'https', 'socks4', 'socks5']

# Compiled once at import time (was re-built implicitly per row via re.match).
# Shape check only: dotted quad, octet ranges are not validated here.
_IP_RE = re.compile(r'^\d+\.\d+\.\d+\.\d+$')


class Ip3366Plugin(BasePlugin):
    """Crawler plugin for the free proxy lists published at www.ip3366.net."""

    def __init__(self):
        super().__init__()
        self.name = "IP3366"
        # Fetch the first 5 pages of both the high-anonymity (stype=1)
        # and the regular (stype=2) proxy listings.
        self.urls = [
            f"http://www.ip3366.net/free/?stype=1&page={i}" for i in range(1, 6)
        ] + [
            f"http://www.ip3366.net/free/?stype=2&page={i}" for i in range(1, 6)
        ]

    async def parse(self, html):
        """Yield ``(ip, port, protocol)`` tuples scraped from one listing page.

        Args:
            html: Raw HTML of an ip3366 result page; falsy input yields nothing.

        Yields:
            ``(str ip, int port, str protocol)`` for every table row that
            passes basic IP/port validation.
        """
        if not html:
            return
        soup = BeautifulSoup(html, 'lxml')
        list_div = soup.find('div', id='list')
        if not list_div:
            return
        table = list_div.find('table')
        if not table:
            return

        count = 0
        for row in table.find_all('tr'):
            tds = row.find_all('td')
            # Header rows use <th>; short/malformed rows are skipped.
            if len(tds) < 5:
                continue
            ip = tds[0].get_text(strip=True)
            port = tds[1].get_text(strip=True)
            # NOTE: len(tds) >= 5 already guarantees tds[4] exists, so the
            # previous `if len(tds) > 4 else 'http'` fallback was dead code.
            protocol = tds[4].get_text(strip=True).lower()
            if protocol not in VALID_PROTOCOLS:
                protocol = 'http'
            # Accept only dotted-quad IPs with a numeric, in-range TCP port
            # (the old check admitted out-of-range values such as 99999).
            if _IP_RE.match(ip) and port.isdigit() and 0 < int(port) <= 65535:
                yield ip, int(port), protocol
                count += 1

        if count > 0:
            logger.info(f"{self.name} 解析完成,获得 {count} 个潜在代理")


if __name__ == "__main__":
    async def test_plugin():
        # Ad-hoc smoke test: crawl all configured pages and print the results.
        plugin = Ip3366Plugin()
        print(f"========== 测试 {plugin.name} ==========")
        print(f"目标URL数量: {len(plugin.urls)}")
        print(f"开始抓取...\n")
        proxies = await plugin.run()
        print(f"\n========== 抓取结果 ==========")
        print(f"总计获取 {len(proxies)} 个代理:")
        print("-" * 60)
        for idx, (ip, port, protocol) in enumerate(proxies, 1):
            print(f"{idx:3d}. {ip:15s} : {str(port):5s} | {protocol}")
        print("-" * 60)
        print(f"完成!共 {len(proxies)} 个代理~")

    asyncio.run(test_plugin())