- get_headers(url): Referer, Sec-Fetch-*, sec-ch-ua, API vs HTML Accept
- httpx AsyncClient / sync Client with optional HTTP/2 (h2 extra)
- On 403/429/503/520-523/525/567 or request errors, retry via curl_cffi chrome124 impersonate
- POST: Origin, Referer, Content-Type for form posts
- kuaidaili/ip3366: forward get_headers(url=...)

Made-with: Cursor
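The 403/429/5xx fallback listed above lives in BaseHTTPPlugin rather than in this file. A minimal sketch of that flow, assuming curl_cffi's AsyncSession API and the status list from the summary (the function name and timeouts are illustrative, not the plugin's actual code):

from typing import Optional

import httpx
from curl_cffi.requests import AsyncSession

# Statuses treated as "blocked" per the summary above.
RETRY_STATUSES = {403, 429, 503, 520, 521, 522, 523, 525, 567}

async def fetch_with_fallback(url: str, headers: dict) -> Optional[str]:
    # First attempt: plain httpx (HTTP/2 via the optional h2 extra).
    try:
        async with httpx.AsyncClient(http2=True, timeout=15.0) as client:
            resp = await client.get(url, headers=headers)
            if resp.status_code not in RETRY_STATUSES:
                return resp.text
    except httpx.HTTPError:
        pass  # request error: fall through to the impersonated retry
    # Retry: curl_cffi impersonating Chrome 124 to pass TLS fingerprint checks.
    try:
        async with AsyncSession() as session:
            resp = await session.get(url, headers=headers, impersonate="chrome124", timeout=15)
            return resp.text if resp.status_code < 400 else None
    except Exception:
        return None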
import re
from typing import List, Optional

from bs4 import BeautifulSoup

from app.core.log import logger
from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin

VALID_PROTOCOLS = ("http", "https", "socks4", "socks5")


class Ip3366Plugin(BaseHTTPPlugin):
    name = "ip3366"
    display_name = "IP3366"
    description = "Crawl free proxies from the IP3366 website"
    default_config = {"max_pages": 3}

    def __init__(self):
        super().__init__()
        self._update_urls()

    def _update_urls(self):
        max_pages = self.config.get("max_pages", 3)
        # stype=1 and stype=2 are the two free-proxy listings on ip3366.net;
        # crawl the first `max_pages` pages of each.
        self.urls = [
            f"http://www.ip3366.net/free/?stype=1&page={i}" for i in range(1, max_pages + 1)
        ] + [
            f"http://www.ip3366.net/free/?stype=2&page={i}" for i in range(1, max_pages + 1)
        ]

    def get_headers(self, url: Optional[str] = None, **kwargs) -> dict:
        # Forward to the base implementation, then pin the Referer the site expects.
        headers = super().get_headers(url=url, **kwargs)
        headers["Referer"] = "http://www.ip3366.net/free/"
        return headers

    async def crawl(self) -> List[ProxyRaw]:
        results: List[ProxyRaw] = []
        htmls = await self.fetch_all(self.urls)
        for html in htmls:
            if not html:
                continue
            soup = BeautifulSoup(html, "lxml")
            list_div = soup.find("div", id="list")
            if not list_div:
                continue
            table = list_div.find("table")
            if not table:
                continue

            for row in table.find_all("tr"):
                tds = row.find_all("td")
                if len(tds) < 5:
                    continue
                ip = tds[0].get_text(strip=True)
                port = tds[1].get_text(strip=True)
                protocol = tds[4].get_text(strip=True).lower()
                if protocol not in VALID_PROTOCOLS:
                    protocol = "http"
                # Accept only well-formed IPv4 addresses and ports in the valid range.
                if re.match(r"^\d+\.\d+\.\d+\.\d+$", ip) and port.isdigit() and 1 <= int(port) <= 65535:
                    try:
                        results.append(ProxyRaw(ip, int(port), protocol))
                    except ValueError:
                        continue

        if results:
            logger.info(f"{self.display_name} parsing complete: {len(results)} potential proxies")
        return results
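For a quick standalone check, something like the following would exercise the plugin (a hypothetical driver; it assumes the app package is importable and that BaseHTTPPlugin needs no further setup):

import asyncio

async def main():
    plugin = Ip3366Plugin()
    proxies = await plugin.crawl()
    print(f"{len(proxies)} candidate proxies")

if __name__ == "__main__":
    asyncio.run(main())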