Files
ProxyPool/app/plugins/fpw_proxy_list_download.py
祀梦 957cee3100 fix(crawl): throttle concurrent CrawlJobs and relax fpw/proxyscrape HTTP
- CrawlJob waits on crawl_slot before JobExecutor semaphore so crawl-all does not fill slots while queued
- BaseHTTPPlugin: longer connect budget for slow international links
- proxyscrape: jsDelivr mirror + longer GitHub/API phases
- fpw_*: higher timeouts/retries; lower internal concurrency on heavy multi-URL plugins

Made-with: Cursor
2026-04-05 13:48:41 +08:00
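
The first bullet is easiest to see in miniature: acquire the crawl-specific slot before the general executor slot, so a queued "crawl-all" burst blocks on its own gate instead of occupying executor capacity while waiting. A minimal sketch of that ordering; the names CRAWL_SLOTS and EXECUTOR_SLOTS and their limits are assumptions, since the actual CrawlJob/JobExecutor code is not part of this file:

    import asyncio

    # Hypothetical sketch of the commit's semaphore ordering; names and
    # limits are assumptions, not taken from the real CrawlJob/JobExecutor.
    CRAWL_SLOTS = asyncio.Semaphore(2)     # dedicated cap on concurrent CrawlJobs
    EXECUTOR_SLOTS = asyncio.Semaphore(8)  # JobExecutor's general worker pool

    async def run_crawl_job(job):
        # Wait on the crawl slot FIRST: queued crawl jobs block here rather
        # than holding general executor slots while they wait their turn.
        async with CRAWL_SLOTS:
            async with EXECUTOR_SLOTS:
                await job()

Acquiring in the reverse order would let every queued crawl job grab an executor slot and then idle on the crawl gate, starving non-crawl work.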


"""www.proxy-list.download 公开 APIREADME: Free_Proxy_Website"""
from typing import List
from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin
from app.core.log import logger
class FpwProxyListDownloadPlugin(BaseHTTPPlugin):
name = "fpw_proxy_list_download"
display_name = "Proxy-List.download"
description = "proxy-list.download 官方 APIhttp/https/socks4/socks5"
def __init__(self):
super().__init__()
self.max_concurrency = 4
self.api_pairs = [
("http", "https://www.proxy-list.download/api/v1/get?type=http"),
("https", "https://www.proxy-list.download/api/v1/get?type=https"),
("socks4", "https://www.proxy-list.download/api/v1/get?type=socks4"),
("socks5", "https://www.proxy-list.download/api/v1/get?type=socks5"),
]
self.fallback_pairs = [
("http", "https://api.proxyscrape.com/v2/?request=get&protocol=http&timeout=10000&country=all&ssl=all&anonymity=all"),
("https", "https://api.proxyscrape.com/v2/?request=get&protocol=https&timeout=10000&country=all&ssl=all&anonymity=all"),
("socks4", "https://api.proxyscrape.com/v2/?request=get&protocol=socks4&timeout=10000&country=all&ssl=all&anonymity=all"),
("socks5", "https://api.proxyscrape.com/v2/?request=get&protocol=socks5&timeout=10000&country=all&ssl=all&anonymity=all"),
]
async def crawl(self) -> List[ProxyRaw]:
results: List[ProxyRaw] = []
urls = [u for _, u in self.api_pairs]
htmls = await self.fetch_all(urls, timeout=25, retries=2)
for (protocol, _), text in zip(self.api_pairs, htmls):
if not text:
continue
batch = self.parse_text_proxies(text, protocol)
if batch:
results.extend(batch)
logger.info(f"{self.display_name} {protocol}: {len(batch)}")
if not results:
logger.warning(f"{self.display_name} 主 API 无数据,尝试 ProxyScrape 备用")
fb_urls = [u for _, u in self.fallback_pairs]
fb_htmls = await self.fetch_all(fb_urls, timeout=25, retries=2)
for (protocol, _), text in zip(self.fallback_pairs, fb_htmls):
if not text:
continue
batch = self.parse_text_proxies(text, protocol)
if batch:
results.extend(batch)
logger.info(f"{self.display_name} fallback {protocol}: {len(batch)}")
if results:
logger.info(f"{self.display_name} 合计 {len(results)}")
return results
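
For context, crawl() leans on two helpers inherited from BaseHTTPPlugin that are not shown in this file: fetch_all (a bounded-concurrency batch download honoring max_concurrency, with per-request timeout and retries) and parse_text_proxies. A hedged sketch of what the fetch_all contract could look like; every name, default, and the choice of aiohttp here are assumptions rather than the real base class:

    import asyncio
    from typing import List, Optional

    import aiohttp

    # Hypothetical stand-in for BaseHTTPPlugin.fetch_all; the real base
    # class is not shown, so this is a sketch of the assumed contract.
    async def fetch_all(urls: List[str], timeout: float = 25, retries: int = 2,
                        max_concurrency: int = 4) -> List[Optional[str]]:
        sem = asyncio.Semaphore(max_concurrency)  # "lower internal concurrency"

        async def fetch_one(session: aiohttp.ClientSession, url: str) -> Optional[str]:
            async with sem:
                for attempt in range(retries + 1):
                    try:
                        async with session.get(url) as resp:
                            resp.raise_for_status()
                            return await resp.text()
                    except (aiohttp.ClientError, asyncio.TimeoutError):
                        if attempt == retries:
                            return None  # caller treats falsy entries as "skip"
                        await asyncio.sleep(1.5 * (attempt + 1))  # simple backoff

        client_timeout = aiohttp.ClientTimeout(total=timeout)
        async with aiohttp.ClientSession(timeout=client_timeout) as session:
            # gather() preserves input order, which crawl() relies on when
            # zipping results back against api_pairs / fallback_pairs.
            return await asyncio.gather(*(fetch_one(session, u) for u in urls))

Returning None for a failed URL (rather than raising) matches how crawl() consumes the results: falsy entries are skipped, and if every primary URL comes back empty it falls through to the ProxyScrape fallback list.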