Files
ProxyPool/core/validator.py
2026-01-27 21:17:36 +08:00

77 lines
3.0 KiB
Python

import asyncio
import aiohttp
import random
import time
from core.log import logger
class ProxyValidator:
    """Validate proxies by issuing test requests through them.

    Intended usage::

        async with ProxyValidator() as v:
            ok, latency_ms = await v.validate("1.2.3.4", 8080, "http")

    Concurrency is bounded by an asyncio.Semaphore; one shared
    aiohttp.ClientSession is created on ``__aenter__`` and closed on
    ``__aexit__``.
    """

    def __init__(self, max_concurrency=50, timeout=5):
        # Validation target endpoints: lightweight "what is my IP"
        # services, well suited to checking whether a proxy forwards
        # traffic at all.
        self.http_sources = [
            "http://httpbin.org/ip",
            "http://api.ipify.org"
        ]
        self.https_sources = [
            "https://httpbin.org/ip",
            "https://api.ipify.org"
        ]
        # Caps the number of in-flight validations.
        self.semaphore = asyncio.Semaphore(max_concurrency)
        self.timeout = timeout
        # Created lazily in __aenter__ so the object can be constructed
        # outside a running event loop.
        self.session = None

    async def __aenter__(self):
        # Manage the session lifecycle via `async with`.
        if not self.session:
            self.session = aiohttp.ClientSession(
                connector=aiohttp.TCPConnector(ssl=False, limit=0, force_close=True),
                timeout=aiohttp.ClientTimeout(total=self.timeout, connect=3)
            )
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()
            # Reset so the validator can be re-entered later; __aenter__
            # must not hand back a closed session.
            self.session = None

    async def validate(self, ip, port, protocol='http'):
        """Validate a single proxy.

        Must be called inside the ``async with`` block (needs the shared
        session).

        Returns:
            (True, latency_ms) when the proxy answered with an acceptable
            status code, (False, 0) otherwise.
        """
        protocol = protocol.lower()
        sources = self.https_sources if protocol == 'https' else self.http_sources
        test_url = random.choice(sources)
        # aiohttp proxy URL format (the proxy scheme is http:// even when
        # the test target is https).
        proxy_url = f"http://{ip}:{port}"
        async with self.semaphore:
            start_time = time.time()
            try:
                # Reuse the shared session.
                async with self.session.get(
                    test_url,
                    proxy=proxy_url,
                    allow_redirects=True,
                    timeout=aiohttp.ClientTimeout(total=self.timeout, connect=3)
                ) as response:
                    # Accept 200 plus redirect status codes.
                    if response.status in (200, 301, 302):
                        try:
                            content = (await response.text()).lower()
                        except Exception:
                            # Body could not be read/decoded; a healthy
                            # status code is still counted as success.
                            content = None
                        # Success when the body looks like an IP-echo
                        # response, or when the body was unreadable.
                        if content is None or 'ip' in content or 'origin' in content:
                            latency = round((time.time() - start_time) * 1000, 2)
                            logger.info(f"验证成功: {ip}:{port} ({protocol}) - 延迟: {latency}ms")
                            return True, latency
                    return False, 0
            except asyncio.TimeoutError:
                logger.warning(f"验证超时: {ip}:{port} ({protocol})")
                return False, 0
            except Exception as e:
                # `except Exception` (NOT a bare `except:`) so that
                # asyncio.CancelledError propagates and task cancellation
                # keeps working.
                logger.warning(f"验证失败: {ip}:{port} ({protocol}) - {e}")
                return False, 0