Update app.py
app.py CHANGED
@@ -8,24 +8,23 @@ import traceback
 from aiohttp import web
 from urllib.parse import parse_qs
 from cachetools import TTLCache
+from functools import partial
 
-# Create a TTL cache storing at most 1000 items; each item is valid for
+# Create a TTL cache storing at most 1000 items; each item is valid for 1 hour
 cache = TTLCache(maxsize=1000, ttl=1800)
 
 async def fetch_url(url, session):
     async with session.get(url) as response:
         return await response.text()
 
-def extract_and_transform_proxies(input_text):
+async def extract_and_transform_proxies(input_text):
     try:
-        # Try to parse the whole input as YAML
         data = yaml.safe_load(input_text)
         if isinstance(data, dict) and 'proxies' in data:
             proxies_list = data['proxies']
         elif isinstance(data, list):
             proxies_list = data
         else:
-            # Not the expected format; try to extract the proxies section
             proxies_match = re.search(r'proxies:\s*\n((?:[-\s]*{.*\n?)*)', input_text, re.MULTILINE)
             if proxies_match:
                 proxies_text = proxies_match.group(1)
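A note on the cache hunk above: the updated comment describes a 1-hour lifetime, but ttl is given in seconds and ttl=1800 is 30 minutes. If an hour is really intended, the constructor would presumably need ttl=3600; a minimal sketch of that alternative (an assumption about intent, not part of this commit):

    # hypothetical alternative if a 1-hour lifetime is actually wanted
    cache = TTLCache(maxsize=1000, ttl=3600)  # ttl is in seconds: 3600 s = 1 hour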
@@ -79,11 +78,12 @@ def extract_and_transform_proxies(input_text):
 
     return "\n".join(transformed_proxies) if transformed_proxies else "未找到有效的SS或Trojan代理配置"
 
-
 async def log_request(request, response, cache_status):
     timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
     client_ip = request.remote
-    request_line = f"{request.method} {request.
+    request_line = f"{request.method} {request.path}"
+    if request.query_string:
+        request_line += f"?{request.query_string}"
     status_code = response.status
     content_length = response.content_length
     user_agent = request.headers.get('User-Agent', 'Unknown')
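In the log_request hunk above, the removed line is replaced by two added lines that rebuild the request line from request.path plus the query string. aiohttp also exposes request.path_qs (the path including the query string), so an equivalent one-liner would be the sketch below; this is only an alternative, not what the commit does:

    request_line = f"{request.method} {request.path_qs}"  # path_qs already includes "?query" when present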
@@ -94,36 +94,51 @@ async def log_request(request, response, cache_status):
     )
     print(log_message, flush=True)
 
+
 @web.middleware
 async def logging_middleware(request, handler):
-
-
-
+    start_time = datetime.datetime.now()
+    try:
+        response = await handler(request)
+        await log_request(request, response)
+        end_time = datetime.datetime.now()
+        print(f"Request processing time: {end_time - start_time}", flush=True)
+        return response
+    except Exception as e:
+        end_time = datetime.datetime.now()
+        print(f"Error occurred: {str(e)}", flush=True)
+        print(f"Request processing time: {end_time - start_time}", flush=True)
+        print("Traceback:", flush=True)
+        traceback.print_exc()
+        return web.Response(text=f"Internal Server Error: {str(e)}", status=500)
 
 async def handle_request(request):
     if request.path == '/':
         query_params = parse_qs(request.query_string)
         if 'url' in query_params:
             url = query_params['url'][0]
-            force_refresh = 'nocache' in query_params
 
-
+            # Check the cache
+            if url in cache:
+                print(f"Cache hit for URL: {url}", flush=True)
+                return web.Response(text=cache[url], content_type='text/plain')
+
             try:
-
-
-
-
-
-
-
-
-
-                if not force_refresh:
-                    cache[url] = result
+                print(f"Fetching URL: {url}", flush=True)
+                async with aiohttp.ClientSession() as session:
+                    input_text = await fetch_url(url, session)
+                print(f"URL content length: {len(input_text)}", flush=True)
+                result = await extract_and_transform_proxies(input_text)
+                print(f"Transformed result length: {len(result)}", flush=True)
+
+                # Store the result in the cache
+                cache[url] = result
 
-                return web.Response(text=result, content_type='text/plain')
+                return web.Response(text=result, content_type='text/plain')
             except Exception as e:
-
+                print(f"Error processing request: {str(e)}", flush=True)
+                traceback.print_exc()
+                return web.Response(text=f"Error: {str(e)}", status=500)
         else:
             usage_guide = """
             <html>
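Two observations on the middleware hunk above. First, log_request is defined as log_request(request, response, cache_status), but the new middleware calls await log_request(request, response), which would raise a TypeError on the first request. Second, from functools import partial is added without partial being used in any hunk shown here. A minimal sketch of one way to make the logging call consistent, assuming the handler stores a cache status on the request (hypothetical key name, not part of this commit):

    # hypothetical: handlers could set request['cache_status'] = 'HIT' / 'MISS';
    # aiohttp requests support dict-style storage, so a default keeps the call safe
    response = await handler(request)
    await log_request(request, response, request.get('cache_status', 'UNKNOWN'))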
@@ -131,13 +146,12 @@ async def handle_request(request):
             <h1>代理配置转换工具</h1>
             <p>使用方法:在URL参数中提供包含代理配置的网址。</p>
             <p>示例:<code>http://localhost:8080/?url=https://example.com/path-to-proxy-config</code></p>
-            <p>强制刷新缓存:<code>http://localhost:8080/?url=https://example.com/path-to-proxy-config&nocache</code></p>
             </body>
             </html>
             """
-            return web.Response(text=usage_guide, content_type='text/html')
+            return web.Response(text=usage_guide, content_type='text/html')
     else:
-        return web.Response(text="Not Found", status=404)
+        return web.Response(text="Not Found", status=404)
 
 async def init_app():
     app = web.Application(middlewares=[logging_middleware])
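With the force_refresh / nocache branch removed in these hunks, a cached result is served for the full TTL (1800 seconds); the only way to get fresh data through the paths shown here is to wait for the entry to expire. A small client sketch, assuming the service runs locally on port 8080 as in the usage guide above:

    # first call fetches, transforms and caches; a repeat within 1800 s is served from the cache
    import urllib.request, urllib.parse
    target = urllib.parse.quote("https://example.com/path-to-proxy-config", safe="")
    with urllib.request.urlopen(f"http://localhost:8080/?url={target}") as resp:
        print(resp.read().decode())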