rkihacker committed on
Commit 2446f5f · verified · 1 Parent(s): d5133b1

Update main.py

Files changed (1)
  1. main.py +41 -101
main.py CHANGED
@@ -1,104 +1,44 @@
- import requests
- import random
- import time
- import threading
- from flask import Flask, request, Response, jsonify, stream_with_context
-
- PROXY_LIST_URL = "https://proxies.typegpt.net/ips.txt"
- proxies_cache = []
- last_refresh = 0
-
- app = Flask(__name__)
-
- # Fixed headers for DeepInfra requests
- DEEPINFRA_HEADERS = {
-     "accept": "text/event-stream",
-     "content-type": "application/json",
-     "referer": "https://deepinfra.com/",
-     "sec-ch-ua": '"Chromium";v="140", "Not=A?Brand";v="24", "Google Chrome";v="140"',
-     "sec-ch-ua-mobile": "?0",
-     "sec-ch-ua-platform": '"Windows"',
-     "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36",
-     "x-deepinfra-source": "web-embed",
- }
-
- def fetch_proxies():
-     try:
-         resp = requests.get(PROXY_LIST_URL, timeout=10)
-         resp.raise_for_status()
-         proxies = [line.strip() for line in resp.text.splitlines() if line.strip()]
-         return proxies
-     except Exception as e:
-         print(f"[ERROR] Failed to fetch proxies: {e}")
-         return []
-
- def get_random_proxy(proxies):
-     if not proxies:
-         return None
-     return random.choice(proxies)
-
- def refresh_proxies_loop():
-     global proxies_cache, last_refresh
-     while True:
-         if time.time() - last_refresh > 300 or not proxies_cache:
-             proxies_cache = fetch_proxies()
-             last_refresh = time.time()
-             print(f"[INFO] Refreshed {len(proxies_cache)} proxies.")
-         time.sleep(60)
-
- @app.route("/health", methods=["GET"])
- def health():
-     return "Healthy", 200
-
- @app.route("/deepinfra", methods=["POST"])
- def proxy_deepinfra():
-     target_url = "https://api.deepinfra.com/v1/openai/chat/completions"
-     proxy = get_random_proxy(proxies_cache)
-     if not proxy:
-         return jsonify({"error": "No proxies available"}), 500
-
-     proxies = {"http": proxy, "https": proxy}
-
      try:
-         print(f"[INFO] Forwarding POST to {target_url} via {proxy}")
-
-         # Always use fixed headers (ignore client-supplied headers)
-         forward_headers = dict(DEEPINFRA_HEADERS)
-
-         upstream = requests.post(
-             url=target_url,
-             headers=forward_headers,
-             data=request.get_data(),
-             params=request.args,
-             proxies=proxies,
-             stream=True,
-             timeout=120,
-         )
-
-         def generate():
-             for chunk in upstream.iter_content(chunk_size=None):
-                 if chunk:
-                     yield chunk
-
-         headers = dict(upstream.headers)
-         headers["X-Proxy-Used"] = proxy
-
-         # 🔑 Return exactly what DeepInfra sent (status, body, headers)
-         return Response(
-             stream_with_context(generate()),
-             status=upstream.status_code,
-             headers=headers,
-             content_type=upstream.headers.get("Content-Type", "application/json")
-         )
-
-     except requests.exceptions.RequestException as e:
-         # Only if the proxy itself fails (not DeepInfra)
-         return jsonify({"error": "Proxy failed", "proxy": proxy, "details": str(e)}), 502
-
- def main():
-     t = threading.Thread(target=refresh_proxies_loop, daemon=True)
-     t.start()
-     app.run(host="0.0.0.0", port=5000)

  if __name__ == "__main__":
-     main()
 
 
+ import httpx
+ from fastapi import FastAPI, Request
+ from fastapi.responses import StreamingResponse
+ import json
+
+ app = FastAPI()
+
+ @app.post("/v1/openai/chat/completions")
+ async def proxy_deepinfra(request: Request):
+     """
+     Proxies chat completion requests to the DeepInfra API.
+     """
+     client = httpx.AsyncClient()
+     url = "https://api.deepinfra.com/v1/openai/chat/completions"
+
+     # Extract the raw body from the request
+     body = await request.body()
+     # Decode the body to a string and then load it as JSON
      try:
+         data = json.loads(body.decode('utf-8'))
+     except json.JSONDecodeError:
+         return {"error": "Invalid JSON in request body"}, 400
+
+     headers = {
+         'sec-ch-ua-platform': request.headers.get('sec-ch-ua-platform', '"Windows"'),
+         'Referer': request.headers.get('Referer', 'https://deepinfra.com/'),
+         'sec-ch-ua': request.headers.get('sec-ch-ua', '"Chromium";v="140", "Not=A?Brand";v="24", "Google Chrome";v="140"'),
+         'sec-ch-ua-mobile': request.headers.get('sec-ch-ua-mobile', '?0'),
+         'User-Agent': request.headers.get('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36'),
+         'accept': request.headers.get('accept', 'text/event-stream'),
+         'X-Deepinfra-Source': request.headers.get('X-Deepinfra-Source', 'web-embed'),
+         'Content-Type': request.headers.get('Content-Type', 'application/json'),
+     }
+
+     async def stream_response():
+         async with client.stream("POST", url, headers=headers, json=data, timeout=None) as response:
+             async for chunk in response.aiter_bytes():
+                 yield chunk
+
+     return StreamingResponse(stream_response(), media_type="text/event-stream")

  if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="0.0.0.0", port=8000)
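
A minimal client-side usage sketch for the new route, assuming the server was started as in the __main__ block above (0.0.0.0:8000); the model name and prompt below are placeholders for illustration, not part of main.py:

# Hypothetical client for the proxy in main.py; streams the response line by line.
import httpx

payload = {
    "model": "meta-llama/Meta-Llama-3.1-8B-Instruct",  # placeholder model name (assumption)
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": True,
}

with httpx.Client(timeout=None) as client:
    # POST to the proxied route and print each streamed chunk as it arrives.
    with client.stream(
        "POST",
        "http://localhost:8000/v1/openai/chat/completions",
        json=payload,
    ) as response:
        for line in response.iter_lines():
            if line:
                print(line)

The proxy relays whatever bytes DeepInfra streams back, so for OpenAI-style streaming each printed line is typically a "data: ..." server-sent-event chunk.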