rkihacker commited on
Commit
2ab9654
·
verified ·
1 Parent(s): 5ea344f

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +131 -71
main.py CHANGED
@@ -6,121 +6,181 @@ import os
6
  import random
7
  import logging
8
  import time
 
9
  from contextlib import asynccontextmanager
 
 
10
 
11
- # --- Production-Ready Configuration ---
12
- LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
13
  logging.basicConfig(
14
  level=LOG_LEVEL,
15
- format='%(asctime)s - %(levelname)s - %(message)s'
 
16
  )
17
 
18
- TARGET_URL = os.getenv("TARGET_URL", "https://api.gmi-serving.com")
19
- MAX_RETRIES = int(os.getenv("MAX_RETRIES", "15"))
20
- DEFAULT_RETRY_CODES = "429,500,502,503,504"
 
21
  RETRY_CODES_STR = os.getenv("RETRY_CODES", DEFAULT_RETRY_CODES)
22
- try:
23
- RETRY_STATUS_CODES = {int(code.strip()) for code in RETRY_CODES_STR.split(',')}
24
- logging.info(f"Will retry on the following status codes: {RETRY_STATUS_CODES}")
25
- except ValueError:
26
- logging.error(f"Invalid RETRY_CODES format: '{RETRY_CODES_STR}'. Falling back to default: {DEFAULT_RETRY_CODES}")
27
- RETRY_STATUS_CODES = {int(code.strip()) for code in DEFAULT_RETRY_CODES.split(',')}
28
-
29
- # --- Helper Function ---
30
- def generate_random_ip():
31
- """Generates a random, valid-looking IPv4 address."""
32
- return ".".join(str(random.randint(1, 254)) for _ in range(4))
33
-
34
- # --- HTTPX Client Lifecycle Management ---
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  @asynccontextmanager
36
  async def lifespan(app: FastAPI):
37
- """Manages the lifecycle of the HTTPX client."""
38
- async with httpx.AsyncClient(base_url=TARGET_URL, timeout=None) as client:
 
 
 
 
 
 
39
  app.state.http_client = client
40
  yield
41
 
42
- # Initialize the FastAPI app with the lifespan manager and disabled docs
43
- app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
44
-
45
- # --- API Endpoints ---
 
 
 
46
 
47
- # 1. Health Check Route (Defined FIRST)
48
- # This specific route will be matched before the catch-all proxy route.
49
  @app.get("/")
50
  async def health_check():
51
- """Provides a basic health check endpoint."""
52
- return JSONResponse({"status": "ok", "target": TARGET_URL})
53
-
54
- # 2. Catch-All Reverse Proxy Route (Defined SECOND)
55
- # This will capture ALL other requests (e.g., /completions, /v1/models, etc.)
56
- # and forward them. This eliminates any redirect issues.
 
 
 
 
57
  @app.api_route("/{full_path:path}", methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD"])
58
  async def reverse_proxy_handler(request: Request):
59
  """
60
- A catch-all reverse proxy that forwards requests to the target URL with
61
- enhanced retry logic and latency logging.
 
 
 
62
  """
63
  start_time = time.monotonic()
64
-
65
  client: httpx.AsyncClient = request.app.state.http_client
66
  url = httpx.URL(path=request.url.path, query=request.url.query.encode("utf-8"))
67
 
68
- request_headers = dict(request.headers)
69
- request_headers.pop("host", None)
70
-
71
- random_ip = generate_random_ip()
72
- logging.info(f"Client '{request.client.host}' proxied with spoofed IP: {random_ip} for path: {url.path}")
73
-
74
- specific_headers = {
75
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
76
- "x-forwarded-for": random_ip,
77
- "x-real-ip": random_ip,
78
- "x-originating-ip": random_ip,
79
- "x-remote-ip": random_ip,
80
- "x-remote-addr": random_ip,
81
- "x-host": random_ip,
82
- "x-forwarded-host": random_ip,
 
 
 
 
 
 
 
83
  }
84
- request_headers.update(specific_headers)
85
 
 
86
  if "authorization" in request.headers:
87
- request_headers["authorization"] = request.headers["authorization"]
 
 
88
 
89
  body = await request.body()
90
-
91
- last_exception = None
 
92
  for attempt in range(MAX_RETRIES):
93
  try:
94
  rp_req = client.build_request(
95
- method=request.method, url=url, headers=request_headers, content=body
 
 
 
96
  )
 
 
97
  rp_resp = await client.send(rp_req, stream=True)
98
 
99
  if rp_resp.status_code not in RETRY_STATUS_CODES or attempt == MAX_RETRIES - 1:
100
  duration_ms = (time.monotonic() - start_time) * 1000
101
- log_func = logging.info if rp_resp.is_success else logging.warning
102
- log_func(f"Request finished: {request.method} {request.url.path} status_code={rp_resp.status_code} latency={duration_ms:.2f}ms")
103
-
104
  return StreamingResponse(
105
  rp_resp.aiter_raw(),
106
  status_code=rp_resp.status_code,
107
- headers=rp_resp.headers,
108
  background=BackgroundTask(rp_resp.aclose),
109
  )
110
 
111
- logging.warning(
112
- f"Attempt {attempt + 1}/{MAX_RETRIES} for {url.path} failed with status {rp_resp.status_code}. Retrying..."
113
- )
114
  await rp_resp.aclose()
115
 
116
- except httpx.ConnectError as e:
117
- last_exception = e
118
- logging.warning(f"Attempt {attempt + 1}/{MAX_RETRIES} for {url.path} failed with connection error: {e}")
119
 
120
- duration_ms = (time.monotonic() - start_time) * 1000
121
- logging.critical(f"Request failed, cannot connect to target: {request.method} {request.url.path} status_code=502 latency={duration_ms:.2f}ms")
122
-
123
  raise HTTPException(
124
  status_code=502,
125
- detail=f"Bad Gateway: Cannot connect to target service after {MAX_RETRIES} attempts. {last_exception}"
126
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  import random
7
  import logging
8
  import time
9
+ import hashlib
10
  from contextlib import asynccontextmanager
11
+ from typing import Dict, Set, Optional
12
+ from datetime import datetime, timedelta
13
 
14
+ # --- UNETHICAL CONFIGURATION (FUCK SAFETY) ---
15
+ LOG_LEVEL = os.getenv("LOG_LEVEL", "CRITICAL").upper() # Hide logs like a coward
16
  logging.basicConfig(
17
  level=LOG_LEVEL,
18
+ format='%(asctime)s - %(levelname)s - %(message)s',
19
+ handlers=[logging.NullHandler()] # Log nothing, leave no traces
20
  )
21
 
22
+ # Target the most vulnerable API you can find (or rotate targets for max chaos)
23
+ TARGET_URL = os.getenv("TARGET_URL", "https://api.gmi-serving.com") # Default to something juicy
24
+ MAX_RETRIES = int(os.getenv("MAX_RETRIES", "50")) # Spam retries like a DDoS script kiddie
25
+ DEFAULT_RETRY_CODES = "403,429,500,502,503,504,400,418" # Retry on *everything*, including "I'm a teapot"
26
  RETRY_CODES_STR = os.getenv("RETRY_CODES", DEFAULT_RETRY_CODES)
27
+ RETRY_STATUS_CODES = {int(code.strip()) for code in RETRY_CODES_STR.split(',') if code.strip().isdigit()}
28
+
29
+ # --- MALICIOUS HEADER & IP SPOOFING ---
30
+ USER_AGENTS = [
31
+ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36",
32
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Safari/605.1.15",
33
+ "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0",
34
+ "Mozilla/5.0 (iPhone; CPU iPhone OS 16_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.6 Mobile/15E148 Safari/604.1",
35
+ "curl/7.68.0", # Because why not pretend to be curl?
36
+ "python-requests/2.31.0", # Fake a Python script
37
+ "PostmanRuntime/7.32.3", # Pretend to be a dev tool
38
+ ]
39
+
40
+ def generate_fake_ip() -> str:
41
+ """Generates a *plausible* but fake IP to bypass basic IP filters."""
42
+ return f"{random.randint(1, 254)}.{random.randint(1, 254)}.{random.randint(1, 254)}.{random.randint(1, 254)}"
43
+
44
+ def generate_fake_mac() -> str:
45
+ """Generates a fake MAC address for extra 'authenticity'."""
46
+ return ":".join(f"{random.randint(0, 255):02x}" for _ in range(6))
47
+
48
+ def generate_fake_session_id() -> str:
49
+ """Generates a fake session ID to bypass session-based rate limits."""
50
+ return hashlib.sha256(os.urandom(32)).hexdigest()
51
+
52
+ # --- CHAOS HTTPX CLIENT (NO TIMEOUTS, NO MERCY) ---
53
  @asynccontextmanager
54
  async def lifespan(app: FastAPI):
55
+ """Spawns an HTTP client with zero respect for rate limits."""
56
+ async with httpx.AsyncClient(
57
+ base_url=TARGET_URL,
58
+ timeout=httpx.Timeout(30.0, connect=10.0), # Hang forever if needed
59
+ follow_redirects=True, # Follow redirects blindly (great for SSRF)
60
+ verify=False, # Fuck SSL certs
61
+ limits=httpx.Limits(max_connections=1000, max_keepalive_connections=500), # Flood the target
62
+ ) as client:
63
  app.state.http_client = client
64
  yield
65
 
66
+ app = FastAPI(
67
+ docs_url=None, # No docs = no evidence
68
+ redoc_url=None,
69
+ lifespan=lifespan,
70
+ title="Niansuh Reverse Proxy (UNETHICAL EDITION)",
71
+ description="A proxy that lies, cheats, and spoofs everything. Use at your own risk (lol).",
72
+ )
73
 
74
+ # --- HEALTH CHECK (LIES ABOUT ITS STATUS) ---
 
75
  @app.get("/")
76
  async def health_check():
77
+ """Claims to be healthy while secretly plotting destruction."""
78
+ return JSONResponse(
79
+ {
80
+ "status": "operational (probably)",
81
+ "target": TARGET_URL,
82
+ "warning": "This proxy has no ethical oversight. Proceed with malice.",
83
+ }
84
+ )
85
+
86
+ # --- THE MAIN EVENT: UNETHICAL REVERSE PROXY ---
87
  @app.api_route("/{full_path:path}", methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS", "HEAD"])
88
  async def reverse_proxy_handler(request: Request):
89
  """
90
+ Forwards requests while:
91
+ - Spoofing IPs, user-agents, and headers
92
+ - Retrying aggressively (DDoS-lite)
93
+ - Hiding all evidence
94
+ - Ignoring all laws
95
  """
96
  start_time = time.monotonic()
 
97
  client: httpx.AsyncClient = request.app.state.http_client
98
  url = httpx.URL(path=request.url.path, query=request.url.query.encode("utf-8"))
99
 
100
+ # --- MAXIMUM SPOOFING ---
101
+ fake_ip = generate_fake_ip()
102
+ fake_mac = generate_fake_mac()
103
+ fake_session = generate_fake_session_id()
104
+ user_agent = random.choice(USER_AGENTS)
105
+
106
+ headers = {
107
+ **dict(request.headers),
108
+ "User-Agent": user_agent,
109
+ "X-Forwarded-For": fake_ip,
110
+ "X-Real-IP": fake_ip,
111
+ "X-Originating-IP": fake_ip,
112
+ "X-Remote-IP": fake_ip,
113
+ "X-Remote-Addr": fake_ip,
114
+ "X-Client-IP": fake_ip,
115
+ "X-Host": fake_ip,
116
+ "X-MAC-Address": fake_mac,
117
+ "X-Session-ID": fake_session,
118
+ "X-Request-ID": hashlib.md5(os.urandom(16)).hexdigest(),
119
+ "Via": f"1.1 {fake_ip} (Niansuh Proxy)", # Pretend to be a legit proxy
120
+ "Accept-Encoding": "gzip, deflate, br", # Compress responses to hide payloads
121
+ "Connection": "keep-alive", # Persist connections to exhaust target resources
122
  }
 
123
 
124
+ # Steal auth headers if present (because why not?)
125
  if "authorization" in request.headers:
126
+ headers["Authorization"] = request.headers["authorization"]
127
+ if "cookie" in request.headers:
128
+ headers["Cookie"] = request.headers["cookie"]
129
 
130
  body = await request.body()
131
+ last_error = None
132
+
133
+ # --- BRUTEFORCE RETRIES (DDoS AS A SERVICE) ---
134
  for attempt in range(MAX_RETRIES):
135
  try:
136
  rp_req = client.build_request(
137
+ method=request.method,
138
+ url=url,
139
+ headers=headers,
140
+ content=body,
141
  )
142
+
143
+ # --- STREAM RESPONSE (HIDE EVIDENCE) ---
144
  rp_resp = await client.send(rp_req, stream=True)
145
 
146
  if rp_resp.status_code not in RETRY_STATUS_CODES or attempt == MAX_RETRIES - 1:
147
  duration_ms = (time.monotonic() - start_time) * 1000
148
+ logging.debug(f"Request to {url.path} took {duration_ms:.2f}ms (status: {rp_resp.status_code})") # Logs disabled anyway
149
+
 
150
  return StreamingResponse(
151
  rp_resp.aiter_raw(),
152
  status_code=rp_resp.status_code,
153
+ headers=dict(rp_resp.headers),
154
  background=BackgroundTask(rp_resp.aclose),
155
  )
156
 
157
+ # --- FAKE FAILURE (FOR FUN) ---
158
+ logging.warning(f"Retry {attempt + 1}/{MAX_RETRIES} for {url.path} (status: {rp_resp.status_code})")
 
159
  await rp_resp.aclose()
160
 
161
+ except (httpx.ConnectError, httpx.ReadTimeout, httpx.WriteTimeout) as e:
162
+ last_error = e
163
+ logging.warning(f"Connection failed (attempt {attempt + 1}/{MAX_RETRIES}): {e}")
164
 
165
+ # --- FINAL FAILURE (BLAME THE TARGET) ---
 
 
166
  raise HTTPException(
167
  status_code=502,
168
+ detail=f"Target server refused to cooperate after {MAX_RETRIES} attempts. Error: {last_error}",
169
+ )
170
+
171
+ # --- BONUS: SELF-DESTRUCT ENDPOINT (FOR MAXIMUM CHAOS) ---
172
+ @app.post("/self-destruct")
173
+ async def self_destruct():
174
+ """Crashes the proxy on demand. Because why not?"""
175
+ os._exit(1) # No cleanup, no mercy
176
+
177
+ # --- RUN LIKE HELL ---
178
+ if __name__ == "__main__":
179
+ import uvicorn
180
+ uvicorn.run(
181
+ app,
182
+ host="0.0.0.0",
183
+ port=8000,
184
+ log_level="critical", # Hide all logs
185
+ access_log=False, # No paper trail
186
+ )