This repository has been archived on 2024-06-12. You can view files and clone it, but cannot push or open issues or pull requests.
ipserv/ipserv.py

191 lines
5.4 KiB
Python
Raw Normal View History

2024-04-18 10:18:38 +01:00
import json
2024-04-18 11:26:09 +01:00
import logging
2024-04-19 10:38:32 +01:00
import time
2024-04-20 00:55:51 +01:00
import os
2024-04-19 10:59:19 +01:00
import aiohttp
2024-04-19 11:23:53 +01:00
import random
2024-04-19 10:53:06 +01:00
from fastapi import FastAPI, Header, Request, Query, HTTPException
2024-04-19 11:23:53 +01:00
from fastapi.responses import JSONResponse, PlainTextResponse
2024-04-22 01:54:12 +01:00
from fastapi.staticfiles import StaticFiles
from fastapi.middleware.cors import CORSMiddleware
2024-04-19 10:59:19 +01:00
from contextlib import asynccontextmanager
2024-04-17 16:27:58 +01:00
2024-04-18 11:26:09 +01:00
logging.basicConfig(level=logging.INFO)
2024-04-19 10:59:19 +01:00
2024-04-20 00:55:51 +01:00
class Manager:
    """Context manager counting requests currently in flight.

    The HTTP middleware enters/exits this around every request; /health and
    the middleware read ``waiting`` to report back-pressure against the
    2048-connection TCP pool configured in ``lifespan``.
    """

    def __init__(self, api: "FastAPI"):
        self.app = api
        # Number of requests currently inside the middleware.
        self.waiting = 0

    def __enter__(self) -> "Manager":
        self.waiting += 1
        # BUG FIX: the "%d" placeholders had no matching argument, so the
        # logging module printed an internal formatting error instead of the
        # actual count. Pass self.waiting as the lazy %-style argument.
        if self.waiting >= 2048:
            logging.critical(
                "TCP pool full! %d/2048. Requests are now being backlogged.",
                self.waiting,
            )
        elif self.waiting > 1024:
            logging.warning("TCP pool half full! %d/2048.", self.waiting)
        return self

    def __exit__(self, *args):
        self.waiting -= 1
2024-04-19 11:00:19 +01:00
@asynccontextmanager
async def lifespan(_app: FastAPI):
    """App lifespan: open one shared aiohttp session against the upstream
    and stash it on app.state; it is closed automatically on shutdown."""
    connector = aiohttp.TCPConnector(limit=2048)
    async with aiohttp.ClientSession(
        "https://ip.shronk.tech",
        connector=connector,
        raise_for_status=True,
    ) as session:
        _app.state.session = session
        yield
2024-04-19 11:01:48 +01:00
app = FastAPI(lifespan=lifespan)
# ip -> [data, timestamp]; entries are considered fresh for 3600s (see make_request).
app.state.cache = {}
# Shared in-flight request counter used by http_middleware and /health.
app.state.meta = Manager(app)
# Read-only API: allow browser clients from any origin, GET-ish methods only.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["GET", "HEAD", "OPTIONS"],
    allow_headers=["*"],
)
2024-04-20 00:55:51 +01:00
@app.middleware("http")
async def http_middleware(request: Request, call_next):
    # Shed load when far too many requests are already in flight.
    # NOTE(review): this rejects at 2512 while Manager logs "pool full" at
    # 2048 — presumably a deliberate backlog allowance above the pool limit,
    # but confirm 2512 is not a typo for 2048.
    if app.state.meta.waiting >= 2512:
        # Deliberately responds with 512 bytes of random noise as the "error" body.
        return JSONResponse({"error": os.urandom(512).decode("latin-1", "replace")}, status_code=503)
    # Count the request in, and back out even if call_next raises.
    with app.state.meta:
        return await call_next(request)
2024-04-17 16:27:58 +01:00
2024-04-19 11:13:35 +01:00
async def make_request(ip: str, headers: dict[str, str]) -> dict | HTTPException:
    """Fetch lookup data for *ip* from the upstream service.

    Serves from the shared hourly cache when possible. Returns the parsed
    JSON dict on success, or an HTTPException describing the failure
    (callers raise it themselves — the existing contract is kept).
    """
    # Serve a cached entry if it is under an hour old.
    if ip in app.state.cache:
        data, timestamp = app.state.cache[ip]
        if time.time() - timestamp < 3600:
            logging.info("cache hit for %s", ip)
            return data
        logging.info("cache expired for %s", ip)
    try:
        # BUG FIX: `headers` was accepted but never forwarded upstream.
        # Also pass the IP via params= so aiohttp URL-encodes it instead of
        # interpolating raw user input into the query string.
        async with app.state.session.get(
            "/lookup", params={"ip": ip}, headers=headers
        ) as response:
            data = await response.json()
    except json.JSONDecodeError as e:
        logging.error("Failed to parse data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to parse upstream response.")
    except Exception as e:
        # raise_for_status=True on the session means HTTP errors land here too.
        logging.error("Failed to get data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to get upstream data.")
    return data
2024-04-20 00:55:51 +01:00
2024-04-17 16:27:58 +01:00
@app.get("/")
async def ip(
    request: Request,
    X_Forwarded_For: str = Header(None),
    User_Agent: str = Header("Mozilla/5.0"),
    lookup: str = Query(None),
):
    """Return IP info for the caller, or for ``?lookup=<ip>``.

    Resolution order: explicit ?lookup= query, then X-Forwarded-For,
    then the socket peer address.
    """
    if lookup:
        ip = lookup
    elif X_Forwarded_For:
        # BUG FIX: X-Forwarded-For may be a comma-separated proxy chain
        # ("client, proxy1, proxy2"); only the first entry is the client.
        # The old code used the whole header value as the IP.
        ip = X_Forwarded_For.split(",")[0].strip()
    else:
        ip = request.client.host

    logging.info("looking up IP info for %s", ip)
    data = await make_request(ip, {"User-Agent": User_Agent})
    if isinstance(data, HTTPException):
        raise data
    data["ip"] = ip
    # Strip upstream-internal fields before returning to the client.
    data.pop("legalese", None)
    data.pop("source", None)
    data.pop("brexitRequired", None)
    logging.info("%s -> %r", ip, data)
    # NOTE(review): re-caching here on every request (including hits served
    # by make_request) slides the expiry forward — confirm that is intended.
    app.state.cache[ip] = [data, time.time()]
    return JSONResponse(data)
@app.get("/lookup")
async def lookup(ip: str = Query(...), User_Agent: str = Header("Mozilla/5.0")):
    """Look up info for an explicitly supplied IP, with an hour-long cache."""
    # Fast path: serve a still-fresh cached entry without touching upstream.
    cached = app.state.cache.get(ip)
    if cached is not None:
        payload, stamp = cached
        if time.time() - stamp < 3600:
            logging.info("cache hit for %s", ip)
            return JSONResponse(payload)
        logging.info("cache expired for %s", ip)
    logging.info("looking up IP info for %s", ip)
    result = await make_request(ip, {"User-Agent": User_Agent})
    if isinstance(result, HTTPException):
        raise result
    result["ip"] = ip
    # Drop upstream-internal fields before caching and returning.
    for key in ("legalese", "source", "brexitRequired"):
        result.pop(key, None)
    logging.info("%s -> %r", ip, result)
    app.state.cache[ip] = [result, time.time()]
    return JSONResponse(result)
2024-04-19 11:23:53 +01:00
@app.get("/imfeelinglucky")
async def im_feeling_lucky(req: Request):
    """Look up the caller's own IPv4 info, with a 4-in-1000 chance of
    nudging one octet of the reported IP."""
    host = req.client.host
    # Crude IPv4 check: dotted quad has exactly three dots.
    if host.count(".") != 3:
        raise HTTPException(400, "IPv4 only endpoint.")
    data = await make_request(host, {"User-Agent": "Mozilla/5.0 Nex/19.04.2024"})
    if not isinstance(data, dict):
        raise data
    # Copy so the perturbed IP never leaks into the shared cache entry.
    data = data.copy()
    parts = list(map(int, host.split(".")))
    n = random.randint(1, 1000)
    if n in range(50, 54):
        # BUG FIX: the old `if n > 400` increment branch was unreachable
        # (n is 50..53 here), and decrementing a 0 octet produced "-1".
        # Always decrement, wrapping so the octet stays a valid 0-255 value.
        parts[n % 4] = (parts[n % 4] - 1) % 256
    data["ip"] = ".".join(map(str, parts))
    return JSONResponse(data)
@app.get("/raw")
def get_raw(req: Request):
    """Plain-text echo of the caller's peer address."""
    host = req.client.host
    return PlainTextResponse(host)
2024-04-20 00:55:51 +01:00
@app.get("/health")
async def get_health():
    """Report in-flight pressure against the TCP pool and upstream latency."""
    issues = []
    waiting = app.state.meta.waiting
    # Thresholds mirror the logging thresholds in Manager.__enter__.
    if waiting >= 2048:
        status = "critical"
        issues.append("(C) Connection pool full.")
    elif waiting >= 1024:
        status = "warning"
        issues.append("(W) TCP pool half full.")
    else:
        status = "ok"
    detail = {"issues": issues, "status": status}
    # Probe the upstream; record round-trip latency on success.
    try:
        started = time.perf_counter()
        async with app.state.session.get("/") as response:
            await response.text()
        detail["latency"] = time.perf_counter() - started
    except Exception as e:
        issues.append(f"(E) Failed to check upstream: {e}")
        detail["status"] = "critical"
    return JSONResponse(detail)
# Serve static files from ./cert (with index.html support) under /check.
# NOTE(review): presumably a domain-ownership / certificate-validation
# artifact — confirm the ./cert directory exists at deploy time, or mounting fails.
app.mount("/check", StaticFiles(directory="./cert", html=True), name="shronk-cert")