2024-04-18 10:18:38 +01:00
|
|
|
import json
|
2024-04-18 11:26:09 +01:00
|
|
|
import logging
|
2024-04-19 10:38:32 +01:00
|
|
|
import time
|
2024-04-20 00:55:51 +01:00
|
|
|
import os
|
2024-04-19 10:59:19 +01:00
|
|
|
import aiohttp
|
2024-04-19 11:23:53 +01:00
|
|
|
import random
|
2024-04-23 20:52:34 +01:00
|
|
|
import ipaddress
|
2024-04-19 10:53:06 +01:00
|
|
|
from fastapi import FastAPI, Header, Request, Query, HTTPException
|
2024-04-28 01:04:24 +01:00
|
|
|
from fastapi.responses import JSONResponse, PlainTextResponse, RedirectResponse
|
2024-04-22 01:54:12 +01:00
|
|
|
from fastapi.staticfiles import StaticFiles
|
|
|
|
from fastapi.middleware.cors import CORSMiddleware
|
2024-04-19 10:59:19 +01:00
|
|
|
from contextlib import asynccontextmanager
|
2024-04-17 16:27:58 +01:00
|
|
|
|
# Root logger at INFO so the request/cache log lines below are visible.
logging.basicConfig(level=logging.INFO)
class Manager:
    """Tracks how many requests are currently in flight so the service can
    report on (and log about) TCP connection-pool pressure.

    Used as a synchronous context manager around each request (see the HTTP
    middleware): the counter goes up on entry and down on exit.
    """

    def __init__(self, api: "FastAPI"):
        # Handle on the owning app (kept for external use; not read here).
        self.app = api
        # Number of requests currently inside the context manager.
        self.waiting = 0

    def __enter__(self) -> "Manager":
        self.waiting += 1
        # Thresholds mirror the aiohttp TCPConnector(limit=2048) below.
        if self.waiting >= 2048:
            # BUG FIX: the format strings had a %d placeholder but never
            # passed a value; supply it lazily as a logging argument.
            logging.critical(
                "TCP pool full! %d/2048. Requests are now being backlogged.",
                self.waiting,
            )
        elif self.waiting > 1024:
            logging.warning("TCP pool half full! %d/2048.", self.waiting)
        return self

    def __exit__(self, *args):
        self.waiting -= 1
@asynccontextmanager
async def lifespan(_app: FastAPI):
    """App lifespan hook that owns a single shared aiohttp session.

    The session (rooted at the SHRoNK upstream base URL, pool capped at
    2048 connections, raising on HTTP error statuses) is created on
    startup, stashed on ``app.state`` for the handlers, and closed
    automatically on shutdown.
    """
    session = aiohttp.ClientSession(
        "https://ip.shronk.tech",
        connector=aiohttp.TCPConnector(limit=2048),
        raise_for_status=True,
    )
    async with session as client:
        _app.state.session = client
        yield
# Application object; the lifespan hook above provides app.state.session.
app = FastAPI(lifespan=lifespan)
# In-memory lookup cache: ip -> [data, unix_timestamp], 1 hour TTL
# (enforced by the handlers below, never evicted otherwise).
app.state.cache = {}
# In-flight request counter used for pool-pressure reporting.
app.state.meta = Manager(app)
# Allow any origin to call this read-only API from the browser.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["GET", "HEAD", "OPTIONS"],
    allow_headers=["*"],
)
@app.middleware("http")
async def http_middleware(request: Request, call_next):
    """Gatekeeper for every request.

    Sheds load with a 503 (body of random noise) once the backlog is far
    beyond the pool limit, rejects IPv6 and unparseable client addresses
    with a 400, and otherwise counts the request in/out of the Manager
    while the downstream handler runs.
    """
    # NOTE(review): threshold is 2512 while the connector limit is 2048 —
    # presumably deliberate headroom for a backlog of ~464 requests;
    # confirm this offset is intended.
    if app.state.meta.waiting >= 2512:
        noise = os.urandom(512).decode("latin-1", "replace")
        return JSONResponse({"detail": noise}, status_code=503)

    try:
        parsed = ipaddress.ip_address(request.client.host)
    except ValueError:
        return JSONResponse(
            {"detail": "Invalid IP address."},
            status_code=400
        )
    if isinstance(parsed, ipaddress.IPv6Address):
        return JSONResponse(
            {"detail": "IPv6 is not supported at this time."},
            status_code=400
        )

    with app.state.meta:
        return await call_next(request)
async def make_request(ip: str, headers: dict[str, str] | None = None) -> dict | HTTPException:
    """Fetch IP information for *ip* from the SHRoNK upstream.

    Serves from ``app.state.cache`` when the entry is younger than an hour.
    On failure an ``HTTPException`` is *returned* (not raised) so callers
    can decide whether to fall back to another source.

    :param ip: dotted-quad IPv4 address to look up.
    :param headers: optional extra request headers (e.g. User-Agent)
        forwarded to the upstream.  (FIX: annotation was the implicit
        Optional ``dict[str, str] = None``.)
    """
    if ip in app.state.cache:
        data, timestamp = app.state.cache[ip]
        if time.time() - timestamp < 3600:
            logging.info("cache hit for %s", ip)
            return data
        # Stale entry: fall through and refetch (entry is overwritten by
        # the callers once fresh data arrives).
        logging.info("cache expired for %s", ip)

    try:
        async with app.state.session.get(f"/lookup?ip={ip}", headers=headers) as response:
            data = await response.json()
            # Tag where the answer came from so callers can tell sources apart.
            data["source"] = "SHRoNKNet"
    except json.JSONDecodeError as e:
        logging.error("Failed to parse data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to parse upstream response.")
    except Exception as e:
        logging.error("Failed to get data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to get upstream data.")
    return data
|
async def _get_ipinfo_no_token(ip: str):
    """Query ipinfo's unauthenticated demo-widget endpoint for *ip* and
    normalise the payload to the same keys the SHRoNK upstream uses.

    Any failure (network, JSON, missing keys) is logged and reported as a
    *returned* HTTPException rather than raised.
    """
    try:
        async with app.state.session.get("https://ipinfo.io/widget/demo/" + ip) as response:
            info = (await response.json())["data"]
            asn_info = info["asn"]
            abuse_info = info["abuse"]
            coords = info["loc"].split(",")
            result = {
                "ip": info["ip"],
                "city": info["city"],
                "country": info["region"],
                "countryCode": info["country"],
                # Drop the "AS" prefix, e.g. "AS13335" -> 13335.
                "asn": int(asn_info["asn"][2:]),
                "isp": asn_info["name"],
                "source": "ipinfo",
                "lat": coords[0],
                "lon": coords[1],
                "hostname": info["hostname"],
                "timezone": info["timezone"],
                "subnet": abuse_info["network"],
                "abuse": abuse_info["email"]
            }
    except json.JSONDecodeError as e:
        logging.error("Failed to parse data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to parse upstream response.")
    except Exception as e:
        logging.error("Failed to get data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to get upstream data.")
    return result
|
|
|
async def _get_ipinfo_token(ip: str, token: str):
    """Query ipinfo's token-authenticated API for *ip* and normalise the
    payload to the SHRoNK response shape.

    Failures are logged and reported as a *returned* HTTPException.
    """
    try:
        async with app.state.session.get(f"https://ipinfo.io/{ip}?token={token}") as response:
            info = await response.json()
            org = info["org"]
            coords = info["loc"].split(",")
            result = {
                "ip": info["ip"],
                "city": info["city"],
                "country": info["region"],
                "countryCode": info["country"],
                # "org" appears to be "AS<number> <name>"; take the numeric
                # ASN and the remainder as the ISP name.
                "asn": int(org.split()[0][2:]),
                "isp": org.split(" ", 1)[1],
                "source": "ipinfo",
                "lat": coords[0],
                "lon": coords[1],
                "hostname": info["hostname"],
                "timezone": info["timezone"],
            }
    except json.JSONDecodeError as e:
        logging.error("Failed to parse data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to parse upstream response.")
    except Exception as e:
        logging.error("Failed to get data for %s: %s", ip, e, exc_info=True)
        return HTTPException(500, "Failed to get upstream data.")
    return result
|
async def get_from_ipinfo(ip: str) -> dict | HTTPException:
    """Fetch IP info from ipinfo.io, preferring the token-authenticated
    endpoint when the IPINFO_TOKEN environment variable is set.

    Fresh (< 1 hour old) cache entries are returned without a request.
    """
    cached = app.state.cache.get(ip)
    if cached is not None:
        data, timestamp = cached
        if time.time() - timestamp < 3600:
            return data

    token = os.getenv("IPINFO_TOKEN")
    if not token:
        return await _get_ipinfo_no_token(ip)
    return await _get_ipinfo_token(ip, token)
|
|
|
async def get_ip_information(ip: str) -> dict | HTTPException:
    """Resolve *ip*, trying the SHRoNK upstream first and falling back to
    ipinfo.io.

    Returns whatever data could be gathered; as a last resort a minimal
    ``{"ip": ip}`` dict so callers always get something JSON-serialisable.
    """
    try:
        data = await make_request(ip)
        # make_request returns an HTTPException on failure; only accept
        # an actual payload, otherwise fall through to ipinfo.
        if isinstance(data, dict):
            return data
    except Exception as e:
        logging.error("Failed to contact shronk net ip: %s", e, exc_info=True)

    try:
        # BUG FIX: this previously called get_ip_information(ip) — i.e.
        # itself — recursing forever. The intended fallback is ipinfo.
        data = await get_from_ipinfo(ip)
    except Exception as e:
        logging.error("Failed to contact ipinfo: %s", e, exc_info=True)
    else:
        return data
    return {"ip": ip}
|
|
|
|
@app.get("/")
async def ip(
    request: Request,
    X_Forwarded_For: str = Header(None),
    lookup: str = Query(None),
):
    """Main endpoint: report information about an IP address.

    Precedence for which address to use: the explicit ``?lookup=`` query
    parameter, then the X-Forwarded-For header, then the socket peer
    address. Loopback is replaced with a random IPv4 so local development
    still yields real-looking data.
    """
    target = lookup or X_Forwarded_For or request.client.host

    if target == "127.0.0.1":
        # Looking up loopback is useless; invent a random address instead.
        octets = (random.randint(0, 255) for _ in range(4))
        target = ".".join(map(str, octets))

    logging.info("looking up IP info for %s", target)
    data = await make_request(target)
    if isinstance(data, HTTPException):
        raise data

    data["ip"] = target
    # Strip internal / joke fields before responding.
    for unwanted in ("legalese", "source", "brexitRequired"):
        data.pop(unwanted, None)

    logging.info("%s -> %r", target, data)
    app.state.cache[target] = [data, time.time()]
    return JSONResponse(data)
|
|
|
|
|
@app.get("/lookup")
|
|
|
|
async def lookup(ip: str = Query(...), User_Agent: str = Header("Mozilla/5.0")):
|
|
|
|
if ip in app.state.cache:
|
|
|
|
data, timestamp = app.state.cache[ip]
|
|
|
|
if time.time() - timestamp < 3600:
|
|
|
|
logging.info("cache hit for %s", ip)
|
|
|
|
return JSONResponse(data)
|
|
|
|
logging.info("cache expired for %s", ip)
|
2024-04-28 01:04:24 +01:00
|
|
|
|
|
|
|
if ip == "127.0.0.1":
|
|
|
|
ip = ".".join(map(str, (random.randint(0, 255) for _ in range(4))))
|
2024-04-19 11:13:35 +01:00
|
|
|
|
|
|
|
logging.info("looking up IP info for %s", ip)
|
2024-04-19 11:15:42 +01:00
|
|
|
data = await make_request(
|
2024-04-19 11:13:35 +01:00
|
|
|
ip,
|
|
|
|
{"User-Agent": User_Agent}
|
|
|
|
)
|
2024-04-19 11:15:42 +01:00
|
|
|
if isinstance(data, HTTPException):
|
2024-04-19 11:23:53 +01:00
|
|
|
raise data
|
2024-04-18 11:26:09 +01:00
|
|
|
data["ip"] = ip
|
2024-04-18 10:18:38 +01:00
|
|
|
data.pop("legalese", None)
|
|
|
|
data.pop("source", None)
|
|
|
|
data.pop("brexitRequired", None)
|
2024-04-19 11:14:59 +01:00
|
|
|
logging.info("%s -> %r", ip, data)
|
2024-04-19 11:13:35 +01:00
|
|
|
app.state.cache[ip] = [data, time.time()]
|
|
|
|
return JSONResponse(data)
|
2024-04-19 11:23:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
@app.get("/imfeelinglucky")
|
|
|
|
async def im_feeling_lucky(req: Request):
|
2024-04-28 01:04:24 +01:00
|
|
|
ip = req.client.host
|
|
|
|
if ip == "127.0.0.1":
|
|
|
|
ip = ".".join(map(str, (random.randint(0, 255) for _ in range(4))))
|
|
|
|
data = await make_request(ip)
|
2024-04-19 11:23:53 +01:00
|
|
|
if not isinstance(data, dict):
|
|
|
|
raise data
|
|
|
|
data = data.copy()
|
|
|
|
|
2024-04-28 01:04:24 +01:00
|
|
|
parts = list(map(int, ip.split(".")))
|
2024-04-19 11:23:53 +01:00
|
|
|
n = random.randint(1, 1000)
|
|
|
|
if n in range(50, 54):
|
|
|
|
if n > 400:
|
|
|
|
parts[n % 4] += 1
|
|
|
|
else:
|
|
|
|
parts[n % 4] -= 1
|
|
|
|
data["ip"] = ".".join(map(str, parts))
|
|
|
|
return JSONResponse(data)
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/raw")
|
|
|
|
def get_raw(req: Request):
|
|
|
|
return PlainTextResponse(req.client.host)
|
2024-04-20 00:55:51 +01:00
|
|
|
|
|
|
|
|
@app.get("/random")
def get_random():
    """Redirect to /lookup for a uniformly random IPv4 address."""
    octets = [str(random.randint(0, 255)) for _ in range(4)]
    ip = ".".join(octets)
    return RedirectResponse(f"/lookup?ip={ip}")
|
|
|
@app.get("/health")
async def get_health():
    """Health probe: reports connection-pool pressure and upstream latency.

    Status is "ok" / "warning" / "critical" with human-readable issue
    strings; "latency" is the round-trip time to the upstream root.
    """
    detail = {"issues": []}

    waiting = app.state.meta.waiting
    if waiting >= 2048:
        detail["status"] = "critical"
        detail["issues"].append("(C) Connection pool full.")
    elif waiting >= 1024:
        detail["status"] = "warning"
        detail["issues"].append("(W) TCP pool half full.")
    else:
        detail["status"] = "ok"

    # Round-trip the upstream root to measure latency; any failure here
    # overrides the status to critical.
    try:
        started = time.perf_counter()
        async with app.state.session.get("/") as response:
            await response.text()
            detail["latency"] = time.perf_counter() - started
    except Exception as e:
        detail["issues"].append(f"(E) Failed to check upstream: {e}")
        detail["status"] = "critical"

    return JSONResponse(detail)
|
|
|
# Serve the static certificate-check page at /check (files under ./cert).
app.mount("/check", StaticFiles(directory="./cert", html=True), name="shronk-cert")
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Development entry point only; production runs under the docker
    # container (hence the shouty log line below).
    import uvicorn

    logging.critical("Running in development mode! Even then, you should use the docker container!")
    app.debug = True
    # NOTE(review): port=0 asks the OS for an ephemeral port — confirm
    # this is intended rather than a fixed development port.
    uvicorn.run("ipserv:app", port=0, reload=True, log_level="info")