2022-12-01 13:34:26 +00:00
|
|
|
import asyncio
|
2023-11-13 19:29:44 +00:00
|
|
|
import base64
|
2023-08-16 01:25:59 +01:00
|
|
|
import functools
|
2023-06-03 14:43:31 +01:00
|
|
|
import glob
|
2022-12-01 12:22:11 +00:00
|
|
|
import io
|
2023-04-27 11:13:25 +01:00
|
|
|
import json
|
2023-11-10 22:38:47 +00:00
|
|
|
import typing
|
2023-11-13 19:29:44 +00:00
|
|
|
import zlib
|
2023-11-10 22:38:47 +00:00
|
|
|
|
2023-08-16 01:25:59 +01:00
|
|
|
import math
|
2022-12-01 12:22:11 +00:00
|
|
|
import os
|
2023-01-03 13:56:23 +00:00
|
|
|
import random
|
|
|
|
import re
|
2023-11-04 17:18:39 +00:00
|
|
|
import shutil
|
2023-09-09 21:50:36 +01:00
|
|
|
import subprocess
|
2023-08-16 01:25:59 +01:00
|
|
|
import sys
|
2023-03-14 12:39:57 +00:00
|
|
|
import tempfile
|
2023-01-03 15:17:09 +00:00
|
|
|
import textwrap
|
2023-09-09 21:55:18 +01:00
|
|
|
import traceback
|
2023-04-29 02:25:19 +01:00
|
|
|
from functools import partial
|
2023-03-16 21:45:01 +00:00
|
|
|
from io import BytesIO
|
2023-01-03 13:56:23 +00:00
|
|
|
from pathlib import Path
|
2023-11-04 17:18:39 +00:00
|
|
|
from time import sleep, time, time_ns
|
|
|
|
from typing import Dict, Literal, Optional, Tuple
|
2023-01-03 14:43:49 +00:00
|
|
|
from urllib.parse import urlparse
|
2022-11-18 14:11:53 +00:00
|
|
|
|
2023-11-04 17:18:39 +00:00
|
|
|
import aiofiles
|
2022-11-13 23:16:47 +00:00
|
|
|
import aiohttp
|
2023-01-03 13:56:23 +00:00
|
|
|
import discord
|
2023-11-04 17:18:39 +00:00
|
|
|
import dns.resolver
|
|
|
|
import httpx
|
2023-01-03 13:56:23 +00:00
|
|
|
import psutil
|
2023-11-04 17:18:39 +00:00
|
|
|
import pytesseract
|
|
|
|
import pyttsx3
|
2023-11-10 23:15:06 +00:00
|
|
|
from discord.ext import commands
|
2023-11-04 17:18:39 +00:00
|
|
|
from dns import asyncresolver
|
|
|
|
from PIL import Image
|
2022-12-29 17:41:41 +00:00
|
|
|
from rich.tree import Tree
|
2023-11-10 23:15:06 +00:00
|
|
|
from rich import print
|
2023-01-03 13:56:23 +00:00
|
|
|
from selenium import webdriver
|
2023-01-16 15:51:03 +00:00
|
|
|
from selenium.common.exceptions import WebDriverException
|
2023-01-03 13:56:23 +00:00
|
|
|
from selenium.webdriver.chrome.options import Options as ChromeOptions
|
|
|
|
from selenium.webdriver.chrome.service import Service as ChromeService
|
|
|
|
from selenium.webdriver.firefox.options import Options as FirefoxOptions
|
|
|
|
from selenium.webdriver.firefox.service import Service as FirefoxService
|
2023-11-04 17:18:39 +00:00
|
|
|
|
|
|
|
from utils import Timer, console
|
2022-11-13 23:16:47 +00:00
|
|
|
|
2023-04-28 21:31:00 +01:00
|
|
|
try:
|
|
|
|
from config import proxy
|
|
|
|
except ImportError:
|
|
|
|
proxy = None
|
|
|
|
try:
|
|
|
|
from config import proxies
|
|
|
|
except ImportError:
|
|
|
|
if proxy:
|
|
|
|
proxies = [proxy] * 2
|
|
|
|
else:
|
|
|
|
proxies = []
|
|
|
|
|
2023-08-16 01:25:59 +01:00
|
|
|
try:
|
|
|
|
_engine = pyttsx3.init()
|
|
|
|
# noinspection PyTypeChecker
|
|
|
|
VOICES = [x.id for x in _engine.getProperty("voices")]
|
|
|
|
del _engine
|
|
|
|
except Exception as _pyttsx3_err:
|
2023-11-10 23:15:06 +00:00
|
|
|
print("Failed to load pyttsx3: %s" % _pyttsx3_err, file=sys.stderr)
|
2023-08-16 01:25:59 +01:00
|
|
|
pyttsx3 = None
|
|
|
|
VOICES = []
|
2023-03-20 14:48:23 +00:00
|
|
|
|
2022-11-13 23:16:47 +00:00
|
|
|
|
2023-11-10 22:38:47 +00:00
|
|
|
# class OllamaStreamReader:
|
|
|
|
# def __init__(self, response: httpx.Response):
|
|
|
|
# self.response = response
|
|
|
|
# self.stream = response.aiter_bytes(1)
|
|
|
|
# self._buffer = b""
|
|
|
|
#
|
|
|
|
# async def __aiter__(self):
|
|
|
|
# return self
|
|
|
|
#
|
|
|
|
# async def __anext__(self) -> dict[str, str | int | bool]:
|
|
|
|
# if self.response.is_stream_consumed:
|
|
|
|
# raise StopAsyncIteration
|
|
|
|
# self._buffer = b""
|
|
|
|
# while not self._buffer.endswith(b"}\n"):
|
|
|
|
# async for char in self.stream:
|
|
|
|
# self._buffer += char
|
|
|
|
#
|
|
|
|
# return json.loads(self._buffer.decode("utf-8", "replace"))
|
|
|
|
|
|
|
|
|
|
|
|
async def ollama_stream_reader(response: httpx.Response) -> typing.AsyncGenerator[
|
|
|
|
dict[str, str | int | bool], None
|
|
|
|
]:
|
2023-11-10 23:45:52 +00:00
|
|
|
print("Starting to iterate over ollama response %r..." % response, file=sys.stderr)
|
|
|
|
async for chunk in response.aiter_bytes():
|
2023-11-10 23:36:18 +00:00
|
|
|
# Each chunk is a JSON string
|
|
|
|
try:
|
2023-11-10 23:43:32 +00:00
|
|
|
loaded = json.loads(chunk.strip().decode("utf-8", "replace"))
|
2023-11-10 23:45:52 +00:00
|
|
|
print("Loaded chunk: %r" % loaded)
|
2023-11-10 23:43:32 +00:00
|
|
|
yield loaded
|
2023-11-10 23:40:27 +00:00
|
|
|
except json.JSONDecodeError as e:
|
|
|
|
print("Failed to decode chunk %r: %r" % (chunk, e), file=sys.stderr)
|
2023-11-10 23:36:18 +00:00
|
|
|
pass
|
2023-11-10 22:30:15 +00:00
|
|
|
|
|
|
|
|
2023-04-28 21:31:00 +01:00
|
|
|
def format_autocomplete(ctx: discord.AutocompleteContext):
|
|
|
|
url = ctx.options.get("url", os.urandom(6).hex())
|
|
|
|
self: "OtherCog" = ctx.bot.cogs["OtherCog"] # type: ignore
|
|
|
|
if url in self._fmt_cache:
|
2023-04-29 02:59:17 +01:00
|
|
|
suitable = []
|
|
|
|
for _format_key in self._fmt_cache[url]:
|
|
|
|
_format = self._fmt_cache[url][_format_key]
|
|
|
|
_format_nice = _format["format"]
|
|
|
|
if ctx.value.lower() in _format_nice.lower():
|
|
|
|
suitable.append(_format_nice)
|
|
|
|
return suitable
|
2023-04-28 21:31:00 +01:00
|
|
|
|
|
|
|
try:
|
|
|
|
parsed = urlparse(url, allow_fragments=True)
|
|
|
|
except ValueError:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
if parsed.scheme in ("http", "https") and parsed.netloc:
|
|
|
|
self._fmt_queue.put_nowait(url)
|
2023-04-29 02:27:18 +01:00
|
|
|
return []
|
2023-04-28 21:31:00 +01:00
|
|
|
|
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
# noinspection DuplicatedCode
|
2022-11-13 23:16:47 +00:00
|
|
|
class OtherCog(commands.Cog):
|
|
|
|
def __init__(self, bot):
|
|
|
|
self.bot = bot
|
2023-01-18 15:33:51 +00:00
|
|
|
self.lock = asyncio.Lock()
|
2023-03-20 14:39:22 +00:00
|
|
|
self.http = httpx.AsyncClient()
|
2023-04-28 21:31:00 +01:00
|
|
|
self._fmt_cache = {}
|
|
|
|
self._fmt_queue = asyncio.Queue()
|
|
|
|
self._worker_task = self.bot.loop.create_task(self.cache_population_job())
|
|
|
|
|
2023-11-11 18:31:48 +00:00
|
|
|
self.ollama_locks: dict[discord.Message, asyncio.Event] = {}
|
|
|
|
|
2023-04-28 21:31:00 +01:00
|
|
|
def cog_unload(self):
|
|
|
|
self._worker_task.cancel()
|
|
|
|
|
|
|
|
async def cache_population_job(self):
|
|
|
|
while True:
|
|
|
|
url = await self._fmt_queue.get()
|
|
|
|
if url not in self._fmt_cache:
|
|
|
|
await self.list_formats(url, use_proxy=1)
|
|
|
|
self._fmt_queue.task_done()
|
|
|
|
|
|
|
|
async def list_formats(self, url: str, *, use_proxy: int = 0) -> dict:
|
|
|
|
if url in self._fmt_cache:
|
|
|
|
return self._fmt_cache[url]
|
|
|
|
|
2023-04-29 02:59:17 +01:00
|
|
|
import yt_dlp
|
|
|
|
|
|
|
|
class NullLogger:
|
|
|
|
def debug(self, *args, **kwargs):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def info(self, *args, **kwargs):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def warning(self, *args, **kwargs):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def error(self, *args, **kwargs):
|
|
|
|
pass
|
|
|
|
|
|
|
|
with tempfile.TemporaryDirectory(prefix="jimmy-ytdl", suffix="-info") as tempdir:
|
|
|
|
with yt_dlp.YoutubeDL(
|
2023-11-04 17:18:39 +00:00
|
|
|
{
|
|
|
|
"windowsfilenames": True,
|
|
|
|
"restrictfilenames": True,
|
|
|
|
"noplaylist": True,
|
|
|
|
"nocheckcertificate": True,
|
|
|
|
"no_color": True,
|
|
|
|
"noprogress": True,
|
|
|
|
"logger": NullLogger(),
|
|
|
|
"paths": {"home": tempdir, "temp": tempdir},
|
|
|
|
"cookiefile": Path(__file__).parent.parent / "jimmy-cookies.txt",
|
|
|
|
}
|
2023-04-29 02:59:17 +01:00
|
|
|
) as downloader:
|
|
|
|
try:
|
|
|
|
info = await self.bot.loop.run_in_executor(
|
2023-11-04 17:18:39 +00:00
|
|
|
None, partial(downloader.extract_info, url, download=False)
|
2023-04-29 02:59:17 +01:00
|
|
|
)
|
|
|
|
except yt_dlp.utils.DownloadError:
|
|
|
|
return {}
|
|
|
|
info = downloader.sanitize_info(info)
|
|
|
|
new = {
|
|
|
|
fmt["format_id"]: {
|
|
|
|
"id": fmt["format_id"],
|
|
|
|
"ext": fmt["ext"],
|
|
|
|
"protocol": fmt["protocol"],
|
2023-06-01 01:09:05 +01:00
|
|
|
"acodec": fmt.get("acodec", "?"),
|
|
|
|
"vcodec": fmt.get("vcodec", "?"),
|
|
|
|
"resolution": fmt.get("resolution", "?x?"),
|
2023-11-04 17:18:39 +00:00
|
|
|
"filesize": fmt.get("filesize", float("inf")),
|
|
|
|
"format": fmt.get("format", "?"),
|
2023-04-29 02:59:17 +01:00
|
|
|
}
|
|
|
|
for fmt in info["formats"]
|
|
|
|
}
|
2023-04-28 21:31:00 +01:00
|
|
|
self._fmt_cache[url] = new
|
|
|
|
return new
|
2022-11-14 17:20:31 +00:00
|
|
|
|
2023-01-23 15:22:38 +00:00
|
|
|
class AbortScreenshotTask(discord.ui.View):
|
|
|
|
def __init__(self, task: asyncio.Task):
|
|
|
|
super().__init__()
|
|
|
|
self.task = task
|
|
|
|
|
|
|
|
@discord.ui.button(label="Abort", style=discord.ButtonStyle.red)
|
|
|
|
async def abort(self, button: discord.ui.Button, interaction: discord.Interaction):
|
|
|
|
new: discord.Interaction = await interaction.response.send_message("Aborting...", ephemeral=True)
|
|
|
|
self.task.cancel()
|
|
|
|
try:
|
|
|
|
await self.task
|
|
|
|
except asyncio.CancelledError:
|
|
|
|
pass
|
|
|
|
self.disable_all_items()
|
|
|
|
button.label = "[ aborted ]"
|
|
|
|
await new.edit_original_response(content="Aborted screenshot task.", view=self)
|
|
|
|
self.stop()
|
|
|
|
|
2023-01-03 15:20:50 +00:00
|
|
|
async def screenshot_website(
|
2023-01-15 19:39:07 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
website: str,
|
|
|
|
driver: Literal["chrome", "firefox"],
|
|
|
|
render_time: int = 10,
|
2023-02-02 12:16:02 +00:00
|
|
|
load_timeout: int = 30,
|
2023-01-15 19:39:07 +00:00
|
|
|
window_height: int = 1920,
|
|
|
|
window_width: int = 1080,
|
|
|
|
full_screenshot: bool = False,
|
2023-01-16 15:51:03 +00:00
|
|
|
) -> Tuple[discord.File, str, int, int]:
|
2023-01-15 19:39:07 +00:00
|
|
|
async def _blocking(*args):
|
|
|
|
return await self.bot.loop.run_in_executor(None, *args)
|
|
|
|
|
|
|
|
def find_driver():
|
2023-01-16 09:55:45 +00:00
|
|
|
nonlocal driver, driver_path
|
2023-01-15 19:39:07 +00:00
|
|
|
drivers = {
|
|
|
|
"firefox": [
|
|
|
|
"/usr/bin/firefox-esr",
|
|
|
|
"/usr/bin/firefox",
|
|
|
|
],
|
2023-01-18 20:54:48 +00:00
|
|
|
"chrome": ["/usr/bin/chromium", "/usr/bin/chrome", "/usr/bin/chrome-browser", "/usr/bin/google-chrome"],
|
2023-01-15 19:39:07 +00:00
|
|
|
}
|
|
|
|
selected_driver = driver
|
|
|
|
arr = drivers.pop(selected_driver)
|
|
|
|
for binary in arr:
|
|
|
|
b = Path(binary).resolve()
|
|
|
|
if not b.exists():
|
|
|
|
continue
|
|
|
|
driver = selected_driver
|
|
|
|
driver_path = b
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
for key, value in drivers.items():
|
|
|
|
for binary in value:
|
|
|
|
b = Path(binary).resolve()
|
|
|
|
if not b.exists():
|
|
|
|
continue
|
|
|
|
driver = key
|
|
|
|
driver_path = b
|
|
|
|
break
|
|
|
|
else:
|
2023-01-13 22:42:03 +00:00
|
|
|
continue
|
|
|
|
break
|
|
|
|
else:
|
2023-01-15 19:39:07 +00:00
|
|
|
raise RuntimeError("No browser binary.")
|
|
|
|
return driver, driver_path
|
|
|
|
|
|
|
|
driver, driver_path = find_driver()
|
|
|
|
console.log(
|
|
|
|
"Using driver '{}' with binary '{}' to screenshot '{}', as requested by {}.".format(
|
|
|
|
driver, driver_path, website, ctx.user
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
def _setup():
|
|
|
|
nonlocal driver
|
|
|
|
if driver == "chrome":
|
|
|
|
options = ChromeOptions()
|
|
|
|
options.add_argument("--headless")
|
|
|
|
options.add_argument("--disable-dev-shm-usage")
|
|
|
|
options.add_argument("--disable-gpu")
|
|
|
|
options.add_argument("--disable-extensions")
|
|
|
|
options.add_argument("--incognito")
|
|
|
|
options.binary_location = str(driver_path)
|
|
|
|
service = ChromeService("/usr/bin/chromedriver")
|
|
|
|
driver = webdriver.Chrome(service=service, options=options)
|
|
|
|
driver.set_window_size(window_height, window_width)
|
2023-01-13 22:42:03 +00:00
|
|
|
else:
|
2023-01-15 19:39:07 +00:00
|
|
|
options = FirefoxOptions()
|
|
|
|
options.add_argument("--headless")
|
|
|
|
options.add_argument("--private-window")
|
|
|
|
options.add_argument("--safe-mode")
|
|
|
|
options.add_argument("--new-instance")
|
|
|
|
options.binary_location = str(driver_path)
|
|
|
|
service = FirefoxService("/usr/bin/geckodriver")
|
|
|
|
driver = webdriver.Firefox(service=service, options=options)
|
|
|
|
driver.set_window_size(window_height, window_width)
|
|
|
|
return driver, textwrap.shorten(website, 100)
|
|
|
|
|
|
|
|
# Is it overkill to cast this to a thread? yes
|
|
|
|
# Do I give a flying fuck? kinda
|
|
|
|
# Why am I doing this? I suspect setup is causing a ~10-second block of the event loop
|
2023-01-16 15:51:03 +00:00
|
|
|
driver_name = driver
|
|
|
|
start_init = time()
|
2023-01-15 19:39:07 +00:00
|
|
|
driver, friendly_url = await asyncio.to_thread(_setup)
|
2023-01-16 15:51:03 +00:00
|
|
|
end_init = time()
|
2023-01-18 20:54:48 +00:00
|
|
|
console.log("Driver '{}' initialised in {} seconds.".format(driver_name, round(end_init - start_init, 2)))
|
2023-01-03 13:56:23 +00:00
|
|
|
|
2023-01-23 15:22:38 +00:00
|
|
|
def _edit(content: str):
|
2023-01-09 14:36:32 +00:00
|
|
|
self.bot.loop.create_task(ctx.interaction.edit_original_response(content=content))
|
|
|
|
|
2023-02-02 12:16:02 +00:00
|
|
|
expires = round(time() + load_timeout)
|
|
|
|
_edit(content=f"Screenshotting <{friendly_url}>... (49%, loading webpage, aborts <t:{expires}:R>)")
|
|
|
|
await _blocking(driver.set_page_load_timeout, load_timeout)
|
2023-01-16 15:51:03 +00:00
|
|
|
start = time()
|
2023-01-15 19:39:07 +00:00
|
|
|
await _blocking(driver.get, website)
|
2023-01-16 15:51:03 +00:00
|
|
|
end = time()
|
|
|
|
get_time = round((end - start) * 1000)
|
2023-01-23 15:10:21 +00:00
|
|
|
render_time_expires = round(time() + render_time)
|
2023-01-23 15:22:38 +00:00
|
|
|
_edit(content=f"Screenshotting <{friendly_url}>... (66%, stopping render <t:{render_time_expires}:R>)")
|
2023-01-03 14:29:33 +00:00
|
|
|
await asyncio.sleep(render_time)
|
2023-01-23 15:22:38 +00:00
|
|
|
_edit(content=f"Screenshotting <{friendly_url}>... (83%, saving screenshot)")
|
2023-01-03 13:56:23 +00:00
|
|
|
domain = re.sub(r"https?://", "", website)
|
2023-01-15 19:39:07 +00:00
|
|
|
|
|
|
|
screenshot_method = driver.get_screenshot_as_png
|
2023-01-16 15:51:03 +00:00
|
|
|
if full_screenshot and driver_name == "firefox":
|
2023-01-15 19:39:07 +00:00
|
|
|
screenshot_method = driver.get_full_page_screenshot_as_png
|
|
|
|
|
2023-01-16 15:51:03 +00:00
|
|
|
start = time()
|
2023-01-15 19:39:07 +00:00
|
|
|
data = await _blocking(screenshot_method)
|
2023-01-03 13:56:23 +00:00
|
|
|
_io = io.BytesIO()
|
2023-01-15 19:39:07 +00:00
|
|
|
# Write the data async because HAHAHAHAHAHAHA
|
|
|
|
# We'll do it in the existing event loop though because less overhead
|
|
|
|
await _blocking(_io.write, data)
|
2023-01-03 13:56:23 +00:00
|
|
|
_io.seek(0)
|
2023-01-16 15:51:03 +00:00
|
|
|
end = time()
|
|
|
|
screenshot_time = round((end - start) * 1000)
|
2023-01-03 13:56:23 +00:00
|
|
|
driver.quit()
|
2023-01-16 15:51:03 +00:00
|
|
|
return discord.File(_io, f"{domain}.png"), driver_name, get_time, screenshot_time
|
2023-01-03 13:56:23 +00:00
|
|
|
|
2022-12-29 17:41:41 +00:00
|
|
|
@staticmethod
|
|
|
|
async def get_interface_ip_addresses() -> Dict[str, list[Dict[str, str | bool | int]]]:
|
|
|
|
addresses = await asyncio.to_thread(psutil.net_if_addrs)
|
|
|
|
stats = await asyncio.to_thread(psutil.net_if_stats)
|
|
|
|
result = {}
|
|
|
|
for key in addresses.keys():
|
|
|
|
result[key] = []
|
|
|
|
for ip_addr in addresses[key]:
|
|
|
|
if ip_addr.broadcast is None:
|
|
|
|
continue
|
|
|
|
else:
|
|
|
|
result[key].append(
|
|
|
|
{
|
|
|
|
"ip": ip_addr.address,
|
|
|
|
"netmask": ip_addr.netmask,
|
|
|
|
"broadcast": ip_addr.broadcast,
|
|
|
|
"up": stats[key].isup,
|
2023-01-03 15:20:50 +00:00
|
|
|
"speed": stats[key].speed,
|
2022-12-29 17:41:41 +00:00
|
|
|
}
|
|
|
|
)
|
|
|
|
return result
|
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
async def analyse_text(self, text: str) -> Optional[Tuple[float, float, float, float]]:
|
|
|
|
"""Analyse text for positivity, negativity and neutrality."""
|
|
|
|
|
|
|
|
def inner():
|
|
|
|
try:
|
|
|
|
from utils.sentiment_analysis import intensity_analyser
|
|
|
|
except ImportError:
|
|
|
|
return None
|
|
|
|
scores = intensity_analyser.polarity_scores(text)
|
|
|
|
return scores["pos"], scores["neu"], scores["neg"], scores["compound"]
|
|
|
|
|
|
|
|
async with self.bot.training_lock:
|
|
|
|
return await self.bot.loop.run_in_executor(None, inner)
|
|
|
|
|
2022-11-14 17:20:31 +00:00
|
|
|
@staticmethod
|
|
|
|
async def get_xkcd(session: aiohttp.ClientSession, n: int) -> dict | None:
|
|
|
|
async with session.get("https://xkcd.com/{!s}/info.0.json".format(n)) as response:
|
|
|
|
if response.status == 200:
|
2022-11-13 23:16:47 +00:00
|
|
|
data = await response.json()
|
2022-11-14 17:20:31 +00:00
|
|
|
return data
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
async def random_xkcd_number(session: aiohttp.ClientSession) -> int:
|
|
|
|
async with session.get("https://c.xkcd.com/random/comic") as response:
|
|
|
|
if response.status != 302:
|
|
|
|
number = random.randint(100, 999)
|
|
|
|
else:
|
2022-11-18 14:11:53 +00:00
|
|
|
number = int(response.headers["location"].split("/")[-2])
|
2022-11-14 17:20:31 +00:00
|
|
|
return number
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
async def random_xkcd(session: aiohttp.ClientSession) -> dict | None:
|
|
|
|
"""Fetches a random XKCD.
|
|
|
|
|
|
|
|
Basically a shorthand for random_xkcd_number and get_xkcd.
|
|
|
|
"""
|
|
|
|
number = await OtherCog.random_xkcd_number(session)
|
|
|
|
return await OtherCog.get_xkcd(session, number)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def get_xkcd_embed(data: dict) -> discord.Embed:
|
2022-11-13 23:16:47 +00:00
|
|
|
embed = discord.Embed(
|
2022-11-18 14:11:53 +00:00
|
|
|
title=data["safe_title"], description=data["alt"], color=discord.Colour.embed_background()
|
2022-11-13 23:16:47 +00:00
|
|
|
)
|
2022-11-18 14:11:53 +00:00
|
|
|
embed.set_footer(text="XKCD #{!s}".format(data["num"]))
|
|
|
|
embed.set_image(url=data["img"])
|
2022-11-14 17:20:31 +00:00
|
|
|
return embed
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
async def generate_xkcd(n: int = None) -> discord.Embed:
|
|
|
|
async with aiohttp.ClientSession() as session:
|
|
|
|
if n is None:
|
|
|
|
data = await OtherCog.random_xkcd(session)
|
2022-11-18 14:11:53 +00:00
|
|
|
n = data["num"]
|
2022-11-14 17:20:31 +00:00
|
|
|
else:
|
|
|
|
data = await OtherCog.get_xkcd(session, n)
|
|
|
|
if data is None:
|
|
|
|
return discord.Embed(
|
2022-11-18 14:11:53 +00:00
|
|
|
title="Failed to load XKCD :(", description="Try again later.", color=discord.Colour.red()
|
2022-11-14 17:20:31 +00:00
|
|
|
).set_footer(text="Attempted to retrieve XKCD #{!s}".format(n))
|
|
|
|
return OtherCog.get_xkcd_embed(data)
|
|
|
|
|
|
|
|
class XKCDGalleryView(discord.ui.View):
|
|
|
|
def __init__(self, n: int):
|
|
|
|
super().__init__(timeout=300, disable_on_timeout=True)
|
|
|
|
self.n = n
|
|
|
|
|
2022-11-16 17:28:47 +00:00
|
|
|
def __rich_repr__(self):
|
|
|
|
yield "n", self.n
|
|
|
|
yield "message", self.message
|
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
@discord.ui.button(label="Previous", style=discord.ButtonStyle.blurple)
|
2022-11-14 17:20:31 +00:00
|
|
|
async def previous_comic(self, _, interaction: discord.Interaction):
|
|
|
|
self.n -= 1
|
|
|
|
await interaction.response.defer()
|
|
|
|
await interaction.edit_original_response(embed=await OtherCog.generate_xkcd(self.n))
|
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
@discord.ui.button(label="Random", style=discord.ButtonStyle.blurple)
|
2022-11-14 17:20:31 +00:00
|
|
|
async def random_comic(self, _, interaction: discord.Interaction):
|
|
|
|
await interaction.response.defer()
|
|
|
|
await interaction.edit_original_response(embed=await OtherCog.generate_xkcd())
|
|
|
|
self.n = random.randint(1, 999)
|
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
@discord.ui.button(label="Next", style=discord.ButtonStyle.blurple)
|
2022-11-14 17:20:31 +00:00
|
|
|
async def next_comic(self, _, interaction: discord.Interaction):
|
|
|
|
self.n += 1
|
|
|
|
await interaction.response.defer()
|
|
|
|
await interaction.edit_original_response(embed=await OtherCog.generate_xkcd(self.n))
|
|
|
|
|
|
|
|
@commands.slash_command()
|
|
|
|
async def xkcd(self, ctx: discord.ApplicationContext, *, number: int = None):
|
|
|
|
"""Shows an XKCD comic"""
|
|
|
|
embed = await self.generate_xkcd(number)
|
|
|
|
view = self.XKCDGalleryView(number)
|
|
|
|
return await ctx.respond(embed=embed, view=view)
|
2022-11-13 23:16:47 +00:00
|
|
|
|
2022-11-18 14:11:53 +00:00
|
|
|
@commands.slash_command()
|
|
|
|
async def sentiment(self, ctx: discord.ApplicationContext, *, text: str):
|
|
|
|
"""Attempts to detect a text's tone"""
|
|
|
|
await ctx.defer()
|
|
|
|
if not text:
|
|
|
|
return await ctx.respond("You need to provide some text to analyse.")
|
|
|
|
result = await self.analyse_text(text)
|
|
|
|
if result is None:
|
|
|
|
return await ctx.edit(content="Failed to load sentiment analysis module.")
|
|
|
|
embed = discord.Embed(title="Sentiment Analysis", color=discord.Colour.embed_background())
|
|
|
|
embed.add_field(name="Positive", value="{:.2%}".format(result[0]))
|
|
|
|
embed.add_field(name="Neutral", value="{:.2%}".format(result[2]))
|
|
|
|
embed.add_field(name="Negative", value="{:.2%}".format(result[1]))
|
|
|
|
embed.add_field(name="Compound", value="{:.2%}".format(result[3]))
|
|
|
|
return await ctx.edit(content=None, embed=embed)
|
|
|
|
|
|
|
|
@commands.message_command(name="Detect Sentiment")
|
|
|
|
async def message_sentiment(self, ctx: discord.ApplicationContext, message: discord.Message):
|
|
|
|
await ctx.defer()
|
|
|
|
text = str(message.clean_content)
|
|
|
|
if not text:
|
|
|
|
return await ctx.respond("You need to provide some text to analyse.")
|
|
|
|
await ctx.respond("Analyzing (this may take some time)...")
|
|
|
|
result = await self.analyse_text(text)
|
|
|
|
if result is None:
|
|
|
|
return await ctx.edit(content="Failed to load sentiment analysis module.")
|
|
|
|
embed = discord.Embed(title="Sentiment Analysis", color=discord.Colour.embed_background())
|
|
|
|
embed.add_field(name="Positive", value="{:.2%}".format(result[0]))
|
|
|
|
embed.add_field(name="Neutral", value="{:.2%}".format(result[2]))
|
|
|
|
embed.add_field(name="Negative", value="{:.2%}".format(result[1]))
|
|
|
|
embed.add_field(name="Compound", value="{:.2%}".format(result[3]))
|
|
|
|
embed.url = message.jump_url
|
|
|
|
return await ctx.edit(content=None, embed=embed)
|
|
|
|
|
2022-12-01 12:22:11 +00:00
|
|
|
corrupt_file = discord.SlashCommandGroup(
|
|
|
|
name="corrupt-file",
|
|
|
|
description="Corrupts files.",
|
|
|
|
)
|
|
|
|
|
|
|
|
@corrupt_file.command(name="generate")
|
|
|
|
async def generate_corrupt_file(self, ctx: discord.ApplicationContext, file_name: str, size_in_megabytes: float):
|
|
|
|
"""Generates a "corrupted" file."""
|
2022-12-08 11:33:58 +00:00
|
|
|
limit_mb = round(ctx.guild.filesize_limit / 1024 / 1024)
|
|
|
|
if size_in_megabytes > limit_mb:
|
2022-12-28 21:14:14 +00:00
|
|
|
return await ctx.respond(
|
|
|
|
f"File size must be less than {limit_mb} MB.\n"
|
|
|
|
"Want to corrupt larger files? see https://github.com/EEKIM10/cli-utils#installing-the-right-way"
|
|
|
|
" (and then run `ruin <file>`)."
|
|
|
|
)
|
2022-12-01 12:22:11 +00:00
|
|
|
await ctx.defer()
|
2022-12-08 11:33:58 +00:00
|
|
|
|
|
|
|
size = max(min(int(size_in_megabytes * 1024 * 1024), ctx.guild.filesize_limit), 1)
|
|
|
|
|
2022-12-01 12:22:11 +00:00
|
|
|
file = io.BytesIO()
|
2022-12-08 11:35:15 +00:00
|
|
|
file.write(os.urandom(size - 1024))
|
2022-12-01 12:22:11 +00:00
|
|
|
file.seek(0)
|
|
|
|
return await ctx.respond(file=discord.File(file, file_name))
|
|
|
|
|
2022-12-01 13:30:04 +00:00
|
|
|
@staticmethod
|
|
|
|
def do_file_corruption(file: io.BytesIO, passes: int, bound_start: int, bound_end: int):
|
|
|
|
for _ in range(passes):
|
|
|
|
file.seek(random.randint(bound_start, bound_end))
|
|
|
|
file.write(os.urandom(random.randint(128, 2048)))
|
|
|
|
file.seek(0)
|
2022-12-01 13:32:57 +00:00
|
|
|
return file
|
2022-12-01 13:30:04 +00:00
|
|
|
|
2022-12-01 12:22:11 +00:00
|
|
|
@corrupt_file.command(name="ruin")
|
2022-12-01 12:42:51 +00:00
|
|
|
async def ruin_corrupt_file(
|
2022-12-28 21:14:14 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
file: discord.Attachment,
|
|
|
|
passes: int = 10,
|
|
|
|
metadata_safety_boundary: float = 5,
|
2022-12-01 12:42:51 +00:00
|
|
|
):
|
2022-12-01 12:22:11 +00:00
|
|
|
"""Takes a file and corrupts parts of it"""
|
|
|
|
await ctx.defer()
|
|
|
|
attachment = file
|
2022-12-01 12:30:48 +00:00
|
|
|
if attachment.size > 8388608:
|
2022-12-28 21:14:14 +00:00
|
|
|
return await ctx.respond(
|
|
|
|
"File is too large. Max size 8mb.\n"
|
|
|
|
"Want to corrupt larger files? see https://github.com/EEKIM10/cli-utils#installing-the-right-way"
|
|
|
|
" (and then run `ruin <file>`)."
|
|
|
|
)
|
2022-12-01 12:42:51 +00:00
|
|
|
bound_pct = attachment.size * (0.01 * metadata_safety_boundary)
|
|
|
|
bound_start = round(bound_pct)
|
|
|
|
bound_end = round(attachment.size - bound_pct)
|
2022-12-01 13:30:04 +00:00
|
|
|
await ctx.respond("Downloading file...")
|
2022-12-01 12:22:11 +00:00
|
|
|
file = io.BytesIO(await file.read())
|
|
|
|
file.seek(0)
|
2022-12-01 13:30:04 +00:00
|
|
|
await ctx.edit(content="Corrupting file...")
|
2022-12-28 21:14:14 +00:00
|
|
|
file = await asyncio.to_thread(self.do_file_corruption, file, passes, bound_start, bound_end)
|
2022-12-01 12:26:29 +00:00
|
|
|
file.seek(0)
|
2022-12-01 13:30:04 +00:00
|
|
|
await ctx.edit(content="Uploading file...")
|
|
|
|
await ctx.edit(content="Here's your corrupted file!", file=discord.File(file, attachment.filename))
|
2022-12-01 12:22:11 +00:00
|
|
|
|
2022-12-29 17:41:41 +00:00
|
|
|
@commands.command(name="kys", aliases=["kill"])
|
2022-12-28 21:14:14 +00:00
|
|
|
@commands.is_owner()
|
|
|
|
async def end_your_life(self, ctx: commands.Context):
|
|
|
|
await ctx.send(":( okay")
|
|
|
|
await self.bot.close()
|
|
|
|
|
2022-12-29 17:41:41 +00:00
|
|
|
@commands.slash_command()
|
|
|
|
async def ip(self, ctx: discord.ApplicationContext, detailed: bool = False, secure: bool = True):
|
|
|
|
"""Gets current IP"""
|
|
|
|
if not await self.bot.is_owner(ctx.user):
|
2023-01-09 14:25:44 +00:00
|
|
|
return await ctx.respond("Internal IP: 0.0.0.0\nExternal IP: 0.0.0.0")
|
2022-12-29 17:41:41 +00:00
|
|
|
|
|
|
|
await ctx.defer(ephemeral=secure)
|
|
|
|
ips = await self.get_interface_ip_addresses()
|
|
|
|
root = Tree("IP Addresses")
|
|
|
|
internal = root.add("Internal")
|
|
|
|
external = root.add("External")
|
|
|
|
interfaces = internal.add("Interfaces")
|
|
|
|
for interface, addresses in ips.items():
|
|
|
|
interface_tree = interfaces.add(interface)
|
|
|
|
for address in addresses:
|
|
|
|
colour = "green" if address["up"] else "red"
|
|
|
|
ip_tree = interface_tree.add(
|
|
|
|
f"[{colour}]" + address["ip"] + ((" (up)" if address["up"] else " (down)") if not detailed else "")
|
|
|
|
)
|
|
|
|
if detailed:
|
|
|
|
ip_tree.add(f"IF Up: {'yes' if address['up'] else 'no'}")
|
|
|
|
ip_tree.add(f"Netmask: {address['netmask']}")
|
|
|
|
ip_tree.add(f"Broadcast: {address['broadcast']}")
|
|
|
|
|
|
|
|
async with aiohttp.ClientSession() as session:
|
|
|
|
try:
|
|
|
|
async with session.get("https://api.ipify.org") as resp:
|
|
|
|
external.add(await resp.text())
|
|
|
|
except aiohttp.ClientError as e:
|
|
|
|
external.add(f" [red]Error: {e}")
|
|
|
|
|
|
|
|
with console.capture() as capture:
|
|
|
|
console.print(root)
|
|
|
|
text = capture.get()
|
|
|
|
paginator = commands.Paginator(prefix="```", suffix="```")
|
|
|
|
for line in text.splitlines():
|
|
|
|
paginator.add_line(line)
|
|
|
|
for page in paginator.pages:
|
|
|
|
await ctx.respond(page, ephemeral=secure)
|
2022-11-13 23:16:47 +00:00
|
|
|
|
2023-01-23 16:57:31 +00:00
|
|
|
@commands.slash_command()
|
|
|
|
async def dig(
|
2023-02-09 13:44:49 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
domain: str,
|
|
|
|
_type: discord.Option(
|
|
|
|
str,
|
|
|
|
name="type",
|
|
|
|
default="A",
|
|
|
|
choices=[
|
|
|
|
"A",
|
|
|
|
"AAAA",
|
|
|
|
"ANY",
|
|
|
|
"AXFR",
|
|
|
|
"CNAME",
|
|
|
|
"HINFO",
|
|
|
|
"LOC",
|
|
|
|
"MX",
|
|
|
|
"NS",
|
|
|
|
"PTR",
|
|
|
|
"SOA",
|
|
|
|
"SRV",
|
|
|
|
"TXT",
|
|
|
|
],
|
|
|
|
),
|
2023-01-23 16:57:31 +00:00
|
|
|
):
|
|
|
|
"""Looks up a domain name"""
|
|
|
|
await ctx.defer()
|
|
|
|
if re.search(r"\s+", domain):
|
|
|
|
return await ctx.respond("Domain name cannot contain spaces.")
|
|
|
|
try:
|
|
|
|
response = await asyncresolver.resolve(
|
|
|
|
domain,
|
|
|
|
_type.upper(),
|
|
|
|
)
|
|
|
|
except Exception as e:
|
|
|
|
return await ctx.respond(f"Error: {e}")
|
|
|
|
res = response
|
|
|
|
tree = Tree(f"DNS Lookup for {domain}")
|
|
|
|
for record in res:
|
|
|
|
record_tree = tree.add(f"{record.rdtype.name} Record")
|
|
|
|
record_tree.add(f"Name: {res.name}")
|
|
|
|
record_tree.add(f"Value: {record.to_text()}")
|
|
|
|
with console.capture() as capture:
|
|
|
|
console.print(tree)
|
|
|
|
text = capture.get()
|
|
|
|
paginator = commands.Paginator(prefix="```", suffix="```")
|
|
|
|
for line in text.splitlines():
|
|
|
|
paginator.add_line(line)
|
|
|
|
paginator.add_line(empty=True)
|
|
|
|
paginator.add_line(f"Exit code: {0}")
|
|
|
|
paginator.add_line(f"DNS Server used: {res.nameserver}")
|
|
|
|
for page in paginator.pages:
|
|
|
|
await ctx.respond(page)
|
|
|
|
|
2023-01-29 19:17:44 +00:00
|
|
|
@commands.slash_command()
|
|
|
|
async def traceroute(
|
2023-02-09 13:44:49 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
url: str,
|
|
|
|
port: discord.Option(int, description="Port to use", default=None),
|
|
|
|
ping_type: discord.Option(
|
|
|
|
str,
|
|
|
|
name="ping-type",
|
|
|
|
description="Type of ping to use. See `traceroute --help`",
|
|
|
|
choices=["icmp", "tcp", "udp", "udplite", "dccp", "default"],
|
|
|
|
default="default",
|
|
|
|
),
|
|
|
|
use_ip_version: discord.Option(
|
|
|
|
str, name="ip-version", description="IP version to use.", choices=["ipv4", "ipv6"], default="ipv4"
|
|
|
|
),
|
|
|
|
max_ttl: discord.Option(int, name="ttl", description="Max number of hops", default=30),
|
2023-01-29 19:17:44 +00:00
|
|
|
):
|
|
|
|
"""Performs a traceroute request."""
|
|
|
|
await ctx.defer()
|
|
|
|
if re.search(r"\s+", url):
|
|
|
|
return await ctx.respond("URL cannot contain spaces.")
|
|
|
|
|
2023-02-09 13:44:49 +00:00
|
|
|
args = ["sudo", "-E", "-n", "traceroute"]
|
2023-01-29 19:17:44 +00:00
|
|
|
flags = {
|
|
|
|
"ping_type": {
|
|
|
|
"icmp": "-I",
|
|
|
|
"tcp": "-T",
|
|
|
|
"udp": "-U",
|
|
|
|
"udplite": "-UL",
|
|
|
|
"dccp": "-D",
|
|
|
|
},
|
2023-02-09 13:44:49 +00:00
|
|
|
"use_ip_version": {"ipv4": "-4", "ipv6": "-6"},
|
2023-01-29 19:17:44 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if ping_type != "default":
|
|
|
|
args.append(flags["ping_type"][ping_type])
|
|
|
|
else:
|
|
|
|
args = args[3:] # removes sudo
|
|
|
|
args.append(flags["use_ip_version"][use_ip_version])
|
|
|
|
args.append("-m")
|
|
|
|
args.append(str(max_ttl))
|
|
|
|
if port is not None:
|
|
|
|
args.append("-p")
|
|
|
|
args.append(str(port))
|
|
|
|
args.append(url)
|
|
|
|
paginator = commands.Paginator()
|
|
|
|
paginator.add_line(f"Running command: {' '.join(args[3 if args[0] == 'sudo' else 0:])}")
|
|
|
|
paginator.add_line(empty=True)
|
|
|
|
try:
|
|
|
|
start = time_ns()
|
|
|
|
process = await asyncio.create_subprocess_exec(
|
|
|
|
args[0],
|
|
|
|
*args[1:],
|
|
|
|
stdout=asyncio.subprocess.PIPE,
|
|
|
|
stderr=asyncio.subprocess.PIPE,
|
|
|
|
)
|
|
|
|
await process.wait()
|
|
|
|
stdout, stderr = await process.communicate()
|
|
|
|
end = time_ns()
|
|
|
|
time_taken_in_ms = (end - start) / 1000000
|
|
|
|
if stdout:
|
|
|
|
for line in stdout.splitlines():
|
|
|
|
paginator.add_line(line.decode())
|
|
|
|
if stderr:
|
|
|
|
for line in stderr.splitlines():
|
|
|
|
paginator.add_line(line.decode())
|
|
|
|
paginator.add_line(empty=True)
|
|
|
|
paginator.add_line(f"Exit code: {process.returncode}")
|
|
|
|
paginator.add_line(f"Time taken: {time_taken_in_ms:,.1f}ms")
|
|
|
|
except Exception as e:
|
|
|
|
paginator.add_line(f"Error: {e}")
|
|
|
|
for page in paginator.pages:
|
|
|
|
await ctx.respond(page)
|
|
|
|
|
2023-01-03 13:56:23 +00:00
|
|
|
@commands.slash_command()
|
2023-01-23 15:22:38 +00:00
|
|
|
@commands.max_concurrency(1, commands.BucketType.user)
|
|
|
|
@commands.cooldown(1, 30, commands.BucketType.user)
|
2023-01-03 13:56:23 +00:00
|
|
|
async def screenshot(
|
2023-01-03 15:20:50 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
url: str,
|
2023-02-02 12:16:02 +00:00
|
|
|
browser: discord.Option(str, description="Browser to use", choices=["chrome", "firefox"], default="chrome"),
|
|
|
|
render_timeout: discord.Option(int, name="render-timeout", description="Timeout for rendering", default=3),
|
|
|
|
load_timeout: discord.Option(int, name="load-timeout", description="Timeout for page load", default=60),
|
2023-01-15 19:39:07 +00:00
|
|
|
window_height: discord.Option(
|
|
|
|
int, name="window-height", description="the height of the window in pixels", default=1920
|
|
|
|
),
|
|
|
|
window_width: discord.Option(
|
|
|
|
int, name="window-width", description="the width of the window in pixels", default=1080
|
|
|
|
),
|
|
|
|
capture_whole_page: discord.Option(
|
|
|
|
bool,
|
|
|
|
name="capture-full-page",
|
|
|
|
description="(firefox only) whether to capture the full page or just the viewport.",
|
|
|
|
default=False,
|
2023-01-18 20:54:48 +00:00
|
|
|
),
|
2023-01-03 13:56:23 +00:00
|
|
|
):
|
|
|
|
"""Takes a screenshot of a URL"""
|
2023-03-14 10:44:56 +00:00
|
|
|
if capture_whole_page and browser != "firefox":
|
|
|
|
return await ctx.respond("The capture-full-page option is only available for firefox.")
|
2023-01-16 15:51:03 +00:00
|
|
|
window_width = max(min(1080 * 6, window_width), 1080 // 6)
|
|
|
|
window_height = max(min(1920 * 6, window_height), 1920 // 6)
|
2023-01-03 13:56:23 +00:00
|
|
|
await ctx.defer()
|
2023-05-12 11:05:57 +01:00
|
|
|
# if ctx.user.id == 1019233057519177778 and ctx.me.guild_permissions.moderate_members:
|
|
|
|
# if ctx.user.communication_disabled_until is None:
|
|
|
|
# await ctx.user.timeout_for(timedelta(minutes=2), reason="no")
|
2023-01-03 14:43:49 +00:00
|
|
|
url = urlparse(url)
|
2023-01-16 09:59:37 +00:00
|
|
|
if not url.scheme:
|
2023-01-16 10:57:42 +00:00
|
|
|
if "/" in url.path:
|
|
|
|
hostname, path = url.path.split("/", 1)
|
|
|
|
else:
|
|
|
|
hostname = url.path
|
|
|
|
path = ""
|
2023-01-16 10:59:05 +00:00
|
|
|
url = url._replace(scheme="http", netloc=hostname, path=path)
|
2023-01-16 09:59:37 +00:00
|
|
|
|
2023-01-09 14:25:44 +00:00
|
|
|
friendly_url = textwrap.shorten(url.geturl(), 100)
|
2023-01-03 14:43:49 +00:00
|
|
|
|
2023-01-23 15:10:21 +00:00
|
|
|
await ctx.edit(content=f"Preparing to screenshot <{friendly_url}>... (0%, checking filters)")
|
2023-01-03 14:46:05 +00:00
|
|
|
|
2023-01-09 14:36:32 +00:00
|
|
|
async def blacklist_check() -> bool | str:
|
2023-03-22 14:20:20 +00:00
|
|
|
async with aiofiles.open("./assets/domains.txt") as blacklist:
|
2023-03-07 15:10:56 +00:00
|
|
|
for ln in await blacklist.readlines():
|
2023-01-23 15:10:21 +00:00
|
|
|
if not ln.strip():
|
2023-01-09 14:25:44 +00:00
|
|
|
continue
|
2023-01-23 15:10:21 +00:00
|
|
|
if re.match(ln.strip(), url.netloc):
|
2023-01-09 14:36:32 +00:00
|
|
|
return "Local blacklist"
|
2023-01-09 14:25:44 +00:00
|
|
|
return True
|
|
|
|
|
2023-01-09 14:36:32 +00:00
|
|
|
async def dns_check() -> Optional[bool | str]:
|
2023-01-09 14:25:44 +00:00
|
|
|
try:
|
2023-04-18 16:58:18 +01:00
|
|
|
# noinspection PyTypeChecker
|
2023-01-09 14:25:44 +00:00
|
|
|
for response in await asyncio.to_thread(dns.resolver.resolve, url.hostname, "A"):
|
|
|
|
if response.address == "0.0.0.0":
|
2023-01-09 14:36:32 +00:00
|
|
|
return "DNS blacklist"
|
2023-01-16 10:51:54 +00:00
|
|
|
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.LifetimeTimeout, AttributeError):
|
2023-01-23 15:10:21 +00:00
|
|
|
return "Invalid domain or DNS error"
|
2023-03-07 15:02:34 +00:00
|
|
|
return True
|
2023-01-09 14:25:44 +00:00
|
|
|
|
|
|
|
done, pending = await asyncio.wait(
|
|
|
|
[
|
|
|
|
asyncio.create_task(blacklist_check(), name="local"),
|
|
|
|
asyncio.create_task(dns_check(), name="dns"),
|
|
|
|
],
|
|
|
|
return_when=asyncio.FIRST_COMPLETED,
|
2023-01-03 15:20:50 +00:00
|
|
|
)
|
2023-01-09 14:43:22 +00:00
|
|
|
done_tasks = done
|
2023-01-09 14:44:30 +00:00
|
|
|
try:
|
2023-01-09 14:45:57 +00:00
|
|
|
done = done_tasks.pop()
|
2023-01-09 14:44:30 +00:00
|
|
|
except KeyError:
|
2023-01-15 19:39:07 +00:00
|
|
|
return await ctx.respond("Something went wrong. Try again?\n")
|
2023-01-09 14:36:32 +00:00
|
|
|
result = await done
|
2023-03-07 15:02:34 +00:00
|
|
|
if not result:
|
2023-01-09 14:25:44 +00:00
|
|
|
return await ctx.edit(
|
|
|
|
content="That domain is blacklisted, doesn't exist, or there was no answer from the DNS server."
|
2023-01-15 19:39:07 +00:00
|
|
|
f" ({result!r})"
|
2023-01-09 14:25:44 +00:00
|
|
|
)
|
|
|
|
|
2023-01-23 15:10:21 +00:00
|
|
|
await ctx.edit(content=f"Preparing to screenshot <{friendly_url}>... (16%, checking filters)")
|
2023-01-09 14:45:57 +00:00
|
|
|
okay = await (pending or done_tasks).pop()
|
2023-03-07 15:02:34 +00:00
|
|
|
if not okay:
|
2023-01-09 14:25:44 +00:00
|
|
|
return await ctx.edit(
|
|
|
|
content="That domain is blacklisted, doesn't exist, or there was no answer from the DNS server."
|
2023-03-14 10:44:16 +00:00
|
|
|
f" ({okay!r})"
|
2023-01-09 14:25:44 +00:00
|
|
|
)
|
|
|
|
|
2023-01-23 15:10:21 +00:00
|
|
|
await ctx.edit(content=f"Screenshotting {textwrap.shorten(url.geturl(), 100)}... (33%, initializing browser)")
|
2023-01-03 13:56:23 +00:00
|
|
|
try:
|
2023-01-18 15:33:51 +00:00
|
|
|
async with self.lock:
|
|
|
|
screenshot, driver, fetch_time, screenshot_time = await self.screenshot_website(
|
2023-02-09 13:44:49 +00:00
|
|
|
ctx,
|
|
|
|
url.geturl(),
|
|
|
|
browser,
|
|
|
|
render_timeout,
|
|
|
|
load_timeout,
|
|
|
|
window_height,
|
|
|
|
window_width,
|
|
|
|
capture_whole_page,
|
2023-01-18 15:33:51 +00:00
|
|
|
)
|
2023-01-16 15:51:03 +00:00
|
|
|
except TimeoutError:
|
|
|
|
return await ctx.edit(content="Rendering screenshot timed out. Try using a smaller resolution.")
|
|
|
|
except WebDriverException as e:
|
|
|
|
paginator = commands.Paginator(prefix="```", suffix="```")
|
|
|
|
paginator.add_line("WebDriver Error (did you pass extreme or invalid command options?)")
|
|
|
|
paginator.add_line("Traceback:", empty=True)
|
|
|
|
for line in e.msg.splitlines():
|
|
|
|
paginator.add_line(line)
|
|
|
|
for page in paginator.pages:
|
|
|
|
await ctx.respond(page)
|
2023-01-03 13:56:23 +00:00
|
|
|
except Exception as e:
|
2023-01-03 14:32:21 +00:00
|
|
|
console.print_exception()
|
2023-01-16 15:51:03 +00:00
|
|
|
return await ctx.edit(content=f"Failed: {e}", delete_after=30)
|
2023-01-03 13:56:23 +00:00
|
|
|
else:
|
2023-01-23 15:10:21 +00:00
|
|
|
await ctx.edit(content=f"Screenshotting <{friendly_url}>... (99%, uploading image)")
|
2023-01-16 15:51:03 +00:00
|
|
|
await ctx.edit(
|
|
|
|
content="Here's your screenshot!\n"
|
2023-01-18 20:54:48 +00:00
|
|
|
"Details:\n"
|
2023-09-03 17:28:19 +01:00
|
|
|
f"* Browser: {driver}\n"
|
|
|
|
f"* Resolution: {window_height}x{window_width} ({window_width*window_height:,} pixels)\n"
|
|
|
|
f"* URL: <{friendly_url}>\n"
|
|
|
|
f"* Load time: {fetch_time:.2f}ms\n"
|
|
|
|
f"* Screenshot render time: {screenshot_time:.2f}ms\n"
|
2023-11-04 17:18:39 +00:00
|
|
|
f"* Total time: {(fetch_time + screenshot_time):.2f}ms\n"
|
|
|
|
+ (
|
|
|
|
"* Probability of being scat or something else horrifying: 100%"
|
|
|
|
if ctx.user.id == 1019233057519177778
|
|
|
|
else ""
|
2023-03-07 15:07:00 +00:00
|
|
|
),
|
2023-01-18 20:54:48 +00:00
|
|
|
file=screenshot,
|
2023-01-16 15:51:03 +00:00
|
|
|
)
|
2023-01-03 13:56:23 +00:00
|
|
|
|
2023-01-03 14:43:49 +00:00
|
|
|
domains = discord.SlashCommandGroup("domains", "Commands for managing domains")
|
|
|
|
|
|
|
|
@domains.command(name="add")
|
|
|
|
async def add_domain(self, ctx: discord.ApplicationContext, domain: str):
|
|
|
|
"""Adds a domain to the blacklist"""
|
|
|
|
await ctx.defer()
|
|
|
|
if not await self.bot.is_owner(ctx.user):
|
|
|
|
return await ctx.respond("You are not allowed to do that.")
|
2023-03-22 14:20:20 +00:00
|
|
|
async with aiofiles.open("./assets/domains.txt", "a") as blacklist:
|
2023-01-03 15:17:09 +00:00
|
|
|
await blacklist.write(domain.lower() + "\n")
|
2023-01-03 14:43:49 +00:00
|
|
|
await ctx.respond("Added domain to blacklist.")
|
|
|
|
|
|
|
|
@domains.command(name="remove")
|
|
|
|
async def remove_domain(self, ctx: discord.ApplicationContext, domain: str):
|
|
|
|
"""Removes a domain from the blacklist"""
|
|
|
|
await ctx.defer()
|
|
|
|
if not await self.bot.is_owner(ctx.user):
|
|
|
|
return await ctx.respond("You are not allowed to do that.")
|
2023-03-22 14:20:20 +00:00
|
|
|
async with aiofiles.open("./assets/domains.txt") as blacklist:
|
2023-01-03 15:17:09 +00:00
|
|
|
lines = await blacklist.readlines()
|
2023-03-22 14:20:20 +00:00
|
|
|
async with aiofiles.open("./assets/domains.txt", "w") as blacklist:
|
2023-01-03 14:43:49 +00:00
|
|
|
for line in lines:
|
|
|
|
if line.strip() != domain.lower():
|
2023-01-03 15:17:09 +00:00
|
|
|
await blacklist.write(line)
|
2023-01-03 14:43:49 +00:00
|
|
|
await ctx.respond("Removed domain from blacklist.")
|
|
|
|
|
2023-11-04 16:47:14 +00:00
|
|
|
@staticmethod
|
2023-11-04 16:54:42 +00:00
|
|
|
async def check_proxy(url: str = "socks5://localhost:1090"):
|
2023-11-06 20:31:10 +00:00
|
|
|
client = httpx.AsyncClient(http2=True, timeout=3)
|
2023-11-04 16:54:42 +00:00
|
|
|
my_ip4 = (await client.get("https://api.ipify.org")).text
|
2023-11-04 16:56:03 +00:00
|
|
|
real_ips = [my_ip4]
|
2023-11-04 16:54:42 +00:00
|
|
|
await client.aclose()
|
2023-11-04 16:47:14 +00:00
|
|
|
|
|
|
|
# Check the proxy
|
2023-11-06 20:31:10 +00:00
|
|
|
client = httpx.AsyncClient(http2=True, proxies=url, timeout=3)
|
2023-11-04 16:54:42 +00:00
|
|
|
try:
|
|
|
|
response = await client.get(
|
|
|
|
"https://1.1.1.1/cdn-cgi/trace",
|
|
|
|
)
|
|
|
|
response.raise_for_status()
|
|
|
|
for line in response.text.splitlines():
|
|
|
|
if line.startswith("ip"):
|
|
|
|
if any(x in line for x in real_ips):
|
|
|
|
return 1
|
|
|
|
except (httpx.TransportError, httpx.HTTPStatusError):
|
|
|
|
return 2
|
|
|
|
await client.aclose()
|
2023-11-04 16:55:27 +00:00
|
|
|
return 0
|
2023-11-04 16:47:14 +00:00
|
|
|
|
2023-08-28 21:46:13 +01:00
|
|
|
@commands.slash_command(name="yt-dl")
|
2023-04-29 02:25:19 +01:00
|
|
|
@commands.max_concurrency(1, commands.BucketType.user)
|
|
|
|
async def yt_dl_2(
|
2023-11-04 17:18:39 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
url: discord.Option(description="The URL to download.", type=str),
|
|
|
|
_format: discord.Option(
|
|
|
|
name="format", description="The format to download.", type=str, autocomplete=format_autocomplete, default=""
|
|
|
|
) = "",
|
|
|
|
extract_audio: bool = False,
|
|
|
|
cookies_txt: discord.Attachment = None,
|
|
|
|
disable_filesize_buffer: bool = False,
|
2023-04-29 02:25:19 +01:00
|
|
|
):
|
|
|
|
"""Downloads a video using youtube-dl"""
|
2023-10-30 18:32:19 +00:00
|
|
|
cookies = io.StringIO()
|
|
|
|
cookies.seek(0)
|
|
|
|
|
2023-04-29 02:25:19 +01:00
|
|
|
await ctx.defer()
|
2023-11-04 16:47:14 +00:00
|
|
|
from urllib.parse import parse_qs
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-11-05 15:23:29 +00:00
|
|
|
MAX_SIZE_MB = 20
|
|
|
|
REAL_MAX_SIZE_MB = 25
|
2023-11-04 16:47:14 +00:00
|
|
|
if disable_filesize_buffer is False:
|
2023-11-04 17:34:51 +00:00
|
|
|
MAX_SIZE_MB *= 0.8
|
2023-05-11 18:55:58 +01:00
|
|
|
BYTES_REMAINING = (MAX_SIZE_MB - 0.256) * 1024 * 1024
|
2023-04-29 02:59:17 +01:00
|
|
|
import yt_dlp
|
2023-04-29 02:25:19 +01:00
|
|
|
|
2023-05-17 19:50:24 +01:00
|
|
|
with tempfile.TemporaryDirectory(prefix="jimmy-ytdl-") as tempdir_str:
|
2023-04-29 02:25:19 +01:00
|
|
|
tempdir = Path(tempdir_str).resolve()
|
|
|
|
stdout = tempdir / "stdout.txt"
|
|
|
|
stderr = tempdir / "stderr.txt"
|
|
|
|
|
2023-11-04 17:18:39 +00:00
|
|
|
default_cookies_txt = Path.cwd() / "jimmy-cookies.txt"
|
2023-10-30 18:40:44 +00:00
|
|
|
real_cookies_txt = tempdir / "cookies.txt"
|
|
|
|
if cookies_txt is not None:
|
|
|
|
await cookies_txt.save(fp=real_cookies_txt)
|
|
|
|
else:
|
|
|
|
default_cookies_txt.touch()
|
|
|
|
shutil.copy(default_cookies_txt, real_cookies_txt)
|
|
|
|
|
2023-04-29 02:25:19 +01:00
|
|
|
class Logger:
|
|
|
|
def __init__(self):
|
|
|
|
self.stdout = open(stdout, "w+")
|
|
|
|
self.stderr = open(stderr, "w+")
|
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
self.stdout.close()
|
|
|
|
self.stderr.close()
|
|
|
|
|
|
|
|
def debug(self, msg: str):
|
|
|
|
if msg.startswith("[debug]"):
|
|
|
|
return
|
|
|
|
self.info(msg)
|
|
|
|
|
|
|
|
def info(self, msg: str):
|
|
|
|
self.stdout.write(msg + "\n")
|
|
|
|
self.stdout.flush()
|
|
|
|
|
|
|
|
def warning(self, msg: str):
|
|
|
|
self.stderr.write(msg + "\n")
|
|
|
|
self.stderr.flush()
|
|
|
|
|
|
|
|
def error(self, msg: str):
|
|
|
|
self.stderr.write(msg + "\n")
|
|
|
|
self.stderr.flush()
|
|
|
|
|
|
|
|
logger = Logger()
|
|
|
|
paths = {
|
|
|
|
target: str(tempdir)
|
|
|
|
for target in (
|
|
|
|
"home",
|
|
|
|
"temp",
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-05-17 20:05:18 +01:00
|
|
|
args = {
|
|
|
|
"windowsfilenames": True,
|
|
|
|
"restrictfilenames": True,
|
|
|
|
"noplaylist": True,
|
|
|
|
"nocheckcertificate": True,
|
|
|
|
"no_color": True,
|
|
|
|
"noprogress": True,
|
|
|
|
"logger": logger,
|
|
|
|
"format": _format or None,
|
|
|
|
"paths": paths,
|
|
|
|
"outtmpl": f"{ctx.user.id}-%(title).50s.%(ext)s",
|
|
|
|
"trim_file_name": 128,
|
|
|
|
"extract_audio": extract_audio,
|
2023-09-09 22:08:18 +01:00
|
|
|
"format_sort": [
|
2023-11-04 17:18:39 +00:00
|
|
|
"vcodec:h264",
|
|
|
|
"acodec:aac",
|
|
|
|
"vcodec:vp9",
|
|
|
|
"acodec:opus",
|
2023-09-09 22:08:18 +01:00
|
|
|
"acodec:vorbis",
|
2023-11-04 17:18:39 +00:00
|
|
|
"vcodec:vp8",
|
|
|
|
"ext",
|
2023-09-09 22:08:18 +01:00
|
|
|
],
|
|
|
|
"merge_output_format": "webm/mp4/mov/flv/avi/ogg/m4a/wav/mp3/opus/mka/mkv",
|
2023-09-03 17:24:33 +01:00
|
|
|
"source_address": "0.0.0.0",
|
2023-11-04 17:18:39 +00:00
|
|
|
"cookiefile": str(real_cookies_txt.resolve().absolute()),
|
2023-11-04 17:31:11 +00:00
|
|
|
"concurrent_fragment_downloads": 4
|
2023-05-17 20:05:18 +01:00
|
|
|
}
|
2023-11-04 16:47:14 +00:00
|
|
|
description = ""
|
2023-11-04 16:51:09 +00:00
|
|
|
proxy_url = "socks5://localhost:1090"
|
2023-11-04 16:47:14 +00:00
|
|
|
try:
|
2023-11-04 16:51:09 +00:00
|
|
|
proxy_down = await self.check_proxy("socks5://localhost:1090")
|
2023-11-04 16:47:14 +00:00
|
|
|
if proxy_down > 0:
|
|
|
|
if proxy_down == 1:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (SHRoNK) Proxy check leaked IP - trying backup proxy.\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
elif proxy_down == 2:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (SHRoNK) Proxy connection failed - trying backup proxy.\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
else:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (SHRoNK) Unknown proxy error - trying backup proxy.\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
|
2023-11-04 16:51:09 +00:00
|
|
|
proxy_down = await self.check_proxy("socks5://localhost:1080")
|
2023-11-04 16:47:14 +00:00
|
|
|
if proxy_down > 0:
|
|
|
|
if proxy_down == 1:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (NexBox) Proxy check leaked IP..\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
elif proxy_down == 2:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (NexBox) Proxy connection failed.\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
else:
|
2023-11-04 17:18:39 +00:00
|
|
|
description += ":warning: (NexBox) Unknown proxy error.\n"
|
2023-11-04 16:47:14 +00:00
|
|
|
proxy_url = None
|
|
|
|
else:
|
2023-11-04 16:51:09 +00:00
|
|
|
proxy_url = "socks5://localhost:1080"
|
2023-11-04 17:18:39 +00:00
|
|
|
description += "\N{white heavy check mark} Using fallback NexBox proxy."
|
|
|
|
else:
|
|
|
|
description += "\N{white heavy check mark} Using the SHRoNK proxy."
|
2023-11-04 16:47:14 +00:00
|
|
|
except Exception as e:
|
|
|
|
traceback.print_exc()
|
|
|
|
description += f":warning: Failed to check proxy (`{e}`). Going unproxied."
|
|
|
|
if proxy_url:
|
|
|
|
args["proxy"] = proxy_url
|
2023-05-17 20:05:18 +01:00
|
|
|
if extract_audio:
|
|
|
|
args["postprocessors"] = [
|
2023-11-04 17:18:39 +00:00
|
|
|
{"key": "FFmpegExtractAudio", "preferredquality": "24", "preferredcodec": "opus"}
|
2023-05-17 20:05:18 +01:00
|
|
|
]
|
2023-10-30 19:22:48 +00:00
|
|
|
args["format"] = args["format"] or f"(ba/b)[filesize<={MAX_SIZE_MB}M]/ba/b"
|
2023-05-17 20:05:18 +01:00
|
|
|
|
|
|
|
if args["format"] is None:
|
2023-09-09 22:18:20 +01:00
|
|
|
args["format"] = f"(bv+ba/b)[vcodec!=h265][vcodec!=av01][filesize<={MAX_SIZE_MB}M]/b"
|
2023-08-28 21:46:13 +01:00
|
|
|
|
2023-05-17 20:05:18 +01:00
|
|
|
with yt_dlp.YoutubeDL(args) as downloader:
|
2023-04-29 02:25:19 +01:00
|
|
|
try:
|
2023-11-04 17:34:51 +00:00
|
|
|
extracted_info = await asyncio.to_thread(downloader.extract_info, url, download=False)
|
2023-11-04 17:18:39 +00:00
|
|
|
except yt_dlp.utils.DownloadError:
|
2023-11-10 22:30:15 +00:00
|
|
|
title = "error"
|
2023-11-06 20:32:43 +00:00
|
|
|
thumbnail_url = webpage_url = discord.Embed.Empty
|
2023-11-04 17:18:39 +00:00
|
|
|
else:
|
|
|
|
title = extracted_info.get("title", url)
|
2023-11-06 20:32:43 +00:00
|
|
|
title = textwrap.shorten(title, 100)
|
2023-11-04 17:18:39 +00:00
|
|
|
thumbnail_url = extracted_info.get("thumbnail") or discord.Embed.Empty
|
2023-11-06 20:32:43 +00:00
|
|
|
webpage_url = extracted_info.get("webpage_url") or discord.Embed.Empty
|
2023-11-04 17:28:33 +00:00
|
|
|
|
|
|
|
chosen_format = extracted_info.get("format")
|
|
|
|
chosen_format_id = extracted_info.get("format_id")
|
|
|
|
final_extension = extracted_info.get("ext")
|
|
|
|
format_note = extracted_info.get("format_note", "%s (%s)" % (chosen_format, chosen_format_id))
|
|
|
|
resolution = extracted_info.get("resolution")
|
|
|
|
fps = extracted_info.get("fps")
|
|
|
|
vcodec = extracted_info.get("vcodec")
|
|
|
|
acodec = extracted_info.get("acodec")
|
|
|
|
|
|
|
|
lines = []
|
|
|
|
if chosen_format and chosen_format_id:
|
|
|
|
lines.append(
|
|
|
|
"* Chosen format: `%s` (`%s`)" % (chosen_format, chosen_format_id),
|
|
|
|
)
|
|
|
|
if format_note:
|
|
|
|
lines.append(
|
|
|
|
"* Format note: %r" % format_note
|
|
|
|
)
|
|
|
|
if final_extension:
|
|
|
|
lines.append(
|
|
|
|
"* File extension: " + final_extension
|
|
|
|
)
|
|
|
|
if resolution:
|
|
|
|
_s = resolution
|
|
|
|
if fps:
|
|
|
|
_s += " @ %s FPS" % fps
|
|
|
|
lines.append(
|
|
|
|
"* Resolution: " + _s
|
|
|
|
)
|
|
|
|
if vcodec or acodec:
|
|
|
|
lines.append("%s+%s" % (vcodec or "N/A", acodec or "N/A"))
|
|
|
|
|
|
|
|
if lines:
|
|
|
|
description += "\n"
|
|
|
|
description += "\n".join(lines)
|
|
|
|
|
2023-11-04 17:18:39 +00:00
|
|
|
try:
|
|
|
|
embed = discord.Embed(
|
|
|
|
title="Downloading %r..." % title,
|
|
|
|
description=description,
|
|
|
|
colour=discord.Colour.blurple(),
|
|
|
|
url=webpage_url,
|
2023-04-29 02:25:19 +01:00
|
|
|
)
|
2023-11-04 17:18:39 +00:00
|
|
|
embed.set_thumbnail(url=thumbnail_url)
|
|
|
|
await ctx.respond(embed=embed)
|
2023-11-04 17:34:51 +00:00
|
|
|
await asyncio.to_thread(partial(downloader.download, [url]))
|
2023-04-29 02:25:19 +01:00
|
|
|
except yt_dlp.utils.DownloadError as e:
|
2023-09-09 21:54:13 +01:00
|
|
|
traceback.print_exc()
|
2023-04-29 02:25:19 +01:00
|
|
|
return await ctx.edit(
|
|
|
|
embed=discord.Embed(
|
|
|
|
title="Error",
|
|
|
|
description=f"Download failed:\n```\n{e}\n```",
|
2023-11-04 17:18:39 +00:00
|
|
|
colour=discord.Colour.red(),
|
|
|
|
url=webpage_url,
|
2023-09-09 21:50:36 +01:00
|
|
|
),
|
2023-11-04 17:18:39 +00:00
|
|
|
delete_after=60,
|
2023-04-29 02:25:19 +01:00
|
|
|
)
|
|
|
|
else:
|
2023-09-09 21:50:36 +01:00
|
|
|
parsed_qs = parse_qs(url)
|
2023-11-04 17:18:39 +00:00
|
|
|
if "t" in parsed_qs and parsed_qs["t"] and parsed_qs["t"][0].isdigit():
|
2023-09-09 21:50:36 +01:00
|
|
|
# Assume is timestamp
|
2023-11-04 17:18:39 +00:00
|
|
|
timestamp = round(float(parsed_qs["t"][0]))
|
2023-09-09 22:18:20 +01:00
|
|
|
end_timestamp = None
|
2023-09-09 23:49:10 +01:00
|
|
|
if len(parsed_qs["t"]) >= 2:
|
2023-11-04 17:18:39 +00:00
|
|
|
end_timestamp = round(float(parsed_qs["t"][1]))
|
2023-09-09 22:18:20 +01:00
|
|
|
if end_timestamp < timestamp:
|
2023-11-04 17:18:39 +00:00
|
|
|
end_timestamp, timestamp = reversed((end_timestamp, timestamp))
|
2023-09-09 22:18:20 +01:00
|
|
|
_end = "to %s" % end_timestamp if len(parsed_qs["t"]) == 2 else "onward"
|
2023-09-09 21:50:36 +01:00
|
|
|
embed = discord.Embed(
|
|
|
|
title="Trimming...",
|
2023-09-09 22:18:20 +01:00
|
|
|
description=f"Trimming from {timestamp} seconds {_end}.\nThis may take a while.",
|
2023-11-04 17:18:39 +00:00
|
|
|
colour=discord.Colour.blurple(),
|
2023-09-09 21:50:36 +01:00
|
|
|
)
|
|
|
|
await ctx.edit(embed=embed)
|
|
|
|
for file in tempdir.glob("%s-*" % ctx.user.id):
|
|
|
|
try:
|
2023-09-09 22:11:47 +01:00
|
|
|
bak = file.with_name(file.name + "-" + os.urandom(4).hex())
|
2023-09-09 21:50:36 +01:00
|
|
|
shutil.copy(str(file), str(bak))
|
2023-09-09 22:13:11 +01:00
|
|
|
minutes, seconds = divmod(timestamp, 60)
|
|
|
|
hours, minutes = divmod(minutes, 60)
|
2023-09-09 22:18:20 +01:00
|
|
|
_args = [
|
|
|
|
"ffmpeg",
|
|
|
|
"-i",
|
|
|
|
str(bak),
|
|
|
|
"-ss",
|
|
|
|
"{!s}:{!s}:{!s}".format(*map(round, (hours, minutes, seconds))),
|
|
|
|
"-y",
|
2023-09-09 22:54:52 +01:00
|
|
|
"-c",
|
|
|
|
"copy",
|
2023-11-04 17:18:39 +00:00
|
|
|
str(file),
|
2023-09-09 22:18:20 +01:00
|
|
|
]
|
2023-09-09 22:20:18 +01:00
|
|
|
if end_timestamp is not None:
|
|
|
|
minutes, seconds = divmod(end_timestamp, 60)
|
|
|
|
hours, minutes = divmod(minutes, 60)
|
|
|
|
_args.insert(5, "-to")
|
|
|
|
_args.insert(6, "{!s}:{!s}:{!s}".format(*map(round, (hours, minutes, seconds))))
|
2023-09-09 22:18:20 +01:00
|
|
|
|
2023-09-09 21:50:36 +01:00
|
|
|
await self.bot.loop.run_in_executor(
|
2023-11-04 17:18:39 +00:00
|
|
|
None, partial(subprocess.run, _args, check=True, capture_output=True)
|
2023-09-09 21:50:36 +01:00
|
|
|
)
|
2023-09-09 22:41:39 +01:00
|
|
|
bak.unlink(True)
|
2023-09-09 21:50:36 +01:00
|
|
|
except subprocess.CalledProcessError as e:
|
2023-09-09 21:54:13 +01:00
|
|
|
traceback.print_exc()
|
2023-09-09 21:50:36 +01:00
|
|
|
return await ctx.edit(
|
|
|
|
embed=discord.Embed(
|
|
|
|
title="Error",
|
|
|
|
description=f"Trimming failed:\n```\n{e}\n```",
|
2023-11-04 17:18:39 +00:00
|
|
|
colour=discord.Colour.red(),
|
2023-09-09 21:50:36 +01:00
|
|
|
),
|
2023-11-04 17:18:39 +00:00
|
|
|
delete_after=30,
|
2023-09-09 21:50:36 +01:00
|
|
|
)
|
|
|
|
|
2023-04-29 02:25:19 +01:00
|
|
|
embed = discord.Embed(
|
2023-11-04 17:18:39 +00:00
|
|
|
title="Downloaded %r!" % title, description="", colour=discord.Colour.green(), url=webpage_url
|
2023-04-29 02:25:19 +01:00
|
|
|
)
|
2023-11-04 17:18:39 +00:00
|
|
|
embed.set_thumbnail(url=thumbnail_url)
|
2023-04-29 02:25:19 +01:00
|
|
|
del logger
|
|
|
|
files = []
|
|
|
|
|
|
|
|
for file in tempdir.glob(f"{ctx.user.id}-*"):
|
|
|
|
if file.stat().st_size == 0:
|
|
|
|
embed.description += f"\N{warning sign}\ufe0f {file.name} is empty.\n"
|
|
|
|
continue
|
|
|
|
st = file.stat().st_size
|
2023-11-04 17:01:21 +00:00
|
|
|
if st / 1024 / 1024 >= REAL_MAX_SIZE_MB:
|
2023-04-29 02:25:19 +01:00
|
|
|
units = ["B", "KB", "MB", "GB", "TB"]
|
|
|
|
st_r = st
|
|
|
|
while st_r > 1024:
|
|
|
|
st_r /= 1024
|
|
|
|
units.pop(0)
|
2023-11-04 17:18:39 +00:00
|
|
|
embed.description += (
|
|
|
|
"\N{warning sign}\ufe0f {} is too large to upload ({!s}{}"
|
|
|
|
", max is {}MB).\n".format(
|
|
|
|
file.name,
|
|
|
|
round(st_r, 2),
|
|
|
|
units[0],
|
|
|
|
REAL_MAX_SIZE_MB,
|
|
|
|
)
|
|
|
|
)
|
2023-04-29 02:25:19 +01:00
|
|
|
continue
|
2023-05-11 18:55:58 +01:00
|
|
|
else:
|
|
|
|
files.append(discord.File(file, file.name))
|
|
|
|
BYTES_REMAINING -= st
|
2023-04-29 02:25:19 +01:00
|
|
|
|
|
|
|
if not files:
|
2023-04-29 02:59:17 +01:00
|
|
|
embed.description += "No files to upload. Directory list:\n%s" % (
|
2023-11-04 17:18:39 +00:00
|
|
|
"\n".join(r"\* " + f.name for f in tempdir.iterdir())
|
2023-04-29 02:59:17 +01:00
|
|
|
)
|
2023-04-29 02:25:19 +01:00
|
|
|
return await ctx.edit(embed=embed)
|
|
|
|
else:
|
|
|
|
_desc = embed.description
|
2023-09-09 22:11:47 +01:00
|
|
|
embed.description += f"Uploading {len(files)} file(s):\n%s" % (
|
2023-11-04 17:18:39 +00:00
|
|
|
"\n".join("* `%s`" % f.filename for f in files)
|
2023-09-09 22:11:47 +01:00
|
|
|
)
|
2023-04-29 02:25:19 +01:00
|
|
|
await ctx.edit(embed=embed)
|
|
|
|
await ctx.channel.trigger_typing()
|
|
|
|
embed.description = _desc
|
2023-05-17 19:50:24 +01:00
|
|
|
start = time()
|
2023-04-29 02:25:19 +01:00
|
|
|
await ctx.edit(embed=embed, files=files)
|
2023-05-17 19:50:24 +01:00
|
|
|
end = time()
|
2023-05-16 17:13:15 +01:00
|
|
|
if (end - start) < 10:
|
|
|
|
await ctx.respond("*clearing typing*", delete_after=0.01)
|
2023-05-12 11:09:50 +01:00
|
|
|
|
2023-05-11 22:41:56 +01:00
|
|
|
async def bgtask():
|
|
|
|
await asyncio.sleep(120.0)
|
|
|
|
try:
|
|
|
|
await ctx.edit(embed=None)
|
|
|
|
except discord.NotFound:
|
|
|
|
pass
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-05-11 22:41:56 +01:00
|
|
|
self.bot.loop.create_task(bgtask())
|
2023-05-12 16:39:34 +01:00
|
|
|
|
2023-03-16 21:45:01 +00:00
|
|
|
@commands.slash_command(name="text-to-mp3")
|
|
|
|
@commands.cooldown(5, 600, commands.BucketType.user)
|
2023-03-16 23:49:37 +00:00
|
|
|
async def text_to_mp3(
|
2023-11-04 17:18:39 +00:00
|
|
|
self,
|
2023-03-16 23:49:37 +00:00
|
|
|
ctx: discord.ApplicationContext,
|
2023-11-04 17:18:39 +00:00
|
|
|
speed: discord.Option(int, "The speed of the voice. Default is 150.", required=False, default=150),
|
2023-03-20 14:48:23 +00:00
|
|
|
voice: discord.Option(
|
|
|
|
str,
|
|
|
|
"The voice to use. Some may cause timeout.",
|
2023-03-20 14:49:45 +00:00
|
|
|
autocomplete=discord.utils.basic_autocomplete(VOICES),
|
2023-11-04 17:18:39 +00:00
|
|
|
default="default",
|
|
|
|
),
|
2023-03-16 23:49:37 +00:00
|
|
|
):
|
2023-03-16 21:45:01 +00:00
|
|
|
"""Converts text to MP3. 5 uses per 10 minutes."""
|
2023-03-20 14:49:45 +00:00
|
|
|
if voice not in VOICES:
|
|
|
|
return await ctx.respond("Invalid voice.")
|
2023-03-16 23:58:18 +00:00
|
|
|
speed = min(300, max(50, speed))
|
2023-03-20 14:39:22 +00:00
|
|
|
_self = self
|
2023-03-16 21:45:01 +00:00
|
|
|
_bot = self.bot
|
2023-03-17 09:25:26 +00:00
|
|
|
|
2023-03-16 21:45:01 +00:00
|
|
|
class TextModal(discord.ui.Modal):
|
|
|
|
def __init__(self):
|
|
|
|
super().__init__(
|
|
|
|
discord.ui.InputText(
|
|
|
|
label="Text",
|
|
|
|
placeholder="Enter text to read",
|
|
|
|
min_length=1,
|
|
|
|
max_length=4000,
|
2023-11-04 17:18:39 +00:00
|
|
|
style=discord.InputTextStyle.long,
|
2023-03-16 21:45:01 +00:00
|
|
|
),
|
2023-11-04 17:18:39 +00:00
|
|
|
title="Convert text to an MP3",
|
2023-03-16 21:45:01 +00:00
|
|
|
)
|
2023-03-20 14:48:23 +00:00
|
|
|
|
2023-03-16 21:45:01 +00:00
|
|
|
async def callback(self, interaction: discord.Interaction):
|
2023-03-20 14:39:22 +00:00
|
|
|
def _convert(text: str) -> Tuple[BytesIO, int]:
|
2023-08-16 01:25:59 +01:00
|
|
|
assert pyttsx3
|
2023-03-20 14:39:22 +00:00
|
|
|
tmp_dir = tempfile.gettempdir()
|
|
|
|
target_fn = Path(tmp_dir) / f"jimmy-tts-{ctx.user.id}-{ctx.interaction.id}.mp3"
|
|
|
|
target_fn = str(target_fn)
|
2023-03-16 23:02:18 +00:00
|
|
|
engine = pyttsx3.init()
|
2023-03-20 14:48:23 +00:00
|
|
|
engine.setProperty("voice", voice)
|
2023-03-16 23:49:37 +00:00
|
|
|
engine.setProperty("rate", speed)
|
2023-03-16 23:02:18 +00:00
|
|
|
_io = BytesIO()
|
|
|
|
engine.save_to_file(text, target_fn)
|
|
|
|
engine.runAndWait()
|
2023-03-16 23:22:27 +00:00
|
|
|
last_3_sizes = [-3, -2, -1]
|
2023-03-20 14:39:22 +00:00
|
|
|
no_exists = 0
|
2023-03-16 23:27:03 +00:00
|
|
|
|
|
|
|
def should_loop():
|
|
|
|
if not os.path.exists(target_fn):
|
2023-03-20 14:39:22 +00:00
|
|
|
nonlocal no_exists
|
|
|
|
assert no_exists < 300, "File does not exist for 5 minutes."
|
|
|
|
no_exists += 1
|
2023-03-16 23:27:03 +00:00
|
|
|
return True
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-03-16 23:27:03 +00:00
|
|
|
stat = os.stat(target_fn)
|
|
|
|
for _result in last_3_sizes:
|
|
|
|
if stat.st_size != _result:
|
|
|
|
return True
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-03-16 23:27:03 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
while should_loop():
|
2023-03-16 23:31:38 +00:00
|
|
|
if os.path.exists(target_fn):
|
2023-03-16 23:34:29 +00:00
|
|
|
last_3_sizes.pop(0)
|
2023-03-16 23:31:38 +00:00
|
|
|
last_3_sizes.append(os.stat(target_fn).st_size)
|
2023-03-16 23:49:46 +00:00
|
|
|
sleep(1)
|
2023-03-16 23:22:27 +00:00
|
|
|
|
2023-03-16 23:02:18 +00:00
|
|
|
with open(target_fn, "rb") as f:
|
2023-03-20 14:39:22 +00:00
|
|
|
x = f.read()
|
|
|
|
_io.write(x)
|
2023-03-16 23:02:18 +00:00
|
|
|
os.remove(target_fn)
|
|
|
|
_io.seek(0)
|
2023-03-20 14:39:22 +00:00
|
|
|
return _io, len(x)
|
2023-03-16 22:45:04 +00:00
|
|
|
|
2023-03-16 21:45:01 +00:00
|
|
|
await interaction.response.defer()
|
2023-03-16 22:45:04 +00:00
|
|
|
text_pre = self.children[0].value
|
2023-03-20 14:39:22 +00:00
|
|
|
if text_pre.startswith("url:"):
|
|
|
|
_url = text_pre[4:].strip()
|
|
|
|
_msg = await interaction.followup.send("Downloading text...")
|
|
|
|
try:
|
2023-05-11 18:01:47 +01:00
|
|
|
response = await _self.http.get(
|
2023-11-04 17:18:39 +00:00
|
|
|
_url, headers={"User-Agent": "Mozilla/5.0"}, follow_redirects=True
|
2023-05-11 18:01:47 +01:00
|
|
|
)
|
2023-03-20 14:39:22 +00:00
|
|
|
if response.status_code != 200:
|
|
|
|
await _msg.edit(content=f"Failed to download text. Status code: {response.status_code}")
|
|
|
|
return
|
|
|
|
|
|
|
|
ct = response.headers.get("Content-Type", "application/octet-stream")
|
|
|
|
if not ct.startswith("text/plain"):
|
|
|
|
await _msg.edit(content=f"Failed to download text. Content-Type is {ct!r}, not text/plain")
|
|
|
|
return
|
|
|
|
text_pre = response.text
|
|
|
|
except (ConnectionError, httpx.HTTPError, httpx.NetworkError) as e:
|
|
|
|
await _msg.edit(content="Failed to download text. " + str(e))
|
|
|
|
return
|
2023-05-15 18:24:06 +01:00
|
|
|
|
2023-03-20 14:39:22 +00:00
|
|
|
else:
|
2023-05-11 18:01:47 +01:00
|
|
|
_msg = await interaction.followup.send("Converting text to MP3... (0 seconds elapsed)")
|
2023-05-15 18:24:06 +01:00
|
|
|
|
2023-05-11 18:01:47 +01:00
|
|
|
async def assurance_task():
|
|
|
|
while True:
|
|
|
|
await asyncio.sleep(5.5)
|
|
|
|
await _msg.edit(
|
2023-05-11 18:04:53 +01:00
|
|
|
content=f"Converting text to MP3... ({time() - start_time:.1f} seconds elapsed)"
|
2023-05-11 18:01:47 +01:00
|
|
|
)
|
|
|
|
|
2023-05-11 18:04:53 +01:00
|
|
|
start_time = time()
|
2023-05-11 18:02:37 +01:00
|
|
|
task = _bot.loop.create_task(assurance_task())
|
2023-03-16 23:28:24 +00:00
|
|
|
try:
|
2023-11-04 17:18:39 +00:00
|
|
|
mp3, size = await asyncio.wait_for(_bot.loop.run_in_executor(None, _convert, text_pre), timeout=600)
|
2023-05-11 18:01:47 +01:00
|
|
|
except asyncio.TimeoutError:
|
|
|
|
task.cancel()
|
|
|
|
await _msg.edit(content="Failed to convert text to MP3 - Timeout. Try shorter/less complex text.")
|
|
|
|
return
|
2023-03-16 23:28:24 +00:00
|
|
|
except (Exception, IOError) as e:
|
2023-05-11 18:01:47 +01:00
|
|
|
task.cancel()
|
2023-03-16 23:29:34 +00:00
|
|
|
await _msg.edit(content="failed. " + str(e))
|
|
|
|
raise e
|
2023-05-11 18:01:47 +01:00
|
|
|
task.cancel()
|
|
|
|
del task
|
2023-03-20 14:39:22 +00:00
|
|
|
if size >= ctx.guild.filesize_limit - 1500:
|
|
|
|
await _msg.edit(
|
2023-04-27 11:22:06 +01:00
|
|
|
content=f"MP3 is too large ({size / 1024 / 1024}Mb vs "
|
2023-11-04 17:18:39 +00:00
|
|
|
f"{ctx.guild.filesize_limit / 1024 / 1024}Mb)"
|
2023-03-20 14:39:22 +00:00
|
|
|
)
|
|
|
|
return
|
2023-03-16 22:45:04 +00:00
|
|
|
fn = ""
|
|
|
|
_words = text_pre.split()
|
|
|
|
while len(fn) < 28:
|
|
|
|
try:
|
|
|
|
word = _words.pop(0)
|
|
|
|
except IndexError:
|
|
|
|
break
|
|
|
|
if len(fn) + len(word) + 1 > 28:
|
|
|
|
continue
|
|
|
|
fn += word + "-"
|
2023-03-16 23:28:24 +00:00
|
|
|
fn = fn[:-1]
|
|
|
|
fn = fn[:28]
|
2023-11-04 17:18:39 +00:00
|
|
|
await _msg.edit(content="Here's your MP3!", file=discord.File(mp3, filename=fn + ".mp3"))
|
|
|
|
|
2023-03-16 21:45:01 +00:00
|
|
|
await ctx.send_modal(TextModal())
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-03-27 23:16:28 +01:00
|
|
|
@commands.slash_command()
|
|
|
|
@commands.cooldown(5, 10, commands.BucketType.user)
|
|
|
|
@commands.max_concurrency(1, commands.BucketType.user)
|
|
|
|
async def quote(self, ctx: discord.ApplicationContext):
|
|
|
|
"""Generates a random quote"""
|
2023-11-04 17:18:39 +00:00
|
|
|
emoji = discord.PartialEmoji(name="loading", animated=True, id=1101463077586735174)
|
2023-04-28 21:31:00 +01:00
|
|
|
|
2023-04-28 15:20:15 +01:00
|
|
|
async def get_quote() -> str | discord.File:
|
|
|
|
try:
|
|
|
|
response = await self.http.get("https://inspirobot.me/api?generate=true")
|
|
|
|
except (ConnectionError, httpx.HTTPError, httpx.NetworkError) as e:
|
|
|
|
return "Failed to get quote. " + str(e)
|
|
|
|
if response.status_code != 200:
|
|
|
|
return f"Failed to get quote. Status code: {response.status_code}"
|
|
|
|
url = response.text
|
|
|
|
try:
|
|
|
|
response = await self.http.get(url)
|
|
|
|
except (ConnectionError, httpx.HTTPError, httpx.NetworkError) as e:
|
|
|
|
return url
|
|
|
|
else:
|
|
|
|
if response.status_code != 200:
|
|
|
|
return url
|
|
|
|
x = io.BytesIO(response.content)
|
|
|
|
x.seek(0)
|
|
|
|
return discord.File(x, filename="quote.jpg")
|
|
|
|
|
|
|
|
class GenerateNewView(discord.ui.View):
|
2023-04-28 15:39:32 +01:00
|
|
|
def __init__(self):
|
2023-11-04 17:18:39 +00:00
|
|
|
super().__init__(timeout=300, disable_on_timeout=True)
|
2023-04-28 15:39:32 +01:00
|
|
|
|
2023-04-28 15:20:15 +01:00
|
|
|
async def __aenter__(self):
|
|
|
|
self.disable_all_items()
|
|
|
|
if self.message:
|
|
|
|
await self.message.edit(view=self)
|
|
|
|
return self
|
|
|
|
|
|
|
|
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
|
|
|
self.enable_all_items()
|
|
|
|
if self.message:
|
|
|
|
await self.message.edit(view=self)
|
|
|
|
return self
|
|
|
|
|
2023-04-28 15:24:28 +01:00
|
|
|
async def interaction_check(self, interaction: discord.Interaction) -> bool:
|
|
|
|
return interaction.user == ctx.user and interaction.channel == ctx.channel
|
|
|
|
|
2023-04-28 15:20:15 +01:00
|
|
|
@discord.ui.button(
|
|
|
|
label="New Quote",
|
|
|
|
style=discord.ButtonStyle.green,
|
2023-11-04 17:18:39 +00:00
|
|
|
emoji=discord.PartialEmoji.from_str("\U000023ed\U0000fe0f"),
|
2023-04-28 15:20:15 +01:00
|
|
|
)
|
|
|
|
async def new_quote(self, _, interaction: discord.Interaction):
|
2023-04-28 15:32:15 +01:00
|
|
|
await interaction.response.defer(invisible=True)
|
2023-04-28 15:20:15 +01:00
|
|
|
async with self:
|
2023-04-28 15:38:05 +01:00
|
|
|
followup = await interaction.followup.send(f"{emoji} Generating quote")
|
2023-04-28 15:20:15 +01:00
|
|
|
new_result = await get_quote()
|
|
|
|
if isinstance(new_result, discord.File):
|
2023-04-28 15:40:46 +01:00
|
|
|
return await followup.edit(content=None, file=new_result, view=GenerateNewView())
|
2023-04-28 15:20:15 +01:00
|
|
|
else:
|
2023-04-28 15:38:05 +01:00
|
|
|
return await followup.edit(content=new_result, view=GenerateNewView())
|
2023-04-28 15:20:15 +01:00
|
|
|
|
|
|
|
@discord.ui.button(
|
2023-11-04 17:18:39 +00:00
|
|
|
label="Regenerate", style=discord.ButtonStyle.blurple, emoji=discord.PartialEmoji.from_str("\U0001f504")
|
2023-04-28 15:20:15 +01:00
|
|
|
)
|
|
|
|
async def regenerate(self, _, interaction: discord.Interaction):
|
2023-04-28 15:32:15 +01:00
|
|
|
await interaction.response.defer(invisible=True)
|
2023-04-28 15:20:15 +01:00
|
|
|
async with self:
|
2023-04-28 15:24:28 +01:00
|
|
|
message = await interaction.original_response()
|
2023-04-28 15:38:05 +01:00
|
|
|
if "\U00002b50" in [_reaction.emoji for _reaction in message.reactions]:
|
2023-04-28 15:24:28 +01:00
|
|
|
return await interaction.followup.send(
|
|
|
|
"\N{cross mark} Message is starred and cannot be regenerated. You can press "
|
|
|
|
"'New Quote' to generate a new quote instead.",
|
2023-11-04 17:18:39 +00:00
|
|
|
ephemeral=True,
|
2023-04-28 15:24:28 +01:00
|
|
|
)
|
2023-04-28 15:20:15 +01:00
|
|
|
new_result = await get_quote()
|
|
|
|
if isinstance(new_result, discord.File):
|
|
|
|
return await interaction.edit_original_response(file=new_result)
|
|
|
|
else:
|
|
|
|
return await interaction.edit_original_response(content=new_result)
|
|
|
|
|
2023-11-04 17:18:39 +00:00
|
|
|
@discord.ui.button(label="Delete", style=discord.ButtonStyle.red, emoji="\N{wastebasket}\U0000fe0f")
|
2023-04-28 15:26:09 +01:00
|
|
|
async def delete(self, _, interaction: discord.Interaction):
|
2023-04-28 15:32:15 +01:00
|
|
|
await interaction.response.defer(invisible=True)
|
2023-04-28 15:26:09 +01:00
|
|
|
await interaction.delete_original_response()
|
|
|
|
self.stop()
|
|
|
|
|
2023-03-27 23:16:28 +01:00
|
|
|
await ctx.defer()
|
2023-04-28 15:20:15 +01:00
|
|
|
result = await get_quote()
|
|
|
|
if isinstance(result, discord.File):
|
|
|
|
return await ctx.respond(file=result, view=GenerateNewView())
|
2023-03-27 23:16:28 +01:00
|
|
|
else:
|
2023-04-28 15:20:15 +01:00
|
|
|
return await ctx.respond(result, view=GenerateNewView())
|
2023-03-14 12:39:57 +00:00
|
|
|
|
2023-05-05 10:35:17 +01:00
|
|
|
@commands.slash_command()
|
|
|
|
@commands.cooldown(1, 30, commands.BucketType.user)
|
|
|
|
@commands.max_concurrency(1, commands.BucketType.user)
|
|
|
|
async def ocr(
|
2023-11-04 17:18:39 +00:00
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
attachment: discord.Option(
|
|
|
|
discord.SlashCommandOptionType.attachment,
|
|
|
|
description="Image to perform OCR on",
|
|
|
|
),
|
2023-05-05 10:35:17 +01:00
|
|
|
):
|
|
|
|
"""OCRs an image"""
|
|
|
|
await ctx.defer()
|
2023-05-05 20:08:47 +01:00
|
|
|
timings: Dict[str, float] = {}
|
2023-05-05 10:35:17 +01:00
|
|
|
attachment: discord.Attachment
|
2023-05-30 19:53:47 +01:00
|
|
|
with Timer() as _t:
|
2023-05-05 20:08:47 +01:00
|
|
|
data = await attachment.read()
|
|
|
|
file = io.BytesIO(data)
|
|
|
|
file.seek(0)
|
2023-05-30 19:53:47 +01:00
|
|
|
timings["Download attachment"] = _t.total
|
|
|
|
with Timer() as _t:
|
2023-05-05 20:08:47 +01:00
|
|
|
img = await self.bot.loop.run_in_executor(None, Image.open, file)
|
2023-05-30 19:53:47 +01:00
|
|
|
timings["Parse image"] = _t.total
|
2023-05-05 10:35:17 +01:00
|
|
|
try:
|
2023-05-30 19:53:47 +01:00
|
|
|
with Timer() as _t:
|
2023-05-05 20:08:47 +01:00
|
|
|
text = await self.bot.loop.run_in_executor(None, pytesseract.image_to_string, img)
|
2023-05-30 19:53:47 +01:00
|
|
|
timings["Perform OCR"] = _t.total
|
2023-05-05 10:35:17 +01:00
|
|
|
except pytesseract.TesseractError as e:
|
|
|
|
return await ctx.respond(f"Failed to perform OCR: `{e}`")
|
|
|
|
|
2023-05-05 20:08:47 +01:00
|
|
|
if len(text) > 4096:
|
2023-05-30 19:53:47 +01:00
|
|
|
with Timer() as _t:
|
2023-05-05 20:08:47 +01:00
|
|
|
try:
|
|
|
|
response = await self.http.put(
|
|
|
|
"https://api.mystb.in/paste",
|
|
|
|
json={
|
2023-11-04 17:18:39 +00:00
|
|
|
"files": [{"filename": "ocr.txt", "content": text}],
|
|
|
|
},
|
2023-05-05 20:08:47 +01:00
|
|
|
)
|
|
|
|
response.raise_for_status()
|
|
|
|
except httpx.HTTPError:
|
|
|
|
return await ctx.respond("OCR content too large to post.")
|
|
|
|
else:
|
|
|
|
data = response.json()
|
|
|
|
with Timer(timings, "Respond (URL)"):
|
|
|
|
embed = discord.Embed(
|
|
|
|
description="View on [mystb.in](%s)" % ("https://mystb.in/" + data["id"]),
|
2023-11-04 17:18:39 +00:00
|
|
|
colour=discord.Colour.dark_theme(),
|
2023-05-05 20:08:47 +01:00
|
|
|
)
|
|
|
|
await ctx.respond(embed=embed)
|
2023-05-30 19:53:47 +01:00
|
|
|
timings["Upload text to mystbin"] = _t.total
|
2023-08-28 19:43:25 +01:00
|
|
|
elif len(text) <= 1500:
|
2023-05-30 19:53:47 +01:00
|
|
|
with Timer() as _t:
|
|
|
|
await ctx.respond(embed=discord.Embed(description=text))
|
|
|
|
timings["Respond (Text)"] = _t.total
|
2023-05-05 20:08:47 +01:00
|
|
|
else:
|
2023-05-30 19:53:47 +01:00
|
|
|
with Timer() as _t:
|
2023-05-05 20:08:47 +01:00
|
|
|
out_file = io.BytesIO(text.encode("utf-8", "replace"))
|
|
|
|
await ctx.respond(file=discord.File(out_file, filename="ocr.txt"))
|
2023-05-30 19:53:47 +01:00
|
|
|
timings["Respond (File)"] = _t.total
|
2023-05-05 10:35:17 +01:00
|
|
|
|
2023-05-15 18:24:06 +01:00
|
|
|
if timings:
|
2023-05-30 19:56:39 +01:00
|
|
|
text = "Timings:\n" + "\n".join("{}: {:.2f}s".format(k.title(), v) for k, v in timings.items())
|
2023-05-15 18:24:06 +01:00
|
|
|
await ctx.edit(
|
2023-05-30 19:53:47 +01:00
|
|
|
content=text,
|
2023-05-15 18:24:06 +01:00
|
|
|
)
|
2023-05-05 10:35:17 +01:00
|
|
|
|
2023-05-25 09:51:16 +01:00
|
|
|
@commands.message_command(name="Convert Image to GIF")
|
|
|
|
async def convert_image_to_gif(self, ctx: discord.ApplicationContext, message: discord.Message):
|
2023-05-25 09:55:02 +01:00
|
|
|
await ctx.defer()
|
2023-05-25 09:51:16 +01:00
|
|
|
for attachment in message.attachments:
|
|
|
|
if attachment.content_type.startswith("image/"):
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
return await ctx.respond("No image found.")
|
|
|
|
image = attachment
|
|
|
|
image: discord.Attachment
|
|
|
|
with tempfile.TemporaryFile("wb+") as f:
|
|
|
|
await image.save(f)
|
|
|
|
f.seek(0)
|
|
|
|
img = await self.bot.loop.run_in_executor(None, Image.open, f)
|
|
|
|
if img.format.upper() not in ("PNG", "JPEG", "WEBP", "HEIF", "BMP", "TIFF"):
|
|
|
|
return await ctx.respond("Image must be PNG, JPEG, WEBP, or HEIF.")
|
|
|
|
|
|
|
|
with tempfile.TemporaryFile("wb+") as f2:
|
|
|
|
caller = partial(img.save, f2, format="GIF")
|
|
|
|
await self.bot.loop.run_in_executor(None, caller)
|
|
|
|
f2.seek(0)
|
|
|
|
try:
|
|
|
|
await ctx.respond(file=discord.File(f2, filename="image.gif"))
|
|
|
|
except discord.HTTPException as e:
|
|
|
|
if e.code == 40005:
|
|
|
|
return await ctx.respond("Image is too large.")
|
|
|
|
return await ctx.respond(f"Failed to upload: `{e}`")
|
|
|
|
try:
|
2023-05-25 09:53:24 +01:00
|
|
|
f2.seek(0)
|
2023-05-25 09:51:16 +01:00
|
|
|
await ctx.user.send(file=discord.File(f2, filename="image.gif"))
|
|
|
|
except discord.Forbidden:
|
|
|
|
return await ctx.respond("Unable to mirror to your DM - am I blocked?", ephemeral=True)
|
|
|
|
|
2023-06-03 14:17:34 +01:00
|
|
|
@commands.slash_command()
|
|
|
|
@commands.cooldown(1, 180, commands.BucketType.user)
|
|
|
|
@commands.max_concurrency(1, commands.BucketType.user)
|
|
|
|
async def sherlock(
|
2023-11-04 17:18:39 +00:00
|
|
|
self, ctx: discord.ApplicationContext, username: str, search_nsfw: bool = False, use_tor: bool = False
|
2023-06-03 14:17:34 +01:00
|
|
|
):
|
|
|
|
"""Sherlocks a username."""
|
2023-06-28 22:19:40 +01:00
|
|
|
# git clone https://github.com/sherlock-project/sherlock.git && cd sherlock && docker build -t sherlock .
|
|
|
|
|
2023-06-03 14:43:31 +01:00
|
|
|
if re.search(r"\s", username) is not None:
|
|
|
|
return await ctx.respond("Username cannot contain spaces.")
|
|
|
|
|
2023-06-03 14:22:35 +01:00
|
|
|
async def background_task():
|
2023-06-03 14:46:36 +01:00
|
|
|
chars = ["|", "/", "-", "\\"]
|
|
|
|
n = 0
|
2023-06-03 14:22:35 +01:00
|
|
|
# Every 5 seconds update the embed to show that the command is still running
|
|
|
|
while True:
|
2023-06-03 14:57:35 +01:00
|
|
|
await asyncio.sleep(2.5)
|
2023-06-03 14:23:53 +01:00
|
|
|
elapsed = time() - start_time
|
2023-06-03 14:28:44 +01:00
|
|
|
embed = discord.Embed(
|
2023-06-03 14:47:49 +01:00
|
|
|
title="Sherlocking username %s" % chars[n % 4],
|
2023-06-03 14:28:44 +01:00
|
|
|
description=f"Elapsed: {elapsed:.0f}s",
|
2023-11-04 17:18:39 +00:00
|
|
|
colour=discord.Colour.dark_theme(),
|
2023-06-03 14:22:35 +01:00
|
|
|
)
|
2023-11-04 17:18:39 +00:00
|
|
|
await ctx.edit(embed=embed)
|
2023-06-03 14:46:36 +01:00
|
|
|
n += 1
|
2023-06-03 14:22:35 +01:00
|
|
|
|
2023-06-03 14:17:34 +01:00
|
|
|
await ctx.defer()
|
|
|
|
# output results to a temporary directory
|
2023-06-03 15:20:34 +01:00
|
|
|
tempdir = Path("./tmp/sherlock").resolve()
|
|
|
|
tempdir.mkdir(parents=True, exist_ok=True)
|
|
|
|
command = [
|
|
|
|
"docker",
|
|
|
|
"run",
|
|
|
|
"--rm",
|
|
|
|
"-t",
|
|
|
|
"-v",
|
|
|
|
f"{tempdir}:/opt/sherlock/results",
|
|
|
|
"sherlock",
|
2023-11-04 17:18:39 +00:00
|
|
|
"--folderoutput",
|
|
|
|
"/opt/sherlock/results",
|
2023-06-03 15:20:34 +01:00
|
|
|
"--print-found",
|
2023-11-04 17:18:39 +00:00
|
|
|
"--csv",
|
2023-06-03 15:20:34 +01:00
|
|
|
]
|
|
|
|
if search_nsfw:
|
|
|
|
command.append("--nsfw")
|
|
|
|
if use_tor:
|
|
|
|
command.append("--tor")
|
|
|
|
# Output to result.csv
|
|
|
|
# Username to search for
|
|
|
|
command.append(username)
|
|
|
|
# Run the command
|
|
|
|
start_time = time()
|
|
|
|
result = await asyncio.create_subprocess_exec(
|
|
|
|
*command,
|
|
|
|
stdout=asyncio.subprocess.PIPE,
|
|
|
|
stderr=asyncio.subprocess.PIPE,
|
|
|
|
)
|
|
|
|
await ctx.respond(embed=discord.Embed(title="Starting..."))
|
|
|
|
task = asyncio.create_task(background_task())
|
|
|
|
# Wait for it to finish
|
|
|
|
stdout, stderr = await result.communicate()
|
|
|
|
await result.wait()
|
|
|
|
task.cancel()
|
|
|
|
# wait for task to exit
|
|
|
|
try:
|
|
|
|
await task
|
|
|
|
except asyncio.CancelledError:
|
|
|
|
pass
|
|
|
|
# If it errored, send the error
|
|
|
|
if result.returncode != 0:
|
|
|
|
shutil.rmtree(tempdir, ignore_errors=True)
|
|
|
|
return await ctx.edit(
|
2023-06-03 14:17:34 +01:00
|
|
|
embed=discord.Embed(
|
2023-06-03 15:20:34 +01:00
|
|
|
title="Error",
|
|
|
|
description=f"```ansi\n{stderr.decode()[:4000]}```",
|
|
|
|
colour=discord.Colour.red(),
|
|
|
|
)
|
2023-06-03 14:17:34 +01:00
|
|
|
)
|
2023-06-03 15:20:34 +01:00
|
|
|
# If it didn't error, send the results
|
|
|
|
stdout = stdout.decode()
|
|
|
|
if len(stdout) > 4000:
|
|
|
|
paginator = commands.Paginator("```ansi", max_size=4000)
|
|
|
|
for line in stdout.splitlines():
|
|
|
|
paginator.add_line(line)
|
|
|
|
desc = paginator.pages[0]
|
|
|
|
title = "Results (truncated)"
|
|
|
|
else:
|
|
|
|
desc = f"```ansi\n{stdout}```"
|
|
|
|
title = "Results"
|
|
|
|
files = list(map(discord.File, glob.glob(f"{tempdir}/*")))
|
|
|
|
await ctx.edit(
|
|
|
|
files=files,
|
|
|
|
embed=discord.Embed(
|
|
|
|
title=title,
|
|
|
|
description=desc,
|
|
|
|
colour=discord.Colour.green(),
|
|
|
|
),
|
|
|
|
)
|
|
|
|
shutil.rmtree(tempdir, ignore_errors=True)
|
2023-06-03 14:17:34 +01:00
|
|
|
|
2023-08-16 01:25:59 +01:00
|
|
|
@commands.slash_command()
|
|
|
|
@discord.guild_only()
|
2023-08-16 12:23:40 +01:00
|
|
|
async def opusinate(self, ctx: discord.ApplicationContext, file: discord.Attachment, size_mb: float = 8):
|
2023-08-16 01:25:59 +01:00
|
|
|
"""Converts the given file into opus with the given size."""
|
2023-11-04 17:18:39 +00:00
|
|
|
|
2023-08-16 19:12:44 +01:00
|
|
|
def humanise(v: int) -> str:
|
|
|
|
units = ["B", "KB", "MB", "GB", "TB", "PB", "EB"]
|
|
|
|
while v > 1024:
|
|
|
|
v /= 1024
|
|
|
|
units.pop(0)
|
|
|
|
n = round(v, 2) if v % 1 else v
|
|
|
|
return "%s%s" % (n, units[0])
|
|
|
|
|
2023-08-16 01:25:59 +01:00
|
|
|
await ctx.defer()
|
|
|
|
size_bytes = size_mb * 1024 * 1024
|
|
|
|
max_size = ctx.guild.filesize_limit if ctx.guild else 8 * 1024 * 1024
|
|
|
|
share = False
|
|
|
|
if os.path.exists("/mnt/vol/share/droplet.secret"):
|
2023-08-16 01:34:56 +01:00
|
|
|
share = True
|
2023-08-16 01:25:59 +01:00
|
|
|
|
2023-08-16 01:34:56 +01:00
|
|
|
if size_bytes > max_size or share is False or (share is True and size_mb >= 250):
|
2023-08-16 01:25:59 +01:00
|
|
|
return await ctx.respond(":x: Max file size is %dMB" % round(max_size / 1024 / 1024))
|
|
|
|
|
|
|
|
ct, suffix = file.content_type.split("/")
|
|
|
|
if ct not in ("audio", "video"):
|
|
|
|
return await ctx.respond(":x: Only audio or video please.")
|
|
|
|
with tempfile.NamedTemporaryFile(suffix="." + suffix) as raw_file:
|
|
|
|
location = Path(raw_file.name)
|
|
|
|
location.write_bytes(await file.read(use_cached=False))
|
|
|
|
|
|
|
|
process = await asyncio.create_subprocess_exec(
|
|
|
|
"ffprobe",
|
|
|
|
"-v",
|
|
|
|
"error",
|
|
|
|
"-of",
|
|
|
|
"json",
|
|
|
|
"-show_entries",
|
|
|
|
"format=duration,bit_rate,channels",
|
|
|
|
"-show_streams",
|
|
|
|
"-select_streams",
|
|
|
|
"a", # select audio-nly
|
|
|
|
str(location),
|
|
|
|
stdout=asyncio.subprocess.PIPE,
|
2023-11-04 17:18:39 +00:00
|
|
|
stderr=asyncio.subprocess.PIPE,
|
2023-08-16 01:25:59 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
stdout, stderr = await process.communicate()
|
|
|
|
if process.returncode != 0:
|
|
|
|
return await ctx.respond(
|
|
|
|
":x: Error gathering metadata.\n```\n%s\n```" % discord.utils.escape_markdown(stderr.decode())
|
|
|
|
)
|
|
|
|
|
|
|
|
metadata = json.loads(stdout.decode())
|
|
|
|
try:
|
|
|
|
stream = metadata["streams"].pop()
|
|
|
|
except IndexError:
|
2023-11-04 17:18:39 +00:00
|
|
|
return await ctx.respond(":x: No audio streams to transcode.")
|
2023-08-16 01:25:59 +01:00
|
|
|
duration = float(metadata["format"]["duration"])
|
|
|
|
bit_rate = math.floor(int(metadata["format"]["bit_rate"]) / 1024)
|
|
|
|
channels = int(stream["channels"])
|
|
|
|
codec = stream["codec_name"]
|
|
|
|
|
2023-08-16 01:44:17 +01:00
|
|
|
target_bitrate = math.floor((size_mb * 8192) / duration)
|
2023-08-16 01:25:59 +01:00
|
|
|
if target_bitrate <= 0:
|
|
|
|
return await ctx.respond(
|
|
|
|
":x: Target size too small (would've had a negative bitrate of %d)" % target_bitrate
|
|
|
|
)
|
2023-08-16 01:34:56 +01:00
|
|
|
br_ceiling = 255 * channels
|
|
|
|
end_br = min(bit_rate, target_bitrate, br_ceiling)
|
2023-08-16 01:25:59 +01:00
|
|
|
|
|
|
|
with tempfile.NamedTemporaryFile(suffix=".ogg", prefix=file.filename) as output_file:
|
|
|
|
command = [
|
|
|
|
"ffmpeg",
|
|
|
|
"-i",
|
|
|
|
str(location),
|
|
|
|
"-v",
|
|
|
|
"error",
|
|
|
|
"-vn",
|
|
|
|
"-sn",
|
|
|
|
"-c:a",
|
|
|
|
"libopus",
|
|
|
|
"-b:a",
|
|
|
|
"%sK" % end_br,
|
2023-08-16 01:27:56 +01:00
|
|
|
"-y",
|
2023-11-04 17:18:39 +00:00
|
|
|
output_file.name,
|
2023-08-16 01:25:59 +01:00
|
|
|
]
|
|
|
|
process = await asyncio.create_subprocess_exec(
|
2023-11-04 17:18:39 +00:00
|
|
|
command[0], *command[1:], stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
|
2023-08-16 01:25:59 +01:00
|
|
|
)
|
|
|
|
stdout, stderr = await process.communicate()
|
|
|
|
|
|
|
|
if process.returncode != 0:
|
|
|
|
return await ctx.respond(
|
2023-11-04 17:18:39 +00:00
|
|
|
":x: There was an error while transcoding:\n```\n%s\n```"
|
|
|
|
% discord.utils.escape_markdown(stderr.decode())
|
2023-08-16 01:25:59 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
output_location = Path(output_file.name)
|
|
|
|
stat = output_location.stat()
|
2023-11-04 17:18:39 +00:00
|
|
|
content = (
|
|
|
|
"\N{white heavy check mark} Transcoded from %r to opus @ %dkbps.\n\n"
|
|
|
|
"* Source: %dKbps\n* Target: %dKbps\n* Ceiling: %dKbps\n* Calculated: %dKbps\n"
|
|
|
|
"* Duration: %.1f seconds\n* Input size: %s\n* Output size: %s\n* Difference: %s"
|
|
|
|
" (%dKbps)"
|
|
|
|
) % (
|
2023-08-16 01:34:56 +01:00
|
|
|
codec,
|
|
|
|
end_br,
|
|
|
|
bit_rate,
|
|
|
|
target_bitrate,
|
|
|
|
br_ceiling,
|
|
|
|
end_br,
|
2023-08-16 19:12:44 +01:00
|
|
|
duration,
|
|
|
|
humanise(file.size),
|
2023-08-16 19:23:45 +01:00
|
|
|
humanise(stat.st_size),
|
2023-10-03 18:57:41 +01:00
|
|
|
humanise(file.size - stat.st_size),
|
2023-11-04 17:18:39 +00:00
|
|
|
bit_rate - end_br,
|
2023-08-16 01:34:56 +01:00
|
|
|
)
|
|
|
|
if stat.st_size <= max_size or share is False:
|
|
|
|
if stat.st_size >= (size_bytes - 100):
|
2023-11-04 17:18:39 +00:00
|
|
|
return await ctx.respond(":x: File was too large.")
|
|
|
|
return await ctx.respond(content, file=discord.File(output_location))
|
2023-08-16 01:25:59 +01:00
|
|
|
else:
|
2023-08-16 19:23:45 +01:00
|
|
|
share_location = Path("/mnt/vol/share/tmp/") / output_location.name
|
|
|
|
share_location.touch(0o755)
|
2023-08-16 01:34:56 +01:00
|
|
|
await self.bot.loop.run_in_executor(
|
2023-11-04 17:18:39 +00:00
|
|
|
None, functools.partial(shutil.copy, output_location, share_location)
|
2023-08-16 01:34:56 +01:00
|
|
|
)
|
|
|
|
return await ctx.respond(
|
2023-11-04 17:18:39 +00:00
|
|
|
"%s\n* [Download](https://droplet.nexy7574.co.uk/share/tmp/%s)"
|
|
|
|
% (content, output_location.name)
|
2023-08-16 01:34:56 +01:00
|
|
|
)
|
2023-08-16 01:25:59 +01:00
|
|
|
|
2023-11-11 18:31:48 +00:00
|
|
|
class OllamaKillSwitchView(discord.ui.View):
|
2023-11-13 19:29:44 +00:00
|
|
|
def __init__(self, ctx: discord.ApplicationContext, msg: discord.Message):
|
2023-11-11 18:31:48 +00:00
|
|
|
super().__init__(timeout=None)
|
|
|
|
self.ctx = ctx
|
|
|
|
self.msg = msg
|
|
|
|
|
|
|
|
async def interaction_check(self, interaction: discord.Interaction) -> bool:
|
|
|
|
return interaction.user == self.ctx.author and interaction.channel == self.ctx.channel
|
|
|
|
|
|
|
|
@discord.ui.button(
|
|
|
|
label="Abort",
|
|
|
|
style=discord.ButtonStyle.red,
|
2023-11-11 18:33:32 +00:00
|
|
|
emoji="\N{wastebasket}",
|
2023-11-11 18:31:48 +00:00
|
|
|
)
|
|
|
|
async def abort_button(self, _, interaction: discord.Interaction):
|
|
|
|
await interaction.response.defer()
|
|
|
|
if self.msg in self.ctx.command.cog.ollama_locks:
|
|
|
|
self.ctx.command.cog.ollama_locks[self.msg].set()
|
|
|
|
self.disable_all_items()
|
|
|
|
await interaction.edit_original_response(view=self)
|
|
|
|
self.stop()
|
|
|
|
|
2023-11-13 19:29:44 +00:00
|
|
|
@commands.slash_command()
|
|
|
|
@commands.max_concurrency(1, commands.BucketType.user, wait=False)
|
|
|
|
async def ollama(
|
|
|
|
self,
|
|
|
|
ctx: discord.ApplicationContext,
|
|
|
|
model: str = "orca-mini",
|
|
|
|
query: str = None,
|
|
|
|
context: str = None
|
|
|
|
):
|
2023-11-10 22:30:15 +00:00
|
|
|
""":3"""
|
2023-11-13 19:29:44 +00:00
|
|
|
with open("./assets/ollama-prompt.txt") as file:
|
|
|
|
system_prompt = file.read().replace("\n", " ").strip()
|
|
|
|
if query is None:
|
|
|
|
class InputPrompt(discord.ui.Modal):
|
|
|
|
def __init__(self, is_owner: bool):
|
|
|
|
super().__init__(
|
|
|
|
discord.ui.InputText(
|
|
|
|
label="User Prompt",
|
|
|
|
placeholder="Enter prompt",
|
|
|
|
min_length=1,
|
|
|
|
max_length=4000,
|
|
|
|
style=discord.InputTextStyle.long,
|
|
|
|
),
|
|
|
|
title="Enter prompt",
|
|
|
|
timeout=120
|
|
|
|
)
|
|
|
|
if is_owner:
|
|
|
|
self.add_item(
|
|
|
|
discord.ui.InputText(
|
|
|
|
label="System Prompt",
|
|
|
|
placeholder="Enter prompt",
|
|
|
|
min_length=1,
|
|
|
|
max_length=4000,
|
|
|
|
style=discord.InputTextStyle.long,
|
|
|
|
value=system_prompt,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
self.user_prompt = None
|
|
|
|
self.system_prompt = system_prompt
|
|
|
|
|
|
|
|
async def callback(self, interaction: discord.Interaction):
|
|
|
|
self.user_prompt = self.children[0].value
|
|
|
|
if len(self.children) > 1:
|
|
|
|
self.system_prompt = self.children[1].value
|
|
|
|
await interaction.response.defer()
|
|
|
|
self.stop()
|
|
|
|
|
|
|
|
modal = InputPrompt(await self.bot.is_owner(ctx.author))
|
|
|
|
await ctx.send_modal(modal)
|
|
|
|
await modal.wait()
|
|
|
|
query = modal.user_prompt
|
|
|
|
if not modal.user_prompt:
|
|
|
|
return
|
|
|
|
system_prompt = modal.system_prompt or system_prompt
|
|
|
|
else:
|
|
|
|
await ctx.defer()
|
2023-11-13 19:36:45 +00:00
|
|
|
|
|
|
|
if context:
|
|
|
|
try:
|
|
|
|
context_decoded = base64.b85decode(context).decode()
|
|
|
|
context_decompressed = await asyncio.to_thread(
|
|
|
|
functools.partial(zlib.decompress, context_decoded.encode())
|
|
|
|
)
|
|
|
|
context = json.loads(context_decompressed)
|
|
|
|
except (ValueError, zlib.error, UnicodeDecodeError) as e:
|
|
|
|
return await ctx.respond("Failed to decode context: " + str(e))
|
|
|
|
|
2023-11-11 21:58:10 +00:00
|
|
|
content = None
|
2023-11-11 21:56:45 +00:00
|
|
|
try_hosts = {
|
|
|
|
"127.0.0.1:11434": "localhost",
|
|
|
|
"100.106.34.86:11434": "Nex Laptop",
|
|
|
|
"100.66.187.46:11434": "Nexbox",
|
|
|
|
"100.116.242.161:11434": "PortaPi"
|
|
|
|
}
|
2023-11-11 19:34:58 +00:00
|
|
|
model = model.casefold()
|
|
|
|
|
2023-11-11 19:24:49 +00:00
|
|
|
if not await self.bot.is_owner(ctx.author):
|
|
|
|
if not model.startswith("orca-mini"):
|
2023-11-13 19:29:44 +00:00
|
|
|
await ctx.respond(
|
|
|
|
":warning: You can only use `orca-mini` models.",
|
|
|
|
delete_after=30,
|
|
|
|
ephemeral=True
|
|
|
|
)
|
2023-11-11 19:32:17 +00:00
|
|
|
model = "orca-mini"
|
2023-11-13 19:29:44 +00:00
|
|
|
async with httpx.AsyncClient(follow_redirects=True) as client:
|
|
|
|
for host in try_hosts.keys():
|
|
|
|
try:
|
|
|
|
response = await client.get(
|
|
|
|
f"http://{host}/api/tags",
|
|
|
|
)
|
|
|
|
response.raise_for_status()
|
|
|
|
except (httpx.TransportError, httpx.NetworkError, httpx.HTTPStatusError):
|
|
|
|
continue
|
2023-11-11 18:49:54 +00:00
|
|
|
else:
|
2023-11-13 19:29:44 +00:00
|
|
|
break
|
|
|
|
else:
|
|
|
|
return await ctx.respond(":x: No servers available.")
|
2023-11-11 18:42:33 +00:00
|
|
|
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
|
|
|
colour=discord.Colour.greyple()
|
|
|
|
)
|
|
|
|
embed.set_author(
|
|
|
|
name=f"Loading {model}",
|
|
|
|
url=f"http://{host}",
|
|
|
|
icon_url="https://cdn.discordapp.com/emojis/1101463077586735174.gif"
|
|
|
|
)
|
|
|
|
embed.set_footer(text="Using server {} ({})".format(host, try_hosts.get(host, "Other")))
|
|
|
|
|
2023-11-13 19:29:44 +00:00
|
|
|
msg = await ctx.respond(embed=embed, ephemeral=False)
|
2023-11-11 18:49:54 +00:00
|
|
|
async with httpx.AsyncClient(base_url=f"http://{host}/api", follow_redirects=True) as client:
|
2023-11-10 22:30:15 +00:00
|
|
|
# get models
|
|
|
|
try:
|
|
|
|
response = await client.post("/show", json={"name": model})
|
|
|
|
except httpx.TransportError as e:
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
|
|
|
title="Failed to connect to Ollama.",
|
|
|
|
description=str(e),
|
|
|
|
colour=discord.Colour.red()
|
|
|
|
)
|
|
|
|
embed.set_footer(text="Using server {} ({})".format(host, try_hosts.get(host, "Other")))
|
|
|
|
return await msg.edit(embed=embed)
|
2023-11-10 22:30:15 +00:00
|
|
|
if response.status_code == 404:
|
2023-11-11 21:56:45 +00:00
|
|
|
embed.title = f"Downloading {model}"
|
|
|
|
await msg.edit(embed=embed)
|
2023-11-10 22:30:15 +00:00
|
|
|
async with ctx.channel.typing():
|
|
|
|
async with client.stream(
|
|
|
|
"POST",
|
|
|
|
"/pull",
|
2023-11-10 23:23:28 +00:00
|
|
|
json={"name": model, "stream": True},
|
2023-11-10 22:30:15 +00:00
|
|
|
timeout=None
|
|
|
|
) as response:
|
|
|
|
if response.status_code != 200:
|
2023-11-11 18:31:48 +00:00
|
|
|
error = await response.aread()
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
|
|
|
title=f"Failed to download model {model}:",
|
|
|
|
description=f"HTTP {response.status_code}:\n```{error or '<no body>'}\n```",
|
|
|
|
colour=discord.Colour.red()
|
|
|
|
)
|
|
|
|
embed.set_footer(text="Using server {} ({})".format(host, try_hosts.get(host, "Other")))
|
|
|
|
return await msg.edit(embed=embed)
|
|
|
|
lines: dict[str, str] = {}
|
2023-11-10 22:38:47 +00:00
|
|
|
async for chunk in ollama_stream_reader(response):
|
2023-11-10 22:30:15 +00:00
|
|
|
if "total" in chunk and "completed" in chunk:
|
|
|
|
completed = chunk["completed"] or 1 # avoid division by zero
|
|
|
|
total = chunk["total"] or 1
|
2023-11-10 22:41:59 +00:00
|
|
|
percent = round(completed / total * 100)
|
2023-11-10 23:54:19 +00:00
|
|
|
if percent == 100 and completed != total:
|
|
|
|
percent = round(completed / total * 100, 2)
|
2023-11-11 20:18:54 +00:00
|
|
|
total_gigabytes = total / 1024 / 1024 / 1024
|
|
|
|
completed_gigabytes = completed / 1024 / 1024 / 1024
|
2023-11-11 21:56:45 +00:00
|
|
|
lines[chunk["status"]] = (f"{percent}% "
|
|
|
|
f"({completed_gigabytes:.2f}GB/{total_gigabytes:.2f}GB)")
|
2023-11-10 22:30:15 +00:00
|
|
|
else:
|
2023-11-12 22:40:25 +00:00
|
|
|
status = chunk.get("status", chunk.get("error", os.urandom(3).hex()))
|
|
|
|
lines[status] = status
|
2023-11-11 21:56:45 +00:00
|
|
|
|
|
|
|
embed.description = "\n".join(f"`{k}`: {v}" for k, v in lines.items())
|
|
|
|
if (time() - msg.created_at.timestamp()) >= 5:
|
|
|
|
await msg.edit(embed=embed)
|
|
|
|
embed.title = f"Downloaded {model}!"
|
|
|
|
embed.colour = discord.Colour.green()
|
|
|
|
await msg.edit(embed=embed)
|
2023-11-11 18:31:48 +00:00
|
|
|
while (await client.post("/show", json={"name": model})).status_code != 200:
|
|
|
|
await asyncio.sleep(5)
|
2023-11-10 22:30:15 +00:00
|
|
|
elif response.status_code != 200:
|
2023-11-11 18:31:48 +00:00
|
|
|
error = await response.aread()
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
|
|
|
title=f"Failed to download model {model}:",
|
|
|
|
description=f"HTTP {response.status_code}:\n```{error or '<no body>'}\n```",
|
|
|
|
colour=discord.Colour.red()
|
|
|
|
)
|
|
|
|
embed.set_footer(text="Using server {} ({})".format(host, try_hosts.get(host, "Other")))
|
|
|
|
return await msg.edit(embed=embed)
|
2023-11-10 22:30:15 +00:00
|
|
|
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
2023-11-11 19:05:17 +00:00
|
|
|
title=f"{model} says:",
|
2023-11-10 22:30:15 +00:00
|
|
|
description="",
|
|
|
|
colour=discord.Colour.blurple(),
|
2023-11-11 18:31:48 +00:00
|
|
|
timestamp=discord.utils.utcnow()
|
2023-11-10 22:30:15 +00:00
|
|
|
)
|
2023-11-11 21:56:45 +00:00
|
|
|
embed.set_footer(text=f"Powered by Ollama • {host} ({try_hosts.get(host, 'Other')})")
|
|
|
|
await msg.edit(embed=embed)
|
2023-11-10 22:30:15 +00:00
|
|
|
async with ctx.channel.typing():
|
2023-11-13 19:36:45 +00:00
|
|
|
payload = {
|
|
|
|
"model": model,
|
|
|
|
"prompt": query,
|
|
|
|
"format": "json",
|
|
|
|
"system": system_prompt,
|
|
|
|
"stream": True
|
|
|
|
}
|
|
|
|
if context:
|
|
|
|
payload["context"] = context
|
2023-11-10 22:30:15 +00:00
|
|
|
async with client.stream(
|
|
|
|
"POST",
|
|
|
|
"/generate",
|
2023-11-13 19:36:45 +00:00
|
|
|
json=payload,
|
2023-11-10 22:44:10 +00:00
|
|
|
timeout=None
|
2023-11-10 22:30:15 +00:00
|
|
|
) as response:
|
|
|
|
if response.status_code != 200:
|
2023-11-11 18:31:48 +00:00
|
|
|
error = await response.aread()
|
2023-11-11 21:56:45 +00:00
|
|
|
embed = discord.Embed(
|
|
|
|
title=f"Failed to generate response from {model}:",
|
|
|
|
description=f"HTTP {response.status_code}:\n```{error or '<no body>'}\n```",
|
|
|
|
colour=discord.Colour.red()
|
|
|
|
)
|
|
|
|
embed.set_footer(text="Using server {} ({})".format(host, try_hosts.get(host, "Other")))
|
|
|
|
return await msg.edit(embed=embed)
|
2023-11-11 18:42:33 +00:00
|
|
|
self.ollama_locks[msg] = asyncio.Event()
|
|
|
|
view = self.OllamaKillSwitchView(ctx, msg)
|
|
|
|
await msg.edit(view=view)
|
2023-11-10 22:38:47 +00:00
|
|
|
async for chunk in ollama_stream_reader(response):
|
2023-11-10 22:46:21 +00:00
|
|
|
if "done" not in chunk.keys() or "response" not in chunk.keys():
|
2023-11-10 22:30:15 +00:00
|
|
|
continue
|
|
|
|
else:
|
2023-11-10 22:38:47 +00:00
|
|
|
if chunk["done"] is True:
|
|
|
|
content = None
|
2023-11-11 21:56:45 +00:00
|
|
|
embed.remove_author()
|
|
|
|
else:
|
|
|
|
embed.set_author(
|
|
|
|
name=f"Generating response with {model}",
|
|
|
|
url=f"http://{host}",
|
|
|
|
icon_url="https://cdn.discordapp.com/emojis/1101463077586735174.gif"
|
|
|
|
)
|
|
|
|
embed.description += chunk["response"]
|
2023-11-10 23:48:05 +00:00
|
|
|
last_edit = msg.edited_at.timestamp() if msg.edited_at else msg.created_at.timestamp()
|
2023-11-10 22:30:15 +00:00
|
|
|
if (time() - last_edit) >= 5 or chunk["done"] is True:
|
2023-11-11 21:56:45 +00:00
|
|
|
await msg.edit(content=content, embed=embed, view=view)
|
2023-11-11 18:42:33 +00:00
|
|
|
if self.ollama_locks[msg].is_set():
|
2023-11-11 21:56:45 +00:00
|
|
|
embed.title = embed.title[:-1] + " (Aborted)"
|
|
|
|
embed.colour = discord.Colour.red()
|
|
|
|
return await msg.edit(embed=embed, view=None)
|
|
|
|
if len(embed.description) >= 4000:
|
|
|
|
embed.add_field(
|
2023-11-11 19:03:12 +00:00
|
|
|
name="Aborting early",
|
|
|
|
value="Output exceeded 4000 characters."
|
|
|
|
)
|
2023-11-11 21:56:45 +00:00
|
|
|
embed.title = embed.title[:-1] + " (Aborted)"
|
|
|
|
embed.colour = discord.Colour.red()
|
|
|
|
embed.description = embed.description[:4096]
|
2023-11-11 19:03:12 +00:00
|
|
|
break
|
2023-11-11 21:56:45 +00:00
|
|
|
else:
|
|
|
|
embed.colour = discord.Colour.green()
|
|
|
|
embed.remove_author()
|
2023-11-11 18:31:48 +00:00
|
|
|
|
|
|
|
def get_time_spent(nanoseconds: int) -> str:
|
|
|
|
hours, minutes, seconds = 0, 0, 0
|
2023-11-11 18:52:41 +00:00
|
|
|
seconds = nanoseconds / 1e9
|
2023-11-11 18:31:48 +00:00
|
|
|
if seconds >= 60:
|
|
|
|
minutes, seconds = divmod(seconds, 60)
|
|
|
|
if minutes >= 60:
|
|
|
|
hours, minutes = divmod(minutes, 60)
|
|
|
|
|
|
|
|
result = []
|
|
|
|
if seconds:
|
|
|
|
if seconds != 1:
|
|
|
|
label = "seconds"
|
|
|
|
else:
|
|
|
|
label = "second"
|
|
|
|
result.append(f"{round(seconds)} {label}")
|
|
|
|
if minutes:
|
|
|
|
if minutes != 1:
|
|
|
|
label = "minutes"
|
|
|
|
else:
|
|
|
|
label = "minute"
|
|
|
|
result.append(f"{round(minutes)} {label}")
|
|
|
|
if hours:
|
|
|
|
if hours != 1:
|
|
|
|
label = "hours"
|
|
|
|
else:
|
|
|
|
label = "hour"
|
|
|
|
result.append(f"{round(hours)} {label}")
|
|
|
|
return ", ".join(reversed(result))
|
|
|
|
|
2023-11-11 22:01:39 +00:00
|
|
|
total_time_spent = get_time_spent(chunk.get("total_duration", 999999999.0))
|
|
|
|
eval_time_spent = get_time_spent(chunk.get("eval_duration", 999999999.0))
|
|
|
|
load_time_spent = get_time_spent(chunk.get("load_duration", 999999999.0))
|
|
|
|
sample_time_sent = get_time_spent(chunk.get("sample_duration", 999999999.0))
|
|
|
|
prompt_eval_time_spent = get_time_spent(chunk.get("prompt_eval_duration", 999999999.0))
|
2023-11-13 19:29:44 +00:00
|
|
|
context: Optional[list[int]] = chunk.get("context")
|
|
|
|
# noinspection PyTypeChecker
|
|
|
|
if context:
|
|
|
|
context_json = json.dumps(context)
|
|
|
|
start = time()
|
|
|
|
context_json_compressed = await asyncio.to_thread(
|
2023-11-13 19:36:45 +00:00
|
|
|
functools.partial(zlib.compress, context_json.encode())
|
2023-11-13 19:29:44 +00:00
|
|
|
)
|
|
|
|
end = time()
|
2023-11-13 19:36:45 +00:00
|
|
|
compress_time_spent = format(round(end * 1000 - start * 1000), ",")
|
|
|
|
context: str = base64.b85encode(context_json_compressed).decode()
|
2023-11-13 19:29:44 +00:00
|
|
|
else:
|
|
|
|
compress_time_spent = "N/A"
|
|
|
|
context = None
|
2023-11-11 21:56:45 +00:00
|
|
|
value = ("* Total: {}\n"
|
|
|
|
"* Model load: {}\n"
|
|
|
|
"* Sample generation: {}\n"
|
|
|
|
"* Prompt eval: {}\n"
|
2023-11-13 19:29:44 +00:00
|
|
|
"* Response generation: {}\n"
|
|
|
|
"* Context compression: {} milliseconds").format(
|
2023-11-11 21:56:45 +00:00
|
|
|
total_time_spent,
|
|
|
|
load_time_spent,
|
|
|
|
sample_time_sent,
|
|
|
|
prompt_eval_time_spent,
|
2023-11-13 19:29:44 +00:00
|
|
|
eval_time_spent,
|
|
|
|
compress_time_spent
|
2023-11-11 21:56:45 +00:00
|
|
|
)
|
|
|
|
embed.add_field(
|
2023-11-11 18:31:48 +00:00
|
|
|
name="Timings",
|
2023-11-11 21:56:45 +00:00
|
|
|
value=value
|
2023-11-11 18:31:48 +00:00
|
|
|
)
|
2023-11-13 19:37:21 +00:00
|
|
|
await msg.edit(content=None, embed=embed, view=None)
|
2023-11-13 19:29:44 +00:00
|
|
|
if context:
|
2023-11-13 19:37:21 +00:00
|
|
|
await ctx.respond(
|
|
|
|
"Context:\n"
|
|
|
|
"```\n"
|
|
|
|
f"{context}\n"
|
|
|
|
"```",
|
|
|
|
ephemeral=True
|
2023-11-13 19:29:44 +00:00
|
|
|
)
|
2023-11-11 18:42:33 +00:00
|
|
|
self.ollama_locks.pop(msg, None)
|
2023-11-10 22:30:15 +00:00
|
|
|
|
2023-01-03 14:43:49 +00:00
|
|
|
|
2022-11-13 23:16:47 +00:00
|
|
|
def setup(bot):
|
|
|
|
bot.add_cog(OtherCog(bot))
|