From 8b21954443d11f136859ca7f748f9a0d2abee4de Mon Sep 17 00:00:00 2001
From: nexy7574
Date: Fri, 7 Jun 2024 20:56:23 +0100
Subject: [PATCH] Add a preview to truths

---
 src/cogs/ollama.py | 159 ++++++++++++++++++++++++---------------------
 1 file changed, 85 insertions(+), 74 deletions(-)

diff --git a/src/cogs/ollama.py b/src/cogs/ollama.py
index 2a9bc0e..5f08e60 100644
--- a/src/cogs/ollama.py
+++ b/src/cogs/ollama.py
@@ -1053,14 +1053,13 @@ class Ollama(commands.Cog):
                 ctx.author, system
             )
-        async with httpx.AsyncClient() as client:
-            r = CONFIG["truth"].get("api", "https://bots.nexy7574.co.uk/jimmy/v2/api")
-            username = CONFIG["truth"].get("username", "1")
-            password = CONFIG["truth"].get("password", "2")
-            response = await client.get(
-                r + "/truths",
+        r = CONFIG["truth"].get("api", "https://bots.nexy7574.co.uk/jimmy/v2/api")
+        username = CONFIG["truth"].get("username", "1")
+        password = CONFIG["truth"].get("password", "2")
+        async with httpx.AsyncClient(base_url=r, auth=(username, password)) as http_client:
+            response = await http_client.get(
+                "/truths",
                 timeout=60,
-                auth=(username, password),
             )
             response.raise_for_status()
             truths = response.json()
@@ -1082,77 +1081,89 @@ class Ollama(commands.Cog):
                     save=False
                 )
             )
-            self.history.add_message(thread_id, "user", "Generate a new truth post.")
+        self.history.add_message(thread_id, "user", "Generate a new truth post.")

-            tried = set()
-            for _ in range(10):
-                server = self.next_server(tried)
-                if await self.check_server(CONFIG["ollama"][server]["base_url"]):
-                    break
-                tried.add(server)
-            else:
-                return await ctx.reply("All servers are offline. Please try again later.", delete_after=300)
+        tried = set()
+        for _ in range(10):
+            server = self.next_server(tried)
+            if await self.check_server(CONFIG["ollama"][server]["base_url"]):
+                break
+            tried.add(server)
+        else:
+            return await ctx.reply("All servers are offline. Please try again later.", delete_after=300)

-            client = OllamaClient(CONFIG["ollama"][server]["base_url"])
-            async with self.servers[server]:
-                if not await client.has_model_named("llama2-uncensored", "7b-chat"):
-                    with client.download_model("llama2-uncensored", "7b-chat") as handler:
-                        await handler.flatten()
+        client = OllamaClient(CONFIG["ollama"][server]["base_url"])
+        async with self.servers[server]:
+            if not await client.has_model_named("llama2-uncensored", "7b-chat"):
+                with client.download_model("llama2-uncensored", "7b-chat") as handler:
+                    await handler.flatten()

-                embed = discord.Embed(
-                    title="New Truth!",
-                    description="",
-                    colour=0x6559FF
-                )
-                msg = await ctx.reply(embed=embed)
-                last_edit = time.time()
-                messages = self.history.get_history(thread_id)
-                with client.new_chat("llama2-uncensored:7b-chat", messages) as handler:
-                    async for ln in handler:
-                        embed.description += ln["message"]["content"]
-                        if len(embed.description) >= 4000:
-                            break
-                        if (time.time() - last_edit) >= 2.5:
-                            await msg.edit(embed=embed)
-                            last_edit = time.time()
-
-                similar = {}
-                for truth in truths:
-                    _ratio = fuzz.ratio(truth.content, embed.description)
-                    if truth.content == embed.description:
-                        embed.add_field(
-                            name="Repeated truth :(",
-                            value="This truth was already truthed. Shit AI."
-                        )
-                    elif _ratio >= 70:
-                        similar[truth.id] = _ratio
-
-                if similar:
-                    if len(similar) > 1:
-                        lns = []
-                        for truth_id, _ratio in similar.items():
-                            lns.append(f"* `{truth_id}`: {_ratio:.2f}%")
-                        embed.add_field(
-                            name="Possibly repeated truth",
-                            value="This truth was similar to the following existing ones:\n" + "\n".join(lns),
-                            inline=False
-                        )
-                    else:
-                        truth_id = tuple(similar)[0]
-                        _ratio = similar[truth_id]
-                        embed.add_field(
-                            name="Possibly repeated truth",
-                            value=f"This truth was {_ratio:.2f}% similar to `{truth_id}`."
-                        )
-
-                embed.set_footer(
-                    text="Finished generating truth based off of {:,} messages, using server {!r} | {!s}".format(
-                        len(messages) - 2,
-                        server,
-                        thread_id
+            embed = discord.Embed(
+                title="New Truth!",
+                description="",
+                colour=0x6559FF
             )
-                )
-                await msg.edit(embed=embed)
+            msg = await ctx.reply(embed=embed)
+            last_edit = time.time()
+            messages = self.history.get_history(thread_id)
+            with client.new_chat("llama2-uncensored:7b-chat", messages) as handler:
+                async for ln in handler:
+                    embed.description += ln["message"]["content"]
+                    if len(embed.description) >= 4000:
+                        break
+                    if (time.time() - last_edit) >= 2.5:
+                        await msg.edit(embed=embed)
+                        last_edit = time.time()
+
+            similar = {}
+            for truth in truths:
+                _ratio = fuzz.ratio(truth.content, embed.description)
+                if truth.content == embed.description:
+                    embed.add_field(
+                        name="Repeated truth :(",
+                        value="This truth was already truthed. Shit AI."
+                    )
+                elif _ratio >= 70:
+                    similar[truth.id] = _ratio
+
+            if similar:
+                if len(similar) > 1:
+                    lns = []
+                    keys = sorted(similar.keys(), key=lambda k: similar[k], reverse=True)
+                    for truth_id in keys:
+                        _ratio = similar[truth_id]
+                        truth = discord.utils.get(truths, id=truth_id)
+                        first_line = truth.content.splitlines()[0]
+                        preview = discord.utils.escape_markdown(textwrap.shorten(first_line, 100))
+                        lns.append(f"* `{truth_id}`: {_ratio}% - `{preview}`")
+                    if len(lns) > 5:
+                        lc = len(lns) - 5
+                        lns = lns[:5]
+                        lns.append(f"*... and {lc} more*")
+                    embed.add_field(
+                        name="Possibly repeated truth",
+                        value="This truth was similar to the following existing ones:\n" + "\n".join(lns),
+                        inline=False
+                    )
+                else:
+                    truth_id = tuple(similar)[0]
+                    _ratio = similar[truth_id]
+                    truth = discord.utils.get(truths, id=truth_id)
+                    first_line = truth.content.splitlines()[0]
+                    preview = discord.utils.escape_markdown(textwrap.shorten(first_line, 512))
+                    embed.add_field(
+                        name="Possibly repeated truth",
+                        value=f"This truth was {_ratio}% similar to `{truth_id}`.\n>>> {preview}"
+                    )
+
+            embed.set_footer(
+                text="Finished generating truth based off of {:,} messages, using server {!r} | {!s}".format(
+                    len(messages) - 2,
+                    server,
+                    thread_id
+                )
+            )
+            await msg.edit(embed=embed)

     @commands.command()
     @commands.guild_only()