diff --git a/jimmy/cogs/chat.py b/jimmy/cogs/chat.py
index 2f75b0f..0a93258 100644
--- a/jimmy/cogs/chat.py
+++ b/jimmy/cogs/chat.py
@@ -63,7 +63,15 @@ class Chat(commands.Cog):
             self.server_locks[server.name] = asyncio.Lock()
         self.log = logging.getLogger(__name__)
 
-    @commands.slash_command()
+    ollama_group = discord.SlashCommandGroup(
+        name="ollama",
+        description="Commands related to ollama.",
+        guild_only=True,
+        max_concurrency=commands.MaxConcurrency(1, per=commands.BucketType.user, wait=False),
+        cooldown=commands.CooldownMapping(commands.Cooldown(1, 10), commands.BucketType.user)
+    )
+
+    @ollama_group.command()
     async def status(self, ctx: discord.ApplicationContext):
         """Checks the status on all servers."""
         await ctx.defer()
@@ -115,13 +123,38 @@ class Chat(commands.Cog):
             )
         await ctx.edit(embed=embed)
 
-    ollama_group = discord.SlashCommandGroup(
-        name="ollama",
-        description="Commands related to ollama.",
-        guild_only=True,
-        max_concurrency=commands.MaxConcurrency(1, per=commands.BucketType.user, wait=False),
-        cooldown=commands.CooldownMapping(commands.Cooldown(1, 10), commands.BucketType.user)
-    )
+    @ollama_group.command(name="server-info")
+    async def get_server_info(
+            self,
+            ctx: discord.ApplicationContext,
+            server: typing.Annotated[
+                str,
+                discord.Option(
+                    discord.SlashCommandOptionType.string,
+                    description="The server to use.",
+                    autocomplete=_ServerOptionAutocomplete,
+                    default=get_servers()[0].name
+                )
+            ]
+    ):
+        """Gets information on a given server"""
+        await ctx.defer()
+        server = get_server(server)
+        is_online = await server.is_online()
+        y = "\N{white heavy check mark}"
+        x = "\N{cross mark}"
+        t = {True: y, False: x}
+        rt = "VRAM" if server.gpu else "RAM"
+        lines = [
+            f"Name: {server.name!r}",
+            f"Base URL: {server.base_url!r}",
+            f"GPU Enabled: {t[server.gpu]}",
+            f"{rt}: {server.vram_gb:,} GB",
+            f"Default Model: {server.default_model!r}",
+            f"Is Online: {t[is_online]}"
+        ]
+        p = "```md\n" + "\n".join(lines) + "```"
+        return await ctx.respond(p)
 
     @ollama_group.command(name="chat")
     async def start_ollama_chat(
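Note: the patch references a _ServerOptionAutocomplete callback and get_server/get_servers helpers that are defined elsewhere in the module and are not shown here. A minimal sketch of what the autocomplete callback might look like under py-cord, assuming get_servers() returns objects with a .name attribute (the import path and helper shape are assumptions, not part of this patch):

    import discord

    # Import path is an assumption; the diff does not show where get_servers lives.
    from jimmy.config import get_servers


    async def _ServerOptionAutocomplete(ctx: discord.AutocompleteContext) -> list[str]:
        # Filter known server names by the user's partial input so the
        # "server" option of /ollama server-info can suggest valid values.
        query = (ctx.value or "").lower()
        return [server.name for server in get_servers() if query in server.name.lower()]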