Use orca-mini:7b instead of 3b
This commit is contained in:
parent
33319feb86
commit
531c0e9d4e
1 changed file with 4 additions and 4 deletions
|
@ -878,11 +878,11 @@ class Ollama(commands.Cog):
|
||||||
return await ctx.respond("All servers are offline. Please try again later.", ephemeral=True)
|
return await ctx.respond("All servers are offline. Please try again later.", ephemeral=True)
|
||||||
|
|
||||||
client = OllamaClient(CONFIG["ollama"][server]["base_url"])
|
client = OllamaClient(CONFIG["ollama"][server]["base_url"])
|
||||||
if not await client.has_model_named("orca-mini", "3b"):
|
if not await client.has_model_named("orca-mini", "7b"):
|
||||||
with client.download_model("orca-mini", "3b") as handler:
|
with client.download_model("orca-mini", "7b") as handler:
|
||||||
async for _ in handler:
|
async for _ in handler:
|
||||||
self.log.info(
|
self.log.info(
|
||||||
"Downloading orca-mini:3b on server %r - %s (%.2f%%)", server, handler.status, handler.percent
|
"Downloading orca-mini:7b on server %r - %s (%.2f%%)", server, handler.status, handler.percent
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.lock.locked():
|
if self.lock.locked():
|
||||||
|
@ -896,7 +896,7 @@ class Ollama(commands.Cog):
|
||||||
await ctx.respond(embed=embed, ephemeral=True)
|
await ctx.respond(embed=embed, ephemeral=True)
|
||||||
last_edit = time.time()
|
last_edit = time.time()
|
||||||
msg = None
|
msg = None
|
||||||
with client.new_chat("orca-mini:3b", messages) as handler:
|
with client.new_chat("orca-mini:7b", messages) as handler:
|
||||||
self.log.info("New chat connection established.")
|
self.log.info("New chat connection established.")
|
||||||
async for ln in handler:
|
async for ln in handler:
|
||||||
done = ln.get("done") is True
|
done = ln.get("done") is True
|
||||||
|
|
Loading…
Reference in a new issue