interstellar_ai/py/ai.py

33 lines
922 B
Python
Raw Normal View History

2024-09-20 09:03:46 +02:00
from mistralai import Mistral
import ollama
class AI:
    """Stream chat completions into a shared response buffer.

    Both methods append incremental response text to
    ``return_class.ai_response[access_token]``, which callers are expected
    to have initialized to a string before invoking them.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a chat reply from a locally running Ollama model.

        Args:
            model: Name of the Ollama model to query.
            messages: Chat history in the ``[{"role": ..., "content": ...}]``
                format expected by ``ollama.chat``.
            return_class: Object holding the shared ``ai_response`` dict.
            access_token: Key identifying this session's response buffer.
        """
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.8},
        )
        # Each streamed chunk carries an incremental piece of the reply;
        # accumulate it so the caller can poll the growing response.
        # (Removed a leftover debug print of every chunk.)
        for chunk in stream:
            return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token):
        """Stream a chat reply from the Mistral AI API.

        Reads the API key from ``api_key.txt`` in the working directory.

        Args:
            model: Name of the Mistral model to query.
            messages: Chat history in Mistral's messages format.
            return_class: Object holding the shared ``ai_response`` dict.
            access_token: Key identifying this session's response buffer.
        """
        with open("api_key.txt", 'r') as f:
            api_key = f.read().strip()
        client = Mistral(api_key=api_key)
        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )
        for chunk in stream_response:
            content = chunk.data.choices[0].delta.content
            # Streamed chunks (notably the final one) may carry no delta
            # content; concatenating None to a str raises TypeError.
            if content is not None:
                return_class.ai_response[access_token] += content