forked from React-Group/interstellar_ai
backend changes
This commit is contained in:
parent
042304133c
commit
7bf91cdf7e
11 changed files with 36 additions and 64 deletions
32
py/ai.py
Normal file
32
py/ai.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
from mistralai import Mistral
|
||||
import ollama
|
||||
|
||||
|
||||
class AI:
    """Dispatch chat requests to a local (ollama) or remote (Mistral) model.

    Each method streams the completion and appends every text chunk to
    ``return_class.ai_response[access_token]``, which callers are expected
    to have initialized to a string before invoking these methods.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a chat completion from a locally served ollama model.

        Args:
            model: Name of the ollama model to query.
            messages: Chat history in the ollama messages format.
            return_class: Object whose ``ai_response`` dict accumulates output.
            access_token: Key into ``return_class.ai_response`` for this session.
        """
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.8},
        )

        for chunk in stream:
            # Fetch the chunk text once instead of indexing twice.
            content = chunk['message']['content']
            print(content)  # debug echo of streamed tokens to stdout
            return_class.ai_response[access_token] += content

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token):
        """Stream a chat completion from the Mistral API.

        Reads the API key from ``api_key.txt`` on every call.

        Args:
            model: Name of the Mistral model to query.
            messages: Chat history in the Mistral messages format.
            return_class: Object whose ``ai_response`` dict accumulates output.
            access_token: Key into ``return_class.ai_response`` for this session.
        """
        with open("api_key.txt", 'r', encoding="utf-8") as f:
            api_key = f.read().strip()

        client = Mistral(api_key=api_key)

        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )

        for chunk in stream_response:
            # Terminal streaming chunks can carry a None delta.content;
            # concatenating None would raise TypeError, so guard first.
            content = chunk.data.choices[0].delta.content
            if content:
                return_class.ai_response[access_token] += content
|
Loading…
Add table
Add a link
Reference in a new issue