Fixes and Multithreading

This commit is contained in:
Patrick_Pluto 2024-09-23 11:01:39 +02:00
parent 1d1705ebfa
commit a8f9ba27bd
3 changed files with 44 additions and 13 deletions

View file

@@ -1,4 +1,5 @@
from mistralai import Mistral
from openai import OpenAI
import ollama
@@ -12,15 +13,15 @@ class AI:
options={"temperature": 0.5},
)
return_class.ai_response[access_token] = ""
with return_class.ai_response_lock:
return_class.ai_response[access_token] = ""
for chunk in stream:
return_class.ai_response[access_token] += chunk['message']['content']
with return_class.ai_response_lock:
return_class.ai_response[access_token] += chunk['message']['content']
@staticmethod
def process_mistralai(model, messages, return_class, access_token):
with open("api_key.txt", 'r') as f:
api_key = f.read().strip()
def process_mistralai(model, messages, return_class, access_token, api_key):
client = Mistral(api_key=api_key)
@@ -29,7 +30,26 @@ class AI:
messages=messages
)
return_class.ai_response[access_token] = ""
with return_class.ai_response_lock:
return_class.ai_response[access_token] = ""
for chunk in stream_response:
return_class.ai_response[access_token] += chunk.data.choices[0].delta.content
with return_class.ai_response_lock:
return_class.ai_response[access_token] += chunk.data.choices[0].delta.content
@staticmethod
def process_openai(model, messages, return_class, access_token, api_key):
    """Stream a chat completion from the OpenAI API into the shared response map.

    Accumulates streamed text under ``return_class.ai_response[access_token]``,
    guarding every read-modify-write with ``return_class.ai_response_lock`` so
    concurrent worker threads don't interleave partial updates.

    Args:
        model: OpenAI model identifier (e.g. ``"gpt-4o-mini"``).
        messages: Chat history in OpenAI message-dict format.
        return_class: Object holding ``ai_response`` (dict) and
            ``ai_response_lock`` (threading lock) shared across workers.
        access_token: Key identifying this request's slot in ``ai_response``.
        api_key: OpenAI API key used to construct the client.
    """
    client = OpenAI(api_key=api_key)
    stream_response = client.chat.completions.create(
        model=model,
        messages=messages,
        # stream=True is required: without it the call returns a single
        # ChatCompletion object, and the chunk loop below would fail.
        stream=True,
    )
    with return_class.ai_response_lock:
        return_class.ai_response[access_token] = ""
    for chunk in stream_response:
        delta = chunk.choices[0].delta.content
        # The role-only first chunk and the final chunk carry content=None;
        # appending None would raise TypeError, so skip empty deltas.
        if delta:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += delta