from mistralai import Mistral
from openai import OpenAI
import ollama


class AI:
    @staticmethod
    def process_local(model, messages, return_class, access_token):
        # Stream a response from a locally hosted Ollama model and
        # accumulate it in the shared response buffer under the lock.
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.5},
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""
        for chunk in stream:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token, api_key):
        # Stream a response from the Mistral AI API.
        client = Mistral(api_key=api_key)
        stream_response = client.chat.stream(
            model=model,
            messages=messages,
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""
        for chunk in stream_response:
            with return_class.ai_response_lock:
                # A chunk's delta may carry no content; guard against None.
                return_class.ai_response[access_token] += chunk.data.choices[0].delta.content or ""

    @staticmethod
    def process_openai(model, messages, return_class, access_token, api_key):
        # Stream a response from the OpenAI API. stream=True is required here,
        # since the chunks are consumed incrementally below.
        client = OpenAI(api_key=api_key)
        stream_response = client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True,
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""
        for chunk in stream_response:
            with return_class.ai_response_lock:
                # delta.content is None on the final chunk; guard against None.
                return_class.ai_response[access_token] += chunk.choices[0].delta.content or ""
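

# A minimal usage sketch, not part of the module above. Assumptions: the
# `SharedResponses` class, the "llama3" model name, and the "token-1" access
# token are hypothetical; any object exposing an `ai_response_lock` lock and
# an `ai_response` dict works as `return_class`. Each process_* method blocks
# while it streams, so callers typically run it on a worker thread and read
# `ai_response[access_token]` under the lock for partial output.
if __name__ == "__main__":
    import threading

    class SharedResponses:
        # Shared buffer the process_* methods write into under the lock.
        ai_response_lock = threading.Lock()
        ai_response = {}

    shared = SharedResponses()
    worker = threading.Thread(
        target=AI.process_local,
        args=("llama3", [{"role": "user", "content": "Hello!"}], shared, "token-1"),
    )
    worker.start()
    worker.join()

    with shared.ai_response_lock:
        print(shared.ai_response["token-1"])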