from mistralai import Mistral
from openai import OpenAI
import google.generativeai as genai
import anthropic
import ollama


class AI:
    """Streams responses from several LLM providers into a shared,
    lock-protected dict (return_class.ai_response) keyed by access_token."""

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        # Stream a chat completion from a local Ollama model.
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.5},
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk["message"]["content"]

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token, api_key):
        client = Mistral(api_key=api_key)

        stream_response = client.chat.stream(model=model, messages=messages)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            # The final chunk may carry a None delta; guard before appending.
            content = chunk.data.choices[0].delta.content
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_openai(model, messages, return_class, access_token, api_key):
        client = OpenAI(api_key=api_key)

        stream_response = client.chat.completions.create(
            model=model, messages=messages, stream=True
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            # delta.content is None on role/stop chunks; skip those.
            content = chunk.choices[0].delta.content
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_anthropic(model, messages, return_class, access_token, api_key):
        client = anthropic.Anthropic(api_key=api_key)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        with client.messages.stream(
            max_tokens=1024,
            model=model,
            messages=messages,
        ) as stream:
            for text in stream.text_stream:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += text

    @staticmethod
    def process_google(model, messages, return_class, access_token, api_key):
        # Gemini takes the latest user message separately from the history.
        message = messages[-1]["content"]
        messages.pop()

        # Convert OpenAI-style messages to Gemini's format: "content"
        # becomes "parts" and the "assistant" role becomes "model".
        for msg in messages:
            msg["parts"] = msg.pop("content")
            if msg["role"] == "assistant":
                msg["role"] = "model"

        genai.configure(api_key=api_key)
        model = genai.GenerativeModel(model)
        chat = model.start_chat(history=messages)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        response = chat.send_message(message, stream=True)
        for chunk in response:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk.text
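

# A minimal usage sketch. The process_* methods assume a return_class object
# exposing an ai_response dict and an ai_response_lock; the ReturnStore shape,
# the "llama3" model name, and the polling loop below are illustrative
# assumptions, not part of the methods' contract.
if __name__ == "__main__":
    import threading
    import time

    class ReturnStore:
        ai_response = {}
        ai_response_lock = threading.Lock()

    token = "demo-token"
    worker = threading.Thread(
        target=AI.process_local,
        args=("llama3", [{"role": "user", "content": "Hello!"}], ReturnStore, token),
    )
    worker.start()

    # Poll the shared dict while the worker streams chunks into it.
    while worker.is_alive():
        with ReturnStore.ai_response_lock:
            partial = ReturnStore.ai_response.get(token, "")
        print(partial)
        time.sleep(0.5)
    worker.join()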