from mistralai import Mistral

import ollama


class AI:
    """Streaming chat-completion helpers.

    Each method runs a streaming chat request and incrementally appends the
    generated text into ``return_class.ai_response[access_token]``, so a
    concurrent reader (e.g. a web handler polling that dict) can observe
    partial output while generation is still in progress.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a chat completion from a local Ollama model.

        Args:
            model: Name of the Ollama model to run.
            messages: Chat history in the ``[{"role": ..., "content": ...}]``
                format expected by ``ollama.chat``.
            return_class: Object exposing an ``ai_response`` dict that is
                mutated in place as output streams in.
            access_token: Key under which the accumulated response is stored.
        """
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.5},
        )
        # Reset the slot first so stale output from a previous request under
        # the same token is never visible to readers.
        return_class.ai_response[access_token] = ""
        for chunk in stream:
            return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token):
        """Stream a chat completion from the Mistral AI API.

        Reads the API key from ``api_key.txt`` in the working directory on
        every call, then streams the response into
        ``return_class.ai_response[access_token]``.

        Args:
            model: Mistral model identifier (e.g. ``"mistral-small-latest"``).
            messages: Chat history in Mistral's messages format.
            return_class: Object exposing an ``ai_response`` dict that is
                mutated in place as output streams in.
            access_token: Key under which the accumulated response is stored.

        Raises:
            FileNotFoundError: If ``api_key.txt`` does not exist.
        """
        with open("api_key.txt", 'r', encoding="utf-8") as f:
            api_key = f.read().strip()

        client = Mistral(api_key=api_key)
        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )

        return_class.ai_response[access_token] = ""
        for chunk in stream_response:
            content = chunk.data.choices[0].delta.content
            # Streamed deltas may carry content=None (e.g. the final chunk
            # that only reports finish_reason); concatenating None would
            # raise TypeError and kill the stream mid-response.
            if content:
                return_class.ai_response[access_token] += content