# interstellar_ai/py/ai.py

from mistralai import Mistral
from openai import OpenAI
import anthropic
import ollama
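
# Shared-state contract assumed by every process_* method below (inferred from
# usage; the original file does not document it): `return_class` must expose
#   - ai_response_lock: a threading.Lock guarding concurrent access, and
#   - ai_response: a dict mapping access tokens to accumulated response text.
# Each method resets the entry for `access_token`, then appends streamed
# chunks to it under the lock so another thread can poll partial output.
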
class AI:
    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a chat completion from a local Ollama model."""
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.5},
        )

        # Reset the accumulated response for this access token.
        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token, api_key):
        """Stream a chat completion from the Mistral AI API."""
        client = Mistral(api_key=api_key)
        stream_response = client.chat.stream(
            model=model,
            messages=messages,
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            content = chunk.data.choices[0].delta.content
            # Skip chunks without text content rather than concatenating None.
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_openai(model, messages, return_class, access_token, api_key):
        """Stream a chat completion from the OpenAI API."""
        client = OpenAI(api_key=api_key)
        stream_response = client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True,
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            content = chunk.choices[0].delta.content
            # The final streamed chunk carries delta.content = None; skip it
            # to avoid a TypeError on concatenation.
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_anthropic(model, messages, return_class, access_token, api_key):
        """Stream a chat completion from the Anthropic API."""
        client = anthropic.Anthropic(api_key=api_key)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        with client.messages.stream(
            max_tokens=1024,
            model=model,
            messages=messages,
        ) as stream:
            for text in stream.text_stream:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += text
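

# Minimal usage sketch, not part of the original module: `ReturnClass` is a
# hypothetical stand-in for the shared-state object the caller provides, and
# "llama3.2" is only an example model name. Any object satisfying the
# lock/dict contract described above the class works the same way.
if __name__ == "__main__":
    import threading

    class ReturnClass:
        def __init__(self):
            self.ai_response_lock = threading.Lock()
            self.ai_response = {}

    store = ReturnClass()
    # Run the streaming call in a worker thread, as a server would, so the
    # main thread could poll store.ai_response for partial output.
    worker = threading.Thread(
        target=AI.process_local,
        args=("llama3.2", [{"role": "user", "content": "Hello!"}], store, "token-1"),
    )
    worker.start()
    worker.join()
    print(store.ai_response["token-1"])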