forked from React-Group/interstellar_ai
More models
This commit is contained in:
parent
d0f27295d7
commit
83e0614bdb
3 changed files with 34 additions and 2 deletions
23
py/ai.py
23
py/ai.py
|
@@ -1,5 +1,6 @@
|
|||
from mistralai import Mistral
|
||||
from openai import OpenAI
|
||||
import anthropic
|
||||
import ollama
|
||||
|
||||
|
||||
|
@@ -44,7 +45,8 @@ class AI:
|
|||
|
||||
stream_response = client.chat.completions.create(
|
||||
model=model,
|
||||
messages=messages
|
||||
messages=messages,
|
||||
stream=True
|
||||
)
|
||||
|
||||
with return_class.ai_response_lock:
|
||||
|
@@ -52,4 +54,21 @@ class AI:
|
|||
|
||||
for chunk in stream_response:
|
||||
with return_class.ai_response_lock:
|
||||
return_class.ai_response[access_token] += chunk.choices[0].delta.content
|
||||
return_class.ai_response[access_token] += chunk.choices[0].delta.content
|
||||
|
||||
@staticmethod
def process_anthropic(model, messages, return_class, access_token, api_key):
    """Stream an Anthropic chat completion into the shared response buffer.

    Clears the slot for *access_token* in ``return_class.ai_response``,
    then appends each streamed text fragment to it under
    ``return_class.ai_response_lock`` so readers can poll partial output.

    Args:
        model: Anthropic model identifier passed straight to the API.
        messages: Conversation history in the Anthropic messages format.
        return_class: Object holding ``ai_response`` (dict) and
            ``ai_response_lock`` (lock) shared with the caller.
        access_token: Key identifying this request's slot in the buffer.
        api_key: Anthropic API key used to construct the client.
    """
    client = anthropic.Anthropic(api_key=api_key)

    # Reset this token's slot before new content starts streaming in.
    with return_class.ai_response_lock:
        return_class.ai_response[access_token] = ""

    stream_kwargs = dict(
        max_tokens=1024,
        model=model,
        messages=messages,
    )
    with client.messages.stream(**stream_kwargs) as stream:
        for text in stream.text_stream:
            # Lock per fragment so concurrent readers see a consistent
            # (if partial) string at any point during the stream.
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += text
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue