Uhmm fixed
This commit is contained in:
parent c1d1bf246c
commit 7010be84ea
5 changed files with 40 additions and 13 deletions
py/ai.py (29 changes)
@@ -1,5 +1,6 @@
 from mistralai import Mistral
 from openai import OpenAI
+import google.generativeai as genai
 import anthropic
 import ollama
 
@@ -72,3 +73,31 @@ class AI:
             for text in stream.text_stream:
                 with return_class.ai_response_lock:
                     return_class.ai_response[access_token] += text
+
+    @staticmethod
+    def process_google(model, messages, return_class, access_token, api_key):
+
+        message = messages[-1]['content']
+        messages.pop()
+
+        system = None
+        if messages and messages[0]['role'] == 'system':
+            system = messages[0]['content']
+            messages.pop(0)
+
+        for msg in messages:
+            msg['parts'] = msg.pop('content')
+
+        for msg in messages:
+            if msg['role'] == 'assistant':
+                msg['role'] = 'model'
+
+        genai.configure(api_key=api_key)
+        model = genai.GenerativeModel("gemini-1.5-flash")
+        chat = model.start_chat(
+            system_instruction=system,
+            history=[
+                {"role": "user", "parts": "Hello"},
+                {"role": "model", "parts": "Great to meet you. What would you like to know?"},
+            ]
+        )
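
For reference, here is a minimal standalone sketch of the Gemini chat flow that the new process_google method builds toward, assuming the google-generativeai Python SDK. The hunk above ends right after constructing the chat session, so the placeholder API key, system prompt, and the send_message call below are assumptions and not part of the commit; note also that in this SDK, system_instruction is a constructor argument of genai.GenerativeModel rather than a start_chat() argument.

# Minimal sketch, not the commit's code: the google-generativeai chat flow
# that process_google sets up. "YOUR_API_KEY", the system prompt, and the
# final send_message call are placeholders/assumptions.
import google.generativeai as genai

genai.configure(api_key="YOUR_API_KEY")  # hypothetical key; the diff passes api_key in from the caller

# In this SDK, system_instruction belongs on GenerativeModel, not start_chat().
model = genai.GenerativeModel(
    "gemini-1.5-flash",
    system_instruction="You are a helpful assistant.",  # placeholder system prompt
)

chat = model.start_chat(
    history=[
        {"role": "user", "parts": "Hello"},
        {"role": "model", "parts": "Great to meet you. What would you like to know?"},
    ]
)

# Stream the reply chunk by chunk, mirroring how the Anthropic branch above
# appends streamed text to return_class.ai_response under a lock.
for chunk in chat.send_message("What can the Gemini API do?", stream=True):
    print(chunk.text, end="", flush=True)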