import requests
import json
from gradio_client import Client
from mistralai import Mistral

class API:

    # This method processes a message via the Mistral AI API.
    @staticmethod
    def process_text_mistralai(prompt, model, system):
        # Read the API key from the first line of token.txt.
        with open("token.txt", "r") as f:
            api_key = f.readline().strip()

        client = Mistral(api_key=api_key)

        chat_response = client.chat.complete(
            model=model,
            messages=[
                # The system prompt goes first so it frames the user message.
                {
                    "role": "system",
                    "content": system,
                },
                {
                    "role": "user",
                    "content": prompt,
                },
            ]
        )
        return chat_response.choices[0].message.content

    # This method processes a message via a Gradio Space.
    @staticmethod
    def process_text_gradio(prompt, model, system):
        client = Client(model)
        # These parameters match the default Gradio ChatInterface endpoint.
        result = client.predict(
            message=prompt,
            system_message=system,
            max_tokens=512,
            temperature=0.7,
            top_p=0.95,
            api_name="/chat"
        )
        return result

    # This method processes a message via a local Ollama server.
    @staticmethod
    def process_text_local(prompt, model, system):
        ollama_url = "http://localhost:11434"

        # Ollama streams its answer as newline-delimited JSON objects.
        response = requests.post(
            f"{ollama_url}/api/generate",
            json={"model": model, "prompt": prompt, "system": system},
        )

        if response.status_code == 200:
            response_data = []
            for line in response.iter_lines():
                if not line:
                    continue  # skip empty keep-alive lines
                line_raw = json.loads(line.decode("utf-8"))
                response_data.append(line_raw["response"])

            # Join the streamed fragments into the complete answer.
            final_response = "".join(response_data)
            return final_response
        else:
            return "Error: " + response.text

    # This method dispatches a message to the backend selected by the numeric
    # model choice (1-3: local Ollama, 4: Gradio, 5-6: Mistral AI).
    def send_message(self, message, model, system):
        if model == 1:
            answer = self.process_text_local(message, "phi3.5", system)
        elif model == 2:
            answer = self.process_text_local(message, "gemma2:9b", system)
        elif model == 3:
            answer = self.process_text_local(message, "codegemma:2b", system)
        elif model == 4:
            answer = self.process_text_gradio(message, "PatrickPluto/InterstellarAIChatbot", system)
        elif model == 5:
            answer = self.process_text_mistralai(message, "mistral-large-latest", system)
        elif model == 6:
            answer = self.process_text_mistralai(message, "codestral-latest", system)
        else:
            return "Invalid choice"
        return answer
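

# Minimal usage sketch (assumes token.txt holds a Mistral API key and that an
# Ollama server is running on localhost:11434 for the local models):
if __name__ == "__main__":
    api = API()
    print(api.send_message("Hello, who are you?", 1, "You are a helpful assistant."))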