ai-virtual-assistant/py/api.py

import requests
import json
from gradio_client import Client
from mistralai import Mistral


class API:
    # Query the Mistral AI cloud API; the key is read from token.txt.
    @staticmethod
    def process_text_mistralai(prompt, model, system):
        with open("token.txt", "r") as f:
            api_key = f.readline().strip()

        client = Mistral(api_key=api_key)

        # Send the system message first so it frames the user prompt.
        chat_response = client.chat.complete(
            model=model,
            messages=[
                {
                    "role": "system",
                    "content": system,
                },
                {
                    "role": "user",
                    "content": prompt,
                },
            ],
        )
        return chat_response.choices[0].message.content
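
    # Usage sketch (hypothetical prompt and system strings; the model name is
    # one of those wired up in send_message below):
    #   API.process_text_mistralai("Hi!", "open-mistral-7b", "Answer briefly.")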

    # Query a hosted Gradio chat app through its /chat endpoint.
    @staticmethod
    def process_text_gradio(prompt, model, system):
        client = Client(model)
        result = client.predict(
            message=prompt,
            system_message=system,
            max_tokens=512,
            temperature=0.7,
            top_p=0.95,
            api_name="/chat",
        )
        return result
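
    # Note: gradio_client.Client accepts a Hugging Face Space id, such as the
    # "PatrickPluto/InterstellarAIChatbot" Space used by send_message below.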

    # Query a model served by a local Ollama instance.
    @staticmethod
    def process_text_local(prompt, model, system):
        ollama_url = "http://localhost:11434"
        response = requests.post(
            f"{ollama_url}/api/generate",
            json={"model": model, "prompt": prompt, "system": system},
        )
        if response.status_code == 200:
            # /api/generate streams one JSON object per line; collect the
            # "response" fragment from each and join them into the full answer.
            response_data = []
            for line in response.iter_lines():
                line_decoded = line.decode("utf-8")
                line_raw = json.loads(line_decoded)
                response_data.append(line_raw["response"])
            final_response = "".join(response_data)
            return final_response
        else:
            return "Error: " + response.text

    # Route a message to the backend selected by the model number.
    def send_message(self, message, model, system):
        if model == 1:
            answer = self.process_text_local(message, "phi3.5", system)
        elif model == 2:
            answer = self.process_text_local(message, "gemma2:9b", system)
        elif model == 3:
            answer = self.process_text_local(message, "codegemma:2b", system)
        elif model == 4:
            answer = self.process_text_gradio(message, "PatrickPluto/InterstellarAIChatbot", system)
        elif model == 5:
            answer = self.process_text_mistralai(message, "open-mistral-7b", system)
        elif model == 6:
            answer = self.process_text_mistralai(message, "codestral-latest", system)
        else:
            return "Invalid choice"
        return answer
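

# Minimal usage sketch, not part of the original file. It assumes an Ollama
# server is running on localhost:11434 with the phi3.5 model pulled; the
# prompt and system strings are invented for illustration.
if __name__ == "__main__":
    api = API()
    reply = api.send_message("What can you do?", 1, "You are a helpful assistant.")
    print(reply)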