diff --git a/.gitignore b/.gitignore
index bb3b79a..ea5d9c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@ venv/
key.pem
cert.pem
+api_key.txt
diff --git a/py/.idea/.gitignore b/py/.idea/.gitignore
deleted file mode 100644
index 26d3352..0000000
--- a/py/.idea/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
diff --git a/py/.idea/inspectionProfiles/profiles_settings.xml b/py/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2d..0000000
--- a/py/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/py/.idea/misc.xml b/py/.idea/misc.xml
deleted file mode 100644
index 3671ece..0000000
--- a/py/.idea/misc.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/py/.idea/modules.xml b/py/.idea/modules.xml
deleted file mode 100644
index 3a65488..0000000
--- a/py/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/py/.idea/py.iml b/py/.idea/py.iml
deleted file mode 100644
index 49f4c24..0000000
--- a/py/.idea/py.iml
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/py/.idea/vcs.xml b/py/.idea/vcs.xml
deleted file mode 100644
index 6c0b863..0000000
--- a/py/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/py/__pycache__/ai.cpython-312.pyc b/py/__pycache__/ai.cpython-312.pyc
new file mode 100644
index 0000000..998a499
Binary files /dev/null and b/py/__pycache__/ai.cpython-312.pyc differ
diff --git a/py/ai.py b/py/ai.py
new file mode 100644
index 0000000..900b2ff
--- /dev/null
+++ b/py/ai.py
@@ -0,0 +1,32 @@
+from mistralai import Mistral
+import ollama
+
+
+class AI:
+ @staticmethod
+ def process_local(model, messages, return_class, access_token):
+ stream = ollama.chat(
+ model=model,
+ messages=messages,
+ stream=True,
+ options={"temperature": 0.8},
+ )
+
+ for chunk in stream:
+ print(chunk['message']['content'])
+ return_class.ai_response[access_token] += chunk['message']['content']
+
+ @staticmethod
+ def process_mistralai(model, messages, return_class, access_token):
+ with open("api_key.txt", 'r') as f:
+ api_key = f.read().strip()
+
+ client = Mistral(api_key=api_key)
+
+ stream_response = client.chat.stream(
+ model=model,
+ messages=messages
+ )
+
+ for chunk in stream_response:
+            return_class.ai_response[access_token] += chunk.data.choices[0].delta.content or ""
diff --git a/py/api.py b/py/api.py
index 9cf2e63..b25e4d4 100644
--- a/py/api.py
+++ b/py/api.py
@@ -1,22 +1,7 @@
from flask import Flask, request, jsonify
from flask_cors import CORS
-import ollama
import secrets
-
-
-class AI:
- @staticmethod
- def process_local(model, messages, return_class, access_token):
- stream = ollama.chat(
- model=model,
- messages=messages,
- stream=True,
- options={"temperature": 0},
- )
-
- for chunk in stream:
- print(chunk['message']['content'])
- return_class.ai_response[access_token] += chunk['message']['content']
+from ai import AI
class API:
@@ -58,4 +43,3 @@ class API:
if __name__ == '__main__':
api = API()
api.run()
-
diff --git a/py/requirements.txt b/py/requirements.txt
index 1e95d6f..b4f811a 100644
--- a/py/requirements.txt
+++ b/py/requirements.txt
@@ -1,3 +1,4 @@
flask
flask-cors
-ollama
\ No newline at end of file
+ollama
+mistralai
\ No newline at end of file