From 7bf91cdf7eab1aaf55c328119fe0643b611610f0 Mon Sep 17 00:00:00 2001
From: Patrick_Pluto
Date: Fri, 20 Sep 2024 09:03:46 +0200
Subject: [PATCH] backend changes

---
 .gitignore                                   |   1 +
 py/.idea/.gitignore                          |   3 --
 .../inspectionProfiles/profiles_settings.xml |   6 ----
 py/.idea/misc.xml                            |  10 ------
 py/.idea/modules.xml                         |   8 -----
 py/.idea/py.iml                              |  13 -------
 py/.idea/vcs.xml                             |   6 ----
 py/__pycache__/ai.cpython-312.pyc            | Bin 0 -> 1769 bytes
 py/ai.py                                     |  32 ++++++++++++++++++
 py/api.py                                    |  18 +---------
 py/requirements.txt                          |   3 +-
 11 files changed, 36 insertions(+), 64 deletions(-)
 delete mode 100644 py/.idea/.gitignore
 delete mode 100644 py/.idea/inspectionProfiles/profiles_settings.xml
 delete mode 100644 py/.idea/misc.xml
 delete mode 100644 py/.idea/modules.xml
 delete mode 100644 py/.idea/py.iml
 delete mode 100644 py/.idea/vcs.xml
 create mode 100644 py/__pycache__/ai.cpython-312.pyc
 create mode 100644 py/ai.py

diff --git a/.gitignore b/.gitignore
index bb3b79a..ea5d9c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@
 venv/
 key.pem
 cert.pem
+api_key.txt
diff --git a/py/.idea/.gitignore b/py/.idea/.gitignore
deleted file mode 100644
index 26d3352..0000000
--- a/py/.idea/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
diff --git a/py/.idea/inspectionProfiles/profiles_settings.xml b/py/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2d..0000000
--- a/py/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
[6 deleted lines of IDE config XML not shown]
\ No newline at end of file
diff --git a/py/.idea/misc.xml b/py/.idea/misc.xml
deleted file mode 100644
index 3671ece..0000000
--- a/py/.idea/misc.xml
+++ /dev/null
@@ -1,10 +0,0 @@
[10 deleted lines of IDE config XML not shown]
\ No newline at end of file
diff --git a/py/.idea/modules.xml b/py/.idea/modules.xml
deleted file mode 100644
index 3a65488..0000000
--- a/py/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
[8 deleted lines of IDE config XML not shown]
\ No newline at end of file
diff --git a/py/.idea/py.iml b/py/.idea/py.iml
deleted file mode 100644
index 49f4c24..0000000
--- a/py/.idea/py.iml
+++ /dev/null
@@ -1,13 +0,0 @@
[13 deleted lines of IDE config XML not shown]
\ No newline at end of file
diff --git a/py/.idea/vcs.xml b/py/.idea/vcs.xml
deleted file mode 100644
index 6c0b863..0000000
--- a/py/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
[6 deleted lines of IDE config XML not shown]
\ No newline at end of file
diff --git a/py/__pycache__/ai.cpython-312.pyc b/py/__pycache__/ai.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..998a499725690d2957d619b87d4ab9b2bdfd3657
GIT binary patch
literal 1769
[base85-encoded binary data omitted]

literal 0
HcmV?d00001

diff --git a/py/ai.py b/py/ai.py
new file mode 100644
index 0000000..900b2ff
--- /dev/null
+++ b/py/ai.py
@@ -0,0 +1,32 @@
+from mistralai import Mistral
+import ollama
+
+
+class AI:
+    @staticmethod
+    def process_local(model, messages, return_class, access_token):
+        stream = ollama.chat(
+            model=model,
+            messages=messages,
+            stream=True,
+            options={"temperature": 0.8},
+        )
+
+        for chunk in stream:
+            print(chunk['message']['content'])
+            return_class.ai_response[access_token] += chunk['message']['content']
+
+    @staticmethod
+    def process_mistralai(model, messages, return_class, access_token):
+        with open("api_key.txt", 'r') as f:
+            api_key = f.read().strip()
+
+        client = Mistral(api_key=api_key)
+
+        stream_response = client.chat.stream(
+            model=model,
+            messages=messages
+        )
+
+        for chunk in stream_response:
+            return_class.ai_response[access_token] += chunk.data.choices[0].delta.content
diff --git a/py/api.py b/py/api.py
index 9cf2e63..b25e4d4 100644
--- a/py/api.py
+++ b/py/api.py
@@ -1,22 +1,7 @@
 from flask import Flask, request, jsonify
 from flask_cors import CORS
-import ollama
 import secrets
-
-
-class AI:
-    @staticmethod
-    def process_local(model, messages, return_class, access_token):
-        stream = ollama.chat(
-            model=model,
-            messages=messages,
-            stream=True,
-            options={"temperature": 0},
-        )
-
-        for chunk in stream:
-            print(chunk['message']['content'])
-            return_class.ai_response[access_token] += chunk['message']['content']
+from ai import AI
 
 
 class API:
@@ -58,4 +43,3 @@ class API:
 if __name__ == '__main__':
     api = API()
     api.run()
-
diff --git a/py/requirements.txt b/py/requirements.txt
index 1e95d6f..b4f811a 100644
--- a/py/requirements.txt
+++ b/py/requirements.txt
@@ -1,3 +1,4 @@
 flask
 flask-cors
-ollama
\ No newline at end of file
+ollama
+mistralai
\ No newline at end of file
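
For reference, a minimal sketch of how the new AI class might be exercised outside the Flask API. The FakeState container, the "demo-token" key, and the "llama3" model name are illustrative assumptions rather than anything in the patch, and the sketch assumes a local Ollama server with that model pulled.

# Minimal driver sketch for the AI class introduced in py/ai.py.
# Assumptions (not from the patch): an Ollama server is running
# locally with a model named "llama3" pulled, and FakeState stands
# in for the API object that owns the per-token ai_response dict.
from ai import AI


class FakeState:
    def __init__(self):
        # api.py accumulates streamed chunks per access token; this
        # mirrors that contract with a single hard-coded token.
        self.ai_response = {"demo-token": ""}


if __name__ == "__main__":
    state = FakeState()
    messages = [{"role": "user", "content": "Say hello in one sentence."}]

    # Streams chunks from the local model, appending each one to
    # state.ai_response["demo-token"], exactly as api.py consumes it.
    AI.process_local("llama3", messages, state, "demo-token")

    print("Full response:", state.ai_response["demo-token"])

The same contract drives process_mistralai, which reads its key from api_key.txt (now ignored via the .gitignore change). Depending on the SDK version, a stream's final chunk may carry a delta.content of None, so a guard before the concatenation may be worth adding.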