Compare commits

...

7 commits

12 changed files with 195 additions and 219 deletions

1
.gitignore vendored
View file

@@ -41,3 +41,4 @@ venv/
key.pem
cert.pem
api_key.txt

3
py/.idea/.gitignore vendored
View file

@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml

View file

@@ -1,6 +0,0 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

View file

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
<component name="PyCharmProfessionalAdvertiser">
<option name="shown" value="true" />
</component>
</project>

View file

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/py.iml" filepath="$PROJECT_DIR$/.idea/py.iml" />
</modules>
</component>
</project>

View file

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PackageRequirementsSettings">
<option name="versionSpecifier" value="Greater or equal (&gt;=x.y.z)" />
</component>
</module>

View file

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

Binary file not shown.

36
py/ai.py Normal file
View file

@@ -0,0 +1,36 @@
from mistralai import Mistral
import ollama
class AI:
    """Streams chat completions from either a locally hosted Ollama model
    or the Mistral AI API, accumulating the generated text into
    ``return_class.ai_response[access_token]`` as chunks arrive.

    NOTE(review): ``return_class`` is presumably the API server instance
    holding a shared ``ai_response`` dict keyed by access token — confirm
    against the caller in py/api.py.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        # Stream tokens from a locally hosted Ollama model.
        token_stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.8},
        )
        # Reset any previous response for this token before accumulating.
        return_class.ai_response[access_token] = ""
        for part in token_stream:
            text = part['message']['content']
            print(text)  # debug echo of each streamed chunk
            return_class.ai_response[access_token] += text

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token):
        # Load the API key from disk; assumes api_key.txt exists in the
        # working directory — TODO confirm deployment layout.
        with open("api_key.txt", 'r') as key_file:
            api_key = key_file.read().strip()
        client = Mistral(api_key=api_key)
        response_stream = client.chat.stream(
            model=model,
            messages=messages
        )
        # Reset any previous response for this token before accumulating.
        return_class.ai_response[access_token] = ""
        for part in response_stream:
            return_class.ai_response[access_token] += part.data.choices[0].delta.content

View file

@@ -1,22 +1,7 @@
from flask import Flask, request, jsonify
from flask_cors import CORS
import ollama
import secrets
class AI:
@staticmethod
def process_local(model, messages, return_class, access_token):
stream = ollama.chat(
model=model,
messages=messages,
stream=True,
options={"temperature": 0},
)
for chunk in stream:
print(chunk['message']['content'])
return_class.ai_response[access_token] += chunk['message']['content']
from ai import AI
class API:
@@ -58,4 +43,3 @@ class API:
if __name__ == '__main__':
api = API()
api.run()

View file

@@ -1,3 +1,4 @@
flask
flask-cors
ollama
mistralai