forked from React-Group/interstellar_ai
Compare commits
7 commits
ce635d3089
...
4d54c81056
Author | SHA1 | Date | |
---|---|---|---|
4d54c81056 | |||
|
eb8fef13ff | ||
|
7bf91cdf7e | ||
042304133c | |||
267e06a4cb | |||
ed6e812b6c | |||
eceef58ce3 |
12 changed files with 195 additions and 219 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -41,3 +41,4 @@ venv/
|
||||||
|
|
||||||
key.pem
|
key.pem
|
||||||
cert.pem
|
cert.pem
|
||||||
|
api_key.txt
|
||||||
|
|
3
py/.idea/.gitignore
vendored
3
py/.idea/.gitignore
vendored
|
@ -1,3 +0,0 @@
|
||||||
# Default ignored files
|
|
||||||
/shelf/
|
|
||||||
/workspace.xml
|
|
|
@ -1,6 +0,0 @@
|
||||||
<component name="InspectionProjectProfileManager">
|
|
||||||
<settings>
|
|
||||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
|
||||||
<version value="1.0" />
|
|
||||||
</settings>
|
|
||||||
</component>
|
|
|
@ -1,10 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="Black">
|
|
||||||
<option name="sdkName" value="Python 3.12" />
|
|
||||||
</component>
|
|
||||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
|
|
||||||
<component name="PyCharmProfessionalAdvertiser">
|
|
||||||
<option name="shown" value="true" />
|
|
||||||
</component>
|
|
||||||
</project>
|
|
|
@ -1,8 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="ProjectModuleManager">
|
|
||||||
<modules>
|
|
||||||
<module fileurl="file://$PROJECT_DIR$/.idea/py.iml" filepath="$PROJECT_DIR$/.idea/py.iml" />
|
|
||||||
</modules>
|
|
||||||
</component>
|
|
||||||
</project>
|
|
|
@ -1,13 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<module type="PYTHON_MODULE" version="4">
|
|
||||||
<component name="NewModuleRootManager">
|
|
||||||
<content url="file://$MODULE_DIR$">
|
|
||||||
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
|
||||||
</content>
|
|
||||||
<orderEntry type="inheritedJdk" />
|
|
||||||
<orderEntry type="sourceFolder" forTests="false" />
|
|
||||||
</component>
|
|
||||||
<component name="PackageRequirementsSettings">
|
|
||||||
<option name="versionSpecifier" value="Greater or equal (>=x.y.z)" />
|
|
||||||
</component>
|
|
||||||
</module>
|
|
|
@ -1,6 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="VcsDirectoryMappings">
|
|
||||||
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
|
|
||||||
</component>
|
|
||||||
</project>
|
|
BIN
py/__pycache__/ai.cpython-312.pyc
Normal file
BIN
py/__pycache__/ai.cpython-312.pyc
Normal file
Binary file not shown.
36
py/ai.py
Normal file
36
py/ai.py
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
from mistralai import Mistral
import ollama


class AI:
    """Streams chat completions from either a local Ollama model or the
    Mistral AI API, accumulating the text into
    ``return_class.ai_response[access_token]``.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a chat completion from a locally-running Ollama model.

        Args:
            model: Name of the Ollama model to query.
            messages: Chat history in Ollama's message-dict format.
            return_class: Object whose ``ai_response`` dict receives the
                accumulated response text, keyed by ``access_token``.
            access_token: Key identifying this request's response slot.
        """
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.8},
        )

        # Reset any previous response for this token before streaming.
        return_class.ai_response[access_token] = ""

        for chunk in stream:
            print(chunk['message']['content'])
            return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token):
        """Stream a chat completion from the Mistral AI API.

        Reads the API key from ``api_key.txt`` in the working directory.

        Args:
            model: Name of the Mistral model to query.
            messages: Chat history in Mistral's message-dict format.
            return_class: Object whose ``ai_response`` dict receives the
                accumulated response text, keyed by ``access_token``.
            access_token: Key identifying this request's response slot.
        """
        with open("api_key.txt", 'r') as f:
            api_key = f.read().strip()

        client = Mistral(api_key=api_key)

        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )

        # Reset any previous response for this token before streaming.
        return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            # Fix: the final streamed chunk from the Mistral client can carry
            # delta.content == None (e.g. when only finish_reason is set);
            # the original `+=` would raise TypeError on str + None.
            content = chunk.data.choices[0].delta.content
            if content is not None:
                return_class.ai_response[access_token] += content
|
18
py/api.py
18
py/api.py
|
@ -1,22 +1,7 @@
|
||||||
from flask import Flask, request, jsonify
|
from flask import Flask, request, jsonify
|
||||||
from flask_cors import CORS
|
from flask_cors import CORS
|
||||||
import ollama
|
|
||||||
import secrets
|
import secrets
|
||||||
|
from ai import AI
|
||||||
|
|
||||||
class AI:
|
|
||||||
@staticmethod
|
|
||||||
def process_local(model, messages, return_class, access_token):
|
|
||||||
stream = ollama.chat(
|
|
||||||
model=model,
|
|
||||||
messages=messages,
|
|
||||||
stream=True,
|
|
||||||
options={"temperature": 0},
|
|
||||||
)
|
|
||||||
|
|
||||||
for chunk in stream:
|
|
||||||
print(chunk['message']['content'])
|
|
||||||
return_class.ai_response[access_token] += chunk['message']['content']
|
|
||||||
|
|
||||||
|
|
||||||
class API:
|
class API:
|
||||||
|
@ -58,4 +43,3 @@ class API:
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
api = API()
|
api = API()
|
||||||
api.run()
|
api.run()
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
flask
|
flask
|
||||||
flask-cors
|
flask-cors
|
||||||
ollama
|
ollama
|
||||||
|
mistralai
|
Loading…
Reference in a new issue