forked from React-Group/interstellar_ai
Compare commits
4 commits
e50a5259ad
...
e73b49c4e4
Author | SHA1 | Date | |
---|---|---|---|
e73b49c4e4 | |||
|
fcc147f8ae | ||
|
89cc832a6b | ||
|
7b63b35d6b |
10 changed files with 106 additions and 1 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -35,4 +35,6 @@ yarn-error.log*
|
|||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
|
||||
api_key.txt
|
||||
|
||||
.idea/
|
||||
venv/
|
||||
|
|
3
py/.idea/.gitignore
vendored
Normal file
3
py/.idea/.gitignore
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
6
py/.idea/inspectionProfiles/profiles_settings.xml
Normal file
6
py/.idea/inspectionProfiles/profiles_settings.xml
Normal file
|
@ -0,0 +1,6 @@
|
|||
<component name="InspectionProjectProfileManager">
|
||||
<settings>
|
||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||
<version value="1.0" />
|
||||
</settings>
|
||||
</component>
|
10
py/.idea/misc.xml
Normal file
10
py/.idea/misc.xml
Normal file
|
@ -0,0 +1,10 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.12" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
|
||||
<component name="PyCharmProfessionalAdvertiser">
|
||||
<option name="shown" value="true" />
|
||||
</component>
|
||||
</project>
|
8
py/.idea/modules.xml
Normal file
8
py/.idea/modules.xml
Normal file
|
@ -0,0 +1,8 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/py.iml" filepath="$PROJECT_DIR$/.idea/py.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
13
py/.idea/py.iml
Normal file
13
py/.idea/py.iml
Normal file
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
<component name="PackageRequirementsSettings">
|
||||
<option name="versionSpecifier" value="Greater or equal (>=x.y.z)" />
|
||||
</component>
|
||||
</module>
|
6
py/.idea/vcs.xml
Normal file
6
py/.idea/vcs.xml
Normal file
|
@ -0,0 +1,6 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
|
||||
</component>
|
||||
</project>
|
51
py/api.py
Normal file
51
py/api.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
from flask import Flask, request, jsonify
|
||||
import ollama
|
||||
|
||||
|
||||
class AI:
    """Thin wrapper around locally hosted ollama chat models."""

    @staticmethod
    def process_local(model, message, system, return_class, access_token):
        """Stream a chat completion from a local ollama model.

        Each streamed chunk is appended to
        ``return_class.ai_response[access_token]`` so a polling endpoint
        can read the partial reply while generation is still running.

        Args:
            model: Name of the ollama model to query.
            message: The user's prompt text.
            system: The system prompt text.
            return_class: Object carrying the shared ``ai_response`` list
                (the API instance).
            access_token: Index into ``return_class.ai_response`` for this
                session.
        """
        stream = ollama.chat(
            model=model,
            # Fix: the system message must precede the user message —
            # chat models apply role messages in order, so a trailing
            # system prompt would not properly condition the reply.
            messages=[
                {'role': 'system', 'content': system},
                {'role': 'user', 'content': message},
            ],
            stream=True,
        )

        for chunk in stream:
            # Echo to the console for debugging, then accumulate for polling.
            print(chunk['message']['content'])
            return_class.ai_response[access_token] += chunk['message']['content']
|
||||
|
||||
|
||||
class API:
    """Flask HTTP API exposing local AI chat via three endpoints.

    Sessions are identified by an integer "access token", which is simply
    the index of the session's accumulated reply in ``self.ai_response``.
    """

    def __init__(self):
        self.app = Flask(__name__)
        # ai_response[token] accumulates the streamed reply for that session.
        self.ai_response = []
        self.ai = AI()

    def run(self):
        """Register all routes and start the Flask dev server (blocking)."""

        @self.app.route('/interstellar/api/ai_create', methods=['GET'])
        def create_ai():
            # New session: its index in ai_response serves as its token.
            self.ai_response.append("")
            return jsonify({'status': 200, 'access_token': len(self.ai_response) - 1})

        @self.app.route('/interstellar/api/ai_send', methods=['POST'])
        def send_ai():
            data = request.get_json()
            message = data.get('message')
            ai_model = data.get('ai_model')
            system_prompt = data.get('system_prompt')
            access_token = data.get('access_token')
            # Fix: validate the token instead of crashing with an
            # unhandled TypeError/IndexError deep inside process_local.
            if not isinstance(access_token, int) or \
                    not 0 <= access_token < len(self.ai_response):
                return jsonify({'status': 404, 'error': 'invalid access_token'})
            # NOTE: this blocks until streaming completes; clients are
            # expected to poll /ai_get meanwhile from another request.
            self.ai.process_local(ai_model, message, system_prompt, self, access_token)
            return jsonify({'status': 200})

        @self.app.route('/interstellar/api/ai_get', methods=['GET'])
        def get_ai():
            token = request.args.get('access_token')
            # Fix: a missing, non-numeric, or out-of-range token previously
            # raised TypeError/ValueError/IndexError (HTTP 500).
            try:
                response = self.ai_response[int(token)]
            except (TypeError, ValueError, IndexError):
                return jsonify({'status': 404, 'error': 'invalid access_token'})
            return jsonify({'status': 200, 'response': response})

        # debug=True is fine for local development only — do not deploy as-is.
        self.app.run(debug=True)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point: build the API and start the blocking dev server.
    API().run()
|
4
py/install.sh
Normal file
4
py/install.sh
Normal file
|
@ -0,0 +1,4 @@
|
|||
#!/bin/bash
# Create a local virtual environment and install the Python dependencies.
#
# Fix: abort immediately if any step fails (set -e) — previously, if
# `python -m venv` failed, pip would install the requirements into the
# system interpreter instead of the venv.
set -e

python -m venv venv
source venv/bin/activate
pip install -r requirements.txt
deactivate
|
2
py/requirements.txt
Normal file
2
py/requirements.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
flask
|
||||
ollama
|
Loading…
Reference in a new issue