uh oh
parent 908272edcf
commit cb8d545764

2 changed files with 20 additions and 20 deletions

py/api.py: 10 changed lines
|
@@ -6,11 +6,12 @@ import secrets
 class AI:
     @staticmethod
-    def process_local(model, message, system, return_class, access_token):
+    def process_local(model, messages, return_class, access_token):
         stream = ollama.chat(
             model=model,
-            messages=[{'role': 'user', 'content': message}, {'role': 'system', 'content': system}],
+            messages=messages,
             stream=True,
             options={"temperature": 0},
         )

         for chunk in stream:
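With this change, process_local no longer assembles the message list itself; the caller passes the whole conversation, including any system prompt, in Ollama's chat format. A minimal sketch of the shape it now expects (the model name and prompt text are placeholders, not values from this commit):

# Hypothetical messages list in Ollama's chat format: dicts with 'role' and 'content'.
messages = [
    {'role': 'system', 'content': 'You are a helpful assistant.'},  # was the separate system argument
    {'role': 'user', 'content': 'Hello!'},                          # was the separate message argument
]

# Roughly what process_local does with it now (sketch, not the full method):
import ollama

stream = ollama.chat(
    model='llama3',        # placeholder model name
    messages=messages,     # passed through unchanged
    stream=True,
    options={"temperature": 0},
)
for chunk in stream:
    print(chunk['message']['content'], end='', flush=True)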
@@ -35,13 +36,12 @@ class API:
         @self.app.route('/interstellar/api/ai_send', methods=['POST'])
         def send_ai():
             data = request.get_json()
-            message = data.get('message')
+            messages = data.get('messages')
             ai_model = data.get('ai_model')
-            system_prompt = data.get('system_prompt')
             access_token = data.get('access_token')
             if access_token not in self.ai_response:
                 return jsonify({'status': 401, 'error': 'Invalid access token'})
-            self.ai.process_local(ai_model, message, system_prompt, self, access_token)
+            self.ai.process_local(ai_model, messages, self, access_token)
             return jsonify({'status': 200})

         @self.app.route('/interstellar/api/ai_get', methods=['GET'])
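On the client side, the request body for /interstellar/api/ai_send changes accordingly: the separate 'message' and 'system_prompt' fields are replaced by a single 'messages' list. A hedged sketch of what a caller might send now (host, port, token value, and model name are made-up illustrations, not values from this repository):

# Hypothetical client call against the updated endpoint.
import requests

payload = {
    'ai_model': 'llama3',            # placeholder model name
    'access_token': 'example-token', # placeholder token
    'messages': [                    # replaces 'message' + 'system_prompt'
        {'role': 'system', 'content': 'You are a helpful assistant.'},
        {'role': 'user', 'content': 'Hello!'},
    ],
}
resp = requests.post('http://localhost:5000/interstellar/api/ai_send', json=payload)
print(resp.json())  # {'status': 200} on success, {'status': 401, ...} for a bad token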