forked from React-Group/interstellar_ai

commit cb8d545764 (parent 908272edcf): "uh oh"

2 changed files with 20 additions and 20 deletions
@@ -10,30 +10,30 @@ onmessage = function (e) {
 };
 
     switch (functionName) {
         case "getAccess":
-            axios.get('https://localhost:5000/interstellar/api/ai_create')
+            axios.get('https://127.0.0.1:5000/interstellar/api/ai_create')
                 .then(Response => {
                     postMessage(Response.data.access_token)
                 }).catch(error => {
                     console.error("Error with GET Token request:", error)
                 })
             break
         case "postRequest":
-            axios.post('https://localhost:5000/interstellar/api/ai_send', data)
+            axios.post('https://127.0.0.1:5000/interstellar/api/ai_send', data)
                 .then(Response => {
                     postMessage(Response.data)
                 }).catch(error => {
                     console.error("Error:", error)
                 })
             break
         case "getResponse":
-            axios.get('https://localhost:5000/interstellar/api/ai_get?access_token='+access_token)
+            axios.get('https://127.0.0.1:5000/interstellar/api/ai_get?access_token=' + access_token)
                 .then(Response => {
                     postMessage(Response.data.response)
                 }).catch(error => {
                     console.error("Error with GET response request:", error)
                 })
             break
     }
 }
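For context, the three worker cases above talk to the Flask API defined in py/api.py. Below is a minimal, hypothetical Python sketch of the same create -> send -> poll flow, not code from this repository: it assumes the `requests` library and a self-signed local certificate (hence `verify=False`), and the model name and message content are placeholders. The endpoint paths and response fields are the ones visible in the diff.

# Illustrative sketch only: mirrors the worker's getAccess / postRequest / getResponse cases.
# Assumes the `requests` package and a self-signed local certificate (verify=False).
import requests

BASE = "https://127.0.0.1:5000/interstellar/api"

# getAccess: obtain an access token (the worker reads Response.data.access_token)
access_token = requests.get(f"{BASE}/ai_create", verify=False).json()["access_token"]

# postRequest: send the chat payload; field names follow send_ai() in py/api.py
payload = {
    "messages": [{"role": "user", "content": "Hello"}],  # placeholder content
    "ai_model": "llama3.2",                              # hypothetical model name
    "access_token": access_token,
}
requests.post(f"{BASE}/ai_send", json=payload, verify=False)

# getResponse: fetch the generated text (the worker reads Response.data.response)
reply = requests.get(f"{BASE}/ai_get", params={"access_token": access_token}, verify=False)
print(reply.json()["response"])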
py/api.py (10 changed lines)
@@ -6,11 +6,12 @@ import secrets
 
 class AI:
     @staticmethod
-    def process_local(model, message, system, return_class, access_token):
+    def process_local(model, messages, return_class, access_token):
         stream = ollama.chat(
             model=model,
-            messages=[{'role': 'user', 'content': message}, {'role': 'system', 'content': system}],
+            messages=messages,
             stream=True,
             options={"temperature": 0},
         )
 
         for chunk in stream:
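The signature change above means callers now supply the chat history themselves instead of the server assembling a fixed two-entry list from `message` and `system`. A minimal sketch of an equivalent `messages` value, using the same role/content shape (and the same ordering) the old code hard-coded; the prompt strings are placeholders:

# Placeholder prompts; only the list shape matters here.
user_message = "Summarise this commit."
system_prompt = "You are a helpful assistant."

# Same structure (and ordering) the old process_local built internally;
# this list is now passed straight through to ollama.chat(messages=...).
messages = [
    {'role': 'user', 'content': user_message},
    {'role': 'system', 'content': system_prompt},
]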
@@ -35,13 +36,12 @@ class API:
         @self.app.route('/interstellar/api/ai_send', methods=['POST'])
         def send_ai():
             data = request.get_json()
-            message = data.get('message')
+            messages = data.get('messages')
             ai_model = data.get('ai_model')
-            system_prompt = data.get('system_prompt')
             access_token = data.get('access_token')
             if access_token not in self.ai_response:
                 return jsonify({'status': 401, 'error': 'Invalid access token'})
-            self.ai.process_local(ai_model, message, system_prompt, self, access_token)
+            self.ai.process_local(ai_model, messages, self, access_token)
             return jsonify({'status': 200})
 
         @self.app.route('/interstellar/api/ai_get', methods=['GET'])
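On the wire, the body accepted by `/interstellar/api/ai_send` changes accordingly: the separate `message` and `system_prompt` fields collapse into one `messages` list, while `ai_model` and `access_token` stay as they were. A hedged sketch of a call with an unissued token, again assuming `requests` and a self-signed local certificate; note that `jsonify` here sets the `status` field inside the JSON body, not the HTTP status code:

import requests

body = {
    "messages": [{"role": "user", "content": "ping"}],   # placeholder content
    "ai_model": "llama3.2",                              # hypothetical model name
    "access_token": "not-a-token-the-server-issued",
}
reply = requests.post("https://127.0.0.1:5000/interstellar/api/ai_send",
                      json=body, verify=False).json()
# Unknown token -> {'status': 401, 'error': 'Invalid access token'};
# a valid token -> {'status': 200}, with the answer fetched later via /ai_get.
print(reply)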