Merge pull request 'main' (#9) from React-Group/interstellar_ai:main into main

Reviewed-on: https://interstellardevelopment.org/code/code/YasinOnm08/interstellar_ai/pulls/9
Author: YasinOnm08
Date: 2024-09-19 16:53:04 +02:00
Commit: ce635d3089
4 changed files with 102 additions and 60 deletions


@@ -14,7 +14,7 @@ const Header: React.FC<HeaderProps> = ({ onViewChange, showDivs, toggleDivs, sho
             <header>
                 <ul>
                     <li>
-                        <button onClick={() => onViewChange('AI')} className="header-button">
+                        <button onClick={() => onViewChange('AI')} className="header-button header-logo">
                             <img src="/img/logo.png" alt="logo" className="header-logo" />
                         </button>
                     </li>


@@ -10,30 +10,30 @@ onmessage = function (e) {
     };
     switch (functionName) {
         case "getAccess":
-            axios.get('https://localhost:5000/interstellar/api/ai_create')
+            axios.get('https://127.0.0.1:5000/interstellar/api/ai_create')
                 .then(Response => {
                     postMessage(Response.data.access_token)
                 }).catch(error => {
                     console.error("Error with GET Token request:", error)
                 })
             break
         case "postRequest":
-            axios.post('https://localhost:5000/interstellar/api/ai_send', data)
+            axios.post('https://127.0.0.1:5000/interstellar/api/ai_send', data)
                 .then(Response => {
                     postMessage(Response.data)
                 }).catch(error => {
                     console.error("Error:", error)
                 })
             break
         case "getResponse":
-            axios.get('https://localhost:5000/interstellar/api/ai_get?access_token='+access_token)
+            axios.get('https://127.0.0.1:5000/interstellar/api/ai_get?access_token=' + access_token)
                 .then(Response => {
                     postMessage(Response.data.response)
                 }).catch(error => {
                     console.error("Error with GET response request:", error)
                 })
             break
     }
 }
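For orientation, here is a minimal caller-side sketch of how the main thread might drive this worker. The worker file name, the bundler-style new URL(...) construction, and the exact message shape are assumptions inferred from the handler above, not something this diff shows.

// Hypothetical usage from the React side; "ai_worker.js" and the
// { functionName } message fields are assumptions, not part of the diff.
const worker = new Worker(new URL("./ai_worker.js", import.meta.url));

worker.onmessage = (event: MessageEvent) => {
    // "getAccess" replies with the access token, "getResponse" with the
    // AI response text, "postRequest" with the raw response body.
    console.log("worker replied:", event.data);
};

// Ask the worker to fetch an access token from the backend.
worker.postMessage({ functionName: "getAccess" });

Presumably the point of routing these axios calls through a worker is to keep the request handling and polling off the main thread while the UI stays responsive.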


@@ -1,51 +1,93 @@
 /* Responsive behavior - applies only on smaller screens */
 @media (max-width: 1200px) {
+    *{
+        margin: 0;
+        padding: 0;
+    }
+    /* Header styles */
+    header {
+        flex-direction: column;
+        align-items: center;
+        position: relative;
+        top: 0;
+        left: 0;
+        margin-top: 5px;
+        padding-top: 0;
+        width: 100%;
+    }
+    /* Container styles */
+    .container {
+        display: flex;
+        flex-direction: column;
+        align-items: center;
+        width: 100vw;
+        overflow: hidden;
+        margin: 0;
+        padding: 0;
+    }
     header li{
+        display: flex;
+        flex-direction: column;
+        justify-content: center;
+        margin: 0;
+    }
+    header li button {
+        margin: 2px;
+        margin-bottom: 0;
+    }
+    header li img {
+        height: 1.5em;
+        vertical-align: middle;
+        margin-top: 10px;
+    }
+    /* Left panel styles */
+    .left-panel {
+        display: none; /* Initially hidden */
+        min-width: 90%; /* Takes full width when visible */
+        margin: 0;
+    }
+    .left-panel.visible {
         display: block;
     }
-    .container {
-        display: flex;
-        justify-content: center; /* Centers horizontally */
-        width: 90vw;
-        margin-top: 10vh;
-    }
-    .left-panel {
-        display: none; /* Initially hidden */
-        min-width: 90%; /* Takes full width when shown */
-        margin: 0;
-    }
-    .conversation-container {
-        width: 90%;
+    /* Conversation container styles */
+    .conversation-container {
+        width: 90%;
+        height: 90%;
     }
-    .left-panel.visible {
-        display: block;
+    .conversation-container.collapsed {
+        width: 0;
+        padding: 0;
+        border: none;
+        overflow: hidden;
     }
-    .conversation-container.collapsed {
-        display: none;
+    .conversation-container.expanded {
+        width: 100%;
     }
-    .conversation-container.expanded {
-        width: 100%;
-    }
-    .grid {
+    /* Grid styles */
+    .grid {
         grid-template-columns: 1fr; /* One item per line */
     }
-    .model-box {
-        max-width: none; /* Remove max-width constraint */
-        margin: 0 auto; /* Center each model-box */
+    /* Model box styles */
+    .model-box {
+        max-width: none; /* Remove max-width */
+        margin: 0 auto; /* Center each model-box */
     }
-    .input {
+    /* Input styles */
+    .input {
         grid-column: 1 / -1;
-        flex-direction: row;
         gap: 10px;
         padding: 0.5em;
         margin: 0 auto;
@@ -53,16 +95,16 @@
         width: 90%;
     }
     .input input {
         font-size: 1em; /* Adjust font size */
         max-width: 65%;
         margin-right: 0;
     }
     .input button {
         height: 40px; /* Adjust button height */
         width: 40px; /* Adjust button width */
         font-size: 1.2em; /* Adjust button font size */
         margin: auto;
     }
 }
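The collapse behaviour above is driven entirely by the visible, collapsed, and expanded class names. Below is a hypothetical sketch of how a React component might toggle them; the component, state, and handler names are assumptions, only the CSS class names come from the stylesheet.

// Hypothetical toggle wiring; only the class names are taken from the diff above.
import React, { useState } from "react";

const ChatLayout: React.FC = () => {
    const [showLeftPanel, setShowLeftPanel] = useState(false);

    return (
        <div className="container">
            <button onClick={() => setShowLeftPanel(v => !v)}>Toggle panel</button>
            <div className={`left-panel ${showLeftPanel ? "visible" : ""}`}>History</div>
            <div className={`conversation-container ${showLeftPanel ? "collapsed" : "expanded"}`}>
                Conversation
            </div>
        </div>
    );
};

export default ChatLayout;

Unlike the old display: none, the new width: 0 plus overflow: hidden collapse keeps the conversation container in the document flow, which also makes a width transition possible.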


@@ -6,11 +6,12 @@ import secrets
 class AI:
     @staticmethod
-    def process_local(model, message, system, return_class, access_token):
+    def process_local(model, messages, return_class, access_token):
         stream = ollama.chat(
             model=model,
-            messages=[{'role': 'user', 'content': message}, {'role': 'system', 'content': system}],
+            messages=messages,
             stream=True,
+            options={"temperature": 0},
         )
         for chunk in stream:
@@ -35,13 +36,12 @@ class API:
         @self.app.route('/interstellar/api/ai_send', methods=['POST'])
         def send_ai():
             data = request.get_json()
-            message = data.get('message')
+            messages = data.get('messages')
             ai_model = data.get('ai_model')
-            system_prompt = data.get('system_prompt')
             access_token = data.get('access_token')
             if access_token not in self.ai_response:
                 return jsonify({'status': 401, 'error': 'Invalid access token'})
-            self.ai.process_local(ai_model, message, system_prompt, self, access_token)
+            self.ai.process_local(ai_model, messages, self, access_token)
             return jsonify({'status': 200})
         @self.app.route('/interstellar/api/ai_get', methods=['GET'])
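With this change, /interstellar/api/ai_send accepts a full messages array (the chat history in the role/content format that ollama.chat expects) instead of a single message plus system_prompt, and the backend pins the temperature to 0. Below is a hypothetical client-side payload; the model name and message contents are examples, only the field names (ai_model, access_token, messages) appear in the diff.

// Hypothetical request to the updated endpoint (TypeScript sketch).
import axios from "axios";

const payload = {
    ai_model: "llama3",                      // example model name (assumption)
    access_token: "<token from ai_create>",  // obtained via the getAccess path
    messages: [
        // The system prompt now travels inside the messages array
        // instead of a separate system_prompt field.
        { role: "system", content: "You are a helpful assistant." },
        { role: "user", content: "Hello!" },
    ],
};

axios.post("https://127.0.0.1:5000/interstellar/api/ai_send", payload)
    .then(res => console.log(res.data))      // expected: { status: 200 }
    .catch(err => console.error("Error:", err));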