main #28

Merged
sageTheDm merged 14 commits from React-Group/interstellar_ai:main into main 2024-09-30 12:53:09 +02:00
22 changed files with 344 additions and 205 deletions

View file

@ -2,7 +2,7 @@
import React, { use, useEffect, useRef, useState } from "react";
import ConversationFrontend from "../components/ConversationFrontend";
import InputFrontend from "../components/InputFrontend";
import VoiceSend from "./voice_backend"
import { sendToVoiceRecognition } from "./voice_backend"
import { AudioRecorder } from "./AudioRecorder";
import axios from "axios";
import { resolve } from "path";
@ -11,41 +11,54 @@ import { fetchFile, toBlobURL } from "@ffmpeg/util"
const InputOutputBackend: React.FC = () => {
// # variables
type Message = {
role: string
content: string
}
/* Variables for System-prompt */
const [preferredCurrency, setPreferredCurrency] = useState<string | null>("")
const [preferredLanguage, setPreferredLanguage] = useState<string | null>("")
const [timeFormat, setTimeFormat] = useState<string | null>("")
const [preferredMeasurement, setPreferredMeasurement] = useState<string | null>("")
const [timeZone, setTimeZone] = useState<string | null>("")
const [dateFormat, setDateFormat] = useState<string | null>("")
const [preferredCurrency, setPreferredCurrency] = useState<string | null>(null);
const [preferredLanguage, setPreferredLanguage] = useState<string | null>(null);
const [timeFormat, setTimeFormat] = useState<string | null>(null);
const [preferredMeasurement, setPreferredMeasurement] = useState<string | null>(null);
const [timeZone, setTimeZone] = useState<string | null>(null);
const [dateFormat, setDateFormat] = useState<string | null>(null);
const [messages, setMessages] = useState<Message[]>([]);
useEffect(() => {
setPreferredCurrency(localStorage.getItem("preferredCurrency"))
setPreferredLanguage(localStorage.getItem("preferredLanguage"))
setTimeFormat(localStorage.getItem("timeFormat"))
setPreferredMeasurement(localStorage.getItem("preferredMeasurement"))
setTimeZone(localStorage.getItem("timeZone"))
setDateFormat(localStorage.getItem("dateFormat"))
})
setPreferredCurrency(localStorage.getItem("preferredCurrency"));
setPreferredLanguage(localStorage.getItem("preferredLanguage"));
setTimeFormat(localStorage.getItem("timeFormat"));
setPreferredMeasurement(localStorage.getItem("preferredMeasurement"));
setTimeZone(localStorage.getItem("timeZone"));
setDateFormat(localStorage.getItem("dateFormat"));
}, []);
useEffect(() => {
if (preferredCurrency && preferredLanguage && timeFormat && dateFormat && preferredMeasurement && timeZone) {
setMessages([
{
role: "system",
content: `You are in the timezone: ${timeZone}.
You use the time format ${timeFormat}.
You use the date format ${dateFormat} for all references of dates.
You use the ${preferredMeasurement} system.
You use the currency ${preferredCurrency}.
You will only answer in the language (you will receive the country code) ${preferredLanguage}.
But in the case the user specifically states to answer in another language, do that. Speaking in
another language is not stating you should answer in that language.
Additionally, under no circumstances translate your answer into multiple languages.`,
},
{ role: "assistant", content: "Hello! How can I help you?" },
]);
}
}, [preferredCurrency, preferredLanguage, timeFormat, dateFormat, preferredMeasurement, timeZone]);
const [copyClicked, setCopyClicked] = useState(false)
const [accessToken, setAccessToken] = useState("")
const postWorkerRef = useRef<Worker | null>(null)
const getWorkerRef = useRef<Worker | null>(null)
const [messages, setMessages] = useState<Message[]>([{ role: "system",
content: `You are in the timezone: ${timeZone}.
You use the time format ${timeFormat}.
You use the date format ${dateFormat} for all references of dates.
You use the ${preferredMeasurement} system. You use the currency ${preferredCurrency}.
You will only answer in the language (you will receive the country code) ${preferredLanguage}.
But in the case the user specifically states to answer in an other language do that speaking in a
nother language is not stating you should answer in that language. Additionally do not translate your answer into multiple languages`
},{ role: "assistant", content: "Hello! How can I help you?" }])
const [liveMessage, setLiveMessage] = useState("")
const [inputMessage, setInputMessage] = useState<string>("")
const [inputDisabled, setInputDisabled] = useState(false)
@ -151,11 +164,6 @@ const InputOutputBackend: React.FC = () => {
});
};
useEffect(() => {
},[preferredCurrency, preferredLanguage, timeFormat, preferredMeasurement, timeZone, dateFormat])
const addMessage = (role: string, content: string) => {
setMessages(previous => [...previous, { role, content }])
}
@ -173,40 +181,46 @@ const InputOutputBackend: React.FC = () => {
}
}
const startRecording = async () => {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
const mediaRecorder = new MediaRecorder(stream)
mediaRecorderRef.current = mediaRecorder
const startRecording = async (): Promise<string> => {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const mediaRecorder = new MediaRecorder(stream);
mediaRecorderRef.current = mediaRecorder;
audioChunks.current = []; // Initialize audio chunks
// Create a promise that resolves when the onstop event is done
const stopRecordingPromise = new Promise<string>((resolve) => {
mediaRecorder.ondataavailable = (event) => {
audioChunks.current.push(event.data)
}
audioChunks.current.push(event.data);
};
mediaRecorder.onstop = async () => {
const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" })
audioChunks.current = []
// console.log(audioBlob);
// const url = URL.createObjectURL(audioBlob)
// const audio = new Audio(url);
// audio.play().catch(error => console.error("Error playing audio:", error));
const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" });
audioChunks.current = [];
const remote = new VoiceSend()
remote.sendToVoiceRecognition(audioBlob)
}
const text_voice = await sendToVoiceRecognition(audioBlob);
console.log(text_voice);
resolve(text_voice); // Resolve the promise with the recognized text
};
});
mediaRecorder.start()
setIsRecording(true)
}
mediaRecorder.start();
setIsRecording(true);
// Wait for the recording to stop and get the recognized text
return stopRecordingPromise;
};
const stopRecording = () => {
mediaRecorderRef.current?.stop()
setIsRecording(false)
}
mediaRecorderRef.current?.stop();
setIsRecording(false);
};
const handleMicClick = () => {
const handleMicClick = async () => {
if (!isRecording) {
startRecording();
const recognizedText = await startRecording();
setInputMessage(recognizedText); // Set the recognized text after recording
console.log("Set!")
} else {
stopRecording();
}
@ -253,7 +267,7 @@ const InputOutputBackend: React.FC = () => {
return (
<div>
<>
<ConversationFrontend
messages={messages}
onResendClick={handleResendClick}
@ -268,7 +282,7 @@ const InputOutputBackend: React.FC = () => {
inputDisabled={inputDisabled}
isRecording={isRecording}
/>
</div>
</>
)
}

View file

@ -1,15 +1,13 @@
import axios from "axios";
class VoiceSend {
sendToVoiceRecognition(audio_data: Blob) {
export const sendToVoiceRecognition = (audio_data: Blob): Promise<string> => {
console.log("sending recording...");
const formdata = new FormData()
formdata.append("audio", audio_data)
const dataSend = { option:"offline", type:"basic",audio:audio_data }
axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", formdata)
const dataSend = { option: "offline", type: "basic", audio: audio_data }
return axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", formdata)
.then((response) => {
console.log(response.data)
return response.data.response
@ -17,10 +15,6 @@ class VoiceSend {
.catch(error => {
console.log("Error calling API:", error)
postMessage({ status: 500 })
return "Error"
})
}
}
export default VoiceSend;

View file

@ -4,11 +4,9 @@ import InputOutputBackend from '../backend/InputOutputHandler';
const AI: React.FC = () => {
return (
<div>
<div className="ai-container">
<InputOutputBackend />
</div>
</div>
);
};

View file

@ -17,13 +17,6 @@ const ConversationFrontend = React.forwardRef<HTMLDivElement, ConversationProps>
({ messages, onResendClick, onEditClick, onCopyClick, isClicked}, ref: ForwardedRef<HTMLDivElement>) => {
const endOfMessagesRef = useRef<HTMLDivElement>(null);
// Auto-scroll to the bottom of the conversation whenever a new message is added
useEffect(() => {
if (endOfMessagesRef.current) {
endOfMessagesRef.current.scrollIntoView({ behavior: 'smooth' });
}
}, [messages]); // Triggers the effect whenever the 'messages' array changes
useEffect(() => {
console.log(isClicked);

View file

@ -6,13 +6,17 @@ interface InputProps {
onSendClick: (message: string, override: boolean) => void;
onMicClick: () => void;
inputDisabled: boolean;
isRecording:boolean
isRecording: boolean
}
const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
({ message, onSendClick, onMicClick, inputDisabled, isRecording}, ref: ForwardedRef<HTMLDivElement>) => {
({ message, onSendClick, onMicClick, inputDisabled, isRecording }, ref: ForwardedRef<HTMLDivElement>) => {
const [inputValue, setInputValue] = useState('');
useEffect(() => {
setInputValue(message);
}, [message]);
const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
setInputValue(e.target.value);
};
@ -40,7 +44,7 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
<button type="button" onClick={() => onSendClick(inputValue, false)} disabled={inputDisabled ? true : false}>
<img src="/img/send.svg" alt="send" />
</button>
<button className={`microphone-button ${isRecording ? "red": "var(--input-button-color)"}`} type="button" onClick={onMicClick}>
<button className={`microphone-button ${isRecording ? "red" : "var(--input-button-color)"}`} type="button" onClick={onMicClick}>
<img src="/img/microphone.svg" alt="microphone" />
</button>
</div>

View file

@ -1,7 +1,143 @@
"use client";
import React, { useState, useEffect } from 'react';
// Define the available model options
// Define all models that should be available.
// Maps each selectable model tier to the concrete model id used per task
// category. `model_type` names the backing provider ('local', 'openai',
// 'anthropic', 'google', 'mistral') and is read alongside the category keys.
const modelList = {
  'Offline Fast': {
    model_type: 'local',
    Math: 'qwen2-math:1.5b',
    Code: 'starcoder2',
    Language: 'llama3.2',
    Character: 'dolphin-phi',
    Finance: 'qwen2-math:1.5b',
    Weather: 'llama3.2',
    Time: 'llama3.2',
    Image: 'llava-phi3'
  },
  'Offline Slow': {
    model_type: 'local',
    Math: 'wizard-math',
    Code: 'starcoder2:7b',
    Language: 'llama3.1',
    Character: 'dolphin-llama3',
    Finance: 'wizard-math',
    Weather: 'llama3.1',
    Time: 'llama3.1',
    Image: 'llava'
  },
  'Offline Fast (FOSS)': {
    model_type: 'local',
    Math: 'qwen2-math:1.5b',
    Code: 'qwen2.5-coder:1.5b',
    Language: 'phi3.5',
    Character: 'dolphin-mistral',
    Finance: 'qwen2-math:1.5b',
    Weather: 'phi3.5',
    Time: 'phi3.5',
    Image: 'llava'
  },
  'Offline Slow (FOSS)': {
    model_type: 'local',
    Math: 'mathstral',
    Code: 'qwen2.5-coder',
    Language: 'qwen2.5',
    Character: 'dolphin-mistral',
    Finance: 'mathstral',
    Weather: 'qwen2.5',
    Time: 'qwen2.5',
    Image: 'llava'
  },
  // The hosted tiers use a single model for every category.
  'Online Cheap (OpenAI)': {
    model_type: 'openai',
    Math: 'gpt-4o-mini',
    Code: 'gpt-4o-mini',
    Language: 'gpt-4o-mini',
    Character: 'gpt-4o-mini',
    Finance: 'gpt-4o-mini',
    Weather: 'gpt-4o-mini',
    Time: 'gpt-4o-mini',
    Image: 'gpt-4o-mini'
  },
  'Online Expensive (OpenAI)': {
    model_type: 'openai',
    Math: 'gpt-4o',
    Code: 'gpt-4o',
    Language: 'gpt-4o',
    Character: 'gpt-4o',
    Finance: 'gpt-4o',
    Weather: 'gpt-4o',
    Time: 'gpt-4o',
    Image: 'gpt-4o'
  },
  'Online Cheap (Anthropic)': {
    model_type: 'anthropic',
    Math: 'claude-3-haiku',
    Code: 'claude-3-haiku',
    Language: 'claude-3-haiku',
    Character: 'claude-3-haiku',
    Finance: 'claude-3-haiku',
    Weather: 'claude-3-haiku',
    Time: 'claude-3-haiku',
    Image: 'claude-3-haiku'
  },
  'Online Expensive (Anthropic)': {
    model_type: 'anthropic',
    Math: 'claude-3-5-sonnet',
    Code: 'claude-3-5-sonnet',
    Language: 'claude-3-5-sonnet',
    Character: 'claude-3-5-sonnet',
    Finance: 'claude-3-5-sonnet',
    Weather: 'claude-3-5-sonnet',
    Time: 'claude-3-5-sonnet',
    Image: 'claude-3-5-sonnet'
  },
  'Online Cheap (Google)': {
    model_type: 'google',
    Math: 'gemini-1.5-flash-latest',
    Code: 'gemini-1.5-flash-latest',
    Language: 'gemini-1.5-flash-latest',
    Character: 'gemini-1.5-flash-latest',
    Finance: 'gemini-1.5-flash-latest',
    Weather: 'gemini-1.5-flash-latest',
    Time: 'gemini-1.5-flash-latest',
    Image: 'gemini-1.5-flash-latest'
  },
  'Online Expensive (Google)': {
    model_type: 'google',
    Math: 'gemini-1.5-pro-latest',
    Code: 'gemini-1.5-pro-latest',
    Language: 'gemini-1.5-pro-latest',
    Character: 'gemini-1.5-pro-latest',
    Finance: 'gemini-1.5-pro-latest',
    Weather: 'gemini-1.5-pro-latest',
    Time: 'gemini-1.5-pro-latest',
    Image: 'gemini-1.5-pro-latest'
  },
  // Mistral "La Plateforme" tiers pick specialised models per category.
  'Online (La Plateforme)': {
    model_type: 'mistral',
    Math: 'open-mistral-nemo',
    Code: 'codestral-latest',
    Language: 'mistral-small-latest',
    Character: 'mistral-large-latest',
    Finance: 'open-mistral-nemo',
    Weather: 'mistral-small-latest',
    Time: 'mistral-small-latest',
    Image: 'pixtral-12b-2409'
  },
  'Online (FOSS) (La Plateforme)': {
    model_type: 'mistral',
    Math: 'open-mistral-nemo',
    Code: 'open-codestral-mamba',
    Language: 'open-mistral-nemo',
    Character: 'open-mixtral-8x22b',
    Finance: 'open-mixtral-8x22b',
    Weather: 'open-mistral-nemo',
    Time: 'open-mistral-nemo',
    Image: 'pixtral-12b-2409'
  }
}
// Define the available category options
const modelDropdown = {
offlineWithoutFoss: ['Offline Fast', 'Offline Slow'],
offlineFoss: ['Offline Fast (FOSS)', 'Offline Slow (FOSS)'],
@ -12,6 +148,7 @@ const modelDropdown = {
'Online Expensive (Anthropic)',
'Online Cheap (Google)',
'Online Expensive (Google)',
'Online (La Plateforme)'
],
onlineFoss: ['Online (FOSS) (La Plateforme)'],
};
@ -98,6 +235,13 @@ const Models: React.FC = () => {
const isOfflineModel = (model: string) =>
modelDropdown.offlineWithoutFoss.includes(model) || modelDropdown.offlineFoss.includes(model);
// Resolves the clicked task category against the currently selected tier
// and logs the concrete model id. NOTE(review): currently log-only — it
// does not store the choice anywhere; presumably a hook point for later.
const modelClicked = (model: string) => {
  const tier = selectedModel as keyof typeof modelList;
  console.log(model)
  console.log(tier)
  const resolved = modelList[tier][model as keyof typeof modelList[typeof tier]];
  console.log(resolved);
}
return (
<div className="model-background">
<div className="models">
@ -122,7 +266,7 @@ const Models: React.FC = () => {
<div className="grid">
{['Code', 'Math', 'Language', 'Character', 'Finance', 'Weather', 'Time', 'Image', 'Custom1', 'Custom2'].map(
(category) => (
<button key={category} className={`${category.toLowerCase()}-model model-box`}>
<button key={category} className={`${category.toLowerCase()}-model model-box`} onClick={() => modelClicked(category)}>
<div className="overlay">
<h3>{category}</h3>
{isOfflineModel(selectedModel) && <img src="/img/nowifi.svg" alt="No Wi-Fi" />}

View file

@ -5,6 +5,9 @@ export const metadata = {
description: 'A little AI chat that is able to assist you in little tasks',
};
export default function RootLayout({ children }: { children: ReactNode }) {
return (
<html lang="en">

View file

@ -15,31 +15,6 @@ const LandingPage: React.FC = () => {
const [view, setView] = useState<'AI' | 'FAQ' | 'Documentation' | 'Credits'>('AI'); // Added 'Credits' here
const conversationRef = useRef<HTMLDivElement>(null);
useEffect(() => {
const scrollToBottom = () => {
const conversation = conversationRef.current;
if (conversation) {
conversation.scrollTop = conversation.scrollHeight;
}
};
scrollToBottom();
const observer = new MutationObserver(scrollToBottom);
if (conversationRef.current) {
observer.observe(conversationRef.current, {
childList: true,
subtree: true,
});
}
return () => {
if (conversationRef.current) {
observer.disconnect();
}
};
}, []);
const toggleDivs = () => {
setShowDivs(prevState => !prevState);
};

View file

@ -9,7 +9,6 @@
display: flex;
justify-content: center;
align-items: center;
z-index: 10000;
}
.settings-main h2 {
@ -104,14 +103,14 @@
padding: 5px 10px;
cursor: pointer;
position: absolute; /* Position the button absolutely */
top: 10px; /* Distance from the top */
right: 10px; /* Distance from the right */
top: 15px; /* Distance from the top */
right: 40px; /* Distance from the right */
transition: background 0.3s;
}
/* Close button positioning */
.apply {
background: var(--close-button-color); /* Use variable for close button color */
background: var(--apply-button-color); /* Use variable for close button color */
color: white; /* Use white for text color */
border: none;
border-radius: 5px;
@ -119,7 +118,7 @@
cursor: pointer;
position: absolute; /* Position the button absolutely */
top: 50px; /* Distance from the top */
right: 10px; /* Distance from the right */
right: 40px; /* Distance from the right */
transition: background 0.3s;
}

View file

@ -1,19 +1,19 @@
/* container.css */
.container,
.content {
.container{
display: flex;
width: 100vw;
padding-top: 0.025vh;
height: 100vh;
padding-top: 12vh;
}
.left-panel {
margin-top: 5em;
width: 25vw; /* Adjust as needed */
transition: width 0.3s ease, opacity 0.3s ease, visibility 0.3s ease; /* Smooth transitions for all properties */
background-color: var(--left-panel-background-color); /* Use variable for background color */
border-radius: 0 1em 0 0;
margin-left: 0;
padding-right: 1em;
height: 100%;
}
.left-panel.hidden {
@ -23,12 +23,11 @@
}
.conversation-container {
margin-top: 5em;
flex: 1;
transition: margin-left 0.3s ease; /* Smooth margin adjustment */
background-color: var(--conversation-background-color); /* Use variable for background color */
border-radius: 1em 0 0 0;
height: min-content;
height: 100%;
}
/* Adjust margin-left when panel is shown or hidden */

View file

@ -1,7 +1,7 @@
html,
body {
height: 100vh;
/* overflow: hidden; */
overflow: hidden;
position: relative;
}

View file

@ -7,11 +7,14 @@ header{
height: 10vh;
display: flex;
align-items: center;
justify-content: center;
font-size: 1em;
z-index: 999;
}
.hamburger{
position: absolute;
left: 5vw;
display: none;
flex-direction: column;
cursor: pointer;
@ -39,9 +42,10 @@ header{
.nav-links{
position: absolute;
left: 1vw;
display: flex;
gap: 1em;
width: 35vw;
gap: 0.5vw;
width:max-content;
height: 100%;
align-items: center;
}
@ -52,7 +56,8 @@ header{
font-size: 0.9em;
height: 50%;
border-radius: 5px;
padding: 1px 15px;
padding: 2px 15px;
font-family: var(--font-family);
}
.nav-btn:hover{
@ -60,7 +65,6 @@ header{
}
.header-logo{
margin:auto;
width: 250px;
height: 5vh;
background-image: url(../../public/img/logo.png);
@ -72,16 +76,19 @@ header{
}
.login-button-container{
position: absolute;
top: 0.2vh;
right: 1vw;
height: 100%;
display: flex;
align-items: center;
}
.header-login-button{
font-size: 2vh;
position: absolute;
font-size: var(--font-size);
/* position: absolute;
top: 1.5vh;
right: 1vw;
right: 1vw; */
padding: 10px 20px;
background-color: var(--input-button-color);
color: var(--text-color);
@ -89,6 +96,7 @@ header{
border-radius: 5px;
cursor: pointer;
transition: background-color 0.3s;
font-family: var(--font-family);
}
.header-login-button:hover {

View file

@ -1,13 +1,13 @@
.history-background {
grid-column: 1/2;
grid-row: 1/2;
height: 40vh;
height: 45%;
overflow: hidden;
background-color: var(--history-background-color);
padding: 1em;
margin: 1em;
margin-right: 0;
border-radius: 2em;
border-radius: 1em;
}
.history {
@ -37,3 +37,7 @@
.history ul li a:hover {
background-color: var(--input-button-hover-color);
}
.history-models{
height: 100%;
}

View file

@ -2,22 +2,24 @@
.input {
grid-column: 2/3;
grid-row: 4/5;
border-radius: 20px;
border-radius: 8px;
background-color: var(--input-background-color);
padding: 1em;
margin: 1em;
padding-left: 0.5em;
padding-right: 0;
margin: 0 10px;
margin-bottom: 10px;
display: flex;
justify-content: space-between;
align-items: center;
height: auto;
gap: 10px;
height: 10vh;
}
.input input {
flex-grow: 1;
padding: 5px;
font-size: 1.2em;
font-size: 1em;
border-radius: 8px;
border: 2px solid var(--input-button-color);
outline: none;
@ -33,16 +35,17 @@
}
.input button {
padding: 1em;
padding: 5px;
margin: 5px;
margin-left: 0;
background-color: var(--input-button-color);
color: var(--user-message-text-color); /* Use variable for button text color */
border: none;
border-radius: 50%;
border-radius: 8px;
font-size: 1.5em;
cursor: pointer;
height: 50px;
width: 50px;
width: 75px;
display: flex;
justify-content: center;
align-items: center;
@ -52,7 +55,7 @@
}
.input button img {
height: 1em;
height: 20px;
}
.input button:hover {

View file

@ -1,13 +1,13 @@
.model-background {
grid-column: 1/2;
grid-row: 1/2;
height: 45vh;
height: 45%;
overflow: hidden;
background-color: var(--history-background-color);
padding: 1em;
margin: 1em;
margin: 0 1em;
margin-right: 0;
border-radius: 2em;
border-radius: 1em;
}
.models {
@ -17,12 +17,12 @@
overflow-y: scroll;
}
.models .titel {
padding-bottom: 1em;
.models .title {
display: flex;
justify-content: center;
align-items: center;
font-size: 0.7em;
font-size: 1.5em;
margin-bottom: 0;
}
.model-dropdown {
@ -56,7 +56,6 @@
}
.overlay {
z-index: 900;
position: absolute;
left: 0;
width: 100%;

View file

@ -2,24 +2,21 @@
.output {
grid-column: 2;
grid-row: 1 / 4;
border-radius: 2em;
background-color: var(--output-background-color);
padding: 1em;
margin: 1em;
margin-top: 0;
display: flex;
flex-direction: column;
justify-content: flex-start;
font-size: 1em;
overflow-y: auto;
width: calc(100% - 2em); /* Corrected calculation for width */
height: 75vh;
height: 70vh;
}
#conversation {
display: flex;
flex-direction: column;
padding: 10px;
padding-left: 10px;
overflow-y: auto;
max-height: 80vh;
background-color: var(--output-background-color);
@ -51,16 +48,17 @@
/* Button Container */
.button-container {
display: flex;
padding: 10px 0;
}
.button-container button {
background: none;
border: none;
cursor: pointer;
border-radius: 50%;
border-radius: 100%;
padding: 10px;
transition: background-color 0.3s ease;
height: 40px;
width: 40px;
}
.button-container button:hover {
@ -68,7 +66,8 @@
}
.button-container img {
height: 1.5em;
height: 20px;
width: 20px;
}
#copiedText{

View file

@ -27,13 +27,6 @@
padding: 7em 0 0 0 ;
}
header li {
display: flex;
flex-direction: column;
justify-content: center;
margin: 0;
}
/* Left panel styles */
.left-panel {
display: hidden; /* Initially hidden */
@ -43,6 +36,7 @@
.left-panel.visible {
display: block;
height: min-content;
}
/* Conversation container styles */
@ -104,10 +98,9 @@
color: var(--user-message-text-color); /* Use variable for button text color */
}
.header-login-button{
position: absolute;
top: 3.5vh;
right: 5vw;
.header-logo{
position: relative;
margin-left: -40px;
}
.hamburger.open{
@ -130,17 +123,23 @@
.nav-links.active{
display: flex;
height: fit-content;
}
.nav-btn{
width: 100%;
text-align: left;
text-align: center;
padding: 10px;
height: 50px;
}
.hamburger {
display: flex;
}
.header-login-button{
right: 5vh;
}
}
/* Responsive adjustments for the settings */

View file

@ -25,3 +25,9 @@
margin-right: auto;
text-align: left;
}
.ai-container{
height: min-content;
bottom: 0;
width: 100%;
}

View file

@ -20,6 +20,8 @@
--doc-background-color: #ffffff; /* Background color for documents */
--close-button-color: red;
--close-button-hover-color: #9e0101; /*NEW*/
--apply-button-color:#8B9635;
--apply-button-hover-color:#6b7c2b;
--burger-menu-background-color: #79832e; /*NEW*/
--overlay-text-color:white; /*NEW*/

View file

@ -80,11 +80,6 @@ class AI:
message = messages[-1]['content']
messages.pop()
system = None
if messages and messages[0]['role'] == 'system':
system = messages[0]['content']
messages.pop(0)
for msg in messages:
msg['parts'] = msg.pop('content')
@ -97,8 +92,8 @@ class AI:
model = genai.GenerativeModel(model)
chat = model.start_chat(
system_instruction=system,
history=messages
history=messages,
)
response = chat.send_message(message, stream=True)

View file

@ -70,6 +70,13 @@ class API:
thread.start()
thread.join()
return jsonify({'status': 200})
elif model_type == "google":
api_key = data.get('api_key')
thread = threading.Thread(target=self.ai.process_google,
args=(ai_model, messages, self, access_token, api_key))
thread.start()
thread.join()
return jsonify({'status': 200})
return jsonify({'status': 401, 'error': 'Invalid AI model type'})

View file

@ -8,24 +8,18 @@ class VoiceRecognition:
def recognition(audio):
audio_buffer = io.BytesIO(audio.read())
try:
audio_segment = AudioSegment.from_file(audio_buffer, format="ogg")
wav_io = io.BytesIO()
audio_segment.export(wav_io, format="wav")
wav_io.seek(0)
except:
print("audio to wav failed")
model_size = "base"
model = WhisperModel(model_size, device="cpu", compute_type=" ")
model = WhisperModel(model_size, device="cpu", compute_type="int8")
segments, _ = model.transcribe(wav_io)
transcription = ""
for segment in segments:
transcription += segment.text + " "
result = transcription.strip()
print(result)
return result
# npm install @ffmpeg/ffmpeg @ffmpeg/util @ffmpeg/types @ffmpeg/core-mt