Compare commits

..

No commits in common. "ea3fd09ea2107c2636c3f1f87d685c8586ab5a84" and "95eadb5ee2ef0c08c2629952c323e18aee158c4c" have entirely different histories.

8 changed files with 93 additions and 111 deletions

View file

@ -0,0 +1,39 @@
// import React, { useState, useRef } from 'react'
// const AudioRecorder: React.FC = () => {
//   const [isRecording, setIsRecording] = useState(false)
//   const [audioURL, setAudioURL] = useState<string | null>(null)
//   const mediaRecorderRef = useRef<MediaRecorder | null>(null)
//   const audioChunks = useRef<Blob[]>([])
//   const startRecording = async () => {
//     const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
//     const mediaRecorder = new MediaRecorder(stream)
//     mediaRecorderRef.current = mediaRecorder
//     mediaRecorder.ondataavailable = (event) => {
//       audioChunks.current.push(event.data)
//     }
//     mediaRecorder.onstop = () => {
//       const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
//       const url = URL.createObjectURL(audioBlob)
//       setAudioURL(url)
//       audioChunks.current = []
//     }
//     mediaRecorder.start()
//     setIsRecording(true)
//   }
//   const stopRecording = () => {
//     mediaRecorderRef.current?.stop()
//     setIsRecording(false)
//   }
//   return (
//     <div></div>
//   )
// }
// export default AudioRecorder

View file

@ -1,34 +0,0 @@
import React, { useState, useRef } from 'react'
// Records microphone audio via the browser MediaRecorder API and exposes the
// finished recording as an object URL in component state.
export const AudioRecorder = () => {
    const [isRecording, setIsRecording] = useState(false)
    const [audioURL, setAudioURL] = useState<string | null>(null)
    const mediaRecorderRef = useRef<MediaRecorder | null>(null)
    // Accumulates the data chunks of the in-progress recording.
    const audioChunks = useRef<Blob[]>([])

    // Request microphone permission and begin capturing. Chunks are collected
    // in audioChunks until stopRecording() triggers the onstop handler below.
    const startRecording = async () => {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
        const mediaRecorder = new MediaRecorder(stream)
        mediaRecorderRef.current = mediaRecorder
        mediaRecorder.ondataavailable = (event) => {
            audioChunks.current.push(event.data)
        }
        mediaRecorder.onstop = () => {
            // NOTE(review): MediaRecorder's default container is typically
            // webm/ogg, not WAV — the "audio/wav" tag is kept from the original
            // but should be confirmed against what the consumer expects.
            const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
            const url = URL.createObjectURL(audioBlob)
            setAudioURL(url)
            audioChunks.current = [] // reset for the next recording
        }
        mediaRecorder.start()
        setIsRecording(true)
    }

    // Stop the active recorder, if any; firing its onstop handler above.
    const stopRecording = () => {
        mediaRecorderRef.current?.stop()
        setIsRecording(false)
    }

    // Fix: the original component had no return statement at all, so rendering
    // it yielded `undefined` — an invalid render result before React 18.
    // Return null explicitly until UI controls are wired to startRecording /
    // stopRecording / isRecording / audioURL (currently unused internals).
    return null
}

View file

@ -3,8 +3,8 @@ import React, { useEffect, useRef, useState } from "react";
import ConversationFrontend from "../components/ConversationFrontend"; import ConversationFrontend from "../components/ConversationFrontend";
import InputFrontend from "../components/InputFrontend"; import InputFrontend from "../components/InputFrontend";
import VoiceSend from "./voice_backend" import VoiceSend from "./voice_backend"
import { AudioRecorder } from "./AudioRecorder";
import axios from "axios"; import axios from "axios";
import { skip } from "node:test";
const InputOutputBackend: React.FC = () => { const InputOutputBackend: React.FC = () => {
@ -20,10 +20,10 @@ const InputOutputBackend: React.FC = () => {
const [liveMessage, setLiveMessage] = useState("") const [liveMessage, setLiveMessage] = useState("")
const [inputMessage, setInputMessage] = useState<string>("") const [inputMessage, setInputMessage] = useState<string>("")
const [inputDisabled, setInputDisabled] = useState(false) const [inputDisabled, setInputDisabled] = useState(false)
const [isRecording, setIsRecording] = useState(false) const [lastMessage, setLastMessage] = useState<Message>({ role: "user", content: "Not supposed to happen." })
const [audioURL, setAudioURL] = useState<string | null>(null) const [isRecording, setIsRecording] = useState(false);
const mediaRecorderRef = useRef<MediaRecorder | null>(null) const mediaRecorderRef = useRef<MediaRecorder | null>(null);
const audioChunks = useRef<Blob[]>([]) const audioChunksRef = useRef<Blob[]>([]);
console.log(messages); console.log(messages);
@ -129,6 +129,7 @@ const InputOutputBackend: React.FC = () => {
} }
const handleSendClick = (inputValue: string, override: boolean) => { const handleSendClick = (inputValue: string, override: boolean) => {
if (inputValue != "") { if (inputValue != "") {
console.log(inputDisabled)
if (!inputDisabled || override) { if (!inputDisabled || override) {
setInputDisabled(true) setInputDisabled(true)
if (postWorkerRef.current) { if (postWorkerRef.current) {
@ -142,33 +143,37 @@ const InputOutputBackend: React.FC = () => {
} }
const startRecording = async () => { const startRecording = async () => {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true }) const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const mediaRecorder = new MediaRecorder(stream) mediaRecorderRef.current = new MediaRecorder(stream);
mediaRecorderRef.current = mediaRecorder
mediaRecorder.ondataavailable = (event) => { mediaRecorderRef.current.ondataavailable = (event) => {
audioChunks.current.push(event.data) audioChunksRef.current.push(event.data);
} };
mediaRecorder.onstop = () => { mediaRecorderRef.current.onstop = () => {
const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" }) const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/wav' });
const url = URL.createObjectURL(audioBlob) audioChunksRef.current = []; // Clear the chunks for the next recording
console.log(url); // Call your existing function to send the audioBlob
setAudioURL(url) // Example: sendAudioToApi(audioBlob);
audioChunks.current = [] };
const remote = new VoiceSend()
remote.sendToVoiceRecognition(audioBlob,)
}
mediaRecorder.start() mediaRecorderRef.current.start();
setIsRecording(true) setIsRecording(true);
} // Automatically stop recording after 10 seconds
setTimeout(() => {
stopRecording();
}, 10000);
};
const stopRecording = () => { const stopRecording = () => {
mediaRecorderRef.current?.stop() if (mediaRecorderRef.current) {
setIsRecording(false) mediaRecorderRef.current.stop();
setIsRecording(false);
var remote = new VoiceSend()
remote.sendToVoiceRecognition(new Blob(audioChunksRef.current, { type: 'audio/wav' }), remote.voiceDataTemplate);
} }
};
const handleMicClick = () => { const handleMicClick = () => {
@ -219,7 +224,6 @@ const InputOutputBackend: React.FC = () => {
onSendClick={handleSendClick} onSendClick={handleSendClick}
onMicClick={handleMicClick} onMicClick={handleMicClick}
inputDisabled={inputDisabled} inputDisabled={inputDisabled}
isRecording={isRecording}
/> />
</div> </div>
) )

View file

@ -1,22 +1,20 @@
import axios from "axios"; import axios from "axios";
class VoiceSend { class VoiceSend {
sendToVoiceRecognition(audio_data: Blob) {
console.log("sending recording...");
console.log(typeof (audio_data));
console.log(audio_data instanceof Blob);
const formdata = new FormData() voiceDataTemplate = {
formdata.append("audio", audio_data) type: "basic",
formdata.append("option", "offline") audio_data: null,
formdata.append("type", "basic") option: "offline"
}
const dataSend = { option:"offline", type:"basic",audio:audio_data } sendToVoiceRecognition(audio_data: Blob, data: any) {
axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", formdata) var dataSend = data
.then((response) => { dataSend['audio_data'] = audio_data
console.log(response.data) axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", dataSend)
return response.data.response .then((response: any) => {
console.log(response['response'])
return response['response']
}) })
.catch(error => { .catch(error => {
console.log("Error calling API:", error) console.log("Error calling API:", error)

View file

@ -1,16 +1,14 @@
import React, { useState, ForwardedRef, useEffect } from 'react'; import React, { useState, ForwardedRef, useEffect } from 'react';
import "../styles/variables.css"
interface InputProps { interface InputProps {
message: string; message: string;
onSendClick: (message: string, override: boolean) => void; onSendClick: (message: string, override: boolean) => void;
onMicClick: () => void; onMicClick: () => void;
inputDisabled: boolean; inputDisabled: boolean
isRecording:boolean
} }
const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>( const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
({ message, onSendClick, onMicClick, inputDisabled, isRecording}, ref: ForwardedRef<HTMLDivElement>) => { ({ message, onSendClick, onMicClick, inputDisabled }, ref: ForwardedRef<HTMLDivElement>) => {
const [inputValue, setInputValue] = useState(''); const [inputValue, setInputValue] = useState('');
useEffect(() => { useEffect(() => {
@ -31,10 +29,6 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
} }
}; };
const styles = {
}
return ( return (
<div className="input" id="inputForm" ref={ref}> <div className="input" id="inputForm" ref={ref}>
<input <input
@ -48,7 +42,7 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
<button type="button" onClick={() => onSendClick(inputValue, false)} disabled={inputDisabled ? true : false}> <button type="button" onClick={() => onSendClick(inputValue, false)} disabled={inputDisabled ? true : false}>
<img src="/img/send.svg" alt="send" /> <img src="/img/send.svg" alt="send" />
</button> </button>
<button className={`microphone-button ${isRecording ? "red":"green"}`} type="button" onClick={onMicClick}> <button type="button" onClick={onMicClick}>
<img src="/img/microphone.svg" alt="microphone" /> <img src="/img/microphone.svg" alt="microphone" />
</button> </button>
</div> </div>

View file

@ -59,19 +59,3 @@
background-color: var(--input-button-hover-color); background-color: var(--input-button-hover-color);
box-shadow: 0 6px 15px rgba(0, 0, 0, 0.2); box-shadow: 0 6px 15px rgba(0, 0, 0, 0.2);
} }
.microphone-button.red{
background-color: var(--close-button-color);
}
.microphone-button.green{
background-color: var(--button-background-color);
}
.microphone-button.red:hover{
background-color: var(--close-button-hover-color);
}
.microphone-button.green:hover{
background-color: var(--input-button-hover-color);
}

View file

@ -19,7 +19,6 @@
--conversation-background-color: #79832e; /* Background color for conversation container */ --conversation-background-color: #79832e; /* Background color for conversation container */
--doc-background-color: #ffffff; /* Background color for documents */ --doc-background-color: #ffffff; /* Background color for documents */
--close-button-color: red; --close-button-color: red;
--close-button-hover-color: #9e0101; /*NEW*/
--burger-menu-background-color: #79832e; /*NEW*/ --burger-menu-background-color: #79832e; /*NEW*/
--overlay-text-color:white; /*NEW*/ --overlay-text-color:white; /*NEW*/

View file

@ -99,12 +99,10 @@ class API:
@self.app.route('/interstellar_ai/api/voice_recognition', methods=['POST']) @self.app.route('/interstellar_ai/api/voice_recognition', methods=['POST'])
def voice_recognition(): def voice_recognition():
print(request.args) type = request.args.get('type')
recog_type = request.form.get('type') audio = request.args.get('audio')
print(recog_type) option = request.args.get('option')
audio = request.files.get('audio') if type == "basic":
option = request.form.get('option')
if recog_type == "basic":
text = self.voice.basic_recognition(audio, option) text = self.voice.basic_recognition(audio, option)
return jsonify({'status': 200, 'response': text}) return jsonify({'status': 200, 'response': text})
else: else: