WOHOOOOOOOO

This commit is contained in:
Patrick_Pluto 2024-09-30 12:45:19 +02:00
parent 1e05319266
commit 1fd916a3eb
3 changed files with 66 additions and 61 deletions


@@ -2,7 +2,7 @@
 import React, { use, useEffect, useRef, useState } from "react";
 import ConversationFrontend from "../components/ConversationFrontend";
 import InputFrontend from "../components/InputFrontend";
-import VoiceSend from "./voice_backend"
+import { sendToVoiceRecognition } from "./voice_backend"
 import { AudioRecorder } from "./AudioRecorder";
 import axios from "axios";
 import { resolve } from "path";
@@ -38,7 +38,8 @@ const InputOutputBackend: React.FC = () => {
     const [accessToken, setAccessToken] = useState("")
     const postWorkerRef = useRef<Worker | null>(null)
     const getWorkerRef = useRef<Worker | null>(null)
-    const [messages, setMessages] = useState<Message[]>([{ role: "system",
+    const [messages, setMessages] = useState<Message[]>([{
+        role: "system",
         content: `You are in the timezone: ${timeZone}.
             You use the time format ${timeFormat}.
             You use the date format ${dateFormat} for all references of dates.
@@ -169,40 +170,46 @@ const InputOutputBackend: React.FC = () => {
         }
     }

-    const startRecording = async () => {
-        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
-        const mediaRecorder = new MediaRecorder(stream)
-        mediaRecorderRef.current = mediaRecorder
+    const startRecording = async (): Promise<string> => {
+        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+        const mediaRecorder = new MediaRecorder(stream);
+        mediaRecorderRef.current = mediaRecorder;
+        audioChunks.current = []; // Initialize audio chunks

+        // Create a promise that resolves when the onstop event is done
+        const stopRecordingPromise = new Promise<string>((resolve) => {
         mediaRecorder.ondataavailable = (event) => {
-            audioChunks.current.push(event.data)
-        }
+            audioChunks.current.push(event.data);
+        };

         mediaRecorder.onstop = async () => {
-            const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" })
-            audioChunks.current = []
-            // console.log(audioBlob);
-            // const url = URL.createObjectURL(audioBlob)
-            // const audio = new Audio(url);
-            // audio.play().catch(error => console.error("Error playing audio:", error));
-            const remote = new VoiceSend()
-            remote.sendToVoiceRecognition(audioBlob)
-        }
+            const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" });
+            audioChunks.current = [];
+            const text_voice = await sendToVoiceRecognition(audioBlob);
+            console.log(text_voice);
+            resolve(text_voice); // Resolve the promise with the recognized text
+        };
+        });

-        mediaRecorder.start()
-        setIsRecording(true)
-    }
+        mediaRecorder.start();
+        setIsRecording(true);
+
+        // Wait for the recording to stop and get the recognized text
+        return stopRecordingPromise;
+    };

     const stopRecording = () => {
-        mediaRecorderRef.current?.stop()
-        setIsRecording(false)
-    }
+        mediaRecorderRef.current?.stop();
+        setIsRecording(false);
+    };

-    const handleMicClick = () => {
+    const handleMicClick = async () => {
         if (!isRecording) {
-            startRecording();
+            const recognizedText = await startRecording();
+            setInputMessage(recognizedText); // Set the recognized text after recording
+            console.log("Set!")
         } else {
             stopRecording();
         }
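The core of this change is turning MediaRecorder's onstop callback into a promise so handleMicClick can await the recognized text instead of firing and forgetting. The snippet below is a minimal standalone sketch of that pattern; recognitionOnStop is a hypothetical helper name and the recognize parameter stands in for sendToVoiceRecognition, neither of which exists in this repository.

// Sketch: resolve with the recognized text once the recorder stops.
// "recognize" is a stand-in for sendToVoiceRecognition from this commit.
function recognitionOnStop(
    recorder: MediaRecorder,
    chunks: Blob[],
    recognize: (audio: Blob) => Promise<string>
): Promise<string> {
    return new Promise<string>((resolve, reject) => {
        recorder.ondataavailable = (event) => chunks.push(event.data);
        recorder.onstop = async () => {
            try {
                const blob = new Blob(chunks, { type: "audio/ogg" });
                resolve(await recognize(blob)); // recognition result becomes the promise value
            } catch (err) {
                reject(err);
            }
        };
    });
}

A caller starts the recorder, later calls recorder.stop() from the mic button, and awaits the returned promise, which is essentially the shape startRecording, stopRecording, and handleMicClick take above.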


@@ -1,15 +1,13 @@
 import axios from "axios";

-class VoiceSend {
-    sendToVoiceRecognition(audio_data: Blob) {
+export const sendToVoiceRecognition = (audio_data: Blob): Promise<string> => {
     console.log("sending recording...");
     const formdata = new FormData()
     formdata.append("audio", audio_data)

     const dataSend = { option: "offline", type: "basic", audio: audio_data }
-    axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", formdata)
+    return axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", formdata)
         .then((response) => {
             console.log(response.data)
             return response.data.response
@@ -17,10 +15,6 @@ class VoiceSend {
         .catch(error => {
             console.log("Error calling API:", error)
             postMessage({ status: 500 })
+            return "Error"
         })
 }
-}
-
-export default VoiceSend;
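With the class removed, voice_backend now exports a single promise-returning helper, so a consumer only has to await it. A hedged usage sketch follows; the handleRecordedAudio wrapper is illustrative and not part of this commit.

import { sendToVoiceRecognition } from "./voice_backend";

// Sketch: forward a recorded Blob to the recognition API and log the transcript.
async function handleRecordedAudio(audioBlob: Blob): Promise<void> {
    // Resolves with response.data.response on success, or "Error" if the request fails.
    const transcript = await sendToVoiceRecognition(audioBlob);
    console.log("Recognized text:", transcript);
}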


@@ -13,6 +13,10 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
     ({ message, onSendClick, onMicClick, inputDisabled, isRecording }, ref: ForwardedRef<HTMLDivElement>) => {
         const [inputValue, setInputValue] = useState('');

+        useEffect(() => {
+            setInputValue(message);
+        }, [message]);
+
         const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
             setInputValue(e.target.value);
         };
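The new useEffect keeps InputFrontend's local inputValue in sync with the message prop, which is how the transcript set by handleMicClick ends up in the text box. A minimal standalone sketch of the same controlled-input pattern; SyncedInput is an illustrative name, not a component in the repo.

import React, { useEffect, useState } from "react";

// Sketch: a text input that mirrors an externally supplied message prop
// while still letting the user type freely afterwards.
const SyncedInput: React.FC<{ message: string }> = ({ message }) => {
    const [inputValue, setInputValue] = useState("");

    // Overwrite local state whenever the parent pushes a new message,
    // e.g. a freshly recognized voice transcript.
    useEffect(() => {
        setInputValue(message);
    }, [message]);

    return <input value={inputValue} onChange={(e) => setInputValue(e.target.value)} />;
};

export default SyncedInput;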