forked from React-Group/interstellar_ai
voice recognition trial 1
This commit is contained in:
parent edc7260966
commit 0d84454a17

5 changed files with 74 additions and 88 deletions
@@ -1,39 +0,0 @@
-// import React, { useState, useRef } from 'react'
-
-// const AudioRecorder: React.FC = () => {
-//     const [isRecording, setIsRecording] = useState(false)
-//     const [audioURL, setAudioURL] = useState<string | null>(null)
-//     const medaRecorderRef = useRef<MediaRecorder | null>(null)
-//     const audioChunks = useRef<Blob[]>([])
-
-//     const startRecording = async () => {
-//         const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
-//         const mediaRecorder = new MediaRecorder(stream)
-//         medaRecorderRef.current = mediaRecorder
-
-//         mediaRecorder.ondataavailable = (event) => {
-//             audioChunks.current.push(event.data)
-//         }
-
-//         mediaRecorder.onstop = () => {
-//             const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
-//             const url = URL.createObjectURL(audioBlob)
-//             setAudioURL(url)
-//             audioChunks.current = []
-//         }
-
-//         mediaRecorder.start()
-//         setIsRecording(true)
-
-//     const stopRecording = () => {
-//         medaRecorderRef.current?.stop()
-//         setIsRecording(false)
-//     }
-
-//     return (
-//         <div></div>
-//     )
-//     }
-// }
-
-// export default AudioRecorder
app/backend/AudioRecorder.ts (Normal file, 34 lines)
@@ -0,0 +1,34 @@
+import React, { useState, useRef } from 'react'
+
+export const AudioRecorder = () => {
+    const [isRecording, setIsRecording] = useState(false)
+    const [audioURL, setAudioURL] = useState<string | null>(null)
+    const mediaRecorderRef = useRef<MediaRecorder | null>(null)
+    const audioChunks = useRef<Blob[]>([])
+
+    const startRecording = async () => {
+        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
+        const mediaRecorder = new MediaRecorder(stream)
+        mediaRecorderRef.current = mediaRecorder
+
+        mediaRecorder.ondataavailable = (event) => {
+            audioChunks.current.push(event.data)
+        }
+
+        mediaRecorder.onstop = () => {
+            const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
+            const url = URL.createObjectURL(audioBlob)
+            setAudioURL(url)
+            audioChunks.current = []
+        }
+
+        mediaRecorder.start()
+        setIsRecording(true)
+
+    }
+
+    const stopRecording = () => {
+        mediaRecorderRef.current?.stop()
+        setIsRecording(false)
+    }
+}
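Note on the new AudioRecorder.ts: as committed, AudioRecorder sets up state and handlers but never returns anything, so a consuming component has no way to reach startRecording or stopRecording. A minimal sketch of one way to expose them, assuming the module is meant to be consumed like a custom hook (the name useAudioRecorder and the returned shape are hypothetical, not part of this commit):

// Hypothetical sketch only -- not part of this commit.
// Same recording logic, reshaped as a hook so callers get the controls back.
import { useState, useRef } from 'react'

export const useAudioRecorder = () => {
    const [isRecording, setIsRecording] = useState(false)
    const [audioURL, setAudioURL] = useState<string | null>(null)
    const mediaRecorderRef = useRef<MediaRecorder | null>(null)
    const audioChunks = useRef<Blob[]>([])

    const startRecording = async () => {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
        const mediaRecorder = new MediaRecorder(stream)
        mediaRecorderRef.current = mediaRecorder

        // Collect chunks as they arrive and assemble them when recording stops.
        mediaRecorder.ondataavailable = (event) => audioChunks.current.push(event.data)
        mediaRecorder.onstop = () => {
            const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
            setAudioURL(URL.createObjectURL(audioBlob))
            audioChunks.current = []
        }

        mediaRecorder.start()
        setIsRecording(true)
    }

    const stopRecording = () => {
        mediaRecorderRef.current?.stop()
        setIsRecording(false)
    }

    // Expose state and controls so a component can wire them to its buttons.
    return { isRecording, audioURL, startRecording, stopRecording }
}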
@@ -3,8 +3,8 @@ import React, { useEffect, useRef, useState } from "react";
 import ConversationFrontend from "../components/ConversationFrontend";
 import InputFrontend from "../components/InputFrontend";
 import VoiceSend from "./voice_backend"
+import { AudioRecorder } from "./AudioRecorder";
 import axios from "axios";
-import { skip } from "node:test";


 const InputOutputBackend: React.FC = () => {
@@ -20,10 +20,10 @@ const InputOutputBackend: React.FC = () => {
     const [liveMessage, setLiveMessage] = useState("")
     const [inputMessage, setInputMessage] = useState<string>("")
     const [inputDisabled, setInputDisabled] = useState(false)
-    const [lastMessage, setLastMessage] = useState<Message>({ role: "user", content: "Not supposed to happen." })
-    const [isRecording, setIsRecording] = useState(false);
-    const mediaRecorderRef = useRef<MediaRecorder | null>(null);
-    const audioChunksRef = useRef<Blob[]>([]);
+    const [isRecording, setIsRecording] = useState(false)
+    const [audioURL, setAudioURL] = useState<string | null>(null)
+    const mediaRecorderRef = useRef<MediaRecorder | null>(null)
+    const audioChunks = useRef<Blob[]>([])


     console.log(messages);
@@ -129,7 +129,6 @@ const InputOutputBackend: React.FC = () => {
     }
     const handleSendClick = (inputValue: string, override: boolean) => {
         if (inputValue != "") {
-            console.log(inputDisabled)
             if (!inputDisabled || override) {
                 setInputDisabled(true)
                 if (postWorkerRef.current) {
@@ -143,37 +142,33 @@ const InputOutputBackend: React.FC = () => {
     }

     const startRecording = async () => {
-        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
-        mediaRecorderRef.current = new MediaRecorder(stream);
+        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
+        const mediaRecorder = new MediaRecorder(stream)
+        mediaRecorderRef.current = mediaRecorder

-        mediaRecorderRef.current.ondataavailable = (event) => {
-            audioChunksRef.current.push(event.data);
-        };
+        mediaRecorder.ondataavailable = (event) => {
+            audioChunks.current.push(event.data)
+        }

-        mediaRecorderRef.current.onstop = () => {
-            const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/wav' });
-            audioChunksRef.current = []; // Clear the chunks for the next recording
-            // Call your existing function to send the audioBlob
-            // Example: sendAudioToApi(audioBlob);
-        };
+        mediaRecorder.onstop = () => {
+            const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
+            const url = URL.createObjectURL(audioBlob)
+            console.log(url);
+            setAudioURL(url)
+            audioChunks.current = []
+            const remote = new VoiceSend()
+            remote.sendToVoiceRecognition(audioBlob,)
+        }

-        mediaRecorderRef.current.start();
-        setIsRecording(true);
+        mediaRecorder.start()
+        setIsRecording(true)

-        // Automatically stop recording after 10 seconds
-        setTimeout(() => {
-            stopRecording();
-        }, 10000);
-    };
+    }

     const stopRecording = () => {
-        if (mediaRecorderRef.current) {
-            mediaRecorderRef.current.stop();
-            setIsRecording(false);
-            var remote = new VoiceSend()
-            remote.sendToVoiceRecognition(new Blob(audioChunksRef.current, { type: 'audio/wav' }), remote.voiceDataTemplate);
+        mediaRecorderRef.current?.stop()
+        setIsRecording(false)
     }
-    };


     const handleMicClick = () => {
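One detail worth flagging in the hunk above: wrapping the recorded chunks in a Blob labelled "audio/wav" does not make the data WAV. MediaRecorder encodes in whatever container the browser supports (typically audio/webm or audio/ogg), and the Blob's type field is only a label. A hedged sketch of negotiating a real container with MediaRecorder.isTypeSupported (this is an illustration of the standard API, not something this commit does):

// Hypothetical sketch only -- pick a container the browser can actually encode,
// then label the Blob with what was really produced.
const pickMimeType = (): string | undefined => {
    const candidates = ["audio/webm;codecs=opus", "audio/webm", "audio/ogg"]
    return candidates.find((type) => MediaRecorder.isTypeSupported(type))
}

const startRecordingWithKnownType = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const mimeType = pickMimeType()
    const recorder = new MediaRecorder(stream, mimeType ? { mimeType } : undefined)
    const chunks: Blob[] = []
    recorder.ondataavailable = (event) => chunks.push(event.data)
    recorder.onstop = () => {
        // recorder.mimeType reports the container that was actually used.
        const audioBlob = new Blob(chunks, { type: recorder.mimeType })
        console.log("recorded", audioBlob.type, audioBlob.size, "bytes")
    }
    recorder.start()
    return recorder
}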
@@ -224,6 +219,7 @@ const InputOutputBackend: React.FC = () => {
                     onSendClick={handleSendClick}
                     onMicClick={handleMicClick}
                     inputDisabled={inputDisabled}
+                    isRecording={isRecording}
                 />
             </div>
         )
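handleMicClick itself is untouched by this commit (it only appears as hunk context), so how startRecording and stopRecording get triggered is not visible here. One plausible wiring inside the component, assuming the mic button is meant to toggle recording (hypothetical, not taken from the diff):

// Hypothetical sketch only -- a toggle driven by the isRecording state added in this commit.
const handleMicClick = () => {
    if (isRecording) {
        stopRecording()
    } else {
        // startRecording is async; surface failures such as a denied mic permission.
        startRecording().catch((error) => console.log("could not start recording:", error))
    }
}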
@@ -1,16 +1,10 @@
 import axios from "axios";

 class VoiceSend {
-
-    voiceDataTemplate = {
-        type: "basic",
-        audio_data: null,
-        option: "offline"
-    }
+    sendToVoiceRecognition(audio_data: Blob) {
+        console.log("sending recording...");

-    sendToVoiceRecognition(audio_data: Blob, data: any) {
-        var dataSend = data
-        dataSend['audio_data'] = audio_data
+        const dataSend = { audio_data }
         axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", dataSend)
             .then((response: any) => {
                 console.log(response['response'])
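A caveat on the new sendToVoiceRecognition: axios serializes a plain object like { audio_data } as JSON, and a Blob does not JSON-serialize into its bytes, so the Flask endpoint may receive an empty payload. The endpoint's expected format is not shown in this commit; if it accepts multipart form data, a hedged sketch could look like the following (the field name, filename, and response shape are assumptions):

// Hypothetical sketch only -- sending the recording as multipart/form-data
// so the raw audio bytes actually reach the server.
import axios from "axios";

export const sendRecording = async (audioBlob: Blob) => {
    const formData = new FormData();
    // "audio_data" mirrors the existing parameter name; the real endpoint may differ.
    formData.append("audio_data", audioBlob, "recording.webm");

    const response = await axios.post(
        "http://localhost:5000/interstellar_ai/api/voice_recognition",
        formData
    );
    console.log(response.data);
    return response.data;
};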
@@ -4,11 +4,12 @@ interface InputProps {
     message: string;
     onSendClick: (message: string, override: boolean) => void;
     onMicClick: () => void;
-    inputDisabled: boolean
+    inputDisabled: boolean;
+    isRecording: boolean
 }

 const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
-    ({ message, onSendClick, onMicClick, inputDisabled }, ref: ForwardedRef<HTMLDivElement>) => {
+    ({ message, onSendClick, onMicClick, inputDisabled, isRecording }, ref: ForwardedRef<HTMLDivElement>) => {
         const [inputValue, setInputValue] = useState('');

         useEffect(() => {
@@ -42,7 +43,7 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
                 <button type="button" onClick={() => onSendClick(inputValue, false)} disabled={inputDisabled ? true : false}>
                     <img src="/img/send.svg" alt="send" />
                 </button>
-                <button type="button" onClick={onMicClick}>
+                <button type="button" onClick={onMicClick} style={{ backgroundColor: isRecording ? "red" : "green" }}>
                     <img src="/img/microphone.svg" alt="microphone" />
                 </button>
             </div>
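The inline style above hard-codes the indicator colours in the component. An equivalent approach (hypothetical, not in this commit) toggles a class so the colours can live in the stylesheet, and adds an accessibility hint; the mic-button / recording class names are made up:

// Hypothetical alternative sketch for the mic button inside InputFrontend's JSX.
<button
    type="button"
    onClick={onMicClick}
    className={isRecording ? "mic-button recording" : "mic-button"}
    aria-pressed={isRecording}  // tells assistive tech the toggle is active
>
    <img src="/img/microphone.svg" alt="microphone" />
</button>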
|
|
Loading…
Reference in a new issue