forked from React-Group/interstellar_ai

voice recognition trial 1

This commit is contained in:
parent edc7260966
commit 0d84454a17

5 changed files with 74 additions and 88 deletions
@@ -1,39 +0,0 @@
-// import React, { useState, useRef } from 'react'
-
-// const AudioRecorder: React.FC = () => {
-//     const [isRecording, setIsRecording] = useState(false)
-//     const [audioURL, setAudioURL] = useState<string | null>(null)
-//     const medaRecorderRef = useRef<MediaRecorder | null>(null)
-//     const audioChunks = useRef<Blob[]>([])
-
-//     const startRecording = async () => {
-//         const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
-//         const mediaRecorder = new MediaRecorder(stream)
-//         medaRecorderRef.current = mediaRecorder
-
-//         mediaRecorder.ondataavailable = (event) => {
-//             audioChunks.current.push(event.data)
-//         }
-
-//         mediaRecorder.onstop = () => {
-//             const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
-//             const url = URL.createObjectURL(audioBlob)
-//             setAudioURL(url)
-//             audioChunks.current = []
-//         }
-
-//         mediaRecorder.start()
-//         setIsRecording(true)
-
-//         const stopRecording = () => {
-//             medaRecorderRef.current?.stop()
-//             setIsRecording(false)
-//         }
-
-//         return (
-//             <div></div>
-//         )
-//     }
-// }
-
-// export default AudioRecorder
34  app/backend/AudioRecorder.ts  Normal file

@@ -0,0 +1,34 @@
+import React, { useState, useRef } from 'react'
+
+    export const AudioRecorder= () => {
+        const [isRecording, setIsRecording] = useState(false)
+        const [audioURL, setAudioURL] = useState<string | null>(null)
+        const mediaRecorderRef = useRef<MediaRecorder | null>(null)
+        const audioChunks = useRef<Blob[]>([])
+
+        const startRecording = async () => {
+            const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
+            const mediaRecorder = new MediaRecorder(stream)
+            mediaRecorderRef.current = mediaRecorder
+
+            mediaRecorder.ondataavailable = (event) => {
+                audioChunks.current.push(event.data)
+            }
+
+            mediaRecorder.onstop = () => {
+                const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
+                const url = URL.createObjectURL(audioBlob)
+                setAudioURL(url)
+                audioChunks.current = []
+            }
+
+            mediaRecorder.start()
+            setIsRecording(true)
+
+        }
+
+        const stopRecording = () => {
+            mediaRecorderRef.current?.stop()
+            setIsRecording(false)
+        }
+    }
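As committed, AudioRecorder.ts declares startRecording, stopRecording and audioURL but never returns or renders anything, so nothing outside the function can reach them; InputOutputBackend below re-implements the same logic inline. A minimal sketch of how this code could be exposed as a reusable hook instead (the name useAudioRecorder and the onRecordingComplete callback are illustrative assumptions, not part of this commit):

import { useRef, useState } from 'react'

// Illustrative hook, not part of this commit: same recording logic, but the
// controls and the resulting blob are handed back to the caller.
export const useAudioRecorder = (onRecordingComplete?: (blob: Blob) => void) => {
    const [isRecording, setIsRecording] = useState(false)
    const [audioURL, setAudioURL] = useState<string | null>(null)
    const mediaRecorderRef = useRef<MediaRecorder | null>(null)
    const audioChunks = useRef<Blob[]>([])

    const startRecording = async () => {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
        const mediaRecorder = new MediaRecorder(stream)
        mediaRecorderRef.current = mediaRecorder

        // Collect audio chunks as they arrive.
        mediaRecorder.ondataavailable = (event) => {
            audioChunks.current.push(event.data)
        }

        // On stop, assemble the blob, expose a playback URL, and reset.
        // Note: MediaRecorder usually emits webm/ogg; the wav label mirrors the commit.
        mediaRecorder.onstop = () => {
            const audioBlob = new Blob(audioChunks.current, { type: 'audio/wav' })
            setAudioURL(URL.createObjectURL(audioBlob))
            audioChunks.current = []
            onRecordingComplete?.(audioBlob)
        }

        mediaRecorder.start()
        setIsRecording(true)
    }

    const stopRecording = () => {
        mediaRecorderRef.current?.stop()
        setIsRecording(false)
    }

    // Expose the controls so a component can actually use them.
    return { isRecording, audioURL, startRecording, stopRecording }
}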
@@ -3,8 +3,8 @@ import React, { useEffect, useRef, useState } from "react";
import ConversationFrontend from "../components/ConversationFrontend";
import InputFrontend from "../components/InputFrontend";
import VoiceSend from "./voice_backend"
import { AudioRecorder } from "./AudioRecorder";
import axios from "axios";
import { skip } from "node:test";


const InputOutputBackend: React.FC = () => {
@@ -20,10 +20,10 @@ const InputOutputBackend: React.FC = () => {
  const [liveMessage, setLiveMessage] = useState("")
  const [inputMessage, setInputMessage] = useState<string>("")
  const [inputDisabled, setInputDisabled] = useState(false)
  const [lastMessage, setLastMessage] = useState<Message>({ role: "user", content: "Not supposed to happen." })
  const [isRecording, setIsRecording] = useState(false);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const audioChunksRef = useRef<Blob[]>([]);
  const [isRecording, setIsRecording] = useState(false)
  const [audioURL, setAudioURL] = useState<string | null>(null)
  const mediaRecorderRef = useRef<MediaRecorder | null>(null)
  const audioChunks = useRef<Blob[]>([])


  console.log(messages);
@@ -129,7 +129,6 @@ const InputOutputBackend: React.FC = () => {
  }
  const handleSendClick = (inputValue: string, override: boolean) => {
    if (inputValue != "") {
      console.log(inputDisabled)
      if (!inputDisabled || override) {
        setInputDisabled(true)
        if (postWorkerRef.current) {
@@ -143,37 +142,33 @@ const InputOutputBackend: React.FC = () => {
  }

  const startRecording = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    mediaRecorderRef.current = new MediaRecorder(stream);
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const mediaRecorder = new MediaRecorder(stream)
    mediaRecorderRef.current = mediaRecorder

    mediaRecorderRef.current.ondataavailable = (event) => {
      audioChunksRef.current.push(event.data);
    };

    mediaRecorderRef.current.onstop = () => {
      const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/wav' });
      audioChunksRef.current = []; // Clear the chunks for the next recording
      // Call your existing function to send the audioBlob
      // Example: sendAudioToApi(audioBlob);
    };

    mediaRecorderRef.current.start();
    setIsRecording(true);

    // Automatically stop recording after 10 seconds
    setTimeout(() => {
      stopRecording();
    }, 10000);
  };

  const stopRecording = () => {
    if (mediaRecorderRef.current) {
      mediaRecorderRef.current.stop();
      setIsRecording(false);
      var remote = new VoiceSend()
      remote.sendToVoiceRecognition(new Blob(audioChunksRef.current, { type: 'audio/wav' }), remote.voiceDataTemplate);
    mediaRecorder.ondataavailable = (event) => {
      audioChunks.current.push(event.data)
    }

    mediaRecorder.onstop = () => {
      const audioBlob = new Blob(audioChunks.current, { type: "audio/wav" })
      const url = URL.createObjectURL(audioBlob)
      console.log(url);
      setAudioURL(url)
      audioChunks.current = []
      const remote = new VoiceSend()
      remote.sendToVoiceRecognition(audioBlob,)
    }

    mediaRecorder.start()
    setIsRecording(true)

    }

    const stopRecording = () => {
      mediaRecorderRef.current?.stop()
      setIsRecording(false)
    }
  };


  const handleMicClick = () => {
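This hunk cuts off at the opening of handleMicClick, whose body is not shown in the diff. Assuming it simply toggles between the two functions above, a minimal sketch could be:

  // Hypothetical body (not shown in this diff): start or stop recording on mic click.
  const handleMicClick = () => {
    if (isRecording) {
      stopRecording()
    } else {
      startRecording()
    }
  }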
@@ -224,6 +219,7 @@ const InputOutputBackend: React.FC = () => {
        onSendClick={handleSendClick}
        onMicClick={handleMicClick}
        inputDisabled={inputDisabled}
        isRecording={isRecording}
      />
    </div>
  )
@@ -1,16 +1,10 @@
import axios from "axios";

class VoiceSend {
    sendToVoiceRecognition(audio_data: Blob) {
        console.log("sending recording...");

    voiceDataTemplate = {
        type: "basic",
        audio_data: null,
        option: "offline"
    }

    sendToVoiceRecognition(audio_data: Blob, data: any) {
        var dataSend = data
        dataSend['audio_data'] = audio_data
        const dataSend = { audio_data }
        axios.post("http://localhost:5000/interstellar_ai/api/voice_recognition", dataSend)
            .then((response: any) => {
                console.log(response['response'])
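One caveat with the reworked sendToVoiceRecognition: posting a Blob inside a plain object ({ audio_data }) sends it as JSON, and a Blob does not serialize to JSON, so the audio bytes never reach the server. A hedged sketch of sending the recording as multipart form data instead; only the endpoint URL comes from this commit, while the audio field name, the filename, and the response handling are assumptions (the option value mirrors the removed voiceDataTemplate):

import axios from "axios";

class VoiceSend {
    sendToVoiceRecognition(audio_data: Blob) {
        console.log("sending recording...");

        // Wrap the blob in FormData so the binary payload survives the request.
        const formData = new FormData();
        formData.append("audio", audio_data, "recording.wav"); // field name is an assumption
        formData.append("option", "offline");                  // mirrors the removed template

        return axios
            .post("http://localhost:5000/interstellar_ai/api/voice_recognition", formData)
            .then((response) => {
                // Response shape is an assumption; adjust to the backend's actual contract.
                console.log(response.data);
                return response.data;
            })
            .catch((error) => {
                console.error("voice recognition request failed:", error);
            });
    }
}

export default VoiceSend;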
@@ -4,11 +4,12 @@ interface InputProps {
  message: string;
  onSendClick: (message: string, override: boolean) => void;
  onMicClick: () => void;
  inputDisabled: boolean
  inputDisabled: boolean;
  isRecording:boolean
}

const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
  ({ message, onSendClick, onMicClick, inputDisabled }, ref: ForwardedRef<HTMLDivElement>) => {
  ({ message, onSendClick, onMicClick, inputDisabled, isRecording}, ref: ForwardedRef<HTMLDivElement>) => {
    const [inputValue, setInputValue] = useState('');

    useEffect(() => {
@@ -42,7 +43,7 @@ const InputFrontend = React.forwardRef<HTMLDivElement, InputProps>(
        <button type="button" onClick={() => onSendClick(inputValue, false)} disabled={inputDisabled ? true : false}>
          <img src="/img/send.svg" alt="send" />
        </button>
        <button type="button" onClick={onMicClick}>
        <button type="button" onClick={onMicClick} style={{backgroundColor: isRecording? "red" : "green"}}>
          <img src="/img/microphone.svg" alt="microphone" />
        </button>
      </div>
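The inline red/green background on the mic button is the only indicator that recording is active. A small variation, purely a sketch, that keeps the same isRecording prop but also exposes the state to assistive technology (the mic-button class names are made up here, not part of the commit):

        <button
          type="button"
          onClick={onMicClick}
          aria-pressed={isRecording}
          className={isRecording ? "mic-button recording" : "mic-button"}
        >
          <img src="/img/microphone.svg" alt="microphone" />
        </button>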