interstellar_ai/app/backend/InputOutputHandler.tsx

"use client"
import React, { use, useEffect, useRef, useState } from "react";
import ConversationFrontend from "../components/ConversationFrontend";
import InputFrontend from "../components/InputFrontend";
import VoiceSend from "./voice_backend"
import { AudioRecorder } from "./AudioRecorder";
import axios from "axios";
import { resolve } from "path";
import { FFmpeg } from "@ffmpeg/ffmpeg";
import { fetchFile, toBlobURL } from "@ffmpeg/util"
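
// Client-side handler that wires the chat UI to the backend: it posts the
// conversation to a PostWorker, receives the generated reply through a
// GetWorker, and manages microphone recording for voice input.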
const InputOutputBackend: React.FC = () => {
  type Message = {
    role: string
    content: string
  }

  const [copyClicked, setCopyClicked] = useState(false)
  const [accessToken, setAccessToken] = useState("")
  const postWorkerRef = useRef<Worker | null>(null)
  const getWorkerRef = useRef<Worker | null>(null)
  const [messages, setMessages] = useState<Message[]>([{ role: "assistant", content: "Hello! How can I help you?" }])
  const [liveMessage, setLiveMessage] = useState("")
  const [inputMessage, setInputMessage] = useState<string>("")
  const [inputDisabled, setInputDisabled] = useState(false)
  const [isRecording, setIsRecording] = useState(false)
  const [audioURL, setAudioURL] = useState<string | null>(null)
  const mediaRecorderRef = useRef<MediaRecorder | null>(null)
  const audioChunks = useRef<Blob[]>([])

  console.log(messages);
  useEffect(() => {
    getNewToken()

    postWorkerRef.current = new Worker(new URL("./threads/PostWorker.js", import.meta.url))

    postWorkerRef.current.onmessage = (event) => {
      const status = event.data.status
      if (status === 200) {
        setInputDisabled(false)
        endGetWorker()
      } else if (status === 500) {
        setInputDisabled(false)
        if (getWorkerRef.current) {
          addMessage("assistant", "There was an error with the AI response")
          getWorkerRef.current.postMessage("terminate")
          getWorkerRef.current.terminate()
        }
      }
    }

    return () => {
      if (postWorkerRef.current) {
        postWorkerRef.current.terminate()
      }
      if (getWorkerRef.current) {
        getWorkerRef.current.postMessage("terminate")
        getWorkerRef.current.terminate()
      }
    }
  }, [])
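
  // Fetch a fresh access token from the backend API and store it for the workers.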
  const getNewToken = () => {
    console.log("getting access");
    axios.get("http://localhost:5000/interstellar_ai/api/ai_create")
      .then(response => {
        setAccessToken(response.data.access_token)
        console.log(response.data.access_token);
      })
      .catch(error => {
        console.log("error:", error.message);
      })
  }
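
  // Spawn the GetWorker for the current access token, append an empty
  // assistant message as a placeholder, and write incoming response text
  // into that placeholder as it arrives.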
  const startGetWorker = () => {
    if (!getWorkerRef.current) {
      getWorkerRef.current = new Worker(new URL("./threads/GetWorker.js", import.meta.url))
      getWorkerRef.current.postMessage({ action: "start", access_token: accessToken })
      addMessage("assistant", "")

      getWorkerRef.current.onmessage = (event) => {
        const data = event.data
        if (data === "error" || data.error) {
          setLiveMessage("Error getting AI response: " + (data.error ?? "unknown error"))
        } else {
          console.log("Received data:", data);
          editLastMessage(data.response)
        }
      }

      getWorkerRef.current.onerror = (error) => {
        console.error("Worker error:", error)
      }
    }
  }
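
  // Tell the GetWorker to stop, then terminate and clear the reference.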
  const endGetWorker = () => {
    if (getWorkerRef.current) {
      getWorkerRef.current.postMessage({ action: "terminate" })
      getWorkerRef.current.terminate()
      getWorkerRef.current = null
      console.log(messages);
    }
  }
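
  // Replace the content of the most recent message (the assistant placeholder)
  // with the latest response text, showing a fallback while it is still empty.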
  const editLastMessage = (newContent: string) => {
    if (newContent === "") {
      newContent = "Generating answer..."
    }
    setMessages((prevMessages) => {
      const updatedMessages = prevMessages.slice(); // Create a shallow copy of the current messages
      if (updatedMessages.length > 0) {
        const lastMessage = updatedMessages[updatedMessages.length - 1];
        updatedMessages[updatedMessages.length - 1] = {
          ...lastMessage, // Keep the existing role and other properties
          content: newContent, // Update only the content
        };
      }
      return updatedMessages; // Return the updated array
    });
  };
  /* Variables for the system prompt. localStorage is unavailable while this
     client component is rendered on the server, so settings are read lazily
     behind a window check. */
  const readSetting = (key: string) =>
    typeof window !== "undefined" ? localStorage.getItem(key) || "" : ""

  const [preferredCurrency, setPreferredCurrency] = useState(() => readSetting("preferredCurrency"))
  const [preferredLanguage, setPreferredLanguage] = useState(() => readSetting("preferredLanguage"))
  const [timeFormat, setTimeFormat] = useState(() => readSetting("timeFormat"))
  const [preferredMeasurement, setPreferredMeasurement] = useState(() => readSetting("preferredMeasurement"))
  const [timeZone, setTimeZone] = useState(() => readSetting("timeZone"))
  const [dateFormat, setDateFormat] = useState(() => readSetting("dateFormat"))

  useEffect(() => {
  }, [preferredCurrency, preferredLanguage, timeFormat, preferredMeasurement, timeZone, dateFormat])
  const addMessage = (role: string, content: string) => {
    setMessages(previous => [...previous, { role, content }])
  }
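
  // Send the user's input: append it to the conversation, hand the full
  // message list to the PostWorker, and start the GetWorker to receive the reply.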
  const handleSendClick = (inputValue: string, override: boolean) => {
    if (inputValue !== "") {
      if (!inputDisabled || override) {
        setInputDisabled(true)
        if (postWorkerRef.current) {
          addMessage("user", inputValue)
          console.log("input:", inputValue);
          postWorkerRef.current.postMessage({ messages: [...messages, { role: "user", content: inputValue }], ai_model: "phi3.5", access_token: accessToken })
          startGetWorker()
        }
      }
    }
  }
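
  // Record microphone audio with MediaRecorder and hand the finished clip to
  // the voice-recognition backend.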
  const startRecording = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const mediaRecorder = new MediaRecorder(stream)
    mediaRecorderRef.current = mediaRecorder

    mediaRecorder.ondataavailable = (event) => {
      audioChunks.current.push(event.data)
    }

    mediaRecorder.onstop = async () => {
      const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" })
      audioChunks.current = []
      // console.log(audioBlob);
      // const url = URL.createObjectURL(audioBlob)
      // const audio = new Audio(url);
      // audio.play().catch(error => console.error("Error playing audio:", error));
      const remote = new VoiceSend()
      remote.sendToVoiceRecognition(audioBlob)
    }

    mediaRecorder.start()
    setIsRecording(true)
  }
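
  // ffmpeg.wasm helpers: lazily load FFmpeg once, then convert recorded OGG
  // audio to WAV entirely in the browser.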
  const ffmpegRef = useRef<FFmpeg | null>(null)
  const audioRef = useRef("")

  const loadFFmpeg = async () => {
    if (!ffmpegRef.current) {
      ffmpegRef.current = new FFmpeg()
      await ffmpegRef.current.load()
    }
  }

  const convertOggToWav = async (oggFile: File | Blob) => {
    await loadFFmpeg()
    const ffmpeg = ffmpegRef.current!

    await ffmpeg.writeFile("input.ogg", await fetchFile(oggFile))
    await ffmpeg.exec(["-i", "input.ogg", "output.wav"])

    const wavData = await ffmpeg.readFile("output.wav")
    console.log(wavData);
    const wavBlob = new Blob([wavData], { type: "audio/wav" })
    audioRef.current = URL.createObjectURL(wavBlob)
    return wavBlob
  }
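
  // Stop the active recording; the MediaRecorder onstop handler then sends
  // the captured audio. The mic button toggles between start and stop.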
  const stopRecording = () => {
    mediaRecorderRef.current?.stop()
    setIsRecording(false)
  }

  const handleMicClick = () => {
    if (!isRecording) {
      startRecording();
    } else {
      stopRecording();
    }
  };
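
  // Resend: drop the last user/assistant pair, refresh the token, and submit
  // the previous user message again. Edit: do the same, but put the previous
  // text back into the input field instead of resending it.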
  const handleResendClick = () => {
    const temporary_message = messages[messages.length - 2]['content']
    const updatedMessages = messages.slice(0, -2)
    setMessages(updatedMessages)
    endGetWorker()
    getNewToken()
    setInputDisabled(false)
    handleSendClick(temporary_message, true)
  }

  const handleEditClick = () => {
    setInputMessage(messages[messages.length - 2]['content'])
    const updatedMessages = messages.slice(0, -2)
    setMessages(updatedMessages)
    endGetWorker()
    getNewToken()
    setInputDisabled(false)
  }
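
  // Copy the latest assistant message to the clipboard and briefly flash the
  // "copied" state in the UI.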
  const handleCopyClick = async () => {
    setCopyClicked(false)
    try {
      await navigator.clipboard.writeText(messages[messages.length - 1]['content']);
      fadeCopyText()
    } catch (err) {
      console.error('Failed to copy: ', err);
    }
  }

  const wait = (time: number) => {
    return new Promise(resolve => setTimeout(resolve, time));
  }

  const fadeCopyText = async () => {
    setCopyClicked(true)
    await wait(1000)
    setCopyClicked(false)
  }
  return (
    <div>
      <ConversationFrontend
        messages={messages}
        onResendClick={handleResendClick}
        onEditClick={handleEditClick}
        onCopyClick={handleCopyClick}
        isClicked={copyClicked}
      />
      <InputFrontend
        message={inputMessage}
        onSendClick={handleSendClick}
        onMicClick={handleMicClick}
        inputDisabled={inputDisabled}
        isRecording={isRecording}
      />
    </div>
  )
}

export default InputOutputBackend