"use client" import React, { use, useEffect, useRef, useState } from "react"; import ConversationFrontend from "../components/ConversationFrontend"; import InputFrontend from "../components/InputFrontend"; import VoiceSend from "./voice_backend" import { AudioRecorder } from "./AudioRecorder"; import axios from "axios"; import { resolve } from "path"; import { FFmpeg } from "@ffmpeg/ffmpeg"; import { fetchFile, toBlobURL } from "@ffmpeg/util" const InputOutputBackend: React.FC = () => { type Message = { role: string content: string } const [preferredCurrency, setPreferredCurrency] = useState(null); const [preferredLanguage, setPreferredLanguage] = useState(null); const [timeFormat, setTimeFormat] = useState(null); const [preferredMeasurement, setPreferredMeasurement] = useState(null); const [timeZone, setTimeZone] = useState(null); const [dateFormat, setDateFormat] = useState(null); const [messages, setMessages] = useState([]); useEffect(() => { setPreferredCurrency(localStorage.getItem("preferredCurrency")); setPreferredLanguage(localStorage.getItem("preferredLanguage")); setTimeFormat(localStorage.getItem("timeFormat")); setPreferredMeasurement(localStorage.getItem("preferredMeasurement")); setTimeZone(localStorage.getItem("timeZone")); setDateFormat(localStorage.getItem("dateFormat")); }, []); useEffect(() => { if (preferredCurrency && preferredLanguage && timeFormat && dateFormat && preferredMeasurement && timeZone) { setMessages([ { role: "system", content: `You are in the timezone: ${timeZone}. You use the time format ${timeFormat}. You use the date format ${dateFormat} for all references of dates. You use the ${preferredMeasurement} system. You use the currency ${preferredCurrency}. You will only answer in the language (you will receive the country code) ${preferredLanguage}. But in the case the user specifically states to answer in another language, do that. Speaking in another language is not stating you should answer in that language. Additionally, under no circumstances translate your answer into multiple languages.`, }, { role: "assistant", content: "Hello! How can I help you?" 
        },
      ]);
    }
  }, [preferredCurrency, preferredLanguage, timeFormat, dateFormat, preferredMeasurement, timeZone]);

  const [copyClicked, setCopyClicked] = useState(false)
  const [accessToken, setAccessToken] = useState("")
  const postWorkerRef = useRef<Worker | null>(null)
  const getWorkerRef = useRef<Worker | null>(null)
  const [liveMessage, setLiveMessage] = useState("")
  const [inputMessage, setInputMessage] = useState("")
  const [inputDisabled, setInputDisabled] = useState(false)
  const [isRecording, setIsRecording] = useState(false)
  const mediaRecorderRef = useRef<MediaRecorder | null>(null)
  const audioChunks = useRef<Blob[]>([])

  useEffect(() => {
    getNewToken()

    // The post worker sends the conversation to the backend and reports the
    // HTTP status; the get worker streams the answer back (see startGetWorker).
    postWorkerRef.current = new Worker(new URL("./threads/PostWorker.js", import.meta.url))

    postWorkerRef.current.onmessage = (event) => {
      const status = event.data.status
      if (status === 200) {
        setInputDisabled(false)
        endGetWorker()
      } else if (status === 500) {
        setInputDisabled(false)
        if (getWorkerRef.current) {
          addMessage("assistant", "There was an error with the AI response.")
          getWorkerRef.current.postMessage("terminate")
          getWorkerRef.current.terminate()
        }
      }
    }

    // Clean up both workers when the component unmounts.
    return () => {
      if (postWorkerRef.current) {
        postWorkerRef.current.terminate()
      }
      if (getWorkerRef.current) {
        getWorkerRef.current.postMessage("terminate")
        getWorkerRef.current.terminate()
      }
    }
  }, [])

  // Request a fresh access token for this conversation from the backend.
  const getNewToken = () => {
    axios.get("http://localhost:5000/interstellar_ai/api/ai_create")
      .then(response => {
        setAccessToken(response.data.access_token)
      })
      .catch(error => {
        console.log("error:", error.message);
      })
  }

  // Spawn the worker that polls for the partial AI answer and streams it into
  // the last (assistant) message.
  const startGetWorker = () => {
    if (!getWorkerRef.current) {
      getWorkerRef.current = new Worker(new URL("./threads/GetWorker.js", import.meta.url))

      getWorkerRef.current.postMessage({ action: "start", access_token: accessToken })
      addMessage("assistant", "")

      getWorkerRef.current.onmessage = (event) => {
        const data = event.data
        if (data === "error") {
          setLiveMessage("Error getting AI response")
        } else {
          editLastMessage(data.response)
        }
      }

      getWorkerRef.current.onerror = (error) => {
        console.error("Worker error:", error)
      }
    }
  }

  const endGetWorker = () => {
    if (getWorkerRef.current) {
      getWorkerRef.current.postMessage({ action: "terminate" })
      getWorkerRef.current.terminate()
      getWorkerRef.current = null
    }
  }

  // Replace the content of the last message; an empty string becomes a
  // placeholder while the answer is still being generated.
  const editLastMessage = (newContent: string) => {
    if (newContent === "") {
      newContent = "Generating answer..."
    }
    setMessages((prevMessages) => {
      // Shallow-copy the array, then overwrite only the content of the last
      // message while keeping its role and other properties.
      const updatedMessages = prevMessages.slice();
      if (updatedMessages.length > 0) {
        const lastMessage = updatedMessages[updatedMessages.length - 1];
        updatedMessages[updatedMessages.length - 1] = {
          ...lastMessage,
          content: newContent,
        };
      }
      return updatedMessages;
    });
  };

  const addMessage = (role: string, content: string) => {
    setMessages(previous => [...previous, { role, content }])
  }

  // Send the current input to the post worker and start streaming the answer.
  // `override` allows resending even while input is disabled.
  const handleSendClick = (inputValue: string, override: boolean) => {
    if (inputValue != "") {
      if (!inputDisabled || override) {
        setInputDisabled(true)
        if (postWorkerRef.current) {
          addMessage("user", inputValue)
          postWorkerRef.current.postMessage({
            messages: [...messages, { role: "user", content: inputValue }],
            ai_model: "llama3.2",
            access_token: accessToken,
          })
          startGetWorker()
        }
      }
    }
  }

  // Record microphone audio and hand the finished blob to the voice backend.
  const startRecording = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
    const mediaRecorder = new MediaRecorder(stream)
    mediaRecorderRef.current = mediaRecorder

    mediaRecorder.ondataavailable = (event) => {
      audioChunks.current.push(event.data)
    }

    mediaRecorder.onstop = async () => {
      const audioBlob = new Blob(audioChunks.current, { type: "audio/ogg" })
      audioChunks.current = []
      const remote = new VoiceSend()
      remote.sendToVoiceRecognition(audioBlob)
    }

    mediaRecorder.start()
    setIsRecording(true)
  }

  const stopRecording = () => {
    mediaRecorderRef.current?.stop()
    setIsRecording(false)
  }

  const handleMicClick = () => {
    if (!isRecording) {
      startRecording();
    } else {
      stopRecording();
    }
  };

  // Drop the last user/assistant pair and send the user message again.
  const handleResendClick = () => {
    const temporaryMessage = messages[messages.length - 2].content
    setMessages(messages.slice(0, -2))
    endGetWorker()
    getNewToken()
    setInputDisabled(false)
    handleSendClick(temporaryMessage, true)
  }

  // Drop the last user/assistant pair and put the user message back into the input field.
  const handleEditClick = () => {
    setInputMessage(messages[messages.length - 2].content)
    setMessages(messages.slice(0, -2))
    endGetWorker()
    getNewToken()
    setInputDisabled(false)
  }

  const handleCopyClick = async () => {
    setCopyClicked(false)
    try {
      await navigator.clipboard.writeText(messages[messages.length - 1].content);
      fadeCopyText()
    } catch (err) {
      console.error('Failed to copy: ', err);
    }
  }

  const wait = (time: number) => {
    return new Promise(resolve => setTimeout(resolve, time));
  }

  // Briefly show the "copied" state, then fade it out again.
  const fadeCopyText = async () => {
    setCopyClicked(true)
    await wait(1000)
    setCopyClicked(false)
  }

  return (
  )
}

export default InputOutputBackend
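
/*
  For reference, a minimal sketch of the worker protocol this component assumes for
  ./threads/PostWorker.js and ./threads/GetWorker.js. This is inferred from the
  postMessage payloads and onmessage handlers above, not from the actual worker
  sources; the backend endpoint names (ai_send, ai_get) and the polling interval
  are placeholders.

  // PostWorker.js (sketch): receives { messages, ai_model, access_token },
  // forwards the conversation to the backend, and reports the HTTP status back.
  onmessage = async (event) => {
    const { messages, ai_model, access_token } = event.data
    const res = await fetch("http://localhost:5000/interstellar_ai/api/ai_send", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ messages, ai_model, access_token }),
    })
    postMessage({ status: res.status })
  }

  // GetWorker.js (sketch): on { action: "start", access_token } it polls the backend
  // for the partial answer and posts { response } until it is told to terminate
  // (either the string "terminate" or { action: "terminate" }, both of which the
  // component sends).
  let polling = null
  onmessage = (event) => {
    if (event.data === "terminate" || event.data.action === "terminate") {
      clearInterval(polling)
      return
    }
    if (event.data.action === "start") {
      polling = setInterval(async () => {
        const res = await fetch(
          "http://localhost:5000/interstellar_ai/api/ai_get?access_token=" + event.data.access_token
        )
        const data = await res.json()
        postMessage({ response: data.response })
      }, 500)
    }
  }
*/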