convo with new prompts

YasinOnm08 2024-09-20 09:17:28 +02:00
parent ce635d3089
commit e279f01d42
3 changed files with 59 additions and 44 deletions


@@ -27,30 +27,31 @@ const handleCopyClick = () => {
 const InputOutputBackend: React.FC = () => {
   const [accessToken, setAccessToken] = useState("")
   const workerRef = useRef<Worker | null>(null)
+  type Message = {
+    role: string
+    content: string
+  }
+  type SystemPrompt = {
+  }
-  const handleSendClick = (message: string) => {
-    var system = "You give really short answers (maximum of 30 sentences). The following is the chat history."
-    for (let index = 0; index < messages.length; index++) {
-      system += messages[index] + " ";
-    };
+  const handleSendClick = (message:string) => {
+    var system:Message = {role:"system" ,content:"You are a helpful assistant that gives short answers."}
-    HandlePostRequest(message, "phi3.5", system)
-    addMessage('User: ' + message);
+    addMessage("user",message);
+    HandlePostRequest(messages, "phi3.5", system)
   };
   const [messages, setMessages] = useState([
-    'User: Hello!',
-    'AI: Hi there!',
-    'User: How are you?',
-    'AI: Im good, thank you!'
+    { role:"assistant", content:'Hello. I\'m Your AI Virtual Assistant' }
   ]);
-  const addMessage = (message: string) => {
-    setMessages((prevMessages) => [...prevMessages, message]);
+  const addMessage = (role:string ,content: string) => {
+    setMessages((prevMessages) => [...prevMessages, {role,content}]);
   };
   useEffect(() => {
     workerRef.current = new Worker(new URL("./ProcessAPI.js", import.meta.url))
     workerRef.current.postMessage({})
@@ -65,20 +66,20 @@ const InputOutputBackend: React.FC = () => {
     }
   },[])
-  const HandleGetRequest = (message: string, ai_model: string, system_prompt: string) => {
+  const HandleGetRequest = (messages: Message[], ai_model: string, system_prompt: Message) => {
     if (workerRef.current) {
-      workerRef.current.postMessage({ functionName: "getResponse", access_token: accessToken, message: message, ai_model: ai_model, system_prompt: system_prompt })
+      workerRef.current.postMessage({ functionName: "getResponse", access_token: accessToken, messages: messages, ai_model: ai_model, system_prompt: system_prompt })
       workerRef.current.onmessage = (e) => {
-        addMessage("AI: " + e.data)
+        addMessage("assistant",e.data)
       }
     }
   }
-  const HandlePostRequest = (message: string, ai_model: string, system_prompt: string) => {
+  const HandlePostRequest = (messages: Message[], ai_model: string, system_prompt: Message) => {
     if (workerRef.current) {
-      workerRef.current.postMessage({ functionName: "postRequest", access_token: accessToken, message: message, ai_model: ai_model, system_prompt: system_prompt })
+      workerRef.current.postMessage({ functionName: "postRequest", access_token: accessToken, messages: messages, ai_model: ai_model, system_prompt: system_prompt })
       workerRef.current.onmessage = (e) => {
-        HandleGetRequest(message,ai_model,system_prompt)
+        HandleGetRequest(messages,ai_model,system_prompt)
       }
     }
   }
@@ -97,7 +98,7 @@ const InputOutputBackend: React.FC = () => {
         onMicClick={handleMicClick}
       />
     </div>
-  );
+  )
 }
 export default InputOutputBackend
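
The hunks above only show the component side of the worker protocol; ProcessAPI.js itself is not part of this diff. Below is a minimal sketch of what the worker might do with the postRequest/getResponse messages posted here. Only the message fields (functionName, access_token, messages, ai_model, system_prompt) come from the commit; the "/interface" endpoint, the request body layout, the plain-text response, and the handling of the initial empty postMessage({}) are assumptions.

// Hypothetical ProcessAPI worker sketch, mirroring the message shape used by
// HandlePostRequest/HandleGetRequest in the component above.
type Message = { role: string; content: string }

type WorkerRequest = {
  functionName: "postRequest" | "getResponse"
  access_token: string
  messages: Message[]
  ai_model: string
  system_prompt: Message
}

self.onmessage = async (e: MessageEvent<WorkerRequest>) => {
  const { functionName, access_token, messages, ai_model, system_prompt } = e.data
  // Note: the initial postMessage({}) from the component (token setup) is not
  // handled in this sketch; neither branch below matches it.

  if (functionName === "postRequest") {
    // Queue the conversation (system prompt first) for the chosen model.
    // "/interface" and the JSON body layout are assumptions, not from the commit.
    await fetch("/interface", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ access_token, ai_model, messages: [system_prompt, ...messages] }),
    })
    // Any reply unblocks HandlePostRequest's onmessage, which then calls HandleGetRequest.
    self.postMessage("queued")
  } else if (functionName === "getResponse") {
    // Fetch the generated answer and hand the raw text back to the UI,
    // where it is appended via addMessage("assistant", e.data).
    const response = await fetch(`/interface?access_token=${access_token}`)
    self.postMessage(await response.text())
  }
}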