diff --git a/app/backend/InputOutputHandler.tsx b/app/backend/InputOutputHandler.tsx
index 17fee2f..d6b8885 100644
--- a/app/backend/InputOutputHandler.tsx
+++ b/app/backend/InputOutputHandler.tsx
@@ -51,7 +51,7 @@ const InputOutputBackend: React.FC = () => {
             ,
         },
         { role: "assistant", content: "Hello! How may I help you?" },
-      ]); 
+      ]);
     }
   }, [preferredCurrency, preferredLanguage, timeFormat, dateFormat, preferredMeasurement, timeZone]);
 
diff --git a/app/components/Models.tsx b/app/components/Models.tsx
index a839a1c..5d7a6d7 100644
--- a/app/components/Models.tsx
+++ b/app/components/Models.tsx
@@ -162,6 +162,8 @@ const Models: React.FC = () => {
     setRadioSelection(localStorage.getItem('radioSelection'))
     const handleStorageChange = () => {
       setSelectedModel(localStorage.getItem('selectedModel') || '');
+      // Log the freshly stored value; the selectedModel state variable is still stale inside this closure
+      console.log("selectedModel changed to:", localStorage.getItem('selectedModel') || '');
     };
     handleStorageChange();
 
@@ -177,7 +179,7 @@ const Models: React.FC = () => {
   const handleModelChange = (event: React.ChangeEvent<HTMLSelectElement>) => {
     const newModel = event.target.value;
     setSelectedModel(newModel);
-    localStorage.setItem('radioSelection', newModel); // Update localStorage directly
+    localStorage.setItem('selectedModel', newModel); // Persist the selection under the 'selectedModel' key
   };
 
   // Determine the filtered models based on current radioSelection
diff --git a/py/api.py b/py/api.py
index 7683afe..bfaf1cd 100644
--- a/py/api.py
+++ b/py/api.py
@@ -1,3 +1,5 @@
+from time import sleep
+
 from flask import Flask, request, jsonify
 from flask_cors import CORS
 import secrets
@@ -48,6 +50,7 @@ class API:
                 thread = threading.Thread(target=self.ai.process_local, args=(ai_model, messages, self, access_token))
                 thread.start()
                 thread.join()
+                sleep(0.5)  # brief pause after the worker thread has finished, before returning
                 return jsonify({'status': 200})
             elif model_type == "mistral":
                 api_key = data.get('api_key')
@@ -55,6 +58,7 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
+                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "openai":
                 api_key = data.get('api_key')
@@ -62,6 +66,7 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
+                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "anthropic":
                 api_key = data.get('api_key')
@@ -69,6 +74,7 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
+                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "google":
                 api_key = data.get('api_key')
@@ -76,6 +82,7 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
+                sleep(0.5)
                 return jsonify({'status': 200})
 
             return jsonify({'status': 401, 'error': 'Invalid AI model type'})