forked from React-Group/interstellar_ai

Compare commits

No commits in common. "8ee87b88e92b1e22e482046fe938d100ac82abfd" and "2650fb82055e020b8053a78040fedb7f1acbc7a2" have entirely different histories.

8ee87b88e9 ... 2650fb8205

3 changed files with 2 additions and 11 deletions
@@ -162,8 +162,6 @@ const Models: React.FC = () => {
     setRadioSelection(localStorage.getItem('radioSelection'))
     const handleStorageChange = () => {
       setSelectedModel(localStorage.getItem('selectedModel') || '');
-      console.log("Changed the selectedModel")
-      console.log(selectedModel)
     };
     handleStorageChange();
 
@@ -179,7 +177,7 @@ const Models: React.FC = () => {
   const handleModelChange = (event: React.ChangeEvent<HTMLSelectElement>) => {
     const newModel = event.target.value;
     setSelectedModel(newModel);
-    localStorage.setItem('selectedModel', newModel); // Update localStorage directly
+    localStorage.setItem('radioSelection', newModel); // Update localStorage directly
   };
 
   // Determine the filtered models based on current radioSelection
@@ -1,5 +1,3 @@
-from time import sleep
-
 from flask import Flask, request, jsonify
 from flask_cors import CORS
 import secrets
@@ -50,7 +48,6 @@ class API:
                 thread = threading.Thread(target=self.ai.process_local, args=(ai_model, messages, self, access_token))
                 thread.start()
                 thread.join()
-                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "mistral":
                 api_key = data.get('api_key')
@@ -58,7 +55,6 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
-                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "openai":
                 api_key = data.get('api_key')
@@ -66,7 +62,6 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
-                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "anthropic":
                 api_key = data.get('api_key')
@@ -74,7 +69,6 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
-                sleep(0.5)
                 return jsonify({'status': 200})
             elif model_type == "google":
                 api_key = data.get('api_key')
@@ -82,7 +76,6 @@ class API:
                                           args=(ai_model, messages, self, access_token, api_key))
                 thread.start()
                 thread.join()
-                sleep(0.5)
                 return jsonify({'status': 200})
 
             return jsonify({'status': 401, 'error': 'Invalid AI model type'})
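For context on why the removed sleep(0.5) calls are safe to drop: threading.Thread.join() already blocks until the worker function has returned, so the extra half-second wait only delayed the HTTP response. A minimal standalone sketch of that pattern, not the project's actual code (the worker body and its result handling here are invented purely for illustration):

import threading
import time

def process_local(result):
    # Stand-in for the worker the API dispatches to (self.ai.process_local in the diff);
    # the body is invented for illustration only.
    time.sleep(0.2)
    result["answer"] = "done"

result = {}
thread = threading.Thread(target=process_local, args=(result,))
thread.start()
thread.join()            # join() blocks until process_local has returned...
print(result["answer"])  # ...so the result is already available; no extra sleep is needed before responding.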