forked from React-Group/interstellar_ai

main #52
5 changed files with 79 additions and 185 deletions
@@ -1,8 +1,21 @@
"use client";
import React, { useState, useEffect } from 'react';

// Define all models that should be available.
const modelList = {
// Define the types for ModelType and ModelListType
type ModelType = {
  model_type: string;
  Math: string;
  Code: string;
  Language: string;
  Weather: string;
};

type ModelListType = {
  [key: string]: ModelType;
};

// Define the AI models list
const modelList: ModelListType = {
  'Offline Fast': {
    model_type: 'local',
    Math: 'qwen2-math:1.5b',
@@ -86,161 +99,47 @@ const modelList = {
    Code: 'open-codestral-mamba',
    Language: 'open-mistral-nemo',
    Weather: 'open-mistral-nemo',
  }
}


// Define the available selectedAIFunction options
const modelDropdown = {
  offlineNonFoss: ['Offline Fast', 'Offline Slow'],
  offlineFoss: ['Offline Fast (FOSS)', 'Offline Slow (FOSS)'],
  onlineNonFoss: [
    'Online Cheap (OpenAI)',
    'Online Expensive (OpenAI)',
    'Online Cheap (Anthropic)',
    'Online Expensive (Anthropic)',
    'Online Cheap (Google)',
    'Online Expensive (Google)',
    'Online (La Plateforme)'
  ],
  onlineFoss: ['Online (FOSS) (La Plateforme)'],
  },
};

const selectedAIFunction = [
  'Code',
  'Math',
  'Language',
  'Weather'
]
// AI Functions list
const aiFunctions = ['Math', 'Code', 'Language', 'Weather'] as const;
type AIFunction = typeof aiFunctions[number]; // Restrict to these exact function names

const ModelSection: React.FC = () => {
  // Initialize state with value from localStorage or default to ''
  const [selectedModelDropdown, setSelectedModelDropdown] = useState('');
  const [radioSelection, setRadioSelection] = useState<string | null>("")
  const [activeSelectedAIFunction, setActiveSelectedAIFunction] = useState('');
  const [currentSelectedAIFunction, setCurrentSelectedAIFunction] = useState<string | null>("");
  const [isOpenSourceMode, setIsOpenSourceMode] = useState<string|null>("false")
  const [selectedModelDropdown, setSelectedModelDropdown] = useState(() => "Offline Fast");
  const [activeSelectedAIFunction, setActiveSelectedAIFunction] = useState(() => "Code");
  const [, setSelectedModel] = useState<string>("");
  const [, setSelectedModelType] = useState<string>("");

  useEffect(() => {
    if (typeof localStorage !== 'undefined') {
      const defaultValues = {
        selectedModelDropdown: 'Offline Fast',
        activeSelectedAIFunction: 'Code',
        model: 'starcoder2',
        radioSelection: 'None',
        type: 'local',
      };

      Object.entries(defaultValues).forEach(([key, value]) => {
        if (!localStorage.getItem(key)) {
          localStorage.setItem(key, value);
        }
      });

      setIsOpenSourceMode(localStorage.getItem("openSourceMode"));
      setActiveSelectedAIFunction(localStorage.getItem("activeSelectedAIFunction") || '');
      setRadioSelection(localStorage.getItem("radioSelection") || '');
      setSelectedModelDropdown(localStorage.getItem('selectedModelDropdown') || '');

      const handleStorageChange = () => {
        setSelectedModelDropdown(localStorage.getItem('selectedModelDropdown') || '');
      };

      // Update immediately when localStorage changes
      window.addEventListener('storage', handleStorageChange);

      // Cleanup listener on component unmount
      return () => {
        window.removeEventListener('storage', handleStorageChange);
      };
    }
    setSelectedModelDropdown(localStorage.getItem("selectedModelDropdown") || "Offline Fast");
    setActiveSelectedAIFunction(localStorage.getItem("activeSelectedAIFunction") || "Code");
  }, []);

  // Update the model and type when the dropdown or function changes
  useEffect(() => {
    if (typeof localStorage !== 'undefined') {
      const storedActiveSelectedAIFunction = localStorage.getItem("activeSelectedAIFunction") || "";
      if (storedActiveSelectedAIFunction !== currentSelectedAIFunction) {
        setCurrentSelectedAIFunction(storedActiveSelectedAIFunction);
      }
    }
  }, [activeSelectedAIFunction]);
    const aiFunctionsActiveSelectedAIFunction = activeSelectedAIFunction as AIFunction
    const newSelectedModel = modelList[selectedModelDropdown]?.[aiFunctionsActiveSelectedAIFunction] || "";
    const newModelType = modelList[selectedModelDropdown]?.model_type || "";

    setSelectedModel(newSelectedModel);
    setSelectedModelType(newModelType);

    localStorage.setItem("model", newSelectedModel);
    localStorage.setItem("type", newModelType);
  }, [selectedModelDropdown, activeSelectedAIFunction]);

  const handleModelChange = (event: React.ChangeEvent<HTMLSelectElement>) => {
    const newModel = event.target.value;
    setSelectedModelDropdown(newModel);
    if (typeof localStorage !== 'undefined') {
      localStorage.setItem('selectedModelDropdown', newModel); // Update localStorage directly
      const model = localStorage.getItem('activeSelectedAIFunction') || "Code"
      modelClicked(model)
    }
    localStorage.setItem('selectedModelDropdown', newModel);
  };

  // Determine the filtered models based on current radioSelection
  const filteredModels = (() => {
    let models = [];
    switch (radioSelection) {
      case 'Offline':
        models = [
          ...modelDropdown.onlineNonFoss,
          ...modelDropdown.onlineFoss,
        ];
        if (isOpenSourceMode == "true") {
          models = [
            ...modelDropdown.onlineFoss,
          ];
        } // Show only offline models without FOSS
        break;
      case 'Online':
        models = [
          ...modelDropdown.offlineNonFoss,
          ...modelDropdown.offlineFoss,
        ];
        if (isOpenSourceMode == "true") {
          models = [
            ...modelDropdown.offlineFoss,
          ];
        } // Show only online models without FOSS
        break;
      case 'None':
        models = [
          ...modelDropdown.offlineNonFoss,
          ...modelDropdown.offlineFoss,
          ...modelDropdown.onlineNonFoss,
          ...modelDropdown.onlineFoss,
        ];
        if (isOpenSourceMode == "true") {
          models = [
            ...modelDropdown.offlineFoss,
            ...modelDropdown.onlineFoss,
          ];
        } // Show all models if nothing matches
        break;
      default:
        models = [
          ...modelDropdown.offlineNonFoss,
          ...modelDropdown.offlineFoss,
          ...modelDropdown.onlineNonFoss,
          ...modelDropdown.onlineFoss,
        ]; // Show all models if nothing matches
        break;
    }
    return Array.from(new Set(models)); // Remove duplicates using Set
  })();

  const isOfflineModel = (model: string) =>
    modelDropdown.offlineNonFoss.includes(model) || modelDropdown.offlineFoss.includes(model);

  const modelClicked = (model: string) => {
    if (typeof localStorage !== 'undefined') {
      localStorage.setItem('activeSelectedAIFunction', model)
      setActiveSelectedAIFunction(model)
      const modelDropdown = localStorage.getItem('selectedModelDropdown') || 'Offline Fast'
      const selectedAIFunction = modelDropdown as keyof typeof modelList;
      localStorage.setItem("model", modelList[selectedAIFunction][model as keyof typeof modelList[typeof selectedAIFunction]])
      localStorage.setItem("type", modelList[selectedAIFunction]['model_type' as keyof typeof modelList[typeof selectedAIFunction]])
    }
  }
  const modelClicked = (functionName: AIFunction) => {
    setActiveSelectedAIFunction(functionName);
    localStorage.setItem('activeSelectedAIFunction', functionName);
  };

  return (
    <div className="model-background">
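Aside (not part of the diff): the as const tuple introduced above is what lets AIFunction restrict modelClicked to the four known function names at compile time. A minimal standalone sketch; the logging body is illustrative only, not the component's actual handler:

// Sketch: the tuple is readonly, so its element type is the union of the four literals.
const aiFunctions = ['Math', 'Code', 'Language', 'Weather'] as const;
type AIFunction = typeof aiFunctions[number]; // 'Math' | 'Code' | 'Language' | 'Weather'

// Callers can only pass one of the four literals; anything else is a type error.
function modelClicked(functionName: AIFunction): void {
  console.log(`selected AI function: ${functionName}`);
}

modelClicked('Code');      // OK
// modelClicked('Chess');  // Type error: not assignable to AIFunction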
@@ -249,33 +148,29 @@ const ModelSection: React.FC = () => {
          <h3>Different AI Models</h3>
        </div>

        {/* Model Selection Dropdown */}
        <div className="model-dropdown">
          <label htmlFor="model-select">Select AI Model:</label>
          <select id="model-select" value={selectedModelDropdown} onChange={handleModelChange}>
            {filteredModels.map((model) => (
            {Object.keys(modelList).map((model) => (
              <option key={model} value={model}>
                {model}
              </option>
            ))}
          </select>
        </div>
        {/* Model Grid with Cards */}

        <div className="grid">
          {selectedAIFunction.map(
            (displayedCategory) => (
              <button
                key={displayedCategory}
                className={`${displayedCategory.toLowerCase()}-model model-box ${currentSelectedAIFunction === displayedCategory ? 'selected' : ''}`}
                onClick={() => modelClicked(displayedCategory)}
              >
                <div className="overlay">
                  <h3>{displayedCategory}</h3>
                  {isOfflineModel(selectedModelDropdown) && <img src="/img/nowifi.svg" alt="No Wi-Fi" />}
                </div>
              </button>
            )
          )}
          {aiFunctions.map((functionName) => (
            <button
              key={functionName}
              className={`${functionName.toLowerCase()}-model model-box ${activeSelectedAIFunction === functionName ? 'selected' : ''}`}
              onClick={() => modelClicked(functionName)}
            >
              <div className="overlay">
                <h3>{functionName}</h3>
              </div>
            </button>
          ))}
        </div>
      </div>
    </div>
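Aside (not part of the diff): other parts of the app can read the selections this component persists. A minimal sketch of such a read, reusing the localStorage keys and default values that appear in the diff; the readModelSelection helper itself is hypothetical:

// Sketch: read back the keys ModelSection writes, falling back to the defaults
// used in the diff ('starcoder2' / 'local' / 'Offline Fast' / 'Code').
type PersistedSelection = {
  model: string;
  type: string;
  selectedModelDropdown: string;
  activeSelectedAIFunction: string;
};

function readModelSelection(): PersistedSelection {
  const get = (key: string, fallback: string): string =>
    typeof localStorage !== 'undefined'
      ? localStorage.getItem(key) ?? fallback
      : fallback;

  return {
    model: get('model', 'starcoder2'),
    type: get('type', 'local'),
    selectedModelDropdown: get('selectedModelDropdown', 'Offline Fast'),
    activeSelectedAIFunction: get('activeSelectedAIFunction', 'Code'),
  };
}

console.log(readModelSelection());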
@@ -12,9 +12,11 @@ import PrivacySettings from './PrivacySettings';
import FontSizeSetting from './FontSize';
import OpenSourceModeToggle from './OpenSourceToggle';
import {
  changeHistory,
  changeSettings,
  createAccount,
  deleteAccount,
  getHistory,
} from '../../backend/database';
import ThemeDropdown from './DropDownTheme';

@@ -372,7 +374,11 @@ const Settings: React.FC<{ closeSettings: () => void; accountName: string }> = (
          localStorage.setItem("currentEmail", useEmail)
          alert('Account successfully changed!')
          window.location.reload()
        } else {
          alert("failed to send settings")
        }
      } else {
        alert("failed to create account")
      }
    }
  };

@@ -409,7 +415,7 @@ const Settings: React.FC<{ closeSettings: () => void; accountName: string }> = (
              checked={myBoolean}
              setChecked={setMyBoolean}
            />
             <TextSettings
            <TextSettings
              label="Nearest City"
              value={weatherInfo}
              type='text'

@@ -624,6 +630,7 @@ const Settings: React.FC<{ closeSettings: () => void; accountName: string }> = (
              onClick={handleLogout} // Function to call on click
              className="update-credentials-button" // Custom styling class
            />
            <p>WARNING: Will delete your chat history.</p>
            <ButtonSetting
              label="Update Credentials" // Button label
              onClick={handleUpdateCredentials} // Function to call on click
@@ -49,8 +49,8 @@
    border-radius: 5%;
    overflow: hidden;
    position: relative;
    height: 18vh;
    width: 18vh;
    height: 8vw;
    width: 8vw;
    margin: auto; /* Center each model box in the grid cell */
}
.model-box.selected {

@@ -57,8 +57,8 @@
  }
  /* Model box styles */
  .model-box {
    max-width: none; /* Remove max-width */
    margin: 0 auto; /* Center each model-box */
    width: 50vw;
    height: 50vw;
  }
  /* Input styles */
  .input {

py/ai.py (34 changes)
@@ -20,34 +20,29 @@ class AI:

        for chunk in stream:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk['message']['content']
                return_class.ai_response[access_token] += chunk["message"]["content"]

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token, api_key):

        client = Mistral(api_key=api_key)

        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )
        stream_response = client.chat.stream(model=model, messages=messages)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk.data.choices[0].delta.content
                return_class.ai_response[access_token] += chunk.data.choices[
                    0
                ].delta.content

    @staticmethod
    def process_openai(model, messages, return_class, access_token, api_key):

        client = OpenAI(api_key=api_key)

        stream_response = client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True
            model=model, messages=messages, stream=True
        )

        with return_class.ai_response_lock:

@@ -59,16 +54,15 @@ class AI:

    @staticmethod
    def process_anthropic(model, messages, return_class, access_token, api_key):

        client = anthropic.Anthropic(api_key=api_key)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        with client.messages.stream(
                max_tokens=1024,
                model=model,
                messages=messages,
            max_tokens=1024,
            model=model,
            messages=messages,
        ) as stream:
            for text in stream.text_stream:
                with return_class.ai_response_lock:
@@ -76,16 +70,15 @@ class AI:

    @staticmethod
    def process_google(model, messages, return_class, access_token, api_key):

        message = messages[-1]['content']
        message = messages[-1]["content"]
        messages.pop()

        for msg in messages:
            msg['parts'] = msg.pop('content')
            msg["parts"] = msg.pop("content")

        for msg in messages:
            if msg['role'] == 'assistant':
                msg['role'] = 'model'
            if msg["role"] == "assistant":
                msg["role"] = "model"

        genai.configure(api_key=api_key)
@@ -93,7 +86,6 @@ class AI:

        chat = model.start_chat(
            history=messages,

        )

        response = chat.send_message(message, stream=True)