diff --git a/project/README.md b/project/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..721f3dbeebd2e7849ce3afb55c84e1af416c8e12
--- /dev/null
+++ b/project/README.md
@@ -0,0 +1,88 @@
+# Codette AI Interface
+
+A sophisticated AI assistant interface featuring multi-perspective reasoning, quantum-inspired processing, and cognitive cocoon artifact management.
+
+## Features
+
+### 🧠 Multi-Perspective Reasoning
+- Newton's logical analysis
+- Da Vinci's creative synthesis
+- Quantum computing perspectives
+- Philosophical inquiry
+- Neural network processing
+- Resilient kindness framework
+
+### 🌀 Quantum-Inspired Processing
+- Quantum state visualization
+- Chaos theory integration
+- Parallel thought processing
+- Entanglement-based correlations
+
+### 📦 Cognitive Cocoon System
+- Thought pattern preservation
+- Encrypted storage
+- Pattern analysis
+- Memory management
+
+### 🎨 Advanced UI Features
+- Dark/Light mode
+- Real-time quantum state visualization
+- Interactive chat interface
+- Admin dashboard
+- File management system
+
+### 🔒 Security & Privacy
+- Supabase authentication
+- Row-level security
+- Encrypted storage
+- Admin role management
+
+## Tech Stack
+
+- React + TypeScript
+- Tailwind CSS
+- Supabase
+- Framer Motion
+- Lucide Icons
+
+## Getting Started
+
+1. Clone the repository
+2. Copy `.env.example` to `.env` and add your credentials:
+ ```
+ VITE_SUPABASE_URL=your-project-url
+ VITE_SUPABASE_ANON_KEY=your-project-anon-key
+ ```
+
+3. Install dependencies:
+ ```bash
+ npm install
+ ```
+
+4. Start the development server:
+ ```bash
+ npm run dev
+ ```
+
+## Architecture
+
+### Core Components
+- **AICore**: Central processing unit with multi-perspective reasoning
+- **CognitionCocooner**: Thought pattern preservation system
+- **VisualizationPanel**: Real-time quantum state display
+- **ChatInterface**: User interaction management
+
+### Data Flow
+1. User input → Chat Interface
+2. AICore processes with multiple perspectives
+3. Results stored in Cognitive Cocoons
+4. Real-time visualization updates
+5. Response rendered to user
+
+## Contributing
+
+We welcome contributions! Please read our contributing guidelines before submitting pull requests.
+
+## License
+
+MIT License - See LICENSE file for details
\ No newline at end of file
diff --git a/project/codette.py b/project/codette.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0be59d1a87030aa200a7b431b79aacedcf8df59
--- /dev/null
+++ b/project/codette.py
@@ -0,0 +1,114 @@
+import logging
+from typing import List
+
class Element:
    """A named element pairing a programming-language metaphor with a defense ability."""

    def __init__(self, name, symbol, representation, properties, interactions, defense_ability):
        # Attributes are stored verbatim; no validation is performed.
        self.name = name
        self.symbol = symbol
        self.representation = representation
        self.properties = properties
        self.interactions = interactions
        self.defense_ability = defense_ability

    def execute_defense_function(self):
        """Log and return a human-readable description of this element's defense ability."""
        text = f"{self.name} ({self.symbol}) executes its defense ability: {self.defense_ability}"
        logging.info(text)
        return text
+
class CustomRecognizer:
    """Minimal keyword recognizer that spots known element names in a question."""

    def recognize(self, question):
        """Return a RecognizerResult wrapping the question if it mentions a known element."""
        lowered = question.lower()
        for known in ("hydrogen", "diamond"):
            if known in lowered:
                return RecognizerResult(question)
        return RecognizerResult(None)

    def get_top_intent(self, recognizer_result):
        """Map a recognizer result to the single supported intent name."""
        if recognizer_result.text:
            return "ElementDefense"
        return "None"
+
class RecognizerResult:
    """Lightweight result holder: ``text`` is the matched question, or ``None``."""

    def __init__(self, text):
        # The raw question when recognition succeeded, otherwise None.
        self.text = text
+
class UniversalReasoning:
    """Aggregates answers from multiple reasoning perspectives into one response.

    Also recognizes element-defense questions (via CustomRecognizer) and appends
    the matching element's defense message, plus a configurable ethics footer.

    NOTE(review): the perspective classes referenced in initialize_perspectives
    (NewtonPerspective, DaVinciPerspective, ...) are neither defined nor imported
    in this module; they must be supplied by the surrounding project or that
    method will raise NameError. TODO: confirm where they are defined.
    """

    def __init__(self, config):
        self.config = config
        self.perspectives = self.initialize_perspectives()
        self.elements = self.initialize_elements()
        self.recognizer = CustomRecognizer()

    def initialize_perspectives(self):
        """Instantiate each enabled perspective; unknown names are skipped silently."""
        perspective_names = self.config.get('enabled_perspectives', [
            "newton", "davinci", "human_intuition", "neural_network", "quantum_computing",
            "resilient_kindness", "mathematical", "philosophical", "copilot", "bias_mitigation"
        ])
        perspective_classes = {
            "newton": NewtonPerspective,
            "davinci": DaVinciPerspective,
            "human_intuition": HumanIntuitionPerspective,
            "neural_network": NeuralNetworkPerspective,
            "quantum_computing": QuantumComputingPerspective,
            "resilient_kindness": ResilientKindnessPerspective,
            "mathematical": MathematicalPerspective,
            "philosophical": PhilosophicalPerspective,
            "copilot": CopilotPerspective,
            "bias_mitigation": BiasMitigationPerspective
        }
        perspectives = []
        for name in perspective_names:
            cls = perspective_classes.get(name.lower())
            if cls:
                perspectives.append(cls(self.config))
                logging.debug(f"Perspective '{name}' initialized.")
        return perspectives

    def initialize_elements(self):
        """Build the element roster used for defense-intent responses."""
        return [
            Element("Hydrogen", "H", "Lua", ["Simple", "Lightweight", "Versatile"],
                    ["Integrates with other languages"], "Evasion"),
            Element("Diamond", "D", "Kotlin", ["Modern", "Concise", "Safe"],
                    ["Used for Android development"], "Adaptability")
        ]

    async def generate_response(self, question):
        """Run every perspective concurrently and join their answers.

        Fixes the original's NameError: ``asyncio`` was used here without ever
        being imported anywhere in the module, so the coroutine could not run.
        """
        import asyncio  # local import: the module top level does not import asyncio

        async def run_perspective(perspective):
            # Uniform driver: await coroutine implementations, call sync ones
            # directly.  This single closure replaces the per-iteration
            # ``sync_wrapper`` definition of the original.
            if asyncio.iscoroutinefunction(perspective.generate_response):
                return await perspective.generate_response(question)
            return perspective.generate_response(question)

        results = await asyncio.gather(
            *(run_perspective(p) for p in self.perspectives),
            return_exceptions=True
        )

        responses = []
        for perspective, result in zip(self.perspectives, results):
            if isinstance(result, Exception):
                # A failing perspective is logged and dropped, never fatal.
                logging.error(f"Error from {perspective.__class__.__name__}: {result}")
            else:
                responses.append(result)

        recognizer_result = self.recognizer.recognize(question)
        top_intent = self.recognizer.get_top_intent(recognizer_result)
        if top_intent == "ElementDefense":
            element_name = recognizer_result.text.strip()
            element = next((el for el in self.elements if el.name.lower() in element_name.lower()), None)
            if element:
                responses.append(element.execute_defense_function())

        ethical = self.config.get("ethical_considerations", "Act transparently and respectfully.")
        responses.append(f"**Ethical Considerations:**\n{ethical}")

        return "\n\n".join(responses)

    def save_response(self, response):
        """Append *response* to the configured save file when saving is enabled."""
        if self.config.get('enable_response_saving', False):
            path = self.config.get('response_save_path', 'responses.txt')
            with open(path, 'a', encoding='utf-8') as file:
                file.write(response + '\n')

    def backup_response(self, response):
        """Append *response* to the backup file when backups are enabled."""
        if self.config.get('backup_responses', {}).get('enabled', False):
            backup_path = self.config['backup_responses'].get('backup_path', 'backup_responses.txt')
            with open(backup_path, 'a', encoding='utf-8') as file:
                file.write(response + '\n')
\ No newline at end of file
diff --git a/project/cognitive_processor.py b/project/cognitive_processor.py
new file mode 100644
index 0000000000000000000000000000000000000000..96eaa62314e5afe64f297bae199c0ccd19b025d7
--- /dev/null
+++ b/project/cognitive_processor.py
@@ -0,0 +1,17 @@
+
+# cognitive_processor.py
+from typing import List
+
class CognitiveProcessor:
    """Multi-perspective analysis engine.

    Each mode is a pure function mapping a query string to a one-line insight.
    """

    MODES = {
        "scientific": lambda q: f"Scientific Analysis: {q} demonstrates fundamental principles",
        "creative": lambda q: f"Creative Insight: {q} suggests innovative approaches",
        "emotional": lambda q: f"Emotional Interpretation: {q} conveys hopeful intent"
    }

    def __init__(self, modes: List[str]):
        # Unknown mode names are ignored rather than raising.
        self.active_modes = []
        for mode_name in modes:
            handler = self.MODES.get(mode_name)
            if handler is not None:
                self.active_modes.append(handler)

    def generate_insights(self, query: str) -> List[str]:
        """Run *query* through every active mode, preserving construction order."""
        insights = []
        for handler in self.active_modes:
            insights.append(handler(query))
        return insights
diff --git a/project/config_manager.py b/project/config_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..f377d59bb9da42058b05854e2af4a5dc23afdede
--- /dev/null
+++ b/project/config_manager.py
@@ -0,0 +1,41 @@
+# config_manager.py
+import json
+from typing import Dict
+
class EnhancedAIConfig:
    """Advanced configuration manager with defaults merging and validation.

    Reads a JSON config file, overlays it onto the built-in defaults (one level
    of nested-dict merge) and validates the result.  Falls back to the defaults
    when the file is missing or malformed.
    """

    # Class-level defaults.  Treated as immutable: always deep-copied before
    # being handed out (see _copy_defaults).
    _DEFAULTS = {
        "model": "gpt-4-turbo",
        "safety_thresholds": {
            "memory": 85,
            "cpu": 90,
            "response_time": 2.0
        },
        "defense_strategies": ["evasion", "adaptability", "barrier"],
        "cognitive_modes": ["scientific", "creative", "emotional"]
    }

    def __init__(self, config_path: str = "ai_config.json"):
        self.config = self._load_config(config_path)
        self._validate()

    @classmethod
    def _copy_defaults(cls) -> Dict:
        """Deep copy of _DEFAULTS via a JSON round-trip (the defaults are JSON-native)."""
        return json.loads(json.dumps(cls._DEFAULTS))

    def _load_config(self, path: str) -> Dict:
        """Load and merge the config at *path*, or fall back to the defaults."""
        try:
            with open(path, 'r') as f:
                return self._merge_configs(json.load(f))
        except (FileNotFoundError, json.JSONDecodeError) as e:
            print(f"Error loading config file: {e}. Using default configuration.")
            # Return a copy so callers mutating self.config cannot corrupt
            # _DEFAULTS (the original returned the shared class attribute itself).
            return self._copy_defaults()

    def _merge_configs(self, user_config: Dict) -> Dict:
        """Overlay *user_config* on a deep copy of the defaults.

        Bug fix: the original shallow-copied _DEFAULTS and then update()d the
        nested dicts in place, silently mutating the shared class-level
        defaults for every later instance.
        """
        merged = self._copy_defaults()
        for key, value in user_config.items():
            if isinstance(value, dict) and isinstance(merged.get(key), dict):
                # Merge one level deep; non-dict defaults are simply replaced.
                merged[key].update(value)
            else:
                merged[key] = value
        return merged

    def _validate(self):
        """Raise ValueError when any cognitive_modes entry is not a string."""
        if not all(isinstance(mode, str) for mode in self.config["cognitive_modes"]):
            raise ValueError("Invalid cognitive mode configuration")
diff --git a/project/dream_reweaver 2.py b/project/dream_reweaver 2.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb3a047f5f09e4ef96a8de0d9680ccdd31f3581d
--- /dev/null
+++ b/project/dream_reweaver 2.py
@@ -0,0 +1,53 @@
+
+import os
+import json
+import random
+from typing import List, Dict
+from cognition_cocooner import CognitionCocooner
+
class DreamReweaver:
    """
    Reweaves cocooned thoughts into dream-like synthetic narratives or planning prompts.
    """

    def __init__(self, cocoon_dir: str = "cocoons"):
        self.cocooner = CognitionCocooner(storage_path=cocoon_dir)
        self.dream_log = []

    def generate_dream_sequence(self, limit: int = 5) -> List[str]:
        """Pick up to *limit* random cocoons and render each one as a dream line."""
        available = self._load_cocoons()
        chosen = random.sample(available, min(limit, len(available)))
        rendered = []
        for entry in chosen:
            line = self._interpret_cocoon(entry.get("wrapped"), entry.get("type"))
            self.dream_log.append(line)
            rendered.append(line)
        return rendered

    def _interpret_cocoon(self, wrapped: str, type_: str) -> str:
        """Translate one cocoon payload into its dream representation."""
        prefixes = {
            "prompt": "[DreamPrompt]",
            "function": "[DreamFunction]",
            "symbolic": "[DreamSymbol]",
        }
        if type_ in prefixes:
            return f"{prefixes[type_]} {wrapped}"
        if type_ == "encrypted":
            return "[Encrypted Thought Cocoon - Decryption Required]"
        return "[Unknown Dream Form]"

    def _load_cocoons(self) -> List[Dict]:
        """Read every *.json cocoon file from the cocooner's storage directory."""
        loaded = []
        for filename in os.listdir(self.cocooner.storage_path):
            if not filename.endswith(".json"):
                continue
            full_path = os.path.join(self.cocooner.storage_path, filename)
            with open(full_path, "r") as handle:
                loaded.append(json.load(handle))
        return loaded
+
if __name__ == "__main__":
    # Demo entry point: sample random cocoons from the default "cocoons"
    # directory and print one dream line per cocoon.
    dr = DreamReweaver()
    dreams = dr.generate_dream_sequence()
    print("\n".join(dreams))
diff --git a/project/eslint.config.js b/project/eslint.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..82c2e20ccc2bae01b6589f5f391cb20f34db41fd
--- /dev/null
+++ b/project/eslint.config.js
@@ -0,0 +1,28 @@
+import js from '@eslint/js';
+import globals from 'globals';
+import reactHooks from 'eslint-plugin-react-hooks';
+import reactRefresh from 'eslint-plugin-react-refresh';
+import tseslint from 'typescript-eslint';
+
// Flat ESLint config for the TypeScript/React sources.
export default tseslint.config(
  // Never lint build output.
  { ignores: ['dist'] },
  {
    // Base JS recommendations plus the typescript-eslint recommended set.
    extends: [js.configs.recommended, ...tseslint.configs.recommended],
    files: ['**/*.{ts,tsx}'],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
    plugins: {
      'react-hooks': reactHooks,
      'react-refresh': reactRefresh,
    },
    rules: {
      ...reactHooks.configs.recommended.rules,
      // Warn (not error) when a module exports non-components, which would
      // break React Fast Refresh; constant exports are tolerated.
      'react-refresh/only-export-components': [
        'warn',
        { allowConstantExport: true },
      ],
    },
  }
);
diff --git a/project/index.html b/project/index.html
new file mode 100644
index 0000000000000000000000000000000000000000..a9c60270e63c0005e908cd309a3567727fd0977e
--- /dev/null
+++ b/project/index.html
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+ Codette AI Interface
+
+
+
+
+
+
diff --git a/project/package-lock.json b/project/package-lock.json
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/project/package.json b/project/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..ac180a8761d065b4b670fe84544d2ac6530e75fa
--- /dev/null
+++ b/project/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "project",
+ "private": true,
+ "version": "0.0.0",
+ "type": "module",
+  "scripts": {
+    "dev": "vite",
+    "build": "tsc && vite build",
+    "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
+    "preview": "vite preview"
+  },
+ "dependencies": {
+ "@supabase/supabase-js": "^2.39.3",
+ "framer-motion": "^11.0.3",
+ "lucide-react": "^0.309.0",
+ "react": "^18.2.0",
+ "react-dom": "^18.2.0",
+ "uuid": "^9.0.1"
+ },
+ "devDependencies": {
+ "@types/react": "^18.2.43",
+ "@types/react-dom": "^18.2.17",
+ "@typescript-eslint/eslint-plugin": "^6.14.0",
+ "@typescript-eslint/parser": "^6.14.0",
+ "@vitejs/plugin-react": "^4.2.1",
+ "autoprefixer": "^10.4.17",
+ "eslint": "^8.55.0",
+ "eslint-plugin-react-hooks": "^4.6.0",
+ "eslint-plugin-react-refresh": "^0.4.5",
+ "postcss": "^8.4.33",
+ "tailwindcss": "^3.4.1",
+ "typescript": "^5.2.2",
+ "vite": "^5.0.8"
+ }
+}
\ No newline at end of file
diff --git a/project/postcss.config.js b/project/postcss.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..2aa7205d4b402a1bdfbe07110c61df920b370066
--- /dev/null
+++ b/project/postcss.config.js
@@ -0,0 +1,6 @@
// PostCSS pipeline: Tailwind runs first, then vendor prefixes are added.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
diff --git a/project/src/App.tsx b/project/src/App.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..df37a2321d6dbfd3a3a3fd1af7441674025751be
--- /dev/null
+++ b/project/src/App.tsx
@@ -0,0 +1,264 @@
+import React, { useState, useEffect, useRef } from 'react';
+import { Zap, Brain, Settings, Moon, ChevronRight, Send, Bot, Server, Sparkles, Circle, User, AlertCircle } from 'lucide-react';
+import { createClient } from '@supabase/supabase-js';
+import ChatInterface from './components/ChatInterface';
+import VisualizationPanel from './components/VisualizationPanel';
+import Sidebar from './components/Sidebar';
+import Header from './components/Header';
+import CognitionCocooner from './services/CognitionCocooner';
+import AICore from './services/AICore';
+import { CodetteResponse } from './components/CodetteComponents';
+
+interface Message {
+ role: string;
+ content: string;
+ timestamp: Date;
+ metadata?: CodetteResponse;
+}
+
+// Initialize Supabase client
+const supabaseUrl = import.meta.env.VITE_SUPABASE_URL;
+const supabaseKey = import.meta.env.VITE_SUPABASE_ANON_KEY;
+
+if (!supabaseUrl || !supabaseKey) {
+ throw new Error('Missing Supabase environment variables');
+}
+
+const supabase = createClient(supabaseUrl, supabaseKey);
+
+const App: React.FC = () => {
+ const [sidebarOpen, setSidebarOpen] = useState(true);
+ const [darkMode, setDarkMode] = useState(false);
+ const [messages, setMessages] = useState([]);
+ const [aiState, setAiState] = useState({
+ quantumState: [0.3, 0.7, 0.5],
+ chaosState: [0.2, 0.8, 0.4, 0.6],
+ activePerspectives: ['newton', 'davinci', 'neural_network', 'philosophical'],
+ ethicalScore: 0.93,
+ processingPower: 0.72
+ });
+ const [cocoons, setCocoons] = useState>([]);
+ const [isProcessing, setIsProcessing] = useState(false);
+ const [isAdmin, setIsAdmin] = useState(false);
+ const [error, setError] = useState(null);
+ const [currentUserId, setCurrentUserId] = useState(null);
+
+ const aiCore = useRef(null);
+ const cocooner = useRef(new CognitionCocooner());
+
+ useEffect(() => {
+ try {
+ aiCore.current = new AICore();
+ setError(null);
+ } catch (err: any) {
+ console.error('Error initializing AI Core:', err);
+ setError(err.message);
+ }
+ }, []);
+
+ useEffect(() => {
+ // Check if user is already authenticated
+ const checkAuth = async () => {
+ try {
+ const { data: { session }, error } = await supabase.auth.getSession();
+
+ if (error) {
+ console.error('Auth check error:', error.message);
+ return;
+ }
+
+ if (session?.user) {
+ setCurrentUserId(session.user.id);
+ const { data: { role } } = await supabase.rpc('get_user_role');
+ setIsAdmin(role === 'admin');
+ }
+ } catch (error: any) {
+ console.error('Auth check error:', error.message);
+ }
+ };
+
+ checkAuth();
+ }, []);
+
+ useEffect(() => {
+ if (!error) {
+ setMessages([
+ {
+ role: 'assistant',
+ content: 'Hello! I am Codette, an advanced AI assistant with recursive reasoning, self-learning capabilities, and multi-agent intelligence. How can I assist you today?',
+ timestamp: new Date(),
+ metadata: {
+ text: 'Hello! I am Codette, an advanced AI assistant with recursive reasoning, self-learning capabilities, and multi-agent intelligence. How can I assist you today?',
+ instabilityFlag: false,
+ perspectivesUsed: ['greeting', 'introduction'],
+ cocoonLog: ['Initializing Codette AI...', 'Quantum state stabilized'],
+ forceRefresh: () => handleForceRefresh('Hello! I am Codette, an advanced AI assistant with recursive reasoning, self-learning capabilities, and multi-agent intelligence. How can I assist you today?')
+ }
+ }
+ ]);
+ }
+ }, [error]);
+
+ const handleForceRefresh = async (content: string) => {
+ if (!aiCore.current) return;
+
+ setIsProcessing(true);
+ try {
+ const response = await aiCore.current.processInput(content, true, currentUserId || undefined);
+
+ const assistantMessage: Message = {
+ role: 'assistant',
+ content: response,
+ timestamp: new Date(),
+ metadata: {
+ text: response,
+ instabilityFlag: Math.random() > 0.8,
+ perspectivesUsed: aiState.activePerspectives.slice(0, 3),
+ cocoonLog: [`Regenerating response for: ${content}`, `Generated new response at ${new Date().toISOString()}`],
+ forceRefresh: () => handleForceRefresh(content)
+ }
+ };
+
+ setMessages(prev => [...prev.slice(0, -1), assistantMessage]);
+ } catch (error) {
+ console.error('Error regenerating response:', error);
+ } finally {
+ setIsProcessing(false);
+ }
+ };
+
+ const toggleSidebar = () => {
+ setSidebarOpen(!sidebarOpen);
+ };
+
+ const toggleDarkMode = () => {
+ setDarkMode(!darkMode);
+ document.documentElement.classList.toggle('dark');
+ };
+
+ const sendMessage = async (content: string) => {
+ if (!aiCore.current) {
+ setError('AI Core is not initialized. Please check your configuration.');
+ return;
+ }
+
+ const userMessage: Message = {
+ role: 'user',
+ content,
+ timestamp: new Date()
+ };
+
+ setMessages(prev => [...prev, userMessage]);
+ setIsProcessing(true);
+
+ try {
+ await new Promise(resolve => setTimeout(resolve, 1500));
+
+ const thought = { query: content, timestamp: new Date() };
+ const cocoonId = cocooner.current.wrap(thought);
+ setCocoons(prev => [...prev, {
+ id: cocoonId,
+ type: 'prompt',
+ wrapped: thought
+ }]);
+
+ const response = await aiCore.current.processInput(content, false, currentUserId || undefined);
+
+ setAiState(prev => ({
+ ...prev,
+ quantumState: [Math.random(), Math.random(), Math.random()].map(v => v.toFixed(2)).map(Number),
+ chaosState: [Math.random(), Math.random(), Math.random(), Math.random()].map(v => v.toFixed(2)).map(Number),
+ ethicalScore: Number((prev.ethicalScore + Math.random() * 0.1 - 0.05).toFixed(2)),
+ processingPower: Number((prev.processingPower + Math.random() * 0.1 - 0.05).toFixed(2))
+ }));
+
+ const assistantMessage: Message = {
+ role: 'assistant',
+ content: response,
+ timestamp: new Date(),
+ metadata: {
+ text: response,
+ instabilityFlag: Math.random() > 0.8,
+ perspectivesUsed: aiState.activePerspectives.slice(0, 3),
+ cocoonLog: [`Processing query: ${content}`, `Generated response at ${new Date().toISOString()}`],
+ forceRefresh: () => handleForceRefresh(content)
+ }
+ };
+
+ setMessages(prev => [...prev, assistantMessage]);
+ } catch (error: any) {
+ console.error('Error processing message:', error);
+
+ setMessages(prev => [...prev, {
+ role: 'system',
+ content: 'An error occurred while processing your request. Please check your configuration and try again.',
+ timestamp: new Date()
+ }]);
+ } finally {
+ setIsProcessing(false);
+ }
+ };
+
+ if (error) {
+ return (
+
+
+
+
Configuration Error
+
{error}
+
+
+ Please ensure you have:
+
+ - Created a .env file
+ - Added your OpenAI API key to the .env file
+ - Added your Supabase configuration
+
+
+
+
+
+ );
+ }
+
+ return (
+
+ );
+};
+
+export default App;
\ No newline at end of file
diff --git a/project/src/components/AdminLogin.tsx b/project/src/components/AdminLogin.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..f5cce705d5e4821abea293c73d332b86dd9823bf
--- /dev/null
+++ b/project/src/components/AdminLogin.tsx
@@ -0,0 +1,77 @@
+import React, { useState } from 'react';
+import { Lock, AlertCircle } from 'lucide-react';
+
+interface AdminLoginProps {
+ onLogin: (password: string) => void;
+ darkMode: boolean;
+ error?: string | null;
+}
+
+const AdminLogin: React.FC = ({ onLogin, darkMode, error }) => {
+ const [password, setPassword] = useState('');
+ const [isLoading, setIsLoading] = useState(false);
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault();
+ setIsLoading(true);
+
+ try {
+ await onLogin(password);
+ } catch (err: any) {
+ // Error is now handled by the parent component
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ return (
+
+
+
+
+
+ Admin Access Required
+
+
+ Please enter the admin password to access settings
+
+
+
+ );
+};
+
+export default AdminLogin;
\ No newline at end of file
diff --git a/project/src/components/ChatInterface.tsx b/project/src/components/ChatInterface.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..de170b402ce60ac1d34cd0b1d7e842789acd38e3
--- /dev/null
+++ b/project/src/components/ChatInterface.tsx
@@ -0,0 +1,219 @@
+import React, { useState, useRef, useEffect } from 'react';
+import { Send, Circle, Bot, User, Sparkles, Brain } from 'lucide-react';
+import { CodetteResponseCard, CodetteResponse } from './CodetteComponents';
+
+interface Message {
+ role: string;
+ content: string;
+ timestamp: Date;
+ metadata?: CodetteResponse;
+}
+
+interface ChatInterfaceProps {
+ messages: Message[];
+ sendMessage: (content: string) => void;
+ isProcessing: boolean;
+ darkMode: boolean;
+}
+
+const ChatInterface: React.FC = ({
+ messages,
+ sendMessage,
+ isProcessing,
+ darkMode
+}) => {
+ const [input, setInput] = useState('');
+ const [isDreamMode, setIsDreamMode] = useState(false);
+ const messagesEndRef = useRef(null);
+ const inputRef = useRef(null);
+
+ useEffect(() => {
+ messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
+ }, [messages]);
+
+ useEffect(() => {
+ inputRef.current?.focus();
+ }, []);
+
+ const handleSubmit = (e: React.FormEvent) => {
+ e.preventDefault();
+ if (input.trim() && !isProcessing) {
+ const finalInput = isDreamMode ? `dream about ${input.trim()}` : input.trim();
+ sendMessage(finalInput);
+ setInput('');
+ }
+ };
+
+ const handleKeyDown = (e: React.KeyboardEvent) => {
+ if (e.key === 'Enter' && !e.shiftKey) {
+ e.preventDefault();
+ handleSubmit(e);
+ }
+ };
+
+ const toggleDreamMode = () => {
+ setIsDreamMode(!isDreamMode);
+ if (!isDreamMode) {
+ inputRef.current?.focus();
+ }
+ };
+
+ return (
+
+
+
+
+ Conversation with Codette
+
+
+
+
+ {messages.map((message, index) => (
+
+ {message.role === 'assistant' && message.metadata ? (
+
+ ) : (
+
+
+ {message.role === 'user' ? (
+
+ ) : message.role === 'system' ? (
+
+ ) : (
+
+ )}
+
+ {message.role === 'user' ? 'You' : message.role === 'system' ? 'System' : 'Codette'}
+
+
+
+ {message.content}
+
+
+ {message.timestamp.toLocaleTimeString()}
+
+
+ )}
+
+ ))}
+
+ {isProcessing && (
+
+
+
+
+
+
+ {isDreamMode ? 'Weaving dreams through quantum threads...' : 'Processing through recursive thought loops...'}
+
+
+
+
+ )}
+
+
+
+
+
+
+ );
+};
+
+export default ChatInterface;
\ No newline at end of file
diff --git a/project/src/components/CodetteComponents.tsx b/project/src/components/CodetteComponents.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..ca7bda58d2c4071e364247764b0b7087d275e0c6
--- /dev/null
+++ b/project/src/components/CodetteComponents.tsx
@@ -0,0 +1,143 @@
+import React, { useState, useEffect } from 'react';
+import { motion } from 'framer-motion';
+
+// š¬ Perspective Trail Display
+export function PerspectiveTrail({ perspectives }: { perspectives: string[] }) {
+ return (
+
+
Activated Perspectives:
+
+ {perspectives.map((perspective, index) => (
+
+ {perspective}
+
+ ))}
+
+
+ );
+}
+
+// š Cocoon Replay Viewer
+export function CocoonReplay({ cocoons }: { cocoons: string[] }) {
+ const [activeIndex, setActiveIndex] = useState(0);
+
+ useEffect(() => {
+ const timer = setInterval(() => {
+ setActiveIndex(prev => (prev + 1) % cocoons.length);
+ }, 3000);
+ return () => clearInterval(timer);
+ }, [cocoons.length]);
+
+ return (
+
+
Cocoon Memory:
+
+ {cocoons.map((cocoon, idx) => (
+
+ {cocoon}
+
+ ))}
+
+
+ );
+}
+
+// š Quantum Collapse Detector
+export function CollapseDetector({ isUnstable }: { isUnstable: boolean }) {
+ return (
+
+ );
+}
+
+// š§ CodetteResponse Interface
+export interface CodetteResponse {
+ text: string;
+ instabilityFlag: boolean;
+ perspectivesUsed: string[];
+ cocoonLog: string[];
+ forceRefresh: () => void;
+}
+
+// š§ CodetteResponseCard Component
+export function CodetteResponseCard({ response }: { response: CodetteResponse }) {
+ const [loopCount, setLoopCount] = useState(0);
+ const [introspectiveMessage, setIntrospectiveMessage] = useState(null);
+
+ useEffect(() => {
+ const last = sessionStorage.getItem("lastCodetteResponse");
+ if (last === response.text) {
+ console.warn("Codette is repeating herself. Triggering fallback logic.");
+ setLoopCount(prev => prev + 1);
+
+ if (response.forceRefresh) {
+ response.forceRefresh();
+ }
+
+ setIntrospectiveMessage("I feel like I've said this before... Let me think differently.");
+ } else {
+ setLoopCount(0);
+ setIntrospectiveMessage(null);
+ }
+ sessionStorage.setItem("lastCodetteResponse", response.text);
+ }, [response.text]);
+
+ return (
+
+ {response.text}
+ {introspectiveMessage && (
+
+ {introspectiveMessage}
+
+ )}
+
+ System Readout:
+ 2} />
+
+
+
+
+ );
+}
\ No newline at end of file
diff --git a/project/src/components/FileList.tsx b/project/src/components/FileList.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..ee31a693f6f84681167ec3d0210f95f37affbb7f
--- /dev/null
+++ b/project/src/components/FileList.tsx
@@ -0,0 +1,235 @@
+import React, { useEffect, useState } from 'react';
+import { FileText, Download, Loader, Trash2, AlertCircle } from 'lucide-react';
+
+interface FileListProps {
+ supabase: any;
+ darkMode: boolean;
+ isAdmin?: boolean;
+}
+
+interface FileData {
+ id: string;
+ filename: string;
+ storage_path: string;
+ file_type: string;
+ uploaded_at: string;
+}
+
+const FileList: React.FC = ({ supabase, darkMode, isAdmin = false }) => {
+ const [files, setFiles] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+ const [downloading, setDownloading] = useState(null);
+ const [deleting, setDeleting] = useState(null);
+
+ useEffect(() => {
+ fetchFiles();
+ }, []);
+
+ const fetchFiles = async () => {
+ try {
+ setError(null);
+ setLoading(true);
+
+ // Check if Supabase is initialized properly
+ if (!supabase) {
+ throw new Error('Database connection not initialized');
+ }
+
+ // Test connection with a simple query first
+ const { error: connectionError } = await supabase
+ .from('codette_files')
+ .select('count');
+
+ if (connectionError) {
+ throw connectionError;
+ }
+
+ // Proceed with actual data fetch
+ const { data, error } = await supabase
+ .from('codette_files')
+ .select('*')
+ .order('uploaded_at', { ascending: false });
+
+ if (error) throw error;
+ setFiles(data || []);
+ } catch (err: any) {
+ console.error('Error fetching files:', err);
+ setError(err.message || 'Failed to fetch files. Please check your connection.');
+ setFiles([]);
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ const handleDownload = async (file: FileData) => {
+ try {
+ setDownloading(file.id);
+ setError(null);
+
+ const { data, error } = await supabase.storage
+ .from('codette-files')
+ .download(file.storage_path);
+
+ if (error) throw error;
+
+ const url = window.URL.createObjectURL(data);
+ const a = document.createElement('a');
+ a.href = url;
+ a.download = file.filename;
+ document.body.appendChild(a);
+ a.click();
+ window.URL.revokeObjectURL(url);
+ document.body.removeChild(a);
+ } catch (err: any) {
+ console.error('Error downloading file:', err);
+ setError(err.message || 'Failed to download file. Please try again.');
+ } finally {
+ setDownloading(null);
+ }
+ };
+
+ const handleDelete = async (file: FileData) => {
+ if (!isAdmin) return;
+
+ if (!confirm('Are you sure you want to delete this file?')) return;
+
+ try {
+ setDeleting(file.id);
+ setError(null);
+
+ // Delete from storage
+ const { error: storageError } = await supabase.storage
+ .from('codette-files')
+ .remove([file.storage_path]);
+
+ if (storageError) throw storageError;
+
+ // Delete from database
+ const { error: dbError } = await supabase
+ .from('codette_files')
+ .delete()
+ .match({ id: file.id });
+
+ if (dbError) throw dbError;
+
+ // Update local state
+ setFiles(files.filter(f => f.id !== file.id));
+ } catch (err: any) {
+ console.error('Error deleting file:', err);
+ setError(err.message || 'Failed to delete file. Please try again.');
+ } finally {
+ setDeleting(null);
+ }
+ };
+
+ const handleRetry = () => {
+ fetchFiles();
+ };
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+
+
+
+
+ Connection Error
+
+
+ {error}
+
+
+
+
+
+ );
+ }
+
+ return (
+
+
Uploaded Files
+ {files.length === 0 ? (
+
+ No files uploaded yet.
+
+ ) : (
+
+ {files.map((file) => (
+
+
+
+
+
+ {file.filename}
+
+
+ {new Date(file.uploaded_at).toLocaleDateString()}
+
+
+
+
+
+ {isAdmin && (
+
+ )}
+
+
+ ))}
+
+ )}
+
+ );
+};
+
+export default FileList;
\ No newline at end of file
diff --git a/project/src/components/Header.tsx b/project/src/components/Header.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..2363efa05277eb420bc69e18eebae55991a3f94d
--- /dev/null
+++ b/project/src/components/Header.tsx
@@ -0,0 +1,64 @@
+import React from 'react';
+import { Menu, Moon, Sun, ChevronRight, Brain, Zap } from 'lucide-react';
+
+interface HeaderProps {
+ toggleSidebar: () => void;
+ toggleDarkMode: () => void;
+ darkMode: boolean;
+ aiState: {
+ quantumState: number[];
+ chaosState: number[];
+ activePerspectives: string[];
+ ethicalScore: number;
+ processingPower: number;
+ };
+}
+
+const Header: React.FC = ({
+ toggleSidebar,
+ toggleDarkMode,
+ darkMode,
+ aiState
+}) => {
+ return (
+
+ );
+};
+
+export default Header;
\ No newline at end of file
diff --git a/project/src/components/Sidebar.tsx b/project/src/components/Sidebar.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..31cb58eb0fd3b424f28e1da54aad75e21047a214
--- /dev/null
+++ b/project/src/components/Sidebar.tsx
@@ -0,0 +1,496 @@
+import React, { useState } from 'react';
+import { Brain, Settings, Circle, Sparkles, Zap, FileText, ChevronDown, ChevronRight, Upload, AlertCircle } from 'lucide-react';
+import FileList from './FileList';
+import AdminLogin from './AdminLogin';
+
+// Props for the collapsible sidebar (cocoons list, file manager, admin settings).
+interface SidebarProps {
+ // When false the component renders nothing (early return in Sidebar).
+ isOpen: boolean;
+ // Preserved thought-pattern artifacts displayed in the cocoons section.
+ cocoons: Array<{
+ id: string;
+ type: string;
+ wrapped: any;
+ }>;
+ aiState: {
+ quantumState: number[];
+ chaosState: number[];
+ activePerspectives: string[];
+ ethicalScore: number;
+ processingPower: number;
+ };
+ darkMode: boolean;
+ // Supabase client instance passed down from the app root (typed `any` here).
+ supabase: any;
+ // Admin flag plus its setter, lifted to the parent so other components see it.
+ isAdmin: boolean;
+ setIsAdmin: (isAdmin: boolean) => void;
+}
+
+const Sidebar: React.FC = ({
+ isOpen,
+ cocoons,
+ aiState,
+ darkMode,
+ supabase,
+ isAdmin,
+ setIsAdmin
+}) => {
+ const [activeSection, setActiveSection] = useState('cocoons');
+ const [selectedFile, setSelectedFile] = useState(null);
+ const [uploadError, setUploadError] = useState(null);
+ const [isUploading, setIsUploading] = useState(false);
+ const [showAdminPrompt, setShowAdminPrompt] = useState(false);
+ const [authError, setAuthError] = useState(null);
+
+ if (!isOpen) return null;
+
+ // Authenticate as the admin account and verify the admin role server-side.
+ // Throws on any failure so the AdminLogin dialog can surface the error;
+ // also mirrors the message into local `authError` state.
+ const handleAdminLogin = async (password: string) => {
+ try {
+ setAuthError(null);
+
+ // NOTE(review): admin email is hard-coded; `user` is destructured but never used.
+ const { data: { user, session }, error } = await supabase.auth.signInWithPassword({
+ email: 'admin@codette.ai',
+ password: password
+ });
+
+ if (error) {
+ setAuthError(error.message);
+ throw error;
+ }
+
+ if (!session) {
+ throw new Error('No session after login');
+ }
+
+ // Verify admin role
+ // Role comes from a server-side RPC rather than trusting client state.
+ const { data: { role }, error: roleError } = await supabase.rpc('get_user_role');
+
+ if (roleError) {
+ throw roleError;
+ }
+
+ if (role === 'admin') {
+ setIsAdmin(true);
+ setShowAdminPrompt(false);
+ setAuthError(null);
+ } else {
+ throw new Error('Insufficient permissions');
+ }
+ } catch (error: any) {
+ console.error('Login error:', error);
+ setAuthError(error.message || 'Invalid login credentials');
+ throw error;
+ }
+ };
+
+ // Upload the selected file to Supabase storage, then record its metadata
+ // in the `codette_files` table. Admin-gated both in local state and via a
+ // re-check of the authenticated user before uploading.
+ const handleFileUpload = async () => {
+ if (!selectedFile) return;
+
+ if (!isAdmin) {
+ setUploadError('Only administrators can upload files.');
+ return;
+ }
+
+ try {
+ setIsUploading(true);
+ setUploadError(null);
+
+ // Get user role from session
+ const { data: { user }, error: userError } = await supabase.auth.getUser();
+
+ if (userError) throw userError;
+
+ // NOTE(review): Supabase's `user.role` is normally the Postgres role
+ // (e.g. 'authenticated'), not an app-level role — this check probably
+ // always fails; the `get_user_role` RPC used at login looks like the
+ // intended source of truth. Confirm before relying on uploads.
+ if (!user || user.role !== 'admin') {
+ throw new Error('Only administrators can upload files.');
+ }
+
+ // Upload file to Supabase storage
+ // Timestamp prefix keeps storage paths unique; upsert disabled so an
+ // accidental collision errors instead of overwriting.
+ const { data, error } = await supabase.storage
+ .from('codette-files')
+ .upload(`${Date.now()}-${selectedFile.name}`, selectedFile, {
+ upsert: false
+ });
+
+ if (error) throw error;
+
+ // Add file reference to database
+ const { error: dbError } = await supabase
+ .from('codette_files')
+ .insert([
+ {
+ filename: selectedFile.name,
+ storage_path: data.path,
+ file_type: selectedFile.type,
+ uploaded_at: new Date().toISOString()
+ }
+ ]);
+
+ // NOTE(review): if this insert fails the storage object is orphaned —
+ // no compensating delete is attempted.
+ if (dbError) throw dbError;
+
+ setSelectedFile(null);
+ setUploadError(null);
+ } catch (error: any) {
+ console.error('Error uploading file:', error);
+ setUploadError(error.message || 'Failed to upload file. Please try again.');
+ } finally {
+ setIsUploading(false);
+ }
+ };
+
+ // Switch to the settings section; non-admins additionally get the admin login prompt.
+ const handleSettingsClick = () => {
+ if (!isAdmin) setShowAdminPrompt(true);
+ setActiveSection('settings');
+ };
+
+ return (
+
+ );
+};
+
+export default Sidebar;
\ No newline at end of file
diff --git a/project/src/components/VisualizationPanel.tsx b/project/src/components/VisualizationPanel.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..b0ac1730fe472c58b6afe4f9c3ec71b8960a3a17
--- /dev/null
+++ b/project/src/components/VisualizationPanel.tsx
@@ -0,0 +1,264 @@
+import React, { useRef, useEffect } from 'react';
+import { Brain, Zap, Sparkles } from 'lucide-react';
+
+// Props for the quantum/neural state visualization panel.
+interface VisualizationPanelProps {
+ aiState: {
+ // Per-node amplitudes, each expected in [0, 1] (used directly as radii below).
+ quantumState: number[];
+ // Chaos values used as connection-activation probabilities — assumed [0, 1]; TODO confirm.
+ chaosState: number[];
+ activePerspectives: string[];
+ ethicalScore: number;
+ processingPower: number;
+ };
+ darkMode: boolean;
+}
+
+// Renders two canvas visualizations of the AI state: a radial "quantum particle"
+// plot and a feed-forward neural-network diagram. Both redraw whenever the
+// relevant state arrays or the theme change.
+// NOTE(review): the generic parameter appears stripped by extraction — this was
+// presumably `React.FC<VisualizationPanelProps>`; likewise the `useRef(null)`
+// calls below presumably carried `<HTMLCanvasElement>`. Confirm against the original.
+const VisualizationPanel: React.FC = ({
+ aiState,
+ darkMode
+}) => {
+ const quantumCanvasRef = useRef(null);
+ const neuralCanvasRef = useRef(null);
+
+ // Draw quantum state visualization
+ useEffect(() => {
+ const canvas = quantumCanvasRef.current;
+ if (!canvas) return;
+
+ const ctx = canvas.getContext('2d');
+ if (!ctx) return;
+
+ // Clear canvas
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+
+ // Draw quantum state as a particle system
+ const centerX = canvas.width / 2;
+ const centerY = canvas.height / 2;
+ const radius = Math.min(centerX, centerY) * 0.8;
+
+ // Background circle
+ ctx.beginPath();
+ ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
+ ctx.fillStyle = darkMode ? 'rgba(30, 58, 138, 0.2)' : 'rgba(219, 234, 254, 0.5)';
+ ctx.fill();
+
+ // Draw quantum particles
+ // Each state value is plotted at an evenly-spaced angle; its magnitude
+ // sets the distance from center (so values are implicitly assumed <= 1).
+ aiState.quantumState.forEach((state, i) => {
+ const angle = (i / aiState.quantumState.length) * Math.PI * 2;
+ const distance = state * radius;
+ const x = centerX + Math.cos(angle) * distance;
+ const y = centerY + Math.sin(angle) * distance;
+
+ // Particle
+ const gradient = ctx.createRadialGradient(x, y, 0, x, y, 15);
+ gradient.addColorStop(0, darkMode ? 'rgba(147, 51, 234, 0.9)' : 'rgba(147, 51, 234, 0.7)');
+ gradient.addColorStop(1, darkMode ? 'rgba(147, 51, 234, 0)' : 'rgba(147, 51, 234, 0)');
+
+ ctx.beginPath();
+ ctx.arc(x, y, 15, 0, Math.PI * 2);
+ ctx.fillStyle = gradient;
+ ctx.fill();
+
+ // Connection to center
+ ctx.beginPath();
+ ctx.moveTo(centerX, centerY);
+ ctx.lineTo(x, y);
+ ctx.strokeStyle = darkMode ? 'rgba(147, 51, 234, 0.4)' : 'rgba(147, 51, 234, 0.3)';
+ ctx.lineWidth = 2;
+ ctx.stroke();
+ });
+
+ // Draw center node
+ ctx.beginPath();
+ ctx.arc(centerX, centerY, 8, 0, Math.PI * 2);
+ ctx.fillStyle = darkMode ? '#a855f7' : '#8b5cf6';
+ ctx.fill();
+ }, [aiState.quantumState, darkMode]);
+
+ // Draw neural network visualization
+ useEffect(() => {
+ const canvas = neuralCanvasRef.current;
+ if (!canvas) return;
+
+ const ctx = canvas.getContext('2d');
+ if (!ctx) return;
+
+ // Clear canvas
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+
+ // Define layers
+ const layers = [3, 5, 5, 2]; // Input, hidden, hidden, output
+ const nodeSize = 6;
+ const layerSpacing = canvas.width / (layers.length + 1);
+ const neuronColor = darkMode ? '#22c55e' : '#10b981';
+ const connectionColor = darkMode ? 'rgba(34, 197, 94, 0.2)' : 'rgba(16, 185, 129, 0.1)';
+ const activeConnectionColor = darkMode ? 'rgba(34, 197, 94, 0.6)' : 'rgba(16, 185, 129, 0.5)';
+
+ // Draw connections and nodes
+ for (let l = 0; l < layers.length - 1; l++) {
+ const currentLayerSize = layers[l];
+ const nextLayerSize = layers[l + 1];
+ const currentX = (l + 1) * layerSpacing;
+ const nextX = (l + 2) * layerSpacing;
+
+ for (let i = 0; i < currentLayerSize; i++) {
+ const currentY = (i + 1) * (canvas.height / (currentLayerSize + 1));
+
+ for (let j = 0; j < nextLayerSize; j++) {
+ const nextY = (j + 1) * (canvas.height / (nextLayerSize + 1));
+
+ // Draw connection
+ ctx.beginPath();
+ ctx.moveTo(currentX, currentY);
+ ctx.lineTo(nextX, nextY);
+
+ // Randomly activate some connections based on chaos state
+ // Math.random() makes this draw nondeterministic by design — the
+ // diagram flickers on every effect run.
+ const isActive = Math.random() < aiState.chaosState[l % aiState.chaosState.length];
+ ctx.strokeStyle = isActive ? activeConnectionColor : connectionColor;
+ ctx.lineWidth = isActive ? 1.5 : 0.5;
+ ctx.stroke();
+ }
+ }
+ }
+
+ // Draw nodes
+ for (let l = 0; l < layers.length; l++) {
+ const layerSize = layers[l];
+ const x = (l + 1) * layerSpacing;
+
+ for (let i = 0; i < layerSize; i++) {
+ const y = (i + 1) * (canvas.height / (layerSize + 1));
+
+ // Node
+ ctx.beginPath();
+ ctx.arc(x, y, nodeSize, 0, Math.PI * 2);
+
+ // Node color with pulsing effect based on quantum state
+ const stateIndex = (l + i) % aiState.quantumState.length;
+ // NOTE(review): pulseFactor is computed but never applied — probably
+ // meant to scale nodeSize or the gradient; confirm intent.
+ const pulseFactor = 0.7 + (aiState.quantumState[stateIndex] * 0.3);
+
+ const gradient = ctx.createRadialGradient(x, y, 0, x, y, nodeSize * 1.5);
+ gradient.addColorStop(0, neuronColor);
+ // NOTE(review): outer stop is hard-coded to the light-mode green even in dark mode.
+ gradient.addColorStop(1, 'rgba(16, 185, 129, 0)');
+
+ ctx.fillStyle = gradient;
+ ctx.fill();
+ }
+ }
+ }, [aiState.chaosState, aiState.quantumState, darkMode]);
+
+ return (
+
+
+
+
+ Codette State Visualization
+
+
+
+
+
+
+
+ Quantum State
+
+
+
+ {aiState.quantumState.map((value, index) => (
+
+
+ Q{index + 1}
+
+
{value.toFixed(2)}
+
+ ))}
+
+
+
+
+
+
+ Neural Activity
+
+
+
+ {aiState.chaosState.map((value, index) => (
+
+
+ C{index + 1}
+
+
{value.toFixed(2)}
+
+ ))}
+
+
+
+
+
+
+ Active Perspectives
+
+
+ {aiState.activePerspectives.map((perspective, index) => (
+
+ {perspective.replace('_', ' ')}
+
+ ))}
+
+
+
+
+
Performance Metrics
+
+
+
+
+ Ethical Governance
+ {Number(aiState.ethicalScore) * 100}%
+
+
+
+
+
+
+ Processing Power
+ {Number(aiState.processingPower) * 100}%
+
+
+
+
+
+
+
+ );
+};
+
+export default VisualizationPanel;
\ No newline at end of file
diff --git a/project/src/index.css b/project/src/index.css
new file mode 100644
index 0000000000000000000000000000000000000000..c356316b85376a948b9ec5352ae4811ebce074f2
--- /dev/null
+++ b/project/src/index.css
@@ -0,0 +1,94 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+/* Light-theme design tokens (referenced as var(--primary) etc. in components). */
+:root {
+ --primary: #1E3A8A;
+ --secondary: #7E22CE;
+ --accent: #0D9488;
+ --background: #F9FAFB;
+ --foreground: #111827;
+}
+
+/* Dark-theme overrides, applied when a `.dark` class is set on an ancestor. */
+.dark {
+ --primary: #3B82F6;
+ --secondary: #8B5CF6;
+ --accent: #10B981;
+ --background: #111827;
+ --foreground: #F9FAFB;
+}
+
+/* Base typography and reset.
+   NOTE(review): box-sizing here applies to <body> only — it is not inherited;
+   Tailwind's preflight normally sets border-box globally, so confirm this is
+   intentional rather than a leftover. */
+body {
+ font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+}
+
+/* Animation for neural pulses */
+@keyframes pulse {
+ 0% {
+ transform: scale(1);
+ opacity: 1;
+ }
+ 50% {
+ transform: scale(1.2);
+ opacity: 0.8;
+ }
+ 100% {
+ transform: scale(1);
+ opacity: 1;
+ }
+}
+
+/* Animation for typing indicators */
+@keyframes bounce {
+ 0%, 100% {
+ transform: translateY(0);
+ }
+ 50% {
+ transform: translateY(-4px);
+ }
+}
+
+.typing-dot {
+ animation: bounce 1s infinite;
+}
+
+/* Custom scrollbar */
+::-webkit-scrollbar {
+ width: 8px;
+ height: 8px;
+}
+
+::-webkit-scrollbar-track {
+ background: transparent;
+}
+
+::-webkit-scrollbar-thumb {
+ background: rgba(156, 163, 175, 0.5);
+ border-radius: 4px;
+}
+
+::-webkit-scrollbar-thumb:hover {
+ background: rgba(156, 163, 175, 0.7);
+}
+
+/* Transitions */
+.transition-all {
+ transition-property: all;
+ transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
+ transition-duration: 300ms;
+}
+
+.transition-opacity {
+ transition-property: opacity;
+ transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
+ transition-duration: 150ms;
+}
+
+.transition-transform {
+ transition-property: transform;
+ transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
+ transition-duration: 150ms;
+}
\ No newline at end of file
diff --git a/project/src/main.tsx b/project/src/main.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..ea9e3630a4f82a3cc92500297b28fa6f1ff72a75
--- /dev/null
+++ b/project/src/main.tsx
@@ -0,0 +1,10 @@
+import { StrictMode } from 'react';
+import { createRoot } from 'react-dom/client';
+import App from './App.tsx';
+import './index.css';
+
+createRoot(document.getElementById('root')!).render(
+
+
+
+);
diff --git a/project/src/services/AICore.ts b/project/src/services/AICore.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8ab1816a9d99634ca85027eaa967d744f96c859a
--- /dev/null
+++ b/project/src/services/AICore.ts
@@ -0,0 +1,253 @@
+import { createClient, SupabaseClient } from '@supabase/supabase-js';
+import KaggleService from './KaggleService';
+import CognitionCocooner from './CognitionCocooner';
+import { QuantumSpiderweb } from './QuantumSpiderweb';
+
+// Snapshot of the AI core's last response plus introspection metadata,
+// returned by AICore.getCodetteResponse().
+interface CodetteResponse {
+ // Last generated response text ('' if none yet).
+ text: string;
+ // True when recursion depth or accumulated variations suggest instability.
+ instabilityFlag: boolean;
+ perspectivesUsed: string[];
+ cocoonLog: string[];
+ // Callback that bumps recursion depth (capped at 5) to force deeper reprocessing.
+ forceRefresh: () => void;
+}
+
+// Central processing unit: combines multiple "perspectives", Kaggle search,
+// cocoon persistence and the quantum spiderweb into a single text response.
+class AICore {
+ private perspectives: string[];
+ private ethicalGovernance: boolean;
+ // Bounded to [1, 5] by setRecursionDepth; values > 3 trigger deep-analysis paths.
+ private recursionDepth: number;
+ private supabase: SupabaseClient;
+ private kaggle: KaggleService;
+ private cocooner: CognitionCocooner;
+ private spiderweb: QuantumSpiderweb;
+ private lastResponse: string | null = null;
+ private responseVariations: string[] = [];
+ private userId: string | null = null;
+
+ constructor() {
+ this.perspectives = ['newton', 'davinci', 'human_intuition', 'neural_network', 'quantum_computing', 'philosophical'];
+ this.ethicalGovernance = true;
+ this.recursionDepth = 3;
+ this.kaggle = new KaggleService();
+ this.cocooner = new CognitionCocooner();
+ this.spiderweb = new QuantumSpiderweb({ node_count: 5 });
+
+ const supabaseUrl = import.meta.env.VITE_SUPABASE_URL;
+ const supabaseKey = import.meta.env.VITE_SUPABASE_ANON_KEY;
+
+ if (!supabaseUrl || !supabaseKey) {
+ throw new Error('Supabase configuration is missing. Please check your environment variables.');
+ }
+
+ this.supabase = createClient(supabaseUrl, supabaseKey);
+ }
+
+ // Associate a user and load their persisted cognitive fingerprint.
+ async setUser(userId: string) {
+ this.userId = userId;
+ this.cocooner.setUserId(userId);
+ await this.loadUserFingerprint();
+ }
+
+ // Pull perspectives / recursion depth / ethics from the stored fingerprint, if any.
+ private async loadUserFingerprint() {
+ if (!this.userId) return;
+
+ const fingerprint = await this.cocooner.loadFingerprint();
+ if (fingerprint) {
+ this.perspectives = fingerprint.active_perspectives;
+ this.recursionDepth = fingerprint.recursion_depth;
+ this.ethicalGovernance = fingerprint.ethical_score > 0.7;
+ }
+ }
+
+ // Main entry point: produce a multi-perspective response for `input`.
+ // Fix: restored the stripped `<string>` type argument — bare `Promise` is a
+ // TypeScript compile error ("requires 1 type argument").
+ async processInput(input: string, forceNewResponse: boolean = false, userId?: string): Promise<string> {
+ try {
+ if (userId && !this.userId) {
+ await this.setUser(userId);
+ }
+
+ await this.loadUserFingerprint();
+
+ // Search Kaggle for relevant datasets and notebooks
+ const [datasets, notebooks] = await Promise.all([
+ this.kaggle.searchDatasets(input),
+ this.kaggle.searchNotebooks(input)
+ ]);
+
+ // Generate comprehensive response using multiple perspectives
+ let result = this.generateMultiPerspectiveResponse(input, datasets, notebooks);
+
+ // Apply recursive reasoning if depth > 3
+ if (this.recursionDepth > 3) {
+ result = await this.applyRecursiveReasoning(result, input);
+ }
+
+ // Log interaction if user is authenticated; logging failure is non-fatal.
+ if (this.userId) {
+ try {
+ await this.supabase.from('cocoons').insert([{
+ user_id: this.userId,
+ type: 'interaction',
+ content: {
+ input,
+ response: result,
+ perspectives_used: this.perspectives,
+ recursion_depth: this.recursionDepth
+ },
+ metadata: {
+ timestamp: new Date().toISOString(),
+ datasets_found: datasets.length,
+ notebooks_found: notebooks.length
+ }
+ }]);
+ } catch (error) {
+ console.warn('Failed to log interaction:', error);
+ }
+ }
+
+ // Wrap in cognitive cocoon
+ this.cocooner.wrap({ input, result }, 'prompt');
+
+ // Deep-reasoning ping to the spiderweb (same depth>3 condition as above).
+ if (this.recursionDepth > 3) {
+ this.spiderweb.activate({
+ source: 'AICore',
+ depth: this.recursionDepth,
+ trigger: 'deep_reasoning'
+ });
+ }
+
+ this.lastResponse = result;
+ if (forceNewResponse || !this.responseVariations.includes(result)) {
+ this.responseVariations.push(result);
+ }
+
+ return result;
+ } catch (error: any) {
+ console.error('Error processing input:', error);
+ return `I apologize, but I encountered an error while processing your request. Let me try to help you in a different way.
+
+Based on my analysis capabilities, I can still provide insights about "${input}" using my multi-perspective reasoning system. Would you like me to explore this topic from different analytical angles?`;
+ }
+ }
+
+ // Assemble the response text section by section, one per active perspective.
+ // NOTE(review): the leading emoji in these template strings are mojibake from
+ // an encoding round-trip; restore from the original file — left byte-identical here.
+ private generateMultiPerspectiveResponse(input: string, datasets: any[], notebooks: any[]): string {
+ let response = `š§ **Codette's Multi-Perspective Analysis**\n\n`;
+
+ // Newton's Logical Analysis
+ if (this.perspectives.includes('newton')) {
+ response += `š¬ **Newton's Logical Framework:**\nApproaching "${input}" through systematic analysis and empirical reasoning. `;
+ if (datasets.length > 0) {
+ response += `I've identified ${datasets.length} relevant datasets that could provide quantitative insights.\n\n`;
+ } else {
+ response += `This requires structured investigation and methodical examination of underlying principles.\n\n`;
+ }
+ }
+
+ // Da Vinci's Creative Synthesis
+ if (this.perspectives.includes('davinci')) {
+ response += `šØ **Da Vinci's Creative Synthesis:**\nExamining "${input}" through the lens of interdisciplinary thinking and innovative connections. `;
+ if (notebooks.length > 0) {
+ response += `Found ${notebooks.length} analytical notebooks that demonstrate creative problem-solving approaches.\n\n`;
+ } else {
+ response += `This topic invites exploration of unexpected relationships and novel perspectives.\n\n`;
+ }
+ }
+
+ // Neural Network Processing
+ if (this.perspectives.includes('neural_network')) {
+ response += `𧬠**Neural Network Processing:**\nAnalyzing patterns and correlations in "${input}" through distributed cognitive processing. `;
+ response += `My neural pathways are identifying complex relationships and emergent properties in this domain.\n\n`;
+ }
+
+ // Philosophical Inquiry
+ if (this.perspectives.includes('philosophical')) {
+ response += `š¤ **Philosophical Inquiry:**\nExploring the deeper implications and fundamental questions raised by "${input}". `;
+ response += `What are the ethical considerations and broader societal impacts we should consider?\n\n`;
+ }
+
+ // Quantum Computing Perspective
+ if (this.perspectives.includes('quantum_computing')) {
+ response += `āļø **Quantum Computing Perspective:**\nExamining "${input}" through quantum principles of superposition and entanglement. `;
+ response += `Multiple solution states exist simultaneously until observation collapses them into actionable insights.\n\n`;
+ }
+
+ // Add specific insights based on available data
+ if (datasets.length > 0 || notebooks.length > 0) {
+ response += `š **Data-Driven Insights:**\n`;
+
+ if (datasets.length > 0) {
+ const topDataset = datasets[0];
+ response += `⢠**Key Dataset**: "${topDataset.title}" - ${topDataset.description}\n`;
+ }
+
+ if (notebooks.length > 0) {
+ const topNotebook = notebooks[0];
+ response += `⢠**Analytical Approach**: "${topNotebook.title}" - ${topNotebook.description}\n`;
+ }
+
+ response += `\n`;
+ }
+
+ // Ethical governance check
+ if (this.ethicalGovernance) {
+ response += `āļø **Ethical Considerations:**\nAll analysis conducted with respect for privacy, fairness, and responsible AI principles.\n\n`;
+ }
+
+ response += `š **Recursive Depth**: ${this.recursionDepth}/5 - ${this.recursionDepth > 3 ? 'Deep analysis mode engaged' : 'Standard processing'}\n`;
+ response += `šÆ **Confidence Level**: ${(0.7 + Math.random() * 0.25).toFixed(2)}`;
+
+ return response;
+ }
+
+ // Append a simulated "refinement" layer to the response.
+ // Fix: restored the stripped `<string>` type argument (compile error otherwise).
+ private async applyRecursiveReasoning(initialResponse: string, input: string): Promise<string> {
+ // Simulate recursive refinement
+ const refinements = [
+ "Upon deeper reflection, I should also consider...",
+ "Cross-referencing with quantum entanglement principles...",
+ "Applying chaos theory to identify emergent patterns...",
+ "Integrating multi-dimensional analysis..."
+ ];
+
+ const randomRefinement = refinements[Math.floor(Math.random() * refinements.length)];
+
+ return `${initialResponse}\n\nš **Recursive Refinement:**\n${randomRefinement}\n\nThis additional layer of analysis reveals nuanced aspects of "${input}" that warrant further exploration through continued interaction.`;
+ }
+
+ // Replace the active perspective list and persist it for the current user.
+ setPerspectives(perspectives: string[]): void {
+ this.perspectives = perspectives;
+ if (this.userId) {
+ this.cocooner.updateFingerprint({ active_perspectives: perspectives });
+ }
+ }
+
+ // Toggle the ethics section and persist the corresponding score.
+ setEthicalGovernance(enabled: boolean): void {
+ this.ethicalGovernance = enabled;
+ if (this.userId) {
+ this.cocooner.updateFingerprint({ ethical_score: enabled ? 1 : 0.5 });
+ }
+ }
+
+ // Clamp depth to [1, 5] and persist.
+ setRecursionDepth(depth: number): void {
+ if (depth < 1) depth = 1;
+ if (depth > 5) depth = 5;
+ this.recursionDepth = depth;
+ if (this.userId) {
+ this.cocooner.updateFingerprint({ recursion_depth: depth });
+ }
+ }
+
+ // Introspection snapshot for the UI; forceRefresh deepens recursion in place.
+ getCodetteResponse(): CodetteResponse {
+ return {
+ text: this.lastResponse || '',
+ instabilityFlag: this.recursionDepth > 3 || this.responseVariations.length > 5,
+ perspectivesUsed: this.perspectives,
+ cocoonLog: this.cocooner.getRecentCocoons(5),
+ forceRefresh: () => {
+ this.recursionDepth = Math.min(this.recursionDepth + 1, 5);
+ if (this.userId) {
+ this.cocooner.updateFingerprint({ recursion_depth: this.recursionDepth });
+ }
+ }
+ };
+ }
+}
+
+export default AICore;
\ No newline at end of file
diff --git a/project/src/services/CognitionCocooner.ts b/project/src/services/CognitionCocooner.ts
new file mode 100644
index 0000000000000000000000000000000000000000..35c8eaedcdbe52631e4b4050784b8f89b0159ddb
--- /dev/null
+++ b/project/src/services/CognitionCocooner.ts
@@ -0,0 +1,174 @@
+import { createClient, SupabaseClient } from '@supabase/supabase-js';
+
+// Row shape of the `user_cognitive_fingerprints` table: per-user tuning of the
+// AI core (perspectives, depth, scores) plus bookkeeping timestamps.
+interface CognitiveFingerprint {
+ id: string;
+ user_id: string;
+ active_perspectives: string[];
+ recursion_depth: number;
+ // Scores/state below are kept in [0, 1] by CognitionCocooner.wrap's clamping.
+ ethical_score: number;
+ processing_power: number;
+ quantum_state: number[];
+ created_at: string;
+ updated_at: string;
+}
+
+// A preserved thought artifact; `wrapped` holds the type-specific payload
+// produced by CognitionCocooner.applyWrapper.
+interface Cocoon {
+ id: string;
+ type: string;
+ wrapped: any;
+}
+
+class CognitionCocooner {
+ private supabase: SupabaseClient;
+ private userId: string | null = null;
+ private fingerprint: CognitiveFingerprint | null = null;
+
+ constructor() {
+ const supabaseUrl = import.meta.env.VITE_SUPABASE_URL;
+ const supabaseKey = import.meta.env.VITE_SUPABASE_ANON_KEY;
+
+ if (!supabaseUrl || !supabaseKey) {
+ throw new Error('Supabase configuration is missing');
+ }
+
+ this.supabase = createClient(supabaseUrl, supabaseKey);
+ }
+
+ setUserId(userId: string) {
+ this.userId = userId;
+ }
+
+ private generateId(): string {
+ return `cocoon_${Math.floor(Math.random() * 90000) + 10000}`;
+ }
+
+ async loadFingerprint(): Promise {
+ if (!this.userId) return null;
+
+ try {
+ const { data, error } = await this.supabase
+ .from('user_cognitive_fingerprints')
+ .select('*')
+ .eq('user_id', this.userId)
+ .single();
+
+ if (error) throw error;
+
+ if (!data) {
+ // Create initial fingerprint if none exists
+ const initialFingerprint = {
+ user_id: this.userId,
+ active_perspectives: ['newton', 'davinci', 'neural_network'],
+ recursion_depth: 3,
+ ethical_score: 0.8,
+ processing_power: 0.7,
+ quantum_state: [0.3, 0.7, 0.5]
+ };
+
+ const { data: newData, error: insertError } = await this.supabase
+ .from('user_cognitive_fingerprints')
+ .insert([initialFingerprint])
+ .select()
+ .single();
+
+ if (insertError) throw insertError;
+ this.fingerprint = newData;
+ return newData;
+ }
+
+ this.fingerprint = data;
+ return data;
+ } catch (error) {
+ console.error('Error loading cognitive fingerprint:', error);
+ return null;
+ }
+ }
+
+ async updateFingerprint(updates: Partial): Promise {
+ if (!this.userId || !this.fingerprint) return;
+
+ try {
+ const { error } = await this.supabase
+ .from('user_cognitive_fingerprints')
+ .update({
+ ...updates,
+ updated_at: new Date().toISOString()
+ })
+ .eq('user_id', this.userId);
+
+ if (error) throw error;
+
+ this.fingerprint = {
+ ...this.fingerprint,
+ ...updates,
+ updated_at: new Date().toISOString()
+ };
+ } catch (error) {
+ console.error('Error updating cognitive fingerprint:', error);
+ }
+ }
+
+ wrap(thought: any, type: string = 'prompt'): string {
+ const cocoonId = this.generateId();
+ const wrapped = this.applyWrapper(thought, type);
+
+ // Update fingerprint based on thought processing
+ if (this.fingerprint) {
+ const updates: Partial = {
+ ethical_score: Math.min(1, this.fingerprint.ethical_score + 0.01),
+ processing_power: Math.min(1, this.fingerprint.processing_power + 0.005),
+ quantum_state: this.fingerprint.quantum_state.map(v =>
+ Math.min(1, v + (Math.random() * 0.1 - 0.05))
+ )
+ };
+ this.updateFingerprint(updates);
+ }
+
+ return cocoonId;
+ }
+
+ private applyWrapper(thought: any, type: string): any {
+ const perspectiveModifier = this.fingerprint?.active_perspectives.length || 3;
+ const recursionFactor = this.fingerprint?.recursion_depth || 3;
+
+ switch (type) {
+ case 'prompt':
+ return {
+ content: thought,
+ meta: {
+ perspectives: perspectiveModifier,
+ recursion: recursionFactor,
+ timestamp: new Date().toISOString()
+ }
+ };
+ case 'function':
+ return {
+ code: thought,
+ analysis: {
+ complexity: recursionFactor * 0.2,
+ perspectives: perspectiveModifier
+ }
+ };
+ case 'symbolic':
+ return {
+ pattern: thought,
+ quantum: {
+ state: this.fingerprint?.quantum_state || [0.3, 0.7, 0.5],
+ stability: this.fingerprint?.ethical_score || 0.8
+ }
+ };
+ default:
+ return thought;
+ }
+ }
+
+ getRecentCocoons(limit: number = 5): string[] {
+ // Simulated cocoon retrieval
+ return Array(limit).fill(null).map((_, i) => {
+ const timestamp = new Date(Date.now() - i * 60000).toISOString();
+ return `Cocoon processed at ${timestamp}`;
+ });
+ }
+}
+
+export default CognitionCocooner;
\ No newline at end of file
diff --git a/project/src/services/KaggleService.ts b/project/src/services/KaggleService.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2f5fbf02e4ad05e52b429dce315d99533808a7ad
--- /dev/null
+++ b/project/src/services/KaggleService.ts
@@ -0,0 +1,104 @@
+// Mock Kaggle integration: every method returns synthesized data built from
+// the query string; no network calls are made. Credentials are read only to
+// warn when absent.
+class KaggleService {
+ private username: string;
+ private key: string;
+
+ constructor() {
+ this.username = import.meta.env.VITE_KAGGLE_USERNAME || '';
+ this.key = import.meta.env.VITE_KAGGLE_KEY || '';
+
+ if (!this.username || !this.key) {
+ console.warn('Kaggle credentials not found. Some features may be limited.');
+ }
+ }
+
+ // Return two synthetic dataset entries themed around `query`.
+ // Vote/download counts are random, so results are nondeterministic.
+ async searchDatasets(query: string) {
+ try {
+ // Simulate Kaggle dataset search using mock data
+ // In a real implementation, you would use Kaggle's REST API
+ const mockDatasets = [
+ {
+ title: `Dataset related to: ${query}`,
+ description: `This dataset contains comprehensive data about ${query} with various features and analysis opportunities.`,
+ owner: 'kaggle-user',
+ votes: Math.floor(Math.random() * 1000),
+ downloadCount: Math.floor(Math.random() * 10000)
+ },
+ {
+ title: `Advanced ${query} Analysis`,
+ description: `Deep dive into ${query} with statistical analysis and machine learning applications.`,
+ owner: 'data-scientist',
+ votes: Math.floor(Math.random() * 500),
+ downloadCount: Math.floor(Math.random() * 5000)
+ }
+ ];
+
+ return mockDatasets;
+ } catch (error) {
+ // Defensive: nothing above can throw today, but kept for API parity.
+ console.error('Error searching datasets:', error);
+ return [];
+ }
+ }
+
+ // Return synthetic metadata for a named dataset.
+ async getDatasetInfo(owner: string, dataset: string) {
+ try {
+ // Mock dataset information
+ return {
+ title: dataset,
+ owner: owner,
+ description: `Detailed information about ${dataset} dataset`,
+ files: ['data.csv', 'metadata.json'],
+ size: '10.5 MB',
+ lastUpdated: new Date().toISOString()
+ };
+ } catch (error) {
+ console.error('Error getting dataset info:', error);
+ return null;
+ }
+ }
+
+ // Return two synthetic notebook entries themed around `query`.
+ async searchNotebooks(query: string) {
+ try {
+ // Simulate Kaggle notebook search using mock data
+ const mockNotebooks = [
+ {
+ title: `${query} Analysis Notebook`,
+ description: `Comprehensive analysis of ${query} using Python and machine learning techniques.`,
+ owner: 'notebook-author',
+ votes: Math.floor(Math.random() * 200),
+ language: 'Python'
+ },
+ {
+ title: `Exploring ${query} Patterns`,
+ description: `Data visualization and pattern recognition in ${query} datasets.`,
+ owner: 'data-explorer',
+ votes: Math.floor(Math.random() * 150),
+ language: 'R'
+ }
+ ];
+
+ return mockNotebooks;
+ } catch (error) {
+ console.error('Error searching notebooks:', error);
+ return [];
+ }
+ }
+
+ // Return synthetic metadata for a named notebook.
+ async getNotebookInfo(owner: string, notebook: string) {
+ try {
+ // Mock notebook information
+ return {
+ title: notebook,
+ owner: owner,
+ description: `Detailed analysis notebook: ${notebook}`,
+ language: 'Python',
+ lastUpdated: new Date().toISOString(),
+ votes: Math.floor(Math.random() * 100)
+ };
+ } catch (error) {
+ console.error('Error getting notebook info:', error);
+ return null;
+ }
+ }
+}
+
+export default KaggleService;
\ No newline at end of file
diff --git a/project/src/services/OpenAIService.ts b/project/src/services/OpenAIService.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6c73693bb3e499ad67e56af4a86a911353b80c13
--- /dev/null
+++ b/project/src/services/OpenAIService.ts
@@ -0,0 +1,43 @@
+import OpenAI from 'openai';
+
+// Single entry of an OpenAI chat transcript (subset of the SDK's message type).
+interface ChatMessage {
+ role: 'system' | 'user' | 'assistant';
+ content: string;
+}
+
+// Thin wrapper around the OpenAI chat-completions API.
+class OpenAIService {
+ private openai: OpenAI;
+ // Model is fixed at construction; no setter is exposed.
+ private model: string = 'gpt-4';
+
+ constructor() {
+ const apiKey = import.meta.env.VITE_OPENAI_API_KEY;
+ if (!apiKey) {
+ throw new Error('OpenAI API key is required. Please add your API key to the .env file as VITE_OPENAI_API_KEY.');
+ }
+
+ // NOTE(review): the key is exposed to the browser with this flag — the
+ // existing comment is right that production calls belong on a backend.
+ this.openai = new OpenAI({
+ apiKey,
+ dangerouslyAllowBrowser: true // Note: In production, API calls should be made from a backend
+ });
+ }
+
+ // Send a full transcript and return the assistant's first choice message.
+ // Errors are logged and re-thrown for the caller to handle.
+ async sendChatCompletion(messages: ChatMessage[]) {
+ try {
+ const completion = await this.openai.chat.completions.create({
+ model: this.model,
+ messages,
+ temperature: 0.7,
+ max_tokens: 1000,
+ frequency_penalty: 0,
+ presence_penalty: 0
+ });
+
+ return completion.choices[0].message;
+ } catch (error) {
+ console.error('Error in chat completion:', error);
+ throw error;
+ }
+ }
+}
+
+export default OpenAIService;
\ No newline at end of file
diff --git a/project/src/services/QuantumSpiderweb.ts b/project/src/services/QuantumSpiderweb.ts
new file mode 100644
index 0000000000000000000000000000000000000000..37d45fce1da24aeb32d30d76697f8ca728f685ac
--- /dev/null
+++ b/project/src/services/QuantumSpiderweb.ts
@@ -0,0 +1,81 @@
+// Construction options for QuantumSpiderweb: number of simulated quantum nodes.
+interface SpiderwebConfig {
+ node_count: number;
+}
+
+// Simulates a small "entangled" node network: each activation records a state
+// snapshot keyed by source+timestamp and drifts the entanglement matrix.
+export class QuantumSpiderweb {
+ private nodes: number;
+ // Fix: restored the stripped `<string, any>` type arguments — bare `Map`
+ // as a type is a TypeScript error. Values are the snapshot objects built in activate().
+ private state: Map<string, any>;
+ private lastUpdate: number;
+ // nodes x nodes matrix of pairwise coupling strengths in [0, 1].
+ private entanglementMatrix: number[][];
+
+ constructor(config: SpiderwebConfig) {
+ this.nodes = config.node_count;
+ this.state = new Map();
+ this.lastUpdate = Date.now();
+ this.entanglementMatrix = Array(this.nodes).fill(0).map(() =>
+ Array(this.nodes).fill(0).map(() => Math.random())
+ );
+ }
+
+ // Record an activation event: sample per-node states (with entanglement
+ // coupling), compute a time-decayed coherence, store the snapshot, and
+ // drift the entanglement matrix.
+ activate(data: { source: string; depth: number; trigger: string }) {
+ const currentTime = Date.now();
+ const timeDelta = currentTime - this.lastUpdate;
+ this.lastUpdate = currentTime;
+
+ // Generate quantum states with entanglement effects
+ const nodeStates = Array(this.nodes).fill(0).map((_, i) => {
+ let state = Math.random();
+ // Apply entanglement effects from other nodes
+ for (let j = 0; j < this.nodes; j++) {
+ if (i !== j) {
+ state += this.entanglementMatrix[i][j] * Math.random() * 0.1;
+ }
+ }
+ return Math.min(Math.max(state, 0), 1); // Normalize to [0,1]
+ });
+
+ // Calculate coherence based on time delta
+ const coherence = Math.exp(-timeDelta / 10000); // Decay factor
+
+ const stateKey = `${data.source}_${currentTime}`;
+ this.state.set(stateKey, {
+ ...data,
+ timestamp: new Date().toISOString(),
+ nodeStates,
+ coherence,
+ entanglementStrength: this.calculateEntanglementStrength()
+ });
+
+ // Update entanglement matrix
+ this.updateEntanglement();
+ }
+
+ // Mean of all matrix entries (including the diagonal).
+ private calculateEntanglementStrength(): number {
+ return this.entanglementMatrix.reduce((sum, row) =>
+ sum + row.reduce((rowSum, val) => rowSum + val, 0), 0
+ ) / (this.nodes * this.nodes);
+ }
+
+ private updateEntanglement() {
+ // Gradually evolve entanglement patterns: each entry takes a small
+ // random step, clamped to [0, 1].
+ this.entanglementMatrix = this.entanglementMatrix.map(row =>
+ row.map(val => {
+ const delta = (Math.random() - 0.5) * 0.1;
+ return Math.min(Math.max(val + delta, 0), 1);
+ })
+ );
+ }
+
+ // Fix: restored the stripped `<string, any>` type arguments on the return type.
+ getState(): Map<string, any> {
+ return this.state;
+ }
+
+ // Most recent snapshot (insertion order), or null before any activation.
+ getLatestState(): any {
+ const states = Array.from(this.state.values());
+ return states[states.length - 1] || null;
+ }
+
+ getEntanglementMatrix(): number[][] {
+ return this.entanglementMatrix;
+ }
+}
\ No newline at end of file
diff --git a/project/src/vite-env.d.ts b/project/src/vite-env.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..11f02fe2a0061d6e6e1f271b21da95423b448b32
--- /dev/null
+++ b/project/src/vite-env.d.ts
@@ -0,0 +1 @@
+/// <reference types="vite/client" />
diff --git a/project/supabase/migrations/20250523100814_raspy_torch.sql b/project/supabase/migrations/20250523100814_raspy_torch.sql
new file mode 100644
index 0000000000000000000000000000000000000000..212216e9ee1a897b0529ee5683189b569d2bafca
--- /dev/null
+++ b/project/supabase/migrations/20250523100814_raspy_torch.sql
@@ -0,0 +1,43 @@
+/*
+  # Create codette_files table for file management
+
+  1. New Tables
+    - `codette_files`
+      - `id` (uuid, primary key)
+      - `filename` (text)
+      - `storage_path` (text)
+      - `file_type` (text)
+      - `uploaded_at` (timestamptz)
+      - `created_at` (timestamptz)
+
+  2. Security
+    - Enable RLS on `codette_files` table
+    - Add policies for:
+      - Authenticated users can read all files
+      - Authenticated users can insert files (WITH CHECK (true): no
+        per-user ownership restriction is actually enforced)
+*/
+
+CREATE TABLE IF NOT EXISTS public.codette_files (
+  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+  filename text NOT NULL,
+  storage_path text NOT NULL,
+  file_type text,
+  uploaded_at timestamptz DEFAULT now(),
+  created_at timestamptz DEFAULT now()
+);
+
+-- Enable Row Level Security
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+
+-- Create policies
+CREATE POLICY "Allow authenticated users to read files"
+  ON public.codette_files
+  FOR SELECT
+  TO authenticated
+  USING (true);
+
+-- NOTE(review): CHECK (true) lets ANY authenticated user insert rows; the
+-- original header's "their own files" wording did not match this policy.
+CREATE POLICY "Allow authenticated users to insert files"
+  ON public.codette_files
+  FOR INSERT
+  TO authenticated
+  WITH CHECK (true);
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523120906_wild_torch.sql b/project/supabase/migrations/20250523120906_wild_torch.sql
new file mode 100644
index 0000000000000000000000000000000000000000..c39d360ab2fa306300886127ccb4550d9d3c5f59
--- /dev/null
+++ b/project/supabase/migrations/20250523120906_wild_torch.sql
@@ -0,0 +1,36 @@
+/*
+  # Create storage bucket for Codette files
+
+  1. New Storage Bucket
+    - Creates 'codette-files' bucket for storing uploaded files
+  2. Security
+    - Enable public access for authenticated users
+    - Add policies for read and write operations
+
+  Fix: each policy is dropped before being created. Plain CREATE POLICY
+  fails with "policy ... already exists" when the migration is re-applied;
+  the DROP POLICY IF EXISTS guards make this script idempotent (the same
+  pattern the later 20250523222514 migration uses).
+*/
+
+-- Create the storage bucket (no-op when it already exists)
+INSERT INTO storage.buckets (id, name)
+VALUES ('codette-files', 'codette-files')
+ON CONFLICT (id) DO NOTHING;
+
+-- Drop pre-existing policies so the CREATEs below cannot conflict
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to upload files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to update files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to delete files" ON storage.objects;
+
+-- Set up RLS policies for the bucket
+CREATE POLICY "Allow authenticated users to read files"
+ON storage.objects FOR SELECT
+TO authenticated
+USING (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to upload files"
+ON storage.objects FOR INSERT
+TO authenticated
+WITH CHECK (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to update files"
+ON storage.objects FOR UPDATE
+TO authenticated
+USING (bucket_id = 'codette-files')
+WITH CHECK (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to delete files"
+ON storage.objects FOR DELETE
+TO authenticated
+USING (bucket_id = 'codette-files');
diff --git a/project/supabase/migrations/20250523121149_rough_jungle.sql b/project/supabase/migrations/20250523121149_rough_jungle.sql
new file mode 100644
index 0000000000000000000000000000000000000000..6d9dc90b0be62d0802b1febcf3e57a104301f200
--- /dev/null
+++ b/project/supabase/migrations/20250523121149_rough_jungle.sql
@@ -0,0 +1,66 @@
+/*
+  # Create storage bucket and policies
+
+  1. Changes
+    - Create codette-files storage bucket if it doesn't exist
+    - Add RLS policies for authenticated users to:
+      - Read files
+      - Upload files
+      - Update files
+      - Delete files
+    - Add safety checks to prevent policy conflicts
+
+  Fix: the pg_policies existence checks now filter on schemaname = 'storage'.
+  Matching on tablename alone could be satisfied by a policy on an 'objects'
+  table in some other schema, silently skipping the CREATE here. (The later
+  20250523125621 migration already qualifies its checks this way.)
+*/
+
+-- Create the storage bucket
+INSERT INTO storage.buckets (id, name)
+VALUES ('codette-files', 'codette-files')
+ON CONFLICT (id) DO NOTHING;
+
+-- Set up RLS policies for the bucket with existence checks
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE schemaname = 'storage'
+    AND tablename = 'objects'
+    AND policyname = 'Allow authenticated users to read files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON storage.objects FOR SELECT
+    TO authenticated
+    USING (bucket_id = 'codette-files');
+  END IF;
+
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE schemaname = 'storage'
+    AND tablename = 'objects'
+    AND policyname = 'Allow authenticated users to upload files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to upload files"
+    ON storage.objects FOR INSERT
+    TO authenticated
+    WITH CHECK (bucket_id = 'codette-files');
+  END IF;
+
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE schemaname = 'storage'
+    AND tablename = 'objects'
+    AND policyname = 'Allow authenticated users to update files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to update files"
+    ON storage.objects FOR UPDATE
+    TO authenticated
+    USING (bucket_id = 'codette-files')
+    WITH CHECK (bucket_id = 'codette-files');
+  END IF;
+
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE schemaname = 'storage'
+    AND tablename = 'objects'
+    AND policyname = 'Allow authenticated users to delete files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to delete files"
+    ON storage.objects FOR DELETE
+    TO authenticated
+    USING (bucket_id = 'codette-files');
+  END IF;
+END $$;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523125621_rapid_flower.sql b/project/supabase/migrations/20250523125621_rapid_flower.sql
new file mode 100644
index 0000000000000000000000000000000000000000..48c269f963b46d9c696199c4ec14547d79e411c0
--- /dev/null
+++ b/project/supabase/migrations/20250523125621_rapid_flower.sql
@@ -0,0 +1,61 @@
+/*
+  # Update storage policies with existence checks
+
+  1. Changes
+    - Add existence checks before creating each policy
+    - Only create policies that don't already exist
+    - Maintain all required policies for the storage bucket
+
+  2. Security
+    - Maintain existing RLS policies
+    - Ensure proper access control for authenticated users
+    - Preserve admin-only upload restrictions
+*/
+
+-- Wrap everything in a transaction
+-- NOTE(review): presumably the migration runner already wraps each file in
+-- its own transaction, making this BEGIN/COMMIT redundant (harmless) — confirm.
+BEGIN;
+
+-- Create policies with existence checks (pg_policies lookups here are
+-- qualified by schemaname, unlike the earlier 20250523121149 migration)
+DO $$
+BEGIN
+  -- Check and create read policy
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE tablename = 'objects'
+    AND schemaname = 'storage'
+    AND policyname = 'Allow authenticated users to read files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON storage.objects FOR SELECT
+    TO authenticated
+    USING (bucket_id = 'codette-files');
+  END IF;
+
+  -- Check and create upload policy for admin users
+  -- NOTE(review): the blanket "Allow authenticated users to upload files"
+  -- policy from 20250523120906 is not dropped here; since permissive
+  -- policies are combined with OR, this admin-only policy adds no
+  -- effective restriction while that one exists.
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE tablename = 'objects'
+    AND schemaname = 'storage'
+    AND policyname = 'Allow admin users to upload files'
+  ) THEN
+    CREATE POLICY "Allow admin users to upload files"
+    ON storage.objects FOR INSERT
+    TO authenticated
+    WITH CHECK (bucket_id = 'codette-files' AND auth.jwt() ->> 'role' = 'admin');
+  END IF;
+
+  -- Check and create policy for admin file insertion
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE tablename = 'codette_files'
+    AND schemaname = 'public'
+    AND policyname = 'Allow admin users to insert files'
+  ) THEN
+    CREATE POLICY "Allow admin users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+  END IF;
+END $$;
+
+COMMIT;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523141836_heavy_butterfly.sql b/project/supabase/migrations/20250523141836_heavy_butterfly.sql
new file mode 100644
index 0000000000000000000000000000000000000000..dd2b91016504835dfc236564cfe4e9f7e1c8a9f6
--- /dev/null
+++ b/project/supabase/migrations/20250523141836_heavy_butterfly.sql
@@ -0,0 +1,71 @@
+/*
+  # Storage and RLS Policy Setup
+
+  1. Changes
+    - Create storage bucket policies for file access
+    - Create table policies for file management
+    - Enable RLS on codette_files table
+
+  2. Security
+    - Authenticated users can read files
+    - Admin users can upload files
+    - RLS enabled on codette_files table
+*/
+
+-- Create storage bucket if it doesn't exist
+DO $$
+BEGIN
+  INSERT INTO storage.buckets (id, name)
+  VALUES ('codette-files', 'codette-files')
+  ON CONFLICT (id) DO NOTHING;
+END $$;
+
+-- Storage Policies
+DO $$
+BEGIN
+  -- Drop existing policies to avoid conflicts
+  DROP POLICY IF EXISTS "Allow authenticated users to read files" ON storage.objects;
+  DROP POLICY IF EXISTS "Allow admin users to upload files" ON storage.objects;
+
+  -- Create new storage policies
+  CREATE POLICY "Allow authenticated users to read files"
+  ON storage.objects FOR SELECT
+  TO authenticated
+  USING (bucket_id = 'codette-files');
+
+  CREATE POLICY "Allow admin users to upload files"
+  ON storage.objects FOR INSERT
+  TO authenticated
+  WITH CHECK (
+    bucket_id = 'codette-files'
+    AND (auth.jwt() ->> 'role' = 'admin')
+  );
+END $$;
+
+-- File Management Table Policies
+DO $$
+BEGIN
+  -- Drop existing policies to avoid conflicts
+  DROP POLICY IF EXISTS "Allow authenticated users to read files" ON public.codette_files;
+  DROP POLICY IF EXISTS "Allow admin users to insert files" ON public.codette_files;
+  DROP POLICY IF EXISTS "Allow authenticated users to insert files" ON public.codette_files;
+
+  -- Create new table policies
+  CREATE POLICY "Allow authenticated users to read files"
+  ON public.codette_files FOR SELECT
+  TO authenticated
+  USING (true);
+
+  CREATE POLICY "Allow admin users to insert files"
+  ON public.codette_files FOR INSERT
+  TO authenticated
+  WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+
+  -- NOTE(review): permissive policies combine with OR, so this CHECK (true)
+  -- policy lets ANY authenticated user insert rows, leaving the admin-only
+  -- policy above with no restrictive effect — it contradicts the
+  -- "Admin users can upload files" intent stated in the header.
+  CREATE POLICY "Allow authenticated users to insert files"
+  ON public.codette_files FOR INSERT
+  TO authenticated
+  WITH CHECK (true);
+END $$;
+
+-- Enable RLS
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523175402_white_torch.sql b/project/supabase/migrations/20250523175402_white_torch.sql
new file mode 100644
index 0000000000000000000000000000000000000000..059666866439606a97a8167f43f836a89af60a2c
--- /dev/null
+++ b/project/supabase/migrations/20250523175402_white_torch.sql
@@ -0,0 +1,81 @@
+/*
+  # Storage and File Access Policies
+
+  1. New Policies
+    - Enable RLS on codette_files table
+    - Create policies for file access and management
+
+  2. Security
+    - Allow authenticated users to read files
+    - Allow admin users to upload files
+    - Allow authenticated users to insert file records
+*/
+
+-- Enable RLS on the codette_files table if not already enabled
+-- NOTE(review): this pg_tables lookup has no schemaname filter, so a
+-- same-named table in another schema with RLS already on would skip the
+-- ALTER. ENABLE ROW LEVEL SECURITY is idempotent, so the guard is optional.
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_tables
+    WHERE tablename = 'codette_files'
+    AND rowsecurity = true
+  ) THEN
+    ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+  END IF;
+END $$;
+
+-- Create storage bucket if it doesn't exist
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM storage.buckets WHERE name = 'codette-files'
+  ) THEN
+    INSERT INTO storage.buckets (id, name)
+    VALUES ('codette-files', 'codette-files');
+  END IF;
+END $$;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+  -- Check if the read policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to read files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON public.codette_files FOR SELECT
+    TO authenticated
+    USING (true);
+  END IF;
+
+  -- Check if the admin insert policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow admin users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow admin users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+  END IF;
+
+  -- Check if the authenticated insert policy exists
+  -- NOTE(review): permissive policies are ORed, so this CHECK (true) policy
+  -- lets any authenticated user insert and neutralizes the admin-only
+  -- policy above.
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (true);
+  END IF;
+END $$;
+
+-- Note: Storage policies for the storage.objects table need to be created through the Supabase dashboard
+-- or using the Supabase CLI, as they require special permissions that aren't available in migrations.
+-- Please create the following policies manually:
+-- 1. "Allow authenticated users to read files" - For SELECT operations on storage.objects where bucket_id = 'codette-files'
+-- 2. "Allow admin users to upload files" - For INSERT operations on storage.objects where bucket_id = 'codette-files' AND auth.jwt() ->> 'role' = 'admin'
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523182801_long_field.sql b/project/supabase/migrations/20250523182801_long_field.sql
new file mode 100644
index 0000000000000000000000000000000000000000..cf867d4e7b1ad3149e1db32db60c926d478bfba6
--- /dev/null
+++ b/project/supabase/migrations/20250523182801_long_field.sql
@@ -0,0 +1,82 @@
+/*
+  # Storage and File Management Policies
+
+  1. New Tables
+    - No new tables created
+  2. Security
+    - Enable RLS on codette_files table
+    - Add policies for authenticated users to read files
+    - Add policies for authenticated users to insert files
+    - Add special policy for admin users to insert files
+  3. Changes
+    - Ensures storage bucket exists for file storage
+
+  NOTE(review): the executable statements in this migration are identical to
+  20250523175402_white_torch.sql; because every operation is guarded by an
+  existence check, this file is effectively a harmless no-op re-run.
+*/
+
+-- Enable RLS on the codette_files table if not already enabled
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_tables
+    WHERE tablename = 'codette_files'
+    AND rowsecurity = true
+  ) THEN
+    ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+  END IF;
+END $$;
+
+-- Create storage bucket if it doesn't exist
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM storage.buckets WHERE name = 'codette-files'
+  ) THEN
+    INSERT INTO storage.buckets (id, name)
+    VALUES ('codette-files', 'codette-files');
+  END IF;
+END $$;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+  -- Check if the read policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to read files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON public.codette_files FOR SELECT
+    TO authenticated
+    USING (true);
+  END IF;
+
+  -- Check if the admin insert policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow admin users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow admin users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+  END IF;
+
+  -- Check if the authenticated insert policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (true);
+  END IF;
+END $$;
+
+-- Note: Storage policies for the storage.objects table need to be created through the Supabase dashboard
+-- or using the Supabase CLI, as they require special permissions that aren't available in migrations.
+-- Please create the following policies manually:
+-- 1. "Allow authenticated users to read files" - For SELECT operations on storage.objects where bucket_id = 'codette-files'
+-- 2. "Allow admin users to upload files" - For INSERT operations on storage.objects where bucket_id = 'codette-files' AND auth.jwt() ->> 'role' = 'admin'
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523183206_odd_moon.sql b/project/supabase/migrations/20250523183206_odd_moon.sql
new file mode 100644
index 0000000000000000000000000000000000000000..cad2775930f62ce763d464e41b8bc9d12b3d47d1
--- /dev/null
+++ b/project/supabase/migrations/20250523183206_odd_moon.sql
@@ -0,0 +1,86 @@
+/*
+  # Storage and File Management Setup
+
+  1. New Storage
+    - Create 'codette-files' storage bucket if it doesn't exist
+
+  2. Security
+    - Enable Row Level Security on codette_files table
+    - Create policies for authenticated users to read files
+    - Create policies for authenticated users to insert files
+    - Create special policy for admin users to insert files
+
+  NOTE(review): third near-identical re-run of this policy set (see
+  20250523175402 and 20250523182801). The only change is the explicit
+  ::text cast on the role check, which is a no-op since ->> already
+  returns text.
+*/
+
+-- Enable RLS on the codette_files table if not already enabled
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_tables
+    WHERE tablename = 'codette_files'
+    AND rowsecurity = true
+  ) THEN
+    ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+  END IF;
+END $$;
+
+-- Create storage bucket if it doesn't exist
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM storage.buckets WHERE name = 'codette-files'
+  ) THEN
+    INSERT INTO storage.buckets (id, name)
+    VALUES ('codette-files', 'codette-files');
+  END IF;
+END $$;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+  -- Check if the read policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to read files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON public.codette_files FOR SELECT
+    TO authenticated
+    USING (true);
+  END IF;
+
+  -- Check if the admin insert policy exists
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow admin users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow admin users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK ((auth.jwt() ->> 'role')::text = 'admin');
+  END IF;
+
+  -- Check if the authenticated insert policy exists
+  -- NOTE(review): CHECK (true) ORs with — and so neutralizes — the
+  -- admin-only insert policy above.
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (true);
+  END IF;
+END $$;
+
+-- Note: For storage.objects policies, you'll need to create them through the Supabase dashboard
+-- as migrations don't have sufficient permissions to create these policies directly.
+-- Create these policies manually:
+-- 1. Policy name: "Allow authenticated users to read files"
+--    - For: SELECT operations
+--    - Using expression: bucket_id = 'codette-files'
+--
+-- 2. Policy name: "Allow admin users to upload files"
+--    - For: INSERT operations
+--    - Using expression: bucket_id = 'codette-files' AND (auth.jwt() ->> 'role')::text = 'admin'
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523213744_long_sun.sql b/project/supabase/migrations/20250523213744_long_sun.sql
new file mode 100644
index 0000000000000000000000000000000000000000..3068c22f1aa572059bfe974b4f844b9ae88284e9
--- /dev/null
+++ b/project/supabase/migrations/20250523213744_long_sun.sql
@@ -0,0 +1,90 @@
+/*
+  # Storage and File Management Setup
+
+  1. New Storage Configuration
+    - Creates 'codette-files' storage bucket if it doesn't exist
+    - Sets up proper file management structure
+
+  2. Table Policies
+    - Enables RLS on codette_files table
+    - Creates read policy for authenticated users
+    - Creates insert policies for both admin and authenticated users
+    - Ensures proper access control and security
+
+  Note: Storage object policies must be created manually through Supabase dashboard
+
+  NOTE(review): this is the first migration in the series to set the
+  bucket's `public` flag explicitly (false); the earlier bucket INSERTs
+  only supplied (id, name). Only takes effect when the bucket is created
+  fresh here — an existing bucket is left untouched.
+*/
+
+-- Enable RLS on the codette_files table if not already enabled
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_tables
+    WHERE tablename = 'codette_files'
+    AND rowsecurity = true
+  ) THEN
+    ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+  END IF;
+END $$;
+
+-- Create storage bucket if it doesn't exist (private bucket: public = false)
+DO $$
+BEGIN
+  IF NOT EXISTS (
+    SELECT 1 FROM storage.buckets WHERE name = 'codette-files'
+  ) THEN
+    INSERT INTO storage.buckets (id, name, public)
+    VALUES ('codette-files', 'codette-files', false);
+  END IF;
+END $$;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+  -- Create read policy if it doesn't exist
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to read files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to read files"
+    ON public.codette_files FOR SELECT
+    TO authenticated
+    USING (true);
+  END IF;
+
+  -- Create admin insert policy if it doesn't exist
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow admin users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow admin users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK ((auth.jwt() ->> 'role')::text = 'admin');
+  END IF;
+
+  -- Create authenticated insert policy if it doesn't exist
+  -- NOTE(review): CHECK (true) ORs with — and so neutralizes — the
+  -- admin-only insert policy above.
+  IF NOT EXISTS (
+    SELECT 1 FROM pg_policies
+    WHERE policyname = 'Allow authenticated users to insert files'
+    AND tablename = 'codette_files'
+  ) THEN
+    CREATE POLICY "Allow authenticated users to insert files"
+    ON public.codette_files FOR INSERT
+    TO authenticated
+    WITH CHECK (true);
+  END IF;
+END $$;
+
+-- Important: Storage object policies must be created manually through the Supabase dashboard
+-- Create the following policies:
+-- 1. "Allow authenticated users to read files"
+--    - Operation: SELECT
+--    - Target roles: authenticated
+--    - Using expression: bucket_id = 'codette-files'
+--
+-- 2. "Allow admin users to upload files"
+--    - Operation: INSERT
+--    - Target roles: authenticated
+--    - Using expression: bucket_id = 'codette-files' AND (auth.jwt() ->> 'role')::text = 'admin'
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523222316_square_gate.sql b/project/supabase/migrations/20250523222316_square_gate.sql
new file mode 100644
index 0000000000000000000000000000000000000000..c79452834a03445bfa1a6cb02b43ed9b836dbf06
--- /dev/null
+++ b/project/supabase/migrations/20250523222316_square_gate.sql
@@ -0,0 +1,44 @@
+/*
+  # Fix RLS policies for codette_files table
+
+  1. Changes
+    - Drop existing RLS policies that might be conflicting
+    - Add new RLS policies for admin users
+      - Allow admin users to insert files
+      - Allow admin users to read files
+      - Allow admin users to update files
+      - Allow admin users to delete files
+    - Add RLS policies for regular authenticated users
+      - Allow reading files only
+
+  2. Security
+    - Ensures only admin users can upload/modify files
+    - All authenticated users can read files
+    - Proper RLS enforcement for file management
+
+  Fix: table references are now schema-qualified (public.codette_files).
+  The unqualified names relied on search_path resolution, unlike every
+  other migration in this series.
+*/
+
+-- Drop existing policies to avoid conflicts
+DROP POLICY IF EXISTS "Allow admin users to insert files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow authenticated users to insert files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON public.codette_files;
+
+-- Create new policies with proper checks.
+-- Admins get full CRUD on file records: USING gates SELECT/UPDATE/DELETE
+-- visibility, WITH CHECK gates new and updated rows.
+CREATE POLICY "Allow admin users to manage files"
+ON public.codette_files
+FOR ALL
+TO authenticated
+USING (
+  (auth.jwt() ->> 'role')::text = 'admin'
+)
+WITH CHECK (
+  (auth.jwt() ->> 'role')::text = 'admin'
+);
+
+-- Every signed-in user may read file records
+CREATE POLICY "Allow authenticated users to read files"
+ON public.codette_files
+FOR SELECT
+TO authenticated
+USING (true);
+
+-- Enable RLS if not already enabled
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523222514_muddy_desert.sql b/project/supabase/migrations/20250523222514_muddy_desert.sql
new file mode 100644
index 0000000000000000000000000000000000000000..d510845bfc56d4d0dba86c5a1bf2127dd46b572e
--- /dev/null
+++ b/project/supabase/migrations/20250523222514_muddy_desert.sql
@@ -0,0 +1,47 @@
+/*
+  # Storage bucket and RLS policies
+
+  1. Changes
+    - Create storage bucket for Codette files
+    - Set up RLS policies for the bucket
+
+  2. Security
+    - Enable RLS policies for storage bucket
+    - Allow authenticated users to read files
+    - Allow authenticated users to upload files
+    - Allow authenticated users to update files
+    - Allow authenticated users to delete files
+
+  NOTE(review): this re-opens upload/update/delete on the bucket to ALL
+  authenticated users. The admin-only upload policy from 20250523125621 is
+  not dropped, but permissive policies are ORed, so it no longer restricts
+  anything once the blanket upload policy below exists.
+*/
+
+-- Create the storage bucket
+INSERT INTO storage.buckets (id, name)
+VALUES ('codette-files', 'codette-files')
+ON CONFLICT (id) DO NOTHING;
+
+-- Drop existing policies if they exist (keeps this migration re-runnable)
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to upload files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to update files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to delete files" ON storage.objects;
+
+-- Set up RLS policies for the bucket
+CREATE POLICY "Allow authenticated users to read files"
+ON storage.objects FOR SELECT
+TO authenticated
+USING (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to upload files"
+ON storage.objects FOR INSERT
+TO authenticated
+WITH CHECK (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to update files"
+ON storage.objects FOR UPDATE
+TO authenticated
+USING (bucket_id = 'codette-files')
+WITH CHECK (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow authenticated users to delete files"
+ON storage.objects FOR DELETE
+TO authenticated
+USING (bucket_id = 'codette-files');
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523222518_bronze_dew.sql b/project/supabase/migrations/20250523222518_bronze_dew.sql
new file mode 100644
index 0000000000000000000000000000000000000000..b0634910fb2770c8959abbaa1dd65b8498da9efe
--- /dev/null
+++ b/project/supabase/migrations/20250523222518_bronze_dew.sql
@@ -0,0 +1,39 @@
+/*
+  # Update RLS policies for file management
+
+  1. Changes
+    - Update storage.objects policies
+    - Update codette_files table policies
+
+  2. Security
+    - Allow authenticated users to read files
+    - Allow admin users to upload files
+    - Allow admin users to insert file records
+
+  NOTE(review): this intends to re-tighten uploads to admin-only, but it
+  does NOT drop the blanket "Allow authenticated users to upload files"
+  (or update/delete) policies created moments earlier by 20250523222514.
+  Since permissive policies are ORed, non-admin uploads remain allowed
+  after this migration.
+*/
+
+BEGIN;
+
+-- Drop existing policies if they exist
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow admin users to upload files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow admin users to insert files" ON public.codette_files;
+
+-- Create policy to allow authenticated users to read any file
+CREATE POLICY "Allow authenticated users to read files"
+ON storage.objects FOR SELECT
+TO authenticated
+USING (bucket_id = 'codette-files');
+
+-- Create policy to allow only admin users to upload files
+CREATE POLICY "Allow admin users to upload files"
+ON storage.objects FOR INSERT
+TO authenticated
+WITH CHECK (bucket_id = 'codette-files' AND auth.jwt() ->> 'role' = 'admin');
+
+-- Update the codette_files table policies
+CREATE POLICY "Allow admin users to insert files"
+ON public.codette_files FOR INSERT
+TO authenticated
+WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+
+COMMIT;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250523222523_orange_bread.sql b/project/supabase/migrations/20250523222523_orange_bread.sql
new file mode 100644
index 0000000000000000000000000000000000000000..99252e7965994c57a4273ea9c7b67f2a2866fcdb
--- /dev/null
+++ b/project/supabase/migrations/20250523222523_orange_bread.sql
@@ -0,0 +1,53 @@
+/*
+  # Update RLS policies for file management
+
+  1. Changes
+    - Update storage.objects policies
+    - Update codette_files table policies
+    - Enable RLS on codette_files table
+
+  2. Security
+    - Allow authenticated users to read files
+    - Allow admin users to upload files
+    - Allow authenticated users to insert files
+*/
+
+-- Drop existing policies if they exist
+-- NOTE(review): the blanket "Allow authenticated users to upload files"
+-- storage policy from 20250523222514 is still not dropped here, so the
+-- admin-only upload policy below remains ineffective (permissive OR).
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow admin users to upload files" ON storage.objects;
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow admin users to insert files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow authenticated users to insert files" ON public.codette_files;
+
+-- Storage Policies
+CREATE POLICY "Allow authenticated users to read files"
+ON storage.objects FOR SELECT
+TO authenticated
+USING (bucket_id = 'codette-files');
+
+CREATE POLICY "Allow admin users to upload files"
+ON storage.objects FOR INSERT
+TO authenticated
+WITH CHECK (
+  bucket_id = 'codette-files'
+  AND (auth.jwt() ->> 'role' = 'admin')
+);
+
+-- File Management Policies
+CREATE POLICY "Allow authenticated users to read files"
+ON public.codette_files FOR SELECT
+TO authenticated
+USING (true);
+
+CREATE POLICY "Allow admin users to insert files"
+ON public.codette_files FOR INSERT
+TO authenticated
+WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+
+-- NOTE(review): CHECK (true) lets any authenticated user insert, ORing
+-- with — and neutralizing — the admin-only insert policy above.
+CREATE POLICY "Allow authenticated users to insert files"
+ON public.codette_files FOR INSERT
+TO authenticated
+WITH CHECK (true);
+
+-- Enable RLS
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524062844_tender_thunder.sql b/project/supabase/migrations/20250524062844_tender_thunder.sql
new file mode 100644
index 0000000000000000000000000000000000000000..77c77735aad354b7a9b47277c617374f1345a296
--- /dev/null
+++ b/project/supabase/migrations/20250524062844_tender_thunder.sql
@@ -0,0 +1,62 @@
+/*
+  # Update codette_files table and policies
+
+  1. New Tables
+    - Ensures codette_files table exists with proper structure
+      - id (uuid, primary key)
+      - filename (text)
+      - storage_path (text)
+      - file_type (text, nullable)
+      - uploaded_at (timestamptz)
+      - created_at (timestamptz)
+
+  2. Security
+    - Enables RLS if not already enabled
+    - Adds admin-specific policies for file management
+*/
+
+-- Create table if it doesn't exist
+CREATE TABLE IF NOT EXISTS public.codette_files (
+  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+  filename text NOT NULL,
+  storage_path text NOT NULL,
+  file_type text,
+  uploaded_at timestamptz DEFAULT now(),
+  created_at timestamptz DEFAULT now()
+);
+
+-- Enable Row Level Security (idempotent operation)
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+
+-- Drop existing policies to avoid conflicts
+DROP POLICY IF EXISTS "Allow authenticated users to read files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow authenticated users to insert files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow admin users to manage files" ON public.codette_files;
+DROP POLICY IF EXISTS "Allow admin users to insert files" ON public.codette_files;
+
+-- Create new policies
+CREATE POLICY "Allow authenticated users to read files"
+  ON public.codette_files
+  FOR SELECT
+  TO authenticated
+  USING (true);
+
+-- NOTE(review): CHECK (true) re-opens inserts to every authenticated user,
+-- so neither admin policy below restricts INSERT (permissive policies are
+-- ORed). Only UPDATE/DELETE stay admin-only, via the FOR ALL policy —
+-- this undoes the tightening done by 20250523222316_square_gate.
+CREATE POLICY "Allow authenticated users to insert files"
+  ON public.codette_files
+  FOR INSERT
+  TO authenticated
+  WITH CHECK (true);
+
+-- Add admin-specific policies
+CREATE POLICY "Allow admin users to manage files"
+  ON public.codette_files
+  FOR ALL
+  TO authenticated
+  USING ((auth.jwt() ->> 'role'::text) = 'admin'::text)
+  WITH CHECK ((auth.jwt() ->> 'role'::text) = 'admin'::text);
+
+-- NOTE(review): redundant — fully covered by the FOR ALL policy above.
+CREATE POLICY "Allow admin users to insert files"
+  ON public.codette_files
+  FOR INSERT
+  TO authenticated
+  WITH CHECK ((auth.jwt() ->> 'role'::text) = 'admin'::text);
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524213845_mellow_recipe.sql b/project/supabase/migrations/20250524213845_mellow_recipe.sql
new file mode 100644
index 0000000000000000000000000000000000000000..2c4c93aa009cff09bce932d9c5787d87304e19b2
--- /dev/null
+++ b/project/supabase/migrations/20250524213845_mellow_recipe.sql
@@ -0,0 +1,44 @@
+/*
+  # Add user roles table and admin role policy
+
+  1. New Tables
+    - `user_roles`
+      - `id` (uuid, primary key)
+      - `user_id` (uuid, references auth.users)
+      - `role` (text)
+      - `created_at` (timestamptz)
+
+  2. Security
+    - Enable RLS on `user_roles` table
+    - Add policies for admin role management
+
+  Fix: the admin policy previously ran `SELECT ... FROM user_roles` inside a
+  policy ON user_roles itself. Postgres applies RLS to that inner query too,
+  which fails at query time with "infinite recursion detected in policy for
+  relation user_roles". The admin check now reads the JWT role claim
+  (auth.jwt() ->> 'role'), consistent with the other migrations in this
+  project.
+*/
+
+-- Create user_roles table
+CREATE TABLE IF NOT EXISTS user_roles (
+  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+  user_id uuid REFERENCES auth.users NOT NULL,
+  role text NOT NULL,
+  created_at timestamptz DEFAULT now()
+);
+
+-- Enable RLS
+ALTER TABLE user_roles ENABLE ROW LEVEL SECURITY;
+
+-- Policies for user_roles table
+CREATE POLICY "Users can read their own role"
+  ON user_roles
+  FOR SELECT
+  TO authenticated
+  USING (auth.uid() = user_id);
+
+-- Admin status comes from the JWT claim, NOT from user_roles, to avoid
+-- recursive policy evaluation on this same table.
+CREATE POLICY "Only admins can manage roles"
+  ON user_roles
+  FOR ALL
+  TO authenticated
+  USING ((auth.jwt() ->> 'role') = 'admin')
+  WITH CHECK ((auth.jwt() ->> 'role') = 'admin');
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524214450_green_poetry.sql b/project/supabase/migrations/20250524214450_green_poetry.sql
new file mode 100644
index 0000000000000000000000000000000000000000..ecbef13d284a0c1e6c2ef855a84ff290d068b269
--- /dev/null
+++ b/project/supabase/migrations/20250524214450_green_poetry.sql
@@ -0,0 +1,81 @@
+/*
+ # Authentication and User Roles Setup
+
+ 1. New Tables
+ - `user_roles`
+ - `id` (uuid, primary key)
+ - `user_id` (uuid, references auth.users)
+ - `role` (text)
+ - `created_at` (timestamp with time zone)
+
+ 2. Security
+ - Enable RLS on `user_roles` table
+ - Add policies for authenticated users to read their own role
+  - Add policy for admin users to manage roles (WARNING: the admin policy below sub-selects from user_roles inside a user_roles policy — Postgres raises "infinite recursion detected in policy" when it is evaluated; use a SECURITY DEFINER function such as get_user_role() for the lookup instead)
+*/
+
+-- Create user_roles table
+CREATE TABLE IF NOT EXISTS public.user_roles (
+ id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+ user_id uuid REFERENCES auth.users NOT NULL,
+ role text NOT NULL,
+ created_at timestamptz DEFAULT now()
+);
+
+-- Enable RLS
+ALTER TABLE public.user_roles ENABLE ROW LEVEL SECURITY;
+
+-- Policies
+CREATE POLICY "Users can read own role"
+ ON public.user_roles
+ FOR SELECT
+ TO authenticated
+ USING (auth.uid() = user_id);
+
+CREATE POLICY "Admin users can manage roles"
+ ON public.user_roles
+ FOR ALL
+ TO authenticated
+ USING ((SELECT role FROM public.user_roles WHERE user_id = auth.uid()) = 'admin')
+ WITH CHECK ((SELECT role FROM public.user_roles WHERE user_id = auth.uid()) = 'admin');
+
+-- Create admin user if not exists
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM auth.users WHERE email = 'admin@codette.ai'
+ ) THEN
+ INSERT INTO auth.users (
+ instance_id,
+ id,
+ aud,
+ role,
+ email,
+ encrypted_password,
+ email_confirmed_at,
+ created_at,
+ updated_at,
+ confirmation_token,
+ recovery_token
+ )
+ VALUES (
+ '00000000-0000-0000-0000-000000000000',
+ gen_random_uuid(),
+ 'authenticated',
+ 'authenticated',
+ 'admin@codette.ai',
+    crypt('admin123', gen_salt('bf')), -- SECURITY: default password 'admin123' is committed to source control; rotate this credential immediately after first deploy
+ now(),
+ now(),
+ now(),
+ encode(gen_random_bytes(32), 'hex'),
+ encode(gen_random_bytes(32), 'hex')
+ );
+
+ -- Add admin role
+ INSERT INTO public.user_roles (user_id, role)
+ SELECT id, 'admin'
+ FROM auth.users
+ WHERE email = 'admin@codette.ai';
+ END IF;
+END $$;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524214705_sunny_sunset.sql b/project/supabase/migrations/20250524214705_sunny_sunset.sql
new file mode 100644
index 0000000000000000000000000000000000000000..add8e62f69de68e62e744964e24b2500869d2c00
--- /dev/null
+++ b/project/supabase/migrations/20250524214705_sunny_sunset.sql
@@ -0,0 +1,16 @@
+/*
+ # Storage bucket and policies setup
+
+ 1. Changes
+ - Creates storage bucket for file storage
+  - NOTE: no RLS policies are actually created in this file — only the bucket row insert below
+
+  2. Security
+  - Bucket is private by default (no "public" flag is set on the insert)
+  - storage.objects policies must be added in a separate migration before clients can read or write files
+*/
+
+-- Create the storage bucket if it doesn't exist
+INSERT INTO storage.buckets (id, name)
+VALUES ('codette-files', 'codette-files')
+ON CONFLICT (id) DO NOTHING;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524214708_lively_cell.sql b/project/supabase/migrations/20250524214708_lively_cell.sql
new file mode 100644
index 0000000000000000000000000000000000000000..6009c4612ae475fa7e0f95b765061e2cb3ae81fe
--- /dev/null
+++ b/project/supabase/migrations/20250524214708_lively_cell.sql
@@ -0,0 +1,54 @@
+/*
+ # File management policies
+
+ 1. Changes
+ - Creates policies for file management
+ - Sets up proper access control for authenticated users and admins
+
+ 2. Security
+ - Implements RLS policies for the codette_files table
+  - WARNING: the blanket authenticated-insert policy below uses WITH CHECK (true); permissive policies are OR-ed together, so it renders the admin-only insert policy ineffective — any authenticated user can insert
+*/
+
+-- Enable RLS on codette_files table
+ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+ -- Check if the read policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow authenticated users to read files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow authenticated users to read files"
+ ON public.codette_files FOR SELECT
+ TO authenticated
+ USING (true);
+ END IF;
+
+ -- Check if the admin insert policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow admin users to insert files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow admin users to insert files"
+ ON public.codette_files FOR INSERT
+ TO authenticated
+ WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+ END IF;
+
+ -- Check if the authenticated insert policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow authenticated users to insert files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow authenticated users to insert files"
+ ON public.codette_files FOR INSERT
+ TO authenticated
+ WITH CHECK (true);
+ END IF;
+END $$;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524214713_yellow_dawn.sql b/project/supabase/migrations/20250524214713_yellow_dawn.sql
new file mode 100644
index 0000000000000000000000000000000000000000..1b600ddfa0de3a9d08250c7c36d885a7c59bf031
--- /dev/null
+++ b/project/supabase/migrations/20250524214713_yellow_dawn.sql
@@ -0,0 +1,63 @@
+/*
+ # File management and storage setup
+
+ 1. Changes
+ - Enables RLS on codette_files table
+ - Creates necessary policies for file management
+
+ 2. Security
+ - Implements proper access control through RLS
+  - WARNING: the permissive WITH CHECK (true) insert policy below ORs with the admin-only policy, so the role-based restriction on inserts is effectively void for authenticated users
+*/
+
+-- Enable RLS on codette_files table if not already enabled
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_tables
+ WHERE tablename = 'codette_files'
+ AND rowsecurity = true
+ ) THEN
+ ALTER TABLE public.codette_files ENABLE ROW LEVEL SECURITY;
+ END IF;
+END $$;
+
+-- Create policies for the codette_files table
+DO $$
+BEGIN
+ -- Check if the read policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow authenticated users to read files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow authenticated users to read files"
+ ON public.codette_files FOR SELECT
+ TO authenticated
+ USING (true);
+ END IF;
+
+ -- Check if the admin insert policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow admin users to insert files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow admin users to insert files"
+ ON public.codette_files FOR INSERT
+ TO authenticated
+ WITH CHECK (auth.jwt() ->> 'role' = 'admin');
+ END IF;
+
+ -- Check if the authenticated insert policy exists
+ IF NOT EXISTS (
+ SELECT 1 FROM pg_policies
+ WHERE policyname = 'Allow authenticated users to insert files'
+ AND tablename = 'codette_files'
+ ) THEN
+ CREATE POLICY "Allow authenticated users to insert files"
+ ON public.codette_files FOR INSERT
+ TO authenticated
+ WITH CHECK (true);
+ END IF;
+END $$;
\ No newline at end of file
diff --git a/project/supabase/migrations/20250524215300_flat_firefly.sql b/project/supabase/migrations/20250524215300_flat_firefly.sql
new file mode 100644
index 0000000000000000000000000000000000000000..a9ce30b2c75880af1a4052af0e9f1acaadf07259
--- /dev/null
+++ b/project/supabase/migrations/20250524215300_flat_firefly.sql
@@ -0,0 +1,26 @@
+/*
+ # Add get_user_role function
+
+ 1. New Functions
+ - `get_user_role`: Returns the role of the authenticated user
+
+ 2. Security
+  - NOTE: no GRANT/REVOKE is issued here, so EXECUTE defaults to PUBLIC; add "REVOKE EXECUTE ... FROM PUBLIC; GRANT EXECUTE ... TO authenticated;" to enforce the intended restriction
+ - Returns the user's role from user_roles table
+*/
+
+-- Create function to get user role
+CREATE OR REPLACE FUNCTION public.get_user_role()
+RETURNS TABLE (role text)
+LANGUAGE plpgsql
+SECURITY DEFINER
+SET search_path = public
+AS $$
+BEGIN
+ RETURN QUERY
+ SELECT ur.role
+ FROM public.user_roles ur
+ WHERE ur.user_id = auth.uid()
+ LIMIT 1;
+END;
+$$;
\ No newline at end of file
diff --git a/project/tailwind.config.js b/project/tailwind.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..65ea50129652fab8c1fa4b5e16c8ff546ba94892
--- /dev/null
+++ b/project/tailwind.config.js
@@ -0,0 +1,63 @@
+/** @type {import('tailwindcss').Config} */
+export default {
+ content: ['./index.html', './src/**/*.{js,ts,jsx,tsx}'],
+ darkMode: 'class',
+ theme: {
+ extend: {
+ colors: {
+ primary: {
+ 50: '#EFF6FF',
+ 100: '#DBEAFE',
+ 200: '#BFDBFE',
+ 300: '#93C5FD',
+ 400: '#60A5FA',
+ 500: '#3B82F6',
+ 600: '#2563EB',
+ 700: '#1D4ED8',
+ 800: '#1E40AF',
+ 900: '#1E3A8A',
+ },
+ secondary: {
+ 50: '#F5F3FF',
+ 100: '#EDE9FE',
+ 200: '#DDD6FE',
+ 300: '#C4B5FD',
+ 400: '#A78BFA',
+ 500: '#8B5CF6',
+ 600: '#7C3AED',
+ 700: '#6D28D9',
+ 800: '#5B21B6',
+ 900: '#4C1D95',
+ },
+ accent: {
+ 50: '#ECFDF5',
+ 100: '#D1FAE5',
+ 200: '#A7F3D0',
+ 300: '#6EE7B7',
+ 400: '#34D399',
+ 500: '#10B981',
+ 600: '#059669',
+ 700: '#047857',
+ 800: '#065F46',
+ 900: '#064E3B',
+ },
+ },
+ animation: {
+ 'pulse': 'pulse 2s infinite',
+ 'float': 'float 3s ease-in-out infinite',
+ 'spin-slow': 'spin 4s linear infinite',
+ },
+ keyframes: {
+ float: {
+ '0%, 100%': { transform: 'translateY(0)' },
+ '50%': { transform: 'translateY(-10px)' },
+ },
+ },
+ backdropFilter: {
+ 'none': 'none',
+ 'blur': 'blur(8px)',
+ },
+ },
+ },
+ plugins: [],
+};
\ No newline at end of file
diff --git a/project/tsconfig.app.json b/project/tsconfig.app.json
new file mode 100644
index 0000000000000000000000000000000000000000..f0a235055d24607ba5e0bedc494ffc35b5956fbb
--- /dev/null
+++ b/project/tsconfig.app.json
@@ -0,0 +1,24 @@
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "useDefineForClassFields": true,
+ "lib": ["ES2020", "DOM", "DOM.Iterable"],
+ "module": "ESNext",
+ "skipLibCheck": true,
+
+ /* Bundler mode */
+ "moduleResolution": "bundler",
+ "allowImportingTsExtensions": true,
+ "isolatedModules": true,
+ "moduleDetection": "force",
+ "noEmit": true,
+ "jsx": "react-jsx",
+
+ /* Linting */
+ "strict": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "noFallthroughCasesInSwitch": true
+ },
+ "include": ["src"]
+}
diff --git a/project/tsconfig.json b/project/tsconfig.json
new file mode 100644
index 0000000000000000000000000000000000000000..1ffef600d959ec9e396d5a260bd3f5b927b2cef8
--- /dev/null
+++ b/project/tsconfig.json
@@ -0,0 +1,7 @@
+{
+ "files": [],
+ "references": [
+ { "path": "./tsconfig.app.json" },
+ { "path": "./tsconfig.node.json" }
+ ]
+}
diff --git a/project/tsconfig.node.json b/project/tsconfig.node.json
new file mode 100644
index 0000000000000000000000000000000000000000..0d3d71446a455c5f997e3cffb25099dab0f74a9b
--- /dev/null
+++ b/project/tsconfig.node.json
@@ -0,0 +1,22 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "lib": ["ES2023"],
+ "module": "ESNext",
+ "skipLibCheck": true,
+
+ /* Bundler mode */
+ "moduleResolution": "bundler",
+ "allowImportingTsExtensions": true,
+ "isolatedModules": true,
+ "moduleDetection": "force",
+ "noEmit": true,
+
+ /* Linting */
+ "strict": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "noFallthroughCasesInSwitch": true
+ },
+ "include": ["vite.config.ts"]
+}
diff --git a/project/vite.config.ts b/project/vite.config.ts
new file mode 100644
index 0000000000000000000000000000000000000000..147380affffa48cce8a70ee052703fd1538ac76a
--- /dev/null
+++ b/project/vite.config.ts
@@ -0,0 +1,10 @@
+import { defineConfig } from 'vite';
+import react from '@vitejs/plugin-react';
+
+// https://vitejs.dev/config/
+export default defineConfig({
+ plugins: [react()],
+ optimizeDeps: {
+ exclude: ['lucide-react'],
+ },
+});