diff --git a/.github/workflows/cleanup-uploads.yml b/.github/workflows/cleanup-uploads.yml
new file mode 100644
index 0000000..5c577f6
--- /dev/null
+++ b/.github/workflows/cleanup-uploads.yml
@@ -0,0 +1,23 @@
+# GitHub Actions workflow to clean up uploaded files on Render
+# This runs every 5 hours
+# The workflow calls the cleanup API endpoint
+
+name: Cleanup Uploaded Files
+
+on:
+  schedule:
+    # Run every 5 hours
+    - cron: '0 */5 * * *'
+  workflow_dispatch:  # Allow manual triggering
+
+jobs:
+  cleanup:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Trigger cleanup endpoint
+        run: |
+          curl --fail -X POST \
+            -H "Content-Type: application/json" \
+            "${{ secrets.CLEANUP_ENDPOINT_URL }}" \
+            -d '{"secret": "${{ secrets.CLEANUP_SECRET }}"}'
diff --git a/.gitignore b/.gitignore
index 8842d13..d6d12da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -73,6 +73,7 @@ sqlnet.ora
 # Django staticfiles and media
 staticfiles/
 media/
+temp_uploads/
 
 # Python build artifacts
 *.egg-info/
diff --git a/project/.env.example b/project/.env.example
index 83bf439..417d073 100644
--- a/project/.env.example
+++ b/project/.env.example
@@ -7,16 +7,26 @@
 # =====================================================
 
 # Django Settings
-DJANGO_SECRET_KEY=your-secret-key-here
-DJANGO_DEBUG=True
+# CRITICAL: Generate a secure secret key for production
+# python -c 'from django.core.management.utils import get_random_secret_key; print(get_random_secret_key())'
+DJANGO_SECRET_KEY=your-secret-key-here-CHANGE-THIS-IN-PRODUCTION
+DJANGO_DEBUG=False
 DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
 
 # Deployment Environment
 # Set to "DEV" or "LOCAL" to use server-side API keys (for local development)
-# Set to "PROD" or leave empty to require users to provide their own keys (for remote deployment)
-# Default: PROD (users bring their own keys)
+# Set to "PROD" for production deployment (users bring their own keys)
+# IMPORTANT: In production, also set CORS_ALLOWED_ORIGINS and CSRF_TRUSTED_ORIGINS
ENVIRONMENT=DEV
 
+# Production CORS Configuration (required when DEBUG=False)
+# Comma-separated list of allowed origins
+# CORS_ALLOWED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com
+
+# Production CSRF Configuration (required when DEBUG=False)
+# Comma-separated list of trusted origins
+# CSRF_TRUSTED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com
+
 # AI Provider Configuration
 # Choose which AI provider to use: 'gemini' or 'claude'
 AI_PROVIDER=gemini
@@ -60,3 +70,8 @@ ORACLE_WALLET_PASSWORD=
 # GitHub OAuth (configured in Firebase Console)
 GITHUB_CLIENT_ID=
 GITHUB_CLIENT_SECRET=
+
+# File Cleanup Configuration
+# Secret token for triggering remote file cleanup
+# Generate with: python -c 'import secrets; print(secrets.token_urlsafe(32))'
+CLEANUP_SECRET_TOKEN=your-cleanup-secret-token-here
diff --git a/project/authentication/firebase_auth.py b/project/authentication/firebase_auth.py
index f1eccd1..452a01c 100644
--- a/project/authentication/firebase_auth.py
+++ b/project/authentication/firebase_auth.py
@@ -2,11 +2,12 @@
 Firebase Admin SDK initialization and authentication utilities.
""" import os +import logging import firebase_admin from firebase_admin import credentials, auth from dotenv import load_dotenv -# Load environment variables +logger = logging.getLogger('authentication') load_dotenv() @@ -53,8 +54,17 @@ def verify_firebase_token(id_token): initialize_firebase() decoded_token = auth.verify_id_token(id_token) return decoded_token + except auth.InvalidIdTokenError as e: + logger.warning(f"Invalid Firebase ID token: {str(e)}") + return None + except auth.ExpiredIdTokenError as e: + logger.warning(f"Expired Firebase ID token: {str(e)}") + return None + except auth.RevokedIdTokenError as e: + logger.warning(f"Revoked Firebase ID token: {str(e)}") + return None except Exception as e: - print(f"Firebase token verification error: {str(e)}") + logger.error(f"Unexpected Firebase token verification error: {str(e)}", exc_info=True) return None diff --git a/project/authentication/middleware.py b/project/authentication/middleware.py index dade051..2920fed 100644 --- a/project/authentication/middleware.py +++ b/project/authentication/middleware.py @@ -1,11 +1,14 @@ """ Firebase authentication middleware for Django. """ +import logging from django.utils.deprecation import MiddlewareMixin from django.http import JsonResponse from .firebase_auth import verify_firebase_token, get_user_info_from_token from .models import User +logger = logging.getLogger('authentication') + class FirebaseAuthenticationMiddleware(MiddlewareMixin): """ @@ -25,7 +28,8 @@ def process_request(self, request): # Skip authentication for certain paths exempt_paths = [ '/admin/', - '/api/auth/verify-token', # Allow token verification endpoint + '/api/auth/verify-token', + '/api/auth/csrf', ] if any(request.path.startswith(path) for path in exempt_paths): @@ -63,7 +67,7 @@ def process_request(self, request): else: request.firebase_user = None except Exception as e: - print(f"Firebase authentication error: {str(e)}") + logger.error(f"Firebase authentication error: {str(e)}", exc_info=True) request.firebase_user = None return None @@ -81,6 +85,7 @@ def my_view(request): """ def wrapper(request, *args, **kwargs): if not hasattr(request, 'firebase_user') or request.firebase_user is None: + logger.warning(f"Unauthorized access attempt to {request.path} from IP {request.META.get('REMOTE_ADDR')}") return JsonResponse({ 'error': 'Authentication required', 'message': 'You must be logged in to access this endpoint' diff --git a/project/authentication/urls.py b/project/authentication/urls.py index 6d7f13b..2f74ff4 100644 --- a/project/authentication/urls.py +++ b/project/authentication/urls.py @@ -5,6 +5,7 @@ from . import views urlpatterns = [ + path('csrf', views.get_csrf_token, name='csrf_token'), path('verify-token', views.verify_token, name='verify_token'), path('me', views.get_current_user, name='get_current_user'), path('update-session', views.update_session, name='update_session'), diff --git a/project/authentication/views.py b/project/authentication/views.py index 8ea9331..22a07c3 100644 --- a/project/authentication/views.py +++ b/project/authentication/views.py @@ -2,21 +2,40 @@ Authentication API views for Firebase integration. 
""" from django.http import JsonResponse, HttpRequest -from django.views.decorators.csrf import csrf_exempt +from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie from django.views.decorators.http import require_http_methods from django.utils import timezone +from django.conf import settings from django_ratelimit.decorators import ratelimit import json import traceback +import logging from .firebase_auth import verify_firebase_token, get_user_info_from_token from .models import User from .middleware import require_authentication +logger = logging.getLogger('authentication') + + +@require_http_methods(["GET"]) +@ensure_csrf_cookie +def get_csrf_token(request: HttpRequest) -> JsonResponse: + """ + Get CSRF token for subsequent requests. + + GET /api/auth/csrf + + Returns: + 200: CSRF token in response header and body + """ + response = JsonResponse({'success': True, 'detail': 'CSRF cookie set'}) + return response + @csrf_exempt @require_http_methods(["POST"]) -@ratelimit(key='ip', rate='30/m', method='POST', block=True) +@ratelimit(key='ip', rate='10/m', method='POST', block=True) def verify_token(request: HttpRequest) -> JsonResponse: """ Verify Firebase ID token and create/update user in database. @@ -44,6 +63,7 @@ def verify_token(request: HttpRequest) -> JsonResponse: # Verify token decoded_token = verify_firebase_token(token) if not decoded_token: + logger.warning(f"Failed token verification from IP {request.META.get('REMOTE_ADDR')}") return JsonResponse({ 'error': 'Invalid token', 'message': 'Failed to verify Firebase token' @@ -114,11 +134,14 @@ def verify_token(request: HttpRequest) -> JsonResponse: 'message': 'Request body must be valid JSON' }, status=400) except Exception as e: - traceback.print_exc() # Print full traceback to console for debugging - return JsonResponse({ + logger.error(f"Unexpected error in verify_token: {str(e)}", exc_info=True) + response = { 'error': 'Server error', 'message': 'An unexpected error occurred. Please try again later.' - }, status=500) + } + if settings.DEBUG: + response['traceback'] = traceback.format_exc() + return JsonResponse(response, status=500) @csrf_exempt @@ -161,7 +184,7 @@ def get_current_user(request: HttpRequest) -> JsonResponse: @csrf_exempt @require_http_methods(["POST"]) @require_authentication -@ratelimit(key='user_or_ip', rate='120/h', method='POST', block=True) +@ratelimit(key='user_or_ip', rate='60/h', method='POST', block=True) def update_session(request): """ Update session information (increment session count, add time spent). @@ -204,11 +227,14 @@ def update_session(request): 'message': 'Request body must be valid JSON' }, status=400) except Exception as e: - traceback.print_exc() # Print full traceback to console for debugging - return JsonResponse({ + logger.error(f"Error in update_session: {str(e)}", exc_info=True) + response = { 'error': 'Server error', 'message': 'An unexpected error occurred. Please try again later.' - }, status=500) + } + if settings.DEBUG: + response['traceback'] = traceback.format_exc() + return JsonResponse(response, status=500) @csrf_exempt @@ -235,17 +261,20 @@ def logout(request): }, status=200) except Exception as e: - traceback.print_exc() # Print full traceback to console for debugging - return JsonResponse({ + logger.error(f"Error in logout: {str(e)}", exc_info=True) + response = { 'error': 'Server error', 'message': 'An unexpected error occurred. Please try again later.' 
- }, status=500) + } + if settings.DEBUG: + response['traceback'] = traceback.format_exc() + return JsonResponse(response, status=500) @csrf_exempt @require_http_methods(["POST"]) @require_authentication -@ratelimit(key='user_or_ip', rate='60/h', method='POST', block=True) +@ratelimit(key='user_or_ip', rate='30/h', method='POST', block=True) def mark_milestone(request): """ Mark user milestones (first model created, first export). @@ -293,8 +322,11 @@ def mark_milestone(request): 'message': 'Request body must be valid JSON' }, status=400) except Exception as e: - traceback.print_exc() # Print full traceback to console for debugging - return JsonResponse({ + logger.error(f"Error in mark_milestone: {str(e)}", exc_info=True) + response = { 'error': 'Server error', 'message': 'An unexpected error occurred. Please try again later.' - }, status=500) + } + if settings.DEBUG: + response['traceback'] = traceback.format_exc() + return JsonResponse(response, status=500) diff --git a/project/backend/settings.py b/project/backend/settings.py index 1fc54d2..c2c84f4 100644 --- a/project/backend/settings.py +++ b/project/backend/settings.py @@ -25,10 +25,12 @@ # See https://docs.djangoproject.com/en/5.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'django-insecure-oy%j%4%)w%7#sx@e!h+m-hai9zvl*)-5$5uz%wlro4ry1*4vc-') +SECRET_KEY = os.getenv('DJANGO_SECRET_KEY') +if not SECRET_KEY: + raise ValueError("DJANGO_SECRET_KEY environment variable must be set") # SECURITY WARNING: don't run with debug turned on in production! -DEBUG = os.getenv('DJANGO_DEBUG', 'True') == 'True' +DEBUG = os.getenv('DJANGO_DEBUG', 'False') == 'True' # Environment Variable Validation (Production Only) REQUIRED_ENV_VARS = [ @@ -106,7 +108,12 @@ ] # CORS configuration - read from environment variable for production -cors_origins = os.getenv('CORS_ALLOWED_ORIGINS', 'http://localhost:3000,http://localhost:5173,http://localhost:5000') +if DEBUG: + cors_origins = os.getenv('CORS_ALLOWED_ORIGINS', 'http://localhost:3000,http://localhost:5173,http://localhost:5000') +else: + cors_origins = os.getenv('CORS_ALLOWED_ORIGINS') + if not cors_origins: + raise ValueError("CORS_ALLOWED_ORIGINS environment variable must be set in production") CORS_ALLOWED_ORIGINS = [origin.strip() for origin in cors_origins.split(',')] CORS_ALLOW_CREDENTIALS = True @@ -125,12 +132,19 @@ 'x-firebase-token', ] -csrf_origins = os.getenv('CSRF_TRUSTED_ORIGINS', 'http://localhost:3000,http://localhost:5173,http://localhost:5000') +if DEBUG: + csrf_origins = os.getenv('CSRF_TRUSTED_ORIGINS', 'http://localhost:3000,http://localhost:5173,http://localhost:5000') +else: + csrf_origins = os.getenv('CSRF_TRUSTED_ORIGINS') + if not csrf_origins: + raise ValueError("CSRF_TRUSTED_ORIGINS environment variable must be set in production") CSRF_TRUSTED_ORIGINS = [origin.strip() for origin in csrf_origins.split(',')] -# CSRF configuration: keep cookie HTTP-only; frontend should use X-CSRFToken header pattern +# CSRF Protection CSRF_COOKIE_HTTPONLY = True -CSRF_USE_SESSIONS = False # Use cookie-based CSRF tokens +CSRF_USE_SESSIONS = False +CSRF_COOKIE_SAMESITE = 'Lax' +CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN' # Environment mode configuration # Controls API key behavior: PROD/missing = BYOK, DEV/LOCAL = server keys @@ -139,18 +153,16 @@ REQUIRES_USER_API_KEY = IS_PRODUCTION REST_FRAMEWORK = { - # For local dev only: allow unauthenticated access to endpoints "DEFAULT_PERMISSION_CLASSES": [ - 
"rest_framework.permissions.AllowAny", + "rest_framework.permissions.IsAuthenticatedOrReadOnly", ], - # Rate limiting via DRF throttling (global fallback) "DEFAULT_THROTTLE_CLASSES": [ "rest_framework.throttling.AnonRateThrottle", "rest_framework.throttling.UserRateThrottle", ], "DEFAULT_THROTTLE_RATES": { - "anon": "100/hour", - "user": "1000/hour", + "anon": "60/hour", + "user": "600/hour", }, } @@ -260,10 +272,16 @@ # https://github.com/firebase/firebase-js-sdk/issues/8541 SECURE_CROSS_ORIGIN_OPENER_POLICY = 'same-origin-allow-popups' - # Content Security Policy (basic - customize as needed) - # CSP_DEFAULT_SRC = ("'self'",) - # CSP_SCRIPT_SRC = ("'self'", "'unsafe-inline'") # Adjust based on your needs - # CSP_STYLE_SRC = ("'self'", "'unsafe-inline'") + # Content Security Policy + CSP_DEFAULT_SRC = ("'self'",) + CSP_SCRIPT_SRC = ("'self'", "'unsafe-inline'", "'unsafe-eval'", "https://www.gstatic.com", "https://apis.google.com") + CSP_STYLE_SRC = ("'self'", "'unsafe-inline'", "https://fonts.googleapis.com") + CSP_IMG_SRC = ("'self'", "data:", "https:", "blob:") + CSP_FONT_SRC = ("'self'", "https://fonts.gstatic.com", "data:") + CSP_CONNECT_SRC = ("'self'", "https://firebasestorage.googleapis.com", "https://identitytoolkit.googleapis.com", "https://securetoken.googleapis.com", "https://*.googleapis.com") + CSP_FRAME_SRC = ("https://accounts.google.com", "https://github.com") + CSP_OBJECT_SRC = ("'none'",) + CSP_BASE_URI = ("'self'",) else: # Development settings - less strict @@ -295,3 +313,12 @@ # https://docs.djangoproject.com/en/5.2/ref/settings/#default-auto-field DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + +# ========================================== +# FILE UPLOAD CONFIGURATION +# ========================================== +TEMP_UPLOAD_DIR = BASE_DIR / 'temp_uploads' +UPLOAD_RETENTION_HOURS = 2 # Delete files older than 2 hours + +# Create upload directory if it doesn't exist +TEMP_UPLOAD_DIR.mkdir(exist_ok=True) diff --git a/project/backend/urls.py b/project/backend/urls.py index 82e0408..6ea3542 100644 --- a/project/backend/urls.py +++ b/project/backend/urls.py @@ -19,6 +19,7 @@ # API endpoints - these must come BEFORE the catch-all route path('api/v1/', include('block_manager.urls')), path('api/v1/auth/', include('authentication.urls')), + path('api/v1/maintenance/', include('block_manager.maintenance_urls')), ] # Serve static files in development diff --git a/project/block_manager/maintenance_urls.py b/project/block_manager/maintenance_urls.py new file mode 100644 index 0000000..a688301 --- /dev/null +++ b/project/block_manager/maintenance_urls.py @@ -0,0 +1,10 @@ +""" +URL routing for maintenance endpoints. 
+""" +from django.urls import path +from block_manager.views import maintenance_views + +urlpatterns = [ + path('cleanup-files', maintenance_views.trigger_file_cleanup, name='trigger_file_cleanup'), + path('upload-stats', maintenance_views.get_upload_stats, name='upload_stats'), +] diff --git a/project/block_manager/models.py b/project/block_manager/models.py index ca191dc..57fa317 100644 --- a/project/block_manager/models.py +++ b/project/block_manager/models.py @@ -1,6 +1,12 @@ from django.db import models import uuid import json +from .validators import ( + validate_canvas_state, + validate_block_config, + validate_group_internal_structure, + validate_shape_data, +) class Project(models.Model): @@ -39,7 +45,7 @@ class ModelArchitecture(models.Model): on_delete=models.CASCADE, related_name='architecture' ) - canvas_state = models.JSONField(default=dict, blank=True) + canvas_state = models.JSONField(default=dict, blank=True, validators=[validate_canvas_state]) is_valid = models.BooleanField(default=False) validation_errors = models.JSONField(default=list, blank=True) created_at = models.DateTimeField(auto_now_add=True) @@ -63,7 +69,7 @@ class GroupBlockDefinition(models.Model): color = models.CharField(max_length=50, default='#9333ea') # Serialized structure: {nodes, edges, portMappings} - internal_structure = models.JSONField(default=dict, blank=True) + internal_structure = models.JSONField(default=dict, blank=True, validators=[validate_group_internal_structure]) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) @@ -89,9 +95,9 @@ class Block(models.Model): block_type = models.CharField(max_length=50) position_x = models.FloatField(default=0) position_y = models.FloatField(default=0) - config = models.JSONField(default=dict, blank=True) - input_shape = models.JSONField(null=True, blank=True) - output_shape = models.JSONField(null=True, blank=True) + config = models.JSONField(default=dict, blank=True, validators=[validate_block_config]) + input_shape = models.JSONField(null=True, blank=True, validators=[validate_shape_data]) + output_shape = models.JSONField(null=True, blank=True, validators=[validate_shape_data]) # Group block fields group_definition = models.ForeignKey( diff --git a/project/block_manager/services/enhanced_pytorch_codegen.py b/project/block_manager/services/enhanced_pytorch_codegen.py index b559d57..6ea9ad3 100644 --- a/project/block_manager/services/enhanced_pytorch_codegen.py +++ b/project/block_manager/services/enhanced_pytorch_codegen.py @@ -1,5 +1,6 @@ from typing import Any, Dict, List, Optional, Tuple from collections import deque +import json # ============================================ # Custom Exception Classes @@ -1629,9 +1630,23 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: # Skip input/output nodes but track input in var_map if node_type in ('input', 'dataloader', 'output'): if node_type == 'input': - # Extract input shape - config = node.get('data').get('config') - input_shape = eval(config.get('shape', '[1, 3, 224, 224]')) + # Extract input shape safely without using eval + config = node.get('data').get('config') or {} + raw_shape = config.get('shape', [1, 3, 224, 224]) + parsed_shape = (1, 3, 224, 224) + try: + # If shape is a string, try to parse it as JSON (e.g. 
"[1, 3, 224, 224]") + if isinstance(raw_shape, str): + loaded = json.loads(raw_shape) + else: + loaded = raw_shape + # Accept list/tuple of ints + if isinstance(loaded, (list, tuple)) and all(isinstance(d, int) for d in loaded): + parsed_shape = tuple(loaded) + except (ValueError, TypeError, json.JSONDecodeError): + # Fall back to the default shape on any parsing/validation error + parsed_shape = (1, 3, 224, 224) + input_shape = parsed_shape var_map[(node_id, 'default')] = 'x' elif node_type == 'output': var_map[(node_id, 'default')] = 'x' diff --git a/project/block_manager/services/gemini_service.py b/project/block_manager/services/gemini_service.py index 07efb31..3025e8a 100644 --- a/project/block_manager/services/gemini_service.py +++ b/project/block_manager/services/gemini_service.py @@ -1,7 +1,7 @@ """ Gemini AI Service for chat functionality and workflow modifications. """ -import google.generativeai as genai +from google import genai import json import os import tempfile @@ -29,10 +29,10 @@ def __init__(self, api_key: Optional[str] = None): if not final_api_key: raise ValueError("GEMINI_API_KEY environment variable is not set") - genai.configure(api_key=final_api_key) - # Use gemini-2.0-flash-lite - best free tier availability in 2025 - # (gemini-1.5-* deprecated April 2025, gemini-2.5-* severely limited) - self.model = genai.GenerativeModel('gemini-2.0-flash-lite') + # Create client with API key (new unified SDK) + self.client = genai.Client(api_key=final_api_key) + # Use gemini-2.0-flash-exp - experimental 2.0 model (or use gemini-1.5-flash for stable) + self.model_name = 'gemini-2.5-flash-lite' def _format_workflow_context(self, workflow_state: Optional[Dict[str, Any]]) -> str: """Format workflow state into a readable context for the AI.""" @@ -325,7 +325,7 @@ def _build_system_prompt(self, modification_mode: bool, workflow_state: Optional return f"{base_prompt}\n{mode_prompt}\n{workflow_context}" def _format_chat_history(self, history: List[Dict[str, str]]) -> List[Dict[str, Any]]: - """Convert chat history to Gemini format.""" + """Convert chat history to Gemini format for new SDK.""" formatted_history = [] for message in history: @@ -335,9 +335,10 @@ def _format_chat_history(self, history: List[Dict[str, str]]) -> List[Dict[str, # Gemini uses 'user' and 'model' roles gemini_role = 'model' if role == 'assistant' else 'user' + # New SDK expects parts to be text objects, not plain strings formatted_history.append({ 'role': gemini_role, - 'parts': [content] + 'parts': [{'text': content}] }) return formatted_history @@ -359,8 +360,8 @@ def upload_file_to_gemini(self, uploaded_file: UploadedFile) -> Optional[Any]: temp_file.write(chunk) temp_path = temp_file.name - # Upload to Gemini - gemini_file = genai.upload_file(temp_path, display_name=uploaded_file.name) + # Upload to Gemini using new SDK + gemini_file = self.client.files.upload(path=temp_path) # Clean up temporary file os.unlink(temp_path) @@ -451,8 +452,11 @@ def analyze_file_for_architecture( Provide each node as a separate JSON block with appropriate configurations using lowercase nodeType values. 
""" - # Generate content with the file - response = self.model.generate_content([analysis_prompt, gemini_file]) + # Generate content with the file using new SDK + response = self.client.models.generate_content( + model=self.model_name, + contents=[analysis_prompt, gemini_file] + ) response_text = response.text # Extract modifications @@ -512,8 +516,11 @@ def chat( # This ensures the AI always knows the current state and formatting requirements full_message = f"{system_prompt}\n\nUser: {message}" - # Create chat session with history - chat = self.model.start_chat(history=formatted_history) + # Create chat session with history using new SDK + chat = self.client.chats.create( + model=self.model_name, + history=formatted_history + ) # Send message and get response response = chat.send_message(full_message) @@ -586,7 +593,10 @@ def generate_suggestions( Format your response as a simple numbered list.""" - response = self.model.generate_content(prompt) + response = self.client.models.generate_content( + model=self.model_name, + contents=prompt + ) response_text = response.text # Parse suggestions from numbered list diff --git a/project/block_manager/services/tensorflow_codegen.py b/project/block_manager/services/tensorflow_codegen.py index 70e2bca..23dac16 100644 --- a/project/block_manager/services/tensorflow_codegen.py +++ b/project/block_manager/services/tensorflow_codegen.py @@ -7,22 +7,183 @@ from collections import deque import logging -# Import shared utilities and exceptions from PyTorch codegen (framework-agnostic) -from .pytorch_codegen import ( - GroupBlockShapeComputer, +# Import shared exceptions from PyTorch codegen (framework-agnostic) +from .enhanced_pytorch_codegen import ( GroupDefinitionNotFoundError, ShapeMismatchError, CyclicDependencyError, UnsupportedNodeTypeError, ShapeInferenceError, - MissingShapeDataError, - safe_get_shape_data + MissingShapeDataError ) # Configure logging logger = logging.getLogger(__name__) +# ============================================ +# Helper Functions for Shape Inference +# ============================================ + +def safe_get_shape_data( + shape_map: Dict[str, Dict[str, Any]], + node_id: str, + upstream_node_id: str, + required_keys: List[str], + default_values: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """ + Safely retrieve shape data from upstream node with proper error handling. 
+ + Args: + shape_map: Dictionary mapping node IDs to shape information + node_id: ID of the current node requesting shape data + upstream_node_id: ID of the upstream node to get shape from + required_keys: List of required keys in the shape data + default_values: Optional default values to use if keys are missing + + Returns: + Dictionary with shape data + + Raises: + MissingShapeDataError: If required keys are missing and no defaults provided + """ + if upstream_node_id not in shape_map: + if default_values: + return default_values + raise MissingShapeDataError( + node_id=node_id, + upstream_node_id=upstream_node_id, + missing_keys=required_keys + ) + + upstream_shape = shape_map[upstream_node_id] + missing_keys = [key for key in required_keys if key not in upstream_shape] + + if missing_keys: + if default_values: + # Use defaults for missing keys but keep existing values + result = upstream_shape.copy() + for key in missing_keys: + if key in default_values: + result[key] = default_values[key] + return result + raise MissingShapeDataError( + node_id=node_id, + upstream_node_id=upstream_node_id, + missing_keys=missing_keys + ) + + return upstream_shape + + +class GroupBlockShapeComputer: + """ + Computes shapes for group blocks by analyzing their internal structure. + TensorFlow version using NHWC format (batch, height, width, channels). + """ + + def __init__(self, group_definitions: Dict[str, Any]): + """ + Initialize the shape computer. + + Args: + group_definitions: Dictionary mapping definition IDs to group definitions + """ + self.group_definitions = group_definitions + + def compute_output_shape( + self, + group_def_id: str, + input_shape: Dict[str, Any] + ) -> Tuple[Dict[str, Any], List[Exception]]: + """ + Compute the output shape of a group block given its input shape. + + Args: + group_def_id: ID of the group definition + input_shape: Input shape dictionary + + Returns: + Tuple of (output shape dictionary, list of errors) + """ + if group_def_id not in self.group_definitions: + error = GroupDefinitionNotFoundError( + node_id=f"group_block_{group_def_id}", + definition_id=group_def_id + ) + return {}, [error] + + definition = self.group_definitions[group_def_id] + internal_structure = definition.get('internal_structure', {}) + internal_nodes = internal_structure.get('nodes', []) + internal_edges = internal_structure.get('edges', []) + port_mappings = internal_structure.get('portMappings', []) + + # Compute internal shapes + internal_shape_map, errors = self.compute_internal_shapes( + internal_nodes, + internal_edges, + port_mappings, + input_shape, + definition.get('name', 'UnnamedBlock') + ) + + # Find output port and return its shape + output_ports = [pm for pm in port_mappings if pm['type'] == 'output'] + if output_ports: + output_node_id = output_ports[0]['internalNodeId'] + if output_node_id in internal_shape_map: + return internal_shape_map[output_node_id], errors + + # Fallback: return input shape + return input_shape.copy(), errors + + def compute_internal_shapes( + self, + internal_nodes: List[Dict[str, Any]], + internal_edges: List[Dict[str, Any]], + port_mappings: List[Dict[str, Any]], + input_shape: Dict[str, Any], + block_name: str + ) -> Tuple[Dict[str, Dict[str, Any]], List[Exception]]: + """ + Compute shapes for all internal nodes in a group block. 
+ + Args: + internal_nodes: List of internal node definitions + internal_edges: List of internal edges + port_mappings: Port mappings (input/output) + input_shape: Input shape for the block + block_name: Name of the block for error messages + + Returns: + Tuple of (shape map for internal nodes, list of errors) + """ + # Sort nodes topologically + sorted_nodes = topological_sort(internal_nodes, internal_edges) + + # Initialize shape map with input port shapes + shape_map = {} + input_ports = [pm for pm in port_mappings if pm['type'] == 'input'] + for port in input_ports: + internal_node_id = port['internalNodeId'] + shape_map[internal_node_id] = input_shape.copy() + + # Use the main infer_shapes function but pass None for group_definitions + # to avoid recursive group block resolution + internal_shape_map, errors = infer_shapes(sorted_nodes, internal_edges, None) + + # Merge input port shapes + for node_id, shape in shape_map.items(): + if node_id in internal_shape_map: + internal_shape_map[node_id].update(shape) + else: + internal_shape_map[node_id] = shape + + return internal_shape_map, errors + + class TensorFlowBlockGenerator: """ Generator for TensorFlow/Keras tf.keras.Model code for group blocks. diff --git a/project/block_manager/utils/__init__.py b/project/block_manager/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/project/block_manager/utils/file_cleanup.py b/project/block_manager/utils/file_cleanup.py new file mode 100644 index 0000000..5db78ee --- /dev/null +++ b/project/block_manager/utils/file_cleanup.py @@ -0,0 +1,82 @@ +""" +Utilities for managing temporary file uploads. +""" +import os +import time +import logging +from pathlib import Path +from django.conf import settings + +logger = logging.getLogger(__name__) + + +def save_uploaded_file_temporarily(uploaded_file): + """ + Save an uploaded file to the temporary directory with timestamp. + + Args: + uploaded_file: Django UploadedFile object + + Returns: + Path object of saved file + """ + upload_dir = Path(getattr(settings, 'TEMP_UPLOAD_DIR', '/tmp/visionforge_uploads')) + upload_dir.mkdir(parents=True, exist_ok=True) + + # Sanitize filename to prevent path traversal attacks + # Extract only the basename to remove any path components + original_name = os.path.basename(uploaded_file.name) + # Remove path separators that might have been missed (defense in depth) + safe_name = original_name.replace('/', '_').replace('\\', '_') + # Remove null bytes which can cause issues + safe_name = safe_name.replace('\x00', '') + + timestamp = int(time.time()) + safe_filename = f"{timestamp}_{safe_name}" + file_path = upload_dir / safe_filename + + # Verify the resolved path is within the upload directory (additional security check) + resolved_path = file_path.resolve() + if not str(resolved_path).startswith(str(upload_dir.resolve())): + raise ValueError("Invalid file path detected - potential path traversal attack") + + with open(file_path, 'wb+') as destination: + for chunk in uploaded_file.chunks(): + destination.write(chunk) + + logger.info(f"Saved temporary file: {safe_filename} ({uploaded_file.size} bytes)") + return file_path + + +def cleanup_file_after_processing(file_path): + """ + Immediately delete a file after processing. 
+ + Args: + file_path: Path object or string path to file + """ + try: + if isinstance(file_path, str): + file_path = Path(file_path) + + if file_path.exists(): + file_path.unlink() + logger.info(f"Cleaned up processed file: {file_path.name}") + except Exception as e: + logger.error(f"Error cleaning up file {file_path}: {str(e)}") + + +def get_upload_directory_size(): + """ + Get the total size of all files in the upload directory. + + Returns: + Total size in bytes + """ + upload_dir = Path(getattr(settings, 'TEMP_UPLOAD_DIR', '/tmp/visionforge_uploads')) + + if not upload_dir.exists(): + return 0 + + total_size = sum(f.stat().st_size for f in upload_dir.rglob('*') if f.is_file()) + return total_size diff --git a/project/block_manager/validators.py b/project/block_manager/validators.py new file mode 100644 index 0000000..ce9bc60 --- /dev/null +++ b/project/block_manager/validators.py @@ -0,0 +1,76 @@ +""" +JSON schema validators for block_manager models. +""" +import json +from django.core.exceptions import ValidationError + + +def validate_canvas_state(value): + """ + Validate canvas_state JSON structure. + """ + if not isinstance(value, dict): + raise ValidationError("Canvas state must be a dictionary") + + if 'nodes' not in value or not isinstance(value.get('nodes'), list): + raise ValidationError("Canvas state must contain a 'nodes' list") + + if 'edges' not in value or not isinstance(value.get('edges'), list): + raise ValidationError("Canvas state must contain an 'edges' list") + + # Validate node structure + for node in value.get('nodes', []): + if not isinstance(node, dict): + raise ValidationError("Each node must be a dictionary") + if 'id' not in node: + raise ValidationError("Each node must have an 'id' field") + if 'data' not in node or not isinstance(node.get('data'), dict): + raise ValidationError("Each node must have a 'data' dictionary") + + +def validate_block_config(value): + """ + Validate block configuration JSON. + """ + if not isinstance(value, dict): + raise ValidationError("Block config must be a dictionary") + + # Max size check to prevent DoS + json_str = json.dumps(value) + if len(json_str) > 10000: # 10KB limit + raise ValidationError("Block config exceeds maximum size") + + +def validate_group_internal_structure(value): + """ + Validate group block internal structure. + """ + if not isinstance(value, dict): + raise ValidationError("Internal structure must be a dictionary") + + if 'nodes' in value and not isinstance(value['nodes'], list): + raise ValidationError("Internal structure 'nodes' must be a list") + + if 'edges' in value and not isinstance(value['edges'], list): + raise ValidationError("Internal structure 'edges' must be a list") + + # Max size check + json_str = json.dumps(value) + if len(json_str) > 50000: # 50KB limit for group structures + raise ValidationError("Internal structure exceeds maximum size") + + +def validate_shape_data(value): + """ + Validate shape data (input_shape, output_shape). 
+ """ + if value is None: + return + + if not isinstance(value, dict): + raise ValidationError("Shape data must be a dictionary") + + # Max size check + json_str = json.dumps(value) + if len(json_str) > 1000: # 1KB limit for shape data + raise ValidationError("Shape data exceeds maximum size") diff --git a/project/block_manager/views/architecture_views.py b/project/block_manager/views/architecture_views.py index 8ad070a..9a92b74 100644 --- a/project/block_manager/views/architecture_views.py +++ b/project/block_manager/views/architecture_views.py @@ -399,10 +399,9 @@ def render_node_code(request): 'format': 'class' }) except Exception as e: - import traceback - traceback.print_exc() # Log to console for debugging + logger.error(f"Error generating node code: {str(e)}", exc_info=True) return Response( - {'success': False, 'error': f'Error generating node code: {str(e)}'}, + {'success': False, 'error': 'Error generating node code'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR ) diff --git a/project/block_manager/views/chat_views.py b/project/block_manager/views/chat_views.py index af84935..a8b1dd5 100644 --- a/project/block_manager/views/chat_views.py +++ b/project/block_manager/views/chat_views.py @@ -1,16 +1,26 @@ -from rest_framework.decorators import api_view +from rest_framework.decorators import api_view, permission_classes +from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework import status import logging from django_ratelimit.decorators import ratelimit from block_manager.services.ai_service_factory import AIServiceFactory +from block_manager.utils.file_cleanup import save_uploaded_file_temporarily, cleanup_file_after_processing logger = logging.getLogger(__name__) +ALLOWED_FILE_TYPES = [ + 'image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/webp', + 'application/pdf', + 'text/plain', 'text/csv', +] +MAX_FILE_SIZE = 10 * 1024 * 1024 # 10MB + @api_view(['POST']) -@ratelimit(key='user_or_ip', rate='20/m', method='POST', block=True) +@permission_classes([AllowAny]) +@ratelimit(key='user_or_ip', rate='10/m', method='POST', block=True) def chat_message(request): """ Handle chat messages with AI integration supporting both BYOK and server-side keys. 
@@ -55,6 +67,28 @@ def chat_message(request):
     uploaded_file = request.FILES.get('file', None)
 
     if uploaded_file:
+        # Validate file type
+        if uploaded_file.content_type not in ALLOWED_FILE_TYPES:
+            return Response(
+                {
+                    'error': 'Invalid file type',
+                    'response': f'Only the following file types are allowed: {", ".join(ALLOWED_FILE_TYPES)}'
+                },
+                status=status.HTTP_400_BAD_REQUEST
+            )
+
+        # Validate file size
+        if uploaded_file.size > MAX_FILE_SIZE:
+            return Response(
+                {
+                    'error': 'File too large',
+                    'response': f'Maximum file size is {MAX_FILE_SIZE // (1024 * 1024)}MB'
+                },
+                status=status.HTTP_400_BAD_REQUEST
+            )
+
+        logger.info(f"File upload validation passed: {uploaded_file.name} ({uploaded_file.content_type}, {uploaded_file.size} bytes)")
+
         # Parse FormData parameters
         message = request.POST.get('message', '')
         try:
@@ -84,6 +118,7 @@ def chat_message(request):
             status=status.HTTP_400_BAD_REQUEST
         )
 
+    saved_file_path = None
     try:
         # Initialize AI service with appropriate API keys based on mode
         ai_service = AIServiceFactory.create_service(
@@ -97,6 +132,12 @@ def chat_message(request):
         if uploaded_file:
             logger.info(f"Processing file with {provider_name}: {uploaded_file.name}")
 
+            # Save file temporarily for tracking and cleanup
+            try:
+                saved_file_path = save_uploaded_file_temporarily(uploaded_file)
+            except Exception as e:
+                logger.error(f"Failed to save uploaded file: {str(e)}")
+
             # For Gemini, upload file to Gemini API
             if provider_name == 'Gemini':
                 file_content = ai_service.upload_file_to_gemini(uploaded_file)
@@ -131,6 +172,10 @@ def chat_message(request):
             **({'gemini_file': file_content} if provider_name == 'Gemini' else {'file_content': file_content})
         )
 
+        # Clean up the saved file immediately after processing
+        if saved_file_path:
+            cleanup_file_after_processing(saved_file_path)
+
         return Response(result)
 
     except ValueError as e:
@@ -161,19 +206,23 @@ def chat_message(request):
         )
 
     except Exception as e:
-        # Other errors
         logger.error(f"Error in chat_message: {e}", exc_info=True)
-        return Response(
-            {
-                'error': str(e),
-                'response': 'An error occurred while processing your message. Please try again.'
-            },
-            status=status.HTTP_500_INTERNAL_SERVER_ERROR
-        )
+        response = {
+            'error': 'Server error',
+            'response': 'An error occurred while processing your message. Please try again.'
+        }
+        if settings.DEBUG:
+            response['traceback'] = traceback.format_exc()
+        return Response(response, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+    finally:
+        # Ensure file cleanup even if an error occurs (no-op if already deleted)
+        if saved_file_path:
+            cleanup_file_after_processing(saved_file_path)
 
 
 @api_view(['POST'])
-@ratelimit(key='user_or_ip', rate='15/m', method='POST', block=True)
+@permission_classes([AllowAny])
+@ratelimit(key='user_or_ip', rate='10/m', method='POST', block=True)
 def get_suggestions(request):
     """
     Get model architecture suggestions based on current workflow.
@@ -249,6 +298,7 @@ def get_suggestions(request):
 
 
 @api_view(['GET'])
+@permission_classes([AllowAny])
 def get_environment_info(request):
     """
     Get environment configuration information.
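
A quick way to exercise the new upload limits in chat_views.py is a short script against a local dev server. This is a hedged sketch, not part of the change set: the chat endpoint path is an assumption (block_manager/urls.py is not in this diff), and it assumes the requests package is installed.

    import io
    import requests  # assumed available; any HTTP client works

    CHAT_URL = "http://localhost:8000/api/v1/chat/message"  # hypothetical path, not shown in this diff

    # A content type outside ALLOWED_FILE_TYPES should be rejected with 400 ("Invalid file type").
    resp = requests.post(
        CHAT_URL,
        data={"message": "describe this file"},
        files={"file": ("payload.bin", io.BytesIO(b"\x00" * 16), "application/octet-stream")},
    )
    assert resp.status_code == 400, resp.text

    # An 11 MB PNG exceeds MAX_FILE_SIZE (10 MB) and should also be rejected.
    big = io.BytesIO(b"\x89PNG" + b"\x00" * (11 * 1024 * 1024))
    resp = requests.post(
        CHAT_URL,
        data={"message": "too big"},
        files={"file": ("big.png", big, "image/png")},
    )
    assert resp.status_code == 400, resp.text
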
diff --git a/project/block_manager/views/export_views.py b/project/block_manager/views/export_views.py
index 7321d1a..0fa2430 100644
--- a/project/block_manager/views/export_views.py
+++ b/project/block_manager/views/export_views.py
@@ -1,9 +1,12 @@
-from rest_framework.decorators import api_view
+from rest_framework.decorators import api_view, permission_classes
 from rest_framework.response import Response
 from rest_framework import status
 from rest_framework.request import Request
+from rest_framework.permissions import AllowAny
 from django.http import HttpResponse
+from django.conf import settings
 from django_ratelimit.decorators import ratelimit
+import logging
 
 from block_manager.serializers import ExportRequestSerializer
 from block_manager.services.tensorflow_codegen import generate_tensorflow_code
@@ -13,10 +16,13 @@
 import zipfile
 import io
 
+logger = logging.getLogger(__name__)
+
 
 @api_view(['POST'])
-@require_authentication  # Require authentication for export
-@ratelimit(key='user', rate='30/h', method='POST', block=True)
+@permission_classes([AllowAny])
+@require_authentication
+@ratelimit(key='user_or_ip', rate='5/m', method='POST', block=True)
 def export_model(request: Request) -> Response:
     """
     Export model code with professional class-based structure.
@@ -173,15 +179,12 @@ def export_model(request: Request) -> Response:
     except Exception as e:
-        # Pass detailed error messages to frontend
         import traceback
-        traceback.print_exc()  # Log to console for debugging
-        return Response(
-            {
-                'error': f'Code generation failed: {str(e)}',
-                'details': str(e),
-                'traceback': traceback.format_exc()
-            },
-            status=status.HTTP_500_INTERNAL_SERVER_ERROR
-        )
+        logger.error(f"Error in export_model: {str(e)}", exc_info=True)
+        response = {
+            'error': 'Code generation failed'
+        }
+        if settings.DEBUG:
+            response['traceback'] = traceback.format_exc()
+        return Response(response, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/project/block_manager/views/maintenance_views.py b/project/block_manager/views/maintenance_views.py
new file mode 100644
index 0000000..a9e7790
--- /dev/null
+++ b/project/block_manager/views/maintenance_views.py
@@ -0,0 +1,185 @@
+"""
+Maintenance endpoints for system administration tasks.
+"""
+import logging
+import time
+import hmac
+from pathlib import Path
+from django.http import JsonResponse
+from django.views.decorators.csrf import csrf_exempt
+from django.views.decorators.http import require_http_methods
+from django.conf import settings
+
+logger = logging.getLogger(__name__)
+
+
+def _cleanup_old_files(retention_hours=None):
+    """
+    Internal function to clean up old uploaded files.
+ + Args: + retention_hours: Number of hours to retain files (default from settings) + + Returns: + dict: Cleanup statistics + """ + if retention_hours is None: + retention_hours = getattr(settings, 'UPLOAD_RETENTION_HOURS', 2) + + retention_seconds = retention_hours * 3600 + upload_dir = Path(getattr(settings, 'TEMP_UPLOAD_DIR', '/tmp/visionforge_uploads')) + + if not upload_dir.exists(): + return { + 'deleted_count': 0, + 'deleted_size_mb': 0, + 'error_count': 0, + 'message': 'Upload directory does not exist' + } + + current_time = time.time() + deleted_count = 0 + deleted_size = 0 + error_count = 0 + + for file_path in upload_dir.rglob('*'): + if not file_path.is_file(): + continue + + try: + file_age = current_time - file_path.stat().st_mtime + + if file_age > retention_seconds: + file_size = file_path.stat().st_size + file_path.unlink() + logger.info(f'Deleted old upload: {file_path.name} (age: {file_age/3600:.1f}h)') + deleted_count += 1 + deleted_size += file_size + + except Exception as e: + error_count += 1 + logger.error(f'Error processing file {file_path}: {str(e)}') + + # Clean up empty directories + for dir_path in sorted(upload_dir.rglob('*'), reverse=True): + if dir_path.is_dir() and not any(dir_path.iterdir()): + try: + dir_path.rmdir() + logger.info(f'Removed empty directory: {dir_path}') + except Exception as e: + logger.error(f'Error removing directory {dir_path}: {str(e)}') + + return { + 'deleted_count': deleted_count, + 'deleted_size_mb': round(deleted_size / 1024 / 1024, 2), + 'error_count': error_count, + 'retention_hours': retention_hours + } + + +@csrf_exempt +@require_http_methods(["POST"]) +def trigger_file_cleanup(request): + """ + Endpoint to trigger file cleanup remotely. + Protected by a secret token to prevent unauthorized access. + + POST /api/v1/maintenance/cleanup-files + Body: {"secret": "your-secret-token"} + + Returns: + 200: Cleanup completed successfully + 401: Unauthorized + """ + import json + + try: + data = json.loads(request.body) + provided_secret = data.get('secret', '') + + # Verify secret token using constant-time comparison to prevent timing attacks + expected_secret = settings.CLEANUP_SECRET_TOKEN + if not expected_secret or not hmac.compare_digest(provided_secret, expected_secret): + logger.warning(f"Unauthorized cleanup attempt from IP {request.META.get('REMOTE_ADDR')}") + return JsonResponse({ + 'error': 'Unauthorized', + 'message': 'Invalid secret token' + }, status=401) + + # Run cleanup + stats = _cleanup_old_files() + logger.info(f"File cleanup completed: {stats['deleted_count']} files, {stats['deleted_size_mb']}MB") + + return JsonResponse({ + 'success': True, + 'message': 'File cleanup completed', + 'stats': stats + }) + + except json.JSONDecodeError: + return JsonResponse({ + 'error': 'Invalid JSON', + 'message': 'Request body must be valid JSON' + }, status=400) + except Exception as e: + logger.error(f"Error in trigger_file_cleanup: {str(e)}", exc_info=True) + return JsonResponse({ + 'error': 'Server error', + 'message': 'An error occurred during cleanup' + }, status=500) + + +@csrf_exempt +@require_http_methods(["GET"]) +def get_upload_stats(request): + """ + Get statistics about uploaded files. 
+ + GET /api/v1/maintenance/upload-stats?secret=your-secret-token + + Returns: + 200: Upload statistics + 401: Unauthorized + """ + try: + provided_secret = request.GET.get('secret', '') + + # Verify secret token using constant-time comparison to prevent timing attacks + expected_secret = settings.CLEANUP_SECRET_TOKEN + if not expected_secret or not hmac.compare_digest(provided_secret, expected_secret): + logger.warning(f"Unauthorized stats access attempt from IP {request.META.get('REMOTE_ADDR')}") + return JsonResponse({ + 'error': 'Unauthorized', + 'message': 'Invalid secret token' + }, status=401) + + from block_manager.utils.file_cleanup import get_upload_directory_size + upload_dir = Path(settings.TEMP_UPLOAD_DIR) + total_size = get_upload_directory_size() + file_count = sum(1 for _ in upload_dir.rglob('*') if _.is_file()) if upload_dir.exists() else 0 + + # Get oldest file age + oldest_age = None + if upload_dir.exists(): + files = [f for f in upload_dir.rglob('*') if f.is_file()] + if files: + oldest_file = min(files, key=lambda f: f.stat().st_mtime) + oldest_age = (time.time() - oldest_file.stat().st_mtime) / 3600 # hours + + return JsonResponse({ + 'success': True, + 'stats': { + 'total_size_mb': round(total_size / 1024 / 1024, 2), + 'file_count': file_count, + 'oldest_file_age_hours': round(oldest_age, 2) if oldest_age else None, + 'retention_hours': settings.UPLOAD_RETENTION_HOURS, + 'upload_directory': str(upload_dir) + } + }) + + except Exception as e: + logger.error(f"Error in get_upload_stats: {str(e)}", exc_info=True) + return JsonResponse({ + 'error': 'Server error', + 'message': 'An error occurred while fetching stats' + }, status=500) diff --git a/project/block_manager/views/validation_views.py b/project/block_manager/views/validation_views.py index 15a4d21..27bddb6 100644 --- a/project/block_manager/views/validation_views.py +++ b/project/block_manager/views/validation_views.py @@ -1,14 +1,18 @@ -from rest_framework.decorators import api_view +from rest_framework.decorators import api_view, permission_classes from rest_framework.response import Response from rest_framework import status -import traceback +from rest_framework.permissions import AllowAny +import logging from block_manager.serializers import SaveArchitectureSerializer from block_manager.services.validation import validate_architecture from block_manager.services.inference import infer_dimensions +logger = logging.getLogger(__name__) + @api_view(['POST']) +@permission_classes([AllowAny]) def validate_model(request): """ Validate model architecture @@ -41,13 +45,12 @@ def validate_model(request): except Exception as e: # Log the error for debugging - print(f"Validation error: {str(e)}") - traceback.print_exc() + logger.error(f"Validation error: {str(e)}", exc_info=True) return Response( { 'isValid': False, - 'errors': [{'message': f'Server error: {str(e)}', 'type': 'error'}], + 'errors': [{'message': 'Server error during validation', 'type': 'error'}], }, status=status.HTTP_500_INTERNAL_SERVER_ERROR ) diff --git a/project/frontend/src/components/ui/chart.tsx b/project/frontend/src/components/ui/chart.tsx index 101c153..9d83eda 100644 --- a/project/frontend/src/components/ui/chart.tsx +++ b/project/frontend/src/components/ui/chart.tsx @@ -45,7 +45,9 @@ function ChartContainer({ >["children"] }) { const uniqueId = useId() - const chartId = `chart-${id || uniqueId.replace(/:/g, "")}` + // Sanitize ID to prevent XSS - only allow alphanumeric, hyphens, and underscores + const baseId = (id || 
uniqueId).replace(/[^a-zA-Z0-9-_]/g, '') + const chartId = `chart-${baseId}` return ( @@ -76,19 +78,26 @@ const ChartStyle = ({ id, config }: { id: string; config: ChartConfig }) => { return null } + // Sanitize id to prevent CSS injection - only allow alphanumeric, hyphens, and underscores + const sanitizedId = id.replace(/[^a-zA-Z0-9-_]/g, '') + return (
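
For completeness, a minimal sketch of triggering the cleanup endpoint by hand, mirroring the call the scheduled workflow at the top of this diff makes every 5 hours. It assumes a local dev server and the requests package; the endpoint path and response keys come from maintenance_urls.py and maintenance_views.py above.

    import os
    import requests  # assumed available; any HTTP client works

    URL = "http://localhost:8000/api/v1/maintenance/cleanup-files"
    # Same value as CLEANUP_SECRET_TOKEN in .env (CLEANUP_SECRET in GitHub Actions).
    secret = os.environ["CLEANUP_SECRET_TOKEN"]

    resp = requests.post(URL, json={"secret": secret}, timeout=30)
    resp.raise_for_status()
    stats = resp.json()["stats"]
    print(f"deleted {stats['deleted_count']} files ({stats['deleted_size_mb']} MB)")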