diff --git a/analyzers/__init__.py b/analyzers/__init__.py new file mode 100644 index 00000000..5040ec7f --- /dev/null +++ b/analyzers/__init__.py @@ -0,0 +1,35 @@ +""" +Codebase Analyzers +================== + +Modules for analyzing existing codebases to detect tech stack, +extract features, and prepare for import into Autocoder. + +Main entry points: +- StackDetector: Detect tech stack and extract routes/endpoints +- extract_features: Transform detection result into Autocoder features +- extract_from_project: One-step detection and feature extraction +""" + +from .base_analyzer import BaseAnalyzer +from .feature_extractor import ( + DetectedFeature, + FeatureExtractionResult, + extract_features, + extract_from_project, + features_to_bulk_create_format, +) +from .stack_detector import StackDetectionResult, StackDetector + +__all__ = [ + # Stack Detection + "StackDetector", + "StackDetectionResult", + "BaseAnalyzer", + # Feature Extraction + "DetectedFeature", + "FeatureExtractionResult", + "extract_features", + "extract_from_project", + "features_to_bulk_create_format", +] diff --git a/analyzers/base_analyzer.py b/analyzers/base_analyzer.py new file mode 100644 index 00000000..9bb31de2 --- /dev/null +++ b/analyzers/base_analyzer.py @@ -0,0 +1,152 @@ +""" +Base Analyzer +============= + +Abstract base class for all stack analyzers. +Each analyzer detects a specific tech stack and extracts relevant information. +""" + +from abc import ABC, abstractmethod +from pathlib import Path +from typing import TypedDict + + +class RouteInfo(TypedDict): + """Information about a detected route.""" + path: str + method: str # GET, POST, PUT, DELETE, etc. + handler: str # Function or component name + file: str # Source file path + + +class ComponentInfo(TypedDict): + """Information about a detected component.""" + name: str + file: str + type: str # page, component, layout, etc. + + +class EndpointInfo(TypedDict): + """Information about an API endpoint.""" + path: str + method: str + handler: str + file: str + description: str # Generated description + + +class AnalysisResult(TypedDict): + """Result of analyzing a codebase with a specific analyzer.""" + stack_name: str + confidence: float # 0.0 to 1.0 + routes: list[RouteInfo] + components: list[ComponentInfo] + endpoints: list[EndpointInfo] + entry_point: str | None + config_files: list[str] + dependencies: dict[str, str] # name: version + metadata: dict # Additional stack-specific info + + +class BaseAnalyzer(ABC): + """ + Abstract base class for stack analyzers. + + Each analyzer is responsible for: + 1. Detecting if a codebase uses its stack (can_analyze) + 2. Extracting routes, components, and endpoints (analyze) + """ + + def __init__(self, project_dir: Path): + """ + Initialize the analyzer. + + Args: + project_dir: Path to the project directory to analyze + """ + self.project_dir = project_dir + + @property + @abstractmethod + def stack_name(self) -> str: + """The name of the stack this analyzer handles (e.g., 'react', 'nextjs').""" + pass + + @abstractmethod + def can_analyze(self) -> tuple[bool, float]: + """ + Check if this analyzer can handle the codebase. + + Returns: + (can_handle, confidence) where: + - can_handle: True if the analyzer recognizes the stack + - confidence: 0.0 to 1.0 indicating how confident the detection is + """ + pass + + @abstractmethod + def analyze(self) -> AnalysisResult: + """ + Analyze the codebase and extract information. + + Returns: + AnalysisResult with detected routes, components, endpoints, etc. 
+ """ + pass + + def _read_file_safe(self, path: Path, max_size: int = 1024 * 1024) -> str | None: + """ + Safely read a file, returning None if it doesn't exist or is too large. + + Args: + path: Path to the file + max_size: Maximum file size in bytes (default 1MB) + + Returns: + File contents or None + """ + if not path.exists(): + return None + + try: + if path.stat().st_size > max_size: + return None + return path.read_text(encoding="utf-8") + except (OSError, UnicodeDecodeError): + return None + + def _find_files(self, pattern: str, exclude_dirs: list[str] | None = None) -> list[Path]: + """ + Find files matching a glob pattern, excluding common non-source directories. + + Args: + pattern: Glob pattern (e.g., "**/*.tsx") + exclude_dirs: Additional directories to exclude + + Returns: + List of matching file paths + """ + default_exclude = [ + "node_modules", + "venv", + ".venv", + "__pycache__", + ".git", + "dist", + "build", + ".next", + ".nuxt", + "coverage", + ] + + if exclude_dirs: + default_exclude.extend(exclude_dirs) + + results = [] + for path in self.project_dir.glob(pattern): + # Check if any parent is in exclude list + parts = path.relative_to(self.project_dir).parts + if not any(part in default_exclude for part in parts): + results.append(path) + + return results diff --git a/analyzers/feature_extractor.py b/analyzers/feature_extractor.py new file mode 100644 index 00000000..f528d535 --- /dev/null +++ b/analyzers/feature_extractor.py @@ -0,0 +1,472 @@ +""" +Feature Extractor +================= + +Transforms detected routes, endpoints, and components into Autocoder features. +Each feature is marked as pending (passes=False) for verification. + +Generates features in the format expected by feature_create_bulk MCP tool. +""" + +from pathlib import Path +from typing import TypedDict + +from .stack_detector import StackDetectionResult + +# Feature extraction thresholds +MIN_FEATURES_BEFORE_COMPONENTS = 10 +MAX_COMPONENT_FEATURES = 10 + + +class DetectedFeature(TypedDict): + """A feature extracted from codebase analysis.""" + category: str + name: str + description: str + steps: list[str] + source_type: str # "route", "endpoint", "component", "inferred" + source_file: str | None + confidence: float # 0.0 to 1.0 + + +class FeatureExtractionResult(TypedDict): + """Result of feature extraction.""" + features: list[DetectedFeature] + count: int + by_category: dict[str, int] + summary: str + + +def _get_base_stack(stack: str | None) -> str | None: + """Extract base stack name from variants like 'react-vite' -> 'react'.""" + if not stack: + return None + return stack.split("-")[0].lower() + + +def _route_to_feature_name(path: str, method: str = "GET") -> str: + """ + Convert a route path to a human-readable feature name. 
+ + Examples: + "/" -> "View home page" + "/users" -> "View users page" + "/users/:id" -> "View user details page" + "/api/users" -> "API: List users" + """ + # Clean up path + path = path.strip("/") + + if not path: + return "View home page" + + # Handle API routes + if path.startswith("api/"): + api_path = path[4:] # Remove "api/" + parts = api_path.split("/") + + # Handle dynamic segments + parts = [p for p in parts if not p.startswith(":") and not p.startswith("[")] + + if not parts: + return "API: Root endpoint" + + resource = parts[-1].replace("-", " ").replace("_", " ").title() + + if method == "GET": + if any(p.startswith(":") or p.startswith("[") for p in api_path.split("/")): + return f"API: Get {resource} details" + return f"API: List {resource}" + elif method == "POST": + return f"API: Create {resource}" + elif method == "PUT" or method == "PATCH": + return f"API: Update {resource}" + elif method == "DELETE": + return f"API: Delete {resource}" + else: + return f"API: {resource} endpoint" + + # Handle page routes + parts = path.split("/") + + # Handle dynamic segments (remove them from naming) + clean_parts = [p for p in parts if not p.startswith(":") and not p.startswith("[")] + + if not clean_parts: + return "View dynamic page" + + # Build name from path parts + page_name = " ".join(p.replace("-", " ").replace("_", " ") for p in clean_parts) + page_name = page_name.title() + + # Check if it's a detail page (has dynamic segment) + has_dynamic = any(p.startswith(":") or p.startswith("[") for p in parts) + + if has_dynamic: + return f"View {page_name} details page" + + return f"View {page_name} page" + + +def _generate_page_steps(path: str, stack: str | None) -> list[str]: + """Generate test steps for a page route.""" + clean_path = path + + # Replace dynamic segments with example values + if ":id" in clean_path or "[id]" in clean_path: + clean_path = clean_path.replace(":id", "123").replace("[id]", "123") + + # Generate steps + steps = [ + f"Navigate to {clean_path}", + "Verify the page loads without errors", + "Verify the page title and main content are visible", + ] + + # Add stack-specific checks (normalize to handle variants like react-vite) + base_stack = _get_base_stack(stack) + if base_stack in ("react", "nextjs", "vue", "nuxt", "angular"): + steps.append("Verify no console errors in browser developer tools") + steps.append("Verify responsive layout at mobile and desktop widths") + + return steps + + +def _generate_api_steps(path: str, method: str) -> list[str]: + """Generate test steps for an API endpoint.""" + # Replace dynamic segments with example values + test_path = path.replace(":id", "123").replace("[id]", "123") + + steps = [] + + if method == "GET": + steps = [ + f"Send GET request to {test_path}", + "Verify response status code is 200", + "Verify response body contains expected data structure", + ] + elif method == "POST": + steps = [ + f"Send POST request to {test_path} with valid payload", + "Verify response status code is 201 (created)", + "Verify response contains the created resource", + f"Send POST request to {test_path} with invalid payload", + "Verify response status code is 400 (bad request)", + ] + elif method in ("PUT", "PATCH"): + steps = [ + f"Send {method} request to {test_path} with valid payload", + "Verify response status code is 200", + "Verify response contains the updated resource", + "Verify the resource was actually updated", + ] + elif method == "DELETE": + steps = [ + f"Send DELETE request to {test_path}", + "Verify response status code 
is 200 or 204", + "Verify the resource no longer exists", + ] + else: + steps = [ + f"Send {method} request to {test_path}", + "Verify response status code is appropriate", + ] + + return steps + + +def _generate_component_steps(name: str, comp_type: str) -> list[str]: + """Generate test steps for a component.""" + if comp_type == "page": + return [ + f"Navigate to the {name} page", + "Verify all UI elements render correctly", + "Test user interactions (buttons, forms, etc.)", + "Verify data is fetched and displayed", + ] + elif comp_type == "model": + return [ + f"Verify {name} model schema matches expected fields", + "Test CRUD operations on the model", + "Verify validation rules work correctly", + ] + elif comp_type == "middleware": + return [ + f"Verify {name} middleware processes requests correctly", + "Test edge cases and error handling", + ] + elif comp_type == "service": + return [ + f"Verify {name} service methods work correctly", + "Test error handling in service layer", + ] + else: + return [ + f"Verify {name} component renders correctly", + "Test component props and state", + "Verify component interactions work", + ] + + +def extract_features(detection_result: StackDetectionResult) -> FeatureExtractionResult: + """ + Extract features from a stack detection result. + + Converts routes, endpoints, and components into Autocoder features. + Each feature is ready to be created via feature_create_bulk. + + Args: + detection_result: Result from StackDetector.detect() + + Returns: + FeatureExtractionResult with list of features + """ + features: list[DetectedFeature] = [] + primary_frontend = detection_result.get("primary_frontend") + + # Track unique features to avoid duplicates + seen_features: set[str] = set() + + # Extract features from routes (frontend pages) + for route in detection_result.get("all_routes", []): + path = route.get("path", "") + method = route.get("method", "GET") + source_file = route.get("file") + + feature_name = _route_to_feature_name(path, method) + + # Skip duplicates + feature_key = f"route:{path}:{method}" + if feature_key in seen_features: + continue + seen_features.add(feature_key) + + features.append({ + "category": "Navigation", + "name": feature_name, + "description": f"User can navigate to and view the {path or '/'} page. 
The page should load correctly and display the expected content.", + "steps": _generate_page_steps(path, primary_frontend), + "source_type": "route", + "source_file": source_file, + "confidence": 0.8, + }) + + # Extract features from API endpoints + for endpoint in detection_result.get("all_endpoints", []): + path = endpoint.get("path", "") + method = endpoint.get("method", "ALL") + source_file = endpoint.get("file") + + # Handle ALL method by creating GET endpoint + if method == "ALL": + method = "GET" + + # Ensure API endpoints get API-style naming + name_path = path + # Avoid double-prefixing: check for "api" or "api/" at start + stripped = name_path.lstrip("/") + if stripped != "api" and not stripped.startswith("api/"): + name_path = f"/api{name_path if name_path.startswith('/') else '/' + name_path}" + feature_name = _route_to_feature_name(name_path, method) + + # Skip duplicates + feature_key = f"endpoint:{path}:{method}" + if feature_key in seen_features: + continue + seen_features.add(feature_key) + + # Determine category based on path + category = "API" + path_lower = path.lower() + if "auth" in path_lower or "login" in path_lower or "register" in path_lower: + category = "Authentication" + elif "user" in path_lower or "profile" in path_lower: + category = "User Management" + elif "admin" in path_lower: + category = "Administration" + + features.append({ + "category": category, + "name": feature_name, + "description": f"{method} endpoint at {path}. Should handle requests appropriately and return correct responses.", + "steps": _generate_api_steps(path, method), + "source_type": "endpoint", + "source_file": source_file, + "confidence": 0.85, + }) + + # Extract features from components (with lower priority) + component_features: list[DetectedFeature] = [] + for component in detection_result.get("all_components", []): + name = component.get("name", "") + comp_type = component.get("type", "component") + source_file = component.get("file") + + # Skip common/generic components + skip_names = [ + "index", "app", "main", "layout", "_app", "_document", + "header", "footer", "sidebar", "navbar", "nav", + "loading", "error", "not-found", "404", "500", + ] + if name.lower() in skip_names: + continue + + # Skip duplicates + feature_key = f"component:{name}:{comp_type}" + if feature_key in seen_features: + continue + seen_features.add(feature_key) + + # Only include significant components + if comp_type in ("page", "view", "model", "service"): + clean_name = name.replace("-", " ").replace("_", " ").title() + + # Determine category + if comp_type == "model": + category = "Data Models" + elif comp_type == "service": + category = "Services" + elif comp_type in ("page", "view"): + category = "Pages" + else: + category = "Components" + + component_features.append({ + "category": category, + "name": f"{clean_name} {comp_type.title()}", + "description": f"The {clean_name} {comp_type} should function correctly and handle all expected use cases.", + "steps": _generate_component_steps(name, comp_type), + "source_type": "component", + "source_file": source_file, + "confidence": 0.6, # Lower confidence for component-based features + }) + + # Add component features if we don't have many from routes/endpoints + if len(features) < MIN_FEATURES_BEFORE_COMPONENTS: + features.extend(component_features[:MAX_COMPONENT_FEATURES]) + + # Add basic infrastructure features + basic_features = _generate_basic_features(detection_result) + features.extend(basic_features) + + # Count by category + by_category: dict[str, 
int] = {} + for f in features: + cat = f["category"] + by_category[cat] = by_category.get(cat, 0) + 1 + + # Build summary + summary = f"Extracted {len(features)} features from {len(detection_result.get('detected_stacks', []))} detected stack(s)" + + return { + "features": features, + "count": len(features), + "by_category": by_category, + "summary": summary, + } + + +def _generate_basic_features(detection_result: StackDetectionResult) -> list[DetectedFeature]: + """Generate basic infrastructure features based on detected stack.""" + features: list[DetectedFeature] = [] + + primary_frontend = detection_result.get("primary_frontend") + primary_backend = detection_result.get("primary_backend") + + # Normalize stack names to handle variants like react-vite, fastify, etc. + frontend_base = _get_base_stack(primary_frontend) + backend_base = _get_base_stack(primary_backend) + + # Application startup feature + if primary_frontend or primary_backend: + features.append({ + "category": "Infrastructure", + "name": "Application starts successfully", + "description": "The application should start without errors and be accessible.", + "steps": [ + "Run the application start command", + "Verify the server starts without errors", + "Access the application URL", + "Verify the main page loads", + ], + "source_type": "inferred", + "source_file": None, + "confidence": 1.0, + }) + + # Frontend-specific features (handle variants like react-vite, vue-cli) + if frontend_base in ("react", "nextjs", "vue", "nuxt", "angular"): + features.append({ + "category": "Infrastructure", + "name": "No console errors on page load", + "description": "The application should load without JavaScript errors in the browser console.", + "steps": [ + "Open browser developer tools", + "Navigate to the home page", + "Check the console for errors", + "Navigate to other pages and repeat", + ], + "source_type": "inferred", + "source_file": None, + "confidence": 0.9, + }) + + # Backend-specific features (expanded list for all backend stacks) + if backend_base in ("express", "fastify", "koa", "nodejs", "node", + "fastapi", "django", "flask", "nestjs", "python"): + features.append({ + "category": "Infrastructure", + "name": "Health check endpoint responds", + "description": "The API should have a health check endpoint that responds correctly.", + "steps": [ + "Send GET request to /health or /api/health", + "Verify response status is 200", + "Verify response indicates healthy status", + ], + "source_type": "inferred", + "source_file": None, + "confidence": 0.7, + }) + + return features + + +def features_to_bulk_create_format(features: list[DetectedFeature]) -> list[dict]: + """ + Convert extracted features to the format expected by feature_create_bulk. + + Removes source_type, source_file, and confidence fields. + Returns a list ready for MCP tool consumption. + + Args: + features: List of DetectedFeature objects + + Returns: + List of dicts with category, name, description, steps + """ + return [ + { + "category": f["category"], + "name": f["name"], + "description": f["description"], + "steps": f["steps"], + } + for f in features + ] + + +def extract_from_project(project_dir: str | Path) -> FeatureExtractionResult: + """ + Convenience function to detect stack and extract features in one step. 
+ + Args: + project_dir: Path to the project directory + + Returns: + FeatureExtractionResult with extracted features + """ + from .stack_detector import StackDetector + + detector = StackDetector(Path(project_dir)) + detection_result = detector.detect() + return extract_features(detection_result) diff --git a/analyzers/node_analyzer.py b/analyzers/node_analyzer.py new file mode 100644 index 00000000..4bfeb68d --- /dev/null +++ b/analyzers/node_analyzer.py @@ -0,0 +1,354 @@ +""" +Node.js Analyzer +================ + +Detects Node.js/Express/NestJS projects. +Extracts API endpoints from Express router definitions. +""" + +import json +import re +from pathlib import Path + +from .base_analyzer import ( + AnalysisResult, + BaseAnalyzer, + ComponentInfo, + EndpointInfo, + RouteInfo, +) + + +class NodeAnalyzer(BaseAnalyzer): + """Analyzer for Node.js/Express/NestJS projects.""" + + @property + def stack_name(self) -> str: + return self._detected_stack + + def __init__(self, project_dir: Path): + super().__init__(project_dir) + self._detected_stack = "nodejs" # Default, may change to "express" or "nestjs" + + def can_analyze(self) -> tuple[bool, float]: + """Detect if this is a Node.js/Express/NestJS project.""" + confidence = 0.0 + + # Check package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + deps = { + **data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + + # Check for NestJS first (more specific) + if "@nestjs/core" in deps: + self._detected_stack = "nestjs" + confidence = 0.95 + return True, confidence + + # Check for Express + if "express" in deps: + self._detected_stack = "express" + confidence = 0.85 + + # Bonus for having typical Express structure + if (self.project_dir / "routes").exists() or \ + (self.project_dir / "src" / "routes").exists(): + confidence = 0.9 + + return True, confidence + + # Check for Fastify + if "fastify" in deps: + self._detected_stack = "fastify" + confidence = 0.85 + return True, confidence + + # Check for Koa + if "koa" in deps: + self._detected_stack = "koa" + confidence = 0.85 + return True, confidence + + # Generic Node.js (has node-specific files but no specific framework) + if "type" in data and data["type"] == "module": + self._detected_stack = "nodejs" + confidence = 0.5 + return True, confidence + + except (json.JSONDecodeError, OSError): + pass + + # Check for common Node.js files + common_files = ["app.js", "server.js", "index.js", "src/app.js", "src/server.js"] + for file in common_files: + if (self.project_dir / file).exists(): + self._detected_stack = "nodejs" + return True, 0.5 + + return False, 0.0 + + def analyze(self) -> AnalysisResult: + """Analyze the Node.js project.""" + routes: list[RouteInfo] = [] + components: list[ComponentInfo] = [] + endpoints: list[EndpointInfo] = [] + config_files: list[str] = [] + dependencies: dict[str, str] = {} + entry_point: str | None = None + + # Load dependencies from package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + dependencies = { + **data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + + # Detect entry point from package.json + entry_point = data.get("main") + if not entry_point: + scripts = data.get("scripts", {}) + start_script = scripts.get("start", "") + if "node" in start_script: + # Extract file from "node src/index.js" etc. 
+ match = re.search(r"node\s+(\S+)", start_script) + if match: + entry_point = match.group(1) + + except (json.JSONDecodeError, OSError): + pass + + # Collect config files + for config_name in [ + "tsconfig.json", ".eslintrc.js", ".eslintrc.json", + "jest.config.js", "nodemon.json", ".env.example", + ]: + if (self.project_dir / config_name).exists(): + config_files.append(config_name) + + # Detect entry point if not found + if not entry_point: + for candidate in ["src/index.js", "src/index.ts", "src/app.js", "src/app.ts", + "index.js", "app.js", "server.js"]: + if (self.project_dir / candidate).exists(): + entry_point = candidate + break + + # Extract endpoints based on stack type + if self._detected_stack == "express": + endpoints = self._extract_express_routes() + elif self._detected_stack == "nestjs": + endpoints = self._extract_nestjs_routes() + elif self._detected_stack == "fastify": + endpoints = self._extract_fastify_routes() + else: + # Generic Node.js - try Express patterns + endpoints = self._extract_express_routes() + + # Extract middleware/components + components = self._extract_components() + + return { + "stack_name": self._detected_stack, + "confidence": 0.85, + "routes": routes, + "components": components, + "endpoints": endpoints, + "entry_point": entry_point, + "config_files": config_files, + "dependencies": dependencies, + "metadata": { + "has_typescript": "typescript" in dependencies, + "has_prisma": "prisma" in dependencies or "@prisma/client" in dependencies, + "has_mongoose": "mongoose" in dependencies, + "has_sequelize": "sequelize" in dependencies, + }, + } + + def _extract_express_routes(self) -> list[EndpointInfo]: + """Extract routes from Express router definitions.""" + endpoints: list[EndpointInfo] = [] + + # Find route files + route_files = ( + self._find_files("**/routes/**/*.js") + + self._find_files("**/routes/**/*.ts") + + self._find_files("**/router/**/*.js") + + self._find_files("**/router/**/*.ts") + + self._find_files("**/controllers/**/*.js") + + self._find_files("**/controllers/**/*.ts") + ) + + # Also check main files + for main_file in ["app.js", "app.ts", "server.js", "server.ts", + "src/app.js", "src/app.ts", "index.js", "index.ts"]: + main_path = self.project_dir / main_file + if main_path.exists(): + route_files.append(main_path) + + # Pattern for Express routes + # router.get('/path', handler) + # app.post('/path', handler) + route_pattern = re.compile( + r'(?:router|app)\.(get|post|put|patch|delete|all)\s*\(\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + for file in route_files: + content = self._read_file_safe(file) + if content is None: + continue + + for match in route_pattern.finditer(content): + method = match.group(1).upper() + path = match.group(2) + + endpoints.append({ + "path": path, + "method": method, + "handler": "handler", + "file": str(file.relative_to(self.project_dir)), + "description": f"{method} {path}", + }) + + return endpoints + + def _extract_nestjs_routes(self) -> list[EndpointInfo]: + """Extract routes from NestJS controllers.""" + endpoints: list[EndpointInfo] = [] + + # Find controller files + controller_files = ( + self._find_files("**/*.controller.ts") + + self._find_files("**/*.controller.js") + ) + + # Pattern for NestJS decorators + # @Get('/path'), @Post(), etc. 
+ decorator_pattern = re.compile( + r'@(Get|Post|Put|Patch|Delete|All)\s*\(\s*["\']?([^"\')\s]*)["\']?\s*\)', + re.IGNORECASE + ) + + # Pattern for controller path + controller_pattern = re.compile( + r'@Controller\s*\(\s*["\']?([^"\')\s]*)["\']?\s*\)', + re.IGNORECASE + ) + + for file in controller_files: + content = self._read_file_safe(file) + if content is None: + continue + + # Get controller base path (normalize to avoid double slashes) + controller_match = controller_pattern.search(content) + base_path = "" + if controller_match: + base_path = "/" + controller_match.group(1).lstrip("/") + + for match in decorator_pattern.finditer(content): + method = match.group(1).upper() + path = match.group(2) or "" + + full_path = base_path + if path: + full_path = f"{base_path}/{path}".replace("//", "/") + + endpoints.append({ + "path": full_path or "/", + "method": method, + "handler": "controller", + "file": str(file.relative_to(self.project_dir)), + "description": f"{method} {full_path or '/'}", + }) + + return endpoints + + def _extract_fastify_routes(self) -> list[EndpointInfo]: + """Extract routes from Fastify route definitions.""" + endpoints: list[EndpointInfo] = [] + + # Find route files + route_files = ( + self._find_files("**/routes/**/*.js") + + self._find_files("**/routes/**/*.ts") + + self._find_files("**/*.routes.js") + + self._find_files("**/*.routes.ts") + ) + + # Pattern for Fastify routes + # fastify.get('/path', handler) + route_pattern = re.compile( + r'(?:fastify|server|app)\.(get|post|put|patch|delete|all)\s*\(\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + for file in route_files: + content = self._read_file_safe(file) + if content is None: + continue + + for match in route_pattern.finditer(content): + method = match.group(1).upper() + path = match.group(2) + + endpoints.append({ + "path": path, + "method": method, + "handler": "handler", + "file": str(file.relative_to(self.project_dir)), + "description": f"{method} {path}", + }) + + return endpoints + + def _extract_components(self) -> list[ComponentInfo]: + """Extract middleware and service components.""" + components: list[ComponentInfo] = [] + + # Find middleware files + middleware_files = self._find_files("**/middleware/**/*.js") + \ + self._find_files("**/middleware/**/*.ts") + + for file in middleware_files: + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "middleware", + }) + + # Find service files + service_files = self._find_files("**/services/**/*.js") + \ + self._find_files("**/services/**/*.ts") + \ + self._find_files("**/*.service.js") + \ + self._find_files("**/*.service.ts") + + for file in service_files: + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "service", + }) + + # Find model files + model_files = self._find_files("**/models/**/*.js") + \ + self._find_files("**/models/**/*.ts") + \ + self._find_files("**/*.model.js") + \ + self._find_files("**/*.model.ts") + + for file in model_files: + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "model", + }) + + return components diff --git a/analyzers/python_analyzer.py b/analyzers/python_analyzer.py new file mode 100644 index 00000000..499cea11 --- /dev/null +++ b/analyzers/python_analyzer.py @@ -0,0 +1,395 @@ +""" +Python Analyzer +=============== + +Detects FastAPI, Django, and Flask projects. +Extracts API endpoints from route/view definitions. 
+""" + +import re +from pathlib import Path + +from .base_analyzer import ( + AnalysisResult, + BaseAnalyzer, + ComponentInfo, + EndpointInfo, + RouteInfo, +) + + +class PythonAnalyzer(BaseAnalyzer): + """Analyzer for FastAPI, Django, and Flask projects.""" + + @property + def stack_name(self) -> str: + return self._detected_stack + + def __init__(self, project_dir: Path): + super().__init__(project_dir) + self._detected_stack = "python" # Default, may change + + def can_analyze(self) -> tuple[bool, float]: + """Detect if this is a Python web framework project.""" + confidence = 0.0 + + # Check for Django first + if (self.project_dir / "manage.py").exists(): + self._detected_stack = "django" + confidence = 0.95 + return True, confidence + + # Check requirements.txt + requirements = self.project_dir / "requirements.txt" + if requirements.exists(): + try: + content = requirements.read_text().lower() + + if "fastapi" in content: + self._detected_stack = "fastapi" + confidence = 0.9 + return True, confidence + + if "flask" in content: + self._detected_stack = "flask" + confidence = 0.85 + return True, confidence + + if "django" in content: + self._detected_stack = "django" + confidence = 0.85 + return True, confidence + + except OSError: + pass + + # Check pyproject.toml + pyproject = self.project_dir / "pyproject.toml" + if pyproject.exists(): + try: + content = pyproject.read_text().lower() + + if "fastapi" in content: + self._detected_stack = "fastapi" + confidence = 0.9 + return True, confidence + + if "flask" in content: + self._detected_stack = "flask" + confidence = 0.85 + return True, confidence + + if "django" in content: + self._detected_stack = "django" + confidence = 0.85 + return True, confidence + + except OSError: + pass + + # Check for common FastAPI patterns + main_py = self.project_dir / "main.py" + if main_py.exists(): + main_content = self._read_file_safe(main_py) + if main_content and "from fastapi import" in main_content: + self._detected_stack = "fastapi" + return True, 0.9 + + # Check for Flask patterns + app_py = self.project_dir / "app.py" + if app_py.exists(): + app_content = self._read_file_safe(app_py) + if app_content and "from flask import" in app_content: + self._detected_stack = "flask" + return True, 0.85 + + return False, 0.0 + + def analyze(self) -> AnalysisResult: + """Analyze the Python project.""" + routes: list[RouteInfo] = [] + components: list[ComponentInfo] = [] + endpoints: list[EndpointInfo] = [] + config_files: list[str] = [] + dependencies: dict[str, str] = {} + entry_point: str | None = None + + # Load dependencies from requirements.txt + requirements = self.project_dir / "requirements.txt" + if requirements.exists(): + try: + for line in requirements.read_text().splitlines(): + line = line.strip() + if line and not line.startswith("#"): + # Parse package==version or package>=version etc. 
+ match = re.match(r"([a-zA-Z0-9_-]+)(?:[=<>!~]+(.+))?", line) + if match: + dependencies[match.group(1)] = match.group(2) or "*" + except OSError: + pass + + # Collect config files + for config_name in [ + "pyproject.toml", "setup.py", "setup.cfg", + "requirements.txt", "requirements-dev.txt", + ".env.example", "alembic.ini", "pytest.ini", + ]: + if (self.project_dir / config_name).exists(): + config_files.append(config_name) + + # Extract endpoints based on framework + if self._detected_stack == "fastapi": + endpoints = self._extract_fastapi_routes() + entry_point = "main.py" + elif self._detected_stack == "django": + endpoints = self._extract_django_routes() + entry_point = "manage.py" + elif self._detected_stack == "flask": + endpoints = self._extract_flask_routes() + entry_point = "app.py" + + # Find entry point if not set + if not entry_point or not (self.project_dir / entry_point).exists(): + for candidate in ["main.py", "app.py", "server.py", "run.py", "src/main.py"]: + if (self.project_dir / candidate).exists(): + entry_point = candidate + break + + # Extract components (models, services, etc.) + components = self._extract_components() + + return { + "stack_name": self._detected_stack, + "confidence": 0.85, + "routes": routes, + "components": components, + "endpoints": endpoints, + "entry_point": entry_point, + "config_files": config_files, + "dependencies": dependencies, + "metadata": { + "has_sqlalchemy": "sqlalchemy" in dependencies, + "has_alembic": "alembic" in dependencies, + "has_pytest": "pytest" in dependencies, + "has_celery": "celery" in dependencies, + }, + } + + def _extract_fastapi_routes(self) -> list[EndpointInfo]: + """Extract routes from FastAPI decorators.""" + endpoints: list[EndpointInfo] = [] + + # Find Python files + py_files = self._find_files("**/*.py") + + # Pattern for FastAPI routes + # @app.get("/path") + # @router.post("/path") + route_pattern = re.compile( + r'@(?:app|router)\.(get|post|put|patch|delete)\s*\(\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + # Pattern for APIRouter prefix + router_prefix_pattern = re.compile( + r'APIRouter\s*\([^)]*prefix\s*=\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + for file in py_files: + content = self._read_file_safe(file) + if content is None: + continue + + # Skip if not a route file + if "@app." not in content and "@router." 
not in content: + continue + + # Try to find router prefix + prefix = "" + prefix_match = router_prefix_pattern.search(content) + if prefix_match: + prefix = prefix_match.group(1) + + for match in route_pattern.finditer(content): + method = match.group(1).upper() + path = match.group(2) + + full_path = prefix + path if prefix else path + + endpoints.append({ + "path": full_path, + "method": method, + "handler": "handler", + "file": str(file.relative_to(self.project_dir)), + "description": f"{method} {full_path}", + }) + + return endpoints + + def _extract_django_routes(self) -> list[EndpointInfo]: + """Extract routes from Django URL patterns.""" + endpoints: list[EndpointInfo] = [] + + # Find urls.py files + url_files = self._find_files("**/urls.py") + + # Pattern for Django URL patterns + # path('api/users/', views.user_list) + # path('api/users//', views.user_detail) + path_pattern = re.compile( + r'path\s*\(\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + # Pattern for re_path + re_path_pattern = re.compile( + r're_path\s*\(\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + for file in url_files: + content = self._read_file_safe(file) + if content is None: + continue + + for match in path_pattern.finditer(content): + path = "/" + match.group(1).rstrip("/") + if path == "/": + path = "/" + + # Django uses for params, convert to :name + path = re.sub(r"<\w+:(\w+)>", r":\1", path) + path = re.sub(r"<(\w+)>", r":\1", path) + + endpoints.append({ + "path": path, + "method": "ALL", # Django views typically handle multiple methods + "handler": "view", + "file": str(file.relative_to(self.project_dir)), + "description": f"Django view at {path}", + }) + + for match in re_path_pattern.finditer(content): + # re_path uses regex, just record the pattern + path = "/" + match.group(1) + + endpoints.append({ + "path": path, + "method": "ALL", + "handler": "view", + "file": str(file.relative_to(self.project_dir)), + "description": "Django regex route", + }) + + return endpoints + + def _extract_flask_routes(self) -> list[EndpointInfo]: + """Extract routes from Flask decorators.""" + endpoints: list[EndpointInfo] = [] + + # Find Python files + py_files = self._find_files("**/*.py") + + # Pattern for Flask routes + # @app.route('/path', methods=['GET', 'POST']) + # @bp.route('/path') + route_pattern = re.compile( + r'@(?:app|bp|blueprint)\s*\.\s*route\s*\(\s*["\']([^"\']+)["\'](?:\s*,\s*methods\s*=\s*\[([^\]]+)\])?', + re.IGNORECASE + ) + + # Pattern for Blueprint prefix + blueprint_pattern = re.compile( + r'Blueprint\s*\(\s*[^,]+\s*,\s*[^,]+\s*(?:,\s*url_prefix\s*=\s*["\']([^"\']+)["\'])?', + re.IGNORECASE + ) + + for file in py_files: + content = self._read_file_safe(file) + if content is None: + continue + + # Skip if not a route file + if "@app." not in content and "@bp." 
not in content and "@blueprint" not in content.lower(): + continue + + # Try to find blueprint prefix + prefix = "" + prefix_match = blueprint_pattern.search(content) + if prefix_match and prefix_match.group(1): + prefix = prefix_match.group(1) + + for match in route_pattern.finditer(content): + path = match.group(1) + methods_str = match.group(2) + + full_path = prefix + path if prefix else path + + # Parse methods + methods = ["GET"] # Default + if methods_str: + # Parse ['GET', 'POST'] format + methods = re.findall(r"['\"](\w+)['\"]", methods_str) + + for method in methods: + endpoints.append({ + "path": full_path, + "method": method.upper(), + "handler": "view", + "file": str(file.relative_to(self.project_dir)), + "description": f"{method.upper()} {full_path}", + }) + + return endpoints + + def _extract_components(self) -> list[ComponentInfo]: + """Extract models, services, and other components.""" + components: list[ComponentInfo] = [] + + # Find model files + model_files = ( + self._find_files("**/models.py") + + self._find_files("**/models/**/*.py") + + self._find_files("**/*_model.py") + ) + + for file in model_files: + if file.name != "__init__.py": + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "model", + }) + + # Find view/controller files + view_files = ( + self._find_files("**/views.py") + + self._find_files("**/views/**/*.py") + + self._find_files("**/routers/**/*.py") + + self._find_files("**/api/**/*.py") + ) + + for file in view_files: + if file.name != "__init__.py": + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "view", + }) + + # Find service files + service_files = ( + self._find_files("**/services/**/*.py") + + self._find_files("**/*_service.py") + ) + + for file in service_files: + if file.name != "__init__.py": + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "service", + }) + + return components diff --git a/analyzers/react_analyzer.py b/analyzers/react_analyzer.py new file mode 100644 index 00000000..1a345ac1 --- /dev/null +++ b/analyzers/react_analyzer.py @@ -0,0 +1,407 @@ +""" +React Analyzer +============== + +Detects React, Vite, and Next.js projects. +Extracts routes from React Router and Next.js file-based routing. 
+""" + +import json +import re +from pathlib import Path + +from .base_analyzer import ( + AnalysisResult, + BaseAnalyzer, + ComponentInfo, + EndpointInfo, + RouteInfo, +) + + +class ReactAnalyzer(BaseAnalyzer): + """Analyzer for React, Vite, and Next.js projects.""" + + @property + def stack_name(self) -> str: + return self._detected_stack + + def __init__(self, project_dir: Path): + super().__init__(project_dir) + self._detected_stack = "react" # Default, may change to "nextjs" + + def can_analyze(self) -> tuple[bool, float]: + """Detect if this is a React/Next.js project.""" + confidence = 0.0 + + # Check package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + deps = { + **data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + + # Check for Next.js first (more specific) + if "next" in deps: + self._detected_stack = "nextjs" + confidence = 0.95 + return True, confidence + + # Check for React + if "react" in deps: + confidence = 0.85 + + # Check for Vite + if "vite" in deps: + self._detected_stack = "react-vite" + confidence = 0.9 + + # Check for Create React App + if "react-scripts" in deps: + self._detected_stack = "react-cra" + confidence = 0.9 + + return True, confidence + + except (json.JSONDecodeError, OSError): + pass + + # Check for Next.js config + if (self.project_dir / "next.config.js").exists() or \ + (self.project_dir / "next.config.mjs").exists() or \ + (self.project_dir / "next.config.ts").exists(): + self._detected_stack = "nextjs" + return True, 0.95 + + # Check for common React files + if (self.project_dir / "src" / "App.tsx").exists() or \ + (self.project_dir / "src" / "App.jsx").exists(): + return True, 0.7 + + return False, 0.0 + + def analyze(self) -> AnalysisResult: + """Analyze the React/Next.js project.""" + routes: list[RouteInfo] = [] + components: list[ComponentInfo] = [] + endpoints: list[EndpointInfo] = [] + config_files: list[str] = [] + dependencies: dict[str, str] = {} + entry_point: str | None = None + + # Load dependencies from package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + dependencies = { + **data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + except (json.JSONDecodeError, OSError): + pass + + # Collect config files + for config_name in [ + "next.config.js", "next.config.mjs", "next.config.ts", + "vite.config.js", "vite.config.ts", + "tsconfig.json", "tailwind.config.js", "tailwind.config.ts", + ]: + if (self.project_dir / config_name).exists(): + config_files.append(config_name) + + # Detect entry point (check all common extensions) + # Note: app/layout.ts not included - Next.js only supports .js, .jsx, .tsx for layouts + for entry in [ + "src/main.tsx", "src/main.ts", "src/main.jsx", "src/main.js", + "src/index.tsx", "src/index.ts", "src/index.jsx", "src/index.js", + "pages/_app.tsx", "pages/_app.ts", "pages/_app.jsx", "pages/_app.js", + "app/layout.tsx", "app/layout.jsx", "app/layout.js", + ]: + if (self.project_dir / entry).exists(): + entry_point = entry + break + + # Extract routes based on stack type + if self._detected_stack == "nextjs": + routes = self._extract_nextjs_routes() + endpoints = self._extract_nextjs_api_routes() + else: + routes = self._extract_react_router_routes() + + # Extract components + components = self._extract_components() + + return { + "stack_name": self._detected_stack, + "confidence": 0.9, + 
"routes": routes, + "components": components, + "endpoints": endpoints, + "entry_point": entry_point, + "config_files": config_files, + "dependencies": dependencies, + "metadata": { + "has_typescript": "typescript" in dependencies, + "has_tailwind": "tailwindcss" in dependencies, + "has_react_router": "react-router-dom" in dependencies, + }, + } + + def _extract_nextjs_routes(self) -> list[RouteInfo]: + """Extract routes from Next.js file-based routing.""" + routes: list[RouteInfo] = [] + + # Check for App Router (Next.js 13+) + # Prefer root directories over src/ to avoid duplicates + app_dir = self.project_dir / "app" + if app_dir.exists() and app_dir.is_dir(): + routes.extend(self._extract_app_router_routes(app_dir)) + else: + # Only check src/app if root app/ doesn't exist + src_app = self.project_dir / "src" / "app" + if src_app.exists() and src_app.is_dir(): + routes.extend(self._extract_app_router_routes(src_app)) + + # Check for Pages Router + pages_dir = self.project_dir / "pages" + if pages_dir.exists() and pages_dir.is_dir(): + routes.extend(self._extract_pages_router_routes(pages_dir)) + else: + # Only check src/pages if root pages/ doesn't exist + src_pages = self.project_dir / "src" / "pages" + if src_pages.exists() and src_pages.is_dir(): + routes.extend(self._extract_pages_router_routes(src_pages)) + + return routes + + def _extract_app_router_routes(self, app_dir: Path) -> list[RouteInfo]: + """Extract routes from Next.js App Router.""" + routes: list[RouteInfo] = [] + seen_paths: set[str] = set() + + # Check all page file extensions: .tsx, .jsx, .ts, .js + for pattern in ("page.tsx", "page.jsx", "page.ts", "page.js"): + for page_file in app_dir.rglob(pattern): + rel_path = page_file.relative_to(app_dir) + route_path = "/" + "/".join(rel_path.parent.parts) + + # Handle dynamic routes: [id] -> :id + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + + # Clean up + if route_path == "/.": + route_path = "/" + route_path = route_path.replace("//", "/") + + # Skip if we've already seen this route (deduplication) + if route_path in seen_paths: + continue + seen_paths.add(route_path) + + routes.append({ + "path": route_path, + "method": "GET", + "handler": "Page", + "file": str(page_file.relative_to(self.project_dir)), + }) + + return routes + + def _extract_pages_router_routes(self, pages_dir: Path) -> list[RouteInfo]: + """Extract routes from Next.js Pages Router.""" + routes: list[RouteInfo] = [] + + # Check all page file extensions: .tsx, .jsx, .ts, .js + for ext in ("tsx", "jsx", "ts", "js"): + for page_file in pages_dir.rglob(f"*.{ext}"): + if page_file.name.startswith("_"): # Skip _app.tsx, _document.tsx + continue + if "api" in page_file.parts: # Skip API routes + continue + + rel_path = page_file.relative_to(pages_dir) + route_path = "/" + str(rel_path.with_suffix("")) + + # Handle index files + route_path = route_path.replace("/index", "") + if not route_path: + route_path = "/" + + # Handle dynamic routes + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + + routes.append({ + "path": route_path, + "method": "GET", + "handler": page_file.stem, + "file": str(page_file.relative_to(self.project_dir)), + }) + + return routes + + def _extract_nextjs_api_routes(self) -> list[EndpointInfo]: + """Extract API routes from Next.js.""" + endpoints: list[EndpointInfo] = [] + + # Check pages/api (Pages Router) + api_dirs = [ + self.project_dir / "pages" / "api", + self.project_dir / "src" / "pages" / "api", + ] + + for api_dir in api_dirs: + if api_dir.exists(): + 
for api_file in api_dir.rglob("*.ts"): + endpoints.extend(self._parse_api_route(api_file, api_dir)) + for api_file in api_dir.rglob("*.js"): + endpoints.extend(self._parse_api_route(api_file, api_dir)) + + # Check app/api (App Router - route.ts files) + app_api_dirs = [ + self.project_dir / "app" / "api", + self.project_dir / "src" / "app" / "api", + ] + + for app_api in app_api_dirs: + if app_api.exists(): + for pattern in ("route.ts", "route.js", "route.tsx", "route.jsx"): + for route_file in app_api.rglob(pattern): + endpoints.extend(self._parse_app_router_api(route_file, app_api)) + + return endpoints + + def _parse_api_route(self, api_file: Path, api_dir: Path) -> list[EndpointInfo]: + """Parse a Pages Router API route file.""" + rel_path = api_file.relative_to(api_dir) + route_path = "/api/" + str(rel_path.with_suffix("")) + route_path = route_path.replace("/index", "") + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + + return [{ + "path": route_path, + "method": "ALL", # Default export handles all methods + "handler": "handler", + "file": str(api_file.relative_to(self.project_dir)), + "description": f"API endpoint at {route_path}", + }] + + def _parse_app_router_api(self, route_file: Path, api_dir: Path) -> list[EndpointInfo]: + """Parse an App Router API route file.""" + rel_path = route_file.relative_to(api_dir) + route_path = "/api/" + "/".join(rel_path.parent.parts) + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + if route_path.endswith("/"): + route_path = route_path[:-1] + + # Try to detect which methods are exported + content = self._read_file_safe(route_file) + methods = [] + if content: + for method in ["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]: + if (f"export async function {method}" in content or + f"export function {method}" in content or + f"export const {method}" in content): + methods.append(method) + + if not methods: + methods = ["ALL"] + + return [ + { + "path": route_path, + "method": method, + "handler": method, + "file": str(route_file.relative_to(self.project_dir)), + "description": f"{method} {route_path}", + } + for method in methods + ] + + def _extract_react_router_routes(self) -> list[RouteInfo]: + """Extract routes from React Router configuration.""" + routes: list[RouteInfo] = [] + + # Look for route definitions in common files (include all JS/TS extensions) + route_files = ( + self._find_files("**/*.tsx") + self._find_files("**/*.jsx") + + self._find_files("**/*.ts") + self._find_files("**/*.js") + ) + + # Pattern for React Router elements + # Note: No IGNORECASE - JSX/TSX is case-sensitive, Route must be capitalized + route_pattern = re.compile( + r']*path=["\']([^"\']+)["\'][^>]*>' + ) + + # Pattern for createBrowserRouter routes + browser_router_pattern = re.compile( + r'{\s*path:\s*["\']([^"\']+)["\']' + ) + + for file in route_files: + content = self._read_file_safe(file) + if content is None: + continue + + # Skip if not likely a routing file + if "Route" not in content and "createBrowserRouter" not in content: + continue + + # Extract routes from JSX + for match in route_pattern.finditer(content): + routes.append({ + "path": match.group(1), + "method": "GET", + "handler": "Route", + "file": str(file.relative_to(self.project_dir)), + }) + + # Extract routes from createBrowserRouter + for match in browser_router_pattern.finditer(content): + routes.append({ + "path": match.group(1), + "method": "GET", + "handler": "RouterRoute", + "file": str(file.relative_to(self.project_dir)), + }) + + return routes + + 
def _extract_components(self) -> list[ComponentInfo]: + """Extract React components.""" + components: list[ComponentInfo] = [] + + # Find component files (include .js for JavaScript projects) + component_files = ( + self._find_files("**/components/**/*.tsx") + + self._find_files("**/components/**/*.jsx") + + self._find_files("**/components/**/*.js") + ) + + for file in component_files: + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "component", + }) + + # Find page files (include .js for JavaScript projects) + page_files = ( + self._find_files("**/pages/**/*.tsx") + + self._find_files("**/pages/**/*.jsx") + + self._find_files("**/pages/**/*.js") + ) + + for file in page_files: + if not file.name.startswith("_"): + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "page", + }) + + return components diff --git a/analyzers/stack_detector.py b/analyzers/stack_detector.py new file mode 100644 index 00000000..eb50172e --- /dev/null +++ b/analyzers/stack_detector.py @@ -0,0 +1,228 @@ +""" +Stack Detector +============== + +Orchestrates detection of tech stacks in a codebase. +Uses multiple analyzers to detect frontend, backend, and database technologies. +""" + +import json +import logging +from pathlib import Path +from typing import TypedDict + +from .base_analyzer import AnalysisResult + +logger = logging.getLogger(__name__) + + +class StackInfo(TypedDict): + """Information about a detected stack.""" + name: str + category: str # frontend, backend, database, other + confidence: float + analysis: AnalysisResult | None + + +class StackDetectionResult(TypedDict): + """Complete result of stack detection.""" + project_dir: str + detected_stacks: list[StackInfo] + primary_frontend: str | None + primary_backend: str | None + database: str | None + routes_count: int + components_count: int + endpoints_count: int + all_routes: list[dict] + all_endpoints: list[dict] + all_components: list[dict] + summary: str + + +class StackDetector: + """ + Detects tech stacks in a codebase by running multiple analyzers. + + Usage: + detector = StackDetector(project_dir) + result = detector.detect() + """ + + def __init__(self, project_dir: Path): + """ + Initialize the stack detector. + + Args: + project_dir: Path to the project directory to analyze + """ + self.project_dir = Path(project_dir).resolve() + self._analyzers: list = [] + self._load_analyzers() + + def _load_analyzers(self) -> None: + """Load all available analyzers.""" + # Import analyzers here to avoid circular imports + from .node_analyzer import NodeAnalyzer + from .python_analyzer import PythonAnalyzer + from .react_analyzer import ReactAnalyzer + from .vue_analyzer import VueAnalyzer + + # Order matters: more specific analyzers first (Next.js before React) + self._analyzers = [ + ReactAnalyzer(self.project_dir), + VueAnalyzer(self.project_dir), + NodeAnalyzer(self.project_dir), + PythonAnalyzer(self.project_dir), + ] + + def detect(self) -> StackDetectionResult: + """ + Run all analyzers and compile results. 
+ + Returns: + StackDetectionResult with all detected stacks and extracted information + """ + detected_stacks: list[StackInfo] = [] + all_routes: list[dict] = [] + all_endpoints: list[dict] = [] + all_components: list[dict] = [] + + for analyzer in self._analyzers: + can_analyze, confidence = analyzer.can_analyze() + + if can_analyze and confidence > 0.3: # Minimum confidence threshold + try: + analysis = analyzer.analyze() + + # Determine category (include all variants) + stack_name = analyzer.stack_name.lower() + if stack_name in ( + "react", "react-vite", "react-cra", + "nextjs", + "vue", "vue-vite", "vue-cli", + "nuxt", "angular" + ): + category = "frontend" + elif stack_name in ( + "express", "fastify", "koa", "nodejs", + "fastapi", "django", "flask", "nestjs", + "python", + ): + category = "backend" + elif stack_name in ("postgres", "mysql", "mongodb", "sqlite"): + category = "database" + else: + category = "other" + + detected_stacks.append({ + "name": analyzer.stack_name, + "category": category, + "confidence": confidence, + "analysis": analysis, + }) + + # Collect all routes, endpoints, components + all_routes.extend(analysis.get("routes", [])) + all_endpoints.extend(analysis.get("endpoints", [])) + all_components.extend(analysis.get("components", [])) + + except Exception as e: + # Log but don't fail - continue with other analyzers + logger.warning(f"{analyzer.stack_name} analyzer failed: {e}") + + # Sort by confidence + detected_stacks.sort(key=lambda x: x["confidence"], reverse=True) + + # Determine primary frontend and backend + primary_frontend = None + primary_backend = None + database = None + + for stack in detected_stacks: + if stack["category"] == "frontend" and primary_frontend is None: + primary_frontend = stack["name"] + elif stack["category"] == "backend" and primary_backend is None: + primary_backend = stack["name"] + elif stack["category"] == "database" and database is None: + database = stack["name"] + + # Build summary + stack_names = [s["name"] for s in detected_stacks] + if stack_names: + summary = f"Detected: {', '.join(stack_names)}" + else: + summary = "No recognized tech stack detected" + + if all_routes: + summary += f" | {len(all_routes)} routes" + if all_endpoints: + summary += f" | {len(all_endpoints)} endpoints" + if all_components: + summary += f" | {len(all_components)} components" + + return { + "project_dir": str(self.project_dir), + "detected_stacks": detected_stacks, + "primary_frontend": primary_frontend, + "primary_backend": primary_backend, + "database": database, + "routes_count": len(all_routes), + "components_count": len(all_components), + "endpoints_count": len(all_endpoints), + "all_routes": all_routes, + "all_endpoints": all_endpoints, + "all_components": all_components, + "summary": summary, + } + + def detect_quick(self) -> dict: + """ + Quick detection without full analysis. + + Returns a simplified result with just stack names and confidence. + Useful for UI display before full analysis. 
+ """ + results = [] + + for analyzer in self._analyzers: + can_analyze, confidence = analyzer.can_analyze() + if can_analyze and confidence > 0.3: + results.append({ + "name": analyzer.stack_name, + "confidence": confidence, + }) + + results.sort(key=lambda x: x["confidence"], reverse=True) + + return { + "project_dir": str(self.project_dir), + "stacks": results, + "primary": results[0]["name"] if results else None, + } + + def to_json(self, result: StackDetectionResult) -> str: + """Convert detection result to JSON string.""" + # Remove analysis objects for cleaner output + clean_result = { + **result, + "detected_stacks": [ + {k: v for k, v in stack.items() if k != "analysis"} + for stack in result["detected_stacks"] + ], + } + return json.dumps(clean_result, indent=2) + + +def detect_stack(project_dir: str | Path) -> StackDetectionResult: + """ + Convenience function to detect stack in a project. + + Args: + project_dir: Path to the project directory + + Returns: + StackDetectionResult + """ + detector = StackDetector(Path(project_dir)) + return detector.detect() diff --git a/analyzers/vue_analyzer.py b/analyzers/vue_analyzer.py new file mode 100644 index 00000000..75b3ae41 --- /dev/null +++ b/analyzers/vue_analyzer.py @@ -0,0 +1,319 @@ +""" +Vue.js Analyzer +=============== + +Detects Vue.js and Nuxt.js projects. +Extracts routes from Vue Router and Nuxt file-based routing. +""" + +import json +import re +from pathlib import Path + +from .base_analyzer import ( + AnalysisResult, + BaseAnalyzer, + ComponentInfo, + EndpointInfo, + RouteInfo, +) + + +class VueAnalyzer(BaseAnalyzer): + """Analyzer for Vue.js and Nuxt.js projects.""" + + @property + def stack_name(self) -> str: + return self._detected_stack + + def __init__(self, project_dir: Path): + super().__init__(project_dir) + self._detected_stack = "vue" # Default, may change to "nuxt" + + def can_analyze(self) -> tuple[bool, float]: + """Detect if this is a Vue.js/Nuxt.js project.""" + confidence = 0.0 + + # Check package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + deps = { + **data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + + # Check for Nuxt first (more specific) + if "nuxt" in deps or "nuxt3" in deps: + self._detected_stack = "nuxt" + confidence = 0.95 + return True, confidence + + # Check for Vue + if "vue" in deps: + confidence = 0.85 + + # Check for Vite + if "vite" in deps: + self._detected_stack = "vue-vite" + confidence = 0.9 + + # Check for Vue CLI + if "@vue/cli-service" in deps: + self._detected_stack = "vue-cli" + confidence = 0.9 + + return True, confidence + + except (json.JSONDecodeError, OSError): + pass + + # Check for Nuxt config + if (self.project_dir / "nuxt.config.js").exists() or \ + (self.project_dir / "nuxt.config.ts").exists(): + self._detected_stack = "nuxt" + return True, 0.95 + + # Check for common Vue files + if (self.project_dir / "src" / "App.vue").exists(): + return True, 0.7 + + return False, 0.0 + + def analyze(self) -> AnalysisResult: + """Analyze the Vue.js/Nuxt.js project.""" + routes: list[RouteInfo] = [] + components: list[ComponentInfo] = [] + endpoints: list[EndpointInfo] = [] + config_files: list[str] = [] + dependencies: dict[str, str] = {} + entry_point: str | None = None + + # Load dependencies from package.json + package_json = self.project_dir / "package.json" + if package_json.exists(): + try: + data = json.loads(package_json.read_text()) + dependencies = { + 
**data.get("dependencies", {}), + **data.get("devDependencies", {}), + } + except (json.JSONDecodeError, OSError): + pass + + # Collect config files + for config_name in [ + "nuxt.config.js", "nuxt.config.ts", + "vite.config.js", "vite.config.ts", + "vue.config.js", "tsconfig.json", + "tailwind.config.js", "tailwind.config.ts", + ]: + if (self.project_dir / config_name).exists(): + config_files.append(config_name) + + # Detect entry point + for entry in ["src/main.ts", "src/main.js", "app.vue", "src/App.vue"]: + if (self.project_dir / entry).exists(): + entry_point = entry + break + + # Extract routes based on stack type + if self._detected_stack == "nuxt": + routes = self._extract_nuxt_routes() + endpoints = self._extract_nuxt_api_routes() + else: + routes = self._extract_vue_router_routes() + + # Extract components + components = self._extract_components() + + return { + "stack_name": self._detected_stack, + "confidence": 0.85, + "routes": routes, + "components": components, + "endpoints": endpoints, + "entry_point": entry_point, + "config_files": config_files, + "dependencies": dependencies, + "metadata": { + "has_typescript": "typescript" in dependencies, + "has_tailwind": "tailwindcss" in dependencies, + "has_vue_router": "vue-router" in dependencies, + "has_pinia": "pinia" in dependencies, + "has_vuex": "vuex" in dependencies, + }, + } + + def _extract_nuxt_routes(self) -> list[RouteInfo]: + """Extract routes from Nuxt file-based routing.""" + routes: list[RouteInfo] = [] + + # Check for pages directory + pages_dirs = [ + self.project_dir / "pages", + self.project_dir / "src" / "pages", + ] + + for pages_dir in pages_dirs: + if pages_dir.exists(): + routes.extend(self._extract_pages_routes(pages_dir)) + + return routes + + def _extract_pages_routes(self, pages_dir: Path) -> list[RouteInfo]: + """Extract routes from Nuxt pages directory.""" + routes: list[RouteInfo] = [] + + for page_file in pages_dir.rglob("*.vue"): + rel_path = page_file.relative_to(pages_dir) + route_path = "/" + str(rel_path.with_suffix("")) + + # Handle index files + route_path = route_path.replace("/index", "") + if not route_path: + route_path = "/" + + # Handle dynamic routes: [id].vue or _id.vue -> :id + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + route_path = re.sub(r"/_([^/]+)", r"/:\1", route_path) + + routes.append({ + "path": route_path, + "method": "GET", + "handler": page_file.stem, + "file": str(page_file.relative_to(self.project_dir)), + }) + + return routes + + def _extract_nuxt_api_routes(self) -> list[EndpointInfo]: + """Extract API routes from Nuxt server directory.""" + endpoints: list[EndpointInfo] = [] + + # Nuxt 3 uses server/api directory + api_dirs = [ + self.project_dir / "server" / "api", + self.project_dir / "server" / "routes", + ] + + for api_dir in api_dirs: + if not api_dir.exists(): + continue + + for api_file in api_dir.rglob("*.ts"): + rel_path = api_file.relative_to(api_dir) + route_path = "/api/" + str(rel_path.with_suffix("")) + + # Handle index files + route_path = route_path.replace("/index", "") + + # Handle dynamic routes + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + + # Try to detect method from filename + method = "ALL" + for m in ["get", "post", "put", "patch", "delete"]: + if api_file.stem.endswith(f".{m}") or api_file.stem == m: + method = m.upper() + route_path = route_path.replace(f".{m}", "") + break + + endpoints.append({ + "path": route_path, + "method": method, + "handler": "handler", + "file": 
str(api_file.relative_to(self.project_dir)), + "description": f"{method} {route_path}", + }) + + # Also check .js files + for api_file in api_dir.rglob("*.js"): + rel_path = api_file.relative_to(api_dir) + route_path = "/api/" + str(rel_path.with_suffix("")) + route_path = route_path.replace("/index", "") + route_path = re.sub(r"\[([^\]]+)\]", r":\1", route_path) + + endpoints.append({ + "path": route_path, + "method": "ALL", + "handler": "handler", + "file": str(api_file.relative_to(self.project_dir)), + "description": f"API endpoint at {route_path}", + }) + + return endpoints + + def _extract_vue_router_routes(self) -> list[RouteInfo]: + """Extract routes from Vue Router configuration.""" + routes: list[RouteInfo] = [] + + # Look for router configuration files + router_files = ( + self._find_files("**/router/**/*.js") + + self._find_files("**/router/**/*.ts") + + self._find_files("**/router.js") + + self._find_files("**/router.ts") + + self._find_files("**/routes.js") + + self._find_files("**/routes.ts") + ) + + # Pattern for Vue Router routes + # { path: '/about', ... } + route_pattern = re.compile( + r'{\s*path:\s*["\']([^"\']+)["\']', + re.IGNORECASE + ) + + for file in router_files: + content = self._read_file_safe(file) + if content is None: + continue + + for match in route_pattern.finditer(content): + routes.append({ + "path": match.group(1), + "method": "GET", + "handler": "RouterRoute", + "file": str(file.relative_to(self.project_dir)), + }) + + return routes + + def _extract_components(self) -> list[ComponentInfo]: + """Extract Vue components.""" + components: list[ComponentInfo] = [] + + # Find component files + component_files = ( + self._find_files("**/components/**/*.vue") + + self._find_files("**/views/**/*.vue") + ) + + for file in component_files: + # Determine component type + if "views" in file.parts: + comp_type = "view" + elif "layouts" in file.parts: + comp_type = "layout" + else: + comp_type = "component" + + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": comp_type, + }) + + # Find page files (Nuxt) + page_files = self._find_files("**/pages/**/*.vue") + + for file in page_files: + components.append({ + "name": file.stem, + "file": str(file.relative_to(self.project_dir)), + "type": "page", + }) + + return components diff --git a/server/main.py b/server/main.py index 33fd3484..1c274607 100644 --- a/server/main.py +++ b/server/main.py @@ -35,6 +35,7 @@ expand_project_router, features_router, filesystem_router, + import_project_router, projects_router, schedules_router, settings_router, @@ -158,6 +159,7 @@ async def require_localhost(request: Request, call_next): app.include_router(assistant_chat_router) app.include_router(settings_router) app.include_router(terminal_router) +app.include_router(import_project_router) # ============================================================================ diff --git a/server/routers/__init__.py b/server/routers/__init__.py index f4d02f51..db7654a0 100644 --- a/server/routers/__init__.py +++ b/server/routers/__init__.py @@ -11,6 +11,7 @@ from .expand_project import router as expand_project_router from .features import router as features_router from .filesystem import router as filesystem_router +from .import_project import router as import_project_router from .projects import router as projects_router from .schedules import router as schedules_router from .settings import router as settings_router @@ -29,4 +30,5 @@ "assistant_chat_router", "settings_router", "terminal_router", 
+ "import_project_router", ] diff --git a/server/routers/import_project.py b/server/routers/import_project.py new file mode 100644 index 00000000..22896219 --- /dev/null +++ b/server/routers/import_project.py @@ -0,0 +1,335 @@ +""" +Import Project Router +===================== + +REST and WebSocket endpoints for importing existing projects into Autocoder. + +The import flow: +1. POST /api/import/analyze - Analyze codebase, detect stack +2. POST /api/import/extract-features - Generate features from analysis +3. POST /api/import/create-features - Create features in database +""" + +import logging +import re +import sys +from pathlib import Path +from typing import Optional + +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/import", tags=["import-project"]) + +# Root directory +ROOT_DIR = Path(__file__).parent.parent.parent + +# Add root to path for imports +if str(ROOT_DIR) not in sys.path: + sys.path.insert(0, str(ROOT_DIR)) + + +def _get_project_path(project_name: str) -> Path | None: + """Get project path from registry.""" + from registry import get_project_path + return get_project_path(project_name) + + +def validate_path(path: str) -> bool: + """Validate path to prevent traversal attacks.""" + # Check for null bytes + if "\x00" in path: + return False + try: + p = Path(path) + except Exception: + return False + # Require absolute paths + if not p.is_absolute(): + return False + # Check for ".." as a path component (not just substring) + if ".." in p.parts: + return False + return True + + +# ============================================================================ +# Request/Response Models +# ============================================================================ + +class AnalyzeRequest(BaseModel): + """Request to analyze a project directory.""" + path: str = Field(..., description="Absolute path to the project directory") + + +class StackInfo(BaseModel): + """Information about a detected stack.""" + name: str + category: str + confidence: float + + +class AnalyzeResponse(BaseModel): + """Response from project analysis.""" + project_dir: str + detected_stacks: list[StackInfo] + primary_frontend: Optional[str] = None + primary_backend: Optional[str] = None + database: Optional[str] = None + routes_count: int + components_count: int + endpoints_count: int + summary: str + + +class ExtractFeaturesRequest(BaseModel): + """Request to extract features from an analyzed project.""" + path: str = Field(..., description="Absolute path to the project directory") + + +class DetectedFeature(BaseModel): + """A feature extracted from codebase analysis.""" + category: str + name: str + description: str + steps: list[str] + source_type: str + source_file: Optional[str] = None + confidence: float + + +class ExtractFeaturesResponse(BaseModel): + """Response from feature extraction.""" + features: list[DetectedFeature] + count: int + by_category: dict[str, int] + summary: str + + +class CreateFeaturesRequest(BaseModel): + """Request to create features in the database.""" + project_name: str = Field(..., description="Name of the registered project") + features: list[dict] = Field(..., description="Features to create (category, name, description, steps)") + + +class CreateFeaturesResponse(BaseModel): + """Response from feature creation.""" + created: int + project_name: str + message: str + + +# ============================================================================ +# REST Endpoints 
+# ============================================================================ + +@router.post("/analyze", response_model=AnalyzeResponse) +async def analyze_project(request: AnalyzeRequest): + """ + Analyze a project directory to detect tech stack. + + Returns detected stacks with confidence scores, plus counts of + routes, endpoints, and components found. + """ + if not validate_path(request.path): + raise HTTPException(status_code=400, detail="Invalid path") + + project_dir = Path(request.path).resolve() + + if not project_dir.exists(): + raise HTTPException(status_code=404, detail="Directory not found") + + if not project_dir.is_dir(): + raise HTTPException(status_code=400, detail="Path is not a directory") + + try: + from analyzers import StackDetector + + detector = StackDetector(project_dir) + result = detector.detect() + + # Convert to response model + stacks = [ + StackInfo( + name=s["name"], + category=s["category"], + confidence=s["confidence"], + ) + for s in result["detected_stacks"] + ] + + return AnalyzeResponse( + project_dir=str(project_dir), + detected_stacks=stacks, + primary_frontend=result.get("primary_frontend"), + primary_backend=result.get("primary_backend"), + database=result.get("database"), + routes_count=result.get("routes_count", 0), + components_count=result.get("components_count", 0), + endpoints_count=result.get("endpoints_count", 0), + summary=result.get("summary", ""), + ) + + except Exception as e: + logger.exception(f"Error analyzing project: {e}") + raise HTTPException(status_code=500, detail=f"Analysis failed: {str(e)}") + + +@router.post("/extract-features", response_model=ExtractFeaturesResponse) +async def extract_features(request: ExtractFeaturesRequest): + """ + Extract features from an analyzed project. + + Returns a list of features ready for import, each with: + - category, name, description, steps + - source_type (route, endpoint, component, inferred) + - confidence score + """ + if not validate_path(request.path): + raise HTTPException(status_code=400, detail="Invalid path") + + project_dir = Path(request.path).resolve() + + if not project_dir.exists(): + raise HTTPException(status_code=404, detail="Directory not found") + + if not project_dir.is_dir(): + raise HTTPException(status_code=400, detail="Path is not a directory") + + try: + from analyzers import extract_from_project + + result = extract_from_project(project_dir) + + # Convert to response model + features = [ + DetectedFeature( + category=f["category"], + name=f["name"], + description=f["description"], + steps=f["steps"], + source_type=f["source_type"], + source_file=f.get("source_file"), + confidence=f["confidence"], + ) + for f in result["features"] + ] + + return ExtractFeaturesResponse( + features=features, + count=result["count"], + by_category=result["by_category"], + summary=result["summary"], + ) + + except Exception as e: + logger.exception(f"Error extracting features: {e}") + raise HTTPException(status_code=500, detail=f"Feature extraction failed: {str(e)}") + + +@router.post("/create-features", response_model=CreateFeaturesResponse) +async def create_features(request: CreateFeaturesRequest): + """ + Create features in the database for a registered project. + + Takes extracted features and creates them via the feature database. + All features are created with passes=False (pending verification). 
+ """ + # Validate project name + if not re.match(r'^[a-zA-Z0-9_-]{1,50}$', request.project_name): + raise HTTPException(status_code=400, detail="Invalid project name") + + project_dir = _get_project_path(request.project_name) + if not project_dir: + raise HTTPException(status_code=404, detail="Project not found in registry") + + if not project_dir.exists(): + raise HTTPException(status_code=404, detail="Project directory not found") + + if not request.features: + raise HTTPException(status_code=400, detail="No features provided") + + try: + from api.database import Feature, create_database + + # Initialize database + engine, SessionLocal = create_database(project_dir) + session = SessionLocal() + + try: + # Get starting priority + from sqlalchemy import func + max_priority = session.query(func.max(Feature.priority)).scalar() or 0 + + # Create features + created_count = 0 + for i, f in enumerate(request.features): + # Validate required fields + if not all(key in f for key in ["category", "name", "description", "steps"]): + logger.warning(f"Skipping feature missing required fields: {f}") + continue + + feature = Feature( + priority=max_priority + i + 1, + category=f["category"], + name=f["name"], + description=f["description"], + steps=f["steps"], + passes=False, + in_progress=False, + ) + session.add(feature) + created_count += 1 + + session.commit() + + return CreateFeaturesResponse( + created=created_count, + project_name=request.project_name, + message=f"Created {created_count} features for project '{request.project_name}'", + ) + + finally: + session.close() + + except Exception as e: + logger.exception(f"Error creating features: {e}") + raise HTTPException(status_code=500, detail=f"Feature creation failed: {str(e)}") + + +@router.get("/quick-detect") +async def quick_detect(path: str): + """ + Quick detection endpoint for UI preview. + + Returns only stack names and confidence without full analysis. + Useful for showing detected stack while user configures import. + """ + if not validate_path(path): + raise HTTPException(status_code=400, detail="Invalid path") + + project_dir = Path(path).resolve() + + if not project_dir.exists(): + raise HTTPException(status_code=404, detail="Directory not found") + + if not project_dir.is_dir(): + raise HTTPException(status_code=400, detail="Path is not a directory") + + try: + from analyzers import StackDetector + + detector = StackDetector(project_dir) + result = detector.detect_quick() + + return { + "project_dir": str(project_dir), + "stacks": result.get("stacks", []), + "primary": result.get("primary"), + } + + except Exception as e: + logger.exception(f"Error in quick detect: {e}") + raise HTTPException(status_code=500, detail=f"Detection failed: {str(e)}") diff --git a/ui/src/components/ImportProjectModal.tsx b/ui/src/components/ImportProjectModal.tsx new file mode 100644 index 00000000..e61d591d --- /dev/null +++ b/ui/src/components/ImportProjectModal.tsx @@ -0,0 +1,711 @@ +/** + * Import Project Modal Component + * + * Multi-step wizard for importing existing projects: + * 1. Select project folder + * 2. Analyze and detect tech stack + * 3. Extract features from codebase + * 4. Review and select features to import + * 5. 
Create features in database + */ + +import { useState, useEffect, useRef, useCallback } from 'react' +import { + X, + Folder, + Search, + Layers, + CheckCircle2, + AlertCircle, + Loader2, + ArrowRight, + ArrowLeft, + Code, + Database, + Server, + Layout, + CheckSquare, + Square, + ChevronDown, + ChevronRight, +} from 'lucide-react' +import { useImportProject } from '../hooks/useImportProject' +import { useCreateProject } from '../hooks/useProjects' +import { FolderBrowser } from './FolderBrowser' + +type Step = 'folder' | 'analyzing' | 'detected' | 'features' | 'register' | 'complete' | 'error' + +interface ImportProjectModalProps { + isOpen: boolean + onClose: () => void + onProjectImported: (projectName: string) => void +} + +export function ImportProjectModal({ + isOpen, + onClose, + onProjectImported, +}: ImportProjectModalProps) { + const [step, setStep] = useState('folder') + const [projectName, setProjectName] = useState('') + const [expandedCategories, setExpandedCategories] = useState>(new Set()) + const [registerError, setRegisterError] = useState(null) + const redirectTimeoutRef = useRef | null>(null) + + // Track if modal is active to prevent stale state updates after close/unmount + const isActiveRef = useRef(true) + + const { + state, + analyze, + extractFeatures, + createFeatures, + toggleFeature, + selectAllFeatures, + deselectAllFeatures, + reset, + } = useImportProject() + + const createProject = useCreateProject() + + // Set active on mount/open, inactive on close/unmount + useEffect(() => { + isActiveRef.current = isOpen + return () => { + isActiveRef.current = false + } + }, [isOpen]) + + // Define handleClose early with useCallback so it can be used in useEffect + const handleClose = useCallback(() => { + isActiveRef.current = false + if (redirectTimeoutRef.current) { + clearTimeout(redirectTimeoutRef.current) + redirectTimeoutRef.current = null + } + setStep('folder') + setProjectName('') + setExpandedCategories(new Set()) + setRegisterError(null) + reset() + onClose() + }, [onClose, reset]) + + // Expand all categories when features are extracted + useEffect(() => { + if (step === 'features' && state.featuresResult) { + setExpandedCategories(new Set(Object.keys(state.featuresResult.by_category))) + } + }, [step, state.featuresResult]) + + // Cleanup redirect timeout on unmount + useEffect(() => { + return () => { + if (redirectTimeoutRef.current) { + clearTimeout(redirectTimeoutRef.current) + } + } + }, []) + + // Handle Escape key to close modal + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.key === 'Escape') { + handleClose() + } + } + if (isOpen) { + document.addEventListener('keydown', handleKeyDown) + return () => document.removeEventListener('keydown', handleKeyDown) + } + }, [isOpen, handleClose]) + + if (!isOpen) return null + + const handleFolderSelect = async (path: string) => { + setStep('analyzing') + const success = await analyze(path) + // Check if still active after async operation + if (!isActiveRef.current) return + if (success) { + setStep('detected') + } else { + setStep('error') + } + } + + const handleExtractFeatures = async () => { + const success = await extractFeatures() + // Check if still active after async operation + if (!isActiveRef.current) return + if (success) { + setStep('features') + // Expand all categories by default - need to get fresh state via callback + // The featuresResult will be available after the state update from extractFeatures + } else { + setStep('error') + } + } + + const 
handleContinueToRegister = () => { + // Generate default project name from path + const pathParts = state.projectPath?.split(/[/\\]/) || [] + const defaultName = pathParts[pathParts.length - 1] || 'imported-project' + setProjectName(defaultName.replace(/[^a-zA-Z0-9_-]/g, '-')) + setStep('register') + } + + const handleRegisterAndCreate = async () => { + const trimmedName = projectName.trim() + if (!trimmedName || !state.projectPath) return + + // Validate project name format + if (!/^[a-zA-Z0-9_-]+$/.test(trimmedName)) { + setRegisterError('Project name can only contain letters, numbers, hyphens, and underscores') + return + } + + setRegisterError(null) + + try { + // First register the project + await createProject.mutateAsync({ + name: trimmedName, + path: state.projectPath, + specMethod: 'manual', + }) + + // Check if still active after async operation + if (!isActiveRef.current) return + + // Then create features + const success = await createFeatures(trimmedName) + + // Check if still active after async operation + if (!isActiveRef.current) return + + if (success) { + setStep('complete') + redirectTimeoutRef.current = setTimeout(() => { + if (!isActiveRef.current) return + onProjectImported(trimmedName) + handleClose() + }, 1500) + } + } catch (err) { + if (!isActiveRef.current) return + setRegisterError(err instanceof Error ? err.message : 'Failed to register project') + } + } + + const handleBack = () => { + if (step === 'detected' || step === 'analyzing' || step === 'error') { + setStep('folder') + reset() + } else if (step === 'features') { + setStep('detected') + } else if (step === 'register') { + setStep('features') + } + } + + const toggleCategory = (category: string) => { + setExpandedCategories(prev => { + const next = new Set(prev) + if (next.has(category)) { + next.delete(category) + } else { + next.add(category) + } + return next + }) + } + + const getStackIcon = (category: string) => { + switch (category.toLowerCase()) { + case 'frontend': + return + case 'backend': + return + case 'database': + return + default: + return + } + } + + // Folder selection step + if (step === 'folder') { + return ( +
+
e.stopPropagation()} + > +
+
+ +
+

+ Import Existing Project +

+

+ Select the folder containing your existing project +

+
+
+ +
+ +
+ +
+
+
+ ) + } + + // Analyzing step + if (step === 'analyzing' || state.step === 'analyzing') { + return ( +
+
e.stopPropagation()} + > +
+

+ Analyzing Project +

+ +
+ +
+
+ +
+

Detecting Tech Stack

+

+ Scanning your project for frameworks, routes, and components... +

+ +
+
+
+ ) + } + + // Error state (check both local and hook state for consistency) + if (step === 'error' || state.step === 'error') { + return ( +
+
e.stopPropagation()} + > +
+

+ Error +

+ +
+ +
+
+ +
+

Analysis Failed

+

+ {state.error || 'An unexpected error occurred. Please try again.'} +

+ +
+
+
+ ) + } + + // Detection results step + if (step === 'detected' && state.analyzeResult) { + const result = state.analyzeResult + return ( +
+
e.stopPropagation()} + > +
+
+ +

+ Stack Detected +

+
+ +
+ +
+ {/* Summary */} +
+

{result.summary}

+
+ + {/* Detected Stacks */} +

Detected Technologies

+
+ {result.detected_stacks.map((stack, i) => ( +
+ {getStackIcon(stack.category)} +
+
{stack.name}
+
+ {stack.category} +
+
+
+ {Math.round(stack.confidence * 100)}% +
+
+ ))} +
+ + {/* Stats */} +

Codebase Analysis

+
+
+
+ {result.routes_count} +
+
Routes
+
+
+
+ {result.endpoints_count} +
+
Endpoints
+
+
+
+ {result.components_count} +
+
Components
+
+
+
+ +
+ + +
+
+
+ ) + } + + // Features review step + if (step === 'features' && state.featuresResult) { + const result = state.featuresResult + const categories = Object.keys(result.by_category) + + // Group features by category + const featuresByCategory: Record = {} + result.features.forEach(f => { + if (!featuresByCategory[f.category]) { + featuresByCategory[f.category] = [] + } + featuresByCategory[f.category].push(f) + }) + + return ( +
+
e.stopPropagation()} + > +
+
+ +
+

+ Review Features +

+

+ {state.selectedFeatures.length} of {result.count} features selected +

+
+
+ +
+ + {/* Selection controls */} +
+ + +
+ +
+ {categories.map(category => ( +
+ + + {expandedCategories.has(category) && ( +
+ {featuresByCategory[category]?.map((feature) => { + const isSelected = state.selectedFeatures.some( + f => f.name === feature.name && f.category === feature.category + ) + return ( +
toggleFeature(feature)} + onKeyDown={(e) => { + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault() + toggleFeature(feature) + } + }} + role="checkbox" + aria-checked={isSelected} + tabIndex={0} + className={` + flex items-start gap-3 p-3 cursor-pointer transition-all + border-2 border-[var(--color-neo-border)] + ${isSelected + ? 'bg-[var(--color-neo-done-light)] border-[var(--color-neo-done)]' + : 'bg-white hover:bg-[var(--color-neo-bg-secondary)]' + } + `} + > + {isSelected ? ( + + ) : ( + + )} +
+
{feature.name}
+
+ {feature.description} +
+
+ + {feature.source_type} + + {feature.source_file && ( + + {feature.source_file} + + )} +
+
+
+ ) + })} +
+ )} +
+ ))} +
+ +
+ + +
+
+
+ ) + } + + // Register project step + if (step === 'register') { + return ( +
+
e.stopPropagation()} + > +
+

+ Register Project +

+ +
+ +
+
+ + setProjectName(e.target.value)} + placeholder="my-project" + className="neo-input" + pattern="^[a-zA-Z0-9_-]+$" + autoFocus + /> +

+ Use letters, numbers, hyphens, and underscores only. +

+
+ +
+
+
+ Features to create: + {state.selectedFeatures.length} +
+
+ Project path: + + {state.projectPath} + +
+
+
+ + {(registerError || state.error) && ( +
+ {registerError || state.error} +
+ )} + +
+ + +
+
+
+
+ ) + } + + // Complete step + if (step === 'complete') { + return ( +
+
e.stopPropagation()} + > +
+

+ Import Complete +

+
+ +
+
+ +
+

{projectName}

+

+ Project imported successfully! +

+

+ {state.createResult?.created} features created +

+
+ + Redirecting... +
+
+
+
+ ) + } + + return null +} diff --git a/ui/src/components/NewProjectModal.tsx b/ui/src/components/NewProjectModal.tsx index 4b460232..eb20dda7 100644 --- a/ui/src/components/NewProjectModal.tsx +++ b/ui/src/components/NewProjectModal.tsx @@ -11,29 +11,17 @@ import { useState } from 'react' import { createPortal } from 'react-dom' -import { Bot, FileEdit, ArrowRight, ArrowLeft, Loader2, CheckCircle2, Folder } from 'lucide-react' +import { X, Bot, FileEdit, ArrowRight, ArrowLeft, Loader2, CheckCircle2, Folder, Download } from 'lucide-react' import { useCreateProject } from '../hooks/useProjects' import { SpecCreationChat } from './SpecCreationChat' import { FolderBrowser } from './FolderBrowser' +import { ImportProjectModal } from './ImportProjectModal' import { startAgent } from '../lib/api' -import { - Dialog, - DialogContent, - DialogHeader, - DialogTitle, - DialogDescription, - DialogFooter, -} from '@/components/ui/dialog' -import { Button } from '@/components/ui/button' -import { Input } from '@/components/ui/input' -import { Label } from '@/components/ui/label' -import { Alert, AlertDescription } from '@/components/ui/alert' -import { Badge } from '@/components/ui/badge' -import { Card, CardContent } from '@/components/ui/card' type InitializerStatus = 'idle' | 'starting' | 'error' -type Step = 'name' | 'folder' | 'method' | 'chat' | 'complete' +type Step = 'choose' | 'name' | 'folder' | 'method' | 'chat' | 'complete' | 'import' +type ProjectType = 'new' | 'import' type SpecMethod = 'claude' | 'manual' interface NewProjectModalProps { @@ -49,18 +37,16 @@ export function NewProjectModal({ onProjectCreated, onStepChange, }: NewProjectModalProps) { - const [step, setStep] = useState('name') + const [step, setStep] = useState('choose') + const [, setProjectType] = useState(null) const [projectName, setProjectName] = useState('') const [projectPath, setProjectPath] = useState(null) - const [_specMethod, setSpecMethod] = useState(null) + const [, setSpecMethod] = useState(null) const [error, setError] = useState(null) const [initializerStatus, setInitializerStatus] = useState('idle') const [initializerError, setInitializerError] = useState(null) const [yoloModeSelected, setYoloModeSelected] = useState(false) - // Suppress unused variable warning - specMethod may be used in future - void _specMethod - const createProject = useCreateProject() // Wrapper to notify parent of step changes @@ -90,7 +76,7 @@ export function NewProjectModal({ } const handleFolderSelect = (path: string) => { - setProjectPath(path) + setProjectPath(path) // Use selected path directly - no subfolder creation changeStep('method') } @@ -180,7 +166,8 @@ export function NewProjectModal({ } const handleClose = () => { - changeStep('name') + changeStep('choose') + setProjectType(null) setProjectName('') setProjectPath(null) setSpecMethod(null) @@ -198,13 +185,41 @@ export function NewProjectModal({ } else if (step === 'folder') { changeStep('name') setProjectPath(null) + } else if (step === 'name') { + changeStep('choose') + setProjectType(null) + } + } + + const handleProjectTypeSelect = (type: ProjectType) => { + setProjectType(type) + if (type === 'new') { + changeStep('name') + } else { + changeStep('import') } } + const handleImportComplete = (importedProjectName: string) => { + onProjectCreated(importedProjectName) + handleClose() + } + + // Import project view + if (step === 'import') { + return ( + + ) + } + // Full-screen chat view - use portal to render at body level if (step === 'chat') { return createPortal( -
+
!open && handleClose()}> - +
+
e.stopPropagation()} + > {/* Header */} - +
- +
- Select Project Location - - Select the folder to use for project {projectName}. Create a new folder or choose an existing one. - +

+ Select Project Location +

+

+ Select the folder to use for project {projectName}. Create a new folder or choose an existing one. +

- + +
{/* Folder Browser */}
@@ -244,151 +270,270 @@ export function NewProjectModal({ onCancel={handleFolderCancel} />
- - +
+
) } return ( - !open && handleClose()}> - - - +
+
e.stopPropagation()} + > + {/* Header */} +
+

+ {step === 'choose' && 'New Project'} {step === 'name' && 'Create New Project'} {step === 'method' && 'Choose Setup Method'} {step === 'complete' && 'Project Created!'} - - - - {/* Step 1: Project Name */} - {step === 'name' && ( -
-
- - setProjectName(e.target.value)} - placeholder="my-awesome-app" - pattern="^[a-zA-Z0-9_-]+$" - autoFocus - /> -

- Use letters, numbers, hyphens, and underscores only. +

+ +
+ + {/* Content */} +
+ {/* Step 0: Choose project type */} + {step === 'choose' && ( +
+

+ What would you like to do?

-
- {error && ( - - {error} - - )} - - - - - - )} - - {/* Step 2: Spec Method */} - {step === 'method' && ( -
- - How would you like to define your project? - - -
- {/* Claude option */} - !createProject.isPending && handleMethodSelect('claude')} - > - +
+ {/* New project option */} + + + {/* Import existing option */} + +
+ )} + + {/* Step 1: Project Name */} + {step === 'name' && ( +
+
+ + setProjectName(e.target.value)} + placeholder="my-awesome-app" + className="neo-input" + pattern="^[a-zA-Z0-9_-]+$" + autoFocus + /> +

+ Use letters, numbers, hyphens, and underscores only. +

+
- {error && ( - - {error} - - )} - - {createProject.isPending && ( -
- - Creating project... + {error && ( +
+ {error} +
+ )} + +
+ +
- )} + + )} + + {/* Step 2: Spec Method */} + {step === 'method' && ( +
+

+ How would you like to define your project? +

- - - -
- )} +
+ {/* Claude option */} + + + {/* Manual option */} + +
- {/* Step 3: Complete */} - {step === 'complete' && ( -
-
- + {error && ( +
+ {error} +
+ )} + + {createProject.isPending && ( +
+ + Creating project... +
+ )} + +
+ +
-

{projectName}

-

- Your project has been created successfully! -

-
- - Redirecting... + )} + + {/* Step 3: Complete */} + {step === 'complete' && ( +
+
+ +
+

+ {projectName} +

+

+ Your project has been created successfully! +

+
+ + Redirecting... +
-
- )} - -
+ )} +
+
+ ) } diff --git a/ui/src/hooks/useImportProject.ts b/ui/src/hooks/useImportProject.ts new file mode 100644 index 00000000..3d0628da --- /dev/null +++ b/ui/src/hooks/useImportProject.ts @@ -0,0 +1,250 @@ +/** + * Hook for managing project import workflow + * + * Handles: + * - Stack detection via API + * - Feature extraction + * - Feature creation in database + */ + +import { useState, useCallback } from 'react' + +// API base path (relative since frontend is served from same origin) +const API_BASE = '/api' + +// API response types +interface StackInfo { + name: string + category: string + confidence: number +} + +interface AnalyzeResponse { + project_dir: string + detected_stacks: StackInfo[] + primary_frontend: string | null + primary_backend: string | null + database: string | null + routes_count: number + components_count: number + endpoints_count: number + summary: string +} + +interface DetectedFeature { + category: string + name: string + description: string + steps: string[] + source_type: string + source_file: string | null + confidence: number +} + +interface ExtractFeaturesResponse { + features: DetectedFeature[] + count: number + by_category: Record + summary: string +} + +interface CreateFeaturesResponse { + created: number + project_name: string + message: string +} + +// Hook state +interface ImportState { + step: 'idle' | 'analyzing' | 'detected' | 'extracting' | 'extracted' | 'creating' | 'complete' | 'error' + projectPath: string | null + analyzeResult: AnalyzeResponse | null + featuresResult: ExtractFeaturesResponse | null + createResult: CreateFeaturesResponse | null + error: string | null + selectedFeatures: DetectedFeature[] +} + +export interface UseImportProjectReturn { + state: ImportState + analyze: (path: string) => Promise + extractFeatures: () => Promise + createFeatures: (projectName: string) => Promise + toggleFeature: (feature: DetectedFeature) => void + selectAllFeatures: () => void + deselectAllFeatures: () => void + reset: () => void +} + +const initialState: ImportState = { + step: 'idle', + projectPath: null, + analyzeResult: null, + featuresResult: null, + createResult: null, + error: null, + selectedFeatures: [], +} + +export function useImportProject(): UseImportProjectReturn { + const [state, setState] = useState(initialState) + + const analyze = useCallback(async (path: string): Promise => { + setState(prev => ({ ...prev, step: 'analyzing', projectPath: path, error: null })) + + try { + const response = await fetch(`${API_BASE}/import/analyze`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ path }), + }) + + if (!response.ok) { + const error = await response.json() + throw new Error(error.detail || 'Failed to analyze project') + } + + const result: AnalyzeResponse = await response.json() + setState(prev => ({ + ...prev, + step: 'detected', + analyzeResult: result, + })) + return true + } catch (err) { + setState(prev => ({ + ...prev, + step: 'error', + error: err instanceof Error ? 
err.message : 'Analysis failed', + })) + return false + } + }, []) + + const extractFeatures = useCallback(async (): Promise => { + if (!state.projectPath) return false + + setState(prev => ({ ...prev, step: 'extracting', error: null })) + + try { + const response = await fetch(`${API_BASE}/import/extract-features`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ path: state.projectPath }), + }) + + if (!response.ok) { + const error = await response.json() + throw new Error(error.detail || 'Failed to extract features') + } + + const result: ExtractFeaturesResponse = await response.json() + setState(prev => ({ + ...prev, + step: 'extracted', + featuresResult: result, + selectedFeatures: result.features, // Select all by default + })) + return true + } catch (err) { + setState(prev => ({ + ...prev, + step: 'error', + error: err instanceof Error ? err.message : 'Feature extraction failed', + })) + return false + } + }, [state.projectPath]) + + const createFeatures = useCallback(async (projectName: string): Promise => { + if (!state.selectedFeatures.length) return false + + setState(prev => ({ ...prev, step: 'creating', error: null })) + + try { + const features = state.selectedFeatures.map(f => ({ + category: f.category, + name: f.name, + description: f.description, + steps: f.steps, + })) + + const response = await fetch(`${API_BASE}/import/create-features`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ project_name: projectName, features }), + }) + + if (!response.ok) { + const error = await response.json() + throw new Error(error.detail || 'Failed to create features') + } + + const result: CreateFeaturesResponse = await response.json() + setState(prev => ({ + ...prev, + step: 'complete', + createResult: result, + })) + return true + } catch (err) { + setState(prev => ({ + ...prev, + step: 'error', + error: err instanceof Error ? err.message : 'Feature creation failed', + })) + return false + } + }, [state.selectedFeatures]) + + const toggleFeature = useCallback((feature: DetectedFeature) => { + setState(prev => { + const isSelected = prev.selectedFeatures.some( + f => f.name === feature.name && f.category === feature.category + ) + + if (isSelected) { + return { + ...prev, + selectedFeatures: prev.selectedFeatures.filter( + f => !(f.name === feature.name && f.category === feature.category) + ), + } + } else { + return { + ...prev, + selectedFeatures: [...prev.selectedFeatures, feature], + } + } + }) + }, []) + + const selectAllFeatures = useCallback(() => { + setState(prev => ({ + ...prev, + selectedFeatures: prev.featuresResult?.features || [], + })) + }, []) + + const deselectAllFeatures = useCallback(() => { + setState(prev => ({ + ...prev, + selectedFeatures: [], + })) + }, []) + + const reset = useCallback(() => { + setState(initialState) + }, []) + + return { + state, + analyze, + extractFeatures, + createFeatures, + toggleFeature, + selectAllFeatures, + deselectAllFeatures, + reset, + } +}
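For reviewers trying the change locally, a minimal sketch of the Python-level entry points added under analyzers/ — illustrative only, not part of the patch; the project path is a placeholder and the repository root is assumed to be on sys.path:

from pathlib import Path

from analyzers import StackDetector, extract_from_project

project_dir = Path("/path/to/existing/project")   # placeholder path

# Full detection: stacks, routes, endpoints, components
detector = StackDetector(project_dir)
result = detector.detect()
print(result["summary"])  # e.g. "Detected: nuxt | 12 routes | 4 endpoints | 20 components"
print(result["primary_frontend"], result["primary_backend"], result["database"])

# One-step feature extraction, ready for review before import
features = extract_from_project(project_dir)
print(features["count"], features["by_category"])
for f in features["features"][:5]:
    print(f"{f['category']}: {f['name']} ({f['source_type']}, confidence {f['confidence']:.2f})")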
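The Nuxt page-to-route rewriting in VueAnalyzer is easiest to see on concrete filenames. A small standalone sketch of the same transforms, under made-up file names:

import re

def to_route(rel: str) -> str:
    path = "/" + rel.rsplit(".", 1)[0]              # strip the .vue suffix
    path = path.replace("/index", "") or "/"        # index files collapse to their folder
    path = re.sub(r"\[([^\]]+)\]", r":\1", path)    # [id].vue  -> :id (Nuxt 3 style)
    path = re.sub(r"/_([^/]+)", r"/:\1", path)      # _id.vue   -> :id (Nuxt 2 style)
    return path

for page in ("index.vue", "about.vue", "users/index.vue", "users/[id].vue", "blog/_slug.vue"):
    print(page, "->", to_route(page))
# index.vue -> /
# about.vue -> /about
# users/index.vue -> /users
# users/[id].vue -> /users/:id
# blog/_slug.vue -> /blog/:slug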
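Finally, the three-step REST flow documented at the top of server/routers/import_project.py, exercised with the standard library only. The base URL and project name are placeholders; a locally running server and an already-registered project are assumed:

import json
from urllib.request import Request, urlopen

BASE = "http://localhost:8000/api/import"  # assumed server address (placeholder port)

def post(path: str, payload: dict) -> dict:
    req = Request(
        f"{BASE}{path}",
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urlopen(req) as resp:
        return json.load(resp)

# 1. Detect the stack
analysis = post("/analyze", {"path": "/path/to/existing/project"})
print(analysis["summary"])

# 2. Extract candidate features
extracted = post("/extract-features", {"path": "/path/to/existing/project"})

# 3. Create the selected features for a registered project (created pending, passes=False)
created = post("/create-features", {
    "project_name": "my-app",  # placeholder registered project name
    "features": [
        {k: f[k] for k in ("category", "name", "description", "steps")}
        for f in extracted["features"]
    ],
})
print(created["message"])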