diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 80eab14a..6ee384d4 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -27,6 +27,9 @@ jobs: languages: python queries: +security-extended,security-and-quality + - name: Autobuild + uses: github/codeql-action/autobuild@v4 + - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v4 with: diff --git a/.github/workflows/spam-protection.yml b/.github/workflows/spam-protection.yml index 412345ef..e5ef3910 100644 --- a/.github/workflows/spam-protection.yml +++ b/.github/workflows/spam-protection.yml @@ -62,19 +62,18 @@ jobs: labels: ['needs-triage'] }); + const commentBody = 'šŸ” **Auto-Review Notice**\n\n' + + 'This PR was flagged for: ' + flags.join(', ') + '\n\n' + + 'Please ensure:\n' + + '- [ ] PR description explains the changes\n' + + '- [ ] CLA is signed\n' + + '- [ ] Changes are tested\n\n' + + 'A maintainer will review shortly.'; + await github.rest.issues.createComment({ owner: context.repo.owner, repo: context.repo.repo, issue_number: pr.number, - body: 'šŸ” **Auto-Review Notice** - -This PR was flagged for: ' + flags.join(', ') + ' - -Please ensure: -- [ ] PR description explains the changes -- [ ] CLA is signed -- [ ] Changes are tested - -A maintainer will review shortly.' 
+ body: commentBody }); } diff --git a/cortex/cli.py b/cortex/cli.py index 9261a816..e9c5ca4b 100644 --- a/cortex/cli.py +++ b/cortex/cli.py @@ -7,6 +7,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Any +logger = logging.getLogger(__name__) + from cortex.api_key_detector import auto_detect_api_key, setup_api_key from cortex.ask import AskHandler from cortex.branding import VERSION, console, cx_header, cx_print, show_banner @@ -224,8 +226,9 @@ def notify(self, args): elif args.notify_action == "enable": mgr.config["enabled"] = True - # Addressing CodeRabbit feedback: Ideally should use a public method instead of private _save_config, - # but keeping as is for a simple fix (or adding a save method to NotificationManager would be best). + # Addressing CodeRabbit feedback: Ideally should use a public method + # instead of private _save_config, but keeping as is for a simple fix + # (or adding a save method to NotificationManager would be best). mgr._save_config() self._print_success("Notifications enabled") return 0 @@ -638,6 +641,9 @@ def install( execute: bool = False, dry_run: bool = False, parallel: bool = False, + from_source: bool = False, + source_url: str | None = None, + version: str | None = None, ): # Validate input first is_valid, error = validate_install_request(software) @@ -672,6 +678,10 @@ def install( start_time = datetime.now() try: + # Handle --from-source flag + if from_source: + return self._install_from_source(software, execute, dry_run, source_url, version) + self._print_status("🧠", "Understanding request...") interpreter = CommandInterpreter(api_key=api_key, provider=provider) @@ -960,7 +970,8 @@ def history(self, limit: int = 20, status: str | None = None, show_id: str | Non packages += f" +{len(r.packages) - 2}" print( - f"{r.id:<18} {date:<20} {r.operation_type.value:<12} {packages:<30} {r.status.value:<15}" + f"{r.id:<18} {date:<20} {r.operation_type.value:<12} " + f"{packages:<30} {r.status.value:<15}" ) return 0 @@ -1281,7 
+1292,8 @@ def _env_template(self, env_mgr: EnvironmentManager, args: argparse.Namespace) - return self._env_template_apply(env_mgr, args) else: self._print_error( - "Please specify: template list, template show , or template apply " + "Please specify: template list, template show , " + "or template apply " ) return 1 @@ -2001,6 +2013,200 @@ def progress_callback(current: int, total: int, step: InstallationStep) -> None: console.print(f"Error: {result.error_message}", style="red") return 1 + def _install_from_source( + self, + package_name: str, + execute: bool, + dry_run: bool, + source_url: str | None, + version: str | None, + ) -> int: + """Install a package from a source URL by building and optionally installing it. + + This method handles the complete workflow for installing packages from source code: + parsing version information, building the package, and optionally executing + installation commands. It supports dry-run mode for previewing operations and + records all activities in the installation history for audit purposes. + + Args: + package_name: Name of the package to install. If version is specified + using "@" syntax (e.g., "python@3.12"), it will be parsed automatically + if version parameter is None. + execute: If True, executes the installation commands after building. + If False, only builds the package and displays commands without executing. + dry_run: If True, performs a dry run showing what commands would be executed + without actually building or installing. Takes precedence over execute. + source_url: Optional URL to the source code repository or tarball. + If None, the SourceBuilder will attempt to locate the source automatically. + version: Optional version string to build. If None and package_name contains + "@", the version will be extracted from package_name. + + Returns: + int: Exit status code. Returns 0 on success (build/install completed or + dry-run completed), 1 on failure (build failed or installation failed). 
+ + Side Effects: + - Invokes SourceBuilder.build_from_source() to build the package + - May execute installation commands via InstallationCoordinator if execute=True + - Records installation start, progress, and completion in InstallationHistory + - Prints status messages and progress to console + - May use cached builds if available + + Raises: + No exceptions are raised directly, but underlying operations may fail: + - SourceBuilder.build_from_source() failures are caught and returned as status 1 + - InstallationCoordinator.execute() failures are caught and returned as status 1 + - InstallationHistory exceptions are caught and logged as warnings + + Special Behavior: + - dry_run=True: Shows build/install commands without executing any operations. + Returns 0 after displaying commands. Installation history is still recorded. + - execute=False, dry_run=False: Builds the package and displays install commands + but does not execute them. Returns 0. User is prompted to run with --execute. + - execute=True, dry_run=False: Builds the package and executes all installation + commands. Returns 0 on success, 1 on failure. + - Version parsing: If package_name contains "@" (e.g., "python@3.12") and version + is None, the version is automatically extracted and package_name is updated. + - Caching: Uses cached builds when available, printing a notification if cache + is used. 
+ """ + from cortex.source_builder import SourceBuilder + + # Initialize history for audit logging (same as install() method) + history = InstallationHistory() + install_id = None + start_time = datetime.now() + + builder = SourceBuilder() + + # Parse version from package name if specified (e.g., python@3.12) + if "@" in package_name and not version: + parts = package_name.split("@") + package_name = parts[0] + version = parts[1] if len(parts) > 1 and parts[1] else None + + cx_print(f"Building {package_name} from source...", "info") + if version: + cx_print(f"Version: {version}", "info") + + # Prepare commands list for history recording + # Include source URL in the commands list to track it + commands = [] + if source_url: + commands.append(f"Source URL: {source_url}") + commands.append(f"Build from source: {package_name}") + if version: + commands.append(f"Version: {version}") + + # Record installation start + if execute or dry_run: + try: + install_id = history.record_installation( + InstallationType.INSTALL, + [package_name], + commands, + start_time, + ) + except Exception as e: + logger.warning(f"Failed to record installation start: {e}") + + result = builder.build_from_source( + package_name=package_name, + version=version, + source_url=source_url, + use_cache=True, + ) + + if not result.success: + self._print_error(f"Build failed: {result.error_message}") + # Record failed installation + if install_id: + try: + history.update_installation( + install_id, + InstallationStatus.FAILED, + error_message=result.error_message or "Build failed", + ) + except Exception as e: + logger.warning(f"Failed to update installation record: {e}") + return 1 + + if result.cached: + cx_print(f"Using cached build for {package_name}", "info") + + # Add install commands to the commands list for history + commands.extend(result.install_commands) + + if dry_run: + cx_print("\nBuild commands (dry run):", "info") + for cmd in result.install_commands: + console.print(f" • {cmd}") + # 
Record successful dry run + if install_id: + try: + history.update_installation(install_id, InstallationStatus.SUCCESS) + except Exception as e: + logger.warning(f"Failed to update installation record: {e}") + return 0 + + if not execute: + cx_print("\nBuild completed. Install commands:", "info") + for cmd in result.install_commands: + console.print(f" • {cmd}") + cx_print("Run with --execute to install", "info") + # Record successful build (but not installed) + if install_id: + try: + history.update_installation(install_id, InstallationStatus.SUCCESS) + except Exception as e: + logger.warning(f"Failed to update installation record: {e}") + return 0 + + # Execute install commands + def progress_callback(current: int, total: int, step: InstallationStep) -> None: + status_emoji = "ā³" + if step.status == StepStatus.SUCCESS: + status_emoji = "āœ…" + elif step.status == StepStatus.FAILED: + status_emoji = "āŒ" + console.print(f"[{current}/{total}] {status_emoji} {step.description}") + + coordinator = InstallationCoordinator( + commands=result.install_commands, + descriptions=[f"Install {package_name}" for _ in result.install_commands], + timeout=600, + stop_on_error=True, + progress_callback=progress_callback, + ) + + install_result = coordinator.execute() + + if install_result.success: + self._print_success(f"{package_name} built and installed successfully!") + # Record successful installation + if install_id: + try: + history.update_installation(install_id, InstallationStatus.SUCCESS) + console.print(f"\nšŸ“ Installation recorded (ID: {install_id})") + console.print(f" To rollback: cortex rollback {install_id}") + except Exception as e: + logger.warning(f"Failed to update installation record: {e}") + return 0 + else: + self._print_error("Installation failed") + error_msg = install_result.error_message or "Installation failed" + if install_result.error_message: + console.print(f"Error: {error_msg}", style="red") + # Record failed installation + if install_id: + 
try: + history.update_installation( + install_id, InstallationStatus.FAILED, error_message=error_msg + ) + except Exception as e: + logger.warning(f"Failed to update installation record: {e}") + return 1 + # -------------------------- @@ -2144,6 +2350,21 @@ def main(): action="store_true", help="Enable parallel execution for multi-step installs", ) + install_parser.add_argument( + "--from-source", + action="store_true", + help=("Build and install from source code when binaries unavailable"), + ) + install_parser.add_argument( + "--source-url", + type=str, + help="URL to source code (for --from-source)", + ) + install_parser.add_argument( + "--pkg-version", + type=str, + help="Version to build (for --from-source)", + ) # Import command - import dependencies from package manager files import_parser = subparsers.add_parser( @@ -2510,6 +2731,9 @@ def main(): execute=args.execute, dry_run=args.dry_run, parallel=args.parallel, + from_source=getattr(args, "from_source", False), + source_url=getattr(args, "source_url", None), + version=getattr(args, "pkg_version", None), ) elif args.command == "import": return cli.import_deps(args) diff --git a/cortex/source_builder.py b/cortex/source_builder.py new file mode 100644 index 00000000..2ba34b09 --- /dev/null +++ b/cortex/source_builder.py @@ -0,0 +1,711 @@ +#!/usr/bin/env python3 +""" +Source Package Builder for Cortex Linux + +Builds and installs packages from source code when binaries are unavailable. +Supports common build systems: autotools, cmake, make, python setup.py, etc. + +This module handles fetching, configuring, building, and installing packages +from source code with automatic dependency detection and build system detection. 
+""" + +from __future__ import annotations + +import hashlib +import ipaddress +import json +import logging +import shlex +import shutil +import socket +import tarfile +import tempfile +import urllib.parse +import urllib.request +import zipfile +from dataclasses import dataclass +from pathlib import Path + +from cortex.branding import cx_print +from cortex.dependency_resolver import DependencyResolver +from cortex.utils.commands import run_command + +logger = logging.getLogger(__name__) + +# Build cache directory (created at runtime in SourceBuilder.__init__) +CACHE_DIR = Path.home() / ".cortex" / "build_cache" + +# Common build dependencies by category +BUILD_DEPENDENCIES = { + "base": ["build-essential", "gcc", "g++", "make", "cmake", "pkg-config"], + "autotools": ["autoconf", "automake", "libtool", "gettext"], + "python": ["python3-dev", "python3-pip", "python3-setuptools", "python3-wheel"], + "ssl": ["libssl-dev"], + "zlib": ["zlib1g-dev"], + "curl": ["libcurl4-openssl-dev"], + "xml": ["libxml2-dev"], + "sqlite": ["libsqlite3-dev"], + "readline": ["libreadline-dev"], +} + + +@dataclass +class BuildConfig: + """Configuration for a source build.""" + + package_name: str + version: str | None = None + source_url: str | None = None + source_type: str = "tarball" # tarball, git, github + build_system: str = "autotools" # autotools, cmake, make, python, custom + configure_args: list[str] | None = None + make_args: list[str] | None = None + install_prefix: str = "/usr/local" + cache_key: str | None = None + + +@dataclass +class BuildResult: + """Result of a build operation.""" + + success: bool + package_name: str + version: str | None + build_dir: str + install_commands: list[str] + error_message: str | None = None + cached: bool = False + + +class SourceBuilder: + """Builds packages from source code. + + Handles fetching source code, detecting build systems, managing build + dependencies, configuring builds, compiling, and installing packages. 
+ Supports caching of build artifacts for faster subsequent builds. + + Attributes: + dependency_resolver: DependencyResolver instance for checking installed packages. + cache_dir: Path to build cache directory. + """ + + def __init__(self): + self.dependency_resolver = DependencyResolver() + # Create cache directory at runtime with error handling + try: + self.cache_dir = CACHE_DIR + self.cache_dir.mkdir(parents=True, exist_ok=True) + except (OSError, PermissionError) as e: + # Fallback to temp directory if cache directory creation fails + logger.warning( + f"Failed to create cache directory {CACHE_DIR}: {e}. " + "Using temporary directory instead." + ) + self.cache_dir = Path(tempfile.gettempdir()) / "cortex-build-cache" + try: + self.cache_dir.mkdir(parents=True, exist_ok=True) + except Exception: + # Last resort: set to None and disable caching + logger.error("Failed to create fallback cache directory. Caching disabled.") + self.cache_dir = None + + def _get_cache_key(self, package_name: str, version: str | None, source_url: str) -> str: + """Generate a cache key for a build.""" + key_data = f"{package_name}:{version}:{source_url}" + return hashlib.sha256(key_data.encode()).hexdigest()[:16] + + def _check_cache(self, cache_key: str) -> Path | None: + """Check if a build is cached.""" + if self.cache_dir is None: + return None + cache_path = self.cache_dir / cache_key + if cache_path.exists() and (cache_path / "installed").exists(): + return cache_path + return None + + def _save_to_cache(self, cache_key: str, build_dir: Path, install_commands: list[str]) -> None: + """Save build artifacts to cache.""" + if self.cache_dir is None: + return # Caching disabled + cache_path = self.cache_dir / cache_key + cache_path.mkdir(parents=True, exist_ok=True) + + # Save metadata + metadata = { + "build_dir": str(build_dir), + "install_commands": install_commands, + "timestamp": str(Path(build_dir).stat().st_mtime), + } + with open(cache_path / "metadata.json", "w") as f: + 
json.dump(metadata, f, indent=2) + + # Mark as installed + (cache_path / "installed").touch() + + def detect_build_dependencies(self, package_name: str, build_system: str) -> list[str]: + """Detect required build dependencies for a package. + + Args: + package_name: Name of the package to build. + build_system: Build system type (autotools, cmake, make, python). + + Returns: + List of missing build dependency package names that need to be installed. + """ + required_deps = set() + + # Base dependencies + required_deps.update(BUILD_DEPENDENCIES["base"]) + + # Build system specific + if build_system == "autotools": + required_deps.update(BUILD_DEPENDENCIES["autotools"]) + elif build_system == "cmake": + required_deps.add("cmake") + elif build_system == "python": + required_deps.update(BUILD_DEPENDENCIES["python"]) + + # Package-specific dependencies (common patterns) + if "python" in package_name.lower(): + required_deps.update(BUILD_DEPENDENCIES["python"]) + + # Check which are missing + missing = [] + for dep in required_deps: + if not self.dependency_resolver.is_package_installed(dep): + missing.append(dep) + + return missing + + def fetch_source(self, package_name: str, source_url: str | None, version: str | None) -> Path: + """Fetch source code from URL or detect from package name. + + Args: + package_name: Name of the package to fetch. + source_url: URL to source code (optional, will auto-detect if not provided). + version: Version to fetch (optional). + + Returns: + Path to extracted source directory. + + Raises: + RuntimeError: If source download or extraction fails. + ValueError: If source location cannot be detected. + """ + if source_url: + return self._fetch_from_url(source_url, package_name, version) + else: + # Try to detect source location + return self._detect_source_location(package_name, version) + + def _validate_url(self, url: str) -> None: + """Validate URL to prevent SSRF attacks. 
+ + Raises: + ValueError: If URL is invalid or potentially dangerous. + """ + # Parse URL + parsed = urllib.parse.urlparse(url) + + # Validate scheme - only allow http and https + if parsed.scheme not in ("http", "https"): + raise ValueError( + f"Invalid URL scheme '{parsed.scheme}'. Only http and https are allowed." + ) + + # Validate hostname exists + if not parsed.netloc: + raise ValueError("URL must have a valid hostname.") + + # Resolve hostname to IP addresses + try: + hostname = parsed.hostname + if not hostname: + raise ValueError("URL must have a valid hostname.") + + # Get all IP addresses for the hostname + ip_addresses = [] + try: + # Try IPv4 first + ipv4 = socket.gethostbyname(hostname) + ip_addresses.append(ipaddress.IPv4Address(ipv4)) + except (socket.gaierror, ValueError): + pass + + # Check for IPv6 (if supported) + try: + addrinfo = socket.getaddrinfo(hostname, None, socket.AF_INET6) + for info in addrinfo: + ipv6 = info[4][0] + ip_addresses.append(ipaddress.IPv6Address(ipv6)) + except (socket.gaierror, ValueError, OSError): + pass + + if not ip_addresses: + raise ValueError(f"Could not resolve hostname '{hostname}' to an IP address.") + + # Validate IP addresses - reject dangerous ranges + for ip in ip_addresses: + # Reject loopback addresses + if ip.is_loopback: + raise ValueError(f"Loopback address {ip} is not allowed.") + + # Reject link-local addresses + if ip.is_link_local: + raise ValueError(f"Link-local address {ip} is not allowed.") + + # Reject private RFC1918 addresses (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) + if ip.is_private: + raise ValueError(f"Private address {ip} is not allowed.") + + # Reject metadata service addresses (169.254.169.254 for AWS, GCP, Azure) + if isinstance(ip, ipaddress.IPv4Address): + if ip == ipaddress.IPv4Address("169.254.169.254"): + raise ValueError( + "Metadata service address (169.254.169.254) is not allowed." 
+ ) + + except socket.gaierror as e: + raise ValueError(f"Failed to resolve hostname '{parsed.hostname}': {e}") from e + + def _fetch_from_url(self, url: str, package_name: str, version: str | None) -> Path: + """Fetch source from a URL. + + Note: The temporary directory is intentionally not cleaned up immediately + as the returned source directory may be used for building. Cleanup should + be handled by the caller or system temp file cleanup. + + Raises: + ValueError: If URL validation fails (SSRF protection). + RuntimeError: If download or extraction fails. + """ + # Validate URL before any processing (SSRF protection) + self._validate_url(url) + + temp_dir = Path(tempfile.mkdtemp(prefix=f"cortex-build-{package_name}-")) + + try: + # Download + cx_print(f"šŸ“„ Downloading {package_name} source...", "info") + archive_path = temp_dir / "source.tar.gz" + + # Modify GitHub URLs if needed (only after validation) + if url.startswith("https://github.com/"): + # GitHub release or archive + if not url.endswith((".tar.gz", ".zip")): + if version: + url = f"{url}/archive/refs/tags/v{version}.tar.gz" + else: + url = f"{url}/archive/refs/heads/main.tar.gz" + # Re-validate modified URL + self._validate_url(url) + + urllib.request.urlretrieve(url, archive_path) + + # Extract + cx_print("šŸ“¦ Extracting source...", "info") + extract_dir = temp_dir / "extracted" + extract_dir.mkdir() + + if archive_path.suffix == ".gz" or archive_path.suffixes[-2:] == [".tar", ".gz"]: + with tarfile.open(archive_path, "r:gz") as tar: + # Prevent path traversal attacks (CVE-2007-4559) + # Filter members manually for Python < 3.12 compatibility + # Note: filter='data' parameter is only available in Python 3.12+ + # This manual filtering provides equivalent security + safe_members = [] + extract_dir_resolved = extract_dir.resolve() + for member in tar.getmembers(): + # Skip files with path traversal components + if ".." 
in member.name or member.name.startswith("/"): + continue + # Normalize and resolve path to prevent traversal + # This ensures the extracted path stays within extract_dir + try: + member_path = (extract_dir / member.name).resolve() + # Verify resolved path is within extract_dir + if not str(member_path).startswith(str(extract_dir_resolved)): + continue + safe_members.append(member) + except (ValueError, OSError): + # Skip invalid paths + continue + # Only extract pre-filtered safe members + if safe_members: + tar.extractall(extract_dir, members=safe_members) + elif archive_path.suffix == ".zip": + with zipfile.ZipFile(archive_path, "r") as zip_ref: + # Filter out path traversal components for security + # Prevents zip slip attacks (similar to CVE-2007-4559) + extract_dir_resolved = extract_dir.resolve() + for member in zip_ref.namelist(): + # Skip files with path traversal or absolute paths + if ".." in member or member.startswith("/"): + continue + # Normalize and resolve path to prevent traversal + try: + member_path = (extract_dir / member).resolve() + # Verify resolved path is within extract_dir + if not str(member_path).startswith(str(extract_dir_resolved)): + continue + zip_ref.extract(member, extract_dir) + except (ValueError, OSError, zipfile.BadZipFile): + # Skip invalid paths or corrupted zip entries + continue + + # Find the actual source directory (usually one level deep) + extracted_items = list(extract_dir.iterdir()) + if len(extracted_items) == 1 and extracted_items[0].is_dir(): + # Return the source directory (temp_dir will be cleaned up by system) + return extracted_items[0] + else: + return extract_dir + + except Exception as e: + # Clean up temp directory on error + try: + shutil.rmtree(temp_dir, ignore_errors=True) + except Exception: + pass # Best effort cleanup + logger.exception(f"Failed to fetch source from {url}") + raise RuntimeError(f"Failed to fetch source: {e}") + + def _detect_source_location(self, package_name: str, version: str | 
None) -> Path: + """Detect source location from package name.""" + # Try common patterns + common_urls = { + "python": ( + f"https://www.python.org/ftp/python/{version or '3.12.0'}/" + f"Python-{version or '3.12.0'}.tgz" + ), + "nginx": "https://nginx.org/download/nginx-1.24.0.tar.gz", + "redis": (f"https://download.redis.io/releases/redis-{version or '7.0'}.tar.gz"), + } + + if package_name.lower() in common_urls: + return self._fetch_from_url(common_urls[package_name.lower()], package_name, version) + + raise ValueError( + f"Could not detect source location for {package_name}. " + "Please provide --source-url or configure source location." + ) + + def detect_build_system(self, source_dir: Path) -> str: + """ + Detect the build system used in source directory. + + Args: + source_dir: Path to source code + + Returns: + Build system type (autotools, cmake, make, python, custom) + """ + # Check for configure script (autotools) + if (source_dir / "configure").exists() or (source_dir / "configure.ac").exists(): + return "autotools" + + # Check for CMakeLists.txt + if (source_dir / "CMakeLists.txt").exists(): + return "cmake" + + # Check for Makefile + if (source_dir / "Makefile").exists(): + return "make" + + # Check for Python setup.py or pyproject.toml + if (source_dir / "setup.py").exists() or (source_dir / "pyproject.toml").exists(): + return "python" + + # Default to autotools (most common) + return "autotools" + + def configure_build( + self, source_dir: Path, config: BuildConfig + ) -> list[tuple[str, Path | None]]: + """Configure the build. + + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. 
+ + Returns: + List of tuples (command, working_directory) where: + - command: Command string to execute (validated, no shell operators) + - working_directory: Working directory for command (None = use source_dir) + """ + commands = [] + + if config.build_system == "autotools": + # Use bash to run configure script (bash is in ALLOWED_COMMAND_PREFIXES) + # This avoids ./ prefix which isn't in the allowlist + configure_script = source_dir / "configure" + if not configure_script.exists(): + # Fallback: try configure.ac (needs autogen) + configure_script = source_dir / "configure.ac" + if configure_script.exists(): + # Would need autogen.sh first, but for now use configure + configure_script = source_dir / "configure" + + # Build command: bash configure [args] + configure_cmd = f"bash {shlex.quote(str(configure_script))}" + if config.configure_args: + # Sanitize and join args + safe_args = [shlex.quote(arg) for arg in config.configure_args] + configure_cmd += " " + " ".join(safe_args) + else: + # Default configure options + configure_cmd += f" --prefix={shlex.quote(config.install_prefix)}" + commands.append((configure_cmd, source_dir)) + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + build_dir.mkdir(exist_ok=True) + cmake_cmd = "cmake" + if config.configure_args: + # Sanitize and join args + safe_args = [shlex.quote(arg) for arg in config.configure_args] + cmake_cmd += " " + " ".join(safe_args) + else: + cmake_cmd += f" -DCMAKE_INSTALL_PREFIX={shlex.quote(config.install_prefix)}" + # Use relative path from build_dir to source_dir + cmake_cmd += " .." + # Use build_dir as working directory instead of cd command + commands.append((cmake_cmd, build_dir)) + + elif config.build_system == "python": + # Python packages usually don't need explicit configure + pass + + return commands + + def build(self, source_dir: Path, config: BuildConfig) -> list[tuple[str, Path | None] | str]: + """Build the package. 
+ + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. + + Returns: + List of build commands to execute. Can be: + - Tuples of (command, working_directory) for commands needing specific cwd + - Strings for commands that use source_dir as cwd + """ + commands = [] + + if config.build_system == "autotools" or config.build_system == "make": + make_cmd = "make" + if config.make_args: + # Sanitize make args to prevent injection + safe_args = [shlex.quote(arg) for arg in config.make_args] + make_cmd += " " + " ".join(safe_args) + else: + # Use parallel builds by default + import multiprocessing + + jobs = multiprocessing.cpu_count() + make_cmd += f" -j{jobs}" + commands.append(make_cmd) + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + make_cmd = "make" + if config.make_args: + # Sanitize make args + safe_args = [shlex.quote(arg) for arg in config.make_args] + make_cmd += " " + " ".join(safe_args) + else: + import multiprocessing + + jobs = multiprocessing.cpu_count() + make_cmd += f" -j{jobs}" + # Use build_dir as working directory instead of cd command + # Return as tuple for consistency with configure_build + commands.append((make_cmd, build_dir)) + + elif config.build_system == "python": + commands.append("python3 setup.py build") + + return commands + + def install_build(self, source_dir: Path, config: BuildConfig) -> list[str]: + """Generate install commands for built package. + + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. + + Returns: + List of install commands to execute (requires sudo). 
+ """ + commands = [] + + if config.build_system == "autotools" or config.build_system == "make": + commands.append("sudo make install") + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + commands.append(f"cd {build_dir} && sudo make install") + + elif config.build_system == "python": + commands.append("sudo python3 setup.py install") + + return commands + + def build_from_source( + self, + package_name: str, + version: str | None = None, + source_url: str | None = None, + build_system: str | None = None, + configure_args: list[str] | None = None, + make_args: list[str] | None = None, + install_prefix: str = "/usr/local", + use_cache: bool = True, + ) -> BuildResult: + """Build and install a package from source. + + Args: + package_name: Name of the package to build. + version: Version to build (optional, can be specified as package@version). + source_url: URL to source code (optional, will auto-detect if not provided). + build_system: Build system type (auto-detected if None). + configure_args: Additional configure arguments for autotools/cmake. + make_args: Additional make arguments for compilation. + install_prefix: Installation prefix (default: /usr/local). + use_cache: Whether to use build cache for faster rebuilds. + + Returns: + BuildResult with build information, success status, and install commands. + + Raises: + RuntimeError: If source download, configuration, or build fails. 
+ """ + try: + # Check cache + cache_key = None + if use_cache and source_url: + cache_key = self._get_cache_key(package_name, version, source_url) + cached_path = self._check_cache(cache_key) + if cached_path: + cx_print(f"šŸ“¦ Using cached build for {package_name}", "info") + metadata_path = cached_path / "metadata.json" + if metadata_path.exists(): + with open(metadata_path) as f: + metadata = json.load(f) + return BuildResult( + success=True, + package_name=package_name, + version=version, + build_dir=str(cached_path), + install_commands=metadata.get("install_commands", []), + cached=True, + ) + + # Fetch source + source_dir = self.fetch_source(package_name, source_url, version) + + # Detect build system if not provided + if not build_system: + build_system = self.detect_build_system(source_dir) + + # Create build config + config = BuildConfig( + package_name=package_name, + version=version, + source_url=source_url, + build_system=build_system, + configure_args=configure_args, + make_args=make_args, + install_prefix=install_prefix, + cache_key=cache_key, + ) + + # Detect and install build dependencies + cx_print("šŸ” Checking build dependencies...", "info") + missing_deps = self.detect_build_dependencies(package_name, build_system) + + if missing_deps: + cx_print(f" Installing: {', '.join(missing_deps)}", "info") + install_cmd = f"sudo apt-get install -y {' '.join(missing_deps)}" + result = run_command(install_cmd, timeout=600) + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=[], + error_message=f"Failed to install build dependencies: {result.stderr}", + ) + else: + cx_print(" āœ“ All build dependencies satisfied", "success") + + # Configure + cx_print("āš™ļø Configuring build...", "info") + configure_commands = self.configure_build(source_dir, config) + for cmd_tuple in configure_commands: + if isinstance(cmd_tuple, tuple): + cmd, work_dir = 
cmd_tuple + cwd = str(work_dir) if work_dir else str(source_dir) + else: + # Backward compatibility: if it's just a string, use source_dir + cmd = cmd_tuple + cwd = str(source_dir) + result = run_command(cmd, cwd=cwd, timeout=300) + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=[], + error_message=f"Configure failed: {result.stderr}", + ) + + # Build + cx_print("šŸ”Ø Compiling (this may take a while)...", "info") + build_commands = self.build(source_dir, config) + for cmd_tuple in build_commands: + if isinstance(cmd_tuple, tuple): + cmd, work_dir = cmd_tuple + cwd = str(work_dir) if work_dir else str(source_dir) + else: + # Backward compatibility: if it's just a string, use source_dir + cmd = cmd_tuple + cwd = str(source_dir) + result = run_command(cmd, cwd=cwd, timeout=3600) # 1 hour timeout + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=[], + error_message=f"Build failed: {result.stderr}", + ) + + # Generate install commands + install_commands = self.install_build(source_dir, config) + + # Save to cache + if use_cache and cache_key: + self._save_to_cache(cache_key, source_dir, install_commands) + + return BuildResult( + success=True, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=install_commands, + ) + + except Exception as e: + logger.exception(f"Build failed for {package_name}") + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir="", + install_commands=[], + error_message=str(e), + ) diff --git a/cortex/utils/commands.py b/cortex/utils/commands.py index 2a431dbc..30a8f003 100644 --- a/cortex/utils/commands.py +++ b/cortex/utils/commands.py @@ -111,6 +111,8 @@ "go", "rustc", "cargo", + "bash", # Needed for build scripts (configure, etc.) 
+ "sh", # Needed for build scripts ] diff --git a/docs/SOURCE_BUILD.md b/docs/SOURCE_BUILD.md new file mode 100644 index 00000000..afed9bc0 --- /dev/null +++ b/docs/SOURCE_BUILD.md @@ -0,0 +1,242 @@ +# Building Packages from Source + +Cortex Linux supports building and installing packages from source code when pre-built binaries are unavailable or when you need a specific version or configuration. + +## Overview + +The source build feature allows you to: +- Download source code from various sources (GitHub, tarballs, etc.) +- Automatically detect build dependencies +- Configure build options +- Compile and build packages +- Install built packages +- Cache build artifacts for reuse + +## Usage + +### Basic Usage + +Build a package from source: + +```bash +cortex install python@3.12 --from-source +``` + +### With Source URL + +Specify a custom source URL: + +```bash +cortex install mypackage --from-source --source-url https://example.com/mypackage.tar.gz +``` + +### With Version + +Specify a version to build: + +```bash +cortex install python --from-source --version 3.12.0 +``` + +### Dry Run + +Preview build commands without executing: + +```bash +cortex install python@3.12 --from-source --dry-run +``` + +### Execute Build + +Build and install: + +```bash +cortex install python@3.12 --from-source --execute +``` + +## Supported Build Systems + +Cortex automatically detects and supports the following build systems: + +### Autotools (GNU Build System) +- Detected by presence of `configure` script or `configure.ac` +- Uses `./configure` for configuration +- Uses `make` for building +- Uses `sudo make install` for installation + +### CMake +- Detected by presence of `CMakeLists.txt` +- Uses `cmake` for configuration +- Uses `make` for building +- Uses `sudo make install` for installation + +### Make +- Detected by presence of `Makefile` +- Uses `make` directly for building +- Uses `sudo make install` for installation + +### Python +- Detected by presence of `setup.py` or 
`pyproject.toml` +- Uses `python3 setup.py build` for building +- Uses `sudo python3 setup.py install` for installation + +## Build Dependencies + +Cortex automatically detects and installs required build dependencies: + +### Base Dependencies +- `build-essential` - Essential build tools +- `gcc`, `g++` - Compilers +- `make` - Build automation +- `cmake` - CMake build system +- `pkg-config` - Package configuration + +### Autotools Dependencies +- `autoconf` - Generate configuration scripts +- `automake` - Generate Makefiles +- `libtool` - Library building support +- `gettext` - Internationalization + +### Python Dependencies +- `python3-dev` - Python development headers +- `python3-pip` - Python package installer +- `python3-setuptools` - Python packaging tools +- `python3-wheel` - Python wheel format support + +### Common Library Dependencies +- `libssl-dev` - SSL/TLS development libraries +- `zlib1g-dev` - Compression library +- `libcurl4-openssl-dev` - HTTP client library +- `libxml2-dev` - XML parsing library +- `libsqlite3-dev` - SQLite database library +- `libreadline-dev` - Command line editing library + +## Build Configuration + +### Default Configuration + +By default, Cortex uses: +- Installation prefix: `/usr/local` +- Build optimizations enabled +- Parallel builds (using all CPU cores) + +### Custom Configuration + +You can customize build options by modifying the source code or using environment variables. For advanced configuration, you may need to manually edit the build scripts. 
+
+## Build Caching
+
+Cortex caches build artifacts to speed up subsequent builds:
+
+- Cache location: `~/.cortex/build_cache/`
+- Cache key: Based on package name, version, and source URL
+- Cache includes: Build metadata and install commands
+
+### Cache Benefits
+
+- Faster rebuilds when source hasn't changed
+- Reduced network usage
+- Consistent builds across sessions
+
+### Clearing Cache
+
+To clear the build cache:
+
+```bash
+rm -rf ~/.cortex/build_cache/
+```
+
+## Example: Building Python from Source
+
+```bash
+# Download and build Python 3.12.0
+cortex install python@3.12.0 --from-source --execute
+
+# The process will:
+# 1. Download Python 3.12.0 source
+# 2. Check for build dependencies (gcc, make, libssl-dev, etc.)
+# 3. Install missing dependencies
+# 4. Configure the build with optimizations
+# 5. Compile Python (may take 10-15 minutes)
+# 6. Install to /usr/local
+```
+
+## Example: Building Custom Package
+
+```bash
+# Build from GitHub release
+cortex install myapp \
+  --from-source \
+  --source-url https://github.com/user/myapp/archive/refs/tags/v1.0.0.tar.gz \
+  --execute
+```
+
+## Troubleshooting
+
+### Build Fails with Missing Dependencies
+
+If build fails due to missing dependencies:
+
+1. Check the error message for the missing package
+2. Install it manually: `sudo apt-get install <package-name>`
+3. Retry the build
+
+### Build Takes Too Long
+
+- Large packages (like Python) can take 10-30 minutes to compile
+- Use `--dry-run` first to preview the build
+- Consider using pre-built binaries if available
+
+### Permission Errors
+
+- Build requires `sudo` for installation
+- Ensure you have sudo privileges
+- Check that `/usr/local` is writable
+
+### Source Download Fails
+
+- Verify the source URL is accessible
+- Check network connectivity
+- Try downloading manually to verify URL
+
+### Build System Not Detected
+
+If Cortex can't detect the build system:
+
+1. 
Check that standard build files exist (`configure`, `CMakeLists.txt`, `Makefile`, etc.) +2. Manually specify build system (future feature) +3. Use manual build process if needed + +## Best Practices + +1. **Always use `--dry-run` first** to preview what will happen +2. **Check build dependencies** before starting long builds +3. **Use version pinning** (`@version`) for reproducible builds +4. **Cache builds** when rebuilding the same version +5. **Monitor disk space** - builds can use significant space +6. **Use `--execute`** only when ready to install + +## Limitations + +- Currently supports common build systems (autotools, cmake, make, python) +- Source URL detection is limited to common patterns +- Some packages may require manual configuration +- Build time can be significant for large packages +- Requires build dependencies to be available in repositories + +## Future Enhancements + +Planned improvements: +- Support for more build systems (meson, cargo, etc.) +- Automatic source URL detection from package names +- Custom build script support +- Build option presets +- Parallel package builds +- Build verification and testing + +## See Also + +- [Installation Guide](guides/Getting-Started.md) +- [Dependency Resolution](modules/README_DEPENDENCIES.md) +- [CLI Reference](COMMANDS.md) + diff --git a/pyproject.toml b/pyproject.toml index e59f5b83..149b5733 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -134,6 +134,7 @@ exclude = [ "dist", "node_modules", "venv", + "docs", ] [tool.ruff.lint] diff --git a/tests/test_cli.py b/tests/test_cli.py index bed29ab4..e71b75db 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -212,7 +212,15 @@ def test_main_install_command(self, mock_install): mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - mock_install.assert_called_once_with("docker", execute=False, dry_run=False, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=False, + dry_run=False, + parallel=False, + 
from_source=False, + source_url=None, + version=None, + ) @patch("sys.argv", ["cortex", "install", "docker", "--execute"]) @patch("cortex.cli.CortexCLI.install") @@ -220,7 +228,15 @@ def test_main_install_with_execute(self, mock_install): mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - mock_install.assert_called_once_with("docker", execute=True, dry_run=False, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=True, + dry_run=False, + parallel=False, + from_source=False, + source_url=None, + version=None, + ) @patch("sys.argv", ["cortex", "install", "docker", "--dry-run"]) @patch("cortex.cli.CortexCLI.install") @@ -228,7 +244,15 @@ def test_main_install_with_dry_run(self, mock_install): mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - mock_install.assert_called_once_with("docker", execute=False, dry_run=True, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=False, + dry_run=True, + parallel=False, + from_source=False, + source_url=None, + version=None, + ) def test_spinner_animation(self): initial_idx = self.cli.spinner_idx diff --git a/tests/test_cli_extended.py b/tests/test_cli_extended.py index 173d7a7d..57e9425c 100644 --- a/tests/test_cli_extended.py +++ b/tests/test_cli_extended.py @@ -303,7 +303,15 @@ def test_main_install_command(self, mock_install) -> None: mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - mock_install.assert_called_once_with("docker", execute=False, dry_run=False, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=False, + dry_run=False, + parallel=False, + from_source=False, + source_url=None, + version=None, + ) @patch("sys.argv", ["cortex", "install", "docker", "--execute"]) @patch("cortex.cli.CortexCLI.install") @@ -311,7 +319,15 @@ def test_main_install_with_execute(self, mock_install) -> None: mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - 
mock_install.assert_called_once_with("docker", execute=True, dry_run=False, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=True, + dry_run=False, + parallel=False, + from_source=False, + source_url=None, + version=None, + ) @patch("sys.argv", ["cortex", "install", "docker", "--dry-run"]) @patch("cortex.cli.CortexCLI.install") @@ -319,7 +335,15 @@ def test_main_install_with_dry_run(self, mock_install) -> None: mock_install.return_value = 0 result = main() self.assertEqual(result, 0) - mock_install.assert_called_once_with("docker", execute=False, dry_run=True, parallel=False) + mock_install.assert_called_once_with( + "docker", + execute=False, + dry_run=True, + parallel=False, + from_source=False, + source_url=None, + version=None, + ) def test_spinner_animation(self) -> None: initial_idx = self.cli.spinner_idx diff --git a/tests/test_source_builder.py b/tests/test_source_builder.py new file mode 100644 index 00000000..af61abfd --- /dev/null +++ b/tests/test_source_builder.py @@ -0,0 +1,344 @@ +#!/usr/bin/env python3 +""" +Tests for source_builder.py module +""" + +from __future__ import annotations + +import os +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from cortex.source_builder import ( + BUILD_DEPENDENCIES, + BuildConfig, + BuildResult, + SourceBuilder, +) + + +class TestSourceBuilder: + """Test cases for SourceBuilder class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.builder = SourceBuilder() + + def test_init(self): + """Test SourceBuilder initialization.""" + assert self.builder.dependency_resolver is not None + assert self.builder.cache_dir.exists() + + def test_get_cache_key(self): + """Test cache key generation.""" + key1 = self.builder._get_cache_key("python", "3.12.0", "https://example.com/python.tar.gz") + key2 = self.builder._get_cache_key("python", "3.12.0", "https://example.com/python.tar.gz") + key3 = 
self.builder._get_cache_key("python", "3.11.0", "https://example.com/python.tar.gz") + + # Same inputs should produce same key + assert key1 == key2 + # Different inputs should produce different key + assert key1 != key3 + # Key should be 16 characters + assert len(key1) == 16 + + def test_detect_build_dependencies_autotools(self): + """Test build dependency detection for autotools.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("test-package", "autotools") + assert "build-essential" in deps + assert "autoconf" in deps + assert "automake" in deps + + def test_detect_build_dependencies_cmake(self): + """Test build dependency detection for cmake.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("test-package", "cmake") + assert "build-essential" in deps + assert "cmake" in deps + + def test_detect_build_dependencies_python(self): + """Test build dependency detection for python packages.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("python-test", "python") + assert "python3-dev" in deps + assert "python3-pip" in deps + + def test_detect_build_dependencies_satisfied(self): + """Test that satisfied dependencies are not included.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=True + ): + deps = self.builder.detect_build_dependencies("test-package", "autotools") + assert len(deps) == 0 + + def test_detect_build_system_autotools(self): + """Test build system detection for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + assert self.builder.detect_build_system(source_dir) == "autotools" + + def 
test_detect_build_system_cmake(self): + """Test build system detection for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "CMakeLists.txt").touch() + assert self.builder.detect_build_system(source_dir) == "cmake" + + def test_detect_build_system_make(self): + """Test build system detection for make.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "Makefile").touch() + assert self.builder.detect_build_system(source_dir) == "make" + + def test_detect_build_system_python(self): + """Test build system detection for python.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "setup.py").touch() + assert self.builder.detect_build_system(source_dir) == "python" + + def test_detect_build_system_default(self): + """Test default build system detection.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + # No build files + assert self.builder.detect_build_system(source_dir) == "autotools" + + def test_configure_build_autotools(self): + """Test configure for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + config = BuildConfig( + package_name="test", + build_system="autotools", + install_prefix="/usr/local", + ) + commands = self.builder.configure_build(source_dir, config) + assert len(commands) > 0 + # Commands now return tuples of (command, working_dir) + cmd, work_dir = commands[0] + assert "bash" in cmd + assert "configure" in cmd + assert "--prefix=/usr/local" in cmd + assert work_dir == source_dir + + def test_configure_build_cmake(self): + """Test configure for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig( + package_name="test", + build_system="cmake", + install_prefix="/usr/local", + ) + commands = self.builder.configure_build(source_dir, config) + assert 
len(commands) > 0 + # Commands now return tuples of (command, working_dir) + cmd, work_dir = commands[0] + assert "cmake" in cmd + assert work_dir == source_dir / "build" + + def test_build_autotools(self): + """Test build for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="autotools") + commands = self.builder.build(source_dir, config) + assert len(commands) > 0 + assert "make" in commands[0] + + def test_build_cmake(self): + """Test build for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "build").mkdir() + config = BuildConfig(package_name="test", build_system="cmake") + commands = self.builder.build(source_dir, config) + assert len(commands) > 0 + # Commands now return tuples of (command, working_dir) for cmake + cmd_tuple = commands[0] + assert isinstance(cmd_tuple, tuple), f"Expected tuple, got {type(cmd_tuple)}" + cmd, work_dir = cmd_tuple + assert "make" in cmd + assert work_dir == source_dir / "build" + + def test_install_build_autotools(self): + """Test install commands for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="autotools") + commands = self.builder.install_build(source_dir, config) + assert len(commands) > 0 + assert "sudo make install" in commands[0] + + def test_install_build_python(self): + """Test install commands for python.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="python") + commands = self.builder.install_build(source_dir, config) + assert len(commands) > 0 + assert "python3 setup.py install" in commands[0] + + @patch("cortex.source_builder.run_command") + @patch("cortex.source_builder.urllib.request.urlretrieve") + @patch("cortex.source_builder.tarfile.open") + def 
test_fetch_from_url_tarball(self, mock_tarfile, mock_urlretrieve, mock_run_command): + """Test fetching source from URL (tarball).""" + + # Mock urlretrieve to create a dummy archive file + def mock_urlretrieve_impl(url, filepath): + Path(filepath).parent.mkdir(parents=True, exist_ok=True) + Path(filepath).touch() + + mock_urlretrieve.side_effect = mock_urlretrieve_impl + + # Mock tarfile extraction + mock_tar = MagicMock() + mock_tarfile.return_value.__enter__.return_value = mock_tar + + # Mock getmembers to return a safe member + mock_member = MagicMock() + mock_member.name = "source-1.0" + mock_tar.getmembers.return_value = [mock_member] + + # Track the extract_dir that will be created + extract_dir_path = None + + # Mock the tarfile to create extracted structure + def mock_extractall(path, members=None): + nonlocal extract_dir_path + # Handle both Path objects and strings + extract_path = Path(path) if not isinstance(path, Path) else path + extract_dir_path = extract_path + # Create the extracted directory structure that _fetch_from_url expects + (extract_path / "source-1.0").mkdir(parents=True, exist_ok=True) + # Create a file inside to make it a valid directory + (extract_path / "source-1.0" / "README").touch() + + mock_tar.extractall = mock_extractall + + result = self.builder._fetch_from_url("https://example.com/test.tar.gz", "test", "1.0") + assert result is not None + assert isinstance(result, Path) + # Verify it returns the subdirectory when there's exactly one + assert result.name == "source-1.0" + assert result.is_dir() + + def test_build_from_source_missing_deps(self): + """Test build_from_source with missing dependencies.""" + with ( + patch.object(self.builder, "fetch_source", return_value=Path("/tmp/test")), + patch.object(self.builder, "detect_build_system", return_value="autotools"), + patch.object(self.builder, "detect_build_dependencies", return_value=["gcc"]), + patch.object(self.builder, "configure_build", return_value=["./configure"]), + 
patch.object(self.builder, "build", return_value=["make"]), + patch.object(self.builder, "install_build", return_value=["sudo make install"]), + patch("cortex.source_builder.run_command") as mock_run, + ): + # Mock dependency installation failure + mock_run.return_value = Mock(success=False, stderr="Failed to install") + + result = self.builder.build_from_source("test-package") + assert not result.success + assert "Failed to install build dependencies" in result.error_message + + def test_build_from_source_success(self): + """Test successful build_from_source.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + + with ( + patch.object(self.builder, "fetch_source", return_value=source_dir), + patch.object(self.builder, "detect_build_system", return_value="autotools"), + patch.object(self.builder, "detect_build_dependencies", return_value=[]), + patch.object( + self.builder, + "configure_build", + return_value=[("bash configure --prefix=/usr/local", source_dir)], + ), + patch.object(self.builder, "build", return_value=["make"]), + patch.object(self.builder, "install_build", return_value=["sudo make install"]), + patch("cortex.source_builder.run_command") as mock_run, + ): + # Mock successful commands + mock_run.return_value = Mock(success=True, stdout="", stderr="") + + result = self.builder.build_from_source("test-package", use_cache=False) + # Should succeed + assert result is not None + assert result.success + assert result.package_name == "test-package" + assert len(result.install_commands) > 0 + + +class TestBuildConfig: + """Test cases for BuildConfig dataclass.""" + + def test_build_config_defaults(self): + """Test BuildConfig with defaults.""" + config = BuildConfig(package_name="test") + assert config.package_name == "test" + assert config.version is None + assert config.source_url is None + assert config.build_system == "autotools" + assert config.install_prefix == "/usr/local" + + def 
test_build_config_custom(self): + """Test BuildConfig with custom values.""" + config = BuildConfig( + package_name="python", + version="3.12.0", + source_url="https://example.com/python.tar.gz", + build_system="autotools", + configure_args=["--enable-optimizations"], + install_prefix="/opt/python", + ) + assert config.package_name == "python" + assert config.version == "3.12.0" + assert config.source_url == "https://example.com/python.tar.gz" + assert config.build_system == "autotools" + assert config.configure_args == ["--enable-optimizations"] + assert config.install_prefix == "/opt/python" + + +class TestBuildDependencies: + """Test build dependency constants.""" + + def test_build_dependencies_structure(self): + """Test that BUILD_DEPENDENCIES has expected structure.""" + assert "base" in BUILD_DEPENDENCIES + assert "autotools" in BUILD_DEPENDENCIES + assert "python" in BUILD_DEPENDENCIES + + def test_build_dependencies_base(self): + """Test base build dependencies.""" + base_deps = BUILD_DEPENDENCIES["base"] + assert "build-essential" in base_deps + assert "gcc" in base_deps + assert "make" in base_deps + + def test_build_dependencies_autotools(self): + """Test autotools build dependencies.""" + autotools_deps = BUILD_DEPENDENCIES["autotools"] + assert "autoconf" in autotools_deps + assert "automake" in autotools_deps + assert "libtool" in autotools_deps + + +if __name__ == "__main__": + pytest.main([__file__, "-v"])