diff --git a/.github/cla-signers.json b/.github/cla-signers.json index 15f0e828..00d23798 100644 --- a/.github/cla-signers.json +++ b/.github/cla-signers.json @@ -1,6 +1,6 @@ { "version": "1.0", - "last_updated": "2025-12-31", + "last_updated": "2026-01-03", "individuals": [ { "name": "Mike Morgan", @@ -122,6 +122,15 @@ ], "signed_date": "2026-01-02", "cla_version": "1.0" + }, + { + "name": "Ajay Bandaru", + "github_username": "aybanda", + "emails": [ + "jsaj2024@gmail.com" + ], + "signed_date": "2026-01-03", + "cla_version": "1.0" } ], "corporations": { diff --git a/cortex/cli.py b/cortex/cli.py index 7d248002..fc26b064 100644 --- a/cortex/cli.py +++ b/cortex/cli.py @@ -543,6 +543,9 @@ def install( execute: bool = False, dry_run: bool = False, parallel: bool = False, + from_source: bool = False, + source_url: str | None = None, + version: str | None = None, ): # Validate input first is_valid, error = validate_install_request(software) @@ -577,6 +580,12 @@ def install( start_time = datetime.now() try: + # Handle --from-source flag + if from_source: + return self._install_from_source( + software, execute, dry_run, source_url, version + ) + self._print_status("🧠", "Understanding request...") interpreter = CommandInterpreter(api_key=api_key, provider=provider) @@ -1521,6 +1530,86 @@ def progress_callback(current: int, total: int, step: InstallationStep) -> None: console.print(f"Error: {result.error_message}", style="red") return 1 + def _install_from_source( + self, + package_name: str, + execute: bool, + dry_run: bool, + source_url: str | None, + version: str | None, + ) -> int: + """Handle installation from source.""" + from cortex.source_builder import SourceBuilder + + builder = SourceBuilder() + + # Parse version from package name if specified (e.g., python@3.12) + if "@" in package_name and not version: + parts = package_name.split("@") + package_name = parts[0] + version = parts[1] if len(parts) > 1 else None + + cx_print(f"Building {package_name} from source...", "info") + if version: + cx_print(f"Version: {version}", "info") + + result = builder.build_from_source( + package_name=package_name, + version=version, + source_url=source_url, + use_cache=True, + ) + + if not result.success: + self._print_error(f"Build failed: {result.error_message}") + return 1 + + if result.cached: + cx_print(f"Using cached build for {package_name}", "info") + + if dry_run: + cx_print("\nBuild commands (dry run):", "info") + for cmd in result.install_commands: + console.print(f" • {cmd}") + return 0 + + if not execute: + cx_print("\nBuild completed. 
Install commands:", "info") + for cmd in result.install_commands: + console.print(f" • {cmd}") + cx_print("Run with --execute to install", "info") + return 0 + + # Execute install commands + from cortex.coordinator import InstallationCoordinator, InstallationStep, StepStatus + + def progress_callback(current: int, total: int, step: InstallationStep) -> None: + status_emoji = "⏳" + if step.status == StepStatus.SUCCESS: + status_emoji = "✅" + elif step.status == StepStatus.FAILED: + status_emoji = "❌" + console.print(f"[{current}/{total}] {status_emoji} {step.description}") + + coordinator = InstallationCoordinator( + commands=result.install_commands, + descriptions=[f"Install {package_name}" for _ in result.install_commands], + timeout=600, + stop_on_error=True, + progress_callback=progress_callback, + ) + + install_result = coordinator.execute() + + if install_result.success: + self._print_success(f"{package_name} built and installed successfully!") + return 0 + else: + self._print_error("Installation failed") + if install_result.error_message: + console.print(f"Error: {install_result.error_message}", style="red") + return 1 + # -------------------------- @@ -1641,6 +1730,21 @@ def main(): action="store_true", help="Enable parallel execution for multi-step installs", ) + install_parser.add_argument( + "--from-source", + action="store_true", + help="Build and install from source code when binaries unavailable", + ) + install_parser.add_argument( + "--source-url", + type=str, + help="URL to source code (for --from-source)", + ) + install_parser.add_argument( + "--version", + type=str, + help="Version to build (for --from-source)", + ) # Import command - import dependencies from package manager files import_parser = subparsers.add_parser( @@ -1882,6 +1986,9 @@ def main(): execute=args.execute, dry_run=args.dry_run, parallel=args.parallel, + from_source=getattr(args, "from_source", False), + source_url=getattr(args, "source_url", None), + version=getattr(args, "version", None), ) elif args.command == "import": return cli.import_deps(args) diff --git a/cortex/source_builder.py b/cortex/source_builder.py new file mode 100644 index 00000000..60d5a54d --- /dev/null +++ b/cortex/source_builder.py @@ -0,0 +1,518 @@ +#!/usr/bin/env python3 +""" +Source Package Builder for Cortex Linux + +Builds and installs packages from source code when binaries are unavailable. +Supports common build systems: autotools, cmake, make, python setup.py, etc. 
+""" + +from __future__ import annotations + +import hashlib +import json +import logging +import os +import re +import shutil +import subprocess +import tarfile +import tempfile +import urllib.request +import zipfile +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +from cortex.branding import cx_print +from cortex.dependency_resolver import DependencyResolver +from cortex.utils.commands import CommandResult, run_command, validate_command + +logger = logging.getLogger(__name__) + +# Build cache directory +CACHE_DIR = Path.home() / ".cortex" / "build_cache" +CACHE_DIR.mkdir(parents=True, exist_ok=True) + +# Common build dependencies by category +BUILD_DEPENDENCIES = { + "base": ["build-essential", "gcc", "g++", "make", "cmake", "pkg-config"], + "autotools": ["autoconf", "automake", "libtool", "gettext"], + "python": ["python3-dev", "python3-pip", "python3-setuptools", "python3-wheel"], + "ssl": ["libssl-dev"], + "zlib": ["zlib1g-dev"], + "curl": ["libcurl4-openssl-dev"], + "xml": ["libxml2-dev"], + "sqlite": ["libsqlite3-dev"], + "readline": ["libreadline-dev"], +} + + +@dataclass +class BuildConfig: + """Configuration for a source build.""" + + package_name: str + version: str | None = None + source_url: str | None = None + source_type: str = "tarball" # tarball, git, github + build_system: str = "autotools" # autotools, cmake, make, python, custom + configure_args: list[str] | None = None + make_args: list[str] | None = None + install_prefix: str = "/usr/local" + cache_key: str | None = None + + +@dataclass +class BuildResult: + """Result of a build operation.""" + + success: bool + package_name: str + version: str | None + build_dir: str + install_commands: list[str] + error_message: str | None = None + cached: bool = False + + +class SourceBuilder: + """Builds packages from source code. + + Handles fetching source code, detecting build systems, managing build + dependencies, configuring builds, compiling, and installing packages. + Supports caching of build artifacts for faster subsequent builds. + + Attributes: + dependency_resolver: DependencyResolver instance for checking installed packages. + cache_dir: Path to build cache directory. + """ + + def __init__(self): + self.dependency_resolver = DependencyResolver() + self.cache_dir = CACHE_DIR + self.cache_dir.mkdir(parents=True, exist_ok=True) + + def _get_cache_key(self, package_name: str, version: str | None, source_url: str) -> str: + """Generate a cache key for a build.""" + key_data = f"{package_name}:{version}:{source_url}" + return hashlib.sha256(key_data.encode()).hexdigest()[:16] + + def _check_cache(self, cache_key: str) -> Path | None: + """Check if a build is cached.""" + cache_path = self.cache_dir / cache_key + if cache_path.exists() and (cache_path / "installed").exists(): + return cache_path + return None + + def _save_to_cache(self, cache_key: str, build_dir: Path, install_commands: list[str]) -> None: + """Save build artifacts to cache.""" + cache_path = self.cache_dir / cache_key + cache_path.mkdir(parents=True, exist_ok=True) + + # Save metadata + metadata = { + "build_dir": str(build_dir), + "install_commands": install_commands, + "timestamp": str(Path(build_dir).stat().st_mtime), + } + with open(cache_path / "metadata.json", "w") as f: + json.dump(metadata, f, indent=2) + + # Mark as installed + (cache_path / "installed").touch() + + def detect_build_dependencies(self, package_name: str, build_system: str) -> list[str]: + """Detect required build dependencies for a package. 
+ + Args: + package_name: Name of the package to build. + build_system: Build system type (autotools, cmake, make, python). + + Returns: + List of missing build dependency package names that need to be installed. + """ + required_deps = set() + + # Base dependencies + required_deps.update(BUILD_DEPENDENCIES["base"]) + + # Build system specific + if build_system == "autotools": + required_deps.update(BUILD_DEPENDENCIES["autotools"]) + elif build_system == "cmake": + required_deps.add("cmake") + elif build_system == "python": + required_deps.update(BUILD_DEPENDENCIES["python"]) + + # Package-specific dependencies (common patterns) + if "python" in package_name.lower(): + required_deps.update(BUILD_DEPENDENCIES["python"]) + + # Check which are missing + missing = [] + for dep in required_deps: + if not self.dependency_resolver.is_package_installed(dep): + missing.append(dep) + + return missing + + def fetch_source( + self, package_name: str, source_url: str | None, version: str | None + ) -> Path: + """Fetch source code from URL or detect from package name. + + Args: + package_name: Name of the package to fetch. + source_url: URL to source code (optional, will auto-detect if not provided). + version: Version to fetch (optional). + + Returns: + Path to extracted source directory. + + Raises: + RuntimeError: If source download or extraction fails. + ValueError: If source location cannot be detected. + """ + if source_url: + return self._fetch_from_url(source_url, package_name, version) + else: + # Try to detect source location + return self._detect_source_location(package_name, version) + + def _fetch_from_url(self, url: str, package_name: str, version: str | None) -> Path: + """Fetch source from a URL.""" + temp_dir = Path(tempfile.mkdtemp(prefix=f"cortex-build-{package_name}-")) + + try: + # Download + cx_print(f"📥 Downloading {package_name} source...", "info") + archive_path = temp_dir / "source.tar.gz" + + if url.startswith("https://github.com/"): + # GitHub release or archive + if not url.endswith((".tar.gz", ".zip")): + if version: + url = f"{url}/archive/refs/tags/v{version}.tar.gz" + else: + url = f"{url}/archive/refs/heads/main.tar.gz" + + urllib.request.urlretrieve(url, archive_path) + + # Extract + cx_print(f"📦 Extracting source...", "info") + extract_dir = temp_dir / "extracted" + extract_dir.mkdir() + + if archive_path.suffix == ".gz" or archive_path.suffixes[-2:] == [".tar", ".gz"]: + with tarfile.open(archive_path, "r:gz") as tar: + tar.extractall(extract_dir) + elif archive_path.suffix == ".zip": + with zipfile.ZipFile(archive_path, "r") as zip_ref: + zip_ref.extractall(extract_dir) + + # Find the actual source directory (usually one level deep) + extracted_items = list(extract_dir.iterdir()) + if len(extracted_items) == 1 and extracted_items[0].is_dir(): + return extracted_items[0] + else: + return extract_dir + + except Exception as e: + logger.exception(f"Failed to fetch source from {url}") + raise RuntimeError(f"Failed to fetch source: {e}") + + def _detect_source_location(self, package_name: str, version: str | None) -> Path: + """Detect source location from package name.""" + # Try common patterns + common_urls = { + "python": f"https://www.python.org/ftp/python/{version or '3.12.0'}/Python-{version or '3.12.0'}.tgz", + "nginx": "https://nginx.org/download/nginx-1.24.0.tar.gz", + "redis": f"https://download.redis.io/releases/redis-{version or '7.0'}.tar.gz", + } + + if package_name.lower() in common_urls: + return self._fetch_from_url(common_urls[package_name.lower()], 
package_name, version) + + raise ValueError( + f"Could not detect source location for {package_name}. " + "Please provide --source-url or configure source location." + ) + + def detect_build_system(self, source_dir: Path) -> str: + """ + Detect the build system used in source directory. + + Args: + source_dir: Path to source code + + Returns: + Build system type (autotools, cmake, make, python, custom) + """ + # Check for configure script (autotools) + if (source_dir / "configure").exists() or (source_dir / "configure.ac").exists(): + return "autotools" + + # Check for CMakeLists.txt + if (source_dir / "CMakeLists.txt").exists(): + return "cmake" + + # Check for Makefile + if (source_dir / "Makefile").exists(): + return "make" + + # Check for Python setup.py or pyproject.toml + if (source_dir / "setup.py").exists() or (source_dir / "pyproject.toml").exists(): + return "python" + + # Default to autotools (most common) + return "autotools" + + def configure_build(self, source_dir: Path, config: BuildConfig) -> list[str]: + """Configure the build. + + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. + + Returns: + List of configure commands to execute. + """ + commands = [] + + if config.build_system == "autotools": + configure_cmd = "./configure" + if config.configure_args: + configure_cmd += " " + " ".join(config.configure_args) + else: + # Default configure options + configure_cmd += f" --prefix={config.install_prefix}" + configure_cmd += " --enable-optimizations" + commands.append(configure_cmd) + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + build_dir.mkdir(exist_ok=True) + cmake_cmd = "cmake" + if config.configure_args: + cmake_cmd += " " + " ".join(config.configure_args) + else: + cmake_cmd += f" -DCMAKE_INSTALL_PREFIX={config.install_prefix}" + cmake_cmd += " .." + commands.append(f"cd {build_dir} && {cmake_cmd}") + + elif config.build_system == "python": + # Python packages usually don't need explicit configure + pass + + return commands + + def build(self, source_dir: Path, config: BuildConfig) -> list[str]: + """Build the package. + + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. + + Returns: + List of build commands to execute. + """ + commands = [] + + if config.build_system == "autotools" or config.build_system == "make": + make_cmd = "make" + if config.make_args: + make_cmd += " " + " ".join(config.make_args) + else: + # Use parallel builds by default + import multiprocessing + + jobs = multiprocessing.cpu_count() + make_cmd += f" -j{jobs}" + commands.append(make_cmd) + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + make_cmd = "make" + if config.make_args: + make_cmd += " " + " ".join(config.make_args) + else: + import multiprocessing + + jobs = multiprocessing.cpu_count() + make_cmd += f" -j{jobs}" + commands.append(f"cd {build_dir} && {make_cmd}") + + elif config.build_system == "python": + commands.append("python3 setup.py build") + + return commands + + def install_build(self, source_dir: Path, config: BuildConfig) -> list[str]: + """Generate install commands for built package. + + Args: + source_dir: Path to source code directory. + config: Build configuration with options and settings. + + Returns: + List of install commands to execute (requires sudo). 
+ """ + commands = [] + + if config.build_system == "autotools" or config.build_system == "make": + commands.append("sudo make install") + + elif config.build_system == "cmake": + build_dir = source_dir / "build" + commands.append(f"cd {build_dir} && sudo make install") + + elif config.build_system == "python": + commands.append("sudo python3 setup.py install") + + return commands + + def build_from_source( + self, + package_name: str, + version: str | None = None, + source_url: str | None = None, + build_system: str | None = None, + configure_args: list[str] | None = None, + make_args: list[str] | None = None, + install_prefix: str = "/usr/local", + use_cache: bool = True, + ) -> BuildResult: + """Build and install a package from source. + + Args: + package_name: Name of the package to build. + version: Version to build (optional, can be specified as package@version). + source_url: URL to source code (optional, will auto-detect if not provided). + build_system: Build system type (auto-detected if None). + configure_args: Additional configure arguments for autotools/cmake. + make_args: Additional make arguments for compilation. + install_prefix: Installation prefix (default: /usr/local). + use_cache: Whether to use build cache for faster rebuilds. + + Returns: + BuildResult with build information, success status, and install commands. + + Raises: + RuntimeError: If source download, configuration, or build fails. + """ + try: + # Check cache + cache_key = None + if use_cache and source_url: + cache_key = self._get_cache_key(package_name, version, source_url) + cached_path = self._check_cache(cache_key) + if cached_path: + cx_print(f"📦 Using cached build for {package_name}", "info") + metadata_path = cached_path / "metadata.json" + if metadata_path.exists(): + with open(metadata_path) as f: + metadata = json.load(f) + return BuildResult( + success=True, + package_name=package_name, + version=version, + build_dir=str(cached_path), + install_commands=metadata.get("install_commands", []), + cached=True, + ) + + # Fetch source + source_dir = self.fetch_source(package_name, source_url, version) + + # Detect build system if not provided + if not build_system: + build_system = self.detect_build_system(source_dir) + + # Create build config + config = BuildConfig( + package_name=package_name, + version=version, + source_url=source_url, + build_system=build_system, + configure_args=configure_args, + make_args=make_args, + install_prefix=install_prefix, + cache_key=cache_key, + ) + + # Detect and install build dependencies + cx_print(f"🔍 Checking build dependencies...", "info") + missing_deps = self.detect_build_dependencies(package_name, build_system) + + if missing_deps: + cx_print(f" Installing: {', '.join(missing_deps)}", "info") + install_cmd = f"sudo apt-get install -y {' '.join(missing_deps)}" + result = run_command(install_cmd, timeout=600) + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=[], + error_message=f"Failed to install build dependencies: {result.stderr}", + ) + else: + cx_print(f" ✓ All build dependencies satisfied", "success") + + # Configure + cx_print(f"⚙️ Configuring build...", "info") + configure_commands = self.configure_build(source_dir, config) + for cmd in configure_commands: + result = run_command(cmd, cwd=str(source_dir), timeout=300) + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + 
build_dir=str(source_dir), + install_commands=[], + error_message=f"Configure failed: {result.stderr}", + ) + + # Build + cx_print(f"🔨 Compiling (this may take a while)...", "info") + build_commands = self.build(source_dir, config) + for cmd in build_commands: + result = run_command(cmd, cwd=str(source_dir), timeout=3600) # 1 hour timeout + if not result.success: + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=[], + error_message=f"Build failed: {result.stderr}", + ) + + # Generate install commands + install_commands = self.install_build(source_dir, config) + + # Save to cache + if use_cache and cache_key: + self._save_to_cache(cache_key, source_dir, install_commands) + + return BuildResult( + success=True, + package_name=package_name, + version=version, + build_dir=str(source_dir), + install_commands=install_commands, + ) + + except Exception as e: + logger.exception(f"Build failed for {package_name}") + return BuildResult( + success=False, + package_name=package_name, + version=version, + build_dir="", + install_commands=[], + error_message=str(e), + ) + diff --git a/docs/SOURCE_BUILD.md b/docs/SOURCE_BUILD.md new file mode 100644 index 00000000..22fe02eb --- /dev/null +++ b/docs/SOURCE_BUILD.md @@ -0,0 +1,242 @@ +# Building Packages from Source + +Cortex Linux supports building and installing packages from source code when pre-built binaries are unavailable or when you need a specific version or configuration. + +## Overview + +The source build feature allows you to: +- Download source code from various sources (GitHub, tarballs, etc.) +- Automatically detect build dependencies +- Configure build options +- Compile and build packages +- Install built packages +- Cache build artifacts for reuse + +## Usage + +### Basic Usage + +Build a package from source: + +```bash +cortex install python@3.12 --from-source +``` + +### With Source URL + +Specify a custom source URL: + +```bash +cortex install mypackage --from-source --source-url https://example.com/mypackage.tar.gz +``` + +### With Version + +Specify a version to build: + +```bash +cortex install python --from-source --version 3.12.0 +``` + +### Dry Run + +Preview build commands without executing: + +```bash +cortex install python@3.12 --from-source --dry-run +``` + +### Execute Build + +Build and install: + +```bash +cortex install python@3.12 --from-source --execute +``` + +## Supported Build Systems + +Cortex automatically detects and supports the following build systems: + +### Autotools (GNU Build System) +- Detected by presence of `configure` script or `configure.ac` +- Uses `./configure` for configuration +- Uses `make` for building +- Uses `sudo make install` for installation + +### CMake +- Detected by presence of `CMakeLists.txt` +- Uses `cmake` for configuration +- Uses `make` for building +- Uses `sudo make install` for installation + +### Make +- Detected by presence of `Makefile` +- Uses `make` directly for building +- Uses `sudo make install` for installation + +### Python +- Detected by presence of `setup.py` or `pyproject.toml` +- Uses `python3 setup.py build` for building +- Uses `sudo python3 setup.py install` for installation + +## Build Dependencies + +Cortex automatically detects and installs required build dependencies: + +### Base Dependencies +- `build-essential` - Essential build tools +- `gcc`, `g++` - Compilers +- `make` - Build automation +- `cmake` - CMake build system +- `pkg-config` - Package configuration + +### 
Autotools Dependencies +- `autoconf` - Generate configuration scripts +- `automake` - Generate Makefiles +- `libtool` - Library building support +- `gettext` - Internationalization + +### Python Dependencies +- `python3-dev` - Python development headers +- `python3-pip` - Python package installer +- `python3-setuptools` - Python packaging tools +- `python3-wheel` - Python wheel format support + +### Common Library Dependencies +- `libssl-dev` - SSL/TLS development libraries +- `zlib1g-dev` - Compression library +- `libcurl4-openssl-dev` - HTTP client library +- `libxml2-dev` - XML parsing library +- `libsqlite3-dev` - SQLite database library +- `libreadline-dev` - Command line editing library + +## Build Configuration + +### Default Configuration + +By default, Cortex uses: +- Installation prefix: `/usr/local` +- Build optimizations enabled +- Parallel builds (using all CPU cores) + +### Custom Configuration + +You can customize build options by modifying the source code or using environment variables. For advanced configuration, you may need to manually edit the build scripts. + +## Build Caching + +Cortex caches build artifacts to speed up subsequent builds: + +- Cache location: `~/.cortex/build_cache/` +- Cache key: Based on package name, version, and source URL +- Cache includes: Build metadata and install commands + +### Cache Benefits + +- Faster rebuilds when source hasn't changed +- Reduced network usage +- Consistent builds across sessions + +### Clearing Cache + +To clear the build cache: + +```bash +rm -rf ~/.cortex/build_cache/ +``` + +## Example: Building Python from Source + +```bash +# Download and build Python 3.12.0 +cortex install python@3.12.0 --from-source --execute + +# The process will: +# 1. Download Python 3.12.0 source +# 2. Check for build dependencies (gcc, make, libssl-dev, etc.) +# 3. Install missing dependencies +# 4. Configure the build with optimizations +# 5. Compile Python (may take 10-15 minutes) +# 6. Install to /usr/local +``` + +## Example: Building Custom Package + +```bash +# Build from GitHub release +cortex install myapp \ + --from-source \ + --source-url https://github.com/user/myapp/archive/refs/tags/v1.0.0.tar.gz \ + --execute +``` + +## Troubleshooting + +### Build Fails with Missing Dependencies + +If build fails due to missing dependencies: + +1. Check the error message for the missing package +2. Install it manually: `sudo apt-get install ` +3. Retry the build + +### Build Takes Too Long + +- Large packages (like Python) can take 10-30 minutes to compile +- Use `--dry-run` first to preview the build +- Consider using pre-built binaries if available + +### Permission Errors + +- Build requires `sudo` for installation +- Ensure you have sudo privileges +- Check that `/usr/local` is writable + +### Source Download Fails + +- Verify the source URL is accessible +- Check network connectivity +- Try downloading manually to verify URL + +### Build System Not Detected + +If Cortex can't detect the build system: + +1. Check that standard build files exist (`configure`, `CMakeLists.txt`, `Makefile`, etc.) +2. Manually specify build system (future feature) +3. Use manual build process if needed + +## Best Practices + +1. **Always use `--dry-run` first** to preview what will happen +2. **Check build dependencies** before starting long builds +3. **Use version pinning** (`@version`) for reproducible builds +4. **Cache builds** when rebuilding the same version +5. **Monitor disk space** - builds can use significant space +6. 
**Use `--execute`** only when ready to install + +## Limitations + +- Currently supports common build systems (autotools, cmake, make, python) +- Source URL detection is limited to common patterns +- Some packages may require manual configuration +- Build time can be significant for large packages +- Requires build dependencies to be available in repositories + +## Future Enhancements + +Planned improvements: +- Support for more build systems (meson, cargo, etc.) +- Automatic source URL detection from package names +- Custom build script support +- Build option presets +- Parallel package builds +- Build verification and testing + +## See Also + +- [Installation Guide](Getting-Started.md) +- [Dependency Resolution](README_DEPENDENCIES.md) +- [CLI Reference](COMMANDS.md) + diff --git a/tests/test_source_builder.py b/tests/test_source_builder.py new file mode 100644 index 00000000..adbf32ae --- /dev/null +++ b/tests/test_source_builder.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 +""" +Tests for source_builder.py module +""" + +from __future__ import annotations + +import os +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from cortex.source_builder import ( + BUILD_DEPENDENCIES, + BuildConfig, + BuildResult, + SourceBuilder, +) + + +class TestSourceBuilder: + """Test cases for SourceBuilder class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.builder = SourceBuilder() + + def test_init(self): + """Test SourceBuilder initialization.""" + assert self.builder.dependency_resolver is not None + assert self.builder.cache_dir.exists() + + def test_get_cache_key(self): + """Test cache key generation.""" + key1 = self.builder._get_cache_key("python", "3.12.0", "https://example.com/python.tar.gz") + key2 = self.builder._get_cache_key("python", "3.12.0", "https://example.com/python.tar.gz") + key3 = self.builder._get_cache_key("python", "3.11.0", "https://example.com/python.tar.gz") + + # Same inputs should produce same key + assert key1 == key2 + # Different inputs should produce different key + assert key1 != key3 + # Key should be 16 characters + assert len(key1) == 16 + + def test_detect_build_dependencies_autotools(self): + """Test build dependency detection for autotools.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("test-package", "autotools") + assert "build-essential" in deps + assert "autoconf" in deps + assert "automake" in deps + + def test_detect_build_dependencies_cmake(self): + """Test build dependency detection for cmake.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("test-package", "cmake") + assert "build-essential" in deps + assert "cmake" in deps + + def test_detect_build_dependencies_python(self): + """Test build dependency detection for python packages.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=False + ): + deps = self.builder.detect_build_dependencies("python-test", "python") + assert "python3-dev" in deps + assert "python3-pip" in deps + + def test_detect_build_dependencies_satisfied(self): + """Test that satisfied dependencies are not included.""" + with patch.object( + self.builder.dependency_resolver, "is_package_installed", return_value=True + ): + deps = self.builder.detect_build_dependencies("test-package", 
"autotools") + assert len(deps) == 0 + + def test_detect_build_system_autotools(self): + """Test build system detection for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + assert self.builder.detect_build_system(source_dir) == "autotools" + + def test_detect_build_system_cmake(self): + """Test build system detection for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "CMakeLists.txt").touch() + assert self.builder.detect_build_system(source_dir) == "cmake" + + def test_detect_build_system_make(self): + """Test build system detection for make.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "Makefile").touch() + assert self.builder.detect_build_system(source_dir) == "make" + + def test_detect_build_system_python(self): + """Test build system detection for python.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "setup.py").touch() + assert self.builder.detect_build_system(source_dir) == "python" + + def test_detect_build_system_default(self): + """Test default build system detection.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + # No build files + assert self.builder.detect_build_system(source_dir) == "autotools" + + def test_configure_build_autotools(self): + """Test configure for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + config = BuildConfig( + package_name="test", + build_system="autotools", + install_prefix="/usr/local", + ) + commands = self.builder.configure_build(source_dir, config) + assert len(commands) > 0 + assert "./configure" in commands[0] + assert "--prefix=/usr/local" in commands[0] + + def test_configure_build_cmake(self): + """Test configure for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig( + package_name="test", + build_system="cmake", + install_prefix="/usr/local", + ) + commands = self.builder.configure_build(source_dir, config) + assert len(commands) > 0 + assert "cmake" in commands[0] + + def test_build_autotools(self): + """Test build for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="autotools") + commands = self.builder.build(source_dir, config) + assert len(commands) > 0 + assert "make" in commands[0] + + def test_build_cmake(self): + """Test build for cmake.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "build").mkdir() + config = BuildConfig(package_name="test", build_system="cmake") + commands = self.builder.build(source_dir, config) + assert len(commands) > 0 + assert "make" in commands[0] + + def test_install_build_autotools(self): + """Test install commands for autotools.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="autotools") + commands = self.builder.install_build(source_dir, config) + assert len(commands) > 0 + assert "sudo make install" in commands[0] + + def test_install_build_python(self): + """Test install commands for python.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + config = BuildConfig(package_name="test", build_system="python") + commands = 
self.builder.install_build(source_dir, config) + assert len(commands) > 0 + assert "python3 setup.py install" in commands[0] + + @patch("cortex.source_builder.run_command") + @patch("cortex.source_builder.urllib.request.urlretrieve") + @patch("cortex.source_builder.tarfile.open") + def test_fetch_from_url_tarball(self, mock_tarfile, mock_urlretrieve, mock_run_command): + """Test fetching source from URL (tarball).""" + # Mock tarfile extraction + mock_tar = MagicMock() + mock_tarfile.return_value.__enter__.return_value = mock_tar + + with tempfile.TemporaryDirectory() as tmpdir: + # Create a mock extracted directory structure + extract_dir = Path(tmpdir) / "extracted" + extract_dir.mkdir() + source_subdir = extract_dir / "source-1.0" + source_subdir.mkdir() + + # Mock the tarfile to return our structure + def mock_extractall(path): + (Path(path) / "source-1.0").mkdir(parents=True) + + mock_tar.extractall = mock_extractall + + result = self.builder._fetch_from_url( + "https://example.com/test.tar.gz", "test", "1.0" + ) + assert result is not None + + def test_build_from_source_missing_deps(self): + """Test build_from_source with missing dependencies.""" + with patch.object( + self.builder, "fetch_source", return_value=Path("/tmp/test") + ), patch.object( + self.builder, "detect_build_system", return_value="autotools" + ), patch.object( + self.builder, "detect_build_dependencies", return_value=["gcc"] + ), patch.object( + self.builder, "configure_build", return_value=["./configure"] + ), patch.object( + self.builder, "build", return_value=["make"] + ), patch.object( + self.builder, "install_build", return_value=["sudo make install"] + ), patch( + "cortex.source_builder.run_command" + ) as mock_run: + # Mock dependency installation failure + mock_run.return_value = Mock(success=False, stderr="Failed to install") + + result = self.builder.build_from_source("test-package") + assert not result.success + assert "Failed to install build dependencies" in result.error_message + + def test_build_from_source_success(self): + """Test successful build_from_source.""" + with tempfile.TemporaryDirectory() as tmpdir: + source_dir = Path(tmpdir) + (source_dir / "configure").touch() + + with patch.object( + self.builder, "fetch_source", return_value=source_dir + ), patch.object( + self.builder, "detect_build_dependencies", return_value=[] + ), patch( + "cortex.source_builder.run_command" + ) as mock_run: + # Mock successful commands + mock_run.return_value = Mock(success=True, stdout="", stderr="") + + result = self.builder.build_from_source("test-package", use_cache=False) + # Should succeed (or at least not fail on dependency check) + assert result is not None + + +class TestBuildConfig: + """Test cases for BuildConfig dataclass.""" + + def test_build_config_defaults(self): + """Test BuildConfig with defaults.""" + config = BuildConfig(package_name="test") + assert config.package_name == "test" + assert config.version is None + assert config.source_url is None + assert config.build_system == "autotools" + assert config.install_prefix == "/usr/local" + + def test_build_config_custom(self): + """Test BuildConfig with custom values.""" + config = BuildConfig( + package_name="python", + version="3.12.0", + source_url="https://example.com/python.tar.gz", + build_system="autotools", + configure_args=["--enable-optimizations"], + install_prefix="/opt/python", + ) + assert config.package_name == "python" + assert config.version == "3.12.0" + assert config.source_url == "https://example.com/python.tar.gz" + assert 
config.build_system == "autotools"
+        assert config.configure_args == ["--enable-optimizations"]
+        assert config.install_prefix == "/opt/python"
+
+
+class TestBuildDependencies:
+    """Test build dependency constants."""
+
+    def test_build_dependencies_structure(self):
+        """Test that BUILD_DEPENDENCIES has expected structure."""
+        assert "base" in BUILD_DEPENDENCIES
+        assert "autotools" in BUILD_DEPENDENCIES
+        assert "python" in BUILD_DEPENDENCIES
+
+    def test_build_dependencies_base(self):
+        """Test base build dependencies."""
+        base_deps = BUILD_DEPENDENCIES["base"]
+        assert "build-essential" in base_deps
+        assert "gcc" in base_deps
+        assert "make" in base_deps
+
+    def test_build_dependencies_autotools(self):
+        """Test autotools build dependencies."""
+        autotools_deps = BUILD_DEPENDENCIES["autotools"]
+        assert "autoconf" in autotools_deps
+        assert "automake" in autotools_deps
+        assert "libtool" in autotools_deps
+
+
+if __name__ == "__main__":
+    pytest.main([__file__, "-v"])
+
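The CLI wires `--from-source` through `SourceBuilder`, but the class is also usable directly. A minimal sketch of that programmatic path, assuming the `cortex` package is importable; the Redis URL simply follows the pattern hard-coded in `_detect_source_location`. Note that `build_from_source` performs the real download, dependency install, configure, and compile steps, and only the final install is deferred to the returned commands:

```python
# Sketch only: drives SourceBuilder the same way the --from-source CLI path does.
# Running this performs an actual fetch and compile; installation is deferred
# to the commands returned in BuildResult.install_commands.
from cortex.source_builder import SourceBuilder

builder = SourceBuilder()
result = builder.build_from_source(
    package_name="redis",
    version="7.0",
    source_url="https://download.redis.io/releases/redis-7.0.tar.gz",
    use_cache=True,
)

if result.success:
    print(f"Built {result.package_name} in {result.build_dir}")
    for cmd in result.install_commands:  # e.g. "sudo make install"
        print(cmd)
else:
    print(f"Build failed: {result.error_message}")
```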
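One detection nuance worth noting: `detect_build_system` checks for an autotools `configure`/`configure.ac` before `CMakeLists.txt`, so a source tree that ships both is built with autotools. A small sketch, assuming `SourceBuilder()` can be constructed in the environment at hand (its `__init__` instantiates `DependencyResolver` and creates `~/.cortex/build_cache`):

```python
import tempfile
from pathlib import Path

from cortex.source_builder import SourceBuilder

with tempfile.TemporaryDirectory() as tmp:
    src = Path(tmp)
    (src / "configure").touch()       # autotools marker
    (src / "CMakeLists.txt").touch()  # cmake marker
    # configure/configure.ac is checked before CMakeLists.txt, so autotools wins
    assert SourceBuilder().detect_build_system(src) == "autotools"
```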
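For anyone inspecting `~/.cortex/build_cache/` by hand, the directory names are the cache keys described in SOURCE_BUILD.md: a SHA-256 digest of `name:version:url`, truncated to 16 hex characters (a missing version is embedded as the literal string `None`). A standalone restatement of `_get_cache_key`:

```python
import hashlib


def cache_key(package_name: str, version: str | None, source_url: str) -> str:
    # Same derivation as SourceBuilder._get_cache_key.
    key_data = f"{package_name}:{version}:{source_url}"
    return hashlib.sha256(key_data.encode()).hexdigest()[:16]


print(cache_key("python", "3.12.0", "https://example.com/python.tar.gz"))
```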
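On the CLI side, `_install_from_source` accepts the version either as `--version` or as a `name@version` spec, and only splits the spec when no explicit version was given. A condensed restatement of that parsing (the helper name `split_spec` is illustrative, and `partition` is used instead of `split`, so a bare trailing `@` yields `None` rather than an empty string):

```python
def split_spec(spec: str, version: str | None = None) -> tuple[str, str | None]:
    """Mirror of the name@version handling in _install_from_source."""
    if "@" in spec and not version:
        name, _, ver = spec.partition("@")
        return name, ver or None
    return spec, version


assert split_spec("python@3.12") == ("python", "3.12")
assert split_spec("nginx") == ("nginx", None)
# An explicit --version takes precedence and the spec is left untouched:
assert split_spec("python@3.12", "3.11.0") == ("python@3.12", "3.11.0")
```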