| | |
| | | out_of_date: datetime | None |
| | | first_submitted: datetime |
| | | last_modified: datetime |
| | | package_base: str |
| | | depends: list[str] = field(default_factory=list) |
| | | makedepends: list[str] = field(default_factory=list) |
| | | checkdepends: list[str] = field(default_factory=list) |
| | |
| | | |
| | | @property |
| | | def git_url(self) -> str: |
| | | """Get the git clone URL for this package.""" |
| | | return f"{AUR_GIT_URL}/{self.name}.git" |
| | | """Get the git clone URL for this package (using PackageBase).""" |
| | | return f"{AUR_GIT_URL}/{self.package_base}.git" |
| | | |
| | | @property |
| | | def aur_url(self) -> str: |
| | |
| | | ), |
| | | first_submitted=datetime.fromtimestamp(data["FirstSubmitted"]), |
| | | last_modified=datetime.fromtimestamp(data["LastModified"]), |
| | | package_base=data.get("PackageBase", data["Name"]), |
| | | depends=data.get("Depends", []), |
| | | makedepends=data.get("MakeDepends", []), |
| | | checkdepends=data.get("CheckDepends", []), |
| | |
| | | from enum import Enum |
| | | from pathlib import Path |
| | | from concurrent.futures import ProcessPoolExecutor |
| | | from typing import Any, TYPE_CHECKING |
| | | if TYPE_CHECKING: |
| | | from archrepobuild.repo import RepoManager |
| | | |
| | | from archrepobuild.aur import AURClient |
| | | from archrepobuild.config import Config, PackageOverride |
| | |
| | | self, |
| | | config: Config, |
| | | aur_client: AURClient, |
| | | repo: RepoManager | None = None, |
| | | ): |
| | | """Initialize builder. |
| | | |
| | | Args: |
| | | config: Application configuration |
| | | aur_client: AUR client for package info |
| | | repo: Optional repository manager for incremental registration |
| | | """ |
| | | self.config = config |
| | | self.aur_client = aur_client |
| | | self.repo = repo |
| | | self.resolver = DependencyResolver(aur_client) |
| | | self._lock_dir = config.repository.build_dir / ".locks" |
| | | self._executor: ProcessPoolExecutor | None = None |
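| | | # The executor is created lazily; Builder is used as an async context |
| | | # manager (see the CLI hunk below), so __aenter__/__aexit__ presumably |
| | | # manage its lifecycle (not shown in this diff). |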
| | |
| | | raise ValueError(f"Package not found in AUR: {package}") |
| | | |
| | | pkg_dir.parent.mkdir(parents=True, exist_ok=True) |
| | | try: |
| | | subprocess.run( |
| | | ["git", "clone", pkg_info.git_url, str(pkg_dir)], |
| | | check=True, |
| | | capture_output=True, |
| | | text=True, |
| | | ) |
| | | except subprocess.CalledProcessError as e: |
| | | logger.error(f"Failed to clone {package} from {pkg_info.git_url}: {e.stderr}") |
| | | raise ValueError(f"Failed to clone package from AUR: {e.stderr}") |
| | | return True |
| | | |
| | | def _is_vcs_package(self, package_dir: Path) -> bool: |
| | |
| | | |
| | | return list(results) |
| | | |
| | | async def download_package(self, package: str) -> BuildResult: |
| | | """Download a package from a repository using pacman. |
| | | |
| | | Args: |
| | | package: Package name |
| | | |
| | | Returns: |
| | | BuildResult with status and artifact path |
| | | """ |
| | | start_time = datetime.now() |
| | | logger.info(f"Downloading package from repositories: {package}") |
| | | |
| | | dest_dir = self.config.repository.build_dir / "downloads" |
| | | dest_dir.mkdir(parents=True, exist_ok=True) |
| | | |
| | | try: |
| | | # pacman -Sw cannot download into an arbitrary directory, so use |
| | | # pacman -Sp to resolve the package URLs and fetch them ourselves |
| | | result = subprocess.run( |
| | | ["pacman", "-Sp", "--noconfirm", package], |
| | | capture_output=True, |
| | | text=True, |
| | | check=True, |
| | | ) |
| | | |
| | | urls = [line for line in result.stdout.strip().split("\n") if line.startswith(("http", "ftp", "file"))] |
| | | if not urls: |
| | | raise ValueError(f"Could not find download URL for package: {package}") |
| | | |
| | | artifacts: list[Path] = [] |
| | | import shutil |
| | | import aiohttp |
| | | async with aiohttp.ClientSession() as session: |
| | | for url in urls: |
| | | filename = url.split("/")[-1] |
| | | dest_path = dest_dir / filename |
| | | |
| | | if url.startswith("file://"): |
| | | # pacman -Sp resolved to the local package cache; aiohttp cannot |
| | | # fetch file:// URLs, so copy the file instead |
| | | shutil.copy2(url.removeprefix("file://"), dest_path) |
| | | artifacts.append(dest_path) |
| | | continue |
| | | |
| | | logger.debug(f"Downloading {url} to {dest_path}") |
| | | async with session.get(url) as response: |
| | | response.raise_for_status() |
| | | with open(dest_path, "wb") as f: |
| | | while True: |
| | | chunk = await response.content.read(8192) |
| | | if not chunk: |
| | | break |
| | | f.write(chunk) |
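| | | # aiohttp also offers an equivalent streaming helper for this loop: |
| | | # async for chunk in response.content.iter_chunked(8192): |
| | | #     f.write(chunk) |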
| | | artifacts.append(dest_path) |
| | | |
| | | duration = (datetime.now() - start_time).total_seconds() |
| | | logger.info(f"Successfully downloaded {package} in {duration:.1f}s") |
| | | return BuildResult( |
| | | package=package, |
| | | status=BuildStatus.SUCCESS, |
| | | duration=duration, |
| | | artifacts=artifacts, |
| | | ) |
| | | |
| | | except Exception as e: |
| | | duration = (datetime.now() - start_time).total_seconds() |
| | | logger.error(f"Failed to download {package}: {e}") |
| | | return BuildResult( |
| | | package=package, |
| | | status=BuildStatus.FAILED, |
| | | duration=duration, |
| | | error=str(e), |
| | | ) |
| | | |
| | | async def add_package(self, package: str) -> BuildResult: |
| | | """Add and build a new package with dependencies. |
| | | """Add and build (or download) a new package with dependencies. |
| | | |
| | | Args: |
| | | package: Package name |
| | |
| | | # Resolve dependencies |
| | | build_order = await self.resolver.resolve([package]) |
| | | |
| | | if package not in build_order.packages: |
| | | logger.info(f"Package {package} does not need to be built") |
| | | return BuildResult( |
| | | package=package, |
| | | status=BuildStatus.SKIPPED, |
| | | ) |
| | | # Filter build order: skip managed repo, download others, build AUR |
| | | final_results: list[BuildResult] = [] |
| | | for pkg_name in build_order: |
| | | repo = self.resolver.is_in_repos(pkg_name) |
| | | |
| | | if repo == self.config.repository.name: |
| | | logger.info(f"Package {pkg_name} already in managed repository, skipping") |
| | | if pkg_name == package: |
| | | return BuildResult(package=package, status=BuildStatus.SKIPPED) |
| | | continue |
| | | |
| | | if repo: |
| | | logger.info(f"Package {pkg_name} found in {repo}, downloading...") |
| | | result = await self.download_package(pkg_name) |
| | | else: |
| | | logger.info(f"Package {pkg_name} only in AUR, building...") |
| | | result = await self.build_package(pkg_name, force=True) |
| | | |
| | | final_results.append(result) |
| | | |
| | | if result.status == BuildStatus.SUCCESS: |
| | | if self.repo: |
| | | added = self.repo.add_packages(result) |
| | | if added: |
| | | logger.info(f"Added to repository: {', '.join(added)}") |
| | | # Refresh resolver cache to recognize the newly added packages |
| | | self.resolver._refresh_pacman_cache(sync=True) |
| | | elif result.status == BuildStatus.FAILED: |
| | | logger.error(f"Failed to process {pkg_name}, aborting") |
| | | if pkg_name == package: |
| | | return result |
| | | return BuildResult( |
| | | package=package, |
| | | status=BuildStatus.FAILED, |
| | | error=f"Dependency {pkg_name} failed: {result.error}", |
| | | ) |
| | | |
| | | # Return result for the main package |
| | | for r in final_results: |
| | | if r.package == package: |
| | | return r |
| | | |
| | | return BuildResult(package=package, status=BuildStatus.SKIPPED) |
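| | | # Hypothetical end-to-end usage (wiring assumed; the CLI hunk below |
| | | # shows the real call site): |
| | | #     async with AURClient() as aur: |
| | | #         builder = Builder(config, aur, repo=RepoManager(config)) |
| | | #         result = await builder.add_package("some-aur-package") |
| | | #         assert result.status is not BuildStatus.FAILED |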
| | | |
| | | def remove_package(self, package: str) -> bool: |
| | | """Remove a package from the build directory. |
| | |
| | | |
| | | async def _add() -> None: |
| | | async with AURClient() as aur: |
| | | repo = RepoManager(config) |
| | | async with Builder(config, aur, repo=repo) as builder: |
| | | results = [] |
| | | for package in packages: |
| | | console.print(f"[bold blue]Adding package:[/] {package}") |
| | |
| | | results.append(result) |
| | | |
| | | if result.status == BuildStatus.SUCCESS: |
| | | if len(result.artifacts) > 1: |
| | | console.print(f"[green]✓[/] {package} processed successfully ({len(result.artifacts)} artifacts registered)") |
| | | else: |
| | | console.print(f"[green]✓[/] {package} processed successfully") |
| | | elif result.status == BuildStatus.SKIPPED: |
| | | console.print(f"[yellow]⏭[/] {package} skipped (already in official repos or installed)") |
| | | console.print(f"[yellow]⏭[/] {package} skipped (already in managed repository)") |
| | | else: |
| | | console.print(f"[red]✗[/] {package} failed: {result.error}") |
| | | |
| | |
| | | resolver = DependencyResolver(aur) |
| | | |
| | | for pkg in repo.list_packages(): |
| | | if resolver.is_in_repos(pkg.name): |
| | | console.print(f"[yellow]Removing {pkg.name}[/] (now in repositories)") |
| | | builder.remove_package(pkg.name) |
| | | repo.remove_package(pkg.name) |
| | | else: |
| | |
| | | # Ignore debug packages if the regular version is in the repos or the AUR |
| | | if pkg.name.endswith("-debug"): |
| | | base_name = pkg.name.removesuffix("-debug") |
| | | if resolver.is_in_repos(base_name, include_all=all_repos) or await aur.is_available(base_name): |
| | | continue |
| | | |
| | | if resolver.is_in_repos(pkg.name, include_all=all_repos): |
| | | in_official.append(pkg.name) |
| | | elif not await aur.is_available(pkg.name): |
| | | not_in_aur.append(pkg.name) |
| | |
| | | except (subprocess.CalledProcessError, ValueError): |
| | | return 0 |
| | | |
| | | def _parse_pkg_filename(self, filename: str) -> tuple[str, str, str]: |
| | | """Parse package name, version, and architecture from a filename. |
| | | |
| | | Args: |
| | | filename: Package filename (e.g. name-version-rel-arch.pkg.tar.zst) |
| | | |
| | | Returns: |
| | | Tuple of (package_name, version-release, architecture) |
| | | """ |
| | | # Remove suffixes |
| | | stem = filename |
| | |
| | | if len(parts) == 4: |
| | | name = parts[0] |
| | | version = f"{parts[1]}-{parts[2]}" |
| | | arch = parts[3] |
| | | return name, version, arch |
| | | |
| | | return stem, "unknown" |
| | | return stem, "unknown", "unknown" |
| | | |
| | | def ensure_repo_exists(self) -> None: |
| | | """Ensure repository directory and database exist.""" |
| | |
| | | if result.returncode != 0: |
| | | logger.warning(f"Could not create empty database: {result.stderr}") |
| | | |
| | | def add_packages(self, build_result: BuildResult) -> list[str]: |
| | | """Add built packages to the repository. |
| | | |
| | | Args: |
| | | build_result: Result from package build |
| | | |
| | | Returns: |
| | | List of filenames added successfully |
| | | """ |
| | | if build_result.status != BuildStatus.SUCCESS: |
| | | logger.warning(f"Cannot add {build_result.package}: build was not successful") |
| | | return [] |
| | | |
| | | if not build_result.artifacts: |
| | | logger.warning(f"No artifacts to add for {build_result.package}") |
| | | return [] |
| | | |
| | | with self._get_repo_lock(): |
| | | self.ensure_repo_exists() |
| | | |
| | | # Group artifacts by (name, arch) and only keep the latest version |
| | | latest_artifacts: dict[tuple[str, str], Path] = {} |
| | | for artifact in build_result.artifacts: |
| | | name, version, arch = self._parse_pkg_filename(artifact.name) |
| | | key = (name, arch) |
| | | if key not in latest_artifacts: |
| | | latest_artifacts[key] = artifact |
| | | else: |
| | | _, current_best_ver, _ = self._parse_pkg_filename(latest_artifacts[key].name) |
| | | if self._vercmp(version, current_best_ver) > 0: |
| | | latest_artifacts[key] = artifact |
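| | | # _vercmp is not shown in this hunk; it is expected to follow pacman's |
| | | # vercmp(8) contract (<0, 0, >0). A minimal sketch, assuming the pacman |
| | | # CLI is installed: |
| | | #     int(subprocess.run(["vercmp", a, b], capture_output=True, |
| | | #                        text=True, check=True).stdout.strip()) |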
| | | |
| | | artifacts_to_copy = list(latest_artifacts.values()) |
| | | |
| | |
| | | |
| | | if result.returncode != 0: |
| | | logger.error(f"Failed to add packages to database: {result.stderr}") |
| | | return [] |
| | | |
| | | # Clean up old versions in repo for each package name added |
| | | for name, _arch in latest_artifacts: |
| | | self._remove_old_packages(name) |
| | | |
| | | logger.info(f"Added {len(copied_files)} package(s) to repository") |
| | | return True |
| | | added_names = [f.name for f in copied_files] |
| | | logger.info(f"Added to repository: {', '.join(added_names)}") |
| | | return added_names |
| | | |
| | | def remove_package(self, package: str) -> bool: |
| | | """Remove a package from the repository. |
| | |
| | | if f.name.endswith(".sig"): |
| | | continue |
| | | |
| | | name, version, arch = self._parse_pkg_filename(f.name) |
| | | |
| | | stat = f.stat() |
| | | packages.append(PackageInfo( |
| | |
| | | self._pacman_cache: dict[str, set[str]] = {} # repo -> packages |
| | | self._pacman_checked = False |
| | | |
| | | def _refresh_pacman_cache(self, sync: bool = False) -> None: |
| | | """Refresh cache of packages available from the configured pacman repos. |
| | | |
| | | Args: |
| | | sync: Whether to synchronize pacman databases first using sudo pacman -Sy |
| | | """ |
| | | try: |
| | | if sync: |
| | | logger.info("Synchronizing pacman databases...") |
| | | subprocess.run( |
| | | ["sudo", "pacman", "-Sy", "--noconfirm"], |
| | | capture_output=True, |
| | | text=True, |
| | | check=True, |
| | | ) |
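| | | # Assumes passwordless sudo is configured for pacman; a bare -Sy only |
| | | # refreshes the sync databases, it does not upgrade anything. |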
| | | |
| | | result = subprocess.run( |
| | | ["pacman", "-Sl"], |
| | | capture_output=True, |
| | |
| | | total_pkgs = sum(len(pkgs) for pkgs in self._pacman_cache.values()) |
| | | logger.debug(f"Cached {total_pkgs} packages from {len(self._pacman_cache)} repos") |
| | | except subprocess.CalledProcessError as e: |
| | | logger.warning(f"Failed to refresh pacman cache: {e}") |
| | | # Keep any previously cached data; otherwise fall back to an empty cache |
| | | if not self._pacman_cache: |
| | | self._pacman_cache = {} |
| | | |
| | | def is_in_repos(self, name: str, include_all: bool = True) -> str | None: |
| | | """Check if package is available in repositories. |
| | | |
| | | Args: |
| | | name: Package name (without version constraint) |
| | | include_all: If True, check all enabled repos. If False, only official ones. |
| | | |
| | | Returns: |
| | | Name of the repository where the package was found, or None if not found |
| | | """ |
| | | if not self._pacman_checked: |
| | | self._refresh_pacman_cache() |
| | |
| | | if not include_all and repo not in OFFICIAL_REPOS: |
| | | continue |
| | | if base_name in pkgs: |
| | | return repo |
| | | |
| | | # Fallback: check provides via pacman -Sp |
| | | try: |
| | | # Use pacman -Sp --noconfirm to see if pacman can resolve it |
| | | result = subprocess.run( |
| | | ["pacman", "-Sp", "--noconfirm", base_name], |
| | | capture_output=True, |
| | | text=True, |
| | | ) |
| | | if result.returncode == 0: |
| | | # Successfully resolved. Find what it resolved to. |
| | | # Output looks like: file:///var/cache/pacman/pkg/name-version... |
| | | output = result.stdout.strip().split("\n")[-1] |
| | | if output: |
| | | filename = output.split("/")[-1] |
| | | # The package name is before the version |
| | | import re |
| | | match = re.search(r"^(.*?)-[0-9].*", filename) |
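| | | # e.g. "lib32-glibc-2.38-2-x86_64.pkg.tar.zst" -> "lib32-glibc" |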
| | | if match: |
| | | resolved_name = match.group(1) |
| | | # Now check which repo this resolved_name belongs to |
| | | # We already have resolved_name in our cache if it's in a repo |
| | | for repo, pkgs in self._pacman_cache.items(): |
| | | if not include_all and repo not in OFFICIAL_REPOS: |
| | | continue |
| | | if resolved_name in pkgs: |
| | | return repo |
| | | except Exception as e: |
| | | logger.debug(f"Failed to resolve provides for {base_name}: {e}") |
| | | |
| | | return None |
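| | | # Usage sketch (repo names as in the tests below): is_in_repos("git") |
| | | # -> "extra"; is_in_repos("mypkg", include_all=False) -> None. |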
| | | |
| | | def is_installed(self, name: str) -> bool: |
| | | """Check if package is already installed. |
| | |
| | | all_deps: list[str] = [] |
| | | all_deps.extend(package.depends) |
| | | all_deps.extend(package.makedepends) |
| | | all_deps.extend(package.checkdepends) |
| | | |
| | | aur_deps: list[str] = [] |
| | | for dep in all_deps: |
| | | dep_parsed = Dependency.parse(dep) |
| | | base_name = dep_parsed.name |
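| | | # e.g. Dependency.parse("python>=3.11").name == "python"; the version |
| | | # constraint is stripped before the repo lookup (assumed per usage here). |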
| | | |
| | | # Skip if in repos or already installed |
| | | if self.is_in_repos(base_name): |
| | | continue |
| | | if self.is_installed(base_name): |
| | | continue |
| | |
| | | Raises: |
| | | ValueError: If package not found or circular dependency |
| | | """ |
| | | # Filter out packages already in repos or installed |
| | | aur_package_names = [] |
| | | for name in package_names: |
| | | if self.is_in_repos(name): |
| | | logger.info(f"Package {name} found in repositories, skipping AUR lookup") |
| | | continue |
| | | if self.is_installed(name): |
| | | logger.info(f"Package {name} is already installed, skipping AUR lookup") |
| | |
| | | deps: list[Dependency] = [] |
| | | for dep in pkg.depends: |
| | | parsed = Dependency.parse(dep, DependencyType.RUNTIME) |
| | | if not self.is_in_repos(parsed.name): |
| | | parsed.is_aur = True |
| | | deps.append(parsed) |
| | | for dep in pkg.makedepends: |
| | | parsed = Dependency.parse(dep, DependencyType.BUILD) |
| | | if not self.is_in_repos(parsed.name): |
| | | parsed.is_aur = True |
| | | deps.append(parsed) |
| | | for dep in pkg.checkdepends: |
| | | parsed = Dependency.parse(dep, DependencyType.CHECK) |
| | | if not self.is_in_repos(parsed.name): |
| | | parsed.is_aur = True |
| | | deps.append(parsed) |
| | | aur_deps[name] = deps |
| | |
| | | assert len(cycles) > 0 |
| | | |
| | | @patch("archrepobuild.resolver.subprocess.run") |
| | | def test_is_in_repos(self, mock_run, mock_aur_client): |
| | | """Test checking repos.""" |
| | | mock_run.return_value.returncode = 0 |
| | | mock_run.return_value.stdout = "core base\nextra git\ncustom mypkg\n" |
| | | |
| | | resolver = DependencyResolver(mock_aur_client) |
| | | |
| | | # Test default (include_all=True) |
| | | assert resolver.is_in_official_repos("git") |
| | | assert resolver.is_in_official_repos("mypkg") |
| | | assert resolver.is_in_official_repos("base") |
| | | assert not resolver.is_in_official_repos("yay") |
| | | assert resolver.is_in_repos("git") |
| | | assert resolver.is_in_repos("mypkg") |
| | | assert resolver.is_in_repos("base") |
| | | assert not resolver.is_in_repos("yay") |
| | | |
| | | # Test official_only (include_all=False) |
| | | assert resolver.is_in_official_repos("git", include_all=False) |
| | | assert resolver.is_in_official_repos("base", include_all=False) |
| | | assert not resolver.is_in_official_repos("mypkg", include_all=False) |
| | | assert resolver.is_in_repos("git", include_all=False) |
| | | assert resolver.is_in_repos("base", include_all=False) |
| | | assert not resolver.is_in_repos("mypkg", include_all=False) |
| | | |
| | | @pytest.mark.asyncio |
| | | async def test_resolve_includes_checkdepends(self, mock_aur_client): |
| | | """Test that resolve includes checkdepends in the build order or dependency map.""" |
| | | from archrepobuild.aur import Package |
| | | |
| | | resolver = DependencyResolver(mock_aur_client) |
| | | |
| | | # Mock AUR response |
| | | pkg = Package( |
| | | name="test-pkg", |
| | | version="1.0", |
| | | description="test", |
| | | url=None, |
| | | maintainer=None, |
| | | votes=0, |
| | | popularity=0.0, |
| | | out_of_date=None, |
| | | first_submitted=None, |
| | | last_modified=None, |
| | | package_base="test-pkg", |
| | | depends=[], |
| | | makedepends=[], |
| | | checkdepends=["check-dep"], |
| | | ) |
| | | |
| | | dep_pkg = Package( |
| | | name="check-dep", |
| | | version="1.0", |
| | | description="test", |
| | | url=None, |
| | | maintainer=None, |
| | | votes=0, |
| | | popularity=0.0, |
| | | out_of_date=None, |
| | | first_submitted=None, |
| | | last_modified=None, |
| | | package_base="check-dep", |
| | | depends=[], |
| | | makedepends=[], |
| | | checkdepends=[], |
| | | ) |
| | | |
| | | mock_aur_client.get_packages.side_effect = [[pkg], [dep_pkg]] |
| | | |
| | | # Assume neither is in repos or installed |
| | | with patch.object(resolver, "is_in_repos", return_value=None), \ |
| | | patch.object(resolver, "is_installed", return_value=False): |
| | | |
| | | build_order = await resolver.resolve(["test-pkg"]) |
| | | |
| | | assert "check-dep" in build_order.packages |
| | | assert "check-dep" in [d.name for d in build_order.aur_dependencies["test-pkg"]] |
| | | assert any(d.dep_type == DependencyType.CHECK for d in build_order.aur_dependencies["test-pkg"]) |