diff --git a/conda_forge_feedstock_check_solvable/check_solvable.py b/conda_forge_feedstock_check_solvable/check_solvable.py
index 455c8a8..5989e2d 100644
--- a/conda_forge_feedstock_check_solvable/check_solvable.py
+++ b/conda_forge_feedstock_check_solvable/check_solvable.py
@@ -6,10 +6,7 @@
 import psutil
 from ruamel.yaml import YAML
 
-from conda_forge_feedstock_check_solvable.mamba_solver import (
-    _mamba_factory,
-    virtual_package_repodata,
-)
+from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory
 from conda_forge_feedstock_check_solvable.utils import (
     MAX_GLIBC_MINOR,
     apply_pins,
@@ -20,6 +17,9 @@
     remove_reqs_by_name,
     suppress_output,
 )
+from conda_forge_feedstock_check_solvable.virtual_packages import (
+    virtual_package_repodata,
+)
 
 
 def _func(feedstock_dir, additional_channels, build_platform, verbosity, conn):
@@ -283,8 +283,8 @@ def _is_recipe_solvable_on_platform(
     # we check run and host and ignore the rest
     print_debug("getting mamba solver")
     with suppress_output():
-        solver = _mamba_factory(tuple(channel_sources), f"{platform}-{arch}")
-        build_solver = _mamba_factory(
+        solver = mamba_solver_factory(tuple(channel_sources), f"{platform}-{arch}")
+        build_solver = mamba_solver_factory(
             tuple(channel_sources),
             f"{build_platform}-{build_arch}",
         )
diff --git a/conda_forge_feedstock_check_solvable/mamba_solver.py b/conda_forge_feedstock_check_solvable/mamba_solver.py
index a95a6d3..93e6054 100644
--- a/conda_forge_feedstock_check_solvable/mamba_solver.py
+++ b/conda_forge_feedstock_check_solvable/mamba_solver.py
@@ -10,18 +10,9 @@
 https://gist.github.com/wolfv/cd12bd4a448c77ff02368e97ffdf495a.
 """
 
-import atexit
 import copy
-import functools
-import os
-import pathlib
 import pprint
-import subprocess
-import tempfile
-import time
-from collections import defaultdict
-from dataclasses import dataclass, field
-from typing import Dict, FrozenSet, Iterable, List, Set, Tuple
+from typing import List, Tuple
 
 import cachetools.func
 import libmambapy as api
@@ -31,12 +22,7 @@
 
 from conda_forge_feedstock_check_solvable.mamba_utils import load_channels
 from conda_forge_feedstock_check_solvable.utils import (
-    ALL_PLATFORMS,
     DEFAULT_RUN_EXPORTS,
-    MAX_GLIBC_MINOR,
-    MINIMUM_CUDA_VERS,
-    MINIMUM_OSX_64_VERS,
-    MINIMUM_OSX_ARM64_VERS,
     convert_spec_to_conda_build,
     get_run_exports,
     print_debug,
@@ -54,81 +40,6 @@
 api.Context().channel_priority = api.ChannelPriority.kStrict
 
 
-@dataclass(frozen=True)
-class FakePackage:
-    name: str
-    version: str = "1.0"
-    build_string: str = ""
-    build_number: int = 0
-    noarch: str = ""
-    depends: FrozenSet[str] = field(default_factory=frozenset)
-    timestamp: int = field(
-        default_factory=lambda: int(time.mktime(time.gmtime()) * 1000),
-    )
-
-    def to_repodata_entry(self):
-        out = self.__dict__.copy()
-        if self.build_string:
-            build = f"{self.build_string}_{self.build_number}"
-        else:
-            build = f"{self.build_number}"
-        out["depends"] = list(out["depends"])
-        out["build"] = build
-        fname = f"{self.name}-{self.version}-{build}.tar.bz2"
-        return fname, out
-
-
-class FakeRepoData:
-    def __init__(self, base_dir: pathlib.Path):
-        self.base_path = base_dir
-        self.packages_by_subdir: Dict[FakePackage, Set[str]] = defaultdict(set)
-
-    @property
-    def channel_url(self):
-        return f"file://{str(self.base_path.absolute())}"
-
-    def add_package(self, package: FakePackage, subdirs: Iterable[str] = ()):
-        subdirs = frozenset(subdirs)
-        if not subdirs:
-            subdirs = frozenset(["noarch"])
-        self.packages_by_subdir[package].update(subdirs)
-
-    def _write_subdir(self, subdir):
-        packages = {}
-        out = {"info": {"subdir": subdir}, "packages": packages}
-        for pkg, subdirs in self.packages_by_subdir.items():
-            if subdir not in subdirs:
-                continue
-            fname, info_dict = pkg.to_repodata_entry()
-            info_dict["subdir"] = subdir
-            packages[fname] = info_dict
-
-        (self.base_path / subdir).mkdir(exist_ok=True)
-        (self.base_path / subdir / "repodata.json").write_text(json.dumps(out))
-
-    def write(self):
-        all_subdirs = ALL_PLATFORMS.copy()
-        all_subdirs.add("noarch")
-        for subdirs in self.packages_by_subdir.values():
-            all_subdirs.update(subdirs)
-
-        for subdir in all_subdirs:
-            self._write_subdir(subdir)
-
-        print_debug("Wrote fake repodata to %s", self.base_path)
-        import glob
-
-        for filename in glob.iglob(str(self.base_path / "**"), recursive=True):
-            print_debug(filename)
-        print_debug("repo: %s", self.channel_url)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.write()
-
-
 class MambaSolver:
     """Run the mamba solver.
 
@@ -296,73 +207,5 @@ def _get_run_exports(
 
 
 @cachetools.func.ttl_cache(maxsize=8, ttl=60)
-def _mamba_factory(channels, platform):
+def mamba_solver_factory(channels, platform):
     return MambaSolver(list(channels), platform)
-
-
-@functools.lru_cache(maxsize=1)
-def virtual_package_repodata():
-    # TODO: we might not want to use TemporaryDirectory
-    import shutil
-
-    # tmp directory in github actions
-    runner_tmp = os.environ.get("RUNNER_TEMP")
-    tmp_dir = tempfile.mkdtemp(dir=runner_tmp)
-
-    if not runner_tmp:
-        # no need to bother cleaning up on CI
-        def clean():
-            shutil.rmtree(tmp_dir, ignore_errors=True)
-
-        atexit.register(clean)
-
-    tmp_path = pathlib.Path(tmp_dir)
-    repodata = FakeRepoData(tmp_path)
-
-    # glibc
-    for glibc_minor in range(12, MAX_GLIBC_MINOR + 1):
-        repodata.add_package(FakePackage("__glibc", "2.%d" % glibc_minor))
-
-    # cuda - get from cuda-version on conda-forge
-    try:
-        cuda_pkgs = json.loads(
-            subprocess.check_output(
-                "CONDA_SUBDIR=linux-64 conda search cuda-version -c conda-forge --json",
-                shell=True,
-                text=True,
-                stderr=subprocess.PIPE,
-            )
-        )
-        cuda_vers = [pkg["version"] for pkg in cuda_pkgs["cuda-version"]]
-    except Exception:
-        cuda_vers = []
-    # extra hard coded list to make sure we don't miss anything
-    cuda_vers += MINIMUM_CUDA_VERS
-    cuda_vers = set(cuda_vers)
-    for cuda_ver in cuda_vers:
-        repodata.add_package(FakePackage("__cuda", cuda_ver))
-
-    for osx_ver in MINIMUM_OSX_64_VERS:
-        repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"])
-    for osx_ver in MINIMUM_OSX_ARM64_VERS:
-        repodata.add_package(
-            FakePackage("__osx", osx_ver), subdirs=["osx-arm64", "osx-64"]
-        )
-
-    repodata.add_package(
-        FakePackage("__win", "0"),
-        subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("win")),
-    )
-    repodata.add_package(
-        FakePackage("__linux", "0"),
-        subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("linux")),
-    )
-    repodata.add_package(
-        FakePackage("__unix", "0"),
-        subdirs=list(
-            subdir for subdir in ALL_PLATFORMS if not subdir.startswith("win")
-        ),
-    )
-    repodata.write()
-
-    return repodata.channel_url
diff --git a/conda_forge_feedstock_check_solvable/rattler_solver.py b/conda_forge_feedstock_check_solvable/rattler_solver.py
new file mode 100644
index 0000000..e69de29
diff --git a/conda_forge_feedstock_check_solvable/virtual_packages.py b/conda_forge_feedstock_check_solvable/virtual_packages.py
new file mode 100644
index 0000000..948440c
--- /dev/null
+++ b/conda_forge_feedstock_check_solvable/virtual_packages.py
@@ -0,0 +1,164 @@
+import atexit
+import functools
+import os
+import pathlib
+import subprocess
+import tempfile
+import time
+from collections import defaultdict
+from dataclasses import dataclass, field
+from typing import Dict, FrozenSet, Iterable, Set
+
+import rapidjson as json
+
+from conda_forge_feedstock_check_solvable.utils import (
+    ALL_PLATFORMS,
+    MAX_GLIBC_MINOR,
+    MINIMUM_CUDA_VERS,
+    MINIMUM_OSX_64_VERS,
+    MINIMUM_OSX_ARM64_VERS,
+    print_debug,
+)
+
+
+@dataclass(frozen=True)
+class FakePackage:
+    name: str
+    version: str = "1.0"
+    build_string: str = ""
+    build_number: int = 0
+    noarch: str = ""
+    depends: FrozenSet[str] = field(default_factory=frozenset)
+    timestamp: int = field(
+        default_factory=lambda: int(time.mktime(time.gmtime()) * 1000),
+    )
+
+    def to_repodata_entry(self):
+        out = self.__dict__.copy()
+        if self.build_string:
+            build = f"{self.build_string}_{self.build_number}"
+        else:
+            build = f"{self.build_number}"
+        out["depends"] = list(out["depends"])
+        out["build"] = build
+        fname = f"{self.name}-{self.version}-{build}.tar.bz2"
+        return fname, out
+
+
+class FakeRepoData:
+    def __init__(self, base_dir: pathlib.Path):
+        self.base_path = base_dir
+        self.packages_by_subdir: Dict[FakePackage, Set[str]] = defaultdict(set)
+
+    @property
+    def channel_url(self):
+        return f"file://{str(self.base_path.absolute())}"
+
+    def add_package(self, package: FakePackage, subdirs: Iterable[str] = ()):
+        subdirs = frozenset(subdirs)
+        if not subdirs:
+            subdirs = frozenset(["noarch"])
+        self.packages_by_subdir[package].update(subdirs)
+
+    def _write_subdir(self, subdir):
+        packages = {}
+        out = {"info": {"subdir": subdir}, "packages": packages}
+        for pkg, subdirs in self.packages_by_subdir.items():
+            if subdir not in subdirs:
+                continue
+            fname, info_dict = pkg.to_repodata_entry()
+            info_dict["subdir"] = subdir
+            packages[fname] = info_dict
+
+        (self.base_path / subdir).mkdir(exist_ok=True)
+        (self.base_path / subdir / "repodata.json").write_text(json.dumps(out))
+
+    def write(self):
+        all_subdirs = ALL_PLATFORMS.copy()
+        all_subdirs.add("noarch")
+        for subdirs in self.packages_by_subdir.values():
+            all_subdirs.update(subdirs)
+
+        for subdir in all_subdirs:
+            self._write_subdir(subdir)
+
+        print_debug("Wrote fake repodata to %s", self.base_path)
+        import glob
+
+        for filename in glob.iglob(str(self.base_path / "**"), recursive=True):
+            print_debug(filename)
+        print_debug("repo: %s", self.channel_url)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.write()
+
+
+@functools.lru_cache(maxsize=1)
+def virtual_package_repodata():
+    # TODO: we might not want to use TemporaryDirectory
+    import shutil
+
+    # tmp directory in github actions
+    runner_tmp = os.environ.get("RUNNER_TEMP")
+    tmp_dir = tempfile.mkdtemp(dir=runner_tmp)
+
+    if not runner_tmp:
+        # no need to bother cleaning up on CI
+        def clean():
+            shutil.rmtree(tmp_dir, ignore_errors=True)
+
+        atexit.register(clean)
+
+    tmp_path = pathlib.Path(tmp_dir)
+    repodata = FakeRepoData(tmp_path)
+
+    # glibc
+    for glibc_minor in range(12, MAX_GLIBC_MINOR + 1):
+        repodata.add_package(FakePackage("__glibc", "2.%d" % glibc_minor))
+
+    # cuda - get from cuda-version on conda-forge
+    try:
+        cuda_pkgs = json.loads(
+            subprocess.check_output(
+                "CONDA_SUBDIR=linux-64 conda search cuda-version -c conda-forge --json",
+                shell=True,
+                text=True,
+                stderr=subprocess.PIPE,
+            )
+        )
+        cuda_vers = [pkg["version"] for pkg in cuda_pkgs["cuda-version"]]
+    except Exception:
+        cuda_vers = []
+    # extra hard coded list to make sure we don't miss anything
+    cuda_vers += MINIMUM_CUDA_VERS
+    cuda_vers = set(cuda_vers)
+    for cuda_ver in cuda_vers:
+        repodata.add_package(FakePackage("__cuda", cuda_ver))
+
+    for osx_ver in MINIMUM_OSX_64_VERS:
+        repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"])
+    for osx_ver in MINIMUM_OSX_ARM64_VERS:
+        repodata.add_package(
+            FakePackage("__osx", osx_ver), subdirs=["osx-arm64", "osx-64"]
+        )
+
+    repodata.add_package(
+        FakePackage("__win", "0"),
+        subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("win")),
+    )
+    repodata.add_package(
+        FakePackage("__linux", "0"),
+        subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("linux")),
+    )
+    repodata.add_package(
+        FakePackage("__unix", "0"),
+        subdirs=list(
+            subdir for subdir in ALL_PLATFORMS if not subdir.startswith("win")
+        ),
+    )
+    repodata.write()
+
+    return repodata.channel_url
diff --git a/tests/test_mamba_solvable.py b/tests/test_mamba_solvable.py
index 745ab1f..f55cd79 100644
--- a/tests/test_mamba_solvable.py
+++ b/tests/test_mamba_solvable.py
@@ -13,7 +13,7 @@
     FakePackage,
     FakeRepoData,
     MambaSolver,
-    _mamba_factory,
+    mamba_solver_factory,
     virtual_package_repodata,
 )
 from conda_forge_feedstock_check_solvable.utils import apply_pins, suppress_output
@@ -90,7 +90,7 @@ def test_mamba_solver_apply_pins(tmp_path):
         config=config,
     )
 
-    solver = _mamba_factory(("conda-forge", "defaults"), "linux-64")
+    solver = mamba_solver_factory(("conda-forge", "defaults"), "linux-64")
 
     metas = conda_build.api.render(
         str(tmp_path),
@@ -127,7 +127,7 @@ def test_mamba_solver_apply_pins(tmp_path):
 @flaky
 def test_mamba_solver_constraints():
     with suppress_output():
-        solver = _mamba_factory(("conda-forge",), "osx-64")
+        solver = mamba_solver_factory(("conda-forge",), "osx-64")
     solvable, err, solution = solver.solve(
         ["simplejson"], constraints=["python=3.10", "zeromq=4.2"]
     )
@@ -565,7 +565,7 @@ def test_virtual_package(feedstock_dir, tmp_path_factory):
 @flaky
 def test_mamba_solver_hangs():
     with suppress_output():
-        solver = _mamba_factory(("conda-forge", "defaults"), "osx-64")
+        solver = mamba_solver_factory(("conda-forge", "defaults"), "osx-64")
     res = solver.solve(
         [
             "pytest",
@@ -601,7 +601,7 @@ def test_mamba_solver_hangs():
     assert res[0]
 
     with suppress_output():
-        solver = _mamba_factory(("conda-forge", "defaults"), "linux-64")
+        solver = mamba_solver_factory(("conda-forge", "defaults"), "linux-64")
     res = solver.solve(
         [
             "gdal >=2.1.0",
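
For context, a minimal usage sketch (not part of the patch) of the two entry points this diff reshuffles: the renamed `mamba_solver_factory` and the relocated `virtual_package_repodata`. It assumes the `file://` channel URL returned by `virtual_package_repodata()` can simply be passed to the factory alongside real channels; the call pattern for `solve()` follows the tests above.

```python
# Hedged sketch only: exercises the helpers as named after this refactor.
from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory
from conda_forge_feedstock_check_solvable.virtual_packages import (
    virtual_package_repodata,
)

# Fake local channel providing __glibc/__cuda/__osx/__win/__linux/__unix
# virtual packages (a file:// URL pointing at generated repodata.json files).
virtual_channel = virtual_package_repodata()

# The factory is TTL-cached, so channels are passed as a hashable tuple.
solver = mamba_solver_factory((virtual_channel, "conda-forge"), "linux-64")

# As in the tests, solve() returns (solvable, errors, solution).
solvable, errors, solution = solver.solve(["numpy", "python=3.10"])
print(solvable, errors)
```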