diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 71e7d36..b1cb9f7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -36,7 +36,7 @@ jobs: shell: bash -l {0} run: | mamba install --yes --file=requirements.txt - mamba install --yes pytest flake8 flaky pip python-build setuptools_scm>=7 setuptools>=45 toml + mamba install --yes pytest flaky pip python-build setuptools_scm>=7 setuptools>=45 toml pip install -e . - name: test versions @@ -63,7 +63,7 @@ jobs: python -m pip install -v --no-deps --no-build-isolation -e . - - name: test + - name: test w/ rattler shell: bash -l {0} run: | - pytest -vvs tests + pytest -vv --durations=0 --solver=rattler tests diff --git a/.github/workflows/tests_mamba.yml b/.github/workflows/tests_mamba.yml new file mode 100644 index 0000000..9fc269a --- /dev/null +++ b/.github/workflows/tests_mamba.yml @@ -0,0 +1,69 @@ +name: tests-mamba + +on: + push: + branches: + - main + pull_request: null + +env: + PY_COLORS: "1" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + tests-mamba: + name: tests-mamba + runs-on: "ubuntu-latest" + steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v3 + with: + fetch-depth: 0 + + - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v2 + with: + channels: conda-forge,defaults + channel-priority: strict + show-channel-urls: true + miniforge-version: latest + miniforge-variant: Mambaforge + python-version: 3.11 + use-mamba: true + + - name: configure conda and install code + shell: bash -l {0} + run: | + mamba install --yes --file=requirements.txt + mamba install --yes pytest flaky pip python-build setuptools_scm>=7 setuptools>=45 toml + pip install -e . + + - name: test versions + shell: bash -el {0} + run: | + pip uninstall conda-forge-feedstock-check-solvable --yes + [[ $(python setup.py --version) != "0.0.0" ]] || exit 1 + + rm -rf dist/* + python setup.py sdist + pip install --no-deps --no-build-isolation dist/*.tar.gz + pushd .. + python -c "import conda_forge_feedstock_check_solvable; assert conda_forge_feedstock_check_solvable.__version__ != '0.0.0'" + popd + pip uninstall conda-forge-feedstock-check-solvable --yes + + rm -rf dist/* + python -m build --sdist . --outdir dist + pip install --no-deps --no-build-isolation dist/*.tar.gz + pushd .. + python -c "import conda_forge_feedstock_check_solvable; assert conda_forge_feedstock_check_solvable.__version__ != '0.0.0'" + popd + pip uninstall conda-forge-feedstock-check-solvable --yes + + python -m pip install -v --no-deps --no-build-isolation -e . 
+ + - name: test w/ mamba + shell: bash -l {0} + run: | + pytest -vv --durations=0 --solver=mamba tests diff --git a/conda_forge_feedstock_check_solvable/check_solvable.py b/conda_forge_feedstock_check_solvable/check_solvable.py index 1b9b329..3f8d528 100644 --- a/conda_forge_feedstock_check_solvable/check_solvable.py +++ b/conda_forge_feedstock_check_solvable/check_solvable.py @@ -6,9 +6,13 @@ import psutil from ruamel.yaml import YAML +import conda_forge_feedstock_check_solvable.utils from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory +from conda_forge_feedstock_check_solvable.rattler_solver import rattler_solver_factory from conda_forge_feedstock_check_solvable.utils import ( MAX_GLIBC_MINOR, + TimeoutTimer, + TimeoutTimerException, apply_pins, get_run_exports, override_env_var, @@ -23,27 +27,14 @@ ) -def _func(feedstock_dir, additional_channels, build_platform, verbosity, conn): - try: - res = _is_recipe_solvable( - feedstock_dir, - additional_channels=additional_channels, - build_platform=build_platform, - verbosity=verbosity, - ) - conn.send(res) - except Exception as e: - conn.send(e) - finally: - conn.close() - - def is_recipe_solvable( feedstock_dir, additional_channels=None, timeout=600, build_platform=None, verbosity=1, + solver="rattler", + fail_fast=False, ) -> Tuple[bool, List[str], Dict[str, bool]]: """Compute if a recipe is solvable. @@ -59,12 +50,20 @@ def is_recipe_solvable( additional_channels : list of str, optional If given, these channels will be used in addition to the main ones. timeout : int, optional - If not None, then the work will be run in a separate process and - this function will return True if the work doesn't complete before `timeout` - seconds. + If not None, then this function will return True if the solver checks don't + complete before `timeout` seconds. + build_platform : dict, optional + A dictionary mapping the target platform-arch to the platform-arch to use for + the build. If not given, the build platform-arch will be the same as + the target platform-arch. verbosity : int An int indicating the level of verbosity from 0 (no output) to 3 (gobbs of output). + solver : str + The solver to use. One of `mamba` or `rattler`. + fail_fast : bool + If True, then the function will return as soon as it finds a non-solvable + configuration. Returns ------- @@ -72,56 +71,26 @@ def is_recipe_solvable( The logical AND of the solvability of the recipe on all platforms in the CI scripts. errors : list of str - A list of errors from mamba. Empty if recipe is solvable. + A list of errors from the solver. Empty if recipe is solvable. 
solvable_by_variant : dict A lookup by variant config that shows if a particular config is solvable """ - if timeout: - from multiprocessing import Pipe, Process - - parent_conn, child_conn = Pipe() - p = Process( - target=_func, - args=( - feedstock_dir, - additional_channels, - build_platform, - verbosity, - child_conn, - ), - ) - p.start() - if parent_conn.poll(timeout): - res = parent_conn.recv() - if isinstance(res, Exception): - res = ( - False, - [repr(res)], - {}, - ) - else: - print_warning("SOLVER TIMEOUT for %s", feedstock_dir) - res = ( - True, - [], - {}, - ) - - parent_conn.close() - - p.join(0) - p.terminate() - p.kill() - try: - p.close() - except ValueError: - pass - else: + try: res = _is_recipe_solvable( feedstock_dir, additional_channels=additional_channels, build_platform=build_platform, verbosity=verbosity, + solver=solver, + timeout_timer=TimeoutTimer(timeout if timeout is not None else 6e5), + fail_fast=fail_fast, + ) + except TimeoutTimerException: + print_warning("SOLVER TIMEOUT for %s", feedstock_dir) + res = ( + True, + [], + {}, ) return res @@ -132,15 +101,20 @@ def _is_recipe_solvable( additional_channels=(), build_platform=None, verbosity=1, + solver="mamba", + timeout_timer=None, + fail_fast=False, ) -> Tuple[bool, List[str], Dict[str, bool]]: - global VERBOSITY - VERBOSITY = verbosity + conda_forge_feedstock_check_solvable.utils.VERBOSITY = verbosity + timeout_timer = timeout_timer or TimeoutTimer(6e5) build_platform = build_platform or {} additional_channels = additional_channels or [] additional_channels += [virtual_package_repodata()] + timeout_timer.raise_for_timeout() + with override_env_var("CONDA_OVERRIDE_GLIBC", "2.%d" % MAX_GLIBC_MINOR): errors = [] cbcs = sorted(glob.glob(os.path.join(feedstock_dir, ".ci_support", "*.yaml"))) @@ -165,6 +139,8 @@ def _is_recipe_solvable( solvable = True solvable_by_cbc = {} for cbc_fname in cbcs: + timeout_timer.raise_for_timeout() + # we need to extract the platform (e.g., osx, linux) and arch (e.g., 64, aarm64) # conda smithy forms a string that is # @@ -179,6 +155,7 @@ def _is_recipe_solvable( arch = "64" print_info("CHECKING RECIPE SOLVABLE: %s", os.path.basename(cbc_fname)) + _solvable, _errors = _is_recipe_solvable_on_platform( os.path.join(feedstock_dir, "recipe"), cbc_fname, @@ -188,12 +165,18 @@ def _is_recipe_solvable( build_platform.get(f"{platform}_{arch}", f"{platform}_{arch}") ), additional_channels=additional_channels, + solver_backend=solver, + timeout_timer=timeout_timer, + fail_fast=fail_fast, ) solvable = solvable and _solvable cbc_name = os.path.basename(cbc_fname).rsplit(".", maxsplit=1)[0] errors.extend([f"{cbc_name}: {e}" for e in _errors]) solvable_by_cbc[cbc_name] = _solvable + if not solvable and fail_fast: + break + return solvable, errors, solvable_by_cbc @@ -204,7 +187,12 @@ def _is_recipe_solvable_on_platform( arch, build_platform_arch=None, additional_channels=(), + solver_backend="mamba", + timeout_timer=None, + fail_fast=False, ): + timeout_timer = timeout_timer or TimeoutTimer(6e5) + # parse the channel sources from the CBC parser = YAML(typ="jinja2") parser.indent(mapping=2, sequence=4, offset=2) @@ -219,9 +207,9 @@ def _is_recipe_solvable_on_platform( # channel_sources might be part of some zip_key channel_sources.extend([c.strip() for c in source.split(",")]) else: - channel_sources = ["conda-forge", "defaults", "msys2"] + channel_sources = ["conda-forge", "defaults"] - if "msys2" not in channel_sources: + if "msys2" not in channel_sources and platform.startswith("win"): 
channel_sources.append("msys2") if additional_channels: @@ -234,12 +222,15 @@ def _is_recipe_solvable_on_platform( arch, ) + timeout_timer.raise_for_timeout() + # here we extract the conda build config in roughly the same way that # it would be used in a real build print_debug("rendering recipe with conda build") with suppress_output(): for att in range(2): + timeout_timer.raise_for_timeout() try: if att == 1: os.system("rm -f %s/conda_build_config.yaml" % recipe_dir) @@ -259,6 +250,8 @@ def _is_recipe_solvable_on_platform( else: raise e + timeout_timer.raise_for_timeout() + # now we render the meta.yaml into an actual recipe metas = conda_build.api.render( recipe_dir, @@ -272,6 +265,8 @@ def _is_recipe_solvable_on_platform( channel_urls=channel_sources, ) + timeout_timer.raise_for_timeout() + # get build info if build_platform_arch is not None: build_platform, build_arch = build_platform_arch.split("_") @@ -280,17 +275,29 @@ def _is_recipe_solvable_on_platform( # now we loop through each one and check if we can solve it # we check run and host and ignore the rest - print_debug("getting mamba solver") - with suppress_output(): - solver = mamba_solver_factory(tuple(channel_sources), f"{platform}-{arch}") - build_solver = mamba_solver_factory( - tuple(channel_sources), - f"{build_platform}-{build_arch}", - ) + print_debug("getting solver") + if solver_backend == "rattler": + solver_factory = rattler_solver_factory + elif solver_backend == "mamba": + solver_factory = mamba_solver_factory + else: + raise ValueError(f"Unknown solver backend {solver_backend}") + + solver = solver_factory(tuple(channel_sources), f"{platform}-{arch}") + timeout_timer.raise_for_timeout() + + build_solver = solver_factory( + tuple(channel_sources), + f"{build_platform}-{build_arch}", + ) + timeout_timer.raise_for_timeout() + solvable = True errors = [] outnames = [m.name() for m, _, _ in metas] for m, _, _ in metas: + timeout_timer.raise_for_timeout() + print_debug("checking recipe %s", m.name()) build_req = m.get_value("requirements/build", []) @@ -308,10 +315,17 @@ def _is_recipe_solvable_on_platform( get_run_exports=True, ignore_run_exports_from=ign_runex_from, ignore_run_exports=ign_runex, + timeout=timeout_timer.remaining + if solver_backend == "rattler" + else None, ) + timeout_timer.raise_for_timeout() + solvable = solvable and _solvable if _err is not None: errors.append(_err) + if not solvable and fail_fast: + break run_constrained = list(set(run_constrained) | build_rx["strong_constrains"]) @@ -340,10 +354,17 @@ def _is_recipe_solvable_on_platform( get_run_exports=True, ignore_run_exports_from=ign_runex_from, ignore_run_exports=ign_runex, + timeout=timeout_timer.remaining + if solver_backend == "rattler" + else None, ) + timeout_timer.raise_for_timeout() + solvable = solvable and _solvable if _err is not None: errors.append(_err) + if not solvable and fail_fast: + break if m.is_cross: if m.noarch or m.noarch_python: @@ -363,10 +384,20 @@ def _is_recipe_solvable_on_platform( if run_req: run_req = apply_pins(run_req, host_req or [], build_req or [], outnames, m) run_req = remove_reqs_by_name(run_req, outnames) - _solvable, _err, _ = solver.solve(run_req, constraints=run_constrained) + _solvable, _err, _ = solver.solve( + run_req, + constraints=run_constrained, + timeout=timeout_timer.remaining + if solver_backend == "rattler" + else None, + ) + timeout_timer.raise_for_timeout() + solvable = solvable and _solvable if _err is not None: errors.append(_err) + if not solvable and fail_fast: + break tst_req = ( 
m.get_value("test/requires", []) @@ -375,12 +406,23 @@ def _is_recipe_solvable_on_platform( ) if tst_req: tst_req = remove_reqs_by_name(tst_req, outnames) - _solvable, _err, _ = solver.solve(tst_req, constraints=run_constrained) + _solvable, _err, _ = solver.solve( + tst_req, + constraints=run_constrained, + timeout=timeout_timer.remaining + if solver_backend == "rattler" + else None, + ) + timeout_timer.raise_for_timeout() + solvable = solvable and _solvable if _err is not None: errors.append(_err) + if not solvable and fail_fast: + break print_info("RUN EXPORT CACHE STATUS: %s", get_run_exports.cache_info()) + print_info("SOLVER CACHE STATUS: %s", solver_factory.cache_info()) print_info( "SOLVER MEM USAGE: %d MB", psutil.Process().memory_info().rss // 1024**2, diff --git a/conda_forge_feedstock_check_solvable/mamba_solver.py b/conda_forge_feedstock_check_solvable/mamba_solver.py index 6be5bad..abf3de3 100644 --- a/conda_forge_feedstock_check_solvable/mamba_solver.py +++ b/conda_forge_feedstock_check_solvable/mamba_solver.py @@ -10,24 +10,31 @@ https://gist.github.com/wolfv/cd12bd4a448c77ff02368e97ffdf495a. """ +import atexit import copy +import os import pprint +import shutil +import tempfile import textwrap from typing import List, Tuple -import cachetools.func import libmambapy as api import rapidjson as json from conda.base.context import context from conda.models.match_spec import MatchSpec -from conda_forge_feedstock_check_solvable.mamba_utils import load_channels +from conda_forge_feedstock_check_solvable.mamba_utils import ( + get_cached_index, + load_channels, +) from conda_forge_feedstock_check_solvable.utils import ( DEFAULT_RUN_EXPORTS, convert_spec_to_conda_build, get_run_exports, print_debug, print_warning, + suppress_output, ) pkgs_dirs = context.pkgs_dirs @@ -41,6 +48,59 @@ api.Context().channel_priority = api.ChannelPriority.kStrict +def _make_installed_repo(): + # tmp directory in github actions + runner_tmp = os.environ.get("RUNNER_TEMP") + tmp_dir = tempfile.mkdtemp(dir=runner_tmp) + + if not runner_tmp: + # no need to bother cleaning up on CI + def clean(): + shutil.rmtree(tmp_dir, ignore_errors=True) + + atexit.register(clean) + + pth = os.path.join(tmp_dir, "installed", "repodata.json") + os.makedirs(os.path.dirname(pth), exist_ok=True) + with open(pth, "w") as f: + f.write("{}") + + return pth + + +def _get_pool(channels, platform): + with suppress_output(): + pool = api.Pool() + + repos = [] + load_channels( + pool, + channels, + repos, + platform=platform, + has_priority=True, + ) + + return pool, repos + + +def _get_solver(channels, platform, constraints): + pool, repos = _get_pool(channels, platform) + + solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)] + solver = api.Solver(pool, solver_options) + + if constraints: + # add_pin needs an "installed" Repo to store the pin info + repo = api.Repo(pool, "installed", _make_installed_repo(), "") + repo.set_installed() + + for constraint in constraints: + solver.add_pin(constraint) + + return solver, pool + + class MambaSolver: """Run the mamba solver. 
@@ -60,19 +120,6 @@ class MambaSolver: def __init__(self, channels, platform): self.channels = channels self.platform = platform - self.pool = api.Pool() - - self.repos = [] - self.index = load_channels( - self.pool, - self.channels, - self.repos, - platform=platform, - has_priority=True, - ) - for repo in self.repos: - # need set_installed for add_pin, not sure why - repo.set_installed() def solve( self, @@ -81,6 +128,7 @@ def solve( ignore_run_exports_from=None, ignore_run_exports=None, constraints=None, + timeout=None, ) -> Tuple[bool, List[str]]: """Solve given a set of specs. @@ -99,6 +147,8 @@ def solve( constraints : list, optional A list of package specs to apply as constraints to the solve. These packages are not included in the solution. + timeout : int, optional + Ignored by mamba. Returns ------- @@ -112,22 +162,22 @@ def solve( A dictionary with the weak and strong run exports for the packages. Only returned if get_run_exports is True. """ + if timeout is not None: + raise RuntimeError("The `timeout` keyword is not supported by mamba!") + ignore_run_exports_from = ignore_run_exports_from or [] ignore_run_exports = ignore_run_exports or [] - solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)] - solver = api.Solver(self.pool, solver_options) - _specs = [convert_spec_to_conda_build(s) for s in specs] _constraints = [convert_spec_to_conda_build(s) for s in constraints or []] + solver, pool = _get_solver(self.channels, self.platform, tuple(_constraints)) + print_debug( "MAMBA running solver for specs \n\n%s\nconstraints: %s\n", pprint.pformat(_specs), pprint.pformat(_constraints), ) - for constraint in _constraints: - solver.add_pin(constraint) solver.add_jobs(_specs, api.SOLVER_INSTALL) success = solver.solve() @@ -137,10 +187,12 @@ def solve( print_warning( "MAMBA failed to solve specs \n\n%s\n\nwith " "constraints \n\n%s\n\nfor channels " + "\n\n%s\n\non platform " "\n\n%s\n\nThe reported errors are:\n\n%s\n", textwrap.indent(pprint.pformat(_specs), " "), textwrap.indent(pprint.pformat(_constraints), " "), textwrap.indent(pprint.pformat(self.channels), " "), + textwrap.indent(pprint.pformat(self.platform), " "), textwrap.indent(solver.explain_problems(), " "), ) err = solver.explain_problems() @@ -148,7 +200,7 @@ def solve( run_exports = copy.deepcopy(DEFAULT_RUN_EXPORTS) else: t = api.Transaction( - self.pool, + pool, solver, PACKAGE_CACHE, ) @@ -209,6 +261,10 @@ def _get_run_exports( return run_exports -@cachetools.func.ttl_cache(maxsize=8, ttl=60) def mamba_solver_factory(channels, platform): - return MambaSolver(list(channels), platform) + return MambaSolver(tuple(channels), platform) + + +mamba_solver_factory.cache_info = get_cached_index.cache_info +mamba_solver_factory.cache_clear = get_cached_index.cache_clear +mamba_solver_factory.cache_parameters = get_cached_index.cache_parameters diff --git a/conda_forge_feedstock_check_solvable/mamba_utils.py b/conda_forge_feedstock_check_solvable/mamba_utils.py index d6b67cc..e4b5531 100644 --- a/conda_forge_feedstock_check_solvable/mamba_utils.py +++ b/conda_forge_feedstock_check_solvable/mamba_utils.py @@ -2,43 +2,37 @@ # SPDX-License-Identifier: BSD-3-Clause # Copied from mamba 1.5.2 +import copy +import os import urllib.parse from collections import OrderedDict +from functools import lru_cache import libmambapy as api from conda.base.constants import ChannelPriority from conda.base.context import context -from conda.core.index import check_allowlist from conda.gateways.connection.session import CondaHttpAuth -def get_index( 
- channel_urls=(), - prepend=True, +@lru_cache(maxsize=128) +def get_cached_index( + channel_url, platform=None, - use_local=False, - use_cache=False, - unknown=None, - prefix=None, repodata_fn="repodata.json", ): if isinstance(platform, str): platform = [platform, "noarch"] all_channels = [] - if use_local: - all_channels.append("local") - all_channels.extend(channel_urls) - if prepend: - all_channels.extend(context.channels) - check_allowlist(all_channels) + all_channels.append(channel_url) # Remove duplicates but retain order all_channels = list(OrderedDict.fromkeys(all_channels)) + orig_all_channels = copy.deepcopy(all_channels) dlist = api.DownloadTargetList() - index = [] + subdirs = [] def fixup_channel_spec(spec): at_count = spec.count("@") @@ -57,7 +51,9 @@ def fixup_channel_spec(spec): pkgs_dirs = api.MultiPackageCache(context.pkgs_dirs) api.create_cache_dir(str(pkgs_dirs.first_writable_path)) - for channel in api.get_channels(all_channels): + for orig_channel_name, channel in zip( + orig_all_channels, api.get_channels(all_channels) + ): for channel_platform, url in channel.platform_urls(with_credentials=True): full_url = CondaHttpAuth.add_binstar_token(url) @@ -66,29 +62,29 @@ def fixup_channel_spec(spec): ) needs_finalising = sd.download_and_check_targets(dlist) - index.append( + if needs_finalising: + sd.finalize_checks() + + subdirs.append( ( sd, { "platform": channel_platform, "url": url, "channel": channel, - "needs_finalising": needs_finalising, + "needs_finalising": False, + "input_channel": orig_channel_name, }, ) ) - - for sd, info in index: - if info["needs_finalising"]: - sd.finalize_checks() - dlist.add(sd) + dlist.add(sd) is_downloaded = dlist.download(api.MAMBA_DOWNLOAD_FAILFAST) if not is_downloaded: raise RuntimeError("Error downloading repodata.") - return index + return subdirs def load_channels( @@ -96,20 +92,16 @@ def load_channels( channels, repos, has_priority=None, - prepend=True, platform=None, - use_local=False, - use_cache=True, repodata_fn="repodata.json", ): - index = get_index( - channel_urls=channels, - prepend=prepend, - platform=platform, - use_local=use_local, - repodata_fn=repodata_fn, - use_cache=use_cache, - ) + index = [] + for channel in channels: + index += get_cached_index( + channel_url=channel, + platform=platform, + repodata_fn=repodata_fn, + ) if has_priority is None: has_priority = context.channel_priority in [ @@ -133,25 +125,21 @@ def load_channels( priority = channel_prio else: priority = 0 + if has_priority: - subpriority = 0 + # as done in conda-libmamba-solver + subpriority = 1 else: subpriority = subprio_index subprio_index -= 1 - if not subdir.loaded() and entry["platform"] != "noarch": - # ignore non-loaded subdir if channel is != noarch - continue - - if context.verbosity != 0 and not context.json: - print( - "Channel: {}, platform: {}, prio: {} : {}".format( - entry["channel"], entry["platform"], priority, subpriority - ) - ) - print("Cache path: ", subdir.cache_path()) + cache_path = str(subdir.cache_path()) + if os.path.exists(cache_path.replace(".json", ".solv")): + cache_path = cache_path.replace(".json", ".solv") - repo = subdir.create_repo(pool) + repo = api.Repo( + pool, entry["url"], cache_path, urllib.parse.quote(entry["url"]) + ) repo.set_priority(priority, subpriority) repos.append(repo) diff --git a/conda_forge_feedstock_check_solvable/rattler_solver.py b/conda_forge_feedstock_check_solvable/rattler_solver.py new file mode 100644 index 0000000..e91bf0e --- /dev/null +++ 
b/conda_forge_feedstock_check_solvable/rattler_solver.py @@ -0,0 +1,188 @@ +import asyncio +import copy +import datetime +import os +import pprint +import textwrap +from functools import lru_cache +from typing import List + +from rattler import Channel, MatchSpec, Platform, RepoDataRecord, solve + +from conda_forge_feedstock_check_solvable.utils import ( + DEFAULT_RUN_EXPORTS, + get_run_exports, + print_debug, + print_warning, +) + + +class RattlerSolver: + """Run the rattler solver (resolvo). + + Parameters + ---------- + channels : list of str + A list of the channels (e.g., `[conda-forge]`, etc.) + platform_arch : str + The platform to be used (e.g., `linux-64`). + + Example + ------- + >>> solver = RattlerSolver(['conda-forge'], "linux-64") + >>> solver.solve(["xtensor 0.18"]) + """ + + def __init__(self, channels, platform_arch) -> None: + self.channels = channels + _channels = [] + for c in channels: + if c == "defaults": + _channels.append("https://repo.anaconda.com/pkgs/main") + _channels.append("https://repo.anaconda.com/pkgs/r") + if platform_arch.startswith("win"): + _channels.append("https://repo.anaconda.com/pkgs/msys2") + else: + _channels.append(c) + self._channels = [Channel(c) for c in _channels] + self.platform_arch = platform_arch + self._platforms = [Platform(self.platform_arch), Platform("noarch")] + + def solve( + self, + specs: List[str], + get_run_exports: bool = False, + ignore_run_exports_from: List[str] = None, + ignore_run_exports: List[str] = None, + constraints=None, + timeout: int | None = None, + ): + """Solve given a set of specs. + + Parameters + ---------- + specs : list of str + A list of package specs. You can use `conda.models.match_spec.MatchSpec` + to get them to the right form by calling + `MatchSpec(myspec).conda_build_form()` + get_run_exports : bool, optional + If True, return run exports; otherwise do not. + ignore_run_exports_from : list, optional + A list of packages from which to ignore the run exports. + ignore_run_exports : list, optional + A list of things that should be ignored in the run exports. + constraints : list, optional + A list of package specs to apply as constraints to the solve. + These packages are not included in the solution. + timeout : int, optional + The time in seconds to wait for the solver to finish before giving up. + + Returns + ------- + solvable : bool + True if the set of specs has a solution, False otherwise. + err : str + The errors as a string. If no errors, is None. + solution : list of str + A list of concrete package specs for the env. + run_exports : dict of list of str + A dictionary with the weak and strong run exports for the packages. + Only returned if get_run_exports is True.
+ """ + ignore_run_exports_from = ignore_run_exports_from or [] + ignore_run_exports = ignore_run_exports or [] + success = False + err = None + run_exports = copy.deepcopy(DEFAULT_RUN_EXPORTS) + + try: + _specs = [MatchSpec(s) for s in specs] + _constraints = [MatchSpec(c) for c in constraints] if constraints else None + + print_debug( + "RATTLER running solver for specs \n\n%s\n", pprint.pformat(_specs) + ) + + if timeout is not None: + timeout = datetime.timedelta(seconds=timeout) + + solution = asyncio.run( + solve( + channels=self._channels, + specs=_specs, + platforms=self._platforms, + timeout=timeout, + constraints=_constraints, + ) + ) + success = True + str_solution = [ + f"{record.name.normalized} {record.version} {record.build}" + for record in solution + ] + + if get_run_exports: + run_exports = self._get_run_exports( + solution, + _specs, + [MatchSpec(igrf) for igrf in ignore_run_exports_from], + [MatchSpec(igr) for igr in ignore_run_exports], + ) + + except Exception as e: + err = str(e) + print_warning( + "RATTLER failed to solve specs \n\n%s\n\nwith " + "constraints \n\n%s\n\nfor channels " + "\n\n%s\n\non platform " + "\n\n%s\n\nThe reported errors are:\n\n%s\n", + textwrap.indent(pprint.pformat(specs), " "), + textwrap.indent(pprint.pformat(constraints), " "), + textwrap.indent(pprint.pformat(self.channels), " "), + textwrap.indent(pprint.pformat(self.platform_arch), " "), + textwrap.indent(err, " "), + ) + success = False + run_exports = copy.deepcopy(DEFAULT_RUN_EXPORTS) + str_solution = None + + if get_run_exports: + return success, err, str_solution, run_exports + else: + return success, err, str_solution + + def _get_run_exports( + self, + repodata_records: List[RepoDataRecord], + _specs: List[MatchSpec], + ignore_run_exports_from: List[MatchSpec], + ignore_run_exports: List[MatchSpec], + ): + """Given a set of repodata records, produce a + dict with the weak and strong run exports for the packages. + + We only look up export data for things explicitly listed in the original + specs. 
+ """ + names = {s.name for s in _specs} + ign_rex_from = {s.name for s in ignore_run_exports_from} + ign_rex = {s.name for s in ignore_run_exports} + run_exports = copy.deepcopy(DEFAULT_RUN_EXPORTS) + for record in repodata_records: + lt_name = record.name + if lt_name in names and lt_name not in ign_rex_from: + channel_url = record.channel + subdir = record.subdir + file_name = record.file_name + rx = get_run_exports(os.path.join(channel_url, subdir), file_name) + for key in rx: + rx[key] = {v for v in rx[key] if v not in ign_rex} + for key in DEFAULT_RUN_EXPORTS: + run_exports[key] |= rx[key] + + return run_exports + + +@lru_cache(maxsize=128) +def rattler_solver_factory(channels, platform): + return RattlerSolver(list(channels), platform) diff --git a/conda_forge_feedstock_check_solvable/utils.py b/conda_forge_feedstock_check_solvable/utils.py index 3ee9962..f1803a0 100644 --- a/conda_forge_feedstock_check_solvable/utils.py +++ b/conda_forge_feedstock_check_solvable/utils.py @@ -5,6 +5,7 @@ import os import subprocess import tempfile +import time import traceback from collections.abc import Mapping @@ -148,7 +149,7 @@ def override_env_var(name, value): @contextlib.contextmanager def suppress_output(): - if "CONDA_FORGE_FEEDSTOCK_CHECK_SOLVABLE_DEBUG" in os.environ: + if "CONDA_FORGE_FEEDSTOCK_CHECK_SOLVABLE_DEBUG" in os.environ or VERBOSITY > 2: suppress = False else: suppress = True @@ -177,6 +178,29 @@ def suppress_output(): pass +class TimeoutTimerException(Exception): + pass + + +class TimeoutTimer: + def __init__(self, timeout, name=None): + self.timeout = timeout + self.name = name + self._start = time.monotonic() + + @property + def elapsed(self): + return time.monotonic() - self._start + + @property + def remaining(self): + return self.timeout - self.elapsed + + def raise_for_timeout(self): + if self.elapsed > self.timeout: + raise TimeoutTimerException("timed out for %s" % self.name) + + def _munge_req_star(req): reqs = [] diff --git a/conda_forge_feedstock_check_solvable/virtual_packages.py b/conda_forge_feedstock_check_solvable/virtual_packages.py index 9dce91b..60887a8 100644 --- a/conda_forge_feedstock_check_solvable/virtual_packages.py +++ b/conda_forge_feedstock_check_solvable/virtual_packages.py @@ -64,7 +64,7 @@ def _write_subdir(self, subdir): out = { "info": {"subdir": subdir}, "packages": packages, - "paxkages.conda": {}, + "packages.conda": {}, "removed": [], "repodata_version": 1, } @@ -81,7 +81,9 @@ def _write_subdir(self, subdir): packages[fname] = info_dict (self.base_path / subdir).mkdir(exist_ok=True) - (self.base_path / subdir / "repodata.json").write_text(json.dumps(out)) + (self.base_path / subdir / "repodata.json").write_text( + json.dumps(out, sort_keys=True) + ) def write(self): all_subdirs = ALL_PLATFORMS.copy() diff --git a/requirements.txt b/requirements.txt index da460e4..963a84b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ python-rapidjson requests ruamel.yaml -cachetools conda conda-package-handling conda-smithy @@ -12,4 +11,5 @@ conda-forge-metadata>=0.2.0 wurlitzer requests zstandard -boltons >=23.0.0 +boltons>=23.0.0 +py-rattler>=0.6.2,<0.7a0 diff --git a/tests/conftest.py b/tests/conftest.py index f0330ea..d53613e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,9 +5,10 @@ import pytest from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory +from conda_forge_feedstock_check_solvable.rattler_solver import rattler_solver_factory FEEDSTOCK_DIR = 
os.path.join(os.path.dirname(__file__), "test_feedstock") -ALL_SOLVERS = ["mamba"] +ALL_SOLVERS = ["rattler", "mamba"] def pytest_addoption(parser): @@ -40,6 +41,8 @@ def pytest_generate_tests(metafunc): for solver in solvers: if solver == "mamba": factories.append(mamba_solver_factory) + elif solver == "rattler": + factories.append(rattler_solver_factory) else: raise ValueError(f"Unknown solver {solver}") metafunc.parametrize("solver_factory", factories) diff --git a/tests/test_check_solvable.py b/tests/test_check_solvable.py index b2c24d1..d7b9653 100644 --- a/tests/test_check_solvable.py +++ b/tests/test_check_solvable.py @@ -13,9 +13,12 @@ FakeRepoData, ) +FEEDSTOCK_DIR = os.path.join(os.path.dirname(__file__), "test_feedstock") +VERB = 1 + @flaky -def test_is_recipe_solvable_ok(feedstock_dir): +def test_is_recipe_solvable_ok(feedstock_dir, solver): recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) with open(recipe_file, "w") as fp: @@ -58,11 +61,17 @@ def test_is_recipe_solvable_ok(feedstock_dir): - conda-forge/bot """, ) - assert is_recipe_solvable(feedstock_dir)[0] + assert is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + )[0] @flaky -def test_unsolvable_for_particular_python(feedstock_dir): +def test_unsolvable_for_particular_python(feedstock_dir, solver): recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) with open(recipe_file, "w") as fp: @@ -106,7 +115,12 @@ def test_unsolvable_for_particular_python(feedstock_dir): - conda-forge/bot """, ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + ) print(solvable_by_variant) assert not solvable # we don't have galsim for this variant so this is an expected failure @@ -117,29 +131,47 @@ def test_unsolvable_for_particular_python(feedstock_dir): @flaky -def test_r_base_cross_solvable(): +def test_r_base_cross_solvable(solver): feedstock_dir = os.path.join(os.path.dirname(__file__), "r-base-feedstock") - solvable, errors, _ = is_recipe_solvable(feedstock_dir) + solvable, errors, _ = is_recipe_solvable( + feedstock_dir, solver=solver, verbosity=VERB + ) assert solvable, pprint.pformat(errors) solvable, errors, _ = is_recipe_solvable( feedstock_dir, build_platform={"osx_arm64": "osx_64"}, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, ) assert solvable, pprint.pformat(errors) @flaky -def test_xgboost_solvable(): +def test_xgboost_solvable(solver): feedstock_dir = os.path.join(os.path.dirname(__file__), "xgboost-feedstock") - solvable, errors, _ = is_recipe_solvable(feedstock_dir) + solvable, errors, _ = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) assert solvable, pprint.pformat(errors) @flaky -def test_pandas_solvable(): +def test_pandas_solvable(solver): feedstock_dir = os.path.join(os.path.dirname(__file__), "pandas-feedstock") - solvable, errors, _ = is_recipe_solvable(feedstock_dir) + solvable, errors, _ = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) assert solvable, pprint.pformat(errors) @@ -152,58 +184,82 @@ def clone_and_checkout_repo(base_path: pathlib.Path, origin_url: str, ref: str): @flaky -def test_arrow_solvable(tmp_path): 
+def test_arrow_solvable(tmp_path, solver): feedstock_dir = clone_and_checkout_repo( tmp_path, "https://github.com/conda-forge/arrow-cpp-feedstock", ref="main", ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) @flaky -def test_guiqwt_solvable(tmp_path): +def test_guiqwt_solvable(tmp_path, solver): """test for run exports as a single string in pyqt""" feedstock_dir = clone_and_checkout_repo( tmp_path, "https://github.com/conda-forge/guiqwt-feedstock", ref="main", ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) @flaky -def test_datalad_solvable(tmp_path): +def test_datalad_solvable(tmp_path, solver): """has an odd thing where it hangs""" feedstock_dir = clone_and_checkout_repo( tmp_path, "https://github.com/conda-forge/datalad-feedstock", ref="main", ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) @flaky -def test_grpcio_solvable(tmp_path): +def test_grpcio_solvable(tmp_path, solver): """grpcio has a runtime dep on openssl which has strange pinning things in it""" feedstock_dir = clone_and_checkout_repo( tmp_path, "https://github.com/conda-forge/grpcio-feedstock", ref="main", ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) @flaky -def test_cupy_solvable(tmp_path): +def test_cupy_solvable(tmp_path, solver): """grpcio has a runtime dep on openssl which has strange pinning things in it""" feedstock_dir = clone_and_checkout_repo( tmp_path, @@ -215,13 +271,19 @@ def test_cupy_solvable(tmp_path): shell=True, check=True, ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) @flaky -def test_run_exports_constrains_conflict(feedstock_dir, tmp_path_factory): +def test_run_exports_constrains_conflict(feedstock_dir, tmp_path_factory, solver): recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) @@ -268,13 +330,16 @@ def test_run_exports_constrains_conflict(feedstock_dir, tmp_path_factory): solvable, errors, solve_by_variant = is_recipe_solvable( feedstock_dir, additional_channels=[repodata.channel_url], + solver=solver, + verbosity=VERB, timeout=None, + fail_fast=True, ) assert solvable, pprint.pformat(errors) @flaky -def test_run_exports_constrains_notok(feedstock_dir, tmp_path_factory): +def test_run_exports_constrains_notok(feedstock_dir, tmp_path_factory, solver): recipe_file = 
os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) @@ -309,12 +374,17 @@ def test_run_exports_constrains_notok(feedstock_dir, tmp_path_factory): for cbc in pathlib.Path(feedstock_dir).glob(".ci_support/*.yaml"): if cbc.name != "linux_python3.8.____cpython.yaml": cbc.unlink() - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + ) assert not solvable, pprint.pformat(errors) @flaky -def test_is_recipe_solvable_notok(feedstock_dir): +def test_is_recipe_solvable_notok(feedstock_dir, solver): recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) with open(recipe_file, "w") as fp: @@ -358,11 +428,12 @@ def test_is_recipe_solvable_notok(feedstock_dir): - conda-forge/bot """, ) - assert not is_recipe_solvable(feedstock_dir)[0] + assert not is_recipe_solvable( + feedstock_dir, solver=solver, verbosity=VERB, timeout=None + )[0] -@flaky -def test_arrow_solvable_timeout(tmp_path): +def test_arrow_solvable_timeout(tmp_path, solver): feedstock_dir = clone_and_checkout_repo( tmp_path, "https://github.com/conda-forge/arrow-cpp-feedstock", @@ -373,7 +444,10 @@ def test_arrow_solvable_timeout(tmp_path): for _ in range(6): solvable, errors, solvable_by_variant = is_recipe_solvable( feedstock_dir, - timeout=10, + timeout=0.1, + solver=solver, + verbosity=VERB, + fail_fast=True, ) assert solvable assert errors == [] @@ -381,7 +455,7 @@ def test_arrow_solvable_timeout(tmp_path): @pytest.mark.xfail -def test_pillow_solvable(tmp_path): +def test_pillow_solvable(tmp_path, solver): """pillow acted up for python310""" feedstock_dir = clone_and_checkout_repo( tmp_path, @@ -444,7 +518,13 @@ def test_pillow_solvable(tmp_path): check=True, ) - solvable, errors, solvable_by_variant = is_recipe_solvable(feedstock_dir) + solvable, errors, solvable_by_variant = is_recipe_solvable( + feedstock_dir, + solver=solver, + verbosity=VERB, + timeout=None, + fail_fast=True, + ) pprint.pprint(solvable_by_variant) assert solvable, pprint.pformat(errors) assert any("python3.10" in k for k in solvable_by_variant) diff --git a/tests/test_solvers.py b/tests/test_solvers.py index 5609e77..9801416 100644 --- a/tests/test_solvers.py +++ b/tests/test_solvers.py @@ -1,7 +1,17 @@ +import inspect import pprint +import pytest from flaky import flaky +from conda_forge_feedstock_check_solvable.mamba_solver import ( + MambaSolver, + mamba_solver_factory, +) +from conda_forge_feedstock_check_solvable.rattler_solver import ( + RattlerSolver, + rattler_solver_factory, +) from conda_forge_feedstock_check_solvable.utils import apply_pins, suppress_output from conda_forge_feedstock_check_solvable.virtual_packages import ( virtual_package_repodata, @@ -215,3 +225,167 @@ def test_solvers_hang(solver_factory): ], ) assert res[0] + + +@pytest.mark.parametrize("mamba_factory", [MambaSolver, mamba_solver_factory]) +@pytest.mark.parametrize("rattler_factory", [RattlerSolver, rattler_solver_factory]) +def test_solvers_compare_output(mamba_factory, rattler_factory): + if inspect.isfunction(mamba_factory) and inspect.isfunction(rattler_factory): + mamba_factory.cache_clear() + rattler_factory.cache_clear() + + specs_linux = ( + "libutf8proc >=2.8.0,<3.0a0", + "orc >=2.0.1,<2.0.2.0a0", + "glog >=0.7.0,<0.8.0a0", + "libabseil * cxx17*", + "libgcc-ng >=12", + "libbrotlidec >=1.1.0,<1.2.0a0", + 
"bzip2 >=1.0.8,<2.0a0", + "libbrotlienc >=1.1.0,<1.2.0a0", + "libgoogle-cloud-storage >=2.24.0,<2.25.0a0", + "libstdcxx-ng >=12", + "re2", + "gflags >=2.2.2,<2.3.0a0", + "libabseil >=20240116.2,<20240117.0a0", + "libre2-11 >=2023.9.1,<2024.0a0", + "libgoogle-cloud >=2.24.0,<2.25.0a0", + "lz4-c >=1.9.3,<1.10.0a0", + "libbrotlicommon >=1.1.0,<1.2.0a0", + "aws-sdk-cpp >=1.11.329,<1.11.330.0a0", + "snappy >=1.2.0,<1.3.0a0", + "zstd >=1.5.6,<1.6.0a0", + "aws-crt-cpp >=0.26.9,<0.26.10.0a0", + "libzlib >=1.2.13,<2.0a0", + ) + constraints_linux = ("apache-arrow-proc * cpu", "arrow-cpp <0.0a0") + + specs_linux_again = ( + "glog >=0.7.0,<0.8.0a0", + "bzip2 >=1.0.8,<2.0a0", + "lz4-c >=1.9.3,<1.10.0a0", + "libbrotlidec >=1.1.0,<1.2.0a0", + "zstd >=1.5.6,<1.6.0a0", + "gflags >=2.2.2,<2.3.0a0", + "libzlib >=1.2.13,<2.0a0", + "libbrotlienc >=1.1.0,<1.2.0a0", + "re2", + "aws-sdk-cpp >=1.11.329,<1.11.330.0a0", + "libgoogle-cloud-storage >=2.24.0,<2.25.0a0", + "libgoogle-cloud >=2.24.0,<2.25.0a0", + "libstdcxx-ng >=12", + "libutf8proc >=2.8.0,<3.0a0", + "libabseil * cxx17*", + "snappy >=1.2.0,<1.3.0a0", + "__glibc >=2.17,<3.0.a0", + "orc >=2.0.1,<2.0.2.0a0", + "libgcc-ng >=12", + "libabseil >=20240116.2,<20240117.0a0", + "libbrotlicommon >=1.1.0,<1.2.0a0", + "libre2-11 >=2023.9.1,<2024.0a0", + "aws-crt-cpp >=0.26.9,<0.26.10.0a0", + ) + constraints_linux_again = ("arrow-cpp <0.0a0", "apache-arrow-proc * cuda") + + specs_win = ( + "re2", + "libabseil * cxx17*", + "vc >=14.2,<15", + "libbrotlidec >=1.1.0,<1.2.0a0", + "lz4-c >=1.9.3,<1.10.0a0", + "aws-sdk-cpp >=1.11.329,<1.11.330.0a0", + "libbrotlicommon >=1.1.0,<1.2.0a0", + "snappy >=1.2.0,<1.3.0a0", + "ucrt >=10.0.20348.0", + "orc >=2.0.1,<2.0.2.0a0", + "zstd >=1.5.6,<1.6.0a0", + "libcrc32c >=1.1.2,<1.2.0a0", + "libre2-11 >=2023.9.1,<2024.0a0", + "libbrotlienc >=1.1.0,<1.2.0a0", + "libcurl >=8.8.0,<9.0a0", + "libabseil >=20240116.2,<20240117.0a0", + "bzip2 >=1.0.8,<2.0a0", + "libgoogle-cloud >=2.24.0,<2.25.0a0", + "vc14_runtime >=14.29.30139", + "libzlib >=1.2.13,<2.0a0", + "libgoogle-cloud-storage >=2.24.0,<2.25.0a0", + "libutf8proc >=2.8.0,<3.0a0", + "aws-crt-cpp >=0.26.9,<0.26.10.0a0", + ) + constraints_win = ("arrow-cpp <0.0a0", "apache-arrow-proc * cuda") + + channels = (virtual_package_repodata(), "conda-forge", "msys2") + + platform = "linux-64" + mamba_solver = mamba_factory(channels, platform) + rattler_solver = rattler_factory(channels, platform) + mamba_solvable, mamba_err, mamba_solution = mamba_solver.solve( + specs_linux, constraints=constraints_linux + ) + rattler_solvable, rattler_err, rattler_solution = rattler_solver.solve( + specs_linux, constraints=constraints_linux + ) + assert set(mamba_solution or []) == set(rattler_solution or []) + assert mamba_solvable == rattler_solvable + + platform = "linux-64" + mamba_solver = mamba_factory(channels, platform) + rattler_solver = rattler_factory(channels, platform) + mamba_solvable, mamba_err, mamba_solution = mamba_solver.solve( + specs_linux_again, constraints=constraints_linux_again + ) + rattler_solvable, rattler_err, rattler_solution = rattler_solver.solve( + specs_linux_again, constraints=constraints_linux_again + ) + assert set(mamba_solution or []) == set(rattler_solution or []) + assert mamba_solvable == rattler_solvable + + platform = "linux-64" + mamba_solver = mamba_factory(channels, platform) + rattler_solver = rattler_factory(channels, platform) + mamba_solvable, mamba_err, mamba_solution = mamba_solver.solve( + specs_linux, constraints=constraints_linux + ) + rattler_solvable, 
rattler_err, rattler_solution = rattler_solver.solve( + specs_linux, constraints=constraints_linux + ) + assert set(mamba_solution or []) == set(rattler_solution or []) + assert mamba_solvable == rattler_solvable + + platform = "win-64" + mamba_solver = mamba_factory(channels, platform) + rattler_solver = rattler_factory(channels, platform) + mamba_solvable, mamba_err, mamba_solution = mamba_solver.solve( + specs_win, constraints=constraints_win + ) + rattler_solvable, rattler_err, rattler_solution = rattler_solver.solve( + specs_win, constraints=constraints_win + ) + assert set(mamba_solution or []) == set(rattler_solution or []) + assert mamba_solvable == rattler_solvable + + if inspect.isfunction(mamba_factory) and inspect.isfunction(rattler_factory): + assert ( + mamba_factory.cache_info().misses > rattler_factory.cache_info().misses + ), { + "mamba cache info": mamba_factory.cache_info(), + "rattler cache info": rattler_factory.cache_info(), + } + + +@pytest.mark.parametrize("mamba_factory", [MambaSolver, mamba_solver_factory]) +@pytest.mark.parametrize("rattler_factory", [RattlerSolver, rattler_solver_factory]) +def test_solvers_python(mamba_factory, rattler_factory): + channels = (virtual_package_repodata(), "conda-forge", "defaults", "msys2") + platform = "linux-64" + for _ in range(4): + mamba_solver = mamba_factory(channels, platform) + rattler_solver = rattler_factory(channels, platform) + mamba_solvable, mamba_err, mamba_solution = mamba_solver.solve( + ["python"], + ) + rattler_solvable, rattler_err, rattler_solution = rattler_solver.solve( + ["python"], + ) + assert set(mamba_solution or []) == set(rattler_solution or []) + assert mamba_solvable == rattler_solvable diff --git a/tests/test_virtual_packages.py b/tests/test_virtual_packages.py index 099c753..1841ff4 100644 --- a/tests/test_virtual_packages.py +++ b/tests/test_virtual_packages.py @@ -11,11 +11,11 @@ @flaky -def test_virtual_package(feedstock_dir, tmp_path_factory): +def test_virtual_package(feedstock_dir, tmp_path, solver): recipe_file = os.path.join(feedstock_dir, "recipe", "meta.yaml") os.makedirs(os.path.dirname(recipe_file), exist_ok=True) - with FakeRepoData(tmp_path_factory.mktemp("channel")) as repodata: + with FakeRepoData(tmp_path) as repodata: for pkg in [ FakePackage("fakehostvirtualpkgdep", depends=frozenset(["__virtual >=10"])), FakePackage("__virtual", version="10"), @@ -50,5 +50,6 @@ def test_virtual_package(feedstock_dir, tmp_path_factory): solvable, err, solve_by_variant = is_recipe_solvable( feedstock_dir, additional_channels=[repodata.channel_url], + solver=solver, ) assert solvable
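
Usage sketch (not part of the patch): the hunks above change the public entry point in check_solvable.py so that it takes a `solver` backend, a `fail_fast` flag, and an in-process `timeout` enforced by the new `TimeoutTimer` instead of a subprocess. The snippet below is a minimal illustration of how the updated `is_recipe_solvable` is expected to be called; the feedstock path is a placeholder, the keyword values simply mirror the new signature and the updated tests, and the import path assumes the package from this patch is installed.

# Illustrative sketch only -- assumes this package is installed and that
# "path/to/some-feedstock" is a checked-out feedstock (placeholder path).
from conda_forge_feedstock_check_solvable.check_solvable import is_recipe_solvable

solvable, errors, solvable_by_variant = is_recipe_solvable(
    "path/to/some-feedstock",   # placeholder feedstock directory
    additional_channels=None,   # extra channels used on top of the main ones
    timeout=600,                # seconds; enforced in-process via TimeoutTimer
    build_platform=None,        # e.g. {"osx_arm64": "osx_64"} for cross builds
    verbosity=1,                # 0 (no output) to 3 (very verbose)
    solver="rattler",           # "rattler" (default) or "mamba"
    fail_fast=True,             # stop at the first unsolvable variant
)
if not solvable:
    # each error is prefixed with the .ci_support variant config name
    print("\n".join(errors))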