WIP first pass at more refactors for rattler
beckermr committed Jun 1, 2024
1 parent 93912a6 commit 2ca9916
Showing 5 changed files with 177 additions and 170 deletions.
12 changes: 6 additions & 6 deletions conda_forge_feedstock_check_solvable/check_solvable.py
@@ -6,10 +6,7 @@
import psutil
from ruamel.yaml import YAML

from conda_forge_feedstock_check_solvable.mamba_solver import (
_mamba_factory,
virtual_package_repodata,
)
from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory
from conda_forge_feedstock_check_solvable.utils import (
MAX_GLIBC_MINOR,
apply_pins,
@@ -20,6 +17,9 @@
remove_reqs_by_name,
suppress_output,
)
from conda_forge_feedstock_check_solvable.virtual_packages import (
virtual_package_repodata,
)


def _func(feedstock_dir, additional_channels, build_platform, verbosity, conn):
@@ -283,8 +283,8 @@ def _is_recipe_solvable_on_platform(
# we check run and host and ignore the rest
print_debug("getting mamba solver")
with suppress_output():
solver = _mamba_factory(tuple(channel_sources), f"{platform}-{arch}")
build_solver = _mamba_factory(
solver = mamba_solver_factory(tuple(channel_sources), f"{platform}-{arch}")
build_solver = mamba_solver_factory(
tuple(channel_sources),
f"{build_platform}-{build_arch}",
)
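For orientation (not part of the diff): after this refactor the solver factory is exposed as mamba_solver_factory from mamba_solver, while virtual_package_repodata moves to the new virtual_packages module. A minimal sketch of how the two are wired together; the channel names and platform string are hypothetical example values.

from conda_forge_feedstock_check_solvable.mamba_solver import mamba_solver_factory
from conda_forge_feedstock_check_solvable.virtual_packages import (
    virtual_package_repodata,
)

# Build the fake virtual-package channel once; virtual_package_repodata is
# lru_cache'd and returns a file:// channel URL.
virtual_channel = virtual_package_repodata()

# Channels are passed as a tuple so the ttl_cache on the factory can hash them;
# "conda-forge" and "linux-64" here are hypothetical example values.
solver = mamba_solver_factory((virtual_channel, "conda-forge"), "linux-64")

Because the factory carries a 60-second TTL cache (maxsize 8), repeated calls with the same (channels, platform) tuple within that window reuse the same MambaSolver instance.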
161 changes: 2 additions & 159 deletions conda_forge_feedstock_check_solvable/mamba_solver.py
@@ -10,18 +10,9 @@
https://gist.github.com/wolfv/cd12bd4a448c77ff02368e97ffdf495a.
"""

import atexit
import copy
import functools
import os
import pathlib
import pprint
import subprocess
import tempfile
import time
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, FrozenSet, Iterable, List, Set, Tuple
from typing import List, Tuple

import cachetools.func
import libmambapy as api
@@ -31,12 +22,7 @@

from conda_forge_feedstock_check_solvable.mamba_utils import load_channels
from conda_forge_feedstock_check_solvable.utils import (
ALL_PLATFORMS,
DEFAULT_RUN_EXPORTS,
MAX_GLIBC_MINOR,
MINIMUM_CUDA_VERS,
MINIMUM_OSX_64_VERS,
MINIMUM_OSX_ARM64_VERS,
convert_spec_to_conda_build,
get_run_exports,
print_debug,
@@ -54,81 +40,6 @@
api.Context().channel_priority = api.ChannelPriority.kStrict


@dataclass(frozen=True)
class FakePackage:
name: str
version: str = "1.0"
build_string: str = ""
build_number: int = 0
noarch: str = ""
depends: FrozenSet[str] = field(default_factory=frozenset)
timestamp: int = field(
default_factory=lambda: int(time.mktime(time.gmtime()) * 1000),
)

def to_repodata_entry(self):
out = self.__dict__.copy()
if self.build_string:
build = f"{self.build_string}_{self.build_number}"
else:
build = f"{self.build_number}"
out["depends"] = list(out["depends"])
out["build"] = build
fname = f"{self.name}-{self.version}-{build}.tar.bz2"
return fname, out


class FakeRepoData:
def __init__(self, base_dir: pathlib.Path):
self.base_path = base_dir
self.packages_by_subdir: Dict[FakePackage, Set[str]] = defaultdict(set)

@property
def channel_url(self):
return f"file://{str(self.base_path.absolute())}"

def add_package(self, package: FakePackage, subdirs: Iterable[str] = ()):
subdirs = frozenset(subdirs)
if not subdirs:
subdirs = frozenset(["noarch"])
self.packages_by_subdir[package].update(subdirs)

def _write_subdir(self, subdir):
packages = {}
out = {"info": {"subdir": subdir}, "packages": packages}
for pkg, subdirs in self.packages_by_subdir.items():
if subdir not in subdirs:
continue
fname, info_dict = pkg.to_repodata_entry()
info_dict["subdir"] = subdir
packages[fname] = info_dict

(self.base_path / subdir).mkdir(exist_ok=True)
(self.base_path / subdir / "repodata.json").write_text(json.dumps(out))

def write(self):
all_subdirs = ALL_PLATFORMS.copy()
all_subdirs.add("noarch")
for subdirs in self.packages_by_subdir.values():
all_subdirs.update(subdirs)

for subdir in all_subdirs:
self._write_subdir(subdir)

print_debug("Wrote fake repodata to %s", self.base_path)
import glob

for filename in glob.iglob(str(self.base_path / "**"), recursive=True):
print_debug(filename)
print_debug("repo: %s", self.channel_url)

def __enter__(self):
return self

def __exit__(self, exc_type, exc_val, exc_tb):
self.write()


class MambaSolver:
"""Run the mamba solver.
@@ -296,73 +207,5 @@ def _get_run_exports(


@cachetools.func.ttl_cache(maxsize=8, ttl=60)
def _mamba_factory(channels, platform):
def mamba_solver_factory(channels, platform):
return MambaSolver(list(channels), platform)


@functools.lru_cache(maxsize=1)
def virtual_package_repodata():
# TODO: we might not want to use TemporaryDirectory
import shutil

# tmp directory in github actions
runner_tmp = os.environ.get("RUNNER_TEMP")
tmp_dir = tempfile.mkdtemp(dir=runner_tmp)

if not runner_tmp:
# no need to bother cleaning up on CI
def clean():
shutil.rmtree(tmp_dir, ignore_errors=True)

atexit.register(clean)

tmp_path = pathlib.Path(tmp_dir)
repodata = FakeRepoData(tmp_path)

# glibc
for glibc_minor in range(12, MAX_GLIBC_MINOR + 1):
repodata.add_package(FakePackage("__glibc", "2.%d" % glibc_minor))

# cuda - get from cuda-version on conda-forge
try:
cuda_pkgs = json.loads(
subprocess.check_output(
"CONDA_SUBDIR=linux-64 conda search cuda-version -c conda-forge --json",
shell=True,
text=True,
stderr=subprocess.PIPE,
)
)
cuda_vers = [pkg["version"] for pkg in cuda_pkgs["cuda-version"]]
except Exception:
cuda_vers = []
# extra hard coded list to make sure we don't miss anything
cuda_vers += MINIMUM_CUDA_VERS
cuda_vers = set(cuda_vers)
for cuda_ver in cuda_vers:
repodata.add_package(FakePackage("__cuda", cuda_ver))

for osx_ver in MINIMUM_OSX_64_VERS:
repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"])
for osx_ver in MINIMUM_OSX_ARM64_VERS:
repodata.add_package(
FakePackage("__osx", osx_ver), subdirs=["osx-arm64", "osx-64"]
)

repodata.add_package(
FakePackage("__win", "0"),
subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("win")),
)
repodata.add_package(
FakePackage("__linux", "0"),
subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("linux")),
)
repodata.add_package(
FakePackage("__unix", "0"),
subdirs=list(
subdir for subdir in ALL_PLATFORMS if not subdir.startswith("win")
),
)
repodata.write()

return repodata.channel_url
Empty file.
164 changes: 164 additions & 0 deletions conda_forge_feedstock_check_solvable/virtual_packages.py
@@ -0,0 +1,164 @@
import atexit
import functools
import os
import pathlib
import subprocess
import tempfile
import time
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, FrozenSet, Iterable, Set

import rapidjson as json

from conda_forge_feedstock_check_solvable.utils import (
ALL_PLATFORMS,
MAX_GLIBC_MINOR,
MINIMUM_CUDA_VERS,
MINIMUM_OSX_64_VERS,
MINIMUM_OSX_ARM64_VERS,
print_debug,
)


@dataclass(frozen=True)
class FakePackage:
name: str
version: str = "1.0"
build_string: str = ""
build_number: int = 0
noarch: str = ""
depends: FrozenSet[str] = field(default_factory=frozenset)
timestamp: int = field(
default_factory=lambda: int(time.mktime(time.gmtime()) * 1000),
)

def to_repodata_entry(self):
out = self.__dict__.copy()
if self.build_string:
build = f"{self.build_string}_{self.build_number}"
else:
build = f"{self.build_number}"
out["depends"] = list(out["depends"])
out["build"] = build
fname = f"{self.name}-{self.version}-{build}.tar.bz2"
return fname, out


class FakeRepoData:
def __init__(self, base_dir: pathlib.Path):
self.base_path = base_dir
self.packages_by_subdir: Dict[FakePackage, Set[str]] = defaultdict(set)

@property
def channel_url(self):
return f"file://{str(self.base_path.absolute())}"

def add_package(self, package: FakePackage, subdirs: Iterable[str] = ()):
subdirs = frozenset(subdirs)
if not subdirs:
subdirs = frozenset(["noarch"])
self.packages_by_subdir[package].update(subdirs)

def _write_subdir(self, subdir):
packages = {}
out = {"info": {"subdir": subdir}, "packages": packages}
for pkg, subdirs in self.packages_by_subdir.items():
if subdir not in subdirs:
continue
fname, info_dict = pkg.to_repodata_entry()
info_dict["subdir"] = subdir
packages[fname] = info_dict

(self.base_path / subdir).mkdir(exist_ok=True)
(self.base_path / subdir / "repodata.json").write_text(json.dumps(out))

def write(self):
all_subdirs = ALL_PLATFORMS.copy()
all_subdirs.add("noarch")
for subdirs in self.packages_by_subdir.values():
all_subdirs.update(subdirs)

for subdir in all_subdirs:
self._write_subdir(subdir)

print_debug("Wrote fake repodata to %s", self.base_path)
import glob

for filename in glob.iglob(str(self.base_path / "**"), recursive=True):
print_debug(filename)
print_debug("repo: %s", self.channel_url)

def __enter__(self):
return self

def __exit__(self, exc_type, exc_val, exc_tb):
self.write()


@functools.lru_cache(maxsize=1)
def virtual_package_repodata():
# TODO: we might not want to use TemporaryDirectory
import shutil

# tmp directory in github actions
runner_tmp = os.environ.get("RUNNER_TEMP")
tmp_dir = tempfile.mkdtemp(dir=runner_tmp)

if not runner_tmp:
# no need to bother cleaning up on CI
def clean():
shutil.rmtree(tmp_dir, ignore_errors=True)

atexit.register(clean)

tmp_path = pathlib.Path(tmp_dir)
repodata = FakeRepoData(tmp_path)

# glibc
for glibc_minor in range(12, MAX_GLIBC_MINOR + 1):
repodata.add_package(FakePackage("__glibc", "2.%d" % glibc_minor))

# cuda - get from cuda-version on conda-forge
try:
cuda_pkgs = json.loads(
subprocess.check_output(
"CONDA_SUBDIR=linux-64 conda search cuda-version -c conda-forge --json",
shell=True,
text=True,
stderr=subprocess.PIPE,
)
)
cuda_vers = [pkg["version"] for pkg in cuda_pkgs["cuda-version"]]
except Exception:
cuda_vers = []
# extra hard coded list to make sure we don't miss anything
cuda_vers += MINIMUM_CUDA_VERS
cuda_vers = set(cuda_vers)
for cuda_ver in cuda_vers:
repodata.add_package(FakePackage("__cuda", cuda_ver))

for osx_ver in MINIMUM_OSX_64_VERS:
repodata.add_package(FakePackage("__osx", osx_ver), subdirs=["osx-64"])
for osx_ver in MINIMUM_OSX_ARM64_VERS:
repodata.add_package(
FakePackage("__osx", osx_ver), subdirs=["osx-arm64", "osx-64"]
)

repodata.add_package(
FakePackage("__win", "0"),
subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("win")),
)
repodata.add_package(
FakePackage("__linux", "0"),
subdirs=list(subdir for subdir in ALL_PLATFORMS if subdir.startswith("linux")),
)
repodata.add_package(
FakePackage("__unix", "0"),
subdirs=list(
subdir for subdir in ALL_PLATFORMS if not subdir.startswith("win")
),
)
repodata.write()

return repodata.channel_url
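For reference (not part of the commit), a minimal sketch of driving the FakeRepoData helper defined above as a context manager; the temporary directory and package entries are hypothetical.

import pathlib
import tempfile

from conda_forge_feedstock_check_solvable.virtual_packages import (
    FakePackage,
    FakeRepoData,
)

with FakeRepoData(pathlib.Path(tempfile.mkdtemp())) as repodata:
    # entries default to the "noarch" subdir unless subdirs are given
    repodata.add_package(FakePackage("__cuda", "12.0"))
    repodata.add_package(FakePackage("__glibc", "2.28"), subdirs=["linux-64"])

# __exit__ calls write(), which emits repodata.json for every subdir in
# ALL_PLATFORMS plus "noarch"; channel_url is a file:// URL usable as a channel.
print(repodata.channel_url)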
