From 4f52badeffff177026e8fa68a538af5908d00b4a Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Wed, 29 May 2024 12:58:15 +0200 Subject: [PATCH 1/6] WIP: requirements manager --- .pre-commit-config.yaml | 6 +- noxfile.py | 21 +- pyproject.toml | 38 ++-- requirements/base.in | 3 + requirements/base.txt | 91 +++++++++ requirements/cuda12.in | 4 + requirements/cuda12.txt | 74 +++++++ requirements/dev-cuda12.in | 2 + requirements/dev-cuda12.txt | 12 ++ requirements-dev.txt => requirements/dev.in | 11 +- requirements/dev.txt | 95 +++++++++ requirements/sync_tool.py | 209 ++++++++++++++++++++ 12 files changed, 534 insertions(+), 32 deletions(-) create mode 100644 requirements/base.in create mode 100644 requirements/base.txt create mode 100644 requirements/cuda12.in create mode 100644 requirements/cuda12.txt create mode 100644 requirements/dev-cuda12.in create mode 100644 requirements/dev-cuda12.txt rename requirements-dev.txt => requirements/dev.in (60%) create mode 100644 requirements/dev.txt create mode 100644 requirements/sync_tool.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 13841be..59402ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -48,11 +48,10 @@ repos: - id: mixed-line-ending - id: name-tests-test args: ["--pytest-test-first"] - - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.6 + rev: v0.4.8 hooks: - id: ruff args: ["--fix", "--show-fixes", "--preview"] @@ -68,7 +67,8 @@ repos: - dace==0.15.1 - jax[cpu]==0.4.28 - numpy==1.26.4 - - pytest==8.2.1 + - pytest==8.2.2 + - typing-extensions==4.12.2 - repo: https://github.com/codespell-project/codespell rev: "v2.2.6" hooks: diff --git a/noxfile.py b/noxfile.py index 3772f2d..19c45f4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -10,7 +10,7 @@ DIR = Path(__file__).parent.resolve() nox.needs_version = ">=2024.3.2" -nox.options.sessions = ["lint", "pylint", "tests"] +nox.options.sessions = ["lint", "tests"] nox.options.default_venv_backend = "uv|virtualenv" @@ -101,3 +101,22 @@ def build(session: nox.Session) -> None: session.install("build") session.run("python", "-m", "build") + + +@nox.session +def requirements(session: nox.Session) -> None: + """ + Freeze dependencies from input specs and synchronize across tools. 
+ """ + requirements_path = DIR / "requirements" + req_sync_tool = requirements_path / "sync_tool.py" + + dependencies = ["pre-commit"] + nox.project.load_toml(req_sync_tool)["dependencies"] + session.install(*dependencies) + session.install("pip-compile-multi") + + session.run("python", req_sync_tool, "pull") + session.run("pip-compile-multi", "-g", "--skip-constraints") + session.run("python", req_sync_tool, "push") + + session.run("pre-commit", "run", "--files", ".pre-commit-config.yaml", success_codes=[0, 1]) diff --git a/pyproject.toml b/pyproject.toml index 3556e8a..175a679 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,9 +3,7 @@ build-backend = "setuptools.build_meta" requires = ["setuptools>=61"] [project] -authors = [ - {name = "ETH Zurich", email = "gridtools@cscs.ch"} -] +authors = [{name = "ETH Zurich", email = "gridtools@cscs.ch"}] classifiers = [ "Development Status :: 1 - Planning", "Intended Audience :: Science/Research", @@ -21,11 +19,7 @@ classifiers = [ "Topic :: Scientific/Engineering", "Typing :: Typed" ] -dependencies = [ - "dace>=0.15", - "jax[cpu]>=0.4.24", - "numpy>=1.26.0" -] +dependencies = ["dace>=0.15", "jax[cpu]>=0.4.24", "numpy>=1.26.0"] description = "JAX jit using DaCe (Data Centric Parallel Programming)" name = "JaCe" readme = "README.md" @@ -34,11 +28,7 @@ version = "0.1.0" license.file = "LICENSE" [project.optional-dependencies] -cuda12 = [ - "cupy-cuda12x>=12.1.0", - "jax[cuda12]>=0.4.24", - "optuna>=3.4.0" -] +cuda12 = ["cupy-cuda12x>=12.1.0", "jax[cuda12]>=0.4.24", "optuna>=3.4.0"] [project.urls] "Bug Tracker" = "https://github.com/GridTools/JaCe/issues" @@ -47,10 +37,7 @@ Discussions = "https://github.com/GridTools/JaCe/discussions" Homepage = "https://github.com/GridTools/JaCe" [tool.coverage] -report.exclude_also = [ - '\.\.\.', - 'if typing.TYPE_CHECKING:' -] +report.exclude_also = ['\.\.\.', 'if typing.TYPE_CHECKING:'] run.source = ["jace"] # -- mypy -- @@ -83,14 +70,10 @@ module = ["tests.*", "dace.*", "jax.*", "jaxlib.*"] [tool.pytest.ini_options] addopts = ["-ra", "--showlocals", "--strict-markers", "--strict-config"] -filterwarnings = [ - "error" -] +filterwarnings = ["error"] log_cli_level = "INFO" minversion = "6.0" -testpaths = [ - "tests" -] +testpaths = ["tests"] xfail_strict = true # -- ruff -- @@ -109,7 +92,9 @@ extend-select = [ "B", # flake8-bugbear "I", # isort "G", # flake8-logging-format + "W", # pycodestyle-warning "C4", # flake8-comprehensions + "C90", # mccabe "PT", # flake8-pytest-style "UP", # pyupgrade # TODO: in evaluation "ARG", # flake8-unused-arguments @@ -129,6 +114,7 @@ extend-select = [ ignore = [ 'B905', # [zip-without-explicit-strict] 'E501', # [line-too-long] + 'TCH003', # [typing-only-standard-library-import] 'UP038' # [non-pep604-isinstance] ] # ignore-init-module-imports = true # deprecated in preview mode @@ -160,7 +146,13 @@ section-order = [ [tool.ruff.lint.isort.sections] tests = ["tests", "unit_tests", "integration_tests"] +[tool.ruff.lint.mccabe] +max-complexity = 12 + [tool.ruff.lint.per-file-ignores] "!tests/**.py" = ["PT"] # Ignore `flake8-pytest-style` everywhere except in `tests/` "noxfile.py" = ["T20"] # Ignore `flake8-print` "tests/**" = ["T10", "T20"] # Ignore `flake8-debugger` and `flake8-print` + +[tool.ruff.lint.pycodestyle] +max-doc-length = 85 diff --git a/requirements/base.in b/requirements/base.in new file mode 100644 index 0000000..b25ef34 --- /dev/null +++ b/requirements/base.in @@ -0,0 +1,3 @@ +dace>=0.15 +jax[cpu]>=0.4.24 +numpy>=1.26.0 \ No newline at end of file diff --git 
a/requirements/base.txt b/requirements/base.txt new file mode 100644 index 0000000..d388c2f --- /dev/null +++ b/requirements/base.txt @@ -0,0 +1,91 @@ +# SHA1:190b0703818fae41383e79f02d34ca019cedca4d +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +aenum==3.1.15 + # via dace +astunparse==1.6.3 + # via dace +blinker==1.8.2 + # via flask +certifi==2024.6.2 + # via requests +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via flask +dace==0.15.1 + # via -r requirements/base.in +dill==0.3.8 + # via dace +flask==3.0.3 + # via dace +fparser==0.1.4 + # via dace +idna==3.7 + # via requests +itsdangerous==2.2.0 + # via flask +jax[cpu]==0.4.28 + # via -r requirements/base.in +jaxlib==0.4.28 + # via jax +jinja2==3.1.4 + # via flask +markupsafe==2.1.5 + # via + # jinja2 + # werkzeug +ml-dtypes==0.4.0 + # via + # jax + # jaxlib +mpmath==1.3.0 + # via sympy +networkx==3.3 + # via dace +numpy==1.26.4 + # via + # -r requirements/base.in + # dace + # jax + # jaxlib + # ml-dtypes + # opt-einsum + # scipy +opt-einsum==3.3.0 + # via jax +packaging==24.1 + # via setuptools-scm +ply==3.11 + # via dace +pyyaml==6.0.1 + # via dace +requests==2.32.3 + # via dace +scipy==1.13.1 + # via + # jax + # jaxlib +setuptools-scm==8.1.0 + # via fparser +six==1.16.0 + # via astunparse +sympy==1.9 + # via dace +tomli==2.0.1 + # via setuptools-scm +urllib3==2.2.1 + # via requests +websockets==12.0 + # via dace +werkzeug==3.0.3 + # via flask +wheel==0.43.0 + # via astunparse + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/cuda12.in b/requirements/cuda12.in new file mode 100644 index 0000000..5c9b956 --- /dev/null +++ b/requirements/cuda12.in @@ -0,0 +1,4 @@ +-r base.in +cupy-cuda12x>=12.1.0 +jax[cuda12]>=0.4.24 +optuna>=3.4.0 \ No newline at end of file diff --git a/requirements/cuda12.txt b/requirements/cuda12.txt new file mode 100644 index 0000000..098643e --- /dev/null +++ b/requirements/cuda12.txt @@ -0,0 +1,74 @@ +# SHA1:035352ab483a9ee349c593a1ff7f359a88012cc9 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.txt +alembic==1.13.1 + # via optuna +colorlog==6.8.2 + # via optuna +cupy-cuda12x==13.1.0 + # via -r requirements/cuda12.in +fastrlock==0.8.2 + # via cupy-cuda12x +greenlet==3.0.3 + # via sqlalchemy +jax[cpu,cuda12]==0.4.28 + # via + # -r requirements/base.in + # -r requirements/cuda12.in +jax-cuda12-pjrt==0.4.28 + # via jax-cuda12-plugin +jax-cuda12-plugin==0.4.28 + # via jax +mako==1.3.5 + # via alembic +nvidia-cublas-cu12==12.5.2.13 + # via + # jax + # nvidia-cudnn-cu12 + # nvidia-cusolver-cu12 +nvidia-cuda-cupti-cu12==12.5.39 + # via jax +nvidia-cuda-nvcc-cu12==12.5.40 + # via jax +nvidia-cuda-nvrtc-cu12==12.5.40 + # via nvidia-cudnn-cu12 +nvidia-cuda-runtime-cu12==12.5.39 + # via jax +nvidia-cudnn-cu12==8.9.7.29 + # via jax +nvidia-cufft-cu12==11.2.3.18 + # via jax +nvidia-cusolver-cu12==11.6.2.40 + # via jax +nvidia-cusparse-cu12==12.4.1.24 + # via + # jax + # nvidia-cusolver-cu12 +nvidia-nccl-cu12==2.21.5 + # via jax +nvidia-nvjitlink-cu12==12.5.40 + # via + # jax + # nvidia-cufft-cu12 + # nvidia-cusolver-cu12 + # nvidia-cusparse-cu12 +optuna==3.6.1 + # via -r requirements/cuda12.in +sqlalchemy==2.0.30 + # via + # alembic + # optuna +tqdm==4.66.4 + # via optuna +typing-extensions==4.12.2 + # via + # alembic + # sqlalchemy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff 
--git a/requirements/dev-cuda12.in b/requirements/dev-cuda12.in new file mode 100644 index 0000000..aa00469 --- /dev/null +++ b/requirements/dev-cuda12.in @@ -0,0 +1,2 @@ +-r base.in +-r dev.in diff --git a/requirements/dev-cuda12.txt b/requirements/dev-cuda12.txt new file mode 100644 index 0000000..7c894e8 --- /dev/null +++ b/requirements/dev-cuda12.txt @@ -0,0 +1,12 @@ +# SHA1:d9f19ac423500f255d32c3e29dd96fd3b5c649a8 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.txt +-r dev.txt + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements-dev.txt b/requirements/dev.in similarity index 60% rename from requirements-dev.txt rename to requirements/dev.in index a7a822e..b648f8a 100644 --- a/requirements-dev.txt +++ b/requirements/dev.in @@ -1,11 +1,12 @@ +-r base.in furo>=2023.08.17 -mypy >= 1.9.0 +mypy>=1.9.0 myst_parser>=0.13 -pytest >=6 -pytest-cov >=3 -ruff >= 0.3.5 +pytest>=6 +pytest-cov>=3 +ruff>=0.3.5 sphinx>=7.0 sphinx_autodoc_typehints sphinx_copybutton -types-all +tomlkit>=0.12.4 typing-extensions>=4.10.0 diff --git a/requirements/dev.txt b/requirements/dev.txt new file mode 100644 index 0000000..176b9a2 --- /dev/null +++ b/requirements/dev.txt @@ -0,0 +1,95 @@ +# SHA1:a7338646990b5874d5aa51bb3e2bd37753c754eb +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.txt +alabaster==0.7.16 + # via sphinx +babel==2.15.0 + # via sphinx +beautifulsoup4==4.12.3 + # via furo +coverage[toml]==7.5.3 + # via pytest-cov +docutils==0.21.2 + # via + # myst-parser + # sphinx +exceptiongroup==1.2.1 + # via pytest +furo==2024.5.6 + # via -r requirements/dev.in +imagesize==1.4.1 + # via sphinx +iniconfig==2.0.0 + # via pytest +markdown-it-py==3.0.0 + # via + # mdit-py-plugins + # myst-parser +mdit-py-plugins==0.4.1 + # via myst-parser +mdurl==0.1.2 + # via markdown-it-py +mypy==1.10.0 + # via -r requirements/dev.in +mypy-extensions==1.0.0 + # via mypy +myst-parser==3.0.1 + # via -r requirements/dev.in +pluggy==1.5.0 + # via pytest +pygments==2.18.0 + # via + # furo + # sphinx +pytest==8.2.2 + # via + # -r requirements/dev.in + # pytest-cov +pytest-cov==5.0.0 + # via -r requirements/dev.in +ruff==0.4.8 + # via -r requirements/dev.in +snowballstemmer==2.2.0 + # via sphinx +soupsieve==2.5 + # via beautifulsoup4 +sphinx==7.3.7 + # via + # -r requirements/dev.in + # furo + # myst-parser + # sphinx-autodoc-typehints + # sphinx-basic-ng + # sphinx-copybutton +sphinx-autodoc-typehints==2.1.1 + # via -r requirements/dev.in +sphinx-basic-ng==1.0.0b2 + # via furo +sphinx-copybutton==0.5.2 + # via -r requirements/dev.in +sphinxcontrib-applehelp==1.0.8 + # via sphinx +sphinxcontrib-devhelp==1.0.6 + # via sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.7 + # via sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via sphinx +tomlkit==0.12.5 + # via -r requirements/dev.in +typing-extensions==4.12.2 + # via + # -r requirements/dev.in + # mypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/sync_tool.py b/requirements/sync_tool.py new file mode 100644 index 0000000..1eabda6 --- /dev/null +++ b/requirements/sync_tool.py @@ -0,0 +1,209 @@ +#! 
/usr/bin/env python3 + +# JaCe - JAX Just-In-Time compilation using DaCe (Data Centric Parallel Programming) +# +# Copyright (c) 2024, ETH Zurich +# All rights reserved. +# +# SPDX-License-Identifier: BSD-3-Clause + +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "packaging>=24.0", +# "tomlkit>=0.12.4", +# "typer-slim>=0.12.3", +# "yamlpath>=3.8.2" +# ] +# /// + +from __future__ import annotations + +import pathlib +import re +import types +from collections.abc import Iterable, Mapping +from typing import NamedTuple, TypeAlias + +import tomlkit +import typer +import yamlpath +from packaging import ( + markers as pkg_markers, + requirements as pkg_requirements, + specifiers as pkg_specifiers, +) + + +# -- Classes -- +class RequirementSpec(NamedTuple): + package: pkg_requirements.Requirement + specifiers: pkg_specifiers.SpecifierSet | None = None + marker: pkg_markers.Marker | None = None + + @classmethod + def from_text(cls, req_text: str) -> RequirementSpec: + req_text = req_text.strip() + assert req_text, "Requirement string cannot be empty" + + m = re.match(r"^([^><=~]*)\s*([^;]*)\s*;?\s*(.*)$", req_text) + return RequirementSpec( + pkg_requirements.Requirement(m[1]), + pkg_specifiers.Specifier(m[2]) if m[2] else None, + pkg_markers.Marker(m[3]) if m[3] else None, + ) + + def as_text(self) -> str: + return f"{self.package!s}{(self.specifiers or '')!s}{(self.marker or '')!s}".strip() + + +class Requirement(NamedTuple): + text: str + spec: RequirementSpec + + @classmethod + def from_text(cls, req_text: str) -> Requirement: + return Requirement(req_text, RequirementSpec.from_text(req_text)) + + @classmethod + def from_spec(cls, req: RequirementSpec) -> Requirement: + return Requirement(req.as_text(), req) + + def dump(self, *, template: str | None = None) -> str: + template = template or "{req.text}" + return template.format(req=self) + + +class RequirementDumpSpec(NamedTuple): + value: Requirement | Iterable[Requirement] + template: str | None = None + + +DumpSpec: TypeAlias = ( + RequirementDumpSpec | tuple[Requirement | Iterable[Requirement], str | None] | str +) + + +# -- Functions -- +def make_requirements_map( + requirements: Iterable[Requirement], +) -> dict[str, Requirement]: + return {req.spec.package.name: req for req in requirements} + + +def load_from_requirements(filename: str) -> list[Requirement]: + requirements = [] + with pathlib.Path(filename).open() as f: + for line in f: + if (end := line.find("#")) != -1: + line = line[:end] + line = line.strip() + if line and not line.startswith("-"): + requirements.append(Requirement.from_text(line)) + + return requirements + + +def load_from_toml(filename: str, key: str) -> list[Requirement]: + with pathlib.Path(filename).open() as f: + toml_data = tomlkit.loads(f.read()) + + section = toml_data + for part in key.split("."): + section = section[part] + + return [Requirement.from_text(req) for req in section] + + +def dump(requirements: Iterable[Requirement], *, template: str | None = None) -> None: + return [req.dump(template=template) for req in requirements] + + +def dump_to_requirements( + requirements: Iterable[Requirement], + filename: str, + *, + template: str | None = None, + header: str | None = None, + footer: str | None = None, +) -> None: + with pathlib.Path(filename).open("w") as f: + if header: + f.write(f"{header}\n") + f.write("\n".join(dump(requirements, template=template))) + if footer: + f.write(f"{footer}\n") + + +def dump_to_yaml(requirements_map: Mapping[str, DumpSpec], filename: str) -> None: 
+ file_path = pathlib.Path(filename) + logging_args = types.SimpleNamespace(quiet=False, verbose=False, debug=False) + console_log = yamlpath.wrappers.ConsolePrinter(logging_args) + yaml = yamlpath.common.Parsers.get_yaml_editor() + (yaml_data, doc_loaded) = yamlpath.common.Parsers.get_yaml_data(yaml, console_log, file_path) + assert doc_loaded + processor = yamlpath.Processor(console_log, yaml_data) + + for key_path, (value, template) in requirements_map.items(): + match value: + case str(): + processor.set_value(yamlpath.YAMLPath(key_path), value) + case Requirement(): + processor.set_value(yamlpath.YAMLPath(key_path), value.dump(template=template)) + case Iterable(): + for _ in processor.delete_nodes(yamlpath.YAMLPath(key_path)): + pass + for i, req in enumerate(dump(value, template=template)): + item_path = yamlpath.YAMLPath(f"{key_path}[{i}]") + processor.set_value(item_path, req) + + with file_path.open("w") as f: + yaml.dump(yaml_data, f) + + +# -- CLI -- +app = typer.Typer() + + +@app.command() +def pull(): + base = load_from_toml("pyproject.toml", "project.dependencies") + dump_to_requirements(base, "requirements/base.in") + cuda12 = load_from_toml("pyproject.toml", "project.optional-dependencies.cuda12") + dump_to_requirements(cuda12, "requirements/cuda12.in", header="-r base.in") + + +@app.command() +def push(): + base_names = {r.spec.package for r in load_from_toml("pyproject.toml", "project.dependencies")} + base_versions = [ + r for r in load_from_requirements("requirements/base.txt") if r.spec.package in base_names + ] + dev_versions_map = make_requirements_map(load_from_requirements("requirements/dev.txt")) + mypy_req_versions = sorted( + base_versions + [dev_versions_map[r] for r in ("pytest", "typing-extensions")], + key=lambda r: str(r.spec.package), + ) + dump_to_yaml( + { + # ruff + "repos[.repo%https://github.com/astral-sh/ruff-pre-commit].rev": ( + dev_versions_map["ruff"], + "v{req.spec.specifiers.version}", + ), + # mypy + "repos[.repo%https://github.com/pre-commit/mirrors-mypy].rev": ( + dev_versions_map["mypy"], + "v{req.spec.specifiers.version}", + ), + "repos[.repo%https://github.com/pre-commit/mirrors-mypy].hooks[.id%mypy].additional_dependencies": ( + mypy_req_versions, + None, + ), + }, + ".pre-commit-config.yaml", + ) + + +if __name__ == "__main__": + app() From 03d6d081ca814f56298170997ddef5063c8f4745 Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Wed, 12 Jun 2024 13:42:20 +0200 Subject: [PATCH 2/6] Updates from new config --- .pre-commit-config.yaml | 51 ++++++------- pyproject.toml | 156 ++++++++++++++++++++++++++++++---------- 2 files changed, 147 insertions(+), 60 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 59402ca..59057f5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,41 +2,47 @@ default_language_version: python: python3.10 ci: - autoupdate_commit_msg: "chore: update pre-commit hooks" - autofix_commit_msg: "style: pre-commit fixes" + autoupdate_commit_msg: 'chore: update pre-commit hooks' + autofix_commit_msg: 'style: pre-commit fixes' repos: - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.6.0 + rev: v2.13.0 hooks: - id: pretty-format-ini args: [--autofix] - id: pretty-format-toml - args: [--autofix] + args: [--autofix, --indent, '2', --trailing-commas] additional_dependencies: - setuptools>=69.2.0 - id: pretty-format-yaml - args: [--autofix, --preserve-quotes, --indent, "2"] + args: [--autofix, --indent, '2', --line-width, '100'] 
additional_dependencies: - setuptools>=69.2.0 -- repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.1.0" +- repo: https://github.com/executablebooks/mdformat + rev: 0.7.17 hooks: - - id: prettier - types_or: [markdown, html, css, scss, javascript, json] - args: [--prose-wrap=preserve] + - id: mdformat + args: [--number] + additional_dependencies: + - mdformat-gfm + - mdformat-black - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.1.9 + rev: v1.5.5 hooks: - id: insert-license - exclude: ^\..*$ + exclude: | + (?x)^( + ^\..*$ | + noxfile.py + )$ types: [python] - args: [--comment-style, "|#|", --license-filepath, ./LICENSE_HEADER.txt] + args: [--comment-style, '|#|', --license-filepath, ./LICENSE_HEADER.txt] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: "v4.6.0" + rev: v4.6.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -46,15 +52,13 @@ repos: - id: debug-statements - id: end-of-file-fixer - id: mixed-line-ending - - id: name-tests-test - args: ["--pytest-test-first"] - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.4.8 hooks: - id: ruff - args: ["--fix", "--show-fixes", "--preview"] + args: [--fix, --show-fixes] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy @@ -67,15 +71,14 @@ repos: - dace==0.15.1 - jax[cpu]==0.4.28 - numpy==1.26.4 - - pytest==8.2.2 - - typing-extensions==4.12.2 + - pytest==8.2.1 - repo: https://github.com/codespell-project/codespell - rev: "v2.2.6" + rev: v2.3.0 hooks: - id: codespell - repo: https://github.com/shellcheck-py/shellcheck-py - rev: "v0.10.0.1" + rev: v0.10.0.1 hooks: - id: shellcheck @@ -88,13 +91,13 @@ repos: exclude: .pre-commit-config.yaml - repo: https://github.com/abravalheri/validate-pyproject - rev: "v0.16" + rev: v0.18 hooks: - id: validate-pyproject - additional_dependencies: ["validate-pyproject-schema-store[all]"] + additional_dependencies: ['validate-pyproject-schema-store[all]'] - repo: https://github.com/python-jsonschema/check-jsonschema - rev: "0.28.1" + rev: 0.28.5 hooks: - id: check-dependabot - id: check-github-workflows diff --git a/pyproject.toml b/pyproject.toml index 175a679..d186132 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,13 @@ [build-system] build-backend = "setuptools.build_meta" -requires = ["setuptools>=61"] +requires = [ + "setuptools>=61", +] [project] -authors = [{name = "ETH Zurich", email = "gridtools@cscs.ch"}] +authors = [ + {name = "ETH Zurich", email = "gridtools@cscs.ch"}, +] classifiers = [ "Development Status :: 1 - Planning", "Intended Audience :: Science/Research", @@ -17,9 +21,13 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering", - "Typing :: Typed" + "Typing :: Typed", +] +dependencies = [ + "dace>=0.15", + "jax[cpu]>=0.4.24", + "numpy>=1.26.0", ] -dependencies = ["dace>=0.15", "jax[cpu]>=0.4.24", "numpy>=1.26.0"] description = "JAX jit using DaCe (Data Centric Parallel Programming)" name = "JaCe" readme = "README.md" @@ -28,7 +36,11 @@ version = "0.1.0" license.file = "LICENSE" [project.optional-dependencies] -cuda12 = ["cupy-cuda12x>=12.1.0", "jax[cuda12]>=0.4.24", "optuna>=3.4.0"] +cuda12 = [ + "cupy-cuda12x>=12.1.0", + "jax[cuda12]>=0.4.24", + "optuna>=3.4.0", +] [project.urls] "Bug Tracker" = "https://github.com/GridTools/JaCe/issues" @@ -36,9 +48,32 @@ Changelog = "https://github.com/GridTools/JaCe/releases" Discussions = "https://github.com/GridTools/JaCe/discussions" Homepage = 
"https://github.com/GridTools/JaCe" +# -- coverage -- [tool.coverage] -report.exclude_also = ['\.\.\.', 'if typing.TYPE_CHECKING:'] -run.source = ["jace"] + +[tool.coverage.html] +show_contexts = true + +[tool.coverage.report] +exclude_also = [ + '\.\.\.', + 'if TYPE_CHECKING:', + 'if typing.TYPE_CHECKING:', + 'def __repr__', + '@overload', + 'raise AssertionError', + 'raise NotImplementedError', + 'if 0:', + 'if __name__ == .__main__.:', + '@(abc\\.)?abstractmethod', + '@(abc\\.)?abstract', + 'class .*\bProtocol\):', +] + +[tool.coverage.run] +branch = true +dynamic_context = "test_function" +source = ["jace"] # -- mypy -- [tool.mypy] @@ -63,7 +98,12 @@ warn_unused_ignores = true disallow_incomplete_defs = false disallow_untyped_defs = false ignore_missing_imports = true -module = ["tests.*", "dace.*", "jax.*", "jaxlib.*"] +module = [ + "tests.*", + "dace.*", + "jax.*", + "jaxlib.*", +] # -- pytest -- [tool.pytest] @@ -79,6 +119,7 @@ xfail_strict = true # -- ruff -- [tool.ruff] line-length = 100 +preview = true respect-gitignore = true show-fixes = true src = ["src"] @@ -87,72 +128,115 @@ src = ["src"] docstring-code-format = true [tool.ruff.lint] +extend-safe-fixes = ["D", "TCH"] extend-select = [ "A", # flake8-builtins "B", # flake8-bugbear "I", # isort "G", # flake8-logging-format + "N", # pep8-naming "W", # pycodestyle-warning "C4", # flake8-comprehensions "C90", # mccabe + "D", # pydocstyle + "D213", # multi-line-summary-second-line (off by default in pydocstyle "google' convention) "PT", # flake8-pytest-style - "UP", # pyupgrade # TODO: in evaluation + "TD", # flake8-todo + "UP", # pyupgrade "ARG", # flake8-unused-arguments "ERA", # eradicate + "FLY", # flynt "ICN", # flake8-import-conventions + "NPY", # NumPy specific rules + "PERF", # Perflint "PGH", # pygrep-hooks "PIE", # flake8-pie + "PL", # pylint "PTH", # flake8-use-pathlib - "RET", # flake8-return # TODO: in evaluation + "RET", # flake8-return "RUF", # Ruff-specific - "SIM", # flake8-simplify # TODO: in evaluation + "SIM", # flake8-simplify + "SLOT", # flake8-slots "T10", # flake8-debugger - "T20", # flake8-print # TODO: in evaluation - "TCH", # flake8-type-checking # TODO: in evaluation - "NPY" # NumPy specific rules + "T20", # flake8-print + "TCH", # flake8-type-checking + "TRY", # tryceratops ] ignore = [ - 'B905', # [zip-without-explicit-strict] - 'E501', # [line-too-long] - 'TCH003', # [typing-only-standard-library-import] - 'UP038' # [non-pep604-isinstance] + "B905", # [zip-without-explicit-strict] + "D105", # [undocumented-magic-method] + "D107", # [undocumented-public-init] + "D212", # [multi-line-summary-first-line] + "D402", # [no-signature] + "E501", # [line-too-long] + "TCH003", # [typing-only-standard-library-import] + "TD003", # [missing-todo-link] + "TRY003", # [raise-vanilla-args] # TODO(egparedes): reevaluate if it should be activated + "UP038", # [non-pep604-isinstance] ] +task-tags = ["TODO"] # ignore-init-module-imports = true # deprecated in preview mode unfixable = [] [tool.ruff.lint.isort] combine-as-imports = true -known-first-party = ['jace'] +known-first-party = ["jace"] known-third-party = [ - 'cupy', - 'dace', - 'jax', - 'numpy', - 'pytest', - 'typing_extensions' + "cupy", + "dace", + "jax", + "numpy", + "pytest", + "typing_extensions", ] lines-after-imports = 2 order-by-type = true required-imports = ["from __future__ import annotations"] section-order = [ - 'future', - 'standard-library', - 'third-party', - 'first-party', - 'tests', - 'local-folder' + "future", + "standard-library", + 
"third-party", + "first-party", + "tests", + "local-folder", ] [tool.ruff.lint.isort.sections] -tests = ["tests", "unit_tests", "integration_tests"] +tests = [ + "tests", + "unit_tests", + "integration_tests", +] [tool.ruff.lint.mccabe] max-complexity = 12 [tool.ruff.lint.per-file-ignores] -"!tests/**.py" = ["PT"] # Ignore `flake8-pytest-style` everywhere except in `tests/` -"noxfile.py" = ["T20"] # Ignore `flake8-print` -"tests/**" = ["T10", "T20"] # Ignore `flake8-debugger` and `flake8-print` +"!tests/**.py" = ["PT"] # Ignore flake8-pytest-style outside 'tests/' +"docs/**" = [ + "D", # pydocstyle + "T10", # flake8-debugger + "T20", # flake8-print +] +"noxfile.py" = [ + "D", # pydocstyle + "T20", # flake8-print +] +"tests/**" = [ + "D", # pydocstyle + "N", # TODO(egparedes): remove ignore as soon as all tests are properly named + "PLR2004", # [magic-value-comparison] + "T10", # flake8-debugger + "T20", # flake8-print +] [tool.ruff.lint.pycodestyle] -max-doc-length = 85 +ignore-overlong-task-comments = true +max-doc-length = 88 + +[tool.ruff.lint.pydocstyle] +convention = "google" +ignore-decorators = ["typing.overload"] + +[tool.ruff.lint.pylint] +max-positional-args = 6 From d1231665c120474f088d3633ac6e88b1eabe8bde Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Wed, 12 Jun 2024 16:07:19 +0200 Subject: [PATCH 3/6] Final cleanups and fixes --- .pre-commit-config.yaml | 5 +++-- noxfile.py | 11 +++++------ pyproject.toml | 8 +++++++- requirements/base.in | 2 +- requirements/base.txt | 4 ++-- requirements/cuda12.in | 2 +- requirements/cuda12.txt | 10 ++++------ requirements/sync_tool.py | 25 +++++++++++++++---------- 8 files changed, 38 insertions(+), 29 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 59057f5..255e6ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -69,9 +69,10 @@ repos: args: [--no-install-types] additional_dependencies: - dace==0.15.1 - - jax[cpu]==0.4.28 + - jax[cpu]==0.4.29 - numpy==1.26.4 - - pytest==8.2.1 + - pytest==8.2.2 + - typing-extensions==4.12.2 - repo: https://github.com/codespell-project/codespell rev: v2.3.0 hooks: diff --git a/noxfile.py b/noxfile.py index 9aba077..4b9ba40 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,3 +1,5 @@ +"""Nox session definitions.""" + from __future__ import annotations import argparse @@ -24,7 +26,7 @@ def lint(session: nox.Session) -> None: @nox.session def tests(session: nox.Session) -> None: """Run the unit and regular tests.""" - session.install(".[test]") + session.install("-e", ".", "-r", "requirements/dev.txt") session.run("pytest", *session.posargs) @@ -40,8 +42,7 @@ def docs(session: nox.Session) -> None: session.error("Must not specify non-HTML builder with --serve") extra_installs = ["sphinx-autobuild"] if args.serve else [] - - session.install("-e.[docs]", *extra_installs) + session.install("-e", ".", "-r", "requirements/dev.txt", *extra_installs) session.chdir("docs") if args.builder == "linkcheck": @@ -92,9 +93,7 @@ def build(session: nox.Session) -> None: @nox.session def requirements(session: nox.Session) -> None: - """ - Freeze dependencies from input specs and synchronize across tools. 
- """ + """Freeze dependencies from input specs and synchronize across tools.""" requirements_path = DIR / "requirements" req_sync_tool = requirements_path / "sync_tool.py" diff --git a/pyproject.toml b/pyproject.toml index 37c0d3d..73eebf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -211,7 +211,7 @@ tests = [ max-complexity = 12 [tool.ruff.lint.per-file-ignores] -"!tests/**.py" = ["PT"] # Ignore flake8-pytest-style outside 'tests/' +"!tests/**" = ["PT"] # Ignore flake8-pytest-style outside 'tests/' "docs/**" = [ "D", # pydocstyle "T10", # flake8-debugger @@ -219,6 +219,12 @@ max-complexity = 12 ] "noxfile.py" = [ "D", # pydocstyle + "T10", # flake8-debugger + "T20", # flake8-print +] +"requirements/**" = [ + "D", # pydocstyle + "T10", # flake8-debugger "T20", # flake8-print ] "tests/**" = [ diff --git a/requirements/base.in b/requirements/base.in index b25ef34..c077dae 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -1,3 +1,3 @@ dace>=0.15 jax[cpu]>=0.4.24 -numpy>=1.26.0 \ No newline at end of file +numpy>=1.26.0 diff --git a/requirements/base.txt b/requirements/base.txt index d388c2f..1aae055 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -29,9 +29,9 @@ idna==3.7 # via requests itsdangerous==2.2.0 # via flask -jax[cpu]==0.4.28 +jax[cpu]==0.4.29 # via -r requirements/base.in -jaxlib==0.4.28 +jaxlib==0.4.29 # via jax jinja2==3.1.4 # via flask diff --git a/requirements/cuda12.in b/requirements/cuda12.in index 5c9b956..d603a3b 100644 --- a/requirements/cuda12.in +++ b/requirements/cuda12.in @@ -1,4 +1,4 @@ -r base.in cupy-cuda12x>=12.1.0 jax[cuda12]>=0.4.24 -optuna>=3.4.0 \ No newline at end of file +optuna>=3.4.0 diff --git a/requirements/cuda12.txt b/requirements/cuda12.txt index 098643e..078edf2 100644 --- a/requirements/cuda12.txt +++ b/requirements/cuda12.txt @@ -16,13 +16,13 @@ fastrlock==0.8.2 # via cupy-cuda12x greenlet==3.0.3 # via sqlalchemy -jax[cpu,cuda12]==0.4.28 +jax[cpu,cuda12]==0.4.29 # via # -r requirements/base.in # -r requirements/cuda12.in -jax-cuda12-pjrt==0.4.28 +jax-cuda12-pjrt==0.4.29 # via jax-cuda12-plugin -jax-cuda12-plugin==0.4.28 +jax-cuda12-plugin==0.4.29 # via jax mako==1.3.5 # via alembic @@ -35,11 +35,9 @@ nvidia-cuda-cupti-cu12==12.5.39 # via jax nvidia-cuda-nvcc-cu12==12.5.40 # via jax -nvidia-cuda-nvrtc-cu12==12.5.40 - # via nvidia-cudnn-cu12 nvidia-cuda-runtime-cu12==12.5.39 # via jax -nvidia-cudnn-cu12==8.9.7.29 +nvidia-cudnn-cu12==9.1.1.17 # via jax nvidia-cufft-cu12==11.2.3.18 # via jax diff --git a/requirements/sync_tool.py b/requirements/sync_tool.py index 1eabda6..6755092 100644 --- a/requirements/sync_tool.py +++ b/requirements/sync_tool.py @@ -17,6 +17,8 @@ # ] # /// +"""Script to synchronize requirements across tools.""" + from __future__ import annotations import pathlib @@ -37,6 +39,8 @@ # -- Classes -- class RequirementSpec(NamedTuple): + """A parsed requirement specification.""" + package: pkg_requirements.Requirement specifiers: pkg_specifiers.SpecifierSet | None = None marker: pkg_markers.Marker | None = None @@ -58,6 +62,8 @@ def as_text(self) -> str: class Requirement(NamedTuple): + """An item in a list of requirements and its parsed specification.""" + text: str spec: RequirementSpec @@ -85,19 +91,17 @@ class RequirementDumpSpec(NamedTuple): # -- Functions -- -def make_requirements_map( - requirements: Iterable[Requirement], -) -> dict[str, Requirement]: +def make_requirements_map(requirements: Iterable[Requirement]) -> dict[str, Requirement]: return {req.spec.package.name: req for req in 
requirements} def load_from_requirements(filename: str) -> list[Requirement]: requirements = [] - with pathlib.Path(filename).open() as f: - for line in f: - if (end := line.find("#")) != -1: - line = line[:end] - line = line.strip() + with pathlib.Path(filename).open(encoding="locale") as f: + for raw_line in f: + if (end := raw_line.find("#")) != -1: + raw_line = raw_line[:end] # noqa: PLW2901 [redefined-loop-name] + line = raw_line.strip() if line and not line.startswith("-"): requirements.append(Requirement.from_text(line)) @@ -105,7 +109,7 @@ def load_from_requirements(filename: str) -> list[Requirement]: def load_from_toml(filename: str, key: str) -> list[Requirement]: - with pathlib.Path(filename).open() as f: + with pathlib.Path(filename).open(encoding="locale") as f: toml_data = tomlkit.loads(f.read()) section = toml_data @@ -127,12 +131,13 @@ def dump_to_requirements( header: str | None = None, footer: str | None = None, ) -> None: - with pathlib.Path(filename).open("w") as f: + with pathlib.Path(filename).open("w", encoding="locale") as f: if header: f.write(f"{header}\n") f.write("\n".join(dump(requirements, template=template))) if footer: f.write(f"{footer}\n") + f.write("\n") def dump_to_yaml(requirements_map: Mapping[str, DumpSpec], filename: str) -> None: From 39dd108e36ead70348c5996abbc417b6dcd03cac Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Wed, 12 Jun 2024 16:12:25 +0200 Subject: [PATCH 4/6] Clean up sync script --- requirements/sync_tool.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/sync_tool.py b/requirements/sync_tool.py index 6755092..846cb0e 100644 --- a/requirements/sync_tool.py +++ b/requirements/sync_tool.py @@ -75,7 +75,7 @@ def from_text(cls, req_text: str) -> Requirement: def from_spec(cls, req: RequirementSpec) -> Requirement: return Requirement(req.as_text(), req) - def dump(self, *, template: str | None = None) -> str: + def as_text(self, *, template: str | None = None) -> str: template = template or "{req.text}" return template.format(req=self) @@ -120,7 +120,7 @@ def load_from_toml(filename: str, key: str) -> list[Requirement]: def dump(requirements: Iterable[Requirement], *, template: str | None = None) -> None: - return [req.dump(template=template) for req in requirements] + return [req.as_text(template=template) for req in requirements] def dump_to_requirements( @@ -154,7 +154,7 @@ def dump_to_yaml(requirements_map: Mapping[str, DumpSpec], filename: str) -> Non case str(): processor.set_value(yamlpath.YAMLPath(key_path), value) case Requirement(): - processor.set_value(yamlpath.YAMLPath(key_path), value.dump(template=template)) + processor.set_value(yamlpath.YAMLPath(key_path), value.as_text(template=template)) case Iterable(): for _ in processor.delete_nodes(yamlpath.YAMLPath(key_path)): pass From da57db9132719296b1d2efed12e92d445e602f0b Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Fri, 14 Jun 2024 15:08:33 +0200 Subject: [PATCH 5/6] Improve developer documentation and add devenv session setup to nox --- .github/workflows/ci.yml | 2 +- .pre-commit-config.yaml | 2 +- CONTRIBUTING.md | 9 ++- noxfile.py | 152 ++++++++++++++++++++++++++---------- requirements/base.txt | 30 +------ requirements/cuda12.txt | 2 +- requirements/dev-cuda12.in | 2 +- requirements/dev-cuda12.txt | 4 +- requirements/dev.in | 6 +- requirements/dev.txt | 45 ++++++++++- 10 files changed, 174 insertions(+), 80 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 
b065ed7..0bb29c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,7 +50,7 @@ jobs: allow-prereleases: true - name: Install requirementes - run: python -m pip install -r requirements-dev.txt + run: python -m pip install -r requirements/dev.txt - name: Install package run: python -m pip install . diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 255e6ed..4e4b514 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -68,7 +68,7 @@ repos: files: src|tests args: [--no-install-types] additional_dependencies: - - dace==0.15.1 + - dace==0.16 - jax[cpu]==0.4.29 - numpy==1.26.4 - pytest==8.2.2 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 19d5adb..1b060b1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,6 +9,7 @@ The fastest way to start with development is to use nox. If you don't have nox, To use, run `nox`. This will lint and test using every installed version of Python on your system, skipping ones that are not installed. You can also run specific jobs: ```console +$ nox -s venv-3.10 # (or venv-3.11, or venv-3.12) Setup a fully working development envinroment $ nox -s lint # Lint only $ nox -s tests # Python tests $ nox -s docs -- --serve # Build and serve the docs @@ -25,16 +26,16 @@ You can set up a development environment by running: python3 -m venv .venv source ./.venv/bin/activate pip install --upgrade pip setuptools wheel -pip install -r requirements-dev.txt +pip install -r requirements/dev.txt pip install -v -e . ``` -If you have the [Python Launcher for Unix](https://github.com/brettcannon/python-launcher), you can instead do: +Or, if you have the [Python Launcher for Unix](https://github.com/brettcannon/python-launcher), you could do: ```bash py -m venv .venv py -m pip install --upgrade pip setuptools wheel -py -m pip install -r requirements-dev.txt +py -m pip install -r requirements/dev.txt py -m pip install -v -e . ``` @@ -43,7 +44,7 @@ py -m pip install -v -e . 
You should prepare pre-commit, which will help you by checking that commits pass required checks: ```bash -pip install pre-commit # or brew install pre-commit on macOS +pipx install pre-commit # or brew install pre-commit on macOS pre-commit install # Will install a pre-commit hook into the git repo ``` diff --git a/noxfile.py b/noxfile.py index 4b9ba40..e55b13f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -3,22 +3,43 @@ from __future__ import annotations import argparse +import pathlib +import re import shutil -from pathlib import Path import nox -DIR = Path(__file__).parent.resolve() - nox.needs_version = ">=2024.3.2" nox.options.sessions = ["lint", "tests"] nox.options.default_venv_backend = "uv|virtualenv" +ROOT_DIR = pathlib.Path(__file__).parent.resolve() +DEFAULT_DEV_VENV_PATH = ROOT_DIR / ".venv" + + +def load_from_frozen_requirements(filename: str) -> dict[str, str]: + requirements = {} + with pathlib.Path(filename).open(encoding="locale") as f: + for raw_line in f: + if (end := raw_line.find("#")) != -1: + raw_line = raw_line[:end] # noqa: PLW2901 [redefined-loop-name] + line = raw_line.strip() + if line and not line.startswith("-"): + m = re.match(r"^([^=]*)\s*([^;]*)\s*;?\s*(.*)$", line) + if m: + requirements[m[1]] = m[2] + + return requirements + + +REQUIREMENTS = load_from_frozen_requirements(ROOT_DIR / "requirements" / "dev.txt") + + @nox.session def lint(session: nox.Session) -> None: - """Run the linter.""" + """Run the linter (pre-commit).""" session.install("pre-commit") session.run("pre-commit", "run", "--all-files", "--show-diff-on-failure", *session.posargs) @@ -30,9 +51,93 @@ def tests(session: nox.Session) -> None: session.run("pytest", *session.posargs) +@nox.session(python=["3.10", "3.11", "3.12"]) +def venv(session: nox.Session) -> None: + """ + Sets up a Python development environment. Use as: `nox -s venv -- [dest_path] [req_preset] + + This session will: + - Create a python virtualenv for the session + - Install the `virtualenv` cli tool into this environment + - Use `virtualenv` to create a project virtual environment + - Invoke the python interpreter from the created project environment + to install the project and all it's development dependencies. 
+ """ # noqa: W505 [doc-line-too-long] + venv_path = f"{DEFAULT_DEV_VENV_PATH}-{session.python}" + req_preset = "dev" + virtualenv_args = [] + if session.posargs: + venv_path, *more_pos_args = session.posargs + if more_pos_args: + req_preset, _ = more_pos_args + venv_path = pathlib.Path(venv_path).resolve() + + if not venv_path.exists(): + print(f"Creating virtualenv at '{venv_path}' (options: {virtualenv_args})...") + session.install("virtualenv") + session.run("virtualenv", venv_path, silent=True) + + python_path = venv_path / "bin" / "python" + requirements_file = f"requirements/{req_preset}.txt" + + # Use the venv's interpreter to install the project along with + # all it's dev dependencies, this ensures it's installed in the right way + print(f"Setting up development environment from '{requirements_file}'...") + session.run( + python_path, + "-m", + "pip", + "install", + "-r", + requirements_file, + "-e.", + external=True, + ) + + +@nox.session +def requirements(session: nox.Session) -> None: + """Freeze requirements files from project specification and synchronize versions across tools.""" # noqa: W505 [doc-line-too-long] + requirements_path = ROOT_DIR / "requirements" + req_sync_tool = requirements_path / "sync_tool.py" + + dependencies = ["pre-commit"] + nox.project.load_toml(req_sync_tool)["dependencies"] + session.install(*dependencies) + session.install("pip-compile-multi") + + session.run("python", req_sync_tool, "pull") + session.run("pip-compile-multi", "-g", "--skip-constraints") + session.run("python", req_sync_tool, "push") + + session.run("pre-commit", "run", "--files", ".pre-commit-config.yaml", success_codes=[0, 1]) + + @nox.session(reuse_venv=True) def docs(session: nox.Session) -> None: - """Build the docs. Pass "--serve" to serve. Pass "-b linkcheck" to check links.""" + """Regenerate and build all API and user docs.""" + session.notify("api_docs") + session.notify("user_docs", posargs=session.posargs) + + +@nox.session(reuse_venv=True) +def api_docs(session: nox.Session) -> None: + """Build (regenerate) API docs.""" + session.install(f"sphinx=={REQUIREMENTS['sphinx']}") + session.chdir("docs") + session.run( + "sphinx-apidoc", + "-o", + "api/", + "--module-first", + "--no-toc", + "--force", + "../src/jace", + ) + + +@nox.session(reuse_venv=True) +def user_docs(session: nox.Session) -> None: + """Build the user docs. Pass "--serve" to serve. 
Pass "-b linkcheck" to check links.""" # noqa: W505 [doc-line-too-long] parser = argparse.ArgumentParser() parser.add_argument("--serve", action="store_true", help="Serve after building") parser.add_argument("-b", dest="builder", default="html", help="Build target (default: html)") @@ -64,45 +169,12 @@ def docs(session: nox.Session) -> None: session.run("sphinx-build", "--keep-going", *shared_args) -@nox.session -def build_api_docs(session: nox.Session) -> None: - """Build (regenerate) API docs.""" - session.install("sphinx") - session.chdir("docs") - session.run( - "sphinx-apidoc", - "-o", - "api/", - "--module-first", - "--no-toc", - "--force", - "../src/jace", - ) - - @nox.session def build(session: nox.Session) -> None: """Build an SDist and wheel.""" - build_path = DIR.joinpath("build") + build_path = ROOT_DIR / "build" if build_path.exists(): shutil.rmtree(build_path) - session.install("build") + session.install(f"build=={REQUIREMENTS['build']}") session.run("python", "-m", "build") - - -@nox.session -def requirements(session: nox.Session) -> None: - """Freeze dependencies from input specs and synchronize across tools.""" - requirements_path = DIR / "requirements" - req_sync_tool = requirements_path / "sync_tool.py" - - dependencies = ["pre-commit"] + nox.project.load_toml(req_sync_tool)["dependencies"] - session.install(*dependencies) - session.install("pip-compile-multi") - - session.run("python", req_sync_tool, "pull") - session.run("pip-compile-multi", "-g", "--skip-constraints") - session.run("python", req_sync_tool, "push") - - session.run("pre-commit", "run", "--files", ".pre-commit-config.yaml", success_codes=[0, 1]) diff --git a/requirements/base.txt b/requirements/base.txt index 1aae055..70bc827 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -9,36 +9,20 @@ aenum==3.1.15 # via dace astunparse==1.6.3 # via dace -blinker==1.8.2 - # via flask -certifi==2024.6.2 - # via requests -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via flask -dace==0.15.1 +dace==0.16 # via -r requirements/base.in dill==0.3.8 # via dace -flask==3.0.3 - # via dace fparser==0.1.4 # via dace -idna==3.7 - # via requests -itsdangerous==2.2.0 - # via flask jax[cpu]==0.4.29 # via -r requirements/base.in jaxlib==0.4.29 # via jax jinja2==3.1.4 - # via flask + # via dace markupsafe==2.1.5 - # via - # jinja2 - # werkzeug + # via jinja2 ml-dtypes==0.4.0 # via # jax @@ -64,8 +48,6 @@ ply==3.11 # via dace pyyaml==6.0.1 # via dace -requests==2.32.3 - # via dace scipy==1.13.1 # via # jax @@ -74,16 +56,12 @@ setuptools-scm==8.1.0 # via fparser six==1.16.0 # via astunparse -sympy==1.9 +sympy==1.12.1 # via dace tomli==2.0.1 # via setuptools-scm -urllib3==2.2.1 - # via requests websockets==12.0 # via dace -werkzeug==3.0.3 - # via flask wheel==0.43.0 # via astunparse diff --git a/requirements/cuda12.txt b/requirements/cuda12.txt index 078edf2..ebeb3aa 100644 --- a/requirements/cuda12.txt +++ b/requirements/cuda12.txt @@ -10,7 +10,7 @@ alembic==1.13.1 # via optuna colorlog==6.8.2 # via optuna -cupy-cuda12x==13.1.0 +cupy-cuda12x==13.2.0 # via -r requirements/cuda12.in fastrlock==0.8.2 # via cupy-cuda12x diff --git a/requirements/dev-cuda12.in b/requirements/dev-cuda12.in index aa00469..496e623 100644 --- a/requirements/dev-cuda12.in +++ b/requirements/dev-cuda12.in @@ -1,2 +1,2 @@ --r base.in +-r cuda12.in -r dev.in diff --git a/requirements/dev-cuda12.txt b/requirements/dev-cuda12.txt index 7c894e8..0dca1e7 100644 --- a/requirements/dev-cuda12.txt +++ b/requirements/dev-cuda12.txt @@ -1,11 
+1,11 @@ -# SHA1:d9f19ac423500f255d32c3e29dd96fd3b5c649a8 +# SHA1:bdbfa7e1d9b9ca837d092c4efc6792c2b58238be # # This file is autogenerated by pip-compile-multi # To update, run: # # pip-compile-multi # --r base.txt +-r cuda12.txt -r dev.txt # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/dev.in b/requirements/dev.in index b648f8a..4421d27 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -1,4 +1,5 @@ -r base.in +build>=1.2 furo>=2023.08.17 mypy>=1.9.0 myst_parser>=0.13 @@ -6,7 +7,8 @@ pytest>=6 pytest-cov>=3 ruff>=0.3.5 sphinx>=7.0 -sphinx_autodoc_typehints -sphinx_copybutton +sphinx-autobuild>=2021.3.14 +sphinx_autodoc_typehints>=2.1 +sphinx_copybutton>=0.5 tomlkit>=0.12.4 typing-extensions>=4.10.0 diff --git a/requirements/dev.txt b/requirements/dev.txt index 176b9a2..4b45b5e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,4 +1,4 @@ -# SHA1:a7338646990b5874d5aa51bb3e2bd37753c754eb +# SHA1:60e060370596513d7e06534a0655974dcc750dcd # # This file is autogenerated by pip-compile-multi # To update, run: @@ -8,10 +8,24 @@ -r base.txt alabaster==0.7.16 # via sphinx +anyio==4.4.0 + # via + # starlette + # watchfiles babel==2.15.0 # via sphinx beautifulsoup4==4.12.3 # via furo +build==1.2.1 + # via -r requirements/dev.in +certifi==2024.6.2 + # via requests +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via uvicorn +colorama==0.4.6 + # via sphinx-autobuild coverage[toml]==7.5.3 # via pytest-cov docutils==0.21.2 @@ -19,9 +33,17 @@ docutils==0.21.2 # myst-parser # sphinx exceptiongroup==1.2.1 - # via pytest + # via + # anyio + # pytest furo==2024.5.6 # via -r requirements/dev.in +h11==0.14.0 + # via uvicorn +idna==3.7 + # via + # anyio + # requests imagesize==1.4.1 # via sphinx iniconfig==2.0.0 @@ -46,14 +68,20 @@ pygments==2.18.0 # via # furo # sphinx +pyproject-hooks==1.1.0 + # via build pytest==8.2.2 # via # -r requirements/dev.in # pytest-cov pytest-cov==5.0.0 # via -r requirements/dev.in +requests==2.32.3 + # via sphinx ruff==0.4.8 # via -r requirements/dev.in +sniffio==1.3.1 + # via anyio snowballstemmer==2.2.0 # via sphinx soupsieve==2.5 @@ -63,9 +91,12 @@ sphinx==7.3.7 # -r requirements/dev.in # furo # myst-parser + # sphinx-autobuild # sphinx-autodoc-typehints # sphinx-basic-ng # sphinx-copybutton +sphinx-autobuild==2024.4.16 + # via -r requirements/dev.in sphinx-autodoc-typehints==2.1.1 # via -r requirements/dev.in sphinx-basic-ng==1.0.0b2 @@ -84,12 +115,22 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx +starlette==0.37.2 + # via sphinx-autobuild tomlkit==0.12.5 # via -r requirements/dev.in typing-extensions==4.12.2 # via # -r requirements/dev.in + # anyio # mypy + # uvicorn +urllib3==2.2.1 + # via requests +uvicorn==0.30.1 + # via sphinx-autobuild +watchfiles==0.22.0 + # via sphinx-autobuild # The following packages are considered to be unsafe in a requirements file: # setuptools From 6eef078c00f4f4ced6ec0a4a7bce770f8fc4b9d0 Mon Sep 17 00:00:00 2001 From: Enrique Gonzalez Paredes Date: Fri, 14 Jun 2024 15:13:13 +0200 Subject: [PATCH 6/6] Add information message --- noxfile.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/noxfile.py b/noxfile.py index e55b13f..27f9404 100644 --- a/noxfile.py +++ b/noxfile.py @@ -76,6 +76,8 @@ def venv(session: nox.Session) -> None: print(f"Creating virtualenv at '{venv_path}' (options: {virtualenv_args})...") session.install("virtualenv") session.run("virtualenv", venv_path, silent=True) + else: + 
print(f"'{venv_path}' path already exists. Skipping virtualenv creation...") python_path = venv_path / "bin" / "python" requirements_file = f"requirements/{req_preset}.txt"