From a44e04292c852f526326831432f069588b6283e8 Mon Sep 17 00:00:00 2001
From: wpbonelli
Date: Thu, 18 Jul 2024 12:42:26 -0400
Subject: [PATCH] ci(release): fix nightly build

---
 .build_rtd_docs/conf.py                |  12 +++
 .github/workflows/release.yml          |   4 +
 .github/workflows/release_dispatch.yml |   2 +-
 distribution/benchmark.py              |   3 +-
 distribution/build_dist.py             |  45 ++++----
 distribution/build_docs.py             | 141 ++++++++++++++-----
 distribution/update_version.py         |   1 -
 doc/mf6io/mf6ivar/deprecations.py      |  11 +-
 8 files changed, 133 insertions(+), 86 deletions(-)

diff --git a/.build_rtd_docs/conf.py b/.build_rtd_docs/conf.py
index 2de05fbe9be..bfc37a3a41f 100644
--- a/.build_rtd_docs/conf.py
+++ b/.build_rtd_docs/conf.py
@@ -81,6 +81,18 @@
     dst = os.path.join(dstdir, fpth)
     shutil.copy(src, dst)

+# -- build the deprecations table --------------------------------------------
+print("Build the deprecations markdown table")
+pth = os.path.join("..", "doc", "mf6io", "mf6ivar")
+args = (sys.executable, "deprecations.py")
+# run the command
+proc = Popen(args, stdout=PIPE, stderr=PIPE, cwd=pth)
+stdout, stderr = proc.communicate()
+if stdout:
+    print(stdout.decode("utf-8"))
+if stderr:
+    print("Errors:\n{}".format(stderr.decode("utf-8")))
+
 # -- copy deprecations markdown ---------------------------------------------
 print("Copy the deprecations table")
 dstdir = "_mf6run"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 8f1fb7b720e..65e103de583 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -459,6 +459,10 @@ jobs:
           name: deprecations
           path: modflow6/doc/mf6io/mf6ivar/md/deprecations.md

+      - name: Build MF6IO files from DFNs
+        working-directory: modflow6/doc/mf6io/mf6ivar
+        run: python mf6ivar.py
+
       - name: Build documentation
         env:
           # this step is lazy about building the mf6 examples PDF document, first
diff --git a/.github/workflows/release_dispatch.yml b/.github/workflows/release_dispatch.yml
index 1bfc1a0bd52..8705a9608c2 100644
--- a/.github/workflows/release_dispatch.yml
+++ b/.github/workflows/release_dispatch.yml
@@ -146,7 +146,7 @@ jobs:
           echo "models=$models" >> $GITHUB_OUTPUT
   make_dist:
     name: Make distribution
-    uses: MODFLOW-USGS/modflow6/.github/workflows/release.yml@develop
+    uses: wpbonelli/modflow6/.github/workflows/release.yml@fix-nightly-build
     needs: set_options
     with:
       # If the workflow is manually triggered, the maintainer must manually set approve=true to approve a release.
diff --git a/distribution/benchmark.py b/distribution/benchmark.py
index 599335df18e..a633703e771 100644
--- a/distribution/benchmark.py
+++ b/distribution/benchmark.py
@@ -428,11 +428,10 @@ def test_run_benchmarks(tmp_path):


 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Benchmark MODFLOW 6 versions on example models",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
-            Benchmarks the current version of MODFLOW 6 against the latest official release.
+            Benchmarks the current version of MODFLOW 6 against the latest official release, with the example models stored in the MODFLOW-USGS/modflow6-examples repository.
             """
         ),
diff --git a/distribution/build_dist.py b/distribution/build_dist.py
index c457db6f65e..fb178f10b46 100644
--- a/distribution/build_dist.py
+++ b/distribution/build_dist.py
@@ -111,16 +111,21 @@ def test_copy_sources(tmp_path):
 def setup_examples(
     bin_path: PathLike,
     examples_path: PathLike,
-    overwrite: bool = False,
+    force: bool = False,
     models: Optional[List[str]] = None,
 ):
     examples_path = Path(examples_path).expanduser().absolute()
-    latest = get_release("MODFLOW-USGS/modflow6-examples", "latest")
+
+    # find and download example models distribution from latest examples release
+    latest = get_release(
+        "MODFLOW-USGS/modflow6-examples", tag="latest", verbose=True
+    )
     assets = latest["assets"]
+    print(f"Found {len(assets)} assets from the latest examples release:")
+    pprint([a["name"] for a in assets])
     asset = next(
-        iter([a for a in assets if a["name"] == "mf6examples.zip"]), None
+        iter([a for a in assets if a["name"].endswith("examples.zip")]), None
     )
-    # download example models zip asset
     download_and_unzip(
         asset["browser_download_url"], examples_path, verbose=True
     )
@@ -141,7 +146,7 @@ def setup_examples(
     model_paths = get_model_paths(examples_path)
     for mp in model_paths:
         script_path = mp / f"run{SCR_EXT}"
-        if not overwrite and script_path.is_file():
+        if not force and script_path.is_file():
             print(f"Script {script_path} already exists")
         else:
             print(f"Creating {script_path}")
@@ -165,7 +170,7 @@ def setup_examples(
     # add runall.sh/bat, which runs all examples
     script_path = examples_path / f"runall{SCR_EXT}"
-    if not overwrite and script_path.is_file():
+    if not force and script_path.is_file():
         print(f"Script {script_path} already exists")
     else:
         print(f"Creating {script_path}")
@@ -191,7 +196,7 @@ def setup_examples(

 def build_programs_meson(
-    build_path: PathLike, bin_path: PathLike, overwrite: bool = False
+    build_path: PathLike, bin_path: PathLike, force: bool = False
 ):
     build_path = Path(build_path).expanduser().absolute()
     bin_path = Path(bin_path).expanduser().absolute()
@@ -204,7 +209,7 @@ def build_programs_meson(
     lib_paths = [bin_path / f"libmf6{LIB_EXT}"]

     if (
-        not overwrite
+        not force
         and all(p.is_file() for p in exe_paths)
         and all(p.is_file() for p in lib_paths)
     ):
@@ -293,7 +298,7 @@ def build_distribution(
     build_path: PathLike,
     output_path: PathLike,
     full: bool = False,
-    overwrite: bool = False,
+    force: bool = False,
     models: Optional[List[str]] = None,
 ):
     print(f"Building {'full' if full else 'minimal'} distribution")
@@ -305,7 +310,7 @@ def build_distribution(
     build_programs_meson(
         build_path=build_path,
         bin_path=output_path / "bin",
-        overwrite=overwrite,
+        force=force,
     )

     # code.json metadata
@@ -319,7 +324,7 @@ def build_distribution(
         setup_examples(
             bin_path=output_path / "bin",
             examples_path=output_path / "examples",
-            overwrite=overwrite,
+            force=force,
             models=models,
         )
@@ -334,7 +339,7 @@ def build_distribution(
         bin_path=output_path / "bin",
         full=full,
         output_path=output_path / "doc",
-        overwrite=overwrite,
+        force=force,
     )
@@ -348,7 +353,7 @@ def test_build_distribution(tmp_path, full):
         build_path=tmp_path / "builddir",
         output_path=output_path,
         full=full,
-        overwrite=True,
+        force=True,
     )

     if full:
@@ -378,16 +383,20 @@ def test_build_distribution(tmp_path, full):

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Create a Modflow 6 distribution directory for release",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
-            Create a distribution folder. If no output path is provided
-            distribution files are written to the distribution/ folder.
+            Create a MODFLOW 6 distribution. If output path is provided
+            distribution files are written to the selected path, if not
+            they are written to the distribution/ project subdirectory.
             By default a minimal distribution containing only binaries,
             mf6io documentation, release notes and metadata (code.json)
             is created. To create a full distribution including sources
-            and examples, use the --full flag.
+            and examples, use the --full flag. Models to be included in
+            the examples and documentation can be selected with --model
+            (or -m), which may be used multiple times. Use --force (-f)
+            to overwrite preexisting distribution artifacts; by default
+            the script is lazy and will only create what it can't find.
             """
         ),
@@ -436,6 +445,6 @@ def test_build_distribution(tmp_path, full):
         build_path=build_path,
         output_path=out_path,
         full=args.full,
-        overwrite=args.force,
+        force=args.force,
         models=models,
     )
diff --git a/distribution/build_docs.py b/distribution/build_docs.py
index e34241b8791..0dfc5fb6cbe 100644
--- a/distribution/build_docs.py
+++ b/distribution/build_docs.py
@@ -7,7 +7,7 @@
 from datetime import datetime
 from os import PathLike, environ
 from pathlib import Path
-from pprint import pprint
+from pprint import pformat, pprint
 from tempfile import TemporaryDirectory
 from typing import List, Optional
 from urllib.error import HTTPError
@@ -71,6 +71,8 @@ def clean_tex_files():
+    """Remove LaTeX files before a clean rebuild."""
+
     print("Cleaning latex files")
     exts = ["pdf", "aux", "bbl", "idx", "lof", "out", "toc"]
     pth = PROJ_ROOT_PATH / "doc" / "mf6io"
@@ -116,6 +118,8 @@ def download_benchmarks(
     verbose: bool = False,
     repo_owner: str = "MODFLOW-USGS",
 ) -> Optional[Path]:
+    """Try to download MF6 benchmarks from GitHub Actions."""
+
     output_path = Path(output_path).expanduser().absolute()
     name = "run-time-comparison" # todo make configurable
     repo = (
@@ -169,9 +173,11 @@ def test_download_benchmarks(tmp_path, github_user):

 def build_benchmark_tex(
     output_path: PathLike,
-    overwrite: bool = False,
+    force: bool = False,
     repo_owner: str = "MODFLOW-USGS",
 ):
+    """Build LaTeX files for MF6 performance benchmarks to go into the release notes."""
+
     BENCHMARKS_PATH.mkdir(parents=True, exist_ok=True)
     benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"
@@ -182,7 +188,7 @@
     )

     # run benchmarks again if no benchmarks found on GitHub or overwrite requested
-    if overwrite or not benchmarks_path.is_file():
+    if force or not benchmarks_path.is_file():
         run_benchmarks(
             build_path=PROJ_ROOT_PATH / "builddir",
             current_bin_path=PROJ_ROOT_PATH / "bin",
@@ -222,6 +228,8 @@ def test_build_benchmark_tex(tmp_path):

 def build_deprecations_tex():
+    """Build LaTeX files for the deprecations table to go into the release notes."""
+
     mf6ivar_path = MF6IO_PATH / "mf6ivar"
     md_path = mf6ivar_path / "md"
     md_path.mkdir(exist_ok=True)
@@ -260,70 +268,80 @@ def test_build_deprecations_tex():
     build_deprecations_tex()


-def build_mf6io_tex_from_dfn(
-    overwrite: bool = False, models: Optional[List[str]] = None
-):
-    if overwrite:
+def build_mf6io_tex(models: Optional[List[str]] = None, force: bool = False):
+    """Build LaTeX files for the MF6IO guide from DFN files."""
+    if force:
         clean_tex_files()

-    def files_match(tex_path, dfn_path, ignored):
-        dfn_names = [
-            f.stem
-            for f in dfn_path.glob("*")
+    def match(tex_names, dfn_names):
+        tex = set(tex_names)
+        dfn = set(dfn_names)
+        diff = tex ^ dfn
+        return not any(diff)
+
+    def assert_match(tex_names, dfn_names):
+        tex = set(tex_names)
+        dfn = set(dfn_names)
+        diff = tex ^ dfn
+        assert not any(diff), (
+            f"=> symmetric difference:\n{pformat(diff)}\n"
+            f"=> tex - dfn:\n{pformat(tex - dfn)}\n"
+            f"=> dfn - tex:\n{pformat(dfn - tex)}\n"
+        )
+
+    with set_dir(PROJ_ROOT_PATH / "doc" / "mf6io" / "mf6ivar"):
+        ignored = ["appendix", "common"] + list(
+            set(DEFAULT_MODELS) - set(models)
+        )
+        included = models + ["sim", "utl", "exg", "sln"]
+        tex_files = [
+            f
+            for f in Path("tex").glob("*.tex")
             if f.is_file()
-            and "dfn" in f.suffix
+            and any(pattern in f.name for pattern in included)
             and not any(pattern in f.name for pattern in ignored)
         ]
-        tex_names = [
-            f.stem.replace("-desc", "")
-            for f in tex_path.glob("*")
+        dfn_files = [
+            f
+            for f in Path("dfn").glob("*.dfn")
             if f.is_file()
-            and "tex" in f.suffix
+            and any(pattern in f.name for pattern in included)
             and not any(pattern in f.name for pattern in ignored)
         ]
-
-        return set(tex_names) == set(dfn_names)
-
-    with set_dir(PROJ_ROOT_PATH / "doc" / "mf6io" / "mf6ivar"):
-        ignored = ["appendix", "common"]
-        tex_pth = Path("tex")
-        dfn_pth = Path("dfn")
-        tex_files = [f for f in tex_pth.glob("*") if f.is_file()]
-        dfn_files = [f for f in dfn_pth.glob("*") if f.is_file()]
-
-        if (
-            not overwrite
-            and any(tex_files)
-            and any(dfn_files)
-            and files_match(tex_pth, dfn_pth, ignored)
-        ):
+        dfn_names = [f.stem for f in dfn_files]
+        tex_names = [f.stem.replace("-desc", "") for f in tex_files]
+        if match(tex_names, dfn_names) and not force:
             print("DFN files already exist:")
             pprint(dfn_files)
         else:
             for f in tex_files:
                 f.unlink()
-            # run python script
+            # run mf6ivar script and make sure a tex
+            # file was generated for each dfn
             args = [sys.executable, "mf6ivar.py"]
             if models is not None and any(models):
                 for model in models:
                     args += ["--model", model]
-            out, err, ret = run_cmd(*args)
+            out, err, ret = run_cmd(*args, verbose=True)
             assert not ret, out + err
-
-            # check that dfn and tex files match
-            assert files_match(tex_pth, dfn_pth, ignored)
+            assert_match(tex_names, dfn_names)


 @no_parallel
-@pytest.mark.parametrize("overwrite", [True, False])
-def test_build_mf6io_tex_from_dfn(overwrite):
-    build_mf6io_tex_from_dfn(overwrite=overwrite)
+@pytest.mark.parametrize("force", [True, False])
+def test_build_mf6io_tex(force):
+    build_mf6io_tex(force=force)


-def build_mf6io_tex_example(
+def build_usage_example_tex(
     workspace_path: PathLike, bin_path: PathLike, example_model_path: PathLike
 ):
+    """
+    Build LaTeX files for the MF6 usage example in the MF6IO guide.
+    Runs MF6 to capture the output and insert into the document.
+    """
+
     workspace_path = Path(workspace_path) / "workspace"
     bin_path = Path(bin_path).expanduser().absolute()
     mf6_exe_path = bin_path / f"mf6{EXE_EXT}"
@@ -386,12 +404,14 @@ def build_mf6io_tex_example(
         f.write("}\n")


-def build_pdfs_from_tex(
+def build_pdfs(
     tex_paths: List[PathLike],
     output_path: PathLike,
     passes: int = 3,
-    overwrite: bool = False,
+    force: bool = False,
 ):
+    """Build PDF documents from LaTeX files."""
+
     print("Building PDFs from LaTex:")
     pprint(tex_paths)
@@ -402,7 +422,7 @@
         pdf_name = tex_path.stem + ".pdf"
         pdf_path = tex_path.parent / pdf_name
         tgt_path = output_path / pdf_name
-        if overwrite or not tgt_path.is_file():
+        if force or not tgt_path.is_file():
             print(f"Converting {tex_path} to PDF")
             with set_dir(tex_path.parent):
                 first = True
@@ -454,25 +474,27 @@ def test_build_pdfs_from_tex(tmp_path):
         DOCS_PATH / "ConverterGuide" / "converter_mf5to6.bbl",
     ]

-    build_pdfs_from_tex(tex_paths, tmp_path)
+    build_pdfs(tex_paths, tmp_path)
     for p in tex_paths[:-1] + bbl_paths:
         assert p.is_file()


 def build_documentation(
     bin_path: PathLike,
+    force: bool = False,
     full: bool = False,
+    models: Optional[List[str]] = None,
     output_path: Optional[PathLike] = None,
-    overwrite: bool = False,
     repo_owner: str = "MODFLOW-USGS",
-    models: Optional[List[str]] = None,
 ):
+    """Build documentation for a MODFLOW 6 distribution."""
+
     print(f"Building {'full' if full else 'minimal'} documentation")

     bin_path = Path(bin_path).expanduser().absolute()
     output_path = Path(output_path).expanduser().absolute()

-    if (output_path / "mf6io.pdf").is_file() and not overwrite:
+    if (output_path / "mf6io.pdf").is_file() and not force:
         print(f"{output_path / 'mf6io.pdf'} already exists")
         return
@@ -480,13 +502,13 @@ def build_documentation(
     output_path.mkdir(parents=True, exist_ok=True)

     # build LaTex input/output docs from DFN files
-    build_mf6io_tex_from_dfn(overwrite=overwrite, models=models)
+    build_mf6io_tex(force=force, models=models)

     # build LaTeX input/output example model docs
     with TemporaryDirectory() as temp:
-        build_mf6io_tex_example(
-            workspace_path=Path(temp),
+        build_usage_example_tex(
             bin_path=bin_path,
+            workspace_path=Path(temp),
             example_model_path=PROJ_ROOT_PATH / ".mf6minsim",
         )
@@ -496,12 +518,12 @@ def build_documentation(
     if full:
         # convert benchmarks to LaTex, running them first if necessary
         build_benchmark_tex(
-            output_path=output_path, overwrite=overwrite, repo_owner=repo_owner
+            output_path=output_path, force=force, repo_owner=repo_owner
         )

         # download example docs
         pdf_name = "mf6examples.pdf"
-        if overwrite or not (output_path / pdf_name).is_file():
+        if force or not (output_path / pdf_name).is_file():
             latest = get_release(f"{repo_owner}/modflow6-examples", "latest")
             assets = latest["assets"]
             asset = next(
@@ -524,17 +546,17 @@ def build_documentation(
                 raise

         # convert LaTex to PDF
-        build_pdfs_from_tex(
+        build_pdfs(
             tex_paths=TEX_PATHS["full"],
             output_path=output_path,
-            overwrite=overwrite,
+            force=force,
         )
     else:
         # just convert LaTeX to PDF
-        build_pdfs_from_tex(
+        build_pdfs(
             tex_paths=TEX_PATHS["minimal"],
             output_path=output_path,
-            overwrite=overwrite,
+            force=force,
         )

     # enforce os line endings on all text files
@@ -563,7 +585,6 @@ def test_build_documentation(tmp_path):

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Convert LaTeX docs to PDFs",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
@@ -625,9 +646,9 @@
     models = args.model if args.model else DEFAULT_MODELS
     build_documentation(
         bin_path=bin_path,
+        force=args.force,
         full=args.full,
+        models=models,
         output_path=output_path,
-        overwrite=args.force,
         repo_owner=args.repo_owner,
-        models=models,
     )
diff --git a/distribution/update_version.py b/distribution/update_version.py
index 3df05b104d4..2b781927896 100755
--- a/distribution/update_version.py
+++ b/distribution/update_version.py
@@ -467,7 +467,6 @@ def test_update_version(version, approved, developmode):

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Update Modflow 6 version",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
diff --git a/doc/mf6io/mf6ivar/deprecations.py b/doc/mf6io/mf6ivar/deprecations.py
index d6896607e83..9f2efe0f5f5 100644
--- a/doc/mf6io/mf6ivar/deprecations.py
+++ b/doc/mf6io/mf6ivar/deprecations.py
@@ -1,9 +1,11 @@
-import os
 from pathlib import Path
 from typing import List, Optional, Tuple

 from packaging.version import Version

+PROJ_ROOT_PATH = Path(__file__).parents[3]
+MF6IVAR_PATH = PROJ_ROOT_PATH / "doc" / "mf6io" / "mf6ivar"
+

 def get_deprecations(
     dfndir,
@@ -33,7 +35,7 @@ def get_deprecations(

 def create_deprecations_file(dfndir, mddir, verbose):
     deprecations = get_deprecations(dfndir)
-    deps_path = (Path(mddir) / "deprecations.md").absolute()
+    deps_path = (mddir / "deprecations.md").absolute()
     if verbose:
         print(f"Found {len(deprecations)} deprecations, writing {deps_path}")
     with open(deps_path, "w") as f:
@@ -50,6 +52,7 @@ def create_deprecations_file(dfndir, mddir, verbose):

 if __name__ == "__main__":
-    dfndir = os.path.join(".", "dfn")
-    mddir = os.path.join(".", "md")
+    dfndir = MF6IVAR_PATH / "dfn"
+    mddir = MF6IVAR_PATH / "md"
+    mddir.mkdir(exist_ok=True)
     create_deprecations_file(dfndir, mddir, verbose=True)