From 25388afa0af8ff721dd66fd4267400d6f64aa2a7 Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Mon, 25 Mar 2024 13:48:10 -0400
Subject: [PATCH 01/38] restrict sorting to first entry of tuple

---
 concatenator/stitchee.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/concatenator/stitchee.py b/concatenator/stitchee.py
index 89c7090..ce19b10 100644
--- a/concatenator/stitchee.py
+++ b/concatenator/stitchee.py
@@ -120,7 +120,10 @@ def stitchee(
             xrdataset_list.append(xrds)

         # Reorder the xarray datasets according to the concat dim values.
-        xrdataset_list = [x for _, x in sorted(zip(concat_dim_order, xrdataset_list))]
+        xrdataset_list = [
+            dataset
+            for _, dataset in sorted(zip(concat_dim_order, xrdataset_list), key=lambda x: x[0])
+        ]

         # Flattened files are concatenated together (Using XARRAY).
         start_time = time.time()
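Why the `key=lambda x: x[0]` restriction matters: Python compares tuples element by element, so whenever two concat-dimension values tie, the default `sorted()` falls through to comparing the second tuple entries -- the opened datasets themselves -- which have no meaningful ordering. A minimal sketch of the failure mode and the fix; `Granule` is a hypothetical stand-in for an opened `xarray.Dataset`, not stitchee code:

```python
# Sketch only: `Granule` stands in for an opened dataset, which defines no ordering.
class Granule:
    pass

concat_dim_order = [1.0, 1.0]  # tied first entries force a fallback comparison
granules = [Granule(), Granule()]

try:
    sorted(zip(concat_dim_order, granules))  # tuple sort reaches the Granule objects
except TypeError as err:
    print("default tuple sort fails:", err)

# Restricting the sort key to the first tuple entry never compares the objects:
reordered = [g for _, g in sorted(zip(concat_dim_order, granules), key=lambda x: x[0])]
print(len(reordered))  # 2
```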
From 2cc1cdedfab4a6bc26f5ea50c4e535900171fe46 Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Mon, 25 Mar 2024 13:49:13 -0400
Subject: [PATCH 02/38] add new test (without data files) for stitching together subsetted outputs

---
 tests/test_concat.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/tests/test_concat.py b/tests/test_concat.py
index b8f7e7a..a36a61c 100644
--- a/tests/test_concat.py
+++ b/tests/test_concat.py
@@ -88,6 +88,14 @@ def test_tempo_no2_concat_with_stitchee(self, temp_output_dir):
             concat_method="xarray-concat",
         )

+    def test_tempo_no2_subsetter_output_concat_with_stitchee(self, temp_output_dir):
+        self.run_verification_with_stitchee(
+            input_dir=data_for_tests_dir / "tempo/no2_subsetted",
+            output_dir=temp_output_dir,
+            output_name="tempo_no2_stitcheed.nc",
+            concat_method="xarray-concat",
+        )
+
     def test_tempo_hcho_concat_with_stitchee(self, temp_output_dir):
         self.run_verification_with_stitchee(
             input_dir=data_for_tests_dir / "tempo/hcho",

From 0494d8f6ef1d55b182448ec135c5c2465f9513f6 Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Mon, 25 Mar 2024 13:53:27 -0400
Subject: [PATCH 03/38] use dim not var, and check both original and flattened key in test assertion

---
 tests/test_concat.py | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/tests/test_concat.py b/tests/test_concat.py
index a36a61c..c60e673 100644
--- a/tests/test_concat.py
+++ b/tests/test_concat.py
@@ -7,6 +7,7 @@
 import netCDF4 as nc
 import pytest

+from concatenator.group_handling import GROUP_DELIM
 from concatenator.stitchee import stitchee

 from . import data_for_tests_dir
@@ -46,10 +47,21 @@ def run_verification_with_stitchee(

         # Verify that the length of the record dimension in the concatenated file equals
         # the sum of the lengths across the input files
-        length_sum = 0
+        original_files_length_sum = 0
         for file in prepared_input_files:
-            length_sum += len(nc.Dataset(file).variables[record_dim_name])
-        assert length_sum == len(merged_dataset.variables[record_dim_name])
+            # length_sum += len(nc.Dataset(file).variables[record_dim_name])
+            with nc.Dataset(file) as ncds:
+                try:
+                    original_files_length_sum += ncds.dimensions[record_dim_name].size
+                except KeyError:
+                    original_files_length_sum += ncds.dimensions[GROUP_DELIM + record_dim_name].size
+
+        try:
+            merged_file_length = merged_dataset.dimensions[record_dim_name].size
+        except KeyError:
+            merged_file_length = merged_dataset.dimensions[GROUP_DELIM + record_dim_name].size
+
+        assert original_files_length_sum == merged_file_length

         return merged_dataset

From bdc81ba94bd946d25fb99028e48acf5eef7ecabf Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Mon, 25 Mar 2024 13:53:54 -0400
Subject: [PATCH 04/38] autoupdate pre-commit

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aaf53a5..93b93ce 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -15,7 +15,7 @@ repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.3.3'
+    rev: 'v0.3.4'
    hooks:
      - id: ruff
        args: [ "--fix" ]

From 9446f44c3e621c9954370e53628111768a402fc4 Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Mon, 25 Mar 2024 13:55:09 -0400
Subject: [PATCH 05/38] update CHANGELOG.md

---
 CHANGELOG.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e90099d..9d0ecf0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -26,4 +26,5 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Deprecated
 ### Removed
 ### Fixed
-- [PR #4](https://github.com/danielfromearth/stitchee/pull/4): Error with TEMPO ozone profile data because of duplicated dimension names
+- [Pull #4](https://github.com/danielfromearth/stitchee/pull/4): Error with TEMPO ozone profile data because of duplicated dimension names
+- [Pull #133](https://github.com/danielfromearth/stitchee/pull/133): Fix conflicting dimensions on record dimension sorting

From 0ea81f3b9cd8df7640c06bb49e1207c561815852 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 25 Mar 2024 22:47:49 +0000
Subject: [PATCH 06/38] Bump ruff from 0.3.3 to 0.3.4

Bumps [ruff](https://github.com/astral-sh/ruff) from 0.3.3 to 0.3.4.
- [Release notes](https://github.com/astral-sh/ruff/releases)
- [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/ruff/compare/v0.3.3...v0.3.4)

---
updated-dependencies:
- dependency-name: ruff
  dependency-type: direct:development
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4b236c5..c357aa1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1054,28 +1054,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.3.3" +version = "0.3.4" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:973a0e388b7bc2e9148c7f9be8b8c6ae7471b9be37e1cc732f8f44a6f6d7720d"}, - {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfa60d23269d6e2031129b053fdb4e5a7b0637fc6c9c0586737b962b2f834493"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eca7ff7a47043cf6ce5c7f45f603b09121a7cc047447744b029d1b719278eb5"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7d3f6762217c1da954de24b4a1a70515630d29f71e268ec5000afe81377642d"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c19e8598916d9c6f5a5437671f55ee93c212a2c4c569605dc3842b6820386"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a6cbf216b69c7090f0fe4669501a27326c34e119068c1494f35aaf4cc683778"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352e95ead6964974b234e16ba8a66dad102ec7bf8ac064a23f95371d8b198aab"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6ab88c81c4040a817aa432484e838aaddf8bfd7ca70e4e615482757acb64f8"}, - {file = "ruff-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79bca3a03a759cc773fca69e0bdeac8abd1c13c31b798d5bb3c9da4a03144a9f"}, - {file = "ruff-0.3.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2700a804d5336bcffe063fd789ca2c7b02b552d2e323a336700abb8ae9e6a3f8"}, - {file = "ruff-0.3.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd66469f1a18fdb9d32e22b79f486223052ddf057dc56dea0caaf1a47bdfaf4e"}, - {file = "ruff-0.3.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45817af234605525cdf6317005923bf532514e1ea3d9270acf61ca2440691376"}, - {file = "ruff-0.3.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0da458989ce0159555ef224d5b7c24d3d2e4bf4c300b85467b08c3261c6bc6a8"}, - {file = "ruff-0.3.3-py3-none-win32.whl", hash = "sha256:f2831ec6a580a97f1ea82ea1eda0401c3cdf512cf2045fa3c85e8ef109e87de0"}, - {file = "ruff-0.3.3-py3-none-win_amd64.whl", hash = "sha256:be90bcae57c24d9f9d023b12d627e958eb55f595428bafcb7fec0791ad25ddfc"}, - {file = "ruff-0.3.3-py3-none-win_arm64.whl", hash = "sha256:0171aab5fecdc54383993389710a3d1227f2da124d76a2784a7098e818f92d61"}, - {file = "ruff-0.3.3.tar.gz", hash = "sha256:38671be06f57a2f8aba957d9f701ea889aa5736be806f18c0cd03d6ff0cbca8d"}, + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"}, + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"}, + 
{file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"}, + {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"}, + {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"}, + {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"}, + {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"}, ] [[package]] @@ -1208,4 +1208,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "45f3f1ff77ebde64e9018898030958d42cd2cc5c985ebe40863651155ba625a0" +content-hash = "a4eb4cb12811dd650ea4bd2bf413d97d5c10a71ec930ef247e564411b8758395" diff --git a/pyproject.toml b/pyproject.toml index d18f861..3da6719 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ harmony-service-lib = "^1.0.25" pytest = "^8.1.1" mypy = "^1.9.0" black = "^24.2.0" -ruff = "^0.3.2" +ruff = "^0.3.4" coverage = "^7.4.4" [tool.poetry.group.extras.dependencies] From d61102ad15646f10d5c09f13d2ff67b3d9ae3cb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 22:54:11 +0000 Subject: [PATCH 07/38] Bump ruff from 0.3.4 to 0.3.5 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.3.4 to 0.3.5. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.3.4...v0.3.5) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index c357aa1..77bdd9b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1054,28 +1054,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.3.4" +version = "0.3.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"}, - {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"}, - {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"}, - {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"}, - {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"}, - {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, + 
{file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, + {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, + {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, + {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, + {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, ] [[package]] @@ -1208,4 +1208,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a4eb4cb12811dd650ea4bd2bf413d97d5c10a71ec930ef247e564411b8758395" +content-hash = "071d7273360d10824c06802c115eb78612de750eccd651af48e64cd838550099" diff --git a/pyproject.toml b/pyproject.toml index 3da6719..de8d6b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ harmony-service-lib = "^1.0.25" pytest = "^8.1.1" mypy = "^1.9.0" black = "^24.2.0" -ruff = "^0.3.4" +ruff = "^0.3.5" coverage = "^7.4.4" [tool.poetry.group.extras.dependencies] From cf504a538944dd8e2958e1b0e06f37b1da9159d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 17:19:45 +0000 Subject: [PATCH 08/38] Bump dask from 2024.3.1 to 2024.4.1 Bumps [dask](https://github.com/dask/dask) from 2024.3.1 to 2024.4.1. - [Changelog](https://github.com/dask/dask/blob/main/docs/release-procedure.md) - [Commits](https://github.com/dask/dask/compare/2024.3.1...2024.4.1) --- updated-dependencies: - dependency-name: dask dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 77bdd9b..2944327 100644 --- a/poetry.lock +++ b/poetry.lock @@ -396,13 +396,13 @@ toml = ["tomli"] [[package]] name = "dask" -version = "2024.3.1" +version = "2024.4.1" description = "Parallel PyData with Task Scheduling" optional = false python-versions = ">=3.9" files = [ - {file = "dask-2024.3.1-py3-none-any.whl", hash = "sha256:1ac260b8716b1a9fc144c0d7f958336812cfc3ef542a3742c9ae02387189b32b"}, - {file = "dask-2024.3.1.tar.gz", hash = "sha256:78bee2ffd735514e572adaa669fc2a437ec256aecb6bec036a1f5b8dd36b2e60"}, + {file = "dask-2024.4.1-py3-none-any.whl", hash = "sha256:cac5d28b9de7a7cfde46d6fbd8fa81f5654980d010b44d1dbe04dd13b5b63126"}, + {file = "dask-2024.4.1.tar.gz", hash = "sha256:6cd8eb03ddc8dc08d6ca5b167b8de559872bc51cc2b6587d0e9dc754ab19cdf0"}, ] [package.dependencies] @@ -420,7 +420,7 @@ array = ["numpy (>=1.21)"] complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] dataframe = ["dask-expr (>=1.0,<1.1)", "dask[array]", "pandas (>=1.3)"] diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.3.1)"] +distributed = ["distributed (==2024.4.1)"] test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] [[package]] @@ -1208,4 +1208,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "071d7273360d10824c06802c115eb78612de750eccd651af48e64cd838550099" +content-hash = "90b2e1cd2e016632e682433bfb6ccce3f624afe9dbf303076a39753f89de84c4" diff --git a/pyproject.toml b/pyproject.toml index de8d6b9..abd44fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ packages = [ python = "^3.10" netcdf4 = "^1.6.5" xarray = "^2024.2.0" -dask = "^2024.2.1" +dask = "^2024.4.1" harmony-service-lib = "^1.0.25" [tool.poetry.group.dev.dependencies] From de1cfca4b4ecd6b4d89d87b129e93609affcb415 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 17:26:03 +0000 Subject: [PATCH 09/38] Bump xarray from 2024.2.0 to 2024.3.0 Bumps [xarray](https://github.com/pydata/xarray) from 2024.2.0 to 2024.3.0. - [Release notes](https://github.com/pydata/xarray/releases) - [Changelog](https://github.com/pydata/xarray/blob/main/HOW_TO_RELEASE.md) - [Commits](https://github.com/pydata/xarray/compare/v2024.02.0...v2024.03.0) --- updated-dependencies: - dependency-name: xarray dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2944327..88e97f1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1168,13 +1168,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "xarray" -version = "2024.2.0" +version = "2024.3.0" description = "N-D labeled arrays and datasets in Python" optional = false python-versions = ">=3.9" files = [ - {file = "xarray-2024.2.0-py3-none-any.whl", hash = "sha256:a31a9b37e39bd5aeb098070a75d6dd4d59019eb339d735b86108b9e0cb391f94"}, - {file = "xarray-2024.2.0.tar.gz", hash = "sha256:a105f02791082c888ebe2622090beaff2e7b68571488d62fe6afdab35b4b717f"}, + {file = "xarray-2024.3.0-py3-none-any.whl", hash = "sha256:ca2bc4da2bf2e7879e15862a7a7c3fc76ad19f6a08931d030220cef39a29118d"}, + {file = "xarray-2024.3.0.tar.gz", hash = "sha256:5c1db19efdde61db7faedad8fc944f4e29698fb6fbd578d352668b63598bd1d8"}, ] [package.dependencies] @@ -1208,4 +1208,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "90b2e1cd2e016632e682433bfb6ccce3f624afe9dbf303076a39753f89de84c4" +content-hash = "684a82d688e8cc667a4157910b5b5faf2662e3927483f3e811de0685aa6061e3" diff --git a/pyproject.toml b/pyproject.toml index abd44fe..af8193e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ packages = [ [tool.poetry.dependencies] python = "^3.10" netcdf4 = "^1.6.5" -xarray = "^2024.2.0" +xarray = "^2024.3.0" dask = "^2024.4.1" harmony-service-lib = "^1.0.25" From 7de37b97914590b074559449f9362e284ec74596 Mon Sep 17 00:00:00 2001 From: Daniel Kaufman <114174502+danielfromearth@users.noreply.github.com> Date: Tue, 9 Apr 2024 09:58:53 -0400 Subject: [PATCH 10/38] Create LICENSE --- LICENSE | 201 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
From 2cbb1dd584c0bbd9145f5c86a8f9abf2e24c73a4 Mon Sep 17 00:00:00 2001 From: Daniel Kaufman <114174502+danielfromearth@users.noreply.github.com> Date: Tue, 9 Apr 2024 15:23:33 -0400 Subject: [PATCH 11/38] Add SRA number to end of README.md --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 7dfce40..d2a4c3c 100644 --- a/README.md +++ b/README.md @@ -83,3 +83,6 @@ For example: ```shell poetry run stitchee /path/to/netcdf/directory/ -o /path/to/output.nc ``` + +--- +This package is NASA Software Release Authorization (SRA) # LAR-20433-1 From 0ffcb893b78ef4f88a44a2482044570df2135318 Mon Sep 17 00:00:00 2001 From: Daniel Kaufman <114174502+danielfromearth@users.noreply.github.com> Date: Wed, 10 Apr 2024 12:11:55 -0400 Subject: [PATCH 12/38] Bump version to 1.0.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index af8193e..7ffd09c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "0.1.0" +version = "1.0.0" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md" From a6f507b8cc30cf8a3cf79ffccefc54b9feb8499b Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 12:34:31 -0400 Subject: [PATCH 13/38] add pytest-cov as dev dependency --- poetry.lock | 63 +++++++++++++++++++++++++++++++++----------------- pyproject.toml | 3 +-- 2 files changed, 43 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index 88e97f1..a981165 100644 --- a/poetry.lock +++ b/poetry.lock @@ -48,17 +48,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.65" +version = "1.34.81" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "boto3-1.34.65-py3-none-any.whl", hash = "sha256:b611de58ab28940a36c77d7ef9823427ebf25d5ee8277b802f9979b14e780534"}, - {file = "boto3-1.34.65.tar.gz", hash = "sha256:db97f9c29f1806cf9020679be0dd5ffa2aff2670e28e0e2046f98b979be498a4"}, + {file = "boto3-1.34.81-py3-none-any.whl", hash = "sha256:18224d206a8a775bcaa562d22ed3d07854934699190e12b52fcde87aac76a80e"}, + {file = "boto3-1.34.81.tar.gz", hash = "sha256:004dad209d37b3d2df88f41da13b7ad702a751904a335fac095897ff7a19f82b"}, ] [package.dependencies] -botocore = ">=1.34.65,<1.35.0" +botocore = ">=1.34.81,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -67,13 +67,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.65" +version = "1.34.81" description = "Low-level, data-driven core of boto 3." 
optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "botocore-1.34.65-py3-none-any.whl", hash = "sha256:3b0012d7293880c0a4883883047e93f2888d7317b5e9e8a982a991b90d951f3e"}, - {file = "botocore-1.34.65.tar.gz", hash = "sha256:399a1b1937f7957f0ee2e0df351462b86d44986b795ced980c11eb768b0e61c5"}, + {file = "botocore-1.34.81-py3-none-any.whl", hash = "sha256:85f6fd7c5715eeef7a236c50947de00f57d72e7439daed1125491014b70fab01"}, + {file = "botocore-1.34.81.tar.gz", hash = "sha256:f79bf122566cc1f09d71cc9ac9fcf52d47ba48b761cbc3f064017b36a3c40eb8"}, ] [package.dependencies] @@ -391,6 +391,9 @@ files = [ {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + [package.extras] toml = ["tomli"] @@ -519,13 +522,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.2" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, - {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] @@ -534,7 +537,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -861,13 +864,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -935,6 +938,24 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1130,13 +1151,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -1208,4 +1229,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "684a82d688e8cc667a4157910b5b5faf2662e3927483f3e811de0685aa6061e3" +content-hash = "3d0031fb785d4045660c26520158b547671d8f70b377cd72da13b3b16c5eb672" diff --git a/pyproject.toml b/pyproject.toml index 7ffd09c..8af8b24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,8 +24,7 @@ mypy = "^1.9.0" black = "^24.2.0" ruff = "^0.3.5" coverage = "^7.4.4" - -[tool.poetry.group.extras.dependencies] +pytest-cov = "^5.0.0" [tool.poetry.scripts] stitchee_harmony = 'concatenator.harmony.cli:main' From b592cb7c14cc45dfba76c984c24749ddc128962b Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 12:34:56 -0400 Subject: [PATCH 14/38] modify test running workflow to use pytest-cov and codecov --- .github/workflows/run_tests.yml | 37 +++++++-------------------------- 1 file changed, 8 insertions(+), 29 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index bd44def..9434a34 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -38,34 +38,13 @@ jobs: run: | poetry run ruff concatenator - - name: Run tests with coverage - run: | - poetry run coverage run -m pytest tests/test_group_handling.py >& test_results.txt - # TODO: expand tests to include full concatenation runs, i.e., don't just run test_group_handling.py - - - name: Generate coverage report - if: ${{ always() }} - run: | - poetry run coverage report -m >& coverage_report.txt - poetry run coverage html --dir htmlcov - - - name: Archive test results - if: ${{ always() }} - uses: actions/upload-artifact@v4 - with: - name: test result - path: test_results.txt - - - name: Archive code coverage report (plain text) - if: ${{ always() }} - uses: actions/upload-artifact@v4 - with: - name: code coverage report (plain text) - path: coverage_report.txt + - name: Run tests and collect coverage + run: poetry run pytest --cov=concatenator tests/test_group_handling.py + # TODO: expand tests to include full concatenation runs, i.e., not only test_group_handling.py - - name: 
Archive code coverage report (HTML)
-        if: ${{ always() }}
-        uses: actions/upload-artifact@v4
+      - name: Upload coverage reports to Codecov
+        uses: codecov/codecov-action@v4.0.1
         with:
-          name: code coverage report (HTML)
-          path: htmlcov/*
+          token: ${{ secrets.CODECOV_TOKEN }}
+          slug: nasa/stitchee
+          verbose: true
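The replacement step above uploads coverage to Codecov in CI; the same numbers can be reproduced locally because `pytest-cov` (added as a dev dependency in PATCH 13) supplies the `--cov` flag. A sketch using pytest's Python entry point -- it assumes the Poetry-managed environment from `pyproject.toml`, and targets `tests/test_group_handling.py`, which PATCH 17 later renames:

```python
# Local equivalent of the CI step:
#   poetry run pytest --cov=concatenator tests/test_group_handling.py
# Assumes pytest and pytest-cov are installed in the active environment.
import pytest

exit_code = pytest.main(["--cov=concatenator", "tests/test_group_handling.py"])
raise SystemExit(exit_code)
```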
From 6fdf273b4527c39281a1c488e01473e71e619922 Mon Sep 17 00:00:00 2001
From: danielfromearth
Date: Wed, 10 Apr 2024 12:41:45 -0400
Subject: [PATCH 15/38] add codecov badge to readme

---
 README.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/README.md b/README.md
index d2a4c3c..2be4b06 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,9 @@ Mypy checked
+
+        Code coverage
+

[//]: # (Using deprecated `align="center"` for the logo image and badges above, because of https://stackoverflow.com/a/62383408) From 195711fbe8b74b1c90e6d655e1c4fb63fc35035e Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 13:15:55 -0400 Subject: [PATCH 16/38] autoupdate pre-commit --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 93b93ce..b0f78ff 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ --- repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: trailing-whitespace exclude: tests(/\w*)*/functional/t/trailing_whitespaces.py|tests/pyreverse/data/.*.html|doc/data/messages/t/trailing-whitespace/bad.py @@ -15,7 +15,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.3.4' + rev: 'v0.3.5' hooks: - id: ruff args: [ "--fix" ] From e581123516a27c7d172f076bcc6f371c840163e0 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 13:23:11 -0400 Subject: [PATCH 17/38] rename group_handling to be dataset_and_group_handling.py --- .github/workflows/run_tests.yml | 4 ++-- .../{group_handling.py => dataset_and_group_handling.py} | 2 +- concatenator/stitchee.py | 6 +++--- tests/test_concat.py | 2 +- tests/unit/__init__.py | 0 5 files changed, 7 insertions(+), 7 deletions(-) rename concatenator/{group_handling.py => dataset_and_group_handling.py} (99%) create mode 100644 tests/unit/__init__.py diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 9434a34..596e34b 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -39,8 +39,8 @@ jobs: poetry run ruff concatenator - name: Run tests and collect coverage - run: poetry run pytest --cov=concatenator tests/test_group_handling.py - # TODO: expand tests to include full concatenation runs, i.e., not only test_group_handling.py + run: poetry run pytest --cov=concatenator tests/test_dataset_and_group_handling.py + # TODO: expand tests to include full concatenation runs, i.e., not only test_dataset_and_group_handling.py - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v4.0.1 diff --git a/concatenator/group_handling.py b/concatenator/dataset_and_group_handling.py similarity index 99% rename from concatenator/group_handling.py rename to concatenator/dataset_and_group_handling.py index b051c16..afc6553 100644 --- a/concatenator/group_handling.py +++ b/concatenator/dataset_and_group_handling.py @@ -1,5 +1,5 @@ """ -group_handling.py +dataset_and_group_handling.py Functions for converting multidimensional data structures between a group hierarchy and a flat structure diff --git a/concatenator/stitchee.py b/concatenator/stitchee.py index ce19b10..0926100 100644 --- a/concatenator/stitchee.py +++ b/concatenator/stitchee.py @@ -13,12 +13,12 @@ import xarray as xr from concatenator import GROUP_DELIM -from concatenator.dimension_cleanup import remove_duplicate_dims -from concatenator.file_ops import add_label_to_path -from concatenator.group_handling import ( +from concatenator.dataset_and_group_handling import ( flatten_grouped_dataset, regroup_flattened_dataset, ) +from concatenator.dimension_cleanup import remove_duplicate_dims +from concatenator.file_ops import add_label_to_path default_logger = logging.getLogger(__name__) diff --git a/tests/test_concat.py b/tests/test_concat.py index c60e673..0b54f84 100644 --- 
a/tests/test_concat.py +++ b/tests/test_concat.py @@ -7,7 +7,7 @@ import netCDF4 as nc import pytest -from concatenator.group_handling import GROUP_DELIM +from concatenator.dataset_and_group_handling import GROUP_DELIM from concatenator.stitchee import stitchee from . import data_for_tests_dir diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 From 9cda07f0c44dd230cecbfe041c065466c01775e0 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 13:23:35 -0400 Subject: [PATCH 18/38] move and format with black --- tests/test_group_handling.py | 28 ------------ tests/unit/test_dataset_and_group_handling.py | 44 +++++++++++++++++++ 2 files changed, 44 insertions(+), 28 deletions(-) delete mode 100644 tests/test_group_handling.py create mode 100644 tests/unit/test_dataset_and_group_handling.py diff --git a/tests/test_group_handling.py b/tests/test_group_handling.py deleted file mode 100644 index 0515910..0000000 --- a/tests/test_group_handling.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Tests for manipulating netCDF groups.""" - -# pylint: disable=C0116, C0301 - -from concatenator.attribute_handling import (_flatten_coordinate_attribute, - regroup_coordinate_attribute) - - -def test_coordinate_attribute_flattening(): - # Case with groups present and double spaces. - assert _flatten_coordinate_attribute( - "Time_and_Position/time Time_and_Position/instrument_fov_latitude Time_and_Position/instrument_fov_longitude" - ) == '__Time_and_Position__time __Time_and_Position__instrument_fov_latitude __Time_and_Position__instrument_fov_longitude' - - # Case with NO groups present and single spaces. - assert _flatten_coordinate_attribute( - "time longitude latitude ozone_profile_pressure ozone_profile_altitude" - ) == "__time __longitude __latitude __ozone_profile_pressure __ozone_profile_altitude" - - -def test_coordinate_attribute_regrouping(): - # Case with groups present and double spaces. - assert regroup_coordinate_attribute( - '__Time_and_Position__time __Time_and_Position__instrument_fov_latitude __Time_and_Position__instrument_fov_longitude') == "Time_and_Position/time Time_and_Position/instrument_fov_latitude Time_and_Position/instrument_fov_longitude" - - # Case with NO groups present and single spaces. - assert regroup_coordinate_attribute( - "__time __longitude __latitude __ozone_profile_pressure __ozone_profile_altitude") == "time longitude latitude ozone_profile_pressure ozone_profile_altitude" diff --git a/tests/unit/test_dataset_and_group_handling.py b/tests/unit/test_dataset_and_group_handling.py new file mode 100644 index 0000000..89a51a1 --- /dev/null +++ b/tests/unit/test_dataset_and_group_handling.py @@ -0,0 +1,44 @@ +"""Tests for manipulating netCDF groups.""" + +# pylint: disable=C0116, C0301 + +from concatenator.attribute_handling import ( + _flatten_coordinate_attribute, + regroup_coordinate_attribute, +) + + +def test_coordinate_attribute_flattening(): + # Case with groups present and double spaces. + assert ( + _flatten_coordinate_attribute( + "Time_and_Position/time Time_and_Position/instrument_fov_latitude Time_and_Position/instrument_fov_longitude" + ) + == "__Time_and_Position__time __Time_and_Position__instrument_fov_latitude __Time_and_Position__instrument_fov_longitude" + ) + + # Case with NO groups present and single spaces. 
+ assert ( + _flatten_coordinate_attribute( + "time longitude latitude ozone_profile_pressure ozone_profile_altitude" + ) + == "__time __longitude __latitude __ozone_profile_pressure __ozone_profile_altitude" + ) + + +def test_coordinate_attribute_regrouping(): + # Case with groups present and double spaces. + assert ( + regroup_coordinate_attribute( + "__Time_and_Position__time __Time_and_Position__instrument_fov_latitude __Time_and_Position__instrument_fov_longitude" + ) + == "Time_and_Position/time Time_and_Position/instrument_fov_latitude Time_and_Position/instrument_fov_longitude" + ) + + # Case with NO groups present and single spaces. + assert ( + regroup_coordinate_attribute( + "__time __longitude __latitude __ozone_profile_pressure __ozone_profile_altitude" + ) + == "time longitude latitude ozone_profile_pressure ozone_profile_altitude" + ) From 81663a035843444ded761826da3850b64d7924cc Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 13:30:24 -0400 Subject: [PATCH 19/38] move functions to dataset_and_group_handling.py module --- concatenator/dataset_and_group_handling.py | 38 ++++++++++++++++++++++ concatenator/stitchee.py | 32 ++---------------- 2 files changed, 40 insertions(+), 30 deletions(-) diff --git a/concatenator/dataset_and_group_handling.py b/concatenator/dataset_and_group_handling.py index afc6553..79ae054 100644 --- a/concatenator/dataset_and_group_handling.py +++ b/concatenator/dataset_and_group_handling.py @@ -5,6 +5,8 @@ between a group hierarchy and a flat structure """ +from __future__ import annotations + import re import netCDF4 as nc @@ -313,3 +315,39 @@ def _get_dimension_size(dataset: nc.Dataset, dim_name: str) -> int: if dim_size is None: print(f"Dimension {dim_name} not found when searching for sizes!") return dim_size + + +def validate_workable_files(files_to_concat, logger) -> tuple[list[str], int]: + """Remove files from list that are not open-able as netCDF or that are empty.""" + workable_files = [] + for file in files_to_concat: + try: + with nc.Dataset(file, "r") as dataset: + is_empty = _is_file_empty(dataset) + if is_empty is False: + workable_files.append(file) + except OSError: + logger.debug("Error opening <%s> as a netCDF dataset. Skipping.", file) + + number_of_workable_files = len(workable_files) + + return workable_files, number_of_workable_files + + +def _is_file_empty(parent_group: nc.Dataset | nc.Group) -> bool: + """Check if netCDF dataset is empty or not. + + Tests if all variable sizes in a dataset are size 0. + As soon as a variable array size not equal to 0 is detected, + the granule is considered non-empty. + + Returns + ------- + False if the dataset is considered non-empty; True otherwise (dataset is indeed empty). 
+ """ + for var in parent_group.variables.values(): + if var.size != 0: + return False + for child_group in parent_group.groups.values(): + return _is_file_empty(child_group) + return True diff --git a/concatenator/stitchee.py b/concatenator/stitchee.py index 0926100..feb68de 100644 --- a/concatenator/stitchee.py +++ b/concatenator/stitchee.py @@ -16,6 +16,7 @@ from concatenator.dataset_and_group_handling import ( flatten_grouped_dataset, regroup_flattened_dataset, + validate_workable_files, ) from concatenator.dimension_cleanup import remove_duplicate_dims from concatenator.file_ops import add_label_to_path @@ -65,7 +66,7 @@ def stitchee( benchmark_log = {"flattening": 0.0, "concatenating": 0.0, "reconstructing_groups": 0.0} # Proceed to concatenate only files that are workable (can be opened and are not empty). - input_files, num_input_files = _validate_workable_files(files_to_concat, logger) + input_files, num_input_files = validate_workable_files(files_to_concat, logger) # Exit cleanly if no workable netCDF files found. if num_input_files < 1: @@ -196,32 +197,3 @@ def stitchee( raise err return output_file - - -def _validate_workable_files(files_to_concat, logger) -> tuple[list[str], int]: - """Remove files from list that are not open-able as netCDF or that are empty.""" - workable_files = [] - for file in files_to_concat: - try: - with nc.Dataset(file, "r") as dataset: - is_empty = _is_file_empty(dataset) - if is_empty is False: - workable_files.append(file) - except OSError: - logger.debug("Error opening <%s> as a netCDF dataset. Skipping.", file) - - number_of_workable_files = len(workable_files) - - return workable_files, number_of_workable_files - - -def _is_file_empty(parent_group: nc.Dataset | nc.Group) -> bool: - """ - Function to test if a all variable size in a dataset is 0 - """ - for var in parent_group.variables.values(): - if var.size != 0: - return False - for child_group in parent_group.groups.values(): - return _is_file_empty(child_group) - return True From ec3953545f5df36eaec06d79f4ffbdefe0fab525 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 14:49:04 -0400 Subject: [PATCH 20/38] modify empty checking logic, and add unit test with toy and sample data file --- concatenator/dataset_and_group_handling.py | 22 ++++++++++- tests/conftest.py | 33 ++++++++++++++++ ..._S013G03_product_vertical_column_total.nc4 | Bin 0 -> 36967 bytes tests/unit/test_dataset_and_group_handling.py | 37 ++++++++++++++++++ 4 files changed, 90 insertions(+), 2 deletions(-) create mode 100644 tests/data/unit-test-data/singleton_null_variables-TEMPO_NO2_L2_V01_20240123T231358Z_S013G03_product_vertical_column_total.nc4 diff --git a/concatenator/dataset_and_group_handling.py b/concatenator/dataset_and_group_handling.py index 79ae054..4912f72 100644 --- a/concatenator/dataset_and_group_handling.py +++ b/concatenator/dataset_and_group_handling.py @@ -345,9 +345,27 @@ def _is_file_empty(parent_group: nc.Dataset | nc.Group) -> bool: ------- False if the dataset is considered non-empty; True otherwise (dataset is indeed empty). """ - for var in parent_group.variables.values(): + for var_name, var in parent_group.variables.items(): if var.size != 0: - return False + if "_FillValue" in var.ncattrs(): + fill_or_null = getattr(var, "_FillValue") + else: + fill_or_null = np.nan + + # This checks three ways that the variable's array might be considered empty. + # If none of the ways are true, + # a non-empty variable has been found and False is returned. 
+            # If one of the ways is true, we consider the variable empty,
+            # and continue checking other variables.
+            empty_way_1 = False
+            if np.ma.isMaskedArray(var[:]):
+                empty_way_1 = var[:].mask.all()
+            empty_way_2 = np.all(var[:].data == fill_or_null)
+            empty_way_3 = np.all(np.isnan(var[:].data))
+
+            if not (empty_way_1 or empty_way_2 or empty_way_3):
+                return False  # Found a non-empty variable.
+
     for child_group in parent_group.groups.values():
         return _is_file_empty(child_group)
     return True
diff --git a/tests/conftest.py b/tests/conftest.py
index c4bd1e3..232b69e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,6 +5,7 @@
 from pathlib import Path

 import netCDF4 as nc
+import numpy as np
 import pytest


@@ -54,6 +55,38 @@ def temp_output_dir(tmpdir_factory) -> Path:
     return Path(tmpdir_factory.mktemp("tmp-"))


+@pytest.fixture(scope="function")
+def toy_empty_dataset(temp_toy_data_dir):
+    """Creates groups, dimensions, and variables holding only null values, in a saved netCDF file"""
+
+    filepath = temp_toy_data_dir / "test_empty_dataset.nc"
+
+    f = nc.Dataset(filename=filepath, mode="w")
+
+    grp1 = f.createGroup("Group1")
+
+    # Root-level Dimensions/Variables
+    f.createDimension("step", 1)
+    f.createDimension("track", 1)
+    f.createVariable("step", "f4", ("step",), fill_value=False)
+    f.createVariable("track", "f4", ("track",), fill_value=False)
+    f.createVariable("var0", "f4", ("step", "track"))
+
+    #
+    f["step"][:] = [np.nan]
+    f["track"][:] = [np.nan]
+    f["var0"][:] = [np.nan]
+
+    # Group 1 Dimensions/Variables
+    grp1.createVariable("var1", "f8", ("step", "track"))
+    #
+    grp1["var1"][:] = [np.nan]
+
+    f.close()
+
+    return filepath
+
+
 def add_to_ds_3dims_3vars_4coords_1group_with_step_values(open_ds: nc.Dataset, step_values: list):
     """Creates groups, dimensions, variables; and uses chosen step values in an open dataset"""
     grp1 = open_ds.createGroup("Group1")
diff --git a/tests/data/unit-test-data/singleton_null_variables-TEMPO_NO2_L2_V01_20240123T231358Z_S013G03_product_vertical_column_total.nc4 b/tests/data/unit-test-data/singleton_null_variables-TEMPO_NO2_L2_V01_20240123T231358Z_S013G03_product_vertical_column_total.nc4
new file mode 100644
index 0000000000000000000000000000000000000000..7c2abc993a194332a97709728b3015332fae59f6
GIT binary patch
literal 36967
zcmeG_3v?q_bx&6I6R-1GlI(`?kG@X4S*^NI5ysG&Btd2!YemoD&M6d`g=hP6$u};oIg%Qy|dOLVHN>eIIE?
zmi*Z@u;k1n>!*2h-+lMpci(;Q-Fs)=JH|&w&*z`aa#$PxIWT@kgRad)nJWTnMuHRruE>z{$3Gqf)HobhA(?{}dm=wRso*b)7~F zz^^UUS|#7enIGpF0!I&m!L_YasMRX9Y~3`fC7yZq+4$rUUe9arrg-)~uL%abgpHJ|*Bb zGW~Y>=_8cz^5iHh z_$V)b-fmjJy@j;%K-~9Tf}n1)nQTSS*hF-Cnw$e!C^6VGBZtrg+g6||8=Qsn6Z&jDGxGHu7f~Jvkg-!SIpL&k(g;F6;&W6z_I3UD^kely1_b}rjIcXje^6dH061eVQ z2@yQpym35a!|~r(OUMGRXk9I*l5r+4X-RtPy$`2i*>Lf;G!ik#s3J|N;Q z78Fw6FpXNNP&V??T&+@)<_e|>zw1T4Z1QR(y_}b*8kM4xqY~)WTm$9-QKH_c&FMJ< zAmIP1RGE`>vs9^97mS+Gw-3j9Q!_Q7MRiIaN4y>0md&o-Wa2B$mOE~FF2qEVDokR<3<2^KnwD7t11 zWn^B5d8Is8sg>yR2qZ)sR5R)@gX1d7Bi*3L(lsC; zSioE`q#Ve(QA1h7yU~JC%;QbknAdB0X<#&!-sh2^8mKH^fchJNZ@#9NQPR9pT+c0t zTaa-WRdtF7kQQo&l&sXug~91c!(3q0Nq~<|e3f!>Nz$b%0N`B6tB>n-bMOsD-9$LA zlmUuZ+jT(3+n)zY_+;uw!l3;gI<9h+vZ=!}D^~zD4E!n^Lxr42ssr~-acdclt0{;& z){z4xrPr3Eyl(1JqYlFss?!TKsifEI5)54v_=H2;R*TO{fmAoDI*@>k7Q-KYQ}lW0 zD6>{D7JA||E%eY<w4dL7#~2f3*CprO&s!4WSX8<_k+3=~VA^V8vg!vTi_4hI|#I2>>|;Bdg< zfWv`5EDnsO$EWD#x<^<5kbYr`{Y!2Rcp3~J^b*^8k@9S@yXLoD6DZa!$xsh~#UUFK z1^zy9<0sGtxb+p7<8}!;L?JXGx=E5->+crD4v8NdpGv>_t7)W280Ib}ueyrW^3Ehn zALQv~YS?t0#tjK%^D#6MTq6t(a`w^+It=OTQJBI@f2OJJK9B??`aKPb!!+}3k4lpnH&sGmm=->@rx7Qmz zp2XeLGs_p9xX8o>rdU{STDE)@%hD+f zTk$#Dn@Q9IE%rVl6@f~E5)o%5?nP9Uxc^@Geb)g>@=)Xpw=?Oy=LBLjI)o(kqGQ{A z3DWx27ZX3X$7f(}hxm-YxC}{QE^#Cf!GyH-l0!Tu=Wf0={+!q%KPMW4Wu5C~&VdJC zJNA}?neh?OWiVd0Lu>N`;n%W8_rjoO*A+W|d_ILb9ih&kPzzx%->zwIkiZ6{MgLyBaSP59e|?;Bdgqmb=H#;ue?BF&zKXCLIhvFL+LLT_?#QhY(gS_ut5JdkX z4fl4PMaa*o3Bd(cqZi-~HYInt$Tf63<@%nB^i%)v*DiScMTQptikG}Zj<7X|`|n)z z%!RZ<;mmfyF6)hy;?S8t@{Ep;9BgjtX_?Ev`rVs1F>GX!haw|orH!RRvfo~pl zu{7aQ8=D|r8t7tYI*)@@3&$XsorjyZ%5-?+0=vzM7(>;qr8~R?f!kUvk~iidCc2(2 zW8gUwMBgrffa*d%3$JF7?b6zsPROSpZ7;4LTTk(WZrI&Y&dpXD=1I5YOvMhVH zQ7>BHMi)0i!r&%eV1O*=I)ypF%#C|ouDwX)T3!j;bRtH@r_+oFC?fdJZd7cHv!`3Q zQXS-}c>8Smz#(N;;&=JCFnF4Q^{v%@(611m**<3|D5Tksndqaaq3A{8+Tf}g9|5k$;_fM3`hRz}AY2b+Q6^uavD zR^T;%8&7ZL=}jy>O5+?_k@h0kMWpJLU%Hb){$BsC{CWVGeCQ=?Ky){QIfA$BanlXX zZ@VT?tXYzw#@7R$Zf+00v;E&+-R^9^k8hD$&h}h!)AjAn_WNGXDs4d>c-vi{XU=xf z3bEY~sUWl6*?vcmJrPcJksd!7x{f>B{}Um=FWz*vzaNoWo$a?E!~u)5{ZY*DFw+Ee zwtowYKn9w4sIz?!WHNy)t7%gzl z)gCYduOs*qhXGz9#owqHJD9A|GKFBmWDVcJFn^2I`qO!J&5!+bG1p^A6q02%xVOF# z)bR^R`0^PRX`)_kus;byy#EU)Wr+93IvL_Ic+^Jn23z8h+t}~I5-)=BAj`Qk)6HAbWY zRl9NV>zv{k2N3CJ$3A)|H!kkLwJdyl9pkN@edbI;uEpj?&`wUsU4MP&Y2;e06hcX` z)mwf8_QC6MFZ2;YE`sFUllSc)J(vMiT?k3!Z-2C#9LJs58wfSk_B@)tfV_y5-b?7` zZJ+qzfj#6IME^QM!#y9n=#jnTjcA#KH;Q&c@@(Mft63l@`^*U`YEKQ42avE~_KMMU z_hcfZANODhLi!s>7LBKSI`7iwP)(p7|34^d2?8A{?o;&+zkD4EVpD(}5YFy-OCiB=#=xXFN`2_v} z8Ahhs_(c8}a}IFx`lSdAolj-aDm=ChBhMh%rTldsYe(6Hg*=Y6h2bOrkl1>QSOt{~ zV}F6PfV_gl;tdy2vJ2^e8~VALS$>0W(vb)F4IPbEfwc8G`8&i06kwF2O-zZh1#&QR#uRZu@2a5N%++;7kK~D7bU{8`g!~Zvrd3YVnB$D6`u=dMS|8Y;Q z=S}SK_MSZZaqfL{vrn?0&fk|22e|&Xj*4sFkBraqreKr6-X|&*Vjb~M@BfG6 zHi`ev^Pjqz?6An(?i+r6vrU(LdUEG2Hi5t5mM`9FQ$3fy<|}Wv4did1f8X7<0rQMJ z_)*)adm(c0Gqy4L#TR@J+s5)Il=>sKk#XHy_k7tlrgxLK{j*K8><+*8Uv0`Retg$= ziB+j+4@SRl(^=>D&0qX$9WCBhxm;k}U2F9mj z>BJQNq$kI$BA3(SsdNTUj5#@mSVeLbCbVi?xzAdC%f`ov4HozmJ*}qe2xwKaQdg|Z z->%(~NKVaU)}|mR=u|3^1l_Z8UugVs7xgf}^+P&3lATai)ejz_PZH_09bO4%5Yr-nTQcE4QboqM1Y#$eWBNlCe}W0|_vbva4$$kpILH zgq>KaIG-$-BwGItOJh^1iNj;5B&!FXb&_IETrQPIuP*Fh327*dJ)%;p0 z;E@IuIp_@s17XMs1R{YD&Cw(a4@8@tRM`5P&lk-f-T<~mpa*352hSCC-3+m`%hIr}tE1L9Si6F>;9XH6+2Fd4 zr}&kzi5XlBQ&L7JqGPsl-8-ZWtc$|yPqLMi&*7E)H`bc@N| zH4~o+ThklI-W{`T?(7P-mN~j>;qKFV*EIp`=*HsBodRt{2Yw>Uhb0ONmBaLlC3uUw zO^W<+w;pfqE3{P>(e1UanZ^y+`)EU|JfsEIP9d#x4HO=Xk0p}H-vcA<6|aZV4PmFE z4F$q$VP}(0!O^Zlzf6L=%KFq;oNs~FvR!OJb4fliK_BSXyfDG9w#T>B2Gw6E2NR@`#m|)X4@vz@eS>8vl2O7#a$qQ+c|$=ppad?F z(*>GU*6X@=zOv}UXX+q4 
z9E4-`OgLQ6$or_-h5fbI1ARybY&KwZyzJSEwn88eaKp)pl(M2)uMbw2kZ3$p5l#rC zZS=xAyq4228&44gJ1jiJC$qDaW5Y*}!ln$?fP)eFDvxwjgP(r+s;i`2!yL}k8U`)K z>d3-@dsYtQO0^E{9`gG&Umf_7Tkyf@iaDcd)_v2ky;EUh6%P6br&6(r)XWGX-doQt z7$x0`bV09`D&?gPhm}~LO93zVd>kd)`(%@TaX10QaC^ZC)d!Yk--r^F!(ksfntWrj;sbBgigOKEWfb9;Qox>ka`maJ*B63L;C?Xm zr0dgGt*_nhc5?0WuUa2iqJpA*{*!KBVDxbpLPF9t$Bz!4&Z`G&#|UD0Ao~82TYd= z<^rr0;4}786b>V8B8YhFNSk)1$)F-K3Ai;_6e zd)aZ*u=dM0a_|D!dBV+JgyYtNFG}%cBRLz` zh@2$k0R2q92>oeK-SAax>fpb|Sb7T_7S9@b8V?{3`LZiGY1BkbHLBtK7pX>;U12r&nn{K zE)LA0E-Tf;T^yLhJXRj&;)q}lb6C+FSq?P)RUYQy2-6JkRzXeF!2xwMUzLJYgMUMV z4C<*0d6;_xA*i29@o?`3RKvYgG$){08nAC4j&LgX79POsmf6xeK1%rZ%4gwa)(&`d zb6wv?eE;3=h<3%Z4tgFqrvM*G>|=dfC|4V1cCJt~nBuvG_bO9!YxAdjBnG{*H(fxj z)tByulNG39ELLDj$-)l3+$b8^LY_*W$42Mc;)0q=o-2J+@=aR?#VT>E&2=Y)7pXkf zMfEGvOeQu6?lKwKM-_iwU&>bIvP-a6UsdDNfK;fUxvmB>-VAdMtr??UaWqj*Hs^#Y ze6Mq^QY%pqF#{#arctcJw>kZ?(R|0YUTM^FZ05t*Y(zD4Xz);CT3T4Dg0Z3s71dy7 zE|m27Lb(}U3Se;A$3N!We&=IHnx7!eCSy}Dxf?aJz@@|sAXo*T?1oiO85Y<1xnVyn zMzq1av1k-4)e=nIo`w8eU|8`gs#nFgtZ9$J^qS38O0ayO4IALt^x0Y6KRcV7izxD3 za5fAc%bcO+=W_b2o`){cWpINVdZcNj;{BL4p@|d|1_dKsAP-IP0ic{wuS27=MWl-Y z;xvVT0LG_q3z&5ZM`eg%y`APZM1oyKC83_v%h__HM8#mGQW(PDlpi70Ybl70!|mSYN_<3iES{Q< zjSj)MG;;LY0XtY#mMMaLUN)PnF{O&Z;n*qhsp+vHzorH|s&SlF*72#_&+YymOe@=% T^l_0z?T8D{mE;elw(a_VFxW$E literal 0 HcmV?d00001 diff --git a/tests/unit/test_dataset_and_group_handling.py b/tests/unit/test_dataset_and_group_handling.py index 89a51a1..9b1aaf6 100644 --- a/tests/unit/test_dataset_and_group_handling.py +++ b/tests/unit/test_dataset_and_group_handling.py @@ -2,10 +2,47 @@ # pylint: disable=C0116, C0301 +import netCDF4 as nc + from concatenator.attribute_handling import ( _flatten_coordinate_attribute, regroup_coordinate_attribute, ) +from concatenator.dataset_and_group_handling import _is_file_empty + +from .. 
import data_for_tests_dir + + +def test_dataset_with_singleton_null_values_is_identified_as_empty(): + """Ensure that a dataset with only null arrays with 1-length dimensions is identified as empty.""" + singleton_null_values_file = ( + data_for_tests_dir + / "unit-test-data" + / "singleton_null_variables-TEMPO_NO2_L2_V01_20240123T231358Z_S013G03_product_vertical_column_total.nc4" + ) + with nc.Dataset(singleton_null_values_file) as ds: + assert _is_file_empty(ds) + + +def test_toy_dataset_with_singleton_null_values_is_identified_as_empty(toy_empty_dataset): + """Ensure that a dataset with only null arrays with 1-length dimensions is identified as empty.""" + with nc.Dataset(toy_empty_dataset) as ds: + assert _is_file_empty(ds) + + +def test_dataset_with_values_is_identified_as_not_empty(): + """Ensure that a dataset with only null arrays with 1-length dimensions is identified as empty.""" + file_with_values = ( + data_for_tests_dir + / "tempo" + / "no2_subsetted" + / + # "TEMPO_NO2_L2_V01_20231206T140555Z_S003G05_SUBSETTED.nc" + "TEMPO_NO2_L2_V01_20231206T132550Z_S002G05_SUBSETTED.nc" + # "TEMPO_NO2_L2_V01_20231206T133227Z_S002G06_SUBSETTED.nc" + ) + with nc.Dataset(file_with_values) as ds: + assert _is_file_empty(ds) is False def test_coordinate_attribute_flattening(): From 70db04920352f67e758271b201bce16f7c602ce6 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 14:57:01 -0400 Subject: [PATCH 21/38] update docstring for file empty check --- concatenator/dataset_and_group_handling.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/concatenator/dataset_and_group_handling.py b/concatenator/dataset_and_group_handling.py index 4912f72..fe05308 100644 --- a/concatenator/dataset_and_group_handling.py +++ b/concatenator/dataset_and_group_handling.py @@ -337,9 +337,9 @@ def validate_workable_files(files_to_concat, logger) -> tuple[list[str], int]: def _is_file_empty(parent_group: nc.Dataset | nc.Group) -> bool: """Check if netCDF dataset is empty or not. - Tests if all variable sizes in a dataset are size 0. - As soon as a variable array size not equal to 0 is detected, - the granule is considered non-empty. + Tests if all variable arrays are empty. + As soon as a variable is detected with both (i) an array size not equal to zero and + (ii) not all null/fill values, then the granule is considered non-empty. 
Returns ------- From c2d4dd2815934cdc75c76d9c3e243d25790779fb Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 14:57:45 -0400 Subject: [PATCH 22/38] fix path to test module --- .github/workflows/run_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 596e34b..52366f7 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -39,7 +39,7 @@ jobs: poetry run ruff concatenator - name: Run tests and collect coverage - run: poetry run pytest --cov=concatenator tests/test_dataset_and_group_handling.py + run: poetry run pytest --cov=concatenator tests/unit/test_dataset_and_group_handling.py # TODO: expand tests to include full concatenation runs, i.e., not only test_dataset_and_group_handling.py - name: Upload coverage reports to Codecov From d76d6f24bc2db841e6f77de6ec1e3f2c3431c57e Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 15:03:16 -0400 Subject: [PATCH 23/38] change not-empty function test to use toy data created at runtime --- tests/unit/test_dataset_and_group_handling.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/unit/test_dataset_and_group_handling.py b/tests/unit/test_dataset_and_group_handling.py index 9b1aaf6..03678a9 100644 --- a/tests/unit/test_dataset_and_group_handling.py +++ b/tests/unit/test_dataset_and_group_handling.py @@ -30,18 +30,9 @@ def test_toy_dataset_with_singleton_null_values_is_identified_as_empty(toy_empty assert _is_file_empty(ds) -def test_dataset_with_values_is_identified_as_not_empty(): - """Ensure that a dataset with only null arrays with 1-length dimensions is identified as empty.""" - file_with_values = ( - data_for_tests_dir - / "tempo" - / "no2_subsetted" - / - # "TEMPO_NO2_L2_V01_20231206T140555Z_S003G05_SUBSETTED.nc" - "TEMPO_NO2_L2_V01_20231206T132550Z_S002G05_SUBSETTED.nc" - # "TEMPO_NO2_L2_V01_20231206T133227Z_S002G06_SUBSETTED.nc" - ) - with nc.Dataset(file_with_values) as ds: +def test_dataset_with_values_is_identified_as_not_empty(ds_3dims_3vars_4coords_1group_part1): + """Ensure that a dataset with non-null arrays is identified as NOT empty.""" + with nc.Dataset(ds_3dims_3vars_4coords_1group_part1) as ds: assert _is_file_empty(ds) is False From 9d802059391b9b7581219bfa9f37bc1202e8a5ee Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Wed, 10 Apr 2024 15:04:32 -0400 Subject: [PATCH 24/38] update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9d0ecf0..98224ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [Issue #44](https://github.com/danielfromearth/stitchee/issues/44): Concatenation dimension CLI argument is required but isn't listed as such in the help message - [Issue #81](https://github.com/danielfromearth/stitchee/issues/81): Remove `nco` related code - [Pull #129](https://github.com/danielfromearth/stitchee/pull/129): Sort according to extend dimension + - [Pull #152](https://github.com/danielfromearth/stitchee/pull/152): Consider empty a netCDF with only singleton null-values ### Deprecated ### Removed ### Fixed From c3fec97a650de6209beb2ed060ddb507039b8bc9 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 13:48:58 -0400 Subject: [PATCH 25/38] add version bumping and tag steps --- .github/workflows/build-pipeline.yml | 152 
+++++++++++++++++++++++++ .github/workflows/push.yml | 164 --------------------------- 2 files changed, 152 insertions(+), 164 deletions(-) create mode 100644 .github/workflows/build-pipeline.yml delete mode 100644 .github/workflows/push.yml diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml new file mode 100644 index 0000000..3cd9548 --- /dev/null +++ b/.github/workflows/build-pipeline.yml @@ -0,0 +1,152 @@ +# This is the main build pipeline that verifies and publishes the software +name: Build + +# Controls when the workflow will run +on: + # Triggers the workflow on push events + push: + branches: [ develop, release/**, main, feature/**, issue/**, issues/** ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +env: + POETRY_VERSION: "1.3.2" + PYTHON_VERSION: "3.10" + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + # The first job in the workflow confirms that the software's own tests pass. + run_tests: + uses: ./.github/workflows/run_tests.yml + + # Second job in the workflow verifies the software + build: + needs: run_tests + runs-on: ubuntu-latest + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - name: Retrieve repository + uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install Poetry + uses: abatilo/actions-poetry@v3.0.0 + with: + poetry-version: ${{ env.POETRY_VERSION }} + - name: Get version + id: get-version + run: | + echo "current_version=$(poetry version | awk '{print $2}')" >> $GITHUB_OUTPUT + echo "pyproject_name=$(poetry version | awk '{print $1}')" >> $GITHUB_ENV + + # Bumps the version, based on which branch is the target. + - name: Bump pre-alpha version + # If triggered by push to a feature branch + if: | + ${{ startsWith(github.ref, 'refs/heads/issue') }} || + ${{ startsWith(github.ref, 'refs/heads/dependabot/') }} || + ${{ startsWith(github.ref, 'refs/heads/feature/') }} + run: | + new_ver="${{ steps.get-version.outputs.current_version }}+$(git rev-parse --short ${GITHUB_SHA})" + poetry version $new_ver + echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV + - name: Bump alpha version + # If triggered by push to the develop branch + if: ${{ github.ref == 'refs/heads/develop' }} + run: | + poetry version prerelease + echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV + echo "venue=sit" >> $GITHUB_ENV + - name: Bump rc version + # If triggered by push to a release branch + if: ${{ startsWith(github.ref, 'refs/heads/release/') }} + env: + # True if the version already has a 'rc' pre-release identifier + BUMP_RC: ${{ contains(steps.get-version.outputs.current_version, 'rc') }} + run: | + if [ "$BUMP_RC" = true ]; then + poetry version prerelease + else + poetry version ${GITHUB_REF#refs/heads/release/}rc1 + fi + echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV + echo "venue=uat" >> $GITHUB_ENV + - name: Release version + # If triggered by push to the main branch + if: ${{ startsWith(github.ref, 'refs/heads/main') }} + env: + CURRENT_VERSION: ${{ steps.get-version.outputs.current_version }} + # Remove rc* from the end of version string + # The ${string%%substring} syntax below deletes the longest match of $substring from back of $string. 
+ run: | + poetry version ${CURRENT_VERSION%%rc*} + echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV + echo "venue=ops" >> $GITHUB_ENV + + - name: Commit Version Bump + # If building the `develop`, a `release` branch, or `main`, + # then we commit the version bump back to the repo. + if: | + github.ref == 'refs/heads/develop' || + github.ref == 'refs/heads/main' || + startsWith(github.ref, 'refs/heads/release') + run: | + git config --global user.name 'stitchee bot' + git config --global user.email 'stitchee@noreply.github.com' + git commit -am "/version ${{ env.software_version }}" + git push + + # Builds and pushes a Docker image + - name: Log in to the Container registry + if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,pattern={{version}},value=${{ env.software_version }} + type=raw,value=${{ env.venue }} + + # - name: Wait for package + ## if: ${{ !startsWith(github.ref, 'refs/heads/feature') }} + # if: ${{ startsWith(github.ref, 'refs/heads/feature/') }} + # run: | + # pip install tenacity + # ${GITHUB_WORKSPACE}/.github/workflows/wait-for-pypi.py ${{env.pyproject_name}}[harmony]==${{ env.software_version }} + + - name: Build and push Docker image + if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + id: docker-push + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile + build-args: | + SOURCE=${{env.pyproject_name}}[harmony]==${{ env.software_version }} + push: true + pull: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + - name: Push Tag + if: | + github.ref == 'refs/heads/develop' || + github.ref == 'refs/heads/main' || + startsWith(github.ref, 'refs/heads/release') + run: | + git config user.name "${GITHUB_ACTOR}" + git config user.email "${GITHUB_ACTOR}@users.noreply.github.com" + git tag -a "${{ env.software_version }}" -m "Version ${{ env.software_version }}" + git push origin "${{ env.software_version }}" diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml deleted file mode 100644 index 7539bb6..0000000 --- a/.github/workflows/push.yml +++ /dev/null @@ -1,164 +0,0 @@ -name: Lint and Test - -# Controls when the workflow will run -on: - # Triggers the workflow on push events - push: - branches: [ develop, release/**, main, feature/**, issue/**, issues/** ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -env: - POETRY_VERSION: "1.3.2" - PYTHON_VERSION: "3.10" - REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }} - -jobs: - run_tests: - uses: ./.github/workflows/run_tests.yml - - bump_version: - needs: run_tests - runs-on: ubuntu-20.04 - - steps: - - name: Retrieve repository - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install Poetry - uses: abatilo/actions-poetry@v3.0.0 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - - name: Get version - id: get-version - run: | - echo "current_version=$(poetry version | awk '{print $2}')" >> $GITHUB_OUTPUT - echo "pyproject_name=$(poetry version | awk '{print $1}')" >> $GITHUB_ENV - 
- - name: Bump pre-alpha version - # If triggered by push to a feature branch - if: | - ${{ startsWith(github.ref, 'refs/heads/issue') }} || - ${{ startsWith(github.ref, 'refs/heads/dependabot/') }} || - ${{ startsWith(github.ref, 'refs/heads/feature/') }} - run: | - new_ver="${{ steps.get-version.outputs.current_version }}+$(git rev-parse --short ${GITHUB_SHA})" - poetry version $new_ver - echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV - - - name: Bump alpha version - # If triggered by push to the develop branch - if: ${{ github.ref == 'refs/heads/develop' }} - run: | - poetry version prerelease - echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV - echo "venue=sit" >> $GITHUB_ENV - - - name: Bump rc version - # If triggered by push to a release branch - if: ${{ startsWith(github.ref, 'refs/heads/release/') }} - env: - # True if the version already has a 'rc' pre-release identifier - BUMP_RC: ${{ contains(steps.get-version.outputs.current_version, 'rc') }} - run: | - if [ "$BUMP_RC" = true ]; then - poetry version prerelease - else - poetry version ${GITHUB_REF#refs/heads/release/}rc1 - fi - echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV - echo "venue=uat" >> $GITHUB_ENV - - - name: Release version - # If triggered by push to the main branch - if: ${{ startsWith(github.ref, 'refs/heads/main') }} - env: - CURRENT_VERSION: ${{ steps.get-version.outputs.current_version }} - # True if the version already has a 'rc' pre-release identifier - BUMP_RC: ${{ contains(steps.get-version.outputs.current_version, 'rc') }} - # True if the version already has an 'alpha' pre-release identifier - BUMP_A: ${{ contains(steps.get-version.outputs.current_version, 'a') }} - # True if the version already has a 'beta' pre-release identifier - BUMP_B: ${{ contains(steps.get-version.outputs.current_version, 'b') }} - # Remove rc* from the end of version string - # The ${string%%substring} syntax below deletes the longest match of $substring from back of $string. - run: | - if [ "$BUMP_RC" = true ]; then - poetry version ${CURRENT_VERSION%%rc*} - elif [ "$BUMP_B" = true ]; then - poetry version ${CURRENT_VERSION%%b*} - elif [ "$BUMP_A" = true ]; then - poetry version ${CURRENT_VERSION%%a*} - fi - echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV - echo "venue=ops" >> $GITHUB_ENV - - - name: Log in to the Container registry - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} - uses: docker/login-action@v3 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=raw,pattern={{version}},value=${{ env.software_version }} - type=raw,value=${{ env.venue }} - -# - name: Wait for package -## if: ${{ !startsWith(github.ref, 'refs/heads/feature') }} -# if: ${{ startsWith(github.ref, 'refs/heads/feature/') }} -# run: | -# pip install tenacity -# ${GITHUB_WORKSPACE}/.github/workflows/wait-for-pypi.py ${{env.pyproject_name}}[harmony]==${{ env.software_version }} - - - name: Build and push Docker image - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} - id: docker-push - uses: docker/build-push-action@v5 - with: - context: . 
- file: Dockerfile - build-args: | - SOURCE=${{env.pyproject_name}}[harmony]==${{ env.software_version }} - push: true - pull: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - -# - name: Commit Version Bump -# # If building develop, a release branch, or main then we commit the version bump back to the repo -# if: | -# github.ref == 'refs/heads/develop' || -# github.ref == 'refs/heads/main' || -# startsWith(github.ref, 'refs/heads/release') -# run: | -# git config --global user.name 'stitchee bot' -# git config --global user.email 'stitchee@noreply.github.com' -# git commit -am "/version ${{ env.software_version }}" -# git push -# -# - name: Push Tag -# if: | -# github.ref == 'refs/heads/develop' || -# github.ref == 'refs/heads/main' || -# startsWith(github.ref, 'refs/heads/release') -# run: | -# git config user.name "${GITHUB_ACTOR}" -# git config user.email "${GITHUB_ACTOR}@users.noreply.github.com" -# git tag -a "${{ env.software_version }}" -m "Version ${{ env.software_version }}" -# git push origin "${{ env.software_version }}" From d5b6554d0388793e3b54d521e678a84428c0203e Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 13:49:24 -0400 Subject: [PATCH 26/38] add snyk scanning steps to build pipeline --- .github/workflows/build-pipeline.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml index 3cd9548..c17d298 100644 --- a/.github/workflows/build-pipeline.yml +++ b/.github/workflows/build-pipeline.yml @@ -87,6 +87,27 @@ jobs: echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV echo "venue=ops" >> $GITHUB_ENV + - name: Run Snyk as a blocking step + uses: snyk/actions/python-3.10@master + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + with: + command: test + args: > + --org=${{ secrets.SNYK_ORG_ID }} + --project-name=${{ github.repository }} + --severity-threshold=high + --fail-on=all + - name: Run Snyk on Python + uses: snyk/actions/python-3.10@master + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + with: + command: monitor + args: > + --org=${{ secrets.SNYK_ORG_ID }} + --project-name=${{ github.repository }} + - name: Commit Version Bump # If building the `develop`, a `release` branch, or `main`, # then we commit the version bump back to the repo. 
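
A note on the release-version step in the build pipeline added above: it strips the
pre-release suffix with bash parameter expansion (`poetry version ${CURRENT_VERSION%%rc*}`),
where `${string%%substring}` deletes the longest match of the pattern from the back of the
string. A minimal Python sketch of the same rule follows; the helper name is illustrative
only and is not part of the pipeline:

    def strip_rc_suffix(version: str) -> str:
        """Mimic bash ${CURRENT_VERSION%%rc*}: drop the longest suffix starting with 'rc'."""
        # Removing the longest suffix that matches 'rc*' means cutting
        # at the FIRST occurrence of 'rc' in the version string.
        rc_index = version.find("rc")
        return version[:rc_index] if rc_index != -1 else version

    assert strip_rc_suffix("1.0.0rc2") == "1.0.0"
    assert strip_rc_suffix("1.0.1a1") == "1.0.1a1"  # unchanged when no 'rc' is present
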
From e1074367383a7114a0ecdbf9531e3c546fa4cd5e Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 13:56:26 -0400 Subject: [PATCH 27/38] build docker image on main --- .github/workflows/build-pipeline.yml | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml index c17d298..f717e3c 100644 --- a/.github/workflows/build-pipeline.yml +++ b/.github/workflows/build-pipeline.yml @@ -123,15 +123,14 @@ jobs: # Builds and pushes a Docker image - name: Log in to the Container registry - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + if: ${{ !startsWith(github.ref, 'refs/heads/main/') }} uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Extract metadata (tags, labels) for Docker - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + if: ${{ !startsWith(github.ref, 'refs/heads/main/') }} id: meta uses: docker/metadata-action@v5 with: @@ -139,16 +138,8 @@ jobs: tags: | type=raw,pattern={{version}},value=${{ env.software_version }} type=raw,value=${{ env.venue }} - - # - name: Wait for package - ## if: ${{ !startsWith(github.ref, 'refs/heads/feature') }} - # if: ${{ startsWith(github.ref, 'refs/heads/feature/') }} - # run: | - # pip install tenacity - # ${GITHUB_WORKSPACE}/.github/workflows/wait-for-pypi.py ${{env.pyproject_name}}[harmony]==${{ env.software_version }} - - name: Build and push Docker image - if: ${{ !startsWith(github.ref, 'refs/heads/feature/') }} + if: ${{ !startsWith(github.ref, 'refs/heads/main/') }} id: docker-push uses: docker/build-push-action@v5 with: From ef6a3a4401eb500c2c0af23e56c413b24b3934c5 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 14:05:13 -0400 Subject: [PATCH 28/38] update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 98224ac..ac8dfc3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [Issue #81](https://github.com/danielfromearth/stitchee/issues/81): Remove `nco` related code - [Pull #129](https://github.com/danielfromearth/stitchee/pull/129): Sort according to extend dimension - [Pull #152](https://github.com/danielfromearth/stitchee/pull/152): Consider empty a netCDF with only singleton null-values + - [Pull #157](https://github.com/danielfromearth/stitchee/pull/157): Update CI pipeline ### Deprecated ### Removed ### Fixed From 5b42d4a3e5470cec54e646d359b31650555164ac Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 14:06:54 -0400 Subject: [PATCH 29/38] add release-created.yml --- .github/workflows/release-created.yml | 38 +++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/release-created.yml diff --git a/.github/workflows/release-created.yml b/.github/workflows/release-created.yml new file mode 100644 index 0000000..226b347 --- /dev/null +++ b/.github/workflows/release-created.yml @@ -0,0 +1,38 @@ +name: Release Branch Created + +# Run whenever a ref is created https://docs.github.com/en/actions/reference/events-that-trigger-workflows#create +on: + create + +jobs: + # First job in the workflow builds and verifies the software artifacts + bump: + name: Bump minor version on develop + # The type of runner that the job will run on + runs-on: ubuntu-latest + # Only run if ref created was 
a release branch + if: + ${{ startsWith(github.ref, 'refs/heads/release/') }} + steps: + # Checks-out the develop branch + - uses: actions/checkout@v4 + with: + ref: 'refs/heads/develop' + - uses: actions/setup-python@v5 + with: + python-version: 3.10 + - name: Install Poetry + uses: abatilo/actions-poetry@v3.0.0 + with: + poetry-version: 1.3.2 + - name: Bump minor version + run: | + poetry version ${GITHUB_REF#refs/heads/release/} + poetry version preminor + echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV + - name: Commit Version Bump + run: | + git config --global user.name 'stitchee bot' + git config --global user.email 'stitchee@noreply.github.com' + git commit -am "/version ${{ env.software_version }}" + git push From 6fea794512212524aac2f813ddf282bf9e113f45 Mon Sep 17 00:00:00 2001 From: stitchee bot Date: Mon, 15 Apr 2024 18:12:28 +0000 Subject: [PATCH 30/38] /version 1.0.1a0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8af8b24..2d2e770 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "1.0.0" +version = "1.0.1a0" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md" From d92531b34ea462880b470915299c6afca73fd655 Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 14:24:26 -0400 Subject: [PATCH 31/38] add pypi publishing steps to build-pipeline.yml --- .github/workflows/build-pipeline.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/.github/workflows/build-pipeline.yml b/.github/workflows/build-pipeline.yml index f717e3c..e996ea3 100644 --- a/.github/workflows/build-pipeline.yml +++ b/.github/workflows/build-pipeline.yml @@ -121,6 +121,32 @@ jobs: git commit -am "/version ${{ env.software_version }}" git push + # Builds and pushes the package to the Python Package Index (PyPI) + - name: Build Python Artifact + run: | + poetry build + - uses: actions/upload-artifact@v4 + with: + name: python-artifact + path: dist/* + - name: Publish to test.pypi.org + id: pypi-test-publish + if: | + github.ref == 'refs/heads/develop' || + startsWith(github.ref, 'refs/heads/release') + env: + POETRY_PYPI_TOKEN_TESTPYPI: ${{secrets.PYPI_TOKEN_TESTPYPI}} + run: | + poetry config repositories.testpypi https://test.pypi.org/legacy/ + poetry publish -r testpypi + - name: Publish to pypi.org + if: ${{ github.ref == 'refs/heads/main' }} + id: pypi-publish + env: + POETRY_PYPI_TOKEN_PYPI: ${{secrets.PYPI_TOKEN_PYPI}} + run: | + poetry publish + # Builds and pushes a Docker image - name: Log in to the Container registry if: ${{ !startsWith(github.ref, 'refs/heads/main/') }} From 27568fa1313aa271d2b442cf957340ff22f7679b Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 14:37:22 -0400 Subject: [PATCH 32/38] update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ac8dfc3..cbf20bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - [Pull #129](https://github.com/danielfromearth/stitchee/pull/129): Sort according to extend dimension - [Pull #152](https://github.com/danielfromearth/stitchee/pull/152): Consider empty a netCDF with only singleton null-values - [Pull #157](https://github.com/danielfromearth/stitchee/pull/157): Update CI pipeline + - [Pull 
#158](https://github.com/danielfromearth/stitchee/pull/158): Add pypi publishing steps to CI pipeline ### Deprecated ### Removed ### Fixed From 565b9ca0a44a6c5efe2b528b9a7b81d4a726cbd6 Mon Sep 17 00:00:00 2001 From: stitchee bot Date: Mon, 15 Apr 2024 18:43:41 +0000 Subject: [PATCH 33/38] /version 1.0.1a1 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2d2e770..fb8223c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "1.0.1a0" +version = "1.0.1a1" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md" From 5530ef29a58fb16d3c8146e4ac71671734ff41b5 Mon Sep 17 00:00:00 2001 From: stitchee bot Date: Mon, 15 Apr 2024 18:48:21 +0000 Subject: [PATCH 34/38] /version 1.0.0rc1 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fb8223c..cf754df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "1.0.1a1" +version = "1.0.0rc1" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md" From a68101d46cca9ea4ff98d3a71da03edca6cce1ba Mon Sep 17 00:00:00 2001 From: danielfromearth Date: Mon, 15 Apr 2024 14:52:32 -0400 Subject: [PATCH 35/38] update version header in CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cbf20bd..55104da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased] +## [1.0.0] ### Added - [Pull #1](https://github.com/danielfromearth/stitchee/pull/1): An initial GitHub Actions workflow From 34dab412f1fe84cf94c5650b45d7f9edd060a9b8 Mon Sep 17 00:00:00 2001 From: stitchee bot Date: Mon, 15 Apr 2024 18:54:26 +0000 Subject: [PATCH 36/38] /version 1.0.0rc2 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index cf754df..ad34ef4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "1.0.0rc1" +version = "1.0.0rc2" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md" From 180e69846e3222fc312c97b28be251b10637ef1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 22:27:21 +0000 Subject: [PATCH 37/38] Bump ruff from 0.3.5 to 0.3.7 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.3.5 to 0.3.7. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/v0.3.5...v0.3.7) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index eadf88b..b3d225a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1075,28 +1075,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.3.5" +version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, - {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, - {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, - {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, - {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + 
{file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, ] [[package]] @@ -1229,4 +1229,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "3d0031fb785d4045660c26520158b547671d8f70b377cd72da13b3b16c5eb672" +content-hash = "4aded716a2462ce682e2dc8228895f785620feeae5cd79cf417dcbe433898274" diff --git a/pyproject.toml b/pyproject.toml index fb8223c..a95407c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ harmony-service-lib = "^1.0.25" pytest = "^8.1.1" mypy = "^1.9.0" black = "^24.2.0" -ruff = "^0.3.5" +ruff = "^0.3.7" coverage = "^7.4.4" pytest-cov = "^5.0.0" From 8aa10ce6b803cdba4b4bdd630e855ca38d4eab96 Mon Sep 17 00:00:00 2001 From: stitchee bot Date: Tue, 16 Apr 2024 13:29:32 +0000 Subject: [PATCH 38/38] /version 1.0.0rc3 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2337e5d..6e1719d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "stitchee" -version = "1.0.0rc2" +version = "1.0.0rc3" description = "NetCDF4 Along-existing-dimension Concatenation Service" authors = ["Daniel Kaufman "] readme = "README.md"