Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

One zarr to rule them all #12

Merged
merged 21 commits on Apr 22, 2024
Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ sweeps
test_*.sh
lightning_logs
.vscode
outputs

### Python ###
# Byte-compiled / optimized / DLL files
Expand Down
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ repos:
description: Check for spelling errors
language: system
entry: codespell
args: ['--ignore-words-list=laf']
- repo: local
hooks:
- id: black
Expand Down
8 changes: 4 additions & 4 deletions create_parameter_weights.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,12 +88,12 @@ def main():
flux_squares = []
for batch_data in tqdm(loader):
if constants.GRID_FORCING_DIM > 0:
init_batch, target_batch, forcing_batch = batch_data
init_batch, target_batch, _, forcing_batch = batch_data
flux_batch = forcing_batch[:, :, :, 0] # Flux is first index
flux_means.append(torch.mean(flux_batch)) # (,)
flux_squares.append(torch.mean(flux_batch**2)) # (,)
else:
init_batch, target_batch = batch_data
init_batch, target_batch, _ = batch_data

batch = torch.cat(
(init_batch, target_batch), dim=1
Expand Down Expand Up @@ -134,12 +134,12 @@ def main():
diff_squares = []
for batch_data in tqdm(loader_standard):
if constants.GRID_FORCING_DIM > 0:
init_batch, target_batch, forcing_batch = batch_data
init_batch, target_batch, _, forcing_batch = batch_data
flux_batch = forcing_batch[:, :, :, 0] # Flux is first index
flux_means.append(torch.mean(flux_batch)) # (,)
flux_squares.append(torch.mean(flux_batch**2)) # (,)
else:
init_batch, target_batch = batch_data
init_batch, target_batch, _ = batch_data
batch_diffs = init_batch[:, 1:] - target_batch
# (N_batch', N_t-1, N_grid, d_features)

Expand Down
109 changes: 0 additions & 109 deletions create_single_zarr.py

This file was deleted.

35 changes: 18 additions & 17 deletions create_static_features.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# Standard library
import os
from argparse import ArgumentParser

# Third-party
Expand All @@ -15,28 +16,28 @@ def main():
parser.add_argument(
"--xdim",
type=str,
default="x_1",
help="Name of the x-dimension in the dataset (default: x_1)",
default="x",
help="Name of the x-dimension in the dataset (default: x)",
)
parser.add_argument(
"--ydim",
type=str,
default="y_1",
help="Name of the x-dimension in the dataset (default: y_1)",
default="y",
help="Name of the x-dimension in the dataset (default: y)",
)
parser.add_argument(
"--zdim",
type=str,
default="z_1",
help="Name of the x-dimension in the dataset (default: z_1)",
default="z",
help="Name of the x-dimension in the dataset (default: z)",
)
parser.add_argument(
"--field_names",
nargs="+",
default=["hsurf", "FI", "P0FL"],
default=["HSURF", "FI", "HFL"],
help=(
"Names of the fields to extract from the .nc file "
'(default: ["hsurf", "FI", "P0FL"])'
'(default: ["HSURF", "FI", "HFL"])'
),
)
parser.add_argument(
Expand All @@ -49,14 +50,12 @@ def main():
),
)
parser.add_argument(
"--outdir",
"--dataset",
type=str,
default="data/cosmo/static/",
help=(
"Output directory for the static features "
"(default: data/cosmo/static/)"
),
default="cosmo",
help=("Name of the dataset (default: cosmo)"),
)

args = parser.parse_args()

ds = xr.open_zarr(constants.EXAMPLE_FILE).isel(time=0)
Expand All @@ -82,8 +81,10 @@ def main():
)
np_fields = np.concatenate(np_fields, axis=-1) # (N_x, N_y, N_fields)

outdir = os.path.join("data", args.dataset, "static/")

# Save the numpy array to a .npy file
np.save(args.outdir + "reference_geopotential_pressure.npy", np_fields)
np.save(outdir + "reference_geopotential_pressure.npy", np_fields)

# Get the dimensions of the dataset
dims = ds.sizes
Expand All @@ -95,7 +96,7 @@ def main():
# Stack the 2D arrays into a 3D array with x and y as the first dimension
grid_xy = np.stack((y_grid, x_grid))

np.save(args.outdir + "nwp_xy.npy", grid_xy) # (2, N_x, N_y)
np.save(outdir + "nwp_xy.npy", grid_xy) # (2, N_x, N_y)

# Create a mask with the same dimensions, initially set to False
mask = np.full((dims[args.xdim], dims[args.ydim]), False)
Expand All @@ -107,7 +108,7 @@ def main():
mask[:, -args.boundaries :] = True # right boundary

# Save the numpy array to a .npy file
np.save(args.outdir + "border_mask", mask) # (N_x, N_y)
np.save(outdir + "border_mask", mask) # (N_x, N_y)


if __name__ == "__main__":
Expand Down
Loading
Loading