Skip to content

Commit

Permalink
clean up
Browse files Browse the repository at this point in the history
  • Loading branch information
cehbrecht committed Nov 8, 2023
1 parent 5005503 commit 6da0762
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 32 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ src/
*.log
*.lock
testdata.json
output_*.nc

# IPython
.ipynb_checkpoints
Expand Down
34 changes: 2 additions & 32 deletions rook/utils/weighted_average_utils.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,14 @@
import numpy as np

import xarray as xr

import collections

from roocs_utils.parameter import collection_parameter
from roocs_utils.parameter import dimension_parameter

from roocs_utils.project_utils import derive_ds_id

from daops.ops.base import Operation
from daops.utils import normalise

from clisops.ops import subset

# from clisops.ops.average import average_over_dims as average


def apply_weighted_mean(ds):
# fix cftime calendar
Expand All @@ -33,44 +26,21 @@ def apply_weighted_mean(ds):


class WeightedAverage(Operation):
def _resolve_params(self, collection, **params):
    """Parameterise the inputs for a weighted-average operation.

    Wraps *collection* in a ``CollectionParameter`` and stores it on
    ``self.collection``; fills ``self.params`` with the fixed averaging
    dimensions (latitude/longitude) and the caller-supplied
    ``ignore_undetected_dims`` flag.
    """
    # The averaging dimensions are fixed for this operation.
    self.collection = collection_parameter.CollectionParameter(collection)
    self.params = {
        "dims": dimension_parameter.DimensionParameter(["latitude", "longitude"]),
        "ignore_undetected_dims": params.get("ignore_undetected_dims"),
    }

def _calculate(self):
config = {
"output_type": self._output_type,
"output_dir": self._output_dir,
"split_method": self._split_method,
"file_namer": self._file_namer,
}

self.params.update(config)

new_collection = collections.OrderedDict()

for dset in self.collection:
ds_id = derive_ds_id(dset)
new_collection[ds_id] = dset.file_paths

# Normalise (i.e. "fix") data inputs based on "character"
# Normalise data inputs
norm_collection = normalise.normalise(
new_collection, False # self._apply_fixes
)

rs = normalise.ResultSet(vars())

# apply weights
# calculate weighted mean
datasets = []
for ds_id in norm_collection.keys():
ds = norm_collection[ds_id]
Expand Down

0 comments on commit 6da0762

Please sign in to comment.