From 0538cf8cb1ea53d25749fcb1eae6258439d77609 Mon Sep 17 00:00:00 2001
From: Piyush Ingale
Date: Fri, 1 Dec 2023 09:24:44 +0000
Subject: [PATCH] fixed lint errors

---
 weather_mv/loader_pipeline/ee.py    | 6 ++++--
 weather_mv/loader_pipeline/sinks.py | 8 ++++----
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/weather_mv/loader_pipeline/ee.py b/weather_mv/loader_pipeline/ee.py
index a5d6bca0..87d5b389 100644
--- a/weather_mv/loader_pipeline/ee.py
+++ b/weather_mv/loader_pipeline/ee.py
@@ -536,7 +536,7 @@ def convert_to_asset(self, queue: Queue, uri: str):
         def get_dims_data(index: int) -> t.List[t.Any]:
             """Returns dimensions for the given flattened index."""
             return [
-                dim[int(index / math.prod(dims_shape[i+1:])) % len(dim)] for (i, dim) in enumerate(dims_data)
+                dim[int(index/math.prod(dims_shape[i+1:])) % len(dim)] for (i, dim) in enumerate(dims_data)
             ]
 
         # Copy CSV to gcs.
@@ -548,7 +548,9 @@ def get_dims_data(index: int) -> t.List[t.Any]:
             # Write rows in batches.
             for i in range(0, shape, ROWS_PER_WRITE):
                 writer.writerows(
-                    [get_dims_data(i) + list(row) for row in zip(*[d[i:i + ROWS_PER_WRITE] for d in data])]
+                    [get_dims_data(i) + list(row) for row in zip(
+                        *[d[i:i + ROWS_PER_WRITE] for d in data]
+                    )]
                 )
 
         upload(temp.name, target_path)
diff --git a/weather_mv/loader_pipeline/sinks.py b/weather_mv/loader_pipeline/sinks.py
index bc064599..05f2301c 100644
--- a/weather_mv/loader_pipeline/sinks.py
+++ b/weather_mv/loader_pipeline/sinks.py
@@ -683,7 +683,7 @@ def open_dataset(uri: str,
 
     with open_local(uri) as local_path:
         _, uri_extension = os.path.splitext(uri)
-        xr_dataset: xr.Dataset = __open_dataset_file(local_path,
+        xr_datasets: xr.Dataset = __open_dataset_file(local_path,
                                                       uri_extension,
                                                       disable_grib_schema_normalization,
                                                       local_open_dataset_kwargs,
@@ -703,10 +703,11 @@ def open_dataset(uri: str,
 
             logger.info(f'opened dataset size: {total_size_in_bytes}')
         else:
+            xr_dataset = xr_datasets
             if start_date is not None and end_date is not None:
                 xr_dataset = xr_dataset.sel(time=slice(start_date, end_date))
             if uri_extension in ['.tif', '.tiff']:
-                xr_dataset = _preprocess_tif(xr_datasets,
+                xr_dataset = _preprocess_tif(xr_dataset,
                                              local_path,
                                              tif_metadata_for_start_time,
                                              tif_metadata_for_end_time,
@@ -714,8 +715,7 @@
                                              band_names_dict,
                                              initialization_time_regex,
                                              forecast_time_regex)
-            else:
-                xr_dataset = xr_datasets
+
             xr_dataset.attrs.update({'dtype': dtype, 'crs': crs, 'transform': transform})
 
             logger.info(f'opened dataset size: {xr_dataset.nbytes}')
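
Note on the get_dims_data hunk: the index arithmetic is a row-major unravel of a flattened index into per-dimension coordinates; dividing by the product of the trailing dimension sizes and taking the remainder modulo the dimension length recovers each coordinate. A minimal standalone sketch of that arithmetic, using made-up dims_data/dims_shape values and cross-checked against numpy.unravel_index:

import math
import typing as t

import numpy as np

# Hypothetical dimension coordinates, standing in for the dims_data built
# inside convert_to_asset (time x latitude x longitude).
dims_data: t.List[t.List[t.Any]] = [
    ['2023-12-01', '2023-12-02'],  # time, len 2
    [10.0, 20.0, 30.0],            # latitude, len 3
    [100.0, 110.0],                # longitude, len 2
]
dims_shape = [len(d) for d in dims_data]  # [2, 3, 2]

def get_dims_data(index: int) -> t.List[t.Any]:
    """Returns dimension values for the given flattened (row-major) index."""
    return [
        dim[int(index / math.prod(dims_shape[i + 1:])) % len(dim)]
        for (i, dim) in enumerate(dims_data)
    ]

# Every flat index maps to the same coordinates numpy would pick.
for flat in range(math.prod(dims_shape)):
    expected = [d[c] for d, c in zip(dims_data, np.unravel_index(flat, dims_shape))]
    assert get_dims_data(flat) == expected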
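
Note on the writer.writerows hunk: each d in data is a flattened per-variable value array, so slicing every array to the same window and transposing the slices with zip(*...) yields one CSV row per index. A toy sketch of that batching pattern, with illustrative column data and batch size (the real pipeline additionally prepends the dimension values from get_dims_data to each row):

import csv
import tempfile

ROWS_PER_WRITE = 2  # illustrative; the pipeline defines its own constant

# Hypothetical flattened value arrays, one per data variable.
data = [
    [1.0, 2.0, 3.0, 4.0, 5.0],  # e.g. temperature
    [0.1, 0.2, 0.3, 0.4, 0.5],  # e.g. humidity
]
shape = len(data[0])  # total number of rows to write

with tempfile.NamedTemporaryFile('w', newline='', suffix='.csv') as temp:
    writer = csv.writer(temp)
    # Write rows in batches: slice every column to the same window, then
    # zip(*) transposes the column slices into row tuples.
    for i in range(0, shape, ROWS_PER_WRITE):
        writer.writerows(
            [list(row) for row in zip(
                *[d[i:i + ROWS_PER_WRITE] for d in data]
            )]
        )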
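
Note on the sinks.py hunks: binding xr_dataset = xr_datasets at the top of the else branch means the later time slice and TIFF preprocessing both operate on a name that is already defined. The time slice relies on xarray's label-based selection, where slice() on a datetime coordinate is inclusive at both ends; a small self-contained sketch (the toy dataset stands in for the one returned by __open_dataset_file):

import numpy as np
import pandas as pd
import xarray as xr

# Toy dataset with a daily time coordinate.
xr_dataset = xr.Dataset(
    {'temperature': ('time', np.arange(10.0))},
    coords={'time': pd.date_range('2023-12-01', periods=10)},
)

start_date, end_date = '2023-12-03', '2023-12-05'
# Label-based and inclusive on both ends: Dec 3, 4 and 5 are all kept.
subset = xr_dataset.sel(time=slice(start_date, end_date))
assert subset.sizes['time'] == 3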