From ef2ebe21ba03220eab9ec84020d7a17c43f3210f Mon Sep 17 00:00:00 2001
From: Frederique
Date: Tue, 26 Sep 2023 15:11:07 +0200
Subject: [PATCH 01/10] Comments and todos for the hazard functionality

---
 hydromt_fiat/fiat.py              |  7 +--
 hydromt_fiat/workflows/hazard.py  |  1 +
 tests/test_hazard.py              | 48 ------------------
 tests/test_integrations_hazard.py | 81 +++++++++++++++----------------
 4 files changed, 45 insertions(+), 92 deletions(-)
 delete mode 100644 tests/test_hazard.py

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index 3209fdc7..907bead5 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -365,9 +365,9 @@ def setup_hazard(
         """
         # check parameters types and size, and existance of provided files of maps

-        params = check_parameters_type(map_fn, map_type, rp, crs, nodata, var, chunks)
-        check_parameters_size(params)
-        check_files(params, self.root)
+        params = check_parameters_type(map_fn, map_type, rp, crs, nodata, var, chunks) #TODO: remove this function
+        check_parameters_size(params) # map_fn, map_type, rp, crs, nodata, var, chunks
+        check_files(params, self.root) # This function is probably redundant

         rp_list = []
         map_name_lst = []
diff --git a/hydromt_fiat/workflows/hazard.py b/hydromt_fiat/workflows/hazard.py
index 0f014aa3..1f6f4a85 100644
--- a/hydromt_fiat/workflows/hazard.py
+++ b/hydromt_fiat/workflows/hazard.py
@@ -61,6 +61,7 @@ def check_parameters_type(
     dict
         Dictionary with the parameters and list of parameters used in setup_hazard.
     """
+    #TODO: remove this function
     params = dict()

     params["map_fn"] = map_fn
diff --git a/tests/test_hazard.py b/tests/test_hazard.py
deleted file mode 100644
index b5b942cb..00000000
--- a/tests/test_hazard.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from hydromt_fiat.fiat import FiatModel
-from hydromt_fiat.workflows.hazard import *
-from hydromt.config import configread
-from pathlib import Path
-import pytest
-from hydromt.log import setuplog
-
-DATASET = Path(
-    "P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database"
-)
-
-_cases = {
-    "test_hazard": {
-        "folder": "test_hazard_1",
-        "ini": "test_hazard.ini",
-        "catalog": "fiat_catalog.yml",
-    }
-}
-
-
-@pytest.mark.parametrize("case", list(_cases.keys()))
-def test_hazard(case):
-    # Read model in examples folder.
-    root = DATASET.joinpath(_cases[case]["folder"])
-    config_fn = DATASET.joinpath(_cases[case]["ini"])
-    data_libs = DATASET.joinpath(_cases[case]["catalog"])
-
-    logger = setuplog("hydromt_fiat", log_level=10)
-
-    hyfm = FiatModel(
-        root=root, mode="w", data_libs=data_libs, config_fn=config_fn, logger=logger
-    )
-
-    map_fn = configread(config_fn)["setup_hazard"]["map_fn"]
-    map_type = configread(config_fn)["setup_hazard"]["map_type"]
-    rp = configread(config_fn)["setup_hazard"]["rp"]
-    crs = configread(config_fn)["setup_hazard"]["crs"]
-    nodata = configread(config_fn)["setup_hazard"]["nodata"]
-    var = configread(config_fn)["setup_hazard"]["var"]
-    chunks = configread(config_fn)["setup_hazard"]["chunks"]
-    configread(config_fn)["setup_hazard"]["name_catalog"]
-    configread(config_fn)["setup_hazard"]["risk_output"]
-    configread(config_fn)["setup_config"]["hazard_type"]
-
-    params = check_parameters_type(map_fn, map_type, rp, crs, nodata, var, chunks)
-    check_parameters_size(params)
-
-    assert params
diff --git a/tests/test_integrations_hazard.py b/tests/test_integrations_hazard.py
index e0b18986..cc7de5fd 100644
--- a/tests/test_integrations_hazard.py
+++ b/tests/test_integrations_hazard.py
@@ -8,53 +8,54 @@
 EXAMPLEDIR = Path(
     "P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database"
-)
+)

 _cases = {
-    "integration": {
+    "event_map": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_hazard",
-        "ini": EXAMPLEDIR / "test_hazard_unique.ini",
+        "dir": "test_event_map",
+        "configuration": {
+            "setup_hazard": {
+                "map_fn": [
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_100_maps.nc",
+                ],
+                "map_type": "water_depth",
+                "rp": None,
+                "crs": None,
+                "nodata": -99999,
+                "var": "zsmax",
+                "risk_output": False,
+            }
+        }
+    },
+    "risk_maps": {
+        "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
+        "dir": "test_risk_maps",
+        "configuration": {
+            "setup_hazard": {
+                "map_fn": [
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_100_maps.nc",
+                    "MULTIPLE FILES"
+                ],
+                "map_type": "water_depth",
+                "rp": [],
+                "crs": None,
+                "nodata": -99999,
+                "var": "zsmax",
+                "risk_output": False,
+            }
+        }
+    },
 }

+
 @pytest.mark.parametrize("case", list(_cases.keys()))
 def test_hazard(case):
     # Read model in examples folder.
     root = EXAMPLEDIR.joinpath(_cases[case]["dir"])
-    if root.exists:
+    if root.exists():
         shutil.rmtree(root)

-    # uncomment to test event analysis from geotiff file
-    configuration = {
-        "setup_hazard": {
-            "map_fn": ["P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database/data/Hazard/Current_prob_event_set_combined_doNothing_withSeaWall_RP=1_max_flood_depth.tif"],
-            "map_type": "water_depth",
-            "rp": None,
-            "crs": None,
-            "nodata": -99999,
-            "var": None,
-            "chunks": "auto",
-            "name_catalog": None,
-            "risk_output": False,
-        }
-    }
-
-    # uncomment to test risk analysis from geotiff file
-    # configuration = {
-    #     "setup_hazard": {
-    #         "map_fn": ["P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database/data/Hazard/Current_prob_event_set_combined_doNothing_withSeaWall_RP=1_max_flood_depth.tif", "P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database/data/Hazard/Current_prob_event_set_combined_doNothing_withSeaWall_RP=2_max_flood_depth.tif", "P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database/data/Hazard/Current_prob_event_set_combined_doNothing_withSeaWall_RP=5_max_flood_depth.tif"],
-    #         "map_type": "water_depth",
-    #         "rp": None,
-    #         "crs": None,
-    #         "nodata": -99999,
-    #         "var": None,
-    #         "chunks": "auto",
-    #         "name_catalog": None,
-    #         "risk_output": True,
-    #     }
-    # }
-
     # for these test data sfincs output data is required in local files
     # uncomment to test event analysis from sfincs output
     # mode = "single"
@@ -80,7 +81,7 @@ def test_hazard(case):
     #     var = "zsmax"

     # configuration = {
-    #     "setup_hazard": {
+    #     "setup_hazard": {
     #         "map_fn": map_fn,  # absolute or relative (with respect to the configuration.ini) path to the hazard file
     #         "map_type": "water_depth",  # description of the hazard file type
     #         "rp": None,  # hazard return period in years, required for a risk calculation (optional)
     #         "crs": None,  # coordinate reference system of the hazard file (optional)
     #         "nodata": -99999,  # value that is assigned as nodata (optional)
     #         "var": var,  # hazard variable name in NetCDF input files (optional)
     #         "chunks": "auto",  # chunk sizes along each dimension used to load the hazard file into a dask array (default is 'auto') (optional)
     #         "name_catalog": None,
-    #         "risk_output": risk_output,
+    #         "risk_output": risk_output,
     #     }
     # }
-
     logger = setuplog("hydromt_fiat", log_level=10)
     data_catalog_yml = str(_cases[case]["data_catalogue"])

     fm = FiatModel(root=root, mode="w", data_libs=[data_catalog_yml], logger=logger)
     region = fm.data_catalog.get_geodataframe("region", variables=None)
-    # opt = configread(_cases[case]["ini"])
-    # fm.build(region={"geom": region}, opt=opt)
-    fm.build(region={"geom": region}, opt=configuration)
+
+    fm.build(region={"geom": region}, opt=_cases[case]["configuration"])
     fm.write()

     # Check if the hazard folder exists
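The rewritten test above drives the whole model build through a configuration dict instead of an .ini file. A minimal standalone sketch of that pattern, assuming a model root, a data catalog and a hazard map path as placeholders:

from hydromt.log import setuplog
from hydromt_fiat.fiat import FiatModel

logger = setuplog("hydromt_fiat", log_level=10)
fm = FiatModel(root="some_root", mode="w", data_libs=["fiat_catalog.yml"], logger=logger)
region = fm.data_catalog.get_geodataframe("region", variables=None)
configuration = {
    "setup_hazard": {
        "map_fn": ["hazard_map.tif"],  # placeholder hazard map path
        "map_type": "water_depth",
        "risk_output": False,
    }
}
fm.build(region={"geom": region}, opt=configuration)
fm.write()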
From 2f5bcd00c3204569e57f0bef176f05e21f3c1f59 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Wed, 27 Sep 2023 16:50:14 +0200
Subject: [PATCH 02/10] Ordering return periods and adding them as new bands
 in the netcdf

Ordering return periods and adding them as new bands in the netcdf
---
 hydromt_fiat/fiat.py              | 48 +++++++++++++++++++++++++------
 tests/test_integrations_hazard.py |  8 ++++--
 2 files changed, 44 insertions(+), 12 deletions(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index 907bead5..686e1151 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -326,7 +326,8 @@ def setup_hazard(
         risk_output: bool = False,
         unit_conversion_factor: float = 1.0,
     ) -> None:
-        """Set up hazard maps. This component integrates multiple checks for the maps
+        """Set up hazard maps. This component integrates multiple checks for the hazard
+        maps.

         Parameters
         ----------
@@ -446,22 +447,51 @@ def setup_hazard(
         # in case risk_output is required maps are put in a netcdf with a raster with
         # an extra dimension 'rp' accounting for return period
         # select first risk maps
+
+
+        # Order return periods and maps
+        my_dict = {}
+        for rp, name in zip(rp_list, map_name_lst):
+            my_dict[rp] = name
+        sorted_keys = sorted(rp_list, reverse=False)
+        my_dict = {key: my_dict[key] for key in sorted_keys}
+
+
         if risk_output:
             # TODO: put the code below in a separate function in hazard.py
+            layer_list = []
             list_keys = list(self.maps.keys())
-            first_map = self.maps[list_keys[0]].rename("risk_datarray")
-            list_keys.pop(0)
+
+            for key, value in my_dict.items():
+                layer = self.maps[value].rename(key)
+                layer_list.append(layer)

-            # add additional risk maps
-            for idx, x in enumerate(list_keys):
-                key_name = list_keys[idx]
-                layer = self.maps[key_name]
-                first_map = xr.concat([first_map, layer], dim="rp")
+
+            # for idx, x in enumerate(list_keys):
+            #     key_name = list_keys[idx]
+            #     layer = self.maps[key_name].rename(rp_list[idx])
+            #     layer_list.append(layer)
+
+            da = xr.merge(layer_list)
+
+            # list_keys = list(self.maps.keys())
+            # first_map = self.maps[list_keys[0]].rename(rp_list[0])
+            # list_keys.pop(0)
+            # rp_list.pop(0)
+
+            # # add additional risk maps
+            # for idx, x in enumerate(list_keys):
+            #     key_name = list_keys[idx]
+            #     layer = self.maps[key_name].rename(rp_list[idx])
+            #     #first_map = xr.concat([first_map, layer], dim="rp")
+            #     first_map = xr.merge([first_map, layer])

             # convert to a dataset to be able to write attributes when writing the maps
             # in the ouput folders. If datarray is provided attributes will not be
             # shown in the output netcdf dataset
-            da = first_map.to_dataset(name="risk_maps")
+
+            # da = first_map.to_dataset(name="risk_maps")
             da.attrs = {
                 "returnperiod": list(rp_list),
                 "type": params["map_type_lst"],
diff --git a/tests/test_integrations_hazard.py b/tests/test_integrations_hazard.py
index cc7de5fd..cceaaa88 100644
--- a/tests/test_integrations_hazard.py
+++ b/tests/test_integrations_hazard.py
@@ -34,15 +34,17 @@
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_1_maps.nc",
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_50_maps.nc",
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_10_maps.nc",
                     r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\RP_100_maps.nc",
-                    "MULTIPLE FILES"
                 ],
                 "map_type": "water_depth",
-                "rp": [],
+                "rp": None,
                 "crs": None,
                 "nodata": -99999,
                 "var": "zsmax",
-                "risk_output": False,
+                "risk_output": True,
             }
         }
     },
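The ordering step added in this commit boils down to sorting a return-period-to-map-name mapping before merging; a standalone sketch with made-up values:

rp_list = [100, 1, 50, 10]
map_name_lst = ["RP_100_maps", "RP_1_maps", "RP_50_maps", "RP_10_maps"]

my_dict = {rp: name for rp, name in zip(rp_list, map_name_lst)}
sorted_keys = sorted(rp_list, reverse=False)
my_dict = {key: my_dict[key] for key in sorted_keys}
# -> {1: 'RP_1_maps', 10: 'RP_10_maps', 50: 'RP_50_maps', 100: 'RP_100_maps'}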
From 66e7bcca7c878068d4cd489f32f8df0eaae34a39 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Tue, 3 Oct 2023 12:03:19 +0200
Subject: [PATCH 03/10] Removal of redundant functions, integration of grid
 functions to save multiband netcdfs

Removal of redundant functions, integration of grid functions to save
multiband netcdfs
---
 hydromt_fiat/fiat.py              | 185 ++++++---------
 hydromt_fiat/validation.py        | 130 -----------
 hydromt_fiat/workflows/hazard.py  | 377 +++++-------------------------
 tests/test_integrations_hazard.py | 128 ++++++++--
 4 files changed, 244 insertions(+), 576 deletions(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index 686e1151..d0d07069 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -364,58 +364,35 @@ def setup_hazard(
         risk_output : bool, optional
             The parameter that defines if a risk analysis is required, by default False
         """
-        # check parameters types and size, and existance of provided files of maps
-
-        params = check_parameters_type(map_fn, map_type, rp, crs, nodata, var, chunks) #TODO: remove this function
-        check_parameters_size(params) # map_fn, map_type, rp, crs, nodata, var, chunks
-        check_files(params, self.root) # This function is probably redundant
-
+        # create lists of maps and their parameters to be able to iterate over them
+        params = create_lists(map_fn, map_type, rp, crs, nodata, var, chunks)
+        check_lists_size(params)
+
         rp_list = []
         map_name_lst = []

-        # retrieve maps information from parameters and datacatalog
-        # load maps in memory and check them and save the with st_map function
         for idx, da_map_fn in enumerate(params["map_fn_lst"]):
-            da_map_fn, da_name, da_type = read_floodmaps(params, da_map_fn, idx)
-
-            # load flood maps to memory
-            # da = load_floodmaps(self.data_catalog, self.region,da_map_fn,da_name,name_catalog)
-            # reading from path
-            if isinstance(da_map_fn, Path):
-                if da_map_fn.stem == "sfincs_map":
-                    sfincs_root = os.path.dirname(da_map_fn)
-                    sfincs_model = SfincsModel(
-                        sfincs_root, mode="r", logger=self.logger
-                    )
-                    sfincs_model.read_results()
-                    # save sfincs map as GeoTIFF
-                    # result_list = list(sfincs_model.results.keys())
-                    # sfincs_model.write_raster("results.zsmax", compress="LZW")
-                    da = sfincs_model.results["zsmax"]
-                    # da = da.squeeze('timemax').drop('timemax')
-                    da = da.isel(timemax=0).drop("timemax")
-
-                    # Convert to units of the exposure data if required
-                    if self.exposure.unit != da.units:
-                        da = da * unit_conversion_factor
-
-                else:
-                    if not self.region.empty:
-                        # da = self.data_catalog.get_rasterdataset(
-                        #     da_map_fn, geom=self.region
-                        # )
-                        da = self.data_catalog.get_rasterdataset(da_map_fn)
-                    else:
-                        da = self.data_catalog.get_rasterdataset(da_map_fn)
-            # reading from the datacatalog
+            # read maps and retrieve their attributes
+            da_map_fn, da_name, da_type = read_maps(params, da_map_fn, idx)
+
+            if da_map_fn.stem == "sfincs_map":
+                sfincs_root = os.path.dirname(da_map_fn)
+                sfincs_model = SfincsModel(
+                    sfincs_root, mode="r", logger=self.logger
+                )
+                sfincs_model.read_results()
+                da = sfincs_model.results["zsmax"]
+                da = da.isel(timemax=0).drop("timemax")
+                # save sfincs map as GeoTIFF
+                # result_list = list(sfincs_model.results.keys())
+                # sfincs_model.write_raster("results.zsmax", compress="LZW")
+
+                # Convert to units of the exposure data if required
+                # if self.exposure.unit != da.units: #TODO: self.exposure.units is not defined in this test
+                #     da = da * unit_conversion_factor
+
             else:
-                if not self.region.empty:
-                    # da = self.data_catalog.get_rasterdataset(
-                    #     name_catalog, variables=da_name, geom=self.region
-                    # )
-                    da = self.data_catalog.get_rasterdataset(map_fn, variables=da_name)
-                else:
-                    da = self.data_catalog.get_rasterdataset(map_fn, variables=da_name)
+                da = self.data_catalog.get_rasterdataset(da_map_fn)

             da.encoding["_FillValue"] = None
             da = da.raster.gdal_compliant()
@@ -426,96 +403,60 @@ def setup_hazard(
             # check maps return periods
             da_rp = check_maps_rp(params, da, da_name, idx, risk_output)

-            # chek if maps are unique
-            # TODO: check if providing methods like self.get_config can be used
-            # TODO: create a new funtion to check uniqueness trhough files names
-            # check_maps_uniquenes(self.get_config,self.staticmaps,params,da,da_map_fn,da_name,da_type,da_rp,idx)
+            if risk_output and da_map_fn.stem == "sfincs_map":
+                da_name = da_name + f"_{str(da_rp)}"

             post = f"(rp {da_rp})" if risk_output else ""
             self.logger.info(f"Added {hazard_type} hazard map: {da_name} {post}")

             rp_list.append(da_rp)
-
-            # If a risk calculation is required and the map comes from sfincs, they
-            # have the same name so give another name
-            if risk_output and da_map_fn.stem == "sfincs_map":
-                da_name = da_name + f"_{str(da_rp)}"
             map_name_lst.append(da_name)
-            self.set_maps(da, da_name)
-
-        check_map_uniqueness(map_name_lst)
-        # in case risk_output is required maps are put in a netcdf with a raster with
-        # an extra dimension 'rp' accounting for return period
-        # select first risk maps
-
-
-        # Order return periods and maps
-        my_dict = {}
-        for rp, name in zip(rp_list, map_name_lst):
-            my_dict[rp] = name
-        sorted_keys = sorted(rp_list, reverse=False)
-        my_dict = {key: my_dict[key] for key in sorted_keys}
-
-
-        if risk_output:
-            # TODO: put the code below in a separate function in hazard.py
-            layer_list = []
-            list_keys = list(self.maps.keys())
-
-            for key, value in my_dict.items():
-                layer = self.maps[value].rename(key)
-                layer_list.append(layer)
-
-            # for idx, x in enumerate(list_keys):
-            #     key_name = list_keys[idx]
-            #     layer = self.maps[key_name].rename(rp_list[idx])
-            #     layer_list.append(layer)
-
-            da = xr.merge(layer_list)
-
-            # list_keys = list(self.maps.keys())
-            # first_map = self.maps[list_keys[0]].rename(rp_list[0])
-            # list_keys.pop(0)
-            # rp_list.pop(0)
-
-            # # add additional risk maps
-            # for idx, x in enumerate(list_keys):
-            #     key_name = list_keys[idx]
-            #     layer = self.maps[key_name].rename(rp_list[idx])
-            #     #first_map = xr.concat([first_map, layer], dim="rp")
-            #     first_map = xr.merge([first_map, layer])
+            da.attrs = {
+                "returnperiod": str(da_rp),
+                "type": da_type,
+                "name": da_name,
+                "analysis": "event",
+            }

-            # convert to a dataset to be able to write attributes when writing the maps
-            # in the ouput folders. If datarray is provided attributes will not be
-            # shown in the output netcdf dataset
-
-            # da = first_map.to_dataset(name="risk_maps")
-            da.attrs = {
-                "returnperiod": list(rp_list),
-                "type": params["map_type_lst"],
-                "name": map_name_lst,
-                "Analysis": "risk",
+            da = da.to_dataset(name=da_name)
+            self.set_maps(da, da_name)
+
+        check_map_uniqueness(map_name_lst)
+
+        # in case of risk analysis, create a single netcdf with multiple bands, one per rp
+        if risk_output:
+
+            da, sorted_rp, sorted_names = create_risk_dataset(params, rp_list, map_name_lst, self.maps)
+
+            self.set_grid(da)
+
+            self.grid.attrs = {
+                "returnperiod": sorted_rp,
+                "type": params["map_type_lst"], #TODO: This parameter has to be changed in case that a list with different hazard types per map is provided
+                "name": sorted_names,
+                "analysis": "risk",
             }
-            # load merged map into self.maps
-            self.set_maps(da)

         list_maps = list(self.maps.keys())

-        # erase individual maps from self.maps keeping the merged map
-        for item in list_maps[:-1]:
+        for item in list_maps[:]:
             self.maps.pop(item)

-        self.set_config("hazard.return_periods", rp_list)
+        # set configuration .toml file
+        self.set_config("hazard.return_periods",
+            str(da_rp) if not risk_output else sorted_rp
+        )

-        # the metadata of the hazard maps is saved in the configuration toml files
-        # this component was modified to provided the element [0] od the list
-        # in case multiple maps are required then remove [0]
         self.set_config(
             "hazard.file",
             [
                 str(Path("hazard") / (hazard_map + ".nc"))
-                for hazard_map in self.maps.keys()
+                for hazard_map in self.maps.keys()
+            ][0] if not risk_output else
+            [
+                str(Path("hazard") / ("risk_map" + ".nc"))
             ][0],
         )
         self.set_config(
             "hazard.crs",
             [
                 "EPSG:" + str((self.maps[hazard_map].rio.crs.to_epsg()))
                 for hazard_map in self.maps.keys()
-            ][0],
+            ][0] if not risk_output else
+            [
+                "EPSG:" + str((self.crs.to_epsg()))
+            ][0]
+            ,
         )

         self.set_config(
-            "hazard.elevation_reference", "dem" if da_type == "water_depth" else "datum"
+            "hazard.elevation_reference",
+            "dem" if da_type == "water_depth" else "datum"
         )

         # Set the configurations for a multiband netcdf
         self.set_config(
             "hazard.settings.subset",
-            [(self.maps[hazard_map].name) for hazard_map in self.maps.keys()][0],
+            [
+                (self.maps[hazard_map].name)
+                for hazard_map in self.maps.keys()
+            ][0] if not risk_output else sorted_rp,
         )

         self.set_config(
@@ -764,6 +713,8 @@ def write(self):
         self.write_config()
         if self.maps:
             self.write_maps(fn="hazard/{name}.nc")
+        if self.grid:
+            self.write_grid(fn="hazard/risk_map.nc")
         if self.geoms:
             self.write_geoms(fn="exposure/{name}.gpkg", driver="GPKG")
         if self._tables:
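The risk grid written above by write_grid is just an xarray Dataset with one band per return period; a toy sketch of that structure (shape, values and output path are illustrative):

import numpy as np
import xarray as xr

bands = {str(rp): (("y", "x"), np.zeros((2, 2))) for rp in [1, 10, 50]}
ds = xr.Dataset(bands, attrs={"returnperiod": [1, 10, 50], "analysis": "risk"})
# ds.to_netcdf("hazard/risk_map.nc")  # roughly what write_grid produces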
diff --git a/hydromt_fiat/validation.py b/hydromt_fiat/validation.py
index 4507e46f..895c5906 100644
--- a/hydromt_fiat/validation.py
+++ b/hydromt_fiat/validation.py
@@ -8,94 +8,6 @@ def check_dir_exist(dir, name=None):
             f"The directory indicated by the '{name}' parameter does not exist."
         )

-def check_file_exist(root, param_lst, name=None):
-    root = Path(root)
-    param_lst = [Path(p) for p in param_lst]
-    for param_idx, param in enumerate(param_lst):
-        if isinstance(param, dict):
-            fn_lst = list(param.values())
-        else:
-            fn_lst = [param]
-        for fn_idx, fn in enumerate(fn_lst):
-            if not Path(fn).is_file():
-                if root.joinpath(fn).is_file():
-                    if isinstance(param, dict):
-                        param_lst[param_idx][
-                            list(param.keys())[fn_idx]
-                        ] = root.joinpath(fn)
-                    else:
-                        param_lst[param_idx] = root.joinpath(fn)
-            else:
-                if isinstance(param, dict):
-                    param_lst[param_idx][list(param.keys())[fn_idx]] = Path(fn)
-                else:
-                    param_lst[param_idx] = Path(fn)
-            try:
-                if isinstance(param, dict):
-                    assert isinstance(
-                        param_lst[param_idx][list(param.keys())[fn_idx]], Path
-                    )
-                else:
-                    assert isinstance(param_lst[param_idx], Path)
-            except AssertionError:
-                raise TypeError(
-                    f"The file indicated by the '{name}' parameter does not"
-                    f" exist in the directory '{root}'."
-                )
-
-#TODO: Improve this tool without calling model.get_congif(input_dir)
-# def check_file_exist(get_config, root, param_lst, name=None, input_dir=None):
-#     root = Path(root)
-#     param_lst = [Path(p) for p in param_lst]
-#     for param_idx, param in enumerate(param_lst):
-#         if isinstance(param, dict):
-#             fn_lst = list(param.values())
-#         else:
-#             fn_lst = [param]
-#         for fn_idx, fn in enumerate(fn_lst):
-#             if not Path(fn).is_file():
-#                 if root.joinpath(fn).is_file():
-#                     if isinstance(param, dict):
-#                         param_lst[param_idx][
-#                             list(param.keys())[fn_idx]
-#                         ] = root.joinpath(fn)
-#                     else:
-#                         param_lst[param_idx] = root.joinpath(fn)
-#                 if input_dir is not None:
-#                     if get_config(input_dir).joinpath(fn).is_file():
-#                         if isinstance(param, dict):
-#                             param_lst[param_idx][
-#                                 list(param.keys())[fn_idx]
-#                             ] = get_config(input_dir).joinpath(fn)
-#                         else:
-#                             param_lst[param_idx] = get_config(
-#                                 input_dir
-#                             ).joinpath(fn)
-#             else:
-#                 if isinstance(param, dict):
-#                     param_lst[param_idx][list(param.keys())[fn_idx]] = Path(fn)
-#                 else:
-#                     param_lst[param_idx] = Path(fn)
-#             try:
-#                 if isinstance(param, dict):
-#                     assert isinstance(
-#                         param_lst[param_idx][list(param.keys())[fn_idx]], Path
-#                     )
-#                 else:
-#                     assert isinstance(param_lst[param_idx], Path)
-#             except AssertionError:
-#                 if input_dir is None:
-#                     raise TypeError(
-#                         f"The file indicated by the '{name}' parameter does not"
-#                         f" exist in the directory '{root}'."
-#                     )
-#                 else:
-#                     raise TypeError(
-#                         f"The file indicated by the '{name}' parameter does not"
-#                         f" exist in either of the directories '{root}' or "
-#                         f"'{get_config(input_dir)}'."
-#                     )

 def check_uniqueness(map_name_lst):
     def check_duplicates(lst):
@@ -113,48 +25,6 @@ def check_duplicates(lst):
             f"The filenames of the hazard maps should be unique."
         )

-#TODO: Improve this tool without calling model. Just checking the maps names
-# def check_uniqueness(model, *args, file_type=None, filename=None):
-#     """ """
-
-#     args = list(args)
-#     if len(args) == 1 and "." in args[0]:
-#         args = args[0].split(".") + args[1:]
-#     branch = args.pop(-1)
-#     for key in args[::-1]:
-#         branch = {key: branch}

-#     if model.get_config(args[0], args[1]):
-#         for key in model.staticmaps.data_vars:
-#             if filename == key:
-#                 raise ValueError(
-#                     f"The filenames of the {file_type} maps should be unique."
-#                 )
-#             if (
-#                 model.get_config(args[0], args[1], key)
-#                 == list(branch[args[0]][args[1]].values())[0]
-#             ):
-#                 raise ValueError(f"Each model input layers must be unique.")
-
-def check_param_type(param, name=None, types=None):
-    """ """
-
-    if not isinstance(param, list):
-        raise TypeError(
-            f"The '{name}_lst' parameter should be a of {list}, received a "
-            f"{type(param)} instead."
-        )
-    for i in param:
-        if not isinstance(i, types):
-            if isinstance(types, tuple):
-                types = " or ".join([str(j) for j in types])
-            else:
-                types = types
-            raise TypeError(
-                f"The '{name}' parameter should be a of {types}, received a "
-                f"{type(i)} instead."
-            )
-
 def get_param(param_lst, map_fn_lst, file_type, filename, i, param_name):
     """ """
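check_uniqueness, which survives the clean-up above, only needs to flag duplicate map names; an equivalent check using collections.Counter:

from collections import Counter

map_name_lst = ["RP_1_maps", "RP_10_maps", "RP_1_maps"]
duplicates = [name for name, count in Counter(map_name_lst).items() if count > 1]
if duplicates:
    raise ValueError("The filenames of the hazard maps should be unique.")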
diff --git a/hydromt_fiat/workflows/hazard.py b/hydromt_fiat/workflows/hazard.py
index 1f6f4a85..b69f8a57 100644
--- a/hydromt_fiat/workflows/hazard.py
+++ b/hydromt_fiat/workflows/hazard.py
@@ -14,7 +14,7 @@ import geopandas as gpd


-def check_parameters_type(
+def create_lists(
     map_fn: Union[str, Path, list[str], list[Path]],
     map_type: Union[str, list[str]],
     rp: Union[int, list[int], None] = None,
@@ -23,7 +23,7 @@ import geopandas as gpd
     var: Union[str, list[str], None] = None,
     chunks: Union[int, str, list[int]] = "auto",
 ) -> dict:
-    """Check data type of parameters and save them as list items.
+    """Make lists out of the parameters provided in the setup hazard maps.

     Parameters
     ----------
@@ -61,28 +61,28 @@ def create_lists(
     dict
         Dictionary with the parameters and list of parameters used in setup_hazard.
     """
     params = dict()

     params["map_fn"] = map_fn
     params["nodata"] = nodata
     params["var"] = var

-    def validate_type(dictionary, param, name, types):
-        params_lst = [param] if isinstance(param, types) else param
-        check_param_type(params_lst, name=name, types=types)
-        dictionary[name + "_lst"] = params_lst
+    def check_list(param, name):
+        params_lst = [param] if not isinstance(param, list) else param
+        params[name + "_lst"] = params_lst
         return
+
+    check_list(map_fn, name="map_fn")
+    check_list(map_type, name="map_type")

-    validate_type(params, map_fn, name="map_fn", types=(str, Path))
-    validate_type(params, map_type, name="map_type", types=str)
     if chunks != "auto":
-        validate_type(params, chunks, name="chunks", types=(int, dict))
+        check_list(chunks, name="chunks")
     if rp is not None:
-        validate_type(params, rp, name="rp", types=(float, int))
+        check_list(rp, name="rp")
     if crs is not None:
-        validate_type(params, crs, name="crs", types=(int, str))
+        check_list(crs, name="crs")
     if nodata is not None:
-        validate_type(params, nodata, name="nodata", types=(float, int))
+        check_list(nodata, name="nodata")
     if var is not None:
-        validate_type(params, var, name="var", types=str)
+        check_list(var, name="var")

     return params


-def check_parameters_size(
+def check_lists_size(
     params: dict,
 ):
     """Check that list of parameters are of the same size in case multiple maps are

     def error_message(variable_list):
     ):
         error_message("var")

-def check_files(
-    params: dict,
-    root: str,
-):
-    """Check if the provided files paths exists. I will raise an error in case the
-    flood maps does not exist
-
-    Parameters
-    ----------
-    params : dict
-        Dictionary with the parameters and list of parameters used in setup_hazard.
-    root : str
-        The root directory of the model.
-    """
-    # load dictionary variables
-    map_fn_lst = params["map_fn_lst"]
-
-    # Check if the hazard input files exist.
-    check_file_exist(root, param_lst=map_fn_lst, name="map_fn")
-
-
-def read_floodmaps(
+def read_maps(
     params: dict,
     da_map_fn: str,
     idx: int,

     map_fn_lst = params["map_fn_lst"]
     map_type_lst = params["map_type_lst"]

-    # Check if it is a path or a name from the catalog
+    # check existence of path
     if os.path.exists(da_map_fn):
         da_map_fn = Path(da_map_fn)
-        da_name = da_map_fn.stem
+        da_name = da_map_fn.stem
         da_suffix = da_map_fn.suffix
     else:
-        da_name = da_map_fn
+        raise ValueError(
+            f"The map {da_map_fn} could not be found."
+        )

-    da_type = get_param(map_type_lst, map_fn_lst, "hazard", da_name, idx, "map type")
+    # retrieve data type
+    da_type = get_param(
+        map_type_lst,
+        map_fn_lst,
+        "hazard",
+        da_name,
+        idx,
+        "map type"
+    )

-    # Get the local hazard map.
+    # get chunk size for the map
     kwargs.update(chunks=chunks if chunks == "auto" else params["chunks_lst"][idx])

-    if "da_suffix" in locals() and da_suffix == ".nc":
+    # check if we are providing a NetCDF file
+    if da_suffix == ".nc":
         if var is None:
             raise ValueError(
                 "The 'var' parameter is required when reading NetCDF data."
             )
+        # retrieve variable name from parameter lists
         da_var = get_param(
             params["var_lst"],
             map_fn_lst,

     return da_map_fn, da_name, da_type


-def load_floodmaps(
-    data_catalog: DataCatalog,
-    region: gpd.GeoDataFrame,
-    da_map_fn: Union[str, Path],
-    da_name: str,
-    name_catalog: str = "flood_maps",
-    **kwargs,
-) -> xr.DataArray:
-    """Load flood maps in memory from datacatalog or a local path
-
-    Parameters
-    ----------
-    data_catalog : DataCatalog
-        Data catalog object from model.
-    region : gpd.GeoDataFrame
-        Region of the model.
-    da_map_fn : Union[str, Path]
-        Path as string or key name in datacatalog of the hazard a specific hazard
-        map idx.
-    da_name : str
-        File name of a specific hazard map.
-    name_catalog : str, optional
-        Name of data catalog item to take the flood maps from, by default "flood_maps".
-
-    Returns
-    -------
-    xr.DataArray
-        Hazard map to be loaded to the model's maps
-    """
-
-    # reading from path
-    if da_map_fn.stem:
-        if da_map_fn.stem == "sfincs_map":
-            sfincs_root = os.path.dirname(da_map_fn)
-            sfincs_model = SfincsModel(sfincs_root, mode="r")
-            sfincs_model.read_results()
-            # result_list = list(sfincs_model.results.keys())
-            # sfincs_model.write_raster("results.zsmax", compress="LZW")
-            da = sfincs_model.results["zsmax"]
-            da.encoding["_FillValue"] = None
-        else:
-            if not region.empty:
-                da = data_catalog.get_rasterdataset(da_map_fn, geom=region, **kwargs)
-            else:
-                da = data_catalog.get_rasterdataset(da_map_fn, **kwargs)
-    # reading from the datacatalog
-    else:
-        if not region.empty:
-            da = data_catalog.get_rasterdataset(
-                name_catalog, variables=da_name, geom=region
-            )
-        else:
-            da = data_catalog.get_rasterdataset(name_catalog, variables=da_name)
-
-    return da
-
-# def load_floodmaps(
-#     data_catalog: DataCatalog,
-#     region: gpd.GeoDataFrame,
-#     da_map_fn: Union[str, Path],
-#     da_name: str,
-#     name_catalog: str = "flood_maps",
-#     **kwargs,
-# ) -> xr.DataArray:
-#     """Load flood maps in memory from datacatalog or a local path
-
-#     Parameters
-#     ----------
-#     data_catalog : DataCatalog
-#         Data catalog object from model.
-#     region : gpd.GeoDataFrame
-#         Region of the model.
-#     da_map_fn : Union[str, Path]
-#         Path as string or key name in datacatalog of the hazard a specific hazard
-#         map idx.
-#     da_name : str
-#         File name of a specific hazard map.
-#     name_catalog : str, optional
-#         Name of data catalog item to take the flood maps from, by default "flood_maps".
-
-#     Returns
-#     -------
-#     xr.DataArray
-#         Hazard map to be loaded to the model's maps
-#     """
-
-#     # reading from path
-#     if da_map_fn.stem:
-#         if da_map_fn.stem == "sfincs_map":
-#             sfincs_root = os.path.dirname(da_map_fn)
-#             sfincs_model = SfincsModel(sfincs_root, mode="r")
-#             sfincs_model.read_results()
-#             # result_list = list(sfincs_model.results.keys())
-#             # sfincs_model.write_raster("results.zsmax", compress="LZW")
-#             da = sfincs_model.results["zsmax"]
-#             da.encoding["_FillValue"] = None
-#         else:
-#             if not region.empty:
-#                 da = data_catalog.get_rasterdataset(da_map_fn, geom=region, **kwargs)
-#             else:
-#                 da = data_catalog.get_rasterdataset(da_map_fn, **kwargs)
-#     # reading from the datacatalog
-#     else:
-#         if not region.empty:
-#             da = data_catalog.get_rasterdataset(
-#                 name_catalog, variables=da_name, geom=region
-#             )
-#         else:
-#             da = data_catalog.get_rasterdataset(name_catalog, variables=da_name)
-
-#     return da
-
 def check_maps_metadata(
     maps: xr.Dataset,
     params: dict,
@@ -428,7 +305,12 @@ def check_maps_metadata(
     # Set nodata and mask the nodata value.
     if nodata is not None:
         da_nodata = get_param(
-            params["nodata_lst"], map_fn_lst, "hazard", da_name, idx, "nodata"
+            params["nodata_lst"],
+            map_fn_lst,
+            "hazard",
+            da_name,
+            idx,
+            "nodata"
         )
         da.raster.set_nodata(nodata=da_nodata)
     elif nodata is None and da.raster.nodata is None:
@@ -498,11 +380,8 @@ def check_maps_rp(
     else:
         da_rp = None

-    if risk_output:
-        da = da.expand_dims({"rp": [da_rp]}, axis=0)
-
     if risk_output and da_rp is None:
-        # Get (if possible) the return period from dataset names if the input parameter is None.
+        # get (if possible) the return period from dataset names if the input parameter is None.
         if "rp" in da_name.lower():

             def fstrip(x):
             raise ValueError(
                 "The hazard map must contain a return period in order to conduct a risk calculation."
             )
-
+
     return da_rp
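The return-period parsing kept in check_maps_rp can be exercised on its own; a self-contained sketch of the same logic:

from ast import literal_eval

def rp_from_name(da_name: str):
    # keep only digits and dots after the last "rp" in the name, e.g. "RP_100_maps" -> 100
    def fstrip(x):
        return x in "0123456789."

    rp_str = "".join(filter(fstrip, da_name.lower().split("rp")[-1])).lstrip("0")
    return literal_eval(rp_str) if rp_str else None

assert rp_from_name("RP_100_maps") == 100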
@@ -543,156 +422,28 @@ def check_map_uniqueness(
     check_uniqueness(map_name_lst)


-# old version of check_maps_uniquenes
-# def check_maps_uniquenes(
-#     get_config,
-#     maps: xr.Dataset,
-#     params: dict,
-#     da: xr.DataArray,
-#     da_map_fn: Union[str, Path],
-#     da_name: str,
-#     da_type: str,
-#     da_rp: Union[int, float],
-#     idx: int,
-# ):
-
-#     chunks = params['chunks']
-
-#     # Add the hazard map to config and staticmaps.
-#     check_uniqueness(
-#         get_config,
-#         maps,
-#         "hazard",
-#         da_type,
-#         da_name,
-#         {
-#             "usage": True,
-#             "map_fn": da_map_fn,
-#             "map_type": da_type,
-#             "rp": da_rp,
-#             "crs": da.raster.crs,
-#             "nodata": da.raster.nodata,
-#             # "var": None if "var_lst" not in locals() else self.var_lst[idx],
-#             "var": None if not 'var_lst' in params else params['var_lst'][idx],
-#             "chunks": "auto" if chunks == "auto" else params['chunks_lst'][idx],
-#         },
-#         file_type="hazard",
-#         filename=da_name,
-#     )
-
-
-def check_floodmaps(
-    get_config,
-    maps,
-    params,
-    da,
-    da_map_fn,
-    da_name,
-    da_type,
-    idx,
-    risk_output,
-    **kwargs,
-):
-    map_fn_lst = params["map_fn_lst"]
-    chunks = params["chunks"]
-    crs = params["crs"]
-    nodata = params["nodata"]
-
-    # Set the coordinate reference system.
-    if crs is not None:
-        da_crs = get_param(
-            params["crs_lst"],
-            map_fn_lst,
-            "hazard",
-            da_name,
-            idx,
-            "coordinate reference system",
-        )
-        da_crs_str = da_crs if "EPSG" in da_crs else f"EPSG:{da_crs}"
-        da.raster.set_crs(da_crs_str)
-    elif crs is None and not da.raster.crs:
-        raise ValueError("The hazard map has no coordinate reference system assigned.")
-
-    # Set nodata and mask the nodata value.
-    if nodata is not None:
-        da_nodata = get_param(
-            params["nodata_lst"], map_fn_lst, "hazard", da_name, idx, "nodata"
-        )
-        da.raster.set_nodata(nodata=da_nodata)
-    elif nodata is None and da.raster.nodata is None:
-        raise ValueError("The hazard map has no nodata value assigned.")
-
-    # Correct (if necessary) the grid orientation from the lower to the upper left corner.
-    # This check could not be implemented into the sfincs_map outputs. They require to be transformed to geotiff first
-    # if da_name != "sfincs_map":
-    if da.raster.res[1] > 0:
-        da = da.reindex({da.raster.y_dim: list(reversed(da.raster.ycoords))})
-
-    # Check if the obtained hazard map is identical.
-    if maps and not maps.raster.identical_grid(da):
-        raise ValueError("The hazard maps should have identical grids.")
-
-    # Get the return period input parameter.
-    if "rp_lst" in params:
-        da_rp = get_param(
-            params["rp_lst"],
-            map_fn_lst,
-            "hazard",
-            da_name,
-            idx,
-            "return period",
-        )
-    else:
-        da_rp = None
-
-    if risk_output:
-        da = da.expand_dims({"rp": [da_rp]}, axis=0)
-
-    if risk_output and da_rp is None:
-        # Get (if possible) the return period from dataset names if the input parameter is None.
-        if "rp" in da_name.lower():
-
-            def fstrip(x):
-                return x in "0123456789."
-
-            rp_str = "".join(filter(fstrip, da_name.lower().split("rp")[-1])).lstrip(
-                "0"
-            )
-
-            try:
-                assert isinstance(
-                    literal_eval(rp_str) if rp_str else None, (int, float)
-                )
-                da_rp = literal_eval(rp_str)
-
-            except AssertionError:
-                raise ValueError(
-                    f"Could not derive the return period for hazard map: {da_name}."
-                )
-        else:
-            raise ValueError(
-                "The hazard map must contain a return period in order to conduct a risk calculation."
-            )
-
-    # Add the hazard map to config and staticmaps.
-    check_uniqueness(
-        get_config,
-        maps,
-        "hazard",
-        da_type,
-        da_name,
-        {
-            "usage": True,
-            "map_fn": da_map_fn,
-            "map_type": da_type,
-            "rp": da_rp,
-            "crs": da.raster.crs,
-            "nodata": da.raster.nodata,
-            # "var": None if "var_lst" not in locals() else self.var_lst[idx],
-            "var": None if "var_lst" not in params else params["var_lst"][idx],
-            "chunks": "auto" if chunks == "auto" else params["chunks_lst"][idx],
-        },
-        file_type="hazard",
-        filename=da_name,
-    )
-    return da_rp
+def create_risk_dataset(
+    params: dict,
+    rp_list: list,
+    map_name_lst: list,
+    maps,
+):
+    # order return periods and maps
+    dict_rp_name = {}
+    for rp, name in zip(rp_list, map_name_lst):
+        dict_rp_name[rp] = name
+    sorted_rp = sorted(rp_list, reverse=False)
+    dict_rp_name = {key: dict_rp_name[key] for key in sorted_rp}
+
+    sorted_maps = []
+    sorted_names = []
+
+    for key, value in dict_rp_name.items():
+        map_ordered = maps[value].rename(str(key))
+        sorted_maps.append(map_ordered)
+        sorted_names.append(value)
+
+    da = xr.merge(sorted_maps)
+
+    return da, sorted_rp, sorted_names
\ No newline at end of file
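A usage sketch for the new create_risk_dataset with toy in-memory maps (params is not used by the current implementation, so an empty dict suffices):

import numpy as np
import xarray as xr
from hydromt_fiat.workflows.hazard import create_risk_dataset

maps = {
    name: xr.DataArray(np.zeros((2, 2)), dims=("y", "x"), name=name)
    for name in ["RP_50_maps", "RP_1_maps", "RP_10_maps"]
}
da, sorted_rp, sorted_names = create_risk_dataset(
    params={}, rp_list=[50, 1, 10], map_name_lst=list(maps), maps=maps
)
print(sorted_rp)  # [1, 10, 50]
print(list(da))   # one band per return period: "1", "10", "50"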
r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Current_prob_event_set_combined_doNothing_withSeaWall_RP=10_max_flood_depth.tif", + r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Current_prob_event_set_combined_doNothing_withSeaWall_RP=100_max_flood_depth.tif", + ], + "map_type": "water_depth", + "rp": None, + "crs": None, + "nodata": None, + "var": None, + "risk_output": True, + } + } + }, + + "event_map_sfincs": { + "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml", + "dir": "test_event_map_sfincs", + "configuration": { + "setup_hazard": { + "map_fn": [ + r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\overland\sfincs_map.nc" + ], + "map_type": "water_depth", + "rp": None, + "crs": None, + "nodata": -99999, + "var": "zsmax", + "risk_output": False, + } + } + }, + + "event_map_sfincs_phanos": { + "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml", + "dir": "test_event_map_sfincs_phanos", + "configuration": { + "setup_hazard": { + "map_fn": [ + r"C:/Users/fuentesm/CISNE/Deltares/FloodAdapt/tests/test_database/charleston/output/simulations/current_extreme12ft_no_measures/overland/sfincs_map.nc" + ], + "map_type": "water_depth", + "rp": None, + "crs": None, + "nodata": -99999, + "var": "zsmax", + "risk_output": False, + } + } + }, } @@ -58,6 +153,19 @@ def test_hazard(case): if root.exists(): shutil.rmtree(root) + logger = setuplog("hydromt_fiat", log_level=10) + data_catalog_yml = str(_cases[case]["data_catalogue"]) + + fm = FiatModel(root=root, mode="w", data_libs=[data_catalog_yml], logger=logger) + region = fm.data_catalog.get_geodataframe("region", variables=None) + + fm.build(region={"geom": region}, opt=_cases[case]["configuration"]) + fm.write() + + # Check if the hazard folder exists + assert root.joinpath("hazard").exists() + + # for these test data sfincs output data is required in local files # uncomment to test event analysis from sfincs output # mode = "single" @@ -94,16 +202,4 @@ def test_hazard(case): # "name_catalog": None, # "risk_output": risk_output, # } - # } - - logger = setuplog("hydromt_fiat", log_level=10) - data_catalog_yml = str(_cases[case]["data_catalogue"]) - - fm = FiatModel(root=root, mode="w", data_libs=[data_catalog_yml], logger=logger) - region = fm.data_catalog.get_geodataframe("region", variables=None) - - fm.build(region={"geom": region}, opt=_cases[case]["configuration"]) - fm.write() - - # Check if the hazard folder exists - assert root.joinpath("hazard").exists() + # } \ No newline at end of file From c7480cf7d407d9a94a2b0f417b18f6caab3d4ebd Mon Sep 17 00:00:00 2001 From: Mares2022 Date: Wed, 4 Oct 2023 16:19:15 +0200 Subject: [PATCH 04/10] Update tests for hazard set up Update tests for hazard set up --- hydromt_fiat/fiat.py | 5 ++-- tests/test_integrations_hazard.py | 41 ++++++++++++++++++++++--------- 2 files changed, 32 insertions(+), 14 deletions(-) diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py index d0d07069..af97b7ee 100644 --- a/hydromt_fiat/fiat.py +++ b/hydromt_fiat/fiat.py @@ -388,8 +388,9 @@ def setup_hazard( # sfincs_model.write_raster("results.zsmax", compress="LZW") # Convert to units of the exposure data if required - # if self.exposure.unit != da.units: #TODO: self.exposure.units is not definded in this test - # da = da * unit_conversion_factor + if self.exposure in locals() or self.exposure in globals(): # change to be sure that the unit information is available from 
+                if hasattr(self, "exposure") and self.exposure is not None:  # change to be sure that the unit information is available from the exposure dataset
+                    if self.exposure.unit != da.units:
+                        da = da * unit_conversion_factor
diff --git a/tests/test_integrations_hazard.py b/tests/test_integrations_hazard.py
index 800e3b94..5289d583 100644
--- a/tests/test_integrations_hazard.py
+++ b/tests/test_integrations_hazard.py
@@ -56,8 +56,6 @@
             "setup_hazard": {
                 "map_fn": [
                     r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP1_Final.tif",
-                    # r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP10_Final.tif",
-                    # r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP50_Final.tif",
                 ],
                 "map_type": "water_depth",
@@ -67,14 +65,33 @@
             }
         }
     },

-    "event_kathryn": {
+    "risk_map_geotiffs": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_kathryn",
+        "dir": "test_risk_map_geotiffs",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
-                    # r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\kingTide_SLR_max_flood_depth.tif",
-                    r"C:\Users\fuentesm\CISNE\data_base\Current_prob_event_set_combined_doNothing_withSeaWall_RP=1_max_flood_depth.tif",
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP1_Final.tif",
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP10_Final.tif",
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\Swell_Majuro_case_SW_slr_100_RP50_Final.tif",
                 ],
                 "map_type": "water_depth",
                 "rp": None,
                 "crs": None,
                 "nodata": None,
                 "var": None,
-                "risk_output": False,
+                "risk_output": True,
             }
         }
     },

+    "event_map_geotiff_kath": {
+        "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
+        "dir": "test_event_map_geotiff_kath",
+        "configuration": {
+            "setup_hazard": {
+                "map_fn": [
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\kingTide_SLR_max_flood_depth.tif",
+                ],
+                "map_type": "water_depth",
+                "rp": None,
+                "crs": None,
+                "nodata": None,
+                "var": None,
+                "risk_output": False,
+            }
+        }
+    },

-    "risk_kathryn": {
+    "risk_map_geotiff_kath": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_risk_kathryn",
+        "dir": "test_risk_map_geotiff_kath",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [

-    "event_map_sfincs": {
+    "event_map_sfincs_willem": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_sfincs",
+        "dir": "test_event_map_sfincs_willem",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [

     "event_map_sfincs_phanos": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
         "dir": "test_event_map_sfincs_phanos",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
-                    r"C:/Users/fuentesm/CISNE/Deltares/FloodAdapt/tests/test_database/charleston/output/simulations/current_extreme12ft_no_measures/overland/sfincs_map.nc"
+                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\charleston\output\simulations\current_extreme12ft_no_measures\overland\sfincs_map.nc"
                 ],
                 "map_type": "water_depth",
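The unit handling introduced above amounts to scaling the hazard DataArray when its units differ from the exposure units; a toy sketch, assuming a metres-to-feet conversion factor:

import numpy as np
import xarray as xr

unit_conversion_factor = 3.28084  # hypothetical: exposure in feet, hazard in metres
da = xr.DataArray(np.array([[0.5, 1.2]]), dims=("y", "x"), attrs={"units": "m"})
if da.attrs["units"] != "ft":
    da = da * unit_conversion_factor
    da.attrs["units"] = "ft"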
From 39c1cdeb409b38cc9ab17c6042bda4151574e2ef Mon Sep 17 00:00:00 2001
From: Frederique
Date: Wed, 4 Oct 2023 17:16:23 +0200
Subject: [PATCH 05/10] Removed the unused imports and changed the names of
 the tests

---
 tests/test_integrations_hazard.py | 18 ++++++++----------
 1 file changed, 8 insertions(+), 10 deletions(-)

diff --git a/tests/test_integrations_hazard.py b/tests/test_integrations_hazard.py
index 5289d583..35f5ffe3 100644
--- a/tests/test_integrations_hazard.py
+++ b/tests/test_integrations_hazard.py
@@ -1,10 +1,8 @@
 from hydromt_fiat.fiat import FiatModel
-from hydromt.config import configread
 from hydromt.log import setuplog
 from pathlib import Path
 import pytest
 import shutil
-import os

 EXAMPLEDIR = Path(
     "P:/11207949-dhs-phaseii-floodadapt/Model-builder/Delft-FIAT/local_test_database"
@@ -87,9 +85,9 @@
         }
     },

-    "event_map_geotiff_kath": {
+    "event_map_geotiff2": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_geotiff_kath",
+        "dir": "test_event_map_geotiff2",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
@@ -105,9 +103,9 @@
         }
     },

-    "risk_map_geotiff_kath": {
+    "risk_map_geotiff2": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_risk_map_geotiff_kath",
+        "dir": "test_risk_map_geotiff2",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
@@ -125,9 +123,9 @@
         }
     },

-    "event_map_sfincs_willem": {
+    "event_map_sfincs": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_sfincs_willem",
+        "dir": "test_event_map_sfincs",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [
@@ -143,9 +141,9 @@
         }
     },

-    "event_map_sfincs_phanos": {
+    "event_map_sfincs2": {
         "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_sfincs_phanos",
+        "dir": "test_event_map_sfincs2",
         "configuration": {
             "setup_hazard": {
                 "map_fn": [

From 0de2aa09d662ade985834a4dc45094f1c37a2515 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Mon, 16 Oct 2023 09:23:11 +0200
Subject: [PATCH 06/10] Modifications made after Frederique's review

Modifications made after Frederique's review. Hydromt sfincs functions are
out of hydromt fiat now. Every dataset required for hydromt fiat can be done
in advance with hydromt sfincs.
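Since SFINCS results are no longer read inside hydromt_fiat, an output like sfincs_map.nc can be pre-processed with hydromt_sfincs beforehand; a sketch reusing the calls removed below (the model root is a placeholder):

from hydromt_sfincs import SfincsModel

sfincs_model = SfincsModel("path/to/overland", mode="r")
sfincs_model.read_results()
da = sfincs_model.results["zsmax"].isel(timemax=0).drop("timemax")  # event water depth
sfincs_model.write_raster("results.zsmax", compress="LZW")  # save as GeoTIFF for FIAT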
---
 hydromt_fiat/fiat.py             | 25 ++++++-------------------
 hydromt_fiat/workflows/hazard.py |  1 -
 2 files changed, 6 insertions(+), 20 deletions(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index af97b7ee..ad20fa76 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -375,25 +375,12 @@ def setup_hazard(
             # read maps and retrieve their attributes
             da_map_fn, da_name, da_type = read_maps(params, da_map_fn, idx)

-            if da_map_fn.stem == "sfincs_map":
-                sfincs_root = os.path.dirname(da_map_fn)
-                sfincs_model = SfincsModel(
-                    sfincs_root, mode="r", logger=self.logger
-                )
-                sfincs_model.read_results()
-                da = sfincs_model.results["zsmax"]
-                da = da.isel(timemax=0).drop("timemax")
-                # save sfincs map as GeoTIFF
-                # result_list = list(sfincs_model.results.keys())
-                # sfincs_model.write_raster("results.zsmax", compress="LZW")
-
-                # Convert to units of the exposure data if required
-                if hasattr(self, "exposure") and self.exposure is not None:  # change to be sure that the unit information is available from the exposure dataset
-                    if self.exposure.unit != da.units:
-                        da = da * unit_conversion_factor
+            da = self.data_catalog.get_rasterdataset(da_map_fn)

-            else:
-                da = self.data_catalog.get_rasterdataset(da_map_fn)
+            # Convert to units of the exposure data if required
+            if hasattr(self, "exposure") and self.exposure is not None:  # change to be sure that the unit information is available from the exposure dataset
+                if self.exposure.unit != da.units:
+                    da = da * unit_conversion_factor

             da.encoding["_FillValue"] = None
             da = da.raster.gdal_compliant()
@@ -434,7 +421,7 @@ def setup_hazard(
             self.set_grid(da)

             self.grid.attrs = {
-                "returnperiod": sorted_rp,
+                "rp": sorted_rp,
                 "type": params["map_type_lst"], #TODO: This parameter has to be changed in case that a list with different hazard types per map is provided
                 "name": sorted_names,
                 "analysis": "risk",
             }
diff --git a/hydromt_fiat/workflows/hazard.py b/hydromt_fiat/workflows/hazard.py
index b69f8a57..0dc46cf6 100644
--- a/hydromt_fiat/workflows/hazard.py
+++ b/hydromt_fiat/workflows/hazard.py
@@ -61,7 +61,6 @@ def create_lists(
     dict
         Dictionary with the parameters and list of parameters used in setup_hazard.
     """
-    #TODO: remove this function
     params = dict()

     params["map_fn"] = map_fn

From 0d461cc64718ec273437430b09290ab39fa57ae8 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Mon, 16 Oct 2023 09:42:47 +0200
Subject: [PATCH 07/10] Making hazard functions explicit after Dirk review

Making hazard functions explicit after Dirk review
---
 hydromt_fiat/fiat.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index ad20fa76..f8021a2f 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -18,7 +18,13 @@
 from . import DATADIR
 from .config import Config
 from .workflows.exposure_vector import ExposureVector
-from .workflows.hazard import *
+from .workflows.hazard import create_lists
+from .workflows.hazard import check_lists_size
+from .workflows.hazard import read_maps
+from .workflows.hazard import check_maps_metadata
+from .workflows.hazard import check_maps_rp
+from .workflows.hazard import check_map_uniqueness
+from .workflows.hazard import create_risk_dataset
 from .workflows.social_vulnerability_index import SocialVulnerabilityIndex
 from .workflows.vulnerability import Vulnerability

From 3192d50f59276bf4371cb7e6814294baa3a743d4 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Mon, 16 Oct 2023 10:08:03 +0200
Subject: [PATCH 08/10] Change of .rio for .raster hydromt core function

Change of .rio for .raster hydromt core function
---
 hydromt_fiat/fiat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index f8021a2f..25372898 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -456,7 +456,7 @@ def setup_hazard(
         self.set_config(
             "hazard.crs",
             [
-                "EPSG:" + str((self.maps[hazard_map].rio.crs.to_epsg()))
+                "EPSG:" + str((self.maps[hazard_map].raster.crs.to_epsg()))
                 for hazard_map in self.maps.keys()
             ][0] if not risk_output else
             [
                 "EPSG:" + str((self.crs.to_epsg()))
             ][0]
             ,
         )

From 26a702a0361f4d8cb2886c770daf9c7233208bcf Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Mon, 16 Oct 2023 10:14:00 +0200
Subject: [PATCH 09/10] Adding parameter geom when reading raster data

Adding parameter geom when reading raster data. It requires that self.region
is always available.
---
 hydromt_fiat/fiat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hydromt_fiat/fiat.py b/hydromt_fiat/fiat.py
index 25372898..66b08042 100644
--- a/hydromt_fiat/fiat.py
+++ b/hydromt_fiat/fiat.py
@@ -381,7 +381,7 @@ def setup_hazard(
             # read maps and retrieve their attributes
             da_map_fn, da_name, da_type = read_maps(params, da_map_fn, idx)

-            da = self.data_catalog.get_rasterdataset(da_map_fn)
+            da = self.data_catalog.get_rasterdataset(da_map_fn, geom=self.region)
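The geom argument added in this commit clips the raster read to the model region; standalone, the call pattern looks like this (catalog and map path are placeholders):

from hydromt import DataCatalog

data_catalog = DataCatalog(data_libs=["fiat_catalog.yml"])
region = data_catalog.get_geodataframe("region", variables=None)
da = data_catalog.get_rasterdataset("hazard_map.tif", geom=region)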
From 00cecae1a0ad0fedd43c9e36df6ebdbba6062bf7 Mon Sep 17 00:00:00 2001
From: Mares2022
Date: Mon, 16 Oct 2023 10:18:00 +0200
Subject: [PATCH 10/10] Removing tests using sfincs_map.nc

Removing tests using sfincs_map.nc. These maps are not processed through
hydromt fiat anymore.
---
 tests/test_integrations_hazard.py | 36 ------------------------------
 1 file changed, 36 deletions(-)

diff --git a/tests/test_integrations_hazard.py b/tests/test_integrations_hazard.py
index 35f5ffe3..5992b9e6 100644
--- a/tests/test_integrations_hazard.py
+++ b/tests/test_integrations_hazard.py
@@ -122,42 +122,6 @@
             }
         }
     },
-
-    "event_map_sfincs": {
-        "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_sfincs",
-        "configuration": {
-            "setup_hazard": {
-                "map_fn": [
-                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\overland\sfincs_map.nc"
-                ],
-                "map_type": "water_depth",
-                "rp": None,
-                "crs": None,
-                "nodata": -99999,
-                "var": "zsmax",
-                "risk_output": False,
-            }
-        }
-    },
-
-    "event_map_sfincs2": {
-        "data_catalogue": EXAMPLEDIR / "fiat_catalog.yml",
-        "dir": "test_event_map_sfincs2",
-        "configuration": {
-            "setup_hazard": {
-                "map_fn": [
-                    r"P:\11207949-dhs-phaseii-floodadapt\Model-builder\Delft-FIAT\local_test_database\test_RP_floodmaps\charleston\output\simulations\current_extreme12ft_no_measures\overland\sfincs_map.nc"
-                ],
-                "map_type": "water_depth",
-                "rp": None,
-                "crs": None,
-                "nodata": -99999,
-                "var": "zsmax",
-                "risk_output": False,
-            }
-        }
-    },
 }