diff --git a/unfold/data_cleaning.py b/unfold/data_cleaning.py
index 15d0d66..e7b122f 100644
--- a/unfold/data_cleaning.py
+++ b/unfold/data_cleaning.py
@@ -200,7 +200,7 @@ def check_for_duplicates(db: List[dict], data: List[dict]) -> List[dict]:
         (x["name"].lower(), x["reference product"].lower(), x["location"])
         for x in data
         if (x["name"].lower(), x["reference product"].lower(), x["location"])
-            in db_names
+        in db_names
     ]
 
     if len(already_exist) > 0:
@@ -328,9 +328,9 @@ def correct_fields_format(data: list, name: str) -> list:
                 dataset["parameters"] = [dataset["parameters"]]
 
             if (
-                    dataset["parameters"] is None
-                    or dataset["parameters"] == {}
-                    or dataset["parameters"] == []
+                dataset["parameters"] is None
+                or dataset["parameters"] == {}
+                or dataset["parameters"] == []
             ):
                 del dataset["parameters"]
 
@@ -377,8 +377,8 @@ def check_mandatory_fields(data: list) -> list:
         for field in dataset_fields:
             if field not in dataset:
                 if (
-                        field in ["reference product", "location", "unit", "name"]
-                        and "exchanges" in dataset
+                    field in ["reference product", "location", "unit", "name"]
+                    and "exchanges" in dataset
                 ):
                     for exc in dataset["exchanges"]:
                         if exc["type"] == "production":
diff --git a/unfold/fold.py b/unfold/fold.py
index 7403dfd..da854ef 100644
--- a/unfold/fold.py
+++ b/unfold/fold.py
@@ -21,10 +21,10 @@
 from . import __version__
 from .data_cleaning import (
     DATA_DIR,
+    check_commonality_between_databases,
     check_mandatory_fields,
     get_biosphere_code,
     get_outdated_flows,
-    check_commonality_between_databases,
 )
 
 DIR_DATAPACKAGE_TEMP = DATA_DIR / "temp"
diff --git a/unfold/unfold.py b/unfold/unfold.py
index ed384aa..0f5561d 100644
--- a/unfold/unfold.py
+++ b/unfold/unfold.py
@@ -627,7 +627,6 @@ def build_superstructure_database(self, matrix: np.ndarray) -> list:
     def build_single_databases(
         self, matrix, databases_to_build: List[dict]
     ) -> list[list[dict]]:
-
         """
         Generate a list of single databases for each scenario specified in `databases_to_build`.