Skip to content

Commit

Permalink
Changes 'datasets' to 'dataset'
Browse files Browse the repository at this point in the history
  • Loading branch information
kim committed Nov 8, 2023
1 parent 076ecdc commit 0eda5ad
Show file tree
Hide file tree
Showing 9 changed files with 26 additions and 26 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
------
## [v6.6.4](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.3...v6.6.4)
### Added
- Adds new `datasets` keyword to `search()` as an alternative to `platform`. Allows users to get results from multiple platforms at once in a single page.
- Adds new `dataset` keyword to `search()` as an alternative to `platform`. Allows users to get results from multiple platforms at once in a single page.

### Changed
- Changes `CMR_FORMAT_EXT` constant from `umm_json_v1_4` to `umm_json`, umm returned from CMR will now be in latest umm format by default
Expand Down
2 changes: 1 addition & 1 deletion asf_search/ASFSearchOptions/validator_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def validate(key, value):
'instrument': parse_string,
'collections': parse_string_list,
'temporalBaselineDays': parse_string_list,
'datasets': parse_string_list,
'dataset': parse_string_list,
'absoluteBurstID': parse_int_list,
'relativeBurstID': parse_int_list,
'fullBurstID': parse_string_list,
Expand Down
2 changes: 1 addition & 1 deletion asf_search/CMR/subquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]:
if params.get('product_list') is not None:
params['product_list'] = chunk_list(params['product_list'], CMR_PAGE_SIZE)

list_param_names = ['platform', 'season', 'collections', 'datasets'] # these parameters will dodge the subquery system
list_param_names = ['platform', 'season', 'collections', 'dataset'] # these parameters will dodge the subquery system
skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL

params = dict([ (k, v) for k, v in params.items() if k not in skip_param_names ])
Expand Down
10 changes: 5 additions & 5 deletions asf_search/CMR/translate.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,18 +48,18 @@ def translate_opts(opts: ASFSearchOptions) -> list:
if any(key in dict_opts for key in ['start', 'end', 'season']):
dict_opts = fix_date(dict_opts)

if 'datasets' in dict_opts:
if 'dataset' in dict_opts:
if 'collections' not in dict_opts:
dict_opts['collections'] = []

for dataset in dict_opts['datasets']:
for dataset in dict_opts['dataset']:
if collections_by_short_name := dataset_collections.get(dataset):
for shortName, concept_ids in collections_by_short_name.items():
for concept_ids in collections_by_short_name.values():
dict_opts['collections'].extend(concept_ids)
else:
raise ValueError(f'Could not find dataset named "{dataset}" provided for datasets keyword.')
raise ValueError(f'Could not find dataset named "{dataset}" provided for dataset keyword.')

dict_opts.pop('datasets')
dict_opts.pop('dataset')

# convert the above parameters to a list of key/value tuples
cmr_opts = []
Expand Down
2 changes: 1 addition & 1 deletion asf_search/search/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def search(
fullBurstID: Union[str, Iterable[str]] = None,
collections: Union[str, Iterable[str]] = None,
temporalBaselineDays: Union[str, Iterable[str]] = None,
datasets: Union[str, Iterable[str]] = None,
dataset: Union[str, Iterable[str]] = None,
maxResults: int = None,
opts: ASFSearchOptions = None,
) -> ASFSearchResults:
Expand Down
2 changes: 1 addition & 1 deletion asf_search/search/search_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def search_generator(
fullBurstID: Union[str, Iterable[str]] = None,
collections: Union[str, Iterable[str]] = None,
temporalBaselineDays: Union[str, Iterable[str]] = None,
datasets: Union[str, Iterable[str]] = None,
dataset: Union[str, Iterable[str]] = None,
maxResults: int = None,
opts: ASFSearchOptions = None,
) -> Generator[ASFSearchResults, None, None]:
Expand Down
8 changes: 4 additions & 4 deletions tests/pytest-config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -199,10 +199,10 @@ test_types:
required_keys: results
method: test_output_format

- For running datasets keyword tests:
required_in_title: search-datasets
required_keys: datasets
method: test_search_datasets
- For running dataset keyword tests:
required_in_title: search-dataset
required_keys: dataset
method: test_search_dataset

- For running jupyter notebook example tests:
required_keys: notebook
Expand Down
8 changes: 4 additions & 4 deletions tests/pytest-managers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from ASFSearchResults.test_ASFSearchResults import run_test_output_format, run_test_ASFSearchResults_intersection
from ASFSession.test_ASFSession import run_auth_with_cookiejar, run_auth_with_creds, run_auth_with_token, run_test_asf_session_rebuild_auth
from BaselineSearch.test_baseline_search import *
from Search.test_search import run_test_ASFSearchResults, run_test_datasets_search, run_test_search, run_test_search_http_error
from Search.test_search import run_test_ASFSearchResults, run_test_dataset_search, run_test_search, run_test_search_http_error
from Search.test_search_generator import run_test_search_generator, run_test_search_generator_multi
from CMR.test_MissionList import run_test_get_project_names

Expand Down Expand Up @@ -417,9 +417,9 @@ def test_ASFSearchResults_intersection(**kwargs) -> None:
wkt = get_resource(kwargs['test_info']['wkt'])
run_test_ASFSearchResults_intersection(wkt)

def test_search_datasets(**kwargs) -> None:
datasets = get_resource(kwargs['test_info']['datasets'])
run_test_datasets_search(datasets)
def test_search_dataset(**kwargs) -> None:
dataset = get_resource(kwargs['test_info']['dataset'])
run_test_dataset_search(dataset)

def test_serialization(**args) -> None:
test_info = args['test_info']
Expand Down
16 changes: 8 additions & 8 deletions tests/yml_tests/test_search.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,14 @@ tests:
status_code: 500
report: "Server Error: This is a Test Error"

- test-search-datasets S1 Datasets:
datasets: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1']
- test-search-dataset S1 Datasets:
dataset: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1']

- test-search-datasets S1 Datasets and non-S1:
datasets: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1', 'UAVSAR']
- test-search-dataset S1 Datasets and non-S1:
dataset: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1', 'UAVSAR']

- test-search-datasets fake dataset:
datasets: 'FAKE-DATASET-V1'
- test-search-dataset fake dataset:
dataset: 'FAKE-DATASET-V1'

- test-search-datasets S1 Datasets and fake dataset:
datasets: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1', 'FAKE-DATASET-V2']
- test-search-dataset S1 Datasets and fake dataset:
dataset: ['SENTINEL-1', 'SLC-BURST', 'OPERA-S1', 'FAKE-DATASET-V2']

0 comments on commit 0eda5ad

Please sign in to comment.