diff --git a/mdps_ds_lib/ds_client/ds_client_user.py b/mdps_ds_lib/ds_client/ds_client_user.py index 16f4f14..3a98e10 100644 --- a/mdps_ds_lib/ds_client/ds_client_user.py +++ b/mdps_ds_lib/ds_client/ds_client_user.py @@ -93,6 +93,17 @@ def query_collections(self, limit=10): return response + def query_catalog(self): + request_url = f'{self._uds_url}catalog/' + s = requests.session() + s.trust_env = self._trust_env + response = s.get(url=request_url, headers={ + 'Authorization': f'Bearer {self._token_retriever.get_token()}', + }, verify=self._trust_env) + response.raise_for_status() + response = json.loads(response.text) + return response + def query_collections_next(self): if self.__collection_query_next_page is None: return None diff --git a/mdps_ds_lib/lib/cumulus_stac/granules_catalog.py b/mdps_ds_lib/lib/cumulus_stac/granules_catalog.py index 7ddcbc5..3e90fb9 100644 --- a/mdps_ds_lib/lib/cumulus_stac/granules_catalog.py +++ b/mdps_ds_lib/lib/cumulus_stac/granules_catalog.py @@ -9,6 +9,19 @@ class GranulesCatalog: + + @staticmethod + def standardize_stage_out_collection_id_format(current_collection_id: str): + collection_identifier_parts = current_collection_id.split(':') + if len(collection_identifier_parts) < 6: + raise ValueError( + f'invalid collection ID. Need to be in ::::: but it is {current_collection_id}') + for i in range(5): + collection_identifier_parts[i] = collection_identifier_parts[i].upper() + current_collection_id = ':'.join(collection_identifier_parts) + current_collection_id = f'{current_collection_id}___001' if '___' not in current_collection_id else current_collection_id + return current_collection_id + @staticmethod def get_unity_formatted_collection_id(current_collection_id: str, project_venue_set: tuple): if current_collection_id == '' or current_collection_id is None: @@ -16,13 +29,11 @@ def get_unity_formatted_collection_id(current_collection_id: str, project_venue_ collection_identifier_parts = current_collection_id.split(':') if len(collection_identifier_parts) >= 6: LOGGER.debug(f'current_collection_id is assumed to be in UNITY format: {current_collection_id}') - current_collection_id = f'{current_collection_id}___001' if '___' not in current_collection_id else current_collection_id - return current_collection_id - + return GranulesCatalog.standardize_stage_out_collection_id_format(current_collection_id) LOGGER.info(f'current_collection_id is not UNITY formatted ID: {current_collection_id}') if project_venue_set[0] is None or project_venue_set[1] is None: raise ValueError(f'missing project or venue in ENV which is needed due to current_collection_id not UNITY format: {project_venue_set}') - new_collection = f'URN:NASA:UNITY:{project_venue_set[0]}:{project_venue_set[1]}:{current_collection_id}' + new_collection = f'URN:NASA:UNITY:{project_venue_set[0].upper()}:{project_venue_set[1].upper()}:{current_collection_id}' new_collection = f'{new_collection}___001' if '___' not in new_collection else new_collection LOGGER.info(f'UNITY formatted ID: {new_collection}') return new_collection diff --git a/mdps_ds_lib/stage_in_out/upload_arbitrary_files_as_granules.py b/mdps_ds_lib/stage_in_out/upload_arbitrary_files_as_granules.py index 5bb3b2e..65cc627 100644 --- a/mdps_ds_lib/stage_in_out/upload_arbitrary_files_as_granules.py +++ b/mdps_ds_lib/stage_in_out/upload_arbitrary_files_as_granules.py @@ -51,7 +51,7 @@ def generate_sample_stac(self, filepath: str): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": TimeUtils.get_current_time(), @@ -84,10 +84,11 @@ def execute_job(self, job_obj, lock) -> bool: s3_url = self.__s3.upload(job_obj, self.__staging_bucket, f'{self.__collection_id}/{sample_stac_item.id}', self.__delete_files) updating_assets[os.path.basename(s3_url)] = s3_url uploading_current_granule_stac = f'{s3_url}.stac.json' - self.__s3.set_s3_url(uploading_current_granule_stac) - self.__s3.upload_bytes(json.dumps(sample_stac_item.to_dict(False, False),indent=4).encode()) updating_assets[os.path.basename(uploading_current_granule_stac)] = uploading_current_granule_stac self.__gc.update_assets_href(sample_stac_item, updating_assets) + + self.__s3.set_s3_url(uploading_current_granule_stac) + self.__s3.upload_bytes(json.dumps(sample_stac_item.to_dict(False, False),indent=4).encode()) self.__result_list.put(sample_stac_item.to_dict(False, False)) except Exception as e: sample_stac_item.properties['upload_error'] = str(e) @@ -116,6 +117,7 @@ def upload(self, **kwargs) -> str: self._set_props_from_env() if self._collection_id is None: raise ValueError(f'missing COLLECTION ID in ENV') + self._collection_id = GranulesCatalog.standardize_stage_out_collection_id_format(self._collection_id) output_dir = os.environ.get(self.OUTPUT_DIRECTORY) if not FileUtils.dir_exist(output_dir): raise ValueError(f'OUTPUT_DIRECTORY: {output_dir} does not exist') diff --git a/tests/integration_tests/test_docker_stage_out.py b/tests/integration_tests/test_docker_stage_out.py index 2851646..05515d0 100644 --- a/tests/integration_tests/test_docker_stage_out.py +++ b/tests/integration_tests/test_docker_stage_out.py @@ -28,9 +28,9 @@ class TestDockerStageOut(TestCase): def setUp(self) -> None: super().setUp() - self.tenant = 'UDS_MY_LOCAL_ARCHIVE_TEST' # 'uds_local_test' # 'uds_sandbox' - self.tenant_venue = 'DEV' # 'DEV1' # 'dev' - self.collection_name = 'UDS_UNIT_COLLECTION' # 'uds_collection' # 'sbx_collection' + self.tenant = 'UDS_MY_LOCAL_ARCHIVE_TEST'.lower() # 'uds_local_test' # 'uds_sandbox' + self.tenant_venue = 'DEV'.lower() # 'DEV1' # 'dev' + self.collection_name = 'UDS_UNIT_COLLECTION'.lower() # 'uds_collection' # 'sbx_collection' self.collection_version = '24.10.21.12.00'.replace('.', '') # '2402011200' def not_in_used_test_03_upload(self): @@ -293,7 +293,7 @@ def not_in_used_test_03_upload_catalog(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -469,7 +469,7 @@ def test_03_upload_complete_catalog(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -521,10 +521,10 @@ def test_03_upload_complete_catalog(self): result_key_prefix = result_key.split('.')[0] self.assertTrue(f'{result_key_prefix}.nc.cas' in upload_result['assets'], f'missing assets#metadata asset: {result_key_prefix}.nc.cas') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc.cas'], 'missing assets#metadata__cas#href') - self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc.cas']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA_0')) + self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc.cas']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA_0')) self.assertTrue(f'{result_key_prefix}.nc' in upload_result['assets'], f'missing assets#data: {result_key_prefix}.nc') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc'], 'missing assets#data#href') - self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA_0')) + self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA_0')) """ Example output: { @@ -544,7 +544,7 @@ def test_03_upload_complete_catalog(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ @@ -698,7 +698,7 @@ def test_03_upload_complete_catalog_role_as_key(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -750,10 +750,10 @@ def test_03_upload_complete_catalog_role_as_key(self): self.assertEqual(result_key, 'data', f'worng asset key: {result_key}') self.assertTrue(f'metadata1' in upload_result['assets'], f'missing assets#metadata asset: metadata1') self.assertTrue('href' in upload_result['assets'][f'metadata1'], 'missing assets#metadata__cas#href') - self.assertTrue(upload_result['assets'][f'metadata1']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA/')) + self.assertTrue(upload_result['assets'][f'metadata1']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA___001/')) self.assertTrue(f'data' in upload_result['assets'], f'missing assets#data: data') self.assertTrue('href' in upload_result['assets'][f'data'], 'missing assets#data#href') - self.assertTrue(upload_result['assets'][f'data']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA/')) + self.assertTrue(upload_result['assets'][f'data']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA___001/')) """ Example output: { @@ -773,7 +773,7 @@ def test_03_upload_complete_catalog_role_as_key(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ @@ -927,7 +927,7 @@ def test_13_upload_complete_catalog_role_as_key_dry_run_1(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -994,7 +994,7 @@ def test_13_upload_complete_catalog_role_as_key_dry_run_2(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1061,7 +1061,7 @@ def test_13_upload_complete_catalog_role_as_key_dry_run_3(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1127,7 +1127,7 @@ def test_13_upload_complete_catalog_role_as_key_dry_run_4(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1291,7 +1291,7 @@ def test_03_02_upload_complete_catalog(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1343,10 +1343,10 @@ def test_03_02_upload_complete_catalog(self): result_key_prefix = result_key.split('.')[0] self.assertTrue(f'{result_key_prefix}.nc.cas' in upload_result['assets'], f'missing assets#metadata asset: {result_key_prefix}.nc.cas') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc.cas'], 'missing assets#metadata__cas#href') - self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc.cas']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA')) + self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc.cas']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA')) self.assertTrue(f'{result_key_prefix}.nc' in upload_result['assets'], f'missing assets#data: {result_key_prefix}.nc') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc'], 'missing assets#data#href') - self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"]}:{os.environ["VENUE"]}:NA')) + self.assertTrue(upload_result['assets'][f'{result_key_prefix}.nc']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/URN:NASA:UNITY:{os.environ["PROJECT"].upper()}:{os.environ["VENUE"].upper()}:NA')) """ Example output: { @@ -1366,7 +1366,7 @@ def test_03_02_upload_complete_catalog(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ @@ -1451,7 +1451,7 @@ def test_03_02_upload_complete_catalog_missing_real_files(self): return def test_03_03_upload_auxiliary_files(self): - temp_collection_id = f'URN:NASA:UNITY:{self.tenant}:{self.tenant_venue}:{self.collection_name}___{self.collection_version}' + temp_collection_id = f'urn:nasa:unity:{self.tenant}:{self.tenant_venue}:{self.collection_name}___{self.collection_version}' os.environ['GRANULES_UPLOAD_TYPE'] = 'UPLOAD_AUXILIARY_FILE_AS_GRANULE' os.environ['COLLECTION_ID'] = temp_collection_id os.environ['STAGING_BUCKET'] = 'uds-sbx-cumulus-staging' @@ -1518,10 +1518,12 @@ def test_03_03_upload_auxiliary_files(self): print(f'example feature: {upload_result}') self.assertTrue('assets' in upload_result, 'missing assets') result_key = [k for k in upload_result['assets'].keys()][0] - self.assertTrue(result_key.startswith('test_file'), f'worng asset key: {result_key}') + temp_collection_id_fixed = f'URN:NASA:UNITY:{self.tenant.upper()}:{self.tenant_venue.upper()}:{self.collection_name}___{self.collection_version}' + self.assertEqual(upload_result['collection'], temp_collection_id_fixed, f'wrong bbox') + self.assertTrue(result_key.startswith('test_file'), f'wrong asset key: {result_key}') self.assertTrue(f'{result_key}.stac.json' in upload_result['assets'], f'missing assets#metadata asset: test_file_0.json') self.assertTrue('href' in upload_result['assets'][f'{result_key}.stac.json'], 'missing assets#metadata__cas#href') - self.assertTrue(upload_result['assets'][f'{result_key}.stac.json']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/{os.environ["COLLECTION_ID"]}/{os.environ["COLLECTION_ID"]}:test_file_'), f"wrong HREF (no S3?): upload_result['assets'][f'{result_key}.stac.json']['href']") + self.assertTrue(upload_result['assets'][f'{result_key}.stac.json']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/{temp_collection_id_fixed}/{temp_collection_id_fixed}:test_file_'), f"wrong HREF (no S3?): upload_result['assets'][f'{result_key}.stac.json']['href']") """ Example output: { @@ -1541,12 +1543,25 @@ def test_03_03_upload_auxiliary_files(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ s3 = AwsS3() + s3.set_s3_url(upload_result['assets'][f'{result_key}.stac.json']['href']) + local_path = s3.download(tmp_dir_name, 's3_metadata_file.json') + upload_result = FileUtils.read_json(local_path) + self.assertEqual(upload_result['bbox'], [-180, -90, 180, 90], f'wrong bbox') + self.assertTrue('assets' in upload_result, 'missing assets') + result_key = [k for k in upload_result['assets'].keys()][0] + self.assertTrue(result_key.startswith('test_file'), f'wrong asset key: {result_key}') + self.assertTrue(f'{result_key}.stac.json' in upload_result['assets'], f'missing assets#metadata asset: test_file_0.json') + self.assertTrue('href' in upload_result['assets'][f'{result_key}.stac.json'], 'missing assets#metadata__cas#href') + self.assertTrue(upload_result['assets'][f'{result_key}.stac.json']['href'].startswith(f's3://{os.environ["STAGING_BUCKET"]}/{temp_collection_id_fixed}/{temp_collection_id_fixed}:test_file_'), f"wrong HREF (no S3?): upload_result['assets'][f'{result_key}.stac.json']['href']") + + s3_keys = [k for k in s3.get_child_s3_files(os.environ['STAGING_BUCKET'], f"stage_out/successful_features_{starting_time}")] + s3_keys = sorted(s3_keys) print(f's3_keys: {s3_keys}') self.assertTrue(len(s3_keys) > 0, f'empty files in S3') @@ -1559,7 +1574,9 @@ def test_03_03_upload_auxiliary_files(self): def test_03_upload_complete_catalog_invalid_bucket(self): os.environ['VERIFY_SSL'] = 'FALSE' - os.environ['COLLECTION_ID'] = 'NEW_COLLECTION_EXAMPLE_L1B___9' + os.environ['PROJECT'] = 'LOCAL' + os.environ['VENUE'] = 'UNIT_TEST' + # os.environ['COLLECTION_ID'] = 'NEW_COLLECTION_EXAMPLE_L1B___9' os.environ['STAGING_BUCKET'] = 'invalid_bucket' os.environ['GRANULES_SEARCH_DOMAIN'] = 'UNITY' @@ -1691,7 +1708,7 @@ def test_03_upload_complete_catalog_invalid_bucket(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1750,7 +1767,7 @@ def test_03_upload_complete_catalog_invalid_bucket(self): self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc.cas'], 'missing assets#metadata__cas#href') self.assertTrue(f'{result_key_prefix}.nc' in upload_result['assets'], f'missing assets#data: {result_key_prefix}.nc') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc'], 'missing assets#data#href') - self.assertTrue(FileUtils.file_exist(os.environ['OUTPUT_FILE']), f'missing output file') + # self.assertTrue(FileUtils.file_exist(os.environ['OUTPUT_FILE']), f'missing output file') """ Example output: { @@ -1770,7 +1787,7 @@ def test_03_upload_complete_catalog_invalid_bucket(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ @@ -1779,7 +1796,10 @@ def test_03_upload_complete_catalog_invalid_bucket(self): def test_03_upload_complete_catalog_missing_data(self): os.environ['VERIFY_SSL'] = 'FALSE' - os.environ['COLLECTION_ID'] = 'NEW_COLLECTION_EXAMPLE_L1B___9' + os.environ['PROJECT'] = 'LOCAL' + os.environ['VENUE'] = 'UNIT_TEST' + + # os.environ['COLLECTION_ID'] = 'NEW_COLLECTION_EXAMPLE_L1B___9' os.environ['STAGING_BUCKET'] = 'invalid_bucket' os.environ['GRANULES_SEARCH_DOMAIN'] = 'UNITY' @@ -1911,7 +1931,7 @@ def test_03_upload_complete_catalog_missing_data(self): "type": "Point", "coordinates": [0.0, 0.0] }, - bbox=[0.0, 0.0, 0.0, 0.0], + bbox=[-180, -90, 180, 90], datetime=TimeUtils().parse_from_unix(0, True).get_datetime_obj(), properties={ "start_datetime": "2016-01-31T18:00:00.009057Z", @@ -1970,7 +1990,7 @@ def test_03_upload_complete_catalog_missing_data(self): self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc.cas'], 'missing assets#metadata__cas#href') self.assertTrue(f'{result_key_prefix}.nc' in upload_result['assets'], f'missing assets#data: {result_key_prefix}.nc') self.assertTrue('href' in upload_result['assets'][f'{result_key_prefix}.nc'], 'missing assets#data#href') - self.assertTrue(FileUtils.file_exist(os.environ['OUTPUT_FILE']), f'missing output file') + # self.assertTrue(FileUtils.file_exist(os.environ['OUTPUT_FILE']), f'missing output file') """ Example output: { @@ -1990,7 +2010,7 @@ def test_03_upload_complete_catalog_missing_data(self): 'title': 'metadata cas'}, 'metadata__stac': { 'href': 's3://uds-test-cumulus-staging/NEW_COLLECTION_EXAMPLE_L1B___9/NEW_COLLECTION_EXAMPLE_L1B___9:test_file01/test_file01.nc.stac.json', 'title': 'metadata stac'}}, - 'bbox': [0.0, 0.0, 0.0, 0.0], + 'bbox': [-180, -90, 180, 90], 'stac_extensions': [], 'collection': 'NEW_COLLECTION_EXAMPLE_L1B___9'}]} """ diff --git a/tests/mdps_ds_lib/ds_client/test_ds_client_unity.py b/tests/mdps_ds_lib/ds_client/test_ds_client_unity.py index 72ab337..35247c5 100644 --- a/tests/mdps_ds_lib/ds_client/test_ds_client_unity.py +++ b/tests/mdps_ds_lib/ds_client/test_ds_client_unity.py @@ -9,6 +9,28 @@ class TestDsClientAdmin(TestCase): + def test_01_admin(self): + os.environ['TRUST_ENV'] = 'TRUE' + os.environ['PASSWORD_TYPE'] = 'PARAM_STORE' + os.environ['USERNAME'] = '/unity/uds/user/wphyo/username' + os.environ['PASSWORD'] = '/unity/uds/user/wphyo/dwssap' + os.environ['CLIENT_ID'] = '71g0c73jl77gsqhtlfg2ht388c' + os.environ['COGNITO_URL'] = 'https://cognito-idp.us-west-2.amazonaws.com' + + os.environ['TOKEN_FACTORY'] = 'COGNITO' + token_retriever: TokenAbstract = TokenFactory().get_instance(os.getenv('TOKEN_FACTORY')) + client = DsClientAdmin(token_retriever, 'https://d3vc8w9zcq658.cloudfront.net', 'data-sbx') + # client.setup_database() + + client.urn = 'urn' + client.org = 'nasa' + client.project = 'unity' + client.tenant = 'UDS_LOCAL_TEST' + client.tenant_venue = 'DEV' + + client.add_admin_group(['CREATE', 'READ', 'DELETE'], 'Unity_Viewer') + return + def test_query_granules_across_collections(self): os.environ['TRUST_ENV'] = 'TRUE' os.environ['PASSWORD_TYPE'] = 'PARAM_STORE' @@ -225,3 +247,17 @@ def test_delete_single_granule(self): # urn:nasa:unity:uds_local_test:DEV1:CHRP_16_DAY_REBIN___10:SNDR.SS1330.CHIRP.20230101T0000.m06.g001.L1_J1.std.v02_48.G.200101070318_REBIN print(client.delete_single_granule()) return + + def test_query_catalog(self): + os.environ['TRUST_ENV'] = 'TRUE' + os.environ['PASSWORD_TYPE'] = 'PARAM_STORE' + os.environ['USERNAME'] = '/unity/uds/user/wphyo/username' + os.environ['PASSWORD'] = '/unity/uds/user/wphyo/dwssap' + os.environ['CLIENT_ID'] = '71g0c73jl77gsqhtlfg2ht388c' + os.environ['COGNITO_URL'] = 'https://cognito-idp.us-west-2.amazonaws.com' + + os.environ['TOKEN_FACTORY'] = 'COGNITO' + token_retriever: TokenAbstract = TokenFactory().get_instance(os.getenv('TOKEN_FACTORY')) + client = DsClientUser(token_retriever, 'https://d3vc8w9zcq658.cloudfront.net', 'data-sbx') # data-sbx' + print(json.dumps(client.query_catalog(), indent=4)) + return