-
Notifications
You must be signed in to change notification settings - Fork 73
/
test_onprem_download.py
161 lines (141 loc) · 5.64 KB
/
test_onprem_download.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
# package imports
import logging
import os
import random
import shutil
import unittest
from pathlib import Path
import earthaccess
import pytest
from earthaccess import Auth, DataCollections, DataGranules, Store
# Module-level logger shared by all tests in this file.
logger = logging.getLogger(__name__)
# Per-DAAC integration-test configuration:
#   collections_count / granules_count   - how many results to request from CMR
#   *_sample_size                        - how many to randomly sample and exercise
#   granules_max_size_mb                 - per-granule size cap (MB) so downloads stay fast
daacs_list = [
    {
        "short_name": "NSIDC",
        "collections_count": 50,
        "collections_sample_size": 3,
        "granules_count": 100,
        "granules_sample_size": 2,
        "granules_max_size_mb": 100,
    },
    {
        "short_name": "GES_DISC",
        "collections_count": 100,
        "collections_sample_size": 2,
        "granules_count": 100,
        "granules_sample_size": 2,
        "granules_max_size_mb": 130,
    },
    {
        "short_name": "LPDAAC",
        "collections_count": 100,
        "collections_sample_size": 2,
        "granules_count": 100,
        "granules_sample_size": 2,
        "granules_max_size_mb": 100,
    },
    {
        "short_name": "ORNLDAAC",
        "collections_count": 100,
        "collections_sample_size": 3,
        "granules_count": 100,
        "granules_sample_size": 2,
        "granules_max_size_mb": 50,
    },
]
# A bare TestCase instance gives us unittest-style assert helpers at module level.
assertions = unittest.TestCase("__init__")
# we need to use a valid EDL credential
# Fail fast (at import time) if Earthdata Login credentials are not in the environment.
assertions.assertTrue("EARTHDATA_USERNAME" in os.environ)
assertions.assertTrue("EARTHDATA_PASSWORD" in os.environ)
# Authenticate once for the whole module; all tests share this session.
auth = Auth().login(strategy="environment")
assertions.assertTrue(auth.authenticated)
logger.info(f"Current username: {os.environ['EARTHDATA_USERNAME']}")
logger.info(f"earthaccess version: {earthaccess.__version__}")
# Store handles the actual HTTPS downloads using the authenticated session.
store = Store(auth)
def get_sample_granules(granules, sample_size, max_granule_size):
    """Randomly select up to ``sample_size`` distinct granules small enough to download.

    Parameters
    ----------
    granules : sequence of granules exposing ``.size()`` (size in MB)
    sample_size : int
        Maximum number of granules to return.
    max_granule_size : number
        Per-granule size cap in MB; larger granules are skipped.

    Returns
    -------
    tuple[list, float]
        The sampled granules and their total size in MB (rounded to 2 decimals).
        May contain fewer than ``sample_size`` granules if not enough fit the cap.
    """
    # Shuffle a copy and walk it once: unlike repeated random.sample(granules, 1),
    # this never picks the same granule twice and always terminates after one pass.
    candidates = list(granules)
    random.shuffle(candidates)
    files_to_download = []
    total_size = 0
    for g in candidates:
        if g.size() > max_granule_size:
            # Skip granules over the cap to keep the integration test fast.
            continue
        files_to_download.append(g)
        total_size += g.size()
        if len(files_to_download) >= sample_size:
            break
    return files_to_download, round(total_size, 2)
def supported_collection(data_links):
    """Return True when none of the data links point at PODAAC Drive.

    PODAAC Drive URLs are not supported by the download machinery, so any
    collection exposing one is considered unsupported.
    """
    return all("podaac-tools.jpl.nasa.gov/drive" not in link for link in data_links)
@pytest.mark.parametrize("daac", daacs_list)
def test_earthaccess_can_download_onprem_collection_granules(daac):
    """Tests that we can download on-prem (non cloud-hosted) granules using HTTPS links.

    For each configured DAAC: query non-cloud-hosted collections, sample a few,
    sample small granules from each, download them with ``store.get`` and compare
    the bytes on disk against the sizes reported by CMR.
    """
    daac_shortname = daac["short_name"]
    collections_count = daac["collections_count"]
    collections_sample_size = daac["collections_sample_size"]
    granules_count = daac["granules_count"]
    granules_sample_size = daac["granules_sample_size"]
    granules_max_size = daac["granules_max_size_mb"]
    # cloud_hosted(False) restricts the query to on-prem (DAAC-served) collections.
    collection_query = DataCollections().data_center(daac_shortname).cloud_hosted(False)
    hits = collection_query.hits()
    # Fixed log message: this query is for on-prem collections, not cloud-hosted ones.
    logger.info(f"On-prem collections for {daac_shortname}: {hits}")
    collections = collection_query.get(collections_count)
    # >= is sufficient: random.sample only needs the population to be at least sample_size.
    assertions.assertGreaterEqual(len(collections), collections_sample_size)
    # We sample n on-prem collections from the results
    random_collections = random.sample(collections, collections_sample_size)
    logger.info(f"Sampled {len(random_collections)} collections")
    for collection in random_collections:
        concept_id = collection.concept_id()
        granule_query = DataGranules().concept_id(concept_id)
        total_granules = granule_query.hits()
        granules = granule_query.get(granules_count)
        assertions.assertTrue(len(granules) > 0, "Could not fetch granules")
        assertions.assertTrue(isinstance(granules[0], earthaccess.results.DataGranule))
        data_links = granules[0].data_links()
        if not supported_collection(data_links):
            logger.warning(f"PODAAC DRIVE is not supported at the moment: {data_links}")
            continue
        local_path = f"./tests/integration/data/{concept_id}"
        granules_to_download, total_size_cmr = get_sample_granules(
            granules, granules_sample_size, granules_max_size
        )
        if len(granules_to_download) == 0:
            logger.debug(
                f"Skipping {concept_id}, granule size exceeds configured max size"
            )
            continue
        logger.info(
            f"Testing {concept_id}, granules in collection: {total_granules}, "
            f"download size(MB): {total_size_cmr}"
        )
        # We are testing this method
        downloaded_results = store.get(granules_to_download, local_path=local_path)
        assertions.assertTrue(isinstance(downloaded_results, list))
        # Compare against the actual sample: get_sample_granules may legitimately
        # return fewer than granules_sample_size when granules exceed the size cap.
        # NOTE(review): assumes one downloaded file per granule — confirm for
        # multi-file granules.
        assertions.assertTrue(len(downloaded_results) == len(granules_to_download))
        path = Path(local_path)
        assertions.assertTrue(path.is_dir())
        # test that we downloaded the mb reported by CMR
        total_mb_downloaded = round(
            (sum(file.stat().st_size for file in path.rglob("*")) / 1024**2), 2
        )
        # clean the directory
        shutil.rmtree(path)
        # test that we could download the data
        if total_mb_downloaded <= 0:
            logger.warning(f"earthaccess could not download {concept_id}")
        if total_mb_downloaded != total_size_cmr:
            logger.warning(
                f"Warning: {concept_id} downloaded size {total_mb_downloaded}MB is "
                f"different from the size reported by CMR: {total_size_cmr}MB"
            )