diff --git a/data/downloads/odp-ftp-greenland-ice.sh b/data/downloads/odp-ftp-greenland-ice.sh
index e9fc5f71f..228cc1693 100755
--- a/data/downloads/odp-ftp-greenland-ice.sh
+++ b/data/downloads/odp-ftp-greenland-ice.sh
@@ -23,6 +23,23 @@
 curl --silent $BASE_URL"RT_XO_2005_2009.nc" > $OUTPUT_FODLER/2009-01-01.nc
 curl --silent $BASE_URL"RT_XO_2006_2010.nc" > $OUTPUT_FODLER/2010-01-01.nc
 curl --silent $BASE_URL"AT_XO_2007_2011.nc" > $OUTPUT_FODLER/2011-01-01.nc
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/1996-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/1997-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/1998-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/1999-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2000-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2001-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2002-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2003-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2004-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2005-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2006-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2007-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2008-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2009-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2010-01-01.nc --variable SEC
+python ./data/drop-unused-vars.py --file $OUTPUT_FODLER/2011-01-01.nc --variable SEC
+
 python ./data/add-time-coordinate.py --file $OUTPUT_FODLER/1996-01-01.nc --timestamp 1996-01-01
 python ./data/add-time-coordinate.py --file $OUTPUT_FODLER/1997-01-01.nc --timestamp 1997-01-01
 python ./data/add-time-coordinate.py --file $OUTPUT_FODLER/1998-01-01.nc --timestamp 1998-01-01
diff --git a/data/drop-unused-vars.py b/data/drop-unused-vars.py
index 9299a4e2d..dc24f20d3 100644
--- a/data/drop-unused-vars.py
+++ b/data/drop-unused-vars.py
@@ -8,8 +8,12 @@
 args = parser.parse_args()
 
 ds = xr.open_dataset(args.file, decode_coords=False, decode_cf=False)
-ds_new = ds[args.variable].to_dataset()
-ds_new.attrs = ds.attrs
+
+variables_to_keep = [args.variable, 'lat', 'lon', 'grid_projection']
+
+for key in ds.data_vars:
+    if (key not in variables_to_keep):
+        ds = ds.drop_vars(names=[key])
 
 os.remove(args.file)
-ds_new.to_netcdf(args.file, format='NETCDF4_CLASSIC', mode='w')
+ds.to_netcdf(args.file, format='NETCDF4', mode='w')
diff --git a/data/layers-config.json b/data/layers-config.json
index 8ce72ec79..5118a135c 100644
--- a/data/layers-config.json
+++ b/data/layers-config.json
@@ -268,9 +268,9 @@
     },
     "flyTo": {
       "position": {
-        "height": 15000000.06,
-        "latitude": 90,
-        "longitude": 0
+        "height": 5500000,
+        "latitude": -38.5,
+        "longitude": 72.29
       },
       "orientation": {
         "heading": 0,
diff --git a/data/triggers/greenland_ice_sec.sh b/data/triggers/greenland_ice_sec.sh
index 29c1030c7..acd9faa6d 100755
--- a/data/triggers/greenland_ice_sec.sh
+++ b/data/triggers/greenland_ice_sec.sh
@@ -9,8 +9,8 @@
 MIN_LON="-90"
 MAX_LON="7.594643368591434"
 MIN_LAT="58.854580820213855"
 MAX_LAT="84.00492144822202"
-MIN="auto"
-MAX="auto"
+MIN="-5"
+MAX="5"
 MACHINE_TYPE="N1_HIGHCPU_8"
 if [ ! -f ./package.json ]; then
diff --git a/data/write-zarr.py b/data/write-zarr.py
index 0d5b43f82..0df537aad 100644
--- a/data/write-zarr.py
+++ b/data/write-zarr.py
@@ -1,5 +1,4 @@
 import sys
-import time
 import cate.ops
 import utility
 from cate.core.ds import DATA_STORE_REGISTRY
@@ -18,7 +17,6 @@
 zoom_levels = args.zoom_levels.split('-')
 
 # add local datastore from NetCDF files
-start_time = time.time()
 local_store = DATA_STORE_REGISTRY.get_data_store('local')
 ds_name = 'data'
 ds_local_name = 'local.' + ds_name
@@ -28,12 +26,7 @@
 # open dataset
 ds = cate.ops.open_dataset(ds_local_name, var_names=args.variable_id)
 data_array = ds[args.variable_id]
-try:
-    units = data_array.attrs['units']
-except KeyError:
-    units = ''
-
-print(f'Opened dataset in {time.time() - start_time}s')
+units = data_array.attrs.get('units') or data_array.attrs['Units']
 
 # get min and max values
 try:
@@ -55,10 +48,8 @@
     data_array = data_array.chunk({'lon': shape[2], 'lat': shape[1]})
 
     # write zarr file to disk
-    start_time = time.time()
     print('Writing zarr file...')
     data_array.to_dataset().to_zarr(args.output)
-    print(f'Written zarr in {time.time() - start_time}s')
 
     print('Writing world file...')
     utility.write_world_file(shape, ds.attrs)