
Commit

Merge 0a043e1 into afcce94
JoranAngevaare committed Jun 20, 2023
2 parents afcce94 + 0a043e1 commit 01e962c
Showing 10 changed files with 583 additions and 44 deletions.
55 changes: 41 additions & 14 deletions optim_esm_tools/_test_utils.py
@@ -4,35 +4,62 @@
 EXMPLE_DATA_SET = 'CMIP6/ScenarioMIP/CCCma/CanESM5/ssp585/r3i1p2f1/Amon/tas/gn/v20190429/tas_Amon_CanESM5_ssp585_r3i1p2f1_gn_201501-210012.nc'


-def get_file_from_pangeo():
+def get_file_from_pangeo(experiment_id='ssp585', refresh=True):
+    dest_folder = os.path.split(
+        get_example_data_loc().replace('ssp585', experiment_id)
+    )[0]
+    if experiment_id in ['piControl', 'historical']:
+        dest_folder = dest_folder.replace('ScenarioMIP', 'CMIP')
+    write_to = os.path.join(dest_folder, 'test.nc')
+    if os.path.exists(write_to) and not refresh:
+        print(f'already file at {write_to}')
+        return write_to
+
     from xmip.utils import google_cmip_col

-    # import cftime
     col = google_cmip_col()
-    search = col.search(
+    query = dict(
         source_id='CanESM5',
         variable_id='tas',
         table_id='Amon',
-        experiment_id='ssp585',
-        member_id=['r3i1p2f1'],
+        experiment_id=experiment_id,
     )
+    if experiment_id in ['historical', 'ssp585']:
+        query.update(dict(member_id=['r3i1p2f1']))
+    else:
+        query.update(dict(member_id=['r1i1p1f1']))
+    search = col.search(**query)

     ddict = search.to_dataset_dict(
         xarray_open_kwargs={'use_cftime': True},
     )
     data = list(ddict.values())[0]
-    # data = data.groupby('time.year').mean('time')
-    # data = data.rename(year='time')
-    data = data.mean(set(data.dims) - {'x', 'y', 'time'})
-    # data['time'] = [cftime.DatetimeNoLeap(y,1,1) for y in data['time']]
-
-    write_to = get_example_data_loc()
-    dest_folder = os.path.split(write_to)[0]
+    data = data.mean(set(data.dims) - {'x', 'y', 'lat', 'lon', 'time'})
+
     os.makedirs(dest_folder, exist_ok=True)
-    if os.path.exists(write_to):
-        print(f'already file at {write_to}')
+    write_to = os.path.join(dest_folder, 'test.nc')
     data.to_netcdf(write_to)
     return write_to


+def year_means(path, refresh=True):
+    new_dir = os.path.split(path.replace('Amon', 'AYear'))[0]
+    new_dest = os.path.join(new_dir, 'test_merged.nc')
+    if os.path.exists(new_dest) and not refresh:
+        print(f'File at {new_dest} already exists')
+        return new_dest
+    import cftime
+    import optim_esm_tools as oet
+
+    data = oet.cmip_files.io.load_glob(path)
+
+    data = data.groupby('time.year').mean('time')
+    data = data.rename(year='time')
+    data['time'] = [cftime.DatetimeNoLeap(y, 1, 1) for y in data['time']]
+
+    os.makedirs(new_dir, exist_ok=True)
+    data.to_netcdf(new_dest)
+    return new_dest
+
+
 def get_synda_loc():
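Taken together, the reworked get_file_from_pangeo (with its new experiment_id and refresh keywords) and the new year_means helper split the test-data preparation into a download step and an annual-averaging step. A minimal sketch of how the two could be chained, assuming the module path above; the chosen experiment_id and refresh values are illustrative, not part of the commit:

# Sketch only: chain the two helpers changed/added in this commit.
# The experiment_id and refresh values below are illustrative assumptions.
from optim_esm_tools._test_utils import get_file_from_pangeo, year_means

# Download (or reuse, when refresh=False) a monthly CanESM5 'tas' test file.
monthly_path = get_file_from_pangeo(experiment_id='historical', refresh=False)

# Collapse the monthly file to annual means; the result is written as
# test_merged.nc under an 'AYear' directory derived from the input path.
annual_path = year_means(monthly_path, refresh=False)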
1 change: 1 addition & 0 deletions optim_esm_tools/analyze/__init__.py
@@ -3,3 +3,4 @@

 from . import xarray_tools
 from . import clustering
+from . import region_finding
7 changes: 4 additions & 3 deletions optim_esm_tools/analyze/cmip_handler.py
@@ -103,6 +103,7 @@ def read_ds(
     min_time: ty.Optional[ty.Tuple[int, int, int]] = None,
     _ma_window: int = 10,
     _cache: bool = True,
+    _file_name: str = 'merged.nc',
     **kwargs,
 ) -> xr.Dataset:
     """Read a dataset from a folder called "base".
@@ -136,7 +137,7 @@
     if os.path.exists(post_processed_file) and _cache:
         return oet.synda_files.format_synda.load_glob(post_processed_file)

-    data_path = os.path.join(base, 'merged.nc')
+    data_path = os.path.join(base, _file_name)
     if not os.path.exists(data_path):
         warn(f'No dataset at {data_path}')
         return None
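The new _file_name argument makes read_ds look for an arbitrarily named file inside base instead of the hard-coded merged.nc, which fits the test_merged.nc files produced by year_means in _test_utils.py. A hedged sketch of the call, assuming base is a keyword argument of read_ds and that the remaining keywords keep their defaults (the folder path is a made-up example):

# Sketch only: use the new _file_name keyword of read_ds.
# The folder below is a hypothetical location holding a test_merged.nc file.
from optim_esm_tools.analyze.cmip_handler import read_ds

ds = read_ds(
    base='/tmp/cmip_test/AYear/tas/gn/v20190429',
    _file_name='test_merged.nc',  # default is 'merged.nc'
)
# read_ds returns None (with a warning) if base does not contain that file.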
@@ -178,8 +179,8 @@ def _name_cache_file(
     path = os.path.join(
         base,
         f'{variable_of_interest}'
-        f'_{min_time if min_time else ""}'
-        f'_{max_time if max_time else ""}'
+        f'_s{min_time if min_time else ""}'
+        f'_e{max_time if max_time else ""}'
         f'_ma{_ma_window}'
         f'_optimesm_v{version}.nc',
     )
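The cache-file name now tags the time bounds with explicit _s (start) and _e (end) prefixes, so caches written before and after this change cannot collide. A small illustration of the resulting pattern; the concrete values for variable_of_interest, min_time, max_time, _ma_window, and version are made up:

# Illustration only: reproduce the f-string from _name_cache_file with
# made-up inputs to show the new _s/_e naming pattern.
variable_of_interest = 'tas'
min_time = (2015, 1, 1)
max_time = (2100, 12, 31)
_ma_window = 10
version = '1.0.0'

name = (
    f'{variable_of_interest}'
    f'_s{min_time if min_time else ""}'
    f'_e{max_time if max_time else ""}'
    f'_ma{_ma_window}'
    f'_optimesm_v{version}.nc'
)
print(name)  # -> tas_s(2015, 1, 1)_e(2100, 12, 31)_ma10_optimesm_v1.0.0.nc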

0 comments on commit 01e962c
