Skip to content

Commit

Permalink
fix duplication
Browse files Browse the repository at this point in the history
  • Loading branch information
J.R. Angevaare committed Jul 25, 2023
1 parent 69770c5 commit e6d670c
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 247 deletions.
42 changes: 10 additions & 32 deletions optim_esm_tools/analyze/cmip_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,49 +144,27 @@ def read_ds(
warn(message)
return None

# if pre_process:
# data_set = oet.analyze.pre_process.get_preprocessed_ds(
# source=data_path,
# max_time=max_time,
# min_time=min_time,
# _ma_window=_ma_window,
# variable_id=variable_of_interest,)
# else:
# message='Not preprocessing file is dangerous, dimensions may differ wildly!'
# log.warning(message)
# data_set = oet.analyze.io.load_glob(data_path, load=load)

temp_file = os.path.join(base, 'temp_final.nc')
if pre_process:
data_path = oet.analyze.pre_process.pre_process(
data_set = oet.analyze.pre_process.get_preprocessed_ds(
source=data_path,
max_time=max_time,
min_time=min_time,
save_as=temp_file,
_ma_window=_ma_window,
variable_id=variable_of_interest,
)
else:
log.warning(
'Not preprocessing file is dangerous, dimensions may differ wildly!'
)
# At this point, if load is None, change it to True: we will have to load the
# data anyway in order to apply the transforms
load = load if load is not None else True
data_set = oet.analyze.io.load_glob(data_path, load=load)

if os.path.exists(temp_file):
# Maybe we can make this optional, but, for now, let's prevent double caching of
# res_file and temp_file
os.remove(temp_file)
message = 'Not preprocessing file is dangerous, dimensions may differ wildly!'
log.warning(message)
data_set = oet.analyze.io.load_glob(data_path, load=load)

if apply_transform:
data_set = add_conditions_to_ds(
data_set,
variable_of_interest=variable_of_interest,
_ma_window=_ma_window,
**kwargs,
kwargs.update(
dict(
variable_of_interest=variable_of_interest,
_ma_window=_ma_window,
)
)
data_set = add_conditions_to_ds(data_set, **kwargs)

folders = base.split(os.sep)

Expand Down
212 changes: 0 additions & 212 deletions optim_esm_tools/analyze/pre_process copy.py

This file was deleted.

7 changes: 4 additions & 3 deletions optim_esm_tools/analyze/pre_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,10 @@ def get_preprocessed_ds(source, **kw):
)
get_logger().warning(message)
with tempfile.TemporaryDirectory() as temp_dir:
for k, v in dict(
source=source, working_dir=temp_dir, clean_up=False, save_as='some_temp.nc'
):
defaults = dict(
source=source, working_dir=temp_dir, clean_up=False, save_as='temp_res.nc'
)
for k, v in defaults.items():
kw.setdefault(k, v)
intermediate_file = pre_process(**kw)
# After we close this "with" block, we lose the file, so load it just to be sure we have all we need
Expand Down

0 comments on commit e6d670c

Please sign in to comment.