
Commit

rev + split init cases
LenkaNovak committed Jan 31, 2024
1 parent 398b0c6 commit c06719e
Showing 4 changed files with 250 additions and 134 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -35,7 +35,7 @@ DocStringExtensions = "0.8, 0.9"
Insolation = "0.8"
JLD2 = "0.4"
NCDatasets = "0.11, 0.12, 0.13"
Plots = "1.39.0"
Plots = "=1.39.0"
SciMLBase = "1, 2"
StaticArrays = "1"
Statistics = "1"
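Note on the compat change above: in a Julia [compat] entry, "1.39.0" allows any release in [1.39.0, 2.0.0) under the default caret semantics, while "=1.39.0" pins Plots to exactly 1.39.0. A hedged sketch of setting the same pin from the package manager, assuming a Pkg version (Julia 1.8+) that provides Pkg.compat:

using Pkg

# Write an equality compat bound for Plots into the active Project.toml;
# the resolver will then only accept Plots v1.39.0.
Pkg.compat("Plots", "=1.39.0")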
98 changes: 62 additions & 36 deletions src/BCReader.jl
@@ -182,21 +182,53 @@ The times for which data is extracted depends on the specifications in the
function update_midmonth_data!(date, bcf_info::BCFileInfo{FT}) where {FT}
# monthly count
(; bcfile_dir, comms_ctx, hd_outfile_root, varname, all_dates, scaling_function) = bcf_info
midmonth_idx = bcf_info.segment_idx[1]
midmonth_idx0 = bcf_info.segment_idx0[1]

# case 1: date is before the first date in the file
if (midmonth_idx == midmonth_idx0) && ((date - all_dates[midmonth_idx]).value < 0) # for init
@warn "this time period is before BC data - using file from $(all_dates[midmonth_idx0])"
bcf_info.monthly_fields[1] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx0)], varname, comms_ctx),
bcf_info,
)
bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1])
bcf_info.segment_length .= Int(0)

# case 2: date is after the last date in file
elseif (date - all_dates[end]).value > 0 # for fini
segment_idx = bcf_info.segment_idx[1] # index of the current date in the file; all_dates[segment_idx] and all_dates[segment_idx + 1] bound the segment over which we interpolate
segment_idx0 = bcf_info.segment_idx0[1] # reference index (first segment_idx - 1)

# upon initialization (segment_idx == segment_idx0), unless the model date is at or after the final file date
if (segment_idx == segment_idx0) && !((date - all_dates[end]).value >= 0)
# case 1: model date is before the first segment read from file
if (date - all_dates[segment_idx]).value < 0
@warn "this time period is before BC data - using file from $(all_dates[segment_idx0])"
bcf_info.monthly_fields[1] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx0)], varname, comms_ctx),
bcf_info,
)
bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1])
bcf_info.segment_length .= Int(0)
bcf_info.segment_idx[1] -= Int(1)
bcf_info.segment_idx0[1] -= Int(2)

# case 2: model date is after the first segment read from file
elseif (date - all_dates[Int(segment_idx0) + 1]).value >= 0
nearest_idx = argmin(
abs.(
parse(FT, TimeManager.datetime_to_strdate(date)) .-
parse.(FT, TimeManager.datetime_to_strdate.(all_dates[:]))
),
)
@warn "Initializing with `segment_idx = $nearest_idx"
bcf_info.segment_idx[1] = nearest_idx
bcf_info.segment_idx0[1] = nearest_idx
update_midmonth_data!(date, bcf_info)

# case 3: model date is within the first segment read from file
elseif (date - all_dates[segment_idx]).value >= 0
@warn "On $date updating file data reads: file dates = [ $(all_dates[segment_idx]) , $(all_dates[segment_idx+1]) ]"
bcf_info.segment_length .= (all_dates[segment_idx + 1] - all_dates[segment_idx]).value
bcf_info.monthly_fields[1] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[segment_idx], varname, comms_ctx),
bcf_info,
)
bcf_info.monthly_fields[2] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[segment_idx + 1], varname, comms_ctx),
bcf_info,
)
bcf_info.segment_idx0[1] -= Int(1)
end

# case 4: date is at or after the last date in file
elseif (date - all_dates[end]).value >= 0
@warn "this time period is after BC data - using file from $(all_dates[end])"
bcf_info.monthly_fields[1] .= scaling_function(
Regridder.read_from_hdf5(
@@ -211,35 +243,29 @@ function update_midmonth_data!(date, bcf_info::BCFileInfo{FT}) where {FT}
bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1])
bcf_info.segment_length .= Int(0)

# case 3: during initialization, warn and reset if there are closer file dates to the current date than the file date implied by midmonth_idx0+1
elseif (midmonth_idx == midmonth_idx0) && (date - all_dates[Int(midmonth_idx0) + 1]).value >= 0
nearest_idx = argmin(
abs.(
parse(FT, TimeManager.datetime_to_strdate(date)) .-
parse.(FT, TimeManager.datetime_to_strdate.(all_dates[:]))
),
)
bcf_info.segment_idx .= nearest_idx
bcf_info.segment_idx0 .= nearest_idx
@warn "init data does not correspond to start date. Initializing with `SIC_info.segment_idx[1] = midmonth_idx = midmonth_idx0 = $nearest_idx` for this start date"

# case 4: date crosses to the next month
elseif (date - all_dates[Int(midmonth_idx)]).value >= 0
@warn "On $date updating monthly data files: mid-month dates = [ $(all_dates[Int(midmonth_idx)]) , $(all_dates[Int(midmonth_idx+1)]) ]"
bcf_info.segment_length .= (all_dates[Int(midmonth_idx + 1)] - all_dates[Int(midmonth_idx)]).value
# case 5: model date crosses to the next segment
elseif (date - all_dates[Int(segment_idx) + 1]).value >= 0
segment_idx = bcf_info.segment_idx[1] += Int(1)

@warn "On $date updating file data reads: file dates = [ $(all_dates[Int(segment_idx)]) , $(all_dates[Int(segment_idx+1)]) ]"
bcf_info.segment_length .= (all_dates[Int(segment_idx + 1)] - all_dates[Int(segment_idx)]).value

bcf_info.monthly_fields[1] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx)], varname, comms_ctx),
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx)], varname, comms_ctx),
bcf_info,
)
bcf_info.monthly_fields[2] .= scaling_function(
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx + 1)], varname, comms_ctx),
Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx + 1)], varname, comms_ctx),
bcf_info,
)
midmonth_idx = bcf_info.segment_idx[1] += Int(1)

# case 5: undefined condition
# case 6: undefined condition
else
throw(ErrorException("Check boundary file specification"))
throw(
ErrorException(
"Check boundary file specification: segment: $(all_dates[segment_idx]) - $(all_dates[segment_idx+1]), date: $date",
),
)
end
end

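For readers following the refactor above: the six cases amount to deciding where the model date falls relative to the file dates and the current segment indices. Below is a minimal, self-contained sketch of that selection logic, not the package implementation; the name classify_date and the returned symbols are illustrative only.

using Dates

# Return a symbol naming which of the six cases above a model `date` falls into,
# given sorted file dates and the current / reference segment indices.
function classify_date(date::DateTime, all_dates::Vector{DateTime}, segment_idx::Int, segment_idx0::Int)
    if segment_idx == segment_idx0 && date < all_dates[end]   # initialization branch
        if date < all_dates[segment_idx]
            return :before_data            # case 1: clamp to the first file date
        elseif date >= all_dates[segment_idx0 + 1]
            return :reinitialize_nearest   # case 2: jump to the nearest file date and re-run
        else
            return :within_first_segment   # case 3: read the first segment
        end
    elseif date >= all_dates[end]
        return :after_data                 # case 4: clamp to the last file date
    elseif date >= all_dates[segment_idx + 1]
        return :next_segment               # case 5: advance to the next segment
    else
        return :inconsistent               # case 6: the real code throws here
    end
end

# Example with mid-month file dates: a model date inside the first segment.
file_dates = [DateTime(2000, m, 15) for m in 1:12]
classify_date(DateTime(2000, 1, 20), file_dates, 1, 1)  # -> :within_first_segment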
136 changes: 90 additions & 46 deletions test/bcreader_tests.jl
@@ -227,69 +227,113 @@ for FT in (Float32, Float64)
bcf_info.monthly_fields[1] .= current_fields[1]
bcf_info.monthly_fields[2] .= current_fields[2]
bcf_info.segment_length[1] = Int(1)
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1]
end

hd_outfile_root = varname * "_cgll"

# case 1: segment_idx == segment_idx0, date < all_dates[segment_idx]
# case 1: date < all_dates[segment_idx] (init)
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1]
date = DateTime(bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1))
BCReader.update_midmonth_data!(date, bcf_info)

@test bcf_info.monthly_fields[1] == bcf_info.scaling_function(
Regridder.read_from_hdf5(
regrid_dir,
hd_outfile_root,
bcf_info.all_dates[Int(bcf_info.segment_idx0[1])],
varname,
comms_ctx,
),
bcf_info,
)
# unmodified field
@test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1]
# zero segment length
@test bcf_info.segment_length[1] == Int(0)
# segment index is reset
@test bcf_info.segment_idx0[1] == bcf_info.segment_idx[1] - 1

# cases 2 and 3
extra_days = [Dates.Day(0), Dates.Day(3)]
for extra in extra_days
# case 3: (date - all_dates[Int(segment_idx0)]) >= 0 (init)
reset_bcf_info(bcf_info)
date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra
BCReader.update_midmonth_data!(date, bcf_info)

end_field_c2 = deepcopy(bcf_info.monthly_fields[2])
segment_length_c2 = deepcopy(bcf_info.segment_length[1])
current_index_c2 = deepcopy(bcf_info.segment_idx[1])

# modified field
@test end_field_c2 !== bcf_info.monthly_fields[1]
# updated segment length
@test segment_length_c2[1] !== Int(0)
# updated reference segment index
@test current_index_c2 == bcf_info.segment_idx0[1] + 1

# case 2: (date - all_dates[Int(segment_idx0) + 1]) >= 0 (init)
# do not reset segment_idx0. Its current value ensures that we get the same result as case 3
reset_bcf_info(bcf_info)

date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1]) + extra
BCReader.update_midmonth_data!(date, bcf_info)

nearest_idx = argmin(
abs.(
parse(FT, TimeManager.datetime_to_strdate(date)) .-
parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:]))
),
)

@test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 == nearest_idx

# compare to case 3 (case 2 should default to it, so we expect the same result):
@test bcf_info.monthly_fields[1] == bcf_info.scaling_function(
Regridder.read_from_hdf5(
regrid_dir,
hd_outfile_root,
bcf_info.all_dates[Int(bcf_info.segment_idx[1])],
varname,
comms_ctx,
),
bcf_info,
)

# check case 2 defaults to case 3
@test end_field_c2 !== bcf_info.monthly_fields[1]
@test segment_length_c2[1] !== Int(0)
@test current_index_c2 == bcf_info.segment_idx0[1] + 1

# case 2: date > all_dates[end]
reset_bcf_info(bcf_info)
date = DateTime(bcf_info.all_dates[end] + Dates.Day(1))
BCReader.update_midmonth_data!(date, bcf_info)
end

@test bcf_info.monthly_fields[1] == bcf_info.scaling_function(
Regridder.read_from_hdf5(
regrid_dir,
hd_outfile_root,
bcf_info.all_dates[Int(length(bcf_info.all_dates))],
varname,
comms_ctx,
),
bcf_info,
)
@test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1]
@test bcf_info.segment_length[1] == Int(0)
# case 4: date >= all_dates[end]
for extra in extra_days
bcf_info.segment_idx0[1] = length(bcf_info.all_dates)
reset_bcf_info(bcf_info)

date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra
BCReader.update_midmonth_data!(date, bcf_info)

@test bcf_info.monthly_fields[1] == bcf_info.scaling_function(
Regridder.read_from_hdf5(
regrid_dir,
hd_outfile_root,
bcf_info.all_dates[Int(length(bcf_info.all_dates))],
varname,
comms_ctx,
),
bcf_info,
)
@test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1]
@test bcf_info.segment_length[1] == Int(0)
end

# case 3: date - all_dates[segment_idx + 1] >= 0
reset_bcf_info(bcf_info)
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1]
date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1] + Dates.Day(3))
BCReader.update_midmonth_data!(date, bcf_info)
nearest_idx = argmin(
abs.(
parse(FT, TimeManager.datetime_to_strdate(date)) .-
parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:]))
),
)
# case 5: Dates.days(date - all_dates[segment_idx]) >= 0

@test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] == nearest_idx
extra = Dates.Day(3)
for extra in extra_days
bcf_info.segment_idx0[1] = 2
reset_bcf_info(bcf_info)

# case 4: Dates.days(date - all_dates[segment_idx]) >= 0
reset_bcf_info(bcf_info)
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1]
date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + Dates.Day(3))
BCReader.update_midmonth_data!(date, bcf_info)
date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + extra)
BCReader.update_midmonth_data!(date, bcf_info)

@test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1
@test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1
end

# case 5: everything else
# case 6: everything else
reset_bcf_info(bcf_info)
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] + Int(1)
date = bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1)
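A side note on the nearest_idx computation exercised in the tests above: the file dates are formatted as yyyymmdd strings, parsed as numbers, and argmin of the absolute difference picks the file date closest to the model date. A minimal sketch, with an assumed stand-in for TimeManager.datetime_to_strdate:

using Dates

# Stand-in for TimeManager.datetime_to_strdate (assumed to produce "yyyymmdd").
datetime_to_strdate(d::DateTime) = Dates.format(d, "yyyymmdd")

all_dates = [DateTime(2000, m, 15) for m in 1:12]
date = DateTime(2000, 3, 1)

# Index of the file date whose yyyymmdd number is closest to the model date's.
nearest_idx = argmin(abs.(parse(Float64, datetime_to_strdate(date)) .-
                          parse.(Float64, datetime_to_strdate.(all_dates))))
# nearest_idx == 3, i.e. 2000-03-15 (|20000301 - 20000315| = 14 beats |20000301 - 20000215| = 86)

Note that this compares dates as yyyymmdd numbers rather than as true time differences, which mirrors the behavior of the code under test.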
