diff --git a/Project.toml b/Project.toml index fafc4e1d87..68ee3c33ac 100644 --- a/Project.toml +++ b/Project.toml @@ -35,7 +35,7 @@ DocStringExtensions = "0.8, 0.9" Insolation = "0.8" JLD2 = "0.4" NCDatasets = "0.11, 0.12, 0.13" -Plots = "1.39.0" +Plots = "=1.39.0" SciMLBase = "1, 2" StaticArrays = "1" Statistics = "1" diff --git a/src/BCReader.jl b/src/BCReader.jl index e24875c2b0..62b8eac8d8 100644 --- a/src/BCReader.jl +++ b/src/BCReader.jl @@ -182,21 +182,53 @@ The times for which data is extracted depends on the specifications in the function update_midmonth_data!(date, bcf_info::BCFileInfo{FT}) where {FT} # monthly count (; bcfile_dir, comms_ctx, hd_outfile_root, varname, all_dates, scaling_function) = bcf_info - midmonth_idx = bcf_info.segment_idx[1] - midmonth_idx0 = bcf_info.segment_idx0[1] - - # case 1: date is before the first date in the file - if (midmonth_idx == midmonth_idx0) && ((date - all_dates[midmonth_idx]).value < 0) # for init - @warn "this time period is before BC data - using file from $(all_dates[midmonth_idx0])" - bcf_info.monthly_fields[1] .= scaling_function( - Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx0)], varname, comms_ctx), - bcf_info, - ) - bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1]) - bcf_info.segment_length .= Int(0) - - # case 2: date is after the last date in file - elseif (date - all_dates[end]).value > 0 # for fini + segment_idx = bcf_info.segment_idx[1] # index of the current date in the file. 
[segment_idx, segment_idx+1] indexes the current segment between which we interpolate + segment_idx0 = bcf_info.segment_idx0[1] # reference index (first segment_idx - 1) + + # upon initialization (segment_idx == segment_idx0) and not initialized after final file date + if (segment_idx == segment_idx0) && !((date - all_dates[end]).value >= 0) + # case 1: model date is before the first segment read from file + if (date - all_dates[segment_idx]).value < 0 + @warn "this time period is before BC data - using file from $(all_dates[segment_idx0])" + bcf_info.monthly_fields[1] .= scaling_function( + Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx0)], varname, comms_ctx), + bcf_info, + ) + bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1]) + bcf_info.segment_length .= Int(0) + bcf_info.segment_idx[1] -= Int(1) + bcf_info.segment_idx0[1] -= Int(2) + + # case 2: model date is after the first segment read from file + elseif (date - all_dates[Int(segment_idx0) + 1]).value >= 0 + nearest_idx = argmin( + abs.( + parse(FT, TimeManager.datetime_to_strdate(date)) .- + parse.(FT, TimeManager.datetime_to_strdate.(all_dates[:])) + ), + ) + @warn "Initializing with `segment_idx = $nearest_idx`" + bcf_info.segment_idx[1] = nearest_idx + bcf_info.segment_idx0[1] = nearest_idx + update_midmonth_data!(date, bcf_info) + + # case 3: model date is within the first segment read from file + elseif (date - all_dates[segment_idx]).value >= 0 + @warn "On $date updating file data reads: file dates = [ $(all_dates[segment_idx]) , $(all_dates[segment_idx+1]) ]" + bcf_info.segment_length .= (all_dates[segment_idx + 1] - all_dates[segment_idx]).value + bcf_info.monthly_fields[1] .= scaling_function( + Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[segment_idx], varname, comms_ctx), + bcf_info, + ) + bcf_info.monthly_fields[2] .= scaling_function( + Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[segment_idx + 1], varname, 
comms_ctx), + bcf_info, + ) + bcf_info.segment_idx0[1] -= Int(1) + end + + # case 4: date is at or after the last date in file + elseif (date - all_dates[end]).value >= 0 @warn "this time period is after BC data - using file from $(all_dates[end])" bcf_info.monthly_fields[1] .= scaling_function( Regridder.read_from_hdf5( @@ -211,35 +243,29 @@ function update_midmonth_data!(date, bcf_info::BCFileInfo{FT}) where {FT} bcf_info.monthly_fields[2] .= deepcopy(bcf_info.monthly_fields[1]) bcf_info.segment_length .= Int(0) - # case 3: during initialization, warn and reset if there are closer file dates to the current date than the file date implied by midmonth_idx0+1 - elseif (midmonth_idx == midmonth_idx0) && (date - all_dates[Int(midmonth_idx0) + 1]).value >= 0 - nearest_idx = argmin( - abs.( - parse(FT, TimeManager.datetime_to_strdate(date)) .- - parse.(FT, TimeManager.datetime_to_strdate.(all_dates[:])) - ), - ) - bcf_info.segment_idx .= nearest_idx - bcf_info.segment_idx0 .= nearest_idx - @warn "init data does not correspond to start date. 
Initializing with `SIC_info.segment_idx[1] = midmonth_idx = midmonth_idx0 = $nearest_idx` for this start date" - - # case 4: date crosses to the next month - elseif (date - all_dates[Int(midmonth_idx)]).value >= 0 - @warn "On $date updating monthly data files: mid-month dates = [ $(all_dates[Int(midmonth_idx)]) , $(all_dates[Int(midmonth_idx+1)]) ]" - bcf_info.segment_length .= (all_dates[Int(midmonth_idx + 1)] - all_dates[Int(midmonth_idx)]).value + # case 5: model date crosses to the next segment + elseif (date - all_dates[Int(segment_idx) + 1]).value >= 0 + segment_idx = bcf_info.segment_idx[1] += Int(1) + + @warn "On $date updating file data reads: file dates = [ $(all_dates[Int(segment_idx)]) , $(all_dates[Int(segment_idx+1)]) ]" + bcf_info.segment_length .= (all_dates[Int(segment_idx + 1)] - all_dates[Int(segment_idx)]).value + bcf_info.monthly_fields[1] .= scaling_function( - Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx)], varname, comms_ctx), + Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx)], varname, comms_ctx), bcf_info, ) bcf_info.monthly_fields[2] .= scaling_function( - Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(midmonth_idx + 1)], varname, comms_ctx), + Regridder.read_from_hdf5(bcfile_dir, hd_outfile_root, all_dates[Int(segment_idx + 1)], varname, comms_ctx), bcf_info, ) - midmonth_idx = bcf_info.segment_idx[1] += Int(1) - # case 5: undefined condition + # case 6: undefined condition else - throw(ErrorException("Check boundary file specification")) + throw( + ErrorException( + "Check boundary file specification: segment: $(all_dates[segment_idx]) - $(all_dates[segment_idx+1]), date: $date", + ), + ) end end diff --git a/test/bcreader_tests.jl b/test/bcreader_tests.jl index 9448209f15..0540f1d073 100644 --- a/test/bcreader_tests.jl +++ b/test/bcreader_tests.jl @@ -227,69 +227,113 @@ for FT in (Float32, Float64) bcf_info.monthly_fields[1] .= current_fields[1] 
bcf_info.monthly_fields[2] .= current_fields[2] bcf_info.segment_length[1] = Int(1) + bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] end hd_outfile_root = varname * "_cgll" - # case 1: segment_idx == segment_idx0, date < all_dates[segment_idx] + # case 1: date < all_dates[segment_idx] (init) bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] date = DateTime(bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1)) BCReader.update_midmonth_data!(date, bcf_info) - @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( - Regridder.read_from_hdf5( - regrid_dir, - hd_outfile_root, - bcf_info.all_dates[Int(bcf_info.segment_idx0[1])], - varname, - comms_ctx, - ), - bcf_info, - ) + # unmodified field @test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1] + # zero segment length @test bcf_info.segment_length[1] == Int(0) + # segment index is reset + @test bcf_info.segment_idx0[1] == bcf_info.segment_idx[1] - 1 + + # cases 2 and 3 + extra_days = [Dates.Day(0), Dates.Day(3)] + for extra in extra_days + # case 3: (date - all_dates[Int(segment_idx0)]) >= 0 (init) + reset_bcf_info(bcf_info) + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + end_field_c2 = deepcopy(bcf_info.monthly_fields[2]) + segment_length_c2 = deepcopy(bcf_info.segment_length[1]) + current_index_c2 = deepcopy(bcf_info.segment_idx[1]) + + # modified field + @test end_field_c2 !== bcf_info.monthly_fields[1] + # updated segment length + @test segment_length_c2[1] !== Int(0) + # updated reference segment index + @test current_index_c2 == bcf_info.segment_idx0[1] + 1 + + # case 2: (date - all_dates[Int(segment_idx0) + 1]) >= 0 (init) + # do not reset segment_idx0. 
Its current value ensures that we get the same result as case 3 + reset_bcf_info(bcf_info) + + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + nearest_idx = argmin( + abs.( + parse(FT, TimeManager.datetime_to_strdate(date)) .- + parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:])) + ), + ) + + @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 == nearest_idx + + # compare to case 3 (expecting the same result - this defaults to it): + @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( + Regridder.read_from_hdf5( + regrid_dir, + hd_outfile_root, + bcf_info.all_dates[Int(bcf_info.segment_idx[1])], + varname, + comms_ctx, + ), + bcf_info, + ) + + # check case 2 defaults to case 3 + @test end_field_c2 !== bcf_info.monthly_fields[1] + @test segment_length_c2[1] !== Int(0) + @test current_index_c2 == bcf_info.segment_idx0[1] + 1 - # case 2: date > all_dates[end] - reset_bcf_info(bcf_info) - date = DateTime(bcf_info.all_dates[end] + Dates.Day(1)) - BCReader.update_midmonth_data!(date, bcf_info) + end - @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( - Regridder.read_from_hdf5( - regrid_dir, - hd_outfile_root, - bcf_info.all_dates[Int(length(bcf_info.all_dates))], - varname, - comms_ctx, - ), - bcf_info, - ) - @test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1] - @test bcf_info.segment_length[1] == Int(0) + # case 4: date > all_dates[end] + for extra in extra_days + bcf_info.segment_idx0[1] = length(bcf_info.all_dates) + reset_bcf_info(bcf_info) + + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( + Regridder.read_from_hdf5( + regrid_dir, + hd_outfile_root, + bcf_info.all_dates[Int(length(bcf_info.all_dates))], + varname, + comms_ctx, + ), + bcf_info, + ) + @test bcf_info.monthly_fields[2] == 
bcf_info.monthly_fields[1] + @test bcf_info.segment_length[1] == Int(0) + end - # case 3: date - all_dates[segment_idx + 1] >= 0 - reset_bcf_info(bcf_info) - bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] - date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1] + Dates.Day(3)) - BCReader.update_midmonth_data!(date, bcf_info) - nearest_idx = argmin( - abs.( - parse(FT, TimeManager.datetime_to_strdate(date)) .- - parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:])) - ), - ) + # case 5: Dates.days(date - all_dates[segment_idx]) >= 0 - @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] == nearest_idx + extra = Dates.Day(3) + for extra in extra_days + bcf_info.segment_idx0[1] = 2 + reset_bcf_info(bcf_info) - # case 4: Dates.days(date - all_dates[segment_idx]) >= 0 - reset_bcf_info(bcf_info) - bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] - date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + Dates.Day(3)) - BCReader.update_midmonth_data!(date, bcf_info) + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + extra) + BCReader.update_midmonth_data!(date, bcf_info) - @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 + @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 + end - # case 5: everything else + # case 6: everything else reset_bcf_info(bcf_info) bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] + Int(1) date = bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1) diff --git a/test/mpi_tests/bcreader_mpi_tests.jl b/test/mpi_tests/bcreader_mpi_tests.jl index 9d12a9ec06..6749ba67c0 100644 --- a/test/mpi_tests/bcreader_mpi_tests.jl +++ b/test/mpi_tests/bcreader_mpi_tests.jl @@ -4,7 +4,7 @@ These are in a separate testing file from the other BCReader unit tests so that MPI can be enabled for testing of these functions. 
=# - +using ClimaCoupler using ClimaCoupler: Regridder, BCReader, TimeManager, Interfacer using ClimaCore: Fields, Meshes, Domains, Topologies, Spaces using ClimaComms @@ -14,7 +14,8 @@ using NCDatasets import ArtifactWrappers as AW # Get the path to the necessary data file - sst map -include(joinpath(@__DIR__, "..", "..", "artifacts", "artifact_funcs.jl")) +pkg_dir = pkgdir(ClimaCoupler) +include(joinpath(pkg_dir, "artifacts", "artifact_funcs.jl")) const sst_data = joinpath(sst_dataset_path(), "sst.nc") # set up MPI communications context @@ -176,73 +177,118 @@ end bcf_info.monthly_fields[1] .= current_fields[1] bcf_info.monthly_fields[2] .= current_fields[2] bcf_info.segment_length[1] = Int(1) + bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] end hd_outfile_root = varname * "_cgll" - # case 1: segment_idx == segment_idx0, date < all_dates[segment_idx] + # case 1: date < all_dates[segment_idx] (init) bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] date = DateTime(bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1)) BCReader.update_midmonth_data!(date, bcf_info) ClimaComms.barrier(comms_ctx) - @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( - Regridder.read_from_hdf5( - regrid_dir, - hd_outfile_root, - bcf_info.all_dates[Int(bcf_info.segment_idx0[1])], - varname, - comms_ctx, - ), - bcf_info, - ) + # unmodified field @test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1] + # zero segment length @test bcf_info.segment_length[1] == Int(0) + # segment index is reset + @test bcf_info.segment_idx0[1] == bcf_info.segment_idx[1] - 1 + + # cases 2 and 3 + extra_days = [Dates.Day(0), Dates.Day(3)] + for extra in extra_days + # case 3: (date - all_dates[Int(segment_idx0)]) >= 0 (init) + reset_bcf_info(bcf_info) + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + end_field_c2 = deepcopy(bcf_info.monthly_fields[2]) + segment_length_c2 = 
deepcopy(bcf_info.segment_length[1]) + current_index_c2 = deepcopy(bcf_info.segment_idx[1]) + + ClimaComms.barrier(comms_ctx) + # modified field + @test end_field_c2 !== bcf_info.monthly_fields[1] + # updated segment length + @test segment_length_c2[1] !== Int(0) + # updated reference segment index + @test current_index_c2 == bcf_info.segment_idx0[1] + 1 + + # case 2: (date - all_dates[Int(segment_idx0) + 1]) >= 0 (init) + # do not reset segment_idx0. Its current value ensures that we get the same result as case 3 + reset_bcf_info(bcf_info) + + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + nearest_idx = argmin( + abs.( + parse(FT, TimeManager.datetime_to_strdate(date)) .- + parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:])) + ), + ) + + ClimaComms.barrier(comms_ctx) + @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 == nearest_idx + + # compare to case 3 (expecting the same result - this defaults to it): + @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( + Regridder.read_from_hdf5( + regrid_dir, + hd_outfile_root, + bcf_info.all_dates[Int(bcf_info.segment_idx[1])], + varname, + comms_ctx, + ), + bcf_info, + ) + + # check case 2 defaults to case 3 + @test end_field_c2 !== bcf_info.monthly_fields[1] + @test segment_length_c2[1] !== Int(0) + @test current_index_c2 == bcf_info.segment_idx0[1] + 1 - # case 2: date > all_dates[end] - reset_bcf_info(bcf_info) - date = DateTime(bcf_info.all_dates[end] + Dates.Day(1)) - BCReader.update_midmonth_data!(date, bcf_info) + end - ClimaComms.barrier(comms_ctx) - @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( - Regridder.read_from_hdf5( - regrid_dir, - hd_outfile_root, - bcf_info.all_dates[Int(length(bcf_info.all_dates))], - varname, - comms_ctx, - ), - bcf_info, - ) - @test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1] - @test bcf_info.segment_length[1] == Int(0) + # case 4: date 
> all_dates[end] + for extra in extra_days + bcf_info.segment_idx0[1] = length(bcf_info.all_dates) + reset_bcf_info(bcf_info) + + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]]) + extra + BCReader.update_midmonth_data!(date, bcf_info) + + ClimaComms.barrier(comms_ctx) + @test bcf_info.monthly_fields[1] == bcf_info.scaling_function( + Regridder.read_from_hdf5( + regrid_dir, + hd_outfile_root, + bcf_info.all_dates[Int(length(bcf_info.all_dates))], + varname, + comms_ctx, + ), + bcf_info, + ) + @test bcf_info.monthly_fields[2] == bcf_info.monthly_fields[1] + @test bcf_info.segment_length[1] == Int(0) + end - # case 3: date - all_dates[segment_idx + 1] >= 0 - reset_bcf_info(bcf_info) - bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] - date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1] + 1] + Dates.Day(3)) - BCReader.update_midmonth_data!(date, bcf_info) - nearest_idx = argmin( - abs.( - parse(FT, TimeManager.datetime_to_strdate(date)) .- - parse.(FT, TimeManager.datetime_to_strdate.(bcf_info.all_dates[:])) - ), - ) + # case 5: Dates.days(date - all_dates[segment_idx]) >= 0 - ClimaComms.barrier(comms_ctx) - @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] == nearest_idx + extra = Dates.Day(3) + for extra in extra_days + bcf_info.segment_idx0[1] = 2 + reset_bcf_info(bcf_info) - # case 4: Dates.days(date - all_dates[segment_idx]) >= 0 - reset_bcf_info(bcf_info) - bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] - date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + Dates.Day(3)) - BCReader.update_midmonth_data!(date, bcf_info) + date = DateTime(bcf_info.all_dates[bcf_info.segment_idx0[1]] + extra) + BCReader.update_midmonth_data!(date, bcf_info) - ClimaComms.barrier(comms_ctx) - @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 + ClimaComms.barrier(comms_ctx) + @test bcf_info.segment_idx[1] == bcf_info.segment_idx0[1] + 1 + end - # case 5: everything else + # case 6: everything else reset_bcf_info(bcf_info) 
bcf_info.segment_idx[1] = bcf_info.segment_idx0[1] + Int(1) date = bcf_info.all_dates[bcf_info.segment_idx[1]] - Dates.Day(1)