diff --git a/data/tests/config/tracking_cfg.yaml b/data/tests/config/tracking_cfg.yaml index aee5c28e..1cedd3d3 100644 --- a/data/tests/config/tracking_cfg.yaml +++ b/data/tests/config/tracking_cfg.yaml @@ -1,11 +1,15 @@ Demo ground station: - start: Visible - end: Visible - schedule: Continuous + scheduler: + handoff: Overlap + cadence: Continuous + min_samples: 10 + sample_alignment: null sampling: 1 min Canberra: - start: Visible - end: Visible - schedule: Continuous + scheduler: + handoff: Eager + cadence: Continuous + min_samples: 10 + sample_alignment: 10 s sampling: 1 min diff --git a/data/tests/config/tracking_cfg_incl_excl.yaml b/data/tests/config/tracking_cfg_incl_excl.yaml deleted file mode 100644 index 43c4852e..00000000 --- a/data/tests/config/tracking_cfg_incl_excl.yaml +++ /dev/null @@ -1,17 +0,0 @@ -Demo ground station: - start: Visible - end: Visible - schedule: Continuous - sampling: 1 min - -Canberra: - start: Visible - end: Visible - schedule: Continuous - sampling: 1 min - inclusion epochs: - - start: 2022-01-15 15:06:48 UTC - end: 2022-01-15 17:06:48 UTC - - - start: 2022-01-16 15:06:48 UTC - end: 2022-01-16 17:06:48 UTC diff --git a/data/tests/config/trk_cfg_od_val.yaml b/data/tests/config/trk_cfg_od_val.yaml new file mode 100644 index 00000000..9906fd0b --- /dev/null +++ b/data/tests/config/trk_cfg_od_val.yaml @@ -0,0 +1,7 @@ +scheduler: + handoff: Overlap # Allow for overlapping measurement + cadence: Continuous + min_samples: 10 + sample_alignment: 10 s +sampling: 10 s +strands: null \ No newline at end of file diff --git a/data/tests/config/trk_cfg_od_val_arc.yaml b/data/tests/config/trk_cfg_od_val_arc.yaml index 60ab7cc0..ab84e712 100644 --- a/data/tests/config/trk_cfg_od_val_arc.yaml +++ b/data/tests/config/trk_cfg_od_val_arc.yaml @@ -1,19 +1,19 @@ Madrid: - start: Visible - end: !Epoch 2020-01-01 03:00:00 UTC - schedule: Continuous + strands: + - start: 2020-01-01 00:00:00 TAI + end: 2020-01-01 03:00:00 TAI sampling: 1 min Canberra: - 
start: Visible - end: !Epoch 2020-01-01 19:00:00 UTC - schedule: !Intermittent - on: 4 h - off: 8 h + strands: + - start: 2020-01-01 02:40:00 TAI + end: 2020-01-01 07:00:00 TAI sampling: 1 min Goldstone: - start: !Epoch 2020-01-01 09:00:00 UTC - end: Visible - schedule: Continuous + strands: + - start: 2020-01-01 19:30:00 TAI + end: 2020-01-02 00:00:00 TAI + - start: 2020-01-01 09:00:00 TAI + end: 2020-01-01 12:00:00 TAI sampling: 1 min diff --git a/src/io/mod.rs b/src/io/mod.rs index 5a123203..4779a1ad 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -64,6 +64,7 @@ use pyo3::prelude::*; /// Configuration for exporting a trajectory to parquet. #[derive(Clone, Default, Serialize, Deserialize, TypedBuilder)] #[cfg_attr(feature = "python", pyclass)] +#[builder(doc)] pub struct ExportCfg { /// Fields to export, if unset, defaults to all possible fields. #[builder(default, setter(strip_option))] @@ -327,6 +328,35 @@ where Ok(orbit_serde.into()) } +pub(crate) fn maybe_duration_to_str( + duration: &Option, + serializer: S, +) -> Result +where + S: Serializer, +{ + if let Some(duration) = duration { + duration_to_str(duration, serializer) + } else { + serializer.serialize_none() + } +} + +pub(crate) fn maybe_duration_from_str<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + if let Ok(s) = String::deserialize(deserializer) { + if let Ok(duration) = Duration::from_str(&s) { + Ok(Some(duration)) + } else { + Ok(None) + } + } else { + Ok(None) + } +} + #[allow(clippy::upper_case_acronyms)] #[derive(Debug)] pub enum ParsingError { diff --git a/src/md/events/details.rs b/src/md/events/details.rs new file mode 100644 index 00000000..8694df0b --- /dev/null +++ b/src/md/events/details.rs @@ -0,0 +1,193 @@ +/* + Nyx, blazing fast astrodynamics + Copyright (C) 2023 Christopher Rabotin + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free 
Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ + +use crate::errors::NyxError; +use crate::linalg::allocator::Allocator; +use crate::linalg::DefaultAllocator; +use crate::md::prelude::{Interpolatable, Traj}; +use crate::md::EventEvaluator; +use crate::time::Duration; +use core::fmt; + +/// Enumerates the possible edges of an event in a trajectory. +/// +/// `EventEdge` is used to describe the nature of a trajectory event, particularly in terms of its temporal dynamics relative to a specified condition or threshold. This enum helps in distinguishing whether the event is occurring at a rising edge, a falling edge, or if the edge is unclear due to insufficient data or ambiguous conditions. +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum EventEdge { + /// Represents a rising edge of the event. This indicates that the event is transitioning from a lower to a higher evaluation of the event. For example, in the context of elevation, a rising edge would indicate an increase in elevation from a lower angle. + Rising, + /// Represents a falling edge of the event. This is the opposite of the rising edge, indicating a transition from a higher to a lower value of the event evaluator. For example, if tracking the elevation of an object, a falling edge would signify a decrease in elevation from a higher angle. + Falling, + /// If the edge cannot be clearly defined, it will be marked as unclear. This happens if the event is at a saddle point and the epoch precision is too large to find the exact slope. + Unclear, +} + +/// Represents the details of an event occurring along a trajectory.
+/// +/// `EventDetails` encapsulates the state at which a particular event occurs in a trajectory, along with additional information about the nature of the event. This struct is particularly useful for understanding the dynamics of the event, such as whether it represents a rising or falling edge, or if the edge is unclear. +/// +/// # Generics +/// S: Interpolatable - A type that represents the state of the trajectory. This type must implement the `Interpolatable` trait, ensuring that it can be interpolated and manipulated according to the trajectory's requirements. +#[derive(Clone, Debug, PartialEq)] +pub struct EventDetails +where +    DefaultAllocator: +        Allocator + Allocator + Allocator, +{ +    /// The state of the trajectory at the found event. +    pub state: S, +    /// Indicates whether the event is a rising edge, falling edge, or unclear. This helps in understanding the direction of change at the event point. +    pub edge: EventEdge, +    /// Numerical evaluation of the event condition, e.g. if seeking the apoapsis, this returns the near zero +    pub value: f64, +    /// Numerical evaluation of the event condition one epoch step before the found event (used to compute the rising/falling edge). +    pub prev_value: Option, +    /// Numerical evaluation of the event condition one epoch step after the found event (used to compute the rising/falling edge). +    pub next_value: Option, +    /// Precision of the epoch for this value +    pub pm_duration: Duration, +    // Store the representation of this event as a string because we can't move or clone the event reference +    pub repr: String, +} + +impl EventDetails +where +    DefaultAllocator: +        Allocator + Allocator + Allocator, +{ +    /// Generates detailed information about an event at a specific epoch in a trajectory. +    /// +    /// This takes an `Epoch` as an input and returns a `Result`.
+ /// It is designed to determine the state of a trajectory at a given epoch, evaluate the specific event at that state, and ascertain the nature of the event (rising, falling, or unclear). + /// The initialization intelligently determines the edge type of the event by comparing the event's value at the current, previous, and next epochs. + /// It ensures robust event characterization in trajectories. + /// + /// # Returns + /// - `Ok(EventDetails)` if the state at the given epoch can be determined and the event details are successfully evaluated. + /// - `Err(NyxError)` if there is an error in retrieving the state at the specified epoch. + /// + pub fn new>( + state: S, + value: f64, + event: &E, + traj: &Traj, + ) -> Result { + let epoch = state.epoch(); + let prev_value = if let Ok(state) = traj.at(epoch - event.epoch_precision()) { + Some(event.eval(&state)) + } else { + None + }; + + let next_value = if let Ok(state) = traj.at(epoch + event.epoch_precision()) { + Some(event.eval(&state)) + } else { + None + }; + + let edge = if let Some(prev_value) = prev_value { + if let Some(next_value) = next_value { + if prev_value > value && value > next_value { + EventEdge::Falling + } else if prev_value < value && value < next_value { + EventEdge::Rising + } else { + warn!("could not determine edge of {} at {}", event, state.epoch(),); + EventEdge::Unclear + } + } else if prev_value > value { + EventEdge::Falling + } else { + EventEdge::Rising + } + } else if let Some(next_value) = next_value { + if next_value > value { + EventEdge::Rising + } else { + EventEdge::Falling + } + } else { + warn!( + "could not determine edge of {} because trajectory could be queried around {}", + event, + state.epoch() + ); + EventEdge::Unclear + }; + + Ok(EventDetails { + edge, + state, + value, + prev_value, + next_value, + pm_duration: event.epoch_precision(), + repr: event.eval_string(&state).to_string(), + }) + } +} + +impl fmt::Display for EventDetails +where + DefaultAllocator: + 
Allocator + Allocator + Allocator, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let prev_fmt = match self.prev_value { + Some(value) => format!("{value:.6}"), + None => "".to_string(), + }; + + let next_fmt = match self.next_value { + Some(value) => format!("{value:.6}"), + None => "".to_string(), + }; + + write!( + f, + "{} and is {:?} (roots with {} intervals: {}, {:.6}, {})", + self.repr, self.edge, self.pm_duration, prev_fmt, self.value, next_fmt + ) + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct EventArc +where + DefaultAllocator: + Allocator + Allocator + Allocator, +{ + pub rise: EventDetails, + pub fall: EventDetails, +} + +impl fmt::Display for EventArc +where + DefaultAllocator: + Allocator + Allocator + Allocator, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{} until {} (lasts {})", + self.rise, + self.fall.state.epoch(), + self.fall.state.epoch() - self.rise.state.epoch() + ) + } +} diff --git a/src/md/events/evaluators.rs b/src/md/events/evaluators.rs index d8ad1521..bea935d3 100644 --- a/src/md/events/evaluators.rs +++ b/src/md/events/evaluators.rs @@ -24,7 +24,7 @@ use crate::md::StateParameter; use crate::utils::between_pm_x; use crate::{Spacecraft, State}; -fn angled_value(cur_angle: f64, desired_angle: f64) -> f64 { +pub(crate) fn angled_value(cur_angle: f64, desired_angle: f64) -> f64 { if between_pm_x(cur_angle, desired_angle) > 0.0 { cur_angle - desired_angle } else { diff --git a/src/md/events/mod.rs b/src/md/events/mod.rs index f860f1ec..9fa7f787 100644 --- a/src/md/events/mod.rs +++ b/src/md/events/mod.rs @@ -16,7 +16,9 @@ along with this program. If not, see . 
*/ +pub mod details; pub mod evaluators; +pub mod search; use super::StateParameter; use crate::cosmic::{Cosm, Frame}; use crate::linalg::allocator::Allocator; diff --git a/src/md/events/search.rs b/src/md/events/search.rs new file mode 100644 index 00000000..ea7a6d97 --- /dev/null +++ b/src/md/events/search.rs @@ -0,0 +1,465 @@ +/* + Nyx, blazing fast astrodynamics + Copyright (C) 2023 Christopher Rabotin + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ + +use super::details::{EventArc, EventDetails, EventEdge}; +use crate::errors::NyxError; +use crate::linalg::allocator::Allocator; +use crate::linalg::DefaultAllocator; +use crate::md::prelude::{Interpolatable, Traj}; +use crate::md::trajectory::TrajError; +use crate::md::EventEvaluator; +use crate::time::{Duration, Epoch, TimeSeries, Unit}; +use rayon::prelude::*; +use std::iter::Iterator; +use std::sync::mpsc::channel; + +impl Traj +where + DefaultAllocator: + Allocator + Allocator + Allocator, +{ + /// Find the exact state where the request event happens. The event function is expected to be monotone in the provided interval because we find the event using a Brent solver. 
+ #[allow(clippy::identity_op)] + pub fn find_bracketed( + &self, + start: Epoch, + end: Epoch, + event: &E, + ) -> Result, NyxError> + where + E: EventEvaluator, + { + let max_iter = 50; + + // Helper lambdas, for f64s only + let has_converged = + |xa: f64, xb: f64| (xa - xb).abs() <= event.epoch_precision().to_seconds(); + let arrange = |a: f64, ya: f64, b: f64, yb: f64| { + if ya.abs() > yb.abs() { + (a, ya, b, yb) + } else { + (b, yb, a, ya) + } + }; + + let xa_e = start; + let xb_e = end; + + // Search in seconds (convert to epoch just in time) + let mut xa = 0.0; + let mut xb = (xb_e - xa_e).to_seconds(); + // Evaluate the event at both bounds + let ya_state = self.at(xa_e)?; + let yb_state = self.at(xb_e)?; + let mut ya = event.eval(&ya_state); + let mut yb = event.eval(&yb_state); + + // Check if we're already at the root + if ya.abs() <= event.value_precision().abs() { + debug!( + "{event} -- found with |{ya}| < {} @ {xa_e}", + event.value_precision().abs() + ); + return EventDetails::new(ya_state, ya, event, self); + } else if yb.abs() <= event.value_precision().abs() { + debug!( + "{event} -- found with |{yb}| < {} @ {xb_e}", + event.value_precision().abs() + ); + return EventDetails::new(yb_state, yb, event, self); + } + + // The Brent solver, from the roots crate (sadly could not directly integrate it here) + // Source: https://docs.rs/roots/0.0.5/src/roots/numerical/brent.rs.html#57-131 + + let (mut xc, mut yc, mut xd) = (xa, ya, xa); + let mut flag = true; + + for _ in 0..max_iter { + if ya.abs() < event.value_precision().abs() { + // Can't fail, we got it earlier + let state = self.at(xa_e + xa * Unit::Second).unwrap(); + debug!( + "{event} -- found with |{ya}| < {} @ {}", + event.value_precision().abs(), + state.epoch(), + ); + return EventDetails::new(state, ya, event, self); + } + if yb.abs() < event.value_precision().abs() { + // Can't fail, we got it earlier + let state = self.at(xa_e + xb * Unit::Second).unwrap(); + debug!( + "{event} -- found 
with |{yb}| < {} @ {}", + event.value_precision().abs(), + state.epoch() + ); + return EventDetails::new(state, yb, event, self); + } + if has_converged(xa, xb) { + // The event isn't in the bracket + return Err(NyxError::from(TrajError::EventNotFound { + start, + end, + event: format!("{event}"), + })); + } + let mut s = if (ya - yc).abs() > f64::EPSILON && (yb - yc).abs() > f64::EPSILON { + xa * yb * yc / ((ya - yb) * (ya - yc)) + + xb * ya * yc / ((yb - ya) * (yb - yc)) + + xc * ya * yb / ((yc - ya) * (yc - yb)) + } else { + xb - yb * (xb - xa) / (yb - ya) + }; + let cond1 = (s - xb) * (s - (3.0 * xa + xb) / 4.0) > 0.0; + let cond2 = flag && (s - xb).abs() >= (xb - xc).abs() / 2.0; + let cond3 = !flag && (s - xb).abs() >= (xc - xd).abs() / 2.0; + let cond4 = flag && has_converged(xb, xc); + let cond5 = !flag && has_converged(xc, xd); + if cond1 || cond2 || cond3 || cond4 || cond5 { + s = (xa + xb) / 2.0; + flag = true; + } else { + flag = false; + } + let next_try = self.at(xa_e + s * Unit::Second)?; + let ys = event.eval(&next_try); + xd = xc; + xc = xb; + yc = yb; + if ya * ys < 0.0 { + // Root bracketed between a and s + let next_try = self.at(xa_e + xa * Unit::Second)?; + let ya_p = event.eval(&next_try); + let (_a, _ya, _b, _yb) = arrange(xa, ya_p, s, ys); + { + xa = _a; + ya = _ya; + xb = _b; + yb = _yb; + } + } else { + // Root bracketed between s and b + let next_try = self.at(xa_e + xb * Unit::Second)?; + let yb_p = event.eval(&next_try); + let (_a, _ya, _b, _yb) = arrange(s, ys, xb, yb_p); + { + xa = _a; + ya = _ya; + xb = _b; + yb = _yb; + } + } + } + Err(NyxError::MaxIterReached(format!( + "Brent solver failed after {max_iter} iterations", + ))) + } + + /// Find all of the states where the event happens + /// + /// # Limitations + /// This method uses a Brent solver. If the function that defines the event is not unimodal, the event finder may not converge correctly. 
+ /// + /// # Heuristic detail + /// The initial search step is 1% of the duration of the trajectory duration. + /// For example, if the trajectory is 100 days long, then we split the trajectory into 100 chunks of 1 day and see whether + /// the event is in there. If the event happens twice or more times within 1% of the trajectory duration, only the _one_ of + /// such events will be found. + /// + /// If this heuristic fails to find any such events, then `find_minmax` is called on the event with a time precision of `Unit::Second`. + /// Then we search only within the min and max bounds of the provided event. + #[allow(clippy::identity_op)] + pub fn find(&self, event: &E) -> Result>, NyxError> + where + E: EventEvaluator, + { + let start_epoch = self.first().epoch(); + let end_epoch = self.last().epoch(); + if start_epoch == end_epoch { + return Err(NyxError::from(TrajError::EventNotFound { + start: start_epoch, + end: end_epoch, + event: format!("{event}"), + })); + } + let heuristic = (end_epoch - start_epoch) / 100; + info!("Searching for {event} with initial heuristic of {heuristic}",); + + let (sender, receiver) = channel(); + + let epochs: Vec = TimeSeries::inclusive(start_epoch, end_epoch, heuristic).collect(); + epochs.into_par_iter().for_each_with(sender, |s, epoch| { + if let Ok(event_state) = self.find_bracketed(epoch, epoch + heuristic, event) { + s.send(event_state).unwrap() + }; + }); + + let mut states: Vec<_> = receiver.iter().collect(); + + if states.is_empty() { + warn!("Heuristic failed to find any {event} event, using slower approach"); + // Crap, we didn't find the event. + // Let's find the min and max of this event throughout the trajectory, and search around there. 
+ match self.find_minmax(event, Unit::Second) { + Ok((min_event, max_event)) => { + let lower_min_epoch = + if min_event.epoch() - 1 * Unit::Millisecond < self.first().epoch() { + self.first().epoch() + } else { + min_event.epoch() - 1 * Unit::Millisecond + }; + + let lower_max_epoch = + if min_event.epoch() + 1 * Unit::Millisecond > self.last().epoch() { + self.last().epoch() + } else { + min_event.epoch() + 1 * Unit::Millisecond + }; + + let upper_min_epoch = + if max_event.epoch() - 1 * Unit::Millisecond < self.first().epoch() { + self.first().epoch() + } else { + max_event.epoch() - 1 * Unit::Millisecond + }; + + let upper_max_epoch = + if max_event.epoch() + 1 * Unit::Millisecond > self.last().epoch() { + self.last().epoch() + } else { + max_event.epoch() + 1 * Unit::Millisecond + }; + + // Search around the min event + if let Ok(event_state) = + self.find_bracketed(lower_min_epoch, lower_max_epoch, event) + { + states.push(event_state); + }; + + // Search around the max event + if let Ok(event_state) = + self.find_bracketed(upper_min_epoch, upper_max_epoch, event) + { + states.push(event_state); + }; + + // If there still isn't any match, report that the event was not found + if states.is_empty() { + return Err(NyxError::from(TrajError::EventNotFound { + start: start_epoch, + end: end_epoch, + event: format!("{event}"), + })); + } + } + Err(_) => { + return Err(NyxError::from(TrajError::EventNotFound { + start: start_epoch, + end: end_epoch, + event: format!("{event}"), + })); + } + }; + } + // Remove duplicates and reorder + states.sort_by(|s1, s2| s1.state.epoch().partial_cmp(&s2.state.epoch()).unwrap()); + states.dedup(); + + match states.len() { + 0 => info!("Event {event} not found"), + 1 => info!("Event {event} found once on {}", states[0].state.epoch()), + _ => { + info!( + "Event {event} found {} times from {} until {}", + states.len(), + states.first().unwrap().state.epoch(), + states.last().unwrap().state.epoch() + ) + } + }; + + Ok(states) + } + + 
/// Find the minimum and maximum of the provided event through the trajectory + #[allow(clippy::identity_op)] + pub fn find_minmax(&self, event: &E, precision: Unit) -> Result<(S, S), NyxError> + where + E: EventEvaluator, + { + let step: Duration = 1 * precision; + let mut min_val = std::f64::INFINITY; + let mut max_val = std::f64::NEG_INFINITY; + let mut min_state = S::zeros(); + let mut max_state = S::zeros(); + + let (sender, receiver) = channel(); + + let epochs: Vec = + TimeSeries::inclusive(self.first().epoch(), self.last().epoch(), step).collect(); + + epochs.into_par_iter().for_each_with(sender, |s, epoch| { + let state = self.at(epoch).unwrap(); + let this_eval = event.eval(&state); + s.send((this_eval, state)).unwrap(); + }); + + let evald_states: Vec<_> = receiver.iter().collect(); + for (this_eval, state) in evald_states { + if this_eval < min_val { + min_val = this_eval; + min_state = state; + } + if this_eval > max_val { + max_val = this_eval; + max_state = state; + } + } + + Ok((min_state, max_state)) + } + + /// Identifies and pairs rising and falling edge events in a trajectory. + /// + /// This function processes a sequence of events in a trajectory and pairs each rising edge event with its subsequent falling edge event to form arcs. + /// Each arc represents a complete cycle of an event rising above and then falling below a specified threshold. + /// Use this to analyze a trajectory's behavior when understanding the complete cycle of an event (from rising to falling) is essential, e.g. ground station passes. + /// + /// # Arguments + /// - `event`: A reference to an object implementing the `EventEvaluator` trait, which is used to evaluate and classify events in the trajectory. + /// + /// # Returns + /// - `Result, NyxError>`: On success, returns a vector of EventArc, where each struct contains a pair of `EventDetails` (one for the rising edge and one for the falling edge). 
Returns an error if any issues occur during the event evaluation process. + /// + /// # Logic + /// - Sorts the events by their epoch to ensure chronological processing. + /// - Iterates through the sorted events, identifying transitions from falling to rising edges and vice versa. + /// - Pairs a rising edge with the subsequent falling edge to form an arc. + /// - Handles edge cases where the trajectory starts or ends with a rising or falling edge. + /// - Prints debug information for each event and arc. + /// + /// ## Note + /// If no zero crossing happens in the trajectory, i.e. the there is "event is true" _and_ "event is false", + /// then this function checks whether the event is true at the start and end of the trajectory. If so, it means + /// that there is a single arc that spans the whole trajectory. + /// + pub fn find_arcs(&self, event: &E) -> Result>, NyxError> + where + E: EventEvaluator, + { + let mut events = match self.find(event) { + Ok(events) => events, + Err(_) => { + // We haven't found the start or end of an arc, i.e. no zero crossing on the event. + // However, if the trajectory start and end are above the event value, then we found an arc. + let first_eval = event.eval(self.first()); + let last_eval = event.eval(self.last()); + if first_eval > 0.0 && last_eval > 0.0 { + // No event crossing found, but from the start until the end of the trajectory, we're in the same arc + // because the evaluation of the event is above the zero crossing. + // Hence, there's a single arc, and it's from start until the end of the trajectory. + vec![ + EventDetails::new(*self.first(), first_eval, event, self)?, + EventDetails::new(*self.last(), last_eval, event, self)?, + ] + } else { + return Err(NyxError::from(TrajError::EventNotFound { + start: self.first().epoch(), + end: self.last().epoch(), + event: format!("{event}"), + })); + } + } + }; + events.sort_by_key(|event| event.state.epoch()); + + // Now, let's pair the events. 
+ let mut arcs = Vec::new(); + + if events.is_empty() { + return Ok(arcs); + } + + // If the first event isn't a rising edge, then we mark the start of the trajectory as a rising edge + let mut prev_rise = if events[0].edge != EventEdge::Rising { + let value = event.eval(self.first()); + Some(EventDetails::new(*self.first(), value, event, self)?) + } else { + Some(events[0].clone()) + }; + + let mut prev_fall = if events[0].edge == EventEdge::Falling { + Some(events[0].clone()) + } else { + None + }; + + for event in events { + if event.edge == EventEdge::Rising { + if prev_rise.is_none() && prev_fall.is_none() { + // This is a new rising edge + prev_rise = Some(event.clone()); + } else if prev_fall.is_some() { + // We've found a transition from a fall to a rise, so we can close this arc out. + if prev_rise.is_some() { + let arc = EventArc { + rise: prev_rise.clone().unwrap(), + fall: prev_fall.clone().unwrap(), + }; + arcs.push(arc); + } else { + let arc = EventArc { + rise: event.clone(), + fall: prev_fall.clone().unwrap(), + }; + arcs.push(arc); + } + prev_fall = None; + // We have a new rising edge since this is how we ended up here. 
+ prev_rise = Some(event.clone()); + } + } else if event.edge == EventEdge::Falling { + prev_fall = Some(event.clone()); + } + } + + // Add the final pass + if prev_rise.is_some() { + if prev_fall.is_some() { + let arc = EventArc { + rise: prev_rise.clone().unwrap(), + fall: prev_fall.clone().unwrap(), + }; + arcs.push(arc); + } else { + // Use the last trajectory as the end of the arc + let value = event.eval(self.last()); + let fall = EventDetails::new(*self.last(), value, event, self)?; + let arc = EventArc { + rise: prev_rise.clone().unwrap(), + fall, + }; + arcs.push(arc); + } + } + + Ok(arcs) + } +} diff --git a/src/md/mod.rs b/src/md/mod.rs index bbb96307..cbff0f71 100644 --- a/src/md/mod.rs +++ b/src/md/mod.rs @@ -46,7 +46,7 @@ pub mod prelude { pub mod trajectory; -mod events; +pub(crate) mod events; pub use events::{Event, EventEvaluator}; pub mod objective; diff --git a/src/md/param.rs b/src/md/param.rs index 8b46786b..7bd90682 100644 --- a/src/md/param.rs +++ b/src/md/param.rs @@ -50,7 +50,7 @@ pub enum StateParameter { Cd, /// Coefficient of reflectivity Cr, - /// Declination (deg) + /// Declination (deg) (also called elevation if in a body fixed frame) Declination, /// Dry mass (kg) DryMass, diff --git a/src/md/trajectory/orbit_traj.rs b/src/md/trajectory/orbit_traj.rs index 08dab323..4fd5c301 100644 --- a/src/md/trajectory/orbit_traj.rs +++ b/src/md/trajectory/orbit_traj.rs @@ -55,7 +55,7 @@ impl Traj { traj.finalize(); #[cfg(not(target_arch = "wasm32"))] - info!( + debug!( "Converted trajectory from {} to {} in {} ms: {traj}", self.first().frame, new_frame, @@ -63,7 +63,7 @@ impl Traj { ); #[cfg(target_arch = "wasm32")] - info!( + debug!( "Converted trajectory from {} to {}: {traj}", self.first().frame, new_frame, diff --git a/src/md/trajectory/traj.rs b/src/md/trajectory/traj.rs index 0d78b112..c03c5572 100644 --- a/src/md/trajectory/traj.rs +++ b/src/md/trajectory/traj.rs @@ -25,13 +25,12 @@ use crate::linalg::allocator::Allocator; use 
crate::linalg::DefaultAllocator; use crate::md::prelude::{Frame, GuidanceMode, StateParameter}; use crate::md::EventEvaluator; -use crate::time::{Duration, Epoch, TimeSeries, TimeUnits, Unit}; +use crate::time::{Duration, Epoch, TimeSeries, TimeUnits}; use crate::utils::dcm_finite_differencing; use arrow::array::{Array, Float64Builder, StringBuilder}; use arrow::datatypes::{DataType, Field, Schema}; use arrow::record_batch::RecordBatch; use parquet::arrow::ArrowWriter; -use rayon::prelude::*; use std::collections::HashMap; use std::error::Error; use std::fmt; @@ -39,7 +38,6 @@ use std::fs::File; use std::iter::Iterator; use std::ops; use std::path::{Path, PathBuf}; -use std::sync::mpsc::channel; use std::sync::Arc; /// Store a trajectory of any State. @@ -142,283 +140,6 @@ where } } - /// Find the exact state where the request event happens. The event function is expected to be monotone in the provided interval because we find the event using a Brent solver. - #[allow(clippy::identity_op)] - pub fn find_bracketed(&self, start: Epoch, end: Epoch, event: &E) -> Result - where - E: EventEvaluator, - { - let max_iter = 50; - - // Helper lambdas, for f64s only - let has_converged = - |x1: f64, x2: f64| (x1 - x2).abs() <= event.epoch_precision().to_seconds(); - let arrange = |a: f64, ya: f64, b: f64, yb: f64| { - if ya.abs() > yb.abs() { - (a, ya, b, yb) - } else { - (b, yb, a, ya) - } - }; - - let xa_e = start; - let xb_e = end; - - // Search in seconds (convert to epoch just in time) - let mut xa = 0.0; - let mut xb = (xb_e - xa_e).to_seconds(); - // Evaluate the event at both bounds - let mut ya = event.eval(&self.at(xa_e)?); - let mut yb = event.eval(&self.at(xb_e)?); - - // Check if we're already at the root - if ya.abs() <= event.value_precision().abs() { - debug!( - "{event} -- found with |{ya}| < {} @ {xa_e}", - event.value_precision().abs() - ); - return self.at(xa_e); - } else if yb.abs() <= event.value_precision().abs() { - debug!( - "{event} -- found with 
|{yb}| < {} @ {xb_e}", - event.value_precision().abs() - ); - return self.at(xb_e); - } - - debug!("{event}: eval@{xa_e} = {ya}\t eval@{xb_e} = {yb}"); - - // The Brent solver, from the roots crate (sadly could not directly integrate it here) - // Source: https://docs.rs/roots/0.0.5/src/roots/numerical/brent.rs.html#57-131 - - let (mut xc, mut yc, mut xd) = (xa, ya, xa); - let mut flag = true; - - for _ in 0..max_iter { - if ya.abs() < event.value_precision().abs() { - return self.at(xa_e + xa * Unit::Second); - } - if yb.abs() < event.value_precision().abs() { - return self.at(xa_e + xb * Unit::Second); - } - if has_converged(xa, xb) { - // The event isn't in the bracket - return Err(NyxError::from(TrajError::EventNotFound { - start, - end, - event: format!("{event}"), - })); - } - let mut s = if (ya - yc).abs() > f64::EPSILON && (yb - yc).abs() > f64::EPSILON { - xa * yb * yc / ((ya - yb) * (ya - yc)) - + xb * ya * yc / ((yb - ya) * (yb - yc)) - + xc * ya * yb / ((yc - ya) * (yc - yb)) - } else { - xb - yb * (xb - xa) / (yb - ya) - }; - let cond1 = (s - xb) * (s - (3.0 * xa + xb) / 4.0) > 0.0; - let cond2 = flag && (s - xb).abs() >= (xb - xc).abs() / 2.0; - let cond3 = !flag && (s - xb).abs() >= (xc - xd).abs() / 2.0; - let cond4 = flag && has_converged(xb, xc); - let cond5 = !flag && has_converged(xc, xd); - if cond1 || cond2 || cond3 || cond4 || cond5 { - s = (xa + xb) / 2.0; - flag = true; - } else { - flag = false; - } - let next_try = self.at(xa_e + s * Unit::Second)?; - let ys = event.eval(&next_try); - xd = xc; - xc = xb; - yc = yb; - if ya * ys < 0.0 { - // Root bracketed between a and s - let next_try = self.at(xa_e + xa * Unit::Second)?; - let ya_p = event.eval(&next_try); - let (_a, _ya, _b, _yb) = arrange(xa, ya_p, s, ys); - { - xa = _a; - ya = _ya; - xb = _b; - yb = _yb; - } - } else { - // Root bracketed between s and b - let next_try = self.at(xa_e + xb * Unit::Second)?; - let yb_p = event.eval(&next_try); - let (_a, _ya, _b, _yb) = arrange(s, ys, 
xb, yb_p); - { - xa = _a; - ya = _ya; - xb = _b; - yb = _yb; - } - } - } - Err(NyxError::MaxIterReached(format!( - "Brent solver failed after {max_iter} iterations", - ))) - } - - /// Find all of the states where the event happens (usually, and with caveats). - /// - /// # Limitations - /// This method uses a Brent solver. If the function that defines the event is not unimodal, the event finder may not converge correctly. - /// - /// # Heuristic detail - /// The initial search step is 1% of the duration of the trajectory duration. - /// For example, if the trajectory is 100 days long, then we split the trajectory into 100 chunks of 1 day and see whether - /// the event is in there. If the event happens twice or more times within 1% of the trajectory duration, only the _one_ of - /// such events will be found. - /// - /// If this heuristic fails to find any such events, then `find_minmax` is called on the event with a time precision of `Unit::Second`. - /// Then we search only within the min and max bounds of the provided event. 
- #[allow(clippy::identity_op)] - pub fn find_all(&self, event: &E) -> Result, NyxError> - where - E: EventEvaluator, - { - let start_epoch = self.first().epoch(); - let end_epoch = self.last().epoch(); - if start_epoch == end_epoch { - return Err(NyxError::from(TrajError::EventNotFound { - start: start_epoch, - end: end_epoch, - event: format!("{event}"), - })); - } - let heuristic = (end_epoch - start_epoch) / 100; - info!("Searching for {event} with initial heuristic of {heuristic}",); - - let (sender, receiver) = channel(); - - let epochs: Vec = TimeSeries::inclusive(start_epoch, end_epoch, heuristic).collect(); - epochs.into_par_iter().for_each_with(sender, |s, epoch| { - if let Ok(event_state) = self.find_bracketed(epoch, epoch + heuristic, event) { - s.send(event_state).unwrap() - }; - }); - - let mut states: Vec<_> = receiver.iter().collect(); - - if states.is_empty() { - warn!("Heuristic failed to find any {event} event, using slower approach"); - // Crap, we didn't find the event. - // Let's find the min and max of this event throughout the trajectory, and search around there. 
- match self.find_minmax(event, Unit::Second) { - Ok((min_event, max_event)) => { - let lower_min_epoch = - if min_event.epoch() - 1 * Unit::Millisecond < self.first().epoch() { - self.first().epoch() - } else { - min_event.epoch() - 1 * Unit::Millisecond - }; - - let lower_max_epoch = - if min_event.epoch() + 1 * Unit::Millisecond > self.last().epoch() { - self.last().epoch() - } else { - min_event.epoch() + 1 * Unit::Millisecond - }; - - let upper_min_epoch = - if max_event.epoch() - 1 * Unit::Millisecond < self.first().epoch() { - self.first().epoch() - } else { - max_event.epoch() - 1 * Unit::Millisecond - }; - - let upper_max_epoch = - if max_event.epoch() + 1 * Unit::Millisecond > self.last().epoch() { - self.last().epoch() - } else { - max_event.epoch() + 1 * Unit::Millisecond - }; - - // Search around the min event - if let Ok(event_state) = - self.find_bracketed(lower_min_epoch, lower_max_epoch, event) - { - states.push(event_state); - }; - - // Search around the max event - if let Ok(event_state) = - self.find_bracketed(upper_min_epoch, upper_max_epoch, event) - { - states.push(event_state); - }; - - // If there still isn't any match, report that the event was not found - if states.is_empty() { - return Err(NyxError::from(TrajError::EventNotFound { - start: start_epoch, - end: end_epoch, - event: format!("{event}"), - })); - } - } - Err(_) => { - return Err(NyxError::from(TrajError::EventNotFound { - start: start_epoch, - end: end_epoch, - event: format!("{event}"), - })); - } - }; - } - // Remove duplicates and reorder - states.sort_by(|s1, s2| s1.epoch().partial_cmp(&s2.epoch()).unwrap()); - states.dedup(); - for (cnt, event_state) in states.iter().enumerate() { - info!( - "{event} #{}: {} for {event_state}", - cnt + 1, - event.eval_string(event_state) - ); - } - Ok(states) - } - - /// Find the minimum and maximum of the provided event through the trajectory - #[allow(clippy::identity_op)] - pub fn find_minmax(&self, event: &E, precision: Unit) -> 
Result<(S, S), NyxError> - where - E: EventEvaluator, - { - let step: Duration = 1 * precision; - let mut min_val = std::f64::INFINITY; - let mut max_val = std::f64::NEG_INFINITY; - let mut min_state = S::zeros(); - let mut max_state = S::zeros(); - - let (sender, receiver) = channel(); - - let epochs: Vec = - TimeSeries::inclusive(self.first().epoch(), self.last().epoch(), step).collect(); - - epochs.into_par_iter().for_each_with(sender, |s, epoch| { - let state = self.at(epoch).unwrap(); - let this_eval = event.eval(&state); - s.send((this_eval, state)).unwrap(); - }); - - let evald_states: Vec<_> = receiver.iter().collect(); - for (this_eval, state) in evald_states { - if this_eval < min_val { - min_val = this_eval; - min_state = state; - } - if this_eval > max_val { - max_val = this_eval; - max_state = state; - } - } - - Ok((min_state, max_state)) - } - /// Store this trajectory arc to a parquet file with the default configuration (depends on the state type, search for `export_params` in the documentation for details). 
pub fn to_parquet_simple>(&self, path: P) -> Result> { self.to_parquet(path, None, ExportCfg::default()) diff --git a/src/od/filter/kalman.rs b/src/od/filter/kalman.rs index caaa0c65..1c0eca23 100644 --- a/src/od/filter/kalman.rs +++ b/src/od/filter/kalman.rs @@ -372,12 +372,12 @@ where if let Some(ratio_thresh) = resid_ratio_check { if ratio > ratio_thresh { - warn!("{epoch} msr rejected: residual ratio {ratio} > {ratio_thresh}"); + warn!("{epoch} msr rejected: residual ratio {ratio:.3e} > {ratio_thresh}"); // Perform only a time update and return let pred_est = self.time_update(nominal_state)?; return Ok((pred_est, Residual::rejected(epoch, prefit, ratio))); } else { - debug!("{epoch} msr accepted: residual ratio {ratio} < {ratio_thresh}"); + debug!("{epoch} msr accepted: residual ratio {ratio:.3e} < {ratio_thresh}"); } } diff --git a/src/od/ground_station.rs b/src/od/ground_station.rs index 26dd0669..40053c31 100644 --- a/src/od/ground_station.rs +++ b/src/od/ground_station.rs @@ -21,11 +21,13 @@ use super::noise::GaussMarkov; use super::TrackingDeviceSim; use crate::cosmic::{Cosm, Frame, Orbit}; use crate::io::{frame_from_str, frame_to_str, ConfigRepr, Configurable}; -use crate::md::prelude::Traj; +use crate::md::prelude::{Interpolatable, Traj}; +use crate::md::EventEvaluator; use crate::time::Epoch; use crate::utils::between_0_360; use crate::{NyxError, Spacecraft}; -use hifitime::Duration; +use hifitime::{Duration, Unit}; +use nalgebra::{allocator::Allocator, DefaultAllocator}; use rand_pcg::Pcg64Mcg; use serde_derive::{Deserialize, Serialize}; use std::fmt; @@ -403,86 +405,97 @@ impl fmt::Display for GroundStation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, - "[{}] {} (lat.: {:.4} deg long.: {:.4} deg alt.: {:.3} m)", - self.frame, + "{} (lat.: {:.4} deg long.: {:.4} deg alt.: {:.3} m) [{}]", self.name, self.latitude_deg, self.longitude_deg, self.height_km * 1e3, + self.frame, ) } } -#[test] -fn test_load_single() { - use std::env; - 
use std::path::PathBuf; - - use hifitime::TimeUnits; - - let cosm = Cosm::de438(); - - // Get the path to the root directory of the current Cargo project - let test_data: PathBuf = [ - env::var("CARGO_MANIFEST_DIR").unwrap(), - "data".to_string(), - "tests".to_string(), - "config".to_string(), - "one_ground_station.yaml".to_string(), - ] - .iter() - .collect(); - - assert!(test_data.exists(), "Could not find the test data"); - - let gs = GroundStation::load(test_data).unwrap(); - - dbg!(&gs); - - let expected_gs = GroundStation { - name: "Demo ground station".to_string(), - frame: cosm.frame("IAU Earth"), - elevation_mask_deg: 5.0, - range_noise_km: Some(GaussMarkov::new(1.days(), 5e-3, 1e-4).unwrap()), - doppler_noise_km_s: Some(GaussMarkov::new(1.days(), 5e-5, 1.5e-6).unwrap()), - latitude_deg: 2.3522, - longitude_deg: 48.8566, - height_km: 0.4, - light_time_correction: false, - timestamp_noise_s: None, - integration_time: None, - }; - - assert_eq!(expected_gs, gs); +impl EventEvaluator for &GroundStation +where + DefaultAllocator: + Allocator + Allocator + Allocator, +{ + /// Compute the elevation in the SEZ frame. This call will panic if the frame of the input state does not match that of the ground station. + fn eval(&self, rx_gs_frame: &S) -> f64 { + let dt = rx_gs_frame.epoch(); + // Then, compute the rotation matrix from the body fixed frame of the ground station to its topocentric frame SEZ. + let tx_gs_frame = self.to_orbit(dt); + // Note: we're only looking at the radii so we don't need to apply the transport theorem here. + let dcm_topo2fixed = tx_gs_frame.dcm_from_traj_frame(Frame::SEZ).unwrap(); + + // Now, rotate the spacecraft in the SEZ frame to compute its elevation as seen from the ground station. + // We transpose the DCM so that it's the fixed to topocentric rotation. 
+ let rx_sez = rx_gs_frame + .orbit() + .with_position_rotated_by(dcm_topo2fixed.transpose()); + let tx_sez = tx_gs_frame.with_position_rotated_by(dcm_topo2fixed.transpose()); + // Now, let's compute the range ρ. + let rho_sez = rx_sez - tx_sez; + + // Finally, compute the elevation (math is the same as declination) + // Source: Vallado, section 4.4.3 + // Only the sine is needed as per Vallado, and the formula is the same as the declination + // because we're in the SEZ frame. + rho_sez.declination_deg() - self.elevation_mask_deg + } + + fn eval_string(&self, state: &S) -> String { + format!( + "Elevation from {} is {:.6} deg on {}", + self.name, + self.eval(state) + self.elevation_mask_deg, + state.epoch() + ) + } + + /// Epoch precision of the election evaluator is 1 ms + fn epoch_precision(&self) -> Duration { + 1 * Unit::Second + } + + /// Angle precision of the elevation evaluator is 1 millidegree. + fn value_precision(&self) -> f64 { + 1e-3 + } } -#[test] -fn test_load_many() { - use crate::cosmic::Cosm; - use hifitime::TimeUnits; - use std::env; - use std::path::PathBuf; +#[cfg(test)] +mod gs_ut { + use crate::io::ConfigRepr; + use crate::od::prelude::*; + + #[test] + fn test_load_single() { + use std::env; + use std::path::PathBuf; + + use hifitime::TimeUnits; - // Get the path to the root directory of the current Cargo project + let cosm = Cosm::de438(); - let test_file: PathBuf = [ - env::var("CARGO_MANIFEST_DIR").unwrap(), - "data".to_string(), - "tests".to_string(), - "config".to_string(), - "many_ground_stations.yaml".to_string(), - ] - .iter() - .collect(); + // Get the path to the root directory of the current Cargo project + let test_data: PathBuf = [ + env::var("CARGO_MANIFEST_DIR").unwrap(), + "data".to_string(), + "tests".to_string(), + "config".to_string(), + "one_ground_station.yaml".to_string(), + ] + .iter() + .collect(); - let stations = GroundStation::load_many(test_file).unwrap(); + assert!(test_data.exists(), "Could not find the test 
data"); - dbg!(&stations); + let gs = GroundStation::load(test_data).unwrap(); - let cosm = Cosm::de438(); + dbg!(&gs); - let expected = vec![ - GroundStation { + let expected_gs = GroundStation { name: "Demo ground station".to_string(), frame: cosm.frame("IAU Earth"), elevation_mask_deg: 5.0, @@ -494,21 +507,65 @@ fn test_load_many() { light_time_correction: false, timestamp_noise_s: None, integration_time: None, - }, - GroundStation { - name: "Canberra".to_string(), - frame: cosm.frame("IAU Earth"), - elevation_mask_deg: 5.0, - range_noise_km: Some(GaussMarkov::new(1.days(), 5e-3, 1e-4).unwrap()), - doppler_noise_km_s: Some(GaussMarkov::new(1.days(), 5e-5, 1.5e-6).unwrap()), - latitude_deg: -35.398333, - longitude_deg: 148.981944, - height_km: 0.691750, - light_time_correction: false, - timestamp_noise_s: None, - integration_time: None, - }, - ]; + }; - assert_eq!(expected, stations); + assert_eq!(expected_gs, gs); + } + + #[test] + fn test_load_many() { + use crate::cosmic::Cosm; + use hifitime::TimeUnits; + use std::env; + use std::path::PathBuf; + + // Get the path to the root directory of the current Cargo project + + let test_file: PathBuf = [ + env::var("CARGO_MANIFEST_DIR").unwrap(), + "data".to_string(), + "tests".to_string(), + "config".to_string(), + "many_ground_stations.yaml".to_string(), + ] + .iter() + .collect(); + + let stations = GroundStation::load_many(test_file).unwrap(); + + dbg!(&stations); + + let cosm = Cosm::de438(); + + let expected = vec![ + GroundStation { + name: "Demo ground station".to_string(), + frame: cosm.frame("IAU Earth"), + elevation_mask_deg: 5.0, + range_noise_km: Some(GaussMarkov::new(1.days(), 5e-3, 1e-4).unwrap()), + doppler_noise_km_s: Some(GaussMarkov::new(1.days(), 5e-5, 1.5e-6).unwrap()), + latitude_deg: 2.3522, + longitude_deg: 48.8566, + height_km: 0.4, + light_time_correction: false, + timestamp_noise_s: None, + integration_time: None, + }, + GroundStation { + name: "Canberra".to_string(), + frame: cosm.frame("IAU 
Earth"), + elevation_mask_deg: 5.0, + range_noise_km: Some(GaussMarkov::new(1.days(), 5e-3, 1e-4).unwrap()), + doppler_noise_km_s: Some(GaussMarkov::new(1.days(), 5e-5, 1.5e-6).unwrap()), + latitude_deg: -35.398333, + longitude_deg: 148.981944, + height_km: 0.691750, + light_time_correction: false, + timestamp_noise_s: None, + integration_time: None, + }, + ]; + + assert_eq!(expected, stations); + } } diff --git a/src/od/mod.rs b/src/od/mod.rs index 2c129b5c..daa5d146 100644 --- a/src/od/mod.rs +++ b/src/od/mod.rs @@ -57,6 +57,7 @@ pub mod prelude { pub use super::filter::kalman::*; pub use super::ground_station::*; pub use super::msr::*; + pub use super::noise::GaussMarkov; pub use super::process::*; pub use super::simulator::TrackingArcSim; pub use super::simulator::*; diff --git a/src/od/msr/arc.rs b/src/od/msr/arc.rs index 89f91a15..098b5680 100644 --- a/src/od/msr/arc.rs +++ b/src/od/msr/arc.rs @@ -204,17 +204,20 @@ where /// Returns the minimum duration between two subsequent measurements. /// This is important to correctly set up the propagator and not miss any measurement. 
pub fn min_duration_sep(&self) -> Option { - let mut windows = self.measurements.windows(2); - let first_window = windows.next()?; - let mut min_interval = first_window[1].1.epoch() - first_window[0].1.epoch(); - for window in windows { - let interval = window[1].1.epoch() - window[0].1.epoch(); - if interval != Duration::ZERO && interval < min_interval { - min_interval = interval; + if self.measurements.is_empty() { + None + } else { + let mut windows = self.measurements.windows(2); + let first_window = windows.next()?; + let mut min_interval = first_window[1].1.epoch() - first_window[0].1.epoch(); + for window in windows { + let interval = window[1].1.epoch() - window[0].1.epoch(); + if interval != Duration::ZERO && interval < min_interval { + min_interval = interval; + } } + Some(min_interval) } - - Some(min_interval) } /// If this tracking arc has devices that can be used to generate simulated measurements, diff --git a/src/od/noise/gauss_markov.rs b/src/od/noise/gauss_markov.rs index 424623fe..f8602c76 100644 --- a/src/od/noise/gauss_markov.rs +++ b/src/od/noise/gauss_markov.rs @@ -203,8 +203,8 @@ impl GaussMarkov { pub fn high_precision_doppler_km_s() -> Self { Self { tau: 12.hours(), - bias_sigma: 50.0e-6, // 5 cm/s - steady_state_sigma: 1.5e-6, // 0.15 cm/s + bias_sigma: 35.0e-7, // 3.5 mm/s + steady_state_sigma: 3.5e-7, // 0.35 mm/s bias: None, epoch: None, } diff --git a/src/od/process/conf.rs b/src/od/process/conf.rs index fb1f501f..2e886787 100644 --- a/src/od/process/conf.rs +++ b/src/od/process/conf.rs @@ -56,9 +56,9 @@ impl fmt::Display for SmoothingArc { pub struct IterationConf { /// The number of measurements to account for in the iteration pub smoother: SmoothingArc, - /// The absolute tolerance of the RMS postfit residual + /// The absolute tolerance of the RMS prefit residual ratios pub absolute_tol: f64, - /// The relative tolerance between the latest RMS postfit residual and the best RMS postfit residual so far + /// The relative tolerance 
between the latest RMS prefit residual ratios and the best RMS prefit residual ratios so far pub relative_tol: f64, /// The maximum number of iterations to allow (will raise an error if the filter has not converged after this many iterations) pub max_iterations: usize, @@ -149,8 +149,8 @@ impl Default for IterationConf { fn default() -> Self { Self { smoother: SmoothingArc::All, - absolute_tol: 1e-2, - relative_tol: 1e-3, + absolute_tol: 1e-1, + relative_tol: 1e-2, max_iterations: 15, max_divergences: 3, force_failure: false, diff --git a/src/od/process/mod.rs b/src/od/process/mod.rs index 54181795..eab5d522 100644 --- a/src/od/process/mod.rs +++ b/src/od/process/mod.rs @@ -314,7 +314,7 @@ where } info!("***************************"); - info!("*** Iteration number {} ***", iter_cnt); + info!("*** Iteration number {iter_cnt:02} ***"); info!("***************************"); // First, smooth the estimates @@ -331,8 +331,9 @@ where // Compute the new RMS let new_rms = self.rms_residual_ratios(); - let cur_rel_rms = (new_rms - best_rms).abs() / best_rms; - if cur_rel_rms < config.relative_tol { + let cur_rms_num = (new_rms - previous_rms).abs(); + let cur_rel_rms = cur_rms_num / previous_rms; + if cur_rel_rms < config.relative_tol || cur_rms_num < config.absolute_tol * best_rms { info!("*****************"); info!("*** CONVERGED ***"); info!("*****************"); @@ -340,18 +341,22 @@ where "New residual RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}", new_rms, previous_rms, best_rms ); - info!( - "Filter converged to relative tolerance ({:.2e} < {:.2e}) after {} iterations", - cur_rel_rms, config.relative_tol, iter_cnt - ); - + if cur_rel_rms < config.relative_tol { + info!( + "Filter converged on relative tolerance ({:.2e} < {:.2e}) after {} iterations", + cur_rel_rms, config.relative_tol, iter_cnt + ); + } else { + info!( + "Filter converged on relative change ({:.2e} < {:.2e} * {:.2e}) after {} iterations", + cur_rms_num, config.absolute_tol, best_rms, iter_cnt + ); 
+ } break; - } - - if new_rms > previous_rms { + } else if new_rms > previous_rms { warn!( - "New residual RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}", - new_rms, previous_rms, best_rms + "New residual RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5} ({cur_rel_rms:.2e} > {:.2e})", + new_rms, previous_rms, best_rms, config.relative_tol ); divergence_cnt += 1; previous_rms = new_rms; @@ -371,8 +376,8 @@ where } } else { info!( - "New residual RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5}", - new_rms, previous_rms, best_rms + "New residual RMS: {:.5}\tPrevious RMS: {:.5}\tBest RMS: {:.5} ({cur_rel_rms:.2e} > {:.2e})", + new_rms, previous_rms, best_rms, config.relative_tol ); // Reset the counter divergence_cnt = 0; @@ -411,7 +416,14 @@ where let mut devices = arc.rebuild_devices::(self.cosm.clone()).unwrap(); let measurements = &arc.measurements; - let step_size = arc.min_duration_sep().unwrap(); + let step_size = match arc.min_duration_sep() { + Some(step_size) => step_size, + None => { + return Err(NyxError::CustomError( + "empty measurement set provided".to_string(), + )) + } + }; self.iterate(measurements, &mut devices, step_size, config) } @@ -425,7 +437,14 @@ where let mut devices = arc.rebuild_devices::(self.cosm.clone()).unwrap(); let measurements = &arc.measurements; - let step_size = arc.min_duration_sep().unwrap(); + let step_size = match arc.min_duration_sep() { + Some(step_size) => step_size, + None => { + return Err(NyxError::CustomError( + "empty measurement set provided".to_string(), + )) + } + }; self.process(measurements, &mut devices, step_size) } @@ -435,12 +454,13 @@ where /// # Argument details /// + The measurements must be a list mapping the name of the measurement device to the measurement itself. /// + The name of all measurement devices must be present in the provided devices, i.e. the key set of `devices` must be a superset of the measurement device names present in the list. 
+ /// + The maximum step size to ensure we don't skip any measurements. #[allow(clippy::erasing_op)] pub fn process( &mut self, measurements: &[(String, Msr)], devices: &mut HashMap, - step_size: Duration, + max_step: Duration, ) -> Result<(), NyxError> where Dev: TrackingDeviceSim, @@ -449,20 +469,27 @@ where measurements.len() >= 2, "must have at least two measurements" ); + + assert!( + max_step.abs() > (0.0 * Unit::Nanosecond), + "step size is zero" + ); + // Start by propagating the estimator (on the same thread). let num_msrs = measurements.len(); // Update the step size of the navigation propagator if it isn't already fixed step if !self.prop.fixed_step { - self.prop.set_step(step_size, false); + self.prop.set_step(max_step, false); } let prop_time = measurements[num_msrs - 1].1.epoch() - self.kf.previous_estimate().epoch(); - info!("Navigation propagating for a total of {prop_time} with step size {step_size}"); + info!("Navigation propagating for a total of {prop_time} with step size {max_step}"); let mut epoch = self.prop.state.epoch(); let mut reported = [false; 11]; + reported[0] = true; // Prevent showing "0% done" info!("Processing {num_msrs} measurements with covariance mapping"); // We'll build a trajectory of the estimated states. This will be used to compute the measurements. @@ -485,16 +512,12 @@ where loop { let delta_t = next_msr_epoch - epoch; - // Propagator for the minimum time between the step size and the duration to the next measurement. - // Ensure that we don't go backward if the previous step we took was indeed backward. - let next_step_size = delta_t.min(if self.prop.details.step.is_negative() { - step_size - } else { - self.prop.details.step - }); + // Propagator for the minimum time between the maximum step size and the duration to the next measurement. 
+ + let next_step_size = delta_t.min(self.prop.step_size); - // Remove old states from the trajectory (this is a manual implementation of `retaint` because we know it's a sorted vec) - // traj.states.retain(|state: &S| state.epoch() <= epoch); + // Remove old states from the trajectory + // This is a manual implementation of `retaint` because we know it's a sorted vec, so no need to resort every time let mut index = traj.states.len(); while index > 0 { index -= 1; @@ -504,7 +527,7 @@ where } traj.states.truncate(index); - debug!("advancing propagator by {next_step_size} (Δt to next msr: {delta_t})"); + debug!("propagate for {next_step_size} (Δt to next msr: {delta_t})"); let (_, traj_covar) = self.prop.for_duration_with_traj(next_step_size)?; for state in traj_covar.states { @@ -602,7 +625,7 @@ where if !reported[msr_prct] { info!( "{:>3}% done ({msr_accepted_cnt:.0} measurements accepted, {:.0} rejected)", - 10 * msr_prct, msr_cnt - (msr_accepted_cnt - 1) + 10 * msr_prct, msr_cnt - msr_accepted_cnt.saturating_sub(1) ); reported[msr_prct] = true; } diff --git a/src/od/simulator/arc.rs b/src/od/simulator/arc.rs index 82d69db1..20723a62 100644 --- a/src/od/simulator/arc.rs +++ b/src/od/simulator/arc.rs @@ -16,7 +16,7 @@ along with this program. If not, see . 
*/ -use hifitime::{Duration, Epoch, TimeSeries}; +use hifitime::{Duration, Epoch, TimeSeries, TimeUnits}; use num::integer::gcd; use rand::SeedableRng; use rand_pcg::Pcg64Mcg; @@ -24,18 +24,20 @@ use rand_pcg::Pcg64Mcg; pub use crate::dynamics::{Dynamics, NyxError}; use crate::io::ConfigError; use crate::md::trajectory::Interpolatable; -use crate::od::msr::TrackingArc; -use crate::od::simulator::{Availability, Schedule}; -use crate::od::Measurement; +use crate::od::msr::{RangeDoppler, TrackingArc}; +use crate::od::prelude::Strand; +use crate::od::simulator::Cadence; +use crate::od::{GroundStation, Measurement}; pub use crate::{cosmic::Cosm, State, TimeTagged}; use crate::{linalg::allocator::Allocator, od::TrackingDeviceSim}; use crate::{linalg::DefaultAllocator, md::prelude::Traj}; -use std::collections::{HashMap, HashSet}; +use crate::{Orbit, Spacecraft}; +use std::collections::HashMap; use std::fmt::Display; use std::marker::PhantomData; use std::sync::Arc; -use super::TrkConfig; +use super::{Handoff, TrkConfig}; #[derive(Clone)] pub struct TrackingArcSim @@ -53,12 +55,9 @@ where /// Map of devices from their names. pub devices: HashMap, /// Receiver trajectory - pub trajectory: Traj, // TODO: Convert this to a reference + pub trajectory: Traj, /// Configuration of each device pub configs: HashMap, - /// Set to true to allow for overlapping measurements (enabled by default), - /// i.e. if N ground stations are available at a given epoch, generate N measurements not just one. - pub allow_overlap: bool, /// Random number generator used for this tracking arc, ensures repeatability rng: Pcg64Mcg, /// Greatest common denominator time series that allows this arc to meet all of the conditions. @@ -79,6 +78,7 @@ where + Allocator::Size> + Allocator, { + /// Build a new tracking arc simulator using the provided seeded random number generator. 
pub fn with_rng( devices: Vec, trajectory: Traj, @@ -91,17 +91,27 @@ where let mut sampling_rates_ns = Vec::with_capacity(devices.len()); for device in devices { if let Some(cfg) = configs.get(&device.name()) { - cfg.sanity_check()?; + if let Err(e) = cfg.sanity_check() { + warn!("Ignoring device {}: {e}", device.name()); + continue; + } sampling_rates_ns.push(cfg.sampling.truncated_nanoseconds()); } else { - return Err(ConfigError::InvalidConfig(format!( - "device {} has no associated configuration", + warn!( + "Ignoring device {}: no associated tracking configuration", device.name() - ))); + ); + continue; } devices_map.insert(device.name(), device); } + if devices_map.is_empty() { + return Err(ConfigError::InvalidConfig( + "None of the devices are properly configured".to_string(), + )); + } + let common_sampling_rate_ns = sampling_rates_ns .iter() .fold(sampling_rates_ns[0], |a, &b| gcd(a, b)); @@ -118,7 +128,6 @@ where devices: devices_map, trajectory, configs, - allow_overlap: false, rng, time_series, _msr_in: PhantomData, @@ -130,6 +139,7 @@ where Ok(me) } + /// Build a new tracking arc simulator using the provided seed to initialize the random number generator. pub fn with_seed( devices: Vec, trajectory: Traj, @@ -141,6 +151,7 @@ where Self::with_rng(devices, trajectory, configs, rng) } + /// Build a new tracking arc simulator using the system entropy to seed the random number generator. pub fn new( devices: Vec, trajectory: Traj, @@ -151,194 +162,88 @@ where Self::with_rng(devices, trajectory, configs, rng) } - /// Disallows overlapping measurements - pub fn disallow_overlap(&mut self) { - self.allow_overlap = false; - } - - /// Allows overlapping measurements - pub fn allow_overlap(&mut self) { - self.allow_overlap = true; - } - - /// Generates measurements from the simulated tracking arc. 
+ /// Generates measurements for the tracking arc using the defined strands /// - /// Notes: + /// # Warning + /// This function will return an error if any of the devices defines a scheduler. + /// You must create the schedule using `generate_schedule` first. + /// + /// # Notes /// Although mutable, this function may be called several times to generate different measurements. + /// + /// # Algorithm + /// For each tracking device, and for each strand within that device, sample the trajectory at the sample + /// rate of the tracking device, adding a measurement whenever the spacecraft is visible. + /// Build the measurements as a vector, ordered chronologically. + /// pub fn generate_measurements(&mut self, cosm: Arc) -> Result, NyxError> { - // Stores the first measurement and last measurement of a given sub-arc for each device. - #[derive(Copy, Clone, Debug)] - struct ScheduleData { - start: Option, - prev: Option, - end: Option, - } - - let mut schedule: HashMap = HashMap::new(); - - let mut start_trace_msg = HashSet::new(); - let mut end_trace_msg = HashSet::new(); - let mut schedule_trace_msg = HashSet::new(); - - let start = Epoch::now().unwrap(); let mut measurements = Vec::new(); - // Clone the time series so we don't consume it.
- let ts = self.time_series.clone(); - 'ts: for epoch in ts { - 'devices: for (name, device) in self.devices.iter_mut() { - let cfg = &self.configs[name]; - // Check the start condition - if let Availability::Epoch(start_epoch) = cfg.start { - if start_epoch > epoch { - if !start_trace_msg.contains(name) { - info!( - "{name} tracking starts in {} (start = {start_epoch})", - start_epoch - epoch - ); - start_trace_msg.insert(name.clone()); - } - continue; - } - } - // Check the end condition - if let Availability::Epoch(end_epoch) = cfg.end { - if end_epoch < epoch { - if !end_trace_msg.contains(name) { - info!( - "{name} tracking ended {} ago (end = {end_epoch})", - epoch - end_epoch - ); - end_trace_msg.insert(name.clone()); - } - continue; - } + for (name, device) in self.devices.iter_mut() { + let cfg = &self.configs[name]; + if cfg.scheduler.is_some() { + if cfg.strands.is_none() { + return Err(NyxError::CustomError(format!( + "schedule for {name} must be built before generating measurements" + ))); + } else { + warn!("scheduler for {name} is ignored, using the defined tracking strands instead") } + } - // Check the schedule - if let Some(device_schedule) = schedule.get(name) { - // Check that we aren't generating too many measurements - if let Some(prev_epoch) = device_schedule.prev { - if (epoch - prev_epoch) < cfg.sampling { - continue; - } - } - match cfg.schedule { - Schedule::Intermittent { on, off } => { - // Check that we haven't been on for too long - if let Some(start_epoch) = device_schedule.start { - if (epoch - start_epoch) > on { - if !schedule_trace_msg.contains(name) { - info!( - "{name} is now turned off after being on for {}", - epoch - start_epoch - ); - schedule_trace_msg.insert(name.clone()); + let init_msr_count = measurements.len(); + let tick = Epoch::now().unwrap(); + + match cfg.strands.as_ref() { + Some(strands) => { + // Strands are defined at this point + 'strands: for strand in strands { + // Build the time series for this strand, 
sampling at the correct rate + for epoch in TimeSeries::inclusive(strand.start, strand.end, cfg.sampling) { + match device.measure( + epoch, + &self.trajectory, + Some(&mut self.rng), + cosm.clone(), + ) { + Ok(msr_opt) => { + if let Some(msr) = msr_opt { + measurements.push((name.clone(), msr)); } - continue; } - } - // Check that we haven't been off for enough time - if let Some(end_epoch) = device_schedule.end { - if (epoch - end_epoch) <= off { - if !schedule_trace_msg.contains(name) { - info!( - "{name} will be available again in {}", - epoch - end_epoch - ); - schedule_trace_msg.insert(name.clone()); - } - continue; + Err(e) => { + error!( + "Skipping the remaining strand ending on {}: {e}", + strand.end + ); + continue 'strands; } } } - Schedule::Continuous => { - // No filtering, pass through - } - } - } - - // Check the exclusion epochs - if let Some(excl_list) = &cfg.exclusion_epochs { - for excl in excl_list { - if excl.contains(epoch) { - // We are in an exclusion epoch, move to next device. - debug!("Exclusion guard for {name} at {epoch}"); - continue 'devices; - } } - } - // Check the inclusion epochs - if let Some(incl_list) = &cfg.inclusion_epochs { - for incl in incl_list { - if !incl.contains(epoch) { - // Current epoch is not included in the inclusion epochs list, move to next device. - debug!("Inclusion guard for {name} at {epoch}"); - continue 'devices; - } - } + info!( + "Generated {} measurements for {name} for {} tracking strands in {}", + measurements.len() - init_msr_count, + strands.len(), + (Epoch::now().unwrap() - tick).round(1.0_f64.milliseconds()) + ); } - - // Availability OK, so let's remove this device from the trace messages if needed. - start_trace_msg.remove(name); - schedule_trace_msg.remove(name); - end_trace_msg.remove(name); - - if let Some(msr) = - device.measure(epoch, &self.trajectory, Some(&mut self.rng), cosm.clone())? 
- { - measurements.push((name.clone(), msr)); - // We have a new measurement, let's update the schedule. - if let Some(device_schedule) = schedule.get_mut(name) { - if device_schedule.start.is_none() { - // Set the start time of this pass - device_schedule.start = Some(epoch); - info!("{name} is now tracking {epoch}"); - } - // In any case, set the end to none and set the prev to now. - device_schedule.prev = Some(epoch); - device_schedule.end = None; - } else { - info!("{name} is now tracking {epoch}"); - // Oh, great, first measurement for this device! - schedule.insert( - name.to_string(), - ScheduleData { - start: Some(epoch), - prev: Some(epoch), - end: None, - }, - ); - } - - if !self.allow_overlap { - // No need to check for the availability of other around station for this epoch - // so let's move to the next epoch in the time series. - continue 'ts; - } - } else if let Some(device_schedule) = schedule.get_mut(name) { - // No measurement was generated, so let's update the schedule if there is one. - if device_schedule.end.is_none() { - device_schedule.start = None; - device_schedule.end = Some(epoch); - } + None => { + warn!("No tracking strands defined for {name}, skipping"); } } } - info!( - "Generated {} measurements in {}", - measurements.len(), - Epoch::now().unwrap() - start - ); - let mut devices = Vec::new(); for device in self.devices.values() { let repr = device.to_config().unwrap(); devices.push(repr); } + // Reorder the measurements + measurements.sort_by_key(|(_name, msr)| msr.epoch()); + // Build the tracking arc. let trk = TrackingArc { device_cfg: serde_yaml::to_string(&devices).unwrap(), @@ -352,7 +257,6 @@ where impl Display for TrackingArcSim where D: TrackingDeviceSim, - MsrIn: State, Msr: Measurement, MsrIn: Interpolatable, DefaultAllocator: Allocator::Size> @@ -371,3 +275,297 @@ where ) } } + +impl TrackingArcSim { + /// Builds the schedule provided the config. Requires the tracker to be a ground station. 
+ /// + /// # Algorithm + /// + /// 1. For each tracking device: + /// 2. Find when the vehicle trajectory has an elevation greater or equal to zero, and use that as the first start of the first tracking arc for this station + /// 3. Find when the vehicle trajectory has an elevation less than zero (i.e. disappears below the horizon), after that initial epoch + /// 4. Repeat 2, 3 until the end of the trajectory + /// 5. Build each of these as "tracking strands" for this tracking device. + /// 6. Organize all of the built tracking strands chronologically. + /// 7. Iterate through all of the strands: + /// 7.a. if that tracker is marked as `Greedy` and it ends after the start of the next strand, change the start date of the next strand. + /// 7.b. if that tracker is marked as `Eager` and it ends after the start of the next strand, change the end date of the current strand. + pub fn generate_schedule( + &self, + cosm: Arc, + ) -> Result, NyxError> { + // Consider using find_all via the heuristic + let mut built_cfg = self.configs.clone(); + for (name, device) in self.devices.iter() { + let cfg = &self.configs[name]; + if let Some(scheduler) = cfg.scheduler { + info!("Building schedule for {name}"); + built_cfg.get_mut(name).unwrap().scheduler = None; + built_cfg.get_mut(name).unwrap().strands = Some(Vec::new()); + // Convert the trajectory into the ground station frame. 
+ let traj = self.trajectory.to_frame(device.frame, cosm.clone())?; + + match traj.find_arcs(&device) { + Err(_) => info!("No measurements from {name}"), + Ok(elevation_arcs) => { + for arc in elevation_arcs { + let strand_start = arc.rise.state.epoch(); + let strand_end = arc.fall.state.epoch(); + + if strand_end - strand_start + < cfg.sampling * i64::from(scheduler.min_samples) + { + info!( + "Too few samples from {name} opportunity from {strand_start} to {strand_end}, discarding strand", + ); + continue; + } + + let mut strand_range = Strand { + start: strand_start, + end: strand_end, + }; + + // If there is an alignment, apply it + if let Some(alignment) = scheduler.sample_alignment { + strand_range.start = strand_range.start.round(alignment); + strand_range.end = strand_range.end.round(alignment); + } + + if let Cadence::Intermittent { on, off } = scheduler.cadence { + // Check that the next start time is within the allocated time + if let Some(prev_strand) = + built_cfg[name].strands.as_ref().unwrap().last() + { + if prev_strand.end + off > strand_range.start { + // We're turning on the tracking sooner than the schedule allows, so let's fix that. + strand_range.start = prev_strand.end + off; + // Check that we didn't eat into the whole tracking opportunity + if strand_range.start > strand_end { + // Lost this whole opportunity. + info!("Discarding {name} opportunity from {strand_start} to {strand_end} due to cadence {:?}", scheduler.cadence); + continue; + } + } + } + // Check that we aren't tracking for longer than configured + if strand_range.end - strand_range.start > on { + strand_range.end = strand_range.start + on; + } + } + + // We've found when the spacecraft is below the horizon, so this is a new strand. 
+ built_cfg + .get_mut(name) + .unwrap() + .strands + .as_mut() + .unwrap() + .push(strand_range); + } + + info!( + "Built {} tracking strands for {name}", + built_cfg[name].strands.as_ref().unwrap().len() + ); + } + } + } + } + // Build all of the strands, remembering which tracker they come from. + let mut cfg_as_vec = Vec::new(); + for (name, cfg) in &built_cfg { + for (ii, strand) in cfg.strands.as_ref().unwrap().iter().enumerate() { + cfg_as_vec.push((name.clone(), ii, *strand)); + } + } + // Iterate through the strands by chronological order. Cannot use maps because we change types. + cfg_as_vec.sort_by_key(|(_, _, strand)| strand.start); + for (ii, (this_name, this_pos, this_strand)) in + cfg_as_vec.iter().take(cfg_as_vec.len() - 1).enumerate() + { + // Grab the config + if let Some(config) = self.configs[this_name].scheduler.as_ref() { + // Grab the next strand, chronologically + if let Some((next_name, next_pos, next_strand)) = cfg_as_vec.get(ii + 1) { + if config.handoff == Handoff::Greedy && this_strand.end >= next_strand.start { + // Modify the built configurations to change the start time of the next strand because the current one is greedy. 
+ let next_config = built_cfg.get_mut(next_name).unwrap(); + let new_start = this_strand.end + next_config.sampling; + next_config.strands.as_mut().unwrap()[*next_pos].start = new_start; + info!( + "{this_name} configured as {:?}, so {next_name} now starts on {new_start}", + config.handoff + ); + } else if config.handoff == Handoff::Eager + && this_strand.end >= next_strand.start + { + let this_config = built_cfg.get_mut(this_name).unwrap(); + let new_end = next_strand.start - this_config.sampling; + this_config.strands.as_mut().unwrap()[*this_pos].end = new_end; + info!( + "{this_name} now hands off to {next_name} on {new_end} because it's configured as {:?}", + config.handoff + ); + } + } else { + // Reached the end + break; + } + } + } + + Ok(built_cfg) + } + + /// Sets the schedule to that built in `generate_schedule` + pub fn build_schedule(&mut self, cosm: Arc) -> Result<(), NyxError> { + self.configs = self.generate_schedule(cosm)?; + + Ok(()) + } +} + +// Literally the same as above, but can't make it generic =( +impl TrackingArcSim { + /// Builds the schedule provided the config. Requires the tracker to be a ground station. + /// + /// # Algorithm + /// + /// 1. For each tracking device: + /// 2. Find when the vehicle trajectory has an elevation greater or equal to zero, and use that as the first start of the first tracking arc for this station + /// 3. Find when the vehicle trajectory has an elevation less than zero (i.e. disappears below the horizon), after that initial epoch + /// 4. Repeat 2, 3 until the end of the trajectory + /// 5. Build each of these as "tracking strands" for this tracking device. + /// 6. Organize all of the built tracking strands chronologically. + /// 7. Iterate through all of the strands: + /// 7.a. if that tracker is marked as `Greedy` and it ends after the start of the next strand, change the start date of the next strand. + /// 7.b. 
if that tracker is marked as `Eager` and it ends after the start of the next strand, change the end date of the current strand. + pub fn generate_schedule( + &self, + cosm: Arc, + ) -> Result, NyxError> { + let mut built_cfg = self.configs.clone(); + 'devices: for (name, device) in self.devices.iter() { + let cfg = &self.configs[name]; + if let Some(scheduler) = cfg.scheduler { + info!("Building schedule for {name}"); + built_cfg.get_mut(name).unwrap().scheduler = None; + built_cfg.get_mut(name).unwrap().strands = Some(Vec::new()); + // Convert the trajectory into the ground station frame. + let traj = self.trajectory.to_frame(device.frame, cosm.clone())?; + let mut start_at = traj.first().epoch(); + let end_at = traj.last().epoch(); + + while start_at < end_at { + if let Ok(visibility_event) = traj.find_bracketed(start_at, end_at, &device) { + // We've found when the spacecraft is visible + start_at = visibility_event.state.epoch(); + // Search for when the spacecraft is no longer visible. + if let Ok(visibility_event) = + traj.find_bracketed(start_at + 1.0_f64.seconds(), end_at, &device) + { + let strand_end = visibility_event.state.epoch(); + let mut strand_range = Strand { + start: start_at, + end: strand_end, + }; + + if let Cadence::Intermittent { on, off } = scheduler.cadence { + // Check that the next start time is within the allocated time + if let Some(prev_strand) = + built_cfg[name].strands.as_ref().unwrap().last() + { + if prev_strand.end + off > strand_range.start { + // We're turning on the tracking sooner than the schedule allows, so let's fix that. + strand_range.start = prev_strand.end + off; + // Check that we didn't eat into the whole tracking opportunity + if strand_range.start > strand_end { + // Lost this whole opportunity. 
+ info!("Discarding {name} opportunity from {} to {} due to cadence {:?}", start_at, strand_end, scheduler.cadence); + } + } + } + // Check that we aren't tracking for longer than configured + if strand_range.end - strand_range.start > on { + strand_range.end = strand_range.start + on; + } + } + + // We've found when the spacecraft is below the horizon, so this is a new strand. + built_cfg + .get_mut(name) + .unwrap() + .strands + .as_mut() + .unwrap() + .push(strand_range); + // Set the new start time and continue searching + start_at = strand_end; + } else { + continue 'devices; + } + } else { + info!( + "Built {} tracking strands for {name}", + built_cfg[name].strands.as_ref().unwrap().len() + ); + continue 'devices; + } + } + } + } + + // Build all of the strands, remembering which tracker they come from. + let mut cfg_as_vec = Vec::new(); + for (name, cfg) in &built_cfg { + for (ii, strand) in cfg.strands.as_ref().unwrap().iter().enumerate() { + cfg_as_vec.push((name.clone(), ii, *strand)); + } + } + // Iterate through the strands by chronological order. Cannot use maps because we change types. + cfg_as_vec.sort_by_key(|(_, _, strand)| strand.start); + for (ii, (this_name, this_pos, this_strand)) in + cfg_as_vec.iter().take(cfg_as_vec.len() - 1).enumerate() + { + // Grab the config + if let Some(config) = self.configs[this_name].scheduler.as_ref() { + // Grab the next strand, chronologically + if let Some((next_name, next_pos, next_strand)) = cfg_as_vec.get(ii + 1) { + if config.handoff == Handoff::Greedy && this_strand.end >= next_strand.start { + // Modify the built configurations to change the start time of the next strand because the current one is greedy. 
+ let next_config = built_cfg.get_mut(next_name).unwrap(); + let new_start = this_strand.end + next_config.sampling; + next_config.strands.as_mut().unwrap()[*next_pos].start = new_start; + info!( + "{this_name} configured as {:?}, so {next_name} now starts on {new_start}", + config.handoff + ); + } else if config.handoff == Handoff::Eager + && this_strand.end >= next_strand.start + { + let this_config = built_cfg.get_mut(this_name).unwrap(); + let new_end = next_strand.start - this_config.sampling; + this_config.strands.as_mut().unwrap()[*this_pos].end = new_end; + info!( + "{this_name} now hands off to {next_name} on {new_end} because it's configured as {:?}", + config.handoff + ); + } + } else { + // Reached the end + break; + } + } + } + + Ok(built_cfg) + } + + /// Sets the schedule to that built in `build_schedule` + pub fn build_schedule(&mut self, cosm: Arc) -> Result<(), NyxError> { + self.configs = self.generate_schedule(cosm)?; + + Ok(()) + } +} diff --git a/src/od/simulator/mod.rs b/src/od/simulator/mod.rs index 0fceee31..82c8b93b 100644 --- a/src/od/simulator/mod.rs +++ b/src/od/simulator/mod.rs @@ -20,11 +20,9 @@ pub use crate::dynamics::{Dynamics, NyxError}; pub use crate::{cosmic::Cosm, State, TimeTagged}; mod arc; pub use arc::TrackingArcSim; -mod schedule; -pub use schedule::Schedule; +mod scheduler; +pub use scheduler::{Cadence, Handoff, Scheduler}; mod trackdata; pub use trackdata::TrackingDeviceSim; mod trkconfig; -pub use trkconfig::{EpochRanges, TrkConfig}; -mod start_mode; -pub use start_mode::Availability; +pub use trkconfig::{Strand, TrkConfig}; diff --git a/src/od/simulator/schedule.rs b/src/od/simulator/schedule.rs deleted file mode 100644 index d262514b..00000000 --- a/src/od/simulator/schedule.rs +++ /dev/null @@ -1,90 +0,0 @@ -/* - Nyx, blazing fast astrodynamics - Copyright (C) 2023 Christopher Rabotin - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License 
as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . -*/ - -pub use crate::dynamics::{Dynamics, NyxError}; -use crate::io::{duration_from_str, duration_to_str}; -pub use crate::{cosmic::Cosm, State, TimeTagged}; -use hifitime::Duration; -use serde::Deserialize; -use serde_derive::Serialize; -use std::fmt::Debug; - -/// Determines whether a tracking schedule is continuous or intermittent. -#[derive(Copy, Clone, Serialize, Deserialize, PartialEq)] -pub enum Schedule { - Continuous, - /// An intermittent schedule has On and Off durations. - Intermittent { - #[serde( - serialize_with = "duration_to_str", - deserialize_with = "duration_from_str" - )] - on: Duration, - #[serde( - serialize_with = "duration_to_str", - deserialize_with = "duration_from_str" - )] - off: Duration, - }, -} - -impl Default for Schedule { - fn default() -> Self { - Self::Continuous - } -} - -impl Debug for Schedule { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Continuous => write!(f, "Continuous"), - Self::Intermittent { on, off } => f - .debug_struct("Intermittent") - .field("on", &format!("{on}")) - .field("off", &format!("{off}")) - .finish(), - } - } -} - -#[test] -fn serde_schedule() { - use hifitime::TimeUnits; - use serde_yaml; - - let cont: Schedule = serde_yaml::from_str("!Continuous").unwrap(); - assert_eq!(cont, Schedule::Continuous); - - let int: Schedule = - serde_yaml::from_str("!Intermittent {on: 1 h 35 min, off: 15 h 02 min 3 s}").unwrap(); - assert_eq!( - int, - Schedule::Intermittent { - on: 
1.hours() + 35.0.minutes(), - off: 15.hours() + 2.minutes() + 3.seconds() - } - ); - assert_eq!( - format!("{int:?}"), - r#"Intermittent { on: "1 h 35 min", off: "15 h 2 min 3 s" }"# - ); - - let serialized = serde_yaml::to_string(&int).unwrap(); - let deserd: Schedule = serde_yaml::from_str(&serialized).unwrap(); - assert_eq!(deserd, int); -} diff --git a/src/od/simulator/scheduler.rs b/src/od/simulator/scheduler.rs new file mode 100644 index 00000000..821da618 --- /dev/null +++ b/src/od/simulator/scheduler.rs @@ -0,0 +1,189 @@ +/* + Nyx, blazing fast astrodynamics + Copyright (C) 2023 Christopher Rabotin + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . 
+*/ + +pub use crate::dynamics::{Dynamics, NyxError}; +use crate::io::{ + duration_from_str, duration_to_str, maybe_duration_from_str, maybe_duration_to_str, +}; +pub use crate::{cosmic::Cosm, State, TimeTagged}; +use hifitime::{Duration, Unit}; +use serde::Deserialize; +use serde_derive::Serialize; +use std::fmt::Debug; +use typed_builder::TypedBuilder; + +#[cfg(feature = "python")] +use pyo3::prelude::*; + +/// Defines the handoff from a current ground station to the next one that is visible to prevent overlapping of measurements +#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Serialize)] +#[cfg_attr(feature = "python", pyclass)] +#[cfg_attr(feature = "python", pyo3(module = "nyx_space.orbit_determination"))] +pub enum Handoff { + /// If a new station is in visibility of the spacecraft, the "Eager" station will immediately stop tracking and switch over (default) + Eager, + /// If a new station is in visibility of the spacecraft, the "Greedy" station will continue to tracking until the vehicle is below its elevation mask + Greedy, + /// If a new station is in visibility of the spacecraft, the "Overlap" station will continue tracking, and so will the other one + Overlap, +} + +impl Default for Handoff { + fn default() -> Self { + Self::Eager + } +} + +/// A scheduler allows building a scheduling of spaceraft tracking for a set of ground stations. +#[derive(Copy, Clone, Debug, Default, Deserialize, PartialEq, Serialize, TypedBuilder)] +#[builder(doc)] +#[cfg_attr(feature = "python", pyclass)] +#[cfg_attr(feature = "python", pyo3(module = "nyx_space.orbit_determination"))] +pub struct Scheduler { + /// Handoff strategy if two trackers see the vehicle at the same time + #[builder(default)] + pub handoff: Handoff, + /// On/off cadence of this scheduler + #[builder(default)] + pub cadence: Cadence, + /// Minimum number of samples for a valid arc, i.e. if there are less than this many samples during a pass, the strand is discarded. 
+ #[builder(default = 10)] + pub min_samples: u32, + /// Round the time of the samples to the provided duration. For example, if the vehicle is above the horizon at 01:02:03.456 and the alignment + /// is set to 01 seconds, then this will cause the tracking to start at 01:02:03 as it is rounded to the nearest second. + #[builder(default = Some(Unit::Second * 1.0), setter(strip_option))] + #[serde( + serialize_with = "maybe_duration_to_str", + deserialize_with = "maybe_duration_from_str" + )] + pub sample_alignment: Option, +} + +/// Determines whether tracking is continuous or intermittent. +#[derive(Copy, Clone, Deserialize, PartialEq, Serialize)] +pub enum Cadence { + Continuous, + /// An intermittent schedule has On and Off durations. + Intermittent { + #[serde( + serialize_with = "duration_to_str", + deserialize_with = "duration_from_str" + )] + on: Duration, + #[serde( + serialize_with = "duration_to_str", + deserialize_with = "duration_from_str" + )] + off: Duration, + }, +} + +impl Default for Cadence { + fn default() -> Self { + Self::Continuous + } +} + +impl Debug for Cadence { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Continuous => write!(f, "Continuous"), + Self::Intermittent { on, off } => f + .debug_struct("Intermittent") + .field("on", &format!("{on}")) + .field("off", &format!("{off}")) + .finish(), + } + } +} + +#[cfg(test)] +mod scheduler_ut { + use process::simulator::scheduler::Handoff; + + use crate::od::prelude::*; + + use super::Scheduler; + + #[test] + fn serde_cadence() { + use hifitime::TimeUnits; + use serde_yaml; + + let cont: Cadence = serde_yaml::from_str("!Continuous").unwrap(); + assert_eq!(cont, Cadence::Continuous); + + let int: Cadence = + serde_yaml::from_str("!Intermittent {on: 1 h 35 min, off: 15 h 02 min 3 s}").unwrap(); + assert_eq!( + int, + Cadence::Intermittent { + on: 1.hours() + 35.0.minutes(), + off: 15.hours() + 2.minutes() + 3.seconds() + } + ); + assert_eq!( + 
format!("{int:?}"), + r#"Intermittent { on: "1 h 35 min", off: "15 h 2 min 3 s" }"# + ); + + let serialized = serde_yaml::to_string(&int).unwrap(); + let deserd: Cadence = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, int); + } + + #[test] + fn api_and_serde_scheduler() { + use hifitime::TimeUnits; + use serde_yaml; + + let scheduler = Scheduler::default(); + let serialized = serde_yaml::to_string(&scheduler).unwrap(); + assert_eq!( + serialized, + "handoff: Eager\ncadence: Continuous\nmin_samples: 0\nsample_alignment: null\n" + ); + let deserd: Scheduler = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, scheduler); + + let scheduler = Scheduler::builder() + .handoff(Handoff::Eager) + .cadence(Cadence::Intermittent { + on: 0.2.hours(), + off: 17.hours() + 5.minutes(), + }) + .build(); + + let serialized = serde_yaml::to_string(&scheduler).unwrap(); + assert_eq!( + serialized, + "handoff: Eager\ncadence: !Intermittent\n on: 12 min\n off: 17 h 5 min\nmin_samples: 10\nsample_alignment: 1 s\n" + ); + let deserd: Scheduler = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, scheduler); + } + + #[test] + fn defaults() { + let sched = Scheduler::default(); + + assert_eq!(sched.cadence, Cadence::Continuous); + + assert_eq!(sched.handoff, Handoff::Eager); + } +} diff --git a/src/od/simulator/start_mode.rs b/src/od/simulator/start_mode.rs deleted file mode 100644 index b7dca080..00000000 --- a/src/od/simulator/start_mode.rs +++ /dev/null @@ -1,68 +0,0 @@ -/* - Nyx, blazing fast astrodynamics - Copyright (C) 2023 Christopher Rabotin - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. 
- - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . -*/ - -pub use crate::dynamics::{Dynamics, NyxError}; -use crate::io::{epoch_from_str, epoch_to_str}; -pub use crate::{cosmic::Cosm, State, TimeTagged}; -use hifitime::Epoch; -use serde::Deserialize; -use serde_derive::Serialize; -use std::fmt::{Debug, Display}; - -/// Defines the availability methods for a tracking arc. -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq)] -pub enum Availability { - /// Start/stop the tracking schedule with respect to when the receiver is visible - Visible, - /// Start/stop the tracking schedule with respect to the provided epoch - #[serde(serialize_with = "epoch_to_str", deserialize_with = "epoch_from_str")] - Epoch(Epoch), -} - -impl Display for Availability { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Availability::Visible => write!(f, "{self:?}"), - Availability::Epoch(e) => write!(f, "Epoch({e})"), - } - } -} - -impl Default for Availability { - fn default() -> Self { - Self::Visible - } -} - -#[test] -fn serde_startmode() { - use core::str::FromStr; - use serde_yaml; - - let vis: Availability = serde_yaml::from_str("!Visible").unwrap(); - assert_eq!(vis, Availability::Visible); - - let val = "!Epoch 2023-02-23T00:00:00 UTC"; - let mode: Availability = serde_yaml::from_str(val).unwrap(); - assert_eq!( - mode, - Availability::Epoch(Epoch::from_str("2023-02-23T00:00:00 UTC").unwrap()) - ); - - assert_eq!(val, serde_yaml::to_string(&mode).unwrap().trim()); -} diff --git a/src/od/simulator/trkconfig.rs b/src/od/simulator/trkconfig.rs index d6e33ed9..d8b593c3 100644 --- a/src/od/simulator/trkconfig.rs +++ 
b/src/od/simulator/trkconfig.rs @@ -16,6 +16,7 @@ along with this program. If not, see . */ +use super::scheduler::Scheduler; pub use crate::dynamics::{Dynamics, NyxError}; use crate::io::{duration_from_str, duration_to_str, epoch_from_str, epoch_to_str, ConfigError}; use crate::io::{ConfigRepr, Configurable}; @@ -28,36 +29,30 @@ use serde::Deserialize; use serde_derive::Serialize; use std::fmt::Debug; use std::sync::Arc; - -use super::schedule::Schedule; -use super::Availability; +use typed_builder::TypedBuilder; /// Stores a tracking configuration, there is one per tracking data simulator (e.g. one for ground station #1 and another for #2). /// By default, the tracking configuration is continuous and the tracking arc is from the beginning of the simulation to the end. /// In Python, any value that is set to None at initialization will use the default values. -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, TypedBuilder)] #[cfg_attr(feature = "python", pyclass)] #[cfg_attr(feature = "python", pyo3(module = "nyx_space.orbit_determination"))] +#[builder(doc)] pub struct TrkConfig { - /// Availability configuration to start the tracking arc - #[serde(default)] - pub start: Availability, - /// Availability configuration to end the tracking arc - #[serde(default)] - pub end: Availability, + /// Set to automatically build a tracking schedule based on some criteria #[serde(default)] - pub schedule: Schedule, + #[builder(default, setter(strip_option))] + pub scheduler: Option, #[serde( serialize_with = "duration_to_str", deserialize_with = "duration_from_str" )] + /// Sampling rate once tracking has started + #[builder(default = 1.minutes())] pub sampling: Duration, - /// List of epoch ranges to include - #[serde(rename = "inclusion epochs")] - pub inclusion_epochs: Option>, - /// List of epoch ranges to exclude - #[serde(rename = "exclusion epochs")] - pub exclusion_epochs: Option>, + /// List of 
tracking strands during which the given tracker will be tracking + #[builder(default, setter(strip_option))] + pub strands: Option>, } impl ConfigRepr for TrkConfig {} @@ -81,47 +76,45 @@ impl Configurable for TrkConfig { } impl TrkConfig { - /// Initialize a default TrkConfig providing only the sample rate + /// Initialize a default TrkConfig providing only the sample rate. + /// Note: this will also set the sample alignment time to the provided duration. pub fn from_sample_rate(sampling: Duration) -> Self { Self { sampling, + scheduler: Some(Scheduler::builder().sample_alignment(sampling).build()), ..Default::default() } } - /// Check that the configuration is valid + /// Check that the configuration is valid: a successful call means that either we have a set of tracking strands or we have a valid scheduler pub(crate) fn sanity_check(&self) -> Result<(), ConfigError> { - if let Some(excl_list) = &self.exclusion_epochs { - for excl in excl_list { - if excl.end - excl.start < self.sampling { - return Err(ConfigError::InvalidConfig(format!( - "Exclusion epoch range {:?} is shorter than the sampling period of {} and therefore ineffective", - excl, self.sampling - ))); - } + if self.strands.is_some() && self.scheduler.is_some() { + return Err(ConfigError::InvalidConfig( + "Both tracking strands and a scheduler are configured, must be one or the other" + .to_string(), + )); + } else if let Some(strands) = &self.strands { + if strands.is_empty() && self.scheduler.is_none() { + return Err(ConfigError::InvalidConfig( + "Provided tracking strands is empty and no scheduler is defined".to_string(), + )); } - } - - if let Some(incl_list) = &self.inclusion_epochs { - for incl in incl_list { - if incl.end - incl.start < self.sampling { + for (ii, strand) in strands.iter().enumerate() { + if strand.duration() < self.sampling { return Err(ConfigError::InvalidConfig(format!( - "Inclusion epoch range {:?} is shorter than the sampling period of {} and therefore ineffective", - incl, 
self.sampling + "Strand #{ii} is shorter than sampling time" ))); } - } - } - - if let Availability::Epoch(epoch) = self.start { - if let Availability::Epoch(epoch2) = self.end { - if epoch2 < epoch { + if strand.duration().is_negative() { return Err(ConfigError::InvalidConfig(format!( - "End epoch {} is before start epoch {}", - epoch2, epoch + "Strand #{ii} is anti-chronological" ))); } } + } else if self.strands.is_none() && self.scheduler.is_none() { + return Err(ConfigError::InvalidConfig( + "Neither tracking strands not a scheduler is provided".to_string(), + )); } Ok(()) @@ -132,84 +125,159 @@ impl Default for TrkConfig { /// The default configuration is to generate a measurement every minute (continuously) while the vehicle is visible fn default() -> Self { Self { - start: Availability::Visible, - end: Availability::Visible, - schedule: Schedule::Continuous, + // Allows calling the builder's defaults + scheduler: Some(Scheduler::builder().build()), sampling: 1.minutes(), - inclusion_epochs: None, - exclusion_epochs: None, + strands: None, } } } -/// Stores an epoch range for tracking. 
+/// Stores a tracking strand with a start and end epoch #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq)] #[cfg_attr(feature = "python", pyclass)] -pub struct EpochRanges { +#[cfg_attr(feature = "python", pyo3(module = "nyx_space.orbit_determination"))] +pub struct Strand { #[serde(serialize_with = "epoch_to_str", deserialize_with = "epoch_from_str")] pub start: Epoch, #[serde(serialize_with = "epoch_to_str", deserialize_with = "epoch_from_str")] pub end: Epoch, } -impl EpochRanges { +impl Strand { /// Returns whether the provided epoch is within the range pub fn contains(&self, epoch: Epoch) -> bool { (self.start..=self.end).contains(&epoch) } -} -#[test] -fn serde_trkconfig() { - use hifitime::{Epoch, TimeScale}; - use serde_yaml; - - // Test the default config - let cfg = TrkConfig::default(); - let serialized = serde_yaml::to_string(&cfg).unwrap(); - println!("{serialized}"); - let deserd: TrkConfig = serde_yaml::from_str(&serialized).unwrap(); - assert_eq!(deserd, cfg); - - // Specify an intermittent schedule and a specific start epoch. - let cfg = TrkConfig { - start: Availability::Epoch(Epoch::from_gregorian_at_midnight( - 2023, - 2, - 22, - TimeScale::TAI, - )), - end: Availability::Visible, - schedule: Schedule::Intermittent { - on: 23.1.hours(), - off: 0.9.hours(), - }, - sampling: 45.2.seconds(), - ..Default::default() - }; - let serialized = serde_yaml::to_string(&cfg).unwrap(); - println!("{serialized}"); - let deserd: TrkConfig = serde_yaml::from_str(&serialized).unwrap(); - assert_eq!(deserd, cfg); + pub fn duration(&self) -> Duration { + self.end - self.start + } } -#[test] -fn deserialize_from_file() { - use std::collections::HashMap; - use std::env; - use std::path::PathBuf; - - // Load the tracking configuration from the test data. 
- let trkconfg_yaml: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), - "data", - "tests", - "config", - "tracking_cfg.yaml", - ] - .iter() - .collect(); - - let configs: HashMap = TrkConfig::load_named(trkconfg_yaml).unwrap(); - dbg!(configs); +#[cfg(test)] +mod trkconfig_ut { + use crate::io::ConfigRepr; + use crate::od::prelude::*; + + #[test] + fn sanity_checks() { + let mut cfg = TrkConfig::default(); + assert!(cfg.sanity_check().is_ok(), "default config should be sane"); + + cfg.scheduler = None; + assert!( + cfg.sanity_check().is_err(), + "no scheduler should mark this insane" + ); + + cfg.strands = Some(Vec::new()); + assert!( + cfg.sanity_check().is_err(), + "no scheduler and empty strands should mark this insane" + ); + + let start = Epoch::now().unwrap(); + let end = start + 10.seconds(); + cfg.strands = Some(vec![Strand { start, end }]); + assert!( + cfg.sanity_check().is_err(), + "strand of too short of a duration should mark this insane" + ); + + let end = start + cfg.sampling; + cfg.strands = Some(vec![Strand { start, end }]); + assert!( + cfg.sanity_check().is_ok(), + "strand allowing for a single measurement should be OK" + ); + + // An anti-chronological strand should be invalid + cfg.strands = Some(vec![Strand { + start: end, + end: start, + }]); + assert!( + cfg.sanity_check().is_err(), + "anti chronological strand should be insane" + ); + } + + #[test] + fn serde_trkconfig() { + use serde_yaml; + + // Test the default config + let cfg = TrkConfig::default(); + let serialized = serde_yaml::to_string(&cfg).unwrap(); + println!("{serialized}"); + let deserd: TrkConfig = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, cfg); + assert_eq!( + cfg.scheduler.unwrap(), + Scheduler::builder().min_samples(10).build() + ); + assert!(cfg.strands.is_none()); + + // Specify an intermittent schedule and a specific start epoch. 
+ let cfg = TrkConfig { + scheduler: Some(Scheduler { + cadence: Cadence::Intermittent { + on: 23.1.hours(), + off: 0.9.hours(), + }, + handoff: Handoff::Eager, + min_samples: 10, + ..Default::default() + }), + sampling: 45.2.seconds(), + ..Default::default() + }; + let serialized = serde_yaml::to_string(&cfg).unwrap(); + println!("{serialized}"); + let deserd: TrkConfig = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, cfg); + } + + #[test] + fn deserialize_from_file() { + use std::collections::HashMap; + use std::env; + use std::path::PathBuf; + + // Load the tracking configuration from the test data. + let trkconfg_yaml: PathBuf = [ + &env::var("CARGO_MANIFEST_DIR").unwrap(), + "data", + "tests", + "config", + "tracking_cfg.yaml", + ] + .iter() + .collect(); + + let configs: HashMap = TrkConfig::load_named(trkconfg_yaml).unwrap(); + dbg!(configs); + } + + #[test] + fn api_trk_config() { + use serde_yaml; + + let cfg = TrkConfig::builder() + .sampling(15.seconds()) + .scheduler(Scheduler::builder().handoff(Handoff::Overlap).build()) + .build(); + + let serialized = serde_yaml::to_string(&cfg).unwrap(); + println!("{serialized}"); + let deserd: TrkConfig = serde_yaml::from_str(&serialized).unwrap(); + assert_eq!(deserd, cfg); + + let cfg = TrkConfig::builder() + .scheduler(Scheduler::builder().handoff(Handoff::Overlap).build()) + .build(); + + assert_eq!(cfg.sampling, 60.seconds()); + } } diff --git a/src/propagators/instance.rs b/src/propagators/instance.rs index 99b324f6..f006c487 100644 --- a/src/propagators/instance.rs +++ b/src/propagators/instance.rs @@ -106,7 +106,7 @@ where { if log_progress { let tock: Duration = tick.elapsed().into(); - info!("Done in {}", tock); + debug!("Done in {}", tock); } } return Ok(self.state); @@ -136,7 +136,7 @@ where { if log_progress { let tock: Duration = tick.elapsed().into(); - info!("Done in {}", tock); + debug!("Done in {}", tock); } } @@ -262,9 +262,9 @@ where let (_, traj) = 
self.for_duration_with_traj(max_duration)?; // Now, find the requested event - let events = traj.find_all(event)?; + let events = traj.find(event)?; match events.get(trigger) { - Some(event_state) => Ok((*event_state, traj)), + Some(event_state) => Ok((event_state.state, traj)), None => Err(NyxError::UnsufficientTriggers(trigger, events.len())), } } diff --git a/src/python/mission_design/orbit_trajectory.rs b/src/python/mission_design/orbit_trajectory.rs index 9965ca6e..b738de9f 100644 --- a/src/python/mission_design/orbit_trajectory.rs +++ b/src/python/mission_design/orbit_trajectory.rs @@ -99,9 +99,14 @@ impl OrbitTraj { self.inner.last().epoch() }; - Ok(vec![self.inner.find_bracketed(start, end, &event)?]) + Ok(vec![self.inner.find_bracketed(start, end, &event)?.state]) } else { - self.inner.find_all(&event) + Ok(self + .inner + .find(&event)? + .iter() + .map(|details| details.state) + .collect::>()) } } diff --git a/src/python/mission_design/sc_trajectory.rs b/src/python/mission_design/sc_trajectory.rs index 87ac3f18..da529fef 100644 --- a/src/python/mission_design/sc_trajectory.rs +++ b/src/python/mission_design/sc_trajectory.rs @@ -100,9 +100,14 @@ impl SpacecraftTraj { self.inner.last().epoch() }; - Ok(vec![self.inner.find_bracketed(start, end, &event)?]) + Ok(vec![self.inner.find_bracketed(start, end, &event)?.state]) } else { - self.inner.find_all(&event) + Ok(self + .inner + .find(&event)? 
+ .iter() + .map(|details| details.state) + .collect::>()) } } diff --git a/src/python/orbit_determination/arc.rs b/src/python/orbit_determination/arc.rs index b8ba12e3..ff5ea9f1 100644 --- a/src/python/orbit_determination/arc.rs +++ b/src/python/orbit_determination/arc.rs @@ -46,33 +46,17 @@ impl GroundTrackingArcSim { trajectory: TrajectoryLoader, configs: HashMap, seed: u64, - allow_overlap: Option, ) -> Result { // Try to convert the dynamic trajectory into a trajectory let inner = if let Ok(sc_traj) = trajectory.to_traj::() { - let mut inner = TrackingArcSim::with_seed(devices, sc_traj, configs, seed) + let inner = TrackingArcSim::with_seed(devices, sc_traj, configs, seed) .map_err(NyxError::ConfigError)?; - if let Some(allow_overlap) = allow_overlap { - if allow_overlap { - inner.allow_overlap(); - } else { - inner.disallow_overlap(); - } - } Either::Left(inner) } else if let Ok(traj) = trajectory.to_traj::() { - let mut inner = TrackingArcSim::with_seed(devices, traj, configs, seed) + let inner = TrackingArcSim::with_seed(devices, traj, configs, seed) .map_err(NyxError::ConfigError)?; - if let Some(allow_overlap) = allow_overlap { - if allow_overlap { - inner.allow_overlap(); - } else { - inner.disallow_overlap(); - } - } - Either::Right(inner) } else { return Err(NyxError::CustomError("Provided trajectory could neither be parsed as an orbit trajectory or a spacecraft trajectory".to_string())); @@ -83,7 +67,6 @@ impl GroundTrackingArcSim { /// Generates simulated measurements and returns the path where the parquet file containing said measurements is stored. /// You may specify a metadata dictionary to be stored in the parquet file and whether the filename should include the timestamp. 
- #[pyo3(text_signature = "(path, metadata=None, timestamp=False)")] pub fn generate_measurements( &mut self, path: String, @@ -104,6 +87,24 @@ impl GroundTrackingArcSim { } } + /// Generates a tracking schedule + pub fn generate_schedule(&self) -> Result, NyxError> { + let cosm = Cosm::de438(); + match &self.inner { + Either::Left(arc_sim) => arc_sim.generate_schedule(cosm), + Either::Right(arc_sim) => arc_sim.generate_schedule(cosm), + } + } + + /// Builds a tracking schedule by generating it and storing it in this object. + pub fn build_schedule(&mut self) -> Result<(), NyxError> { + let cosm = Cosm::de438(); + match &mut self.inner { + Either::Left(arc_sim) => arc_sim.build_schedule(cosm), + Either::Right(arc_sim) => arc_sim.build_schedule(cosm), + } + } + pub fn __repr__(&self) -> String { format!("{}", self.inner) } diff --git a/src/python/orbit_determination/mod.rs b/src/python/orbit_determination/mod.rs index df20987b..59181474 100644 --- a/src/python/orbit_determination/mod.rs +++ b/src/python/orbit_determination/mod.rs @@ -20,13 +20,14 @@ use crate::io::tracking_data::DynamicTrackingArc; use crate::io::ExportCfg; use crate::od::noise::GaussMarkov; use crate::od::process::{FltResid, IterationConf}; -pub use crate::od::simulator::TrkConfig; +pub use crate::od::simulator::{Scheduler, TrkConfig}; pub use crate::{io::ConfigError, od::prelude::GroundStation}; use pyo3::{prelude::*, py_run}; mod arc; pub(crate) mod estimate; mod ground_station; mod process; +mod scheduler; mod trkconfig; use estimate::OrbitEstimate; @@ -39,6 +40,7 @@ pub(crate) fn register_od(py: Python<'_>, parent_module: &PyModule) -> PyResult< sm.add_class::()?; sm.add_class::()?; sm.add_class::()?; + sm.add_class::()?; sm.add_class::()?; sm.add_class::()?; sm.add_class::()?; diff --git a/src/python/orbit_determination/scheduler.rs b/src/python/orbit_determination/scheduler.rs new file mode 100644 index 00000000..1c3844ae --- /dev/null +++ b/src/python/orbit_determination/scheduler.rs @@ -0,0 
+1,75 @@ +/* + Nyx, blazing fast astrodynamics + Copyright (C) 2023 Christopher Rabotin + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ +pub use crate::io::ConfigError; +pub use crate::od::simulator::{Cadence, Handoff, Scheduler, Strand}; +use crate::NyxError; +use hifitime::Duration; +use pyo3::basic::CompareOp; +use pyo3::prelude::*; +use std::str::FromStr; + +#[pymethods] +impl Scheduler { + #[new] + #[pyo3(text_signature = "(handoff, cadence_on=None, cadence_off=None)")] + fn py_new( + handoff: Handoff, + cadence_on: Option, + cadence_off: Option, + min_samples: Option, + ) -> Result { + let mut me = Self::builder().handoff(handoff).build(); + + if let Some(min_samples) = min_samples { + me.min_samples = min_samples; + } + + if cadence_on.is_some() || cadence_off.is_some() { + me.cadence = Cadence::Intermittent { + on: Duration::from_str(cadence_on.unwrap().as_str()).map_err(|e| { + ConfigError::InvalidConfig(format!( + "{e} invalid format for schedule on (must be specified if schedule off is)" + )) + })?, + off: Duration::from_str(cadence_off.unwrap().as_str()).map_err(|e| { + ConfigError::InvalidConfig(format!( + "{e} invalid format for schedule off (must be specified if schedule on is)" + )) + })?, + }; + } + + Ok(me) + } + + fn __repr__(&self) -> String { + format!("{self:?}") + } + + fn __str__(&self) -> String { + format!("{self:?}") + } + + fn __richcmp__(&self, other: &Self, op: 
CompareOp) -> Result { + match op { + CompareOp::Eq => Ok(self == other), + CompareOp::Ne => Ok(self != other), + _ => Err(NyxError::CustomError(format!("{op:?} not available"))), + } + } +} diff --git a/src/python/orbit_determination/trkconfig.rs b/src/python/orbit_determination/trkconfig.rs index 4355bf58..64c47c2a 100644 --- a/src/python/orbit_determination/trkconfig.rs +++ b/src/python/orbit_determination/trkconfig.rs @@ -16,14 +16,15 @@ along with this program. If not, see . */ pub use crate::io::ConfigError; -pub use crate::od::simulator::{Schedule, TrkConfig}; -use crate::{io::ConfigRepr, od::simulator::Availability, NyxError}; -use hifitime::{Duration, Epoch}; +pub use crate::od::simulator::{Scheduler, Strand, TrkConfig}; +use crate::{io::ConfigRepr, NyxError}; +use hifitime::Duration; use pyo3::basic::CompareOp; use pyo3::prelude::*; use pyo3::types::PyType; use pythonize::{depythonize, pythonize}; -use std::{collections::HashMap, str::FromStr}; +use std::collections::HashMap; +use std::str::FromStr; #[pymethods] impl TrkConfig { @@ -43,59 +44,26 @@ impl TrkConfig { } #[new] - #[pyo3( - text_signature = "(start=None, end=None, schedule_on=None, schedule_off=None, sampling=None)" - )] + #[pyo3(text_signature = "(sampling=None, strands=None, scheduler=None)")] fn py_new( - start: Option, - end: Option, - schedule_on: Option, - schedule_off: Option, sampling: Option, + strands: Option>, + scheduler: Option, ) -> Result { let mut me = Self::default(); - if schedule_on.is_some() || schedule_off.is_some() { - me.schedule = Schedule::Intermittent { - on: Duration::from_str(schedule_on.unwrap().as_str()).map_err(|e| { - ConfigError::InvalidConfig(format!( - "{e} invalid format for schedule on (must be specified if schedule off is)" - )) - })?, - off: Duration::from_str(schedule_off.unwrap().as_str()).map_err(|e| { - ConfigError::InvalidConfig(format!( - "{e} invalid format for schedule off (must be specified if schedule on is)" - )) - })?, - }; - } - - if let 
Some(start) = start { - if start.to_ascii_lowercase() == "visible" { - me.start = Availability::Visible - } else { - me.start = Availability::Epoch(Epoch::from_str(&start).map_err(|e| { - ConfigError::InvalidConfig(format!("{e} invalid format for start availability")) - })?) - } - } - - if let Some(end) = end { - if end.to_ascii_lowercase() == "visible" { - me.end = Availability::Visible - } else { - me.end = Availability::Epoch(Epoch::from_str(&end).map_err(|e| { - ConfigError::InvalidConfig(format!("{e} invalid format for end availability")) - })?) - } - } - if let Some(sampling) = sampling { me.sampling = Duration::from_str(&sampling).map_err(|e| { ConfigError::InvalidConfig(format!("{e} invalid format for sampling")) })?; } + me.strands = strands; + + if scheduler.is_some() { + me.scheduler = scheduler; + } + Ok(me) } @@ -144,38 +112,4 @@ impl TrkConfig { self.sampling = sampling; Ok(()) } - - /// Allows setting the start and end availabilities and the sampling. - /// Availabilities must be either `Visible` or an Epoch as a string. - /// The sampling must be a Duration object. - /// Example usage: `cfg.set(start='Visible', end='2020-01-01 15.26.30 UTC')` - fn set( - &mut self, - start: Option, - end: Option, - sampling: Option, - ) -> Result<(), NyxError> { - if let Some(start) = start { - if start.to_ascii_lowercase() == "visible" { - self.start = Availability::Visible - } else { - self.start = Availability::Epoch(Epoch::from_str(&start).map_err(|e| { - NyxError::CustomError(format!("{e} invalid format for start availability")) - })?) - } - } - if let Some(end) = end { - if end.to_ascii_lowercase() == "visible" { - self.end = Availability::Visible - } else { - self.end = Availability::Epoch(Epoch::from_str(&end).map_err(|e| { - NyxError::CustomError(format!("{e} invalid format for end availability")) - })?) 
- } - } - if let Some(sampling) = sampling { - self.sampling = sampling; - } - Ok(()) - } } diff --git a/tests/orbit_determination/multi_body.rs b/tests/orbit_determination/multi_body.rs index 063988d9..4ceb0567 100644 --- a/tests/orbit_determination/multi_body.rs +++ b/tests/orbit_determination/multi_body.rs @@ -78,7 +78,7 @@ fn od_val_multi_body_ckf_perfect_stations() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -178,7 +178,10 @@ fn multi_body_ckf_covar_map() { let mut configs = HashMap::new(); configs.insert( dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), + TrkConfig::builder() + .sampling(10.seconds()) + .scheduler(Scheduler::builder().sample_alignment(10.seconds()).build()) + .build(), ); let all_stations = vec![dss13_goldstone]; @@ -203,7 +206,7 @@ fn multi_body_ckf_covar_map() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -276,8 +279,8 @@ fn multi_body_ckf_covar_map() { // Test that we can generate a navigation trajectory and search it let nav_traj = odp.to_traj().unwrap(); let aop_event = Event::apoapsis(); - for found_event in nav_traj.find_all(&aop_event).unwrap() { - println!("{:x}", found_event); - assert!((found_event.ta_deg() - 180.0).abs() < 1e-2) + for found_event in nav_traj.find(&aop_event).unwrap() { + println!("{:x}", found_event.state); + assert!((found_event.state.ta_deg() - 180.0).abs() < 1e-2) } } diff --git a/tests/orbit_determination/resid_reject.rs b/tests/orbit_determination/resid_reject.rs index 3c0c7aaf..85b1b6cd 
100644 --- a/tests/orbit_determination/resid_reject.rs +++ b/tests/orbit_determination/resid_reject.rs @@ -78,24 +78,14 @@ fn devices_n_configs(epoch: Epoch) -> (Vec, HashMap3 km)" ); assert!( - delta.vmag_km_s() < 1e-5, - "Velocity error should be on centimeter level" + delta.vmag_km_s() < 1e-4, + "Velocity error should be on decimeter level" ); } diff --git a/tests/orbit_determination/simulator.rs b/tests/orbit_determination/simulator.rs index 261c154b..c54be725 100644 --- a/tests/orbit_determination/simulator.rs +++ b/tests/orbit_determination/simulator.rs @@ -60,7 +60,7 @@ fn continuous_tracking() { // Load the ground stations from the test data. let ground_station_file: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "data", "tests", "config", @@ -71,11 +71,11 @@ fn continuous_tracking() { let devices = GroundStation::load_many(ground_station_file).unwrap(); - dbg!(&devices); + // dbg!(&devices); // Load the tracking configuration from the test data. 
let trkconfg_yaml: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "data", "tests", "config", @@ -93,13 +93,12 @@ fn continuous_tracking() { TrackingArcSim::::with_seed(devices, trajectory, configs, 12345) .unwrap(); + trk.build_schedule(cosm.clone()).unwrap(); let arc = trk.generate_measurements(cosm).unwrap(); - assert_eq!(arc.measurements.len(), 146); - // And serialize to disk let path: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "output_data", "simple_arc.parquet", ] @@ -116,6 +115,7 @@ fn continuous_tracking() { println!("{arc_concrete}"); + assert_eq!(arc.measurements.len(), 116); // Check that we've loaded all of the measurements assert_eq!(arc_concrete.measurements.len(), arc.measurements.len()); // Check that we find the same device names too diff --git a/tests/orbit_determination/spacecraft.rs b/tests/orbit_determination/spacecraft.rs index 8e2843a1..f2fe0954 100644 --- a/tests/orbit_determination/spacecraft.rs +++ b/tests/orbit_determination/spacecraft.rs @@ -57,31 +57,30 @@ fn od_val_sc_mb_srp_reals_duals_models() { iau_earth, ); + let epoch = Epoch::from_gregorian_tai_at_midnight(2020, 1, 1); + let prop_time = 1 * Unit::Day; + // Define the tracking configurations let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + let cfg = TrkConfig::builder() + .strands(vec![Strand { + start: epoch, + end: epoch + prop_time, + }]) + .build(); + + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; // Define the propagator 
information. - let prop_time = 1 * Unit::Day; let step_size = 10.0 * Unit::Second; let opts = PropOpts::with_fixed_step(step_size); // Define state information. let eme2k = cosm.frame("EME2000"); - let epoch = Epoch::from_gregorian_tai_at_midnight(2020, 1, 1); let initial_state = Orbit::keplerian(22000.0, 0.01, 30.0, 80.0, 40.0, 0.0, epoch, eme2k); let dry_mass_kg = 100.0; // in kg @@ -133,7 +132,7 @@ fn od_val_sc_mb_srp_reals_duals_models() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -216,7 +215,7 @@ fn od_val_sc_mb_srp_reals_duals_models() { ); assert!( - est.covar.diagonal().norm() < 1e-5, + est.covar.diagonal().norm() < 1e-4, "estimate covariance norm should be small (perfect dynamics) ({:e})", est.covar.diagonal().norm() ); diff --git a/tests/orbit_determination/trackingarc.rs b/tests/orbit_determination/trackingarc.rs index f247804c..7ba0eec6 100644 --- a/tests/orbit_determination/trackingarc.rs +++ b/tests/orbit_determination/trackingarc.rs @@ -4,7 +4,7 @@ use nyx_space::md::prelude::*; use nyx_space::od::msr::RangeDoppler; use nyx_space::od::prelude::*; use nyx_space::od::simulator::TrackingArcSim; -use nyx_space::od::simulator::{Availability, EpochRanges, Schedule, TrkConfig}; +use nyx_space::od::simulator::{Cadence, Strand, TrkConfig}; use rstest::*; use std::collections::HashMap; use std::env; @@ -33,7 +33,7 @@ fn traj() -> Traj { // Generate a trajectory let (_, trajectory) = Propagator::default(OrbitalDynamics::two_body()) .with(orbit) - .for_duration_with_traj(1.5.days()) + .for_duration_with_traj(3.days()) .unwrap(); println!("{trajectory}"); @@ -45,7 +45,7 @@ fn traj() -> Traj { fn devices() -> Vec { // Load the ground stations from the test data. 
let ground_station_file: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "data", "tests", "config", @@ -86,7 +86,7 @@ fn trk_simple(traj: Traj, devices: Vec) { // Load the tracking configuration from the test data. let trkconfg_yaml: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "data", "tests", "config", @@ -103,6 +103,8 @@ fn trk_simple(traj: Traj, devices: Vec) { let mut trk = TrackingArcSim::::with_seed(devices, traj, configs, 12345).unwrap(); + trk.build_schedule(cosm.clone()).unwrap(); + let arc = trk.generate_measurements(cosm).unwrap(); // Test filtering by epoch @@ -147,11 +149,11 @@ fn trk_simple(traj: Traj, devices: Vec) { ); // Regression - assert_eq!(arc.measurements.len(), 146); + assert_eq!(arc.measurements.len(), 197); // And serialize to disk let path: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "output_data", "simple_arc.parquet", ] @@ -176,74 +178,31 @@ fn trk_simple(traj: Traj, devices: Vec) { assert_eq!(arc_concrete.device_cfg, arc.device_cfg); } -/// Tests that exclusion epochs work -#[rstest] -fn trkconfig_zero_exclusion(traj: Traj, devices: Vec) { - let cosm = Cosm::de438(); - - // Build a tracking config that should never see this vehicle. - let trkcfg = TrkConfig { - exclusion_epochs: Some(vec![EpochRanges { - start: traj.first().epoch(), - end: traj.last().epoch(), - }]), - ..Default::default() - }; - // Build the configs map - let mut configs = HashMap::new(); - for device in &devices { - configs.insert(device.name.clone(), trkcfg.clone()); - } - - let mut trk = TrackingArcSim::::new(devices, traj, configs).unwrap(); - - let arc = trk.generate_measurements(cosm).unwrap(); - - assert_eq!(arc.measurements.len(), 0); -} - /// Tests that inclusion epochs work #[rstest] fn trkconfig_zero_inclusion(traj: Traj, devices: Vec) { let cosm = Cosm::de438(); // Build a tracking config that should always see this vehicle. 
- let trkcfg_always = TrkConfig { - inclusion_epochs: Some(vec![EpochRanges { + let trkcfg_always = TrkConfig::builder() + .strands(vec![Strand { start: traj.first().epoch(), end: traj.last().epoch(), - }]), - ..Default::default() - }; + }]) + .build(); - // And one that is never included - let trkcfg_never = TrkConfig { - exclusion_epochs: Some(vec![EpochRanges { - start: traj.first().epoch(), - end: traj.last().epoch(), - }]), - ..Default::default() - }; - // Build the configs map + // Build the configs map, where we only have one of the two stations configured let mut configs = HashMap::new(); - for (dno, device) in devices.iter().enumerate() { - configs.insert( - device.name.clone(), - if dno == 0 { - println!("{}", device.name); - trkcfg_never.clone() - } else { - trkcfg_always.clone() - }, - ); - } + configs.insert(devices[1].name.clone(), trkcfg_always); let mut trk = TrackingArcSim::::new(devices, traj, configs).unwrap(); + trk.build_schedule(cosm.clone()).unwrap(); + let arc = trk.generate_measurements(cosm).unwrap(); // Regression - assert_eq!(arc.measurements.len(), 79); + assert_eq!(arc.measurements.len(), 113); assert_eq!( arc.device_names().len(), @@ -256,13 +215,13 @@ fn trkconfig_zero_inclusion(traj: Traj, devices: Vec) { #[rstest] fn trkconfig_invalid(traj: Traj, devices: Vec) { // Build a tracking config where the exclusion range is less than the sampling rate - let trkcfg = TrkConfig { - exclusion_epochs: Some(vec![EpochRanges { + let trkcfg = TrkConfig::builder() + .strands(vec![Strand { start: traj.first().epoch(), end: traj.first().epoch(), - }]), - ..Default::default() - }; + }]) + .build(); + // Build the configs map let mut configs = HashMap::new(); for device in &devices { @@ -277,32 +236,25 @@ fn trkconfig_invalid(traj: Traj, devices: Vec) { fn trkconfig_delayed_start(traj: Traj, devices: Vec) { let cosm = Cosm::de438(); - let trkcfg = TrkConfig { - inclusion_epochs: Some(vec![EpochRanges { + let trkcfg = TrkConfig::builder() + 
.strands(vec![Strand { start: traj.first().epoch() + 2.hours(), end: traj.last().epoch(), - }]), - sampling: 1.26.minutes(), - ..Default::default() - }; + }]) + .sampling(1.26.minutes()) + .build(); // Build the configs map with a single ground station let mut configs = HashMap::new(); configs.insert(devices[0].name.clone(), trkcfg); - // Check that if if a device does not have an associated trkconfig, the tracking arc cannot be created. - assert!(TrackingArcSim::::new( - devices.clone(), - traj.clone(), - configs.clone() - ) - .is_err()); - let mut trk = TrackingArcSim::::new(vec![devices[0].clone()], traj, configs) .unwrap(); + trk.build_schedule(cosm.clone()).unwrap(); + let arc = trk.generate_measurements(cosm).unwrap(); // Check the sampling of the arc. @@ -313,7 +265,7 @@ fn trkconfig_delayed_start(traj: Traj, devices: Vec) { ); // Regression - assert_eq!(arc.measurements.len(), 53); + assert_eq!(arc.measurements.len(), 108); } /// Test different cadences and availabilities @@ -326,27 +278,30 @@ fn trkconfig_cadence(traj: Traj, devices: Vec) { configs.insert( devices[0].name.clone(), - TrkConfig { - start: Availability::Visible, - schedule: Schedule::Intermittent { - on: 0.2.hours(), - off: 20.days(), - }, - ..Default::default() - }, + TrkConfig::builder() + .scheduler( + Scheduler::builder() + .cadence(Cadence::Intermittent { + on: 0.2.hours(), + off: 20.days(), + }) + .build(), + ) + .build(), ); configs.insert( devices[1].name.clone(), - TrkConfig { - start: Availability::Epoch(traj.last().epoch() - 10.hours()), - sampling: 26.1.seconds(), - ..Default::default() - }, + TrkConfig::builder() + .sampling(26.1.seconds()) + .scheduler(Scheduler::default()) + .build(), ); let mut trk = TrackingArcSim::::new(devices, traj, configs).unwrap(); + trk.build_schedule(cosm.clone()).unwrap(); + let arc = trk.generate_measurements(cosm).unwrap(); // Check the sampling of the arc is one minute: we don't have any overlap of availability and the default sampling is one 
minute. @@ -357,5 +312,5 @@ fn trkconfig_cadence(traj: Traj, devices: Vec) { ); // Regression - assert_eq!(arc.measurements.len(), 90); + assert_eq!(arc.measurements.len(), 216); } diff --git a/tests/orbit_determination/two_body.rs b/tests/orbit_determination/two_body.rs index 33dde995..0f50726b 100644 --- a/tests/orbit_determination/two_body.rs +++ b/tests/orbit_determination/two_body.rs @@ -46,20 +46,23 @@ fn od_tb_val_ekf_fixed_step_perfect_stations() { iau_earth, ); - // Define the tracking configurations + // Load the tracking configurations let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + let trkconfig_yaml: PathBuf = [ + env!("CARGO_MANIFEST_DIR"), + "data", + "tests", + "config", + "trk_cfg_od_val.yaml", + ] + .iter() + .collect(); + + let cfg = TrkConfig::load(trkconfig_yaml).unwrap(); + + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; @@ -82,7 +85,7 @@ fn od_tb_val_ekf_fixed_step_perfect_stations() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -127,7 +130,8 @@ fn od_tb_val_ekf_fixed_step_perfect_stations() { println!("Final estimate:\n{est}"); assert!( est.state_deviation().norm() < 1e-12, - "In perfect modeling, the state deviation should be near zero" + "In perfect modeling, the state deviation should be near zero, got {:.3e}", + 
est.state_deviation().norm() ); for i in 0..6 { assert!( @@ -229,7 +233,7 @@ fn od_tb_val_with_arc() { // Load the tracking configs let trkconfig_yaml: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "data", "tests", "config", @@ -242,12 +246,13 @@ fn od_tb_val_with_arc() { // Simulate tracking data of range and range rate let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 1).unwrap(); + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); // And serialize to disk let path: PathBuf = [ - &env::var("CARGO_MANIFEST_DIR").unwrap(), + env!("CARGO_MANIFEST_DIR"), "output_data", "two_body_od_val_arc.parquet", ] @@ -294,8 +299,9 @@ fn od_tb_val_with_arc() { let est = &odp.estimates[odp.estimates.len() - 1]; println!("Final estimate:\n{est}"); assert!( - dbg!(est.state_deviation().norm()) < f64::EPSILON, - "In perfect modeling, the state deviation should be near zero" + est.state_deviation().norm() < f64::EPSILON, + "In perfect modeling, the state deviation should be near zero, got {:.3e}", + est.state_deviation().norm() ); for i in 0..6 { assert!( @@ -376,19 +382,15 @@ fn od_tb_val_ckf_fixed_step_perfect_stations() { ); // Define the tracking configurations + let cfg = TrkConfig::builder() + .sampling(10.seconds()) + .scheduler(Scheduler::builder().sample_alignment(10.seconds()).build()) + .build(); + let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, 
dss13_goldstone]; @@ -412,10 +414,21 @@ fn od_tb_val_ckf_fixed_step_perfect_stations() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); + // And serialize to disk + let path: PathBuf = [ + env!("CARGO_MANIFEST_DIR"), + "output_data", + "od_tb_val_ckf_fixed_step_perfect_stations.parquet", + ] + .iter() + .collect(); + + arc.to_parquet_simple(path).unwrap(); + // Now that we have the truth data, let's start an OD with no noise at all and compute the estimates. // We expect the estimated orbit to be perfect since we're using strictly the same dynamics, no noise on // the measurements, and the same time step. @@ -451,11 +464,6 @@ fn od_tb_val_ckf_fixed_step_perfect_stations() { odp.to_parquet(path, ExportCfg::default()).unwrap(); - // Check that we have as many estimates as steps taken by the propagator. - // Note that this test cannot work when using a variable step propagator in that same setup. - // We're adding +1 because the propagation time is inclusive on both ends. - let expected_num_estimates = (prop_time.to_seconds() / step_size.to_seconds()) as usize + 1; - // Check that there are no duplicates of epochs. 
let mut prev_epoch = odp.estimates[0].epoch(); @@ -468,12 +476,6 @@ fn od_tb_val_ckf_fixed_step_perfect_stations() { prev_epoch = this_epoch; } - assert_eq!( - odp.estimates.len(), - expected_num_estimates, - "Different number of estimates received" - ); - for (no, est) in odp.estimates.iter().enumerate() { if no == 0 { // Skip the first estimate which is the initial estimate provided by user @@ -611,19 +613,12 @@ fn od_tb_ckf_fixed_step_iteration_test() { ); // Define the tracking configurations + let cfg = TrkConfig::from_sample_rate(10.seconds()); + let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; @@ -645,13 +640,10 @@ fn od_tb_ckf_fixed_step_iteration_test() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); - // Check that we have the same number of measurements as before the behavior change. - assert_eq!(arc.measurements.len(), 7954); - // Now that we have the truth data, let's start an OD with no noise at all and compute the estimates. // We expect the estimated orbit to be perfect since we're using strictly the same dynamics, no noise on // the measurements, and the same time step. 
@@ -780,18 +772,10 @@ fn od_tb_ckf_fixed_step_perfect_stations_snc_covar_map() { // Define the tracking configurations let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + let cfg = TrkConfig::from_sample_rate(10.seconds()); + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; @@ -813,7 +797,7 @@ fn od_tb_ckf_fixed_step_perfect_stations_snc_covar_map() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -1004,18 +988,10 @@ fn od_tb_val_harmonics_ckf_fixed_step_perfect() { // Define the tracking configurations let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + let cfg = TrkConfig::from_sample_rate(10.seconds()); + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; @@ -1040,7 +1016,7 @@ fn od_tb_val_harmonics_ckf_fixed_step_perfect() { // Simulate tracking data let mut arc_sim = 
TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -1139,18 +1115,10 @@ fn od_tb_ckf_fixed_step_perfect_stations_several_snc_covar_map() { // Define the tracking configurations let mut configs = HashMap::new(); - configs.insert( - dss65_madrid.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss34_canberra.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); - configs.insert( - dss13_goldstone.name.clone(), - TrkConfig::from_sample_rate(10.seconds()), - ); + let cfg = TrkConfig::from_sample_rate(10.seconds()); + configs.insert(dss65_madrid.name.clone(), cfg.clone()); + configs.insert(dss34_canberra.name.clone(), cfg.clone()); + configs.insert(dss13_goldstone.name.clone(), cfg); let all_stations = vec![dss65_madrid, dss34_canberra, dss13_goldstone]; @@ -1171,7 +1139,7 @@ fn od_tb_ckf_fixed_step_perfect_stations_several_snc_covar_map() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj, configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); diff --git a/tests/orbit_determination/xhat_dev.rs b/tests/orbit_determination/xhat_dev.rs index be12439b..d4eb555f 100644 --- a/tests/orbit_determination/xhat_dev.rs +++ b/tests/orbit_determination/xhat_dev.rs @@ -71,7 +71,7 @@ fn xhat_dev_test_ekf_two_body() { let (err_p, err_v) = rss_orbit_errors(&initial_state_dev, &initial_state); println!( - "Initial state dev: {:.3} m\t{:.3} m/s\n{}", + "Initial state dev: {:.3} m\t{:.3} m/s\nDelta: {}", err_p * 1e3, err_v * 1e3, initial_state - initial_state_dev @@ -85,8 +85,9 @@ fn xhat_dev_test_ekf_two_body() { .unwrap(); // Simulate tracking data + 
println!("{traj}"); let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -296,7 +297,7 @@ fn xhat_dev_test_ekf_multi_body() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -473,7 +474,7 @@ fn xhat_dev_test_ekf_harmonics() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -624,7 +625,7 @@ fn xhat_dev_test_ekf_realistic() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -777,7 +778,7 @@ fn xhat_dev_test_ckf_smoother_multi_body() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -1048,7 +1049,7 @@ fn xhat_dev_test_ekf_snc_smoother_multi_body() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + 
arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); @@ -1318,7 +1319,7 @@ fn xhat_dev_test_ckf_iteration_multi_body() { // Simulate tracking data let mut arc_sim = TrackingArcSim::with_seed(all_stations, traj.clone(), configs, 0).unwrap(); - arc_sim.disallow_overlap(); // Prevent overlapping measurements + arc_sim.build_schedule(cosm.clone()).unwrap(); let arc = arc_sim.generate_measurements(cosm.clone()).unwrap(); diff --git a/tests/propagation/events.rs b/tests/propagation/events.rs index 54c41ac8..110d6aaa 100644 --- a/tests/propagation/events.rs +++ b/tests/propagation/events.rs @@ -5,7 +5,6 @@ use std::fmt::Write; fn event_tracker_true_anomaly() { use nyx::cosmic::eclipse::{EclipseLocator, EclipseState}; use nyx::md::prelude::*; - use nyx::md::EventEvaluator; // Only needed because we're manually calling e.eval use nyx::od::GroundStation; let cosm = Cosm::de438(); @@ -33,11 +32,15 @@ fn event_tracker_true_anomaly() { // Find all of the events for e in &events { - let found_events = traj.find_all(e).unwrap(); + let found_events = traj.find(e).unwrap(); let pretty = found_events .iter() - .fold(String::new(), |mut output, orbit| { - let _ = writeln!(output, "{orbit:x}\tevent value: {}", e.eval(orbit)); + .fold(String::new(), |mut output, orbit_event| { + let _ = writeln!( + output, + "{:x}\tevent value: {}", + orbit_event.state, orbit_event.value + ); output }); println!("[ta_tracker] {} =>\n{}", e, pretty); @@ -88,16 +91,17 @@ fn event_tracker_true_anomaly() { println!("Max elevation {} degrees @ {}", max_el, max_dt); let umbra_event_loc = e_loc.to_umbra_event(); - let umbra_events = traj.find_all(&umbra_event_loc).unwrap(); + let umbra_events = traj.find(&umbra_event_loc).unwrap(); let pretty = umbra_events .iter() - .fold(String::new(), |mut output, orbit| { + .fold(String::new(), |mut output, orbit_event| { + let orbit = orbit_event.state; let _ = writeln!( output, "{:x}\tevent value: {}\t(-10s: 
{}\t+10s: {})", orbit, - &e_loc.compute(orbit), + &e_loc.compute(&orbit), &e_loc.compute(&traj.at(orbit.epoch() - 10 * Unit::Second).unwrap()), &e_loc.compute(&traj.at(orbit.epoch() + 10 * Unit::Second).unwrap()) ); @@ -106,16 +110,17 @@ fn event_tracker_true_anomaly() { println!("[eclipses] {} =>\n{}", umbra_event_loc, pretty); let penumbra_event_loc = e_loc.to_penumbra_event(); - let penumbra_events = traj.find_all(&penumbra_event_loc).unwrap(); + let penumbra_events = traj.find(&penumbra_event_loc).unwrap(); let pretty = penumbra_events .iter() - .fold(String::new(), |mut output, orbit| { + .fold(String::new(), |mut output, orbit_event| { + let orbit = orbit_event.state; let _ = writeln!( output, "{:x}\tevent value: {}\t(-10s: {}\t+10s: {})", orbit, - &e_loc.compute(orbit), + &e_loc.compute(&orbit), &e_loc.compute(&traj.at(orbit.epoch() - 10 * Unit::Second).unwrap()), &e_loc.compute(&traj.at(orbit.epoch() + 10 * Unit::Second).unwrap()) ); diff --git a/tests/propagation/stopcond.rs b/tests/propagation/stopcond.rs index 0eae72a1..6337d876 100644 --- a/tests/propagation/stopcond.rs +++ b/tests/propagation/stopcond.rs @@ -8,7 +8,7 @@ use nyx::cosmic::{Bodies, Cosm, Orbit}; use nyx::dynamics::guidance::{FiniteBurns, Mnvr, Thruster}; use nyx::dynamics::orbital::OrbitalDynamics; use nyx::dynamics::SpacecraftDynamics; -use nyx::md::{Event, EventEvaluator, StateParameter}; +use nyx::md::{Event, StateParameter}; use nyx::propagators::error_ctrl::RSSCartesianStep; use nyx::propagators::{PropOpts, Propagator}; use nyx::time::{Epoch, TimeUnits, Unit}; @@ -35,12 +35,12 @@ fn stop_cond_3rd_apo() { // NOTE: We start counting at ZERO, so finding the 3rd means grabbing the second found. 
let (third_apo, traj) = prop.until_nth_event(5 * period, &apo_event, 2).unwrap(); - let events = traj.find_all(&apo_event).unwrap(); - let mut prev_event_match = events[0].epoch(); + let events = traj.find(&apo_event).unwrap(); + let mut prev_event_match = events[0].state.epoch(); for event_match in events.iter().skip(1) { - let delta_period = event_match.epoch() - prev_event_match - period; + let delta_period = event_match.state.epoch() - prev_event_match - period; assert!(delta_period.abs() < 50.microseconds(), "in two body dyn, event finding should be extremely precise, instead time error of {delta_period}"); - prev_event_match = event_match.epoch(); + prev_event_match = event_match.state.epoch(); } let min_epoch = start_dt + 2.0 * period; @@ -88,12 +88,12 @@ fn stop_cond_3rd_peri() { // which the event finder will find. let (third_peri, traj) = prop.until_nth_event(5 * period, &peri_event, 2).unwrap(); - let events = traj.find_all(&peri_event).unwrap(); - let mut prev_event_match = events[0].epoch(); + let events = traj.find(&peri_event).unwrap(); + let mut prev_event_match = events[0].state.epoch(); for event_match in events.iter().skip(1) { - let delta_period = event_match.epoch() - prev_event_match - period; + let delta_period = event_match.state.epoch() - prev_event_match - period; assert!(delta_period.abs() < 50.microseconds(), "in two body dyn, event finding should be extremely precise, instead time error of {delta_period}"); - prev_event_match = event_match.epoch(); + prev_event_match = event_match.state.epoch(); } let min_epoch = start_dt + 2.0 * period; @@ -193,15 +193,16 @@ fn stop_cond_nrho_apo() { ); // Now, find all of the requested events - let events = traj_luna.find_all(&near_apo_event).unwrap(); + let events = traj_luna.find(&near_apo_event).unwrap(); println!( "Found all {} events in {} ms", near_apo_event, (Instant::now() - end_conv).as_millis() ); - for event_state in &events { + for event in &events { + let event_state = event.state; let 
delta_t = event_state.epoch() - dt; - println!("{} after start:\n{:x}", delta_t, event_state); + println!("{delta_t} after start:\n{event_state:x}"); assert!((event_state.ta_deg() - 172.0).abs() < near_apo_event.value_precision); } } @@ -355,27 +356,29 @@ fn event_and_combination() { // NOTE: We're unwrapping here, so if the event isn't found, this will cause the test to fail. let event = Event::specific(StateParameter::Declination, 6.0, 3.0, Unit::Minute); let mut decl_deg = 0.0; - if let Ok(matching_states) = traj_moon.find_all(&event) { + if let Ok(matching_states) = traj_moon.find(&event) { for sc_decl_zero in matching_states { - decl_deg = sc_decl_zero.value(StateParameter::Declination).unwrap(); - println!( - "{event}: {} => decl = {} deg", - event.eval_string(&sc_decl_zero), - decl_deg, - ); + decl_deg = sc_decl_zero + .state + .value(StateParameter::Declination) + .unwrap(); + println!("{sc_decl_zero} => decl = {} deg", decl_deg,); assert!((decl_deg - 6.0).abs() < 3.0); } // We should be able to find a similar event with a tighter bound too. 
- if let Ok(tighter_states) = traj_moon.find_all(&Event::specific( + if let Ok(tighter_states) = traj_moon.find(&Event::specific( StateParameter::Declination, decl_deg, 1.0, Unit::Minute, )) { for sc_decl_zero in tighter_states { - let found_decl_deg = sc_decl_zero.value(StateParameter::Declination).unwrap(); - println!("{sc_decl_zero:x} => decl = {} deg", found_decl_deg); + let found_decl_deg = sc_decl_zero + .state + .value(StateParameter::Declination) + .unwrap(); + println!("{sc_decl_zero} => decl = {} deg", found_decl_deg); assert!((decl_deg - found_decl_deg).abs() < 1.0); } } diff --git a/tests/propulsion/closedloop_multi_oe_ruggiero.rs b/tests/propulsion/closedloop_multi_oe_ruggiero.rs index 732f7616..e481e0ab 100644 --- a/tests/propulsion/closedloop_multi_oe_ruggiero.rs +++ b/tests/propulsion/closedloop_multi_oe_ruggiero.rs @@ -67,7 +67,7 @@ fn qlaw_as_ruggiero_case_a() { for e in &events { println!( "[qlaw_as_ruggiero_case_a] Found {} events of kind {}", - traj.find_all(e).unwrap().len(), + traj.find(e).unwrap().len(), e ); } diff --git a/tests/propulsion/closedloop_single_oe_ruggiero.rs b/tests/propulsion/closedloop_single_oe_ruggiero.rs index 01f5766f..97673573 100644 --- a/tests/propulsion/closedloop_single_oe_ruggiero.rs +++ b/tests/propulsion/closedloop_single_oe_ruggiero.rs @@ -660,7 +660,7 @@ fn rugg_raan() { let fuel_usage = fuel_mass - final_state.fuel_mass_kg; println!("[rugg_raan] {:x}", final_state.orbit); let event = Event::new(StateParameter::RAAN, 5.0); - println!("[rugg_raan] {} => {:?}", event, traj.find_all(&event)); + println!("[rugg_raan] {} => {:?}", event, traj.find(&event)); println!("[rugg_raan] fuel usage: {:.3} kg", fuel_usage); assert!( diff --git a/tests/python/test_orbit_determination.py b/tests/python/test_orbit_determination.py index d399acf6..ef539fa4 100644 --- a/tests/python/test_orbit_determination.py +++ b/tests/python/test_orbit_determination.py @@ -33,7 +33,7 @@ def test_filter_arc(): # Initialize logging FORMAT = 
"%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s" logging.basicConfig(format=FORMAT) - logging.getLogger().setLevel(logging.INFO) + logging.getLogger().setLevel(logging.DEBUG) # Base path root = Path(__file__).joinpath("../../../").resolve() @@ -90,6 +90,8 @@ def test_filter_arc(): # Build the simulated tracking arc, setting the seed to zero arc_sim = GroundTrackingArcSim(devices, traj, trk_cfg, 0) # Generate the measurements + print(arc_sim.generate_schedule()) + arc_sim.build_schedule() msr_path = arc_sim.generate_measurements( str(outpath.joinpath("./msr.parquet")), cfg )