From bd02b4e197e8e734da6a9c09c9d65e42af0471fb Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sat, 9 May 2026 22:00:52 +0530 Subject: [PATCH 01/14] fix: include LICENSE in sdist for PyPI PEP 639 metadata --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 48ea4a7..164187a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,6 +7,8 @@ name = "dagron" version = "0.1.0" description = "A fast Rust-backed DAG engine for Python" requires-python = ">=3.12" +license = "MIT" +license-files = ["LICENSE"] [project.optional-dependencies] ray = ["ray>=2.0"] From 2f5c8a9b4a888e57739c6addeb94bcc429c2bca8 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sat, 9 May 2026 22:01:01 +0530 Subject: [PATCH 02/14] chore: add crates.io metadata to dagron-core --- crates/dagron-core/Cargo.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/dagron-core/Cargo.toml b/crates/dagron-core/Cargo.toml index 51c7818..a0fd4d3 100644 --- a/crates/dagron-core/Cargo.toml +++ b/crates/dagron-core/Cargo.toml @@ -3,6 +3,11 @@ name = "dagron-core" version = "0.1.0" edition = "2021" rust-version = "1.83" +description = "Fast DAG construction, analysis, and scheduling engine" +license = "MIT" +repository = "https://github.com/ByteVeda/dagron" +keywords = ["dag", "graph", "scheduling", "workflow", "petgraph"] +categories = ["algorithms", "data-structures"] [lib] name = "dagron_core" From 747947a37118e71c3861cc5518c929ab2812b0f6 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 01:36:30 +0530 Subject: [PATCH 03/14] feat: NodeRef typed handles + @flow + Effect + stubgen NodeRef is a stable Arc+epoch handle returned by add_node; every public API accepts str | NodeRef. 
@dagron.flow records call structure into a DAG; @task is signature-preserving (ParamSpec) and takes effect=Effect.{PURE,READ,WRITE,NETWORK,NONDETERMINISTIC}. FlowFuture[T] / NodeResult[T] are generic; ExecutionResult overloads type lookups by FlowFuture key. stubgen emits Literal[] overloads for typed string-keyed lookup. DAGExecutor(enforce_effect_isolation) serializes nondeterministic tasks. AST-scan warns on impure-looking PURE tasks. effects_of(dag) reads tags from node metadata. --- crates/dagron-core/src/errors.rs | 5 + crates/dagron-core/src/graph/construction.rs | 20 +- crates/dagron-core/src/graph/mod.rs | 34 ++ crates/dagron-core/src/lib.rs | 2 +- crates/dagron-core/src/node.rs | 71 ++- crates/dagron-core/tests/construction.rs | 51 +- crates/dagron-py/src/construction.rs | 86 ++-- crates/dagron-py/src/errors.rs | 7 + crates/dagron-py/src/incremental.rs | 21 +- crates/dagron-py/src/introspection.rs | 104 ++-- crates/dagron-py/src/lib.rs | 6 + crates/dagron-py/src/node.rs | 50 ++ crates/dagron-py/src/noderef.rs | 55 +++ crates/dagron-py/src/paths.rs | 37 +- crates/dagron-py/src/reachability.rs | 31 +- crates/dagron-py/src/subgraph.rs | 19 +- crates/dagron-py/src/transforms.rs | 18 +- py_src/dagron/__init__.py | 11 + py_src/dagron/__init__.pyi | 11 + py_src/dagron/_internal.pyi | 116 +++-- py_src/dagron/analysis/explain.py | 106 ++-- py_src/dagron/analysis/lineage.py | 32 +- py_src/dagron/builder.py | 21 +- py_src/dagron/builder.pyi | 8 +- py_src/dagron/effects.py | 175 +++++++ py_src/dagron/execution/_helpers.py | 4 +- py_src/dagron/execution/_types.py | 38 +- py_src/dagron/execution/cached_executor.py | 2 +- py_src/dagron/execution/checkpoint.py | 2 +- py_src/dagron/execution/conditions.py | 2 +- .../dagron/execution/distributed_executor.py | 2 +- py_src/dagron/execution/dynamic.py | 2 +- py_src/dagron/execution/executor.py | 81 +++- py_src/dagron/execution/incremental.py | 2 +- py_src/dagron/execution/pipeline.py | 76 +-- 
py_src/dagron/execution/resources.py | 8 +- py_src/dagron/flow.py | 458 ++++++++++++++++++ py_src/dagron/stubgen.py | 169 +++++++ tests/python/core/test_construction.py | 6 +- tests/python/test_effects.py | 350 +++++++++++++ tests/python/test_flow.py | 351 ++++++++++++++ tests/python/test_node_ref.py | 318 ++++++++++++ tests/python/test_typing.py | 194 ++++++++ 43 files changed, 2811 insertions(+), 351 deletions(-) create mode 100644 crates/dagron-py/src/noderef.rs create mode 100644 py_src/dagron/effects.py create mode 100644 py_src/dagron/flow.py create mode 100644 py_src/dagron/stubgen.py create mode 100644 tests/python/test_effects.py create mode 100644 tests/python/test_flow.py create mode 100644 tests/python/test_node_ref.py create mode 100644 tests/python/test_typing.py diff --git a/crates/dagron-core/src/errors.rs b/crates/dagron-core/src/errors.rs index 72a2dec..8cae839 100644 --- a/crates/dagron-core/src/errors.rs +++ b/crates/dagron-core/src/errors.rs @@ -12,6 +12,11 @@ pub enum DagronError { #[error("Edge not found: {0} -> {1}")] EdgeNotFound(String, String), + #[error( + "Stale node reference: {0} (the node was removed or replaced after the ref was created)" + )] + StaleNodeRef(String), + #[error("Graph error: {0}")] Graph(String), } diff --git a/crates/dagron-core/src/graph/construction.rs b/crates/dagron-core/src/graph/construction.rs index 0497679..18f9889 100644 --- a/crates/dagron-core/src/graph/construction.rs +++ b/crates/dagron-core/src/graph/construction.rs @@ -1,6 +1,6 @@ use crate::algorithms; use crate::errors::DagronError; -use crate::node::NodeId; +use crate::node::NodeRef; use crate::types::EdgeData; use super::DAG; @@ -8,9 +8,12 @@ use super::DAG; impl

DAG

{ /// Add a single node to the graph. /// - /// Returns NodeId for the newly created node. - /// Returns DagronError::DuplicateNode if a node with this name already exists. - pub fn add_node(&mut self, name: String, payload: P) -> Result { + /// Returns a [`NodeRef`] for the newly created node. The ref is stable — + /// it remains valid across edge mutations and other-node changes, and is + /// invalidated only if this node is removed. + /// + /// Returns `DagronError::DuplicateNode` if a node with this name already exists. + pub fn add_node(&mut self, name: String, payload: P) -> Result { if self.name_to_index.contains_key(&name) { return Err(DagronError::DuplicateNode(name)); } @@ -19,12 +22,12 @@ impl

DAG

{ payload, }; let idx = self.graph.add_node(node_data); + let epoch = self.next_node_epoch; + self.next_node_epoch = self.next_node_epoch.wrapping_add(1); self.name_to_index.insert(name.clone(), idx); + self.node_epochs.insert(name.clone(), epoch); self.bump_generation(); - Ok(NodeId { - index: idx.index() as u32, - name, - }) + Ok(NodeRef::new(name, epoch)) } /// Add a directed edge from one node to another. @@ -71,6 +74,7 @@ impl

DAG

{ let idx = self.resolve_name(name)?; self.graph.remove_node(idx); self.name_to_index.remove(name); + self.node_epochs.remove(name); self.bump_generation(); Ok(()) } diff --git a/crates/dagron-core/src/graph/mod.rs b/crates/dagron-core/src/graph/mod.rs index 8477dba..b4d0be2 100644 --- a/crates/dagron-core/src/graph/mod.rs +++ b/crates/dagron-core/src/graph/mod.rs @@ -21,11 +21,17 @@ use std::sync::RwLock; use ahash::AHashMap; use crate::errors::DagronError; +use crate::node::NodeRef; use crate::types::{InternalGraph, InternalNodeIndex}; pub struct DAG

{ pub(crate) graph: InternalGraph

, pub(crate) name_to_index: AHashMap, + /// Per-node creation epoch — used to validate `NodeRef`s against + /// remove/re-add cycles. The entry is removed when the node is removed. + pub(crate) node_epochs: AHashMap, + /// Monotonic counter; assigned to each newly created node. + pub(crate) next_node_epoch: u64, generation: u64, pub(crate) cache: RwLock, } @@ -35,6 +41,8 @@ impl

DAG

{ DAG { graph: InternalGraph::default(), name_to_index: AHashMap::new(), + node_epochs: AHashMap::new(), + next_node_epoch: 0, generation: 0, cache: RwLock::new(cache::DagCache::new()), } @@ -48,6 +56,30 @@ impl

DAG

{ .ok_or_else(|| DagronError::NodeNotFound(name.to_string())) } + /// Resolve a `NodeRef` to its current index, validating that the node + /// still exists with the same creation epoch. Returns + /// `DagronError::NodeNotFound` if the node has been removed and + /// `DagronError::StaleNodeRef` if a different node now occupies the name. + pub fn resolve_ref(&self, r: &NodeRef) -> Result { + let stored_epoch = self + .node_epochs + .get(r.name.as_ref()) + .ok_or_else(|| DagronError::NodeNotFound(r.name.to_string()))?; + if *stored_epoch != r.epoch { + return Err(DagronError::StaleNodeRef(r.name.to_string())); + } + self.name_to_index + .get(r.name.as_ref()) + .copied() + .ok_or_else(|| DagronError::NodeNotFound(r.name.to_string())) + } + + /// Look up the current `NodeRef` for a name, if it exists. + pub fn node_ref(&self, name: &str) -> Option { + let epoch = *self.node_epochs.get(name)?; + Some(NodeRef::new(name, epoch)) + } + /// Access the underlying petgraph. pub fn inner_graph(&self) -> &InternalGraph

{ &self.graph @@ -160,6 +192,8 @@ impl DAG

{ DAG { graph: self.graph.clone(), name_to_index: self.name_to_index.clone(), + node_epochs: self.node_epochs.clone(), + next_node_epoch: self.next_node_epoch, generation: self.generation, cache: RwLock::new(cache::DagCache::new()), } diff --git a/crates/dagron-core/src/lib.rs b/crates/dagron-core/src/lib.rs index d4274f7..d6089b6 100644 --- a/crates/dagron-core/src/lib.rs +++ b/crates/dagron-core/src/lib.rs @@ -16,5 +16,5 @@ pub use graph::serialization::{SerializableEdge, SerializableGraph, Serializable pub use graph::stats::GraphStats; pub use graph::transforms::MergeConflict; pub use graph::DAG; -pub use node::NodeId; +pub use node::{NodeId, NodeRef}; pub use types::{EdgeData, InternalGraph, InternalNodeIndex, NodeData}; diff --git a/crates/dagron-core/src/node.rs b/crates/dagron-core/src/node.rs index 95b71da..aba937c 100644 --- a/crates/dagron-core/src/node.rs +++ b/crates/dagron-core/src/node.rs @@ -1,11 +1,16 @@ use std::fmt; +use std::hash::{Hash, Hasher}; +use std::sync::Arc; -/// A unique identifier for a node in a DAG. +/// A snapshot identifier for a node in a DAG. /// -/// **Note:** The `index` field corresponds to the internal `petgraph` node index -/// and is only valid for the lifetime of that node in the graph. After a node is -/// removed, its index may be reused by a subsequently added node. Do not persist -/// or compare `index` values across graph mutations that involve removals. +/// `NodeId` is returned by enumeration methods (`nodes()`, `successors()`, …). +/// It carries the node's `name` plus its current `petgraph` index. The `index` +/// field is a *snapshot*: after a node is removed, its index may be reused by +/// a subsequently added node, so do not persist or compare `index` values +/// across graph mutations that involve removals. +/// +/// For a stable, persistent handle that survives mutations, use [`NodeRef`]. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct NodeId { pub index: u32, @@ -17,3 +22,59 @@ impl fmt::Display for NodeId { write!(f, "{}", self.name) } } + +/// A stable, persistent handle to a node. +/// +/// `NodeRef` is returned by [`DAG::add_node`] and remains valid as long as the +/// node it points to has not been removed (or removed-and-readded with the +/// same name, which produces a fresh `epoch`). Use it anywhere a `&str` name +/// is accepted; resolution is O(1) and detects stale references. +/// +/// `NodeRef` clones cheaply: the name is reference-counted via `Arc`. +#[derive(Debug, Clone)] +pub struct NodeRef { + pub name: Arc, + pub epoch: u64, +} + +impl NodeRef { + /// Construct a `NodeRef` directly. Prefer obtaining one from + /// [`DAG::add_node`] or [`DAG::node_ref`]. + pub fn new(name: impl Into>, epoch: u64) -> Self { + NodeRef { + name: name.into(), + epoch, + } + } + + /// Borrow the name as a string slice. + pub fn name(&self) -> &str { + &self.name + } + + /// The creation epoch this ref was minted with. 
+ pub fn epoch(&self) -> u64 { + self.epoch + } +} + +impl PartialEq for NodeRef { + fn eq(&self, other: &Self) -> bool { + self.epoch == other.epoch && self.name == other.name + } +} + +impl Eq for NodeRef {} + +impl Hash for NodeRef { + fn hash(&self, state: &mut H) { + self.name.hash(state); + self.epoch.hash(state); + } +} + +impl fmt::Display for NodeRef { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name) + } +} diff --git a/crates/dagron-core/tests/construction.rs b/crates/dagron-core/tests/construction.rs index cd46dd7..483daba 100644 --- a/crates/dagron-core/tests/construction.rs +++ b/crates/dagron-core/tests/construction.rs @@ -14,10 +14,55 @@ fn diamond_dag() -> DAG { } #[test] -fn add_node_returns_node_id() { +fn add_node_returns_node_ref() { let mut dag = DAG::new(); - let id = dag.add_node("alpha".into(), ()).unwrap(); - assert_eq!(id.name, "alpha"); + let r = dag.add_node("alpha".into(), ()).unwrap(); + assert_eq!(r.name(), "alpha"); + // resolve the ref back to confirm it's valid + assert!(dag.resolve_ref(&r).is_ok()); +} + +#[test] +fn node_ref_survives_unrelated_mutations() { + let mut dag = DAG::new(); + let a = dag.add_node("a".into(), ()).unwrap(); + let _b = dag.add_node("b".into(), ()).unwrap(); + dag.add_edge("a", "b", None, None).unwrap(); + // unrelated mutations should NOT invalidate `a` + let _c = dag.add_node("c".into(), ()).unwrap(); + dag.remove_node("b").unwrap(); + assert!(dag.resolve_ref(&a).is_ok()); +} + +#[test] +fn node_ref_invalidated_when_node_removed() { + let mut dag = DAG::new(); + let a = dag.add_node("a".into(), ()).unwrap(); + dag.remove_node("a").unwrap(); + let err = dag.resolve_ref(&a).unwrap_err(); + assert!(matches!(err, DagronError::NodeNotFound(name) if name == "a")); +} + +#[test] +fn node_ref_invalidated_when_name_reused() { + let mut dag = DAG::new(); + let a1 = dag.add_node("a".into(), ()).unwrap(); + dag.remove_node("a").unwrap(); + let a2 = dag.add_node("a".into(), 
()).unwrap(); + // a2 is fine + assert!(dag.resolve_ref(&a2).is_ok()); + // a1 is now stale (different epoch on the same name) + let err = dag.resolve_ref(&a1).unwrap_err(); + assert!(matches!(err, DagronError::StaleNodeRef(name) if name == "a")); +} + +#[test] +fn node_ref_lookup_via_name() { + let mut dag = DAG::new(); + let original = dag.add_node("foo".into(), ()).unwrap(); + let looked_up = dag.node_ref("foo").unwrap(); + assert_eq!(original, looked_up); + assert!(dag.node_ref("missing").is_none()); } #[test] diff --git a/crates/dagron-py/src/construction.rs b/crates/dagron-py/src/construction.rs index d0c8bc2..5d129f5 100644 --- a/crates/dagron-py/src/construction.rs +++ b/crates/dagron-py/src/construction.rs @@ -3,7 +3,8 @@ use pyo3::types::PyList; use crate::dag::PyDAG; use crate::errors; -use crate::node::PyNodeId; +use crate::node::PyNodeRef; +use crate::noderef::NodeArg; use crate::payload::PyNodePayload; #[pymethods] @@ -16,7 +17,8 @@ impl PyDAG { /// metadata: Optional metadata Python object. /// /// Returns: - /// NodeId for the newly created node. + /// A NodeRef for the newly created node. NodeRef is a stable handle + /// that can be passed to any method that accepts a node identifier. /// /// Raises: /// DuplicateNodeError: If a node with this name already exists. @@ -26,13 +28,19 @@ impl PyDAG { name: String, payload: Option>, metadata: Option>, - ) -> PyResult { + ) -> PyResult { let py_payload = PyNodePayload { payload, metadata }; - let node_id = self + let node_ref = self .inner .add_node(name, py_payload) .map_err(errors::into_pyerr)?; - Ok(node_id.into()) + Ok(node_ref.into()) + } + + /// Look up the current NodeRef for a given name, returning None if no + /// node with that name exists. + pub fn node_ref(&self, name: &str) -> Option { + self.inner.node_ref(name).map(PyNodeRef::from) } /// Add multiple nodes at once. More efficient than repeated add_node calls. 
@@ -41,11 +49,11 @@ impl PyDAG { /// nodes: List of node names (strings) or (name, payload) tuples or (name, payload, metadata) tuples. /// /// Returns: - /// List of NodeId objects. + /// List of NodeRef objects. /// /// Raises: /// DuplicateNodeError: If any node name already exists. - pub fn add_nodes(&mut self, nodes: &Bound<'_, PyList>) -> PyResult> { + pub fn add_nodes(&mut self, nodes: &Bound<'_, PyList>) -> PyResult> { let mut result = Vec::with_capacity(nodes.len()); for item in nodes.iter() { if let Ok(name) = item.extract::() { @@ -68,32 +76,30 @@ impl PyDAG { /// Add a directed edge from one node to another. /// /// Args: - /// from_node: Name of the source node. - /// to_node: Name of the target node. + /// from_node: Source node — accepts either a string name or a NodeRef. + /// to_node: Target node — accepts either a string name or a NodeRef. /// weight: Optional edge weight (default 1.0). /// label: Optional edge label. /// /// Raises: /// NodeNotFoundError: If either node doesn't exist. + /// StaleNodeRefError: If a NodeRef points to a removed/replaced node. /// CycleError: If the edge would create a cycle. 
#[pyo3(signature = (from_node, to_node, weight=None, label=None))] pub fn add_edge( &mut self, py: Python<'_>, - from_node: &str, - to_node: &str, + from_node: NodeArg, + to_node: NodeArg, weight: Option, label: Option, ) -> PyResult<()> { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; + // Resolve names first while we have &self - let from_idx = self - .inner - .resolve_name(from_node) - .map_err(errors::into_pyerr)?; - let to_idx = self - .inner - .resolve_name(to_node) - .map_err(errors::into_pyerr)?; + let from_idx = self.inner.resolve_name(&from).map_err(errors::into_pyerr)?; + let to_idx = self.inner.resolve_name(&to).map_err(errors::into_pyerr)?; // Check for cycle — release GIL for the graph traversal let graph_ref = self.inner.inner_graph(); @@ -109,8 +115,8 @@ impl PyDAG { return Err(errors::into_pyerr(dagron_core::DagronError::Cycle( format!( "Edge {} -> {} would create a cycle: {}", - from_node, - to_node, + from, + to, names.join(" -> ") ), ))); @@ -130,25 +136,26 @@ impl PyDAG { /// Add multiple edges at once. /// /// Args: - /// edges: List of (from, to) tuples, optionally with weight and label: - /// (from, to), (from, to, weight), or (from, to, weight, label). + /// edges: List of (from, to) tuples (either strings or NodeRefs), + /// optionally with weight and label. /// /// Raises: /// NodeNotFoundError: If any referenced node doesn't exist. /// CycleError: If any edge would create a cycle. 
pub fn add_edges(&mut self, py: Python<'_>, edges: &Bound<'_, PyList>) -> PyResult<()> { for item in edges.iter() { - if let Ok((from, to)) = item.extract::<(String, String)>() { - self.add_edge(py, &from, &to, None, None)?; - } else if let Ok((from, to, weight)) = item.extract::<(String, String, f64)>() { - self.add_edge(py, &from, &to, Some(weight), None)?; - } else if let Ok((from, to, weight, label)) = - item.extract::<(String, String, f64, String)>() + // Try the largest tuple first so we don't lose weight/label. + if let Ok((from, to, weight, label)) = item.extract::<(NodeArg, NodeArg, f64, String)>() { - self.add_edge(py, &from, &to, Some(weight), Some(label))?; + self.add_edge(py, from, to, Some(weight), Some(label))?; + } else if let Ok((from, to, weight)) = item.extract::<(NodeArg, NodeArg, f64)>() { + self.add_edge(py, from, to, Some(weight), None)?; + } else if let Ok((from, to)) = item.extract::<(NodeArg, NodeArg)>() { + self.add_edge(py, from, to, None, None)?; } else { return Err(pyo3::exceptions::PyTypeError::new_err( - "Each edge must be a (from, to), (from, to, weight), or (from, to, weight, label) tuple", + "Each edge must be a (from, to), (from, to, weight), or (from, to, weight, label) tuple. \ + `from` and `to` may be a str or NodeRef.", )); } } @@ -158,26 +165,29 @@ impl PyDAG { /// Remove a node and all its incident edges. /// /// Args: - /// name: Name of the node to remove. + /// node: Node to remove (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn remove_node(&mut self, name: &str) -> PyResult<()> { - self.inner.remove_node(name).map_err(errors::into_pyerr) + pub fn remove_node(&mut self, node: NodeArg) -> PyResult<()> { + let name = node.into_name(&self.inner)?; + self.inner.remove_node(&name).map_err(errors::into_pyerr) } /// Remove an edge between two nodes. /// /// Args: - /// from_node: Name of the source node. - /// to_node: Name of the target node. 
+ /// from_node: Source node (str or NodeRef). + /// to_node: Target node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If either node doesn't exist. /// EdgeNotFoundError: If no edge exists between the nodes. - pub fn remove_edge(&mut self, from_node: &str, to_node: &str) -> PyResult<()> { + pub fn remove_edge(&mut self, from_node: NodeArg, to_node: NodeArg) -> PyResult<()> { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; self.inner - .remove_edge(from_node, to_node) + .remove_edge(&from, &to) .map_err(errors::into_pyerr) } diff --git a/crates/dagron-py/src/errors.rs b/crates/dagron-py/src/errors.rs index 17b705c..2894a88 100644 --- a/crates/dagron-py/src/errors.rs +++ b/crates/dagron-py/src/errors.rs @@ -37,6 +37,12 @@ create_exception!( DagronError, "Raised for general graph operation errors." ); +create_exception!( + dagron, + StaleNodeRefError, + DagronError, + "Raised when a NodeRef refers to a node that has been removed or replaced." +); /// Convert a DagronError into a PyErr. pub fn into_pyerr(err: dagron_core::DagronError) -> pyo3::PyErr { @@ -47,6 +53,7 @@ pub fn into_pyerr(err: dagron_core::DagronError) -> pyo3::PyErr { dagron_core::DagronError::EdgeNotFound(from, to) => { EdgeNotFoundError::new_err(format!("{from} -> {to}")) } + dagron_core::DagronError::StaleNodeRef(msg) => StaleNodeRefError::new_err(msg), dagron_core::DagronError::Graph(msg) => GraphError::new_err(msg), } } diff --git a/crates/dagron-py/src/incremental.rs b/crates/dagron-py/src/incremental.rs index 94d2d52..d984e40 100644 --- a/crates/dagron-py/src/incremental.rs +++ b/crates/dagron-py/src/incremental.rs @@ -4,34 +4,43 @@ use pyo3::prelude::*; use crate::dag::PyDAG; use crate::errors; +use crate::noderef::NodeArg; #[pymethods] impl PyDAG { /// Compute the dirty set: changed nodes plus their transitive descendants. /// /// Args: - /// changed: List of node names that have changed. 
+ /// changed: List of node identifiers (str or NodeRef) that have changed. /// /// Returns: /// List of node names that need recomputation. - pub fn dirty_set(&self, py: Python<'_>, changed: Vec) -> PyResult> { - let refs: Vec<&str> = changed.iter().map(|s| s.as_str()).collect(); + pub fn dirty_set(&self, py: Python<'_>, changed: Vec) -> PyResult> { + let names: Vec = changed + .into_iter() + .map(|n| n.into_name(&self.inner)) + .collect::>()?; + let refs: Vec<&str> = names.iter().map(|s| s.as_str()).collect(); py.allow_threads(|| self.inner.dirty_set(&refs).map_err(errors::into_pyerr)) } /// For each dirty node, determine which changed nodes are its ancestors. /// /// Args: - /// changed: List of node names that have changed. + /// changed: List of node identifiers (str or NodeRef) that have changed. /// /// Returns: /// Dict mapping dirty node name to list of changed ancestor names. pub fn change_provenance( &self, py: Python<'_>, - changed: Vec, + changed: Vec, ) -> PyResult>> { - let refs: Vec<&str> = changed.iter().map(|s| s.as_str()).collect(); + let names: Vec = changed + .into_iter() + .map(|n| n.into_name(&self.inner)) + .collect::>()?; + let refs: Vec<&str> = names.iter().map(|s| s.as_str()).collect(); py.allow_threads(|| { self.inner .change_provenance(&refs) diff --git a/crates/dagron-py/src/introspection.rs b/crates/dagron-py/src/introspection.rs index e434677..2a0ade6 100644 --- a/crates/dagron-py/src/introspection.rs +++ b/crates/dagron-py/src/introspection.rs @@ -4,22 +4,26 @@ use crate::dag::PyDAG; use crate::errors; use crate::iterators::PyNodeIterator; use crate::node::PyNodeId; +use crate::noderef::NodeArg; #[pymethods] impl PyDAG { - /// Check if a node with the given name exists. - pub fn has_node(&self, name: &str) -> bool { - self.inner.has_node(name) + /// Check if a node with the given name (or NodeRef) exists. 
+ pub fn has_node(&self, node: NodeArg) -> bool { + match node { + NodeArg::Name(s) => self.inner.has_node(&s), + NodeArg::Ref(r) => self.inner.resolve_ref(&r).is_ok(), + } } - /// Check if an edge exists between two nodes. + /// Check if an edge exists between two nodes (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If either node doesn't exist. - pub fn has_edge(&self, from_node: &str, to_node: &str) -> PyResult { - self.inner - .has_edge(from_node, to_node) - .map_err(errors::into_pyerr) + pub fn has_edge(&self, from_node: NodeArg, to_node: NodeArg) -> PyResult { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; + self.inner.has_edge(&from, &to).map_err(errors::into_pyerr) } /// Return the number of nodes in the graph. @@ -32,74 +36,81 @@ impl PyDAG { self.inner.edge_count() } - /// Get the payload associated with a node. + /// Get the payload associated with a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn get_payload(&self, py: Python<'_>, name: &str) -> PyResult> { - let p = self.inner.get_payload(name).map_err(errors::into_pyerr)?; + pub fn get_payload(&self, py: Python<'_>, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let p = self.inner.get_payload(&name).map_err(errors::into_pyerr)?; Ok(p.payload.as_ref().map(|v| v.clone_ref(py))) } - /// Set the payload for a node. + /// Set the payload for a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn set_payload(&mut self, name: &str, payload: Option>) -> PyResult<()> { + pub fn set_payload(&mut self, node: NodeArg, payload: Option>) -> PyResult<()> { + let name = node.into_name(&self.inner)?; let p = self .inner - .get_payload_mut(name) + .get_payload_mut(&name) .map_err(errors::into_pyerr)?; p.payload = payload; Ok(()) } - /// Get the metadata associated with a node. + /// Get the metadata associated with a node (str or NodeRef). 
/// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn get_metadata(&self, py: Python<'_>, name: &str) -> PyResult> { - let p = self.inner.get_payload(name).map_err(errors::into_pyerr)?; + pub fn get_metadata(&self, py: Python<'_>, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let p = self.inner.get_payload(&name).map_err(errors::into_pyerr)?; Ok(p.metadata.as_ref().map(|v| v.clone_ref(py))) } - /// Set the metadata for a node. + /// Set the metadata for a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn set_metadata(&mut self, name: &str, metadata: Option>) -> PyResult<()> { + pub fn set_metadata(&mut self, node: NodeArg, metadata: Option>) -> PyResult<()> { + let name = node.into_name(&self.inner)?; let p = self .inner - .get_payload_mut(name) + .get_payload_mut(&name) .map_err(errors::into_pyerr)?; p.metadata = metadata; Ok(()) } - /// Get the immediate predecessors (parents) of a node. + /// Get the immediate predecessors (parents) of a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn predecessors(&self, name: &str) -> PyResult> { - let nodes = self.inner.predecessors(name).map_err(errors::into_pyerr)?; + pub fn predecessors(&self, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let nodes = self.inner.predecessors(&name).map_err(errors::into_pyerr)?; Ok(nodes.into_iter().map(PyNodeId::from).collect()) } - /// Get the immediate successors (children) of a node. + /// Get the immediate successors (children) of a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. 
- pub fn successors(&self, name: &str) -> PyResult> { - let nodes = self.inner.successors(name).map_err(errors::into_pyerr)?; + pub fn successors(&self, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let nodes = self.inner.successors(&name).map_err(errors::into_pyerr)?; Ok(nodes.into_iter().map(PyNodeId::from).collect()) } - /// Get all ancestors of a node (transitive predecessors). + /// Get all ancestors of a node (transitive predecessors). Accepts str or NodeRef. /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn ancestors(&self, py: Python<'_>, name: &str) -> PyResult> { - let idx = self.inner.resolve_name(name).map_err(errors::into_pyerr)?; + pub fn ancestors(&self, py: Python<'_>, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let idx = self.inner.resolve_name(&name).map_err(errors::into_pyerr)?; let graph_ref = self.inner.inner_graph(); let indices = py.allow_threads(|| dagron_core::algorithms::ancestors(graph_ref, idx)); Ok(indices @@ -111,12 +122,13 @@ impl PyDAG { .collect()) } - /// Get all descendants of a node (transitive successors). + /// Get all descendants of a node (transitive successors). Accepts str or NodeRef. /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn descendants(&self, py: Python<'_>, name: &str) -> PyResult> { - let idx = self.inner.resolve_name(name).map_err(errors::into_pyerr)?; + pub fn descendants(&self, py: Python<'_>, node: NodeArg) -> PyResult> { + let name = node.into_name(&self.inner)?; + let idx = self.inner.resolve_name(&name).map_err(errors::into_pyerr)?; let graph_ref = self.inner.inner_graph(); let indices = py.allow_threads(|| dagron_core::algorithms::descendants(graph_ref, idx)); Ok(indices @@ -128,20 +140,22 @@ impl PyDAG { .collect()) } - /// Get the in-degree (number of incoming edges) of a node. + /// Get the in-degree (number of incoming edges) of a node (str or NodeRef). 
/// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn in_degree(&self, name: &str) -> PyResult { - self.inner.in_degree(name).map_err(errors::into_pyerr) + pub fn in_degree(&self, node: NodeArg) -> PyResult { + let name = node.into_name(&self.inner)?; + self.inner.in_degree(&name).map_err(errors::into_pyerr) } - /// Get the out-degree (number of outgoing edges) of a node. + /// Get the out-degree (number of outgoing edges) of a node (str or NodeRef). /// /// Raises: /// NodeNotFoundError: If the node doesn't exist. - pub fn out_degree(&self, name: &str) -> PyResult { - self.inner.out_degree(name).map_err(errors::into_pyerr) + pub fn out_degree(&self, node: NodeArg) -> PyResult { + let name = node.into_name(&self.inner)?; + self.inner.out_degree(&name).map_err(errors::into_pyerr) } /// Get all root nodes (nodes with no incoming edges). @@ -196,9 +210,10 @@ impl PyDAG { PyNodeIterator::new(items) } - /// Return a lazy iterator over ancestors of a node. - pub fn iter_ancestors(&self, py: Python<'_>, name: &str) -> PyResult { - let idx = self.inner.resolve_name(name).map_err(errors::into_pyerr)?; + /// Return a lazy iterator over ancestors of a node (str or NodeRef). + pub fn iter_ancestors(&self, py: Python<'_>, node: NodeArg) -> PyResult { + let name = node.into_name(&self.inner)?; + let idx = self.inner.resolve_name(&name).map_err(errors::into_pyerr)?; let graph_ref = self.inner.inner_graph(); let indices = py.allow_threads(|| dagron_core::algorithms::ancestors(graph_ref, idx)); let items: Vec<(u32, String)> = indices @@ -208,9 +223,10 @@ impl PyDAG { Ok(PyNodeIterator::new(items)) } - /// Return a lazy iterator over descendants of a node. - pub fn iter_descendants(&self, py: Python<'_>, name: &str) -> PyResult { - let idx = self.inner.resolve_name(name).map_err(errors::into_pyerr)?; + /// Return a lazy iterator over descendants of a node (str or NodeRef). 
+ pub fn iter_descendants(&self, py: Python<'_>, node: NodeArg) -> PyResult { + let name = node.into_name(&self.inner)?; + let idx = self.inner.resolve_name(&name).map_err(errors::into_pyerr)?; let graph_ref = self.inner.inner_graph(); let indices = py.allow_threads(|| dagron_core::algorithms::descendants(graph_ref, idx)); let items: Vec<(u32, String)> = indices diff --git a/crates/dagron-py/src/lib.rs b/crates/dagron-py/src/lib.rs index 3ef2fe2..4ee8905 100644 --- a/crates/dagron-py/src/lib.rs +++ b/crates/dagron-py/src/lib.rs @@ -9,6 +9,7 @@ pub mod introspection; pub mod iterators; pub mod matching; pub mod node; +pub mod noderef; pub mod partition; pub mod paths; pub mod payload; @@ -29,6 +30,7 @@ use pyo3::prelude::*; fn dagron(m: &Bound<'_, PyModule>) -> PyResult<()> { m.add_class::()?; m.add_class::()?; + m.add_class::()?; m.add_class::()?; m.add_class::()?; m.add_class::()?; @@ -59,6 +61,10 @@ fn dagron(m: &Bound<'_, PyModule>) -> PyResult<()> { m.py().get_type::(), )?; m.add("GraphError", m.py().get_type::())?; + m.add( + "StaleNodeRefError", + m.py().get_type::(), + )?; Ok(()) } diff --git a/crates/dagron-py/src/node.rs b/crates/dagron-py/src/node.rs index bdcc3e0..4170fa4 100644 --- a/crates/dagron-py/src/node.rs +++ b/crates/dagron-py/src/node.rs @@ -1,5 +1,10 @@ use pyo3::prelude::*; +/// A snapshot identifier for a node, returned by enumeration methods +/// (`nodes()`, `successors()`, `roots()`, …). Carries the node's `name` +/// plus its current internal `index`. The `index` is a snapshot — after +/// a node is removed it may be reused. For a stable handle, use +/// [`PyNodeRef`]. #[pyclass(frozen, eq, hash, name = "NodeId")] #[derive(Clone, PartialEq, Eq, Hash)] pub struct PyNodeId { @@ -28,3 +33,48 @@ impl From for PyNodeId { } } } + +/// A stable, persistent handle to a node, returned by `add_node`. 
+/// +/// `NodeRef` survives unrelated mutations and is invalidated only when its +/// own node is removed (or removed-and-readded with the same name, which +/// produces a fresh handle with a different `epoch`). Pass it anywhere a +/// `str` name is accepted — both forms are interchangeable in dagron's API. +#[pyclass(frozen, eq, hash, name = "NodeRef")] +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct PyNodeRef { + pub inner: dagron_core::NodeRef, +} + +#[pymethods] +impl PyNodeRef { + /// The node's name. + #[getter] + pub fn name(&self) -> &str { + self.inner.name() + } + + /// The creation epoch this ref was minted with. + #[getter] + pub fn epoch(&self) -> u64 { + self.inner.epoch() + } + + fn __repr__(&self) -> String { + format!( + "NodeRef(name={:?}, epoch={})", + self.inner.name(), + self.inner.epoch() + ) + } + + fn __str__(&self) -> &str { + self.inner.name() + } +} + +impl From for PyNodeRef { + fn from(r: dagron_core::NodeRef) -> Self { + PyNodeRef { inner: r } + } +} diff --git a/crates/dagron-py/src/noderef.rs b/crates/dagron-py/src/noderef.rs new file mode 100644 index 0000000..b96bfce --- /dev/null +++ b/crates/dagron-py/src/noderef.rs @@ -0,0 +1,55 @@ +//! Helper for accepting either `str` or `NodeRef` as a node identifier +//! at the public API boundary. + +use pyo3::prelude::*; +use pyo3::types::PyString; + +use crate::errors; +use crate::node::PyNodeRef; +use crate::payload::PyNodePayload; + +/// A node identifier passed in from Python — either a plain string name +/// or a [`PyNodeRef`]. Use [`NodeArg::into_name`] to resolve to a `&str`, +/// validating any embedded NodeRef against the DAG. 
+pub enum NodeArg { + Name(String), + Ref(dagron_core::NodeRef), +} + +impl<'py> FromPyObject<'py> for NodeArg { + fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult { + if let Ok(s) = ob.downcast::() { + return Ok(NodeArg::Name(s.to_string())); + } + if let Ok(r) = ob.extract::>() { + return Ok(NodeArg::Ref(r.inner.clone())); + } + Err(pyo3::exceptions::PyTypeError::new_err( + "expected str or NodeRef for node identifier", + )) + } +} + +impl NodeArg { + /// Resolve to an owned name. If the arg is a `NodeRef`, validates it + /// against the DAG first (so a stale ref errors instead of silently + /// resolving by name). + pub fn into_name(self, dag: &dagron_core::DAG) -> PyResult { + match self { + NodeArg::Name(s) => Ok(s), + NodeArg::Ref(r) => { + dag.resolve_ref(&r).map_err(errors::into_pyerr)?; + Ok(r.name.to_string()) + } + } + } + + /// Borrow the name without consuming. NodeRef variants are NOT validated + /// here — use `into_name` if validation is required. + pub fn name_str(&self) -> &str { + match self { + NodeArg::Name(s) => s.as_str(), + NodeArg::Ref(r) => r.name(), + } + } +} diff --git a/crates/dagron-py/src/paths.rs b/crates/dagron-py/src/paths.rs index ebc7ecf..4f0d563 100644 --- a/crates/dagron-py/src/paths.rs +++ b/crates/dagron-py/src/paths.rs @@ -6,14 +6,15 @@ use pyo3::prelude::*; use crate::dag::PyDAG; use crate::errors; use crate::node::PyNodeId; +use crate::noderef::NodeArg; #[pymethods] impl PyDAG { /// Find all directed paths from one node to another. /// /// Args: - /// from_node: Source node name. - /// to_node: Target node name. + /// from_node: Source node (str or NodeRef). + /// to_node: Target node (str or NodeRef). /// limit: Maximum number of paths to return (None = unlimited). 
/// /// Returns: @@ -25,13 +26,15 @@ impl PyDAG { pub fn all_paths( &self, py: Python<'_>, - from_node: String, - to_node: String, + from_node: NodeArg, + to_node: NodeArg, limit: Option, ) -> PyResult>> { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; let inner_ref = &self.inner; let paths = py - .allow_threads(|| inner_ref.all_paths(&from_node, &to_node, limit)) + .allow_threads(|| inner_ref.all_paths(&from, &to, limit)) .map_err(errors::into_pyerr)?; Ok(paths .into_iter() @@ -42,8 +45,8 @@ impl PyDAG { /// Find the shortest path (fewest edges) between two nodes. /// /// Args: - /// from_node: Source node name. - /// to_node: Target node name. + /// from_node: Source node (str or NodeRef). + /// to_node: Target node (str or NodeRef). /// /// Returns: /// List of NodeId representing the path, or None if unreachable. @@ -53,12 +56,14 @@ impl PyDAG { pub fn shortest_path( &self, py: Python<'_>, - from_node: String, - to_node: String, + from_node: NodeArg, + to_node: NodeArg, ) -> PyResult>> { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; let inner_ref = &self.inner; let result = py - .allow_threads(|| inner_ref.shortest_path(&from_node, &to_node)) + .allow_threads(|| inner_ref.shortest_path(&from, &to)) .map_err(errors::into_pyerr)?; Ok(result.map(|path| path.into_iter().map(PyNodeId::from).collect())) } @@ -66,8 +71,8 @@ impl PyDAG { /// Find the longest weighted path between two nodes. /// /// Args: - /// from_node: Source node name. - /// to_node: Target node name. + /// from_node: Source node (str or NodeRef). + /// to_node: Target node (str or NodeRef). /// costs: Optional dict mapping node names to costs (default 1.0). 
/// /// Returns: @@ -79,14 +84,16 @@ impl PyDAG { pub fn longest_path( &self, py: Python<'_>, - from_node: String, - to_node: String, + from_node: NodeArg, + to_node: NodeArg, costs: Option>, ) -> PyResult, f64)>> { + let from = from_node.into_name(&self.inner)?; + let to = to_node.into_name(&self.inner)?; let costs_map: AHashMap = costs.unwrap_or_default().into_iter().collect(); let inner_ref = &self.inner; let result = py - .allow_threads(|| inner_ref.longest_path(&from_node, &to_node, &costs_map)) + .allow_threads(|| inner_ref.longest_path(&from, &to, &costs_map)) .map_err(errors::into_pyerr)?; Ok(result.map(|(path, cost)| (path.into_iter().map(PyNodeId::from).collect(), cost))) } diff --git a/crates/dagron-py/src/reachability.rs b/crates/dagron-py/src/reachability.rs index 31e5942..fe06079 100644 --- a/crates/dagron-py/src/reachability.rs +++ b/crates/dagron-py/src/reachability.rs @@ -5,6 +5,7 @@ use dagron_core::types::InternalNodeIndex; use crate::dag::PyDAG; use crate::errors; +use crate::noderef::NodeArg; /// Precomputed reachability index for O(1) ancestor/descendant queries. /// @@ -24,26 +25,26 @@ impl PyReachabilityIndex { /// Check if `from_node` can reach `to_node` in O(1). /// /// Args: - /// from_node: Source node name. - /// to_node: Target node name. + /// from_node: Source node (str or NodeRef). + /// to_node: Target node (str or NodeRef). /// /// Returns: /// True if from_node can reach to_node. - pub fn can_reach(&self, from_node: &str, to_node: &str) -> PyResult { - let from_idx = self.resolve(from_node)?; - let to_idx = self.resolve(to_node)?; + pub fn can_reach(&self, from_node: NodeArg, to_node: NodeArg) -> PyResult { + let from_idx = self.resolve(from_node.name_str())?; + let to_idx = self.resolve(to_node.name_str())?; Ok(self.inner.can_reach(from_idx, to_idx)) } /// All nodes reachable from `node` (excluding self). /// /// Args: - /// node: Node name. + /// node: Node (str or NodeRef). /// /// Returns: /// List of reachable node names. 
- pub fn reachable_from(&self, node: &str) -> PyResult> { - let idx = self.resolve(node)?; + pub fn reachable_from(&self, node: NodeArg) -> PyResult> { + let idx = self.resolve(node.name_str())?; Ok(self .inner .reachable_from(idx) @@ -55,12 +56,12 @@ impl PyReachabilityIndex { /// All nodes that can reach `node` (excluding self). /// /// Args: - /// node: Node name. + /// node: Node (str or NodeRef). /// /// Returns: /// List of ancestor node names. - pub fn ancestors_of(&self, node: &str) -> PyResult> { - let idx = self.resolve(node)?; + pub fn ancestors_of(&self, node: NodeArg) -> PyResult> { + let idx = self.resolve(node.name_str())?; Ok(self .inner .ancestors_of(idx) @@ -128,11 +129,13 @@ impl PyDAG { pub fn is_ancestor( &self, py: Python<'_>, - ancestor: String, - descendant: String, + ancestor: NodeArg, + descendant: NodeArg, ) -> PyResult { + let a = ancestor.into_name(&self.inner)?; + let d = descendant.into_name(&self.inner)?; let inner_ref = &self.inner; - py.allow_threads(|| inner_ref.is_ancestor(&ancestor, &descendant)) + py.allow_threads(|| inner_ref.is_ancestor(&a, &d)) .map_err(errors::into_pyerr) } } diff --git a/crates/dagron-py/src/subgraph.rs b/crates/dagron-py/src/subgraph.rs index 055ad51..36b841c 100644 --- a/crates/dagron-py/src/subgraph.rs +++ b/crates/dagron-py/src/subgraph.rs @@ -2,6 +2,7 @@ use pyo3::prelude::*; use crate::dag::PyDAG; use crate::errors; +use crate::noderef::NodeArg; use crate::transforms::clone_edges; #[pymethods] @@ -18,11 +19,17 @@ impl PyDAG { /// /// Raises: /// NodeNotFoundError: If any node doesn't exist. 
- pub fn subgraph(&self, py: Python<'_>, nodes: Vec) -> PyResult { + pub fn subgraph(&self, py: Python<'_>, nodes: Vec) -> PyResult { let mut new_dag = dagron_core::DAG::new(); + // Resolve all node args to validated names first + let names: Vec = nodes + .into_iter() + .map(|n| n.into_name(&self.inner)) + .collect::>()?; + // Add matching nodes with cloned payloads - for name in &nodes { + for name in &names { if !self.inner.has_node(name) { return Err(errors::into_pyerr(dagron_core::DagronError::NodeNotFound( name.clone(), @@ -57,7 +64,7 @@ impl PyDAG { pub fn subgraph_by_depth( &self, py: Python<'_>, - root: String, + root: NodeArg, depth: usize, direction: &str, ) -> PyResult { @@ -73,7 +80,11 @@ impl PyDAG { }; // Resolve root and compute neighborhood - let root_idx = self.inner.resolve_name(&root).map_err(errors::into_pyerr)?; + let root_name = root.into_name(&self.inner)?; + let root_idx = self + .inner + .resolve_name(&root_name) + .map_err(errors::into_pyerr)?; let neighborhood = py.allow_threads(|| { dagron_core::algorithms::depth_neighborhood( self.inner.inner_graph(), diff --git a/crates/dagron-py/src/transforms.rs b/crates/dagron-py/src/transforms.rs index 8f37228..b800065 100644 --- a/crates/dagron-py/src/transforms.rs +++ b/crates/dagron-py/src/transforms.rs @@ -2,6 +2,7 @@ use pyo3::prelude::*; use crate::dag::PyDAG; use crate::errors; +use crate::noderef::NodeArg; use crate::payload::PyNodePayload; /// Clone a PyNodePayload, using the GIL to clone Py references. 
@@ -86,11 +87,15 @@ impl PyDAG { pub fn collapse( &self, py: Python<'_>, - nodes: Vec, + nodes: Vec, collapsed_name: String, payload: Option, ) -> PyResult { - let node_refs: Vec<&str> = nodes.iter().map(|s| s.as_str()).collect(); + let node_names: Vec = nodes + .into_iter() + .map(|n| n.into_name(&self.inner)) + .collect::>()?; + let node_refs: Vec<&str> = node_names.iter().map(|s| s.as_str()).collect(); let collapse_set: std::collections::HashSet<&str> = node_refs.iter().copied().collect(); // Validate all nodes exist @@ -168,8 +173,13 @@ impl PyDAG { /// /// Returns: /// A list of (node, immediate_dominator) tuples. - pub fn dominator_tree(&self, py: Python<'_>, root: String) -> PyResult> { - py.allow_threads(|| self.inner.dominator_tree(&root).map_err(errors::into_pyerr)) + pub fn dominator_tree(&self, py: Python<'_>, root: NodeArg) -> PyResult> { + let root_name = root.into_name(&self.inner)?; + py.allow_threads(|| { + self.inner + .dominator_tree(&root_name) + .map_err(errors::into_pyerr) + }) } /// Create an independent snapshot (deep clone) of this DAG. 
diff --git a/py_src/dagron/__init__.py b/py_src/dagron/__init__.py index bf08983..65e4971 100644 --- a/py_src/dagron/__init__.py +++ b/py_src/dagron/__init__.py @@ -15,8 +15,10 @@ NodeIterator, NodeLevelIterator, NodeNotFoundError, + NodeRef, ReachabilityIndex, ScheduledNode, + StaleNodeRefError, ) from dagron._patches import apply_patches as _apply_patches from dagron.analysis import ( @@ -48,6 +50,7 @@ validate_schema, ) from dagron.display import pretty_print +from dagron.effects import Effect, effects_of from dagron.execution import ( ApprovalGate, AsyncDAGExecutor, @@ -98,6 +101,7 @@ profile_execution, task, ) +from dagron.flow import Flow, FlowFuture, flow from dagron.integration import from_records from dagron.plugins import ( DagronPlugin, @@ -162,11 +166,14 @@ def __getattr__(name: str) -> object: "DynamicModification", "DynamicNodeSpec", "EdgeNotFoundError", + "Effect", "ExecutionCallbacks", "ExecutionPlan", "ExecutionResult", "ExecutionStep", "ExecutionTrace", + "Flow", + "FlowFuture", "GateController", "GateRejectedError", "GateStatus", @@ -195,6 +202,7 @@ def __getattr__(name: str) -> object: "NodeLevelIterator", "NodeNotFoundError", "NodeProfile", + "NodeRef", "NodeResult", "NodeStatus", "PartitionedDAGExecutor", @@ -211,6 +219,7 @@ def __getattr__(name: str) -> object: "ResourceTimeline", "ScheduledNode", "SchemaViolation", + "StaleNodeRefError", "TemplateError", "TemplateParam", "ThreadBackend", @@ -219,7 +228,9 @@ def __getattr__(name: str) -> object: "VersionedDAG", "WhatIfResult", "compose", + "effects_of", "extract_contracts", + "flow", "from_records", "pretty_print", "profile_execution", diff --git a/py_src/dagron/__init__.pyi b/py_src/dagron/__init__.pyi index 159a0a7..d812ba8 100644 --- a/py_src/dagron/__init__.pyi +++ b/py_src/dagron/__init__.pyi @@ -45,12 +45,18 @@ from dagron._internal import ( from dagron._internal import ( NodeNotFoundError as NodeNotFoundError, ) +from dagron._internal import ( + NodeRef as NodeRef, +) from 
dagron._internal import ( ReachabilityIndex as ReachabilityIndex, ) from dagron._internal import ( ScheduledNode as ScheduledNode, ) +from dagron._internal import ( + StaleNodeRefError as StaleNodeRefError, +) from dagron.analysis import ( DAGSchema as DAGSchema, ) @@ -116,6 +122,8 @@ from dagron.dataframe import ( from dagron.dataframe import ( validate_schema as validate_schema, ) +from dagron.effects import Effect as Effect +from dagron.effects import effects_of as effects_of from dagron.execution import ( AsyncDAGExecutor as AsyncDAGExecutor, ) @@ -200,6 +208,9 @@ from dagron.execution import ( from dagron.execution import ( task as task, ) +from dagron.flow import Flow as Flow +from dagron.flow import FlowFuture as FlowFuture +from dagron.flow import flow as flow from dagron.versioning import ( Mutation as Mutation, ) diff --git a/py_src/dagron/_internal.pyi b/py_src/dagron/_internal.pyi index 2ac8515..761b652 100644 --- a/py_src/dagron/_internal.pyi +++ b/py_src/dagron/_internal.pyi @@ -6,7 +6,11 @@ from typing import Any, Literal from dagron.builder import DAGBuilder class NodeId: - """An immutable identifier for a node in the DAG.""" + """Snapshot identifier for a node, returned by enumeration methods. + + Carries the node's `name` plus a snapshot internal `index`. The `index` + may be reused after node removal — for a stable handle, use `NodeRef`. + """ @property def index(self) -> int: ... @@ -17,6 +21,28 @@ class NodeId: def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... +class NodeRef: + """A stable, persistent handle to a node. + + Returned by `DAG.add_node`. Survives unrelated mutations and is invalidated + only when its own node is removed (or removed-and-readded with the same + name, which yields a fresh handle with a different `epoch`). Pass it + anywhere a `str` name is accepted. + """ + + @property + def name(self) -> str: ... + @property + def epoch(self) -> int: ... + def __repr__(self) -> str: ... 
+ def __str__(self) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +# Public alias: anywhere dagron accepts a node identifier, you can pass either +# a string name or a NodeRef. +type NodeArg = str | NodeRef + class ScheduledNode: """A node with scheduling information.""" @@ -61,9 +87,9 @@ class ReachabilityIndex: Note: The index becomes stale if the DAG is mutated after building. """ - def can_reach(self, from_node: str, to_node: str) -> bool: ... - def reachable_from(self, node: str) -> list[str]: ... - def ancestors_of(self, node: str) -> list[str]: ... + def can_reach(self, from_node: NodeArg, to_node: NodeArg) -> bool: ... + def reachable_from(self, node: NodeArg) -> list[str]: ... + def ancestors_of(self, node: NodeArg) -> list[str]: ... def node_count(self) -> int: ... class GraphStats: @@ -151,42 +177,47 @@ class DAG: name: str, payload: Any = None, metadata: Any = None, - ) -> NodeId: ... + ) -> NodeRef: ... def add_nodes( self, nodes: Sequence[str | tuple[str, Any] | tuple[str, Any, Any]], - ) -> list[NodeId]: ... + ) -> list[NodeRef]: ... + def node_ref(self, name: str) -> NodeRef | None: ... def add_edge( self, - from_node: str, - to_node: str, + from_node: NodeArg, + to_node: NodeArg, weight: float | None = None, label: str | None = None, ) -> None: ... def add_edges( self, - edges: list[tuple[str, str] | tuple[str, str, float] | tuple[str, str, float, str]], + edges: list[ + tuple[NodeArg, NodeArg] + | tuple[NodeArg, NodeArg, float] + | tuple[NodeArg, NodeArg, float, str] + ], ) -> None: ... - def remove_node(self, name: str) -> None: ... - def remove_edge(self, from_node: str, to_node: str) -> None: ... + def remove_node(self, node: NodeArg) -> None: ... + def remove_edge(self, from_node: NodeArg, to_node: NodeArg) -> None: ... @staticmethod def builder() -> DAGBuilder: ... # --- Introspection --- - def has_node(self, name: str) -> bool: ... - def has_edge(self, from_node: str, to_node: str) -> bool: ... 
+ def has_node(self, node: NodeArg) -> bool: ... + def has_edge(self, from_node: NodeArg, to_node: NodeArg) -> bool: ... def node_count(self) -> int: ... def edge_count(self) -> int: ... - def get_payload(self, name: str) -> Any: ... - def set_payload(self, name: str, payload: Any) -> None: ... - def get_metadata(self, name: str) -> Any: ... - def set_metadata(self, name: str, metadata: Any) -> None: ... - def predecessors(self, name: str) -> list[NodeId]: ... - def successors(self, name: str) -> list[NodeId]: ... - def ancestors(self, name: str) -> list[NodeId]: ... - def descendants(self, name: str) -> list[NodeId]: ... - def in_degree(self, name: str) -> int: ... - def out_degree(self, name: str) -> int: ... + def get_payload(self, node: NodeArg) -> Any: ... + def set_payload(self, node: NodeArg, payload: Any) -> None: ... + def get_metadata(self, node: NodeArg) -> Any: ... + def set_metadata(self, node: NodeArg, metadata: Any) -> None: ... + def predecessors(self, node: NodeArg) -> list[NodeId]: ... + def successors(self, node: NodeArg) -> list[NodeId]: ... + def ancestors(self, node: NodeArg) -> list[NodeId]: ... + def descendants(self, node: NodeArg) -> list[NodeId]: ... + def in_degree(self, node: NodeArg) -> int: ... + def out_degree(self, node: NodeArg) -> int: ... def roots(self) -> list[NodeId]: ... def leaves(self) -> list[NodeId]: ... def nodes(self) -> list[NodeId]: ... @@ -195,8 +226,8 @@ class DAG: def iter_nodes(self) -> NodeIterator: ... def iter_roots(self) -> NodeIterator: ... def iter_leaves(self) -> NodeIterator: ... - def iter_ancestors(self, name: str) -> NodeIterator: ... - def iter_descendants(self, name: str) -> NodeIterator: ... + def iter_ancestors(self, node: NodeArg) -> NodeIterator: ... + def iter_descendants(self, node: NodeArg) -> NodeIterator: ... # --- Validation --- def validate(self) -> bool: ... @@ -253,11 +284,11 @@ class DAG: def reverse(self) -> DAG: ... 
def collapse( self, - nodes: list[str], + nodes: Sequence[NodeArg], collapsed_name: str, payload: Any = None, ) -> DAG: ... - def dominator_tree(self, root: str) -> list[tuple[str, str]]: ... + def dominator_tree(self, root: NodeArg) -> list[tuple[str, str]]: ... def snapshot(self) -> DAG: ... def transitive_reduction(self) -> DAG: ... def transitive_closure(self) -> DAG: ... @@ -270,14 +301,14 @@ class DAG: ) -> DAG: ... # --- Incremental --- - def dirty_set(self, changed: list[str]) -> list[str]: ... - def change_provenance(self, changed: list[str]) -> dict[str, list[str]]: ... + def dirty_set(self, changed: Sequence[NodeArg]) -> list[str]: ... + def change_provenance(self, changed: Sequence[NodeArg]) -> dict[str, list[str]]: ... # --- Subgraph --- - def subgraph(self, nodes: list[str]) -> DAG: ... + def subgraph(self, nodes: Sequence[NodeArg]) -> DAG: ... def subgraph_by_depth( self, - root: str, + root: NodeArg, depth: int, direction: Literal["forward", "backward", "both"] = "both", ) -> DAG: ... @@ -285,21 +316,21 @@ class DAG: # --- Paths --- def all_paths( self, - from_node: str, - to_node: str, + from_node: NodeArg, + to_node: NodeArg, limit: int | None = None, ) -> list[list[NodeId]]: ... - def shortest_path(self, from_node: str, to_node: str) -> list[NodeId] | None: ... + def shortest_path(self, from_node: NodeArg, to_node: NodeArg) -> list[NodeId] | None: ... def longest_path( self, - from_node: str, - to_node: str, + from_node: NodeArg, + to_node: NodeArg, costs: dict[str, float] | None = None, ) -> tuple[list[NodeId], float] | None: ... # --- Reachability --- def build_reachability_index(self) -> ReachabilityIndex: ... - def is_ancestor(self, ancestor: str, descendant: str) -> bool: ... + def is_ancestor(self, ancestor: NodeArg, descendant: NodeArg) -> bool: ... # --- Matching --- def nodes_matching_regex(self, pattern: str) -> list[NodeId]: ... 
@@ -361,16 +392,16 @@ class DAG: # --- Analysis (monkey-patched) --- def explain( self, - node: str, + node: NodeArg, costs: dict[str, float] | None = None, ) -> Any: ... def what_if( self, *, - remove_nodes: list[str] | None = None, - remove_edges: list[tuple[str, str]] | None = None, - add_nodes: list[str] | None = None, - add_edges: list[tuple[str, str]] | None = None, + remove_nodes: Sequence[NodeArg] | None = None, + remove_edges: Sequence[tuple[NodeArg, NodeArg]] | None = None, + add_nodes: Sequence[str] | None = None, + add_edges: Sequence[tuple[NodeArg, NodeArg]] | None = None, costs: dict[str, float] | None = None, ) -> Any: ... def lint( @@ -412,3 +443,4 @@ class NodeNotFoundError(DagronError): ... class DuplicateNodeError(DagronError): ... class EdgeNotFoundError(DagronError): ... class GraphError(DagronError): ... +class StaleNodeRefError(DagronError): ... diff --git a/py_src/dagron/analysis/explain.py b/py_src/dagron/analysis/explain.py index 34bd120..bd0811f 100644 --- a/py_src/dagron/analysis/explain.py +++ b/py_src/dagron/analysis/explain.py @@ -5,10 +5,17 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING +from dagron._internal import NodeRef + if TYPE_CHECKING: from dagron._internal import DAG +def _name_of(node: str | NodeRef) -> str: + """Coerce either a string or a NodeRef into a node name.""" + return node.name if isinstance(node, NodeRef) else node + + @dataclass(frozen=True) class NodeExplanation: """Structured diagnostic for a single node in the DAG.""" @@ -80,18 +87,21 @@ def summary(self) -> str: return "\n".join(lines) -def explain(dag: DAG, node_name: str, costs: dict[str, float] | None = None) -> NodeExplanation: +def explain( + dag: DAG, node: str | NodeRef, costs: dict[str, float] | None = None +) -> NodeExplanation: """Generate a structured diagnostic for a node. Args: dag: The DAG to analyze. - node_name: Name of the node to explain. + node: The node to explain (str name or NodeRef). 
costs: Optional cost mapping for critical path analysis. Returns: NodeExplanation with depth, critical path membership, bottleneck score, dominator set, and dependency chains. """ + node_name = _name_of(node) # Depth: which topological level is this node on? levels = dag.topological_levels() depth_from_root = 0 @@ -170,19 +180,20 @@ def explain(dag: DAG, node_name: str, costs: dict[str, float] | None = None) -> def what_if( dag: DAG, *, - remove_nodes: list[str] | None = None, - remove_edges: list[tuple[str, str]] | None = None, + remove_nodes: list[str | NodeRef] | None = None, + remove_edges: list[tuple[str | NodeRef, str | NodeRef]] | None = None, add_nodes: list[str] | None = None, - add_edges: list[tuple[str, str]] | None = None, + add_edges: list[tuple[str | NodeRef, str | NodeRef]] | None = None, costs: dict[str, float] | None = None, ) -> WhatIfResult: """Analyze the effect of hypothetical mutations without modifying the DAG. Args: dag: The DAG to analyze. - remove_nodes: Nodes to hypothetically remove. + remove_nodes: Nodes to hypothetically remove (str or NodeRef). remove_edges: Edges to hypothetically remove. - add_nodes: Nodes to hypothetically add. + add_nodes: Nodes to hypothetically add (must be string names — these + are new nodes that don't exist yet). add_edges: Edges to hypothetically add. costs: Optional cost mapping for critical path analysis. @@ -191,6 +202,16 @@ def what_if( """ from dagron._internal import CycleError + # Normalize NodeRef inputs to plain string names so the rest of the + # function stays string-typed. 
+ norm_remove_nodes: list[str] = [_name_of(n) for n in remove_nodes] if remove_nodes else [] + norm_remove_edges: list[tuple[str, str]] = ( + [(_name_of(f), _name_of(t)) for f, t in remove_edges] if remove_edges else [] + ) + norm_add_edges: list[tuple[str, str]] = ( + [(_name_of(f), _name_of(t)) for f, t in add_edges] if add_edges else [] + ) + # Get baseline stats original_stats = dag.stats() original_max_parallelism = original_stats.width @@ -199,36 +220,35 @@ def what_if( mutated = dag.snapshot() # Check for cycle creation from new edges before making any changes - if add_edges: - for from_node, to_node in add_edges: - # First ensure both nodes exist - from_exists = mutated.has_node(from_node) - to_exists = mutated.has_node(to_node) - - if not from_exists: - mutated.add_node(from_node) - if not to_exists: - mutated.add_node(to_node) - - # Try adding the edge, catch cycle error + for from_node, to_node in norm_add_edges: + # First ensure both nodes exist + from_exists = mutated.has_node(from_node) + to_exists = mutated.has_node(to_node) + + if not from_exists: + mutated.add_node(from_node) + if not to_exists: + mutated.add_node(to_node) + + # Try adding the edge, catch cycle error + try: + mutated.add_edge(from_node, to_node) + except CycleError: + # Find the cycle path + # The cycle goes: to_node -> ... -> from_node -> to_node try: - mutated.add_edge(from_node, to_node) - except CycleError: - # Find the cycle path - # The cycle goes: to_node -> ... 
-> from_node -> to_node - try: - path_nodes = mutated.shortest_path(to_node, from_node) - if path_nodes: - cycle_path = [n.name for n in path_nodes] + [to_node] - else: - cycle_path = [from_node, to_node, from_node] - except Exception: + path_nodes = mutated.shortest_path(to_node, from_node) + if path_nodes: + cycle_path = [n.name for n in path_nodes] + [to_node] + else: cycle_path = [from_node, to_node, from_node] + except Exception: + cycle_path = [from_node, to_node, from_node] - return WhatIfResult( - would_create_cycle=True, - cycle_path=cycle_path, - ) + return WhatIfResult( + would_create_cycle=True, + cycle_path=cycle_path, + ) # Add new nodes if add_nodes: @@ -237,16 +257,14 @@ def what_if( mutated.add_node(name) # Remove edges - if remove_edges: - for from_node, to_node in remove_edges: - if mutated.has_edge(from_node, to_node): - mutated.remove_edge(from_node, to_node) + for from_node, to_node in norm_remove_edges: + if mutated.has_edge(from_node, to_node): + mutated.remove_edge(from_node, to_node) # Remove nodes - if remove_nodes: - for name in remove_nodes: - if mutated.has_node(name): - mutated.remove_node(name) + for name in norm_remove_nodes: + if mutated.has_node(name): + mutated.remove_node(name) # Analyze the result new_stats = mutated.stats() @@ -254,9 +272,9 @@ def what_if( # Find orphaned nodes: nodes that were NOT roots in the original DAG # but became roots (no predecessors) in the mutated DAG. 
orphaned: list[str] = [] - if remove_nodes or remove_edges: + if norm_remove_nodes or norm_remove_edges: original_roots = {n.name for n in dag.roots()} - removed_set = set(remove_nodes) if remove_nodes else set() + removed_set = set(norm_remove_nodes) for node in mutated.topological_sort(): name = node.name if name in removed_set: diff --git a/py_src/dagron/analysis/lineage.py b/py_src/dagron/analysis/lineage.py index a7aa8cf..8ebd63a 100644 --- a/py_src/dagron/analysis/lineage.py +++ b/py_src/dagron/analysis/lineage.py @@ -5,11 +5,17 @@ from dataclasses import dataclass from typing import TYPE_CHECKING +from dagron._internal import NodeRef + if TYPE_CHECKING: from dagron._internal import DAG from dagron.execution._types import ExecutionResult +def _name_of(node: str | NodeRef) -> str: + return node.name if isinstance(node, NodeRef) else node + + @dataclass(frozen=True) class LineageRecord: """Lineage information for a single completed node. @@ -65,11 +71,11 @@ def _build_completed_set(self) -> set[str]: if nr.status == NodeStatus.COMPLETED } - def lineage(self, node: str) -> LineageRecord: + def lineage(self, node: str | NodeRef) -> LineageRecord: """Compute lineage for a single node. Args: - node: Name of the node to analyse. + node: The node to analyse (str name or NodeRef). Returns: A :class:`LineageRecord` containing the node's upstream @@ -78,14 +84,15 @@ def lineage(self, node: str) -> LineageRecord: Raises: KeyError: If the node is not in the DAG. 
""" - preds = [n.name for n in self._dag.predecessors(node)] + node_name = _name_of(node) + preds = [n.name for n in self._dag.predecessors(node_name)] direct_inputs = sorted(n for n in preds if n in self._completed) - ancestors = {n.name for n in self._dag.ancestors(node)} + ancestors = {n.name for n in self._dag.ancestors(node_name)} completed_ancestors = ancestors & self._completed upstream_chain = sorted(completed_ancestors) - depth = self._compute_depth(node, completed_ancestors) + depth = self._compute_depth(node_name, completed_ancestors) return LineageRecord( direct_inputs=direct_inputs, @@ -94,11 +101,11 @@ def lineage(self, node: str) -> LineageRecord: depth=depth, ) - def impact(self, node: str) -> ImpactRecord: + def impact(self, node: str | NodeRef) -> ImpactRecord: """Compute downstream impact of a single node. Args: - node: Name of the node to analyse. + node: The node to analyse (str name or NodeRef). Returns: An :class:`ImpactRecord` describing downstream completed nodes. @@ -106,10 +113,11 @@ def impact(self, node: str) -> ImpactRecord: Raises: KeyError: If the node is not in the DAG. """ - succs = [n.name for n in self._dag.successors(node)] + node_name = _name_of(node) + succs = [n.name for n in self._dag.successors(node_name)] directly_affects = sorted(n for n in succs if n in self._completed) - descs = {n.name for n in self._dag.descendants(node)} + descs = {n.name for n in self._dag.descendants(node_name)} completed_descs = descs & self._completed transitively_affects = sorted(completed_descs) @@ -122,7 +130,7 @@ def impact(self, node: str) -> ImpactRecord: affected_leaves=affected_leaves, ) - def data_flow_path(self, source: str, target: str) -> list[str] | None: + def data_flow_path(self, source: str | NodeRef, target: str | NodeRef) -> list[str] | None: """Find the shortest path where all intermediate nodes completed. 
Args: @@ -133,7 +141,9 @@ def data_flow_path(self, source: str, target: str) -> list[str] | None: List of node names forming the shortest completed path, or ``None`` if no such path exists. """ - all_paths_result = self._dag.all_paths(source, target) + source_name = _name_of(source) + target_name = _name_of(target) + all_paths_result = self._dag.all_paths(source_name, target_name) best: list[str] | None = None for path in all_paths_result: names = [n.name for n in path] diff --git a/py_src/dagron/builder.py b/py_src/dagron/builder.py index 8a485ba..8a717d5 100644 --- a/py_src/dagron/builder.py +++ b/py_src/dagron/builder.py @@ -4,12 +4,17 @@ from typing import TYPE_CHECKING -from dagron._internal import DAG +from dagron._internal import DAG, NodeRef if TYPE_CHECKING: from dagron.contracts import ContractViolation, NodeContract +def _name_of(arg: str | NodeRef) -> str: + """Extract a node's name from either a string or a NodeRef.""" + return arg.name if isinstance(arg, NodeRef) else arg + + class DAGBuilder: """Fluent builder for constructing DAGs. @@ -50,28 +55,28 @@ def add_node( def add_edge( self, - from_node: str, - to_node: str, + from_node: str | NodeRef, + to_node: str | NodeRef, weight: float | None = None, label: str | None = None, ) -> DAGBuilder: """Add an edge to the builder. Args: - from_node: Source node name. - to_node: Target node name. + from_node: Source node — accepts a string name or a NodeRef. + to_node: Target node — accepts a string name or a NodeRef. weight: Optional edge weight. label: Optional edge label. Returns: self for chaining. 
""" - self._edges.append((from_node, to_node, weight, label)) + self._edges.append((_name_of(from_node), _name_of(to_node), weight, label)) return self def contract( self, - node: str, + node: str | NodeRef, *, inputs: dict[str, type] | None = None, output: type = object, @@ -88,7 +93,7 @@ def contract( """ from dagron.contracts import NodeContract - self._contracts[node] = NodeContract( + self._contracts[_name_of(node)] = NodeContract( inputs=inputs or {}, output=output, ) diff --git a/py_src/dagron/builder.pyi b/py_src/dagron/builder.pyi index a922e6d..4aee440 100644 --- a/py_src/dagron/builder.pyi +++ b/py_src/dagron/builder.pyi @@ -2,7 +2,7 @@ from typing import Any -from dagron._internal import DAG +from dagron._internal import DAG, NodeRef from dagron.contracts import ContractViolation class DAGBuilder: @@ -17,14 +17,14 @@ class DAGBuilder: ) -> DAGBuilder: ... def add_edge( self, - from_node: str, - to_node: str, + from_node: str | NodeRef, + to_node: str | NodeRef, weight: float | None = None, label: str | None = None, ) -> DAGBuilder: ... def contract( self, - node: str, + node: str | NodeRef, *, inputs: dict[str, type] | None = None, output: type = ..., diff --git a/py_src/dagron/effects.py b/py_src/dagron/effects.py new file mode 100644 index 0000000..5a4309b --- /dev/null +++ b/py_src/dagron/effects.py @@ -0,0 +1,175 @@ +"""Effect tags for `@task`-decorated functions. + +Effect tags classify each task by what side effects (if any) it performs. +Phase 4 stores them as metadata on the `TaskSpec` and on each DAG node; +later phases use them to gate parallelism (Phase 4 executor flag), enable +content-addressed caching (Phase 6 — only PURE/READ are cacheable), drive +reactive recomputation (Phase 5 — only PURE auto-recomputes on input +change), and constrain replay (Phase 7 — NONDETERMINISTIC nodes can't be +replayed deterministically). 
+ +Default for a `@task` is `Effect.PURE`; an AST-scan heuristic emits a +`UserWarning` at decoration time if a `PURE` task touches obviously-impure +builtins like `time.time()`, `random.*`, `os.*`, or `requests.*`. +""" + +from __future__ import annotations + +import ast +import inspect +import textwrap +import warnings +from enum import Enum +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable + + +class Effect(Enum): + """Purity / side-effect classification for a `@dagron.task`. + + Ordered from most-pure to most-impure: + + * `PURE` — deterministic; no I/O, no clock/random/threading. Cacheable, + replayable, freely parallelizable. The default. + * `READ` — reads from a stable external source (file, DB) but is + idempotent over a snapshot. Cacheable when the source is pinned; + replayable. + * `WRITE` — writes to an external system. Not cacheable; replay is + idempotent only if the receiver is. + * `NETWORK` — performs network I/O. Output may vary; not cacheable; + not deterministically replayable. + * `NONDETERMINISTIC` — uses time, randomness, threading, or other + sources of non-determinism. Not cacheable, not replayable; serializes + under effect isolation. 
+ """ + + PURE = "pure" + READ = "read" + WRITE = "write" + NETWORK = "network" + NONDETERMINISTIC = "nondeterministic" + + @property + def is_cacheable(self) -> bool: + """True if this effect class admits content-addressed caching.""" + return self in (Effect.PURE, Effect.READ) + + @property + def is_deterministic(self) -> bool: + """True if multiple invocations with the same inputs produce the same output.""" + return self in (Effect.PURE, Effect.READ) + + @property + def is_isolated(self) -> bool: + """True if instances of this effect class must serialize under effect isolation.""" + return self == Effect.NONDETERMINISTIC + + +# --------------------------------------------------------------------------- +# AST-scan heuristic: detect obviously-impure calls inside a PURE task body +# --------------------------------------------------------------------------- + + +# Module-attribute pairs whose presence in a PURE task body is suspicious. +# Format: (module_name, attribute_name) — None for the attribute means any access. +_IMPURE_CALLS: frozenset[tuple[str, str | None]] = frozenset( + { + ("time", "time"), + ("time", "monotonic"), + ("time", "perf_counter"), + ("time", "sleep"), + ("random", None), + ("os", None), + ("requests", None), + ("urllib", None), + ("httpx", None), + ("aiohttp", None), + ("socket", None), + ("uuid", "uuid1"), + ("uuid", "uuid4"), + ("threading", None), + ("multiprocessing", None), + } +) + + +def _scan_for_impure_calls(fn: Callable[..., object]) -> list[str]: + """Walk a function's AST and return a list of suspicious call names. + + Returns an empty list if the source can't be retrieved (built-in, + REPL-defined function, etc.) or if no impure calls are found. 
+ """ + try: + source = inspect.getsource(fn) + except (OSError, TypeError): + return [] + + try: + tree = ast.parse(textwrap.dedent(source)) + except SyntaxError: + return [] + + findings: list[str] = [] + for node in ast.walk(tree): + # Pattern: `module.attr(...)` or `module.attr` access inside the function + if isinstance(node, ast.Attribute) and isinstance(node.value, ast.Name): + mod = node.value.id + attr = node.attr + if (mod, attr) in _IMPURE_CALLS or (mod, None) in _IMPURE_CALLS: + findings.append(f"{mod}.{attr}") + # Direct name access (e.g. `random` imported as `from random import *`) + # is harder to detect reliably without import tracking; skip for now. + + # Deduplicate while preserving order + seen: set[str] = set() + deduped: list[str] = [] + for f in findings: + if f not in seen: + seen.add(f) + deduped.append(f) + return deduped + + +def _warn_if_impure(fn: Callable[..., object], effect: Effect) -> None: + """Emit a UserWarning if a PURE-tagged function appears to be impure.""" + if effect != Effect.PURE: + return + findings = _scan_for_impure_calls(fn) + if not findings: + return + qualname = getattr(fn, "__qualname__", fn.__name__) + warnings.warn( + f"@task(effect=Effect.PURE) {qualname!r} appears to call impure " + f"functions: {', '.join(findings)}. If the call is intentional, " + f"either silence with @task(effect=Effect.NONDETERMINISTIC) (or a " + f"more specific tag) or refactor to inject the dependency. " + f"This is a heuristic — false positives are possible.", + UserWarning, + stacklevel=3, + ) + + +def effects_of(dag: object) -> dict[str, Effect]: + """Read every node's effect tag from a DAG built by `@dagron.flow`. + + Looks up each node's metadata for an `"effect"` string and converts + it back to an `Effect`. Nodes without an effect tag default to + `Effect.PURE` (which is also the default for `@task`). + + Args: + dag: A `dagron.DAG` instance (typed as `object` to avoid an import + cycle with `dagron._internal`). 
+ """ + result: dict[str, Effect] = {} + for node in dag.nodes(): # type: ignore[attr-defined] + meta = dag.get_metadata(node.name) # type: ignore[attr-defined] + if isinstance(meta, dict) and isinstance(meta.get("effect"), str): + try: + result[node.name] = Effect(meta["effect"]) + continue + except ValueError: + pass + result[node.name] = Effect.PURE + return result diff --git a/py_src/dagron/execution/_helpers.py b/py_src/dagron/execution/_helpers.py index 0474dc4..cdbc3c0 100644 --- a/py_src/dagron/execution/_helpers.py +++ b/py_src/dagron/execution/_helpers.py @@ -16,7 +16,7 @@ def _run_sync_task( name: str, task_fn: Callable[[], Any], callbacks: ExecutionCallbacks, -) -> NodeResult: +) -> NodeResult[Any]: """Execute a synchronous task function with callbacks.""" from dagron.execution._types import NodeResult, NodeStatus @@ -57,7 +57,7 @@ def _record_skip( from dagron.execution._types import NodeResult, NodeStatus from dagron.execution.tracing import TraceEventType - nr = NodeResult(name=name, status=NodeStatus.SKIPPED) + nr: NodeResult[Any] = NodeResult(name=name, status=NodeStatus.SKIPPED) result.node_results[name] = nr result.skipped += 1 if callbacks.on_skip: diff --git a/py_src/dagron/execution/_types.py b/py_src/dagron/execution/_types.py index d33cf4d..83c2329 100644 --- a/py_src/dagron/execution/_types.py +++ b/py_src/dagron/execution/_types.py @@ -4,12 +4,19 @@ from dataclasses import dataclass, field from enum import Enum -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, TypeVar, overload if TYPE_CHECKING: from collections.abc import Callable + from dagron._internal import NodeRef from dagron.execution.tracing import ExecutionTrace + from dagron.flow import FlowFuture + + +# Type variable used for `__getitem__` overloads (PEP 695 generic syntax is +# used directly on `NodeResult` below, so this TypeVar is method-scoped). 
+T = TypeVar("T") class NodeStatus(Enum): @@ -26,8 +33,14 @@ class NodeStatus(Enum): @dataclass -class NodeResult: - """Result of executing a single node.""" +class NodeResult[T]: + """Result of executing a single node. + + Generic in the wrapped value type so typed lookups + (e.g. `result[my_flow_future]`) preserve the value's type at the + *class* level. The `result` field is typed `Any` for backwards + compat with code that subscripts it directly (`result["k"].result["x"]`). + """ name: str status: NodeStatus @@ -55,7 +68,7 @@ class ExecutionCallbacks: class ExecutionResult: """Aggregate result of executing an entire DAG.""" - node_results: dict[str, NodeResult] = field(default_factory=dict) + node_results: dict[str, NodeResult[Any]] = field(default_factory=dict) succeeded: int = 0 failed: int = 0 skipped: int = 0 @@ -63,3 +76,20 @@ class ExecutionResult: cancelled: int = 0 total_duration_seconds: float = 0.0 trace: ExecutionTrace | None = None + + @overload + def __getitem__(self, node: FlowFuture[T]) -> NodeResult[T]: ... + @overload + def __getitem__(self, node: str | NodeRef) -> NodeResult[Any]: ... + def __getitem__(self, node: str | NodeRef | FlowFuture[Any]) -> NodeResult[Any]: + """Look up a node's result by string name, NodeRef, or FlowFuture.""" + # Both NodeRef and FlowFuture expose `.name`. + key = node if isinstance(node, str) else node.name + return self.node_results[key] + + def __contains__(self, node: object) -> bool: + if isinstance(node, str): + return node in self.node_results + # NodeRef and FlowFuture both expose `.name`. 
+ name = getattr(node, "name", None) + return name in self.node_results if isinstance(name, str) else False diff --git a/py_src/dagron/execution/cached_executor.py b/py_src/dagron/execution/cached_executor.py index 194392f..a0590b5 100644 --- a/py_src/dagron/execution/cached_executor.py +++ b/py_src/dagron/execution/cached_executor.py @@ -106,7 +106,7 @@ def get_ancestors(name: str) -> set[str]: for name in topo_order: # Skip if a dependency has failed if self._fail_fast and failed_nodes and get_ancestors(name) & failed_nodes: - nr = NodeResult(name=name, status=NodeStatus.SKIPPED) + nr: NodeResult[Any] = NodeResult(name=name, status=NodeStatus.SKIPPED) result.node_results[name] = nr result.skipped += 1 if self._callbacks.on_skip: diff --git a/py_src/dagron/execution/checkpoint.py b/py_src/dagron/execution/checkpoint.py index 1b664fc..7b04046 100644 --- a/py_src/dagron/execution/checkpoint.py +++ b/py_src/dagron/execution/checkpoint.py @@ -208,7 +208,7 @@ def _run( if resume_from and name in resume_from: saved = self._load_node_result(name) if saved and saved["status"] == "completed": - nr = NodeResult( + nr: NodeResult[Any] = NodeResult( name=name, status=NodeStatus.COMPLETED, result=saved["result"], diff --git a/py_src/dagron/execution/conditions.py b/py_src/dagron/execution/conditions.py index 786d288..d06ba99 100644 --- a/py_src/dagron/execution/conditions.py +++ b/py_src/dagron/execution/conditions.py @@ -161,7 +161,7 @@ def execute( if self._fail_fast and failed_nodes: ancestors = {n.name for n in self._dag.ancestors(name)} if ancestors & failed_nodes: - nr = NodeResult(name=name, status=NodeStatus.SKIPPED) + nr: NodeResult[Any] = NodeResult(name=name, status=NodeStatus.SKIPPED) result.node_results[name] = nr result.skipped += 1 skipped_nodes.add(name) diff --git a/py_src/dagron/execution/distributed_executor.py b/py_src/dagron/execution/distributed_executor.py index fa71e3f..0e59ea2 100644 --- a/py_src/dagron/execution/distributed_executor.py +++ 
b/py_src/dagron/execution/distributed_executor.py @@ -137,7 +137,7 @@ def get_ancestors(name: str) -> set[str]: try: value = self._backend.result(future, timeout=self._node_timeout) duration = time.monotonic() - t0 - nr = NodeResult( + nr: NodeResult[Any] = NodeResult( name=name, status=NodeStatus.COMPLETED, result=value, diff --git a/py_src/dagron/execution/dynamic.py b/py_src/dagron/execution/dynamic.py index 107e77f..16895fa 100644 --- a/py_src/dagron/execution/dynamic.py +++ b/py_src/dagron/execution/dynamic.py @@ -139,7 +139,7 @@ def get_ready_nodes() -> list[str]: for name in ready: # Skip if a dependency has failed if self._fail_fast and failed_nodes and get_ancestors(name) & failed_nodes: - nr = NodeResult(name=name, status=NodeStatus.SKIPPED) + nr: NodeResult[Any] = NodeResult(name=name, status=NodeStatus.SKIPPED) result.node_results[name] = nr result.skipped += 1 completed_nodes.add(name) diff --git a/py_src/dagron/execution/executor.py b/py_src/dagron/execution/executor.py index 87e388a..a740cad 100644 --- a/py_src/dagron/execution/executor.py +++ b/py_src/dagron/execution/executor.py @@ -9,11 +9,12 @@ if TYPE_CHECKING: import threading - from collections.abc import Awaitable, Callable + from collections.abc import Awaitable, Callable, Mapping from dagron._internal import DAG from dagron.plugins.hooks import HookRegistry +from dagron._internal import NodeRef from dagron.execution._helpers import _record_skip, _run_sync_task from dagron.execution._types import ( ExecutionCallbacks, @@ -23,6 +24,13 @@ ) +def _normalize_tasks( + tasks: Mapping[Any, Any], +) -> dict[str, Any]: + """Normalize a tasks dict that may use NodeRef keys to string-keyed.""" + return {(k.name if isinstance(k, NodeRef) else k): v for k, v in tasks.items()} + + def _fire_hook(hooks: HookRegistry | None, **kwargs: Any) -> None: """Fire a hook if the registry is available.""" if hooks is not None: @@ -43,6 +51,9 @@ class DAGExecutor: costs: Optional dict mapping node names to duration 
estimates. callbacks: Optional ExecutionCallbacks for lifecycle events. fail_fast: If True, skip downstream nodes when a dependency fails. + enforce_effect_isolation: If True, NONDETERMINISTIC tasks within a + step run sequentially (other effect classes still parallelize). + Reads each node's effect tag from `dagron.effects.effects_of`. """ def __init__( @@ -54,6 +65,7 @@ def __init__( fail_fast: bool = True, enable_tracing: bool = False, hooks: HookRegistry | None = None, + enforce_effect_isolation: bool = False, ): self._dag = dag self._max_workers = max_workers @@ -62,17 +74,31 @@ def __init__( self._fail_fast = fail_fast self._enable_tracing = enable_tracing self._hooks = hooks + self._enforce_effect_isolation = enforce_effect_isolation + # Cache effect tags per node so the executor doesn't re-read metadata + # on every step. + self._effects: dict[str, str] = {} + if enforce_effect_isolation: + from dagron.effects import effects_of + + self._effects = {n: e.value for n, e in effects_of(dag).items()} + + def _is_isolated(self, name: str) -> bool: + """True if `name`'s effect tag requires serial execution under isolation.""" + from dagron.effects import Effect + + return self._effects.get(name) == Effect.NONDETERMINISTIC.value def execute( self, - tasks: dict[str, Callable[[], Any]], + tasks: Mapping[Any, Callable[[], Any]], timeout: float | None = None, cancel_event: threading.Event | None = None, ) -> ExecutionResult: """Execute tasks according to the DAG's dependency order. Args: - tasks: Dict mapping node names to callables. + tasks: Dict mapping node identifiers (str or NodeRef) to callables. Each callable takes no arguments and returns a result. timeout: Optional per-node timeout in seconds. Nodes that exceed this duration get TIMED_OUT status. 
@@ -85,6 +111,8 @@ def execute( from dagron.execution.tracing import ExecutionTrace, TraceEventType from dagron.plugins.hooks import HookEvent + str_tasks: dict[str, Callable[[], Any]] = _normalize_tasks(tasks) + if self._max_workers is not None: plan = self._dag.execution_plan_constrained(self._max_workers, self._costs) else: @@ -109,6 +137,22 @@ def get_ancestors(name: str) -> set[str]: ancestors_cache[name] = anc return ancestors_cache[name] + # Lock ensuring at most one NONDETERMINISTIC task runs at a time + # under effect isolation. PURE/READ/WRITE/NETWORK tasks ignore it. + import threading as _threading + + isolation_lock = _threading.Lock() if self._enforce_effect_isolation else None + + def maybe_isolate(node_name: str, fn: Callable[[], Any]) -> Callable[[], Any]: + if isolation_lock is not None and self._is_isolated(node_name): + + def serialised() -> Any: + with isolation_lock: + return fn() + + return serialised + return fn + pool_workers = self._max_workers or plan.max_parallelism or 1 with ThreadPoolExecutor(max_workers=pool_workers) as pool: for step in plan.steps: @@ -120,8 +164,10 @@ def get_ancestors(name: str) -> set[str]: for scheduled_node in step.nodes: name = scheduled_node.node.name if name not in result.node_results: - nr = NodeResult(name=name, status=NodeStatus.CANCELLED) - result.node_results[name] = nr + cancel_nr: NodeResult[Any] = NodeResult( + name=name, status=NodeStatus.CANCELLED + ) + result.node_results[name] = cancel_nr result.cancelled += 1 if trace: trace.record(TraceEventType.NODE_CANCELLED, node_name=name) @@ -138,7 +184,7 @@ def get_ancestors(name: str) -> set[str]: _record_skip(name, result, self._callbacks, trace) continue - task_fn = tasks.get(name) + task_fn = str_tasks.get(name) if task_fn is None: _record_skip(name, result, self._callbacks, trace) continue @@ -148,11 +194,13 @@ def get_ancestors(name: str) -> set[str]: _fire_hook( self._hooks, event=HookEvent.PRE_NODE, dag=self._dag, node_name=name ) - 
futures[pool.submit(_run_sync_task, name, task_fn, self._callbacks)] = name + isolated_fn = maybe_isolate(name, task_fn) + futures[pool.submit(_run_sync_task, name, isolated_fn, self._callbacks)] = name # Wait for all futures in this step for future in futures: name = futures[future] + nr: NodeResult[Any] try: nr = future.result(timeout=timeout) except TimeoutError: @@ -262,15 +310,16 @@ def __init__( async def execute( self, - tasks: dict[str, Callable[[], Awaitable[Any]]], + tasks: Mapping[Any, Callable[[], Awaitable[Any]]], timeout: float | None = None, cancel_event: asyncio.Event | None = None, ) -> ExecutionResult: """Execute async tasks according to the DAG's dependency order. Args: - tasks: Dict mapping node names to async callables. - Each callable takes no arguments and returns an awaitable. + tasks: Dict mapping node identifiers (str or NodeRef) to async + callables. Each callable takes no arguments and returns an + awaitable. timeout: Optional per-node timeout in seconds. Nodes that exceed this duration get TIMED_OUT status. cancel_event: Optional asyncio.Event. 
When set, remaining @@ -282,6 +331,8 @@ async def execute( from dagron.execution.tracing import ExecutionTrace, TraceEventType from dagron.plugins.hooks import HookEvent + str_tasks: dict[str, Callable[[], Awaitable[Any]]] = _normalize_tasks(tasks) + if self._max_workers is not None: plan = self._dag.execution_plan_constrained(self._max_workers, self._costs) else: @@ -315,8 +366,10 @@ def get_ancestors(name: str) -> set[str]: for scheduled_node in step.nodes: name = scheduled_node.node.name if name not in result.node_results: - nr = NodeResult(name=name, status=NodeStatus.CANCELLED) - result.node_results[name] = nr + cancel_nr2: NodeResult[Any] = NodeResult( + name=name, status=NodeStatus.CANCELLED + ) + result.node_results[name] = cancel_nr2 result.cancelled += 1 if trace: trace.record(TraceEventType.NODE_CANCELLED, node_name=name) @@ -334,7 +387,7 @@ def get_ancestors(name: str) -> set[str]: _record_skip(name, result, self._callbacks, trace) continue - task_fn = tasks.get(name) + task_fn = str_tasks.get(name) if task_fn is None: _record_skip(name, result, self._callbacks, trace) continue @@ -419,7 +472,7 @@ async def _run_task( task_fn: Callable[[], Awaitable[Any]], semaphore: asyncio.Semaphore | None, timeout: float | None = None, - ) -> NodeResult: + ) -> NodeResult[Any]: if self._callbacks.on_start: self._callbacks.on_start(name) diff --git a/py_src/dagron/execution/incremental.py b/py_src/dagron/execution/incremental.py index fa28545..3fdd3b9 100644 --- a/py_src/dagron/execution/incremental.py +++ b/py_src/dagron/execution/incremental.py @@ -20,7 +20,7 @@ class IncrementalResult: """Result of an incremental DAG execution.""" - node_results: dict[str, NodeResult] = field(default_factory=dict) + node_results: dict[str, NodeResult[Any]] = field(default_factory=dict) recomputed: list[str] = field(default_factory=list) early_cutoff: list[str] = field(default_factory=list) reused: list[str] = field(default_factory=list) diff --git 
a/py_src/dagron/execution/pipeline.py b/py_src/dagron/execution/pipeline.py index 8b3aa01..830861b 100644 --- a/py_src/dagron/execution/pipeline.py +++ b/py_src/dagron/execution/pipeline.py @@ -2,80 +2,22 @@ from __future__ import annotations -import asyncio import inspect -from dataclasses import dataclass from typing import TYPE_CHECKING, Any +# Re-export the shared @task / TaskSpec / _get_spec from flow.py. +# The same @task decorator powers both Pipeline (param-name wiring) and +# @dagron.flow (call-structure wiring). +from dagron.flow import TaskSpec, _get_spec, task + if TYPE_CHECKING: from collections.abc import Callable - from dagron._internal import DAG + from dagron._internal import DAG, NodeRef from dagron.execution._types import ExecutionCallbacks, ExecutionResult -@dataclass(frozen=True) -class TaskSpec: - """Metadata for a decorated task function.""" - - name: str - fn: Callable[..., Any] - dependencies: list[str] - is_async: bool - - -def task[F: Callable[..., Any]](fn: F) -> F: - """Decorator that marks a function as a DAG task. - - Dependencies are inferred from the function's parameter names. - Each parameter name must match the name of another ``@task``-decorated - function whose return value will be passed as that argument. - - Parameters with defaults are treated as optional dependencies: - if no matching task exists, the default value is used. - - Example:: - - @dagron.task - def fetch_data() -> list[dict]: ... - - @dagron.task - def process(fetch_data: list[dict]) -> pd.DataFrame: ... 
- - pipeline = dagron.Pipeline([fetch_data, process]) - result = pipeline.execute() - - """ - sig = inspect.signature(fn) - deps = [] - for param_name, param in sig.parameters.items(): - # Skip *args, **kwargs - if param.kind in ( - inspect.Parameter.VAR_POSITIONAL, - inspect.Parameter.VAR_KEYWORD, - ): - continue - deps.append(param_name) - - spec = TaskSpec( - name=fn.__name__, - fn=fn, - dependencies=deps, - is_async=asyncio.iscoroutinefunction(fn), - ) - fn._dagron_task = spec # type: ignore[attr-defined] - return fn - - -def _get_spec(fn: Any) -> TaskSpec: - """Extract the TaskSpec from a decorated function.""" - spec: TaskSpec | None = getattr(fn, "_dagron_task", None) - if spec is None: - raise TypeError( - f"{fn!r} is not a @dagron.task-decorated function. " - "Use @dagron.task to mark functions as pipeline tasks." - ) - return spec +__all__ = ["Pipeline", "TaskSpec", "task"] class Pipeline: @@ -179,7 +121,7 @@ def validate_contracts( def _make_task_callables( self, overrides: dict[str, Any] | None = None - ) -> dict[str, Callable[[], Any]]: + ) -> dict[str | NodeRef, Callable[[], Any]]: """Build the task dict for executors, wiring outputs as inputs.""" results: dict[str, Any] = {} if overrides: @@ -219,7 +161,7 @@ def run() -> Any: def _make_async_task_callables( self, overrides: dict[str, Any] | None = None - ) -> dict[str, Callable[[], Any]]: + ) -> dict[str | NodeRef, Callable[[], Any]]: """Build the async task dict for AsyncDAGExecutor.""" results: dict[str, Any] = {} if overrides: diff --git a/py_src/dagron/execution/resources.py b/py_src/dagron/execution/resources.py index 8a0da6a..ed06bb2 100644 --- a/py_src/dagron/execution/resources.py +++ b/py_src/dagron/execution/resources.py @@ -308,7 +308,7 @@ def get_ancestors(name: str) -> set[str]: key=lambda n: -bottom_levels.get(n, 0.0), ) - active_futures: dict[Future[NodeResult], tuple[str, ResourceRequirements]] = {} + active_futures: dict[Future[NodeResult[Any]], tuple[str, ResourceRequirements]] = {} 
pool_workers = max(len(tasks), 1) with ThreadPoolExecutor(max_workers=pool_workers) as pool: @@ -523,7 +523,7 @@ def get_ancestors(name: str) -> set[str]: key=lambda n: -bottom_levels.get(n, 0.0), ) - active_tasks: dict[asyncio.Task[NodeResult], tuple[str, ResourceRequirements]] = {} + active_tasks: dict[asyncio.Task[NodeResult[Any]], tuple[str, ResourceRequirements]] = {} all_nodes = set(in_degree.keys()) while completed_nodes != all_nodes: @@ -533,7 +533,7 @@ def get_ancestors(name: str) -> set[str]: continue if self._fail_fast and failed_nodes and get_ancestors(name) & failed_nodes: - nr = NodeResult(name=name, status=NodeStatus.SKIPPED) + nr: NodeResult[Any] = NodeResult(name=name, status=NodeStatus.SKIPPED) result.node_results[name] = nr result.skipped += 1 completed_nodes.add(name) @@ -622,7 +622,7 @@ async def _run_task( self, name: str, task_fn: Callable[[], Awaitable[Any]], - ) -> NodeResult: + ) -> NodeResult[Any]: if self._callbacks.on_start: self._callbacks.on_start(name) t0 = time.monotonic() diff --git a/py_src/dagron/flow.py b/py_src/dagron/flow.py new file mode 100644 index 0000000..eefd9b3 --- /dev/null +++ b/py_src/dagron/flow.py @@ -0,0 +1,458 @@ +"""Pythonic compose API — `@dagron.flow` builds a DAG from Python call structure. + +Example:: + + @dagron.task + def fetch() -> list[dict]: ... + + @dagron.task + def transform(rows: list[dict]) -> pd.DataFrame: ... 
+ + @dagron.flow + def pipeline(): + raw = fetch() + return transform(raw) + + result = pipeline() # ExecutionResult + df = result[transform].result + dag = pipeline.dag() # the underlying DAG, for analysis +""" + +from __future__ import annotations + +import asyncio +import functools +import inspect +from contextvars import ContextVar +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, overload + +from dagron._internal import DAG +from dagron.effects import Effect, _warn_if_impure + +if TYPE_CHECKING: + from collections.abc import Callable + + from dagron._internal import NodeRef + from dagron.execution._types import ExecutionCallbacks, ExecutionResult + + +# --------------------------------------------------------------------------- +# TaskSpec — metadata attached to every @task function +# --------------------------------------------------------------------------- + + +@dataclass(frozen=True) +class TaskSpec: + """Metadata attached to a `@dagron.task`-decorated function. + + Used by both the legacy `Pipeline` (parameter-name dependency inference) + and the `@dagron.flow` API (call-structure tracing). + """ + + name: str + fn: Callable[..., Any] + dependencies: list[str] + is_async: bool + effect: Effect = Effect.PURE + + +def _get_spec(fn: Any) -> TaskSpec: + """Extract the TaskSpec from a decorated function. Raises TypeError otherwise.""" + spec: TaskSpec | None = getattr(fn, "_dagron_task", None) + if spec is None: + raise TypeError( + f"{fn!r} is not a @dagron.task-decorated function. " + "Use @dagron.task to mark functions as pipeline tasks." + ) + return spec + + +# --------------------------------------------------------------------------- +# FlowFuture — placeholder returned from a @task call inside a @flow body +# --------------------------------------------------------------------------- + + +class FlowFuture[T]: + """Stand-in for a task's eventual return value during flow tracing. 
+ + Returned from a `@dagron.task` call inside a `@dagron.flow` body. Pass + it as an argument to other task calls to wire dependencies. The type + parameter `T` carries the wrapped task's return type so downstream + annotations can be statically checked. + """ + + __slots__ = ("_name",) + + def __init__(self, name: str) -> None: + self._name = name + + @property + def name(self) -> str: + return self._name + + def __repr__(self) -> str: + return f"FlowFuture({self._name!r})" + + def __hash__(self) -> int: + return hash(self._name) + + def __eq__(self, other: object) -> bool: + return isinstance(other, FlowFuture) and self._name == other._name + + +# --------------------------------------------------------------------------- +# Tracing — recorded calls + active flow context +# --------------------------------------------------------------------------- + + +@dataclass +class _NodeCall: + """One recorded call to a `@task` function inside a `@flow` body.""" + + name: str + fn: Callable[..., Any] + args: tuple[Any, ...] 
+ kwargs: dict[str, Any] + deps: list[str] # names of upstream FlowFutures referenced in args/kwargs + + +@dataclass +class _FlowTrace: + """Records calls made inside a single @flow invocation.""" + + calls: list[_NodeCall] = field(default_factory=list) + counter: dict[str, int] = field(default_factory=dict) + + def fresh_name(self, base: str) -> str: + n = self.counter.get(base, 0) + self.counter[base] = n + 1 + return base if n == 0 else f"{base}_{n}" + + def record( + self, fn: Callable[..., Any], args: tuple[Any, ...], kwargs: dict[str, Any] + ) -> FlowFuture[Any]: + deps: list[str] = [] + seen: set[str] = set() + + def collect(value: Any) -> None: + if isinstance(value, FlowFuture) and value.name not in seen: + seen.add(value.name) + deps.append(value.name) + + for a in args: + collect(a) + for v in kwargs.values(): + collect(v) + + name = self.fresh_name(fn.__name__) + self.calls.append(_NodeCall(name=name, fn=fn, args=args, kwargs=kwargs, deps=deps)) + return FlowFuture(name=name) + + +_current_flow: ContextVar[_FlowTrace | None] = ContextVar("_dagron_flow", default=None) + + +# --------------------------------------------------------------------------- +# @task decorator — flow-aware +# --------------------------------------------------------------------------- + + +def _wrap_task[**P, R](fn: Callable[P, R], effect: Effect) -> Callable[P, R]: + """Internal: wrap `fn` with flow-aware dispatch and attach a TaskSpec.""" + sig = inspect.signature(fn) + deps = [ + p_name + for p_name, p in sig.parameters.items() + if p.kind not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD) + ] + spec = TaskSpec( + name=fn.__name__, + fn=fn, + dependencies=deps, + is_async=asyncio.iscoroutinefunction(fn), + effect=effect, + ) + _warn_if_impure(fn, effect) + + @functools.wraps(fn) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + trace = _current_flow.get() + if trace is not None: + # Pass the wrapper itself so consumers downstream of the trace + # 
(e.g. metadata mirroring) can read its `_dagron_task` spec. + # The cast is the deliberate "type lie": at trace time we hand + # back a FlowFuture, but the type system sees R. + return trace.record(wrapper, args, kwargs) # type: ignore[return-value] + return fn(*args, **kwargs) + + wrapper._dagron_task = spec # type: ignore[attr-defined] + return wrapper + + +@overload +def task[**P, R](fn: Callable[P, R], /) -> Callable[P, R]: ... +@overload +def task[**P, R](*, effect: Effect = ...) -> Callable[[Callable[P, R]], Callable[P, R]]: ... +def task( + fn: Callable[..., Any] | None = None, + /, + *, + effect: Effect = Effect.PURE, +) -> Any: + """Decorator that marks a function as a dagron task. + + Behavior: + + * **Outside** a `@dagron.flow` context, the decorated function executes + normally. + * **Inside** a `@dagron.flow` context, calling it records the call in + the flow trace and returns a `FlowFuture[R]` placeholder. The + function body is *not* executed during tracing. + + Typed as a passthrough: `Callable[P, R] -> Callable[P, R]`. The + runtime values inside a flow context are `FlowFuture[R]` — pass them + to other `@task` calls to wire dependencies. + + Functions decorated with this are also compatible with the legacy + `Pipeline` class (parameter-name dependency inference). + + Args: + fn: The function to decorate. Provided automatically when used as + a bare `@task` decorator. + effect: Side-effect classification of this task. Default + `Effect.PURE`. See `dagron.effects.Effect` for the ladder. + + Example:: + + @dagron.task + def fetch_data() -> list[dict]: ... # defaults to PURE + + @dagron.task(effect=Effect.NETWORK) + def fetch_user(uid: int) -> dict: ... 
+ + @dagron.flow + def my_flow(): + raw = fetch_data() + return process(raw) + """ + if fn is not None: + # Bare @task usage: `@task\ndef foo(): ...` + return _wrap_task(fn, effect) + + # Parameterised: `@task(effect=...)\ndef foo(): ...` + def decorator(real_fn: Callable[..., Any]) -> Callable[..., Any]: + return _wrap_task(real_fn, effect) + + return decorator + + +# --------------------------------------------------------------------------- +# Building DAG + executor callables from a trace +# --------------------------------------------------------------------------- + + +def _build_dag(trace: _FlowTrace) -> DAG: + dag = DAG() + for call in trace.calls: + # Mirror the task's effect onto node metadata so downstream + # consumers (executor isolation, content cache, reactive engine, + # replay) can read it without re-introspecting the function. + spec = getattr(call.fn, "_dagron_task", None) + metadata = {"effect": spec.effect.value} if spec is not None else None + dag.add_node(call.name, metadata=metadata) + for call in trace.calls: + for dep in call.deps: + dag.add_edge(dep, call.name) + return dag + + +def _make_callables(trace: _FlowTrace, results: dict[str, Any]) -> dict[str, Callable[[], Any]]: + """Build executor callables that resolve FlowFuture args from `results`. + + `results` is shared mutable state; each callable writes its own result + on completion and reads upstream results on entry. 
+ """ + + def make_callable(call: _NodeCall) -> Callable[[], Any]: + def run() -> Any: + resolved_args = tuple( + results[a._name] if isinstance(a, FlowFuture) else a for a in call.args + ) + resolved_kwargs = { + k: (results[v._name] if isinstance(v, FlowFuture) else v) + for k, v in call.kwargs.items() + } + value = call.fn(*resolved_args, **resolved_kwargs) + results[call.name] = value + return value + + return run + + return {call.name: make_callable(call) for call in trace.calls} + + +def _make_async_callables( + trace: _FlowTrace, results: dict[str, Any] +) -> dict[str, Callable[[], Any]]: + """Async variant of `_make_callables`.""" + + def make_callable(call: _NodeCall) -> Callable[[], Any]: + async def arun() -> Any: + resolved_args = tuple( + results[a._name] if isinstance(a, FlowFuture) else a for a in call.args + ) + resolved_kwargs = { + k: (results[v._name] if isinstance(v, FlowFuture) else v) + for k, v in call.kwargs.items() + } + value = call.fn(*resolved_args, **resolved_kwargs) + if asyncio.iscoroutine(value): + value = await value + results[call.name] = value + return value + + return arun + + return {call.name: make_callable(call) for call in trace.calls} + + +# --------------------------------------------------------------------------- +# Flow class & @flow decorator +# --------------------------------------------------------------------------- + + +class Flow: + """A flow built from a `@dagron.flow`-decorated function. + + Calling a `Flow` instance traces the function, builds the DAG, executes + it, and returns an `ExecutionResult`. For DAG-only inspection (without + execution), use `Flow.dag()`. + + Each call retraces the function body, so dynamic branching (`if`/`for` + on outer parameters) produces a fresh DAG per invocation. 
+ """ + + def __init__(self, fn: Callable[..., Any]) -> None: + self._fn = fn + functools.update_wrapper(self, fn) + + def _trace(self, *args: Any, **kwargs: Any) -> tuple[_FlowTrace, FlowFuture[Any] | None]: + """Run the flow body in trace mode and return (trace, return_value).""" + if _current_flow.get() is not None: + raise RuntimeError( + "Nested @dagron.flow invocations are not supported. " + "If you need composition, build sub-flows separately." + ) + trace = _FlowTrace() + token = _current_flow.set(trace) + try: + ret = self._fn(*args, **kwargs) + finally: + _current_flow.reset(token) + if ret is not None and not isinstance(ret, FlowFuture): + raise TypeError( + f"@flow {self._fn.__name__!r} must return a FlowFuture or None. " + f"Got {type(ret).__name__}. Did you forget to call a " + "@dagron.task function, or call a non-task function inside the flow body?" + ) + return trace, ret + + def dag(self, *args: Any, **kwargs: Any) -> DAG: + """Trace the flow and return the resulting DAG without executing it.""" + trace, _ = self._trace(*args, **kwargs) + return _build_dag(trace) + + # Plan alias. + build = dag + + def run( + self, + *args: Any, + max_workers: int | None = None, + callbacks: ExecutionCallbacks | None = None, + fail_fast: bool = True, + enable_tracing: bool = False, + **kwargs: Any, + ) -> ExecutionResult: + """Trace, build, and execute the flow synchronously.""" + from dagron.execution.executor import DAGExecutor + + trace, _ = self._trace(*args, **kwargs) + dag = _build_dag(trace) + results: dict[str, Any] = {} + # Widen the dict's static key type so executor.execute (which accepts + # `Mapping[str | NodeRef, ...]`) type-checks. Runtime keys stay `str`. 
+ tasks: dict[str | NodeRef, Callable[[], Any]] = { # noqa: C416 + k: v for k, v in _make_callables(trace, results).items() + } + # Sequential execution preserves the FlowFuture wiring contract: + # downstream tasks need their upstream's results in `results` before + # they run, which only happens after upstream completes. The Rust + # executor handles the topo order; max_workers=1 by default to avoid + # races on the shared `results` dict. + executor = DAGExecutor( + dag, + max_workers=max_workers, + callbacks=callbacks, + fail_fast=fail_fast, + enable_tracing=enable_tracing, + ) + return executor.execute(tasks) + + async def run_async( + self, + *args: Any, + max_workers: int | None = None, + callbacks: ExecutionCallbacks | None = None, + fail_fast: bool = True, + enable_tracing: bool = False, + **kwargs: Any, + ) -> ExecutionResult: + """Trace, build, and execute the flow asynchronously.""" + from dagron.execution.executor import AsyncDAGExecutor + + trace, _ = self._trace(*args, **kwargs) + dag = _build_dag(trace) + results: dict[str, Any] = {} + tasks: dict[str | NodeRef, Callable[[], Any]] = { # noqa: C416 + k: v for k, v in _make_async_callables(trace, results).items() + } + executor = AsyncDAGExecutor( + dag, + max_workers=max_workers or 1, + callbacks=callbacks, + fail_fast=fail_fast, + enable_tracing=enable_tracing, + ) + return await executor.execute(tasks) + + def __call__(self, *args: Any, **kwargs: Any) -> ExecutionResult: + """Calling the flow runs it and returns the ExecutionResult.""" + return self.run(*args, **kwargs) + + +def flow(fn: Callable[..., Any]) -> Flow: + """Decorator: build a DAG from the call structure of a Python function. + + Inside the decorated function, calls to `@dagron.task` functions are + recorded as nodes; passing one task's return value to another records + an edge. + + Example:: + + @dagron.task + def fetch() -> list: ... + + @dagron.task + def process(rows: list) -> dict: ... 
+ + @dagron.flow + def pipeline(): + raw = fetch() + return process(raw) + + result = pipeline() # ExecutionResult + dag = pipeline.dag() # DAG for inspection + """ + return Flow(fn) diff --git a/py_src/dagron/stubgen.py b/py_src/dagron/stubgen.py new file mode 100644 index 0000000..f4ea0af --- /dev/null +++ b/py_src/dagron/stubgen.py @@ -0,0 +1,169 @@ +"""Generate `.pyi` type stubs that statically type a built DAG's results. + +After building a DAG, call `generate_stub(dag, type_hints={...})` to emit +a stub describing each node's return type. Saving the result alongside +your code lets `mypy` give precise types to `result["my_node"]` lookups. + +Example:: + + from dagron import DAG, flow, task + from dagron.stubgen import generate_stub + + @task + def fetch() -> list[int]: ... + + @task + def total(rows: list[int]) -> int: ... + + @flow + def pipeline(): + return total(fetch()) + + dag = pipeline.dag() + print(generate_stub(dag, name="MyResult")) + +emits:: + + from typing import overload, Literal + from dagron.execution import NodeResult + + class MyResult: + @overload + def __getitem__(self, key: Literal["fetch"]) -> NodeResult[list[int]]: ... + @overload + def __getitem__(self, key: Literal["total"]) -> NodeResult[int]: ... +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, get_args, get_origin, get_type_hints + +from dagron.flow import _get_spec + +if TYPE_CHECKING: + from collections.abc import Callable + + from dagron._internal import DAG + + +def _format_type(t: Any) -> str: + """Format a type annotation for inclusion in a .pyi file. + + Handles generic aliases (`list[int]`, `dict[str, int]`, `int | None`) + by recursively formatting the parameters. + """ + if t is type(None): + return "None" + + # Generic alias: list[int], dict[str, int], Union, etc. 
+ origin = get_origin(t) + args = get_args(t) + if origin is not None and args: + import types + import typing + + # Special-case PEP 604 unions ("int | None") and typing.Union + if origin is typing.Union or origin is types.UnionType: + return " | ".join(_format_type(a) for a in args) + + origin_str = _format_type(origin) + args_str = ", ".join(_format_type(a) for a in args) + return f"{origin_str}[{args_str}]" + + if hasattr(t, "__module__") and hasattr(t, "__qualname__"): + qualname: str = t.__qualname__ + if t.__module__ == "builtins": + return qualname + return f"{t.__module__}.{qualname}" + return repr(t) + + +def _infer_type(fn: Callable[..., Any]) -> str | None: + """Extract a function's return type as a stringified annotation, or None.""" + try: + hints = get_type_hints(fn) + except (NameError, AttributeError): + return None + rt = hints.get("return") + if rt is None: + return None + return _format_type(rt) + + +def generate_stub( + dag: DAG, + *, + type_hints: dict[str, type | str] | None = None, + tasks: dict[str, Callable[..., Any]] | None = None, + name: str = "TypedExecutionResult", +) -> str: + """Generate a `.pyi` stub for typed lookup of DAG results. + + Args: + dag: The built DAG to introspect. + type_hints: Optional explicit mapping of node-name to result type + (or stringified annotation). Wins over inferred hints. + tasks: Optional mapping of node-name to the implementing function. + Used to infer return types when no explicit hint is provided. + If a node-name has a corresponding `@dagron.task`-decorated + function in scope, its annotation is picked up automatically. + name: Class name for the generated typed result wrapper. + + Returns: + A `.pyi` source string. + """ + type_hints = dict(type_hints or {}) + tasks = tasks or {} + + # Discover types from each node's task function (if any). 
+ annotations: dict[str, str] = {} + for node in dag.nodes(): + n = node.name + if n in type_hints: + ann = type_hints[n] + annotations[n] = ann if isinstance(ann, str) else _format_type(ann) + continue + fn = tasks.get(n) + if fn is not None: + inferred = _infer_type(fn) + if inferred is not None: + annotations[n] = inferred + continue + # Fall back to the TaskSpec's wrapped function + try: + spec = _get_spec(fn) + inferred2 = _infer_type(spec.fn) + if inferred2 is not None: + annotations[n] = inferred2 + continue + except TypeError: + pass + annotations[n] = "Any" + + lines: list[str] = [ + '"""Generated by dagron.stubgen — do not edit by hand."""', + "", + "from typing import Any, Literal, overload", + "", + "from dagron.execution import NodeResult", + "", + "", + f"class {name}:", + ] + + for node_name, ret in sorted(annotations.items()): + lines.append(" @overload") + lines.append( + f" def __getitem__(self, key: Literal[{node_name!r}]) -> NodeResult[{ret}]: ..." + ) + + # Default fallback overload + lines.append(" @overload") + lines.append(" def __getitem__(self, key: str) -> NodeResult[Any]: ...") + + if not annotations: + # Empty class needs a body + lines.append(" pass") + + lines.append("") + return "\n".join(lines) diff --git a/tests/python/core/test_construction.py b/tests/python/core/test_construction.py index 765d554..9b47df7 100644 --- a/tests/python/core/test_construction.py +++ b/tests/python/core/test_construction.py @@ -4,15 +4,15 @@ CycleError, DuplicateNodeError, EdgeNotFoundError, - NodeId, NodeNotFoundError, + NodeRef, ) class TestAddNode: def test_add_single_node(self, empty_dag): node = empty_dag.add_node("a") - assert isinstance(node, NodeId) + assert isinstance(node, NodeRef) assert node.name == "a" assert empty_dag.node_count() == 1 @@ -35,7 +35,7 @@ def test_add_multiple_nodes(self, empty_dag): nodes = empty_dag.add_nodes(["a", "b", "c"]) assert len(nodes) == 3 assert empty_dag.node_count() == 3 - assert all(isinstance(n, NodeId) for n 
in nodes) + assert all(isinstance(n, NodeRef) for n in nodes) def test_add_nodes_with_payloads(self, empty_dag): nodes = empty_dag.add_nodes([("a", 1), ("b", 2)]) diff --git a/tests/python/test_effects.py b/tests/python/test_effects.py new file mode 100644 index 0000000..ce6ef37 --- /dev/null +++ b/tests/python/test_effects.py @@ -0,0 +1,350 @@ +"""Tests for Phase 4 — effect-typed nodes. + +Effect tags classify each `@task` by its purity / side-effect class. They +are queryable on the TaskSpec, mirrored onto DAG node metadata, and (when +opted in) gate parallelism in the executor: NONDETERMINISTIC tasks +serialize amongst themselves; PURE/READ/WRITE/NETWORK parallelize freely. +""" + +from __future__ import annotations + +import threading +import time +import warnings +from typing import TYPE_CHECKING + +from dagron import ( + DAG, + DAGExecutor, + Effect, + NodeRef, + effects_of, + flow, + task, +) + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import Any + +# --------------------------------------------------------------------------- +# Effect enum properties +# --------------------------------------------------------------------------- + + +class TestEffectEnum: + def test_default_classes(self): + assert Effect.PURE.value == "pure" + assert Effect.NONDETERMINISTIC.value == "nondeterministic" + + def test_is_cacheable(self): + assert Effect.PURE.is_cacheable + assert Effect.READ.is_cacheable + assert not Effect.WRITE.is_cacheable + assert not Effect.NETWORK.is_cacheable + assert not Effect.NONDETERMINISTIC.is_cacheable + + def test_is_deterministic(self): + assert Effect.PURE.is_deterministic + assert Effect.READ.is_deterministic + assert not Effect.NONDETERMINISTIC.is_deterministic + + def test_is_isolated(self): + assert Effect.NONDETERMINISTIC.is_isolated + assert not Effect.PURE.is_isolated + + +# --------------------------------------------------------------------------- +# @task(effect=...) 
parameter +# --------------------------------------------------------------------------- + + +class TestTaskEffectParameter: + def test_bare_task_defaults_to_pure(self): + @task + def f() -> int: + return 1 + + assert f._dagron_task.effect == Effect.PURE # type: ignore[attr-defined] + + def test_explicit_effect(self): + @task(effect=Effect.NETWORK) + def fetch_url(url: str) -> str: + return url + + assert fetch_url._dagron_task.effect == Effect.NETWORK # type: ignore[attr-defined] + + def test_each_effect_class(self): + for e in Effect: + + @task(effect=e) + def f(_: Effect = e) -> int: # default arg captures e per closure + return 0 + + assert f._dagron_task.effect == e # type: ignore[attr-defined] + + def test_decorated_function_still_callable(self): + @task(effect=Effect.READ) + def add(a: int, b: int) -> int: + return a + b + + assert add(2, 3) == 5 + + +# --------------------------------------------------------------------------- +# AST scan heuristic +# --------------------------------------------------------------------------- + + +class TestAstScan: + def test_pure_task_with_time_call_warns(self): + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + + @task + def impure_pure() -> float: + return time.time() + + assert any( + "impure" in str(w.message) and "time.time" in str(w.message) for w in caught + ), [str(w.message) for w in caught] + + def test_pure_task_without_impure_calls_does_not_warn(self): + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + + @task + def clean(a: int, b: int) -> int: + return a + b + + assert not any("impure" in str(w.message) for w in caught), [ + str(w.message) for w in caught + ] + + def test_nondeterministic_task_does_not_warn_about_time(self): + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + + @task(effect=Effect.NONDETERMINISTIC) + def now() -> float: + return time.time() + + assert not any("impure" in 
str(w.message) for w in caught), [ + str(w.message) for w in caught + ] + + def test_pure_task_with_random_warns(self): + import random + + with warnings.catch_warnings(record=True) as caught: + warnings.simplefilter("always") + + @task + def roll_dice() -> int: + return random.randint(1, 6) + + assert any("impure" in str(w.message) for w in caught), [ + str(w.message) for w in caught + ] + + +# --------------------------------------------------------------------------- +# Effect → DAG metadata mirroring +# --------------------------------------------------------------------------- + + +class TestDagMetadataMirror: + def test_flow_writes_effect_to_metadata(self): + @task(effect=Effect.READ) + def read_db() -> list[int]: + return [1, 2, 3] + + @task(effect=Effect.NETWORK) + def push_to_api(rows: list[int]) -> str: + return f"sent {len(rows)} rows" + + @flow + def pipeline(): + return push_to_api(read_db()) + + dag = pipeline.dag() + meta_read = dag.get_metadata("read_db") + meta_push = dag.get_metadata("push_to_api") + assert meta_read == {"effect": "read"} + assert meta_push == {"effect": "network"} + + def test_effects_of_helper(self): + @task + def a() -> int: + return 1 + + @task(effect=Effect.WRITE) + def b(x: int) -> None: + return None + + @flow + def pipeline(): + return b(a()) + + eff = effects_of(pipeline.dag()) + assert eff == {"a": Effect.PURE, "b": Effect.WRITE} + + def test_effects_of_untagged_dag_defaults_to_pure(self): + d = DAG() + d.add_node("x") + assert effects_of(d) == {"x": Effect.PURE} + + +# --------------------------------------------------------------------------- +# Executor effect isolation +# --------------------------------------------------------------------------- + + +class TestExecutorIsolation: + def test_two_pure_nodes_run_in_parallel(self): + """When isolation is enforced, PURE tasks should still parallelize.""" + # Two parallel-ready pure nodes (no dependency between them). 
+ # Suppress the AST-scan warning — the time.sleep here is just to + # make parallelism observable. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", UserWarning) + + @task + def slow_pure_a() -> str: + time.sleep(0.05) + return "a" + + @task + def slow_pure_b() -> str: + time.sleep(0.05) + return "b" + + @flow + def pipeline(): + slow_pure_a() + return slow_pure_b() + + # Build dag and run with isolation + dag = pipeline.dag() + + def _sleep_a() -> str: + time.sleep(0.05) + return "a" + + def _sleep_b() -> str: + time.sleep(0.05) + return "b" + + tasks_dict: dict[str | NodeRef, Callable[[], Any]] = { + "slow_pure_a": _sleep_a, + "slow_pure_b": _sleep_b, + } + executor = DAGExecutor( + dag, + max_workers=2, + enforce_effect_isolation=True, + ) + t0 = time.monotonic() + result = executor.execute(tasks_dict) + elapsed = time.monotonic() - t0 + + assert result.succeeded == 2 + # Two 50ms sleeps, run in parallel, should finish in < 90ms. + # Generous bound to avoid CI flakiness. + assert elapsed < 0.15, f"PURE tasks did not parallelize ({elapsed:.3f}s)" + + def test_two_nondeterministic_nodes_serialize(self): + """Under isolation, two NONDETERMINISTIC tasks must NOT overlap.""" + # Track concurrent-execution count via a shared counter. 
+ active = 0 + max_active = 0 + lock = threading.Lock() + + def make_nd_fn(): + def fn(): + nonlocal active, max_active + with lock: + active += 1 + max_active = max(max_active, active) + time.sleep(0.05) + with lock: + active -= 1 + return active + + return fn + + @task(effect=Effect.NONDETERMINISTIC) + def nd_a(): + return None + + @task(effect=Effect.NONDETERMINISTIC) + def nd_b(): + return None + + @flow + def pipeline(): + nd_a() + return nd_b() + + dag = pipeline.dag() + nd_tasks: dict[str | NodeRef, Callable[[], Any]] = { + "nd_a": make_nd_fn(), + "nd_b": make_nd_fn(), + } + executor = DAGExecutor( + dag, + max_workers=2, + enforce_effect_isolation=True, + ) + result = executor.execute(nd_tasks) + + assert result.succeeded == 2 + assert max_active == 1, ( + f"NONDETERMINISTIC tasks ran concurrently (max_active={max_active}); " + "isolation lock failed." + ) + + def test_isolation_off_lets_nondeterministic_overlap(self): + """Without isolation, NONDETERMINISTIC tasks can overlap (no enforcement).""" + active = 0 + max_active = 0 + lock = threading.Lock() + + def make_fn(): + def fn(): + nonlocal active, max_active + with lock: + active += 1 + max_active = max(max_active, active) + time.sleep(0.05) + with lock: + active -= 1 + return None + + return fn + + @task(effect=Effect.NONDETERMINISTIC) + def x(): + return None + + @task(effect=Effect.NONDETERMINISTIC) + def y(): + return None + + @flow + def pipeline(): + x() + return y() + + dag = pipeline.dag() + executor = DAGExecutor( + dag, + max_workers=2, + enforce_effect_isolation=False, # opt out + ) + loose_tasks: dict[str | NodeRef, Callable[[], Any]] = { + "x": make_fn(), + "y": make_fn(), + } + executor.execute(loose_tasks) + assert max_active == 2, "without isolation, ND tasks should overlap" diff --git a/tests/python/test_flow.py b/tests/python/test_flow.py new file mode 100644 index 0000000..21dabe7 --- /dev/null +++ b/tests/python/test_flow.py @@ -0,0 +1,351 @@ +"""Tests for the @dagron.flow Pythonic 
compose API. + +`@flow` lets users describe a DAG by writing a regular Python function +that calls `@task`-decorated functions. The call structure becomes the +edges; no string IDs required. +""" + +from __future__ import annotations + +import pytest + +from dagron import ( + DAG, + Flow, + FlowFuture, + Pipeline, + flow, + task, +) + +# --------------------------------------------------------------------------- +# Tracing — single call returns a FlowFuture +# --------------------------------------------------------------------------- + + +class TestTracing: + def test_task_outside_flow_executes_normally(self): + @task + def double(x: int) -> int: + return x * 2 + + # Calling outside a @flow context runs the function for real. + assert double(5) == 10 + + def test_task_inside_flow_returns_future(self): + @task + def double(x: int) -> int: + return x * 2 + + captured: list[object] = [] + + @flow + def f(): + value = double(5) + captured.append(value) + return value + + result = f.dag() + assert isinstance(result, DAG) + assert len(captured) == 1 + assert isinstance(captured[0], FlowFuture) + assert captured[0].name == "double" + + def test_flow_decorator_returns_flow_object(self): + @flow + def f(): + return None + + assert isinstance(f, Flow) + + +# --------------------------------------------------------------------------- +# DAG construction from call structure +# --------------------------------------------------------------------------- + + +class TestDagBuilding: + def test_linear(self): + @task + def a(): + return 1 + + @task + def b(x): + return x + 1 + + @task + def c(x): + return x * 10 + + @flow + def pipeline(): + return c(b(a())) + + dag = pipeline.dag() + assert dag.node_count() == 3 + assert dag.edges() == [("a", "b"), ("b", "c")] + + def test_diamond(self): + @task + def src(): + return 1 + + @task + def left(x): + return x + 1 + + @task + def right(x): + return x * 2 + + @task + def merge(left_v, right_v): + return left_v + right_v + + @flow + def 
pipeline(): + s = src() + return merge(left(s), right(s)) + + dag = pipeline.dag() + assert dag.node_count() == 4 + edges = sorted(dag.edges()) + assert edges == [ + ("left", "merge"), + ("right", "merge"), + ("src", "left"), + ("src", "right"), + ] + + def test_repeated_task_gets_unique_names(self): + @task + def fetch(url): + return url + + @flow + def pipeline(): + a = fetch("a") + b = fetch("b") + c = fetch("c") + return [a, b, c] # Returns a list — flow expects FlowFuture or None + + # Returning a list should fail + with pytest.raises(TypeError, match="must return a FlowFuture"): + pipeline.dag() + + def test_repeated_task_each_gets_own_node(self): + @task + def fetch(url): + return url + + @task + def join(*xs): + return list(xs) + + @flow + def pipeline(): + return join(fetch("a"), fetch("b"), fetch("c")) + + dag = pipeline.dag() + names = sorted(n.name for n in dag.nodes()) + assert names == ["fetch", "fetch_1", "fetch_2", "join"] + + def test_literal_args_are_not_dependencies(self): + @task + def add(x, y): + return x + y + + @flow + def pipeline(): + return add(40, 2) + + dag = pipeline.dag() + assert dag.node_count() == 1 + assert dag.edges() == [] + + def test_kwargs_wired_correctly(self): + @task + def src(): + return 7 + + @task + def use(*, value): + return value + + @flow + def pipeline(): + return use(value=src()) + + dag = pipeline.dag() + assert dag.edges() == [("src", "use")] + + +# --------------------------------------------------------------------------- +# Execution +# --------------------------------------------------------------------------- + + +class TestExecution: + def test_run_returns_execution_result(self): + @task + def src(): + return 5 + + @task + def double(x): + return x * 2 + + @flow + def pipeline(): + return double(src()) + + result = pipeline.run() + assert result.succeeded == 2 + assert result["src"].result == 5 + assert result["double"].result == 10 + + def test_calling_flow_runs_it(self): + @task + def hello(): + return 
"hi" + + @flow + def pipeline(): + return hello() + + # Calling the Flow object is shorthand for run() + result = pipeline() + assert result["hello"].result == "hi" + + def test_diamond_runs_in_correct_order(self): + order: list[str] = [] + + @task + def s(): + order.append("s") + return 1 + + @task + def left(x): + order.append("left") + return x + 10 + + @task + def right(x): + order.append("right") + return x + 100 + + @task + def m(a, b): + order.append("m") + return a + b + + @flow + def pipeline(): + sv = s() + return m(left(sv), right(sv)) + + result = pipeline.run() + assert result["m"].result == 1 + 10 + 1 + 100 # 112 + # s must come before left/right; left/right before m. + assert order[0] == "s" + assert order[-1] == "m" + middle = order[1:-1] + assert "left" in middle + assert "right" in middle + + def test_kwargs_resolved_at_execution(self): + @task + def src(): + return 7 + + @task + def use(*, value): + return value * 3 + + @flow + def pipeline(): + return use(value=src()) + + result = pipeline.run() + assert result["use"].result == 21 + + +# --------------------------------------------------------------------------- +# Error cases +# --------------------------------------------------------------------------- + + +class TestErrors: + def test_returning_non_flow_future_raises(self): + @task + def t(): + return 42 + + @flow + def bad(): + return "not a future" + + with pytest.raises(TypeError, match="must return a FlowFuture"): + bad.dag() + + def test_nested_flow_invocation_raises(self): + @task + def t(): + return 1 + + @flow + def inner(): + return t() + + @flow + def outer(): + inner.dag() # nested invocation + return t() + + with pytest.raises(RuntimeError, match=r"Nested @dagron\.flow"): + outer.dag() + + def test_task_can_be_called_directly_outside_flow(self): + # Sanity check: @task decoration doesn't break direct callability. 
+ @task + def add(x, y): + return x + y + + assert add(2, 3) == 5 + + +# --------------------------------------------------------------------------- +# Pipeline backwards compatibility — same @task works in both +# --------------------------------------------------------------------------- + + +class TestPipelineCompat: + def test_same_task_works_in_pipeline_and_flow(self): + @task + def fetch_users(): + return [{"id": 1}, {"id": 2}] + + @task + def fetch_orders(): + return [{"oid": 1}] + + @task + def merge(fetch_users, fetch_orders): + return {"users": fetch_users, "orders": fetch_orders} + + # Pipeline (param-name wiring) + p_result = Pipeline([fetch_users, fetch_orders, merge]).execute() + assert p_result["merge"].result == { + "users": [{"id": 1}, {"id": 2}], + "orders": [{"oid": 1}], + } + + # @flow (call-structure wiring) — same task functions + @flow + def pipeline(): + return merge(fetch_users(), fetch_orders()) + + f_result = pipeline.run() + assert f_result["merge"].result == p_result["merge"].result diff --git a/tests/python/test_node_ref.py b/tests/python/test_node_ref.py new file mode 100644 index 0000000..8a029f0 --- /dev/null +++ b/tests/python/test_node_ref.py @@ -0,0 +1,318 @@ +"""Tests for the typed NodeRef handle. + +NodeRef is dagron's stable, persistent handle to a node returned by +`add_node`. Every public method that accepts a `str` node name should also +accept a `NodeRef`. This file exercises the cross-cutting NodeRef behavior +that the older string-based test suite doesn't cover. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +from dagron import ( + DAG, + DAGBuilder, + DAGExecutor, + NodeNotFoundError, + NodeRef, + StaleNodeRefError, +) + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import Any + +# --------------------------------------------------------------------------- +# Construction / identity +# --------------------------------------------------------------------------- + + +class TestAddNodeReturnsRef: + def test_returns_node_ref(self): + dag = DAG() + ref = dag.add_node("alpha") + assert isinstance(ref, NodeRef) + assert ref.name == "alpha" + assert isinstance(ref.epoch, int) + + def test_each_node_has_distinct_epoch(self): + dag = DAG() + a = dag.add_node("a") + b = dag.add_node("b") + assert a.epoch != b.epoch + + def test_add_nodes_returns_refs(self): + dag = DAG() + refs = dag.add_nodes(["a", "b", "c"]) + assert all(isinstance(r, NodeRef) for r in refs) + assert [r.name for r in refs] == ["a", "b", "c"] + + def test_node_ref_lookup(self): + dag = DAG() + original = dag.add_node("foo") + looked_up = dag.node_ref("foo") + assert looked_up == original + assert dag.node_ref("missing") is None + + def test_node_ref_equality_and_hash(self): + dag = DAG() + a = dag.add_node("a") + same = dag.node_ref("a") + assert a == same + assert hash(a) == hash(same) + # NodeRef should work as a dict key + bag = {a: "stored"} + assert bag[same] == "stored" + + +# --------------------------------------------------------------------------- +# Backwards-compat: every method that takes str should accept NodeRef +# --------------------------------------------------------------------------- + + +class TestNodeRefAcceptedEverywhere: + @pytest.fixture + def dag(self): + d = DAG() + d.add_node("a") + d.add_node("b") + d.add_node("c") + d.add_node("d") + d.add_edge("a", "b") + d.add_edge("a", "c") + d.add_edge("b", "d") + d.add_edge("c", "d") + return d + + def 
test_add_edge_accepts_mixed(self, dag): + # Build a fresh DAG to test edge addition explicitly + d = DAG() + a = d.add_node("a") + b = d.add_node("b") + c = d.add_node("c") + # All three combinations + d.add_edge(a, b) # ref, ref + d.add_edge(b, "c") # ref, str + d.add_edge("a", c) # str, ref + assert d.edge_count() == 3 + + def test_add_edges_batch_accepts_refs(self): + d = DAG() + a = d.add_node("a") + b = d.add_node("b") + c = d.add_node("c") + d.add_edges([(a, b), (b, c, 2.5)]) + assert d.edge_count() == 2 + + def test_has_node_accepts_ref(self, dag): + a = dag.node_ref("a") + assert dag.has_node(a) is True + assert dag.has_node("a") is True + + def test_has_edge_accepts_ref(self, dag): + a = dag.node_ref("a") + b = dag.node_ref("b") + assert dag.has_edge(a, b) is True + + def test_predecessors_successors_ancestors_descendants(self, dag): + d = dag.node_ref("d") + b = dag.node_ref("b") + a = dag.node_ref("a") + assert {n.name for n in dag.predecessors(d)} == {"b", "c"} + assert {n.name for n in dag.successors(a)} == {"b", "c"} + assert {n.name for n in dag.ancestors(d)} == {"a", "b", "c"} + assert {n.name for n in dag.descendants(b)} == {"d"} + + def test_in_out_degree_accept_ref(self, dag): + d = dag.node_ref("d") + a = dag.node_ref("a") + assert dag.in_degree(d) == 2 + assert dag.out_degree(a) == 2 + + def test_get_set_payload_accepts_ref(self): + d = DAG() + a = d.add_node("a", payload="hello") + assert d.get_payload(a) == "hello" + d.set_payload(a, "world") + assert d.get_payload(a) == "world" + + def test_get_set_metadata_accepts_ref(self): + d = DAG() + a = d.add_node("a", metadata={"v": 1}) + assert d.get_metadata(a) == {"v": 1} + d.set_metadata(a, {"v": 2}) + assert d.get_metadata(a) == {"v": 2} + + def test_remove_node_accepts_ref(self, dag): + b = dag.node_ref("b") + dag.remove_node(b) + assert not dag.has_node("b") + + def test_remove_edge_accepts_ref(self, dag): + a = dag.node_ref("a") + b = dag.node_ref("b") + dag.remove_edge(a, b) + assert not 
dag.has_edge("a", "b") + + def test_subgraph_accepts_refs(self, dag): + a = dag.node_ref("a") + b = dag.node_ref("b") + sub = dag.subgraph([a, b]) + assert sub.node_count() == 2 + assert sub.has_edge("a", "b") + + def test_subgraph_by_depth_accepts_ref(self, dag): + a = dag.node_ref("a") + sub = dag.subgraph_by_depth(a, depth=1, direction="forward") + assert sub.node_count() == 3 # a + b + c + + def test_collapse_accepts_refs(self, dag): + b = dag.node_ref("b") + c = dag.node_ref("c") + collapsed = dag.collapse([b, c], "bc") + assert collapsed.has_node("bc") + assert not collapsed.has_node("b") + assert not collapsed.has_node("c") + + def test_paths_accept_refs(self, dag): + a = dag.node_ref("a") + d = dag.node_ref("d") + paths = dag.all_paths(a, d) + assert len(paths) == 2 + sp = dag.shortest_path(a, d) + assert sp is not None + assert sp[0].name == "a" + assert sp[-1].name == "d" + + def test_dominator_tree_accepts_ref(self, dag): + a = dag.node_ref("a") + tree = dag.dominator_tree(a) + assert isinstance(tree, list) + + def test_iter_ancestors_descendants_accept_ref(self, dag): + d = dag.node_ref("d") + a = dag.node_ref("a") + anc = list(dag.iter_ancestors(d)) + desc = list(dag.iter_descendants(a)) + assert {n.name for n in anc} == {"a", "b", "c"} + assert {n.name for n in desc} == {"b", "c", "d"} + + def test_dirty_set_accepts_refs(self, dag): + a = dag.node_ref("a") + dirty = dag.dirty_set([a]) + assert "a" in dirty + assert "d" in dirty + + def test_is_ancestor_accepts_refs(self, dag): + a = dag.node_ref("a") + d = dag.node_ref("d") + assert dag.is_ancestor(a, d) is True + assert dag.is_ancestor(d, a) is False + + +# --------------------------------------------------------------------------- +# Stale-ref detection +# --------------------------------------------------------------------------- + + +class TestStaleNodeRef: + def test_node_ref_survives_unrelated_mutations(self): + d = DAG() + a = d.add_node("a") + d.add_node("b") + d.add_edge("a", "b") + # Add 
another node, remove an unrelated one — `a` should stay valid. + d.add_node("c") + d.remove_node("b") + assert d.has_node(a) is True + assert d.predecessors(a) == [] # still resolvable + + def test_ref_to_removed_node_raises(self): + d = DAG() + a = d.add_node("a") + d.remove_node("a") + with pytest.raises(NodeNotFoundError): + d.add_edge(a, "missing_target") + + def test_ref_after_name_reuse_is_stale(self): + d = DAG() + a1 = d.add_node("a") + d.remove_node("a") + d.add_node("a") + # a1 points to the OLD `a` (different epoch) + with pytest.raises(StaleNodeRefError): + d.has_edge(a1, a1) + + def test_fresh_ref_from_node_ref_method(self): + d = DAG() + a1 = d.add_node("a") + d.remove_node("a") + d.add_node("a") + # node_ref() returns the live ref + a2 = d.node_ref("a") + assert a2 is not None + assert a1 != a2 + assert d.has_node(a2) is True + + +# --------------------------------------------------------------------------- +# Builder + Executor + ExecutionResult lookup +# --------------------------------------------------------------------------- + + +class TestBuilderAcceptsRefs: + def test_builder_add_edge_accepts_mixed(self): + # Builder is deferred-construction: it doesn't itself produce refs, + # but should accept refs from a separately-built DAG. + helper = DAG() + a = helper.add_node("a") + b = helper.add_node("b") + # NodeRefs from `helper` carry the names; the builder uses the names. 
+ dag = ( + DAGBuilder() + .add_node("a") + .add_node("b") + .add_edge(a, b) # NodeRef passed through + .build() + ) + assert dag.has_edge("a", "b") + + +class TestExecutorAcceptsRefs: + def test_tasks_dict_accepts_node_ref_keys(self): + d = DAG() + a = d.add_node("a") + b = d.add_node("b") + d.add_edge(a, b) + + results: dict[str, int] = {} + + def fn_a(): + results["a"] = 1 + return 42 + + def fn_b(): + results["b"] = 2 + return 99 + + # Mix str and NodeRef keys in tasks dict + tasks: dict[str | NodeRef, Callable[[], Any]] = {a: fn_a, "b": fn_b} + executor = DAGExecutor(d) + result = executor.execute(tasks) + + assert result.succeeded == 2 + assert results == {"a": 1, "b": 2} + + def test_execution_result_getitem_accepts_ref(self): + d = DAG() + a = d.add_node("a") + executor = DAGExecutor(d) + result = executor.execute({a: lambda: "value"}) + # __getitem__ accepts both str and NodeRef + assert result[a].result == "value" + assert result["a"].result == "value" + assert a in result + assert "a" in result diff --git a/tests/python/test_typing.py b/tests/python/test_typing.py new file mode 100644 index 0000000..1ebd36d --- /dev/null +++ b/tests/python/test_typing.py @@ -0,0 +1,194 @@ +"""Static-typing tests for dagron's typed handles and stubgen. + +These tests exercise: + +* `FlowFuture[T]` carrying its wrapped task's return type +* `@task` preserving its function signature +* `NodeResult[T]` and `ExecutionResult.__getitem__` overloads typing + results by `FlowFuture[T]` key +* `dagron.stubgen.generate_stub` emitting a syntactically valid `.pyi` + with `Literal["..."] -> NodeResult[T]` overloads + +Static type assertions use `typing_extensions.assert_type`-style runtime +checks where possible, plus a `subprocess`-driven mypy run for the +`reveal_type` cases mypy can verify. 
+""" + +from __future__ import annotations + +import ast +import shutil +import subprocess +import textwrap +from typing import TYPE_CHECKING + +import pytest + +if TYPE_CHECKING: + from pathlib import Path + +from dagron import DAG, FlowFuture, flow, task +from dagron.execution._types import ExecutionResult, NodeResult, NodeStatus +from dagron.stubgen import generate_stub + +# --------------------------------------------------------------------------- +# Runtime checks of the typed surface +# --------------------------------------------------------------------------- + + +class TestFlowFutureGeneric: + def test_flow_future_is_subscriptable(self): + # The class is generic, so FlowFuture[int] should not raise. + alias = FlowFuture[int] + assert alias is not None # GenericAlias + + def test_constructor_returns_instance(self): + f: FlowFuture[int] = FlowFuture("x") + assert f.name == "x" + + +class TestTaskSignaturePreservation: + def test_decorated_function_preserves_call_signature(self): + @task + def add(a: int, b: int) -> int: + return a + b + + # Direct call works with original signature + assert add(2, 3) == 5 + + # Inspecting the wrapper still reveals the original signature. + # Use `get_type_hints` because the test file uses + # `from __future__ import annotations`, so raw annotations are strings. 
+ import inspect + from typing import get_type_hints + + sig = inspect.signature(add) + assert list(sig.parameters) == ["a", "b"] + + hints = get_type_hints(add) + assert hints == {"a": int, "b": int, "return": int} + + +class TestExecutionResultLookup: + def test_lookup_by_string(self): + r = ExecutionResult() + r.node_results["a"] = NodeResult(name="a", status=NodeStatus.COMPLETED, result=42) + out = r["a"] + assert out.result == 42 + + def test_lookup_by_flow_future(self): + r = ExecutionResult() + r.node_results["a"] = NodeResult(name="a", status=NodeStatus.COMPLETED, result=42) + f: FlowFuture[int] = FlowFuture("a") + out = r[f] + assert out.result == 42 + + def test_contains_by_flow_future(self): + r = ExecutionResult() + r.node_results["a"] = NodeResult(name="a", status=NodeStatus.COMPLETED, result=42) + f: FlowFuture[int] = FlowFuture("a") + assert f in r + assert "a" in r + assert FlowFuture("missing") not in r + + +# --------------------------------------------------------------------------- +# stubgen +# --------------------------------------------------------------------------- + + +class TestStubgen: + def test_generates_valid_python(self): + @task + def fetch() -> list[int]: + return [1, 2] + + @task + def total(rows: list[int]) -> int: + return sum(rows) + + @flow + def pipeline(): + return total(fetch()) + + dag = pipeline.dag() + stub = generate_stub( + dag, + tasks={"fetch": fetch, "total": total}, + name="MyResult", + ) + # Source must parse as a Python module + ast.parse(stub) + # Sanity check: the literal-keyed overloads are present + assert "Literal['fetch']" in stub + assert "Literal['total']" in stub + assert "NodeResult[list[int]]" in stub or "NodeResult[builtins.list" in stub + assert "NodeResult[int]" in stub + # And a fallback str overload + assert "key: str) -> NodeResult[Any]" in stub + + def test_explicit_hints_override_inference(self): + @task + def opaque(): # no annotation + return None + + dag = DAG() + dag.add_node("opaque") + 
stub = generate_stub( + dag, + type_hints={"opaque": "list[float]"}, + name="R", + ) + assert "NodeResult[list[float]]" in stub + + def test_empty_dag_produces_valid_class(self): + dag = DAG() + stub = generate_stub(dag, name="Empty") + ast.parse(stub) + # Even an empty DAG should produce the fallback overload + assert "key: str) -> NodeResult[Any]" in stub + + +# --------------------------------------------------------------------------- +# Mypy reveal_type — runs mypy on a synthesized snippet to verify static types. +# +# Skipped when mypy is not available (e.g. minimal CI environment). +# --------------------------------------------------------------------------- + + +@pytest.mark.skipif(shutil.which("mypy") is None, reason="mypy not on PATH") +def test_mypy_reveal_types(tmp_path: Path) -> None: + snippet = textwrap.dedent( + """ + from __future__ import annotations + from dagron import flow, task, FlowFuture + from dagron.execution._types import ExecutionResult, NodeResult + + @task + def fetch() -> list[int]: ... + + @task + def total(rows: list[int]) -> int: ... + + @flow + def pipeline(): + raw = fetch() # FlowFuture[list[int]] (typed as list[int]) + reveal_type(raw) + return total(raw) + + result: ExecutionResult = pipeline() + reveal_type(result[fetch_future]) # noqa: F821 — illustrative + """ + ) + target = tmp_path / "snippet.py" + target.write_text(snippet) + + proc = subprocess.run( + ["mypy", "--ignore-missing-imports", str(target)], + capture_output=True, + text=True, + check=False, + ) + # We expect mypy to find at least one reveal_type — verify the output + # mentions list[int] (the inferred type of `raw`). 
+ assert "list[int]" in proc.stdout or "Revealed type" in proc.stdout, proc.stdout From 694cd9a3cd94eff251e4c68c62606ac91dfa91ff Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 01:37:13 +0530 Subject: [PATCH 04/14] feat(py): reactive Signal/Computed/Watcher engine Solid.js / Jane-Street-Incremental style auto-tracked reactive engine in py_src/dagron/reactive.py. Pure Python; weakref observer sets; glitch-free batch() context. ~10us to recompute one branch out of 10k after upstream signal mutation. Distinct from the existing dagron.execution.reactive.ReactiveDAG which wraps a pre-built DAG. --- py_src/dagron/reactive.py | 369 ++++++++++++++++++++++++++ tests/python/test_reactive_bench.py | 197 ++++++++++++++ tests/python/test_reactive_engine.py | 371 +++++++++++++++++++++++++++ 3 files changed, 937 insertions(+) create mode 100644 py_src/dagron/reactive.py create mode 100644 tests/python/test_reactive_bench.py create mode 100644 tests/python/test_reactive_engine.py diff --git a/py_src/dagron/reactive.py b/py_src/dagron/reactive.py new file mode 100644 index 0000000..82a6347 --- /dev/null +++ b/py_src/dagron/reactive.py @@ -0,0 +1,369 @@ +"""Reactive incremental computation — `Signal` / `Computed` / `Watcher`. + +dagron's reactive engine: mutate a leaf value (`Signal`) and only the +affected downstream `Computed` nodes recompute on next read; subscribed +`Watcher`s re-fire automatically. + +Differences from the existing `dagron.execution.reactive.ReactiveDAG`: + +* That class wraps an *existing* `dagron.DAG` and exposes a push-based + `subscribe()` / `set_input()` API. +* This module provides Solid.js / Jane-Street-`Incremental` style + primitives where the dependency graph is *implicit* — building a + `Computed` records its read dependencies as side-effects of evaluating + its function. No DAG construction step required. 
+ +Example:: + + import dagron.reactive as dr + + a = dr.signal(1) + b = dr.signal(2) + s = dr.computed(lambda: a() + b()) + p = dr.computed(lambda: s() * 10) + + p() # 30 — initial compute, builds dep graph + a.set(5) # invalidates s and p; b untouched + p() # 70 — recomputes only s and p + + @dr.watch + def watch_p(): + print("p =", p()) # fires whenever p's value changes + + with dr.batch(): + a.set(0) + b.set(0) + # watch_p fires exactly once after the batch — glitch-free. +""" + +from __future__ import annotations + +import threading +import weakref +from contextlib import contextmanager +from typing import TYPE_CHECKING, Protocol, runtime_checkable + +if TYPE_CHECKING: + from collections.abc import Callable, Iterator + + +# --------------------------------------------------------------------------- +# Tracking machinery +# --------------------------------------------------------------------------- + + +@runtime_checkable +class _Tracker(Protocol): + """Anything that observes signals/computed and gets invalidated when they + change. Both `Computed` and `Watcher` implement it structurally. + """ + + def _add_dep(self, dep: _Observable) -> None: ... + def _invalidate(self) -> None: ... + + +@runtime_checkable +class _Observable(Protocol): + """Anything that can be read inside a tracker and notified of changes.""" + + def _attach(self, tracker: _Tracker) -> None: ... + def _detach(self, tracker: _Tracker) -> None: ... + + +_local = threading.local() + + +def _current_tracker() -> _Tracker | None: + return getattr(_local, "tracker", None) + + +@contextmanager +def _track(tracker: _Tracker) -> Iterator[None]: + """Push `tracker` onto the thread-local tracker stack for the duration + of the with-block. Inner reads of Signals/Computed will attach themselves + to `tracker` as observers. 
+ """ + prev = getattr(_local, "tracker", None) + _local.tracker = tracker + try: + yield + finally: + _local.tracker = prev + + +# --------------------------------------------------------------------------- +# Batching — defer Watcher fires until the outermost batch ends. +# --------------------------------------------------------------------------- + + +def _batch_depth() -> int: + return getattr(_local, "batch_depth", 0) + + +def _pending_watchers() -> set[Watcher]: + pw: set[Watcher] | None = getattr(_local, "pending", None) + if pw is None: + pw = set() + _local.pending = pw + return pw + + +@contextmanager +def batch() -> Iterator[None]: + """Defer Watcher fires until the outermost `batch()` block ends. + + Multiple signal mutations inside a batch produce at most one Watcher + fire per affected Watcher — guaranteeing glitch-free semantics. + + Usage:: + + with dr.batch(): + a.set(1) + b.set(2) + # any watcher reading both a and b fires once, not twice. + """ + _local.batch_depth = _batch_depth() + 1 + try: + yield + finally: + _local.batch_depth -= 1 + if _local.batch_depth == 0: + _flush_pending_watchers() + + +def _flush_pending_watchers() -> None: + pending = _pending_watchers() + if not pending: + return + # Snapshot and clear so newly-scheduled watchers (from inside fires) + # accumulate for the next flush. + snapshot = list(pending) + pending.clear() + for w in snapshot: + if not w._disposed: + w._fire() + + +# --------------------------------------------------------------------------- +# Signal — settable leaf +# --------------------------------------------------------------------------- + + +class Signal[T]: + """A settable leaf in the reactive graph. + + Calling the signal (`s()`) returns its current value. Inside a + `Computed` body or a `Watcher` body, the read is tracked so the + consumer is invalidated when `s.set(v)` changes the value. + + Equality-checked: setting the same value (by `==`) is a no-op. 
+ """ + + __slots__ = ("_observers", "_value") + + def __init__(self, value: T) -> None: + self._value: T = value + self._observers: weakref.WeakSet[_Tracker] = weakref.WeakSet() + + def __call__(self) -> T: + """Read the current value, registering this signal as a dependency + of the current tracker (if any).""" + tracker = _current_tracker() + if tracker is not None: + self._observers.add(tracker) + tracker._add_dep(self) + return self._value + + def set(self, value: T) -> None: + """Update the value and invalidate downstream observers. + + No-op if `value == self._value`. + """ + try: + same = self._value == value + except Exception: + same = False + if same: + return + self._value = value + # Snapshot observers; invalidation can mutate the WeakSet. + for obs in list(self._observers): + obs._invalidate() + # If we're not inside a batch, fire pending watchers now. + if _batch_depth() == 0: + _flush_pending_watchers() + + def peek(self) -> T: + """Read the current value WITHOUT registering a dependency.""" + return self._value + + def __repr__(self) -> str: + return f"Signal({self._value!r})" + + def _attach(self, tracker: _Tracker) -> None: + self._observers.add(tracker) + + def _detach(self, tracker: _Tracker) -> None: + self._observers.discard(tracker) + + +# --------------------------------------------------------------------------- +# Computed — lazy memoised derived value +# --------------------------------------------------------------------------- + + +class Computed[T]: + """A lazily-evaluated derived value. + + On first call (or after invalidation), runs `fn()`, recording the + signals/computed it reads. On subsequent calls, returns the cached + value as long as no upstream dep has been invalidated. 
+ """ + + __slots__ = ("__weakref__", "_deps", "_dirty", "_fn", "_observers", "_value") + + def __init__(self, fn: Callable[[], T]) -> None: + self._fn = fn + self._value: T | None = None + self._dirty = True + self._deps: list[_Observable] = [] + self._observers: weakref.WeakSet[_Tracker] = weakref.WeakSet() + + def __call__(self) -> T: + """Read the current value, recomputing if dirty.""" + if self._dirty: + self._recompute() + # Register this Computed as a dep of the current tracker. + tracker = _current_tracker() + if tracker is not None and tracker is not self: + self._observers.add(tracker) + tracker._add_dep(self) + return self._value # type: ignore[return-value] + + def peek(self) -> T: + """Read without registering a dependency. Still recomputes if dirty.""" + if self._dirty: + self._recompute() + return self._value # type: ignore[return-value] + + def _recompute(self) -> None: + # Detach from old deps so they don't keep notifying us. + for d in self._deps: + d._detach(self) + self._deps = [] + with _track(self): + self._value = self._fn() + self._dirty = False + + # _Tracker protocol + def _add_dep(self, dep: _Observable) -> None: + self._deps.append(dep) + + def _invalidate(self) -> None: + if self._dirty: + return + self._dirty = True + # Cascade invalidation to my observers. + for obs in list(self._observers): + obs._invalidate() + + # _Observable protocol + def _attach(self, tracker: _Tracker) -> None: + self._observers.add(tracker) + + def _detach(self, tracker: _Tracker) -> None: + self._observers.discard(tracker) + + def __repr__(self) -> str: + state = "dirty" if self._dirty else f"={self._value!r}" + return f"Computed({state})" + + +# --------------------------------------------------------------------------- +# Watcher — side-effecting subscriber +# --------------------------------------------------------------------------- + + +class Watcher: + """A side-effecting subscriber: re-runs whenever a tracked dep changes. 
+ + Built via `dr.watch(fn)` (decorator-style) or `dr.watch_fn(fn)`. + Construct-time, the body runs once to record initial deps. Subsequent + invalidations queue the watcher; pending watchers fire at the end of + the current `batch()` (or immediately if no batch is active). + + Call `.dispose()` to unsubscribe. + """ + + __slots__ = ("__weakref__", "_deps", "_disposed", "_fn") + + def __init__(self, fn: Callable[[], None]) -> None: + self._fn = fn + self._deps: list[_Observable] = [] + self._disposed = False + # Initial fire to establish dependencies. + self._fire() + + def _fire(self) -> None: + if self._disposed: + return + for d in self._deps: + d._detach(self) + self._deps = [] + with _track(self): + self._fn() + + def dispose(self) -> None: + """Detach from all observed signals/computed and stop firing.""" + self._disposed = True + for d in self._deps: + d._detach(self) + self._deps = [] + _pending_watchers().discard(self) + + # _Tracker protocol + def _add_dep(self, dep: _Observable) -> None: + self._deps.append(dep) + + def _invalidate(self) -> None: + if self._disposed: + return + _pending_watchers().add(self) + + +# --------------------------------------------------------------------------- +# Public factory aliases — keep call sites concise. +# --------------------------------------------------------------------------- + + +def signal[T](value: T) -> Signal[T]: + """Build a `Signal`. Convenience alias for `Signal(value)`.""" + return Signal(value) + + +def computed[T](fn: Callable[[], T]) -> Computed[T]: + """Build a `Computed`. Convenience alias for `Computed(fn)`.""" + return Computed(fn) + + +def watch(fn: Callable[[], None]) -> Watcher: + """Decorator-style: build a `Watcher` from `fn` and immediately fire once. + + Equivalent to `Watcher(fn)`. 
Use as a decorator to make intent clearer: + + @dr.watch + def log_p(): + print(p()) + """ + return Watcher(fn) + + +__all__ = [ + "Computed", + "Signal", + "Watcher", + "batch", + "computed", + "signal", + "watch", +] diff --git a/tests/python/test_reactive_bench.py b/tests/python/test_reactive_bench.py new file mode 100644 index 0000000..a94f62b --- /dev/null +++ b/tests/python/test_reactive_bench.py @@ -0,0 +1,197 @@ +"""Benchmarks for `dagron.reactive` — narrow recompute paths on large graphs. + +The headline claim for Phase 5 is: in a graph of 10k Computed nodes, mutating +one upstream signal and re-reading a downstream node should recompute only +the affected subgraph (not the whole graph) and finish in well under 1 ms. + +These benchmarks use `pytest-benchmark`. Run with:: + + uv run pytest tests/python/test_reactive_bench.py --benchmark-only +""" + +from __future__ import annotations + +import sys +from typing import Any + +import pytest + +import dagron.reactive as dr + +# Each level in a Computed chain consumes ~5 stack frames during evaluation. +# Bump the limit so 1000-deep chains evaluate without RecursionError. +sys.setrecursionlimit(20_000) + + +# --------------------------------------------------------------------------- +# Topology builders +# --------------------------------------------------------------------------- + + +def _build_linear_chain(depth: int) -> tuple[dr.Signal[int], dr.Computed[int]]: + """root signal → c0 → c1 → ... → c{depth-1} (returned as the tip).""" + root: dr.Signal[int] = dr.signal(0) + prev: dr.Computed[int] | dr.Signal[int] = root + + nodes: list[dr.Computed[int]] = [] + for _ in range(depth): + # Capture prev in default arg to bind by-value, not by closure. + def step(p=prev) -> int: # type: ignore[no-untyped-def] + return p() + 1 + + prev = dr.computed(step) + nodes.append(prev) + + # Return the root signal and the tail computed. 
+ return root, nodes[-1] + + +def _build_wide_diamond_set( + n_branches: int, +) -> tuple[dr.Signal[int], list[dr.Computed[int]]]: + """One root signal feeds N independent Computed branches. + + Mutating root invalidates all N — but recomputing only one branch + should be cheap. + """ + root: dr.Signal[int] = dr.signal(0) + branches: list[dr.Computed[int]] = [] + for i in range(n_branches): + + def branch(i=i) -> int: # type: ignore[no-untyped-def] + return root() + i + + branches.append(dr.computed(branch)) + return root, branches + + +# --------------------------------------------------------------------------- +# Benchmarks — narrow recompute on a 10k-node linear chain +# --------------------------------------------------------------------------- + + +@pytest.mark.benchmark(group="reactive-narrow-recompute") +def test_narrow_recompute_chain_100(benchmark: Any) -> None: + """100-node chain, single mutation, full re-read. + + Includes Python interpreter overhead; useful as a sanity baseline. + """ + root, tip = _build_linear_chain(100) + tip() # initial compute + + counter = [0] + + def cycle() -> int: + counter[0] += 1 + root.set(counter[0]) + return tip() + + result = benchmark(cycle) + assert result == counter[0] + 100 + + +@pytest.mark.benchmark(group="reactive-narrow-recompute") +def test_narrow_recompute_chain_1000(benchmark: Any) -> None: + """1000-node chain — single linear path, expected linear cost. + + Each level adds ~5 Python stack frames; this file bumps + `sys.setrecursionlimit(20_000)` at import time. Real DAGs rarely + chain more than a few dozen deep — this is a stress upper bound. 
+ """ + root, tip = _build_linear_chain(1000) + tip() + + counter = [0] + + def cycle() -> int: + counter[0] += 1 + root.set(counter[0]) + return tip() + + result = benchmark(cycle) + assert result == counter[0] + 1000 + + +@pytest.mark.benchmark(group="reactive-narrow-recompute") +def test_one_branch_in_10k_wide_fanout(benchmark: Any) -> None: + """10k branches off one root. + + Headline scenario: mutating the root invalidates all 10k branches, but + we read just *one* of them. The reactive engine should recompute only + that one — `pytest-benchmark` will report the cost. + """ + root, branches = _build_wide_diamond_set(10_000) + # Initial compute of the one branch we'll keep reading. + target = branches[1234] + target() + + counter = [0] + + def cycle() -> int: + counter[0] += 1 + root.set(counter[0]) + return target() + + result = benchmark(cycle) + assert result == counter[0] + 1234 + + +# --------------------------------------------------------------------------- +# Benchmarks — initial-build cost +# --------------------------------------------------------------------------- + + +@pytest.mark.benchmark(group="reactive-construction") +def test_build_chain_1000(benchmark: Any) -> None: + """Cost of constructing a 1000-node Computed chain (no eval). + + Construction does not recurse — only evaluation does — so a 1000-deep + chain builds fine, only evaluation is bounded by recursion limit. 
+ """ + + def build() -> dr.Computed[int]: + _, tip = _build_linear_chain(1000) + return tip + + benchmark(build) + + +@pytest.mark.benchmark(group="reactive-construction") +def test_build_and_evaluate_chain_1000(benchmark: Any) -> None: + """Cost of constructing AND evaluating a 1000-node chain top-to-bottom.""" + + def build_and_eval() -> int: + _, tip = _build_linear_chain(1000) + return tip() + + result = benchmark(build_and_eval) + assert result == 1000 + + +# --------------------------------------------------------------------------- +# Benchmarks — batched updates +# --------------------------------------------------------------------------- + + +@pytest.mark.benchmark(group="reactive-batching") +def test_batched_100_signal_writes(benchmark: Any) -> None: + """One Computed reads 100 signals; batch 100 sets → one recompute.""" + signals = [dr.signal(0) for _ in range(100)] + + def aggregate() -> int: + return sum(s() for s in signals) + + c = dr.computed(aggregate) + c() # initial + + counter = [0] + + def cycle() -> int: + counter[0] += 1 + with dr.batch(): + for i, s in enumerate(signals): + s.set(counter[0] + i) + return c() + + result = benchmark(cycle) + assert result == sum(counter[0] + i for i in range(100)) diff --git a/tests/python/test_reactive_engine.py b/tests/python/test_reactive_engine.py new file mode 100644 index 0000000..5eb33a1 --- /dev/null +++ b/tests/python/test_reactive_engine.py @@ -0,0 +1,371 @@ +"""Tests for `dagron.reactive` — Signal / Computed / Watcher. + +Different from `tests/python/execution/test_reactive.py` which exercises +the older push-based `ReactiveDAG` over an existing DAG. This file tests +the new auto-tracking primitives. 
+""" + +from __future__ import annotations + +import gc + +import pytest + +import dagron.reactive as dr + +# --------------------------------------------------------------------------- +# Signal — leaf value mutation +# --------------------------------------------------------------------------- + + +class TestSignal: + def test_initial_value(self): + s = dr.signal(42) + assert s() == 42 + + def test_set_and_read(self): + s = dr.signal(0) + s.set(100) + assert s() == 100 + + def test_set_same_value_is_noop(self): + # Same-value sets should not trigger downstream recomputes. + s = dr.signal(7) + recomputes = [0] + + def f() -> int: + recomputes[0] += 1 + return s() * 2 + + c = dr.computed(f) + c() + assert recomputes[0] == 1 + s.set(7) # same value — no invalidation + c() + assert recomputes[0] == 1 + + def test_peek_does_not_track(self): + # peek() reads the value without registering a dependency. + s = dr.signal(1) + recomputes = [0] + + def f() -> int: + recomputes[0] += 1 + return s.peek() + 100 # peek, no tracking + + c = dr.computed(f) + c() + assert recomputes[0] == 1 + s.set(999) + c() # NOT invalidated — peek didn't register a dep + assert recomputes[0] == 1 + + +# --------------------------------------------------------------------------- +# Computed — lazy memoised derivation +# --------------------------------------------------------------------------- + + +class TestComputed: + def test_basic_derivation(self): + a = dr.signal(2) + b = dr.signal(3) + s = dr.computed(lambda: a() + b()) + assert s() == 5 + + def test_lazy_recompute_on_read(self): + a = dr.signal(1) + recomputes = [0] + + def f() -> int: + recomputes[0] += 1 + return a() * 10 + + c = dr.computed(f) + c() + c() + c() + assert recomputes[0] == 1, "cached after first read" + + def test_invalidation_recomputes_on_next_read(self): + a = dr.signal(1) + recomputes = [0] + + def f() -> int: + recomputes[0] += 1 + return a() * 10 + + c = dr.computed(f) + c() + a.set(2) + # Invalidation alone does 
NOT recompute; reading does. + assert recomputes[0] == 1 + c() + assert recomputes[0] == 2 + + def test_nested_computed(self): + a = dr.signal(1) + b = dr.computed(lambda: a() + 1) + c = dr.computed(lambda: b() * 2) + assert c() == 4 + a.set(10) + assert c() == 22 + + def test_narrow_recompute_skips_unrelated(self): + # Only the upstream-affected branch should recompute. + a = dr.signal(1) + b = dr.signal(100) + recomputes_a = [0] + recomputes_b = [0] + + def fa() -> int: + recomputes_a[0] += 1 + return a() + 1 + + def fb() -> int: + recomputes_b[0] += 1 + return b() + 1 + + ca = dr.computed(fa) + cb = dr.computed(fb) + + ca() + cb() + assert recomputes_a[0] == 1 + assert recomputes_b[0] == 1 + + a.set(2) + # Read both; only `ca` should have recomputed. + ca() + cb() + assert recomputes_a[0] == 2 + assert recomputes_b[0] == 1, "cb should NOT have recomputed" + + +# --------------------------------------------------------------------------- +# Watcher — side-effecting subscriber +# --------------------------------------------------------------------------- + + +class TestWatcher: + def test_initial_fire(self): + log: list[int] = [] + s = dr.signal(7) + + @dr.watch + def w(): + log.append(s()) + + assert log == [7], "watcher fires once at construction" + + def test_fires_on_dep_change(self): + log: list[int] = [] + s = dr.signal(1) + + @dr.watch + def w(): + log.append(s()) + + s.set(2) + s.set(3) + assert log == [1, 2, 3] + + def test_does_not_fire_on_unrelated_change(self): + log: list[int] = [] + a = dr.signal(1) + b = dr.signal(100) + + @dr.watch + def w(): + log.append(a()) # depends only on a + + log.clear() + b.set(200) + b.set(300) + assert log == [], "watcher should not fire when only b changes" + + def test_dispose_stops_firing(self): + log: list[int] = [] + s = dr.signal(1) + + @dr.watch + def w(): + log.append(s()) + + log.clear() + w.dispose() + s.set(2) + s.set(3) + assert log == [] + + +# 
--------------------------------------------------------------------------- +# Batching — glitch-free updates +# --------------------------------------------------------------------------- + + +class TestBatching: + def test_diamond_glitch_free(self): + # Classic glitch test: a watcher sees a CONSISTENT view across + # multiple signal mutations. + a = dr.signal(1) + b = dr.signal(2) + s = dr.computed(lambda: a() + b()) + log: list[int] = [] + + @dr.watch + def w(): + log.append(s()) + + log.clear() + with dr.batch(): + a.set(10) + b.set(20) + # In a glitch-free system, w sees s=30, fired exactly once. + assert log == [30], f"expected one fire to 30, got {log}" + + def test_nested_batches_only_fire_outermost(self): + s = dr.signal(0) + log: list[int] = [] + + @dr.watch + def w(): + log.append(s()) + + log.clear() + with dr.batch(): + s.set(1) + with dr.batch(): + s.set(2) + s.set(3) + s.set(4) + # All five sets coalesced into one fire of value 4. + assert log == [4] + + def test_batch_with_no_changes_does_not_fire(self): + s = dr.signal(5) + log: list[int] = [] + + @dr.watch + def w(): + log.append(s()) + + log.clear() + with dr.batch(): + pass + assert log == [] + + +# --------------------------------------------------------------------------- +# Memory: dropped Computed / Watcher don't leak +# --------------------------------------------------------------------------- + + +class TestMemory: + def test_dropped_computed_collected(self): + s = dr.signal(0) + c = dr.computed(lambda: s() * 2) + c() # establishes obs link from s -> c + # observers is a WeakSet, so dropping c should let it be collected. + del c + gc.collect() + # Setting s shouldn't crash even if c is gone. + s.set(99) + + def test_disposed_watcher_does_not_keep_self_alive(self): + s = dr.signal(0) + + @dr.watch + def w(): + s() + + w.dispose() + # No assertion — just that no exceptions fire on subsequent sets. 
+ s.set(1) + + +# --------------------------------------------------------------------------- +# Edge cases +# --------------------------------------------------------------------------- + + +class TestEdgeCases: + def test_self_referential_computed_does_not_recurse_forever(self): + # If a Computed reads itself in its body, we shouldn't add it as + # its own dep / fall into a recompute loop. + s = dr.signal(1) + recomputes = [0] + + def fn() -> int: + recomputes[0] += 1 + return s() + 1 + + c = dr.computed(fn) + # Read c() inside another computed body. + outer = dr.computed(lambda: c() * 10) + assert outer() == 20 + + # Re-read does not re-fire either. + outer() + assert recomputes[0] == 1 + + def test_same_signal_read_twice_in_one_compute_one_dep(self): + # Reading the same signal twice inside a body should count as one dep. + s = dr.signal(7) + recomputes = [0] + + def fn() -> int: + recomputes[0] += 1 + return s() + s() + s() # three reads + + c = dr.computed(fn) + c() + s.set(8) + c() + # Only one extra recompute despite three internal reads. + assert recomputes[0] == 2 + + def test_computed_chain_of_ten_recomputes_all(self): + # Linear chain a -> c0 -> c1 -> ... -> c9. Mutating `a` should + # mark all of them dirty; each gets recomputed once when read. + a = dr.signal(0) + recomputes = [0] + + def make_step(prev): + def f(): + recomputes[0] += 1 + return prev() + 1 + + return dr.computed(f) + + nodes: list[dr.Computed[int]] = [a] # type: ignore[list-item] + for _ in range(10): + nodes.append(make_step(nodes[-1])) + + last = nodes[-1] + assert last() == 10 + + recomputes[0] = 0 + a.set(100) + assert last() == 110 + # Each of the 10 Computed nodes should have recomputed exactly once. 
+ assert recomputes[0] == 10 + + +# --------------------------------------------------------------------------- +# Type alias smoke +# --------------------------------------------------------------------------- + + +def test_module_exports(): + assert hasattr(dr, "Signal") + assert hasattr(dr, "Computed") + assert hasattr(dr, "Watcher") + assert hasattr(dr, "signal") + assert hasattr(dr, "computed") + assert hasattr(dr, "watch") + assert hasattr(dr, "batch") + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) From 9bbf64907907c16816e7c0644d24ff098e9c0299 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 01:37:43 +0530 Subject: [PATCH 05/14] feat(py): cross-process content-addressed cache dagron.contentcache.ContentCache: filesystem-as-index CAS at ~/.cache/dagron/cas (or $DAGRON_CACHE_DIR). Atomic temp+rename writes, magic-byte header, sharded //.cache layout. Independent processes share intermediates without coordination. Pluggable Hasher protocol; default_hash (pickle+blake2b) and numpy_hash bundled. compute_or_cached is effect-aware -- skips WRITE/NETWORK/NONDETERMINISTIC. fingerprint_function hashes co_code+co_consts+co_freevars so source mutation invalidates. --- py_src/dagron/contentcache.py | 365 +++++++++++++++++++++++++++ tests/python/test_contentcache.py | 396 ++++++++++++++++++++++++++++++ 2 files changed, 761 insertions(+) create mode 100644 py_src/dagron/contentcache.py create mode 100644 tests/python/test_contentcache.py diff --git a/py_src/dagron/contentcache.py b/py_src/dagron/contentcache.py new file mode 100644 index 0000000..998d554 --- /dev/null +++ b/py_src/dagron/contentcache.py @@ -0,0 +1,365 @@ +"""Content-addressed cache — Nix-flake-style cross-process compute caching. 
+
+Different from the in-process `dagron.execution.content_cache.ContentAddressableCache`:
+
+* **Filesystem is the index.** No `index.json` to keep in sync — each cached
+  entry lives at a path derived from its content hash, so independent
+  processes (CI workers, two terminals, two Python interpreters) share
+  intermediates transparently.
+* **Effect-aware.** Only nodes whose `dagron.Effect` tag reports
+  `is_cacheable` are stored; `WRITE`/`NETWORK`/`NONDETERMINISTIC` are
+  bypassed automatically.
+* **Pluggable hashers.** Pickle + blake2b is the default; bring your own
+  `Hasher` for numpy arrays, polars frames, or anything `tobytes`-friendly.
+
+Storage layout::
+
+    ~/.cache/dagron/cas/<aa>/<bb>/<rest>.cache        payload bytes
+    ~/.cache/dagron/cas/<aa>/<bb>/<rest>.cache.tmp    atomic temp file
+
+where `<aa>` and `<bb>` are the first two two-character shards of the hex
+fingerprint. POSIX `rename(2)` makes writes visible atomically.
+
+Example::
+
+    from dagron import Effect
+    from dagron.contentcache import ContentCache, fingerprint_node
+
+    cache = ContentCache()  # default location
+
+    def slow_fn(rows: list[int]) -> int:
+        return sum(rows)
+
+    fp = fingerprint_node(slow_fn, Effect.PURE, [b"input-fp-bytes"])
+    value, hit = cache.get(fp)
+    if not hit:
+        value = slow_fn([1, 2, 3])
+        cache.put(fp, value)
+"""
+
+from __future__ import annotations
+
+import contextlib
+import hashlib
+import inspect
+import os
+import pickle
+import sys
+import tempfile
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from dagron.effects import Effect
+
+
+# ---------------------------------------------------------------------------
+# Default cache location
+# ---------------------------------------------------------------------------
+
+
+def default_cache_dir() -> Path:
+    """Default CAS location — `$DAGRON_CACHE_DIR` or `~/.cache/dagron/cas`.""" 
+    env = os.environ.get("DAGRON_CACHE_DIR")
+    if env:
+        return Path(env)
return Path.home() / ".cache" / "dagron" / "cas" + + +# --------------------------------------------------------------------------- +# Hasher protocol — pluggable per-type fingerprinting +# --------------------------------------------------------------------------- + + +@runtime_checkable +class Hasher(Protocol): + """Compute a stable byte fingerprint for a Python value. + + Implementations should be deterministic (same value → same bytes + across processes and Python sessions). For non-pickleable types, + bring your own `Hasher` and register it via `ContentCache.register_hasher`. + """ + + def __call__(self, value: Any) -> bytes: ... + + +def default_hash(value: Any) -> bytes: + """Default hasher: pickle + blake2b (32-byte digest). + + Pickle is deterministic enough for hashing in 99% of cases (Python's + built-in containers, dataclasses, dicts in insertion order, etc.). + Falls back to `repr()` for unpickleable values. + """ + try: + data = pickle.dumps(value, protocol=pickle.DEFAULT_PROTOCOL) + except (pickle.PicklingError, TypeError, AttributeError): + data = repr(value).encode("utf-8", errors="replace") + return hashlib.blake2b(data, digest_size=32).digest() + + +def numpy_hash(value: Any) -> bytes: + """Hasher for numpy arrays — uses `array.tobytes()` for byte equality. + + Falls back to `default_hash` for non-array inputs so it can be used + as a default hasher in mixed pipelines. 
+ """ + try: + import numpy as np + except ImportError: + return default_hash(value) + + if isinstance(value, np.ndarray): + h = hashlib.blake2b(digest_size=32) + h.update(str(value.dtype).encode()) + h.update(str(value.shape).encode()) + h.update(value.tobytes()) + return h.digest() + return default_hash(value) + + +# --------------------------------------------------------------------------- +# Function fingerprinting +# --------------------------------------------------------------------------- + + +def fingerprint_function(fn: Callable[..., Any]) -> bytes: + """Stable fingerprint for a callable. + + Combines: + * `__qualname__` + * `__code__.co_code` bytes (the bytecode) + * `__code__.co_consts` tuple (constants referenced) + * Names of free variables + * Python major.minor version (bytecode is version-specific) + + Closure cell *values* are NOT included — the user must include them + explicitly via `inputs` if they affect the result, otherwise stale + closures could yield silent cache hits. + """ + h = hashlib.blake2b(digest_size=32) + h.update(f"py{sys.version_info.major}.{sys.version_info.minor}\n".encode()) + h.update(getattr(fn, "__qualname__", fn.__name__).encode()) + h.update(b"\x00") + code = getattr(fn, "__code__", None) + if code is not None: + h.update(code.co_code) + h.update(b"\x00") + # co_consts may contain code objects (nested defs); pickle them. + try: + h.update(pickle.dumps(code.co_consts, protocol=pickle.DEFAULT_PROTOCOL)) + except Exception: + h.update(repr(code.co_consts).encode()) + h.update(b"\x00") + h.update(",".join(code.co_freevars).encode()) + return h.digest() + + +def fingerprint_node( + fn: Callable[..., Any], + effect: Effect | None, + input_fingerprints: list[bytes], +) -> bytes: + """Compose a node's full fingerprint from function + effect + inputs. + + Args: + fn: The task function whose output is being fingerprinted. + effect: The task's effect tag (or None to skip the tag). 
+ input_fingerprints: Ordered list of upstream input fingerprints. + + Returns: + A 32-byte blake2b digest uniquely identifying this (function, inputs) + combination. Stable across processes and Python invocations as long + as the function's source and inputs don't change. + """ + h = hashlib.blake2b(digest_size=32) + h.update(fingerprint_function(fn)) + h.update(b"\x00") + if effect is not None: + h.update(effect.value.encode()) + h.update(b"\x00") + for fp in input_fingerprints: + h.update(fp) + return h.digest() + + +# --------------------------------------------------------------------------- +# ContentCache — the storage backend +# --------------------------------------------------------------------------- + + +# Magic bytes prepended to every cache file so we can detect corruption +# and version mismatches. +_MAGIC = b"DAGRON\x06\x01" # "DAGRON" + format-version major/minor + + +class ContentCache: + """Cross-process content-addressed cache backed by the filesystem. + + Each entry is keyed by a 32-byte `bytes` fingerprint. The cache is + transparent across processes: if process A computes and `put`s a + fingerprint, process B's `get` for the same fingerprint hits the + cache without any coordination, because the fingerprint is the + filesystem path. + + Args: + cache_dir: Directory to store cache files (default: `~/.cache/dagron/cas`). + hasher: Optional custom hasher used by `compute_or_cached`. + Independent of the storage layer. 
+ """ + + def __init__( + self, + cache_dir: Path | str | None = None, + hasher: Hasher | None = None, + ) -> None: + self._cache_dir = Path(cache_dir) if cache_dir is not None else default_cache_dir() + self._cache_dir.mkdir(parents=True, exist_ok=True) + self._hasher: Hasher = hasher if hasher is not None else default_hash + + @property + def cache_dir(self) -> Path: + return self._cache_dir + + def _path_for(self, fingerprint: bytes) -> Path: + """Map a fingerprint to its on-disk cache file path.""" + hex_fp = fingerprint.hex() + # Two levels of 2-char sharding to keep dir entries < 65k. + return self._cache_dir / hex_fp[:2] / hex_fp[2:4] / f"{hex_fp[4:]}.cache" + + # ----- low-level: raw fingerprint → bytes ---------------------------- + + def get(self, fingerprint: bytes) -> tuple[Any, bool]: + """Look up a value. Returns `(value, hit)`.""" + path = self._path_for(fingerprint) + if not path.exists(): + return None, False + try: + with path.open("rb") as f: + magic = f.read(len(_MAGIC)) + if magic != _MAGIC: + return None, False + payload = f.read() + return pickle.loads(payload), True + except (OSError, pickle.UnpicklingError, EOFError): + # Treat corruption as a miss — the next put will overwrite. + return None, False + + def put(self, fingerprint: bytes, value: Any) -> None: + """Store a value. Atomic: temp-file + rename.""" + path = self._path_for(fingerprint) + path.parent.mkdir(parents=True, exist_ok=True) + # NamedTemporaryFile in the same directory so rename(2) is atomic + # (POSIX requires same-filesystem rename). 
+ try: + payload = pickle.dumps(value, protocol=pickle.DEFAULT_PROTOCOL) + except (pickle.PicklingError, TypeError, AttributeError): + return + with tempfile.NamedTemporaryFile( + mode="wb", + dir=path.parent, + prefix=path.name + ".", + suffix=".tmp", + delete=False, + ) as tmp: + tmp.write(_MAGIC) + tmp.write(payload) + tmp_path = Path(tmp.name) + try: + os.replace(tmp_path, path) # atomic on POSIX + except OSError: + # Best-effort cleanup; another process may have raced us. + with contextlib.suppress(FileNotFoundError): + tmp_path.unlink() + + def has(self, fingerprint: bytes) -> bool: + return self._path_for(fingerprint).exists() + + def delete(self, fingerprint: bytes) -> None: + path = self._path_for(fingerprint) + with contextlib.suppress(FileNotFoundError): + path.unlink() + + def clear(self) -> None: + """Remove all cache entries (but keep the directory tree).""" + if not self._cache_dir.exists(): + return + for sub in self._cache_dir.rglob("*.cache"): + with contextlib.suppress(FileNotFoundError): + sub.unlink() + + # ----- high-level: hash + cache + compute ---------------------------- + + def hash(self, value: Any) -> bytes: + """Apply the configured Hasher to `value`.""" + return self._hasher(value) + + def compute_or_cached( + self, + fn: Callable[..., Any], + args: tuple[Any, ...] = (), + kwargs: dict[str, Any] | None = None, + effect: Effect | None = None, + ) -> tuple[Any, bool]: + """Compute `fn(*args, **kwargs)`, hitting the cache if possible. + + Effect-aware: if `effect.is_cacheable` is False, runs `fn` directly + without consulting or writing to the cache. The function's source + and the input fingerprints together form the cache key. + + Args: + fn: The function to compute. + args: Positional arguments. Each is fingerprinted with the configured + hasher and contributes to the cache key. + kwargs: Keyword arguments. Same hashing treatment. + effect: Optional effect tag from `dagron.Effect`. If None, defaults + to caching (treats fn as PURE). 
+ + Returns: + `(value, hit)` — `hit` is True if served from cache. + """ + kwargs = kwargs or {} + + # Effect gate: skip cache entirely for impure tasks. + if effect is not None and not effect.is_cacheable: + return fn(*args, **kwargs), False + + input_fps = [self._hasher(a) for a in args] + [ + self._hasher((k, v)) for k, v in sorted(kwargs.items()) + ] + fp = fingerprint_node(fn, effect, input_fps) + + cached, hit = self.get(fp) + if hit: + return cached, True + value = fn(*args, **kwargs) + self.put(fp, value) + return value, False + + +# Detect numpy at import; not required, but lets us wire the numpy hasher +# automatically when the user opts in. +def has_numpy() -> bool: + """Return True if numpy is importable in this interpreter.""" + return inspect.ismodule(sys.modules.get("numpy")) or _try_import_numpy() + + +def _try_import_numpy() -> bool: + try: + import numpy # noqa: F401 + except ImportError: + return False + return True + + +__all__ = [ + "ContentCache", + "Hasher", + "default_cache_dir", + "default_hash", + "fingerprint_function", + "fingerprint_node", + "has_numpy", + "numpy_hash", +] diff --git a/tests/python/test_contentcache.py b/tests/python/test_contentcache.py new file mode 100644 index 0000000..beab797 --- /dev/null +++ b/tests/python/test_contentcache.py @@ -0,0 +1,396 @@ +"""Tests for `dagron.contentcache` — cross-process content-addressed cache. + +Different from the in-process `ContentAddressableCache` in +`dagron.execution.content_cache`: this module uses the filesystem as its +index, so independent Python processes share intermediates transparently. 
+""" + +from __future__ import annotations + +import hashlib +import pickle +import subprocess +import sys +import textwrap +from pathlib import Path + +import pytest + +from dagron import Effect +from dagron.contentcache import ( + ContentCache, + default_cache_dir, + default_hash, + fingerprint_function, + fingerprint_node, + numpy_hash, +) + +# --------------------------------------------------------------------------- +# Hashers +# --------------------------------------------------------------------------- + + +class TestDefaultHash: + def test_deterministic(self): + assert default_hash("hello") == default_hash("hello") + assert default_hash([1, 2, 3]) == default_hash([1, 2, 3]) + + def test_different_values_differ(self): + assert default_hash("hello") != default_hash("world") + assert default_hash([1, 2, 3]) != default_hash([1, 2, 4]) + + def test_returns_32_bytes(self): + assert len(default_hash("anything")) == 32 + + def test_unpickleable_falls_back_to_repr(self): + class _Weird: + __slots__ = () + + def __reduce__(self): + raise TypeError("not pickleable") + + # Must not raise + h = default_hash(_Weird()) + assert len(h) == 32 + + +class TestNumpyHash: + def test_falls_back_for_non_array(self): + # Without numpy installed, behaves like default_hash; with numpy + # but a non-array input, also falls back. 
+ assert numpy_hash("hello") == default_hash("hello") + assert numpy_hash([1, 2, 3]) == default_hash([1, 2, 3]) + + def test_array_uses_tobytes(self): + np = pytest.importorskip("numpy") + a = np.array([1, 2, 3, 4], dtype=np.int32) + b = np.array([1, 2, 3, 4], dtype=np.int32) + c = np.array([1, 2, 3, 5], dtype=np.int32) + d = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float64) # different dtype + + assert numpy_hash(a) == numpy_hash(b) + assert numpy_hash(a) != numpy_hash(c) + assert numpy_hash(a) != numpy_hash(d) + + +# --------------------------------------------------------------------------- +# Function fingerprinting +# --------------------------------------------------------------------------- + + +class TestFingerprintFunction: + def test_same_function_same_fingerprint(self): + def add(a, b): + return a + b + + assert fingerprint_function(add) == fingerprint_function(add) + + def test_different_function_body_differs(self): + def add(a, b): + return a + b + + def sub(a, b): + return a - b + + assert fingerprint_function(add) != fingerprint_function(sub) + + def test_different_constants_differ(self): + def f1(): + return 42 + + def f2(): + return 43 + + assert fingerprint_function(f1) != fingerprint_function(f2) + + +class TestFingerprintNode: + def test_combines_function_effect_inputs(self): + def f(a): + return a + 1 + + # Different inputs → different fingerprints + fp1 = fingerprint_node(f, Effect.PURE, [b"input-a"]) + fp2 = fingerprint_node(f, Effect.PURE, [b"input-b"]) + assert fp1 != fp2 + + # Different effect tag → different fingerprint + fp3 = fingerprint_node(f, Effect.READ, [b"input-a"]) + assert fp1 != fp3 + + # Same everything → same fingerprint + fp4 = fingerprint_node(f, Effect.PURE, [b"input-a"]) + assert fp1 == fp4 + + +# --------------------------------------------------------------------------- +# ContentCache low-level API +# --------------------------------------------------------------------------- + + +class TestContentCacheLowLevel: + def 
test_get_miss_returns_false(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + val, hit = cache.get(b"missing-fingerprint" + b"\x00" * 14) # 32-byte + assert val is None + assert hit is False + + def test_put_then_get_returns_value(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = b"\x01" * 32 + cache.put(fp, [1, 2, 3]) + val, hit = cache.get(fp) + assert hit is True + assert val == [1, 2, 3] + + def test_has(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = b"\x02" * 32 + assert not cache.has(fp) + cache.put(fp, "x") + assert cache.has(fp) + + def test_delete(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = b"\x03" * 32 + cache.put(fp, "x") + cache.delete(fp) + assert not cache.has(fp) + + def test_clear(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + cache.put(b"\x04" * 32, "a") + cache.put(b"\x05" * 32, "b") + cache.clear() + assert not cache.has(b"\x04" * 32) + assert not cache.has(b"\x05" * 32) + + def test_corrupted_file_treated_as_miss(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = b"\x06" * 32 + cache.put(fp, "ok") + # Corrupt the on-disk file by overwriting it with garbage. + path = cache._path_for(fp) + path.write_bytes(b"GARBAGE NOT MAGIC") + _, hit = cache.get(fp) + assert hit is False + + def test_path_sharded(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = bytes.fromhex("a1b2c3d4" + "e5" * 28) # 32 bytes + path = cache._path_for(fp) + # Shards: a1/b2/c3d4e5... 
+ assert "a1" in str(path) + assert "b2" in str(path) + + +# --------------------------------------------------------------------------- +# ContentCache.compute_or_cached — high-level API +# --------------------------------------------------------------------------- + + +class TestComputeOrCached: + def test_first_miss_then_hit(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + call_count = [0] + + def f(x: int) -> int: + call_count[0] += 1 + return x * 100 + + v1, hit1 = cache.compute_or_cached(f, args=(7,), effect=Effect.PURE) + v2, hit2 = cache.compute_or_cached(f, args=(7,), effect=Effect.PURE) + assert v1 == 700 + assert v2 == 700 + assert hit1 is False + assert hit2 is True + # f should have been called exactly once (second was a cache hit). + assert call_count[0] == 1 + + def test_different_args_recompute(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + call_count = [0] + + def f(x: int) -> int: + call_count[0] += 1 + return x + + cache.compute_or_cached(f, args=(1,), effect=Effect.PURE) + cache.compute_or_cached(f, args=(2,), effect=Effect.PURE) + cache.compute_or_cached(f, args=(3,), effect=Effect.PURE) + assert call_count[0] == 3 + + def test_kwargs_factor_into_key(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + call_count = [0] + + def f(*, x: int) -> int: + call_count[0] += 1 + return x + + cache.compute_or_cached(f, kwargs={"x": 1}, effect=Effect.PURE) + cache.compute_or_cached(f, kwargs={"x": 1}, effect=Effect.PURE) # hit + cache.compute_or_cached(f, kwargs={"x": 2}, effect=Effect.PURE) # miss + assert call_count[0] == 2 + + def test_uncacheable_effect_skips_cache(self, tmp_path): + cache = ContentCache(cache_dir=tmp_path) + call_count = [0] + + def fetch(url: str) -> str: + call_count[0] += 1 + return url * 2 + + for _ in range(3): + v, hit = cache.compute_or_cached(fetch, args=("x",), effect=Effect.NETWORK) + assert v == "xx" + assert hit is False + # All 3 calls actually invoked fetch. 
+ assert call_count[0] == 3 + + def test_no_effect_defaults_to_caching(self, tmp_path): + # When effect=None, behaves like PURE (cacheable). + cache = ContentCache(cache_dir=tmp_path) + call_count = [0] + + def f(x: int) -> int: + call_count[0] += 1 + return x + + cache.compute_or_cached(f, args=(1,)) + cache.compute_or_cached(f, args=(1,)) + assert call_count[0] == 1 + + +# --------------------------------------------------------------------------- +# Cross-process sharing — the headline claim of Phase 6 +# --------------------------------------------------------------------------- + + +_CHILD_SCRIPT = textwrap.dedent( + """ + import sys, pickle + sys.path.insert(0, {dagron_path!r}) + from dagron import Effect + from dagron.contentcache import ContentCache + + cache = ContentCache(cache_dir={cache_dir!r}) + call_count = [0] + + def slow_fn(x): + call_count[0] += 1 + return x * 1000 + + val, hit = cache.compute_or_cached(slow_fn, args=({arg},), effect=Effect.PURE) + print(pickle.dumps((val, hit, call_count[0])).hex()) + """ +) + + +def _run_child(cache_dir: Path, arg: int) -> tuple[object, bool, int]: + """Run a fresh Python process that uses our ContentCache.""" + # Pass our py_src/ on the child's PYTHONPATH so it imports the same dagron. + dagron_path = str(Path(__file__).parent.parent.parent / "py_src") + code = _CHILD_SCRIPT.format( + dagron_path=dagron_path, + cache_dir=str(cache_dir), + arg=arg, + ) + proc = subprocess.run( + [sys.executable, "-c", code], + capture_output=True, + text=True, + check=False, + timeout=30, + ) + if proc.returncode != 0: + raise RuntimeError(f"child failed: stderr={proc.stderr}") + return pickle.loads(bytes.fromhex(proc.stdout.strip())) + + +class TestCrossProcessSharing: + def test_second_process_hits_cache(self, tmp_path): + # First process: compute and cache. 
+ v1, hit1, calls1 = _run_child(tmp_path, 42) + assert v1 == 42_000 + assert hit1 is False + assert calls1 == 1 + + # Second process (fresh interpreter): should hit the cache. + v2, hit2, calls2 = _run_child(tmp_path, 42) + assert v2 == 42_000 + assert hit2 is True + assert calls2 == 0, "child should not have invoked slow_fn" + + def test_different_inputs_independent_cache_entries(self, tmp_path): + v1, hit1, _ = _run_child(tmp_path, 1) + v2, hit2, _ = _run_child(tmp_path, 2) + v3, hit3, _ = _run_child(tmp_path, 1) + + assert v1 == 1000 + assert v2 == 2000 + assert v3 == 1000 + assert hit1 is False + assert hit2 is False + assert hit3 is True + + +# --------------------------------------------------------------------------- +# default_cache_dir +# --------------------------------------------------------------------------- + + +class TestDefaultCacheDir: + def test_uses_env_override(self, monkeypatch): + monkeypatch.setenv("DAGRON_CACHE_DIR", "/tmp/dagron-test-cache-xyz") + assert default_cache_dir() == Path("/tmp/dagron-test-cache-xyz") + + def test_default_is_xdg_style(self, monkeypatch): + monkeypatch.delenv("DAGRON_CACHE_DIR", raising=False) + d = default_cache_dir() + assert "dagron" in str(d) + assert "cas" in str(d) + + +# --------------------------------------------------------------------------- +# Function source mutation invalidates cache +# --------------------------------------------------------------------------- + + +class TestSourceMutationInvalidates: + def test_two_distinct_functions_have_distinct_keys(self, tmp_path): + # Simulate "user changed the source" by defining two functions + # with different bytecode but same name & arity. 
+ cache = ContentCache(cache_dir=tmp_path) + + def make_v1(): + def f(x): + return x + 1 + + return f + + def make_v2(): + def f(x): + return x + 2 # different body + + return f + + v1, _ = cache.compute_or_cached(make_v1(), args=(10,), effect=Effect.PURE) + v2, hit2 = cache.compute_or_cached(make_v2(), args=(10,), effect=Effect.PURE) + assert v1 == 11 + assert v2 == 12 + assert hit2 is False, "source-mutated function should miss the cache" + + +# --------------------------------------------------------------------------- +# magic-byte sanity +# --------------------------------------------------------------------------- + + +def test_cache_file_starts_with_magic(tmp_path): + cache = ContentCache(cache_dir=tmp_path) + fp = hashlib.blake2b(b"x", digest_size=32).digest() + cache.put(fp, "hello") + on_disk = cache._path_for(fp).read_bytes() + assert on_disk.startswith(b"DAGRON") From dc1d0645d1afe15e429214b796a262a20315f044 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 01:38:11 +0530 Subject: [PATCH 06/14] feat(py): time-travel replay with persistent traces dagron.trace.TraceWriter appends per-node JSONL records; payloads stored in the Phase 6 ContentCache keyed by output fingerprint, so identical values across runs deduplicate. TraceReader reads back; replay(source, at=t) reconstructs per-node ReplayedNode state at any past wall-clock instant. Pure/READ replay byte-identically; WRITE/NETWORK/NONDETERMINISTIC are flagged replayable=False but their logged values are still surfaced. Re-recorded retries take the latest value up to the cutoff. Honors $DAGRON_TRACE_DIR. 
---
 py_src/dagron/trace.py      | 399 ++++++++++++++++++++++++++++++++++++
 tests/python/test_replay.py | 360 ++++++++++++++++++++++++++++
 2 files changed, 759 insertions(+)
 create mode 100644 py_src/dagron/trace.py
 create mode 100644 tests/python/test_replay.py

diff --git a/py_src/dagron/trace.py b/py_src/dagron/trace.py
new file mode 100644
index 0000000..9934c6d
--- /dev/null
+++ b/py_src/dagron/trace.py
@@ -0,0 +1,399 @@
+"""Time-travel debugging — persistent execution traces with `replay(at=t)`.
+
+Every node execution can be appended to an on-disk trace log. Each log
+entry stores the node's output fingerprint and metadata; the
+actual *payload* lives in the Phase 6 `ContentCache` keyed by the output
+fingerprint, so storage stays compact and outputs are deduplicated across
+runs that produced the same value.
+
+Replaying the run reconstructs the per-node `ExecutionResult`-like state
+*as of* a chosen wall-clock time. Pure nodes replay byte-identically;
+impure nodes (`WRITE` / `NETWORK` / `NONDETERMINISTIC`) surface a
+"non-replayable" marker but still expose their logged output value, so
+you can inspect what the run actually produced without claiming
+reproducibility.
+ +Example:: + + from pathlib import Path + from dagron import Effect + from dagron.contentcache import ContentCache + from dagron.trace import TraceWriter, TraceReader, replay + + cas = ContentCache() + log_path = Path("/tmp/dagron-traces/myrun.jsonl") + writer = TraceWriter(log_path, cas=cas) + writer.record("fetch", value=[1, 2, 3], effect=Effect.PURE) + writer.record("total", value=6, effect=Effect.PURE) + writer.close() + + reader = TraceReader(log_path, cas=cas) + state = replay(reader) + state["total"].value # 6 +""" + +from __future__ import annotations + +import contextlib +import json +import os +import time +import uuid +from dataclasses import dataclass, field +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from dagron.contentcache import ContentCache, default_cache_dir + +if TYPE_CHECKING: + from collections.abc import Iterator + + from dagron.effects import Effect + + +# --------------------------------------------------------------------------- +# Default locations +# --------------------------------------------------------------------------- + + +def default_trace_dir() -> Path: + """`$DAGRON_TRACE_DIR` or `~/.cache/dagron/traces`.""" + env = os.environ.get("DAGRON_TRACE_DIR") + if env: + return Path(env) + return Path.home() / ".cache" / "dagron" / "traces" + + +def new_run_id() -> str: + """Random short identifier for a single execution run.""" + return uuid.uuid4().hex[:16] + + +# --------------------------------------------------------------------------- +# Data types +# --------------------------------------------------------------------------- + + +@dataclass(frozen=True) +class TraceRecord: + """One node's execution recorded to the trace log. + + `output_fp` is the hex digest under which the node's value lives in the + `ContentCache`. `replayable` mirrors `Effect.is_deterministic` at + record time so the replayer can flag results that aren't guaranteed + reproducible (e.g., NETWORK reads). 
+ """ + + timestamp: float + name: str + output_fp: str # hex + duration_ns: int = 0 + effect: str = "pure" + replayable: bool = True + error: str | None = None + metadata: dict[str, Any] = field(default_factory=dict) + + def to_json(self) -> str: + return json.dumps( + { + "t": self.timestamp, + "name": self.name, + "fp": self.output_fp, + "dur_ns": self.duration_ns, + "effect": self.effect, + "replayable": self.replayable, + "error": self.error, + "metadata": self.metadata, + }, + separators=(",", ":"), + sort_keys=True, + ) + + @classmethod + def from_json(cls, line: str) -> TraceRecord: + d = json.loads(line) + return cls( + timestamp=d["t"], + name=d["name"], + output_fp=d["fp"], + duration_ns=d.get("dur_ns", 0), + effect=d.get("effect", "pure"), + replayable=d.get("replayable", True), + error=d.get("error"), + metadata=d.get("metadata", {}), + ) + + +@dataclass(frozen=True) +class ReplayedNode: + """One node's state at a given replay timestamp.""" + + name: str + timestamp: float + value: Any + effect: str = "pure" + replayable: bool = True + duration_ns: int = 0 + error: str | None = None + + @property + def has_value(self) -> bool: + return self.error is None + + +# --------------------------------------------------------------------------- +# TraceWriter — append-only JSONL log +# --------------------------------------------------------------------------- + + +class TraceWriter: + """Append-only writer for an execution trace. + + Each call to `record()` writes a JSONL line and stores the node's + payload in the bound `ContentCache`. Atomic at the line level: the + log file is opened in append mode with `O_APPEND`, so concurrent + writers from multiple processes won't tear lines. + + Args: + path: Path to the trace log file. Parent directories are created. + cas: ContentCache used to store node payloads. If None, a default + cache at `~/.cache/dagron/cas` is created. 
+ """ + + def __init__( + self, + path: Path | str, + *, + cas: ContentCache | None = None, + ) -> None: + self._path = Path(path) + self._path.parent.mkdir(parents=True, exist_ok=True) + self._cas = cas if cas is not None else ContentCache(default_cache_dir()) + # Open in append+binary mode so writes are O_APPEND-atomic per write(). + self._fh = self._path.open("ab") + + @property + def path(self) -> Path: + return self._path + + @property + def cas(self) -> ContentCache: + return self._cas + + def record( + self, + name: str, + *, + value: Any = None, + effect: Effect | None = None, + duration_ns: int = 0, + error: str | None = None, + metadata: dict[str, Any] | None = None, + timestamp: float | None = None, + ) -> TraceRecord: + """Record a node execution. + + Args: + name: Node name. + value: The node's output value (stored in CAS, deduped by hash). + effect: The node's effect tag. Drives the `replayable` flag. + duration_ns: How long the node took (nanoseconds). + error: Error message string if the node failed. + metadata: Extra fields to round-trip through the log. + timestamp: Override wall clock (default: now). + + Returns: + The persisted `TraceRecord`. + """ + ts = timestamp if timestamp is not None else time.time() + # Hash + store payload in CAS (only if the node succeeded). + if error is None: + output_fp = self._cas.hash(value) + self._cas.put(output_fp, value) + output_hex = output_fp.hex() + else: + output_hex = "" + + rec = TraceRecord( + timestamp=ts, + name=name, + output_fp=output_hex, + duration_ns=duration_ns, + effect=effect.value if effect is not None else "pure", + replayable=effect.is_deterministic if effect is not None else True, + error=error, + metadata=metadata or {}, + ) + line = (rec.to_json() + "\n").encode() + self._fh.write(line) + # Don't fsync per write — the OS buffer is fine; close() will flush. 
+ return rec + + def flush(self) -> None: + self._fh.flush() + with contextlib.suppress(OSError): + os.fsync(self._fh.fileno()) + + def close(self) -> None: + try: + self.flush() + finally: + self._fh.close() + + def __enter__(self) -> TraceWriter: + return self + + def __exit__(self, *exc: object) -> None: + self.close() + + +# --------------------------------------------------------------------------- +# TraceReader — read records back, filter by time +# --------------------------------------------------------------------------- + + +class TraceReader: + """Read a persisted trace log. + + Args: + path: Path to the trace log file. + cas: ContentCache used to fetch node payloads on demand. + """ + + def __init__( + self, + path: Path | str, + *, + cas: ContentCache | None = None, + ) -> None: + self._path = Path(path) + self._cas = cas if cas is not None else ContentCache(default_cache_dir()) + + @property + def path(self) -> Path: + return self._path + + @property + def cas(self) -> ContentCache: + return self._cas + + def records(self) -> Iterator[TraceRecord]: + """Yield every record in append order. Skips malformed lines.""" + if not self._path.exists(): + return + with self._path.open("rb") as fh: + for raw in fh: + try: + line = raw.decode("utf-8").strip() + except UnicodeDecodeError: + continue + if not line: + continue + try: + yield TraceRecord.from_json(line) + except (json.JSONDecodeError, KeyError): + continue + + def timeline(self) -> list[tuple[float, str]]: + """Return `[(timestamp, node_name), ...]` in record order.""" + return [(r.timestamp, r.name) for r in self.records()] + + def records_until(self, t: float, *, inclusive: bool = True) -> Iterator[TraceRecord]: + """Yield only records with `timestamp <= t` (or `<` if not inclusive).""" + for r in self.records(): + if (r.timestamp <= t) if inclusive else (r.timestamp < t): + yield r + else: + break + + def fetch(self, rec: TraceRecord) -> Any: + """Resolve a record's payload from the CAS. 
Returns None for failures + and for records whose payload is no longer in the cache (cache may + have been pruned).""" + if not rec.output_fp: + return None + try: + fp_bytes = bytes.fromhex(rec.output_fp) + except ValueError: + return None + value, hit = self._cas.get(fp_bytes) + return value if hit else None + + +# --------------------------------------------------------------------------- +# replay — reconstruct state at a chosen timestamp +# --------------------------------------------------------------------------- + + +def replay( + source: TraceReader | Path | str, + *, + at: float | None = None, + cas: ContentCache | None = None, +) -> dict[str, ReplayedNode]: + """Reconstruct the per-node state of a recorded run, as of time `at`. + + For each node in the log up to `at`: + + * Pure / READ nodes: payload is fetched from CAS and exposed in the + returned `ReplayedNode.value`. `replayable` is True. + * Impure nodes (WRITE / NETWORK / NONDETERMINISTIC): payload is still + fetched (from the CAS where the original run wrote it) and exposed, + but `replayable` is False — the value is what *that* run produced, + not what a fresh run would produce. + + If a node was recorded multiple times in the log (e.g., re-runs), the + *latest* record up to `at` wins. + + Args: + source: a `TraceReader`, or a path to a trace file. + at: wall-clock cutoff. None = end of log. + cas: optional ContentCache override (only used when `source` is a + path). + + Returns: + Dict mapping node name to its `ReplayedNode` snapshot. 
+ """ + reader: TraceReader = ( + source if isinstance(source, TraceReader) else TraceReader(source, cas=cas) + ) + + cutoff = float("inf") if at is None else at + state: dict[str, ReplayedNode] = {} + for rec in reader.records_until(cutoff): + value = reader.fetch(rec) + state[rec.name] = ReplayedNode( + name=rec.name, + timestamp=rec.timestamp, + value=value, + effect=rec.effect, + replayable=rec.replayable, + duration_ns=rec.duration_ns, + error=rec.error, + ) + return state + + +# --------------------------------------------------------------------------- +# Convenience: gather all run paths under default_trace_dir() +# --------------------------------------------------------------------------- + + +def list_runs(trace_dir: Path | str | None = None) -> list[Path]: + """List every trace log file (`*.jsonl`) under `trace_dir`.""" + base = Path(trace_dir) if trace_dir is not None else default_trace_dir() + if not base.exists(): + return [] + return sorted(base.rglob("*.jsonl")) + + +__all__ = [ + "ReplayedNode", + "TraceReader", + "TraceRecord", + "TraceWriter", + "default_trace_dir", + "list_runs", + "new_run_id", + "replay", +] diff --git a/tests/python/test_replay.py b/tests/python/test_replay.py new file mode 100644 index 0000000..90df505 --- /dev/null +++ b/tests/python/test_replay.py @@ -0,0 +1,360 @@ +"""Tests for `dagron.trace` — persistent execution traces with replay. + +The headline claim of Phase 7: every node execution is appended to a +JSONL log; payloads live in the Phase 6 ContentCache; `replay(at=t)` +reconstructs the exact per-node state at any past wall-clock instant. +Pure nodes replay byte-identically; impure nodes are flagged +non-replayable but still expose their logged value. 
+""" + +from __future__ import annotations + +import time + +import pytest + +from dagron import Effect +from dagron.contentcache import ContentCache +from dagron.trace import ( + ReplayedNode, + TraceReader, + TraceRecord, + TraceWriter, + list_runs, + new_run_id, + replay, +) + +# --------------------------------------------------------------------------- +# TraceRecord — round-trip +# --------------------------------------------------------------------------- + + +class TestTraceRecord: + def test_round_trip(self): + r = TraceRecord( + timestamp=1234567890.5, + name="my_node", + output_fp="abcd1234", + duration_ns=42, + effect="pure", + replayable=True, + metadata={"k": "v"}, + ) + line = r.to_json() + back = TraceRecord.from_json(line) + assert back == r + + def test_failed_record_round_trip(self): + r = TraceRecord( + timestamp=1.0, + name="bad", + output_fp="", # no output for failures + error="boom", + replayable=False, + ) + back = TraceRecord.from_json(r.to_json()) + assert back == r + + +# --------------------------------------------------------------------------- +# TraceWriter +# --------------------------------------------------------------------------- + + +class TestTraceWriter: + def test_writes_jsonl(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + w.record("a", value=1, effect=Effect.PURE) + w.record("b", value=[1, 2], effect=Effect.READ) + + # Each line is a valid JSON object terminated by \n. 
+ lines = log.read_text().strip().split("\n") + assert len(lines) == 2 + for line in lines: + import json + + json.loads(line) + + def test_payload_stored_in_cas(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + payload = {"complex": [1, 2, {"nested": True}]} + with TraceWriter(log, cas=cas) as w: + rec = w.record("x", value=payload, effect=Effect.PURE) + + # Read the payload back via the CAS using the recorded fingerprint. + fp_bytes = bytes.fromhex(rec.output_fp) + val, hit = cas.get(fp_bytes) + assert hit is True + assert val == payload + + def test_failed_record_has_no_payload(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + rec = w.record("oops", error="something went wrong") + assert rec.output_fp == "" + assert rec.error == "something went wrong" + + def test_effect_drives_replayable_flag(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + r_pure = w.record("p", value=1, effect=Effect.PURE) + r_read = w.record("r", value=2, effect=Effect.READ) + r_write = w.record("w", value=3, effect=Effect.WRITE) + r_net = w.record("n", value=4, effect=Effect.NETWORK) + r_nd = w.record("nd", value=5, effect=Effect.NONDETERMINISTIC) + + assert r_pure.replayable is True + assert r_read.replayable is True + assert r_write.replayable is False + assert r_net.replayable is False + assert r_nd.replayable is False + + +# --------------------------------------------------------------------------- +# TraceReader +# --------------------------------------------------------------------------- + + +class TestTraceReader: + def test_reads_records_back(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + t0 = time.time() + with TraceWriter(log, cas=cas) as w: + w.record("a", value=1, effect=Effect.PURE, 
timestamp=t0) + w.record("b", value=2, effect=Effect.PURE, timestamp=t0 + 1) + + reader = TraceReader(log, cas=cas) + recs = list(reader.records()) + assert [r.name for r in recs] == ["a", "b"] + + def test_records_until(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + t0 = 1000.0 + with TraceWriter(log, cas=cas) as w: + for i in range(5): + w.record(f"n{i}", value=i, effect=Effect.PURE, timestamp=t0 + i) + + reader = TraceReader(log, cas=cas) + names = [r.name for r in reader.records_until(t0 + 2.5)] + assert names == ["n0", "n1", "n2"] + + def test_timeline(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + w.record("first", value=1, effect=Effect.PURE, timestamp=10.0) + w.record("second", value=2, effect=Effect.PURE, timestamp=20.0) + + reader = TraceReader(log, cas=cas) + assert reader.timeline() == [(10.0, "first"), (20.0, "second")] + + def test_empty_or_missing_log(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + reader = TraceReader(tmp_path / "no-such.jsonl", cas=cas) + assert list(reader.records()) == [] + assert reader.timeline() == [] + + def test_skips_malformed_lines(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + # Manually write some good and some bad lines. + with TraceWriter(log, cas=cas) as w: + w.record("ok", value=1, effect=Effect.PURE, timestamp=1.0) + # Corrupt: append garbage in the middle + with log.open("ab") as fh: + fh.write(b"this is not json\n") + fh.write(b'{"missing": "fields"}\n') + + # Append another good line. 
+ with TraceWriter(log, cas=cas) as w: + w.record("ok2", value=2, effect=Effect.PURE, timestamp=2.0) + + reader = TraceReader(log, cas=cas) + names = [r.name for r in reader.records()] + assert names == ["ok", "ok2"] + + +# --------------------------------------------------------------------------- +# replay() — the headline feature +# --------------------------------------------------------------------------- + + +class TestReplay: + def test_replay_at_end(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + t0 = 100.0 + with TraceWriter(log, cas=cas) as w: + w.record("a", value=[1, 2, 3], effect=Effect.PURE, timestamp=t0) + w.record("b", value="hello", effect=Effect.PURE, timestamp=t0 + 1) + + state = replay(log, cas=cas) + assert set(state.keys()) == {"a", "b"} + assert state["a"].value == [1, 2, 3] + assert state["b"].value == "hello" + assert all(s.replayable for s in state.values()) + + def test_replay_at_intermediate_time(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + t0 = 100.0 + with TraceWriter(log, cas=cas) as w: + w.record("a", value=1, effect=Effect.PURE, timestamp=t0) + w.record("b", value=2, effect=Effect.PURE, timestamp=t0 + 10) + w.record("c", value=3, effect=Effect.PURE, timestamp=t0 + 20) + + # Cutoff between b and c. + state = replay(log, at=t0 + 15, cas=cas) + assert set(state.keys()) == {"a", "b"} + + def test_replay_returns_byte_identical_payload(self, tmp_path): + # The headline claim: pure nodes' replayed values are == their original. 
+ cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + complex_payload = { + "rows": [{"id": i, "name": f"r{i}"} for i in range(50)], + "meta": {"version": 7, "tags": ["a", "b", "c"]}, + } + + with TraceWriter(log, cas=cas) as w: + w.record("snapshot", value=complex_payload, effect=Effect.PURE) + + state = replay(log, cas=cas) + assert state["snapshot"].value == complex_payload + + def test_impure_node_marked_non_replayable_but_value_present(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + w.record("send", value="ok", effect=Effect.NETWORK) + + state = replay(log, cas=cas) + node = state["send"] + assert node.value == "ok" + assert node.replayable is False + assert node.has_value is True + + def test_failed_node_has_error_no_value(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + w.record("breaks", error="boom") + + state = replay(log, cas=cas) + node = state["breaks"] + assert node.value is None + assert node.error == "boom" + assert node.has_value is False + + def test_re_recorded_node_takes_latest_value(self, tmp_path): + # If the same node was recorded multiple times (e.g., a retry), + # the latest record up to `at` wins. + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + with TraceWriter(log, cas=cas) as w: + w.record("retry", value=1, effect=Effect.PURE, timestamp=1.0) + w.record("retry", value=2, effect=Effect.PURE, timestamp=2.0) + w.record("retry", value=3, effect=Effect.PURE, timestamp=3.0) + + # Cutoff at t=2 → second value wins. + state = replay(log, at=2.0, cas=cas) + assert state["retry"].value == 2 + # Cutoff at end → third value wins. 
+ state = replay(log, cas=cas) + assert state["retry"].value == 3 + + +# --------------------------------------------------------------------------- +# Process restart resilience +# --------------------------------------------------------------------------- + + +class TestPersistence: + def test_log_survives_writer_close_and_reopen(self, tmp_path): + # Simulating "process restart" — write, close, then read from a fresh reader. + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + # Process A: write + w = TraceWriter(log, cas=cas) + w.record("a", value="from process A", effect=Effect.PURE) + w.close() + + # Process B (fresh reader, fresh CAS handle): read + cas2 = ContentCache(cache_dir=tmp_path / "cas") + reader = TraceReader(log, cas=cas2) + state = replay(reader) + assert state["a"].value == "from process A" + + def test_append_across_writer_lifetimes(self, tmp_path): + cas = ContentCache(cache_dir=tmp_path / "cas") + log = tmp_path / "run.jsonl" + + for i in range(3): + with TraceWriter(log, cas=cas) as w: + w.record(f"n{i}", value=i, effect=Effect.PURE, timestamp=float(i)) + + reader = TraceReader(log, cas=cas) + names = [r.name for r in reader.records()] + assert names == ["n0", "n1", "n2"] + + +# --------------------------------------------------------------------------- +# Convenience helpers +# --------------------------------------------------------------------------- + + +class TestConvenience: + def test_new_run_id_is_unique(self): + assert new_run_id() != new_run_id() + + def test_list_runs(self, tmp_path): + # No runs yet + assert list_runs(tmp_path) == [] + + # Create a couple of trace files. 
+ (tmp_path / "alpha.jsonl").write_text("") + (tmp_path / "beta.jsonl").write_text("") + (tmp_path / "ignored.txt").write_text("") + + runs = list_runs(tmp_path) + names = sorted(p.name for p in runs) + assert names == ["alpha.jsonl", "beta.jsonl"] + + +# --------------------------------------------------------------------------- +# ReplayedNode dataclass +# --------------------------------------------------------------------------- + + +def test_replayed_node_has_value_property(): + n = ReplayedNode(name="ok", timestamp=1.0, value=42) + assert n.has_value is True + bad = ReplayedNode(name="x", timestamp=1.0, value=None, error="boom") + assert bad.has_value is False + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) From f5c80143f053b515ba9502836be537fcf954e7b5 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 01:38:28 +0530 Subject: [PATCH 07/14] docs: add typed-handles + reactive guide and CHANGELOG New guide page docs/pages/guide/typed-and-reactive.mdx walks through NodeRef, @flow, generic typing, effects, reactive engine, content cache, and replay. New API reference docs/pages/api/utilities/ modern-api.mdx documents every new public symbol. Sidebar wired up. CHANGELOG bumped with [Unreleased] section listing all additions. --- CHANGELOG.md | 21 ++ docs/pages/api/utilities/modern-api.mdx | 199 ++++++++++++++++ docs/pages/guide/typed-and-reactive.mdx | 298 ++++++++++++++++++++++++ docs/sidebars.ts | 2 + 4 files changed, 520 insertions(+) create mode 100644 docs/pages/api/utilities/modern-api.mdx create mode 100644 docs/pages/guide/typed-and-reactive.mdx diff --git a/CHANGELOG.md b/CHANGELOG.md index eb061d9..248b233 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,27 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
+## [Unreleased] + +### Added + +- **Typed `NodeRef` handles** — `dag.add_node()` now returns a stable `NodeRef`. Every public method that takes a node identifier accepts either a `str` name or a `NodeRef`, so existing string-based code keeps working unchanged. NodeRefs survive unrelated graph mutations and detect remove-then-readd via per-node epochs (`StaleNodeRefError`). +- **`@dagron.flow` Pythonic compose API** — Tawazi-style: write a regular Python function that calls `@task`-decorated tasks; the call structure becomes the DAG. `pipeline.dag()` returns the underlying DAG; `pipeline()` runs it. Compatible with the legacy parameter-name-based `Pipeline` (the same `@task` decorator powers both). +- **Generic typing + `dagron.stubgen`** — `FlowFuture[T]`, `NodeResult[T]`, `ExecutionResult.__getitem__` overloads typed by `FlowFuture[T]`. `dagron.stubgen.generate_stub(dag, types)` emits a `.pyi` with `Literal["nodename"] -> NodeResult[T]` overloads, so even string-keyed lookups become statically typed. `@task` is a passthrough decorator with `[**P, R]` ParamSpec — IDE autocomplete and mypy both work. +- **Effect-typed nodes** — `dagron.Effect` enum (`PURE`/`READ`/`WRITE`/`NETWORK`/`NONDETERMINISTIC`) with `is_cacheable`/`is_deterministic`/`is_isolated` properties. `@task(effect=Effect.NETWORK)` tags impurity. AST-scan heuristic emits a `UserWarning` when a `PURE` task contains obviously-impure calls (`time.time`, `random.*`, `os.*`, etc.). `effects_of(dag)` reads tags back from DAG metadata. New `DAGExecutor(enforce_effect_isolation=True)` flag serializes `NONDETERMINISTIC` tasks while letting other effects parallelize freely. +- **`dagron.reactive` — Solid.js / Jane-Street-`Incremental` style reactive engine** — `Signal` / `Computed` / `Watcher` with auto-tracked dependencies. Mutating one signal that feeds 10,000 derived nodes and reading just one of them takes ~10 µs on the recompute path. 
`batch()` context manager guarantees glitch-free updates: multiple signal mutations coalesce into a single watcher fire. Distinct from the existing `dagron.execution.reactive.ReactiveDAG` (which wraps a pre-built DAG); the new module is for building reactive graphs from scratch.
+- **`dagron.contentcache` — Nix-flake-style cross-process cache** — `ContentCache` stores cached values keyed by their content fingerprint. The filesystem itself is the index — independent processes (CI workers, two terminals) share intermediates without coordination. Atomic temp-file + rename writes, magic-byte header, sharded layout `<cache_dir>/<shard>/<rest>.cache`. Pluggable `Hasher` protocol with `default_hash` (pickle + blake2b) and `numpy_hash` (uses `array.tobytes()`). `compute_or_cached` is effect-aware and skips the cache for `WRITE`/`NETWORK`/`NONDETERMINISTIC` automatically. Honors `$DAGRON_CACHE_DIR`.
+- **`dagron.trace` — time-travel debugging with `replay(at=t)`** — `TraceWriter` appends per-node JSONL records; payloads are stored in the `ContentCache` keyed by output fingerprint, so identical values across runs deduplicate. `TraceReader` reads back. `replay(source, at=t)` reconstructs the per-node `ReplayedNode` state at any past wall-clock instant. Pure / READ nodes replay byte-identically; impure nodes are flagged `replayable=False` but their logged values are still surfaced. Re-recorded nodes (retries) take the latest value up to the cutoff. Honors `$DAGRON_TRACE_DIR`.
+
+### Changed
+
+- `DAG.add_node()` now returns `NodeRef` instead of `NodeId`. `NodeId` is still returned by enumeration methods (`nodes()`, `successors()`, `roots()`, …) where a snapshot identifier is appropriate.
+- `NodeData::name` is now `Arc<str>` (was `String`) — cheaper to share between handles.
+- `DAGExecutor.execute` and `AsyncDAGExecutor.execute` accept `Mapping[str | NodeRef, Callable]` for the `tasks` parameter.
+- `ExecutionResult.__getitem__` and `__contains__` accept `str`, `NodeRef`, or `FlowFuture[T]`.
+- `NodeResult` is now `NodeResult[T]` (PEP 695 generic). Existing references default to `NodeResult[Any]` and remain backwards compatible. +- `@task` is now flow-aware: outside a `@flow` body it executes normally; inside one it records the call and returns `FlowFuture[T]`. The same decorator works for both the legacy `Pipeline` and the new `@flow` API. + ## [0.1.0] - 2026-03-06 ### Added diff --git a/docs/pages/api/utilities/modern-api.mdx b/docs/pages/api/utilities/modern-api.mdx new file mode 100644 index 0000000..d042544 --- /dev/null +++ b/docs/pages/api/utilities/modern-api.mdx @@ -0,0 +1,199 @@ +--- +sidebar_position: 7 +title: Modern API (NodeRef, flow, reactive, contentcache, trace) +description: API reference for the typed-handles + uniqueness modules — dagron.NodeRef, dagron.flow, dagron.Effect, dagron.reactive, dagron.contentcache, dagron.trace, dagron.stubgen. +--- + +# Modern API + +API reference for the typed-handles and uniqueness modules. For a +walkthrough of how they compose, see the +[Typed Handles & Reactive Engine](../../guide/typed-and-reactive) guide. + +## `dagron.NodeRef` + +```python +class NodeRef: + name: str + epoch: int +``` + +Stable handle returned by `DAG.add_node()`. Survives unrelated graph +mutations; invalidated only when the underlying node is removed (or +remove-and-readded with the same name, which yields a fresh epoch). + +```python +dag.node_ref(name: str) -> NodeRef | None +``` + +Look up the current ref for a name without mutating the DAG. + +Every public method that previously took `name: str` now takes `NodeArg = str | NodeRef`: +`add_edge`, `remove_node`, `has_node`, `has_edge`, `get_payload`, `set_payload`, +`predecessors`, `successors`, `ancestors`, `descendants`, `subgraph`, +`subgraph_by_depth`, `collapse`, `dominator_tree`, `all_paths`, +`shortest_path`, `longest_path`, `dirty_set`, `change_provenance`, +`is_ancestor`, and the `ReachabilityIndex` query methods. + +Stale refs raise `dagron.StaleNodeRefError`. 
+ +## `dagron.flow` + +```python +@dagron.task +def fn(...) -> T: ... + +@dagron.task(effect=Effect.NETWORK) +def fn(...) -> T: ... + +@dagron.flow +def pipeline(...) -> FlowFuture[T] | None: ... +``` + +| Member | Purpose | +|---|---| +| `task` | Decorator. Outside a `@flow`, executes normally. Inside one, records the call and returns `FlowFuture[R]`. Supports `effect=` keyword (defaults to `Effect.PURE`). | +| `flow` | Decorator. Wraps a function as a `Flow`. | +| `Flow.dag()` | Trace the body and return the built `DAG`. | +| `Flow.run(*args, **kwargs)` | Trace, build, execute synchronously → `ExecutionResult`. | +| `Flow.run_async(...)` | Async variant. | +| `Flow.__call__(...)` | Sugar for `run`. | +| `FlowFuture[T]` | Generic placeholder returned from `@task` calls inside a `@flow`. Pass to other tasks to wire deps. | +| `TaskSpec` | Metadata attached to every `@task` (`name`, `fn`, `dependencies`, `is_async`, `effect`). | + +`batch()` semantics aren't part of `dagron.flow` — they live in +[`dagron.reactive`](#dagronreactive). + +## `dagron.Effect` + +```python +class Effect(Enum): + PURE = "pure" + READ = "read" + WRITE = "write" + NETWORK = "network" + NONDETERMINISTIC = "nondeterministic" + + is_cacheable: bool + is_deterministic: bool + is_isolated: bool +``` + +```python +def effects_of(dag: DAG) -> dict[str, Effect] +``` + +Read every node's effect tag from a DAG built by `@dagron.flow`. Returns +`Effect.PURE` for nodes without a tag. + +`DAGExecutor(enforce_effect_isolation=True)` reads these tags and runs +`NONDETERMINISTIC` nodes through a shared lock, so they don't overlap. + +## `dagron.reactive` + +```python +import dagron.reactive as dr + +s = dr.signal(value) # → Signal[T] +c = dr.computed(lambda: ...) # → Computed[T] +w = dr.watch(lambda: ...) # → Watcher (also fires once now) +with dr.batch(): ... # glitch-free coalesced updates +``` + +| Member | API | +|---|---| +| `Signal[T]` | `__call__() -> T`, `set(v: T)`, `peek() -> T` (no tracking). 
Equality-checked sets are no-ops. | +| `Computed[T]` | `__call__() -> T`, `peek() -> T`. Lazy memoised. | +| `Watcher` | Auto-fires when any tracked dep changes. `.dispose()` to detach. | +| `batch()` | Context manager. Defers Watcher fires until the outermost block ends. Multiple signal mutations coalesce into one fire. | +| `signal()` / `computed()` / `watch()` | Convenience factories. | + +Track via thread-local; reads inside a `Computed` body or `Watcher` body +register the source as a dep. Observers are held by `weakref.WeakSet` +so dropped derived nodes don't leak. + +## `dagron.contentcache` + +```python +from dagron.contentcache import ContentCache, default_cache_dir + +cache = ContentCache(cache_dir=None, hasher=None) +cache.compute_or_cached(fn, args=(), kwargs=None, effect=None) -> tuple[Any, bool] +cache.get(fingerprint: bytes) -> tuple[Any, bool] +cache.put(fingerprint: bytes, value: Any) -> None +cache.has(fingerprint: bytes) -> bool +cache.delete(fingerprint: bytes) -> None +cache.clear() -> None +cache.hash(value: Any) -> bytes # delegates to the configured Hasher +``` + +| Helper | Purpose | +|---|---| +| `default_cache_dir()` | `$DAGRON_CACHE_DIR` or `~/.cache/dagron/cas`. | +| `default_hash(value)` | pickle + blake2b 256-bit. Falls back to `repr()` for unpickleable inputs. | +| `numpy_hash(value)` | `array.tobytes()` for numpy arrays; falls back to `default_hash`. | +| `fingerprint_function(fn)` | Hashes `co_code`, `co_consts`, `co_freevars`, qualname, Python major.minor. | +| `fingerprint_node(fn, effect, input_fingerprints)` | Composite fingerprint used as the cache key. | + +`compute_or_cached` is **effect-aware**: `WRITE` / `NETWORK` / +`NONDETERMINISTIC` skip the cache entirely; `PURE` and `READ` go through +it. + +Storage layout: `///.cache` where the +fingerprint hex is ``. POSIX `rename(2)` makes writes +atomic. The filesystem itself is the index — independent processes +share intermediates with no coordination. 
+ +## `dagron.trace` + +```python +from dagron.trace import TraceWriter, TraceReader, TraceRecord, ReplayedNode, replay + +writer = TraceWriter(path, cas=None) +writer.record(name, *, value=None, effect=None, duration_ns=0, + error=None, metadata=None, timestamp=None) -> TraceRecord +writer.flush() +writer.close() # also via context manager + +reader = TraceReader(path, cas=None) +reader.records() -> Iterator[TraceRecord] +reader.records_until(t, *, inclusive=True) -> Iterator[TraceRecord] +reader.timeline() -> list[tuple[float, str]] +reader.fetch(rec) -> Any # resolves payload via the CAS + +replay(source, *, at=None, cas=None) -> dict[str, ReplayedNode] +``` + +`ReplayedNode` carries `name`, `timestamp`, `value`, `effect`, +`replayable`, `duration_ns`, `error`, and a derived `has_value` +property. `replayable` mirrors `effect.is_deterministic`: pure / READ +nodes can be reproduced; impure nodes' values are *what that run +produced*, not what a fresh run would produce. + +| Helper | Purpose | +|---|---| +| `default_trace_dir()` | `$DAGRON_TRACE_DIR` or `~/.cache/dagron/traces`. | +| `new_run_id()` | 16-hex-char random id for naming a run's log file. | +| `list_runs(trace_dir=None)` | Every `*.jsonl` under `trace_dir`. | + +Logs are append-only JSONL. Payloads live in the bound `ContentCache`, +deduplicated across runs that produced the same value. + +## `dagron.stubgen` + +```python +from dagron.stubgen import generate_stub + +generate_stub( + dag, + *, + type_hints: dict[str, type | str] | None = None, + tasks: dict[str, Callable] | None = None, + name: str = "TypedExecutionResult", +) -> str +``` + +Emits `.pyi`-formatted source declaring a class with `Literal["nodename"]` +overloads typed by inferred (or explicitly provided) return types. +Drop the result into a stub file alongside your code so even string-keyed +`result["nodename"]` lookups become statically typed. 
diff --git a/docs/pages/guide/typed-and-reactive.mdx b/docs/pages/guide/typed-and-reactive.mdx new file mode 100644 index 0000000..6ac9a10 --- /dev/null +++ b/docs/pages/guide/typed-and-reactive.mdx @@ -0,0 +1,298 @@ +--- +sidebar_position: 7 +title: Typed Handles & Reactive Engine +description: NodeRef typed handles, @dagron.flow compose API, generic FlowFuture / NodeResult, effect tags, the reactive Signal/Computed/Watcher engine, the cross-process content cache, and time-travel replay. +--- + +# Typed Handles & Reactive Engine + +dagron ships seven coordinated additions that move beyond stringly-typed +node addressing and add four headline differentiators no other Python DAG +library combines: typed handles, a Tawazi-style flow API, fine-grained +reactive recomputation, content-addressed cross-process caching, and +time-travel replay. Existing string-based code keeps working — every new +feature is opt-in. + +## 1. `NodeRef` — typed node handles + +`dag.add_node()` returns a stable `NodeRef`. Every public method that +takes a node identifier accepts both `str` and `NodeRef`, so existing +code keeps working. + +```python +from dagron import DAG, NodeRef + +dag = DAG() +extract = dag.add_node("extract") # NodeRef +transform = dag.add_node("transform") # NodeRef + +dag.add_edge(extract, transform) # NodeRef → NodeRef +dag.add_edge("extract", transform) # str → NodeRef +dag.add_edge(extract, "transform") # NodeRef → str + +isinstance(extract, NodeRef) # True +extract.name # "extract" +extract.epoch # 0 +``` + +NodeRefs survive unrelated mutations (adding other nodes / edges) and +detect *remove-then-readd*: removing `"extract"` and re-adding a node +with the same name produces a NodeRef with a different epoch, so the old +reference correctly raises `StaleNodeRefError`. 
+ +```python +import pytest +from dagron import StaleNodeRefError + +dag.remove_node(extract) +new_extract = dag.add_node("extract") # fresh epoch +with pytest.raises(StaleNodeRefError): + dag.has_edge(extract, transform) # the old extract is stale +``` + +## 2. `@dagron.flow` — Pythonic compose API + +Build a DAG by writing a regular Python function. Each `@task` call +inside a `@flow` body records a node; passing one task's return value to +another wires the edge. No string IDs, no fluent builder — just Python. + +```python +import dagron + +@dagron.task +def fetch() -> list[int]: + return [1, 2, 3, 4] + +@dagron.task +def total(rows: list[int]) -> int: + return sum(rows) + +@dagron.task +def label(value: int) -> str: + return f"Total = {value}" + +@dagron.flow +def pipeline(): + return label(total(fetch())) + +dag = pipeline.dag() # the underlying DAG, for analysis +result = pipeline() # builds + runs → ExecutionResult +result["label"].result # "Total = 10" +``` + +The same `@task` decorator is compatible with the legacy parameter-name +inference of `Pipeline`, so a single set of tasks can power both APIs. +Inside a `@flow` context, calling `transform(raw)` returns a +`FlowFuture[T]` placeholder; outside one, it executes normally. + +## 3. Generic typing & `dagron.stubgen` + +`FlowFuture[T]` and `NodeResult[T]` carry the wrapped task's return type +all the way through: + +```python +from dagron import FlowFuture +from dagron.execution._types import NodeResult + +@dagron.task +def fetch() -> list[int]: ... + +@dagron.task +def total(rows: list[int]) -> int: ... 
+ +@dagron.flow +def pipeline(): + raw = fetch() # type-checks as list[int] + return total(raw) # type-checks as int + +result = pipeline() +result[fetch].result # NodeResult[list[int]] → list[int] +result[total].result # NodeResult[int] → int +``` + +For string-keyed lookups, generate a stub: + +```python +from dagron.stubgen import generate_stub + +stub = generate_stub( + pipeline.dag(), + tasks={"fetch": fetch, "total": total}, + name="PipelineResult", +) +print(stub) +# class PipelineResult: +# @overload +# def __getitem__(self, key: Literal['fetch']) -> NodeResult[list[int]]: ... +# @overload +# def __getitem__(self, key: Literal['total']) -> NodeResult[int]: ... +``` + +Save the output as a `.pyi` file alongside your code; `mypy` will type +`result["fetch"]` as `NodeResult[list[int]]` even though `result` itself +is just `ExecutionResult`. + +## 4. Effect tags + +Tag every `@task` with its side-effect class — the engine uses these +for parallelism gating today and for cache / replay semantics in the +features below. + +```python +from dagron import Effect + +@dagron.task # defaults to Effect.PURE +def add(a: int, b: int) -> int: return a + b + +@dagron.task(effect=Effect.NETWORK) +def fetch_user(uid: int) -> dict: ... + +@dagron.task(effect=Effect.NONDETERMINISTIC) +def now() -> float: + import time; return time.time() +``` + +Properties: + +| Effect | `is_cacheable` | `is_deterministic` | `is_isolated` | +|--------|-----:|-----:|-----:| +| `PURE` | ✅ | ✅ | ❌ | +| `READ` | ✅ | ✅ | ❌ | +| `WRITE` | ❌ | ❌ | ❌ | +| `NETWORK` | ❌ | ❌ | ❌ | +| `NONDETERMINISTIC` | ❌ | ❌ | ✅ | + +`@flow` mirrors each task's effect onto its DAG node's metadata; read +back with `dagron.effects_of(dag)`. An AST-scan heuristic emits a +`UserWarning` when a `PURE` task appears to call impure functions +(`time.time`, `random.*`, `os.*`, `requests.*`, …). + +`DAGExecutor(enforce_effect_isolation=True)` serializes +`NONDETERMINISTIC` tasks while letting other effects parallelize freely. 
+ +## 5. Reactive engine — `Signal` / `Computed` / `Watcher` + +`dagron.reactive` provides Solid.js / Jane-Street-`Incremental` style +primitives where the dependency graph is *implicit*: building a +`Computed` records its read dependencies as a side-effect of evaluating +the function. + +```python +import dagron.reactive as dr + +a = dr.signal(1) +b = dr.signal(2) +s = dr.computed(lambda: a() + b()) +p = dr.computed(lambda: s() * 10) + +p() # 30 — initial compute +a.set(5) # invalidates s and p; b untouched +p() # 70 — recomputes only s and p + +@dr.watch +def watch_p(): + print("p =", p()) + +with dr.batch(): # glitch-free + a.set(0) + b.set(0) +# watch_p fires exactly once after the batch, sees p == 0 +``` + +**Headline benchmark**: in a graph of 10,000 derived nodes off one root +signal, mutating the root and reading just one branch takes ~10 µs — +the engine recomputes only the read path, skipping the other 9999 +invalidated-but-unread branches. This is the differentiator no other +Python DAG library delivers. + +This module is distinct from the existing +`dagron.execution.reactive.ReactiveDAG`, which wraps a *pre-built* +`dagron.DAG` and exposes a push-based `subscribe()` / `set_input()` API. +Use whichever fits your shape: the reactive primitives for fresh +dependency graphs you build in code; `ReactiveDAG` to layer reactivity +over a DAG you already have. + +## 6. Cross-process content-addressed cache + +`dagron.contentcache` is Nix-flake-style: the cache is keyed by content +hash, the filesystem path *is* the index, and there's no +`index.json` to keep in sync. Independent processes share intermediates +transparently — a build on one CI worker hits the cache on another the +moment they compute the same fingerprint. + +```python +from dagron import Effect +from dagron.contentcache import ContentCache + +cache = ContentCache() # ~/.cache/dagron/cas + +def expensive(x: int) -> int: + return x * 1000 + +# First call: miss, computes, writes payload to CAS. 
+val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) +# In another process / another day: +val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) +# `hit` is True; the payload deserialized straight from disk. +``` + +Effect-aware: `WRITE` / `NETWORK` / `NONDETERMINISTIC` tasks bypass the +cache entirely (their results aren't reproducible). Pluggable via the +`Hasher` protocol — `default_hash` (pickle + blake2b) handles most +Python types; `numpy_hash` uses `array.tobytes()` for byte equality; +write your own for polars frames or any tobyte-friendly type. Honors +`$DAGRON_CACHE_DIR`. + +## 7. Time-travel replay + +`dagron.trace` writes an append-only JSONL log of node executions; each +record references a payload stored by fingerprint in the +`ContentCache`, so identical values across runs deduplicate +automatically. `replay(at=t)` walks the log up to time `t` and +reconstructs the per-node state. + +```python +from dagron.contentcache import ContentCache +from dagron.trace import TraceWriter, replay + +cas = ContentCache() +log_path = "run-2026-05.jsonl" + +with TraceWriter(log_path, cas=cas) as w: + w.record("fetch", value=[1, 2, 3], effect=Effect.PURE, timestamp=t0) + w.record("transform", value=6, effect=Effect.PURE, timestamp=t0 + 1) + w.record("publish", value="ok", effect=Effect.NETWORK, timestamp=t0 + 2) + +# Days later, in another process: +state = replay(log_path, at=t0 + 1.5, cas=cas) +state["fetch"].value # [1, 2, 3] — byte-identical to the original run +state["transform"].value # 6 +"publish" in state # False — cutoff was before publish ran + +state = replay(log_path, cas=cas) +state["publish"].value # "ok" — surfaced from the log +state["publish"].replayable # False — NETWORK is non-deterministic +``` + +Pure / READ nodes replay byte-identically. 
Impure nodes +(`WRITE`/`NETWORK`/`NONDETERMINISTIC`) are flagged `replayable=False` +but their *logged* values are still exposed, so you can audit what the +run actually produced. Honors `$DAGRON_TRACE_DIR`. + +## How they fit together + +The seven additions are designed to compose: + +* **NodeRef** is the substrate — every later API references nodes by + the typed handle. +* **`@flow`** records call structure into a `dagron.DAG`, mirroring each + task's **effect** onto node metadata. +* **`stubgen`** turns the `@flow`-built DAG into a typed lookup stub. +* **Effects** drive parallelism isolation, cache opt-in, and replay + reproducibility flags — one tag, three downstream behaviours. +* **Reactive** is the "live" face of computation; **content cache** is + its persistent face; **replay** is its retrospective face. + +You can adopt any subset independently. The string-based DAG API, +`Pipeline`, and the existing `ReactiveDAG` / `ContentAddressableCache` +classes remain unchanged. diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 06e6da2..9905bc4 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -11,6 +11,7 @@ const sidebars: SidebarsConfig = { link: { type: 'generated-index', title: 'User Guide', slug: '/guide' }, items: [ 'guide/getting-started', + 'guide/typed-and-reactive', 'guide/benchmarks', 'guide/cookbook', { @@ -124,6 +125,7 @@ const sidebars: SidebarsConfig = { 'api/utilities/display', 'api/utilities/integration', 'api/utilities/plugins', + 'api/utilities/modern-api', ], }, ], From bd0444abb5cc3183081fa3f86578b22d5dd730a9 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 02:15:21 +0530 Subject: [PATCH 08/14] chore(docs): scaffold fumadocs replacement at docs-next/ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Side-by-side migration target — old docs/ stays live until verified. 
Built on Next.js 16 + Fumadocs 16 + Tailwind v4, mirrors taskito/docs/ layout (src/app/, src/components/{ui,...}, src/lib/). 54 MDX files migrated via scripts/migrate.mjs (drops sidebar_position+slug, removes @site imports, converts :::tip to ). Reusable components: ui primitives (Button, CodePanel, SectionHeader), MDX-globals (DagDiagram, StatusBadge, EffectBadge, FeatureCard/Grid, ApiSignature, ParamTable), client-side themed Mermaid. meta.json per directory preserves the old sidebar order. Build passes; pnpm lint + types:check clean. Deploy still GitHub Pages /dagron via DOCS_BASE_PATH env. --- docs-next/.gitignore | 26 + docs-next/README.md | 51 + docs-next/biome.json | 42 + .../content/docs/api/analysis/analysis.mdx | 618 ++ .../content/docs/api/analysis/contracts.mdx | 315 + .../content/docs/api/analysis/dataframe.mdx | 374 ++ docs-next/content/docs/api/analysis/meta.json | 5 + docs-next/content/docs/api/core/builder.mdx | 267 + docs-next/content/docs/api/core/core.mdx | 863 +++ docs-next/content/docs/api/core/errors.mdx | 274 + docs-next/content/docs/api/core/meta.json | 5 + .../content/docs/api/execution/caching.mdx | 367 ++ .../content/docs/api/execution/checkpoint.mdx | 256 + .../content/docs/api/execution/conditions.mdx | 278 + .../docs/api/execution/distributed.mdx | 421 ++ .../content/docs/api/execution/dynamic.mdx | 262 + .../content/docs/api/execution/execution.mdx | 345 + .../content/docs/api/execution/gates.mdx | 348 + .../docs/api/execution/incremental.mdx | 256 + .../content/docs/api/execution/meta.json | 17 + .../content/docs/api/execution/pipeline.mdx | 330 + .../content/docs/api/execution/reactive.mdx | 376 ++ .../content/docs/api/execution/resources.mdx | 371 ++ docs-next/content/docs/api/meta.json | 5 + .../content/docs/api/observability/meta.json | 5 + .../docs/api/observability/profiling.mdx | 268 + .../docs/api/observability/tracing.mdx | 324 + .../content/docs/api/utilities/compose.mdx | 247 + .../content/docs/api/utilities/display.mdx | 
251 + .../docs/api/utilities/integration.mdx | 236 + .../content/docs/api/utilities/meta.json | 13 + .../content/docs/api/utilities/modern-api.mdx | 198 + .../content/docs/api/utilities/plugins.mdx | 502 ++ .../content/docs/api/utilities/template.mdx | 363 ++ .../content/docs/api/utilities/versioning.mdx | 408 ++ .../content/docs/guide/advanced/contracts.mdx | 384 ++ .../docs/guide/advanced/dataframes.mdx | 475 ++ .../content/docs/guide/advanced/meta.json | 11 + .../docs/guide/advanced/plugins-hooks.mdx | 532 ++ .../content/docs/guide/advanced/templates.mdx | 452 ++ .../docs/guide/advanced/versioning.mdx | 437 ++ docs-next/content/docs/guide/architecture.mdx | 158 + docs-next/content/docs/guide/benchmarks.mdx | 166 + docs-next/content/docs/guide/cookbook.mdx | 340 + .../guide/core-concepts/building-dags.mdx | 516 ++ .../guide/core-concepts/executing-tasks.mdx | 584 ++ .../guide/core-concepts/inspecting-graphs.mdx | 496 ++ .../docs/guide/core-concepts/meta.json | 11 + .../guide/core-concepts/serialization.mdx | 481 ++ .../docs/guide/core-concepts/transforms.mdx | 527 ++ .../execution-strategies/approval-gates.mdx | 462 ++ .../guide/execution-strategies/caching.mdx | 457 ++ .../execution-strategies/checkpointing.mdx | 473 ++ .../execution-strategies/conditional.mdx | 452 ++ .../execution-strategies/distributed.mdx | 556 ++ .../execution-strategies/dynamic-dags.mdx | 471 ++ .../execution-strategies/incremental.mdx | 393 ++ .../docs/guide/execution-strategies/meta.json | 14 + .../resource-scheduling.mdx | 480 ++ .../content/docs/guide/getting-started.mdx | 389 ++ docs-next/content/docs/guide/meta.json | 16 + .../guide/observability/error-handling.mdx | 603 ++ .../docs/guide/observability/meta.json | 5 + .../guide/observability/tracing-profiling.mdx | 446 ++ .../guide/observability/visualization.mdx | 439 ++ .../content/docs/guide/typed-and-reactive.mdx | 297 + docs-next/content/docs/guide/why-dagron.mdx | 52 + docs-next/content/docs/index.mdx | 184 + 
docs-next/content/docs/meta.json | 4 + docs-next/next.config.mjs | 23 + docs-next/package.json | 40 + docs-next/pnpm-lock.yaml | 5659 +++++++++++++++++ docs-next/postcss.config.mjs | 7 + docs-next/public/.nojekyll | 0 docs-next/public/img/favicon.ico | Bin 0 -> 3626 bytes docs-next/public/img/logo.svg | 1 + docs-next/scripts/migrate.mjs | 135 + docs-next/source.config.ts | 23 + docs-next/src/app/(docs)/[...slug]/page.tsx | 67 + docs-next/src/app/(docs)/layout.tsx | 11 + .../src/app/(home)/_sections/features.tsx | 146 + docs-next/src/app/(home)/_sections/hero.tsx | 64 + docs-next/src/app/(home)/_sections/index.ts | 2 + docs-next/src/app/(home)/layout.tsx | 6 + docs-next/src/app/(home)/page.tsx | 10 + docs-next/src/app/api/search/route.ts | 9 + docs-next/src/app/global.css | 59 + docs-next/src/app/layout.tsx | 40 + docs-next/src/app/llms-full.txt/route.ts | 10 + docs-next/src/app/llms.txt/route.ts | 8 + docs-next/src/components/api-signature.tsx | 48 + docs-next/src/components/dag-diagram.tsx | 26 + docs-next/src/components/diagram-carousel.tsx | 149 + docs-next/src/components/effect-badge.tsx | 43 + docs-next/src/components/feature-card.tsx | 86 + docs-next/src/components/mdx.tsx | 46 + docs-next/src/components/mermaid.tsx | 174 + docs-next/src/components/param-table.tsx | 64 + docs-next/src/components/provider.tsx | 8 + docs-next/src/components/search.tsx | 54 + docs-next/src/components/status-badge.tsx | 46 + docs-next/src/components/ui/button.tsx | 56 + docs-next/src/components/ui/code-panel.tsx | 83 + docs-next/src/components/ui/index.ts | 10 + .../src/components/ui/section-header.tsx | 32 + docs-next/tsconfig.json | 35 + 106 files changed, 28330 insertions(+) create mode 100644 docs-next/.gitignore create mode 100644 docs-next/README.md create mode 100644 docs-next/biome.json create mode 100644 docs-next/content/docs/api/analysis/analysis.mdx create mode 100644 docs-next/content/docs/api/analysis/contracts.mdx create mode 100644 
docs-next/content/docs/api/analysis/dataframe.mdx create mode 100644 docs-next/content/docs/api/analysis/meta.json create mode 100644 docs-next/content/docs/api/core/builder.mdx create mode 100644 docs-next/content/docs/api/core/core.mdx create mode 100644 docs-next/content/docs/api/core/errors.mdx create mode 100644 docs-next/content/docs/api/core/meta.json create mode 100644 docs-next/content/docs/api/execution/caching.mdx create mode 100644 docs-next/content/docs/api/execution/checkpoint.mdx create mode 100644 docs-next/content/docs/api/execution/conditions.mdx create mode 100644 docs-next/content/docs/api/execution/distributed.mdx create mode 100644 docs-next/content/docs/api/execution/dynamic.mdx create mode 100644 docs-next/content/docs/api/execution/execution.mdx create mode 100644 docs-next/content/docs/api/execution/gates.mdx create mode 100644 docs-next/content/docs/api/execution/incremental.mdx create mode 100644 docs-next/content/docs/api/execution/meta.json create mode 100644 docs-next/content/docs/api/execution/pipeline.mdx create mode 100644 docs-next/content/docs/api/execution/reactive.mdx create mode 100644 docs-next/content/docs/api/execution/resources.mdx create mode 100644 docs-next/content/docs/api/meta.json create mode 100644 docs-next/content/docs/api/observability/meta.json create mode 100644 docs-next/content/docs/api/observability/profiling.mdx create mode 100644 docs-next/content/docs/api/observability/tracing.mdx create mode 100644 docs-next/content/docs/api/utilities/compose.mdx create mode 100644 docs-next/content/docs/api/utilities/display.mdx create mode 100644 docs-next/content/docs/api/utilities/integration.mdx create mode 100644 docs-next/content/docs/api/utilities/meta.json create mode 100644 docs-next/content/docs/api/utilities/modern-api.mdx create mode 100644 docs-next/content/docs/api/utilities/plugins.mdx create mode 100644 docs-next/content/docs/api/utilities/template.mdx create mode 100644 
docs-next/content/docs/api/utilities/versioning.mdx create mode 100644 docs-next/content/docs/guide/advanced/contracts.mdx create mode 100644 docs-next/content/docs/guide/advanced/dataframes.mdx create mode 100644 docs-next/content/docs/guide/advanced/meta.json create mode 100644 docs-next/content/docs/guide/advanced/plugins-hooks.mdx create mode 100644 docs-next/content/docs/guide/advanced/templates.mdx create mode 100644 docs-next/content/docs/guide/advanced/versioning.mdx create mode 100644 docs-next/content/docs/guide/architecture.mdx create mode 100644 docs-next/content/docs/guide/benchmarks.mdx create mode 100644 docs-next/content/docs/guide/cookbook.mdx create mode 100644 docs-next/content/docs/guide/core-concepts/building-dags.mdx create mode 100644 docs-next/content/docs/guide/core-concepts/executing-tasks.mdx create mode 100644 docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx create mode 100644 docs-next/content/docs/guide/core-concepts/meta.json create mode 100644 docs-next/content/docs/guide/core-concepts/serialization.mdx create mode 100644 docs-next/content/docs/guide/core-concepts/transforms.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/approval-gates.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/caching.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/checkpointing.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/conditional.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/distributed.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/incremental.mdx create mode 100644 docs-next/content/docs/guide/execution-strategies/meta.json create mode 100644 docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx create mode 100644 docs-next/content/docs/guide/getting-started.mdx create mode 100644 
docs-next/content/docs/guide/meta.json create mode 100644 docs-next/content/docs/guide/observability/error-handling.mdx create mode 100644 docs-next/content/docs/guide/observability/meta.json create mode 100644 docs-next/content/docs/guide/observability/tracing-profiling.mdx create mode 100644 docs-next/content/docs/guide/observability/visualization.mdx create mode 100644 docs-next/content/docs/guide/typed-and-reactive.mdx create mode 100644 docs-next/content/docs/guide/why-dagron.mdx create mode 100644 docs-next/content/docs/index.mdx create mode 100644 docs-next/content/docs/meta.json create mode 100644 docs-next/next.config.mjs create mode 100644 docs-next/package.json create mode 100644 docs-next/pnpm-lock.yaml create mode 100644 docs-next/postcss.config.mjs create mode 100644 docs-next/public/.nojekyll create mode 100644 docs-next/public/img/favicon.ico create mode 100644 docs-next/public/img/logo.svg create mode 100644 docs-next/scripts/migrate.mjs create mode 100644 docs-next/source.config.ts create mode 100644 docs-next/src/app/(docs)/[...slug]/page.tsx create mode 100644 docs-next/src/app/(docs)/layout.tsx create mode 100644 docs-next/src/app/(home)/_sections/features.tsx create mode 100644 docs-next/src/app/(home)/_sections/hero.tsx create mode 100644 docs-next/src/app/(home)/_sections/index.ts create mode 100644 docs-next/src/app/(home)/layout.tsx create mode 100644 docs-next/src/app/(home)/page.tsx create mode 100644 docs-next/src/app/api/search/route.ts create mode 100644 docs-next/src/app/global.css create mode 100644 docs-next/src/app/layout.tsx create mode 100644 docs-next/src/app/llms-full.txt/route.ts create mode 100644 docs-next/src/app/llms.txt/route.ts create mode 100644 docs-next/src/components/api-signature.tsx create mode 100644 docs-next/src/components/dag-diagram.tsx create mode 100644 docs-next/src/components/diagram-carousel.tsx create mode 100644 docs-next/src/components/effect-badge.tsx create mode 100644 
docs-next/src/components/feature-card.tsx create mode 100644 docs-next/src/components/mdx.tsx create mode 100644 docs-next/src/components/mermaid.tsx create mode 100644 docs-next/src/components/param-table.tsx create mode 100644 docs-next/src/components/provider.tsx create mode 100644 docs-next/src/components/search.tsx create mode 100644 docs-next/src/components/status-badge.tsx create mode 100644 docs-next/src/components/ui/button.tsx create mode 100644 docs-next/src/components/ui/code-panel.tsx create mode 100644 docs-next/src/components/ui/index.ts create mode 100644 docs-next/src/components/ui/section-header.tsx create mode 100644 docs-next/tsconfig.json diff --git a/docs-next/.gitignore b/docs-next/.gitignore new file mode 100644 index 0000000..9e429e4 --- /dev/null +++ b/docs-next/.gitignore @@ -0,0 +1,26 @@ +# deps +/node_modules + +# generated content +.source + +# test & build +/coverage +/.next/ +/out/ +/build +*.tsbuildinfo + +# misc +.DS_Store +*.pem +/.pnp +.pnp.js +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# others +.env*.local +.vercel +next-env.d.ts \ No newline at end of file diff --git a/docs-next/README.md b/docs-next/README.md new file mode 100644 index 0000000..cc081d2 --- /dev/null +++ b/docs-next/README.md @@ -0,0 +1,51 @@ +# dagron docs (Fumadocs) + +Side-by-side replacement for `../docs/` (Docusaurus). Once verified, this +directory will be swapped in as `docs/`. + +## Develop + +```bash +pnpm install +pnpm dev # http://localhost:3000 +``` + +## Build for GitHub Pages + +```bash +DOCS_BASE_PATH=/dagron pnpm build +npx serve out/ # then visit http://localhost:3000/dagron/ +``` + +Local builds without `DOCS_BASE_PATH` serve cleanly from `/`. 
+ +## Lint & types + +```bash +pnpm lint # biome +pnpm types:check # fumadocs-mdx + next typegen + tsc --noEmit +``` + +## Layout + +``` +src/ +├── app/ # Next.js App Router +│ ├── (home)/ # marketing landing +│ ├── (docs)/ # docs sidebar + page renderer +│ └── api/ # Orama search endpoint, llms.txt routes +├── components/ +│ ├── ui/ # generic primitives (Button, CodePanel, SectionHeader) +│ ├── mdx.tsx # global MDX component map +│ ├── mermaid.tsx # client-side mermaid with theme awareness +│ └── ... # dagron-specific (DagDiagram, StatusBadge, FeatureCard, …) +└── lib/ + ├── source.ts # Fumadocs source loader + ├── shared.ts # appName, gitConfig, route constants + └── layout.shared.tsx # nav + sidebar config + +content/docs/ # 54 MDX files, organised by guide/ + api/ +``` + +Components are registered globally in `src/components/mdx.tsx`, so MDX +authors don't need to write `import` lines. diff --git a/docs-next/biome.json b/docs-next/biome.json new file mode 100644 index 0000000..87cba3b --- /dev/null +++ b/docs-next/biome.json @@ -0,0 +1,42 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.4.14/schema.json", + "vcs": { + "enabled": true, + "clientKind": "git", + "useIgnoreFile": true + }, + "files": { + "ignoreUnknown": true, + "includes": [ + "**", + "!node_modules", + "!.next", + "!dist", + "!build", + "!.source", + "!src/app/global.css" + ] + }, + "formatter": { + "enabled": true, + "indentStyle": "space", + "indentWidth": 2 + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + }, + "domains": { + "next": "recommended", + "react": "recommended" + } + }, + "assist": { + "actions": { + "source": { + "organizeImports": "on" + } + } + } +} diff --git a/docs-next/content/docs/api/analysis/analysis.mdx b/docs-next/content/docs/api/analysis/analysis.mdx new file mode 100644 index 0000000..c13d7d9 --- /dev/null +++ b/docs-next/content/docs/api/analysis/analysis.mdx @@ -0,0 +1,618 @@ +--- +title: Analysis +description: API reference for dagron's 
graph analysis toolkit -- explain, what-if, lineage tracking, linting, schema validation, and query DSL. +--- + +# Analysis + +The analysis module provides tools for understanding, validating, and querying +DAG structure. It includes node diagnostics (`explain`), hypothetical mutation +analysis (`what_if`), data lineage tracking, structural linting, schema +validation, and a mini query DSL. + +For a guided introduction, see [Inspecting Graphs](/guide/core-concepts/inspecting-graphs). + +```python +from dagron.analysis import ( + explain, what_if, track_lineage, lint, query, + NodeExplanation, WhatIfResult, LineageReport, + LintReport, DAGSchema, +) +``` + +--- + +## explain + + NodeExplanation`} /> + +Generate a structured diagnostic for a single node in the DAG. The +explanation includes depth, critical path membership, a bottleneck score, +dominator analysis, and dependency information. + + + +**Returns:** `NodeExplanation` -- Frozen dataclass with all diagnostic information. + +```python +from dagron.analysis import explain + +info = explain(dag, "transform") +print(info.summary()) +# Node: transform +# Depth from root: 1 +# On critical path: True +# Bottleneck score: 0.50 +# In-degree: 1, Out-degree: 1 +# Ancestors: 1, Descendants: 1 +# Root: False, Leaf: False +# Blocked by: extract +# Blocks: load +``` + +--- + +## NodeExplanation + + + +Structured diagnostic for a single node in the DAG. + + + +#### NodeExplanation.summary + + str`} /> + +**Returns:** `str` -- Human-readable multi-line summary of the node diagnostic. + +--- + +## what_if + + WhatIfResult`} /> + +Analyze the effect of hypothetical mutations without modifying the original +DAG. The function creates an internal snapshot, applies the proposed changes, +and reports the structural impact. + + + +**Returns:** `WhatIfResult` -- Frozen dataclass describing the impact of the proposed changes. + +```python +from dagron.analysis import what_if + +# What happens if we remove the "validate" node? 
+result = what_if(dag, remove_nodes=["validate"]) +print(result.summary()) + +# Would this edge create a cycle? +result = what_if(dag, add_edges=[("load", "extract")]) +if result.would_create_cycle: + print(f"Cycle detected: {' -> '.join(result.cycle_path)}") +``` + +--- + +## WhatIfResult + + + +Result of a hypothetical graph mutation analysis. + + + +#### WhatIfResult.summary + + str`} /> + +**Returns:** `str` -- Human-readable summary. Reports the cycle path if one would be created, otherwise shows structural statistics. + +--- + +## track_lineage + + LineageReport`} /> + +Convenience function to create a `LineageReport` for post-execution data +lineage analysis. Equivalent to `LineageReport(dag, execution_result)`. + + + +**Returns:** `LineageReport` -- A lineage analysis report. + +```python +from dagron.analysis import track_lineage + +report = track_lineage(dag, result) +print(report.summary()) +``` + +--- + +## LineageReport + + None: ...`} /> + +Post-execution lineage analysis over a DAG and its execution result. Tracks +which upstream nodes contributed to each output, detects broken lineage +(upstream failures that did not prevent downstream execution), and finds +data flow paths between any two nodes. + + + +### Methods + +--- + +#### LineageReport.lineage + + LineageRecord`} /> + +Compute lineage for a single node, filtered to actually-completed upstream nodes. + + + +**Returns:** `LineageRecord` -- The node's upstream provenance. + +**Raises:** `KeyError` -- If the node is not in the DAG. + +```python +record = report.lineage("load") +print(f"Direct inputs: {record.direct_inputs}") +print(f"Full upstream chain: {record.upstream_chain}") +print(f"Depth: {record.depth}") +``` + +--- + +#### LineageReport.impact + + ImpactRecord`} /> + +Compute downstream impact of a single node, restricted to nodes that +actually completed. + + + +**Returns:** `ImpactRecord` -- Downstream impact analysis. + +**Raises:** `KeyError` -- If the node is not in the DAG. 
+ +```python +impact = report.impact("extract") +print(f"Directly affects: {impact.directly_affects}") +print(f"Transitively affects: {impact.transitively_affects}") +print(f"Affected leaves: {impact.affected_leaves}") +``` + +--- + +#### LineageReport.data_flow_path + + list[str] | None`} /> + +Find the shortest path from `source` to `target` where all intermediate +nodes completed successfully. + + + +**Returns:** `list[str] | None` -- Ordered list of node names forming the path, or `None` if no completed path exists. + +--- + +#### LineageReport.broken_lineage + + list[tuple[str, str]]`} /> + +Find edges where the upstream node failed but the downstream node still +completed. This can happen when `fail_fast=False`. + +**Returns:** `list[tuple[str, str]]` -- List of `(upstream, downstream)` tuples. + +--- + +#### LineageReport.full_lineage + + dict[str, LineageRecord]`} /> + +Compute lineage for all completed nodes. + +**Returns:** `dict[str, LineageRecord]` -- Mapping of node name to `LineageRecord` for every successfully completed node. + +--- + +#### LineageReport.summary + + str`} /> + +**Returns:** `str` -- Human-readable summary including total nodes, completed count, source/leaf nodes, and broken lineage edges. + +--- + +## LineageRecord + + + + + +--- + +## ImpactRecord + + + + + +--- + +## lint + + LintReport`} /> + +Analyze a DAG for structural anti-patterns. Checks for high fan-in/fan-out +nodes, disconnected components, redundant transitive edges, excessive depth, +and isolated nodes. + + + +**Returns:** `LintReport` -- Report containing all detected warnings. + +```python +from dagron.analysis import lint + +report = lint(dag, max_fan_in=5, max_fan_out=5) +if not report.ok: + print(report.summary()) +else: + print("DAG passes all lint checks") +``` + +### Lint codes + +| Code | Severity | Description | +|------|----------|-------------| +| `EMPTY_GRAPH` | INFO | DAG has no nodes. 
| +| `HIGH_FAN_IN` | WARNING | Nodes with in-degree exceeding the threshold. | +| `HIGH_FAN_OUT` | WARNING | Nodes with out-degree exceeding the threshold. | +| `DISCONNECTED` | WARNING | DAG has multiple disconnected components. | +| `EXCESSIVE_DEPTH` | WARNING | DAG depth exceeds the threshold. | +| `REDUNDANT_EDGES` | INFO | Transitive edges that could be removed without changing reachability. | +| `ISOLATED_NODES` | WARNING | Nodes with no edges in a graph that has edges. | + +--- + +## LintReport + + + + + +### Properties + +| Property | Type | Description | +|----------|------|-------------| +| `error_count` | `int` | Number of ERROR-severity warnings. | +| `warning_count` | `int` | Number of WARNING-severity warnings. | +| `info_count` | `int` | Number of INFO-severity warnings. | +| `ok` | `bool` | `True` if there are zero errors (warnings and info are allowed). | + +#### LintReport.summary + + str`} /> + +**Returns:** `str` -- Formatted report with counts and individual warning details. + +--- + +## LintWarning + + + + + +--- + +## LintSeverity + + + +Severity levels for lint warnings. + +--- + +## DAGSchema + + None: ...`} /> + +Declarative structural constraints for DAG validation. Define expected +structural properties and validate any DAG against them. + + + +#### DAGSchema.validate + + list[str]`} /> + +Validate a DAG against the schema constraints. + + + +**Returns:** `list[str]` -- List of error messages. An empty list means validation passed. + +```python +from dagron.analysis import DAGSchema + +schema = DAGSchema( + single_root=True, + single_leaf=True, + max_depth=10, + connected=True, + required_nodes=["extract", "load"], + leaf_pattern="output_*", +) + +errors = schema.validate(dag) +if errors: + for err in errors: + print(f"Schema violation: {err}") +else: + print("DAG passes schema validation") +``` + +--- + +## query + + list[str]`} /> + +Select nodes using a concise query expression. 
The query DSL supports +set functions, filters, and set operations. Results are returned in +topological order. + + + +**Returns:** `list[str]` -- Matching node names in topological order. + +**Raises:** `ValueError` -- If the expression is not a valid query. + +### Query syntax + +**Set functions:** + +| Function | Description | +|----------|-------------| +| `roots` | Root nodes (in-degree 0). | +| `leaves` | Leaf nodes (out-degree 0). | +| `critical_path` | Nodes on the critical path. | +| `ancestors(node)` | All transitive ancestors of `node`. | +| `descendants(node)` | All transitive descendants of `node`. | +| `predecessors(node)` | Direct predecessors of `node`. | +| `successors(node)` | Direct successors of `node`. | + +**Filters:** + +| Filter | Description | +|--------|-------------| +| `depth <= N`, `depth >= N`, `depth == N` | Filter by topological depth. Also supports `<` and `>`. | +| `in_degree <= N`, `in_degree >= N` | Filter by in-degree. | +| `out_degree <= N`, `out_degree >= N` | Filter by out-degree. | +| `name:pattern` | Glob pattern matching on node names (`*` and `?` wildcards). | + +**Set operations:** + +| Operator | Description | +|----------|-------------| +| `A \| B` | Union of sets A and B. | +| `A & B` | Intersection of sets A and B. | +| `A - B` | Difference (A minus B). | + +```python +from dagron.analysis import query + +# Find all root nodes +roots = query(dag, "roots") + +# Find ancestors of "deploy" within depth 3 +shallow = query(dag, "ancestors(deploy) & depth <= 3") + +# Find all test nodes on the critical path +critical_tests = query(dag, "critical_path & name:test_*") + +# Find descendants of extract that are not leaves +internal = query(dag, "descendants(extract) - leaves") + +# Combine with union +targets = query(dag, "roots | leaves") +``` + +--- + +## See also + +- [Profiling](/api/observability/profiling) -- post-execution performance analysis. 
+- [Contracts](/api/analysis/contracts) -- type-level validation for node inputs and outputs. +- [Inspecting Graphs guide](/guide/core-concepts/inspecting-graphs) -- walkthrough of analysis workflows. diff --git a/docs-next/content/docs/api/analysis/contracts.mdx b/docs-next/content/docs/api/analysis/contracts.mdx new file mode 100644 index 0000000..73e7122 --- /dev/null +++ b/docs-next/content/docs/api/analysis/contracts.mdx @@ -0,0 +1,315 @@ +--- +title: Contracts +description: API reference for dagron's type contract system -- declare, extract, and validate typed data contracts across DAG edges. +--- + +# Contracts + +The contracts module provides build-time type checking for DAG edges. You +can declare the expected input and output types for each node, and the +validator checks that producer output types are compatible with consumer +input types across every edge. This catches type mismatches before execution. + +For pipelines built with the `@task` decorator, contracts can be +automatically extracted from type annotations. + +```python +from dagron.contracts import ( + NodeContract, + ContractValidator, + ContractViolation, + extract_contracts, + validate_contracts, +) +``` + +--- + +## NodeContract + + + +Type contract for a single node's inputs and outputs. This is a frozen +dataclass, so instances are hashable and immutable after creation. + + + +```python +from dagron.contracts import NodeContract + +# A node that takes a list from 'extract' and produces a dict +transform_contract = NodeContract( + inputs={"extract": list}, + output=dict, +) + +# A node with no type constraints (wildcard) +passthrough = NodeContract() +``` + +--- + +## ContractViolation + + + +A single type-contract violation detected during validation. Frozen +dataclass, so instances are immutable and hashable. 
+ + + +```python +for violation in violations: + print(f"Edge {violation.from_node} -> {violation.to_node}: {violation.message}") +``` + +--- + +## ContractValidator + + None: ...`} /> + +Validates type contracts across DAG edges. For every edge `(u, v)` in the +DAG, the validator checks that the output type of `u` is compatible with +the expected input type declared by `v` for dependency `u`. Compatibility +is determined via `issubclass`. The `object` type acts as a wildcard. + + + +### Methods + +--- + +#### ContractValidator.validate + + list[ContractViolation]`} /> + +Run validation and return all detected violations. An empty list means +all contracts are satisfied. + +**Returns:** `list[ContractViolation]` -- List of type mismatches found across DAG edges. + +```python +import dagron +from dagron.contracts import NodeContract, ContractValidator + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +contracts = { + "extract": NodeContract(output=list), + "transform": NodeContract(inputs={"extract": dict}, output=str), + "load": NodeContract(inputs={"transform": str}), +} + +validator = ContractValidator(dag, contracts) +violations = validator.validate() + +for v in violations: + print(v.message) +# Type mismatch on edge extract -> transform: producer outputs list, +# but consumer expects dict +``` + +### Compatibility rules + +The validator uses `issubclass` to check compatibility: + +- `list` is compatible with `list` (exact match). +- `bool` is compatible with `int` (subclass relationship). +- `object` is always compatible (wildcard / Any equivalent). +- Generic type aliases (e.g., `list[int]`) are treated as compatible if `issubclass` raises `TypeError`. + +--- + +## extract_contracts + + dict[str, NodeContract]`} /> + +Auto-extract `NodeContract` instances from a Pipeline's `@task` functions. 
+Uses `typing.get_type_hints()` to read input parameter types and return
+annotations from each decorated function.
+
+
+
+**Returns:** `dict[str, NodeContract]` -- Mapping of task names to their extracted contracts.
+
+```python
+from dagron import Pipeline, task
+from dagron.contracts import extract_contracts
+
+@task
+def extract() -> list:
+    return [1, 2, 3]
+
+@task
+def transform(extract: list) -> dict:
+    return {"data": extract}
+
+@task
+def load(transform: dict) -> str:
+    return "done"
+
+pipeline = Pipeline(tasks=[extract, transform, load])
+contracts = extract_contracts(pipeline)
+
+print(contracts["extract"].output)    # <class 'list'>
+print(contracts["transform"].inputs)  # {'extract': <class 'list'>}
+print(contracts["transform"].output)  # <class 'dict'>
+```
+
+---
+
+## validate_contracts
+
+ list[ContractViolation]`} />
+
+Convenience function that extracts contracts from a pipeline and validates
+them in a single call. Optionally merges manually specified contracts that
+override the auto-extracted ones.
+
+
+
+**Returns:** `list[ContractViolation]` -- List of violations. Empty means all contracts are satisfied.
+ +```python +from dagron import Pipeline, task +from dagron.contracts import validate_contracts, NodeContract + +@task +def extract() -> list: + return [1, 2, 3] + +@task +def transform(extract: dict) -> str: # Bug: expects dict, but extract returns list + return str(extract) + +pipeline = Pipeline(tasks=[extract, transform]) +violations = validate_contracts(pipeline) + +if violations: + for v in violations: + print(f"Contract violation: {v.message}") + # Contract violation: Type mismatch on edge extract -> transform: + # producer outputs list, but consumer expects dict +``` + +### Overriding extracted contracts + +Sometimes auto-extraction is not enough -- for example, when functions lack +type annotations or when you want stricter constraints: + +```python +from dagron.contracts import validate_contracts, NodeContract + +overrides = { + "transform": NodeContract( + inputs={"extract": list}, + output=dict, + ), +} + +violations = validate_contracts(pipeline, extra_contracts=overrides) +``` + +--- + +## Complete example + +```python +import dagron +from dagron import Pipeline, task +from dagron.contracts import ( + NodeContract, + ContractValidator, + extract_contracts, + validate_contracts, +) + +# Define a typed pipeline +@task +def fetch_users() -> list: + return [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}] + +@task +def normalize(fetch_users: list) -> list: + return [{"id": u["id"], "name": u["name"].upper()} for u in fetch_users] + +@task +def store(normalize: list) -> int: + return len(normalize) + +pipeline = Pipeline(tasks=[fetch_users, normalize, store], name="users") + +# Validate contracts automatically +violations = validate_contracts(pipeline) +assert not violations, f"Contract violations: {violations}" + +# Or extract and inspect contracts manually +contracts = extract_contracts(pipeline) +for name, contract in contracts.items(): + print(f"{name}: inputs={contract.inputs}, output={contract.output}") + +# Manual validation against an 
arbitrary DAG +dag = ( + dagron.DAG.builder() + .add_edge("source", "sink") + .build() +) + +manual_contracts = { + "source": NodeContract(output=str), + "sink": NodeContract(inputs={"source": int}), # Mismatch! +} + +validator = ContractValidator(dag, manual_contracts) +for v in validator.validate(): + print(v.message) +# Type mismatch on edge source -> sink: producer outputs str, +# but consumer expects int +``` + +--- + +## See also + +- [DataFrames](/api/analysis/dataframe) -- schema validation for DataFrame outputs. +- [Analysis](/api/analysis/analysis) -- structural analysis and linting. +- [Pipeline](/api/execution/pipeline) -- the `@task` decorator and Pipeline class. +- [Building DAGs guide](/guide/core-concepts/building-dags) -- builder-level contract declarations. diff --git a/docs-next/content/docs/api/analysis/dataframe.mdx b/docs-next/content/docs/api/analysis/dataframe.mdx new file mode 100644 index 0000000..37cf144 --- /dev/null +++ b/docs-next/content/docs/api/analysis/dataframe.mdx @@ -0,0 +1,374 @@ +--- +title: DataFrames +description: API reference for dagron's DataFrame integration -- schema validation for pandas and polars DataFrames at DAG edge boundaries. +--- + +# DataFrames + +The dataframe module provides schema validation for pandas and polars +DataFrames at DAG edge boundaries. Define expected column schemas (names, +dtypes, nullability) and row count constraints for each node, then validate +execution results or individual values against those schemas. + +This module auto-detects whether a value is a pandas or polars DataFrame +and applies the appropriate introspection methods. + +```python +from dagron.dataframe import ( + DataFramePipeline, + DataFrameSchema, + ColumnSchema, + SchemaViolation, + validate_schema, +) +``` + +--- + +## ColumnSchema + + + +Schema definition for a single column in a DataFrame. Frozen dataclass. 
+ + + +```python +from dagron.dataframe import ColumnSchema + +id_col = ColumnSchema("id", dtype="int", nullable=False, required=True) +name_col = ColumnSchema("name", dtype="object", nullable=True) +score_col = ColumnSchema("score", dtype="float", required=False) # optional column +``` + +--- + +## DataFrameSchema + + + +Schema definition for a DataFrame at an edge boundary. Combines column +schemas with optional row count constraints. Frozen dataclass. + + + +```python +from dagron.dataframe import DataFrameSchema, ColumnSchema + +user_schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="int", nullable=False), + ColumnSchema("name", dtype="object", nullable=False), + ColumnSchema("email", dtype="object", nullable=True), + ], + min_rows=1, + max_rows=10000, +) +``` + +--- + +## SchemaViolation + + + +A single schema violation detected during validation. Frozen dataclass. + + + +```python +for violation in violations: + print(f"[{violation.node_name}] {violation.message}") +# [extract] Missing required column 'id' +# [extract] Column 'score' has null values but nullable=False +``` + +--- + +## validate_schema + + list[SchemaViolation]`} /> + +Validate a DataFrame against a schema. Works with both pandas and polars +DataFrames. The framework is auto-detected from the object's type. + + + +**Returns:** `list[SchemaViolation]` -- List of violations. An empty list means the DataFrame is valid. + +### Validation checks + +The function performs the following checks in order: + +1. **Framework detection** -- verifies the object is a pandas or polars DataFrame. +2. **Required columns** -- checks that all required columns exist. +3. **Dtype matching** -- for each column with a `dtype` constraint, checks that the actual dtype string contains the expected substring (case-insensitive). +4. **Nullability** -- for columns with `nullable=False`, checks for null values. +5. **Row count** -- validates `min_rows` and `max_rows` constraints. 
+ +```python +import pandas as pd +from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema + +df = pd.DataFrame({"id": [1, 2, None], "name": ["Alice", "Bob", "Charlie"]}) + +schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="int", nullable=False), + ColumnSchema("name", dtype="object"), + ColumnSchema("email", nullable=True, required=True), + ], + min_rows=1, +) + +violations = validate_schema(df, schema, node_name="extract") +for v in violations: + print(v.message) +# Column 'id' has null values but nullable=False +# Missing required column 'email' +``` + +--- + +## DataFramePipeline + + None: ...`} /> + +Execute a DAG pipeline with schema validation at edge boundaries. Validates +that each node's output DataFrame matches the expected schema. + + + +### Methods + +--- + +#### DataFramePipeline.validate_result + + list[SchemaViolation]`} /> + +Validate all completed node outputs in an execution result against their +declared schemas. Only nodes that completed successfully and have a schema +defined are checked. + + + +**Returns:** `list[SchemaViolation]` -- All violations found across all validated nodes. + +```python +import dagron +from dagron.dataframe import DataFramePipeline, DataFrameSchema, ColumnSchema + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +schemas = { + "extract": DataFrameSchema( + columns=[ColumnSchema("id", dtype="int"), ColumnSchema("name")], + min_rows=1, + ), + "transform": DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="int", nullable=False), + ColumnSchema("name_upper", nullable=False), + ], + ), +} + +pipeline = DataFramePipeline(dag, schemas) + +# After execution... 
+executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) + +violations = pipeline.validate_result(result) +if violations: + for v in violations: + print(f"[{v.node_name}] {v.message}") +else: + print("All DataFrames match their schemas") +``` + +--- + +#### DataFramePipeline.validate_value + + list[SchemaViolation]`} /> + +Validate a single value against a specific node's schema. Useful for +testing individual node outputs without running the full pipeline. + + + +**Returns:** `list[SchemaViolation]` -- Violations found, or empty list if valid. Returns empty list if no schema is defined for the given node. + +```python +import pandas as pd +from dagron.dataframe import DataFramePipeline, DataFrameSchema, ColumnSchema + +# Validate a single DataFrame in isolation +df = pd.DataFrame({"id": [1, 2], "name": ["Alice", "Bob"]}) +violations = pipeline.validate_value("extract", df) +assert not violations +``` + +--- + +## Complete example + +```python +import dagron +import pandas as pd +from dagron.dataframe import ( + DataFramePipeline, + DataFrameSchema, + ColumnSchema, + validate_schema, +) + +# Define schemas for each pipeline stage +schemas = { + "extract": DataFrameSchema( + columns=[ + ColumnSchema("user_id", dtype="int", nullable=False), + ColumnSchema("username", dtype="object", nullable=False), + ColumnSchema("score", dtype="float", nullable=True), + ], + min_rows=1, + ), + "transform": DataFrameSchema( + columns=[ + ColumnSchema("user_id", dtype="int", nullable=False), + ColumnSchema("username", dtype="object", nullable=False), + ColumnSchema("score_normalized", dtype="float", nullable=False), + ], + ), + "load": DataFrameSchema( + columns=[ + ColumnSchema("user_id", dtype="int", nullable=False), + ColumnSchema("username", dtype="object", nullable=False), + ColumnSchema("score_normalized", dtype="float", nullable=False), + ColumnSchema("loaded_at", dtype="datetime", nullable=False), + ], + max_rows=100000, + ), +} + +# Build the DAG +dag = ( + 
dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +pipeline = DataFramePipeline(dag, schemas) + +# Validate individual DataFrames during development +test_df = pd.DataFrame({ + "user_id": [1, 2, 3], + "username": ["alice", "bob", "charlie"], + "score": [0.85, None, 0.92], +}) + +violations = pipeline.validate_value("extract", test_df) +print(f"Extract violations: {len(violations)}") +for v in violations: + print(f" {v.message}") + +# Or use the standalone function +violations = validate_schema(test_df, schemas["extract"], "extract") + +# After full pipeline execution, validate all outputs +executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) +all_violations = pipeline.validate_result(result) +``` + +### Using with polars + +The module works identically with polars DataFrames -- no configuration +changes needed: + +```python +import polars as pl +from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema + +df = pl.DataFrame({ + "id": [1, 2, 3], + "value": [10.5, 20.3, 30.1], +}) + +schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="i64", nullable=False), + ColumnSchema("value", dtype="f64"), + ], +) + +violations = validate_schema(df, schema, "my_node") +``` + +--- + +## See also + +- [Contracts](/api/analysis/contracts) -- type-level contracts for arbitrary Python types. +- [Pipeline](/api/execution/pipeline) -- the Pipeline class for decorator-based DAG construction. +- [Execution](/api/execution/execution) -- `ExecutionResult` containing node outputs. 
diff --git a/docs-next/content/docs/api/analysis/meta.json b/docs-next/content/docs/api/analysis/meta.json new file mode 100644 index 0000000..a15c79a --- /dev/null +++ b/docs-next/content/docs/api/analysis/meta.json @@ -0,0 +1,5 @@ +{ + "title": "Analysis & Validation", + "defaultOpen": false, + "pages": ["analysis", "contracts", "dataframe"] +} diff --git a/docs-next/content/docs/api/core/builder.mdx b/docs-next/content/docs/api/core/builder.mdx new file mode 100644 index 0000000..910e6cf --- /dev/null +++ b/docs-next/content/docs/api/core/builder.mdx @@ -0,0 +1,267 @@ +--- +title: "DAGBuilder" +description: "API reference for DAGBuilder — the fluent builder pattern for constructing validated DAGs." +--- + +# DAGBuilder + +The `DAGBuilder` provides a fluent, chainable API for constructing DAGs. Every +mutating method returns `self`, so you can chain calls together. The builder +validates the graph on `.build()`, ensuring you never receive an invalid DAG. + +Obtain a builder via `DAG.builder()`: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) +``` + +See the [Building DAGs](/guide/core-concepts/building-dags) guide for construction +patterns and best practices. + +--- + +## Constructor + + + +Create a new empty builder. In most cases you will use `DAG.builder()` instead +of instantiating this class directly. + +```python +# Preferred +builder = dagron.DAG.builder() + +# Also valid +builder = dagron.DAGBuilder() +``` + +--- + +## Methods + +### add_node + + DAGBuilder`} /> + +Add a node to the graph under construction. Returns `self` for chaining. + + + +**Raises:** +- `DuplicateNodeError` — if a node with the same name has already been added. 
+ +```python +builder = ( + dagron.DAG.builder() + .add_node("fetch", payload={"url": "https://api.example.com"}) + .add_node("parse", metadata={"team": "data-eng"}) + .add_node("store") +) +``` + +--- + +### add_edge + + DAGBuilder`} /> + +Add a directed edge from `from_node` to `to_node`. Returns `self` for chaining. +The builder defers cycle detection to `.build()`, so you can add edges in any +order. + + + +```python +builder = ( + dagron.DAG.builder() + .add_node("a") + .add_node("b") + .add_node("c") + .add_edge("a", "b", weight=2.0) + .add_edge("b", "c", label="transform-to-load") +) +``` + +--- + +### contract + + DAGBuilder`} /> + +Attach a type contract to a node. Contracts declare the expected input types +(keyed by predecessor name) and the output type. Use `validate_contracts()` to +check all contracts before building, or let `build()` validate them +automatically. + + + +**Raises:** +- `NodeNotFoundError` — if the specified node has not been added. + +```python +builder = ( + dagron.DAG.builder() + .add_node("fetch") + .add_node("parse") + .add_node("validate") + .add_edge("fetch", "parse") + .add_edge("parse", "validate") + .contract("fetch", output=list) + .contract("parse", inputs={"fetch": list}, output=dict) + .contract("validate", inputs={"parse": dict}, output=bool) +) +``` + +See the [Contracts](/guide/advanced/contracts) guide for more details on type +contracts. + +--- + +### validate_contracts + + list[ContractViolation]`} /> + +Check all attached contracts for consistency (e.g., output type of `A` matches +the input type expected by `B` on the `A -> B` edge). Returns a list of +`ContractViolation` objects describing any mismatches. An empty list means all +contracts are consistent. + +**Returns:** `list[ContractViolation]` — a list of violations, empty if valid. 
+ +```python +violations = builder.validate_contracts() +if violations: + for v in violations: + print(f"Contract violation: {v}") +else: + print("All contracts consistent.") +``` + +--- + +### build + + DAG`} /> + +Finalize and validate the graph. Returns a fully constructed +[DAG](/api/core/core) instance. This method performs: + +1. **Node existence checks** — every edge endpoint must reference an existing node. +2. **Cycle detection** — raises `CycleError` if the graph contains a cycle. +3. **Contract validation** — if any contracts were attached, they are validated. + +**Returns:** `DAG` — the validated directed acyclic graph. + +**Raises:** +- `CycleError` — if the graph contains a cycle. +- `NodeNotFoundError` — if an edge references a non-existent node. + +```python +dag = ( + dagron.DAG.builder() + .add_node("a") + .add_node("b") + .add_edge("a", "b") + .build() +) + +print(dag.node_count()) # 2 +print(dag.edge_count()) # 1 +``` + +--- + +## Complete Example + +A full builder workflow with contracts, metadata, and weighted edges: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + # Nodes with payloads and metadata + .add_node("ingest", payload={"source": "s3"}, metadata={"tier": "bronze"}) + .add_node("clean", metadata={"tier": "silver"}) + .add_node("enrich", metadata={"tier": "silver"}) + .add_node("aggregate", metadata={"tier": "gold"}) + .add_node("publish", metadata={"tier": "gold"}) + + # Weighted edges for cost-aware scheduling + .add_edge("ingest", "clean", weight=1.0) + .add_edge("ingest", "enrich", weight=1.0) + .add_edge("clean", "aggregate", weight=3.0) + .add_edge("enrich", "aggregate", weight=2.0) + .add_edge("aggregate", "publish", weight=0.5) + + # Type contracts + .contract("ingest", output=list) + .contract("clean", inputs={"ingest": list}, output=list) + .contract("enrich", inputs={"ingest": list}, output=list) + .contract("aggregate", inputs={"clean": list, "enrich": list}, output=dict) + .contract("publish", 
inputs={"aggregate": dict}, output=bool) + + .build() +) + +print(dag.node_count()) # 5 +print(dag.edge_count()) # 5 +print(dag.topological_levels()) # [[ingest], [clean, enrich], [aggregate], [publish]] +``` + +--- + +## ContractViolation + + + +Returned by `DAGBuilder.validate_contracts()` when a type mismatch is found. + +| Property | Type | Description | +|----------|------|-------------| +| `from_node` | `str` | The upstream node whose output type does not match. | +| `to_node` | `str` | The downstream node whose expected input type does not match. | +| `expected` | `type` | The type expected by the downstream node. | +| `actual` | `type` | The type declared by the upstream node's output. | + +```python +violations = builder.validate_contracts() +for v in violations: + print(f"{v.from_node} -> {v.to_node}: expected {v.expected}, got {v.actual}") +``` diff --git a/docs-next/content/docs/api/core/core.mdx b/docs-next/content/docs/api/core/core.mdx new file mode 100644 index 0000000..7aed47a --- /dev/null +++ b/docs-next/content/docs/api/core/core.mdx @@ -0,0 +1,863 @@ +--- +title: "Core — DAG, NodeId, GraphStats" +description: "Complete API reference for the DAG class, NodeId, GraphStats, GraphDiff, ReachabilityIndex, and ExecutionPlan." +--- + +# Core — DAG, NodeId, GraphStats + +The core module provides the foundational graph data structure that powers every +feature in dagron. The `DAG` class contains approximately 78 methods spanning +construction, inspection, traversal, scheduling, transforms, serialization, and +more. The underlying graph lives in Rust for zero-copy speed, exposed to Python +via PyO3. + +See the [Building DAGs](/guide/core-concepts/building-dags) guide for usage patterns and +the [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) guide for analysis +workflows. + +--- + +## DAG + + + +The central directed acyclic graph. Create an empty graph with `DAG()` or use +the fluent [DAGBuilder](/api/core/builder) via `DAG.builder()`. 
+ +```python +import dagron + +# Empty graph, then mutate +dag = dagron.DAG() +dag.add_node("a") +dag.add_node("b") +dag.add_edge("a", "b") + +# Or use the builder +dag = dagron.DAG.builder().add_node("a").add_node("b").add_edge("a", "b").build() +``` + +--- + +### Construction + + DAGBuilder`} /> + +Return a new [DAGBuilder](/api/core/builder) instance for fluent graph +construction. The builder validates the graph when `.build()` is called. + + NodeId`} /> + +Add a node to the graph. Returns a `NodeId` handle. Raises `DuplicateNodeError` +if a node with the same name already exists. + + + + list[NodeId]`} /> + +Bulk-add multiple nodes. Each element can be a bare name string, a +`(name, payload)` tuple, or a `(name, payload, metadata)` tuple. + + + + None`} /> + +Add a directed edge from `from_node` to `to_node`. Raises `NodeNotFoundError` +if either node does not exist, and `CycleError` if the edge would create a +cycle. + + + + None`} /> + +Bulk-add multiple edges. Each element is a tuple of `(from, to)`, +`(from, to, weight)`, or `(from, to, weight, label)`. + + + + None`} /> + +Remove a node and all edges connected to it. Raises `NodeNotFoundError` if the +node does not exist. + + + + None`} /> + +Remove a directed edge. Raises `EdgeNotFoundError` if the edge does not exist. + + + +--- + +### Inspection + + bool`} /> + +Return `True` if a node with the given name exists. + + bool`} /> + +Return `True` if a directed edge exists between the two nodes. + + int`} /> + +Return the total number of nodes in the graph. + + int`} /> + +Return the total number of edges in the graph. + + Any`} /> + +Return the payload attached to a node. Raises `NodeNotFoundError` if the node +does not exist. + + None`} /> + +Replace the payload on an existing node. + + dict | None`} /> + +Return the metadata dictionary for a node, or `None` if no metadata was set. + + None`} /> + +Replace the metadata on an existing node. 
+ + list[NodeId]`} /> + +Return the direct predecessors (parents) of a node. + + list[NodeId]`} /> + +Return the direct successors (children) of a node. + + list[NodeId]`} /> + +Return all transitive predecessors of a node (the full upstream lineage). + + list[NodeId]`} /> + +Return all transitive successors of a node (the full downstream lineage). + + int`} /> + +Return the number of incoming edges to a node. + + int`} /> + +Return the number of outgoing edges from a node. + + list[NodeId]`} /> + +Return all root nodes (nodes with no predecessors). + + list[NodeId]`} /> + +Return all leaf nodes (nodes with no successors). + + list[NodeId]`} /> + +Return all nodes in the graph as a list of `NodeId` objects. + +```python +dag = dagron.DAG() +dag.add_node("extract", payload={"source": "api"}) +dag.add_node("transform") +dag.add_edge("extract", "transform") + +print(dag.has_node("extract")) # True +print(dag.predecessors("transform")) # [NodeId("extract")] +print(dag.get_payload("extract")) # {"source": "api"} +print(dag.roots()) # [NodeId("extract")] +print(dag.leaves()) # [NodeId("transform")] +``` + +--- + +### Iterators + +Lazy iterator variants that yield items one at a time instead of building a full +list in memory. Useful for very large graphs. + + Iterator[NodeId]`} /> + + Iterator[NodeId]`} /> + + Iterator[NodeId]`} /> + + Iterator[NodeId]`} /> + + Iterator[NodeId]`} /> + + Iterator[NodeId]`} /> + + Iterator[list[NodeId]]`} /> + +```python +for node in dag.iter_topological_sort(): + print(node.name) + +for level in dag.iter_topological_levels(): + print([n.name for n in level]) +``` + +--- + +### Topological Sorting + + list[NodeId]`} /> + +Return a topological ordering of all nodes using Kahn's algorithm. The ordering +is deterministic (stable sort by insertion order). + + list[NodeId]`} /> + +Return a topological ordering computed via depth-first search. May produce a +different valid ordering than the Kahn-based `topological_sort()`. 
+ + list[list[NodeId]]`} /> + +Group nodes into levels where all nodes at the same level can execute in +parallel (all predecessors are in earlier levels). + + list[list[NodeId]]`} /> + +Enumerate all valid topological orderings. Use `limit` to cap the output for +large graphs (the number of orderings can be exponential). + + + + list[NodeId]`} /> + +Return a topological ordering that respects priority hints. Nodes with higher +priority values are scheduled earlier when topologically valid. + + + + list[list[NodeId]]`} /> + +Like `topological_levels()`, but nodes within each level are sorted by priority. + + + +--- + +### Paths + + list[NodeId] | None`} /> + +Return the shortest path (fewest edges) between two nodes, or `None` if no path +exists. + + + + tuple[list[NodeId], float] | None`} /> + +Return the longest path and its total cost between two nodes, or `None` if no +path exists. If `costs` is provided, edge/node costs are summed; otherwise every +node has cost 1. + + + + list[list[NodeId]]`} /> + +Enumerate all directed paths between two nodes. + + + +```python +path = dag.shortest_path("extract", "load") +print([n.name for n in path]) # ["extract", "transform", "load"] +``` + +--- + +### Scheduling + + ExecutionPlan`} /> + +Compute an optimal execution plan (topological-level schedule) with unlimited +parallelism. See [ExecutionPlan](#executionplan) below. + + + + ExecutionPlan`} /> + +Compute an execution plan constrained to a fixed number of workers. + + + + tuple[list[NodeId], float]`} /> + +Return the critical path (longest weighted path through the graph) and its total +cost. This determines the theoretical minimum makespan. + + + + dict[str, float]`} /> + +Return the bottom-level priority of each node (the longest path from that node +to any leaf). Used internally for scheduling heuristics. 
+ + + +```python +costs = {"extract": 2.0, "transform": 5.0, "load": 1.0} +path, total = dag.critical_path(costs) +print([n.name for n in path], total) # ["extract", "transform", "load"], 8.0 +``` + +--- + +### Transforms + + DAG`} /> + +Return a new DAG with all edge directions reversed. + + DAG`} /> + +Return a new DAG containing only nodes for which `predicate(name)` returns +`True`. Edges between remaining nodes are preserved. + + + + DAG`} /> + +Merge another DAG into this one. When both DAGs share a node name, `conflict` +determines the behavior: `"keep_first"`, `"keep_second"`, `"error"`, or use a +custom `conflict_resolver` callable. + + payload for resolving conflicts."}, +]} /> + + DAG`} /> + +Collapse multiple nodes into a single node. Edges are rewired to the collapsed +node. + + + + DAG`} /> + +Return the transitive reduction: the smallest DAG with the same reachability +relation. Removes redundant edges. + + DAG`} /> + +Return the transitive closure: a DAG with an edge `(u, v)` for every pair of +nodes where `u` can reach `v`. + + DAG`} /> + +Return a deep copy of the DAG at this point in time. + + list[tuple[str, str]]`} /> + +Compute the dominator tree rooted at the given node. Returns a list of +`(dominator, dominated)` edge pairs. + + + +```python +reduced = dag.transitive_reduction() +filtered = dag.filter(lambda name: name.startswith("transform")) +merged = dag1.merge(dag2, conflict="keep_second") +``` + +--- + +### Subgraph + + DAG`} /> + +Extract a subgraph containing only the specified nodes and edges between them. + + + + DAG`} /> + +Extract a subgraph by traversing up to `depth` hops from `root`. + + + +--- + +### Serialization + + str`} /> + +Serialize the DAG to a JSON string. If nodes carry non-JSON-serializable +payloads, provide a custom `payload_serializer`. + + DAG`} /> + +Deserialize a DAG from a JSON string. + + str`} /> + +Export the graph in Graphviz DOT format. 
+ + dict of DOT attributes per node."}, +]} /> + + str`} /> + +Export the graph as a Mermaid diagram string. + + bytes`} /> + +Serialize the DAG to a compact binary format. + + DAG`} /> + +Deserialize a DAG from binary data. + + None`} /> + +Save the DAG to a file on disk (binary format). + + DAG`} /> + +Load a DAG from a file on disk. + +```python +# Round-trip through JSON +json_str = dag.to_json() +restored = dagron.DAG.from_json(json_str) + +# Save / load from disk +dag.save("pipeline.dagron") +loaded = dagron.DAG.load("pipeline.dagron") + +# Export for visualization +print(dag.to_mermaid()) +``` + +See the [Serialization](/guide/core-concepts/serialization) guide for detailed examples. + +--- + +### Matching + + list[NodeId]`} /> + +Return all nodes whose names match the given regular expression pattern. + + + + list[NodeId]`} /> + +Return all nodes whose names match the given glob pattern (supports `*`, `?`, +`[...]`). + + + +```python +transforms = dag.nodes_matching_glob("transform_*") +etl_nodes = dag.nodes_matching_regex(r"^(extract|transform|load)_\d+$") +``` + +--- + +### Reachability + + ReachabilityIndex`} /> + +Precompute a reachability index for O(1) ancestor/descendant queries. See +[ReachabilityIndex](#reachabilityindex) below. + + bool`} /> + +Return `True` if `ancestor` is a transitive predecessor of `descendant`. This +performs a graph traversal each time; for repeated queries, use +`build_reachability_index()`. + + + +--- + +### Diffing + + GraphDiff`} /> + +Compute a structural diff between this DAG and another. Returns a +[GraphDiff](#graphdiff) object. See the [Versioning](/guide/advanced/versioning) +guide for diff workflows. + + + +--- + +### Partitioning + + PartitionResult`} /> + +Partition the graph into `k` groups using topological-level-based assignment. + + PartitionResult`} /> + +Partition the graph into `k` balanced groups minimizing total cost imbalance. 
+ + PartitionResult`} /> + +Partition the graph into `k` groups minimizing inter-partition communication +(cross-partition edges). + + + +--- + +### Incremental + + list[str]`} /> + +Given a list of changed nodes, return the full set of nodes that need +re-execution (the changed nodes plus all their downstream descendants). + + + + dict[str, list[str]]`} /> + +For each downstream node, return which changed nodes are responsible for +invalidating it. + + + +See the [Incremental Execution](/api/execution/incremental) API page for the full +incremental executor. + +--- + +### Validation, Stats, and Cache + + bool`} /> + +Validate graph invariants (acyclicity, no orphan edges). Returns `True` if the +graph is valid. Raises `CycleError` if a cycle is detected. + + GraphStats`} /> + +Return a [GraphStats](#graphstats) summary of the graph. + + dict`} /> + +Return information about the internal Rust-side cache (hit/miss counts for +memoized graph algorithms). + + None`} /> + +Clear the internal algorithm cache. The cache is automatically invalidated on +mutation, but you can force-clear it if needed. + + int`} /> + +A monotonically increasing counter that increments on every mutation. Useful for +cache invalidation in external systems. + +```python +stats = dag.stats() +print(f"Nodes: {stats.node_count}, Depth: {stats.depth}, Width: {stats.width}") +print(f"Generation: {dag.generation}") +``` + +--- + +## NodeId + + + +An opaque handle returned by node-creation methods. Provides a `.name` property +for accessing the underlying string identifier. + +| Property | Type | Description | +|----------|------|-------------| +| `name` | `str` | The unique string name of the node. | + +```python +node_id = dag.add_node("extract") +print(node_id.name) # "extract" +``` + +--- + +## GraphStats + + + +A read-only summary of graph metrics, returned by `DAG.stats()`. + +| Property | Type | Description | +|----------|------|-------------| +| `node_count` | `int` | Total number of nodes. 
| +| `edge_count` | `int` | Total number of edges. | +| `depth` | `int` | Length of the longest path (in edges). | +| `width` | `int` | Maximum number of nodes at any topological level. | +| `density` | `float` | Edge density: `edge_count / (node_count * (node_count - 1))`. | +| `longest_path_length` | `int` | Number of edges in the longest path. | +| `avg_in_degree` | `float` | Average incoming edges per node. | +| `avg_out_degree` | `float` | Average outgoing edges per node. | +| `max_in_degree` | `int` | Highest in-degree of any node. | +| `max_out_degree` | `int` | Highest out-degree of any node. | +| `root_count` | `int` | Number of root nodes. | +| `leaf_count` | `int` | Number of leaf nodes. | +| `is_weakly_connected` | `bool` | Whether the underlying undirected graph is connected. | +| `component_count` | `int` | Number of weakly connected components. | + +```python +stats = dag.stats() +print(f"Density: {stats.density:.4f}") +print(f"Connected: {stats.is_weakly_connected}") +print(f"Components: {stats.component_count}") +``` + +--- + +## GraphDiff + + + +The result of `DAG.diff(other)`. Contains sets of added, removed, and changed +nodes and edges. + +| Property | Type | Description | +|----------|------|-------------| +| `added_nodes` | `list[str]` | Nodes present in `other` but not `self`. | +| `removed_nodes` | `list[str]` | Nodes present in `self` but not `other`. | +| `changed_nodes` | `list[str]` | Nodes with different payloads or metadata. | +| `added_edges` | `list[tuple[str, str]]` | Edges present in `other` but not `self`. | +| `removed_edges` | `list[tuple[str, str]]` | Edges present in `self` but not `other`. | +| `changed_edges` | `list[tuple[str, str]]` | Edges with different weights or labels. | + +```python +diff = dag_v1.diff(dag_v2) +print(f"Added nodes: {diff.added_nodes}") +print(f"Removed edges: {diff.removed_edges}") +``` + +--- + +## ReachabilityIndex + + + +A precomputed index for O(1) reachability queries. 
Built via +`DAG.build_reachability_index()`. The index is a snapshot; it does not track +subsequent mutations to the DAG. + + bool`} /> + +Return `True` if `from_node` can reach `to_node` via directed edges. + + list[str]`} /> + +Return all nodes reachable from the given node. + + list[str]`} /> + +Return all ancestors of the given node. + + int`} /> + +Return the number of nodes in the index. + +```python +index = dag.build_reachability_index() +print(index.can_reach("extract", "load")) # True +print(index.reachable_from("extract")) # ["transform", "load"] +print(index.ancestors_of("load")) # ["extract", "transform"] +``` + +--- + +## ExecutionPlan + + + +A computed execution schedule returned by `DAG.execution_plan()` and +`DAG.execution_plan_constrained()`. + +| Property | Type | Description | +|----------|------|-------------| +| `steps` | `list[list[str]]` | Ordered list of execution steps. Each step is a list of node names that can run concurrently. | +| `total_nodes` | `int` | Total number of nodes in the plan. | +| `max_parallelism` | `int` | Maximum number of nodes in any single step. | +| `estimated_makespan` | `float` | Estimated total execution time based on provided costs. | +| `critical_path` | `list[str]` | The critical path through the plan. | + +```python +plan = dag.execution_plan(costs={"extract": 2.0, "transform": 5.0, "load": 1.0}) +print(f"Steps: {len(plan.steps)}") +print(f"Max parallelism: {plan.max_parallelism}") +print(f"Estimated makespan: {plan.estimated_makespan}s") + +for i, step in enumerate(plan.steps): + print(f" Step {i}: {step}") +``` diff --git a/docs-next/content/docs/api/core/errors.mdx b/docs-next/content/docs/api/core/errors.mdx new file mode 100644 index 0000000..a1ad345 --- /dev/null +++ b/docs-next/content/docs/api/core/errors.mdx @@ -0,0 +1,274 @@ +--- +title: "Errors" +description: "API reference for all dagron exception classes and the error hierarchy." 
+--- + +# Errors + +All dagron exceptions inherit from `DagronError`, making it easy to catch any +library error with a single `except` clause. More specific exceptions allow +targeted handling of individual failure modes. + +--- + +## Error Hierarchy + +```mermaid +classDiagram + DagronError <|-- GraphError + GraphError <|-- CycleError + GraphError <|-- DuplicateNodeError + GraphError <|-- NodeNotFoundError + GraphError <|-- EdgeNotFoundError + + class DagronError { + +str message + } + class GraphError { + +str message + } + class CycleError { + +list~str~ cycle + +str message + } + class DuplicateNodeError { + +str node_name + +str message + } + class NodeNotFoundError { + +str node_name + +str message + } + class EdgeNotFoundError { + +str from_node + +str to_node + +str message + } +``` + +--- + +## DagronError + + + +The base exception for all dagron errors. Every exception raised by dagron is a +subclass of this class, so you can write a single catch-all handler: + +```python +import dagron + +try: + dag = dagron.DAG() + dag.add_edge("x", "y") # nodes don't exist +except dagron.DagronError as e: + print(f"dagron error: {e}") +``` + +| Attribute | Type | Description | +|-----------|------|-------------| +| `message` | `str` | Human-readable error description. | + +--- + +## GraphError + + + +Base class for all errors related to graph structure operations. Covers node +and edge manipulation, cycle detection, and structural invariant violations. + +| Attribute | Type | Description | +|-----------|------|-------------| +| `message` | `str` | Human-readable error description. | + +```python +try: + dag.add_edge("nonexistent", "also_nonexistent") +except dagron.GraphError as e: + # Catches NodeNotFoundError, EdgeNotFoundError, CycleError, etc. + print(f"Graph error: {e}") +``` + +--- + +## CycleError + + + +Raised when an operation would introduce a cycle into the DAG. 
The `cycle` +attribute contains the list of node names forming the cycle, which is invaluable +for debugging. + + + +```python +import dagron + +dag = dagron.DAG() +dag.add_node("a") +dag.add_node("b") +dag.add_node("c") +dag.add_edge("a", "b") +dag.add_edge("b", "c") + +try: + dag.add_edge("c", "a") # would create a -> b -> c -> a +except dagron.CycleError as e: + print(f"Cycle detected: {e.cycle}") + # Cycle detected: ['a', 'b', 'c', 'a'] +``` + +`CycleError` is also raised by `DAG.validate()` if the graph contains a cycle, +and by `DAGBuilder.build()` during construction. + +--- + +## DuplicateNodeError + + + +Raised when attempting to add a node with a name that already exists in the +graph. + + + +```python +dag = dagron.DAG() +dag.add_node("extract") + +try: + dag.add_node("extract") # duplicate +except dagron.DuplicateNodeError as e: + print(f"Duplicate: {e.node_name}") + # Duplicate: extract +``` + +--- + +## NodeNotFoundError + + + +Raised when referencing a node that does not exist in the graph. Common triggers +include `add_edge()` with a non-existent endpoint, `remove_node()` on a missing +node, or `get_payload()` for an unknown name. + + + +```python +dag = dagron.DAG() +dag.add_node("a") + +try: + dag.add_edge("a", "nonexistent") +except dagron.NodeNotFoundError as e: + print(f"Missing node: {e.node_name}") + # Missing node: nonexistent +``` + +--- + +## EdgeNotFoundError + + + +Raised when referencing an edge that does not exist in the graph. Typically +triggered by `remove_edge()`. 
+ + + +```python +dag = dagron.DAG() +dag.add_node("a") +dag.add_node("b") + +try: + dag.remove_edge("a", "b") # edge doesn't exist +except dagron.EdgeNotFoundError as e: + print(f"No edge from {e.from_node} to {e.to_node}") + # No edge from a to b +``` + +--- + +## Error Handling Patterns + +### Catch-all + +Use `DagronError` as a catch-all for any dagron-specific error: + +```python +try: + dag = build_complex_pipeline() + result = dagron.DAGExecutor(dag).execute(tasks) +except dagron.DagronError as e: + logger.error(f"Pipeline failed: {e}") + raise +``` + +### Granular handling + +For more control, catch specific exceptions: + +```python +try: + dag = ( + dagron.DAG.builder() + .add_node("a") + .add_node("b") + .add_edge("a", "b") + .add_edge("b", "a") # cycle + .build() + ) +except dagron.CycleError as e: + print(f"Fix the cycle: {' -> '.join(e.cycle)}") +except dagron.DuplicateNodeError as e: + print(f"Remove duplicate node: {e.node_name}") +except dagron.NodeNotFoundError as e: + print(f"Add missing node first: {e.node_name}") +``` + +### Checking before acting + +Use predicate methods to avoid exceptions entirely: + +```python +if dag.has_node("transform"): + dag.remove_node("transform") + +if dag.has_edge("a", "b"): + dag.remove_edge("a", "b") +``` + +--- + +## Related + +- [DAG](/api/core/core) — core graph class whose methods raise these errors. +- [DAGBuilder](/api/core/builder) — builder that raises `CycleError` on `.build()`. +- [Gates](/api/execution/gates) — `GateRejectedError` and `GateTimeoutError` for gate-specific errors. 
diff --git a/docs-next/content/docs/api/core/meta.json b/docs-next/content/docs/api/core/meta.json new file mode 100644 index 0000000..9097189 --- /dev/null +++ b/docs-next/content/docs/api/core/meta.json @@ -0,0 +1,5 @@ +{ + "title": "Core", + "defaultOpen": true, + "pages": ["core", "builder", "errors"] +} diff --git a/docs-next/content/docs/api/execution/caching.mdx b/docs-next/content/docs/api/execution/caching.mdx new file mode 100644 index 0000000..39f9527 --- /dev/null +++ b/docs-next/content/docs/api/execution/caching.mdx @@ -0,0 +1,367 @@ +--- +title: "Caching" +description: "API reference for CachedDAGExecutor, ContentAddressableCache, CachePolicy, and cache backends." +--- + +# Caching + +The caching module provides content-addressable, Merkle-tree-based caching for +DAG execution. A node's cache key is derived from its task code, its +predecessors' output hashes, and its name — so if nothing upstream has changed, +the cached result is returned without re-execution. + +See the [Caching](/guide/execution-strategies/caching) guide for configuration patterns, +backend selection, and cache invalidation strategies. + +--- + +## CachedDAGExecutor + + + +An executor that wraps the standard [DAGExecutor](/api/execution/execution) with a +content-addressable cache layer. Before executing a node, the executor checks +the cache; if a valid entry exists, the cached result is returned and the node +is marked `CACHE_HIT`. + + + +### execute + + CachedExecutionResult`} /> + +Execute tasks with caching. Returns a [CachedExecutionResult](#cachedexecutionresult) +with cache statistics. 
+ + + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("fetch").add_node("process").add_node("save") + .add_edge("fetch", "process").add_edge("process", "save") + .build() +) + +cache = dagron.ContentAddressableCache( + backend=dagron.FileSystemCacheBackend("./cache_dir") +) + +executor = dagron.CachedDAGExecutor(dag, cache=cache) + +# First run — all nodes executed +result = executor.execute({ + "fetch": lambda: [1, 2, 3], + "process": lambda: [2, 4, 6], + "save": lambda: "done", +}) +print(result.cache_hits, result.cache_misses) # 0, 3 + +# Second run — all nodes cached +result = executor.execute({ + "fetch": lambda: [1, 2, 3], + "process": lambda: [2, 4, 6], + "save": lambda: "done", +}) +print(result.cache_hits, result.cache_misses) # 3, 0 +``` + +--- + +## CachedExecutionResult + + + +Extends the standard [ExecutionResult](/api/execution/execution) with cache +statistics. + +| Property | Type | Description | +|----------|------|-------------| +| `execution_result` | `ExecutionResult` | The underlying execution result with per-node details. | +| `cache_hits` | `int` | Number of nodes whose results were loaded from cache. | +| `cache_misses` | `int` | Number of nodes that were executed (cache miss). | +| `nodes_executed` | `int` | Number of nodes that were actually executed. | +| `nodes_cached` | `int` | Number of nodes that returned cached results. | + +```python +print(f"Hit rate: {result.cache_hits}/{result.cache_hits + result.cache_misses}") +print(f"Nodes executed: {result.nodes_executed}") +print(f"Nodes cached: {result.nodes_cached}") +``` + +--- + +## ContentAddressableCache + + + +A Merkle-tree cache that computes content-addressable keys from task code and +predecessor output hashes. This ensures that a cache entry is only valid when +the exact same computation with the exact same inputs was previously executed. + + + +### Methods + + str`} /> + +Compute the content-addressable cache key for a node. 
+ + + + Any | None`} /> + +Retrieve a cached value by key. Returns `None` on cache miss. + + None`} /> + +Store a value in the cache. + + bool`} /> + +Check if a key exists in the cache. + + None`} /> + +Clear all entries from the cache. + + CacheStats`} /> + +Return current cache statistics. + +```python +cache = dagron.ContentAddressableCache( + backend=dagron.FileSystemCacheBackend("./my_cache") +) + +key = cache.compute_key("process", process_fn, {"fetch": "abc123"}) +cache.put(key, [2, 4, 6]) +print(cache.has(key)) # True +print(cache.get(key)) # [2, 4, 6] +print(cache.stats()) # CacheStats(...) +``` + +--- + +## CachePolicy + + + +A policy that controls cache eviction. Applied to a cache backend to limit +storage consumption. + + + +```python +policy = dagron.CachePolicy( + max_entries=1000, + max_size_bytes=500 * 1024 * 1024, # 500 MB + ttl_seconds=3600, # 1 hour +) +backend = dagron.FileSystemCacheBackend("./cache_dir", policy=policy) +``` + +--- + +## CacheStats + + + +Cache performance statistics. + +| Property | Type | Description | +|----------|------|-------------| +| `hits` | `int` | Total cache hits since creation or last clear. | +| `misses` | `int` | Total cache misses. | +| `evictions` | `int` | Total entries evicted by policy. | +| `total_entries` | `int` | Current number of entries in the cache. | +| `total_size_bytes` | `int` | Current total size of cached data in bytes. | + + float`} /> + +The cache hit rate as a float between 0.0 and 1.0. Returns 0.0 if no lookups +have been performed. + +```python +stats = cache.stats() +print(f"Hit rate: {stats.hit_rate:.1%}") +print(f"Entries: {stats.total_entries}") +print(f"Size: {stats.total_size_bytes / 1024 / 1024:.1f} MB") +print(f"Evictions: {stats.evictions}") +``` + +--- + +## FileSystemCacheBackend + + + +A cache backend that stores entries as files on the local filesystem. Each cache +key maps to a file in `cache_dir`. Supports optional eviction via a +[CachePolicy](#cachepolicy). 
+ + + +### Methods + + Any | None`} /> + +Retrieve a cached value by key from disk. + + None`} /> + +Store a value on disk. + + bool`} /> + +Check if a key exists on disk. + + None`} /> + +Remove a single entry from disk. + + None`} /> + +Remove all cache files. + + CacheStats`} /> + +Return current backend statistics. + +```python +backend = dagron.FileSystemCacheBackend( + "./my_pipeline_cache", + policy=dagron.CachePolicy(max_entries=500, ttl_seconds=7200), +) + +backend.put("key123", {"data": [1, 2, 3]}) +print(backend.has("key123")) # True +print(backend.get("key123")) # {"data": [1, 2, 3]} +``` + +--- + +## CacheKeyBuilder + + + +A utility for manually constructing cache keys. Used internally by +`ContentAddressableCache` but available for advanced use cases. + + str`} /> + +Compute a hash of a callable's bytecode and closure. + + str`} /> + +Compute a hash of an arbitrary Python value. + + str`} /> + +Combine a node name, task hash, and predecessor hashes into a final cache key. + + + +```python +builder = dagron.CacheKeyBuilder() +task_hash = builder.hash_task(my_function) +value_hash = builder.hash_value([1, 2, 3]) +key = builder.build_key("process", task_hash, {"fetch": value_hash}) +``` + +--- + +## CacheKeyProtocol + + str: ...`} /> + +A protocol for objects that provide their own cache key. If a task's return +value implements this protocol, the cache uses its `__dagron_cache_key__()` +method instead of the default hashing strategy. + +```python +class MyModel: + def __init__(self, version, data): + self.version = version + self.data = data + + def __dagron_cache_key__(self) -> str: + return f"model-v{self.version}-{hash(tuple(self.data))}" + +# When MyModel is returned from a task, the cache uses __dagron_cache_key__ +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the base executor that caching wraps. +- [Incremental Execution](/api/execution/incremental) — dirty-set-based re-execution. 
+- [Checkpointing](/api/execution/checkpoint) — save progress to disk for resume. +- [Caching guide](/guide/execution-strategies/caching) — configuration and invalidation patterns. diff --git a/docs-next/content/docs/api/execution/checkpoint.mdx b/docs-next/content/docs/api/execution/checkpoint.mdx new file mode 100644 index 0000000..5838fbb --- /dev/null +++ b/docs-next/content/docs/api/execution/checkpoint.mdx @@ -0,0 +1,256 @@ +--- +title: "Checkpointing" +description: "API reference for CheckpointExecutor and CheckpointInfo — save progress and resume execution after failures." +--- + +# Checkpointing + +The checkpointing module allows you to persist execution progress to disk and +resume after failures. When a node completes, its result is saved to a +checkpoint directory. If execution is interrupted (crash, timeout, manual stop), +you can resume from where it left off without re-executing completed nodes. + +See the [Checkpointing](/guide/execution-strategies/checkpointing) guide for usage patterns +and failure recovery strategies. + +--- + +## CheckpointExecutor + + + +An executor that saves completed node results to a checkpoint directory. On +failure, call `.resume()` to pick up where execution left off. + + + +### execute + + ExecutionResult`} /> + +Execute all tasks, saving results to the checkpoint directory as each node +completes. If a previous checkpoint exists, it is cleared and a fresh execution +begins. 
+ + + +**Returns:** [ExecutionResult](/api/execution/execution) + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("download").add_node("parse").add_node("validate") + .add_node("transform").add_node("upload") + .add_edge("download", "parse") + .add_edge("parse", "validate") + .add_edge("validate", "transform") + .add_edge("transform", "upload") + .build() +) + +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="./checkpoints") +result = executor.execute({ + "download": lambda: "raw_data", + "parse": lambda: "parsed", + "validate": lambda: "valid", + "transform": lambda: "transformed", + "upload": lambda: "uploaded", +}) + +print(result.succeeded) # 5 +``` + +### resume + + ExecutionResult`} /> + +Resume execution from the last checkpoint. Nodes that completed successfully in +a previous run are skipped (their saved results are loaded). Nodes that failed +or were never started are re-executed. + + + +**Returns:** [ExecutionResult](/api/execution/execution) + +```python +# Suppose download and parse completed, but validate failed. +# Fix the issue and resume: +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="./checkpoints") +result = executor.resume({ + "download": lambda: "raw_data", + "parse": lambda: "parsed", + "validate": lambda: "valid", # fixed + "transform": lambda: "transformed", + "upload": lambda: "uploaded", +}) + +print(result.succeeded) # 5 +# download and parse were loaded from checkpoint +# validate, transform, upload were re-executed +``` + +### checkpoint_info + + CheckpointInfo | None`} /> + +Return information about the current checkpoint state, or `None` if no +checkpoint exists. 
+ +**Returns:** [CheckpointInfo](#checkpointinfo) or `None` + +```python +info = executor.checkpoint_info() +if info is not None: + print(f"Completed: {len(info.completed_nodes)}/{info.total_nodes}") + print(f"Failed: {info.failed_nodes}") + print(f"Timestamp: {info.timestamp}") +else: + print("No checkpoint found.") +``` + +### clear_checkpoint + + None`} /> + +Delete all checkpoint files from the checkpoint directory. Use this after a +successful run to clean up, or to force a fresh execution on the next call. + +```python +executor.clear_checkpoint() +assert executor.checkpoint_info() is None +``` + +--- + +## CheckpointInfo + + + +Metadata about the current checkpoint state. + +| Property | Type | Description | +|----------|------|-------------| +| `checkpoint_dir` | `str` | The directory where checkpoint files are stored. | +| `completed_nodes` | `list[str]` | Names of nodes that completed successfully. | +| `failed_nodes` | `list[str]` | Names of nodes that failed. | +| `total_nodes` | `int` | Total number of nodes in the DAG. | +| `timestamp` | `str` | ISO-8601 timestamp of the last checkpoint write. 
| + +```python +info = executor.checkpoint_info() +print(f"Progress: {len(info.completed_nodes)}/{info.total_nodes}") +print(f"Completed: {info.completed_nodes}") +print(f"Failed: {info.failed_nodes}") +print(f"Last updated: {info.timestamp}") +``` + +--- + +## Complete Example + +A long-running data pipeline with checkpoint-and-resume: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("fetch_users") + .add_node("fetch_orders") + .add_node("join") + .add_node("enrich") + .add_node("validate") + .add_node("write_parquet") + .add_node("upload_s3") + .add_edge("fetch_users", "join") + .add_edge("fetch_orders", "join") + .add_edge("join", "enrich") + .add_edge("enrich", "validate") + .add_edge("validate", "write_parquet") + .add_edge("write_parquet", "upload_s3") + .build() +) + +tasks = { + "fetch_users": lambda: "1M users fetched", + "fetch_orders": lambda: "5M orders fetched", + "join": lambda: "joined dataset", + "enrich": lambda: "enriched with geo data", + "validate": lambda: "all checks passed", + "write_parquet": lambda: "wrote 2GB parquet", + "upload_s3": lambda: "uploaded to s3://bucket/output", +} + +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/pipeline_ckpt") + +# First attempt — may fail partway through +try: + result = executor.execute(tasks) +except Exception: + info = executor.checkpoint_info() + print(f"Interrupted: {len(info.completed_nodes)}/{info.total_nodes} complete") + +# Resume after fixing the issue +result = executor.resume(tasks) +print(f"All done: {result.succeeded} nodes succeeded") + +# Clean up +executor.clear_checkpoint() +``` + +--- + +## Checkpoint File Layout + +The checkpoint directory contains one file per completed node, plus a metadata +file: + +``` +./checkpoints/ + _meta.json # CheckpointInfo (completed/failed lists, timestamp) + fetch_users.pkl # Pickled result of fetch_users + fetch_orders.pkl # Pickled result of fetch_orders + join.pkl # Pickled result of join + ... 
+``` + + +Checkpoint files use Python's `pickle` module. Only resume from checkpoints you +trust. Do not load checkpoint files from untrusted sources. + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the base executor without checkpointing. +- [Caching](/api/execution/caching) — content-addressable caching (complementary to checkpointing). +- [Incremental Execution](/api/execution/incremental) — re-execute only changed nodes. +- [Checkpointing guide](/guide/execution-strategies/checkpointing) — usage patterns and recovery strategies. diff --git a/docs-next/content/docs/api/execution/conditions.mdx b/docs-next/content/docs/api/execution/conditions.mdx new file mode 100644 index 0000000..3aa1f8f --- /dev/null +++ b/docs-next/content/docs/api/execution/conditions.mdx @@ -0,0 +1,278 @@ +--- +title: "Conditional Execution" +description: "API reference for ConditionalDAGBuilder, ConditionalEdge, and ConditionalExecutor — predicate-gated edges that skip branches at runtime." +--- + +# Conditional Execution + +The conditional execution module allows edges in your DAG to carry predicate +functions. At runtime, the executor evaluates each condition before traversing +the edge. If the condition returns `False`, the downstream node (and its +subtree) is skipped. This enables branching, feature flags, and data-dependent +routing without modifying the graph structure. + +See the [Conditional Execution](/guide/execution-strategies/conditional) guide for patterns +including if/else branches, switch-case routing, and dynamic feature flags. + +--- + +## ConditionalDAGBuilder + + + +A specialized builder that supports conditional edges. Similar to +[DAGBuilder](/api/core/builder) but with an additional `condition` parameter on +`add_edge()`. The builder produces both a DAG and a conditions dictionary that +the [ConditionalExecutor](#conditionalexecutor) uses at runtime. 
+ +```python +import dagron + +builder = dagron.ConditionalDAGBuilder() +``` + +### add_node + + ConditionalDAGBuilder`} /> + +Add a node to the graph. Returns `self` for chaining. + + + +### add_edge + + ConditionalDAGBuilder`} /> + +Add a directed edge with an optional condition predicate. If `condition` is +provided, the edge is only traversed at runtime when `condition()` returns +`True`. If `condition` is `None`, the edge is unconditional (always traversed). + + + +### build + + tuple[DAG, dict[tuple[str, str], Callable[[], bool]]]`} /> + +Finalize and validate the graph. Returns a tuple of the +[DAG](/api/core/core) and a dictionary mapping `(from_node, to_node)` pairs to +their condition predicates. Pass both to the +[ConditionalExecutor](#conditionalexecutor). + +**Returns:** `tuple[DAG, dict]` — the validated DAG and the conditions map. + +**Raises:** +- `CycleError` — if the graph contains a cycle. +- `NodeNotFoundError` — if an edge references a non-existent node. + +```python +import dagron + +use_gpu = True # runtime flag + +dag, conditions = ( + dagron.ConditionalDAGBuilder() + .add_node("preprocess") + .add_node("cpu_train") + .add_node("gpu_train") + .add_node("evaluate") + .add_edge("preprocess", "cpu_train", condition=lambda: not use_gpu) + .add_edge("preprocess", "gpu_train", condition=lambda: use_gpu) + .add_edge("cpu_train", "evaluate") + .add_edge("gpu_train", "evaluate") + .build() +) + +print(dag.node_count()) # 4 +print(len(conditions)) # 2 (only conditional edges) +``` + +--- + +## ConditionalEdge + + + +A data class representing a conditional edge. Returned by +`ConditionalDAGBuilder` internals and useful for introspection. + +| Property | Type | Description | +|----------|------|-------------| +| `from_node` | `str` | The source node name. | +| `to_node` | `str` | The target node name. | +| `condition` | `Callable[[], bool]` | The predicate function. | +| `label` | `str | None` | Optional human-readable label. 
 | + +--- + +## ConditionalExecutor + + + +An executor that evaluates edge conditions at runtime. Before dispatching a +node, the executor evaluates the conditions on its incoming edges. The node is +executed when at least one incoming conditional edge evaluates to `True`, or +when it has at least one unconditional incoming edge whose source completed; +otherwise the node is skipped. + + + +### execute + + ExecutionResult`} /> + +Execute tasks, evaluating conditions on each edge before dispatching. + + + +**Returns:** [ExecutionResult](/api/execution/execution) + +```python +import dagron + +use_gpu = True + +dag, conditions = ( + dagron.ConditionalDAGBuilder() + .add_node("preprocess") + .add_node("cpu_train") + .add_node("gpu_train") + .add_node("evaluate") + .add_edge("preprocess", "cpu_train", condition=lambda: not use_gpu) + .add_edge("preprocess", "gpu_train", condition=lambda: use_gpu) + .add_edge("cpu_train", "evaluate") + .add_edge("gpu_train", "evaluate") + .build() +) + +executor = dagron.ConditionalExecutor(dag, conditions) +result = executor.execute({ + "preprocess": lambda: "data ready", + "cpu_train": lambda: "trained on CPU", + "gpu_train": lambda: "trained on GPU", + "evaluate": lambda: "accuracy: 0.95", +}) + +print(result.succeeded) # 3 +print(result.skipped) # 1 (cpu_train skipped) +print(result.node_results["gpu_train"].status) # COMPLETED +print(result.node_results["cpu_train"].status) # SKIPPED +``` + +--- + +## Patterns + +### If/Else Branch + +```python +flag = True + +dag, conditions = ( + dagron.ConditionalDAGBuilder() + .add_node("check") + .add_node("branch_true") + .add_node("branch_false") + .add_node("merge") + .add_edge("check", "branch_true", condition=lambda: flag) + .add_edge("check", "branch_false", condition=lambda: not flag) + .add_edge("branch_true", "merge") + .add_edge("branch_false", "merge") + .build() +) +``` + +### Feature Flags + +```python +import os + +dag, conditions = ( + 
dagron.ConditionalDAGBuilder() + .add_node("fetch") + .add_node("cache_result") + .add_node("process") + .add_edge("fetch", "cache_result", + condition=lambda: os.getenv("ENABLE_CACHE") == "1") + .add_edge("fetch", "process") + .add_edge("cache_result", "process") + .build() +) +``` + +### Data-Dependent Routing + +Conditions can inspect shared state that is updated by upstream tasks: + +```python +shared = {} + +def classify(): + shared["category"] = "premium" + return shared["category"] + +dag, conditions = ( + dagron.ConditionalDAGBuilder() + .add_node("classify") + .add_node("premium_flow") + .add_node("standard_flow") + .add_node("finalize") + .add_edge("classify", "premium_flow", + condition=lambda: shared.get("category") == "premium") + .add_edge("classify", "standard_flow", + condition=lambda: shared.get("category") != "premium") + .add_edge("premium_flow", "finalize") + .add_edge("standard_flow", "finalize") + .build() +) +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the base executor without conditions. +- [DAGBuilder](/api/core/builder) — the standard builder without conditional edges. +- [Dynamic Execution](/api/execution/dynamic) — modify the graph at runtime instead of skipping edges. +- [Approval Gates](/api/execution/gates) — human-in-the-loop pause/resume. +- [Conditional Execution guide](/guide/execution-strategies/conditional) — patterns and best practices. diff --git a/docs-next/content/docs/api/execution/distributed.mdx b/docs-next/content/docs/api/execution/distributed.mdx new file mode 100644 index 0000000..c2db6bf --- /dev/null +++ b/docs-next/content/docs/api/execution/distributed.mdx @@ -0,0 +1,421 @@ +--- +title: "Distributed Execution" +description: "API reference for DistributedExecutor, DistributedBackend, and PartitionedDAGExecutor — run DAGs across threads, processes, Ray, and Celery." 
+--- + +# Distributed Execution + +The distributed execution module lets you run DAG tasks across multiple +backends: threads, processes, Ray clusters, or Celery workers. A pluggable +backend protocol makes it easy to integrate with any distributed computing +framework. + +For large DAGs, the `PartitionedDAGExecutor` splits the graph into partitions +and assigns each partition to a different worker group for improved data +locality and reduced communication overhead. + +See the [Distributed Execution](/guide/execution-strategies/distributed) guide for deployment +patterns and backend selection advice. + +--- + +## DistributedExecutor + + + +An executor that dispatches tasks to a pluggable distributed backend. +Supports the context manager protocol for automatic backend shutdown. + + + +### execute + + DistributedExecutionResult`} /> + +Execute tasks via the distributed backend. + + + +**Returns:** [DistributedExecutionResult](#distributedexecutionresult) + +### Context Manager + +The executor can be used as a context manager for automatic backend shutdown: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("fetch").add_node("process").add_node("store") + .add_edge("fetch", "process").add_edge("process", "store") + .build() +) + +with dagron.DistributedExecutor(dag, backend=dagron.ThreadBackend(max_workers=4)) as executor: + result = executor.execute({ + "fetch": lambda: "data", + "process": lambda: "processed", + "store": lambda: "stored", + }) + +print(result.succeeded) # 3 +# Backend is automatically shut down on exit +``` + +--- + +## DistributedExecutionResult + + + +The result of a distributed execution. Wraps the standard +[ExecutionResult](/api/execution/execution) with backend-specific metadata. + +| Property | Type | Description | +|----------|------|-------------| +| `execution_result` | `ExecutionResult` | The underlying execution result with per-node details. 
| +| `backend_name` | `str` | Name of the backend used (e.g., `"thread"`, `"ray"`, `"celery"`). | +| `dispatch_info` | `dict[str, Any]` | Backend-specific dispatch metadata (worker IDs, queue names, etc.). | + +```python +print(f"Backend: {result.backend_name}") +print(f"Succeeded: {result.execution_result.succeeded}") +print(f"Dispatch info: {result.dispatch_info}") +``` + +--- + +## DistributedBackend Protocol + + str: ... + + def submit( + self, + task: Callable, + node_name: str, + ) -> Any: ... + + def result( + self, + future: Any, + timeout: float | None = None, + ) -> Any: ... + + def shutdown(self) -> None: ...`} /> + +The protocol that all distributed backends must implement. You can create custom +backends by implementing these four members. + +| Method | Description | +|--------|-------------| +| `name` | A human-readable backend name. | +| `submit(task, node_name)` | Submit a task for execution. Returns a future-like object. | +| `result(future, timeout)` | Block until the future completes and return its result. Raises on timeout. | +| `shutdown()` | Shut down the backend and release all resources. | + +```python +class MyCustomBackend: + @property + def name(self) -> str: + return "custom" + + def submit(self, task, node_name): + # dispatch to your infrastructure + return my_cluster.submit(task) + + def result(self, future, timeout=None): + return future.get(timeout=timeout) + + def shutdown(self): + my_cluster.close() +``` + +--- + +## Built-in Backends + +### ThreadBackend + + + +A backend that dispatches tasks to a `concurrent.futures.ThreadPoolExecutor`. +Best for I/O-bound tasks. + + + +```python +backend = dagron.ThreadBackend(max_workers=8) +``` + +### MultiprocessingBackend + + + +A backend that dispatches tasks to a `concurrent.futures.ProcessPoolExecutor`. +Best for CPU-bound tasks. Tasks must be picklable. 
+ + + +```python +backend = dagron.MultiprocessingBackend(max_workers=4) +``` + + +Tasks submitted to `MultiprocessingBackend` must be picklable. Lambdas and +closures will fail. Use module-level functions instead. + +### RayBackend + + + +A backend that dispatches tasks to a [Ray](https://ray.io) cluster. Requires +`ray` to be installed (`pip install dagron[ray]`). + + + +```python +backend = dagron.RayBackend( + address="ray://cluster:10001", + num_cpus=2, + num_gpus=1, +) + +with dagron.DistributedExecutor(dag, backend=backend) as executor: + result = executor.execute(tasks) +``` + +### CeleryBackend + + + +A backend that dispatches tasks to [Celery](https://docs.celeryq.dev/) workers. +Requires `celery` to be installed (`pip install dagron[celery]`). + + + +```python +backend = dagron.CeleryBackend( + broker="redis://localhost:6379/0", + backend_url="redis://localhost:6379/1", + queue="dagron_tasks", +) + +with dagron.DistributedExecutor(dag, backend=backend) as executor: + result = executor.execute(tasks) +``` + +--- + +## PartitionedDAGExecutor + + + +An executor that partitions the DAG into `k` groups and executes each partition +with a dedicated worker pool. This reduces inter-partition communication and +improves data locality for large DAGs. + + + +### execute + + ExecutionResult`} /> + +Partition the DAG and execute tasks. 
+ + + +**Returns:** [ExecutionResult](/api/execution/execution) + +```python +import dagron + +# A large DAG with many nodes +dag = dagron.DAG.builder() +for i in range(100): + dag = dag.add_node(f"node_{i}") +for i in range(99): + dag = dag.add_edge(f"node_{i}", f"node_{i+1}") +dag = dag.build() + +tasks = {f"node_{i}": lambda i=i: f"result_{i}" for i in range(100)} + +executor = dagron.PartitionedDAGExecutor( + dag, + k=4, + strategy="balanced", + max_workers=8, +) + +result = executor.execute(tasks) +print(f"Succeeded: {result.succeeded}") # 100 +``` + +### Strategies + +| Strategy | Description | +|----------|-------------| +| `"level_based"` | Assign nodes to partitions based on their topological level. Simple and fast. | +| `"balanced"` | Balance node costs across partitions. Good general-purpose strategy. | +| `"communication_min"` | Minimize cross-partition edges using Kernighan-Lin refinement. Best for data-intensive pipelines. | + +These map to `DAG.partition_level_based()`, `DAG.partition_balanced()`, and +`DAG.partition_communication_min()` respectively. See [DAG partitioning](/api/core/core) +for the underlying algorithms. 
+ +--- + +## Complete Example: Ray Cluster + +A complete distributed ML training pipeline running on Ray: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("load_data") + .add_node("preprocess") + .add_node("train_xgb") + .add_node("train_nn") + .add_node("ensemble") + .add_node("evaluate") + .add_edge("load_data", "preprocess") + .add_edge("preprocess", "train_xgb") + .add_edge("preprocess", "train_nn") + .add_edge("train_xgb", "ensemble") + .add_edge("train_nn", "ensemble") + .add_edge("ensemble", "evaluate") + .build() +) + +def load_data(): + return "loaded 1M rows" + +def preprocess(): + return "preprocessed features" + +def train_xgb(): + import time; time.sleep(5) + return {"model": "xgb", "auc": 0.92} + +def train_nn(): + import time; time.sleep(10) + return {"model": "nn", "auc": 0.94} + +def ensemble(): + return {"model": "ensemble", "auc": 0.96} + +def evaluate(): + return "evaluation report saved" + +tasks = { + "load_data": load_data, + "preprocess": preprocess, + "train_xgb": train_xgb, + "train_nn": train_nn, + "ensemble": ensemble, + "evaluate": evaluate, +} + +backend = dagron.RayBackend(num_cpus=2, num_gpus=1) + +with dagron.DistributedExecutor( + dag, + backend=backend, + enable_tracing=True, + node_timeout=300, + callbacks=dagron.ExecutionCallbacks( + on_start=lambda n: print(f"[{backend.name}] Starting {n}"), + on_complete=lambda n, r: print(f"[{backend.name}] Completed {n}"), + ), +) as executor: + result = executor.execute(tasks) + +print(f"\nBackend: {result.backend_name}") +print(f"Succeeded: {result.execution_result.succeeded}") +print(f"Duration: {result.execution_result.total_duration_seconds:.1f}s") +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the local thread-pool executor. +- [Resource Scheduling](/api/execution/resources) — GPU/CPU/memory-aware local scheduling. +- [DAG partitioning](/api/core/core) — the partitioning algorithms used by `PartitionedDAGExecutor`. 
+- [Distributed Execution guide](/guide/execution-strategies/distributed) — deployment and backend selection guide. diff --git a/docs-next/content/docs/api/execution/dynamic.mdx b/docs-next/content/docs/api/execution/dynamic.mdx new file mode 100644 index 0000000..888558d --- /dev/null +++ b/docs-next/content/docs/api/execution/dynamic.mdx @@ -0,0 +1,262 @@ +--- +title: "Dynamic Execution" +description: "API reference for DynamicExecutor, DynamicModification, and DynamicNodeSpec — expand the DAG at runtime based on node results." +--- + +# Dynamic Execution + +The dynamic execution module lets you modify the DAG at runtime. When a node +completes, an **expander function** can inspect its result and add or remove +nodes before execution continues. This supports fan-out patterns where the +number of downstream tasks depends on data discovered at runtime (e.g., one task +per file found in a directory, one task per API page to fetch). + +See the [Dynamic DAGs](/guide/execution-strategies/dynamic-dags) guide for usage patterns and +best practices. + +--- + +## DynamicExecutor + + + +An executor that supports runtime graph modifications. After each node +completes, the executor checks if an expander function is registered for that +node. If so, the expander is called with the node's return value and can return +a `DynamicModification` describing nodes and edges to add or remove. + + + +### execute + + ExecutionResult`} /> + +Execute tasks with dynamic expansion. The `tasks` dictionary should contain +tasks for all initially known nodes. Dynamically added nodes must include their +task callable in the [DynamicNodeSpec](#dynamicnodespec). + + + +**Returns:** [ExecutionResult](/api/execution/execution) — includes results for both +initial and dynamically added nodes. 
+ +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("discover") + .add_node("aggregate") + .add_edge("discover", "aggregate") + .build() +) + +def discover_expander(result): + """Add one processing node per discovered file.""" + files = result # e.g., ["a.csv", "b.csv", "c.csv"] + nodes = [ + dagron.DynamicNodeSpec( + name=f"process_{f}", + task=lambda f=f: f"processed {f}", + dependencies=["discover"], + dependents=["aggregate"], + ) + for f in files + ] + return dagron.DynamicModification(add_nodes=nodes) + +executor = dagron.DynamicExecutor( + dag, + expanders={"discover": discover_expander}, + max_workers=4, +) + +result = executor.execute({ + "discover": lambda: ["a.csv", "b.csv", "c.csv"], + "aggregate": lambda: "all files processed", +}) + +print(result.succeeded) # 5 (discover + 3 process + aggregate) +``` + +--- + +## DynamicModification + + + +A description of graph modifications to apply after a node completes. Returned +by expander functions. + + + +```python +# Add nodes +mod = dagron.DynamicModification( + add_nodes=[ + dagron.DynamicNodeSpec("task_1", task=lambda: "result_1", dependencies=["source"]), + dagron.DynamicNodeSpec("task_2", task=lambda: "result_2", dependencies=["source"]), + ] +) + +# Remove nodes +mod = dagron.DynamicModification(remove_nodes=["obsolete_node"]) + +# Both +mod = dagron.DynamicModification( + add_nodes=[dagron.DynamicNodeSpec("replacement", task=lambda: "new", dependencies=["source"])], + remove_nodes=["old_task"], +) +``` + +--- + +## DynamicNodeSpec + + + +A specification for a node to be dynamically added during execution. Includes +the task callable and edge connections. + + + +The `dependencies` list creates edges `dep -> new_node` and the `dependents` +list creates edges `new_node -> dependent`. This wires the new node into the +existing graph topology. 
+ +```python +spec = dagron.DynamicNodeSpec( + name="process_chunk_42", + task=lambda: "chunk 42 processed", + dependencies=["split"], # split -> process_chunk_42 + dependents=["merge"], # process_chunk_42 -> merge +) +``` + +--- + +## Complete Example: Map-Reduce + +A dynamic map-reduce pipeline where the mapper discovers the number of chunks at +runtime: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("read_input") + .add_node("reduce") + .add_edge("read_input", "reduce") + .build() +) + +def map_expander(data): + """Split input into chunks and create a mapper node per chunk.""" + chunks = [data[i:i+100] for i in range(0, len(data), 100)] + return dagron.DynamicModification( + add_nodes=[ + dagron.DynamicNodeSpec( + name=f"map_{i}", + task=lambda chunk=chunk: sum(chunk), + dependencies=["read_input"], + dependents=["reduce"], + ) + for i, chunk in enumerate(chunks) + ] + ) + +executor = dagron.DynamicExecutor( + dag, + expanders={"read_input": map_expander}, + max_workers=8, + callbacks=dagron.ExecutionCallbacks( + on_dynamic_expand=lambda name, new_nodes: print( + f"[EXPAND] {name} added {len(new_nodes)} nodes" + ), + ), +) + +result = executor.execute({ + "read_input": lambda: list(range(500)), + "reduce": lambda: "reduction complete", +}) + +print(f"Total nodes executed: {result.succeeded}") +# read_input + 5 mappers + reduce = 7 +``` + +--- + +## Callbacks for Dynamic Expansion + +The `on_dynamic_expand` callback in [ExecutionCallbacks](/api/execution/execution) is +called whenever new nodes are added: + +```python +callbacks = dagron.ExecutionCallbacks( + on_dynamic_expand=lambda node_name, new_nodes: print( + f"Node '{node_name}' expanded with: {new_nodes}" + ), +) +``` + +This is useful for logging, monitoring, and debugging dynamic DAGs in +production. + +--- + +## Constraints and Safety + +- **No cycles:** dynamically added edges must not create cycles. 
The executor + validates this and raises `CycleError` if a cycle would be introduced. +- **No duplicate names:** dynamically added nodes must have unique names. A + `DuplicateNodeError` is raised otherwise. +- **Topological consistency:** new nodes are inserted into the execution + schedule at the correct topological position. Nodes that have already been + dispatched are not re-executed. +- **Expander idempotency:** expanders should be idempotent. If execution is + retried (e.g., via checkpointing), expanders may run again. + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the base executor without dynamic expansion. +- [Conditional Execution](/api/execution/conditions) — skip branches without modifying the graph. +- [Pipeline](/api/execution/pipeline) — a static decorator-based pipeline API. +- [Dynamic DAGs guide](/guide/execution-strategies/dynamic-dags) — patterns and best practices. diff --git a/docs-next/content/docs/api/execution/execution.mdx b/docs-next/content/docs/api/execution/execution.mdx new file mode 100644 index 0000000..7bdec2e --- /dev/null +++ b/docs-next/content/docs/api/execution/execution.mdx @@ -0,0 +1,345 @@ +--- +title: "Execution" +description: "API reference for DAGExecutor, AsyncDAGExecutor, ExecutionResult, NodeResult, NodeStatus, and ExecutionCallbacks." +--- + +# Execution + +The execution module provides thread-pool and async executors that walk the DAG +in topological order, dispatching tasks with maximum parallelism while respecting +dependency constraints. + +See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for patterns +including timeouts, cancellation, callbacks, and fail-fast behavior. + +--- + +## DAGExecutor + + + +A synchronous executor that runs DAG tasks on a thread pool. Tasks are +dispatched in topological order with maximum parallelism bounded by +`max_workers`. + + + +### execute + + ExecutionResult`} /> + +Execute all tasks according to the DAG topology. 
Each key in `tasks` must match +a node name in the DAG. Returns an [ExecutionResult](#executionresult) when all +tasks have completed, failed, or been skipped. + + + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("a").add_node("b").add_node("c") + .add_edge("a", "b").add_edge("b", "c") + .build() +) + +result = dagron.DAGExecutor(dag, max_workers=4).execute({ + "a": lambda: "step-a", + "b": lambda: "step-b", + "c": lambda: "step-c", +}) + +print(result.succeeded) # 3 +``` + +--- + +## AsyncDAGExecutor + + + +An async executor that runs DAG tasks on an asyncio event loop. Same interface +as `DAGExecutor` but all tasks must be async callables (coroutines). + + + +### execute (async) + + ExecutionResult`} /> + +Execute all async tasks according to the DAG topology. + + + +```python +import asyncio +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("fetch").add_node("process") + .add_edge("fetch", "process") + .build() +) + +async def main(): + result = await dagron.AsyncDAGExecutor(dag).execute({ + "fetch": lambda: asyncio.sleep(0.1), + "process": lambda: asyncio.sleep(0.05), + }) + print(result.succeeded) # 2 + +asyncio.run(main()) +``` + +--- + +## ExecutionResult + + + +The aggregate result of executing all tasks in a DAG. Returned by every +executor's `.execute()` method. + +| Property | Type | Description | +|----------|------|-------------| +| `node_results` | `dict[str, NodeResult]` | Per-node results keyed by node name. | +| `succeeded` | `int` | Number of nodes that completed successfully. | +| `failed` | `int` | Number of nodes that raised exceptions. | +| `skipped` | `int` | Number of nodes skipped due to upstream failures. | +| `timed_out` | `int` | Number of nodes that exceeded the timeout. | +| `cancelled` | `int` | Number of nodes cancelled by the cancel event. | +| `total_duration_seconds` | `float` | Wall-clock duration of the entire execution. 
 | +| `trace` | `Trace \| None` | Chrome-compatible trace data if `enable_tracing=True`. | + +```python +result = executor.execute(tasks) + +print(f"Succeeded: {result.succeeded}/{result.succeeded + result.failed}") +print(f"Duration: {result.total_duration_seconds:.3f}s") + +for name, nr in result.node_results.items(): + print(f" {name}: {nr.status} ({nr.duration_seconds:.3f}s)") +``` + +--- + +## NodeResult + + + +The result of executing a single node. + +| Property | Type | Description | +|----------|------|-------------| +| `name` | `str` | The node name. | +| `status` | `NodeStatus` | The terminal status of this node. | +| `result` | `Any` | The return value of the task callable, or `None` if it did not complete. | +| `error` | `Exception \| None` | The exception raised by the task, or `None` on success. | +| `duration_seconds` | `float` | Wall-clock duration of this node's execution. | + +```python +nr = result.node_results["transform"] +if nr.status == dagron.NodeStatus.COMPLETED: + print(f"Transform returned: {nr.result}") +elif nr.status == dagron.NodeStatus.FAILED: + print(f"Transform failed: {nr.error}") +``` + +--- + +## NodeStatus + + + +Enumeration of possible node execution states. + +| Value | Description | +|-------|-------------| +| `PENDING` | Node has not yet started. | +| `RUNNING` | Node is currently executing. | +| `COMPLETED` | Node finished successfully. | +| `FAILED` | Node raised an exception. | +| `SKIPPED` | Node was skipped because an upstream dependency failed (fail-fast mode). | +| `TIMED_OUT` | Node exceeded the execution timeout. | +| `CANCELLED` | Node was cancelled by the cancel event. | +| `CACHE_HIT` | Node result was loaded from cache instead of executing. | + +--- + +## ExecutionCallbacks + + + +Lifecycle callbacks invoked during execution. All callbacks are optional. Each +receives the node name as the first argument. 
+ + + +```python +import dagron + +callbacks = dagron.ExecutionCallbacks( + on_start=lambda name: print(f"[START] {name}"), + on_complete=lambda name, result: print(f"[DONE] {name} -> {result}"), + on_failure=lambda name, err: print(f"[FAIL] {name}: {err}"), + on_skip=lambda name: print(f"[SKIP] {name}"), +) + +executor = dagron.DAGExecutor(dag, callbacks=callbacks) +result = executor.execute(tasks) +``` + +--- + +## Timeouts and Cancellation + +### Global timeout + +Pass a `timeout` to `.execute()` to set a wall-clock limit on the entire +execution: + +```python +result = executor.execute(tasks, timeout=30.0) +print(result.timed_out) # number of nodes that exceeded the timeout +``` + +### External cancellation + +Use a `threading.Event` (or `asyncio.Event` for async) to cancel execution from +another thread: + +```python +import threading + +cancel = threading.Event() + +def watchdog(): + import time + time.sleep(10) + cancel.set() + +threading.Thread(target=watchdog, daemon=True).start() +result = executor.execute(tasks, cancel_event=cancel) +print(result.cancelled) # number of nodes cancelled +``` + +--- + +## Tracing + +Enable execution tracing to produce Chrome-compatible trace data: + +```python +executor = dagron.DAGExecutor(dag, enable_tracing=True) +result = executor.execute(tasks) + +# Write trace to file for chrome://tracing +with open("trace.json", "w") as f: + f.write(result.trace.to_json()) +``` + +See the [Tracing & Profiling](/guide/observability/tracing-profiling) guide for +visualization instructions. + +--- + +## Related + +- [DAG](/api/core/core) — the graph driving execution order. +- [Pipeline](/api/execution/pipeline) — a higher-level decorator-based execution API. +- [Incremental Execution](/api/execution/incremental) — re-execute only changed nodes. +- [Caching](/api/execution/caching) — skip nodes whose inputs have not changed. +- [Resource Scheduling](/api/execution/resources) — GPU/CPU/memory-aware execution. 
diff --git a/docs-next/content/docs/api/execution/gates.mdx b/docs-next/content/docs/api/execution/gates.mdx new file mode 100644 index 0000000..bdd12ff --- /dev/null +++ b/docs-next/content/docs/api/execution/gates.mdx @@ -0,0 +1,348 @@ +--- +title: "Approval Gates" +description: "API reference for ApprovalGate, GateController, GateStatus, and gate-related errors — human-in-the-loop pause and resume." +--- + +# Approval Gates + +Approval gates pause DAG execution at specific nodes until a human (or external +system) explicitly approves or rejects the continuation. This enables +human-in-the-loop workflows such as deployment approvals, data quality +sign-offs, and compliance checks. + +Gates integrate with any executor via [ExecutionCallbacks](/api/execution/execution) +and provide both synchronous and asynchronous waiting interfaces. + +See the [Approval Gates](/guide/execution-strategies/approval-gates) guide for end-to-end +workflow patterns. + +--- + +## ApprovalGate + + + +A single approval gate that can be attached to a node. The gate starts in +`PENDING` state, transitions to `WAITING` when the executor reaches it, and +resolves to `APPROVED`, `REJECTED`, or `TIMED_OUT`. + + + +### approve + + None`} /> + +Approve the gate, allowing execution to proceed past this point. + +### reject + + None`} /> + +Reject the gate, causing the gated node to fail with a `GateRejectedError`. + + + +### wait_sync + + None`} /> + +Block the current thread until the gate is resolved (approved, rejected, or +timed out). Raises `GateRejectedError` on rejection and `GateTimeoutError` on +timeout. + +### wait_async + + None`} /> + +Await gate resolution in an async context. Raises `GateRejectedError` on +rejection and `GateTimeoutError` on timeout. + +### reset + + None`} /> + +Reset the gate to `PENDING` state for reuse. + +### status + + GateStatus`} /> + +The current gate status. + +### reason + + str | None`} /> + +The rejection reason, or `None` if the gate was not rejected. 
+ +```python +import dagron +import threading + +gate = dagron.ApprovalGate(timeout=60.0) + +# In another thread or process: +def approval_ui(): + input("Press Enter to approve deployment...") + gate.approve() + +threading.Thread(target=approval_ui, daemon=True).start() + +# In the task: +gate.wait_sync() # blocks until approved +print(f"Gate status: {gate.status}") # GateStatus.APPROVED +``` + +--- + +## GateController + + + +A centralized controller for managing multiple gates. Provides a single +interface for approving, rejecting, and querying the status of all gates in a +pipeline. + + + +### add_gate + + ApprovalGate`} /> + +Add a gate to the controller. If `gate` is `None`, a new default `ApprovalGate` +is created. Returns the gate instance. + + + +### approve + + None`} /> + +Approve a named gate. + +### reject + + None`} /> + +Reject a named gate. + +### status + + GateStatus`} /> + +Return the status of a named gate. + +### waiting_gates + + list[str]`} /> + +Return the names of all gates currently in `WAITING` status. + +### get_gate + + ApprovalGate`} /> + +Return the `ApprovalGate` instance for a named gate. + +### has_gate + + bool`} /> + +Return `True` if a gate with the given name exists. + +### wait_sync + + None`} /> + +Block until a named gate is resolved. + +### wait_async + + None`} /> + +Await resolution of a named gate. + +### reset_all + + None`} /> + +Reset all gates to `PENDING` status. 
+ +```python +import dagron + +controller = dagron.GateController() +controller.add_gate("qa_review", dagron.ApprovalGate(timeout=300)) +controller.add_gate("deploy_prod", dagron.ApprovalGate(timeout=600)) + +# Check what's waiting +print(controller.waiting_gates()) # [] + +# Later, in a webhook handler: +controller.approve("qa_review") +print(controller.status("qa_review")) # GateStatus.APPROVED + +controller.reject("deploy_prod", reason="Failed canary check") +print(controller.status("deploy_prod")) # GateStatus.REJECTED +``` + +--- + +## GateStatus + + + +Enumeration of gate states. + +| Value | Description | +|-------|-------------| +| `PENDING` | Gate has been created but execution has not reached it yet. | +| `WAITING` | Execution has reached the gate and is waiting for approval. | +| `APPROVED` | Gate was approved; execution proceeds. | +| `REJECTED` | Gate was rejected; the gated node fails. | +| `TIMED_OUT` | Gate was not resolved before its timeout expired. | + +--- + +## GateRejectedError + + + +Raised when a gate is rejected. The gated node's task will receive this as its +exception, and it will appear in the node's `NodeResult.error`. + + + +```python +try: + gate.wait_sync() +except dagron.GateRejectedError as e: + print(f"Gate '{e.gate_name}' rejected: {e.reason}") +``` + +--- + +## GateTimeoutError + + + +Raised when a gate times out before being approved or rejected. 
+ + + +```python +try: + gate.wait_sync() +except dagron.GateTimeoutError as e: + print(f"Gate '{e.gate_name}' timed out after {e.timeout}s") +``` + +--- + +## Complete Example: Deployment Pipeline + +A deployment pipeline with QA approval and production deployment gates: + +```python +import dagron +import threading + +# Build the DAG +dag = ( + dagron.DAG.builder() + .add_node("build") + .add_node("test") + .add_node("qa_gate") + .add_node("deploy_staging") + .add_node("prod_gate") + .add_node("deploy_prod") + .add_edge("build", "test") + .add_edge("test", "qa_gate") + .add_edge("qa_gate", "deploy_staging") + .add_edge("deploy_staging", "prod_gate") + .add_edge("prod_gate", "deploy_prod") + .build() +) + +# Set up gates +controller = dagron.GateController() +qa_gate = controller.add_gate("qa_gate", dagron.ApprovalGate(timeout=3600)) +prod_gate = controller.add_gate("prod_gate", dagron.ApprovalGate(timeout=7200)) + +# Define tasks +tasks = { + "build": lambda: "artifact-v1.2.3", + "test": lambda: "42 tests passed", + "qa_gate": lambda: qa_gate.wait_sync(), + "deploy_staging": lambda: "deployed to staging", + "prod_gate": lambda: prod_gate.wait_sync(), + "deploy_prod": lambda: "deployed to production", +} + +# Simulate external approval (in production, this would be a web UI or API) +def simulate_approvals(): + import time + time.sleep(2) + print("QA approved!") + controller.approve("qa_gate") + time.sleep(2) + print("Prod approved!") + controller.approve("prod_gate") + +threading.Thread(target=simulate_approvals, daemon=True).start() + +# Execute with gate callbacks +result = dagron.DAGExecutor( + dag, + callbacks=dagron.ExecutionCallbacks( + on_gate_waiting=lambda name: print(f"Waiting for gate: {name}"), + on_gate_resolved=lambda name, status: print(f"Gate {name}: {status}"), + ), +).execute(tasks) + +print(f"\nPipeline: {result.succeeded} succeeded, {result.failed} failed") +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the executor 
that integrates with gates via callbacks. +- [Conditional Execution](/api/execution/conditions) — automated branching (no human involved). +- [Checkpointing](/api/execution/checkpoint) — save and resume after gate rejection. +- [Approval Gates guide](/guide/execution-strategies/approval-gates) — end-to-end workflow patterns. diff --git a/docs-next/content/docs/api/execution/incremental.mdx b/docs-next/content/docs/api/execution/incremental.mdx new file mode 100644 index 0000000..2652152 --- /dev/null +++ b/docs-next/content/docs/api/execution/incremental.mdx @@ -0,0 +1,256 @@ +--- +title: "Incremental Execution" +description: "API reference for IncrementalExecutor and IncrementalResult — re-execute only what changed." +--- + +# Incremental Execution + +The incremental execution module provides an executor that re-runs only the +nodes affected by a set of changes. Unchanged nodes are reused from the previous +run, dramatically reducing execution time for large DAGs where only a few inputs +have changed. + +dagron computes the "dirty set" from the changed nodes and their downstream +descendants, then applies **early cutoff** optimization: if a recomputed node +produces the same result as its cached value, its descendants are not recomputed +even if they were in the initial dirty set. + +See the [Incremental Execution](/guide/execution-strategies/incremental) guide for workflow +patterns and best practices. + +--- + +## IncrementalExecutor + + + +An executor that tracks previous results and only re-executes nodes in the dirty +set. On the first invocation, all nodes are executed. On subsequent invocations +with `changed_nodes`, only the affected subset is re-executed. + + + +### execute + + IncrementalResult`} /> + +Execute tasks incrementally. On the first call (or when `changed_nodes` is +`None`), all nodes are executed. On subsequent calls, only nodes downstream of +the changed nodes are re-executed. 
The executor applies early cutoff: if a +recomputed node produces the same output as the cached value, its descendants +are skipped. + + + +**Returns:** [IncrementalResult](#incrementalresult) + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("source_a") + .add_node("source_b") + .add_node("transform") + .add_node("aggregate") + .add_node("report") + .add_edge("source_a", "transform") + .add_edge("source_b", "transform") + .add_edge("transform", "aggregate") + .add_edge("aggregate", "report") + .build() +) + +tasks = { + "source_a": lambda: [1, 2, 3], + "source_b": lambda: [4, 5, 6], + "transform": lambda: "transformed", + "aggregate": lambda: "aggregated", + "report": lambda: "report ready", +} + +executor = dagron.IncrementalExecutor(dag) + +# First run: execute everything +result = executor.execute(tasks) +print(result.recomputed) # 5 (all nodes) +print(result.reused) # 0 + +# Second run: only source_a changed +result = executor.execute(tasks, changed_nodes=["source_a"]) +print(result.recomputed) # 4 (source_a, transform, aggregate, report) +print(result.reused) # 1 (source_b) +``` + +--- + +## IncrementalResult + + + +The result of an incremental execution. Extends the standard execution result +with incremental-specific metrics. + +| Property | Type | Description | +|----------|------|-------------| +| `node_results` | `dict[str, NodeResult]` | Per-node results keyed by node name. Includes both recomputed and reused nodes. | +| `recomputed` | `int` | Number of nodes that were actually re-executed. | +| `early_cutoff` | `int` | Number of nodes in the dirty set that were skipped because an upstream recomputation produced the same result. | +| `reused` | `int` | Number of nodes that were reused from the previous execution without recomputation. | +| `provenance` | `dict[str, list[str]]` | For each recomputed node, the list of changed root nodes responsible for its invalidation. 
| +| `total_duration_seconds` | `float` | Wall-clock duration of this incremental execution. | +| `trace` | `Trace | None` | Chrome-compatible trace data if `enable_tracing=True`. | + +```python +result = executor.execute(tasks, changed_nodes=["source_a"]) + +print(f"Recomputed: {result.recomputed}") +print(f"Early cutoff: {result.early_cutoff}") +print(f"Reused: {result.reused}") +print(f"Duration: {result.total_duration_seconds:.3f}s") + +# Why was each node recomputed? +for node, causes in result.provenance.items(): + print(f" {node} invalidated by: {causes}") +``` + +--- + +## How Dirty Set Computation Works + +The dirty set is the set of nodes that must be re-evaluated. It is computed as +follows: + +1. Start with the explicitly `changed_nodes`. +2. Add all downstream descendants (transitive successors) of each changed node. +3. The union of these sets is the **dirty set**. + +You can preview the dirty set without executing via `DAG.dirty_set()`: + +```python +dirty = dag.dirty_set(["source_a"]) +print(dirty) # ["source_a", "transform", "aggregate", "report"] +``` + +And the provenance (which change caused which recomputation) via +`DAG.change_provenance()`: + +```python +prov = dag.change_provenance(["source_a"]) +print(prov) +# { +# "source_a": ["source_a"], +# "transform": ["source_a"], +# "aggregate": ["source_a"], +# "report": ["source_a"], +# } +``` + +--- + +## Early Cutoff + +Early cutoff is an optimization that stops propagation when a recomputed node +produces the same result as its cached value. The comparison uses Python's `==` +operator. + +For example, if `transform` is in the dirty set but produces the same output as +the previous run, then `aggregate` and `report` are cut off and reused from +cache, even though they were in the dirty set. 
+ +```python +counter = {"calls": 0} + +def transform(): + counter["calls"] += 1 + return "always_same" # same output regardless of input + +# First run +result1 = executor.execute(tasks) + +# Second run: source_a changed, but transform produces same output +result2 = executor.execute(tasks, changed_nodes=["source_a"]) +print(result2.early_cutoff) # 2 (aggregate and report were cut off) +print(result2.recomputed) # 2 (source_a and transform were recomputed) +print(result2.reused) # 1 (source_b) +``` + +--- + +## Complete Example + +A build system that recompiles only changed source files: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("parse_module_a") + .add_node("parse_module_b") + .add_node("type_check") + .add_node("optimize") + .add_node("codegen") + .add_node("link") + .add_edge("parse_module_a", "type_check") + .add_edge("parse_module_b", "type_check") + .add_edge("type_check", "optimize") + .add_edge("optimize", "codegen") + .add_edge("codegen", "link") + .build() +) + +tasks = { + "parse_module_a": lambda: "ast_a", + "parse_module_b": lambda: "ast_b", + "type_check": lambda: "typed_ast", + "optimize": lambda: "optimized_ir", + "codegen": lambda: "machine_code", + "link": lambda: "executable", +} + +executor = dagron.IncrementalExecutor(dag, enable_tracing=True) + +# Initial full build +result = executor.execute(tasks) +print(f"Full build: {result.recomputed} nodes in {result.total_duration_seconds:.3f}s") + +# Developer edits module_a.py — incremental rebuild +result = executor.execute(tasks, changed_nodes=["parse_module_a"]) +print(f"Incremental: {result.recomputed} recomputed, {result.reused} reused") +print(f"Speed improvement: {result.reused}/{result.recomputed + result.reused} nodes skipped") +``` + +--- + +## Related + +- [DAG.dirty_set](/api/core/core) — preview the dirty set without executing. +- [DAG.change_provenance](/api/core/core) — understand why each node needs recomputation. 
+- [Caching](/api/execution/caching) — content-addressable caching for persistent result storage. +- [Checkpointing](/api/execution/checkpoint) — save and resume execution state. +- [Executing Tasks](/guide/core-concepts/executing-tasks) — general execution guide. diff --git a/docs-next/content/docs/api/execution/meta.json b/docs-next/content/docs/api/execution/meta.json new file mode 100644 index 0000000..7f38254 --- /dev/null +++ b/docs-next/content/docs/api/execution/meta.json @@ -0,0 +1,17 @@ +{ + "title": "Execution", + "defaultOpen": true, + "pages": [ + "execution", + "pipeline", + "incremental", + "caching", + "checkpoint", + "conditions", + "dynamic", + "gates", + "resources", + "distributed", + "reactive" + ] +} diff --git a/docs-next/content/docs/api/execution/pipeline.mdx b/docs-next/content/docs/api/execution/pipeline.mdx new file mode 100644 index 0000000..13aa7da --- /dev/null +++ b/docs-next/content/docs/api/execution/pipeline.mdx @@ -0,0 +1,330 @@ +--- +title: "Pipeline" +description: "API reference for the @task decorator and Pipeline class — a high-level API for building and executing DAGs from decorated functions." +--- + +# Pipeline + +The Pipeline API provides a high-level, decorator-based approach to building and +executing DAGs. Instead of manually creating nodes and edges, you decorate +functions with `@task` and let dagron infer the graph structure from function +parameter names. + +See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for usage patterns +and the [Contracts](/guide/advanced/contracts) guide for type validation with +pipelines. + +--- + +## @task + + + +A decorator that marks a function as a pipeline task. The DAG is inferred from +function parameter names: each parameter name corresponds to the name of an +upstream task whose return value is passed as the argument. + +When used without arguments, the task name is the function name. 
When used with +arguments, you can customize the name, timeout, and retry behavior. + + + +```python +from dagron import task + +@task +def extract(): + """Root task — no parameters, so no dependencies.""" + return [1, 2, 3, 4, 5] + +@task +def transform(extract): + """Depends on 'extract'. Receives extract's return value.""" + return [x * 10 for x in extract] + +@task +def load(transform): + """Depends on 'transform'. Receives transform's return value.""" + return f"loaded {len(transform)} rows" +``` + +### Dependency inference + +The `@task` decorator inspects each parameter name and wires it as a dependency: + +| Parameter name | Matched to | +|---------------|------------| +| `extract` | The task named `"extract"` | +| `transform` | The task named `"transform"` | +| Any other name | The task with that name | + +If a parameter name does not match any task in the pipeline, an error is raised +at pipeline construction time. + +### Fan-in pattern + +Tasks with multiple parameters depend on multiple upstream tasks: + +```python +@task +def merge(api_data, db_data): + """Depends on both 'api_data' and 'db_data' tasks.""" + return {**api_data, **db_data} +``` + +--- + +## Pipeline + + + +A high-level container that builds a DAG from a list of `@task`-decorated +functions and provides execution methods. + + + +```python +from dagron import Pipeline, task + +@task +def fetch(): + return {"users": [1, 2, 3]} + +@task +def enrich(fetch): + return {**fetch, "enriched": True} + +@task +def store(enrich): + return f"stored {len(enrich)} keys" + +pipeline = Pipeline(tasks=[fetch, enrich, store], name="user-pipeline") +``` + +--- + +### Properties + + DAG`} /> + +Access the underlying [DAG](/api/core/core) instance. Useful for inspection, +visualization, or passing to lower-level executors. 
 + +```python +print(pipeline.dag.node_count()) # 3 +print(pipeline.dag.topological_sort()) # [fetch, enrich, store] +print(pipeline.dag.to_mermaid()) # Mermaid diagram string +``` + + list[str]`} /> + +Return the names of all tasks in the pipeline, in topological order. + +```python +print(pipeline.task_names) # ["fetch", "enrich", "store"] +``` + +--- + +### execute + + ExecutionResult`} /> + +Execute the pipeline synchronously. Task return values are automatically passed +as arguments to downstream tasks based on parameter names. + + + +**Returns:** [ExecutionResult](/api/execution/execution) — the aggregate execution +result. + +```python +result = pipeline.execute(max_workers=4) + +print(result.succeeded) # 3 +print(result.node_results["store"].result) # "stored 2 keys" +print(f"Took {result.total_duration_seconds:.3f}s") +``` + +#### Overriding tasks + +Use `overrides` to replace tasks at execution time without modifying the +pipeline definition. This is especially useful for testing: + +```python +result = pipeline.execute(overrides={ + "fetch": lambda: {"users": [99]}, # mock data +}) +print(result.node_results["store"].result) # "stored 2 keys" +``` + +--- + +### execute_async + + ExecutionResult`} /> + +Execute the pipeline asynchronously. All tasks should be async callables. + + + +```python +import asyncio +from dagron import Pipeline, task + +@task +async def fetch(): + await asyncio.sleep(0.1) + return [1, 2, 3] + +@task +async def process(fetch): + return [x * 2 for x in fetch] + +pipeline = Pipeline(tasks=[fetch, process]) +result = asyncio.run(pipeline.execute_async()) +print(result.node_results["process"].result) # [2, 4, 6] +``` + +--- + +### validate_contracts + + list[ContractViolation]`} /> + +Validate type contracts across all pipeline edges. If tasks have type +annotations, those are used as implicit contracts. You can supply additional +explicit contracts via `extra_contracts`. 
+ + + +**Returns:** `list[ContractViolation]` — empty if all contracts are consistent. + +```python +from dagron import Pipeline, task + +@task +def fetch() -> list: + return [1, 2, 3] + +@task +def process(fetch: list) -> dict: + return {"data": fetch} + +pipeline = Pipeline(tasks=[fetch, process]) +violations = pipeline.validate_contracts() +assert len(violations) == 0 +``` + +--- + +## Complete Example + +A realistic data pipeline with fan-out, fan-in, callbacks, and tracing: + +```python +from dagron import Pipeline, task, ExecutionCallbacks + +@task +def api_source(): + return {"source": "api", "rows": 100} + +@task +def db_source(): + return {"source": "db", "rows": 250} + +@task +def clean_api(api_source): + return {**api_source, "cleaned": True} + +@task +def clean_db(db_source): + return {**db_source, "cleaned": True} + +@task +def merge(clean_api, clean_db): + total = clean_api["rows"] + clean_db["rows"] + return {"total_rows": total, "sources": 2} + +@task +def publish(merge): + return f"Published {merge['total_rows']} rows from {merge['sources']} sources" + +pipeline = Pipeline( + tasks=[api_source, db_source, clean_api, clean_db, merge, publish], + name="etl-pipeline", +) + +# Inspect the generated DAG +print(pipeline.dag.to_mermaid()) +print(f"Parallelism levels: {pipeline.dag.topological_levels()}") + +# Execute with callbacks and tracing +result = pipeline.execute( + max_workers=4, + callbacks=ExecutionCallbacks( + on_start=lambda n: print(f" Starting {n}..."), + on_complete=lambda n, r: print(f" Finished {n}"), + ), + enable_tracing=True, +) + +print(f"\n{result.node_results['publish'].result}") +# Published 350 rows from 2 sources +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the lower-level thread-pool executor used internally. +- [DAGBuilder](/api/core/builder) — the builder that the Pipeline constructs behind the scenes. +- [Contracts](/guide/advanced/contracts) — type contract validation guide. 
 + +- [Tracing & Profiling](/guide/observability/tracing-profiling) — visualizing pipeline traces. diff --git a/docs-next/content/docs/api/execution/reactive.mdx b/docs-next/content/docs/api/execution/reactive.mdx new file mode 100644 index 0000000..8f1513e --- /dev/null +++ b/docs-next/content/docs/api/execution/reactive.mdx @@ -0,0 +1,376 @@ +--- +title: Reactive DAG +description: API reference for dagron's reactive DAG execution -- push-based incremental recomputation with subscriptions and early cutoff. +--- + +# Reactive DAG + +The reactive module extends dagron's execution model into a push-based +reactive system. When you set an input value, the `ReactiveDAG` +automatically cascades recomputation through the graph, only recomputing +nodes whose inputs have actually changed (early cutoff). Subscriber +callbacks are fired whenever a node's value changes, enabling live +dashboards, incremental pipelines, and interactive data exploration. + +```python +from dagron.execution.reactive import ReactiveDAG +``` + +--- + +## ReactiveDAG + + None: ...`} /> + +Push-based reactive DAG execution system. Setting an input value +automatically cascades recomputation through the graph and fires subscriber +callbacks when outputs change. + +Each task function receives keyword arguments named after its predecessor +nodes, with the current values of those predecessors. For example, a node +`"transform"` with predecessor `"extract"` receives +`transform(extract=<value>)`, where `<value>` is the current value of the +`"extract"` node. 
+ + + +```python +import dagron +from dagron.execution.reactive import ReactiveDAG + +dag = ( + dagron.DAG.builder() + .add_edge("raw", "cleaned") + .add_edge("cleaned", "features") + .add_edge("features", "prediction") + .build() +) + +tasks = { + "raw": lambda: None, # Input node -- value set externally + "cleaned": lambda raw=None: [x.strip() for x in raw] if raw else [], + "features": lambda cleaned=None: len(cleaned) if cleaned else 0, + "prediction": lambda features=None: features > 5 if features else False, +} + +reactive = ReactiveDAG(dag, tasks) +``` + +### Properties + +--- + +#### ReactiveDAG.dag + + DAG`} /> + +The underlying DAG. + +**Returns:** `DAG` -- The DAG defining the dependency structure. + +--- + +#### ReactiveDAG.values + + dict[str, Any]`} /> + +Current values of all computed nodes. Returns a read-only copy. + +**Returns:** `dict[str, Any]` -- Mapping of node names to their current values. + +```python +all_values = reactive.values +for name, value in all_values.items(): + print(f"{name}: {value}") +``` + +### Methods + +--- + +#### ReactiveDAG.initialize + + dict[str, Any]`} /> + +Compute all nodes in topological order. This performs the initial full +computation. After this, use `set_input()` for incremental updates. + +Any values pre-set via `set_input()` before initialization are preserved +and used during the computation. + +**Returns:** `dict[str, Any]` -- Dictionary of all computed values. + +```python +reactive = ReactiveDAG(dag, tasks) +values = reactive.initialize() +print(f"Initial prediction: {values.get('prediction')}") +``` + +--- + +#### ReactiveDAG.set_input + + dict[str, Any]`} /> + +Set an input value and cascade recomputation through the graph. Only +nodes that are transitively downstream of the changed input are considered +for recomputation, and the early cutoff optimization skips nodes whose +computed value has not actually changed. 
 + +If `initialize()` has not been called yet, this method stores the value +and then calls `initialize()` automatically. + + + +**Returns:** `dict[str, Any]` -- Dictionary of all nodes that were recomputed, mapping node name to new value. Includes the input node itself. + +```python +# Set a new input and see what changed +changed = reactive.set_input("raw", [" Alice ", " Bob ", " Charlie "]) +print(f"Changed nodes: {list(changed.keys())}") +# Changed nodes: ['raw', 'cleaned', 'features', 'prediction'] + +# Set the same value again -- early cutoff prevents recomputation +changed = reactive.set_input("raw", [" Alice ", " Bob ", " Charlie "]) +print(f"Changed nodes: {list(changed.keys())}") +# Changed nodes: [] (nothing changed) +``` + +--- + +#### ReactiveDAG.set_inputs + + dict[str, Any]`} /> + +Set multiple input values and cascade recomputation. More efficient than +calling `set_input()` multiple times because it computes the combined dirty +set and processes all changes in a single topological pass. + + + +**Returns:** `dict[str, Any]` -- Dictionary of all nodes that were recomputed. + +```python +changed = reactive.set_inputs({ + "raw": [" Alice ", " Bob "], +}) +``` + +--- + +#### ReactiveDAG.subscribe + + Callable[[], None]`} /> + +Subscribe to changes on a specific node. The callback is called with +`(node_name, new_value)` whenever the node's value changes during +`set_input()` or `initialize()`. + + + +**Returns:** `Callable[[], None]` -- An unsubscribe function. Call it to remove the subscription. + +```python +def on_prediction_change(name: str, value): + print(f"Prediction updated: {value}") + +unsubscribe = reactive.subscribe("prediction", on_prediction_change) + +reactive.set_input("raw", ["a", "b", "c", "d", "e", "f"]) +# Prints: Prediction updated: True + +# Stop listening +unsubscribe() +``` + +--- + +#### ReactiveDAG.subscribe_all + + Callable[[], None]`} /> + +Subscribe to changes on any node in the DAG. 
The callback is called with +`(node_name, new_value)` whenever any node's value changes. + + + +**Returns:** `Callable[[], None]` -- An unsubscribe function. + +```python +changes_log = [] + +def log_all(name: str, value): + changes_log.append((name, value)) + +unsub = reactive.subscribe_all(log_all) +reactive.set_input("raw", ["x", "y"]) + +print(f"Total changes: {len(changes_log)}") +for name, val in changes_log: + print(f" {name} = {val}") +``` + +--- + +#### ReactiveDAG.get + + Any`} /> + +Get the current value of a node. + + + +**Returns:** `Any` -- The node's current value, or `None` if not yet computed. + +```python +prediction = reactive.get("prediction") +features = reactive.get("features") +print(f"Features: {features}, Prediction: {prediction}") +``` + +--- + +## Early cutoff + +The reactive system implements early cutoff optimization. When a node is +recomputed and its new value equals the old value (via `==`), downstream +nodes are not recomputed. This prevents unnecessary cascading through the +graph. + +```python +import dagron +from dagron.execution.reactive import ReactiveDAG + +dag = ( + dagron.DAG.builder() + .add_edge("input", "round") + .add_edge("round", "format") + .build() +) + +tasks = { + "input": lambda: None, + "round": lambda input=None: round(input, 2) if input else 0, + "format": lambda round=None: f"Value: {round}", +} + +reactive = ReactiveDAG(dag, tasks) +reactive.initialize() + +# Setting 3.14159 -- round(3.14159, 2) = 3.14 +changed = reactive.set_input("input", 3.14159) +print(f"Changed: {list(changed.keys())}") +# Changed: ['input', 'round', 'format'] + +# Setting 3.14001 -- round(3.14001, 2) = 3.14 (same!) 
+changed = reactive.set_input("input", 3.14001) +print(f"Changed: {list(changed.keys())}") +# Changed: ['input', 'round'] +# 'format' was NOT recomputed because 'round' value didn't change +``` + +--- + +## Complete example + +```python +import dagron +from dagron.execution.reactive import ReactiveDAG + +# Build a reactive data processing pipeline +dag = ( + dagron.DAG.builder() + .add_edge("raw_data", "clean") + .add_edge("clean", "stats") + .add_edge("clean", "top_n") + .add_edge("stats", "report") + .add_edge("top_n", "report") + .build() +) + +tasks = { + "raw_data": lambda: None, + "clean": lambda raw_data=None: ( + [x for x in raw_data if x is not None] if raw_data else [] + ), + "stats": lambda clean=None: ( + {"count": len(clean), "sum": sum(clean)} if clean else {} + ), + "top_n": lambda clean=None: ( + sorted(clean, reverse=True)[:3] if clean else [] + ), + "report": lambda stats=None, top_n=None: ( + f"Count: {stats.get('count', 0)}, Top 3: {top_n}" + ), +} + +# Create the reactive DAG +reactive = ReactiveDAG(dag, tasks) + +# Subscribe to the report node +def on_report(name, value): + print(f"Report updated: {value}") + +reactive.subscribe("report", on_report) + +# Initialize with initial data +reactive.set_input("raw_data", [10, 20, None, 30, 5, 15]) +# Report updated: Count: 5, Top 3: [30, 20, 15] + +# Update with new data -- only affected nodes recompute +changed = reactive.set_input("raw_data", [10, 20, 30, 5, 15, 25]) +print(f"\nRecomputed: {list(changed.keys())}") +# Recomputed: ['raw_data', 'clean', 'stats', 'top_n', 'report'] + +# Check current values +print(f"\nCurrent stats: {reactive.get('stats')}") +print(f"Current top 3: {reactive.get('top_n')}") +print(f"Current report: {reactive.get('report')}") + +# Set multiple inputs at once +changes = reactive.set_inputs({"raw_data": [100, 200, 300]}) +print(f"\nBatch update changed {len(changes)} nodes") +``` + +--- + +## See also + +- [Execution](/api/execution/execution) -- standard batch execution 
with `DAGExecutor`. +- [Plugins](/api/utilities/plugins) -- hook-based lifecycle extensions. +- [Tracing](/api/observability/tracing) -- recording events during reactive updates. diff --git a/docs-next/content/docs/api/execution/resources.mdx b/docs-next/content/docs/api/execution/resources.mdx new file mode 100644 index 0000000..86a42a8 --- /dev/null +++ b/docs-next/content/docs/api/execution/resources.mdx @@ -0,0 +1,371 @@ +--- +title: "Resource Scheduling" +description: "API reference for ResourceAwareExecutor, ResourcePool, ResourceRequirements, and resource tracking — GPU, CPU, and memory-aware scheduling." +--- + +# Resource Scheduling + +The resource scheduling module extends the standard executor with capacity-aware +scheduling. Nodes declare their resource requirements (GPU, CPU, memory, or +custom resources), and the executor only dispatches a node when the resource pool +has sufficient capacity. This prevents oversubscription and enables scheduling of +heterogeneous workloads. + +See the [Resource Scheduling](/guide/execution-strategies/resource-scheduling) guide for usage +patterns and capacity planning. + +--- + +## ResourceAwareExecutor + + + +A synchronous executor that checks resource availability before dispatching each +node. When a node cannot be scheduled due to insufficient resources, it blocks +until resources are released by completed nodes. + + + +### execute + + ExecutionResult`} /> + +Execute tasks with resource-aware scheduling. 
+ + + +**Returns:** [ExecutionResult](/api/execution/execution) + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("preprocess") + .add_node("train_model_a") + .add_node("train_model_b") + .add_node("evaluate") + .add_edge("preprocess", "train_model_a") + .add_edge("preprocess", "train_model_b") + .add_edge("train_model_a", "evaluate") + .add_edge("train_model_b", "evaluate") + .build() +) + +pool = dagron.ResourcePool(capacities={"gpu": 2, "cpu": 8, "memory_mb": 16000}) + +requirements = { + "preprocess": dagron.ResourceRequirements(resources={"cpu": 2, "memory_mb": 2000}), + "train_model_a": dagron.ResourceRequirements(resources={"gpu": 1, "cpu": 4, "memory_mb": 8000}), + "train_model_b": dagron.ResourceRequirements(resources={"gpu": 1, "cpu": 4, "memory_mb": 8000}), + "evaluate": dagron.ResourceRequirements(resources={"cpu": 2, "memory_mb": 4000}), +} + +executor = dagron.ResourceAwareExecutor(dag, pool, requirements) +result = executor.execute({ + "preprocess": lambda: "data ready", + "train_model_a": lambda: "model A trained", + "train_model_b": lambda: "model B trained", + "evaluate": lambda: "evaluation complete", +}) + +print(result.succeeded) # 4 +``` + +--- + +## AsyncResourceAwareExecutor + + + +An async variant of `ResourceAwareExecutor`. Same interface but tasks must be +async callables. + + + +### execute (async) + + ExecutionResult`} /> + +Execute async tasks with resource-aware scheduling. + +```python +import asyncio +import dagron + +async def main(): + executor = dagron.AsyncResourceAwareExecutor(dag, pool, requirements) + result = await executor.execute({ + "preprocess": lambda: preprocess_async(), + "train_model_a": lambda: train_async("model_a"), + "train_model_b": lambda: train_async("model_b"), + "evaluate": lambda: evaluate_async(), + }) + print(result.succeeded) + +asyncio.run(main()) +``` + +--- + +## ResourcePool + + + +A pool of named resources with finite capacities. 
Resources are acquired before +a node runs and released after it completes (or fails). + + + +### can_satisfy + + bool`} /> + +Return `True` if the pool's total capacity can satisfy the given requirements +(ignoring current allocation). Useful for validating requirements before +execution. + +### try_acquire + + bool`} /> + +Attempt to acquire resources without blocking. Returns `True` if successful, +`False` if insufficient resources are currently available. + +### acquire + + bool`} /> + +Acquire resources, blocking until they become available or the timeout expires. +Returns `True` on success, `False` on timeout. + + + +### release + + None`} /> + +Release previously acquired resources back to the pool. + +### Properties + + dict[str, float]`} /> + +The total capacity of each resource. + + dict[str, float]`} /> + +The currently available (unallocated) amount of each resource. + + dict[str, float]`} /> + +The currently allocated amount of each resource. + + ResourceTimeline`} /> + +A timeline of resource allocation events for visualization and debugging. + +```python +pool = dagron.ResourcePool(capacities={"gpu": 4, "cpu": 16, "memory_mb": 32000}) + +print(pool.capacities) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} +print(pool.available) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} + +req = dagron.ResourceRequirements(resources={"gpu": 2}) +pool.acquire(req) +print(pool.available) # {"gpu": 2, "cpu": 16, "memory_mb": 32000} +print(pool.allocated) # {"gpu": 2, "cpu": 0, "memory_mb": 0} + +pool.release(req) +print(pool.available) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} +``` + +--- + +## ResourceRequirements + + + +A set of resource requirements for a single node. + + + +### Convenience Constructors + + ResourceRequirements`} /> + +Create a requirement for `n` GPUs. + + ResourceRequirements`} /> + +Create a requirement for `n` CPU cores. + + ResourceRequirements`} /> + +Create a requirement for `mb` megabytes of memory. 
+
+### fits
+
+ bool`} />
+
+Return `True` if the given available resources can satisfy this requirement.
+
+
+
+```python
+# Explicit construction
+req = dagron.ResourceRequirements(resources={"gpu": 2, "cpu": 4, "memory_mb": 8000})
+
+# Convenience constructors
+gpu_req = dagron.ResourceRequirements.gpu(1)
+cpu_req = dagron.ResourceRequirements.cpu(4)
+mem_req = dagron.ResourceRequirements.memory(4096)
+
+# Check fit
+print(req.fits({"gpu": 4, "cpu": 16, "memory_mb": 32000})) # True
+print(req.fits({"gpu": 1, "cpu": 16, "memory_mb": 32000})) # False (needs 2 GPUs)
+```
+
+---
+
+## ResourceSnapshot
+
+
+
+A point-in-time snapshot of resource allocation. Recorded by the
+`ResourceTimeline`.
+
+| Property | Type | Description |
+|----------|------|-------------|
+| `timestamp` | `float` | Unix timestamp of the snapshot. |
+| `allocated` | `dict[str, float]` | Allocated resources at this point. |
+| `available` | `dict[str, float]` | Available resources at this point. |
+| `node_name` | `str \| None` | The node that triggered this allocation event, if any. |
+| `event` | `str` | Event type: `"acquire"` or `"release"`. |
+
+---
+
+## ResourceTimeline
+
+
+
+A recorded timeline of resource allocation events. Access via
+`ResourcePool.timeline`.
+
+### record
+
+ None`} />
+
+Manually record a snapshot (typically done automatically by the pool).
+
+### snapshots
+
+ list[ResourceSnapshot]`} />
+
+All recorded snapshots in chronological order.
+
+### peak_utilization
+
+ dict[str, float]`} />
+
+Return the peak utilization (as a fraction 0.0-1.0) for each resource across
+the entire timeline.
+ +```python +result = executor.execute(tasks) + +timeline = pool.timeline +print(f"Snapshots: {len(timeline.snapshots)}") +print(f"Peak utilization: {timeline.peak_utilization()}") +# {"gpu": 1.0, "cpu": 0.75, "memory_mb": 0.5} + +for snap in timeline.snapshots: + print(f" t={snap.timestamp:.3f} {snap.event} {snap.node_name}: gpu={snap.allocated.get('gpu', 0)}") +``` + +--- + +## Related + +- [DAGExecutor](/api/execution/execution) — the base executor without resource awareness. +- [Distributed Execution](/api/execution/distributed) — multi-backend execution for cluster workloads. +- [Execution Plans](/api/core/core) — cost-aware scheduling at the graph level. +- [Resource Scheduling guide](/guide/execution-strategies/resource-scheduling) — usage patterns and capacity planning. diff --git a/docs-next/content/docs/api/meta.json b/docs-next/content/docs/api/meta.json new file mode 100644 index 0000000..042b56c --- /dev/null +++ b/docs-next/content/docs/api/meta.json @@ -0,0 +1,5 @@ +{ + "title": "API Reference", + "root": true, + "pages": ["core", "execution", "observability", "analysis", "utilities"] +} diff --git a/docs-next/content/docs/api/observability/meta.json b/docs-next/content/docs/api/observability/meta.json new file mode 100644 index 0000000..104b16f --- /dev/null +++ b/docs-next/content/docs/api/observability/meta.json @@ -0,0 +1,5 @@ +{ + "title": "Observability", + "defaultOpen": false, + "pages": ["tracing", "profiling"] +} diff --git a/docs-next/content/docs/api/observability/profiling.mdx b/docs-next/content/docs/api/observability/profiling.mdx new file mode 100644 index 0000000..fb4510f --- /dev/null +++ b/docs-next/content/docs/api/observability/profiling.mdx @@ -0,0 +1,268 @@ +--- +title: Profiling +description: API reference for dagron's post-execution profiling -- critical path analysis, slack computation, bottleneck detection, and parallelism efficiency. 
+--- + +# Profiling + +The profiling module analyzes completed executions against the DAG structure +to identify the critical path, compute slack for every node, detect +bottlenecks, and measure parallelism efficiency. Unlike tracing (which +records events in real time), profiling is a post-execution analysis step +that requires both the DAG and a completed `ExecutionResult`. + +For a guided walkthrough, see [Tracing & Profiling](/guide/observability/tracing-profiling). + +```python +from dagron.execution.profiling import profile_execution, ProfileReport, NodeProfile +``` + +--- + +## profile_execution + + ProfileReport`} /> + +Analyze an execution result against the DAG structure. This function +performs a forward and backward pass over the DAG to compute earliest +start times, latest start times, slack, and critical path membership +from actual recorded durations. + + + +**Returns:** `ProfileReport` -- A complete profiling report with per-node +analysis, critical path, bottlenecks, and efficiency metrics. + +The function performs the following analysis: + +1. **Forward pass** -- compute earliest start time for each node based on predecessor completion times. +2. **Backward pass** -- compute latest start time from the makespan working backwards. +3. **Slack** -- the difference between latest and earliest start times. Zero-slack nodes are on the critical path. +4. **Bottleneck scoring** -- nodes ranked by `duration * (1 + descendant_count)`. +5. **Parallelism efficiency** -- ratio of total work to makespan, indicating how well the DAG exploits concurrency. 
+ +```python +import dagron +from dagron.execution.profiling import profile_execution + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform_a") + .add_edge("extract", "transform_b") + .add_edge("transform_a", "merge") + .add_edge("transform_b", "merge") + .add_edge("merge", "load") + .build() +) + +executor = dagron.DAGExecutor(dag, max_workers=4) +result = executor.execute(tasks) + +report = profile_execution(dag, result) +print(report.summary()) +``` + + +Only nodes with `NodeStatus.COMPLETED` are included in the analysis. +Failed, skipped, or cancelled nodes are excluded from critical path +and slack computations. + +--- + +## ProfileReport + + + +Complete profiling report for a DAG execution. Contains per-node profiles, +the critical path, bottleneck rankings, and overall efficiency metrics. + + + +### Methods + +--- + +#### ProfileReport.summary + + str`} /> + +Return a human-readable summary of the profiling report. + +**Returns:** `str` -- Multi-line summary including critical path, efficiency, and bottleneck list. + +```python +print(report.summary()) +# Profile Report +# Nodes profiled: 5 +# Critical path: extract -> transform_a -> merge -> load +# Critical path duration: 2.3456s +# Parallelism efficiency: 1.80 +# Max parallelism: 2 +# Bottlenecks: extract, merge, transform_a, transform_b, load +``` + +--- + +#### ProfileReport.to_dict + + dict[str, Any]`} /> + +Convert the report to a plain dictionary suitable for JSON serialization +or logging. + +**Returns:** `dict[str, Any]` -- Dictionary representation of the report, including nested node profiles. + +```python +import json + +report_dict = report.to_dict() +print(json.dumps(report_dict, indent=2)) + +# Access specific node data +extract_profile = report_dict["node_profiles"]["extract"] +print(f"Extract slack: {extract_profile['slack']:.4f}s") +``` + +--- + +## NodeProfile + + + +Profile data for a single node, computed from the forward/backward pass +over actual execution timings. 
+ + + +```python +for name, profile in report.node_profiles.items(): + status = "CRITICAL" if profile.on_critical_path else f"slack={profile.slack:.3f}s" + print(f"{name}: {profile.duration:.3f}s ({status}), blocks {profile.blocked_descendants} nodes") +``` + +### Understanding slack + +Slack represents how much a node's start time can be delayed without +affecting the overall pipeline completion time. Nodes with zero slack +form the critical path -- any delay in these nodes directly delays the +entire execution. + +```python +# Find nodes with scheduling flexibility +flexible = [ + (name, p.slack) + for name, p in report.node_profiles.items() + if p.slack > 0 +] +flexible.sort(key=lambda x: -x[1]) + +for name, slack in flexible: + print(f"{name}: can be delayed by {slack:.3f}s without impact") +``` + +--- + +## Complete example + +```python +import dagron +from dagron.execution.profiling import profile_execution + +# Build and execute a pipeline +dag = ( + dagron.DAG.builder() + .add_edge("fetch_api", "parse_api") + .add_edge("fetch_db", "parse_db") + .add_edge("parse_api", "merge") + .add_edge("parse_db", "merge") + .add_edge("merge", "validate") + .add_edge("validate", "store") + .build() +) + +import time + +tasks = { + "fetch_api": lambda: time.sleep(0.5) or "api_data", + "fetch_db": lambda: time.sleep(0.3) or "db_data", + "parse_api": lambda: time.sleep(0.2) or "parsed_api", + "parse_db": lambda: time.sleep(0.1) or "parsed_db", + "merge": lambda: time.sleep(0.15) or "merged", + "validate": lambda: time.sleep(0.05) or "valid", + "store": lambda: time.sleep(0.1) or "stored", +} + +executor = dagron.DAGExecutor(dag, max_workers=4) +result = executor.execute(tasks) + +# Profile the execution +report = profile_execution(dag, result) + +# Print the overall summary +print(report.summary()) + +# Identify the critical path +print("\nCritical path:") +for node_name in report.critical_path: + p = report.node_profiles[node_name] + print(f" {node_name}: 
{p.duration:.3f}s") +print(f" Total: {report.critical_path_duration:.3f}s") + +# Find optimization opportunities +print("\nOptimization targets (nodes with most blocked descendants):") +ranked = sorted( + report.node_profiles.values(), + key=lambda p: p.blocked_descendants, + reverse=True, +) +for p in ranked[:3]: + print(f" {p.name}: blocks {p.blocked_descendants} nodes, duration {p.duration:.3f}s") + +# Export for logging +import json +print(json.dumps(report.to_dict(), indent=2)) +``` + +--- + +## See also + +- [Tracing](/api/observability/tracing) -- the event recording system that feeds execution timings. +- [Analysis](/api/analysis/analysis) -- structural analysis including `explain()` and `what_if()`. +- [Tracing & Profiling guide](/guide/observability/tracing-profiling) -- end-to-end walkthrough. diff --git a/docs-next/content/docs/api/observability/tracing.mdx b/docs-next/content/docs/api/observability/tracing.mdx new file mode 100644 index 0000000..f210a24 --- /dev/null +++ b/docs-next/content/docs/api/observability/tracing.mdx @@ -0,0 +1,324 @@ +--- +title: Tracing +description: API reference for dagron's execution tracing system — record, query, and export structured timeline events from DAG execution. +--- + +# Tracing + +The tracing module provides a structured timeline log of every event that occurs +during DAG execution. Traces capture node starts, completions, failures, gate +interactions, resource acquisitions, and cache events. You can export traces as +JSON or in Chrome Tracing format for visualization in `chrome://tracing`. + +For a higher-level introduction, see the [Tracing & Profiling](/guide/observability/tracing-profiling) +guide. + +```python +from dagron.execution.tracing import ExecutionTrace, TraceEvent, TraceEventType +``` + +--- + +## TraceEventType + + + +An enumeration of all event types that can be recorded during DAG execution. +Each value corresponds to a distinct lifecycle moment. 
+ +### Event categories + +| Category | Events | Description | +|----------|--------|-------------| +| **Execution** | `EXECUTION_STARTED`, `EXECUTION_COMPLETED` | Overall execution boundaries. | +| **Step** | `STEP_STARTED`, `STEP_COMPLETED` | Topological level boundaries within execution. | +| **Node lifecycle** | `NODE_STARTED`, `NODE_COMPLETED`, `NODE_FAILED`, `NODE_SKIPPED`, `NODE_TIMED_OUT`, `NODE_CANCELLED` | Individual node state transitions. | +| **Gates** | `NODE_GATE_WAITING`, `NODE_GATE_RESOLVED` | Approval gate interactions. See [Gates](/api/execution/execution). | +| **Resources** | `RESOURCE_ACQUIRED`, `RESOURCE_RELEASED` | Resource pool acquisition and release. | +| **Cache** | `NODE_CACHE_HIT`, `NODE_CACHE_MISS` | Content-addressable cache interactions. | + +```python +# Check event type +event = trace.events[0] +if event.event_type == TraceEventType.NODE_FAILED: + print(f"Node {event.node_name} failed: {event.error}") +``` + +--- + +## TraceEvent + + + +A single trace event captured during execution. Each event has a type, a +monotonic timestamp relative to the start of recording, and optional fields +that vary by event type. + + + +```python +for event in trace.events: + if event.node_name: + print(f"[{event.timestamp:.4f}s] {event.event_type.value}: {event.node_name}") + if event.duration: + print(f" Duration: {event.duration:.4f}s") + if event.error: + print(f" Error: {event.error}") +``` + +--- + +## ExecutionTrace + + None: ...`} /> + +A structured timeline log that collects events during DAG execution. The +executor creates an `ExecutionTrace` automatically when tracing is enabled. +You can also create one manually for custom recording scenarios. + +The trace uses `time.monotonic()` internally, so all timestamps are relative +to the first recorded event and monotonically increasing. 
+ +```python +from dagron.execution.tracing import ExecutionTrace, TraceEventType + +trace = ExecutionTrace() +trace.record(TraceEventType.EXECUTION_STARTED) +trace.record(TraceEventType.NODE_STARTED, node_name="extract") +trace.record(TraceEventType.NODE_COMPLETED, node_name="extract", duration=0.45) +trace.record(TraceEventType.EXECUTION_COMPLETED) + +print(len(trace.events)) # 4 +``` + +### Methods + +--- + +#### ExecutionTrace.record + + None`} /> + +Record a trace event. The timestamp is captured automatically using +`time.monotonic()` relative to the first recorded event. + + + +```python +trace.record( + TraceEventType.NODE_FAILED, + node_name="transform", + error="ValueError: missing column 'id'", + metadata={"retry_count": 2}, +) +``` + +--- + +#### ExecutionTrace.events + + list[TraceEvent]`} /> + +Returns a copy of all recorded events in chronological order. + +**Returns:** `list[TraceEvent]` -- All events recorded so far. + +```python +for event in trace.events: + print(f"{event.event_type.value} at {event.timestamp:.4f}s") +``` + +--- + +#### ExecutionTrace.events_for_node + + list[TraceEvent]`} /> + +Filter events for a specific node. Returns only events where `node_name` +matches the given name. + + + +**Returns:** `list[TraceEvent]` -- Events associated with the named node. + +```python +extract_events = trace.events_for_node("extract") +for e in extract_events: + print(f" {e.event_type.value}: {e.timestamp:.4f}s") +# NODE_STARTED: 0.0001s +# NODE_COMPLETED: 0.4502s +``` + +--- + +#### ExecutionTrace.to_json + + str`} /> + +Export the trace as a JSON string. Each event becomes a JSON object with +`event_type`, `timestamp`, and any non-None optional fields. + +**Returns:** `str` -- Pretty-printed JSON array of event objects. 
+ +```python +import json + +json_str = trace.to_json() +events = json.loads(json_str) +print(events[0]) +# {"event_type": "execution_started", "timestamp": 0.0} +``` + +--- + +#### ExecutionTrace.to_chrome_trace + + str`} /> + +Export the trace in [Chrome Tracing format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview). +The output is a JSON string compatible with `chrome://tracing` or +[Perfetto](https://ui.perfetto.dev/). + +Node executions become Duration events (`B`/`E` pairs). Each unique node +gets its own thread ID for visual separation. Timestamps are converted +to microseconds. + +**Returns:** `str` -- Chrome Tracing JSON string. + +```python +# Write the trace to a file and open in chrome://tracing +chrome_json = trace.to_chrome_trace() +with open("trace.json", "w") as f: + f.write(chrome_json) +``` + +--- + +#### ExecutionTrace.summary + + str`} /> + +Return a human-readable summary of the trace, including total event count, +unique node count, and counts by outcome (completed, failed, skipped, +timed out, cancelled). + +**Returns:** `str` -- Multi-line summary string. 
+ +```python +print(trace.summary()) +# Execution Trace Summary +# Total events: 14 +# Unique nodes: 5 +# Completed: 4 +# Failed: 1 +# Skipped: 0 +# Timed out: 0 +# Cancelled: 0 +# Duration: 1.2345s +``` + +--- + +## Complete example + +```python +import dagron +from dagron.execution.tracing import ExecutionTrace, TraceEventType + +# Build a DAG +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +# Execute with tracing enabled +executor = dagron.DAGExecutor(dag, trace=True) +tasks = { + "extract": lambda: [1, 2, 3], + "transform": lambda: [2, 4, 6], + "load": lambda: "done", +} +result = executor.execute(tasks) + +# Access the trace from the result +trace = result.trace + +# Inspect events +print(trace.summary()) + +# Find slow nodes +for event in trace.events: + if event.event_type == TraceEventType.NODE_COMPLETED and event.duration: + if event.duration > 1.0: + print(f"Slow node: {event.node_name} ({event.duration:.2f}s)") + +# Export for Chrome Tracing visualization +with open("pipeline_trace.json", "w") as f: + f.write(trace.to_chrome_trace()) + +# Export as plain JSON for custom analysis +with open("pipeline_events.json", "w") as f: + f.write(trace.to_json()) +``` + +--- + +## See also + +- [Profiling](/api/observability/profiling) -- post-execution performance analysis built on trace data. +- [Tracing & Profiling guide](/guide/observability/tracing-profiling) -- walkthrough of tracing and profiling workflows. +- [Execution](/api/execution/execution) -- `DAGExecutor` and the `trace` parameter. 
diff --git a/docs-next/content/docs/api/utilities/compose.mdx b/docs-next/content/docs/api/utilities/compose.mdx new file mode 100644 index 0000000..ef9a0ba --- /dev/null +++ b/docs-next/content/docs/api/utilities/compose.mdx @@ -0,0 +1,247 @@ +--- +title: Composition +description: API reference for dagron's DAG composition -- merge multiple DAGs into one with namespace prefixes and cross-namespace connections. +--- + +# Composition + +The compose module provides multi-DAG composition with automatic namespace +prefixing. Combine independent DAGs into a single unified graph, preserving +node payloads and metadata, with optional cross-namespace connections. + +```python +from dagron.compose import compose +``` + +--- + +## compose + + DAG`} /> + +Compose multiple DAGs into one with namespace prefixes. Each DAG's nodes +are prefixed with its namespace key (e.g., a node `"load"` in namespace +`"etl"` becomes `"etl/load"`). Internal edges within each DAG are preserved +with the same prefixing. Cross-namespace edges can be added via the +`connections` parameter. + + + +**Returns:** `DAG` -- A new DAG containing all nodes and edges from all input DAGs with namespaces applied, plus any cross-namespace connections. + +### Node prefixing + +Every node in the resulting DAG has a name of the form +`{namespace}{separator}{original_name}`. Payloads and metadata from the +original nodes are preserved on the prefixed nodes. + +### Edge preservation + +All edges within each input DAG are preserved with prefixed names. For +example, an edge `("a", "b")` in namespace `"etl"` becomes +`("etl/a", "etl/b")` in the composed DAG. 
+ +```python +import dagron +from dagron.compose import compose + +# Build individual DAGs +etl_dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +ml_dag = ( + dagron.DAG.builder() + .add_edge("train", "evaluate") + .add_edge("evaluate", "deploy") + .build() +) + +# Compose with a cross-namespace connection +combined = compose( + dags={"etl": etl_dag, "ml": ml_dag}, + connections=[("etl/load", "ml/train")], +) + +print(list(combined.nodes())) +# ['etl/extract', 'etl/transform', 'etl/load', +# 'ml/train', 'ml/evaluate', 'ml/deploy'] + +print(combined.edge_count()) +# 5 (2 from etl + 2 from ml + 1 cross-namespace) + +print(combined.roots()) # ['etl/extract'] +print(combined.leaves()) # ['ml/deploy'] +``` + +### Custom separators + +Use the `separator` parameter to change the namespace delimiter: + +```python +combined = compose( + dags={"etl": etl_dag, "ml": ml_dag}, + connections=[("etl.load", "ml.train")], + separator=".", +) + +print(list(combined.nodes())) +# ['etl.extract', 'etl.transform', 'etl.load', +# 'ml.train', 'ml.evaluate', 'ml.deploy'] +``` + +### Composing many DAGs + +The function accepts any number of DAGs: + +```python +ingestion = dagron.DAG.builder().add_edge("fetch", "parse").build() +validation = dagron.DAG.builder().add_edge("check_schema", "check_values").build() +storage = dagron.DAG.builder().add_edge("write_db", "write_cache").build() + +pipeline = compose( + dags={ + "ingest": ingestion, + "validate": validation, + "store": storage, + }, + connections=[ + ("ingest/parse", "validate/check_schema"), + ("validate/check_values", "store/write_db"), + ], +) + +print(pipeline.node_count()) # 6 +print(pipeline.edge_count()) # 5 +``` + +### Preserving payloads and metadata + +Payloads and metadata from the original DAGs are carried over to the +composed DAG: + +```python +dag_with_payloads = dagron.DAG() +dag_with_payloads.add_node("train", payload={"epochs": 10, "lr": 0.001}) 
+dag_with_payloads.add_node("evaluate", payload={"metrics": ["acc", "f1"]}) +dag_with_payloads.add_edge("train", "evaluate") + +combined = compose(dags={"ml": dag_with_payloads}) + +# Payloads are accessible via prefixed names +print(combined.get_payload("ml/train")) +# {"epochs": 10, "lr": 0.001} +``` + +--- + +## Complete example + +```python +import dagron +from dagron.compose import compose + +# === Build sub-DAGs for different teams === + +# Data Engineering team +data_eng = ( + dagron.DAG.builder() + .add_node("raw_ingest", payload={"source": "s3://bucket/raw"}) + .add_node("clean") + .add_node("feature_store") + .add_edge("raw_ingest", "clean") + .add_edge("clean", "feature_store") + .build() +) + +# ML team +ml = ( + dagron.DAG.builder() + .add_node("train", payload={"model": "xgboost"}) + .add_node("evaluate") + .add_node("register") + .add_edge("train", "evaluate") + .add_edge("evaluate", "register") + .build() +) + +# Platform team +platform = ( + dagron.DAG.builder() + .add_node("deploy_staging") + .add_node("integration_test") + .add_node("deploy_prod") + .add_edge("deploy_staging", "integration_test") + .add_edge("integration_test", "deploy_prod") + .build() +) + +# === Compose into a unified pipeline === +full_pipeline = compose( + dags={ + "data": data_eng, + "ml": ml, + "platform": platform, + }, + connections=[ + ("data/feature_store", "ml/train"), + ("ml/register", "platform/deploy_staging"), + ], +) + +print(f"Full pipeline: {full_pipeline.node_count()} nodes, {full_pipeline.edge_count()} edges") +# Full pipeline: 9 nodes, 8 edges + +# Inspect the composed structure +for level_idx, level in enumerate(full_pipeline.topological_levels()): + names = [n.name for n in level] + print(f"Level {level_idx}: {names}") +# Level 0: ['data/raw_ingest'] +# Level 1: ['data/clean'] +# Level 2: ['data/feature_store'] +# Level 3: ['ml/train'] +# Level 4: ['ml/evaluate'] +# Level 5: ['ml/register'] +# Level 6: ['platform/deploy_staging'] +# Level 7: 
['platform/integration_test'] +# Level 8: ['platform/deploy_prod'] + +# Execute the composed DAG +tasks = { + "data/raw_ingest": lambda: "raw data", + "data/clean": lambda: "clean data", + "data/feature_store": lambda: "features", + "ml/train": lambda: "model", + "ml/evaluate": lambda: {"accuracy": 0.95}, + "ml/register": lambda: "model_v1", + "platform/deploy_staging": lambda: "staging OK", + "platform/integration_test": lambda: "tests pass", + "platform/deploy_prod": lambda: "deployed", +} + +executor = dagron.DAGExecutor(full_pipeline, max_workers=4) +result = executor.execute(tasks) +print(f"Succeeded: {result.succeeded}/{full_pipeline.node_count()}") +``` + +--- + +## See also + +- [DAG](/api/core/core) -- the core graph class. +- [Templates](/api/utilities/template) -- parameterized DAG construction. +- [Versioning](/api/utilities/versioning) -- tracking changes to composed DAGs. +- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns and composition walkthrough. +- [Graph Transforms guide](/guide/core-concepts/transforms) -- filtering, merging, and reshaping DAGs. diff --git a/docs-next/content/docs/api/utilities/display.mdx b/docs-next/content/docs/api/utilities/display.mdx new file mode 100644 index 0000000..7f79e44 --- /dev/null +++ b/docs-next/content/docs/api/utilities/display.mdx @@ -0,0 +1,251 @@ +--- +title: Display +description: API reference for dagron's display utilities -- ASCII rendering, Jupyter SVG auto-display, and custom node formatting. +--- + +# Display + +The display module provides visualization utilities for DAGs. It includes +ASCII rendering for terminals and logs, and SVG generation for Jupyter +notebooks. The SVG renderer tries Graphviz first (Python package or CLI) +and falls back to an ASCII-in-SVG representation. + +```python +from dagron.display import pretty_print, _repr_svg_ +``` + +--- + +## pretty_print + + str`} /> + +Render the DAG as an ASCII diagram. 
The output is a multi-line string +suitable for printing to a terminal, writing to log files, or embedding +in text reports. + + + +**Returns:** `str` -- Multi-line ASCII string representing the DAG. + +**Raises:** `ValueError` -- If `dag.node_count()` exceeds `max_nodes`. + +### Vertical layout + +The default layout renders topological levels as rows, with edges shown +as connectors between levels: + +```python +import dagron +from dagron.display import pretty_print + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +print(pretty_print(dag)) +# [ extract ] +# | +# [ transform ] +# | +# [ load ] +``` + +### Horizontal layout + +The horizontal layout renders levels as columns, with arrows between them: + +```python +print(pretty_print(dag, layout="horizontal")) +# [ extract ]-->[ transform ]-->[ load ] +``` + +### With payloads + +```python +dag = dagron.DAG() +dag.add_node("train", payload={"epochs": 10}) +dag.add_node("evaluate", payload={"metric": "f1"}) +dag.add_edge("train", "evaluate") + +print(pretty_print(dag, show_payloads=True)) +# [ train={'epochs': 10} ] +# | +# [ evaluate={'metric': 'f1'} ] +``` + +### Custom node formatter + +```python +def short_label(name: str, payload: object) -> str: + if payload and isinstance(payload, dict): + return f"{name} ({len(payload)} params)" + return name + +print(pretty_print(dag, node_formatter=short_label)) +# [ train (1 params) ] +# | +# [ evaluate (1 params) ] +``` + +### Empty graphs + +```python +empty = dagron.DAG() +print(pretty_print(empty)) +# (empty graph) +``` + +### Large graph safety + +```python +try: + print(pretty_print(large_dag, max_nodes=10)) +except ValueError as e: + print(e) + # Graph has 100 nodes, exceeding max_nodes=10. Increase max_nodes to render. +``` + +--- + +## _repr_svg_ + + str`} /> + +Return an SVG representation of the DAG for Jupyter notebooks. 
This +function is used by dagron's Jupyter integration to provide auto-display +when a DAG object is the last expression in a cell. + + + +**Returns:** `str` -- SVG string. + +### Rendering strategy + +The function tries multiple rendering backends in order: + +1. **Empty graph** -- returns a simple SVG with `"(empty graph)"` text. +2. **Too many nodes** -- returns a summary SVG showing node and edge counts. +3. **Graphviz Python package** -- tries `graphviz.Source(dot).pipe(format='svg')`. +4. **Graphviz `dot` CLI** -- tries `dot -Tsvg` via subprocess. +5. **ASCII fallback** -- wraps the `pretty_print()` output in an SVG `` element. + +### Jupyter auto-display + +When working in a Jupyter notebook, DAG objects automatically display as +SVG. This is enabled through dagron's `_repr_svg_` integration: + +```python +# In a Jupyter notebook cell: +dag = ( + dagron.DAG.builder() + .add_edge("a", "b") + .add_edge("b", "c") + .build() +) + +dag # Auto-renders as SVG in the notebook output +``` + +### Graphviz installation + +For the best rendering quality, install Graphviz: + +```bash +# Python package +pip install graphviz + +# System package (for the dot CLI) +# Ubuntu/Debian: +sudo apt install graphviz +# macOS: +brew install graphviz +``` + +### Manual SVG export + +```python +from dagron.display import _repr_svg_ + +svg = _repr_svg_(dag) +with open("pipeline.svg", "w") as f: + f.write(svg) +``` + +--- + +## Complete example + +```python +import dagron +from dagron.display import pretty_print, _repr_svg_ + +# Build a pipeline +dag = ( + dagron.DAG.builder() + .add_node("fetch_api", payload={"url": "https://api.example.com"}) + .add_node("fetch_db", payload={"table": "users"}) + .add_node("merge") + .add_node("validate") + .add_node("store") + .add_edge("fetch_api", "merge") + .add_edge("fetch_db", "merge") + .add_edge("merge", "validate") + .add_edge("validate", "store") + .build() +) + +# ASCII output for terminal +print("=== Vertical ===") +print(pretty_print(dag)) 
+ +print("\n=== Horizontal ===") +print(pretty_print(dag, layout="horizontal")) + +print("\n=== With Payloads ===") +print(pretty_print(dag, show_payloads=True)) + +# Custom formatter for a clean summary +def status_label(name: str, payload: object) -> str: + icon = ">" if payload else "-" + return f"{icon} {name}" + +print("\n=== Custom Labels ===") +print(pretty_print(dag, node_formatter=status_label)) + +# SVG export +svg = _repr_svg_(dag) +with open("pipeline.svg", "w") as f: + f.write(svg) +print(f"\nSVG exported ({len(svg)} bytes)") +``` + +--- + +## See also + +- [DAG](/api/core/core) -- the `to_dot()` and `to_mermaid()` export methods. +- [Serialization guide](/guide/core-concepts/serialization) -- full guide to serialization and visualization. diff --git a/docs-next/content/docs/api/utilities/integration.mdx b/docs-next/content/docs/api/utilities/integration.mdx new file mode 100644 index 0000000..f3aae90 --- /dev/null +++ b/docs-next/content/docs/api/utilities/integration.mdx @@ -0,0 +1,236 @@ +--- +title: Integration +description: API reference for dagron's integration helpers -- build DAGs from dicts, dataclasses, and Pydantic models. +--- + +# Integration + +The integration module provides helpers for building DAGs from common Python +data structures. The primary function, `from_records`, converts sequences of +dicts, dataclasses, or Pydantic models into DAGs with minimal boilerplate. + +```python +from dagron.integration import from_records +``` + +--- + +## from_records + + DAG`} /> + +Build a DAG from a sequence of records. Works with dicts, dataclasses, +and Pydantic `BaseModel` instances. Each record becomes a node, with the +node name extracted from the specified field. Edges and payloads are derived +via optional callback functions. + + + +**Returns:** `DAG` -- A new DAG built from the records. + +**Raises:** +- `DuplicateNodeError` -- If any records share the same name. 
+- `NodeNotFoundError` -- If `edge_fn` references a node that does not exist. +- `CycleError` -- If the edges derived from `edge_fn` would create a cycle. + +### Building from dicts + +The most common use case is converting a list of dictionaries, such as +records loaded from a YAML file, JSON API, or database query: + +```python +from dagron.integration import from_records + +records = [ + {"name": "extract", "depends_on": [], "config": {"source": "s3"}}, + {"name": "transform", "depends_on": ["extract"], "config": {"mode": "batch"}}, + {"name": "load", "depends_on": ["transform"], "config": {"target": "warehouse"}}, +] + +dag = from_records( + records, + edge_fn=lambda r: r["depends_on"], + payload_fn=lambda r: r["config"], +) + +print(dag.node_count()) # 3 +print(dag.edge_count()) # 2 +print(dag.get_payload("extract")) # {"source": "s3"} +``` + +### Building from dataclasses + +```python +from dataclasses import dataclass, field +from dagron.integration import from_records + +@dataclass +class TaskSpec: + name: str + dependencies: list[str] = field(default_factory=list) + timeout: float = 30.0 + +specs = [ + TaskSpec("fetch", timeout=60.0), + TaskSpec("parse", dependencies=["fetch"], timeout=10.0), + TaskSpec("validate", dependencies=["parse"]), + TaskSpec("store", dependencies=["validate"]), +] + +dag = from_records( + specs, + edge_fn=lambda s: s.dependencies, + payload_fn=lambda s: {"timeout": s.timeout}, +) + +print(list(dag.nodes())) # ['fetch', 'parse', 'validate', 'store'] +``` + +### Building from Pydantic models + +```python +from pydantic import BaseModel +from dagron.integration import from_records + +class PipelineStep(BaseModel): + name: str + depends_on: list[str] = [] + retries: int = 3 + +steps = [ + PipelineStep(name="ingest", retries=5), + PipelineStep(name="clean", depends_on=["ingest"]), + PipelineStep(name="publish", depends_on=["clean"], retries=1), +] + +dag = from_records( + steps, + edge_fn=lambda s: s.depends_on, + payload_fn=lambda s: 
{"retries": s.retries}, +) +``` + +### Custom name fields + +Use the `name_field` parameter when your records use a different field +for the node identifier: + +```python +records = [ + {"id": "step_1", "after": []}, + {"id": "step_2", "after": ["step_1"]}, + {"id": "step_3", "after": ["step_2"]}, +] + +dag = from_records( + records, + name_field="id", + edge_fn=lambda r: r["after"], +) + +print(list(dag.nodes())) # ['step_1', 'step_2', 'step_3'] +``` + +### No edges + +When `edge_fn` is not provided, the resulting DAG contains nodes but no +edges. This is useful when you want to add edges separately: + +```python +records = [{"name": "a"}, {"name": "b"}, {"name": "c"}] + +dag = from_records(records) +print(dag.node_count()) # 3 +print(dag.edge_count()) # 0 + +# Add edges manually +dag.add_edge("a", "b") +dag.add_edge("b", "c") +``` + +### Default payloads + +When `payload_fn` is not provided, the entire record is stored as the +payload: + +```python +records = [ + {"name": "extract", "source": "api", "timeout": 30}, + {"name": "load", "target": "db", "batch_size": 100}, +] + +dag = from_records(records) +print(dag.get_payload("extract")) +# {"name": "extract", "source": "api", "timeout": 30} +``` + +--- + +## Complete example + +```python +import json +from dagron.integration import from_records + +# Simulate loading pipeline config from JSON +config_json = """ +[ + {"name": "raw_data", "deps": [], "type": "source", "params": {"path": "/data/raw"}}, + {"name": "clean", "deps": ["raw_data"], "type": "transform", "params": {"drop_nulls": true}}, + {"name": "features", "deps": ["clean"], "type": "transform", "params": {"method": "tfidf"}}, + {"name": "train", "deps": ["features"], "type": "model", "params": {"epochs": 50}}, + {"name": "evaluate", "deps": ["train"], "type": "model", "params": {"metrics": ["acc", "f1"]}}, + {"name": "deploy", "deps": ["evaluate"], "type": "deploy", "params": {"env": "prod"}} +] +""" + +records = json.loads(config_json) + +# Build DAG 
with structured payloads +dag = from_records( + records, + edge_fn=lambda r: r["deps"], + payload_fn=lambda r: {"type": r["type"], "params": r["params"]}, +) + +print(f"Pipeline: {dag.node_count()} nodes, {dag.edge_count()} edges") +# Pipeline: 6 nodes, 5 edges + +# Inspect the structure +for level_idx, level in enumerate(dag.topological_levels()): + names = [n.name for n in level] + print(f"Level {level_idx}: {names}") + +# Access payload metadata +for name in ["raw_data", "train", "deploy"]: + payload = dag.get_payload(name) + print(f"{name}: type={payload['type']}, params={payload['params']}") + +# Execute with task functions +import dagron + +# Bind `name` at definition time (default-argument trick); a bare +# `lambda: f"completed {name}"` would late-bind and make every task +# report the last node's name. +tasks = {name: (lambda name=name: f"completed {name}") for name in dag.nodes()} +executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) +print(f"Succeeded: {result.succeeded}/{dag.node_count()}") +``` + +--- + +## See also + +- [DAG](/api/core/core) -- the core graph class. +- [DAGBuilder](/api/core/builder) -- fluent builder for programmatic construction. +- [Templates](/api/utilities/template) -- parameterized DAG construction. +- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns overview. 
diff --git a/docs-next/content/docs/api/utilities/meta.json b/docs-next/content/docs/api/utilities/meta.json new file mode 100644 index 0000000..54734cb --- /dev/null +++ b/docs-next/content/docs/api/utilities/meta.json @@ -0,0 +1,13 @@ +{ + "title": "Utilities", + "defaultOpen": false, + "pages": [ + "template", + "versioning", + "compose", + "display", + "integration", + "plugins", + "modern-api" + ] +} diff --git a/docs-next/content/docs/api/utilities/modern-api.mdx b/docs-next/content/docs/api/utilities/modern-api.mdx new file mode 100644 index 0000000..ec1a9d5 --- /dev/null +++ b/docs-next/content/docs/api/utilities/modern-api.mdx @@ -0,0 +1,198 @@ +--- +title: Modern API (NodeRef, flow, reactive, contentcache, trace) +description: API reference for the typed-handles + uniqueness modules — dagron.NodeRef, dagron.flow, dagron.Effect, dagron.reactive, dagron.contentcache, dagron.trace, dagron.stubgen. +--- + +# Modern API + +API reference for the typed-handles and uniqueness modules. For a +walkthrough of how they compose, see the +[Typed Handles & Reactive Engine](../../guide/typed-and-reactive) guide. + +## `dagron.NodeRef` + +```python +class NodeRef: + name: str + epoch: int +``` + +Stable handle returned by `DAG.add_node()`. Survives unrelated graph +mutations; invalidated only when the underlying node is removed (or +remove-and-readded with the same name, which yields a fresh epoch). + +```python +dag.node_ref(name: str) -> NodeRef | None +``` + +Look up the current ref for a name without mutating the DAG. + +Every public method that previously took `name: str` now takes `NodeArg = str | NodeRef`: +`add_edge`, `remove_node`, `has_node`, `has_edge`, `get_payload`, `set_payload`, +`predecessors`, `successors`, `ancestors`, `descendants`, `subgraph`, +`subgraph_by_depth`, `collapse`, `dominator_tree`, `all_paths`, +`shortest_path`, `longest_path`, `dirty_set`, `change_provenance`, +`is_ancestor`, and the `ReachabilityIndex` query methods. 
+ +Stale refs raise `dagron.StaleNodeRefError`. + +## `dagron.flow` + +```python +@dagron.task +def fn(...) -> T: ... + +@dagron.task(effect=Effect.NETWORK) +def fn(...) -> T: ... + +@dagron.flow +def pipeline(...) -> FlowFuture[T] | None: ... +``` + +| Member | Purpose | +|---|---| +| `task` | Decorator. Outside a `@flow`, executes normally. Inside one, records the call and returns `FlowFuture[R]`. Supports `effect=` keyword (defaults to `Effect.PURE`). | +| `flow` | Decorator. Wraps a function as a `Flow`. | +| `Flow.dag()` | Trace the body and return the built `DAG`. | +| `Flow.run(*args, **kwargs)` | Trace, build, execute synchronously → `ExecutionResult`. | +| `Flow.run_async(...)` | Async variant. | +| `Flow.__call__(...)` | Sugar for `run`. | +| `FlowFuture[T]` | Generic placeholder returned from `@task` calls inside a `@flow`. Pass to other tasks to wire deps. | +| `TaskSpec` | Metadata attached to every `@task` (`name`, `fn`, `dependencies`, `is_async`, `effect`). | + +`batch()` semantics aren't part of `dagron.flow` — they live in +[`dagron.reactive`](#dagronreactive). + +## `dagron.Effect` + +```python +class Effect(Enum): + PURE = "pure" + READ = "read" + WRITE = "write" + NETWORK = "network" + NONDETERMINISTIC = "nondeterministic" + + is_cacheable: bool + is_deterministic: bool + is_isolated: bool +``` + +```python +def effects_of(dag: DAG) -> dict[str, Effect] +``` + +Read every node's effect tag from a DAG built by `@dagron.flow`. Returns +`Effect.PURE` for nodes without a tag. + +`DAGExecutor(enforce_effect_isolation=True)` reads these tags and runs +`NONDETERMINISTIC` nodes through a shared lock, so they don't overlap. + +## `dagron.reactive` + +```python +import dagron.reactive as dr + +s = dr.signal(value) # → Signal[T] +c = dr.computed(lambda: ...) # → Computed[T] +w = dr.watch(lambda: ...) # → Watcher (also fires once now) +with dr.batch(): ... 
# glitch-free coalesced updates +``` + +| Member | API | +|---|---| +| `Signal[T]` | `__call__() -> T`, `set(v: T)`, `peek() -> T` (no tracking). Equality-checked sets are no-ops. | +| `Computed[T]` | `__call__() -> T`, `peek() -> T`. Lazy memoised. | +| `Watcher` | Auto-fires when any tracked dep changes. `.dispose()` to detach. | +| `batch()` | Context manager. Defers Watcher fires until the outermost block ends. Multiple signal mutations coalesce into one fire. | +| `signal()` / `computed()` / `watch()` | Convenience factories. | + +Dependency tracking is thread-local: reads inside a `Computed` body or `Watcher` body +register the source as a dep. Observers are held by `weakref.WeakSet` +so dropped derived nodes don't leak. + +## `dagron.contentcache` + +```python +from dagron.contentcache import ContentCache, default_cache_dir + +cache = ContentCache(cache_dir=None, hasher=None) +cache.compute_or_cached(fn, args=(), kwargs=None, effect=None) -> tuple[Any, bool] +cache.get(fingerprint: bytes) -> tuple[Any, bool] +cache.put(fingerprint: bytes, value: Any) -> None +cache.has(fingerprint: bytes) -> bool +cache.delete(fingerprint: bytes) -> None +cache.clear() -> None +cache.hash(value: Any) -> bytes # delegates to the configured Hasher +``` + +| Helper | Purpose | +|---|---| +| `default_cache_dir()` | `$DAGRON_CACHE_DIR` or `~/.cache/dagron/cas`. | +| `default_hash(value)` | pickle + blake2b 256-bit. Falls back to `repr()` for unpickleable inputs. | +| `numpy_hash(value)` | `array.tobytes()` for numpy arrays; falls back to `default_hash`. | +| `fingerprint_function(fn)` | Hashes `co_code`, `co_consts`, `co_freevars`, qualname, Python major.minor. | +| `fingerprint_node(fn, effect, input_fingerprints)` | Composite fingerprint used as the cache key. | + +`compute_or_cached` is **effect-aware**: `WRITE` / `NETWORK` / +`NONDETERMINISTIC` skip the cache entirely; `PURE` and `READ` go through +it. + +Storage layout: `<aa>/<bb>/<rest>.cache` where the +fingerprint hex is `<aa><bb><rest>`. 
POSIX `rename(2)` makes writes +atomic. The filesystem itself is the index — independent processes +share intermediates with no coordination. + +## `dagron.trace` + +```python +from dagron.trace import TraceWriter, TraceReader, TraceRecord, ReplayedNode, replay + +writer = TraceWriter(path, cas=None) +writer.record(name, *, value=None, effect=None, duration_ns=0, + error=None, metadata=None, timestamp=None) -> TraceRecord +writer.flush() +writer.close() # also via context manager + +reader = TraceReader(path, cas=None) +reader.records() -> Iterator[TraceRecord] +reader.records_until(t, *, inclusive=True) -> Iterator[TraceRecord] +reader.timeline() -> list[tuple[float, str]] +reader.fetch(rec) -> Any # resolves payload via the CAS + +replay(source, *, at=None, cas=None) -> dict[str, ReplayedNode] +``` + +`ReplayedNode` carries `name`, `timestamp`, `value`, `effect`, +`replayable`, `duration_ns`, `error`, and a derived `has_value` +property. `replayable` mirrors `effect.is_deterministic`: pure / READ +nodes can be reproduced; impure nodes' values are *what that run +produced*, not what a fresh run would produce. + +| Helper | Purpose | +|---|---| +| `default_trace_dir()` | `$DAGRON_TRACE_DIR` or `~/.cache/dagron/traces`. | +| `new_run_id()` | 16-hex-char random id for naming a run's log file. | +| `list_runs(trace_dir=None)` | Every `*.jsonl` under `trace_dir`. | + +Logs are append-only JSONL. Payloads live in the bound `ContentCache`, +deduplicated across runs that produced the same value. + +## `dagron.stubgen` + +```python +from dagron.stubgen import generate_stub + +generate_stub( + dag, + *, + type_hints: dict[str, type | str] | None = None, + tasks: dict[str, Callable] | None = None, + name: str = "TypedExecutionResult", +) -> str +``` + +Emits `.pyi`-formatted source declaring a class with `Literal["nodename"]` +overloads typed by inferred (or explicitly provided) return types. 
+Drop the result into a stub file alongside your code so even string-keyed +`result["nodename"]` lookups become statically typed. diff --git a/docs-next/content/docs/api/utilities/plugins.mdx b/docs-next/content/docs/api/utilities/plugins.mdx new file mode 100644 index 0000000..78df89d --- /dev/null +++ b/docs-next/content/docs/api/utilities/plugins.mdx @@ -0,0 +1,502 @@ +--- +title: Plugins +description: API reference for dagron's plugin system -- lifecycle hooks, plugin discovery, the DashboardPlugin, and the @dagron_plugin decorator. +--- + +# Plugins + +The plugin system provides extensible lifecycle hooks for DAG construction +and execution. Plugins can observe and react to events such as node starts, +completions, failures, and full execution boundaries. dagron includes a +built-in `DashboardPlugin` that serves a live web dashboard backed by a +Rust web server. + +```python +from dagron.plugins import ( + DagronPlugin, + HookRegistry, + HookEvent, + HookContext, + PluginManager, + dagron_plugin, +) +from dagron.dashboard import DashboardPlugin +``` + +--- + +## HookEvent + + + +Events that hooks can subscribe to. These correspond to lifecycle moments +during DAG construction and execution. + +| Event | When it fires | Context fields | +|-------|--------------|----------------| +| `PRE_EXECUTE` | Before execution starts. | `dag` | +| `POST_EXECUTE` | After execution completes. | `dag`, `execution_result` | +| `PRE_NODE` | Before a node task runs. | `dag`, `node_name` | +| `POST_NODE` | After a node task completes. | `dag`, `node_name`, `node_result` | +| `ON_ERROR` | When a node task fails. | `dag`, `node_name`, `error` | +| `PRE_BUILD` | Before `DAGBuilder.build()`. | `dag` | +| `POST_BUILD` | After `DAGBuilder.build()`. | `dag` | + +--- + +## HookContext + + + +Context object passed to hook callbacks. Fields are populated based on the +event type -- not all fields are set for every event. 
+ + + +--- + +## HookRegistry + + None: ...`} /> + +Registry for event hooks with priority ordering. Hooks are fire-and-forget: +exceptions in callbacks are caught and issued as `RuntimeWarning`, never +propagated to the caller. + +```python +from dagron.plugins import HookRegistry, HookEvent, HookContext + +hooks = HookRegistry() + +# Register a simple logging hook +def log_node_start(ctx: HookContext): + print(f"Starting node: {ctx.node_name}") + +unregister = hooks.register(HookEvent.PRE_NODE, log_node_start) + +# Later, remove the hook +unregister() +``` + +### Methods + +--- + +#### HookRegistry.register + + Callable[[], None]`} /> + +Register a hook callback for an event. Higher priority callbacks run first. + + + +**Returns:** `Callable[[], None]` -- An unregister function. Call it to remove this hook. + +```python +# High-priority hook runs first +hooks.register(HookEvent.PRE_EXECUTE, auth_check, priority=100) +hooks.register(HookEvent.PRE_EXECUTE, log_start, priority=0) +``` + +--- + +#### HookRegistry.fire + + None`} /> + +Fire all hooks registered for the context's event. Callbacks are called +in priority order (descending). Exceptions in callbacks are caught and +issued as `RuntimeWarning`. + + + +```python +from dagron.plugins import HookContext, HookEvent + +ctx = HookContext(event=HookEvent.PRE_NODE, dag=dag, node_name="extract") +hooks.fire(ctx) +``` + +--- + +#### HookRegistry.clear + + None`} /> + +Clear all hooks, or hooks for a specific event. + + + +--- + +#### HookRegistry.hook_count + + int`} /> + +Return the number of registered hooks. + + + +**Returns:** `int` -- Number of registered hooks. + +--- + +## DagronPlugin + + str: ... + + @abstractmethod + def initialize(self, hooks: HookRegistry) -> None: ... + + def teardown(self) -> None: ...`} /> + +Abstract base class for dagron plugins. Subclass this and implement the +`name` property and `initialize()` method. The `teardown()` method is +optional. 
+ +### Abstract members + +| Member | Type | Description | +|--------|------|-------------| +| `name` | `property -> str` | Unique name for the plugin (abstract). | +| `initialize(hooks)` | `method` | Called when the plugin is initialized. Register hooks here (abstract). | +| `teardown()` | `method` | Called when the plugin is torn down. Clean up resources (optional, default no-op). | + +```python +from dagron.plugins import DagronPlugin, HookRegistry, HookEvent, HookContext + +class TimingPlugin(DagronPlugin): + @property + def name(self) -> str: + return "timing" + + def initialize(self, hooks: HookRegistry) -> None: + self._starts: dict[str, float] = {} + + def on_pre_node(ctx: HookContext): + import time + if ctx.node_name: + self._starts[ctx.node_name] = time.monotonic() + + def on_post_node(ctx: HookContext): + import time + if ctx.node_name and ctx.node_name in self._starts: + elapsed = time.monotonic() - self._starts[ctx.node_name] + print(f"{ctx.node_name}: {elapsed:.3f}s") + + hooks.register(HookEvent.PRE_NODE, on_pre_node) + hooks.register(HookEvent.POST_NODE, on_post_node) + + def teardown(self) -> None: + self._starts.clear() +``` + +--- + +## PluginManager + + None: ...`} /> + +Manages plugin discovery, initialization, and teardown. Shares a single +`HookRegistry` across all managed plugins. + + + +### Properties + +--- + +#### PluginManager.hooks + + HookRegistry`} /> + +The hook registry shared by all plugins. + +--- + +#### PluginManager.plugins + + dict[str, DagronPlugin]`} /> + +Currently registered plugins, as a copy of the internal mapping. + +**Returns:** `dict[str, DagronPlugin]` -- Mapping of plugin names to plugin instances. + +### Methods + +--- + +#### PluginManager.register + + None`} /> + +Register a plugin instance. If a plugin with the same name is already +registered, it is replaced with a `RuntimeWarning`. + + + +--- + +#### PluginManager.discover + + list[str]`} /> + +Discover plugins via `entry_points(group='dagron.plugins')`. 
Each +discovered entry point is loaded, instantiated, and registered. + +**Returns:** `list[str]` -- Names of successfully discovered and registered plugins. + +```python +manager = PluginManager() +discovered = manager.discover() +print(f"Discovered plugins: {discovered}") +``` + +--- + +#### PluginManager.initialize_all + + None`} /> + +Initialize all registered plugins that have not been initialized yet. +Each plugin's `initialize()` method is called with the shared hook registry. +Failures are caught and issued as `RuntimeWarning`. + +--- + +#### PluginManager.teardown_all + + None`} /> + +Tear down all initialized plugins by calling their `teardown()` methods. +Failures are caught and issued as `RuntimeWarning`. + +```python +from dagron.plugins import PluginManager + +manager = PluginManager() +manager.register(TimingPlugin()) +manager.initialize_all() + +# Use manager.hooks with an executor +executor = dagron.DAGExecutor(dag, hooks=manager.hooks) +result = executor.execute(tasks) + +# Clean up +manager.teardown_all() +``` + +--- + +## @dagron_plugin + + type`} /> + +Class decorator that registers a plugin class with the global plugin +manager. The class must be a `DagronPlugin` subclass. It is instantiated +and registered immediately when the decorator is applied. + + + +**Returns:** `type` -- The class, unmodified. + +**Raises:** `TypeError` -- If `cls` is not a `DagronPlugin` subclass. + +```python +from dagron.plugins import dagron_plugin, DagronPlugin, HookRegistry + +@dagron_plugin +class MyPlugin(DagronPlugin): + @property + def name(self) -> str: + return "my_plugin" + + def initialize(self, hooks: HookRegistry) -> None: + # Register hooks here + pass +``` + +--- + +## DashboardPlugin + + None: ...`} /> + +A plugin that serves a live web dashboard showing real-time DAG execution +status. The web server runs in Rust (axum + tokio) on a background OS +thread for minimal Python overhead. 
+ + + + +The `DashboardPlugin` requires dagron to be built with the `dashboard` +Cargo feature. If the feature is not available, `initialize()` raises +`ImportError` with instructions to rebuild. + +### Registered hooks + +The dashboard automatically registers hooks for these events: + +| Event | Behavior | +|-------|----------| +| `PRE_EXECUTE` | Resets the dashboard state, sends the DAG structure. | +| `PRE_NODE` | Marks a node as "started" in the UI. | +| `POST_NODE` | Marks a node as "completed" in the UI. | +| `ON_ERROR` | Marks a node as "failed" with the error message. | +| `POST_EXECUTE` | Sends final execution statistics. | + +```python +import dagron +from dagron.dashboard import DashboardPlugin +from dagron.plugins import HookRegistry + +# Set up hooks and dashboard +hooks = HookRegistry() +dashboard = DashboardPlugin(port=8765, open_browser=True) +dashboard.initialize(hooks) +# Prints: Dashboard: http://127.0.0.1:8765 + +# Execute with hooks +executor = dagron.DAGExecutor(dag, hooks=hooks) +result = executor.execute(tasks) + +# Clean up +dashboard.teardown() +``` + +--- + +## Complete example + +```python +import dagron +from dagron.plugins import ( + DagronPlugin, + HookRegistry, + HookEvent, + HookContext, + PluginManager, +) + +# Define a custom metrics plugin +class MetricsPlugin(DagronPlugin): + @property + def name(self) -> str: + return "metrics" + + def initialize(self, hooks: HookRegistry) -> None: + self.node_count = 0 + self.error_count = 0 + + def count_nodes(ctx: HookContext): + self.node_count += 1 + + def count_errors(ctx: HookContext): + self.error_count += 1 + + hooks.register(HookEvent.POST_NODE, count_nodes) + hooks.register(HookEvent.ON_ERROR, count_errors) + + def teardown(self) -> None: + print(f"Metrics: {self.node_count} nodes executed, {self.error_count} errors") + +# Use with PluginManager +manager = PluginManager() +manager.register(MetricsPlugin()) +manager.initialize_all() + +# Build and execute +dag = ( + 
dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +tasks = { + "extract": lambda: [1, 2, 3], + "transform": lambda: [2, 4, 6], + "load": lambda: "done", +} + +executor = dagron.DAGExecutor(dag, hooks=manager.hooks) +result = executor.execute(tasks) + +# Teardown prints metrics +manager.teardown_all() +# Metrics: 3 nodes executed, 0 errors +``` + +--- + +## See also + +- [Execution](/api/execution/execution) -- the `hooks` parameter on `DAGExecutor`. +- [Tracing](/api/observability/tracing) -- structured event recording during execution. +- [Reactive DAG](/api/execution/reactive) -- push-based reactive execution with subscriptions. diff --git a/docs-next/content/docs/api/utilities/template.mdx b/docs-next/content/docs/api/utilities/template.mdx new file mode 100644 index 0000000..3a57cff --- /dev/null +++ b/docs-next/content/docs/api/utilities/template.mdx @@ -0,0 +1,363 @@ +--- +title: Templates +description: API reference for dagron's DAG template system -- parameterized DAG construction with substitution, validation, and rendering. +--- + +# Templates + +The template module provides parameterized DAG construction. Define a DAG +shape with placeholder values in node names (and optionally in payloads and +metadata), then render concrete DAGs by supplying parameter values. Templates +support type checking, custom validators, default values, and configurable +delimiters. + +```python +from dagron.template import DAGTemplate, TemplateParam, TemplateError +``` + +--- + +## DAGTemplate + + None: ...`} /> + +A parameterized DAG template that renders concrete DAGs by substituting +placeholder values. Placeholders in node names, payloads, and edge labels +are replaced at render time. 
+ + + +```python +from dagron.template import DAGTemplate + +template = DAGTemplate( + params={"env": str, "replicas": int}, + defaults={"env": "staging"}, + descriptions={"env": "Target environment", "replicas": "Number of replicas"}, + validators={"replicas": lambda n: 1 <= n <= 10}, +) + +template.add_node("extract_{{env}}") +template.add_node("load_{{env}}") +template.add_edge("extract_{{env}}", "load_{{env}}") + +dag = template.render(env="prod", replicas=3) +print(list(dag.nodes())) # ['extract_prod', 'load_prod'] +``` + +### Properties + +--- + +#### DAGTemplate.params + + dict[str, TemplateParam]`} /> + +Return a copy of the template parameters. + +**Returns:** `dict[str, TemplateParam]` -- Mapping of parameter names to their `TemplateParam` specifications. + +```python +for name, param in template.params.items(): + print(f"{name}: type={param.type.__name__}, default={param.default}") +``` + +### Methods + +--- + +#### DAGTemplate.add_node + + DAGTemplate`} /> + +Add a templated node. The name may contain `{{param}}` placeholders that +are resolved at render time. Returns `self` for method chaining. + + + +**Returns:** `DAGTemplate` -- Self, for fluent chaining. + +```python +template.add_node("worker_{{env}}_{{replicas}}", payload={"region": "{{env}}"}) +``` + +--- + +#### DAGTemplate.add_edge + + DAGTemplate`} /> + +Add a templated edge. Both node names may contain placeholders. Returns +`self` for method chaining. + + + +**Returns:** `DAGTemplate` -- Self, for fluent chaining. + +```python +template.add_edge("extract_{{env}}", "transform_{{env}}", label="{{env}}_pipeline") +``` + +--- + +#### DAGTemplate.validate_params + + list[str]`} /> + +Validate parameters without rendering. Checks for unknown parameters, +missing required parameters, type mismatches, and custom validator failures. + +**Returns:** `list[str]` -- List of error messages. Empty means all parameters are valid. 
+ +```python +errors = template.validate_params(env="prod", replicas="three") +for err in errors: + print(err) +# Parameter 'replicas' expects int, got str +``` + +--- + +#### DAGTemplate.render + + DAG`} /> + +Render the template into a concrete DAG. All placeholders are resolved +with the provided parameter values (merged with defaults). + +**Returns:** `DAG` -- A new `DAG` instance with all placeholders resolved. + +**Raises:** `TemplateError` -- If parameters are missing, wrong type, or fail custom validation. + +```python +dag = template.render(env="production", replicas=5) +print(dag.node_count()) +``` + +### Type-preserving substitution + +If the entire string is a single placeholder (e.g., `"{{replicas}}"`), the +raw Python value is returned rather than a stringified version. This means +integer and other non-string parameters pass through as their original type +when they are the sole content of a placeholder. + +--- + +#### DAGTemplate.render_builder + + DAGBuilder`} /> + +Render the template into a pre-populated `DAGBuilder`. This allows you to +add additional nodes and edges before calling `.build()`. + +**Returns:** `DAGBuilder` -- A builder with all templated nodes and edges added. + +**Raises:** `TemplateError` -- If parameter validation fails. + +```python +builder = template.render_builder(env="staging", replicas=2) +# Add extra nodes to the rendered template +builder.add_node("monitoring") +builder.add_edge("load_staging", "monitoring") +dag = builder.build() +``` + +--- + +#### DAGTemplate.render_pipeline + + Pipeline`} /> + +Render the template into a `Pipeline`. + + + +**Returns:** `Pipeline` -- A Pipeline wrapping the rendered DAG. + +--- + +## TemplateParam + + + +Specification for a single template parameter. Frozen dataclass. + + + +### Methods + +--- + +#### TemplateParam.validate + + None`} /> + +Validate a value against this parameter's type and optional custom +validator. 
 + + +**Raises:** `TemplateError` -- If the value has the wrong type or fails the custom validator. + +```python +from dagron.template import TemplateParam, TemplateError + +param = TemplateParam( + name="replicas", + type=int, + validator=lambda n: 1 <= n <= 10, +) + +param.validate(5) # OK + +try: + param.validate(15) # Fails validator +except TemplateError as e: + print(e) # Parameter 'replicas' failed custom validation + +try: + param.validate("five") # Wrong type +except TemplateError as e: + print(e) # Parameter 'replicas' expects int, got str +``` + +--- + +## TemplateError + + + +Raised when template validation or rendering fails. This includes missing +parameters, type mismatches, unknown parameters, and custom validator +failures. + +```python +from dagron.template import DAGTemplate, TemplateError + +template = DAGTemplate(params={"env": str}) +template.add_node("node_{{env}}") + +try: + dag = template.render() # Missing required parameter +except TemplateError as e: + print(e) # Missing required parameter: 'env' +``` + +--- + +## Complete example + +```python +from dagron.template import DAGTemplate + +# Define a reusable ETL template +etl_template = DAGTemplate( + params={ + "source": str, + "target": str, + "parallelism": int, + }, + defaults={"parallelism": 4}, + descriptions={ + "source": "Data source identifier", + "target": "Destination database", + "parallelism": "Number of parallel transform workers", + }, + validators={ + "parallelism": lambda n: 1 <= n <= 32, + }, +) + +# Define the template shape (inside an f-string, {{{{...}}}} renders to the +# literal {{...}} placeholder the template engine substitutes at render time) +etl_template.add_node("extract_{{source}}") +for i in range(4): # max parallelism slots + etl_template.add_node(f"transform_{{{{source}}}}_{i}") + etl_template.add_edge(f"extract_{{{{source}}}}", f"transform_{{{{source}}}}_{i}") +etl_template.add_node("load_{{target}}") + +# Validate before rendering +errors = etl_template.validate_params(source="api", target="warehouse") +assert not errors + +# Render multiple concrete DAGs +api_dag = 
etl_template.render(source="api", target="warehouse") +db_dag = etl_template.render(source="db", target="lake", parallelism=2) + +# Use render_builder for customization +builder = etl_template.render_builder(source="s3", target="redshift") +builder.add_node("notify") +builder.add_edge("load_redshift", "notify") +custom_dag = builder.build() + +# Inspect template parameters +for name, param in etl_template.params.items(): + print(f" {name} ({param.type.__name__}): {param.description}") + if param.default is not None: + print(f" default: {param.default}") +``` + +--- + +## See also + +- [DAGBuilder](/api/core/builder) -- the builder returned by `render_builder()`. +- [Composition](/api/utilities/compose) -- combining multiple rendered DAGs. +- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns overview. diff --git a/docs-next/content/docs/api/utilities/versioning.mdx b/docs-next/content/docs/api/utilities/versioning.mdx new file mode 100644 index 0000000..1ce481f --- /dev/null +++ b/docs-next/content/docs/api/utilities/versioning.mdx @@ -0,0 +1,408 @@ +--- +title: Versioning +description: API reference for dagron's DAG versioning system -- structural time-travel, mutation history, diffing, and forking. +--- + +# Versioning + +The versioning module provides structural versioning and time-travel for +DAGs. Every mutation (add/remove node, add/remove edge, set payload/metadata) +is recorded in an append-only log. You can navigate to any historical version, +diff between arbitrary versions, inspect the full mutation history, and fork +independent copies from any point. + +```python +from dagron.versioning import VersionedDAG, Mutation, MutationType +``` + +--- + +## VersionedDAG + + None: ...`} /> + +DAG with full structural versioning and time-travel. Every mutation is +recorded in an append-only log with timestamps. The version counter starts +at 0 (empty state) and increments by 1 for each mutation. 
+ + + +```python +from dagron.versioning import VersionedDAG + +vdag = VersionedDAG() +vdag.add_node("extract") +vdag.add_node("transform") +vdag.add_edge("extract", "transform") + +print(vdag.version) # 3 +print(vdag.dag.node_count()) # 2 +``` + +### Properties + +--- + +#### VersionedDAG.dag + + DAG`} /> + +The current DAG object. Read-only access is recommended -- use the +`VersionedDAG` mutation methods instead to ensure changes are tracked. + +**Returns:** `DAG` -- The underlying DAG in its current state. + +--- + +#### VersionedDAG.version + + int`} /> + +Current version number. Starts at 0 and increments by 1 for each mutation. + +**Returns:** `int` -- The current version. + +### Mutation methods + +These methods mirror the `DAG` API but record each operation in the +version log. + +--- + +#### VersionedDAG.add_node + + None`} /> + +Add a node and record the mutation. + + + +--- + +#### VersionedDAG.remove_node + + None`} /> + +Remove a node and all its edges, and record the mutation. + + + +--- + +#### VersionedDAG.add_edge + + None`} /> + +Add an edge and record the mutation. + + + +**Raises:** `CycleError` -- If adding the edge would create a cycle. + +--- + +#### VersionedDAG.remove_edge + + None`} /> + +Remove an edge and record the mutation. + + + +--- + +#### VersionedDAG.set_payload + + None`} /> + +Set a node's payload and record the mutation. + + + +--- + +#### VersionedDAG.set_metadata + + None`} /> + +Set a node's metadata and record the mutation. + + + +### Time-travel methods + +--- + +#### VersionedDAG.at_version + + DAG`} /> + +Reconstruct the DAG at a specific version by replaying mutations from +version 0 up to the specified version. + + + +**Returns:** `DAG` -- A new DAG representing the state at that version. + +**Raises:** `ValueError` -- If version is negative or exceeds the current version. 
+ +```python +# Build up history +vdag = VersionedDAG() +vdag.add_node("a") # version 1 +vdag.add_node("b") # version 2 +vdag.add_edge("a", "b") # version 3 +vdag.add_node("c") # version 4 + +# Time-travel to version 2 +old_dag = vdag.at_version(2) +print(old_dag.node_count()) # 2 (only "a" and "b") +print(old_dag.edge_count()) # 0 (edge was added in version 3) +``` + +--- + +#### VersionedDAG.diff_versions + + GraphDiff`} /> + +Diff two versions of the DAG. Reconstructs both versions and computes +the structural difference. + + + +**Returns:** `GraphDiff` -- Structural differences between the two versions, including added/removed nodes and edges. + +```python +diff = vdag.diff_versions(1, 4) +print(diff) # Shows nodes/edges added between version 1 and version 4 +``` + +--- + +#### VersionedDAG.history + + list[Mutation]`} /> + +Get the full mutation history as an ordered list. + +**Returns:** `list[Mutation]` -- All recorded mutations in chronological order. + +```python +for mutation in vdag.history(): + print(f"v{mutation.version}: {mutation.mutation_type.value} {mutation.args}") +# v1: add_node {'name': 'a'} +# v2: add_node {'name': 'b'} +# v3: add_edge {'from_node': 'a', 'to_node': 'b'} +``` + +--- + +#### VersionedDAG.history_since + + list[Mutation]`} /> + +Get mutations since a specific version (exclusive). + + this value."}, +]} /> + +**Returns:** `list[Mutation]` -- Mutations after the given version. + +```python +recent = vdag.history_since(2) +# Returns mutations at version 3 and 4 +``` + +--- + +#### VersionedDAG.fork + + VersionedDAG`} /> + +Create an independent fork of this versioned DAG. The fork gets its own +copy of the DAG and history up to the fork point. Subsequent mutations +in either the original or the fork do not affect the other. + + + +**Returns:** `VersionedDAG` -- A new independent `VersionedDAG` forked from the specified version. 
+ +```python +# Fork from the current state +fork = vdag.fork() +fork.add_node("d") + +print(vdag.dag.node_count()) # 3 (original unchanged) +print(fork.dag.node_count()) # 4 + +# Fork from a specific historical version +old_fork = vdag.fork(at_version=2) +print(old_fork.version) # 2 +print(old_fork.dag.node_count()) # 2 +``` + +--- + +## Mutation + + + +A single recorded mutation in the version log. Frozen dataclass. + + + +--- + +## MutationType + + + +Enumeration of all mutation types that can be recorded. + +| Value | Description | +|-------|-------------| +| `ADD_NODE` | A node was added to the DAG. | +| `REMOVE_NODE` | A node (and its edges) was removed. | +| `ADD_EDGE` | An edge was added between two nodes. | +| `REMOVE_EDGE` | An edge was removed. | +| `SET_PAYLOAD` | A node's payload was updated. | +| `SET_METADATA` | A node's metadata was updated. | + +--- + +## Complete example + +```python +from dagron.versioning import VersionedDAG, MutationType + +# Create a versioned DAG and build it incrementally +vdag = VersionedDAG() + +# Phase 1: Basic pipeline +vdag.add_node("extract", payload={"source": "api"}) # v1 +vdag.add_node("transform") # v2 +vdag.add_edge("extract", "transform") # v3 +vdag.add_node("load") # v4 +vdag.add_edge("transform", "load") # v5 +print(f"Phase 1 complete: v{vdag.version}") # v5 + +# Phase 2: Add validation step +vdag.add_node("validate") # v6 +vdag.add_edge("transform", "validate") # v7 +vdag.add_edge("validate", "load") # v8 +vdag.remove_edge("transform", "load") # v9 +print(f"Phase 2 complete: v{vdag.version}") # v9 + +# Time-travel: see the DAG before validation was added +phase1_dag = vdag.at_version(5) +print(f"Phase 1: {phase1_dag.node_count()} nodes, {phase1_dag.edge_count()} edges") +# Phase 1: 3 nodes, 2 edges + +# Diff between phases +diff = vdag.diff_versions(5, 9) +print(diff) + +# Inspect mutation log +print("\nMutation history:") +for m in vdag.history(): + print(f" v{m.version}: {m.mutation_type.value} {m.args}") + 
+# Fork for experimentation +experiment = vdag.fork() +experiment.add_node("cache") +experiment.add_edge("extract", "cache") +experiment.add_edge("cache", "transform") + +print(f"\nOriginal: {vdag.dag.node_count()} nodes") # 4 +print(f"Experiment: {experiment.dag.node_count()} nodes") # 5 + +# Fork from an earlier version +legacy = vdag.fork(at_version=5) +print(f"Legacy fork: v{legacy.version}, {legacy.dag.node_count()} nodes") +# Legacy fork: v5, 3 nodes + +# Get recent changes +recent = vdag.history_since(5) +print(f"\nChanges since v5: {len(recent)} mutations") +for m in recent: + print(f" {m.mutation_type.value}: {m.args}") +``` + +--- + +## See also + +- [DAG](/api/core/core) -- the core graph class that `VersionedDAG` wraps. +- [Composition](/api/utilities/compose) -- combining versioned DAGs. +- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns and best practices. diff --git a/docs-next/content/docs/guide/advanced/contracts.mdx b/docs-next/content/docs/guide/advanced/contracts.mdx new file mode 100644 index 0000000..6f3b6d9 --- /dev/null +++ b/docs-next/content/docs/guide/advanced/contracts.mdx @@ -0,0 +1,384 @@ +--- +title: Contracts +description: Enforce type contracts across DAG edges to catch mismatches before execution. +--- + +# Contracts + +When DAGs grow large, it becomes easy for the output type of one node to drift from what a downstream node expects. dagron's **contract system** lets you declare input and output types for each node and validate them at build time -- before any task runs. This catches type mismatches early, similar to how a compiler checks function signatures. + +output: list"] + clean["clean
input: list, output: dict"] + model["train_model
input: dict, output: float"] + report["report
input: str ❌"] + fetch --> clean + clean --> model + model --> report + style report fill:#ffcdd2,stroke:#c62828`} + caption="The contract validator catches that 'train_model' outputs float but 'report' expects str as input." +/> + +--- + +## Core Classes + +| Class | Role | +|---|---| +| [`NodeContract`](/api/analysis/contracts#nodecontract) | Declares input types (per dependency) and output type for a single node. | +| [`ContractValidator`](/api/analysis/contracts#contractvalidator) | Validates contracts across all edges in a DAG. | +| [`ContractViolation`](/api/analysis/contracts#contractviolation) | Describes a single type mismatch: from_node, to_node, and a human-readable message. | +| [`extract_contracts`](/api/analysis/contracts#extract_contracts) | Auto-extract contracts from a Pipeline's `@task` functions using `typing.get_type_hints`. | +| [`validate_contracts`](/api/analysis/contracts#validate_contracts) | Convenience function: extract + validate in one call. | + +--- + +## Defining Contracts Manually + +### NodeContract + +A `NodeContract` declares what types a node expects from its dependencies and what type it produces: + +```python +from dagron.contracts import NodeContract + +# This node expects its "fetch_data" dependency to provide a list, +# and it outputs a dict. +clean_contract = NodeContract( + inputs={"fetch_data": list}, + output=dict, +) + +# This node expects "clean_data" to provide a dict, +# and outputs a float. +train_contract = NodeContract( + inputs={"clean_data": dict}, + output=float, +) +``` + +The `inputs` dict maps **dependency node names** to their expected types. The `output` is the type this node produces. + +The special type `object` acts as a wildcard (equivalent to `Any`) -- it matches any type. 
+ +### Validating + +Create a `ContractValidator` with a DAG and contracts, then call `validate()`: + +```python +import dagron +from dagron.contracts import ContractValidator, NodeContract + +dag = ( + dagron.DAG.builder() + .add_node("fetch_data") + .add_node("clean_data") + .add_node("train_model") + .add_node("generate_report") + .add_edge("fetch_data", "clean_data") + .add_edge("clean_data", "train_model") + .add_edge("train_model", "generate_report") + .build() +) + +contracts = { + "fetch_data": NodeContract(output=list), + "clean_data": NodeContract(inputs={"fetch_data": list}, output=dict), + "train_model": NodeContract(inputs={"clean_data": dict}, output=float), + "generate_report": NodeContract(inputs={"train_model": str}, output=str), + # ^^^ BUG: should be float +} + +validator = ContractValidator(dag, contracts) +violations = validator.validate() + +for v in violations: + print(f" {v.from_node} -> {v.to_node}: {v.message}") +``` + +Output: + +``` + train_model -> generate_report: Type mismatch on edge train_model -> generate_report: + producer outputs float, but consumer expects str +``` + +--- + +## How Validation Works + +For every edge `(u, v)` in the DAG, the validator: + +1. Looks up `v`'s contract to find the expected input type for dependency `u`. +2. Looks up `u`'s contract to find its declared output type. +3. Checks compatibility using `issubclass(actual_output, expected_input)`. + +If the output type is not a subclass of the expected input type, a `ContractViolation` is recorded. + +### Type Compatibility Rules + +- `object` always matches (wildcard). +- Standard Python inheritance works: if `B` is a subclass of `A`, then `B` satisfies an `A` contract. +- Generic type aliases (e.g., `list[int]`) fall back to `True` if `issubclass` raises `TypeError`. 
+ +```python +from dagron.contracts import NodeContract + +# int is a subclass of object -- always valid +NodeContract(inputs={"dep": object}, output=int) + +# bool is a subclass of int -- valid +NodeContract(inputs={"dep": int}, output=bool) + +# str is NOT a subclass of int -- violation +NodeContract(inputs={"dep": int}, output=str) +``` + +--- + +## ContractViolation + +Each violation is a frozen dataclass with three fields: + +```python +@dataclass(frozen=True) +class ContractViolation: + from_node: str # the upstream node + to_node: str # the downstream node + message: str # human-readable description +``` + +You can use violations to fail a CI check: + +```python +violations = validator.validate() +if violations: + for v in violations: + print(f"ERROR: {v.message}") + raise SystemExit(1) # fail the build +``` + +--- + +## Auto-Extracting Contracts from Pipelines + +If you use dagron's `Pipeline` / `@task` decorator pattern with type annotations, you can auto-extract contracts from the function signatures: + +```python +import dagron +from dagron.contracts import extract_contracts, validate_contracts + +@dagron.task(dependencies=[]) +def fetch_data() -> list: + return [1, 2, 3] + +@dagron.task(dependencies=["fetch_data"]) +def clean_data(fetch_data: list) -> dict: + return {"values": fetch_data} + +@dagron.task(dependencies=["clean_data"]) +def train_model(clean_data: dict) -> float: + return 0.95 + +@dagron.task(dependencies=["train_model"]) +def generate_report(train_model: float) -> str: + return f"Accuracy: {train_model}" + +pipeline = dagron.Pipeline([fetch_data, clean_data, train_model, generate_report]) +``` + +### extract_contracts() + +Reads `typing.get_type_hints()` from each task function to build `NodeContract` instances: + +```python +contracts = extract_contracts(pipeline) + +for name, contract in contracts.items(): + print(f" {name}: inputs={contract.inputs}, output={contract.output}") +``` + +Output: + +``` + fetch_data: inputs={}, output= + 
 clean_data: inputs={'fetch_data': <class 'list'>}, output=<class 'dict'>
+ train_model: inputs={'clean_data': <class 'dict'>}, output=<class 'float'>
+ generate_report: inputs={'train_model': <class 'float'>}, output=<class 'str'>
+```
+
+### validate_contracts()
+
+One-liner that extracts and validates in a single call:
+
+```python
+violations = validate_contracts(pipeline)
+if not violations:
+    print("All contracts valid!")
+```
+
+You can also provide extra manually-defined contracts that override the auto-extracted ones:
+
+```python
+violations = validate_contracts(
+    pipeline,
+    extra_contracts={
+        "fetch_data": NodeContract(output=dict),  # override
+    },
+)
+```
+
+---
+
+## Using Contracts with DAGBuilder
+
+You can attach contracts during DAG construction via the builder pattern:
+
+```python
+dag = (
+    dagron.DAG.builder()
+    .add_node("extract")
+    .add_node("transform")
+    .add_node("load")
+    .add_edge("extract", "transform")
+    .add_edge("transform", "load")
+    .contract("extract", NodeContract(output=list))
+    .contract("transform", NodeContract(inputs={"extract": list}, output=dict))
+    .contract("load", NodeContract(inputs={"transform": dict}, output=bool))
+    .build()
+)
+```
+
+---
+
+## Complete Validation Example
+
+Here is a full example that demonstrates catching a type mismatch in a data pipeline:
+
+```python
+import dagron
+from dagron.contracts import ContractValidator, ContractViolation, NodeContract
+
+# Build the DAG
+dag = (
+    dagron.DAG.builder()
+    .add_node("read_csv")
+    .add_node("parse_dates")
+    .add_node("compute_stats")
+    .add_node("render_chart")
+    .add_node("send_email")
+    .add_edge("read_csv", "parse_dates")
+    .add_edge("parse_dates", "compute_stats")
+    .add_edge("compute_stats", "render_chart")
+    .add_edge("render_chart", "send_email")
+    .build()
+)
+
+# Define contracts
+contracts = {
+    "read_csv": NodeContract(output=list),  # list of rows
+    "parse_dates": NodeContract(inputs={"read_csv": list}, output=list),
+    "compute_stats": NodeContract(inputs={"parse_dates": list}, output=dict),
+    "render_chart": 
NodeContract(inputs={"compute_stats": dict}, output=bytes), # PNG bytes + "send_email": NodeContract(inputs={"render_chart": str}, output=bool), + # ^^^ BUG: chart is bytes, not str +} + +# Validate +validator = ContractValidator(dag, contracts) +violations = validator.validate() + +if violations: + print(f"Found {len(violations)} contract violation(s):") + for v in violations: + print(f" {v.message}") +else: + print("All contracts valid.") +``` + +Output: + +``` +Found 1 contract violation(s): + Type mismatch on edge render_chart -> send_email: + producer outputs bytes, but consumer expects str +``` + +--- + +## Contracts in CI/CD + +Add contract validation as a pre-execution check in your CI pipeline: + +```python +def validate_pipeline(pipeline): + """Run as part of CI -- fail if contracts are violated.""" + violations = validate_contracts(pipeline) + if violations: + print("Contract violations detected:") + for v in violations: + print(f" ERROR: {v.message}") + raise SystemExit(1) + print("All contracts valid.") + +# In your CI script: +validate_pipeline(my_pipeline) +``` + +This catches type drift when someone changes a task's return type without updating the downstream consumer. + +--- + +## Partial Contracts + +You do not need to define contracts for every node. Nodes without contracts are silently skipped during validation. 
This lets you adopt contracts incrementally: + +```python +contracts = { + # Only validate the critical path + "train_model": NodeContract(inputs={"features": dict}, output=float), + "deploy": NodeContract(inputs={"train_model": float}, output=bool), +} + +validator = ContractValidator(dag, contracts) +violations = validator.validate() # only checks edges between contracted nodes +``` + +--- + +## Wildcard Types + +Use `object` as a wildcard that accepts any type: + +```python +# This node accepts anything from its dependency +contracts = { + "logger": NodeContract(inputs={"any_node": object}, output=object), +} +``` + +This is useful for utility nodes (loggers, monitors) that process arbitrary data. + +--- + +## Best Practices + +1. **Add type annotations to all `@task` functions.** This enables `extract_contracts()` to work automatically. + +2. **Run `validate_contracts()` in CI.** Catch type mismatches before they cause runtime errors. + +3. **Start with the critical path.** You do not need full coverage immediately -- contract a few key nodes and expand over time. + +4. **Use concrete types, not `object`.** The more specific your contracts, the more errors they catch. Reserve `object` for truly polymorphic nodes. + +5. **Combine with [DataFrames](/guide/advanced/dataframes).** For pandas/polars pipelines, use `DataFrameSchema` for column-level validation and `NodeContract` for edge-level type checking. + +--- + +## Related + +- [API Reference: Contracts](/api/analysis/contracts) -- full API documentation. +- [DataFrames](/guide/advanced/dataframes) -- schema validation for DataFrame pipelines. +- [Building DAGs](/guide/core-concepts/building-dags) -- the builder `.contract()` method. +- [Error Handling](/guide/observability/error-handling) -- how violations integrate with the error flow. 
diff --git a/docs-next/content/docs/guide/advanced/dataframes.mdx b/docs-next/content/docs/guide/advanced/dataframes.mdx new file mode 100644 index 0000000..8c4602f --- /dev/null +++ b/docs-next/content/docs/guide/advanced/dataframes.mdx @@ -0,0 +1,475 @@ +--- +title: DataFrames +description: Validate pandas and polars DataFrames at DAG edge boundaries with schema definitions. +--- + +# DataFrames + +Data pipelines frequently pass DataFrames between nodes. A missing column, a wrong dtype, or an unexpectedly empty table can cascade through the pipeline and produce silent corruption. dagron's DataFrame integration lets you define **schemas** at edge boundaries and validate DataFrames automatically -- catching issues at the source instead of downstream. + +The system works with both **pandas** and **polars** DataFrames. No additional dependencies are required beyond what you already use. + +Schema: id(int), name(str), email(str)
min_rows: 1"] + clean["clean
Schema: id(int), name(str), email(str)
no nulls in email"] + aggregate["aggregate
Schema: domain(str), count(int)"] + report["report"] + extract --> clean --> aggregate --> report + style extract fill:#e3f2fd,stroke:#1565c0 + style clean fill:#e3f2fd,stroke:#1565c0 + style aggregate fill:#e3f2fd,stroke:#1565c0`} + caption="Each node has a schema that its output DataFrame must satisfy." +/> + +--- + +## Core Classes + +| Class | Role | +|---|---| +| [`DataFramePipeline`](/api/analysis/dataframe#dataframepipeline) | Wraps a DAG with schema definitions. Validates execution results against schemas. | +| [`DataFrameSchema`](/api/analysis/dataframe#dataframeschema) | Defines the expected shape of a DataFrame: columns, row count bounds. | +| [`ColumnSchema`](/api/analysis/dataframe#columnschema) | Defines a single column: name, dtype, nullable, required. | +| [`SchemaViolation`](/api/analysis/dataframe#schemaviolation) | Describes a single validation failure: node name and message. | +| [`validate_schema`](/api/analysis/dataframe#validate_schema) | Standalone function to validate any DataFrame against a schema. | + +--- + +## Defining Schemas + +### ColumnSchema + +Each column is described by a `ColumnSchema`: + +```python +from dagron.dataframe import ColumnSchema + +col = ColumnSchema( + name="user_id", + dtype="int", # substring match against the actual dtype string + nullable=False, # reject null values + required=True, # column must be present (default) +) +``` + +| Parameter | Type | Default | Description | +|---|---|---|---| +| `name` | `str` | -- | Column name (exact match). | +| `dtype` | `str \| None` | `None` | Expected dtype as a substring. `"int"` matches `int64`, `Int64`, etc. | +| `nullable` | `bool` | `True` | If `False`, the column must contain no null/NaN values. | +| `required` | `bool` | `True` | If `True`, the column must exist in the DataFrame. | + +The `dtype` check uses substring matching, so `"int"` matches both pandas `int64` and polars `Int64`. This provides cross-framework compatibility without requiring exact dtype strings. 
+ +### DataFrameSchema + +Group column schemas and optional row count bounds into a `DataFrameSchema`: + +```python +from dagron.dataframe import DataFrameSchema, ColumnSchema + +schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="int", nullable=False), + ColumnSchema("name", dtype="str"), + ColumnSchema("email", dtype="str", nullable=False), + ColumnSchema("age", dtype="int", required=False), # optional column + ], + min_rows=1, # at least 1 row + max_rows=1000000, # at most 1M rows +) +``` + +--- + +## Quick Start + +```python +import dagron +import pandas as pd +from dagron.dataframe import ( + ColumnSchema, + DataFramePipeline, + DataFrameSchema, +) + +# 1. Build the DAG +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("clean") + .add_node("aggregate") + .add_edge("extract", "clean") + .add_edge("clean", "aggregate") + .build() +) + +# 2. Define schemas for each node's output +schemas = { + "extract": DataFrameSchema( + columns=[ + ColumnSchema("user_id", dtype="int", nullable=False), + ColumnSchema("name", dtype="str"), + ColumnSchema("email", dtype="str"), + ], + min_rows=1, + ), + "clean": DataFrameSchema( + columns=[ + ColumnSchema("user_id", dtype="int", nullable=False), + ColumnSchema("name", dtype="str", nullable=False), + ColumnSchema("email", dtype="str", nullable=False), + ], + ), + "aggregate": DataFrameSchema( + columns=[ + ColumnSchema("domain", dtype="str"), + ColumnSchema("count", dtype="int"), + ], + ), +} + +# 3. Define tasks +tasks = { + "extract": lambda: pd.DataFrame({ + "user_id": [1, 2, 3], + "name": ["Alice", "Bob", None], + "email": ["alice@a.com", "bob@b.com", "charlie@c.com"], + }), + "clean": lambda: pd.DataFrame({ + "user_id": [1, 2, 3], + "name": ["Alice", "Bob", "Charlie"], + "email": ["alice@a.com", "bob@b.com", "charlie@c.com"], + }), + "aggregate": lambda: pd.DataFrame({ + "domain": ["a.com", "b.com", "c.com"], + "count": [1, 1, 1], + }), +} + +# 4. 
Execute the DAG +executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) + +# 5. Validate results against schemas +pipeline = DataFramePipeline(dag, schemas) +violations = pipeline.validate_result(result) + +if violations: + for v in violations: + print(f" [{v.node_name}] {v.message}") +else: + print("All schemas valid!") +``` + +--- + +## Validation Rules + +The schema validator checks four things for each node: + +### 1. Required Columns + +If a column is `required=True` (the default) and is missing from the DataFrame: + +```python +schema = DataFrameSchema(columns=[ColumnSchema("missing_col", required=True)]) +# Violation: "Missing required column 'missing_col'" +``` + +### 2. Data Types + +If a `dtype` is specified, the actual dtype must contain the expected string as a substring: + +```python +schema = DataFrameSchema(columns=[ColumnSchema("age", dtype="int")]) + +# pandas int64 -> "int64" contains "int" -> valid +# pandas object -> "object" does NOT contain "int" -> violation +``` + +This means `dtype="int"` matches `int8`, `int16`, `int32`, `int64`, `Int64`, `UInt32`, etc. + +### 3. Null Values + +If `nullable=False`, the column must not contain any null/NaN values: + +```python +schema = DataFrameSchema(columns=[ColumnSchema("email", nullable=False)]) + +# DataFrame with email = ["alice@a.com", None, "bob@b.com"] +# Violation: "Column 'email' has null values but nullable=False" +``` + +### 4. 
Row Count Bounds + +```python +schema = DataFrameSchema(min_rows=1, max_rows=10000) + +# Empty DataFrame +# Violation: "Expected at least 1 rows, got 0" + +# DataFrame with 20000 rows +# Violation: "Expected at most 10000 rows, got 20000" +``` + +--- + +## Standalone Validation + +Use `validate_schema()` to validate any DataFrame without a full pipeline: + +```python +import pandas as pd +from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema + +df = pd.DataFrame({ + "id": [1, 2, 3], + "name": ["Alice", None, "Charlie"], +}) + +schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="int", nullable=False), + ColumnSchema("name", dtype="str", nullable=False), + ], + min_rows=1, +) + +violations = validate_schema(df, schema, node_name="my_step") +for v in violations: + print(f" {v.message}") +# "Column 'name' has null values but nullable=False" +``` + +--- + +## Polars Support + +The same schemas work with polars DataFrames -- no changes needed: + +```python +import polars as pl +from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema + +df = pl.DataFrame({ + "id": [1, 2, 3], + "name": ["Alice", "Bob", "Charlie"], + "score": [0.9, 0.8, 0.7], +}) + +schema = DataFrameSchema( + columns=[ + ColumnSchema("id", dtype="Int", nullable=False), # polars uses "Int64" + ColumnSchema("name", dtype="Utf8"), + ColumnSchema("score", dtype="Float"), + ], +) + +violations = validate_schema(df, schema) +if not violations: + print("Valid!") +``` + +dagron detects the framework automatically by inspecting the object's module path. Both `pandas.DataFrame` and `polars.DataFrame` are supported. 
+ +--- + +## Validating Individual Node Outputs + +Use `validate_value()` on a `DataFramePipeline` to validate a single node's output: + +```python +pipeline = DataFramePipeline(dag, schemas) + +df = pd.DataFrame({"user_id": [1, 2], "name": ["Alice", "Bob"]}) +violations = pipeline.validate_value("extract", df) + +if violations: + print("extract output is invalid:") + for v in violations: + print(f" {v.message}") +``` + +This is useful for validating intermediate results during development. + +--- + +## SchemaViolation + +Each violation is a frozen dataclass: + +```python +@dataclass(frozen=True) +class SchemaViolation: + node_name: str # which node produced the invalid DataFrame + message: str # human-readable description +``` + +--- + +## Complete pandas Pipeline Example + +```python +import dagron +import pandas as pd +from dagron.dataframe import ( + ColumnSchema, + DataFramePipeline, + DataFrameSchema, + validate_schema, +) + +# Build DAG +dag = ( + dagron.DAG.builder() + .add_node("read_orders") + .add_node("filter_active") + .add_node("compute_revenue") + .add_node("top_customers") + .add_edge("read_orders", "filter_active") + .add_edge("filter_active", "compute_revenue") + .add_edge("compute_revenue", "top_customers") + .build() +) + +# Define schemas +schemas = { + "read_orders": DataFrameSchema( + columns=[ + ColumnSchema("order_id", dtype="int", nullable=False), + ColumnSchema("customer_id", dtype="int", nullable=False), + ColumnSchema("amount", dtype="float"), + ColumnSchema("status", dtype="str"), + ], + min_rows=1, + ), + "filter_active": DataFrameSchema( + columns=[ + ColumnSchema("order_id", dtype="int", nullable=False), + ColumnSchema("customer_id", dtype="int", nullable=False), + ColumnSchema("amount", dtype="float", nullable=False), + ], + ), + "compute_revenue": DataFrameSchema( + columns=[ + ColumnSchema("customer_id", dtype="int", nullable=False), + ColumnSchema("total_revenue", dtype="float", nullable=False), + ], + ), + "top_customers": 
DataFrameSchema( + columns=[ + ColumnSchema("customer_id", dtype="int"), + ColumnSchema("total_revenue", dtype="float"), + ColumnSchema("rank", dtype="int"), + ], + max_rows=100, # top 100 + ), +} + +# Tasks +def read_orders(): + return pd.DataFrame({ + "order_id": [1, 2, 3, 4, 5], + "customer_id": [101, 102, 101, 103, 102], + "amount": [50.0, 75.0, 30.0, 100.0, 45.0], + "status": ["active", "active", "cancelled", "active", "active"], + }) + +def filter_active(): + df = read_orders() + return df[df["status"] == "active"][["order_id", "customer_id", "amount"]] + +def compute_revenue(): + df = filter_active() + return df.groupby("customer_id")["amount"].sum().reset_index().rename( + columns={"amount": "total_revenue"} + ) + +def top_customers(): + df = compute_revenue() + df = df.sort_values("total_revenue", ascending=False).head(100) + df["rank"] = range(1, len(df) + 1) + return df + +# Execute and validate +executor = dagron.DAGExecutor(dag) +result = executor.execute({ + "read_orders": read_orders, + "filter_active": filter_active, + "compute_revenue": compute_revenue, + "top_customers": top_customers, +}) + +pipeline = DataFramePipeline(dag, schemas) +violations = pipeline.validate_result(result) + +if violations: + print(f"{len(violations)} schema violation(s):") + for v in violations: + print(f" [{v.node_name}] {v.message}") +else: + print("All schemas valid!") +``` + +--- + +## Combining with Contracts + +Use `NodeContract` for type-level checking (ensuring nodes produce DataFrames) and `DataFrameSchema` for content-level checking (columns, dtypes, nulls): + +```python +from dagron.contracts import NodeContract, ContractValidator + +# Type-level: ensure the node outputs a DataFrame +import pandas as pd +contracts = { + "read_orders": NodeContract(output=pd.DataFrame), + "filter_active": NodeContract(inputs={"read_orders": pd.DataFrame}, output=pd.DataFrame), + "compute_revenue": NodeContract(inputs={"filter_active": pd.DataFrame}, output=pd.DataFrame), +} + +# 
Validate types +type_violations = ContractValidator(dag, contracts).validate() + +# Content-level: validate column schemas +content_violations = DataFramePipeline(dag, schemas).validate_result(result) +``` + +--- + +## Non-DataFrame Handling + +If a node's output is not a pandas or polars DataFrame, the validator reports it: + +```python +schema = DataFrameSchema(columns=[ColumnSchema("id")]) +violations = validate_schema("not a dataframe", schema, node_name="bad_node") +# SchemaViolation("bad_node", "Expected DataFrame, got str") +``` + +--- + +## Best Practices + +1. **Define schemas for every node that produces a DataFrame.** This creates a complete validation boundary at every step. + +2. **Use `nullable=False` for critical columns.** Null values in ID or key columns are a common source of downstream errors. + +3. **Set `min_rows=1` for extract nodes.** Catch empty result sets immediately instead of letting them flow through. + +4. **Use `max_rows` for output nodes.** Prevent accidentally returning millions of rows to a reporting step. + +5. **Run validation in CI.** Execute the pipeline with test data and validate schemas as part of your test suite. + +6. **Use substring dtype matching.** Specify `"int"` instead of `"int64"` for cross-framework compatibility between pandas and polars. + +--- + +## Related + +- [API Reference: DataFrames](/api/analysis/dataframe) -- full API documentation. +- [Contracts](/guide/advanced/contracts) -- type-level edge validation. +- [Executing Tasks](/guide/core-concepts/executing-tasks) -- the execution model that produces `ExecutionResult`. +- [Error Handling](/guide/observability/error-handling) -- handling violations as part of the error flow. 
diff --git a/docs-next/content/docs/guide/advanced/meta.json b/docs-next/content/docs/guide/advanced/meta.json new file mode 100644 index 0000000..4aa34e8 --- /dev/null +++ b/docs-next/content/docs/guide/advanced/meta.json @@ -0,0 +1,11 @@ +{ + "title": "Advanced", + "defaultOpen": false, + "pages": [ + "templates", + "versioning", + "contracts", + "dataframes", + "plugins-hooks" + ] +} diff --git a/docs-next/content/docs/guide/advanced/plugins-hooks.mdx b/docs-next/content/docs/guide/advanced/plugins-hooks.mdx new file mode 100644 index 0000000..c8416ac --- /dev/null +++ b/docs-next/content/docs/guide/advanced/plugins-hooks.mdx @@ -0,0 +1,532 @@ +--- +title: Plugins & Hooks +description: Extend dagron with plugins and react to lifecycle events via the hook system. +--- + +# Plugins & Hooks + +dagron provides an event-driven plugin system that lets you hook into every stage of DAG construction and execution. Plugins can log events, collect metrics, serve live dashboards, send notifications, or implement any cross-cutting concern without modifying your pipeline code. + +The system has three layers: + +1. **HookRegistry** -- registers and fires callbacks for lifecycle events. +2. **DagronPlugin** -- abstract base class for plugin implementations. +3. **PluginManager** -- discovers, initializes, and tears down plugins. + +```mermaid +graph LR + Plugin1["LoggingPlugin"] --> HookRegistry + Plugin2["MetricsPlugin"] --> HookRegistry + Plugin3["DashboardPlugin"] --> HookRegistry + HookRegistry --> PRE_EXECUTE + HookRegistry --> PRE_NODE + HookRegistry --> POST_NODE + HookRegistry --> ON_ERROR + HookRegistry --> POST_EXECUTE +``` + +--- + +## Hook Events + +The `HookEvent` enum defines the lifecycle events you can subscribe to: + +| Event | Fires when... | Context fields | +|---|---|---| +| `PRE_BUILD` | A DAGBuilder starts building. | `dag` (partial) | +| `POST_BUILD` | A DAGBuilder finishes building. | `dag` (complete) | +| `PRE_EXECUTE` | Execution begins. 
| `dag` | +| `POST_EXECUTE` | Execution completes. | `dag`, `execution_result` | +| `PRE_NODE` | A node is about to execute. | `dag`, `node_name` | +| `POST_NODE` | A node has finished executing. | `dag`, `node_name`, `node_result` | +| `ON_ERROR` | A node raises an exception. | `dag`, `node_name`, `error` | + +```python +from dagron.plugins.hooks import HookEvent + +HookEvent.PRE_EXECUTE # "pre_execute" +HookEvent.POST_EXECUTE # "post_execute" +HookEvent.PRE_NODE # "pre_node" +HookEvent.POST_NODE # "post_node" +HookEvent.ON_ERROR # "on_error" +HookEvent.PRE_BUILD # "pre_build" +HookEvent.POST_BUILD # "post_build" +``` + +--- + +## HookRegistry + +The `HookRegistry` is the central event bus. You register callbacks for specific events and fire them with a `HookContext`: + +```python +from dagron.plugins.hooks import HookRegistry, HookEvent, HookContext + +hooks = HookRegistry() + +# Register a callback +def on_node_start(ctx: HookContext): + print(f" Starting node: {ctx.node_name}") + +unregister = hooks.register(HookEvent.PRE_NODE, on_node_start) + +# Fire the event (the executor does this automatically) +hooks.fire(HookContext( + event=HookEvent.PRE_NODE, + dag=dag, + node_name="extract", +)) +# prints: " Starting node: extract" + +# Unregister when no longer needed +unregister() +``` + +### HookContext + +Every callback receives a `HookContext` with the relevant information: + +```python +from dagron.plugins.hooks import HookContext + +ctx = HookContext( + event=HookEvent.POST_NODE, + dag=dag, + node_name="transform", + node_result=result, + error=None, + execution_result=None, + metadata={"extra": "info"}, +) +``` + +| Field | Type | Description | +|---|---|---| +| `event` | `HookEvent` | The event that triggered this callback. | +| `dag` | `DAG \| None` | The DAG being built or executed. | +| `node_name` | `str \| None` | The node involved (for node-level events). | +| `node_result` | `Any` | The node's result (for `POST_NODE`). 
| +| `error` | `Exception \| None` | The exception (for `ON_ERROR`). | +| `execution_result` | `Any` | The full execution result (for `POST_EXECUTE`). | +| `metadata` | `dict` | Arbitrary extra data. | + +### Priority + +Callbacks run in **descending priority order**. Higher priority runs first: + +```python +hooks.register(HookEvent.PRE_NODE, first_callback, priority=100) +hooks.register(HookEvent.PRE_NODE, second_callback, priority=50) +hooks.register(HookEvent.PRE_NODE, last_callback, priority=0) + +# Order: first_callback -> second_callback -> last_callback +``` + +### Error Isolation + +Hook callbacks are fire-and-forget. If a callback raises an exception, it is caught and issued as a `RuntimeWarning`, but execution continues: + +```python +def buggy_hook(ctx): + raise ValueError("oops") + +hooks.register(HookEvent.PRE_NODE, buggy_hook) + +# This fires the hook but does NOT stop execution +# Instead, a RuntimeWarning is issued +hooks.fire(HookContext(event=HookEvent.PRE_NODE)) +``` + +### Clearing Hooks + +```python +# Clear all hooks for a specific event +hooks.clear(HookEvent.PRE_NODE) + +# Clear all hooks for all events +hooks.clear() + +# Count registered hooks +hooks.hook_count() # total across all events +hooks.hook_count(HookEvent.PRE_NODE) # for a specific event +``` + +--- + +## Writing a Plugin + +### DagronPlugin ABC + +Subclass `DagronPlugin` and implement `name`, `initialize()`, and optionally `teardown()`: + +```python +from dagron.plugins.base import DagronPlugin +from dagron.plugins.hooks import HookEvent, HookContext, HookRegistry + +class TimingPlugin(DagronPlugin): + """Plugin that measures and logs node execution times.""" + + def __init__(self): + self._start_times: dict[str, float] = {} + + @property + def name(self) -> str: + return "timing" + + def initialize(self, hooks: HookRegistry) -> None: + """Register hooks for node timing.""" + import time + + def on_pre_node(ctx: HookContext): + if ctx.node_name: + 
self._start_times[ctx.node_name] = time.monotonic() + + def on_post_node(ctx: HookContext): + if ctx.node_name and ctx.node_name in self._start_times: + elapsed = time.monotonic() - self._start_times[ctx.node_name] + print(f" [{ctx.node_name}] completed in {elapsed:.3f}s") + + hooks.register(HookEvent.PRE_NODE, on_pre_node) + hooks.register(HookEvent.POST_NODE, on_post_node) + + def teardown(self) -> None: + """Clean up resources.""" + self._start_times.clear() +``` + +### Using the Plugin + +```python +from dagron.plugins.hooks import HookRegistry +from dagron.plugins.manager import PluginManager + +# Create the hook registry and plugin manager +hooks = HookRegistry() +manager = PluginManager(hooks) + +# Register and initialize plugins +manager.register(TimingPlugin()) +manager.initialize_all() + +# Pass hooks to the executor +executor = dagron.DAGExecutor(dag, hooks=hooks) +result = executor.execute(tasks) + +# Clean up +manager.teardown_all() +``` + +--- + +## PluginManager + +The `PluginManager` handles the plugin lifecycle: + +```python +from dagron.plugins.manager import PluginManager + +manager = PluginManager() + +# Register plugins manually +manager.register(TimingPlugin()) +manager.register(LoggingPlugin()) + +# Auto-discover plugins from entry_points +discovered = manager.discover() +print(f"Discovered: {discovered}") + +# Initialize all registered plugins +manager.initialize_all() + +# Access the shared hook registry +hooks = manager.hooks + +# List registered plugins +print(manager.plugins) + +# Tear down all plugins +manager.teardown_all() +``` + +### Plugin Discovery + +Plugins can be auto-discovered via Python entry points. Add to your `pyproject.toml`: + +```toml +[project.entry-points."dagron.plugins"] +my_plugin = "my_package.plugins:MyPlugin" +``` + +Then `manager.discover()` will find and register them automatically. 
+ +--- + +## @dagron_plugin Decorator + +For quick plugin registration, use the `@dagron_plugin` class decorator: + +```python +from dagron.plugins.base import DagronPlugin +from dagron.plugins.manager import dagron_plugin +from dagron.plugins.hooks import HookEvent, HookContext, HookRegistry + +@dagron_plugin +class NotificationPlugin(DagronPlugin): + """Send a notification when execution fails.""" + + @property + def name(self) -> str: + return "notifications" + + def initialize(self, hooks: HookRegistry) -> None: + def on_error(ctx: HookContext): + send_alert(f"Node {ctx.node_name} failed: {ctx.error}") + + hooks.register(HookEvent.ON_ERROR, on_error) +``` + +The `@dagron_plugin` decorator automatically instantiates and registers the plugin with dagron's global plugin manager. + +--- + +## DashboardPlugin + +dagron ships with a built-in `DashboardPlugin` that serves a live web dashboard showing real-time execution status. The web server runs in Rust (axum + tokio) on a background thread. 
+ +```python +from dagron.dashboard import DashboardPlugin +from dagron.execution.gates import ApprovalGate, GateController +from dagron.plugins.hooks import HookRegistry +from dagron.plugins.manager import PluginManager + +# Optional: set up gates for the dashboard to manage +controller = GateController({ + "review": ApprovalGate(timeout=600), + "deploy": ApprovalGate(timeout=300), +}) + +# Create the dashboard plugin +dashboard = DashboardPlugin( + host="127.0.0.1", + port=8765, + gate_controller=controller, + open_browser=True, # auto-open in browser +) + +# Wire it up +hooks = HookRegistry() +manager = PluginManager(hooks) +manager.register(dashboard) +manager.initialize_all() +# prints: "Dashboard: http://127.0.0.1:8765" + +# Execute with hooks +executor = dagron.DAGExecutor(dag, hooks=hooks) +result = executor.execute(tasks) + +# Clean up +manager.teardown_all() +``` + +The dashboard shows: + +- A live graph visualization with node status (pending, running, completed, failed). +- Execution timing for each node. +- Approve/reject buttons for any gates in the `WAITING` state. +- Summary statistics after execution completes. + +### Dashboard Hooks + +The `DashboardPlugin` registers hooks for these events: + +| Event | Dashboard action | +|---|---| +| `PRE_EXECUTE` | Resets the dashboard with the DAG structure. | +| `PRE_NODE` | Marks the node as "running" in the UI. | +| `POST_NODE` | Marks the node as "completed". | +| `ON_ERROR` | Marks the node as "failed" with error details. | +| `POST_EXECUTE` | Shows final execution summary. 
| + +--- + +## Practical Plugin Examples + +### Logging Plugin + +```python +import logging + +class LoggingPlugin(DagronPlugin): + """Log all lifecycle events.""" + + def __init__(self, logger_name: str = "dagron"): + self._logger = logging.getLogger(logger_name) + + @property + def name(self) -> str: + return "logging" + + def initialize(self, hooks: HookRegistry) -> None: + def on_pre_execute(ctx: HookContext): + self._logger.info( + "Execution started: %d nodes", + ctx.dag.node_count() if ctx.dag else 0, + ) + + def on_pre_node(ctx: HookContext): + self._logger.info("Node started: %s", ctx.node_name) + + def on_post_node(ctx: HookContext): + self._logger.info("Node completed: %s", ctx.node_name) + + def on_error(ctx: HookContext): + self._logger.error( + "Node failed: %s - %s", + ctx.node_name, + ctx.error, + ) + + def on_post_execute(ctx: HookContext): + r = ctx.execution_result + if r: + self._logger.info( + "Execution finished: %d succeeded, %d failed in %.1fs", + r.succeeded, + r.failed, + r.total_duration_seconds, + ) + + hooks.register(HookEvent.PRE_EXECUTE, on_pre_execute) + hooks.register(HookEvent.PRE_NODE, on_pre_node) + hooks.register(HookEvent.POST_NODE, on_post_node) + hooks.register(HookEvent.ON_ERROR, on_error) + hooks.register(HookEvent.POST_EXECUTE, on_post_execute) +``` + +### Metrics Plugin (Prometheus) + +```python +class PrometheusPlugin(DagronPlugin): + """Export execution metrics to Prometheus.""" + + def __init__(self): + from prometheus_client import Counter, Histogram + self.node_duration = Histogram( + "dagron_node_duration_seconds", + "Node execution duration", + ["node_name"], + ) + self.node_failures = Counter( + "dagron_node_failures_total", + "Total node failures", + ["node_name"], + ) + + @property + def name(self) -> str: + return "prometheus" + + def initialize(self, hooks: HookRegistry) -> None: + def on_post_node(ctx: HookContext): + if ctx.node_name and ctx.node_result: + self.node_duration.labels( + node_name=ctx.node_name 
+ ).observe(ctx.node_result.duration_seconds) + + def on_error(ctx: HookContext): + if ctx.node_name: + self.node_failures.labels(node_name=ctx.node_name).inc() + + hooks.register(HookEvent.POST_NODE, on_post_node) + hooks.register(HookEvent.ON_ERROR, on_error) +``` + +### Slack Notification Plugin + +```python +class SlackPlugin(DagronPlugin): + """Send Slack notifications on execution failure.""" + + def __init__(self, webhook_url: str, channel: str = "#alerts"): + self._webhook_url = webhook_url + self._channel = channel + + @property + def name(self) -> str: + return "slack" + + def initialize(self, hooks: HookRegistry) -> None: + def on_post_execute(ctx: HookContext): + r = ctx.execution_result + if r and r.failed > 0: + import httpx + httpx.post(self._webhook_url, json={ + "channel": self._channel, + "text": ( + f"dagron pipeline failed: " + f"{r.failed} node(s) failed, " + f"{r.succeeded} succeeded" + ), + }) + + hooks.register(HookEvent.POST_EXECUTE, on_post_execute) +``` + +--- + +## Composing Multiple Plugins + +Register multiple plugins and they all receive the same events: + +```python +manager = PluginManager() + +manager.register(LoggingPlugin()) +manager.register(TimingPlugin()) +manager.register(DashboardPlugin(port=8765)) +manager.register(SlackPlugin(webhook_url="https://hooks.slack.com/...")) + +manager.initialize_all() + +# All plugins receive events during execution +executor = dagron.DAGExecutor(dag, hooks=manager.hooks) +result = executor.execute(tasks) + +manager.teardown_all() +``` + +Use **priority** to control the order when it matters: + +```python +# Logging should run first (highest priority) +hooks.register(HookEvent.PRE_NODE, log_callback, priority=100) + +# Metrics second +hooks.register(HookEvent.PRE_NODE, metrics_callback, priority=50) + +# Dashboard last +hooks.register(HookEvent.PRE_NODE, dashboard_callback, priority=0) +``` + +--- + +## Best Practices + +1. 
**Keep hooks lightweight.** Callbacks run on the executor thread, so heavy work (network calls, disk I/O) should be offloaded to a background thread or queue. + +2. **Never raise from hooks.** Exceptions in hooks are caught and warned, but they can mask real errors. Log errors and continue. + +3. **Use `teardown()` for cleanup.** Close file handles, flush metrics, and shut down background threads in the teardown method. + +4. **Use entry points for distribution.** Package plugins as standalone PyPI packages with `dagron.plugins` entry points for automatic discovery. + +5. **Test plugins in isolation.** Create a `HookRegistry`, register your plugin, fire test events, and assert the behavior. + +--- + +## Related + +- [API Reference: Plugins](/api/utilities/plugins) -- full API documentation. +- [Approval Gates](/guide/execution-strategies/approval-gates) -- gate integration with the DashboardPlugin. +- [Visualization](/guide/observability/visualization) -- other ways to visualize DAG execution. +- [Executing Tasks](/guide/core-concepts/executing-tasks) -- how the executor fires hook events. diff --git a/docs-next/content/docs/guide/advanced/templates.mdx b/docs-next/content/docs/guide/advanced/templates.mdx new file mode 100644 index 0000000..b4449b2 --- /dev/null +++ b/docs-next/content/docs/guide/advanced/templates.mdx @@ -0,0 +1,452 @@ +--- +title: Templates +description: Create parameterized DAG templates with placeholder expansion for reusable pipeline patterns. +--- + +# Templates + +Many organizations run the same pipeline structure against different environments, datasets, or configurations. Instead of building a separate DAG for each variant, dagron lets you define a **DAGTemplate** with `{{placeholder}}` syntax and render concrete DAGs by supplying parameter values. + +Templates enforce type safety and support custom validators, so invalid parameter combinations are caught before the graph is ever built. 
+ + T_T --> T_L + end + subgraph "render(env='prod')" + P_E["extract_prod"] + P_T["transform_prod"] + P_L["load_prod"] + P_E --> P_T --> P_L + end + subgraph "render(env='staging')" + S_E["extract_staging"] + S_T["transform_staging"] + S_L["load_staging"] + S_E --> S_T --> S_L + end`} + caption="A single template renders into different concrete DAGs depending on the parameter values." +/> + +--- + +## Quick Start + +```python +from dagron.template import DAGTemplate + +# 1. Define the template +template = DAGTemplate( + params={"env": str, "batch_size": int}, + defaults={"env": "staging", "batch_size": 1000}, +) + +# 2. Add templated nodes and edges +template.add_node("extract_{{env}}") +template.add_node("transform_{{env}}") +template.add_node("load_{{env}}") +template.add_edge("extract_{{env}}", "transform_{{env}}") +template.add_edge("transform_{{env}}", "load_{{env}}") + +# 3. Render a concrete DAG +dag = template.render(env="prod", batch_size=5000) + +# The DAG now has nodes: extract_prod, transform_prod, load_prod +print([n.name for n in dag.topological_sort()]) +# ['extract_prod', 'transform_prod', 'load_prod'] +``` + +--- + +## Template Parameters + +### Declaring Parameters + +Parameters are declared with their types when constructing the template: + +```python +template = DAGTemplate( + params={ + "env": str, + "replicas": int, + "gpu_enabled": bool, + }, + defaults={ + "env": "staging", + "replicas": 1, + }, + descriptions={ + "env": "Target deployment environment", + "replicas": "Number of parallel workers", + "gpu_enabled": "Whether to use GPU acceleration", + }, +) +``` + +Parameters without a default are **required** -- `render()` will raise a `TemplateError` if they are missing. 
+ +### TemplateParam + +Under the hood, each parameter is a `TemplateParam` dataclass: + +```python +from dagron.template import TemplateParam + +param = TemplateParam( + name="env", + type=str, + default="staging", + description="Target deployment environment", + validator=lambda v: v in ("dev", "staging", "prod"), +) +``` + +You can access all parameter specs through the template: + +```python +for name, param in template.params.items(): + print(f" {name}: {param.type.__name__}, default={param.default}") + print(f" {param.description}") +``` + +--- + +## Placeholder Syntax + +### Default Delimiters + +By default, placeholders use double-brace syntax: `{{param_name}}`. You can place them anywhere in a node name or edge label: + +```python +template.add_node("train_{{model}}_{{env}}") +template.add_edge("data_{{env}}", "train_{{model}}_{{env}}") +``` + +### Custom Delimiters + +If double braces conflict with your naming conventions, specify custom delimiters: + +```python +template = DAGTemplate( + params={"env": str}, + delimiters=("${", "}"), # shell-style +) + +template.add_node("extract_${env}") +``` + +### Type-Preserving Substitution + +If an entire node name is a single placeholder (e.g., `"{{replicas}}"`), dagron returns the raw Python value instead of stringifying it. This is useful for metadata: + +```python +template = DAGTemplate(params={"replicas": int}) +template.add_node("worker", metadata="{{replicas}}") + +dag = template.render(replicas=4) +# The metadata is the integer 4, not the string "4" +``` + +When a placeholder is part of a larger string (e.g., `"worker_{{env}}"`), values are converted to strings via `str()`. 
+ +--- + +## Validation + +### Automatic Type Checking + +Parameters are validated against their declared types at render time: + +```python +template = DAGTemplate(params={"replicas": int}) + +try: + template.render(replicas="three") # str is not int +except TemplateError as e: + print(e) + # "Parameter 'replicas' expects int, got str" +``` + +### Custom Validators + +Supply a validator function for each parameter to enforce domain-specific constraints: + +```python +template = DAGTemplate( + params={"env": str, "replicas": int}, + validators={ + "env": lambda v: v in ("dev", "staging", "prod"), + "replicas": lambda v: 1 <= v <= 100, + }, +) + +try: + template.render(env="banana", replicas=1) +except TemplateError as e: + print(e) + # "Parameter 'env' failed custom validation" +``` + +### Pre-Validation + +Use `validate_params()` to check parameters without rendering. This returns a list of error messages instead of raising: + +```python +errors = template.validate_params(env="prod", replicas=-1) +for error in errors: + print(f" - {error}") +# - Parameter 'replicas' failed custom validation + +errors = template.validate_params() # missing required params +# - Missing required parameter: 'replicas' +``` + +### Unknown Parameters + +Passing parameters not declared in the template is an error: + +```python +try: + template.render(env="prod", replicas=3, color="blue") +except TemplateError as e: + print(e) + # "Unknown parameters: color" +``` + +--- + +## Rendering Methods + +### `render()` -- Direct DAG + +The simplest rendering method produces a finalized DAG: + +```python +dag = template.render(env="prod", replicas=3) +# dag is a dagron.DAG, ready for execution +``` + +### `render_builder()` -- DAGBuilder for Further Modification + +If you need to add extra nodes or edges after rendering, use `render_builder()` to get a `DAGBuilder`: + +```python +builder = template.render_builder(env="prod", replicas=3) + +# Add extra nodes beyond what the template defines 
+builder.add_node("monitoring") +builder.add_edge("load_prod", "monitoring") + +dag = builder.build() +``` + +This is useful when you have a standard template but need per-deployment customizations. + +### `render_pipeline()` -- Pipeline + +Render into a `Pipeline` for use with the `@task` decorator workflow: + +```python +pipeline = template.render_pipeline(env="prod", replicas=3) +``` + +--- + +## Fluent API + +`add_node()` and `add_edge()` return `self`, so you can chain calls: + +```python +template = DAGTemplate(params={"env": str}, defaults={"env": "dev"}) + +template = ( + template + .add_node("extract_{{env}}") + .add_node("transform_{{env}}") + .add_node("load_{{env}}") + .add_edge("extract_{{env}}", "transform_{{env}}") + .add_edge("transform_{{env}}", "load_{{env}}") +) + +dag = template.render(env="prod") +``` + +--- + +## Parameterized ETL Example + +Here is a realistic ETL pipeline template that generates environment-specific DAGs: + +```python +from dagron.template import DAGTemplate + +def create_etl_template(): + """Create a reusable ETL pipeline template.""" + template = DAGTemplate( + params={ + "env": str, + "source_table": str, + "target_table": str, + "batch_size": int, + "validate": bool, + }, + defaults={ + "batch_size": 10000, + "validate": True, + }, + descriptions={ + "env": "Target environment (dev, staging, prod)", + "source_table": "Source database table name", + "target_table": "Target data warehouse table", + "batch_size": "Number of rows per batch", + "validate": "Whether to run data validation", + }, + validators={ + "env": lambda v: v in ("dev", "staging", "prod"), + "batch_size": lambda v: 100 <= v <= 1_000_000, + }, + ) + + # Core ETL nodes + ( + template + .add_node("extract_{{source_table}}_{{env}}") + .add_node("validate_{{source_table}}_{{env}}") + .add_node("transform_{{source_table}}_{{env}}") + .add_node("load_{{target_table}}_{{env}}") + .add_node("verify_{{target_table}}_{{env}}") + 
.add_edge("extract_{{source_table}}_{{env}}", "validate_{{source_table}}_{{env}}") + .add_edge("validate_{{source_table}}_{{env}}", "transform_{{source_table}}_{{env}}") + .add_edge("transform_{{source_table}}_{{env}}", "load_{{target_table}}_{{env}}") + .add_edge("load_{{target_table}}_{{env}}", "verify_{{target_table}}_{{env}}") + ) + + return template + +# Create the template once +etl_template = create_etl_template() + +# Render for different environments +dev_dag = etl_template.render( + env="dev", + source_table="users", + target_table="dim_users", +) + +prod_dag = etl_template.render( + env="prod", + source_table="users", + target_table="dim_users", + batch_size=100000, +) + +print(f"Dev nodes: {[n.name for n in dev_dag.topological_sort()]}") +# ['extract_users_dev', 'validate_users_dev', 'transform_users_dev', +# 'load_dim_users_dev', 'verify_dim_users_dev'] + +print(f"Prod nodes: {[n.name for n in prod_dag.topological_sort()]}") +# ['extract_users_prod', 'validate_users_prod', 'transform_users_prod', +# 'load_dim_users_prod', 'verify_dim_users_prod'] +``` + +--- + +## Multi-Tenant Pipeline Generation + +Templates are powerful for generating per-tenant pipelines: + +```python +template = DAGTemplate( + params={"tenant": str, "region": str}, + defaults={"region": "us-east-1"}, +) + +( + template + .add_node("ingest_{{tenant}}_{{region}}") + .add_node("process_{{tenant}}_{{region}}") + .add_node("deliver_{{tenant}}_{{region}}") + .add_edge("ingest_{{tenant}}_{{region}}", "process_{{tenant}}_{{region}}") + .add_edge("process_{{tenant}}_{{region}}", "deliver_{{tenant}}_{{region}}") +) + +tenants = ["acme", "globex", "initech"] +dags = { + tenant: template.render(tenant=tenant, region="eu-west-1") + for tenant in tenants +} + +for tenant, dag in dags.items(): + nodes = [n.name for n in dag.topological_sort()] + print(f"{tenant}: {nodes}") +``` + +--- + +## Template Composition with render_builder + +Use `render_builder()` to compose a base template with per-use 
customizations: + +```python +# Base template: standard ML training pipeline +base = DAGTemplate( + params={"model": str, "dataset": str}, +) +( + base + .add_node("load_{{dataset}}") + .add_node("preprocess_{{dataset}}") + .add_node("train_{{model}}") + .add_node("evaluate_{{model}}") + .add_edge("load_{{dataset}}", "preprocess_{{dataset}}") + .add_edge("preprocess_{{dataset}}", "train_{{model}}") + .add_edge("train_{{model}}", "evaluate_{{model}}") +) + +# Render with customization +builder = base.render_builder(model="resnet50", dataset="imagenet") +builder.add_node("deploy_resnet50") +builder.add_edge("evaluate_resnet50", "deploy_resnet50") + +# Only production builds get a deploy step +dag = builder.build() +``` + +--- + +## Repr and Debugging + +Templates have a helpful repr: + +```python +print(template) +# DAGTemplate(params=[batch_size, env, source_table, target_table, validate], +# nodes=5, edges=4) +``` + +--- + +## Best Practices + +1. **Define templates as factory functions.** Return a `DAGTemplate` from a function so that the template definition is reusable and testable. + +2. **Use validators for all string parameters.** Catch typos like `env="prodd"` at render time instead of at execution time. + +3. **Provide defaults for optional parameters.** This makes the most common usage concise while still allowing customization. + +4. **Use `validate_params()` in CI.** Run parameter validation in your test suite to catch invalid configurations early. + +5. **Prefer `render_builder()` when composing.** It gives you flexibility to add environment-specific nodes without modifying the base template. + +--- + +## Related + +- [API Reference: Templates](/api/utilities/template) -- full API documentation. +- [Building DAGs](/guide/core-concepts/building-dags) -- the DAGBuilder that templates render into. +- [Versioning](/guide/advanced/versioning) -- version-tracking the rendered DAGs. 
+- [Contracts](/guide/advanced/contracts) -- type-checking the rendered DAG's edges. diff --git a/docs-next/content/docs/guide/advanced/versioning.mdx b/docs-next/content/docs/guide/advanced/versioning.mdx new file mode 100644 index 0000000..068911c --- /dev/null +++ b/docs-next/content/docs/guide/advanced/versioning.mdx @@ -0,0 +1,437 @@ +--- +title: Versioning +description: Track DAG mutations with an append-only log, time-travel to any historical version, diff changes, and fork branches. +--- + +# Versioning + +As pipelines evolve, you often need to answer questions like "What did this DAG look like last week?" or "What changed between version 12 and version 15?". dagron's `VersionedDAG` wraps a standard DAG with an **append-only mutation log** that records every structural change. You can time-travel to any historical version, diff any two versions, and fork independent branches from any point. + + B3 + end + subgraph "v5 (fork from v3)" + A5["A"] + B5["B"] + C5["C"] + A5 --> B5 + A5 --> C5 + end`} + caption="A VersionedDAG accumulates mutations. You can fork at any version to create an independent branch." +/> + +--- + +## Core Concepts + +| Class | Role | +|---|---| +| [`VersionedDAG`](/api/utilities/versioning#versioneddag) | Wraps a DAG with an append-only mutation log. Every mutation increments the version counter. | +| [`Mutation`](/api/utilities/versioning#mutation) | A single recorded change: version number, mutation type, arguments, and timestamp. | +| [`MutationType`](/api/utilities/versioning#mutationtype) | Enum of mutation types: `ADD_NODE`, `REMOVE_NODE`, `ADD_EDGE`, `REMOVE_EDGE`, `SET_PAYLOAD`, `SET_METADATA`. 
| + +--- + +## Creating a VersionedDAG + +### Starting Empty + +```python +from dagron.versioning import VersionedDAG + +vdag = VersionedDAG() +print(vdag.version) # 0 -- no mutations yet +``` + +### Wrapping an Existing DAG + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_edge("extract", "transform") + .build() +) + +vdag = VersionedDAG(dag) +print(vdag.version) # 0 -- initial state, no tracked mutations yet +``` + +Note that mutations made to the DAG *before* wrapping are not tracked. The version log starts from the moment you create the `VersionedDAG`. + +--- + +## Making Mutations + +Every structural change is recorded and increments the version: + +```python +vdag = VersionedDAG() + +vdag.add_node("extract") # version 1 +vdag.add_node("transform") # version 2 +vdag.add_node("load") # version 3 +vdag.add_edge("extract", "transform") # version 4 +vdag.add_edge("transform", "load") # version 5 + +print(vdag.version) # 5 +``` + +All mutation methods mirror the standard DAG API: + +```python +vdag.add_node("name", payload=..., metadata=...) +vdag.remove_node("name") +vdag.add_edge("from", "to", weight=..., label=...) +vdag.remove_edge("from", "to") +vdag.set_payload("name", payload) +vdag.set_metadata("name", metadata) +``` + +Each call is recorded with its full arguments and a timestamp. + +--- + +## Accessing the Current DAG + +The `.dag` property returns the underlying DAG for read-only access: + +```python +dag = vdag.dag +print(dag.node_count()) # 3 +print(dag.edge_count()) # 2 + +for node in dag.topological_sort(): + print(node.name) +``` + +You can pass `vdag.dag` to any executor or analysis function that expects a `DAG`. 
+ +--- + +## Time-Travel with at_version() + +Reconstruct the DAG as it was at any historical version: + +```python +vdag = VersionedDAG() +vdag.add_node("a") # v1 +vdag.add_node("b") # v2 +vdag.add_edge("a", "b") # v3 +vdag.add_node("c") # v4 +vdag.add_edge("b", "c") # v5 + +# Go back to version 2 (only nodes "a" and "b", no edges) +dag_v2 = vdag.at_version(2) +print(dag_v2.node_count()) # 2 +print(dag_v2.edge_count()) # 0 + +# Version 0 is the empty DAG +dag_v0 = vdag.at_version(0) +print(dag_v0.node_count()) # 0 +``` + +`at_version()` **replays** the mutation log up to the specified version, constructing a fresh DAG. The original `VersionedDAG` is not modified. + +### Version Bounds + +```python +try: + vdag.at_version(999) +except ValueError as e: + print(e) + # "Version 999 out of range [0, 5]." +``` + +--- + +## Diffing Versions + +Compare any two versions to see what changed: + +```python +diff = vdag.diff_versions(2, 5) + +print(f"Added nodes: {diff.added_nodes}") +print(f"Removed nodes: {diff.removed_nodes}") +print(f"Added edges: {diff.added_edges}") +print(f"Removed edges: {diff.removed_edges}") +``` + +This uses the Rust-side `DAG.diff()` method, which produces a `GraphDiff` object with sets of added/removed nodes and edges. + +|"added"| B_v5 + B_v5 -->|"added"| C_v5 + end + style C_v5 fill:#c8e6c9,stroke:#2e7d32`} + caption="Diffing v2 and v5 shows node 'c' and two edges were added." 
+/> + +--- + +## Mutation History + +### Full History + +```python +for mutation in vdag.history(): + print( + f" v{mutation.version}: {mutation.mutation_type.value} " + f"args={mutation.args} " + f"at={mutation.timestamp:.0f}" + ) +``` + +Output: + +``` + v1: add_node args={'name': 'a', 'payload': None, 'metadata': None} at=1709400000 + v2: add_node args={'name': 'b', 'payload': None, 'metadata': None} at=1709400001 + v3: add_edge args={'from_node': 'a', 'to_node': 'b', 'weight': None, 'label': None} at=1709400001 + v4: add_node args={'name': 'c', 'payload': None, 'metadata': None} at=1709400002 + v5: add_edge args={'from_node': 'b', 'to_node': 'c', 'weight': None, 'label': None} at=1709400002 +``` + +### History Since a Version + +Get only the mutations after a specific version: + +```python +recent = vdag.history_since(3) +for mutation in recent: + print(f" v{mutation.version}: {mutation.mutation_type.value}") +# v4: add_node +# v5: add_edge +``` + +This is useful for incremental synchronization -- fetch only the mutations that happened since the last sync. + +--- + +## Mutation Dataclass + +Each `Mutation` is a frozen dataclass: + +| Field | Type | Description | +|---|---|---| +| `version` | `int` | The version number this mutation created (1-based). | +| `mutation_type` | `MutationType` | One of `ADD_NODE`, `REMOVE_NODE`, `ADD_EDGE`, `REMOVE_EDGE`, `SET_PAYLOAD`, `SET_METADATA`. | +| `args` | `dict[str, Any]` | The arguments passed to the mutation method. | +| `timestamp` | `float` | Unix timestamp when the mutation was recorded. | + +--- + +## MutationType Enum + +```python +from dagron.versioning import MutationType + +MutationType.ADD_NODE # "add_node" +MutationType.REMOVE_NODE # "remove_node" +MutationType.ADD_EDGE # "add_edge" +MutationType.REMOVE_EDGE # "remove_edge" +MutationType.SET_PAYLOAD # "set_payload" +MutationType.SET_METADATA # "set_metadata" +``` + +--- + +## Forking + +Create an independent copy of the `VersionedDAG` at any version. 
The fork has its own mutation log and does not affect the original:
+
+```python
+vdag = VersionedDAG()
+vdag.add_node("a") # v1
+vdag.add_node("b") # v2
+vdag.add_edge("a", "b") # v3
+vdag.add_node("c") # v4
+
+# Fork from version 3 (before "c" was added)
+fork = vdag.fork(at_version=3)
+print(fork.version) # 3
+print(fork.dag.node_count()) # 2 (a, b)
+
+# Mutate the fork independently
+fork.add_node("d")
+fork.add_edge("a", "d")
+print(fork.version) # 5
+
+# Original is unaffected
+print(vdag.version) # 4
+print(vdag.dag.node_count()) # 3 (a, b, c)
+```
+
+### Fork at Current Version
+
+Call `fork()` without arguments to fork at the current version:
+
+```python
+fork = vdag.fork()
+print(fork.version) # same as vdag.version
+```
+
+---
+
+## Use Cases
+
+### Pipeline Auditing
+
+Record every change to a production pipeline and audit the history later:
+
+```python
+vdag = VersionedDAG()
+
+# Day 1: initial pipeline
+vdag.add_node("ingest")
+vdag.add_node("transform")
+vdag.add_edge("ingest", "transform")
+
+# Day 2: add a new output
+vdag.add_node("export_csv")
+vdag.add_edge("transform", "export_csv")
+
+# Day 3: add monitoring
+vdag.add_node("monitor")
+vdag.add_edge("transform", "monitor")
+
+# Audit: what was the pipeline on day 1? (day 1 ended at version 3)
+dag_day1 = vdag.at_version(3)
+print(f"Day 1 nodes: {[n.name for n in dag_day1.topological_sort()]}")
+
+# What changed between day 1 and day 3?
+diff = vdag.diff_versions(3, 7)
+print(f"Added: {diff.added_nodes}")
+```
+
+### A/B Testing Pipeline Variants
+
+Fork a pipeline and try different approaches:
+
+```python
+# Base pipeline
+base = VersionedDAG()
+base.add_node("data")
+base.add_node("features")
+base.add_edge("data", "features")
+
+# Variant A: XGBoost
+variant_a = base.fork()
+variant_a.add_node("xgboost")
+variant_a.add_edge("features", "xgboost")
+
+# Variant B: Neural Network
+variant_b = base.fork()
+variant_b.add_node("neural_net")
+variant_b.add_edge("features", "neural_net")
+
+# Execute both variants
+executor_a = dagron.DAGExecutor(variant_a.dag)
+executor_b = dagron.DAGExecutor(variant_b.dag)
+```
+
+### Rollback
+
+If a mutation causes problems, reconstruct the previous version and create a new `VersionedDAG` from it:
+
+```python
+# Something went wrong after version 10
+good_dag = vdag.at_version(10)
+
+# Start fresh from the known-good state
+new_vdag = VersionedDAG(good_dag)
+# Continue making mutations on the recovered DAG
+```
+
+---
+
+## Serialization Pattern
+
+While `VersionedDAG` does not have built-in serialization, the mutation log is easy to serialize:
+
+```python
+import json
+
+# Serialize the mutation log
+log_data = []
+for m in vdag.history():
+    log_data.append({
+        "version": m.version,
+        "type": m.mutation_type.value,
+        "args": m.args,
+        "timestamp": m.timestamp,
+    })
+
+with open("pipeline_history.json", "w") as f:
+    json.dump(log_data, f, indent=2, default=str)
+
+# Deserialize and replay
+from dagron.versioning import VersionedDAG, MutationType
+
+vdag_restored = VersionedDAG()
+with open("pipeline_history.json") as f:
+    log_data = json.load(f)
+
+for entry in log_data:
+    mt = MutationType(entry["type"])
+    args = entry["args"]
+    if mt == MutationType.ADD_NODE:
+        vdag_restored.add_node(args["name"], payload=args.get("payload"), metadata=args.get("metadata"))
+    elif mt == MutationType.ADD_EDGE:
+        vdag_restored.add_edge(args["from_node"], args["to_node"],
weight=args.get("weight"), label=args.get("label")) + elif mt == MutationType.REMOVE_NODE: + vdag_restored.remove_node(args["name"]) + elif mt == MutationType.REMOVE_EDGE: + vdag_restored.remove_edge(args["from_node"], args["to_node"]) + elif mt == MutationType.SET_PAYLOAD: + vdag_restored.set_payload(args["name"], args.get("payload")) + elif mt == MutationType.SET_METADATA: + vdag_restored.set_metadata(args["name"], args.get("metadata")) +``` + +--- + +## Best Practices + +1. **Use `VersionedDAG` during development.** Wrap your DAG early so you have a full audit trail from the start. + +2. **Serialize the mutation log to version control.** Store `pipeline_history.json` alongside your code to track pipeline structure changes in git. + +3. **Use `diff_versions()` in code review.** Compare the pipeline before and after a change to verify that only the intended modifications were made. + +4. **Fork for experiments.** Instead of modifying the main pipeline, fork it, try your changes, and merge back only if they work. + +5. **Use `history_since()` for incremental sync.** If you are syncing pipeline state across services, send only the mutations since the last known version. + +--- + +## Related + +- [API Reference: Versioning](/api/utilities/versioning) -- full API documentation. +- [Building DAGs](/guide/core-concepts/building-dags) -- the underlying DAG API that `VersionedDAG` wraps. +- [Templates](/guide/advanced/templates) -- parameterized DAG generation. +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) -- analysis and querying of the versioned graph. diff --git a/docs-next/content/docs/guide/architecture.mdx b/docs-next/content/docs/guide/architecture.mdx new file mode 100644 index 0000000..c3983b1 --- /dev/null +++ b/docs-next/content/docs/guide/architecture.mdx @@ -0,0 +1,158 @@ +--- +title: Architecture +description: How dagron is built — Rust core, PyO3 bindings, crate structure, caching, and the optional web dashboard. 
+--- + +# Architecture + +dagron is a layered system: a Rust core providing the graph data structure and algorithms, a PyO3 binding layer that exposes everything to Python, and an optional web dashboard for real-time execution monitoring. + +```mermaid +graph TB + PY["Python API
(dagron package)"] --> PYO3["PyO3 Bindings
(dagron-py crate)"] + PYO3 --> CORE["Rust Core
(dagron-core crate)"] + CORE --> PG["petgraph::StableGraph"] + PYO3 -.-> UI["Dashboard
(dagron-ui crate, optional)"] + UI --> AXUM["Axum + Tokio"] +``` + +--- + +## Crate Structure + +| Crate | Path | Purpose | +|-------|------|---------| +| `dagron-core` | `crates/dagron-core/` | Graph data structure, algorithms, serialization, scheduling — pure Rust, no Python dependency | +| `dagron-py` | `crates/dagron-py/` | PyO3 bindings wrapping `dagron-core` as a Python extension module | +| `dagron-ui` | `crates/dagron-ui/` | Optional Axum-based web dashboard for live execution visualization | + +The Python package (`py_src/dagron/`) adds higher-level execution strategies (incremental, checkpoint, caching, distributed), the builder pattern, analysis utilities, and plugin system — all in pure Python, calling into the Rust core for graph operations. + +--- + +## Rust Core Internals + +### DAG\ + +The central type is `DAG
<T>` in `dagron-core`:
+
+```rust
+pub struct DAG<T> {
+    graph: StableGraph<NodeData<T>, EdgeData, Directed, u32>,
+    name_to_index: AHashMap<String, NodeIndex>,
+    generation: u64,
+    cache: RwLock<DagCache>,
+}
+```
+
+- **`graph`** — petgraph's `StableGraph` with arena-allocated node/edge storage. `StableGraph` preserves indices across removals, which is critical for caching correctness.
+- **`name_to_index`** — `AHashMap<String, NodeIndex>` for O(1) string-to-index lookups. ahash is a fast, non-cryptographic hash map that outperforms the standard `HashMap`.
+- **`generation`** — monotonically increasing counter, bumped on every structural mutation (add/remove node/edge).
+- **`cache`** — `RwLock<DagCache>` storing cached results for expensive computations.
+
+### Generational Cache
+
+The cache avoids recomputing expensive results (topological sorts, roots, leaves) when the graph hasn't changed:
+
+```rust
+struct DagCache {
+    gen: u64, // generation when cache was populated
+    hits: u64,
+    misses: u64,
+    roots: Option<Vec<String>>,
+    leaves: Option<Vec<String>>,
+    topo_sort: Option<Vec<String>>,
+    topo_sort_dfs: Option<Vec<String>>,
+    topo_levels: Option<Vec<Vec<String>>>,
+}
+```
+
+**How it works:**
+1. Every mutation to the DAG increments `generation`
+2. When a cached result is requested, the cache compares its stored `gen` against the DAG's current `generation`
+3. On mismatch, all cached entries are invalidated (set to `None`)
+4. On match, the cached result is returned directly
+
+This gives O(1) amortized cost for repeated queries on an unchanged graph.
+
+### Algorithm Modules
+
+| Module | Algorithms |
+|--------|-----------|
+| `toposort` | Kahn's algorithm, DFS-based sort, topological levels, all orderings |
+| `reachability` | Bitset-based reachability index — O(V*E/64) build, O(1) `can_reach` queries |
+| `scheduling` | Critical path, bottom-level computation, max-parallelism and resource-constrained plans |
+| `partition` | Level-based, size-balanced, and communication-minimizing graph partitioning |
+| `paths` | All paths (DFS), shortest path (BFS), longest path |
+| `cycle` | Tarjan's SCC for cycle detection, `would_create_cycle` for edge insertion checks |
+| `dominators` | Immediate dominators via Cooper-Harvey-Kennedy algorithm |
+| `transforms` | Transitive reduction, transitive closure |
+| `incremental` | Dirty-set propagation (BFS from changed nodes), change provenance tracking |
+| `traversal` | Ancestors, descendants |
+| `diff` | Structural graph diffing |
+
+---
+
+## PyO3 Boundary
+
+`dagron-py` wraps `DAG<PyNodePayload>` as `PyDAG`, where `PyNodePayload` holds an `Option<Py<PyAny>>` for arbitrary Python objects as node payloads.
+
+### GIL Release Points
+
+Every CPU-intensive Rust operation releases the Python GIL via `py.allow_threads()`:
+
+- **Topological sort** — all variants (Kahn, DFS, levels)
+- **Ancestors / descendants** — graph traversals
+- **Reachability** — index building and queries
+- **Scheduling** — execution planning, critical path
+- **Validation** — cycle detection
+- **Serialization** — JSON, bincode, DOT export
+- **Transforms** — transitive reduction/closure, dominator tree
+- **Partitioning** — all three strategies
+- **Pattern matching** — regex/glob node filtering
+- **Incremental** — dirty set computation, change provenance
+- **Stats** — graph statistics computation
+
+This means Python threads are not blocked while Rust computes. In multi-threaded executors, multiple graph operations can genuinely run in parallel.
+ +### Exception Mapping + +Rust errors are mapped to a Python exception hierarchy: + +``` +DagronError (base) + +-- CycleError + +-- NodeNotFoundError + +-- DuplicateNodeError + +-- EdgeNotFoundError + +-- GraphError +``` + +--- + +## Dashboard (Optional) + +The `dagron-ui` crate provides a live web dashboard for monitoring DAG execution. It is feature-gated and only built when `--features dashboard` is passed to maturin. + +### Architecture + +```mermaid +graph LR + EX["Python Executor"] -->|"hooks"| STATE["DashboardState
(Arc<RwLock>)"] + STATE -->|"SSE broadcast"| SERVER["Axum Server
(background thread)"] + SERVER -->|"HTML + SSE"| BROWSER["Browser"] +``` + +1. **Startup:** `DashboardHandle::start(host, port)` spawns a background OS thread running a Tokio runtime with an Axum server +2. **State:** `DashboardState` is shared via `Arc>` between the executor and the server thread +3. **Hooks:** The executor calls `node_started()`, `node_finished()`, `execution_finished()` to update state +4. **SSE:** State changes are broadcast to all connected browsers via Server-Sent Events +5. **Endpoints:** + - `GET /` — single-file HTML/CSS/JS dashboard (embedded at compile time) + - `GET /api/state` — JSON snapshot of current execution state + - `GET /api/events` — SSE stream for real-time updates + - `GET /api/profile` — execution profile statistics + - `POST /api/gates/{name}/approve` — approve an approval gate + - `POST /api/gates/{name}/reject` — reject an approval gate + +The dashboard requires no external build tools — the entire UI is a single HTML file embedded in the Rust binary. diff --git a/docs-next/content/docs/guide/benchmarks.mdx b/docs-next/content/docs/guide/benchmarks.mdx new file mode 100644 index 0000000..23aae5f --- /dev/null +++ b/docs-next/content/docs/guide/benchmarks.mdx @@ -0,0 +1,166 @@ +--- +title: Benchmarks +description: Performance benchmarks comparing dagron's Rust core against NetworkX on common DAG operations. +--- + +# Benchmarks + +dagron's core graph engine is written in Rust (petgraph + PyO3), giving it a significant performance advantage over pure-Python graph libraries. This page presents representative benchmarks comparing dagron against [NetworkX](https://networkx.org/) on equivalent operations. + + +```bash +uv pip install pytest-benchmark networkx +uv run python -m pytest tests/python/test_benchmarks.py --benchmark-only --benchmark-columns=mean,stddev,rounds -q +``` + +All Python benchmarks below were measured with `pytest-benchmark` on a single machine. 
Rust-only benchmarks use [Criterion](https://bheisler.github.io/criterion.rs/). Numbers will vary by hardware — treat ratios as the meaningful signal. + +**Hardware:** AMD Ryzen / Intel Core (modern x86_64), Linux, Python 3.12, dagron 0.1.0. +**Last measured:** March 2026. + +--- + +## dagron vs NetworkX (Python API) + +### Construction (10K nodes) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Chain (10K nodes, 9,999 edges) | 7.66 ms | 21.04 ms | **2.7x** | +| Wide (1,000 roots x 10 depth) | 7.05 ms | 22.45 ms | **3.2x** | + +### Topological Sort (10K-node chain) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Topological sort | 880 us | 6,668 us | **7.6x** | + +### Ancestors / Descendants (mid-node on 10K chain) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Ancestors of node 5000 | 562 us | 1,955 us | **3.5x** | +| Descendants of node 5000 | 548 us | 2,053 us | **3.7x** | + +### Cycle Detection / Validation (10K chain) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Validate (acyclic check) | 535 us | 6,718 us | **12.6x** | + +### JSON Serialization (1K-node chain) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Serialize to JSON | 258 us | 823 us | **3.2x** | + +### Reachability (5K-node chain) + +| Benchmark | dagron | NetworkX | +|-----------|--------|----------| +| Build reachability index | 1,818 us | N/A (no equivalent) | +| Batch query (10 pairs) | 3.2 us | 12,637 us (nx.has_path) | + +Once the reachability index is built, dagron answers batch reachability queries **~3,900x faster** than NetworkX's `has_path` (which re-traverses the graph each call). 
+ +### BFS / Topological Levels (10K chain) + +| Benchmark | dagron | NetworkX | Speedup | +|-----------|--------|----------|---------| +| Topological levels | 2,045 us | 9,312 us | **4.6x** | + +--- + +## Why the performance gap? + +Three factors drive dagron's advantage: + +1. **Rust core releases the GIL.** Every expensive operation in dagron runs inside `py.allow_threads()`, so the Rust code executes without Python interpreter overhead. +2. **petgraph's adjacency list is cache-friendly.** Nodes and edges are stored in contiguous arena-allocated vectors, giving excellent CPU cache behavior during traversals. +3. **ahash beats Python dict overhead.** Node name lookups use `AHashMap` — a fast, non-cryptographic hash map — instead of Python's general-purpose `dict`. + +--- + +## Rust-Only Numbers (Criterion) + +These benchmarks run entirely in Rust, showing the pure performance ceiling before any PyO3 overhead. + +### Construction + +| Benchmark | Time | +|-----------|------| +| Chain 1K | 252 us | +| Chain 10K | 2.73 ms | +| Chain 100K | 48.6 ms | +| Wide 1,000x10 | 2.96 ms | +| Diamond 10x10 | 111 us | + +### Topological Sort (10K nodes) + +| Benchmark | Time | +|-----------|------| +| Kahn (chain) | 332 us | +| Kahn (wide) | 332 us | +| DFS (chain) | 336 us | +| DFS (wide) | 337 us | +| Levels (chain) | 617 us | +| Levels (wide) | 331 us | + +### Cycle Detection (10K chain) + +| Benchmark | Time | +|-----------|------| +| validate (acyclic) | 466 us | +| would_create_cycle | 501 us | + +### Reachability (10K chain) + +| Benchmark | Time | +|-----------|------| +| Build index | 3.26 ms | +| can_reach (single query) | 6.8 ns | +| reachable_from | 11.5 us | +| ancestors_of | 27.4 us | + +### Serialization + +| Benchmark | Time | +|-----------|------| +| to_json (1K) | 241 us | +| from_json (1K) | 481 us | +| to_bincode (1K) | 57.9 us | +| from_bincode (1K) | 305 us | +| to_bincode (10K) | 617 us | +| from_bincode (10K) | 3.30 ms | +| to_bincode (100K) | 6.60 ms | 
+| to_dot (1K) | 48.0 us | + +### Scheduling (1K chain) + +| Benchmark | Time | +|-----------|------| +| Max parallelism plan | 281 us | +| Resource-constrained (4 workers) | 450 us | +| Critical path | 133 us | + +### Transforms (1K chain) + +| Benchmark | Time | +|-----------|------| +| Transitive reduction | 268 us | +| Snapshot (deep clone) | 70 us | + +### Introspection (10K chain) + +| Benchmark | Time | +|-----------|------| +| Ancestors (mid-node) | 273 us | +| Descendants (mid-node) | 274 us | +| Roots | 37 ns | +| Leaves | 39 ns | + + +```bash +cargo bench --bench graph_bench +``` + \ No newline at end of file diff --git a/docs-next/content/docs/guide/cookbook.mdx b/docs-next/content/docs/guide/cookbook.mdx new file mode 100644 index 0000000..23cbb48 --- /dev/null +++ b/docs-next/content/docs/guide/cookbook.mdx @@ -0,0 +1,340 @@ +--- +title: Cookbook +description: Complete real-world examples — build systems, spreadsheet engines, ETL pipelines, and CI/CD schedulers. +--- + +# Cookbook + +Four complete examples showing how to use dagron in real-world scenarios. Each includes full code and a DAG diagram. + +--- + +## 1. Build System Dependency Resolver + +Model file targets as nodes, detect stale targets via incremental execution, and skip unchanged targets automatically. 
+ +```mermaid +graph LR + A[parse_config] --> B[compile_lib] + A --> C[compile_utils] + B --> D[link_binary] + C --> D + D --> E[run_tests] +``` + +```python +import dagron +from dagron.execution import IncrementalExecutor + +# Define build targets as a DAG +dag = ( + dagron.DAG.builder() + .add_node("parse_config", metadata={"output": "config.json"}) + .add_node("compile_lib", metadata={"output": "lib.o"}) + .add_node("compile_utils", metadata={"output": "utils.o"}) + .add_node("link_binary", metadata={"output": "app"}) + .add_node("run_tests", metadata={"output": "test_report.xml"}) + .add_edge("parse_config", "compile_lib") + .add_edge("parse_config", "compile_utils") + .add_edge("compile_lib", "link_binary") + .add_edge("compile_utils", "link_binary") + .add_edge("link_binary", "run_tests") + .build() +) + +# Simulated build functions +def parse_config(): + print(" Parsing config...") + return {"version": "1.0", "flags": ["-O2"]} + +def compile_lib(): + print(" Compiling lib...") + return "lib.o" + +def compile_utils(): + print(" Compiling utils...") + return "utils.o" + +def link_binary(): + print(" Linking binary...") + return "app" + +def run_tests(): + print(" Running tests...") + return "PASSED" + +tasks = { + "parse_config": parse_config, + "compile_lib": compile_lib, + "compile_utils": compile_utils, + "link_binary": link_binary, + "run_tests": run_tests, +} + +# First build: runs everything +executor = IncrementalExecutor(dag) +print("=== First build ===") +result = executor.execute(tasks) + +# Second build: only re-runs if inputs changed +print("\n=== Incremental rebuild (nothing changed) ===") +result = executor.execute(tasks) + +# Mark a node as changed and rebuild +print("\n=== Incremental rebuild (compile_lib changed) ===") +executor.mark_changed("compile_lib") +result = executor.execute(tasks) +``` + + +**Incremental execution** — only changed nodes and their downstream dependents re-execute. 
The second build is a no-op; the third only re-runs `compile_lib`, `link_binary`, and `run_tests`. + +--- + +## 2. Spreadsheet Formula Engine + +Cells are nodes. Formula dependencies are edges. When a cell changes, only dependent cells recalculate. + +```mermaid +graph TD + A1[A1: Price = 100] --> C1[C1: Total = A1 * B1] + B1[B1: Qty = 5] --> C1 + C1 --> D1[D1: Tax = C1 * 0.08] + C1 --> E1[E1: Discount = C1 * 0.1] + D1 --> F1[F1: Final = C1 + D1 - E1] + E1 --> F1 + C1 --> F1 +``` + +```python +import dagron +from dagron.execution import IncrementalExecutor + +# Build the cell dependency graph +dag = ( + dagron.DAG.builder() + .add_node("A1") # Price + .add_node("B1") # Quantity + .add_node("C1") # Total = A1 * B1 + .add_node("D1") # Tax = C1 * 0.08 + .add_node("E1") # Discount = C1 * 0.1 + .add_node("F1") # Final = C1 + D1 - E1 + .add_edge("A1", "C1") + .add_edge("B1", "C1") + .add_edge("C1", "D1") + .add_edge("C1", "E1") + .add_edge("C1", "F1") + .add_edge("D1", "F1") + .add_edge("E1", "F1") + .build() +) + +# Cell values (mutable state) +cells = {"A1": 100, "B1": 5} + +def eval_cell(name): + """Evaluate a single cell.""" + if name == "A1": + return cells["A1"] + elif name == "B1": + return cells["B1"] + elif name == "C1": + cells["C1"] = cells["A1"] * cells["B1"] + return cells["C1"] + elif name == "D1": + cells["D1"] = cells["C1"] * 0.08 + return cells["D1"] + elif name == "E1": + cells["E1"] = cells["C1"] * 0.1 + return cells["E1"] + elif name == "F1": + cells["F1"] = cells["C1"] + cells["D1"] - cells["E1"] + return cells["F1"] + +tasks = {name: (lambda n=name: eval_cell(n)) for name in dag.nodes()} + +# Initial calculation +executor = IncrementalExecutor(dag) +result = executor.execute(tasks) +print(f"Initial: Price={cells['A1']}, Qty={cells['B1']}, Final={cells['F1']}") + +# User edits A1 (price) — only C1, D1, E1, F1 recalculate +cells["A1"] = 150 +executor.mark_changed("A1") +result = executor.execute(tasks) +print(f"After edit: Price={cells['A1']}, 
Qty={cells['B1']}, Final={cells['F1']}") +``` + + +**Change propagation** — editing cell A1 triggers recalculation of only the cells that depend on it (C1, D1, E1, F1). B1 is untouched. + +--- + +## 3. ETL Pipeline with Checkpointing + +A multi-stage pipeline that writes checkpoints to disk, simulates a crash, and resumes from the last checkpoint. + +```mermaid +graph LR + E[extract] --> V[validate] + V --> T[transform] + T --> A[aggregate] + A --> L[load] +``` + +```python +import dagron +from dagron.execution import CheckpointExecutor + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("validate") + .add_node("transform") + .add_node("aggregate") + .add_node("load") + .add_edge("extract", "validate") + .add_edge("validate", "transform") + .add_edge("transform", "aggregate") + .add_edge("aggregate", "load") + .build() +) + +call_count = {"transform": 0} + +def extract(): + print(" Extracting 10,000 rows from source...") + return list(range(10_000)) + +def validate(): + print(" Validating schema...") + return True + +def transform(): + call_count["transform"] += 1 + if call_count["transform"] == 1: + print(" Transforming... 
CRASH!") + raise RuntimeError("Simulated crash during transform") + print(" Transforming data...") + return "transformed" + +def aggregate(): + print(" Aggregating results...") + return {"total": 10_000, "valid": 9_950} + +def load(): + print(" Loading into warehouse...") + return "success" + +tasks = { + "extract": extract, + "validate": validate, + "transform": transform, + "aggregate": aggregate, + "load": load, +} + +executor = CheckpointExecutor(dag, checkpoint_dir="/tmp/dagron_checkpoints") + +# First run: crashes during transform +print("=== Run 1 (will crash) ===") +try: + result = executor.execute(tasks) +except Exception as e: + print(f" Pipeline failed: {e}") + +# Second run: resumes from checkpoint, skips extract + validate +print("\n=== Run 2 (resume from checkpoint) ===") +result = executor.execute(tasks) +print(f" Pipeline completed: {result}") +``` + + +**Checkpointing** — the first run completes `extract` and `validate` before crashing. The second run skips those stages and resumes from `transform`. No wasted work. + +--- + +## 4. CI/CD Task Scheduler + +Lint, test, build, and deploy with resource constraints and an approval gate before production deployment. 
+ +```mermaid +graph LR + L[lint] --> B[build] + T[test_unit] --> B + T2[test_integration] --> B + B --> S[staging_deploy] + S --> G{approval_gate} + G --> P[prod_deploy] +``` + +```python +import dagron +from dagron.execution import ResourceAwareExecutor + +dag = ( + dagron.DAG.builder() + .add_node("lint", metadata={"cpu": 1}) + .add_node("test_unit", metadata={"cpu": 2}) + .add_node("test_integration", metadata={"cpu": 2}) + .add_node("build", metadata={"cpu": 2}) + .add_node("staging_deploy", metadata={"cpu": 1}) + .add_node("approval_gate", metadata={"gate": True}) + .add_node("prod_deploy", metadata={"cpu": 1}) + .add_edge("lint", "build") + .add_edge("test_unit", "build") + .add_edge("test_integration", "build") + .add_edge("build", "staging_deploy") + .add_edge("staging_deploy", "approval_gate") + .add_edge("approval_gate", "prod_deploy") + .build() +) + +def lint(): + print(" [1 CPU] Linting...") + return "ok" + +def test_unit(): + print(" [2 CPU] Running unit tests...") + return "148 passed" + +def test_integration(): + print(" [2 CPU] Running integration tests...") + return "32 passed" + +def build(): + print(" [2 CPU] Building Docker image...") + return "sha256:abc123" + +def staging_deploy(): + print(" [1 CPU] Deploying to staging...") + return "https://staging.example.com" + +def approval_gate(): + print(" Approval gate: auto-approved for demo") + return True + +def prod_deploy(): + print(" [1 CPU] Deploying to production...") + return "https://example.com" + +tasks = { + "lint": lint, + "test_unit": test_unit, + "test_integration": test_integration, + "build": build, + "staging_deploy": staging_deploy, + "approval_gate": approval_gate, + "prod_deploy": prod_deploy, +} + +# Execute with a 4-CPU constraint — lint + test_unit run in parallel, +# test_integration waits for a free slot +executor = ResourceAwareExecutor(dag, max_workers=4) +result = executor.execute(tasks) +print(f"\nPipeline result: {result}") +``` + + +**Resource-aware scheduling** — 
with 4 CPU slots, `lint` (1 CPU) and `test_unit` (2 CPU) run in parallel (3 slots used). `test_integration` (2 CPU) waits until a slot frees up. The approval gate pauses execution until approved. + \ No newline at end of file diff --git a/docs-next/content/docs/guide/core-concepts/building-dags.mdx b/docs-next/content/docs/guide/core-concepts/building-dags.mdx new file mode 100644 index 0000000..5104b37 --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/building-dags.mdx @@ -0,0 +1,516 @@ +--- +title: Building DAGs +description: A comprehensive guide to constructing directed acyclic graphs in dagron — fluent builder, direct mutation, bulk operations, payloads, and metadata. +--- + +# Building DAGs + +dagron offers multiple ways to construct a directed acyclic graph. This guide covers +every construction pattern, from quick one-liners to advanced builder configurations +with payloads and metadata. + +## Construction patterns at a glance + +| Pattern | Best for | Example | +|---------|----------|---------| +| `DAG.builder()` | Most use cases — fluent, validated | `DAG.builder().add_node("a").build()` | +| `DAG()` + mutations | Incremental / dynamic construction | `dag.add_node("a")` | +| Bulk helpers | Large graphs from lists | `builder.add_nodes([...]).add_edges([...])` | +| `Pipeline` | Linear function chains | `Pipeline(tasks=[fn1, fn2])` | + +## The fluent builder + +[`DAG.builder()`](/api/core/builder) returns a `DAGBuilder` that chains method +calls and validates the graph when you call `.build()`. + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("fetch") + .add_node("parse") + .add_node("validate") + .add_node("store") + .add_edge("fetch", "parse") + .add_edge("parse", "validate") + .add_edge("validate", "store") + .build() +) +``` + + parse --> validate --> store`} + caption="Linear four-node pipeline created with the builder." 
+/> + +### Cycle detection + +The builder rejects cycles at `.build()` time: + +```python +try: + dagron.DAG.builder() \ + .add_node("a").add_node("b").add_node("c") \ + .add_edge("a", "b") \ + .add_edge("b", "c") \ + .add_edge("c", "a") \ + .build() +except dagron.CycleError as e: + print(e) # Cycle detected: c -> a +``` + +This guarantee means that any `DAG` instance you hold is always valid. + +### Implicit node creation + +When you add an edge, both endpoints are created automatically if they do not +already exist: + +```python +dag = ( + dagron.DAG.builder() + .add_edge("a", "b") + .add_edge("b", "c") + .build() +) +print(dag.node_count()) # 3 +``` + +This shorthand is convenient for small graphs where you do not need to attach +metadata to every node. + +## Direct construction + +If you prefer an imperative style, create a bare `DAG` and mutate it: + +```python +dag = dagron.DAG() + +dag.add_node("ingest") +dag.add_node("clean") +dag.add_node("enrich") +dag.add_node("publish") + +dag.add_edge("ingest", "clean") +dag.add_edge("clean", "enrich") +dag.add_edge("enrich", "publish") +``` + +Direct mutation is useful when the graph structure is determined at runtime — for +example, when reading a config file or discovering tasks from a plugin registry. + +### Checking membership + +```python +print(dag.has_node("clean")) # True +print(dag.has_edge("clean", "enrich")) # True +``` + +### Removing nodes and edges + +```python +dag.remove_edge("enrich", "publish") +dag.remove_node("publish") + +print(dag.node_count()) # 3 +print(dag.edge_count()) # 2 +``` + +Removing a node also removes all edges connected to it. + +## Bulk operations + +When building large graphs, individual `add_node` / `add_edge` calls become +verbose. 
Use the bulk helpers instead: + +```python +dag = ( + dagron.DAG.builder() + .add_nodes(["extract", "transform_a", "transform_b", "merge", "load"]) + .add_edges([ + ("extract", "transform_a"), + ("extract", "transform_b"), + ("transform_a", "merge"), + ("transform_b", "merge"), + ("merge", "load"), + ]) + .build() +) +``` + + transform_a --> merge + extract --> transform_b --> merge + merge --> load`} + caption="Fan-out / fan-in graph created with bulk helpers." +/> + +### Building from data + +A common pattern is constructing the graph from a list of records (e.g., rows +from a database or lines in a YAML file): + +```python +records = [ + {"name": "extract", "depends_on": []}, + {"name": "transform", "depends_on": ["extract"]}, + {"name": "load", "depends_on": ["transform"]}, +] + +builder = dagron.DAG.builder() +for rec in records: + builder.add_node(rec["name"]) +for rec in records: + for dep in rec["depends_on"]: + builder.add_edge(dep, rec["name"]) +dag = builder.build() +``` + +This makes it straightforward to drive graph construction from external +configuration. + +## Node payloads + +Every node can carry an arbitrary Python object called a **payload**. Payloads +are useful for attaching configuration, metadata, or cost hints without +polluting the task functions. + +```python +dag = dagron.DAG() +dag.add_node("train", payload={"epochs": 10, "lr": 0.001}) +dag.add_node("evaluate", payload={"metrics": ["accuracy", "f1"]}) +dag.add_edge("train", "evaluate") +``` + +Retrieve payloads later: + +```python +# Access via the nodes iterator +for name in dag.nodes(): + print(name, dag.get_payload(name)) +``` + +### Payloads with the builder + +```python +dag = ( + dagron.DAG.builder() + .add_node("fetch", payload={"url": "https://api.example.com/data"}) + .add_node("parse", payload={"format": "json"}) + .add_edge("fetch", "parse") + .build() +) +``` + +## Contracts + +The builder supports **contracts** that declare the expected output type of a +node. 
Contracts are checked at build time and serve as living documentation of +your pipeline's data flow. + +```python +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .contract("extract", output=list) + .contract("transform", output=dict) + .contract("load", output=str) + .build() +) +``` + +See [Contracts](/api/analysis/contracts) in the API reference for details on +runtime enforcement and custom validators. + +## Understanding the graph structure + +Once a DAG is built, you have a rich set of read-only accessors: + +### Nodes and edges + +```python +print(list(dag.nodes())) # ['extract', 'transform', 'load'] +print(dag.node_count()) # 3 +print(dag.edge_count()) # 2 +``` + +### Roots and leaves + +**Roots** have no incoming edges (in-degree 0). **Leaves** have no outgoing +edges (out-degree 0). + +```python +print(dag.roots()) # ['extract'] +print(dag.leaves()) # ['load'] +``` + + transform --> load:::leaf`} + caption="Roots (green) and leaves (yellow) of a linear pipeline." 
+/> + +### Degree + +```python +print(dag.in_degree("transform")) # 1 +print(dag.out_degree("transform")) # 1 +print(dag.in_degree("extract")) # 0 (root) +print(dag.out_degree("load")) # 0 (leaf) +``` + +### Neighbourhood queries + +```python +# Direct parents +print(dag.predecessors("transform")) # ['extract'] + +# Direct children +print(dag.successors("transform")) # ['load'] + +# Transitive ancestors (all upstream nodes) +print(dag.ancestors("load")) # ['extract', 'transform'] + +# Transitive descendants (all downstream nodes) +print(dag.descendants("extract")) # ['transform', 'load'] +``` + +### Topological ordering + +A topological sort produces an ordering where every node appears after all of +its dependencies: + +```python +print(dag.topological_sort()) +# ['extract', 'transform', 'load'] +``` + +For parallel execution planning, **topological levels** group nodes that can run +concurrently: + +```python +for level, nodes in enumerate(dag.topological_levels()): + print(f"Level {level}: {nodes}") +# Level 0: ['extract'] +# Level 1: ['transform'] +# Level 2: ['load'] +``` + +See [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) for the full analysis +toolkit. + +## Diamond dependencies + +A common pattern in data pipelines is the **diamond** shape — one root fans out +to multiple branches that converge at a single join node: + +```python +dag = ( + dagron.DAG.builder() + .add_nodes(["source", "branch_a", "branch_b", "branch_c", "join"]) + .add_edges([ + ("source", "branch_a"), + ("source", "branch_b"), + ("source", "branch_c"), + ("branch_a", "join"), + ("branch_b", "join"), + ("branch_c", "join"), + ]) + .build() +) +``` + + branch_a --> join + source --> branch_b --> join + source --> branch_c --> join`} + caption="Diamond DAG. The three branches execute in parallel; join waits for all of them." +/> + +The executor automatically parallelises the three branches and synchronises +at the join node. 
+ +## Multi-layer pipelines + +For complex ML workflows you might have many layers: + +```python +dag = ( + dagron.DAG.builder() + # Data layer + .add_node("raw_data") + .add_node("clean_data") + .add_edge("raw_data", "clean_data") + # Feature layer + .add_node("feature_numeric") + .add_node("feature_text") + .add_node("feature_image") + .add_edge("clean_data", "feature_numeric") + .add_edge("clean_data", "feature_text") + .add_edge("clean_data", "feature_image") + # Model layer + .add_node("train_model") + .add_edge("feature_numeric", "train_model") + .add_edge("feature_text", "train_model") + .add_edge("feature_image", "train_model") + # Evaluation layer + .add_node("evaluate") + .add_node("deploy") + .add_edge("train_model", "evaluate") + .add_edge("evaluate", "deploy") + .build() +) + +print(dag.node_count()) # 8 +print(dag.edge_count()) # 8 + +for level, nodes in enumerate(dag.topological_levels()): + print(f"Level {level}: {nodes}") +# Level 0: ['raw_data'] +# Level 1: ['clean_data'] +# Level 2: ['feature_numeric', 'feature_text', 'feature_image'] +# Level 3: ['train_model'] +# Level 4: ['evaluate'] +# Level 5: ['deploy'] +``` + + clean_data + clean_data --> feature_numeric + clean_data --> feature_text + clean_data --> feature_image + feature_numeric --> train_model + feature_text --> train_model + feature_image --> train_model + train_model --> evaluate + evaluate --> deploy`} + caption="Multi-layer ML pipeline with parallel feature extraction." 
+/>
+
+## Pattern matching on node names
+
+dagron supports finding nodes by name patterns, which is handy when you build
+graphs programmatically with naming conventions:
+
+```python
+# Glob-style matching
+feature_nodes = dag.nodes_matching_glob("feature_*")
+print(feature_nodes)  # ['feature_numeric', 'feature_text', 'feature_image']
+
+# Regex matching
+data_nodes = dag.nodes_matching_regex(r".*_data$")
+print(data_nodes)  # ['raw_data', 'clean_data']
+```
+
+## Graph statistics
+
+The [`stats()`](/api/core/core) method returns a summary of the graph:
+
+```python
+s = dag.stats()
+print(s)
+# DAGStats(nodes=8, edges=8, roots=1, leaves=1, depth=5, width=3, density=0.143)
+```
+
+This is useful for logging and monitoring in production.
+
+## Composing multiple DAGs
+
+Large systems often consist of several independent DAGs that need to be wired
+together. The [`compose()`](/api/core/core) function merges DAGs with namespaced
+node names and cross-DAG connections:
+
+```python
+etl_dag = (
+    dagron.DAG.builder()
+    .add_edge("extract", "transform")
+    .add_edge("transform", "load")
+    .build()
+)
+
+ml_dag = (
+    dagron.DAG.builder()
+    .add_edge("train", "evaluate")
+    .build()
+)
+
+combined = dagron.compose(
+    dags={"etl": etl_dag, "ml": ml_dag},
+    connections=[("etl/load", "ml/train")],
+)
+
+print(list(combined.nodes()))
+# ['etl/extract', 'etl/transform', 'etl/load', 'ml/train', 'ml/evaluate']
+```
+
+  etl/transform --> etl/load --> ml/train --> ml/evaluate`}
+  caption="Two DAGs composed into a single pipeline with a cross-DAG edge."
+/>
+
+Composition is covered fully in [Graph Transforms](/guide/core-concepts/transforms).
+
+## Validation and linting
+
+After building a DAG, you can run the built-in linter to catch common issues:
+
+```python
+from dagron import lint
+
+warnings = lint(dag)
+for w in warnings:
+    print(w)
+# e.g., "Isolated node: 'scratch' has no incoming or outgoing edges."
+``` + +And validate structural integrity at any time: + +```python +dag.validate() # Raises if the graph is malformed +``` + +See [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) for the full analysis +and linting toolkit. + +## Best practices + +1. **Use the builder for static graphs.** It gives you cycle detection and a + clean, readable construction block. + +2. **Use direct mutation for dynamic graphs.** When the structure depends on + runtime decisions, building imperatively is simpler. + +3. **Attach payloads for configuration.** Keep task functions pure; put + parameters in payloads. + +4. **Name nodes with conventions.** Use prefixes like `extract_`, `transform_`, + `load_` so you can use glob/regex matching later. + +5. **Start small and compose.** Build self-contained sub-DAGs and wire them + together with `compose()`. + +## API reference + +For the full list of construction methods, see: + +- [`DAG`](/api/core/core) — the core graph class and all its methods. +- [`DAGBuilder`](/api/core/builder) — fluent builder API. +- [`compose()`](/api/core/core) — DAG composition. + +## Next steps + +- [Executing Tasks](/guide/core-concepts/executing-tasks) — learn how to run your DAG with executors. +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analyze structure, find critical paths, run queries. +- [Graph Transforms](/guide/core-concepts/transforms) — filter, merge, reverse, and reshape DAGs. diff --git a/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx b/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx new file mode 100644 index 0000000..ce027fb --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx @@ -0,0 +1,584 @@ +--- +title: Executing Tasks +description: Run DAG tasks in parallel with DAGExecutor, AsyncDAGExecutor, and Pipeline — with timeouts, cancellation, callbacks, and result inspection. 
+--- + +# Executing Tasks + +Once you have a [DAG](/guide/core-concepts/building-dags), you need an **executor** to run +it. dagron ships with several executor types — from a simple thread-pool executor to +async, pipeline, conditional, dynamic, and incremental variants. This guide covers +the two general-purpose executors (`DAGExecutor` and `AsyncDAGExecutor`), the +`Pipeline` convenience API, and all the options you can tune. + +## Executor overview + +| Executor | Runtime | Use case | +|----------|---------|----------| +| [`DAGExecutor`](/api/execution/execution) | Thread pool | CPU-bound tasks, synchronous code | +| [`AsyncDAGExecutor`](/api/execution/execution) | asyncio | I/O-bound tasks, async/await code | +| [`Pipeline`](/api/execution/pipeline) | Thread pool | Simple linear chains with `@task` | + +Specialised executors are covered in their own guides: +[Incremental](/guide/execution-strategies/incremental), +[Conditional](/guide/execution-strategies/conditional), +[Dynamic](/guide/execution-strategies/dynamic-dags), +[Checkpointing](/guide/execution-strategies/checkpointing). + +## DAGExecutor + +The workhorse executor. It schedules tasks across a thread pool, respecting +topological order and maximising parallelism. + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_edge("a", "b") + .add_edge("a", "c") + .add_edge("b", "d") + .add_edge("c", "d") + .build() +) + +tasks = { + "a": lambda: "data", + "b": lambda: "processed_b", + "c": lambda: "processed_c", + "d": lambda: "merged", +} + +result = dagron.DAGExecutor(dag).execute(tasks) +``` + + b --> d + a --> c --> d`} + caption="Diamond DAG. Nodes b and c run in parallel after a completes." 
+/> + +### Constructor parameters + +```python +dagron.DAGExecutor( + dag, # The DAG to execute + max_workers=None, # Thread pool size (default: CPU count) + costs=None, # Dict[str, float] — cost hints for scheduling + callbacks=None, # ExecutionCallbacks instance + fail_fast=True, # Stop on first failure? + enable_tracing=False, # Record execution trace? + hooks=None, # Plugin hooks +) +``` + +### The `execute()` method + +```python +result = executor.execute( + tasks, # Dict[str, Callable] + timeout=None, # Overall timeout in seconds + cancel_event=None, # threading.Event to signal cancellation +) +``` + +## Understanding ExecutionResult + +Every `.execute()` call returns an [`ExecutionResult`](/api/execution/execution): + +```python +result = dagron.DAGExecutor(dag).execute(tasks) + +# Aggregate counts +print(result.succeeded) # int +print(result.failed) # int +print(result.skipped) # int +print(result.timed_out) # int +print(result.cancelled) # int + +# Wall-clock duration +print(result.total_duration_seconds) # float + +# Per-node details +for name, nr in result.node_results.items(): + print(name, nr.status, nr.result, nr.error, nr.duration_seconds) +``` + +### NodeResult + +Each entry in `result.node_results` is a [`NodeResult`](/api/execution/execution): + +| Field | Type | Description | +|-------|------|-------------| +| `name` | `str` | Node name | +| `status` | `NodeStatus` | Final status | +| `result` | `Any` | Return value of the callable | +| `error` | `Exception \| None` | Exception if the task failed | +| `duration_seconds` | `float` | Wall-clock time for this node | + +### NodeStatus + +dagron defines eight possible statuses: + +| Status | Badge | Meaning | +|--------|-------|---------| +| `PENDING` | | Not yet scheduled | +| `RUNNING` | | Currently executing | +| `COMPLETED` | | Finished successfully | +| `FAILED` | | Raised an exception | +| `SKIPPED` | | Skipped (upstream failure or condition) | +| `TIMED_OUT` | | Exceeded timeout | +| 
`CANCELLED` | | Cancelled via cancel event |
+| `CACHE_HIT` | | Result retrieved from cache |
+
+```python
+from dagron import NodeStatus
+
+if result.node_results["d"].status == NodeStatus.COMPLETED:
+    print("All good!")
+```
+
+## Fail-fast vs. best-effort
+
+### Fail-fast (default)
+
+When `fail_fast=True`, the executor stops scheduling new tasks as soon as any
+node fails. Nodes that depend on the failed node (directly or transitively)
+are marked .
+
+```python
+import time
+
+def slow_a():
+    time.sleep(1)
+    return "ok"
+
+def failing_b():
+    raise RuntimeError("boom")
+
+def depends_on_b():
+    return "never reached"
+
+dag = dagron.DAG.builder() \
+    .add_edge("a", "c") \
+    .add_edge("b", "c") \
+    .build()
+
+result = dagron.DAGExecutor(dag, fail_fast=True).execute({
+    "a": slow_a,
+    "b": failing_b,
+    "c": depends_on_b,
+})
+
+print(result.node_results["b"].status)  # FAILED
+print(result.node_results["c"].status)  # SKIPPED
+```
+
+### Best-effort
+
+With `fail_fast=False`, independent branches continue executing even when one
+branch fails. Only the failed node's descendants (direct or transitive) are skipped.
+
+```python
+dag = (
+    dagron.DAG.builder()
+    .add_edge("root", "branch_a")
+    .add_edge("root", "branch_b")
+    .add_edge("branch_a", "join")
+    .add_edge("branch_b", "join")
+    .build()
+)
+
+def ok():
+    return "ok"
+
+def fail():
+    raise RuntimeError("oops")
+
+result = dagron.DAGExecutor(dag, fail_fast=False).execute({
+    "root": ok,
+    "branch_a": fail,
+    "branch_b": ok,
+    "join": ok,  # skipped because branch_a failed
+})
+
+print(result.node_results["branch_b"].status)  # COMPLETED (still ran!)
+print(result.node_results["join"].status) # SKIPPED +``` + +## Timeouts + +### Global timeout + +Set a wall-clock deadline for the entire execution: + +```python +result = dagron.DAGExecutor(dag).execute(tasks, timeout=30.0) + +# Any node still running after 30 seconds is marked TIMED_OUT +for name, nr in result.node_results.items(): + if nr.status == dagron.NodeStatus.TIMED_OUT: + print(f"{name} timed out!") +``` + +Timed-out nodes appear as in the results. + +### Per-node timeouts + +Per-node timeouts are supported through the cost-aware scheduling system. See +the [Resource Scheduling](/api/execution/resources) API reference for details. + +## Cancellation + +You can cancel a running execution from another thread using a +`threading.Event`: + +```python +import threading + +cancel = threading.Event() + +# In another thread (e.g., signal handler): +# cancel.set() + +result = dagron.DAGExecutor(dag).execute(tasks, cancel_event=cancel) +``` + +When the event is set, the executor finishes any currently-running tasks but +does not schedule new ones. Unstarted nodes are marked +. + +```python +import signal + +cancel = threading.Event() +signal.signal(signal.SIGINT, lambda *_: cancel.set()) + +result = dagron.DAGExecutor(dag).execute(tasks, cancel_event=cancel) +print(result.cancelled) # number of cancelled nodes +``` + +## Callbacks + +Callbacks let you react to execution events — for logging, metrics, progress +bars, or custom logic. + +```python +class MyCallbacks: + def on_node_start(self, name): + print(f"[START] {name}") + + def on_node_complete(self, name, result): + print(f"[DONE] {name} -> {result}") + + def on_node_error(self, name, error): + print(f"[FAIL] {name}: {error}") + + def on_node_skip(self, name): + print(f"[SKIP] {name}") + +result = dagron.DAGExecutor(dag, callbacks=MyCallbacks()).execute(tasks) +``` + +Callbacks are called synchronously on the executor thread that completed the +task. Keep them lightweight to avoid blocking the scheduler. 
+ +### Progress tracking example + +```python +from dagron import DAGExecutor + +class ProgressTracker: + def __init__(self, total): + self.total = total + self.done = 0 + + def on_node_complete(self, name, result): + self.done += 1 + pct = (self.done / self.total) * 100 + print(f"Progress: {self.done}/{self.total} ({pct:.0f}%)") + + def on_node_error(self, name, error): + self.done += 1 + + def on_node_skip(self, name): + self.done += 1 + +tracker = ProgressTracker(dag.node_count()) +result = DAGExecutor(dag, callbacks=tracker).execute(tasks) +``` + +## Cost-aware scheduling + +If some tasks are more expensive than others, provide cost hints so the +executor can schedule them more intelligently: + +```python +costs = { + "train_model": 100.0, + "evaluate": 10.0, + "preprocess": 5.0, +} + +result = dagron.DAGExecutor(dag, costs=costs).execute(tasks) +``` + +The executor uses costs when computing the [critical path](/guide/core-concepts/inspecting-graphs) +and when deciding which ready node to schedule first. 
+ +## AsyncDAGExecutor + +For I/O-bound workloads (HTTP requests, database queries, file operations), use +the async executor: + +```python +import asyncio +import dagron + +dag = ( + dagron.DAG.builder() + .add_edge("fetch_users", "enrich") + .add_edge("fetch_orders", "enrich") + .add_edge("enrich", "store") + .build() +) + +async def fetch_users(): + await asyncio.sleep(0.5) # simulate HTTP call + return [{"id": 1, "name": "Alice"}] + +async def fetch_orders(): + await asyncio.sleep(0.3) + return [{"id": 1, "item": "Widget"}] + +async def enrich(): + return {"users": 1, "orders": 1} + +async def store(): + return "stored" + +async def main(): + executor = dagron.AsyncDAGExecutor(dag) + result = await executor.execute({ + "fetch_users": fetch_users, + "fetch_orders": fetch_orders, + "enrich": enrich, + "store": store, + }) + print(result.succeeded) # 4 + +asyncio.run(main()) +``` + +`AsyncDAGExecutor` accepts the same constructor parameters as `DAGExecutor` +(`max_workers`, `callbacks`, `fail_fast`, `enable_tracing`, `hooks`), and its +`.execute()` method accepts the same `timeout` parameter. + + enrich + fetch_orders --> enrich + enrich --> store`} + caption="Async execution: both fetch tasks run concurrently on the event loop." 
+/> + +## Pipeline and @task + +For simple function chains where dependencies are inferred from parameter names, +the `Pipeline` API is the most concise approach: + +```python +from dagron import Pipeline, task + +@task +def download(): + return {"raw": [1, 2, 3]} + +@task +def normalize(download): + """Depends on 'download' because of the parameter name.""" + return [x * 10 for x in download["raw"]] + +@task +def summarize(normalize): + return {"count": len(normalize), "total": sum(normalize)} + +pipeline = Pipeline(tasks=[download, normalize, summarize], name="etl") +result = pipeline.execute() + +print(result.node_results["summarize"].result) +# {'count': 3, 'total': 60} +``` + +### How dependency inference works + +The Pipeline inspects each function's parameter names. If a parameter matches +the name of another task in the pipeline, an edge is added: + +```python +@task +def a(): + return 1 + +@task +def b(): + return 2 + +@task +def c(a, b): + """Depends on both a and b.""" + return a + b + +pipeline = Pipeline(tasks=[a, b, c]) +# Internally builds: a -> c, b -> c +``` + +### Async pipelines + +```python +result = await pipeline.execute_async() +``` + +### When to use Pipeline vs. DAGExecutor + +| Feature | Pipeline | DAGExecutor | +|---------|----------|-------------| +| Dependency declaration | Implicit (parameter names) | Explicit (edges) | +| Data passing | Automatic (return values injected) | Manual | +| Graph complexity | Linear / simple fan-in | Any DAG shape | +| Fine-grained control | Limited | Full | + +Use `Pipeline` for quick scripts and prototypes. Switch to `DAGExecutor` when you +need explicit control over graph structure, payloads, or advanced executor features. 
+ +## Controlling parallelism + +### max_workers + +```python +# Use exactly 2 threads +result = dagron.DAGExecutor(dag, max_workers=2).execute(tasks) + +# Use all available cores (default) +result = dagron.DAGExecutor(dag).execute(tasks) +``` + +Setting `max_workers=1` gives you sequential execution in topological order, +which is useful for debugging. + +### Execution plan preview + +Before executing, you can see the planned execution order: + +```python +plan = dag.execution_plan() +for step in plan: + print(step) +# ExecutionStep(level=0, nodes=['a']) +# ExecutionStep(level=1, nodes=['b', 'c']) +# ExecutionStep(level=2, nodes=['d']) +``` + +This tells you which nodes run in parallel at each level. + +## Putting it all together + +Here is a complete example with all major features: + +```python +import dagron +import threading +import time + +# Build the DAG +dag = ( + dagron.DAG.builder() + .add_nodes(["extract", "validate", "transform", "enrich", "load", "notify"]) + .add_edges([ + ("extract", "validate"), + ("validate", "transform"), + ("validate", "enrich"), + ("transform", "load"), + ("enrich", "load"), + ("load", "notify"), + ]) + .build() +) + +# Define tasks +def extract(): + time.sleep(0.1) + return {"rows": 1000} + +def validate(): + return {"valid": True} + +def transform(): + time.sleep(0.2) + return {"transformed": 1000} + +def enrich(): + time.sleep(0.15) + return {"enriched": 1000} + +def load(): + return {"loaded": 1000} + +def notify(): + return "email sent" + +tasks = { + "extract": extract, + "validate": validate, + "transform": transform, + "enrich": enrich, + "load": load, + "notify": notify, +} + +# Callbacks +class Logger: + def on_node_start(self, name): + print(f" -> {name}") + def on_node_complete(self, name, result): + print(f" <- {name}: {result}") + def on_node_error(self, name, error): + print(f" !! 
{name}: {error}") + +# Cancellation support +cancel = threading.Event() + +# Execute with all options +result = dagron.DAGExecutor( + dag, + max_workers=4, + callbacks=Logger(), + fail_fast=True, + enable_tracing=True, + costs={"transform": 2.0, "enrich": 1.5}, +).execute(tasks, timeout=60.0, cancel_event=cancel) + +# Inspect results +print(f"\nCompleted: {result.succeeded}/{dag.node_count()}") +print(f"Duration: {result.total_duration_seconds:.3f}s") + +for name, nr in result.node_results.items(): + print(f" {name}: {nr.status.name} ({nr.duration_seconds:.3f}s)") +``` + + validate + validate --> transform --> load + validate --> enrich --> load + load --> notify`} + caption="Complete ETL pipeline with parallel transform and enrich branches." +/> + +## Next steps + +- [Tracing & Profiling](/guide/observability/tracing-profiling) — enable tracing and analyse bottlenecks. +- [Incremental Execution](/guide/execution-strategies/incremental) — only re-run what changed. +- [Conditional Execution](/guide/execution-strategies/conditional) — skip branches based on runtime predicates. +- [Checkpointing](/guide/execution-strategies/checkpointing) — resume after failures. diff --git a/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx b/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx new file mode 100644 index 0000000..02c2858 --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx @@ -0,0 +1,496 @@ +--- +title: Inspecting Graphs +description: Analyze your DAG's structure — topological ordering, critical path, predecessors, ancestors, explain, what-if, lint, and the query DSL. +--- + +# Inspecting Graphs + +dagron gives you deep introspection into your DAG's structure. This guide covers +every analysis tool — from basic traversals to critical-path analysis, what-if +exploration, linting, and the query DSL. 
+ +## Building a sample graph + +We will use this graph throughout the guide: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_nodes(["raw", "clean", "features_a", "features_b", "train", "evaluate", "deploy"]) + .add_edges([ + ("raw", "clean"), + ("clean", "features_a"), + ("clean", "features_b"), + ("features_a", "train"), + ("features_b", "train"), + ("train", "evaluate"), + ("evaluate", "deploy"), + ]) + .build() +) +``` + + clean + clean --> features_a --> train + clean --> features_b --> train + train --> evaluate --> deploy`} + caption="ML pipeline used throughout this guide." +/> + +## Basic structure + +### Node and edge counts + +```python +print(dag.node_count()) # 7 +print(dag.edge_count()) # 7 +``` + +### Listing nodes + +```python +print(list(dag.nodes())) +# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate', 'deploy'] +``` + +### Roots and leaves + +**Roots** are nodes with no incoming edges — the entry points of your pipeline. +**Leaves** are nodes with no outgoing edges — the terminal outputs. 
+ +```python +print(dag.roots()) # ['raw'] +print(dag.leaves()) # ['deploy'] +``` + +### Membership checks + +```python +print(dag.has_node("train")) # True +print(dag.has_edge("clean", "features_a")) # True +print(dag.has_node("nonexistent")) # False +``` + +## Neighbourhood queries + +### Direct neighbours + +```python +# Parents (nodes that point TO this node) +print(dag.predecessors("train")) # ['features_a', 'features_b'] + +# Children (nodes this node points TO) +print(dag.successors("clean")) # ['features_a', 'features_b'] +``` + +### Degree + +```python +print(dag.in_degree("train")) # 2 +print(dag.out_degree("clean")) # 2 +print(dag.in_degree("raw")) # 0 (root) +print(dag.out_degree("deploy")) # 0 (leaf) +``` + +### Transitive closure queries + +```python +# All upstream nodes (recursive predecessors) +print(dag.ancestors("train")) +# ['raw', 'clean', 'features_a', 'features_b'] + +# All downstream nodes (recursive successors) +print(dag.descendants("clean")) +# ['features_a', 'features_b', 'train', 'evaluate', 'deploy'] +``` + + clean:::ancestor + clean --> features_a:::ancestor --> train:::target + clean --> features_b:::ancestor --> train + train --> evaluate --> deploy`} + caption="Ancestors of 'train' (blue) and the target node (red)." 
+/> + +## Graph statistics + +The `stats()` method returns a comprehensive summary: + +```python +s = dag.stats() +print(s) +# DAGStats(nodes=7, edges=7, roots=1, leaves=1, depth=5, width=2, density=0.167) +``` + +| Field | Meaning | +|-------|---------| +| `nodes` | Total node count | +| `edges` | Total edge count | +| `roots` | Number of root nodes | +| `leaves` | Number of leaf nodes | +| `depth` | Length of the longest path (in edges) | +| `width` | Maximum number of nodes at any single level | +| `density` | Edge count / maximum possible edges | + +## Topological ordering + +### Topological sort + +A flat ordering where every node appears after all its dependencies: + +```python +print(dag.topological_sort()) +# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate', 'deploy'] +``` + +### Topological levels + +Groups nodes that can execute in parallel — this is what the executor uses +internally: + +```python +for level, nodes in enumerate(dag.topological_levels()): + print(f"Level {level}: {nodes}") +# Level 0: ['raw'] +# Level 1: ['clean'] +# Level 2: ['features_a', 'features_b'] +# Level 3: ['train'] +# Level 4: ['evaluate'] +# Level 5: ['deploy'] +``` + +Level 2 shows that `features_a` and `features_b` can run concurrently. + +### Execution plan + +The execution plan is a richer version of topological levels that includes +scheduling metadata: + +```python +plan = dag.execution_plan() +for step in plan: + print(step) +``` + +## Critical path + +The **critical path** is the longest dependency chain through the DAG. It +determines the minimum wall-clock time, assuming unlimited parallelism. 
+ +```python +path = dag.critical_path() +print(path) +# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] +``` + +With cost hints, the critical path accounts for task durations: + +```python +costs = { + "raw": 1.0, + "clean": 2.0, + "features_a": 5.0, + "features_b": 3.0, + "train": 10.0, + "evaluate": 2.0, + "deploy": 1.0, +} + +path = dag.critical_path(costs=costs) +print(path) +# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] +# Total cost: 21.0 +``` + + clean:::critical + clean --> features_a:::critical --> train:::critical + clean --> features_b --> train + train --> evaluate:::critical --> deploy:::critical`} + caption="Critical path highlighted in red. Optimising these nodes reduces total pipeline time." +/> + +Understanding the critical path is key to performance tuning — see +[Tracing & Profiling](/guide/observability/tracing-profiling) for post-execution analysis. + +## Shortest and longest paths + +Find the shortest or longest path between any two nodes: + +```python +print(dag.shortest_path("raw", "deploy")) +# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] + +print(dag.longest_path("raw", "deploy")) +# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] +``` + +### All paths + +Enumerate every path between two nodes: + +```python +paths = dag.all_paths("clean", "train") +for p in paths: + print(p) +# ['clean', 'features_a', 'train'] +# ['clean', 'features_b', 'train'] +``` + +## Pattern matching + +Find nodes by name using glob or regex patterns: + +```python +# Glob-style +print(dag.nodes_matching_glob("features_*")) +# ['features_a', 'features_b'] + +# Regex +print(dag.nodes_matching_regex(r"^(train|evaluate|deploy)$")) +# ['train', 'evaluate', 'deploy'] +``` + +This is especially useful in large graphs with naming conventions. 
+ +## explain() + +The [`explain()`](/api/analysis/analysis) function gives a human-readable summary +of a single node's role in the graph: + +```python +from dagron import explain + +info = explain(dag, "train") +print(info) +``` + +Output: + +``` +Node: train + In-degree: 2 + Out-degree: 1 + Predecessors: features_a, features_b + Successors: evaluate + Ancestors: raw, clean, features_a, features_b + Descendants: evaluate, deploy + Level: 3 + Is root: False + Is leaf: False + On critical path: Yes +``` + +## what_if() + +The [`what_if()`](/api/analysis/analysis) function lets you explore hypothetical +changes without modifying the DAG: + +```python +from dagron import what_if + +report = what_if(dag, remove_nodes=["features_b"]) +print(report) +``` + +Output: + +``` +What-if: remove nodes ['features_b'] + Nodes removed: 1 + Edges removed: 2 + New roots: ['raw'] + New leaves: ['deploy'] + Disconnected: False + Affected downstream: ['train', 'evaluate', 'deploy'] +``` + +This is invaluable for understanding the impact of removing a step or a +dependency before making the change. + +```python +# What if we remove an edge instead? +report = what_if(dag, remove_edges=[("clean", "features_b")]) +print(report) +``` + +## lint() + +The [`lint()`](/api/analysis/analysis) function checks for common structural +issues: + +```python +from dagron import lint + +warnings = lint(dag) +for w in warnings: + print(w) +``` + +Possible warnings include: + +| Warning | Meaning | +|---------|---------| +| Isolated node | Node with no edges (in or out) | +| Single-child bottleneck | Node with high in-degree feeding a single successor | +| Redundant edge | Edge that is implied by transitivity | +| Wide fan-out | Node with many successors (may be a design smell) | + +Linting is especially useful in CI pipelines to enforce graph hygiene. 
+ +## query() + +The [`query()`](/api/analysis/analysis) function provides a mini DSL for selecting +nodes: + +```python +from dagron import query + +# Find all root nodes +roots = query(dag, "roots") +print(roots) # ['raw'] + +# Find all leaves +leaves = query(dag, "leaves") +print(leaves) # ['deploy'] + +# Glob on node names +features = query(dag, "name:features_*") +print(features) # ['features_a', 'features_b'] + +# Combine with set operators +result = query(dag, "roots & name:raw*") +print(result) # ['raw'] +``` + +### Query syntax reference + +| Expression | Meaning | +|------------|---------| +| `roots` | Nodes with in-degree 0 | +| `leaves` | Nodes with out-degree 0 | +| `name:pattern` | Glob match on node names | +| `A & B` | Intersection | +| `A \| B` | Union | +| `!A` | Complement | +| `ancestors(node)` | All ancestors of a node | +| `descendants(node)` | All descendants of a node | + +```python +# All non-leaf nodes +non_leaves = query(dag, "!leaves") +print(non_leaves) +# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate'] + +# Ancestors of train that match a glob +query(dag, "ancestors(train) & name:feature*") +# ['features_a', 'features_b'] +``` + +## Reachability index + +For large graphs where you repeatedly query ancestors/descendants, building a +reachability index dramatically speeds up lookups: + +```python +dag.build_reachability_index() + +# Now these calls use the precomputed index: +print(dag.ancestors("deploy")) +print(dag.descendants("raw")) +``` + +The index is invalidated when the graph is mutated and can be rebuilt at any +time. + +## Dominator tree + +The **dominator tree** reveals which nodes are mandatory bottlenecks — a node +**d** dominates node **n** if every path from any root to **n** passes through +**d**. 
+ +```python +dom_tree = dag.dominator_tree() +print(dom_tree) +# {'clean': 'raw', 'features_a': 'clean', 'features_b': 'clean', +# 'train': 'clean', 'evaluate': 'train', 'deploy': 'evaluate'} +``` + +This tells you, for example, that `clean` dominates everything downstream of +it, making it a critical bottleneck. + +## Validation + +At any time you can verify the DAG is structurally sound: + +```python +dag.validate() # Raises if the graph contains a cycle or is otherwise invalid +``` + +This is called automatically by the builder at `.build()` time, but you may +want to call it after manual mutations. + +## Practical example: debugging a slow pipeline + +Suppose your pipeline is slower than expected. Here is a systematic inspection +workflow: + +```python +import dagron +from dagron import explain, what_if, lint, query + +# 1. Check graph stats +s = dag.stats() +print(f"Nodes: {s.nodes}, Depth: {s.depth}, Width: {s.width}") + +# 2. Find the critical path +cp = dag.critical_path() +print(f"Critical path ({len(cp)} nodes): {' -> '.join(cp)}") + +# 3. Explain the bottleneck node +explain(dag, cp[len(cp) // 2]) + +# 4. What if we parallelise the bottleneck? +what_if(dag, remove_nodes=[cp[len(cp) // 2]]) + +# 5. Lint for structural issues +for w in lint(dag): + print(f"Warning: {w}") + +# 6. 
Query for specific patterns +heavy_nodes = query(dag, "name:train* | name:feature*") +print(f"Compute-heavy nodes: {heavy_nodes}") +``` + +## API reference + +| Function / Method | Docs | +|-------------------|------| +| `dag.stats()` | [DAG](/api/core/core) | +| `dag.topological_sort()` | [DAG](/api/core/core) | +| `dag.topological_levels()` | [DAG](/api/core/core) | +| `dag.critical_path()` | [DAG](/api/core/core) | +| `dag.execution_plan()` | [DAG](/api/core/core) | +| `dag.shortest_path()` | [DAG](/api/core/core) | +| `dag.longest_path()` | [DAG](/api/core/core) | +| `dag.all_paths()` | [DAG](/api/core/core) | +| `dag.dominator_tree()` | [DAG](/api/core/core) | +| `explain()` | [Analysis](/api/analysis/analysis) | +| `what_if()` | [Analysis](/api/analysis/analysis) | +| `lint()` | [Analysis](/api/analysis/analysis) | +| `query()` | [Analysis](/api/analysis/analysis) | + +## Next steps + +- [Graph Transforms](/guide/core-concepts/transforms) — reshape your DAG with filter, merge, reverse, and more. +- [Tracing & Profiling](/guide/observability/tracing-profiling) — post-execution analysis with Chrome traces. +- [Serialization](/guide/core-concepts/serialization) — export your DAG to JSON, DOT, Mermaid, or binary. 
diff --git a/docs-next/content/docs/guide/core-concepts/meta.json b/docs-next/content/docs/guide/core-concepts/meta.json new file mode 100644 index 0000000..2c0ecc8 --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/meta.json @@ -0,0 +1,11 @@ +{ + "title": "Core Concepts", + "defaultOpen": true, + "pages": [ + "building-dags", + "executing-tasks", + "inspecting-graphs", + "transforms", + "serialization" + ] +} diff --git a/docs-next/content/docs/guide/core-concepts/serialization.mdx b/docs-next/content/docs/guide/core-concepts/serialization.mdx new file mode 100644 index 0000000..d9bfda0 --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/serialization.mdx @@ -0,0 +1,481 @@ +--- +title: Serialization +description: Save and load DAGs in JSON, binary, DOT, and Mermaid formats — plus custom payload serializers. +--- + +# Serialization + +dagron supports multiple serialization formats for persisting DAGs, sharing them +across processes, embedding them in documentation, and visualizing them with +external tools. This guide covers every format and shows how to handle custom +payloads. + +## Format comparison + +| Format | Round-trip? | Human-readable? 
| Best for | +|--------|:-----------:|:---------------:|----------| +| JSON | Yes | Yes | Config files, APIs, debugging | +| Binary | Yes | No | Performance-critical storage, IPC | +| DOT | No (export only) | Yes | Graphviz visualization | +| Mermaid | No (export only) | Yes | Documentation, Markdown | +| File (save/load) | Yes | No | Disk persistence with compression | + +## JSON serialization + +### Export to JSON + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +json_str = dag.to_json() +print(json_str) +``` + +Output: + +```json +{ + "nodes": ["extract", "transform", "load"], + "edges": [ + ["extract", "transform"], + ["transform", "load"] + ] +} +``` + +### Import from JSON + +```python +restored = dagron.DAG.from_json(json_str) + +print(restored.node_count()) # 3 +print(restored.edge_count()) # 2 +print(list(restored.nodes())) # ['extract', 'transform', 'load'] +``` + +The round-trip preserves all structural information: nodes, edges, and their +ordering. 
+ +### JSON with payloads + +When nodes carry payloads, they are included in the JSON output: + +```python +dag = dagron.DAG() +dag.add_node("train", payload={"epochs": 10, "lr": 0.001}) +dag.add_node("evaluate", payload={"metrics": ["accuracy", "f1"]}) +dag.add_edge("train", "evaluate") + +json_str = dag.to_json() +print(json_str) +``` + +```json +{ + "nodes": [ + {"name": "train", "payload": {"epochs": 10, "lr": 0.001}}, + {"name": "evaluate", "payload": {"metrics": ["accuracy", "f1"]}} + ], + "edges": [ + ["train", "evaluate"] + ] +} +``` + +### Storing JSON to a file + +```python +import json + +# Write +with open("pipeline.json", "w") as f: + f.write(dag.to_json()) + +# Read +with open("pipeline.json", "r") as f: + dag = dagron.DAG.from_json(f.read()) +``` + +### Use case: sharing DAG definitions via APIs + +```python +from flask import Flask, jsonify, request + +app = Flask(__name__) + +@app.route("/pipeline", methods=["GET"]) +def get_pipeline(): + return dag.to_json(), 200, {"Content-Type": "application/json"} + +@app.route("/pipeline", methods=["POST"]) +def set_pipeline(): + new_dag = dagron.DAG.from_json(request.data.decode()) + # ... use new_dag ... + return jsonify({"nodes": new_dag.node_count()}) +``` + +## Binary serialization + +Binary format uses an efficient Rust-native encoding that is significantly faster +and more compact than JSON. Use it when performance matters. 
+
+### Export to bytes
+
+```python
+data = dag.to_bytes()
+print(type(data))  # <class 'bytes'>
+print(len(data))  # compact binary representation
+```
+
+### Import from bytes
+
+```python
+restored = dagron.DAG.from_bytes(data)
+print(restored.node_count())  # same as original
+```
+
+### Use case: Redis caching
+
+```python
+import redis
+
+r = redis.Redis()
+
+# Store
+r.set("pipeline:etl", dag.to_bytes())
+
+# Retrieve
+data = r.get("pipeline:etl")
+if data:
+    dag = dagron.DAG.from_bytes(data)
+```
+
+### Use case: inter-process communication
+
+```python
+import multiprocessing as mp
+
+def worker(dag_bytes):
+    dag = dagron.DAG.from_bytes(dag_bytes)
+    print(f"Worker received DAG with {dag.node_count()} nodes")
+
+# Send the DAG to a subprocess
+p = mp.Process(target=worker, args=(dag.to_bytes(),))
+p.start()
+p.join()
+```
+
+### Performance comparison
+
+Binary serialization is typically 5-10x faster than JSON and produces 3-5x
+smaller output, because it avoids string parsing and uses Rust's native
+serialization:
+
+```python
+import time
+
+# JSON
+start = time.perf_counter()
+for _ in range(10000):
+    dagron.DAG.from_json(dag.to_json())
+json_time = time.perf_counter() - start
+
+# Binary
+start = time.perf_counter()
+for _ in range(10000):
+    dagron.DAG.from_bytes(dag.to_bytes())
+binary_time = time.perf_counter() - start
+
+print(f"JSON: {json_time:.3f}s")
+print(f"Binary: {binary_time:.3f}s")
+print(f"Speedup: {json_time / binary_time:.1f}x")
+```
+
+## File persistence (save / load)
+
+The `save()` and `load()` methods write and read DAGs to/from disk files. They
+use the binary format internally with optional compression.
+ +### Saving to disk + +```python +dag.save("pipeline.dagron") +``` + +### Loading from disk + +```python +dag = dagron.DAG.load("pipeline.dagron") +print(dag.node_count()) +``` + +### Use case: checkpoint-style persistence + +```python +import os + +PIPELINE_PATH = "/var/data/pipeline.dagron" + +def get_or_create_pipeline(): + if os.path.exists(PIPELINE_PATH): + return dagron.DAG.load(PIPELINE_PATH) + + dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() + ) + dag.save(PIPELINE_PATH) + return dag +``` + +## DOT export (Graphviz) + +The [DOT language](https://graphviz.org/doc/info/lang.html) is the standard +input format for Graphviz. + +```python +dot = dag.to_dot() +print(dot) +``` + +Output: + +```text +digraph { + "extract" -> "transform" + "transform" -> "load" +} +``` + +### Rendering with Graphviz + +```python +import subprocess + +dot = dag.to_dot() +with open("pipeline.dot", "w") as f: + f.write(dot) + +subprocess.run(["dot", "-Tpng", "pipeline.dot", "-o", "pipeline.png"]) +``` + +### Rendering in a Jupyter notebook + +```python +from IPython.display import SVG, display +import subprocess + +dot = dag.to_dot() +result = subprocess.run( + ["dot", "-Tsvg"], + input=dot.encode(), + capture_output=True, +) +display(SVG(result.stdout)) +``` + +## Mermaid export + +[Mermaid](https://mermaid.js.org/) is a Markdown-friendly diagramming language +supported by GitHub, GitLab, Docusaurus, and many other platforms. 
+
+```python
+mermaid = dag.to_mermaid()
+print(mermaid)
+```
+
+Output:
+
+```
+graph TD
+  extract --> transform
+  transform --> load
+```
+
+### Embedding in Markdown
+
+````markdown
+```mermaid
+graph TD
+  extract --> transform
+  transform --> load
+```
+````
+
+### Use case: auto-generated documentation
+
+````python
+def generate_pipeline_docs(dag, output_path):
+    """Generate a Markdown file with an embedded DAG diagram."""
+    mermaid = dag.to_mermaid()
+    content = f"""# Pipeline Overview
+
+## DAG Structure
+
+```mermaid
+{mermaid}
+```
+
+## Statistics
+
+- Nodes: {dag.node_count()}
+- Edges: {dag.edge_count()}
+- Roots: {', '.join(dag.roots())}
+- Leaves: {', '.join(dag.leaves())}
+"""
+    with open(output_path, "w") as f:
+        f.write(content)
+
+generate_pipeline_docs(dag, "pipeline.md")
+````
+
+<DagDiagram
+  chart={`graph TD
+  extract --> transform
+  transform --> load`}
+  caption="The same DAG rendered as a DagDiagram component."
+/>
+
+## Custom payload serializers
+
+By default, dagron serializes payloads using Python's standard JSON encoder,
+which handles `dict`, `list`, `str`, `int`, `float`, `bool`, and `None`. For
+custom objects, you need to provide serializer functions.
+ +### Example: serializing dataclass payloads + +```python +import dagron +import json +from dataclasses import dataclass, asdict + +@dataclass +class TaskConfig: + retries: int + timeout_seconds: float + tags: list + +# Build a DAG with dataclass payloads +dag = dagron.DAG() +dag.add_node("fetch", payload=TaskConfig(retries=3, timeout_seconds=30.0, tags=["io"])) +dag.add_node("process", payload=TaskConfig(retries=1, timeout_seconds=120.0, tags=["cpu"])) +dag.add_edge("fetch", "process") + +# Custom encoder +class ConfigEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, TaskConfig): + return {"__type__": "TaskConfig", **asdict(obj)} + return super().default(obj) + +# Custom decoder +def config_decoder(dct): + if dct.get("__type__") == "TaskConfig": + return TaskConfig( + retries=dct["retries"], + timeout_seconds=dct["timeout_seconds"], + tags=dct["tags"], + ) + return dct + +# Serialize with custom encoder +json_str = dag.to_json(cls=ConfigEncoder) +print(json_str) + +# Deserialize with custom decoder +restored = dagron.DAG.from_json(json_str, object_hook=config_decoder) +``` + +### Example: binary serialization with pickle payloads + +For the binary format, payloads are serialized using pickle by default, so +custom objects work out of the box as long as they are picklable: + +```python +import dagron +import numpy as np + +dag = dagron.DAG() +dag.add_node("matrix", payload=np.array([[1, 2], [3, 4]])) +dag.add_node("result") +dag.add_edge("matrix", "result") + +# Binary round-trip preserves numpy arrays +data = dag.to_bytes() +restored = dagron.DAG.from_bytes(data) +``` + +## Combining serialization with snapshots + +Snapshots and serialization work well together for versioning: + +```python +import dagron +from datetime import datetime + +dag = ( + dagron.DAG.builder() + .add_edge("a", "b") + .add_edge("b", "c") + .build() +) + +# Save version 1 +dag.save(f"pipeline_v1.dagron") + +# Make changes +dag.add_node("d") +dag.add_edge("c", "d") + 
+# Save version 2 +dag.save(f"pipeline_v2.dagron") + +# Compare versions +v1 = dagron.DAG.load("pipeline_v1.dagron") +v2 = dagron.DAG.load("pipeline_v2.dagron") + +print(f"v1: {v1.node_count()} nodes, {v1.edge_count()} edges") +print(f"v2: {v2.node_count()} nodes, {v2.edge_count()} edges") +``` + +## Format selection guide + +Use this decision tree to pick the right format: + +1. **Need to read/write from Python?** Use `save()` / `load()` for files, or + `to_bytes()` / `from_bytes()` for in-memory. + +2. **Need human-readable config?** Use `to_json()` / `from_json()`. + +3. **Need to visualize with Graphviz?** Use `to_dot()`. + +4. **Need to embed in Markdown/docs?** Use `to_mermaid()`. + +5. **Need maximum performance?** Use `to_bytes()` / `from_bytes()`. + +## API reference + +| Method | Docs | +|--------|------| +| `dag.to_json()` | [DAG](/api/core/core) | +| `DAG.from_json()` | [DAG](/api/core/core) | +| `dag.to_bytes()` | [DAG](/api/core/core) | +| `DAG.from_bytes()` | [DAG](/api/core/core) | +| `dag.save()` | [DAG](/api/core/core) | +| `DAG.load()` | [DAG](/api/core/core) | +| `dag.to_dot()` | [DAG](/api/core/core) | +| `dag.to_mermaid()` | [DAG](/api/core/core) | + +## Next steps + +- [Incremental Execution](/guide/execution-strategies/incremental) — use save/load for caching intermediate state. +- [Checkpointing](/guide/execution-strategies/checkpointing) — persist execution progress to disk. +- [Graph Transforms](/guide/core-concepts/transforms) — create snapshots before applying transforms. diff --git a/docs-next/content/docs/guide/core-concepts/transforms.mdx b/docs-next/content/docs/guide/core-concepts/transforms.mdx new file mode 100644 index 0000000..0227a91 --- /dev/null +++ b/docs-next/content/docs/guide/core-concepts/transforms.mdx @@ -0,0 +1,527 @@ +--- +title: Graph Transforms +description: Reshape your DAGs with reverse, filter, merge, collapse, transitive reduction, subgraph extraction, snapshots, and composition. 
+--- + +# Graph Transforms + +dagron provides a rich set of **structural transformations** that produce new DAGs +from existing ones. Transforms are non-destructive — the original DAG is never +mutated. This guide covers every built-in transform with before/after diagrams. + +## Transform overview + +| Transform | What it does | +|-----------|-------------| +| `reverse()` | Flip every edge | +| `filter()` | Keep nodes matching a predicate | +| `merge()` | Combine two DAGs into one | +| `collapse()` | Replace a set of nodes with a single node | +| `transitive_reduction()` | Remove redundant edges | +| `transitive_closure()` | Add all implied edges | +| `subgraph()` | Extract a subgraph by node set | +| `subgraph_by_depth()` | Extract nodes within N hops | +| `snapshot()` | Immutable frozen copy | +| `compose()` | Namespace and wire multiple DAGs | + +## Sample DAG + +We will use this graph for most examples: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_nodes(["a", "b", "c", "d", "e"]) + .add_edges([ + ("a", "b"), ("a", "c"), + ("b", "d"), ("c", "d"), + ("d", "e"), + ]) + .build() +) +``` + + b --> d + a --> c --> d + d --> e`} + caption="Original DAG used throughout this guide." +/> + +## reverse() + +Flipping every edge is useful when you want to reason about upstream +dependencies as downstream propagation (e.g., "what gets affected if this node +changes?"). + +```python +rev = dag.reverse() + +print(rev.roots()) # ['e'] (was a leaf) +print(rev.leaves()) # ['a'] (was a root) +print(rev.successors("e")) # ['d'] +print(rev.predecessors("a")) # ['b', 'c'] +``` + + d + d --> b --> a + d --> c --> a`} + caption="Reversed DAG — edges point upstream." +/> + +Reversing is an O(V + E) operation implemented in Rust. + +## filter() + +Keep only the nodes that satisfy a predicate. Edges between remaining nodes are +preserved; edges to/from removed nodes are dropped. 
+ +```python +# Keep only nodes whose names are NOT 'c' +filtered = dag.filter(lambda name: name != "c") + +print(list(filtered.nodes())) # ['a', 'b', 'd', 'e'] +print(filtered.edge_count()) # 3 +``` + + b --> d --> e`} + caption="After filtering out node 'c'." +/> + +### Filtering with payloads + +If you attached payloads, you can filter based on them: + +```python +dag2 = dagron.DAG() +dag2.add_node("gpu_train", payload={"gpu": True}) +dag2.add_node("cpu_prep", payload={"gpu": False}) +dag2.add_node("cpu_eval", payload={"gpu": False}) +dag2.add_edge("cpu_prep", "gpu_train") +dag2.add_edge("gpu_train", "cpu_eval") + +# Keep only GPU nodes +gpu_only = dag2.filter(lambda name: dag2.get_payload(name).get("gpu", False)) +print(list(gpu_only.nodes())) # ['gpu_train'] +``` + +## merge() + +Combine two DAGs into a single graph. Nodes with the same name are unified; +edges from both graphs are included. + +```python +dag_a = ( + dagron.DAG.builder() + .add_edge("x", "y") + .add_edge("y", "z") + .build() +) + +dag_b = ( + dagron.DAG.builder() + .add_edge("y", "w") + .add_edge("w", "z") + .build() +) + +merged = dag_a.merge(dag_b) +print(list(merged.nodes())) # ['x', 'y', 'z', 'w'] +print(merged.edge_count()) # 4 +``` + + y --> z + y --> w --> z`} + caption="Merged DAG. Node 'y' and 'z' were shared between the two inputs." +/> + + +If merging two DAGs would introduce a cycle, `merge()` raises a `CycleError`. + +## collapse() + +Replace a set of nodes with a single representative node. All incoming edges to +the set become incoming edges to the representative; all outgoing edges from the +set become outgoing edges from the representative. 
+ +```python +# Collapse the feature extraction branch into one node +collapsed = dag.collapse( + nodes=["b", "c"], + into="features", +) + +print(list(collapsed.nodes())) # ['a', 'features', 'd', 'e'] +print(collapsed.successors("a")) # ['features'] +print(collapsed.predecessors("d")) # ['features'] +``` + + features --> d --> e`} + caption="After collapsing nodes b and c into 'features'." +/> + +Collapsing is useful for: +- Simplifying large graphs for visualization. +- Creating summary views for stakeholders. +- Reducing overhead when scheduling tightly-coupled tasks. + +## transitive_reduction() + +Remove edges that are implied by other paths. The transitive reduction has the +same reachability as the original graph but with the minimum number of edges. + +```python +# Add a redundant shortcut edge +dag_with_shortcut = ( + dagron.DAG.builder() + .add_edge("a", "b") + .add_edge("b", "c") + .add_edge("a", "c") # redundant — a->b->c already implies a can reach c + .build() +) + +reduced = dag_with_shortcut.transitive_reduction() +print(reduced.edge_count()) # 2 (shortcut removed) +print(reduced.has_edge("a", "c")) # False +print(reduced.has_edge("a", "b")) # True +print(reduced.has_edge("b", "c")) # True +``` + +

+ +
+
+**Before (with shortcut)**
+
+<DagDiagram
+  chart={`graph TD
+  a --> b --> c
+  a -.-> c`}
+  caption="Dashed edge is redundant."
+/>
+
+ +
+
+**After (transitive reduction)**
+
+<DagDiagram
+  chart={`graph TD
+  a --> b --> c`}
+  caption="Minimum edges preserving reachability."
+/>
+
+
+ +This is especially useful for cleaning up graphs generated from broad dependency +specifications. + +## transitive_closure() + +The opposite of reduction: add an edge for every pair of nodes (u, v) where v +is reachable from u. + +```python +closure = dag.transitive_closure() + +# a can reach e (a->b->d->e), so there is now a direct edge: +print(closure.has_edge("a", "e")) # True +print(closure.has_edge("a", "d")) # True +print(closure.has_edge("b", "e")) # True + +print(f"Original edges: {dag.edge_count()}") # 5 +print(f"Closure edges: {closure.edge_count()}") # 10 +``` + +The transitive closure is useful for pre-computing reachability queries. + +## subgraph() + +Extract a subgraph containing only the specified nodes and the edges between +them: + +```python +sub = dag.subgraph(["a", "b", "d"]) + +print(list(sub.nodes())) # ['a', 'b', 'd'] +print(sub.edge_count()) # 2 (a->b, b->d) +``` + + b --> d`} + caption="Subgraph of nodes a, b, d." +/> + +### Extracting ancestors or descendants + +A common pattern is extracting the full upstream or downstream of a node: + +```python +# Everything upstream of 'train' (including train itself) +upstream_nodes = set(dag.ancestors("d")) | {"d"} +upstream = dag.subgraph(list(upstream_nodes)) +print(list(upstream.nodes())) # ['a', 'b', 'c', 'd'] +``` + +## subgraph_by_depth() + +Extract nodes within a certain number of hops from a starting node: + +```python +# All nodes within 1 hop of 'd' +nearby = dag.subgraph_by_depth("d", depth=1) +print(list(nearby.nodes())) # ['b', 'c', 'd', 'e'] +``` + + d + c --> d + d --> e`} + caption="Subgraph within 1 hop of node 'd' (both directions)." +/> + +```python +# Within 2 hops +wider = dag.subgraph_by_depth("d", depth=2) +print(list(wider.nodes())) # ['a', 'b', 'c', 'd', 'e'] +``` + +## snapshot() + +Create an immutable, frozen copy of the DAG: + +```python +snap = dag.snapshot() +``` + +Snapshots are useful for: +- Recording the state of a graph before mutations. 
+- Passing a read-only view to analysis functions. +- Implementing undo/redo. + +See [Serialization](/guide/core-concepts/serialization) for persisting snapshots to disk. + +### Diffing snapshots + +Compare two snapshots to see what changed: + +```python +snap1 = dag.snapshot() + +# Mutate the original +dag.add_node("f") +dag.add_edge("e", "f") + +snap2 = dag.snapshot() + +diff = dag.diff(snap1, snap2) +print(diff) +# DagDiff(added_nodes=['f'], removed_nodes=[], added_edges=[('e', 'f')], removed_edges=[]) +``` + +## compose() + +The [`compose()`](/api/core/core) function wires multiple DAGs together under +namespaces: + +```python +etl = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +ml = ( + dagron.DAG.builder() + .add_edge("train", "evaluate") + .build() +) + +combined = dagron.compose( + dags={"etl": etl, "ml": ml}, + connections=[("etl/load", "ml/train")], +) + +print(list(combined.nodes())) +# ['etl/extract', 'etl/transform', 'etl/load', 'ml/train', 'ml/evaluate'] + +print(combined.successors("etl/load")) # ['ml/train'] +``` + + etl/transform --> etl/load --> ml/train --> ml/evaluate`} + caption="Two sub-DAGs composed into a single pipeline." 
+/> + +### Composing many DAGs + +```python +ingestion = dagron.DAG.builder().add_edge("fetch", "validate").build() +processing = dagron.DAG.builder().add_edge("clean", "aggregate").build() +reporting = dagron.DAG.builder().add_edge("render", "email").build() + +full_pipeline = dagron.compose( + dags={ + "ingest": ingestion, + "process": processing, + "report": reporting, + }, + connections=[ + ("ingest/validate", "process/clean"), + ("process/aggregate", "report/render"), + ], +) + +for level, nodes in enumerate(full_pipeline.topological_levels()): + print(f"Level {level}: {nodes}") +# Level 0: ['ingest/fetch'] +# Level 1: ['ingest/validate'] +# Level 2: ['process/clean'] +# Level 3: ['process/aggregate'] +# Level 4: ['report/render'] +# Level 5: ['report/email'] +``` + + ingest/validate --> process/clean --> process/aggregate --> report/render --> report/email`} + caption="Three composed DAGs forming a full data pipeline." +/> + +## Chaining transforms + +Transforms return new DAG instances, so you can chain them: + +```python +result = ( + dag + .filter(lambda n: n != "c") + .transitive_reduction() + .reverse() +) + +print(list(result.nodes())) # ['e', 'd', 'b', 'a'] +``` + +Since each transform produces a new DAG, the original is never modified. + +## Partitioning + +dagron includes two partitioning strategies for splitting a DAG into +independent sub-DAGs: + +### Level-based partitioning + +Splits the DAG at topological level boundaries: + +```python +partitions = dag.partition_level_based(num_partitions=2) +for i, part in enumerate(partitions): + print(f"Partition {i}: {list(part.nodes())}") +``` + +### Balanced partitioning + +Tries to balance the number of nodes across partitions: + +```python +partitions = dag.partition_balanced(num_partitions=3) +for i, part in enumerate(partitions): + print(f"Partition {i}: {list(part.nodes())}") +``` + +Partitioning is useful for distributed execution where each partition is sent +to a different worker. 
+ +## Practical example: simplifying a graph for stakeholders + +```python +import dagron + +# A complex internal pipeline +pipeline = ( + dagron.DAG.builder() + .add_nodes([ + "fetch_api", "fetch_db", "fetch_s3", + "validate_api", "validate_db", "validate_s3", + "merge_sources", "feature_eng", "train_xgb", + "train_nn", "ensemble", "evaluate", "deploy" + ]) + .add_edges([ + ("fetch_api", "validate_api"), ("fetch_db", "validate_db"), + ("fetch_s3", "validate_s3"), + ("validate_api", "merge_sources"), ("validate_db", "merge_sources"), + ("validate_s3", "merge_sources"), + ("merge_sources", "feature_eng"), + ("feature_eng", "train_xgb"), ("feature_eng", "train_nn"), + ("train_xgb", "ensemble"), ("train_nn", "ensemble"), + ("ensemble", "evaluate"), ("evaluate", "deploy"), + ]) + .build() +) + +# Collapse ingestion into one node for the executive summary +simplified = pipeline.collapse( + nodes=["fetch_api", "fetch_db", "fetch_s3", + "validate_api", "validate_db", "validate_s3", + "merge_sources"], + into="data_ingestion", +) + +simplified = simplified.collapse( + nodes=["train_xgb", "train_nn", "ensemble"], + into="model_training", +) + +print(list(simplified.nodes())) +# ['data_ingestion', 'feature_eng', 'model_training', 'evaluate', 'deploy'] + +print(simplified.to_mermaid()) +``` + + feature_eng --> model_training --> evaluate --> deploy`} + caption="Simplified stakeholder view after collapsing internal details." 
+/> + +## API reference + +| Method | Docs | +|--------|------| +| `dag.reverse()` | [DAG](/api/core/core) | +| `dag.filter()` | [DAG](/api/core/core) | +| `dag.merge()` | [DAG](/api/core/core) | +| `dag.collapse()` | [DAG](/api/core/core) | +| `dag.transitive_reduction()` | [DAG](/api/core/core) | +| `dag.transitive_closure()` | [DAG](/api/core/core) | +| `dag.subgraph()` | [DAG](/api/core/core) | +| `dag.subgraph_by_depth()` | [DAG](/api/core/core) | +| `dag.snapshot()` | [DAG](/api/core/core) | +| `dag.diff()` | [DAG](/api/core/core) | +| `dagron.compose()` | [DAG](/api/core/core) | +| `dag.partition_level_based()` | [DAG](/api/core/core) | +| `dag.partition_balanced()` | [DAG](/api/core/core) | + +## Next steps + +- [Serialization](/guide/core-concepts/serialization) — persist DAGs to JSON, binary, DOT, and Mermaid. +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analyze structure, critical paths, and queries. +- [Incremental Execution](/guide/execution-strategies/incremental) — use transforms to understand dirty propagation. diff --git a/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx b/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx new file mode 100644 index 0000000..80e07f8 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx @@ -0,0 +1,462 @@ +--- +title: Approval Gates +description: Pause DAG execution at human-in-the-loop gates that wait for explicit approval or rejection. +--- + +# Approval Gates + +Production pipelines often need a human checkpoint before proceeding. dagron's **approval gates** let you pause execution at specific nodes until an operator explicitly approves or rejects the step. This is useful for deployment sign-offs, data quality reviews, compliance checks, and any workflow that requires human judgment. + +Gates are **execution-time concerns**, not graph structure. 
The DAG itself stays pure; gates are attached via a `GateController` that the executor consults at runtime. + + test + test --> gate_qa + gate_qa --> stage + stage --> gate_prod + gate_prod --> deploy + style gate_qa fill:#fff3e0,stroke:#e65100 + style gate_prod fill:#fff3e0,stroke:#e65100`} + caption="A deployment pipeline with two approval gates. Execution pauses at each gate until a human approves." +/> + +--- + +## Core Classes + +| Class | Role | +|---|---| +| [`ApprovalGate`](/api/execution/gates#approvalgate) | A single gate that blocks until approved, rejected, or timed out. | +| [`GateController`](/api/execution/gates#gatecontroller) | Manages multiple named gates. Thread-safe facade for approve/reject operations. | +| [`GateStatus`](/api/execution/gates#gatestatus) | Enum: `PENDING`, `WAITING`, `APPROVED`, `REJECTED`, `TIMED_OUT`. | +| [`GateRejectedError`](/api/execution/gates#gaterejectederror) | Raised when a gate is rejected. | +| [`GateTimeoutError`](/api/execution/gates#gatetimeouterror) | Raised when a gate times out before a decision is made. | + +--- + +## Creating Gates + +### Single Gate + +An `ApprovalGate` represents a single decision point: + +```python +from dagron.execution.gates import ApprovalGate, GateStatus + +gate = ApprovalGate(timeout=300) # 5-minute timeout +print(gate.status) # GateStatus.PENDING +``` + +### GateController + +In practice you manage multiple gates through a `GateController`: + +```python +from dagron.execution.gates import ApprovalGate, GateController + +controller = GateController({ + "qa_review": ApprovalGate(timeout=600), # 10 min + "prod_deploy": ApprovalGate(timeout=300), # 5 min +}) +``` + +The controller provides a thread-safe interface for approving and rejecting gates by name. 
+ +--- + +## Gate Lifecycle + +Each gate transitions through a well-defined set of states: + +```mermaid +stateDiagram-v2 + [*] --> PENDING + PENDING --> WAITING : executor calls wait_sync/wait_async + WAITING --> APPROVED : approve() called + WAITING --> REJECTED : reject(reason) called + WAITING --> TIMED_OUT : timeout expires + APPROVED --> PENDING : reset() + REJECTED --> PENDING : reset() + TIMED_OUT --> PENDING : reset() +``` + +- **PENDING** -- initial state. The gate exists but no one is waiting on it yet. +- **WAITING** -- the executor has reached this gate and is blocked, waiting for a decision. +- **APPROVED** -- the gate was approved and execution continues. +- **REJECTED** -- the gate was rejected, which raises a `GateRejectedError` in the executor. +- **TIMED_OUT** -- the timeout expired before a decision, raising a `GateTimeoutError`. + +--- + +## Approving and Rejecting + +### From Another Thread + +Gates are designed for multi-threaded use. The executor blocks on the gate in one thread, and you approve or reject from another: + +```python +import threading +from dagron.execution.gates import ApprovalGate, GateController + +controller = GateController({ + "deploy": ApprovalGate(timeout=120), +}) + +# In the executor thread, the gate blocks: +# controller.wait_sync("deploy") # blocks until approve/reject + +# From a monitoring thread, API handler, or CLI: +controller.approve("deploy") +# or: +controller.reject("deploy", reason="Failed canary analysis") +``` + +### Querying Status + +```python +print(controller.status("deploy")) # GateStatus.APPROVED + +# List all gates currently waiting for a decision +waiting = controller.waiting_gates() +print(f"Gates awaiting approval: {waiting}") +``` + +--- + +## Integrating Gates with Execution + +Gates are integrated into DAG execution via **callbacks**. The executor calls `controller.wait_sync(node_name)` when it reaches a node that has an associated gate. 
Here is a complete example: + +```python +import threading +import time +import dagron +from dagron.execution.gates import ApprovalGate, GateController, GateStatus +from dagron.execution._types import ExecutionCallbacks + +# 1. Build the DAG +dag = ( + dagron.DAG.builder() + .add_node("build") + .add_node("test") + .add_node("review_gate") + .add_node("deploy_staging") + .add_node("deploy_gate") + .add_node("deploy_prod") + .add_edge("build", "test") + .add_edge("test", "review_gate") + .add_edge("review_gate", "deploy_staging") + .add_edge("deploy_staging", "deploy_gate") + .add_edge("deploy_gate", "deploy_prod") + .build() +) + +# 2. Set up gates +controller = GateController({ + "review_gate": ApprovalGate(timeout=600), + "deploy_gate": ApprovalGate(timeout=300), +}) + +# 3. Create tasks that wait at gates +def make_gate_task(gate_name): + """Create a task that blocks on a gate.""" + def task(): + print(f" Waiting for approval at '{gate_name}'...") + controller.wait_sync(gate_name) + print(f" Gate '{gate_name}' approved!") + return "approved" + return task + +tasks = { + "build": lambda: print(" Building...") or "build-ok", + "test": lambda: print(" Testing...") or "test-ok", + "review_gate": make_gate_task("review_gate"), + "deploy_staging": lambda: print(" Deploying to staging...") or "staging-ok", + "deploy_gate": make_gate_task("deploy_gate"), + "deploy_prod": lambda: print(" Deploying to production!") or "prod-ok", +} + +# 4. Auto-approve from a background thread (simulates human operator) +def auto_approver(): + while True: + time.sleep(1) + for name in controller.waiting_gates(): + print(f" [approver] Approving '{name}'") + controller.approve(name) + +approver = threading.Thread(target=auto_approver, daemon=True) +approver.start() + +# 5. 
Execute +executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) +print(f"Completed: {result.succeeded} nodes") +``` + +--- + +## Handling Rejection + +When a gate is rejected, `wait_sync()` raises a `GateRejectedError`. If the executor is running with `fail_fast=True` (the default), all downstream nodes are skipped: + +```python +from dagron.execution.gates import GateRejectedError + +try: + controller.wait_sync("deploy_gate") +except GateRejectedError as e: + print(f"Gate '{e.gate_name}' rejected: {e.reason}") + # With fail_fast, deploy_prod will be skipped +``` + + test --> gate --> deploy + style gate fill:#ffcdd2,stroke:#c62828 + style deploy fill:#e0e0e0,stroke:#9e9e9e`} + caption="When the deploy_gate is rejected, downstream nodes are skipped." +/> + +--- + +## Handling Timeouts + +If no decision is made before the timeout expires, `GateTimeoutError` is raised: + +```python +from dagron.execution.gates import ApprovalGate, GateTimeoutError + +gate = ApprovalGate(timeout=5) # 5-second timeout + +try: + gate.wait_sync() +except GateTimeoutError as e: + print(f"Timed out after {e.timeout}s") + print(f"Gate status: {gate.status}") # GateStatus.TIMED_OUT +``` + +--- + +## Async Gates + +Gates work seamlessly with async code: + +```python +import asyncio +from dagron.execution.gates import ApprovalGate + +gate = ApprovalGate(timeout=60) + +async def wait_for_approval(): + print("Waiting for approval...") + await gate.wait_async() + print("Approved!") + +async def approve_later(): + await asyncio.sleep(2) + gate.approve() + +async def main(): + await asyncio.gather( + wait_for_approval(), + approve_later(), + ) + +asyncio.run(main()) +``` + +The `GateController` also provides `wait_async()`: + +```python +await controller.wait_async("deploy_gate") +``` + +--- + +## Auto-Approve for Testing + +During development and testing, you often want to skip the human approval step. 
Set `auto_approve=True`: + +```python +gate = ApprovalGate(auto_approve=True) +print(gate.status) # GateStatus.APPROVED -- immediately approved + +# wait_sync() returns immediately +gate.wait_sync() +``` + +You can use this to build a test-mode controller: + +```python +def make_controller(test_mode=False): + return GateController({ + "qa_review": ApprovalGate( + timeout=None if test_mode else 600, + auto_approve=test_mode, + ), + "prod_deploy": ApprovalGate( + timeout=None if test_mode else 300, + auto_approve=test_mode, + ), + }) + +# In tests: +controller = make_controller(test_mode=True) + +# In production: +controller = make_controller(test_mode=False) +``` + +--- + +## Resetting Gates + +Gates can be reset for reuse. This is useful for retry loops: + +```python +gate = ApprovalGate(timeout=60) +gate.reject("bad config") +print(gate.status) # GateStatus.REJECTED + +gate.reset() +print(gate.status) # GateStatus.PENDING -- ready for another round + +# Reset all gates in a controller +controller.reset_all() +``` + +--- + +## Combining with the Dashboard + +The [DashboardPlugin](/guide/advanced/plugins-hooks) integrates with gates to provide a web UI for approving and rejecting: + +```python +from dagron.dashboard import DashboardPlugin +from dagron.execution.gates import ApprovalGate, GateController + +controller = GateController({ + "review": ApprovalGate(timeout=600), + "deploy": ApprovalGate(timeout=300), +}) + +dashboard = DashboardPlugin( + port=8765, + gate_controller=controller, # wires approve/reject buttons in the UI + open_browser=True, +) +``` + +When a gate enters the `WAITING` state, the dashboard shows an **Approve** / **Reject** button that operators can click from their browser. 
+ +--- + +## GateController API Summary + +```python +controller = GateController({ + "qa": ApprovalGate(timeout=600), + "prod": ApprovalGate(timeout=300), +}) + +# Add a gate after construction +controller.add_gate("staging", ApprovalGate()) + +# Approve / reject +controller.approve("qa") +controller.reject("prod", reason="Canary failed") + +# Query +controller.status("qa") # GateStatus.APPROVED +controller.waiting_gates() # ["staging"] +controller.has_gate("qa") # True +controller.get_gate("qa") # ApprovalGate instance or None + +# Wait (blocking) +controller.wait_sync("staging") + +# Wait (async) +await controller.wait_async("staging") + +# Reset all gates to PENDING +controller.reset_all() +``` + +--- + +## Patterns and Best Practices + +### Pattern: HTTP Webhook Approval + +Expose an HTTP endpoint that approves gates when called by a CI system: + +```python +from fastapi import FastAPI +from dagron.execution.gates import GateController + +app = FastAPI() + +# Shared controller (in-process) +controller: GateController = ... + +@app.post("/gates/{gate_name}/approve") +def approve_gate(gate_name: str): + controller.approve(gate_name) + return {"status": "approved"} + +@app.post("/gates/{gate_name}/reject") +def reject_gate(gate_name: str, reason: str = ""): + controller.reject(gate_name, reason=reason) + return {"status": "rejected"} +``` + +### Pattern: Slack Approval Bot + +Poll `waiting_gates()` and post to Slack when a gate is waiting: + +```python +import time + +while True: + for name in controller.waiting_gates(): + send_slack_message(f"Gate '{name}' needs approval. 
Reply /approve or /reject") + time.sleep(10) +``` + +### Pattern: Conditional Auto-Approval + +Auto-approve gates based on runtime conditions: + +```python +def conditional_approver(controller, rules): + """Auto-approve gates that pass predefined rules.""" + for name in controller.waiting_gates(): + rule = rules.get(name) + if rule and rule(): + controller.approve(name) + +# Example: auto-approve QA if all tests pass +rules = { + "qa_review": lambda: all_tests_passed, +} +``` + +--- + +## Related + +- [API Reference: Gates](/api/execution/gates) -- full API documentation. +- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- using DashboardPlugin for gate management. +- [Executing Tasks](/guide/core-concepts/executing-tasks) -- standard execution model. +- [Error Handling](/guide/observability/error-handling) -- how `GateRejectedError` and `GateTimeoutError` fit into the error hierarchy. diff --git a/docs-next/content/docs/guide/execution-strategies/caching.mdx b/docs-next/content/docs/guide/execution-strategies/caching.mdx new file mode 100644 index 0000000..6dda20d --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/caching.mdx @@ -0,0 +1,457 @@ +--- +title: Caching +description: Content-addressable Merkle-tree caching for cross-run DAG execution with dagron. +--- + +# Caching + +dagron's caching system provides **content-addressable Merkle-tree caching** for DAG execution results. When you re-execute a pipeline, nodes whose inputs have not changed return their cached result instantly -- no recomputation needed. This is conceptually similar to how build systems like Bazel and Nix work: change any upstream node and all downstream cache keys automatically invalidate. + + B --> C + A --> D + style A fill:#c8e6c9,stroke:#2e7d32 + style B fill:#c8e6c9,stroke:#2e7d32 + style C fill:#fff9c4,stroke:#f9a825 + style D fill:#c8e6c9,stroke:#2e7d32`} + caption="Green nodes are cache hits. Yellow nodes had an upstream change and must re-execute." 
+/> + +--- + +## How Merkle-Tree Keys Work + +For each node, dagron computes a cache key from three inputs: + +1. **Node name** -- the identity of the node. +2. **Task source hash** -- a hash of the callable's source code (or bytecode as fallback). +3. **Predecessor result hashes** -- the hashes of all upstream nodes' results, sorted by name. + +``` +cache_key = SHA256( node_name || task_source_hash || pred1_name:pred1_hash || pred2_name:pred2_hash ) +``` + +Because each node's key includes the hashes of its predecessors' results, a change to **any** upstream node automatically produces a different key for all downstream nodes. This is the "Merkle tree" property -- changes propagate without explicitly tracking what changed. + +```mermaid +graph TD + A["extract
key: sha256(extract, src_hash_A)"] + B["transform
key: sha256(transform, src_hash_B, extract:result_hash_A)"] + C["load
key: sha256(load, src_hash_C, transform:result_hash_B)"] + A --> B --> C +``` + +--- + +## Core Classes + +| Class | Role | +|---|---| +| [`CachedDAGExecutor`](/api/execution/caching#cacheddagexecutor) | Executes a DAG with caching. On cache hit, returns stored result without running the task. | +| [`ContentAddressableCache`](/api/execution/caching#contentaddressablecache) | High-level cache that manages Merkle-tree key computation and delegates storage to a backend. | +| [`CacheKeyBuilder`](/api/execution/caching#cachekeybuilder) | Computes SHA-256 cache keys from node name, task hash, and predecessor hashes. | +| [`CachePolicy`](/api/execution/caching#cachepolicy) | Eviction rules: `max_entries`, `max_size_bytes`, `ttl_seconds`. | +| [`FileSystemCacheBackend`](/api/execution/caching#filesystemcachebackend) | Stores cached values as pickle files on disk with an index for LRU/TTL tracking. | +| [`CacheStats`](/api/execution/caching#cachestats) | Hit count, miss count, eviction count, total entries, total size, and computed `hit_rate`. | +| [`CacheKeyProtocol`](/api/execution/caching#cachekeyprotocol) | Protocol for objects that provide their own cache key via `__dagron_cache_key__()`. | + +--- + +## Quick Start + +```python +import dagron +from dagron.execution.cached_executor import CachedDAGExecutor +from dagron.execution.content_cache import ( + CachePolicy, + ContentAddressableCache, + FileSystemCacheBackend, +) + +# 1. Build the DAG +dag = ( + dagron.DAG.builder() + .add_node("fetch") + .add_node("clean") + .add_node("aggregate") + .add_node("report") + .add_edge("fetch", "clean") + .add_edge("clean", "aggregate") + .add_edge("aggregate", "report") + .build() +) + +# 2. Create a cache backend with eviction policy +policy = CachePolicy( + max_entries=1000, + max_size_bytes=500 * 1024 * 1024, # 500 MB + ttl_seconds=3600, # 1 hour +) +backend = FileSystemCacheBackend("/tmp/dagron_cache", policy=policy) +cache = ContentAddressableCache(backend) + +# 3. 
Define tasks +tasks = { + "fetch": lambda: {"rows": 10000}, + "clean": lambda: {"rows": 9800}, + "aggregate": lambda: {"total": 42.0}, + "report": lambda: "Report generated", +} + +# 4. First run -- all cache misses +executor = CachedDAGExecutor(dag, cache) +result = executor.execute(tasks) +print(f"Hits: {result.cache_hits}, Misses: {result.cache_misses}") +# Hits: 0, Misses: 4 + +# 5. Second run -- all cache hits (tasks unchanged) +result = executor.execute(tasks) +print(f"Hits: {result.cache_hits}, Misses: {result.cache_misses}") +# Hits: 4, Misses: 0 +``` + +--- + +## CachedExecutionResult + +The `CachedDAGExecutor.execute()` method returns a `CachedExecutionResult` that wraps the standard `ExecutionResult` and adds cache-specific statistics: + +```python +result = executor.execute(tasks) + +# Standard execution stats +print(result.execution_result.succeeded) +print(result.execution_result.failed) + +# Cache stats +print(f"Cache hits: {result.cache_hits}") +print(f"Cache misses: {result.cache_misses}") +print(f"Nodes executed (cache miss): {result.nodes_executed}") +print(f"Nodes cached (cache hit): {result.nodes_cached}") +``` + +When a node is a cache hit, its result status is `NodeStatus.CACHE_HIT` and `duration_seconds` is `0.0`. + +--- + +## Cache Invalidation + +The Merkle-tree approach provides **automatic invalidation**. You never need to manually invalidate cache entries. 
Here is how changes propagate: + +### Change a task's code + +If you modify the source code of a task function, its source hash changes, which changes its cache key: + +```python +# Original +tasks["clean"] = lambda: {"rows": 9800} + +# Modified -- different source, different key +tasks["clean"] = lambda: {"rows": 9800, "filtered": True} + +result = executor.execute(tasks) +# clean is a cache miss, and so are aggregate and report (downstream) +``` + +### Change an upstream result + +If `fetch` returns different data, its result hash changes, which invalidates `clean`, `aggregate`, and `report`: + +```python +tasks["fetch"] = lambda: {"rows": 20000} # different result + +result = executor.execute(tasks) +# All 4 nodes are cache misses because the root changed +``` + +### Unchanged branches stay cached + +If only one branch changes, the other branch remains cached: + + clean + clean --> stats + clean --> model + stats --> report + model --> report + style raw fill:#fff9c4,stroke:#f9a825 + style clean fill:#fff9c4,stroke:#f9a825 + style stats fill:#fff9c4,stroke:#f9a825 + style model fill:#c8e6c9,stroke:#2e7d32 + style report fill:#fff9c4,stroke:#f9a825`} + caption="If raw_data changes but the model task source is identical, model may still be a cache hit if its upstream result hash is the same." +/> + +--- + +## CachePolicy and Eviction + +The `CachePolicy` controls when old entries are evicted: + +```python +from dagron.execution.content_cache import CachePolicy + +policy = CachePolicy( + max_entries=500, # LRU eviction after 500 entries + max_size_bytes=1_073_741_824, # 1 GB total + ttl_seconds=7200, # entries expire after 2 hours +) +``` + +| Parameter | Behavior | +|---|---| +| `max_entries` | When the entry count exceeds this, the **least recently accessed** entry is evicted. | +| `max_size_bytes` | When total size exceeds this, LRU entries are evicted until the size is under the limit. 
| +| `ttl_seconds` | Entries older than this are treated as expired on read and automatically removed. | + +All three constraints are checked during `put()`. TTL is also checked during `get()`. + +--- + +## FileSystemCacheBackend + +The `FileSystemCacheBackend` stores values as pickle files and maintains a JSON index for metadata: + +```python +from dagron.execution.content_cache import FileSystemCacheBackend + +backend = FileSystemCacheBackend( + cache_dir="/var/cache/dagron/my_pipeline", + policy=CachePolicy(max_entries=1000, ttl_seconds=86400), +) +``` + +The directory structure looks like: + +``` +/var/cache/dagron/my_pipeline/ + index.json # metadata index (atomic writes) + a1b2c3d4e5f6g7h8.pkl # pickled result (key prefix) + ... +``` + +The backend uses **atomic writes** (`write to .tmp, then rename`) so that cache corruption from crashes is avoided. + +### Backend Operations + +```python +# Get a cached value +value, found = backend.get("sha256_key_here") + +# Store a value +from dagron.execution.content_cache import CacheEntryMetadata +meta = CacheEntryMetadata(node_name="clean", cache_key="sha256_key_here") +backend.put("sha256_key_here", {"rows": 9800}, meta) + +# Check existence +exists = backend.has("sha256_key_here") + +# Delete a specific entry +backend.delete("sha256_key_here") + +# Clear the entire cache +backend.clear() + +# Get statistics +stats = backend.stats() +print(f"Entries: {stats.total_entries}, Size: {stats.total_size_bytes} bytes") +``` + +--- + +## CacheStats + +After execution, inspect cache health: + +```python +stats = cache.stats() +print(f"Hits: {stats.hits}") +print(f"Misses: {stats.misses}") +print(f"Evictions: {stats.evictions}") +print(f"Entries: {stats.total_entries}") +print(f"Size: {stats.total_size_bytes / 1024 / 1024:.1f} MB") +print(f"Hit rate: {stats.hit_rate:.1%}") +``` + +A healthy pipeline running repeatedly should converge toward a high `hit_rate` (90%+ when only a few nodes change between runs). 
+ +--- + +## Custom Cache Keys with CacheKeyProtocol + +If your task returns objects that are not easily serializable via `pickle`, implement the `CacheKeyProtocol`: + +```python +from dagron.execution.content_cache import CacheKeyProtocol + +class TrainedModel: + def __init__(self, weights_path: str, metrics: dict): + self.weights_path = weights_path + self.metrics = metrics + + def __dagron_cache_key__(self) -> str: + """Return a stable, deterministic cache key.""" + import hashlib + content = f"{self.weights_path}:{sorted(self.metrics.items())}" + return hashlib.sha256(content.encode()).hexdigest() +``` + +When dagron hashes this object's result, it calls `__dagron_cache_key__()` instead of pickling the entire object. This is useful for: + +- Large objects where pickle is expensive. +- Objects with non-deterministic pickle output. +- Objects that reference external state (file paths, database connections). + +--- + +## ContentAddressableCache API + +The `ContentAddressableCache` is the high-level interface you pass to `CachedDAGExecutor`: + +```python +from dagron.execution.content_cache import ContentAddressableCache + +cache = ContentAddressableCache(backend) + +# Compute a cache key for a node +key = cache.compute_key( + node_name="clean", + task_fn=clean_fn, + predecessor_result_hashes={"fetch": "a1b2c3..."}, +) + +# Get / put / check +value, found = cache.get(key) +cache.put(key, {"rows": 9800}, node_name="clean") +cache.has(key) + +# Clear all entries +cache.clear() + +# Get stats +stats = cache.stats() +``` + +--- + +## Combining Caching with Other Features + +### Caching + Tracing + +Enable tracing to see which nodes were cache hits vs misses in the execution timeline: + +```python +executor = CachedDAGExecutor(dag, cache, enable_tracing=True) +result = executor.execute(tasks) + +trace = result.execution_result.trace +if trace: + trace.to_chrome_json("cached_run.json") +``` + +The trace will include `NODE_CACHE_HIT` and `NODE_CACHE_MISS` events. 
+ +### Caching + Fail-Fast + +By default, `fail_fast=True`. If a node fails, downstream nodes are skipped (not cached): + +```python +executor = CachedDAGExecutor(dag, cache, fail_fast=True) +``` + +### Caching vs Incremental Execution + +dagron also provides an [IncrementalExecutor](/guide/execution-strategies/incremental) that re-executes only nodes in the "dirty set" of explicitly changed nodes. The key difference: + +| Feature | CachedDAGExecutor | IncrementalExecutor | +|---|---|---| +| Cross-run persistence | Yes (disk-backed) | No (in-memory) | +| Invalidation | Automatic (Merkle keys) | Manual (`changed_nodes` list) | +| Overhead | Hash computation + disk I/O | Dirty-set computation | +| Best for | CI pipelines, batch jobs | Interactive/reactive workflows | + +You can use both together: `CachedDAGExecutor` for cross-run caching and `IncrementalExecutor` for within-run incremental updates. + +--- + +## Writing a Custom Backend + +Implement the `CacheBackend` protocol to use Redis, S3, or any storage system: + +```python +from dagron.execution.content_cache import CacheBackend, CacheEntryMetadata, CacheStats + +class RedisCacheBackend: + """Example Redis-backed cache backend.""" + + def __init__(self, redis_url: str): + import redis + self._client = redis.from_url(redis_url) + self._stats = CacheStats() + + def get(self, key: str) -> tuple[any, bool]: + data = self._client.get(f"dagron:{key}") + if data is None: + self._stats.misses += 1 + return None, False + import pickle + self._stats.hits += 1 + return pickle.loads(data), True + + def put(self, key: str, value: any, metadata: CacheEntryMetadata) -> None: + import pickle + data = pickle.dumps(value) + self._client.set(f"dagron:{key}", data) + + def has(self, key: str) -> bool: + return self._client.exists(f"dagron:{key}") > 0 + + def delete(self, key: str) -> None: + self._client.delete(f"dagron:{key}") + + def clear(self) -> None: + for key in self._client.scan_iter("dagron:*"): + self._client.delete(key) 
+ + def stats(self) -> CacheStats: + return self._stats + +# Usage +backend = RedisCacheBackend("redis://localhost:6379") +cache = ContentAddressableCache(backend) +executor = CachedDAGExecutor(dag, cache) +``` + +--- + +## Best Practices + +1. **Use a TTL in production.** Without a TTL, stale cache entries from old pipeline versions can accumulate. A TTL of 24-48 hours is a reasonable default. + +2. **Set `max_size_bytes`.** Prevent the cache from consuming unbounded disk space. + +3. **Use `CacheKeyProtocol` for large objects.** If your nodes return multi-GB DataFrames, implement `__dagron_cache_key__()` to hash a fingerprint instead of the full data. + +4. **Monitor `hit_rate`.** A low hit rate suggests frequent code changes or non-deterministic task outputs. Check `CacheStats` after each run. + +5. **Share caches across CI runs.** Mount the cache directory as a persistent volume in your CI system to get cross-run cache hits. + +--- + +## Related + +- [API Reference: Caching](/api/execution/caching) -- full API documentation. +- [Incremental Execution](/guide/execution-strategies/incremental) -- in-memory dirty-set-based re-execution. +- [Tracing & Profiling](/guide/observability/tracing-profiling) -- visualizing cache hit/miss events. +- [Checkpointing](/guide/execution-strategies/checkpointing) -- saving and resuming execution state. diff --git a/docs-next/content/docs/guide/execution-strategies/checkpointing.mdx b/docs-next/content/docs/guide/execution-strategies/checkpointing.mdx new file mode 100644 index 0000000..47cfe14 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/checkpointing.mdx @@ -0,0 +1,473 @@ +--- +title: Checkpointing +description: Save execution progress to disk and resume after failures — CheckpointExecutor for fault-tolerant, resumable DAG pipelines. +--- + +# Checkpointing + +Long-running pipelines fail. Networks drop, machines reboot, dependencies crash. 
+Without checkpointing, a failure at step 95 of 100 means re-running all 95 +successful steps. dagron's [`CheckpointExecutor`](/api/execution/checkpoint) saves +progress to disk after each node completes, so you can resume from exactly where +you left off. + +## Concepts + +### Checkpoint directory + +The `CheckpointExecutor` writes checkpoint files to a directory you specify. Each +file records the status and result of a completed node. On resume, the executor +reads these files, skips already-completed nodes, and picks up from the first +incomplete node. + +### Execute / resume cycle + +1. **First run**: call `.execute(tasks)`. If everything succeeds, checkpoint + files are cleaned up automatically. +2. **Failure**: some nodes fail. Checkpoint files for completed nodes remain on + disk. +3. **Resume**: call `.resume(tasks)`. The executor reads the checkpoint, skips + completed nodes, and retries the failed and remaining nodes. + +### Checkpoint info + +The `.checkpoint_info()` method returns metadata about the current checkpoint +state: which nodes are completed, which failed, and what the last run time was. + +## Basic usage + +```python +import dagron +import time + +dag = ( + dagron.DAG.builder() + .add_edge("extract", "transform") + .add_edge("transform", "validate") + .add_edge("validate", "load") + .add_edge("load", "notify") + .build() +) +``` + + transform --> validate --> load --> notify`} + caption="Five-step pipeline. We will simulate a failure at the 'load' step." 
+/> + +### First run with a failure + +```python +call_count = 0 + +def flaky_load(): + """Simulates a task that fails on the first attempt.""" + global call_count + call_count += 1 + if call_count == 1: + raise ConnectionError("Database connection lost") + return "loaded 1000 rows" + +tasks = { + "extract": lambda: time.sleep(2) or "extracted 1000 rows", + "transform": lambda: time.sleep(1) or "transformed", + "validate": lambda: "all rows valid", + "load": flaky_load, + "notify": lambda: "email sent", +} + +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/pipeline_checkpoint") +result = executor.execute(tasks) + +print(result.succeeded) # 3 (extract, transform, validate) +print(result.failed) # 1 (load) +print(result.skipped) # 1 (notify) +``` + +After this run, the checkpoint directory contains files for the three completed +nodes: + +| Node | Status | Checkpointed? | +|------|--------|:------------:| +| extract | | Yes | +| transform | | Yes | +| validate | | Yes | +| load | | No | +| notify | | No | + +### Resume after fixing the issue + +```python +# The flaky_load function will succeed on the second call (call_count is now 1) +result = executor.resume(tasks) + +print(result.succeeded) # 2 (load, notify — the rest were restored from checkpoint) +print(result.failed) # 0 +``` + +| Node | Status | Source | +|------|--------|--------| +| extract | | Restored from checkpoint | +| transform | | Restored from checkpoint | +| validate | | Restored from checkpoint | +| load | | Re-executed | +| notify | | Executed | + + transform:::restored --> validate:::restored --> load:::rerun --> notify:::rerun`} + caption="On resume, extract/transform/validate are restored from checkpoint (blue). Only load and notify execute (green)." +/> + +The expensive `extract` (2 seconds) and `transform` (1 second) steps were not +re-run, saving 3 seconds on the resume. 
+ +## Constructor + +```python +dagron.CheckpointExecutor( + dag, # The DAG to execute + checkpoint_dir, # Path to the checkpoint directory (str or Path) +) +``` + +The directory is created automatically if it does not exist. + +## API methods + +### execute(tasks) + +Runs the pipeline from scratch, checkpointing each completed node: + +```python +result = executor.execute(tasks) +``` + +If the entire pipeline succeeds, checkpoint files are cleaned up. If any node +fails, checkpoint files for completed nodes remain. + +### resume(tasks) + +Reads the checkpoint and resumes from the last incomplete node: + +```python +result = executor.resume(tasks) +``` + +Nodes that were previously completed are not re-executed — their results are +restored from the checkpoint. + +### checkpoint_info() + +Returns metadata about the current checkpoint: + +```python +info = executor.checkpoint_info() +print(info) +``` + +Output: + +```python +CheckpointInfo( + exists=True, + completed_nodes=['extract', 'transform', 'validate'], + failed_nodes=['load'], + skipped_nodes=['notify'], + last_run_time='2025-01-15T10:30:00', + total_checkpointed=3, +) +``` + +Use this to build monitoring dashboards or decide whether to resume or +start fresh. + +### clear_checkpoint() + +Remove all checkpoint files: + +```python +executor.clear_checkpoint() +info = executor.checkpoint_info() +print(info.exists) # False +``` + +This is useful when you want to force a full re-run. 
+ +## Complete example: ETL with retry + +Here is a production-style pattern with automatic retry: + +```python +import dagron +import time +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("pipeline") + +dag = ( + dagron.DAG.builder() + .add_nodes(["fetch_api", "fetch_db", "merge", "transform", + "validate", "load", "update_dashboard"]) + .add_edges([ + ("fetch_api", "merge"), + ("fetch_db", "merge"), + ("merge", "transform"), + ("transform", "validate"), + ("validate", "load"), + ("load", "update_dashboard"), + ]) + .build() +) + +tasks = { + "fetch_api": lambda: time.sleep(5) or {"api_rows": 10000}, + "fetch_db": lambda: time.sleep(3) or {"db_rows": 50000}, + "merge": lambda: {"total_rows": 60000}, + "transform": lambda: time.sleep(10) or {"transformed": 60000}, + "validate": lambda: {"valid": True, "bad_rows": 0}, + "load": lambda: time.sleep(2) or "loaded", + "update_dashboard": lambda: "dashboard updated", +} + +CHECKPOINT_DIR = "/var/data/pipeline_checkpoints/etl_daily" + +def run_with_retry(max_attempts=3): + executor = dagron.CheckpointExecutor(dag, checkpoint_dir=CHECKPOINT_DIR) + + # Check if there is an existing checkpoint to resume + info = executor.checkpoint_info() + if info.exists: + logger.info(f"Found checkpoint with {info.total_checkpointed} completed nodes") + logger.info(f"Resuming from last failure...") + result = executor.resume(tasks) + else: + logger.info("Starting fresh execution") + result = executor.execute(tasks) + + # Retry loop for transient failures + attempts = 1 + while result.failed > 0 and attempts < max_attempts: + attempts += 1 + logger.warning(f"Attempt {attempts}: {result.failed} nodes failed, retrying...") + time.sleep(5) # back off before retry + result = executor.resume(tasks) + + if result.failed > 0: + logger.error(f"Pipeline failed after {attempts} attempts") + # Leave checkpoint for manual inspection + else: + logger.info(f"Pipeline succeeded in {attempts} attempt(s)") + # 
Checkpoint auto-cleaned on full success + + return result + +result = run_with_retry() +``` + + merge + fetch_db --> merge + merge --> transform --> validate --> load --> update_dashboard`} + caption="ETL pipeline with automatic checkpoint-based retry." +/> + +## Checkpointing with parallel branches + +Checkpointing works correctly with fan-out / fan-in topologies. Each completed +node is checkpointed independently: + +```python +dag = ( + dagron.DAG.builder() + .add_edge("source", "branch_a") + .add_edge("source", "branch_b") + .add_edge("source", "branch_c") + .add_edge("branch_a", "join") + .add_edge("branch_b", "join") + .add_edge("branch_c", "join") + .build() +) +``` + +If `branch_b` fails but `source`, `branch_a`, and `branch_c` succeed, the +checkpoint stores all three. On resume, only `branch_b` and `join` need to +run. + +| Node | First run | Resume | +|------|-----------|--------| +| source | | Restored | +| branch_a | | Restored | +| branch_b | | Re-executed | +| branch_c | | Restored | +| join | | Executed | + + branch_a:::restored --> join:::rerun + source --> branch_b:::rerun --> join + source --> branch_c:::restored --> join`} + caption="On resume, only branch_b and join need to execute." 
+/> + +## Monitoring checkpoint state + +Build observability around checkpoint state: + +```python +def monitor_pipeline(executor): + info = executor.checkpoint_info() + + if not info.exists: + print("No checkpoint found — pipeline has not run or completed cleanly.") + return + + total = dag.node_count() + completed = info.total_checkpointed + pct = (completed / total) * 100 + + print(f"Pipeline progress: {completed}/{total} ({pct:.0f}%)") + print(f"Completed: {info.completed_nodes}") + print(f"Failed: {info.failed_nodes}") + print(f"Remaining: {total - completed - len(info.failed_nodes)}") + print(f"Last run: {info.last_run_time}") +``` + +### Integrating with alerting + +```python +import json + +def checkpoint_to_metrics(executor): + """Export checkpoint state as metrics for Prometheus/DataDog.""" + info = executor.checkpoint_info() + return { + "pipeline.checkpoint.exists": 1 if info.exists else 0, + "pipeline.checkpoint.completed": info.total_checkpointed, + "pipeline.checkpoint.failed": len(info.failed_nodes), + "pipeline.checkpoint.progress_pct": ( + info.total_checkpointed / dag.node_count() * 100 + if info.exists else 0 + ), + } +``` + +## Checkpoint directory structure + +The checkpoint directory contains one file per completed node plus a metadata +file: + +``` +/tmp/pipeline_checkpoint/ + _metadata.json # Run metadata (start time, DAG hash, etc.) + extract.checkpoint # Serialized NodeResult for 'extract' + transform.checkpoint # Serialized NodeResult for 'transform' + validate.checkpoint # Serialized NodeResult for 'validate' +``` + + +Do not modify checkpoint files manually. The executor validates file integrity +on resume and will reject tampered checkpoints. + +## When to clear checkpoints + +Clear checkpoints when: + +- **The DAG structure changed.** Adding or removing nodes invalidates the + checkpoint. The executor detects this and raises an error on resume. 
+- **Task logic changed.** If you fixed a bug in a task that already + checkpointed successfully, you need to re-run it. +- **You want a clean start.** For periodic batch pipelines, clear the + checkpoint before each scheduled run. + +```python +# Before a scheduled daily run +executor.clear_checkpoint() +result = executor.execute(tasks) +``` + +## Combining with other features + +### Checkpointing + Incremental execution + +Use checkpointing for fault tolerance and +[incremental execution](/guide/execution-strategies/incremental) for change-based +optimization: + +```python +# First: use IncrementalExecutor to determine what needs to run +inc_executor = dagron.IncrementalExecutor(dag) +inc_result = inc_executor.execute(tasks, changed_nodes=["source"]) + +# Then: use CheckpointExecutor for fault tolerance on the re-execution +cp_executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/cp") +cp_result = cp_executor.execute(tasks) +``` + +### Checkpointing + Tracing + +```python +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/cp") +result = executor.execute(tasks) + +# Note: tracing captures only the nodes that actually executed, +# not the ones restored from checkpoint. +``` + +### Checkpointing + Conditional execution + +Checkpoint files record the status of conditionally-skipped nodes, so resume +correctly handles conditional branches: + +```python +# A conditionally-skipped node is recorded as SKIPPED in the checkpoint +# and remains skipped on resume. +``` + +## Best practices + +1. **Use unique checkpoint directories.** For concurrent pipeline runs, use + unique directories (e.g., include a run ID or timestamp): + ```python + checkpoint_dir = f"/tmp/checkpoints/run_{run_id}" + ``` + +2. **Clean up old checkpoints.** Implement a retention policy to avoid + accumulating stale checkpoint directories. + +3. **Monitor checkpoint size.** Node results are serialized to disk. 
If + tasks return large objects (dataframes, models), checkpoint files can grow + large. Consider returning metadata references instead. + +4. **Handle DAG changes gracefully.** Before resuming, compare the current DAG + hash against the checkpoint metadata. If they differ, clear and re-run. + +5. **Test your resume path.** Deliberately inject failures in tests and verify + that resume produces the correct final results. + +6. **Set appropriate file permissions.** Checkpoint files may contain sensitive + results. Ensure the checkpoint directory has restrictive permissions. + +## API reference + +| Class / Method | Docs | +|----------------|------| +| `CheckpointExecutor` | [Checkpoint](/api/execution/checkpoint) | +| `CheckpointExecutor.execute()` | [Checkpoint](/api/execution/checkpoint) | +| `CheckpointExecutor.resume()` | [Checkpoint](/api/execution/checkpoint) | +| `CheckpointExecutor.checkpoint_info()` | [Checkpoint](/api/execution/checkpoint) | +| `CheckpointExecutor.clear_checkpoint()` | [Checkpoint](/api/execution/checkpoint) | + +## Next steps + +- [Incremental Execution](/guide/execution-strategies/incremental) — only re-run what changed. +- [Tracing & Profiling](/guide/observability/tracing-profiling) — analyze resume execution performance. +- [Conditional Execution](/guide/execution-strategies/conditional) — gate branches with runtime predicates. +- [Getting Started](/guide/getting-started) — back to the basics. diff --git a/docs-next/content/docs/guide/execution-strategies/conditional.mdx b/docs-next/content/docs/guide/execution-strategies/conditional.mdx new file mode 100644 index 0000000..d3e1972 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/conditional.mdx @@ -0,0 +1,452 @@ +--- +title: Conditional Execution +description: Gate DAG branches with runtime predicates — skip nodes dynamically based on upstream results using ConditionalDAGBuilder and ConditionalExecutor. 
+---
+
+# Conditional Execution
+
+Not every branch in a pipeline should always run. Sometimes you want to skip
+expensive model training if validation fails, route data to different processors
+based on its type, or gate a deployment step on quality metrics. dagron's
+**conditional execution** system lets you attach predicates to edges that
+are evaluated at runtime, dynamically gating which branches execute.
+
+## Concepts
+
+### Conditional edges
+
+A **conditional edge** is an edge with an attached predicate — a Python function
+that receives the upstream node's result and returns `True` (execute the
+downstream node) or `False` (skip it).
+
+```
+condition: lambda result: result["score"] > 0.9
+```
+
+When a conditional edge evaluates to `False`, the downstream node and all of
+its descendants are skipped.
+
+### ConditionalDAGBuilder
+
+The [`ConditionalDAGBuilder`](/api/execution/conditions) extends the standard builder
+with support for condition predicates on edges.
+
+### ConditionalExecutor
+
+The [`ConditionalExecutor`](/api/execution/conditions) evaluates conditions at
+runtime and only dispatches nodes whose incoming conditions are satisfied.
+
+## Building a conditional DAG
+
+```python
+import dagron
+
+builder = dagron.ConditionalDAGBuilder()
+
+builder.add_node("validate")
+builder.add_node("fast_path")
+builder.add_node("slow_path")
+builder.add_node("merge")
+
+# Conditional edges — exactly one matches at runtime
+builder.add_edge("validate", "fast_path", condition=lambda r: r["size"] < 1000)
+builder.add_edge("validate", "slow_path", condition=lambda r: r["size"] >= 1000)
+# Unconditional edges
+builder.add_edge("fast_path", "merge")
+builder.add_edge("slow_path", "merge")
+
+dag, conditions = builder.build()
+```
+
+The `.build()` method returns a tuple: the DAG and a dictionary of conditions
+keyed by `(from_node, to_node)` tuples.
+
+|"size < 1000"| fast_path --> merge
+    validate -->|"size >= 1000"| slow_path --> merge`}
+  caption="Conditional branching. Only one path executes based on the validation result."
+/>
+
+## Executing with conditions
+
+```python
+tasks = {
+    "validate": lambda: {"size": 500, "valid": True},
+    "fast_path": lambda: "processed quickly",
+    "slow_path": lambda: "processed with full pipeline",
+    "merge": lambda: "done",
+}
+
+executor = dagron.ConditionalExecutor(dag, conditions)
+result = executor.execute(tasks)
+
+# fast_path runs (size=500 < 1000), slow_path is skipped
+print(result.node_results["fast_path"].status)  # COMPLETED
+print(result.node_results["slow_path"].status)  # SKIPPED
+print(result.node_results["merge"].status)  # COMPLETED
+```
+
+| Node | Status | Why |
+|------|--------|-----|
+| validate | COMPLETED | Always runs (root) |
+| fast_path | COMPLETED | Condition `size < 1000` is True |
+| slow_path | SKIPPED | Condition `size >= 1000` is False |
+| merge | COMPLETED | At least one predecessor completed |
+
+### What if the data is large?
+
+```python
+tasks["validate"] = lambda: {"size": 5000, "valid": True}
+
+result = executor.execute(tasks)
+print(result.node_results["fast_path"].status)  # SKIPPED
+print(result.node_results["slow_path"].status)  # COMPLETED
+```
+
+|"size < 1000 ✗"| fast_path:::skipped
+    validate:::active -->|"size >= 1000 ✓"| slow_path:::active
+    fast_path --> merge:::active
+    slow_path --> merge`}
+  caption="With size=5000, slow_path executes and fast_path is skipped."
+/> + +## Condition predicates + +Conditions are plain Python callables that receive the predecessor's return +value: + +```python +# Simple threshold +condition=lambda result: result > 0.9 + +# Dictionary access +condition=lambda result: result["status"] == "ok" + +# Complex logic +def should_retrain(result): + return ( + result["accuracy"] < 0.95 + or result["data_drift"] > 0.1 + or result["days_since_last_train"] > 7 + ) + +builder.add_edge("evaluate", "retrain", condition=should_retrain) +``` + +### Multi-input conditions + +When a node has multiple conditional predecessors, the executor evaluates each +incoming edge independently. The node runs if **at least one** incoming +conditional edge evaluates to `True`: + +```python +builder = dagron.ConditionalDAGBuilder() +builder.add_node("source_a") +builder.add_node("source_b") +builder.add_node("process") + +builder.add_edge("source_a", "process", condition=lambda r: r is not None) +builder.add_edge("source_b", "process", condition=lambda r: r is not None) + +dag, conditions = builder.build() + +tasks = { + "source_a": lambda: None, # condition False + "source_b": lambda: "data", # condition True + "process": lambda: "processed", +} + +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +print(result.node_results["process"].status) # COMPLETED (source_b passed) +``` + +## Real-world example: ML pipeline with quality gates + +A common pattern is gating deployment on model quality: + +```python +import dagron + +builder = dagron.ConditionalDAGBuilder() + +# Pipeline stages +builder.add_node("load_data") +builder.add_node("train") +builder.add_node("evaluate") +builder.add_node("deploy_prod") +builder.add_node("deploy_staging") +builder.add_node("alert_team") + +# Edges with conditions +builder.add_edge("load_data", "train") +builder.add_edge("train", "evaluate") + +# Gate: deploy to prod only if accuracy >= 0.95 +builder.add_edge("evaluate", "deploy_prod", + condition=lambda r: r["accuracy"] >= 
0.95) + +# Gate: deploy to staging if accuracy between 0.85 and 0.95 +builder.add_edge("evaluate", "deploy_staging", + condition=lambda r: 0.85 <= r["accuracy"] < 0.95) + +# Gate: alert team if accuracy < 0.85 +builder.add_edge("evaluate", "alert_team", + condition=lambda r: r["accuracy"] < 0.85) + +dag, conditions = builder.build() +``` + + train --> evaluate + evaluate -->|"acc >= 0.95"| deploy_prod + evaluate -->|"0.85 <= acc < 0.95"| deploy_staging + evaluate -->|"acc < 0.85"| alert_team`} + caption="ML pipeline with three quality gates. Exactly one downstream path activates." +/> + +```python +# Scenario 1: Great model +tasks = { + "load_data": lambda: "loaded", + "train": lambda: "trained", + "evaluate": lambda: {"accuracy": 0.97, "f1": 0.96}, + "deploy_prod": lambda: "deployed to production!", + "deploy_staging": lambda: "deployed to staging", + "alert_team": lambda: "alert sent", +} + +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +print(result.node_results["deploy_prod"].status) # COMPLETED +print(result.node_results["deploy_staging"].status) # SKIPPED +print(result.node_results["alert_team"].status) # SKIPPED +``` + +```python +# Scenario 2: Mediocre model +tasks["evaluate"] = lambda: {"accuracy": 0.90, "f1": 0.88} +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +print(result.node_results["deploy_prod"].status) # SKIPPED +print(result.node_results["deploy_staging"].status) # COMPLETED +print(result.node_results["alert_team"].status) # SKIPPED +``` + +```python +# Scenario 3: Bad model +tasks["evaluate"] = lambda: {"accuracy": 0.70, "f1": 0.65} +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +print(result.node_results["deploy_prod"].status) # SKIPPED +print(result.node_results["deploy_staging"].status) # SKIPPED +print(result.node_results["alert_team"].status) # COMPLETED +``` + +## Conditional chains + +Conditions propagate through the graph. 
If a conditional edge skips a node, +that node's descendants are also skipped: + +```python +builder = dagron.ConditionalDAGBuilder() +builder.add_node("check") +builder.add_node("step_1") +builder.add_node("step_2") +builder.add_node("step_3") + +builder.add_edge("check", "step_1", condition=lambda r: r["go"]) +builder.add_edge("step_1", "step_2") # unconditional +builder.add_edge("step_2", "step_3") # unconditional + +dag, conditions = builder.build() + +tasks = { + "check": lambda: {"go": False}, + "step_1": lambda: "1", + "step_2": lambda: "2", + "step_3": lambda: "3", +} + +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +``` + +| Node | Status | Why | +|------|--------|-----| +| check | | Root node | +| step_1 | | Condition is False | +| step_2 | | Predecessor skipped | +| step_3 | | Predecessor skipped | + +## Mixing conditional and unconditional edges + +You can freely mix conditional and unconditional edges in the same graph: + +```python +builder = dagron.ConditionalDAGBuilder() + +builder.add_node("extract") +builder.add_node("validate") +builder.add_node("transform") +builder.add_node("quarantine") +builder.add_node("load") + +# Unconditional: extract -> validate +builder.add_edge("extract", "validate") + +# Conditional: validate -> transform (if valid) +builder.add_edge("validate", "transform", + condition=lambda r: r["valid"]) + +# Conditional: validate -> quarantine (if invalid) +builder.add_edge("validate", "quarantine", + condition=lambda r: not r["valid"]) + +# Unconditional: transform -> load +builder.add_edge("transform", "load") + +dag, conditions = builder.build() +``` + + validate + validate -->|valid| transform --> load + validate -->|invalid| quarantine`} + caption="Mixed conditional and unconditional edges. Invalid data is quarantined." 
+/> + +## Combining conditions with fail-fast + +When `fail_fast=True` (the default in the underlying executor), a failure in +any executed node skips its descendants — this combines naturally with +conditional skipping: + +```python +tasks = { + "extract": lambda: "data", + "validate": lambda: {"valid": True}, + "transform": lambda: (_ for _ in ()).throw(RuntimeError("transform error")), + "quarantine": lambda: "quarantined", + "load": lambda: "loaded", +} + +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +``` + +| Node | Status | Why | +|------|--------|-----| +| extract | | Root | +| validate | | Unconditional | +| transform | | Raised exception | +| quarantine | | Condition False (valid=True) | +| load | | Predecessor failed | + +## Debugging conditions + +To understand why a node was skipped, inspect the conditions dictionary: + +```python +dag, conditions = builder.build() + +# List all conditional edges +for (src, dst), predicate in conditions.items(): + print(f"{src} -> {dst}: {predicate}") +``` + +You can also test conditions in isolation: + +```python +validate_result = {"valid": True, "size": 500} + +for (src, dst), predicate in conditions.items(): + if src == "validate": + print(f"validate -> {dst}: {predicate(validate_result)}") +``` + +## Example: data routing pipeline + +Route records to different processors based on their type: + +```python +import dagron + +builder = dagron.ConditionalDAGBuilder() + +builder.add_node("classify") +builder.add_node("process_text") +builder.add_node("process_image") +builder.add_node("process_video") +builder.add_node("store") + +builder.add_edge("classify", "process_text", + condition=lambda r: r["type"] == "text") +builder.add_edge("classify", "process_image", + condition=lambda r: r["type"] == "image") +builder.add_edge("classify", "process_video", + condition=lambda r: r["type"] == "video") + +builder.add_edge("process_text", "store") +builder.add_edge("process_image", "store") 
+builder.add_edge("process_video", "store") + +dag, conditions = builder.build() + +tasks = { + "classify": lambda: {"type": "image", "data": b"..."}, + "process_text": lambda: "text processed", + "process_image": lambda: "image processed", + "process_video": lambda: "video processed", + "store": lambda: "stored", +} + +result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) +print(result.node_results["process_image"].status) # COMPLETED +print(result.node_results["process_text"].status) # SKIPPED +print(result.node_results["process_video"].status) # SKIPPED +``` + +|text| process_text --> store + classify -->|image| process_image --> store + classify -->|video| process_video --> store`} + caption="Data routing. Only the matching processor runs based on the classification result." +/> + +## Best practices + +1. **Keep conditions pure.** Conditions should only examine the input result, + not produce side effects or access external state. + +2. **Handle None results.** If a predecessor might return `None`, guard against + it in your condition: `condition=lambda r: r is not None and r["ok"]`. + +3. **Use descriptive function names.** Named functions are easier to debug than + lambdas: + ```python + def is_high_quality(result): + return result["accuracy"] >= 0.95 + + builder.add_edge("evaluate", "deploy", condition=is_high_quality) + ``` + +4. **Test conditions independently.** Unit-test your predicate functions with + various inputs before wiring them into the DAG. + +5. **Combine with tracing.** Enable tracing to see which conditions fired and + which branches were taken. See [Tracing & Profiling](/guide/observability/tracing-profiling). 
+ +## API reference + +| Class / Method | Docs | +|----------------|------| +| `ConditionalDAGBuilder` | [Conditions](/api/execution/conditions) | +| `ConditionalExecutor` | [Conditions](/api/execution/conditions) | + +## Next steps + +- [Dynamic DAGs](/guide/execution-strategies/dynamic-dags) — expand the graph at runtime based on node results. +- [Incremental Execution](/guide/execution-strategies/incremental) — combine conditions with incremental recomputation. +- [Checkpointing](/guide/execution-strategies/checkpointing) — resume conditional pipelines after failures. diff --git a/docs-next/content/docs/guide/execution-strategies/distributed.mdx b/docs-next/content/docs/guide/execution-strategies/distributed.mdx new file mode 100644 index 0000000..3737f8d --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/distributed.mdx @@ -0,0 +1,556 @@ +--- +title: Distributed Execution +description: Execute DAG nodes across threads, processes, Ray clusters, and Celery workers with pluggable backends. +--- + +# Distributed Execution + +dagron's distributed execution system lets you run DAG nodes across different concurrency and distribution primitives -- from local thread pools to Ray clusters and Celery workers -- using a single, unified API. A pluggable `DistributedBackend` protocol abstracts away the transport, so you can switch from threads to Ray by changing one line. + +For large DAGs, the `PartitionedDAGExecutor` splits the graph into partitions and executes each partition as a unit, minimizing cross-partition communication. 
+ + B + end + subgraph "Partition 2 (Worker B)" + C["feature_eng"] + D["train"] + C --> D + end + subgraph "Partition 3 (Worker C)" + E["evaluate"] + F["deploy"] + E --> F + end + B --> C + D --> E + style A fill:#e3f2fd,stroke:#1565c0 + style B fill:#e3f2fd,stroke:#1565c0 + style C fill:#fff3e0,stroke:#e65100 + style D fill:#fff3e0,stroke:#e65100 + style E fill:#e8f5e9,stroke:#2e7d32 + style F fill:#e8f5e9,stroke:#2e7d32`} + caption="A DAG split into 3 partitions. Each partition runs on a different worker." +/> + +--- + +## Architecture Overview + +There are two main approaches to distributed execution: + +| Executor | Approach | Best for | +|---|---|---| +| [`DistributedExecutor`](/api/execution/distributed#distributedexecutor) | Dispatches individual nodes to a backend by topological level. | Fine-grained distribution where each node runs independently. | +| [`PartitionedDAGExecutor`](/api/execution/distributed#partitioneddagexecutor) | Splits the DAG into k partitions, executes each partition as a sub-DAG. | Coarse-grained distribution that minimizes serialization overhead. | + +Both use the `DistributedBackend` protocol for the actual task dispatch. + +--- + +## DistributedBackend Protocol + +All backends implement three methods: + +```python +class DistributedBackend(Protocol): + @property + def name(self) -> str: ... + + def submit(self, fn, *args, **kwargs) -> Any: + """Submit a callable for execution. Returns a future.""" + ... + + def result(self, future, timeout=None) -> Any: + """Retrieve the result of a submitted task.""" + ... + + def shutdown(self, wait=True) -> None: + """Shut down the backend and release resources.""" + ... +``` + +dagron ships with four backends: + +| Backend | Module | Use case | +|---|---|---| +| `ThreadBackend` | `dagron.execution.backends.thread` | I/O-bound tasks, testing, development. | +| `MultiprocessingBackend` | `dagron.execution.backends.multiprocessing` | CPU-bound tasks on a single machine. 
| +| `RayBackend` | `dagron.execution.backends.ray` | Multi-machine clusters. Requires `pip install dagron[ray]`. | +| `CeleryBackend` | `dagron.execution.backends.celery` | Existing Celery infrastructure. Requires `pip install dagron[celery]`. | + +--- + +## ThreadBackend + +The simplest backend, using Python's `ThreadPoolExecutor`. Good for I/O-bound workloads (API calls, database queries, file downloads): + +```python +import dagron +from dagron.execution.distributed_executor import DistributedExecutor +from dagron.execution.backends.thread import ThreadBackend + +dag = ( + dagron.DAG.builder() + .add_node("fetch_users") + .add_node("fetch_orders") + .add_node("join") + .add_edge("fetch_users", "join") + .add_edge("fetch_orders", "join") + .build() +) + +backend = ThreadBackend(max_workers=8) + +with DistributedExecutor(dag, backend) as executor: + result = executor.execute({ + "fetch_users": lambda: fetch_from_api("/users"), + "fetch_orders": lambda: fetch_from_api("/orders"), + "join": lambda: merge_data(), + }) + +print(f"Backend: {result.backend_name}") # "thread" +print(f"Succeeded: {result.execution_result.succeeded}") +``` + +The `with` statement ensures `backend.shutdown()` is called when execution completes. + +--- + +## MultiprocessingBackend + +Bypasses the GIL for CPU-bound workloads by dispatching tasks to separate processes: + +```python +from dagron.execution.backends.multiprocessing import MultiprocessingBackend + +backend = MultiprocessingBackend(max_workers=4) + +with DistributedExecutor(dag, backend) as executor: + result = executor.execute(tasks) +``` + + +Tasks must be **picklable** when using `MultiprocessingBackend`. Lambda functions and closures cannot be pickled. Use module-level functions instead. 
+ +```python +# This works: +def compute_features(): + return heavy_computation() + +tasks = {"features": compute_features} + +# This does NOT work with multiprocessing: +tasks = {"features": lambda: heavy_computation()} +``` + +--- + +## RayBackend + +Distribute tasks across a Ray cluster for true multi-machine parallelism: + +```python +from dagron.execution.backends.ray import RayBackend + +# Initialize Ray (or connect to an existing cluster) +backend = RayBackend(num_cpus=16) + +with DistributedExecutor(dag, backend, node_timeout=300) as executor: + result = executor.execute(tasks) +``` + +Ray must be installed separately: + +```bash +pip install dagron[ray] +``` + +If Ray is already initialized (e.g., you called `ray.init()` elsewhere), `RayBackend` detects this and reuses the existing session. + +### Ray Cluster Example + +```python +import ray +from dagron.execution.backends.ray import RayBackend + +# Connect to a remote cluster +ray.init(address="ray://cluster-head:10001") + +backend = RayBackend() # uses the existing Ray session + +with DistributedExecutor(dag, backend) as executor: + result = executor.execute({ + "train_model_a": lambda: train_on_gpu("model_a"), + "train_model_b": lambda: train_on_gpu("model_b"), + "ensemble": lambda: combine_models(), + }) +``` + +--- + +## CeleryBackend + +Integrate with existing Celery infrastructure for message-broker-based distribution: + +```python +from celery import Celery +from dagron.execution.backends.celery import CeleryBackend + +app = Celery("dagron_tasks", broker="redis://localhost:6379") + +backend = CeleryBackend(app=app, queue="dagron") + +with DistributedExecutor(dag, backend) as executor: + result = executor.execute(tasks) +``` + +Celery must be installed separately: + +```bash +pip install dagron[celery] +``` + +The `queue` parameter routes all dagron tasks to a specific Celery queue, keeping them separate from your other Celery tasks. 
+ +--- + +## DistributedExecutor + +The `DistributedExecutor` dispatches nodes **by topological level**. All nodes in a level are submitted to the backend concurrently, and results are collected before advancing to the next level. + +```python +from dagron.execution.distributed_executor import DistributedExecutor + +executor = DistributedExecutor( + dag, + backend=backend, + fail_fast=True, # skip downstream on failure + enable_tracing=True, # record execution trace + node_timeout=60.0, # per-node timeout in seconds +) + +result = executor.execute(tasks) +``` + +### DistributedExecutionResult + +The result contains the standard `ExecutionResult` plus distributed metadata: + +```python +result = executor.execute(tasks) + +# Standard execution stats +er = result.execution_result +print(f"Succeeded: {er.succeeded}, Failed: {er.failed}") +print(f"Total time: {er.total_duration_seconds:.1f}s") + +# Distributed metadata +print(f"Backend: {result.backend_name}") +print(f"Dispatch info: {result.dispatch_info}") +# e.g. {"fetch_users": {"backend": "ray"}, ...} +``` + +### Context Manager + +`DistributedExecutor` supports context-manager usage for automatic cleanup: + +```python +with DistributedExecutor(dag, backend) as executor: + result = executor.execute(tasks) +# backend.shutdown(wait=True) is called automatically +``` + +### Node Timeout + +Set `node_timeout` to fail nodes that take too long: + +```python +executor = DistributedExecutor(dag, backend, node_timeout=30.0) +result = executor.execute(tasks) + +# Check for timed-out nodes +print(f"Timed out: {result.execution_result.timed_out}") +``` + +Timed-out nodes are treated as failures and trigger fail-fast behavior for downstream nodes. + +--- + +## PartitionedDAGExecutor + +For large DAGs, dispatching every node individually to a remote backend can create excessive serialization overhead. 
The `PartitionedDAGExecutor` solves this by splitting the DAG into **k partitions** and executing each partition as a sub-DAG: + +```python +from dagron.execution.distributed import PartitionedDAGExecutor + +executor = PartitionedDAGExecutor( + dag, + k=4, # target number of partitions + strategy="level_based", # partitioning strategy + max_workers=8, # workers per partition + fail_fast=True, +) + +result = executor.execute(tasks) +``` + +### Partitioning Strategies + +| Strategy | Description | Best for | +|---|---|---| +| `"level_based"` | Assigns nodes to partitions based on their topological level. | Balanced, predictable partitions. | +| `"balanced"` | Distributes nodes to minimize the maximum partition cost. | Cost-aware balancing when node costs vary widely. | +| `"communication_min"` | Minimizes cross-partition edges (Kernighan-Lin style). | Minimizing serialization overhead between partitions. | + +### Level-Based Partitioning + +Groups nodes by topological level and distributes levels across k partitions: + +```python +executor = PartitionedDAGExecutor(dag, k=3, strategy="level_based") +``` + + C + A --> D + B --> D + C --> E + D --> E`} + caption="Level-based partitioning: each level maps to a partition." +/> + +### Balanced Partitioning + +When nodes have very different execution costs, use balanced partitioning: + +```python +costs = { + "extract": 5.0, + "heavy_transform": 120.0, + "light_transform": 2.0, + "load": 10.0, +} + +executor = PartitionedDAGExecutor( + dag, + k=2, + strategy="balanced", + costs=costs, +) +``` + +### Communication-Minimizing Partitioning + +Minimizes the number of edges that cross partition boundaries: + +```python +executor = PartitionedDAGExecutor( + dag, + k=3, + strategy="communication_min", + max_iterations=20, # Kernighan-Lin iterations + max_imbalance=0.3, # allow 30% size imbalance +) +``` + +The `max_imbalance` parameter controls the trade-off between partition balance and communication minimization. 
A value of `0.0` requires perfectly balanced partitions; `0.3` allows 30% deviation. + +--- + +## Choosing Between Executors + +| Scenario | Recommended Executor | +|---|---| +| Small DAG, I/O-bound tasks | `DistributedExecutor` + `ThreadBackend` | +| Small DAG, CPU-bound tasks | `DistributedExecutor` + `MultiprocessingBackend` | +| Large DAG, multi-machine cluster | `PartitionedDAGExecutor` with `"communication_min"` | +| Existing Celery infrastructure | `DistributedExecutor` + `CeleryBackend` | +| GPU cluster | `DistributedExecutor` + `RayBackend` | + +--- + +## Writing a Custom Backend + +Implement the `DistributedBackend` protocol to integrate with any execution system: + +```python +from dagron.execution.backends.base import DistributedBackend + +class DaskBackend: + """Example backend using Dask distributed.""" + + def __init__(self, scheduler_address: str): + from dask.distributed import Client + self._client = Client(scheduler_address) + + @property + def name(self) -> str: + return "dask" + + def submit(self, fn, *args, **kwargs): + return self._client.submit(fn, *args, **kwargs) + + def result(self, future, timeout=None): + return future.result(timeout=timeout) + + def shutdown(self, wait=True): + self._client.close() + +# Usage +backend = DaskBackend("tcp://scheduler:8786") +executor = DistributedExecutor(dag, backend) +``` + +--- + +## Combining with Other Features + +### Distributed + Tracing + +Enable tracing to see per-node timing across distributed workers: + +```python +executor = DistributedExecutor(dag, backend, enable_tracing=True) +result = executor.execute(tasks) + +trace = result.execution_result.trace +if trace: + trace.to_chrome_json("distributed_trace.json") +``` + +### Distributed + Fail-Fast + +```python +executor = DistributedExecutor(dag, backend, fail_fast=True) +``` + +When a node fails, all downstream nodes are skipped, even across different topological levels. 
+ +### Partitioned + Cost Estimates + +Provide cost estimates for better partitioning: + +```python +costs = {node: estimate_cost(node) for node in dag.node_names()} + +executor = PartitionedDAGExecutor( + dag, + k=4, + strategy="balanced", + costs=costs, +) +``` + +--- + +## Complete Example: Ray Cluster Training + +```python +import dagron +from dagron.execution.distributed_executor import DistributedExecutor +from dagron.execution.backends.ray import RayBackend + +# Build a training pipeline +dag = ( + dagron.DAG.builder() + .add_node("load_data") + .add_node("preprocess") + .add_node("train_model_a") + .add_node("train_model_b") + .add_node("train_model_c") + .add_node("ensemble") + .add_node("evaluate") + .add_edge("load_data", "preprocess") + .add_edge("preprocess", "train_model_a") + .add_edge("preprocess", "train_model_b") + .add_edge("preprocess", "train_model_c") + .add_edge("train_model_a", "ensemble") + .add_edge("train_model_b", "ensemble") + .add_edge("train_model_c", "ensemble") + .add_edge("ensemble", "evaluate") + .build() +) + +def load_data(): + return load_dataset("imagenet") + +def preprocess(): + return normalize_images() + +def train_model_a(): + return train("resnet50", epochs=10) + +def train_model_b(): + return train("vgg16", epochs=10) + +def train_model_c(): + return train("efficientnet", epochs=10) + +def ensemble(): + return combine_predictions() + +def evaluate(): + return compute_metrics() + +tasks = { + "load_data": load_data, + "preprocess": preprocess, + "train_model_a": train_model_a, + "train_model_b": train_model_b, + "train_model_c": train_model_c, + "ensemble": ensemble, + "evaluate": evaluate, +} + +# Dispatch to Ray -- models train in parallel on different machines +backend = RayBackend(num_cpus=32) + +with DistributedExecutor(dag, backend, node_timeout=3600) as executor: + result = executor.execute(tasks) + +er = result.execution_result +print(f"Succeeded: {er.succeeded}/{er.succeeded + er.failed}") +print(f"Total time: 
{er.total_duration_seconds:.0f}s") +``` + +--- + +## Best Practices + +1. **Start with `ThreadBackend` for development.** Switch to `RayBackend` or `CeleryBackend` for production. + +2. **Use `PartitionedDAGExecutor` for large DAGs.** When your DAG has hundreds of nodes, per-node dispatch overhead adds up. Partitioning reduces it. + +3. **Provide cost estimates.** The `balanced` and `communication_min` strategies produce much better partitions when they know how long each node takes. + +4. **Set `node_timeout`.** Prevent runaway tasks from blocking the entire pipeline. + +5. **Use the context manager.** Always use `with DistributedExecutor(...) as executor:` to ensure proper cleanup. + +6. **Avoid lambdas with multiprocessing.** Module-level functions are required for pickling. + +--- + +## Related + +- [API Reference: Distributed](/api/execution/distributed) -- full API documentation. +- [Executing Tasks](/guide/core-concepts/executing-tasks) -- the standard single-machine executor. +- [Resource Scheduling](/guide/execution-strategies/resource-scheduling) -- GPU/CPU/memory-aware scheduling. +- [Tracing & Profiling](/guide/observability/tracing-profiling) -- visualizing distributed execution traces. diff --git a/docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx b/docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx new file mode 100644 index 0000000..d8ab75e --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx @@ -0,0 +1,471 @@ +--- +title: Dynamic DAGs +description: Expand the DAG at runtime based on node results — add or remove nodes dynamically with DynamicExecutor, expanders, and DynamicModification. +--- + +# Dynamic DAGs + +Sometimes you cannot know the full shape of your pipeline until execution is +underway. A data-discovery step might reveal 50 tables that each need their own +processing branch. A model-selection step might choose between three architectures. 
+A file scanner might find a variable number of inputs. + +dagron's **dynamic execution** system lets you define **expander functions** that +modify the DAG at runtime — adding new nodes, removing nodes, or rewiring edges +based on the results of upstream tasks. + +## Concepts + +### Expanders + +An **expander** is a Python function associated with a specific node. After that +node completes, the executor calls the expander with the node's name and result. +The expander returns a [`DynamicModification`](/api/execution/dynamic) describing +what to change. + +### DynamicModification + +A `DynamicModification` is a data class with two fields: + +| Field | Type | Description | +|-------|------|-------------| +| `add_nodes` | `list[DynamicNodeSpec]` | Nodes to add to the DAG | +| `remove_nodes` | `list[str]` | Nodes to remove from the DAG | + +### DynamicNodeSpec + +Each new node is described by a `DynamicNodeSpec`: + +| Field | Type | Description | +|-------|------|-------------| +| `name` | `str` | The new node's name | +| `task` | `Callable` | The task function to execute | +| `dependencies` | `list[str]` | Nodes this new node depends on | +| `dependents` | `list[str]` | Nodes that depend on this new node | + +### DynamicExecutor + +The [`DynamicExecutor`](/api/execution/dynamic) wraps the standard executor and +applies modifications between scheduling rounds. + +## Basic example + +Let us build a pipeline where a discovery step determines how many files to +process: + +```python +import dagron + +# Initial DAG with a discovery node and a merge node +dag = ( + dagron.DAG.builder() + .add_node("discover") + .add_node("merge") + .add_edge("discover", "merge") + .build() +) +``` + + merge`} + caption="Initial DAG before expansion. The discover node will spawn dynamic children." 
+/> + +### Define the expander + +```python +def discover_expander(name, result): + """After 'discover' runs, add one processing node per file.""" + files = result # e.g., ["a.csv", "b.csv", "c.csv"] + + new_nodes = [] + for filename in files: + node_name = f"process_{filename.replace('.', '_')}" + new_nodes.append( + dagron.DynamicNodeSpec( + name=node_name, + task=lambda fn=filename: f"processed {fn}", + dependencies=["discover"], + dependents=["merge"], + ) + ) + + return dagron.DynamicModification( + add_nodes=new_nodes, + remove_nodes=[], + ) +``` + +### Execute + +```python +tasks = { + "discover": lambda: ["a.csv", "b.csv", "c.csv"], + "merge": lambda: "all files merged", +} + +expanders = { + "discover": discover_expander, +} + +executor = dagron.DynamicExecutor(dag, expanders=expanders, max_workers=4) +result = executor.execute(tasks) + +print(result.succeeded) # 5 (discover + 3 process nodes + merge) +print(list(result.node_results.keys())) +# ['discover', 'process_a_csv', 'process_b_csv', 'process_c_csv', 'merge'] +``` + +After `discover` completes, the expander fires and adds three new nodes. The +executor then schedules them in parallel, and finally runs `merge`: + + process_a_csv --> merge + discover --> process_b_csv --> merge + discover --> process_c_csv --> merge`} + caption="DAG after dynamic expansion. Three processing nodes were added at runtime." +/> + +## DynamicModification in detail + +### Adding nodes + +```python +dagron.DynamicModification( + add_nodes=[ + dagron.DynamicNodeSpec( + name="new_node", + task=lambda: "hello", + dependencies=["existing_parent"], + dependents=["existing_child"], + ), + ], + remove_nodes=[], +) +``` + +The `dependencies` field creates edges FROM those nodes TO the new node. +The `dependents` field creates edges FROM the new node TO those nodes. + +### Removing nodes + +```python +dagron.DynamicModification( + add_nodes=[], + remove_nodes=["obsolete_node"], +) +``` + +Removing a node also removes all its edges. 
Be careful not to remove a node +that has already been scheduled or completed. + +### Combined add and remove + +```python +def replace_placeholder(name, result): + """Replace a placeholder node with specific implementations.""" + return dagron.DynamicModification( + add_nodes=[ + dagron.DynamicNodeSpec( + name="specific_impl_a", + task=lambda: "impl a", + dependencies=["upstream"], + dependents=["downstream"], + ), + dagron.DynamicNodeSpec( + name="specific_impl_b", + task=lambda: "impl b", + dependencies=["upstream"], + dependents=["downstream"], + ), + ], + remove_nodes=["placeholder"], + ) +``` + +## Real-world example: dynamic ETL pipeline + +A common scenario is an ETL pipeline that discovers database tables at runtime +and creates a processing branch for each: + +```python +import dagron +import time + +# Initial DAG +dag = ( + dagron.DAG.builder() + .add_node("discover_tables") + .add_node("aggregate") + .add_node("publish") + .add_edge("discover_tables", "aggregate") + .add_edge("aggregate", "publish") + .build() +) + +def discover_tables(): + """Simulate querying a database catalog.""" + time.sleep(0.1) + return ["users", "orders", "products", "reviews"] + +def aggregate(): + return "aggregated all tables" + +def publish(): + return "published to data warehouse" + +def make_table_processor(table_name): + """Factory function for table-specific processors.""" + def process(): + time.sleep(0.2) # simulate processing + return f"processed {table_name}: 1000 rows" + return process + +def table_expander(name, result): + """Create one processing node per discovered table.""" + tables = result + nodes = [] + for table in tables: + nodes.append( + dagron.DynamicNodeSpec( + name=f"process_{table}", + task=make_table_processor(table), + dependencies=["discover_tables"], + dependents=["aggregate"], + ) + ) + return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[]) + +tasks = { + "discover_tables": discover_tables, + "aggregate": aggregate, + "publish": 
publish, +} + +executor = dagron.DynamicExecutor( + dag, + expanders={"discover_tables": table_expander}, + max_workers=4, +) +result = executor.execute(tasks) + +print(f"Executed {result.succeeded} tasks") +# Executed 7 tasks + +for name, nr in result.node_results.items(): + print(f" {name}: {nr.status.name} ({nr.duration_seconds:.3f}s)") +``` + + process_users --> aggregate + discover_tables --> process_orders --> aggregate + discover_tables --> process_products --> aggregate + discover_tables --> process_reviews --> aggregate + aggregate --> publish`} + caption="After expansion: four table-processing nodes run in parallel." +/> + +## Chained expansion + +Expanders can trigger further expansions. If a dynamically-added node also has +an expander, it fires after that node completes: + +```python +dag = ( + dagron.DAG.builder() + .add_node("level_0") + .add_node("final") + .add_edge("level_0", "final") + .build() +) + +def level_0_expander(name, result): + return dagron.DynamicModification( + add_nodes=[ + dagron.DynamicNodeSpec( + name="level_1", + task=lambda: ["sub_a", "sub_b"], + dependencies=["level_0"], + dependents=["final"], + ), + ], + remove_nodes=[], + ) + +def level_1_expander(name, result): + nodes = [] + for sub in result: + nodes.append( + dagron.DynamicNodeSpec( + name=f"level_2_{sub}", + task=lambda s=sub: f"processed {s}", + dependencies=["level_1"], + dependents=["final"], + ) + ) + return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[]) + +tasks = { + "level_0": lambda: "started", + "final": lambda: "done", +} + +expanders = { + "level_0": level_0_expander, + "level_1": level_1_expander, +} + +executor = dagron.DynamicExecutor(dag, expanders=expanders) +result = executor.execute(tasks) + +print(list(result.node_results.keys())) +# ['level_0', 'level_1', 'level_2_sub_a', 'level_2_sub_b', 'final'] +``` + + level_1 + level_1 --> level_2_sub_a --> final + level_1 --> level_2_sub_b --> final`} + caption="Two-level chained expansion. 
level_0 spawns level_1, which spawns level_2 nodes." +/> + +## Model selection example + +Use dynamic expansion to choose a model architecture at runtime: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_edge("prepare_data", "select_model") + .add_edge("select_model", "evaluate") + .build() +) + +def model_selector_expander(name, result): + """Based on data characteristics, pick the right model.""" + data_size = result["rows"] + + if data_size < 1000: + model = dagron.DynamicNodeSpec( + name="train_linear", + task=lambda: {"model": "linear", "accuracy": 0.85}, + dependencies=["select_model"], + dependents=["evaluate"], + ) + elif data_size < 100000: + model = dagron.DynamicNodeSpec( + name="train_xgboost", + task=lambda: {"model": "xgboost", "accuracy": 0.92}, + dependencies=["select_model"], + dependents=["evaluate"], + ) + else: + model = dagron.DynamicNodeSpec( + name="train_neural_net", + task=lambda: {"model": "nn", "accuracy": 0.96}, + dependencies=["select_model"], + dependents=["evaluate"], + ) + + return dagron.DynamicModification(add_nodes=[model], remove_nodes=[]) + +tasks = { + "prepare_data": lambda: {"rows": 50000, "features": 20}, + "select_model": lambda: {"rows": 50000}, + "evaluate": lambda: "evaluated", +} + +executor = dagron.DynamicExecutor( + dag, + expanders={"select_model": model_selector_expander}, +) +result = executor.execute(tasks) + +# With 50000 rows, xgboost was selected +print("train_xgboost" in result.node_results) # True +print(result.node_results["train_xgboost"].result) +# {'model': 'xgboost', 'accuracy': 0.92} +``` + + select_model --> train_xgboost --> evaluate`} + caption="Dynamic model selection. XGBoost was chosen based on the data size." 
+/>
+
+## Error handling in expanders
+
+If an expander raises an exception, the node that triggered it is marked as
+failed, and its descendants are skipped:
+
+```python
+def bad_expander(name, result):
+    raise RuntimeError("Expansion failed!")
+
+expanders = {"discover": bad_expander}
+executor = dagron.DynamicExecutor(dag, expanders=expanders)
+result = executor.execute(tasks)
+
+print(result.node_results["discover"].status)  # FAILED
+```
+
+To handle expansion errors gracefully, wrap your expander logic in try/except:
+
+```python
+def safe_expander(name, result):
+    try:
+        # ... expansion logic ...
+        return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[])
+    except Exception:
+        # Return empty modification — no expansion, but no failure
+        return dagron.DynamicModification(add_nodes=[], remove_nodes=[])
+```
+
+## Best practices
+
+1. **Use factory functions for tasks.** When creating tasks in a loop, use a
+   factory to capture the loop variable correctly:
+   ```python
+   def make_task(item):
+       def task():
+           return process(item)
+       return task
+
+   # NOT: lambda: process(item) -- captures the variable, not the value
+   ```
+
+2. **Name dynamic nodes predictably.** Use naming conventions like
+   `process_{table}` so you can use glob/regex matching later.
+
+3. **Limit expansion depth.** Chained expansions can grow the graph
+   unexpectedly. Set reasonable limits in your expander logic.
+
+4. **Combine with conditional edges.** Use
+   [Conditional Execution](/guide/execution-strategies/conditional) to gate whether expansion
+   happens at all.
+
+5. **Test expanders in isolation.** Write unit tests for your expander functions
+   with various inputs before running them in the full pipeline.
+
+6. **Monitor graph size.** Log `dag.node_count()` after expansion to detect
+   runaway growth. 
+ +## API reference + +| Class / Method | Docs | +|----------------|------| +| `DynamicExecutor` | [Dynamic](/api/execution/dynamic) | +| `DynamicModification` | [Dynamic](/api/execution/dynamic) | +| `DynamicNodeSpec` | [Dynamic](/api/execution/dynamic) | + +## Next steps + +- [Conditional Execution](/guide/execution-strategies/conditional) — gate branches with runtime predicates. +- [Checkpointing](/guide/execution-strategies/checkpointing) — checkpoint dynamic pipelines for resume. +- [Tracing & Profiling](/guide/observability/tracing-profiling) — trace dynamic expansion events. diff --git a/docs-next/content/docs/guide/execution-strategies/incremental.mdx b/docs-next/content/docs/guide/execution-strategies/incremental.mdx new file mode 100644 index 0000000..21fed25 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/incremental.mdx @@ -0,0 +1,393 @@ +--- +title: Incremental Execution +description: Re-execute only what changed — dirty sets, early cutoff, and the IncrementalExecutor for efficient recomputation. +--- + +# Incremental Execution + +When a small part of your input changes, you should not have to re-run the entire +pipeline. dagron's **incremental execution** engine tracks which nodes are +affected by a change, re-executes only those nodes, and applies **early cutoff** +to stop propagation when a recomputed node produces the same result as before. + +This guide explains the concepts behind incremental execution, walks through the +[`IncrementalExecutor`](/api/execution/incremental) API, and shows real-world +patterns for using it effectively. + +## Why incremental? + +Consider a data pipeline with 20 nodes. If one source table changes, a naive +executor re-runs all 20 tasks. With incremental execution: + +1. You declare which nodes changed (the **dirty set**). +2. The executor computes the **affected set** — all downstream descendants of + the dirty nodes. +3. Only the affected set is re-executed, in topological order. +4. 
If a recomputed node's output matches its previous output, the executor + applies **early cutoff** and skips its descendants. + +The result: you re-run 3 nodes instead of 20, saving minutes or hours on +large pipelines. + +## Concepts + +### Dirty set + +The **dirty set** is the set of nodes whose inputs have changed since the last +run. You provide this set explicitly via the `changed_nodes` parameter. + +### Affected set + +The **affected set** is the transitive closure of the dirty set's descendants. +dagron computes this automatically. + +### Early cutoff + +After re-executing a node, the executor compares its new result to the cached +previous result. If they are equal, the node's descendants are **not** +re-executed — even if they are in the affected set. This is called **early +cutoff** and can dramatically reduce recomputation. + +### Reused set + +Nodes that are not in the affected set (or were cut off early) keep their +previous results. These are the **reused** nodes. + + B:::recomputed --> D:::cutoff + A --> C:::recomputed --> E:::reused + D --> F:::reused + E --> F`} + caption="Incremental execution. A is dirty (red). B and C are recomputed (orange). D produces the same result as before so early cutoff applies (green). E and F are reused (blue)." 
+/> + +## IncrementalExecutor + +### Basic usage + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_edge("source_a", "transform") + .add_edge("source_b", "transform") + .add_edge("transform", "aggregate") + .add_edge("aggregate", "report") + .build() +) + +tasks = { + "source_a": lambda: [1, 2, 3], + "source_b": lambda: [4, 5, 6], + "transform": lambda: [1, 2, 3, 4, 5, 6], + "aggregate": lambda: {"sum": 21, "count": 6}, + "report": lambda: "Report: 6 items, sum=21", +} + +executor = dagron.IncrementalExecutor(dag) + +# First run — everything executes +result = executor.execute(tasks) +print(result.recomputed) # ['source_a', 'source_b', 'transform', 'aggregate', 'report'] +print(result.reused) # [] +print(result.early_cutoff) # [] +``` + +### Subsequent run with changes + +```python +# Only source_a changed +tasks["source_a"] = lambda: [10, 20, 30] + +result = executor.execute(tasks, changed_nodes=["source_a"]) +print(result.recomputed) # ['source_a', 'transform', 'aggregate', 'report'] +print(result.reused) # ['source_b'] +print(result.early_cutoff) # [] +``` + +Node `source_b` was not in the dirty set or the affected set, so its previous +result is reused. + + transform:::recomputed + source_b:::reused --> transform + transform --> aggregate:::recomputed --> report:::recomputed`} + caption="Only source_a changed. source_b is reused; everything downstream of source_a is recomputed." 
+/> + +### Early cutoff in action + +Early cutoff activates when a recomputed node produces the same result as its +previous run: + +```python +# source_a changed, but transform produces the same result anyway +tasks["source_a"] = lambda: [1, 2, 3] # same as original +tasks["transform"] = lambda: [1, 2, 3, 4, 5, 6] # same output + +result = executor.execute(tasks, changed_nodes=["source_a"]) +print(result.recomputed) # ['source_a', 'transform'] +print(result.early_cutoff) # ['transform'] +print(result.reused) # ['source_b', 'aggregate', 'report'] +``` + +Even though `transform` is downstream of the dirty node, its output did not +change, so `aggregate` and `report` are **not** re-executed. + + transform:::cutoff + source_b:::reused --> transform + transform --> aggregate:::reused --> report:::reused`} + caption="Early cutoff at transform (green) prevents recomputation of aggregate and report." +/> + +## Constructor parameters + +```python +dagron.IncrementalExecutor( + dag, # The DAG + callbacks=None, # ExecutionCallbacks instance + fail_fast=True, # Stop on first failure? + enable_tracing=False, # Record execution trace? +) +``` + +The constructor is similar to [`DAGExecutor`](/api/execution/execution) but without +`max_workers` or `costs`, because incremental execution focuses on minimising +work rather than parallelising it. 
+ +## IncrementalResult + +The `.execute()` method returns an `IncrementalResult` that extends +`ExecutionResult` with three additional fields: + +| Field | Type | Description | +|-------|------|-------------| +| `recomputed` | `list[str]` | Nodes that were actually re-executed | +| `early_cutoff` | `list[str]` | Nodes where early cutoff stopped propagation | +| `reused` | `list[str]` | Nodes whose previous results were kept | + +```python +result = executor.execute(tasks, changed_nodes=["source_a"]) + +# Standard ExecutionResult fields still work +print(result.succeeded) +print(result.total_duration_seconds) +for name, nr in result.node_results.items(): + print(f"{name}: {nr.status}") + +# Incremental-specific fields +print(f"Recomputed: {result.recomputed}") +print(f"Early cutoff: {result.early_cutoff}") +print(f"Reused: {result.reused}") +``` + +## Computing dirty sets + +### Manual dirty set + +The simplest approach: you know which inputs changed, so you list them +explicitly: + +```python +result = executor.execute(tasks, changed_nodes=["source_a"]) +``` + +### Using DAG's dirty_set() + +If you track changes at a finer granularity (e.g., file modification times), +dagron can compute the dirty set for you: + +```python +# After changing node configurations or data sources, +# ask the DAG which nodes are dirty: +dirty = dag.dirty_set( + changed=["source_a"], +) +print(dirty) # ['source_a', 'transform', 'aggregate', 'report'] +``` + +The `dirty_set()` method returns the full affected set, not just the immediate +changes. This is the set of nodes that **might** need re-execution (before +early cutoff). 
+
+### Using change_provenance()
+
+For more sophisticated change tracking, use `change_provenance()` to understand
+**why** a node is dirty:
+
+```python
+provenance = dag.change_provenance(changed=["source_a"])
+for node, reason in provenance.items():
+    print(f"{node}: dirty because of {reason}")
+# source_a: dirty because of direct change
+# transform: dirty because of ancestor source_a
+# aggregate: dirty because of ancestor source_a
+# report: dirty because of ancestor source_a
+```
+
+## Callbacks with incremental execution
+
+Callbacks work the same as with `DAGExecutor`, but you get additional events
+for skip/reuse:
+
+```python
+class IncrementalLogger:
+    def on_node_start(self, name):
+        print(f"  [RUN] {name}")
+
+    def on_node_complete(self, name, result):
+        print(f"  [DONE] {name}")
+
+    def on_node_skip(self, name):
+        print(f"  [REUSE] {name}")
+
+executor = dagron.IncrementalExecutor(dag, callbacks=IncrementalLogger())
+result = executor.execute(tasks, changed_nodes=["source_a"])
+```
+
+## Fail-fast behaviour
+
+When `fail_fast=True` (the default), a failure in any recomputed node skips
+its descendants — just like the standard executor:
+
+```python
+tasks["transform"] = lambda: (_ for _ in ()).throw(ValueError("bad data"))
+
+result = executor.execute(tasks, changed_nodes=["source_a"])
+print(result.failed)  # 1
+print(result.skipped)  # 2 (aggregate, report)
+```
+
+| Node | Status |
+|------|--------|
+| source_a | succeeded |
+| source_b | succeeded (reused) |
+| transform | failed |
+| aggregate | skipped |
+| report | skipped |
+
+## Real-world example: incremental ML training
+
+```python
+import dagron
+import hashlib
+import json
+
+dag = (
+    dagron.DAG.builder()
+    .add_edge("load_data", "feature_eng")
+    .add_edge("feature_eng", "train")
+    .add_edge("train", "evaluate")
+    .add_edge("evaluate", "report")
+    .build()
+)
+
+# Simulate expensive tasks
+def load_data():
+    return {"rows": 10000, "checksum": "abc123"}
+
+def feature_eng():
+    return {"features": 50, "checksum": "def456"}
+
+def 
train(): + import time + time.sleep(2) # expensive! + return {"model": "xgb_v1", "accuracy": 0.95} + +def evaluate(): + return {"accuracy": 0.95, "f1": 0.93} + +def report(): + return "Model accuracy: 95%" + +tasks = { + "load_data": load_data, + "feature_eng": feature_eng, + "train": train, + "evaluate": evaluate, + "report": report, +} + +executor = dagron.IncrementalExecutor(dag, enable_tracing=True) + +# First run: everything executes +result = executor.execute(tasks) +print(f"First run: {result.total_duration_seconds:.1f}s, recomputed={len(result.recomputed)}") + +# New data arrives, but only load_data changes +tasks["load_data"] = lambda: {"rows": 10001, "checksum": "abc124"} + +# Second run: only affected nodes execute +result = executor.execute(tasks, changed_nodes=["load_data"]) +print(f"Second run: {result.total_duration_seconds:.1f}s, recomputed={len(result.recomputed)}") +print(f"Reused: {result.reused}") +``` + +## Combining with tracing + +Enable tracing to understand incremental execution performance: + +```python +executor = dagron.IncrementalExecutor(dag, enable_tracing=True) +result = executor.execute(tasks, changed_nodes=["source_a"]) + +# The trace shows which nodes were recomputed vs reused +if result.trace: + print(result.trace.summary()) +``` + +See [Tracing & Profiling](/guide/observability/tracing-profiling) for the full tracing +guide. + +## Tips for effective incremental execution + +1. **Keep tasks deterministic.** Early cutoff works best when the same inputs + produce the same outputs. Non-deterministic tasks (e.g., those using random + seeds or wall-clock time) defeat early cutoff. + +2. **Use fine-grained nodes.** The more granular your nodes, the more + opportunities for early cutoff. A single monolithic "transform" node that + changes its output on every run provides no cutoff benefit. + +3. **Track changes accurately.** Over-reporting dirty nodes wastes computation. + Under-reporting produces stale results. 
Use checksums or file modification + times for accurate change detection. + +4. **Combine with checkpointing.** For long-running pipelines, use + [`CheckpointExecutor`](/guide/execution-strategies/checkpointing) alongside incremental + execution to resume from failures without losing incremental state. + +5. **Monitor cutoff rates.** Track `len(result.early_cutoff)` over time. If + cutoff rates are low, your nodes may not be deterministic or your change + detection may be too coarse. + +## API reference + +| Class / Method | Docs | +|----------------|------| +| `IncrementalExecutor` | [Incremental](/api/execution/incremental) | +| `IncrementalResult` | [Incremental](/api/execution/incremental) | +| `dag.dirty_set()` | [DAG](/api/core/core) | +| `dag.change_provenance()` | [DAG](/api/core/core) | + +## Next steps + +- [Tracing & Profiling](/guide/observability/tracing-profiling) — combine tracing with incremental runs. +- [Checkpointing](/guide/execution-strategies/checkpointing) — persist incremental state across restarts. +- [Conditional Execution](/guide/execution-strategies/conditional) — combine conditions with incremental logic. 
diff --git a/docs-next/content/docs/guide/execution-strategies/meta.json b/docs-next/content/docs/guide/execution-strategies/meta.json new file mode 100644 index 0000000..4960e12 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/meta.json @@ -0,0 +1,14 @@ +{ + "title": "Execution Strategies", + "defaultOpen": true, + "pages": [ + "incremental", + "conditional", + "dynamic-dags", + "checkpointing", + "caching", + "resource-scheduling", + "approval-gates", + "distributed" + ] +} diff --git a/docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx b/docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx new file mode 100644 index 0000000..ad17dd0 --- /dev/null +++ b/docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx @@ -0,0 +1,480 @@ +--- +title: Resource Scheduling +description: Schedule DAG tasks with GPU, CPU, and memory constraints using dagron's resource-aware executors. +--- + +# Resource Scheduling + +Many real-world pipelines require access to scarce physical resources -- GPUs for model training, memory for large datasets, or CPU slots for compute-heavy transforms. dagron's resource scheduling system lets you declare per-node requirements and execute the DAG with a scheduler that respects capacity constraints at all times. + +The scheduler dispatches nodes in **bottom-level priority order** (longest-path-to-sink first), so the critical path gets resources before less important branches. + +## Core Concepts + + B + A --> C + B --> D + C --> D + D --> E + style B fill:#f9a825,stroke:#f57f17 + style C fill:#f9a825,stroke:#f57f17 + style D fill:#ffcc80,stroke:#ef6c00`} + caption="A pipeline where training nodes compete for 4 GPUs. The scheduler ensures only 2 training nodes of this size run simultaneously." 
+/>
+
+There are five main building blocks:
+
+| Class | Role |
+|---|---|
+| [`ResourcePool`](/api/execution/resources#resourcepool) | Holds the total capacity of each resource and manages blocking acquire/release. |
+| [`ResourceRequirements`](/api/execution/resources#resourcerequirements) | Declares how much of each resource a single node needs. |
+| [`ResourceAwareExecutor`](/api/execution/resources#resourceawareexecutor) | Synchronous executor that dispatches nodes when their resources are available. |
+| [`AsyncResourceAwareExecutor`](/api/execution/resources#asyncresourceawareexecutor) | Async (`asyncio`) variant of the same scheduler. |
+| [`ResourceTimeline`](/api/execution/resources#resourcetimeline) | Records timestamped snapshots of resource utilization during execution. |
+
+---
+
+## Declaring Resources
+
+### ResourcePool
+
+A `ResourcePool` represents the total resources available on the machine or cluster. Resources are named strings with integer capacities:
+
+```python
+from dagron.execution.resources import ResourcePool
+
+# A machine with 4 GPUs, 16 CPU slots, and 32 GB of memory
+pool = ResourcePool(capacities={
+    "gpu": 4,
+    "cpu_slots": 16,
+    "memory_mb": 32768,
+})
+
+print(pool.capacities)  # {'gpu': 4, 'cpu_slots': 16, 'memory_mb': 32768}
+print(pool.available)  # same as capacities initially
+print(pool.allocated)  # {'gpu': 0, 'cpu_slots': 0, 'memory_mb': 0}
+```
+
+The pool is **thread-safe**. Internally it uses a `threading.Condition` so that the executor can block on `acquire()` until resources are freed by another thread.
+
+### ResourceRequirements
+
+Each node declares its needs via a `ResourceRequirements` object. 
You can use the constructor directly or one of the shorthand factory methods: + +```python +from dagron.execution.resources import ResourceRequirements + +# Explicit constructor +req = ResourceRequirements(resources={"gpu": 2, "memory_mb": 4096}) + +# Shorthand factories +gpu_req = ResourceRequirements.gpu(2) # {"gpu": 2} +cpu_req = ResourceRequirements.cpu(4) # {"cpu_slots": 4} +mem_req = ResourceRequirements.memory(8192) # {"memory_mb": 8192} +``` + +You can combine multiple resource types in a single `ResourceRequirements`: + +```python +heavy_req = ResourceRequirements(resources={ + "gpu": 2, + "cpu_slots": 4, + "memory_mb": 16384, +}) +``` + +The `fits()` method checks whether a requirement can be satisfied by a given availability dict: + +```python +available = {"gpu": 3, "cpu_slots": 8, "memory_mb": 16384} +print(heavy_req.fits(available)) # True + +available["gpu"] = 1 +print(heavy_req.fits(available)) # False -- only 1 GPU available +``` + +--- + +## Building a Resource-Scheduled Pipeline + +Here is a complete example that trains two ML models concurrently, limited by GPU availability: + +```python +import dagron +from dagron.execution.resources import ( + ResourceAwareExecutor, + ResourcePool, + ResourceRequirements, +) + +# 1. Build the DAG +dag = ( + dagron.DAG.builder() + .add_node("fetch_data") + .add_node("preprocess") + .add_node("train_resnet") + .add_node("train_bert") + .add_node("ensemble") + .add_node("deploy") + .add_edge("fetch_data", "preprocess") + .add_edge("preprocess", "train_resnet") + .add_edge("preprocess", "train_bert") + .add_edge("train_resnet", "ensemble") + .add_edge("train_bert", "ensemble") + .add_edge("ensemble", "deploy") + .build() +) + +# 2. Declare resource requirements per node +requirements = { + "train_resnet": ResourceRequirements.gpu(2), + "train_bert": ResourceRequirements.gpu(3), + "ensemble": ResourceRequirements.gpu(1), + # fetch_data, preprocess, deploy need no special resources +} + +# 3. 
Create the resource pool (4 GPUs available) +pool = ResourcePool(capacities={"gpu": 4}) + +# 4. Create the executor +executor = ResourceAwareExecutor( + dag, + resource_pool=pool, + requirements=requirements, +) + +# 5. Define tasks +def fetch_data(): + print("Fetching dataset...") + return {"rows": 10000} + +def preprocess(): + print("Cleaning data...") + return {"rows": 9500} + +def train_resnet(): + print("Training ResNet on 2 GPUs...") + return {"accuracy": 0.91} + +def train_bert(): + print("Training BERT on 3 GPUs...") + return {"accuracy": 0.94} + +def ensemble(): + print("Ensembling models on 1 GPU...") + return {"accuracy": 0.96} + +def deploy(): + print("Deploying model...") + return "deployed" + +# 6. Execute +result = executor.execute({ + "fetch_data": fetch_data, + "preprocess": preprocess, + "train_resnet": train_resnet, + "train_bert": train_bert, + "ensemble": ensemble, + "deploy": deploy, +}) + +print(f"Succeeded: {result.succeeded}, Failed: {result.failed}") +``` + +Because `train_resnet` needs 2 GPUs and `train_bert` needs 3, and the pool has only 4, they **cannot run simultaneously**. The scheduler dispatches whichever has a higher bottom-level priority first, then dispatches the other once the first releases its GPUs. + + B1 --> C1 --> D1 --> E1 --> F1`} + caption="Serialized timeline: train_bert runs first (higher bottom-level priority), then train_resnet once GPUs are freed." +/> + +If you change the pool to 5 GPUs, both training nodes can run concurrently since 2 + 3 = 5 fits within capacity. 
+ +--- + +## Pre-Validation + +Before execution begins, `ResourceAwareExecutor` validates that every node's requirements **can ever be satisfied** by the pool: + +```python +# This will raise immediately -- a single node needs 8 GPUs but pool has 4 +requirements["train_huge"] = ResourceRequirements.gpu(8) + +try: + executor.execute(tasks) +except ValueError as e: + print(e) + # "Node 'train_huge' requires {'gpu': 8} but pool capacity is {'gpu': 4}" +``` + +This check prevents deadlocks where a node would block forever because the pool is too small. + +--- + +## Priority Scheduling + +The executor computes **bottom-level priorities** for each node. The bottom level is the longest weighted path from a node to any sink. Nodes with higher bottom levels are dispatched first because they sit on the critical path. + +You can provide optional cost estimates to influence priority: + +```python +costs = { + "train_resnet": 120.0, # seconds + "train_bert": 300.0, + "ensemble": 60.0, +} + +executor = ResourceAwareExecutor( + dag, + resource_pool=pool, + requirements=requirements, + costs=costs, +) +``` + +With these costs, `train_bert` has a higher bottom-level value and the scheduler gives it resources first. 
+ +--- + +## Async Resource Scheduling + +For `asyncio`-based pipelines, use `AsyncResourceAwareExecutor`: + +```python +import asyncio +from dagron.execution.resources import AsyncResourceAwareExecutor + +async def train_resnet_async(): + await asyncio.sleep(2) # simulate training + return {"accuracy": 0.91} + +async def train_bert_async(): + await asyncio.sleep(5) + return {"accuracy": 0.94} + +async def main(): + executor = AsyncResourceAwareExecutor( + dag, + resource_pool=pool, + requirements=requirements, + ) + result = await executor.execute({ + "fetch_data": lambda: "data", + "preprocess": lambda: "cleaned", + "train_resnet": train_resnet_async, + "train_bert": train_bert_async, + "ensemble": lambda: "ensembled", + "deploy": lambda: "deployed", + }) + print(f"Done in {result.total_duration_seconds:.1f}s") + +asyncio.run(main()) +``` + +The async executor uses `asyncio.create_task` for concurrency while the underlying `ResourcePool` still uses threading primitives for acquire/release (safe from async code via the GIL). + +--- + +## ResourceTimeline and Utilization Tracking + +Every `ResourcePool` automatically records a `ResourceTimeline` that captures timestamped snapshots of resource allocation and availability. 
After execution you can inspect utilization: + +```python +timeline = pool.timeline + +# Iterate over snapshots +for snap in timeline.snapshots: + print( + f" t={snap.timestamp:.3f}s " + f"node={snap.node_name} " + f"event={snap.event} " + f"allocated={snap.allocated} " + f"available={snap.available}" + ) + +# Peak utilization across the entire execution +peaks = timeline.peak_utilization() +print(f"Peak GPU utilization: {peaks.get('gpu', 0)} / {pool.capacities['gpu']}") +``` + +A typical timeline output might look like: + +``` + t=0.001s node=train_bert event=acquired allocated={'gpu': 3} available={'gpu': 1} + t=5.012s node=train_bert event=released allocated={'gpu': 0} available={'gpu': 4} + t=5.013s node=train_resnet event=acquired allocated={'gpu': 2} available={'gpu': 2} + t=7.045s node=train_resnet event=released allocated={'gpu': 0} available={'gpu': 4} + t=7.046s node=ensemble event=acquired allocated={'gpu': 1} available={'gpu': 3} + t=7.102s node=ensemble event=released allocated={'gpu': 0} available={'gpu': 4} +Peak GPU utilization: 3 / 4 +``` + +### ResourceSnapshot + +Each snapshot is a `ResourceSnapshot` dataclass: + +| Field | Type | Description | +|---|---|---| +| `timestamp` | `float` | Seconds since the first snapshot was recorded. | +| `allocated` | `dict[str, int]` | Resources currently allocated at this point in time. | +| `available` | `dict[str, int]` | Resources still available at this point in time. | +| `node_name` | `str \| None` | The node that triggered this snapshot. | +| `event` | `str` | Either `"acquired"` or `"released"`. 
| + +--- + +## Manual Acquire and Release + +You can also use the pool directly outside of an executor, for example in custom scheduling logic: + +```python +pool = ResourcePool(capacities={"gpu": 4}) +req = ResourceRequirements.gpu(2) + +# Non-blocking attempt +if pool.try_acquire(req, node_name="my_node"): + try: + run_gpu_work() + finally: + pool.release(req, node_name="my_node") +else: + print("GPUs not available right now") + +# Blocking with timeout +acquired = pool.acquire(req, node_name="my_node", timeout=30.0) +if acquired: + try: + run_gpu_work() + finally: + pool.release(req, node_name="my_node") +else: + print("Timed out waiting for GPUs") +``` + +--- + +## Callbacks and Tracing + +`ResourceAwareExecutor` accepts `ExecutionCallbacks` that fire during execution. Two resource-specific callbacks are available: + +```python +from dagron.execution._types import ExecutionCallbacks + +callbacks = ExecutionCallbacks( + on_start=lambda name: print(f" [{name}] started"), + on_complete=lambda name, val: print(f" [{name}] completed: {val}"), + on_failure=lambda name, err: print(f" [{name}] FAILED: {err}"), + on_resource_acquired=lambda name, res: print(f" [{name}] acquired {res}"), + on_resource_released=lambda name, res: print(f" [{name}] released {res}"), +) + +executor = ResourceAwareExecutor( + dag, + resource_pool=pool, + requirements=requirements, + callbacks=callbacks, + enable_tracing=True, +) + +result = executor.execute(tasks) + +# Access the trace for Chrome-compatible profiling +if result.trace: + result.trace.to_chrome_json("resource_trace.json") +``` + +When `enable_tracing=True`, the executor records `RESOURCE_ACQUIRED` and `RESOURCE_RELEASED` trace events alongside the standard node start/complete events. + +--- + +## Fail-Fast Behavior + +By default, `fail_fast=True`. 
If a node fails, all downstream nodes are skipped immediately without acquiring resources: + +```python +executor = ResourceAwareExecutor( + dag, + resource_pool=pool, + requirements=requirements, + fail_fast=True, # default +) +``` + +Set `fail_fast=False` to let independent branches continue executing even when one branch fails. + +--- + +## Multi-Resource Scheduling Example + +Here is a more realistic example combining GPU, CPU, and memory constraints: + +```python +pool = ResourcePool(capacities={ + "gpu": 4, + "cpu_slots": 16, + "memory_mb": 65536, # 64 GB +}) + +requirements = { + "ingest": ResourceRequirements(resources={"cpu_slots": 2, "memory_mb": 4096}), + "feature_eng": ResourceRequirements(resources={"cpu_slots": 8, "memory_mb": 16384}), + "train_xgboost": ResourceRequirements(resources={"cpu_slots": 4, "memory_mb": 8192}), + "train_nn": ResourceRequirements(resources={"gpu": 2, "cpu_slots": 2, "memory_mb": 16384}), + "explain_shap": ResourceRequirements(resources={"cpu_slots": 8, "memory_mb": 32768}), + "deploy": ResourceRequirements(resources={"cpu_slots": 1}), +} + +executor = ResourceAwareExecutor(dag, pool, requirements) +result = executor.execute(tasks) +``` + +A node is only dispatched when **all** of its required resources are simultaneously available. This prevents situations where a node acquires some GPUs but blocks on memory, starving other nodes. + +--- + +## Best Practices + +1. **Right-size your pools.** Start with the actual hardware capacity. If you have 4 GPUs, set `"gpu": 4`. + +2. **Use `costs` for critical-path optimization.** Provide estimated runtimes so the scheduler prioritizes the bottleneck path. + +3. **Always release resources.** The executor handles this automatically, but if you use `ResourcePool` manually, use `try/finally`. + +4. **Check `peak_utilization()` after execution.** If peak usage is far below capacity, you may have too-conservative requirements. If it equals capacity, you are fully saturating your hardware. 
+
+5. **Combine with tracing.** Enable `enable_tracing=True` to generate Chrome-compatible traces that show resource acquire/release events overlaid on the execution timeline.
+
+---
+
+## Related
+
+- [API Reference: Resources](/api/execution/resources) -- full API documentation for all resource classes.
+- [Executing Tasks](/guide/core-concepts/executing-tasks) -- standard execution without resource constraints.
+- [Distributed Execution](/guide/execution-strategies/distributed) -- running nodes across multiple machines.
+- [Tracing & Profiling](/guide/observability/tracing-profiling) -- Chrome-compatible execution traces.
diff --git a/docs-next/content/docs/guide/getting-started.mdx b/docs-next/content/docs/guide/getting-started.mdx
new file mode 100644
index 0000000..f7b8b28
--- /dev/null
+++ b/docs-next/content/docs/guide/getting-started.mdx
@@ -0,0 +1,389 @@
+---
+title: Getting Started
+description: Install dagron and build your first DAG — a complete walkthrough from zero to executing a parallel ETL pipeline.
+---
+
+# Getting Started
+
+This guide walks you through installing dagron, creating your first directed acyclic
+graph, executing tasks in parallel, and inspecting the results. By the end you will
+have a working ETL pipeline that extracts, transforms, and loads data with full
+observability.
+
+## Installation
+
+dagron is distributed as a single wheel that bundles the Rust core via PyO3. Install
+it from PyPI:
+
+```bash
+pip install dagron
+```
+
+For async execution support, install the optional `async` extra:
+
+```bash
+pip install "dagron[async]"
+```
+
+Verify the installation:
+
+```python
+import dagron
+print(dagron.__version__)
+```
+
+
+dagron requires Python 3.12 or later. The Rust extension is pre-compiled for
+Linux (x86_64, aarch64), macOS (x86_64, Apple Silicon), and Windows (x86_64).
+ +## Core concepts + +Before writing code, it helps to understand three concepts that appear throughout +dagron: + +| Concept | What it is | +|---------|-----------| +| **DAG** | A directed acyclic graph whose nodes represent units of work and whose edges represent dependencies. The graph structure lives in Rust for speed. | +| **Executor** | A scheduler that walks the DAG in topological order, dispatching tasks to a thread pool (or async event loop) with maximum parallelism. | +| **Result** | A structured report containing every node's status, return value, error (if any), and wall-clock duration. | + +The typical workflow is: **build a DAG, map tasks to nodes, execute, inspect results**. + +## Your first DAG + +Let us model a classic ETL pipeline with three stages: **extract**, **transform**, +and **load**. The transform step depends on extract, and load depends on transform. + + transform --> load`} + caption="A simple three-node ETL pipeline." +/> + +### Step 1 — Build the graph + +The easiest way to create a DAG is with the fluent **builder** pattern: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) +``` + +The builder validates the graph at `.build()` time. If you accidentally introduce a +cycle, dagron raises a `CycleError` immediately — you never get an invalid graph. + +```python +print(dag.node_count()) # 3 +print(dag.edge_count()) # 2 +``` + +### Step 2 — Define tasks + +A **task** is any Python callable (function, lambda, method). 
You map node names to +callables in a plain dictionary: + +```python +def extract(): + """Simulate fetching rows from an API.""" + print("Extracting data...") + return [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}] + +def transform(): + """Normalize names to uppercase.""" + print("Transforming data...") + return [{"id": 1, "name": "ALICE"}, {"id": 2, "name": "BOB"}] + +def load(): + """Write results to the database.""" + print("Loading data...") + return "2 rows written" + +tasks = { + "extract": extract, + "transform": transform, + "load": load, +} +``` + + +Each task runs independently. If you need to pass data between tasks, see the +[Executing Tasks](/guide/core-concepts/executing-tasks) guide for strategies including +shared state and the Pipeline API. + +### Step 3 — Execute + +Create a [`DAGExecutor`](/api/execution/execution) and call `.execute()`: + +```python +executor = dagron.DAGExecutor(dag) +result = executor.execute(tasks) +``` + +The executor honours the dependency order: **extract** runs first, then +**transform**, then **load**. Independent nodes (if any) would run in parallel +across all available CPU cores. + +### Step 4 — Inspect results + +The returned [`ExecutionResult`](/api/execution/execution) contains per-node +outcomes: + +```python +print(result.succeeded) # 3 +print(result.failed) # 0 + +for name, node_result in result.node_results.items(): + print(f"{name}: {node_result.status} in {node_result.duration_seconds:.3f}s") + # extract: NodeStatus.COMPLETED in 0.001s + # transform: NodeStatus.COMPLETED in 0.001s + # load: NodeStatus.COMPLETED in 0.001s +``` + +Each [`NodeResult`](/api/execution/execution) carries the callable's return value: + +```python +print(result.node_results["load"].result) +# "2 rows written" +``` + +And the overall wall-clock time: + +```python +print(f"Pipeline finished in {result.total_duration_seconds:.3f}s") +``` + +## A more realistic example + +Real pipelines have fan-out and fan-in. 
Let us expand the ETL to extract from +two sources in parallel, transform each independently, then merge and load: + + transform_api --> merge + extract_db --> transform_db --> merge + merge --> load`} + caption="Fan-out / fan-in ETL pipeline. The two extract-transform branches run in parallel." +/> + +```python +import dagron +import time + +# -- Build the DAG -- +dag = ( + dagron.DAG.builder() + .add_node("extract_api") + .add_node("extract_db") + .add_node("transform_api") + .add_node("transform_db") + .add_node("merge") + .add_node("load") + .add_edge("extract_api", "transform_api") + .add_edge("extract_db", "transform_db") + .add_edge("transform_api", "merge") + .add_edge("transform_db", "merge") + .add_edge("merge", "load") + .build() +) + +# -- Define tasks -- +def extract_api(): + time.sleep(0.5) # simulate network I/O + return [{"source": "api", "id": 1}] + +def extract_db(): + time.sleep(0.3) # simulate query + return [{"source": "db", "id": 2}] + +def transform_api(): + return [{"source": "api", "id": 1, "clean": True}] + +def transform_db(): + return [{"source": "db", "id": 2, "clean": True}] + +def merge(): + return "merged 2 sources" + +def load(): + return "loaded to warehouse" + +tasks = { + "extract_api": extract_api, + "extract_db": extract_db, + "transform_api": transform_api, + "transform_db": transform_db, + "merge": merge, + "load": load, +} + +# -- Execute -- +result = dagron.DAGExecutor(dag, max_workers=4).execute(tasks) + +print(f"Completed {result.succeeded}/{dag.node_count()} tasks") +print(f"Wall time: {result.total_duration_seconds:.3f}s") +``` + +Because `extract_api` and `extract_db` have no mutual dependency, they execute +**concurrently**. The merge node waits until both transform branches finish, +then load runs last. 
+
+### Understanding execution order
+
+You can preview the execution plan without running anything:
+
+```python
+print(dag.topological_sort())
+# ['extract_api', 'extract_db', 'transform_api', 'transform_db', 'merge', 'load']
+
+for level, nodes in enumerate(dag.topological_levels()):
+    print(f"Level {level}: {nodes}")
+# Level 0: ['extract_api', 'extract_db']
+# Level 1: ['transform_api', 'transform_db']
+# Level 2: ['merge']
+# Level 3: ['load']
+```
+
+Nodes at the same level can run in parallel. The executor uses this structure
+internally to maximise concurrency.
+
+## Handling failures
+
+By default, the executor uses **fail-fast** mode: if any node raises an exception,
+downstream nodes are skipped and the result reports the failure. (This example
+uses the three-node extract → transform → load DAG from the first example, not
+the fan-out pipeline above.)
+
+```python
+def bad_transform():
+    raise ValueError("Data quality check failed!")
+
+tasks_with_failure = {
+    "extract": extract,
+    "transform": bad_transform,
+    "load": load,
+}
+
+result = dagron.DAGExecutor(dag).execute(tasks_with_failure)
+
+print(result.failed) # 1
+print(result.skipped) # 1 (load was skipped)
+```
+
+Node statuses after a failure:
+
+| Node | Status |
+|------|--------|
+| extract | completed |
+| transform | failed |
+| load | skipped |
+
+To continue executing independent branches even after a failure, disable
+fail-fast:
+
+```python
+result = dagron.DAGExecutor(dag, fail_fast=False).execute(tasks)
+```
+
+See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for the full set
+of executor options.
+ +## Builder shortcuts + +The builder supports several convenience patterns: + +### Bulk operations + +```python +dag = ( + dagron.DAG.builder() + .add_nodes(["a", "b", "c", "d"]) + .add_edges([("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")]) + .build() +) +``` + +### Direct construction + +If you prefer an imperative style, create a bare `DAG` and mutate it: + +```python +dag = dagron.DAG() +dag.add_node("x") +dag.add_node("y") +dag.add_edge("x", "y") +``` + +### Pipeline decorator + +For simple linear pipelines, the `@task` decorator infers dependencies from +function parameter names: + +```python +from dagron import Pipeline, task + +@task +def fetch(): + return [1, 2, 3] + +@task +def double(fetch): + return [x * 2 for x in fetch] + +@task +def save(double): + return f"saved {len(double)} items" + +pipeline = Pipeline(tasks=[fetch, double, save], name="numbers") +result = pipeline.execute() +print(result.node_results["save"].result) # "saved 3 items" +``` + +The Pipeline API is covered in depth in [Executing Tasks](/guide/core-concepts/executing-tasks). + +## Visualizing your DAG + +dagron can export the graph in several formats: + +```python +# Mermaid (great for docs) +print(dag.to_mermaid()) + +# Graphviz DOT +print(dag.to_dot()) + +# JSON (for programmatic consumption) +print(dag.to_json()) +``` + +See [Serialization](/guide/core-concepts/serialization) for the full serialization guide. 
+ +## Quick reference + +Here is a summary of the objects introduced in this guide with links to the API +reference: + +| Object | Purpose | API docs | +|--------|---------|----------| +| `dagron.DAG` | The core graph | [DAG](/api/core/core) | +| `dagron.DAG.builder()` | Fluent graph construction | [DAGBuilder](/api/core/builder) | +| `dagron.DAGExecutor` | Thread-pool executor | [DAGExecutor](/api/execution/execution) | +| `ExecutionResult` | Aggregate execution report | [ExecutionResult](/api/execution/execution) | +| `NodeResult` | Per-node outcome | [NodeResult](/api/execution/execution) | +| `Pipeline` / `@task` | Decorator-based pipelines | [Pipeline](/api/execution/pipeline) | + +## Next steps + +You now know how to install dagron, build a DAG, execute tasks, and read +results. Continue with: + +- [Building DAGs](/guide/core-concepts/building-dags) — deep dive into construction patterns, metadata, and payloads. +- [Executing Tasks](/guide/core-concepts/executing-tasks) — timeouts, cancellation, callbacks, async execution. +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analysis, querying, and what-if exploration. +- [Tracing & Profiling](/guide/observability/tracing-profiling) — Chrome-compatible traces and bottleneck detection. 
diff --git a/docs-next/content/docs/guide/meta.json b/docs-next/content/docs/guide/meta.json new file mode 100644 index 0000000..a711357 --- /dev/null +++ b/docs-next/content/docs/guide/meta.json @@ -0,0 +1,16 @@ +{ + "title": "Guide", + "root": true, + "pages": [ + "why-dagron", + "getting-started", + "typed-and-reactive", + "benchmarks", + "cookbook", + "core-concepts", + "execution-strategies", + "observability", + "advanced", + "architecture" + ] +} diff --git a/docs-next/content/docs/guide/observability/error-handling.mdx b/docs-next/content/docs/guide/observability/error-handling.mdx new file mode 100644 index 0000000..17ba52c --- /dev/null +++ b/docs-next/content/docs/guide/observability/error-handling.mdx @@ -0,0 +1,603 @@ +--- +title: Error Handling +description: Understand dagron's error hierarchy, fail-fast behavior, and patterns for robust pipeline error recovery. +--- + +# Error Handling + +Pipelines fail. Data is missing, APIs time out, code has bugs. dagron provides a structured error hierarchy, clear fail-fast semantics, and patterns for graceful error recovery so you can build pipelines that handle failure predictably. + +--- + +## Error Hierarchy + +All dagron-specific errors inherit from `DagronError`, which itself inherits from Python's `Exception`. This gives you a single base class to catch all dagron errors: + +```mermaid +classDiagram + Exception <|-- DagronError + DagronError <|-- GraphError + DagronError <|-- CycleError + DagronError <|-- DuplicateNodeError + DagronError <|-- NodeNotFoundError + DagronError <|-- EdgeNotFoundError + Exception <|-- GateRejectedError + Exception <|-- GateTimeoutError + Exception <|-- TemplateError +``` + +### Core Errors + +These are raised by the Rust-backed graph engine: + +| Error | When it is raised | +|---|---| +| `DagronError` | Base class for all dagron errors. Never raised directly. | +| `GraphError` | General graph structure error (e.g., invalid operation on the graph). 
| +| `CycleError` | Adding an edge would create a cycle, violating the DAG property. | +| `DuplicateNodeError` | Adding a node with a name that already exists in the graph. | +| `NodeNotFoundError` | Referencing a node name that does not exist. | +| `EdgeNotFoundError` | Referencing an edge that does not exist (e.g., during removal). | + +### Execution Errors + +These are raised during task execution: + +| Error | When it is raised | +|---|---| +| `GateRejectedError` | An [approval gate](/guide/execution-strategies/approval-gates) was rejected. | +| `GateTimeoutError` | An approval gate timed out before a decision was made. | +| `TemplateError` | A [template](/guide/advanced/templates) parameter is invalid. | + +--- + +## Graph Construction Errors + +### CycleError + +The most common graph error. Raised when adding an edge would create a cycle: + +```python +import dagron + +dag = dagron.DAG() +dag.add_node("a") +dag.add_node("b") +dag.add_node("c") +dag.add_edge("a", "b") +dag.add_edge("b", "c") + +try: + dag.add_edge("c", "a") # would create a -> b -> c -> a cycle +except dagron.CycleError as e: + print(f"Cycle detected: {e}") +``` + + B --> C + C -.->|"rejected"| A + style C fill:#ffcdd2,stroke:#c62828`} + caption="The edge c -> a is rejected because it would create a cycle." 
+/> + +Use the builder pattern with `allow_cycles=False` (the default) to catch cycles at build time: + +```python +try: + dag = ( + dagron.DAG.builder() + .add_node("a").add_node("b").add_node("c") + .add_edge("a", "b") + .add_edge("b", "c") + .add_edge("c", "a") # CycleError raised here + .build() + ) +except dagron.CycleError: + print("Cannot build: graph contains a cycle") +``` + +### DuplicateNodeError + +Raised when you try to add a node with a name that already exists: + +```python +dag = dagron.DAG() +dag.add_node("extract") + +try: + dag.add_node("extract") # already exists +except dagron.DuplicateNodeError as e: + print(f"Duplicate: {e}") +``` + +### NodeNotFoundError + +Raised when referencing a node that does not exist: + +```python +dag = dagron.DAG() +dag.add_node("a") + +try: + dag.add_edge("a", "b") # "b" does not exist +except dagron.NodeNotFoundError as e: + print(f"Not found: {e}") +``` + +Also raised when trying to get the payload, metadata, or predecessors of a nonexistent node: + +```python +try: + dag.get_payload("nonexistent") +except dagron.NodeNotFoundError: + print("Node does not exist") +``` + +### EdgeNotFoundError + +Raised when trying to remove or reference an edge that does not exist: + +```python +dag = dagron.DAG() +dag.add_node("a") +dag.add_node("b") + +try: + dag.remove_edge("a", "b") # no edge between a and b +except dagron.EdgeNotFoundError as e: + print(f"Edge not found: {e}") +``` + +### GraphError + +A general graph error for operations that are not covered by the more specific errors: + +```python +try: + dag.some_invalid_operation() +except dagron.GraphError as e: + print(f"Graph error: {e}") +``` + +--- + +## Catching All dagron Errors + +Use `DagronError` as a catch-all: + +```python +import dagron + +try: + dag = ( + dagron.DAG.builder() + .add_node("a") + .add_edge("a", "nonexistent") + .build() + ) +except dagron.DagronError as e: + print(f"dagron error: {type(e).__name__}: {e}") +``` + +This catches `CycleError`, 
`DuplicateNodeError`, `NodeNotFoundError`, `EdgeNotFoundError`, and `GraphError`. + +--- + +## Fail-Fast Execution + +During execution, dagron uses **fail-fast** semantics by default. When a node fails, all downstream nodes are immediately skipped: + +```python +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_node("report") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .add_edge("load", "report") + .build() +) + +def failing_transform(): + raise ValueError("Bad data format") + +executor = dagron.DAGExecutor(dag) +result = executor.execute({ + "extract": lambda: "raw data", + "transform": failing_transform, + "load": lambda: "loaded", + "report": lambda: "report", +}) + +for name, nr in result.node_results.items(): + print(f" {name}: {nr.status.value}") +``` + +Output: + +``` + extract: completed + transform: failed + load: skipped + report: skipped +``` + + transform --> load --> report + style extract fill:#c8e6c9,stroke:#2e7d32 + style transform fill:#ffcdd2,stroke:#c62828 + style load fill:#e0e0e0,stroke:#9e9e9e + style report fill:#e0e0e0,stroke:#9e9e9e`} + caption="When transform fails, load and report are skipped." 
+/> + +### Disabling Fail-Fast + +Set `fail_fast=False` to let independent branches continue executing: + +```python +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("path_a") + .add_node("path_b") + .add_node("merge") + .add_edge("extract", "path_a") + .add_edge("extract", "path_b") + .add_edge("path_a", "merge") + .add_edge("path_b", "merge") + .build() +) + +executor = dagron.DAGExecutor(dag, fail_fast=False) +result = executor.execute({ + "extract": lambda: "data", + "path_a": lambda: 1 / 0, # fails + "path_b": lambda: "success", # still runs + "merge": lambda: "merged", # still runs (has at least one completed dep) +}) + +for name, nr in result.node_results.items(): + print(f" {name}: {nr.status.value}") +``` + +Output: + +``` + extract: completed + path_a: failed + path_b: completed + merge: completed +``` + +--- + +## Inspecting Node Errors + +Each `NodeResult` contains the original exception if the node failed: + +```python +for name, nr in result.node_results.items(): + if nr.status == dagron.NodeStatus.FAILED: + print(f"Node '{name}' failed:") + print(f" Error type: {type(nr.error).__name__}") + print(f" Message: {nr.error}") + print(f" Duration: {nr.duration_seconds:.3f}s") +``` + +--- + +## ExecutionResult Summary + +The `ExecutionResult` provides aggregate counts: + +```python +result = executor.execute(tasks) + +print(f"Succeeded: {result.succeeded}") +print(f"Failed: {result.failed}") +print(f"Skipped: {result.skipped}") +print(f"Timed out: {result.timed_out}") +print(f"Cancelled: {result.cancelled}") +print(f"Duration: {result.total_duration_seconds:.1f}s") + +# Check if the entire execution succeeded +if result.failed == 0: + print("All nodes completed successfully") +else: + print(f"{result.failed} node(s) failed") +``` + +--- + +## Error Recovery Patterns + +### Pattern: Retry with Backoff + +Wrap tasks in a retry decorator: + +```python +import time + +def retry(fn, max_retries=3, backoff=1.0): + """Retry a task function with 
exponential backoff.""" + def wrapper(): + last_error = None + for attempt in range(max_retries): + try: + return fn() + except Exception as e: + last_error = e + if attempt < max_retries - 1: + time.sleep(backoff * (2 ** attempt)) + raise last_error + return wrapper + +tasks = { + "fetch_api": retry(lambda: call_flaky_api(), max_retries=3), + "process": lambda: process_data(), +} +``` + +### Pattern: Fallback Values + +Provide a fallback when a task fails: + +```python +def with_fallback(fn, default): + """Return a default value if the task fails.""" + def wrapper(): + try: + return fn() + except Exception: + return default + return wrapper + +tasks = { + "fetch_cache": with_fallback(lambda: get_from_cache(), default=None), + "fetch_api": with_fallback(lambda: get_from_api(), default={}), +} +``` + +### Pattern: Error Callbacks + +Use execution callbacks to log errors as they happen: + +```python +from dagron.execution._types import ExecutionCallbacks + +def on_failure(name, error): + log.error(f"Node '{name}' failed: {error}") + send_alert(f"Pipeline node '{name}' failed") + +callbacks = ExecutionCallbacks( + on_failure=on_failure, +) + +executor = dagron.DAGExecutor(dag, callbacks=callbacks) +``` + +### Pattern: Checkpoint and Resume + +Use [checkpointing](/guide/execution-strategies/checkpointing) to save progress and resume after fixing the failure: + +```python +from dagron.execution.checkpoint import CheckpointExecutor + +executor = CheckpointExecutor(dag, checkpoint_dir="/tmp/checkpoints") +result = executor.execute(tasks) + +if result.failed > 0: + # Fix the failing task, then resume from the checkpoint + result = executor.resume(tasks) +``` + +### Pattern: Graceful Degradation + +Design your DAG so that non-critical branches can fail without affecting the critical path: + +```python +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") # critical + .add_node("load") # critical + .add_node("send_metrics") # non-critical + 
.add_node("send_slack") # non-critical + .add_edge("extract", "transform") + .add_edge("transform", "load") + .add_edge("transform", "send_metrics") + .add_edge("load", "send_slack") + .build() +) + +# With fail_fast=False, metrics/slack failures don't block load +executor = dagron.DAGExecutor(dag, fail_fast=False) +``` + +(non-critical)"] + slack["send_slack
(non-critical)"] + extract --> transform + transform --> load + transform --> metrics + load --> slack + style load fill:#c8e6c9,stroke:#2e7d32 + style metrics fill:#fff9c4,stroke:#f9a825 + style slack fill:#fff9c4,stroke:#f9a825`} + caption="Non-critical branches can fail independently without affecting the critical path." +/> + +--- + +## Gate Errors + +[Approval gates](/guide/execution-strategies/approval-gates) raise their own errors: + +```python +from dagron.execution.gates import GateRejectedError, GateTimeoutError + +try: + controller.wait_sync("deploy") +except GateRejectedError as e: + print(f"Gate '{e.gate_name}' rejected: {e.reason}") +except GateTimeoutError as e: + print(f"Gate '{e.gate_name}' timed out after {e.timeout}s") +``` + +These errors are **not** subclasses of `DagronError` since they originate from the execution layer, not the graph engine. + +--- + +## Template Errors + +[Template](/guide/advanced/templates) parameter validation raises `TemplateError`: + +```python +from dagron.template import DAGTemplate, TemplateError + +template = DAGTemplate(params={"env": str}) + +try: + template.render(env=42) # wrong type +except TemplateError as e: + print(f"Template error: {e}") + # "Parameter 'env' expects str, got int" +``` + +--- + +## Error Handling in Hooks + +[Plugin hooks](/guide/advanced/plugins-hooks) catch and warn on callback errors to prevent them from breaking execution: + +```python +from dagron.plugins.hooks import HookRegistry, HookEvent, HookContext + +hooks = HookRegistry() + +def buggy_hook(ctx: HookContext): + raise RuntimeError("hook crashed") + +hooks.register(HookEvent.PRE_NODE, buggy_hook) + +# This fires the hook but does NOT raise. +# Instead, a RuntimeWarning is issued. 
+hooks.fire(HookContext(event=HookEvent.PRE_NODE)) +``` + +If you need to observe hook errors, use Python's `warnings` module: + +```python +import warnings + +with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + hooks.fire(HookContext(event=HookEvent.PRE_NODE)) + if w: + print(f"Hook warning: {w[0].message}") +``` + +--- + +## Defensive DAG Construction + +Use try/except around builder operations to build robust DAG construction code: + +```python +import dagron + +def build_pipeline_safely(node_specs): + """Build a DAG from specs, skipping invalid entries.""" + builder = dagron.DAG.builder() + errors = [] + + for spec in node_specs: + try: + builder.add_node(spec["name"]) + except dagron.DuplicateNodeError: + errors.append(f"Duplicate node: {spec['name']}") + + for spec in node_specs: + for dep in spec.get("dependencies", []): + try: + builder.add_edge(dep, spec["name"]) + except dagron.NodeNotFoundError as e: + errors.append(f"Missing dependency: {e}") + except dagron.CycleError as e: + errors.append(f"Would create cycle: {e}") + + if errors: + print(f"Warnings during build ({len(errors)}):") + for err in errors: + print(f" - {err}") + + return builder.build() +``` + +--- + +## Error Hierarchy Summary + +```python +import dagron +from dagron.execution.gates import GateRejectedError, GateTimeoutError +from dagron.template import TemplateError + +# Graph engine errors (from Rust) +dagron.DagronError # base class +dagron.GraphError # general graph error +dagron.CycleError # edge would create a cycle +dagron.DuplicateNodeError # node already exists +dagron.NodeNotFoundError # node does not exist +dagron.EdgeNotFoundError # edge does not exist + +# Execution errors (from Python) +GateRejectedError # gate was rejected +GateTimeoutError # gate timed out +TemplateError # template parameter invalid +``` + +--- + +## Best Practices + +1. **Catch specific errors.** Use `CycleError`, `NodeNotFoundError`, etc. 
instead of the broad `DagronError` when you know what might go wrong. + +2. **Use `fail_fast=True` by default.** This prevents wasting compute on nodes that will fail anyway due to missing upstream data. + +3. **Disable fail-fast for non-critical branches.** If some branches are optional (metrics, notifications), use `fail_fast=False` so they do not block the critical path. + +4. **Log errors via callbacks.** Use `ExecutionCallbacks.on_failure` to capture errors as they happen, not just at the end. + +5. **Inspect `NodeResult.error`.** When a node fails, the original exception is preserved for debugging. + +6. **Use retry wrappers for transient failures.** Network errors, API rate limits, and temporary file locks benefit from retry logic. + +7. **Checkpoint long-running pipelines.** For pipelines that take hours, use checkpointing so you do not lose progress on failure. + +--- + +## Related + +- [API Reference: Errors](/api/core/errors) -- full documentation for all error classes. +- [Executing Tasks](/guide/core-concepts/executing-tasks) -- how fail-fast works in the executor. +- [Approval Gates](/guide/execution-strategies/approval-gates) -- `GateRejectedError` and `GateTimeoutError`. +- [Templates](/guide/advanced/templates) -- `TemplateError`. +- [Checkpointing](/guide/execution-strategies/checkpointing) -- saving and resuming execution state. +- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- error isolation in hooks. 
diff --git a/docs-next/content/docs/guide/observability/meta.json b/docs-next/content/docs/guide/observability/meta.json new file mode 100644 index 0000000..86594a7 --- /dev/null +++ b/docs-next/content/docs/guide/observability/meta.json @@ -0,0 +1,5 @@ +{ + "title": "Observability", + "defaultOpen": false, + "pages": ["tracing-profiling", "visualization", "error-handling"] +} diff --git a/docs-next/content/docs/guide/observability/tracing-profiling.mdx b/docs-next/content/docs/guide/observability/tracing-profiling.mdx new file mode 100644 index 0000000..eea6383 --- /dev/null +++ b/docs-next/content/docs/guide/observability/tracing-profiling.mdx @@ -0,0 +1,446 @@ +--- +title: Tracing & Profiling +description: Record Chrome-compatible execution traces, analyse critical paths, detect bottlenecks, and profile parallelism efficiency. +--- + +# Tracing & Profiling + +When your DAG pipeline is slow, you need data — not guesswork. dagron provides +two complementary observability tools: + +- **Tracing** records a timestamped event log for every node during execution. +- **Profiling** analyses the trace to find the critical path, detect bottlenecks, + and measure parallelism efficiency. + +This guide shows you how to enable tracing, explore traces, export to Chrome's +trace viewer, and use the profiling API to optimise your pipelines. 
+ +## Enabling tracing + +Pass `enable_tracing=True` to any executor: + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_nodes(["extract", "transform_a", "transform_b", "merge", "load"]) + .add_edges([ + ("extract", "transform_a"), + ("extract", "transform_b"), + ("transform_a", "merge"), + ("transform_b", "merge"), + ("merge", "load"), + ]) + .build() +) + +import time + +tasks = { + "extract": lambda: time.sleep(0.1) or "data", + "transform_a": lambda: time.sleep(0.3) or "a_done", + "transform_b": lambda: time.sleep(0.2) or "b_done", + "merge": lambda: time.sleep(0.05) or "merged", + "load": lambda: time.sleep(0.1) or "loaded", +} + +result = dagron.DAGExecutor( + dag, + max_workers=4, + enable_tracing=True, +).execute(tasks) +``` + + transform_a --> merge + extract --> transform_b --> merge + merge --> load`} + caption="Pipeline with parallel transform branches — we will trace this execution." +/> + +## ExecutionTrace + +When tracing is enabled, `result.trace` contains an +[`ExecutionTrace`](/api/observability/tracing) object: + +```python +trace = result.trace + +# Quick summary +print(trace.summary()) +``` + +Output: + +``` +Execution Trace Summary +======================= +Total duration: 0.552s +Nodes executed: 5 + COMPLETED: 5 + +Timeline: + extract [0.000s - 0.102s] (0.102s) COMPLETED + transform_a [0.102s - 0.401s] (0.299s) COMPLETED + transform_b [0.102s - 0.305s] (0.203s) COMPLETED + merge [0.401s - 0.452s] (0.051s) COMPLETED + load [0.452s - 0.552s] (0.100s) COMPLETED +``` + +### Trace events + +Each node's lifecycle is recorded as a series of +[`TraceEvent`](/api/observability/tracing) objects: + +```python +for event in trace.events: + print(f"{event.timestamp:.3f}s {event.node_name:20s} {event.event_type}") +``` + +Output: + +``` +0.000s extract STARTED +0.102s extract COMPLETED +0.102s transform_a STARTED +0.102s transform_b STARTED +0.305s transform_b COMPLETED +0.401s transform_a COMPLETED +0.401s merge STARTED +0.452s merge 
COMPLETED +0.452s load STARTED +0.552s load COMPLETED +``` + +### Events for a specific node + +```python +events = trace.events_for_node("transform_a") +for e in events: + print(f"{e.event_type}: {e.timestamp:.3f}s") +# STARTED: 0.102s +# COMPLETED: 0.401s +``` + +## Chrome trace format + +dagron can export traces in Chrome's +[Trace Event Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/), +which you can visualize in `chrome://tracing` or [Perfetto](https://ui.perfetto.dev/). + +### Exporting + +```python +chrome_json = trace.to_chrome_trace() + +with open("trace.json", "w") as f: + f.write(chrome_json) +``` + +### Viewing + +1. Open Chrome and navigate to `chrome://tracing` +2. Click **Load** and select `trace.json` +3. You will see a timeline with each node as a horizontal bar + +Alternatively, open [Perfetto UI](https://ui.perfetto.dev/) and drag the file +onto the page. + +The Chrome trace view shows: +- **Parallel lanes** for concurrent tasks +- **Gap analysis** between sequential tasks +- **Duration bars** proportional to wall-clock time +- **Zoom and pan** for exploring long traces + +### JSON trace format + +```python +json_str = trace.to_json() +``` + +This exports the raw trace data as JSON (dagron's own format, not Chrome's). +Useful for custom analysis or storage. 
+ +## Profiling with profile_execution() + +The [`profile_execution()`](/api/observability/tracing) function takes a DAG and an +execution result, and produces a [`ProfileReport`](/api/observability/tracing) with +actionable insights: + +```python +from dagron import profile_execution + +report = profile_execution(dag, result) +``` + +### Critical path analysis + +The critical path is the sequence of nodes that determined the total wall-clock +time: + +```python +print("Critical path:") +for node in report.critical_path: + print(f" {node}") +# extract +# transform_a <-- bottleneck (longest task) +# merge +# load +``` + + transform_a:::critical --> merge:::critical + extract --> transform_b --> merge + merge --> load:::critical`} + caption="Critical path highlighted in red. transform_a is the bottleneck." +/> + +The critical path tells you exactly which nodes to optimise for maximum +speedup. + +### Bottleneck detection + +```python +print("Bottlenecks:") +for b in report.bottlenecks: + print(f" {b.node}: {b.duration_seconds:.3f}s ({b.percentage:.1f}% of total)") +# transform_a: 0.299s (54.2% of total) +# transform_b: 0.203s (36.8% of total) +``` + +Bottlenecks are nodes that consume a disproportionate share of the total +execution time. + +### Parallelism efficiency + +```python +print(f"Parallelism efficiency: {report.parallelism_efficiency:.1%}") +# Parallelism efficiency: 85.3% +``` + +Parallelism efficiency is the ratio of the sequential sum of all task durations +to the wall-clock time multiplied by the number of workers. A value of 100% +means all workers were busy the entire time. Low values indicate scheduling +gaps or sequential bottlenecks. 
+ +```python +# Detailed breakdown +print(f"Sequential sum: {report.sequential_sum:.3f}s") +print(f"Wall-clock: {report.wall_clock:.3f}s") +print(f"Speedup: {report.sequential_sum / report.wall_clock:.2f}x") +``` + +## Complete profiling workflow + +Here is a full example that builds, executes, traces, and profiles a pipeline: + +```python +import dagron +from dagron import profile_execution +import time + +# 1. Build the DAG +dag = ( + dagron.DAG.builder() + .add_nodes(["fetch", "validate", "feature_a", "feature_b", "feature_c", + "train", "evaluate", "deploy"]) + .add_edges([ + ("fetch", "validate"), + ("validate", "feature_a"), + ("validate", "feature_b"), + ("validate", "feature_c"), + ("feature_a", "train"), + ("feature_b", "train"), + ("feature_c", "train"), + ("train", "evaluate"), + ("evaluate", "deploy"), + ]) + .build() +) + +# 2. Define tasks with realistic durations +tasks = { + "fetch": lambda: time.sleep(0.5) or "fetched", + "validate": lambda: time.sleep(0.1) or "valid", + "feature_a": lambda: time.sleep(1.0) or "features_a", + "feature_b": lambda: time.sleep(0.8) or "features_b", + "feature_c": lambda: time.sleep(0.3) or "features_c", + "train": lambda: time.sleep(2.0) or "model", + "evaluate": lambda: time.sleep(0.5) or "metrics", + "deploy": lambda: time.sleep(0.2) or "deployed", +} + +# 3. Execute with tracing +result = dagron.DAGExecutor( + dag, + max_workers=4, + enable_tracing=True, +).execute(tasks) + +# 4. Profile +report = profile_execution(dag, result) + +# 5. 
Print report +print("=" * 60) +print("PROFILING REPORT") +print("=" * 60) +print(f"Total wall-clock time: {result.total_duration_seconds:.3f}s") +print(f"Parallelism efficiency: {report.parallelism_efficiency:.1%}") +print() + +print("Critical path:") +for node in report.critical_path: + nr = result.node_results[node] + print(f" {node:20s} {nr.duration_seconds:.3f}s") +print() + +print("Top bottlenecks:") +for b in report.bottlenecks[:3]: + print(f" {b.node:20s} {b.duration_seconds:.3f}s ({b.percentage:.1f}%)") +print() + +# 6. Export trace for Chrome +with open("pipeline_trace.json", "w") as f: + f.write(result.trace.to_chrome_trace()) +print("Trace exported to pipeline_trace.json") +``` + + validate + validate --> feature_a --> train + validate --> feature_b --> train + validate --> feature_c --> train + train --> evaluate --> deploy`} + caption="ML pipeline with three parallel feature extraction branches." +/> + +## Tracing with other executors + +Tracing works with all executor types: + +### AsyncDAGExecutor + +```python +import asyncio + +async def main(): + executor = dagron.AsyncDAGExecutor(dag, enable_tracing=True) + result = await executor.execute(tasks) + print(result.trace.summary()) + +asyncio.run(main()) +``` + +### IncrementalExecutor + +```python +executor = dagron.IncrementalExecutor(dag, enable_tracing=True) +result = executor.execute(tasks, changed_nodes=["fetch"]) + +# Trace shows which nodes were recomputed vs reused +print(result.trace.summary()) +``` + +### CheckpointExecutor + +```python +executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/checkpoints") +# Note: CheckpointExecutor uses tracing internally for resume support +result = executor.execute(tasks) +``` + +## Interpreting traces + +### Identifying scheduling gaps + +Look for periods where no node is running. These indicate: +- **Sequential bottlenecks** — a node with high in-degree that must wait for + all predecessors. 
+- **Under-utilisation** — `max_workers` is too low, or the graph is too + sequential. + +### Identifying stragglers + +If one branch takes much longer than its siblings, the join node waits +for the straggler. In the example above, `feature_a` (1.0s) is a straggler +compared to `feature_c` (0.3s). + +### Measuring overhead + +Compare the sequential sum to the parallel execution time: + +```python +sequential = sum(nr.duration_seconds for nr in result.node_results.values()) +parallel = result.total_duration_seconds +overhead = parallel - (sequential / 4) # with 4 workers + +print(f"Sequential sum: {sequential:.3f}s") +print(f"Parallel time: {parallel:.3f}s") +print(f"Overhead: {overhead:.3f}s") +``` + +## Tracing in production + +For production pipelines, consider these patterns: + +### Conditional tracing + +```python +import os + +enable = os.environ.get("DAGRON_TRACING", "false").lower() == "true" +result = dagron.DAGExecutor(dag, enable_tracing=enable).execute(tasks) +``` + +### Trace sampling + +```python +import random + +# Trace 10% of executions +enable = random.random() < 0.1 +result = dagron.DAGExecutor(dag, enable_tracing=enable).execute(tasks) + +if result.trace: + with open(f"trace_{int(time.time())}.json", "w") as f: + f.write(result.trace.to_chrome_trace()) +``` + +### Monitoring integration + +```python +from dagron import profile_execution + +result = dagron.DAGExecutor(dag, enable_tracing=True).execute(tasks) +report = profile_execution(dag, result) + +# Send metrics to your monitoring system +metrics = { + "pipeline.duration": result.total_duration_seconds, + "pipeline.parallelism_efficiency": report.parallelism_efficiency, + "pipeline.critical_path_length": len(report.critical_path), + "pipeline.nodes_succeeded": result.succeeded, + "pipeline.nodes_failed": result.failed, +} +# send_to_datadog(metrics) # or Prometheus, Grafana, etc. 
+``` + +## API reference + +| Class / Function | Docs | +|------------------|------| +| `ExecutionTrace` | [Tracing](/api/observability/tracing) | +| `TraceEvent` | [Tracing](/api/observability/tracing) | +| `profile_execution()` | [Tracing](/api/observability/tracing) | +| `ProfileReport` | [Tracing](/api/observability/tracing) | + +## Next steps + +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — pre-execution analysis and critical path estimation. +- [Incremental Execution](/guide/execution-strategies/incremental) — combine tracing with incremental runs. +- [Checkpointing](/guide/execution-strategies/checkpointing) — persist progress and resume after failures. diff --git a/docs-next/content/docs/guide/observability/visualization.mdx b/docs-next/content/docs/guide/observability/visualization.mdx new file mode 100644 index 0000000..249f06f --- /dev/null +++ b/docs-next/content/docs/guide/observability/visualization.mdx @@ -0,0 +1,439 @@ +--- +title: Visualization +description: Render DAGs as ASCII art, SVG, Graphviz DOT, Mermaid diagrams, and live web dashboards. +--- + +# Visualization + +dagron provides multiple ways to visualize your DAGs, from quick ASCII previews in the terminal to rich SVG renderings in Jupyter notebooks and live web dashboards for production monitoring. 
+ +| Method | Output | Best for | +|---|---|---| +| `pretty_print()` | ASCII text | Terminal, logs, CI output | +| `_repr_svg_()` | SVG | Jupyter notebooks | +| `dag.to_dot()` | Graphviz DOT | External tools, custom rendering | +| `dag.to_mermaid()` | Mermaid syntax | Documentation, Markdown | +| `DashboardPlugin` | Live web UI | Production monitoring, gate approval | + +--- + +## ASCII Pretty Print + +The `pretty_print()` function renders a DAG as an ASCII diagram directly in the terminal: + +```python +import dagron +from dagron.display import pretty_print + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("validate") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("extract", "validate") + .add_edge("transform", "load") + .add_edge("validate", "load") + .build() +) + +print(pretty_print(dag)) +``` + +Output: + +``` + [ extract ] + +---------------+ + [ transform ] [ validate ] + +---------------+ + [ load ] +``` + +### Layout Options + +Choose between vertical (top-to-bottom) and horizontal (left-to-right) layouts: + +```python +# Vertical (default) +print(pretty_print(dag, layout="vertical")) + +# Horizontal +print(pretty_print(dag, layout="horizontal")) +``` + +Horizontal output: + +``` +[ extract ]-->[ transform ]-->[ load ] + [ validate ]---> +``` + +### Show Payloads + +Include node payloads in the ASCII output: + +```python +dag = ( + dagron.DAG.builder() + .add_node("extract", payload="csv") + .add_node("transform", payload="pandas") + .add_node("load", payload="postgres") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +print(pretty_print(dag, show_payloads=True)) +``` + +Output: + +``` + [ extract=csv ] + | + [ transform=pandas ] + | + [ load=postgres ] +``` + +### Custom Formatters + +Supply a custom formatter to control node labels: + +```python +def status_formatter(name, payload): + status = payload or "pending" + return f"{name} ({status})" + 
+print(pretty_print(dag, node_formatter=status_formatter)) +``` + +### Max Nodes Guard + +For large graphs, `pretty_print` raises `ValueError` to prevent terminal floods: + +```python +try: + print(pretty_print(huge_dag)) +except ValueError as e: + print(e) + # "Graph has 500 nodes, exceeding max_nodes=50. Increase max_nodes to render." + +# Override the limit +print(pretty_print(huge_dag, max_nodes=500)) +``` + +--- + +## Jupyter SVG Rendering + +In Jupyter notebooks, dagron DAGs render as SVG automatically. The `_repr_svg_()` function is called by Jupyter's display system: + +```python +# In a Jupyter notebook cell: +dag # displays as an SVG graph +``` + +The rendering strategy has multiple fallbacks: + +1. **graphviz Python package** -- if installed, produces high-quality SVG via `Source(dot).pipe()`. +2. **dot CLI** -- if the `graphviz` system package is installed, calls `dot -Tsvg`. +3. **ASCII fallback** -- wraps the ASCII pretty-print output in an SVG `` element. + +### Installing Graphviz + +For the best Jupyter experience, install graphviz: + +```bash +# Python package +pip install graphviz + +# System package (needed by the Python package) +# Ubuntu/Debian: +sudo apt install graphviz +# macOS: +brew install graphviz +``` + +### Direct SVG Generation + +You can also call `_repr_svg_()` directly: + +```python +from dagron.display import _repr_svg_ + +svg_string = _repr_svg_(dag) + +# Save to file +with open("pipeline.svg", "w") as f: + f.write(svg_string) +``` + +### Large Graph Handling + +For graphs exceeding `max_nodes` (default 100), a summary SVG is returned instead: + +``` +DAG(nodes=500, edges=1200) -- too large to render +``` + +--- + +## Graphviz DOT Export + +Export the DAG as a Graphviz DOT string for use with external tools: + +```python +dot_string = dag.to_dot() +print(dot_string) +``` + +Output: + +```text +digraph { + rankdir=TB; + node [shape=box, style=rounded]; + "extract" -> "transform"; + "extract" -> "validate"; + "transform" -> 
"load"; + "validate" -> "load"; +} +``` + +### Rendering with Graphviz + +```python +import graphviz + +src = graphviz.Source(dag.to_dot()) +src.render("pipeline", format="png", cleanup=True) +# Creates pipeline.png +``` + +### Command-Line Rendering + +```bash +python -c "import dagron; print(dagron.DAG.builder()...build().to_dot())" | dot -Tpng > pipeline.png +``` + +--- + +## Mermaid Export + +Export as Mermaid syntax for embedding in Markdown documentation: + +```python +mermaid_string = dag.to_mermaid() +print(mermaid_string) +``` + +Output: + +``` +graph TD + extract --> transform + extract --> validate + transform --> load + validate --> load +``` + +### Embedding in Markdown + +````markdown +```mermaid +graph TD + extract --> transform + extract --> validate + transform --> load + validate --> load +``` +```` + +### Using with DagDiagram Component + +In dagron's documentation site, use the `DagDiagram` component for interactive rendering: + +```jsx + +``` + +The `DagDiagram` component renders an interactive Mermaid diagram of your pipeline. 
+ +--- + +## Live Web Dashboard + +For production monitoring, the `DashboardPlugin` serves a real-time web UI: + +```python +from dagron.dashboard import DashboardPlugin +from dagron.plugins.hooks import HookRegistry +from dagron.plugins.manager import PluginManager + +# Set up the dashboard +dashboard = DashboardPlugin( + host="127.0.0.1", + port=8765, + open_browser=True, +) + +hooks = HookRegistry() +manager = PluginManager(hooks) +manager.register(dashboard) +manager.initialize_all() +# prints: "Dashboard: http://127.0.0.1:8765" + +# Execute with hooks +executor = dagron.DAGExecutor(dag, hooks=hooks) +result = executor.execute(tasks) + +manager.teardown_all() +``` + +### Dashboard Features + +The dashboard provides: + +- **Live graph visualization** -- nodes change color as they transition through states: + - Pending + - Running + - Completed + - Failed + - Skipped + +- **Execution timeline** -- see which nodes are running at each point in time. + +- **Gate management** -- if a `GateController` is provided, approve/reject buttons appear for waiting gates. + +- **Execution summary** -- after completion, shows total duration and per-status counts. + +### Dashboard with Gates + +```python +from dagron.execution.gates import ApprovalGate, GateController + +controller = GateController({ + "review": ApprovalGate(timeout=600), + "deploy": ApprovalGate(timeout=300), +}) + +dashboard = DashboardPlugin( + port=8765, + gate_controller=controller, +) +``` + +When a gate enters `WAITING` state, the dashboard shows clickable **Approve** and **Reject** buttons. + +### Technical Details + +The dashboard web server is implemented in Rust using axum and tokio. It runs on a background OS thread and communicates with the Python hooks via thread-safe callbacks. The Rust implementation ensures low overhead even during high-frequency hook events. + + +The dashboard requires dagron to be built with the `dashboard` Cargo feature. 
If it is not available, importing `DashboardPlugin` raises an `ImportError` with build instructions. + +--- + +## Combining Visualization Methods + +### Export All Formats + +```python +import dagron +from dagron.display import pretty_print, _repr_svg_ + +dag = ( + dagron.DAG.builder() + .add_node("A").add_node("B").add_node("C") + .add_edge("A", "B").add_edge("A", "C") + .build() +) + +# ASCII +ascii_art = pretty_print(dag) +print(ascii_art) + +# DOT +dot = dag.to_dot() +with open("graph.dot", "w") as f: + f.write(dot) + +# Mermaid +mermaid = dag.to_mermaid() +with open("graph.mmd", "w") as f: + f.write(mermaid) + +# SVG +svg = _repr_svg_(dag) +with open("graph.svg", "w") as f: + f.write(svg) +``` + +### Visualization in CI Logs + +Use `pretty_print()` to include a graph visualization in your CI output: + +```python +import dagron +from dagron.display import pretty_print + +def print_pipeline_summary(dag, result): + """Print a visual summary at the end of a CI run.""" + print("\n--- Pipeline Graph ---") + print(pretty_print(dag, layout="horizontal")) + print(f"\n--- Results ---") + print(f" Succeeded: {result.succeeded}") + print(f" Failed: {result.failed}") + print(f" Skipped: {result.skipped}") + print(f" Duration: {result.total_duration_seconds:.1f}s") +``` + +### Visualization in Documentation + +Generate Mermaid diagrams for your project documentation: + +````python +# Generate documentation diagrams +mermaid = dag.to_mermaid() + +doc = f""" +# Pipeline Architecture + +```mermaid +{mermaid} +``` + +This pipeline has {dag.node_count()} nodes and {dag.edge_count()} edges. +""" +```` + +--- + +## Best Practices + +1. **Use `pretty_print()` for quick debugging.** It requires no external dependencies and works in any terminal. + +2. **Install `graphviz` for Jupyter.** The SVG rendering is significantly better with the graphviz package. + +3. **Use `to_mermaid()` for documentation.** Mermaid renders natively in GitHub, GitLab, and most documentation sites. + +4. 
**Use `DashboardPlugin` for production.** The live dashboard gives operators real-time visibility and gate control. + +5. **Set `max_nodes` appropriately.** For large graphs, increase `max_nodes` or use `to_dot()` with Graphviz's layout engines, which handle hundreds of nodes well. + +6. **Export DOT for complex layouts.** When Mermaid's layout is not sufficient, use `to_dot()` and render with Graphviz's `neato`, `fdp`, or `sfdp` engines. + +--- + +## Related + +- [API Reference: Display](/api/utilities/display) -- full API documentation for visualization functions. +- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- the plugin system that powers the DashboardPlugin. +- [Approval Gates](/guide/execution-strategies/approval-gates) -- gate approval via the dashboard UI. +- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) -- programmatic graph analysis. diff --git a/docs-next/content/docs/guide/typed-and-reactive.mdx b/docs-next/content/docs/guide/typed-and-reactive.mdx new file mode 100644 index 0000000..7af3be5 --- /dev/null +++ b/docs-next/content/docs/guide/typed-and-reactive.mdx @@ -0,0 +1,297 @@ +--- +title: Typed Handles & Reactive Engine +description: NodeRef typed handles, @dagron.flow compose API, generic FlowFuture / NodeResult, effect tags, the reactive Signal/Computed/Watcher engine, the cross-process content cache, and time-travel replay. +--- + +# Typed Handles & Reactive Engine + +dagron ships seven coordinated additions that move beyond stringly-typed +node addressing and add five headline differentiators no other Python DAG +library combines: typed handles, a Tawazi-style flow API, fine-grained +reactive recomputation, content-addressed cross-process caching, and +time-travel replay. Existing string-based code keeps working — every new +feature is opt-in. + +## 1. `NodeRef` — typed node handles + +`dag.add_node()` returns a stable `NodeRef`. 
Every public method that +takes a node identifier accepts both `str` and `NodeRef`, so existing +code keeps working. + +```python +from dagron import DAG, NodeRef + +dag = DAG() +extract = dag.add_node("extract") # NodeRef +transform = dag.add_node("transform") # NodeRef + +dag.add_edge(extract, transform) # NodeRef → NodeRef +dag.add_edge("extract", transform) # str → NodeRef +dag.add_edge(extract, "transform") # NodeRef → str + +isinstance(extract, NodeRef) # True +extract.name # "extract" +extract.epoch # 0 +``` + +NodeRefs survive unrelated mutations (adding other nodes / edges) and +detect *remove-then-readd*: removing `"extract"` and re-adding a node +with the same name produces a NodeRef with a different epoch, so the old +reference correctly raises `StaleNodeRefError`. + +```python +import pytest +from dagron import StaleNodeRefError + +dag.remove_node(extract) +new_extract = dag.add_node("extract") # fresh epoch +with pytest.raises(StaleNodeRefError): + dag.has_edge(extract, transform) # the old extract is stale +``` + +## 2. `@dagron.flow` — Pythonic compose API + +Build a DAG by writing a regular Python function. Each `@task` call +inside a `@flow` body records a node; passing one task's return value to +another wires the edge. No string IDs, no fluent builder — just Python. + +```python +import dagron + +@dagron.task +def fetch() -> list[int]: + return [1, 2, 3, 4] + +@dagron.task +def total(rows: list[int]) -> int: + return sum(rows) + +@dagron.task +def label(value: int) -> str: + return f"Total = {value}" + +@dagron.flow +def pipeline(): + return label(total(fetch())) + +dag = pipeline.dag() # the underlying DAG, for analysis +result = pipeline() # builds + runs → ExecutionResult +result["label"].result # "Total = 10" +``` + +The same `@task` decorator is compatible with the legacy parameter-name +inference of `Pipeline`, so a single set of tasks can power both APIs. 
+Inside a `@flow` context, calling `transform(raw)` returns a +`FlowFuture[T]` placeholder; outside one, it executes normally. + +## 3. Generic typing & `dagron.stubgen` + +`FlowFuture[T]` and `NodeResult[T]` carry the wrapped task's return type +all the way through: + +```python +from dagron import FlowFuture +from dagron.execution._types import NodeResult + +@dagron.task +def fetch() -> list[int]: ... + +@dagron.task +def total(rows: list[int]) -> int: ... + +@dagron.flow +def pipeline(): + raw = fetch() # type-checks as list[int] + return total(raw) # type-checks as int + +result = pipeline() +result[fetch].result # NodeResult[list[int]] → list[int] +result[total].result # NodeResult[int] → int +``` + +For string-keyed lookups, generate a stub: + +```python +from dagron.stubgen import generate_stub + +stub = generate_stub( + pipeline.dag(), + tasks={"fetch": fetch, "total": total}, + name="PipelineResult", +) +print(stub) +# class PipelineResult: +# @overload +# def __getitem__(self, key: Literal['fetch']) -> NodeResult[list[int]]: ... +# @overload +# def __getitem__(self, key: Literal['total']) -> NodeResult[int]: ... +``` + +Save the output as a `.pyi` file alongside your code; `mypy` will type +`result["fetch"]` as `NodeResult[list[int]]` even though `result` itself +is just `ExecutionResult`. + +## 4. Effect tags + +Tag every `@task` with its side-effect class — the engine uses these +for parallelism gating today and for cache / replay semantics in the +features below. + +```python +from dagron import Effect + +@dagron.task # defaults to Effect.PURE +def add(a: int, b: int) -> int: return a + b + +@dagron.task(effect=Effect.NETWORK) +def fetch_user(uid: int) -> dict: ... 
+ +@dagron.task(effect=Effect.NONDETERMINISTIC) +def now() -> float: + import time; return time.time() +``` + +Properties: + +| Effect | `is_cacheable` | `is_deterministic` | `is_isolated` | +|--------|-----:|-----:|-----:| +| `PURE` | ✅ | ✅ | ❌ | +| `READ` | ✅ | ✅ | ❌ | +| `WRITE` | ❌ | ❌ | ❌ | +| `NETWORK` | ❌ | ❌ | ❌ | +| `NONDETERMINISTIC` | ❌ | ❌ | ✅ | + +`@flow` mirrors each task's effect onto its DAG node's metadata; read +back with `dagron.effects_of(dag)`. An AST-scan heuristic emits a +`UserWarning` when a `PURE` task appears to call impure functions +(`time.time`, `random.*`, `os.*`, `requests.*`, …). + +`DAGExecutor(enforce_effect_isolation=True)` serializes +`NONDETERMINISTIC` tasks while letting other effects parallelize freely. + +## 5. Reactive engine — `Signal` / `Computed` / `Watcher` + +`dagron.reactive` provides Solid.js / Jane-Street-`Incremental` style +primitives where the dependency graph is *implicit*: building a +`Computed` records its read dependencies as a side-effect of evaluating +the function. + +```python +import dagron.reactive as dr + +a = dr.signal(1) +b = dr.signal(2) +s = dr.computed(lambda: a() + b()) +p = dr.computed(lambda: s() * 10) + +p() # 30 — initial compute +a.set(5) # invalidates s and p; b untouched +p() # 70 — recomputes only s and p + +@dr.watch +def watch_p(): + print("p =", p()) + +with dr.batch(): # glitch-free + a.set(0) + b.set(0) +# watch_p fires exactly once after the batch, sees p == 0 +``` + +**Headline benchmark**: in a graph of 10,000 derived nodes off one root +signal, mutating the root and reading just one branch takes ~10 µs — +the engine recomputes only the read path, skipping the other 9999 +invalidated-but-unread branches. This is the differentiator no other +Python DAG library delivers. + +This module is distinct from the existing +`dagron.execution.reactive.ReactiveDAG`, which wraps a *pre-built* +`dagron.DAG` and exposes a push-based `subscribe()` / `set_input()` API. 
+Use whichever fits your shape: the reactive primitives for fresh +dependency graphs you build in code; `ReactiveDAG` to layer reactivity +over a DAG you already have. + +## 6. Cross-process content-addressed cache + +`dagron.contentcache` is Nix-flake-style: the cache is keyed by content +hash, the filesystem path *is* the index, and there's no +`index.json` to keep in sync. Independent processes share intermediates +transparently — a build on one CI worker hits the cache on another the +moment they compute the same fingerprint. + +```python +from dagron import Effect +from dagron.contentcache import ContentCache + +cache = ContentCache() # ~/.cache/dagron/cas + +def expensive(x: int) -> int: + return x * 1000 + +# First call: miss, computes, writes payload to CAS. +val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) +# In another process / another day: +val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) +# `hit` is True; the payload deserialized straight from disk. +``` + +Effect-aware: `WRITE` / `NETWORK` / `NONDETERMINISTIC` tasks bypass the +cache entirely (their results aren't reproducible). Pluggable via the +`Hasher` protocol — `default_hash` (pickle + blake2b) handles most +Python types; `numpy_hash` uses `array.tobytes()` for byte equality; +write your own for polars frames or any tobyte-friendly type. Honors +`$DAGRON_CACHE_DIR`. + +## 7. Time-travel replay + +`dagron.trace` writes an append-only JSONL log of node executions; each +record references a payload stored by fingerprint in the +`ContentCache`, so identical values across runs deduplicate +automatically. `replay(at=t)` walks the log up to time `t` and +reconstructs the per-node state. 
+ +```python +from dagron.contentcache import ContentCache +from dagron.trace import TraceWriter, replay + +cas = ContentCache() +log_path = "run-2026-05.jsonl" + +with TraceWriter(log_path, cas=cas) as w: + w.record("fetch", value=[1, 2, 3], effect=Effect.PURE, timestamp=t0) + w.record("transform", value=6, effect=Effect.PURE, timestamp=t0 + 1) + w.record("publish", value="ok", effect=Effect.NETWORK, timestamp=t0 + 2) + +# Days later, in another process: +state = replay(log_path, at=t0 + 1.5, cas=cas) +state["fetch"].value # [1, 2, 3] — byte-identical to the original run +state["transform"].value # 6 +"publish" in state # False — cutoff was before publish ran + +state = replay(log_path, cas=cas) +state["publish"].value # "ok" — surfaced from the log +state["publish"].replayable # False — NETWORK is non-deterministic +``` + +Pure / READ nodes replay byte-identically. Impure nodes +(`WRITE`/`NETWORK`/`NONDETERMINISTIC`) are flagged `replayable=False` +but their *logged* values are still exposed, so you can audit what the +run actually produced. Honors `$DAGRON_TRACE_DIR`. + +## How they fit together + +The seven additions are designed to compose: + +* **NodeRef** is the substrate — every later API references nodes by + the typed handle. +* **`@flow`** records call structure into a `dagron.DAG`, mirroring each + task's **effect** onto node metadata. +* **`stubgen`** turns the `@flow`-built DAG into a typed lookup stub. +* **Effects** drive parallelism isolation, cache opt-in, and replay + reproducibility flags — one tag, three downstream behaviours. +* **Reactive** is the "live" face of computation; **content cache** is + its persistent face; **replay** is its retrospective face. + +You can adopt any subset independently. The string-based DAG API, +`Pipeline`, and the existing `ReactiveDAG` / `ContentAddressableCache` +classes remain unchanged. 
diff --git a/docs-next/content/docs/guide/why-dagron.mdx b/docs-next/content/docs/guide/why-dagron.mdx new file mode 100644 index 0000000..7372c89 --- /dev/null +++ b/docs-next/content/docs/guide/why-dagron.mdx @@ -0,0 +1,52 @@ +--- +title: Why dagron? +description: How dagron compares to NetworkX, graphlib, Dask, Airflow, and Prefect — and when to use it. +--- + +# Why dagron? + +**dagron is an embeddable DAG library, not a deployment framework.** It gives your application a Rust-fast graph engine with Python ergonomics — no scheduler daemon, no YAML configs, no cloud console. + +You import dagron, build a graph, and execute it. Your process, your rules. + +--- + +## Comparison + +| | dagron | NetworkX | graphlib | Dask | Airflow | Prefect | +|---|---|---|---|---|---|---| +| **Type** | Library | Library | Stdlib | Framework | Orchestrator | Orchestrator | +| **Language** | Rust + Python | Python | Python | Python | Python | Python | +| **DAG execution** | Built-in (thread/async/distributed) | No | No | Yes | Yes | Yes | +| **Incremental recomputation** | Yes | No | No | No | No | No | +| **Checkpointing** | Yes | No | No | Partial | Yes | Yes | +| **Dynamic DAG expansion** | Yes (runtime) | N/A | N/A | Limited | Yes (2.x) | Yes | +| **Approval gates** | Yes | No | No | No | Plugin | Plugin | +| **Resource-aware scheduling** | Yes (CPU/GPU/memory slots) | No | No | Yes (workers) | Yes (pools) | Yes (work pools) | +| **Distributed execution** | Ray, Celery backends | No | No | Yes (native) | Yes (Celery) | Yes (native) | +| **Overhead** | ~0 (library import) | ~0 | ~0 | Scheduler process | Web server + DB + scheduler | API server + DB | +| **Performance** | Rust core, 3-12x vs NetworkX | Pure Python | Pure Python (minimal) | Python + C extensions | Python | Python | + +--- + +## When to use dagron + +dagron is a great fit when you need a **task graph inside your own process**: + +- **Build systems** — model file targets as nodes, skip unchanged targets with 
incremental execution +- **Spreadsheet engines** — cells as nodes, formula dependencies as edges, recalculate only dirty cells +- **CI/CD schedulers** — lint/test/build/deploy with resource constraints and approval gates +- **ETL pipelines** — multi-stage data pipelines with checkpointing and crash recovery +- **Game asset pipelines** — texture/model/shader compilation with dependency tracking +- **Reactive UIs** — propagate state changes through a dependency graph + +The common thread: you want the graph engine **embedded in your application**, not running as a separate service. + +--- + +## When NOT to use dagron + +- **Managed cloud orchestration** — If you want a web UI, user management, scheduled triggers, and a managed service, use [Airflow](https://airflow.apache.org/) or [Prefect](https://www.prefect.io/). +- **General graph database** — If you need property graphs, Cypher queries, or persistent graph storage, use [Neo4j](https://neo4j.com/). +- **Undirected / cyclic graphs** — dagron enforces acyclicity. For general graph algorithms on undirected graphs, use [NetworkX](https://networkx.org/). +- **Distributed-first data processing** — If your primary need is data parallelism across a cluster, use [Dask](https://www.dask.org/) or [Spark](https://spark.apache.org/). diff --git a/docs-next/content/docs/index.mdx b/docs-next/content/docs/index.mdx new file mode 100644 index 0000000..9a5b11e --- /dev/null +++ b/docs-next/content/docs/index.mdx @@ -0,0 +1,184 @@ +--- +title: Introduction +description: dagron — a high-performance DAG execution engine for Python, powered by Rust. +--- + +# dagron + +**High-performance DAG execution engine for Python, powered by Rust.** + +**Up to 12x faster than NetworkX** on 10K-node DAG validation, with sub-microsecond reachability queries after index build. [See benchmarks](/guide/benchmarks). + +**For engineers embedding task graphs in applications** — build systems, data pipelines, spreadsheet engines, CI/CD schedulers. 
[Why dagron?](/guide/why-dagron) + +dagron lets you define directed acyclic graphs of tasks and execute them with maximum parallelism, rich observability, and dozens of execution strategies — from simple thread pools to distributed clusters. + +The core graph data structure lives in Rust (via PyO3) for zero-copy speed, while the Python layer provides an ergonomic API for building, executing, analyzing, and visualizing your DAGs. + +```python +import dagron + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +result = dagron.DAGExecutor(dag).execute({ + "extract": lambda: fetch_data(), + "transform": lambda: clean(result), + "load": lambda: write_to_db(result), +}) +``` + +## Features + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +## Getting Started + +Install dagron from PyPI: + +```bash +pip install dagron +``` + +**Requirements:** Python 3.12+ · Linux (x86_64, aarch64) · macOS (x86_64, Apple Silicon) · Windows (x86_64) + +Then head to the [Getting Started](/guide/getting-started) guide. + +--- + +**[Benchmarks](/guide/benchmarks)** | **[Why dagron?](/guide/why-dagron)** | **[Cookbook](/guide/cookbook)** | **[Architecture](/guide/architecture)** | **[Changelog](https://github.com/pratyush618/dagron/blob/master/CHANGELOG.md)** diff --git a/docs-next/content/docs/meta.json b/docs-next/content/docs/meta.json new file mode 100644 index 0000000..80e4b7f --- /dev/null +++ b/docs-next/content/docs/meta.json @@ -0,0 +1,4 @@ +{ + "title": "dagron", + "pages": ["index", "guide", "api"] +} diff --git a/docs-next/next.config.mjs b/docs-next/next.config.mjs new file mode 100644 index 0000000..bf33f76 --- /dev/null +++ b/docs-next/next.config.mjs @@ -0,0 +1,23 @@ +import { createMDX } from "fumadocs-mdx/next"; + +const withMDX = createMDX(); + +// Set `DOCS_BASE_PATH=/dagron` in CI to deploy under github.io/dagron. +// Local `pnpm build && pnpm start` leaves it unset, so the static export +// serves cleanly from the root and `serve out` just works. +const basePath = process.env.DOCS_BASE_PATH ?? 
""; + +/** @type {import('next').NextConfig} */ +const config = { + output: "export", + basePath, + reactStrictMode: true, + images: { + unoptimized: true, + }, + env: { + NEXT_PUBLIC_DOCS_BASE_PATH: basePath, + }, +}; + +export default withMDX(config); diff --git a/docs-next/package.json b/docs-next/package.json new file mode 100644 index 0000000..2ab6dd3 --- /dev/null +++ b/docs-next/package.json @@ -0,0 +1,40 @@ +{ + "name": "docs", + "version": "0.0.0", + "private": true, + "scripts": { + "build": "next build", + "dev": "next dev", + "start": "serve out", + "types:check": "fumadocs-mdx && next typegen && tsc --noEmit", + "postinstall": "fumadocs-mdx", + "lint": "biome check", + "format": "biome format --write" + }, + "dependencies": { + "@orama/orama": "^3.1.18", + "clsx": "^2.1.1", + "fumadocs-core": "16.8.5", + "fumadocs-mdx": "14.3.2", + "fumadocs-ui": "16.8.5", + "lucide-react": "^1.14.0", + "mermaid": "^11.14.0", + "next": "16.2.4", + "next-themes": "^0.4.6", + "react": "^19.2.5", + "react-dom": "^19.2.5", + "tailwind-merge": "^3.5.0" + }, + "devDependencies": { + "@biomejs/biome": "^2.4.14", + "@tailwindcss/postcss": "^4.2.4", + "@types/mdx": "^2.0.13", + "@types/node": "^25.6.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "postcss": "^8.5.13", + "serve": "^14.2.6", + "tailwindcss": "^4.2.4", + "typescript": "^6.0.3" + } +} diff --git a/docs-next/pnpm-lock.yaml b/docs-next/pnpm-lock.yaml new file mode 100644 index 0000000..f8a2bd5 --- /dev/null +++ b/docs-next/pnpm-lock.yaml @@ -0,0 +1,5659 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@orama/orama': + specifier: ^3.1.18 + version: 3.1.18 + clsx: + specifier: ^2.1.1 + version: 2.1.1 + fumadocs-core: + specifier: 16.8.5 + version: 
16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3) + fumadocs-mdx: + specifier: 14.3.2 + version: 14.3.2(@types/mdast@4.0.4)(@types/mdx@2.0.13)(@types/react@19.2.14)(fumadocs-core@16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react@19.2.6) + fumadocs-ui: + specifier: 16.8.5 + version: 16.8.5(@tailwindcss/oxide@4.3.0)(@types/mdx@2.0.13)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(fumadocs-core@16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(tailwindcss@4.3.0) + lucide-react: + specifier: ^1.14.0 + version: 1.14.0(react@19.2.6) + mermaid: + specifier: ^11.14.0 + version: 11.14.0 + next: + specifier: 16.2.4 + version: 16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + next-themes: + specifier: ^0.4.6 + version: 0.4.6(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: + specifier: ^19.2.5 + version: 19.2.6 + react-dom: + specifier: ^19.2.5 + version: 19.2.6(react@19.2.6) + tailwind-merge: + specifier: ^3.5.0 + version: 3.5.0 + devDependencies: + '@biomejs/biome': + specifier: ^2.4.14 + version: 2.4.15 + '@tailwindcss/postcss': + specifier: ^4.2.4 + version: 4.3.0 + '@types/mdx': + specifier: ^2.0.13 + version: 2.0.13 + '@types/node': + 
specifier: ^25.6.0 + version: 25.6.2 + '@types/react': + specifier: ^19.2.14 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.14) + postcss: + specifier: ^8.5.13 + version: 8.5.14 + serve: + specifier: ^14.2.6 + version: 14.2.6 + tailwindcss: + specifier: ^4.2.4 + version: 4.3.0 + typescript: + specifier: ^6.0.3 + version: 6.0.3 + +packages: + + '@alloc/quick-lru@5.2.0': + resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} + engines: {node: '>=10'} + + '@antfu/install-pkg@1.1.0': + resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} + + '@biomejs/biome@2.4.15': + resolution: {integrity: sha512-j5VH3a/h/HXTKBM50MDMxRCzkeLv9S2XJcW2WgnZT1+xyisi+0bISrXR82gCX+8S9lvK0skEvHJRN+3Ktr2hlw==} + engines: {node: '>=14.21.3'} + hasBin: true + + '@biomejs/cli-darwin-arm64@2.4.15': + resolution: {integrity: sha512-rF3PPqLq1yoST79zaQbDjVJwsuIeci/O+9bgNmC5QpgOqz6aqYuzA4abyAGx+mgyiDXn4A049xAN8gijbuR1Qg==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [darwin] + + '@biomejs/cli-darwin-x64@2.4.15': + resolution: {integrity: sha512-/5KHXYMfSJs1fNXiX30xFtI8JcCFV6zaVVLxOa0M2sfqBKHkpQhRTv94yxQWxeTY2lzo2OuTlNvPC+hDQt2wcQ==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [darwin] + + '@biomejs/cli-linux-arm64-musl@2.4.15': + resolution: {integrity: sha512-ZPcxznxm0pogHBLZhYntyR3sR+MrZjqJIKEr7ZqVen0Rl+P/4upVmfYXjftizi9RoqZntg33fv/1fbdhbYXpEQ==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@biomejs/cli-linux-arm64@2.4.15': + resolution: {integrity: sha512-owaAMZD/T4LrD0ELNCk0Km3qrRHuM0X6EAyVE1FSqGY0rbLoiDLrO4Us2tllm6cAeB2Ioa9C2C08NZPdr8+0Ug==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@biomejs/cli-linux-x64-musl@2.4.15': + resolution: {integrity: 
sha512-CNq/9W38SYSH023lfcQ4KKU8K0YX8T//FZUhcgtMMRABDojx5XsMV7jlweAvGSl389wJQB29Qo6Zb/a+jdvt+w==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@biomejs/cli-linux-x64@2.4.15': + resolution: {integrity: sha512-0jj7THz12GbUOLmMibktK6DZjqz2zV64KFxyBtcFTKPiiOIY0a7vns1elpO1dERvxpsZ5ik0oFfz0oGwFde1+g==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@biomejs/cli-win32-arm64@2.4.15': + resolution: {integrity: sha512-ouhkYdlhp/1GghEJPdWwD/Vi3gQ1nFxuSpMolWsbq3Lsq3QUR4jl6UdhhscdCugKU5vOEuMiJhvKj66O0OCq+w==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [win32] + + '@biomejs/cli-win32-x64@2.4.15': + resolution: {integrity: sha512-zBrGq5mx5wwpnow4+2BxUvleDM+GNd4sLbPaMapsSLQLD0NGRCquqPBTgN+7XkUteHvj7M+BstuI8tmnV7+HgQ==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [win32] + + '@braintree/sanitize-url@7.1.2': + resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==} + + '@chevrotain/cst-dts-gen@12.0.0': + resolution: {integrity: sha512-fSL4KXjTl7cDgf0B5Rip9Q05BOrYvkJV/RrBTE/bKDN096E4hN/ySpcBK5B24T76dlQ2i32Zc3PAE27jFnFrKg==} + + '@chevrotain/gast@12.0.0': + resolution: {integrity: sha512-1ne/m3XsIT8aEdrvT33so0GUC+wkctpUPK6zU9IlOyJLUbR0rg4G7ZiApiJbggpgPir9ERy3FRjT6T7lpgetnQ==} + + '@chevrotain/regexp-to-ast@12.0.0': + resolution: {integrity: sha512-p+EW9MaJwgaHguhoqwOtx/FwuGr+DnNn857sXWOi/mClXIkPGl3rn7hGNWvo31HA3vyeQxjqe+H36yZJwYU8cA==} + + '@chevrotain/types@12.0.0': + resolution: {integrity: sha512-S+04vjFQKeuYw0/eW3U52LkAHQsB1ASxsPGsLPUyQgrZ2iNNibQrsidruDzjEX2JYfespXMG0eZmXlhA6z7nWA==} + + '@chevrotain/utils@12.0.0': + resolution: {integrity: sha512-lB59uJoaGIfOOL9knQqQRfhl9g7x8/wqFkp13zTdkRu1huG9kg6IJs1O8hqj9rs6h7orGxHJUKb+mX3rPbWGhA==} + + '@emnapi/runtime@1.10.0': + resolution: {integrity: sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==} + + '@esbuild/aix-ppc64@0.28.0': + resolution: 
{integrity: sha512-lhRUCeuOyJQURhTxl4WkpFTjIsbDayJHih5kZC1giwE+MhIzAb7mEsQMqMf18rHLsrb5qI1tafG20mLxEWcWlA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.28.0': + resolution: {integrity: sha512-+WzIXQOSaGs33tLEgYPYe/yQHf0WTU0X42Jca3y8NWMbUVhp7rUnw+vAsRC/QiDrdD31IszMrZy+qwPOPjd+rw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.28.0': + resolution: {integrity: sha512-wqh0ByljabXLKHeWXYLqoJ5jKC4XBaw6Hk08OfMrCRd2nP2ZQ5eleDZC41XHyCNgktBGYMbqnrJKq/K/lzPMSQ==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.28.0': + resolution: {integrity: sha512-+VJggoaKhk2VNNqVL7f6S189UzShHC/mR9EE8rDdSkdpN0KflSwWY/gWjDrNxxisg8Fp1ZCD9jLMo4m0OUfeUA==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.28.0': + resolution: {integrity: sha512-0T+A9WZm+bZ84nZBtk1ckYsOvyA3x7e2Acj1KdVfV4/2tdG4fzUp91YHx+GArWLtwqp77pBXVCPn2We7Letr0Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.28.0': + resolution: {integrity: sha512-fyzLm/DLDl/84OCfp2f/XQ4flmORsjU7VKt8HLjvIXChJoFFOIL6pLJPH4Yhd1n1gGFF9mPwtlN5Wf82DZs+LQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.28.0': + resolution: {integrity: sha512-l9GeW5UZBT9k9brBYI+0WDffcRxgHQD8ShN2Ur4xWq/NFzUKm3k5lsH4PdaRgb2w7mI9u61nr2gI2mLI27Nh3Q==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.28.0': + resolution: {integrity: sha512-BXoQai/A0wPO6Es3yFJ7APCiKGc1tdAEOgeTNy3SsB491S3aHn4S4r3e976eUnPdU+NbdtmBuLncYir2tMU9Nw==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.28.0': + resolution: {integrity: sha512-RVyzfb3FWsGA55n6WY0MEIEPURL1FcbhFE6BffZEMEekfCzCIMtB5yyDcFnVbTnwk+CLAgTujmV/Lgvih56W+A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.28.0': + resolution: {integrity: 
sha512-CjaaREJagqJp7iTaNQjjidaNbCKYcd4IDkzbwwxtSvjI7NZm79qiHc8HqciMddQ6CKvJT6aBd8lO9kN/ZudLlw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.28.0': + resolution: {integrity: sha512-KBnSTt1kxl9x70q+ydterVdl+Cn0H18ngRMRCEQfrbqdUuntQQ0LoMZv47uB97NljZFzY6HcfqEZ2SAyIUTQBQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.28.0': + resolution: {integrity: sha512-zpSlUce1mnxzgBADvxKXX5sl8aYQHo2ezvMNI8I0lbblJtp8V4odlm3Yzlj7gPyt3T8ReksE6bK+pT3WD+aJRg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.28.0': + resolution: {integrity: sha512-2jIfP6mmjkdmeTlsX/9vmdmhBmKADrWqN7zcdtHIeNSCH1SqIoNI63cYsjQR8J+wGa4Y5izRcSHSm8K3QWmk3w==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.28.0': + resolution: {integrity: sha512-bc0FE9wWeC0WBm49IQMPSPILRocGTQt3j5KPCA8os6VprfuJ7KD+5PzESSrJ6GmPIPJK965ZJHTUlSA6GNYEhg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.28.0': + resolution: {integrity: sha512-SQPZOwoTTT/HXFXQJG/vBX8sOFagGqvZyXcgLA3NhIqcBv1BJU1d46c0rGcrij2B56Z2rNiSLaZOYW5cUk7yLQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.28.0': + resolution: {integrity: sha512-SCfR0HN8CEEjnYnySJTd2cw0k9OHB/YFzt5zgJEwa+wL/T/raGWYMBqwDNAC6dqFKmJYZoQBRfHjgwLHGSrn3Q==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.28.0': + resolution: {integrity: sha512-us0dSb9iFxIi8srnpl931Nvs65it/Jd2a2K3qs7fz2WfGPHqzfzZTfec7oxZJRNPXPnNYZtanmRc4AL/JwVzHQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.28.0': + resolution: {integrity: sha512-CR/RYotgtCKwtftMwJlUU7xCVNg3lMYZ0RzTmAHSfLCXw3NtZtNpswLEj/Kkf6kEL3Gw+BpOekRX0BYCtklhUw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.28.0': + resolution: {integrity: 
sha512-nU1yhmYutL+fQ71Kxnhg8uEOdC0pwEW9entHykTgEbna2pw2dkbFSMeqjjyHZoCmt8SBkOSvV+yNmm94aUrrqw==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.28.0': + resolution: {integrity: sha512-cXb5vApOsRsxsEl4mcZ1XY3D4DzcoMxR/nnc4IyqYs0rTI8ZKmW6kyyg+11Z8yvgMfAEldKzP7AdP64HnSC/6g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.28.0': + resolution: {integrity: sha512-8wZM2qqtv9UP3mzy7HiGYNH/zjTA355mpeuA+859TyR+e+Tc08IHYpLJuMsfpDJwoLo1ikIJI8jC3GFjnRClzA==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.28.0': + resolution: {integrity: sha512-FLGfyizszcef5C3YtoyQDACyg95+dndv79i2EekILBofh5wpCa1KuBqOWKrEHZg3zrL3t5ouE5jgr94vA+Wb2w==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.28.0': + resolution: {integrity: sha512-1ZgjUoEdHZZl/YlV76TSCz9Hqj9h9YmMGAgAPYd+q4SicWNX3G5GCyx9uhQWSLcbvPW8Ni7lj4gDa1T40akdlw==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.28.0': + resolution: {integrity: sha512-Q9StnDmQ/enxnpxCCLSg0oo4+34B9TdXpuyPeTedN/6+iXBJ4J+zwfQI28u/Jl40nOYAxGoNi7mFP40RUtkmUA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.28.0': + resolution: {integrity: sha512-zF3ag/gfiCe6U2iczcRzSYJKH1DCI+ByzSENHlM2FcDbEeo5Zd2C86Aq0tKUYAJJ1obRP84ymxIAksZUcdztHA==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.28.0': + resolution: {integrity: sha512-pEl1bO9mfAmIC+tW5btTmrKaujg3zGtUmWNdCw/xs70FBjwAL3o9OEKNHvNmnyylD6ubxUERiEhdsL0xBQ9efw==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@floating-ui/core@1.7.5': + resolution: {integrity: sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ==} + + '@floating-ui/dom@1.7.6': + resolution: {integrity: sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ==} + + '@floating-ui/react-dom@2.1.8': + 
resolution: {integrity: sha512-cC52bHwM/n/CxS87FH0yWdngEZrjdtLW/qVruo68qg+prK7ZQ4YGdut2GyDVpoGeAYe/h899rVeOVm6Oi40k2A==} + peerDependencies: + react: '>=16.8.0' + react-dom: '>=16.8.0' + + '@floating-ui/utils@0.2.11': + resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + + '@fumadocs/tailwind@0.0.5': + resolution: {integrity: sha512-ENKPWUDRmriccsrUDE4bDBq3FNr/ms3BP2rWlsAEMV1yP23pcCaan+ceGfeBUsAQjw7sj9Q3R4Kl3g/TCStPzQ==} + peerDependencies: + '@tailwindcss/oxide': ^4.0.0 + tailwindcss: ^4.0.0 + peerDependenciesMeta: + '@tailwindcss/oxide': + optional: true + tailwindcss: + optional: true + + '@iconify/types@2.0.0': + resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} + + '@iconify/utils@3.1.3': + resolution: {integrity: sha512-LPKOXPn/zV+zis1oOfGWogaXVpqUybF3ZS6SCZIsz8vg0ivVp9+fVqyYB7xq0aiST/VhUQYGO1qo6uoYSiEJqw==} + + '@img/colour@1.1.0': + resolution: {integrity: sha512-Td76q7j57o/tLVdgS746cYARfSyxk8iEfRxewL9h4OMzYhbW4TAcppl0mT4eyqXddh6L/jwoM75mo7ixa/pCeQ==} + engines: {node: '>=18'} + + '@img/sharp-darwin-arm64@0.34.5': + resolution: {integrity: sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [darwin] + + '@img/sharp-darwin-x64@0.34.5': + resolution: {integrity: sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [darwin] + + '@img/sharp-libvips-darwin-arm64@1.2.4': + resolution: {integrity: sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==} + cpu: [arm64] + os: [darwin] + + '@img/sharp-libvips-darwin-x64@1.2.4': + resolution: {integrity: sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==} + cpu: [x64] 
+ os: [darwin] + + '@img/sharp-libvips-linux-arm64@1.2.4': + resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linux-arm@1.2.4': + resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==} + cpu: [arm] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linux-ppc64@1.2.4': + resolution: {integrity: sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linux-riscv64@1.2.4': + resolution: {integrity: sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linux-s390x@1.2.4': + resolution: {integrity: sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linux-x64@1.2.4': + resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@img/sharp-libvips-linuxmusl-arm64@1.2.4': + resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@img/sharp-libvips-linuxmusl-x64@1.2.4': + resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==} + cpu: [x64] + os: [linux] + libc: [musl] + + '@img/sharp-linux-arm64@0.34.5': + resolution: {integrity: sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@img/sharp-linux-arm@0.34.5': 
+ resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm] + os: [linux] + libc: [glibc] + + '@img/sharp-linux-ppc64@0.34.5': + resolution: {integrity: sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@img/sharp-linux-riscv64@0.34.5': + resolution: {integrity: sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@img/sharp-linux-s390x@0.34.5': + resolution: {integrity: sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@img/sharp-linux-x64@0.34.5': + resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@img/sharp-linuxmusl-arm64@0.34.5': + resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@img/sharp-linuxmusl-x64@0.34.5': + resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + libc: [musl] + + '@img/sharp-wasm32@0.34.5': + resolution: {integrity: sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [wasm32] + + 
'@img/sharp-win32-arm64@0.34.5': + resolution: {integrity: sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [win32] + + '@img/sharp-win32-ia32@0.34.5': + resolution: {integrity: sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [ia32] + os: [win32] + + '@img/sharp-win32-x64@0.34.5': + resolution: {integrity: sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [win32] + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@mdx-js/mdx@3.1.1': + resolution: {integrity: sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==} + + '@mermaid-js/parser@1.1.0': + resolution: {integrity: sha512-gxK9ZX2+Fex5zu8LhRQoMeMPEHbc73UKZ0FQ54YrQtUxE1VVhMwzeNtKRPAu5aXks4FasbMe4xB4bWrmq6Jlxw==} + + '@next/env@16.2.4': + resolution: {integrity: sha512-dKkkOzOSwFYe5RX6y26fZgkSpVAlIOJKQHIiydQcrWH6y/97+RceSOAdjZ14Qa3zLduVUy0TXcn+EiM6t4rPgw==} + + 
'@next/swc-darwin-arm64@16.2.4': + resolution: {integrity: sha512-OXTFFox5EKN1Ym08vfrz+OXxmCcEjT4SFMbNRsWZE99dMqt2Kcusl5MqPXcW232RYkMLQTy0hqgAMEsfEd/l2A==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + + '@next/swc-darwin-x64@16.2.4': + resolution: {integrity: sha512-XhpVnUfmYWvD3YrXu55XdcAkQtOnvaI6wtQa8fuF5fGoKoxIUZ0kWPtcOfqJEWngFF/lOS9l3+O9CcownhiQxQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + + '@next/swc-linux-arm64-gnu@16.2.4': + resolution: {integrity: sha512-Mx/tjlNA3G8kg14QvuGAJ4xBwPk1tUHq56JxZ8CXnZwz1Etz714soCEzGQQzVMz4bEnGPowzkV6Xrp6wAkEWOQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@next/swc-linux-arm64-musl@16.2.4': + resolution: {integrity: sha512-iVMMp14514u7Nup2umQS03nT/bN9HurK8ufylC3FZNykrwjtx7V1A7+4kvhbDSCeonTVqV3Txnv0Lu+m2oDXNg==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@next/swc-linux-x64-gnu@16.2.4': + resolution: {integrity: sha512-EZOvm1aQWgnI/N/xcWOlnS3RQBk0VtVav5Zo7n4p0A7UKyTDx047k8opDbXgBpHl4CulRqRfbw3QrX2w5UOXMQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@next/swc-linux-x64-musl@16.2.4': + resolution: {integrity: sha512-h9FxsngCm9cTBf71AR4fGznDEDx1hS7+kSEiIRjq5kO1oXWm07DxVGZjCvk0SGx7TSjlUqhI8oOyz7NfwAdPoA==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@next/swc-win32-arm64-msvc@16.2.4': + resolution: {integrity: sha512-3NdJV5OXMSOeJYijX+bjaLge3mJBlh4ybydbT4GFoB/2hAojWHtMhl3CYlYoMrjPuodp0nzFVi4Tj2+WaMg+Ow==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + + '@next/swc-win32-x64-msvc@16.2.4': + resolution: {integrity: sha512-kMVGgsqhO5YTYODD9IPGGhA6iprWidQckK3LmPeW08PIFENRmgfb4MjXHO+p//d+ts2rpjvK5gXWzXSMrPl9cw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + + '@orama/orama@3.1.18': + resolution: {integrity: sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA==} + engines: {node: '>= 20.0.0'} + + 
'@radix-ui/number@1.1.1': + resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==} + + '@radix-ui/primitive@1.1.3': + resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} + + '@radix-ui/react-accordion@1.2.12': + resolution: {integrity: sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-arrow@1.1.7': + resolution: {integrity: sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-collapsible@1.1.12': + resolution: {integrity: sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-collection@1.1.7': + resolution: {integrity: sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + 
peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-compose-refs@1.1.2': + resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-context@1.1.2': + resolution: {integrity: sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-direction@1.1.1': + resolution: {integrity: sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-dismissable-layer@1.1.11': + resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-focus-guards@1.1.3': + 
resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-focus-scope@1.1.7': + resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-id@1.1.1': + resolution: {integrity: sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-navigation-menu@1.2.14': + resolution: {integrity: sha512-YB9mTFQvCOAQMHU+C/jVl96WmuWeltyUEpRJJky51huhds5W2FQr1J8D/16sQlf0ozxkPK8uF3niQMdUwZPv5w==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-popover@1.1.15': + resolution: {integrity: sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-popper@1.2.8': + resolution: {integrity: 
sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-portal@1.1.9': + resolution: {integrity: sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-primitive@2.1.3': + resolution: {integrity: sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-roving-focus@1.1.11': + resolution: {integrity: sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + 
react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-scroll-area@1.2.10': + resolution: {integrity: sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-slot@1.2.3': + resolution: {integrity: sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-slot@1.2.4': + resolution: {integrity: sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-tabs@1.1.13': + resolution: {integrity: sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/react-use-callback-ref@1.1.1': + resolution: {integrity: sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + 
+ '@radix-ui/react-use-controllable-state@1.2.2': + resolution: {integrity: sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-effect-event@0.0.2': + resolution: {integrity: sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-escape-keydown@1.1.1': + resolution: {integrity: sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-layout-effect@1.1.1': + resolution: {integrity: sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-previous@1.1.1': + resolution: {integrity: sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-rect@1.1.1': + resolution: {integrity: sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-use-size@1.1.1': + 
resolution: {integrity: sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==} + peerDependencies: + '@types/react': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + '@radix-ui/react-visually-hidden@1.2.3': + resolution: {integrity: sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + + '@radix-ui/rect@1.1.1': + resolution: {integrity: sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==} + + '@shikijs/core@4.0.2': + resolution: {integrity: sha512-hxT0YF4ExEqB8G/qFdtJvpmHXBYJ2lWW7qTHDarVkIudPFE6iCIrqdgWxGn5s+ppkGXI0aEGlibI0PAyzP3zlw==} + engines: {node: '>=20'} + + '@shikijs/engine-javascript@4.0.2': + resolution: {integrity: sha512-7PW0Nm49DcoUIQEXlJhNNBHyoGMjalRETTCcjMqEaMoJRLljy1Bi/EGV3/qLBgLKQejdspiiYuHGQW6dX94Nag==} + engines: {node: '>=20'} + + '@shikijs/engine-oniguruma@4.0.2': + resolution: {integrity: sha512-UpCB9Y2sUKlS9z8juFSKz7ZtysmeXCgnRF0dlhXBkmQnek7lAToPte8DkxmEYGNTMii72zU/lyXiCB6StuZeJg==} + engines: {node: '>=20'} + + '@shikijs/langs@4.0.2': + resolution: {integrity: sha512-KaXby5dvoeuZzN0rYQiPMjFoUrz4hgwIE+D6Du9owcHcl6/g16/yT5BQxSW5cGt2MZBz6Hl0YuRqf12omRfUUg==} + engines: {node: '>=20'} + + '@shikijs/primitive@4.0.2': + resolution: {integrity: sha512-M6UMPrSa3fN5ayeJwFVl9qWofl273wtK1VG8ySDZ1mQBfhCpdd8nEx7nPZ/tk7k+TYcpqBZzj/AnwxT9lO+HJw==} + engines: {node: '>=20'} + + '@shikijs/themes@4.0.2': + resolution: {integrity: sha512-mjCafwt8lJJaVSsQvNVrJumbnnj1RI8jbUKrPKgE6E3OvQKxnuRoBaYC51H4IGHePsGN/QtALglWBU7DoKDFnA==} + engines: {node: '>=20'} + + '@shikijs/types@4.0.2': + 
resolution: {integrity: sha512-qzbeRooUTPnLE+sHD/Z8DStmaDgnbbc/pMrU203950aRqjX/6AFHeDYT+j00y2lPdz0ywJKx7o/7qnqTivtlXg==} + engines: {node: '>=20'} + + '@shikijs/vscode-textmate@10.0.2': + resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} + + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + + '@swc/helpers@0.5.15': + resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} + + '@tailwindcss/node@4.3.0': + resolution: {integrity: sha512-aFb4gUhFOgdh9AXo4IzBEOzBkkAxm9VigwDJnMIYv3lcfXCJVesNfbEaBl4BNgVRyid92AmdviqwBUBRKSeY3g==} + + '@tailwindcss/oxide-android-arm64@4.3.0': + resolution: {integrity: sha512-TJPiq67tKlLuObP6RkwvVGDoxCMBVtDgKkLfa/uyj7/FyxvQwHS+UOnVrXXgbEsfUaMgiVvC4KbJnRr26ho4Ng==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [android] + + '@tailwindcss/oxide-darwin-arm64@4.3.0': + resolution: {integrity: sha512-oMN/WZRb+SO37BmUElEgeEWuU8E/HXRkiODxJxLe1UTHVXLrdVSgfaJV7pSlhRGMSOiXLuxTIjfsF3wYvz8cgQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [darwin] + + '@tailwindcss/oxide-darwin-x64@4.3.0': + resolution: {integrity: sha512-N6CUmu4a6bKVADfw77p+iw6Yd9Q3OBhe0veaDX+QazfuVYlQsHfDgxBrsjQ/IW+zywL8mTrNd0SdJT/zgtvMdA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [darwin] + + '@tailwindcss/oxide-freebsd-x64@4.3.0': + resolution: {integrity: sha512-zDL5hBkQdH5C6MpqbK3gQAgP80tsMwSI26vjOzjJtNCMUo0lFgOItzHKBIupOZNQxt3ouPH7RPhvNhiTfCe5CQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [freebsd] + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.3.0': + resolution: {integrity: sha512-R06HdNi7A7OEoMsf6d4tjZ71RCWnZQPHj2mnotSFURjNLdBC+cIgXQ7l81CqeoiQftjf6OOblxXMInMgN2VzMA==} + engines: {node: '>= 20'} + cpu: [arm] + os: [linux] + + '@tailwindcss/oxide-linux-arm64-gnu@4.3.0': + resolution: {integrity: 
sha512-qTJHELX8jetjhRQHCLilkVLmybpzNQAtaI/gaoVoidn/ufbNDbAo8KlK2J+yPoc8wQxvDxCmh/5lr8nC1+lTbg==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-arm64-musl@4.3.0': + resolution: {integrity: sha512-Z6sukiQsngnWO+l39X4pPbiWT81IC+PLKF+PHxIlyZbGNb9MODfYlXEVlFvej5BOZInWX01kVyzeLvHsXhfczQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-linux-x64-gnu@4.3.0': + resolution: {integrity: sha512-DRNdQRpSGzRGfARVuVkxvM8Q12nh19l4BF/G7zGA1oe+9wcC6saFBHTISrpIcKzhiXtSrlSrluCfvMuledoCTQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@tailwindcss/oxide-linux-x64-musl@4.3.0': + resolution: {integrity: sha512-Z0IADbDo8bh6I7h2IQMx601AdXBLfFpEdUotft86evd/8ZPflZe9COPO8Q1vw+pfLWIUo9zN/JGZvwuAJqduqg==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@tailwindcss/oxide-wasm32-wasi@4.3.0': + resolution: {integrity: sha512-HNZGOUxEmElksYR7S6sC5jTeNGpobAsy9u7Gu0AskJ8/20FR9GqebUyB+HBcU/ax6BHuiuJi+Oda4B+YX6H1yA==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + bundledDependencies: + - '@napi-rs/wasm-runtime' + - '@emnapi/core' + - '@emnapi/runtime' + - '@tybys/wasm-util' + - '@emnapi/wasi-threads' + - tslib + + '@tailwindcss/oxide-win32-arm64-msvc@4.3.0': + resolution: {integrity: sha512-Pe+RPVTi1T+qymuuRpcdvwSVZjnll/f7n8gBxMMh3xLTctMDKqpdfGimbMyioqtLhUYZxdJ9wGNhV7MKHvgZsQ==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [win32] + + '@tailwindcss/oxide-win32-x64-msvc@4.3.0': + resolution: {integrity: sha512-Mvrf2kXW/yeW/OTezZlCGOirXRcUuLIBx/5Y12BaPM7wJoryG6dfS/NJL8aBPqtTEx/Vm4T4vKzFUcKDT+TKUA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [win32] + + '@tailwindcss/oxide@4.3.0': + resolution: {integrity: sha512-F7HZGBeN9I0/AuuJS5PwcD8xayx5ri5GhjYUDBEVYUkexyA/giwbDNjRVrxSezE3T250OU2K/wp/ltWx3UOefg==} + engines: {node: '>= 20'} + + '@tailwindcss/postcss@4.3.0': + resolution: {integrity: 
sha512-Jm05Tjx+9yCLGv5qw1c+84Psds8MnyrEQYCB+FFk2lgGiUjlRqdxke4mVTuYrj2xnVZqKim2Apr5ySuQRYAw/w==} + + '@types/d3-array@3.2.2': + resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==} + + '@types/d3-axis@3.0.6': + resolution: {integrity: sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==} + + '@types/d3-brush@3.0.6': + resolution: {integrity: sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==} + + '@types/d3-chord@3.0.6': + resolution: {integrity: sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==} + + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-contour@3.0.6': + resolution: {integrity: sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==} + + '@types/d3-delaunay@6.0.4': + resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} + + '@types/d3-dispatch@3.0.7': + resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==} + + '@types/d3-drag@3.0.7': + resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + + '@types/d3-dsv@3.0.7': + resolution: {integrity: sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==} + + '@types/d3-ease@3.0.2': + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + + '@types/d3-fetch@3.0.7': + resolution: {integrity: sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==} + + '@types/d3-force@3.0.10': + resolution: {integrity: 
sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==} + + '@types/d3-format@3.0.4': + resolution: {integrity: sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==} + + '@types/d3-geo@3.1.0': + resolution: {integrity: sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==} + + '@types/d3-hierarchy@3.1.7': + resolution: {integrity: sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-path@3.1.1': + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + + '@types/d3-polygon@3.0.2': + resolution: {integrity: sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==} + + '@types/d3-quadtree@3.0.6': + resolution: {integrity: sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==} + + '@types/d3-random@3.0.3': + resolution: {integrity: sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==} + + '@types/d3-scale-chromatic@3.1.0': + resolution: {integrity: sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==} + + '@types/d3-scale@4.0.9': + resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} + + '@types/d3-selection@3.0.11': + resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} + + '@types/d3-shape@3.1.8': + resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==} + + '@types/d3-time-format@4.0.3': + resolution: 
{integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==} + + '@types/d3-time@3.0.4': + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + + '@types/d3-timer@3.0.2': + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + + '@types/d3-transition@3.0.9': + resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==} + + '@types/d3-zoom@3.0.8': + resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + + '@types/d3@7.4.3': + resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==} + + '@types/debug@4.1.13': + resolution: {integrity: sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==} + + '@types/estree-jsx@1.0.5': + resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} + + '@types/estree@1.0.9': + resolution: {integrity: sha512-GhdPgy1el4/ImP05X05Uw4cw2/M93BCUmnEvWZNStlCzEKME4Fkk+YpoA5OiHNQmoS7Cafb8Xa3Pya8m1Qrzeg==} + + '@types/geojson@7946.0.16': + resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} + + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + + '@types/mdx@2.0.13': + resolution: {integrity: sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==} + + '@types/ms@2.1.0': + resolution: {integrity: 
sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} + + '@types/node@25.6.2': + resolution: {integrity: sha512-sokuT28dxf9JT5Kady1fsXOvI4HVpjZa95NKT5y9PNTIrs2AsobR4GFAA90ZG8M+nxVRLysCXsVj6eGC7Vbrlw==} + + '@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + peerDependencies: + '@types/react': ^19.2.0 + + '@types/react@19.2.14': + resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + + '@types/trusted-types@2.0.7': + resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} + + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + + '@ungap/structured-clone@1.3.1': + resolution: {integrity: sha512-mUFwbeTqrVgDQxFveS+df2yfap6iuP20NAKAsBt5jDEoOTDew+zwLAOilHCeQJOVSvmgCX4ogqIrA0mnyr08yQ==} + + '@upsetjs/venn.js@2.0.0': + resolution: {integrity: sha512-WbBhLrooyePuQ1VZxrJjtLvTc4NVfpOyKx0sKqioq9bX1C1m7Jgykkn8gLrtwumBioXIqam8DLxp88Adbue6Hw==} + + '@zeit/schemas@2.36.0': + resolution: {integrity: sha512-7kjMwcChYEzMKjeex9ZFXkt1AyNov9R5HZtjBKVsmVpw7pa7ZtlCGvCBC2vnnXctaYN+aRI61HjIqeetZW5ROg==} + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv@8.18.0: + resolution: {integrity: 
sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} + + ansi-align@3.0.1: + resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.3: + resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + + arch@2.2.0: + resolution: {integrity: sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==} + + arg@5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + aria-hidden@1.2.6: + resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} + engines: {node: '>=10'} + + astring@1.9.0: + resolution: {integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==} + hasBin: true + + bail@2.0.2: + resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + 
baseline-browser-mapping@2.10.29: + resolution: {integrity: sha512-Asa2krT+XTPZINCS+2QcyS8WTkObE77RwkydwF7h6DmnKqbvlalz93m/dnphUyCa6SWSP51VgtEUf2FN+gelFQ==} + engines: {node: '>=6.0.0'} + hasBin: true + + boxen@7.0.0: + resolution: {integrity: sha512-j//dBVuyacJbvW+tvZ9HuH03fZ46QcaKvvhZickZqtB271DxJ7SNRSNxrV/dZX0085m7hISRZWbzWlJvx/rHSg==} + engines: {node: '>=14.16'} + + brace-expansion@1.1.14: + resolution: {integrity: sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==} + + bytes@3.0.0: + resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} + engines: {node: '>= 0.8'} + + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + + camelcase@7.0.1: + resolution: {integrity: sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==} + engines: {node: '>=14.16'} + + caniuse-lite@1.0.30001792: + resolution: {integrity: sha512-hVLMUZFgR4JJ6ACt1uEESvQN1/dBVqPAKY0hgrV70eN3391K6juAfTjKZLKvOMsx8PxA7gsY1/tLMMTcfFLLpw==} + + ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + + chalk-template@0.4.0: + resolution: {integrity: sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==} + engines: {node: '>=12'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chalk@5.0.1: + resolution: {integrity: sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + character-entities-html4@2.1.0: + resolution: {integrity: 
sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + character-reference-invalid@2.0.1: + resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + + chevrotain-allstar@0.4.3: + resolution: {integrity: sha512-2X4mkroolSMKqW+H22pyPMUVDqYZzPhephTmg/NODKb1IGYPHfxfhcW0EjS7wcPJNbze2i4vBWT7zT5FKF2lrQ==} + peerDependencies: + chevrotain: ^12.0.0 + + chevrotain@12.0.0: + resolution: {integrity: sha512-csJvb+6kEiQaqo1woTdSAuOWdN0WTLIydkKrBnS+V5gZz0oqBrp4kQ35519QgK6TpBThiG3V1vNSHlIkv4AglQ==} + engines: {node: '>=22.0.0'} + + chokidar@5.0.0: + resolution: {integrity: sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==} + engines: {node: '>= 20.19.0'} + + class-variance-authority@0.7.1: + resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==} + + cli-boxes@3.0.0: + resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} + engines: {node: '>=10'} + + client-only@0.0.1: + resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} + + clipboardy@3.0.0: + resolution: {integrity: sha512-Su+uU5sr1jkUy1sGRpLKjKrvEOVXgSgiSInwa/qeID6aJ07yh+5NWc3h2QfjHjBnfX4LhtFcuAWKUsJ3r+fjbg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + + collapse-white-space@2.1.0: + resolution: 
{integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + + commander@8.3.0: + resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} + engines: {node: '>= 12'} + + compressible@2.0.18: + resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} + engines: {node: '>= 0.6'} + + compression@1.8.1: + resolution: {integrity: sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==} + engines: {node: '>= 0.8.0'} + + compute-scroll-into-view@3.1.1: + resolution: {integrity: sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw==} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + content-disposition@0.5.2: + resolution: {integrity: sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==} + engines: {node: '>= 0.6'} + + cose-base@1.0.3: + resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} + + cose-base@2.2.0: + resolution: {integrity: 
sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + cytoscape-cose-bilkent@4.1.0: + resolution: {integrity: sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape-fcose@2.2.0: + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape@3.33.3: + resolution: {integrity: sha512-Gej7U+OKR+LZ8kvX7rb2HhCYJ0IhvEFsnkud4SB1PR+BUY/TsSO0dmOW59WEVLu51b1Rm+gQRKoz4bLYxGSZ2g==} + engines: {node: '>=0.10'} + + d3-array@2.12.1: + resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-axis@3.0.0: + resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==} + engines: {node: '>=12'} + + d3-brush@3.0.0: + resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==} + engines: {node: '>=12'} + + d3-chord@3.0.1: + resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-contour@4.0.2: + resolution: 
{integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==} + engines: {node: '>=12'} + + d3-delaunay@6.0.4: + resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-dsv@3.0.1: + resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==} + engines: {node: '>=12'} + hasBin: true + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-fetch@3.0.1: + resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==} + engines: {node: '>=12'} + + d3-force@3.0.0: + resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} + engines: {node: '>=12'} + + d3-format@3.1.2: + resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} + engines: {node: '>=12'} + + d3-geo@3.1.1: + resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==} + engines: {node: '>=12'} + + d3-hierarchy@3.1.2: + resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + 
engines: {node: '>=12'} + + d3-path@1.0.9: + resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-polygon@3.0.1: + resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==} + engines: {node: '>=12'} + + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + + d3-random@3.0.1: + resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==} + engines: {node: '>=12'} + + d3-sankey@0.12.3: + resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==} + + d3-scale-chromatic@3.1.0: + resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-shape@1.3.7: + resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} 
+ + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + + d3@7.9.0: + resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} + engines: {node: '>=12'} + + dagre-d3-es@7.0.14: + resolution: {integrity: sha512-P4rFMVq9ESWqmOgK+dlXvOtLwYg0i7u0HBGJER0LZDJT2VHIPAMZ/riPxqJceWMStH5+E61QxFra9kIS3AqdMg==} + + dayjs@1.11.20: + resolution: {integrity: sha512-YbwwqR/uYpeoP4pu043q+LTDLFBLApUP6VxRihdfNTqu4ubqMlGDLd6ErXhEgsyvY0K6nCs7nggYumAN+9uEuQ==} + + debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decode-named-character-reference@1.3.0: + resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} + + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + 
engines: {node: '>=4.0.0'} + + delaunator@5.1.0: + resolution: {integrity: sha512-AGrQ4QSgssa1NGmWmLPqN5NY2KajF5MqxetNEO+o0n3ZwZZeTmt7bBnvzHWrmkZFxGgr4HdyFgelzgi06otLuQ==} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + + detect-node-es@1.1.0: + resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} + + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + + dompurify@3.4.2: + resolution: {integrity: sha512-lHeS9SA/IKeIFFyYciHBr2n0v1VMPlSj843HdLOwjb2OxNwdq9Xykxqhk+FE42MzAdHvInbAolSE4mhahPpjXA==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + enhanced-resolve@5.21.2: + resolution: {integrity: sha512-xe9vQb5kReirPUxgQrXA3ihgbCqssmTiM7cOZ+Gzu+VeGWgpV98lLZvp0dl4yriyAePcewxGUs9UpKD8PET9KQ==} + engines: {node: '>=10.13.0'} + + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} + engines: {node: '>=0.12'} + + esast-util-from-estree@2.0.0: + resolution: {integrity: sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ==} + + esast-util-from-js@2.0.1: + resolution: {integrity: 
sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==} + + esbuild@0.28.0: + resolution: {integrity: sha512-sNR9MHpXSUV/XB4zmsFKN+QgVG82Cc7+/aaxJ8Adi8hyOac+EXptIp45QBPaVyX3N70664wRbTcLTOemCAnyqw==} + engines: {node: '>=18'} + hasBin: true + + escape-string-regexp@5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: {node: '>=12'} + + estree-util-attach-comments@3.0.0: + resolution: {integrity: sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==} + + estree-util-build-jsx@3.0.1: + resolution: {integrity: sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==} + + estree-util-is-identifier-name@3.0.0: + resolution: {integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==} + + estree-util-scope@1.0.0: + resolution: {integrity: sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ==} + + estree-util-to-js@2.0.0: + resolution: {integrity: sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==} + + estree-util-value-to-estree@3.5.0: + resolution: {integrity: sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==} + + estree-util-visit@2.0.0: + resolution: {integrity: sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==} + + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + extend@3.0.2: + resolution: {integrity: 
sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-uri@3.1.2: + resolution: {integrity: sha512-rVjf7ArG3LTk+FS6Yw81V1DLuZl1bRbNrev6Tmd/9RaroeeRRJhAt7jg/6YFxbvAQXUCavSoZhPPj6oOx+5KjQ==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + framer-motion@12.38.0: + resolution: {integrity: sha512-rFYkY/pigbcswl1XQSb7q424kSTQ8q6eAC+YUsSKooHQYuLdzdHjrt6uxUC+PRAO++q5IS7+TamgIw1AphxR+g==} + peerDependencies: + '@emotion/is-prop-valid': '*' + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@emotion/is-prop-valid': + optional: true + react: + optional: true + react-dom: + optional: true + + fumadocs-core@16.8.5: + resolution: {integrity: sha512-4MRqh/KWtR5Q5+LJd2SFv3nLDHtuZw3q8rwApd9nAWkunHVU30U17fUVq6nY+IDoLs7bSLnvDGvoE+Ynelrn3A==} + peerDependencies: + '@mdx-js/mdx': '*' + '@mixedbread/sdk': 0.x.x + '@orama/core': 1.x.x + '@oramacloud/client': 2.x.x + '@tanstack/react-router': 1.x.x + '@types/estree-jsx': '*' + '@types/hast': '*' + '@types/mdast': '*' + '@types/react': '*' + algoliasearch: 5.x.x + flexsearch: '*' + lucide-react: '*' + next: 16.x.x + react: ^19.2.0 + react-dom: ^19.2.0 + react-router: 7.x.x + waku: ^0.26.0 || ^0.27.0 || ^1.0.0 + zod: 4.x.x + peerDependenciesMeta: + '@mdx-js/mdx': + optional: true + '@mixedbread/sdk': + optional: true + '@orama/core': + optional: true + '@oramacloud/client': + optional: true + '@tanstack/react-router': + optional: true + '@types/estree-jsx': + optional: true + '@types/hast': + optional: true + '@types/mdast': + optional: true + '@types/react': + optional: true + 
algoliasearch: + optional: true + flexsearch: + optional: true + lucide-react: + optional: true + next: + optional: true + react: + optional: true + react-dom: + optional: true + react-router: + optional: true + waku: + optional: true + zod: + optional: true + + fumadocs-mdx@14.3.2: + resolution: {integrity: sha512-73SoZkbUuqnD91G/0zBcaQdM1TMnYw5JJzKgkGvQTiZbtLQFuWTt8/uRqnzFMuNIUu/WY9Lo9d1iZ8G+jOVieA==} + hasBin: true + peerDependencies: + '@types/mdast': '*' + '@types/mdx': '*' + '@types/react': '*' + fumadocs-core: ^15.0.0 || ^16.0.0 + mdast-util-directive: '*' + next: ^15.3.0 || ^16.0.0 + react: ^19.2.0 + vite: 6.x.x || 7.x.x || 8.x.x + peerDependenciesMeta: + '@types/mdast': + optional: true + '@types/mdx': + optional: true + '@types/react': + optional: true + mdast-util-directive: + optional: true + next: + optional: true + react: + optional: true + vite: + optional: true + + fumadocs-ui@16.8.5: + resolution: {integrity: sha512-caJjSfUhNkwoqumOBKfHxE1UjVHxkTsoaUhA96IvCM3G82bU2OKhf1pYtf/GbZ0XVdIlmY8Z47Cqwsze0HlXjg==} + peerDependencies: + '@takumi-rs/image-response': '*' + '@types/mdx': '*' + '@types/react': '*' + fumadocs-core: 16.8.5 + next: 16.x.x + react: ^19.2.0 + react-dom: ^19.2.0 + peerDependenciesMeta: + '@takumi-rs/image-response': + optional: true + '@types/mdx': + optional: true + '@types/react': + optional: true + next: + optional: true + + get-nonce@1.0.1: + resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} + engines: {node: '>=6'} + + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + github-slugger@2.0.0: + resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==} + + graceful-fs@4.2.11: + resolution: {integrity: 
sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + hachure-fill@0.5.2: + resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + hast-util-from-parse5@8.0.3: + resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==} + + hast-util-parse-selector@4.0.0: + resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==} + + hast-util-raw@9.1.0: + resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==} + + hast-util-to-estree@3.1.3: + resolution: {integrity: sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==} + + hast-util-to-html@9.0.5: + resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==} + + hast-util-to-jsx-runtime@2.3.6: + resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==} + + hast-util-to-parse5@8.0.1: + resolution: {integrity: sha512-MlWT6Pjt4CG9lFCjiz4BH7l9wmrMkfkJYCxFwKQic8+RTZgWPuWxwAfjJElsXkex7DJjfSJsQIt931ilUgmwdA==} + + hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + hastscript@9.0.1: + resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==} + + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + + human-signals@2.1.0: + resolution: {integrity: 
sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + import-meta-resolve@4.2.0: + resolution: {integrity: sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==} + + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + inline-style-parser@0.2.7: + resolution: {integrity: sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==} + + internmap@1.0.1: + resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + is-alphabetical@2.0.1: + resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} + + is-alphanumerical@2.0.1: + resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + + is-decimal@2.0.1: + resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-hexadecimal@2.0.1: + resolution: {integrity: 
sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} + + is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} + + is-port-reachable@4.0.0: + resolution: {integrity: sha512-9UoipoxYmSk6Xy7QFgRv2HDyaysmgSG75TFQs6S+3pDM7ZhKTF/bskZV+0UlABHzKjNVhPjYCLfeZUEg1wXxig==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jiti@2.7.0: + resolution: {integrity: sha512-AC/7JofJvZGrrneWNaEnJeOLUx+JlGt7tNa0wZiRPT4MY1wmfKjt2+6O2p2uz2+skll8OZZmJMNqeke7kKbNgQ==} + hasBin: true + + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + katex@0.16.45: + resolution: {integrity: sha512-pQpZbdBu7wCTmQUh7ufPmLr0pFoObnGUoL/yhtwJDgmmQpbkg/0HSVti25Fu4rmd1oCR6NGWe9vqTWuWv3GcNA==} + hasBin: true + + khroma@2.1.0: + resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} + + langium@4.2.3: + resolution: {integrity: sha512-sOPIi4hISFnY7twwV97ca1TsxpBtXq0URu/LL1AvxwccPG/RIBBlKS7a/f/EL6w8lTNaS0EFs/F+IdSOaqYpng==} + engines: {node: '>=20.10.0', npm: '>=10.2.3'} + + layout-base@1.0.2: + resolution: {integrity: 
sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} + + layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + + lightningcss-android-arm64@1.32.0: + resolution: {integrity: sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.32.0: + resolution: {integrity: sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.32.0: + resolution: {integrity: sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.32.0: + resolution: {integrity: sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.32.0: + resolution: {integrity: sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.32.0: + resolution: {integrity: sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + lightningcss-linux-arm64-musl@1.32.0: + resolution: {integrity: sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + libc: [musl] + + lightningcss-linux-x64-gnu@1.32.0: + resolution: {integrity: sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==} + 
engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [glibc] + + lightningcss-linux-x64-musl@1.32.0: + resolution: {integrity: sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + libc: [musl] + + lightningcss-win32-arm64-msvc@1.32.0: + resolution: {integrity: sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.32.0: + resolution: {integrity: sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.32.0: + resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} + engines: {node: '>= 12.0.0'} + + lodash-es@4.18.1: + resolution: {integrity: sha512-J8xewKD/Gk22OZbhpOVSwcs60zhd95ESDwezOFuA3/099925PdHJ7OFHNTGtajL3AlZkykD32HykiMo+BIBI8A==} + + longest-streak@3.1.0: + resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + + lucide-react@1.14.0: + resolution: {integrity: sha512-+1mdWcfSJVUsaTIjN9zoezmUhfXo5l0vP7ekBMPo3jcS/aIkxHnXqAPsByszMZx/Y8oQBRJxJx5xg+RH3urzxA==} + peerDependencies: + react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + markdown-extensions@2.0.0: + resolution: {integrity: sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==} + engines: {node: '>=16'} + + markdown-table@3.0.4: + resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} + + marked@16.4.2: + resolution: {integrity: 
sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==} + engines: {node: '>= 20'} + hasBin: true + + mdast-util-find-and-replace@3.0.2: + resolution: {integrity: sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==} + + mdast-util-from-markdown@2.0.3: + resolution: {integrity: sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==} + + mdast-util-gfm-autolink-literal@2.0.1: + resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==} + + mdast-util-gfm-footnote@2.1.0: + resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==} + + mdast-util-gfm-strikethrough@2.0.0: + resolution: {integrity: sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==} + + mdast-util-gfm-table@2.0.0: + resolution: {integrity: sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==} + + mdast-util-gfm-task-list-item@2.0.0: + resolution: {integrity: sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==} + + mdast-util-gfm@3.1.0: + resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==} + + mdast-util-mdx-expression@2.0.1: + resolution: {integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==} + + mdast-util-mdx-jsx@3.2.0: + resolution: {integrity: sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==} + + mdast-util-mdx@3.0.0: + resolution: {integrity: sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==} + + mdast-util-mdxjs-esm@2.0.1: + resolution: {integrity: 
sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==} + + mdast-util-phrasing@4.1.0: + resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + + mdast-util-to-hast@13.2.1: + resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==} + + mdast-util-to-markdown@2.1.2: + resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} + + mdast-util-to-string@4.0.0: + resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + mermaid@11.14.0: + resolution: {integrity: sha512-GSGloRsBs+JINmmhl0JDwjpuezCsHB4WGI4NASHxL3fHo3o/BRXTxhDLKnln8/Q0lRFRyDdEjmk1/d5Sn1Xz8g==} + + micromark-core-commonmark@2.0.3: + resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==} + + micromark-extension-gfm-autolink-literal@2.1.0: + resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==} + + micromark-extension-gfm-footnote@2.1.0: + resolution: {integrity: sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==} + + micromark-extension-gfm-strikethrough@2.1.0: + resolution: {integrity: sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==} + + micromark-extension-gfm-table@2.1.1: + resolution: {integrity: sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==} + + micromark-extension-gfm-tagfilter@2.0.0: + resolution: {integrity: 
sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==} + + micromark-extension-gfm-task-list-item@2.1.0: + resolution: {integrity: sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==} + + micromark-extension-gfm@3.0.0: + resolution: {integrity: sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==} + + micromark-extension-mdx-expression@3.0.1: + resolution: {integrity: sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==} + + micromark-extension-mdx-jsx@3.0.2: + resolution: {integrity: sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==} + + micromark-extension-mdx-md@2.0.0: + resolution: {integrity: sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==} + + micromark-extension-mdxjs-esm@3.0.0: + resolution: {integrity: sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==} + + micromark-extension-mdxjs@3.0.0: + resolution: {integrity: sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==} + + micromark-factory-destination@2.0.1: + resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==} + + micromark-factory-label@2.0.1: + resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==} + + micromark-factory-mdx-expression@2.0.3: + resolution: {integrity: sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==} + + micromark-factory-space@2.0.1: + resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==} + + micromark-factory-title@2.0.1: + resolution: {integrity: 
sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==} + + micromark-factory-whitespace@2.0.1: + resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==} + + micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} + + micromark-util-chunked@2.0.1: + resolution: {integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==} + + micromark-util-classify-character@2.0.1: + resolution: {integrity: sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==} + + micromark-util-combine-extensions@2.0.1: + resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==} + + micromark-util-decode-numeric-character-reference@2.0.2: + resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==} + + micromark-util-decode-string@2.0.1: + resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==} + + micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} + + micromark-util-events-to-acorn@2.0.3: + resolution: {integrity: sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==} + + micromark-util-html-tag-name@2.0.1: + resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==} + + micromark-util-normalize-identifier@2.0.1: + resolution: {integrity: sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==} + + micromark-util-resolve-all@2.0.1: + resolution: {integrity: 
sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==} + + micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} + + micromark-util-subtokenize@2.1.0: + resolution: {integrity: sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==} + + micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} + + micromark-util-types@2.0.2: + resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==} + + micromark@4.0.2: + resolution: {integrity: sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==} + + mime-db@1.33.0: + resolution: {integrity: sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==} + engines: {node: '>= 0.6'} + + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + + mime-types@2.1.18: + resolution: {integrity: sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==} + engines: {node: '>= 0.6'} + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + minimatch@3.1.5: + resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + motion-dom@12.38.0: + resolution: {integrity: sha512-pdkHLD8QYRp8VfiNLb8xIBJis1byQ9gPT3Jnh2jqfFtAsWUA3dEepDlsWe/xMpO8McV+VdpKVcp+E+TGJEtOoA==} + + 
motion-utils@12.36.0: + resolution: {integrity: sha512-eHWisygbiwVvf6PZ1vhaHCLamvkSbPIeAYxWUuL3a2PD/TROgE7FvfHWTIH4vMl798QLfMw15nRqIaRDXTlYRg==} + + motion@12.38.0: + resolution: {integrity: sha512-uYfXzeHlgThchzwz5Te47dlv5JOUC7OB4rjJ/7XTUgtBZD8CchMN8qEJ4ZVsUmTyYA44zjV0fBwsiktRuFnn+w==} + peerDependencies: + '@emotion/is-prop-valid': '*' + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@emotion/is-prop-valid': + optional: true + react: + optional: true + react-dom: + optional: true + + ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + nanoid@3.3.12: + resolution: {integrity: sha512-ZB9RH/39qpq5Vu6Y+NmUaFhQR6pp+M2Xt76XBnEwDaGcVAqhlvxrl3B2bKS5D3NH3QR76v3aSrKaF/Kiy7lEtQ==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + negotiator@0.6.4: + resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + + next-themes@0.4.6: + resolution: {integrity: sha512-pZvgD5L0IEvX5/9GWyHMf3m8BKiVQwsCMHfoFosXtXBMnaS0ZnIJ9ST4b4NqLVKDEm8QBxoNNGNaBv2JNF6XNA==} + peerDependencies: + react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc + react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc + + next@16.2.4: + resolution: {integrity: sha512-kPvz56wF5frc+FxlHI5qnklCzbq53HTwORaWBGdT0vNoKh1Aya9XC8aPauH4NJxqtzbWsS5mAbctm4cr+EkQ2Q==} + engines: {node: '>=20.9.0'} + hasBin: true + peerDependencies: + '@opentelemetry/api': ^1.1.0 + '@playwright/test': ^1.51.1 + babel-plugin-react-compiler: '*' + react: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 + react-dom: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 + sass: ^1.3.0 + peerDependenciesMeta: + '@opentelemetry/api': + optional: true + '@playwright/test': + optional: true 
+ babel-plugin-react-compiler: + optional: true + sass: + optional: true + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + on-headers@1.1.0: + resolution: {integrity: sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==} + engines: {node: '>= 0.8'} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + oniguruma-parser@0.12.2: + resolution: {integrity: sha512-6HVa5oIrgMC6aA6WF6XyyqbhRPJrKR02L20+2+zpDtO5QAzGHAUGw5TKQvwi5vctNnRHkJYmjAhRVQF2EKdTQw==} + + oniguruma-to-es@4.3.6: + resolution: {integrity: sha512-csuQ9x3Yr0cEIs/Zgx/OEt9iBw9vqIunAPQkx19R/fiMq2oGVTgcMqO/V3Ybqefr1TBvosI6jU539ksaBULJyA==} + + package-manager-detector@1.6.0: + resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==} + + parse-entities@4.0.2: + resolution: {integrity: sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==} + + parse5@7.3.0: + resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + + path-data-parser@0.1.0: + resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==} + + path-is-inside@1.0.2: + resolution: {integrity: sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-to-regexp@3.3.0: + resolution: {integrity: sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==} + + picocolors@1.1.1: + resolution: 
{integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@4.0.4: + resolution: {integrity: sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==} + engines: {node: '>=12'} + + points-on-curve@0.2.0: + resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} + + points-on-path@0.2.1: + resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} + + postcss@8.4.31: + resolution: {integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==} + engines: {node: ^10 || ^12 || >=14} + + postcss@8.5.14: + resolution: {integrity: sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg==} + engines: {node: ^10 || ^12 || >=14} + + property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + + range-parser@1.2.0: + resolution: {integrity: sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==} + engines: {node: '>= 0.6'} + + rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + + react-dom@19.2.6: + resolution: {integrity: sha512-0prMI+hvBbPjsWnxDLxlCGyM8PN6UuWjEUCYmZhO67xIV9Xasa/r/vDnq+Xyq4Lo27g8QSbO5YzARu0D1Sps3g==} + peerDependencies: + react: ^19.2.6 + + react-remove-scroll-bar@2.3.8: + resolution: {integrity: sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@types/react': + optional: true + + react-remove-scroll@2.7.2: + resolution: {integrity: 
sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + react-style-singleton@2.2.3: + resolution: {integrity: sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + react@19.2.6: + resolution: {integrity: sha512-sfWGGfavi0xr8Pg0sVsyHMAOziVYKgPLNrS7ig+ivMNb3wbCBw3KxtflsGBAwD3gYQlE/AEZsTLgToRrSCjb0Q==} + engines: {node: '>=0.10.0'} + + readdirp@5.0.0: + resolution: {integrity: sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==} + engines: {node: '>= 20.19.0'} + + recma-build-jsx@1.0.0: + resolution: {integrity: sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==} + + recma-jsx@1.0.1: + resolution: {integrity: sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + recma-parse@1.0.0: + resolution: {integrity: sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==} + + recma-stringify@1.0.0: + resolution: {integrity: sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==} + + regex-recursion@6.0.2: + resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==} + + regex-utilities@2.3.0: + resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==} + + regex@6.1.0: + resolution: {integrity: 
sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==} + + registry-auth-token@3.3.2: + resolution: {integrity: sha512-JL39c60XlzCVgNrO+qq68FoNb56w/m7JYvGR2jT5iR1xBrUA3Mfx5Twk5rqTThPmQKMWydGmq8oFtDlxfrmxnQ==} + + registry-url@3.1.0: + resolution: {integrity: sha512-ZbgR5aZEdf4UKZVBPYIgaglBmSF2Hi94s2PcIHhRGFjKYu+chjJdYfHn4rt3hB6eCKLJ8giVIIfgMa1ehDfZKA==} + engines: {node: '>=0.10.0'} + + rehype-raw@7.0.0: + resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==} + + rehype-recma@1.0.0: + resolution: {integrity: sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==} + + remark-gfm@4.0.1: + resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==} + + remark-mdx@3.1.1: + resolution: {integrity: sha512-Pjj2IYlUY3+D8x00UJsIOg5BEvfMyeI+2uLPn9VO9Wg4MEtN/VTIq2NEJQfde9PnX15KgtHyl9S0BcTnWrIuWg==} + + remark-parse@11.0.0: + resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==} + + remark-rehype@11.1.2: + resolution: {integrity: sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==} + + remark-stringify@11.0.0: + resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} + + remark@15.0.1: + resolution: {integrity: sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A==} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + + robust-predicates@3.0.3: + resolution: {integrity: sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA==} + + roughjs@4.6.6: + resolution: {integrity: 
sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==} + + rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} + + scroll-into-view-if-needed@3.1.0: + resolution: {integrity: sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==} + + semver@7.8.0: + resolution: {integrity: sha512-AcM7dV/5ul4EekoQ29Agm5vri8JNqRyj39o0qpX6vDF2GZrtutZl5RwgD1XnZjiTAfncsJhMI48QQH3sN87YNA==} + engines: {node: '>=10'} + hasBin: true + + serve-handler@6.1.7: + resolution: {integrity: sha512-CinAq1xWb0vR3twAv9evEU8cNWkXCb9kd5ePAHUKJBkOsUpR1wt/CvGdeca7vqumL1U5cSaeVQ6zZMxiJ3yWsg==} + + serve@14.2.6: + resolution: {integrity: sha512-QEjUSA+sD4Rotm1znR8s50YqA3kYpRGPmtd5GlFxbaL9n/FdUNbqMhxClqdditSk0LlZyA/dhud6XNRTOC9x2Q==} + engines: {node: '>= 14'} + hasBin: true + + sharp@0.34.5: + resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shiki@4.0.2: + resolution: {integrity: 
sha512-eAVKTMedR5ckPo4xne/PjYQYrU3qx78gtJZ+sHlXEg5IHhhoQhMfZVzetTYuaJS0L2Ef3AcCRzCHV8T0WI6nIQ==} + engines: {node: '>=20'} + + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.2.0: + resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} + engines: {node: '>=12'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + + 
style-to-js@1.1.21: + resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==} + + style-to-object@1.0.14: + resolution: {integrity: sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==} + + styled-jsx@5.1.6: + resolution: {integrity: sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==} + engines: {node: '>= 12.0.0'} + peerDependencies: + '@babel/core': '*' + babel-plugin-macros: '*' + react: '>= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0' + peerDependenciesMeta: + '@babel/core': + optional: true + babel-plugin-macros: + optional: true + + stylis@4.4.0: + resolution: {integrity: sha512-5Z9ZpRzfuH6l/UAvCPAPUo3665Nk2wLaZU3x+TLHKVzIz33+sbJqbtrYoC3KD4/uVOr2Zp+L0LySezP9OHV9yA==} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + tailwind-merge@3.5.0: + resolution: {integrity: sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==} + + tailwindcss@4.3.0: + resolution: {integrity: sha512-y6nxMGB1nMW9R6k96e5gdIFzcfL/gTJRNaqGes1YvkLnPVXzWgbqFF2yLC0T8G774n24cx3Pe8XrKoniCOAH+Q==} + + tapable@2.3.3: + resolution: {integrity: sha512-uxc/zpqFg6x7C8vOE7lh6Lbda8eEL9zmVm/PLeTPBRhh1xCgdWaQ+J1CUieGpIfm2HdtsUpRv+HshiasBMcc6A==} + engines: {node: '>=6'} + + tinyexec@1.1.2: + resolution: {integrity: sha512-dAqSqE/RabpBKI8+h26GfLq6Vb3JVXs30XYQjdMjaj/c2tS8IYYMbIzP599KtRj7c57/wYApb3QjgRgXmrCukA==} + engines: {node: '>=18'} + + tinyglobby@0.2.16: + resolution: {integrity: sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==} + engines: {node: '>=12.0.0'} + + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + + trough@2.2.0: + resolution: 
{integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + + ts-dedent@2.2.0: + resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} + engines: {node: '>=6.10'} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + type-fest@2.19.0: + resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} + engines: {node: '>=12.20'} + + typescript@6.0.3: + resolution: {integrity: sha512-y2TvuxSZPDyQakkFRPZHKFm+KKVqIisdg9/CZwm9ftvKXLP8NRWj38/ODjNbr43SsoXqNuAisEf1GdCxqWcdBw==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@7.19.2: + resolution: {integrity: sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg==} + + unified@11.0.5: + resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} + + unist-util-is@6.0.1: + resolution: {integrity: sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==} + + unist-util-position-from-estree@2.0.0: + resolution: {integrity: sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==} + + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + + unist-util-remove-position@5.0.0: + resolution: {integrity: sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.2: + resolution: {integrity: 
sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==} + + unist-util-visit@5.1.0: + resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + + update-check@1.5.4: + resolution: {integrity: sha512-5YHsflzHP4t1G+8WGPlvKbJEbAJGCgw+Em+dGR1KmBUbr1J36SJBqlHLjR7oob7sco5hWHGQVcr9B2poIVDDTQ==} + + use-callback-ref@1.3.3: + resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + use-sidecar@1.1.3: + resolution: {integrity: sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==} + engines: {node: '>=10'} + peerDependencies: + '@types/react': '*' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + + uuid@11.1.1: + resolution: {integrity: sha512-vIYxrBCC/N/K+Js3qSN88go7kIfNPssr/hHCesKCQNAjmgvYS2oqr69kIufEG+O4+PfezOH4EbIeHCfFov8ZgQ==} + hasBin: true + + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + + vfile-location@5.0.3: + resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} + + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} + + vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + + vscode-jsonrpc@8.2.0: + resolution: {integrity: sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==} + engines: {node: 
'>=14.0.0'} + + vscode-languageserver-protocol@3.17.5: + resolution: {integrity: sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==} + + vscode-languageserver-textdocument@1.0.12: + resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==} + + vscode-languageserver-types@3.17.5: + resolution: {integrity: sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==} + + vscode-languageserver@9.0.1: + resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==} + hasBin: true + + vscode-uri@3.1.0: + resolution: {integrity: sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==} + + web-namespaces@2.0.1: + resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + widest-line@4.0.1: + resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} + engines: {node: '>=12'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + zod@4.4.3: + resolution: {integrity: sha512-ytENFjIJFl2UwYglde2jchW2Hwm4GJFLDiSXWdTrJQBIN9Fcyp7n4DhxJEiWNAJMV1/BqWfW/kkg71UDcHJyTQ==} + + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + +snapshots: + + '@alloc/quick-lru@5.2.0': {} + + '@antfu/install-pkg@1.1.0': + dependencies: + package-manager-detector: 1.6.0 + tinyexec: 1.1.2 + + '@biomejs/biome@2.4.15': + optionalDependencies: + 
'@biomejs/cli-darwin-arm64': 2.4.15 + '@biomejs/cli-darwin-x64': 2.4.15 + '@biomejs/cli-linux-arm64': 2.4.15 + '@biomejs/cli-linux-arm64-musl': 2.4.15 + '@biomejs/cli-linux-x64': 2.4.15 + '@biomejs/cli-linux-x64-musl': 2.4.15 + '@biomejs/cli-win32-arm64': 2.4.15 + '@biomejs/cli-win32-x64': 2.4.15 + + '@biomejs/cli-darwin-arm64@2.4.15': + optional: true + + '@biomejs/cli-darwin-x64@2.4.15': + optional: true + + '@biomejs/cli-linux-arm64-musl@2.4.15': + optional: true + + '@biomejs/cli-linux-arm64@2.4.15': + optional: true + + '@biomejs/cli-linux-x64-musl@2.4.15': + optional: true + + '@biomejs/cli-linux-x64@2.4.15': + optional: true + + '@biomejs/cli-win32-arm64@2.4.15': + optional: true + + '@biomejs/cli-win32-x64@2.4.15': + optional: true + + '@braintree/sanitize-url@7.1.2': {} + + '@chevrotain/cst-dts-gen@12.0.0': + dependencies: + '@chevrotain/gast': 12.0.0 + '@chevrotain/types': 12.0.0 + + '@chevrotain/gast@12.0.0': + dependencies: + '@chevrotain/types': 12.0.0 + + '@chevrotain/regexp-to-ast@12.0.0': {} + + '@chevrotain/types@12.0.0': {} + + '@chevrotain/utils@12.0.0': {} + + '@emnapi/runtime@1.10.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@esbuild/aix-ppc64@0.28.0': + optional: true + + '@esbuild/android-arm64@0.28.0': + optional: true + + '@esbuild/android-arm@0.28.0': + optional: true + + '@esbuild/android-x64@0.28.0': + optional: true + + '@esbuild/darwin-arm64@0.28.0': + optional: true + + '@esbuild/darwin-x64@0.28.0': + optional: true + + '@esbuild/freebsd-arm64@0.28.0': + optional: true + + '@esbuild/freebsd-x64@0.28.0': + optional: true + + '@esbuild/linux-arm64@0.28.0': + optional: true + + '@esbuild/linux-arm@0.28.0': + optional: true + + '@esbuild/linux-ia32@0.28.0': + optional: true + + '@esbuild/linux-loong64@0.28.0': + optional: true + + '@esbuild/linux-mips64el@0.28.0': + optional: true + + '@esbuild/linux-ppc64@0.28.0': + optional: true + + '@esbuild/linux-riscv64@0.28.0': + optional: true + + '@esbuild/linux-s390x@0.28.0': + 
optional: true + + '@esbuild/linux-x64@0.28.0': + optional: true + + '@esbuild/netbsd-arm64@0.28.0': + optional: true + + '@esbuild/netbsd-x64@0.28.0': + optional: true + + '@esbuild/openbsd-arm64@0.28.0': + optional: true + + '@esbuild/openbsd-x64@0.28.0': + optional: true + + '@esbuild/openharmony-arm64@0.28.0': + optional: true + + '@esbuild/sunos-x64@0.28.0': + optional: true + + '@esbuild/win32-arm64@0.28.0': + optional: true + + '@esbuild/win32-ia32@0.28.0': + optional: true + + '@esbuild/win32-x64@0.28.0': + optional: true + + '@floating-ui/core@1.7.5': + dependencies: + '@floating-ui/utils': 0.2.11 + + '@floating-ui/dom@1.7.6': + dependencies: + '@floating-ui/core': 1.7.5 + '@floating-ui/utils': 0.2.11 + + '@floating-ui/react-dom@2.1.8(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@floating-ui/dom': 1.7.6 + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + + '@floating-ui/utils@0.2.11': {} + + '@fumadocs/tailwind@0.0.5(@tailwindcss/oxide@4.3.0)(tailwindcss@4.3.0)': + optionalDependencies: + '@tailwindcss/oxide': 4.3.0 + tailwindcss: 4.3.0 + + '@iconify/types@2.0.0': {} + + '@iconify/utils@3.1.3': + dependencies: + '@antfu/install-pkg': 1.1.0 + '@iconify/types': 2.0.0 + import-meta-resolve: 4.2.0 + + '@img/colour@1.1.0': + optional: true + + '@img/sharp-darwin-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-darwin-arm64': 1.2.4 + optional: true + + '@img/sharp-darwin-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-darwin-x64': 1.2.4 + optional: true + + '@img/sharp-libvips-darwin-arm64@1.2.4': + optional: true + + '@img/sharp-libvips-darwin-x64@1.2.4': + optional: true + + '@img/sharp-libvips-linux-arm64@1.2.4': + optional: true + + '@img/sharp-libvips-linux-arm@1.2.4': + optional: true + + '@img/sharp-libvips-linux-ppc64@1.2.4': + optional: true + + '@img/sharp-libvips-linux-riscv64@1.2.4': + optional: true + + '@img/sharp-libvips-linux-s390x@1.2.4': + optional: true + + '@img/sharp-libvips-linux-x64@1.2.4': + 
optional: true + + '@img/sharp-libvips-linuxmusl-arm64@1.2.4': + optional: true + + '@img/sharp-libvips-linuxmusl-x64@1.2.4': + optional: true + + '@img/sharp-linux-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm64': 1.2.4 + optional: true + + '@img/sharp-linux-arm@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm': 1.2.4 + optional: true + + '@img/sharp-linux-ppc64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-ppc64': 1.2.4 + optional: true + + '@img/sharp-linux-riscv64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-riscv64': 1.2.4 + optional: true + + '@img/sharp-linux-s390x@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-s390x': 1.2.4 + optional: true + + '@img/sharp-linux-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-x64': 1.2.4 + optional: true + + '@img/sharp-linuxmusl-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-arm64': 1.2.4 + optional: true + + '@img/sharp-linuxmusl-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-x64': 1.2.4 + optional: true + + '@img/sharp-wasm32@0.34.5': + dependencies: + '@emnapi/runtime': 1.10.0 + optional: true + + '@img/sharp-win32-arm64@0.34.5': + optional: true + + '@img/sharp-win32-ia32@0.34.5': + optional: true + + '@img/sharp-win32-x64@0.34.5': + optional: true + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@mdx-js/mdx@3.1.1': + dependencies: + '@types/estree': 1.0.9 + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdx': 2.0.13 + acorn: 8.16.0 + 
collapse-white-space: 2.1.0 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + estree-util-scope: 1.0.0 + estree-walker: 3.0.3 + hast-util-to-jsx-runtime: 2.3.6 + markdown-extensions: 2.0.0 + recma-build-jsx: 1.0.0 + recma-jsx: 1.0.1(acorn@8.16.0) + recma-stringify: 1.0.0 + rehype-recma: 1.0.0 + remark-mdx: 3.1.1 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + source-map: 0.7.6 + unified: 11.0.5 + unist-util-position-from-estree: 2.0.0 + unist-util-stringify-position: 4.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + transitivePeerDependencies: + - supports-color + + '@mermaid-js/parser@1.1.0': + dependencies: + langium: 4.2.3 + + '@next/env@16.2.4': {} + + '@next/swc-darwin-arm64@16.2.4': + optional: true + + '@next/swc-darwin-x64@16.2.4': + optional: true + + '@next/swc-linux-arm64-gnu@16.2.4': + optional: true + + '@next/swc-linux-arm64-musl@16.2.4': + optional: true + + '@next/swc-linux-x64-gnu@16.2.4': + optional: true + + '@next/swc-linux-x64-musl@16.2.4': + optional: true + + '@next/swc-win32-arm64-msvc@16.2.4': + optional: true + + '@next/swc-win32-x64-msvc@16.2.4': + optional: true + + '@orama/orama@3.1.18': {} + + '@radix-ui/number@1.1.1': {} + + '@radix-ui/primitive@1.1.3': {} + + '@radix-ui/react-accordion@1.2.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-id': 
1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-collapsible@1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + 
'@radix-ui/react-collection@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-context@1.1.2(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-dialog@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + aria-hidden: 1.2.6 + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-direction@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-focus-guards@1.1.3(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + 
'@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-id@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-navigation-menu@1.2.14(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + 
'@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-popover@1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + aria-hidden: 1.2.6 + react: 19.2.6 + react-dom: 
19.2.6(react@19.2.6) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-popper@1.2.8(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@floating-ui/react-dom': 2.1.8(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-rect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/rect': 1.1.1 + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-portal@1.1.9(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + 
'@radix-ui/react-presence@1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + 
'@radix-ui/react-scroll-area@1.2.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/number': 1.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-slot@1.2.3(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-slot@1.2.4(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-tabs@1.1.13(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-context': 1.1.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-previous@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + 
'@radix-ui/react-use-rect@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/rect': 1.1.1 + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-use-size@1.1.1(@types/react@19.2.14)(react@19.2.6)': + dependencies: + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.14)(react@19.2.6) + react: 19.2.6 + optionalDependencies: + '@types/react': 19.2.14 + + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6)': + dependencies: + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + '@types/react-dom': 19.2.3(@types/react@19.2.14) + + '@radix-ui/rect@1.1.1': {} + + '@shikijs/core@4.0.2': + dependencies: + '@shikijs/primitive': 4.0.2 + '@shikijs/types': 4.0.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + + '@shikijs/engine-javascript@4.0.2': + dependencies: + '@shikijs/types': 4.0.2 + '@shikijs/vscode-textmate': 10.0.2 + oniguruma-to-es: 4.3.6 + + '@shikijs/engine-oniguruma@4.0.2': + dependencies: + '@shikijs/types': 4.0.2 + '@shikijs/vscode-textmate': 10.0.2 + + '@shikijs/langs@4.0.2': + dependencies: + '@shikijs/types': 4.0.2 + + '@shikijs/primitive@4.0.2': + dependencies: + '@shikijs/types': 4.0.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + '@shikijs/themes@4.0.2': + dependencies: + '@shikijs/types': 4.0.2 + + '@shikijs/types@4.0.2': + dependencies: + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + '@shikijs/vscode-textmate@10.0.2': {} + + '@standard-schema/spec@1.1.0': {} + + '@swc/helpers@0.5.15': + dependencies: + tslib: 2.8.1 + + '@tailwindcss/node@4.3.0': + dependencies: + '@jridgewell/remapping': 2.3.5 + enhanced-resolve: 5.21.2 + 
jiti: 2.7.0 + lightningcss: 1.32.0 + magic-string: 0.30.21 + source-map-js: 1.2.1 + tailwindcss: 4.3.0 + + '@tailwindcss/oxide-android-arm64@4.3.0': + optional: true + + '@tailwindcss/oxide-darwin-arm64@4.3.0': + optional: true + + '@tailwindcss/oxide-darwin-x64@4.3.0': + optional: true + + '@tailwindcss/oxide-freebsd-x64@4.3.0': + optional: true + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.3.0': + optional: true + + '@tailwindcss/oxide-linux-arm64-gnu@4.3.0': + optional: true + + '@tailwindcss/oxide-linux-arm64-musl@4.3.0': + optional: true + + '@tailwindcss/oxide-linux-x64-gnu@4.3.0': + optional: true + + '@tailwindcss/oxide-linux-x64-musl@4.3.0': + optional: true + + '@tailwindcss/oxide-wasm32-wasi@4.3.0': + optional: true + + '@tailwindcss/oxide-win32-arm64-msvc@4.3.0': + optional: true + + '@tailwindcss/oxide-win32-x64-msvc@4.3.0': + optional: true + + '@tailwindcss/oxide@4.3.0': + optionalDependencies: + '@tailwindcss/oxide-android-arm64': 4.3.0 + '@tailwindcss/oxide-darwin-arm64': 4.3.0 + '@tailwindcss/oxide-darwin-x64': 4.3.0 + '@tailwindcss/oxide-freebsd-x64': 4.3.0 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.3.0 + '@tailwindcss/oxide-linux-arm64-gnu': 4.3.0 + '@tailwindcss/oxide-linux-arm64-musl': 4.3.0 + '@tailwindcss/oxide-linux-x64-gnu': 4.3.0 + '@tailwindcss/oxide-linux-x64-musl': 4.3.0 + '@tailwindcss/oxide-wasm32-wasi': 4.3.0 + '@tailwindcss/oxide-win32-arm64-msvc': 4.3.0 + '@tailwindcss/oxide-win32-x64-msvc': 4.3.0 + + '@tailwindcss/postcss@4.3.0': + dependencies: + '@alloc/quick-lru': 5.2.0 + '@tailwindcss/node': 4.3.0 + '@tailwindcss/oxide': 4.3.0 + postcss: 8.5.14 + tailwindcss: 4.3.0 + + '@types/d3-array@3.2.2': {} + + '@types/d3-axis@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-brush@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-chord@3.0.6': {} + + '@types/d3-color@3.1.3': {} + + '@types/d3-contour@3.0.6': + dependencies: + '@types/d3-array': 3.2.2 + '@types/geojson': 7946.0.16 + + 
'@types/d3-delaunay@6.0.4': {} + + '@types/d3-dispatch@3.0.7': {} + + '@types/d3-drag@3.0.7': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-dsv@3.0.7': {} + + '@types/d3-ease@3.0.2': {} + + '@types/d3-fetch@3.0.7': + dependencies: + '@types/d3-dsv': 3.0.7 + + '@types/d3-force@3.0.10': {} + + '@types/d3-format@3.0.4': {} + + '@types/d3-geo@3.1.0': + dependencies: + '@types/geojson': 7946.0.16 + + '@types/d3-hierarchy@3.1.7': {} + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-path@3.1.1': {} + + '@types/d3-polygon@3.0.2': {} + + '@types/d3-quadtree@3.0.6': {} + + '@types/d3-random@3.0.3': {} + + '@types/d3-scale-chromatic@3.1.0': {} + + '@types/d3-scale@4.0.9': + dependencies: + '@types/d3-time': 3.0.4 + + '@types/d3-selection@3.0.11': {} + + '@types/d3-shape@3.1.8': + dependencies: + '@types/d3-path': 3.1.1 + + '@types/d3-time-format@4.0.3': {} + + '@types/d3-time@3.0.4': {} + + '@types/d3-timer@3.0.2': {} + + '@types/d3-transition@3.0.9': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-zoom@3.0.8': + dependencies: + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + + '@types/d3@7.4.3': + dependencies: + '@types/d3-array': 3.2.2 + '@types/d3-axis': 3.0.6 + '@types/d3-brush': 3.0.6 + '@types/d3-chord': 3.0.6 + '@types/d3-color': 3.1.3 + '@types/d3-contour': 3.0.6 + '@types/d3-delaunay': 6.0.4 + '@types/d3-dispatch': 3.0.7 + '@types/d3-drag': 3.0.7 + '@types/d3-dsv': 3.0.7 + '@types/d3-ease': 3.0.2 + '@types/d3-fetch': 3.0.7 + '@types/d3-force': 3.0.10 + '@types/d3-format': 3.0.4 + '@types/d3-geo': 3.1.0 + '@types/d3-hierarchy': 3.1.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-path': 3.1.1 + '@types/d3-polygon': 3.0.2 + '@types/d3-quadtree': 3.0.6 + '@types/d3-random': 3.0.3 + '@types/d3-scale': 4.0.9 + '@types/d3-scale-chromatic': 3.1.0 + '@types/d3-selection': 3.0.11 + '@types/d3-shape': 3.1.8 + '@types/d3-time': 3.0.4 + '@types/d3-time-format': 4.0.3 + '@types/d3-timer': 
3.0.2 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + + '@types/debug@4.1.13': + dependencies: + '@types/ms': 2.1.0 + + '@types/estree-jsx@1.0.5': + dependencies: + '@types/estree': 1.0.9 + + '@types/estree@1.0.9': {} + + '@types/geojson@7946.0.16': {} + + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/mdx@2.0.13': {} + + '@types/ms@2.1.0': {} + + '@types/node@25.6.2': + dependencies: + undici-types: 7.19.2 + + '@types/react-dom@19.2.3(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + + '@types/react@19.2.14': + dependencies: + csstype: 3.2.3 + + '@types/trusted-types@2.0.7': + optional: true + + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + + '@ungap/structured-clone@1.3.1': {} + + '@upsetjs/venn.js@2.0.0': + optionalDependencies: + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + '@zeit/schemas@2.36.0': {} + + acorn-jsx@5.3.2(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + ajv@8.18.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.2 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + + ansi-align@3.0.1: + dependencies: + string-width: 4.2.3 + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.3: {} + + arch@2.2.0: {} + + arg@5.0.2: {} + + argparse@2.0.1: {} + + aria-hidden@1.2.6: + dependencies: + tslib: 2.8.1 + + astring@1.9.0: {} + + bail@2.0.2: {} + + balanced-match@1.0.2: {} + + baseline-browser-mapping@2.10.29: {} + + boxen@7.0.0: + dependencies: + ansi-align: 3.0.1 + camelcase: 7.0.1 + chalk: 5.0.1 + cli-boxes: 3.0.0 + string-width: 5.1.2 + type-fest: 2.19.0 + widest-line: 4.0.1 + wrap-ansi: 8.1.0 + + brace-expansion@1.1.14: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + bytes@3.0.0: {} + + bytes@3.1.2: {} + + camelcase@7.0.1: {} + + caniuse-lite@1.0.30001792: {} + + 
ccount@2.0.1: {} + + chalk-template@0.4.0: + dependencies: + chalk: 4.1.2 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.0.1: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + character-reference-invalid@2.0.1: {} + + chevrotain-allstar@0.4.3(chevrotain@12.0.0): + dependencies: + chevrotain: 12.0.0 + lodash-es: 4.18.1 + + chevrotain@12.0.0: + dependencies: + '@chevrotain/cst-dts-gen': 12.0.0 + '@chevrotain/gast': 12.0.0 + '@chevrotain/regexp-to-ast': 12.0.0 + '@chevrotain/types': 12.0.0 + '@chevrotain/utils': 12.0.0 + + chokidar@5.0.0: + dependencies: + readdirp: 5.0.0 + + class-variance-authority@0.7.1: + dependencies: + clsx: 2.1.1 + + cli-boxes@3.0.0: {} + + client-only@0.0.1: {} + + clipboardy@3.0.0: + dependencies: + arch: 2.2.0 + execa: 5.1.1 + is-wsl: 2.2.0 + + clsx@2.1.1: {} + + collapse-white-space@2.1.0: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + comma-separated-tokens@2.0.3: {} + + commander@7.2.0: {} + + commander@8.3.0: {} + + compressible@2.0.18: + dependencies: + mime-db: 1.54.0 + + compression@1.8.1: + dependencies: + bytes: 3.1.2 + compressible: 2.0.18 + debug: 2.6.9 + negotiator: 0.6.4 + on-headers: 1.1.0 + safe-buffer: 5.2.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + compute-scroll-into-view@3.1.1: {} + + concat-map@0.0.1: {} + + content-disposition@0.5.2: {} + + cose-base@1.0.3: + dependencies: + layout-base: 1.0.2 + + cose-base@2.2.0: + dependencies: + layout-base: 2.0.1 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + csstype@3.2.3: {} + + cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.3): + dependencies: + cose-base: 1.0.3 + cytoscape: 3.33.3 + + cytoscape-fcose@2.2.0(cytoscape@3.33.3): + dependencies: + cose-base: 2.2.0 + cytoscape: 3.33.3 + + cytoscape@3.33.3: {} + + d3-array@2.12.1: + dependencies: + 
internmap: 1.0.1 + + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + + d3-axis@3.0.0: {} + + d3-brush@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3-chord@3.0.1: + dependencies: + d3-path: 3.1.0 + + d3-color@3.1.0: {} + + d3-contour@4.0.2: + dependencies: + d3-array: 3.2.4 + + d3-delaunay@6.0.4: + dependencies: + delaunator: 5.1.0 + + d3-dispatch@3.0.1: {} + + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 + + d3-dsv@3.0.1: + dependencies: + commander: 7.2.0 + iconv-lite: 0.6.3 + rw: 1.3.3 + + d3-ease@3.0.1: {} + + d3-fetch@3.0.1: + dependencies: + d3-dsv: 3.0.1 + + d3-force@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 + + d3-format@3.1.2: {} + + d3-geo@3.1.1: + dependencies: + d3-array: 3.2.4 + + d3-hierarchy@3.1.2: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-path@1.0.9: {} + + d3-path@3.1.0: {} + + d3-polygon@3.0.1: {} + + d3-quadtree@3.0.1: {} + + d3-random@3.0.1: {} + + d3-sankey@0.12.3: + dependencies: + d3-array: 2.12.1 + d3-shape: 1.3.7 + + d3-scale-chromatic@3.1.0: + dependencies: + d3-color: 3.1.0 + d3-interpolate: 3.0.1 + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.2 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-selection@3.0.0: {} + + d3-shape@1.3.7: + dependencies: + d3-path: 1.0.9 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 
3.0.1(d3-selection@3.0.0) + + d3@7.9.0: + dependencies: + d3-array: 3.2.4 + d3-axis: 3.0.0 + d3-brush: 3.0.0 + d3-chord: 3.0.1 + d3-color: 3.1.0 + d3-contour: 4.0.2 + d3-delaunay: 6.0.4 + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-dsv: 3.0.1 + d3-ease: 3.0.1 + d3-fetch: 3.0.1 + d3-force: 3.0.0 + d3-format: 3.1.2 + d3-geo: 3.1.1 + d3-hierarchy: 3.1.2 + d3-interpolate: 3.0.1 + d3-path: 3.1.0 + d3-polygon: 3.0.1 + d3-quadtree: 3.0.1 + d3-random: 3.0.1 + d3-scale: 4.0.2 + d3-scale-chromatic: 3.1.0 + d3-selection: 3.0.0 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-timer: 3.0.1 + d3-transition: 3.0.1(d3-selection@3.0.0) + d3-zoom: 3.0.0 + + dagre-d3-es@7.0.14: + dependencies: + d3: 7.9.0 + lodash-es: 4.18.1 + + dayjs@1.11.20: {} + + debug@2.6.9: + dependencies: + ms: 2.0.0 + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + decode-named-character-reference@1.3.0: + dependencies: + character-entities: 2.0.2 + + deep-extend@0.6.0: {} + + delaunator@5.1.0: + dependencies: + robust-predicates: 3.0.3 + + dequal@2.0.3: {} + + detect-libc@2.1.2: {} + + detect-node-es@1.1.0: {} + + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + dompurify@3.4.2: + optionalDependencies: + '@types/trusted-types': 2.0.7 + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + enhanced-resolve@5.21.2: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.3 + + entities@6.0.1: {} + + esast-util-from-estree@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + unist-util-position-from-estree: 2.0.0 + + esast-util-from-js@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + acorn: 8.16.0 + esast-util-from-estree: 2.0.0 + vfile-message: 4.0.3 + + esbuild@0.28.0: + optionalDependencies: + '@esbuild/aix-ppc64': 0.28.0 + '@esbuild/android-arm': 0.28.0 + '@esbuild/android-arm64': 0.28.0 + '@esbuild/android-x64': 0.28.0 + '@esbuild/darwin-arm64': 0.28.0 + '@esbuild/darwin-x64': 0.28.0 + '@esbuild/freebsd-arm64': 0.28.0 + 
'@esbuild/freebsd-x64': 0.28.0 + '@esbuild/linux-arm': 0.28.0 + '@esbuild/linux-arm64': 0.28.0 + '@esbuild/linux-ia32': 0.28.0 + '@esbuild/linux-loong64': 0.28.0 + '@esbuild/linux-mips64el': 0.28.0 + '@esbuild/linux-ppc64': 0.28.0 + '@esbuild/linux-riscv64': 0.28.0 + '@esbuild/linux-s390x': 0.28.0 + '@esbuild/linux-x64': 0.28.0 + '@esbuild/netbsd-arm64': 0.28.0 + '@esbuild/netbsd-x64': 0.28.0 + '@esbuild/openbsd-arm64': 0.28.0 + '@esbuild/openbsd-x64': 0.28.0 + '@esbuild/openharmony-arm64': 0.28.0 + '@esbuild/sunos-x64': 0.28.0 + '@esbuild/win32-arm64': 0.28.0 + '@esbuild/win32-ia32': 0.28.0 + '@esbuild/win32-x64': 0.28.0 + + escape-string-regexp@5.0.0: {} + + estree-util-attach-comments@3.0.0: + dependencies: + '@types/estree': 1.0.9 + + estree-util-build-jsx@3.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + estree-walker: 3.0.3 + + estree-util-is-identifier-name@3.0.0: {} + + estree-util-scope@1.0.0: + dependencies: + '@types/estree': 1.0.9 + devlop: 1.1.0 + + estree-util-to-js@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + astring: 1.9.0 + source-map: 0.7.6 + + estree-util-value-to-estree@3.5.0: + dependencies: + '@types/estree': 1.0.9 + + estree-util-visit@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/unist': 3.0.3 + + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.9 + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-uri@3.1.2: {} + + fdir@6.5.0(picomatch@4.0.4): + optionalDependencies: + picomatch: 4.0.4 + + framer-motion@12.38.0(react-dom@19.2.6(react@19.2.6))(react@19.2.6): + dependencies: + motion-dom: 12.38.0 + motion-utils: 12.36.0 + tslib: 2.8.1 + optionalDependencies: + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + + 
fumadocs-core@16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3): + dependencies: + '@orama/orama': 3.1.18 + estree-util-value-to-estree: 3.5.0 + github-slugger: 2.0.0 + hast-util-to-estree: 3.1.3 + hast-util-to-jsx-runtime: 2.3.6 + js-yaml: 4.1.1 + mdast-util-mdx: 3.0.0 + mdast-util-to-markdown: 2.1.2 + remark: 15.0.1 + remark-gfm: 4.0.1 + remark-rehype: 11.1.2 + scroll-into-view-if-needed: 3.1.0 + shiki: 4.0.2 + tinyglobby: 0.2.16 + unified: 11.0.5 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + optionalDependencies: + '@mdx-js/mdx': 3.1.1 + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/react': 19.2.14 + lucide-react: 1.14.0(react@19.2.6) + next: 16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + zod: 4.4.3 + transitivePeerDependencies: + - supports-color + + fumadocs-mdx@14.3.2(@types/mdast@4.0.4)(@types/mdx@2.0.13)(@types/react@19.2.14)(fumadocs-core@16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react@19.2.6): + dependencies: + '@mdx-js/mdx': 3.1.1 + '@standard-schema/spec': 1.1.0 + chokidar: 5.0.0 + esbuild: 0.28.0 + estree-util-value-to-estree: 3.5.0 + fumadocs-core: 16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3) + js-yaml: 4.1.1 + mdast-util-mdx: 3.0.0 + mdast-util-to-markdown: 
2.1.2 + picocolors: 1.1.1 + picomatch: 4.0.4 + tinyexec: 1.1.2 + tinyglobby: 0.2.16 + unified: 11.0.5 + unist-util-remove-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + zod: 4.4.3 + optionalDependencies: + '@types/mdast': 4.0.4 + '@types/mdx': 2.0.13 + '@types/react': 19.2.14 + next: 16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + transitivePeerDependencies: + - supports-color + + fumadocs-ui@16.8.5(@tailwindcss/oxide@4.3.0)(@types/mdx@2.0.13)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(fumadocs-core@16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(tailwindcss@4.3.0): + dependencies: + '@fumadocs/tailwind': 0.0.5(@tailwindcss/oxide@4.3.0)(tailwindcss@4.3.0) + '@radix-ui/react-accordion': 1.2.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-navigation-menu': 1.2.14(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-popover': 1.1.15(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-presence': 
1.1.5(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-scroll-area': 1.2.10(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + '@radix-ui/react-slot': 1.2.4(@types/react@19.2.14)(react@19.2.6) + '@radix-ui/react-tabs': 1.1.13(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + class-variance-authority: 0.7.1 + fumadocs-core: 16.8.5(@mdx-js/mdx@3.1.1)(@types/estree-jsx@1.0.5)(@types/hast@3.0.4)(@types/mdast@4.0.4)(@types/react@19.2.14)(lucide-react@1.14.0(react@19.2.6))(next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6))(react-dom@19.2.6(react@19.2.6))(react@19.2.6)(zod@4.4.3) + lucide-react: 1.14.0(react@19.2.6) + motion: 12.38.0(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + next-themes: 0.4.6(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + react-remove-scroll: 2.7.2(@types/react@19.2.14)(react@19.2.6) + rehype-raw: 7.0.0 + scroll-into-view-if-needed: 3.1.0 + shiki: 4.0.2 + tailwind-merge: 3.5.0 + unist-util-visit: 5.1.0 + optionalDependencies: + '@types/mdx': 2.0.13 + '@types/react': 19.2.14 + next: 16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + transitivePeerDependencies: + - '@emotion/is-prop-valid' + - '@tailwindcss/oxide' + - '@types/react-dom' + - tailwindcss + + get-nonce@1.0.1: {} + + get-stream@6.0.1: {} + + github-slugger@2.0.0: {} + + graceful-fs@4.2.11: {} + + hachure-fill@0.5.2: {} + + has-flag@4.0.0: {} + + hast-util-from-parse5@8.0.3: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + devlop: 1.1.0 + hastscript: 9.0.1 + property-information: 7.1.0 + vfile: 6.0.3 + vfile-location: 5.0.3 + web-namespaces: 2.0.1 + + hast-util-parse-selector@4.0.0: + dependencies: + '@types/hast': 3.0.4 + + hast-util-raw@9.1.0: + dependencies: + '@types/hast': 3.0.4 + 
'@types/unist': 3.0.3 + '@ungap/structured-clone': 1.3.1 + hast-util-from-parse5: 8.0.3 + hast-util-to-parse5: 8.0.1 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + parse5: 7.3.0 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + + hast-util-to-estree@3.1.3: + dependencies: + '@types/estree': 1.0.9 + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-attach-comments: 3.0.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + zwitch: 2.0.4 + transitivePeerDependencies: + - supports-color + + hast-util-to-html@9.0.5: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + + hast-util-to-jsx-runtime@2.3.6: + dependencies: + '@types/estree': 1.0.9 + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + hast-util-to-parse5@8.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + 
hastscript@9.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + hast-util-parse-selector: 4.0.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + + html-void-elements@3.0.0: {} + + human-signals@2.1.0: {} + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + import-meta-resolve@4.2.0: {} + + ini@1.3.8: {} + + inline-style-parser@0.2.7: {} + + internmap@1.0.1: {} + + internmap@2.0.3: {} + + is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + + is-decimal@2.0.1: {} + + is-docker@2.2.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-hexadecimal@2.0.1: {} + + is-plain-obj@4.1.0: {} + + is-port-reachable@4.0.0: {} + + is-stream@2.0.1: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + + isexe@2.0.0: {} + + jiti@2.7.0: {} + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + json-schema-traverse@1.0.0: {} + + katex@0.16.45: + dependencies: + commander: 8.3.0 + + khroma@2.1.0: {} + + langium@4.2.3: + dependencies: + '@chevrotain/regexp-to-ast': 12.0.0 + chevrotain: 12.0.0 + chevrotain-allstar: 0.4.3(chevrotain@12.0.0) + vscode-languageserver: 9.0.1 + vscode-languageserver-textdocument: 1.0.12 + vscode-uri: 3.1.0 + + layout-base@1.0.2: {} + + layout-base@2.0.1: {} + + lightningcss-android-arm64@1.32.0: + optional: true + + lightningcss-darwin-arm64@1.32.0: + optional: true + + lightningcss-darwin-x64@1.32.0: + optional: true + + lightningcss-freebsd-x64@1.32.0: + optional: true + + lightningcss-linux-arm-gnueabihf@1.32.0: + optional: true + + lightningcss-linux-arm64-gnu@1.32.0: + optional: true + + lightningcss-linux-arm64-musl@1.32.0: + optional: true + + lightningcss-linux-x64-gnu@1.32.0: + optional: true + + lightningcss-linux-x64-musl@1.32.0: + optional: true + + lightningcss-win32-arm64-msvc@1.32.0: + optional: true + + lightningcss-win32-x64-msvc@1.32.0: + optional: true + + lightningcss@1.32.0: + dependencies: + detect-libc: 2.1.2 + 
optionalDependencies: + lightningcss-android-arm64: 1.32.0 + lightningcss-darwin-arm64: 1.32.0 + lightningcss-darwin-x64: 1.32.0 + lightningcss-freebsd-x64: 1.32.0 + lightningcss-linux-arm-gnueabihf: 1.32.0 + lightningcss-linux-arm64-gnu: 1.32.0 + lightningcss-linux-arm64-musl: 1.32.0 + lightningcss-linux-x64-gnu: 1.32.0 + lightningcss-linux-x64-musl: 1.32.0 + lightningcss-win32-arm64-msvc: 1.32.0 + lightningcss-win32-x64-msvc: 1.32.0 + + lodash-es@4.18.1: {} + + longest-streak@3.1.0: {} + + lucide-react@1.14.0(react@19.2.6): + dependencies: + react: 19.2.6 + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + markdown-extensions@2.0.0: {} + + markdown-table@3.0.4: {} + + marked@16.4.2: {} + + mdast-util-find-and-replace@3.0.2: + dependencies: + '@types/mdast': 4.0.4 + escape-string-regexp: 5.0.0 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + mdast-util-from-markdown@2.0.3: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.2 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-decode-string: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-autolink-literal@2.0.1: + dependencies: + '@types/mdast': 4.0.4 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-find-and-replace: 3.0.2 + micromark-util-character: 2.1.1 + + mdast-util-gfm-footnote@2.1.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + micromark-util-normalize-identifier: 2.0.1 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-strikethrough@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + 
transitivePeerDependencies: + - supports-color + + mdast-util-gfm-table@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + markdown-table: 3.0.4 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-task-list-item@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm@3.1.0: + dependencies: + mdast-util-from-markdown: 2.0.3 + mdast-util-gfm-autolink-literal: 2.0.1 + mdast-util-gfm-footnote: 2.1.0 + mdast-util-gfm-strikethrough: 2.0.0 + mdast-util-gfm-table: 2.0.0 + mdast-util-gfm-task-list-item: 2.0.0 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-expression@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.2.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.2 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx@3.0.0: + dependencies: + mdast-util-from-markdown: 2.0.3 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdxjs-esm@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.3 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + 
mdast-util-phrasing@4.1.0: + dependencies: + '@types/mdast': 4.0.4 + unist-util-is: 6.0.1 + + mdast-util-to-hast@13.2.1: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.1 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + + mdast-util-to-markdown@2.1.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-classify-character: 2.0.1 + micromark-util-decode-string: 2.0.1 + unist-util-visit: 5.1.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + '@types/mdast': 4.0.4 + + merge-stream@2.0.0: {} + + mermaid@11.14.0: + dependencies: + '@braintree/sanitize-url': 7.1.2 + '@iconify/utils': 3.1.3 + '@mermaid-js/parser': 1.1.0 + '@types/d3': 7.4.3 + '@upsetjs/venn.js': 2.0.0 + cytoscape: 3.33.3 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.3) + cytoscape-fcose: 2.2.0(cytoscape@3.33.3) + d3: 7.9.0 + d3-sankey: 0.12.3 + dagre-d3-es: 7.0.14 + dayjs: 1.11.20 + dompurify: 3.4.2 + katex: 0.16.45 + khroma: 2.1.0 + lodash-es: 4.18.1 + marked: 16.4.2 + roughjs: 4.6.6 + stylis: 4.4.0 + ts-dedent: 2.2.0 + uuid: 11.1.1 + + micromark-core-commonmark@2.0.3: + dependencies: + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-factory-destination: 2.0.1 + micromark-factory-label: 2.0.1 + micromark-factory-space: 2.0.1 + micromark-factory-title: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-html-tag-name: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-autolink-literal@2.1.0: + dependencies: + micromark-util-character: 2.1.1 + 
micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-footnote@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-strikethrough@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-table@2.1.1: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-tagfilter@2.0.0: + dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-gfm-task-list-item@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm@3.0.0: + dependencies: + micromark-extension-gfm-autolink-literal: 2.1.0 + micromark-extension-gfm-footnote: 2.1.0 + micromark-extension-gfm-strikethrough: 2.1.0 + micromark-extension-gfm-table: 2.1.1 + micromark-extension-gfm-tagfilter: 2.0.0 + micromark-extension-gfm-task-list-item: 2.1.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-mdx-expression@3.0.1: + dependencies: + '@types/estree': 1.0.9 + devlop: 1.1.0 + micromark-factory-mdx-expression: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-mdx-jsx@3.0.2: + dependencies: + '@types/estree': 1.0.9 + devlop: 1.1.0 + 
estree-util-is-identifier-name: 3.0.0 + micromark-factory-mdx-expression: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + vfile-message: 4.0.3 + + micromark-extension-mdx-md@2.0.0: + dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-mdxjs-esm@3.0.0: + dependencies: + '@types/estree': 1.0.9 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.3 + + micromark-extension-mdxjs@3.0.0: + dependencies: + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) + micromark-extension-mdx-expression: 3.0.1 + micromark-extension-mdx-jsx: 3.0.2 + micromark-extension-mdx-md: 2.0.0 + micromark-extension-mdxjs-esm: 3.0.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-destination@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-label@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-mdx-expression@2.0.3: + dependencies: + '@types/estree': 1.0.9 + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.3 + + micromark-factory-space@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-types: 2.0.2 + + micromark-factory-title@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-whitespace@2.0.1: 
+ dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-character@2.1.1: + dependencies: + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-chunked@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-classify-character@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-combine-extensions@2.0.1: + dependencies: + micromark-util-chunked: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-decode-numeric-character-reference@2.0.2: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-decode-string@2.0.1: + dependencies: + decode-named-character-reference: 1.3.0 + micromark-util-character: 2.1.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-symbol: 2.0.1 + + micromark-util-encode@2.0.1: {} + + micromark-util-events-to-acorn@2.0.3: + dependencies: + '@types/estree': 1.0.9 + '@types/unist': 3.0.3 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + vfile-message: 4.0.3 + + micromark-util-html-tag-name@2.0.1: {} + + micromark-util-normalize-identifier@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-resolve-all@2.0.1: + dependencies: + micromark-util-types: 2.0.2 + + micromark-util-sanitize-uri@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 + + micromark-util-subtokenize@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-symbol@2.0.1: {} + + micromark-util-types@2.0.2: {} + + micromark@4.0.2: + dependencies: + '@types/debug': 4.1.13 + debug: 4.4.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + 
micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-combine-extensions: 2.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-encode: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + transitivePeerDependencies: + - supports-color + + mime-db@1.33.0: {} + + mime-db@1.54.0: {} + + mime-types@2.1.18: + dependencies: + mime-db: 1.33.0 + + mimic-fn@2.1.0: {} + + minimatch@3.1.5: + dependencies: + brace-expansion: 1.1.14 + + minimist@1.2.8: {} + + motion-dom@12.38.0: + dependencies: + motion-utils: 12.36.0 + + motion-utils@12.36.0: {} + + motion@12.38.0(react-dom@19.2.6(react@19.2.6))(react@19.2.6): + dependencies: + framer-motion: 12.38.0(react-dom@19.2.6(react@19.2.6))(react@19.2.6) + tslib: 2.8.1 + optionalDependencies: + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + + ms@2.0.0: {} + + ms@2.1.3: {} + + nanoid@3.3.12: {} + + negotiator@0.6.4: {} + + next-themes@0.4.6(react-dom@19.2.6(react@19.2.6))(react@19.2.6): + dependencies: + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + + next@16.2.4(react-dom@19.2.6(react@19.2.6))(react@19.2.6): + dependencies: + '@next/env': 16.2.4 + '@swc/helpers': 0.5.15 + baseline-browser-mapping: 2.10.29 + caniuse-lite: 1.0.30001792 + postcss: 8.4.31 + react: 19.2.6 + react-dom: 19.2.6(react@19.2.6) + styled-jsx: 5.1.6(react@19.2.6) + optionalDependencies: + '@next/swc-darwin-arm64': 16.2.4 + '@next/swc-darwin-x64': 16.2.4 + '@next/swc-linux-arm64-gnu': 16.2.4 + '@next/swc-linux-arm64-musl': 16.2.4 + '@next/swc-linux-x64-gnu': 16.2.4 + '@next/swc-linux-x64-musl': 16.2.4 + '@next/swc-win32-arm64-msvc': 16.2.4 + '@next/swc-win32-x64-msvc': 16.2.4 + sharp: 0.34.5 + transitivePeerDependencies: + - '@babel/core' + - babel-plugin-macros + + npm-run-path@4.0.1: + dependencies: + 
path-key: 3.1.1 + + on-headers@1.1.0: {} + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + oniguruma-parser@0.12.2: {} + + oniguruma-to-es@4.3.6: + dependencies: + oniguruma-parser: 0.12.2 + regex: 6.1.0 + regex-recursion: 6.0.2 + + package-manager-detector@1.6.0: {} + + parse-entities@4.0.2: + dependencies: + '@types/unist': 2.0.11 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.3.0 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + + parse5@7.3.0: + dependencies: + entities: 6.0.1 + + path-data-parser@0.1.0: {} + + path-is-inside@1.0.2: {} + + path-key@3.1.1: {} + + path-to-regexp@3.3.0: {} + + picocolors@1.1.1: {} + + picomatch@4.0.4: {} + + points-on-curve@0.2.0: {} + + points-on-path@0.2.1: + dependencies: + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + + postcss@8.4.31: + dependencies: + nanoid: 3.3.12 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + postcss@8.5.14: + dependencies: + nanoid: 3.3.12 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + property-information@7.1.0: {} + + range-parser@1.2.0: {} + + rc@1.2.8: + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + + react-dom@19.2.6(react@19.2.6): + dependencies: + react: 19.2.6 + scheduler: 0.27.0 + + react-remove-scroll-bar@2.3.8(@types/react@19.2.14)(react@19.2.6): + dependencies: + react: 19.2.6 + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.6) + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + react-remove-scroll@2.7.2(@types/react@19.2.14)(react@19.2.6): + dependencies: + react: 19.2.6 + react-remove-scroll-bar: 2.3.8(@types/react@19.2.14)(react@19.2.6) + react-style-singleton: 2.2.3(@types/react@19.2.14)(react@19.2.6) + tslib: 2.8.1 + use-callback-ref: 1.3.3(@types/react@19.2.14)(react@19.2.6) + use-sidecar: 1.1.3(@types/react@19.2.14)(react@19.2.6) + optionalDependencies: + '@types/react': 19.2.14 + + 
react-style-singleton@2.2.3(@types/react@19.2.14)(react@19.2.6): + dependencies: + get-nonce: 1.0.1 + react: 19.2.6 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + react@19.2.6: {} + + readdirp@5.0.0: {} + + recma-build-jsx@1.0.0: + dependencies: + '@types/estree': 1.0.9 + estree-util-build-jsx: 3.0.1 + vfile: 6.0.3 + + recma-jsx@1.0.1(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) + estree-util-to-js: 2.0.0 + recma-parse: 1.0.0 + recma-stringify: 1.0.0 + unified: 11.0.5 + + recma-parse@1.0.0: + dependencies: + '@types/estree': 1.0.9 + esast-util-from-js: 2.0.1 + unified: 11.0.5 + vfile: 6.0.3 + + recma-stringify@1.0.0: + dependencies: + '@types/estree': 1.0.9 + estree-util-to-js: 2.0.0 + unified: 11.0.5 + vfile: 6.0.3 + + regex-recursion@6.0.2: + dependencies: + regex-utilities: 2.3.0 + + regex-utilities@2.3.0: {} + + regex@6.1.0: + dependencies: + regex-utilities: 2.3.0 + + registry-auth-token@3.3.2: + dependencies: + rc: 1.2.8 + safe-buffer: 5.2.1 + + registry-url@3.1.0: + dependencies: + rc: 1.2.8 + + rehype-raw@7.0.0: + dependencies: + '@types/hast': 3.0.4 + hast-util-raw: 9.1.0 + vfile: 6.0.3 + + rehype-recma@1.0.0: + dependencies: + '@types/estree': 1.0.9 + '@types/hast': 3.0.4 + hast-util-to-estree: 3.1.3 + transitivePeerDependencies: + - supports-color + + remark-gfm@4.0.1: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-gfm: 3.1.0 + micromark-extension-gfm: 3.0.0 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-mdx@3.1.1: + dependencies: + mdast-util-mdx: 3.0.0 + micromark-extension-mdxjs: 3.0.0 + transitivePeerDependencies: + - supports-color + + remark-parse@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.3 + micromark-util-types: 2.0.2 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-rehype@11.1.2: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 
4.0.4 + mdast-util-to-hast: 13.2.1 + unified: 11.0.5 + vfile: 6.0.3 + + remark-stringify@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-to-markdown: 2.1.2 + unified: 11.0.5 + + remark@15.0.1: + dependencies: + '@types/mdast': 4.0.4 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + require-from-string@2.0.2: {} + + robust-predicates@3.0.3: {} + + roughjs@4.6.6: + dependencies: + hachure-fill: 0.5.2 + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + points-on-path: 0.2.1 + + rw@1.3.3: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + scheduler@0.27.0: {} + + scroll-into-view-if-needed@3.1.0: + dependencies: + compute-scroll-into-view: 3.1.1 + + semver@7.8.0: + optional: true + + serve-handler@6.1.7: + dependencies: + bytes: 3.0.0 + content-disposition: 0.5.2 + mime-types: 2.1.18 + minimatch: 3.1.5 + path-is-inside: 1.0.2 + path-to-regexp: 3.3.0 + range-parser: 1.2.0 + + serve@14.2.6: + dependencies: + '@zeit/schemas': 2.36.0 + ajv: 8.18.0 + arg: 5.0.2 + boxen: 7.0.0 + chalk: 5.0.1 + chalk-template: 0.4.0 + clipboardy: 3.0.0 + compression: 1.8.1 + is-port-reachable: 4.0.0 + serve-handler: 6.1.7 + update-check: 1.5.4 + transitivePeerDependencies: + - supports-color + + sharp@0.34.5: + dependencies: + '@img/colour': 1.1.0 + detect-libc: 2.1.2 + semver: 7.8.0 + optionalDependencies: + '@img/sharp-darwin-arm64': 0.34.5 + '@img/sharp-darwin-x64': 0.34.5 + '@img/sharp-libvips-darwin-arm64': 1.2.4 + '@img/sharp-libvips-darwin-x64': 1.2.4 + '@img/sharp-libvips-linux-arm': 1.2.4 + '@img/sharp-libvips-linux-arm64': 1.2.4 + '@img/sharp-libvips-linux-ppc64': 1.2.4 + '@img/sharp-libvips-linux-riscv64': 1.2.4 + '@img/sharp-libvips-linux-s390x': 1.2.4 + '@img/sharp-libvips-linux-x64': 1.2.4 + '@img/sharp-libvips-linuxmusl-arm64': 1.2.4 + '@img/sharp-libvips-linuxmusl-x64': 1.2.4 + '@img/sharp-linux-arm': 0.34.5 + '@img/sharp-linux-arm64': 0.34.5 + '@img/sharp-linux-ppc64': 0.34.5 + 
'@img/sharp-linux-riscv64': 0.34.5 + '@img/sharp-linux-s390x': 0.34.5 + '@img/sharp-linux-x64': 0.34.5 + '@img/sharp-linuxmusl-arm64': 0.34.5 + '@img/sharp-linuxmusl-x64': 0.34.5 + '@img/sharp-wasm32': 0.34.5 + '@img/sharp-win32-arm64': 0.34.5 + '@img/sharp-win32-ia32': 0.34.5 + '@img/sharp-win32-x64': 0.34.5 + optional: true + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + shiki@4.0.2: + dependencies: + '@shikijs/core': 4.0.2 + '@shikijs/engine-javascript': 4.0.2 + '@shikijs/engine-oniguruma': 4.0.2 + '@shikijs/langs': 4.0.2 + '@shikijs/themes': 4.0.2 + '@shikijs/types': 4.0.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + + signal-exit@3.0.7: {} + + source-map-js@1.2.1: {} + + source-map@0.7.6: {} + + space-separated-tokens@2.0.2: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.2.0 + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.2.0: + dependencies: + ansi-regex: 6.2.2 + + strip-final-newline@2.0.0: {} + + strip-json-comments@2.0.1: {} + + style-to-js@1.1.21: + dependencies: + style-to-object: 1.0.14 + + style-to-object@1.0.14: + dependencies: + inline-style-parser: 0.2.7 + + styled-jsx@5.1.6(react@19.2.6): + dependencies: + client-only: 0.0.1 + react: 19.2.6 + + stylis@4.4.0: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + tailwind-merge@3.5.0: {} + + tailwindcss@4.3.0: {} + + tapable@2.3.3: {} + + tinyexec@1.1.2: {} + + tinyglobby@0.2.16: + dependencies: + fdir: 6.5.0(picomatch@4.0.4) + picomatch: 4.0.4 + + trim-lines@3.0.1: {} + + trough@2.2.0: {} + + ts-dedent@2.2.0: {} + + tslib@2.8.1: {} + + type-fest@2.19.0: {} + + typescript@6.0.3: {} + + undici-types@7.19.2: {} + + 
unified@11.0.5: + dependencies: + '@types/unist': 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + + unist-util-is@6.0.1: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position-from-estree@2.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-remove-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-visit: 5.1.0 + + unist-util-stringify-position@4.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-visit-parents@6.0.2: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + + unist-util-visit@5.1.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + update-check@1.5.4: + dependencies: + registry-auth-token: 3.3.2 + registry-url: 3.1.0 + + use-callback-ref@1.3.3(@types/react@19.2.14)(react@19.2.6): + dependencies: + react: 19.2.6 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + use-sidecar@1.1.3(@types/react@19.2.14)(react@19.2.6): + dependencies: + detect-node-es: 1.1.0 + react: 19.2.6 + tslib: 2.8.1 + optionalDependencies: + '@types/react': 19.2.14 + + uuid@11.1.1: {} + + vary@1.1.2: {} + + vfile-location@5.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile: 6.0.3 + + vfile-message@4.0.3: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + + vfile@6.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile-message: 4.0.3 + + vscode-jsonrpc@8.2.0: {} + + vscode-languageserver-protocol@3.17.5: + dependencies: + vscode-jsonrpc: 8.2.0 + vscode-languageserver-types: 3.17.5 + + vscode-languageserver-textdocument@1.0.12: {} + + vscode-languageserver-types@3.17.5: {} + + vscode-languageserver@9.0.1: + dependencies: + vscode-languageserver-protocol: 3.17.5 + + vscode-uri@3.1.0: {} + + web-namespaces@2.0.1: {} + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + widest-line@4.0.1: + dependencies: + 
string-width: 5.1.2 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.2.0 + + zod@4.4.3: {} + + zwitch@2.0.4: {} diff --git a/docs-next/postcss.config.mjs b/docs-next/postcss.config.mjs new file mode 100644 index 0000000..61e3684 --- /dev/null +++ b/docs-next/postcss.config.mjs @@ -0,0 +1,7 @@ +const config = { + plugins: { + "@tailwindcss/postcss": {}, + }, +}; + +export default config; diff --git a/docs-next/public/.nojekyll b/docs-next/public/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/docs-next/public/img/favicon.ico b/docs-next/public/img/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..c01d54bcd39a5f853428f3cd5aa0f383d963c484 GIT binary patch literal 3626 zcmb`Je@s(X6vrR`EK3%b%orErlDW({vnABqA zcfaS{d+xbU5JKp0*;0YOg+;Fl!eT)XRuapIwFLL`=imZCSon$`se`_<%@MB=M~KG+ z=EW^FL`w|Bo>*ktlaS^(fut!95`iG5u=SZ8nfDHO#GaTlH1-XG^;vsjUb^gWTVz0+ z^=WR1wv9-2oeR=_;fL0H7rNWqAzGtO(D;`~cX(RcN0w2v24Y8)6t`cS^_ghs`_ho? 
z{0ka~1Dgo8TfAP$r*ua?>$_V+kZ!-(TvEJ7O2f;Y#tezt$&R4 zLI}=-y@Z!grf*h3>}DUL{km4R>ya_I5Ag#{h_&?+HpKS!;$x3LC#CqUQ8&nM?X))Q zXAy2?`YL4FbC5CgJu(M&Q|>1st8XXLZ|5MgwgjP$m_2Vt0(J z&Gu7bOlkbGzGm2sh?X`){7w69Y$1#@P@7DF{ZE=4%T0NDS)iH`tiPSKpDNW)zmtn( zw;4$f>k)4$LBc>eBAaTZeCM2(iD+sHlj!qd z2GjRJ>f_Qes(+mnzdA^NH?^NB(^o-%Gmg$c8MNMq&`vm@9Ut;*&$xSD)PKH{wBCEC z4P9%NQ;n2s59ffMn8*5)5AAg4-93gBXBDX`A7S& zH-|%S3Wd%T79fk-e&l`{!?lve8_epXhE{d3Hn$Cg!t=-4D(t$cK~7f&4s?t7wr3ZP z*!SRQ-+tr|e1|hbc__J`k3S!rMy<0PHy&R`v#aJv?`Y?2{avK5sQz%=Us()jcNuZV z*$>auD4cEw>;t`+m>h?f?%VFJZj8D|Y1e_SjxG%J4{-AkFtT2+ZZS5UScS~%;dp!V>)7zi`w(xwSd*FS;Lml=f6hn#jq)2is4nkp+aTrV?)F6N z>DY#SU0IZ;*?Hu%tSj4edd~kYNHMFvS&5}#3-M;mBCOCZL3&;2obdG?qZ>rD|zC|Lu|sny76pn2xl|6sk~Hs{X9{8iBW zwiwgQt+@hi`FYMEhX2 \ No newline at end of file diff --git a/docs-next/scripts/migrate.mjs b/docs-next/scripts/migrate.mjs new file mode 100644 index 0000000..1eef04d --- /dev/null +++ b/docs-next/scripts/migrate.mjs @@ -0,0 +1,135 @@ +#!/usr/bin/env node +/** + * One-shot migrator: docs/pages/**.mdx → docs-next/content/docs/**.mdx + * + * Transforms applied to each MDX file: + * 1. Drop frontmatter keys `sidebar_position` and `slug` (Fumadocs uses + * per-directory meta.json + file path for routing). + * 2. Drop `import X from '@site/src/components/X'` lines — components + * are globally registered via src/components/mdx.tsx. + * 3. Convert Docusaurus admonitions to Fumadocs ``: + * :::tip Heading → + * … body … body + * ::: + * (note→note, warning→warn, danger→error, info→info, tip→info) + * 4. Rename `intro.mdx` (Docusaurus's slug:/) to `index.mdx` + * (Fumadocs's root convention). + * + * Idempotent — re-running on already-migrated files is a no-op. 
+ */ +import { mkdir, readdir, readFile, stat, writeFile } from "node:fs/promises"; +import { dirname, join, relative } from "node:path"; +import { fileURLToPath } from "node:url"; + +const HERE = dirname(fileURLToPath(import.meta.url)); +const SRC = join(HERE, "..", "..", "docs", "pages"); +const DST = join(HERE, "..", "content", "docs"); + +const ADMONITION_MAP = { + tip: "info", + note: "note", + info: "info", + warning: "warn", + caution: "warn", + danger: "error", +}; + +async function* walk(dir) { + for (const entry of await readdir(dir, { withFileTypes: true })) { + const full = join(dir, entry.name); + if (entry.isDirectory()) { + yield* walk(full); + } else if (entry.isFile() && entry.name.endsWith(".mdx")) { + yield full; + } + } +} + +function transformFrontmatter(src) { + const fmMatch = src.match(/^---\n([\s\S]*?)\n---\n?/); + if (!fmMatch) return src; + const body = fmMatch[1]; + const cleaned = body + .split("\n") + .filter((line) => !/^\s*(sidebar_position|slug)\s*:/.test(line)) + .join("\n"); + return `---\n${cleaned}\n---\n${src.slice(fmMatch[0].length)}`; +} + +function dropSiteImports(src) { + return ( + src + .split("\n") + .filter( + (line) => + !/^import\s+\w+\s+from\s+['"]@site\/src\/components\/[^'"]+['"];?\s*$/.test( + line.trim(), + ), + ) + .join("\n") + // collapse 3+ blank lines that result from import removals + .replace(/\n{3,}/g, "\n\n") + ); +} + +/** Shiki's default bundle has no `dot` grammar — fall back to plain text. */ +function relabelDotCodeBlocks(src) { + return src.replace(/^```dot\s*$/gm, "```text"); +} + +function convertAdmonitions(src) { + // Match ::: [optional title]\n…\n::: + return src.replace( + /^:::(tip|note|info|warning|caution|danger)([ \t]+([^\n]+))?\n([\s\S]*?)^:::\s*$/gm, + (_full, type, _gap, title, body) => { + const calloutType = ADMONITION_MAP[type] ?? "info"; + const trimmedBody = body.replace(/\s+$/g, ""); + const titleAttr = title ? 
` title="${title.trim()}"` : ""; + return `\n${trimmedBody}\n`; + }, + ); +} + +async function ensureDir(d) { + await mkdir(d, { recursive: true }); +} + +async function migrateFile(srcPath) { + const rel = relative(SRC, srcPath); + // intro.mdx → index.mdx (Fumadocs root convention) + const remapped = rel === "intro.mdx" ? "index.mdx" : rel; + const dstPath = join(DST, remapped); + await ensureDir(dirname(dstPath)); + + let content = await readFile(srcPath, "utf8"); + content = transformFrontmatter(content); + content = dropSiteImports(content); + content = convertAdmonitions(content); + content = relabelDotCodeBlocks(content); + + await writeFile(dstPath, content, "utf8"); + return { srcPath, dstPath, rel }; +} + +async function main() { + await ensureDir(DST); + + // Sanity: source must exist + try { + const s = await stat(SRC); + if (!s.isDirectory()) throw new Error("not a directory"); + } catch { + console.error(`Source not found: ${SRC}`); + process.exit(1); + } + + let count = 0; + for await (const srcPath of walk(SRC)) { + const { rel } = await migrateFile(srcPath); + process.stdout.write(` ✓ ${rel}\n`); + count += 1; + } + console.log(`\nMigrated ${count} MDX files → ${DST}`); +} + +await main(); diff --git a/docs-next/source.config.ts b/docs-next/source.config.ts new file mode 100644 index 0000000..4cef005 --- /dev/null +++ b/docs-next/source.config.ts @@ -0,0 +1,23 @@ +import { metaSchema, pageSchema } from "fumadocs-core/source/schema"; +import { defineConfig, defineDocs } from "fumadocs-mdx/config"; + +// You can customize Zod schemas for frontmatter and `meta.json` here +// see https://fumadocs.dev/docs/mdx/collections +export const docs = defineDocs({ + dir: "content/docs", + docs: { + schema: pageSchema, + postprocess: { + includeProcessedMarkdown: true, + }, + }, + meta: { + schema: metaSchema, + }, +}); + +export default defineConfig({ + mdxOptions: { + // MDX options + }, +}); diff --git a/docs-next/src/app/(docs)/[...slug]/page.tsx 
b/docs-next/src/app/(docs)/[...slug]/page.tsx new file mode 100644 index 0000000..c16b155 --- /dev/null +++ b/docs-next/src/app/(docs)/[...slug]/page.tsx @@ -0,0 +1,67 @@ +import { + DocsBody, + DocsDescription, + DocsPage, + DocsTitle, + MarkdownCopyButton, + ViewOptionsPopover, +} from "fumadocs-ui/layouts/docs/page"; +import { createRelativeLink } from "fumadocs-ui/mdx"; +import type { Metadata } from "next"; +import { notFound } from "next/navigation"; +import { getMDXComponents } from "@/components/mdx"; +import { gitConfig } from "@/lib/shared"; +import { getPageImage, getPageMarkdownUrl, source } from "@/lib/source"; + +export default async function Page(props: PageProps<"/[...slug]">) { + const params = await props.params; + const page = source.getPage(params.slug); + if (!page) notFound(); + + const MDX = page.data.body; + const markdownUrl = getPageMarkdownUrl(page).url; + + return ( + + {page.data.title} + + {page.data.description} + +
+ + +
+ + + +
+ ); +} + +export async function generateStaticParams() { + return source.generateParams(); +} + +export async function generateMetadata( + props: PageProps<"/[...slug]">, +): Promise { + const params = await props.params; + const page = source.getPage(params.slug); + if (!page) notFound(); + + return { + title: page.data.title, + description: page.data.description, + openGraph: { + images: getPageImage(page).url, + }, + }; +} diff --git a/docs-next/src/app/(docs)/layout.tsx b/docs-next/src/app/(docs)/layout.tsx new file mode 100644 index 0000000..cdde5f7 --- /dev/null +++ b/docs-next/src/app/(docs)/layout.tsx @@ -0,0 +1,11 @@ +import { DocsLayout } from "fumadocs-ui/layouts/docs"; +import { baseOptions } from "@/lib/layout.shared"; +import { source } from "@/lib/source"; + +export default function Layout({ children }: LayoutProps<"/">) { + return ( + + {children} + + ); +} diff --git a/docs-next/src/app/(home)/_sections/features.tsx b/docs-next/src/app/(home)/_sections/features.tsx new file mode 100644 index 0000000..9028ea7 --- /dev/null +++ b/docs-next/src/app/(home)/_sections/features.tsx @@ -0,0 +1,146 @@ +import { FeatureCard, FeatureGrid } from "@/components/feature-card"; +import { SectionHeader } from "@/components/ui/section-header"; + +const FEATURES = [ + { + title: "Typed Node Handles", + description: + "NodeRef carries an Arc+epoch handle returned by add_node — every API accepts str | NodeRef, stale handles error fast.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "@dagron.flow", + description: + "Tawazi-style: write a Python function, let the call structure become the DAG. Pythonic, no string IDs.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "Reactive Engine", + description: + "Signal/Computed/Watcher with auto-tracked deps. 
~10 µs to recompute one branch out of 10k after upstream mutation.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "Content-Addressed Cache", + description: + "Nix-flake-style cross-process cache backed by the filesystem. Two CI workers share intermediates without coordination.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "Time-Travel Replay", + description: + "Append-only JSONL traces + payload-deduped CAS. replay(at=t) reconstructs any past run state.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "Effect-Typed Tasks", + description: + "PURE / READ / WRITE / NETWORK / NONDETERMINISTIC tags drive cache opt-in, replay safety, and executor isolation.", + guideHref: "/guide/typed-and-reactive", + }, + { + title: "DAG Builder", + description: + "Fluent builder, from_records, and Pipeline / @task decorator for defining DAGs.", + guideHref: "/guide/core-concepts/building-dags", + apiHref: "/api/core/builder", + }, + { + title: "Parallel Execution", + description: + "Thread-pool and async executors with topological scheduling and cost-aware planning.", + guideHref: "/guide/core-concepts/executing-tasks", + apiHref: "/api/execution/execution", + }, + { + title: "Incremental Execution", + description: "Early-cutoff recomputation — only re-execute what changed.", + guideHref: "/guide/execution-strategies/incremental", + apiHref: "/api/execution/incremental", + }, + { + title: "Checkpointing", + description: "Save progress to disk and resume after failures.", + guideHref: "/guide/execution-strategies/checkpointing", + apiHref: "/api/execution/checkpoint", + }, + { + title: "Conditional & Dynamic DAGs", + description: + "Predicate-gated edges, runtime expansion based on node results.", + guideHref: "/guide/execution-strategies/conditional", + apiHref: "/api/execution/conditions", + }, + { + title: "Resource & Approval Gates", + description: + "GPU/CPU/memory-aware scheduling; human-in-the-loop pauses until approved.", + guideHref: 
"/guide/execution-strategies/resource-scheduling", + apiHref: "/api/execution/resources", + }, + { + title: "Distributed Execution", + description: "Pluggable backends: threads, multiprocessing, Ray, Celery.", + guideHref: "/guide/execution-strategies/distributed", + apiHref: "/api/execution/distributed", + }, + { + title: "Tracing & Profiling", + description: + "Chrome-compatible execution traces and critical-path analysis.", + guideHref: "/guide/observability/tracing-profiling", + apiHref: "/api/observability/tracing", + }, + { + title: "Graph Analysis", + description: + "Explain, what-if, lineage tracking, linting, and a query DSL.", + guideHref: "/guide/core-concepts/inspecting-graphs", + apiHref: "/api/analysis/analysis", + }, + { + title: "Contracts & DataFrames", + description: + "Type contracts across edges, validated at build time. Schema validation for pandas/polars pipelines.", + guideHref: "/guide/advanced/contracts", + apiHref: "/api/analysis/contracts", + }, + { + title: "Templates & Versioning", + description: + "Parameterised templates with placeholder expansion; append-only mutation log with diffing and forking.", + guideHref: "/guide/advanced/templates", + apiHref: "/api/utilities/template", + }, + { + title: "Plugins & Hooks", + description: + "Event-driven plugin system with hook registry and auto-discovery.", + guideHref: "/guide/advanced/plugins-hooks", + apiHref: "/api/utilities/plugins", + }, + { + title: "Visualization", + description: "ASCII, SVG, Mermaid, and a live web dashboard (Axum + SSE).", + guideHref: "/guide/observability/visualization", + apiHref: "/api/utilities/display", + }, +]; + +export function Features() { + return ( +
+
+ + + {FEATURES.map((f) => ( + + ))} + +
+
+ ); +} diff --git a/docs-next/src/app/(home)/_sections/hero.tsx b/docs-next/src/app/(home)/_sections/hero.tsx new file mode 100644 index 0000000..f358238 --- /dev/null +++ b/docs-next/src/app/(home)/_sections/hero.tsx @@ -0,0 +1,64 @@ +import { DynamicCodeBlock } from "fumadocs-ui/components/dynamic-codeblock"; +import { Button } from "@/components/ui/button"; + +const SAMPLE_CODE = `import dagron + +dag = ( + dagron.DAG.builder() + .add_node("extract") + .add_node("transform") + .add_node("load") + .add_edge("extract", "transform") + .add_edge("transform", "load") + .build() +) + +result = dagron.DAGExecutor(dag).execute({ + "extract": lambda: fetch_data(), + "transform": lambda: clean(result), + "load": lambda: write_to_db(result), +})`; + +export function Hero() { + return ( +
+
+
+

+ dagron +

+

+ High-performance DAG execution engine for Python, powered by Rust. +

+

+ + Up to 12× faster than NetworkX + {" "} + on 10k-node DAG validation, with sub-microsecond reachability + queries after index build. Build pipelines, schedulers, build + systems — anything that runs as a graph. +

+
+ + + +
+
+
+ +
+
+
+ ); +} diff --git a/docs-next/src/app/(home)/_sections/index.ts b/docs-next/src/app/(home)/_sections/index.ts new file mode 100644 index 0000000..9767ccc --- /dev/null +++ b/docs-next/src/app/(home)/_sections/index.ts @@ -0,0 +1,2 @@ +export { Features } from "./features"; +export { Hero } from "./hero"; diff --git a/docs-next/src/app/(home)/layout.tsx b/docs-next/src/app/(home)/layout.tsx new file mode 100644 index 0000000..d7c0853 --- /dev/null +++ b/docs-next/src/app/(home)/layout.tsx @@ -0,0 +1,6 @@ +import { HomeLayout } from "fumadocs-ui/layouts/home"; +import { homeOptions } from "@/lib/layout.shared"; + +export default function Layout({ children }: LayoutProps<"/">) { + return {children}; +} diff --git a/docs-next/src/app/(home)/page.tsx b/docs-next/src/app/(home)/page.tsx new file mode 100644 index 0000000..0ada2b0 --- /dev/null +++ b/docs-next/src/app/(home)/page.tsx @@ -0,0 +1,10 @@ +import { Features, Hero } from "./_sections"; + +export default function HomePage() { + return ( +
+ + +
+ ); +} diff --git a/docs-next/src/app/api/search/route.ts b/docs-next/src/app/api/search/route.ts new file mode 100644 index 0000000..c312b91 --- /dev/null +++ b/docs-next/src/app/api/search/route.ts @@ -0,0 +1,9 @@ +import { createFromSource } from "fumadocs-core/search/server"; +import { source } from "@/lib/source"; + +export const revalidate = false; + +export const { staticGET: GET } = createFromSource(source, { + // https://docs.orama.com/docs/orama-js/supported-languages + language: "english", +}); diff --git a/docs-next/src/app/global.css b/docs-next/src/app/global.css new file mode 100644 index 0000000..d31d9d5 --- /dev/null +++ b/docs-next/src/app/global.css @@ -0,0 +1,59 @@ +@import "tailwindcss"; +@import "fumadocs-ui/css/neutral.css"; +@import "fumadocs-ui/css/preset.css"; + +@theme { + --font-sans: + var(--font-sans), "IBM Plex Sans", ui-sans-serif, system-ui, sans-serif; + --font-mono: + var(--font-mono), "IBM Plex Mono", ui-monospace, SFMono-Regular, + "Cascadia Mono", monospace; + + /* dagron purple — primary brand */ + --color-fd-primary: #6c5ce7; + + /* status palette (light mode) */ + --color-dagron-pending: #a0aec0; + --color-dagron-running: #4299e1; + --color-dagron-completed: #48bb78; + --color-dagron-failed: #f56565; + --color-dagron-skipped: #ed8936; + --color-dagron-timed-out: #ed64a6; + --color-dagron-cancelled: #a0aec0; + --color-dagron-cache-hit: #38b2ac; + + /* effect palette */ + --color-dagron-effect-pure: #6c5ce7; + --color-dagron-effect-read: #38b2ac; + --color-dagron-effect-write: #ed8936; + --color-dagron-effect-network: #4299e1; + --color-dagron-effect-nondeterministic: #f56565; +} + +.dark { + --color-fd-primary: #a29bfe; + + --color-dagron-pending: #718096; + --color-dagron-running: #63b3ed; + --color-dagron-completed: #68d391; + --color-dagron-failed: #fc8181; + --color-dagron-skipped: #f6ad55; + --color-dagron-timed-out: #f687b3; + --color-dagron-cancelled: #718096; + --color-dagron-cache-hit: #4fd1c5; + + 
--color-dagron-effect-pure: #a29bfe; + --color-dagron-effect-read: #4fd1c5; + --color-dagron-effect-write: #f6ad55; + --color-dagron-effect-network: #63b3ed; + --color-dagron-effect-nondeterministic: #fc8181; +} + +html { + scrollbar-gutter: stable; +} + +html > body[data-scroll-locked] { + margin-right: 0px !important; + --removed-body-scroll-bar-size: 0px !important; +} diff --git a/docs-next/src/app/layout.tsx b/docs-next/src/app/layout.tsx new file mode 100644 index 0000000..553911d --- /dev/null +++ b/docs-next/src/app/layout.tsx @@ -0,0 +1,40 @@ +import type { Metadata } from "next"; +import { IBM_Plex_Mono, IBM_Plex_Sans } from "next/font/google"; +import { Provider } from "@/components/provider"; +import "./global.css"; + +const ibmPlexSans = IBM_Plex_Sans({ + subsets: ["latin"], + weight: ["400", "500", "600", "700"], + variable: "--font-sans", +}); + +const ibmPlexMono = IBM_Plex_Mono({ + subsets: ["latin"], + weight: ["400", "500", "600"], + variable: "--font-mono", +}); + +export const metadata: Metadata = { + metadataBase: new URL("https://byteveda.github.io/dagron"), + title: { + default: "dagron", + template: "%s | dagron", + }, + description: + "High-performance DAG execution engine for Python, powered by Rust.", +}; + +export default function Layout({ children }: LayoutProps<"/">) { + return ( + + + {children} + + + ); +} diff --git a/docs-next/src/app/llms-full.txt/route.ts b/docs-next/src/app/llms-full.txt/route.ts new file mode 100644 index 0000000..fcccaee --- /dev/null +++ b/docs-next/src/app/llms-full.txt/route.ts @@ -0,0 +1,10 @@ +import { getLLMText, source } from "@/lib/source"; + +export const revalidate = false; + +export async function GET() { + const scan = source.getPages().map(getLLMText); + const scanned = await Promise.all(scan); + + return new Response(scanned.join("\n\n")); +} diff --git a/docs-next/src/app/llms.txt/route.ts b/docs-next/src/app/llms.txt/route.ts new file mode 100644 index 0000000..775be71 --- /dev/null +++ 
b/docs-next/src/app/llms.txt/route.ts @@ -0,0 +1,8 @@ +import { llms } from "fumadocs-core/source"; +import { source } from "@/lib/source"; + +export const revalidate = false; + +export function GET() { + return new Response(llms(source).index()); +} diff --git a/docs-next/src/components/api-signature.tsx b/docs-next/src/components/api-signature.tsx new file mode 100644 index 0000000..ad2064f --- /dev/null +++ b/docs-next/src/components/api-signature.tsx @@ -0,0 +1,48 @@ +import { DynamicCodeBlock } from "fumadocs-ui/components/dynamic-codeblock"; +import { cn } from "@/lib/cn"; + +export type ApiSignatureProps = { + name: string; + signature: string; + language?: string; + className?: string; +}; + +function toAnchorId(name: string): string { + return name + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, ""); +} + +/** + * Anchored signature block used for API reference pages. Renders the symbol + * name as a deep-linkable header above a syntax-highlighted code block. + */ +export function ApiSignature({ + name, + signature, + language = "python", + className, +}: ApiSignatureProps) { + const id = toAnchorId(name); + return ( +
+ + {name} + +
+ +
+
+ ); +} diff --git a/docs-next/src/components/dag-diagram.tsx b/docs-next/src/components/dag-diagram.tsx new file mode 100644 index 0000000..0951883 --- /dev/null +++ b/docs-next/src/components/dag-diagram.tsx @@ -0,0 +1,26 @@ +import { cn } from "@/lib/cn"; +import { Mermaid } from "./mermaid"; + +export type DagDiagramProps = { + chart: string; + caption?: string; + className?: string; +}; + +/** + * MDX-friendly wrapper around `Mermaid` that adds an optional caption and + * matches dagron's documentation styling. Falls back to vanilla `` + * if you only need a diagram with no caption. + */ +export function DagDiagram({ chart, caption, className }: DagDiagramProps) { + return ( +
+ + {caption ? ( +
+ {caption} +
+ ) : null} +
+ ); +} diff --git a/docs-next/src/components/diagram-carousel.tsx b/docs-next/src/components/diagram-carousel.tsx new file mode 100644 index 0000000..dbe690f --- /dev/null +++ b/docs-next/src/components/diagram-carousel.tsx @@ -0,0 +1,149 @@ +"use client"; + +import { ChevronLeft, ChevronRight } from "lucide-react"; +import { + Children, + isValidElement, + type ReactElement, + type ReactNode, + useCallback, + useEffect, + useRef, + useState, +} from "react"; +import { cn } from "@/lib/cn"; + +type DiagramSlideProps = { + title?: string; + children: ReactNode; +}; + +export function DiagramSlide({ children }: DiagramSlideProps) { + return <>{children}; +} + +type SlideElement = ReactElement; + +export function DiagramCarousel({ + children, + title, +}: { + children: ReactNode; + title?: string; +}) { + const slides = Children.toArray(children).filter( + (child): child is SlideElement => isValidElement(child), + ); + const total = slides.length; + const containerRef = useRef(null); + const [index, setIndex] = useState(0); + + const prev = useCallback( + () => setIndex((i) => (i - 1 + total) % total), + [total], + ); + const next = useCallback(() => setIndex((i) => (i + 1) % total), [total]); + + useEffect(() => { + const node = containerRef.current; + if (!node) return; + function onKey(e: KeyboardEvent) { + if (e.key === "ArrowLeft") { + e.preventDefault(); + prev(); + } else if (e.key === "ArrowRight") { + e.preventDefault(); + next(); + } + } + node.addEventListener("keydown", onKey); + return () => node.removeEventListener("keydown", onKey); + }, [prev, next]); + + if (total === 0) return null; + + const activeTitle = slides[index]?.props.title; + + return ( +
} + className="taskito-carousel my-6 rounded-lg border border-fd-border bg-fd-card overflow-hidden focus:outline-none focus:ring-2 focus:ring-fd-primary/40" + // biome-ignore lint/a11y/noNoninteractiveTabindex: container hosts arrow-key navigation per WAI-ARIA carousel pattern + tabIndex={0} + aria-roledescription="carousel" + aria-label={title ?? "Diagram carousel"} + > +
+
+ {title && ( + + {title} + + )} + {activeTitle && ( + + {title ? "·" : ""} {activeTitle} + + )} +
+
+ + {index + 1} / {total} + +
+ + +
+
+
+
+ {slides.map((slide, i) => ( +
+ {slide} +
+ ))} +
+
+ {slides.map((slide, i) => { + const slideTitle = slide.props.title; + return ( +
+
+ ); +} diff --git a/docs-next/src/components/effect-badge.tsx b/docs-next/src/components/effect-badge.tsx new file mode 100644 index 0000000..72048b4 --- /dev/null +++ b/docs-next/src/components/effect-badge.tsx @@ -0,0 +1,43 @@ +import { cn } from "@/lib/cn"; + +export type Effect = "pure" | "read" | "write" | "network" | "nondeterministic"; + +const EFFECT_CLASSES: Record = { + pure: "bg-[var(--color-dagron-effect-pure)]", + read: "bg-[var(--color-dagron-effect-read)]", + write: "bg-[var(--color-dagron-effect-write)]", + network: "bg-[var(--color-dagron-effect-network)]", + nondeterministic: "bg-[var(--color-dagron-effect-nondeterministic)]", +}; + +const EFFECT_LABELS: Record = { + pure: "PURE", + read: "READ", + write: "WRITE", + network: "NETWORK", + nondeterministic: "ND", +}; + +export type EffectBadgeProps = { + effect: Effect; + label?: string; + className?: string; +}; + +/** + * Pill labelling a `@dagron.task`'s effect class — matches the runtime + * `dagron.Effect` enum (PURE/READ/WRITE/NETWORK/NONDETERMINISTIC). + */ +export function EffectBadge({ effect, label, className }: EffectBadgeProps) { + return ( + + {label ?? EFFECT_LABELS[effect]} + + ); +} diff --git a/docs-next/src/components/feature-card.tsx b/docs-next/src/components/feature-card.tsx new file mode 100644 index 0000000..05ba571 --- /dev/null +++ b/docs-next/src/components/feature-card.tsx @@ -0,0 +1,86 @@ +import Link from "next/link"; +import type { ReactNode } from "react"; +import { cn } from "@/lib/cn"; + +export type FeatureCardProps = { + title: string; + description: string; + icon?: ReactNode; + guideHref?: string; + apiHref?: string; + className?: string; +}; + +/** + * Card with a title, short description, and optional Guide/API links. + * + * Used both on the home page (in a grid) and inside MDX pages for cross-linking + * between sections of the documentation. 
+ */ +export function FeatureCard({ + title, + description, + icon, + guideHref, + apiHref, + className, +}: FeatureCardProps) { + return ( +
+
+ {icon ?
{icon}
: null} +

{title}

+
+

+ {description} +

+ {(guideHref || apiHref) && ( +
+ {guideHref ? ( + + Guide → + + ) : null} + {apiHref ? ( + + API → + + ) : null} +
+ )} +
+ ); +} + +/** Convenience wrapper that lays out a responsive grid of FeatureCards. */ +export function FeatureGrid({ + children, + className, +}: { + children: ReactNode; + className?: string; +}) { + return ( +
+ {children} +
+ ); +} diff --git a/docs-next/src/components/mdx.tsx b/docs-next/src/components/mdx.tsx new file mode 100644 index 0000000..d1fa9c1 --- /dev/null +++ b/docs-next/src/components/mdx.tsx @@ -0,0 +1,46 @@ +import defaultMdxComponents from "fumadocs-ui/mdx"; +import type { MDXComponents } from "mdx/types"; +import { ApiSignature } from "./api-signature"; +import { DagDiagram } from "./dag-diagram"; +import { DiagramCarousel, DiagramSlide } from "./diagram-carousel"; +import { EffectBadge } from "./effect-badge"; +import { FeatureCard, FeatureGrid } from "./feature-card"; +import { Mermaid } from "./mermaid"; +import { ParamTable } from "./param-table"; +import { StatusBadge } from "./status-badge"; +import { Button, CodePanel, SectionHeader } from "./ui"; + +/** + * Components made available to every MDX file globally — authors don't need + * to write `import` lines. Pass `components` to override or add per-page. + */ +export function getMDXComponents(components?: MDXComponents) { + return { + ...defaultMdxComponents, + // Diagram primitives + Mermaid, + DagDiagram, + DiagramCarousel, + DiagramSlide, + // Annotation pills + StatusBadge, + EffectBadge, + // API reference helpers + ApiSignature, + ParamTable, + // Card / grid + FeatureCard, + FeatureGrid, + // UI primitives (reusable across MDX and React pages) + Button, + CodePanel, + SectionHeader, + ...components, + } satisfies MDXComponents; +} + +export const useMDXComponents = getMDXComponents; + +declare global { + type MDXProvidedComponents = ReturnType; +} diff --git a/docs-next/src/components/mermaid.tsx b/docs-next/src/components/mermaid.tsx new file mode 100644 index 0000000..ed173a7 --- /dev/null +++ b/docs-next/src/components/mermaid.tsx @@ -0,0 +1,174 @@ +"use client"; + +import { useTheme } from "next-themes"; +import { useEffect, useId, useRef, useState } from "react"; + +const DARK_THEME_VARIABLES = { + darkMode: true, + background: "transparent", + primaryColor: "#1f2024", + primaryTextColor: 
"#f5f5f7", + primaryBorderColor: "#7a7d85", + lineColor: "#9b9ea6", + secondaryColor: "#2a2c31", + tertiaryColor: "#26282d", + mainBkg: "#1f2024", + nodeBkg: "#1f2024", + nodeBorder: "#7a7d85", + clusterBkg: "#1a1b1e", + clusterBorder: "#5a5d65", + edgeLabelBackground: "#2a2c31", + titleColor: "#f5f5f7", + labelTextColor: "#f5f5f7", + textColor: "#f5f5f7", + noteBkgColor: "#3a3c41", + noteTextColor: "#f5f5f7", + noteBorderColor: "#7a7d85", + errorBkgColor: "#4a1d1d", + errorTextColor: "#fca5a5", + // erDiagram-specific (entity attribute rows) — same color, no zebra stripe + attributeBackgroundColorOdd: "#1f2024", + attributeBackgroundColorEven: "#1f2024", + // stateDiagram-specific + altBackground: "#26282d", + // flowchart label colors + labelBackground: "#2a2c31", + // sequenceDiagram + actorBkg: "#1f2024", + actorBorder: "#7a7d85", + actorTextColor: "#f5f5f7", + actorLineColor: "#9b9ea6", + signalColor: "#f5f5f7", + signalTextColor: "#f5f5f7", + labelBoxBkgColor: "#2a2c31", + labelBoxBorderColor: "#7a7d85", + loopTextColor: "#f5f5f7", + activationBkgColor: "#3a3c41", + activationBorderColor: "#9b9ea6", +} as const; + +const DARK_THEME_CSS = ` + .er.entityBox { fill: #1f2024 !important; stroke: #7a7d85 !important; } + .er.entityLabel { fill: #f5f5f7 !important; } + .er.attributeBoxOdd { fill: #1f2024 !important; } + .er.attributeBoxEven { fill: #1f2024 !important; } + .er .er.attribute-text, + .er .attribute-text, + .er text { + fill: #f5f5f7 !important; + } + .er.relationshipLabel, + .er.relationshipLabelBox { + fill: #f5f5f7 !important; + } + .er.relationshipLabelBox + text { fill: #1a1a1a !important; } +`; + +const LIGHT_THEME_CSS = ` + .er.entityBox { fill: #ffffff !important; stroke: #3a3a3a !important; } + .er.entityLabel { fill: #1a1a1a !important; } + .er.attributeBoxOdd { fill: #ffffff !important; } + .er.attributeBoxEven { fill: #f4f4f5 !important; } + .er .er.attribute-text, + .er .attribute-text, + .er text { + fill: #1a1a1a !important; + } +`; + 
+const LIGHT_THEME_VARIABLES = { + darkMode: false, + background: "transparent", + primaryColor: "#ffffff", + primaryTextColor: "#1a1a1a", + primaryBorderColor: "#3a3a3a", + lineColor: "#4a4a4a", + secondaryColor: "#f4f4f5", + tertiaryColor: "#fafafa", + mainBkg: "#ffffff", + nodeBkg: "#ffffff", + nodeBorder: "#3a3a3a", + clusterBkg: "#f4f4f5", + clusterBorder: "#a1a1aa", + edgeLabelBackground: "#ffffff", + titleColor: "#1a1a1a", + labelTextColor: "#1a1a1a", + textColor: "#1a1a1a", + noteBkgColor: "#fef9c3", + noteTextColor: "#1a1a1a", + noteBorderColor: "#a1a1aa", + // erDiagram-specific (entity attribute rows) + attributeBackgroundColorOdd: "#ffffff", + attributeBackgroundColorEven: "#f4f4f5", + // stateDiagram-specific + altBackground: "#f4f4f5", + // flowchart label colors + labelBackground: "#ffffff", + // sequenceDiagram + actorBkg: "#ffffff", + actorBorder: "#3a3a3a", + actorTextColor: "#1a1a1a", + actorLineColor: "#4a4a4a", + signalColor: "#1a1a1a", + signalTextColor: "#1a1a1a", + labelBoxBkgColor: "#ffffff", + labelBoxBorderColor: "#3a3a3a", + loopTextColor: "#1a1a1a", + activationBkgColor: "#f4f4f5", + activationBorderColor: "#4a4a4a", +} as const; + +export function Mermaid({ chart }: { chart: string }) { + const id = useId(); + const containerRef = useRef(null); + const { resolvedTheme } = useTheme(); + const [svg, setSvg] = useState(""); + + useEffect(() => { + let cancelled = false; + void (async () => { + const mermaid = (await import("mermaid")).default; + if (typeof document !== "undefined" && document.fonts?.ready) { + await document.fonts.ready; + } + const isDark = resolvedTheme === "dark"; + mermaid.initialize({ + startOnLoad: false, + theme: "base", + themeVariables: isDark ? DARK_THEME_VARIABLES : LIGHT_THEME_VARIABLES, + themeCSS: isDark ? 
DARK_THEME_CSS : LIGHT_THEME_CSS, + securityLevel: "loose", + fontFamily: '"IBM Plex Sans", "Inter", system-ui, sans-serif', + flowchart: { padding: 18, htmlLabels: true, useMaxWidth: true }, + sequence: { + actorFontFamily: '"IBM Plex Sans", sans-serif', + noteFontFamily: '"IBM Plex Sans", sans-serif', + messageFontFamily: '"IBM Plex Sans", sans-serif', + }, + }); + try { + const renderId = `m${id.replace(/[^a-zA-Z0-9]/g, "")}`; + const { svg: rendered } = await mermaid.render(renderId, chart); + if (!cancelled) { + setSvg(rendered); + } + } catch (err) { + if (!cancelled) { + setSvg(`
Mermaid render error: ${(err as Error).message}
`); + } + } + })(); + return () => { + cancelled = true; + }; + }, [chart, id, resolvedTheme]); + + return ( +
+ ); +} diff --git a/docs-next/src/components/param-table.tsx b/docs-next/src/components/param-table.tsx new file mode 100644 index 0000000..e8c33d0 --- /dev/null +++ b/docs-next/src/components/param-table.tsx @@ -0,0 +1,64 @@ +import { cn } from "@/lib/cn"; + +export type Param = { + name: string; + type: string; + default?: string; + description: string; +}; + +export type ParamTableProps = { + params: Param[]; + className?: string; +}; + +/** + * Renders a parameter reference table with name / type / default / description + * columns. Used heavily on API reference pages. + */ +export function ParamTable({ params, className }: ParamTableProps) { + return ( +
+ + + + + + + + + + + {params.map((p) => ( + + + + + + + ))} + +
+ Parameter + + Type + + Default + + Description +
+ {p.name} + + {p.type} + + {p.default !== undefined ? ( + {p.default} + ) : ( + required + )} + + {p.description} +
+
+ ); +} diff --git a/docs-next/src/components/provider.tsx b/docs-next/src/components/provider.tsx new file mode 100644 index 0000000..de8ad12 --- /dev/null +++ b/docs-next/src/components/provider.tsx @@ -0,0 +1,8 @@ +"use client"; +import { RootProvider } from "fumadocs-ui/provider/next"; +import type { ReactNode } from "react"; +import SearchDialog from "@/components/search"; + +export function Provider({ children }: { children: ReactNode }) { + return {children}; +} diff --git a/docs-next/src/components/search.tsx b/docs-next/src/components/search.tsx new file mode 100644 index 0000000..3b2dc8a --- /dev/null +++ b/docs-next/src/components/search.tsx @@ -0,0 +1,54 @@ +"use client"; +import { create } from "@orama/orama"; +import { useDocsSearch } from "fumadocs-core/search/client"; +import { + SearchDialog, + SearchDialogClose, + SearchDialogContent, + SearchDialogHeader, + SearchDialogIcon, + SearchDialogInput, + SearchDialogList, + SearchDialogOverlay, + type SharedProps, +} from "fumadocs-ui/components/dialog/search"; +import { useI18n } from "fumadocs-ui/contexts/i18n"; + +function initOrama() { + return create({ + schema: { _: "string" }, + // https://docs.orama.com/docs/orama-js/supported-languages + language: "english", + }); +} + +const basePath = process.env.NEXT_PUBLIC_DOCS_BASE_PATH ?? 
""; + +export default function DefaultSearchDialog(props: SharedProps) { + const { locale } = useI18n(); // (optional) for i18n + const { search, setSearch, query } = useDocsSearch({ + type: "static", + initOrama, + locale, + from: `${basePath}/api/search`, + }); + + return ( + + + + + + + + + + + + ); +} diff --git a/docs-next/src/components/status-badge.tsx b/docs-next/src/components/status-badge.tsx new file mode 100644 index 0000000..67094bb --- /dev/null +++ b/docs-next/src/components/status-badge.tsx @@ -0,0 +1,46 @@ +import { cn } from "@/lib/cn"; + +export type Status = + | "pending" + | "running" + | "completed" + | "failed" + | "skipped" + | "timed-out" + | "cancelled" + | "cache-hit"; + +const STATUS_CLASSES: Record = { + pending: "bg-[var(--color-dagron-pending)]", + running: "bg-[var(--color-dagron-running)]", + completed: "bg-[var(--color-dagron-completed)]", + failed: "bg-[var(--color-dagron-failed)]", + skipped: "bg-[var(--color-dagron-skipped)]", + "timed-out": "bg-[var(--color-dagron-timed-out)]", + cancelled: "bg-[var(--color-dagron-cancelled)]", + "cache-hit": "bg-[var(--color-dagron-cache-hit)]", +}; + +export type StatusBadgeProps = { + status: Status; + label?: string; + className?: string; +}; + +/** + * Inline status pill used to annotate node states in execution traces and + * conceptual diagrams (running / completed / failed / skipped / …). + */ +export function StatusBadge({ status, label, className }: StatusBadgeProps) { + return ( + + {label ?? 
status.replace("-", " ")} + + ); +} diff --git a/docs-next/src/components/ui/button.tsx b/docs-next/src/components/ui/button.tsx new file mode 100644 index 0000000..343e817 --- /dev/null +++ b/docs-next/src/components/ui/button.tsx @@ -0,0 +1,56 @@ +import Link from "next/link"; +import type { ComponentProps, ReactNode } from "react"; +import { cn } from "@/lib/cn"; + +export type ButtonVariant = "primary" | "secondary" | "ghost"; + +const VARIANT_CLASSES: Record = { + primary: + "bg-fd-primary text-fd-primary-foreground hover:opacity-90 transition-opacity", + secondary: + "border border-fd-border bg-fd-card hover:bg-fd-accent transition-colors", + ghost: "text-fd-muted-foreground hover:text-fd-foreground transition-colors", +}; + +const BASE_CLASSES = + "inline-flex items-center gap-2 rounded-md px-5 py-2.5 text-sm font-medium"; + +type CommonProps = { + variant?: ButtonVariant; + icon?: ReactNode; + children: ReactNode; + className?: string; +}; + +type AsLink = CommonProps & { + href: string; +} & Omit, "href" | "className" | "children">; + +type AsButton = CommonProps & { + href?: undefined; +} & Omit< + ComponentProps<"button">, + "className" | "children" | keyof CommonProps + >; + +export function Button(props: AsLink | AsButton) { + const { variant = "primary", icon, children, className, ...rest } = props; + const classes = cn(BASE_CLASSES, VARIANT_CLASSES[variant], className); + + if ("href" in rest && rest.href !== undefined) { + const { href, ...linkRest } = rest; + return ( + + {children} + {icon} + + ); + } + + return ( + + ); +} diff --git a/docs-next/src/components/ui/code-panel.tsx b/docs-next/src/components/ui/code-panel.tsx new file mode 100644 index 0000000..1072eef --- /dev/null +++ b/docs-next/src/components/ui/code-panel.tsx @@ -0,0 +1,83 @@ +import type { ReactNode } from "react"; +import { cn } from "@/lib/cn"; + +export type CodePanelTone = "primary" | "muted" | "default"; + +const TONE_CLASSES: Record = { + primary: "border-t-2 
border-t-fd-primary", + muted: "border-t-2 border-t-fd-border", + default: "", +}; + +const LABEL_TONE_CLASSES: Record = { + primary: "text-fd-primary", + muted: "text-fd-foreground", + default: "text-fd-foreground", +}; + +type LabelHeader = { + label: string; + caption?: string; + header?: undefined; +}; + +type CustomHeader = { + label?: undefined; + caption?: undefined; + header: ReactNode; +}; + +type NoHeader = { + label?: undefined; + caption?: undefined; + header?: undefined; +}; + +export type CodePanelProps = { + tone?: CodePanelTone; + className?: string; + children: ReactNode; +} & (LabelHeader | CustomHeader | NoHeader); + +export function CodePanel(props: CodePanelProps) { + const { tone = "default", className, children } = props; + + return ( +
+ {renderHeader(props)} + {children} +
+ ); +} + +function renderHeader(props: CodePanelProps) { + if (props.header !== undefined) { + return ( +
+ {props.header} +
+ ); + } + if (props.label !== undefined) { + const tone = props.tone ?? "default"; + return ( +
+ + {props.label} + + {props.caption ? ( + + {props.caption} + + ) : null} +
+ ); + } + return null; +} diff --git a/docs-next/src/components/ui/index.ts b/docs-next/src/components/ui/index.ts new file mode 100644 index 0000000..3a4bccd --- /dev/null +++ b/docs-next/src/components/ui/index.ts @@ -0,0 +1,10 @@ +export { Button, type ButtonVariant } from "./button"; +export { + CodePanel, + type CodePanelProps, + type CodePanelTone, +} from "./code-panel"; +export { + SectionHeader, + type SectionHeaderAlign, +} from "./section-header"; diff --git a/docs-next/src/components/ui/section-header.tsx b/docs-next/src/components/ui/section-header.tsx new file mode 100644 index 0000000..d8196d9 --- /dev/null +++ b/docs-next/src/components/ui/section-header.tsx @@ -0,0 +1,32 @@ +import { cn } from "@/lib/cn"; + +export type SectionHeaderAlign = "center" | "left"; + +export function SectionHeader({ + title, + description, + align = "center", + className, +}: { + title: string; + description?: string; + align?: SectionHeaderAlign; + className?: string; +}) { + return ( +
+

+ {title} +

+ {description ? ( +

{description}

+ ) : null} +
+ ); +} diff --git a/docs-next/tsconfig.json b/docs-next/tsconfig.json new file mode 100644 index 0000000..f43f873 --- /dev/null +++ b/docs-next/tsconfig.json @@ -0,0 +1,35 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "react-jsx", + "incremental": true, + "paths": { + "@/*": ["./src/*"], + "collections/*": ["./.source/*"] + }, + "plugins": [ + { + "name": "next" + } + ] + }, + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": ["node_modules"] +} From dee4e8a92ac32e70d2a419aceb8a8cfdb71d0efa Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 02:44:11 +0530 Subject: [PATCH 09/14] =?UTF-8?q?fix(docs-next):=20polish=20=E2=80=94=20st?= =?UTF-8?q?rip=20duplicate=20H1s,=20third=20root=20section,=20dark=20merma?= =?UTF-8?q?id?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Strip the # Heading that duplicated the frontmatter title across 53 MDX files, plus 2 hand-fixes where titles diverged. Move typed-and-reactive into its own root section so it appears in the sidebar dropdown alongside Guide and API. Add api/index.mdx so the API root has a landing page (without it, fumadocs hid it from the dropdown). DagDiagram now remaps light-mode hex colors to dark equivalents via the original DARK_COLOR_MAP — diagrams stay legible in dark mode. FeatureCards use h-full + flex column so a row of cards has consistent height; link footer sits tight under description (no mt-auto gap). 
--- .../content/docs/api/analysis/analysis.mdx | 2 - .../content/docs/api/analysis/contracts.mdx | 2 - .../content/docs/api/analysis/dataframe.mdx | 2 - docs-next/content/docs/api/core/builder.mdx | 2 - docs-next/content/docs/api/core/core.mdx | 2 - docs-next/content/docs/api/core/errors.mdx | 2 - .../content/docs/api/execution/caching.mdx | 2 - .../content/docs/api/execution/checkpoint.mdx | 2 - .../content/docs/api/execution/conditions.mdx | 2 - .../docs/api/execution/distributed.mdx | 2 - .../content/docs/api/execution/dynamic.mdx | 2 - .../content/docs/api/execution/execution.mdx | 2 - .../content/docs/api/execution/gates.mdx | 2 - .../docs/api/execution/incremental.mdx | 2 - .../content/docs/api/execution/pipeline.mdx | 2 - .../content/docs/api/execution/reactive.mdx | 2 - .../content/docs/api/execution/resources.mdx | 2 - docs-next/content/docs/api/index.mdx | 42 +++++++++ docs-next/content/docs/api/meta.json | 9 +- .../docs/api/observability/profiling.mdx | 2 - .../docs/api/observability/tracing.mdx | 2 - .../content/docs/api/utilities/compose.mdx | 2 - .../content/docs/api/utilities/display.mdx | 2 - .../docs/api/utilities/integration.mdx | 2 - .../content/docs/api/utilities/modern-api.mdx | 10 +-- .../content/docs/api/utilities/plugins.mdx | 2 - .../content/docs/api/utilities/template.mdx | 2 - .../content/docs/api/utilities/versioning.mdx | 2 - .../content/docs/guide/advanced/contracts.mdx | 2 - .../docs/guide/advanced/dataframes.mdx | 2 - .../docs/guide/advanced/plugins-hooks.mdx | 2 - .../content/docs/guide/advanced/templates.mdx | 2 - .../docs/guide/advanced/versioning.mdx | 2 - docs-next/content/docs/guide/architecture.mdx | 2 - docs-next/content/docs/guide/benchmarks.mdx | 2 - docs-next/content/docs/guide/cookbook.mdx | 2 - .../guide/core-concepts/building-dags.mdx | 2 - .../guide/core-concepts/executing-tasks.mdx | 2 - .../guide/core-concepts/inspecting-graphs.mdx | 2 - .../guide/core-concepts/serialization.mdx | 2 - 
.../docs/guide/core-concepts/transforms.mdx | 2 - .../execution-strategies/approval-gates.mdx | 2 - .../guide/execution-strategies/caching.mdx | 2 - .../execution-strategies/checkpointing.mdx | 2 - .../execution-strategies/conditional.mdx | 2 - .../execution-strategies/distributed.mdx | 2 - .../execution-strategies/dynamic-dags.mdx | 2 - .../execution-strategies/incremental.mdx | 2 - .../resource-scheduling.mdx | 2 - .../content/docs/guide/getting-started.mdx | 2 - docs-next/content/docs/guide/meta.json | 1 - .../guide/observability/error-handling.mdx | 2 - .../guide/observability/tracing-profiling.mdx | 2 - .../guide/observability/visualization.mdx | 2 - docs-next/content/docs/guide/why-dagron.mdx | 2 - docs-next/content/docs/index.mdx | 6 +- docs-next/content/docs/meta.json | 2 +- .../index.mdx} | 2 - .../content/docs/typed-and-reactive/meta.json | 5 ++ docs-next/scripts/migrate.mjs | 26 ++++++ .../src/app/(home)/_sections/features.tsx | 12 +-- docs-next/src/app/(home)/_sections/hero.tsx | 2 +- docs-next/src/components/dag-diagram.tsx | 90 ++++++++++++++++++- docs-next/src/components/feature-card.tsx | 4 +- 64 files changed, 185 insertions(+), 128 deletions(-) create mode 100644 docs-next/content/docs/api/index.mdx rename docs-next/content/docs/{guide/typed-and-reactive.mdx => typed-and-reactive/index.mdx} (99%) create mode 100644 docs-next/content/docs/typed-and-reactive/meta.json diff --git a/docs-next/content/docs/api/analysis/analysis.mdx b/docs-next/content/docs/api/analysis/analysis.mdx index c13d7d9..32ac74a 100644 --- a/docs-next/content/docs/api/analysis/analysis.mdx +++ b/docs-next/content/docs/api/analysis/analysis.mdx @@ -3,8 +3,6 @@ title: Analysis description: API reference for dagron's graph analysis toolkit -- explain, what-if, lineage tracking, linting, schema validation, and query DSL. --- -# Analysis - The analysis module provides tools for understanding, validating, and querying DAG structure. 
It includes node diagnostics (`explain`), hypothetical mutation analysis (`what_if`), data lineage tracking, structural linting, schema diff --git a/docs-next/content/docs/api/analysis/contracts.mdx b/docs-next/content/docs/api/analysis/contracts.mdx index 73e7122..92b70ea 100644 --- a/docs-next/content/docs/api/analysis/contracts.mdx +++ b/docs-next/content/docs/api/analysis/contracts.mdx @@ -3,8 +3,6 @@ title: Contracts description: API reference for dagron's type contract system -- declare, extract, and validate typed data contracts across DAG edges. --- -# Contracts - The contracts module provides build-time type checking for DAG edges. You can declare the expected input and output types for each node, and the validator checks that producer output types are compatible with consumer diff --git a/docs-next/content/docs/api/analysis/dataframe.mdx b/docs-next/content/docs/api/analysis/dataframe.mdx index 37cf144..4ea040a 100644 --- a/docs-next/content/docs/api/analysis/dataframe.mdx +++ b/docs-next/content/docs/api/analysis/dataframe.mdx @@ -3,8 +3,6 @@ title: DataFrames description: API reference for dagron's DataFrame integration -- schema validation for pandas and polars DataFrames at DAG edge boundaries. --- -# DataFrames - The dataframe module provides schema validation for pandas and polars DataFrames at DAG edge boundaries. Define expected column schemas (names, dtypes, nullability) and row count constraints for each node, then validate diff --git a/docs-next/content/docs/api/core/builder.mdx b/docs-next/content/docs/api/core/builder.mdx index 910e6cf..5216cb0 100644 --- a/docs-next/content/docs/api/core/builder.mdx +++ b/docs-next/content/docs/api/core/builder.mdx @@ -3,8 +3,6 @@ title: "DAGBuilder" description: "API reference for DAGBuilder — the fluent builder pattern for constructing validated DAGs." --- -# DAGBuilder - The `DAGBuilder` provides a fluent, chainable API for constructing DAGs. 
Every mutating method returns `self`, so you can chain calls together. The builder validates the graph on `.build()`, ensuring you never receive an invalid DAG. diff --git a/docs-next/content/docs/api/core/core.mdx b/docs-next/content/docs/api/core/core.mdx index 7aed47a..bbae650 100644 --- a/docs-next/content/docs/api/core/core.mdx +++ b/docs-next/content/docs/api/core/core.mdx @@ -3,8 +3,6 @@ title: "Core — DAG, NodeId, GraphStats" description: "Complete API reference for the DAG class, NodeId, GraphStats, GraphDiff, ReachabilityIndex, and ExecutionPlan." --- -# Core — DAG, NodeId, GraphStats - The core module provides the foundational graph data structure that powers every feature in dagron. The `DAG` class contains approximately 78 methods spanning construction, inspection, traversal, scheduling, transforms, serialization, and diff --git a/docs-next/content/docs/api/core/errors.mdx b/docs-next/content/docs/api/core/errors.mdx index a1ad345..46b162b 100644 --- a/docs-next/content/docs/api/core/errors.mdx +++ b/docs-next/content/docs/api/core/errors.mdx @@ -3,8 +3,6 @@ title: "Errors" description: "API reference for all dagron exception classes and the error hierarchy." --- -# Errors - All dagron exceptions inherit from `DagronError`, making it easy to catch any library error with a single `except` clause. More specific exceptions allow targeted handling of individual failure modes. diff --git a/docs-next/content/docs/api/execution/caching.mdx b/docs-next/content/docs/api/execution/caching.mdx index 39f9527..4da15e8 100644 --- a/docs-next/content/docs/api/execution/caching.mdx +++ b/docs-next/content/docs/api/execution/caching.mdx @@ -3,8 +3,6 @@ title: "Caching" description: "API reference for CachedDAGExecutor, ContentAddressableCache, CachePolicy, and cache backends." --- -# Caching - The caching module provides content-addressable, Merkle-tree-based caching for DAG execution. 
A node's cache key is derived from its task code, its predecessors' output hashes, and its name — so if nothing upstream has changed, diff --git a/docs-next/content/docs/api/execution/checkpoint.mdx b/docs-next/content/docs/api/execution/checkpoint.mdx index 5838fbb..a0bf870 100644 --- a/docs-next/content/docs/api/execution/checkpoint.mdx +++ b/docs-next/content/docs/api/execution/checkpoint.mdx @@ -3,8 +3,6 @@ title: "Checkpointing" description: "API reference for CheckpointExecutor and CheckpointInfo — save progress and resume execution after failures." --- -# Checkpointing - The checkpointing module allows you to persist execution progress to disk and resume after failures. When a node completes, its result is saved to a checkpoint directory. If execution is interrupted (crash, timeout, manual stop), diff --git a/docs-next/content/docs/api/execution/conditions.mdx b/docs-next/content/docs/api/execution/conditions.mdx index 3aa1f8f..45680b0 100644 --- a/docs-next/content/docs/api/execution/conditions.mdx +++ b/docs-next/content/docs/api/execution/conditions.mdx @@ -3,8 +3,6 @@ title: "Conditional Execution" description: "API reference for ConditionalDAGBuilder, ConditionalEdge, and ConditionalExecutor — predicate-gated edges that skip branches at runtime." --- -# Conditional Execution - The conditional execution module allows edges in your DAG to carry predicate functions. At runtime, the executor evaluates each condition before traversing the edge. 
If the condition returns `False`, the downstream node (and its diff --git a/docs-next/content/docs/api/execution/distributed.mdx b/docs-next/content/docs/api/execution/distributed.mdx index c2db6bf..5d59af5 100644 --- a/docs-next/content/docs/api/execution/distributed.mdx +++ b/docs-next/content/docs/api/execution/distributed.mdx @@ -3,8 +3,6 @@ title: "Distributed Execution" description: "API reference for DistributedExecutor, DistributedBackend, and PartitionedDAGExecutor — run DAGs across threads, processes, Ray, and Celery." --- -# Distributed Execution - The distributed execution module lets you run DAG tasks across multiple backends: threads, processes, Ray clusters, or Celery workers. A pluggable backend protocol makes it easy to integrate with any distributed computing diff --git a/docs-next/content/docs/api/execution/dynamic.mdx b/docs-next/content/docs/api/execution/dynamic.mdx index 888558d..5e6b7e2 100644 --- a/docs-next/content/docs/api/execution/dynamic.mdx +++ b/docs-next/content/docs/api/execution/dynamic.mdx @@ -3,8 +3,6 @@ title: "Dynamic Execution" description: "API reference for DynamicExecutor, DynamicModification, and DynamicNodeSpec — expand the DAG at runtime based on node results." --- -# Dynamic Execution - The dynamic execution module lets you modify the DAG at runtime. When a node completes, an **expander function** can inspect its result and add or remove nodes before execution continues. This supports fan-out patterns where the diff --git a/docs-next/content/docs/api/execution/execution.mdx b/docs-next/content/docs/api/execution/execution.mdx index 7bdec2e..0fbb78e 100644 --- a/docs-next/content/docs/api/execution/execution.mdx +++ b/docs-next/content/docs/api/execution/execution.mdx @@ -3,8 +3,6 @@ title: "Execution" description: "API reference for DAGExecutor, AsyncDAGExecutor, ExecutionResult, NodeResult, NodeStatus, and ExecutionCallbacks." 
--- -# Execution - The execution module provides thread-pool and async executors that walk the DAG in topological order, dispatching tasks with maximum parallelism while respecting dependency constraints. diff --git a/docs-next/content/docs/api/execution/gates.mdx b/docs-next/content/docs/api/execution/gates.mdx index bdd12ff..c8574c8 100644 --- a/docs-next/content/docs/api/execution/gates.mdx +++ b/docs-next/content/docs/api/execution/gates.mdx @@ -3,8 +3,6 @@ title: "Approval Gates" description: "API reference for ApprovalGate, GateController, GateStatus, and gate-related errors — human-in-the-loop pause and resume." --- -# Approval Gates - Approval gates pause DAG execution at specific nodes until a human (or external system) explicitly approves or rejects the continuation. This enables human-in-the-loop workflows such as deployment approvals, data quality diff --git a/docs-next/content/docs/api/execution/incremental.mdx b/docs-next/content/docs/api/execution/incremental.mdx index 2652152..50ed6ec 100644 --- a/docs-next/content/docs/api/execution/incremental.mdx +++ b/docs-next/content/docs/api/execution/incremental.mdx @@ -3,8 +3,6 @@ title: "Incremental Execution" description: "API reference for IncrementalExecutor and IncrementalResult — re-execute only what changed." --- -# Incremental Execution - The incremental execution module provides an executor that re-runs only the nodes affected by a set of changes. 
Unchanged nodes are reused from the previous run, dramatically reducing execution time for large DAGs where only a few inputs diff --git a/docs-next/content/docs/api/execution/pipeline.mdx b/docs-next/content/docs/api/execution/pipeline.mdx index 13aa7da..b3db348 100644 --- a/docs-next/content/docs/api/execution/pipeline.mdx +++ b/docs-next/content/docs/api/execution/pipeline.mdx @@ -3,8 +3,6 @@ title: "Pipeline" description: "API reference for the @task decorator and Pipeline class — a high-level API for building and executing DAGs from decorated functions." --- -# Pipeline - The Pipeline API provides a high-level, decorator-based approach to building and executing DAGs. Instead of manually creating nodes and edges, you decorate functions with `@task` and let dagron infer the graph structure from function diff --git a/docs-next/content/docs/api/execution/reactive.mdx b/docs-next/content/docs/api/execution/reactive.mdx index 8f1513e..c97d2fa 100644 --- a/docs-next/content/docs/api/execution/reactive.mdx +++ b/docs-next/content/docs/api/execution/reactive.mdx @@ -3,8 +3,6 @@ title: Reactive DAG description: API reference for dagron's reactive DAG execution -- push-based incremental recomputation with subscriptions and early cutoff. --- -# Reactive DAG - The reactive module extends dagron's execution model into a push-based reactive system. When you set an input value, the `ReactiveDAG` automatically cascades recomputation through the graph, only recomputing diff --git a/docs-next/content/docs/api/execution/resources.mdx b/docs-next/content/docs/api/execution/resources.mdx index 86a42a8..be3eb31 100644 --- a/docs-next/content/docs/api/execution/resources.mdx +++ b/docs-next/content/docs/api/execution/resources.mdx @@ -3,8 +3,6 @@ title: "Resource Scheduling" description: "API reference for ResourceAwareExecutor, ResourcePool, ResourceRequirements, and resource tracking — GPU, CPU, and memory-aware scheduling." 
--- -# Resource Scheduling - The resource scheduling module extends the standard executor with capacity-aware scheduling. Nodes declare their resource requirements (GPU, CPU, memory, or custom resources), and the executor only dispatches a node when the resource pool diff --git a/docs-next/content/docs/api/index.mdx b/docs-next/content/docs/api/index.mdx new file mode 100644 index 0000000..b8e61fc --- /dev/null +++ b/docs-next/content/docs/api/index.mdx @@ -0,0 +1,42 @@ +--- +title: API Reference +description: Complete reference for every public class, function, and protocol exposed by dagron. +--- + +dagron's public API is grouped into five sections. Pick one from the +sidebar, or jump straight in: + + + + + + + + + +## Looking for the new typed handles? + +If you're new to the `NodeRef`, `@flow`, `Effect`, reactive engine, +content cache, and replay APIs, start with the [Typed & +Reactive](/typed-and-reactive) walkthrough — it composes everything +end-to-end with worked examples. diff --git a/docs-next/content/docs/api/meta.json b/docs-next/content/docs/api/meta.json index 042b56c..48c759c 100644 --- a/docs-next/content/docs/api/meta.json +++ b/docs-next/content/docs/api/meta.json @@ -1,5 +1,12 @@ { "title": "API Reference", "root": true, - "pages": ["core", "execution", "observability", "analysis", "utilities"] + "pages": [ + "index", + "core", + "execution", + "observability", + "analysis", + "utilities" + ] } diff --git a/docs-next/content/docs/api/observability/profiling.mdx b/docs-next/content/docs/api/observability/profiling.mdx index fb4510f..e8063d8 100644 --- a/docs-next/content/docs/api/observability/profiling.mdx +++ b/docs-next/content/docs/api/observability/profiling.mdx @@ -3,8 +3,6 @@ title: Profiling description: API reference for dagron's post-execution profiling -- critical path analysis, slack computation, bottleneck detection, and parallelism efficiency. 
--- -# Profiling - The profiling module analyzes completed executions against the DAG structure to identify the critical path, compute slack for every node, detect bottlenecks, and measure parallelism efficiency. Unlike tracing (which diff --git a/docs-next/content/docs/api/observability/tracing.mdx b/docs-next/content/docs/api/observability/tracing.mdx index f210a24..6d1ee6f 100644 --- a/docs-next/content/docs/api/observability/tracing.mdx +++ b/docs-next/content/docs/api/observability/tracing.mdx @@ -3,8 +3,6 @@ title: Tracing description: API reference for dagron's execution tracing system — record, query, and export structured timeline events from DAG execution. --- -# Tracing - The tracing module provides a structured timeline log of every event that occurs during DAG execution. Traces capture node starts, completions, failures, gate interactions, resource acquisitions, and cache events. You can export traces as diff --git a/docs-next/content/docs/api/utilities/compose.mdx b/docs-next/content/docs/api/utilities/compose.mdx index ef9a0ba..5d21b91 100644 --- a/docs-next/content/docs/api/utilities/compose.mdx +++ b/docs-next/content/docs/api/utilities/compose.mdx @@ -3,8 +3,6 @@ title: Composition description: API reference for dagron's DAG composition -- merge multiple DAGs into one with namespace prefixes and cross-namespace connections. --- -# Composition - The compose module provides multi-DAG composition with automatic namespace prefixing. Combine independent DAGs into a single unified graph, preserving node payloads and metadata, with optional cross-namespace connections. 
diff --git a/docs-next/content/docs/api/utilities/display.mdx b/docs-next/content/docs/api/utilities/display.mdx index 7f79e44..2d22969 100644 --- a/docs-next/content/docs/api/utilities/display.mdx +++ b/docs-next/content/docs/api/utilities/display.mdx @@ -3,8 +3,6 @@ title: Display description: API reference for dagron's display utilities -- ASCII rendering, Jupyter SVG auto-display, and custom node formatting. --- -# Display - The display module provides visualization utilities for DAGs. It includes ASCII rendering for terminals and logs, and SVG generation for Jupyter notebooks. The SVG renderer tries Graphviz first (Python package or CLI) diff --git a/docs-next/content/docs/api/utilities/integration.mdx b/docs-next/content/docs/api/utilities/integration.mdx index f3aae90..cfc7956 100644 --- a/docs-next/content/docs/api/utilities/integration.mdx +++ b/docs-next/content/docs/api/utilities/integration.mdx @@ -3,8 +3,6 @@ title: Integration description: API reference for dagron's integration helpers -- build DAGs from dicts, dataclasses, and Pydantic models. --- -# Integration - The integration module provides helpers for building DAGs from common Python data structures. The primary function, `from_records`, converts sequences of dicts, dataclasses, or Pydantic models into DAGs with minimal boilerplate. diff --git a/docs-next/content/docs/api/utilities/modern-api.mdx b/docs-next/content/docs/api/utilities/modern-api.mdx index ec1a9d5..8ff344b 100644 --- a/docs-next/content/docs/api/utilities/modern-api.mdx +++ b/docs-next/content/docs/api/utilities/modern-api.mdx @@ -1,13 +1,13 @@ --- -title: Modern API (NodeRef, flow, reactive, contentcache, trace) +title: Modern API description: API reference for the typed-handles + uniqueness modules — dagron.NodeRef, dagron.flow, dagron.Effect, dagron.reactive, dagron.contentcache, dagron.trace, dagron.stubgen. --- -# Modern API - -API reference for the typed-handles and uniqueness modules. 
For a +API reference for the typed-handles and uniqueness modules — `NodeRef`, +`@dagron.flow`, `Effect`, `dagron.reactive`, `dagron.contentcache`, and +`dagron.trace`. For a walkthrough of how they compose, see the -[Typed Handles & Reactive Engine](../../guide/typed-and-reactive) guide. +[Typed Handles & Reactive Engine](../../typed-and-reactive) guide. ## `dagron.NodeRef` diff --git a/docs-next/content/docs/api/utilities/plugins.mdx b/docs-next/content/docs/api/utilities/plugins.mdx index 78df89d..4843b9d 100644 --- a/docs-next/content/docs/api/utilities/plugins.mdx +++ b/docs-next/content/docs/api/utilities/plugins.mdx @@ -3,8 +3,6 @@ title: Plugins description: API reference for dagron's plugin system -- lifecycle hooks, plugin discovery, the DashboardPlugin, and the @dagron_plugin decorator. --- -# Plugins - The plugin system provides extensible lifecycle hooks for DAG construction and execution. Plugins can observe and react to events such as node starts, completions, failures, and full execution boundaries. dagron includes a diff --git a/docs-next/content/docs/api/utilities/template.mdx b/docs-next/content/docs/api/utilities/template.mdx index 3a57cff..1799e9f 100644 --- a/docs-next/content/docs/api/utilities/template.mdx +++ b/docs-next/content/docs/api/utilities/template.mdx @@ -3,8 +3,6 @@ title: Templates description: API reference for dagron's DAG template system -- parameterized DAG construction with substitution, validation, and rendering. --- -# Templates - The template module provides parameterized DAG construction. Define a DAG shape with placeholder values in node names (and optionally in payloads and metadata), then render concrete DAGs by supplying parameter values. 
Templates diff --git a/docs-next/content/docs/api/utilities/versioning.mdx b/docs-next/content/docs/api/utilities/versioning.mdx index 1ce481f..630154b 100644 --- a/docs-next/content/docs/api/utilities/versioning.mdx +++ b/docs-next/content/docs/api/utilities/versioning.mdx @@ -3,8 +3,6 @@ title: Versioning description: API reference for dagron's DAG versioning system -- structural time-travel, mutation history, diffing, and forking. --- -# Versioning - The versioning module provides structural versioning and time-travel for DAGs. Every mutation (add/remove node, add/remove edge, set payload/metadata) is recorded in an append-only log. You can navigate to any historical version, diff --git a/docs-next/content/docs/guide/advanced/contracts.mdx b/docs-next/content/docs/guide/advanced/contracts.mdx index 6f3b6d9..c259737 100644 --- a/docs-next/content/docs/guide/advanced/contracts.mdx +++ b/docs-next/content/docs/guide/advanced/contracts.mdx @@ -3,8 +3,6 @@ title: Contracts description: Enforce type contracts across DAG edges to catch mismatches before execution. --- -# Contracts - When DAGs grow large, it becomes easy for the output type of one node to drift from what a downstream node expects. dagron's **contract system** lets you declare input and output types for each node and validate them at build time -- before any task runs. This catches type mismatches early, similar to how a compiler checks function signatures. diff --git a/docs-next/content/docs/guide/cookbook.mdx b/docs-next/content/docs/guide/cookbook.mdx index 23cbb48..3f2cc1c 100644 --- a/docs-next/content/docs/guide/cookbook.mdx +++ b/docs-next/content/docs/guide/cookbook.mdx @@ -3,8 +3,6 @@ title: Cookbook description: Complete real-world examples — build systems, spreadsheet engines, ETL pipelines, and CI/CD schedulers. --- -# Cookbook - Four complete examples showing how to use dagron in real-world scenarios. Each includes full code and a DAG diagram. 
--- diff --git a/docs-next/content/docs/guide/core-concepts/building-dags.mdx b/docs-next/content/docs/guide/core-concepts/building-dags.mdx index 5104b37..44ec635 100644 --- a/docs-next/content/docs/guide/core-concepts/building-dags.mdx +++ b/docs-next/content/docs/guide/core-concepts/building-dags.mdx @@ -3,8 +3,6 @@ title: Building DAGs description: A comprehensive guide to constructing directed acyclic graphs in dagron — fluent builder, direct mutation, bulk operations, payloads, and metadata. --- -# Building DAGs - dagron offers multiple ways to construct a directed acyclic graph. This guide covers every construction pattern, from quick one-liners to advanced builder configurations with payloads and metadata. diff --git a/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx b/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx index ce027fb..b6cfc78 100644 --- a/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx +++ b/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx @@ -3,8 +3,6 @@ title: Executing Tasks description: Run DAG tasks in parallel with DAGExecutor, AsyncDAGExecutor, and Pipeline — with timeouts, cancellation, callbacks, and result inspection. --- -# Executing Tasks - Once you have a [DAG](/guide/core-concepts/building-dags), you need an **executor** to run it. dagron ships with several executor types — from a simple thread-pool executor to async, pipeline, conditional, dynamic, and incremental variants. 
This guide covers diff --git a/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx b/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx index 02c2858..b094dc8 100644 --- a/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx +++ b/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx @@ -3,8 +3,6 @@ title: Inspecting Graphs description: Analyze your DAG's structure — topological ordering, critical path, predecessors, ancestors, explain, what-if, lint, and the query DSL. --- -# Inspecting Graphs - dagron gives you deep introspection into your DAG's structure. This guide covers every analysis tool — from basic traversals to critical-path analysis, what-if exploration, linting, and the query DSL. diff --git a/docs-next/content/docs/guide/core-concepts/serialization.mdx b/docs-next/content/docs/guide/core-concepts/serialization.mdx index d9bfda0..265ef2a 100644 --- a/docs-next/content/docs/guide/core-concepts/serialization.mdx +++ b/docs-next/content/docs/guide/core-concepts/serialization.mdx @@ -3,8 +3,6 @@ title: Serialization description: Save and load DAGs in JSON, binary, DOT, and Mermaid formats — plus custom payload serializers. --- -# Serialization - dagron supports multiple serialization formats for persisting DAGs, sharing them across processes, embedding them in documentation, and visualizing them with external tools. This guide covers every format and shows how to handle custom diff --git a/docs-next/content/docs/guide/core-concepts/transforms.mdx b/docs-next/content/docs/guide/core-concepts/transforms.mdx index 0227a91..7d0e760 100644 --- a/docs-next/content/docs/guide/core-concepts/transforms.mdx +++ b/docs-next/content/docs/guide/core-concepts/transforms.mdx @@ -3,8 +3,6 @@ title: Graph Transforms description: Reshape your DAGs with reverse, filter, merge, collapse, transitive reduction, subgraph extraction, snapshots, and composition. 
--- -# Graph Transforms - dagron provides a rich set of **structural transformations** that produce new DAGs from existing ones. Transforms are non-destructive — the original DAG is never mutated. This guide covers every built-in transform with before/after diagrams. diff --git a/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx b/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx index 80e07f8..3f50472 100644 --- a/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx +++ b/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx @@ -3,8 +3,6 @@ title: Approval Gates description: Pause DAG execution at human-in-the-loop gates that wait for explicit approval or rejection. --- -# Approval Gates - Production pipelines often need a human checkpoint before proceeding. dagron's **approval gates** let you pause execution at specific nodes until an operator explicitly approves or rejects the step. This is useful for deployment sign-offs, data quality reviews, compliance checks, and any workflow that requires human judgment. Gates are **execution-time concerns**, not graph structure. The DAG itself stays pure; gates are attached via a `GateController` that the executor consults at runtime. diff --git a/docs-next/content/docs/guide/execution-strategies/caching.mdx b/docs-next/content/docs/guide/execution-strategies/caching.mdx index 6dda20d..37c5756 100644 --- a/docs-next/content/docs/guide/execution-strategies/caching.mdx +++ b/docs-next/content/docs/guide/execution-strategies/caching.mdx @@ -3,8 +3,6 @@ title: Caching description: Content-addressable Merkle-tree caching for cross-run DAG execution with dagron. --- -# Caching - dagron's caching system provides **content-addressable Merkle-tree caching** for DAG execution results. When you re-execute a pipeline, nodes whose inputs have not changed return their cached result instantly -- no recomputation needed. 
This is conceptually similar to how build systems like Bazel and Nix work: change any upstream node and all downstream cache keys automatically invalidate. ` already renders the frontmatter `title`. If the + * MDX body opens with `# Same Title`, drop it so the page doesn't print + * the heading twice. Tolerates leading blank lines and arbitrary + * whitespace around the heading text. + */ +function dropDuplicateH1(src) { + const fmMatch = src.match(/^---\n([\s\S]*?)\n---\n?/); + if (!fmMatch) return src; + const titleMatch = fmMatch[1].match(/^title:\s*(.+?)\s*$/m); + if (!titleMatch) return src; + const title = titleMatch[1].replace(/^["']|["']$/g, "").trim(); + const body = src.slice(fmMatch[0].length); + + // Match the FIRST `# Heading` line (allowing leading blank lines). + const h1Re = /^[\s\n]*#\s+(.+?)\s*$/m; + const h1Match = body.match(h1Re); + if (!h1Match) return src; + if (h1Match[1].trim() !== title) return src; + + const start = h1Match.index ?? 0; + const stripped = body.slice(0, start) + body.slice(start + h1Match[0].length); + return src.slice(0, fmMatch[0].length) + stripped.replace(/^\n+/, "\n"); +} + function convertAdmonitions(src) { // Match ::: [optional title]\n…\n::: return src.replace( @@ -106,6 +131,7 @@ async function migrateFile(srcPath) { content = dropSiteImports(content); content = convertAdmonitions(content); content = relabelDotCodeBlocks(content); + content = dropDuplicateH1(content); await writeFile(dstPath, content, "utf8"); return { srcPath, dstPath, rel }; diff --git a/docs-next/src/app/(home)/_sections/features.tsx b/docs-next/src/app/(home)/_sections/features.tsx index 9028ea7..4c0ada8 100644 --- a/docs-next/src/app/(home)/_sections/features.tsx +++ b/docs-next/src/app/(home)/_sections/features.tsx @@ -6,37 +6,37 @@ const FEATURES = [ title: "Typed Node Handles", description: "NodeRef carries an Arc+epoch handle returned by add_node — every API accepts str | NodeRef, stale handles error fast.", - guideHref: 
"/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "@dagron.flow", description: "Tawazi-style: write a Python function, let the call structure become the DAG. Pythonic, no string IDs.", - guideHref: "/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "Reactive Engine", description: "Signal/Computed/Watcher with auto-tracked deps. ~10 µs to recompute one branch out of 10k after upstream mutation.", - guideHref: "/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "Content-Addressed Cache", description: "Nix-flake-style cross-process cache backed by the filesystem. Two CI workers share intermediates without coordination.", - guideHref: "/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "Time-Travel Replay", description: "Append-only JSONL traces + payload-deduped CAS. replay(at=t) reconstructs any past run state.", - guideHref: "/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "Effect-Typed Tasks", description: "PURE / READ / WRITE / NETWORK / NONDETERMINISTIC tags drive cache opt-in, replay safety, and executor isolation.", - guideHref: "/guide/typed-and-reactive", + guideHref: "/typed-and-reactive", }, { title: "DAG Builder", diff --git a/docs-next/src/app/(home)/_sections/hero.tsx b/docs-next/src/app/(home)/_sections/hero.tsx index f358238..fbfabcd 100644 --- a/docs-next/src/app/(home)/_sections/hero.tsx +++ b/docs-next/src/app/(home)/_sections/hero.tsx @@ -42,7 +42,7 @@ export function Hero() { -
-

+

{description}

{(guideHref || apiHref) && ( From 1c14f35e8fe223930cfa13b76ce6b2dfdbe3d180 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 02:52:55 +0530 Subject: [PATCH 10/14] =?UTF-8?q?chore(docs):=20swap=20docusaurus=20?= =?UTF-8?q?=E2=86=92=20fumadocs,=20drop=20docs-next=20prefix?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove the old docusaurus tree under docs/, rename docs-next/ → docs/. Update .github/workflows/docs.yml: pnpm + Next.js static export with DOCS_BASE_PATH=/dagron, artifact path docs/out (was docs/build). Update .pre-commit-config.yaml biome hook to run pnpm exec biome check inside the new docs/ structure. Anchor /lib/ in root .gitignore to repo root so it doesn't catch docs/src/lib/. --- .github/workflows/docs.yml | 22 +- .gitignore | 5 +- .pre-commit-config.yaml | 4 +- docs-next/.gitignore | 26 - docs-next/README.md | 51 - docs-next/biome.json | 42 - docs-next/package.json | 40 - docs-next/tsconfig.json | 35 - docs/.gitignore | 30 +- docs/README.md | 58 +- docs/biome.json | 93 +- .../content/docs/api/analysis/analysis.mdx | 0 .../content/docs/api/analysis/contracts.mdx | 0 .../content/docs/api/analysis/dataframe.mdx | 0 .../content/docs/api/analysis/meta.json | 0 .../content/docs/api/core/builder.mdx | 0 .../content/docs/api/core/core.mdx | 0 .../content/docs/api/core/errors.mdx | 0 .../content/docs/api/core/meta.json | 0 .../content/docs/api/execution/caching.mdx | 0 .../content/docs/api/execution/checkpoint.mdx | 0 .../content/docs/api/execution/conditions.mdx | 0 .../docs/api/execution/distributed.mdx | 0 .../content/docs/api/execution/dynamic.mdx | 0 .../content/docs/api/execution/execution.mdx | 0 .../content/docs/api/execution/gates.mdx | 0 .../docs/api/execution/incremental.mdx | 0 .../content/docs/api/execution/meta.json | 0 .../content/docs/api/execution/pipeline.mdx | 0 .../content/docs/api/execution/reactive.mdx | 0 
.../content/docs/api/execution/resources.mdx | 0 .../content/docs/api/index.mdx | 0 .../content/docs/api/meta.json | 0 .../content/docs/api/observability/meta.json | 0 .../docs/api/observability/profiling.mdx | 0 .../docs/api/observability/tracing.mdx | 0 .../content/docs/api/utilities/compose.mdx | 0 .../content/docs/api/utilities/display.mdx | 0 .../docs/api/utilities/integration.mdx | 0 .../content/docs/api/utilities/meta.json | 0 .../content/docs/api/utilities/modern-api.mdx | 0 .../content/docs/api/utilities/plugins.mdx | 0 .../content/docs/api/utilities/template.mdx | 0 .../content/docs/api/utilities/versioning.mdx | 0 .../content/docs/guide/advanced/contracts.mdx | 0 .../docs/guide/advanced/dataframes.mdx | 0 .../content/docs/guide/advanced/meta.json | 0 .../docs/guide/advanced/plugins-hooks.mdx | 0 .../content/docs/guide/advanced/templates.mdx | 0 .../docs/guide/advanced/versioning.mdx | 0 .../content/docs/guide/architecture.mdx | 0 .../content/docs/guide/benchmarks.mdx | 0 .../content/docs/guide/cookbook.mdx | 0 .../guide/core-concepts/building-dags.mdx | 0 .../guide/core-concepts/executing-tasks.mdx | 0 .../guide/core-concepts/inspecting-graphs.mdx | 0 .../docs/guide/core-concepts/meta.json | 0 .../guide/core-concepts/serialization.mdx | 0 .../docs/guide/core-concepts/transforms.mdx | 0 .../execution-strategies/approval-gates.mdx | 0 .../guide/execution-strategies/caching.mdx | 0 .../execution-strategies/checkpointing.mdx | 0 .../execution-strategies/conditional.mdx | 0 .../execution-strategies/distributed.mdx | 0 .../execution-strategies/dynamic-dags.mdx | 0 .../execution-strategies/incremental.mdx | 0 .../docs/guide/execution-strategies/meta.json | 0 .../resource-scheduling.mdx | 0 .../content/docs/guide/getting-started.mdx | 0 .../content/docs/guide/meta.json | 0 .../guide/observability/error-handling.mdx | 0 .../docs/guide/observability/meta.json | 0 .../guide/observability/tracing-profiling.mdx | 0 .../guide/observability/visualization.mdx | 0 
.../content/docs/guide/why-dagron.mdx | 0 {docs-next => docs}/content/docs/index.mdx | 0 {docs-next => docs}/content/docs/meta.json | 0 .../content/docs/typed-and-reactive/index.mdx | 0 .../content/docs/typed-and-reactive/meta.json | 0 docs/docusaurus.config.ts | 147 - {docs-next => docs}/next.config.mjs | 0 docs/package-lock.json | 19867 ---------------- docs/package.json | 68 +- docs/pages/api/analysis/analysis.mdx | 623 - docs/pages/api/analysis/contracts.mdx | 319 - docs/pages/api/analysis/dataframe.mdx | 378 - docs/pages/api/core/builder.mdx | 271 - docs/pages/api/core/core.mdx | 868 - docs/pages/api/core/errors.mdx | 278 - docs/pages/api/execution/caching.mdx | 371 - docs/pages/api/execution/checkpoint.mdx | 261 - docs/pages/api/execution/conditions.mdx | 282 - docs/pages/api/execution/distributed.mdx | 426 - docs/pages/api/execution/dynamic.mdx | 266 - docs/pages/api/execution/execution.mdx | 350 - docs/pages/api/execution/gates.mdx | 352 - docs/pages/api/execution/incremental.mdx | 260 - docs/pages/api/execution/pipeline.mdx | 334 - docs/pages/api/execution/reactive.mdx | 380 - docs/pages/api/execution/resources.mdx | 375 - docs/pages/api/observability/profiling.mdx | 273 - docs/pages/api/observability/tracing.mdx | 328 - docs/pages/api/utilities/compose.mdx | 251 - docs/pages/api/utilities/display.mdx | 255 - docs/pages/api/utilities/integration.mdx | 240 - docs/pages/api/utilities/modern-api.mdx | 199 - docs/pages/api/utilities/plugins.mdx | 507 - docs/pages/api/utilities/template.mdx | 367 - docs/pages/api/utilities/versioning.mdx | 412 - docs/pages/guide/advanced/contracts.mdx | 388 - docs/pages/guide/advanced/dataframes.mdx | 479 - docs/pages/guide/advanced/plugins-hooks.mdx | 538 - docs/pages/guide/advanced/templates.mdx | 456 - docs/pages/guide/advanced/versioning.mdx | 441 - docs/pages/guide/architecture.mdx | 159 - docs/pages/guide/benchmarks.mdx | 168 - docs/pages/guide/cookbook.mdx | 344 - .../guide/core-concepts/building-dags.mdx | 520 - 
.../guide/core-concepts/executing-tasks.mdx | 588 - .../guide/core-concepts/inspecting-graphs.mdx | 500 - .../guide/core-concepts/serialization.mdx | 485 - docs/pages/guide/core-concepts/transforms.mdx | 532 - .../execution-strategies/approval-gates.mdx | 466 - .../guide/execution-strategies/caching.mdx | 461 - .../execution-strategies/checkpointing.mdx | 478 - .../execution-strategies/conditional.mdx | 456 - .../execution-strategies/distributed.mdx | 561 - .../execution-strategies/dynamic-dags.mdx | 475 - .../execution-strategies/incremental.mdx | 397 - .../resource-scheduling.mdx | 484 - docs/pages/guide/getting-started.mdx | 395 - .../guide/observability/error-handling.mdx | 607 - .../guide/observability/tracing-profiling.mdx | 450 - .../guide/observability/visualization.mdx | 444 - docs/pages/guide/typed-and-reactive.mdx | 298 - docs/pages/guide/why-dagron.mdx | 53 - docs/pages/intro.mdx | 188 - {docs-next => docs}/pnpm-lock.yaml | 0 {docs-next => docs}/postcss.config.mjs | 0 {docs-next => docs}/public/.nojekyll | 0 {docs-next => docs}/public/img/favicon.ico | Bin {docs-next => docs}/public/img/logo.svg | 0 {docs-next => docs}/scripts/migrate.mjs | 0 docs/sidebars.ts | 136 - {docs-next => docs}/source.config.ts | 0 .../src/app/(docs)/[...slug]/page.tsx | 0 {docs-next => docs}/src/app/(docs)/layout.tsx | 0 .../src/app/(home)/_sections/features.tsx | 0 .../src/app/(home)/_sections/hero.tsx | 0 .../src/app/(home)/_sections/index.ts | 0 {docs-next => docs}/src/app/(home)/layout.tsx | 0 {docs-next => docs}/src/app/(home)/page.tsx | 0 .../src/app/api/search/route.ts | 0 {docs-next => docs}/src/app/global.css | 0 {docs-next => docs}/src/app/layout.tsx | 0 .../src/app/llms-full.txt/route.ts | 0 {docs-next => docs}/src/app/llms.txt/route.ts | 0 docs/src/components/ApiSignature.tsx | 23 - docs/src/components/DagDiagram.tsx | 104 - docs/src/components/FeatureCard.tsx | 31 - docs/src/components/ParamTable.tsx | 39 - docs/src/components/StatusBadge.tsx | 22 - 
.../src/components/api-signature.tsx | 0 .../src/components/dag-diagram.tsx | 0 .../src/components/diagram-carousel.tsx | 0 .../src/components/effect-badge.tsx | 0 .../src/components/feature-card.tsx | 0 {docs-next => docs}/src/components/mdx.tsx | 0 .../src/components/mermaid.tsx | 0 .../src/components/param-table.tsx | 0 .../src/components/provider.tsx | 0 {docs-next => docs}/src/components/search.tsx | 0 .../src/components/status-badge.tsx | 0 .../src/components/ui/button.tsx | 0 .../src/components/ui/code-panel.tsx | 0 .../src/components/ui/index.ts | 0 .../src/components/ui/section-header.tsx | 0 docs/src/css/custom.css | 162 - docs/src/lib/cn.ts | 6 + docs/src/lib/layout.shared.tsx | 38 + docs/src/lib/shared.ts | 10 + docs/src/lib/source.ts | 36 + docs/static/.nojekyll | 0 docs/static/img/favicon.ico | Bin 3626 -> 0 bytes docs/static/img/logo.svg | 1 - docs/tsconfig.json | 35 +- 186 files changed, 263 insertions(+), 41905 deletions(-) delete mode 100644 docs-next/.gitignore delete mode 100644 docs-next/README.md delete mode 100644 docs-next/biome.json delete mode 100644 docs-next/package.json delete mode 100644 docs-next/tsconfig.json rename {docs-next => docs}/content/docs/api/analysis/analysis.mdx (100%) rename {docs-next => docs}/content/docs/api/analysis/contracts.mdx (100%) rename {docs-next => docs}/content/docs/api/analysis/dataframe.mdx (100%) rename {docs-next => docs}/content/docs/api/analysis/meta.json (100%) rename {docs-next => docs}/content/docs/api/core/builder.mdx (100%) rename {docs-next => docs}/content/docs/api/core/core.mdx (100%) rename {docs-next => docs}/content/docs/api/core/errors.mdx (100%) rename {docs-next => docs}/content/docs/api/core/meta.json (100%) rename {docs-next => docs}/content/docs/api/execution/caching.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/checkpoint.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/conditions.mdx (100%) rename {docs-next => 
docs}/content/docs/api/execution/distributed.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/dynamic.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/execution.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/gates.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/incremental.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/meta.json (100%) rename {docs-next => docs}/content/docs/api/execution/pipeline.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/reactive.mdx (100%) rename {docs-next => docs}/content/docs/api/execution/resources.mdx (100%) rename {docs-next => docs}/content/docs/api/index.mdx (100%) rename {docs-next => docs}/content/docs/api/meta.json (100%) rename {docs-next => docs}/content/docs/api/observability/meta.json (100%) rename {docs-next => docs}/content/docs/api/observability/profiling.mdx (100%) rename {docs-next => docs}/content/docs/api/observability/tracing.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/compose.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/display.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/integration.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/meta.json (100%) rename {docs-next => docs}/content/docs/api/utilities/modern-api.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/plugins.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/template.mdx (100%) rename {docs-next => docs}/content/docs/api/utilities/versioning.mdx (100%) rename {docs-next => docs}/content/docs/guide/advanced/contracts.mdx (100%) rename {docs-next => docs}/content/docs/guide/advanced/dataframes.mdx (100%) rename {docs-next => docs}/content/docs/guide/advanced/meta.json (100%) rename {docs-next => docs}/content/docs/guide/advanced/plugins-hooks.mdx (100%) rename {docs-next => docs}/content/docs/guide/advanced/templates.mdx (100%) rename {docs-next => 
docs}/content/docs/guide/advanced/versioning.mdx (100%) rename {docs-next => docs}/content/docs/guide/architecture.mdx (100%) rename {docs-next => docs}/content/docs/guide/benchmarks.mdx (100%) rename {docs-next => docs}/content/docs/guide/cookbook.mdx (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/building-dags.mdx (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/executing-tasks.mdx (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/inspecting-graphs.mdx (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/meta.json (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/serialization.mdx (100%) rename {docs-next => docs}/content/docs/guide/core-concepts/transforms.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/approval-gates.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/caching.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/checkpointing.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/conditional.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/distributed.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/dynamic-dags.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/incremental.mdx (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/meta.json (100%) rename {docs-next => docs}/content/docs/guide/execution-strategies/resource-scheduling.mdx (100%) rename {docs-next => docs}/content/docs/guide/getting-started.mdx (100%) rename {docs-next => docs}/content/docs/guide/meta.json (100%) rename {docs-next => docs}/content/docs/guide/observability/error-handling.mdx (100%) rename {docs-next => docs}/content/docs/guide/observability/meta.json (100%) rename {docs-next => docs}/content/docs/guide/observability/tracing-profiling.mdx (100%) rename {docs-next => 
docs}/content/docs/guide/observability/visualization.mdx (100%) rename {docs-next => docs}/content/docs/guide/why-dagron.mdx (100%) rename {docs-next => docs}/content/docs/index.mdx (100%) rename {docs-next => docs}/content/docs/meta.json (100%) rename {docs-next => docs}/content/docs/typed-and-reactive/index.mdx (100%) rename {docs-next => docs}/content/docs/typed-and-reactive/meta.json (100%) delete mode 100644 docs/docusaurus.config.ts rename {docs-next => docs}/next.config.mjs (100%) delete mode 100644 docs/package-lock.json delete mode 100644 docs/pages/api/analysis/analysis.mdx delete mode 100644 docs/pages/api/analysis/contracts.mdx delete mode 100644 docs/pages/api/analysis/dataframe.mdx delete mode 100644 docs/pages/api/core/builder.mdx delete mode 100644 docs/pages/api/core/core.mdx delete mode 100644 docs/pages/api/core/errors.mdx delete mode 100644 docs/pages/api/execution/caching.mdx delete mode 100644 docs/pages/api/execution/checkpoint.mdx delete mode 100644 docs/pages/api/execution/conditions.mdx delete mode 100644 docs/pages/api/execution/distributed.mdx delete mode 100644 docs/pages/api/execution/dynamic.mdx delete mode 100644 docs/pages/api/execution/execution.mdx delete mode 100644 docs/pages/api/execution/gates.mdx delete mode 100644 docs/pages/api/execution/incremental.mdx delete mode 100644 docs/pages/api/execution/pipeline.mdx delete mode 100644 docs/pages/api/execution/reactive.mdx delete mode 100644 docs/pages/api/execution/resources.mdx delete mode 100644 docs/pages/api/observability/profiling.mdx delete mode 100644 docs/pages/api/observability/tracing.mdx delete mode 100644 docs/pages/api/utilities/compose.mdx delete mode 100644 docs/pages/api/utilities/display.mdx delete mode 100644 docs/pages/api/utilities/integration.mdx delete mode 100644 docs/pages/api/utilities/modern-api.mdx delete mode 100644 docs/pages/api/utilities/plugins.mdx delete mode 100644 docs/pages/api/utilities/template.mdx delete mode 100644 
docs/pages/api/utilities/versioning.mdx delete mode 100644 docs/pages/guide/advanced/contracts.mdx delete mode 100644 docs/pages/guide/advanced/dataframes.mdx delete mode 100644 docs/pages/guide/advanced/plugins-hooks.mdx delete mode 100644 docs/pages/guide/advanced/templates.mdx delete mode 100644 docs/pages/guide/advanced/versioning.mdx delete mode 100644 docs/pages/guide/architecture.mdx delete mode 100644 docs/pages/guide/benchmarks.mdx delete mode 100644 docs/pages/guide/cookbook.mdx delete mode 100644 docs/pages/guide/core-concepts/building-dags.mdx delete mode 100644 docs/pages/guide/core-concepts/executing-tasks.mdx delete mode 100644 docs/pages/guide/core-concepts/inspecting-graphs.mdx delete mode 100644 docs/pages/guide/core-concepts/serialization.mdx delete mode 100644 docs/pages/guide/core-concepts/transforms.mdx delete mode 100644 docs/pages/guide/execution-strategies/approval-gates.mdx delete mode 100644 docs/pages/guide/execution-strategies/caching.mdx delete mode 100644 docs/pages/guide/execution-strategies/checkpointing.mdx delete mode 100644 docs/pages/guide/execution-strategies/conditional.mdx delete mode 100644 docs/pages/guide/execution-strategies/distributed.mdx delete mode 100644 docs/pages/guide/execution-strategies/dynamic-dags.mdx delete mode 100644 docs/pages/guide/execution-strategies/incremental.mdx delete mode 100644 docs/pages/guide/execution-strategies/resource-scheduling.mdx delete mode 100644 docs/pages/guide/getting-started.mdx delete mode 100644 docs/pages/guide/observability/error-handling.mdx delete mode 100644 docs/pages/guide/observability/tracing-profiling.mdx delete mode 100644 docs/pages/guide/observability/visualization.mdx delete mode 100644 docs/pages/guide/typed-and-reactive.mdx delete mode 100644 docs/pages/guide/why-dagron.mdx delete mode 100644 docs/pages/intro.mdx rename {docs-next => docs}/pnpm-lock.yaml (100%) rename {docs-next => docs}/postcss.config.mjs (100%) rename {docs-next => docs}/public/.nojekyll (100%) 
rename {docs-next => docs}/public/img/favicon.ico (100%) rename {docs-next => docs}/public/img/logo.svg (100%) rename {docs-next => docs}/scripts/migrate.mjs (100%) delete mode 100644 docs/sidebars.ts rename {docs-next => docs}/source.config.ts (100%) rename {docs-next => docs}/src/app/(docs)/[...slug]/page.tsx (100%) rename {docs-next => docs}/src/app/(docs)/layout.tsx (100%) rename {docs-next => docs}/src/app/(home)/_sections/features.tsx (100%) rename {docs-next => docs}/src/app/(home)/_sections/hero.tsx (100%) rename {docs-next => docs}/src/app/(home)/_sections/index.ts (100%) rename {docs-next => docs}/src/app/(home)/layout.tsx (100%) rename {docs-next => docs}/src/app/(home)/page.tsx (100%) rename {docs-next => docs}/src/app/api/search/route.ts (100%) rename {docs-next => docs}/src/app/global.css (100%) rename {docs-next => docs}/src/app/layout.tsx (100%) rename {docs-next => docs}/src/app/llms-full.txt/route.ts (100%) rename {docs-next => docs}/src/app/llms.txt/route.ts (100%) delete mode 100644 docs/src/components/ApiSignature.tsx delete mode 100644 docs/src/components/DagDiagram.tsx delete mode 100644 docs/src/components/FeatureCard.tsx delete mode 100644 docs/src/components/ParamTable.tsx delete mode 100644 docs/src/components/StatusBadge.tsx rename {docs-next => docs}/src/components/api-signature.tsx (100%) rename {docs-next => docs}/src/components/dag-diagram.tsx (100%) rename {docs-next => docs}/src/components/diagram-carousel.tsx (100%) rename {docs-next => docs}/src/components/effect-badge.tsx (100%) rename {docs-next => docs}/src/components/feature-card.tsx (100%) rename {docs-next => docs}/src/components/mdx.tsx (100%) rename {docs-next => docs}/src/components/mermaid.tsx (100%) rename {docs-next => docs}/src/components/param-table.tsx (100%) rename {docs-next => docs}/src/components/provider.tsx (100%) rename {docs-next => docs}/src/components/search.tsx (100%) rename {docs-next => docs}/src/components/status-badge.tsx (100%) rename {docs-next => 
docs}/src/components/ui/button.tsx (100%) rename {docs-next => docs}/src/components/ui/code-panel.tsx (100%) rename {docs-next => docs}/src/components/ui/index.ts (100%) rename {docs-next => docs}/src/components/ui/section-header.tsx (100%) delete mode 100644 docs/src/css/custom.css create mode 100644 docs/src/lib/cn.ts create mode 100644 docs/src/lib/layout.shared.tsx create mode 100644 docs/src/lib/shared.ts create mode 100644 docs/src/lib/source.ts delete mode 100644 docs/static/.nojekyll delete mode 100644 docs/static/img/favicon.ico delete mode 100644 docs/static/img/logo.svg diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 630563b..18d274e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,26 +20,36 @@ concurrency: jobs: build: runs-on: ubuntu-latest + defaults: + run: + working-directory: docs steps: - uses: actions/checkout@v6 + - name: Set up pnpm + uses: pnpm/action-setup@v4 + with: + version: 10 + - name: Set up Node.js uses: actions/setup-node@v6 with: node-version: "22" - cache: npm - cache-dependency-path: docs/package-lock.json + cache: pnpm + cache-dependency-path: docs/pnpm-lock.yaml - name: Install dependencies - run: cd docs && npm ci + run: pnpm install --frozen-lockfile - - name: Build Docusaurus - run: cd docs && npm run build + - name: Build Fumadocs site + env: + DOCS_BASE_PATH: /dagron + run: pnpm build - name: Upload Pages artifact uses: actions/upload-pages-artifact@v3 with: - path: docs/build + path: docs/out deploy: needs: build diff --git a/.gitignore b/.gitignore index 45b6de5..7845353 100644 --- a/.gitignore +++ b/.gitignore @@ -162,8 +162,9 @@ dist/ downloads/ eggs/ .eggs/ -lib/ -lib64/ +# Anchor `lib/` to repo root so it doesn't catch e.g. docs/src/lib/. 
+/lib/ +/lib64/ parts/ sdist/ var/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 144af30..258e386 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,7 @@ repos: pass_filenames: false - id: biome name: biome - entry: bash -c 'cd docs && npx biome ci src/ docusaurus.config.ts sidebars.ts' + entry: bash -c 'cd docs && pnpm exec biome check' language: system - files: ^docs/src/|^docs/docusaurus\.config\.ts|^docs/sidebars\.ts + files: ^docs/(src/|content/|source\.config\.ts|next\.config\.mjs|biome\.json) pass_filenames: false diff --git a/docs-next/.gitignore b/docs-next/.gitignore deleted file mode 100644 index 9e429e4..0000000 --- a/docs-next/.gitignore +++ /dev/null @@ -1,26 +0,0 @@ -# deps -/node_modules - -# generated content -.source - -# test & build -/coverage -/.next/ -/out/ -/build -*.tsbuildinfo - -# misc -.DS_Store -*.pem -/.pnp -.pnp.js -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# others -.env*.local -.vercel -next-env.d.ts \ No newline at end of file diff --git a/docs-next/README.md b/docs-next/README.md deleted file mode 100644 index cc081d2..0000000 --- a/docs-next/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# dagron docs (Fumadocs) - -Side-by-side replacement for `../docs/` (Docusaurus). Once verified, this -directory will be swapped in as `docs/`. - -## Develop - -```bash -pnpm install -pnpm dev # http://localhost:3000 -``` - -## Build for GitHub Pages - -```bash -DOCS_BASE_PATH=/dagron pnpm build -npx serve out/ # then visit http://localhost:3000/dagron/ -``` - -Local builds without `DOCS_BASE_PATH` serve cleanly from `/`. 
- -## Lint & types - -```bash -pnpm lint # biome -pnpm types:check # fumadocs-mdx + next typegen + tsc --noEmit -``` - -## Layout - -``` -src/ -├── app/ # Next.js App Router -│ ├── (home)/ # marketing landing -│ ├── (docs)/ # docs sidebar + page renderer -│ └── api/ # Orama search endpoint, llms.txt routes -├── components/ -│ ├── ui/ # generic primitives (Button, CodePanel, SectionHeader) -│ ├── mdx.tsx # global MDX component map -│ ├── mermaid.tsx # client-side mermaid with theme awareness -│ └── ... # dagron-specific (DagDiagram, StatusBadge, FeatureCard, …) -└── lib/ - ├── source.ts # Fumadocs source loader - ├── shared.ts # appName, gitConfig, route constants - └── layout.shared.tsx # nav + sidebar config - -content/docs/ # 54 MDX files, organised by guide/ + api/ -``` - -Components are registered globally in `src/components/mdx.tsx`, so MDX -authors don't need to write `import` lines. diff --git a/docs-next/biome.json b/docs-next/biome.json deleted file mode 100644 index 87cba3b..0000000 --- a/docs-next/biome.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "https://biomejs.dev/schemas/2.4.14/schema.json", - "vcs": { - "enabled": true, - "clientKind": "git", - "useIgnoreFile": true - }, - "files": { - "ignoreUnknown": true, - "includes": [ - "**", - "!node_modules", - "!.next", - "!dist", - "!build", - "!.source", - "!src/app/global.css" - ] - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 2 - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true - }, - "domains": { - "next": "recommended", - "react": "recommended" - } - }, - "assist": { - "actions": { - "source": { - "organizeImports": "on" - } - } - } -} diff --git a/docs-next/package.json b/docs-next/package.json deleted file mode 100644 index 2ab6dd3..0000000 --- a/docs-next/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "docs", - "version": "0.0.0", - "private": true, - "scripts": { - "build": "next build", - "dev": "next dev", - "start": 
"serve out", - "types:check": "fumadocs-mdx && next typegen && tsc --noEmit", - "postinstall": "fumadocs-mdx", - "lint": "biome check", - "format": "biome format --write" - }, - "dependencies": { - "@orama/orama": "^3.1.18", - "clsx": "^2.1.1", - "fumadocs-core": "16.8.5", - "fumadocs-mdx": "14.3.2", - "fumadocs-ui": "16.8.5", - "lucide-react": "^1.14.0", - "mermaid": "^11.14.0", - "next": "16.2.4", - "next-themes": "^0.4.6", - "react": "^19.2.5", - "react-dom": "^19.2.5", - "tailwind-merge": "^3.5.0" - }, - "devDependencies": { - "@biomejs/biome": "^2.4.14", - "@tailwindcss/postcss": "^4.2.4", - "@types/mdx": "^2.0.13", - "@types/node": "^25.6.0", - "@types/react": "^19.2.14", - "@types/react-dom": "^19.2.3", - "postcss": "^8.5.13", - "serve": "^14.2.6", - "tailwindcss": "^4.2.4", - "typescript": "^6.0.3" - } -} diff --git a/docs-next/tsconfig.json b/docs-next/tsconfig.json deleted file mode 100644 index f43f873..0000000 --- a/docs-next/tsconfig.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "compilerOptions": { - "target": "ESNext", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "bundler", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "react-jsx", - "incremental": true, - "paths": { - "@/*": ["./src/*"], - "collections/*": ["./.source/*"] - }, - "plugins": [ - { - "name": "next" - } - ] - }, - "include": [ - "next-env.d.ts", - "**/*.ts", - "**/*.tsx", - ".next/types/**/*.ts", - ".next/dev/types/**/*.ts" - ], - "exclude": ["node_modules"] -} diff --git a/docs/.gitignore b/docs/.gitignore index b2d6de3..9e429e4 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1,20 +1,26 @@ -# Dependencies +# deps /node_modules -# Production -/build +# generated content +.source -# Generated files -.docusaurus -.cache-loader +# test & build +/coverage +/.next/ +/out/ +/build 
+*.tsbuildinfo -# Misc +# misc .DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local - +*.pem +/.pnp +.pnp.js npm-debug.log* yarn-debug.log* yarn-error.log* + +# others +.env*.local +.vercel +next-env.d.ts \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index 0c6c2c2..cc081d2 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,41 +1,51 @@ -# Website +# dagron docs (Fumadocs) -This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator. +Side-by-side replacement for `../docs/` (Docusaurus). Once verified, this +directory will be swapped in as `docs/`. -### Installation +## Develop -``` -$ yarn +```bash +pnpm install +pnpm dev # http://localhost:3000 ``` -### Local Development +## Build for GitHub Pages -``` -$ yarn start +```bash +DOCS_BASE_PATH=/dagron pnpm build +npx serve out/ # then visit http://localhost:3000/dagron/ ``` -This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. +Local builds without `DOCS_BASE_PATH` serve cleanly from `/`. -### Build +## Lint & types +```bash +pnpm lint # biome +pnpm types:check # fumadocs-mdx + next typegen + tsc --noEmit ``` -$ yarn build -``` - -This command generates static content into the `build` directory and can be served using any static contents hosting service. -### Deployment +## Layout -Using SSH: - -``` -$ USE_SSH=true yarn deploy ``` +src/ +├── app/ # Next.js App Router +│ ├── (home)/ # marketing landing +│ ├── (docs)/ # docs sidebar + page renderer +│ └── api/ # Orama search endpoint, llms.txt routes +├── components/ +│ ├── ui/ # generic primitives (Button, CodePanel, SectionHeader) +│ ├── mdx.tsx # global MDX component map +│ ├── mermaid.tsx # client-side mermaid with theme awareness +│ └── ... 
# dagron-specific (DagDiagram, StatusBadge, FeatureCard, …) +└── lib/ + ├── source.ts # Fumadocs source loader + ├── shared.ts # appName, gitConfig, route constants + └── layout.shared.tsx # nav + sidebar config -Not using SSH: - -``` -$ GIT_USER= yarn deploy +content/docs/ # 54 MDX files, organised by guide/ + api/ ``` -If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. +Components are registered globally in `src/components/mdx.tsx`, so MDX +authors don't need to write `import` lines. diff --git a/docs/biome.json b/docs/biome.json index 0817143..87cba3b 100644 --- a/docs/biome.json +++ b/docs/biome.json @@ -1,55 +1,42 @@ { - "$schema": "https://biomejs.dev/schemas/2.4.9/schema.json", - "vcs": { - "enabled": true, - "clientKind": "git", - "useIgnoreFile": true - }, - "files": { - "ignoreUnknown": true, - "includes": ["src/**", "docusaurus.config.ts", "sidebars.ts"] - }, - "formatter": { - "enabled": true, - "indentStyle": "space", - "indentWidth": 2 - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "correctness": { - "noUnusedImports": "warn" - }, - "suspicious": { - "noExplicitAny": "off", - "noArrayIndexKey": "off" - }, - "a11y": { - "useButtonType": "warn", - "noStaticElementInteractions": "off", - "noLabelWithoutControl": "off" - }, - "complexity": { - "noImportantStyles": "off" - }, - "style": { - "noNonNullAssertion": "off" - } - } - }, - "javascript": { - "formatter": { - "quoteStyle": "single", - "trailingCommas": "all" - } - }, - "assist": { - "enabled": true, - "actions": { - "source": { - "organizeImports": "on" - } - } - } + "$schema": "https://biomejs.dev/schemas/2.4.14/schema.json", + "vcs": { + "enabled": true, + "clientKind": "git", + "useIgnoreFile": true + }, + "files": { + "ignoreUnknown": true, + "includes": [ + "**", + "!node_modules", + "!.next", + "!dist", + "!build", + "!.source", + "!src/app/global.css" + ] + }, + "formatter": { + 
"enabled": true, + "indentStyle": "space", + "indentWidth": 2 + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + }, + "domains": { + "next": "recommended", + "react": "recommended" + } + }, + "assist": { + "actions": { + "source": { + "organizeImports": "on" + } + } + } } diff --git a/docs-next/content/docs/api/analysis/analysis.mdx b/docs/content/docs/api/analysis/analysis.mdx similarity index 100% rename from docs-next/content/docs/api/analysis/analysis.mdx rename to docs/content/docs/api/analysis/analysis.mdx diff --git a/docs-next/content/docs/api/analysis/contracts.mdx b/docs/content/docs/api/analysis/contracts.mdx similarity index 100% rename from docs-next/content/docs/api/analysis/contracts.mdx rename to docs/content/docs/api/analysis/contracts.mdx diff --git a/docs-next/content/docs/api/analysis/dataframe.mdx b/docs/content/docs/api/analysis/dataframe.mdx similarity index 100% rename from docs-next/content/docs/api/analysis/dataframe.mdx rename to docs/content/docs/api/analysis/dataframe.mdx diff --git a/docs-next/content/docs/api/analysis/meta.json b/docs/content/docs/api/analysis/meta.json similarity index 100% rename from docs-next/content/docs/api/analysis/meta.json rename to docs/content/docs/api/analysis/meta.json diff --git a/docs-next/content/docs/api/core/builder.mdx b/docs/content/docs/api/core/builder.mdx similarity index 100% rename from docs-next/content/docs/api/core/builder.mdx rename to docs/content/docs/api/core/builder.mdx diff --git a/docs-next/content/docs/api/core/core.mdx b/docs/content/docs/api/core/core.mdx similarity index 100% rename from docs-next/content/docs/api/core/core.mdx rename to docs/content/docs/api/core/core.mdx diff --git a/docs-next/content/docs/api/core/errors.mdx b/docs/content/docs/api/core/errors.mdx similarity index 100% rename from docs-next/content/docs/api/core/errors.mdx rename to docs/content/docs/api/core/errors.mdx diff --git a/docs-next/content/docs/api/core/meta.json 
b/docs/content/docs/api/core/meta.json similarity index 100% rename from docs-next/content/docs/api/core/meta.json rename to docs/content/docs/api/core/meta.json diff --git a/docs-next/content/docs/api/execution/caching.mdx b/docs/content/docs/api/execution/caching.mdx similarity index 100% rename from docs-next/content/docs/api/execution/caching.mdx rename to docs/content/docs/api/execution/caching.mdx diff --git a/docs-next/content/docs/api/execution/checkpoint.mdx b/docs/content/docs/api/execution/checkpoint.mdx similarity index 100% rename from docs-next/content/docs/api/execution/checkpoint.mdx rename to docs/content/docs/api/execution/checkpoint.mdx diff --git a/docs-next/content/docs/api/execution/conditions.mdx b/docs/content/docs/api/execution/conditions.mdx similarity index 100% rename from docs-next/content/docs/api/execution/conditions.mdx rename to docs/content/docs/api/execution/conditions.mdx diff --git a/docs-next/content/docs/api/execution/distributed.mdx b/docs/content/docs/api/execution/distributed.mdx similarity index 100% rename from docs-next/content/docs/api/execution/distributed.mdx rename to docs/content/docs/api/execution/distributed.mdx diff --git a/docs-next/content/docs/api/execution/dynamic.mdx b/docs/content/docs/api/execution/dynamic.mdx similarity index 100% rename from docs-next/content/docs/api/execution/dynamic.mdx rename to docs/content/docs/api/execution/dynamic.mdx diff --git a/docs-next/content/docs/api/execution/execution.mdx b/docs/content/docs/api/execution/execution.mdx similarity index 100% rename from docs-next/content/docs/api/execution/execution.mdx rename to docs/content/docs/api/execution/execution.mdx diff --git a/docs-next/content/docs/api/execution/gates.mdx b/docs/content/docs/api/execution/gates.mdx similarity index 100% rename from docs-next/content/docs/api/execution/gates.mdx rename to docs/content/docs/api/execution/gates.mdx diff --git a/docs-next/content/docs/api/execution/incremental.mdx 
b/docs/content/docs/api/execution/incremental.mdx similarity index 100% rename from docs-next/content/docs/api/execution/incremental.mdx rename to docs/content/docs/api/execution/incremental.mdx diff --git a/docs-next/content/docs/api/execution/meta.json b/docs/content/docs/api/execution/meta.json similarity index 100% rename from docs-next/content/docs/api/execution/meta.json rename to docs/content/docs/api/execution/meta.json diff --git a/docs-next/content/docs/api/execution/pipeline.mdx b/docs/content/docs/api/execution/pipeline.mdx similarity index 100% rename from docs-next/content/docs/api/execution/pipeline.mdx rename to docs/content/docs/api/execution/pipeline.mdx diff --git a/docs-next/content/docs/api/execution/reactive.mdx b/docs/content/docs/api/execution/reactive.mdx similarity index 100% rename from docs-next/content/docs/api/execution/reactive.mdx rename to docs/content/docs/api/execution/reactive.mdx diff --git a/docs-next/content/docs/api/execution/resources.mdx b/docs/content/docs/api/execution/resources.mdx similarity index 100% rename from docs-next/content/docs/api/execution/resources.mdx rename to docs/content/docs/api/execution/resources.mdx diff --git a/docs-next/content/docs/api/index.mdx b/docs/content/docs/api/index.mdx similarity index 100% rename from docs-next/content/docs/api/index.mdx rename to docs/content/docs/api/index.mdx diff --git a/docs-next/content/docs/api/meta.json b/docs/content/docs/api/meta.json similarity index 100% rename from docs-next/content/docs/api/meta.json rename to docs/content/docs/api/meta.json diff --git a/docs-next/content/docs/api/observability/meta.json b/docs/content/docs/api/observability/meta.json similarity index 100% rename from docs-next/content/docs/api/observability/meta.json rename to docs/content/docs/api/observability/meta.json diff --git a/docs-next/content/docs/api/observability/profiling.mdx b/docs/content/docs/api/observability/profiling.mdx similarity index 100% rename from 
docs-next/content/docs/api/observability/profiling.mdx rename to docs/content/docs/api/observability/profiling.mdx diff --git a/docs-next/content/docs/api/observability/tracing.mdx b/docs/content/docs/api/observability/tracing.mdx similarity index 100% rename from docs-next/content/docs/api/observability/tracing.mdx rename to docs/content/docs/api/observability/tracing.mdx diff --git a/docs-next/content/docs/api/utilities/compose.mdx b/docs/content/docs/api/utilities/compose.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/compose.mdx rename to docs/content/docs/api/utilities/compose.mdx diff --git a/docs-next/content/docs/api/utilities/display.mdx b/docs/content/docs/api/utilities/display.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/display.mdx rename to docs/content/docs/api/utilities/display.mdx diff --git a/docs-next/content/docs/api/utilities/integration.mdx b/docs/content/docs/api/utilities/integration.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/integration.mdx rename to docs/content/docs/api/utilities/integration.mdx diff --git a/docs-next/content/docs/api/utilities/meta.json b/docs/content/docs/api/utilities/meta.json similarity index 100% rename from docs-next/content/docs/api/utilities/meta.json rename to docs/content/docs/api/utilities/meta.json diff --git a/docs-next/content/docs/api/utilities/modern-api.mdx b/docs/content/docs/api/utilities/modern-api.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/modern-api.mdx rename to docs/content/docs/api/utilities/modern-api.mdx diff --git a/docs-next/content/docs/api/utilities/plugins.mdx b/docs/content/docs/api/utilities/plugins.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/plugins.mdx rename to docs/content/docs/api/utilities/plugins.mdx diff --git a/docs-next/content/docs/api/utilities/template.mdx b/docs/content/docs/api/utilities/template.mdx similarity index 100% 
rename from docs-next/content/docs/api/utilities/template.mdx rename to docs/content/docs/api/utilities/template.mdx diff --git a/docs-next/content/docs/api/utilities/versioning.mdx b/docs/content/docs/api/utilities/versioning.mdx similarity index 100% rename from docs-next/content/docs/api/utilities/versioning.mdx rename to docs/content/docs/api/utilities/versioning.mdx diff --git a/docs-next/content/docs/guide/advanced/contracts.mdx b/docs/content/docs/guide/advanced/contracts.mdx similarity index 100% rename from docs-next/content/docs/guide/advanced/contracts.mdx rename to docs/content/docs/guide/advanced/contracts.mdx diff --git a/docs-next/content/docs/guide/advanced/dataframes.mdx b/docs/content/docs/guide/advanced/dataframes.mdx similarity index 100% rename from docs-next/content/docs/guide/advanced/dataframes.mdx rename to docs/content/docs/guide/advanced/dataframes.mdx diff --git a/docs-next/content/docs/guide/advanced/meta.json b/docs/content/docs/guide/advanced/meta.json similarity index 100% rename from docs-next/content/docs/guide/advanced/meta.json rename to docs/content/docs/guide/advanced/meta.json diff --git a/docs-next/content/docs/guide/advanced/plugins-hooks.mdx b/docs/content/docs/guide/advanced/plugins-hooks.mdx similarity index 100% rename from docs-next/content/docs/guide/advanced/plugins-hooks.mdx rename to docs/content/docs/guide/advanced/plugins-hooks.mdx diff --git a/docs-next/content/docs/guide/advanced/templates.mdx b/docs/content/docs/guide/advanced/templates.mdx similarity index 100% rename from docs-next/content/docs/guide/advanced/templates.mdx rename to docs/content/docs/guide/advanced/templates.mdx diff --git a/docs-next/content/docs/guide/advanced/versioning.mdx b/docs/content/docs/guide/advanced/versioning.mdx similarity index 100% rename from docs-next/content/docs/guide/advanced/versioning.mdx rename to docs/content/docs/guide/advanced/versioning.mdx diff --git a/docs-next/content/docs/guide/architecture.mdx 
b/docs/content/docs/guide/architecture.mdx similarity index 100% rename from docs-next/content/docs/guide/architecture.mdx rename to docs/content/docs/guide/architecture.mdx diff --git a/docs-next/content/docs/guide/benchmarks.mdx b/docs/content/docs/guide/benchmarks.mdx similarity index 100% rename from docs-next/content/docs/guide/benchmarks.mdx rename to docs/content/docs/guide/benchmarks.mdx diff --git a/docs-next/content/docs/guide/cookbook.mdx b/docs/content/docs/guide/cookbook.mdx similarity index 100% rename from docs-next/content/docs/guide/cookbook.mdx rename to docs/content/docs/guide/cookbook.mdx diff --git a/docs-next/content/docs/guide/core-concepts/building-dags.mdx b/docs/content/docs/guide/core-concepts/building-dags.mdx similarity index 100% rename from docs-next/content/docs/guide/core-concepts/building-dags.mdx rename to docs/content/docs/guide/core-concepts/building-dags.mdx diff --git a/docs-next/content/docs/guide/core-concepts/executing-tasks.mdx b/docs/content/docs/guide/core-concepts/executing-tasks.mdx similarity index 100% rename from docs-next/content/docs/guide/core-concepts/executing-tasks.mdx rename to docs/content/docs/guide/core-concepts/executing-tasks.mdx diff --git a/docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx b/docs/content/docs/guide/core-concepts/inspecting-graphs.mdx similarity index 100% rename from docs-next/content/docs/guide/core-concepts/inspecting-graphs.mdx rename to docs/content/docs/guide/core-concepts/inspecting-graphs.mdx diff --git a/docs-next/content/docs/guide/core-concepts/meta.json b/docs/content/docs/guide/core-concepts/meta.json similarity index 100% rename from docs-next/content/docs/guide/core-concepts/meta.json rename to docs/content/docs/guide/core-concepts/meta.json diff --git a/docs-next/content/docs/guide/core-concepts/serialization.mdx b/docs/content/docs/guide/core-concepts/serialization.mdx similarity index 100% rename from 
docs-next/content/docs/guide/core-concepts/serialization.mdx rename to docs/content/docs/guide/core-concepts/serialization.mdx diff --git a/docs-next/content/docs/guide/core-concepts/transforms.mdx b/docs/content/docs/guide/core-concepts/transforms.mdx similarity index 100% rename from docs-next/content/docs/guide/core-concepts/transforms.mdx rename to docs/content/docs/guide/core-concepts/transforms.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/approval-gates.mdx b/docs/content/docs/guide/execution-strategies/approval-gates.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/approval-gates.mdx rename to docs/content/docs/guide/execution-strategies/approval-gates.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/caching.mdx b/docs/content/docs/guide/execution-strategies/caching.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/caching.mdx rename to docs/content/docs/guide/execution-strategies/caching.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/checkpointing.mdx b/docs/content/docs/guide/execution-strategies/checkpointing.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/checkpointing.mdx rename to docs/content/docs/guide/execution-strategies/checkpointing.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/conditional.mdx b/docs/content/docs/guide/execution-strategies/conditional.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/conditional.mdx rename to docs/content/docs/guide/execution-strategies/conditional.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/distributed.mdx b/docs/content/docs/guide/execution-strategies/distributed.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/distributed.mdx rename to docs/content/docs/guide/execution-strategies/distributed.mdx diff --git 
a/docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx b/docs/content/docs/guide/execution-strategies/dynamic-dags.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/dynamic-dags.mdx rename to docs/content/docs/guide/execution-strategies/dynamic-dags.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/incremental.mdx b/docs/content/docs/guide/execution-strategies/incremental.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/incremental.mdx rename to docs/content/docs/guide/execution-strategies/incremental.mdx diff --git a/docs-next/content/docs/guide/execution-strategies/meta.json b/docs/content/docs/guide/execution-strategies/meta.json similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/meta.json rename to docs/content/docs/guide/execution-strategies/meta.json diff --git a/docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx b/docs/content/docs/guide/execution-strategies/resource-scheduling.mdx similarity index 100% rename from docs-next/content/docs/guide/execution-strategies/resource-scheduling.mdx rename to docs/content/docs/guide/execution-strategies/resource-scheduling.mdx diff --git a/docs-next/content/docs/guide/getting-started.mdx b/docs/content/docs/guide/getting-started.mdx similarity index 100% rename from docs-next/content/docs/guide/getting-started.mdx rename to docs/content/docs/guide/getting-started.mdx diff --git a/docs-next/content/docs/guide/meta.json b/docs/content/docs/guide/meta.json similarity index 100% rename from docs-next/content/docs/guide/meta.json rename to docs/content/docs/guide/meta.json diff --git a/docs-next/content/docs/guide/observability/error-handling.mdx b/docs/content/docs/guide/observability/error-handling.mdx similarity index 100% rename from docs-next/content/docs/guide/observability/error-handling.mdx rename to docs/content/docs/guide/observability/error-handling.mdx 
diff --git a/docs-next/content/docs/guide/observability/meta.json b/docs/content/docs/guide/observability/meta.json similarity index 100% rename from docs-next/content/docs/guide/observability/meta.json rename to docs/content/docs/guide/observability/meta.json diff --git a/docs-next/content/docs/guide/observability/tracing-profiling.mdx b/docs/content/docs/guide/observability/tracing-profiling.mdx similarity index 100% rename from docs-next/content/docs/guide/observability/tracing-profiling.mdx rename to docs/content/docs/guide/observability/tracing-profiling.mdx diff --git a/docs-next/content/docs/guide/observability/visualization.mdx b/docs/content/docs/guide/observability/visualization.mdx similarity index 100% rename from docs-next/content/docs/guide/observability/visualization.mdx rename to docs/content/docs/guide/observability/visualization.mdx diff --git a/docs-next/content/docs/guide/why-dagron.mdx b/docs/content/docs/guide/why-dagron.mdx similarity index 100% rename from docs-next/content/docs/guide/why-dagron.mdx rename to docs/content/docs/guide/why-dagron.mdx diff --git a/docs-next/content/docs/index.mdx b/docs/content/docs/index.mdx similarity index 100% rename from docs-next/content/docs/index.mdx rename to docs/content/docs/index.mdx diff --git a/docs-next/content/docs/meta.json b/docs/content/docs/meta.json similarity index 100% rename from docs-next/content/docs/meta.json rename to docs/content/docs/meta.json diff --git a/docs-next/content/docs/typed-and-reactive/index.mdx b/docs/content/docs/typed-and-reactive/index.mdx similarity index 100% rename from docs-next/content/docs/typed-and-reactive/index.mdx rename to docs/content/docs/typed-and-reactive/index.mdx diff --git a/docs-next/content/docs/typed-and-reactive/meta.json b/docs/content/docs/typed-and-reactive/meta.json similarity index 100% rename from docs-next/content/docs/typed-and-reactive/meta.json rename to docs/content/docs/typed-and-reactive/meta.json diff --git 
a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts deleted file mode 100644 index b84298b..0000000 --- a/docs/docusaurus.config.ts +++ /dev/null @@ -1,147 +0,0 @@ -import type * as Preset from '@docusaurus/preset-classic'; -import type { Config, Plugin } from '@docusaurus/types'; -import { themes as prismThemes } from 'prism-react-renderer'; - -const config: Config = { - title: 'dagron', - tagline: 'High-performance DAG execution engine for Python, powered by Rust', - favicon: 'img/favicon.ico', - - url: 'https://byteveda.github.io', - baseUrl: '/dagron/', - - organizationName: 'ByteVeda', - projectName: 'dagron', - - onBrokenLinks: 'throw', - - i18n: { - defaultLocale: 'en', - locales: ['en'], - }, - - markdown: { - mermaid: true, - hooks: { - onBrokenMarkdownLinks: 'warn', - }, - }, - - themes: ['@docusaurus/theme-mermaid'], - - plugins: [ - function ignoreVscodeLanguageServerWarning(): Plugin { - return { - name: 'ignore-vscode-languageserver-warning', - configureWebpack() { - return { - ignoreWarnings: [{ module: /vscode-languageserver-types/ }], - }; - }, - }; - }, - ], - - presets: [ - [ - 'classic', - { - docs: { - path: 'pages', - routeBasePath: '/', - sidebarPath: './sidebars.ts', - }, - blog: false, - theme: { - customCss: './src/css/custom.css', - }, - } satisfies Preset.Options, - ], - ], - - themeConfig: { - image: 'img/dagron-social-card.png', - navbar: { - title: 'dagron', - items: [ - { - type: 'docSidebar', - sidebarId: 'guideSidebar', - position: 'left', - label: 'Guide', - }, - { - type: 'docSidebar', - sidebarId: 'apiSidebar', - position: 'left', - label: 'API Reference', - }, - { - href: 'https://github.com/ByteVeda/dagron/blob/master/CHANGELOG.md', - label: 'Changelog', - position: 'right', - }, - { - href: 'https://github.com/ByteVeda/dagron/blob/master/CONTRIBUTING.md', - label: 'Contributing', - position: 'right', - }, - { - href: 'https://github.com/ByteVeda/dagron', - label: 'GitHub', - position: 'right', - }, - ], - }, - footer: { - 
style: 'dark', - links: [ - { - title: 'Documentation', - items: [ - { label: 'Guide', to: '/guide/getting-started' }, - { label: 'API Reference', to: '/api/core/core' }, - ], - }, - { - title: 'More', - items: [ - { label: 'GitHub', href: 'https://github.com/ByteVeda/dagron' }, - { label: 'PyPI', href: 'https://pypi.org/project/dagron/' }, - { - label: 'Changelog', - href: 'https://github.com/ByteVeda/dagron/blob/master/CHANGELOG.md', - }, - { - label: 'Contributing', - href: 'https://github.com/ByteVeda/dagron/blob/master/CONTRIBUTING.md', - }, - ], - }, - ], - copyright: `Copyright © ${new Date().getFullYear()} dagron contributors.`, - }, - prism: { - theme: prismThemes.github, - darkTheme: prismThemes.dracula, - additionalLanguages: [ - 'python', - 'rust', - 'typescript', - 'bash', - 'json', - 'toml', - ], - }, - mermaid: { - theme: { light: 'default', dark: 'dark' }, - options: {}, - }, - colorMode: { - defaultMode: 'dark', - respectPrefersColorScheme: true, - }, - } satisfies Preset.ThemeConfig, -}; - -export default config; diff --git a/docs-next/next.config.mjs b/docs/next.config.mjs similarity index 100% rename from docs-next/next.config.mjs rename to docs/next.config.mjs diff --git a/docs/package-lock.json b/docs/package-lock.json deleted file mode 100644 index 70f6f58..0000000 --- a/docs/package-lock.json +++ /dev/null @@ -1,19867 +0,0 @@ -{ - "name": "docs", - "version": "0.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "docs", - "version": "0.0.0", - "dependencies": { - "@docusaurus/core": "^3.9.2", - "@docusaurus/preset-classic": "^3.9.2", - "@docusaurus/theme-mermaid": "^3.9.2", - "@mdx-js/react": "^3.0.0", - "clsx": "^2.0.0", - "prism-react-renderer": "^2.3.0", - "react": "^19.0.0", - "react-dom": "^19.0.0" - }, - "devDependencies": { - "@biomejs/biome": "2.4.10", - "@docusaurus/module-type-aliases": "^3.9.2", - "@docusaurus/tsconfig": "^3.9.2", - "@docusaurus/types": "^3.9.2", - "typescript": "~5.6.2" - }, - 
"engines": { - "node": ">=18.0" - } - }, - "node_modules/@algolia/abtesting": { - "version": "1.15.1", - "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.15.1.tgz", - "integrity": "sha512-2yuIC48rUuHGhU1U5qJ9kJHaxYpJ0jpDHJVI5ekOxSMYXlH4+HP+pA31G820lsAznfmu2nzDV7n5RO44zIY1zw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/autocomplete-core": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.19.2.tgz", - "integrity": "sha512-mKv7RyuAzXvwmq+0XRK8HqZXt9iZ5Kkm2huLjgn5JoCPtDy+oh9yxUMfDDaVCw0oyzZ1isdJBc7l9nuCyyR7Nw==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-plugin-algolia-insights": "1.19.2", - "@algolia/autocomplete-shared": "1.19.2" - } - }, - "node_modules/@algolia/autocomplete-plugin-algolia-insights": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.19.2.tgz", - "integrity": "sha512-TjxbcC/r4vwmnZaPwrHtkXNeqvlpdyR+oR9Wi2XyfORkiGkLTVhX2j+O9SaCCINbKoDfc+c2PB8NjfOnz7+oKg==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-shared": "1.19.2" - }, - "peerDependencies": { - "search-insights": ">= 1 < 3" - } - }, - "node_modules/@algolia/autocomplete-shared": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.19.2.tgz", - "integrity": "sha512-jEazxZTVD2nLrC+wYlVHQgpBoBB5KPStrJxLzsIFl6Kqd1AlG9sIAGl39V5tECLpIQzB3Qa2T6ZPJ1ChkwMK/w==", - "license": "MIT", - "peerDependencies": { - "@algolia/client-search": ">= 4.9.1 < 6", - "algoliasearch": ">= 4.9.1 < 6" - } - }, - "node_modules/@algolia/client-abtesting": { - "version": "5.49.1", - 
"resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.49.1.tgz", - "integrity": "sha512-h6M7HzPin+45/l09q0r2dYmocSSt2MMGOOk5c4O5K/bBBlEwf1BKfN6z+iX4b8WXcQQhf7rgQwC52kBZJt/ZZw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-analytics": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.49.1.tgz", - "integrity": "sha512-048T9/Z8OeLmTk8h76QUqaNFp7Rq2VgS2Zm6Y2tNMYGQ1uNuzePY/udB5l5krlXll7ZGflyCjFvRiOtlPZpE9g==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-common": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.49.1.tgz", - "integrity": "sha512-vp5/a9ikqvf3mn9QvHN8PRekn8hW34aV9eX+O0J5mKPZXeA6Pd5OQEh2ZWf7gJY6yyfTlLp5LMFzQUAU+Fpqpg==", - "license": "MIT", - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-insights": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.49.1.tgz", - "integrity": "sha512-B6N7PgkvYrul3bntTz/l6uXnhQ2bvP+M7NqTcayh681tSqPaA5cJCUBp/vrP7vpPRpej4Eeyx2qz5p0tE/2N2g==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-personalization": { - "version": "5.49.1", - "resolved": 
"https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.49.1.tgz", - "integrity": "sha512-v+4DN+lkYfBd01Hbnb9ZrCHe7l+mvihyx218INRX/kaCXROIWUDIT1cs3urQxfE7kXBFnLsqYeOflQALv/gA5w==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-query-suggestions": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.49.1.tgz", - "integrity": "sha512-Un11cab6ZCv0W+Jiak8UktGIqoa4+gSNgEZNfG8m8eTsXGqwIEr370H3Rqwj87zeNSlFpH2BslMXJ/cLNS1qtg==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-search": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.49.1.tgz", - "integrity": "sha512-Nt9hri7nbOo0RipAsGjIssHkpLMHHN/P7QqENywAq5TLsoYDzUyJGny8FEiD/9KJUxtGH8blGpMedilI6kK3rA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/events": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz", - "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==", - "license": "MIT" - }, - "node_modules/@algolia/ingestion": { - "version": "1.49.1", - "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.49.1.tgz", - 
"integrity": "sha512-b5hUXwDqje0Y4CpU6VL481DXgPgxpTD5sYMnfQTHKgUispGnaCLCm2/T9WbJo1YNUbX3iHtYDArp804eD6CmRQ==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/monitoring": { - "version": "1.49.1", - "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.49.1.tgz", - "integrity": "sha512-bvrXwZ0WsL3rN6Q4m4QqxsXFCo6WAew7sAdrpMQMK4Efn4/W920r9ptOuckejOSSvyLr9pAWgC5rsHhR2FYuYw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/recommend": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.49.1.tgz", - "integrity": "sha512-h2yz3AGeGkQwNgbLmoe3bxYs8fac4An1CprKTypYyTU/k3Q+9FbIvJ8aS1DoBKaTjSRZVoyQS7SZQio6GaHbZw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-browser-xhr": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.49.1.tgz", - "integrity": "sha512-2UPyRuUR/qpqSqH8mxFV5uBZWEpxhGPHLlx9Xf6OVxr79XO2ctzZQAhsmTZ6X22x+N8MBWpB9UEky7YU2HGFgA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-fetch": { - "version": "5.49.1", - "resolved": 
"https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.49.1.tgz", - "integrity": "sha512-N+xlE4lN+wpuT+4vhNEwPVlrfN+DWAZmSX9SYhbz986Oq8AMsqdntOqUyiOXVxYsQtfLwmiej24vbvJGYv1Qtw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-node-http": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.49.1.tgz", - "integrity": "sha512-zA5bkUOB5PPtTr182DJmajCiizHp0rCJQ0Chf96zNFvkdESKYlDeYA3tQ7r2oyHbu/8DiohAQ5PZ85edctzbXA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@antfu/install-pkg": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz", - "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==", - "license": "MIT", - "dependencies": { - "package-manager-detector": "^1.3.0", - "tinyexec": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", - "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.28.5", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", - "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - 
"version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", - "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.29.0", - "@babel/generator": "^7.29.0", - "@babel/helper-compilation-targets": "^7.28.6", - "@babel/helper-module-transforms": "^7.28.6", - "@babel/helpers": "^7.28.6", - "@babel/parser": "^7.29.0", - "@babel/template": "^7.28.6", - "@babel/traverse": "^7.29.0", - "@babel/types": "^7.29.0", - "@jridgewell/remapping": "^2.3.5", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/generator": { - "version": "7.29.1", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", - "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.29.0", - "@babel/types": "^7.29.0", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": 
"sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", - "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.28.6", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.6.tgz", - "integrity": "sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.28.5", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.28.6", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.6", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - 
"node_modules/@babel/helper-create-class-features-plugin/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.28.5.tgz", - "integrity": "sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "regexpu-core": "^6.3.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.6.tgz", - "integrity": "sha512-mOAsxeeKkUKayvZR3HeTYD/fICpCPLJrU5ZjelT/PA6WHtNDBOE436YiaEUvHN454bRM3CebhDsIpieCc4texA==", - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "debug": "^4.4.3", - "lodash.debounce": "^4.0.8", - "resolve": "^1.22.11" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "resolved": 
"https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", - "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.28.5", - "@babel/types": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", - "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.28.6", - "@babel/types": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", - "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.28.6", - "@babel/helper-validator-identifier": "^7.28.5", - "@babel/traverse": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", - "integrity": 
"sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", - "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.27.1.tgz", - "integrity": "sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-wrap-function": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.28.6.tgz", - "integrity": "sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==", - "license": "MIT", - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.28.5", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/traverse": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", - 
"integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", - "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.28.6.tgz", - "integrity": "sha512-z+PwLziMNBeSQJonizz2AGnndLsP2DeGHIxDAn+wdHOGuo4Fo1x1HBPPXeE9TAOPHNNWQKCSlA2VZyYyyibDnQ==", - "license": "MIT", - "dependencies": { - "@babel/template": "^7.28.6", - "@babel/traverse": "^7.28.6", - "@babel/types": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", - "integrity": 
"sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", - "license": "MIT", - "dependencies": { - "@babel/template": "^7.28.6", - "@babel/types": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", - "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.29.0" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.28.5.tgz", - "integrity": "sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.27.1.tgz", - "integrity": "sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.27.1.tgz", - "integrity": "sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.27.1.tgz", - "integrity": "sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.13.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.28.6.tgz", - "integrity": "sha512-a0aBScVTlNaiUe35UtfxAN7A/tehvvG4/ByO6+46VPKTRSlfnAFsgKy0FUh+qAkQrDTmhDkT+IBOKlOoMUxQ0g==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/traverse": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", - "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-dynamic-import": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", - "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.28.6.tgz", - "integrity": "sha512-pSJUpFHdx9z5nqTSirOCMtYVP2wFgoWhP0p3g8ONK/4IHhLIBd0B9NYqAvIUAhq+OkhO4VM1tENCt0cjlsNShw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.28.6.tgz", - "integrity": "sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.28.6", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", - "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", - "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-unicode-sets-regex": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", - "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.27.1.tgz", - "integrity": "sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.29.0.tgz", - "integrity": "sha512-va0VdWro4zlBr2JsXC+ofCPB2iG12wPtVGTWFx2WLDOM3nYQZZIGP82qku2eW/JR83sD+k2k+CsNtyEbUqhU6w==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-remap-async-to-generator": "^7.27.1", - "@babel/traverse": "^7.29.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.28.6.tgz", - "integrity": "sha512-ilTRcmbuXjsMmcZ3HASTe4caH5Tpo93PkTxF9oG2VZsSWsahydmcEHhix9Ik122RcTnZnUzPbmux4wh1swfv7g==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-remap-async-to-generator": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.27.1.tgz", - "integrity": "sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.28.6.tgz", - 
"integrity": "sha512-tt/7wOtBmwHPNMPu7ax4pdPz6shjFrmHDghvNC+FG9Qvj7D6mJcoRQIF5dy4njmxR941l6rgtvfSB2zX3VlUIw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.28.6.tgz", - "integrity": "sha512-dY2wS3I2G7D697VHndN91TJr8/AAfXQNt5ynCTI/MpxMsSzHp+52uNivYT5wCPax3whc47DR8Ba7cmlQMg24bw==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.28.6.tgz", - "integrity": "sha512-rfQ++ghVwTWTqQ7w8qyDxL1XGihjBss4CmTgGRCTAC9RIbhVpyp4fOeZtta0Lbf+dTNIVJer6ych2ibHwkZqsQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0" - } - }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.28.6.tgz", - "integrity": "sha512-EF5KONAqC5zAqT783iMGuM2ZtmEBy+mJMOKl2BCvPZ2lVrwvXnB6o+OBWCS+CoeCCpVRF2sA2RBKUxvT8tQT5Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-compilation-targets": "^7.28.6", - "@babel/helper-globals": "^7.28.0", - "@babel/helper-plugin-utils": "^7.28.6", - 
"@babel/helper-replace-supers": "^7.28.6", - "@babel/traverse": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.28.6.tgz", - "integrity": "sha512-bcc3k0ijhHbc2lEfpFHgx7eYw9KNXqOerKWfzbxEHUGKnS3sz9C4CNL9OiFN1297bDNfUiSO7DaLzbvHQQQ1BQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/template": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.28.5.tgz", - "integrity": "sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.28.6.tgz", - "integrity": "sha512-SljjowuNKB7q5Oayv4FoPzeB74g3QgLt8IVJw9ADvWy3QnUb/01aw8I4AVv8wYnPvQz2GDDZ/g3GhcNyDBI4Bg==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.27.1.tgz", - "integrity": "sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.29.0.tgz", - "integrity": "sha512-zBPcW2lFGxdiD8PUnPwJjag2J9otbcLQzvbiOzDxpYXyCuYX9agOwMPGn1prVH0a4qzhCKu24rlH4c1f7yA8rw==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.27.1.tgz", - "integrity": "sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-explicit-resource-management": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-explicit-resource-management/-/plugin-transform-explicit-resource-management-7.28.6.tgz", - "integrity": "sha512-Iao5Konzx2b6g7EPqTy40UZbcdXE126tTxVFr/nAIj+WItNxjKSYTEw3RC+A2/ZetmdJsgueL1KhaMCQHkLPIg==", - "license": "MIT", - "dependencies": { - 
"@babel/helper-plugin-utils": "^7.28.6", - "@babel/plugin-transform-destructuring": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.28.6.tgz", - "integrity": "sha512-WitabqiGjV/vJ0aPOLSFfNY1u9U3R7W36B03r5I2KoNix+a3sOhJ3pKFB3R5It9/UiK78NiO0KE9P21cMhlPkw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.27.1.tgz", - "integrity": "sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.27.1.tgz", - "integrity": "sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.27.1.tgz", - "integrity": "sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.28.6.tgz", - "integrity": "sha512-Nr+hEN+0geQkzhbdgQVPoqr47lZbm+5fCUmO70722xJZd0Mvb59+33QLImGj6F+DkK3xgDi1YVysP8whD6FQAw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.27.1.tgz", - "integrity": "sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.28.6.tgz", - "integrity": "sha512-+anKKair6gpi8VsM/95kmomGNMD0eLz1NQ8+Pfw5sAwWH9fGYXT50E55ZpV0pHUHWf6IUTWPM+f/7AAff+wr9A==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - 
"peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.27.1.tgz", - "integrity": "sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.27.1.tgz", - "integrity": "sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.28.6.tgz", - "integrity": "sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.29.0.tgz", - "integrity": 
"sha512-PrujnVFbOdUpw4UHiVwKvKRLMMic8+eC0CuNlxjsyZUiBjhFdPsewdXCkveh2KqBA9/waD0W1b4hXSOBQJezpQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-validator-identifier": "^7.28.5", - "@babel/traverse": "^7.29.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.27.1.tgz", - "integrity": "sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.29.0.tgz", - "integrity": "sha512-1CZQA5KNAD6ZYQLPw7oi5ewtDNxH/2vuCh+6SmvgDfhumForvs8a1o9n0UrEoBD8HU4djO2yWngTQlXl1NDVEQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.27.1.tgz", - "integrity": "sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - 
"node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.28.6.tgz", - "integrity": "sha512-3wKbRgmzYbw24mDJXT7N+ADXw8BC/imU9yo9c9X9NKaLF1fW+e5H1U5QjMUBe4Qo4Ox/o++IyUkl1sVCLgevKg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.28.6.tgz", - "integrity": "sha512-SJR8hPynj8outz+SlStQSwvziMN4+Bq99it4tMIf5/Caq+3iOc0JtKyse8puvyXkk3eFRIA5ID/XfunGgO5i6w==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.28.6.tgz", - "integrity": "sha512-5rh+JR4JBC4pGkXLAcYdLHZjXudVxWMXbB6u6+E9lRL5TrGVbHt1TjxGbZ8CkmYw9zjkB7jutzOROArsqtncEA==", - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/plugin-transform-destructuring": "^7.28.5", - "@babel/plugin-transform-parameters": "^7.27.7", - "@babel/traverse": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.27.1.tgz", - "integrity": "sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.28.6.tgz", - "integrity": "sha512-R8ja/Pyrv0OGAvAXQhSTmWyPJPml+0TMqXlO5w+AsMEiwb2fg3WkOvob7UxFSL3OIttFSGSRFKQsOhJ/X6HQdQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.28.6.tgz", - "integrity": "sha512-A4zobikRGJTsX9uqVFdafzGkqD30t26ck2LmOzAuLL8b2x6k3TIqRiT2xVvA9fNmFeTX484VpsdgmKNA0bS23w==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.27.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.27.7.tgz", - "integrity": "sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - 
"node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.28.6.tgz", - "integrity": "sha512-piiuapX9CRv7+0st8lmuUlRSmX6mBcVeNQ1b4AYzJxfCMuBfB0vBXDiGSmm03pKJw1v6cZ8KSeM+oUnM6yAExg==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.28.6.tgz", - "integrity": "sha512-b97jvNSOb5+ehyQmBpmhOCiUC5oVK4PMnpRvO7+ymFBoqYjeDHIU9jnrNUuwHOiL9RpGDoKBpSViarV+BU+eVA==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-create-class-features-plugin": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.27.1.tgz", - "integrity": "sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-constant-elements": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.27.1.tgz", - "integrity": "sha512-edoidOjl/ZxvYo4lSBOQGDSyToYVkTAwyVoa2tkuYTSmjrB1+uAedoL5iROVLXkxH+vRgA7uP4tMg2pUJpZ3Ug==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-display-name": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.28.0.tgz", - "integrity": "sha512-D6Eujc2zMxKjfa4Zxl4GHMsmhKKZ9VpcqIchJLvwTxad9zWIYulwYItBovpDOoNLISpcZSXoDJ5gaGbQUDqViA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.28.6.tgz", - "integrity": "sha512-61bxqhiRfAACulXSLd/GxqmAedUSrRZIu/cbaT18T1CetkTmtDN15it7i80ru4DVqRK1WMxQhXs+Lf9kajm5Ow==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-module-imports": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/plugin-syntax-jsx": "^7.28.6", - "@babel/types": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx-development": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.27.1.tgz", - "integrity": "sha512-ykDdF5yI4f1WrAolLqeF3hmYU12j9ntLQl/AOG1HAS21jxyg1Q0/J/tpREuYLfatGdGmXp/3yS0ZA76kOlVq9Q==", - "license": "MIT", - "dependencies": 
{ - "@babel/plugin-transform-react-jsx": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-pure-annotations": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.27.1.tgz", - "integrity": "sha512-JfuinvDOsD9FVMTHpzA/pBLisxpv1aSf+OIV8lgH3MuWrks19R27e6a6DipIg4aX1Zm9Wpb04p8wljfKrVSnPA==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.29.0.tgz", - "integrity": "sha512-FijqlqMA7DmRdg/aINBSs04y8XNTYw/lr1gJ2WsmBnnaNw1iS43EPkJW+zK7z65auG3AWRFXWj+NcTQwYptUog==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regexp-modifiers": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.28.6.tgz", - "integrity": "sha512-QGWAepm9qxpaIs7UM9FvUSnCGlb8Ua1RhyM4/veAxLwt3gMat/LSGrZixyuj4I6+Kn9iwvqCyPTtbdxanYoWYg==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.27.1", - "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.27.1.tgz", - "integrity": "sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-runtime": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.29.0.tgz", - "integrity": "sha512-jlaRT5dJtMaMCV6fAuLbsQMSwz/QkvaHOHOSXRitGGwSpR1blCY4KUKoyP2tYO8vJcqYe8cEj96cqSztv3uF9w==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "babel-plugin-polyfill-corejs2": "^0.4.14", - "babel-plugin-polyfill-corejs3": "^0.13.0", - "babel-plugin-polyfill-regenerator": "^0.6.5", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.27.1.tgz", - "integrity": "sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-spread": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.28.6.tgz", - "integrity": "sha512-9U4QObUC0FtJl05AsUcodau/RWDytrU6uKgkxu09mLR9HLDAtUMoPuuskm5huQsoktmsYpI+bGmq+iapDcriKA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.27.1.tgz", - "integrity": "sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.27.1.tgz", - "integrity": "sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.27.1.tgz", - "integrity": "sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - 
"engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typescript": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.28.6.tgz", - "integrity": "sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==", - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-create-class-features-plugin": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-syntax-typescript": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.27.1.tgz", - "integrity": "sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.28.6.tgz", - "integrity": "sha512-4Wlbdl/sIZjzi/8St0evF0gEZrgOswVO6aOzqxh1kDZOl9WmLrHq2HtGhnOJZmHZYKP8WZ1MDLCt5DAWwRo57A==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-regex": { - 
"version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.27.1.tgz", - "integrity": "sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.28.6.tgz", - "integrity": "sha512-/wHc/paTUmsDYN7SZkpWxogTOBNnlx7nBQYfy6JJlCT7G3mVhltk3e++N7zV0XfgGsrqBxd4rJQt9H16I21Y1Q==", - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.28.5", - "@babel/helper-plugin-utils": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/preset-env": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.29.0.tgz", - "integrity": "sha512-fNEdfc0yi16lt6IZo2Qxk3knHVdfMYX33czNb4v8yWhemoBhibCpQK/uYHtSKIiO+p/zd3+8fYVXhQdOVV608w==", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.29.0", - "@babel/helper-compilation-targets": "^7.28.6", - "@babel/helper-plugin-utils": "^7.28.6", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.28.5", - "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.27.1", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.27.1", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.27.1", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.28.6", - 
"@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-import-assertions": "^7.28.6", - "@babel/plugin-syntax-import-attributes": "^7.28.6", - "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.27.1", - "@babel/plugin-transform-async-generator-functions": "^7.29.0", - "@babel/plugin-transform-async-to-generator": "^7.28.6", - "@babel/plugin-transform-block-scoped-functions": "^7.27.1", - "@babel/plugin-transform-block-scoping": "^7.28.6", - "@babel/plugin-transform-class-properties": "^7.28.6", - "@babel/plugin-transform-class-static-block": "^7.28.6", - "@babel/plugin-transform-classes": "^7.28.6", - "@babel/plugin-transform-computed-properties": "^7.28.6", - "@babel/plugin-transform-destructuring": "^7.28.5", - "@babel/plugin-transform-dotall-regex": "^7.28.6", - "@babel/plugin-transform-duplicate-keys": "^7.27.1", - "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.29.0", - "@babel/plugin-transform-dynamic-import": "^7.27.1", - "@babel/plugin-transform-explicit-resource-management": "^7.28.6", - "@babel/plugin-transform-exponentiation-operator": "^7.28.6", - "@babel/plugin-transform-export-namespace-from": "^7.27.1", - "@babel/plugin-transform-for-of": "^7.27.1", - "@babel/plugin-transform-function-name": "^7.27.1", - "@babel/plugin-transform-json-strings": "^7.28.6", - "@babel/plugin-transform-literals": "^7.27.1", - "@babel/plugin-transform-logical-assignment-operators": "^7.28.6", - "@babel/plugin-transform-member-expression-literals": "^7.27.1", - "@babel/plugin-transform-modules-amd": "^7.27.1", - "@babel/plugin-transform-modules-commonjs": "^7.28.6", - "@babel/plugin-transform-modules-systemjs": "^7.29.0", - "@babel/plugin-transform-modules-umd": "^7.27.1", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.29.0", - "@babel/plugin-transform-new-target": "^7.27.1", - 
"@babel/plugin-transform-nullish-coalescing-operator": "^7.28.6", - "@babel/plugin-transform-numeric-separator": "^7.28.6", - "@babel/plugin-transform-object-rest-spread": "^7.28.6", - "@babel/plugin-transform-object-super": "^7.27.1", - "@babel/plugin-transform-optional-catch-binding": "^7.28.6", - "@babel/plugin-transform-optional-chaining": "^7.28.6", - "@babel/plugin-transform-parameters": "^7.27.7", - "@babel/plugin-transform-private-methods": "^7.28.6", - "@babel/plugin-transform-private-property-in-object": "^7.28.6", - "@babel/plugin-transform-property-literals": "^7.27.1", - "@babel/plugin-transform-regenerator": "^7.29.0", - "@babel/plugin-transform-regexp-modifiers": "^7.28.6", - "@babel/plugin-transform-reserved-words": "^7.27.1", - "@babel/plugin-transform-shorthand-properties": "^7.27.1", - "@babel/plugin-transform-spread": "^7.28.6", - "@babel/plugin-transform-sticky-regex": "^7.27.1", - "@babel/plugin-transform-template-literals": "^7.27.1", - "@babel/plugin-transform-typeof-symbol": "^7.27.1", - "@babel/plugin-transform-unicode-escapes": "^7.27.1", - "@babel/plugin-transform-unicode-property-regex": "^7.28.6", - "@babel/plugin-transform-unicode-regex": "^7.27.1", - "@babel/plugin-transform-unicode-sets-regex": "^7.28.6", - "@babel/preset-modules": "0.1.6-no-external-plugins", - "babel-plugin-polyfill-corejs2": "^0.4.15", - "babel-plugin-polyfill-corejs3": "^0.14.0", - "babel-plugin-polyfill-regenerator": "^0.6.6", - "core-js-compat": "^3.48.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-env/node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.14.0.tgz", - "integrity": "sha512-AvDcMxJ34W4Wgy4KBIIePQTAOP1Ie2WFwkQp3dB7FQ/f0lI5+nM96zUnYEOE1P9sEg0es5VCP0HxiWu5fUHZAQ==", - "license": "MIT", - "dependencies": { - 
"@babel/helper-define-polyfill-provider": "^0.6.6", - "core-js-compat": "^3.48.0" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/preset-env/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/preset-modules": { - "version": "0.1.6-no-external-plugins", - "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", - "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/types": "^7.4.4", - "esutils": "^2.0.2" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/preset-react": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.28.5.tgz", - "integrity": "sha512-Z3J8vhRq7CeLjdC58jLv4lnZ5RKFUJWqH5emvxmv9Hv3BD1T9R/Im713R4MTKwvFaV74ejZ3sM01LyEKk4ugNQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-transform-react-display-name": "^7.28.0", - "@babel/plugin-transform-react-jsx": "^7.27.1", - "@babel/plugin-transform-react-jsx-development": "^7.27.1", - "@babel/plugin-transform-react-pure-annotations": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-typescript": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.28.5.tgz", - "integrity": 
"sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-option": "^7.27.1", - "@babel/plugin-syntax-jsx": "^7.27.1", - "@babel/plugin-transform-modules-commonjs": "^7.27.1", - "@babel/plugin-transform-typescript": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", - "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/runtime-corejs3": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.29.0.tgz", - "integrity": "sha512-TgUkdp71C9pIbBcHudc+gXZnihEDOjUAmXO1VO4HHGES7QLZcShR0stfKIxLSNIYx2fqhmJChOjm/wkF8wv4gA==", - "license": "MIT", - "dependencies": { - "core-js-pure": "^3.48.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.28.6", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", - "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.28.6", - "@babel/parser": "^7.28.6", - "@babel/types": "^7.28.6" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", - "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.29.0", - "@babel/generator": "^7.29.0", - 
"@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.29.0", - "@babel/template": "^7.28.6", - "@babel/types": "^7.29.0", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", - "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@biomejs/biome": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.4.10.tgz", - "integrity": "sha512-xxA3AphFQ1geij4JTHXv4EeSTda1IFn22ye9LdyVPoJU19fNVl0uzfEuhsfQ4Yue/0FaLs2/ccVi4UDiE7R30w==", - "dev": true, - "license": "MIT OR Apache-2.0", - "bin": { - "biome": "bin/biome" - }, - "engines": { - "node": ">=14.21.3" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/biome" - }, - "optionalDependencies": { - "@biomejs/cli-darwin-arm64": "2.4.10", - "@biomejs/cli-darwin-x64": "2.4.10", - "@biomejs/cli-linux-arm64": "2.4.10", - "@biomejs/cli-linux-arm64-musl": "2.4.10", - "@biomejs/cli-linux-x64": "2.4.10", - "@biomejs/cli-linux-x64-musl": "2.4.10", - "@biomejs/cli-win32-arm64": "2.4.10", - "@biomejs/cli-win32-x64": "2.4.10" - } - }, - "node_modules/@biomejs/cli-darwin-arm64": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.10.tgz", - "integrity": "sha512-vuzzI1cWqDVzOMIkYyHbKqp+AkQq4K7k+UCXWpkYcY/HDn1UxdsbsfgtVpa40shem8Kax4TLDLlx8kMAecgqiw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-darwin-x64": { - "version": "2.4.10", - "resolved": 
"https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.10.tgz", - "integrity": "sha512-14fzASRo+BPotwp7nWULy2W5xeUyFnTaq1V13Etrrxkrih+ez/2QfgFm5Ehtf5vSjtgx/IJycMMpn5kPd5ZNaA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-linux-arm64": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.10.tgz", - "integrity": "sha512-7MH1CMW5uuxQ/s7FLST63qF8B3Hgu2HRdZ7tA1X1+mk+St4JOuIrqdhIBnnyqeyWJNI+Bww7Es5QZ0wIc1Cmkw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-linux-arm64-musl": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.10.tgz", - "integrity": "sha512-WrJY6UuiSD/Dh+nwK2qOTu8kdMDlLV3dLMmychIghHPAysWFq1/DGC1pVZx8POE3ZkzKR3PUUnVrtZfMfaJjyQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-linux-x64": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.10.tgz", - "integrity": "sha512-tZLvEEi2u9Xu1zAqRjTcpIDGVtldigVvzug2fTuPG0ME/g8/mXpRPcNgLB22bGn6FvLJpHHnqLnwliOu8xjYrg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-linux-x64-musl": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.10.tgz", - "integrity": "sha512-kDTi3pI6PBN6CiczsWYOyP2zk0IJI08EWEQyDMQWW221rPaaEz6FvjLhnU07KMzLv8q3qSuoB93ua6inSQ55Tw==", - 
"cpu": [ - "x64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-win32-arm64": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.10.tgz", - "integrity": "sha512-umwQU6qPzH+ISTf/eHyJ/QoQnJs3V9Vpjz2OjZXe9MVBZ7prgGafMy7yYeRGnlmDAn87AKTF3Q6weLoMGpeqdQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@biomejs/cli-win32-x64": { - "version": "2.4.10", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.4.10.tgz", - "integrity": "sha512-aW/JU5GuyH4uxMrNYpoC2kjaHlyJGLgIa3XkhPEZI0uKhZhJZU8BuEyJmvgzSPQNGozBwWjC972RaNdcJ9KyJg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT OR Apache-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=14.21.3" - } - }, - "node_modules/@braintree/sanitize-url": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.2.tgz", - "integrity": "sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==", - "license": "MIT" - }, - "node_modules/@chevrotain/cst-dts-gen": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.1.2.tgz", - "integrity": "sha512-XTsjvDVB5nDZBQB8o0o/0ozNelQtn2KrUVteIHSlPd2VAV2utEb6JzyCJaJ8tGxACR4RiBNWy5uYUHX2eji88Q==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/gast": "11.1.2", - "@chevrotain/types": "11.1.2", - "lodash-es": "4.17.23" - } - }, - "node_modules/@chevrotain/gast": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.2.tgz", - "integrity": 
"sha512-Z9zfXR5jNZb1Hlsd/p+4XWeUFugrHirq36bKzPWDSIacV+GPSVXdk+ahVWZTwjhNwofAWg/sZg58fyucKSQx5g==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/types": "11.1.2", - "lodash-es": "4.17.23" - } - }, - "node_modules/@chevrotain/regexp-to-ast": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.2.tgz", - "integrity": "sha512-nMU3Uj8naWer7xpZTYJdxbAs6RIv/dxYzkYU8GSwgUtcAAlzjcPfX1w+RKRcYG8POlzMeayOQ/znfwxEGo5ulw==", - "license": "Apache-2.0" - }, - "node_modules/@chevrotain/types": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.1.2.tgz", - "integrity": "sha512-U+HFai5+zmJCkK86QsaJtoITlboZHBqrVketcO2ROv865xfCMSFpELQoz1GkX5GzME8pTa+3kbKrZHQtI0gdbw==", - "license": "Apache-2.0" - }, - "node_modules/@chevrotain/utils": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.1.2.tgz", - "integrity": "sha512-4mudFAQ6H+MqBTfqLmU7G1ZwRzCLfJEooL/fsF6rCX5eePMbGhoy5n4g+G4vlh2muDcsCTJtL+uKbOzWxs5LHA==", - "license": "Apache-2.0" - }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/@csstools/cascade-layer-name-parser": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@csstools/cascade-layer-name-parser/-/cascade-layer-name-parser-2.0.5.tgz", - "integrity": "sha512-p1ko5eHgV+MgXFVa4STPKpvPxr6ReS8oS2jzTukjR74i5zJNyWO1ZM1m8YKBXnzDKWfBN1ztLYlHxbVemDD88A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - }, - 
"peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - } - }, - "node_modules/@csstools/color-helpers": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz", - "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - } - }, - "node_modules/@csstools/css-calc": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz", - "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - } - }, - "node_modules/@csstools/css-color-parser": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz", - "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "@csstools/color-helpers": "^5.1.0", - "@csstools/css-calc": "^2.1.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - } - }, - 
"node_modules/@csstools/css-parser-algorithms": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz", - "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@csstools/css-tokenizer": "^3.0.4" - } - }, - "node_modules/@csstools/css-tokenizer": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz", - "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/@csstools/media-query-list-parser": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@csstools/media-query-list-parser/-/media-query-list-parser-4.0.3.tgz", - "integrity": "sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - } - }, - "node_modules/@csstools/postcss-alpha-function": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-alpha-function/-/postcss-alpha-function-1.0.1.tgz", - "integrity": 
"sha512-isfLLwksH3yHkFXfCI2Gcaqg7wGGHZZwunoJzEZk0yKYIokgre6hYVFibKL3SYAoR1kBXova8LB+JoO5vZzi9w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-cascade-layers": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-5.0.2.tgz", - "integrity": "sha512-nWBE08nhO8uWl6kSAeCx4im7QfVko3zLrtgWZY4/bP87zrSPpSyN/3W3TDqz1jJuH+kbKOHXg5rJnK+ZVYcFFg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/selector-specificity": "^5.0.0", - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-cascade-layers/node_modules/@csstools/selector-specificity": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", - "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss-selector-parser": "^7.0.0" - } - }, - 
"node_modules/@csstools/postcss-cascade-layers/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@csstools/postcss-color-function": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function/-/postcss-color-function-4.0.12.tgz", - "integrity": "sha512-yx3cljQKRaSBc2hfh8rMZFZzChaFgwmO2JfFgFr1vMcF3C/uyy5I4RFIBOIWGq1D+XbKCG789CGkG6zzkLpagA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-color-function-display-p3-linear": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function-display-p3-linear/-/postcss-color-function-display-p3-linear-1.0.1.tgz", - "integrity": "sha512-E5qusdzhlmO1TztYzDIi8XPdPoYOjoTY6HBYBCYSj+Gn4gQRBlvjgPQXzfzuPQqt8EhkC/SzPKObg4Mbn8/xMg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - 
"@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-color-mix-function": { - "version": "3.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-function/-/postcss-color-mix-function-3.0.12.tgz", - "integrity": "sha512-4STERZfCP5Jcs13P1U5pTvI9SkgLgfMUMhdXW8IlJWkzOOOqhZIjcNhWtNJZes2nkBDsIKJ0CJtFtuaZ00moag==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-color-mix-variadic-function-arguments": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-variadic-function-arguments/-/postcss-color-mix-variadic-function-arguments-1.0.2.tgz", - "integrity": "sha512-rM67Gp9lRAkTo+X31DUqMEq+iK+EFqsidfecmhrteErxJZb6tUoJBVQca1Vn1GpDql1s1rD1pKcuYzMsg7Z1KQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - 
"node_modules/@csstools/postcss-content-alt-text": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/@csstools/postcss-content-alt-text/-/postcss-content-alt-text-2.0.8.tgz", - "integrity": "sha512-9SfEW9QCxEpTlNMnpSqFaHyzsiRpZ5J5+KqCu1u5/eEJAWsMhzT40qf0FIbeeglEvrGRMdDzAxMIz3wqoGSb+Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-contrast-color-function": { - "version": "2.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-contrast-color-function/-/postcss-contrast-color-function-2.0.12.tgz", - "integrity": "sha512-YbwWckjK3qwKjeYz/CijgcS7WDUCtKTd8ShLztm3/i5dhh4NaqzsbYnhm4bjrpFpnLZ31jVcbK8YL77z3GBPzA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-exponential-functions": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/@csstools/postcss-exponential-functions/-/postcss-exponential-functions-2.0.9.tgz", - "integrity": "sha512-abg2W/PI3HXwS/CZshSa79kNWNZHdJPMBXeZNyPQFbbj8sKO3jXxOt/wF7juJVjyDTc6JrvaUZYFcSBZBhaxjw==", 
- "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-font-format-keywords": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-4.0.0.tgz", - "integrity": "sha512-usBzw9aCRDvchpok6C+4TXC57btc4bJtmKQWOHQxOVKen1ZfVqBUuCZ/wuqdX5GHsD0NRSr9XTP+5ID1ZZQBXw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-gamut-mapping": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/@csstools/postcss-gamut-mapping/-/postcss-gamut-mapping-2.0.11.tgz", - "integrity": "sha512-fCpCUgZNE2piVJKC76zFsgVW1apF6dpYsqGyH8SIeCcM4pTEsRTWTLCaJIMKFEundsCKwY1rwfhtrio04RJ4Dw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-gradients-interpolation-method": { - "version": "5.0.12", - "resolved": 
"https://registry.npmjs.org/@csstools/postcss-gradients-interpolation-method/-/postcss-gradients-interpolation-method-5.0.12.tgz", - "integrity": "sha512-jugzjwkUY0wtNrZlFeyXzimUL3hN4xMvoPnIXxoZqxDvjZRiSh+itgHcVUWzJ2VwD/VAMEgCLvtaJHX+4Vj3Ow==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-hwb-function": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-hwb-function/-/postcss-hwb-function-4.0.12.tgz", - "integrity": "sha512-mL/+88Z53KrE4JdePYFJAQWFrcADEqsLprExCM04GDNgHIztwFzj0Mbhd/yxMBngq0NIlz58VVxjt5abNs1VhA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-ic-unit": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@csstools/postcss-ic-unit/-/postcss-ic-unit-4.0.4.tgz", - "integrity": "sha512-yQ4VmossuOAql65sCPppVO1yfb7hDscf4GseF0VCA/DTDaBc0Wtf8MTqVPfjGYlT5+2buokG0Gp7y0atYZpwjg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - 
{ - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-initial": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-initial/-/postcss-initial-2.0.1.tgz", - "integrity": "sha512-L1wLVMSAZ4wovznquK0xmC7QSctzO4D0Is590bxpGqhqjboLXYA16dWZpfwImkdOgACdQ9PqXsuRroW6qPlEsg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-is-pseudo-class": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-5.0.3.tgz", - "integrity": "sha512-jS/TY4SpG4gszAtIg7Qnf3AS2pjcUM5SzxpApOrlndMeGhIbaTzWBzzP/IApXoNWEW7OhcjkRT48jnAUIFXhAQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/selector-specificity": "^5.0.0", - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-is-pseudo-class/node_modules/@csstools/selector-specificity": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", - "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", - "funding": [ - { - 
"type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss-selector-parser": "^7.0.0" - } - }, - "node_modules/@csstools/postcss-is-pseudo-class/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@csstools/postcss-light-dark-function": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/@csstools/postcss-light-dark-function/-/postcss-light-dark-function-2.0.11.tgz", - "integrity": "sha512-fNJcKXJdPM3Lyrbmgw2OBbaioU7yuKZtiXClf4sGdQttitijYlZMD5K7HrC/eF83VRWRrYq6OZ0Lx92leV2LFA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-logical-float-and-clear": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-float-and-clear/-/postcss-logical-float-and-clear-3.0.0.tgz", - "integrity": "sha512-SEmaHMszwakI2rqKRJgE+8rpotFfne1ZS6bZqBoQIicFyV+xT1UF42eORPxJkVJVrH9C0ctUgwMSn3BLOIZldQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - 
"type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-logical-overflow": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-overflow/-/postcss-logical-overflow-2.0.0.tgz", - "integrity": "sha512-spzR1MInxPuXKEX2csMamshR4LRaSZ3UXVaRGjeQxl70ySxOhMpP2252RAFsg8QyyBXBzuVOOdx1+bVO5bPIzA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-logical-overscroll-behavior": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-overscroll-behavior/-/postcss-logical-overscroll-behavior-2.0.0.tgz", - "integrity": "sha512-e/webMjoGOSYfqLunyzByZj5KKe5oyVg/YSbie99VEaSDE2kimFm0q1f6t/6Jo+VVCQ/jbe2Xy+uX+C4xzWs4w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-logical-resize": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-resize/-/postcss-logical-resize-3.0.0.tgz", - "integrity": "sha512-DFbHQOFW/+I+MY4Ycd/QN6Dg4Hcbb50elIJCfnwkRTCX05G11SwViI5BbBlg9iHRl4ytB7pmY5ieAFk3ws7yyg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - 
"engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-logical-viewport-units": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-viewport-units/-/postcss-logical-viewport-units-3.0.4.tgz", - "integrity": "sha512-q+eHV1haXA4w9xBwZLKjVKAWn3W2CMqmpNpZUk5kRprvSiBEGMgrNH3/sJZ8UA3JgyHaOt3jwT9uFa4wLX4EqQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-media-minmax": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/@csstools/postcss-media-minmax/-/postcss-media-minmax-2.0.9.tgz", - "integrity": "sha512-af9Qw3uS3JhYLnCbqtZ9crTvvkR+0Se+bBqSr7ykAnl9yKhk6895z9rf+2F4dClIDJWxgn0iZZ1PSdkhrbs2ig==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/media-query-list-parser": "^4.0.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-media-queries-aspect-ratio-number-values": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@csstools/postcss-media-queries-aspect-ratio-number-values/-/postcss-media-queries-aspect-ratio-number-values-3.0.5.tgz", - "integrity": "sha512-zhAe31xaaXOY2Px8IYfoVTB3wglbJUVigGphFLj6exb7cjZRH9A6adyE22XfFK3P2PzwRk0VDeTJmaxpluyrDg==", - "funding": [ - { - "type": "github", 
- "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/media-query-list-parser": "^4.0.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-nested-calc": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-nested-calc/-/postcss-nested-calc-4.0.0.tgz", - "integrity": "sha512-jMYDdqrQQxE7k9+KjstC3NbsmC063n1FTPLCgCRS2/qHUbHM0mNy9pIn4QIiQGs9I/Bg98vMqw7mJXBxa0N88A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-normalize-display-values": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.1.tgz", - "integrity": "sha512-TQUGBuRvxdc7TgNSTevYqrL8oItxiwPDixk20qCB5me/W8uF7BPbhRrAvFuhEoywQp/woRsUZ6SJ+sU5idZAIA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-oklab-function": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-oklab-function/-/postcss-oklab-function-4.0.12.tgz", - "integrity": 
"sha512-HhlSmnE1NKBhXsTnNGjxvhryKtO7tJd1w42DKOGFD6jSHtYOrsJTQDKPMwvOfrzUAk8t7GcpIfRyM7ssqHpFjg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-position-area-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-position-area-property/-/postcss-position-area-property-1.0.0.tgz", - "integrity": "sha512-fUP6KR8qV2NuUZV3Cw8itx0Ep90aRjAZxAEzC3vrl6yjFv+pFsQbR18UuQctEKmA72K9O27CoYiKEgXxkqjg8Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-progressive-custom-properties": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-4.2.1.tgz", - "integrity": "sha512-uPiiXf7IEKtUQXsxu6uWtOlRMXd2QWWy5fhxHDnPdXKCQckPP3E34ZgDoZ62r2iT+UOgWsSbM4NvHE5m3mAEdw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - 
"node_modules/@csstools/postcss-property-rule-prelude-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-property-rule-prelude-list/-/postcss-property-rule-prelude-list-1.0.0.tgz", - "integrity": "sha512-IxuQjUXq19fobgmSSvUDO7fVwijDJaZMvWQugxfEUxmjBeDCVaDuMpsZ31MsTm5xbnhA+ElDi0+rQ7sQQGisFA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-random-function": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-random-function/-/postcss-random-function-2.0.1.tgz", - "integrity": "sha512-q+FQaNiRBhnoSNo+GzqGOIBKoHQ43lYz0ICrV+UudfWnEF6ksS6DsBIJSISKQT2Bvu3g4k6r7t0zYrk5pDlo8w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-relative-color-syntax": { - "version": "3.0.12", - "resolved": "https://registry.npmjs.org/@csstools/postcss-relative-color-syntax/-/postcss-relative-color-syntax-3.0.12.tgz", - "integrity": "sha512-0RLIeONxu/mtxRtf3o41Lq2ghLimw0w9ByLWnnEVuy89exmEEq8bynveBxNW3nyHqLAFEeNtVEmC1QK9MZ8Huw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": 
"MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-scope-pseudo-class": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-scope-pseudo-class/-/postcss-scope-pseudo-class-4.0.1.tgz", - "integrity": "sha512-IMi9FwtH6LMNuLea1bjVMQAsUhFxJnyLSgOp/cpv5hrzWmrUYU5fm0EguNDIIOHUqzXode8F/1qkC/tEo/qN8Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-scope-pseudo-class/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@csstools/postcss-sign-functions": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/@csstools/postcss-sign-functions/-/postcss-sign-functions-1.1.4.tgz", - "integrity": "sha512-P97h1XqRPcfcJndFdG95Gv/6ZzxUBBISem0IDqPZ7WMvc/wlO+yU0c5D/OCpZ5TJoTt63Ok3knGk64N+o6L2Pg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - 
"dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-stepped-value-functions": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-4.0.9.tgz", - "integrity": "sha512-h9btycWrsex4dNLeQfyU3y3w40LMQooJWFMm/SK9lrKguHDcFl4VMkncKKoXi2z5rM9YGWbUQABI8BT2UydIcA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-syntax-descriptor-syntax-production": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@csstools/postcss-syntax-descriptor-syntax-production/-/postcss-syntax-descriptor-syntax-production-1.0.1.tgz", - "integrity": "sha512-GneqQWefjM//f4hJ/Kbox0C6f2T7+pi4/fqTqOFGTL3EjnvOReTqO1qUQ30CaUjkwjYq9qZ41hzarrAxCc4gow==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-system-ui-font-family": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-system-ui-font-family/-/postcss-system-ui-font-family-1.0.0.tgz", - "integrity": 
"sha512-s3xdBvfWYfoPSBsikDXbuorcMG1nN1M6GdU0qBsGfcmNR0A/qhloQZpTxjA3Xsyrk1VJvwb2pOfiOT3at/DuIQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-text-decoration-shorthand": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-4.0.3.tgz", - "integrity": "sha512-KSkGgZfx0kQjRIYnpsD7X2Om9BUXX/Kii77VBifQW9Ih929hK0KNjVngHDH0bFB9GmfWcR9vJYJJRvw/NQjkrA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/color-helpers": "^5.1.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/postcss-trigonometric-functions": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-4.0.9.tgz", - "integrity": "sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-calc": "^2.1.4", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - 
"node_modules/@csstools/postcss-unset-value": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@csstools/postcss-unset-value/-/postcss-unset-value-4.0.0.tgz", - "integrity": "sha512-cBz3tOCI5Fw6NIFEwU3RiwK6mn3nKegjpJuzCndoGq3BZPkUjnsq7uQmIeMNeMbMk7YD2MfKcgCpZwX5jyXqCA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@csstools/utilities": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@csstools/utilities/-/utilities-2.0.0.tgz", - "integrity": "sha512-5VdOr0Z71u+Yp3ozOx8T11N703wIFGVRgOWbOZMKgglPJsWA54MRIoMNVMa7shUToIhx5J8vX4sOZgD2XiihiQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/@discoveryjs/json-ext": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", - "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/@docsearch/core": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@docsearch/core/-/core-4.6.0.tgz", - "integrity": "sha512-IqG3oSd529jVRQ4dWZQKwZwQLVd//bWJTz2HiL0LkiHrI4U/vLrBasKB7lwQB/69nBAcCgs3TmudxTZSLH/ZQg==", - "license": "MIT", - "peerDependencies": { - "@types/react": ">= 16.8.0 < 20.0.0", - "react": ">= 16.8.0 < 20.0.0", - "react-dom": ">= 16.8.0 < 20.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "react": { - "optional": true - }, - "react-dom": { 
- "optional": true - } - } - }, - "node_modules/@docsearch/css": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-4.6.0.tgz", - "integrity": "sha512-YlcAimkXclvqta47g47efzCM5CFxDwv2ClkDfEs/fC/Ak0OxPH2b3czwa4o8O1TRBf+ujFF2RiUwszz2fPVNJQ==", - "license": "MIT" - }, - "node_modules/@docsearch/react": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-4.6.0.tgz", - "integrity": "sha512-j8H5B4ArGxBPBWvw3X0J0Rm/Pjv2JDa2rV5OE0DLTp5oiBCptIJ/YlNOhZxuzbO2nwge+o3Z52nJRi3hryK9cA==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-core": "1.19.2", - "@docsearch/core": "4.6.0", - "@docsearch/css": "4.6.0" - }, - "peerDependencies": { - "@types/react": ">= 16.8.0 < 20.0.0", - "react": ">= 16.8.0 < 20.0.0", - "react-dom": ">= 16.8.0 < 20.0.0", - "search-insights": ">= 1 < 3" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "react": { - "optional": true - }, - "react-dom": { - "optional": true - }, - "search-insights": { - "optional": true - } - } - }, - "node_modules/@docusaurus/babel": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/babel/-/babel-3.9.2.tgz", - "integrity": "sha512-GEANdi/SgER+L7Japs25YiGil/AUDnFFHaCGPBbundxoWtCkA2lmy7/tFmgED4y1htAy6Oi4wkJEQdGssnw9MA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.25.9", - "@babel/generator": "^7.25.9", - "@babel/plugin-syntax-dynamic-import": "^7.8.3", - "@babel/plugin-transform-runtime": "^7.25.9", - "@babel/preset-env": "^7.25.9", - "@babel/preset-react": "^7.25.9", - "@babel/preset-typescript": "^7.25.9", - "@babel/runtime": "^7.25.9", - "@babel/runtime-corejs3": "^7.25.9", - "@babel/traverse": "^7.25.9", - "@docusaurus/logger": "3.9.2", - "@docusaurus/utils": "3.9.2", - "babel-plugin-dynamic-import-node": "^2.3.3", - "fs-extra": "^11.1.1", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/bundler": { - 
"version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/bundler/-/bundler-3.9.2.tgz", - "integrity": "sha512-ZOVi6GYgTcsZcUzjblpzk3wH1Fya2VNpd5jtHoCCFcJlMQ1EYXZetfAnRHLcyiFeBABaI1ltTYbOBtH/gahGVA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.25.9", - "@docusaurus/babel": "3.9.2", - "@docusaurus/cssnano-preset": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "babel-loader": "^9.2.1", - "clean-css": "^5.3.3", - "copy-webpack-plugin": "^11.0.0", - "css-loader": "^6.11.0", - "css-minimizer-webpack-plugin": "^5.0.1", - "cssnano": "^6.1.2", - "file-loader": "^6.2.0", - "html-minifier-terser": "^7.2.0", - "mini-css-extract-plugin": "^2.9.2", - "null-loader": "^4.0.1", - "postcss": "^8.5.4", - "postcss-loader": "^7.3.4", - "postcss-preset-env": "^10.2.1", - "terser-webpack-plugin": "^5.3.9", - "tslib": "^2.6.0", - "url-loader": "^4.1.1", - "webpack": "^5.95.0", - "webpackbar": "^6.0.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "@docusaurus/faster": "*" - }, - "peerDependenciesMeta": { - "@docusaurus/faster": { - "optional": true - } - } - }, - "node_modules/@docusaurus/core": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.9.2.tgz", - "integrity": "sha512-HbjwKeC+pHUFBfLMNzuSjqFE/58+rLVKmOU3lxQrpsxLBOGosYco/Q0GduBb0/jEMRiyEqjNT/01rRdOMWq5pw==", - "license": "MIT", - "dependencies": { - "@docusaurus/babel": "3.9.2", - "@docusaurus/bundler": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "boxen": "^6.2.1", - "chalk": "^4.1.2", - "chokidar": "^3.5.3", - "cli-table3": "^0.6.3", - "combine-promises": "^1.1.0", - "commander": "^5.1.0", - "core-js": "^3.31.1", - "detect-port": "^1.5.1", - "escape-html": "^1.0.3", - "eta": "^2.2.0", - "eval": "^0.1.8", - "execa": 
"5.1.1", - "fs-extra": "^11.1.1", - "html-tags": "^3.3.1", - "html-webpack-plugin": "^5.6.0", - "leven": "^3.1.0", - "lodash": "^4.17.21", - "open": "^8.4.0", - "p-map": "^4.0.0", - "prompts": "^2.4.2", - "react-helmet-async": "npm:@slorber/react-helmet-async@1.3.0", - "react-loadable": "npm:@docusaurus/react-loadable@6.0.0", - "react-loadable-ssr-addon-v5-slorber": "^1.0.1", - "react-router": "^5.3.4", - "react-router-config": "^5.1.1", - "react-router-dom": "^5.3.4", - "semver": "^7.5.4", - "serve-handler": "^6.1.6", - "tinypool": "^1.0.2", - "tslib": "^2.6.0", - "update-notifier": "^6.0.2", - "webpack": "^5.95.0", - "webpack-bundle-analyzer": "^4.10.2", - "webpack-dev-server": "^5.2.2", - "webpack-merge": "^6.0.1" - }, - "bin": { - "docusaurus": "bin/docusaurus.mjs" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "@mdx-js/react": "^3.0.0", - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/cssnano-preset": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.9.2.tgz", - "integrity": "sha512-8gBKup94aGttRduABsj7bpPFTX7kbwu+xh3K9NMCF5K4bWBqTFYW+REKHF6iBVDHRJ4grZdIPbvkiHd/XNKRMQ==", - "license": "MIT", - "dependencies": { - "cssnano-preset-advanced": "^6.1.2", - "postcss": "^8.5.4", - "postcss-sort-media-queries": "^5.2.0", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/logger": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.9.2.tgz", - "integrity": "sha512-/SVCc57ByARzGSU60c50rMyQlBuMIJCjcsJlkphxY6B0GV4UH3tcA1994N8fFfbJ9kX3jIBe/xg3XP5qBtGDbA==", - "license": "MIT", - "dependencies": { - "chalk": "^4.1.2", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/mdx-loader": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.9.2.tgz", - 
"integrity": "sha512-wiYoGwF9gdd6rev62xDU8AAM8JuLI/hlwOtCzMmYcspEkzecKrP8J8X+KpYnTlACBUUtXNJpSoCwFWJhLRevzQ==", - "license": "MIT", - "dependencies": { - "@docusaurus/logger": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "@mdx-js/mdx": "^3.0.0", - "@slorber/remark-comment": "^1.0.0", - "escape-html": "^1.0.3", - "estree-util-value-to-estree": "^3.0.1", - "file-loader": "^6.2.0", - "fs-extra": "^11.1.1", - "image-size": "^2.0.2", - "mdast-util-mdx": "^3.0.0", - "mdast-util-to-string": "^4.0.0", - "rehype-raw": "^7.0.0", - "remark-directive": "^3.0.0", - "remark-emoji": "^4.0.0", - "remark-frontmatter": "^5.0.0", - "remark-gfm": "^4.0.0", - "stringify-object": "^3.3.0", - "tslib": "^2.6.0", - "unified": "^11.0.3", - "unist-util-visit": "^5.0.0", - "url-loader": "^4.1.1", - "vfile": "^6.0.1", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/module-type-aliases": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.9.2.tgz", - "integrity": "sha512-8qVe2QA9hVLzvnxP46ysuofJUIc/yYQ82tvA/rBTrnpXtCjNSFLxEZfd5U8cYZuJIVlkPxamsIgwd5tGZXfvew==", - "license": "MIT", - "dependencies": { - "@docusaurus/types": "3.9.2", - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router-config": "*", - "@types/react-router-dom": "*", - "react-helmet-async": "npm:@slorber/react-helmet-async@1.3.0", - "react-loadable": "npm:@docusaurus/react-loadable@6.0.0" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, - "node_modules/@docusaurus/plugin-content-blog": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.9.2.tgz", - "integrity": "sha512-3I2HXy3L1QcjLJLGAoTvoBnpOwa6DPUa3Q0dMK19UTY9mhPkKQg/DYhAGTiBUKcTR0f08iw7kLPqOhIgdV3eVQ==", - 
"license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "cheerio": "1.0.0-rc.12", - "feed": "^4.2.2", - "fs-extra": "^11.1.1", - "lodash": "^4.17.21", - "schema-dts": "^1.1.2", - "srcset": "^4.0.0", - "tslib": "^2.6.0", - "unist-util-visit": "^5.0.0", - "utility-types": "^3.10.0", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "@docusaurus/plugin-content-docs": "*", - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-content-docs": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.9.2.tgz", - "integrity": "sha512-C5wZsGuKTY8jEYsqdxhhFOe1ZDjH0uIYJ9T/jebHwkyxqnr4wW0jTkB72OMqNjsoQRcb0JN3PcSeTwFlVgzCZg==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/module-type-aliases": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "@types/react-router-config": "^5.0.7", - "combine-promises": "^1.1.0", - "fs-extra": "^11.1.1", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21", - "schema-dts": "^1.1.2", - "tslib": "^2.6.0", - "utility-types": "^3.10.0", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-content-pages": { - "version": "3.9.2", - "resolved": 
"https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.9.2.tgz", - "integrity": "sha512-s4849w/p4noXUrGpPUF0BPqIAfdAe76BLaRGAGKZ1gTDNiGxGcpsLcwJ9OTi1/V8A+AzvsmI9pkjie2zjIQZKA==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "fs-extra": "^11.1.1", - "tslib": "^2.6.0", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-css-cascade-layers": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-css-cascade-layers/-/plugin-css-cascade-layers-3.9.2.tgz", - "integrity": "sha512-w1s3+Ss+eOQbscGM4cfIFBlVg/QKxyYgj26k5AnakuHkKxH6004ZtuLe5awMBotIYF2bbGDoDhpgQ4r/kcj4rQ==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/plugin-debug": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.9.2.tgz", - "integrity": "sha512-j7a5hWuAFxyQAkilZwhsQ/b3T7FfHZ+0dub6j/GxKNFJp2h9qk/P1Bp7vrGASnvA9KNQBBL1ZXTe7jlh4VdPdA==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "fs-extra": "^11.1.1", - "react-json-view-lite": "^2.3.0", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-google-analytics": { - "version": "3.9.2", - "resolved": 
"https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.9.2.tgz", - "integrity": "sha512-mAwwQJ1Us9jL/lVjXtErXto4p4/iaLlweC54yDUK1a97WfkC6Z2k5/769JsFgwOwOP+n5mUQGACXOEQ0XDuVUw==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-google-gtag": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.9.2.tgz", - "integrity": "sha512-YJ4lDCphabBtw19ooSlc1MnxtYGpjFV9rEdzjLsUnBCeis2djUyCozZaFhCg6NGEwOn7HDDyMh0yzcdRpnuIvA==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "@types/gtag.js": "^0.0.12", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-google-tag-manager": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.9.2.tgz", - "integrity": "sha512-LJtIrkZN/tuHD8NqDAW1Tnw0ekOwRTfobWPsdO15YxcicBo2ykKF0/D6n0vVBfd3srwr9Z6rzrIWYrMzBGrvNw==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-sitemap": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.9.2.tgz", - "integrity": 
"sha512-WLh7ymgDXjG8oPoM/T4/zUP7KcSuFYRZAUTl8vR6VzYkfc18GBM4xLhcT+AKOwun6kBivYKUJf+vlqYJkm+RHw==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "fs-extra": "^11.1.1", - "sitemap": "^7.1.1", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/plugin-svgr": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-svgr/-/plugin-svgr-3.9.2.tgz", - "integrity": "sha512-n+1DE+5b3Lnf27TgVU5jM1d4x5tUh2oW5LTsBxJX4PsAPV0JGcmI6p3yLYtEY0LRVEIJh+8RsdQmRE66wSV8mw==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "@svgr/core": "8.1.0", - "@svgr/webpack": "^8.1.0", - "tslib": "^2.6.0", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/preset-classic": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.9.2.tgz", - "integrity": "sha512-IgyYO2Gvaigi21LuDIe+nvmN/dfGXAiMcV/murFqcpjnZc7jxFAxW+9LEjdPt61uZLxG4ByW/oUmX/DDK9t/8w==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/plugin-content-blog": "3.9.2", - "@docusaurus/plugin-content-docs": "3.9.2", - "@docusaurus/plugin-content-pages": "3.9.2", - "@docusaurus/plugin-css-cascade-layers": "3.9.2", - "@docusaurus/plugin-debug": "3.9.2", - "@docusaurus/plugin-google-analytics": "3.9.2", - "@docusaurus/plugin-google-gtag": "3.9.2", - "@docusaurus/plugin-google-tag-manager": "3.9.2", - 
"@docusaurus/plugin-sitemap": "3.9.2", - "@docusaurus/plugin-svgr": "3.9.2", - "@docusaurus/theme-classic": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/theme-search-algolia": "3.9.2", - "@docusaurus/types": "3.9.2" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/theme-classic": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.9.2.tgz", - "integrity": "sha512-IGUsArG5hhekXd7RDb11v94ycpJpFdJPkLnt10fFQWOVxAtq5/D7hT6lzc2fhyQKaaCE62qVajOMKL7OiAFAIA==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/module-type-aliases": "3.9.2", - "@docusaurus/plugin-content-blog": "3.9.2", - "@docusaurus/plugin-content-docs": "3.9.2", - "@docusaurus/plugin-content-pages": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/theme-translations": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "@mdx-js/react": "^3.0.0", - "clsx": "^2.0.0", - "infima": "0.2.0-alpha.45", - "lodash": "^4.17.21", - "nprogress": "^0.2.0", - "postcss": "^8.5.4", - "prism-react-renderer": "^2.3.0", - "prismjs": "^1.29.0", - "react-router-dom": "^5.3.4", - "rtlcss": "^4.1.0", - "tslib": "^2.6.0", - "utility-types": "^3.10.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/theme-common": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.9.2.tgz", - "integrity": "sha512-6c4DAbR6n6nPbnZhY2V3tzpnKnGL+6aOsLvFL26VRqhlczli9eWG0VDUNoCQEPnGwDMhPS42UhSAnz5pThm5Ag==", - "license": "MIT", - "dependencies": { - 
"@docusaurus/mdx-loader": "3.9.2", - "@docusaurus/module-type-aliases": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router-config": "*", - "clsx": "^2.0.0", - "parse-numeric-range": "^1.3.0", - "prism-react-renderer": "^2.3.0", - "tslib": "^2.6.0", - "utility-types": "^3.10.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "@docusaurus/plugin-content-docs": "*", - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/theme-mermaid": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-mermaid/-/theme-mermaid-3.9.2.tgz", - "integrity": "sha512-5vhShRDq/ntLzdInsQkTdoKWSzw8d1jB17sNPYhA/KvYYFXfuVEGHLM6nrf8MFbV8TruAHDG21Fn3W4lO8GaDw==", - "license": "MIT", - "dependencies": { - "@docusaurus/core": "3.9.2", - "@docusaurus/module-type-aliases": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "mermaid": ">=11.6.0", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "@mermaid-js/layout-elk": "^0.1.9", - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "@mermaid-js/layout-elk": { - "optional": true - } - } - }, - "node_modules/@docusaurus/theme-search-algolia": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.9.2.tgz", - "integrity": "sha512-GBDSFNwjnh5/LdkxCKQHkgO2pIMX1447BxYUBG2wBiajS21uj64a+gH/qlbQjDLxmGrbrllBrtJkUHxIsiwRnw==", - "license": "MIT", - "dependencies": { - "@docsearch/react": "^3.9.0 || ^4.1.0", - "@docusaurus/core": "3.9.2", - "@docusaurus/logger": "3.9.2", - "@docusaurus/plugin-content-docs": "3.9.2", - "@docusaurus/theme-common": "3.9.2", - "@docusaurus/theme-translations": "3.9.2", - "@docusaurus/utils": 
"3.9.2", - "@docusaurus/utils-validation": "3.9.2", - "algoliasearch": "^5.37.0", - "algoliasearch-helper": "^3.26.0", - "clsx": "^2.0.0", - "eta": "^2.2.0", - "fs-extra": "^11.1.1", - "lodash": "^4.17.21", - "tslib": "^2.6.0", - "utility-types": "^3.10.0" - }, - "engines": { - "node": ">=20.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/theme-translations": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.9.2.tgz", - "integrity": "sha512-vIryvpP18ON9T9rjgMRFLr2xJVDpw1rtagEGf8Ccce4CkTrvM/fRB8N2nyWYOW5u3DdjkwKw5fBa+3tbn9P4PA==", - "license": "MIT", - "dependencies": { - "fs-extra": "^11.1.1", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/tsconfig": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/tsconfig/-/tsconfig-3.9.2.tgz", - "integrity": "sha512-j6/Fp4Rlpxsc632cnRnl5HpOWeb6ZKssDj6/XzzAzVGXXfm9Eptx3rxCC+fDzySn9fHTS+CWJjPineCR1bB5WQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/@docusaurus/types": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.9.2.tgz", - "integrity": "sha512-Ux1JUNswg+EfUEmajJjyhIohKceitY/yzjRUpu04WXgvVz+fbhVC0p+R0JhvEu4ytw8zIAys2hrdpQPBHRIa8Q==", - "license": "MIT", - "dependencies": { - "@mdx-js/mdx": "^3.0.0", - "@types/history": "^4.7.11", - "@types/mdast": "^4.0.2", - "@types/react": "*", - "commander": "^5.1.0", - "joi": "^17.9.2", - "react-helmet-async": "npm:@slorber/react-helmet-async@1.3.0", - "utility-types": "^3.10.0", - "webpack": "^5.95.0", - "webpack-merge": "^5.9.0" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0", - "react-dom": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/@docusaurus/types/node_modules/webpack-merge": { - "version": "5.10.0", - "resolved": 
"https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", - "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", - "license": "MIT", - "dependencies": { - "clone-deep": "^4.0.1", - "flat": "^5.0.2", - "wildcard": "^2.0.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/@docusaurus/utils": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.9.2.tgz", - "integrity": "sha512-lBSBiRruFurFKXr5Hbsl2thmGweAPmddhF3jb99U4EMDA5L+e5Y1rAkOS07Nvrup7HUMBDrCV45meaxZnt28nQ==", - "license": "MIT", - "dependencies": { - "@docusaurus/logger": "3.9.2", - "@docusaurus/types": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "escape-string-regexp": "^4.0.0", - "execa": "5.1.1", - "file-loader": "^6.2.0", - "fs-extra": "^11.1.1", - "github-slugger": "^1.5.0", - "globby": "^11.1.0", - "gray-matter": "^4.0.3", - "jiti": "^1.20.0", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21", - "micromatch": "^4.0.5", - "p-queue": "^6.6.2", - "prompts": "^2.4.2", - "resolve-pathname": "^3.0.0", - "tslib": "^2.6.0", - "url-loader": "^4.1.1", - "utility-types": "^3.10.0", - "webpack": "^5.88.1" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/utils-common": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.9.2.tgz", - "integrity": "sha512-I53UC1QctruA6SWLvbjbhCpAw7+X7PePoe5pYcwTOEXD/PxeP8LnECAhTHHwWCblyUX5bMi4QLRkxvyZ+IT8Aw==", - "license": "MIT", - "dependencies": { - "@docusaurus/types": "3.9.2", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@docusaurus/utils-validation": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.9.2.tgz", - "integrity": "sha512-l7yk3X5VnNmATbwijJkexdhulNsQaNDwoagiwujXoxFbWLcxHQqNQ+c/IAlzrfMMOfa/8xSBZ7KEKDesE/2J7A==", - "license": "MIT", - "dependencies": { - 
"@docusaurus/logger": "3.9.2", - "@docusaurus/utils": "3.9.2", - "@docusaurus/utils-common": "3.9.2", - "fs-extra": "^11.2.0", - "joi": "^17.9.2", - "js-yaml": "^4.1.0", - "lodash": "^4.17.21", - "tslib": "^2.6.0" - }, - "engines": { - "node": ">=20.0" - } - }, - "node_modules/@hapi/hoek": { - "version": "9.3.0", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", - "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@hapi/topo": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", - "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", - "license": "BSD-3-Clause", - "dependencies": { - "@hapi/hoek": "^9.0.0" - } - }, - "node_modules/@iconify/types": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz", - "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==", - "license": "MIT" - }, - "node_modules/@iconify/utils": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.1.0.tgz", - "integrity": "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==", - "license": "MIT", - "dependencies": { - "@antfu/install-pkg": "^1.1.0", - "@iconify/types": "^2.0.0", - "mlly": "^1.8.0" - } - }, - "node_modules/@jest/schemas": { - "version": "29.6.3", - "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", - "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", - "license": "MIT", - "dependencies": { - "@sinclair/typebox": "^0.27.8" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jest/types": { - "version": "29.6.3", - "resolved": 
"https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", - "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", - "license": "MIT", - "dependencies": { - "@jest/schemas": "^29.6.3", - "@types/istanbul-lib-coverage": "^2.0.0", - "@types/istanbul-reports": "^3.0.0", - "@types/node": "*", - "@types/yargs": "^17.0.8", - "chalk": "^4.0.0" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/remapping": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", - "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.11", - "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.11.tgz", - "integrity": "sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==", - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": 
"^0.3.25" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@jsonjoy.com/base64": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/buffers": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-17.67.0.tgz", - "integrity": "sha512-tfExRpYxBvi32vPs9ZHaTjSP4fHAfzSmcahOfNxtvGHcyJel+aibkPlGeBB+7AoC6hL7lXIE++8okecBxx7lcw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/codegen": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/codegen/-/codegen-1.0.0.tgz", - "integrity": "sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - 
"funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-core": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-core/-/fs-core-4.56.10.tgz", - "integrity": "sha512-PyAEA/3cnHhsGcdY+AmIU+ZPqTuZkDhCXQ2wkXypdLitSpd6d5Ivxhnq4wa2ETRWFVJGabYynBWxIijOswSmOw==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-node-builtins": "4.56.10", - "@jsonjoy.com/fs-node-utils": "4.56.10", - "thingies": "^2.5.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-fsa": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-fsa/-/fs-fsa-4.56.10.tgz", - "integrity": "sha512-/FVK63ysNzTPOnCCcPoPHt77TOmachdMS422txM4KhxddLdbW1fIbFMYH0AM0ow/YchCyS5gqEjKLNyv71j/5Q==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-core": "4.56.10", - "@jsonjoy.com/fs-node-builtins": "4.56.10", - "@jsonjoy.com/fs-node-utils": "4.56.10", - "thingies": "^2.5.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-node": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node/-/fs-node-4.56.10.tgz", - "integrity": "sha512-7R4Gv3tkUdW3dXfXiOkqxkElxKNVdd8BDOWC0/dbERd0pXpPY+s2s1Mino+aTvkGrFPiY+mmVxA7zhskm4Ue4Q==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-core": "4.56.10", - "@jsonjoy.com/fs-node-builtins": "4.56.10", - "@jsonjoy.com/fs-node-utils": "4.56.10", - "@jsonjoy.com/fs-print": "4.56.10", - "@jsonjoy.com/fs-snapshot": "4.56.10", - "glob-to-regex.js": "^1.0.0", - "thingies": "^2.5.0" - }, - "engines": { - "node": ">=10.0" - }, - 
"funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-node-builtins": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-builtins/-/fs-node-builtins-4.56.10.tgz", - "integrity": "sha512-uUnKz8R0YJyKq5jXpZtkGV9U0pJDt8hmYcLRrPjROheIfjMXsz82kXMgAA/qNg0wrZ1Kv+hrg7azqEZx6XZCVw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-node-to-fsa": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-to-fsa/-/fs-node-to-fsa-4.56.10.tgz", - "integrity": "sha512-oH+O6Y4lhn9NyG6aEoFwIBNKZeYy66toP5LJcDOMBgL99BKQMUf/zWJspdRhMdn/3hbzQsZ8EHHsuekbFLGUWw==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-fsa": "4.56.10", - "@jsonjoy.com/fs-node-builtins": "4.56.10", - "@jsonjoy.com/fs-node-utils": "4.56.10" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-node-utils": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-utils/-/fs-node-utils-4.56.10.tgz", - "integrity": "sha512-8EuPBgVI2aDPwFdaNQeNpHsyqPi3rr+85tMNG/lHvQLiVjzoZsvxA//Xd8aB567LUhy4QS03ptT+unkD/DIsNg==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-node-builtins": "4.56.10" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-print": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-print/-/fs-print-4.56.10.tgz", - "integrity": 
"sha512-JW4fp5mAYepzFsSGrQ48ep8FXxpg4niFWHdF78wDrFGof7F3tKDJln72QFDEn/27M1yHd4v7sKHHVPh78aWcEw==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-node-utils": "4.56.10", - "tree-dump": "^1.1.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-snapshot/-/fs-snapshot-4.56.10.tgz", - "integrity": "sha512-DkR6l5fj7+qj0+fVKm/OOXMGfDFCGXLfyHkORH3DF8hxkpDgIHbhf/DwncBMs2igu/ST7OEkexn1gIqoU6Y+9g==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/buffers": "^17.65.0", - "@jsonjoy.com/fs-node-utils": "4.56.10", - "@jsonjoy.com/json-pack": "^17.65.0", - "@jsonjoy.com/util": "^17.65.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot/node_modules/@jsonjoy.com/base64": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-17.67.0.tgz", - "integrity": "sha512-5SEsJGsm15aP8TQGkDfJvz9axgPwAEm98S5DxOuYe8e1EbfajcDmgeXXzccEjh+mLnjqEKrkBdjHWS5vFNwDdw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot/node_modules/@jsonjoy.com/codegen": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/codegen/-/codegen-17.67.0.tgz", - "integrity": "sha512-idnkUplROpdBOV0HMcwhsCUS5TRUi9poagdGs70A6S4ux9+/aPuKbh8+UYRTLYQHtXvAdNfQWXDqZEx5k4Dj2Q==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot/node_modules/@jsonjoy.com/json-pack": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pack/-/json-pack-17.67.0.tgz", - "integrity": "sha512-t0ejURcGaZsn1ClbJ/3kFqSOjlryd92eQY465IYrezsXmPcfHPE/av4twRSxf6WE+TkZgLY+71vCZbiIiFKA/w==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/base64": "17.67.0", - "@jsonjoy.com/buffers": "17.67.0", - "@jsonjoy.com/codegen": "17.67.0", - "@jsonjoy.com/json-pointer": "17.67.0", - "@jsonjoy.com/util": "17.67.0", - "hyperdyperid": "^1.2.0", - "thingies": "^2.5.0", - "tree-dump": "^1.1.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot/node_modules/@jsonjoy.com/json-pointer": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pointer/-/json-pointer-17.67.0.tgz", - "integrity": "sha512-+iqOFInH+QZGmSuaybBUNdh7yvNrXvqR+h3wjXm0N/3JK1EyyFAeGJvqnmQL61d1ARLlk/wJdFKSL+LHJ1eaUA==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/util": "17.67.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/fs-snapshot/node_modules/@jsonjoy.com/util": { - "version": "17.67.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-17.67.0.tgz", - "integrity": "sha512-6+8xBaz1rLSohlGh68D1pdw3AwDi9xydm8QNlAFkvnavCJYSze+pxoW2VKP8p308jtlMRLs5NTHfPlZLd4w7ew==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/buffers": "17.67.0", - "@jsonjoy.com/codegen": "17.67.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/json-pack": { - "version": "1.21.0", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pack/-/json-pack-1.21.0.tgz", - "integrity": "sha512-+AKG+R2cfZMShzrF2uQw34v3zbeDYUqnQ+jg7ORic3BGtfw9p/+N6RJbq/kkV8JmYZaINknaEQ2m0/f693ZPpg==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/base64": "^1.1.2", - "@jsonjoy.com/buffers": "^1.2.0", - "@jsonjoy.com/codegen": "^1.0.0", - "@jsonjoy.com/json-pointer": "^1.0.2", - "@jsonjoy.com/util": "^1.9.0", - "hyperdyperid": "^1.2.0", - "thingies": "^2.5.0", - "tree-dump": "^1.1.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/json-pack/node_modules/@jsonjoy.com/buffers": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-1.2.1.tgz", - "integrity": "sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/json-pointer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pointer/-/json-pointer-1.0.2.tgz", - "integrity": "sha512-Fsn6wM2zlDzY1U+v4Nc8bo3bVqgfNTGcn6dMgs6FjrEnt4ZCe60o6ByKRjOGlI2gow0aE/Q41QOigdTqkyK5fg==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/codegen": "^1.0.0", - "@jsonjoy.com/util": "^1.9.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/util": { - "version": "1.9.0", - "resolved": 
"https://registry.npmjs.org/@jsonjoy.com/util/-/util-1.9.0.tgz", - "integrity": "sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/buffers": "^1.0.0", - "@jsonjoy.com/codegen": "^1.0.0" - }, - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@jsonjoy.com/util/node_modules/@jsonjoy.com/buffers": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-1.2.1.tgz", - "integrity": "sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/@leichtgewicht/ip-codec": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", - "integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==", - "license": "MIT" - }, - "node_modules/@mdx-js/mdx": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.1.1.tgz", - "integrity": "sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdx": "^2.0.0", - "acorn": "^8.0.0", - "collapse-white-space": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-util-scope": "^1.0.0", - "estree-walker": "^3.0.0", - "hast-util-to-jsx-runtime": "^2.0.0", - "markdown-extensions": "^2.0.0", - "recma-build-jsx": "^1.0.0", - "recma-jsx": "^1.0.0", - 
"recma-stringify": "^1.0.0", - "rehype-recma": "^1.0.0", - "remark-mdx": "^3.0.0", - "remark-parse": "^11.0.0", - "remark-rehype": "^11.0.0", - "source-map": "^0.7.0", - "unified": "^11.0.0", - "unist-util-position-from-estree": "^2.0.0", - "unist-util-stringify-position": "^4.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/@mdx-js/react": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.1.1.tgz", - "integrity": "sha512-f++rKLQgUVYDAtECQ6fn/is15GkEH9+nZPM3MS0RcxVqoTfawHvDlSCH7JbMhAM6uJ32v3eXLvLmLvjGu7PTQw==", - "license": "MIT", - "dependencies": { - "@types/mdx": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "peerDependencies": { - "@types/react": ">=16", - "react": ">=16" - } - }, - "node_modules/@mermaid-js/parser": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.0.tgz", - "integrity": "sha512-vvK0Hi/VWndxoh03Mmz6wa1KDriSPjS2XMZL/1l19HFwygiObEEoEwSDxOqyLzzAI6J2PU3261JjTMTO7x+BPw==", - "license": "MIT", - "dependencies": { - "langium": "^4.0.0" - } - }, - "node_modules/@noble/hashes": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.4.0.tgz", - "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==", - "license": "MIT", - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - 
"engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@peculiar/asn1-cms": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.6.1.tgz", - "integrity": "sha512-vdG4fBF6Lkirkcl53q6eOdn3XYKt+kJTG59edgRZORlg/3atWWEReRCx5rYE1ZzTTX6vLK5zDMjHh7vbrcXGtw==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "@peculiar/asn1-x509-attr": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-csr": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.6.1.tgz", - "integrity": "sha512-WRWnKfIocHyzFYQTka8O/tXCiBquAPSrRjXbOkHbO4qdmS6loffCEGs+rby6WxxGdJCuunnhS2duHURhjyio6w==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-ecc": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.6.1.tgz", - "integrity": "sha512-+Vqw8WFxrtDIN5ehUdvlN2m73exS2JVG0UAyfVB31gIfor3zWEAQPD+K9ydCxaj3MLen9k0JhKpu9LqviuCE1g==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": 
"^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-pfx": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.6.1.tgz", - "integrity": "sha512-nB5jVQy3MAAWvq0KY0R2JUZG8bO/bTLpnwyOzXyEh/e54ynGTatAR+csOnXkkVD9AFZ2uL8Z7EV918+qB1qDvw==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-cms": "^2.6.1", - "@peculiar/asn1-pkcs8": "^2.6.1", - "@peculiar/asn1-rsa": "^2.6.1", - "@peculiar/asn1-schema": "^2.6.0", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-pkcs8": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.6.1.tgz", - "integrity": "sha512-JB5iQ9Izn5yGMw3ZG4Nw3Xn/hb/G38GYF3lf7WmJb8JZUydhVGEjK/ZlFSWhnlB7K/4oqEs8HnfFIKklhR58Tw==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-pkcs9": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.6.1.tgz", - "integrity": "sha512-5EV8nZoMSxeWmcxWmmcolg22ojZRgJg+Y9MX2fnE2bGRo5KQLqV5IL9kdSQDZxlHz95tHvIq9F//bvL1OeNILw==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-cms": "^2.6.1", - "@peculiar/asn1-pfx": "^2.6.1", - "@peculiar/asn1-pkcs8": "^2.6.1", - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "@peculiar/asn1-x509-attr": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-rsa": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.6.1.tgz", - "integrity": "sha512-1nVMEh46SElUt5CB3RUTV4EG/z7iYc7EoaDY5ECwganibQPkZ/Y2eMsTKB/LeyrUJ+W/tKoD9WUqIy8vB+CEdA==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - 
"node_modules/@peculiar/asn1-schema": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.6.0.tgz", - "integrity": "sha512-xNLYLBFTBKkCzEZIw842BxytQQATQv+lDTCEMZ8C196iJcJJMBUZxrhSTxLaohMyKK8QlzRNTRkUmanucnDSqg==", - "license": "MIT", - "dependencies": { - "asn1js": "^3.0.6", - "pvtsutils": "^1.3.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-x509": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.6.1.tgz", - "integrity": "sha512-O9jT5F1A2+t3r7C4VT7LYGXqkGLK7Kj1xFpz7U0isPrubwU5PbDoyYtx6MiGst29yq7pXN5vZbQFKRCP+lLZlA==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "asn1js": "^3.0.6", - "pvtsutils": "^1.3.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/asn1-x509-attr": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.6.1.tgz", - "integrity": "sha512-tlW6cxoHwgcQghnJwv3YS+9OO1737zgPogZ+CgWRUK4roEwIPzRH4JEiG770xe5HX2ATfCpmX60gurfWIF9dcQ==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.1", - "asn1js": "^3.0.6", - "tslib": "^2.8.1" - } - }, - "node_modules/@peculiar/x509": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.14.3.tgz", - "integrity": "sha512-C2Xj8FZ0uHWeCXXqX5B4/gVFQmtSkiuOolzAgutjTfseNOHT3pUjljDZsTSxXFGgio54bCzVFqmEOUrIVk8RDA==", - "license": "MIT", - "dependencies": { - "@peculiar/asn1-cms": "^2.6.0", - "@peculiar/asn1-csr": "^2.6.0", - "@peculiar/asn1-ecc": "^2.6.0", - "@peculiar/asn1-pkcs9": "^2.6.0", - "@peculiar/asn1-rsa": "^2.6.0", - "@peculiar/asn1-schema": "^2.6.0", - "@peculiar/asn1-x509": "^2.6.0", - "pvtsutils": "^1.3.6", - "reflect-metadata": "^0.2.2", - "tslib": "^2.8.1", - "tsyringe": "^4.10.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@pnpm/config.env-replace": { - "version": "1.1.0", - 
"resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz", - "integrity": "sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==", - "license": "MIT", - "engines": { - "node": ">=12.22.0" - } - }, - "node_modules/@pnpm/network.ca-file": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", - "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", - "license": "MIT", - "dependencies": { - "graceful-fs": "4.2.10" - }, - "engines": { - "node": ">=12.22.0" - } - }, - "node_modules/@pnpm/network.ca-file/node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "license": "ISC" - }, - "node_modules/@pnpm/npm-conf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-3.0.2.tgz", - "integrity": "sha512-h104Kh26rR8tm+a3Qkc5S4VLYint3FE48as7+/5oCEcKR2idC/pF1G6AhIXKI+eHPJa/3J9i5z0Al47IeGHPkA==", - "license": "MIT", - "dependencies": { - "@pnpm/config.env-replace": "^1.1.0", - "@pnpm/network.ca-file": "^1.0.1", - "config-chain": "^1.1.11" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@polka/url": { - "version": "1.0.0-next.29", - "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", - "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", - "license": "MIT" - }, - "node_modules/@sideway/address": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz", - "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==", - "license": "BSD-3-Clause", - "dependencies": { 
- "@hapi/hoek": "^9.0.0" - } - }, - "node_modules/@sideway/formula": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", - "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==", - "license": "BSD-3-Clause" - }, - "node_modules/@sideway/pinpoint": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", - "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==", - "license": "BSD-3-Clause" - }, - "node_modules/@sinclair/typebox": { - "version": "0.27.10", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.10.tgz", - "integrity": "sha512-MTBk/3jGLNB2tVxv6uLlFh1iu64iYOQ2PbdOSK3NW8JZsmlaOh2q6sdtKowBhfw8QFLmYNzTW4/oK4uATIi6ZA==", - "license": "MIT" - }, - "node_modules/@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - }, - "node_modules/@slorber/remark-comment": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@slorber/remark-comment/-/remark-comment-1.0.0.tgz", - "integrity": "sha512-RCE24n7jsOj1M0UPvIQCHTe7fI0sFL4S2nwKVWwHyVr/wI/H8GosgsJGyhnsZoGFnD/P2hLf1mSbrrgSLN93NA==", - "license": "MIT", - "dependencies": { - "micromark-factory-space": "^1.0.0", - "micromark-util-character": "^1.1.0", - "micromark-util-symbol": "^1.0.1" - } - }, - "node_modules/@svgr/babel-plugin-add-jsx-attribute": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz", - "integrity": 
"sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-remove-jsx-attribute": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz", - "integrity": "sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz", - "integrity": "sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz", - "integrity": "sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@svgr/babel-plugin-svg-dynamic-title": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz", - "integrity": "sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-svg-em-dimensions": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz", - "integrity": "sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-transform-react-native-svg": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz", - "integrity": "sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-plugin-transform-svg-component": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz", - "integrity": "sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - 
"funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/babel-preset": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-8.1.0.tgz", - "integrity": "sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==", - "license": "MIT", - "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": "8.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "8.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "8.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "8.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "8.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "8.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "8.1.0", - "@svgr/babel-plugin-transform-svg-component": "8.0.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@svgr/core": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz", - "integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.21.3", - "@svgr/babel-preset": "8.1.0", - "camelcase": "^6.2.0", - "cosmiconfig": "^8.1.3", - "snake-case": "^3.0.4" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@svgr/hast-util-to-babel-ast": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz", - "integrity": "sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==", - "license": "MIT", - "dependencies": { - "@babel/types": 
"^7.21.3", - "entities": "^4.4.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - } - }, - "node_modules/@svgr/plugin-jsx": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz", - "integrity": "sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.21.3", - "@svgr/babel-preset": "8.1.0", - "@svgr/hast-util-to-babel-ast": "8.0.0", - "svg-parser": "^2.0.4" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "*" - } - }, - "node_modules/@svgr/plugin-svgo": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz", - "integrity": "sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA==", - "license": "MIT", - "dependencies": { - "cosmiconfig": "^8.1.3", - "deepmerge": "^4.3.1", - "svgo": "^3.0.2" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/gregberge" - }, - "peerDependencies": { - "@svgr/core": "*" - } - }, - "node_modules/@svgr/webpack": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-8.1.0.tgz", - "integrity": "sha512-LnhVjMWyMQV9ZmeEy26maJk+8HTIbd59cH4F2MJ439k9DqejRisfFNGAPvRYlKETuh9LrImlS8aKsBgKjMA8WA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.21.3", - "@babel/plugin-transform-react-constant-elements": "^7.21.3", - "@babel/preset-env": "^7.20.2", - "@babel/preset-react": "^7.18.6", - "@babel/preset-typescript": "^7.21.0", - "@svgr/core": "8.1.0", - "@svgr/plugin-jsx": "8.1.0", - "@svgr/plugin-svgo": "8.1.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/gregberge" - } - }, - "node_modules/@szmarczak/http-timer": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", - "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", - "license": "MIT", - "dependencies": { - "defer-to-connect": "^2.0.1" - }, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/@trysound/sax": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", - "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", - "license": "ISC", - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/@types/body-parser": { - "version": "1.19.6", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", - "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", - "license": "MIT", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/bonjour": { - "version": "3.5.13", - "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.13.tgz", - "integrity": "sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/connect-history-api-fallback": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.4.tgz", - "integrity": 
"sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==", - "license": "MIT", - "dependencies": { - "@types/express-serve-static-core": "*", - "@types/node": "*" - } - }, - "node_modules/@types/d3": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", - "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", - "license": "MIT", - "dependencies": { - "@types/d3-array": "*", - "@types/d3-axis": "*", - "@types/d3-brush": "*", - "@types/d3-chord": "*", - "@types/d3-color": "*", - "@types/d3-contour": "*", - "@types/d3-delaunay": "*", - "@types/d3-dispatch": "*", - "@types/d3-drag": "*", - "@types/d3-dsv": "*", - "@types/d3-ease": "*", - "@types/d3-fetch": "*", - "@types/d3-force": "*", - "@types/d3-format": "*", - "@types/d3-geo": "*", - "@types/d3-hierarchy": "*", - "@types/d3-interpolate": "*", - "@types/d3-path": "*", - "@types/d3-polygon": "*", - "@types/d3-quadtree": "*", - "@types/d3-random": "*", - "@types/d3-scale": "*", - "@types/d3-scale-chromatic": "*", - "@types/d3-selection": "*", - "@types/d3-shape": "*", - "@types/d3-time": "*", - "@types/d3-time-format": "*", - "@types/d3-timer": "*", - "@types/d3-transition": "*", - "@types/d3-zoom": "*" - } - }, - "node_modules/@types/d3-array": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", - "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", - "license": "MIT" - }, - "node_modules/@types/d3-axis": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", - "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-brush": { - "version": "3.0.6", - "resolved": 
"https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", - "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-chord": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", - "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", - "license": "MIT" - }, - "node_modules/@types/d3-color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", - "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", - "license": "MIT" - }, - "node_modules/@types/d3-contour": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", - "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", - "license": "MIT", - "dependencies": { - "@types/d3-array": "*", - "@types/geojson": "*" - } - }, - "node_modules/@types/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", - "license": "MIT" - }, - "node_modules/@types/d3-dispatch": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", - "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", - "license": "MIT" - }, - "node_modules/@types/d3-drag": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", - "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", - 
"license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-dsv": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", - "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", - "license": "MIT" - }, - "node_modules/@types/d3-ease": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", - "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", - "license": "MIT" - }, - "node_modules/@types/d3-fetch": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", - "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", - "license": "MIT", - "dependencies": { - "@types/d3-dsv": "*" - } - }, - "node_modules/@types/d3-force": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", - "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", - "license": "MIT" - }, - "node_modules/@types/d3-format": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", - "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", - "license": "MIT" - }, - "node_modules/@types/d3-geo": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", - "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", - "license": "MIT", - "dependencies": { - "@types/geojson": "*" - } - }, - "node_modules/@types/d3-hierarchy": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", - "integrity": 
"sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", - "license": "MIT" - }, - "node_modules/@types/d3-interpolate": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", - "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", - "license": "MIT", - "dependencies": { - "@types/d3-color": "*" - } - }, - "node_modules/@types/d3-path": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", - "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", - "license": "MIT" - }, - "node_modules/@types/d3-polygon": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", - "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", - "license": "MIT" - }, - "node_modules/@types/d3-quadtree": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", - "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", - "license": "MIT" - }, - "node_modules/@types/d3-random": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", - "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", - "license": "MIT" - }, - "node_modules/@types/d3-scale": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", - "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", - "license": "MIT", - "dependencies": { - "@types/d3-time": "*" - } - }, - "node_modules/@types/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", - "license": "MIT" - }, - "node_modules/@types/d3-selection": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", - "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", - "license": "MIT" - }, - "node_modules/@types/d3-shape": { - "version": "3.1.8", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", - "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", - "license": "MIT", - "dependencies": { - "@types/d3-path": "*" - } - }, - "node_modules/@types/d3-time": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", - "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", - "license": "MIT" - }, - "node_modules/@types/d3-time-format": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", - "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", - "license": "MIT" - }, - "node_modules/@types/d3-timer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", - "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", - "license": "MIT" - }, - "node_modules/@types/d3-transition": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", - "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", - "license": "MIT", - "dependencies": { - 
"@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-zoom": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", - "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", - "license": "MIT", - "dependencies": { - "@types/d3-interpolate": "*", - "@types/d3-selection": "*" - } - }, - "node_modules/@types/debug": { - "version": "4.1.12", - "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", - "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", - "license": "MIT", - "dependencies": { - "@types/ms": "*" - } - }, - "node_modules/@types/eslint": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz", - "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", - "license": "MIT", - "dependencies": { - "@types/estree": "*", - "@types/json-schema": "*" - } - }, - "node_modules/@types/eslint-scope": { - "version": "3.7.7", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", - "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", - "license": "MIT", - "dependencies": { - "@types/eslint": "*", - "@types/estree": "*" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "license": "MIT" - }, - "node_modules/@types/estree-jsx": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", - "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", - "license": "MIT", - "dependencies": { - 
"@types/estree": "*" - } - }, - "node_modules/@types/express": { - "version": "4.17.25", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.25.tgz", - "integrity": "sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw==", - "license": "MIT", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - "@types/serve-static": "^1" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.19.8", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.8.tgz", - "integrity": "sha512-02S5fmqeoKzVZCHPZid4b8JH2eM5HzQLZWN2FohQEy/0eXTq8VXZfSN6Pcr3F6N9R/vNrj7cpgbhjie6m/1tCA==", - "license": "MIT", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/geojson": { - "version": "7946.0.16", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", - "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", - "license": "MIT" - }, - "node_modules/@types/gtag.js": { - "version": "0.0.12", - "resolved": "https://registry.npmjs.org/@types/gtag.js/-/gtag.js-0.0.12.tgz", - "integrity": "sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg==", - "license": "MIT" - }, - "node_modules/@types/hast": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", - "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "*" - } - }, - "node_modules/@types/history": { - "version": "4.7.11", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz", - "integrity": 
"sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==", - "license": "MIT" - }, - "node_modules/@types/html-minifier-terser": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", - "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==", - "license": "MIT" - }, - "node_modules/@types/http-cache-semantics": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", - "integrity": "sha512-L3LgimLHXtGkWikKnsPg0/VFx9OGZaC+eN1u4r+OB1XRqH3meBIAVC2zr1WdMH+RHmnRkqliQAOHNJ/E0j/e0Q==", - "license": "MIT" - }, - "node_modules/@types/http-errors": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", - "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", - "license": "MIT" - }, - "node_modules/@types/http-proxy": { - "version": "1.17.17", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.17.tgz", - "integrity": "sha512-ED6LB+Z1AVylNTu7hdzuBqOgMnvG/ld6wGCG8wFnAzKX5uyW2K3WD52v0gnLCTK/VLpXtKckgWuyScYK6cSPaw==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/istanbul-lib-coverage": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", - "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", - "license": "MIT" - }, - "node_modules/@types/istanbul-lib-report": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", - "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", - "license": "MIT", - 
"dependencies": { - "@types/istanbul-lib-coverage": "*" - } - }, - "node_modules/@types/istanbul-reports": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", - "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", - "license": "MIT", - "dependencies": { - "@types/istanbul-lib-report": "*" - } - }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "license": "MIT" - }, - "node_modules/@types/mdast": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", - "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", - "license": "MIT", - "dependencies": { - "@types/unist": "*" - } - }, - "node_modules/@types/mdx": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", - "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", - "license": "MIT" - }, - "node_modules/@types/mime": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", - "license": "MIT" - }, - "node_modules/@types/ms": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", - "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "25.3.3", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.3.tgz", - "integrity": 
"sha512-DpzbrH7wIcBaJibpKo9nnSQL0MTRdnWttGyE5haGwK86xgMOkFLp7vEyfQPGLOJh5wNYiJ3V9PmUMDhV9u8kkQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~7.18.0" - } - }, - "node_modules/@types/prismjs": { - "version": "1.26.6", - "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.6.tgz", - "integrity": "sha512-vqlvI7qlMvcCBbVe0AKAb4f97//Hy0EBTaiW8AalRnG/xAN5zOiWWyrNqNXeq8+KAuvRewjCVY1+IPxk4RdNYw==", - "license": "MIT" - }, - "node_modules/@types/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", - "license": "MIT" - }, - "node_modules/@types/range-parser": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", - "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", - "license": "MIT" - }, - "node_modules/@types/react": { - "version": "19.2.14", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", - "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", - "license": "MIT", - "dependencies": { - "csstype": "^3.2.2" - } - }, - "node_modules/@types/react-router": { - "version": "5.1.20", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-5.1.20.tgz", - "integrity": "sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q==", - "license": "MIT", - "dependencies": { - "@types/history": "^4.7.11", - "@types/react": "*" - } - }, - "node_modules/@types/react-router-config": { - "version": "5.0.11", - "resolved": "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.11.tgz", - "integrity": "sha512-WmSAg7WgqW7m4x8Mt4N6ZyKz0BubSj/2tVUMsAHp+Yd2AMwcSbeFq9WympT19p5heCFmF97R9eD5uUR/t4HEqw==", - "license": "MIT", - 
"dependencies": { - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router": "^5.1.0" - } - }, - "node_modules/@types/react-router-dom": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz", - "integrity": "sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==", - "license": "MIT", - "dependencies": { - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router": "*" - } - }, - "node_modules/@types/retry": { - "version": "0.12.2", - "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.2.tgz", - "integrity": "sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==", - "license": "MIT" - }, - "node_modules/@types/sax": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz", - "integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/send": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@types/send/-/send-1.2.1.tgz", - "integrity": "sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/serve-index": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.4.tgz", - "integrity": "sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==", - "license": "MIT", - "dependencies": { - "@types/express": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.10", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.10.tgz", - "integrity": 
"sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw==", - "license": "MIT", - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "<1" - } - }, - "node_modules/@types/serve-static/node_modules/@types/send": { - "version": "0.17.6", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.6.tgz", - "integrity": "sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==", - "license": "MIT", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/sockjs": { - "version": "0.3.36", - "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.36.tgz", - "integrity": "sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/trusted-types": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", - "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", - "license": "MIT", - "optional": true - }, - "node_modules/@types/unist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", - "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", - "license": "MIT" - }, - "node_modules/@types/ws": { - "version": "8.18.1", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", - "integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/yargs": { - "version": "17.0.35", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz", - "integrity": 
"sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==", - "license": "MIT", - "dependencies": { - "@types/yargs-parser": "*" - } - }, - "node_modules/@types/yargs-parser": { - "version": "21.0.3", - "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", - "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", - "license": "MIT" - }, - "node_modules/@ungap/structured-clone": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", - "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", - "license": "ISC" - }, - "node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/floating-point-hex-parser": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", - "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-api-error": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", - "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.14.1", - "resolved": 
"https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", - "integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-numbers": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", - "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/floating-point-hex-parser": "1.13.2", - "@webassemblyjs/helper-api-error": "1.13.2", - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/helper-wasm-bytecode": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", - "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", - "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/wasm-gen": "1.14.1" - } - }, - "node_modules/@webassemblyjs/ieee754": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", - "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", - "license": "MIT", - "dependencies": { - "@xtuc/ieee754": "^1.2.0" - } - }, - "node_modules/@webassemblyjs/leb128": { - "version": "1.13.2", - "resolved": 
"https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", - "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", - "license": "Apache-2.0", - "dependencies": { - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/utf8": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", - "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", - "license": "MIT" - }, - "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", - "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/helper-wasm-section": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-opt": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1", - "@webassemblyjs/wast-printer": "1.14.1" - } - }, - "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", - "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", - "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", - 
"license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1" - } - }, - "node_modules/@webassemblyjs/wasm-parser": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", - "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-api-error": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wast-printer": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", - "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@xtuc/ieee754": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", - "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", - "license": "BSD-3-Clause" - }, - "node_modules/@xtuc/long": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", - "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", - "license": "Apache-2.0" - }, - "node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "license": "MIT", - "dependencies": { - 
"mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/acorn": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", - "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-import-phases": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/acorn-import-phases/-/acorn-import-phases-1.0.4.tgz", - "integrity": "sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==", - "license": "MIT", - "engines": { - "node": ">=10.13.0" - }, - "peerDependencies": { - "acorn": "^8.14.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": 
"sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/acorn-walk": { - "version": "8.3.5", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.5.tgz", - "integrity": "sha512-HEHNfbars9v4pgpW6SO1KSPkfoS0xVOM/9UzkJltjlsHZmJasxg8aXkuZa7SMf8vKGIBhpUsPluQSqhJFCqebw==", - "license": "MIT", - "dependencies": { - "acorn": "^8.11.0" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/address": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/address/-/address-1.2.2.tgz", - "integrity": "sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==", - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "license": "MIT", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/ajv": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", - "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ajv-formats": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", - "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", - 
"license": "MIT", - "dependencies": { - "ajv": "^8.0.0" - }, - "peerDependencies": { - "ajv": "^8.0.0" - }, - "peerDependenciesMeta": { - "ajv": { - "optional": true - } - } - }, - "node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/algoliasearch": { - "version": "5.49.1", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.49.1.tgz", - "integrity": "sha512-X3Pp2aRQhg4xUC6PQtkubn5NpRKuUPQ9FPDQlx36SmpFwwH2N0/tw4c+NXV3nw3PsgeUs+BuWGP0gjz3TvENLQ==", - "license": "MIT", - "dependencies": { - "@algolia/abtesting": "1.15.1", - "@algolia/client-abtesting": "5.49.1", - "@algolia/client-analytics": "5.49.1", - "@algolia/client-common": "5.49.1", - "@algolia/client-insights": "5.49.1", - "@algolia/client-personalization": "5.49.1", - "@algolia/client-query-suggestions": "5.49.1", - "@algolia/client-search": "5.49.1", - "@algolia/ingestion": "1.49.1", - "@algolia/monitoring": "1.49.1", - "@algolia/recommend": "5.49.1", - "@algolia/requester-browser-xhr": "5.49.1", - "@algolia/requester-fetch": "5.49.1", - "@algolia/requester-node-http": "5.49.1" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/algoliasearch-helper": { - "version": "3.28.0", - "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.28.0.tgz", - "integrity": "sha512-GBN0xsxGggaCPElZq24QzMdfphrjIiV2xA+hRXE4/UMpN3nsF2WrM8q+x80OGvGpJWtB7F+4Hq5eSfWwuejXrg==", - "license": "MIT", - "dependencies": { - "@algolia/events": "^4.0.1" - }, - "peerDependencies": { - "algoliasearch": ">= 3.1 < 6" - } - }, - "node_modules/ansi-align": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", - "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", - "license": "ISC", - "dependencies": { - "string-width": "^4.1.0" - } - }, - "node_modules/ansi-align/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/ansi-align/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-escapes/node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-html-community": { - "version": "0.0.8", - "resolved": 
"https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", - "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", - "engines": [ - "node >= 0.8.0" - ], - "license": "Apache-2.0", - "bin": { - "ansi-html": "bin/ansi-html" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", - "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", - "license": "MIT" - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "license": "Python-2.0" - }, - "node_modules/array-flatten": { - "version": 
"1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", - "license": "MIT" - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/asn1js": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.7.tgz", - "integrity": "sha512-uLvq6KJu04qoQM6gvBfKFjlh6Gl0vOKQuR5cJMDHQkmwfMOQeN3F3SHCv9SNYSL+CRoHvOGFfllDlVz03GQjvQ==", - "license": "BSD-3-Clause", - "dependencies": { - "pvtsutils": "^1.3.6", - "pvutils": "^1.1.3", - "tslib": "^2.8.1" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/astring": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz", - "integrity": "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==", - "license": "MIT", - "bin": { - "astring": "bin/astring" - } - }, - "node_modules/autoprefixer": { - "version": "10.4.27", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz", - "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.28.1", - "caniuse-lite": "^1.0.30001774", - "fraction.js": "^5.3.4", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": 
"bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/babel-loader": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.2.1.tgz", - "integrity": "sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==", - "license": "MIT", - "dependencies": { - "find-cache-dir": "^4.0.0", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 14.15.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0", - "webpack": ">=5" - } - }, - "node_modules/babel-plugin-dynamic-import-node": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", - "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", - "license": "MIT", - "dependencies": { - "object.assign": "^4.1.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.4.15", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.15.tgz", - "integrity": "sha512-hR3GwrRwHUfYwGfrisXPIDP3JcYfBrW7wKE7+Au6wDYl7fm/ka1NEII6kORzxNU556JjfidZeBsO10kYvtV1aw==", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.28.6", - "@babel/helper-define-polyfill-provider": "^0.6.6", - "semver": "^6.3.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.13.0", - "resolved": 
"https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.13.0.tgz", - "integrity": "sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==", - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.5", - "core-js-compat": "^3.43.0" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.6.6", - "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.6.tgz", - "integrity": "sha512-hYm+XLYRMvupxiQzrvXUj7YyvFFVfv5gI0R71AJzudg1g2AI2vyCPPIFEBjk162/wFzti3inBHo7isWFuEVS/A==", - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.6" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/bail": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", - "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "license": "MIT" - }, - "node_modules/baseline-browser-mapping": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", - "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", - "license": "Apache-2.0", - "bin": { - "baseline-browser-mapping": "dist/cli.cjs" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/batch": { - "version": 
"0.6.1", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", - "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==", - "license": "MIT" - }, - "node_modules/big.js": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", - "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", - "license": "MIT", - "engines": { - "node": "*" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/body-parser": { - "version": "1.20.4", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", - "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==", - "license": "MIT", - "dependencies": { - "bytes": "~3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "~1.2.0", - "http-errors": "~2.0.1", - "iconv-lite": "~0.4.24", - "on-finished": "~2.4.1", - "qs": "~6.14.0", - "raw-body": "~2.5.3", - "type-is": "~1.6.18", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": 
"https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/bonjour-service": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.3.0.tgz", - "integrity": "sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "multicast-dns": "^7.2.5" - } - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "license": "ISC" - }, - "node_modules/boxen": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz", - "integrity": "sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==", - "license": "MIT", - "dependencies": { - "ansi-align": "^3.0.1", - "camelcase": "^6.2.0", - "chalk": "^4.1.2", - "cli-boxes": "^3.0.0", - "string-width": "^5.0.1", - "type-fest": "^2.5.0", - "widest-line": "^4.0.1", - "wrap-ansi": "^8.0.1" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": 
"sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.28.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", - "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "baseline-browser-mapping": "^2.9.0", - "caniuse-lite": "^1.0.30001759", - "electron-to-chromium": "^1.5.263", - "node-releases": "^2.0.27", - "update-browserslist-db": "^1.2.0" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", - "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", - "license": "MIT" - }, - "node_modules/bundle-name": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", - "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", - "license": "MIT", - "dependencies": { - 
"run-applescript": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/bytestreamjs": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/bytestreamjs/-/bytestreamjs-2.0.1.tgz", - "integrity": "sha512-U1Z/ob71V/bXfVABvNr/Kumf5VyeQRBEm6Txb0PQ6S7V5GpBM3w4Cbqz/xPDicR5tN0uvDifng8C+5qECeGwyQ==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/cacheable-lookup": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", - "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", - "license": "MIT", - "engines": { - "node": ">=14.16" - } - }, - "node_modules/cacheable-request": { - "version": "10.2.14", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-10.2.14.tgz", - "integrity": "sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==", - "license": "MIT", - "dependencies": { - "@types/http-cache-semantics": "^4.0.2", - "get-stream": "^6.0.1", - "http-cache-semantics": "^4.1.1", - "keyv": "^4.5.3", - "mimic-response": "^4.0.0", - "normalize-url": "^8.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - } - }, - "node_modules/call-bind": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", - "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.0", - 
"es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/camel-case": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", - "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", - "license": "MIT", - "dependencies": { - "pascal-case": "^3.1.2", - "tslib": "^2.0.3" - } - }, - "node_modules/camelcase": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", - "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", - "license": 
"MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/caniuse-api": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", - "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.0.0", - "caniuse-lite": "^1.0.0", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001775", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001775.tgz", - "integrity": "sha512-s3Qv7Lht9zbVKE9XoTyRG6wVDCKdtOFIjBGg3+Yhn6JaytuNKPIjBMTMIY1AnOH3seL5mvF+x33oGAyK3hVt3A==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/ccount": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", - "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/char-regex": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", - "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/character-entities": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", - "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-entities-html4": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", - "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-entities-legacy": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", - "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-reference-invalid": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", - "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/cheerio": { - "version": "1.0.0-rc.12", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", - "integrity": 
"sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", - "license": "MIT", - "dependencies": { - "cheerio-select": "^2.1.0", - "dom-serializer": "^2.0.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1", - "htmlparser2": "^8.0.1", - "parse5": "^7.0.0", - "parse5-htmlparser2-tree-adapter": "^7.0.0" - }, - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/cheeriojs/cheerio?sponsor=1" - } - }, - "node_modules/cheerio-select": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", - "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0", - "css-select": "^5.1.0", - "css-what": "^6.1.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/chevrotain": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.1.2.tgz", - "integrity": "sha512-opLQzEVriiH1uUQ4Kctsd49bRoFDXGGSC4GUqj7pGyxM3RehRhvTlZJc1FL/Flew2p5uwxa1tUDWKzI4wNM8pg==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/cst-dts-gen": "11.1.2", - "@chevrotain/gast": "11.1.2", - "@chevrotain/regexp-to-ast": "11.1.2", - "@chevrotain/types": "11.1.2", - "@chevrotain/utils": "11.1.2", - "lodash-es": "4.17.23" - } - }, - "node_modules/chevrotain-allstar": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", - "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", - "license": "MIT", - "dependencies": { - "lodash-es": "^4.17.21" - }, - "peerDependencies": { - "chevrotain": "^11.0.0" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chrome-trace-event": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz", - "integrity": "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==", - "license": "MIT", - "engines": { - "node": ">=6.0" - } - }, - "node_modules/ci-info": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", - "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/sibiraj-s" - } - ], - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/clean-css": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.3.tgz", - "integrity": "sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==", - "license": "MIT", - "dependencies": { - "source-map": "~0.6.0" - }, - "engines": { - "node": ">= 10.0" - } - }, - "node_modules/clean-css/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/cli-boxes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", - "integrity": "sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-table3": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz", - "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==", - "license": "MIT", - "dependencies": { - "string-width": "^4.2.0" - }, - "engines": { - "node": "10.* || >= 12.*" - }, - "optionalDependencies": { - "@colors/colors": "1.5.0" - } - }, - "node_modules/cli-table3/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/cli-table3/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/clone-deep": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", - 
"integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", - "license": "MIT", - "dependencies": { - "is-plain-object": "^2.0.4", - "kind-of": "^6.0.2", - "shallow-clone": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/clsx": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/collapse-white-space": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-2.1.0.tgz", - "integrity": "sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "license": "MIT" - }, - "node_modules/colord": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", - "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", - "license": "MIT" - }, - "node_modules/colorette": { - "version": "2.0.20", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", - "integrity": 
"sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "license": "MIT" - }, - "node_modules/combine-promises": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/combine-promises/-/combine-promises-1.2.0.tgz", - "integrity": "sha512-VcQB1ziGD0NXrhKxiwyNbCDmRzs/OShMs2GqW2DlU2A/Sd0nQxE1oWDAE5O0ygSx5mgQOn9eIFh7yKPgFRVkPQ==", - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/comma-separated-tokens": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", - "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/commander": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", - "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/common-path-prefix": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", - "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", - "license": "ISC" - }, - "node_modules/compressible": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", - "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", - "license": "MIT", - "dependencies": { - "mime-db": ">= 1.43.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/compressible/node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": 
"sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/compression": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", - "integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "compressible": "~2.0.18", - "debug": "2.6.9", - "negotiator": "~0.6.4", - "on-headers": "~1.1.0", - "safe-buffer": "5.2.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/compression/node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/compression/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/compression/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "license": "MIT" - }, - "node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", - "integrity": 
"sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", - "license": "MIT" - }, - "node_modules/config-chain": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", - "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", - "license": "MIT", - "dependencies": { - "ini": "^1.3.4", - "proto-list": "~1.2.1" - } - }, - "node_modules/configstore": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-6.0.0.tgz", - "integrity": "sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==", - "license": "BSD-2-Clause", - "dependencies": { - "dot-prop": "^6.0.1", - "graceful-fs": "^4.2.6", - "unique-string": "^3.0.0", - "write-file-atomic": "^3.0.3", - "xdg-basedir": "^5.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/yeoman/configstore?sponsor=1" - } - }, - "node_modules/connect-history-api-fallback": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", - "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==", - "license": "MIT", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/consola": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", - "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", - "license": "MIT", - "engines": { - "node": "^14.18.0 || >=16.10.0" - } - }, - "node_modules/content-disposition": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==", - "license": "MIT", - 
"engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "license": "MIT" - }, - "node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz", - "integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==", - "license": "MIT" - }, - "node_modules/copy-webpack-plugin": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz", - "integrity": "sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==", - "license": "MIT", - "dependencies": { - "fast-glob": "^3.2.11", - "glob-parent": "^6.0.1", - "globby": "^13.1.1", - "normalize-path": "^3.0.0", - "schema-utils": "^4.0.0", - "serialize-javascript": "^6.0.0" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - } - }, - "node_modules/copy-webpack-plugin/node_modules/glob-parent": { - "version": "6.0.2", - 
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/copy-webpack-plugin/node_modules/globby": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz", - "integrity": "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==", - "license": "MIT", - "dependencies": { - "dir-glob": "^3.0.1", - "fast-glob": "^3.3.0", - "ignore": "^5.2.4", - "merge2": "^1.4.1", - "slash": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/copy-webpack-plugin/node_modules/slash": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", - "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/core-js": { - "version": "3.48.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.48.0.tgz", - "integrity": "sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==", - "hasInstallScript": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/core-js-compat": { - "version": "3.48.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.48.0.tgz", - "integrity": "sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.28.1" - }, - "funding": { 
- "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/core-js-pure": { - "version": "3.48.0", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.48.0.tgz", - "integrity": "sha512-1slJgk89tWC51HQ1AEqG+s2VuwpTRr8ocu4n20QUcH1v9lAN0RXen0Q0AABa/DK1I7RrNWLucplOHMx8hfTGTw==", - "hasInstallScript": true, - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "license": "MIT" - }, - "node_modules/cose-base": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", - "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", - "license": "MIT", - "dependencies": { - "layout-base": "^1.0.0" - } - }, - "node_modules/cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "license": "MIT", - "dependencies": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - 
"path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/crypto-random-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", - "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", - "license": "MIT", - "dependencies": { - "type-fest": "^1.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/crypto-random-string/node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/css-blank-pseudo": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/css-blank-pseudo/-/css-blank-pseudo-7.0.1.tgz", - "integrity": "sha512-jf+twWGDf6LDoXDUode+nc7ZlrqfaNphrBIBrcmeP3D8yw1uPaix1gCC8LUQUGQ6CycuK2opkbFFWFuq/a94ag==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/css-blank-pseudo/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - 
"util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/css-declaration-sorter": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-7.3.1.tgz", - "integrity": "sha512-gz6x+KkgNCjxq3Var03pRYLhyNfwhkKF1g/yoLgDNtFvVu0/fOLV9C8fFEZRjACp/XQLumjAYo7JVjzH3wLbxA==", - "license": "ISC", - "engines": { - "node": "^14 || ^16 || >=18" - }, - "peerDependencies": { - "postcss": "^8.0.9" - } - }, - "node_modules/css-has-pseudo": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-7.0.3.tgz", - "integrity": "sha512-oG+vKuGyqe/xvEMoxAQrhi7uY16deJR3i7wwhBerVrGQKSqUC5GiOVxTpM9F9B9hw0J+eKeOWLH7E9gZ1Dr5rA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/selector-specificity": "^5.0.0", - "postcss-selector-parser": "^7.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/css-has-pseudo/node_modules/@csstools/selector-specificity": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", - "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss-selector-parser": "^7.0.0" - } - }, - "node_modules/css-has-pseudo/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": 
"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/css-loader": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.11.0.tgz", - "integrity": "sha512-CTJ+AEQJjq5NzLga5pE39qdiSV56F8ywCIsqNIRF0r7BDgWsN25aazToqAFg7ZrtA/U016xudB3ffgweORxX7g==", - "license": "MIT", - "dependencies": { - "icss-utils": "^5.1.0", - "postcss": "^8.4.33", - "postcss-modules-extract-imports": "^3.1.0", - "postcss-modules-local-by-default": "^4.0.5", - "postcss-modules-scope": "^3.2.0", - "postcss-modules-values": "^4.0.0", - "postcss-value-parser": "^4.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/css-minimizer-webpack-plugin": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz", - "integrity": "sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg==", - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.18", - "cssnano": "^6.0.1", - "jest-worker": "^29.4.3", - "postcss": "^8.4.24", - "schema-utils": "^4.0.1", - "serialize-javascript": "^6.0.1" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - 
"@parcel/css": { - "optional": true - }, - "@swc/css": { - "optional": true - }, - "clean-css": { - "optional": true - }, - "csso": { - "optional": true - }, - "esbuild": { - "optional": true - }, - "lightningcss": { - "optional": true - } - } - }, - "node_modules/css-prefers-color-scheme": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/css-prefers-color-scheme/-/css-prefers-color-scheme-10.0.0.tgz", - "integrity": "sha512-VCtXZAWivRglTZditUfB4StnsWr6YVZ2PRtuxQLKTNRdtAf8tpzaVPE9zXIF3VaSc7O70iK/j1+NXxyQCqdPjQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/css-select": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz", - "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.1.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/css-tree": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", - "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.30", - "source-map-js": "^1.0.1" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" - } - }, - "node_modules/css-what": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", - "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", - "license": "BSD-2-Clause", 
- "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/cssdb": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-8.8.0.tgz", - "integrity": "sha512-QbLeyz2Bgso1iRlh7IpWk6OKa3lLNGXsujVjDMPl9rOZpxKeiG69icLpbLCFxeURwmcdIfZqQyhlooKJYM4f8Q==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - }, - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - } - ], - "license": "MIT-0" - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/cssnano": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.1.2.tgz", - "integrity": "sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA==", - "license": "MIT", - "dependencies": { - "cssnano-preset-default": "^6.1.2", - "lilconfig": "^3.1.1" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/cssnano" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/cssnano-preset-advanced": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz", - "integrity": "sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ==", - "license": "MIT", - "dependencies": { - "autoprefixer": "^10.4.19", - "browserslist": "^4.23.0", - "cssnano-preset-default": "^6.1.2", - "postcss-discard-unused": "^6.0.5", - "postcss-merge-idents": "^6.0.3", - "postcss-reduce-idents": "^6.0.3", - "postcss-zindex": "^6.0.2" - }, - "engines": 
{ - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/cssnano-preset-default": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz", - "integrity": "sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "css-declaration-sorter": "^7.2.0", - "cssnano-utils": "^4.0.2", - "postcss-calc": "^9.0.1", - "postcss-colormin": "^6.1.0", - "postcss-convert-values": "^6.1.0", - "postcss-discard-comments": "^6.0.2", - "postcss-discard-duplicates": "^6.0.3", - "postcss-discard-empty": "^6.0.3", - "postcss-discard-overridden": "^6.0.2", - "postcss-merge-longhand": "^6.0.5", - "postcss-merge-rules": "^6.1.1", - "postcss-minify-font-values": "^6.1.0", - "postcss-minify-gradients": "^6.0.3", - "postcss-minify-params": "^6.1.0", - "postcss-minify-selectors": "^6.0.4", - "postcss-normalize-charset": "^6.0.2", - "postcss-normalize-display-values": "^6.0.2", - "postcss-normalize-positions": "^6.0.2", - "postcss-normalize-repeat-style": "^6.0.2", - "postcss-normalize-string": "^6.0.2", - "postcss-normalize-timing-functions": "^6.0.2", - "postcss-normalize-unicode": "^6.1.0", - "postcss-normalize-url": "^6.0.2", - "postcss-normalize-whitespace": "^6.0.2", - "postcss-ordered-values": "^6.0.2", - "postcss-reduce-initial": "^6.1.0", - "postcss-reduce-transforms": "^6.0.2", - "postcss-svgo": "^6.0.3", - "postcss-unique-selectors": "^6.0.4" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/cssnano-utils": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz", - "integrity": "sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==", - "license": "MIT", - "engines": { - "node": "^14 || 
^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/csso": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", - "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", - "license": "MIT", - "dependencies": { - "css-tree": "~2.2.0" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", - "npm": ">=7.0.0" - } - }, - "node_modules/csso/node_modules/css-tree": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", - "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", - "license": "MIT", - "dependencies": { - "mdn-data": "2.0.28", - "source-map-js": "^1.0.1" - }, - "engines": { - "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", - "npm": ">=7.0.0" - } - }, - "node_modules/csso/node_modules/mdn-data": { - "version": "2.0.28", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", - "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==", - "license": "CC0-1.0" - }, - "node_modules/csstype": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", - "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "license": "MIT" - }, - "node_modules/cytoscape": { - "version": "3.33.1", - "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", - "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==", - "license": "MIT", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/cytoscape-cose-bilkent": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", - "integrity": 
"sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", - "license": "MIT", - "dependencies": { - "cose-base": "^1.0.0" - }, - "peerDependencies": { - "cytoscape": "^3.2.0" - } - }, - "node_modules/cytoscape-fcose": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", - "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", - "license": "MIT", - "dependencies": { - "cose-base": "^2.2.0" - }, - "peerDependencies": { - "cytoscape": "^3.2.0" - } - }, - "node_modules/cytoscape-fcose/node_modules/cose-base": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", - "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", - "license": "MIT", - "dependencies": { - "layout-base": "^2.0.0" - } - }, - "node_modules/cytoscape-fcose/node_modules/layout-base": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", - "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", - "license": "MIT" - }, - "node_modules/d3": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", - "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", - "license": "ISC", - "dependencies": { - "d3-array": "3", - "d3-axis": "3", - "d3-brush": "3", - "d3-chord": "3", - "d3-color": "3", - "d3-contour": "4", - "d3-delaunay": "6", - "d3-dispatch": "3", - "d3-drag": "3", - "d3-dsv": "3", - "d3-ease": "3", - "d3-fetch": "3", - "d3-force": "3", - "d3-format": "3", - "d3-geo": "3", - "d3-hierarchy": "3", - "d3-interpolate": "3", - "d3-path": "3", - "d3-polygon": "3", - "d3-quadtree": "3", - "d3-random": "3", - "d3-scale": "4", - "d3-scale-chromatic": "3", - 
"d3-selection": "3", - "d3-shape": "3", - "d3-time": "3", - "d3-time-format": "4", - "d3-timer": "3", - "d3-transition": "3", - "d3-zoom": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-array": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", - "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", - "license": "ISC", - "dependencies": { - "internmap": "1 - 2" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-axis": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", - "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-brush": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", - "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "3", - "d3-transition": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-chord": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", - "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", - "license": "ISC", - "dependencies": { - "d3-path": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-contour": { - "version": "4.0.2", - "resolved": 
"https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", - "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", - "license": "ISC", - "dependencies": { - "d3-array": "^3.2.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", - "license": "ISC", - "dependencies": { - "delaunator": "5" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dispatch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", - "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-drag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", - "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-selection": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", - "license": "ISC", - "dependencies": { - "commander": "7", - "iconv-lite": "0.6", - "rw": "1" - }, - "bin": { - "csv2json": "bin/dsv2json.js", - "csv2tsv": "bin/dsv2dsv.js", - "dsv2dsv": "bin/dsv2dsv.js", - "dsv2json": "bin/dsv2json.js", - "json2csv": "bin/json2dsv.js", - "json2dsv": "bin/json2dsv.js", - "json2tsv": "bin/json2dsv.js", - "tsv2csv": "bin/dsv2dsv.js", - "tsv2json": "bin/dsv2json.js" - }, - "engines": { - "node": ">=12" - } - }, 
- "node_modules/d3-dsv/node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/d3-dsv/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/d3-ease": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-fetch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", - "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", - "license": "ISC", - "dependencies": { - "d3-dsv": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-force": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", - "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-quadtree": "1 - 3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-format": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", - "integrity": 
"sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-geo": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", - "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", - "license": "ISC", - "dependencies": { - "d3-array": "2.5.0 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-path": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-polygon": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", - "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-quadtree": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", - "integrity": 
"sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-random": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", - "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-sankey": { - "version": "0.12.3", - "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", - "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-array": "1 - 2", - "d3-shape": "^1.2.0" - } - }, - "node_modules/d3-sankey/node_modules/d3-array": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", - "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", - "license": "BSD-3-Clause", - "dependencies": { - "internmap": "^1.0.0" - } - }, - "node_modules/d3-sankey/node_modules/d3-path": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", - "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", - "license": "BSD-3-Clause" - }, - "node_modules/d3-sankey/node_modules/d3-shape": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", - "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-path": "1" - } - }, - "node_modules/d3-sankey/node_modules/internmap": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", - "integrity": 
"sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", - "license": "ISC" - }, - "node_modules/d3-scale": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", - "license": "ISC", - "dependencies": { - "d3-array": "2.10.0 - 3", - "d3-format": "1 - 3", - "d3-interpolate": "1.2.0 - 3", - "d3-time": "2.1.1 - 3", - "d3-time-format": "2 - 4" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3", - "d3-interpolate": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-shape": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", - "license": "ISC", - "dependencies": { - "d3-path": "^3.1.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", - "license": "ISC", - "dependencies": { - "d3-array": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - 
"node_modules/d3-time-format": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", - "license": "ISC", - "dependencies": { - "d3-time": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-timer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-transition": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", - "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3", - "d3-dispatch": "1 - 3", - "d3-ease": "1 - 3", - "d3-interpolate": "1 - 3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - }, - "peerDependencies": { - "d3-selection": "2 - 3" - } - }, - "node_modules/d3-zoom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", - "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "2 - 3", - "d3-transition": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/dagre-d3-es": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", - "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", - "license": "MIT", - "dependencies": { - "d3": "^7.9.0", - "lodash-es": "^4.17.21" - } - }, - "node_modules/dayjs": 
{ - "version": "1.11.19", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", - "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", - "license": "MIT" - }, - "node_modules/debounce": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", - "integrity": "sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==", - "license": "MIT" - }, - "node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decode-named-character-reference": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", - "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", - "license": "MIT", - "dependencies": { - "character-entities": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "license": "MIT", - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/decompress-response/node_modules/mimic-response": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "license": "MIT", - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/default-browser": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.5.0.tgz", - "integrity": "sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==", - "license": "MIT", - "dependencies": { - "bundle-name": "^4.1.0", - "default-browser-id": "^5.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/default-browser-id": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.1.tgz", - "integrity": "sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": 
"sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/define-lazy-prop": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", - "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", - "license": "MIT", - "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/delaunator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", - "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", - "license": "ISC", - "dependencies": { - "robust-predicates": "^3.0.2" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": 
"sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "license": "MIT", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-node": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", - "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==", - "license": "MIT" - }, - "node_modules/detect-port": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.6.1.tgz", - "integrity": "sha512-CmnVc+Hek2egPx1PeTFVta2W78xy2K/9Rkf6cC4T59S50tVnzKj+tnx5mmx5lwvCkujZ4uRrpRSuV+IVs3f90Q==", - "license": "MIT", - "dependencies": { - "address": "^1.0.1", - "debug": "4" - }, - "bin": { - "detect": "bin/detect-port.js", - "detect-port": "bin/detect-port.js" - }, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/devlop": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", - "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", - "license": "MIT", - "dependencies": { - "dequal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dns-packet": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", - "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", - "license": "MIT", - "dependencies": { - "@leichtgewicht/ip-codec": "^2.0.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dom-converter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", - "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", - "license": "MIT", - "dependencies": { - "utila": "~0.4" - } - }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "BSD-2-Clause" - }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": 
"sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "^2.3.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/dompurify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", - "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", - "license": "(MPL-2.0 OR Apache-2.0)", - "optionalDependencies": { - "@types/trusted-types": "^2.0.7" - } - }, - "node_modules/domutils": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "license": "BSD-2-Clause", - "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/dot-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", - "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", - "license": "MIT", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/dot-prop": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", - "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", - "license": "MIT", - "dependencies": { - "is-obj": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/dot-prop/node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", 
- "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/duplexer": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", - "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", - "license": "MIT" - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "license": "MIT" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "license": "MIT" - }, - "node_modules/electron-to-chromium": { - "version": "1.5.302", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", - "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "license": "MIT" - }, - 
"node_modules/emojilib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/emojilib/-/emojilib-2.4.0.tgz", - "integrity": "sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==", - "license": "MIT" - }, - "node_modules/emojis-list": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", - "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/emoticon": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-4.1.0.tgz", - "integrity": "sha512-VWZfnxqwNcc51hIy/sbOdEem6D+cVtpPzEEtVAFdaas30+1dgkyaOQ4sQ6Bp0tOMqWO1v+HQfYaoodOkdhK6SQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/enhanced-resolve": { - "version": "5.20.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.0.tgz", - "integrity": "sha512-/ce7+jQ1PQ6rVXwe+jKEg5hW5ciicHwIQUagZkp6IufBoY3YDgdTTY1azVs0qoRgVmvsNB+rbjLJxDAeHHtwsQ==", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.3.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": 
"https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/error-ex": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", - "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-module-lexer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.0.0.tgz", - "integrity": "sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==", - "license": "MIT" - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/esast-util-from-estree": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/esast-util-from-estree/-/esast-util-from-estree-2.0.0.tgz", - "integrity": "sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ==", - "license": "MIT", - "dependencies": { - 
"@types/estree-jsx": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-visit": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/esast-util-from-js": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/esast-util-from-js/-/esast-util-from-js-2.0.1.tgz", - "integrity": "sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "acorn": "^8.0.0", - "esast-util-from-estree": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-goat": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz", - "integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "license": "MIT" - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - 
"license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "license": "BSD-2-Clause", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esrecurse/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estree-util-attach-comments": { - "version": 
"3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz", - "integrity": "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-build-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz", - "integrity": "sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-walker": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-is-identifier-name": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", - "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-scope": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/estree-util-scope/-/estree-util-scope-1.0.0.tgz", - "integrity": "sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-to-js": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz", - 
"integrity": "sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "astring": "^1.8.0", - "source-map": "^0.7.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-value-to-estree": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.5.0.tgz", - "integrity": "sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/remcohaszing" - } - }, - "node_modules/estree-util-visit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", - "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-walker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eta": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", - "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - }, - "funding": { - "url": "https://github.com/eta-dev/eta?sponsor=1" - } - }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/eval": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", - "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", - "dependencies": { - "@types/node": "*", - "require-like": ">= 0.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "license": "MIT" - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "license": "MIT", - "engines": { - "node": ">=0.8.x" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - 
"strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/express": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz", - "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==", - "license": "MIT", - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "~1.20.3", - "content-disposition": "~0.5.4", - "content-type": "~1.0.4", - "cookie": "~0.7.1", - "cookie-signature": "~1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "~1.3.1", - "fresh": "~0.5.2", - "http-errors": "~2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "~2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "~0.1.12", - "proxy-addr": "~2.0.7", - "qs": "~6.14.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "~0.19.0", - "serve-static": "~1.16.2", - "setprototypeof": "1.2.0", - "statuses": "~2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express/node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/express/node_modules/path-to-regexp": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", - "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", - "license": "MIT" - }, - "node_modules/express/node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "license": "MIT" - }, - "node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "license": "MIT", - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "license": "MIT" - }, - 
"node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "license": "MIT" - }, - "node_modules/fast-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", - "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fastify" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fastify" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/fastq": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", - "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fault": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", - "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", - "license": "MIT", - "dependencies": { - "format": "^0.2.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/faye-websocket": { - "version": "0.11.4", - "resolved": 
"https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", - "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", - "license": "Apache-2.0", - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/feed": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", - "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", - "license": "MIT", - "dependencies": { - "xml-js": "^1.6.11" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/figures": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", - "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/figures/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/file-loader": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", - "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", - "license": "MIT", - "dependencies": { - "loader-utils": "^2.0.0", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - 
"node_modules/file-loader/node_modules/ajv": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", - "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/file-loader/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "license": "MIT", - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/file-loader/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "license": "MIT" - }, - "node_modules/file-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "license": "MIT", - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "license": "MIT", - 
"dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/finalhandler": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz", - "integrity": "sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "~2.4.1", - "parseurl": "~1.3.3", - "statuses": "~2.0.2", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/find-cache-dir": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-4.0.0.tgz", - "integrity": "sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==", - "license": "MIT", - "dependencies": { - "common-path-prefix": "^3.0.0", - "pkg-dir": "^7.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", - "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", - "license": "MIT", - "dependencies": { - "locate-path": "^7.1.0", - "path-exists": "^5.0.0" - }, 
- "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "license": "BSD-3-Clause", - "bin": { - "flat": "cli.js" - } - }, - "node_modules/follow-redirects": { - "version": "1.15.11", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", - "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/form-data-encoder": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", - "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", - "license": "MIT", - "engines": { - "node": ">= 14.17" - } - }, - "node_modules/format": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", - "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fraction.js": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", - 
"integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs-extra": { - "version": "11.3.3", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", - "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "license": "MIT", - "engines": { - "node": 
">=6.9.0" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-own-enumerable-property-symbols": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", - "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==", - "license": "ISC" - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/github-slugger": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz", - "integrity": 
"sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==", - "license": "ISC" - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob-to-regex.js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/glob-to-regex.js/-/glob-to-regex.js-1.2.0.tgz", - "integrity": "sha512-QMwlOQKU/IzqMUOAZWubUOT8Qft+Y0KQWnX9nK3ch0CJg0tTp4TvGZsTfudYKv2NzoQSyPcnA6TYeIQ3jGichQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", - "license": "BSD-2-Clause" - }, - "node_modules/global-dirs": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", - "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", - "license": "MIT", - "dependencies": { - "ini": "2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/global-dirs/node_modules/ini": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/globby": { - 
"version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "license": "MIT", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/got": { - "version": "12.6.1", - "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", - "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", - "license": "MIT", - "dependencies": { - "@sindresorhus/is": "^5.2.0", - "@szmarczak/http-timer": "^5.0.1", - "cacheable-lookup": "^7.0.0", - "cacheable-request": "^10.2.8", - "decompress-response": "^6.0.0", - "form-data-encoder": "^2.1.2", - "get-stream": "^6.0.1", - "http2-wrapper": "^2.1.10", - "lowercase-keys": "^3.0.0", - "p-cancelable": "^3.0.0", - "responselike": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" - } - }, - "node_modules/got/node_modules/@sindresorhus/is": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", - "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" - } - 
}, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "license": "ISC" - }, - "node_modules/gray-matter": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", - "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", - "license": "MIT", - "dependencies": { - "js-yaml": "^3.13.1", - "kind-of": "^6.0.2", - "section-matter": "^1.0.0", - "strip-bom-string": "^1.0.0" - }, - "engines": { - "node": ">=6.0" - } - }, - "node_modules/gray-matter/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "license": "MIT", - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/gray-matter/node_modules/js-yaml": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", - "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", - "license": "MIT", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/gzip-size": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", - "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", - "license": "MIT", - "dependencies": { - "duplexer": "^0.1.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/hachure-fill": { - "version": "0.5.2", - "resolved": 
"https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", - "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", - "license": "MIT" - }, - "node_modules/handle-thing": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", - "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==", - "license": "MIT" - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-yarn": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-3.0.0.tgz", - "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hast-util-from-parse5": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz", - "integrity": "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "devlop": "^1.0.0", - "hastscript": "^9.0.0", - "property-information": "^7.0.0", - "vfile": "^6.0.0", - "vfile-location": "^5.0.0", - "web-namespaces": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-parse-selector": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", - "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-raw": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.1.0.tgz", - "integrity": "sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "@ungap/structured-clone": "^1.0.0", - "hast-util-from-parse5": "^8.0.0", - "hast-util-to-parse5": "^8.0.0", - "html-void-elements": "^3.0.0", - "mdast-util-to-hast": "^13.0.0", - "parse5": "^7.0.0", - 
"unist-util-position": "^5.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-estree": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-3.1.3.tgz", - "integrity": "sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-attach-comments": "^3.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "hast-util-whitespace": "^3.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-js": "^1.0.0", - "unist-util-position": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-jsx-runtime": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", - "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "hast-util-whitespace": "^3.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-js": "^1.0.0", - "unist-util-position": "^5.0.0", - 
"vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-parse5": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.1.tgz", - "integrity": "sha512-MlWT6Pjt4CG9lFCjiz4BH7l9wmrMkfkJYCxFwKQic8+RTZgWPuWxwAfjJElsXkex7DJjfSJsQIt931ilUgmwdA==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "devlop": "^1.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "web-namespaces": "^2.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-whitespace": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", - "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hastscript": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", - "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^4.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", - "license": 
"MIT", - "bin": { - "he": "bin/he" - } - }, - "node_modules/history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" - } - }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "license": "BSD-3-Clause", - "dependencies": { - "react-is": "^16.7.0" - } - }, - "node_modules/hpack.js": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", - "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.1", - "obuf": "^1.0.0", - "readable-stream": "^2.0.1", - "wbuf": "^1.1.0" - } - }, - "node_modules/hpack.js/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "license": "MIT" - }, - "node_modules/hpack.js/node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "license": "MIT", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - 
"string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/hpack.js/node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "license": "MIT" - }, - "node_modules/hpack.js/node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", - "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", - "license": "MIT" - }, - "node_modules/html-minifier-terser": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz", - "integrity": "sha512-tXgn3QfqPIpGl9o+K5tpcj3/MN4SfLtsx2GWwBC3SSd0tXQGyF3gsSqad8loJgKZGM3ZxbYDd5yhiBIdWpmvLA==", - "license": "MIT", - "dependencies": { - "camel-case": "^4.1.2", - "clean-css": "~5.3.2", - "commander": "^10.0.0", - "entities": "^4.4.0", - "param-case": "^3.0.4", - "relateurl": "^0.2.7", - "terser": "^5.15.1" - }, - "bin": { - "html-minifier-terser": "cli.js" - }, - "engines": { - "node": "^14.13.1 || >=16.0.0" - } - }, - "node_modules/html-minifier-terser/node_modules/commander": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", - "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", - "license": "MIT", - "engines": { - "node": ">=14" - } - }, - "node_modules/html-tags": { - "version": "3.3.1", - "resolved": 
"https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz", - "integrity": "sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/html-void-elements": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", - "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/html-webpack-plugin": { - "version": "5.6.6", - "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.6.6.tgz", - "integrity": "sha512-bLjW01UTrvoWTJQL5LsMRo1SypHW80FTm12OJRSnr3v6YHNhfe+1r0MYUZJMACxnCHURVnBWRwAsWs2yPU9Ezw==", - "license": "MIT", - "dependencies": { - "@types/html-minifier-terser": "^6.0.0", - "html-minifier-terser": "^6.0.2", - "lodash": "^4.17.21", - "pretty-error": "^4.0.0", - "tapable": "^2.0.0" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/html-webpack-plugin" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "webpack": "^5.20.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/html-webpack-plugin/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/html-webpack-plugin/node_modules/html-minifier-terser": { - "version": "6.1.0", - "resolved": 
"https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", - "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", - "license": "MIT", - "dependencies": { - "camel-case": "^4.1.2", - "clean-css": "^5.2.2", - "commander": "^8.3.0", - "he": "^1.2.0", - "param-case": "^3.0.4", - "relateurl": "^0.2.7", - "terser": "^5.10.0" - }, - "bin": { - "html-minifier-terser": "cli.js" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/htmlparser2": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", - "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==", - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1", - "entities": "^4.4.0" - } - }, - "node_modules/http-cache-semantics": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", - "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", - "license": "BSD-2-Clause" - }, - "node_modules/http-deceiver": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", - "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==", - "license": "MIT" - }, - "node_modules/http-errors": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", - "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", - "license": "MIT", - "dependencies": { - "depd": "~2.0.0", - "inherits": "~2.0.4", - "setprototypeof": "~1.2.0", - 
"statuses": "~2.0.2", - "toidentifier": "~1.0.1" - }, - "engines": { - "node": ">= 0.8" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/http-parser-js": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.10.tgz", - "integrity": "sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==", - "license": "MIT" - }, - "node_modules/http-proxy": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", - "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", - "license": "MIT", - "dependencies": { - "eventemitter3": "^4.0.0", - "follow-redirects": "^1.0.0", - "requires-port": "^1.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/http-proxy-middleware": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", - "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", - "license": "MIT", - "dependencies": { - "@types/http-proxy": "^1.17.8", - "http-proxy": "^1.18.1", - "is-glob": "^4.0.1", - "is-plain-obj": "^3.0.0", - "micromatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "@types/express": "^4.17.13" - }, - "peerDependenciesMeta": { - "@types/express": { - "optional": true - } - } - }, - "node_modules/http-proxy-middleware/node_modules/is-plain-obj": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", - "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/http2-wrapper": { - 
"version": "2.2.1", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", - "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", - "license": "MIT", - "dependencies": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.2.0" - }, - "engines": { - "node": ">=10.19.0" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/hyperdyperid": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/hyperdyperid/-/hyperdyperid-1.2.0.tgz", - "integrity": "sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==", - "license": "MIT", - "engines": { - "node": ">=10.18" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/icss-utils": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", - "integrity": "sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "license": "MIT", - 
"engines": { - "node": ">= 4" - } - }, - "node_modules/image-size": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/image-size/-/image-size-2.0.2.tgz", - "integrity": "sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w==", - "license": "MIT", - "bin": { - "image-size": "bin/image-size.js" - }, - "engines": { - "node": ">=16.x" - } - }, - "node_modules/import-fresh": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", - "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", - "license": "MIT", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/import-lazy": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-4.0.0.tgz", - "integrity": "sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/infima": { - "version": "0.2.0-alpha.45", - "resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.45.tgz", - "integrity": 
"sha512-uyH0zfr1erU1OohLk0fT4Rrb94AOhguWNOcD9uGrSpRvNB+6gZXUoJX5J0NtvzBO10YZ9PgvA4NFgt+fYg8ojw==", - "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "license": "ISC" - }, - "node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "license": "ISC" - }, - "node_modules/inline-style-parser": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", - "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", - "license": "MIT" - }, - "node_modules/internmap": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.0.0" - } - }, - "node_modules/ipaddr.js": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.3.0.tgz", - "integrity": "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/is-alphabetical": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", - "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-alphanumerical": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", - "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", - "license": "MIT", - "dependencies": { - "is-alphabetical": "^2.0.0", - "is-decimal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", - "license": "MIT" - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-ci": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", - "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", - "license": "MIT", - "dependencies": { - "ci-info": "^3.2.0" - }, - "bin": { - "is-ci": "bin.js" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", - "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", - 
"license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-decimal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", - "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-docker": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", - "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", - "license": "MIT", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-hexadecimal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", - "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-inside-container": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", - "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", - "license": "MIT", - "dependencies": { - "is-docker": "^3.0.0" - }, - "bin": { - "is-inside-container": "cli.js" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-inside-container/node_modules/is-docker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", - "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", - "license": "MIT", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-installed-globally": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", - "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", - "license": "MIT", - "dependencies": { - "global-dirs": "^3.0.0", - 
"is-path-inside": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-network-error": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.3.1.tgz", - "integrity": "sha512-6QCxa49rQbmUWLfk0nuGqzql9U8uaV2H6279bRErPBHe/109hCzsLUBUHfbEtvLIHBd6hyXbgedBSHevm43Edw==", - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-npm": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-6.1.0.tgz", - "integrity": "sha512-O2z4/kNgyjhQwVR1Wpkbfc19JIhggF97NZNCpWTnjH7kVcZMUrnut9XSN7txI7VdyIYk5ZatOq3zvSuWpU8hoA==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-plain-obj": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", - 
"integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "license": "MIT", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", - "integrity": "sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==", - "license": "MIT" - }, - "node_modules/is-wsl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", - "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", - "license": "MIT", - "dependencies": { - "is-docker": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-yarn-global": { - "version": "0.4.1", - "resolved": 
"https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.4.1.tgz", - "integrity": "sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==", - "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", - "license": "MIT" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "license": "ISC" - }, - "node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/jest-util": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", - "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", - "license": "MIT", - "dependencies": { - "@jest/types": "^29.6.3", - "@types/node": "*", - "chalk": "^4.0.0", - "ci-info": "^3.2.0", - "graceful-fs": "^4.2.9", - "picomatch": "^2.2.3" - }, - "engines": { - "node": "^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker": { - "version": "29.7.0", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", - "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", - "license": "MIT", - "dependencies": { - "@types/node": "*", - "jest-util": "^29.7.0", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": 
"^14.15.0 || ^16.10.0 || >=18.0.0" - } - }, - "node_modules/jest-worker/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/jiti": { - "version": "1.21.7", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", - "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", - "license": "MIT", - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/joi": { - "version": "17.13.3", - "resolved": "https://registry.npmjs.org/joi/-/joi-17.13.3.tgz", - "integrity": "sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==", - "license": "BSD-3-Clause", - "dependencies": { - "@hapi/hoek": "^9.3.0", - "@hapi/topo": "^5.1.0", - "@sideway/address": "^4.1.5", - "@sideway/formula": "^3.0.1", - "@sideway/pinpoint": "^2.0.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/js-yaml": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", - "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": 
"sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "license": "MIT" - }, - "node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "license": "MIT" - }, - "node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "license": "MIT" - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/katex": { - "version": "0.16.33", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.33.tgz", - "integrity": 
"sha512-q3N5u+1sY9Bu7T4nlXoiRBXWfwSefNGoKeOwekV+gw0cAXQlz2Ww6BLcmBxVDeXBMUDQv6fK5bcNaJLxob3ZQA==", - "funding": [ - "https://opencollective.com/katex", - "https://github.com/sponsors/katex" - ], - "license": "MIT", - "dependencies": { - "commander": "^8.3.0" - }, - "bin": { - "katex": "cli.js" - } - }, - "node_modules/katex/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "license": "MIT", - "dependencies": { - "json-buffer": "3.0.1" - } - }, - "node_modules/khroma": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", - "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" - }, - "node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/langium": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/langium/-/langium-4.2.1.tgz", - "integrity": "sha512-zu9QWmjpzJcomzdJQAHgDVhLGq5bLosVak1KVa40NzQHXfqr4eAHupvnPOVXEoLkg6Ocefvf/93d//SB7du4YQ==", - 
"license": "MIT", - "dependencies": { - "chevrotain": "~11.1.1", - "chevrotain-allstar": "~0.3.1", - "vscode-languageserver": "~9.0.1", - "vscode-languageserver-textdocument": "~1.0.11", - "vscode-uri": "~3.1.0" - }, - "engines": { - "node": ">=20.10.0", - "npm": ">=10.2.3" - } - }, - "node_modules/latest-version": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", - "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", - "license": "MIT", - "dependencies": { - "package-json": "^8.1.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/launch-editor": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.13.1.tgz", - "integrity": "sha512-lPSddlAAluRKJ7/cjRFoXUFzaX7q/YKI7yPHuEvSJVqoXvFnJov1/Ud87Aa4zULIbA9Nja4mSPK8l0z/7eV2wA==", - "license": "MIT", - "dependencies": { - "picocolors": "^1.1.1", - "shell-quote": "^1.8.3" - } - }, - "node_modules/layout-base": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", - "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", - "license": "MIT" - }, - "node_modules/leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "url": 
"https://github.com/sponsors/antonk52" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", - "license": "MIT" - }, - "node_modules/loader-runner": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.1.tgz", - "integrity": "sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==", - "license": "MIT", - "engines": { - "node": ">=6.11.5" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "license": "MIT", - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/locate-path": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", - "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", - "license": "MIT", - "dependencies": { - "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", - "license": "MIT" - }, - "node_modules/lodash-es": { - "version": "4.17.23", - "resolved": 
"https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", - "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", - "license": "MIT" - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "license": "MIT" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", - "license": "MIT" - }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", - "license": "MIT" - }, - "node_modules/longest-streak": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", - "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "license": "MIT", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": 
"sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "license": "MIT", - "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/lowercase-keys": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", - "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/markdown-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz", - "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==", - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/markdown-table": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", - "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/marked": { - "version": "16.4.2", - "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.2.tgz", - "integrity": "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==", - "license": "MIT", - "bin": { - "marked": "bin/marked.js" - }, - "engines": { - "node": ">= 
20" - } - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/mdast-util-directive": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz", - "integrity": "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-find-and-replace": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", - "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "escape-string-regexp": "^5.0.0", - "unist-util-is": "^6.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - 
"url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mdast-util-from-markdown": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.3.tgz", - "integrity": "sha512-W4mAWTvSlKvf8L6J+VN9yLSqQ9AOAAvHuoDAmPkz4dHf553m5gVj2ejadHJhoJmcmxEnOv6Pa8XJhpxE93kb8Q==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "mdast-util-to-string": "^4.0.0", - "micromark": "^4.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-decode-string": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-stringify-position": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/mdast-util-frontmatter": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", - "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "escape-string-regexp": "^5.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - 
"micromark-extension-frontmatter": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-frontmatter/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mdast-util-gfm": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", - "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", - "license": "MIT", - "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-gfm-autolink-literal": "^2.0.0", - "mdast-util-gfm-footnote": "^2.0.0", - "mdast-util-gfm-strikethrough": "^2.0.0", - "mdast-util-gfm-table": "^2.0.0", - "mdast-util-gfm-task-list-item": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-autolink-literal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", - "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "ccount": "^2.0.0", - "devlop": "^1.0.0", - "mdast-util-find-and-replace": "^3.0.0", - "micromark-util-character": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": { - "version": "2.1.1", - 
"resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/mdast-util-gfm-footnote": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", - "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-strikethrough": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", - "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", - "license": "MIT", - 
"dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", - "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "markdown-table": "^3.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-task-list-item": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", - "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", - "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", - "license": "MIT", - "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - 
"node_modules/mdast-util-mdx-expression": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", - "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdx-jsx": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", - "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-stringify-position": "^4.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdxjs-esm": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", - "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-phrasing": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", - "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-hast": { - "version": "13.2.1", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", - "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@ungap/structured-clone": "^1.0.0", - "devlop": "^1.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "trim-lines": "^3.0.0", - "unist-util-position": "^5.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-markdown": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", - "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^4.0.0", - "mdast-util-to-string": "^4.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-decode-string": "^2.0.0", - "unist-util-visit": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-string": { - "version": "4.0.0", - 
"resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", - "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdn-data": { - "version": "2.0.30", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", - "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", - "license": "CC0-1.0" - }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/memfs": { - "version": "4.56.10", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-4.56.10.tgz", - "integrity": "sha512-eLvzyrwqLHnLYalJP7YZ3wBe79MXktMdfQbvMrVD80K+NhrIukCVBvgP30zTJYEEDh9hZ/ep9z0KOdD7FSHo7w==", - "license": "Apache-2.0", - "dependencies": { - "@jsonjoy.com/fs-core": "4.56.10", - "@jsonjoy.com/fs-fsa": "4.56.10", - "@jsonjoy.com/fs-node": "4.56.10", - "@jsonjoy.com/fs-node-builtins": "4.56.10", - "@jsonjoy.com/fs-node-to-fsa": "4.56.10", - "@jsonjoy.com/fs-node-utils": "4.56.10", - "@jsonjoy.com/fs-print": "4.56.10", - "@jsonjoy.com/fs-snapshot": "4.56.10", - "@jsonjoy.com/json-pack": "^1.11.0", - "@jsonjoy.com/util": "^1.9.0", - "glob-to-regex.js": "^1.0.1", - "thingies": "^2.5.0", - "tree-dump": "^1.0.3", - "tslib": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/mermaid": { - "version": "11.12.3", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.3.tgz", - "integrity": "sha512-wN5ZSgJQIC+CHJut9xaKWsknLxaFBwCPwPkGTSUYrTiHORWvpT8RxGk849HPnpUAQ+/9BPRqYb80jTpearrHzQ==", - "license": "MIT", - "dependencies": { - "@braintree/sanitize-url": "^7.1.1", - "@iconify/utils": "^3.0.1", - "@mermaid-js/parser": "^1.0.0", - "@types/d3": "^7.4.3", - "cytoscape": "^3.29.3", - "cytoscape-cose-bilkent": "^4.1.0", - "cytoscape-fcose": "^2.2.0", - "d3": "^7.9.0", - "d3-sankey": "^0.12.3", - "dagre-d3-es": "7.0.13", - "dayjs": "^1.11.18", - "dompurify": "^3.2.5", - "katex": "^0.16.22", - "khroma": "^2.1.0", - "lodash-es": "^4.17.23", - "marked": "^16.2.1", - "roughjs": "^4.6.6", - "stylis": "^4.3.6", - "ts-dedent": "^2.2.0", - "uuid": "^11.1.0" - } - }, - "node_modules/mermaid/node_modules/uuid": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", - "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", - "funding": [ - "https://github.com/sponsors/broofa", - 
"https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, - "node_modules/methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/micromark": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", - "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/debug": "^4.0.0", - "debug": "^4.0.0", - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-encode": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-subtokenize": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-core-commonmark": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", - "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - 
"url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-destination": "^2.0.0", - "micromark-factory-label": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-factory-title": "^2.0.0", - "micromark-factory-whitespace": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-html-tag-name": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-subtokenize": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-core-commonmark/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-core-commonmark/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - 
"micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-core-commonmark/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-directive": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz", - "integrity": "sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-factory-whitespace": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "parse-entities": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-directive/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-directive/node_modules/micromark-util-character": { - 
"version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-directive/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-frontmatter": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz", - "integrity": "sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==", - "license": "MIT", - "dependencies": { - "fault": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-frontmatter/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", 
- "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-frontmatter/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-gfm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", - "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", - "license": "MIT", - "dependencies": { - "micromark-extension-gfm-autolink-literal": "^2.0.0", - "micromark-extension-gfm-footnote": "^2.0.0", - "micromark-extension-gfm-strikethrough": "^2.0.0", - "micromark-extension-gfm-table": "^2.0.0", - "micromark-extension-gfm-tagfilter": "^2.0.0", - "micromark-extension-gfm-task-list-item": "^2.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-autolink-literal": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", - "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", - 
"license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-gfm-footnote": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", - "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - 
"micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": 
"OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-gfm-strikethrough": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", - "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-gfm-table": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", - "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - 
"node_modules/micromark-extension-gfm-table/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-gfm-tagfilter": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", - "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", - "license": "MIT", - "dependencies": { - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-task-list-item": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", - "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": 
"sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-mdx-expression": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz", - "integrity": "sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-mdx-expression": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-expression/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-expression/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-expression/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-mdx-jsx": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.2.tgz", - "integrity": "sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==", - 
"license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "micromark-factory-mdx-expression": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdx-jsx/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-jsx/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-jsx/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - 
"integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-extension-mdx-md": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz", - "integrity": "sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==", - "license": "MIT", - "dependencies": { - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdxjs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz", - "integrity": "sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==", - "license": "MIT", - "dependencies": { - "acorn": "^8.0.0", - "acorn-jsx": "^5.0.0", - "micromark-extension-mdx-expression": "^3.0.0", - "micromark-extension-mdx-jsx": "^3.0.0", - "micromark-extension-mdx-md": "^2.0.0", - "micromark-extension-mdxjs-esm": "^3.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdxjs-esm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz", - "integrity": "sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - 
"micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdxjs-esm/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdxjs-esm/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-destination": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", - "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], 
- "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-destination/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-destination/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-label": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", - "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - 
"node_modules/micromark-factory-label/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-label/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-mdx-expression": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.3.tgz", - "integrity": "sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0", - 
"vfile-message": "^4.0.0" - } - }, - "node_modules/micromark-factory-mdx-expression/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-mdx-expression/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-mdx-expression/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-space": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", - "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^1.0.0", - "micromark-util-types": "^1.0.0" - } - }, - "node_modules/micromark-factory-space/node_modules/micromark-util-types": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", - "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-title": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", - "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-title/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": 
"sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-title/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-title/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-factory-whitespace": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", - "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": 
"OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-whitespace/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-whitespace/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": 
"OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-character": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz", - "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^1.0.0", - "micromark-util-types": "^1.0.0" - } - }, - "node_modules/micromark-util-character/node_modules/micromark-util-types": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", - "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-chunked": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", - "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-chunked/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": 
"sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-classify-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", - "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-classify-character/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-classify-character/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - 
"url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-combine-extensions": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", - "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-chunked": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-numeric-character-reference": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", - "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-numeric-character-reference/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - 
"node_modules/micromark-util-decode-string": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", - "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "decode-named-character-reference": "^1.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-string/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-string/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-encode": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", - "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-events-to-acorn": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.3.tgz", - "integrity": "sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/unist": "^3.0.0", - "devlop": "^1.0.0", - "estree-util-visit": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "vfile-message": "^4.0.0" - } - }, - "node_modules/micromark-util-events-to-acorn/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-html-tag-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", - "integrity": 
"sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-normalize-identifier": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", - "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-normalize-identifier/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-resolve-all": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", - "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-types": 
"^2.0.0" - } - }, - "node_modules/micromark-util-sanitize-uri": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", - "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-encode": "^2.0.0", - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-sanitize-uri/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-sanitize-uri/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-subtokenize": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", - "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-subtokenize/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-symbol": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz", - "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-types": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", - "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": 
"OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark/node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark/node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark/node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": 
"sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", - "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.18", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", - "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", - "license": "MIT", - "dependencies": { - "mime-db": "~1.33.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/mimic-response": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", - "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mini-css-extract-plugin": { - "version": "2.10.0", - 
"resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.10.0.tgz", - "integrity": "sha512-540P2c5dYnJlyJxTaSloliZexv8rji6rY8FhQN+WF/82iHQfA23j/xtJx97L+mXOML27EqksSek/g4eK7jaL3g==", - "license": "MIT", - "dependencies": { - "schema-utils": "^4.0.0", - "tapable": "^2.2.1" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - } - }, - "node_modules/minimalistic-assert": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", - "license": "ISC" - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/mlly": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", - "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", - "license": "MIT", - "dependencies": { - "acorn": "^8.15.0", - "pathe": "^2.0.3", - "pkg-types": "^1.3.1", - "ufo": "^1.6.1" - } - }, - "node_modules/mrmime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", - "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": 
"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/multicast-dns": { - "version": "7.2.5", - "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", - "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", - "license": "MIT", - "dependencies": { - "dns-packet": "^5.2.2", - "thunky": "^1.0.2" - }, - "bin": { - "multicast-dns": "cli.js" - } - }, - "node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/negotiator": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", - "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "license": "MIT" - }, - "node_modules/no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "license": "MIT", - "dependencies": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, - "node_modules/node-emoji": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/node-emoji/-/node-emoji-2.2.0.tgz", - "integrity": "sha512-Z3lTE9pLaJF47NyMhd4ww1yFTAP8YhYI8SleJiHzM46Fgpm5cnNzSl9XfzFNqbaz+VlJrIj3fXQ4DeN1Rjm6cw==", - "license": "MIT", - "dependencies": { - "@sindresorhus/is": "^4.6.0", - "char-regex": "^1.0.2", - "emojilib": "^2.4.0", - "skin-tone": "^2.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/node-releases": { - "version": "2.0.27", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", - "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", - "license": "MIT" - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-url": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.1.1.tgz", - "integrity": "sha512-JYc0DPlpGWB40kH5g07gGTrYuMqV653k3uBKY6uITPWds3M0ov3GaWGp9lbE3Bzngx8+XkfzgvASb9vk9JDFXQ==", - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/nprogress": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", - "integrity": "sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==", - "license": "MIT" - }, - 
"node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/null-loader": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/null-loader/-/null-loader-4.0.1.tgz", - "integrity": "sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg==", - "license": "MIT", - "dependencies": { - "loader-utils": "^2.0.0", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/null-loader/node_modules/ajv": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", - "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/null-loader/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "license": "MIT", - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/null-loader/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "license": "MIT" - }, - "node_modules/null-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "license": "MIT", - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", - "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", - "license": "MIT", - "dependencies": { - 
"call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0", - "has-symbols": "^1.1.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/obuf": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", - "license": "MIT" - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", - "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/open": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", - "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", - "license": "MIT", - "dependencies": { - "define-lazy-prop": "^2.0.0", - "is-docker": "^2.1.1", - "is-wsl": "^2.2.0" - }, - "engines": { - "node": ">=12" - }, 
- "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/opener": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", - "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", - "license": "(WTFPL OR MIT)", - "bin": { - "opener": "bin/opener-bin.js" - } - }, - "node_modules/p-cancelable": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz", - "integrity": "sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==", - "license": "MIT", - "engines": { - "node": ">=12.20" - } - }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", - "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", - "license": "MIT", - "dependencies": { - "p-limit": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-map": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "license": "MIT", - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-queue": { - "version": "6.6.2", - "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz", - "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==", - "license": "MIT", - "dependencies": { - "eventemitter3": "^4.0.4", - "p-timeout": "^3.2.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-retry": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-6.2.1.tgz", - "integrity": "sha512-hEt02O4hUct5wtwg4H4KcWgDdm+l1bOaEy/hWzd8xtXB9BqxTWBBhb+2ImAtH4Cv4rPjV76xN3Zumqk3k3AhhQ==", - "license": "MIT", - "dependencies": { - "@types/retry": "0.12.2", - "is-network-error": "^1.0.0", - "retry": "^0.13.1" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-timeout": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", - "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", - "license": "MIT", - "dependencies": { - "p-finally": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/package-json": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-8.1.1.tgz", - "integrity": "sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==", - "license": "MIT", - "dependencies": { - "got": "^12.1.0", - "registry-auth-token": "^5.0.1", - "registry-url": 
"^6.0.0", - "semver": "^7.3.7" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/package-manager-detector": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.6.0.tgz", - "integrity": "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==", - "license": "MIT" - }, - "node_modules/param-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", - "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", - "license": "MIT", - "dependencies": { - "dot-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "license": "MIT", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-entities": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", - "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "character-entities-legacy": "^3.0.0", - "character-reference-invalid": "^2.0.0", - "decode-named-character-reference": "^1.0.0", - "is-alphanumerical": "^2.0.0", - "is-decimal": "^2.0.0", - "is-hexadecimal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/parse-entities/node_modules/@types/unist": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", - "integrity": 
"sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", - "license": "MIT" - }, - "node_modules/parse-json": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", - "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-numeric-range": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz", - "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==", - "license": "ISC" - }, - "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", - "license": "MIT", - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-htmlparser2-tree-adapter": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.1.0.tgz", - "integrity": "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==", - "license": "MIT", - "dependencies": { - "domhandler": "^5.0.3", - "parse5": "^7.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": 
"sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/pascal-case": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", - "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", - "license": "MIT", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/path-data-parser": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz", - "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==", - "license": "MIT" - }, - "node_modules/path-exists": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", - "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/path-is-inside": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==", - "license": "(WTFPL OR MIT)" - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", - "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", - "license": "MIT" - }, - "node_modules/path-to-regexp": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", - "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", - "license": "MIT", - "dependencies": { - "isarray": "0.0.1" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pkg-dir": { - "version": 
"7.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-7.0.0.tgz", - "integrity": "sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==", - "license": "MIT", - "dependencies": { - "find-up": "^6.3.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-types": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", - "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", - "license": "MIT", - "dependencies": { - "confbox": "^0.1.8", - "mlly": "^1.7.4", - "pathe": "^2.0.1" - } - }, - "node_modules/pkijs": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/pkijs/-/pkijs-3.3.3.tgz", - "integrity": "sha512-+KD8hJtqQMYoTuL1bbGOqxb4z+nZkTAwVdNtWwe8Tc2xNbEmdJYIYoc6Qt0uF55e6YW6KuTHw1DjQ18gMhzepw==", - "license": "BSD-3-Clause", - "dependencies": { - "@noble/hashes": "1.4.0", - "asn1js": "^3.0.6", - "bytestreamjs": "^2.0.1", - "pvtsutils": "^1.3.6", - "pvutils": "^1.1.3", - "tslib": "^2.8.1" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/points-on-curve": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz", - "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==", - "license": "MIT" - }, - "node_modules/points-on-path": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz", - "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==", - "license": "MIT", - "dependencies": { - "path-data-parser": "0.1.0", - "points-on-curve": "0.2.0" - } - }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": 
"sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-attribute-case-insensitive": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-7.0.1.tgz", - "integrity": "sha512-Uai+SupNSqzlschRyNx3kbCTWgY/2hcwtHEI/ej2LJWc9JJ77qKgGptd8DHwY1mXtZ7Aoh4z4yxfwMBue9eNgw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-attribute-case-insensitive/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-calc": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-9.0.1.tgz", - "integrity": "sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==", - "license": "MIT", - "dependencies": { - "postcss-selector-parser": 
"^6.0.11", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.2.2" - } - }, - "node_modules/postcss-clamp": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/postcss-clamp/-/postcss-clamp-4.1.0.tgz", - "integrity": "sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=7.6.0" - }, - "peerDependencies": { - "postcss": "^8.4.6" - } - }, - "node_modules/postcss-color-functional-notation": { - "version": "7.0.12", - "resolved": "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-7.0.12.tgz", - "integrity": "sha512-TLCW9fN5kvO/u38/uesdpbx3e8AkTYhMvDZYa9JpmImWuTE99bDQ7GU7hdOADIZsiI9/zuxfAJxny/khknp1Zw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-color-hex-alpha": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/postcss-color-hex-alpha/-/postcss-color-hex-alpha-10.0.0.tgz", - "integrity": "sha512-1kervM2cnlgPs2a8Vt/Qbe5cQ++N7rkYo/2rz2BkqJZIHQwaVuJgQH38REHrAi4uM0b1fqxMkWYmese94iMp3w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "@csstools/utilities": "^2.0.0", - 
"postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-color-rebeccapurple": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-10.0.0.tgz", - "integrity": "sha512-JFta737jSP+hdAIEhk1Vs0q0YF5P8fFcj+09pweS8ktuGuZ8pPlykHsk6mPxZ8awDl4TrcxUqJo9l1IhVr/OjQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-colormin": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-6.1.0.tgz", - "integrity": "sha512-x9yX7DOxeMAR+BgGVnNSAxmAj98NX/YxEMNFP+SDCEeNLb2r3i6Hh1ksMsnW8Ub5SLCpbescQqn9YEbE9554Sw==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "caniuse-api": "^3.0.0", - "colord": "^2.9.3", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-convert-values": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-6.1.0.tgz", - "integrity": "sha512-zx8IwP/ts9WvUM6NkVSkiU902QZL1bwPhaVaLynPtCsOTqp+ZKbNi+s6XJg3rfqpKGA/oc7Oxk5t8pOQJcwl/w==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-custom-media": { - "version": "11.0.6", - "resolved": 
"https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-11.0.6.tgz", - "integrity": "sha512-C4lD4b7mUIw+RZhtY7qUbf4eADmb7Ey8BFA2px9jUbwg7pjTZDl4KY4bvlUV+/vXQvzQRfiGEVJyAbtOsCMInw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "@csstools/cascade-layer-name-parser": "^2.0.5", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/media-query-list-parser": "^4.0.3" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-custom-properties": { - "version": "14.0.6", - "resolved": "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-14.0.6.tgz", - "integrity": "sha512-fTYSp3xuk4BUeVhxCSJdIPhDLpJfNakZKoiTDx7yRGCdlZrSJR7mWKVOBS4sBF+5poPQFMj2YdXx1VHItBGihQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - "dependencies": { - "@csstools/cascade-layer-name-parser": "^2.0.5", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-custom-selectors": { - "version": "8.0.5", - "resolved": "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-8.0.5.tgz", - "integrity": "sha512-9PGmckHQswiB2usSO6XMSswO2yFWVoCAuih1yl9FVcwkscLjRKjwsjM3t+NIWpSU2Jx3eOiK2+t4vVTQaoCHHg==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", 
- "dependencies": { - "@csstools/cascade-layer-name-parser": "^2.0.5", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-custom-selectors/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-dir-pseudo-class": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-9.0.1.tgz", - "integrity": "sha512-tRBEK0MHYvcMUrAuYMEOa0zg9APqirBcgzi6P21OhxtJyJADo/SWBwY1CAwEohQ/6HDaa9jCjLRG7K3PVQYHEA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-dir-pseudo-class/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-discard-comments": { - "version": "6.0.2", - "resolved": 
"https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-6.0.2.tgz", - "integrity": "sha512-65w/uIqhSBBfQmYnG92FO1mWZjJ4GL5b8atm5Yw2UgrwD7HiNiSSNwJor1eCFGzUgYnN/iIknhNRVqjrrpuglw==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-duplicates": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.3.tgz", - "integrity": "sha512-+JA0DCvc5XvFAxwx6f/e68gQu/7Z9ud584VLmcgto28eB8FqSFZwtrLwB5Kcp70eIoWP/HXqz4wpo8rD8gpsTw==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-empty": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-6.0.3.tgz", - "integrity": "sha512-znyno9cHKQsK6PtxL5D19Fj9uwSzC2mB74cpT66fhgOadEUPyXFkbgwm5tvc3bt3NAy8ltE5MrghxovZRVnOjQ==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-overridden": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-6.0.2.tgz", - "integrity": "sha512-j87xzI4LUggC5zND7KdjsI25APtyMuynXZSujByMaav2roV6OZX+8AaCUcZSWqckZpjAjRyFDdpqybgjFO0HJQ==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-discard-unused": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-6.0.5.tgz", - "integrity": "sha512-wHalBlRHkaNnNwfC8z+ppX57VhvS+HWgjW508esjdaEYr3Mx7Gnn2xA4R/CKf5+Z9S5qsqC+Uzh4ueENWwCVUA==", - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.0.16" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" 
- }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-double-position-gradients": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-6.0.4.tgz", - "integrity": "sha512-m6IKmxo7FxSP5nF2l63QbCC3r+bWpFUWmZXZf096WxG0m7Vl1Q1+ruFOhpdDRmKrRS+S3Jtk+TVk/7z0+BVK6g==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-focus-visible": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/postcss-focus-visible/-/postcss-focus-visible-10.0.1.tgz", - "integrity": "sha512-U58wyjS/I1GZgjRok33aE8juW9qQgQUNwTSdxQGuShHzwuYdcklnvK/+qOWX1Q9kr7ysbraQ6ht6r+udansalA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-focus-visible/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-focus-within": { - "version": "9.0.1", - "resolved": 
"https://registry.npmjs.org/postcss-focus-within/-/postcss-focus-within-9.0.1.tgz", - "integrity": "sha512-fzNUyS1yOYa7mOjpci/bR+u+ESvdar6hk8XNK/TRR0fiGTp2QT5N+ducP0n3rfH/m9I7H/EQU6lsa2BrgxkEjw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-focus-within/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-font-variant": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz", - "integrity": "sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA==", - "license": "MIT", - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-gap-properties": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/postcss-gap-properties/-/postcss-gap-properties-6.0.0.tgz", - "integrity": "sha512-Om0WPjEwiM9Ru+VhfEDPZJAKWUd0mV1HmNXqp2C29z80aQ2uP9UVhLc7e3aYMIor/S5cVhoPgYQ7RtfeZpYTRw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-image-set-function": { - "version": "7.0.0", 
- "resolved": "https://registry.npmjs.org/postcss-image-set-function/-/postcss-image-set-function-7.0.0.tgz", - "integrity": "sha512-QL7W7QNlZuzOwBTeXEmbVckNt1FSmhQtbMRvGGqqU4Nf4xk6KUEQhAoWuMzwbSv5jxiRiSZ5Tv7eiDB9U87znA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/utilities": "^2.0.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-lab-function": { - "version": "7.0.12", - "resolved": "https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-7.0.12.tgz", - "integrity": "sha512-tUcyRk1ZTPec3OuKFsqtRzW2Go5lehW29XA21lZ65XmzQkz43VY2tyWEC202F7W3mILOjw0voOiuxRGTsN+J9w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/css-color-parser": "^3.1.0", - "@csstools/css-parser-algorithms": "^3.0.5", - "@csstools/css-tokenizer": "^3.0.4", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/utilities": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-loader": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.3.4.tgz", - "integrity": "sha512-iW5WTTBSC5BfsBJ9daFMPVrLT36MrNiC6fqOZTTaHjBNX6Pfd5p+hSBqe/fEeNd7pc13QiAyGt7VdGMw4eRC4A==", - "license": "MIT", - "dependencies": { - "cosmiconfig": "^8.3.5", - "jiti": "^1.20.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "postcss": "^7.0.0 || ^8.0.1", - 
"webpack": "^5.0.0" - } - }, - "node_modules/postcss-logical": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/postcss-logical/-/postcss-logical-8.1.0.tgz", - "integrity": "sha512-pL1hXFQ2fEXNKiNiAgtfA005T9FBxky5zkX6s4GZM2D8RkVgRqz3f4g1JUoq925zXv495qk8UNldDwh8uGEDoA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-merge-idents": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-6.0.3.tgz", - "integrity": "sha512-1oIoAsODUs6IHQZkLQGO15uGEbK3EAl5wi9SS8hs45VgsxQfMnxvt+L+zIr7ifZFIH14cfAeVe2uCTa+SPRa3g==", - "license": "MIT", - "dependencies": { - "cssnano-utils": "^4.0.2", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-merge-longhand": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-6.0.5.tgz", - "integrity": "sha512-5LOiordeTfi64QhICp07nzzuTDjNSO8g5Ksdibt44d+uvIIAE1oZdRn8y/W5ZtYgRH/lnLDlvi9F8btZcVzu3w==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0", - "stylehacks": "^6.1.1" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-merge-rules": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-6.1.1.tgz", - "integrity": "sha512-KOdWF0gju31AQPZiD+2Ar9Qjowz1LTChSjFFbS+e2sFgc4uHOp3ZvVX4sNeTlk0w2O31ecFGgrFzhO0RSWbWwQ==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "caniuse-api": "^3.0.0", - 
"cssnano-utils": "^4.0.2", - "postcss-selector-parser": "^6.0.16" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-font-values": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-6.1.0.tgz", - "integrity": "sha512-gklfI/n+9rTh8nYaSJXlCo3nOKqMNkxuGpTn/Qm0gstL3ywTr9/WRKznE+oy6fvfolH6dF+QM4nCo8yPLdvGJg==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-gradients": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-6.0.3.tgz", - "integrity": "sha512-4KXAHrYlzF0Rr7uc4VrfwDJ2ajrtNEpNEuLxFgwkhFZ56/7gaE4Nr49nLsQDZyUe+ds+kEhf+YAUolJiYXF8+Q==", - "license": "MIT", - "dependencies": { - "colord": "^2.9.3", - "cssnano-utils": "^4.0.2", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-params": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-6.1.0.tgz", - "integrity": "sha512-bmSKnDtyyE8ujHQK0RQJDIKhQ20Jq1LYiez54WiaOoBtcSuflfK3Nm596LvbtlFcpipMjgClQGyGr7GAs+H1uA==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "cssnano-utils": "^4.0.2", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-minify-selectors": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-6.0.4.tgz", - "integrity": 
"sha512-L8dZSwNLgK7pjTto9PzWRoMbnLq5vsZSTu8+j1P/2GB8qdtGQfn+K1uSvFgYvgh83cbyxT5m43ZZhUMTJDSClQ==", - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.0.16" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-modules-extract-imports": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.1.0.tgz", - "integrity": "sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==", - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.2.0.tgz", - "integrity": "sha512-5kcJm/zk+GJDSfw+V/42fJ5fhjL5YbFDl8nVdXkJPLLW+Vf9mTD5Xe0wqIaDnLuL2U6cDNpTr+UQ+v2HWIBhzw==", - "license": "MIT", - "dependencies": { - "icss-utils": "^5.0.0", - "postcss-selector-parser": "^7.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-scope": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.2.1.tgz", - "integrity": 
"sha512-m9jZstCVaqGjTAuny8MdgE88scJnCiQSlSrOWcTQgM2t32UBe+MUmFSO5t7VMSfAf/FJKImAxBav8ooCHJXCJA==", - "license": "ISC", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-scope/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-values": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", - "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", - "license": "ISC", - "dependencies": { - "icss-utils": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-nesting": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-13.0.2.tgz", - "integrity": "sha512-1YCI290TX+VP0U/K/aFxzHzQWHWURL+CtHMSbex1lCdpXD1SoR2sYuxDu5aNI9lPoXpKTCggFZiDJbwylU0LEQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/selector-resolve-nested": "^3.1.0", - "@csstools/selector-specificity": "^5.0.0", - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-nesting/node_modules/@csstools/selector-resolve-nested": { - 
"version": "3.1.0", - "resolved": "https://registry.npmjs.org/@csstools/selector-resolve-nested/-/selector-resolve-nested-3.1.0.tgz", - "integrity": "sha512-mf1LEW0tJLKfWyvn5KdDrhpxHyuxpbNwTIwOYLIvsTffeyOf85j5oIzfG0yosxDgx/sswlqBnESYUcQH0vgZ0g==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss-selector-parser": "^7.0.0" - } - }, - "node_modules/postcss-nesting/node_modules/@csstools/selector-specificity": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-5.0.0.tgz", - "integrity": "sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss-selector-parser": "^7.0.0" - } - }, - "node_modules/postcss-nesting/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-normalize-charset": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-6.0.2.tgz", - "integrity": "sha512-a8N9czmdnrjPHa3DeFlwqst5eaL5W8jYu3EBbTTkI5FHkfMhFZh1EGbku6jhHhIzTA6tquI2P42NtZ59M/H/kQ==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" 
- }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-display-values": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.2.tgz", - "integrity": "sha512-8H04Mxsb82ON/aAkPeq8kcBbAtI5Q2a64X/mnRRfPXBq7XeogoQvReqxEfc0B4WPq1KimjezNC8flUtC3Qz6jg==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-positions": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-6.0.2.tgz", - "integrity": "sha512-/JFzI441OAB9O7VnLA+RtSNZvQ0NCFZDOtp6QPFo1iIyawyXg0YI3CYM9HBy1WvwCRHnPep/BvI1+dGPKoXx/Q==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-repeat-style": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.2.tgz", - "integrity": "sha512-YdCgsfHkJ2jEXwR4RR3Tm/iOxSfdRt7jplS6XRh9Js9PyCR/aka/FCb6TuHT2U8gQubbm/mPmF6L7FY9d79VwQ==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-string": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-6.0.2.tgz", - "integrity": "sha512-vQZIivlxlfqqMp4L9PZsFE4YUkWniziKjQWUtsxUiVsSSPelQydwS8Wwcuw0+83ZjPWNTl02oxlIvXsmmG+CiQ==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": 
"^8.4.31" - } - }, - "node_modules/postcss-normalize-timing-functions": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.2.tgz", - "integrity": "sha512-a+YrtMox4TBtId/AEwbA03VcJgtyW4dGBizPl7e88cTFULYsprgHWTbfyjSLyHeBcK/Q9JhXkt2ZXiwaVHoMzA==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-unicode": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-6.1.0.tgz", - "integrity": "sha512-QVC5TQHsVj33otj8/JD869Ndr5Xcc/+fwRh4HAsFsAeygQQXm+0PySrKbr/8tkDKzW+EVT3QkqZMfFrGiossDg==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-url": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-6.0.2.tgz", - "integrity": "sha512-kVNcWhCeKAzZ8B4pv/DnrU1wNh458zBNp8dh4y5hhxih5RZQ12QWMuQrDgPRw3LRl8mN9vOVfHl7uhvHYMoXsQ==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-normalize-whitespace": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.2.tgz", - "integrity": "sha512-sXZ2Nj1icbJOKmdjXVT9pnyHQKiSAyuNQHSgRCUgThn2388Y9cGVDR+E9J9iAYbSbLHI+UUwLVl1Wzco/zgv0Q==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - 
"node_modules/postcss-opacity-percentage": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/postcss-opacity-percentage/-/postcss-opacity-percentage-3.0.0.tgz", - "integrity": "sha512-K6HGVzyxUxd/VgZdX04DCtdwWJ4NGLG212US4/LA1TLAbHgmAsTWVR86o+gGIbFtnTkfOpb9sCRBx8K7HO66qQ==", - "funding": [ - { - "type": "kofi", - "url": "https://ko-fi.com/mrcgrtz" - }, - { - "type": "liberapay", - "url": "https://liberapay.com/mrcgrtz" - } - ], - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-ordered-values": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-6.0.2.tgz", - "integrity": "sha512-VRZSOB+JU32RsEAQrO94QPkClGPKJEL/Z9PCBImXMhIeK5KAYo6slP/hBYlLgrCjFxyqvn5VC81tycFEDBLG1Q==", - "license": "MIT", - "dependencies": { - "cssnano-utils": "^4.0.2", - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-overflow-shorthand": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/postcss-overflow-shorthand/-/postcss-overflow-shorthand-6.0.0.tgz", - "integrity": "sha512-BdDl/AbVkDjoTofzDQnwDdm/Ym6oS9KgmO7Gr+LHYjNWJ6ExORe4+3pcLQsLA9gIROMkiGVjjwZNoL/mpXHd5Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-page-break": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/postcss-page-break/-/postcss-page-break-3.0.4.tgz", - "integrity": "sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ==", - "license": "MIT", - 
"peerDependencies": { - "postcss": "^8" - } - }, - "node_modules/postcss-place": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/postcss-place/-/postcss-place-10.0.0.tgz", - "integrity": "sha512-5EBrMzat2pPAxQNWYavwAfoKfYcTADJ8AXGVPcUZ2UkNloUTWzJQExgrzrDkh3EKzmAx1evfTAzF9I8NGcc+qw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-preset-env": { - "version": "10.6.1", - "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-10.6.1.tgz", - "integrity": "sha512-yrk74d9EvY+W7+lO9Aj1QmjWY9q5NsKjK2V9drkOPZB/X6KZ0B3igKsHUYakb7oYVhnioWypQX3xGuePf89f3g==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "@csstools/postcss-alpha-function": "^1.0.1", - "@csstools/postcss-cascade-layers": "^5.0.2", - "@csstools/postcss-color-function": "^4.0.12", - "@csstools/postcss-color-function-display-p3-linear": "^1.0.1", - "@csstools/postcss-color-mix-function": "^3.0.12", - "@csstools/postcss-color-mix-variadic-function-arguments": "^1.0.2", - "@csstools/postcss-content-alt-text": "^2.0.8", - "@csstools/postcss-contrast-color-function": "^2.0.12", - "@csstools/postcss-exponential-functions": "^2.0.9", - "@csstools/postcss-font-format-keywords": "^4.0.0", - "@csstools/postcss-gamut-mapping": "^2.0.11", - "@csstools/postcss-gradients-interpolation-method": "^5.0.12", - "@csstools/postcss-hwb-function": "^4.0.12", - "@csstools/postcss-ic-unit": "^4.0.4", - "@csstools/postcss-initial": "^2.0.1", - "@csstools/postcss-is-pseudo-class": "^5.0.3", - 
"@csstools/postcss-light-dark-function": "^2.0.11", - "@csstools/postcss-logical-float-and-clear": "^3.0.0", - "@csstools/postcss-logical-overflow": "^2.0.0", - "@csstools/postcss-logical-overscroll-behavior": "^2.0.0", - "@csstools/postcss-logical-resize": "^3.0.0", - "@csstools/postcss-logical-viewport-units": "^3.0.4", - "@csstools/postcss-media-minmax": "^2.0.9", - "@csstools/postcss-media-queries-aspect-ratio-number-values": "^3.0.5", - "@csstools/postcss-nested-calc": "^4.0.0", - "@csstools/postcss-normalize-display-values": "^4.0.1", - "@csstools/postcss-oklab-function": "^4.0.12", - "@csstools/postcss-position-area-property": "^1.0.0", - "@csstools/postcss-progressive-custom-properties": "^4.2.1", - "@csstools/postcss-property-rule-prelude-list": "^1.0.0", - "@csstools/postcss-random-function": "^2.0.1", - "@csstools/postcss-relative-color-syntax": "^3.0.12", - "@csstools/postcss-scope-pseudo-class": "^4.0.1", - "@csstools/postcss-sign-functions": "^1.1.4", - "@csstools/postcss-stepped-value-functions": "^4.0.9", - "@csstools/postcss-syntax-descriptor-syntax-production": "^1.0.1", - "@csstools/postcss-system-ui-font-family": "^1.0.0", - "@csstools/postcss-text-decoration-shorthand": "^4.0.3", - "@csstools/postcss-trigonometric-functions": "^4.0.9", - "@csstools/postcss-unset-value": "^4.0.0", - "autoprefixer": "^10.4.23", - "browserslist": "^4.28.1", - "css-blank-pseudo": "^7.0.1", - "css-has-pseudo": "^7.0.3", - "css-prefers-color-scheme": "^10.0.0", - "cssdb": "^8.6.0", - "postcss-attribute-case-insensitive": "^7.0.1", - "postcss-clamp": "^4.1.0", - "postcss-color-functional-notation": "^7.0.12", - "postcss-color-hex-alpha": "^10.0.0", - "postcss-color-rebeccapurple": "^10.0.0", - "postcss-custom-media": "^11.0.6", - "postcss-custom-properties": "^14.0.6", - "postcss-custom-selectors": "^8.0.5", - "postcss-dir-pseudo-class": "^9.0.1", - "postcss-double-position-gradients": "^6.0.4", - "postcss-focus-visible": "^10.0.1", - "postcss-focus-within": "^9.0.1", 
- "postcss-font-variant": "^5.0.0", - "postcss-gap-properties": "^6.0.0", - "postcss-image-set-function": "^7.0.0", - "postcss-lab-function": "^7.0.12", - "postcss-logical": "^8.1.0", - "postcss-nesting": "^13.0.2", - "postcss-opacity-percentage": "^3.0.0", - "postcss-overflow-shorthand": "^6.0.0", - "postcss-page-break": "^3.0.4", - "postcss-place": "^10.0.0", - "postcss-pseudo-class-any-link": "^10.0.1", - "postcss-replace-overflow-wrap": "^4.0.0", - "postcss-selector-not": "^8.0.1" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-pseudo-class-any-link": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-10.0.1.tgz", - "integrity": "sha512-3el9rXlBOqTFaMFkWDOkHUTQekFIYnaQY55Rsp8As8QQkpiSgIYEcF/6Ond93oHiDsGb4kad8zjt+NPlOC1H0Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT-0", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-pseudo-class-any-link/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-reduce-idents": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-6.0.3.tgz", - "integrity": "sha512-G3yCqZDpsNPoQgbDUy3T0E6hqOQ5xigUtBQyrmq3tn2GxlyiL0yyl7H+T8ulQR6kOcHJ9t7/9H4/R2tv8tJbMA==", - "license": 
"MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-reduce-initial": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-6.1.0.tgz", - "integrity": "sha512-RarLgBK/CrL1qZags04oKbVbrrVK2wcxhvta3GCxrZO4zveibqbRPmm2VI8sSgCXwoUHEliRSbOfpR0b/VIoiw==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "caniuse-api": "^3.0.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-reduce-transforms": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.2.tgz", - "integrity": "sha512-sB+Ya++3Xj1WaT9+5LOOdirAxP7dJZms3GRcYheSPi1PiTMigsxHAdkrbItHxwYHr4kt1zL7mmcHstgMYT+aiA==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-replace-overflow-wrap": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz", - "integrity": "sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw==", - "license": "MIT", - "peerDependencies": { - "postcss": "^8.0.3" - } - }, - "node_modules/postcss-selector-not": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/postcss-selector-not/-/postcss-selector-not-8.0.1.tgz", - "integrity": "sha512-kmVy/5PYVb2UOhy0+LqUYAhKj7DUGDpSWa5LZqlkWJaaAV+dxxsOG3+St0yNLu6vsKD7Dmqx+nWQt0iil89+WA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/csstools" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/csstools" - } - ], - "license": "MIT", - 
"dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" - } - }, - "node_modules/postcss-selector-not/node_modules/postcss-selector-parser": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.1.tgz", - "integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", - "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-sort-media-queries": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-5.2.0.tgz", - "integrity": "sha512-AZ5fDMLD8SldlAYlvi8NIqo0+Z8xnXU2ia0jxmuhxAU+Lqt9K+AlmLNJ/zWEnE9x+Zx3qL3+1K20ATgNOr3fAA==", - "license": "MIT", - "dependencies": { - "sort-css-media-queries": "2.2.0" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "postcss": "^8.4.23" - } - }, - "node_modules/postcss-svgo": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-6.0.3.tgz", - "integrity": "sha512-dlrahRmxP22bX6iKEjOM+c8/1p+81asjKT+V5lrgOH944ryx/OHpclnIbGsKVd3uWOXFLYJwCVf0eEkJGvO96g==", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.2.0", - "svgo": "^3.2.0" - }, - "engines": { - "node": "^14 || ^16 || >= 18" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-unique-selectors": { 
- "version": "6.0.4", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-6.0.4.tgz", - "integrity": "sha512-K38OCaIrO8+PzpArzkLKB42dSARtC2tmG6PvD4b1o1Q2E9Os8jzfWFfSy/rixsHwohtsDdFtAWGjFVFUdwYaMg==", - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.0.16" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "license": "MIT" - }, - "node_modules/postcss-zindex": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-6.0.2.tgz", - "integrity": "sha512-5BxW9l1evPB/4ZIc+2GobEBoKC+h8gPGCMi+jxsYvd2x0mjq7wazk6DrP71pStqxE9Foxh5TVnonbWpFZzXaYg==", - "license": "MIT", - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - "peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/pretty-error": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", - "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", - "license": "MIT", - "dependencies": { - "lodash": "^4.17.20", - "renderkid": "^3.0.0" - } - }, - "node_modules/pretty-time": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz", - "integrity": "sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/prism-react-renderer": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-2.4.1.tgz", - "integrity": 
"sha512-ey8Ls/+Di31eqzUxC46h8MksNuGx/n0AAC8uKpwFau4RPDYLuE3EXTp8N8G2vX2N7UC/+IXeNUnlWBGGcAG+Ig==", - "license": "MIT", - "dependencies": { - "@types/prismjs": "^1.26.0", - "clsx": "^2.0.0" - }, - "peerDependencies": { - "react": ">=16.0.0" - } - }, - "node_modules/prismjs": { - "version": "1.30.0", - "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz", - "integrity": "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "license": "MIT" - }, - "node_modules/prompts": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", - "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", - "license": "MIT", - "dependencies": { - "kleur": "^3.0.3", - "sisteransi": "^1.0.5" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/prop-types": { - "version": "15.8.1", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", - "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "node_modules/property-information": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", - "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - 
"node_modules/proto-list": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", - "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", - "license": "ISC" - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/proxy-addr/node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/pupa": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/pupa/-/pupa-3.3.0.tgz", - "integrity": "sha512-LjgDO2zPtoXP2wJpDjZrGdojii1uqO0cnwKoIoUzkfS98HDmbeiGmYiXo3lXeFlq2xvne1QFQhwYXSUCLKtEuA==", - "license": "MIT", - "dependencies": { - "escape-goat": "^4.0.0" - }, - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pvtsutils": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.6.tgz", - "integrity": "sha512-PLgQXQ6H2FWCaeRak8vvk1GW462lMxB5s3Jm673N82zI4vqtVUPuZdffdZbPDFRoU8kAhItWFtPCWiPpp4/EDg==", - "license": "MIT", - "dependencies": { - "tslib": "^2.8.1" - } - 
}, - "node_modules/pvutils": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.5.tgz", - "integrity": "sha512-KTqnxsgGiQ6ZAzZCVlJH5eOjSnvlyEgx1m8bkRJfOhmGRqfo5KLvmAlACQkrjEtOQ4B7wF9TdSLIs9O90MX9xA==", - "license": "MIT", - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/qs": { - "version": "6.14.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz", - "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==", - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/randombytes": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", - "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", - "license": "MIT", - "dependencies": { - "safe-buffer": "^5.1.0" - } - }, - "node_modules/range-parser": { - "version": "1.2.0", - 
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz", - "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==", - "license": "MIT", - "dependencies": { - "bytes": "~3.1.2", - "http-errors": "~2.0.1", - "iconv-lite": "~0.4.24", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/raw-body/node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react": { - "version": "19.2.4", - "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", - "integrity": 
"sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "19.2.4", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz", - "integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==", - "license": "MIT", - "dependencies": { - "scheduler": "^0.27.0" - }, - "peerDependencies": { - "react": "^19.2.4" - } - }, - "node_modules/react-fast-compare": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.2.tgz", - "integrity": "sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==", - "license": "MIT" - }, - "node_modules/react-helmet-async": { - "name": "@slorber/react-helmet-async", - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@slorber/react-helmet-async/-/react-helmet-async-1.3.0.tgz", - "integrity": "sha512-e9/OK8VhwUSc67diWI8Rb3I0YgI9/SBQtnhe9aEuK6MhZm7ntZZimXgwXnd8W96YTmSOb9M4d8LwhRZyhWr/1A==", - "license": "Apache-2.0", - "dependencies": { - "@babel/runtime": "^7.12.5", - "invariant": "^2.2.4", - "prop-types": "^15.7.2", - "react-fast-compare": "^3.2.0", - "shallowequal": "^1.1.0" - }, - "peerDependencies": { - "react": "^16.6.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^16.6.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, - "node_modules/react-is": { - "version": "16.13.1", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", - "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "license": "MIT" - }, - "node_modules/react-json-view-lite": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-2.5.0.tgz", - "integrity": 
"sha512-tk7o7QG9oYyELWHL8xiMQ8x4WzjCzbWNyig3uexmkLb54r8jO0yH3WCWx8UZS0c49eSA4QUmG5caiRJ8fAn58g==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "react": "^18.0.0 || ^19.0.0" - } - }, - "node_modules/react-loadable": { - "name": "@docusaurus/react-loadable", - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-6.0.0.tgz", - "integrity": "sha512-YMMxTUQV/QFSnbgrP3tjDzLHRg7vsbMn8e9HAa8o/1iXoiomo48b7sk/kkmWEuWNDPJVlKSJRB6Y2fHqdJk+SQ==", - "license": "MIT", - "dependencies": { - "@types/react": "*" - }, - "peerDependencies": { - "react": "*" - } - }, - "node_modules/react-loadable-ssr-addon-v5-slorber": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/react-loadable-ssr-addon-v5-slorber/-/react-loadable-ssr-addon-v5-slorber-1.0.1.tgz", - "integrity": "sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.10.3" - }, - "engines": { - "node": ">=10.13.0" - }, - "peerDependencies": { - "react-loadable": "*", - "webpack": ">=4.41.1 || 5.x" - } - }, - "node_modules/react-router": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz", - "integrity": "sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.12.13", - "history": "^4.9.0", - "hoist-non-react-statics": "^3.1.0", - "loose-envify": "^1.3.1", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.2", - "react-is": "^16.6.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - }, - "peerDependencies": { - "react": ">=15" - } - }, - "node_modules/react-router-config": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz", - "integrity": 
"sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.1.2" - }, - "peerDependencies": { - "react": ">=15", - "react-router": ">=5" - } - }, - "node_modules/react-router-dom": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.4.tgz", - "integrity": "sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.12.13", - "history": "^4.9.0", - "loose-envify": "^1.3.1", - "prop-types": "^15.6.2", - "react-router": "5.3.4", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0" - }, - "peerDependencies": { - "react": ">=15" - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/recma-build-jsx": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-build-jsx/-/recma-build-jsx-1.0.0.tgz", - "integrity": "sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-util-build-jsx": "^3.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": 
"opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/recma-jsx": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/recma-jsx/-/recma-jsx-1.0.1.tgz", - "integrity": "sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==", - "license": "MIT", - "dependencies": { - "acorn-jsx": "^5.0.0", - "estree-util-to-js": "^2.0.0", - "recma-parse": "^1.0.0", - "recma-stringify": "^1.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/recma-parse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-parse/-/recma-parse-1.0.0.tgz", - "integrity": "sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "esast-util-from-js": "^2.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/recma-stringify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-stringify/-/recma-stringify-1.0.0.tgz", - "integrity": "sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-util-to-js": "^2.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/reflect-metadata": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz", - "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==", - "license": "Apache-2.0" - }, - "node_modules/regenerate": { - 
"version": "1.4.2", - "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", - "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", - "license": "MIT" - }, - "node_modules/regenerate-unicode-properties": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.2.tgz", - "integrity": "sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==", - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regexpu-core": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-6.4.0.tgz", - "integrity": "sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==", - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.2.2", - "regjsgen": "^0.8.0", - "regjsparser": "^0.13.0", - "unicode-match-property-ecmascript": "^2.0.0", - "unicode-match-property-value-ecmascript": "^2.2.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/registry-auth-token": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.1.1.tgz", - "integrity": "sha512-P7B4+jq8DeD2nMsAcdfaqHbssgHtZ7Z5+++a5ask90fvmJ8p5je4mOa+wzu+DB4vQ5tdJV/xywY+UnVFeQLV5Q==", - "license": "MIT", - "dependencies": { - "@pnpm/npm-conf": "^3.0.2" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/registry-url": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-6.0.1.tgz", - "integrity": "sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==", - "license": "MIT", - "dependencies": { - "rc": "1.2.8" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/regjsgen": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.8.0.tgz", - "integrity": "sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==", - "license": "MIT" - }, - "node_modules/regjsparser": { - "version": "0.13.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.13.0.tgz", - "integrity": "sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==", - "license": "BSD-2-Clause", - "dependencies": { - "jsesc": "~3.1.0" - }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/rehype-raw": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz", - "integrity": "sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "hast-util-raw": "^9.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/rehype-recma": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/rehype-recma/-/rehype-recma-1.0.0.tgz", - "integrity": "sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/hast": "^3.0.0", - "hast-util-to-estree": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/relateurl": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", - "integrity": "sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==", - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/remark-directive": { - "version": 
"3.0.1", - "resolved": "https://registry.npmjs.org/remark-directive/-/remark-directive-3.0.1.tgz", - "integrity": "sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-directive": "^3.0.0", - "micromark-extension-directive": "^3.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-emoji": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-4.0.1.tgz", - "integrity": "sha512-fHdvsTR1dHkWKev9eNyhTo4EFwbUvJ8ka9SgeWkMPYFX4WoI7ViVBms3PjlQYgw5TLvNQso3GUB/b/8t3yo+dg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.2", - "emoticon": "^4.0.1", - "mdast-util-find-and-replace": "^3.0.1", - "node-emoji": "^2.1.0", - "unified": "^11.0.4" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/remark-frontmatter": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz", - "integrity": "sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-frontmatter": "^2.0.0", - "micromark-extension-frontmatter": "^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-gfm": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", - "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-gfm": "^3.0.0", - "micromark-extension-gfm": "^3.0.0", - "remark-parse": "^11.0.0", - "remark-stringify": "^11.0.0", - 
"unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-mdx": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.1.1.tgz", - "integrity": "sha512-Pjj2IYlUY3+D8x00UJsIOg5BEvfMyeI+2uLPn9VO9Wg4MEtN/VTIq2NEJQfde9PnX15KgtHyl9S0BcTnWrIuWg==", - "license": "MIT", - "dependencies": { - "mdast-util-mdx": "^3.0.0", - "micromark-extension-mdxjs": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-parse": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", - "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-from-markdown": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-rehype": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", - "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "mdast-util-to-hast": "^13.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-stringify": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", - "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-to-markdown": 
"^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/renderkid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", - "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", - "license": "MIT", - "dependencies": { - "css-select": "^4.1.3", - "dom-converter": "^0.2.0", - "htmlparser2": "^6.1.0", - "lodash": "^4.17.21", - "strip-ansi": "^6.0.1" - } - }, - "node_modules/renderkid/node_modules/css-select": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", - "integrity": "sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==", - "license": "BSD-2-Clause", - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.0.1", - "domhandler": "^4.3.1", - "domutils": "^2.8.0", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/renderkid/node_modules/dom-serializer": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", - "integrity": "sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==", - "license": "MIT", - "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.2.0", - "entities": "^2.0.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/renderkid/node_modules/domhandler": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz", - "integrity": "sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==", - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "^2.2.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": 
"https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/renderkid/node_modules/domutils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", - "integrity": "sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", - "license": "BSD-2-Clause", - "dependencies": { - "dom-serializer": "^1.0.1", - "domelementtype": "^2.2.0", - "domhandler": "^4.2.0" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/renderkid/node_modules/entities": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", - "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", - "license": "BSD-2-Clause", - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/renderkid/node_modules/htmlparser2": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", - "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "MIT", - "dependencies": { - "domelementtype": "^2.0.1", - "domhandler": "^4.0.0", - "domutils": "^2.5.2", - "entities": "^2.0.0" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", - "license": "MIT", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": 
"sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-like": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", - "engines": { - "node": "*" - } - }, - "node_modules/requires-port": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", - "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", - "license": "MIT" - }, - "node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-alpn": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "license": "MIT" - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/resolve-pathname": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", - "integrity": "sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==", - "license": "MIT" - }, - "node_modules/responselike": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", - "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", - "license": "MIT", - "dependencies": { - "lowercase-keys": "^3.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/retry": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", - "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/robust-predicates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", - "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", - "license": "Unlicense" - }, - "node_modules/roughjs": { - "version": "4.6.6", - "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz", - "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==", - "license": "MIT", - "dependencies": { - "hachure-fill": "^0.5.2", - "path-data-parser": "^0.1.0", - "points-on-curve": "^0.2.0", - "points-on-path": "^0.2.1" - } - }, - "node_modules/rtlcss": { - 
"version": "4.3.0", - "resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-4.3.0.tgz", - "integrity": "sha512-FI+pHEn7Wc4NqKXMXFM+VAYKEj/mRIcW4h24YVwVtyjI+EqGrLc2Hx/Ny0lrZ21cBWU2goLy36eqMcNj3AQJig==", - "license": "MIT", - "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0", - "postcss": "^8.4.21", - "strip-json-comments": "^3.1.1" - }, - "bin": { - "rtlcss": "bin/rtlcss.js" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/run-applescript": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", - "integrity": "sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/rw": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", - "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", - "license": "BSD-3-Clause" - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - 
{ - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT" - }, - "node_modules/sax": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.5.0.tgz", - "integrity": "sha512-21IYA3Q5cQf089Z6tgaUTr7lDAyzoTPx5HRtbhsME8Udispad8dC/+sziTNugOEx54ilvatQ9YCzl4KQLPcRHA==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=11.0.0" - } - }, - "node_modules/scheduler": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", - "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", - "license": "MIT" - }, - "node_modules/schema-dts": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/schema-dts/-/schema-dts-1.1.5.tgz", - "integrity": "sha512-RJr9EaCmsLzBX2NDiO5Z3ux2BVosNZN5jo0gWgsyKvxKIUL5R3swNvoorulAeL9kLB0iTSX7V6aokhla2m7xbg==", - "license": "Apache-2.0" - }, - "node_modules/schema-utils": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.3.tgz", - "integrity": "sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==", - "license": "MIT", - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/search-insights": { - "version": "2.17.3", - "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.17.3.tgz", - "integrity": 
"sha512-RQPdCYTa8A68uM2jwxoY842xDhvx3E5LFL1LxvxCNMev4o5mLuokczhzjAgGwUZBAmOKZknArSxLKmXtIi2AxQ==", - "license": "MIT", - "peer": true - }, - "node_modules/section-matter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz", - "integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==", - "license": "MIT", - "dependencies": { - "extend-shallow": "^2.0.1", - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/select-hose": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", - "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==", - "license": "MIT" - }, - "node_modules/selfsigned": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-5.5.0.tgz", - "integrity": "sha512-ftnu3TW4+3eBfLRFnDEkzGxSF/10BJBkaLJuBHZX0kiPS7bRdlpZGu6YGt4KngMkdTwJE6MbjavFpqHvqVt+Ew==", - "license": "MIT", - "dependencies": { - "@peculiar/x509": "^1.14.2", - "pkijs": "^3.3.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/semver": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", - "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-4.0.0.tgz", - "integrity": "sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==", - "license": "MIT", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/send": { - "version": "0.19.2", 
- "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", - "integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "~0.5.2", - "http-errors": "~2.0.1", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "~2.4.1", - "range-parser": "~1.2.1", - "statuses": "~2.0.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/send/node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serialize-javascript": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", - "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", - "license": "BSD-3-Clause", - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/serve-handler": { - "version": "6.1.6", - "resolved": "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.6.tgz", - 
"integrity": "sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ==", - "license": "MIT", - "dependencies": { - "bytes": "3.0.0", - "content-disposition": "0.5.2", - "mime-types": "2.1.18", - "minimatch": "3.1.2", - "path-is-inside": "1.0.2", - "path-to-regexp": "3.3.0", - "range-parser": "1.2.0" - } - }, - "node_modules/serve-handler/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/serve-handler/node_modules/path-to-regexp": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-3.3.0.tgz", - "integrity": "sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==", - "license": "MIT" - }, - "node_modules/serve-index": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.2.tgz", - "integrity": "sha512-KDj11HScOaLmrPxl70KYNW1PksP4Nb/CLL2yvC+Qd2kHMPEEpfc4Re2e4FOay+bC/+XQl/7zAcWON3JVo5v3KQ==", - "license": "MIT", - "dependencies": { - "accepts": "~1.3.8", - "batch": "0.6.1", - "debug": "2.6.9", - "escape-html": "~1.0.3", - "http-errors": "~1.8.0", - "mime-types": "~2.1.35", - "parseurl": "~1.3.3" - }, - "engines": { - "node": ">= 0.8.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/serve-index/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - 
"node_modules/serve-index/node_modules/depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/http-errors": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", - "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", - "license": "MIT", - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/serve-index/node_modules/statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": 
"sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-static": { - "version": "1.16.3", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz", - "integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==", - "license": "MIT", - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "~0.19.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/set-function-length": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", - "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", - "license": "MIT", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "license": "ISC" - }, - "node_modules/shallow-clone": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", - "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", - "license": "MIT", - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shallowequal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz", - "integrity": 
"sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==", - "license": "MIT" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shell-quote": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", - "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": 
"sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "license": "ISC" - }, - "node_modules/sirv": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", - "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", - "license": "MIT", - "dependencies": { - "@polka/url": "^1.0.0-next.24", - "mrmime": 
"^2.0.0", - "totalist": "^3.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "license": "MIT" - }, - "node_modules/sitemap": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-7.1.3.tgz", - "integrity": "sha512-tAjEd+wt/YwnEbfNB2ht51ybBJxbEWwe5ki/Z//Wh0rpBFTCUSj46GnxUKEWzhfuJTsee8x3lybHxFgUMig2hw==", - "license": "MIT", - "dependencies": { - "@types/node": "^17.0.5", - "@types/sax": "^1.2.1", - "arg": "^5.0.0", - "sax": "^1.2.4" - }, - "bin": { - "sitemap": "dist/cli.js" - }, - "engines": { - "node": ">=12.0.0", - "npm": ">=5.6.0" - } - }, - "node_modules/sitemap/node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", - "license": "MIT" - }, - "node_modules/skin-tone": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/skin-tone/-/skin-tone-2.0.0.tgz", - "integrity": "sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==", - "license": "MIT", - "dependencies": { - "unicode-emoji-modifier-base": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/snake-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", - "integrity": 
"sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", - "license": "MIT", - "dependencies": { - "dot-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/sockjs": { - "version": "0.3.24", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", - "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", - "license": "MIT", - "dependencies": { - "faye-websocket": "^0.11.3", - "uuid": "^8.3.2", - "websocket-driver": "^0.7.4" - } - }, - "node_modules/sort-css-media-queries": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.2.0.tgz", - "integrity": "sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA==", - "license": "MIT", - "engines": { - "node": ">= 6.3.0" - } - }, - "node_modules/source-map": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", - "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", - "license": "BSD-3-Clause", - "engines": { - "node": ">= 12" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", - "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/source-map-support/node_modules/source-map": { - "version": 
"0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/space-separated-tokens": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", - "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/spdy": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", - "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", - "license": "MIT", - "dependencies": { - "debug": "^4.1.0", - "handle-thing": "^2.0.0", - "http-deceiver": "^1.2.7", - "select-hose": "^2.0.0", - "spdy-transport": "^3.0.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/spdy-transport": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", - "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", - "license": "MIT", - "dependencies": { - "debug": "^4.1.0", - "detect-node": "^2.0.4", - "hpack.js": "^2.1.6", - "obuf": "^1.1.2", - "readable-stream": "^3.0.6", - "wbuf": "^1.7.3" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "license": "BSD-3-Clause" - }, - "node_modules/srcset": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/srcset/-/srcset-4.0.0.tgz", - "integrity": 
"sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/statuses": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", - "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/std-env": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", - "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", - "license": "MIT" - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width/node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": 
"https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/string-width/node_modules/strip-ansi": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz", - "integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.2.2" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/stringify-entities": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", - "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", - "license": "MIT", - "dependencies": { - "character-entities-html4": "^2.0.0", - "character-entities-legacy": "^3.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/stringify-object": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", - "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", - "license": "BSD-2-Clause", - "dependencies": { - "get-own-enumerable-property-symbols": "^3.0.0", - "is-obj": "^1.0.1", - "is-regexp": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz", - "integrity": 
"sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/style-to-js": { - "version": "1.1.21", - "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", - "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", - "license": "MIT", - "dependencies": { - "style-to-object": "1.0.14" - } - }, - "node_modules/style-to-object": { - "version": "1.0.14", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", - "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", - "license": "MIT", - "dependencies": { - "inline-style-parser": "0.2.7" - } - }, - "node_modules/stylehacks": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-6.1.1.tgz", - "integrity": "sha512-gSTTEQ670cJNoaeIp9KX6lZmm8LJ3jPB5yJmX8Zq/wQxOsAFXV3qjWzHas3YYk1qesuVIyYWWUpZ0vSE/dTSGg==", - "license": "MIT", - "dependencies": { - "browserslist": "^4.23.0", - "postcss-selector-parser": "^6.0.16" - }, - "engines": { - "node": "^14 || ^16 || >=18.0" - }, - 
"peerDependencies": { - "postcss": "^8.4.31" - } - }, - "node_modules/stylis": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz", - "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==", - "license": "MIT" - }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/svg-parser": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", - "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==", - "license": "MIT" - }, - "node_modules/svgo": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.3.2.tgz", - "integrity": "sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==", - "license": "MIT", - "dependencies": { - "@trysound/sax": "0.2.0", - "commander": "^7.2.0", - "css-select": "^5.1.0", - "css-tree": "^2.3.1", - "css-what": "^6.1.0", - "csso": "^5.0.5", - "picocolors": "^1.0.0" - }, - "bin": { - "svgo": "bin/svgo" - }, - "engines": { - "node": ">=14.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/svgo" - } - }, 
- "node_modules/svgo/node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/terser": { - "version": "5.46.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.46.0.tgz", - "integrity": "sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==", - "license": "BSD-2-Clause", - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.15.0", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser-webpack-plugin": { - "version": "5.3.16", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz", - "integrity": "sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==", - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.25", - "jest-worker": "^27.4.5", - "schema-utils": "^4.3.0", - "serialize-javascript": "^6.0.2", - "terser": "^5.31.1" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "esbuild": { - "optional": true - }, - "uglify-js": { - 
"optional": true - } - } - }, - "node_modules/terser-webpack-plugin/node_modules/jest-worker": { - "version": "27.5.1", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", - "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", - "license": "MIT", - "dependencies": { - "@types/node": "*", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": ">= 10.13.0" - } - }, - "node_modules/terser-webpack-plugin/node_modules/supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/terser/node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", - "license": "MIT" - }, - "node_modules/thingies": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/thingies/-/thingies-2.5.0.tgz", - "integrity": "sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw==", - "license": "MIT", - "engines": { - "node": ">=10.18" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "^2" - } - }, - "node_modules/thunky": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", - "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", - "license": "MIT" - }, - "node_modules/tiny-invariant": { - "version": 
"1.3.3", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", - "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", - "license": "MIT" - }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", - "license": "MIT" - }, - "node_modules/tinyexec": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", - "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/tinypool": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", - "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", - "license": "MIT", - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/totalist": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", - "integrity": 
"sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/tree-dump": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/tree-dump/-/tree-dump-1.1.0.tgz", - "integrity": "sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/streamich" - }, - "peerDependencies": { - "tslib": "2" - } - }, - "node_modules/trim-lines": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", - "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/trough": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", - "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/ts-dedent": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", - "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", - "license": "MIT", - "engines": { - "node": ">=6.10" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" - }, - "node_modules/tsyringe": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/tsyringe/-/tsyringe-4.10.0.tgz", - 
"integrity": "sha512-axr3IdNuVIxnaK5XGEUFTu3YmAQ6lllgrvqfEoR16g/HGnYY/6We4oWENtAnzK6/LpJ2ur9PAb80RBt7/U4ugw==", - "license": "MIT", - "dependencies": { - "tslib": "^1.9.3" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/tsyringe/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "license": "0BSD" - }, - "node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/type-is/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/type-is/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - 
"node_modules/typedarray-to-buffer": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", - "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", - "license": "MIT", - "dependencies": { - "is-typedarray": "^1.0.0" - } - }, - "node_modules/typescript": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", - "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", - "devOptional": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/ufo": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", - "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", - "license": "MIT" - }, - "node_modules/undici-types": { - "version": "7.18.2", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.18.2.tgz", - "integrity": "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==", - "license": "MIT" - }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", - "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-emoji-modifier-base": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unicode-emoji-modifier-base/-/unicode-emoji-modifier-base-1.0.0.tgz", - "integrity": "sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==", - "license": "MIT", 
- "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", - "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", - "license": "MIT", - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^2.0.0", - "unicode-property-aliases-ecmascript": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-value-ecmascript": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.1.tgz", - "integrity": "sha512-JQ84qTuMg4nVkx8ga4A16a1epI9H6uTXAknqxkGF/aFfRLw1xC/Bp24HNLaZhHSkWd3+84t8iXnp1J0kYcZHhg==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.2.0.tgz", - "integrity": "sha512-hpbDzxUY9BFwX+UeBnxv3Sh1q7HFxj48DTmXchNgRa46lO8uj3/1iEn3MiNUYTg1g9ctIqXCCERn8gYZhHC5lQ==", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unified": { - "version": "11.0.5", - "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", - "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "bail": "^2.0.0", - "devlop": "^1.0.0", - "extend": "^3.0.0", - "is-plain-obj": "^4.0.0", - "trough": "^2.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unique-string": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", - "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", - "license": "MIT", - "dependencies": { - "crypto-random-string": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/unist-util-is": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", - "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-position": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", - "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-position-from-estree": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz", - "integrity": "sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-stringify-position": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", - "integrity": 
"sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-visit": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", - "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-is": "^6.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-visit-parents": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", - "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.2.3", - "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", - "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/update-notifier": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-6.0.2.tgz", - "integrity": "sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==", - "license": "BSD-2-Clause", - "dependencies": { - "boxen": "^7.0.0", - "chalk": "^5.0.1", - "configstore": "^6.0.0", - "has-yarn": "^3.0.0", - "import-lazy": "^4.0.0", - "is-ci": "^3.0.1", - "is-installed-globally": "^0.4.0", - "is-npm": "^6.0.0", - "is-yarn-global": "^0.4.0", - "latest-version": "^7.0.0", - "pupa": "^3.1.0", - "semver": "^7.3.7", - "semver-diff": "^4.0.0", - "xdg-basedir": "^5.1.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/yeoman/update-notifier?sponsor=1" - } - }, - "node_modules/update-notifier/node_modules/boxen": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-7.1.1.tgz", - "integrity": "sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog==", - "license": "MIT", - "dependencies": { - "ansi-align": "^3.0.1", - "camelcase": "^7.0.1", - "chalk": "^5.2.0", - "cli-boxes": "^3.0.0", - "string-width": "^5.1.2", - "type-fest": "^2.13.0", - "widest-line": "^4.0.1", - "wrap-ansi": "^8.1.0" - }, - "engines": 
{ - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/update-notifier/node_modules/camelcase": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-7.0.1.tgz", - "integrity": "sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==", - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/update-notifier/node_modules/chalk": { - "version": "5.6.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", - "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/url-loader": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", - "integrity": "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", - "license": "MIT", - "dependencies": { - "loader-utils": "^2.0.0", - "mime-types": "^2.1.27", - "schema-utils": "^3.0.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "file-loader": "*", - "webpack": "^4.0.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "file-loader": { - "optional": true - } - } - }, - "node_modules/url-loader/node_modules/ajv": { - "version": "6.14.0", - "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz", - "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/url-loader/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "license": "MIT", - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/url-loader/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "license": "MIT" - }, - "node_modules/url-loader/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/url-loader/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/url-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": 
"sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "license": "MIT", - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "license": "MIT" - }, - "node_modules/utila": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", - "integrity": "sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==", - "license": "MIT" - }, - "node_modules/utility-types": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.11.0.tgz", - "integrity": "sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/value-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", - "integrity": 
"sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==", - "license": "MIT" - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/vfile": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", - "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vfile-location": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", - "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vfile-message": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", - "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-stringify-position": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vscode-jsonrpc": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", - "integrity": 
"sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/vscode-languageserver": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", - "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", - "license": "MIT", - "dependencies": { - "vscode-languageserver-protocol": "3.17.5" - }, - "bin": { - "installServerIntoExtension": "bin/installServerIntoExtension" - } - }, - "node_modules/vscode-languageserver-protocol": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", - "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", - "license": "MIT", - "dependencies": { - "vscode-jsonrpc": "8.2.0", - "vscode-languageserver-types": "3.17.5" - } - }, - "node_modules/vscode-languageserver-textdocument": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", - "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", - "license": "MIT" - }, - "node_modules/vscode-languageserver-types": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", - "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", - "license": "MIT" - }, - "node_modules/vscode-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", - "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", - "license": "MIT" - }, - 
"node_modules/watchpack": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.5.1.tgz", - "integrity": "sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==", - "license": "MIT", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/wbuf": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", - "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", - "license": "MIT", - "dependencies": { - "minimalistic-assert": "^1.0.0" - } - }, - "node_modules/web-namespaces": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", - "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/webpack": { - "version": "5.105.3", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.105.3.tgz", - "integrity": "sha512-LLBBA4oLmT7sZdHiYE/PeVuifOxYyE2uL/V+9VQP7YSYdJU7bSf7H8bZRRxW8kEPMkmVjnrXmoR3oejIdX0xbg==", - "license": "MIT", - "dependencies": { - "@types/eslint-scope": "^3.7.7", - "@types/estree": "^1.0.8", - "@types/json-schema": "^7.0.15", - "@webassemblyjs/ast": "^1.14.1", - "@webassemblyjs/wasm-edit": "^1.14.1", - "@webassemblyjs/wasm-parser": "^1.14.1", - "acorn": "^8.16.0", - "acorn-import-phases": "^1.0.3", - "browserslist": "^4.28.1", - "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.19.0", - "es-module-lexer": "^2.0.0", - "eslint-scope": "5.1.1", - "events": "^3.2.0", - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.11", - "json-parse-even-better-errors": "^2.3.1", - "loader-runner": "^4.3.1", - "mime-types": "^2.1.27", - "neo-async": "^2.6.2", - "schema-utils": 
"^4.3.3", - "tapable": "^2.3.0", - "terser-webpack-plugin": "^5.3.16", - "watchpack": "^2.5.1", - "webpack-sources": "^3.3.4" - }, - "bin": { - "webpack": "bin/webpack.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependenciesMeta": { - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-bundle-analyzer": { - "version": "4.10.2", - "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.10.2.tgz", - "integrity": "sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw==", - "license": "MIT", - "dependencies": { - "@discoveryjs/json-ext": "0.5.7", - "acorn": "^8.0.4", - "acorn-walk": "^8.0.0", - "commander": "^7.2.0", - "debounce": "^1.2.1", - "escape-string-regexp": "^4.0.0", - "gzip-size": "^6.0.0", - "html-escaper": "^2.0.2", - "opener": "^1.5.2", - "picocolors": "^1.0.0", - "sirv": "^2.0.3", - "ws": "^7.3.1" - }, - "bin": { - "webpack-bundle-analyzer": "lib/bin/analyzer.js" - }, - "engines": { - "node": ">= 10.13.0" - } - }, - "node_modules/webpack-bundle-analyzer/node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/webpack-dev-middleware": { - "version": "7.4.5", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-7.4.5.tgz", - "integrity": "sha512-uxQ6YqGdE4hgDKNf7hUiPXOdtkXvBJXrfEGYSx7P7LC8hnUYGK70X6xQXUvXeNyBDDcsiQXpG2m3G9vxowaEuA==", - "license": "MIT", - "dependencies": { - "colorette": "^2.0.10", - "memfs": "^4.43.1", - "mime-types": "^3.0.1", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 
18.12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "webpack": { - "optional": true - } - } - }, - "node_modules/webpack-dev-middleware/node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/webpack-dev-middleware/node_modules/mime-types": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", - "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", - "license": "MIT", - "dependencies": { - "mime-db": "^1.54.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/webpack-dev-middleware/node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/webpack-dev-server": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-5.2.3.tgz", - "integrity": "sha512-9Gyu2F7+bg4Vv+pjbovuYDhHX+mqdqITykfzdM9UyKqKHlsE5aAjRhR+oOEfXW5vBeu8tarzlJFIZva4ZjAdrQ==", - "license": "MIT", - "dependencies": { - "@types/bonjour": "^3.5.13", - "@types/connect-history-api-fallback": "^1.5.4", - "@types/express": "^4.17.25", - "@types/express-serve-static-core": "^4.17.21", - "@types/serve-index": "^1.9.4", - "@types/serve-static": "^1.15.5", - "@types/sockjs": "^0.3.36", - "@types/ws": 
"^8.5.10", - "ansi-html-community": "^0.0.8", - "bonjour-service": "^1.2.1", - "chokidar": "^3.6.0", - "colorette": "^2.0.10", - "compression": "^1.8.1", - "connect-history-api-fallback": "^2.0.0", - "express": "^4.22.1", - "graceful-fs": "^4.2.6", - "http-proxy-middleware": "^2.0.9", - "ipaddr.js": "^2.1.0", - "launch-editor": "^2.6.1", - "open": "^10.0.3", - "p-retry": "^6.2.0", - "schema-utils": "^4.2.0", - "selfsigned": "^5.5.0", - "serve-index": "^1.9.1", - "sockjs": "^0.3.24", - "spdy": "^4.0.2", - "webpack-dev-middleware": "^7.4.2", - "ws": "^8.18.0" - }, - "bin": { - "webpack-dev-server": "bin/webpack-dev-server.js" - }, - "engines": { - "node": ">= 18.12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "webpack": { - "optional": true - }, - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-dev-server/node_modules/define-lazy-prop": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", - "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-dev-server/node_modules/open": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/open/-/open-10.2.0.tgz", - "integrity": "sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==", - "license": "MIT", - "dependencies": { - "default-browser": "^5.2.1", - "define-lazy-prop": "^3.0.0", - "is-inside-container": "^1.0.0", - "wsl-utils": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/webpack-dev-server/node_modules/ws": { - "version": "8.19.0", - 
"resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", - "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/webpack-merge": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-6.0.1.tgz", - "integrity": "sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==", - "license": "MIT", - "dependencies": { - "clone-deep": "^4.0.1", - "flat": "^5.0.2", - "wildcard": "^2.0.1" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/webpack-sources": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.4.tgz", - "integrity": "sha512-7tP1PdV4vF+lYPnkMR0jMY5/la2ub5Fc/8VQrrU+lXkiM6C4TjVfGw7iKfyhnTQOsD+6Q/iKw0eFciziRgD58Q==", - "license": "MIT", - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/webpack/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/webpack/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/webpackbar": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/webpackbar/-/webpackbar-6.0.1.tgz", - "integrity": "sha512-TnErZpmuKdwWBdMoexjio3KKX6ZtoKHRVvLIU0A47R0VVBDtx3ZyOJDktgYixhoJokZTYTt1Z37OkO9pnGJa9Q==", - "license": "MIT", - "dependencies": { - "ansi-escapes": "^4.3.2", - "chalk": "^4.1.2", - "consola": "^3.2.3", - "figures": "^3.2.0", - "markdown-table": "^2.0.0", - "pretty-time": "^1.1.0", - "std-env": "^3.7.0", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=14.21.3" - }, - "peerDependencies": { - "webpack": "3 || 4 || 5" - } - }, - "node_modules/webpackbar/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "license": "MIT" - }, - "node_modules/webpackbar/node_modules/markdown-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", - "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", - "license": "MIT", - "dependencies": { - "repeat-string": "^1.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/webpackbar/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/webpackbar/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "license": 
"MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/websocket-driver": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", - "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", - "license": "Apache-2.0", - "dependencies": { - "http-parser-js": ">=0.5.1", - "safe-buffer": ">=5.1.0", - "websocket-extensions": ">=0.1.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/websocket-extensions": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", - "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", - "license": "Apache-2.0", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/widest-line": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz", - "integrity": "sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==", - "license": "MIT", - "dependencies": { - "string-width": "^5.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/wildcard": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", - "integrity": 
"sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", - "license": "MIT" - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", - "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.2.0.tgz", - "integrity": "sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.2.2" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/write-file-atomic": { - "version": "3.0.3", - "resolved": 
"https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", - "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4", - "is-typedarray": "^1.0.0", - "signal-exit": "^3.0.2", - "typedarray-to-buffer": "^3.1.5" - } - }, - "node_modules/ws": { - "version": "7.5.10", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.10.tgz", - "integrity": "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==", - "license": "MIT", - "engines": { - "node": ">=8.3.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": "^5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/wsl-utils": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.1.0.tgz", - "integrity": "sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==", - "license": "MIT", - "dependencies": { - "is-wsl": "^3.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/wsl-utils/node_modules/is-wsl": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.1.tgz", - "integrity": "sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw==", - "license": "MIT", - "dependencies": { - "is-inside-container": "^1.0.0" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/xdg-basedir": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-5.1.0.tgz", - "integrity": "sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==", - "license": "MIT", - "engines": 
{ - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/xml-js": { - "version": "1.6.11", - "resolved": "https://registry.npmjs.org/xml-js/-/xml-js-1.6.11.tgz", - "integrity": "sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==", - "license": "MIT", - "dependencies": { - "sax": "^1.2.4" - }, - "bin": { - "xml-js": "bin/cli.js" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "license": "ISC" - }, - "node_modules/yocto-queue": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz", - "integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==", - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zwitch": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", - "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - } - } -} diff --git a/docs/package.json b/docs/package.json index 3d74850..2ab6dd3 100644 --- a/docs/package.json +++ b/docs/package.json @@ -3,48 +3,38 @@ "version": "0.0.0", "private": true, "scripts": { - "docusaurus": "docusaurus", - "start": "docusaurus start", - "build": "docusaurus build", - "swizzle": "docusaurus swizzle", - "deploy": "docusaurus deploy", - "clear": "docusaurus clear", - "serve": "docusaurus serve", - "write-translations": "docusaurus write-translations", - "write-heading-ids": "docusaurus write-heading-ids", - "typecheck": "tsc", - "check": 
"biome ci src/ docusaurus.config.ts sidebars.ts" + "build": "next build", + "dev": "next dev", + "start": "serve out", + "types:check": "fumadocs-mdx && next typegen && tsc --noEmit", + "postinstall": "fumadocs-mdx", + "lint": "biome check", + "format": "biome format --write" }, "dependencies": { - "@docusaurus/core": "^3.9.2", - "@docusaurus/preset-classic": "^3.9.2", - "@docusaurus/theme-mermaid": "^3.9.2", - "@mdx-js/react": "^3.0.0", - "clsx": "^2.0.0", - "prism-react-renderer": "^2.3.0", - "react": "^19.0.0", - "react-dom": "^19.0.0" + "@orama/orama": "^3.1.18", + "clsx": "^2.1.1", + "fumadocs-core": "16.8.5", + "fumadocs-mdx": "14.3.2", + "fumadocs-ui": "16.8.5", + "lucide-react": "^1.14.0", + "mermaid": "^11.14.0", + "next": "16.2.4", + "next-themes": "^0.4.6", + "react": "^19.2.5", + "react-dom": "^19.2.5", + "tailwind-merge": "^3.5.0" }, "devDependencies": { - "@biomejs/biome": "2.4.10", - "@docusaurus/module-type-aliases": "^3.9.2", - "@docusaurus/tsconfig": "^3.9.2", - "@docusaurus/types": "^3.9.2", - "typescript": "~5.6.2" - }, - "browserslist": { - "production": [ - ">0.5%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 3 chrome version", - "last 3 firefox version", - "last 5 safari version" - ] - }, - "engines": { - "node": ">=18.0" + "@biomejs/biome": "^2.4.14", + "@tailwindcss/postcss": "^4.2.4", + "@types/mdx": "^2.0.13", + "@types/node": "^25.6.0", + "@types/react": "^19.2.14", + "@types/react-dom": "^19.2.3", + "postcss": "^8.5.13", + "serve": "^14.2.6", + "tailwindcss": "^4.2.4", + "typescript": "^6.0.3" } } diff --git a/docs/pages/api/analysis/analysis.mdx b/docs/pages/api/analysis/analysis.mdx deleted file mode 100644 index 441651e..0000000 --- a/docs/pages/api/analysis/analysis.mdx +++ /dev/null @@ -1,623 +0,0 @@ ---- -sidebar_position: 16 -slug: /api/analysis/analysis -title: Analysis -description: API reference for dagron's graph analysis toolkit -- explain, what-if, lineage tracking, linting, schema validation, and 
query DSL. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Analysis - -The analysis module provides tools for understanding, validating, and querying -DAG structure. It includes node diagnostics (`explain`), hypothetical mutation -analysis (`what_if`), data lineage tracking, structural linting, schema -validation, and a mini query DSL. - -For a guided introduction, see [Inspecting Graphs](/guide/core-concepts/inspecting-graphs). - -```python -from dagron.analysis import ( - explain, what_if, track_lineage, lint, query, - NodeExplanation, WhatIfResult, LineageReport, - LintReport, DAGSchema, -) -``` - ---- - -## explain - - NodeExplanation`} /> - -Generate a structured diagnostic for a single node in the DAG. The -explanation includes depth, critical path membership, a bottleneck score, -dominator analysis, and dependency information. - - - -**Returns:** `NodeExplanation` -- Frozen dataclass with all diagnostic information. - -```python -from dagron.analysis import explain - -info = explain(dag, "transform") -print(info.summary()) -# Node: transform -# Depth from root: 1 -# On critical path: True -# Bottleneck score: 0.50 -# In-degree: 1, Out-degree: 1 -# Ancestors: 1, Descendants: 1 -# Root: False, Leaf: False -# Blocked by: extract -# Blocks: load -``` - ---- - -## NodeExplanation - - - -Structured diagnostic for a single node in the DAG. - - - -#### NodeExplanation.summary - - str`} /> - -**Returns:** `str` -- Human-readable multi-line summary of the node diagnostic. - ---- - -## what_if - - WhatIfResult`} /> - -Analyze the effect of hypothetical mutations without modifying the original -DAG. The function creates an internal snapshot, applies the proposed changes, -and reports the structural impact. - - - -**Returns:** `WhatIfResult` -- Frozen dataclass describing the impact of the proposed changes. 
- -```python -from dagron.analysis import what_if - -# What happens if we remove the "validate" node? -result = what_if(dag, remove_nodes=["validate"]) -print(result.summary()) - -# Would this edge create a cycle? -result = what_if(dag, add_edges=[("load", "extract")]) -if result.would_create_cycle: - print(f"Cycle detected: {' -> '.join(result.cycle_path)}") -``` - ---- - -## WhatIfResult - - - -Result of a hypothetical graph mutation analysis. - - - -#### WhatIfResult.summary - - str`} /> - -**Returns:** `str` -- Human-readable summary. Reports the cycle path if one would be created, otherwise shows structural statistics. - ---- - -## track_lineage - - LineageReport`} /> - -Convenience function to create a `LineageReport` for post-execution data -lineage analysis. Equivalent to `LineageReport(dag, execution_result)`. - - - -**Returns:** `LineageReport` -- A lineage analysis report. - -```python -from dagron.analysis import track_lineage - -report = track_lineage(dag, result) -print(report.summary()) -``` - ---- - -## LineageReport - - None: ...`} /> - -Post-execution lineage analysis over a DAG and its execution result. Tracks -which upstream nodes contributed to each output, detects broken lineage -(upstream failures that did not prevent downstream execution), and finds -data flow paths between any two nodes. - - - -### Methods - ---- - -#### LineageReport.lineage - - LineageRecord`} /> - -Compute lineage for a single node, filtered to actually-completed upstream nodes. - - - -**Returns:** `LineageRecord` -- The node's upstream provenance. - -**Raises:** `KeyError` -- If the node is not in the DAG. - -```python -record = report.lineage("load") -print(f"Direct inputs: {record.direct_inputs}") -print(f"Full upstream chain: {record.upstream_chain}") -print(f"Depth: {record.depth}") -``` - ---- - -#### LineageReport.impact - - ImpactRecord`} /> - -Compute downstream impact of a single node, restricted to nodes that -actually completed. 
- - - -**Returns:** `ImpactRecord` -- Downstream impact analysis. - -**Raises:** `KeyError` -- If the node is not in the DAG. - -```python -impact = report.impact("extract") -print(f"Directly affects: {impact.directly_affects}") -print(f"Transitively affects: {impact.transitively_affects}") -print(f"Affected leaves: {impact.affected_leaves}") -``` - ---- - -#### LineageReport.data_flow_path - - list[str] | None`} /> - -Find the shortest path from `source` to `target` where all intermediate -nodes completed successfully. - - - -**Returns:** `list[str] | None` -- Ordered list of node names forming the path, or `None` if no completed path exists. - ---- - -#### LineageReport.broken_lineage - - list[tuple[str, str]]`} /> - -Find edges where the upstream node failed but the downstream node still -completed. This can happen when `fail_fast=False`. - -**Returns:** `list[tuple[str, str]]` -- List of `(upstream, downstream)` tuples. - ---- - -#### LineageReport.full_lineage - - dict[str, LineageRecord]`} /> - -Compute lineage for all completed nodes. - -**Returns:** `dict[str, LineageRecord]` -- Mapping of node name to `LineageRecord` for every successfully completed node. - ---- - -#### LineageReport.summary - - str`} /> - -**Returns:** `str` -- Human-readable summary including total nodes, completed count, source/leaf nodes, and broken lineage edges. - ---- - -## LineageRecord - - - - - ---- - -## ImpactRecord - - - - - ---- - -## lint - - LintReport`} /> - -Analyze a DAG for structural anti-patterns. Checks for high fan-in/fan-out -nodes, disconnected components, redundant transitive edges, excessive depth, -and isolated nodes. - - - -**Returns:** `LintReport` -- Report containing all detected warnings. 
- -```python -from dagron.analysis import lint - -report = lint(dag, max_fan_in=5, max_fan_out=5) -if not report.ok: - print(report.summary()) -else: - print("DAG passes all lint checks") -``` - -### Lint codes - -| Code | Severity | Description | -|------|----------|-------------| -| `EMPTY_GRAPH` | INFO | DAG has no nodes. | -| `HIGH_FAN_IN` | WARNING | Nodes with in-degree exceeding the threshold. | -| `HIGH_FAN_OUT` | WARNING | Nodes with out-degree exceeding the threshold. | -| `DISCONNECTED` | WARNING | DAG has multiple disconnected components. | -| `EXCESSIVE_DEPTH` | WARNING | DAG depth exceeds the threshold. | -| `REDUNDANT_EDGES` | INFO | Transitive edges that could be removed without changing reachability. | -| `ISOLATED_NODES` | WARNING | Nodes with no edges in a graph that has edges. | - ---- - -## LintReport - - - - - -### Properties - -| Property | Type | Description | -|----------|------|-------------| -| `error_count` | `int` | Number of ERROR-severity warnings. | -| `warning_count` | `int` | Number of WARNING-severity warnings. | -| `info_count` | `int` | Number of INFO-severity warnings. | -| `ok` | `bool` | `True` if there are zero errors (warnings and info are allowed). | - -#### LintReport.summary - - str`} /> - -**Returns:** `str` -- Formatted report with counts and individual warning details. - ---- - -## LintWarning - - - - - ---- - -## LintSeverity - - - -Severity levels for lint warnings. - ---- - -## DAGSchema - - None: ...`} /> - -Declarative structural constraints for DAG validation. Define expected -structural properties and validate any DAG against them. - - - -#### DAGSchema.validate - - list[str]`} /> - -Validate a DAG against the schema constraints. - - - -**Returns:** `list[str]` -- List of error messages. An empty list means validation passed. 
- -```python -from dagron.analysis import DAGSchema - -schema = DAGSchema( - single_root=True, - single_leaf=True, - max_depth=10, - connected=True, - required_nodes=["extract", "load"], - leaf_pattern="output_*", -) - -errors = schema.validate(dag) -if errors: - for err in errors: - print(f"Schema violation: {err}") -else: - print("DAG passes schema validation") -``` - ---- - -## query - - list[str]`} /> - -Select nodes using a concise query expression. The query DSL supports -set functions, filters, and set operations. Results are returned in -topological order. - - - -**Returns:** `list[str]` -- Matching node names in topological order. - -**Raises:** `ValueError` -- If the expression is not a valid query. - -### Query syntax - -**Set functions:** - -| Function | Description | -|----------|-------------| -| `roots` | Root nodes (in-degree 0). | -| `leaves` | Leaf nodes (out-degree 0). | -| `critical_path` | Nodes on the critical path. | -| `ancestors(node)` | All transitive ancestors of `node`. | -| `descendants(node)` | All transitive descendants of `node`. | -| `predecessors(node)` | Direct predecessors of `node`. | -| `successors(node)` | Direct successors of `node`. | - -**Filters:** - -| Filter | Description | -|--------|-------------| -| `depth <= N`, `depth >= N`, `depth == N` | Filter by topological depth. Also supports `<` and `>`. | -| `in_degree <= N`, `in_degree >= N` | Filter by in-degree. | -| `out_degree <= N`, `out_degree >= N` | Filter by out-degree. | -| `name:pattern` | Glob pattern matching on node names (`*` and `?` wildcards). | - -**Set operations:** - -| Operator | Description | -|----------|-------------| -| `A \| B` | Union of sets A and B. | -| `A & B` | Intersection of sets A and B. | -| `A - B` | Difference (A minus B). 
| - -```python -from dagron.analysis import query - -# Find all root nodes -roots = query(dag, "roots") - -# Find ancestors of "deploy" within depth 3 -shallow = query(dag, "ancestors(deploy) & depth <= 3") - -# Find all test nodes on the critical path -critical_tests = query(dag, "critical_path & name:test_*") - -# Find descendants of extract that are not leaves -internal = query(dag, "descendants(extract) - leaves") - -# Combine with union -targets = query(dag, "roots | leaves") -``` - ---- - -## See also - -- [Profiling](/api/observability/profiling) -- post-execution performance analysis. -- [Contracts](/api/analysis/contracts) -- type-level validation for node inputs and outputs. -- [Inspecting Graphs guide](/guide/core-concepts/inspecting-graphs) -- walkthrough of analysis workflows. diff --git a/docs/pages/api/analysis/contracts.mdx b/docs/pages/api/analysis/contracts.mdx deleted file mode 100644 index ca52d33..0000000 --- a/docs/pages/api/analysis/contracts.mdx +++ /dev/null @@ -1,319 +0,0 @@ ---- -sidebar_position: 17 -title: Contracts -description: API reference for dagron's type contract system -- declare, extract, and validate typed data contracts across DAG edges. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Contracts - -The contracts module provides build-time type checking for DAG edges. You -can declare the expected input and output types for each node, and the -validator checks that producer output types are compatible with consumer -input types across every edge. This catches type mismatches before execution. - -For pipelines built with the `@task` decorator, contracts can be -automatically extracted from type annotations. - -```python -from dagron.contracts import ( - NodeContract, - ContractValidator, - ContractViolation, - extract_contracts, - validate_contracts, -) -``` - ---- - -## NodeContract - - - -Type contract for a single node's inputs and outputs. 
This is a frozen -dataclass, so instances are hashable and immutable after creation. - - - -```python -from dagron.contracts import NodeContract - -# A node that takes a list from 'extract' and produces a dict -transform_contract = NodeContract( - inputs={"extract": list}, - output=dict, -) - -# A node with no type constraints (wildcard) -passthrough = NodeContract() -``` - ---- - -## ContractViolation - - - -A single type-contract violation detected during validation. Frozen -dataclass, so instances are immutable and hashable. - - - -```python -for violation in violations: - print(f"Edge {violation.from_node} -> {violation.to_node}: {violation.message}") -``` - ---- - -## ContractValidator - - None: ...`} /> - -Validates type contracts across DAG edges. For every edge `(u, v)` in the -DAG, the validator checks that the output type of `u` is compatible with -the expected input type declared by `v` for dependency `u`. Compatibility -is determined via `issubclass`. The `object` type acts as a wildcard. - - - -### Methods - ---- - -#### ContractValidator.validate - - list[ContractViolation]`} /> - -Run validation and return all detected violations. An empty list means -all contracts are satisfied. - -**Returns:** `list[ContractViolation]` -- List of type mismatches found across DAG edges. 
- -```python -import dagron -from dagron.contracts import NodeContract, ContractValidator - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -contracts = { - "extract": NodeContract(output=list), - "transform": NodeContract(inputs={"extract": dict}, output=str), - "load": NodeContract(inputs={"transform": str}), -} - -validator = ContractValidator(dag, contracts) -violations = validator.validate() - -for v in violations: - print(v.message) -# Type mismatch on edge extract -> transform: producer outputs list, -# but consumer expects dict -``` - -### Compatibility rules - -The validator uses `issubclass` to check compatibility: - -- `list` is compatible with `list` (exact match). -- `bool` is compatible with `int` (subclass relationship). -- `object` is always compatible (wildcard / Any equivalent). -- Generic type aliases (e.g., `list[int]`) are treated as compatible if `issubclass` raises `TypeError`. - ---- - -## extract_contracts - - dict[str, NodeContract]`} /> - -Auto-extract `NodeContract` instances from a Pipeline's `@task` functions. -Uses `typing.get_type_hints()` to read input parameter types and return -annotations from each decorated function. - - - -**Returns:** `dict[str, NodeContract]` -- Mapping of task names to their extracted contracts. 
- -```python -from dagron import Pipeline, task -from dagron.contracts import extract_contracts - -@task -def extract() -> list: - return [1, 2, 3] - -@task -def transform(extract: list) -> dict: - return {"data": extract} - -@task -def load(transform: dict) -> str: - return "done" - -pipeline = Pipeline(tasks=[extract, transform, load]) -contracts = extract_contracts(pipeline) - -print(contracts["extract"].output) # -print(contracts["transform"].inputs) # {'extract': } -print(contracts["transform"].output) # -``` - ---- - -## validate_contracts - - list[ContractViolation]`} /> - -Convenience function that extracts contracts from a pipeline and validates -them in a single call. Optionally merges manually specified contracts that -override the auto-extracted ones. - - - -**Returns:** `list[ContractViolation]` -- List of violations. Empty means all contracts are satisfied. - -```python -from dagron import Pipeline, task -from dagron.contracts import validate_contracts, NodeContract - -@task -def extract() -> list: - return [1, 2, 3] - -@task -def transform(extract: dict) -> str: # Bug: expects dict, but extract returns list - return str(extract) - -pipeline = Pipeline(tasks=[extract, transform]) -violations = validate_contracts(pipeline) - -if violations: - for v in violations: - print(f"Contract violation: {v.message}") - # Contract violation: Type mismatch on edge extract -> transform: - # producer outputs list, but consumer expects dict -``` - -### Overriding extracted contracts - -Sometimes auto-extraction is not enough -- for example, when functions lack -type annotations or when you want stricter constraints: - -```python -from dagron.contracts import validate_contracts, NodeContract - -overrides = { - "transform": NodeContract( - inputs={"extract": list}, - output=dict, - ), -} - -violations = validate_contracts(pipeline, extra_contracts=overrides) -``` - ---- - -## Complete example - -```python -import dagron -from dagron import Pipeline, task -from 
dagron.contracts import ( - NodeContract, - ContractValidator, - extract_contracts, - validate_contracts, -) - -# Define a typed pipeline -@task -def fetch_users() -> list: - return [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}] - -@task -def normalize(fetch_users: list) -> list: - return [{"id": u["id"], "name": u["name"].upper()} for u in fetch_users] - -@task -def store(normalize: list) -> int: - return len(normalize) - -pipeline = Pipeline(tasks=[fetch_users, normalize, store], name="users") - -# Validate contracts automatically -violations = validate_contracts(pipeline) -assert not violations, f"Contract violations: {violations}" - -# Or extract and inspect contracts manually -contracts = extract_contracts(pipeline) -for name, contract in contracts.items(): - print(f"{name}: inputs={contract.inputs}, output={contract.output}") - -# Manual validation against an arbitrary DAG -dag = ( - dagron.DAG.builder() - .add_edge("source", "sink") - .build() -) - -manual_contracts = { - "source": NodeContract(output=str), - "sink": NodeContract(inputs={"source": int}), # Mismatch! -} - -validator = ContractValidator(dag, manual_contracts) -for v in validator.validate(): - print(v.message) -# Type mismatch on edge source -> sink: producer outputs str, -# but consumer expects int -``` - ---- - -## See also - -- [DataFrames](/api/analysis/dataframe) -- schema validation for DataFrame outputs. -- [Analysis](/api/analysis/analysis) -- structural analysis and linting. -- [Pipeline](/api/execution/pipeline) -- the `@task` decorator and Pipeline class. -- [Building DAGs guide](/guide/core-concepts/building-dags) -- builder-level contract declarations. 
diff --git a/docs/pages/api/analysis/dataframe.mdx b/docs/pages/api/analysis/dataframe.mdx deleted file mode 100644 index e96e4bb..0000000 --- a/docs/pages/api/analysis/dataframe.mdx +++ /dev/null @@ -1,378 +0,0 @@ ---- -sidebar_position: 18 -title: DataFrames -description: API reference for dagron's DataFrame integration -- schema validation for pandas and polars DataFrames at DAG edge boundaries. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# DataFrames - -The dataframe module provides schema validation for pandas and polars -DataFrames at DAG edge boundaries. Define expected column schemas (names, -dtypes, nullability) and row count constraints for each node, then validate -execution results or individual values against those schemas. - -This module auto-detects whether a value is a pandas or polars DataFrame -and applies the appropriate introspection methods. - -```python -from dagron.dataframe import ( - DataFramePipeline, - DataFrameSchema, - ColumnSchema, - SchemaViolation, - validate_schema, -) -``` - ---- - -## ColumnSchema - - - -Schema definition for a single column in a DataFrame. Frozen dataclass. - - - -```python -from dagron.dataframe import ColumnSchema - -id_col = ColumnSchema("id", dtype="int", nullable=False, required=True) -name_col = ColumnSchema("name", dtype="object", nullable=True) -score_col = ColumnSchema("score", dtype="float", required=False) # optional column -``` - ---- - -## DataFrameSchema - - - -Schema definition for a DataFrame at an edge boundary. Combines column -schemas with optional row count constraints. Frozen dataclass. 
- - - -```python -from dagron.dataframe import DataFrameSchema, ColumnSchema - -user_schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="int", nullable=False), - ColumnSchema("name", dtype="object", nullable=False), - ColumnSchema("email", dtype="object", nullable=True), - ], - min_rows=1, - max_rows=10000, -) -``` - ---- - -## SchemaViolation - - - -A single schema violation detected during validation. Frozen dataclass. - - - -```python -for violation in violations: - print(f"[{violation.node_name}] {violation.message}") -# [extract] Missing required column 'id' -# [extract] Column 'score' has null values but nullable=False -``` - ---- - -## validate_schema - - list[SchemaViolation]`} /> - -Validate a DataFrame against a schema. Works with both pandas and polars -DataFrames. The framework is auto-detected from the object's type. - - - -**Returns:** `list[SchemaViolation]` -- List of violations. An empty list means the DataFrame is valid. - -### Validation checks - -The function performs the following checks in order: - -1. **Framework detection** -- verifies the object is a pandas or polars DataFrame. -2. **Required columns** -- checks that all required columns exist. -3. **Dtype matching** -- for each column with a `dtype` constraint, checks that the actual dtype string contains the expected substring (case-insensitive). -4. **Nullability** -- for columns with `nullable=False`, checks for null values. -5. **Row count** -- validates `min_rows` and `max_rows` constraints. 
- -```python -import pandas as pd -from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema - -df = pd.DataFrame({"id": [1, 2, None], "name": ["Alice", "Bob", "Charlie"]}) - -schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="int", nullable=False), - ColumnSchema("name", dtype="object"), - ColumnSchema("email", nullable=True, required=True), - ], - min_rows=1, -) - -violations = validate_schema(df, schema, node_name="extract") -for v in violations: - print(v.message) -# Column 'id' has null values but nullable=False -# Missing required column 'email' -``` - ---- - -## DataFramePipeline - - None: ...`} /> - -Execute a DAG pipeline with schema validation at edge boundaries. Validates -that each node's output DataFrame matches the expected schema. - - - -### Methods - ---- - -#### DataFramePipeline.validate_result - - list[SchemaViolation]`} /> - -Validate all completed node outputs in an execution result against their -declared schemas. Only nodes that completed successfully and have a schema -defined are checked. - - - -**Returns:** `list[SchemaViolation]` -- All violations found across all validated nodes. - -```python -import dagron -from dagron.dataframe import DataFramePipeline, DataFrameSchema, ColumnSchema - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -schemas = { - "extract": DataFrameSchema( - columns=[ColumnSchema("id", dtype="int"), ColumnSchema("name")], - min_rows=1, - ), - "transform": DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="int", nullable=False), - ColumnSchema("name_upper", nullable=False), - ], - ), -} - -pipeline = DataFramePipeline(dag, schemas) - -# After execution... 
-executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) - -violations = pipeline.validate_result(result) -if violations: - for v in violations: - print(f"[{v.node_name}] {v.message}") -else: - print("All DataFrames match their schemas") -``` - ---- - -#### DataFramePipeline.validate_value - - list[SchemaViolation]`} /> - -Validate a single value against a specific node's schema. Useful for -testing individual node outputs without running the full pipeline. - - - -**Returns:** `list[SchemaViolation]` -- Violations found, or empty list if valid. Returns empty list if no schema is defined for the given node. - -```python -import pandas as pd -from dagron.dataframe import DataFramePipeline, DataFrameSchema, ColumnSchema - -# Validate a single DataFrame in isolation -df = pd.DataFrame({"id": [1, 2], "name": ["Alice", "Bob"]}) -violations = pipeline.validate_value("extract", df) -assert not violations -``` - ---- - -## Complete example - -```python -import dagron -import pandas as pd -from dagron.dataframe import ( - DataFramePipeline, - DataFrameSchema, - ColumnSchema, - validate_schema, -) - -# Define schemas for each pipeline stage -schemas = { - "extract": DataFrameSchema( - columns=[ - ColumnSchema("user_id", dtype="int", nullable=False), - ColumnSchema("username", dtype="object", nullable=False), - ColumnSchema("score", dtype="float", nullable=True), - ], - min_rows=1, - ), - "transform": DataFrameSchema( - columns=[ - ColumnSchema("user_id", dtype="int", nullable=False), - ColumnSchema("username", dtype="object", nullable=False), - ColumnSchema("score_normalized", dtype="float", nullable=False), - ], - ), - "load": DataFrameSchema( - columns=[ - ColumnSchema("user_id", dtype="int", nullable=False), - ColumnSchema("username", dtype="object", nullable=False), - ColumnSchema("score_normalized", dtype="float", nullable=False), - ColumnSchema("loaded_at", dtype="datetime", nullable=False), - ], - max_rows=100000, - ), -} - -# Build the DAG -dag = ( - 
dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -pipeline = DataFramePipeline(dag, schemas) - -# Validate individual DataFrames during development -test_df = pd.DataFrame({ - "user_id": [1, 2, 3], - "username": ["alice", "bob", "charlie"], - "score": [0.85, None, 0.92], -}) - -violations = pipeline.validate_value("extract", test_df) -print(f"Extract violations: {len(violations)}") -for v in violations: - print(f" {v.message}") - -# Or use the standalone function -violations = validate_schema(test_df, schemas["extract"], "extract") - -# After full pipeline execution, validate all outputs -executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) -all_violations = pipeline.validate_result(result) -``` - -### Using with polars - -The module works identically with polars DataFrames -- no configuration -changes needed: - -```python -import polars as pl -from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema - -df = pl.DataFrame({ - "id": [1, 2, 3], - "value": [10.5, 20.3, 30.1], -}) - -schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="i64", nullable=False), - ColumnSchema("value", dtype="f64"), - ], -) - -violations = validate_schema(df, schema, "my_node") -``` - ---- - -## See also - -- [Contracts](/api/analysis/contracts) -- type-level contracts for arbitrary Python types. -- [Pipeline](/api/execution/pipeline) -- the Pipeline class for decorator-based DAG construction. -- [Execution](/api/execution/execution) -- `ExecutionResult` containing node outputs. diff --git a/docs/pages/api/core/builder.mdx b/docs/pages/api/core/builder.mdx deleted file mode 100644 index 4da40da..0000000 --- a/docs/pages/api/core/builder.mdx +++ /dev/null @@ -1,271 +0,0 @@ ---- -sidebar_position: 2 -title: "DAGBuilder" -description: "API reference for DAGBuilder — the fluent builder pattern for constructing validated DAGs." 
---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# DAGBuilder - -The `DAGBuilder` provides a fluent, chainable API for constructing DAGs. Every -mutating method returns `self`, so you can chain calls together. The builder -validates the graph on `.build()`, ensuring you never receive an invalid DAG. - -Obtain a builder via `DAG.builder()`: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("load") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) -``` - -See the [Building DAGs](/guide/core-concepts/building-dags) guide for construction -patterns and best practices. - ---- - -## Constructor - - - -Create a new empty builder. In most cases you will use `DAG.builder()` instead -of instantiating this class directly. - -```python -# Preferred -builder = dagron.DAG.builder() - -# Also valid -builder = dagron.DAGBuilder() -``` - ---- - -## Methods - -### add_node - - DAGBuilder`} /> - -Add a node to the graph under construction. Returns `self` for chaining. - - - -**Raises:** -- `DuplicateNodeError` — if a node with the same name has already been added. - -```python -builder = ( - dagron.DAG.builder() - .add_node("fetch", payload={"url": "https://api.example.com"}) - .add_node("parse", metadata={"team": "data-eng"}) - .add_node("store") -) -``` - ---- - -### add_edge - - DAGBuilder`} /> - -Add a directed edge from `from_node` to `to_node`. Returns `self` for chaining. -The builder defers cycle detection to `.build()`, so you can add edges in any -order. - - - -```python -builder = ( - dagron.DAG.builder() - .add_node("a") - .add_node("b") - .add_node("c") - .add_edge("a", "b", weight=2.0) - .add_edge("b", "c", label="transform-to-load") -) -``` - ---- - -### contract - - DAGBuilder`} /> - -Attach a type contract to a node. 
Contracts declare the expected input types -(keyed by predecessor name) and the output type. Use `validate_contracts()` to -check all contracts before building, or let `build()` validate them -automatically. - - - -**Raises:** -- `NodeNotFoundError` — if the specified node has not been added. - -```python -builder = ( - dagron.DAG.builder() - .add_node("fetch") - .add_node("parse") - .add_node("validate") - .add_edge("fetch", "parse") - .add_edge("parse", "validate") - .contract("fetch", output=list) - .contract("parse", inputs={"fetch": list}, output=dict) - .contract("validate", inputs={"parse": dict}, output=bool) -) -``` - -See the [Contracts](/guide/advanced/contracts) guide for more details on type -contracts. - ---- - -### validate_contracts - - list[ContractViolation]`} /> - -Check all attached contracts for consistency (e.g., output type of `A` matches -the input type expected by `B` on the `A -> B` edge). Returns a list of -`ContractViolation` objects describing any mismatches. An empty list means all -contracts are consistent. - -**Returns:** `list[ContractViolation]` — a list of violations, empty if valid. - -```python -violations = builder.validate_contracts() -if violations: - for v in violations: - print(f"Contract violation: {v}") -else: - print("All contracts consistent.") -``` - ---- - -### build - - DAG`} /> - -Finalize and validate the graph. Returns a fully constructed -[DAG](/api/core/core) instance. This method performs: - -1. **Node existence checks** — every edge endpoint must reference an existing node. -2. **Cycle detection** — raises `CycleError` if the graph contains a cycle. -3. **Contract validation** — if any contracts were attached, they are validated. - -**Returns:** `DAG` — the validated directed acyclic graph. - -**Raises:** -- `CycleError` — if the graph contains a cycle. -- `NodeNotFoundError` — if an edge references a non-existent node. 
- -```python -dag = ( - dagron.DAG.builder() - .add_node("a") - .add_node("b") - .add_edge("a", "b") - .build() -) - -print(dag.node_count()) # 2 -print(dag.edge_count()) # 1 -``` - ---- - -## Complete Example - -A full builder workflow with contracts, metadata, and weighted edges: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - # Nodes with payloads and metadata - .add_node("ingest", payload={"source": "s3"}, metadata={"tier": "bronze"}) - .add_node("clean", metadata={"tier": "silver"}) - .add_node("enrich", metadata={"tier": "silver"}) - .add_node("aggregate", metadata={"tier": "gold"}) - .add_node("publish", metadata={"tier": "gold"}) - - # Weighted edges for cost-aware scheduling - .add_edge("ingest", "clean", weight=1.0) - .add_edge("ingest", "enrich", weight=1.0) - .add_edge("clean", "aggregate", weight=3.0) - .add_edge("enrich", "aggregate", weight=2.0) - .add_edge("aggregate", "publish", weight=0.5) - - # Type contracts - .contract("ingest", output=list) - .contract("clean", inputs={"ingest": list}, output=list) - .contract("enrich", inputs={"ingest": list}, output=list) - .contract("aggregate", inputs={"clean": list, "enrich": list}, output=dict) - .contract("publish", inputs={"aggregate": dict}, output=bool) - - .build() -) - -print(dag.node_count()) # 5 -print(dag.edge_count()) # 5 -print(dag.topological_levels()) # [[ingest], [clean, enrich], [aggregate], [publish]] -``` - ---- - -## ContractViolation - - - -Returned by `DAGBuilder.validate_contracts()` when a type mismatch is found. - -| Property | Type | Description | -|----------|------|-------------| -| `from_node` | `str` | The upstream node whose output type does not match. | -| `to_node` | `str` | The downstream node whose expected input type does not match. | -| `expected` | `type` | The type expected by the downstream node. | -| `actual` | `type` | The type declared by the upstream node's output. 
| - -```python -violations = builder.validate_contracts() -for v in violations: - print(f"{v.from_node} -> {v.to_node}: expected {v.expected}, got {v.actual}") -``` diff --git a/docs/pages/api/core/core.mdx b/docs/pages/api/core/core.mdx deleted file mode 100644 index 0d162ea..0000000 --- a/docs/pages/api/core/core.mdx +++ /dev/null @@ -1,868 +0,0 @@ ---- -sidebar_position: 1 -slug: /api/core/core -title: "Core — DAG, NodeId, GraphStats" -description: "Complete API reference for the DAG class, NodeId, GraphStats, GraphDiff, ReachabilityIndex, and ExecutionPlan." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Core — DAG, NodeId, GraphStats - -The core module provides the foundational graph data structure that powers every -feature in dagron. The `DAG` class contains approximately 78 methods spanning -construction, inspection, traversal, scheduling, transforms, serialization, and -more. The underlying graph lives in Rust for zero-copy speed, exposed to Python -via PyO3. - -See the [Building DAGs](/guide/core-concepts/building-dags) guide for usage patterns and -the [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) guide for analysis -workflows. - ---- - -## DAG - - - -The central directed acyclic graph. Create an empty graph with `DAG()` or use -the fluent [DAGBuilder](/api/core/builder) via `DAG.builder()`. - -```python -import dagron - -# Empty graph, then mutate -dag = dagron.DAG() -dag.add_node("a") -dag.add_node("b") -dag.add_edge("a", "b") - -# Or use the builder -dag = dagron.DAG.builder().add_node("a").add_node("b").add_edge("a", "b").build() -``` - ---- - -### Construction - - DAGBuilder`} /> - -Return a new [DAGBuilder](/api/core/builder) instance for fluent graph -construction. The builder validates the graph when `.build()` is called. - - NodeId`} /> - -Add a node to the graph. Returns a `NodeId` handle. 
Raises `DuplicateNodeError` -if a node with the same name already exists. - - - - list[NodeId]`} /> - -Bulk-add multiple nodes. Each element can be a bare name string, a -`(name, payload)` tuple, or a `(name, payload, metadata)` tuple. - - - - None`} /> - -Add a directed edge from `from_node` to `to_node`. Raises `NodeNotFoundError` -if either node does not exist, and `CycleError` if the edge would create a -cycle. - - - - None`} /> - -Bulk-add multiple edges. Each element is a tuple of `(from, to)`, -`(from, to, weight)`, or `(from, to, weight, label)`. - - - - None`} /> - -Remove a node and all edges connected to it. Raises `NodeNotFoundError` if the -node does not exist. - - - - None`} /> - -Remove a directed edge. Raises `EdgeNotFoundError` if the edge does not exist. - - - ---- - -### Inspection - - bool`} /> - -Return `True` if a node with the given name exists. - - bool`} /> - -Return `True` if a directed edge exists between the two nodes. - - int`} /> - -Return the total number of nodes in the graph. - - int`} /> - -Return the total number of edges in the graph. - - Any`} /> - -Return the payload attached to a node. Raises `NodeNotFoundError` if the node -does not exist. - - None`} /> - -Replace the payload on an existing node. - - dict | None`} /> - -Return the metadata dictionary for a node, or `None` if no metadata was set. - - None`} /> - -Replace the metadata on an existing node. - - list[NodeId]`} /> - -Return the direct predecessors (parents) of a node. - - list[NodeId]`} /> - -Return the direct successors (children) of a node. - - list[NodeId]`} /> - -Return all transitive predecessors of a node (the full upstream lineage). - - list[NodeId]`} /> - -Return all transitive successors of a node (the full downstream lineage). - - int`} /> - -Return the number of incoming edges to a node. - - int`} /> - -Return the number of outgoing edges from a node. - - list[NodeId]`} /> - -Return all root nodes (nodes with no predecessors). 
- - list[NodeId]`} /> - -Return all leaf nodes (nodes with no successors). - - list[NodeId]`} /> - -Return all nodes in the graph as a list of `NodeId` objects. - -```python -dag = dagron.DAG() -dag.add_node("extract", payload={"source": "api"}) -dag.add_node("transform") -dag.add_edge("extract", "transform") - -print(dag.has_node("extract")) # True -print(dag.predecessors("transform")) # [NodeId("extract")] -print(dag.get_payload("extract")) # {"source": "api"} -print(dag.roots()) # [NodeId("extract")] -print(dag.leaves()) # [NodeId("transform")] -``` - ---- - -### Iterators - -Lazy iterator variants that yield items one at a time instead of building a full -list in memory. Useful for very large graphs. - - Iterator[NodeId]`} /> - - Iterator[NodeId]`} /> - - Iterator[NodeId]`} /> - - Iterator[NodeId]`} /> - - Iterator[NodeId]`} /> - - Iterator[NodeId]`} /> - - Iterator[list[NodeId]]`} /> - -```python -for node in dag.iter_topological_sort(): - print(node.name) - -for level in dag.iter_topological_levels(): - print([n.name for n in level]) -``` - ---- - -### Topological Sorting - - list[NodeId]`} /> - -Return a topological ordering of all nodes using Kahn's algorithm. The ordering -is deterministic (stable sort by insertion order). - - list[NodeId]`} /> - -Return a topological ordering computed via depth-first search. May produce a -different valid ordering than the Kahn-based `topological_sort()`. - - list[list[NodeId]]`} /> - -Group nodes into levels where all nodes at the same level can execute in -parallel (all predecessors are in earlier levels). - - list[list[NodeId]]`} /> - -Enumerate all valid topological orderings. Use `limit` to cap the output for -large graphs (the number of orderings can be exponential). - - - - list[NodeId]`} /> - -Return a topological ordering that respects priority hints. Nodes with higher -priority values are scheduled earlier when topologically valid. 
- - - - list[list[NodeId]]`} /> - -Like `topological_levels()`, but nodes within each level are sorted by priority. - - - ---- - -### Paths - - list[NodeId] | None`} /> - -Return the shortest path (fewest edges) between two nodes, or `None` if no path -exists. - - - - tuple[list[NodeId], float] | None`} /> - -Return the longest path and its total cost between two nodes, or `None` if no -path exists. If `costs` is provided, edge/node costs are summed; otherwise every -node has cost 1. - - - - list[list[NodeId]]`} /> - -Enumerate all directed paths between two nodes. - - - -```python -path = dag.shortest_path("extract", "load") -print([n.name for n in path]) # ["extract", "transform", "load"] -``` - ---- - -### Scheduling - - ExecutionPlan`} /> - -Compute an optimal execution plan (topological-level schedule) with unlimited -parallelism. See [ExecutionPlan](#executionplan) below. - - - - ExecutionPlan`} /> - -Compute an execution plan constrained to a fixed number of workers. - - - - tuple[list[NodeId], float]`} /> - -Return the critical path (longest weighted path through the graph) and its total -cost. This determines the theoretical minimum makespan. - - - - dict[str, float]`} /> - -Return the bottom-level priority of each node (the longest path from that node -to any leaf). Used internally for scheduling heuristics. - - - -```python -costs = {"extract": 2.0, "transform": 5.0, "load": 1.0} -path, total = dag.critical_path(costs) -print([n.name for n in path], total) # ["extract", "transform", "load"], 8.0 -``` - ---- - -### Transforms - - DAG`} /> - -Return a new DAG with all edge directions reversed. - - DAG`} /> - -Return a new DAG containing only nodes for which `predicate(name)` returns -`True`. Edges between remaining nodes are preserved. - - - - DAG`} /> - -Merge another DAG into this one. When both DAGs share a node name, `conflict` -determines the behavior: `"keep_first"`, `"keep_second"`, `"error"`, or use a -custom `conflict_resolver` callable. 
- - payload for resolving conflicts."}, -]} /> - - DAG`} /> - -Collapse multiple nodes into a single node. Edges are rewired to the collapsed -node. - - - - DAG`} /> - -Return the transitive reduction: the smallest DAG with the same reachability -relation. Removes redundant edges. - - DAG`} /> - -Return the transitive closure: a DAG with an edge `(u, v)` for every pair of -nodes where `u` can reach `v`. - - DAG`} /> - -Return a deep copy of the DAG at this point in time. - - list[tuple[str, str]]`} /> - -Compute the dominator tree rooted at the given node. Returns a list of -`(dominator, dominated)` edge pairs. - - - -```python -reduced = dag.transitive_reduction() -filtered = dag.filter(lambda name: name.startswith("transform")) -merged = dag1.merge(dag2, conflict="keep_second") -``` - ---- - -### Subgraph - - DAG`} /> - -Extract a subgraph containing only the specified nodes and edges between them. - - - - DAG`} /> - -Extract a subgraph by traversing up to `depth` hops from `root`. - - - ---- - -### Serialization - - str`} /> - -Serialize the DAG to a JSON string. If nodes carry non-JSON-serializable -payloads, provide a custom `payload_serializer`. - - DAG`} /> - -Deserialize a DAG from a JSON string. - - str`} /> - -Export the graph in Graphviz DOT format. - - dict of DOT attributes per node."}, -]} /> - - str`} /> - -Export the graph as a Mermaid diagram string. - - bytes`} /> - -Serialize the DAG to a compact binary format. - - DAG`} /> - -Deserialize a DAG from binary data. - - None`} /> - -Save the DAG to a file on disk (binary format). - - DAG`} /> - -Load a DAG from a file on disk. - -```python -# Round-trip through JSON -json_str = dag.to_json() -restored = dagron.DAG.from_json(json_str) - -# Save / load from disk -dag.save("pipeline.dagron") -loaded = dagron.DAG.load("pipeline.dagron") - -# Export for visualization -print(dag.to_mermaid()) -``` - -See the [Serialization](/guide/core-concepts/serialization) guide for detailed examples. 
- ---- - -### Matching - - list[NodeId]`} /> - -Return all nodes whose names match the given regular expression pattern. - - - - list[NodeId]`} /> - -Return all nodes whose names match the given glob pattern (supports `*`, `?`, -`[...]`). - - - -```python -transforms = dag.nodes_matching_glob("transform_*") -etl_nodes = dag.nodes_matching_regex(r"^(extract|transform|load)_\d+$") -``` - ---- - -### Reachability - - ReachabilityIndex`} /> - -Precompute a reachability index for O(1) ancestor/descendant queries. See -[ReachabilityIndex](#reachabilityindex) below. - - bool`} /> - -Return `True` if `ancestor` is a transitive predecessor of `descendant`. This -performs a graph traversal each time; for repeated queries, use -`build_reachability_index()`. - - - ---- - -### Diffing - - GraphDiff`} /> - -Compute a structural diff between this DAG and another. Returns a -[GraphDiff](#graphdiff) object. See the [Versioning](/guide/advanced/versioning) -guide for diff workflows. - - - ---- - -### Partitioning - - PartitionResult`} /> - -Partition the graph into `k` groups using topological-level-based assignment. - - PartitionResult`} /> - -Partition the graph into `k` balanced groups minimizing total cost imbalance. - - PartitionResult`} /> - -Partition the graph into `k` groups minimizing inter-partition communication -(cross-partition edges). - - - ---- - -### Incremental - - list[str]`} /> - -Given a list of changed nodes, return the full set of nodes that need -re-execution (the changed nodes plus all their downstream descendants). - - - - dict[str, list[str]]`} /> - -For each downstream node, return which changed nodes are responsible for -invalidating it. - - - -See the [Incremental Execution](/api/execution/incremental) API page for the full -incremental executor. - ---- - -### Validation, Stats, and Cache - - bool`} /> - -Validate graph invariants (acyclicity, no orphan edges). Returns `True` if the -graph is valid. Raises `CycleError` if a cycle is detected. 
- - GraphStats`} /> - -Return a [GraphStats](#graphstats) summary of the graph. - - dict`} /> - -Return information about the internal Rust-side cache (hit/miss counts for -memoized graph algorithms). - - None`} /> - -Clear the internal algorithm cache. The cache is automatically invalidated on -mutation, but you can force-clear it if needed. - - int`} /> - -A monotonically increasing counter that increments on every mutation. Useful for -cache invalidation in external systems. - -```python -stats = dag.stats() -print(f"Nodes: {stats.node_count}, Depth: {stats.depth}, Width: {stats.width}") -print(f"Generation: {dag.generation}") -``` - ---- - -## NodeId - - - -An opaque handle returned by node-creation methods. Provides a `.name` property -for accessing the underlying string identifier. - -| Property | Type | Description | -|----------|------|-------------| -| `name` | `str` | The unique string name of the node. | - -```python -node_id = dag.add_node("extract") -print(node_id.name) # "extract" -``` - ---- - -## GraphStats - - - -A read-only summary of graph metrics, returned by `DAG.stats()`. - -| Property | Type | Description | -|----------|------|-------------| -| `node_count` | `int` | Total number of nodes. | -| `edge_count` | `int` | Total number of edges. | -| `depth` | `int` | Length of the longest path (in edges). | -| `width` | `int` | Maximum number of nodes at any topological level. | -| `density` | `float` | Edge density: `edge_count / (node_count * (node_count - 1))`. | -| `longest_path_length` | `int` | Number of edges in the longest path. | -| `avg_in_degree` | `float` | Average incoming edges per node. | -| `avg_out_degree` | `float` | Average outgoing edges per node. | -| `max_in_degree` | `int` | Highest in-degree of any node. | -| `max_out_degree` | `int` | Highest out-degree of any node. | -| `root_count` | `int` | Number of root nodes. | -| `leaf_count` | `int` | Number of leaf nodes. 
| -| `is_weakly_connected` | `bool` | Whether the underlying undirected graph is connected. | -| `component_count` | `int` | Number of weakly connected components. | - -```python -stats = dag.stats() -print(f"Density: {stats.density:.4f}") -print(f"Connected: {stats.is_weakly_connected}") -print(f"Components: {stats.component_count}") -``` - ---- - -## GraphDiff - - - -The result of `DAG.diff(other)`. Contains sets of added, removed, and changed -nodes and edges. - -| Property | Type | Description | -|----------|------|-------------| -| `added_nodes` | `list[str]` | Nodes present in `other` but not `self`. | -| `removed_nodes` | `list[str]` | Nodes present in `self` but not `other`. | -| `changed_nodes` | `list[str]` | Nodes with different payloads or metadata. | -| `added_edges` | `list[tuple[str, str]]` | Edges present in `other` but not `self`. | -| `removed_edges` | `list[tuple[str, str]]` | Edges present in `self` but not `other`. | -| `changed_edges` | `list[tuple[str, str]]` | Edges with different weights or labels. | - -```python -diff = dag_v1.diff(dag_v2) -print(f"Added nodes: {diff.added_nodes}") -print(f"Removed edges: {diff.removed_edges}") -``` - ---- - -## ReachabilityIndex - - - -A precomputed index for O(1) reachability queries. Built via -`DAG.build_reachability_index()`. The index is a snapshot; it does not track -subsequent mutations to the DAG. - - bool`} /> - -Return `True` if `from_node` can reach `to_node` via directed edges. - - list[str]`} /> - -Return all nodes reachable from the given node. - - list[str]`} /> - -Return all ancestors of the given node. - - int`} /> - -Return the number of nodes in the index. 
- -```python -index = dag.build_reachability_index() -print(index.can_reach("extract", "load")) # True -print(index.reachable_from("extract")) # ["transform", "load"] -print(index.ancestors_of("load")) # ["extract", "transform"] -``` - ---- - -## ExecutionPlan - - - -A computed execution schedule returned by `DAG.execution_plan()` and -`DAG.execution_plan_constrained()`. - -| Property | Type | Description | -|----------|------|-------------| -| `steps` | `list[list[str]]` | Ordered list of execution steps. Each step is a list of node names that can run concurrently. | -| `total_nodes` | `int` | Total number of nodes in the plan. | -| `max_parallelism` | `int` | Maximum number of nodes in any single step. | -| `estimated_makespan` | `float` | Estimated total execution time based on provided costs. | -| `critical_path` | `list[str]` | The critical path through the plan. | - -```python -plan = dag.execution_plan(costs={"extract": 2.0, "transform": 5.0, "load": 1.0}) -print(f"Steps: {len(plan.steps)}") -print(f"Max parallelism: {plan.max_parallelism}") -print(f"Estimated makespan: {plan.estimated_makespan}s") - -for i, step in enumerate(plan.steps): - print(f" Step {i}: {step}") -``` diff --git a/docs/pages/api/core/errors.mdx b/docs/pages/api/core/errors.mdx deleted file mode 100644 index b86dde5..0000000 --- a/docs/pages/api/core/errors.mdx +++ /dev/null @@ -1,278 +0,0 @@ ---- -sidebar_position: 3 -title: "Errors" -description: "API reference for all dagron exception classes and the error hierarchy." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Errors - -All dagron exceptions inherit from `DagronError`, making it easy to catch any -library error with a single `except` clause. More specific exceptions allow -targeted handling of individual failure modes. 
- ---- - -## Error Hierarchy - -```mermaid -classDiagram - DagronError <|-- GraphError - GraphError <|-- CycleError - GraphError <|-- DuplicateNodeError - GraphError <|-- NodeNotFoundError - GraphError <|-- EdgeNotFoundError - - class DagronError { - +str message - } - class GraphError { - +str message - } - class CycleError { - +list~str~ cycle - +str message - } - class DuplicateNodeError { - +str node_name - +str message - } - class NodeNotFoundError { - +str node_name - +str message - } - class EdgeNotFoundError { - +str from_node - +str to_node - +str message - } -``` - ---- - -## DagronError - - - -The base exception for all dagron errors. Every exception raised by dagron is a -subclass of this class, so you can write a single catch-all handler: - -```python -import dagron - -try: - dag = dagron.DAG() - dag.add_edge("x", "y") # nodes don't exist -except dagron.DagronError as e: - print(f"dagron error: {e}") -``` - -| Attribute | Type | Description | -|-----------|------|-------------| -| `message` | `str` | Human-readable error description. | - ---- - -## GraphError - - - -Base class for all errors related to graph structure operations. Covers node -and edge manipulation, cycle detection, and structural invariant violations. - -| Attribute | Type | Description | -|-----------|------|-------------| -| `message` | `str` | Human-readable error description. | - -```python -try: - dag.add_edge("nonexistent", "also_nonexistent") -except dagron.GraphError as e: - # Catches NodeNotFoundError, EdgeNotFoundError, CycleError, etc. - print(f"Graph error: {e}") -``` - ---- - -## CycleError - - - -Raised when an operation would introduce a cycle into the DAG. The `cycle` -attribute contains the list of node names forming the cycle, which is invaluable -for debugging. 
- - - -```python -import dagron - -dag = dagron.DAG() -dag.add_node("a") -dag.add_node("b") -dag.add_node("c") -dag.add_edge("a", "b") -dag.add_edge("b", "c") - -try: - dag.add_edge("c", "a") # would create a -> b -> c -> a -except dagron.CycleError as e: - print(f"Cycle detected: {e.cycle}") - # Cycle detected: ['a', 'b', 'c', 'a'] -``` - -`CycleError` is also raised by `DAG.validate()` if the graph contains a cycle, -and by `DAGBuilder.build()` during construction. - ---- - -## DuplicateNodeError - - - -Raised when attempting to add a node with a name that already exists in the -graph. - - - -```python -dag = dagron.DAG() -dag.add_node("extract") - -try: - dag.add_node("extract") # duplicate -except dagron.DuplicateNodeError as e: - print(f"Duplicate: {e.node_name}") - # Duplicate: extract -``` - ---- - -## NodeNotFoundError - - - -Raised when referencing a node that does not exist in the graph. Common triggers -include `add_edge()` with a non-existent endpoint, `remove_node()` on a missing -node, or `get_payload()` for an unknown name. - - - -```python -dag = dagron.DAG() -dag.add_node("a") - -try: - dag.add_edge("a", "nonexistent") -except dagron.NodeNotFoundError as e: - print(f"Missing node: {e.node_name}") - # Missing node: nonexistent -``` - ---- - -## EdgeNotFoundError - - - -Raised when referencing an edge that does not exist in the graph. Typically -triggered by `remove_edge()`. 
- - - -```python -dag = dagron.DAG() -dag.add_node("a") -dag.add_node("b") - -try: - dag.remove_edge("a", "b") # edge doesn't exist -except dagron.EdgeNotFoundError as e: - print(f"No edge from {e.from_node} to {e.to_node}") - # No edge from a to b -``` - ---- - -## Error Handling Patterns - -### Catch-all - -Use `DagronError` as a catch-all for any dagron-specific error: - -```python -try: - dag = build_complex_pipeline() - result = dagron.DAGExecutor(dag).execute(tasks) -except dagron.DagronError as e: - logger.error(f"Pipeline failed: {e}") - raise -``` - -### Granular handling - -For more control, catch specific exceptions: - -```python -try: - dag = ( - dagron.DAG.builder() - .add_node("a") - .add_node("b") - .add_edge("a", "b") - .add_edge("b", "a") # cycle - .build() - ) -except dagron.CycleError as e: - print(f"Fix the cycle: {' -> '.join(e.cycle)}") -except dagron.DuplicateNodeError as e: - print(f"Remove duplicate node: {e.node_name}") -except dagron.NodeNotFoundError as e: - print(f"Add missing node first: {e.node_name}") -``` - -### Checking before acting - -Use predicate methods to avoid exceptions entirely: - -```python -if dag.has_node("transform"): - dag.remove_node("transform") - -if dag.has_edge("a", "b"): - dag.remove_edge("a", "b") -``` - ---- - -## Related - -- [DAG](/api/core/core) — core graph class whose methods raise these errors. -- [DAGBuilder](/api/core/builder) — builder that raises `CycleError` on `.build()`. -- [Gates](/api/execution/gates) — `GateRejectedError` and `GateTimeoutError` for gate-specific errors. diff --git a/docs/pages/api/execution/caching.mdx b/docs/pages/api/execution/caching.mdx deleted file mode 100644 index 7991934..0000000 --- a/docs/pages/api/execution/caching.mdx +++ /dev/null @@ -1,371 +0,0 @@ ---- -sidebar_position: 7 -title: "Caching" -description: "API reference for CachedDAGExecutor, ContentAddressableCache, CachePolicy, and cache backends." 
---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Caching - -The caching module provides content-addressable, Merkle-tree-based caching for -DAG execution. A node's cache key is derived from its task code, its -predecessors' output hashes, and its name — so if nothing upstream has changed, -the cached result is returned without re-execution. - -See the [Caching](/guide/execution-strategies/caching) guide for configuration patterns, -backend selection, and cache invalidation strategies. - ---- - -## CachedDAGExecutor - - - -An executor that wraps the standard [DAGExecutor](/api/execution/execution) with a -content-addressable cache layer. Before executing a node, the executor checks -the cache; if a valid entry exists, the cached result is returned and the node -is marked `CACHE_HIT`. - - - -### execute - - CachedExecutionResult`} /> - -Execute tasks with caching. Returns a [CachedExecutionResult](#cachedexecutionresult) -with cache statistics. - - - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("fetch").add_node("process").add_node("save") - .add_edge("fetch", "process").add_edge("process", "save") - .build() -) - -cache = dagron.ContentAddressableCache( - backend=dagron.FileSystemCacheBackend("./cache_dir") -) - -executor = dagron.CachedDAGExecutor(dag, cache=cache) - -# First run — all nodes executed -result = executor.execute({ - "fetch": lambda: [1, 2, 3], - "process": lambda: [2, 4, 6], - "save": lambda: "done", -}) -print(result.cache_hits, result.cache_misses) # 0, 3 - -# Second run — all nodes cached -result = executor.execute({ - "fetch": lambda: [1, 2, 3], - "process": lambda: [2, 4, 6], - "save": lambda: "done", -}) -print(result.cache_hits, result.cache_misses) # 3, 0 -``` - ---- - -## CachedExecutionResult - - - -Extends the standard [ExecutionResult](/api/execution/execution) with cache -statistics. 
- -| Property | Type | Description | -|----------|------|-------------| -| `execution_result` | `ExecutionResult` | The underlying execution result with per-node details. | -| `cache_hits` | `int` | Number of nodes whose results were loaded from cache. | -| `cache_misses` | `int` | Number of nodes that were executed (cache miss). | -| `nodes_executed` | `int` | Number of nodes that were actually executed. | -| `nodes_cached` | `int` | Number of nodes that returned cached results. | - -```python -print(f"Hit rate: {result.cache_hits}/{result.cache_hits + result.cache_misses}") -print(f"Nodes executed: {result.nodes_executed}") -print(f"Nodes cached: {result.nodes_cached}") -``` - ---- - -## ContentAddressableCache - - - -A Merkle-tree cache that computes content-addressable keys from task code and -predecessor output hashes. This ensures that a cache entry is only valid when -the exact same computation with the exact same inputs was previously executed. - - - -### Methods - - str`} /> - -Compute the content-addressable cache key for a node. - - - - Any | None`} /> - -Retrieve a cached value by key. Returns `None` on cache miss. - - None`} /> - -Store a value in the cache. - - bool`} /> - -Check if a key exists in the cache. - - None`} /> - -Clear all entries from the cache. - - CacheStats`} /> - -Return current cache statistics. - -```python -cache = dagron.ContentAddressableCache( - backend=dagron.FileSystemCacheBackend("./my_cache") -) - -key = cache.compute_key("process", process_fn, {"fetch": "abc123"}) -cache.put(key, [2, 4, 6]) -print(cache.has(key)) # True -print(cache.get(key)) # [2, 4, 6] -print(cache.stats()) # CacheStats(...) -``` - ---- - -## CachePolicy - - - -A policy that controls cache eviction. Applied to a cache backend to limit -storage consumption. 
- - - -```python -policy = dagron.CachePolicy( - max_entries=1000, - max_size_bytes=500 * 1024 * 1024, # 500 MB - ttl_seconds=3600, # 1 hour -) -backend = dagron.FileSystemCacheBackend("./cache_dir", policy=policy) -``` - ---- - -## CacheStats - - - -Cache performance statistics. - -| Property | Type | Description | -|----------|------|-------------| -| `hits` | `int` | Total cache hits since creation or last clear. | -| `misses` | `int` | Total cache misses. | -| `evictions` | `int` | Total entries evicted by policy. | -| `total_entries` | `int` | Current number of entries in the cache. | -| `total_size_bytes` | `int` | Current total size of cached data in bytes. | - - float`} /> - -The cache hit rate as a float between 0.0 and 1.0. Returns 0.0 if no lookups -have been performed. - -```python -stats = cache.stats() -print(f"Hit rate: {stats.hit_rate:.1%}") -print(f"Entries: {stats.total_entries}") -print(f"Size: {stats.total_size_bytes / 1024 / 1024:.1f} MB") -print(f"Evictions: {stats.evictions}") -``` - ---- - -## FileSystemCacheBackend - - - -A cache backend that stores entries as files on the local filesystem. Each cache -key maps to a file in `cache_dir`. Supports optional eviction via a -[CachePolicy](#cachepolicy). - - - -### Methods - - Any | None`} /> - -Retrieve a cached value by key from disk. - - None`} /> - -Store a value on disk. - - bool`} /> - -Check if a key exists on disk. - - None`} /> - -Remove a single entry from disk. - - None`} /> - -Remove all cache files. - - CacheStats`} /> - -Return current backend statistics. - -```python -backend = dagron.FileSystemCacheBackend( - "./my_pipeline_cache", - policy=dagron.CachePolicy(max_entries=500, ttl_seconds=7200), -) - -backend.put("key123", {"data": [1, 2, 3]}) -print(backend.has("key123")) # True -print(backend.get("key123")) # {"data": [1, 2, 3]} -``` - ---- - -## CacheKeyBuilder - - - -A utility for manually constructing cache keys. 
Used internally by -`ContentAddressableCache` but available for advanced use cases. - - str`} /> - -Compute a hash of a callable's bytecode and closure. - - str`} /> - -Compute a hash of an arbitrary Python value. - - str`} /> - -Combine a node name, task hash, and predecessor hashes into a final cache key. - - - -```python -builder = dagron.CacheKeyBuilder() -task_hash = builder.hash_task(my_function) -value_hash = builder.hash_value([1, 2, 3]) -key = builder.build_key("process", task_hash, {"fetch": value_hash}) -``` - ---- - -## CacheKeyProtocol - - str: ...`} /> - -A protocol for objects that provide their own cache key. If a task's return -value implements this protocol, the cache uses its `__dagron_cache_key__()` -method instead of the default hashing strategy. - -```python -class MyModel: - def __init__(self, version, data): - self.version = version - self.data = data - - def __dagron_cache_key__(self) -> str: - return f"model-v{self.version}-{hash(tuple(self.data))}" - -# When MyModel is returned from a task, the cache uses __dagron_cache_key__ -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the base executor that caching wraps. -- [Incremental Execution](/api/execution/incremental) — dirty-set-based re-execution. -- [Checkpointing](/api/execution/checkpoint) — save progress to disk for resume. -- [Caching guide](/guide/execution-strategies/caching) — configuration and invalidation patterns. diff --git a/docs/pages/api/execution/checkpoint.mdx b/docs/pages/api/execution/checkpoint.mdx deleted file mode 100644 index 9a13759..0000000 --- a/docs/pages/api/execution/checkpoint.mdx +++ /dev/null @@ -1,261 +0,0 @@ ---- -sidebar_position: 8 -title: "Checkpointing" -description: "API reference for CheckpointExecutor and CheckpointInfo — save progress and resume execution after failures." 
---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Checkpointing - -The checkpointing module allows you to persist execution progress to disk and -resume after failures. When a node completes, its result is saved to a -checkpoint directory. If execution is interrupted (crash, timeout, manual stop), -you can resume from where it left off without re-executing completed nodes. - -See the [Checkpointing](/guide/execution-strategies/checkpointing) guide for usage patterns -and failure recovery strategies. - ---- - -## CheckpointExecutor - - - -An executor that saves completed node results to a checkpoint directory. On -failure, call `.resume()` to pick up where execution left off. - - - -### execute - - ExecutionResult`} /> - -Execute all tasks, saving results to the checkpoint directory as each node -completes. If a previous checkpoint exists, it is cleared and a fresh execution -begins. - - - -**Returns:** [ExecutionResult](/api/execution/execution) - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("download").add_node("parse").add_node("validate") - .add_node("transform").add_node("upload") - .add_edge("download", "parse") - .add_edge("parse", "validate") - .add_edge("validate", "transform") - .add_edge("transform", "upload") - .build() -) - -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="./checkpoints") -result = executor.execute({ - "download": lambda: "raw_data", - "parse": lambda: "parsed", - "validate": lambda: "valid", - "transform": lambda: "transformed", - "upload": lambda: "uploaded", -}) - -print(result.succeeded) # 5 -``` - -### resume - - ExecutionResult`} /> - -Resume execution from the last checkpoint. Nodes that completed successfully in -a previous run are skipped (their saved results are loaded). Nodes that failed -or were never started are re-executed. 
- - - -**Returns:** [ExecutionResult](/api/execution/execution) - -```python -# Suppose download and parse completed, but validate failed. -# Fix the issue and resume: -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="./checkpoints") -result = executor.resume({ - "download": lambda: "raw_data", - "parse": lambda: "parsed", - "validate": lambda: "valid", # fixed - "transform": lambda: "transformed", - "upload": lambda: "uploaded", -}) - -print(result.succeeded) # 5 -# download and parse were loaded from checkpoint -# validate, transform, upload were re-executed -``` - -### checkpoint_info - - CheckpointInfo | None`} /> - -Return information about the current checkpoint state, or `None` if no -checkpoint exists. - -**Returns:** [CheckpointInfo](#checkpointinfo) or `None` - -```python -info = executor.checkpoint_info() -if info is not None: - print(f"Completed: {len(info.completed_nodes)}/{info.total_nodes}") - print(f"Failed: {info.failed_nodes}") - print(f"Timestamp: {info.timestamp}") -else: - print("No checkpoint found.") -``` - -### clear_checkpoint - - None`} /> - -Delete all checkpoint files from the checkpoint directory. Use this after a -successful run to clean up, or to force a fresh execution on the next call. - -```python -executor.clear_checkpoint() -assert executor.checkpoint_info() is None -``` - ---- - -## CheckpointInfo - - - -Metadata about the current checkpoint state. - -| Property | Type | Description | -|----------|------|-------------| -| `checkpoint_dir` | `str` | The directory where checkpoint files are stored. | -| `completed_nodes` | `list[str]` | Names of nodes that completed successfully. | -| `failed_nodes` | `list[str]` | Names of nodes that failed. | -| `total_nodes` | `int` | Total number of nodes in the DAG. | -| `timestamp` | `str` | ISO-8601 timestamp of the last checkpoint write. 
| - -```python -info = executor.checkpoint_info() -print(f"Progress: {len(info.completed_nodes)}/{info.total_nodes}") -print(f"Completed: {info.completed_nodes}") -print(f"Failed: {info.failed_nodes}") -print(f"Last updated: {info.timestamp}") -``` - ---- - -## Complete Example - -A long-running data pipeline with checkpoint-and-resume: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("fetch_users") - .add_node("fetch_orders") - .add_node("join") - .add_node("enrich") - .add_node("validate") - .add_node("write_parquet") - .add_node("upload_s3") - .add_edge("fetch_users", "join") - .add_edge("fetch_orders", "join") - .add_edge("join", "enrich") - .add_edge("enrich", "validate") - .add_edge("validate", "write_parquet") - .add_edge("write_parquet", "upload_s3") - .build() -) - -tasks = { - "fetch_users": lambda: "1M users fetched", - "fetch_orders": lambda: "5M orders fetched", - "join": lambda: "joined dataset", - "enrich": lambda: "enriched with geo data", - "validate": lambda: "all checks passed", - "write_parquet": lambda: "wrote 2GB parquet", - "upload_s3": lambda: "uploaded to s3://bucket/output", -} - -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/pipeline_ckpt") - -# First attempt — may fail partway through -try: - result = executor.execute(tasks) -except Exception: - info = executor.checkpoint_info() - print(f"Interrupted: {len(info.completed_nodes)}/{info.total_nodes} complete") - -# Resume after fixing the issue -result = executor.resume(tasks) -print(f"All done: {result.succeeded} nodes succeeded") - -# Clean up -executor.clear_checkpoint() -``` - ---- - -## Checkpoint File Layout - -The checkpoint directory contains one file per completed node, plus a metadata -file: - -``` -./checkpoints/ - _meta.json # CheckpointInfo (completed/failed lists, timestamp) - fetch_users.pkl # Pickled result of fetch_users - fetch_orders.pkl # Pickled result of fetch_orders - join.pkl # Pickled result of join - ... 
-``` - -:::caution -Checkpoint files use Python's `pickle` module. Only resume from checkpoints you -trust. Do not load checkpoint files from untrusted sources. -::: - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the base executor without checkpointing. -- [Caching](/api/execution/caching) — content-addressable caching (complementary to checkpointing). -- [Incremental Execution](/api/execution/incremental) — re-execute only changed nodes. -- [Checkpointing guide](/guide/execution-strategies/checkpointing) — usage patterns and recovery strategies. diff --git a/docs/pages/api/execution/conditions.mdx b/docs/pages/api/execution/conditions.mdx deleted file mode 100644 index c2d2bf1..0000000 --- a/docs/pages/api/execution/conditions.mdx +++ /dev/null @@ -1,282 +0,0 @@ ---- -sidebar_position: 9 -title: "Conditional Execution" -description: "API reference for ConditionalDAGBuilder, ConditionalEdge, and ConditionalExecutor — predicate-gated edges that skip branches at runtime." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Conditional Execution - -The conditional execution module allows edges in your DAG to carry predicate -functions. At runtime, the executor evaluates each condition before traversing -the edge. If the condition returns `False`, the downstream node (and its -subtree) is skipped. This enables branching, feature flags, and data-dependent -routing without modifying the graph structure. - -See the [Conditional Execution](/guide/execution-strategies/conditional) guide for patterns -including if/else branches, switch-case routing, and dynamic feature flags. - ---- - -## ConditionalDAGBuilder - - - -A specialized builder that supports conditional edges. Similar to -[DAGBuilder](/api/core/builder) but with an additional `condition` parameter on -`add_edge()`. 
The builder produces both a DAG and a conditions dictionary that -the [ConditionalExecutor](#conditionalexecutor) uses at runtime. - -```python -import dagron - -builder = dagron.ConditionalDAGBuilder() -``` - -### add_node - - ConditionalDAGBuilder`} /> - -Add a node to the graph. Returns `self` for chaining. - - - -### add_edge - - ConditionalDAGBuilder`} /> - -Add a directed edge with an optional condition predicate. If `condition` is -provided, the edge is only traversed at runtime when `condition()` returns -`True`. If `condition` is `None`, the edge is unconditional (always traversed). - - - -### build - - tuple[DAG, dict[tuple[str, str], Callable[[], bool]]]`} /> - -Finalize and validate the graph. Returns a tuple of the -[DAG](/api/core/core) and a dictionary mapping `(from_node, to_node)` pairs to -their condition predicates. Pass both to the -[ConditionalExecutor](#conditionalexecutor). - -**Returns:** `tuple[DAG, dict]` — the validated DAG and the conditions map. - -**Raises:** -- `CycleError` — if the graph contains a cycle. -- `NodeNotFoundError` — if an edge references a non-existent node. - -```python -import dagron - -use_gpu = True # runtime flag - -dag, conditions = ( - dagron.ConditionalDAGBuilder() - .add_node("preprocess") - .add_node("cpu_train") - .add_node("gpu_train") - .add_node("evaluate") - .add_edge("preprocess", "cpu_train", condition=lambda: not use_gpu) - .add_edge("preprocess", "gpu_train", condition=lambda: use_gpu) - .add_edge("cpu_train", "evaluate") - .add_edge("gpu_train", "evaluate") - .build() -) - -print(dag.node_count()) # 4 -print(len(conditions)) # 2 (only conditional edges) -``` - ---- - -## ConditionalEdge - - - -A data class representing a conditional edge. Returned by -`ConditionalDAGBuilder` internals and useful for introspection. - -| Property | Type | Description | -|----------|------|-------------| -| `from_node` | `str` | The source node name. | -| `to_node` | `str` | The target node name. 
| -| `condition` | `Callable[[], bool]` | The predicate function. | -| `label` | `str | None` | Optional human-readable label. | - ---- - -## ConditionalExecutor - - - -An executor that evaluates edge conditions at runtime. Before dispatching a -node, the executor checks all incoming conditional edges. If any required -condition returns `False`, the node is skipped. A node is executed only when at -least one incoming conditional edge evaluates to `True` (or the node has at -least one unconditional incoming edge whose source completed). - - - -### execute - - ExecutionResult`} /> - -Execute tasks, evaluating conditions on each edge before dispatching. - - - -**Returns:** [ExecutionResult](/api/execution/execution) - -```python -import dagron - -use_gpu = True - -dag, conditions = ( - dagron.ConditionalDAGBuilder() - .add_node("preprocess") - .add_node("cpu_train") - .add_node("gpu_train") - .add_node("evaluate") - .add_edge("preprocess", "cpu_train", condition=lambda: not use_gpu) - .add_edge("preprocess", "gpu_train", condition=lambda: use_gpu) - .add_edge("cpu_train", "evaluate") - .add_edge("gpu_train", "evaluate") - .build() -) - -executor = dagron.ConditionalExecutor(dag, conditions) -result = executor.execute({ - "preprocess": lambda: "data ready", - "cpu_train": lambda: "trained on CPU", - "gpu_train": lambda: "trained on GPU", - "evaluate": lambda: "accuracy: 0.95", -}) - -print(result.succeeded) # 3 -print(result.skipped) # 1 (cpu_train skipped) -print(result.node_results["gpu_train"].status) # COMPLETED -print(result.node_results["cpu_train"].status) # SKIPPED -``` - ---- - -## Patterns - -### If/Else Branch - -```python -flag = True - -dag, conditions = ( - dagron.ConditionalDAGBuilder() - .add_node("check") - .add_node("branch_true") - .add_node("branch_false") - .add_node("merge") - .add_edge("check", "branch_true", condition=lambda: flag) - .add_edge("check", "branch_false", condition=lambda: not flag) - .add_edge("branch_true", "merge") - 
.add_edge("branch_false", "merge") - .build() -) -``` - -### Feature Flags - -```python -import os - -dag, conditions = ( - dagron.ConditionalDAGBuilder() - .add_node("fetch") - .add_node("cache_result") - .add_node("process") - .add_edge("fetch", "cache_result", - condition=lambda: os.getenv("ENABLE_CACHE") == "1") - .add_edge("fetch", "process") - .add_edge("cache_result", "process") - .build() -) -``` - -### Data-Dependent Routing - -Conditions can inspect shared state that is updated by upstream tasks: - -```python -shared = {} - -def classify(): - shared["category"] = "premium" - return shared["category"] - -dag, conditions = ( - dagron.ConditionalDAGBuilder() - .add_node("classify") - .add_node("premium_flow") - .add_node("standard_flow") - .add_node("finalize") - .add_edge("classify", "premium_flow", - condition=lambda: shared.get("category") == "premium") - .add_edge("classify", "standard_flow", - condition=lambda: shared.get("category") != "premium") - .add_edge("premium_flow", "finalize") - .add_edge("standard_flow", "finalize") - .build() -) -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the base executor without conditions. -- [DAGBuilder](/api/core/builder) — the standard builder without conditional edges. -- [Dynamic Execution](/api/execution/dynamic) — modify the graph at runtime instead of skipping edges. -- [Approval Gates](/api/execution/gates) — human-in-the-loop pause/resume. -- [Conditional Execution guide](/guide/execution-strategies/conditional) — patterns and best practices. diff --git a/docs/pages/api/execution/distributed.mdx b/docs/pages/api/execution/distributed.mdx deleted file mode 100644 index d665609..0000000 --- a/docs/pages/api/execution/distributed.mdx +++ /dev/null @@ -1,426 +0,0 @@ ---- -sidebar_position: 13 -title: "Distributed Execution" -description: "API reference for DistributedExecutor, DistributedBackend, and PartitionedDAGExecutor — run DAGs across threads, processes, Ray, and Celery." 
---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Distributed Execution - -The distributed execution module lets you run DAG tasks across multiple -backends: threads, processes, Ray clusters, or Celery workers. A pluggable -backend protocol makes it easy to integrate with any distributed computing -framework. - -For large DAGs, the `PartitionedDAGExecutor` splits the graph into partitions -and assigns each partition to a different worker group for improved data -locality and reduced communication overhead. - -See the [Distributed Execution](/guide/execution-strategies/distributed) guide for deployment -patterns and backend selection advice. - ---- - -## DistributedExecutor - - - -An executor that dispatches tasks to a pluggable distributed backend. -Supports the context manager protocol for automatic backend shutdown. - - - -### execute - - DistributedExecutionResult`} /> - -Execute tasks via the distributed backend. - - - -**Returns:** [DistributedExecutionResult](#distributedexecutionresult) - -### Context Manager - -The executor can be used as a context manager for automatic backend shutdown: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("fetch").add_node("process").add_node("store") - .add_edge("fetch", "process").add_edge("process", "store") - .build() -) - -with dagron.DistributedExecutor(dag, backend=dagron.ThreadBackend(max_workers=4)) as executor: - result = executor.execute({ - "fetch": lambda: "data", - "process": lambda: "processed", - "store": lambda: "stored", - }) - -print(result.succeeded) # 3 -# Backend is automatically shut down on exit -``` - ---- - -## DistributedExecutionResult - - - -The result of a distributed execution. Wraps the standard -[ExecutionResult](/api/execution/execution) with backend-specific metadata. 
- -| Property | Type | Description | -|----------|------|-------------| -| `execution_result` | `ExecutionResult` | The underlying execution result with per-node details. | -| `backend_name` | `str` | Name of the backend used (e.g., `"thread"`, `"ray"`, `"celery"`). | -| `dispatch_info` | `dict[str, Any]` | Backend-specific dispatch metadata (worker IDs, queue names, etc.). | - -```python -print(f"Backend: {result.backend_name}") -print(f"Succeeded: {result.execution_result.succeeded}") -print(f"Dispatch info: {result.dispatch_info}") -``` - ---- - -## DistributedBackend Protocol - - str: ... - - def submit( - self, - task: Callable, - node_name: str, - ) -> Any: ... - - def result( - self, - future: Any, - timeout: float | None = None, - ) -> Any: ... - - def shutdown(self) -> None: ...`} /> - -The protocol that all distributed backends must implement. You can create custom -backends by implementing these four members. - -| Method | Description | -|--------|-------------| -| `name` | A human-readable backend name. | -| `submit(task, node_name)` | Submit a task for execution. Returns a future-like object. | -| `result(future, timeout)` | Block until the future completes and return its result. Raises on timeout. | -| `shutdown()` | Shut down the backend and release all resources. | - -```python -class MyCustomBackend: - @property - def name(self) -> str: - return "custom" - - def submit(self, task, node_name): - # dispatch to your infrastructure - return my_cluster.submit(task) - - def result(self, future, timeout=None): - return future.get(timeout=timeout) - - def shutdown(self): - my_cluster.close() -``` - ---- - -## Built-in Backends - -### ThreadBackend - - - -A backend that dispatches tasks to a `concurrent.futures.ThreadPoolExecutor`. -Best for I/O-bound tasks. - - - -```python -backend = dagron.ThreadBackend(max_workers=8) -``` - -### MultiprocessingBackend - - - -A backend that dispatches tasks to a `concurrent.futures.ProcessPoolExecutor`. 
-Best for CPU-bound tasks. Tasks must be picklable. - - - -```python -backend = dagron.MultiprocessingBackend(max_workers=4) -``` - -:::caution -Tasks submitted to `MultiprocessingBackend` must be picklable. Lambdas and -closures will fail. Use module-level functions instead. -::: - -### RayBackend - - - -A backend that dispatches tasks to a [Ray](https://ray.io) cluster. Requires -`ray` to be installed (`pip install dagron[ray]`). - - - -```python -backend = dagron.RayBackend( - address="ray://cluster:10001", - num_cpus=2, - num_gpus=1, -) - -with dagron.DistributedExecutor(dag, backend=backend) as executor: - result = executor.execute(tasks) -``` - -### CeleryBackend - - - -A backend that dispatches tasks to [Celery](https://docs.celeryq.dev/) workers. -Requires `celery` to be installed (`pip install dagron[celery]`). - - - -```python -backend = dagron.CeleryBackend( - broker="redis://localhost:6379/0", - backend_url="redis://localhost:6379/1", - queue="dagron_tasks", -) - -with dagron.DistributedExecutor(dag, backend=backend) as executor: - result = executor.execute(tasks) -``` - ---- - -## PartitionedDAGExecutor - - - -An executor that partitions the DAG into `k` groups and executes each partition -with a dedicated worker pool. This reduces inter-partition communication and -improves data locality for large DAGs. - - - -### execute - - ExecutionResult`} /> - -Partition the DAG and execute tasks. 
- - - -**Returns:** [ExecutionResult](/api/execution/execution) - -```python -import dagron - -# A large DAG with many nodes -dag = dagron.DAG.builder() -for i in range(100): - dag = dag.add_node(f"node_{i}") -for i in range(99): - dag = dag.add_edge(f"node_{i}", f"node_{i+1}") -dag = dag.build() - -tasks = {f"node_{i}": lambda i=i: f"result_{i}" for i in range(100)} - -executor = dagron.PartitionedDAGExecutor( - dag, - k=4, - strategy="balanced", - max_workers=8, -) - -result = executor.execute(tasks) -print(f"Succeeded: {result.succeeded}") # 100 -``` - -### Strategies - -| Strategy | Description | -|----------|-------------| -| `"level_based"` | Assign nodes to partitions based on their topological level. Simple and fast. | -| `"balanced"` | Balance node costs across partitions. Good general-purpose strategy. | -| `"communication_min"` | Minimize cross-partition edges using Kernighan-Lin refinement. Best for data-intensive pipelines. | - -These map to `DAG.partition_level_based()`, `DAG.partition_balanced()`, and -`DAG.partition_communication_min()` respectively. See [DAG partitioning](/api/core/core) -for the underlying algorithms. 
- ---- - -## Complete Example: Ray Cluster - -A complete distributed ML training pipeline running on Ray: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("load_data") - .add_node("preprocess") - .add_node("train_xgb") - .add_node("train_nn") - .add_node("ensemble") - .add_node("evaluate") - .add_edge("load_data", "preprocess") - .add_edge("preprocess", "train_xgb") - .add_edge("preprocess", "train_nn") - .add_edge("train_xgb", "ensemble") - .add_edge("train_nn", "ensemble") - .add_edge("ensemble", "evaluate") - .build() -) - -def load_data(): - return "loaded 1M rows" - -def preprocess(): - return "preprocessed features" - -def train_xgb(): - import time; time.sleep(5) - return {"model": "xgb", "auc": 0.92} - -def train_nn(): - import time; time.sleep(10) - return {"model": "nn", "auc": 0.94} - -def ensemble(): - return {"model": "ensemble", "auc": 0.96} - -def evaluate(): - return "evaluation report saved" - -tasks = { - "load_data": load_data, - "preprocess": preprocess, - "train_xgb": train_xgb, - "train_nn": train_nn, - "ensemble": ensemble, - "evaluate": evaluate, -} - -backend = dagron.RayBackend(num_cpus=2, num_gpus=1) - -with dagron.DistributedExecutor( - dag, - backend=backend, - enable_tracing=True, - node_timeout=300, - callbacks=dagron.ExecutionCallbacks( - on_start=lambda n: print(f"[{backend.name}] Starting {n}"), - on_complete=lambda n, r: print(f"[{backend.name}] Completed {n}"), - ), -) as executor: - result = executor.execute(tasks) - -print(f"\nBackend: {result.backend_name}") -print(f"Succeeded: {result.execution_result.succeeded}") -print(f"Duration: {result.execution_result.total_duration_seconds:.1f}s") -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the local thread-pool executor. -- [Resource Scheduling](/api/execution/resources) — GPU/CPU/memory-aware local scheduling. -- [DAG partitioning](/api/core/core) — the partitioning algorithms used by `PartitionedDAGExecutor`. 
-- [Distributed Execution guide](/guide/execution-strategies/distributed) — deployment and backend selection guide. diff --git a/docs/pages/api/execution/dynamic.mdx b/docs/pages/api/execution/dynamic.mdx deleted file mode 100644 index 67466de..0000000 --- a/docs/pages/api/execution/dynamic.mdx +++ /dev/null @@ -1,266 +0,0 @@ ---- -sidebar_position: 10 -title: "Dynamic Execution" -description: "API reference for DynamicExecutor, DynamicModification, and DynamicNodeSpec — expand the DAG at runtime based on node results." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Dynamic Execution - -The dynamic execution module lets you modify the DAG at runtime. When a node -completes, an **expander function** can inspect its result and add or remove -nodes before execution continues. This supports fan-out patterns where the -number of downstream tasks depends on data discovered at runtime (e.g., one task -per file found in a directory, one task per API page to fetch). - -See the [Dynamic DAGs](/guide/execution-strategies/dynamic-dags) guide for usage patterns and -best practices. - ---- - -## DynamicExecutor - - - -An executor that supports runtime graph modifications. After each node -completes, the executor checks if an expander function is registered for that -node. If so, the expander is called with the node's return value and can return -a `DynamicModification` describing nodes and edges to add or remove. - - - -### execute - - ExecutionResult`} /> - -Execute tasks with dynamic expansion. The `tasks` dictionary should contain -tasks for all initially known nodes. Dynamically added nodes must include their -task callable in the [DynamicNodeSpec](#dynamicnodespec). - - - -**Returns:** [ExecutionResult](/api/execution/execution) — includes results for both -initial and dynamically added nodes. 
- -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("discover") - .add_node("aggregate") - .add_edge("discover", "aggregate") - .build() -) - -def discover_expander(result): - """Add one processing node per discovered file.""" - files = result # e.g., ["a.csv", "b.csv", "c.csv"] - nodes = [ - dagron.DynamicNodeSpec( - name=f"process_{f}", - task=lambda f=f: f"processed {f}", - dependencies=["discover"], - dependents=["aggregate"], - ) - for f in files - ] - return dagron.DynamicModification(add_nodes=nodes) - -executor = dagron.DynamicExecutor( - dag, - expanders={"discover": discover_expander}, - max_workers=4, -) - -result = executor.execute({ - "discover": lambda: ["a.csv", "b.csv", "c.csv"], - "aggregate": lambda: "all files processed", -}) - -print(result.succeeded) # 5 (discover + 3 process + aggregate) -``` - ---- - -## DynamicModification - - - -A description of graph modifications to apply after a node completes. Returned -by expander functions. - - - -```python -# Add nodes -mod = dagron.DynamicModification( - add_nodes=[ - dagron.DynamicNodeSpec("task_1", task=lambda: "result_1", dependencies=["source"]), - dagron.DynamicNodeSpec("task_2", task=lambda: "result_2", dependencies=["source"]), - ] -) - -# Remove nodes -mod = dagron.DynamicModification(remove_nodes=["obsolete_node"]) - -# Both -mod = dagron.DynamicModification( - add_nodes=[dagron.DynamicNodeSpec("replacement", task=lambda: "new", dependencies=["source"])], - remove_nodes=["old_task"], -) -``` - ---- - -## DynamicNodeSpec - - - -A specification for a node to be dynamically added during execution. Includes -the task callable and edge connections. - - - -The `dependencies` list creates edges `dep -> new_node` and the `dependents` -list creates edges `new_node -> dependent`. This wires the new node into the -existing graph topology. 
- -```python -spec = dagron.DynamicNodeSpec( - name="process_chunk_42", - task=lambda: "chunk 42 processed", - dependencies=["split"], # split -> process_chunk_42 - dependents=["merge"], # process_chunk_42 -> merge -) -``` - ---- - -## Complete Example: Map-Reduce - -A dynamic map-reduce pipeline where the mapper discovers the number of chunks at -runtime: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("read_input") - .add_node("reduce") - .add_edge("read_input", "reduce") - .build() -) - -def map_expander(data): - """Split input into chunks and create a mapper node per chunk.""" - chunks = [data[i:i+100] for i in range(0, len(data), 100)] - return dagron.DynamicModification( - add_nodes=[ - dagron.DynamicNodeSpec( - name=f"map_{i}", - task=lambda chunk=chunk: sum(chunk), - dependencies=["read_input"], - dependents=["reduce"], - ) - for i, chunk in enumerate(chunks) - ] - ) - -executor = dagron.DynamicExecutor( - dag, - expanders={"read_input": map_expander}, - max_workers=8, - callbacks=dagron.ExecutionCallbacks( - on_dynamic_expand=lambda name, new_nodes: print( - f"[EXPAND] {name} added {len(new_nodes)} nodes" - ), - ), -) - -result = executor.execute({ - "read_input": lambda: list(range(500)), - "reduce": lambda: "reduction complete", -}) - -print(f"Total nodes executed: {result.succeeded}") -# read_input + 5 mappers + reduce = 7 -``` - ---- - -## Callbacks for Dynamic Expansion - -The `on_dynamic_expand` callback in [ExecutionCallbacks](/api/execution/execution) is -called whenever new nodes are added: - -```python -callbacks = dagron.ExecutionCallbacks( - on_dynamic_expand=lambda node_name, new_nodes: print( - f"Node '{node_name}' expanded with: {new_nodes}" - ), -) -``` - -This is useful for logging, monitoring, and debugging dynamic DAGs in -production. - ---- - -## Constraints and Safety - -- **No cycles:** dynamically added edges must not create cycles. 
The executor - validates this and raises `CycleError` if a cycle would be introduced. -- **No duplicate names:** dynamically added nodes must have unique names. A - `DuplicateNodeError` is raised otherwise. -- **Topological consistency:** new nodes are inserted into the execution - schedule at the correct topological position. Nodes that have already been - dispatched are not re-executed. -- **Expander idempotency:** expanders should be idempotent. If execution is - retried (e.g., via checkpointing), expanders may run again. - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the base executor without dynamic expansion. -- [Conditional Execution](/api/execution/conditions) — skip branches without modifying the graph. -- [Pipeline](/api/execution/pipeline) — a static decorator-based pipeline API. -- [Dynamic DAGs guide](/guide/execution-strategies/dynamic-dags) — patterns and best practices. diff --git a/docs/pages/api/execution/execution.mdx b/docs/pages/api/execution/execution.mdx deleted file mode 100644 index 93f9363..0000000 --- a/docs/pages/api/execution/execution.mdx +++ /dev/null @@ -1,350 +0,0 @@ ---- -sidebar_position: 4 -slug: /api/execution/execution -title: "Execution" -description: "API reference for DAGExecutor, AsyncDAGExecutor, ExecutionResult, NodeResult, NodeStatus, and ExecutionCallbacks." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Execution - -The execution module provides thread-pool and async executors that walk the DAG -in topological order, dispatching tasks with maximum parallelism while respecting -dependency constraints. - -See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for patterns -including timeouts, cancellation, callbacks, and fail-fast behavior. - ---- - -## DAGExecutor - - - -A synchronous executor that runs DAG tasks on a thread pool. 
Tasks are -dispatched in topological order with maximum parallelism bounded by -`max_workers`. - - - -### execute - - ExecutionResult`} /> - -Execute all tasks according to the DAG topology. Each key in `tasks` must match -a node name in the DAG. Returns an [ExecutionResult](#executionresult) when all -tasks have completed, failed, or been skipped. - - - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("a").add_node("b").add_node("c") - .add_edge("a", "b").add_edge("b", "c") - .build() -) - -result = dagron.DAGExecutor(dag, max_workers=4).execute({ - "a": lambda: "step-a", - "b": lambda: "step-b", - "c": lambda: "step-c", -}) - -print(result.succeeded) # 3 -``` - ---- - -## AsyncDAGExecutor - - - -An async executor that runs DAG tasks on an asyncio event loop. Same interface -as `DAGExecutor` but all tasks must be async callables (coroutines). - - - -### execute (async) - - ExecutionResult`} /> - -Execute all async tasks according to the DAG topology. - - - -```python -import asyncio -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("fetch").add_node("process") - .add_edge("fetch", "process") - .build() -) - -async def main(): - result = await dagron.AsyncDAGExecutor(dag).execute({ - "fetch": lambda: asyncio.sleep(0.1), - "process": lambda: asyncio.sleep(0.05), - }) - print(result.succeeded) # 2 - -asyncio.run(main()) -``` - ---- - -## ExecutionResult - - - -The aggregate result of executing all tasks in a DAG. Returned by every -executor's `.execute()` method. - -| Property | Type | Description | -|----------|------|-------------| -| `node_results` | `dict[str, NodeResult]` | Per-node results keyed by node name. | -| `succeeded` | `int` | Number of nodes that completed successfully. | -| `failed` | `int` | Number of nodes that raised exceptions. | -| `skipped` | `int` | Number of nodes skipped due to upstream failures. | -| `timed_out` | `int` | Number of nodes that exceeded the timeout. 
| -| `cancelled` | `int` | Number of nodes cancelled by the cancel event. | -| `total_duration_seconds` | `float` | Wall-clock duration of the entire execution. | -| `trace` | `Trace | None` | Chrome-compatible trace data if `enable_tracing=True`. | - -```python -result = executor.execute(tasks) - -print(f"Succeeded: {result.succeeded}/{result.succeeded + result.failed}") -print(f"Duration: {result.total_duration_seconds:.3f}s") - -for name, nr in result.node_results.items(): - print(f" {name}: {nr.status} ({nr.duration_seconds:.3f}s)") -``` - ---- - -## NodeResult - - - -The result of executing a single node. - -| Property | Type | Description | -|----------|------|-------------| -| `name` | `str` | The node name. | -| `status` | `NodeStatus` | The terminal status of this node. | -| `result` | `Any` | The return value of the task callable, or `None` if it did not complete. | -| `error` | `Exception | None` | The exception raised by the task, or `None` on success. | -| `duration_seconds` | `float` | Wall-clock duration of this node's execution. | - -```python -nr = result.node_results["transform"] -if nr.status == dagron.NodeStatus.COMPLETED: - print(f"Transform returned: {nr.result}") -elif nr.status == dagron.NodeStatus.FAILED: - print(f"Transform failed: {nr.error}") -``` - ---- - -## NodeStatus - - - -Enumeration of possible node execution states. - -| Value | Description | -|-------|-------------| -| `PENDING` | Node has not yet started. | -| `RUNNING` | Node is currently executing. | -| `COMPLETED` | Node finished successfully. | -| `FAILED` | Node raised an exception. | -| `SKIPPED` | Node was skipped because an upstream dependency failed (fail-fast mode). | -| `TIMED_OUT` | Node exceeded the execution timeout. | -| `CANCELLED` | Node was cancelled by the cancel event. | -| `CACHE_HIT` | Node result was loaded from cache instead of executing. | - ---- - -## ExecutionCallbacks - - - -Lifecycle callbacks invoked during execution. All callbacks are optional. 
Each -receives the node name as the first argument. - - - -```python -import dagron - -callbacks = dagron.ExecutionCallbacks( - on_start=lambda name: print(f"[START] {name}"), - on_complete=lambda name, result: print(f"[DONE] {name} -> {result}"), - on_failure=lambda name, err: print(f"[FAIL] {name}: {err}"), - on_skip=lambda name: print(f"[SKIP] {name}"), -) - -executor = dagron.DAGExecutor(dag, callbacks=callbacks) -result = executor.execute(tasks) -``` - ---- - -## Timeouts and Cancellation - -### Global timeout - -Pass a `timeout` to `.execute()` to set a wall-clock limit on the entire -execution: - -```python -result = executor.execute(tasks, timeout=30.0) -print(result.timed_out) # number of nodes that exceeded the timeout -``` - -### External cancellation - -Use a `threading.Event` (or `asyncio.Event` for async) to cancel execution from -another thread: - -```python -import threading - -cancel = threading.Event() - -def watchdog(): - import time - time.sleep(10) - cancel.set() - -threading.Thread(target=watchdog, daemon=True).start() -result = executor.execute(tasks, cancel_event=cancel) -print(result.cancelled) # number of nodes cancelled -``` - ---- - -## Tracing - -Enable execution tracing to produce Chrome-compatible trace data: - -```python -executor = dagron.DAGExecutor(dag, enable_tracing=True) -result = executor.execute(tasks) - -# Write trace to file for chrome://tracing -with open("trace.json", "w") as f: - f.write(result.trace.to_json()) -``` - -See the [Tracing & Profiling](/guide/observability/tracing-profiling) guide for -visualization instructions. - ---- - -## Related - -- [DAG](/api/core/core) — the graph driving execution order. -- [Pipeline](/api/execution/pipeline) — a higher-level decorator-based execution API. -- [Incremental Execution](/api/execution/incremental) — re-execute only changed nodes. -- [Caching](/api/execution/caching) — skip nodes whose inputs have not changed. 
-- [Resource Scheduling](/api/execution/resources) — GPU/CPU/memory-aware execution. diff --git a/docs/pages/api/execution/gates.mdx b/docs/pages/api/execution/gates.mdx deleted file mode 100644 index d44828f..0000000 --- a/docs/pages/api/execution/gates.mdx +++ /dev/null @@ -1,352 +0,0 @@ ---- -sidebar_position: 11 -title: "Approval Gates" -description: "API reference for ApprovalGate, GateController, GateStatus, and gate-related errors — human-in-the-loop pause and resume." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Approval Gates - -Approval gates pause DAG execution at specific nodes until a human (or external -system) explicitly approves or rejects the continuation. This enables -human-in-the-loop workflows such as deployment approvals, data quality -sign-offs, and compliance checks. - -Gates integrate with any executor via [ExecutionCallbacks](/api/execution/execution) -and provide both synchronous and asynchronous waiting interfaces. - -See the [Approval Gates](/guide/execution-strategies/approval-gates) guide for end-to-end -workflow patterns. - ---- - -## ApprovalGate - - - -A single approval gate that can be attached to a node. The gate starts in -`PENDING` state, transitions to `WAITING` when the executor reaches it, and -resolves to `APPROVED`, `REJECTED`, or `TIMED_OUT`. - - - -### approve - - None`} /> - -Approve the gate, allowing execution to proceed past this point. - -### reject - - None`} /> - -Reject the gate, causing the gated node to fail with a `GateRejectedError`. - - - -### wait_sync - - None`} /> - -Block the current thread until the gate is resolved (approved, rejected, or -timed out). Raises `GateRejectedError` on rejection and `GateTimeoutError` on -timeout. - -### wait_async - - None`} /> - -Await gate resolution in an async context. Raises `GateRejectedError` on -rejection and `GateTimeoutError` on timeout. 
- -### reset - - None`} /> - -Reset the gate to `PENDING` state for reuse. - -### status - - GateStatus`} /> - -The current gate status. - -### reason - - str | None`} /> - -The rejection reason, or `None` if the gate was not rejected. - -```python -import dagron -import threading - -gate = dagron.ApprovalGate(timeout=60.0) - -# In another thread or process: -def approval_ui(): - input("Press Enter to approve deployment...") - gate.approve() - -threading.Thread(target=approval_ui, daemon=True).start() - -# In the task: -gate.wait_sync() # blocks until approved -print(f"Gate status: {gate.status}") # GateStatus.APPROVED -``` - ---- - -## GateController - - - -A centralized controller for managing multiple gates. Provides a single -interface for approving, rejecting, and querying the status of all gates in a -pipeline. - - - -### add_gate - - ApprovalGate`} /> - -Add a gate to the controller. If `gate` is `None`, a new default `ApprovalGate` -is created. Returns the gate instance. - - - -### approve - - None`} /> - -Approve a named gate. - -### reject - - None`} /> - -Reject a named gate. - -### status - - GateStatus`} /> - -Return the status of a named gate. - -### waiting_gates - - list[str]`} /> - -Return the names of all gates currently in `WAITING` status. - -### get_gate - - ApprovalGate`} /> - -Return the `ApprovalGate` instance for a named gate. - -### has_gate - - bool`} /> - -Return `True` if a gate with the given name exists. - -### wait_sync - - None`} /> - -Block until a named gate is resolved. - -### wait_async - - None`} /> - -Await resolution of a named gate. - -### reset_all - - None`} /> - -Reset all gates to `PENDING` status. 
- -```python -import dagron - -controller = dagron.GateController() -controller.add_gate("qa_review", dagron.ApprovalGate(timeout=300)) -controller.add_gate("deploy_prod", dagron.ApprovalGate(timeout=600)) - -# Check what's waiting -print(controller.waiting_gates()) # [] - -# Later, in a webhook handler: -controller.approve("qa_review") -print(controller.status("qa_review")) # GateStatus.APPROVED - -controller.reject("deploy_prod", reason="Failed canary check") -print(controller.status("deploy_prod")) # GateStatus.REJECTED -``` - ---- - -## GateStatus - - - -Enumeration of gate states. - -| Value | Description | -|-------|-------------| -| `PENDING` | Gate has been created but execution has not reached it yet. | -| `WAITING` | Execution has reached the gate and is waiting for approval. | -| `APPROVED` | Gate was approved; execution proceeds. | -| `REJECTED` | Gate was rejected; the gated node fails. | -| `TIMED_OUT` | Gate was not resolved before its timeout expired. | - ---- - -## GateRejectedError - - - -Raised when a gate is rejected. The gated node's task will receive this as its -exception, and it will appear in the node's `NodeResult.error`. - - - -```python -try: - gate.wait_sync() -except dagron.GateRejectedError as e: - print(f"Gate '{e.gate_name}' rejected: {e.reason}") -``` - ---- - -## GateTimeoutError - - - -Raised when a gate times out before being approved or rejected. 
- - - -```python -try: - gate.wait_sync() -except dagron.GateTimeoutError as e: - print(f"Gate '{e.gate_name}' timed out after {e.timeout}s") -``` - ---- - -## Complete Example: Deployment Pipeline - -A deployment pipeline with QA approval and production deployment gates: - -```python -import dagron -import threading - -# Build the DAG -dag = ( - dagron.DAG.builder() - .add_node("build") - .add_node("test") - .add_node("qa_gate") - .add_node("deploy_staging") - .add_node("prod_gate") - .add_node("deploy_prod") - .add_edge("build", "test") - .add_edge("test", "qa_gate") - .add_edge("qa_gate", "deploy_staging") - .add_edge("deploy_staging", "prod_gate") - .add_edge("prod_gate", "deploy_prod") - .build() -) - -# Set up gates -controller = dagron.GateController() -qa_gate = controller.add_gate("qa_gate", dagron.ApprovalGate(timeout=3600)) -prod_gate = controller.add_gate("prod_gate", dagron.ApprovalGate(timeout=7200)) - -# Define tasks -tasks = { - "build": lambda: "artifact-v1.2.3", - "test": lambda: "42 tests passed", - "qa_gate": lambda: qa_gate.wait_sync(), - "deploy_staging": lambda: "deployed to staging", - "prod_gate": lambda: prod_gate.wait_sync(), - "deploy_prod": lambda: "deployed to production", -} - -# Simulate external approval (in production, this would be a web UI or API) -def simulate_approvals(): - import time - time.sleep(2) - print("QA approved!") - controller.approve("qa_gate") - time.sleep(2) - print("Prod approved!") - controller.approve("prod_gate") - -threading.Thread(target=simulate_approvals, daemon=True).start() - -# Execute with gate callbacks -result = dagron.DAGExecutor( - dag, - callbacks=dagron.ExecutionCallbacks( - on_gate_waiting=lambda name: print(f"Waiting for gate: {name}"), - on_gate_resolved=lambda name, status: print(f"Gate {name}: {status}"), - ), -).execute(tasks) - -print(f"\nPipeline: {result.succeeded} succeeded, {result.failed} failed") -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the executor 
that integrates with gates via callbacks. -- [Conditional Execution](/api/execution/conditions) — automated branching (no human involved). -- [Checkpointing](/api/execution/checkpoint) — save and resume after gate rejection. -- [Approval Gates guide](/guide/execution-strategies/approval-gates) — end-to-end workflow patterns. diff --git a/docs/pages/api/execution/incremental.mdx b/docs/pages/api/execution/incremental.mdx deleted file mode 100644 index 08c6bd4..0000000 --- a/docs/pages/api/execution/incremental.mdx +++ /dev/null @@ -1,260 +0,0 @@ ---- -sidebar_position: 6 -title: "Incremental Execution" -description: "API reference for IncrementalExecutor and IncrementalResult — re-execute only what changed." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Incremental Execution - -The incremental execution module provides an executor that re-runs only the -nodes affected by a set of changes. Unchanged nodes are reused from the previous -run, dramatically reducing execution time for large DAGs where only a few inputs -have changed. - -dagron computes the "dirty set" from the changed nodes and their downstream -descendants, then applies **early cutoff** optimization: if a recomputed node -produces the same result as its cached value, its descendants are not recomputed -even if they were in the initial dirty set. - -See the [Incremental Execution](/guide/execution-strategies/incremental) guide for workflow -patterns and best practices. - ---- - -## IncrementalExecutor - - - -An executor that tracks previous results and only re-executes nodes in the dirty -set. On the first invocation, all nodes are executed. On subsequent invocations -with `changed_nodes`, only the affected subset is re-executed. - - - -### execute - - IncrementalResult`} /> - -Execute tasks incrementally. On the first call (or when `changed_nodes` is -`None`), all nodes are executed. 
On subsequent calls, only nodes downstream of -the changed nodes are re-executed. The executor applies early cutoff: if a -recomputed node produces the same output as the cached value, its descendants -are skipped. - - - -**Returns:** [IncrementalResult](#incrementalresult) - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("source_a") - .add_node("source_b") - .add_node("transform") - .add_node("aggregate") - .add_node("report") - .add_edge("source_a", "transform") - .add_edge("source_b", "transform") - .add_edge("transform", "aggregate") - .add_edge("aggregate", "report") - .build() -) - -tasks = { - "source_a": lambda: [1, 2, 3], - "source_b": lambda: [4, 5, 6], - "transform": lambda: "transformed", - "aggregate": lambda: "aggregated", - "report": lambda: "report ready", -} - -executor = dagron.IncrementalExecutor(dag) - -# First run: execute everything -result = executor.execute(tasks) -print(result.recomputed) # 5 (all nodes) -print(result.reused) # 0 - -# Second run: only source_a changed -result = executor.execute(tasks, changed_nodes=["source_a"]) -print(result.recomputed) # 4 (source_a, transform, aggregate, report) -print(result.reused) # 1 (source_b) -``` - ---- - -## IncrementalResult - - - -The result of an incremental execution. Extends the standard execution result -with incremental-specific metrics. - -| Property | Type | Description | -|----------|------|-------------| -| `node_results` | `dict[str, NodeResult]` | Per-node results keyed by node name. Includes both recomputed and reused nodes. | -| `recomputed` | `int` | Number of nodes that were actually re-executed. | -| `early_cutoff` | `int` | Number of nodes in the dirty set that were skipped because an upstream recomputation produced the same result. | -| `reused` | `int` | Number of nodes that were reused from the previous execution without recomputation. 
| -| `provenance` | `dict[str, list[str]]` | For each recomputed node, the list of changed root nodes responsible for its invalidation. | -| `total_duration_seconds` | `float` | Wall-clock duration of this incremental execution. | -| `trace` | `Trace | None` | Chrome-compatible trace data if `enable_tracing=True`. | - -```python -result = executor.execute(tasks, changed_nodes=["source_a"]) - -print(f"Recomputed: {result.recomputed}") -print(f"Early cutoff: {result.early_cutoff}") -print(f"Reused: {result.reused}") -print(f"Duration: {result.total_duration_seconds:.3f}s") - -# Why was each node recomputed? -for node, causes in result.provenance.items(): - print(f" {node} invalidated by: {causes}") -``` - ---- - -## How Dirty Set Computation Works - -The dirty set is the set of nodes that must be re-evaluated. It is computed as -follows: - -1. Start with the explicitly `changed_nodes`. -2. Add all downstream descendants (transitive successors) of each changed node. -3. The union of these sets is the **dirty set**. - -You can preview the dirty set without executing via `DAG.dirty_set()`: - -```python -dirty = dag.dirty_set(["source_a"]) -print(dirty) # ["source_a", "transform", "aggregate", "report"] -``` - -And the provenance (which change caused which recomputation) via -`DAG.change_provenance()`: - -```python -prov = dag.change_provenance(["source_a"]) -print(prov) -# { -# "source_a": ["source_a"], -# "transform": ["source_a"], -# "aggregate": ["source_a"], -# "report": ["source_a"], -# } -``` - ---- - -## Early Cutoff - -Early cutoff is an optimization that stops propagation when a recomputed node -produces the same result as its cached value. The comparison uses Python's `==` -operator. - -For example, if `transform` is in the dirty set but produces the same output as -the previous run, then `aggregate` and `report` are cut off and reused from -cache, even though they were in the dirty set. 
- -```python -counter = {"calls": 0} - -def transform(): - counter["calls"] += 1 - return "always_same" # same output regardless of input - -# First run -result1 = executor.execute(tasks) - -# Second run: source_a changed, but transform produces same output -result2 = executor.execute(tasks, changed_nodes=["source_a"]) -print(result2.early_cutoff) # 2 (aggregate and report were cut off) -print(result2.recomputed) # 2 (source_a and transform were recomputed) -print(result2.reused) # 1 (source_b) -``` - ---- - -## Complete Example - -A build system that recompiles only changed source files: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("parse_module_a") - .add_node("parse_module_b") - .add_node("type_check") - .add_node("optimize") - .add_node("codegen") - .add_node("link") - .add_edge("parse_module_a", "type_check") - .add_edge("parse_module_b", "type_check") - .add_edge("type_check", "optimize") - .add_edge("optimize", "codegen") - .add_edge("codegen", "link") - .build() -) - -tasks = { - "parse_module_a": lambda: "ast_a", - "parse_module_b": lambda: "ast_b", - "type_check": lambda: "typed_ast", - "optimize": lambda: "optimized_ir", - "codegen": lambda: "machine_code", - "link": lambda: "executable", -} - -executor = dagron.IncrementalExecutor(dag, enable_tracing=True) - -# Initial full build -result = executor.execute(tasks) -print(f"Full build: {result.recomputed} nodes in {result.total_duration_seconds:.3f}s") - -# Developer edits module_a.py — incremental rebuild -result = executor.execute(tasks, changed_nodes=["parse_module_a"]) -print(f"Incremental: {result.recomputed} recomputed, {result.reused} reused") -print(f"Speed improvement: {result.reused}/{result.recomputed + result.reused} nodes skipped") -``` - ---- - -## Related - -- [DAG.dirty_set](/api/core/core) — preview the dirty set without executing. -- [DAG.change_provenance](/api/core/core) — understand why each node needs recomputation. 
-- [Caching](/api/execution/caching) — content-addressable caching for persistent result storage. -- [Checkpointing](/api/execution/checkpoint) — save and resume execution state. -- [Executing Tasks](/guide/core-concepts/executing-tasks) — general execution guide. diff --git a/docs/pages/api/execution/pipeline.mdx b/docs/pages/api/execution/pipeline.mdx deleted file mode 100644 index ac33118..0000000 --- a/docs/pages/api/execution/pipeline.mdx +++ /dev/null @@ -1,334 +0,0 @@ ---- -sidebar_position: 5 -title: "Pipeline" -description: "API reference for the @task decorator and Pipeline class — a high-level API for building and executing DAGs from decorated functions." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Pipeline - -The Pipeline API provides a high-level, decorator-based approach to building and -executing DAGs. Instead of manually creating nodes and edges, you decorate -functions with `@task` and let dagron infer the graph structure from function -parameter names. - -See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for usage patterns -and the [Contracts](/guide/advanced/contracts) guide for type validation with -pipelines. - ---- - -## @task - - - -A decorator that marks a function as a pipeline task. The DAG is inferred from -function parameter names: each parameter name corresponds to the name of an -upstream task whose return value is passed as the argument. - -When used without arguments, the task name is the function name. When used with -arguments, you can customize the name, timeout, and retry behavior. - - - -```python -from dagron import task - -@task -def extract(): - """Root task — no parameters, so no dependencies.""" - return [1, 2, 3, 4, 5] - -@task -def transform(extract): - """Depends on 'extract'. Receives extract's return value.""" - return [x * 10 for x in extract] - -@task -def load(transform): - """Depends on 'transform'. 
Receives transform's return value.""" - return f"loaded {len(transform)} rows" -``` - -### Dependency inference - -The `@task` decorator inspects each parameter name and wires it as a dependency: - -| Parameter name | Matched to | -|---------------|------------| -| `extract` | The task named `"extract"` | -| `transform` | The task named `"transform"` | -| Any other name | The task with that name | - -If a parameter name does not match any task in the pipeline, an error is raised -at pipeline construction time. - -### Fan-in pattern - -Tasks with multiple parameters depend on multiple upstream tasks: - -```python -@task -def merge(api_data, db_data): - """Depends on both 'api_data' and 'db_data' tasks.""" - return {**api_data, **db_data} -``` - ---- - -## Pipeline - - - -A high-level container that builds a DAG from a list of `@task`-decorated -functions and provides execution methods. - - - -```python -from dagron import Pipeline, task - -@task -def fetch(): - return {"users": [1, 2, 3]} - -@task -def enrich(fetch): - return {**fetch, "enriched": True} - -@task -def store(enrich): - return f"stored {len(enrich)} keys" - -pipeline = Pipeline(tasks=[fetch, enrich, store], name="user-pipeline") -``` - ---- - -### Properties - - DAG`} /> - -Access the underlying [DAG](/api/core/core) instance. Useful for inspection, -visualization, or passing to lower-level executors. - -```python -print(pipeline.dag.node_count()) # 3 -print(pipeline.dag.topological_sort()) # [fetch, enrich, store] -print(pipeline.dag.to_mermaid()) # Mermaid diagram string -``` - - list[str]`} /> - -Return the names of all tasks in the pipeline, in topological order. - -```python -print(pipeline.task_names) # ["fetch", "enrich", "store"] -``` - ---- - -### execute - - ExecutionResult`} /> - -Execute the pipeline synchronously. Task return values are automatically passed -as arguments to downstream tasks based on parameter names. 
- - - -**Returns:** [ExecutionResult](/api/execution/execution) — the aggregate execution -result. - -```python -result = pipeline.execute(max_workers=4) - -print(result.succeeded) # 3 -print(result.node_results["store"].result) # "stored 3 keys" -print(f"Took {result.total_duration_seconds:.3f}s") -``` - -#### Overriding tasks - -Use `overrides` to replace tasks at execution time without modifying the -pipeline definition. This is especially useful for testing: - -```python -result = pipeline.execute(overrides={ - "fetch": lambda: {"users": [99]}, # mock data -}) -print(result.node_results["store"].result) # "stored 2 keys" -``` - ---- - -### execute_async - - ExecutionResult`} /> - -Execute the pipeline asynchronously. All tasks should be async callables. - - - -```python -import asyncio -from dagron import Pipeline, task - -@task -async def fetch(): - await asyncio.sleep(0.1) - return [1, 2, 3] - -@task -async def process(fetch): - return [x * 2 for x in fetch] - -pipeline = Pipeline(tasks=[fetch, process]) -result = asyncio.run(pipeline.execute_async()) -print(result.node_results["process"].result) # [2, 4, 6] -``` - ---- - -### validate_contracts - - list[ContractViolation]`} /> - -Validate type contracts across all pipeline edges. If tasks have type -annotations, those are used as implicit contracts. You can supply additional -explicit contracts via `extra_contracts`. - - - -**Returns:** `list[ContractViolation]` — empty if all contracts are consistent. 
- -```python -from dagron import Pipeline, task - -@task -def fetch() -> list: - return [1, 2, 3] - -@task -def process(fetch: list) -> dict: - return {"data": fetch} - -pipeline = Pipeline(tasks=[fetch, process]) -violations = pipeline.validate_contracts() -assert len(violations) == 0 -``` - ---- - -## Complete Example - -A realistic data pipeline with fan-out, fan-in, callbacks, and tracing: - -```python -from dagron import Pipeline, task, ExecutionCallbacks - -@task -def api_source(): - return {"source": "api", "rows": 100} - -@task -def db_source(): - return {"source": "db", "rows": 250} - -@task -def clean_api(api_source): - return {**api_source, "cleaned": True} - -@task -def clean_db(db_source): - return {**db_source, "cleaned": True} - -@task -def merge(clean_api, clean_db): - total = clean_api["rows"] + clean_db["rows"] - return {"total_rows": total, "sources": 2} - -@task -def publish(merge): - return f"Published {merge['total_rows']} rows from {merge['sources']} sources" - -pipeline = Pipeline( - tasks=[api_source, db_source, clean_api, clean_db, merge, publish], - name="etl-pipeline", -) - -# Inspect the generated DAG -print(pipeline.dag.to_mermaid()) -print(f"Parallelism levels: {pipeline.dag.topological_levels()}") - -# Execute with callbacks and tracing -result = pipeline.execute( - max_workers=4, - callbacks=ExecutionCallbacks( - on_start=lambda n: print(f" Starting {n}..."), - on_complete=lambda n, r: print(f" Finished {n}"), - ), - enable_tracing=True, -) - -print(f"\n{result.node_results['publish'].result}") -# Published 350 rows from 2 sources -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the lower-level thread-pool executor used internally. -- [DAGBuilder](/api/core/builder) — the builder that the Pipeline constructs behind the scenes. -- [Contracts](/guide/advanced/contracts) — type contract validation guide. -- [Tracing & Profiling](/guide/observability/tracing-profiling) — visualizing pipeline traces. 
diff --git a/docs/pages/api/execution/reactive.mdx b/docs/pages/api/execution/reactive.mdx deleted file mode 100644 index d40c04f..0000000 --- a/docs/pages/api/execution/reactive.mdx +++ /dev/null @@ -1,380 +0,0 @@ ---- -sidebar_position: 25 -title: Reactive DAG -description: API reference for dagron's reactive DAG execution -- push-based incremental recomputation with subscriptions and early cutoff. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Reactive DAG - -The reactive module extends dagron's execution model into a push-based -reactive system. When you set an input value, the `ReactiveDAG` -automatically cascades recomputation through the graph, only recomputing -nodes whose inputs have actually changed (early cutoff). Subscriber -callbacks are fired whenever a node's value changes, enabling live -dashboards, incremental pipelines, and interactive data exploration. - -```python -from dagron.execution.reactive import ReactiveDAG -``` - ---- - -## ReactiveDAG - - None: ...`} /> - -Push-based reactive DAG execution system. Setting an input value -automatically cascades recomputation through the graph and fires subscriber -callbacks when outputs change. - -Each task function receives keyword arguments named after its predecessor -nodes, with the current values of those predecessors. For example, a node -`"transform"` with predecessor `"extract"` receives -`transform(extract=)`. 
- - - -```python -import dagron -from dagron.execution.reactive import ReactiveDAG - -dag = ( - dagron.DAG.builder() - .add_edge("raw", "cleaned") - .add_edge("cleaned", "features") - .add_edge("features", "prediction") - .build() -) - -tasks = { - "raw": lambda: None, # Input node -- value set externally - "cleaned": lambda raw=None: [x.strip() for x in raw] if raw else [], - "features": lambda cleaned=None: len(cleaned) if cleaned else 0, - "prediction": lambda features=None: features > 5 if features else False, -} - -reactive = ReactiveDAG(dag, tasks) -``` - -### Properties - ---- - -#### ReactiveDAG.dag - - DAG`} /> - -The underlying DAG. - -**Returns:** `DAG` -- The DAG defining the dependency structure. - ---- - -#### ReactiveDAG.values - - dict[str, Any]`} /> - -Current values of all computed nodes. Returns a read-only copy. - -**Returns:** `dict[str, Any]` -- Mapping of node names to their current values. - -```python -all_values = reactive.values -for name, value in all_values.items(): - print(f"{name}: {value}") -``` - -### Methods - ---- - -#### ReactiveDAG.initialize - - dict[str, Any]`} /> - -Compute all nodes in topological order. This performs the initial full -computation. After this, use `set_input()` for incremental updates. - -Any values pre-set via `set_input()` before initialization are preserved -and used during the computation. - -**Returns:** `dict[str, Any]` -- Dictionary of all computed values. - -```python -reactive = ReactiveDAG(dag, tasks) -values = reactive.initialize() -print(f"Initial prediction: {values.get('prediction')}") -``` - ---- - -#### ReactiveDAG.set_input - - dict[str, Any]`} /> - -Set an input value and cascade recomputation through the graph. Only -nodes that are transitively downstream of the changed input are considered -for recomputation, and the early cutoff optimization skips nodes whose -computed value has not actually changed. 
- -If `initialize()` has not been called yet, this method stores the value -and then calls `initialize()` automatically. - - - -**Returns:** `dict[str, Any]` -- Dictionary of all nodes that were recomputed, mapping node name to new value. Includes the input node itself. - -```python -# Set a new input and see what changed -changed = reactive.set_input("raw", [" Alice ", " Bob ", " Charlie "]) -print(f"Changed nodes: {list(changed.keys())}") -# Changed nodes: ['raw', 'cleaned', 'features', 'prediction'] - -# Set the same value again -- early cutoff prevents recomputation -changed = reactive.set_input("raw", [" Alice ", " Bob ", " Charlie "]) -print(f"Changed nodes: {list(changed.keys())}") -# Changed nodes: {} (nothing changed) -``` - ---- - -#### ReactiveDAG.set_inputs - - dict[str, Any]`} /> - -Set multiple input values and cascade recomputation. More efficient than -calling `set_input()` multiple times because it computes the combined dirty -set and processes all changes in a single topological pass. - - - -**Returns:** `dict[str, Any]` -- Dictionary of all nodes that were recomputed. - -```python -changed = reactive.set_inputs({ - "raw": [" Alice ", " Bob "], -}) -``` - ---- - -#### ReactiveDAG.subscribe - - Callable[[], None]`} /> - -Subscribe to changes on a specific node. The callback is called with -`(node_name, new_value)` whenever the node's value changes during -`set_input()` or `initialize()`. - - - -**Returns:** `Callable[[], None]` -- An unsubscribe function. Call it to remove the subscription. - -```python -def on_prediction_change(name: str, value): - print(f"Prediction updated: {value}") - -unsubscribe = reactive.subscribe("prediction", on_prediction_change) - -reactive.set_input("raw", ["a", "b", "c", "d", "e", "f"]) -# Prints: Prediction updated: True - -# Stop listening -unsubscribe() -``` - ---- - -#### ReactiveDAG.subscribe_all - - Callable[[], None]`} /> - -Subscribe to changes on any node in the DAG. 
The callback is called with -`(node_name, new_value)` whenever any node's value changes. - - - -**Returns:** `Callable[[], None]` -- An unsubscribe function. - -```python -changes_log = [] - -def log_all(name: str, value): - changes_log.append((name, value)) - -unsub = reactive.subscribe_all(log_all) -reactive.set_input("raw", ["x", "y"]) - -print(f"Total changes: {len(changes_log)}") -for name, val in changes_log: - print(f" {name} = {val}") -``` - ---- - -#### ReactiveDAG.get - - Any`} /> - -Get the current value of a node. - - - -**Returns:** `Any` -- The node's current value, or `None` if not yet computed. - -```python -prediction = reactive.get("prediction") -features = reactive.get("features") -print(f"Features: {features}, Prediction: {prediction}") -``` - ---- - -## Early cutoff - -The reactive system implements early cutoff optimization. When a node is -recomputed and its new value equals the old value (via `==`), downstream -nodes are not recomputed. This prevents unnecessary cascading through the -graph. - -```python -import dagron -from dagron.execution.reactive import ReactiveDAG - -dag = ( - dagron.DAG.builder() - .add_edge("input", "round") - .add_edge("round", "format") - .build() -) - -tasks = { - "input": lambda: None, - "round": lambda input=None: round(input, 2) if input else 0, - "format": lambda round=None: f"Value: {round}", -} - -reactive = ReactiveDAG(dag, tasks) -reactive.initialize() - -# Setting 3.14159 -- round(3.14159, 2) = 3.14 -changed = reactive.set_input("input", 3.14159) -print(f"Changed: {list(changed.keys())}") -# Changed: ['input', 'round', 'format'] - -# Setting 3.14001 -- round(3.14001, 2) = 3.14 (same!) 
-changed = reactive.set_input("input", 3.14001) -print(f"Changed: {list(changed.keys())}") -# Changed: ['input', 'round'] -# 'format' was NOT recomputed because 'round' value didn't change -``` - ---- - -## Complete example - -```python -import dagron -from dagron.execution.reactive import ReactiveDAG - -# Build a reactive data processing pipeline -dag = ( - dagron.DAG.builder() - .add_edge("raw_data", "clean") - .add_edge("clean", "stats") - .add_edge("clean", "top_n") - .add_edge("stats", "report") - .add_edge("top_n", "report") - .build() -) - -tasks = { - "raw_data": lambda: None, - "clean": lambda raw_data=None: ( - [x for x in raw_data if x is not None] if raw_data else [] - ), - "stats": lambda clean=None: ( - {"count": len(clean), "sum": sum(clean)} if clean else {} - ), - "top_n": lambda clean=None: ( - sorted(clean, reverse=True)[:3] if clean else [] - ), - "report": lambda stats=None, top_n=None: ( - f"Count: {stats.get('count', 0)}, Top 3: {top_n}" - ), -} - -# Create the reactive DAG -reactive = ReactiveDAG(dag, tasks) - -# Subscribe to the report node -def on_report(name, value): - print(f"Report updated: {value}") - -reactive.subscribe("report", on_report) - -# Initialize with initial data -reactive.set_input("raw_data", [10, 20, None, 30, 5, 15]) -# Report updated: Count: 5, Top 3: [30, 20, 15] - -# Update with new data -- only affected nodes recompute -changed = reactive.set_input("raw_data", [10, 20, 30, 5, 15, 25]) -print(f"\nRecomputed: {list(changed.keys())}") -# Recomputed: ['raw_data', 'clean', 'stats', 'top_n', 'report'] - -# Check current values -print(f"\nCurrent stats: {reactive.get('stats')}") -print(f"Current top 3: {reactive.get('top_n')}") -print(f"Current report: {reactive.get('report')}") - -# Set multiple inputs at once -changes = reactive.set_inputs({"raw_data": [100, 200, 300]}) -print(f"\nBatch update changed {len(changes)} nodes") -``` - ---- - -## See also - -- [Execution](/api/execution/execution) -- standard batch execution 
with `DAGExecutor`. -- [Plugins](/api/utilities/plugins) -- hook-based lifecycle extensions. -- [Tracing](/api/observability/tracing) -- recording events during reactive updates. diff --git a/docs/pages/api/execution/resources.mdx b/docs/pages/api/execution/resources.mdx deleted file mode 100644 index a241198..0000000 --- a/docs/pages/api/execution/resources.mdx +++ /dev/null @@ -1,375 +0,0 @@ ---- -sidebar_position: 12 -title: "Resource Scheduling" -description: "API reference for ResourceAwareExecutor, ResourcePool, ResourceRequirements, and resource tracking — GPU, CPU, and memory-aware scheduling." ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Resource Scheduling - -The resource scheduling module extends the standard executor with capacity-aware -scheduling. Nodes declare their resource requirements (GPU, CPU, memory, or -custom resources), and the executor only dispatches a node when the resource pool -has sufficient capacity. This prevents oversubscription and enables scheduling of -heterogeneous workloads. - -See the [Resource Scheduling](/guide/execution-strategies/resource-scheduling) guide for usage -patterns and capacity planning. - ---- - -## ResourceAwareExecutor - - - -A synchronous executor that checks resource availability before dispatching each -node. When a node cannot be scheduled due to insufficient resources, it blocks -until resources are released by completed nodes. - - - -### execute - - ExecutionResult`} /> - -Execute tasks with resource-aware scheduling. 
- - - -**Returns:** [ExecutionResult](/api/execution/execution) - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("preprocess") - .add_node("train_model_a") - .add_node("train_model_b") - .add_node("evaluate") - .add_edge("preprocess", "train_model_a") - .add_edge("preprocess", "train_model_b") - .add_edge("train_model_a", "evaluate") - .add_edge("train_model_b", "evaluate") - .build() -) - -pool = dagron.ResourcePool(capacities={"gpu": 2, "cpu": 8, "memory_mb": 16000}) - -requirements = { - "preprocess": dagron.ResourceRequirements(resources={"cpu": 2, "memory_mb": 2000}), - "train_model_a": dagron.ResourceRequirements(resources={"gpu": 1, "cpu": 4, "memory_mb": 8000}), - "train_model_b": dagron.ResourceRequirements(resources={"gpu": 1, "cpu": 4, "memory_mb": 8000}), - "evaluate": dagron.ResourceRequirements(resources={"cpu": 2, "memory_mb": 4000}), -} - -executor = dagron.ResourceAwareExecutor(dag, pool, requirements) -result = executor.execute({ - "preprocess": lambda: "data ready", - "train_model_a": lambda: "model A trained", - "train_model_b": lambda: "model B trained", - "evaluate": lambda: "evaluation complete", -}) - -print(result.succeeded) # 4 -``` - ---- - -## AsyncResourceAwareExecutor - - - -An async variant of `ResourceAwareExecutor`. Same interface but tasks must be -async callables. - - - -### execute (async) - - ExecutionResult`} /> - -Execute async tasks with resource-aware scheduling. - -```python -import asyncio -import dagron - -async def main(): - executor = dagron.AsyncResourceAwareExecutor(dag, pool, requirements) - result = await executor.execute({ - "preprocess": lambda: preprocess_async(), - "train_model_a": lambda: train_async("model_a"), - "train_model_b": lambda: train_async("model_b"), - "evaluate": lambda: evaluate_async(), - }) - print(result.succeeded) - -asyncio.run(main()) -``` - ---- - -## ResourcePool - - - -A pool of named resources with finite capacities. 
Resources are acquired before -a node runs and released after it completes (or fails). - - - -### can_satisfy - - bool`} /> - -Return `True` if the pool's total capacity can satisfy the given requirements -(ignoring current allocation). Useful for validating requirements before -execution. - -### try_acquire - - bool`} /> - -Attempt to acquire resources without blocking. Returns `True` if successful, -`False` if insufficient resources are currently available. - -### acquire - - bool`} /> - -Acquire resources, blocking until they become available or the timeout expires. -Returns `True` on success, `False` on timeout. - - - -### release - - None`} /> - -Release previously acquired resources back to the pool. - -### Properties - - dict[str, float]`} /> - -The total capacity of each resource. - - dict[str, float]`} /> - -The currently available (unallocated) amount of each resource. - - dict[str, float]`} /> - -The currently allocated amount of each resource. - - ResourceTimeline`} /> - -A timeline of resource allocation events for visualization and debugging. - -```python -pool = dagron.ResourcePool(capacities={"gpu": 4, "cpu": 16, "memory_mb": 32000}) - -print(pool.capacities) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} -print(pool.available) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} - -req = dagron.ResourceRequirements(resources={"gpu": 2}) -pool.acquire(req) -print(pool.available) # {"gpu": 2, "cpu": 16, "memory_mb": 32000} -print(pool.allocated) # {"gpu": 2, "cpu": 0, "memory_mb": 0} - -pool.release(req) -print(pool.available) # {"gpu": 4, "cpu": 16, "memory_mb": 32000} -``` - ---- - -## ResourceRequirements - - - -A set of resource requirements for a single node. - - - -### Convenience Constructors - - ResourceRequirements`} /> - -Create a requirement for `n` GPUs. - - ResourceRequirements`} /> - -Create a requirement for `n` CPU cores. - - ResourceRequirements`} /> - -Create a requirement for `mb` megabytes of memory. 
- -### fits - - bool`} /> - -Return `True` if the given available resources can satisfy this requirement. - - - -```python -# Explicit construction -req = dagron.ResourceRequirements(resources={"gpu": 2, "cpu": 4, "memory_mb": 8000}) - -# Convenience constructors -gpu_req = dagron.ResourceRequirements.gpu(1) -cpu_req = dagron.ResourceRequirements.cpu(4) -mem_req = dagron.ResourceRequirements.memory(4096) - -# Check fit -print(req.fits({"gpu": 4, "cpu": 16, "memory_mb": 32000})) # True -print(req.fits({"gpu": 1, "cpu": 16, "memory_mb": 32000})) # False (needs 2 GPUs) -``` - ---- - -## ResourceSnapshot - - - -A point-in-time snapshot of resource allocation. Recorded by the -`ResourceTimeline`. - -| Property | Type | Description | -|----------|------|-------------| -| `timestamp` | `float` | Unix timestamp of the snapshot. | -| `allocated` | `dict[str, float]` | Allocated resources at this point. | -| `available` | `dict[str, float]` | Available resources at this point. | -| `node_name` | `str | None` | The node that triggered this allocation event, if any. | -| `event` | `str` | Event type: `"acquire"` or `"release"`. | - ---- - -## ResourceTimeline - - - -A recorded timeline of resource allocation events. Access via -`ResourcePool.timeline`. - -### record - - None`} /> - -Manually record a snapshot (typically done automatically by the pool). - -### snapshots - - list[ResourceSnapshot]`} /> - -All recorded snapshots in chronological order. - -### peak_utilization - - dict[str, float]`} /> - -Return the peak utilization (as a fraction 0.0-1.0) for each resource across -the entire timeline. 
- -```python -result = executor.execute(tasks) - -timeline = pool.timeline -print(f"Snapshots: {len(timeline.snapshots)}") -print(f"Peak utilization: {timeline.peak_utilization()}") -# {"gpu": 1.0, "cpu": 0.75, "memory_mb": 0.5} - -for snap in timeline.snapshots: - print(f" t={snap.timestamp:.3f} {snap.event} {snap.node_name}: gpu={snap.allocated.get('gpu', 0)}") -``` - ---- - -## Related - -- [DAGExecutor](/api/execution/execution) — the base executor without resource awareness. -- [Distributed Execution](/api/execution/distributed) — multi-backend execution for cluster workloads. -- [Execution Plans](/api/core/core) — cost-aware scheduling at the graph level. -- [Resource Scheduling guide](/guide/execution-strategies/resource-scheduling) — usage patterns and capacity planning. diff --git a/docs/pages/api/observability/profiling.mdx b/docs/pages/api/observability/profiling.mdx deleted file mode 100644 index 701ad7c..0000000 --- a/docs/pages/api/observability/profiling.mdx +++ /dev/null @@ -1,273 +0,0 @@ ---- -sidebar_position: 15 -title: Profiling -description: API reference for dagron's post-execution profiling -- critical path analysis, slack computation, bottleneck detection, and parallelism efficiency. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Profiling - -The profiling module analyzes completed executions against the DAG structure -to identify the critical path, compute slack for every node, detect -bottlenecks, and measure parallelism efficiency. Unlike tracing (which -records events in real time), profiling is a post-execution analysis step -that requires both the DAG and a completed `ExecutionResult`. - -For a guided walkthrough, see [Tracing & Profiling](/guide/observability/tracing-profiling). 
- -```python -from dagron.execution.profiling import profile_execution, ProfileReport, NodeProfile -``` - ---- - -## profile_execution - - ProfileReport`} /> - -Analyze an execution result against the DAG structure. This function -performs a forward and backward pass over the DAG to compute earliest -start times, latest start times, slack, and critical path membership -from actual recorded durations. - - - -**Returns:** `ProfileReport` -- A complete profiling report with per-node -analysis, critical path, bottlenecks, and efficiency metrics. - -The function performs the following analysis: - -1. **Forward pass** -- compute earliest start time for each node based on predecessor completion times. -2. **Backward pass** -- compute latest start time from the makespan working backwards. -3. **Slack** -- the difference between latest and earliest start times. Zero-slack nodes are on the critical path. -4. **Bottleneck scoring** -- nodes ranked by `duration * (1 + descendant_count)`. -5. **Parallelism efficiency** -- ratio of total work to makespan, indicating how well the DAG exploits concurrency. - -```python -import dagron -from dagron.execution.profiling import profile_execution - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform_a") - .add_edge("extract", "transform_b") - .add_edge("transform_a", "merge") - .add_edge("transform_b", "merge") - .add_edge("merge", "load") - .build() -) - -executor = dagron.DAGExecutor(dag, max_workers=4) -result = executor.execute(tasks) - -report = profile_execution(dag, result) -print(report.summary()) -``` - -:::note -Only nodes with `NodeStatus.COMPLETED` are included in the analysis. -Failed, skipped, or cancelled nodes are excluded from critical path -and slack computations. -::: - ---- - -## ProfileReport - - - -Complete profiling report for a DAG execution. Contains per-node profiles, -the critical path, bottleneck rankings, and overall efficiency metrics. 
- - - -### Methods - ---- - -#### ProfileReport.summary - - str`} /> - -Return a human-readable summary of the profiling report. - -**Returns:** `str` -- Multi-line summary including critical path, efficiency, and bottleneck list. - -```python -print(report.summary()) -# Profile Report -# Nodes profiled: 5 -# Critical path: extract -> transform_a -> merge -> load -# Critical path duration: 2.3456s -# Parallelism efficiency: 1.80 -# Max parallelism: 2 -# Bottlenecks: extract, merge, transform_a, transform_b, load -``` - ---- - -#### ProfileReport.to_dict - - dict[str, Any]`} /> - -Convert the report to a plain dictionary suitable for JSON serialization -or logging. - -**Returns:** `dict[str, Any]` -- Dictionary representation of the report, including nested node profiles. - -```python -import json - -report_dict = report.to_dict() -print(json.dumps(report_dict, indent=2)) - -# Access specific node data -extract_profile = report_dict["node_profiles"]["extract"] -print(f"Extract slack: {extract_profile['slack']:.4f}s") -``` - ---- - -## NodeProfile - - - -Profile data for a single node, computed from the forward/backward pass -over actual execution timings. - - - -```python -for name, profile in report.node_profiles.items(): - status = "CRITICAL" if profile.on_critical_path else f"slack={profile.slack:.3f}s" - print(f"{name}: {profile.duration:.3f}s ({status}), blocks {profile.blocked_descendants} nodes") -``` - -### Understanding slack - -Slack represents how much a node's start time can be delayed without -affecting the overall pipeline completion time. Nodes with zero slack -form the critical path -- any delay in these nodes directly delays the -entire execution. 
- -```python -# Find nodes with scheduling flexibility -flexible = [ - (name, p.slack) - for name, p in report.node_profiles.items() - if p.slack > 0 -] -flexible.sort(key=lambda x: -x[1]) - -for name, slack in flexible: - print(f"{name}: can be delayed by {slack:.3f}s without impact") -``` - ---- - -## Complete example - -```python -import dagron -from dagron.execution.profiling import profile_execution - -# Build and execute a pipeline -dag = ( - dagron.DAG.builder() - .add_edge("fetch_api", "parse_api") - .add_edge("fetch_db", "parse_db") - .add_edge("parse_api", "merge") - .add_edge("parse_db", "merge") - .add_edge("merge", "validate") - .add_edge("validate", "store") - .build() -) - -import time - -tasks = { - "fetch_api": lambda: time.sleep(0.5) or "api_data", - "fetch_db": lambda: time.sleep(0.3) or "db_data", - "parse_api": lambda: time.sleep(0.2) or "parsed_api", - "parse_db": lambda: time.sleep(0.1) or "parsed_db", - "merge": lambda: time.sleep(0.15) or "merged", - "validate": lambda: time.sleep(0.05) or "valid", - "store": lambda: time.sleep(0.1) or "stored", -} - -executor = dagron.DAGExecutor(dag, max_workers=4) -result = executor.execute(tasks) - -# Profile the execution -report = profile_execution(dag, result) - -# Print the overall summary -print(report.summary()) - -# Identify the critical path -print("\nCritical path:") -for node_name in report.critical_path: - p = report.node_profiles[node_name] - print(f" {node_name}: {p.duration:.3f}s") -print(f" Total: {report.critical_path_duration:.3f}s") - -# Find optimization opportunities -print("\nOptimization targets (nodes with most blocked descendants):") -ranked = sorted( - report.node_profiles.values(), - key=lambda p: p.blocked_descendants, - reverse=True, -) -for p in ranked[:3]: - print(f" {p.name}: blocks {p.blocked_descendants} nodes, duration {p.duration:.3f}s") - -# Export for logging -import json -print(json.dumps(report.to_dict(), indent=2)) -``` - ---- - -## See also - -- 
[Tracing](/api/observability/tracing) -- the event recording system that feeds execution timings. -- [Analysis](/api/analysis/analysis) -- structural analysis including `explain()` and `what_if()`. -- [Tracing & Profiling guide](/guide/observability/tracing-profiling) -- end-to-end walkthrough. diff --git a/docs/pages/api/observability/tracing.mdx b/docs/pages/api/observability/tracing.mdx deleted file mode 100644 index be202b7..0000000 --- a/docs/pages/api/observability/tracing.mdx +++ /dev/null @@ -1,328 +0,0 @@ ---- -sidebar_position: 14 -title: Tracing -description: API reference for dagron's execution tracing system — record, query, and export structured timeline events from DAG execution. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Tracing - -The tracing module provides a structured timeline log of every event that occurs -during DAG execution. Traces capture node starts, completions, failures, gate -interactions, resource acquisitions, and cache events. You can export traces as -JSON or in Chrome Tracing format for visualization in `chrome://tracing`. - -For a higher-level introduction, see the [Tracing & Profiling](/guide/observability/tracing-profiling) -guide. - -```python -from dagron.execution.tracing import ExecutionTrace, TraceEvent, TraceEventType -``` - ---- - -## TraceEventType - - - -An enumeration of all event types that can be recorded during DAG execution. -Each value corresponds to a distinct lifecycle moment. - -### Event categories - -| Category | Events | Description | -|----------|--------|-------------| -| **Execution** | `EXECUTION_STARTED`, `EXECUTION_COMPLETED` | Overall execution boundaries. | -| **Step** | `STEP_STARTED`, `STEP_COMPLETED` | Topological level boundaries within execution. 
| -| **Node lifecycle** | `NODE_STARTED`, `NODE_COMPLETED`, `NODE_FAILED`, `NODE_SKIPPED`, `NODE_TIMED_OUT`, `NODE_CANCELLED` | Individual node state transitions. | -| **Gates** | `NODE_GATE_WAITING`, `NODE_GATE_RESOLVED` | Approval gate interactions. See [Gates](/api/execution/execution). | -| **Resources** | `RESOURCE_ACQUIRED`, `RESOURCE_RELEASED` | Resource pool acquisition and release. | -| **Cache** | `NODE_CACHE_HIT`, `NODE_CACHE_MISS` | Content-addressable cache interactions. | - -```python -# Check event type -event = trace.events[0] -if event.event_type == TraceEventType.NODE_FAILED: - print(f"Node {event.node_name} failed: {event.error}") -``` - ---- - -## TraceEvent - - - -A single trace event captured during execution. Each event has a type, a -monotonic timestamp relative to the start of recording, and optional fields -that vary by event type. - - - -```python -for event in trace.events: - if event.node_name: - print(f"[{event.timestamp:.4f}s] {event.event_type.value}: {event.node_name}") - if event.duration: - print(f" Duration: {event.duration:.4f}s") - if event.error: - print(f" Error: {event.error}") -``` - ---- - -## ExecutionTrace - - None: ...`} /> - -A structured timeline log that collects events during DAG execution. The -executor creates an `ExecutionTrace` automatically when tracing is enabled. -You can also create one manually for custom recording scenarios. - -The trace uses `time.monotonic()` internally, so all timestamps are relative -to the first recorded event and monotonically increasing. 
- -```python -from dagron.execution.tracing import ExecutionTrace, TraceEventType - -trace = ExecutionTrace() -trace.record(TraceEventType.EXECUTION_STARTED) -trace.record(TraceEventType.NODE_STARTED, node_name="extract") -trace.record(TraceEventType.NODE_COMPLETED, node_name="extract", duration=0.45) -trace.record(TraceEventType.EXECUTION_COMPLETED) - -print(len(trace.events)) # 4 -``` - -### Methods - ---- - -#### ExecutionTrace.record - - None`} /> - -Record a trace event. The timestamp is captured automatically using -`time.monotonic()` relative to the first recorded event. - - - -```python -trace.record( - TraceEventType.NODE_FAILED, - node_name="transform", - error="ValueError: missing column 'id'", - metadata={"retry_count": 2}, -) -``` - ---- - -#### ExecutionTrace.events - - list[TraceEvent]`} /> - -Returns a copy of all recorded events in chronological order. - -**Returns:** `list[TraceEvent]` -- All events recorded so far. - -```python -for event in trace.events: - print(f"{event.event_type.value} at {event.timestamp:.4f}s") -``` - ---- - -#### ExecutionTrace.events_for_node - - list[TraceEvent]`} /> - -Filter events for a specific node. Returns only events where `node_name` -matches the given name. - - - -**Returns:** `list[TraceEvent]` -- Events associated with the named node. - -```python -extract_events = trace.events_for_node("extract") -for e in extract_events: - print(f" {e.event_type.value}: {e.timestamp:.4f}s") -# NODE_STARTED: 0.0001s -# NODE_COMPLETED: 0.4502s -``` - ---- - -#### ExecutionTrace.to_json - - str`} /> - -Export the trace as a JSON string. Each event becomes a JSON object with -`event_type`, `timestamp`, and any non-None optional fields. - -**Returns:** `str` -- Pretty-printed JSON array of event objects. 
- -```python -import json - -json_str = trace.to_json() -events = json.loads(json_str) -print(events[0]) -# {"event_type": "execution_started", "timestamp": 0.0} -``` - ---- - -#### ExecutionTrace.to_chrome_trace - - str`} /> - -Export the trace in [Chrome Tracing format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview). -The output is a JSON string compatible with `chrome://tracing` or -[Perfetto](https://ui.perfetto.dev/). - -Node executions become Duration events (`B`/`E` pairs). Each unique node -gets its own thread ID for visual separation. Timestamps are converted -to microseconds. - -**Returns:** `str` -- Chrome Tracing JSON string. - -```python -# Write the trace to a file and open in chrome://tracing -chrome_json = trace.to_chrome_trace() -with open("trace.json", "w") as f: - f.write(chrome_json) -``` - ---- - -#### ExecutionTrace.summary - - str`} /> - -Return a human-readable summary of the trace, including total event count, -unique node count, and counts by outcome (completed, failed, skipped, -timed out, cancelled). - -**Returns:** `str` -- Multi-line summary string. 
- -```python -print(trace.summary()) -# Execution Trace Summary -# Total events: 14 -# Unique nodes: 5 -# Completed: 4 -# Failed: 1 -# Skipped: 0 -# Timed out: 0 -# Cancelled: 0 -# Duration: 1.2345s -``` - ---- - -## Complete example - -```python -import dagron -from dagron.execution.tracing import ExecutionTrace, TraceEventType - -# Build a DAG -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -# Execute with tracing enabled -executor = dagron.DAGExecutor(dag, trace=True) -tasks = { - "extract": lambda: [1, 2, 3], - "transform": lambda: [2, 4, 6], - "load": lambda: "done", -} -result = executor.execute(tasks) - -# Access the trace from the result -trace = result.trace - -# Inspect events -print(trace.summary()) - -# Find slow nodes -for event in trace.events: - if event.event_type == TraceEventType.NODE_COMPLETED and event.duration: - if event.duration > 1.0: - print(f"Slow node: {event.node_name} ({event.duration:.2f}s)") - -# Export for Chrome Tracing visualization -with open("pipeline_trace.json", "w") as f: - f.write(trace.to_chrome_trace()) - -# Export as plain JSON for custom analysis -with open("pipeline_events.json", "w") as f: - f.write(trace.to_json()) -``` - ---- - -## See also - -- [Profiling](/api/observability/profiling) -- post-execution performance analysis built on trace data. -- [Tracing & Profiling guide](/guide/observability/tracing-profiling) -- walkthrough of tracing and profiling workflows. -- [Execution](/api/execution/execution) -- `DAGExecutor` and the `trace` parameter. diff --git a/docs/pages/api/utilities/compose.mdx b/docs/pages/api/utilities/compose.mdx deleted file mode 100644 index cadda37..0000000 --- a/docs/pages/api/utilities/compose.mdx +++ /dev/null @@ -1,251 +0,0 @@ ---- -sidebar_position: 21 -title: Composition -description: API reference for dagron's DAG composition -- merge multiple DAGs into one with namespace prefixes and cross-namespace connections. 
---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Composition - -The compose module provides multi-DAG composition with automatic namespace -prefixing. Combine independent DAGs into a single unified graph, preserving -node payloads and metadata, with optional cross-namespace connections. - -```python -from dagron.compose import compose -``` - ---- - -## compose - - DAG`} /> - -Compose multiple DAGs into one with namespace prefixes. Each DAG's nodes -are prefixed with its namespace key (e.g., a node `"load"` in namespace -`"etl"` becomes `"etl/load"`). Internal edges within each DAG are preserved -with the same prefixing. Cross-namespace edges can be added via the -`connections` parameter. - - - -**Returns:** `DAG` -- A new DAG containing all nodes and edges from all input DAGs with namespaces applied, plus any cross-namespace connections. - -### Node prefixing - -Every node in the resulting DAG has a name of the form -`{namespace}{separator}{original_name}`. Payloads and metadata from the -original nodes are preserved on the prefixed nodes. - -### Edge preservation - -All edges within each input DAG are preserved with prefixed names. For -example, an edge `("a", "b")` in namespace `"etl"` becomes -`("etl/a", "etl/b")` in the composed DAG. 
- -```python -import dagron -from dagron.compose import compose - -# Build individual DAGs -etl_dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -ml_dag = ( - dagron.DAG.builder() - .add_edge("train", "evaluate") - .add_edge("evaluate", "deploy") - .build() -) - -# Compose with a cross-namespace connection -combined = compose( - dags={"etl": etl_dag, "ml": ml_dag}, - connections=[("etl/load", "ml/train")], -) - -print(list(combined.nodes())) -# ['etl/extract', 'etl/transform', 'etl/load', -# 'ml/train', 'ml/evaluate', 'ml/deploy'] - -print(combined.edge_count()) -# 5 (2 from etl + 2 from ml + 1 cross-namespace) - -print(combined.roots()) # ['etl/extract'] -print(combined.leaves()) # ['ml/deploy'] -``` - -### Custom separators - -Use the `separator` parameter to change the namespace delimiter: - -```python -combined = compose( - dags={"etl": etl_dag, "ml": ml_dag}, - connections=[("etl.load", "ml.train")], - separator=".", -) - -print(list(combined.nodes())) -# ['etl.extract', 'etl.transform', 'etl.load', -# 'ml.train', 'ml.evaluate', 'ml.deploy'] -``` - -### Composing many DAGs - -The function accepts any number of DAGs: - -```python -ingestion = dagron.DAG.builder().add_edge("fetch", "parse").build() -validation = dagron.DAG.builder().add_edge("check_schema", "check_values").build() -storage = dagron.DAG.builder().add_edge("write_db", "write_cache").build() - -pipeline = compose( - dags={ - "ingest": ingestion, - "validate": validation, - "store": storage, - }, - connections=[ - ("ingest/parse", "validate/check_schema"), - ("validate/check_values", "store/write_db"), - ], -) - -print(pipeline.node_count()) # 6 -print(pipeline.edge_count()) # 5 -``` - -### Preserving payloads and metadata - -Payloads and metadata from the original DAGs are carried over to the -composed DAG: - -```python -dag_with_payloads = dagron.DAG() -dag_with_payloads.add_node("train", payload={"epochs": 10, "lr": 0.001}) 
-dag_with_payloads.add_node("evaluate", payload={"metrics": ["acc", "f1"]}) -dag_with_payloads.add_edge("train", "evaluate") - -combined = compose(dags={"ml": dag_with_payloads}) - -# Payloads are accessible via prefixed names -print(combined.get_payload("ml/train")) -# {"epochs": 10, "lr": 0.001} -``` - ---- - -## Complete example - -```python -import dagron -from dagron.compose import compose - -# === Build sub-DAGs for different teams === - -# Data Engineering team -data_eng = ( - dagron.DAG.builder() - .add_node("raw_ingest", payload={"source": "s3://bucket/raw"}) - .add_node("clean") - .add_node("feature_store") - .add_edge("raw_ingest", "clean") - .add_edge("clean", "feature_store") - .build() -) - -# ML team -ml = ( - dagron.DAG.builder() - .add_node("train", payload={"model": "xgboost"}) - .add_node("evaluate") - .add_node("register") - .add_edge("train", "evaluate") - .add_edge("evaluate", "register") - .build() -) - -# Platform team -platform = ( - dagron.DAG.builder() - .add_node("deploy_staging") - .add_node("integration_test") - .add_node("deploy_prod") - .add_edge("deploy_staging", "integration_test") - .add_edge("integration_test", "deploy_prod") - .build() -) - -# === Compose into a unified pipeline === -full_pipeline = compose( - dags={ - "data": data_eng, - "ml": ml, - "platform": platform, - }, - connections=[ - ("data/feature_store", "ml/train"), - ("ml/register", "platform/deploy_staging"), - ], -) - -print(f"Full pipeline: {full_pipeline.node_count()} nodes, {full_pipeline.edge_count()} edges") -# Full pipeline: 9 nodes, 8 edges - -# Inspect the composed structure -for level_idx, level in enumerate(full_pipeline.topological_levels()): - names = [n.name for n in level] - print(f"Level {level_idx}: {names}") -# Level 0: ['data/raw_ingest'] -# Level 1: ['data/clean'] -# Level 2: ['data/feature_store'] -# Level 3: ['ml/train'] -# Level 4: ['ml/evaluate'] -# Level 5: ['ml/register'] -# Level 6: ['platform/deploy_staging'] -# Level 7: 
['platform/integration_test'] -# Level 8: ['platform/deploy_prod'] - -# Execute the composed DAG -tasks = { - "data/raw_ingest": lambda: "raw data", - "data/clean": lambda: "clean data", - "data/feature_store": lambda: "features", - "ml/train": lambda: "model", - "ml/evaluate": lambda: {"accuracy": 0.95}, - "ml/register": lambda: "model_v1", - "platform/deploy_staging": lambda: "staging OK", - "platform/integration_test": lambda: "tests pass", - "platform/deploy_prod": lambda: "deployed", -} - -executor = dagron.DAGExecutor(full_pipeline, max_workers=4) -result = executor.execute(tasks) -print(f"Succeeded: {result.succeeded}/{full_pipeline.node_count()}") -``` - ---- - -## See also - -- [DAG](/api/core/core) -- the core graph class. -- [Templates](/api/utilities/template) -- parameterized DAG construction. -- [Versioning](/api/utilities/versioning) -- tracking changes to composed DAGs. -- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns and composition walkthrough. -- [Graph Transforms guide](/guide/core-concepts/transforms) -- filtering, merging, and reshaping DAGs. diff --git a/docs/pages/api/utilities/display.mdx b/docs/pages/api/utilities/display.mdx deleted file mode 100644 index e649cc4..0000000 --- a/docs/pages/api/utilities/display.mdx +++ /dev/null @@ -1,255 +0,0 @@ ---- -sidebar_position: 22 -title: Display -description: API reference for dagron's display utilities -- ASCII rendering, Jupyter SVG auto-display, and custom node formatting. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Display - -The display module provides visualization utilities for DAGs. It includes -ASCII rendering for terminals and logs, and SVG generation for Jupyter -notebooks. The SVG renderer tries Graphviz first (Python package or CLI) -and falls back to an ASCII-in-SVG representation. 
- -```python -from dagron.display import pretty_print, _repr_svg_ -``` - ---- - -## pretty_print - - str`} /> - -Render the DAG as an ASCII diagram. The output is a multi-line string -suitable for printing to a terminal, writing to log files, or embedding -in text reports. - - - -**Returns:** `str` -- Multi-line ASCII string representing the DAG. - -**Raises:** `ValueError` -- If `dag.node_count()` exceeds `max_nodes`. - -### Vertical layout - -The default layout renders topological levels as rows, with edges shown -as connectors between levels: - -```python -import dagron -from dagron.display import pretty_print - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -print(pretty_print(dag)) -# [ extract ] -# | -# [ transform ] -# | -# [ load ] -``` - -### Horizontal layout - -The horizontal layout renders levels as columns, with arrows between them: - -```python -print(pretty_print(dag, layout="horizontal")) -# [ extract ]-->[ transform ]-->[ load ] -``` - -### With payloads - -```python -dag = dagron.DAG() -dag.add_node("train", payload={"epochs": 10}) -dag.add_node("evaluate", payload={"metric": "f1"}) -dag.add_edge("train", "evaluate") - -print(pretty_print(dag, show_payloads=True)) -# [ train={'epochs': 10} ] -# | -# [ evaluate={'metric': 'f1'} ] -``` - -### Custom node formatter - -```python -def short_label(name: str, payload: object) -> str: - if payload and isinstance(payload, dict): - return f"{name} ({len(payload)} params)" - return name - -print(pretty_print(dag, node_formatter=short_label)) -# [ train (1 params) ] -# | -# [ evaluate (1 params) ] -``` - -### Empty graphs - -```python -empty = dagron.DAG() -print(pretty_print(empty)) -# (empty graph) -``` - -### Large graph safety - -```python -try: - print(pretty_print(large_dag, max_nodes=10)) -except ValueError as e: - print(e) - # Graph has 100 nodes, exceeding max_nodes=10. Increase max_nodes to render. 
-``` - ---- - -## _repr_svg_ - - str`} /> - -Return an SVG representation of the DAG for Jupyter notebooks. This -function is used by dagron's Jupyter integration to provide auto-display -when a DAG object is the last expression in a cell. - - - -**Returns:** `str` -- SVG string. - -### Rendering strategy - -The function tries multiple rendering backends in order: - -1. **Empty graph** -- returns a simple SVG with `"(empty graph)"` text. -2. **Too many nodes** -- returns a summary SVG showing node and edge counts. -3. **Graphviz Python package** -- tries `graphviz.Source(dot).pipe(format='svg')`. -4. **Graphviz `dot` CLI** -- tries `dot -Tsvg` via subprocess. -5. **ASCII fallback** -- wraps the `pretty_print()` output in an SVG `` element. - -### Jupyter auto-display - -When working in a Jupyter notebook, DAG objects automatically display as -SVG. This is enabled through dagron's `_repr_svg_` integration: - -```python -# In a Jupyter notebook cell: -dag = ( - dagron.DAG.builder() - .add_edge("a", "b") - .add_edge("b", "c") - .build() -) - -dag # Auto-renders as SVG in the notebook output -``` - -### Graphviz installation - -For the best rendering quality, install Graphviz: - -```bash -# Python package -pip install graphviz - -# System package (for the dot CLI) -# Ubuntu/Debian: -sudo apt install graphviz -# macOS: -brew install graphviz -``` - -### Manual SVG export - -```python -from dagron.display import _repr_svg_ - -svg = _repr_svg_(dag) -with open("pipeline.svg", "w") as f: - f.write(svg) -``` - ---- - -## Complete example - -```python -import dagron -from dagron.display import pretty_print, _repr_svg_ - -# Build a pipeline -dag = ( - dagron.DAG.builder() - .add_node("fetch_api", payload={"url": "https://api.example.com"}) - .add_node("fetch_db", payload={"table": "users"}) - .add_node("merge") - .add_node("validate") - .add_node("store") - .add_edge("fetch_api", "merge") - .add_edge("fetch_db", "merge") - .add_edge("merge", "validate") - .add_edge("validate", 
"store") - .build() -) - -# ASCII output for terminal -print("=== Vertical ===") -print(pretty_print(dag)) - -print("\n=== Horizontal ===") -print(pretty_print(dag, layout="horizontal")) - -print("\n=== With Payloads ===") -print(pretty_print(dag, show_payloads=True)) - -# Custom formatter for a clean summary -def status_label(name: str, payload: object) -> str: - icon = ">" if payload else "-" - return f"{icon} {name}" - -print("\n=== Custom Labels ===") -print(pretty_print(dag, node_formatter=status_label)) - -# SVG export -svg = _repr_svg_(dag) -with open("pipeline.svg", "w") as f: - f.write(svg) -print(f"\nSVG exported ({len(svg)} bytes)") -``` - ---- - -## See also - -- [DAG](/api/core/core) -- the `to_dot()` and `to_mermaid()` export methods. -- [Serialization guide](/guide/core-concepts/serialization) -- full guide to serialization and visualization. diff --git a/docs/pages/api/utilities/integration.mdx b/docs/pages/api/utilities/integration.mdx deleted file mode 100644 index 225470e..0000000 --- a/docs/pages/api/utilities/integration.mdx +++ /dev/null @@ -1,240 +0,0 @@ ---- -sidebar_position: 23 -title: Integration -description: API reference for dagron's integration helpers -- build DAGs from dicts, dataclasses, and Pydantic models. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Integration - -The integration module provides helpers for building DAGs from common Python -data structures. The primary function, `from_records`, converts sequences of -dicts, dataclasses, or Pydantic models into DAGs with minimal boilerplate. - -```python -from dagron.integration import from_records -``` - ---- - -## from_records - - DAG`} /> - -Build a DAG from a sequence of records. Works with dicts, dataclasses, -and Pydantic `BaseModel` instances. Each record becomes a node, with the -node name extracted from the specified field. 
Edges and payloads are derived -via optional callback functions. - - - -**Returns:** `DAG` -- A new DAG built from the records. - -**Raises:** -- `DuplicateNodeError` -- If any records share the same name. -- `NodeNotFoundError` -- If `edge_fn` references a node that does not exist. -- `CycleError` -- If the edges derived from `edge_fn` would create a cycle. - -### Building from dicts - -The most common use case is converting a list of dictionaries, such as -records loaded from a YAML file, JSON API, or database query: - -```python -from dagron.integration import from_records - -records = [ - {"name": "extract", "depends_on": [], "config": {"source": "s3"}}, - {"name": "transform", "depends_on": ["extract"], "config": {"mode": "batch"}}, - {"name": "load", "depends_on": ["transform"], "config": {"target": "warehouse"}}, -] - -dag = from_records( - records, - edge_fn=lambda r: r["depends_on"], - payload_fn=lambda r: r["config"], -) - -print(dag.node_count()) # 3 -print(dag.edge_count()) # 2 -print(dag.get_payload("extract")) # {"source": "s3"} -``` - -### Building from dataclasses - -```python -from dataclasses import dataclass, field -from dagron.integration import from_records - -@dataclass -class TaskSpec: - name: str - dependencies: list[str] = field(default_factory=list) - timeout: float = 30.0 - -specs = [ - TaskSpec("fetch", timeout=60.0), - TaskSpec("parse", dependencies=["fetch"], timeout=10.0), - TaskSpec("validate", dependencies=["parse"]), - TaskSpec("store", dependencies=["validate"]), -] - -dag = from_records( - specs, - edge_fn=lambda s: s.dependencies, - payload_fn=lambda s: {"timeout": s.timeout}, -) - -print(list(dag.nodes())) # ['fetch', 'parse', 'validate', 'store'] -``` - -### Building from Pydantic models - -```python -from pydantic import BaseModel -from dagron.integration import from_records - -class PipelineStep(BaseModel): - name: str - depends_on: list[str] = [] - retries: int = 3 - -steps = [ - PipelineStep(name="ingest", retries=5), - 
PipelineStep(name="clean", depends_on=["ingest"]), - PipelineStep(name="publish", depends_on=["clean"], retries=1), -] - -dag = from_records( - steps, - edge_fn=lambda s: s.depends_on, - payload_fn=lambda s: {"retries": s.retries}, -) -``` - -### Custom name fields - -Use the `name_field` parameter when your records use a different field -for the node identifier: - -```python -records = [ - {"id": "step_1", "after": []}, - {"id": "step_2", "after": ["step_1"]}, - {"id": "step_3", "after": ["step_2"]}, -] - -dag = from_records( - records, - name_field="id", - edge_fn=lambda r: r["after"], -) - -print(list(dag.nodes())) # ['step_1', 'step_2', 'step_3'] -``` - -### No edges - -When `edge_fn` is not provided, the resulting DAG contains nodes but no -edges. This is useful when you want to add edges separately: - -```python -records = [{"name": "a"}, {"name": "b"}, {"name": "c"}] - -dag = from_records(records) -print(dag.node_count()) # 3 -print(dag.edge_count()) # 0 - -# Add edges manually -dag.add_edge("a", "b") -dag.add_edge("b", "c") -``` - -### Default payloads - -When `payload_fn` is not provided, the entire record is stored as the -payload: - -```python -records = [ - {"name": "extract", "source": "api", "timeout": 30}, - {"name": "load", "target": "db", "batch_size": 100}, -] - -dag = from_records(records) -print(dag.get_payload("extract")) -# {"name": "extract", "source": "api", "timeout": 30} -``` - ---- - -## Complete example - -```python -import json -from dagron.integration import from_records - -# Simulate loading pipeline config from JSON -config_json = """ -[ - {"name": "raw_data", "deps": [], "type": "source", "params": {"path": "/data/raw"}}, - {"name": "clean", "deps": ["raw_data"], "type": "transform", "params": {"drop_nulls": true}}, - {"name": "features", "deps": ["clean"], "type": "transform", "params": {"method": "tfidf"}}, - {"name": "train", "deps": ["features"], "type": "model", "params": {"epochs": 50}}, - {"name": "evaluate", "deps": 
["train"], "type": "model", "params": {"metrics": ["acc", "f1"]}}, - {"name": "deploy", "deps": ["evaluate"], "type": "deploy", "params": {"env": "prod"}} -] -""" - -records = json.loads(config_json) - -# Build DAG with structured payloads -dag = from_records( - records, - edge_fn=lambda r: r["deps"], - payload_fn=lambda r: {"type": r["type"], "params": r["params"]}, -) - -print(f"Pipeline: {dag.node_count()} nodes, {dag.edge_count()} edges") -# Pipeline: 6 nodes, 5 edges - -# Inspect the structure -for level_idx, level in enumerate(dag.topological_levels()): - names = [n.name for n in level] - print(f"Level {level_idx}: {names}") - -# Access payload metadata -for name in ["raw_data", "train", "deploy"]: - payload = dag.get_payload(name) - print(f"{name}: type={payload['type']}, params={payload['params']}") - -# Execute with task functions -import dagron - -tasks = {name: (lambda: f"completed {name}") for name in dag.nodes()} -executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) -print(f"Succeeded: {result.succeeded}/{dag.node_count()}") -``` - ---- - -## See also - -- [DAG](/api/core/core) -- the core graph class. -- [DAGBuilder](/api/core/builder) -- fluent builder for programmatic construction. -- [Templates](/api/utilities/template) -- parameterized DAG construction. -- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns overview. diff --git a/docs/pages/api/utilities/modern-api.mdx b/docs/pages/api/utilities/modern-api.mdx deleted file mode 100644 index d042544..0000000 --- a/docs/pages/api/utilities/modern-api.mdx +++ /dev/null @@ -1,199 +0,0 @@ ---- -sidebar_position: 7 -title: Modern API (NodeRef, flow, reactive, contentcache, trace) -description: API reference for the typed-handles + uniqueness modules — dagron.NodeRef, dagron.flow, dagron.Effect, dagron.reactive, dagron.contentcache, dagron.trace, dagron.stubgen. ---- - -# Modern API - -API reference for the typed-handles and uniqueness modules. 
For a -walkthrough of how they compose, see the -[Typed Handles & Reactive Engine](../../guide/typed-and-reactive) guide. - -## `dagron.NodeRef` - -```python -class NodeRef: - name: str - epoch: int -``` - -Stable handle returned by `DAG.add_node()`. Survives unrelated graph -mutations; invalidated only when the underlying node is removed (or -remove-and-readded with the same name, which yields a fresh epoch). - -```python -dag.node_ref(name: str) -> NodeRef | None -``` - -Look up the current ref for a name without mutating the DAG. - -Every public method that previously took `name: str` now takes `NodeArg = str | NodeRef`: -`add_edge`, `remove_node`, `has_node`, `has_edge`, `get_payload`, `set_payload`, -`predecessors`, `successors`, `ancestors`, `descendants`, `subgraph`, -`subgraph_by_depth`, `collapse`, `dominator_tree`, `all_paths`, -`shortest_path`, `longest_path`, `dirty_set`, `change_provenance`, -`is_ancestor`, and the `ReachabilityIndex` query methods. - -Stale refs raise `dagron.StaleNodeRefError`. - -## `dagron.flow` - -```python -@dagron.task -def fn(...) -> T: ... - -@dagron.task(effect=Effect.NETWORK) -def fn(...) -> T: ... - -@dagron.flow -def pipeline(...) -> FlowFuture[T] | None: ... -``` - -| Member | Purpose | -|---|---| -| `task` | Decorator. Outside a `@flow`, executes normally. Inside one, records the call and returns `FlowFuture[R]`. Supports `effect=` keyword (defaults to `Effect.PURE`). | -| `flow` | Decorator. Wraps a function as a `Flow`. | -| `Flow.dag()` | Trace the body and return the built `DAG`. | -| `Flow.run(*args, **kwargs)` | Trace, build, execute synchronously → `ExecutionResult`. | -| `Flow.run_async(...)` | Async variant. | -| `Flow.__call__(...)` | Sugar for `run`. | -| `FlowFuture[T]` | Generic placeholder returned from `@task` calls inside a `@flow`. Pass to other tasks to wire deps. | -| `TaskSpec` | Metadata attached to every `@task` (`name`, `fn`, `dependencies`, `is_async`, `effect`). 
| - -`batch()` semantics aren't part of `dagron.flow` — they live in -[`dagron.reactive`](#dagronreactive). - -## `dagron.Effect` - -```python -class Effect(Enum): - PURE = "pure" - READ = "read" - WRITE = "write" - NETWORK = "network" - NONDETERMINISTIC = "nondeterministic" - - is_cacheable: bool - is_deterministic: bool - is_isolated: bool -``` - -```python -def effects_of(dag: DAG) -> dict[str, Effect] -``` - -Read every node's effect tag from a DAG built by `@dagron.flow`. Returns -`Effect.PURE` for nodes without a tag. - -`DAGExecutor(enforce_effect_isolation=True)` reads these tags and runs -`NONDETERMINISTIC` nodes through a shared lock, so they don't overlap. - -## `dagron.reactive` - -```python -import dagron.reactive as dr - -s = dr.signal(value) # → Signal[T] -c = dr.computed(lambda: ...) # → Computed[T] -w = dr.watch(lambda: ...) # → Watcher (also fires once now) -with dr.batch(): ... # glitch-free coalesced updates -``` - -| Member | API | -|---|---| -| `Signal[T]` | `__call__() -> T`, `set(v: T)`, `peek() -> T` (no tracking). Equality-checked sets are no-ops. | -| `Computed[T]` | `__call__() -> T`, `peek() -> T`. Lazy memoised. | -| `Watcher` | Auto-fires when any tracked dep changes. `.dispose()` to detach. | -| `batch()` | Context manager. Defers Watcher fires until the outermost block ends. Multiple signal mutations coalesce into one fire. | -| `signal()` / `computed()` / `watch()` | Convenience factories. | - -Track via thread-local; reads inside a `Computed` body or `Watcher` body -register the source as a dep. Observers are held by `weakref.WeakSet` -so dropped derived nodes don't leak. 
- -## `dagron.contentcache` - -```python -from dagron.contentcache import ContentCache, default_cache_dir - -cache = ContentCache(cache_dir=None, hasher=None) -cache.compute_or_cached(fn, args=(), kwargs=None, effect=None) -> tuple[Any, bool] -cache.get(fingerprint: bytes) -> tuple[Any, bool] -cache.put(fingerprint: bytes, value: Any) -> None -cache.has(fingerprint: bytes) -> bool -cache.delete(fingerprint: bytes) -> None -cache.clear() -> None -cache.hash(value: Any) -> bytes # delegates to the configured Hasher -``` - -| Helper | Purpose | -|---|---| -| `default_cache_dir()` | `$DAGRON_CACHE_DIR` or `~/.cache/dagron/cas`. | -| `default_hash(value)` | pickle + blake2b 256-bit. Falls back to `repr()` for unpickleable inputs. | -| `numpy_hash(value)` | `array.tobytes()` for numpy arrays; falls back to `default_hash`. | -| `fingerprint_function(fn)` | Hashes `co_code`, `co_consts`, `co_freevars`, qualname, Python major.minor. | -| `fingerprint_node(fn, effect, input_fingerprints)` | Composite fingerprint used as the cache key. | - -`compute_or_cached` is **effect-aware**: `WRITE` / `NETWORK` / -`NONDETERMINISTIC` skip the cache entirely; `PURE` and `READ` go through -it. - -Storage layout: `///.cache` where the -fingerprint hex is ``. POSIX `rename(2)` makes writes -atomic. The filesystem itself is the index — independent processes -share intermediates with no coordination. 
- -## `dagron.trace` - -```python -from dagron.trace import TraceWriter, TraceReader, TraceRecord, ReplayedNode, replay - -writer = TraceWriter(path, cas=None) -writer.record(name, *, value=None, effect=None, duration_ns=0, - error=None, metadata=None, timestamp=None) -> TraceRecord -writer.flush() -writer.close() # also via context manager - -reader = TraceReader(path, cas=None) -reader.records() -> Iterator[TraceRecord] -reader.records_until(t, *, inclusive=True) -> Iterator[TraceRecord] -reader.timeline() -> list[tuple[float, str]] -reader.fetch(rec) -> Any # resolves payload via the CAS - -replay(source, *, at=None, cas=None) -> dict[str, ReplayedNode] -``` - -`ReplayedNode` carries `name`, `timestamp`, `value`, `effect`, -`replayable`, `duration_ns`, `error`, and a derived `has_value` -property. `replayable` mirrors `effect.is_deterministic`: pure / READ -nodes can be reproduced; impure nodes' values are *what that run -produced*, not what a fresh run would produce. - -| Helper | Purpose | -|---|---| -| `default_trace_dir()` | `$DAGRON_TRACE_DIR` or `~/.cache/dagron/traces`. | -| `new_run_id()` | 16-hex-char random id for naming a run's log file. | -| `list_runs(trace_dir=None)` | Every `*.jsonl` under `trace_dir`. | - -Logs are append-only JSONL. Payloads live in the bound `ContentCache`, -deduplicated across runs that produced the same value. - -## `dagron.stubgen` - -```python -from dagron.stubgen import generate_stub - -generate_stub( - dag, - *, - type_hints: dict[str, type | str] | None = None, - tasks: dict[str, Callable] | None = None, - name: str = "TypedExecutionResult", -) -> str -``` - -Emits `.pyi`-formatted source declaring a class with `Literal["nodename"]` -overloads typed by inferred (or explicitly provided) return types. -Drop the result into a stub file alongside your code so even string-keyed -`result["nodename"]` lookups become statically typed. 
diff --git a/docs/pages/api/utilities/plugins.mdx b/docs/pages/api/utilities/plugins.mdx deleted file mode 100644 index 272a45d..0000000 --- a/docs/pages/api/utilities/plugins.mdx +++ /dev/null @@ -1,507 +0,0 @@ ---- -sidebar_position: 24 -title: Plugins -description: API reference for dagron's plugin system -- lifecycle hooks, plugin discovery, the DashboardPlugin, and the @dagron_plugin decorator. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Plugins - -The plugin system provides extensible lifecycle hooks for DAG construction -and execution. Plugins can observe and react to events such as node starts, -completions, failures, and full execution boundaries. dagron includes a -built-in `DashboardPlugin` that serves a live web dashboard backed by a -Rust web server. - -```python -from dagron.plugins import ( - DagronPlugin, - HookRegistry, - HookEvent, - HookContext, - PluginManager, - dagron_plugin, -) -from dagron.dashboard import DashboardPlugin -``` - ---- - -## HookEvent - - - -Events that hooks can subscribe to. These correspond to lifecycle moments -during DAG construction and execution. - -| Event | When it fires | Context fields | -|-------|--------------|----------------| -| `PRE_EXECUTE` | Before execution starts. | `dag` | -| `POST_EXECUTE` | After execution completes. | `dag`, `execution_result` | -| `PRE_NODE` | Before a node task runs. | `dag`, `node_name` | -| `POST_NODE` | After a node task completes. | `dag`, `node_name`, `node_result` | -| `ON_ERROR` | When a node task fails. | `dag`, `node_name`, `error` | -| `PRE_BUILD` | Before `DAGBuilder.build()`. | `dag` | -| `POST_BUILD` | After `DAGBuilder.build()`. | `dag` | - ---- - -## HookContext - - - -Context object passed to hook callbacks. Fields are populated based on the -event type -- not all fields are set for every event. 
- - - ---- - -## HookRegistry - - None: ...`} /> - -Registry for event hooks with priority ordering. Hooks are fire-and-forget: -exceptions in callbacks are caught and issued as `RuntimeWarning`, never -propagated to the caller. - -```python -from dagron.plugins import HookRegistry, HookEvent, HookContext - -hooks = HookRegistry() - -# Register a simple logging hook -def log_node_start(ctx: HookContext): - print(f"Starting node: {ctx.node_name}") - -unregister = hooks.register(HookEvent.PRE_NODE, log_node_start) - -# Later, remove the hook -unregister() -``` - -### Methods - ---- - -#### HookRegistry.register - - Callable[[], None]`} /> - -Register a hook callback for an event. Higher priority callbacks run first. - - - -**Returns:** `Callable[[], None]` -- An unregister function. Call it to remove this hook. - -```python -# High-priority hook runs first -hooks.register(HookEvent.PRE_EXECUTE, auth_check, priority=100) -hooks.register(HookEvent.PRE_EXECUTE, log_start, priority=0) -``` - ---- - -#### HookRegistry.fire - - None`} /> - -Fire all hooks registered for the context's event. Callbacks are called -in priority order (descending). Exceptions in callbacks are caught and -issued as `RuntimeWarning`. - - - -```python -from dagron.plugins import HookContext, HookEvent - -ctx = HookContext(event=HookEvent.PRE_NODE, dag=dag, node_name="extract") -hooks.fire(ctx) -``` - ---- - -#### HookRegistry.clear - - None`} /> - -Clear all hooks, or hooks for a specific event. - - - ---- - -#### HookRegistry.hook_count - - int`} /> - -Return the number of registered hooks. - - - -**Returns:** `int` -- Number of registered hooks. - ---- - -## DagronPlugin - - str: ... - - @abstractmethod - def initialize(self, hooks: HookRegistry) -> None: ... - - def teardown(self) -> None: ...`} /> - -Abstract base class for dagron plugins. Subclass this and implement the -`name` property and `initialize()` method. The `teardown()` method is -optional. 
- -### Abstract members - -| Member | Type | Description | -|--------|------|-------------| -| `name` | `property -> str` | Unique name for the plugin (abstract). | -| `initialize(hooks)` | `method` | Called when the plugin is initialized. Register hooks here (abstract). | -| `teardown()` | `method` | Called when the plugin is torn down. Clean up resources (optional, default no-op). | - -```python -from dagron.plugins import DagronPlugin, HookRegistry, HookEvent, HookContext - -class TimingPlugin(DagronPlugin): - @property - def name(self) -> str: - return "timing" - - def initialize(self, hooks: HookRegistry) -> None: - self._starts: dict[str, float] = {} - - def on_pre_node(ctx: HookContext): - import time - if ctx.node_name: - self._starts[ctx.node_name] = time.monotonic() - - def on_post_node(ctx: HookContext): - import time - if ctx.node_name and ctx.node_name in self._starts: - elapsed = time.monotonic() - self._starts[ctx.node_name] - print(f"{ctx.node_name}: {elapsed:.3f}s") - - hooks.register(HookEvent.PRE_NODE, on_pre_node) - hooks.register(HookEvent.POST_NODE, on_post_node) - - def teardown(self) -> None: - self._starts.clear() -``` - ---- - -## PluginManager - - None: ...`} /> - -Manages plugin discovery, initialization, and teardown. Shares a single -`HookRegistry` across all managed plugins. - - - -### Properties - ---- - -#### PluginManager.hooks - - HookRegistry`} /> - -The hook registry shared by all plugins. - ---- - -#### PluginManager.plugins - - dict[str, DagronPlugin]`} /> - -Currently registered plugins, as a copy of the internal mapping. - -**Returns:** `dict[str, DagronPlugin]` -- Mapping of plugin names to plugin instances. - -### Methods - ---- - -#### PluginManager.register - - None`} /> - -Register a plugin instance. If a plugin with the same name is already -registered, it is replaced with a `RuntimeWarning`. - - - ---- - -#### PluginManager.discover - - list[str]`} /> - -Discover plugins via `entry_points(group='dagron.plugins')`. 
Each -discovered entry point is loaded, instantiated, and registered. - -**Returns:** `list[str]` -- Names of successfully discovered and registered plugins. - -```python -manager = PluginManager() -discovered = manager.discover() -print(f"Discovered plugins: {discovered}") -``` - ---- - -#### PluginManager.initialize_all - - None`} /> - -Initialize all registered plugins that have not been initialized yet. -Each plugin's `initialize()` method is called with the shared hook registry. -Failures are caught and issued as `RuntimeWarning`. - ---- - -#### PluginManager.teardown_all - - None`} /> - -Tear down all initialized plugins by calling their `teardown()` methods. -Failures are caught and issued as `RuntimeWarning`. - -```python -from dagron.plugins import PluginManager - -manager = PluginManager() -manager.register(TimingPlugin()) -manager.initialize_all() - -# Use manager.hooks with an executor -executor = dagron.DAGExecutor(dag, hooks=manager.hooks) -result = executor.execute(tasks) - -# Clean up -manager.teardown_all() -``` - ---- - -## @dagron_plugin - - type`} /> - -Class decorator that registers a plugin class with the global plugin -manager. The class must be a `DagronPlugin` subclass. It is instantiated -and registered immediately when the decorator is applied. - - - -**Returns:** `type` -- The class, unmodified. - -**Raises:** `TypeError` -- If `cls` is not a `DagronPlugin` subclass. - -```python -from dagron.plugins import dagron_plugin, DagronPlugin, HookRegistry - -@dagron_plugin -class MyPlugin(DagronPlugin): - @property - def name(self) -> str: - return "my_plugin" - - def initialize(self, hooks: HookRegistry) -> None: - # Register hooks here - pass -``` - ---- - -## DashboardPlugin - - None: ...`} /> - -A plugin that serves a live web dashboard showing real-time DAG execution -status. The web server runs in Rust (axum + tokio) on a background OS -thread for minimal Python overhead. 
- - - -:::note -The `DashboardPlugin` requires dagron to be built with the `dashboard` -Cargo feature. If the feature is not available, `initialize()` raises -`ImportError` with instructions to rebuild. -::: - -### Registered hooks - -The dashboard automatically registers hooks for these events: - -| Event | Behavior | -|-------|----------| -| `PRE_EXECUTE` | Resets the dashboard state, sends the DAG structure. | -| `PRE_NODE` | Marks a node as "started" in the UI. | -| `POST_NODE` | Marks a node as "completed" in the UI. | -| `ON_ERROR` | Marks a node as "failed" with the error message. | -| `POST_EXECUTE` | Sends final execution statistics. | - -```python -import dagron -from dagron.dashboard import DashboardPlugin -from dagron.plugins import HookRegistry - -# Set up hooks and dashboard -hooks = HookRegistry() -dashboard = DashboardPlugin(port=8765, open_browser=True) -dashboard.initialize(hooks) -# Prints: Dashboard: http://127.0.0.1:8765 - -# Execute with hooks -executor = dagron.DAGExecutor(dag, hooks=hooks) -result = executor.execute(tasks) - -# Clean up -dashboard.teardown() -``` - ---- - -## Complete example - -```python -import dagron -from dagron.plugins import ( - DagronPlugin, - HookRegistry, - HookEvent, - HookContext, - PluginManager, -) - -# Define a custom metrics plugin -class MetricsPlugin(DagronPlugin): - @property - def name(self) -> str: - return "metrics" - - def initialize(self, hooks: HookRegistry) -> None: - self.node_count = 0 - self.error_count = 0 - - def count_nodes(ctx: HookContext): - self.node_count += 1 - - def count_errors(ctx: HookContext): - self.error_count += 1 - - hooks.register(HookEvent.POST_NODE, count_nodes) - hooks.register(HookEvent.ON_ERROR, count_errors) - - def teardown(self) -> None: - print(f"Metrics: {self.node_count} nodes executed, {self.error_count} errors") - -# Use with PluginManager -manager = PluginManager() -manager.register(MetricsPlugin()) -manager.initialize_all() - -# Build and execute -dag = ( - 
dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -tasks = { - "extract": lambda: [1, 2, 3], - "transform": lambda: [2, 4, 6], - "load": lambda: "done", -} - -executor = dagron.DAGExecutor(dag, hooks=manager.hooks) -result = executor.execute(tasks) - -# Teardown prints metrics -manager.teardown_all() -# Metrics: 3 nodes executed, 0 errors -``` - ---- - -## See also - -- [Execution](/api/execution/execution) -- the `hooks` parameter on `DAGExecutor`. -- [Tracing](/api/observability/tracing) -- structured event recording during execution. -- [Reactive DAG](/api/execution/reactive) -- push-based reactive execution with subscriptions. diff --git a/docs/pages/api/utilities/template.mdx b/docs/pages/api/utilities/template.mdx deleted file mode 100644 index 277f7b1..0000000 --- a/docs/pages/api/utilities/template.mdx +++ /dev/null @@ -1,367 +0,0 @@ ---- -sidebar_position: 19 -title: Templates -description: API reference for dagron's DAG template system -- parameterized DAG construction with substitution, validation, and rendering. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Templates - -The template module provides parameterized DAG construction. Define a DAG -shape with placeholder values in node names (and optionally in payloads and -metadata), then render concrete DAGs by supplying parameter values. Templates -support type checking, custom validators, default values, and configurable -delimiters. - -```python -from dagron.template import DAGTemplate, TemplateParam, TemplateError -``` - ---- - -## DAGTemplate - - None: ...`} /> - -A parameterized DAG template that renders concrete DAGs by substituting -placeholder values. Placeholders in node names, payloads, and edge labels -are replaced at render time. 
- - - -```python -from dagron.template import DAGTemplate - -template = DAGTemplate( - params={"env": str, "replicas": int}, - defaults={"env": "staging"}, - descriptions={"env": "Target environment", "replicas": "Number of replicas"}, - validators={"replicas": lambda n: 1 <= n <= 10}, -) - -template.add_node("extract_{{env}}") -template.add_node("load_{{env}}") -template.add_edge("extract_{{env}}", "load_{{env}}") - -dag = template.render(env="prod", replicas=3) -print(list(dag.nodes())) # ['extract_prod', 'load_prod'] -``` - -### Properties - ---- - -#### DAGTemplate.params - - dict[str, TemplateParam]`} /> - -Return a copy of the template parameters. - -**Returns:** `dict[str, TemplateParam]` -- Mapping of parameter names to their `TemplateParam` specifications. - -```python -for name, param in template.params.items(): - print(f"{name}: type={param.type.__name__}, default={param.default}") -``` - -### Methods - ---- - -#### DAGTemplate.add_node - - DAGTemplate`} /> - -Add a templated node. The name may contain `{{param}}` placeholders that -are resolved at render time. Returns `self` for method chaining. - - - -**Returns:** `DAGTemplate` -- Self, for fluent chaining. - -```python -template.add_node("worker_{{env}}_{{replicas}}", payload={"region": "{{env}}"}) -``` - ---- - -#### DAGTemplate.add_edge - - DAGTemplate`} /> - -Add a templated edge. Both node names may contain placeholders. Returns -`self` for method chaining. - - - -**Returns:** `DAGTemplate` -- Self, for fluent chaining. - -```python -template.add_edge("extract_{{env}}", "transform_{{env}}", label="{{env}}_pipeline") -``` - ---- - -#### DAGTemplate.validate_params - - list[str]`} /> - -Validate parameters without rendering. Checks for unknown parameters, -missing required parameters, type mismatches, and custom validator failures. - -**Returns:** `list[str]` -- List of error messages. Empty means all parameters are valid. 
- -```python -errors = template.validate_params(env="prod", replicas="three") -for err in errors: - print(err) -# Parameter 'replicas' expects int, got str -``` - ---- - -#### DAGTemplate.render - - DAG`} /> - -Render the template into a concrete DAG. All placeholders are resolved -with the provided parameter values (merged with defaults). - -**Returns:** `DAG` -- A new `DAG` instance with all placeholders resolved. - -**Raises:** `TemplateError` -- If parameters are missing, wrong type, or fail custom validation. - -```python -dag = template.render(env="production", replicas=5) -print(dag.node_count()) -``` - -### Type-preserving substitution - -If the entire string is a single placeholder (e.g., `"{{replicas}}"`), the -raw Python value is returned rather than a stringified version. This means -integer and other non-string parameters pass through as their original type -when they are the sole content of a placeholder. - ---- - -#### DAGTemplate.render_builder - - DAGBuilder`} /> - -Render the template into a pre-populated `DAGBuilder`. This allows you to -add additional nodes and edges before calling `.build()`. - -**Returns:** `DAGBuilder` -- A builder with all templated nodes and edges added. - -**Raises:** `TemplateError` -- If parameter validation fails. - -```python -builder = template.render_builder(env="staging", replicas=2) -# Add extra nodes to the rendered template -builder.add_node("monitoring") -builder.add_edge("load_staging", "monitoring") -dag = builder.build() -``` - ---- - -#### DAGTemplate.render_pipeline - - Pipeline`} /> - -Render the template into a `Pipeline`. - - - -**Returns:** `Pipeline` -- A Pipeline wrapping the rendered DAG. - ---- - -## TemplateParam - - - -Specification for a single template parameter. Frozen dataclass. - - - -### Methods - ---- - -#### TemplateParam.validate - - None`} /> - -Validate a value against this parameter's type and optional custom -validator. 
- - - -**Raises:** `TemplateError` -- If the value has the wrong type or fails the custom validator. - -```python -from dagron.template import TemplateParam, TemplateError - -param = TemplateParam( -    name="replicas", -    type=int, -    validator=lambda n: 1 <= n <= 10, -) - -param.validate(5)   # OK - -try: -    param.validate(15)  # Fails validator -except TemplateError as e: -    print(e)  # Parameter 'replicas' failed custom validation - -try: -    param.validate("five")  # Wrong type -except TemplateError as e: -    print(e)  # Parameter 'replicas' expects int, got str -``` - ---- - -## TemplateError - - - -Raised when template validation or rendering fails. This includes missing -parameters, type mismatches, unknown parameters, and custom validator -failures. - -```python -from dagron.template import DAGTemplate, TemplateError - -template = DAGTemplate(params={"env": str}) -template.add_node("node_{{env}}") - -try: -    dag = template.render()  # Missing required parameter -except TemplateError as e: -    print(e)  # Missing required parameter: 'env' -``` - ---- - -## Complete example - -```python -from dagron.template import DAGTemplate - -# Define a reusable ETL template -etl_template = DAGTemplate( -    params={ -        "source": str, -        "target": str, -        "parallelism": int, -    }, -    defaults={"parallelism": 4}, -    descriptions={ -        "source": "Data source identifier", -        "target": "Destination database", -        "parallelism": "Number of parallel transform workers", -    }, -    validators={ -        "parallelism": lambda n: 1 <= n <= 32, -    }, -) - -# Define the template shape -etl_template.add_node("extract_{{source}}") -for i in range(4):  # max parallelism slots -    etl_template.add_node(f"transform_{{{{source}}}}_{i}") -    etl_template.add_edge(f"extract_{{{{source}}}}", f"transform_{{{{source}}}}_{i}") -etl_template.add_node("load_{{target}}") - -# Validate before rendering -errors = etl_template.validate_params(source="api", target="warehouse") -assert not errors - -# Render multiple concrete DAGs -api_dag = 
etl_template.render(source="api", target="warehouse") -db_dag = etl_template.render(source="db", target="lake", parallelism=2) - -# Use render_builder for customization -builder = etl_template.render_builder(source="s3", target="redshift") -builder.add_node("notify") -builder.add_edge("load_redshift", "notify") -custom_dag = builder.build() - -# Inspect template parameters -for name, param in etl_template.params.items(): - print(f" {name} ({param.type.__name__}): {param.description}") - if param.default is not None: - print(f" default: {param.default}") -``` - ---- - -## See also - -- [DAGBuilder](/api/core/builder) -- the builder returned by `render_builder()`. -- [Composition](/api/utilities/compose) -- combining multiple rendered DAGs. -- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns overview. diff --git a/docs/pages/api/utilities/versioning.mdx b/docs/pages/api/utilities/versioning.mdx deleted file mode 100644 index fdfe703..0000000 --- a/docs/pages/api/utilities/versioning.mdx +++ /dev/null @@ -1,412 +0,0 @@ ---- -sidebar_position: 20 -title: Versioning -description: API reference for dagron's DAG versioning system -- structural time-travel, mutation history, diffing, and forking. ---- - -import ApiSignature from '@site/src/components/ApiSignature'; -import ParamTable from '@site/src/components/ParamTable'; - -# Versioning - -The versioning module provides structural versioning and time-travel for -DAGs. Every mutation (add/remove node, add/remove edge, set payload/metadata) -is recorded in an append-only log. You can navigate to any historical version, -diff between arbitrary versions, inspect the full mutation history, and fork -independent copies from any point. - -```python -from dagron.versioning import VersionedDAG, Mutation, MutationType -``` - ---- - -## VersionedDAG - - None: ...`} /> - -DAG with full structural versioning and time-travel. Every mutation is -recorded in an append-only log with timestamps. 
The version counter starts -at 0 (empty state) and increments by 1 for each mutation. - - - -```python -from dagron.versioning import VersionedDAG - -vdag = VersionedDAG() -vdag.add_node("extract") -vdag.add_node("transform") -vdag.add_edge("extract", "transform") - -print(vdag.version) # 3 -print(vdag.dag.node_count()) # 2 -``` - -### Properties - ---- - -#### VersionedDAG.dag - - DAG`} /> - -The current DAG object. Read-only access is recommended -- use the -`VersionedDAG` mutation methods instead to ensure changes are tracked. - -**Returns:** `DAG` -- The underlying DAG in its current state. - ---- - -#### VersionedDAG.version - - int`} /> - -Current version number. Starts at 0 and increments by 1 for each mutation. - -**Returns:** `int` -- The current version. - -### Mutation methods - -These methods mirror the `DAG` API but record each operation in the -version log. - ---- - -#### VersionedDAG.add_node - - None`} /> - -Add a node and record the mutation. - - - ---- - -#### VersionedDAG.remove_node - - None`} /> - -Remove a node and all its edges, and record the mutation. - - - ---- - -#### VersionedDAG.add_edge - - None`} /> - -Add an edge and record the mutation. - - - -**Raises:** `CycleError` -- If adding the edge would create a cycle. - ---- - -#### VersionedDAG.remove_edge - - None`} /> - -Remove an edge and record the mutation. - - - ---- - -#### VersionedDAG.set_payload - - None`} /> - -Set a node's payload and record the mutation. - - - ---- - -#### VersionedDAG.set_metadata - - None`} /> - -Set a node's metadata and record the mutation. - - - -### Time-travel methods - ---- - -#### VersionedDAG.at_version - - DAG`} /> - -Reconstruct the DAG at a specific version by replaying mutations from -version 0 up to the specified version. - - - -**Returns:** `DAG` -- A new DAG representing the state at that version. - -**Raises:** `ValueError` -- If version is negative or exceeds the current version. 
- -```python -# Build up history -vdag = VersionedDAG() -vdag.add_node("a") # version 1 -vdag.add_node("b") # version 2 -vdag.add_edge("a", "b") # version 3 -vdag.add_node("c") # version 4 - -# Time-travel to version 2 -old_dag = vdag.at_version(2) -print(old_dag.node_count()) # 2 (only "a" and "b") -print(old_dag.edge_count()) # 0 (edge was added in version 3) -``` - ---- - -#### VersionedDAG.diff_versions - - GraphDiff`} /> - -Diff two versions of the DAG. Reconstructs both versions and computes -the structural difference. - - - -**Returns:** `GraphDiff` -- Structural differences between the two versions, including added/removed nodes and edges. - -```python -diff = vdag.diff_versions(1, 4) -print(diff) # Shows nodes/edges added between version 1 and version 4 -``` - ---- - -#### VersionedDAG.history - - list[Mutation]`} /> - -Get the full mutation history as an ordered list. - -**Returns:** `list[Mutation]` -- All recorded mutations in chronological order. - -```python -for mutation in vdag.history(): - print(f"v{mutation.version}: {mutation.mutation_type.value} {mutation.args}") -# v1: add_node {'name': 'a'} -# v2: add_node {'name': 'b'} -# v3: add_edge {'from_node': 'a', 'to_node': 'b'} -``` - ---- - -#### VersionedDAG.history_since - - list[Mutation]`} /> - -Get mutations since a specific version (exclusive). - - this value."}, -]} /> - -**Returns:** `list[Mutation]` -- Mutations after the given version. - -```python -recent = vdag.history_since(2) -# Returns mutations at version 3 and 4 -``` - ---- - -#### VersionedDAG.fork - - VersionedDAG`} /> - -Create an independent fork of this versioned DAG. The fork gets its own -copy of the DAG and history up to the fork point. Subsequent mutations -in either the original or the fork do not affect the other. - - - -**Returns:** `VersionedDAG` -- A new independent `VersionedDAG` forked from the specified version. 
- -```python -# Fork from the current state -fork = vdag.fork() -fork.add_node("d") - -print(vdag.dag.node_count()) # 3 (original unchanged) -print(fork.dag.node_count()) # 4 - -# Fork from a specific historical version -old_fork = vdag.fork(at_version=2) -print(old_fork.version) # 2 -print(old_fork.dag.node_count()) # 2 -``` - ---- - -## Mutation - - - -A single recorded mutation in the version log. Frozen dataclass. - - - ---- - -## MutationType - - - -Enumeration of all mutation types that can be recorded. - -| Value | Description | -|-------|-------------| -| `ADD_NODE` | A node was added to the DAG. | -| `REMOVE_NODE` | A node (and its edges) was removed. | -| `ADD_EDGE` | An edge was added between two nodes. | -| `REMOVE_EDGE` | An edge was removed. | -| `SET_PAYLOAD` | A node's payload was updated. | -| `SET_METADATA` | A node's metadata was updated. | - ---- - -## Complete example - -```python -from dagron.versioning import VersionedDAG, MutationType - -# Create a versioned DAG and build it incrementally -vdag = VersionedDAG() - -# Phase 1: Basic pipeline -vdag.add_node("extract", payload={"source": "api"}) # v1 -vdag.add_node("transform") # v2 -vdag.add_edge("extract", "transform") # v3 -vdag.add_node("load") # v4 -vdag.add_edge("transform", "load") # v5 -print(f"Phase 1 complete: v{vdag.version}") # v5 - -# Phase 2: Add validation step -vdag.add_node("validate") # v6 -vdag.add_edge("transform", "validate") # v7 -vdag.add_edge("validate", "load") # v8 -vdag.remove_edge("transform", "load") # v9 -print(f"Phase 2 complete: v{vdag.version}") # v9 - -# Time-travel: see the DAG before validation was added -phase1_dag = vdag.at_version(5) -print(f"Phase 1: {phase1_dag.node_count()} nodes, {phase1_dag.edge_count()} edges") -# Phase 1: 3 nodes, 2 edges - -# Diff between phases -diff = vdag.diff_versions(5, 9) -print(diff) - -# Inspect mutation log -print("\nMutation history:") -for m in vdag.history(): - print(f" v{m.version}: {m.mutation_type.value} {m.args}") - 
-# Fork for experimentation -experiment = vdag.fork() -experiment.add_node("cache") -experiment.add_edge("extract", "cache") -experiment.add_edge("cache", "transform") - -print(f"\nOriginal: {vdag.dag.node_count()} nodes") # 4 -print(f"Experiment: {experiment.dag.node_count()} nodes") # 5 - -# Fork from an earlier version -legacy = vdag.fork(at_version=5) -print(f"Legacy fork: v{legacy.version}, {legacy.dag.node_count()} nodes") -# Legacy fork: v5, 3 nodes - -# Get recent changes -recent = vdag.history_since(5) -print(f"\nChanges since v5: {len(recent)} mutations") -for m in recent: - print(f" {m.mutation_type.value}: {m.args}") -``` - ---- - -## See also - -- [DAG](/api/core/core) -- the core graph class that `VersionedDAG` wraps. -- [Composition](/api/utilities/compose) -- combining versioned DAGs. -- [Building DAGs guide](/guide/core-concepts/building-dags) -- construction patterns and best practices. diff --git a/docs/pages/guide/advanced/contracts.mdx b/docs/pages/guide/advanced/contracts.mdx deleted file mode 100644 index b8efbee..0000000 --- a/docs/pages/guide/advanced/contracts.mdx +++ /dev/null @@ -1,388 +0,0 @@ ---- -sidebar_position: 17 -title: Contracts -description: Enforce type contracts across DAG edges to catch mismatches before execution. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Contracts - -When DAGs grow large, it becomes easy for the output type of one node to drift from what a downstream node expects. dagron's **contract system** lets you declare input and output types for each node and validate them at build time -- before any task runs. This catches type mismatches early, similar to how a compiler checks function signatures. - -output: list"] - clean["clean
input: list, output: dict"] - model["train_model
input: dict, output: float"] - report["report
input: str ❌"] - fetch --> clean - clean --> model - model --> report - style report fill:#ffcdd2,stroke:#c62828`} - caption="The contract validator catches that 'train_model' outputs float but 'report' expects str as input." -/> - ---- - -## Core Classes - -| Class | Role | -|---|---| -| [`NodeContract`](/api/analysis/contracts#nodecontract) | Declares input types (per dependency) and output type for a single node. | -| [`ContractValidator`](/api/analysis/contracts#contractvalidator) | Validates contracts across all edges in a DAG. | -| [`ContractViolation`](/api/analysis/contracts#contractviolation) | Describes a single type mismatch: from_node, to_node, and a human-readable message. | -| [`extract_contracts`](/api/analysis/contracts#extract_contracts) | Auto-extract contracts from a Pipeline's `@task` functions using `typing.get_type_hints`. | -| [`validate_contracts`](/api/analysis/contracts#validate_contracts) | Convenience function: extract + validate in one call. | - ---- - -## Defining Contracts Manually - -### NodeContract - -A `NodeContract` declares what types a node expects from its dependencies and what type it produces: - -```python -from dagron.contracts import NodeContract - -# This node expects its "fetch_data" dependency to provide a list, -# and it outputs a dict. -clean_contract = NodeContract( - inputs={"fetch_data": list}, - output=dict, -) - -# This node expects "clean_data" to provide a dict, -# and outputs a float. -train_contract = NodeContract( - inputs={"clean_data": dict}, - output=float, -) -``` - -The `inputs` dict maps **dependency node names** to their expected types. The `output` is the type this node produces. - -The special type `object` acts as a wildcard (equivalent to `Any`) -- it matches any type. 
- -### Validating - -Create a `ContractValidator` with a DAG and contracts, then call `validate()`: - -```python -import dagron -from dagron.contracts import ContractValidator, NodeContract - -dag = ( - dagron.DAG.builder() - .add_node("fetch_data") - .add_node("clean_data") - .add_node("train_model") - .add_node("generate_report") - .add_edge("fetch_data", "clean_data") - .add_edge("clean_data", "train_model") - .add_edge("train_model", "generate_report") - .build() -) - -contracts = { - "fetch_data": NodeContract(output=list), - "clean_data": NodeContract(inputs={"fetch_data": list}, output=dict), - "train_model": NodeContract(inputs={"clean_data": dict}, output=float), - "generate_report": NodeContract(inputs={"train_model": str}, output=str), - # ^^^ BUG: should be float -} - -validator = ContractValidator(dag, contracts) -violations = validator.validate() - -for v in violations: - print(f" {v.from_node} -> {v.to_node}: {v.message}") -``` - -Output: - -``` - train_model -> generate_report: Type mismatch on edge train_model -> generate_report: - producer outputs float, but consumer expects str -``` - ---- - -## How Validation Works - -For every edge `(u, v)` in the DAG, the validator: - -1. Looks up `v`'s contract to find the expected input type for dependency `u`. -2. Looks up `u`'s contract to find its declared output type. -3. Checks compatibility using `issubclass(actual_output, expected_input)`. - -If the output type is not a subclass of the expected input type, a `ContractViolation` is recorded. - -### Type Compatibility Rules - -- `object` always matches (wildcard). -- Standard Python inheritance works: if `B` is a subclass of `A`, then `B` satisfies an `A` contract. -- Generic type aliases (e.g., `list[int]`) fall back to `True` if `issubclass` raises `TypeError`. 
- -```python -from dagron.contracts import NodeContract - -# int is a subclass of object -- always valid -NodeContract(inputs={"dep": object}, output=int) - -# bool is a subclass of int -- valid -NodeContract(inputs={"dep": int}, output=bool) - -# str is NOT a subclass of int -- violation -NodeContract(inputs={"dep": int}, output=str) -``` - ---- - -## ContractViolation - -Each violation is a frozen dataclass with three fields: - -```python -@dataclass(frozen=True) -class ContractViolation: - from_node: str # the upstream node - to_node: str # the downstream node - message: str # human-readable description -``` - -You can use violations to fail a CI check: - -```python -violations = validator.validate() -if violations: - for v in violations: - print(f"ERROR: {v.message}") - raise SystemExit(1) # fail the build -``` - ---- - -## Auto-Extracting Contracts from Pipelines - -If you use dagron's `Pipeline` / `@task` decorator pattern with type annotations, you can auto-extract contracts from the function signatures: - -```python -import dagron -from dagron.contracts import extract_contracts, validate_contracts - -@dagron.task(dependencies=[]) -def fetch_data() -> list: - return [1, 2, 3] - -@dagron.task(dependencies=["fetch_data"]) -def clean_data(fetch_data: list) -> dict: - return {"values": fetch_data} - -@dagron.task(dependencies=["clean_data"]) -def train_model(clean_data: dict) -> float: - return 0.95 - -@dagron.task(dependencies=["train_model"]) -def generate_report(train_model: float) -> str: - return f"Accuracy: {train_model}" - -pipeline = dagron.Pipeline([fetch_data, clean_data, train_model, generate_report]) -``` - -### extract_contracts() - -Reads `typing.get_type_hints()` from each task function to build `NodeContract` instances: - -```python -contracts = extract_contracts(pipeline) - -for name, contract in contracts.items(): - print(f" {name}: inputs={contract.inputs}, output={contract.output}") -``` - -Output: - -``` - fetch_data: inputs={}, output= - 
 clean_data: inputs={'fetch_data': <class 'list'>}, output=<class 'dict'> -  train_model: inputs={'clean_data': <class 'dict'>}, output=<class 'float'> -  generate_report: inputs={'train_model': <class 'float'>}, output=<class 'str'> -``` - -### validate_contracts() - -One-liner that extracts and validates in a single call: - -```python -violations = validate_contracts(pipeline) -if not violations: -    print("All contracts valid!") -``` - -You can also provide extra manually-defined contracts that override the auto-extracted ones: - -```python -violations = validate_contracts( -    pipeline, -    extra_contracts={ -        "fetch_data": NodeContract(output=dict),  # override -    }, -) -``` - ---- - -## Using Contracts with DAGBuilder - -You can attach contracts during DAG construction via the builder pattern: - -```python -dag = ( -    dagron.DAG.builder() -    .add_node("extract") -    .add_node("transform") -    .add_node("load") -    .add_edge("extract", "transform") -    .add_edge("transform", "load") -    .contract("extract", NodeContract(output=list)) -    .contract("transform", NodeContract(inputs={"extract": list}, output=dict)) -    .contract("load", NodeContract(inputs={"transform": dict}, output=bool)) -    .build() -) -``` - ---- - -## Complete Validation Example - -Here is a full example that demonstrates catching a type mismatch in a data pipeline: - -```python -import dagron -from dagron.contracts import ContractValidator, ContractViolation, NodeContract - -# Build the DAG -dag = ( -    dagron.DAG.builder() -    .add_node("read_csv") -    .add_node("parse_dates") -    .add_node("compute_stats") -    .add_node("render_chart") -    .add_node("send_email") -    .add_edge("read_csv", "parse_dates") -    .add_edge("parse_dates", "compute_stats") -    .add_edge("compute_stats", "render_chart") -    .add_edge("render_chart", "send_email") -    .build() -) - -# Define contracts -contracts = { -    "read_csv": NodeContract(output=list),          # list of rows -    "parse_dates": NodeContract(inputs={"read_csv": list}, output=list), -    "compute_stats": NodeContract(inputs={"parse_dates": list}, output=dict), -    "render_chart": 
NodeContract(inputs={"compute_stats": dict}, output=bytes), # PNG bytes - "send_email": NodeContract(inputs={"render_chart": str}, output=bool), - # ^^^ BUG: chart is bytes, not str -} - -# Validate -validator = ContractValidator(dag, contracts) -violations = validator.validate() - -if violations: - print(f"Found {len(violations)} contract violation(s):") - for v in violations: - print(f" {v.message}") -else: - print("All contracts valid.") -``` - -Output: - -``` -Found 1 contract violation(s): - Type mismatch on edge render_chart -> send_email: - producer outputs bytes, but consumer expects str -``` - ---- - -## Contracts in CI/CD - -Add contract validation as a pre-execution check in your CI pipeline: - -```python -def validate_pipeline(pipeline): - """Run as part of CI -- fail if contracts are violated.""" - violations = validate_contracts(pipeline) - if violations: - print("Contract violations detected:") - for v in violations: - print(f" ERROR: {v.message}") - raise SystemExit(1) - print("All contracts valid.") - -# In your CI script: -validate_pipeline(my_pipeline) -``` - -This catches type drift when someone changes a task's return type without updating the downstream consumer. - ---- - -## Partial Contracts - -You do not need to define contracts for every node. Nodes without contracts are silently skipped during validation. 
This lets you adopt contracts incrementally: - -```python -contracts = { - # Only validate the critical path - "train_model": NodeContract(inputs={"features": dict}, output=float), - "deploy": NodeContract(inputs={"train_model": float}, output=bool), -} - -validator = ContractValidator(dag, contracts) -violations = validator.validate() # only checks edges between contracted nodes -``` - ---- - -## Wildcard Types - -Use `object` as a wildcard that accepts any type: - -```python -# This node accepts anything from its dependency -contracts = { - "logger": NodeContract(inputs={"any_node": object}, output=object), -} -``` - -This is useful for utility nodes (loggers, monitors) that process arbitrary data. - ---- - -## Best Practices - -1. **Add type annotations to all `@task` functions.** This enables `extract_contracts()` to work automatically. - -2. **Run `validate_contracts()` in CI.** Catch type mismatches before they cause runtime errors. - -3. **Start with the critical path.** You do not need full coverage immediately -- contract a few key nodes and expand over time. - -4. **Use concrete types, not `object`.** The more specific your contracts, the more errors they catch. Reserve `object` for truly polymorphic nodes. - -5. **Combine with [DataFrames](/guide/advanced/dataframes).** For pandas/polars pipelines, use `DataFrameSchema` for column-level validation and `NodeContract` for edge-level type checking. - ---- - -## Related - -- [API Reference: Contracts](/api/analysis/contracts) -- full API documentation. -- [DataFrames](/guide/advanced/dataframes) -- schema validation for DataFrame pipelines. -- [Building DAGs](/guide/core-concepts/building-dags) -- the builder `.contract()` method. -- [Error Handling](/guide/observability/error-handling) -- how violations integrate with the error flow. 
diff --git a/docs/pages/guide/advanced/dataframes.mdx b/docs/pages/guide/advanced/dataframes.mdx deleted file mode 100644 index 87b0694..0000000 --- a/docs/pages/guide/advanced/dataframes.mdx +++ /dev/null @@ -1,479 +0,0 @@ ---- -sidebar_position: 18 -title: DataFrames -description: Validate pandas and polars DataFrames at DAG edge boundaries with schema definitions. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# DataFrames - -Data pipelines frequently pass DataFrames between nodes. A missing column, a wrong dtype, or an unexpectedly empty table can cascade through the pipeline and produce silent corruption. dagron's DataFrame integration lets you define **schemas** at edge boundaries and validate DataFrames automatically -- catching issues at the source instead of downstream. - -The system works with both **pandas** and **polars** DataFrames. No additional dependencies are required beyond what you already use. - -Schema: id(int), name(str), email(str)
min_rows: 1"] - clean["clean
Schema: id(int), name(str), email(str)
no nulls in email"] - aggregate["aggregate
Schema: domain(str), count(int)"] - report["report"] - extract --> clean --> aggregate --> report - style extract fill:#e3f2fd,stroke:#1565c0 - style clean fill:#e3f2fd,stroke:#1565c0 - style aggregate fill:#e3f2fd,stroke:#1565c0`} - caption="Each node has a schema that its output DataFrame must satisfy." -/> - ---- - -## Core Classes - -| Class | Role | -|---|---| -| [`DataFramePipeline`](/api/analysis/dataframe#dataframepipeline) | Wraps a DAG with schema definitions. Validates execution results against schemas. | -| [`DataFrameSchema`](/api/analysis/dataframe#dataframeschema) | Defines the expected shape of a DataFrame: columns, row count bounds. | -| [`ColumnSchema`](/api/analysis/dataframe#columnschema) | Defines a single column: name, dtype, nullable, required. | -| [`SchemaViolation`](/api/analysis/dataframe#schemaviolation) | Describes a single validation failure: node name and message. | -| [`validate_schema`](/api/analysis/dataframe#validate_schema) | Standalone function to validate any DataFrame against a schema. | - ---- - -## Defining Schemas - -### ColumnSchema - -Each column is described by a `ColumnSchema`: - -```python -from dagron.dataframe import ColumnSchema - -col = ColumnSchema( - name="user_id", - dtype="int", # substring match against the actual dtype string - nullable=False, # reject null values - required=True, # column must be present (default) -) -``` - -| Parameter | Type | Default | Description | -|---|---|---|---| -| `name` | `str` | -- | Column name (exact match). | -| `dtype` | `str \| None` | `None` | Expected dtype as a substring. `"int"` matches `int64`, `Int64`, etc. | -| `nullable` | `bool` | `True` | If `False`, the column must contain no null/NaN values. | -| `required` | `bool` | `True` | If `True`, the column must exist in the DataFrame. | - -The `dtype` check uses substring matching, so `"int"` matches both pandas `int64` and polars `Int64`. This provides cross-framework compatibility without requiring exact dtype strings. 
- -### DataFrameSchema - -Group column schemas and optional row count bounds into a `DataFrameSchema`: - -```python -from dagron.dataframe import DataFrameSchema, ColumnSchema - -schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="int", nullable=False), - ColumnSchema("name", dtype="str"), - ColumnSchema("email", dtype="str", nullable=False), - ColumnSchema("age", dtype="int", required=False), # optional column - ], - min_rows=1, # at least 1 row - max_rows=1000000, # at most 1M rows -) -``` - ---- - -## Quick Start - -```python -import dagron -import pandas as pd -from dagron.dataframe import ( - ColumnSchema, - DataFramePipeline, - DataFrameSchema, -) - -# 1. Build the DAG -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("clean") - .add_node("aggregate") - .add_edge("extract", "clean") - .add_edge("clean", "aggregate") - .build() -) - -# 2. Define schemas for each node's output -schemas = { - "extract": DataFrameSchema( - columns=[ - ColumnSchema("user_id", dtype="int", nullable=False), - ColumnSchema("name", dtype="str"), - ColumnSchema("email", dtype="str"), - ], - min_rows=1, - ), - "clean": DataFrameSchema( - columns=[ - ColumnSchema("user_id", dtype="int", nullable=False), - ColumnSchema("name", dtype="str", nullable=False), - ColumnSchema("email", dtype="str", nullable=False), - ], - ), - "aggregate": DataFrameSchema( - columns=[ - ColumnSchema("domain", dtype="str"), - ColumnSchema("count", dtype="int"), - ], - ), -} - -# 3. Define tasks -tasks = { - "extract": lambda: pd.DataFrame({ - "user_id": [1, 2, 3], - "name": ["Alice", "Bob", None], - "email": ["alice@a.com", "bob@b.com", "charlie@c.com"], - }), - "clean": lambda: pd.DataFrame({ - "user_id": [1, 2, 3], - "name": ["Alice", "Bob", "Charlie"], - "email": ["alice@a.com", "bob@b.com", "charlie@c.com"], - }), - "aggregate": lambda: pd.DataFrame({ - "domain": ["a.com", "b.com", "c.com"], - "count": [1, 1, 1], - }), -} - -# 4. 
Execute the DAG -executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) - -# 5. Validate results against schemas -pipeline = DataFramePipeline(dag, schemas) -violations = pipeline.validate_result(result) - -if violations: - for v in violations: - print(f" [{v.node_name}] {v.message}") -else: - print("All schemas valid!") -``` - ---- - -## Validation Rules - -The schema validator checks four things for each node: - -### 1. Required Columns - -If a column is `required=True` (the default) and is missing from the DataFrame: - -```python -schema = DataFrameSchema(columns=[ColumnSchema("missing_col", required=True)]) -# Violation: "Missing required column 'missing_col'" -``` - -### 2. Data Types - -If a `dtype` is specified, the actual dtype must contain the expected string as a substring: - -```python -schema = DataFrameSchema(columns=[ColumnSchema("age", dtype="int")]) - -# pandas int64 -> "int64" contains "int" -> valid -# pandas object -> "object" does NOT contain "int" -> violation -``` - -This means `dtype="int"` matches `int8`, `int16`, `int32`, `int64`, `Int64`, `UInt32`, etc. - -### 3. Null Values - -If `nullable=False`, the column must not contain any null/NaN values: - -```python -schema = DataFrameSchema(columns=[ColumnSchema("email", nullable=False)]) - -# DataFrame with email = ["alice@a.com", None, "bob@b.com"] -# Violation: "Column 'email' has null values but nullable=False" -``` - -### 4. 
Row Count Bounds - -```python -schema = DataFrameSchema(min_rows=1, max_rows=10000) - -# Empty DataFrame -# Violation: "Expected at least 1 rows, got 0" - -# DataFrame with 20000 rows -# Violation: "Expected at most 10000 rows, got 20000" -``` - ---- - -## Standalone Validation - -Use `validate_schema()` to validate any DataFrame without a full pipeline: - -```python -import pandas as pd -from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema - -df = pd.DataFrame({ - "id": [1, 2, 3], - "name": ["Alice", None, "Charlie"], -}) - -schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="int", nullable=False), - ColumnSchema("name", dtype="str", nullable=False), - ], - min_rows=1, -) - -violations = validate_schema(df, schema, node_name="my_step") -for v in violations: - print(f" {v.message}") -# "Column 'name' has null values but nullable=False" -``` - ---- - -## Polars Support - -The same schemas work with polars DataFrames -- no changes needed: - -```python -import polars as pl -from dagron.dataframe import validate_schema, DataFrameSchema, ColumnSchema - -df = pl.DataFrame({ - "id": [1, 2, 3], - "name": ["Alice", "Bob", "Charlie"], - "score": [0.9, 0.8, 0.7], -}) - -schema = DataFrameSchema( - columns=[ - ColumnSchema("id", dtype="Int", nullable=False), # polars uses "Int64" - ColumnSchema("name", dtype="Utf8"), - ColumnSchema("score", dtype="Float"), - ], -) - -violations = validate_schema(df, schema) -if not violations: - print("Valid!") -``` - -dagron detects the framework automatically by inspecting the object's module path. Both `pandas.DataFrame` and `polars.DataFrame` are supported. 
- ---- - -## Validating Individual Node Outputs - -Use `validate_value()` on a `DataFramePipeline` to validate a single node's output: - -```python -pipeline = DataFramePipeline(dag, schemas) - -df = pd.DataFrame({"user_id": [1, 2], "name": ["Alice", "Bob"]}) -violations = pipeline.validate_value("extract", df) - -if violations: - print("extract output is invalid:") - for v in violations: - print(f" {v.message}") -``` - -This is useful for validating intermediate results during development. - ---- - -## SchemaViolation - -Each violation is a frozen dataclass: - -```python -@dataclass(frozen=True) -class SchemaViolation: - node_name: str # which node produced the invalid DataFrame - message: str # human-readable description -``` - ---- - -## Complete pandas Pipeline Example - -```python -import dagron -import pandas as pd -from dagron.dataframe import ( - ColumnSchema, - DataFramePipeline, - DataFrameSchema, - validate_schema, -) - -# Build DAG -dag = ( - dagron.DAG.builder() - .add_node("read_orders") - .add_node("filter_active") - .add_node("compute_revenue") - .add_node("top_customers") - .add_edge("read_orders", "filter_active") - .add_edge("filter_active", "compute_revenue") - .add_edge("compute_revenue", "top_customers") - .build() -) - -# Define schemas -schemas = { - "read_orders": DataFrameSchema( - columns=[ - ColumnSchema("order_id", dtype="int", nullable=False), - ColumnSchema("customer_id", dtype="int", nullable=False), - ColumnSchema("amount", dtype="float"), - ColumnSchema("status", dtype="str"), - ], - min_rows=1, - ), - "filter_active": DataFrameSchema( - columns=[ - ColumnSchema("order_id", dtype="int", nullable=False), - ColumnSchema("customer_id", dtype="int", nullable=False), - ColumnSchema("amount", dtype="float", nullable=False), - ], - ), - "compute_revenue": DataFrameSchema( - columns=[ - ColumnSchema("customer_id", dtype="int", nullable=False), - ColumnSchema("total_revenue", dtype="float", nullable=False), - ], - ), - "top_customers": 
DataFrameSchema( - columns=[ - ColumnSchema("customer_id", dtype="int"), - ColumnSchema("total_revenue", dtype="float"), - ColumnSchema("rank", dtype="int"), - ], - max_rows=100, # top 100 - ), -} - -# Tasks -def read_orders(): - return pd.DataFrame({ - "order_id": [1, 2, 3, 4, 5], - "customer_id": [101, 102, 101, 103, 102], - "amount": [50.0, 75.0, 30.0, 100.0, 45.0], - "status": ["active", "active", "cancelled", "active", "active"], - }) - -def filter_active(): - df = read_orders() - return df[df["status"] == "active"][["order_id", "customer_id", "amount"]] - -def compute_revenue(): - df = filter_active() - return df.groupby("customer_id")["amount"].sum().reset_index().rename( - columns={"amount": "total_revenue"} - ) - -def top_customers(): - df = compute_revenue() - df = df.sort_values("total_revenue", ascending=False).head(100) - df["rank"] = range(1, len(df) + 1) - return df - -# Execute and validate -executor = dagron.DAGExecutor(dag) -result = executor.execute({ - "read_orders": read_orders, - "filter_active": filter_active, - "compute_revenue": compute_revenue, - "top_customers": top_customers, -}) - -pipeline = DataFramePipeline(dag, schemas) -violations = pipeline.validate_result(result) - -if violations: - print(f"{len(violations)} schema violation(s):") - for v in violations: - print(f" [{v.node_name}] {v.message}") -else: - print("All schemas valid!") -``` - ---- - -## Combining with Contracts - -Use `NodeContract` for type-level checking (ensuring nodes produce DataFrames) and `DataFrameSchema` for content-level checking (columns, dtypes, nulls): - -```python -from dagron.contracts import NodeContract, ContractValidator - -# Type-level: ensure the node outputs a DataFrame -import pandas as pd -contracts = { - "read_orders": NodeContract(output=pd.DataFrame), - "filter_active": NodeContract(inputs={"read_orders": pd.DataFrame}, output=pd.DataFrame), - "compute_revenue": NodeContract(inputs={"filter_active": pd.DataFrame}, output=pd.DataFrame), -} - -# 
Validate types -type_violations = ContractValidator(dag, contracts).validate() - -# Content-level: validate column schemas -content_violations = DataFramePipeline(dag, schemas).validate_result(result) -``` - ---- - -## Non-DataFrame Handling - -If a node's output is not a pandas or polars DataFrame, the validator reports it: - -```python -schema = DataFrameSchema(columns=[ColumnSchema("id")]) -violations = validate_schema("not a dataframe", schema, node_name="bad_node") -# SchemaViolation("bad_node", "Expected DataFrame, got str") -``` - ---- - -## Best Practices - -1. **Define schemas for every node that produces a DataFrame.** This creates a complete validation boundary at every step. - -2. **Use `nullable=False` for critical columns.** Null values in ID or key columns are a common source of downstream errors. - -3. **Set `min_rows=1` for extract nodes.** Catch empty result sets immediately instead of letting them flow through. - -4. **Use `max_rows` for output nodes.** Prevent accidentally returning millions of rows to a reporting step. - -5. **Run validation in CI.** Execute the pipeline with test data and validate schemas as part of your test suite. - -6. **Use substring dtype matching.** Specify `"int"` instead of `"int64"` for cross-framework compatibility between pandas and polars. - ---- - -## Related - -- [API Reference: DataFrames](/api/analysis/dataframe) -- full API documentation. -- [Contracts](/guide/advanced/contracts) -- type-level edge validation. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- the execution model that produces `ExecutionResult`. -- [Error Handling](/guide/observability/error-handling) -- handling violations as part of the error flow. 
diff --git a/docs/pages/guide/advanced/plugins-hooks.mdx b/docs/pages/guide/advanced/plugins-hooks.mdx deleted file mode 100644 index 5c29cc7..0000000 --- a/docs/pages/guide/advanced/plugins-hooks.mdx +++ /dev/null @@ -1,538 +0,0 @@ ---- -sidebar_position: 20 -title: Plugins & Hooks -description: Extend dagron with plugins and react to lifecycle events via the hook system. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Plugins & Hooks - -dagron provides an event-driven plugin system that lets you hook into every stage of DAG construction and execution. Plugins can log events, collect metrics, serve live dashboards, send notifications, or implement any cross-cutting concern without modifying your pipeline code. - -The system has three layers: - -1. **HookRegistry** -- registers and fires callbacks for lifecycle events. -2. **DagronPlugin** -- abstract base class for plugin implementations. -3. **PluginManager** -- discovers, initializes, and tears down plugins. - -```mermaid -graph LR - Plugin1["LoggingPlugin"] --> HookRegistry - Plugin2["MetricsPlugin"] --> HookRegistry - Plugin3["DashboardPlugin"] --> HookRegistry - HookRegistry --> PRE_EXECUTE - HookRegistry --> PRE_NODE - HookRegistry --> POST_NODE - HookRegistry --> ON_ERROR - HookRegistry --> POST_EXECUTE -``` - ---- - -## Hook Events - -The `HookEvent` enum defines the lifecycle events you can subscribe to: - -| Event | Fires when... | Context fields | -|---|---|---| -| `PRE_BUILD` | A DAGBuilder starts building. | `dag` (partial) | -| `POST_BUILD` | A DAGBuilder finishes building. | `dag` (complete) | -| `PRE_EXECUTE` | Execution begins. | `dag` | -| `POST_EXECUTE` | Execution completes. | `dag`, `execution_result` | -| `PRE_NODE` | A node is about to execute. | `dag`, `node_name` | -| `POST_NODE` | A node has finished executing. | `dag`, `node_name`, `node_result` | -| `ON_ERROR` | A node raises an exception. 
| `dag`, `node_name`, `error` | - -```python -from dagron.plugins.hooks import HookEvent - -HookEvent.PRE_EXECUTE # "pre_execute" -HookEvent.POST_EXECUTE # "post_execute" -HookEvent.PRE_NODE # "pre_node" -HookEvent.POST_NODE # "post_node" -HookEvent.ON_ERROR # "on_error" -HookEvent.PRE_BUILD # "pre_build" -HookEvent.POST_BUILD # "post_build" -``` - ---- - -## HookRegistry - -The `HookRegistry` is the central event bus. You register callbacks for specific events and fire them with a `HookContext`: - -```python -from dagron.plugins.hooks import HookRegistry, HookEvent, HookContext - -hooks = HookRegistry() - -# Register a callback -def on_node_start(ctx: HookContext): - print(f" Starting node: {ctx.node_name}") - -unregister = hooks.register(HookEvent.PRE_NODE, on_node_start) - -# Fire the event (the executor does this automatically) -hooks.fire(HookContext( - event=HookEvent.PRE_NODE, - dag=dag, - node_name="extract", -)) -# prints: " Starting node: extract" - -# Unregister when no longer needed -unregister() -``` - -### HookContext - -Every callback receives a `HookContext` with the relevant information: - -```python -from dagron.plugins.hooks import HookContext - -ctx = HookContext( - event=HookEvent.POST_NODE, - dag=dag, - node_name="transform", - node_result=result, - error=None, - execution_result=None, - metadata={"extra": "info"}, -) -``` - -| Field | Type | Description | -|---|---|---| -| `event` | `HookEvent` | The event that triggered this callback. | -| `dag` | `DAG \| None` | The DAG being built or executed. | -| `node_name` | `str \| None` | The node involved (for node-level events). | -| `node_result` | `Any` | The node's result (for `POST_NODE`). | -| `error` | `Exception \| None` | The exception (for `ON_ERROR`). | -| `execution_result` | `Any` | The full execution result (for `POST_EXECUTE`). | -| `metadata` | `dict` | Arbitrary extra data. | - -### Priority - -Callbacks run in **descending priority order**. 
Higher priority runs first: - -```python -hooks.register(HookEvent.PRE_NODE, first_callback, priority=100) -hooks.register(HookEvent.PRE_NODE, second_callback, priority=50) -hooks.register(HookEvent.PRE_NODE, last_callback, priority=0) - -# Order: first_callback -> second_callback -> last_callback -``` - -### Error Isolation - -Hook callbacks are fire-and-forget. If a callback raises an exception, it is caught and issued as a `RuntimeWarning`, but execution continues: - -```python -def buggy_hook(ctx): - raise ValueError("oops") - -hooks.register(HookEvent.PRE_NODE, buggy_hook) - -# This fires the hook but does NOT stop execution -# Instead, a RuntimeWarning is issued -hooks.fire(HookContext(event=HookEvent.PRE_NODE)) -``` - -### Clearing Hooks - -```python -# Clear all hooks for a specific event -hooks.clear(HookEvent.PRE_NODE) - -# Clear all hooks for all events -hooks.clear() - -# Count registered hooks -hooks.hook_count() # total across all events -hooks.hook_count(HookEvent.PRE_NODE) # for a specific event -``` - ---- - -## Writing a Plugin - -### DagronPlugin ABC - -Subclass `DagronPlugin` and implement `name`, `initialize()`, and optionally `teardown()`: - -```python -from dagron.plugins.base import DagronPlugin -from dagron.plugins.hooks import HookEvent, HookContext, HookRegistry - - -class TimingPlugin(DagronPlugin): - """Plugin that measures and logs node execution times.""" - - def __init__(self): - self._start_times: dict[str, float] = {} - - @property - def name(self) -> str: - return "timing" - - def initialize(self, hooks: HookRegistry) -> None: - """Register hooks for node timing.""" - import time - - def on_pre_node(ctx: HookContext): - if ctx.node_name: - self._start_times[ctx.node_name] = time.monotonic() - - def on_post_node(ctx: HookContext): - if ctx.node_name and ctx.node_name in self._start_times: - elapsed = time.monotonic() - self._start_times[ctx.node_name] - print(f" [{ctx.node_name}] completed in {elapsed:.3f}s") - - 
hooks.register(HookEvent.PRE_NODE, on_pre_node) - hooks.register(HookEvent.POST_NODE, on_post_node) - - def teardown(self) -> None: - """Clean up resources.""" - self._start_times.clear() -``` - -### Using the Plugin - -```python -from dagron.plugins.hooks import HookRegistry -from dagron.plugins.manager import PluginManager - -# Create the hook registry and plugin manager -hooks = HookRegistry() -manager = PluginManager(hooks) - -# Register and initialize plugins -manager.register(TimingPlugin()) -manager.initialize_all() - -# Pass hooks to the executor -executor = dagron.DAGExecutor(dag, hooks=hooks) -result = executor.execute(tasks) - -# Clean up -manager.teardown_all() -``` - ---- - -## PluginManager - -The `PluginManager` handles the plugin lifecycle: - -```python -from dagron.plugins.manager import PluginManager - -manager = PluginManager() - -# Register plugins manually -manager.register(TimingPlugin()) -manager.register(LoggingPlugin()) - -# Auto-discover plugins from entry_points -discovered = manager.discover() -print(f"Discovered: {discovered}") - -# Initialize all registered plugins -manager.initialize_all() - -# Access the shared hook registry -hooks = manager.hooks - -# List registered plugins -print(manager.plugins) - -# Tear down all plugins -manager.teardown_all() -``` - -### Plugin Discovery - -Plugins can be auto-discovered via Python entry points. Add to your `pyproject.toml`: - -```toml -[project.entry-points."dagron.plugins"] -my_plugin = "my_package.plugins:MyPlugin" -``` - -Then `manager.discover()` will find and register them automatically. 
- ---- - -## @dagron_plugin Decorator - -For quick plugin registration, use the `@dagron_plugin` class decorator: - -```python -from dagron.plugins.base import DagronPlugin -from dagron.plugins.manager import dagron_plugin -from dagron.plugins.hooks import HookEvent, HookContext, HookRegistry - - -@dagron_plugin -class NotificationPlugin(DagronPlugin): - """Send a notification when execution fails.""" - - @property - def name(self) -> str: - return "notifications" - - def initialize(self, hooks: HookRegistry) -> None: - def on_error(ctx: HookContext): - send_alert(f"Node {ctx.node_name} failed: {ctx.error}") - - hooks.register(HookEvent.ON_ERROR, on_error) -``` - -The `@dagron_plugin` decorator automatically instantiates and registers the plugin with dagron's global plugin manager. - ---- - -## DashboardPlugin - -dagron ships with a built-in `DashboardPlugin` that serves a live web dashboard showing real-time execution status. The web server runs in Rust (axum + tokio) on a background thread. 
- -```python -from dagron.dashboard import DashboardPlugin -from dagron.execution.gates import ApprovalGate, GateController -from dagron.plugins.hooks import HookRegistry -from dagron.plugins.manager import PluginManager - -# Optional: set up gates for the dashboard to manage -controller = GateController({ - "review": ApprovalGate(timeout=600), - "deploy": ApprovalGate(timeout=300), -}) - -# Create the dashboard plugin -dashboard = DashboardPlugin( - host="127.0.0.1", - port=8765, - gate_controller=controller, - open_browser=True, # auto-open in browser -) - -# Wire it up -hooks = HookRegistry() -manager = PluginManager(hooks) -manager.register(dashboard) -manager.initialize_all() -# prints: "Dashboard: http://127.0.0.1:8765" - -# Execute with hooks -executor = dagron.DAGExecutor(dag, hooks=hooks) -result = executor.execute(tasks) - -# Clean up -manager.teardown_all() -``` - -The dashboard shows: - -- A live graph visualization with node status (pending, running, completed, failed). -- Execution timing for each node. -- Approve/reject buttons for any gates in the `WAITING` state. -- Summary statistics after execution completes. - -### Dashboard Hooks - -The `DashboardPlugin` registers hooks for these events: - -| Event | Dashboard action | -|---|---| -| `PRE_EXECUTE` | Resets the dashboard with the DAG structure. | -| `PRE_NODE` | Marks the node as "running" in the UI. | -| `POST_NODE` | Marks the node as "completed". | -| `ON_ERROR` | Marks the node as "failed" with error details. | -| `POST_EXECUTE` | Shows final execution summary. 
| - ---- - -## Practical Plugin Examples - -### Logging Plugin - -```python -import logging - -class LoggingPlugin(DagronPlugin): - """Log all lifecycle events.""" - - def __init__(self, logger_name: str = "dagron"): - self._logger = logging.getLogger(logger_name) - - @property - def name(self) -> str: - return "logging" - - def initialize(self, hooks: HookRegistry) -> None: - def on_pre_execute(ctx: HookContext): - self._logger.info( - "Execution started: %d nodes", - ctx.dag.node_count() if ctx.dag else 0, - ) - - def on_pre_node(ctx: HookContext): - self._logger.info("Node started: %s", ctx.node_name) - - def on_post_node(ctx: HookContext): - self._logger.info("Node completed: %s", ctx.node_name) - - def on_error(ctx: HookContext): - self._logger.error( - "Node failed: %s - %s", - ctx.node_name, - ctx.error, - ) - - def on_post_execute(ctx: HookContext): - r = ctx.execution_result - if r: - self._logger.info( - "Execution finished: %d succeeded, %d failed in %.1fs", - r.succeeded, - r.failed, - r.total_duration_seconds, - ) - - hooks.register(HookEvent.PRE_EXECUTE, on_pre_execute) - hooks.register(HookEvent.PRE_NODE, on_pre_node) - hooks.register(HookEvent.POST_NODE, on_post_node) - hooks.register(HookEvent.ON_ERROR, on_error) - hooks.register(HookEvent.POST_EXECUTE, on_post_execute) -``` - -### Metrics Plugin (Prometheus) - -```python -class PrometheusPlugin(DagronPlugin): - """Export execution metrics to Prometheus.""" - - def __init__(self): - from prometheus_client import Counter, Histogram - self.node_duration = Histogram( - "dagron_node_duration_seconds", - "Node execution duration", - ["node_name"], - ) - self.node_failures = Counter( - "dagron_node_failures_total", - "Total node failures", - ["node_name"], - ) - - @property - def name(self) -> str: - return "prometheus" - - def initialize(self, hooks: HookRegistry) -> None: - def on_post_node(ctx: HookContext): - if ctx.node_name and ctx.node_result: - self.node_duration.labels( - node_name=ctx.node_name 
- ).observe(ctx.node_result.duration_seconds) - - def on_error(ctx: HookContext): - if ctx.node_name: - self.node_failures.labels(node_name=ctx.node_name).inc() - - hooks.register(HookEvent.POST_NODE, on_post_node) - hooks.register(HookEvent.ON_ERROR, on_error) -``` - -### Slack Notification Plugin - -```python -class SlackPlugin(DagronPlugin): - """Send Slack notifications on execution failure.""" - - def __init__(self, webhook_url: str, channel: str = "#alerts"): - self._webhook_url = webhook_url - self._channel = channel - - @property - def name(self) -> str: - return "slack" - - def initialize(self, hooks: HookRegistry) -> None: - def on_post_execute(ctx: HookContext): - r = ctx.execution_result - if r and r.failed > 0: - import httpx - httpx.post(self._webhook_url, json={ - "channel": self._channel, - "text": ( - f"dagron pipeline failed: " - f"{r.failed} node(s) failed, " - f"{r.succeeded} succeeded" - ), - }) - - hooks.register(HookEvent.POST_EXECUTE, on_post_execute) -``` - ---- - -## Composing Multiple Plugins - -Register multiple plugins and they all receive the same events: - -```python -manager = PluginManager() - -manager.register(LoggingPlugin()) -manager.register(TimingPlugin()) -manager.register(DashboardPlugin(port=8765)) -manager.register(SlackPlugin(webhook_url="https://hooks.slack.com/...")) - -manager.initialize_all() - -# All plugins receive events during execution -executor = dagron.DAGExecutor(dag, hooks=manager.hooks) -result = executor.execute(tasks) - -manager.teardown_all() -``` - -Use **priority** to control the order when it matters: - -```python -# Logging should run first (highest priority) -hooks.register(HookEvent.PRE_NODE, log_callback, priority=100) - -# Metrics second -hooks.register(HookEvent.PRE_NODE, metrics_callback, priority=50) - -# Dashboard last -hooks.register(HookEvent.PRE_NODE, dashboard_callback, priority=0) -``` - ---- - -## Best Practices - -1. 
**Keep hooks lightweight.** Callbacks run on the executor thread, so heavy work (network calls, disk I/O) should be offloaded to a background thread or queue. - -2. **Never raise from hooks.** Exceptions in hooks are caught and warned, but they can mask real errors. Log errors and continue. - -3. **Use `teardown()` for cleanup.** Close file handles, flush metrics, and shut down background threads in the teardown method. - -4. **Use entry points for distribution.** Package plugins as standalone PyPI packages with `dagron.plugins` entry points for automatic discovery. - -5. **Test plugins in isolation.** Create a `HookRegistry`, register your plugin, fire test events, and assert the behavior. - ---- - -## Related - -- [API Reference: Plugins](/api/utilities/plugins) -- full API documentation. -- [Approval Gates](/guide/execution-strategies/approval-gates) -- gate integration with the DashboardPlugin. -- [Visualization](/guide/observability/visualization) -- other ways to visualize DAG execution. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- how the executor fires hook events. diff --git a/docs/pages/guide/advanced/templates.mdx b/docs/pages/guide/advanced/templates.mdx deleted file mode 100644 index 108a273..0000000 --- a/docs/pages/guide/advanced/templates.mdx +++ /dev/null @@ -1,456 +0,0 @@ ---- -sidebar_position: 15 -title: Templates -description: Create parameterized DAG templates with placeholder expansion for reusable pipeline patterns. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Templates - -Many organizations run the same pipeline structure against different environments, datasets, or configurations. Instead of building a separate DAG for each variant, dagron lets you define a **DAGTemplate** with `{{placeholder}}` syntax and render concrete DAGs by supplying parameter values. 
- -Templates enforce type safety and support custom validators, so invalid parameter combinations are caught before the graph is ever built. - - T_T --> T_L - end - subgraph "render(env='prod')" - P_E["extract_prod"] - P_T["transform_prod"] - P_L["load_prod"] - P_E --> P_T --> P_L - end - subgraph "render(env='staging')" - S_E["extract_staging"] - S_T["transform_staging"] - S_L["load_staging"] - S_E --> S_T --> S_L - end`} - caption="A single template renders into different concrete DAGs depending on the parameter values." -/> - ---- - -## Quick Start - -```python -from dagron.template import DAGTemplate - -# 1. Define the template -template = DAGTemplate( - params={"env": str, "batch_size": int}, - defaults={"env": "staging", "batch_size": 1000}, -) - -# 2. Add templated nodes and edges -template.add_node("extract_{{env}}") -template.add_node("transform_{{env}}") -template.add_node("load_{{env}}") -template.add_edge("extract_{{env}}", "transform_{{env}}") -template.add_edge("transform_{{env}}", "load_{{env}}") - -# 3. Render a concrete DAG -dag = template.render(env="prod", batch_size=5000) - -# The DAG now has nodes: extract_prod, transform_prod, load_prod -print([n.name for n in dag.topological_sort()]) -# ['extract_prod', 'transform_prod', 'load_prod'] -``` - ---- - -## Template Parameters - -### Declaring Parameters - -Parameters are declared with their types when constructing the template: - -```python -template = DAGTemplate( - params={ - "env": str, - "replicas": int, - "gpu_enabled": bool, - }, - defaults={ - "env": "staging", - "replicas": 1, - }, - descriptions={ - "env": "Target deployment environment", - "replicas": "Number of parallel workers", - "gpu_enabled": "Whether to use GPU acceleration", - }, -) -``` - -Parameters without a default are **required** -- `render()` will raise a `TemplateError` if they are missing. 
- -### TemplateParam - -Under the hood, each parameter is a `TemplateParam` dataclass: - -```python -from dagron.template import TemplateParam - -param = TemplateParam( - name="env", - type=str, - default="staging", - description="Target deployment environment", - validator=lambda v: v in ("dev", "staging", "prod"), -) -``` - -You can access all parameter specs through the template: - -```python -for name, param in template.params.items(): - print(f" {name}: {param.type.__name__}, default={param.default}") - print(f" {param.description}") -``` - ---- - -## Placeholder Syntax - -### Default Delimiters - -By default, placeholders use double-brace syntax: `{{param_name}}`. You can place them anywhere in a node name or edge label: - -```python -template.add_node("train_{{model}}_{{env}}") -template.add_edge("data_{{env}}", "train_{{model}}_{{env}}") -``` - -### Custom Delimiters - -If double braces conflict with your naming conventions, specify custom delimiters: - -```python -template = DAGTemplate( - params={"env": str}, - delimiters=("${", "}"), # shell-style -) - -template.add_node("extract_${env}") -``` - -### Type-Preserving Substitution - -If an entire node name is a single placeholder (e.g., `"{{replicas}}"`), dagron returns the raw Python value instead of stringifying it. This is useful for metadata: - -```python -template = DAGTemplate(params={"replicas": int}) -template.add_node("worker", metadata="{{replicas}}") - -dag = template.render(replicas=4) -# The metadata is the integer 4, not the string "4" -``` - -When a placeholder is part of a larger string (e.g., `"worker_{{env}}"`), values are converted to strings via `str()`. 
- ---- - -## Validation - -### Automatic Type Checking - -Parameters are validated against their declared types at render time: - -```python -template = DAGTemplate(params={"replicas": int}) - -try: - template.render(replicas="three") # str is not int -except TemplateError as e: - print(e) - # "Parameter 'replicas' expects int, got str" -``` - -### Custom Validators - -Supply a validator function for each parameter to enforce domain-specific constraints: - -```python -template = DAGTemplate( - params={"env": str, "replicas": int}, - validators={ - "env": lambda v: v in ("dev", "staging", "prod"), - "replicas": lambda v: 1 <= v <= 100, - }, -) - -try: - template.render(env="banana", replicas=1) -except TemplateError as e: - print(e) - # "Parameter 'env' failed custom validation" -``` - -### Pre-Validation - -Use `validate_params()` to check parameters without rendering. This returns a list of error messages instead of raising: - -```python -errors = template.validate_params(env="prod", replicas=-1) -for error in errors: - print(f" - {error}") -# - Parameter 'replicas' failed custom validation - -errors = template.validate_params() # missing required params -# - Missing required parameter: 'replicas' -``` - -### Unknown Parameters - -Passing parameters not declared in the template is an error: - -```python -try: - template.render(env="prod", replicas=3, color="blue") -except TemplateError as e: - print(e) - # "Unknown parameters: color" -``` - ---- - -## Rendering Methods - -### `render()` -- Direct DAG - -The simplest rendering method produces a finalized DAG: - -```python -dag = template.render(env="prod", replicas=3) -# dag is a dagron.DAG, ready for execution -``` - -### `render_builder()` -- DAGBuilder for Further Modification - -If you need to add extra nodes or edges after rendering, use `render_builder()` to get a `DAGBuilder`: - -```python -builder = template.render_builder(env="prod", replicas=3) - -# Add extra nodes beyond what the template defines 
-builder.add_node("monitoring") -builder.add_edge("load_prod", "monitoring") - -dag = builder.build() -``` - -This is useful when you have a standard template but need per-deployment customizations. - -### `render_pipeline()` -- Pipeline - -Render into a `Pipeline` for use with the `@task` decorator workflow: - -```python -pipeline = template.render_pipeline(env="prod", replicas=3) -``` - ---- - -## Fluent API - -`add_node()` and `add_edge()` return `self`, so you can chain calls: - -```python -template = DAGTemplate(params={"env": str}, defaults={"env": "dev"}) - -template = ( - template - .add_node("extract_{{env}}") - .add_node("transform_{{env}}") - .add_node("load_{{env}}") - .add_edge("extract_{{env}}", "transform_{{env}}") - .add_edge("transform_{{env}}", "load_{{env}}") -) - -dag = template.render(env="prod") -``` - ---- - -## Parameterized ETL Example - -Here is a realistic ETL pipeline template that generates environment-specific DAGs: - -```python -from dagron.template import DAGTemplate - -def create_etl_template(): - """Create a reusable ETL pipeline template.""" - template = DAGTemplate( - params={ - "env": str, - "source_table": str, - "target_table": str, - "batch_size": int, - "validate": bool, - }, - defaults={ - "batch_size": 10000, - "validate": True, - }, - descriptions={ - "env": "Target environment (dev, staging, prod)", - "source_table": "Source database table name", - "target_table": "Target data warehouse table", - "batch_size": "Number of rows per batch", - "validate": "Whether to run data validation", - }, - validators={ - "env": lambda v: v in ("dev", "staging", "prod"), - "batch_size": lambda v: 100 <= v <= 1_000_000, - }, - ) - - # Core ETL nodes - ( - template - .add_node("extract_{{source_table}}_{{env}}") - .add_node("validate_{{source_table}}_{{env}}") - .add_node("transform_{{source_table}}_{{env}}") - .add_node("load_{{target_table}}_{{env}}") - .add_node("verify_{{target_table}}_{{env}}") - 
.add_edge("extract_{{source_table}}_{{env}}", "validate_{{source_table}}_{{env}}") - .add_edge("validate_{{source_table}}_{{env}}", "transform_{{source_table}}_{{env}}") - .add_edge("transform_{{source_table}}_{{env}}", "load_{{target_table}}_{{env}}") - .add_edge("load_{{target_table}}_{{env}}", "verify_{{target_table}}_{{env}}") - ) - - return template - -# Create the template once -etl_template = create_etl_template() - -# Render for different environments -dev_dag = etl_template.render( - env="dev", - source_table="users", - target_table="dim_users", -) - -prod_dag = etl_template.render( - env="prod", - source_table="users", - target_table="dim_users", - batch_size=100000, -) - -print(f"Dev nodes: {[n.name for n in dev_dag.topological_sort()]}") -# ['extract_users_dev', 'validate_users_dev', 'transform_users_dev', -# 'load_dim_users_dev', 'verify_dim_users_dev'] - -print(f"Prod nodes: {[n.name for n in prod_dag.topological_sort()]}") -# ['extract_users_prod', 'validate_users_prod', 'transform_users_prod', -# 'load_dim_users_prod', 'verify_dim_users_prod'] -``` - ---- - -## Multi-Tenant Pipeline Generation - -Templates are powerful for generating per-tenant pipelines: - -```python -template = DAGTemplate( - params={"tenant": str, "region": str}, - defaults={"region": "us-east-1"}, -) - -( - template - .add_node("ingest_{{tenant}}_{{region}}") - .add_node("process_{{tenant}}_{{region}}") - .add_node("deliver_{{tenant}}_{{region}}") - .add_edge("ingest_{{tenant}}_{{region}}", "process_{{tenant}}_{{region}}") - .add_edge("process_{{tenant}}_{{region}}", "deliver_{{tenant}}_{{region}}") -) - -tenants = ["acme", "globex", "initech"] -dags = { - tenant: template.render(tenant=tenant, region="eu-west-1") - for tenant in tenants -} - -for tenant, dag in dags.items(): - nodes = [n.name for n in dag.topological_sort()] - print(f"{tenant}: {nodes}") -``` - ---- - -## Template Composition with render_builder - -Use `render_builder()` to compose a base template with per-use 
customizations: - -```python -# Base template: standard ML training pipeline -base = DAGTemplate( - params={"model": str, "dataset": str}, -) -( - base - .add_node("load_{{dataset}}") - .add_node("preprocess_{{dataset}}") - .add_node("train_{{model}}") - .add_node("evaluate_{{model}}") - .add_edge("load_{{dataset}}", "preprocess_{{dataset}}") - .add_edge("preprocess_{{dataset}}", "train_{{model}}") - .add_edge("train_{{model}}", "evaluate_{{model}}") -) - -# Render with customization -builder = base.render_builder(model="resnet50", dataset="imagenet") -builder.add_node("deploy_resnet50") -builder.add_edge("evaluate_resnet50", "deploy_resnet50") - -# Only production builds get a deploy step -dag = builder.build() -``` - ---- - -## Repr and Debugging - -Templates have a helpful repr: - -```python -print(template) -# DAGTemplate(params=[batch_size, env, source_table, target_table, validate], -# nodes=5, edges=4) -``` - ---- - -## Best Practices - -1. **Define templates as factory functions.** Return a `DAGTemplate` from a function so that the template definition is reusable and testable. - -2. **Use validators for all string parameters.** Catch typos like `env="prodd"` at render time instead of at execution time. - -3. **Provide defaults for optional parameters.** This makes the most common usage concise while still allowing customization. - -4. **Use `validate_params()` in CI.** Run parameter validation in your test suite to catch invalid configurations early. - -5. **Prefer `render_builder()` when composing.** It gives you flexibility to add environment-specific nodes without modifying the base template. - ---- - -## Related - -- [API Reference: Templates](/api/utilities/template) -- full API documentation. -- [Building DAGs](/guide/core-concepts/building-dags) -- the DAGBuilder that templates render into. -- [Versioning](/guide/advanced/versioning) -- version-tracking the rendered DAGs. 
-- [Contracts](/guide/advanced/contracts) -- type-checking the rendered DAG's edges. diff --git a/docs/pages/guide/advanced/versioning.mdx b/docs/pages/guide/advanced/versioning.mdx deleted file mode 100644 index bb5e92c..0000000 --- a/docs/pages/guide/advanced/versioning.mdx +++ /dev/null @@ -1,441 +0,0 @@ ---- -sidebar_position: 16 -title: Versioning -description: Track DAG mutations with an append-only log, time-travel to any historical version, diff changes, and fork branches. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Versioning - -As pipelines evolve, you often need to answer questions like "What did this DAG look like last week?" or "What changed between version 12 and version 15?". dagron's `VersionedDAG` wraps a standard DAG with an **append-only mutation log** that records every structural change. You can time-travel to any historical version, diff any two versions, and fork independent branches from any point. - - B3 - end - subgraph "v5 (fork from v3)" - A5["A"] - B5["B"] - C5["C"] - A5 --> B5 - A5 --> C5 - end`} - caption="A VersionedDAG accumulates mutations. You can fork at any version to create an independent branch." -/> - ---- - -## Core Concepts - -| Class | Role | -|---|---| -| [`VersionedDAG`](/api/utilities/versioning#versioneddag) | Wraps a DAG with an append-only mutation log. Every mutation increments the version counter. | -| [`Mutation`](/api/utilities/versioning#mutation) | A single recorded change: version number, mutation type, arguments, and timestamp. | -| [`MutationType`](/api/utilities/versioning#mutationtype) | Enum of mutation types: `ADD_NODE`, `REMOVE_NODE`, `ADD_EDGE`, `REMOVE_EDGE`, `SET_PAYLOAD`, `SET_METADATA`. 
| - ---- - -## Creating a VersionedDAG - -### Starting Empty - -```python -from dagron.versioning import VersionedDAG - -vdag = VersionedDAG() -print(vdag.version) # 0 -- no mutations yet -``` - -### Wrapping an Existing DAG - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_edge("extract", "transform") - .build() -) - -vdag = VersionedDAG(dag) -print(vdag.version) # 0 -- initial state, no tracked mutations yet -``` - -Note that mutations made to the DAG *before* wrapping are not tracked. The version log starts from the moment you create the `VersionedDAG`. - ---- - -## Making Mutations - -Every structural change is recorded and increments the version: - -```python -vdag = VersionedDAG() - -vdag.add_node("extract") # version 1 -vdag.add_node("transform") # version 2 -vdag.add_node("load") # version 3 -vdag.add_edge("extract", "transform") # version 4 -vdag.add_edge("transform", "load") # version 5 - -print(vdag.version) # 5 -``` - -All mutation methods mirror the standard DAG API: - -```python -vdag.add_node("name", payload=..., metadata=...) -vdag.remove_node("name") -vdag.add_edge("from", "to", weight=..., label=...) -vdag.remove_edge("from", "to") -vdag.set_payload("name", payload) -vdag.set_metadata("name", metadata) -``` - -Each call is recorded with its full arguments and a timestamp. - ---- - -## Accessing the Current DAG - -The `.dag` property returns the underlying DAG for read-only access: - -```python -dag = vdag.dag -print(dag.node_count()) # 3 -print(dag.edge_count()) # 2 - -for node in dag.topological_sort(): - print(node.name) -``` - -You can pass `vdag.dag` to any executor or analysis function that expects a `DAG`. 
- ---- - -## Time-Travel with at_version() - -Reconstruct the DAG as it was at any historical version: - -```python -vdag = VersionedDAG() -vdag.add_node("a") # v1 -vdag.add_node("b") # v2 -vdag.add_edge("a", "b") # v3 -vdag.add_node("c") # v4 -vdag.add_edge("b", "c") # v5 - -# Go back to version 2 (only nodes "a" and "b", no edges) -dag_v2 = vdag.at_version(2) -print(dag_v2.node_count()) # 2 -print(dag_v2.edge_count()) # 0 - -# Version 0 is the empty DAG -dag_v0 = vdag.at_version(0) -print(dag_v0.node_count()) # 0 -``` - -`at_version()` **replays** the mutation log up to the specified version, constructing a fresh DAG. The original `VersionedDAG` is not modified. - -### Version Bounds - -```python -try: - vdag.at_version(999) -except ValueError as e: - print(e) - # "Version 999 out of range [0, 5]." -``` - ---- - -## Diffing Versions - -Compare any two versions to see what changed: - -```python -diff = vdag.diff_versions(2, 5) - -print(f"Added nodes: {diff.added_nodes}") -print(f"Removed nodes: {diff.removed_nodes}") -print(f"Added edges: {diff.added_edges}") -print(f"Removed edges: {diff.removed_edges}") -``` - -This uses the Rust-side `DAG.diff()` method, which produces a `GraphDiff` object with sets of added/removed nodes and edges. - -|"added"| B_v5 - B_v5 -->|"added"| C_v5 - end - style C_v5 fill:#c8e6c9,stroke:#2e7d32`} - caption="Diffing v2 and v5 shows node 'c' and two edges were added." 
-/> - ---- - -## Mutation History - -### Full History - -```python -for mutation in vdag.history(): - print( - f" v{mutation.version}: {mutation.mutation_type.value} " - f"args={mutation.args} " - f"at={mutation.timestamp:.0f}" - ) -``` - -Output: - -``` - v1: add_node args={'name': 'a', 'payload': None, 'metadata': None} at=1709400000 - v2: add_node args={'name': 'b', 'payload': None, 'metadata': None} at=1709400001 - v3: add_edge args={'from_node': 'a', 'to_node': 'b', 'weight': None, 'label': None} at=1709400001 - v4: add_node args={'name': 'c', 'payload': None, 'metadata': None} at=1709400002 - v5: add_edge args={'from_node': 'b', 'to_node': 'c', 'weight': None, 'label': None} at=1709400002 -``` - -### History Since a Version - -Get only the mutations after a specific version: - -```python -recent = vdag.history_since(3) -for mutation in recent: - print(f" v{mutation.version}: {mutation.mutation_type.value}") -# v4: add_node -# v5: add_edge -``` - -This is useful for incremental synchronization -- fetch only the mutations that happened since the last sync. - ---- - -## Mutation Dataclass - -Each `Mutation` is a frozen dataclass: - -| Field | Type | Description | -|---|---|---| -| `version` | `int` | The version number this mutation created (1-based). | -| `mutation_type` | `MutationType` | One of `ADD_NODE`, `REMOVE_NODE`, `ADD_EDGE`, `REMOVE_EDGE`, `SET_PAYLOAD`, `SET_METADATA`. | -| `args` | `dict[str, Any]` | The arguments passed to the mutation method. | -| `timestamp` | `float` | Unix timestamp when the mutation was recorded. | - ---- - -## MutationType Enum - -```python -from dagron.versioning import MutationType - -MutationType.ADD_NODE # "add_node" -MutationType.REMOVE_NODE # "remove_node" -MutationType.ADD_EDGE # "add_edge" -MutationType.REMOVE_EDGE # "remove_edge" -MutationType.SET_PAYLOAD # "set_payload" -MutationType.SET_METADATA # "set_metadata" -``` - ---- - -## Forking - -Create an independent copy of the `VersionedDAG` at any version. 
The fork has its own mutation log and does not affect the original: - -```python -vdag = VersionedDAG() -vdag.add_node("a") # v1 -vdag.add_node("b") # v2 -vdag.add_edge("a", "b") # v3 -vdag.add_node("c") # v4 - -# Fork from version 3 (before "c" was added) -fork = vdag.fork(at_version=3) -print(fork.version) # 3 -print(fork.dag.node_count()) # 2 (a, b) - -# Mutate the fork independently -fork.add_node("d") -fork.add_edge("a", "d") -print(fork.version) # 5 - -# Original is unaffected -print(vdag.version) # 4 -print(vdag.dag.node_count()) # 3 (a, b, c) -``` - -### Fork at Current Version - -Call `fork()` without arguments to fork at the current version: - -```python -fork = vdag.fork() -print(fork.version) # same as vdag.version -``` - ---- - -## Use Cases - -### Pipeline Auditing - -Record every change to a production pipeline and audit the history later: - -```python -vdag = VersionedDAG() - -# Day 1: initial pipeline -vdag.add_node("ingest") -vdag.add_node("transform") -vdag.add_edge("ingest", "transform") - -# Day 2: add a new output -vdag.add_node("export_csv") -vdag.add_edge("transform", "export_csv") - -# Day 3: add monitoring -vdag.add_node("monitor") -vdag.add_edge("transform", "monitor") - -# Audit: what was the pipeline on day 1? -dag_day1 = vdag.at_version(2) -print(f"Day 1 nodes: {[n.name for n in dag_day1.topological_sort()]}") - -# What changed between day 1 and day 3? 
-diff = vdag.diff_versions(2, 5) -print(f"Added: {diff.added_nodes}") -``` - -### A/B Testing Pipeline Variants - -Fork a pipeline and try different approaches: - -```python -# Base pipeline -base = VersionedDAG() -base.add_node("data") -base.add_node("features") -base.add_edge("data", "features") - -# Variant A: XGBoost -variant_a = base.fork() -variant_a.add_node("xgboost") -variant_a.add_edge("features", "xgboost") - -# Variant B: Neural Network -variant_b = base.fork() -variant_b.add_node("neural_net") -variant_b.add_edge("features", "neural_net") - -# Execute both variants -executor_a = dagron.DAGExecutor(variant_a.dag) -executor_b = dagron.DAGExecutor(variant_b.dag) -``` - -### Rollback - -If a mutation causes problems, reconstruct the previous version and create a new `VersionedDAG` from it: - -```python -# Something went wrong after version 10 -good_dag = vdag.at_version(10) - -# Start fresh from the known-good state -new_vdag = VersionedDAG(good_dag) -# Continue making mutations on the recovered DAG -``` - ---- - -## Serialization Pattern - -While `VersionedDAG` does not have built-in serialization, the mutation log is easy to serialize: - -```python -import json - -# Serialize the mutation log -log_data = [] -for m in vdag.history(): - log_data.append({ - "version": m.version, - "type": m.mutation_type.value, - "args": m.args, - "timestamp": m.timestamp, - }) - -with open("pipeline_history.json", "w") as f: - json.dump(log_data, f, indent=2, default=str) - -# Deserialize and replay -from dagron.versioning import VersionedDAG, MutationType - -vdag_restored = VersionedDAG() -with open("pipeline_history.json") as f: - log_data = json.load(f) - -for entry in log_data: - mt = MutationType(entry["type"]) - args = entry["args"] - if mt == MutationType.ADD_NODE: - vdag_restored.add_node(args["name"], payload=args.get("payload"), metadata=args.get("metadata")) - elif mt == MutationType.ADD_EDGE: - vdag_restored.add_edge(args["from_node"], args["to_node"], 
weight=args.get("weight"), label=args.get("label")) - elif mt == MutationType.REMOVE_NODE: - vdag_restored.remove_node(args["name"]) - elif mt == MutationType.REMOVE_EDGE: - vdag_restored.remove_edge(args["from_node"], args["to_node"]) - elif mt == MutationType.SET_PAYLOAD: - vdag_restored.set_payload(args["name"], args.get("payload")) - elif mt == MutationType.SET_METADATA: - vdag_restored.set_metadata(args["name"], args.get("metadata")) -``` - ---- - -## Best Practices - -1. **Use `VersionedDAG` during development.** Wrap your DAG early so you have a full audit trail from the start. - -2. **Serialize the mutation log to version control.** Store `pipeline_history.json` alongside your code to track pipeline structure changes in git. - -3. **Use `diff_versions()` in code review.** Compare the pipeline before and after a change to verify that only the intended modifications were made. - -4. **Fork for experiments.** Instead of modifying the main pipeline, fork it, try your changes, and merge back only if they work. - -5. **Use `history_since()` for incremental sync.** If you are syncing pipeline state across services, send only the mutations since the last known version. - ---- - -## Related - -- [API Reference: Versioning](/api/utilities/versioning) -- full API documentation. -- [Building DAGs](/guide/core-concepts/building-dags) -- the underlying DAG API that `VersionedDAG` wraps. -- [Templates](/guide/advanced/templates) -- parameterized DAG generation. -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) -- analysis and querying of the versioned graph. diff --git a/docs/pages/guide/architecture.mdx b/docs/pages/guide/architecture.mdx deleted file mode 100644 index 8df395f..0000000 --- a/docs/pages/guide/architecture.mdx +++ /dev/null @@ -1,159 +0,0 @@ ---- -sidebar_position: 99 -title: Architecture -description: How dagron is built — Rust core, PyO3 bindings, crate structure, caching, and the optional web dashboard. 
---- - -# Architecture - -dagron is a layered system: a Rust core providing the graph data structure and algorithms, a PyO3 binding layer that exposes everything to Python, and an optional web dashboard for real-time execution monitoring. - -```mermaid -graph TB - PY["Python API
(dagron package)"] --> PYO3["PyO3 Bindings
(dagron-py crate)"] - PYO3 --> CORE["Rust Core
(dagron-core crate)"] - CORE --> PG["petgraph::StableGraph"] - PYO3 -.-> UI["Dashboard
(dagron-ui crate, optional)"] - UI --> AXUM["Axum + Tokio"] -``` - ---- - -## Crate Structure - -| Crate | Path | Purpose | -|-------|------|---------| -| `dagron-core` | `crates/dagron-core/` | Graph data structure, algorithms, serialization, scheduling — pure Rust, no Python dependency | -| `dagron-py` | `crates/dagron-py/` | PyO3 bindings wrapping `dagron-core` as a Python extension module | -| `dagron-ui` | `crates/dagron-ui/` | Optional Axum-based web dashboard for live execution visualization | - -The Python package (`py_src/dagron/`) adds higher-level execution strategies (incremental, checkpoint, caching, distributed), the builder pattern, analysis utilities, and plugin system — all in pure Python, calling into the Rust core for graph operations. - ---- - -## Rust Core Internals - -### DAG\ - -The central type is `DAG

` in `dagron-core`: - -```rust -pub struct DAG

{ - graph: StableGraph, EdgeData, Directed, u32>, - name_to_index: AHashMap, - generation: u64, - cache: RwLock, -} -``` - -- **`graph`** — petgraph's `StableGraph` with arena-allocated node/edge storage. `StableGraph` preserves indices across removals, which is critical for caching correctness. -- **`name_to_index`** — `AHashMap` for O(1) string-to-index lookups. ahash is a fast, non-cryptographic hash map that outperforms the standard `HashMap`. -- **`generation`** — monotonically increasing counter, bumped on every structural mutation (add/remove node/edge). -- **`cache`** — `RwLock` storing cached results for expensive computations. - -### Generational Cache - -The cache avoids recomputing expensive results (topological sorts, roots, leaves) when the graph hasn't changed: - -```rust -struct DagCache { - gen: u64, // generation when cache was populated - hits: u64, - misses: u64, - roots: Option>, - leaves: Option>, - topo_sort: Option>, - topo_sort_dfs: Option>, - topo_levels: Option>>, -} -``` - -**How it works:** -1. Every mutation to the DAG increments `generation` -2. When a cached result is requested, the cache compares its stored `gen` against the DAG's current `generation` -3. On mismatch, all cached entries are invalidated (set to `None`) -4. On match, the cached result is returned directly - -This gives O(1) amortized cost for repeated queries on an unchanged graph. 
- -### Algorithm Modules - -| Module | Algorithms | -|--------|-----------| -| `toposort` | Kahn's algorithm, DFS-based sort, topological levels, all orderings | -| `reachability` | Bitset-based reachability index — O(V*E/64) build, O(1) `can_reach` queries | -| `scheduling` | Critical path, bottom-level computation, max-parallelism and resource-constrained plans | -| `partition` | Level-based, size-balanced, and communication-minimizing graph partitioning | -| `paths` | All paths (DFS), shortest path (BFS), longest path | -| `cycle` | Tarjan's SCC for cycle detection, `would_create_cycle` for edge insertion checks | -| `dominators` | Immediate dominators via Cooper-Harvey-Kennedy algorithm | -| `transforms` | Transitive reduction, transitive closure | -| `incremental` | Dirty-set propagation (BFS from changed nodes), change provenance tracking | -| `traversal` | Ancestors, descendants | -| `diff` | Structural graph diffing | - ---- - -## PyO3 Boundary - -`dagron-py` wraps `DAG` as `PyDAG`, where `PyNodePayload` holds an `Option>` for arbitrary Python objects as node payloads. - -### GIL Release Points - -Every CPU-intensive Rust operation releases the Python GIL via `py.allow_threads()`: - -- **Topological sort** — all variants (Kahn, DFS, levels) -- **Ancestors / descendants** — graph traversals -- **Reachability** — index building and queries -- **Scheduling** — execution planning, critical path -- **Validation** — cycle detection -- **Serialization** — JSON, bincode, DOT export -- **Transforms** — transitive reduction/closure, dominator tree -- **Partitioning** — all three strategies -- **Pattern matching** — regex/glob node filtering -- **Incremental** — dirty set computation, change provenance -- **Stats** — graph statistics computation - -This means Python threads are not blocked while Rust computes. In multi-threaded executors, multiple graph operations can genuinely run in parallel. 
- -### Exception Mapping - -Rust errors are mapped to a Python exception hierarchy: - -``` -DagronError (base) - +-- CycleError - +-- NodeNotFoundError - +-- DuplicateNodeError - +-- EdgeNotFoundError - +-- GraphError -``` - ---- - -## Dashboard (Optional) - -The `dagron-ui` crate provides a live web dashboard for monitoring DAG execution. It is feature-gated and only built when `--features dashboard` is passed to maturin. - -### Architecture - -```mermaid -graph LR - EX["Python Executor"] -->|"hooks"| STATE["DashboardState
(Arc<RwLock>)"] - STATE -->|"SSE broadcast"| SERVER["Axum Server
(background thread)"] - SERVER -->|"HTML + SSE"| BROWSER["Browser"] -``` - -1. **Startup:** `DashboardHandle::start(host, port)` spawns a background OS thread running a Tokio runtime with an Axum server -2. **State:** `DashboardState` is shared via `Arc>` between the executor and the server thread -3. **Hooks:** The executor calls `node_started()`, `node_finished()`, `execution_finished()` to update state -4. **SSE:** State changes are broadcast to all connected browsers via Server-Sent Events -5. **Endpoints:** - - `GET /` — single-file HTML/CSS/JS dashboard (embedded at compile time) - - `GET /api/state` — JSON snapshot of current execution state - - `GET /api/events` — SSE stream for real-time updates - - `GET /api/profile` — execution profile statistics - - `POST /api/gates/{name}/approve` — approve an approval gate - - `POST /api/gates/{name}/reject` — reject an approval gate - -The dashboard requires no external build tools — the entire UI is a single HTML file embedded in the Rust binary. diff --git a/docs/pages/guide/benchmarks.mdx b/docs/pages/guide/benchmarks.mdx deleted file mode 100644 index 79ea55c..0000000 --- a/docs/pages/guide/benchmarks.mdx +++ /dev/null @@ -1,168 +0,0 @@ ---- -sidebar_position: 3 -title: Benchmarks -description: Performance benchmarks comparing dagron's Rust core against NetworkX on common DAG operations. ---- - -# Benchmarks - -dagron's core graph engine is written in Rust (petgraph + PyO3), giving it a significant performance advantage over pure-Python graph libraries. This page presents representative benchmarks comparing dagron against [NetworkX](https://networkx.org/) on equivalent operations. - -:::info Reproduce these results yourself -```bash -uv pip install pytest-benchmark networkx -uv run python -m pytest tests/python/test_benchmarks.py --benchmark-only --benchmark-columns=mean,stddev,rounds -q -``` -::: - -All Python benchmarks below were measured with `pytest-benchmark` on a single machine. 
Rust-only benchmarks use [Criterion](https://bheisler.github.io/criterion.rs/). Numbers will vary by hardware — treat ratios as the meaningful signal. - -**Hardware:** AMD Ryzen / Intel Core (modern x86_64), Linux, Python 3.12, dagron 0.1.0. -**Last measured:** March 2026. - ---- - -## dagron vs NetworkX (Python API) - -### Construction (10K nodes) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Chain (10K nodes, 9,999 edges) | 7.66 ms | 21.04 ms | **2.7x** | -| Wide (1,000 roots x 10 depth) | 7.05 ms | 22.45 ms | **3.2x** | - -### Topological Sort (10K-node chain) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Topological sort | 880 us | 6,668 us | **7.6x** | - -### Ancestors / Descendants (mid-node on 10K chain) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Ancestors of node 5000 | 562 us | 1,955 us | **3.5x** | -| Descendants of node 5000 | 548 us | 2,053 us | **3.7x** | - -### Cycle Detection / Validation (10K chain) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Validate (acyclic check) | 535 us | 6,718 us | **12.6x** | - -### JSON Serialization (1K-node chain) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Serialize to JSON | 258 us | 823 us | **3.2x** | - -### Reachability (5K-node chain) - -| Benchmark | dagron | NetworkX | -|-----------|--------|----------| -| Build reachability index | 1,818 us | N/A (no equivalent) | -| Batch query (10 pairs) | 3.2 us | 12,637 us (nx.has_path) | - -Once the reachability index is built, dagron answers batch reachability queries **~3,900x faster** than NetworkX's `has_path` (which re-traverses the graph each call). 
- -### BFS / Topological Levels (10K chain) - -| Benchmark | dagron | NetworkX | Speedup | -|-----------|--------|----------|---------| -| Topological levels | 2,045 us | 9,312 us | **4.6x** | - ---- - -## Why the performance gap? - -Three factors drive dagron's advantage: - -1. **Rust core releases the GIL.** Every expensive operation in dagron runs inside `py.allow_threads()`, so the Rust code executes without Python interpreter overhead. -2. **petgraph's adjacency list is cache-friendly.** Nodes and edges are stored in contiguous arena-allocated vectors, giving excellent CPU cache behavior during traversals. -3. **ahash beats Python dict overhead.** Node name lookups use `AHashMap` — a fast, non-cryptographic hash map — instead of Python's general-purpose `dict`. - ---- - -## Rust-Only Numbers (Criterion) - -These benchmarks run entirely in Rust, showing the pure performance ceiling before any PyO3 overhead. - -### Construction - -| Benchmark | Time | -|-----------|------| -| Chain 1K | 252 us | -| Chain 10K | 2.73 ms | -| Chain 100K | 48.6 ms | -| Wide 1,000x10 | 2.96 ms | -| Diamond 10x10 | 111 us | - -### Topological Sort (10K nodes) - -| Benchmark | Time | -|-----------|------| -| Kahn (chain) | 332 us | -| Kahn (wide) | 332 us | -| DFS (chain) | 336 us | -| DFS (wide) | 337 us | -| Levels (chain) | 617 us | -| Levels (wide) | 331 us | - -### Cycle Detection (10K chain) - -| Benchmark | Time | -|-----------|------| -| validate (acyclic) | 466 us | -| would_create_cycle | 501 us | - -### Reachability (10K chain) - -| Benchmark | Time | -|-----------|------| -| Build index | 3.26 ms | -| can_reach (single query) | 6.8 ns | -| reachable_from | 11.5 us | -| ancestors_of | 27.4 us | - -### Serialization - -| Benchmark | Time | -|-----------|------| -| to_json (1K) | 241 us | -| from_json (1K) | 481 us | -| to_bincode (1K) | 57.9 us | -| from_bincode (1K) | 305 us | -| to_bincode (10K) | 617 us | -| from_bincode (10K) | 3.30 ms | -| to_bincode (100K) | 6.60 ms | 
-| to_dot (1K) | 48.0 us | - -### Scheduling (1K chain) - -| Benchmark | Time | -|-----------|------| -| Max parallelism plan | 281 us | -| Resource-constrained (4 workers) | 450 us | -| Critical path | 133 us | - -### Transforms (1K chain) - -| Benchmark | Time | -|-----------|------| -| Transitive reduction | 268 us | -| Snapshot (deep clone) | 70 us | - -### Introspection (10K chain) - -| Benchmark | Time | -|-----------|------| -| Ancestors (mid-node) | 273 us | -| Descendants (mid-node) | 274 us | -| Roots | 37 ns | -| Leaves | 39 ns | - -:::tip Run Rust benchmarks -```bash -cargo bench --bench graph_bench -``` -::: diff --git a/docs/pages/guide/cookbook.mdx b/docs/pages/guide/cookbook.mdx deleted file mode 100644 index 6fb9a10..0000000 --- a/docs/pages/guide/cookbook.mdx +++ /dev/null @@ -1,344 +0,0 @@ ---- -sidebar_position: 4 -title: Cookbook -description: Complete real-world examples — build systems, spreadsheet engines, ETL pipelines, and CI/CD schedulers. ---- - -# Cookbook - -Four complete examples showing how to use dagron in real-world scenarios. Each includes full code and a DAG diagram. - ---- - -## 1. Build System Dependency Resolver - -Model file targets as nodes, detect stale targets via incremental execution, and skip unchanged targets automatically. 
- -```mermaid -graph LR - A[parse_config] --> B[compile_lib] - A --> C[compile_utils] - B --> D[link_binary] - C --> D - D --> E[run_tests] -``` - -```python -import dagron -from dagron.execution import IncrementalExecutor - -# Define build targets as a DAG -dag = ( - dagron.DAG.builder() - .add_node("parse_config", metadata={"output": "config.json"}) - .add_node("compile_lib", metadata={"output": "lib.o"}) - .add_node("compile_utils", metadata={"output": "utils.o"}) - .add_node("link_binary", metadata={"output": "app"}) - .add_node("run_tests", metadata={"output": "test_report.xml"}) - .add_edge("parse_config", "compile_lib") - .add_edge("parse_config", "compile_utils") - .add_edge("compile_lib", "link_binary") - .add_edge("compile_utils", "link_binary") - .add_edge("link_binary", "run_tests") - .build() -) - -# Simulated build functions -def parse_config(): - print(" Parsing config...") - return {"version": "1.0", "flags": ["-O2"]} - -def compile_lib(): - print(" Compiling lib...") - return "lib.o" - -def compile_utils(): - print(" Compiling utils...") - return "utils.o" - -def link_binary(): - print(" Linking binary...") - return "app" - -def run_tests(): - print(" Running tests...") - return "PASSED" - -tasks = { - "parse_config": parse_config, - "compile_lib": compile_lib, - "compile_utils": compile_utils, - "link_binary": link_binary, - "run_tests": run_tests, -} - -# First build: runs everything -executor = IncrementalExecutor(dag) -print("=== First build ===") -result = executor.execute(tasks) - -# Second build: only re-runs if inputs changed -print("\n=== Incremental rebuild (nothing changed) ===") -result = executor.execute(tasks) - -# Mark a node as changed and rebuild -print("\n=== Incremental rebuild (compile_lib changed) ===") -executor.mark_changed("compile_lib") -result = executor.execute(tasks) -``` - -:::tip What this demonstrates -**Incremental execution** — only changed nodes and their downstream dependents re-execute. 
The second build is a no-op; the third only re-runs `compile_lib`, `link_binary`, and `run_tests`. -::: - ---- - -## 2. Spreadsheet Formula Engine - -Cells are nodes. Formula dependencies are edges. When a cell changes, only dependent cells recalculate. - -```mermaid -graph TD - A1[A1: Price = 100] --> C1[C1: Total = A1 * B1] - B1[B1: Qty = 5] --> C1 - C1 --> D1[D1: Tax = C1 * 0.08] - C1 --> E1[E1: Discount = C1 * 0.1] - D1 --> F1[F1: Final = C1 + D1 - E1] - E1 --> F1 - C1 --> F1 -``` - -```python -import dagron -from dagron.execution import IncrementalExecutor - -# Build the cell dependency graph -dag = ( - dagron.DAG.builder() - .add_node("A1") # Price - .add_node("B1") # Quantity - .add_node("C1") # Total = A1 * B1 - .add_node("D1") # Tax = C1 * 0.08 - .add_node("E1") # Discount = C1 * 0.1 - .add_node("F1") # Final = C1 + D1 - E1 - .add_edge("A1", "C1") - .add_edge("B1", "C1") - .add_edge("C1", "D1") - .add_edge("C1", "E1") - .add_edge("C1", "F1") - .add_edge("D1", "F1") - .add_edge("E1", "F1") - .build() -) - -# Cell values (mutable state) -cells = {"A1": 100, "B1": 5} - -def eval_cell(name): - """Evaluate a single cell.""" - if name == "A1": - return cells["A1"] - elif name == "B1": - return cells["B1"] - elif name == "C1": - cells["C1"] = cells["A1"] * cells["B1"] - return cells["C1"] - elif name == "D1": - cells["D1"] = cells["C1"] * 0.08 - return cells["D1"] - elif name == "E1": - cells["E1"] = cells["C1"] * 0.1 - return cells["E1"] - elif name == "F1": - cells["F1"] = cells["C1"] + cells["D1"] - cells["E1"] - return cells["F1"] - -tasks = {name: (lambda n=name: eval_cell(n)) for name in dag.nodes()} - -# Initial calculation -executor = IncrementalExecutor(dag) -result = executor.execute(tasks) -print(f"Initial: Price={cells['A1']}, Qty={cells['B1']}, Final={cells['F1']}") - -# User edits A1 (price) — only C1, D1, E1, F1 recalculate -cells["A1"] = 150 -executor.mark_changed("A1") -result = executor.execute(tasks) -print(f"After edit: Price={cells['A1']}, 
Qty={cells['B1']}, Final={cells['F1']}") -``` - -:::tip What this demonstrates -**Change propagation** — editing cell A1 triggers recalculation of only the cells that depend on it (C1, D1, E1, F1). B1 is untouched. -::: - ---- - -## 3. ETL Pipeline with Checkpointing - -A multi-stage pipeline that writes checkpoints to disk, simulates a crash, and resumes from the last checkpoint. - -```mermaid -graph LR - E[extract] --> V[validate] - V --> T[transform] - T --> A[aggregate] - A --> L[load] -``` - -```python -import dagron -from dagron.execution import CheckpointExecutor - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("validate") - .add_node("transform") - .add_node("aggregate") - .add_node("load") - .add_edge("extract", "validate") - .add_edge("validate", "transform") - .add_edge("transform", "aggregate") - .add_edge("aggregate", "load") - .build() -) - -call_count = {"transform": 0} - -def extract(): - print(" Extracting 10,000 rows from source...") - return list(range(10_000)) - -def validate(): - print(" Validating schema...") - return True - -def transform(): - call_count["transform"] += 1 - if call_count["transform"] == 1: - print(" Transforming... 
CRASH!") - raise RuntimeError("Simulated crash during transform") - print(" Transforming data...") - return "transformed" - -def aggregate(): - print(" Aggregating results...") - return {"total": 10_000, "valid": 9_950} - -def load(): - print(" Loading into warehouse...") - return "success" - -tasks = { - "extract": extract, - "validate": validate, - "transform": transform, - "aggregate": aggregate, - "load": load, -} - -executor = CheckpointExecutor(dag, checkpoint_dir="/tmp/dagron_checkpoints") - -# First run: crashes during transform -print("=== Run 1 (will crash) ===") -try: - result = executor.execute(tasks) -except Exception as e: - print(f" Pipeline failed: {e}") - -# Second run: resumes from checkpoint, skips extract + validate -print("\n=== Run 2 (resume from checkpoint) ===") -result = executor.execute(tasks) -print(f" Pipeline completed: {result}") -``` - -:::tip What this demonstrates -**Checkpointing** — the first run completes `extract` and `validate` before crashing. The second run skips those stages and resumes from `transform`. No wasted work. -::: - ---- - -## 4. CI/CD Task Scheduler - -Lint, test, build, and deploy with resource constraints and an approval gate before production deployment. 
- -```mermaid -graph LR - L[lint] --> B[build] - T[test_unit] --> B - T2[test_integration] --> B - B --> S[staging_deploy] - S --> G{approval_gate} - G --> P[prod_deploy] -``` - -```python -import dagron -from dagron.execution import ResourceAwareExecutor - -dag = ( - dagron.DAG.builder() - .add_node("lint", metadata={"cpu": 1}) - .add_node("test_unit", metadata={"cpu": 2}) - .add_node("test_integration", metadata={"cpu": 2}) - .add_node("build", metadata={"cpu": 2}) - .add_node("staging_deploy", metadata={"cpu": 1}) - .add_node("approval_gate", metadata={"gate": True}) - .add_node("prod_deploy", metadata={"cpu": 1}) - .add_edge("lint", "build") - .add_edge("test_unit", "build") - .add_edge("test_integration", "build") - .add_edge("build", "staging_deploy") - .add_edge("staging_deploy", "approval_gate") - .add_edge("approval_gate", "prod_deploy") - .build() -) - -def lint(): - print(" [1 CPU] Linting...") - return "ok" - -def test_unit(): - print(" [2 CPU] Running unit tests...") - return "148 passed" - -def test_integration(): - print(" [2 CPU] Running integration tests...") - return "32 passed" - -def build(): - print(" [2 CPU] Building Docker image...") - return "sha256:abc123" - -def staging_deploy(): - print(" [1 CPU] Deploying to staging...") - return "https://staging.example.com" - -def approval_gate(): - print(" Approval gate: auto-approved for demo") - return True - -def prod_deploy(): - print(" [1 CPU] Deploying to production...") - return "https://example.com" - -tasks = { - "lint": lint, - "test_unit": test_unit, - "test_integration": test_integration, - "build": build, - "staging_deploy": staging_deploy, - "approval_gate": approval_gate, - "prod_deploy": prod_deploy, -} - -# Execute with a 4-CPU constraint — lint + test_unit run in parallel, -# test_integration waits for a free slot -executor = ResourceAwareExecutor(dag, max_workers=4) -result = executor.execute(tasks) -print(f"\nPipeline result: {result}") -``` - -:::tip What this demonstrates 
-**Resource-aware scheduling** — with 4 CPU slots, `lint` (1 CPU) and `test_unit` (2 CPU) run in parallel (3 slots used). `test_integration` (2 CPU) waits until a slot frees up. The approval gate pauses execution until approved. -::: diff --git a/docs/pages/guide/core-concepts/building-dags.mdx b/docs/pages/guide/core-concepts/building-dags.mdx deleted file mode 100644 index a7005ff..0000000 --- a/docs/pages/guide/core-concepts/building-dags.mdx +++ /dev/null @@ -1,520 +0,0 @@ ---- -sidebar_position: 2 -title: Building DAGs -description: A comprehensive guide to constructing directed acyclic graphs in dagron — fluent builder, direct mutation, bulk operations, payloads, and metadata. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Building DAGs - -dagron offers multiple ways to construct a directed acyclic graph. This guide covers -every construction pattern, from quick one-liners to advanced builder configurations -with payloads and metadata. - -## Construction patterns at a glance - -| Pattern | Best for | Example | -|---------|----------|---------| -| `DAG.builder()` | Most use cases — fluent, validated | `DAG.builder().add_node("a").build()` | -| `DAG()` + mutations | Incremental / dynamic construction | `dag.add_node("a")` | -| Bulk helpers | Large graphs from lists | `builder.add_nodes([...]).add_edges([...])` | -| `Pipeline` | Linear function chains | `Pipeline(tasks=[fn1, fn2])` | - -## The fluent builder - -[`DAG.builder()`](/api/core/builder) returns a `DAGBuilder` that chains method -calls and validates the graph when you call `.build()`. 
- -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("fetch") - .add_node("parse") - .add_node("validate") - .add_node("store") - .add_edge("fetch", "parse") - .add_edge("parse", "validate") - .add_edge("validate", "store") - .build() -) -``` - - parse --> validate --> store`} - caption="Linear four-node pipeline created with the builder." -/> - -### Cycle detection - -The builder rejects cycles at `.build()` time: - -```python -try: - dagron.DAG.builder() \ - .add_node("a").add_node("b").add_node("c") \ - .add_edge("a", "b") \ - .add_edge("b", "c") \ - .add_edge("c", "a") \ - .build() -except dagron.CycleError as e: - print(e) # Cycle detected: c -> a -``` - -This guarantee means that any `DAG` instance you hold is always valid. - -### Implicit node creation - -When you add an edge, both endpoints are created automatically if they do not -already exist: - -```python -dag = ( - dagron.DAG.builder() - .add_edge("a", "b") - .add_edge("b", "c") - .build() -) -print(dag.node_count()) # 3 -``` - -This shorthand is convenient for small graphs where you do not need to attach -metadata to every node. - -## Direct construction - -If you prefer an imperative style, create a bare `DAG` and mutate it: - -```python -dag = dagron.DAG() - -dag.add_node("ingest") -dag.add_node("clean") -dag.add_node("enrich") -dag.add_node("publish") - -dag.add_edge("ingest", "clean") -dag.add_edge("clean", "enrich") -dag.add_edge("enrich", "publish") -``` - -Direct mutation is useful when the graph structure is determined at runtime — for -example, when reading a config file or discovering tasks from a plugin registry. - -### Checking membership - -```python -print(dag.has_node("clean")) # True -print(dag.has_edge("clean", "enrich")) # True -``` - -### Removing nodes and edges - -```python -dag.remove_edge("enrich", "publish") -dag.remove_node("publish") - -print(dag.node_count()) # 3 -print(dag.edge_count()) # 2 -``` - -Removing a node also removes all edges connected to it. 
- -## Bulk operations - -When building large graphs, individual `add_node` / `add_edge` calls become -verbose. Use the bulk helpers instead: - -```python -dag = ( - dagron.DAG.builder() - .add_nodes(["extract", "transform_a", "transform_b", "merge", "load"]) - .add_edges([ - ("extract", "transform_a"), - ("extract", "transform_b"), - ("transform_a", "merge"), - ("transform_b", "merge"), - ("merge", "load"), - ]) - .build() -) -``` - - transform_a --> merge - extract --> transform_b --> merge - merge --> load`} - caption="Fan-out / fan-in graph created with bulk helpers." -/> - -### Building from data - -A common pattern is constructing the graph from a list of records (e.g., rows -from a database or lines in a YAML file): - -```python -records = [ - {"name": "extract", "depends_on": []}, - {"name": "transform", "depends_on": ["extract"]}, - {"name": "load", "depends_on": ["transform"]}, -] - -builder = dagron.DAG.builder() -for rec in records: - builder.add_node(rec["name"]) -for rec in records: - for dep in rec["depends_on"]: - builder.add_edge(dep, rec["name"]) -dag = builder.build() -``` - -This makes it straightforward to drive graph construction from external -configuration. - -## Node payloads - -Every node can carry an arbitrary Python object called a **payload**. Payloads -are useful for attaching configuration, metadata, or cost hints without -polluting the task functions. 
- -```python -dag = dagron.DAG() -dag.add_node("train", payload={"epochs": 10, "lr": 0.001}) -dag.add_node("evaluate", payload={"metrics": ["accuracy", "f1"]}) -dag.add_edge("train", "evaluate") -``` - -Retrieve payloads later: - -```python -# Access via the nodes iterator -for name in dag.nodes(): - print(name, dag.get_payload(name)) -``` - -### Payloads with the builder - -```python -dag = ( - dagron.DAG.builder() - .add_node("fetch", payload={"url": "https://api.example.com/data"}) - .add_node("parse", payload={"format": "json"}) - .add_edge("fetch", "parse") - .build() -) -``` - -## Contracts - -The builder supports **contracts** that declare the expected output type of a -node. Contracts are checked at build time and serve as living documentation of -your pipeline's data flow. - -```python -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("load") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .contract("extract", output=list) - .contract("transform", output=dict) - .contract("load", output=str) - .build() -) -``` - -See [Contracts](/api/analysis/contracts) in the API reference for details on -runtime enforcement and custom validators. - -## Understanding the graph structure - -Once a DAG is built, you have a rich set of read-only accessors: - -### Nodes and edges - -```python -print(list(dag.nodes())) # ['extract', 'transform', 'load'] -print(dag.node_count()) # 3 -print(dag.edge_count()) # 2 -``` - -### Roots and leaves - -**Roots** have no incoming edges (in-degree 0). **Leaves** have no outgoing -edges (out-degree 0). - -```python -print(dag.roots()) # ['extract'] -print(dag.leaves()) # ['load'] -``` - - transform --> load:::leaf`} - caption="Roots (green) and leaves (yellow) of a linear pipeline." 
-/> - -### Degree - -```python -print(dag.in_degree("transform")) # 1 -print(dag.out_degree("transform")) # 1 -print(dag.in_degree("extract")) # 0 (root) -print(dag.out_degree("load")) # 0 (leaf) -``` - -### Neighbourhood queries - -```python -# Direct parents -print(dag.predecessors("transform")) # ['extract'] - -# Direct children -print(dag.successors("transform")) # ['load'] - -# Transitive ancestors (all upstream nodes) -print(dag.ancestors("load")) # ['extract', 'transform'] - -# Transitive descendants (all downstream nodes) -print(dag.descendants("extract")) # ['transform', 'load'] -``` - -### Topological ordering - -A topological sort produces an ordering where every node appears after all of -its dependencies: - -```python -print(dag.topological_sort()) -# ['extract', 'transform', 'load'] -``` - -For parallel execution planning, **topological levels** group nodes that can run -concurrently: - -```python -for level, nodes in enumerate(dag.topological_levels()): - print(f"Level {level}: {nodes}") -# Level 0: ['extract'] -# Level 1: ['transform'] -# Level 2: ['load'] -``` - -See [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) for the full analysis -toolkit. - -## Diamond dependencies - -A common pattern in data pipelines is the **diamond** shape — one root fans out -to multiple branches that converge at a single join node: - -```python -dag = ( - dagron.DAG.builder() - .add_nodes(["source", "branch_a", "branch_b", "branch_c", "join"]) - .add_edges([ - ("source", "branch_a"), - ("source", "branch_b"), - ("source", "branch_c"), - ("branch_a", "join"), - ("branch_b", "join"), - ("branch_c", "join"), - ]) - .build() -) -``` - - branch_a --> join - source --> branch_b --> join - source --> branch_c --> join`} - caption="Diamond DAG. The three branches execute in parallel; join waits for all of them." -/> - -The executor automatically parallelises the three branches and synchronises -at the join node. 
- -## Multi-layer pipelines - -For complex ML workflows you might have many layers: - -```python -dag = ( - dagron.DAG.builder() - # Data layer - .add_node("raw_data") - .add_node("clean_data") - .add_edge("raw_data", "clean_data") - # Feature layer - .add_node("feature_numeric") - .add_node("feature_text") - .add_node("feature_image") - .add_edge("clean_data", "feature_numeric") - .add_edge("clean_data", "feature_text") - .add_edge("clean_data", "feature_image") - # Model layer - .add_node("train_model") - .add_edge("feature_numeric", "train_model") - .add_edge("feature_text", "train_model") - .add_edge("feature_image", "train_model") - # Evaluation layer - .add_node("evaluate") - .add_node("deploy") - .add_edge("train_model", "evaluate") - .add_edge("evaluate", "deploy") - .build() -) - -print(dag.node_count()) # 8 -print(dag.edge_count()) # 8 - -for level, nodes in enumerate(dag.topological_levels()): - print(f"Level {level}: {nodes}") -# Level 0: ['raw_data'] -# Level 1: ['clean_data'] -# Level 2: ['feature_numeric', 'feature_text', 'feature_image'] -# Level 3: ['train_model'] -# Level 4: ['evaluate'] -# Level 5: ['deploy'] -``` - - clean_data - clean_data --> feature_numeric - clean_data --> feature_text - clean_data --> feature_image - feature_numeric --> train_model - feature_text --> train_model - feature_image --> train_model - train_model --> evaluate - evaluate --> deploy`} - caption="Multi-layer ML pipeline with parallel feature extraction." 
-/> - -## Pattern matching on node names - -dagron supports finding nodes by name patterns, which is handy when you build -graphs programmatically with naming conventions: - -```python -# Glob-style matching -feature_nodes = dag.nodes_matching_glob("feature_*") -print(feature_nodes) # ['feature_numeric', 'feature_text', 'feature_image'] - -# Regex matching -data_nodes = dag.nodes_matching_regex(r".*_data$") -print(data_nodes) # ['raw_data', 'clean_data'] -``` - -## Graph statistics - -The [`stats()`](/api/core/core) method returns a summary of the graph: - -```python -s = dag.stats() -print(s) -# DAGStats(nodes=8, edges=8, roots=1, leaves=1, depth=5, width=3, density=0.143) -``` - -This is useful for logging and monitoring in production. - -## Composing multiple DAGs - -Large systems often consist of several independent DAGs that need to be wired -together. The [`compose()`](/api/core/core) function merges DAGs with namespaced -node names and cross-DAG connections: - -```python -etl_dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -ml_dag = ( - dagron.DAG.builder() - .add_edge("train", "evaluate") - .build() -) - -combined = dagron.compose( - dags={"etl": etl_dag, "ml": ml_dag}, - connections=[("etl/load", "ml/train")], -) - -print(list(combined.nodes())) -# ['etl/extract', 'etl/transform', 'etl/load', 'ml/train', 'ml/evaluate'] -``` - - etl/transform --> etl/load --> ml/train --> ml/evaluate`} - caption="Two DAGs composed into a single pipeline with a cross-DAG edge." -/> - -Composition is covered fully in [Graph Transforms](/guide/core-concepts/transforms). - -## Validation and linting - -After building a DAG, you can run the built-in linter to catch common issues: - -```python -from dagron import lint - -warnings = lint(dag) -for w in warnings: - print(w) -# e.g., "Node 'deploy' has in-degree 1 and out-degree 0 — consider if it should be a leaf." 
-``` - -And validate structural integrity at any time: - -```python -dag.validate() # Raises if the graph is malformed -``` - -See [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) for the full analysis -and linting toolkit. - -## Best practices - -1. **Use the builder for static graphs.** It gives you cycle detection and a - clean, readable construction block. - -2. **Use direct mutation for dynamic graphs.** When the structure depends on - runtime decisions, building imperatively is simpler. - -3. **Attach payloads for configuration.** Keep task functions pure; put - parameters in payloads. - -4. **Name nodes with conventions.** Use prefixes like `extract_`, `transform_`, - `load_` so you can use glob/regex matching later. - -5. **Start small and compose.** Build self-contained sub-DAGs and wire them - together with `compose()`. - -## API reference - -For the full list of construction methods, see: - -- [`DAG`](/api/core/core) — the core graph class and all its methods. -- [`DAGBuilder`](/api/core/builder) — fluent builder API. -- [`compose()`](/api/core/core) — DAG composition. - -## Next steps - -- [Executing Tasks](/guide/core-concepts/executing-tasks) — learn how to run your DAG with executors. -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analyze structure, find critical paths, run queries. -- [Graph Transforms](/guide/core-concepts/transforms) — filter, merge, reverse, and reshape DAGs. diff --git a/docs/pages/guide/core-concepts/executing-tasks.mdx b/docs/pages/guide/core-concepts/executing-tasks.mdx deleted file mode 100644 index 47d1490..0000000 --- a/docs/pages/guide/core-concepts/executing-tasks.mdx +++ /dev/null @@ -1,588 +0,0 @@ ---- -sidebar_position: 3 -title: Executing Tasks -description: Run DAG tasks in parallel with DAGExecutor, AsyncDAGExecutor, and Pipeline — with timeouts, cancellation, callbacks, and result inspection. 
---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Executing Tasks - -Once you have a [DAG](/guide/core-concepts/building-dags), you need an **executor** to run -it. dagron ships with several executor types — from a simple thread-pool executor to -async, pipeline, conditional, dynamic, and incremental variants. This guide covers -the two general-purpose executors (`DAGExecutor` and `AsyncDAGExecutor`), the -`Pipeline` convenience API, and all the options you can tune. - -## Executor overview - -| Executor | Runtime | Use case | -|----------|---------|----------| -| [`DAGExecutor`](/api/execution/execution) | Thread pool | CPU-bound tasks, synchronous code | -| [`AsyncDAGExecutor`](/api/execution/execution) | asyncio | I/O-bound tasks, async/await code | -| [`Pipeline`](/api/execution/pipeline) | Thread pool | Simple linear chains with `@task` | - -Specialised executors are covered in their own guides: -[Incremental](/guide/execution-strategies/incremental), -[Conditional](/guide/execution-strategies/conditional), -[Dynamic](/guide/execution-strategies/dynamic-dags), -[Checkpointing](/guide/execution-strategies/checkpointing). - -## DAGExecutor - -The workhorse executor. It schedules tasks across a thread pool, respecting -topological order and maximising parallelism. - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_edge("a", "b") - .add_edge("a", "c") - .add_edge("b", "d") - .add_edge("c", "d") - .build() -) - -tasks = { - "a": lambda: "data", - "b": lambda: "processed_b", - "c": lambda: "processed_c", - "d": lambda: "merged", -} - -result = dagron.DAGExecutor(dag).execute(tasks) -``` - - b --> d - a --> c --> d`} - caption="Diamond DAG. Nodes b and c run in parallel after a completes." 
-/> - -### Constructor parameters - -```python -dagron.DAGExecutor( - dag, # The DAG to execute - max_workers=None, # Thread pool size (default: CPU count) - costs=None, # Dict[str, float] — cost hints for scheduling - callbacks=None, # ExecutionCallbacks instance - fail_fast=True, # Stop on first failure? - enable_tracing=False, # Record execution trace? - hooks=None, # Plugin hooks -) -``` - -### The `execute()` method - -```python -result = executor.execute( - tasks, # Dict[str, Callable] - timeout=None, # Overall timeout in seconds - cancel_event=None, # threading.Event to signal cancellation -) -``` - -## Understanding ExecutionResult - -Every `.execute()` call returns an [`ExecutionResult`](/api/execution/execution): - -```python -result = dagron.DAGExecutor(dag).execute(tasks) - -# Aggregate counts -print(result.succeeded) # int -print(result.failed) # int -print(result.skipped) # int -print(result.timed_out) # int -print(result.cancelled) # int - -# Wall-clock duration -print(result.total_duration_seconds) # float - -# Per-node details -for name, nr in result.node_results.items(): - print(name, nr.status, nr.result, nr.error, nr.duration_seconds) -``` - -### NodeResult - -Each entry in `result.node_results` is a [`NodeResult`](/api/execution/execution): - -| Field | Type | Description | -|-------|------|-------------| -| `name` | `str` | Node name | -| `status` | `NodeStatus` | Final status | -| `result` | `Any` | Return value of the callable | -| `error` | `Exception \| None` | Exception if the task failed | -| `duration_seconds` | `float` | Wall-clock time for this node | - -### NodeStatus - -dagron defines eight possible statuses: - -| Status | Badge | Meaning | -|--------|-------|---------| -| `PENDING` | | Not yet scheduled | -| `RUNNING` | | Currently executing | -| `COMPLETED` | | Finished successfully | -| `FAILED` | | Raised an exception | -| `SKIPPED` | | Skipped (upstream failure or condition) | -| `TIMED_OUT` | | Exceeded timeout | -| 
`CANCELLED` | | Cancelled via cancel event | -| `CACHE_HIT` | | Result retrieved from cache | - -```python -from dagron import NodeStatus - -if result.node_results["d"].status == NodeStatus.COMPLETED: - print("All good!") -``` - -## Fail-fast vs. best-effort - -### Fail-fast (default) - -When `fail_fast=True`, the executor stops scheduling new tasks as soon as any -node fails. Nodes that depend on the failed node (directly or transitively) -are marked . - -```python -import time - -def slow_a(): - time.sleep(1) - return "ok" - -def failing_b(): - raise RuntimeError("boom") - -def depends_on_b(): - return "never reached" - -dag = dagron.DAG.builder() \ - .add_edge("a", "c") \ - .add_edge("b", "c") \ - .build() - -result = dagron.DAGExecutor(dag, fail_fast=True).execute({ - "a": slow_a, - "b": failing_b, - "c": depends_on_b, -}) - -print(result.node_results["b"].status) # FAILED -print(result.node_results["c"].status) # SKIPPED -``` - -### Best-effort - -With `fail_fast=False`, independent branches continue executing even when one -branch fails. Only direct descendants of the failed node are skipped. - -```python -dag = ( - dagron.DAG.builder() - .add_edge("root", "branch_a") - .add_edge("root", "branch_b") - .add_edge("branch_a", "join") - .add_edge("branch_b", "join") - .build() -) - -def ok(): - return "ok" - -def fail(): - raise RuntimeError("oops") - -result = dagron.DAGExecutor(dag, fail_fast=False).execute({ - "root": ok, - "branch_a": fail, - "branch_b": ok, - "join": ok, # skipped because branch_a failed -}) - -print(result.node_results["branch_b"].status) # COMPLETED (still ran!) 
-print(result.node_results["join"].status) # SKIPPED -``` - -## Timeouts - -### Global timeout - -Set a wall-clock deadline for the entire execution: - -```python -result = dagron.DAGExecutor(dag).execute(tasks, timeout=30.0) - -# Any node still running after 30 seconds is marked TIMED_OUT -for name, nr in result.node_results.items(): - if nr.status == dagron.NodeStatus.TIMED_OUT: - print(f"{name} timed out!") -``` - -Timed-out nodes appear as in the results. - -### Per-node timeouts - -Per-node timeouts are supported through the cost-aware scheduling system. See -the [Resource Scheduling](/api/execution/resources) API reference for details. - -## Cancellation - -You can cancel a running execution from another thread using a -`threading.Event`: - -```python -import threading - -cancel = threading.Event() - -# In another thread (e.g., signal handler): -# cancel.set() - -result = dagron.DAGExecutor(dag).execute(tasks, cancel_event=cancel) -``` - -When the event is set, the executor finishes any currently-running tasks but -does not schedule new ones. Unstarted nodes are marked -. - -```python -import signal - -cancel = threading.Event() -signal.signal(signal.SIGINT, lambda *_: cancel.set()) - -result = dagron.DAGExecutor(dag).execute(tasks, cancel_event=cancel) -print(result.cancelled) # number of cancelled nodes -``` - -## Callbacks - -Callbacks let you react to execution events — for logging, metrics, progress -bars, or custom logic. - -```python -class MyCallbacks: - def on_node_start(self, name): - print(f"[START] {name}") - - def on_node_complete(self, name, result): - print(f"[DONE] {name} -> {result}") - - def on_node_error(self, name, error): - print(f"[FAIL] {name}: {error}") - - def on_node_skip(self, name): - print(f"[SKIP] {name}") - -result = dagron.DAGExecutor(dag, callbacks=MyCallbacks()).execute(tasks) -``` - -Callbacks are called synchronously on the executor thread that completed the -task. Keep them lightweight to avoid blocking the scheduler. 
- -### Progress tracking example - -```python -from dagron import DAGExecutor - -class ProgressTracker: - def __init__(self, total): - self.total = total - self.done = 0 - - def on_node_complete(self, name, result): - self.done += 1 - pct = (self.done / self.total) * 100 - print(f"Progress: {self.done}/{self.total} ({pct:.0f}%)") - - def on_node_error(self, name, error): - self.done += 1 - - def on_node_skip(self, name): - self.done += 1 - -tracker = ProgressTracker(dag.node_count()) -result = DAGExecutor(dag, callbacks=tracker).execute(tasks) -``` - -## Cost-aware scheduling - -If some tasks are more expensive than others, provide cost hints so the -executor can schedule them more intelligently: - -```python -costs = { - "train_model": 100.0, - "evaluate": 10.0, - "preprocess": 5.0, -} - -result = dagron.DAGExecutor(dag, costs=costs).execute(tasks) -``` - -The executor uses costs when computing the [critical path](/guide/core-concepts/inspecting-graphs) -and when deciding which ready node to schedule first. 
- -## AsyncDAGExecutor - -For I/O-bound workloads (HTTP requests, database queries, file operations), use -the async executor: - -```python -import asyncio -import dagron - -dag = ( - dagron.DAG.builder() - .add_edge("fetch_users", "enrich") - .add_edge("fetch_orders", "enrich") - .add_edge("enrich", "store") - .build() -) - -async def fetch_users(): - await asyncio.sleep(0.5) # simulate HTTP call - return [{"id": 1, "name": "Alice"}] - -async def fetch_orders(): - await asyncio.sleep(0.3) - return [{"id": 1, "item": "Widget"}] - -async def enrich(): - return {"users": 1, "orders": 1} - -async def store(): - return "stored" - -async def main(): - executor = dagron.AsyncDAGExecutor(dag) - result = await executor.execute({ - "fetch_users": fetch_users, - "fetch_orders": fetch_orders, - "enrich": enrich, - "store": store, - }) - print(result.succeeded) # 4 - -asyncio.run(main()) -``` - -`AsyncDAGExecutor` accepts the same constructor parameters as `DAGExecutor` -(`max_workers`, `callbacks`, `fail_fast`, `enable_tracing`, `hooks`), and its -`.execute()` method accepts the same `timeout` parameter. - - enrich - fetch_orders --> enrich - enrich --> store`} - caption="Async execution: both fetch tasks run concurrently on the event loop." 
-/> - -## Pipeline and @task - -For simple function chains where dependencies are inferred from parameter names, -the `Pipeline` API is the most concise approach: - -```python -from dagron import Pipeline, task - -@task -def download(): - return {"raw": [1, 2, 3]} - -@task -def normalize(download): - """Depends on 'download' because of the parameter name.""" - return [x * 10 for x in download["raw"]] - -@task -def summarize(normalize): - return {"count": len(normalize), "total": sum(normalize)} - -pipeline = Pipeline(tasks=[download, normalize, summarize], name="etl") -result = pipeline.execute() - -print(result.node_results["summarize"].result) -# {'count': 3, 'total': 60} -``` - -### How dependency inference works - -The Pipeline inspects each function's parameter names. If a parameter matches -the name of another task in the pipeline, an edge is added: - -```python -@task -def a(): - return 1 - -@task -def b(): - return 2 - -@task -def c(a, b): - """Depends on both a and b.""" - return a + b - -pipeline = Pipeline(tasks=[a, b, c]) -# Internally builds: a -> c, b -> c -``` - -### Async pipelines - -```python -result = await pipeline.execute_async() -``` - -### When to use Pipeline vs. DAGExecutor - -| Feature | Pipeline | DAGExecutor | -|---------|----------|-------------| -| Dependency declaration | Implicit (parameter names) | Explicit (edges) | -| Data passing | Automatic (return values injected) | Manual | -| Graph complexity | Linear / simple fan-in | Any DAG shape | -| Fine-grained control | Limited | Full | - -Use `Pipeline` for quick scripts and prototypes. Switch to `DAGExecutor` when you -need explicit control over graph structure, payloads, or advanced executor features. 
- -## Controlling parallelism - -### max_workers - -```python -# Use exactly 2 threads -result = dagron.DAGExecutor(dag, max_workers=2).execute(tasks) - -# Use all available cores (default) -result = dagron.DAGExecutor(dag).execute(tasks) -``` - -Setting `max_workers=1` gives you sequential execution in topological order, -which is useful for debugging. - -### Execution plan preview - -Before executing, you can see the planned execution order: - -```python -plan = dag.execution_plan() -for step in plan: - print(step) -# ExecutionStep(level=0, nodes=['a']) -# ExecutionStep(level=1, nodes=['b', 'c']) -# ExecutionStep(level=2, nodes=['d']) -``` - -This tells you which nodes run in parallel at each level. - -## Putting it all together - -Here is a complete example with all major features: - -```python -import dagron -import threading -import time - -# Build the DAG -dag = ( - dagron.DAG.builder() - .add_nodes(["extract", "validate", "transform", "enrich", "load", "notify"]) - .add_edges([ - ("extract", "validate"), - ("validate", "transform"), - ("validate", "enrich"), - ("transform", "load"), - ("enrich", "load"), - ("load", "notify"), - ]) - .build() -) - -# Define tasks -def extract(): - time.sleep(0.1) - return {"rows": 1000} - -def validate(): - return {"valid": True} - -def transform(): - time.sleep(0.2) - return {"transformed": 1000} - -def enrich(): - time.sleep(0.15) - return {"enriched": 1000} - -def load(): - return {"loaded": 1000} - -def notify(): - return "email sent" - -tasks = { - "extract": extract, - "validate": validate, - "transform": transform, - "enrich": enrich, - "load": load, - "notify": notify, -} - -# Callbacks -class Logger: - def on_node_start(self, name): - print(f" -> {name}") - def on_node_complete(self, name, result): - print(f" <- {name}: {result}") - def on_node_error(self, name, error): - print(f" !! 
{name}: {error}") - -# Cancellation support -cancel = threading.Event() - -# Execute with all options -result = dagron.DAGExecutor( - dag, - max_workers=4, - callbacks=Logger(), - fail_fast=True, - enable_tracing=True, - costs={"transform": 2.0, "enrich": 1.5}, -).execute(tasks, timeout=60.0, cancel_event=cancel) - -# Inspect results -print(f"\nCompleted: {result.succeeded}/{dag.node_count()}") -print(f"Duration: {result.total_duration_seconds:.3f}s") - -for name, nr in result.node_results.items(): - print(f" {name}: {nr.status.name} ({nr.duration_seconds:.3f}s)") -``` - - validate - validate --> transform --> load - validate --> enrich --> load - load --> notify`} - caption="Complete ETL pipeline with parallel transform and enrich branches." -/> - -## Next steps - -- [Tracing & Profiling](/guide/observability/tracing-profiling) — enable tracing and analyse bottlenecks. -- [Incremental Execution](/guide/execution-strategies/incremental) — only re-run what changed. -- [Conditional Execution](/guide/execution-strategies/conditional) — skip branches based on runtime predicates. -- [Checkpointing](/guide/execution-strategies/checkpointing) — resume after failures. diff --git a/docs/pages/guide/core-concepts/inspecting-graphs.mdx b/docs/pages/guide/core-concepts/inspecting-graphs.mdx deleted file mode 100644 index c16cfb2..0000000 --- a/docs/pages/guide/core-concepts/inspecting-graphs.mdx +++ /dev/null @@ -1,500 +0,0 @@ ---- -sidebar_position: 4 -title: Inspecting Graphs -description: Analyze your DAG's structure — topological ordering, critical path, predecessors, ancestors, explain, what-if, lint, and the query DSL. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Inspecting Graphs - -dagron gives you deep introspection into your DAG's structure. This guide covers -every analysis tool — from basic traversals to critical-path analysis, what-if -exploration, linting, and the query DSL. 
- -## Building a sample graph - -We will use this graph throughout the guide: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_nodes(["raw", "clean", "features_a", "features_b", "train", "evaluate", "deploy"]) - .add_edges([ - ("raw", "clean"), - ("clean", "features_a"), - ("clean", "features_b"), - ("features_a", "train"), - ("features_b", "train"), - ("train", "evaluate"), - ("evaluate", "deploy"), - ]) - .build() -) -``` - - clean - clean --> features_a --> train - clean --> features_b --> train - train --> evaluate --> deploy`} - caption="ML pipeline used throughout this guide." -/> - -## Basic structure - -### Node and edge counts - -```python -print(dag.node_count()) # 7 -print(dag.edge_count()) # 7 -``` - -### Listing nodes - -```python -print(list(dag.nodes())) -# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate', 'deploy'] -``` - -### Roots and leaves - -**Roots** are nodes with no incoming edges — the entry points of your pipeline. -**Leaves** are nodes with no outgoing edges — the terminal outputs. 
- -```python -print(dag.roots()) # ['raw'] -print(dag.leaves()) # ['deploy'] -``` - -### Membership checks - -```python -print(dag.has_node("train")) # True -print(dag.has_edge("clean", "features_a")) # True -print(dag.has_node("nonexistent")) # False -``` - -## Neighbourhood queries - -### Direct neighbours - -```python -# Parents (nodes that point TO this node) -print(dag.predecessors("train")) # ['features_a', 'features_b'] - -# Children (nodes this node points TO) -print(dag.successors("clean")) # ['features_a', 'features_b'] -``` - -### Degree - -```python -print(dag.in_degree("train")) # 2 -print(dag.out_degree("clean")) # 2 -print(dag.in_degree("raw")) # 0 (root) -print(dag.out_degree("deploy")) # 0 (leaf) -``` - -### Transitive closure queries - -```python -# All upstream nodes (recursive predecessors) -print(dag.ancestors("train")) -# ['raw', 'clean', 'features_a', 'features_b'] - -# All downstream nodes (recursive successors) -print(dag.descendants("clean")) -# ['features_a', 'features_b', 'train', 'evaluate', 'deploy'] -``` - - clean:::ancestor - clean --> features_a:::ancestor --> train:::target - clean --> features_b:::ancestor --> train - train --> evaluate --> deploy`} - caption="Ancestors of 'train' (blue) and the target node (red)." 
-/> - -## Graph statistics - -The `stats()` method returns a comprehensive summary: - -```python -s = dag.stats() -print(s) -# DAGStats(nodes=7, edges=7, roots=1, leaves=1, depth=5, width=2, density=0.167) -``` - -| Field | Meaning | -|-------|---------| -| `nodes` | Total node count | -| `edges` | Total edge count | -| `roots` | Number of root nodes | -| `leaves` | Number of leaf nodes | -| `depth` | Length of the longest path (in edges) | -| `width` | Maximum number of nodes at any single level | -| `density` | Edge count / maximum possible edges | - -## Topological ordering - -### Topological sort - -A flat ordering where every node appears after all its dependencies: - -```python -print(dag.topological_sort()) -# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate', 'deploy'] -``` - -### Topological levels - -Groups nodes that can execute in parallel — this is what the executor uses -internally: - -```python -for level, nodes in enumerate(dag.topological_levels()): - print(f"Level {level}: {nodes}") -# Level 0: ['raw'] -# Level 1: ['clean'] -# Level 2: ['features_a', 'features_b'] -# Level 3: ['train'] -# Level 4: ['evaluate'] -# Level 5: ['deploy'] -``` - -Level 2 shows that `features_a` and `features_b` can run concurrently. - -### Execution plan - -The execution plan is a richer version of topological levels that includes -scheduling metadata: - -```python -plan = dag.execution_plan() -for step in plan: - print(step) -``` - -## Critical path - -The **critical path** is the longest dependency chain through the DAG. It -determines the minimum wall-clock time, assuming unlimited parallelism. 
- -```python -path = dag.critical_path() -print(path) -# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] -``` - -With cost hints, the critical path accounts for task durations: - -```python -costs = { - "raw": 1.0, - "clean": 2.0, - "features_a": 5.0, - "features_b": 3.0, - "train": 10.0, - "evaluate": 2.0, - "deploy": 1.0, -} - -path = dag.critical_path(costs=costs) -print(path) -# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] -# Total cost: 21.0 -``` - - clean:::critical - clean --> features_a:::critical --> train:::critical - clean --> features_b --> train - train --> evaluate:::critical --> deploy:::critical`} - caption="Critical path highlighted in red. Optimising these nodes reduces total pipeline time." -/> - -Understanding the critical path is key to performance tuning — see -[Tracing & Profiling](/guide/observability/tracing-profiling) for post-execution analysis. - -## Shortest and longest paths - -Find the shortest or longest path between any two nodes: - -```python -print(dag.shortest_path("raw", "deploy")) -# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] - -print(dag.longest_path("raw", "deploy")) -# ['raw', 'clean', 'features_a', 'train', 'evaluate', 'deploy'] -``` - -### All paths - -Enumerate every path between two nodes: - -```python -paths = dag.all_paths("clean", "train") -for p in paths: - print(p) -# ['clean', 'features_a', 'train'] -# ['clean', 'features_b', 'train'] -``` - -## Pattern matching - -Find nodes by name using glob or regex patterns: - -```python -# Glob-style -print(dag.nodes_matching_glob("features_*")) -# ['features_a', 'features_b'] - -# Regex -print(dag.nodes_matching_regex(r"^(train|evaluate|deploy)$")) -# ['train', 'evaluate', 'deploy'] -``` - -This is especially useful in large graphs with naming conventions. 
- -## explain() - -The [`explain()`](/api/analysis/analysis) function gives a human-readable summary -of a single node's role in the graph: - -```python -from dagron import explain - -info = explain(dag, "train") -print(info) -``` - -Output: - -``` -Node: train - In-degree: 2 - Out-degree: 1 - Predecessors: features_a, features_b - Successors: evaluate - Ancestors: raw, clean, features_a, features_b - Descendants: evaluate, deploy - Level: 3 - Is root: False - Is leaf: False - On critical path: Yes -``` - -## what_if() - -The [`what_if()`](/api/analysis/analysis) function lets you explore hypothetical -changes without modifying the DAG: - -```python -from dagron import what_if - -report = what_if(dag, remove_nodes=["features_b"]) -print(report) -``` - -Output: - -``` -What-if: remove nodes ['features_b'] - Nodes removed: 1 - Edges removed: 2 - New roots: ['raw'] - New leaves: ['deploy'] - Disconnected: False - Affected downstream: ['train', 'evaluate', 'deploy'] -``` - -This is invaluable for understanding the impact of removing a step or a -dependency before making the change. - -```python -# What if we remove an edge instead? -report = what_if(dag, remove_edges=[("clean", "features_b")]) -print(report) -``` - -## lint() - -The [`lint()`](/api/analysis/analysis) function checks for common structural -issues: - -```python -from dagron import lint - -warnings = lint(dag) -for w in warnings: - print(w) -``` - -Possible warnings include: - -| Warning | Meaning | -|---------|---------| -| Isolated node | Node with no edges (in or out) | -| Single-child bottleneck | Node with high in-degree feeding a single successor | -| Redundant edge | Edge that is implied by transitivity | -| Wide fan-out | Node with many successors (may be a design smell) | - -Linting is especially useful in CI pipelines to enforce graph hygiene. 
- -## query() - -The [`query()`](/api/analysis/analysis) function provides a mini DSL for selecting -nodes: - -```python -from dagron import query - -# Find all root nodes -roots = query(dag, "roots") -print(roots) # ['raw'] - -# Find all leaves -leaves = query(dag, "leaves") -print(leaves) # ['deploy'] - -# Glob on node names -features = query(dag, "name:features_*") -print(features) # ['features_a', 'features_b'] - -# Combine with set operators -result = query(dag, "roots & name:raw*") -print(result) # ['raw'] -``` - -### Query syntax reference - -| Expression | Meaning | -|------------|---------| -| `roots` | Nodes with in-degree 0 | -| `leaves` | Nodes with out-degree 0 | -| `name:pattern` | Glob match on node names | -| `A & B` | Intersection | -| `A \| B` | Union | -| `!A` | Complement | -| `ancestors(node)` | All ancestors of a node | -| `descendants(node)` | All descendants of a node | - -```python -# All non-leaf nodes -non_leaves = query(dag, "!leaves") -print(non_leaves) -# ['raw', 'clean', 'features_a', 'features_b', 'train', 'evaluate'] - -# Ancestors of train that match a glob -query(dag, "ancestors(train) & name:feature*") -# ['features_a', 'features_b'] -``` - -## Reachability index - -For large graphs where you repeatedly query ancestors/descendants, building a -reachability index dramatically speeds up lookups: - -```python -dag.build_reachability_index() - -# Now these calls use the precomputed index: -print(dag.ancestors("deploy")) -print(dag.descendants("raw")) -``` - -The index is invalidated when the graph is mutated and can be rebuilt at any -time. - -## Dominator tree - -The **dominator tree** reveals which nodes are mandatory bottlenecks — a node -**d** dominates node **n** if every path from any root to **n** passes through -**d**. 
- -```python -dom_tree = dag.dominator_tree() -print(dom_tree) -# {'clean': 'raw', 'features_a': 'clean', 'features_b': 'clean', -# 'train': 'clean', 'evaluate': 'train', 'deploy': 'evaluate'} -``` - -This tells you, for example, that `clean` dominates everything downstream of -it, making it a critical bottleneck. - -## Validation - -At any time you can verify the DAG is structurally sound: - -```python -dag.validate() # Raises if the graph contains a cycle or is otherwise invalid -``` - -This is called automatically by the builder at `.build()` time, but you may -want to call it after manual mutations. - -## Practical example: debugging a slow pipeline - -Suppose your pipeline is slower than expected. Here is a systematic inspection -workflow: - -```python -import dagron -from dagron import explain, what_if, lint, query - -# 1. Check graph stats -s = dag.stats() -print(f"Nodes: {s.nodes}, Depth: {s.depth}, Width: {s.width}") - -# 2. Find the critical path -cp = dag.critical_path() -print(f"Critical path ({len(cp)} nodes): {' -> '.join(cp)}") - -# 3. Explain the bottleneck node -explain(dag, cp[len(cp) // 2]) - -# 4. What if we parallelise the bottleneck? -what_if(dag, remove_nodes=[cp[len(cp) // 2]]) - -# 5. Lint for structural issues -for w in lint(dag): - print(f"Warning: {w}") - -# 6. 
Query for specific patterns -heavy_nodes = query(dag, "name:train* | name:feature*") -print(f"Compute-heavy nodes: {heavy_nodes}") -``` - -## API reference - -| Function / Method | Docs | -|-------------------|------| -| `dag.stats()` | [DAG](/api/core/core) | -| `dag.topological_sort()` | [DAG](/api/core/core) | -| `dag.topological_levels()` | [DAG](/api/core/core) | -| `dag.critical_path()` | [DAG](/api/core/core) | -| `dag.execution_plan()` | [DAG](/api/core/core) | -| `dag.shortest_path()` | [DAG](/api/core/core) | -| `dag.longest_path()` | [DAG](/api/core/core) | -| `dag.all_paths()` | [DAG](/api/core/core) | -| `dag.dominator_tree()` | [DAG](/api/core/core) | -| `explain()` | [Analysis](/api/analysis/analysis) | -| `what_if()` | [Analysis](/api/analysis/analysis) | -| `lint()` | [Analysis](/api/analysis/analysis) | -| `query()` | [Analysis](/api/analysis/analysis) | - -## Next steps - -- [Graph Transforms](/guide/core-concepts/transforms) — reshape your DAG with filter, merge, reverse, and more. -- [Tracing & Profiling](/guide/observability/tracing-profiling) — post-execution analysis with Chrome traces. -- [Serialization](/guide/core-concepts/serialization) — export your DAG to JSON, DOT, Mermaid, or binary. diff --git a/docs/pages/guide/core-concepts/serialization.mdx b/docs/pages/guide/core-concepts/serialization.mdx deleted file mode 100644 index 932ed22..0000000 --- a/docs/pages/guide/core-concepts/serialization.mdx +++ /dev/null @@ -1,485 +0,0 @@ ---- -sidebar_position: 6 -title: Serialization -description: Save and load DAGs in JSON, binary, DOT, and Mermaid formats — plus custom payload serializers. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Serialization - -dagron supports multiple serialization formats for persisting DAGs, sharing them -across processes, embedding them in documentation, and visualizing them with -external tools. 
This guide covers every format and shows how to handle custom -payloads. - -## Format comparison - -| Format | Round-trip? | Human-readable? | Best for | -|--------|:-----------:|:---------------:|----------| -| JSON | Yes | Yes | Config files, APIs, debugging | -| Binary | Yes | No | Performance-critical storage, IPC | -| DOT | No (export only) | Yes | Graphviz visualization | -| Mermaid | No (export only) | Yes | Documentation, Markdown | -| File (save/load) | Yes | No | Disk persistence with compression | - -## JSON serialization - -### Export to JSON - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -json_str = dag.to_json() -print(json_str) -``` - -Output: - -```json -{ - "nodes": ["extract", "transform", "load"], - "edges": [ - ["extract", "transform"], - ["transform", "load"] - ] -} -``` - -### Import from JSON - -```python -restored = dagron.DAG.from_json(json_str) - -print(restored.node_count()) # 3 -print(restored.edge_count()) # 2 -print(list(restored.nodes())) # ['extract', 'transform', 'load'] -``` - -The round-trip preserves all structural information: nodes, edges, and their -ordering. 
- -### JSON with payloads - -When nodes carry payloads, they are included in the JSON output: - -```python -dag = dagron.DAG() -dag.add_node("train", payload={"epochs": 10, "lr": 0.001}) -dag.add_node("evaluate", payload={"metrics": ["accuracy", "f1"]}) -dag.add_edge("train", "evaluate") - -json_str = dag.to_json() -print(json_str) -``` - -```json -{ - "nodes": [ - {"name": "train", "payload": {"epochs": 10, "lr": 0.001}}, - {"name": "evaluate", "payload": {"metrics": ["accuracy", "f1"]}} - ], - "edges": [ - ["train", "evaluate"] - ] -} -``` - -### Storing JSON to a file - -```python -import json - -# Write -with open("pipeline.json", "w") as f: - f.write(dag.to_json()) - -# Read -with open("pipeline.json", "r") as f: - dag = dagron.DAG.from_json(f.read()) -``` - -### Use case: sharing DAG definitions via APIs - -```python -from flask import Flask, jsonify, request - -app = Flask(__name__) - -@app.route("/pipeline", methods=["GET"]) -def get_pipeline(): - return dag.to_json(), 200, {"Content-Type": "application/json"} - -@app.route("/pipeline", methods=["POST"]) -def set_pipeline(): - new_dag = dagron.DAG.from_json(request.data.decode()) - # ... use new_dag ... - return jsonify({"nodes": new_dag.node_count()}) -``` - -## Binary serialization - -Binary format uses an efficient Rust-native encoding that is significantly faster -and more compact than JSON. Use it when performance matters. 
- -### Export to bytes - -```python -data = dag.to_bytes() -print(type(data)) # -print(len(data)) # compact binary representation -``` - -### Import from bytes - -```python -restored = dagron.DAG.from_bytes(data) -print(restored.node_count()) # same as original -``` - -### Use case: Redis caching - -```python -import redis - -r = redis.Redis() - -# Store -r.set("pipeline:etl", dag.to_bytes()) - -# Retrieve -data = r.get("pipeline:etl") -if data: - dag = dagron.DAG.from_bytes(data) -``` - -### Use case: inter-process communication - -```python -import multiprocessing as mp - -def worker(dag_bytes): - dag = dagron.DAG.from_bytes(dag_bytes) - print(f"Worker received DAG with {dag.node_count()} nodes") - -# Send the DAG to a subprocess -p = mp.Process(target=worker, args=(dag.to_bytes(),)) -p.start() -p.join() -``` - -### Performance comparison - -Binary serialization is typically 5-10x faster than JSON and produces 3-5x -smaller output, because it avoids string parsing and uses Rust's native -serialization: - -```python -import time - -# JSON -start = time.perf_counter() -for _ in range(10000): - dagron.DAG.from_json(dag.to_json()) -json_time = time.perf_counter() - start - -# Binary -start = time.perf_counter() -for _ in range(10000): - dagron.DAG.from_bytes(dag.to_bytes()) -binary_time = time.perf_counter() - start - -print(f"JSON: {json_time:.3f}s") -print(f"Binary: {binary_time:.3f}s") -print(f"Speedup: {json_time / binary_time:.1f}x") -``` - -## File persistence (save / load) - -The `save()` and `load()` methods write and read DAGs to/from disk files. They -use the binary format internally with optional compression. 
- -### Saving to disk - -```python -dag.save("pipeline.dagron") -``` - -### Loading from disk - -```python -dag = dagron.DAG.load("pipeline.dagron") -print(dag.node_count()) -``` - -### Use case: checkpoint-style persistence - -```python -import os - -PIPELINE_PATH = "/var/data/pipeline.dagron" - -def get_or_create_pipeline(): - if os.path.exists(PIPELINE_PATH): - return dagron.DAG.load(PIPELINE_PATH) - - dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() - ) - dag.save(PIPELINE_PATH) - return dag -``` - -## DOT export (Graphviz) - -The [DOT language](https://graphviz.org/doc/info/lang.html) is the standard -input format for Graphviz. - -```python -dot = dag.to_dot() -print(dot) -``` - -Output: - -```dot -digraph { - "extract" -> "transform" - "transform" -> "load" -} -``` - -### Rendering with Graphviz - -```python -import subprocess - -dot = dag.to_dot() -with open("pipeline.dot", "w") as f: - f.write(dot) - -subprocess.run(["dot", "-Tpng", "pipeline.dot", "-o", "pipeline.png"]) -``` - -### Rendering in a Jupyter notebook - -```python -from IPython.display import SVG, display -import subprocess - -dot = dag.to_dot() -result = subprocess.run( - ["dot", "-Tsvg"], - input=dot.encode(), - capture_output=True, -) -display(SVG(result.stdout)) -``` - -## Mermaid export - -[Mermaid](https://mermaid.js.org/) is a Markdown-friendly diagramming language -supported by GitHub, GitLab, Docusaurus, and many other platforms. 
- -```python -mermaid = dag.to_mermaid() -print(mermaid) -``` - -Output: - -``` -graph TD - extract --> transform - transform --> load -``` - -### Embedding in Markdown - -````markdown -```mermaid -graph TD - extract --> transform - transform --> load -``` -```` - -### Use case: auto-generated documentation - -````python -def generate_pipeline_docs(dag, output_path): - """Generate a Markdown file with an embedded DAG diagram.""" - mermaid = dag.to_mermaid() - content = f"""# Pipeline Overview - -## DAG Structure - -```mermaid -{mermaid} -``` - -## Statistics - -- Nodes: {dag.node_count()} -- Edges: {dag.edge_count()} -- Roots: {', '.join(dag.roots())} -- Leaves: {', '.join(dag.leaves())} -""" - with open(output_path, "w") as f: - f.write(content) - -generate_pipeline_docs(dag, "pipeline.md") -```` - - transform --> load`} - caption="The same DAG rendered as a DagDiagram component." -/> - -## Custom payload serializers - -By default, dagron serializes payloads using Python's standard JSON encoder, -which handles `dict`, `list`, `str`, `int`, `float`, `bool`, and `None`. For -custom objects, you need to provide serializer functions. 
- -### Example: serializing dataclass payloads - -```python -import dagron -import json -from dataclasses import dataclass, asdict - -@dataclass -class TaskConfig: - retries: int - timeout_seconds: float - tags: list - -# Build a DAG with dataclass payloads -dag = dagron.DAG() -dag.add_node("fetch", payload=TaskConfig(retries=3, timeout_seconds=30.0, tags=["io"])) -dag.add_node("process", payload=TaskConfig(retries=1, timeout_seconds=120.0, tags=["cpu"])) -dag.add_edge("fetch", "process") - -# Custom encoder -class ConfigEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, TaskConfig): - return {"__type__": "TaskConfig", **asdict(obj)} - return super().default(obj) - -# Custom decoder -def config_decoder(dct): - if dct.get("__type__") == "TaskConfig": - return TaskConfig( - retries=dct["retries"], - timeout_seconds=dct["timeout_seconds"], - tags=dct["tags"], - ) - return dct - -# Serialize with custom encoder -json_str = dag.to_json(cls=ConfigEncoder) -print(json_str) - -# Deserialize with custom decoder -restored = dagron.DAG.from_json(json_str, object_hook=config_decoder) -``` - -### Example: binary serialization with pickle payloads - -For the binary format, payloads are serialized using pickle by default, so -custom objects work out of the box as long as they are picklable: - -```python -import dagron -import numpy as np - -dag = dagron.DAG() -dag.add_node("matrix", payload=np.array([[1, 2], [3, 4]])) -dag.add_node("result") -dag.add_edge("matrix", "result") - -# Binary round-trip preserves numpy arrays -data = dag.to_bytes() -restored = dagron.DAG.from_bytes(data) -``` - -## Combining serialization with snapshots - -Snapshots and serialization work well together for versioning: - -```python -import dagron -from datetime import datetime - -dag = ( - dagron.DAG.builder() - .add_edge("a", "b") - .add_edge("b", "c") - .build() -) - -# Save version 1 -dag.save(f"pipeline_v1.dagron") - -# Make changes -dag.add_node("d") -dag.add_edge("c", "d") - 
-# Save version 2 -dag.save(f"pipeline_v2.dagron") - -# Compare versions -v1 = dagron.DAG.load("pipeline_v1.dagron") -v2 = dagron.DAG.load("pipeline_v2.dagron") - -print(f"v1: {v1.node_count()} nodes, {v1.edge_count()} edges") -print(f"v2: {v2.node_count()} nodes, {v2.edge_count()} edges") -``` - -## Format selection guide - -Use this decision tree to pick the right format: - -1. **Need to read/write from Python?** Use `save()` / `load()` for files, or - `to_bytes()` / `from_bytes()` for in-memory. - -2. **Need human-readable config?** Use `to_json()` / `from_json()`. - -3. **Need to visualize with Graphviz?** Use `to_dot()`. - -4. **Need to embed in Markdown/docs?** Use `to_mermaid()`. - -5. **Need maximum performance?** Use `to_bytes()` / `from_bytes()`. - -## API reference - -| Method | Docs | -|--------|------| -| `dag.to_json()` | [DAG](/api/core/core) | -| `DAG.from_json()` | [DAG](/api/core/core) | -| `dag.to_bytes()` | [DAG](/api/core/core) | -| `DAG.from_bytes()` | [DAG](/api/core/core) | -| `dag.save()` | [DAG](/api/core/core) | -| `DAG.load()` | [DAG](/api/core/core) | -| `dag.to_dot()` | [DAG](/api/core/core) | -| `dag.to_mermaid()` | [DAG](/api/core/core) | - -## Next steps - -- [Incremental Execution](/guide/execution-strategies/incremental) — use save/load for caching intermediate state. -- [Checkpointing](/guide/execution-strategies/checkpointing) — persist execution progress to disk. -- [Graph Transforms](/guide/core-concepts/transforms) — create snapshots before applying transforms. diff --git a/docs/pages/guide/core-concepts/transforms.mdx b/docs/pages/guide/core-concepts/transforms.mdx deleted file mode 100644 index cd2e983..0000000 --- a/docs/pages/guide/core-concepts/transforms.mdx +++ /dev/null @@ -1,532 +0,0 @@ ---- -sidebar_position: 5 -title: Graph Transforms -description: Reshape your DAGs with reverse, filter, merge, collapse, transitive reduction, subgraph extraction, snapshots, and composition. 
---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Graph Transforms - -dagron provides a rich set of **structural transformations** that produce new DAGs -from existing ones. Transforms are non-destructive — the original DAG is never -mutated. This guide covers every built-in transform with before/after diagrams. - -## Transform overview - -| Transform | What it does | -|-----------|-------------| -| `reverse()` | Flip every edge | -| `filter()` | Keep nodes matching a predicate | -| `merge()` | Combine two DAGs into one | -| `collapse()` | Replace a set of nodes with a single node | -| `transitive_reduction()` | Remove redundant edges | -| `transitive_closure()` | Add all implied edges | -| `subgraph()` | Extract a subgraph by node set | -| `subgraph_by_depth()` | Extract nodes within N hops | -| `snapshot()` | Immutable frozen copy | -| `compose()` | Namespace and wire multiple DAGs | - -## Sample DAG - -We will use this graph for most examples: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_nodes(["a", "b", "c", "d", "e"]) - .add_edges([ - ("a", "b"), ("a", "c"), - ("b", "d"), ("c", "d"), - ("d", "e"), - ]) - .build() -) -``` - - b --> d - a --> c --> d - d --> e`} - caption="Original DAG used throughout this guide." -/> - -## reverse() - -Flipping every edge is useful when you want to reason about upstream -dependencies as downstream propagation (e.g., "what gets affected if this node -changes?"). - -```python -rev = dag.reverse() - -print(rev.roots()) # ['e'] (was a leaf) -print(rev.leaves()) # ['a'] (was a root) -print(rev.successors("e")) # ['d'] -print(rev.predecessors("a")) # ['b', 'c'] -``` - - d - d --> b --> a - d --> c --> a`} - caption="Reversed DAG — edges point upstream." -/> - -Reversing is an O(V + E) operation implemented in Rust. - -## filter() - -Keep only the nodes that satisfy a predicate. 
Edges between remaining nodes are -preserved; edges to/from removed nodes are dropped. - -```python -# Keep only nodes whose names are NOT 'c' -filtered = dag.filter(lambda name: name != "c") - -print(list(filtered.nodes())) # ['a', 'b', 'd', 'e'] -print(filtered.edge_count()) # 3 -``` - - b --> d --> e`} - caption="After filtering out node 'c'." -/> - -### Filtering with payloads - -If you attached payloads, you can filter based on them: - -```python -dag2 = dagron.DAG() -dag2.add_node("gpu_train", payload={"gpu": True}) -dag2.add_node("cpu_prep", payload={"gpu": False}) -dag2.add_node("cpu_eval", payload={"gpu": False}) -dag2.add_edge("cpu_prep", "gpu_train") -dag2.add_edge("gpu_train", "cpu_eval") - -# Keep only GPU nodes -gpu_only = dag2.filter(lambda name: dag2.get_payload(name).get("gpu", False)) -print(list(gpu_only.nodes())) # ['gpu_train'] -``` - -## merge() - -Combine two DAGs into a single graph. Nodes with the same name are unified; -edges from both graphs are included. - -```python -dag_a = ( - dagron.DAG.builder() - .add_edge("x", "y") - .add_edge("y", "z") - .build() -) - -dag_b = ( - dagron.DAG.builder() - .add_edge("y", "w") - .add_edge("w", "z") - .build() -) - -merged = dag_a.merge(dag_b) -print(list(merged.nodes())) # ['x', 'y', 'z', 'w'] -print(merged.edge_count()) # 4 -``` - - y --> z - y --> w --> z`} - caption="Merged DAG. Node 'y' and 'z' were shared between the two inputs." -/> - -:::caution -If merging two DAGs would introduce a cycle, `merge()` raises a `CycleError`. -::: - -## collapse() - -Replace a set of nodes with a single representative node. All incoming edges to -the set become incoming edges to the representative; all outgoing edges from the -set become outgoing edges from the representative. 
- -```python -# Collapse the feature extraction branch into one node -collapsed = dag.collapse( - nodes=["b", "c"], - into="features", -) - -print(list(collapsed.nodes())) # ['a', 'features', 'd', 'e'] -print(collapsed.successors("a")) # ['features'] -print(collapsed.predecessors("d")) # ['features'] -``` - - features --> d --> e`} - caption="After collapsing nodes b and c into 'features'." -/> - -Collapsing is useful for: -- Simplifying large graphs for visualization. -- Creating summary views for stakeholders. -- Reducing overhead when scheduling tightly-coupled tasks. - -## transitive_reduction() - -Remove edges that are implied by other paths. The transitive reduction has the -same reachability as the original graph but with the minimum number of edges. - -```python -# Add a redundant shortcut edge -dag_with_shortcut = ( - dagron.DAG.builder() - .add_edge("a", "b") - .add_edge("b", "c") - .add_edge("a", "c") # redundant — a->b->c already implies a can reach c - .build() -) - -reduced = dag_with_shortcut.transitive_reduction() -print(reduced.edge_count()) # 2 (shortcut removed) -print(reduced.has_edge("a", "c")) # False -print(reduced.has_edge("a", "b")) # True -print(reduced.has_edge("b", "c")) # True -``` - -

- -
- -**Before (with shortcut)** - - b --> c - a -.-> c`} - caption="Dashed edge is redundant." -/> - -
- -
- -**After (transitive reduction)** - - b --> c`} - caption="Minimum edges preserving reachability." -/> - -
-
- -This is especially useful for cleaning up graphs generated from broad dependency -specifications. - -## transitive_closure() - -The opposite of reduction: add an edge for every pair of nodes (u, v) where v -is reachable from u. - -```python -closure = dag.transitive_closure() - -# a can reach e (a->b->d->e), so there is now a direct edge: -print(closure.has_edge("a", "e")) # True -print(closure.has_edge("a", "d")) # True -print(closure.has_edge("b", "e")) # True - -print(f"Original edges: {dag.edge_count()}") # 5 -print(f"Closure edges: {closure.edge_count()}") # 10 -``` - -The transitive closure is useful for pre-computing reachability queries. - -## subgraph() - -Extract a subgraph containing only the specified nodes and the edges between -them: - -```python -sub = dag.subgraph(["a", "b", "d"]) - -print(list(sub.nodes())) # ['a', 'b', 'd'] -print(sub.edge_count()) # 2 (a->b, b->d) -``` - - b --> d`} - caption="Subgraph of nodes a, b, d." -/> - -### Extracting ancestors or descendants - -A common pattern is extracting the full upstream or downstream of a node: - -```python -# Everything upstream of 'train' (including train itself) -upstream_nodes = set(dag.ancestors("d")) | {"d"} -upstream = dag.subgraph(list(upstream_nodes)) -print(list(upstream.nodes())) # ['a', 'b', 'c', 'd'] -``` - -## subgraph_by_depth() - -Extract nodes within a certain number of hops from a starting node: - -```python -# All nodes within 1 hop of 'd' -nearby = dag.subgraph_by_depth("d", depth=1) -print(list(nearby.nodes())) # ['b', 'c', 'd', 'e'] -``` - - d - c --> d - d --> e`} - caption="Subgraph within 1 hop of node 'd' (both directions)." -/> - -```python -# Within 2 hops -wider = dag.subgraph_by_depth("d", depth=2) -print(list(wider.nodes())) # ['a', 'b', 'c', 'd', 'e'] -``` - -## snapshot() - -Create an immutable, frozen copy of the DAG: - -```python -snap = dag.snapshot() -``` - -Snapshots are useful for: -- Recording the state of a graph before mutations. 
-- Passing a read-only view to analysis functions. -- Implementing undo/redo. - -See [Serialization](/guide/core-concepts/serialization) for persisting snapshots to disk. - -### Diffing snapshots - -Compare two snapshots to see what changed: - -```python -snap1 = dag.snapshot() - -# Mutate the original -dag.add_node("f") -dag.add_edge("e", "f") - -snap2 = dag.snapshot() - -diff = dag.diff(snap1, snap2) -print(diff) -# DagDiff(added_nodes=['f'], removed_nodes=[], added_edges=[('e', 'f')], removed_edges=[]) -``` - -## compose() - -The [`compose()`](/api/core/core) function wires multiple DAGs together under -namespaces: - -```python -etl = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -ml = ( - dagron.DAG.builder() - .add_edge("train", "evaluate") - .build() -) - -combined = dagron.compose( - dags={"etl": etl, "ml": ml}, - connections=[("etl/load", "ml/train")], -) - -print(list(combined.nodes())) -# ['etl/extract', 'etl/transform', 'etl/load', 'ml/train', 'ml/evaluate'] - -print(combined.successors("etl/load")) # ['ml/train'] -``` - - etl/transform --> etl/load --> ml/train --> ml/evaluate`} - caption="Two sub-DAGs composed into a single pipeline." 
-/> - -### Composing many DAGs - -```python -ingestion = dagron.DAG.builder().add_edge("fetch", "validate").build() -processing = dagron.DAG.builder().add_edge("clean", "aggregate").build() -reporting = dagron.DAG.builder().add_edge("render", "email").build() - -full_pipeline = dagron.compose( - dags={ - "ingest": ingestion, - "process": processing, - "report": reporting, - }, - connections=[ - ("ingest/validate", "process/clean"), - ("process/aggregate", "report/render"), - ], -) - -for level, nodes in enumerate(full_pipeline.topological_levels()): - print(f"Level {level}: {nodes}") -# Level 0: ['ingest/fetch'] -# Level 1: ['ingest/validate'] -# Level 2: ['process/clean'] -# Level 3: ['process/aggregate'] -# Level 4: ['report/render'] -# Level 5: ['report/email'] -``` - - ingest/validate --> process/clean --> process/aggregate --> report/render --> report/email`} - caption="Three composed DAGs forming a full data pipeline." -/> - -## Chaining transforms - -Transforms return new DAG instances, so you can chain them: - -```python -result = ( - dag - .filter(lambda n: n != "c") - .transitive_reduction() - .reverse() -) - -print(list(result.nodes())) # ['e', 'd', 'b', 'a'] -``` - -Since each transform produces a new DAG, the original is never modified. - -## Partitioning - -dagron includes two partitioning strategies for splitting a DAG into -independent sub-DAGs: - -### Level-based partitioning - -Splits the DAG at topological level boundaries: - -```python -partitions = dag.partition_level_based(num_partitions=2) -for i, part in enumerate(partitions): - print(f"Partition {i}: {list(part.nodes())}") -``` - -### Balanced partitioning - -Tries to balance the number of nodes across partitions: - -```python -partitions = dag.partition_balanced(num_partitions=3) -for i, part in enumerate(partitions): - print(f"Partition {i}: {list(part.nodes())}") -``` - -Partitioning is useful for distributed execution where each partition is sent -to a different worker. 
- -## Practical example: simplifying a graph for stakeholders - -```python -import dagron - -# A complex internal pipeline -pipeline = ( - dagron.DAG.builder() - .add_nodes([ - "fetch_api", "fetch_db", "fetch_s3", - "validate_api", "validate_db", "validate_s3", - "merge_sources", "feature_eng", "train_xgb", - "train_nn", "ensemble", "evaluate", "deploy" - ]) - .add_edges([ - ("fetch_api", "validate_api"), ("fetch_db", "validate_db"), - ("fetch_s3", "validate_s3"), - ("validate_api", "merge_sources"), ("validate_db", "merge_sources"), - ("validate_s3", "merge_sources"), - ("merge_sources", "feature_eng"), - ("feature_eng", "train_xgb"), ("feature_eng", "train_nn"), - ("train_xgb", "ensemble"), ("train_nn", "ensemble"), - ("ensemble", "evaluate"), ("evaluate", "deploy"), - ]) - .build() -) - -# Collapse ingestion into one node for the executive summary -simplified = pipeline.collapse( - nodes=["fetch_api", "fetch_db", "fetch_s3", - "validate_api", "validate_db", "validate_s3", - "merge_sources"], - into="data_ingestion", -) - -simplified = simplified.collapse( - nodes=["train_xgb", "train_nn", "ensemble"], - into="model_training", -) - -print(list(simplified.nodes())) -# ['data_ingestion', 'feature_eng', 'model_training', 'evaluate', 'deploy'] - -print(simplified.to_mermaid()) -``` - - feature_eng --> model_training --> evaluate --> deploy`} - caption="Simplified stakeholder view after collapsing internal details." 
-/> - -## API reference - -| Method | Docs | -|--------|------| -| `dag.reverse()` | [DAG](/api/core/core) | -| `dag.filter()` | [DAG](/api/core/core) | -| `dag.merge()` | [DAG](/api/core/core) | -| `dag.collapse()` | [DAG](/api/core/core) | -| `dag.transitive_reduction()` | [DAG](/api/core/core) | -| `dag.transitive_closure()` | [DAG](/api/core/core) | -| `dag.subgraph()` | [DAG](/api/core/core) | -| `dag.subgraph_by_depth()` | [DAG](/api/core/core) | -| `dag.snapshot()` | [DAG](/api/core/core) | -| `dag.diff()` | [DAG](/api/core/core) | -| `dagron.compose()` | [DAG](/api/core/core) | -| `dag.partition_level_based()` | [DAG](/api/core/core) | -| `dag.partition_balanced()` | [DAG](/api/core/core) | - -## Next steps - -- [Serialization](/guide/core-concepts/serialization) — persist DAGs to JSON, binary, DOT, and Mermaid. -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analyze structure, critical paths, and queries. -- [Incremental Execution](/guide/execution-strategies/incremental) — use transforms to understand dirty propagation. diff --git a/docs/pages/guide/execution-strategies/approval-gates.mdx b/docs/pages/guide/execution-strategies/approval-gates.mdx deleted file mode 100644 index 599d93a..0000000 --- a/docs/pages/guide/execution-strategies/approval-gates.mdx +++ /dev/null @@ -1,466 +0,0 @@ ---- -sidebar_position: 13 -title: Approval Gates -description: Pause DAG execution at human-in-the-loop gates that wait for explicit approval or rejection. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Approval Gates - -Production pipelines often need a human checkpoint before proceeding. dagron's **approval gates** let you pause execution at specific nodes until an operator explicitly approves or rejects the step. This is useful for deployment sign-offs, data quality reviews, compliance checks, and any workflow that requires human judgment. 
- -Gates are **execution-time concerns**, not graph structure. The DAG itself stays pure; gates are attached via a `GateController` that the executor consults at runtime. - - test - test --> gate_qa - gate_qa --> stage - stage --> gate_prod - gate_prod --> deploy - style gate_qa fill:#fff3e0,stroke:#e65100 - style gate_prod fill:#fff3e0,stroke:#e65100`} - caption="A deployment pipeline with two approval gates. Execution pauses at each gate until a human approves." -/> - ---- - -## Core Classes - -| Class | Role | -|---|---| -| [`ApprovalGate`](/api/execution/gates#approvalgate) | A single gate that blocks until approved, rejected, or timed out. | -| [`GateController`](/api/execution/gates#gatecontroller) | Manages multiple named gates. Thread-safe facade for approve/reject operations. | -| [`GateStatus`](/api/execution/gates#gatestatus) | Enum: `PENDING`, `WAITING`, `APPROVED`, `REJECTED`, `TIMED_OUT`. | -| [`GateRejectedError`](/api/execution/gates#gaterejectederror) | Raised when a gate is rejected. | -| [`GateTimeoutError`](/api/execution/gates#gatetimeouterror) | Raised when a gate times out before a decision is made. | - ---- - -## Creating Gates - -### Single Gate - -An `ApprovalGate` represents a single decision point: - -```python -from dagron.execution.gates import ApprovalGate, GateStatus - -gate = ApprovalGate(timeout=300) # 5-minute timeout -print(gate.status) # GateStatus.PENDING -``` - -### GateController - -In practice you manage multiple gates through a `GateController`: - -```python -from dagron.execution.gates import ApprovalGate, GateController - -controller = GateController({ - "qa_review": ApprovalGate(timeout=600), # 10 min - "prod_deploy": ApprovalGate(timeout=300), # 5 min -}) -``` - -The controller provides a thread-safe interface for approving and rejecting gates by name. 
- ---- - -## Gate Lifecycle - -Each gate transitions through a well-defined set of states: - -```mermaid -stateDiagram-v2 - [*] --> PENDING - PENDING --> WAITING : executor calls wait_sync/wait_async - WAITING --> APPROVED : approve() called - WAITING --> REJECTED : reject(reason) called - WAITING --> TIMED_OUT : timeout expires - APPROVED --> PENDING : reset() - REJECTED --> PENDING : reset() - TIMED_OUT --> PENDING : reset() -``` - -- **PENDING** -- initial state. The gate exists but no one is waiting on it yet. -- **WAITING** -- the executor has reached this gate and is blocked, waiting for a decision. -- **APPROVED** -- the gate was approved and execution continues. -- **REJECTED** -- the gate was rejected, which raises a `GateRejectedError` in the executor. -- **TIMED_OUT** -- the timeout expired before a decision, raising a `GateTimeoutError`. - ---- - -## Approving and Rejecting - -### From Another Thread - -Gates are designed for multi-threaded use. The executor blocks on the gate in one thread, and you approve or reject from another: - -```python -import threading -from dagron.execution.gates import ApprovalGate, GateController - -controller = GateController({ - "deploy": ApprovalGate(timeout=120), -}) - -# In the executor thread, the gate blocks: -# controller.wait_sync("deploy") # blocks until approve/reject - -# From a monitoring thread, API handler, or CLI: -controller.approve("deploy") -# or: -controller.reject("deploy", reason="Failed canary analysis") -``` - -### Querying Status - -```python -print(controller.status("deploy")) # GateStatus.APPROVED - -# List all gates currently waiting for a decision -waiting = controller.waiting_gates() -print(f"Gates awaiting approval: {waiting}") -``` - ---- - -## Integrating Gates with Execution - -Gates are integrated into DAG execution via **callbacks**. The executor calls `controller.wait_sync(node_name)` when it reaches a node that has an associated gate. 
Here is a complete example: - -```python -import threading -import time -import dagron -from dagron.execution.gates import ApprovalGate, GateController, GateStatus -from dagron.execution._types import ExecutionCallbacks - -# 1. Build the DAG -dag = ( - dagron.DAG.builder() - .add_node("build") - .add_node("test") - .add_node("review_gate") - .add_node("deploy_staging") - .add_node("deploy_gate") - .add_node("deploy_prod") - .add_edge("build", "test") - .add_edge("test", "review_gate") - .add_edge("review_gate", "deploy_staging") - .add_edge("deploy_staging", "deploy_gate") - .add_edge("deploy_gate", "deploy_prod") - .build() -) - -# 2. Set up gates -controller = GateController({ - "review_gate": ApprovalGate(timeout=600), - "deploy_gate": ApprovalGate(timeout=300), -}) - -# 3. Create tasks that wait at gates -def make_gate_task(gate_name): - """Create a task that blocks on a gate.""" - def task(): - print(f" Waiting for approval at '{gate_name}'...") - controller.wait_sync(gate_name) - print(f" Gate '{gate_name}' approved!") - return "approved" - return task - -tasks = { - "build": lambda: print(" Building...") or "build-ok", - "test": lambda: print(" Testing...") or "test-ok", - "review_gate": make_gate_task("review_gate"), - "deploy_staging": lambda: print(" Deploying to staging...") or "staging-ok", - "deploy_gate": make_gate_task("deploy_gate"), - "deploy_prod": lambda: print(" Deploying to production!") or "prod-ok", -} - -# 4. Auto-approve from a background thread (simulates human operator) -def auto_approver(): - while True: - time.sleep(1) - for name in controller.waiting_gates(): - print(f" [approver] Approving '{name}'") - controller.approve(name) - -approver = threading.Thread(target=auto_approver, daemon=True) -approver.start() - -# 5. 
Execute -executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) -print(f"Completed: {result.succeeded} nodes") -``` - ---- - -## Handling Rejection - -When a gate is rejected, `wait_sync()` raises a `GateRejectedError`. If the executor is running with `fail_fast=True` (the default), all downstream nodes are skipped: - -```python -from dagron.execution.gates import GateRejectedError - -try: - controller.wait_sync("deploy_gate") -except GateRejectedError as e: - print(f"Gate '{e.gate_name}' rejected: {e.reason}") - # With fail_fast, deploy_prod will be skipped -``` - - test --> gate --> deploy - style gate fill:#ffcdd2,stroke:#c62828 - style deploy fill:#e0e0e0,stroke:#9e9e9e`} - caption="When the deploy_gate is rejected, downstream nodes are skipped." -/> - ---- - -## Handling Timeouts - -If no decision is made before the timeout expires, `GateTimeoutError` is raised: - -```python -from dagron.execution.gates import ApprovalGate, GateTimeoutError - -gate = ApprovalGate(timeout=5) # 5-second timeout - -try: - gate.wait_sync() -except GateTimeoutError as e: - print(f"Timed out after {e.timeout}s") - print(f"Gate status: {gate.status}") # GateStatus.TIMED_OUT -``` - ---- - -## Async Gates - -Gates work seamlessly with async code: - -```python -import asyncio -from dagron.execution.gates import ApprovalGate - -gate = ApprovalGate(timeout=60) - -async def wait_for_approval(): - print("Waiting for approval...") - await gate.wait_async() - print("Approved!") - -async def approve_later(): - await asyncio.sleep(2) - gate.approve() - -async def main(): - await asyncio.gather( - wait_for_approval(), - approve_later(), - ) - -asyncio.run(main()) -``` - -The `GateController` also provides `wait_async()`: - -```python -await controller.wait_async("deploy_gate") -``` - ---- - -## Auto-Approve for Testing - -During development and testing, you often want to skip the human approval step. 
Set `auto_approve=True`: - -```python -gate = ApprovalGate(auto_approve=True) -print(gate.status) # GateStatus.APPROVED -- immediately approved - -# wait_sync() returns immediately -gate.wait_sync() -``` - -You can use this to build a test-mode controller: - -```python -def make_controller(test_mode=False): - return GateController({ - "qa_review": ApprovalGate( - timeout=None if test_mode else 600, - auto_approve=test_mode, - ), - "prod_deploy": ApprovalGate( - timeout=None if test_mode else 300, - auto_approve=test_mode, - ), - }) - -# In tests: -controller = make_controller(test_mode=True) - -# In production: -controller = make_controller(test_mode=False) -``` - ---- - -## Resetting Gates - -Gates can be reset for reuse. This is useful for retry loops: - -```python -gate = ApprovalGate(timeout=60) -gate.reject("bad config") -print(gate.status) # GateStatus.REJECTED - -gate.reset() -print(gate.status) # GateStatus.PENDING -- ready for another round - -# Reset all gates in a controller -controller.reset_all() -``` - ---- - -## Combining with the Dashboard - -The [DashboardPlugin](/guide/advanced/plugins-hooks) integrates with gates to provide a web UI for approving and rejecting: - -```python -from dagron.dashboard import DashboardPlugin -from dagron.execution.gates import ApprovalGate, GateController - -controller = GateController({ - "review": ApprovalGate(timeout=600), - "deploy": ApprovalGate(timeout=300), -}) - -dashboard = DashboardPlugin( - port=8765, - gate_controller=controller, # wires approve/reject buttons in the UI - open_browser=True, -) -``` - -When a gate enters the `WAITING` state, the dashboard shows an **Approve** / **Reject** button that operators can click from their browser. 
- ---- - -## GateController API Summary - -```python -controller = GateController({ - "qa": ApprovalGate(timeout=600), - "prod": ApprovalGate(timeout=300), -}) - -# Add a gate after construction -controller.add_gate("staging", ApprovalGate()) - -# Approve / reject -controller.approve("qa") -controller.reject("prod", reason="Canary failed") - -# Query -controller.status("qa") # GateStatus.APPROVED -controller.waiting_gates() # ["staging"] -controller.has_gate("qa") # True -controller.get_gate("qa") # ApprovalGate instance or None - -# Wait (blocking) -controller.wait_sync("staging") - -# Wait (async) -await controller.wait_async("staging") - -# Reset all gates to PENDING -controller.reset_all() -``` - ---- - -## Patterns and Best Practices - -### Pattern: HTTP Webhook Approval - -Expose an HTTP endpoint that approves gates when called by a CI system: - -```python -from fastapi import FastAPI -from dagron.execution.gates import GateController - -app = FastAPI() - -# Shared controller (in-process) -controller: GateController = ... - -@app.post("/gates/{gate_name}/approve") -def approve_gate(gate_name: str): - controller.approve(gate_name) - return {"status": "approved"} - -@app.post("/gates/{gate_name}/reject") -def reject_gate(gate_name: str, reason: str = ""): - controller.reject(gate_name, reason=reason) - return {"status": "rejected"} -``` - -### Pattern: Slack Approval Bot - -Poll `waiting_gates()` and post to Slack when a gate is waiting: - -```python -import time - -while True: - for name in controller.waiting_gates(): - send_slack_message(f"Gate '{name}' needs approval. 
Reply /approve or /reject") - time.sleep(10) -``` - -### Pattern: Conditional Auto-Approval - -Auto-approve gates based on runtime conditions: - -```python -def conditional_approver(controller, rules): - """Auto-approve gates that pass predefined rules.""" - for name in controller.waiting_gates(): - rule = rules.get(name) - if rule and rule(): - controller.approve(name) - -# Example: auto-approve QA if all tests pass -rules = { - "qa_review": lambda: all_tests_passed, -} -``` - ---- - -## Related - -- [API Reference: Gates](/api/execution/gates) -- full API documentation. -- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- using DashboardPlugin for gate management. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- standard execution model. -- [Error Handling](/guide/observability/error-handling) -- how `GateRejectedError` and `GateTimeoutError` fit into the error hierarchy. diff --git a/docs/pages/guide/execution-strategies/caching.mdx b/docs/pages/guide/execution-strategies/caching.mdx deleted file mode 100644 index 4a8262a..0000000 --- a/docs/pages/guide/execution-strategies/caching.mdx +++ /dev/null @@ -1,461 +0,0 @@ ---- -sidebar_position: 14 -title: Caching -description: Content-addressable Merkle-tree caching for cross-run DAG execution with dagron. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Caching - -dagron's caching system provides **content-addressable Merkle-tree caching** for DAG execution results. When you re-execute a pipeline, nodes whose inputs have not changed return their cached result instantly -- no recomputation needed. This is conceptually similar to how build systems like Bazel and Nix work: change any upstream node and all downstream cache keys automatically invalidate. 
- - B --> C - A --> D - style A fill:#c8e6c9,stroke:#2e7d32 - style B fill:#c8e6c9,stroke:#2e7d32 - style C fill:#fff9c4,stroke:#f9a825 - style D fill:#c8e6c9,stroke:#2e7d32`} - caption="Green nodes are cache hits. Yellow nodes had an upstream change and must re-execute." -/> - ---- - -## How Merkle-Tree Keys Work - -For each node, dagron computes a cache key from three inputs: - -1. **Node name** -- the identity of the node. -2. **Task source hash** -- a hash of the callable's source code (or bytecode as fallback). -3. **Predecessor result hashes** -- the hashes of all upstream nodes' results, sorted by name. - -``` -cache_key = SHA256( node_name || task_source_hash || pred1_name:pred1_hash || pred2_name:pred2_hash ) -``` - -Because each node's key includes the hashes of its predecessors' results, a change to **any** upstream node automatically produces a different key for all downstream nodes. This is the "Merkle tree" property -- changes propagate without explicitly tracking what changed. - -```mermaid -graph TD - A["extract
key: sha256(extract, src_hash_A)"] - B["transform
key: sha256(transform, src_hash_B, extract:result_hash_A)"] - C["load
key: sha256(load, src_hash_C, transform:result_hash_B)"] - A --> B --> C -``` - ---- - -## Core Classes - -| Class | Role | -|---|---| -| [`CachedDAGExecutor`](/api/execution/caching#cacheddagexecutor) | Executes a DAG with caching. On cache hit, returns stored result without running the task. | -| [`ContentAddressableCache`](/api/execution/caching#contentaddressablecache) | High-level cache that manages Merkle-tree key computation and delegates storage to a backend. | -| [`CacheKeyBuilder`](/api/execution/caching#cachekeybuilder) | Computes SHA-256 cache keys from node name, task hash, and predecessor hashes. | -| [`CachePolicy`](/api/execution/caching#cachepolicy) | Eviction rules: `max_entries`, `max_size_bytes`, `ttl_seconds`. | -| [`FileSystemCacheBackend`](/api/execution/caching#filesystemcachebackend) | Stores cached values as pickle files on disk with an index for LRU/TTL tracking. | -| [`CacheStats`](/api/execution/caching#cachestats) | Hit count, miss count, eviction count, total entries, total size, and computed `hit_rate`. | -| [`CacheKeyProtocol`](/api/execution/caching#cachekeyprotocol) | Protocol for objects that provide their own cache key via `__dagron_cache_key__()`. | - ---- - -## Quick Start - -```python -import dagron -from dagron.execution.cached_executor import CachedDAGExecutor -from dagron.execution.content_cache import ( - CachePolicy, - ContentAddressableCache, - FileSystemCacheBackend, -) - -# 1. Build the DAG -dag = ( - dagron.DAG.builder() - .add_node("fetch") - .add_node("clean") - .add_node("aggregate") - .add_node("report") - .add_edge("fetch", "clean") - .add_edge("clean", "aggregate") - .add_edge("aggregate", "report") - .build() -) - -# 2. Create a cache backend with eviction policy -policy = CachePolicy( - max_entries=1000, - max_size_bytes=500 * 1024 * 1024, # 500 MB - ttl_seconds=3600, # 1 hour -) -backend = FileSystemCacheBackend("/tmp/dagron_cache", policy=policy) -cache = ContentAddressableCache(backend) - -# 3. 
Define tasks -tasks = { - "fetch": lambda: {"rows": 10000}, - "clean": lambda: {"rows": 9800}, - "aggregate": lambda: {"total": 42.0}, - "report": lambda: "Report generated", -} - -# 4. First run -- all cache misses -executor = CachedDAGExecutor(dag, cache) -result = executor.execute(tasks) -print(f"Hits: {result.cache_hits}, Misses: {result.cache_misses}") -# Hits: 0, Misses: 4 - -# 5. Second run -- all cache hits (tasks unchanged) -result = executor.execute(tasks) -print(f"Hits: {result.cache_hits}, Misses: {result.cache_misses}") -# Hits: 4, Misses: 0 -``` - ---- - -## CachedExecutionResult - -The `CachedDAGExecutor.execute()` method returns a `CachedExecutionResult` that wraps the standard `ExecutionResult` and adds cache-specific statistics: - -```python -result = executor.execute(tasks) - -# Standard execution stats -print(result.execution_result.succeeded) -print(result.execution_result.failed) - -# Cache stats -print(f"Cache hits: {result.cache_hits}") -print(f"Cache misses: {result.cache_misses}") -print(f"Nodes executed (cache miss): {result.nodes_executed}") -print(f"Nodes cached (cache hit): {result.nodes_cached}") -``` - -When a node is a cache hit, its result status is `NodeStatus.CACHE_HIT` and `duration_seconds` is `0.0`. - ---- - -## Cache Invalidation - -The Merkle-tree approach provides **automatic invalidation**. You never need to manually invalidate cache entries. 
Here is how changes propagate: - -### Change a task's code - -If you modify the source code of a task function, its source hash changes, which changes its cache key: - -```python -# Original -tasks["clean"] = lambda: {"rows": 9800} - -# Modified -- different source, different key -tasks["clean"] = lambda: {"rows": 9800, "filtered": True} - -result = executor.execute(tasks) -# clean is a cache miss, and so are aggregate and report (downstream) -``` - -### Change an upstream result - -If `fetch` returns different data, its result hash changes, which invalidates `clean`, `aggregate`, and `report`: - -```python -tasks["fetch"] = lambda: {"rows": 20000} # different result - -result = executor.execute(tasks) -# All 4 nodes are cache misses because the root changed -``` - -### Unchanged branches stay cached - -If only one branch changes, the other branch remains cached: - - clean - clean --> stats - clean --> model - stats --> report - model --> report - style raw fill:#fff9c4,stroke:#f9a825 - style clean fill:#fff9c4,stroke:#f9a825 - style stats fill:#fff9c4,stroke:#f9a825 - style model fill:#c8e6c9,stroke:#2e7d32 - style report fill:#fff9c4,stroke:#f9a825`} - caption="If raw_data changes but the model task source is identical, model may still be a cache hit if its upstream result hash is the same." -/> - ---- - -## CachePolicy and Eviction - -The `CachePolicy` controls when old entries are evicted: - -```python -from dagron.execution.content_cache import CachePolicy - -policy = CachePolicy( - max_entries=500, # LRU eviction after 500 entries - max_size_bytes=1_073_741_824, # 1 GB total - ttl_seconds=7200, # entries expire after 2 hours -) -``` - -| Parameter | Behavior | -|---|---| -| `max_entries` | When the entry count exceeds this, the **least recently accessed** entry is evicted. | -| `max_size_bytes` | When total size exceeds this, LRU entries are evicted until the size is under the limit. 
| -| `ttl_seconds` | Entries older than this are treated as expired on read and automatically removed. | - -All three constraints are checked during `put()`. TTL is also checked during `get()`. - ---- - -## FileSystemCacheBackend - -The `FileSystemCacheBackend` stores values as pickle files and maintains a JSON index for metadata: - -```python -from dagron.execution.content_cache import FileSystemCacheBackend - -backend = FileSystemCacheBackend( - cache_dir="/var/cache/dagron/my_pipeline", - policy=CachePolicy(max_entries=1000, ttl_seconds=86400), -) -``` - -The directory structure looks like: - -``` -/var/cache/dagron/my_pipeline/ - index.json # metadata index (atomic writes) - a1b2c3d4e5f6g7h8.pkl # pickled result (key prefix) - ... -``` - -The backend uses **atomic writes** (`write to .tmp, then rename`) so that cache corruption from crashes is avoided. - -### Backend Operations - -```python -# Get a cached value -value, found = backend.get("sha256_key_here") - -# Store a value -from dagron.execution.content_cache import CacheEntryMetadata -meta = CacheEntryMetadata(node_name="clean", cache_key="sha256_key_here") -backend.put("sha256_key_here", {"rows": 9800}, meta) - -# Check existence -exists = backend.has("sha256_key_here") - -# Delete a specific entry -backend.delete("sha256_key_here") - -# Clear the entire cache -backend.clear() - -# Get statistics -stats = backend.stats() -print(f"Entries: {stats.total_entries}, Size: {stats.total_size_bytes} bytes") -``` - ---- - -## CacheStats - -After execution, inspect cache health: - -```python -stats = cache.stats() -print(f"Hits: {stats.hits}") -print(f"Misses: {stats.misses}") -print(f"Evictions: {stats.evictions}") -print(f"Entries: {stats.total_entries}") -print(f"Size: {stats.total_size_bytes / 1024 / 1024:.1f} MB") -print(f"Hit rate: {stats.hit_rate:.1%}") -``` - -A healthy pipeline running repeatedly should converge toward a high `hit_rate` (90%+ when only a few nodes change between runs). 
- ---- - -## Custom Cache Keys with CacheKeyProtocol - -If your task returns objects that are not easily serializable via `pickle`, implement the `CacheKeyProtocol`: - -```python -from dagron.execution.content_cache import CacheKeyProtocol - -class TrainedModel: - def __init__(self, weights_path: str, metrics: dict): - self.weights_path = weights_path - self.metrics = metrics - - def __dagron_cache_key__(self) -> str: - """Return a stable, deterministic cache key.""" - import hashlib - content = f"{self.weights_path}:{sorted(self.metrics.items())}" - return hashlib.sha256(content.encode()).hexdigest() -``` - -When dagron hashes this object's result, it calls `__dagron_cache_key__()` instead of pickling the entire object. This is useful for: - -- Large objects where pickle is expensive. -- Objects with non-deterministic pickle output. -- Objects that reference external state (file paths, database connections). - ---- - -## ContentAddressableCache API - -The `ContentAddressableCache` is the high-level interface you pass to `CachedDAGExecutor`: - -```python -from dagron.execution.content_cache import ContentAddressableCache - -cache = ContentAddressableCache(backend) - -# Compute a cache key for a node -key = cache.compute_key( - node_name="clean", - task_fn=clean_fn, - predecessor_result_hashes={"fetch": "a1b2c3..."}, -) - -# Get / put / check -value, found = cache.get(key) -cache.put(key, {"rows": 9800}, node_name="clean") -cache.has(key) - -# Clear all entries -cache.clear() - -# Get stats -stats = cache.stats() -``` - ---- - -## Combining Caching with Other Features - -### Caching + Tracing - -Enable tracing to see which nodes were cache hits vs misses in the execution timeline: - -```python -executor = CachedDAGExecutor(dag, cache, enable_tracing=True) -result = executor.execute(tasks) - -trace = result.execution_result.trace -if trace: - trace.to_chrome_json("cached_run.json") -``` - -The trace will include `NODE_CACHE_HIT` and `NODE_CACHE_MISS` events. 
-
-### Caching + Fail-Fast
-
-By default, `fail_fast=True`. If a node fails, downstream nodes are skipped (not cached):
-
-```python
-executor = CachedDAGExecutor(dag, cache, fail_fast=True)
-```
-
-### Caching vs Incremental Execution
-
-dagron also provides an [IncrementalExecutor](/guide/execution-strategies/incremental) that re-executes only nodes in the "dirty set" of explicitly changed nodes. The key difference:
-
-| Feature | CachedDAGExecutor | IncrementalExecutor |
-|---|---|---|
-| Cross-run persistence | Yes (disk-backed) | No (in-memory) |
-| Invalidation | Automatic (Merkle keys) | Manual (`changed_nodes` list) |
-| Overhead | Hash computation + disk I/O | Dirty-set computation |
-| Best for | CI pipelines, batch jobs | Interactive/reactive workflows |
-
-You can use both together: `CachedDAGExecutor` for cross-run caching and `IncrementalExecutor` for within-run incremental updates.
-
----
-
-## Writing a Custom Backend
-
-Implement the `CacheBackend` protocol to use Redis, S3, or any storage system:
-
-```python
-from typing import Any
-
-from dagron.execution.content_cache import CacheBackend, CacheEntryMetadata, CacheStats
-
-class RedisCacheBackend:
-    """Example Redis-backed cache backend."""
-
-    def __init__(self, redis_url: str):
-        import redis
-        self._client = redis.from_url(redis_url)
-        self._stats = CacheStats()
-
-    def get(self, key: str) -> tuple[Any, bool]:
-        data = self._client.get(f"dagron:{key}")
-        if data is None:
-            self._stats.misses += 1
-            return None, False
-        import pickle
-        self._stats.hits += 1
-        return pickle.loads(data), True
-
-    def put(self, key: str, value: Any, metadata: CacheEntryMetadata) -> None:
-        import pickle
-        data = pickle.dumps(value)
-        self._client.set(f"dagron:{key}", data)
-
-    def has(self, key: str) -> bool:
-        return self._client.exists(f"dagron:{key}") > 0
-
-    def delete(self, key: str) -> None:
-        self._client.delete(f"dagron:{key}")
-
-    def clear(self) -> None:
-        for key in self._client.scan_iter("dagron:*"):
-            self._client.delete(key)
- - def stats(self) -> CacheStats: - return self._stats - -# Usage -backend = RedisCacheBackend("redis://localhost:6379") -cache = ContentAddressableCache(backend) -executor = CachedDAGExecutor(dag, cache) -``` - ---- - -## Best Practices - -1. **Use a TTL in production.** Without a TTL, stale cache entries from old pipeline versions can accumulate. A TTL of 24-48 hours is a reasonable default. - -2. **Set `max_size_bytes`.** Prevent the cache from consuming unbounded disk space. - -3. **Use `CacheKeyProtocol` for large objects.** If your nodes return multi-GB DataFrames, implement `__dagron_cache_key__()` to hash a fingerprint instead of the full data. - -4. **Monitor `hit_rate`.** A low hit rate suggests frequent code changes or non-deterministic task outputs. Check `CacheStats` after each run. - -5. **Share caches across CI runs.** Mount the cache directory as a persistent volume in your CI system to get cross-run cache hits. - ---- - -## Related - -- [API Reference: Caching](/api/execution/caching) -- full API documentation. -- [Incremental Execution](/guide/execution-strategies/incremental) -- in-memory dirty-set-based re-execution. -- [Tracing & Profiling](/guide/observability/tracing-profiling) -- visualizing cache hit/miss events. -- [Checkpointing](/guide/execution-strategies/checkpointing) -- saving and resuming execution state. diff --git a/docs/pages/guide/execution-strategies/checkpointing.mdx b/docs/pages/guide/execution-strategies/checkpointing.mdx deleted file mode 100644 index 844b0a7..0000000 --- a/docs/pages/guide/execution-strategies/checkpointing.mdx +++ /dev/null @@ -1,478 +0,0 @@ ---- -sidebar_position: 11 -title: Checkpointing -description: Save execution progress to disk and resume after failures — CheckpointExecutor for fault-tolerant, resumable DAG pipelines. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Checkpointing - -Long-running pipelines fail. 
Networks drop, machines reboot, dependencies crash. -Without checkpointing, a failure at step 95 of 100 means re-running all 95 -successful steps. dagron's [`CheckpointExecutor`](/api/execution/checkpoint) saves -progress to disk after each node completes, so you can resume from exactly where -you left off. - -## Concepts - -### Checkpoint directory - -The `CheckpointExecutor` writes checkpoint files to a directory you specify. Each -file records the status and result of a completed node. On resume, the executor -reads these files, skips already-completed nodes, and picks up from the first -incomplete node. - -### Execute / resume cycle - -1. **First run**: call `.execute(tasks)`. If everything succeeds, checkpoint - files are cleaned up automatically. -2. **Failure**: some nodes fail. Checkpoint files for completed nodes remain on - disk. -3. **Resume**: call `.resume(tasks)`. The executor reads the checkpoint, skips - completed nodes, and retries the failed and remaining nodes. - -### Checkpoint info - -The `.checkpoint_info()` method returns metadata about the current checkpoint -state: which nodes are completed, which failed, and what the last run time was. - -## Basic usage - -```python -import dagron -import time - -dag = ( - dagron.DAG.builder() - .add_edge("extract", "transform") - .add_edge("transform", "validate") - .add_edge("validate", "load") - .add_edge("load", "notify") - .build() -) -``` - - transform --> validate --> load --> notify`} - caption="Five-step pipeline. We will simulate a failure at the 'load' step." 
-/> - -### First run with a failure - -```python -call_count = 0 - -def flaky_load(): - """Simulates a task that fails on the first attempt.""" - global call_count - call_count += 1 - if call_count == 1: - raise ConnectionError("Database connection lost") - return "loaded 1000 rows" - -tasks = { - "extract": lambda: time.sleep(2) or "extracted 1000 rows", - "transform": lambda: time.sleep(1) or "transformed", - "validate": lambda: "all rows valid", - "load": flaky_load, - "notify": lambda: "email sent", -} - -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/pipeline_checkpoint") -result = executor.execute(tasks) - -print(result.succeeded) # 3 (extract, transform, validate) -print(result.failed) # 1 (load) -print(result.skipped) # 1 (notify) -``` - -After this run, the checkpoint directory contains files for the three completed -nodes: - -| Node | Status | Checkpointed? | -|------|--------|:------------:| -| extract | | Yes | -| transform | | Yes | -| validate | | Yes | -| load | | No | -| notify | | No | - -### Resume after fixing the issue - -```python -# The flaky_load function will succeed on the second call (call_count is now 1) -result = executor.resume(tasks) - -print(result.succeeded) # 2 (load, notify — the rest were restored from checkpoint) -print(result.failed) # 0 -``` - -| Node | Status | Source | -|------|--------|--------| -| extract | | Restored from checkpoint | -| transform | | Restored from checkpoint | -| validate | | Restored from checkpoint | -| load | | Re-executed | -| notify | | Executed | - - transform:::restored --> validate:::restored --> load:::rerun --> notify:::rerun`} - caption="On resume, extract/transform/validate are restored from checkpoint (blue). Only load and notify execute (green)." -/> - -The expensive `extract` (2 seconds) and `transform` (1 second) steps were not -re-run, saving 3 seconds on the resume. 
- -## Constructor - -```python -dagron.CheckpointExecutor( - dag, # The DAG to execute - checkpoint_dir, # Path to the checkpoint directory (str or Path) -) -``` - -The directory is created automatically if it does not exist. - -## API methods - -### execute(tasks) - -Runs the pipeline from scratch, checkpointing each completed node: - -```python -result = executor.execute(tasks) -``` - -If the entire pipeline succeeds, checkpoint files are cleaned up. If any node -fails, checkpoint files for completed nodes remain. - -### resume(tasks) - -Reads the checkpoint and resumes from the last incomplete node: - -```python -result = executor.resume(tasks) -``` - -Nodes that were previously completed are not re-executed — their results are -restored from the checkpoint. - -### checkpoint_info() - -Returns metadata about the current checkpoint: - -```python -info = executor.checkpoint_info() -print(info) -``` - -Output: - -```python -CheckpointInfo( - exists=True, - completed_nodes=['extract', 'transform', 'validate'], - failed_nodes=['load'], - skipped_nodes=['notify'], - last_run_time='2025-01-15T10:30:00', - total_checkpointed=3, -) -``` - -Use this to build monitoring dashboards or decide whether to resume or -start fresh. - -### clear_checkpoint() - -Remove all checkpoint files: - -```python -executor.clear_checkpoint() -info = executor.checkpoint_info() -print(info.exists) # False -``` - -This is useful when you want to force a full re-run. 
- -## Complete example: ETL with retry - -Here is a production-style pattern with automatic retry: - -```python -import dagron -import time -import logging - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("pipeline") - -dag = ( - dagron.DAG.builder() - .add_nodes(["fetch_api", "fetch_db", "merge", "transform", - "validate", "load", "update_dashboard"]) - .add_edges([ - ("fetch_api", "merge"), - ("fetch_db", "merge"), - ("merge", "transform"), - ("transform", "validate"), - ("validate", "load"), - ("load", "update_dashboard"), - ]) - .build() -) - -tasks = { - "fetch_api": lambda: time.sleep(5) or {"api_rows": 10000}, - "fetch_db": lambda: time.sleep(3) or {"db_rows": 50000}, - "merge": lambda: {"total_rows": 60000}, - "transform": lambda: time.sleep(10) or {"transformed": 60000}, - "validate": lambda: {"valid": True, "bad_rows": 0}, - "load": lambda: time.sleep(2) or "loaded", - "update_dashboard": lambda: "dashboard updated", -} - -CHECKPOINT_DIR = "/var/data/pipeline_checkpoints/etl_daily" - -def run_with_retry(max_attempts=3): - executor = dagron.CheckpointExecutor(dag, checkpoint_dir=CHECKPOINT_DIR) - - # Check if there is an existing checkpoint to resume - info = executor.checkpoint_info() - if info.exists: - logger.info(f"Found checkpoint with {info.total_checkpointed} completed nodes") - logger.info(f"Resuming from last failure...") - result = executor.resume(tasks) - else: - logger.info("Starting fresh execution") - result = executor.execute(tasks) - - # Retry loop for transient failures - attempts = 1 - while result.failed > 0 and attempts < max_attempts: - attempts += 1 - logger.warning(f"Attempt {attempts}: {result.failed} nodes failed, retrying...") - time.sleep(5) # back off before retry - result = executor.resume(tasks) - - if result.failed > 0: - logger.error(f"Pipeline failed after {attempts} attempts") - # Leave checkpoint for manual inspection - else: - logger.info(f"Pipeline succeeded in {attempts} attempt(s)") - # 
Checkpoint auto-cleaned on full success - - return result - -result = run_with_retry() -``` - - merge - fetch_db --> merge - merge --> transform --> validate --> load --> update_dashboard`} - caption="ETL pipeline with automatic checkpoint-based retry." -/> - -## Checkpointing with parallel branches - -Checkpointing works correctly with fan-out / fan-in topologies. Each completed -node is checkpointed independently: - -```python -dag = ( - dagron.DAG.builder() - .add_edge("source", "branch_a") - .add_edge("source", "branch_b") - .add_edge("source", "branch_c") - .add_edge("branch_a", "join") - .add_edge("branch_b", "join") - .add_edge("branch_c", "join") - .build() -) -``` - -If `branch_b` fails but `source`, `branch_a`, and `branch_c` succeed, the -checkpoint stores all three. On resume, only `branch_b` and `join` need to -run. - -| Node | First run | Resume | -|------|-----------|--------| -| source | | Restored | -| branch_a | | Restored | -| branch_b | | Re-executed | -| branch_c | | Restored | -| join | | Executed | - - branch_a:::restored --> join:::rerun - source --> branch_b:::rerun --> join - source --> branch_c:::restored --> join`} - caption="On resume, only branch_b and join need to execute." 
-/> - -## Monitoring checkpoint state - -Build observability around checkpoint state: - -```python -def monitor_pipeline(executor): - info = executor.checkpoint_info() - - if not info.exists: - print("No checkpoint found — pipeline has not run or completed cleanly.") - return - - total = dag.node_count() - completed = info.total_checkpointed - pct = (completed / total) * 100 - - print(f"Pipeline progress: {completed}/{total} ({pct:.0f}%)") - print(f"Completed: {info.completed_nodes}") - print(f"Failed: {info.failed_nodes}") - print(f"Remaining: {total - completed - len(info.failed_nodes)}") - print(f"Last run: {info.last_run_time}") -``` - -### Integrating with alerting - -```python -import json - -def checkpoint_to_metrics(executor): - """Export checkpoint state as metrics for Prometheus/DataDog.""" - info = executor.checkpoint_info() - return { - "pipeline.checkpoint.exists": 1 if info.exists else 0, - "pipeline.checkpoint.completed": info.total_checkpointed, - "pipeline.checkpoint.failed": len(info.failed_nodes), - "pipeline.checkpoint.progress_pct": ( - info.total_checkpointed / dag.node_count() * 100 - if info.exists else 0 - ), - } -``` - -## Checkpoint directory structure - -The checkpoint directory contains one file per completed node plus a metadata -file: - -``` -/tmp/pipeline_checkpoint/ - _metadata.json # Run metadata (start time, DAG hash, etc.) - extract.checkpoint # Serialized NodeResult for 'extract' - transform.checkpoint # Serialized NodeResult for 'transform' - validate.checkpoint # Serialized NodeResult for 'validate' -``` - -:::caution -Do not modify checkpoint files manually. The executor validates file integrity -on resume and will reject tampered checkpoints. -::: - -## When to clear checkpoints - -Clear checkpoints when: - -- **The DAG structure changed.** Adding or removing nodes invalidates the - checkpoint. The executor detects this and raises an error on resume. 
-- **Task logic changed.** If you fixed a bug in a task that already - checkpointed successfully, you need to re-run it. -- **You want a clean start.** For periodic batch pipelines, clear the - checkpoint before each scheduled run. - -```python -# Before a scheduled daily run -executor.clear_checkpoint() -result = executor.execute(tasks) -``` - -## Combining with other features - -### Checkpointing + Incremental execution - -Use checkpointing for fault tolerance and -[incremental execution](/guide/execution-strategies/incremental) for change-based -optimization: - -```python -# First: use IncrementalExecutor to determine what needs to run -inc_executor = dagron.IncrementalExecutor(dag) -inc_result = inc_executor.execute(tasks, changed_nodes=["source"]) - -# Then: use CheckpointExecutor for fault tolerance on the re-execution -cp_executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/cp") -cp_result = cp_executor.execute(tasks) -``` - -### Checkpointing + Tracing - -```python -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/cp") -result = executor.execute(tasks) - -# Note: tracing captures only the nodes that actually executed, -# not the ones restored from checkpoint. -``` - -### Checkpointing + Conditional execution - -Checkpoint files record the status of conditionally-skipped nodes, so resume -correctly handles conditional branches: - -```python -# A conditionally-skipped node is recorded as SKIPPED in the checkpoint -# and remains skipped on resume. -``` - -## Best practices - -1. **Use unique checkpoint directories.** For concurrent pipeline runs, use - unique directories (e.g., include a run ID or timestamp): - ```python - checkpoint_dir = f"/tmp/checkpoints/run_{run_id}" - ``` - -2. **Clean up old checkpoints.** Implement a retention policy to avoid - accumulating stale checkpoint directories. - -3. **Monitor checkpoint size.** Node results are serialized to disk. 
If - tasks return large objects (dataframes, models), checkpoint files can grow - large. Consider returning metadata references instead. - -4. **Handle DAG changes gracefully.** Before resuming, compare the current DAG - hash against the checkpoint metadata. If they differ, clear and re-run. - -5. **Test your resume path.** Deliberately inject failures in tests and verify - that resume produces the correct final results. - -6. **Set appropriate file permissions.** Checkpoint files may contain sensitive - results. Ensure the checkpoint directory has restrictive permissions. - -## API reference - -| Class / Method | Docs | -|----------------|------| -| `CheckpointExecutor` | [Checkpoint](/api/execution/checkpoint) | -| `CheckpointExecutor.execute()` | [Checkpoint](/api/execution/checkpoint) | -| `CheckpointExecutor.resume()` | [Checkpoint](/api/execution/checkpoint) | -| `CheckpointExecutor.checkpoint_info()` | [Checkpoint](/api/execution/checkpoint) | -| `CheckpointExecutor.clear_checkpoint()` | [Checkpoint](/api/execution/checkpoint) | - -## Next steps - -- [Incremental Execution](/guide/execution-strategies/incremental) — only re-run what changed. -- [Tracing & Profiling](/guide/observability/tracing-profiling) — analyze resume execution performance. -- [Conditional Execution](/guide/execution-strategies/conditional) — gate branches with runtime predicates. -- [Getting Started](/guide/getting-started) — back to the basics. diff --git a/docs/pages/guide/execution-strategies/conditional.mdx b/docs/pages/guide/execution-strategies/conditional.mdx deleted file mode 100644 index 4146369..0000000 --- a/docs/pages/guide/execution-strategies/conditional.mdx +++ /dev/null @@ -1,456 +0,0 @@ ---- -sidebar_position: 9 -title: Conditional Execution -description: Gate DAG branches with runtime predicates — skip nodes dynamically based on upstream results using ConditionalDAGBuilder and ConditionalExecutor. 
---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Conditional Execution - -Not every branch in a pipeline should always run. Sometimes you want to skip -expensive model training if validation fails, route data to different processors -based on its type, or gate a deployment step on quality metrics. dagron's -**conditional execution** system lets you attach predicates to edges that -are evaluated at runtime, dynamically gating which branches execute. - -## Concepts - -### Conditional edges - -A **conditional edge** is an edge with an attached predicate — a Python function -that receives the upstream node's result and returns `True` (execute the -downstream node) or `False` (skip it). - -``` -condition: lambda result: result["score"] > 0.9 -``` - -When a conditional edge evaluates to `False`, the downstream node and all of -its descendants are . - -### ConditionalDAGBuilder - -The [`ConditionalDAGBuilder`](/api/execution/conditions) extends the standard builder -with support for condition predicates on edges. - -### ConditionalExecutor - -The [`ConditionalExecutor`](/api/execution/conditions) evaluates conditions at -runtime and only dispatches nodes whose incoming conditions are satisfied. - -## Building a conditional DAG - -```python -import dagron - -builder = dagron.ConditionalDAGBuilder() - -builder.add_node("validate") -builder.add_node("fast_path") -builder.add_node("slow_path") -builder.add_node("merge") - -# Unconditional edge -builder.add_edge("validate", "fast_path", condition=lambda r: r["size"] < 1000) -builder.add_edge("validate", "slow_path", condition=lambda r: r["size"] >= 1000) -builder.add_edge("fast_path", "merge") -builder.add_edge("slow_path", "merge") - -dag, conditions = builder.build() -``` - -The `.build()` method returns a tuple: the DAG and a dictionary of conditions -keyed by `(from_node, to_node)` tuples. 
- -|"size < 1000"| fast_path --> merge - validate -->|"size >= 1000"| slow_path --> merge`} - caption="Conditional branching. Only one path executes based on the validation result." -/> - -## Executing with conditions - -```python -tasks = { - "validate": lambda: {"size": 500, "valid": True}, - "fast_path": lambda: "processed quickly", - "slow_path": lambda: "processed with full pipeline", - "merge": lambda: "done", -} - -executor = dagron.ConditionalExecutor(dag, conditions) -result = executor.execute(tasks) - -# fast_path runs (size=500 < 1000), slow_path is skipped -print(result.node_results["fast_path"].status) # COMPLETED -print(result.node_results["slow_path"].status) # SKIPPED -print(result.node_results["merge"].status) # COMPLETED -``` - -| Node | Status | Why | -|------|--------|-----| -| validate | | Always runs (root) | -| fast_path | | Condition `size < 1000` is True | -| slow_path | | Condition `size >= 1000` is False | -| merge | | At least one predecessor completed | - -### What if the data is large? - -```python -tasks["validate"] = lambda: {"size": 5000, "valid": True} - -result = executor.execute(tasks) -print(result.node_results["fast_path"].status) # SKIPPED -print(result.node_results["slow_path"].status) # COMPLETED -``` - -|"size < 1000 ✗"| fast_path:::skipped - validate:::active -->|"size >= 1000 ✓"| slow_path:::active - fast_path --> merge:::active - slow_path --> merge`} - caption="With size=5000, slow_path executes and fast_path is skipped." 
-/> - -## Condition predicates - -Conditions are plain Python callables that receive the predecessor's return -value: - -```python -# Simple threshold -condition=lambda result: result > 0.9 - -# Dictionary access -condition=lambda result: result["status"] == "ok" - -# Complex logic -def should_retrain(result): - return ( - result["accuracy"] < 0.95 - or result["data_drift"] > 0.1 - or result["days_since_last_train"] > 7 - ) - -builder.add_edge("evaluate", "retrain", condition=should_retrain) -``` - -### Multi-input conditions - -When a node has multiple conditional predecessors, the executor evaluates each -incoming edge independently. The node runs if **at least one** incoming -conditional edge evaluates to `True`: - -```python -builder = dagron.ConditionalDAGBuilder() -builder.add_node("source_a") -builder.add_node("source_b") -builder.add_node("process") - -builder.add_edge("source_a", "process", condition=lambda r: r is not None) -builder.add_edge("source_b", "process", condition=lambda r: r is not None) - -dag, conditions = builder.build() - -tasks = { - "source_a": lambda: None, # condition False - "source_b": lambda: "data", # condition True - "process": lambda: "processed", -} - -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -print(result.node_results["process"].status) # COMPLETED (source_b passed) -``` - -## Real-world example: ML pipeline with quality gates - -A common pattern is gating deployment on model quality: - -```python -import dagron - -builder = dagron.ConditionalDAGBuilder() - -# Pipeline stages -builder.add_node("load_data") -builder.add_node("train") -builder.add_node("evaluate") -builder.add_node("deploy_prod") -builder.add_node("deploy_staging") -builder.add_node("alert_team") - -# Edges with conditions -builder.add_edge("load_data", "train") -builder.add_edge("train", "evaluate") - -# Gate: deploy to prod only if accuracy >= 0.95 -builder.add_edge("evaluate", "deploy_prod", - condition=lambda r: r["accuracy"] >= 
0.95) - -# Gate: deploy to staging if accuracy between 0.85 and 0.95 -builder.add_edge("evaluate", "deploy_staging", - condition=lambda r: 0.85 <= r["accuracy"] < 0.95) - -# Gate: alert team if accuracy < 0.85 -builder.add_edge("evaluate", "alert_team", - condition=lambda r: r["accuracy"] < 0.85) - -dag, conditions = builder.build() -``` - - train --> evaluate - evaluate -->|"acc >= 0.95"| deploy_prod - evaluate -->|"0.85 <= acc < 0.95"| deploy_staging - evaluate -->|"acc < 0.85"| alert_team`} - caption="ML pipeline with three quality gates. Exactly one downstream path activates." -/> - -```python -# Scenario 1: Great model -tasks = { - "load_data": lambda: "loaded", - "train": lambda: "trained", - "evaluate": lambda: {"accuracy": 0.97, "f1": 0.96}, - "deploy_prod": lambda: "deployed to production!", - "deploy_staging": lambda: "deployed to staging", - "alert_team": lambda: "alert sent", -} - -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -print(result.node_results["deploy_prod"].status) # COMPLETED -print(result.node_results["deploy_staging"].status) # SKIPPED -print(result.node_results["alert_team"].status) # SKIPPED -``` - -```python -# Scenario 2: Mediocre model -tasks["evaluate"] = lambda: {"accuracy": 0.90, "f1": 0.88} -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -print(result.node_results["deploy_prod"].status) # SKIPPED -print(result.node_results["deploy_staging"].status) # COMPLETED -print(result.node_results["alert_team"].status) # SKIPPED -``` - -```python -# Scenario 3: Bad model -tasks["evaluate"] = lambda: {"accuracy": 0.70, "f1": 0.65} -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -print(result.node_results["deploy_prod"].status) # SKIPPED -print(result.node_results["deploy_staging"].status) # SKIPPED -print(result.node_results["alert_team"].status) # COMPLETED -``` - -## Conditional chains - -Conditions propagate through the graph. 
If a conditional edge skips a node, -that node's descendants are also skipped: - -```python -builder = dagron.ConditionalDAGBuilder() -builder.add_node("check") -builder.add_node("step_1") -builder.add_node("step_2") -builder.add_node("step_3") - -builder.add_edge("check", "step_1", condition=lambda r: r["go"]) -builder.add_edge("step_1", "step_2") # unconditional -builder.add_edge("step_2", "step_3") # unconditional - -dag, conditions = builder.build() - -tasks = { - "check": lambda: {"go": False}, - "step_1": lambda: "1", - "step_2": lambda: "2", - "step_3": lambda: "3", -} - -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -``` - -| Node | Status | Why | -|------|--------|-----| -| check | | Root node | -| step_1 | | Condition is False | -| step_2 | | Predecessor skipped | -| step_3 | | Predecessor skipped | - -## Mixing conditional and unconditional edges - -You can freely mix conditional and unconditional edges in the same graph: - -```python -builder = dagron.ConditionalDAGBuilder() - -builder.add_node("extract") -builder.add_node("validate") -builder.add_node("transform") -builder.add_node("quarantine") -builder.add_node("load") - -# Unconditional: extract -> validate -builder.add_edge("extract", "validate") - -# Conditional: validate -> transform (if valid) -builder.add_edge("validate", "transform", - condition=lambda r: r["valid"]) - -# Conditional: validate -> quarantine (if invalid) -builder.add_edge("validate", "quarantine", - condition=lambda r: not r["valid"]) - -# Unconditional: transform -> load -builder.add_edge("transform", "load") - -dag, conditions = builder.build() -``` - - validate - validate -->|valid| transform --> load - validate -->|invalid| quarantine`} - caption="Mixed conditional and unconditional edges. Invalid data is quarantined." 
-/> - -## Combining conditions with fail-fast - -When `fail_fast=True` (the default in the underlying executor), a failure in -any executed node skips its descendants — this combines naturally with -conditional skipping: - -```python -tasks = { - "extract": lambda: "data", - "validate": lambda: {"valid": True}, - "transform": lambda: (_ for _ in ()).throw(RuntimeError("transform error")), - "quarantine": lambda: "quarantined", - "load": lambda: "loaded", -} - -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -``` - -| Node | Status | Why | -|------|--------|-----| -| extract | | Root | -| validate | | Unconditional | -| transform | | Raised exception | -| quarantine | | Condition False (valid=True) | -| load | | Predecessor failed | - -## Debugging conditions - -To understand why a node was skipped, inspect the conditions dictionary: - -```python -dag, conditions = builder.build() - -# List all conditional edges -for (src, dst), predicate in conditions.items(): - print(f"{src} -> {dst}: {predicate}") -``` - -You can also test conditions in isolation: - -```python -validate_result = {"valid": True, "size": 500} - -for (src, dst), predicate in conditions.items(): - if src == "validate": - print(f"validate -> {dst}: {predicate(validate_result)}") -``` - -## Example: data routing pipeline - -Route records to different processors based on their type: - -```python -import dagron - -builder = dagron.ConditionalDAGBuilder() - -builder.add_node("classify") -builder.add_node("process_text") -builder.add_node("process_image") -builder.add_node("process_video") -builder.add_node("store") - -builder.add_edge("classify", "process_text", - condition=lambda r: r["type"] == "text") -builder.add_edge("classify", "process_image", - condition=lambda r: r["type"] == "image") -builder.add_edge("classify", "process_video", - condition=lambda r: r["type"] == "video") - -builder.add_edge("process_text", "store") -builder.add_edge("process_image", "store") 
-builder.add_edge("process_video", "store") - -dag, conditions = builder.build() - -tasks = { - "classify": lambda: {"type": "image", "data": b"..."}, - "process_text": lambda: "text processed", - "process_image": lambda: "image processed", - "process_video": lambda: "video processed", - "store": lambda: "stored", -} - -result = dagron.ConditionalExecutor(dag, conditions).execute(tasks) -print(result.node_results["process_image"].status) # COMPLETED -print(result.node_results["process_text"].status) # SKIPPED -print(result.node_results["process_video"].status) # SKIPPED -``` - -|text| process_text --> store - classify -->|image| process_image --> store - classify -->|video| process_video --> store`} - caption="Data routing. Only the matching processor runs based on the classification result." -/> - -## Best practices - -1. **Keep conditions pure.** Conditions should only examine the input result, - not produce side effects or access external state. - -2. **Handle None results.** If a predecessor might return `None`, guard against - it in your condition: `condition=lambda r: r is not None and r["ok"]`. - -3. **Use descriptive function names.** Named functions are easier to debug than - lambdas: - ```python - def is_high_quality(result): - return result["accuracy"] >= 0.95 - - builder.add_edge("evaluate", "deploy", condition=is_high_quality) - ``` - -4. **Test conditions independently.** Unit-test your predicate functions with - various inputs before wiring them into the DAG. - -5. **Combine with tracing.** Enable tracing to see which conditions fired and - which branches were taken. See [Tracing & Profiling](/guide/observability/tracing-profiling). 
- -## API reference - -| Class / Method | Docs | -|----------------|------| -| `ConditionalDAGBuilder` | [Conditions](/api/execution/conditions) | -| `ConditionalExecutor` | [Conditions](/api/execution/conditions) | - -## Next steps - -- [Dynamic DAGs](/guide/execution-strategies/dynamic-dags) — expand the graph at runtime based on node results. -- [Incremental Execution](/guide/execution-strategies/incremental) — combine conditions with incremental recomputation. -- [Checkpointing](/guide/execution-strategies/checkpointing) — resume conditional pipelines after failures. diff --git a/docs/pages/guide/execution-strategies/distributed.mdx b/docs/pages/guide/execution-strategies/distributed.mdx deleted file mode 100644 index 8135c31..0000000 --- a/docs/pages/guide/execution-strategies/distributed.mdx +++ /dev/null @@ -1,561 +0,0 @@ ---- -sidebar_position: 19 -title: Distributed Execution -description: Execute DAG nodes across threads, processes, Ray clusters, and Celery workers with pluggable backends. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Distributed Execution - -dagron's distributed execution system lets you run DAG nodes across different concurrency and distribution primitives -- from local thread pools to Ray clusters and Celery workers -- using a single, unified API. A pluggable `DistributedBackend` protocol abstracts away the transport, so you can switch from threads to Ray by changing one line. - -For large DAGs, the `PartitionedDAGExecutor` splits the graph into partitions and executes each partition as a unit, minimizing cross-partition communication. 
- - B - end - subgraph "Partition 2 (Worker B)" - C["feature_eng"] - D["train"] - C --> D - end - subgraph "Partition 3 (Worker C)" - E["evaluate"] - F["deploy"] - E --> F - end - B --> C - D --> E - style A fill:#e3f2fd,stroke:#1565c0 - style B fill:#e3f2fd,stroke:#1565c0 - style C fill:#fff3e0,stroke:#e65100 - style D fill:#fff3e0,stroke:#e65100 - style E fill:#e8f5e9,stroke:#2e7d32 - style F fill:#e8f5e9,stroke:#2e7d32`} - caption="A DAG split into 3 partitions. Each partition runs on a different worker." -/> - ---- - -## Architecture Overview - -There are two main approaches to distributed execution: - -| Executor | Approach | Best for | -|---|---|---| -| [`DistributedExecutor`](/api/execution/distributed#distributedexecutor) | Dispatches individual nodes to a backend by topological level. | Fine-grained distribution where each node runs independently. | -| [`PartitionedDAGExecutor`](/api/execution/distributed#partitioneddagexecutor) | Splits the DAG into k partitions, executes each partition as a sub-DAG. | Coarse-grained distribution that minimizes serialization overhead. | - -Both use the `DistributedBackend` protocol for the actual task dispatch. - ---- - -## DistributedBackend Protocol - -All backends implement three methods: - -```python -class DistributedBackend(Protocol): - @property - def name(self) -> str: ... - - def submit(self, fn, *args, **kwargs) -> Any: - """Submit a callable for execution. Returns a future.""" - ... - - def result(self, future, timeout=None) -> Any: - """Retrieve the result of a submitted task.""" - ... - - def shutdown(self, wait=True) -> None: - """Shut down the backend and release resources.""" - ... -``` - -dagron ships with four backends: - -| Backend | Module | Use case | -|---|---|---| -| `ThreadBackend` | `dagron.execution.backends.thread` | I/O-bound tasks, testing, development. | -| `MultiprocessingBackend` | `dagron.execution.backends.multiprocessing` | CPU-bound tasks on a single machine. 
| -| `RayBackend` | `dagron.execution.backends.ray` | Multi-machine clusters. Requires `pip install dagron[ray]`. | -| `CeleryBackend` | `dagron.execution.backends.celery` | Existing Celery infrastructure. Requires `pip install dagron[celery]`. | - ---- - -## ThreadBackend - -The simplest backend, using Python's `ThreadPoolExecutor`. Good for I/O-bound workloads (API calls, database queries, file downloads): - -```python -import dagron -from dagron.execution.distributed_executor import DistributedExecutor -from dagron.execution.backends.thread import ThreadBackend - -dag = ( - dagron.DAG.builder() - .add_node("fetch_users") - .add_node("fetch_orders") - .add_node("join") - .add_edge("fetch_users", "join") - .add_edge("fetch_orders", "join") - .build() -) - -backend = ThreadBackend(max_workers=8) - -with DistributedExecutor(dag, backend) as executor: - result = executor.execute({ - "fetch_users": lambda: fetch_from_api("/users"), - "fetch_orders": lambda: fetch_from_api("/orders"), - "join": lambda: merge_data(), - }) - -print(f"Backend: {result.backend_name}") # "thread" -print(f"Succeeded: {result.execution_result.succeeded}") -``` - -The `with` statement ensures `backend.shutdown()` is called when execution completes. - ---- - -## MultiprocessingBackend - -Bypasses the GIL for CPU-bound workloads by dispatching tasks to separate processes: - -```python -from dagron.execution.backends.multiprocessing import MultiprocessingBackend - -backend = MultiprocessingBackend(max_workers=4) - -with DistributedExecutor(dag, backend) as executor: - result = executor.execute(tasks) -``` - -:::caution -Tasks must be **picklable** when using `MultiprocessingBackend`. Lambda functions and closures cannot be pickled. Use module-level functions instead. 
-::: - -```python -# This works: -def compute_features(): - return heavy_computation() - -tasks = {"features": compute_features} - -# This does NOT work with multiprocessing: -tasks = {"features": lambda: heavy_computation()} -``` - ---- - -## RayBackend - -Distribute tasks across a Ray cluster for true multi-machine parallelism: - -```python -from dagron.execution.backends.ray import RayBackend - -# Initialize Ray (or connect to an existing cluster) -backend = RayBackend(num_cpus=16) - -with DistributedExecutor(dag, backend, node_timeout=300) as executor: - result = executor.execute(tasks) -``` - -Ray must be installed separately: - -```bash -pip install dagron[ray] -``` - -If Ray is already initialized (e.g., you called `ray.init()` elsewhere), `RayBackend` detects this and reuses the existing session. - -### Ray Cluster Example - -```python -import ray -from dagron.execution.backends.ray import RayBackend - -# Connect to a remote cluster -ray.init(address="ray://cluster-head:10001") - -backend = RayBackend() # uses the existing Ray session - -with DistributedExecutor(dag, backend) as executor: - result = executor.execute({ - "train_model_a": lambda: train_on_gpu("model_a"), - "train_model_b": lambda: train_on_gpu("model_b"), - "ensemble": lambda: combine_models(), - }) -``` - ---- - -## CeleryBackend - -Integrate with existing Celery infrastructure for message-broker-based distribution: - -```python -from celery import Celery -from dagron.execution.backends.celery import CeleryBackend - -app = Celery("dagron_tasks", broker="redis://localhost:6379") - -backend = CeleryBackend(app=app, queue="dagron") - -with DistributedExecutor(dag, backend) as executor: - result = executor.execute(tasks) -``` - -Celery must be installed separately: - -```bash -pip install dagron[celery] -``` - -The `queue` parameter routes all dagron tasks to a specific Celery queue, keeping them separate from your other Celery tasks. 
- ---- - -## DistributedExecutor - -The `DistributedExecutor` dispatches nodes **by topological level**. All nodes in a level are submitted to the backend concurrently, and results are collected before advancing to the next level. - -```python -from dagron.execution.distributed_executor import DistributedExecutor - -executor = DistributedExecutor( - dag, - backend=backend, - fail_fast=True, # skip downstream on failure - enable_tracing=True, # record execution trace - node_timeout=60.0, # per-node timeout in seconds -) - -result = executor.execute(tasks) -``` - -### DistributedExecutionResult - -The result contains the standard `ExecutionResult` plus distributed metadata: - -```python -result = executor.execute(tasks) - -# Standard execution stats -er = result.execution_result -print(f"Succeeded: {er.succeeded}, Failed: {er.failed}") -print(f"Total time: {er.total_duration_seconds:.1f}s") - -# Distributed metadata -print(f"Backend: {result.backend_name}") -print(f"Dispatch info: {result.dispatch_info}") -# e.g. {"fetch_users": {"backend": "ray"}, ...} -``` - -### Context Manager - -`DistributedExecutor` supports context-manager usage for automatic cleanup: - -```python -with DistributedExecutor(dag, backend) as executor: - result = executor.execute(tasks) -# backend.shutdown(wait=True) is called automatically -``` - -### Node Timeout - -Set `node_timeout` to fail nodes that take too long: - -```python -executor = DistributedExecutor(dag, backend, node_timeout=30.0) -result = executor.execute(tasks) - -# Check for timed-out nodes -print(f"Timed out: {result.execution_result.timed_out}") -``` - -Timed-out nodes are treated as failures and trigger fail-fast behavior for downstream nodes. - ---- - -## PartitionedDAGExecutor - -For large DAGs, dispatching every node individually to a remote backend can create excessive serialization overhead. 
The `PartitionedDAGExecutor` solves this by splitting the DAG into **k partitions** and executing each partition as a sub-DAG: - -```python -from dagron.execution.distributed import PartitionedDAGExecutor - -executor = PartitionedDAGExecutor( - dag, - k=4, # target number of partitions - strategy="level_based", # partitioning strategy - max_workers=8, # workers per partition - fail_fast=True, -) - -result = executor.execute(tasks) -``` - -### Partitioning Strategies - -| Strategy | Description | Best for | -|---|---|---| -| `"level_based"` | Assigns nodes to partitions based on their topological level. | Balanced, predictable partitions. | -| `"balanced"` | Distributes nodes to minimize the maximum partition cost. | Cost-aware balancing when node costs vary widely. | -| `"communication_min"` | Minimizes cross-partition edges (Kernighan-Lin style). | Minimizing serialization overhead between partitions. | - -### Level-Based Partitioning - -Groups nodes by topological level and distributes levels across k partitions: - -```python -executor = PartitionedDAGExecutor(dag, k=3, strategy="level_based") -``` - - C - A --> D - B --> D - C --> E - D --> E`} - caption="Level-based partitioning: each level maps to a partition." -/> - -### Balanced Partitioning - -When nodes have very different execution costs, use balanced partitioning: - -```python -costs = { - "extract": 5.0, - "heavy_transform": 120.0, - "light_transform": 2.0, - "load": 10.0, -} - -executor = PartitionedDAGExecutor( - dag, - k=2, - strategy="balanced", - costs=costs, -) -``` - -### Communication-Minimizing Partitioning - -Minimizes the number of edges that cross partition boundaries: - -```python -executor = PartitionedDAGExecutor( - dag, - k=3, - strategy="communication_min", - max_iterations=20, # Kernighan-Lin iterations - max_imbalance=0.3, # allow 30% size imbalance -) -``` - -The `max_imbalance` parameter controls the trade-off between partition balance and communication minimization. 
A value of `0.0` requires perfectly balanced partitions; `0.3` allows 30% deviation. - ---- - -## Choosing Between Executors - -| Scenario | Recommended Executor | -|---|---| -| Small DAG, I/O-bound tasks | `DistributedExecutor` + `ThreadBackend` | -| Small DAG, CPU-bound tasks | `DistributedExecutor` + `MultiprocessingBackend` | -| Large DAG, multi-machine cluster | `PartitionedDAGExecutor` with `"communication_min"` | -| Existing Celery infrastructure | `DistributedExecutor` + `CeleryBackend` | -| GPU cluster | `DistributedExecutor` + `RayBackend` | - ---- - -## Writing a Custom Backend - -Implement the `DistributedBackend` protocol to integrate with any execution system: - -```python -from dagron.execution.backends.base import DistributedBackend - -class DaskBackend: - """Example backend using Dask distributed.""" - - def __init__(self, scheduler_address: str): - from dask.distributed import Client - self._client = Client(scheduler_address) - - @property - def name(self) -> str: - return "dask" - - def submit(self, fn, *args, **kwargs): - return self._client.submit(fn, *args, **kwargs) - - def result(self, future, timeout=None): - return future.result(timeout=timeout) - - def shutdown(self, wait=True): - self._client.close() - -# Usage -backend = DaskBackend("tcp://scheduler:8786") -executor = DistributedExecutor(dag, backend) -``` - ---- - -## Combining with Other Features - -### Distributed + Tracing - -Enable tracing to see per-node timing across distributed workers: - -```python -executor = DistributedExecutor(dag, backend, enable_tracing=True) -result = executor.execute(tasks) - -trace = result.execution_result.trace -if trace: - trace.to_chrome_json("distributed_trace.json") -``` - -### Distributed + Fail-Fast - -```python -executor = DistributedExecutor(dag, backend, fail_fast=True) -``` - -When a node fails, all downstream nodes are skipped, even across different topological levels. 
- -### Partitioned + Cost Estimates - -Provide cost estimates for better partitioning: - -```python -costs = {node: estimate_cost(node) for node in dag.node_names()} - -executor = PartitionedDAGExecutor( - dag, - k=4, - strategy="balanced", - costs=costs, -) -``` - ---- - -## Complete Example: Ray Cluster Training - -```python -import dagron -from dagron.execution.distributed_executor import DistributedExecutor -from dagron.execution.backends.ray import RayBackend - -# Build a training pipeline -dag = ( - dagron.DAG.builder() - .add_node("load_data") - .add_node("preprocess") - .add_node("train_model_a") - .add_node("train_model_b") - .add_node("train_model_c") - .add_node("ensemble") - .add_node("evaluate") - .add_edge("load_data", "preprocess") - .add_edge("preprocess", "train_model_a") - .add_edge("preprocess", "train_model_b") - .add_edge("preprocess", "train_model_c") - .add_edge("train_model_a", "ensemble") - .add_edge("train_model_b", "ensemble") - .add_edge("train_model_c", "ensemble") - .add_edge("ensemble", "evaluate") - .build() -) - -def load_data(): - return load_dataset("imagenet") - -def preprocess(): - return normalize_images() - -def train_model_a(): - return train("resnet50", epochs=10) - -def train_model_b(): - return train("vgg16", epochs=10) - -def train_model_c(): - return train("efficientnet", epochs=10) - -def ensemble(): - return combine_predictions() - -def evaluate(): - return compute_metrics() - -tasks = { - "load_data": load_data, - "preprocess": preprocess, - "train_model_a": train_model_a, - "train_model_b": train_model_b, - "train_model_c": train_model_c, - "ensemble": ensemble, - "evaluate": evaluate, -} - -# Dispatch to Ray -- models train in parallel on different machines -backend = RayBackend(num_cpus=32) - -with DistributedExecutor(dag, backend, node_timeout=3600) as executor: - result = executor.execute(tasks) - -er = result.execution_result -print(f"Succeeded: {er.succeeded}/{er.succeeded + er.failed}") -print(f"Total time: 
{er.total_duration_seconds:.0f}s") -``` - ---- - -## Best Practices - -1. **Start with `ThreadBackend` for development.** Switch to `RayBackend` or `CeleryBackend` for production. - -2. **Use `PartitionedDAGExecutor` for large DAGs.** When your DAG has hundreds of nodes, per-node dispatch overhead adds up. Partitioning reduces it. - -3. **Provide cost estimates.** The `balanced` and `communication_min` strategies produce much better partitions when they know how long each node takes. - -4. **Set `node_timeout`.** Prevent runaway tasks from blocking the entire pipeline. - -5. **Use the context manager.** Always use `with DistributedExecutor(...) as executor:` to ensure proper cleanup. - -6. **Avoid lambdas with multiprocessing.** Module-level functions are required for pickling. - ---- - -## Related - -- [API Reference: Distributed](/api/execution/distributed) -- full API documentation. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- the standard single-machine executor. -- [Resource Scheduling](/guide/execution-strategies/resource-scheduling) -- GPU/CPU/memory-aware scheduling. -- [Tracing & Profiling](/guide/observability/tracing-profiling) -- visualizing distributed execution traces. diff --git a/docs/pages/guide/execution-strategies/dynamic-dags.mdx b/docs/pages/guide/execution-strategies/dynamic-dags.mdx deleted file mode 100644 index 9ab3863..0000000 --- a/docs/pages/guide/execution-strategies/dynamic-dags.mdx +++ /dev/null @@ -1,475 +0,0 @@ ---- -sidebar_position: 10 -title: Dynamic DAGs -description: Expand the DAG at runtime based on node results — add or remove nodes dynamically with DynamicExecutor, expanders, and DynamicModification. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Dynamic DAGs - -Sometimes you cannot know the full shape of your pipeline until execution is -underway. 
A data-discovery step might reveal 50 tables that each need their own -processing branch. A model-selection step might choose between three architectures. -A file scanner might find a variable number of inputs. - -dagron's **dynamic execution** system lets you define **expander functions** that -modify the DAG at runtime — adding new nodes, removing nodes, or rewiring edges -based on the results of upstream tasks. - -## Concepts - -### Expanders - -An **expander** is a Python function associated with a specific node. After that -node completes, the executor calls the expander with the node's name and result. -The expander returns a [`DynamicModification`](/api/execution/dynamic) describing -what to change. - -### DynamicModification - -A `DynamicModification` is a data class with two fields: - -| Field | Type | Description | -|-------|------|-------------| -| `add_nodes` | `list[DynamicNodeSpec]` | Nodes to add to the DAG | -| `remove_nodes` | `list[str]` | Nodes to remove from the DAG | - -### DynamicNodeSpec - -Each new node is described by a `DynamicNodeSpec`: - -| Field | Type | Description | -|-------|------|-------------| -| `name` | `str` | The new node's name | -| `task` | `Callable` | The task function to execute | -| `dependencies` | `list[str]` | Nodes this new node depends on | -| `dependents` | `list[str]` | Nodes that depend on this new node | - -### DynamicExecutor - -The [`DynamicExecutor`](/api/execution/dynamic) wraps the standard executor and -applies modifications between scheduling rounds. - -## Basic example - -Let us build a pipeline where a discovery step determines how many files to -process: - -```python -import dagron - -# Initial DAG with a discovery node and a merge node -dag = ( - dagron.DAG.builder() - .add_node("discover") - .add_node("merge") - .add_edge("discover", "merge") - .build() -) -``` - - merge`} - caption="Initial DAG before expansion. The discover node will spawn dynamic children." 
-/> - -### Define the expander - -```python -def discover_expander(name, result): - """After 'discover' runs, add one processing node per file.""" - files = result # e.g., ["a.csv", "b.csv", "c.csv"] - - new_nodes = [] - for filename in files: - node_name = f"process_{filename.replace('.', '_')}" - new_nodes.append( - dagron.DynamicNodeSpec( - name=node_name, - task=lambda fn=filename: f"processed {fn}", - dependencies=["discover"], - dependents=["merge"], - ) - ) - - return dagron.DynamicModification( - add_nodes=new_nodes, - remove_nodes=[], - ) -``` - -### Execute - -```python -tasks = { - "discover": lambda: ["a.csv", "b.csv", "c.csv"], - "merge": lambda: "all files merged", -} - -expanders = { - "discover": discover_expander, -} - -executor = dagron.DynamicExecutor(dag, expanders=expanders, max_workers=4) -result = executor.execute(tasks) - -print(result.succeeded) # 5 (discover + 3 process nodes + merge) -print(list(result.node_results.keys())) -# ['discover', 'process_a_csv', 'process_b_csv', 'process_c_csv', 'merge'] -``` - -After `discover` completes, the expander fires and adds three new nodes. The -executor then schedules them in parallel, and finally runs `merge`: - - process_a_csv --> merge - discover --> process_b_csv --> merge - discover --> process_c_csv --> merge`} - caption="DAG after dynamic expansion. Three processing nodes were added at runtime." -/> - -## DynamicModification in detail - -### Adding nodes - -```python -dagron.DynamicModification( - add_nodes=[ - dagron.DynamicNodeSpec( - name="new_node", - task=lambda: "hello", - dependencies=["existing_parent"], - dependents=["existing_child"], - ), - ], - remove_nodes=[], -) -``` - -The `dependencies` field creates edges FROM those nodes TO the new node. -The `dependents` field creates edges FROM the new node TO those nodes. - -### Removing nodes - -```python -dagron.DynamicModification( - add_nodes=[], - remove_nodes=["obsolete_node"], -) -``` - -Removing a node also removes all its edges. 
Be careful not to remove a node -that has already been scheduled or completed. - -### Combined add and remove - -```python -def replace_placeholder(name, result): - """Replace a placeholder node with specific implementations.""" - return dagron.DynamicModification( - add_nodes=[ - dagron.DynamicNodeSpec( - name="specific_impl_a", - task=lambda: "impl a", - dependencies=["upstream"], - dependents=["downstream"], - ), - dagron.DynamicNodeSpec( - name="specific_impl_b", - task=lambda: "impl b", - dependencies=["upstream"], - dependents=["downstream"], - ), - ], - remove_nodes=["placeholder"], - ) -``` - -## Real-world example: dynamic ETL pipeline - -A common scenario is an ETL pipeline that discovers database tables at runtime -and creates a processing branch for each: - -```python -import dagron -import time - -# Initial DAG -dag = ( - dagron.DAG.builder() - .add_node("discover_tables") - .add_node("aggregate") - .add_node("publish") - .add_edge("discover_tables", "aggregate") - .add_edge("aggregate", "publish") - .build() -) - -def discover_tables(): - """Simulate querying a database catalog.""" - time.sleep(0.1) - return ["users", "orders", "products", "reviews"] - -def aggregate(): - return "aggregated all tables" - -def publish(): - return "published to data warehouse" - -def make_table_processor(table_name): - """Factory function for table-specific processors.""" - def process(): - time.sleep(0.2) # simulate processing - return f"processed {table_name}: 1000 rows" - return process - -def table_expander(name, result): - """Create one processing node per discovered table.""" - tables = result - nodes = [] - for table in tables: - nodes.append( - dagron.DynamicNodeSpec( - name=f"process_{table}", - task=make_table_processor(table), - dependencies=["discover_tables"], - dependents=["aggregate"], - ) - ) - return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[]) - -tasks = { - "discover_tables": discover_tables, - "aggregate": aggregate, - "publish": 
publish, -} - -executor = dagron.DynamicExecutor( - dag, - expanders={"discover_tables": table_expander}, - max_workers=4, -) -result = executor.execute(tasks) - -print(f"Executed {result.succeeded} tasks") -# Executed 7 tasks - -for name, nr in result.node_results.items(): - print(f" {name}: {nr.status.name} ({nr.duration_seconds:.3f}s)") -``` - - process_users --> aggregate - discover_tables --> process_orders --> aggregate - discover_tables --> process_products --> aggregate - discover_tables --> process_reviews --> aggregate - aggregate --> publish`} - caption="After expansion: four table-processing nodes run in parallel." -/> - -## Chained expansion - -Expanders can trigger further expansions. If a dynamically-added node also has -an expander, it fires after that node completes: - -```python -dag = ( - dagron.DAG.builder() - .add_node("level_0") - .add_node("final") - .add_edge("level_0", "final") - .build() -) - -def level_0_expander(name, result): - return dagron.DynamicModification( - add_nodes=[ - dagron.DynamicNodeSpec( - name="level_1", - task=lambda: ["sub_a", "sub_b"], - dependencies=["level_0"], - dependents=["final"], - ), - ], - remove_nodes=[], - ) - -def level_1_expander(name, result): - nodes = [] - for sub in result: - nodes.append( - dagron.DynamicNodeSpec( - name=f"level_2_{sub}", - task=lambda s=sub: f"processed {s}", - dependencies=["level_1"], - dependents=["final"], - ) - ) - return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[]) - -tasks = { - "level_0": lambda: "started", - "final": lambda: "done", -} - -expanders = { - "level_0": level_0_expander, - "level_1": level_1_expander, -} - -executor = dagron.DynamicExecutor(dag, expanders=expanders) -result = executor.execute(tasks) - -print(list(result.node_results.keys())) -# ['level_0', 'level_1', 'level_2_sub_a', 'level_2_sub_b', 'final'] -``` - - level_1 - level_1 --> level_2_sub_a --> final - level_1 --> level_2_sub_b --> final`} - caption="Two-level chained expansion. 
level_0 spawns level_1, which spawns level_2 nodes." -/> - -## Model selection example - -Use dynamic expansion to choose a model architecture at runtime: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_edge("prepare_data", "select_model") - .add_edge("select_model", "evaluate") - .build() -) - -def model_selector_expander(name, result): - """Based on data characteristics, pick the right model.""" - data_size = result["rows"] - - if data_size < 1000: - model = dagron.DynamicNodeSpec( - name="train_linear", - task=lambda: {"model": "linear", "accuracy": 0.85}, - dependencies=["select_model"], - dependents=["evaluate"], - ) - elif data_size < 100000: - model = dagron.DynamicNodeSpec( - name="train_xgboost", - task=lambda: {"model": "xgboost", "accuracy": 0.92}, - dependencies=["select_model"], - dependents=["evaluate"], - ) - else: - model = dagron.DynamicNodeSpec( - name="train_neural_net", - task=lambda: {"model": "nn", "accuracy": 0.96}, - dependencies=["select_model"], - dependents=["evaluate"], - ) - - return dagron.DynamicModification(add_nodes=[model], remove_nodes=[]) - -tasks = { - "prepare_data": lambda: {"rows": 50000, "features": 20}, - "select_model": lambda: {"rows": 50000}, - "evaluate": lambda: "evaluated", -} - -executor = dagron.DynamicExecutor( - dag, - expanders={"select_model": model_selector_expander}, -) -result = executor.execute(tasks) - -# With 50000 rows, xgboost was selected -print("train_xgboost" in result.node_results) # True -print(result.node_results["train_xgboost"].result) -# {'model': 'xgboost', 'accuracy': 0.92} -``` - - select_model --> train_xgboost --> evaluate`} - caption="Dynamic model selection. XGBoost was chosen based on the data size." 
-/> - -## Error handling in expanders - -If an expander raises an exception, the node that triggered it is marked as - and its descendants are skipped: - -```python -def bad_expander(name, result): - raise RuntimeError("Expansion failed!") - -expanders = {"discover": bad_expander} -executor = dagron.DynamicExecutor(dag, expanders=expanders) -result = executor.execute(tasks) - -print(result.node_results["discover"].status) # FAILED -``` - -To handle expansion errors gracefully, wrap your expander logic in try/except: - -```python -def safe_expander(name, result): - try: - # ... expansion logic ... - return dagron.DynamicModification(add_nodes=nodes, remove_nodes=[]) - except Exception: - # Return empty modification — no expansion, but no failure - return dagron.DynamicModification(add_nodes=[], remove_nodes=[]) -``` - -## Best practices - -1. **Use factory functions for tasks.** When creating tasks in a loop, use a - factory to capture the loop variable correctly: - ```python - def make_task(item): - def task(): - return process(item) - return task - - # NOT: lambda: process(item) -- captures the variable, not the value - ``` - -2. **Name dynamic nodes predictably.** Use naming conventions like - `process_{table}` so you can use glob/regex matching later. - -3. **Limit expansion depth.** Chained expansions can grow the graph - unexpectedly. Set reasonable limits in your expander logic. - -4. **Combine with conditional edges.** Use - [Conditional Execution](/guide/execution-strategies/conditional) to gate whether expansion - happens at all. - -5. **Test expanders in isolation.** Write unit tests for your expander functions - with various inputs before running them in the full pipeline. - -6. **Monitor graph size.** Log `dag.node_count()` after expansion to detect - runaway growth. 
- -## API reference - -| Class / Method | Docs | -|----------------|------| -| `DynamicExecutor` | [Dynamic](/api/execution/dynamic) | -| `DynamicModification` | [Dynamic](/api/execution/dynamic) | -| `DynamicNodeSpec` | [Dynamic](/api/execution/dynamic) | - -## Next steps - -- [Conditional Execution](/guide/execution-strategies/conditional) — gate branches with runtime predicates. -- [Checkpointing](/guide/execution-strategies/checkpointing) — checkpoint dynamic pipelines for resume. -- [Tracing & Profiling](/guide/observability/tracing-profiling) — trace dynamic expansion events. diff --git a/docs/pages/guide/execution-strategies/incremental.mdx b/docs/pages/guide/execution-strategies/incremental.mdx deleted file mode 100644 index 0478a9b..0000000 --- a/docs/pages/guide/execution-strategies/incremental.mdx +++ /dev/null @@ -1,397 +0,0 @@ ---- -sidebar_position: 7 -title: Incremental Execution -description: Re-execute only what changed — dirty sets, early cutoff, and the IncrementalExecutor for efficient recomputation. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Incremental Execution - -When a small part of your input changes, you should not have to re-run the entire -pipeline. dagron's **incremental execution** engine tracks which nodes are -affected by a change, re-executes only those nodes, and applies **early cutoff** -to stop propagation when a recomputed node produces the same result as before. - -This guide explains the concepts behind incremental execution, walks through the -[`IncrementalExecutor`](/api/execution/incremental) API, and shows real-world -patterns for using it effectively. - -## Why incremental? - -Consider a data pipeline with 20 nodes. If one source table changes, a naive -executor re-runs all 20 tasks. With incremental execution: - -1. You declare which nodes changed (the **dirty set**). -2. 
The executor computes the **affected set** — all downstream descendants of - the dirty nodes. -3. Only the affected set is re-executed, in topological order. -4. If a recomputed node's output matches its previous output, the executor - applies **early cutoff** and skips its descendants. - -The result: you re-run 3 nodes instead of 20, saving minutes or hours on -large pipelines. - -## Concepts - -### Dirty set - -The **dirty set** is the set of nodes whose inputs have changed since the last -run. You provide this set explicitly via the `changed_nodes` parameter. - -### Affected set - -The **affected set** is the transitive closure of the dirty set's descendants. -dagron computes this automatically. - -### Early cutoff - -After re-executing a node, the executor compares its new result to the cached -previous result. If they are equal, the node's descendants are **not** -re-executed — even if they are in the affected set. This is called **early -cutoff** and can dramatically reduce recomputation. - -### Reused set - -Nodes that are not in the affected set (or were cut off early) keep their -previous results. These are the **reused** nodes. - - B:::recomputed --> D:::cutoff - A --> C:::recomputed --> E:::reused - D --> F:::reused - E --> F`} - caption="Incremental execution. A is dirty (red). B and C are recomputed (orange). D produces the same result as before so early cutoff applies (green). E and F are reused (blue)." 
-/> - -## IncrementalExecutor - -### Basic usage - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_edge("source_a", "transform") - .add_edge("source_b", "transform") - .add_edge("transform", "aggregate") - .add_edge("aggregate", "report") - .build() -) - -tasks = { - "source_a": lambda: [1, 2, 3], - "source_b": lambda: [4, 5, 6], - "transform": lambda: [1, 2, 3, 4, 5, 6], - "aggregate": lambda: {"sum": 21, "count": 6}, - "report": lambda: "Report: 6 items, sum=21", -} - -executor = dagron.IncrementalExecutor(dag) - -# First run — everything executes -result = executor.execute(tasks) -print(result.recomputed) # ['source_a', 'source_b', 'transform', 'aggregate', 'report'] -print(result.reused) # [] -print(result.early_cutoff) # [] -``` - -### Subsequent run with changes - -```python -# Only source_a changed -tasks["source_a"] = lambda: [10, 20, 30] - -result = executor.execute(tasks, changed_nodes=["source_a"]) -print(result.recomputed) # ['source_a', 'transform', 'aggregate', 'report'] -print(result.reused) # ['source_b'] -print(result.early_cutoff) # [] -``` - -Node `source_b` was not in the dirty set or the affected set, so its previous -result is reused. - - transform:::recomputed - source_b:::reused --> transform - transform --> aggregate:::recomputed --> report:::recomputed`} - caption="Only source_a changed. source_b is reused; everything downstream of source_a is recomputed." 
-/> - -### Early cutoff in action - -Early cutoff activates when a recomputed node produces the same result as its -previous run: - -```python -# source_a changed, but transform produces the same result anyway -tasks["source_a"] = lambda: [1, 2, 3] # same as original -tasks["transform"] = lambda: [1, 2, 3, 4, 5, 6] # same output - -result = executor.execute(tasks, changed_nodes=["source_a"]) -print(result.recomputed) # ['source_a', 'transform'] -print(result.early_cutoff) # ['transform'] -print(result.reused) # ['source_b', 'aggregate', 'report'] -``` - -Even though `transform` is downstream of the dirty node, its output did not -change, so `aggregate` and `report` are **not** re-executed. - - transform:::cutoff - source_b:::reused --> transform - transform --> aggregate:::reused --> report:::reused`} - caption="Early cutoff at transform (green) prevents recomputation of aggregate and report." -/> - -## Constructor parameters - -```python -dagron.IncrementalExecutor( - dag, # The DAG - callbacks=None, # ExecutionCallbacks instance - fail_fast=True, # Stop on first failure? - enable_tracing=False, # Record execution trace? -) -``` - -The constructor is similar to [`DAGExecutor`](/api/execution/execution) but without -`max_workers` or `costs`, because incremental execution focuses on minimising -work rather than parallelising it. 
- -## IncrementalResult - -The `.execute()` method returns an `IncrementalResult` that extends -`ExecutionResult` with three additional fields: - -| Field | Type | Description | -|-------|------|-------------| -| `recomputed` | `list[str]` | Nodes that were actually re-executed | -| `early_cutoff` | `list[str]` | Nodes where early cutoff stopped propagation | -| `reused` | `list[str]` | Nodes whose previous results were kept | - -```python -result = executor.execute(tasks, changed_nodes=["source_a"]) - -# Standard ExecutionResult fields still work -print(result.succeeded) -print(result.total_duration_seconds) -for name, nr in result.node_results.items(): - print(f"{name}: {nr.status}") - -# Incremental-specific fields -print(f"Recomputed: {result.recomputed}") -print(f"Early cutoff: {result.early_cutoff}") -print(f"Reused: {result.reused}") -``` - -## Computing dirty sets - -### Manual dirty set - -The simplest approach: you know which inputs changed, so you list them -explicitly: - -```python -result = executor.execute(tasks, changed_nodes=["source_a"]) -``` - -### Using DAG's dirty_set() - -If you track changes at a finer granularity (e.g., file modification times), -dagron can compute the dirty set for you: - -```python -# After changing node configurations or data sources, -# ask the DAG which nodes are dirty: -dirty = dag.dirty_set( - changed=["source_a"], -) -print(dirty) # ['source_a', 'transform', 'aggregate', 'report'] -``` - -The `dirty_set()` method returns the full affected set, not just the immediate -changes. This is the set of nodes that **might** need re-execution (before -early cutoff). 
- -### Using change_provenance() - -For more sophisticated change tracking, use `change_provenance()` to understand -**why** a node is dirty: - -```python -provenance = dag.change_provenance(changed=["source_a"]) -for node, reason in provenance.items(): - print(f"{node}: dirty because of {reason}") -# source_a: dirty because of direct change -# transform: dirty because of ancestor source_a -# aggregate: dirty because of ancestor source_a -# report: dirty because of ancestor source_a -``` - -## Callbacks with incremental execution - -Callbacks work the same as with `DAGExecutor`, but you get additional events -for skip/reuse: - -```python -class IncrementalLogger: - def on_node_start(self, name): - print(f" [RUN] {name}") - - def on_node_complete(self, name, result): - print(f" [DONE] {name}") - - def on_node_skip(self, name): - print(f" [REUSE] {name}") - -executor = dagron.IncrementalExecutor(dag, callbacks=IncrementalLogger()) -result = executor.execute(tasks, changed_nodes=["source_a"]) -``` - -## Fail-fast behaviour - -When `fail_fast=True` (the default), a failure in any recomputed node skips -its descendants — just like the standard executor: - -```python -tasks["transform"] = lambda: (_ for _ in ()).throw(ValueError("bad data")) - -result = executor.execute(tasks, changed_nodes=["source_a"]) -print(result.failed) # 1 -print(result.skipped) # 2 (aggregate, report) -``` - -| Node | Status | -|------|--------| -| source_a | | -| source_b | (reused) | -| transform | | -| aggregate | | -| report | | - -## Real-world example: incremental ML training - -```python -import dagron -import hashlib -import json - -dag = ( - dagron.DAG.builder() - .add_edge("load_data", "feature_eng") - .add_edge("feature_eng", "train") - .add_edge("train", "evaluate") - .add_edge("evaluate", "report") - .build() -) - -# Simulate expensive tasks -def load_data(): - return {"rows": 10000, "checksum": "abc123"} - -def feature_eng(): - return {"features": 50, "checksum": "def456"} - -def 
train(): - import time - time.sleep(2) # expensive! - return {"model": "xgb_v1", "accuracy": 0.95} - -def evaluate(): - return {"accuracy": 0.95, "f1": 0.93} - -def report(): - return "Model accuracy: 95%" - -tasks = { - "load_data": load_data, - "feature_eng": feature_eng, - "train": train, - "evaluate": evaluate, - "report": report, -} - -executor = dagron.IncrementalExecutor(dag, enable_tracing=True) - -# First run: everything executes -result = executor.execute(tasks) -print(f"First run: {result.total_duration_seconds:.1f}s, recomputed={len(result.recomputed)}") - -# New data arrives, but only load_data changes -tasks["load_data"] = lambda: {"rows": 10001, "checksum": "abc124"} - -# Second run: only affected nodes execute -result = executor.execute(tasks, changed_nodes=["load_data"]) -print(f"Second run: {result.total_duration_seconds:.1f}s, recomputed={len(result.recomputed)}") -print(f"Reused: {result.reused}") -``` - -## Combining with tracing - -Enable tracing to understand incremental execution performance: - -```python -executor = dagron.IncrementalExecutor(dag, enable_tracing=True) -result = executor.execute(tasks, changed_nodes=["source_a"]) - -# The trace shows which nodes were recomputed vs reused -if result.trace: - print(result.trace.summary()) -``` - -See [Tracing & Profiling](/guide/observability/tracing-profiling) for the full tracing -guide. - -## Tips for effective incremental execution - -1. **Keep tasks deterministic.** Early cutoff works best when the same inputs - produce the same outputs. Non-deterministic tasks (e.g., those using random - seeds or wall-clock time) defeat early cutoff. - -2. **Use fine-grained nodes.** The more granular your nodes, the more - opportunities for early cutoff. A single monolithic "transform" node that - changes its output on every run provides no cutoff benefit. - -3. **Track changes accurately.** Over-reporting dirty nodes wastes computation. - Under-reporting produces stale results. 
Use checksums or file modification - times for accurate change detection. - -4. **Combine with checkpointing.** For long-running pipelines, use - [`CheckpointExecutor`](/guide/execution-strategies/checkpointing) alongside incremental - execution to resume from failures without losing incremental state. - -5. **Monitor cutoff rates.** Track `len(result.early_cutoff)` over time. If - cutoff rates are low, your nodes may not be deterministic or your change - detection may be too coarse. - -## API reference - -| Class / Method | Docs | -|----------------|------| -| `IncrementalExecutor` | [Incremental](/api/execution/incremental) | -| `IncrementalResult` | [Incremental](/api/execution/incremental) | -| `dag.dirty_set()` | [DAG](/api/core/core) | -| `dag.change_provenance()` | [DAG](/api/core/core) | - -## Next steps - -- [Tracing & Profiling](/guide/observability/tracing-profiling) — combine tracing with incremental runs. -- [Checkpointing](/guide/execution-strategies/checkpointing) — persist incremental state across restarts. -- [Conditional Execution](/guide/execution-strategies/conditional) — combine conditions with incremental logic. diff --git a/docs/pages/guide/execution-strategies/resource-scheduling.mdx b/docs/pages/guide/execution-strategies/resource-scheduling.mdx deleted file mode 100644 index 6681f3f..0000000 --- a/docs/pages/guide/execution-strategies/resource-scheduling.mdx +++ /dev/null @@ -1,484 +0,0 @@ ---- -sidebar_position: 12 -title: Resource Scheduling -description: Schedule DAG tasks with GPU, CPU, and memory constraints using dagron's resource-aware executors. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Resource Scheduling - -Many real-world pipelines require access to scarce physical resources -- GPUs for model training, memory for large datasets, or CPU slots for compute-heavy transforms. 
dagron's resource scheduling system lets you declare per-node requirements and execute the DAG with a scheduler that respects capacity constraints at all times. - -The scheduler dispatches nodes in **bottom-level priority order** (longest-path-to-sink first), so the critical path gets resources before less important branches. - -## Core Concepts - - B - A --> C - B --> D - C --> D - D --> E - style B fill:#f9a825,stroke:#f57f17 - style C fill:#f9a825,stroke:#f57f17 - style D fill:#ffcc80,stroke:#ef6c00`} - caption="A pipeline where training nodes compete for 4 GPUs. The scheduler ensures only 2 training nodes of this size run simultaneously." -/> - -There are five main building blocks: - -| Class | Role | -|---|---| -| [`ResourcePool`](/api/execution/resources#resourcepool) | Holds the total capacity of each resource and manages blocking acquire/release. | -| [`ResourceRequirements`](/api/execution/resources#resourcerequirements) | Declares how much of each resource a single node needs. | -| [`ResourceAwareExecutor`](/api/execution/resources#resourceawareexecutor) | Synchronous executor that dispatches nodes when their resources are available. | -| [`AsyncResourceAwareExecutor`](/api/execution/resources#asyncresourceawareexecutor) | Async (`asyncio`) variant of the same scheduler. | -| [`ResourceTimeline`](/api/execution/resources#resourcetimeline) | Records timestamped snapshots of resource utilization during execution. | - ---- - -## Declaring Resources - -### ResourcePool - -A `ResourcePool` represents the total resources available on the machine or cluster. 
Resources are named strings with integer capacities: - -```python -from dagron.execution.resources import ResourcePool - -# A machine with 4 GPUs, 16 CPU slots, and 32 GB of memory -pool = ResourcePool(capacities={ - "gpu": 4, - "cpu_slots": 16, - "memory_mb": 32768, -}) - -print(pool.capacities) # {'gpu': 4, 'cpu_slots': 16, 'memory_mb': 32768} -print(pool.available) # same as capacities initially -print(pool.allocated) # {'gpu': 0, 'cpu_slots': 0, 'memory_mb': 0} -``` - -The pool is **thread-safe**. Internally it uses a `threading.Condition` so that the executor can block on `acquire()` until resources are freed by another thread. - -### ResourceRequirements - -Each node declares its needs via a `ResourceRequirements` object. You can use the constructor directly or one of the shorthand factory methods: - -```python -from dagron.execution.resources import ResourceRequirements - -# Explicit constructor -req = ResourceRequirements(resources={"gpu": 2, "memory_mb": 4096}) - -# Shorthand factories -gpu_req = ResourceRequirements.gpu(2) # {"gpu": 2} -cpu_req = ResourceRequirements.cpu(4) # {"cpu_slots": 4} -mem_req = ResourceRequirements.memory(8192) # {"memory_mb": 8192} -``` - -You can combine multiple resource types in a single `ResourceRequirements`: - -```python -heavy_req = ResourceRequirements(resources={ - "gpu": 2, - "cpu_slots": 4, - "memory_mb": 16384, -}) -``` - -The `fits()` method checks whether a requirement can be satisfied by a given availability dict: - -```python -available = {"gpu": 3, "cpu_slots": 8, "memory_mb": 16384} -print(heavy_req.fits(available)) # True - -available["gpu"] = 1 -print(heavy_req.fits(available)) # False -- only 1 GPU available -``` - ---- - -## Building a Resource-Scheduled Pipeline - -Here is a complete example that trains two ML models concurrently, limited by GPU availability: - -```python -import dagron -from dagron.execution.resources import ( - ResourceAwareExecutor, - ResourcePool, - ResourceRequirements, -) - -# 1. 
Build the DAG -dag = ( - dagron.DAG.builder() - .add_node("fetch_data") - .add_node("preprocess") - .add_node("train_resnet") - .add_node("train_bert") - .add_node("ensemble") - .add_node("deploy") - .add_edge("fetch_data", "preprocess") - .add_edge("preprocess", "train_resnet") - .add_edge("preprocess", "train_bert") - .add_edge("train_resnet", "ensemble") - .add_edge("train_bert", "ensemble") - .add_edge("ensemble", "deploy") - .build() -) - -# 2. Declare resource requirements per node -requirements = { - "train_resnet": ResourceRequirements.gpu(2), - "train_bert": ResourceRequirements.gpu(3), - "ensemble": ResourceRequirements.gpu(1), - # fetch_data, preprocess, deploy need no special resources -} - -# 3. Create the resource pool (4 GPUs available) -pool = ResourcePool(capacities={"gpu": 4}) - -# 4. Create the executor -executor = ResourceAwareExecutor( - dag, - resource_pool=pool, - requirements=requirements, -) - -# 5. Define tasks -def fetch_data(): - print("Fetching dataset...") - return {"rows": 10000} - -def preprocess(): - print("Cleaning data...") - return {"rows": 9500} - -def train_resnet(): - print("Training ResNet on 2 GPUs...") - return {"accuracy": 0.91} - -def train_bert(): - print("Training BERT on 3 GPUs...") - return {"accuracy": 0.94} - -def ensemble(): - print("Ensembling models on 1 GPU...") - return {"accuracy": 0.96} - -def deploy(): - print("Deploying model...") - return "deployed" - -# 6. Execute -result = executor.execute({ - "fetch_data": fetch_data, - "preprocess": preprocess, - "train_resnet": train_resnet, - "train_bert": train_bert, - "ensemble": ensemble, - "deploy": deploy, -}) - -print(f"Succeeded: {result.succeeded}, Failed: {result.failed}") -``` - -Because `train_resnet` needs 2 GPUs and `train_bert` needs 3, and the pool has only 4, they **cannot run simultaneously**. The scheduler dispatches whichever has a higher bottom-level priority first, then dispatches the other once the first releases its GPUs. 
- - B1 --> C1 --> D1 --> E1 --> F1`} - caption="Serialized timeline: train_bert runs first (higher bottom-level priority), then train_resnet once GPUs are freed." -/> - -If you change the pool to 5 GPUs, both training nodes can run concurrently since 2 + 3 = 5 fits within capacity. - ---- - -## Pre-Validation - -Before execution begins, `ResourceAwareExecutor` validates that every node's requirements **can ever be satisfied** by the pool: - -```python -# This will raise immediately -- a single node needs 8 GPUs but pool has 4 -requirements["train_huge"] = ResourceRequirements.gpu(8) - -try: - executor.execute(tasks) -except ValueError as e: - print(e) - # "Node 'train_huge' requires {'gpu': 8} but pool capacity is {'gpu': 4}" -``` - -This check prevents deadlocks where a node would block forever because the pool is too small. - ---- - -## Priority Scheduling - -The executor computes **bottom-level priorities** for each node. The bottom level is the longest weighted path from a node to any sink. Nodes with higher bottom levels are dispatched first because they sit on the critical path. - -You can provide optional cost estimates to influence priority: - -```python -costs = { - "train_resnet": 120.0, # seconds - "train_bert": 300.0, - "ensemble": 60.0, -} - -executor = ResourceAwareExecutor( - dag, - resource_pool=pool, - requirements=requirements, - costs=costs, -) -``` - -With these costs, `train_bert` has a higher bottom-level value and the scheduler gives it resources first. 
- ---- - -## Async Resource Scheduling - -For `asyncio`-based pipelines, use `AsyncResourceAwareExecutor`: - -```python -import asyncio -from dagron.execution.resources import AsyncResourceAwareExecutor - -async def train_resnet_async(): - await asyncio.sleep(2) # simulate training - return {"accuracy": 0.91} - -async def train_bert_async(): - await asyncio.sleep(5) - return {"accuracy": 0.94} - -async def main(): - executor = AsyncResourceAwareExecutor( - dag, - resource_pool=pool, - requirements=requirements, - ) - result = await executor.execute({ - "fetch_data": lambda: "data", - "preprocess": lambda: "cleaned", - "train_resnet": train_resnet_async, - "train_bert": train_bert_async, - "ensemble": lambda: "ensembled", - "deploy": lambda: "deployed", - }) - print(f"Done in {result.total_duration_seconds:.1f}s") - -asyncio.run(main()) -``` - -The async executor uses `asyncio.create_task` for concurrency while the underlying `ResourcePool` still uses threading primitives for acquire/release (safe from async code via the GIL). - ---- - -## ResourceTimeline and Utilization Tracking - -Every `ResourcePool` automatically records a `ResourceTimeline` that captures timestamped snapshots of resource allocation and availability. 
After execution you can inspect utilization: - -```python -timeline = pool.timeline - -# Iterate over snapshots -for snap in timeline.snapshots: - print( - f" t={snap.timestamp:.3f}s " - f"node={snap.node_name} " - f"event={snap.event} " - f"allocated={snap.allocated} " - f"available={snap.available}" - ) - -# Peak utilization across the entire execution -peaks = timeline.peak_utilization() -print(f"Peak GPU utilization: {peaks.get('gpu', 0)} / {pool.capacities['gpu']}") -``` - -A typical timeline output might look like: - -``` - t=0.001s node=train_bert event=acquired allocated={'gpu': 3} available={'gpu': 1} - t=5.012s node=train_bert event=released allocated={'gpu': 0} available={'gpu': 4} - t=5.013s node=train_resnet event=acquired allocated={'gpu': 2} available={'gpu': 2} - t=7.045s node=train_resnet event=released allocated={'gpu': 0} available={'gpu': 4} - t=7.046s node=ensemble event=acquired allocated={'gpu': 1} available={'gpu': 3} - t=7.102s node=ensemble event=released allocated={'gpu': 0} available={'gpu': 4} -Peak GPU utilization: 3 / 4 -``` - -### ResourceSnapshot - -Each snapshot is a `ResourceSnapshot` dataclass: - -| Field | Type | Description | -|---|---|---| -| `timestamp` | `float` | Seconds since the first snapshot was recorded. | -| `allocated` | `dict[str, int]` | Resources currently allocated at this point in time. | -| `available` | `dict[str, int]` | Resources still available at this point in time. | -| `node_name` | `str \| None` | The node that triggered this snapshot. | -| `event` | `str` | Either `"acquired"` or `"released"`. 
| - ---- - -## Manual Acquire and Release - -You can also use the pool directly outside of an executor, for example in custom scheduling logic: - -```python -pool = ResourcePool(capacities={"gpu": 4}) -req = ResourceRequirements.gpu(2) - -# Non-blocking attempt -if pool.try_acquire(req, node_name="my_node"): - try: - run_gpu_work() - finally: - pool.release(req, node_name="my_node") -else: - print("GPUs not available right now") - -# Blocking with timeout -acquired = pool.acquire(req, node_name="my_node", timeout=30.0) -if acquired: - try: - run_gpu_work() - finally: - pool.release(req, node_name="my_node") -else: - print("Timed out waiting for GPUs") -``` - ---- - -## Callbacks and Tracing - -`ResourceAwareExecutor` accepts `ExecutionCallbacks` that fire during execution. Two resource-specific callbacks are available: - -```python -from dagron.execution._types import ExecutionCallbacks - -callbacks = ExecutionCallbacks( - on_start=lambda name: print(f" [{name}] started"), - on_complete=lambda name, val: print(f" [{name}] completed: {val}"), - on_failure=lambda name, err: print(f" [{name}] FAILED: {err}"), - on_resource_acquired=lambda name, res: print(f" [{name}] acquired {res}"), - on_resource_released=lambda name, res: print(f" [{name}] released {res}"), -) - -executor = ResourceAwareExecutor( - dag, - resource_pool=pool, - requirements=requirements, - callbacks=callbacks, - enable_tracing=True, -) - -result = executor.execute(tasks) - -# Access the trace for Chrome-compatible profiling -if result.trace: - result.trace.to_chrome_json("resource_trace.json") -``` - -When `enable_tracing=True`, the executor records `RESOURCE_ACQUIRED` and `RESOURCE_RELEASED` trace events alongside the standard node start/complete events. - ---- - -## Fail-Fast Behavior - -By default, `fail_fast=True`. 
If a node fails, all downstream nodes are skipped immediately without acquiring resources: - -```python -executor = ResourceAwareExecutor( - dag, - resource_pool=pool, - requirements=requirements, - fail_fast=True, # default -) -``` - -Set `fail_fast=False` to let independent branches continue executing even when one branch fails. - ---- - -## Multi-Resource Scheduling Example - -Here is a more realistic example combining GPU, CPU, and memory constraints: - -```python -pool = ResourcePool(capacities={ - "gpu": 4, - "cpu_slots": 16, - "memory_mb": 65536, # 64 GB -}) - -requirements = { - "ingest": ResourceRequirements(resources={"cpu_slots": 2, "memory_mb": 4096}), - "feature_eng": ResourceRequirements(resources={"cpu_slots": 8, "memory_mb": 16384}), - "train_xgboost": ResourceRequirements(resources={"cpu_slots": 4, "memory_mb": 8192}), - "train_nn": ResourceRequirements(resources={"gpu": 2, "cpu_slots": 2, "memory_mb": 16384}), - "explain_shap": ResourceRequirements(resources={"cpu_slots": 8, "memory_mb": 32768}), - "deploy": ResourceRequirements(resources={"cpu_slots": 1}), -} - -executor = ResourceAwareExecutor(dag, pool, requirements) -result = executor.execute(tasks) -``` - -A node is only dispatched when **all** of its required resources are simultaneously available. This prevents situations where a node acquires some GPUs but blocks on memory, starving other nodes. - ---- - -## Best Practices - -1. **Right-size your pools.** Start with the actual hardware capacity. If you have 4 GPUs, set `"gpu": 4`. - -2. **Use `costs` for critical-path optimization.** Provide estimated runtimes so the scheduler prioritizes the bottleneck path. - -3. **Always release resources.** The executor handles this automatically, but if you use `ResourcePool` manually, use `try/finally`. - -4. **Check `peak_utilization()` after execution.** If peak usage is far below capacity, you may have too-conservative requirements. If it equals capacity, you are fully saturating your hardware. 
- -5. **Combine with tracing.** Enable `enable_tracing=True` to generate Chrome-compatible traces that show resource acquire/release events overlaid on the execution timeline. - ---- - -## Related - -- [API Reference: Resources](/api/execution/resources) -- full API documentation for all resource classes. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- standard execution without resource constraints. -- [Distributed Execution](/guide/execution-strategies/distributed) -- running nodes across multiple machines. -- [Tracing & Profiling](/guide/observability/tracing-profiling) -- Chrome-compatible execution traces. diff --git a/docs/pages/guide/getting-started.mdx b/docs/pages/guide/getting-started.mdx deleted file mode 100644 index dd7c445..0000000 --- a/docs/pages/guide/getting-started.mdx +++ /dev/null @@ -1,395 +0,0 @@ ---- -sidebar_position: 1 -title: Getting Started -description: Install dagron and build your first DAG — a complete walkthrough from zero to executing a parallel ETL pipeline. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Getting Started - -This guide walks you through installing dagron, creating your first directed acyclic -graph, executing tasks in parallel, and inspecting the results. By the end you will -have a working ETL pipeline that extracts, transforms, and loads data with full -observability. - -## Installation - -dagron is distributed as a single wheel that bundles the Rust core via PyO3. Install -it from PyPI: - -```bash -pip install dagron -``` - -For async execution support, install the optional `async` extra: - -```bash -pip install "dagron[async]" -``` - -Verify the installation: - -```python -import dagron -print(dagron.__version__) -``` - -:::tip System requirements -dagron requires Python 3.12 or later. The Rust extension is pre-compiled for -Linux (x86_64, aarch64), macOS (x86_64, Apple Silicon), and Windows (x86_64). 
-::: - -## Core concepts - -Before writing code, it helps to understand three concepts that appear throughout -dagron: - -| Concept | What it is | -|---------|-----------| -| **DAG** | A directed acyclic graph whose nodes represent units of work and whose edges represent dependencies. The graph structure lives in Rust for speed. | -| **Executor** | A scheduler that walks the DAG in topological order, dispatching tasks to a thread pool (or async event loop) with maximum parallelism. | -| **Result** | A structured report containing every node's status, return value, error (if any), and wall-clock duration. | - -The typical workflow is: **build a DAG, map tasks to nodes, execute, inspect results**. - -## Your first DAG - -Let us model a classic ETL pipeline with three stages: **extract**, **transform**, -and **load**. The transform step depends on extract, and load depends on transform. - - transform --> load`} - caption="A simple three-node ETL pipeline." -/> - -### Step 1 — Build the graph - -The easiest way to create a DAG is with the fluent **builder** pattern: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("load") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) -``` - -The builder validates the graph at `.build()` time. If you accidentally introduce a -cycle, dagron raises a `CycleError` immediately — you never get an invalid graph. - -```python -print(dag.node_count()) # 3 -print(dag.edge_count()) # 2 -``` - -### Step 2 — Define tasks - -A **task** is any Python callable (function, lambda, method). 
You map node names to -callables in a plain dictionary: - -```python -def extract(): - """Simulate fetching rows from an API.""" - print("Extracting data...") - return [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}] - -def transform(): - """Normalize names to uppercase.""" - print("Transforming data...") - return [{"id": 1, "name": "ALICE"}, {"id": 2, "name": "BOB"}] - -def load(): - """Write results to the database.""" - print("Loading data...") - return "2 rows written" - -tasks = { - "extract": extract, - "transform": transform, - "load": load, -} -``` - -:::note -Each task runs independently. If you need to pass data between tasks, see the -[Executing Tasks](/guide/core-concepts/executing-tasks) guide for strategies including -shared state and the Pipeline API. -::: - -### Step 3 — Execute - -Create a [`DAGExecutor`](/api/execution/execution) and call `.execute()`: - -```python -executor = dagron.DAGExecutor(dag) -result = executor.execute(tasks) -``` - -The executor honours the dependency order: **extract** runs first, then -**transform**, then **load**. Independent nodes (if any) would run in parallel -across all available CPU cores. - -### Step 4 — Inspect results - -The returned [`ExecutionResult`](/api/execution/execution) contains per-node -outcomes: - -```python -print(result.succeeded) # 3 -print(result.failed) # 0 - -for name, node_result in result.node_results.items(): - print(f"{name}: {node_result.status} in {node_result.duration_seconds:.3f}s") - # extract: NodeStatus.COMPLETED in 0.001s - # transform: NodeStatus.COMPLETED in 0.001s - # load: NodeStatus.COMPLETED in 0.001s -``` - -Each [`NodeResult`](/api/execution/execution) carries the callable's return value: - -```python -print(result.node_results["load"].result) -# "2 rows written" -``` - -And the overall wall-clock time: - -```python -print(f"Pipeline finished in {result.total_duration_seconds:.3f}s") -``` - -## A more realistic example - -Real pipelines have fan-out and fan-in. 
Let us expand the ETL to extract from -two sources in parallel, transform each independently, then merge and load: - - transform_api --> merge - extract_db --> transform_db --> merge - merge --> load`} - caption="Fan-out / fan-in ETL pipeline. The two extract-transform branches run in parallel." -/> - -```python -import dagron -import time - -# -- Build the DAG -- -dag = ( - dagron.DAG.builder() - .add_node("extract_api") - .add_node("extract_db") - .add_node("transform_api") - .add_node("transform_db") - .add_node("merge") - .add_node("load") - .add_edge("extract_api", "transform_api") - .add_edge("extract_db", "transform_db") - .add_edge("transform_api", "merge") - .add_edge("transform_db", "merge") - .add_edge("merge", "load") - .build() -) - -# -- Define tasks -- -def extract_api(): - time.sleep(0.5) # simulate network I/O - return [{"source": "api", "id": 1}] - -def extract_db(): - time.sleep(0.3) # simulate query - return [{"source": "db", "id": 2}] - -def transform_api(): - return [{"source": "api", "id": 1, "clean": True}] - -def transform_db(): - return [{"source": "db", "id": 2, "clean": True}] - -def merge(): - return "merged 2 sources" - -def load(): - return "loaded to warehouse" - -tasks = { - "extract_api": extract_api, - "extract_db": extract_db, - "transform_api": transform_api, - "transform_db": transform_db, - "merge": merge, - "load": load, -} - -# -- Execute -- -result = dagron.DAGExecutor(dag, max_workers=4).execute(tasks) - -print(f"Completed {result.succeeded}/{dag.node_count()} tasks") -print(f"Wall time: {result.total_duration_seconds:.3f}s") -``` - -Because `extract_api` and `extract_db` have no mutual dependency, they execute -**concurrently**. The merge node waits until both transform branches finish, -then load runs last. 
- -### Understanding execution order - -You can preview the execution plan without running anything: - -```python -print(dag.topological_sort()) -# ['extract_api', 'extract_db', 'transform_api', 'transform_db', 'merge', 'load'] - -for level, nodes in enumerate(dag.topological_levels()): - print(f"Level {level}: {nodes}") -# Level 0: ['extract_api', 'extract_db'] -# Level 1: ['transform_api', 'transform_db'] -# Level 2: ['merge'] -# Level 3: ['load'] -``` - -Nodes at the same level can run in parallel. The executor uses this structure -internally to maximise concurrency. - -## Handling failures - -By default, the executor uses **fail-fast** mode: if any node raises an exception, -downstream nodes are skipped and the result reports the failure. - -```python -def bad_transform(): - raise ValueError("Data quality check failed!") - -tasks_with_failure = { - "extract": extract, - "transform": bad_transform, - "load": load, -} - -result = dagron.DAGExecutor(dag).execute(tasks_with_failure) - -print(result.failed) # 1 -print(result.skipped) # 1 (load was skipped) -``` - -Node statuses after a failure: - -| Node | Status | -|------|--------| -| extract | | -| transform | | -| load | | - -To continue executing independent branches even after a failure, disable -fail-fast: - -```python -result = dagron.DAGExecutor(dag, fail_fast=False).execute(tasks) -``` - -See the [Executing Tasks](/guide/core-concepts/executing-tasks) guide for the full set -of executor options. 
- -## Builder shortcuts - -The builder supports several convenience patterns: - -### Bulk operations - -```python -dag = ( - dagron.DAG.builder() - .add_nodes(["a", "b", "c", "d"]) - .add_edges([("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")]) - .build() -) -``` - -### Direct construction - -If you prefer an imperative style, create a bare `DAG` and mutate it: - -```python -dag = dagron.DAG() -dag.add_node("x") -dag.add_node("y") -dag.add_edge("x", "y") -``` - -### Pipeline decorator - -For simple linear pipelines, the `@task` decorator infers dependencies from -function parameter names: - -```python -from dagron import Pipeline, task - -@task -def fetch(): - return [1, 2, 3] - -@task -def double(fetch): - return [x * 2 for x in fetch] - -@task -def save(double): - return f"saved {len(double)} items" - -pipeline = Pipeline(tasks=[fetch, double, save], name="numbers") -result = pipeline.execute() -print(result.node_results["save"].result) # "saved 3 items" -``` - -The Pipeline API is covered in depth in [Executing Tasks](/guide/core-concepts/executing-tasks). - -## Visualizing your DAG - -dagron can export the graph in several formats: - -```python -# Mermaid (great for docs) -print(dag.to_mermaid()) - -# Graphviz DOT -print(dag.to_dot()) - -# JSON (for programmatic consumption) -print(dag.to_json()) -``` - -See [Serialization](/guide/core-concepts/serialization) for the full serialization guide. 
- -## Quick reference - -Here is a summary of the objects introduced in this guide with links to the API -reference: - -| Object | Purpose | API docs | -|--------|---------|----------| -| `dagron.DAG` | The core graph | [DAG](/api/core/core) | -| `dagron.DAG.builder()` | Fluent graph construction | [DAGBuilder](/api/core/builder) | -| `dagron.DAGExecutor` | Thread-pool executor | [DAGExecutor](/api/execution/execution) | -| `ExecutionResult` | Aggregate execution report | [ExecutionResult](/api/execution/execution) | -| `NodeResult` | Per-node outcome | [NodeResult](/api/execution/execution) | -| `Pipeline` / `@task` | Decorator-based pipelines | [Pipeline](/api/execution/pipeline) | - -## Next steps - -You now know how to install dagron, build a DAG, execute tasks, and read -results. Continue with: - -- [Building DAGs](/guide/core-concepts/building-dags) — deep dive into construction patterns, metadata, and payloads. -- [Executing Tasks](/guide/core-concepts/executing-tasks) — timeouts, cancellation, callbacks, async execution. -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — analysis, querying, and what-if exploration. -- [Tracing & Profiling](/guide/observability/tracing-profiling) — Chrome-compatible traces and bottleneck detection. diff --git a/docs/pages/guide/observability/error-handling.mdx b/docs/pages/guide/observability/error-handling.mdx deleted file mode 100644 index 54bc08f..0000000 --- a/docs/pages/guide/observability/error-handling.mdx +++ /dev/null @@ -1,607 +0,0 @@ ---- -sidebar_position: 22 -title: Error Handling -description: Understand dagron's error hierarchy, fail-fast behavior, and patterns for robust pipeline error recovery. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Error Handling - -Pipelines fail. Data is missing, APIs time out, code has bugs. 
dagron provides a structured error hierarchy, clear fail-fast semantics, and patterns for graceful error recovery so you can build pipelines that handle failure predictably. - ---- - -## Error Hierarchy - -All dagron-specific errors inherit from `DagronError`, which itself inherits from Python's `Exception`. This gives you a single base class to catch all dagron errors: - -```mermaid -classDiagram - Exception <|-- DagronError - DagronError <|-- GraphError - DagronError <|-- CycleError - DagronError <|-- DuplicateNodeError - DagronError <|-- NodeNotFoundError - DagronError <|-- EdgeNotFoundError - Exception <|-- GateRejectedError - Exception <|-- GateTimeoutError - Exception <|-- TemplateError -``` - -### Core Errors - -These are raised by the Rust-backed graph engine: - -| Error | When it is raised | -|---|---| -| `DagronError` | Base class for all dagron errors. Never raised directly. | -| `GraphError` | General graph structure error (e.g., invalid operation on the graph). | -| `CycleError` | Adding an edge would create a cycle, violating the DAG property. | -| `DuplicateNodeError` | Adding a node with a name that already exists in the graph. | -| `NodeNotFoundError` | Referencing a node name that does not exist. | -| `EdgeNotFoundError` | Referencing an edge that does not exist (e.g., during removal). | - -### Execution Errors - -These are raised during task execution: - -| Error | When it is raised | -|---|---| -| `GateRejectedError` | An [approval gate](/guide/execution-strategies/approval-gates) was rejected. | -| `GateTimeoutError` | An approval gate timed out before a decision was made. | -| `TemplateError` | A [template](/guide/advanced/templates) parameter is invalid. | - ---- - -## Graph Construction Errors - -### CycleError - -The most common graph error. 
Raised when adding an edge would create a cycle: - -```python -import dagron - -dag = dagron.DAG() -dag.add_node("a") -dag.add_node("b") -dag.add_node("c") -dag.add_edge("a", "b") -dag.add_edge("b", "c") - -try: - dag.add_edge("c", "a") # would create a -> b -> c -> a cycle -except dagron.CycleError as e: - print(f"Cycle detected: {e}") -``` - - B --> C - C -.->|"rejected"| A - style C fill:#ffcdd2,stroke:#c62828`} - caption="The edge c -> a is rejected because it would create a cycle." -/> - -Use the builder pattern with `allow_cycles=False` (the default) to catch cycles at build time: - -```python -try: - dag = ( - dagron.DAG.builder() - .add_node("a").add_node("b").add_node("c") - .add_edge("a", "b") - .add_edge("b", "c") - .add_edge("c", "a") # CycleError raised here - .build() - ) -except dagron.CycleError: - print("Cannot build: graph contains a cycle") -``` - -### DuplicateNodeError - -Raised when you try to add a node with a name that already exists: - -```python -dag = dagron.DAG() -dag.add_node("extract") - -try: - dag.add_node("extract") # already exists -except dagron.DuplicateNodeError as e: - print(f"Duplicate: {e}") -``` - -### NodeNotFoundError - -Raised when referencing a node that does not exist: - -```python -dag = dagron.DAG() -dag.add_node("a") - -try: - dag.add_edge("a", "b") # "b" does not exist -except dagron.NodeNotFoundError as e: - print(f"Not found: {e}") -``` - -Also raised when trying to get the payload, metadata, or predecessors of a nonexistent node: - -```python -try: - dag.get_payload("nonexistent") -except dagron.NodeNotFoundError: - print("Node does not exist") -``` - -### EdgeNotFoundError - -Raised when trying to remove or reference an edge that does not exist: - -```python -dag = dagron.DAG() -dag.add_node("a") -dag.add_node("b") - -try: - dag.remove_edge("a", "b") # no edge between a and b -except dagron.EdgeNotFoundError as e: - print(f"Edge not found: {e}") -``` - -### GraphError - -A general graph error for operations that 
are not covered by the more specific errors: - -```python -try: - dag.some_invalid_operation() -except dagron.GraphError as e: - print(f"Graph error: {e}") -``` - ---- - -## Catching All dagron Errors - -Use `DagronError` as a catch-all: - -```python -import dagron - -try: - dag = ( - dagron.DAG.builder() - .add_node("a") - .add_edge("a", "nonexistent") - .build() - ) -except dagron.DagronError as e: - print(f"dagron error: {type(e).__name__}: {e}") -``` - -This catches `CycleError`, `DuplicateNodeError`, `NodeNotFoundError`, `EdgeNotFoundError`, and `GraphError`. - ---- - -## Fail-Fast Execution - -During execution, dagron uses **fail-fast** semantics by default. When a node fails, all downstream nodes are immediately skipped: - -```python -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("load") - .add_node("report") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .add_edge("load", "report") - .build() -) - -def failing_transform(): - raise ValueError("Bad data format") - -executor = dagron.DAGExecutor(dag) -result = executor.execute({ - "extract": lambda: "raw data", - "transform": failing_transform, - "load": lambda: "loaded", - "report": lambda: "report", -}) - -for name, nr in result.node_results.items(): - print(f" {name}: {nr.status.value}") -``` - -Output: - -``` - extract: completed - transform: failed - load: skipped - report: skipped -``` - - transform --> load --> report - style extract fill:#c8e6c9,stroke:#2e7d32 - style transform fill:#ffcdd2,stroke:#c62828 - style load fill:#e0e0e0,stroke:#9e9e9e - style report fill:#e0e0e0,stroke:#9e9e9e`} - caption="When transform fails, load and report are skipped." 
-/> - -### Disabling Fail-Fast - -Set `fail_fast=False` to let independent branches continue executing: - -```python -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("path_a") - .add_node("path_b") - .add_node("merge") - .add_edge("extract", "path_a") - .add_edge("extract", "path_b") - .add_edge("path_a", "merge") - .add_edge("path_b", "merge") - .build() -) - -executor = dagron.DAGExecutor(dag, fail_fast=False) -result = executor.execute({ - "extract": lambda: "data", - "path_a": lambda: 1 / 0, # fails - "path_b": lambda: "success", # still runs - "merge": lambda: "merged", # still runs (has at least one completed dep) -}) - -for name, nr in result.node_results.items(): - print(f" {name}: {nr.status.value}") -``` - -Output: - -``` - extract: completed - path_a: failed - path_b: completed - merge: completed -``` - ---- - -## Inspecting Node Errors - -Each `NodeResult` contains the original exception if the node failed: - -```python -for name, nr in result.node_results.items(): - if nr.status == dagron.NodeStatus.FAILED: - print(f"Node '{name}' failed:") - print(f" Error type: {type(nr.error).__name__}") - print(f" Message: {nr.error}") - print(f" Duration: {nr.duration_seconds:.3f}s") -``` - ---- - -## ExecutionResult Summary - -The `ExecutionResult` provides aggregate counts: - -```python -result = executor.execute(tasks) - -print(f"Succeeded: {result.succeeded}") -print(f"Failed: {result.failed}") -print(f"Skipped: {result.skipped}") -print(f"Timed out: {result.timed_out}") -print(f"Cancelled: {result.cancelled}") -print(f"Duration: {result.total_duration_seconds:.1f}s") - -# Check if the entire execution succeeded -if result.failed == 0: - print("All nodes completed successfully") -else: - print(f"{result.failed} node(s) failed") -``` - ---- - -## Error Recovery Patterns - -### Pattern: Retry with Backoff - -Wrap tasks in a retry decorator: - -```python -import time - -def retry(fn, max_retries=3, backoff=1.0): - """Retry a task function with 
exponential backoff.""" - def wrapper(): - last_error = None - for attempt in range(max_retries): - try: - return fn() - except Exception as e: - last_error = e - if attempt < max_retries - 1: - time.sleep(backoff * (2 ** attempt)) - raise last_error - return wrapper - -tasks = { - "fetch_api": retry(lambda: call_flaky_api(), max_retries=3), - "process": lambda: process_data(), -} -``` - -### Pattern: Fallback Values - -Provide a fallback when a task fails: - -```python -def with_fallback(fn, default): - """Return a default value if the task fails.""" - def wrapper(): - try: - return fn() - except Exception: - return default - return wrapper - -tasks = { - "fetch_cache": with_fallback(lambda: get_from_cache(), default=None), - "fetch_api": with_fallback(lambda: get_from_api(), default={}), -} -``` - -### Pattern: Error Callbacks - -Use execution callbacks to log errors as they happen: - -```python -from dagron.execution._types import ExecutionCallbacks - -def on_failure(name, error): - log.error(f"Node '{name}' failed: {error}") - send_alert(f"Pipeline node '{name}' failed") - -callbacks = ExecutionCallbacks( - on_failure=on_failure, -) - -executor = dagron.DAGExecutor(dag, callbacks=callbacks) -``` - -### Pattern: Checkpoint and Resume - -Use [checkpointing](/guide/execution-strategies/checkpointing) to save progress and resume after fixing the failure: - -```python -from dagron.execution.checkpoint import CheckpointExecutor - -executor = CheckpointExecutor(dag, checkpoint_dir="/tmp/checkpoints") -result = executor.execute(tasks) - -if result.failed > 0: - # Fix the failing task, then resume from the checkpoint - result = executor.resume(tasks) -``` - -### Pattern: Graceful Degradation - -Design your DAG so that non-critical branches can fail without affecting the critical path: - -```python -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") # critical - .add_node("load") # critical - .add_node("send_metrics") # non-critical - 
.add_node("send_slack") # non-critical - .add_edge("extract", "transform") - .add_edge("transform", "load") - .add_edge("transform", "send_metrics") - .add_edge("load", "send_slack") - .build() -) - -# With fail_fast=False, metrics/slack failures don't block load -executor = dagron.DAGExecutor(dag, fail_fast=False) -``` - -(non-critical)"] - slack["send_slack
(non-critical)"] - extract --> transform - transform --> load - transform --> metrics - load --> slack - style load fill:#c8e6c9,stroke:#2e7d32 - style metrics fill:#fff9c4,stroke:#f9a825 - style slack fill:#fff9c4,stroke:#f9a825`} - caption="Non-critical branches can fail independently without affecting the critical path." -/> - ---- - -## Gate Errors - -[Approval gates](/guide/execution-strategies/approval-gates) raise their own errors: - -```python -from dagron.execution.gates import GateRejectedError, GateTimeoutError - -try: - controller.wait_sync("deploy") -except GateRejectedError as e: - print(f"Gate '{e.gate_name}' rejected: {e.reason}") -except GateTimeoutError as e: - print(f"Gate '{e.gate_name}' timed out after {e.timeout}s") -``` - -These errors are **not** subclasses of `DagronError` since they originate from the execution layer, not the graph engine. - ---- - -## Template Errors - -[Template](/guide/advanced/templates) parameter validation raises `TemplateError`: - -```python -from dagron.template import DAGTemplate, TemplateError - -template = DAGTemplate(params={"env": str}) - -try: - template.render(env=42) # wrong type -except TemplateError as e: - print(f"Template error: {e}") - # "Parameter 'env' expects str, got int" -``` - ---- - -## Error Handling in Hooks - -[Plugin hooks](/guide/advanced/plugins-hooks) catch and warn on callback errors to prevent them from breaking execution: - -```python -from dagron.plugins.hooks import HookRegistry, HookEvent, HookContext - -hooks = HookRegistry() - -def buggy_hook(ctx: HookContext): - raise RuntimeError("hook crashed") - -hooks.register(HookEvent.PRE_NODE, buggy_hook) - -# This fires the hook but does NOT raise. -# Instead, a RuntimeWarning is issued. 
-hooks.fire(HookContext(event=HookEvent.PRE_NODE)) -``` - -If you need to observe hook errors, use Python's `warnings` module: - -```python -import warnings - -with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - hooks.fire(HookContext(event=HookEvent.PRE_NODE)) - if w: - print(f"Hook warning: {w[0].message}") -``` - ---- - -## Defensive DAG Construction - -Use try/except around builder operations to build robust DAG construction code: - -```python -import dagron - -def build_pipeline_safely(node_specs): - """Build a DAG from specs, skipping invalid entries.""" - builder = dagron.DAG.builder() - errors = [] - - for spec in node_specs: - try: - builder.add_node(spec["name"]) - except dagron.DuplicateNodeError: - errors.append(f"Duplicate node: {spec['name']}") - - for spec in node_specs: - for dep in spec.get("dependencies", []): - try: - builder.add_edge(dep, spec["name"]) - except dagron.NodeNotFoundError as e: - errors.append(f"Missing dependency: {e}") - except dagron.CycleError as e: - errors.append(f"Would create cycle: {e}") - - if errors: - print(f"Warnings during build ({len(errors)}):") - for err in errors: - print(f" - {err}") - - return builder.build() -``` - ---- - -## Error Hierarchy Summary - -```python -import dagron -from dagron.execution.gates import GateRejectedError, GateTimeoutError -from dagron.template import TemplateError - -# Graph engine errors (from Rust) -dagron.DagronError # base class -dagron.GraphError # general graph error -dagron.CycleError # edge would create a cycle -dagron.DuplicateNodeError # node already exists -dagron.NodeNotFoundError # node does not exist -dagron.EdgeNotFoundError # edge does not exist - -# Execution errors (from Python) -GateRejectedError # gate was rejected -GateTimeoutError # gate timed out -TemplateError # template parameter invalid -``` - ---- - -## Best Practices - -1. **Catch specific errors.** Use `CycleError`, `NodeNotFoundError`, etc. 
instead of the broad `DagronError` when you know what might go wrong. - -2. **Use `fail_fast=True` by default.** This prevents wasting compute on nodes that will fail anyway due to missing upstream data. - -3. **Disable fail-fast for non-critical branches.** If some branches are optional (metrics, notifications), use `fail_fast=False` so they do not block the critical path. - -4. **Log errors via callbacks.** Use `ExecutionCallbacks.on_failure` to capture errors as they happen, not just at the end. - -5. **Inspect `NodeResult.error`.** When a node fails, the original exception is preserved for debugging. - -6. **Use retry wrappers for transient failures.** Network errors, API rate limits, and temporary file locks benefit from retry logic. - -7. **Checkpoint long-running pipelines.** For pipelines that take hours, use checkpointing so you do not lose progress on failure. - ---- - -## Related - -- [API Reference: Errors](/api/core/errors) -- full documentation for all error classes. -- [Executing Tasks](/guide/core-concepts/executing-tasks) -- how fail-fast works in the executor. -- [Approval Gates](/guide/execution-strategies/approval-gates) -- `GateRejectedError` and `GateTimeoutError`. -- [Templates](/guide/advanced/templates) -- `TemplateError`. -- [Checkpointing](/guide/execution-strategies/checkpointing) -- saving and resuming execution state. -- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- error isolation in hooks. diff --git a/docs/pages/guide/observability/tracing-profiling.mdx b/docs/pages/guide/observability/tracing-profiling.mdx deleted file mode 100644 index c892449..0000000 --- a/docs/pages/guide/observability/tracing-profiling.mdx +++ /dev/null @@ -1,450 +0,0 @@ ---- -sidebar_position: 8 -title: Tracing & Profiling -description: Record Chrome-compatible execution traces, analyse critical paths, detect bottlenecks, and profile parallelism efficiency. 
---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Tracing & Profiling - -When your DAG pipeline is slow, you need data — not guesswork. dagron provides -two complementary observability tools: - -- **Tracing** records a timestamped event log for every node during execution. -- **Profiling** analyses the trace to find the critical path, detect bottlenecks, - and measure parallelism efficiency. - -This guide shows you how to enable tracing, explore traces, export to Chrome's -trace viewer, and use the profiling API to optimise your pipelines. - -## Enabling tracing - -Pass `enable_tracing=True` to any executor: - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_nodes(["extract", "transform_a", "transform_b", "merge", "load"]) - .add_edges([ - ("extract", "transform_a"), - ("extract", "transform_b"), - ("transform_a", "merge"), - ("transform_b", "merge"), - ("merge", "load"), - ]) - .build() -) - -import time - -tasks = { - "extract": lambda: time.sleep(0.1) or "data", - "transform_a": lambda: time.sleep(0.3) or "a_done", - "transform_b": lambda: time.sleep(0.2) or "b_done", - "merge": lambda: time.sleep(0.05) or "merged", - "load": lambda: time.sleep(0.1) or "loaded", -} - -result = dagron.DAGExecutor( - dag, - max_workers=4, - enable_tracing=True, -).execute(tasks) -``` - - transform_a --> merge - extract --> transform_b --> merge - merge --> load`} - caption="Pipeline with parallel transform branches — we will trace this execution." 
-/> - -## ExecutionTrace - -When tracing is enabled, `result.trace` contains an -[`ExecutionTrace`](/api/observability/tracing) object: - -```python -trace = result.trace - -# Quick summary -print(trace.summary()) -``` - -Output: - -``` -Execution Trace Summary -======================= -Total duration: 0.552s -Nodes executed: 5 - COMPLETED: 5 - -Timeline: - extract [0.000s - 0.102s] (0.102s) COMPLETED - transform_a [0.102s - 0.401s] (0.299s) COMPLETED - transform_b [0.102s - 0.305s] (0.203s) COMPLETED - merge [0.401s - 0.452s] (0.051s) COMPLETED - load [0.452s - 0.552s] (0.100s) COMPLETED -``` - -### Trace events - -Each node's lifecycle is recorded as a series of -[`TraceEvent`](/api/observability/tracing) objects: - -```python -for event in trace.events: - print(f"{event.timestamp:.3f}s {event.node_name:20s} {event.event_type}") -``` - -Output: - -``` -0.000s extract STARTED -0.102s extract COMPLETED -0.102s transform_a STARTED -0.102s transform_b STARTED -0.305s transform_b COMPLETED -0.401s transform_a COMPLETED -0.401s merge STARTED -0.452s merge COMPLETED -0.452s load STARTED -0.552s load COMPLETED -``` - -### Events for a specific node - -```python -events = trace.events_for_node("transform_a") -for e in events: - print(f"{e.event_type}: {e.timestamp:.3f}s") -# STARTED: 0.102s -# COMPLETED: 0.401s -``` - -## Chrome trace format - -dagron can export traces in Chrome's -[Trace Event Format](https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/), -which you can visualize in `chrome://tracing` or [Perfetto](https://ui.perfetto.dev/). - -### Exporting - -```python -chrome_json = trace.to_chrome_trace() - -with open("trace.json", "w") as f: - f.write(chrome_json) -``` - -### Viewing - -1. Open Chrome and navigate to `chrome://tracing` -2. Click **Load** and select `trace.json` -3. You will see a timeline with each node as a horizontal bar - -Alternatively, open [Perfetto UI](https://ui.perfetto.dev/) and drag the file -onto the page. 
- -The Chrome trace view shows: -- **Parallel lanes** for concurrent tasks -- **Gap analysis** between sequential tasks -- **Duration bars** proportional to wall-clock time -- **Zoom and pan** for exploring long traces - -### JSON trace format - -```python -json_str = trace.to_json() -``` - -This exports the raw trace data as JSON (dagron's own format, not Chrome's). -Useful for custom analysis or storage. - -## Profiling with profile_execution() - -The [`profile_execution()`](/api/observability/tracing) function takes a DAG and an -execution result, and produces a [`ProfileReport`](/api/observability/tracing) with -actionable insights: - -```python -from dagron import profile_execution - -report = profile_execution(dag, result) -``` - -### Critical path analysis - -The critical path is the sequence of nodes that determined the total wall-clock -time: - -```python -print("Critical path:") -for node in report.critical_path: - print(f" {node}") -# extract -# transform_a <-- bottleneck (longest task) -# merge -# load -``` - - transform_a:::critical --> merge:::critical - extract --> transform_b --> merge - merge --> load:::critical`} - caption="Critical path highlighted in red. transform_a is the bottleneck." -/> - -The critical path tells you exactly which nodes to optimise for maximum -speedup. - -### Bottleneck detection - -```python -print("Bottlenecks:") -for b in report.bottlenecks: - print(f" {b.node}: {b.duration_seconds:.3f}s ({b.percentage:.1f}% of total)") -# transform_a: 0.299s (54.2% of total) -# transform_b: 0.203s (36.8% of total) -``` - -Bottlenecks are nodes that consume a disproportionate share of the total -execution time. - -### Parallelism efficiency - -```python -print(f"Parallelism efficiency: {report.parallelism_efficiency:.1%}") -# Parallelism efficiency: 85.3% -``` - -Parallelism efficiency is the ratio of the sequential sum of all task durations -to the product of the wall-clock time and the number of workers. 
A value of 100% -means all workers were busy the entire time. Low values indicate scheduling -gaps or sequential bottlenecks. - -```python -# Detailed breakdown -print(f"Sequential sum: {report.sequential_sum:.3f}s") -print(f"Wall-clock: {report.wall_clock:.3f}s") -print(f"Speedup: {report.sequential_sum / report.wall_clock:.2f}x") -``` - -## Complete profiling workflow - -Here is a full example that builds, executes, traces, and profiles a pipeline: - -```python -import dagron -from dagron import profile_execution -import time - -# 1. Build the DAG -dag = ( - dagron.DAG.builder() - .add_nodes(["fetch", "validate", "feature_a", "feature_b", "feature_c", - "train", "evaluate", "deploy"]) - .add_edges([ - ("fetch", "validate"), - ("validate", "feature_a"), - ("validate", "feature_b"), - ("validate", "feature_c"), - ("feature_a", "train"), - ("feature_b", "train"), - ("feature_c", "train"), - ("train", "evaluate"), - ("evaluate", "deploy"), - ]) - .build() -) - -# 2. Define tasks with realistic durations -tasks = { - "fetch": lambda: time.sleep(0.5) or "fetched", - "validate": lambda: time.sleep(0.1) or "valid", - "feature_a": lambda: time.sleep(1.0) or "features_a", - "feature_b": lambda: time.sleep(0.8) or "features_b", - "feature_c": lambda: time.sleep(0.3) or "features_c", - "train": lambda: time.sleep(2.0) or "model", - "evaluate": lambda: time.sleep(0.5) or "metrics", - "deploy": lambda: time.sleep(0.2) or "deployed", -} - -# 3. Execute with tracing -result = dagron.DAGExecutor( - dag, - max_workers=4, - enable_tracing=True, -).execute(tasks) - -# 4. Profile -report = profile_execution(dag, result) - -# 5. 
Print report -print("=" * 60) -print("PROFILING REPORT") -print("=" * 60) -print(f"Total wall-clock time: {result.total_duration_seconds:.3f}s") -print(f"Parallelism efficiency: {report.parallelism_efficiency:.1%}") -print() - -print("Critical path:") -for node in report.critical_path: - nr = result.node_results[node] - print(f" {node:20s} {nr.duration_seconds:.3f}s") -print() - -print("Top bottlenecks:") -for b in report.bottlenecks[:3]: - print(f" {b.node:20s} {b.duration_seconds:.3f}s ({b.percentage:.1f}%)") -print() - -# 6. Export trace for Chrome -with open("pipeline_trace.json", "w") as f: - f.write(result.trace.to_chrome_trace()) -print("Trace exported to pipeline_trace.json") -``` - - validate - validate --> feature_a --> train - validate --> feature_b --> train - validate --> feature_c --> train - train --> evaluate --> deploy`} - caption="ML pipeline with three parallel feature extraction branches." -/> - -## Tracing with other executors - -Tracing works with all executor types: - -### AsyncDAGExecutor - -```python -import asyncio - -async def main(): - executor = dagron.AsyncDAGExecutor(dag, enable_tracing=True) - result = await executor.execute(tasks) - print(result.trace.summary()) - -asyncio.run(main()) -``` - -### IncrementalExecutor - -```python -executor = dagron.IncrementalExecutor(dag, enable_tracing=True) -result = executor.execute(tasks, changed_nodes=["fetch"]) - -# Trace shows which nodes were recomputed vs reused -print(result.trace.summary()) -``` - -### CheckpointExecutor - -```python -executor = dagron.CheckpointExecutor(dag, checkpoint_dir="/tmp/checkpoints") -# Note: CheckpointExecutor uses tracing internally for resume support -result = executor.execute(tasks) -``` - -## Interpreting traces - -### Identifying scheduling gaps - -Look for periods where no node is running. These indicate: -- **Sequential bottlenecks** — a node with high in-degree that must wait for - all predecessors. 
-- **Under-utilisation** — `max_workers` is too low, or the graph is too - sequential. - -### Identifying stragglers - -If one branch takes much longer than its siblings, the join node waits -for the straggler. In the example above, `feature_a` (1.0s) is a straggler -compared to `feature_c` (0.3s). - -### Measuring overhead - -Compare the sequential sum to the parallel execution time: - -```python -sequential = sum(nr.duration_seconds for nr in result.node_results.values()) -parallel = result.total_duration_seconds -overhead = parallel - (sequential / 4) # with 4 workers - -print(f"Sequential sum: {sequential:.3f}s") -print(f"Parallel time: {parallel:.3f}s") -print(f"Overhead: {overhead:.3f}s") -``` - -## Tracing in production - -For production pipelines, consider these patterns: - -### Conditional tracing - -```python -import os - -enable = os.environ.get("DAGRON_TRACING", "false").lower() == "true" -result = dagron.DAGExecutor(dag, enable_tracing=enable).execute(tasks) -``` - -### Trace sampling - -```python -import random - -# Trace 10% of executions -enable = random.random() < 0.1 -result = dagron.DAGExecutor(dag, enable_tracing=enable).execute(tasks) - -if result.trace: - with open(f"trace_{int(time.time())}.json", "w") as f: - f.write(result.trace.to_chrome_trace()) -``` - -### Monitoring integration - -```python -from dagron import profile_execution - -result = dagron.DAGExecutor(dag, enable_tracing=True).execute(tasks) -report = profile_execution(dag, result) - -# Send metrics to your monitoring system -metrics = { - "pipeline.duration": result.total_duration_seconds, - "pipeline.parallelism_efficiency": report.parallelism_efficiency, - "pipeline.critical_path_length": len(report.critical_path), - "pipeline.nodes_succeeded": result.succeeded, - "pipeline.nodes_failed": result.failed, -} -# send_to_datadog(metrics) # or Prometheus, Grafana, etc. 
-``` - -## API reference - -| Class / Function | Docs | -|------------------|------| -| `ExecutionTrace` | [Tracing](/api/observability/tracing) | -| `TraceEvent` | [Tracing](/api/observability/tracing) | -| `profile_execution()` | [Tracing](/api/observability/tracing) | -| `ProfileReport` | [Tracing](/api/observability/tracing) | - -## Next steps - -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) — pre-execution analysis and critical path estimation. -- [Incremental Execution](/guide/execution-strategies/incremental) — combine tracing with incremental runs. -- [Checkpointing](/guide/execution-strategies/checkpointing) — persist progress and resume after failures. diff --git a/docs/pages/guide/observability/visualization.mdx b/docs/pages/guide/observability/visualization.mdx deleted file mode 100644 index 1618f28..0000000 --- a/docs/pages/guide/observability/visualization.mdx +++ /dev/null @@ -1,444 +0,0 @@ ---- -sidebar_position: 21 -title: Visualization -description: Render DAGs as ASCII art, SVG, Graphviz DOT, Mermaid diagrams, and live web dashboards. ---- - -import DagDiagram from '@site/src/components/DagDiagram'; -import StatusBadge from '@site/src/components/StatusBadge'; - -# Visualization - -dagron provides multiple ways to visualize your DAGs, from quick ASCII previews in the terminal to rich SVG renderings in Jupyter notebooks and live web dashboards for production monitoring. 
- -| Method | Output | Best for | -|---|---|---| -| `pretty_print()` | ASCII text | Terminal, logs, CI output | -| `_repr_svg_()` | SVG | Jupyter notebooks | -| `dag.to_dot()` | Graphviz DOT | External tools, custom rendering | -| `dag.to_mermaid()` | Mermaid syntax | Documentation, Markdown | -| `DashboardPlugin` | Live web UI | Production monitoring, gate approval | - ---- - -## ASCII Pretty Print - -The `pretty_print()` function renders a DAG as an ASCII diagram directly in the terminal: - -```python -import dagron -from dagron.display import pretty_print - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("validate") - .add_node("load") - .add_edge("extract", "transform") - .add_edge("extract", "validate") - .add_edge("transform", "load") - .add_edge("validate", "load") - .build() -) - -print(pretty_print(dag)) -``` - -Output: - -``` - [ extract ] - +---------------+ - [ transform ] [ validate ] - +---------------+ - [ load ] -``` - -### Layout Options - -Choose between vertical (top-to-bottom) and horizontal (left-to-right) layouts: - -```python -# Vertical (default) -print(pretty_print(dag, layout="vertical")) - -# Horizontal -print(pretty_print(dag, layout="horizontal")) -``` - -Horizontal output: - -``` -[ extract ]-->[ transform ]-->[ load ] - [ validate ]---> -``` - -### Show Payloads - -Include node payloads in the ASCII output: - -```python -dag = ( - dagron.DAG.builder() - .add_node("extract", payload="csv") - .add_node("transform", payload="pandas") - .add_node("load", payload="postgres") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -print(pretty_print(dag, show_payloads=True)) -``` - -Output: - -``` - [ extract=csv ] - | - [ transform=pandas ] - | - [ load=postgres ] -``` - -### Custom Formatters - -Supply a custom formatter to control node labels: - -```python -def status_formatter(name, payload): - status = payload or "pending" - return f"{name} ({status})" - 
-print(pretty_print(dag, node_formatter=status_formatter)) -``` - -### Max Nodes Guard - -For large graphs, `pretty_print` raises `ValueError` to prevent terminal floods: - -```python -try: - print(pretty_print(huge_dag)) -except ValueError as e: - print(e) - # "Graph has 500 nodes, exceeding max_nodes=50. Increase max_nodes to render." - -# Override the limit -print(pretty_print(huge_dag, max_nodes=500)) -``` - ---- - -## Jupyter SVG Rendering - -In Jupyter notebooks, dagron DAGs render as SVG automatically. The `_repr_svg_()` function is called by Jupyter's display system: - -```python -# In a Jupyter notebook cell: -dag # displays as an SVG graph -``` - -The rendering strategy has multiple fallbacks: - -1. **graphviz Python package** -- if installed, produces high-quality SVG via `Source(dot).pipe()`. -2. **dot CLI** -- if the `graphviz` system package is installed, calls `dot -Tsvg`. -3. **ASCII fallback** -- wraps the ASCII pretty-print output in an SVG `` element. - -### Installing Graphviz - -For the best Jupyter experience, install graphviz: - -```bash -# Python package -pip install graphviz - -# System package (needed by the Python package) -# Ubuntu/Debian: -sudo apt install graphviz -# macOS: -brew install graphviz -``` - -### Direct SVG Generation - -You can also call `_repr_svg_()` directly: - -```python -from dagron.display import _repr_svg_ - -svg_string = _repr_svg_(dag) - -# Save to file -with open("pipeline.svg", "w") as f: - f.write(svg_string) -``` - -### Large Graph Handling - -For graphs exceeding `max_nodes` (default 100), a summary SVG is returned instead: - -``` -DAG(nodes=500, edges=1200) -- too large to render -``` - ---- - -## Graphviz DOT Export - -Export the DAG as a Graphviz DOT string for use with external tools: - -```python -dot_string = dag.to_dot() -print(dot_string) -``` - -Output: - -```dot -digraph { - rankdir=TB; - node [shape=box, style=rounded]; - "extract" -> "transform"; - "extract" -> "validate"; - "transform" -> "load"; 
- "validate" -> "load"; -} -``` - -### Rendering with Graphviz - -```python -import graphviz - -src = graphviz.Source(dag.to_dot()) -src.render("pipeline", format="png", cleanup=True) -# Creates pipeline.png -``` - -### Command-Line Rendering - -```bash -python -c "import dagron; print(dagron.DAG.builder()...build().to_dot())" | dot -Tpng > pipeline.png -``` - ---- - -## Mermaid Export - -Export as Mermaid syntax for embedding in Markdown documentation: - -```python -mermaid_string = dag.to_mermaid() -print(mermaid_string) -``` - -Output: - -``` -graph TD - extract --> transform - extract --> validate - transform --> load - validate --> load -``` - -### Embedding in Markdown - -````markdown -```mermaid -graph TD - extract --> transform - extract --> validate - transform --> load - validate --> load -``` -```` - -### Using with DagDiagram Component - -In dagron's documentation site, use the `DagDiagram` component for interactive rendering: - -```jsx - -``` - -The `DagDiagram` component renders an interactive Mermaid diagram of your pipeline. 
- ---- - -## Live Web Dashboard - -For production monitoring, the `DashboardPlugin` serves a real-time web UI: - -```python -from dagron.dashboard import DashboardPlugin -from dagron.plugins.hooks import HookRegistry -from dagron.plugins.manager import PluginManager - -# Set up the dashboard -dashboard = DashboardPlugin( - host="127.0.0.1", - port=8765, - open_browser=True, -) - -hooks = HookRegistry() -manager = PluginManager(hooks) -manager.register(dashboard) -manager.initialize_all() -# prints: "Dashboard: http://127.0.0.1:8765" - -# Execute with hooks -executor = dagron.DAGExecutor(dag, hooks=hooks) -result = executor.execute(tasks) - -manager.teardown_all() -``` - -### Dashboard Features - -The dashboard provides: - -- **Live graph visualization** -- nodes change color as they transition through states: - - Pending - - Running - - Completed - - Failed - - Skipped - -- **Execution timeline** -- see which nodes are running at each point in time. - -- **Gate management** -- if a `GateController` is provided, approve/reject buttons appear for waiting gates. - -- **Execution summary** -- after completion, shows total duration and per-status counts. - -### Dashboard with Gates - -```python -from dagron.execution.gates import ApprovalGate, GateController - -controller = GateController({ - "review": ApprovalGate(timeout=600), - "deploy": ApprovalGate(timeout=300), -}) - -dashboard = DashboardPlugin( - port=8765, - gate_controller=controller, -) -``` - -When a gate enters `WAITING` state, the dashboard shows clickable **Approve** and **Reject** buttons. - -### Technical Details - -The dashboard web server is implemented in Rust using axum and tokio. It runs on a background OS thread and communicates with the Python hooks via thread-safe callbacks. The Rust implementation ensures low overhead even during high-frequency hook events. - -:::note -The dashboard requires dagron to be built with the `dashboard` Cargo feature. 
If it is not available, importing `DashboardPlugin` raises an `ImportError` with build instructions. -::: - ---- - -## Combining Visualization Methods - -### Export All Formats - -```python -import dagron -from dagron.display import pretty_print, _repr_svg_ - -dag = ( - dagron.DAG.builder() - .add_node("A").add_node("B").add_node("C") - .add_edge("A", "B").add_edge("A", "C") - .build() -) - -# ASCII -ascii_art = pretty_print(dag) -print(ascii_art) - -# DOT -dot = dag.to_dot() -with open("graph.dot", "w") as f: - f.write(dot) - -# Mermaid -mermaid = dag.to_mermaid() -with open("graph.mmd", "w") as f: - f.write(mermaid) - -# SVG -svg = _repr_svg_(dag) -with open("graph.svg", "w") as f: - f.write(svg) -``` - -### Visualization in CI Logs - -Use `pretty_print()` to include a graph visualization in your CI output: - -```python -import dagron -from dagron.display import pretty_print - -def print_pipeline_summary(dag, result): - """Print a visual summary at the end of a CI run.""" - print("\n--- Pipeline Graph ---") - print(pretty_print(dag, layout="horizontal")) - print(f"\n--- Results ---") - print(f" Succeeded: {result.succeeded}") - print(f" Failed: {result.failed}") - print(f" Skipped: {result.skipped}") - print(f" Duration: {result.total_duration_seconds:.1f}s") -``` - -### Visualization in Documentation - -Generate Mermaid diagrams for your project documentation: - -````python -# Generate documentation diagrams -mermaid = dag.to_mermaid() - -doc = f""" -# Pipeline Architecture - -```mermaid -{mermaid} -``` - -This pipeline has {dag.node_count()} nodes and {dag.edge_count()} edges. -""" -```` - ---- - -## Best Practices - -1. **Use `pretty_print()` for quick debugging.** It requires no external dependencies and works in any terminal. - -2. **Install `graphviz` for Jupyter.** The SVG rendering is significantly better with the graphviz package. - -3. **Use `to_mermaid()` for documentation.** Mermaid renders natively in GitHub, GitLab, and most documentation sites. 
- -4. **Use `DashboardPlugin` for production.** The live dashboard gives operators real-time visibility and gate control. - -5. **Set `max_nodes` appropriately.** For large graphs, increase `max_nodes` or use `to_dot()` with Graphviz's layout engines, which handle hundreds of nodes well. - -6. **Export DOT for complex layouts.** When Mermaid's layout is not sufficient, use `to_dot()` and render with Graphviz's `neato`, `fdp`, or `sfdp` engines. - ---- - -## Related - -- [API Reference: Display](/api/utilities/display) -- full API documentation for visualization functions. -- [Plugins & Hooks](/guide/advanced/plugins-hooks) -- the plugin system that powers the DashboardPlugin. -- [Approval Gates](/guide/execution-strategies/approval-gates) -- gate approval via the dashboard UI. -- [Inspecting Graphs](/guide/core-concepts/inspecting-graphs) -- programmatic graph analysis. diff --git a/docs/pages/guide/typed-and-reactive.mdx b/docs/pages/guide/typed-and-reactive.mdx deleted file mode 100644 index 6ac9a10..0000000 --- a/docs/pages/guide/typed-and-reactive.mdx +++ /dev/null @@ -1,298 +0,0 @@ ---- -sidebar_position: 7 -title: Typed Handles & Reactive Engine -description: NodeRef typed handles, @dagron.flow compose API, generic FlowFuture / NodeResult, effect tags, the reactive Signal/Computed/Watcher engine, the cross-process content cache, and time-travel replay. ---- - -# Typed Handles & Reactive Engine - -dagron ships seven coordinated additions that move beyond stringly-typed -node addressing and add four headline differentiators no other Python DAG -library combines: typed handles, a Tawazi-style flow API, fine-grained -reactive recomputation, content-addressed cross-process caching, and -time-travel replay. Existing string-based code keeps working — every new -feature is opt-in. - -## 1. `NodeRef` — typed node handles - -`dag.add_node()` returns a stable `NodeRef`. 
Every public method that -takes a node identifier accepts both `str` and `NodeRef`, so existing -code keeps working. - -```python -from dagron import DAG, NodeRef - -dag = DAG() -extract = dag.add_node("extract") # NodeRef -transform = dag.add_node("transform") # NodeRef - -dag.add_edge(extract, transform) # NodeRef → NodeRef -dag.add_edge("extract", transform) # str → NodeRef -dag.add_edge(extract, "transform") # NodeRef → str - -isinstance(extract, NodeRef) # True -extract.name # "extract" -extract.epoch # 0 -``` - -NodeRefs survive unrelated mutations (adding other nodes / edges) and -detect *remove-then-readd*: removing `"extract"` and re-adding a node -with the same name produces a NodeRef with a different epoch, so the old -reference correctly raises `StaleNodeRefError`. - -```python -import pytest -from dagron import StaleNodeRefError - -dag.remove_node(extract) -new_extract = dag.add_node("extract") # fresh epoch -with pytest.raises(StaleNodeRefError): - dag.has_edge(extract, transform) # the old extract is stale -``` - -## 2. `@dagron.flow` — Pythonic compose API - -Build a DAG by writing a regular Python function. Each `@task` call -inside a `@flow` body records a node; passing one task's return value to -another wires the edge. No string IDs, no fluent builder — just Python. - -```python -import dagron - -@dagron.task -def fetch() -> list[int]: - return [1, 2, 3, 4] - -@dagron.task -def total(rows: list[int]) -> int: - return sum(rows) - -@dagron.task -def label(value: int) -> str: - return f"Total = {value}" - -@dagron.flow -def pipeline(): - return label(total(fetch())) - -dag = pipeline.dag() # the underlying DAG, for analysis -result = pipeline() # builds + runs → ExecutionResult -result["label"].result # "Total = 10" -``` - -The same `@task` decorator is compatible with the legacy parameter-name -inference of `Pipeline`, so a single set of tasks can power both APIs. 
-Inside a `@flow` context, calling `transform(raw)` returns a -`FlowFuture[T]` placeholder; outside one, it executes normally. - -## 3. Generic typing & `dagron.stubgen` - -`FlowFuture[T]` and `NodeResult[T]` carry the wrapped task's return type -all the way through: - -```python -from dagron import FlowFuture -from dagron.execution._types import NodeResult - -@dagron.task -def fetch() -> list[int]: ... - -@dagron.task -def total(rows: list[int]) -> int: ... - -@dagron.flow -def pipeline(): - raw = fetch() # type-checks as list[int] - return total(raw) # type-checks as int - -result = pipeline() -result[fetch].result # NodeResult[list[int]] → list[int] -result[total].result # NodeResult[int] → int -``` - -For string-keyed lookups, generate a stub: - -```python -from dagron.stubgen import generate_stub - -stub = generate_stub( - pipeline.dag(), - tasks={"fetch": fetch, "total": total}, - name="PipelineResult", -) -print(stub) -# class PipelineResult: -# @overload -# def __getitem__(self, key: Literal['fetch']) -> NodeResult[list[int]]: ... -# @overload -# def __getitem__(self, key: Literal['total']) -> NodeResult[int]: ... -``` - -Save the output as a `.pyi` file alongside your code; `mypy` will type -`result["fetch"]` as `NodeResult[list[int]]` even though `result` itself -is just `ExecutionResult`. - -## 4. Effect tags - -Tag every `@task` with its side-effect class — the engine uses these -for parallelism gating today and for cache / replay semantics in the -features below. - -```python -from dagron import Effect - -@dagron.task # defaults to Effect.PURE -def add(a: int, b: int) -> int: return a + b - -@dagron.task(effect=Effect.NETWORK) -def fetch_user(uid: int) -> dict: ... 
- -@dagron.task(effect=Effect.NONDETERMINISTIC) -def now() -> float: - import time; return time.time() -``` - -Properties: - -| Effect | `is_cacheable` | `is_deterministic` | `is_isolated` | -|--------|-----:|-----:|-----:| -| `PURE` | ✅ | ✅ | ❌ | -| `READ` | ✅ | ✅ | ❌ | -| `WRITE` | ❌ | ❌ | ❌ | -| `NETWORK` | ❌ | ❌ | ❌ | -| `NONDETERMINISTIC` | ❌ | ❌ | ✅ | - -`@flow` mirrors each task's effect onto its DAG node's metadata; read -back with `dagron.effects_of(dag)`. An AST-scan heuristic emits a -`UserWarning` when a `PURE` task appears to call impure functions -(`time.time`, `random.*`, `os.*`, `requests.*`, …). - -`DAGExecutor(enforce_effect_isolation=True)` serializes -`NONDETERMINISTIC` tasks while letting other effects parallelize freely. - -## 5. Reactive engine — `Signal` / `Computed` / `Watcher` - -`dagron.reactive` provides Solid.js / Jane-Street-`Incremental` style -primitives where the dependency graph is *implicit*: building a -`Computed` records its read dependencies as a side-effect of evaluating -the function. - -```python -import dagron.reactive as dr - -a = dr.signal(1) -b = dr.signal(2) -s = dr.computed(lambda: a() + b()) -p = dr.computed(lambda: s() * 10) - -p() # 30 — initial compute -a.set(5) # invalidates s and p; b untouched -p() # 70 — recomputes only s and p - -@dr.watch -def watch_p(): - print("p =", p()) - -with dr.batch(): # glitch-free - a.set(0) - b.set(0) -# watch_p fires exactly once after the batch, sees p == 0 -``` - -**Headline benchmark**: in a graph of 10,000 derived nodes off one root -signal, mutating the root and reading just one branch takes ~10 µs — -the engine recomputes only the read path, skipping the other 9999 -invalidated-but-unread branches. This is the differentiator no other -Python DAG library delivers. - -This module is distinct from the existing -`dagron.execution.reactive.ReactiveDAG`, which wraps a *pre-built* -`dagron.DAG` and exposes a push-based `subscribe()` / `set_input()` API. 
-Use whichever fits your shape: the reactive primitives for fresh -dependency graphs you build in code; `ReactiveDAG` to layer reactivity -over a DAG you already have. - -## 6. Cross-process content-addressed cache - -`dagron.contentcache` is Nix-flake-style: the cache is keyed by content -hash, the filesystem path *is* the index, and there's no -`index.json` to keep in sync. Independent processes share intermediates -transparently — a build on one CI worker hits the cache on another the -moment they compute the same fingerprint. - -```python -from dagron import Effect -from dagron.contentcache import ContentCache - -cache = ContentCache() # ~/.cache/dagron/cas - -def expensive(x: int) -> int: - return x * 1000 - -# First call: miss, computes, writes payload to CAS. -val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) -# In another process / another day: -val, hit = cache.compute_or_cached(expensive, args=(42,), effect=Effect.PURE) -# `hit` is True; the payload deserialized straight from disk. -``` - -Effect-aware: `WRITE` / `NETWORK` / `NONDETERMINISTIC` tasks bypass the -cache entirely (their results aren't reproducible). Pluggable via the -`Hasher` protocol — `default_hash` (pickle + blake2b) handles most -Python types; `numpy_hash` uses `array.tobytes()` for byte equality; -write your own for polars frames or any tobyte-friendly type. Honors -`$DAGRON_CACHE_DIR`. - -## 7. Time-travel replay - -`dagron.trace` writes an append-only JSONL log of node executions; each -record references a payload stored by fingerprint in the -`ContentCache`, so identical values across runs deduplicate -automatically. `replay(at=t)` walks the log up to time `t` and -reconstructs the per-node state. 
- -```python -from dagron.contentcache import ContentCache -from dagron.trace import TraceWriter, replay - -cas = ContentCache() -log_path = "run-2026-05.jsonl" - -with TraceWriter(log_path, cas=cas) as w: - w.record("fetch", value=[1, 2, 3], effect=Effect.PURE, timestamp=t0) - w.record("transform", value=6, effect=Effect.PURE, timestamp=t0 + 1) - w.record("publish", value="ok", effect=Effect.NETWORK, timestamp=t0 + 2) - -# Days later, in another process: -state = replay(log_path, at=t0 + 1.5, cas=cas) -state["fetch"].value # [1, 2, 3] — byte-identical to the original run -state["transform"].value # 6 -"publish" in state # False — cutoff was before publish ran - -state = replay(log_path, cas=cas) -state["publish"].value # "ok" — surfaced from the log -state["publish"].replayable # False — NETWORK is non-deterministic -``` - -Pure / READ nodes replay byte-identically. Impure nodes -(`WRITE`/`NETWORK`/`NONDETERMINISTIC`) are flagged `replayable=False` -but their *logged* values are still exposed, so you can audit what the -run actually produced. Honors `$DAGRON_TRACE_DIR`. - -## How they fit together - -The seven additions are designed to compose: - -* **NodeRef** is the substrate — every later API references nodes by - the typed handle. -* **`@flow`** records call structure into a `dagron.DAG`, mirroring each - task's **effect** onto node metadata. -* **`stubgen`** turns the `@flow`-built DAG into a typed lookup stub. -* **Effects** drive parallelism isolation, cache opt-in, and replay - reproducibility flags — one tag, three downstream behaviours. -* **Reactive** is the "live" face of computation; **content cache** is - its persistent face; **replay** is its retrospective face. - -You can adopt any subset independently. The string-based DAG API, -`Pipeline`, and the existing `ReactiveDAG` / `ContentAddressableCache` -classes remain unchanged. 
diff --git a/docs/pages/guide/why-dagron.mdx b/docs/pages/guide/why-dagron.mdx deleted file mode 100644 index 91e8189..0000000 --- a/docs/pages/guide/why-dagron.mdx +++ /dev/null @@ -1,53 +0,0 @@ ---- -sidebar_position: 2 -title: Why dagron? -description: How dagron compares to NetworkX, graphlib, Dask, Airflow, and Prefect — and when to use it. ---- - -# Why dagron? - -**dagron is an embeddable DAG library, not a deployment framework.** It gives your application a Rust-fast graph engine with Python ergonomics — no scheduler daemon, no YAML configs, no cloud console. - -You import dagron, build a graph, and execute it. Your process, your rules. - ---- - -## Comparison - -| | dagron | NetworkX | graphlib | Dask | Airflow | Prefect | -|---|---|---|---|---|---|---| -| **Type** | Library | Library | Stdlib | Framework | Orchestrator | Orchestrator | -| **Language** | Rust + Python | Python | Python | Python | Python | Python | -| **DAG execution** | Built-in (thread/async/distributed) | No | No | Yes | Yes | Yes | -| **Incremental recomputation** | Yes | No | No | No | No | No | -| **Checkpointing** | Yes | No | No | Partial | Yes | Yes | -| **Dynamic DAG expansion** | Yes (runtime) | N/A | N/A | Limited | Yes (2.x) | Yes | -| **Approval gates** | Yes | No | No | No | Plugin | Plugin | -| **Resource-aware scheduling** | Yes (CPU/GPU/memory slots) | No | No | Yes (workers) | Yes (pools) | Yes (work pools) | -| **Distributed execution** | Ray, Celery backends | No | No | Yes (native) | Yes (Celery) | Yes (native) | -| **Overhead** | ~0 (library import) | ~0 | ~0 | Scheduler process | Web server + DB + scheduler | API server + DB | -| **Performance** | Rust core, 3-12x vs NetworkX | Pure Python | Pure Python (minimal) | Python + C extensions | Python | Python | - ---- - -## When to use dagron - -dagron is a great fit when you need a **task graph inside your own process**: - -- **Build systems** — model file targets as nodes, skip unchanged targets with incremental 
execution -- **Spreadsheet engines** — cells as nodes, formula dependencies as edges, recalculate only dirty cells -- **CI/CD schedulers** — lint/test/build/deploy with resource constraints and approval gates -- **ETL pipelines** — multi-stage data pipelines with checkpointing and crash recovery -- **Game asset pipelines** — texture/model/shader compilation with dependency tracking -- **Reactive UIs** — propagate state changes through a dependency graph - -The common thread: you want the graph engine **embedded in your application**, not running as a separate service. - ---- - -## When NOT to use dagron - -- **Managed cloud orchestration** — If you want a web UI, user management, scheduled triggers, and a managed service, use [Airflow](https://airflow.apache.org/) or [Prefect](https://www.prefect.io/). -- **General graph database** — If you need property graphs, Cypher queries, or persistent graph storage, use [Neo4j](https://neo4j.com/). -- **Undirected / cyclic graphs** — dagron enforces acyclicity. For general graph algorithms on undirected graphs, use [NetworkX](https://networkx.org/). -- **Distributed-first data processing** — If your primary need is data parallelism across a cluster, use [Dask](https://www.dask.org/) or [Spark](https://spark.apache.org/). diff --git a/docs/pages/intro.mdx b/docs/pages/intro.mdx deleted file mode 100644 index f1afeb2..0000000 --- a/docs/pages/intro.mdx +++ /dev/null @@ -1,188 +0,0 @@ ---- -sidebar_position: 1 -title: Introduction -description: dagron — a high-performance DAG execution engine for Python, powered by Rust. -slug: / ---- - -import FeatureCard from '@site/src/components/FeatureCard'; - -# dagron - -**High-performance DAG execution engine for Python, powered by Rust.** - -**Up to 12x faster than NetworkX** on 10K-node DAG validation, with sub-microsecond reachability queries after index build. [See benchmarks](/guide/benchmarks). 
- -**For engineers embedding task graphs in applications** — build systems, data pipelines, spreadsheet engines, CI/CD schedulers. [Why dagron?](/guide/why-dagron) - -dagron lets you define directed acyclic graphs of tasks and execute them with maximum parallelism, rich observability, and dozens of execution strategies — from simple thread pools to distributed clusters. - -The core graph data structure lives in Rust (via PyO3) for zero-copy speed, while the Python layer provides an ergonomic API for building, executing, analyzing, and visualizing your DAGs. - -```python -import dagron - -dag = ( - dagron.DAG.builder() - .add_node("extract") - .add_node("transform") - .add_node("load") - .add_edge("extract", "transform") - .add_edge("transform", "load") - .build() -) - -result = dagron.DAGExecutor(dag).execute({ - "extract": lambda: fetch_data(), - "transform": lambda: clean(result), - "load": lambda: write_to_db(result), -}) -``` - -## Features - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- -## Getting Started - -Install dagron from PyPI: - -```bash -pip install dagron -``` - -**Requirements:** Python 3.12+ · Linux (x86_64, aarch64) · macOS (x86_64, Apple Silicon) · Windows (x86_64) - -Then head to the [Getting Started](/guide/getting-started) guide. - ---- - -**[Benchmarks](/guide/benchmarks)** | **[Why dagron?](/guide/why-dagron)** | **[Cookbook](/guide/cookbook)** | **[Architecture](/guide/architecture)** | **[Changelog](https://github.com/pratyush618/dagron/blob/master/CHANGELOG.md)** diff --git a/docs-next/pnpm-lock.yaml b/docs/pnpm-lock.yaml similarity index 100% rename from docs-next/pnpm-lock.yaml rename to docs/pnpm-lock.yaml diff --git a/docs-next/postcss.config.mjs b/docs/postcss.config.mjs similarity index 100% rename from docs-next/postcss.config.mjs rename to docs/postcss.config.mjs diff --git a/docs-next/public/.nojekyll b/docs/public/.nojekyll similarity index 100% rename from docs-next/public/.nojekyll rename to docs/public/.nojekyll diff --git a/docs-next/public/img/favicon.ico b/docs/public/img/favicon.ico similarity index 100% rename from docs-next/public/img/favicon.ico rename to docs/public/img/favicon.ico diff --git a/docs-next/public/img/logo.svg b/docs/public/img/logo.svg similarity index 100% rename from docs-next/public/img/logo.svg rename to docs/public/img/logo.svg diff --git a/docs-next/scripts/migrate.mjs b/docs/scripts/migrate.mjs similarity index 100% rename from docs-next/scripts/migrate.mjs rename to docs/scripts/migrate.mjs diff --git a/docs/sidebars.ts b/docs/sidebars.ts deleted file mode 100644 index 9905bc4..0000000 --- a/docs/sidebars.ts +++ /dev/null @@ -1,136 +0,0 @@ -import type { SidebarsConfig } from '@docusaurus/plugin-content-docs'; - -const sidebars: SidebarsConfig = { - guideSidebar: [ - 'intro', - 'guide/why-dagron', - { - type: 'category', - label: 'Guide', - collapsed: false, - link: { type: 'generated-index', title: 'User Guide', slug: '/guide' }, - items: [ - 'guide/getting-started', - 
'guide/typed-and-reactive', - 'guide/benchmarks', - 'guide/cookbook', - { - type: 'category', - label: 'Core Concepts', - collapsed: false, - items: [ - 'guide/core-concepts/building-dags', - 'guide/core-concepts/executing-tasks', - 'guide/core-concepts/inspecting-graphs', - 'guide/core-concepts/transforms', - 'guide/core-concepts/serialization', - ], - }, - { - type: 'category', - label: 'Execution Strategies', - collapsed: false, - items: [ - 'guide/execution-strategies/incremental', - 'guide/execution-strategies/conditional', - 'guide/execution-strategies/dynamic-dags', - 'guide/execution-strategies/checkpointing', - 'guide/execution-strategies/caching', - 'guide/execution-strategies/resource-scheduling', - 'guide/execution-strategies/approval-gates', - 'guide/execution-strategies/distributed', - ], - }, - { - type: 'category', - label: 'Observability', - collapsed: true, - items: [ - 'guide/observability/tracing-profiling', - 'guide/observability/visualization', - 'guide/observability/error-handling', - ], - }, - { - type: 'category', - label: 'Advanced Features', - collapsed: true, - items: [ - 'guide/advanced/templates', - 'guide/advanced/versioning', - 'guide/advanced/contracts', - 'guide/advanced/dataframes', - 'guide/advanced/plugins-hooks', - ], - }, - 'guide/architecture', - ], - }, - ], - apiSidebar: [ - { - type: 'category', - label: 'API Reference', - collapsed: false, - link: { type: 'generated-index', title: 'API Reference', slug: '/api' }, - items: [ - { - type: 'category', - label: 'Core', - collapsed: false, - items: ['api/core/core', 'api/core/builder', 'api/core/errors'], - }, - { - type: 'category', - label: 'Execution', - collapsed: false, - items: [ - 'api/execution/execution', - 'api/execution/pipeline', - 'api/execution/incremental', - 'api/execution/caching', - 'api/execution/checkpoint', - 'api/execution/conditions', - 'api/execution/dynamic', - 'api/execution/gates', - 'api/execution/resources', - 'api/execution/distributed', - 
'api/execution/reactive', - ], - }, - { - type: 'category', - label: 'Observability', - collapsed: true, - items: ['api/observability/tracing', 'api/observability/profiling'], - }, - { - type: 'category', - label: 'Analysis & Validation', - collapsed: true, - items: [ - 'api/analysis/analysis', - 'api/analysis/contracts', - 'api/analysis/dataframe', - ], - }, - { - type: 'category', - label: 'Utilities', - collapsed: true, - items: [ - 'api/utilities/template', - 'api/utilities/versioning', - 'api/utilities/compose', - 'api/utilities/display', - 'api/utilities/integration', - 'api/utilities/plugins', - 'api/utilities/modern-api', - ], - }, - ], - }, - ], -}; - -export default sidebars; diff --git a/docs-next/source.config.ts b/docs/source.config.ts similarity index 100% rename from docs-next/source.config.ts rename to docs/source.config.ts diff --git a/docs-next/src/app/(docs)/[...slug]/page.tsx b/docs/src/app/(docs)/[...slug]/page.tsx similarity index 100% rename from docs-next/src/app/(docs)/[...slug]/page.tsx rename to docs/src/app/(docs)/[...slug]/page.tsx diff --git a/docs-next/src/app/(docs)/layout.tsx b/docs/src/app/(docs)/layout.tsx similarity index 100% rename from docs-next/src/app/(docs)/layout.tsx rename to docs/src/app/(docs)/layout.tsx diff --git a/docs-next/src/app/(home)/_sections/features.tsx b/docs/src/app/(home)/_sections/features.tsx similarity index 100% rename from docs-next/src/app/(home)/_sections/features.tsx rename to docs/src/app/(home)/_sections/features.tsx diff --git a/docs-next/src/app/(home)/_sections/hero.tsx b/docs/src/app/(home)/_sections/hero.tsx similarity index 100% rename from docs-next/src/app/(home)/_sections/hero.tsx rename to docs/src/app/(home)/_sections/hero.tsx diff --git a/docs-next/src/app/(home)/_sections/index.ts b/docs/src/app/(home)/_sections/index.ts similarity index 100% rename from docs-next/src/app/(home)/_sections/index.ts rename to docs/src/app/(home)/_sections/index.ts diff --git 
a/docs-next/src/app/(home)/layout.tsx b/docs/src/app/(home)/layout.tsx similarity index 100% rename from docs-next/src/app/(home)/layout.tsx rename to docs/src/app/(home)/layout.tsx diff --git a/docs-next/src/app/(home)/page.tsx b/docs/src/app/(home)/page.tsx similarity index 100% rename from docs-next/src/app/(home)/page.tsx rename to docs/src/app/(home)/page.tsx diff --git a/docs-next/src/app/api/search/route.ts b/docs/src/app/api/search/route.ts similarity index 100% rename from docs-next/src/app/api/search/route.ts rename to docs/src/app/api/search/route.ts diff --git a/docs-next/src/app/global.css b/docs/src/app/global.css similarity index 100% rename from docs-next/src/app/global.css rename to docs/src/app/global.css diff --git a/docs-next/src/app/layout.tsx b/docs/src/app/layout.tsx similarity index 100% rename from docs-next/src/app/layout.tsx rename to docs/src/app/layout.tsx diff --git a/docs-next/src/app/llms-full.txt/route.ts b/docs/src/app/llms-full.txt/route.ts similarity index 100% rename from docs-next/src/app/llms-full.txt/route.ts rename to docs/src/app/llms-full.txt/route.ts diff --git a/docs-next/src/app/llms.txt/route.ts b/docs/src/app/llms.txt/route.ts similarity index 100% rename from docs-next/src/app/llms.txt/route.ts rename to docs/src/app/llms.txt/route.ts diff --git a/docs/src/components/ApiSignature.tsx b/docs/src/components/ApiSignature.tsx deleted file mode 100644 index 0704c95..0000000 --- a/docs/src/components/ApiSignature.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import CodeBlock from '@theme/CodeBlock'; - -interface ApiSignatureProps { - name: string; - signature: string; - language?: string; -} - -export default function ApiSignature({ - name, - signature, - language = 'python', -}: ApiSignatureProps) { - const id = name.toLowerCase().replace(/[^a-z0-9]+/g, '-'); - return ( -
- - {name} - - {signature} -
- ); -} diff --git a/docs/src/components/DagDiagram.tsx b/docs/src/components/DagDiagram.tsx deleted file mode 100644 index 333faf3..0000000 --- a/docs/src/components/DagDiagram.tsx +++ /dev/null @@ -1,104 +0,0 @@ -import { useColorMode } from '@docusaurus/theme-common'; -import Mermaid from '@theme/Mermaid'; -import { useMemo } from 'react'; - -/** - * Maps light-mode hex colors (used in classDef/style directives) to dark-mode equivalents. - * Fills go darker/more saturated; strokes go lighter/brighter for contrast on dark backgrounds. - * Keys must be lowercase. - */ -const DARK_COLOR_MAP: Record = { - // Green (success/active) — fills - '#c8e6c9': '#1b5e20', - '#d4edda': '#1b5e20', - '#e8f5e9': '#1b5e20', - // Green — strokes - '#2e7d32': '#66bb6a', - '#28a745': '#66bb6a', - - // Blue (processing) — fill / stroke - '#e3f2fd': '#0d47a1', - '#1565c0': '#64b5f6', - - // Blue (ancestor) — fill / stroke - '#dbeafe': '#1e3a5f', - '#3b82f6': '#90caf9', - - // Indigo (reused/restored) — fill / stroke - '#e0e7ff': '#283593', - '#6366f1': '#9fa8da', - - // Red (dirty/error/critical) — fills - '#fecaca': '#7f1d1d', - '#ffcdd2': '#7f1d1d', - '#fee2e2': '#7f1d1d', - // Red — strokes - '#ef4444': '#ef5350', - '#c62828': '#ef5350', - - // Orange (recomputed/warning) — fills - '#fed7aa': '#7c2d12', - '#fff3e0': '#7c2d12', - '#ffcc80': '#7c2d12', - // Orange — strokes - '#f97316': '#fb8c00', - '#e65100': '#fb8c00', - '#ef6c00': '#ff9800', - - // Yellow (cached/leaf) — fills - '#fff9c4': '#5f370e', - '#fff3cd': '#5f370e', - // Yellow — stroke - '#ffc107': '#fdd835', - - // Amber — fill / stroke - '#f9a825': '#f57f17', - '#f57f17': '#ffb300', - - // Lime (cutoff) — fill / stroke - '#d9f99d': '#365314', - '#65a30d': '#9ccc65', - - // Gray (skipped/default) — fills - '#e0e0e0': '#37474f', - '#e2e8f0': '#37474f', - // Gray — strokes - '#9e9e9e': '#b0bec5', - '#94a3b8': '#b0bec5', -}; - -const COLOR_PATTERN = new RegExp(Object.keys(DARK_COLOR_MAP).join('|'), 'gi'); - -interface 
DagDiagramProps { - chart: string; - caption?: string; -} - -export default function DagDiagram({ chart, caption }: DagDiagramProps) { - const { colorMode } = useColorMode(); - - const value = useMemo(() => { - if (colorMode !== 'dark') return chart; - return chart.replace( - COLOR_PATTERN, - (match) => DARK_COLOR_MAP[match.toLowerCase()] ?? match, - ); - }, [chart, colorMode]); - - return ( -
- - {caption && ( -

- {caption} -

- )} -
- ); -} diff --git a/docs/src/components/FeatureCard.tsx b/docs/src/components/FeatureCard.tsx deleted file mode 100644 index e4c6638..0000000 --- a/docs/src/components/FeatureCard.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import Link from '@docusaurus/Link'; - -interface FeatureCardProps { - title: string; - description: string; - guideLink?: string; - apiLink?: string; - icon?: string; -} - -export default function FeatureCard({ - title, - description, - guideLink, - apiLink, - icon, -}: FeatureCardProps) { - return ( -
-
- {icon && {icon}} - {title} -
-
{description}
-
- {guideLink && Guide →} - {apiLink && API →} -
-
- ); -} diff --git a/docs/src/components/ParamTable.tsx b/docs/src/components/ParamTable.tsx deleted file mode 100644 index bffa605..0000000 --- a/docs/src/components/ParamTable.tsx +++ /dev/null @@ -1,39 +0,0 @@ -interface Param { - name: string; - type: string; - default?: string; - description: string; -} - -interface ParamTableProps { - params: Param[]; -} - -export default function ParamTable({ params }: ParamTableProps) { - return ( - - - - - - - - - - - {params.map((p) => ( - - - - - - - ))} - -
ParameterTypeDefaultDescription
- {p.name} - - {p.type} - {p.default ? {p.default} : required}{p.description}
- ); -} diff --git a/docs/src/components/StatusBadge.tsx b/docs/src/components/StatusBadge.tsx deleted file mode 100644 index 023c182..0000000 --- a/docs/src/components/StatusBadge.tsx +++ /dev/null @@ -1,22 +0,0 @@ -type Status = - | 'pending' - | 'running' - | 'completed' - | 'failed' - | 'skipped' - | 'timed-out' - | 'cancelled' - | 'cache-hit'; - -interface StatusBadgeProps { - status: Status; - label?: string; -} - -export default function StatusBadge({ status, label }: StatusBadgeProps) { - return ( - - {label ?? status} - - ); -} diff --git a/docs-next/src/components/api-signature.tsx b/docs/src/components/api-signature.tsx similarity index 100% rename from docs-next/src/components/api-signature.tsx rename to docs/src/components/api-signature.tsx diff --git a/docs-next/src/components/dag-diagram.tsx b/docs/src/components/dag-diagram.tsx similarity index 100% rename from docs-next/src/components/dag-diagram.tsx rename to docs/src/components/dag-diagram.tsx diff --git a/docs-next/src/components/diagram-carousel.tsx b/docs/src/components/diagram-carousel.tsx similarity index 100% rename from docs-next/src/components/diagram-carousel.tsx rename to docs/src/components/diagram-carousel.tsx diff --git a/docs-next/src/components/effect-badge.tsx b/docs/src/components/effect-badge.tsx similarity index 100% rename from docs-next/src/components/effect-badge.tsx rename to docs/src/components/effect-badge.tsx diff --git a/docs-next/src/components/feature-card.tsx b/docs/src/components/feature-card.tsx similarity index 100% rename from docs-next/src/components/feature-card.tsx rename to docs/src/components/feature-card.tsx diff --git a/docs-next/src/components/mdx.tsx b/docs/src/components/mdx.tsx similarity index 100% rename from docs-next/src/components/mdx.tsx rename to docs/src/components/mdx.tsx diff --git a/docs-next/src/components/mermaid.tsx b/docs/src/components/mermaid.tsx similarity index 100% rename from docs-next/src/components/mermaid.tsx rename to 
docs/src/components/mermaid.tsx diff --git a/docs-next/src/components/param-table.tsx b/docs/src/components/param-table.tsx similarity index 100% rename from docs-next/src/components/param-table.tsx rename to docs/src/components/param-table.tsx diff --git a/docs-next/src/components/provider.tsx b/docs/src/components/provider.tsx similarity index 100% rename from docs-next/src/components/provider.tsx rename to docs/src/components/provider.tsx diff --git a/docs-next/src/components/search.tsx b/docs/src/components/search.tsx similarity index 100% rename from docs-next/src/components/search.tsx rename to docs/src/components/search.tsx diff --git a/docs-next/src/components/status-badge.tsx b/docs/src/components/status-badge.tsx similarity index 100% rename from docs-next/src/components/status-badge.tsx rename to docs/src/components/status-badge.tsx diff --git a/docs-next/src/components/ui/button.tsx b/docs/src/components/ui/button.tsx similarity index 100% rename from docs-next/src/components/ui/button.tsx rename to docs/src/components/ui/button.tsx diff --git a/docs-next/src/components/ui/code-panel.tsx b/docs/src/components/ui/code-panel.tsx similarity index 100% rename from docs-next/src/components/ui/code-panel.tsx rename to docs/src/components/ui/code-panel.tsx diff --git a/docs-next/src/components/ui/index.ts b/docs/src/components/ui/index.ts similarity index 100% rename from docs-next/src/components/ui/index.ts rename to docs/src/components/ui/index.ts diff --git a/docs-next/src/components/ui/section-header.tsx b/docs/src/components/ui/section-header.tsx similarity index 100% rename from docs-next/src/components/ui/section-header.tsx rename to docs/src/components/ui/section-header.tsx diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css deleted file mode 100644 index 4bfd63c..0000000 --- a/docs/src/css/custom.css +++ /dev/null @@ -1,162 +0,0 @@ -:root { - --ifm-color-primary: #6c5ce7; - --ifm-color-primary-dark: #5a48e0; - --ifm-color-primary-darker: 
#5140dd; - --ifm-color-primary-darkest: #3a27d0; - --ifm-color-primary-light: #7e70ee; - --ifm-color-primary-lighter: #8779f0; - --ifm-color-primary-lightest: #a69af5; - --ifm-code-font-size: 95%; - --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); - - --dagron-status-pending: #a0aec0; - --dagron-status-running: #4299e1; - --dagron-status-completed: #48bb78; - --dagron-status-failed: #f56565; - --dagron-status-skipped: #ed8936; - --dagron-status-timed-out: #ed64a6; - --dagron-status-cancelled: #a0aec0; - --dagron-status-cache-hit: #38b2ac; -} - -[data-theme="dark"] { - --ifm-color-primary: #a29bfe; - --ifm-color-primary-dark: #8b82fe; - --ifm-color-primary-darker: #7d73fd; - --ifm-color-primary-darkest: #5243fd; - --ifm-color-primary-light: #b9b4fe; - --ifm-color-primary-lighter: #c7c3fe; - --ifm-color-primary-lightest: #edecff; - --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); - --ifm-background-color: #1a1a2e; - --ifm-background-surface-color: #16213e; - - --dagron-status-pending: #718096; - --dagron-status-running: #63b3ed; - --dagron-status-completed: #68d391; - --dagron-status-failed: #fc8181; - --dagron-status-skipped: #f6ad55; - --dagron-status-timed-out: #f687b3; - --dagron-status-cancelled: #718096; - --dagron-status-cache-hit: #4fd1c5; -} - -.api-signature { - background: var(--ifm-code-background); - border: 1px solid var(--ifm-color-emphasis-300); - border-radius: var(--ifm-code-border-radius); - padding: 1rem; - margin-bottom: 1rem; - overflow-x: auto; - font-family: var(--ifm-font-family-monospace); - font-size: 0.9rem; - line-height: 1.5; -} - -.api-signature__anchor { - color: var(--ifm-color-primary); - text-decoration: none; - font-weight: 600; -} - -.api-signature__anchor:hover { - text-decoration: underline; -} - -.param-table { - width: 100%; - margin-bottom: 1.5rem; -} - -.param-table th { - background: var(--ifm-color-emphasis-100); - text-align: left; -} - -.param-table td code { - font-size: 0.85rem; -} - -.status-badge 
{ - display: inline-block; - padding: 0.15rem 0.5rem; - border-radius: 999px; - font-size: 0.75rem; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 0.05em; - color: #fff; -} - -.status-badge--pending { - background: var(--dagron-status-pending); -} -.status-badge--running { - background: var(--dagron-status-running); -} -.status-badge--completed { - background: var(--dagron-status-completed); -} -.status-badge--failed { - background: var(--dagron-status-failed); -} -.status-badge--skipped { - background: var(--dagron-status-skipped); -} -.status-badge--timed-out { - background: var(--dagron-status-timed-out); -} -.status-badge--cancelled { - background: var(--dagron-status-cancelled); -} -.status-badge--cache-hit { - background: var(--dagron-status-cache-hit); -} - -.dag-diagram { - margin: 1.5rem 0; -} - -[data-theme="dark"] .dag-diagram .docusaurus-mermaid-container svg .nodeLabel { - color: #e0e0e0; - fill: #e0e0e0; -} - -.feature-card { - border: 1px solid var(--ifm-color-emphasis-200); - border-radius: var(--ifm-global-radius); - padding: 1.25rem; - transition: - box-shadow 0.2s, - border-color 0.2s; -} - -.feature-card:hover { - border-color: var(--ifm-color-primary); - box-shadow: 0 2px 12px rgba(0, 0, 0, 0.08); -} - -.feature-card__title { - font-size: 1.1rem; - font-weight: 600; - margin-bottom: 0.5rem; -} - -.feature-card__description { - font-size: 0.9rem; - color: var(--ifm-font-color-secondary); - margin-bottom: 0.75rem; -} - -.feature-card__links { - display: flex; - gap: 1rem; - font-size: 0.85rem; -} - -.feature-grid { - display: grid; - grid-template-columns: repeat(auto-fill, minmax(280px, 1fr)); - gap: 1rem; - margin: 1.5rem 0; -} diff --git a/docs/src/lib/cn.ts b/docs/src/lib/cn.ts new file mode 100644 index 0000000..a70ebb6 --- /dev/null +++ b/docs/src/lib/cn.ts @@ -0,0 +1,6 @@ +import { type ClassValue, clsx } from "clsx"; +import { twMerge } from "tailwind-merge"; + +export function cn(...inputs: ClassValue[]): string { + 
return twMerge(clsx(inputs)); +} diff --git a/docs/src/lib/layout.shared.tsx b/docs/src/lib/layout.shared.tsx new file mode 100644 index 0000000..1302f16 --- /dev/null +++ b/docs/src/lib/layout.shared.tsx @@ -0,0 +1,38 @@ +import type { BaseLayoutProps } from "fumadocs-ui/layouts/shared"; +import { appName, gitConfig } from "./shared"; + +const PRIMARY_NAV_LINKS = [ + { + text: "Guide", + url: "/guide/getting-started", + }, + { + text: "API", + url: "/api/core/core", + }, + { + text: "Typed & Reactive", + url: "/typed-and-reactive", + }, + { + text: "Changelog", + url: `https://github.com/${gitConfig.user}/${gitConfig.repo}/blob/${gitConfig.branch}/CHANGELOG.md`, + external: true, + }, +]; + +export function baseOptions(): BaseLayoutProps { + return { + nav: { + title: appName, + }, + githubUrl: `https://github.com/${gitConfig.user}/${gitConfig.repo}`, + }; +} + +export function homeOptions(): BaseLayoutProps { + return { + ...baseOptions(), + links: PRIMARY_NAV_LINKS, + }; +} diff --git a/docs/src/lib/shared.ts b/docs/src/lib/shared.ts new file mode 100644 index 0000000..282dd52 --- /dev/null +++ b/docs/src/lib/shared.ts @@ -0,0 +1,10 @@ +export const appName = "dagron"; +export const docsRoute = "/"; +export const docsImageRoute = "/og"; +export const docsContentRoute = "/llms.mdx"; + +export const gitConfig = { + user: "ByteVeda", + repo: "dagron", + branch: "master", +}; diff --git a/docs/src/lib/source.ts b/docs/src/lib/source.ts new file mode 100644 index 0000000..47710c0 --- /dev/null +++ b/docs/src/lib/source.ts @@ -0,0 +1,36 @@ +import { docs } from "collections/server"; +import { loader } from "fumadocs-core/source"; +import { docsContentRoute, docsImageRoute, docsRoute } from "./shared"; + +// See https://fumadocs.dev/docs/headless/source-api for more info +export const source = loader({ + baseUrl: docsRoute, + source: docs.toFumadocsSource(), + plugins: [], +}); + +export function getPageImage(page: (typeof source)["$inferPage"]) { + const segments = 
[...page.slugs, "image.png"]; + + return { + segments, + url: `${docsImageRoute}/${segments.join("/")}`, + }; +} + +export function getPageMarkdownUrl(page: (typeof source)["$inferPage"]) { + const segments = [...page.slugs, "content.md"]; + + return { + segments, + url: `${docsContentRoute}/${segments.join("/")}`, + }; +} + +export async function getLLMText(page: (typeof source)["$inferPage"]) { + const processed = await page.data.getText("processed"); + + return `# ${page.data.title} (${page.url}) + +${processed}`; +} diff --git a/docs/static/.nojekyll b/docs/static/.nojekyll deleted file mode 100644 index e69de29..0000000 diff --git a/docs/static/img/favicon.ico b/docs/static/img/favicon.ico deleted file mode 100644 index c01d54bcd39a5f853428f3cd5aa0f383d963c484..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3626 zcmb`Je@s(X6vrR`EK3%b%orErlDW({vnABqA zcfaS{d+xbU5JKp0*;0YOg+;Fl!eT)XRuapIwFLL`=imZCSon$`se`_<%@MB=M~KG+ z=EW^FL`w|Bo>*ktlaS^(fut!95`iG5u=SZ8nfDHO#GaTlH1-XG^;vsjUb^gWTVz0+ z^=WR1wv9-2oeR=_;fL0H7rNWqAzGtO(D;`~cX(RcN0w2v24Y8)6t`cS^_ghs`_ho? 
z{0ka~1Dgo8TfAP$r*ua?>$_V+kZ!-(TvEJ7O2f;Y#tezt$&R4 zLI}=-y@Z!grf*h3>}DUL{km4R>ya_I5Ag#{h_&?+HpKS!;$x3LC#CqUQ8&nM?X))Q zXAy2?`YL4FbC5CgJu(M&Q|>1st8XXLZ|5MgwgjP$m_2Vt0(J z&Gu7bOlkbGzGm2sh?X`){7w69Y$1#@P@7DF{ZE=4%T0NDS)iH`tiPSKpDNW)zmtn( zw;4$f>k)4$LBc>eBAaTZeCM2(iD+sHlj!qd z2GjRJ>f_Qes(+mnzdA^NH?^NB(^o-%Gmg$c8MNMq&`vm@9Ut;*&$xSD)PKH{wBCEC z4P9%NQ;n2s59ffMn8*5)5AAg4-93gBXBDX`A7S& zH-|%S3Wd%T79fk-e&l`{!?lve8_epXhE{d3Hn$Cg!t=-4D(t$cK~7f&4s?t7wr3ZP z*!SRQ-+tr|e1|hbc__J`k3S!rMy<0PHy&R`v#aJv?`Y?2{avK5sQz%=Us()jcNuZV z*$>auD4cEw>;t`+m>h?f?%VFJZj8D|Y1e_SjxG%J4{-AkFtT2+ZZS5UScS~%;dp!V>)7zi`w(xwSd*FS;Lml=f6hn#jq)2is4nkp+aTrV?)F6N z>DY#SU0IZ;*?Hu%tSj4edd~kYNHMFvS&5}#3-M;mBCOCZL3&;2obdG?qZ>rD|zC|Lu|sny76pn2xl|6sk~Hs{X9{8iBW zwiwgQt+@hi`FYMEhX2 \ No newline at end of file diff --git a/docs/tsconfig.json b/docs/tsconfig.json index 920d7a6..f43f873 100644 --- a/docs/tsconfig.json +++ b/docs/tsconfig.json @@ -1,8 +1,35 @@ { - // This file is not used in compilation. It is here just for a nice editor experience. - "extends": "@docusaurus/tsconfig", "compilerOptions": { - "baseUrl": "." 
+ "target": "ESNext", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "react-jsx", + "incremental": true, + "paths": { + "@/*": ["./src/*"], + "collections/*": ["./.source/*"] + }, + "plugins": [ + { + "name": "next" + } + ] }, - "exclude": [".docusaurus", "build"] + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": ["node_modules"] } From 6f8968adf87f0c3e537cf65efc172f9b30b555be Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 03:02:26 +0530 Subject: [PATCH 11/14] chore(docs): add types pre-commit hook + check script Split the docs biome hook into docs-biome (lint) + docs-types (fumadocs-mdx typegen + tsc --noEmit), so MDX edits trigger the types check too. Add `pnpm check` script that runs both, and `pnpm lint:fix` for the auto-fix flavour. 
--- .pre-commit-config.yaml | 12 +++++++++--- docs/package.json | 4 +++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 258e386..9f91802 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,9 +29,15 @@ repos: language: system types: [rust] pass_filenames: false - - id: biome - name: biome + - id: docs-biome + name: docs biome entry: bash -c 'cd docs && pnpm exec biome check' language: system - files: ^docs/(src/|content/|source\.config\.ts|next\.config\.mjs|biome\.json) + files: ^docs/(src/|content/|source\.config\.ts|next\.config\.mjs|biome\.json|tsconfig\.json|package\.json) + pass_filenames: false + - id: docs-types + name: docs types + entry: bash -c 'cd docs && pnpm types:check' + language: system + files: ^docs/(src/|source\.config\.ts|next\.config\.mjs|tsconfig\.json|package\.json|content/.*\.mdx) pass_filenames: false diff --git a/docs/package.json b/docs/package.json index 2ab6dd3..27b0402 100644 --- a/docs/package.json +++ b/docs/package.json @@ -9,7 +9,9 @@ "types:check": "fumadocs-mdx && next typegen && tsc --noEmit", "postinstall": "fumadocs-mdx", "lint": "biome check", - "format": "biome format --write" + "lint:fix": "biome check --write", + "format": "biome format --write", + "check": "pnpm lint && pnpm types:check" }, "dependencies": { "@orama/orama": "^3.1.18", From 3b2b53e1356aec0693bb2de3db633284cb2817ab Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 03:08:07 +0530 Subject: [PATCH 12/14] ci: install pnpm + docs deps so docs hooks pass The new docs-biome and docs-types pre-commit hooks invoke `cd docs && pnpm exec ...`. The lint job had no Node/pnpm, so both hooks failed with "pnpm: command not found". Add pnpm/action-setup + actions/setup-node + a frozen-lockfile install before pre-commit runs. 
--- .github/workflows/ci.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 760a9c6..89cb7ea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,6 +28,21 @@ jobs: - name: Install Python dependencies run: uv sync --group dev + - name: Set up pnpm + uses: pnpm/action-setup@v4 + with: + version: 10 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: "22" + cache: pnpm + cache-dependency-path: docs/pnpm-lock.yaml + + - name: Install docs dependencies + run: cd docs && pnpm install --frozen-lockfile + - name: Check Rust formatting run: cargo fmt --all --check From f4ba43226f2a90dffc855bdd09615c87a97fe092 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 03:14:37 +0530 Subject: [PATCH 13/14] ci: ignore reactive-bench in pytest, force UTF-8 in mypy snippet MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit test_reactive_bench.py uses pytest-benchmark which isn't a CI dependency — add it to the existing --ignore list. Force UTF-8 when writing the synthesized mypy snippet in test_mypy_reveal_types (Windows default CP1252 broke the em-dash) and replace the em-dash with a plain hyphen for belt-and-braces. 
--- .github/workflows/ci.yml | 2 +- tests/python/test_typing.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 89cb7ea..81908ca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -99,7 +99,7 @@ jobs: args: --release - name: Run tests - run: uv run pytest tests/python/ -v --tb=short --junit-xml=results.xml --ignore=tests/python/test_benchmarks.py + run: uv run pytest tests/python/ -v --tb=short --junit-xml=results.xml --ignore=tests/python/test_benchmarks.py --ignore=tests/python/test_reactive_bench.py - name: Upload test results if: always() diff --git a/tests/python/test_typing.py b/tests/python/test_typing.py index 1ebd36d..295d0ec 100644 --- a/tests/python/test_typing.py +++ b/tests/python/test_typing.py @@ -177,11 +177,13 @@ def pipeline(): return total(raw) result: ExecutionResult = pipeline() - reveal_type(result[fetch_future]) # noqa: F821 — illustrative + reveal_type(result[fetch_future]) # noqa: F821 - illustrative """ ) target = tmp_path / "snippet.py" - target.write_text(snippet) + # Force UTF-8 — Python on Windows defaults to CP1252, but mypy reads the + # file as UTF-8 and chokes on any non-ASCII byte. + target.write_text(snippet, encoding="utf-8") proc = subprocess.run( ["mypy", "--ignore-missing-imports", str(target)], From ba57b44dd17132777c8b273340d830108690fac8 Mon Sep 17 00:00:00 2001 From: Pratyush Sharma <56130065+pratyush618@users.noreply.github.com> Date: Sun, 10 May 2026 03:19:58 +0530 Subject: [PATCH 14/14] ci: replace timing-flaky parallelism test with concurrency counter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit test_two_pure_nodes_run_in_parallel asserted elapsed < 0.15s; macOS CI runner blew that with 0.158s. 
Replace the wall-clock check with a threading.Barrier(2) + max_active counter — deterministic: both tasks must enter the critical section together for the assertion to hold, otherwise the barrier times out. --- tests/python/test_effects.py | 67 ++++++++++++++++++++---------------- 1 file changed, 37 insertions(+), 30 deletions(-) diff --git a/tests/python/test_effects.py b/tests/python/test_effects.py index ce6ef37..e530840 100644 --- a/tests/python/test_effects.py +++ b/tests/python/test_effects.py @@ -202,56 +202,63 @@ def test_effects_of_untagged_dag_defaults_to_pure(self): class TestExecutorIsolation: def test_two_pure_nodes_run_in_parallel(self): - """When isolation is enforced, PURE tasks should still parallelize.""" - # Two parallel-ready pure nodes (no dependency between them). - # Suppress the AST-scan warning — the time.sleep here is just to - # make parallelism observable. - with warnings.catch_warnings(): - warnings.simplefilter("ignore", UserWarning) + """When isolation is enforced, PURE tasks should still parallelize. - @task - def slow_pure_a() -> str: - time.sleep(0.05) - return "a" + Use a concurrency counter (max_active) instead of wall-clock timing + — wall-clock thresholds are flaky on slow CI runners. + """ - @task - def slow_pure_b() -> str: - time.sleep(0.05) - return "b" + @task + def pure_a() -> None: + return None + + @task + def pure_b() -> None: + return None @flow def pipeline(): - slow_pure_a() - return slow_pure_b() + pure_a() + return pure_b() - # Build dag and run with isolation dag = pipeline.dag() - def _sleep_a() -> str: - time.sleep(0.05) - return "a" + active = 0 + max_active = 0 + lock = threading.Lock() + # Both tasks must be inside the critical section at the same instant + # for max_active to reach 2; the barrier guarantees they overlap. 
+ barrier = threading.Barrier(2, timeout=2.0) + + def make_fn() -> Callable[[], None]: + def fn() -> None: + nonlocal active, max_active + barrier.wait() + with lock: + active += 1 + max_active = max(max_active, active) + time.sleep(0.02) + with lock: + active -= 1 - def _sleep_b() -> str: - time.sleep(0.05) - return "b" + return fn tasks_dict: dict[str | NodeRef, Callable[[], Any]] = { - "slow_pure_a": _sleep_a, - "slow_pure_b": _sleep_b, + "pure_a": make_fn(), + "pure_b": make_fn(), } executor = DAGExecutor( dag, max_workers=2, enforce_effect_isolation=True, ) - t0 = time.monotonic() result = executor.execute(tasks_dict) - elapsed = time.monotonic() - t0 assert result.succeeded == 2 - # Two 50ms sleeps, run in parallel, should finish in < 90ms. - # Generous bound to avoid CI flakiness. - assert elapsed < 0.15, f"PURE tasks did not parallelize ({elapsed:.3f}s)" + assert max_active == 2, ( + f"PURE tasks did not run concurrently (max_active={max_active}); " + "isolation incorrectly serialised non-ND tasks." + ) def test_two_nondeterministic_nodes_serialize(self): """Under isolation, two NONDETERMINISTIC tasks must NOT overlap."""