diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 26d41f42923..4b65c5777e7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,6 +35,7 @@ jobs: upload_libs: true node-target: linux-x64 rust-target: x86_64-unknown-linux-musl + dune-profile: static - os: ubuntu-24.04-arm # ARM ocaml_compiler: ocaml-variants.5.3.0+options,ocaml-option-static upload_binaries: true @@ -44,34 +45,40 @@ jobs: benchmarks: true node-target: linux-arm64 rust-target: aarch64-unknown-linux-musl + dune-profile: static - os: macos-15-intel # x64 ocaml_compiler: 5.3.0 upload_binaries: true node-target: darwin-x64 rust-target: x86_64-apple-darwin + dune-profile: release - os: macos-15 # ARM ocaml_compiler: 5.3.0 upload_binaries: true node-target: darwin-arm64 rust-target: aarch64-apple-darwin + dune-profile: release - os: windows-2025 ocaml_compiler: 5.3.0 upload_binaries: true node-target: win32-x64 rust-target: x86_64-pc-windows-gnu exe-suffix: ".exe" + dune-profile: release # Verify that the compiler still builds with the oldest OCaml version we support. - os: ubuntu-24.04 ocaml_compiler: ocaml-variants.5.0.0+options,ocaml-option-static node-target: linux-x64 rust-target: x86_64-unknown-linux-musl + dune-profile: static runs-on: ${{matrix.os}} env: # When changing the setup-ocaml version, also adjust it in the setup step further below. SETUP_OCAML_VERSION: 3.4.6 # OPAM <2.6.0 + DUNE_PROFILE: ${{ matrix.dune-profile }} steps: - name: "Windows: Set git config" @@ -264,12 +271,7 @@ jobs: key: ${{ steps.compiler-build-state-key.outputs.value }} - name: Build compiler - if: runner.os != 'Linux' - run: opam exec -- dune build --display quiet --profile release - - - name: Build compiler (Linux static) - if: runner.os == 'Linux' - run: opam exec -- dune build --display quiet --profile static + run: opam exec -- dune build --display quiet - name: Delete stable compiler build state if: github.event_name == 'push' && github.ref == 'refs/heads/master' diff --git a/CHANGELOG.md b/CHANGELOG.md index 856e6d1e929..7c3f02e324b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,9 +10,37 @@ > - :nail_care: [Polish] > - :house: [Internal] +# 12.3.0-beta.1 + +#### :rocket: New Feature + +- Reanalyze: add glob pattern support for suppress/unsuppress configurations (e.g., `"src/generated/**"`). https://github.com/rescript-lang/rescript/pull/8277 +- Add optional `~locales` and `~options` parameters to `String.localeCompare`. https://github.com/rescript-lang/rescript/pull/8287 + +#### :bug: Bug fix + +- Fix `null` and array values incorrectly matching the `Object` branch when pattern matching on `JSON.t` (or other untagged variants with an `Object` case) in statement position. https://github.com/rescript-lang/rescript/pull/8279 +- Fix rewatch panic when `package.json` has no `name` field. https://github.com/rescript-lang/rescript/pull/8291 +- Fix unpacking first-class module in default argument of react component. https://github.com/rescript-lang/rescript/pull/8296 +- Fix exception record field regression. https://github.com/rescript-lang/rescript/pull/8319 +- Rewatch: ignore stale lock for unrelated process name. https://github.com/rescript-lang/rescript/pull/8316 +- Fix partial application generalization for `...`. https://github.com/rescript-lang/rescript/pull/8343 +- Preserve JSX prop locations across the AST0 translation layer, fixing `0:0` editor diagnostics in PPX-related flows. https://github.com/rescript-lang/rescript/pull/8350 +- Allow builds while watchers are running. 
https://github.com/rescript-lang/rescript/pull/8349 +- Rewatch: preserve warnings after atomic-save full rebuilds. https://github.com/rescript-lang/rescript/pull/8358 +- Fix type lowering for `dict{}` and `async`, so you don't need to annotate one extra time when the type is known. https://github.com/rescript-lang/rescript/pull/8359 +- Fix issue where warning 56 would blow up with `dict{}` patterns. https://github.com/rescript-lang/rescript/pull/8403 + +#### :nail_care: Polish + +- Reanalyze server: redesign incremental fixpoint with delete-then-rederive strategy and predecessor tracking, improving speed on deletions. https://github.com/rescript-lang/rescript/pull/8276 +- Improve error message for dependency without `rescript.json`. https://github.com/rescript-lang/rescript/pull/8292 +- Resolve workspace dependencies in editor analysis. https://github.com/rescript-lang/rescript/pull/8392 +- Improve deprecated attribute extraction and support record form. https://github.com/rescript-lang/rescript/pull/8396 + # 12.2.0 -#### :boom: Breaking Change +#### :rocket: New Feature - Stdlib: Added Array.zip, Array.unzip, Array.zipBy, and Array.partition. https://github.com/rescript-lang/rescript/pull/8244 diff --git a/analysis/reactive/IncrementalFixpointReport.md b/analysis/reactive/IncrementalFixpointReport.md new file mode 100644 index 00000000000..304d994f2e8 --- /dev/null +++ b/analysis/reactive/IncrementalFixpointReport.md @@ -0,0 +1,201 @@ +# Incremental Fixpoint for Reactive Analysis + +## Introduction + +When a source file changes in a codebase, analysis results that depend on it may become stale. Recomputing the entire analysis from scratch is correct but wasteful: typically only a small region of the dependency graph is affected. The incremental fixpoint algorithm described here maintains the set of reachable nodes in a dynamic directed graph, updating it efficiently as roots and edges change. It is the core propagation engine behind the reactive analysis server. + +This report covers the problem definition, the algorithm and its key design ideas, correctness instrumentation, and empirical results from a replay-based evaluation on a real codebase. + +## Problem Definition + +The algorithm maintains transitive reachability over a directed graph that evolves over time. + +**State.** The graph is defined by: + +- A set of **roots** R, the starting points of reachability. +- An **edge relation** E mapping each node to its list of successors. +- A **reachable set** C, the nodes reachable from R through E. + +The fundamental invariant is: + +> **C = Reach(R, E)** — the least fixed point of forward reachability from R through E. + +**Updates.** The graph changes in discrete *waves*. Each wave supplies: + +- **Root updates**: a list of `(k, unit option)` entries. `Some ()` adds a root; `None` removes one. +- **Edge updates**: a list of `(k, k list option)` entries. `Some succs` sets the successors of k; `None` removes all edges from k. + +**Output.** Each wave returns a list of *delta entries* describing how C changed: + +- `(k, Some ())` — node k became reachable (was not in C before, is now). +- `(k, None)` — node k became unreachable (was in C before, is no longer). + +The output represents the *net effect* of the wave: if a node is tentatively removed and then recovered within the same wave, no output is emitted for it. 
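+
+As a concrete illustration of these semantics, the sketch below pushes two
+waves through the `ReactiveFixpoint` API introduced later in this PR (see
+`ReactiveFixpoint.mli`). String keys are purely illustrative, and the module
+is private to the `reactive` library, so treat this as a sketch of the wave
+contract rather than a public usage pattern:
+
+```ocaml
+let () =
+  let t : string ReactiveFixpoint.t = ReactiveFixpoint.create () in
+  (* Wave 1: add root "a" and edges a -> b, b -> c. All three nodes become
+     reachable, so three add-deltas come back (order unspecified). *)
+  let wave1 =
+    ReactiveFixpoint.apply t
+      ~init_entries:[("a", Some ())]
+      ~edge_entries:[("a", Some ["b"]); ("b", Some ["c"])]
+  in
+  assert (List.length wave1 = 3);
+  assert (List.for_all (fun (_, v) -> v = Some ()) wave1);
+  (* Wave 2: drop the edge a -> b. Nodes "b" and "c" lose their only path
+     from the root, so exactly two remove-deltas are emitted; "a" stays
+     reachable and produces no entry. *)
+  let wave2 =
+    ReactiveFixpoint.apply t ~init_entries:[] ~edge_entries:[("a", Some [])]
+  in
+  assert (List.length wave2 = 2);
+  assert (List.for_all (fun (_, v) -> v = None) wave2)
+```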
+ +## Data Structures + +The algorithm maintains four mutable structures: + +| Structure | Purpose | +|-----------|---------| +| `current` | The reachable set C (hash set of nodes) | +| `roots` | The root set R (hash set) | +| `edge_map` | Forward edges: node -> successor list | +| `pred_map` | Reverse edges: node -> set of predecessors | + +The predecessor map (`pred_map`) is the key auxiliary structure. It enables efficient *support checking*: given a tentatively deleted node, we can quickly determine whether any live predecessor still points to it, without scanning the entire graph. The cost of maintaining `pred_map` is proportional to edge updates: each edge addition or removal requires a constant-time insert or delete in the predecessor set of the target node. This is amortized across normal edge-update processing and adds no asymptotic overhead. + +## Algorithm + +### Design Intuition + +When edges or roots are removed, some previously reachable nodes may lose all paths from the roots. The challenge is determining *which* nodes are still reachable without recomputing from scratch. + +The algorithm uses a **delete-then-rederive** strategy: + +1. **Delete pessimistically** — starting from invalidation points (removed roots, targets of removed edges), propagate tentative deletions forward through the *old* edges. This marks every node that *might* have lost reachability. + +2. **Rederive optimistically** — scan the tentatively deleted nodes and recover those that still have *support* in the *new* state: either they are roots, or at least one live predecessor still points to them. Recovered nodes propagate support to their successors. + +3. **Expand forward** — from newly added roots and nodes with new outgoing edges, discover newly reachable nodes via forward BFS. + +This two-pass approach (pessimistic deletion followed by optimistic recovery) is both simple and correct: it avoids the complexity of trying to determine "true" deletions upfront, while ensuring the final state is exactly the least fixed point. + +### Phases in Detail + +The `apply` function executes one wave in the following phases: + +**Phase 1 — Analyze changes.** Before modifying any state, examine each edge update against the current edge map. For each updated source node, compute which successor edges were removed (`removed_targets`) and whether any new edges were added (`has_new_edge`). This pre-analysis drives the deletion and expansion phases. + +**Phase 2 — Tentative deletion.** Seed a deletion queue with: +- Roots being removed (if they were in C). +- Targets of removed edges (if they were in C). + +Propagate forward through the *old* edges: for each node popped from the queue, enqueue its old successors that are still in C. This produces `deleted_nodes`, a set of nodes that may have lost reachability. + +The deletion uses old edges deliberately: we need to follow the paths that *used to* provide reachability, since those are the paths that may have broken. + +Next, apply root and edge updates to the state (updating `roots`, `edge_map`, and `pred_map`), then remove all deleted nodes from `current`. + +**Phase 3 — Re-derivation.** Scan the deleted nodes. A deleted node is *supported* if: +- It is a root (k in R), OR +- It has at least one predecessor p such that p is in `current` and k is in E(p). + +Supported nodes are added back to `current` and their successors are checked recursively. This phase only adds nodes back; it never removes them. 
When it converges, every deleted node that had an alternative reachability path has been recovered. + +**Phase 4 — Expansion.** Starting from newly added roots and nodes whose edge updates introduced new successors, perform forward BFS to discover all newly reachable nodes. Each newly reached node is added to `current` and an add-delta is emitted — unless the node was tentatively deleted earlier in this same wave (in which case it is a no-op recovery, not a net change). + +**Phase 5 — Emit removals.** For each node in `deleted_nodes` that is *not* in the final `current`, emit a remove-delta. These are the nodes that were genuinely lost. + +### Worked Example + +Consider a graph with root A and edges A->B, A->X, B->D, X->D: + +``` + A (root) + / \ + B X + \ / + D +``` + +The reachable set is C = {A, B, X, D}. + +**Wave: remove the edge A->B.** + +1. *Analyze*: Edge A->B is removed. `removed_targets = [B]`, the target of the dropped edge. + +2. *Tentative deletion*: Seed the delete queue with B (target of removed edge, currently in C). Propagate forward through old edges: B->D, so D is also tentatively deleted. `deleted_nodes = {B, D}`. Remove B and D from `current`. Now `current = {A, X}`. + +3. *Re-derivation*: Check each deleted node for support. + - B: not a root. Predecessors of B = {A}. Is A in current? Yes. Is B in A's *new* successors? A's new edges are [X] (we removed A->B). So A does not point to B anymore. B is **not supported**. B stays deleted. + - D: not a root. Predecessors of D = {B, X}. B is not in current. X is in current. Is D in X's successors? X->D exists and was not modified. D **is supported**. Add D back to `current`. + + After re-derivation: `current = {A, X, D}`. + +4. *Expansion*: No new roots or new edges added. Nothing to expand. + +5. *Emit removals*: `deleted_nodes = {B, D}`. D is back in `current`, so no removal for D. B is not in `current`. Emit `(B, None)`. + +**Output: `[(B, None)]`** — only B was lost. D survived because it had an alternative path through X. + +### Complexity + +- **Best case** (additions only, no deletions): work is proportional to the number of newly reachable nodes. +- **Typical case** (localized changes): work is proportional to the affected region of the graph — the tentatively deleted subgraph plus newly reachable nodes. +- **Worst case** (removing a root that reaches everything): degrades to a full BFS over the entire graph. + +Space is O(|C| + |E|) for the reachable set, edge map, and predecessor map. + +## Correctness Instrumentation + +Debug invariants are implemented in the `Invariants` submodule and enabled by setting `RESCRIPT_REACTIVE_FIXPOINT_ASSERT=1`. They validate the algorithm's internal consistency after each phase: + +1. **Edge change consistency** — `removed_targets` and `has_new_edge` match the actual diff between old and new successor lists. +2. **Deletion closure** — deleted nodes form a forward-closed set under the old edges (no reachable successor of a deleted node is left undeleted). +3. **Post-deletion state** — `current` equals the pre-wave reachable set minus `deleted_nodes`. +4. **Re-derivation convergence** — no supported node remains outside `current` after Phase 3. +5. **Removal output correctness** — emitted removal deltas match `deleted_nodes \ current` (nodes deleted but not recovered). +6. **Final closure and delta correctness** — `current` equals `Reach(R, E)` computed by a fresh BFS, and the full output (both adds and removes) matches the set difference between pre-wave and post-wave `current`. 
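+
+To make the flavor of these checks concrete, the following is a simplified,
+self-contained sketch of check 6's core comparison: recompute reachability
+from scratch with a fresh BFS and require set equality with the maintained
+`current`. It mirrors what `assert_final_fixpoint_and_delta` does in
+`ReactiveFixpoint.ml`, minus the delta bookkeeping:
+
+```ocaml
+(* Hash sets are represented as ('k, unit) Hashtbl.t, as in the real code. *)
+let assert_closure ~roots ~edge_map ~current =
+  let reachable = Hashtbl.create (Hashtbl.length current) in
+  let frontier = Queue.create () in
+  Hashtbl.iter
+    (fun k () ->
+      Hashtbl.replace reachable k ();
+      Queue.add k frontier)
+    roots;
+  while not (Queue.is_empty frontier) do
+    let k = Queue.pop frontier in
+    List.iter
+      (fun succ ->
+        if not (Hashtbl.mem reachable succ) then (
+          Hashtbl.replace reachable succ ();
+          Queue.add succ frontier))
+      (Option.value (Hashtbl.find_opt edge_map k) ~default:[])
+  done;
+  let subset a b = Hashtbl.fold (fun k () ok -> ok && Hashtbl.mem b k) a true in
+  if not (subset reachable current && subset current reachable) then
+    failwith "invariant violated: current <> Reach(roots, edge_map)"
+```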
+ +These checks add significant overhead and are intended for testing and validation, not production use. + +## Empirical Evaluation + +### Setup + +The algorithm was evaluated using a replay-based benchmark. The workload replays 56 sequential commits from a real project (Hyperindex), with invariant assertions and metrics collection enabled throughout. For each commit, the script runs the incremental (server-backed) analysis and a cold baseline that recomputes the full analysis from scratch (with `RESCRIPT_REANALYZE_NO_SERVER=1`), comparing both for timing and failure behavior. + +Average change size per commit: 1.3 files, 5.9 insertions, 47.2 deletions (range: 1–5 changed files per commit). + +### Results + +**Fixpoint internal metrics** (aggregated over 56 waves): + +| Category | Metric | Value | +|----------|--------|-------| +| **Throughput** | Root entries processed | 9,926 | +| | Edge entries processed | 46,836 | +| | Output deltas emitted | 5,241 | +| **Deletion/recovery** | Nodes tentatively deleted | 3,465 | +| | Nodes re-derived (recovered) | 874 | +| | Waves involving re-derivation | 34 / 56 (61%) | +| **Incremental efficiency** | Edge work vs full recompute | 16.7% | +| | Node work vs full recompute | 95.5% | +| **Per-wave maxima** | Max root entries | 330 | +| | Max edge entries | 1,670 | +| | Max deleted nodes | 574 | +| | Max re-derived nodes | 133 | + +### Interpretation + +**Re-derivation is central, not exceptional.** 61% of waves trigger the delete-then-rederive path. The worked example above (where D survives deletion because of an alternative path through X) is the common case in real dependency graphs, not a corner case. + +**Edge-work savings are substantial.** The incremental algorithm traverses only 16.7% of the edges that a full recomputation would visit — an 83% reduction. This confirms that localized source changes produce localized graph updates. + +**Node-work savings are modest.** Node-side bookkeeping (queue operations, hash table lookups) runs at 95.5% of the full baseline. This is likely because the node-work counter counts queue pops and hash table membership checks, which are O(1) operations that dominate BFS cost regardless of how many edges are skipped. In other words, the algorithm successfully avoids traversing most edges, but still touches most nodes during the deletion and re-derivation sweeps. Reducing this overhead would require more targeted invalidation — for example, bounding the deletion frontier using dominator information or topological depth — rather than optimizing individual operations. + +**Output is much smaller than input.** 56,762 input entries produce only 5,241 output deltas — a 10.8x compression ratio. Most of the internal work cancels out, consistent with the high re-derivation rate. + +## Comparison to Alternatives + +Several strategies exist for maintaining transitive reachability under updates: + +- **Full recomputation**: Rerun BFS/DFS from all roots after every change. Correct and simple, but wasteful when changes are localized. The empirical results show the incremental algorithm traverses only 16.7% of the edges that full recomputation would visit. + +- **Reference counting**: Track the number of paths reaching each node and remove it when the count drops to zero. 
This is efficient for pure deletions but fails in the presence of cycles (counts never reach zero) and does not naturally handle the case where a node loses one path but retains another through a different predecessor — precisely the re-derivation scenario that occurs in 61% of waves here. + +- **DFS-based marking**: Mark nodes as "dirty" and re-verify reachability via DFS from roots. This avoids false deletions but may revisit large portions of the graph when many nodes are marked dirty, offering less control over the re-derivation scope than the delete-then-rederive approach. + +The delete-then-rederive strategy used here combines the simplicity of pessimistic deletion with efficient recovery via the predecessor map, avoiding the cycle problems of reference counting and the unbounded re-traversal of DFS marking. + +## Limitations + +1. Results are from a single replay corpus and commit ordering. Different projects or change patterns may yield different efficiency ratios. +2. Work counters (node pops, edge scans) are algorithmic proxies, not hardware-level measurements. + +## Conclusion + +The incremental fixpoint algorithm maintains exact transitive closure through a delete-then-rederive strategy that is both simple to reason about and effective in practice. Assertion-enabled replay over 56 real commits confirms correctness, while metrics show an 83% reduction in edge traversals compared to full recomputation. The data also reveals that re-derivation is a routine operation (occurring in 61% of waves) rather than a rare edge case, validating the algorithm's two-pass design. + +The primary optimization opportunity lies in reducing node-side bookkeeping overhead, which currently runs near the full-recomputation baseline despite the large edge-work savings. diff --git a/analysis/reactive/experiments/hyperindex_replay_build_times.sh b/analysis/reactive/experiments/hyperindex_replay_build_times.sh new file mode 100755 index 00000000000..4b6485749c0 --- /dev/null +++ b/analysis/reactive/experiments/hyperindex_replay_build_times.sh @@ -0,0 +1,301 @@ +#!/bin/zsh +set -euo pipefail + +# Fixed benchmark range/repo: +START_REF="benchmark/rescript-baseline" +END_REF="benchmark/rescript-followup" +HYPERINDEX_REPO="/Users/cristianocalcagno/GitHub/hyperindex" +OUT_DIR="${OUT_DIR:-/tmp/hyperindex-replay-times-refs}" +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +RESCRIPT_REPO="$(cd "$SCRIPT_DIR/../../.." && pwd)" +TOOLS_BIN="$RESCRIPT_REPO/_build/default/tools/bin/main.exe" +SOCKET_FILE="$HYPERINDEX_REPO/.rescript-reanalyze.sock" +SERVER_LOG="$OUT_DIR/reactive-server.log" +SERVER_PID="" +FIXPOINT_ASSERT="${FIXPOINT_ASSERT:-1}" +FIXPOINT_METRICS="${FIXPOINT_METRICS:-1}" + +mkdir -p "$OUT_DIR" + +cd "$HYPERINDEX_REPO" +ORIG_REF="$(git rev-parse --abbrev-ref HEAD)" + +stop_server() { + if [[ -n "$SERVER_PID" ]] && kill -0 "$SERVER_PID" 2>/dev/null; then + # Ask server to flush fixpoint metrics before shutdown. + kill -USR1 "$SERVER_PID" 2>/dev/null || true + sleep 0.2 + # Prefer SIGINT so OCaml runtime has a chance to run at_exit hooks. + kill -INT "$SERVER_PID" 2>/dev/null || true + for _ in {1..30}; do + if ! kill -0 "$SERVER_PID" 2>/dev/null; then + break + fi + sleep 0.1 + done + if kill -0 "$SERVER_PID" 2>/dev/null; then + kill -TERM "$SERVER_PID" 2>/dev/null || true + fi + wait "$SERVER_PID" 2>/dev/null || true + fi + rm -f "$SOCKET_FILE" 2>/dev/null || true +} + +cleanup() { + stop_server + git checkout -q "$ORIG_REF" >/dev/null 2>&1 || true +} +trap cleanup EXIT + +if [[ ! 
-x "$TOOLS_BIN" ]]; then + echo "missing tools binary: $TOOLS_BIN" >&2 + echo "build it from rescript repo root with: make" >&2 + exit 1 +fi + +if ! git rev-parse --verify "$START_REF" >/dev/null 2>&1; then + echo "missing ref: $START_REF" >&2 + exit 1 +fi +if ! git rev-parse --verify "$END_REF" >/dev/null 2>&1; then + echo "missing ref: $END_REF" >&2 + exit 1 +fi + +COMMITS=(${(@f)$(git rev-list --first-parent --reverse "$START_REF..$END_REF")}) +TOTAL=${#COMMITS[@]} + +SUMMARY="$OUT_DIR/summary.tsv" +echo -e "idx\tcommit\tbuild_status\tbuild_real_seconds\treactive_status\treactive_real_seconds\treactive_issue_count\tcold_status\tcold_real_seconds\treactive_vs_cold_pct\tchanged_files\tinsertions\tdeletions" > "$SUMMARY" + +echo "Starting reactive server with debug assertions..." +rm -f "$SOCKET_FILE" "$SERVER_LOG" +RESCRIPT_REACTIVE_FIXPOINT_ASSERT="$FIXPOINT_ASSERT" \ +RESCRIPT_REACTIVE_FIXPOINT_METRICS="$FIXPOINT_METRICS" \ +"$TOOLS_BIN" reanalyze-server >"$SERVER_LOG" 2>&1 & +SERVER_PID=$! + +for _ in {1..60}; do + if [[ -S "$SOCKET_FILE" ]]; then + break + fi + sleep 0.1 +done + +if [[ ! -S "$SOCKET_FILE" ]]; then + echo "reactive server failed to start; log follows:" >&2 + cat "$SERVER_LOG" >&2 + exit 1 +fi + +echo "Replay start: $START_REF..$END_REF ($TOTAL commits)" + +idx=0 +for c in $COMMITS; do + idx=$((idx+1)) + echo "[$idx/$TOTAL] $c" + + git checkout -q "$c" + + BUILD_LOG="$OUT_DIR/${idx}_${c}.build.log" + TIME_LOG="$OUT_DIR/${idx}_${c}.time.log" + REACTIVE_JSON="$OUT_DIR/${idx}_${c}.reactive.json" + REACTIVE_TIME_LOG="$OUT_DIR/${idx}_${c}.reactive.time.log" + REACTIVE_STDERR="$OUT_DIR/${idx}_${c}.reactive.stderr.log" + COLD_JSON="$OUT_DIR/${idx}_${c}.cold.json" + COLD_TIME_LOG="$OUT_DIR/${idx}_${c}.cold.time.log" + + set +e + change_stats="$(git show --numstat --format='' "$c" | awk ' + BEGIN {files=0; ins=0; del=0} + NF==3 { + files++ + if ($1 != "-") ins += $1 + if ($2 != "-") del += $2 + } + END {printf "%d %d %d", files, ins, del} + ')" + change_stats_rc=$? + set -e + if [[ $change_stats_rc -ne 0 || -z "${change_stats:-}" ]]; then + changed_files="NA" + insertions="NA" + deletions="NA" + else + changed_files="$(echo "$change_stats" | awk '{print $1}')" + insertions="$(echo "$change_stats" | awk '{print $2}')" + deletions="$(echo "$change_stats" | awk '{print $3}')" + fi + + set +e + /usr/bin/time -p pnpm exec rescript >"$BUILD_LOG" 2>"$TIME_LOG" + build_rc=$? + set -e + + build_real="$(awk '/^real / {print $2}' "$TIME_LOG" | tail -n 1)" + [[ -z "${build_real:-}" ]] && build_real="NA" + + if [[ $build_rc -eq 0 ]]; then + build_status="ok" + else + build_status="fail($build_rc)" + fi + + if [[ "$build_status" != "ok" ]]; then + echo -e "${idx}\t${c}\t${build_status}\t${build_real}\tskipped\tNA\tNA\tskipped\tNA\tNA\t${changed_files}\t${insertions}\t${deletions}" >> "$SUMMARY" + echo "stop: commit $c build_status=$build_status" >&2 + exit 2 + fi + + set +e + /usr/bin/time -p "$TOOLS_BIN" reanalyze -json >"$REACTIVE_JSON" 2>"$REACTIVE_TIME_LOG" + reactive_rc=$? 
+ set -e + + reactive_real="$(awk '/^real / {print $2}' "$REACTIVE_TIME_LOG" | tail -n 1)" + [[ -z "${reactive_real:-}" ]] && reactive_real="NA" + + if [[ $reactive_rc -eq 0 ]]; then + reactive_status="ok" + reactive_issues="$(python3 -c "import json; print(len(json.load(open('$REACTIVE_JSON'))))" 2>"$REACTIVE_STDERR" || true)" + [[ -z "${reactive_issues:-}" ]] && reactive_issues="NA" + else + reactive_status="fail($reactive_rc)" + reactive_issues="NA" + fi + + if [[ "$reactive_status" != "ok" ]]; then + echo -e "${idx}\t${c}\t${build_status}\t${build_real}\t${reactive_status}\t${reactive_real}\t${reactive_issues}\tskipped\tNA\tNA\t${changed_files}\t${insertions}\t${deletions}" >> "$SUMMARY" + echo "stop: commit $c reactive_status=$reactive_status" >&2 + echo "server log: $SERVER_LOG" >&2 + exit 3 + fi + + set +e + /usr/bin/time -p env RESCRIPT_REANALYZE_NO_SERVER=1 "$TOOLS_BIN" reanalyze -json >"$COLD_JSON" 2>"$COLD_TIME_LOG" + cold_rc=$? + set -e + + cold_real="$(awk '/^real / {print $2}' "$COLD_TIME_LOG" | tail -n 1)" + [[ -z "${cold_real:-}" ]] && cold_real="NA" + + if [[ $cold_rc -eq 0 ]]; then + cold_status="ok" + else + cold_status="fail($cold_rc)" + fi + + if [[ "$cold_status" == "ok" && "$reactive_real" != "NA" && "$cold_real" != "NA" ]]; then + reactive_vs_cold_pct="$(awk -v r="$reactive_real" -v c="$cold_real" 'BEGIN { if (c == 0) print "0.00"; else printf "%.2f", (100.0 * r / c) }')" + else + reactive_vs_cold_pct="NA" + fi + + echo -e "${idx}\t${c}\t${build_status}\t${build_real}\t${reactive_status}\t${reactive_real}\t${reactive_issues}\t${cold_status}\t${cold_real}\t${reactive_vs_cold_pct}\t${changed_files}\t${insertions}\t${deletions}" >> "$SUMMARY" + + if [[ "$cold_status" != "ok" ]]; then + echo "stop: commit $c cold_status=$cold_status" >&2 + exit 4 + fi +done + +stop_server + +awk -F '\t' \ + -v start_ref="$START_REF" \ + -v end_ref="$END_REF" \ + ' + function update_num(x, kind, v) { + if (x == "NA" || x == "") return + v = x + 0 + if (kind == "build") { + build_sum += v + if (build_min == "" || v < build_min) build_min = v + if (build_max == "" || v > build_max) build_max = v + build_n++ + } else if (kind == "reactive") { + reactive_sum += v + if (reactive_min == "" || v < reactive_min) reactive_min = v + if (reactive_max == "" || v > reactive_max) reactive_max = v + reactive_n++ + } else if (kind == "issues") { + issues_sum += v + issues_n++ + } else if (kind == "cold") { + cold_sum += v + if (cold_min == "" || v < cold_min) cold_min = v + if (cold_max == "" || v > cold_max) cold_max = v + cold_n++ + } else if (kind == "ratio") { + ratio_sum += v + ratio_n++ + } else if (kind == "files") { + files_sum += v + files_n++ + } else if (kind == "ins") { + ins_sum += v + ins_n++ + } else if (kind == "del") { + del_sum += v + del_n++ + } + } + NR==1 {next} + { + if ($3=="ok") { + build_ok++ + update_num($4, "build") + } else { + build_fail++ + } + + if ($5=="ok") { + reactive_ok++ + update_num($6, "reactive") + update_num($7, "issues") + } else if ($5=="skipped") { + reactive_skipped++ + } else { + reactive_fail++ + } + + if ($8=="ok") { + cold_ok++ + update_num($9, "cold") + } else if ($8=="skipped") { + cold_skipped++ + } else { + cold_fail++ + } + + update_num($10, "ratio") + update_num($11, "files") + update_num($12, "ins") + update_num($13, "del") + } + END { + printf "START=%s END=%s TOTAL=%d\n", start_ref, end_ref, NR-1 + printf "BUILD_OK=%d BUILD_FAIL=%d BUILD_AVG_OK=%.3f BUILD_MIN_OK=%.3f BUILD_MAX_OK=%.3f\n", + build_ok, build_fail, (build_n?build_sum/build_n:0), 
(build_n?build_min:0), (build_n?build_max:0) + printf "REACTIVE_OK=%d REACTIVE_FAIL=%d REACTIVE_SKIPPED=%d REACTIVE_AVG_OK=%.3f REACTIVE_MIN_OK=%.3f REACTIVE_MAX_OK=%.3f REACTIVE_AVG_ISSUES=%.2f\n", + reactive_ok, reactive_fail, reactive_skipped, (reactive_n?reactive_sum/reactive_n:0), (reactive_n?reactive_min:0), (reactive_n?reactive_max:0), (issues_n?issues_sum/issues_n:0) + printf "COLD_OK=%d COLD_FAIL=%d COLD_SKIPPED=%d COLD_AVG_OK=%.3f COLD_MIN_OK=%.3f COLD_MAX_OK=%.3f\n", + cold_ok, cold_fail, cold_skipped, (cold_n?cold_sum/cold_n:0), (cold_n?cold_min:0), (cold_n?cold_max:0) + printf "REACTIVE_VS_COLD_PCT_AVG=%.2f\n", (ratio_n?ratio_sum/ratio_n:0) + printf "CHANGE_STATS_AVG_FILES=%.2f CHANGE_STATS_AVG_INSERTIONS=%.2f CHANGE_STATS_AVG_DELETIONS=%.2f CHANGE_STATS_TOTAL_INSERTIONS=%.0f CHANGE_STATS_TOTAL_DELETIONS=%.0f\n", + (files_n?files_sum/files_n:0), (ins_n?ins_sum/ins_n:0), (del_n?del_sum/del_n:0), ins_sum, del_sum + }' "$SUMMARY" > "$OUT_DIR/stats.txt" + +METRICS_SUMMARY="$( (grep -F '[ReactiveFixpointMetrics]' "$SERVER_LOG" || true) | tail -n 1 | sed -E 's/^.*\[ReactiveFixpointMetrics\] //')" +if [[ -n "${METRICS_SUMMARY:-}" ]]; then + echo "$METRICS_SUMMARY" > "$OUT_DIR/reactive_fixpoint_metrics.txt" + echo "REACTIVE_FIXPOINT_METRICS=$METRICS_SUMMARY" >> "$OUT_DIR/stats.txt" +fi + +echo +echo "done: $OUT_DIR" +echo "reactive server log: $SERVER_LOG" +if [[ -n "${METRICS_SUMMARY:-}" ]]; then + echo "reactive fixpoint metrics: $OUT_DIR/reactive_fixpoint_metrics.txt" +fi +cat "$OUT_DIR/stats.txt" diff --git a/analysis/reactive/src/Reactive.ml b/analysis/reactive/src/Reactive.ml index 8b934c0b34c..9db201901dc 100644 --- a/analysis/reactive/src/Reactive.ml +++ b/analysis/reactive/src/Reactive.ml @@ -1077,8 +1077,7 @@ let fixpoint ~name ~(init : ('k, unit) t) ~(edges : ('k, 'k list) t) () : let my_level = max init.level edges.level + 1 in (* Internal state *) - let current : ('k, unit) Hashtbl.t = Hashtbl.create 256 in - let edge_map : ('k, 'k list) Hashtbl.t = Hashtbl.create 256 in + let state = ReactiveFixpoint.create () in let subscribers = ref [] in let my_stats = create_stats () in @@ -1086,35 +1085,16 @@ let fixpoint ~name ~(init : ('k, unit) t) ~(edges : ('k, 'k list) t) () : let init_pending = ref [] in let edges_pending = ref [] in - (* Track which nodes are roots *) - let roots : ('k, unit) Hashtbl.t = Hashtbl.create 64 in - - (* BFS helper to find all reachable from roots *) - let recompute_all () = - let new_current = Hashtbl.create (Hashtbl.length current) in - let frontier = Queue.create () in - - (* Start from all roots *) - Hashtbl.iter - (fun k () -> - Hashtbl.replace new_current k (); - Queue.add k frontier) - roots; - - (* BFS *) - while not (Queue.is_empty frontier) do - let k = Queue.pop frontier in - match Hashtbl.find_opt edge_map k with - | None -> () - | Some successors -> - List.iter - (fun succ -> - if not (Hashtbl.mem new_current succ) then ( - Hashtbl.replace new_current succ (); - Queue.add succ frontier)) - successors - done; - new_current + let emit_output output_entries = + if output_entries <> [] then ( + let num_adds, num_removes = count_changes output_entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length output_entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch output_entries in + List.iter (fun h -> h delta) !subscribers) in let process () = @@ -1141,94 
+1121,10 @@ let fixpoint ~name ~(init : ('k, unit) t) ~(edges : ('k, 'k list) t) () : my_stats.removes_received <- my_stats.removes_received + init_removes + edges_removes; - let output_entries = ref [] in - let needs_full_recompute = ref false in - - (* Apply edge updates *) - List.iter - (fun (k, v_opt) -> - match v_opt with - | Some successors -> - let old = Hashtbl.find_opt edge_map k in - Hashtbl.replace edge_map k successors; - (* If edges changed for a current node, may need recompute *) - if Hashtbl.mem current k && old <> Some successors then - needs_full_recompute := true - | None -> - if Hashtbl.mem edge_map k then ( - Hashtbl.remove edge_map k; - if Hashtbl.mem current k then needs_full_recompute := true)) - edges_entries; - - (* Apply init updates *) - List.iter - (fun (k, v_opt) -> - match v_opt with - | Some () -> Hashtbl.replace roots k () - | None -> - if Hashtbl.mem roots k then ( - Hashtbl.remove roots k; - needs_full_recompute := true)) - init_entries; - - (* Either do incremental expansion or full recompute *) - (if !needs_full_recompute then ( - (* Full recompute: find what changed *) - let new_current = recompute_all () in - - (* Find removed entries *) - Hashtbl.iter - (fun k () -> - if not (Hashtbl.mem new_current k) then - output_entries := (k, None) :: !output_entries) - current; - - (* Find added entries *) - Hashtbl.iter - (fun k () -> - if not (Hashtbl.mem current k) then - output_entries := (k, Some ()) :: !output_entries) - new_current; - - (* Update current *) - Hashtbl.reset current; - Hashtbl.iter (fun k v -> Hashtbl.replace current k v) new_current) - else - (* Incremental: BFS from new roots *) - let frontier = Queue.create () in - - init_entries - |> List.iter (fun (k, v_opt) -> - match v_opt with - | Some () when not (Hashtbl.mem current k) -> - Hashtbl.replace current k (); - output_entries := (k, Some ()) :: !output_entries; - Queue.add k frontier - | _ -> ()); - - while not (Queue.is_empty frontier) do - let k = Queue.pop frontier in - match Hashtbl.find_opt edge_map k with - | None -> () - | Some successors -> - List.iter - (fun succ -> - if not (Hashtbl.mem current succ) then ( - Hashtbl.replace current succ (); - output_entries := (succ, Some ()) :: !output_entries; - Queue.add succ frontier)) - successors - done); - - if !output_entries <> [] then ( - let num_adds, num_removes = count_changes !output_entries in - my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; - my_stats.entries_emitted <- - my_stats.entries_emitted + List.length !output_entries; - my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; - my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; - let delta = Batch !output_entries in - List.iter (fun h -> h delta) !subscribers) + let output_entries = + ReactiveFixpoint.apply state ~init_entries ~edge_entries:edges_entries + in + emit_output output_entries in let _info = @@ -1249,35 +1145,14 @@ let fixpoint ~name ~(init : ('k, unit) t) ~(edges : ('k, 'k list) t) () : Registry.mark_dirty name); (* Initialize from existing data *) - (* First, copy edges *) - edges.iter (fun k v -> Hashtbl.replace edge_map k v); - (* Then, BFS from existing init values *) - let frontier = Queue.create () in - init.iter (fun k () -> - Hashtbl.replace roots k (); - (* Track roots *) - if not (Hashtbl.mem current k) then ( - Hashtbl.replace current k (); - Queue.add k frontier)); - while not (Queue.is_empty frontier) do - let k = Queue.pop frontier in - match Hashtbl.find_opt edge_map k with - | None -> () - | Some successors -> - 
List.iter - (fun succ -> - if not (Hashtbl.mem current succ) then ( - Hashtbl.replace current succ (); - Queue.add succ frontier)) - successors - done; + ReactiveFixpoint.initialize state ~roots_iter:init.iter ~edges_iter:edges.iter; { name; subscribe = (fun h -> subscribers := h :: !subscribers); - iter = (fun f -> Hashtbl.iter f current); - get = (fun k -> Hashtbl.find_opt current k); - length = (fun () -> Hashtbl.length current); + iter = (fun f -> ReactiveFixpoint.iter_current state f); + get = (fun k -> ReactiveFixpoint.get_current state k); + length = (fun () -> ReactiveFixpoint.current_length state); stats = my_stats; level = my_level; } diff --git a/analysis/reactive/src/ReactiveFixpoint.ml b/analysis/reactive/src/ReactiveFixpoint.ml new file mode 100644 index 00000000000..c3f153e1272 --- /dev/null +++ b/analysis/reactive/src/ReactiveFixpoint.ml @@ -0,0 +1,636 @@ +type 'k t = { + current: ('k, unit) Hashtbl.t; + edge_map: ('k, 'k list) Hashtbl.t; + pred_map: ('k, ('k, unit) Hashtbl.t) Hashtbl.t; + roots: ('k, unit) Hashtbl.t; +} + +type 'k edge_change = { + src: 'k; + old_succs: 'k list; + new_succs: 'k list; + removed_targets: 'k list; + has_new_edge: bool; +} + +let analyze_edge_change ~old_succs ~new_succs = + match (old_succs, new_succs) with + | [], [] -> ([], false) + | [], _ -> ([], true) + | _, [] -> (old_succs, false) + | _, _ -> + let new_set = Hashtbl.create (List.length new_succs) in + List.iter (fun k -> Hashtbl.replace new_set k ()) new_succs; + let old_set = Hashtbl.create (List.length old_succs) in + List.iter (fun k -> Hashtbl.replace old_set k ()) old_succs; + let removed_targets = + List.filter (fun target -> not (Hashtbl.mem new_set target)) old_succs + in + let has_new_edge = + List.exists (fun tgt -> not (Hashtbl.mem old_set tgt)) new_succs + in + (removed_targets, has_new_edge) + +let compute_reachable_from_roots_with_work t = + let new_current = Hashtbl.create (Hashtbl.length t.current) in + let frontier = Queue.create () in + let nodes_visited = ref 0 in + let edges_scanned = ref 0 in + + Hashtbl.iter + (fun k () -> + Hashtbl.replace new_current k (); + incr nodes_visited; + Queue.add k frontier) + t.roots; + + while not (Queue.is_empty frontier) do + let k = Queue.pop frontier in + match Hashtbl.find_opt t.edge_map k with + | None -> () + | Some successors -> + edges_scanned := !edges_scanned + List.length successors; + List.iter + (fun succ -> + if not (Hashtbl.mem new_current succ) then ( + Hashtbl.replace new_current succ (); + incr nodes_visited; + Queue.add succ frontier)) + successors + done; + (new_current, !nodes_visited, !edges_scanned) + +module Metrics = struct + let enabled = + match Sys.getenv_opt "RESCRIPT_REACTIVE_FIXPOINT_METRICS" with + | Some ("1" | "true" | "TRUE" | "yes" | "YES") -> true + | _ -> false + + type t = { + mutable apply_calls: int; + mutable init_entries_total: int; + mutable edge_entries_total: int; + mutable output_entries_total: int; + mutable deleted_nodes_total: int; + mutable rederived_nodes_total: int; + mutable delete_then_rederive_calls: int; + mutable incr_node_work_total: int; + mutable incr_edge_work_total: int; + mutable full_node_work_total: int; + mutable full_edge_work_total: int; + mutable max_init_entries: int; + mutable max_edge_entries: int; + mutable max_deleted_nodes: int; + mutable max_rederived_nodes: int; + } + + let totals = + { + apply_calls = 0; + init_entries_total = 0; + edge_entries_total = 0; + output_entries_total = 0; + deleted_nodes_total = 0; + rederived_nodes_total = 0; + 
delete_then_rederive_calls = 0; + incr_node_work_total = 0; + incr_edge_work_total = 0; + full_node_work_total = 0; + full_edge_work_total = 0; + max_init_entries = 0; + max_edge_entries = 0; + max_deleted_nodes = 0; + max_rederived_nodes = 0; + } + + let update ~init_entries ~edge_entries ~output_entries ~deleted_nodes + ~rederived_nodes ~incr_node_work ~incr_edge_work ~full_node_work + ~full_edge_work = + if enabled then ( + totals.apply_calls <- totals.apply_calls + 1; + totals.init_entries_total <- totals.init_entries_total + init_entries; + totals.edge_entries_total <- totals.edge_entries_total + edge_entries; + totals.output_entries_total <- + totals.output_entries_total + output_entries; + totals.deleted_nodes_total <- totals.deleted_nodes_total + deleted_nodes; + totals.rederived_nodes_total <- + totals.rederived_nodes_total + rederived_nodes; + if deleted_nodes > 0 && rederived_nodes > 0 then + totals.delete_then_rederive_calls <- + totals.delete_then_rederive_calls + 1; + totals.incr_node_work_total <- + totals.incr_node_work_total + incr_node_work; + totals.incr_edge_work_total <- + totals.incr_edge_work_total + incr_edge_work; + totals.full_node_work_total <- + totals.full_node_work_total + full_node_work; + totals.full_edge_work_total <- + totals.full_edge_work_total + full_edge_work; + totals.max_init_entries <- max totals.max_init_entries init_entries; + totals.max_edge_entries <- max totals.max_edge_entries edge_entries; + totals.max_deleted_nodes <- max totals.max_deleted_nodes deleted_nodes; + totals.max_rederived_nodes <- + max totals.max_rederived_nodes rederived_nodes) + + let emit_summary () = + if enabled then + let pct_incr_nodes = + if totals.full_node_work_total = 0 then 0. + else + 100. + *. float_of_int totals.incr_node_work_total + /. float_of_int totals.full_node_work_total + in + let pct_incr_edges = + if totals.full_edge_work_total = 0 then 0. + else + 100. + *. float_of_int totals.incr_edge_work_total + /. 
float_of_int totals.full_edge_work_total + in + prerr_endline + (Printf.sprintf + "[ReactiveFixpointMetrics] apply_calls=%d init_entries_total=%d \ + edge_entries_total=%d output_entries_total=%d \ + deleted_nodes_total=%d rederived_nodes_total=%d \ + delete_then_rederive_calls=%d incr_node_work_total=%d \ + full_node_work_total=%d incr_edge_work_total=%d \ + full_edge_work_total=%d incr_vs_full_nodes_pct=%.2f \ + incr_vs_full_edges_pct=%.2f max_init_entries=%d \ + max_edge_entries=%d max_deleted_nodes=%d max_rederived_nodes=%d" + totals.apply_calls totals.init_entries_total + totals.edge_entries_total totals.output_entries_total + totals.deleted_nodes_total totals.rederived_nodes_total + totals.delete_then_rederive_calls totals.incr_node_work_total + totals.full_node_work_total totals.incr_edge_work_total + totals.full_edge_work_total pct_incr_nodes pct_incr_edges + totals.max_init_entries totals.max_edge_entries + totals.max_deleted_nodes totals.max_rederived_nodes) + + let emit_summary_on_signal _ = emit_summary () + let () = + if enabled then + Sys.set_signal Sys.sigusr1 (Sys.Signal_handle emit_summary_on_signal) + let () = at_exit emit_summary +end + +module Invariants = struct + let enabled = + match Sys.getenv_opt "RESCRIPT_REACTIVE_FIXPOINT_ASSERT" with + | Some ("1" | "true" | "TRUE" | "yes" | "YES") -> true + | _ -> false + + let () = + if enabled then + prerr_endline + "[ReactiveFixpoint] debug invariants enabled \ + (RESCRIPT_REACTIVE_FIXPOINT_ASSERT)" + + let assert_ condition message = + if enabled && not condition then failwith message + + let copy_set tbl = + let out = Hashtbl.create (Hashtbl.length tbl) in + Hashtbl.iter (fun k () -> Hashtbl.replace out k ()) tbl; + out + + let set_equal a b = + Hashtbl.length a = Hashtbl.length b + && + let ok = ref true in + Hashtbl.iter (fun k () -> if not (Hashtbl.mem b k) then ok := false) a; + !ok + + let assert_edge_changes_consistent edge_changes = + (* Invariant: for each change, [removed_targets = old_succs \\ new_succs] + and [has_new_edge <=> (new_succs \\ old_succs <> empty)]. *) + if enabled then + List.iter + (fun ({old_succs; new_succs; removed_targets; has_new_edge; _} : + _ edge_change) -> + let expected_removed, expected_has_new = + analyze_edge_change ~old_succs ~new_succs + in + assert_ + (removed_targets = expected_removed + && has_new_edge = expected_has_new) + "ReactiveFixpoint.apply invariant failed: inconsistent edge_change") + edge_changes + + let assert_deleted_nodes_closed ~current ~deleted_nodes ~old_successors = + (* Invariant: [deleted_nodes ⊆ current] and + [k in deleted_nodes => old_successors(k) ∩ current ⊆ deleted_nodes]. *) + if enabled then + Hashtbl.iter + (fun k () -> + assert_ (Hashtbl.mem current k) + "ReactiveFixpoint.apply invariant failed: deleted node not in \ + current"; + List.iter + (fun succ -> + if Hashtbl.mem current succ then + assert_ + (Hashtbl.mem deleted_nodes succ) + "ReactiveFixpoint.apply invariant failed: deleted closure \ + broken") + (old_successors k)) + deleted_nodes + + let assert_current_minus_deleted ~pre_current ~current ~deleted_nodes = + (* Invariant: [current = pre_current \\ deleted_nodes]. 
*) + if enabled then ( + let expected = copy_set pre_current in + Hashtbl.iter (fun k () -> Hashtbl.remove expected k) deleted_nodes; + assert_ + (set_equal expected current) + "ReactiveFixpoint.apply invariant failed: current != pre_current minus \ + deleted") + + let assert_no_supported_deleted_left ~deleted_nodes ~current ~supported = + (* Invariant: [k in deleted_nodes \\ current => not (supported k)]. *) + if enabled then + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem current k) then + assert_ + (not (supported k)) + "ReactiveFixpoint.apply invariant failed: supported deleted node \ + left behind") + deleted_nodes + + let assert_removal_output_matches ~output_entries ~deleted_nodes ~current = + (* Invariant: [removal_keys(output_entries) = deleted_nodes \\ current]. *) + if enabled then ( + let expected = Hashtbl.create (Hashtbl.length deleted_nodes) in + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem current k) then Hashtbl.replace expected k ()) + deleted_nodes; + let actual = Hashtbl.create (List.length output_entries) in + List.iter + (fun (k, v_opt) -> if v_opt = None then Hashtbl.replace actual k ()) + output_entries; + assert_ + (set_equal expected actual) + "ReactiveFixpoint.apply invariant failed: removal output mismatch") + + let assert_final_fixpoint_and_delta ~compute_reachable ~t ~pre_current + ~output_entries = + (* Invariant: [t.current = Reach(t.roots, t.edge_map)] and + [adds(output_entries) = t.current \\ pre_current] and + [removes(output_entries) = pre_current \\ t.current]. *) + if enabled then ( + let reachable = compute_reachable t in + assert_ + (set_equal reachable t.current) + "ReactiveFixpoint.apply invariant failed: current is not a fixed-point \ + closure"; + + let expected_adds = Hashtbl.create (Hashtbl.length t.current) in + let expected_removes = Hashtbl.create (Hashtbl.length pre_current) in + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem pre_current k) then + Hashtbl.replace expected_adds k ()) + t.current; + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem t.current k) then + Hashtbl.replace expected_removes k ()) + pre_current; + + let actual_adds = Hashtbl.create (List.length output_entries) in + let actual_removes = Hashtbl.create (List.length output_entries) in + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some () -> Hashtbl.replace actual_adds k () + | None -> Hashtbl.replace actual_removes k ()) + output_entries; + + let adds_ok = set_equal expected_adds actual_adds in + let removes_ok = set_equal expected_removes actual_removes in + if not (adds_ok && removes_ok) then + failwith + (Printf.sprintf + "ReactiveFixpoint.apply invariant failed: output delta mismatch \ + (pre=%d final=%d output=%d expected_adds=%d actual_adds=%d \ + expected_removes=%d actual_removes=%d)" + (Hashtbl.length pre_current) + (Hashtbl.length t.current) + (List.length output_entries) + (Hashtbl.length expected_adds) + (Hashtbl.length actual_adds) + (Hashtbl.length expected_removes) + (Hashtbl.length actual_removes))) +end + +let create () = + { + current = Hashtbl.create 256; + edge_map = Hashtbl.create 256; + pred_map = Hashtbl.create 256; + roots = Hashtbl.create 64; + } + +let iter_current t f = Hashtbl.iter f t.current +let get_current t k = Hashtbl.find_opt t.current k +let current_length t = Hashtbl.length t.current + +let compute_reachable_from_roots t = + let reachable, _nodes, _edges = compute_reachable_from_roots_with_work t in + reachable + +let replace_current_with t new_current = + Hashtbl.reset t.current; + Hashtbl.iter (fun k v -> 
Hashtbl.replace t.current k v) new_current + +let add_pred t ~target ~pred = + let preds = + match Hashtbl.find_opt t.pred_map target with + | Some ps -> ps + | None -> + let ps = Hashtbl.create 4 in + Hashtbl.replace t.pred_map target ps; + ps + in + Hashtbl.replace preds pred () + +let remove_pred t ~target ~pred = + match Hashtbl.find_opt t.pred_map target with + | None -> () + | Some preds -> + Hashtbl.remove preds pred; + if Hashtbl.length preds = 0 then Hashtbl.remove t.pred_map target + +exception Found_live_pred + +let has_live_predecessor t k = + match Hashtbl.find_opt t.pred_map k with + | None -> false + | Some preds -> ( + try + Hashtbl.iter + (fun pred () -> + if Hashtbl.mem t.current pred then raise Found_live_pred) + preds; + false + with Found_live_pred -> true) + +let apply_edge_update t ~src ~new_successors = + let old_successors = + match Hashtbl.find_opt t.edge_map src with + | Some succs -> succs + | None -> [] + in + match (old_successors, new_successors) with + | [], [] -> Hashtbl.remove t.edge_map src + | [], _ -> + List.iter (fun target -> add_pred t ~target ~pred:src) new_successors; + Hashtbl.replace t.edge_map src new_successors + | _, [] -> + List.iter (fun target -> remove_pred t ~target ~pred:src) old_successors; + Hashtbl.remove t.edge_map src + | _, _ -> + let new_set = Hashtbl.create (List.length new_successors) in + List.iter (fun k -> Hashtbl.replace new_set k ()) new_successors; + + let old_set = Hashtbl.create (List.length old_successors) in + List.iter (fun k -> Hashtbl.replace old_set k ()) old_successors; + + List.iter + (fun target -> + if not (Hashtbl.mem new_set target) then remove_pred t ~target ~pred:src) + old_successors; + + List.iter + (fun target -> + if not (Hashtbl.mem old_set target) then add_pred t ~target ~pred:src) + new_successors; + + Hashtbl.replace t.edge_map src new_successors + +let initialize t ~roots_iter ~edges_iter = + Hashtbl.reset t.roots; + Hashtbl.reset t.edge_map; + Hashtbl.reset t.pred_map; + roots_iter (fun k () -> Hashtbl.replace t.roots k ()); + edges_iter (fun k successors -> + apply_edge_update t ~src:k ~new_successors:successors); + replace_current_with t (compute_reachable_from_roots t) + +let apply t ~init_entries ~edge_entries = + let pre_current = + if Invariants.enabled then Some (Invariants.copy_set t.current) else None + in + let output_entries = ref [] in + let removed_roots = ref [] in + let added_roots = ref [] in + let edge_changes : 'k edge_change list ref = ref [] in + + List.iter + (fun (k, v_opt) -> + let had_root = Hashtbl.mem t.roots k in + match v_opt with + | Some () -> if not had_root then added_roots := k :: !added_roots + | None -> if had_root then removed_roots := k :: !removed_roots) + init_entries; + + let old_successors_for_changed : ('k, 'k list) Hashtbl.t = + Hashtbl.create 64 + in + + List.iter + (fun (src, v_opt) -> + let old_succs = + match Hashtbl.find_opt t.edge_map src with + | Some succs -> succs + | None -> [] + in + let new_succs = + match v_opt with + | Some succs -> succs + | None -> [] + in + let removed_targets, has_new_edge = + analyze_edge_change ~old_succs ~new_succs + in + Hashtbl.replace old_successors_for_changed src old_succs; + edge_changes := + {src; old_succs; new_succs; removed_targets; has_new_edge} + :: !edge_changes) + edge_entries; + Invariants.assert_edge_changes_consistent !edge_changes; + + let deleted_nodes : ('k, unit) Hashtbl.t = Hashtbl.create 128 in + let delete_queue = Queue.create () in + let delete_queue_pops = ref 0 in + let delete_edges_scanned 
= ref 0 in + + let mark_deleted k = + if Hashtbl.mem t.current k && not (Hashtbl.mem deleted_nodes k) then ( + Hashtbl.replace deleted_nodes k (); + Queue.add k delete_queue) + in + + List.iter mark_deleted !removed_roots; + + List.iter + (fun {src; removed_targets; _} -> + if Hashtbl.mem t.current src then + List.iter (fun target -> mark_deleted target) removed_targets) + !edge_changes; + + let old_successors k = + match Hashtbl.find_opt old_successors_for_changed k with + | Some succs -> succs + | None -> ( + match Hashtbl.find_opt t.edge_map k with + | Some succs -> succs + | None -> []) + in + + while not (Queue.is_empty delete_queue) do + let k = Queue.pop delete_queue in + incr delete_queue_pops; + let succs = old_successors k in + delete_edges_scanned := !delete_edges_scanned + List.length succs; + List.iter mark_deleted succs + done; + Invariants.assert_deleted_nodes_closed ~current:t.current ~deleted_nodes + ~old_successors; + + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some () -> Hashtbl.replace t.roots k () + | None -> Hashtbl.remove t.roots k) + init_entries; + + List.iter + (fun {src; new_succs; _} -> + apply_edge_update t ~src ~new_successors:new_succs) + !edge_changes; + + Hashtbl.iter (fun k () -> Hashtbl.remove t.current k) deleted_nodes; + (match pre_current with + | Some pre -> + Invariants.assert_current_minus_deleted ~pre_current:pre ~current:t.current + ~deleted_nodes + | None -> ()); + + let supported k = Hashtbl.mem t.roots k || has_live_predecessor t k in + + let rederive_queue = Queue.create () in + let rederive_pending : ('k, unit) Hashtbl.t = Hashtbl.create 128 in + let rederive_queue_pops = ref 0 in + let rederived_nodes = ref 0 in + let rederive_edges_scanned = ref 0 in + + let enqueue_rederive_if_needed k = + if + Hashtbl.mem deleted_nodes k + && (not (Hashtbl.mem t.current k)) + && (not (Hashtbl.mem rederive_pending k)) + && supported k + then ( + Hashtbl.replace rederive_pending k (); + Queue.add k rederive_queue) + in + + Hashtbl.iter (fun k () -> enqueue_rederive_if_needed k) deleted_nodes; + + while not (Queue.is_empty rederive_queue) do + let k = Queue.pop rederive_queue in + incr rederive_queue_pops; + Hashtbl.remove rederive_pending k; + if + Hashtbl.mem deleted_nodes k + && (not (Hashtbl.mem t.current k)) + && supported k + then ( + Hashtbl.replace t.current k (); + incr rederived_nodes; + match Hashtbl.find_opt t.edge_map k with + | None -> () + | Some succs -> + rederive_edges_scanned := !rederive_edges_scanned + List.length succs; + List.iter enqueue_rederive_if_needed succs) + done; + Invariants.assert_no_supported_deleted_left ~deleted_nodes ~current:t.current + ~supported; + + let expansion_queue = Queue.create () in + let expansion_seen : ('k, unit) Hashtbl.t = Hashtbl.create 128 in + let expansion_queue_pops = ref 0 in + let expansion_edges_scanned = ref 0 in + + let enqueue_expand k = + if Hashtbl.mem t.current k && not (Hashtbl.mem expansion_seen k) then ( + Hashtbl.replace expansion_seen k (); + Queue.add k expansion_queue) + in + + let add_live k = + if not (Hashtbl.mem t.current k) then ( + Hashtbl.replace t.current k (); + (* If a node was tentatively deleted in this wave and later rederived, + suppress add output so downstream sees no net change for that key. 
*) + if not (Hashtbl.mem deleted_nodes k) then + output_entries := (k, Some ()) :: !output_entries; + enqueue_expand k) + in + + List.iter add_live !added_roots; + + List.iter + (fun {src; has_new_edge; _} -> + if Hashtbl.mem t.current src && has_new_edge then enqueue_expand src) + !edge_changes; + + while not (Queue.is_empty expansion_queue) do + let k = Queue.pop expansion_queue in + incr expansion_queue_pops; + match Hashtbl.find_opt t.edge_map k with + | None -> () + | Some successors -> + expansion_edges_scanned := + !expansion_edges_scanned + List.length successors; + List.iter add_live successors + done; + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem t.current k) then + output_entries := (k, None) :: !output_entries) + deleted_nodes; + Invariants.assert_removal_output_matches ~output_entries:!output_entries + ~deleted_nodes ~current:t.current; + (match pre_current with + | Some pre -> + Invariants.assert_final_fixpoint_and_delta + ~compute_reachable:compute_reachable_from_roots ~t ~pre_current:pre + ~output_entries:!output_entries + | None -> ()); + + (if Metrics.enabled then + (* Metrics mode intentionally computes a full closure baseline to compare + incremental work against full recomputation. Keep this opt-in only. *) + let _full_reachable, full_node_work, full_edge_work = + compute_reachable_from_roots_with_work t + in + let incr_node_work = + List.length init_entries + List.length edge_entries + !delete_queue_pops + + !rederive_queue_pops + !expansion_queue_pops + in + let incr_edge_work = + !delete_edges_scanned + !rederive_edges_scanned + + !expansion_edges_scanned + in + Metrics.update ~init_entries:(List.length init_entries) + ~edge_entries:(List.length edge_entries) + ~output_entries:(List.length !output_entries) + ~deleted_nodes:(Hashtbl.length deleted_nodes) + ~rederived_nodes:!rederived_nodes ~incr_node_work ~incr_edge_work + ~full_node_work ~full_edge_work); + + !output_entries diff --git a/analysis/reactive/src/ReactiveFixpoint.mli b/analysis/reactive/src/ReactiveFixpoint.mli new file mode 100644 index 00000000000..2b68c56ad86 --- /dev/null +++ b/analysis/reactive/src/ReactiveFixpoint.mli @@ -0,0 +1,63 @@ +type 'k t +(** Internal state for incremental transitive-closure computation. + + High-level model: + - Root set [R : 'k set] + - Edge relation [E : 'k -> 'k list] + - Current reachable set [C : 'k set] + + Fundamental invariant: + [C = Reach(R, E)], where [Reach] is the least fixed point of reachability + from roots through directed edges. *) + +val create : unit -> 'k t +(** Create an empty state. + Postcondition: [R = empty], [E = empty], [C = empty]. *) + +val iter_current : 'k t -> ('k -> unit -> unit) -> unit +(** Iterate keys currently in [C]. + Order is unspecified. *) + +val get_current : 'k t -> 'k -> unit option +(** Membership query for [C]. + Returns [Some ()] iff the key is currently reachable, [None] otherwise. *) + +val current_length : 'k t -> int +(** Cardinality of [C]. *) + +val initialize : + 'k t -> + roots_iter:(('k -> unit -> unit) -> unit) -> + edges_iter:(('k -> 'k list -> unit) -> unit) -> + unit +(** Replace [R] and [E] from iterators (full overwrite), then recompute closure. + Postcondition: [C := Reach(R, E)]. *) + +val apply : + 'k t -> + init_entries:('k * unit option) list -> + edge_entries:('k * 'k list option) list -> + ('k * unit option) list +(** Apply one incremental update wave and return closure deltas. 
+
+    Input semantics:
+    - [init_entries]: root updates, where [(k, Some ())] adds root [k]
+      (a no-op if it is already a root) and [(k, None)] removes root [k].
+    - [edge_entries]: outgoing-edge updates, where [(k, Some succs)] sets
+      [E(k) := succs] and [(k, None)] removes [k]'s edge entry.
+
+    Correctness postcondition:
+    - Let pre-state be [(R0, E0, C0)] and post-state [(R1, E1, C1)] after the
+      updates. Then [C1 = Reach(R1, E1)].
+    - Returned entries encode the set delta [C0 -> C1]:
+      [(k, Some ())] iff [k in (C1 \ C0)],
+      [(k, None)] iff [k in (C0 \ C1)].
+
+    Net-effect rule:
+    - If a key is tentatively deleted and rederived within the same wave, no
+      remove/add pair is emitted for that key.
+
+    Notes:
+    - Output entry order is unspecified.
+    - Callers should provide at most one update per key per call (or
+      deduplicate before calling). *)
diff --git a/analysis/reactive/src/dune b/analysis/reactive/src/dune
index 4fb933961f3..cc8d382ccd8 100644
--- a/analysis/reactive/src/dune
+++ b/analysis/reactive/src/dune
@@ -1,4 +1,5 @@
 (library
  (name reactive)
  (wrapped false)
+ (private_modules ReactiveFixpoint)
  (libraries unix))
diff --git a/analysis/reactive/test/FixpointIncrementalTest.ml b/analysis/reactive/test/FixpointIncrementalTest.ml
index e7fb6c086e1..e0c2d0b6cbe 100644
--- a/analysis/reactive/test/FixpointIncrementalTest.ml
+++ b/analysis/reactive/test/FixpointIncrementalTest.ml
@@ -672,6 +672,244 @@ let test_fixpoint_remove_base_needs_rederivation () =
   assert (get fp "y" = Some ());
   Printf.printf "PASSED\n\n"
 
+let test_fixpoint_batch_overlapping_deletions () =
+  reset ();
+  Printf.printf "=== Test: fixpoint batch overlapping deletions ===\n";
+
+  let init, emit_init = source ~name:"init" () in
+  let edges, emit_edges = source ~name:"edges" () in
+
+  (* r -> a,b ; a -> x ; b -> x ; x -> y *)
+  emit_edges (Set ("r", ["a"; "b"]));
+  emit_edges (Set ("a", ["x"]));
+  emit_edges (Set ("b", ["x"]));
+  emit_edges (Set ("x", ["y"]));
+
+  let fp = fixpoint ~name:"fp" ~init ~edges () in
+  emit_init (Set ("r", ()));
+
+  assert (get fp "x" = Some ());
+  assert (get fp "y" = Some ());
+
+  let removed = ref [] in
+  subscribe
+    (function
+      | Remove k -> removed := k :: !removed
+      | Batch entries ->
+        List.iter
+          (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed)
+          entries
+      | _ -> ())
+    fp;
+
+  (* Remove both supports for x in one batch. *)
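+  (* Both supports vanish in a single wave, so x and y must each appear in
+     the removal delta exactly once. *)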
+  emit_edges (Batch [("a", Some []); ("b", Some [])]);
+
+  Printf.printf "Removed: [%s]\n" (String.concat ", " !removed);
+  assert (List.mem "x" !removed);
+  assert (List.mem "y" !removed);
+  assert (List.length !removed = 2);
+  assert (get fp "x" = None);
+  assert (get fp "y" = None);
+  assert (length fp = 3);
+  (* the survivors are r, a and b *)
+
+  Printf.printf "PASSED\n\n"
+
+let test_fixpoint_batch_delete_add_same_wave () =
+  reset ();
+  Printf.printf "=== Test: fixpoint batch delete+add same wave ===\n";
+
+  let init, emit_init = source ~name:"init" () in
+  let edges, emit_edges = source ~name:"edges" () in
+
+  (* r -> a,c ; a -> x ; c -> [] *)
+  emit_edges (Set ("r", ["a"; "c"]));
+  emit_edges (Set ("a", ["x"]));
+  emit_edges (Set ("c", []));
+
+  let fp = fixpoint ~name:"fp" ~init ~edges () in
+  emit_init (Set ("r", ()));
+
+  assert (get fp "x" = Some ());
+  assert (length fp = 4);
+
+  let added = ref [] in
+  let removed = ref [] in
+  subscribe
+    (function
+      | Set (k, ()) -> added := k :: !added
+      | Remove k -> removed := k :: !removed
+      | Batch entries ->
+        List.iter
+          (fun (k, v_opt) ->
+            match v_opt with
+            | Some () -> added := k :: !added
+            | None -> removed := k :: !removed)
+          entries)
+    fp;
+
+  (* In one batch: remove a->x and add c->x. x should stay live. *)
+  emit_edges (Batch [("a", Some []); ("c", Some ["x"])]);
+
+  Printf.printf "Removed: [%s], Added: [%s]\n"
+    (String.concat ", " !removed)
+    (String.concat ", " !added);
+
+  assert (get fp "x" = Some ());
+  assert (length fp = 4);
+  assert (!removed = []);
+  assert (!added = []);
+
+  Printf.printf "PASSED\n\n"
+
+let test_fixpoint_fanin_single_predecessor_removed () =
+  reset ();
+  Printf.printf "=== Test: fixpoint fan-in single predecessor removed ===\n";
+
+  let init, emit_init = source ~name:"init" () in
+  let edges, emit_edges = source ~name:"edges" () in
+
+  (* r -> a,b,c ; a,b,c -> z *)
+  emit_edges (Set ("r", ["a"; "b"; "c"]));
+  emit_edges (Set ("a", ["z"]));
+  emit_edges (Set ("b", ["z"]));
+  emit_edges (Set ("c", ["z"]));
+
+  let fp = fixpoint ~name:"fp" ~init ~edges () in
+  emit_init (Set ("r", ()));
+
+  assert (get fp "z" = Some ());
+  assert (length fp = 5);
+
+  let removed = ref [] in
+  subscribe
+    (function
+      | Remove k -> removed := k :: !removed
+      | Batch entries ->
+        List.iter
+          (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed)
+          entries
+      | _ -> ())
+    fp;
+
+  (* Remove only one predecessor contribution; z should remain live. *)
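+  (* b and c still point to z, so no removal may be emitted for z. *)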
+  emit_edges (Set ("a", []));
+
+  Printf.printf "Removed: [%s]\n" (String.concat ", " !removed);
+  assert (get fp "z" = Some ());
+  assert (length fp = 5);
+  assert (!removed = []);
+
+  Printf.printf "PASSED\n\n"
+
+let test_fixpoint_cycle_alternative_external_support () =
+  reset ();
+  Printf.printf
+    "=== Test: fixpoint cycle with alternative external support ===\n";
+
+  let init, emit_init = source ~name:"init" () in
+  let edges, emit_edges = source ~name:"edges" () in
+
+  (* r1 -> b ; r2 -> c ; b <-> c *)
+  emit_edges (Set ("r1", ["b"]));
+  emit_edges (Set ("r2", ["c"]));
+  emit_edges (Set ("b", ["c"]));
+  emit_edges (Set ("c", ["b"]));
+
+  let fp = fixpoint ~name:"fp" ~init ~edges () in
+  emit_init (Set ("r1", ()));
+  emit_init (Set ("r2", ()));
+
+  assert (get fp "b" = Some ());
+  assert (get fp "c" = Some ());
+
+  let removed = ref [] in
+  subscribe
+    (function
+      | Remove k -> removed := k :: !removed
+      | Batch entries ->
+        List.iter
+          (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed)
+          entries
+      | _ -> ())
+    fp;
+
+  (* Remove one external support edge; cycle should remain via r2 -> c. *)
+  emit_edges (Set ("r1", []));
+
+  Printf.printf "After removing r1->b, removed: [%s]\n"
+    (String.concat ", " !removed);
+  assert (get fp "b" = Some ());
+  assert (get fp "c" = Some ());
+  assert (!removed = []);
+
+  removed := [];
+
+  (* Remove the other external support edge; cycle should now disappear. *)
+  emit_edges (Set ("r2", []));
+
+  Printf.printf "After removing r2->c, removed: [%s]\n"
+    (String.concat ", " !removed);
+  assert (List.mem "b" !removed);
+  assert (List.mem "c" !removed);
+  assert (get fp "b" = None);
+  assert (get fp "c" = None);
+
+  Printf.printf "PASSED\n\n"
+
+let test_fixpoint_remove_then_readd_via_expansion_same_wave () =
+  reset ();
+  Printf.printf
+    "=== Test: fixpoint remove then re-add via expansion (same wave) ===\n";
+
+  let init, emit_init = source ~name:"init" () in
+  let edges, emit_edges = source ~name:"edges" () in
+
+  (* r -> s ; s -> x ; y -> x ; then update s -> y.
+     x is first tentatively deleted (s no longer points to x),
+     then becomes reachable again via new path r -> s -> y -> x. *)
+  emit_edges (Set ("r", ["s"]));
+  emit_edges (Set ("s", ["x"]));
+  emit_edges (Set ("y", ["x"]));
+
+  let fp = fixpoint ~name:"fp" ~init ~edges () in
+  emit_init (Set ("r", ()));
+
+  assert (get fp "x" = Some ());
+  assert (get fp "y" = None);
+  assert (length fp = 3);
+
+  let added = ref [] in
+  let removed = ref [] in
+  subscribe
+    (function
+      | Set (k, ()) -> added := k :: !added
+      | Remove k -> removed := k :: !removed
+      | Batch entries ->
+        List.iter
+          (fun (k, v_opt) ->
+            match v_opt with
+            | Some () -> added := k :: !added
+            | None -> removed := k :: !removed)
+          entries)
+    fp;
+
+  emit_edges (Set ("s", ["y"]));
+
+  Printf.printf "Removed: [%s], Added: [%s]\n"
+    (String.concat ", " !removed)
+    (String.concat ", " !added);
+
+  (* x should remain reachable; it must not be emitted as removed. *)
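+  (* y becomes reachable through the new path r -> s -> y, so it must be
+     emitted as added. *)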
+  assert (get fp "x" = Some ());
+  assert (get fp "y" = Some ());
+  assert (length fp = 4);
+  assert (not (List.mem "x" !removed));
+  assert (List.mem "y" !added);
+
+  Printf.printf "PASSED\n\n"
+
 let run_all () =
   Printf.printf "\n====== Fixpoint Incremental Tests ======\n\n";
   test_fixpoint_add_base ();
@@ -687,4 +925,9 @@ let run_all () =
   test_fixpoint_remove_edge_entry_rederivation ();
   test_fixpoint_remove_edge_entry_higher_rank_support ();
   test_fixpoint_remove_edge_entry_needs_rederivation ();
-  test_fixpoint_remove_base_needs_rederivation ()
+  test_fixpoint_remove_base_needs_rederivation ();
+  test_fixpoint_batch_overlapping_deletions ();
+  test_fixpoint_batch_delete_add_same_wave ();
+  test_fixpoint_fanin_single_predecessor_removed ();
+  test_fixpoint_cycle_alternative_external_support ();
+  test_fixpoint_remove_then_readd_via_expansion_same_wave ()
diff --git a/analysis/reanalyze/src/Suppress.ml b/analysis/reanalyze/src/Suppress.ml
index 0502ce9d3fa..b40d6af2c73 100644
--- a/analysis/reanalyze/src/Suppress.ml
+++ b/analysis/reanalyze/src/Suppress.ml
@@ -1,27 +1,85 @@
 let runConfig = RunConfig.runConfig
 
-let checkPrefix prefix_ =
-  let prefix =
+let normalize_separators s =
+  if Sys.win32 then String.map (fun c -> if c = '\\' then '/' else c) s else s
+
+let split_on_slash s =
+  let rec aux acc start i =
+    if i >= String.length s then
+      let last = String.sub s start (i - start) in
+      if last = "" then acc else last :: acc
+    else if s.[i] = '/' then
+      let seg = String.sub s start (i - start) in
+      let acc = if seg = "" then acc else seg :: acc in
+      aux acc (i + 1) (i + 1)
+    else aux acc start (i + 1)
+  in
+  List.rev (aux [] 0 0)
+
+let has_glob_char s = String.contains s '*'
+
+(** Match glob pattern segments against path segments.
+    A star matches any characters within a single path segment; a segment
+    consisting of [**] matches zero or more whole segments. Matching is
+    prefix-based: once the pattern is exhausted, any remaining path segments
+    are accepted. *)
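+(* For example, [glob_match ["src"; "*"; "tests"] ["src"; "foo"; "tests"; "deep"]]
+   holds, as does [glob_match ["src"; "generated"; "**"] ["src"; "generated"]],
+   where the trailing [**] matches zero segments. *)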
+let rec glob_match pattern_segs path_segs =
+  match (pattern_segs, path_segs) with
+  | [], _ -> true
+  | "**" :: rest, _ -> (
+    glob_match rest path_segs
+    ||
+    match path_segs with
+    | _ :: path_rest -> glob_match pattern_segs path_rest
+    | [] -> false)
+  | _ :: _, [] -> false
+  | pat :: pat_rest, seg :: path_rest ->
+    glob_segment pat seg && glob_match pat_rest path_rest
+
+and glob_segment pattern segment =
+  let rec aux pi si =
+    if pi >= String.length pattern then si >= String.length segment
+    else if pattern.[pi] = '*' then
+      let rec try_skip si' =
+        si' <= String.length segment && (aux (pi + 1) si' || try_skip (si' + 1))
+      in
+      try_skip si
+    else
+      si < String.length segment
+      && pattern.[pi] = segment.[si]
+      && aux (pi + 1) (si + 1)
+  in
+  aux 0 0
+
+let checkPattern pattern_ =
+  let is_glob = has_glob_char pattern_ in
+  let pattern =
     match runConfig.projectRoot = "" with
-    | true -> prefix_
-    | false -> Filename.concat runConfig.projectRoot prefix_
+    | true -> pattern_
+    | false -> Filename.concat runConfig.projectRoot pattern_
   in
-  let prefixLen = prefix |> String.length in
-  fun sourceDir ->
-    try String.sub sourceDir 0 prefixLen = prefix
-    with Invalid_argument _ -> false
+  let pattern = normalize_separators pattern in
+  if is_glob then
+    let pattern_segs = split_on_slash pattern in
+    fun sourceDir ->
+      let path_segs = split_on_slash (normalize_separators sourceDir) in
+      glob_match pattern_segs path_segs
+  else
+    let prefixLen = pattern |> String.length in
+    fun sourceDir ->
+      let sourceDir = normalize_separators sourceDir in
+      try String.sub sourceDir 0 prefixLen = pattern
+      with Invalid_argument _ -> false
 
 let suppressSourceDir =
   lazy
     (fun sourceDir ->
       runConfig.suppress
-      |> List.exists (fun prefix -> checkPrefix prefix sourceDir))
+      |> List.exists (fun pattern -> checkPattern pattern sourceDir))
 
 let unsuppressSourceDir =
   lazy
     (fun sourceDir ->
       runConfig.unsuppress
-      |> List.exists (fun prefix -> checkPrefix prefix sourceDir))
+      |> List.exists (fun pattern -> checkPattern pattern sourceDir))
 
 let posInSuppress (pos : Lexing.position) =
   pos.pos_fname |> Lazy.force suppressSourceDir
diff --git a/analysis/src/CompletionBackEnd.ml b/analysis/src/CompletionBackEnd.ml
index a24b4315fb4..7f3260176be 100644
--- a/analysis/src/CompletionBackEnd.ml
+++ b/analysis/src/CompletionBackEnd.ml
@@ -2157,7 +2157,7 @@ let rec processCompletable ~debug ~full ~scope ~env ~pos ~forHover completable =
         Sys.readdir (Filename.dirname (env.file.uri |> Uri.toPath))
         |> Array.to_list
       in
-      (* Try to filter out compiled in source files *)
+      (* Filter out generated build artifacts from in-source builds. *)
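+      (* e.g. the .cmt/.cmj files that in-source builds emit next to the .res
+         sources; the extension list below enumerates them. *)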
       let resFiles =
         StringSet.of_list
           (files
@@ -2166,6 +2166,10 @@ let rec processCompletable ~debug ~full ~scope ~env ~pos ~forHover completable =
                  Some (try Filename.chop_extension f with _ -> f)
                else None))
       in
+      let is_internal_artifact_extension = function
+        | ".ast" | ".cmi" | ".cmj" | ".cmt" | ".cmti" | ".iast" -> true
+        | _ -> false
+      in
       files
       |> List.filter_map (fun fileName ->
              let withoutExtension =
@@ -2178,6 +2182,7 @@ let rec processCompletable ~debug ~full ~scope ~env ~pos ~forHover completable =
              else
                match Filename.extension fileName with
                | ".res" | ".resi" | "" -> None
+              | ext when is_internal_artifact_extension ext -> None
                | _ -> Some ("./" ^ fileName))
       |> List.sort String.compare
     with _ ->
diff --git a/analysis/src/FindFiles.ml b/analysis/src/FindFiles.ml
index a77e97ad1f1..b1d9eea7960 100644
--- a/analysis/src/FindFiles.ml
+++ b/analysis/src/FindFiles.ml
@@ -135,6 +135,32 @@ let collectFiles directory =
   | None -> None
   | Some res -> Some (modName, SharedTypes.Impl {cmt; res}))
 
+(* Dependency resolution uses the package graph recorded by the build system in
+   .sourcedirs.json when available. If a package is not listed there, analysis
+   falls back to walking up node_modules from the project root. *)
+let readSourcedirsPackageRoots base =
+  let sourceDirsFile = base /+ "lib" /+ "bs" /+ ".sourcedirs.json" in
+  let readPackageEntry = function
+    | Json.Array [Json.String name; Json.String path] ->
+      let path = if Filename.is_relative path then base /+ path else path in
+      Some (name, path)
+    | _ -> None
+  in
+  match Files.readFile sourceDirsFile with
+  | None -> []
+  | Some text -> (
+    match Json.parse text with
+    | None -> []
+    | Some json -> (
+      match json |> Json.get "pkgs" |> bind Json.array with
+      | None -> []
+      | Some packages -> packages |> List.filter_map readPackageEntry))
+
+let findPackageRoot ~base ~sourcedirsPackageRoots name =
+  match List.assoc_opt name sourcedirsPackageRoots with
+  | Some path when Files.exists path -> Some path
+  | _ -> ModuleResolution.resolveNodeModulePath ~startPath:base name
+
 (* returns a list of (absolute path to cmt(i), relative path from base to source file) *)
 let findProjectFiles ~public ~namespace ~path ~sourceDirectories ~libBs =
   let dirs =
@@ -233,12 +259,12 @@ let findDependencyFiles base config =
   in
   let deps = deps @ devDeps in
   Log.log ("Dependencies: " ^ String.concat " " deps);
+  let sourcedirsPackageRoots = readSourcedirsPackageRoots base in
   let depFiles =
     deps
     |> List.map (fun name ->
           let result =
-             Json.bind
-               (ModuleResolution.resolveNodeModulePath ~startPath:base name)
+             Json.bind (findPackageRoot ~base ~sourcedirsPackageRoots name)
               (fun path ->
                 let rescriptJsonPath = path /+ "rescript.json" in
                 let bsconfigJsonPath = path /+ "bsconfig.json" in
diff --git a/analysis/src/ProcessAttributes.ml b/analysis/src/ProcessAttributes.ml
index 10e43d51126..31d994d5e03 100644
--- a/analysis/src/ProcessAttributes.ml
+++ b/analysis/src/ProcessAttributes.ml
@@ -22,15 +22,25 @@ let rec findDeprecatedAttribute attributes =
   match attributes with
   | [] -> None
   | ( {Asttypes.txt = "deprecated"},
-      PStr
-        [
-          {
-            pstr_desc =
-              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (msg, _))}, _);
-          };
-        ] )
-    :: _ ->
-    Some msg
+      PStr [{pstr_desc = Pstr_eval ({pexp_desc = expr}, _)}] )
+    :: _ -> (
+    match expr with
+    (* Simple form: @deprecated("message") *)
+    | Pexp_constant (Pconst_string (msg, _)) -> Some msg
+    (* Record form: @deprecated({reason: "message"}) *)
+    | Pexp_record (fields, _) ->
+      let reason = ref "" in
+      fields
+      |> List.iter (fun {lid = {txt}; x} ->
+             match (txt, x) with
+             | ( Lident "reason",
+                 {pexp_desc = Pexp_constant (Pconst_string (msg, _))} ) ->
+               reason := msg
+             | _ -> ());
+      Some !reason
+    | _ -> None)
   | ({Asttypes.txt = "deprecated"}, _) :: _ -> Some ""
   | _ :: rest -> findDeprecatedAttribute rest
@@ -41,7 +51,7 @@ let newDeclared ~item ~extent ~name ~stamp ~modulePath isExported attributes =
     extentLoc = extent;
     isExported;
     modulePath;
-    deprecated = findDeprecatedAttribute attributes;
+    deprecated = findDeprecatedAttribute (List.rev attributes);
     docstring =
       (match findDocAttribute attributes with
       | None -> []
diff --git a/compiler/bsc/rescript_compiler_main.ml b/compiler/bsc/rescript_compiler_main.ml
index ec40263bb67..fba8c2c3c41 100644
--- a/compiler/bsc/rescript_compiler_main.ml
+++ b/compiler/bsc/rescript_compiler_main.ml
@@ -268,6 +268,10 @@ let command_line_flags : (string * Bsc_args.spec * string) array =
         Js_config.binary_ast := true;
         Js_config.syntax_only := true),
       "*internal* Generate binary .mli_ast and ml_ast and stop" );
+    ( "-bs-test-ast-conversion",
+      set Js_config.test_ast_conversion,
+      "*internal* Roundtrip the parsed AST through Parsetree0 before continuing"
+    );
     ( "-bs-syntax-only",
       set Js_config.syntax_only,
       "*internal* Only check syntax" );
diff --git a/compiler/common/bs_version.ml b/compiler/common/bs_version.ml
index ef2d08ac2c2..5d43340f4a7 100644
--- a/compiler/common/bs_version.ml
+++ b/compiler/common/bs_version.ml
@@ -21,5 +21,5 @@
  * You should have received a copy of the GNU Lesser General Public License
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
-let version = "12.2.0"
+let version = "12.3.0-beta.1"
 let header = "// Generated by ReScript, PLEASE EDIT WITH CARE"
diff --git a/compiler/common/js_config.ml b/compiler/common/js_config.ml
index 24aa8b69f13..9e2b6f598ca 100644
--- a/compiler/common/js_config.ml
+++ b/compiler/common/js_config.ml
@@ -43,6 +43,7 @@ let check_div_by_zero = ref true
 let get_check_div_by_zero () = !check_div_by_zero
 let syntax_only = ref false
 let binary_ast = ref false
+let test_ast_conversion = ref false
 let debug = ref false
 let cmi_only = ref false
 let cmj_only = ref false
diff --git a/compiler/common/js_config.mli b/compiler/common/js_config.mli
index d6f4bd8ba60..f19b3c6126c 100644
--- a/compiler/common/js_config.mli
+++ b/compiler/common/js_config.mli
@@ -65,6 +65,8 @@ val syntax_only : bool ref
 
 val binary_ast : bool ref
 
+val test_ast_conversion : bool ref
+
 val debug : bool ref
 
 val cmi_only : bool ref
diff --git a/compiler/common/ml_binary.ml b/compiler/common/ml_binary.ml
index ae7e441c82e..cfe96efcab3 100644
--- a/compiler/common/ml_binary.ml
+++ b/compiler/common/ml_binary.ml
@@ -52,6 +52,12 @@ let ast0_to_signature : ast0 -> Parsetree.signature = function
     Ast_mapper_from0.default_mapper.signature Ast_mapper_from0.default_mapper
       sig0
 
+let ast0_roundtrip : type a. a kind -> a -> a =
+ fun kind ast ->
+  match kind with
+  | Ml -> ast |> to_ast0 Ml |> ast0_to_structure
+  | Mli -> ast |> to_ast0 Mli |> ast0_to_signature
+
 let magic_of_kind : type a. a kind -> string = function
   | Ml -> Config.ast_impl_magic_number
   | Mli -> Config.ast_intf_magic_number
diff --git a/compiler/common/ml_binary.mli b/compiler/common/ml_binary.mli
index 7749e8ccec5..13ca35c930b 100644
--- a/compiler/common/ml_binary.mli
+++ b/compiler/common/ml_binary.mli
@@ -32,5 +32,6 @@ type ast0 = Impl of Parsetree0.structure | Intf of Parsetree0.signature
 val magic_of_kind : 'a kind -> string
 val magic_of_ast0 : ast0 -> string
 val to_ast0 : 'a kind -> 'a -> ast0
+val ast0_roundtrip : 'a kind -> 'a -> 'a
 val ast0_to_structure : ast0 -> Parsetree.structure
 val ast0_to_signature : ast0 -> Parsetree.signature
diff --git a/compiler/core/cmd_ppx_apply.ml b/compiler/core/cmd_ppx_apply.ml
index dc2f50d0403..111eae078d2 100644
--- a/compiler/core/cmd_ppx_apply.ml
+++ b/compiler/core/cmd_ppx_apply.ml
@@ -95,7 +95,10 @@ let rewrite kind ppxs ast =
 
 let apply_rewriters_str ?(restore = true) ~tool_name ast =
   match !Clflags.all_ppx with
-  | [] -> ast
+  | [] ->
+    if !Js_config.test_ast_conversion then
+      Ml_binary.ast0_roundtrip Ml_binary.Ml ast
+    else ast
   | ppxs ->
     ast
     |> Ast_mapper.add_ppx_context_str ~tool_name
@@ -104,7 +107,10 @@ let apply_rewriters_sig ?(restore = true) ~tool_name ast =
   match !Clflags.all_ppx with
-  | [] -> ast
+  | [] ->
+    if !Js_config.test_ast_conversion then
+      Ml_binary.ast0_roundtrip Ml_binary.Mli ast
+    else ast
   | ppxs ->
     ast
     |> Ast_mapper.add_ppx_context_sig ~tool_name
diff --git a/compiler/core/lam_compile.ml b/compiler/core/lam_compile.ml
index 87a25bfe2ad..42844bc99c0 100644
--- a/compiler/core/lam_compile.ml
+++ b/compiler/core/lam_compile.ml
@@ -825,18 +825,27 @@ let compile output_prefix =
             | _ -> false)
           typeof_clauses
       in
-      let has_array_case =
+      let clauses_have_array_case =
         List.exists
           (function
             | Ast_untagged_variants.Untagged (InstanceType Array), _ -> true
             | _ -> false)
           not_typeof_clauses
       in
+      let type_has_array_case =
+        List.exists
+          (function
+            | Ast_untagged_variants.InstanceType Array -> true
+            | _ -> false)
+          block_cases
+      in
       (* When there's an ObjectType typeof case, null and arrays can
         incorrectly match it (typeof null === typeof [] === "object").
         Guard against them when they should fall through to default. *)
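+      (* The array guard is only needed when the variant type actually has an
+         array case that no clause matches explicitly; otherwise arrays either
+         hit their own instanceof clause or cannot inhabit the type. *)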
       let needs_null_guard = has_object_typeof && has_null_case in
-      let needs_array_guard = has_object_typeof && not has_array_case in
+      let needs_array_guard =
+        has_object_typeof && type_has_array_case && not clauses_have_array_case
+      in
       let rec build_if_chain remaining_clauses =
         match remaining_clauses with
         | ( Ast_untagged_variants.Untagged (InstanceType instance_type),
@@ -860,7 +869,8 @@ let compile output_prefix =
             match (guard, default) with
             | Some guard, Some default_body ->
               S.if_ guard default_body ~else_:[typeof_switch ()]
-            | _ -> typeof_switch ())
+            | Some guard, None -> S.if_ (E.not guard) [typeof_switch ()]
+            | None, _ -> typeof_switch ())
         in
         build_if_chain not_typeof_clauses
       in
diff --git a/compiler/ml/ast_mapper_from0.ml b/compiler/ml/ast_mapper_from0.ml
index 3f91d6ac1ee..7b1ed3f7287 100644
--- a/compiler/ml/ast_mapper_from0.ml
+++ b/compiler/ml/ast_mapper_from0.ml
@@ -25,6 +25,19 @@ open Ast_helper
 open Location
 module Pt = Parsetree
 
+let jsx_prop_loc_attr = "res.jsxPropLoc"
+let jsx_spread_loc_attr = "res.jsxSpreadLoc"
+
+let extract_internal_loc_attr attr_name attrs =
+  let rec loop rev_acc = function
+    | [] -> (None, List.rev rev_acc)
+    | (({txt; loc}, payload) as attr) :: rest ->
+      if txt = attr_name && payload = PStr [] then
+        (Some loc, List.rev_append rev_acc rest)
+      else loop (attr :: rev_acc) rest
+  in
+  loop [] attrs
+
 type mapper = {
   attribute: mapper -> attribute -> Pt.attribute;
   attributes: mapper -> attribute list -> Pt.attribute list;
@@ -331,9 +344,22 @@ module E = struct
 
   let try_map_jsx_prop (sub : mapper) (lbl : Asttypes.Noloc.arg_label)
       (e : expression) : Parsetree.jsx_prop option =
+    let map_expr_with_loc_attr attr_name fallback make_prop =
+      let loc, attrs = extract_internal_loc_attr attr_name e.pexp_attributes in
+      let e = {e with pexp_attributes = attrs} in
+      let expr = sub.expr sub e in
+      make_prop
+        (match loc with
+        | Some loc -> loc
+        | None -> fallback expr)
+        expr
+    in
     match (lbl, e) with
-    | Asttypes.Noloc.Labelled "_spreadProps", expr ->
-      Some (Parsetree.JSXPropSpreading (Location.none, sub.expr sub expr))
+    | Asttypes.Noloc.Labelled "_spreadProps", _expr ->
+      Some
+        (map_expr_with_loc_attr jsx_spread_loc_attr
+           (fun expr -> expr.pexp_loc)
+           (fun loc expr -> Parsetree.JSXPropSpreading (loc, expr)))
     | ( Asttypes.Noloc.Labelled name,
        {pexp_desc = Pexp_ident {txt = Longident.Lident v}; pexp_loc = name_loc}
@@ -344,14 +370,18 @@ module E = struct
       )
       when name = v ->
       Some (Parsetree.JSXPropPunning (true, {txt = name; loc = name_loc}))
-    | Asttypes.Noloc.Labelled name, exp ->
+    | Asttypes.Noloc.Labelled name, _exp ->
       Some
-        (Parsetree.JSXPropValue
-           ({txt = name; loc = Location.none}, false, sub.expr sub exp))
-    | Asttypes.Noloc.Optional name, exp ->
+        (map_expr_with_loc_attr jsx_prop_loc_attr
+           (fun expr -> expr.pexp_loc)
+           (fun loc expr ->
+             Parsetree.JSXPropValue ({txt = name; loc}, false, expr)))
+    | Asttypes.Noloc.Optional name, _exp ->
       Some
-        (Parsetree.JSXPropValue
-           ({txt = name; loc = Location.none}, true, sub.expr sub exp))
+        (map_expr_with_loc_attr jsx_prop_loc_attr
+           (fun expr -> expr.pexp_loc)
+           (fun loc expr ->
+             Parsetree.JSXPropValue ({txt = name; loc}, true, expr)))
     | _ -> None
 
   let extract_props_and_children (sub : mapper) items =
diff --git a/compiler/ml/ast_mapper_to0.ml b/compiler/ml/ast_mapper_to0.ml
index d0ac43d737a..d322dbc57b8 100644
--- a/compiler/ml/ast_mapper_to0.ml
+++ b/compiler/ml/ast_mapper_to0.ml
@@ -25,6 +25,13 @@ open Ast_helper0
 open Location
 module Pt = Parsetree0
 
+let jsx_prop_loc_attr = "res.jsxPropLoc"
+let jsx_spread_loc_attr = "res.jsxSpreadLoc"
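+(* These marker attributes smuggle JSX prop/spread locations through
+   Parsetree0; Ast_mapper_from0 strips them again and restores the original
+   locations. *)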
+
+let wrap_with_loc_attr attr_name loc (expr : Pt.expression) =
+  let attr : Pt.attribute = (Location.mkloc attr_name loc, Pt.PStr []) in
+  {expr with pexp_attributes = attr :: expr.pexp_attributes}
+
 type mapper = {
   attribute: mapper -> attribute -> Pt.attribute;
   attributes: mapper -> attribute list -> Pt.attribute list;
@@ -334,9 +341,12 @@ module E = struct
             if is_optional then Asttypes.Noloc.Optional name.txt
             else Asttypes.Noloc.Labelled name.txt
           in
-          (label, sub.expr sub value)
-        | JSXPropSpreading (_, value) ->
-          (Asttypes.Noloc.Labelled "_spreadProps", sub.expr sub value))
+          ( label,
+            sub.expr sub value |> wrap_with_loc_attr jsx_prop_loc_attr name.loc
+          )
+        | JSXPropSpreading (loc, value) ->
+          ( Asttypes.Noloc.Labelled "_spreadProps",
+            sub.expr sub value |> wrap_with_loc_attr jsx_spread_loc_attr loc ))
 
   let map_jsx_children sub loc children =
     match children with
diff --git a/compiler/ml/parmatch.ml b/compiler/ml/parmatch.ml
index ebb6903c661..b86364445f0 100644
--- a/compiler/ml/parmatch.ml
+++ b/compiler/ml/parmatch.ml
@@ -2017,6 +2017,25 @@ let contains_extension pat =
   loop pat;
   !r
 
+let contains_dict_pattern pat =
+  let r = ref false in
+  let rec loop p =
+    if Dict_type_helpers.has_dict_pattern_attribute p.pat_attributes then
+      r := true
+    else Typedtree.iter_pattern_desc loop p.pat_desc
+  in
+  loop pat;
+  !r
+
+let rec opaque_dict_patterns pat =
+  if Dict_type_helpers.has_dict_pattern_attribute pat.pat_attributes then
+    {pat with pat_desc = Tpat_any; pat_extra = []; pat_attributes = []}
+  else
+    {
+      pat with
+      pat_desc = Typedtree.map_pattern_desc opaque_dict_patterns pat.pat_desc;
+    }
+
 (* Build an untyped or-pattern from its expected type *)
 let ppat_of_type env ty =
   match pats_of_type env ty with
@@ -2192,6 +2211,19 @@ let check_unused pred casel =
           if skip then r
           else
             (* Then look for empty patterns *)
+            let pss, qs =
+              if
+                contains_dict_pattern q
+                || List.exists
+                     (fun ps -> List.exists contains_dict_pattern ps)
+                     pss
+              then
+                ( List.filter
+                    (fun ps -> not (List.exists contains_dict_pattern ps))
+                    pss,
+                  List.map opaque_dict_patterns qs )
+              else (pss, qs)
+            in
            let sfs = satisfiables pss qs in
            if sfs = [] then Unused
            else
diff --git a/compiler/ml/typecore.ml b/compiler/ml/typecore.ml
index ee304dff7d7..89126950d58 100644
--- a/compiler/ml/typecore.ml
+++ b/compiler/ml/typecore.ml
@@ -1803,6 +1803,11 @@ let rec is_nonexpansive exp =
     List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list
     && is_nonexpansive body
   | Texp_function _ -> true
+  | Texp_apply {partial = true; _} ->
+    (* ReScript partial applications (`foo(args, ...)`) lower to wrapper
+       functions in codegen, so creating the partial itself is nonexpansive
+       like an explicit lambda. *)
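+    (* e.g. `let add1 = add(1, ...)` may be generalized exactly like
+       `let add1 = x => add(1, x)`. *)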
+    true
   | Texp_apply {funct = e; args = (_, None) :: el} ->
     is_nonexpansive e && List.for_all is_nonexpansive_opt (List.map snd el)
   | Texp_match (e, cases, [], _) ->
@@ -2239,6 +2244,19 @@ let extract_function_name funct =
   | Texp_ident (path, _, _) -> Some (Longident.parse (Path.name path))
   | _ -> None
 
+let should_unify_expected_result_before_typing_lowered_apply funct sargs =
+  match (extract_function_name funct, sargs) with
+  | ( Some (Longident.Ldot (Longident.Lident "Primitive_dict", "make")),
+      [(Asttypes.Nolabel, {Parsetree.pexp_desc = Parsetree.Pexp_array _})] ) ->
+    (* Dict literals *)
+    true
+  | ( Some
+        (Longident.Ldot (Longident.Lident "Primitive_promise", "unsafe_async")),
+      [(Asttypes.Nolabel, _)] ) ->
+    (* Async wrapper *)
+    true
+  | _ -> false
+
 type lazy_args =
   (Asttypes.arg_label * (unit -> Typedtree.expression) option) list
 
@@ -2440,6 +2458,17 @@ and type_expect_ ?deprecated_context ~context ?in_function ?(recarg = Rejected)
     let funct =
       type_exp ~deprecated_context:FunctionCall ~context:None env sfunct
     in
+    (if should_unify_expected_result_before_typing_lowered_apply funct sargs
+     then
+       (* Lowered syntax like dict literals and async wrappers becomes a regular
+          application, so thread the expected result type into the application
+          before typing its arguments. *)
+       let _, ty_res =
+         filter_arrow ~env
+           ~arity:(Some (List.length sargs))
+           funct.exp_type Nolabel
+       in
+       unify_exp_types ~context:None loc env ty_res (instance env ty_expected));
     let ty = instance env funct.exp_type in
     end_def ();
    wrap_trace_gadt_instances env (lower_args env []) ty;
diff --git a/compiler/ml/typedecl.ml b/compiler/ml/typedecl.ml
index da35de5288d..0f44d4595f4 100644
--- a/compiler/ml/typedecl.ml
+++ b/compiler/ml/typedecl.ml
@@ -312,6 +312,24 @@ let transl_constructor_arguments env closed = function
       let cty = transl_simple_type env closed obj_ty in
       (Types.Cstr_tuple [cty.ctyp_type], Cstr_tuple [cty]))))
 
+let rewrite_optional_inline_record_fields = function
+  | Pcstr_tuple _ as args -> args
+  | Pcstr_record lds ->
+    Pcstr_record
+      (Ext_list.map lds (fun ld ->
+           if ld.pld_optional then
+             let typ = ld.pld_type in
+             let typ =
+               {
+                 typ with
+                 ptyp_desc =
+                   Ptyp_constr
+                     ({txt = Lident "option"; loc = typ.ptyp_loc}, [typ]);
+               }
+             in
+             {ld with pld_type = typ}
+           else ld))
+
 let make_constructor env type_path type_params sargs sret_type =
   match sret_type with
   | None ->
@@ -440,28 +458,10 @@ let transl_declaration ~type_record_as_object ~untagged_wfc env sdecl id =
           Location.prerr_warning loc Warnings.Constraint_on_gadt);
       let scstrs =
         Ext_list.map scstrs (fun ({pcd_args} as cstr) ->
-            match pcd_args with
-            | Pcstr_tuple _ -> cstr
-            | Pcstr_record lds ->
-              {
-                cstr with
-                pcd_args =
-                  Pcstr_record
-                    (Ext_list.map lds (fun ld ->
-                         if ld.pld_optional then
-                           let typ = ld.pld_type in
-                           let typ =
-                             {
-                               typ with
-                               ptyp_desc =
-                                 Ptyp_constr
-                                   ( {txt = Lident "option"; loc = typ.ptyp_loc},
-                                     [typ] );
-                             }
-                           in
-                           {ld with pld_type = typ}
-                         else ld));
-              })
+            {
+              cstr with
+              pcd_args = rewrite_optional_inline_record_fields pcd_args;
+            })
       in
       let all_constrs = ref StringSet.empty in
       List.iter
@@ -1627,6 +1627,7 @@ let transl_extension_constructor env type_path type_params typext_params priv
   let args, ret_type, kind =
     match sext.pext_kind with
    | Pext_decl (sargs, sret_type) ->
+      let sargs = rewrite_optional_inline_record_fields sargs in
      let targs, tret_type, args, ret_type, _ =
        make_constructor env type_path typext_params sargs sret_type
      in
diff --git a/compiler/syntax/src/jsx_v4.ml b/compiler/syntax/src/jsx_v4.ml
index 4cf9c9a0dd1..83bbb0dcdc4 100644 --- a/compiler/syntax/src/jsx_v4.ml +++ b/compiler/syntax/src/jsx_v4.ml @@ -498,15 +498,23 @@ let modified_binding ~binding_loc ~binding_pat_loc ~fn_name binding = in (wrap_expression_with_binding wrap_expression, has_forward_ref, expression) -let vb_match ~expr (name, default, _, alias, loc, _) = +let rec strip_constraint_unpack pattern = + match pattern with + | {ppat_desc = Ppat_constraint (_, {ptyp_desc = Ptyp_package _})} -> pattern + | {ppat_desc = Ppat_constraint (pattern, _)} -> + strip_constraint_unpack pattern + | _ -> pattern + +let vb_match ~expr (name, default, pattern, _alias, loc, _) = let label = get_label name in match default with | Some default -> + let resolved_name = "__" ^ label ^ "_value" in let value_binding = Vb.mk - (Pat.var (Location.mkloc alias loc)) + (Pat.var (Location.mkloc resolved_name loc)) (Exp.match_ - (Exp.ident {txt = Lident ("__" ^ alias); loc = Location.none}) + (Exp.ident {txt = Lident ("__" ^ label); loc = Location.none}) [ Exp.case (Pat.construct @@ -518,7 +526,10 @@ let vb_match ~expr (name, default, _, alias, loc, _) = default; ]) in - Exp.let_ Nonrecursive [value_binding] expr + Exp.let_ Nonrecursive [value_binding] + (Exp.let_ Nonrecursive + [Vb.mk pattern (Exp.ident (Location.mknoloc @@ Lident resolved_name))] + expr) | None -> expr let vb_match_expr named_arg_list expr = @@ -652,22 +663,6 @@ let map_binding ~config ~empty_loc ~pstr_loc ~file_name ~rec_flag binding = ] (Exp.ident ~loc:pstr_loc {loc = empty_loc; txt = Lident txt}) in - let rec strip_constraint_unpack ~label pattern = - match pattern with - | {ppat_desc = Ppat_constraint (_, {ptyp_desc = Ptyp_package _})} -> - pattern - | {ppat_desc = Ppat_constraint (pattern, _)} -> - strip_constraint_unpack ~label pattern - | _ -> pattern - in - let safe_pattern_label pattern = - match pattern with - | {ppat_desc = Ppat_var {txt; loc}} -> - {pattern with ppat_desc = Ppat_var {txt = "__" ^ txt; loc}} - | {ppat_desc = Ppat_alias (p, {txt; loc})} -> - {pattern with ppat_desc = Ppat_alias (p, {txt = "__" ^ txt; loc})} - | _ -> pattern - in let rec returned_expression patterns_with_label patterns_with_nolabel ({pexp_desc} as expr) = match pexp_desc with @@ -688,17 +683,20 @@ let map_binding ~config ~empty_loc ~pstr_loc ~file_name ~rec_flag binding = lhs = {ppat_loc; ppat_desc} as pattern; rhs = expr; } -> ( - let pattern_without_constraint = - strip_constraint_unpack ~label:(get_label arg_label) pattern - in + let pattern_without_constraint = strip_constraint_unpack pattern in (* If prop has the default value as Ident, it will get a build error when the referenced Ident value and the prop have the same name. - So we add a "__" to label to resolve the build error. + So we bind a temp "__