replace component sorting with leader update
Summary:
D4339309 tried to fix merge crashes during rechecks by sorting components,
thereby ensuring that components have the same leaders no matter how they are
discovered. Unfortunately, that strategy turns out to be problematic: sorting
perturbs the order in which the files of a component are checked, which
somehow (probably by interacting with other bugs) causes spurious error diffs
on rechecks.

This diff implements an alternative to sorting, where the system adapts when
the leader of a component changes, either installing the new leader or reusing
the old leader. It also makes the handling of leaders similar to the handling of
sig contexts and sig hashes, thereby simplifying some invariants.

Reviewed By: jeffmo

Differential Revision: D4345135

fbshipit-source-id: bcb1c02c0449e6fcb65134e6b1796c7661367801
avikchaudhuri authored and Facebook Github Bot committed Dec 19, 2016
1 parent d2dc508 commit c8d6e5d
Showing 7 changed files with 74 additions and 76 deletions.
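
The summary above describes replacing deterministic-by-sorting leaders with a leader-update scheme: the merged signature of a component is keyed by its current leader, and the leader mapping is rewritten only when the signature actually changes. A minimal OCaml sketch of that idea, with plain hash tables standing in for the SharedMem heaps and hypothetical names throughout (this is not the code in the commit):

  (* file -> leader of its component *)
  let leader_tbl : (string, string) Hashtbl.t = Hashtbl.create 16
  (* leader -> signature hash recorded by the previous merge *)
  let old_sig_hash_tbl : (string, string) Hashtbl.t = Hashtbl.create 16

  (* Merge one component. [files] lists the component with its current
     leader at the head; [md5] is the freshly computed signature hash.
     Returns true if new entries were installed under the (possibly new)
     leader, false if the old entries (and hence the old leader) can
     simply be reused. *)
  let merge_component files md5 =
    let leader = List.hd files in
    let changed =
      match Hashtbl.find_opt old_sig_hash_tbl leader with
      | Some old_md5 -> md5 <> old_md5
      | None -> true                    (* no entry under this leader *)
    in
    if changed then begin
      (* install the new leader for every file of the component *)
      List.iter (fun f -> Hashtbl.replace leader_tbl f leader) files;
      Hashtbl.replace old_sig_hash_tbl leader md5
    end;
    changed
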
63 changes: 39 additions & 24 deletions src/services/inference/context_cache.ml
@@ -28,58 +28,73 @@ end)
let add_context = Expensive.wrap ContextHeap.add
let find_unsafe_context = Expensive.wrap ContextHeap.find_unsafe

let add ~audit cx =
let cx_file = Context.file cx in
add_context ~audit cx_file cx

let remove_batch files =
ContextHeap.remove_batch files

module SigContextHeap = SharedMem_js.WithCache (Loc.FilenameKey) (struct
type t = Context.t
let prefix = Prefix.make()
let description = "SigContext"
end)

let add_sig_context = Expensive.wrap SigContextHeap.add
let find_unsafe_sig_context = Expensive.wrap SigContextHeap.find_unsafe

let add_sig ~audit cx =
let cx_file = Context.file cx in
add_sig_context ~audit cx_file cx

module SigHashHeap = SharedMem_js.WithCache (Loc.FilenameKey) (struct
type t = SigHash.t
let prefix = Prefix.make()
let description = "SigHash"
end)

let add_sig_context = Expensive.wrap SigContextHeap.add
let find_unsafe_sig_context = Expensive.wrap SigContextHeap.find_unsafe
module LeaderHeap = SharedMem_js.WithCache (Loc.FilenameKey) (struct
type t = filename
let prefix = Prefix.make()
let description = "Leader"
end)

let add ~audit cx =
let cx_file = Context.file cx in
add_context ~audit cx_file cx
let find_leader file =
match LeaderHeap.get file with
| Some leader -> leader
| None -> raise (Key_not_found ("LeaderHeap", (string_of_filename file)))

let add_sig ~audit cx =
let cx_file = Context.file cx in
add_sig_context ~audit cx_file cx
(* While merging, we must keep LeaderHeap, SigContextHeap, and SigHashHeap in
sync, sometimes creating new entries and sometimes reusing old entries. *)

(* Add a sig only if it has not changed meaningfully, and return the result of
that check. *)
let add_sig_on_diff ~audit cx md5 =
let cx_file = Context.file cx in
let diff = match SigHashHeap.get_old cx_file with
| Some md5_old -> Loc.check_suffix cx_file Files.flow_ext || md5 <> md5_old
let add_merge_on_diff ~audit component_cxs md5 =
let component_files = List.map (Context.file) component_cxs in
let leader_f, leader_cx = List.hd component_files, List.hd component_cxs in
let diff = match SigHashHeap.get_old leader_f with
| Some md5_old -> Loc.check_suffix leader_f Files.flow_ext || md5 <> md5_old
| None -> true in
if diff then begin
add_sig_context ~audit cx_file cx;
SigHashHeap.add cx_file md5;
List.iter (fun f -> LeaderHeap.add f leader_f) component_files;
add_sig_context ~audit leader_f leader_cx;
SigHashHeap.add leader_f md5;
end;
diff

let remove_batch files =
ContextHeap.remove_batch files

let remove_sig_batch files =
SigContextHeap.remove_batch files;
SigHashHeap.remove_batch files

let oldify_sig_batch files =
let oldify_merge_batch files =
LeaderHeap.oldify_batch files;
SigContextHeap.oldify_batch files;
SigHashHeap.oldify_batch files

let remove_old_sig_batch files =
let remove_old_merge_batch files =
LeaderHeap.remove_old_batch files;
SigContextHeap.remove_old_batch files;
SigHashHeap.remove_old_batch files

let revive_sig_batch files =
let revive_merge_batch files =
LeaderHeap.revive_batch files;
SigContextHeap.revive_batch files;
SigHashHeap.revive_batch files

17 changes: 10 additions & 7 deletions src/services/inference/context_cache.mli
@@ -14,18 +14,21 @@ class context_cache : object
method read_safe: (Loc.FilenameKey.t -> Context.t option) Expensive.t
end

val add: (Context.t -> unit) Expensive.t
val remove_batch: Utils_js.FilenameSet.t -> unit

class sig_context_cache : object
method find: Loc.FilenameKey.t -> Context.t option
method read: (Loc.FilenameKey.t -> Context.t * Context.t) Expensive.t
method read_safe:
(Loc.FilenameKey.t -> (Context.t * Context.t) option) Expensive.t
end

val add: (Context.t -> unit) Expensive.t
val add_sig: (Context.t -> unit) Expensive.t
val add_sig_on_diff: (Context.t -> SigHash.t -> bool) Expensive.t
val remove_batch: Utils_js.FilenameSet.t -> unit
val remove_sig_batch: Utils_js.FilenameSet.t -> unit
val oldify_sig_batch: Utils_js.FilenameSet.t -> unit
val revive_sig_batch: Utils_js.FilenameSet.t -> unit
val remove_old_sig_batch: Utils_js.FilenameSet.t -> unit

val find_leader: Loc.FilenameKey.t -> Loc.FilenameKey.t

val add_merge_on_diff: (Context.t list -> SigHash.t -> bool) Expensive.t
val oldify_merge_batch: Utils_js.FilenameSet.t -> unit
val revive_merge_batch: Utils_js.FilenameSet.t -> unit
val remove_old_merge_batch: Utils_js.FilenameSet.t -> unit
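
Taken together, the new interface above supports an oldify / merge / revive-or-discard cycle during a recheck. A condensed sketch of a caller, based only on the signatures shown (the actual commit spreads these steps across types_js.ml, merge_service.ml, and merge_stream.ml, and passes different file sets at each step):

  let remerge_component ~to_merge component_cxs md5 =
    (* set the current leader/sig entries aside before re-merging *)
    Context_cache.oldify_merge_batch to_merge;
    (* merge the component; [changed] reports whether a fresh signature
       (and leader mapping) was installed *)
    let changed =
      Context_cache.add_merge_on_diff ~audit:Expensive.ok component_cxs md5
    in
    if changed then
      (* the oldified entries are stale: drop them *)
      Context_cache.remove_old_merge_batch to_merge
    else
      (* nothing changed: bring back the old entries, old leader included *)
      Context_cache.revive_merge_batch to_merge
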
29 changes: 5 additions & 24 deletions src/services/inference/merge_service.ml
@@ -10,12 +10,6 @@

open Utils_js

module LeaderHeap = SharedMem_js.WithCache (Loc.FilenameKey) (struct
type t = filename
let prefix = Prefix.make()
let description = "Leader"
end)

(* Warn on a missing required module resolved_r referenced as r in context cx.
TODO maybe make this suppressable
@@ -98,11 +92,7 @@ let merge_strict_context ~options cache component_cxs =
orig_sig_cxs, sig_cxs,
(impl sig_cx) :: impls, res, decls
| None ->
let file =
try LeaderHeap.find_unsafe file
with Not_found ->
raise (Key_not_found ("LeaderHeap", (string_of_filename file)))
in
let file = Context_cache.find_leader file in
begin match sig_cache#find file with
| Some sig_cx ->
orig_sig_cxs, sig_cxs,
@@ -144,8 +134,8 @@ let merge_strict_component ~options = function
are going to reuse the existing signature for the file, so we must be
careful that such a signature actually exists! This holds because a
skipped file cannot be a direct dependency, so its dependencies cannot
change, which in particular means that it belongs to, and leads, the same
component. *)
change, which in particular means that it belongs to the same component,
although possibly with a different leader that has the signature. *)
file, Errors.ErrorSet.empty, None
| Merge_stream.Component component ->
(* A component may have several files: there's always at least one, and
@@ -179,7 +169,8 @@ let merge_strict_component ~options = function
Context.remove_all_errors cx;
Context.clear_intermediates cx;

let diff = Context_cache.add_sig_on_diff ~audit:Expensive.ok cx md5 in
let diff = Context_cache.add_merge_on_diff ~audit:Expensive.ok
component_cxs md5 in
file, errors, Some diff
)
else file, Errors.ErrorSet.empty, Some true
@@ -249,8 +240,6 @@ let merge_strict ~options ~workers ~save_errors
) acc component
) component_map FilenameMap.empty
in
(* store leaders to a heap; used when rechecking *)
leader_map |> FilenameMap.iter LeaderHeap.add;
(* lift recheck map from files to leaders *)
let recheck_leader_map = FilenameMap.map (
List.exists (fun f -> FilenameMap.find_unsafe f recheck_map)
@@ -269,11 +258,3 @@
(* save *)
save_errors files errsets
)

let remove_batch to_merge =
LeaderHeap.remove_batch to_merge;
Context_cache.remove_sig_batch to_merge

let oldify_batch to_merge =
LeaderHeap.remove_batch to_merge;
Context_cache.oldify_sig_batch to_merge
8 changes: 0 additions & 8 deletions src/services/inference/merge_service.mli
@@ -23,11 +23,3 @@ val merge_strict:
filename list list IMap.t ->
bool FilenameMap.t ->
unit

val remove_batch:
FilenameSet.t ->
unit

val oldify_batch:
FilenameSet.t ->
unit
26 changes: 18 additions & 8 deletions src/services/inference/merge_stream.ml
@@ -45,6 +45,9 @@ let blocked = ref 0
(* For each leader, maps other leaders that are dependent on it. *)
let dependents = ref FilenameMap.empty

(* For each leader, maps the files in its component *)
let components = ref FilenameMap.empty

(* For each leader, specifies whether to recheck its component *)
let to_recheck: bool FilenameMap.t ref = ref FilenameMap.empty

@@ -67,18 +70,18 @@ type element =
let rev_append_triple (x1, y1, z1) (x2, y2, z2) =
List.rev_append x1 x2, List.rev_append y1 y2, List.rev_append z1 z2

let component (f, diff) =
if diff then Component (FilenameMap.find_unsafe f !components)
else Skip f

(* leader_map is a map from files to leaders *)
(* component_map is a map from leaders to components *)
(* dependency_graph is a map from files to dependencies *)
let make dependency_graph leader_map component_map recheck_leader_map =
components := component_map;
to_recheck := recheck_leader_map;
(* TODO: clear or replace state *)
let procs = Sys_utils.nbr_procs in
let leader f = FilenameMap.find_unsafe f leader_map in
let component (f, diff) =
if diff then Component (FilenameMap.find_unsafe f component_map)
else Skip f in

let leader f = FilenameMap.find_unsafe f leader_map in
let dependency_dag = FilenameMap.fold (fun f fs dependency_dag ->
let leader_f = leader f in
let dep_leader_fs = match FilenameMap.get leader_f dependency_dag with
@@ -107,6 +110,7 @@ let make dependency_graph leader_map component_map recheck_leader_map =
(* TODO: remember reverse dependencies to quickly calculate remerge sets *)
dependents := Sort_js.reverse dependency_dag;

let procs = Sys_utils.nbr_procs in
fun () ->
let jobs = List.length !stream in
if jobs = 0 && !blocked <> 0 then MultiWorker.Wait
@@ -145,8 +149,14 @@ let join =
fun res acc ->
let merged, _, unchanged = res in
let changed = List.filter (fun f -> not (List.mem f unchanged)) merged in
Context_cache.revive_sig_batch (FilenameSet.of_list unchanged);
Context_cache.remove_old_sig_batch (FilenameSet.of_list changed);
List.iter (fun leader_f ->
let fs = FilenameMap.find_unsafe leader_f !components in
Context_cache.revive_merge_batch (FilenameSet.of_list fs);
) unchanged;
List.iter (fun leader_f ->
let fs = FilenameMap.find_unsafe leader_f !components in
Context_cache.remove_old_merge_batch (FilenameSet.of_list fs);
) changed;
push (List.rev_append
(List.rev_map (fun leader_f -> (leader_f, true)) changed)
(List.rev_map (fun leader_f -> (leader_f, false)) unchanged)
2 changes: 1 addition & 1 deletion src/services/inference/types_js.ml
@@ -581,7 +581,7 @@ let recheck genv env modified =

(* to_merge is inferred files plus all dependents. prep for re-merge *)
let to_merge = FilenameSet.union all_deps modified_files in
Merge_service.oldify_batch to_merge;
Context_cache.oldify_merge_batch to_merge;
(** TODO [perf]: Consider `aggressive **)
SharedMem_js.collect genv.ServerEnv.options `gentle;

5 changes: 1 addition & 4 deletions src/typing/sort_js.ml
@@ -137,10 +137,7 @@ let tarjan state =
each partition, by height. *)
let partition heights components =
FilenameMap.fold (fun m c map ->
(* Although the same graph always returns the same components, how a
component is split between m and c is non-deterministic. We can restore
determinism by sorting. *)
let mc = List.sort Pervasives.compare (m::c) in
let mc = m::c in
let height = FilenameMap.find_unsafe m heights in
match IMap.get height map with
| None -> IMap.add height [mc] map
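
The deleted comment above points at the underlying non-determinism: the same strongly connected component can be reported with a different representative m depending on where the traversal enters it, so the head of m::c (the leader) is not stable across runs. A toy illustration, not tied to Flow's data structures:

  (* The same two-file cycle, entered from different roots, yields a
     different head-of-component and therefore a different leader. *)
  let leader_of ~root =
    let m, c = match root with
      | "a.js" -> "a.js", ["b.js"]  (* traversal entered the cycle at a.js *)
      | _      -> "b.js", ["a.js"]  (* traversal entered the cycle at b.js *)
    in
    List.hd (m :: c)

  let () =
    assert (leader_of ~root:"a.js" = "a.js");
    assert (leader_of ~root:"b.js" = "b.js")

D4339309 hid this variation by sorting m::c; this commit instead lets the leader vary and records the current choice in LeaderHeap, updating it when it changes.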
