forked from facebook/flow
-
Notifications
You must be signed in to change notification settings - Fork 0
/
commandHandler.ml
2212 lines (2101 loc) · 91.6 KB
/
commandHandler.ml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Base.Result
open ServerEnv
open Utils_js
open Lsp
(* Local alias for Lwt's map operator: [p >|= f] applies [f] to the resolved
   value of promise [p]. *)
let ( >|= ) = Lwt.( >|= )
(* Type-check [content] for [file], consulting [type_contents_cache] when one
   is provided. The check is wrapped in [lazy] so that on a cache hit the
   expensive call to [Types_js.type_contents] is never forced. *)
let type_contents_with_cache ~options ~env ~profiling ~type_contents_cache content file =
  let lazy_result = lazy (Types_js.type_contents ~options ~env ~profiling content file) in
  match type_contents_cache with
  | None -> Lazy.force lazy_result
  | Some cache -> FilenameCache.with_cache file lazy_result cache
(* Log whether the server currently has errors, then flush stdout so the
   message appears immediately. Note OCaml precedence: the [if] binds only the
   two [Hh_logger.info] calls; [flush stdout] runs unconditionally. *)
let status_log errors =
  if Errors.ConcreteLocPrintableErrorSet.is_empty errors then
    Hh_logger.info "Status: OK"
  else
    Hh_logger.info "Status: Error";
  flush stdout
(* Package collated errors into the server protocol response: [NO_ERRORS] when
   every collection is empty, otherwise an [ERRORS] payload. *)
let convert_errors ~errors ~warnings ~suppressed_errors =
  let nothing_to_report =
    Errors.ConcreteLocPrintableErrorSet.is_empty errors
    && Errors.ConcreteLocPrintableErrorSet.is_empty warnings
    && suppressed_errors = []
  in
  if nothing_to_report then
    ServerProt.Response.NO_ERRORS
  else
    ServerProt.Response.ERRORS { errors; warnings; suppressed_errors }
(* Render one parse error (location + message) as a JSON object suitable for
   telemetry logging. *)
let json_of_parse_error =
  let open Hh_json in
  let int_json n = JSON_Number (string_of_int n) in
  let position_json p =
    JSON_Object [("line", int_json p.Loc.line); ("column", int_json p.Loc.column)]
  in
  let loc_json loc =
    JSON_Object [("start", position_json loc.Loc.start); ("end", position_json loc.Loc._end)]
  in
  fun (loc, err) ->
    JSON_Object [("loc", loc_json loc); ("message", JSON_String (Parse_error.PP.error err))]
(* If there were any parse errors, prepend the first one (as JSON) and the
   total count onto the accumulated JSON props; otherwise return [acc]
   untouched. *)
let fold_json_of_parse_errors parse_errors acc =
  match parse_errors with
  | [] -> acc
  | first_error :: _ ->
    let count = List.length parse_errors in
    ("parse_error", json_of_parse_error first_error)
    :: ("parse_error_count", Hh_json.JSON_Number (string_of_int count))
    :: acc
(* Compute the response for `flow status`: either a DIRECTORY_MISMATCH (the
   client is talking to a server rooted elsewhere) or the collated
   errors/warnings, plus lazy-mode stats. Logs status and emits telemetry as
   side effects. *)
let get_status ~reader genv env client_root =
  let options = genv.ServerEnv.options in
  let server_root = Options.root options in
  let lazy_stats = Rechecker.get_lazy_stats ~options env in
  let status_response =
    if server_root <> client_root then
      ServerProt.Response.DIRECTORY_MISMATCH
        { ServerProt.Response.server = server_root; ServerProt.Response.client = client_root }
    else
      (* collate errors by origin *)
      let (errors, warnings, suppressed_errors) = ErrorCollator.get ~reader ~options env in
      (* Warnings are only reported if the options ask for them. *)
      let warnings =
        if Options.should_include_warnings options then
          warnings
        else
          Errors.ConcreteLocPrintableErrorSet.empty
      in
      (* Likewise, suppressed errors are only included on request. *)
      let suppressed_errors =
        if Options.include_suppressions options then
          suppressed_errors
        else
          []
      in
      (* TODO: check status.directory *)
      status_log errors;
      FlowEventLogger.status_response
        ~num_errors:(Errors.ConcreteLocPrintableErrorSet.cardinal errors);
      convert_errors ~errors ~warnings ~suppressed_errors
  in
  (status_response, lazy_stats)
(* Compute autocomplete results at [cursor] in [contents]. [filename] falls
   back to "-" (stdin) when absent. Returns the results (or an error) together
   with JSON telemetry describing the outcome. *)
let autocomplete ~trigger_character ~reader ~options ~env ~profiling ~filename ~contents ~cursor =
  let path =
    match filename with
    | Some filename -> filename
    | None -> "-"
  in
  let path = File_key.SourceFile path in
  (* Splice an autocomplete token into the buffer at the cursor so the
     parser/checker can see where completion was requested; [broader_context]
     captures surrounding text for logging. *)
  let (contents, broader_context) =
    let (line, column) = cursor in
    AutocompleteService_js.add_autocomplete_token contents line column
  in
  Autocomplete_js.autocomplete_set_hooks trigger_character;
  let%lwt check_contents_result = Types_js.type_contents ~options ~env ~profiling contents path in
  (* Hooks must be unset regardless of whether the check succeeded. *)
  Autocomplete_js.autocomplete_unset_hooks ();
  let initial_json_props =
    let open Hh_json in
    [
      ("ac_trigger", JSON_String (Option.value trigger_character ~default:"None"));
      ("broader_context", JSON_String broader_context);
    ]
  in
  match check_contents_result with
  | Error err ->
    (* The contents failed to check at all; log the failure mode. *)
    let json_data_to_log =
      let open Hh_json in
      JSON_Object
        ( ("errors", JSON_Array [JSON_String err])
        :: ("result", JSON_String "FAILURE_CHECK_CONTENTS")
        :: ("count", JSON_Number "0")
        :: initial_json_props )
    in
    Lwt.return (Error err, Some json_data_to_log)
  | Ok (cx, info, file_sig, tast, parse_errors) ->
    Profiling_js.with_timer_lwt profiling ~timer:"GetResults" ~f:(fun () ->
        try_with_json2 (fun () ->
            let open AutocompleteService_js in
            let (ac_type_string, results_res) =
              let cursor_loc =
                let (line, column) = cursor in
                Loc.make path line column
              in
              autocomplete_get_results ~reader cx file_sig tast trigger_character cursor_loc
            in
            let json_props_to_log =
              ("ac_type", Hh_json.JSON_String ac_type_string)
              :: ("docblock", Docblock.json_of_docblock info)
              :: initial_json_props
            in
            (* Translate the service result into the response plus the
               SUCCESS/PARTIAL/FAILURE telemetry classification. *)
            let (response, json_props_to_log) =
              let open Hh_json in
              match results_res with
              | AcResult { results; errors_to_log } ->
                let result_string =
                  match (results, errors_to_log) with
                  | (_, []) -> "SUCCESS"
                  | ([], _ :: _) -> "FAILURE"
                  | (_ :: _, _ :: _) -> "PARTIAL"
                in
                ( Ok results,
                  ("result", JSON_String result_string)
                  :: ("count", JSON_Number (results |> List.length |> string_of_int))
                  :: ("errors", JSON_Array (List.map (fun s -> JSON_String s) errors_to_log))
                  :: json_props_to_log )
              | AcEmpty reason ->
                ( Ok [],
                  ("result", JSON_String "SUCCESS")
                  :: ("count", JSON_Number "0")
                  :: ("empty_reason", JSON_String reason)
                  :: json_props_to_log )
              | AcFatalError error ->
                ( Error error,
                  ("result", JSON_String "FAILURE")
                  :: ("errors", JSON_Array [JSON_String error])
                  :: json_props_to_log )
            in
            let json_props_to_log = fold_json_of_parse_errors parse_errors json_props_to_log in
            Lwt.return (response, Some (Hh_json.JSON_Object json_props_to_log))))
(* Type-check the provided file contents and return the resulting errors.
   Only supports [FileContent] input; [FileName] input is unimplemented.
   Without [force], files lacking an @flow docblock report [NOT_COVERED]. *)
let check_file ~options ~env ~profiling ~force file_input =
  let file = File_input.filename_of_file_input file_input in
  match file_input with
  | File_input.FileName _ -> failwith "Not implemented"
  | File_input.FileContent (_, content) ->
    let should_check =
      if force then
        true
      else
        (* Parse just the docblock to see whether the file opts into Flow. *)
        let (_, docblock) =
          Parsing_service_js.(parse_docblock docblock_max_tokens (File_key.SourceFile file) content)
        in
        Docblock.is_flow docblock
    in
    if should_check then
      let file = File_key.SourceFile file in
      let%lwt (_, errors, warnings) =
        Types_js.typecheck_contents ~options ~env ~profiling content file
      in
      Lwt.return (convert_errors ~errors ~warnings ~suppressed_errors:[])
    else
      Lwt.return ServerProt.Response.NOT_COVERED
(* Answer a type-at-position query ("what is the type at line/col?").
   Returns the location and (optionally) the normalized type, plus JSON
   telemetry. [type_contents_cache] lets repeated queries on the same
   contents reuse a prior check. *)
let infer_type
    ~(options : Options.t)
    ~(env : ServerEnv.env)
    ~(profiling : Profiling_js.running)
    ~type_contents_cache
    ((file_input, line, col, verbose, expand_aliases, omit_targ_defaults, evaluate_type_destructors) :
      File_input.t * int * int * Verbose.t option * bool * bool * bool) :
    ((Loc.t * Ty.t option, string) result * Hh_json.json option) Lwt.t =
  let file = File_input.filename_of_file_input file_input in
  let file = File_key.SourceFile file in
  (* Verbosity is threaded through the options for this one query. *)
  let options = { options with Options.opt_verbose = verbose } in
  match File_input.content_of_file_input file_input with
  | Error e -> Lwt.return (Error e, None)
  | Ok content ->
    let%lwt result =
      try_with_json (fun () ->
          let%lwt type_contents_result =
            type_contents_with_cache ~options ~env ~profiling ~type_contents_cache content file
          in
          let result =
            match type_contents_result with
            | Error str -> Error (str, None)
            | Ok (cx, _info, file_sig, typed_ast, _parse_errors) ->
              Ok
                (Type_info_service.type_at_pos
                   ~cx
                   ~file_sig
                   ~typed_ast
                   ~expand_aliases
                   ~omit_targ_defaults
                   ~evaluate_type_destructors
                   file
                   line
                   col)
          in
          Lwt.return result)
    in
    Lwt.return (split_result result)
(* Insert an inferred type annotation at [target] in the given file contents.
   Mostly plumbing: resolves the file, applies [verbose] to the options, and
   delegates to [Type_info_service.insert_type] under [try_with]. *)
let insert_type
    ~options
    ~env
    ~profiling
    ~file_input
    ~target
    ~verbose
    ~expand_aliases
    ~omit_targ_defaults
    ~location_is_strict
    ~ambiguity_strategy =
  let filename = File_input.filename_of_file_input file_input in
  let file_key = File_key.SourceFile filename in
  let options = { options with Options.opt_verbose = verbose } in
  File_input.content_of_file_input file_input %>>= fun file_content ->
  try_with (fun _ ->
      let%lwt result =
        Type_info_service.insert_type
          ~options
          ~env
          ~profiling
          ~file_key
          ~file_content
          ~target
          ~expand_aliases
          ~omit_targ_defaults
          ~location_is_strict
          ~ambiguity_strategy
      in
      Lwt.return result)
(* Produce automatic fixes for a file's exports. Reads the file contents from
   [input] and delegates to [Type_info_service.autofix_exports]; exceptions
   are converted to [Error] by [try_with]. *)
let autofix_exports ~options ~env ~profiling ~input =
  let filename = File_input.filename_of_file_input input in
  let file_key = File_key.SourceFile filename in
  File_input.content_of_file_input input %>>= fun file_content ->
  try_with (fun _ ->
      let%lwt result =
        Type_info_service.autofix_exports ~options ~env ~profiling ~file_key ~file_content
      in
      Lwt.return result)
(* Gather diagnostic information for `flow rage`: server options, checked-file
   counts, a (truncated) dependency graph, current errors, per-file hash
   freshness for [files] (if given), and the dumped server options. Returns an
   association list of (section name, section text). *)
let collect_rage ~options ~reader ~env ~files =
  let items = [] in
  (* options *)
  let data = Printf.sprintf "lazy_mode=%s\n" Options.(lazy_mode options |> lazy_mode_to_string) in
  let items = ("options", data) :: items in
  (* env: checked files *)
  let data =
    Printf.sprintf
      "%s\n\n%s\n"
      (CheckedSet.debug_counts_to_string env.checked_files)
      (CheckedSet.debug_to_string ~limit:200 env.checked_files)
  in
  let items = ("env.checked_files", data) :: items in
  (* env: dependency graph, truncated to keep the rage output manageable
     (20 deps per file, 200 files total) *)
  let dependency_to_string (file, deps) =
    let file = File_key.to_string file in
    let deps =
      Utils_js.FilenameSet.elements deps
      |> Base.List.map ~f:File_key.to_string
      |> ListUtils.first_upto_n 20 (fun t -> Some (Printf.sprintf " ...%d more" t))
      |> String.concat ","
    in
    file ^ ":" ^ deps ^ "\n"
  in
  let dependencies =
    Dependency_info.implementation_dependency_graph env.ServerEnv.dependency_info
    |> Utils_js.FilenameGraph.to_map
    |> Utils_js.FilenameMap.bindings
    |> Base.List.map ~f:dependency_to_string
    |> ListUtils.first_upto_n 200 (fun t -> Some (Printf.sprintf "[shown 200/%d]\n" t))
    |> String.concat ""
  in
  let data = "DEPENDENCIES:\n" ^ dependencies in
  let items = ("env.dependencies", data) :: items in
  (* env: errors *)
  let (errors, warnings, _) = ErrorCollator.get ~reader ~options env in
  let json =
    Errors.Json_output.json_of_errors_with_context
      ~strip_root:None
      ~stdin_file:None
      ~offset_kind:Offset_utils.Utf8
      ~suppressed_errors:[]
      ~errors
      ~warnings
      ()
  in
  let data = "ERRORS:\n" ^ Hh_json.json_to_multiline json in
  let items = ("env.errors", data) :: items in
  (* Checking if file hashes are up to date *)
  let items =
    Option.value_map files ~default:items ~f:(fun files ->
        let buf = Buffer.create 1024 in
        Printf.bprintf
          buf
          "Does the content on the disk match the most recent version of the file?\n\n";
        List.iter
          (fun file ->
            (* TODO - this isn't exactly right. It could be something else, right? *)
            let file_key = File_key.SourceFile file in
            let file_state =
              if not (FilenameSet.mem file_key env.ServerEnv.files) then
                "FILE NOT PARSED BY FLOW (likely ignored implicitly or explicitly)"
              else
                match Sys_utils.cat_or_failed file with
                | None -> "ERROR! FAILED TO READ"
                | Some content ->
                  if Parsing_service_js.does_content_match_file_hash ~reader file_key content then
                    "OK"
                  else
                    "HASH OUT OF DATE"
            in
            Printf.bprintf buf "%s: %s\n" file file_state)
          files;
        ("file hash check", Buffer.contents buf) :: items)
  in
  (* Dump the full server options via the logging helper. *)
  let items =
    let buf = Buffer.create 127 in
    let log str =
      Buffer.add_string buf str;
      Buffer.add_char buf '\n'
    in
    LoggingUtils.dump_server_options ~server_options:options ~log;
    ("server_options", Buffer.contents buf) :: items
  in
  items
(* Dump the inferred types for every location in the given file's contents.
   Thin wrapper over [Type_info_service.dump_types] with error capture. *)
let dump_types ~options ~env ~profiling ~expand_aliases ~evaluate_type_destructors file_input =
  let file = File_input.filename_of_file_input file_input in
  let file = File_key.SourceFile file in
  File_input.content_of_file_input file_input %>>= fun content ->
  try_with (fun () ->
      Type_info_service.dump_types
        ~options
        ~env
        ~profiling
        ~expand_aliases
        ~evaluate_type_destructors
        file
        content)
(* Compute per-expression coverage for a single file's contents.
   Trust coverage ([trust]) is only meaningful when the server itself runs
   with trust checking enabled, so we refuse early otherwise.
   Fix: the restart hint in the error message was missing its opening quote
   (`--trust-mode=check'`); it now reads `'--trust-mode=check'`, matching the
   quoting style of the lazy-mode message in [batch_coverage]. *)
let coverage ~options ~env ~profiling ~force ~trust file_input =
  if Options.trust_mode options = Options.NoTrust && trust then
    Error
      "Coverage cannot be run in trust mode if the server is not in trust mode. \n\nRestart the Flow server with '--trust-mode=check' to enable this command."
    |> Lwt.return
  else
    let file = File_input.filename_of_file_input file_input in
    let file = File_key.SourceFile file in
    File_input.content_of_file_input file_input %>>= fun content ->
    try_with (fun () ->
        Types_js.type_contents ~options ~env ~profiling content file
        >|= Base.Result.map ~f:(fun (cx, _, _, typed_ast, _) ->
                Type_info_service.coverage ~cx ~typed_ast ~force ~trust file content))
(* Compute coverage for every already-checked file matching one of the [batch]
   path prefixes. Requires trust mode when [trust] is set, and a non-lazy
   server (a lazy server may not have checked all files, giving misleading
   results).
   Fix: the trust-mode error message was missing the opening quote around
   `--trust-mode=check`; now quoted consistently with the lazy-mode message
   below. *)
let batch_coverage ~options ~env ~trust ~batch =
  if Options.trust_mode options = Options.NoTrust && trust then
    Error
      "Batch Coverage cannot be run in trust mode if the server is not in trust mode. \n\nRestart the Flow server with '--trust-mode=check' to enable this command."
    |> Lwt.return
  else if Options.lazy_mode options <> Options.NON_LAZY_MODE then
    Error
      "Batch coverage cannot be run in lazy mode.\n\nRestart the Flow server with '--lazy-mode none' to enable this command."
    |> Lwt.return
  else
    let is_checked key = CheckedSet.mem key env.checked_files in
    (* Keep only files that are checked and fall under a requested prefix. *)
    let filter key = Base.List.exists ~f:(fun elt -> Files.is_prefix elt key) batch in
    let coverage_map =
      FilenameMap.filter
        (fun key _ -> is_checked key && File_key.to_string key |> filter)
        env.coverage
    in
    let response =
      FilenameMap.fold (fun key coverage -> List.cons (key, coverage)) coverage_map []
    in
    Ok response |> Lwt.return
(* Flatten the dependency graph from a map-of-sets into an association list of
   (file name, dependency names) for serialization to the client. *)
let serialize_graph graph =
  let add_entry file dep_set acc =
    let file_name = File_key.to_string file in
    let dep_names =
      FilenameSet.fold (fun dep acc -> File_key.to_string dep :: acc) dep_set []
    in
    (file_name, dep_names) :: acc
  in
  FilenameMap.fold add_entry graph []
(* Write the (serialized) dependency graph to [outfile]. [strip_root] makes
   paths relative to [root]; [types_only] selects the signature dependency
   graph instead of the implementation one. *)
let output_dependencies ~env root strip_root types_only outfile =
  let strip_root =
    if strip_root then
      Files.relative_path root
    else
      (* identity: leave paths absolute *)
      fun x -> x
  in
  let dep_graph =
    if types_only then
      Dependency_info.sig_dependency_graph
    else
      Dependency_info.implementation_dependency_graph
  in
  let graph = serialize_graph (dep_graph env.ServerEnv.dependency_info |> FilenameGraph.to_map) in
  Hh_logger.info "printing dependency graph to %s\n" outfile;
  let%lwt out = Lwt_io.open_file ~mode:Lwt_io.Output outfile in
  let%lwt () = LwtUtils.output_graph out strip_root graph in
  let%lwt () = Lwt_io.close out in
  ok_unit |> Lwt.return
(* Return the dependency cycle (strongly connected component) containing file
   [fn], as a serialized subgraph restricted to in-cycle files. [types_only]
   selects the signature dependency graph. *)
let get_cycle ~env fn types_only =
  (* Re-calculate SCC *)
  let parsed = env.ServerEnv.files in
  let dependency_info = env.ServerEnv.dependency_info in
  let dependency_graph =
    if types_only then
      Dependency_info.sig_dependency_graph dependency_info
    else
      Dependency_info.implementation_dependency_graph dependency_info
  in
  Lwt.return
    (Ok
       (let components = Sort_js.topsort ~roots:parsed (FilenameGraph.to_map dependency_graph) in
        (* Get component for target file *)
        let component = List.find (Nel.mem ~equal:File_key.equal fn) components in
        (* Restrict dep graph to only in-cycle files *)
        Nel.fold_left
          (fun acc f ->
            Option.fold (FilenameGraph.find_opt f dependency_graph) ~init:acc ~f:(fun acc deps ->
                (* Of f's deps, keep only those inside the same component. *)
                let subdeps =
                  FilenameSet.filter (fun f -> Nel.mem ~equal:File_key.equal f component) deps
                in
                if FilenameSet.is_empty subdeps then
                  acc
                else
                  FilenameMap.add f subdeps acc))
          FilenameMap.empty
          component
        |> serialize_graph))
(* Run the suggest service on a file's contents, producing type-annotation
   suggestions as a patch. Service-level errors become [Suggest_Error] rather
   than a failure of the command itself. *)
let suggest ~options ~env ~profiling file =
  let file_name = File_input.filename_of_file_input file in
  File_input.content_of_file_input file %>>= fun file_content ->
  try_with (fun _ ->
      let%lwt result = Type_info_service.suggest ~options ~env ~profiling file_name file_content in
      match result with
      | Ok (tc_errors, tc_warnings, suggest_warnings, file_patch) ->
        Lwt.return
          (Ok
             (ServerProt.Response.Suggest_Ok
                { tc_errors; tc_warnings; suggest_warnings; file_patch }))
      | Error errors -> Lwt.return (Ok (ServerProt.Response.Suggest_Error errors)))
(* Resolve [moduleref] as imported from [filename] and return the providing
   file, if any. Uses a synthetic loc within the importing file for the
   resolution. *)
let find_module ~options ~reader (moduleref, filename) =
  let file = File_key.SourceFile filename in
  let loc = { Loc.none with Loc.source = Some file } in
  let module_name =
    Module_js.imported_module
      ~options
      ~reader:(Abstract_state_reader.State_reader reader)
      ~node_modules_containers:!Files.node_modules_containers
      file
      (Nel.one (ALoc.of_loc loc))
      moduleref
  in
  Module_heaps.Reader.get_file ~reader ~audit:Expensive.warn module_name
(* Convert the find-refs service result into the protocol shape by dropping
   each reference's kind (keeping only its location). *)
let convert_find_refs_result (result : FindRefsTypes.find_refs_ok) :
    ServerProt.Response.find_refs_success =
  match result with
  | None -> None
  | Some (name, refs) -> Some (name, Base.List.map refs ~f:snd)
(* Find refs is a really weird command. Whereas other commands will cancel themselves if they find
 * unchecked code, find refs will focus that code and keep chugging along. It may therefore change
 * the env. Furthermore, it is written using a lot of `result`'s, which make it really hard to
 * properly pass through the env. Therefore, it uses an `ServerEnv.env ref` instead of an
 * `ServerEnv.env`. *)
let find_global_refs ~reader ~genv ~env ~profiling (file_input, line, col, multi_hop) =
  (* The ref lets FindRefs_js mutate the env as it focuses files. *)
  let env = ref env in
  let%lwt (result, dep_count) =
    FindRefs_js.find_global_refs ~reader ~genv ~env ~profiling ~file_input ~line ~col ~multi_hop
  in
  (* Thread the possibly-updated env back to the caller. *)
  let env = !env in
  let result = Base.Result.map result ~f:convert_find_refs_result in
  Lwt.return (env, result, dep_count)
(* Find references within a single file and convert the service result into
   the protocol shape. Errors pass through unchanged. *)
let find_local_refs ~reader ~options ~env ~profiling (file_input, line, col) =
  let%lwt result =
    FindRefs_js.find_local_refs ~reader ~options ~env ~profiling ~file_input ~line ~col
  in
  Lwt.return (Base.Result.map result ~f:convert_find_refs_result)
(* This returns result, json_data_to_log, where json_data_to_log is the json data from
 * getdef_get_result which we end up using *)
let get_def_of_check_result ~options ~reader ~profiling ~check_result (file, line, col) =
  let loc = Loc.make file line col in
  let (cx, _, file_sig, typed_ast, parse_errors) = check_result in
  Profiling_js.with_timer_lwt profiling ~timer:"GetResult" ~f:(fun () ->
      try_with_json2 (fun () ->
          Lwt.return
            ( GetDef_js.get_def ~options ~reader cx file_sig typed_ast loc
            |> fun (result, request_history) ->
            let open GetDef_js.Get_def_result in
            (* Telemetry props shared by every outcome: the get-def request
               history plus any parse errors. *)
            let json_props =
              [
                ( "request_history",
                  Hh_json.JSON_Array
                    (Base.List.map ~f:(fun str -> Hh_json.JSON_String str) request_history) );
              ]
              |> fold_json_of_parse_errors parse_errors
            in
            (* Map each get-def outcome to (response, telemetry). Note
               [Bad_loc] is still Ok — it returns Loc.none rather than
               failing the request. *)
            match result with
            | Def loc ->
              ( Ok loc,
                Some
                  (Hh_json.JSON_Object (("result", Hh_json.JSON_String "SUCCESS") :: json_props))
              )
            | Partial (loc, msg) ->
              ( Ok loc,
                Some
                  (Hh_json.JSON_Object
                     ( ("result", Hh_json.JSON_String "PARTIAL_FAILURE")
                     :: ("error", Hh_json.JSON_String msg)
                     :: json_props )) )
            | Bad_loc ->
              ( Ok Loc.none,
                Some
                  (Hh_json.JSON_Object (("result", Hh_json.JSON_String "BAD_LOC") :: json_props))
              )
            | Def_error msg ->
              ( Error msg,
                Some
                  (Hh_json.JSON_Object
                     ( ("result", Hh_json.JSON_String "FAILURE")
                     :: ("error", Hh_json.JSON_String msg)
                     :: json_props )) ) )))
(* Answer a get-definition query: type-check the file's contents (via the
   optional cache) and delegate to [get_def_of_check_result]. A failed check
   is reported as an error with matching telemetry. *)
let get_def ~options ~reader ~env ~profiling ~type_contents_cache (file_input, line, col) =
  let filename = File_input.filename_of_file_input file_input in
  let file = File_key.SourceFile filename in
  let%lwt check_result =
    File_input.content_of_file_input file_input %>>= fun content ->
    type_contents_with_cache ~options ~env ~profiling ~type_contents_cache content file
  in
  match check_result with
  | Error msg ->
    Lwt.return (Error msg, Some (Hh_json.JSON_Object [("error", Hh_json.JSON_String msg)]))
  | Ok check_result ->
    get_def_of_check_result ~options ~reader ~profiling ~check_result (file, line, col)
(* Interpret a user-supplied module string: if it names a Flow file on disk it
   becomes a [Modulename.Filename], otherwise a haste-style
   [Modulename.String]. *)
let module_name_of_string ~options module_name_str =
  let file_options = Options.file_options options in
  let normalized_path = Path.make module_name_str |> Path.to_string in
  if Files.is_flow_file ~options:file_options normalized_path then
    Modulename.Filename (File_key.SourceFile normalized_path)
  else
    Modulename.String module_name_str
(* For each requested module, collect what it imports. Returns a pair:
   a map from module name to (imported module -> import locations), and the
   set of requested modules that exist but were not checked by Flow. *)
let get_imports ~options ~reader module_names =
  let add_to_results (map, non_flow) module_name_str =
    let module_name = module_name_of_string ~options module_name_str in
    match Module_heaps.Reader.get_file ~reader ~audit:Expensive.warn module_name with
    | Some file ->
      (* We do not process all modules which are stored in our module
       * database. In case we do not process a module its requirements
       * are not kept track of. To avoid confusing results we notify the
       * client that these modules have not been processed.
       *)
      let { Module_heaps.checked; _ } =
        Module_heaps.Reader.get_info_unsafe ~reader ~audit:Expensive.warn file
      in
      if checked then
        let { Module_heaps.resolved_modules; _ } =
          Module_heaps.Reader.get_resolved_requires_unsafe ~reader ~audit:Expensive.warn file
        in
        let fsig = Parsing_heaps.Reader.get_file_sig_unsafe ~reader file in
        let requires = File_sig.With_Loc.(require_loc_map fsig.module_sig) in
        (* Re-key the requires by resolved module name instead of the raw
           module reference string. *)
        let mlocs =
          SMap.fold
            (fun mref locs acc ->
              let m = SMap.find mref resolved_modules in
              Modulename.Map.add m locs acc)
            requires
            Modulename.Map.empty
        in
        (SMap.add module_name_str mlocs map, non_flow)
      else
        (map, SSet.add module_name_str non_flow)
    | None ->
      (* We simply ignore non existent modules *)
      (map, non_flow)
  in
  (* Our result is a tuple. The first element is a map from module names to
   * modules imported by them and their locations of import. The second
   * element is a set of modules which are not marked for processing by
   * flow. *)
  List.fold_left add_to_results (SMap.empty, SSet.empty) module_names
(* Persist the server's saved state to [saved_state_filename]. Exceptions are
   captured by [try_with] and surfaced as [Error]. *)
let save_state ~saved_state_filename ~genv ~env ~profiling =
  try_with (fun () ->
      let%lwt () = Saved_state.save ~saved_state_filename ~genv ~env ~profiling in
      Lwt.return (Ok ()))
(* Wrap [autocomplete] in the AUTOCOMPLETE protocol response, forwarding its
   telemetry unchanged. *)
let handle_autocomplete
    ~trigger_character ~reader ~options ~profiling ~env ~filename ~contents ~cursor =
  autocomplete ~trigger_character ~reader ~options ~env ~profiling ~filename ~contents ~cursor
  >|= fun (result, json_data) -> (ServerProt.Response.AUTOCOMPLETE result, json_data)
(* Wrap [autofix_exports] in the AUTOFIX_EXPORTS protocol response. *)
let handle_autofix_exports ~options ~input ~profiling ~env =
  autofix_exports ~options ~env ~profiling ~input >|= fun result ->
  (ServerProt.Response.AUTOFIX_EXPORTS result, None)
(* Wrap [check_file] in the CHECK_FILE protocol response. *)
let handle_check_file ~options ~force ~input ~profiling ~env =
  check_file ~options ~env ~profiling ~force input >|= fun response ->
  (ServerProt.Response.CHECK_FILE response, None)
(* Wrap [coverage] in the COVERAGE protocol response. *)
let handle_coverage ~options ~force ~input ~trust ~profiling ~env =
  coverage ~options ~env ~profiling ~force ~trust input >|= fun response ->
  (ServerProt.Response.COVERAGE response, None)
(* Wrap [batch_coverage] in the BATCH_COVERAGE protocol response. *)
let handle_batch_coverage ~options ~profiling:_ ~env ~batch ~trust =
  batch_coverage ~options ~env ~trust ~batch >|= fun response ->
  (ServerProt.Response.BATCH_COVERAGE response, None)
(* Wrap [get_cycle] in the CYCLE protocol response; the env is returned
   unchanged. *)
let handle_cycle ~fn ~types_only ~profiling:_ ~env =
  get_cycle ~env fn types_only >|= fun response ->
  (env, ServerProt.Response.CYCLE response, None)
(* Wrap [dump_types] in the DUMP_TYPES protocol response. *)
let handle_dump_types ~options ~input ~expand_aliases ~evaluate_type_destructors ~profiling ~env =
  dump_types ~options ~env ~profiling ~expand_aliases ~evaluate_type_destructors input
  >|= fun response -> (ServerProt.Response.DUMP_TYPES response, None)
(* Wrap the synchronous [find_module] lookup in the FIND_MODULE protocol
   response. *)
let handle_find_module ~options ~reader ~moduleref ~filename ~profiling:_ ~env:_ =
  Lwt.return
    (ServerProt.Response.FIND_MODULE (find_module ~options ~reader (moduleref, filename)), None)
(* Dispatch a find-refs request: project-wide when [global] or [multi_hop] is
   set (which may mutate the env via rechecks), otherwise single-file. Also
   builds JSON telemetry about the outcome. *)
let handle_find_refs ~reader ~genv ~filename ~line ~char ~global ~multi_hop ~profiling ~env =
  let%lwt (env, result, dep_count) =
    if global || multi_hop then
      find_global_refs ~reader ~genv ~env ~profiling (filename, line, char, multi_hop)
    else
      let options = genv.ServerEnv.options in
      let%lwt result = find_local_refs ~reader ~options ~env ~profiling (filename, line, char) in
      (* Local find-refs never changes the env and has no dep count. *)
      Lwt.return (env, result, None)
  in
  (* Telemetry: outcome, whether the search was global, and the dependent-file
     count when known (global search only). *)
  let json_data =
    Some
      (Hh_json.JSON_Object
         ( ( "result",
             Hh_json.JSON_String
               (match result with
               | Ok _ -> "SUCCESS"
               | _ -> "FAILURE") )
         :: ("global", Hh_json.JSON_Bool global)
         ::
         (match dep_count with
         | Some count -> [("deps", Hh_json.JSON_Number (string_of_int count))]
         | None -> []) ))
  in
  Lwt.return (env, ServerProt.Response.FIND_REFS result, json_data)
(* Handle `flow force-recheck`: queue [files] for rechecking (optionally also
   focusing them). With [profile] set, wait for the recheck and merge its
   profiling data into ours; otherwise respond immediately. *)
let handle_force_recheck ~files ~focus ~profile ~profiling =
  let fileset = SSet.of_list files in
  let reason =
    LspProt.(
      match files with
      | [filename] -> Single_file_changed { filename }
      | _ -> Many_files_changed { file_count = List.length files })
  in
  (* `flow force-recheck --focus a.js` not only marks a.js as a focused file, but it also
   * tells Flow that `a.js` has changed. In that case we push a.js to be rechecked and to be
   * focused *)
  let push ?callback files =
    ServerMonitorListenerState.(
      if focus then
        push_files_to_force_focused_and_recheck ?callback ~reason files
      else
        push_files_to_recheck ?metadata:None ?callback ~reason files)
  in
  if profile then (
    (* Block on the recheck so we can report its profiling data. *)
    let (wait_for_recheck_thread, wakener) = Lwt.task () in
    push ~callback:(fun profiling -> Lwt.wakeup wakener profiling) fileset;
    let%lwt recheck_profiling = wait_for_recheck_thread in
    Option.iter recheck_profiling ~f:(fun recheck_profiling ->
        Profiling_js.merge ~from:recheck_profiling ~into:profiling);
    Lwt.return (ServerProt.Response.FORCE_RECHECK recheck_profiling, None)
  ) else (
    (* If we're not profiling the recheck, then respond immediately *)
    push fileset;
    Lwt.return (ServerProt.Response.FORCE_RECHECK None, None)
  )
(* Wrap [get_def] in the GET_DEF protocol response, forwarding its telemetry. *)
let handle_get_def ~reader ~options ~filename ~line ~char ~profiling ~env =
  get_def ~reader ~options ~env ~profiling ~type_contents_cache:None (filename, line, char)
  >|= fun (result, json_data) -> (ServerProt.Response.GET_DEF result, json_data)
(* Wrap the synchronous [get_imports] lookup in the GET_IMPORTS protocol
   response. *)
let handle_get_imports ~options ~reader ~module_names ~profiling:_ ~env:_ =
  Lwt.return (ServerProt.Response.GET_IMPORTS (get_imports ~options ~reader module_names), None)
(* Wrap [output_dependencies] in the GRAPH_DEP_GRAPH protocol response; the
   env is returned unchanged. *)
let handle_graph_dep_graph ~root ~strip_root ~outfile ~types_only ~profiling:_ ~env =
  output_dependencies ~env root strip_root types_only outfile >|= fun response ->
  (env, ServerProt.Response.GRAPH_DEP_GRAPH response, None)
(* Wrap [infer_type] (type-at-position) in the INFER_TYPE protocol response,
   forwarding its telemetry. No cache is used for one-shot requests. *)
let handle_infer_type
    ~options
    ~input
    ~line
    ~char
    ~verbose
    ~expand_aliases
    ~omit_targ_defaults
    ~evaluate_type_destructors
    ~profiling
    ~env =
  let%lwt (result, json_data) =
    infer_type
      ~options
      ~env
      ~profiling
      ~type_contents_cache:None
      (input, line, char, verbose, expand_aliases, omit_targ_defaults, evaluate_type_destructors)
  in
  Lwt.return (ServerProt.Response.INFER_TYPE result, json_data)
(* Wrap [insert_type] in the INSERT_TYPE protocol response. *)
let handle_insert_type
    ~options
    ~file_input
    ~target
    ~verbose
    ~expand_aliases
    ~omit_targ_defaults
    ~location_is_strict
    ~ambiguity_strategy
    ~profiling
    ~env =
  let%lwt result =
    insert_type
      ~options
      ~env
      ~profiling
      ~file_input
      ~target
      ~verbose
      ~expand_aliases
      ~omit_targ_defaults
      ~location_is_strict
      ~ambiguity_strategy
  in
  Lwt.return (ServerProt.Response.INSERT_TYPE result, None)
(* Wrap the synchronous [collect_rage] in the RAGE protocol response. *)
let handle_rage ~reader ~options ~files ~profiling:_ ~env =
  Lwt.return
    (ServerProt.Response.RAGE (collect_rage ~options ~reader ~env ~files:(Some files)), None)
let handle_refactor
    ~reader ~genv ~input:file_input ~line ~char:col ~refactor_variant ~profiling ~env =
  (* Refactor is another weird command that may mutate the env by doing a bunch of rechecking,
   * since that's what find-refs does and refactor delegates to find-refs *)
  ServerProt.Response.(
    (* The ref lets the rename service update the env as it rechecks. *)
    let env = ref env in
    let%lwt result =
      match refactor_variant with
      | ServerProt.Request.RENAME new_name ->
        Refactor_service.rename ~reader ~genv ~env ~profiling ~file_input ~line ~col ~new_name
    in
    let env = !env in
    (* Package the edits (if any) into the protocol's refactor record. *)
    let result =
      result |> Base.Result.map ~f:(Option.map ~f:(fun refactor_edits -> { refactor_edits }))
    in
    Lwt.return (env, REFACTOR result, None))
(* Compute the status synchronously and return it as an already-resolved
   promise; the env is returned unchanged. *)
let handle_status ~reader ~genv ~client_root ~profiling:_ ~env =
  let (status_response, lazy_stats) = get_status ~reader genv env client_root in
  let response = ServerProt.Response.STATUS { status_response; lazy_stats } in
  Lwt.return (env, response, None)
(* Wrap [suggest] in the SUGGEST protocol response. *)
let handle_suggest ~options ~input ~profiling ~env =
  suggest ~options ~env ~profiling input >|= fun result ->
  (ServerProt.Response.SUGGEST result, None)
(* Wrap [save_state] in the SAVE_STATE protocol response; the env is returned
   unchanged. *)
let handle_save_state ~saved_state_filename ~genv ~profiling ~env =
  save_state ~saved_state_filename ~genv ~env ~profiling >|= fun result ->
  (env, ServerProt.Response.SAVE_STATE result, None)
(* Answer an LSP codeAction request for the given document range. Returns an
   empty list when code actions are disabled in the .flowconfig. *)
let find_code_actions ~options ~env ~profiling ~params ~client =
  CodeActionRequest.(
    Flow_lsp_conversions.(
      let { textDocument; range; _ } = params in
      (* The current ide-lsp-server/flow-lsp-client doesn't necessarily get restarted for every
       * project. Checking the option here ensures that the Flow server doesn't do too much work
       * for code action requests on projects where code actions are not enabled in the
       * `.flowconfig`.
       *)
      if not options.Options.opt_lsp_code_actions then
        Lwt.return (Ok [])
      else
        let (file_key, file, loc) = lsp_textDocument_and_range_to_flow textDocument range client in
        match File_input.content_of_file_input file with
        | Error msg -> Lwt.return (Error msg)
        | Ok file_contents ->
          Type_info_service.code_actions_at_loc
            ~options
            ~env
            ~profiling
            ~params
            ~file_key
            ~file_contents
            ~loc))
(* Scheduling policy for an ephemeral command: how it is allowed to run
 * relative to an in-progress recheck. *)
type command_handler =
  (* A command can be handled immediately if it is super duper fast and doesn't require the env.
   * These commands will be handled as soon as we read them off the pipe. Almost nothing should ever
   * be handled immediately *)
  | Handle_immediately of
      (profiling:Profiling_js.running -> (ServerProt.Response.response * Hh_json.json option) Lwt.t)
  (* A command is parallelizable if it passes four conditions
   *
   * 1. It is fast. Running it in parallel will block the current recheck, so it needs to be really
   *    fast.
   * 2. It doesn't use the workers. Currently we can't deal with the recheck using the workers at the
   *    same time as a command using the workers
   * 3. It doesn't return a new env. It really should be just a read-only job
   * 4. It doesn't mind using slightly out of date data. During a recheck, it will be reading the
   *    oldified data
   *)
  | Handle_parallelizable of
      (profiling:Profiling_js.running ->
      env:ServerEnv.env ->
      (ServerProt.Response.response * Hh_json.json option) Lwt.t)
  (* A command is nonparallelizable if it can't be handled immediately or parallelized; its
   * handler may produce a new env, which is threaded back to the server loop. *)
  | Handle_nonparallelizable of
      (profiling:Profiling_js.running ->
      env:ServerEnv.env ->
      (ServerEnv.env * ServerProt.Response.response * Hh_json.json option) Lwt.t)
(* This command is parallelizable, but we will treat it as nonparallelizable if we've been told
 * to wait_for_recheck by the .flowconfig or CLI *)
let mk_parallelizable ~wait_for_recheck ~options f =
  let must_wait =
    match wait_for_recheck with
    | Some wait -> wait
    | None -> Options.wait_for_recheck options
  in
  if not must_wait then
    Handle_parallelizable f
  else
    (* Demote the handler to the nonparallelizable shape: run it sequentially
     * and thread the (unchanged) env through the result triple. *)
    Handle_nonparallelizable
      (fun ~profiling ~env ->
        let%lwt (response, json_data) = f ~profiling ~env in
        Lwt.return (env, response, json_data))
(* This function is called as soon as we read an ephemeral command from the pipe. It decides whether
* the command should be handled immediately or deferred as parallelizable or nonparallelizable.
* This function does NOT run any handling code itself. *)
let get_ephemeral_handler genv command =
let options = genv.options in
let reader = State_reader.create () in
match command with
| ServerProt.Request.AUTOCOMPLETE
{ filename; contents; cursor; trigger_character; wait_for_recheck } ->
mk_parallelizable
~wait_for_recheck
~options
(handle_autocomplete ~trigger_character ~reader ~options ~filename ~contents ~cursor)
| ServerProt.Request.AUTOFIX_EXPORTS { input; verbose; wait_for_recheck } ->
let options = { options with Options.opt_verbose = verbose } in
mk_parallelizable ~wait_for_recheck ~options (handle_autofix_exports ~input ~options)
| ServerProt.Request.CHECK_FILE { input; verbose; force; include_warnings; wait_for_recheck } ->
let options =
{
options with
Options.opt_verbose = verbose;
opt_include_warnings = options.Options.opt_include_warnings || include_warnings;
}
in
mk_parallelizable ~wait_for_recheck ~options (handle_check_file ~options ~force ~input)
| ServerProt.Request.COVERAGE { input; force; wait_for_recheck; trust } ->
mk_parallelizable ~wait_for_recheck ~options (handle_coverage ~options ~force ~trust ~input)
| ServerProt.Request.BATCH_COVERAGE { batch; wait_for_recheck; trust } ->
mk_parallelizable ~wait_for_recheck ~options (handle_batch_coverage ~options ~trust ~batch)
| ServerProt.Request.CYCLE { filename; types_only } ->
(* The user preference is to make this wait for up-to-date data *)
let file_options = Options.file_options options in
let fn = Files.filename_from_string ~options:file_options filename in
Handle_nonparallelizable (handle_cycle ~fn ~types_only)
| ServerProt.Request.DUMP_TYPES
{ input; expand_aliases; evaluate_type_destructors; wait_for_recheck } ->
mk_parallelizable
~wait_for_recheck
~options
(handle_dump_types ~options ~input ~expand_aliases ~evaluate_type_destructors)
| ServerProt.Request.FIND_MODULE { moduleref; filename; wait_for_recheck } ->
mk_parallelizable
~wait_for_recheck
~options
(handle_find_module ~options ~reader ~moduleref ~filename)
| ServerProt.Request.FIND_REFS { filename; line; char; global; multi_hop } ->
(* find-refs can take a while and may use MultiWorker. Furthermore, it may do a recheck and
* change env. Each of these 3 facts disqualifies find-refs from being parallelizable *)
Handle_nonparallelizable
(handle_find_refs ~reader ~genv ~filename ~line ~char ~global ~multi_hop)
| ServerProt.Request.FORCE_RECHECK { files; focus; profile } ->
Handle_immediately (handle_force_recheck ~files ~focus ~profile)
| ServerProt.Request.GET_DEF { filename; line; char; wait_for_recheck } ->
mk_parallelizable
~wait_for_recheck
~options
(handle_get_def ~reader ~options ~filename ~line ~char)
| ServerProt.Request.GET_IMPORTS { module_names; wait_for_recheck } ->
mk_parallelizable ~wait_for_recheck ~options (handle_get_imports ~options ~reader ~module_names)
| ServerProt.Request.GRAPH_DEP_GRAPH { root; strip_root; outfile; types_only } ->
(* The user preference is to make this wait for up-to-date data *)
Handle_nonparallelizable (handle_graph_dep_graph ~root ~strip_root ~types_only ~outfile)
| ServerProt.Request.INFER_TYPE
{
input;
line;
char;
verbose;
expand_aliases;
omit_targ_defaults;
evaluate_type_destructors;
wait_for_recheck;
} ->
mk_parallelizable
~wait_for_recheck
~options
(handle_infer_type
~options
~input
~line
~char
~verbose
~expand_aliases
~omit_targ_defaults
~evaluate_type_destructors)
| ServerProt.Request.RAGE { files } ->
mk_parallelizable ~wait_for_recheck:None ~options (handle_rage ~reader ~options ~files)
| ServerProt.Request.INSERT_TYPE
{
input;
target;
wait_for_recheck;
verbose;
expand_aliases;
omit_targ_defaults;
location_is_strict;
ambiguity_strategy;
} ->
mk_parallelizable
~wait_for_recheck
~options
(handle_insert_type
~file_input:input
~options
~target
~verbose
~expand_aliases
~omit_targ_defaults
~location_is_strict
~ambiguity_strategy)
| ServerProt.Request.REFACTOR { input; line; char; refactor_variant } ->
(* refactor delegates to find-refs, which is not parallelizable. Therefore refactor is also not
* parallelizable *)
Handle_nonparallelizable (handle_refactor ~reader ~genv ~input ~line ~char ~refactor_variant)
| ServerProt.Request.STATUS { client_root; include_warnings } ->
let genv =
{
genv with
options =
Options.
{ options with opt_include_warnings = options.opt_include_warnings || include_warnings };
}
in
(* `flow status` is often used by users to get all the current errors. After talking to some
* coworkers and users, glevi decided that users would rather that `flow status` always waits