-
-
Notifications
You must be signed in to change notification settings - Fork 3k
/
constellation.rs
5516 lines (5094 loc) · 225 KB
/
constellation.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The `Constellation`, Servo's Grand Central Station
//!
//! The constellation tracks all information kept globally by the
//! browser engine, which includes:
//!
//! * The set of all `EventLoop` objects. Each event loop is
//! the constellation's view of a script thread. The constellation
//! interacts with a script thread by message-passing.
//!
//! * The set of all `Pipeline` objects. Each pipeline gives the
//! constellation's view of a `Window`, with its script thread and
//! layout threads. Pipelines may share script threads, but not
//! layout threads.
//!
//! * The set of all `BrowsingContext` objects. Each browsing context
//! gives the constellation's view of a `WindowProxy`.
//! Each browsing context stores an independent
//! session history, created by navigation. The session
//! history can be traversed, for example by the back and forwards UI,
//! so each session history maintains a list of past and future pipelines,
//! as well as the current active pipeline.
//!
//! There are two kinds of browsing context: top-level ones (for
//! example tabs in a browser UI), and nested ones (typically caused
//! by `iframe` elements). Browsing contexts have a hierarchy
//! (typically caused by `iframe`s containing `iframe`s), giving rise
//! to a forest whose roots are top-level browsing context. The logical
//! relationship between these types is:
//!
//! ```
//! +------------+ +------------+ +---------+
//! | Browsing | ------parent?------> | Pipeline | --event_loop--> | Event |
//! | Context | ------current------> | | | Loop |
//! | | ------prev*--------> | | <---pipeline*-- | |
//! | | ------next*--------> | | +---------+
//! | | | |
//! | | <-top_level--------- | |
//! | | <-browsing_context-- | |
//! +------------+ +------------+
//! ```
//!
//! The constellation also maintains channels to threads, including:
//!
//! * The script and layout threads.
//! * The graphics compositor.
//! * The font cache, image cache, and resource manager, which load
//! and cache shared fonts, images, or other resources.
//! * The service worker manager.
//! * The devtools, debugger and webdriver servers.
//!
//! The constellation passes messages between the threads, and updates its state
//! to track the evolving state of the browsing context tree.
//!
//! The constellation acts as a logger, tracking any `warn!` messages from threads,
//! and converting any `error!` or `panic!` into a crash report.
//!
//! Since there is only one constellation, and its responsibilities include crash reporting,
//! it is very important that it does not panic.
//!
//! It's also important that the constellation not deadlock. In particular, we need
//! to be careful that we don't introduce any cycles in the can-block-on relation.
//! Blocking is typically introduced by `receiver.recv()`, which blocks waiting for the
//! sender to send some data. Servo tries to achieve deadlock-freedom by using the following
//! can-block-on relation:
//!
//! * Layout can block on canvas
//! * Layout can block on font cache
//! * Layout can block on image cache
//! * Constellation can block on compositor
//! * Constellation can block on embedder
//! * Constellation can block on layout
//! * Script can block on anything (other than script)
//! * Blocking is transitive (if T1 can block on T2 and T2 can block on T3 then T1 can block on T3)
//! * Nothing can block on itself!
//!
//! There is a complexity introduced by IPC channels, since they do not support
//! non-blocking send. This means that as well as `receiver.recv()` blocking,
//! `sender.send(data)` can also block when the IPC buffer is full. For this reason it is
//! very important that all IPC receivers where we depend on non-blocking send
//! use a router to route IPC messages to an mpsc channel. The reason why that solves
//! the problem is that under the hood, the router uses a dedicated thread to forward
//! messages, and:
//!
//! * Anything (other than a routing thread) can block on a routing thread
//!
//! See https://github.com/servo/servo/issues/14704
use crate::browsingcontext::NewBrowsingContextInfo;
use crate::browsingcontext::{
AllBrowsingContextsIterator, BrowsingContext, FullyActiveBrowsingContextsIterator,
};
use crate::event_loop::EventLoop;
use crate::network_listener::NetworkListener;
use crate::pipeline::{InitialPipelineState, Pipeline};
use crate::serviceworker::ServiceWorkerUnprivilegedContent;
use crate::session_history::{
JointSessionHistory, NeedsToReload, SessionHistoryChange, SessionHistoryDiff,
};
use crate::timer_scheduler::TimerScheduler;
use background_hang_monitor::HangMonitorRegister;
use backtrace::Backtrace;
use bluetooth_traits::BluetoothRequest;
use canvas_traits::canvas::{CanvasId, CanvasMsg};
use canvas_traits::webgl::WebGLThreads;
use canvas_traits::ConstellationCanvasMsg;
use compositing::compositor_thread::CompositorProxy;
use compositing::compositor_thread::Msg as ToCompositorMsg;
use compositing::compositor_thread::WebrenderMsg;
use compositing::{ConstellationMsg as FromCompositorMsg, SendableFrameTree};
use crossbeam_channel::{after, never, unbounded, Receiver, Sender};
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg};
use embedder_traits::{Cursor, EmbedderMsg, EmbedderProxy, EventLoopWaker};
use embedder_traits::{MediaSessionEvent, MediaSessionPlaybackState};
use euclid::{default::Size2D as UntypedSize2D, Size2D};
use gfx::font_cache_thread::FontCacheThread;
use gfx_traits::Epoch;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use ipc_channel::router::ROUTER;
use ipc_channel::Error as IpcError;
use keyboard_types::webdriver::Event as WebDriverInputEvent;
use keyboard_types::KeyboardEvent;
use layout_traits::LayoutThreadFactory;
use log::{Level, LevelFilter, Log, Metadata, Record};
use media::{GLPlayerThreads, WindowGLContext};
use msg::constellation_msg::{
BackgroundHangMonitorControlMsg, BackgroundHangMonitorRegister, HangMonitorAlert,
};
use msg::constellation_msg::{
BroadcastChannelRouterId, MessagePortId, MessagePortRouterId, PipelineNamespace,
PipelineNamespaceId, PipelineNamespaceRequest, TraversalDirection,
};
use msg::constellation_msg::{
BrowsingContextGroupId, BrowsingContextId, HistoryStateId, PipelineId,
TopLevelBrowsingContextId,
};
use net_traits::pub_domains::reg_host;
use net_traits::request::{Referrer, RequestBuilder};
use net_traits::storage_thread::{StorageThreadMsg, StorageType};
use net_traits::{self, FetchResponseMsg, IpcSend, ResourceThreads};
use profile_traits::mem;
use profile_traits::time;
use script_traits::CompositorEvent::{MouseButtonEvent, MouseMoveEvent};
use script_traits::{webdriver_msg, LogEntry, ScriptToConstellationChan, ServiceWorkerMsg};
use script_traits::{
AnimationState, AnimationTickType, AuxiliaryBrowsingContextLoadInfo, BroadcastMsg,
CompositorEvent,
};
use script_traits::{ConstellationControlMsg, DiscardBrowsingContext};
use script_traits::{DocumentActivity, DocumentState, LayoutControlMsg, LoadData, LoadOrigin};
use script_traits::{HistoryEntryReplacement, IFrameSizeMsg, WindowSizeData, WindowSizeType};
use script_traits::{
IFrameLoadInfo, IFrameLoadInfoWithData, IFrameSandboxState, TimerSchedulerMsg,
};
use script_traits::{
Job, LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, ScriptThreadFactory,
ServiceWorkerManagerFactory,
};
use script_traits::{MediaSessionActionType, MouseEventType};
use script_traits::{MessagePortMsg, PortMessageTask, StructuredSerializedData};
use script_traits::{SWManagerMsg, SWManagerSenders, UpdatePipelineIdReason, WebDriverCommandMsg};
use serde::{Deserialize, Serialize};
use servo_config::{opts, pref};
use servo_rand::{random, Rng, ServoRng, SliceRandom};
use servo_remutex::ReentrantMutex;
use servo_url::{Host, ImmutableOrigin, ServoUrl};
use std::borrow::{Cow, ToOwned};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet, VecDeque};
use std::marker::PhantomData;
use std::mem::replace;
use std::process;
use std::rc::{Rc, Weak};
use std::sync::{Arc, Mutex};
use std::thread;
use style_traits::viewport::ViewportConstraints;
use style_traits::CSSPixel;
use webgpu::{self, WebGPU, WebGPURequest};
use webrender_traits::WebrenderExternalImageRegistry;
/// Navigation requests from script that are awaiting approval from the embedder,
/// keyed by the pipeline that issued the request.
type PendingApprovalNavigations = HashMap<PipelineId, (LoadData, HistoryEntryReplacement)>;
#[derive(Debug)]
/// The state used by MessagePortInfo to represent the various states the port can be in.
enum TransferState {
    /// The port is currently managed by a given global,
    /// identified by its router id.
    Managed(MessagePortRouterId),
    /// The port is currently in-transfer,
    /// and incoming tasks should be buffered until it becomes managed again.
    TransferInProgress(VecDeque<PortMessageTask>),
    /// A global has requested the transfer to be completed;
    /// it's pending a confirmation of either failure or success to complete the transfer.
    CompletionInProgress(MessagePortRouterId),
    /// While a completion of a transfer was in progress, the port was shipped,
    /// hence the transfer failed to complete.
    /// We start buffering incoming messages,
    /// while awaiting the return of the previous buffer from the global
    /// that failed to complete the transfer.
    CompletionFailed(VecDeque<PortMessageTask>),
    /// While a completion failed, another global requested to complete the transfer.
    /// We are still buffering messages, and awaiting the return of the buffer from the global who failed.
    CompletionRequested(MessagePortRouterId, VecDeque<PortMessageTask>),
    /// The entangled port has been removed while the port was in-transfer;
    /// the current port should be removed as well once it is managed again.
    EntangledRemoved,
}
#[derive(Debug)]
/// Info related to a message-port tracked by the constellation.
struct MessagePortInfo {
    /// The current state of the message port.
    state: TransferState,
    /// The id of the entangled port, if any.
    entangled_with: Option<MessagePortId>,
}
/// Webrender related objects required by WebGPU threads
struct WebrenderWGPU {
    /// Webrender API.
    webrender_api: webrender_api::RenderApi,
    /// List of Webrender external images
    webrender_external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
    /// WebGPU data that is supplied to Webrender for rendering,
    /// keyed by an external-image id.
    wgpu_image_map: Arc<Mutex<HashMap<u64, webgpu::PresentationData>>>,
}
/// Servo supports tabs (referred to as browsers), so `Constellation` needs to
/// store browser specific data for bookkeeping.
struct Browser {
    /// The currently focused browsing context in this browser for key events.
    /// The focused pipeline is the current entry of the focused browsing
    /// context.
    focused_browsing_context_id: BrowsingContextId,
    /// The joint session history for this browser,
    /// i.e. the combined history of all its browsing contexts.
    session_history: JointSessionHistory,
}
/// A browsing context group.
///
/// https://html.spec.whatwg.org/multipage/#browsing-context-group
#[derive(Clone, Default)]
struct BrowsingContextGroup {
    /// A browsing context group holds a set of top-level browsing contexts.
    top_level_browsing_context_set: HashSet<TopLevelBrowsingContextId>,
    /// The set of all event loops in this BrowsingContextGroup.
    /// We store the event loops in a map
    /// indexed by registered domain name (as a `Host`) to event loops.
    /// It is important that scripts with the same eTLD+1,
    /// who are part of the same browsing-context group
    /// share an event loop, since they can use `document.domain`
    /// to become same-origin, at which point they can share DOM objects.
    event_loops: HashMap<Host, Weak<EventLoop>>,
    /// The set of all WebGPU channels in this BrowsingContextGroup,
    /// indexed by registered domain name (as a `Host`).
    webgpus: HashMap<Host, WebGPU>,
}
/// The `Constellation` itself. In the servo browser, there is one
/// constellation, which maintains all of the browser global data.
/// In embedded applications, there may be more than one constellation,
/// which are independent of each other.
///
/// The constellation may be in a different process from the pipelines,
/// and communicates using IPC.
///
/// It is parameterized over a `LayoutThreadFactory` and a
/// `ScriptThreadFactory` (which in practice are implemented by
/// `LayoutThread` in the `layout` crate, and `ScriptThread` in
/// the `script` crate). Script and layout communicate using a `Message`
/// type.
pub struct Constellation<Message, LTF, STF, SWF> {
    /// An ipc-sender/threaded-receiver pair
    /// to facilitate installing pipeline namespaces in threads
    /// via a per-process installer.
    namespace_receiver: Receiver<Result<PipelineNamespaceRequest, IpcError>>,
    /// The sender half of the namespace-installation channel above.
    namespace_sender: IpcSender<PipelineNamespaceRequest>,
    /// An IPC channel for script threads to send messages to the constellation.
    /// This is the script threads' view of `script_receiver`.
    script_sender: IpcSender<(PipelineId, FromScriptMsg)>,
    /// A channel for the constellation to receive messages from script threads.
    /// This is the constellation's view of `script_sender`.
    script_receiver: Receiver<Result<(PipelineId, FromScriptMsg), IpcError>>,
    /// A handle to register components for hang monitoring.
    /// None when in multiprocess mode.
    background_monitor_register: Option<Box<dyn BackgroundHangMonitorRegister>>,
    /// Channels to control all background-hang monitors.
    /// TODO: store them on the relevant BrowsingContextGroup,
    /// so that they could be controlled on a "per-tab/event-loop" basis.
    background_monitor_control_senders: Vec<IpcSender<BackgroundHangMonitorControlMsg>>,
    /// A channel for the background hang monitor to send messages
    /// to the constellation.
    background_hang_monitor_sender: IpcSender<HangMonitorAlert>,
    /// A channel for the constellation to receive messages
    /// from the background hang monitor.
    background_hang_monitor_receiver: Receiver<Result<HangMonitorAlert, IpcError>>,
    /// An IPC channel for layout threads to send messages to the constellation.
    /// This is the layout threads' view of `layout_receiver`.
    layout_sender: IpcSender<FromLayoutMsg>,
    /// A channel for the constellation to receive messages from layout threads.
    /// This is the constellation's view of `layout_sender`.
    layout_receiver: Receiver<Result<FromLayoutMsg, IpcError>>,
    /// A channel for network listener to send messages to the constellation.
    network_listener_sender: Sender<(PipelineId, FetchResponseMsg)>,
    /// A channel for the constellation to receive messages from network listener.
    network_listener_receiver: Receiver<(PipelineId, FetchResponseMsg)>,
    /// A channel for the constellation to receive messages from the compositor thread.
    compositor_receiver: Receiver<FromCompositorMsg>,
    /// A channel through which messages can be sent to the embedder.
    embedder_proxy: EmbedderProxy,
    /// A channel (the implementation of which is port-specific) for the
    /// constellation to send messages to the compositor thread.
    compositor_proxy: CompositorProxy,
    /// The last frame tree sent to WebRender, denoting the browser (tab) user
    /// has currently selected. This also serves as the key to retrieve data
    /// about the current active browser from `browsers`.
    active_browser_id: Option<TopLevelBrowsingContextId>,
    /// Bookkeeping data for all browsers in constellation.
    browsers: HashMap<TopLevelBrowsingContextId, Browser>,
    /// Channels for the constellation to send messages to the public
    /// resource-related threads. There are two groups of resource threads: one
    /// for public browsing, and one for private browsing.
    public_resource_threads: ResourceThreads,
    /// Channels for the constellation to send messages to the private
    /// resource-related threads. There are two groups of resource
    /// threads: one for public browsing, and one for private
    /// browsing.
    private_resource_threads: ResourceThreads,
    /// A channel for the constellation to send messages to the font
    /// cache thread.
    font_cache_thread: FontCacheThread,
    /// A channel for the constellation to send messages to the
    /// debugger thread.
    debugger_chan: Option<debugger::Sender>,
    /// A channel for the constellation to send messages to the
    /// devtools thread.
    devtools_chan: Option<Sender<DevtoolsControlMsg>>,
    /// An IPC channel for the constellation to send messages to the
    /// bluetooth thread.
    bluetooth_thread: IpcSender<BluetoothRequest>,
    /// A map of origin to sender to a Service worker manager.
    sw_managers: HashMap<ImmutableOrigin, IpcSender<ServiceWorkerMsg>>,
    /// An IPC channel for Service Worker Manager threads to send
    /// messages to the constellation. This is the SW Manager thread's
    /// view of `swmanager_receiver`.
    swmanager_sender: IpcSender<SWManagerMsg>,
    /// A channel for the constellation to receive messages from the
    /// Service Worker Manager thread. This is the constellation's view of
    /// `swmanager_sender`.
    swmanager_receiver: Receiver<Result<SWManagerMsg, IpcError>>,
    /// A channel for the constellation to send messages to the
    /// time profiler thread.
    time_profiler_chan: time::ProfilerChan,
    /// A channel for the constellation to send messages to the
    /// memory profiler thread.
    mem_profiler_chan: mem::ProfilerChan,
    /// A channel for a pipeline to schedule timer events.
    scheduler_chan: IpcSender<TimerSchedulerMsg>,
    /// The receiver to which the IPC requests from scheduler_chan will be forwarded.
    scheduler_receiver: Receiver<Result<TimerSchedulerMsg, IpcError>>,
    /// The logic and data behind scheduling timer events.
    timer_scheduler: TimerScheduler,
    /// A single WebRender document the constellation operates on.
    webrender_document: webrender_api::DocumentId,
    /// Webrender related objects required by WebGPU threads
    webrender_wgpu: WebrenderWGPU,
    /// A channel for content processes to send messages that will
    /// be relayed to the WebRender thread.
    webrender_api_ipc_sender: script_traits::WebrenderIpcSender,
    /// A channel for content process image caches to send messages
    /// that will be relayed to the WebRender thread.
    webrender_image_api_sender: net_traits::WebrenderIpcSender,
    /// A map of message-port Id to info.
    message_ports: HashMap<MessagePortId, MessagePortInfo>,
    /// A map of router-id to ipc-sender, to route messages to ports.
    message_port_routers: HashMap<MessagePortRouterId, IpcSender<MessagePortMsg>>,
    /// A map of broadcast routers to their IPC sender.
    broadcast_routers: HashMap<BroadcastChannelRouterId, IpcSender<BroadcastMsg>>,
    /// A map of origin to a map of channel-name to a list of relevant routers.
    broadcast_channels: HashMap<ImmutableOrigin, HashMap<String, Vec<BroadcastChannelRouterId>>>,
    /// The set of all the pipelines in the browser. (See the `pipeline` module
    /// for more details.)
    pipelines: HashMap<PipelineId, Pipeline>,
    /// The set of all the browsing contexts in the browser.
    browsing_contexts: HashMap<BrowsingContextId, BrowsingContext>,
    /// A user agent holds a set of browsing context groups.
    ///
    /// https://html.spec.whatwg.org/multipage/#browsing-context-group-set
    browsing_context_group_set: HashMap<BrowsingContextGroupId, BrowsingContextGroup>,
    /// The Id counter for BrowsingContextGroup.
    browsing_context_group_next_id: u32,
    /// When a navigation is performed, we do not immediately update
    /// the session history, instead we ask the event loop to begin loading
    /// the new document, and do not update the browsing context until the
    /// document is active. Between starting the load and it activating,
    /// we store a `SessionHistoryChange` object for the navigation in progress.
    pending_changes: Vec<SessionHistoryChange>,
    /// Pipeline IDs are namespaced in order to avoid name collisions,
    /// and the namespaces are allocated by the constellation.
    next_pipeline_namespace_id: PipelineNamespaceId,
    /// The size of the top-level window.
    window_size: WindowSizeData,
    /// Bits of state used to interact with the webdriver implementation
    webdriver: WebDriverData,
    /// Document states for loaded pipelines (used only when writing screenshots).
    document_states: HashMap<PipelineId, DocumentState>,
    /// Are we shutting down?
    shutting_down: bool,
    /// Have we seen any warnings? Hopefully always empty!
    /// The buffer contains `(thread_name, reason)` entries.
    handled_warnings: VecDeque<(Option<String>, String)>,
    /// The random number generator and probability for closing pipelines.
    /// This is for testing the hardening of the constellation.
    random_pipeline_closure: Option<(ServoRng, f32)>,
    /// Phantom data that keeps the Rust type system happy.
    phantom: PhantomData<(Message, LTF, STF, SWF)>,
    /// Entry point to create and get channels to a WebGLThread.
    webgl_threads: Option<WebGLThreads>,
    /// The XR device registry
    webxr_registry: webxr_api::Registry,
    /// A channel through which messages can be sent to the canvas paint thread.
    canvas_chan: Sender<ConstellationCanvasMsg>,
    /// An IPC channel through which canvas messages can be sent to the
    /// canvas paint thread from content processes.
    ipc_canvas_chan: IpcSender<CanvasMsg>,
    /// Navigation requests from script awaiting approval from the embedder.
    pending_approval_navigations: PendingApprovalNavigations,
    /// Bitmask which indicates which combination of mouse buttons are
    /// currently being pressed.
    pressed_mouse_buttons: u16,
    /// Whether the constellation was started for a "problem test";
    /// set at startup. NOTE(review): exact semantics depend on usage
    /// sites outside this view — confirm before relying on this.
    is_running_problem_test: bool,
    /// If True, exits on thread failure instead of displaying about:failure
    hard_fail: bool,
    /// If set with --disable-canvas-aa, disable antialiasing on the HTML
    /// canvas element.
    /// Like --disable-text-aa, this is useful for reftests where pixel perfect
    /// results are required.
    enable_canvas_antialiasing: bool,
    /// Entry point to create and get channels to a GLPlayerThread.
    glplayer_threads: Option<GLPlayerThreads>,
    /// Application window's GL Context for Media player
    player_context: WindowGLContext,
    /// Mechanism to force the compositor to process events.
    event_loop_waker: Option<Box<dyn EventLoopWaker>>,
    /// Pipeline ID of the active media session.
    active_media_session: Option<PipelineId>,
    /// User agent string to report in network requests.
    user_agent: Cow<'static, str>,
}
/// State needed to construct a constellation.
pub struct InitialConstellationState {
    /// A channel through which messages can be sent to the embedder.
    pub embedder_proxy: EmbedderProxy,
    /// A channel through which messages can be sent to the compositor.
    pub compositor_proxy: CompositorProxy,
    /// A channel to the debugger, if applicable.
    pub debugger_chan: Option<debugger::Sender>,
    /// A channel to the developer tools, if applicable.
    pub devtools_chan: Option<Sender<DevtoolsControlMsg>>,
    /// A channel to the bluetooth thread.
    pub bluetooth_thread: IpcSender<BluetoothRequest>,
    /// A channel to the font cache thread.
    pub font_cache_thread: FontCacheThread,
    /// A channel to the resource thread.
    pub public_resource_threads: ResourceThreads,
    /// A channel to the resource thread.
    pub private_resource_threads: ResourceThreads,
    /// A channel to the time profiler thread.
    pub time_profiler_chan: time::ProfilerChan,
    /// A channel to the memory profiler thread.
    pub mem_profiler_chan: mem::ProfilerChan,
    /// Webrender document ID.
    pub webrender_document: webrender_api::DocumentId,
    /// Webrender API.
    pub webrender_api_sender: webrender_api::RenderApiSender,
    /// Webrender external images
    pub webrender_external_images: Arc<Mutex<WebrenderExternalImageRegistry>>,
    /// Entry point to create and get channels to a WebGLThread.
    pub webgl_threads: Option<WebGLThreads>,
    /// The XR device registry
    pub webxr_registry: webxr_api::Registry,
    /// Entry point to create and get channels to a GLPlayerThread.
    pub glplayer_threads: Option<GLPlayerThreads>,
    /// Application window's GL Context for Media player
    pub player_context: WindowGLContext,
    /// Mechanism to force the compositor to process events.
    pub event_loop_waker: Option<Box<dyn EventLoopWaker>>,
    /// User agent string to report in network requests.
    pub user_agent: Cow<'static, str>,
    /// WebGPU data that is supplied to Webrender for rendering,
    /// keyed by an external-image id.
    pub wgpu_image_map: Arc<Mutex<HashMap<u64, webgpu::PresentationData>>>,
}
/// Data needed for webdriver
struct WebDriverData {
    /// Channel on which to report the load status of a pipeline,
    /// together with the pipeline being loaded, if a webdriver-initiated
    /// load is in progress.
    load_channel: Option<(PipelineId, IpcSender<webdriver_msg::LoadStatus>)>,
    /// Channel on which to report the new window size once a
    /// webdriver-initiated resize completes.
    resize_channel: Option<IpcSender<WindowSizeData>>,
}
impl WebDriverData {
    /// Construct a `WebDriverData` with no in-flight load or resize request.
    fn new() -> WebDriverData {
        let load_channel = None;
        let resize_channel = None;
        WebDriverData {
            load_channel,
            resize_channel,
        }
    }
}
/// When we are running reftests, we save an image to compare against a reference.
/// This enum gives the possible states of preparing such an image.
#[derive(Debug, PartialEq)]
enum ReadyToSave {
    /// There is no top-level browsing context to capture.
    NoTopLevelBrowsingContext,
    /// Session history changes are still pending, so the frame tree
    /// may not be in its final state yet.
    PendingChanges,
    /// A web font has not finished loading.
    WebFontNotLoaded,
    /// A document is still loading.
    DocumentLoading,
    /// Layout and rendered epochs do not match yet.
    EpochMismatch,
    /// A required pipeline is not known to the constellation.
    PipelineUnknown,
    /// The image is ready to be saved.
    Ready,
}
/// When we are exiting a pipeline, we can either force exiting or not.
/// A normal exit waits for the compositor to update its state before
/// exiting, and delegates layout exit to script. A forced exit does
/// not notify the compositor, and exits layout without involving script.
#[derive(Clone, Copy)]
enum ExitPipelineMode {
    /// Wait for the compositor to update its state, and let script
    /// drive the layout exit.
    Normal,
    /// Exit without notifying the compositor, and exit layout without
    /// involving script.
    Force,
}
/// The constellation uses logging to perform crash reporting.
/// The constellation receives all `warn!`, `error!` and `panic!` messages,
/// and generates a crash report when it receives a panic.
/// A logger directed at the constellation from content processes
#[derive(Clone)]
pub struct FromScriptLogger {
    /// A channel to the constellation.
    /// Wrapped in a reentrant mutex so the logger is cloneable and usable
    /// from multiple threads (including while already holding the lock).
    pub script_to_constellation_chan: Arc<ReentrantMutex<ScriptToConstellationChan>>,
}
impl FromScriptLogger {
    /// Create a new constellation logger.
    pub fn new(script_to_constellation_chan: ScriptToConstellationChan) -> FromScriptLogger {
        let script_to_constellation_chan =
            Arc::new(ReentrantMutex::new(script_to_constellation_chan));
        FromScriptLogger {
            script_to_constellation_chan,
        }
    }

    /// The maximum log level the constellation logger is interested in.
    pub fn filter(&self) -> LevelFilter {
        LevelFilter::Warn
    }
}
impl Log for FromScriptLogger {
    /// Only `warn!` and more severe messages are forwarded to the constellation.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= Level::Warn
    }
    /// Convert the record to a `LogEntry` (if severe enough) and forward it
    /// to the constellation over the script channel.
    fn log(&self, record: &Record) {
        if let Some(entry) = log_entry(record) {
            debug!("Sending log entry {:?}.", entry);
            let thread_name = thread::current().name().map(ToOwned::to_owned);
            let msg = FromScriptMsg::LogEntry(thread_name, entry);
            // Recover the channel even if the lock was poisoned by a panic:
            // this logger participates in crash reporting, so it must not
            // panic itself.
            let chan = self
                .script_to_constellation_chan
                .lock()
                .unwrap_or_else(|err| err.into_inner());
            // Ignore send failures: there is nowhere useful to report them.
            let _ = chan.send(msg);
        }
    }
    /// Messages are sent immediately, so there is nothing to flush.
    fn flush(&self) {}
}
/// A logger directed at the constellation from the compositor
#[derive(Clone)]
pub struct FromCompositorLogger {
    /// A channel to the constellation.
    /// Wrapped in a reentrant mutex so the logger is cloneable and usable
    /// from multiple threads (including while already holding the lock).
    pub constellation_chan: Arc<ReentrantMutex<Sender<FromCompositorMsg>>>,
}
impl FromCompositorLogger {
    /// Create a new constellation logger.
    pub fn new(constellation_chan: Sender<FromCompositorMsg>) -> FromCompositorLogger {
        let constellation_chan = Arc::new(ReentrantMutex::new(constellation_chan));
        FromCompositorLogger { constellation_chan }
    }

    /// The maximum log level the constellation logger is interested in.
    pub fn filter(&self) -> LevelFilter {
        LevelFilter::Warn
    }
}
impl Log for FromCompositorLogger {
    /// Only `warn!` and more severe messages are forwarded to the constellation.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= Level::Warn
    }
    /// Convert the record to a `LogEntry` (if severe enough) and forward it
    /// to the constellation over the compositor channel, tagged with the
    /// currently installed top-level browsing context, if any.
    fn log(&self, record: &Record) {
        if let Some(entry) = log_entry(record) {
            debug!("Sending log entry {:?}.", entry);
            let top_level_id = TopLevelBrowsingContextId::installed();
            let thread_name = thread::current().name().map(ToOwned::to_owned);
            let msg = FromCompositorMsg::LogEntry(top_level_id, thread_name, entry);
            // Recover the channel even if the lock was poisoned by a panic:
            // this logger participates in crash reporting, so it must not
            // panic itself.
            let chan = self
                .constellation_chan
                .lock()
                .unwrap_or_else(|err| err.into_inner());
            // Ignore send failures: there is nowhere useful to report them.
            let _ = chan.send(msg);
        }
    }
    /// Messages are sent immediately, so there is nothing to flush.
    fn flush(&self) {}
}
/// Rust uses `Record` for storing logging, but servo converts that to
/// a `LogEntry`. We do this so that we can record panics as well as log
/// messages, and because `Record` does not implement serde (de)serialization,
/// so cannot be used over an IPC channel.
fn log_entry(record: &Record) -> Option<LogEntry> {
match record.level() {
Level::Error if thread::panicking() => Some(LogEntry::Panic(
format!("{}", record.args()),
format!("{:?}", Backtrace::new()),
)),
Level::Error => Some(LogEntry::Error(format!("{}", record.args()))),
Level::Warn => Some(LogEntry::Warn(format!("{}", record.args()))),
_ => None,
}
}
/// The number of warnings to include in each crash report.
/// (See the `handled_warnings` buffer on the `Constellation`.)
const WARNINGS_BUFFER_SIZE: usize = 32;
/// Route an ipc receiver to an mpsc receiver, preserving any errors.
/// This is the same as `route_ipc_receiver_to_new_mpsc_receiver`,
/// but does not panic on deserialization errors.
fn route_ipc_receiver_to_new_mpsc_receiver_preserving_errors<T>(
    ipc_receiver: IpcReceiver<T>,
) -> Receiver<Result<T, IpcError>>
where
    T: for<'de> Deserialize<'de> + Serialize + Send + 'static,
{
    let (crossbeam_sender, crossbeam_receiver) = unbounded();
    // Forward the deserialization *result* (not just successes), so that
    // IPC errors are observable on the receiving side instead of panicking
    // in the routing thread. Send failures are ignored: they only mean the
    // receiver has already been dropped.
    ROUTER.add_route(
        ipc_receiver.to_opaque(),
        Box::new(move |message| {
            let _ = crossbeam_sender.send(message.to::<T>());
        }),
    );
    crossbeam_receiver
}
impl<Message, LTF, STF, SWF> Constellation<Message, LTF, STF, SWF>
where
LTF: LayoutThreadFactory<Message = Message>,
STF: ScriptThreadFactory<Message = Message>,
SWF: ServiceWorkerManagerFactory,
{
/// Create a new constellation thread.
///
/// Spawns the "Constellation" thread, wires up every IPC and in-process
/// channel it services, builds the `Constellation` state, and enters its
/// event loop. Returns the channel the compositor uses to send messages
/// to the new constellation.
pub fn start(
    state: InitialConstellationState,
    initial_window_size: WindowSizeData,
    random_pipeline_closure_probability: Option<f32>,
    random_pipeline_closure_seed: Option<usize>,
    is_running_problem_test: bool,
    hard_fail: bool,
    enable_canvas_antialiasing: bool,
    canvas_chan: Sender<ConstellationCanvasMsg>,
    ipc_canvas_chan: IpcSender<CanvasMsg>,
) -> Sender<FromCompositorMsg> {
    // In-process channel for compositor -> constellation messages; the
    // sender is returned to the caller below.
    let (compositor_sender, compositor_receiver) = unbounded();
    // service worker manager to communicate with constellation
    let (swmanager_sender, swmanager_receiver) = ipc::channel().expect("ipc channel failure");
    thread::Builder::new()
        .name("Constellation".to_owned())
        .spawn(move || {
            // Each IPC receiver below is routed onto an in-process
            // receiver, preserving deserialization errors (see
            // `route_ipc_receiver_to_new_mpsc_receiver_preserving_errors`).
            let (ipc_script_sender, ipc_script_receiver) =
                ipc::channel().expect("ipc channel failure");
            let script_receiver =
                route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_script_receiver);
            let (namespace_sender, ipc_namespace_receiver) =
                ipc::channel().expect("ipc channel failure");
            let namespace_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(
                ipc_namespace_receiver,
            );
            let (scheduler_chan, ipc_scheduler_receiver) =
                ipc::channel().expect("ipc channel failure");
            let scheduler_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(
                ipc_scheduler_receiver,
            );
            let (background_hang_monitor_sender, ipc_bhm_receiver) =
                ipc::channel().expect("ipc channel failure");
            let background_hang_monitor_receiver =
                route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_bhm_receiver);
            // If we are in multiprocess mode,
            // a dedicated per-process hang monitor will be initialized later inside the content process.
            // See run_content_process in servo/lib.rs
            let (background_monitor_register, bhm_control_chans) = if opts::multiprocess() {
                (None, vec![])
            } else {
                let (bhm_control_chan, bhm_control_port) =
                    ipc::channel().expect("ipc channel failure");
                (
                    Some(HangMonitorRegister::init(
                        background_hang_monitor_sender.clone(),
                        bhm_control_port,
                        opts::get().background_hang_monitor,
                    )),
                    vec![bhm_control_chan],
                )
            };
            let (ipc_layout_sender, ipc_layout_receiver) =
                ipc::channel().expect("ipc channel failure");
            let layout_receiver =
                route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_layout_receiver);
            let (network_listener_sender, network_listener_receiver) = unbounded();
            let swmanager_receiver =
                route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(swmanager_receiver);
            // Zero is reserved for the embedder; namespace 1 is installed
            // here for the constellation thread itself.
            PipelineNamespace::install(PipelineNamespaceId(1));
            // Webrender messages from layout and from the network are
            // forwarded straight to the compositor via the router,
            // bypassing the constellation event loop.
            let (webrender_ipc_sender, webrender_ipc_receiver) =
                ipc::channel().expect("ipc channel failure");
            let (webrender_image_ipc_sender, webrender_image_ipc_receiver) =
                ipc::channel().expect("ipc channel failure");
            let compositor_proxy = state.compositor_proxy.clone();
            ROUTER.add_route(
                webrender_ipc_receiver.to_opaque(),
                Box::new(move |message| {
                    let _ = compositor_proxy.send(ToCompositorMsg::Webrender(
                        WebrenderMsg::Layout(message.to().expect("conversion failure")),
                    ));
                }),
            );
            let compositor_proxy = state.compositor_proxy.clone();
            ROUTER.add_route(
                webrender_image_ipc_receiver.to_opaque(),
                Box::new(move |message| {
                    let _ = compositor_proxy.send(ToCompositorMsg::Webrender(
                        WebrenderMsg::Net(message.to().expect("conversion failure")),
                    ));
                }),
            );
            let webrender_wgpu = WebrenderWGPU {
                webrender_api: state.webrender_api_sender.create_api(),
                webrender_external_images: state.webrender_external_images,
                wgpu_image_map: state.wgpu_image_map,
            };
            let mut constellation: Constellation<Message, LTF, STF, SWF> = Constellation {
                namespace_receiver,
                namespace_sender,
                script_sender: ipc_script_sender,
                background_hang_monitor_sender,
                background_hang_monitor_receiver,
                background_monitor_register,
                background_monitor_control_senders: bhm_control_chans,
                layout_sender: ipc_layout_sender,
                script_receiver: script_receiver,
                compositor_receiver: compositor_receiver,
                layout_receiver: layout_receiver,
                network_listener_sender: network_listener_sender,
                network_listener_receiver: network_listener_receiver,
                embedder_proxy: state.embedder_proxy,
                compositor_proxy: state.compositor_proxy,
                active_browser_id: None,
                browsers: HashMap::new(),
                debugger_chan: state.debugger_chan,
                devtools_chan: state.devtools_chan,
                bluetooth_thread: state.bluetooth_thread,
                public_resource_threads: state.public_resource_threads,
                private_resource_threads: state.private_resource_threads,
                font_cache_thread: state.font_cache_thread,
                sw_managers: Default::default(),
                swmanager_receiver: swmanager_receiver,
                swmanager_sender,
                browsing_context_group_set: Default::default(),
                browsing_context_group_next_id: Default::default(),
                message_ports: HashMap::new(),
                message_port_routers: HashMap::new(),
                broadcast_routers: HashMap::new(),
                broadcast_channels: HashMap::new(),
                pipelines: HashMap::new(),
                browsing_contexts: HashMap::new(),
                pending_changes: vec![],
                // We initialize the namespace at 2, since we reserved
                // namespace 0 for the embedder, and 1 for the constellation
                // (installed above).
                next_pipeline_namespace_id: PipelineNamespaceId(2),
                time_profiler_chan: state.time_profiler_chan,
                mem_profiler_chan: state.mem_profiler_chan,
                window_size: initial_window_size,
                phantom: PhantomData,
                webdriver: WebDriverData::new(),
                timer_scheduler: TimerScheduler::new(),
                scheduler_chan,
                scheduler_receiver,
                document_states: HashMap::new(),
                webrender_document: state.webrender_document,
                webrender_api_ipc_sender: script_traits::WebrenderIpcSender::new(
                    webrender_ipc_sender,
                ),
                webrender_image_api_sender: net_traits::WebrenderIpcSender::new(
                    webrender_image_ipc_sender,
                ),
                webrender_wgpu,
                shutting_down: false,
                handled_warnings: VecDeque::new(),
                random_pipeline_closure: random_pipeline_closure_probability.map(|prob| {
                    // Reseed deterministically when a seed was supplied so
                    // random pipeline-closure test runs are reproducible.
                    let seed = random_pipeline_closure_seed.unwrap_or_else(random);
                    let rng = ServoRng::new_manually_reseeded(seed as u64);
                    warn!("Randomly closing pipelines.");
                    info!("Using seed {} for random pipeline closure.", seed);
                    (rng, prob)
                }),
                webgl_threads: state.webgl_threads,
                webxr_registry: state.webxr_registry,
                canvas_chan,
                ipc_canvas_chan,
                pending_approval_navigations: HashMap::new(),
                pressed_mouse_buttons: 0,
                is_running_problem_test,
                hard_fail,
                enable_canvas_antialiasing,
                glplayer_threads: state.glplayer_threads,
                player_context: state.player_context,
                event_loop_waker: state.event_loop_waker,
                active_media_session: None,
                user_agent: state.user_agent,
            };
            // Blocks until shutdown; runs on the spawned thread.
            constellation.run();
        })
        .expect("Thread spawning failed");
    compositor_sender
}
/// The main event loop for the constellation.
///
/// Services messages until shutdown has been requested *and* every
/// pipeline has been closed, then performs final teardown.
fn run(&mut self) {
    loop {
        if self.shutting_down && self.pipelines.is_empty() {
            break;
        }
        // Randomly close a pipeline if --random-pipeline-closure-probability is set
        // This is for testing the hardening of the constellation.
        self.maybe_close_random_pipeline();
        self.handle_request();
    }
    self.handle_shutdown();
}
/// Generate a new pipeline id namespace.
fn next_pipeline_namespace_id(&mut self) -> PipelineNamespaceId {
let namespace_id = self.next_pipeline_namespace_id;
let PipelineNamespaceId(ref mut i) = self.next_pipeline_namespace_id;
*i += 1;
namespace_id
}
/// Generate a fresh browsing context group id, post-incrementing the
/// underlying counter.
fn next_browsing_context_group_id(&mut self) -> BrowsingContextGroupId {
    let group_id = BrowsingContextGroupId(self.browsing_context_group_next_id);
    self.browsing_context_group_next_id += 1;
    group_id
}
fn get_event_loop(
&mut self,
host: &Host,
top_level_browsing_context_id: &TopLevelBrowsingContextId,
opener: &Option<BrowsingContextId>,
) -> Result<Weak<EventLoop>, &'static str> {
let bc_group = match opener {
Some(browsing_context_id) => {
let opener = self
.browsing_contexts
.get(&browsing_context_id)
.ok_or("Opener was closed before the openee started")?;
self.browsing_context_group_set
.get(&opener.bc_group_id)
.ok_or("Opener belongs to an unknow BC group")?
},
None => self
.browsing_context_group_set
.iter()
.filter_map(|(_, bc_group)| {
if bc_group
.top_level_browsing_context_set
.contains(&top_level_browsing_context_id)
{
Some(bc_group)
} else {
None
}
})
.last()
.ok_or(