Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Kill ServoParser::pipeline #15220

Merged
merged 2 commits into from Jan 27, 2017
Merged
Changes from all commits
Commits
File filter...
Filter file types
Jump to…
Jump to file
Failed to load files.

Always

Just for now

@@ -530,11 +530,6 @@ impl Document {
self.reflow_timeout.set(Some(timeout))
}

/// Disables any pending reflow timeouts.
///
/// Clears the stored `reflow_timeout`, so a previously armed delayed
/// reflow will no longer fire. Callers that trigger an immediate reflow
/// themselves use this to cancel the now-redundant timer first.
pub fn disarm_reflow_timeout(&self) {
self.reflow_timeout.set(None)
}

/// Remove any existing association between the provided id and any elements in this document.
pub fn unregister_named_element(&self, to_unregister: &Element, id: Atom) {
debug!("Removing named element from document {:p}: {:p} id={}",
@@ -1556,6 +1551,15 @@ impl Document {
self.process_deferred_scripts();
},
LoadType::PageSource(_) => {
if self.browsing_context.is_some() {
// Disarm the reflow timer and trigger the initial reflow.
self.reflow_timeout.set(None);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::FirstLoad);
}

// Deferred scripts have to wait for page to finish loading,
// this is the first opportunity to process them.

@@ -69,7 +69,7 @@ impl DOMParserMethods for DOMParser {
loader,
None,
None);
ServoParser::parse_html_document(&document, s, url, None);
ServoParser::parse_html_document(&document, s, url);
document.set_ready_state(DocumentReadyState::Complete);
Ok(document)
}
@@ -86,7 +86,7 @@ impl DOMParserMethods for DOMParser {
loader,
None,
None);
ServoParser::parse_xml_document(&document, s, url, None);
ServoParser::parse_xml_document(&document, s, url);
Ok(document)
}
}
@@ -20,9 +20,8 @@ use dom::globalscope::GlobalScope;
use dom::htmlformelement::HTMLFormElement;
use dom::htmlimageelement::HTMLImageElement;
use dom::htmlscriptelement::{HTMLScriptElement, ScriptResult};
use dom::node::{Node, NodeDamage, NodeSiblingIterator};
use dom::node::{Node, NodeSiblingIterator};
use dom::text::Text;
use dom::window::ReflowReason;
use encoding::all::UTF_8;
use encoding::types::{DecoderTrap, Encoding};
use html5ever::tokenizer::buffer_queue::BufferQueue;
@@ -35,13 +34,11 @@ use net_traits::{FetchMetadata, FetchResponseListener, Metadata, NetworkError};
use network_listener::PreInvoke;
use profile_traits::time::{TimerMetadata, TimerMetadataFrameType};
use profile_traits::time::{TimerMetadataReflowType, ProfilerCategory, profile};
use script_layout_interface::message::ReflowQueryType;
use script_thread::ScriptThread;
use servo_config::resource_files::read_resource_file;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::mem;
use style::context::ReflowGoal;

mod html;
mod xml;
@@ -63,9 +60,6 @@ pub struct ServoParser {
reflector: Reflector,
/// The document associated with this parser.
document: JS<Document>,
/// The pipeline associated with this parse, unavailable if this parse
/// does not correspond to a page load.
pipeline: Option<PipelineId>,
/// Input received from network.
#[ignore_heap_size_of = "Defined in html5ever"]
network_input: DOMRefCell<BufferQueue>,
@@ -89,16 +83,10 @@ enum LastChunkState {
}

impl ServoParser {
pub fn parse_html_document(
document: &Document,
input: DOMString,
url: ServoUrl,
owner: Option<PipelineId>) {
let parser = ServoParser::new(
document,
owner,
Tokenizer::Html(self::html::Tokenizer::new(document, url, None)),
LastChunkState::NotReceived);
/// Parses `input` as an HTML document into `document`, fetched from `url`.
///
/// Creates a `ServoParser` wrapping an HTML tokenizer with no fragment
/// context (`None`), marks the last chunk as not yet received
/// (`LastChunkState::NotReceived`), and feeds the entire input string as
/// the first chunk.
pub fn parse_html_document(document: &Document, input: DOMString, url: ServoUrl) {
let parser = ServoParser::new(document,
Tokenizer::Html(self::html::Tokenizer::new(document, url, None)),
LastChunkState::NotReceived);
parser.parse_chunk(String::from(input));
}

@@ -112,48 +100,47 @@ impl ServoParser {
// Step 1.
let loader = DocumentLoader::new_with_threads(context_document.loader().resource_threads().clone(),
Some(url.clone()));
let document = Document::new(window, None, Some(url.clone()),
let document = Document::new(window,
None,
Some(url.clone()),
context_document.origin().alias(),
IsHTMLDocument::HTMLDocument,
None, None,
None,
None,
DocumentSource::FromParser,
loader,
None, None);
None,
None);

// Step 2.
document.set_quirks_mode(context_document.quirks_mode());

// Step 11.
let form = context_node.inclusive_ancestors()
.find(|element| element.is::<HTMLFormElement>());
.find(|element| element.is::<HTMLFormElement>());
let fragment_context = FragmentContext {
context_elem: context_node,
form_elem: form.r(),
};

let parser = ServoParser::new(
&document,
None,
Tokenizer::Html(
self::html::Tokenizer::new(&document, url.clone(), Some(fragment_context))),
LastChunkState::Received);
let parser = ServoParser::new(&document,
Tokenizer::Html(self::html::Tokenizer::new(&document,
url.clone(),
Some(fragment_context))),
LastChunkState::Received);
parser.parse_chunk(String::from(input));

// Step 14.
let root_element = document.GetDocumentElement().expect("no document element");
FragmentParsingResult { inner: root_element.upcast::<Node>().children() }
FragmentParsingResult {
inner: root_element.upcast::<Node>().children(),
}
}

pub fn parse_xml_document(
document: &Document,
input: DOMString,
url: ServoUrl,
owner: Option<PipelineId>) {
let parser = ServoParser::new(
document,
owner,
Tokenizer::Xml(self::xml::Tokenizer::new(document, url)),
LastChunkState::NotReceived);
/// Parses `input` as an XML document into `document`, fetched from `url`.
///
/// Mirrors `parse_html_document` but drives an XML tokenizer instead:
/// the parser is created with `LastChunkState::NotReceived` and the whole
/// input string is fed as the first chunk.
pub fn parse_xml_document(document: &Document, input: DOMString, url: ServoUrl) {
let parser = ServoParser::new(document,
Tokenizer::Xml(self::xml::Tokenizer::new(document, url)),
LastChunkState::NotReceived);
parser.parse_chunk(String::from(input));
}

@@ -179,7 +166,8 @@ impl ServoParser {
assert!(self.suspended.get());
self.suspended.set(false);

mem::swap(&mut *self.script_input.borrow_mut(), &mut *self.network_input.borrow_mut());
mem::swap(&mut *self.script_input.borrow_mut(),
&mut *self.network_input.borrow_mut());
while let Some(chunk) = self.script_input.borrow_mut().pop_front() {
self.network_input.borrow_mut().push_back(chunk);
}
@@ -236,16 +224,13 @@ impl ServoParser {
}

#[allow(unrooted_must_root)]
fn new_inherited(
document: &Document,
pipeline: Option<PipelineId>,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState)
-> Self {
fn new_inherited(document: &Document,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState)
-> Self {
ServoParser {
reflector: Reflector::new(),
document: JS::from_ref(document),
pipeline: pipeline,
network_input: DOMRefCell::new(BufferQueue::new()),
script_input: DOMRefCell::new(BufferQueue::new()),
tokenizer: DOMRefCell::new(tokenizer),
@@ -256,16 +241,13 @@ impl ServoParser {
}

#[allow(unrooted_must_root)]
fn new(
document: &Document,
pipeline: Option<PipelineId>,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState)
-> Root<Self> {
reflect_dom_object(
box ServoParser::new_inherited(document, pipeline, tokenizer, last_chunk_state),
document.window(),
ServoParserBinding::Wrap)
/// Creates a rooted, reflected `ServoParser` for `document`.
///
/// Thin constructor: builds the unreflected struct via `new_inherited`
/// and registers it with the DOM reflector machinery in `document`'s
/// window (`ServoParserBinding::Wrap`).
fn new(document: &Document,
tokenizer: Tokenizer,
last_chunk_state: LastChunkState)
-> Root<Self> {
reflect_dom_object(box ServoParser::new_inherited(document, tokenizer, last_chunk_state),
document.window(),
ServoParserBinding::Wrap)
}

fn push_input_chunk(&self, chunk: String) {
@@ -313,7 +295,7 @@ impl ServoParser {
}

fn tokenize<F>(&self, mut feed: F)
where F: FnMut(&mut Tokenizer) -> Result<(), Root<HTMLScriptElement>>
where F: FnMut(&mut Tokenizer) -> Result<(), Root<HTMLScriptElement>>,
{
loop {
assert!(!self.suspended.get());
@@ -351,14 +333,6 @@ impl ServoParser {
self.tokenizer.borrow_mut().end();
self.document.set_current_parser(None);

if self.pipeline.is_some() {
// Initial reflow.
self.document.disarm_reflow_timeout();
self.document.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
let window = self.document.window();
window.reflow(ReflowGoal::ForDisplay, ReflowQueryType::NoQuery, ReflowReason::FirstLoad);
}

// Steps 3-12 are in another castle, namely finish_load.
let url = self.tokenizer.borrow().url().clone();
self.document.finish_load(LoadType::PageSource(url));
@@ -455,14 +429,13 @@ impl FetchResponseListener for ParserContext {

fn process_request_eof(&mut self) {}

fn process_response(&mut self,
meta_result: Result<FetchMetadata, NetworkError>) {
fn process_response(&mut self, meta_result: Result<FetchMetadata, NetworkError>) {
let mut ssl_error = None;
let metadata = match meta_result {
Ok(meta) => {
Some(match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
})
},
Err(NetworkError::SslValidation(url, reason)) => {
@@ -474,10 +447,8 @@ impl FetchResponseListener for ParserContext {
},
Err(_) => None,
};
let content_type =
metadata.clone().and_then(|meta| meta.content_type).map(Serde::into_inner);
let parser = match ScriptThread::page_headers_available(&self.id,
metadata) {
let content_type = metadata.clone().and_then(|meta| meta.content_type).map(Serde::into_inner);
let parser = match ScriptThread::page_headers_available(&self.id, metadata) {
Some(parser) => parser,
None => return,
};
@@ -505,7 +476,8 @@ impl FetchResponseListener for ParserContext {
parser.parse_sync();
parser.tokenizer.borrow_mut().set_plaintext_state();
},
Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, _))) => { // Handle text/html
Some(ContentType(Mime(TopLevel::Text, SubLevel::Html, _))) => {
// Handle text/html
if let Some(reason) = ssl_error {
self.is_synthesized_document = true;
let page_bytes = read_resource_file("badcert.html").unwrap();
@@ -524,15 +496,16 @@ impl FetchResponseListener for ParserContext {

// Show warning page for unknown mime types.
let page = format!("<html><body><p>Unknown content type ({}/{}).</p></body></html>",
toplevel.as_str(), sublevel.as_str());
toplevel.as_str(),
sublevel.as_str());
self.is_synthesized_document = true;
parser.push_input_chunk(page);
parser.parse_sync();
},
None => {
// No content-type header.
// Merge with #4212 when fixed.
}
},
}
}

@@ -595,6 +568,6 @@ fn insert(parent: &Node, reference_child: Option<&Node>, child: NodeOrText<JS<No
let text = Text::new(String::from(t).into(), &parent.owner_doc());
parent.InsertBefore(text.upcast(), reference_child).unwrap();
}
}
},
}
}
@@ -1181,7 +1181,7 @@ impl XMLHttpRequest {
self.response_json.get()
}

fn document_text_html(&self) -> Root<Document>{
fn document_text_html(&self) -> Root<Document> {
let charset = self.final_charset().unwrap_or(UTF_8);
let wr = self.global();
let decoded = charset.decode(&self.response.borrow(), DecoderTrap::Replace).unwrap();
@@ -1190,8 +1190,7 @@ impl XMLHttpRequest {
ServoParser::parse_html_document(
&document,
DOMString::from(decoded),
wr.get_url(),
Some(wr.pipeline_id()));
wr.get_url());
document
}

@@ -1204,8 +1203,7 @@ impl XMLHttpRequest {
ServoParser::parse_xml_document(
&document,
DOMString::from(decoded),
wr.get_url(),
Some(wr.pipeline_id()));
wr.get_url());
document
}

@@ -1849,17 +1849,9 @@ impl ScriptThread {
document.set_https_state(metadata.https_state);

if is_html_document == IsHTMLDocument::NonHTMLDocument {
ServoParser::parse_xml_document(
&document,
parse_input,
final_url,
Some(incomplete.pipeline_id));
ServoParser::parse_xml_document(&document, parse_input, final_url);
} else {
ServoParser::parse_html_document(
&document,
parse_input,
final_url,
Some(incomplete.pipeline_id));
ServoParser::parse_html_document(&document, parse_input, final_url);
}

if incomplete.is_frozen {
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.