Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update html5ever to 0.8 #13813

Merged
merged 2 commits into from Nov 3, 2016
Merged
Changes from all commits
Commits
File filter...
Filter file types
Jump to…
Jump to file
Failed to load files.

Always

Just for now

@@ -36,7 +36,7 @@ fnv = "1.0"
gfx_traits = {path = "../gfx_traits"}
heapsize = "0.3.6"
heapsize_derive = "0.1"
html5ever = {version = "0.5.1", features = ["heap_size", "unstable"]}
html5ever = {version = "0.8.0", features = ["heap_size", "unstable"]}
hyper = "0.9.9"
hyper_serde = "0.1.4"
image = "0.10"
@@ -29,14 +29,14 @@ use html5ever::serialize::{AttrRef, Serializable, Serializer};
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode};
use html5ever::tendril::StrTendril;
use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts};
use html5ever::tokenizer::{Tokenizer as H5ETokenizer, TokenizerOpts};
use html5ever::tree_builder::{NextParserState, NodeOrText, QuirksMode};
use html5ever::tree_builder::{TreeBuilder, TreeBuilderOpts, TreeSink};
use msg::constellation_msg::PipelineId;
use std::borrow::Cow;
use std::io::{self, Write};
use string_cache::QualName;
use super::{LastChunkState, ServoParser, Sink, Tokenizer};
use super::{HtmlTokenizer, LastChunkState, ServoParser, Sink, Tokenizer};
use url::Url;

fn insert(parent: &Node, reference_child: Option<&Node>, child: NodeOrText<JS<Node>>) {
@@ -276,10 +276,13 @@ pub fn parse_html(document: &Document,
let parser = match context {
ParseContext::Owner(owner) => {
let tb = TreeBuilder::new(sink, options);
let tok = HtmlTokenizer::new(tb, Default::default());
let tok = H5ETokenizer::new(tb, Default::default());

ServoParser::new(
document, owner, Tokenizer::HTML(tok), LastChunkState::NotReceived)
document,
owner,
Tokenizer::HTML(HtmlTokenizer::new(tok)),
LastChunkState::NotReceived)
},
ParseContext::Fragment(fc) => {
let tb = TreeBuilder::new_for_fragment(
@@ -292,10 +295,13 @@ pub fn parse_html(document: &Document,
initial_state: Some(tb.tokenizer_state_for_context_elem()),
.. Default::default()
};
let tok = HtmlTokenizer::new(tb, tok_options);
let tok = H5ETokenizer::new(tb, tok_options);

ServoParser::new(
document, None, Tokenizer::HTML(tok), LastChunkState::Received)
document,
None,
Tokenizer::HTML(HtmlTokenizer::new(tok)),
LastChunkState::Received)
}
};
parser.parse_chunk(String::from(input));
@@ -20,7 +20,8 @@ use dom::htmlimageelement::HTMLImageElement;
use dom::node::Node;
use encoding::all::UTF_8;
use encoding::types::{DecoderTrap, Encoding};
use html5ever::tokenizer::Tokenizer as HtmlTokenizer;
use html5ever::tokenizer::Tokenizer as H5ETokenizer;
use html5ever::tokenizer::buffer_queue::BufferQueue;
use html5ever::tree_builder::Tracer as HtmlTracer;
use html5ever::tree_builder::TreeBuilder as HtmlTreeBuilder;
use hyper::header::ContentType;
@@ -136,10 +137,6 @@ impl ServoParser {
self.tokenizer.borrow_mut().set_plaintext_state()
}

pub fn end_tokenizer(&self) {
self.tokenizer.borrow_mut().end()
}

pub fn suspend(&self) {
assert!(!self.suspended.get());
self.suspended.set(true);
@@ -220,16 +217,50 @@ impl ServoParser {
#[derive(HeapSizeOf)]
#[must_root]
enum Tokenizer {
HTML(
#[ignore_heap_size_of = "Defined in html5ever"]
HtmlTokenizer<HtmlTreeBuilder<JS<Node>, Sink>>
),
HTML(HtmlTokenizer),
XML(
#[ignore_heap_size_of = "Defined in xml5ever"]
XmlTokenizer<XmlTreeBuilder<JS<Node>, Sink>>
),
}

// Wrapper pairing html5ever's tokenizer with an owned input buffer.
// As of the html5ever 0.8 API, the caller owns the `BufferQueue` and
// passes it mutably to `Tokenizer::feed` (see `run` below), so Servo
// keeps both halves together in one struct.
#[derive(HeapSizeOf)]
#[must_root]
struct HtmlTokenizer {
    // The underlying html5ever tokenizer, driving a tree builder that
    // constructs Servo DOM nodes (`JS<Node>`) through `Sink`.
    #[ignore_heap_size_of = "Defined in html5ever"]
    inner: H5ETokenizer<HtmlTreeBuilder<JS<Node>, Sink>>,
    // Input chunks queued via `feed` but not yet consumed by `inner`.
    #[ignore_heap_size_of = "Defined in html5ever"]
    input_buffer: BufferQueue,
}

impl HtmlTokenizer {
    /// Wraps an already-configured html5ever tokenizer together with an
    /// empty input buffer.
    #[allow(unrooted_must_root)]
    fn new(inner: H5ETokenizer<HtmlTreeBuilder<JS<Node>, Sink>>) -> Self {
        HtmlTokenizer {
            inner: inner,
            input_buffer: BufferQueue::new(),
        }
    }

    /// Queues `input` and immediately drives the tokenizer over it
    /// (and over any previously buffered input).
    fn feed(&mut self, input: String) {
        self.input_buffer.push_back(input.into());
        self.run();
    }

    /// Runs the inner tokenizer over whatever is currently buffered;
    /// input it cannot consume yet remains in `input_buffer`.
    fn run(&mut self) {
        self.inner.feed(&mut self.input_buffer);
    }

    /// Signals end of input to the tokenizer. All buffered input must
    /// already have been consumed — calling this with pending input is
    /// a caller bug, hence the assert.
    fn end(&mut self) {
        assert!(self.input_buffer.is_empty());
        self.inner.end();
    }

    /// Switches the inner tokenizer into its plaintext state
    /// (delegated from `ServoParser::set_plaintext_state`).
    fn set_plaintext_state(&mut self) {
        self.inner.set_plaintext_state();
    }
}

#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
struct Sink {
@@ -240,7 +271,7 @@ struct Sink {
impl Tokenizer {
fn feed(&mut self, input: String) {
match *self {
Tokenizer::HTML(ref mut tokenizer) => tokenizer.feed(input.into()),
Tokenizer::HTML(ref mut tokenizer) => tokenizer.feed(input),
Tokenizer::XML(ref mut tokenizer) => tokenizer.feed(input.into()),
}
}
@@ -288,7 +319,7 @@ impl JSTraceable for Tokenizer {
node.trace(self.0);
}
}
let tree_builder = tokenizer.sink();
let tree_builder = tokenizer.inner.sink();
tree_builder.trace_handles(&tracer);
tree_builder.sink().trace(trc);
},

Some generated files are not rendered by default. Learn more.

ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.