Auto merge of #9840 - servo:url-1.0, r=<try>
Update to rust-url 1.0

**Do not merge yet:** rust-url 1.0 is not published yet and may still see breaking changes. For now, the goal of this PR is to demonstrate usage of the new API; a short sketch of the recurring old-to-new call sites follows the dependency list below.

Depends on:

* servo/rust-url#176
* rwf2/cookie-rs#42
* hyperium/hyper#740
* https://github.com/cyderize/rust-websocket/pull/70
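
As a rough illustration (not part of the commit itself), the sketch below pairs the rust-url 0.x calls being removed with their 1.0 replacements used throughout the diff; the example URL and printed values are hypothetical.

```rust
extern crate url;

use url::{Position, Url};

fn main() {
    // Hypothetical URL, chosen only to exercise the accessors touched in this PR.
    let url = Url::parse("data:text/plain;base64,SGVsbG8=?x=y").unwrap();

    // 0.x: url.serialize()                     1.0: url.as_str() / url.to_string()
    let serialized = url.as_str();

    // 0.x: url.scheme (a String field)         1.0: url.scheme() -> &str
    assert_eq!(url.scheme(), "data");

    // 0.x: url.non_relative_scheme_data()      1.0: url.path(), or Position slicing
    // when the query must be included as well (as data_loader.rs now does).
    let body = &url[Position::BeforePath..Position::AfterQuery];
    assert_eq!(body, "text/plain;base64,SGVsbG8=?x=y");

    // 0.x: url.host().map(|host| host.serialize())   1.0: url.host_str()
    let host = url.host_str(); // None for a data: URL

    println!("{} {} {:?}", serialized, url.path(), host);
}
```

Every 1.0 call above (`as_str`, `scheme()`, `path()`, `host_str()`, `Position` range indexing) appears verbatim in the hunks below; nothing beyond those is assumed.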

Review on Reviewable: https://reviewable.io/reviews/servo/servo/9840
bors-servo committed Apr 21, 2016
Parents: 3bfa4cc + 24d9d06 · Commit: 8b1ad8a
Showing 57 changed files with 516 additions and 649 deletions.
2 changes: 1 addition & 1 deletion components/compositing/constellation.rs
@@ -1920,7 +1920,7 @@ impl<LTF: LayoutThreadFactory, STF: ScriptThreadFactory> Constellation<LTF, STF>

let event_info = self.pipelines.get(&pipeline_id).and_then(|pipeline| {
pipeline.parent_info.map(|(containing_pipeline_id, subpage_id)| {
(containing_pipeline_id, subpage_id, pipeline.url.serialize())
(containing_pipeline_id, subpage_id, pipeline.url.to_string())
})
});

2 changes: 1 addition & 1 deletion components/devtools/actors/network_event.rs
@@ -312,7 +312,7 @@ impl NetworkEventActor {
}

pub fn add_request(&mut self, request: DevtoolsHttpRequest) {
self.request.url = request.url.serialize();
self.request.url = request.url.as_str().to_owned();
self.request.method = request.method.clone();
self.request.headers = request.headers.clone();
self.request.body = request.body;
2 changes: 1 addition & 1 deletion components/devtools/lib.rs
@@ -287,7 +287,7 @@ fn run_server(sender: Sender<DevtoolsControlMsg>,
let tab = TabActor {
name: actors.new_name("tab"),
title: String::from(title),
url: url.serialize(),
url: url.into_string(),
console: console.name(),
inspector: inspector.name(),
timeline: timeline.name(),
7 changes: 3 additions & 4 deletions components/layout/layout_thread.rs
@@ -1002,7 +1002,7 @@ impl LayoutThread {
let document = unsafe { ServoLayoutNode::new(&data.document) };
let document = document.as_document().unwrap();

debug!("layout: received layout request for: {}", self.url.borrow().serialize());
debug!("layout: received layout request for: {}", *self.url.borrow());

let mut rw_data = possibly_locked_rw_data.lock();

@@ -1048,8 +1048,7 @@
Some(x) => x,
};

debug!("layout: received layout request for: {}",
self.url.borrow().serialize());
debug!("layout: received layout request for: {}", *self.url.borrow());
if log_enabled!(log::LogLevel::Debug) {
node.dump();
}
@@ -1463,7 +1462,7 @@ impl LayoutThread {
/// Returns profiling information which is passed to the time profiler.
fn profiler_metadata(&self) -> Option<TimerMetadata> {
Some(TimerMetadata {
url: self.url.borrow().serialize(),
url: self.url.borrow().to_string(),
iframe: if self.is_iframe {
TimerMetadataFrameType::IFrame
} else {
1 change: 1 addition & 0 deletions components/net/Cargo.toml
@@ -38,6 +38,7 @@ flate2 = "0.2.0"
hyper = { version = "0.8", features = [ "serde-serialization" ] }
immeta = "0.3.1"
log = "0.3.5"
matches = "0.1"
mime = "0.2.0"
mime_guess = "1.6.0"
openssl = "0.7.6"
5 changes: 2 additions & 3 deletions components/net/about_loader.rs
@@ -27,8 +27,7 @@ pub fn factory(mut load_data: LoadData,
classifier: Arc<MIMEClassifier>,
cancel_listener: CancellationListener) {
let url = load_data.url.clone();
let non_relative_scheme_data = url.non_relative_scheme_data().unwrap();
match non_relative_scheme_data {
match url.path() {
"blank" => {
let metadata = Metadata {
final_url: load_data.url,
@@ -49,7 +48,7 @@ pub fn factory(mut load_data: LoadData,
}
"crash" => panic!("Loading the about:crash URL."),
"failure" | "not-found" =>
url_from_non_relative_scheme(&mut load_data, &(non_relative_scheme_data.to_owned() + ".html")),
url_from_non_relative_scheme(&mut load_data, &(url.path().to_owned() + ".html")),
"sslfail" => url_from_non_relative_scheme(&mut load_data, "badcert.html"),
_ => {
send_error(load_data.url, NetworkError::Internal("Unknown about: URL.".to_owned()), start_chan);
20 changes: 8 additions & 12 deletions components/net/chrome_loader.rs
@@ -6,26 +6,22 @@ use file_loader;
use mime_classifier::MIMEClassifier;
use net_traits::{LoadConsumer, LoadData, NetworkError};
use resource_thread::{CancellationListener, send_error};
use std::path::Path;
use std::sync::Arc;
use url::Url;
use util::resource_files::resources_dir_path;

pub fn resolve_chrome_url(url: &Url) -> Result<Url, ()> {
assert_eq!(url.scheme, "chrome");
// Skip the initial //
let non_relative_scheme_data = &url.non_relative_scheme_data().unwrap()[2..];
let relative_path = Path::new(non_relative_scheme_data);
assert_eq!(url.scheme(), "chrome");
let resources = resources_dir_path();
let mut path = resources.clone();
for segment in url.path_segments().unwrap() {
path.push(segment)
}
println!("{:?} {:?}, {:?} {:?}", resources.display(), path.display(), url, url.path());
// Don't allow chrome URLs access to files outside of the resources directory.
if non_relative_scheme_data.find("..").is_some() ||
relative_path.is_absolute() ||
relative_path.has_root() {
if !(path.starts_with(resources) && path.exists()) {
return Err(());
}

let mut path = resources_dir_path();
path.push(non_relative_scheme_data);
assert!(path.exists());
return Ok(Url::from_file_path(&*path).unwrap());
}

18 changes: 8 additions & 10 deletions components/net/cookie.rs
@@ -40,7 +40,7 @@ impl Cookie {
_ => (false, None)
};

let url_host = request.host().map_or("".to_owned(), |host| host.serialize());
let url_host = request.host_str().unwrap_or("").to_owned();

// Step 4
let mut domain = cookie.domain.clone().unwrap_or("".to_owned());
@@ -68,9 +68,7 @@ impl Cookie {
// Step 7
let mut path = cookie.path.unwrap_or("".to_owned());
if path.chars().next() != Some('/') {
let url_path = request.serialize_path();
let url_path = url_path.as_ref().map(|path| &**path);
path = Cookie::default_path(url_path.unwrap_or("")).to_owned();
path = Cookie::default_path(request.path()).to_owned();
}
cookie.path = Some(path);

@@ -147,26 +145,26 @@ impl Cookie {

// http://tools.ietf.org/html/rfc6265#section-5.4 step 1
pub fn appropriate_for_url(&self, url: &Url, source: CookieSource) -> bool {
let domain = url.host().map(|host| host.serialize());
let domain = url.host_str();
if self.host_only {
if self.cookie.domain != domain {
if self.cookie.domain.as_ref().map(String::as_str) != domain {
return false;
}
} else {
if let (Some(ref domain), &Some(ref cookie_domain)) = (domain, &self.cookie.domain) {
if let (Some(domain), &Some(ref cookie_domain)) = (domain, &self.cookie.domain) {
if !Cookie::domain_match(domain, cookie_domain) {
return false;
}
}
}

if let (Some(ref path), &Some(ref cookie_path)) = (url.serialize_path(), &self.cookie.path) {
if !Cookie::path_match(path, cookie_path) {
if let Some(ref cookie_path) = self.cookie.path {
if !Cookie::path_match(url.path(), cookie_path) {
return false;
}
}

if self.cookie.secure && url.scheme != "https" {
if self.cookie.secure && url.scheme() != "https" {
return false;
}
if self.cookie.httponly && source == CookieSource::NonHTTP {
61 changes: 22 additions & 39 deletions components/net/data_loader.rs
@@ -10,9 +10,8 @@ use net_traits::{LoadData, Metadata, NetworkError};
use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
use rustc_serialize::base64::FromBase64;
use std::sync::Arc;
use url::SchemeData;
use url::Url;
use url::percent_encoding::percent_decode;
use url::{Position, Url};

pub fn factory(load_data: LoadData,
senders: LoadConsumer,
@@ -33,58 +32,42 @@ pub enum DecodeError {
pub type DecodeData = (Mime, Vec<u8>);

pub fn decode(url: &Url) -> Result<DecodeData, DecodeError> {
assert!(&*url.scheme == "data");
assert!(url.scheme() == "data");
// Split out content type and data.
let mut scheme_data = match url.scheme_data {
SchemeData::NonRelative(ref scheme_data) => scheme_data.clone(),
_ => panic!("Expected a non-relative scheme URL."),
};
match url.query {
Some(ref query) => {
scheme_data.push_str("?");
scheme_data.push_str(query);
},
None => ()
}
let parts: Vec<&str> = scheme_data.splitn(2, ',').collect();
let parts: Vec<&str> = url[Position::BeforePath..Position::AfterQuery].splitn(2, ',').collect();
if parts.len() != 2 {
return Err(DecodeError::InvalidDataUri);
}

// ";base64" must come at the end of the content type, per RFC 2397.
// rust-http will fail to parse it because there's no =value part.
let mut is_base64 = false;
let mut ct_str = parts[0].to_owned();
if ct_str.ends_with(";base64") {
is_base64 = true;
let end_index = ct_str.len() - 7;
ct_str.truncate(end_index);
}
if ct_str.starts_with(";charset=") {
ct_str = format!("text/plain{}", ct_str);
let mut ct_str = parts[0];
let is_base64 = ct_str.ends_with(";base64");
if is_base64 {
ct_str = &ct_str[..ct_str.len() - ";base64".len()];
}
let ct_str = if ct_str.starts_with(";charset=") {
format!("text/plain{}", ct_str)
} else {
ct_str.to_owned()
};

// Parse the content type using rust-http.
// FIXME: this can go into an infinite loop! (rust-http #25)
let mut content_type: Option<Mime> = ct_str.parse().ok();
if content_type == None {
content_type = Some(Mime(TopLevel::Text, SubLevel::Plain,
vec!((Attr::Charset, Value::Ext("US-ASCII".to_owned())))));
}
let content_type = ct_str.parse().unwrap_or_else(|_| {
Mime(TopLevel::Text, SubLevel::Plain,
vec![(Attr::Charset, Value::Ext("US-ASCII".to_owned()))])
});

let bytes = percent_decode(parts[1].as_bytes());
let bytes = if is_base64 {
let mut bytes = percent_decode(parts[1].as_bytes()).collect::<Vec<_>>();
if is_base64 {
// FIXME(#2909): It’s unclear what to do with non-alphabet characters,
// but Acid 3 apparently depends on spaces being ignored.
let bytes = bytes.into_iter().filter(|&b| b != ' ' as u8).collect::<Vec<u8>>();
bytes = bytes.into_iter().filter(|&b| b != ' ' as u8).collect::<Vec<u8>>();
match bytes.from_base64() {
Err(..) => return Err(DecodeError::NonBase64DataUri),
Ok(data) => data,
Ok(data) => bytes = data,
}
} else {
bytes
};
Ok((content_type.unwrap(), bytes))
}
Ok((content_type, bytes))
}

pub fn load(load_data: LoadData,
