Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove the legacy networking stack. #14360

Merged
merged 9 commits into from Nov 24, 2016
@@ -16,7 +16,6 @@ content-blocker = "0.2.1"
cookie = {version = "0.2.5", features = ["serialize-rustc"]}
devtools_traits = {path = "../devtools_traits"}
flate2 = "0.2.0"
fnv = "1.0"
hyper = "0.9.9"
hyper_serde = "0.1.4"
immeta = "0.3.1"

This file was deleted.

@@ -5,120 +5,16 @@
use filemanager_thread::FileManager;
use hyper::header::{Charset, ContentLength, ContentType, Headers};
use hyper::header::{ContentDisposition, DispositionParam, DispositionType};
use hyper_serde::Serde;
use ipc_channel::ipc;
use mime::{Attr, Mime};
use mime_classifier::MimeClassifier;
use net_traits::{LoadConsumer, LoadData, Metadata, NetworkError};
use net_traits::ProgressMsg::{Done, Payload};
use net_traits::NetworkError;
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::ReadFileProgress;
use net_traits::response::HttpsState;
use resource_thread::{send_error, start_sending_sniffed_opt};
use resource_thread::CancellationListener;
use servo_url::ServoUrl;
use std::boxed::FnBox;
use std::sync::Arc;
use util::thread::spawn_named;

// TODO: Check on GET
// https://w3c.github.io/FileAPI/#requestResponseModel

/// Builds the legacy blob-protocol load handler: a boxed one-shot closure
/// (`FnBox` — pre-dates stable `Box<FnOnce>` calling) that captures the shared
/// `FileManager` and, when invoked by the resource thread, spawns a dedicated
/// named thread running `load_blob` for the requested blob URL.
pub fn factory(filemanager: FileManager)
-> Box<FnBox(LoadData, LoadConsumer, Arc<MimeClassifier>, CancellationListener) + Send> {
// `box` is the old nightly box-expression syntax, equivalent to `Box::new`.
box move |load_data: LoadData, start_chan, classifier, cancel_listener| {
// One worker thread per blob load; the formatted name aids debugging.
spawn_named(format!("blob loader for {}", load_data.url), move || {
load_blob(load_data, start_chan, classifier, filemanager, cancel_listener);
})
}
}

/// Legacy (pre-fetch) loader for `blob:` URLs.
///
/// Parses the blob URL, asks the `FileManager` to stream the blob's contents
/// over an IPC channel, converts the first `Meta` reply into response
/// `Metadata` (content type, disposition, length, forced 200 OK per the File
/// API spec), then forwards payload chunks to the consumer until EOF.
/// Any out-of-order reply or IPC failure is surfaced as an internal
/// `NetworkError` — either via `send_error` (before headers were sent) or as
/// a `Done(Err(..))` progress message (after).
fn load_blob(load_data: LoadData, start_chan: LoadConsumer,
classifier: Arc<MimeClassifier>,
filemanager: FileManager,
cancel_listener: CancellationListener) {
// IPC channel on which the file manager streams `ReadFileProgress` replies.
let (chan, recv) = ipc::channel().unwrap();
if let Ok((id, origin, _fragment)) = parse_blob_url(&load_data.url.clone()) {
let check_url_validity = true;
filemanager.read_file(chan, id, check_url_validity, origin, Some(cancel_listener));

// Receive first chunk
match recv.recv().unwrap() {
Ok(ReadFileProgress::Meta(blob_buf)) => {
// An unparseable type string falls back to text/plain.
let content_type: Mime = blob_buf.type_string.parse().unwrap_or(mime!(Text / Plain));
let charset = content_type.get_param(Attr::Charset);

let mut headers = Headers::new();

if let Some(name) = blob_buf.filename {
// Reuse the blob's charset for the filename parameter when it
// parses as a known `Charset`; otherwise default to US-ASCII.
let charset = charset.and_then(|c| c.as_str().parse().ok());
headers.set(ContentDisposition {
disposition: DispositionType::Inline,
parameters: vec![
DispositionParam::Filename(charset.unwrap_or(Charset::Us_Ascii),
None, name.as_bytes().to_vec())
]
});
}

headers.set(ContentType(content_type.clone()));
headers.set(ContentLength(blob_buf.size as u64));

let metadata = Metadata {
final_url: load_data.url.clone(),
content_type: Some(Serde(ContentType(content_type.clone()))),
charset: charset.map(|c| c.as_str().to_string()),
headers: Some(Serde(headers)),
// https://w3c.github.io/FileAPI/#TwoHundredOK
status: Some((200, b"OK".to_vec())),
https_state: HttpsState::None,
referrer: None,
};

// Deliver sniffed metadata plus the first payload chunk, then pump
// the remaining chunks until EOF or error.
if let Ok(chan) =
start_sending_sniffed_opt(start_chan, metadata, classifier,
&blob_buf.bytes, load_data.context.clone()) {
let _ = chan.send(Payload(blob_buf.bytes));

loop {
match recv.recv().unwrap() {
Ok(ReadFileProgress::Partial(bytes)) => {
let _ = chan.send(Payload(bytes));
}
Ok(ReadFileProgress::EOF) => {
let _ = chan.send(Done(Ok(())));
return;
}
Ok(_) => {
// A second `Meta` (or other out-of-order reply) is a
// protocol violation by the file manager.
let err = NetworkError::Internal("Invalid filemanager reply".to_string());
let _ = chan.send(Done(Err(err)));
return;
}
Err(e) => {
let err = NetworkError::Internal(format!("{:?}", e));
let _ = chan.send(Done(Err(err)));
return;
}
}
}
}
}
Ok(_) => {
// The first reply must be `Meta`; anything else is a protocol violation.
let err = NetworkError::Internal("Invalid filemanager reply".to_string());
send_error(load_data.url, err, start_chan);
}
Err(e) => {
let err = NetworkError::Internal(format!("{:?}", e));
send_error(load_data.url, err, start_chan);
}
}
} else {
// URL did not parse as blob:<origin>/<id>.
let e = format!("Invalid blob URL format {:?}", load_data.url);
let format_err = NetworkError::Internal(e);
send_error(load_data.url.clone(), format_err, start_chan);
}
}

/// https://fetch.spec.whatwg.org/#concept-basic-fetch (partial)
// TODO: make async.
pub fn load_blob_sync
@@ -135,7 +31,7 @@ pub fn load_blob_sync

let (sender, receiver) = ipc::channel().unwrap();
let check_url_validity = true;
filemanager.read_file(sender, id, check_url_validity, origin, None);
filemanager.read_file(sender, id, check_url_validity, origin);

let blob_buf = match receiver.recv().unwrap() {
Ok(ReadFileProgress::Meta(blob_buf)) => blob_buf,
@@ -2,13 +2,8 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use file_loader;
use mime_classifier::MimeClassifier;
use net_traits::{LoadConsumer, LoadData, NetworkError};
use resource_thread::{CancellationListener, send_error};
use servo_url::ServoUrl;
use std::fs::canonicalize;
use std::sync::Arc;
use url::percent_encoding::percent_decode;
use util::resource_files::resources_dir_path;

@@ -34,20 +29,3 @@ pub fn resolve_chrome_url(url: &ServoUrl) -> Result<ServoUrl, ()> {
_ => Err(())
}
}

/// Legacy entry point for `chrome://` URLs.
///
/// Resolves the chrome URL to its backing `file://` URL and delegates the
/// load to the file loader; if resolution fails, an internal `NetworkError`
/// is reported to the consumer instead.
pub fn factory(mut load_data: LoadData,
               start_chan: LoadConsumer,
               classifier: Arc<MimeClassifier>,
               cancel_listener: CancellationListener) {
    match resolve_chrome_url(&load_data.url) {
        Ok(resolved) => {
            // Rewrite the request in place and hand it off to the file loader.
            load_data.url = resolved;
            file_loader::factory(load_data, start_chan, classifier, cancel_listener)
        }
        Err(_) => {
            send_error(load_data.url,
                       NetworkError::Internal("Invalid chrome URL.".to_owned()),
                       start_chan);
        }
    }
}
@@ -3,28 +3,11 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use hyper::mime::{Attr, Mime, SubLevel, TopLevel, Value};
use mime_classifier::MimeClassifier;
use net_traits::{LoadData, Metadata, NetworkError};
use net_traits::LoadConsumer;
use net_traits::ProgressMsg::{Done, Payload};
use resource_thread::{CancellationListener, send_error, start_sending_sniffed_opt};
use rustc_serialize::base64::FromBase64;
use servo_url::ServoUrl;
use std::sync::Arc;
use url::Position;
use url::percent_encoding::percent_decode;

/// Legacy resource-thread entry point for `data:` URLs.
///
/// Runs synchronously on the caller's thread: data URLs are assumed small
/// enough that spawning a worker for base64 decoding would cost more than it
/// saves. That hypothesis is untested, so the work lives in a separate
/// `load` function to make moving it onto a thread easy if profiling ever
/// says otherwise.
pub fn factory(load_data: LoadData,
               senders: LoadConsumer,
               classifier: Arc<MimeClassifier>,
               cancel_listener: CancellationListener) {
    load(load_data, senders, classifier, cancel_listener)
}

pub enum DecodeError {
InvalidDataUri,
NonBase64DataUri,
@@ -70,33 +53,3 @@ pub fn decode(url: &ServoUrl) -> Result<DecodeData, DecodeError> {
}
Ok((content_type, bytes))
}

/// Decodes a `data:` URL and streams the result to the consumer.
///
/// Honors cancellation before doing any work, maps each `DecodeError`
/// variant to an internal `NetworkError`, and on success sends the sniffed
/// metadata followed by a single payload chunk and a `Done(Ok(()))`.
pub fn load(load_data: LoadData,
            start_chan: LoadConsumer,
            classifier: Arc<MimeClassifier>,
            cancel_listener: CancellationListener) {
    let url = load_data.url;

    // Bail out early if the request was cancelled before we started.
    if cancel_listener.is_cancelled() {
        return;
    }

    // Decode first; any failure short-circuits into an error report.
    let (content_type, bytes) = match decode(&url) {
        Ok(decoded) => decoded,
        Err(DecodeError::InvalidDataUri) => {
            return send_error(url, NetworkError::Internal("invalid data uri".to_owned()), start_chan);
        }
        Err(DecodeError::NonBase64DataUri) => {
            return send_error(url, NetworkError::Internal("non-base64 data uri".to_owned()), start_chan);
        }
    };

    let mut metadata = Metadata::default(url);
    metadata.set_content_type(Some(&content_type));
    if let Ok(chan) = start_sending_sniffed_opt(start_chan,
                                                metadata,
                                                classifier,
                                                &bytes,
                                                load_data.context) {
        let _ = chan.send(Payload(bytes));
        let _ = chan.send(Done(Ok(())));
    }
}
@@ -28,7 +28,6 @@ use net_traits::request::{CacheMode, CredentialsMode, Destination};
use net_traits::request::{RedirectMode, Referrer, Request, RequestMode, ResponseTainting};
use net_traits::request::{Type, Origin, Window};
use net_traits::response::{HttpsState, Response, ResponseBody, ResponseType};
use resource_thread::CancellationListener;
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::collections::HashSet;
@@ -992,7 +991,6 @@ fn http_network_fetch(request: Rc<Request>,
connector: connection,
};
let url = request.current_url();
let cancellation_listener = CancellationListener::new(None);

let request_id = context.devtools_chan.as_ref().map(|_| {
uuid::Uuid::new_v4().simple().to_string()
@@ -1004,7 +1002,7 @@ fn http_network_fetch(request: Rc<Request>,
let is_xhr = request.destination == Destination::None;
let wrapped_response = obtain_response(&factory, &url, &request.method.borrow(),
&request.headers.borrow(),
&cancellation_listener, &request.body.borrow(), &request.method.borrow(),
&request.body.borrow(), &request.method.borrow(),
&request.pipeline_id.get(), request.redirect_count.get() + 1,
request_id.as_ref().map(Deref::deref), is_xhr);

@@ -1015,7 +1013,6 @@ fn http_network_fetch(request: Rc<Request>,
let error = match error.error {
LoadErrorType::ConnectionAborted { .. } => unreachable!(),
LoadErrorType::Ssl { reason } => NetworkError::SslValidation(error.url, reason),
LoadErrorType::Cancelled => NetworkError::LoadCancelled,
e => NetworkError::Internal(e.description().to_owned())
};
return Response::network_error(error);
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.