Add support for byte range requests for blob URLs
ferjm committed Nov 26, 2018
1 parent 8538634 commit a844428
Showing 4 changed files with 179 additions and 195 deletions.
41 changes: 0 additions & 41 deletions components/net/blob_loader.rs

This file was deleted.

185 changes: 84 additions & 101 deletions components/net/fetch/methods.rs
@@ -2,10 +2,9 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use crate::blob_loader::load_blob_async;
use crate::data_loader::decode;
use crate::fetch::cors_cache::CorsCache;
use crate::filemanager_thread::FileManager;
use crate::filemanager_thread::{fetch_file_in_chunks, FILE_CHUNK_SIZE, FileManager};
use crate::http_loader::{determine_request_referrer, http_fetch, HttpState};
use crate::http_loader::{set_default_accept, set_default_accept_language};
use crate::subresource_integrity::is_response_integrity_valid;
@@ -19,28 +18,27 @@ use hyper::StatusCode;
use ipc_channel::ipc::IpcReceiver;
use mime::{self, Mime};
use mime_guess::guess_mime_type;
use net_traits::blob_url_store::parse_blob_url;
use net_traits::filemanager_thread::RelativePos;
use net_traits::request::{CredentialsMode, Destination, Referrer, Request, RequestMode};
use net_traits::request::{Origin, ResponseTainting, Window};
use net_traits::response::{Response, ResponseBody, ResponseType};
use net_traits::{FetchTaskTarget, NetworkError, ReferrerPolicy, ResourceFetchTiming};
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::fs::File;
use std::io::{BufRead, BufReader, Seek, SeekFrom};
use std::fs::{File, Metadata};
use std::io::{BufReader, Seek, SeekFrom};
use std::mem;
use std::ops::Bound;
use std::str;
use std::sync::atomic::Ordering;
use std::sync::{Arc, Mutex};
use std::thread;

lazy_static! {
static ref X_CONTENT_TYPE_OPTIONS: HeaderName =
HeaderName::from_static("x-content-type-options");
}

const FILE_CHUNK_SIZE: usize = 32768; //32 KB

pub type Target<'a> = &'a mut (dyn FetchTaskTarget + Send);

pub enum Data {
@@ -492,6 +490,36 @@ fn wait_for_response(response: &mut Response, target: Target, done_chan: &mut Do
}
}

/// Get the range bounds if the `Range` header is present.
fn get_range_bounds(range: Option<Range>, metadata: Option<Metadata>) -> RelativePos {
if let Some(ref range) = range
{
let (start, end) = match range
.iter()
.collect::<Vec<(Bound<u64>, Bound<u64>)>>()
.first()
{
Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
Some(&(Bound::Included(start), Bound::Included(end))) => {
// `end` should be greater than or equal to `start`.
(start, Some(i64::max(start as i64, end as i64)))
},
Some(&(Bound::Unbounded, Bound::Included(offset))) => {
if let Some(metadata) = metadata {
// `offset` cannot be bigger than the file size.
(metadata.len() - u64::min(metadata.len(), offset), None)
} else {
(0, None)
}
},
_ => (0, None),
};
RelativePos::from_opts(Some(start as i64), end)
} else {
RelativePos::from_opts(Some(0), None)
}
}
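
For reference, the three `Range` forms handled above map to byte offsets as follows. This is an illustrative, self-contained sketch (not part of the diff) that mirrors the `(start, end)` pair `get_range_bounds` computes before wrapping it in `RelativePos`; it uses only `std::ops::Bound`, and the names are hypothetical.

```rust
use std::ops::Bound;

/// Hypothetical stand-in for the mapping performed by `get_range_bounds`.
fn map_range(spec: (Bound<u64>, Bound<u64>), len: Option<u64>) -> (u64, Option<i64>) {
    match spec {
        // "bytes=100-"    -> read from offset 100 to the end of the resource.
        (Bound::Included(start), Bound::Unbounded) => (start, None),
        // "bytes=100-200" -> read offsets 100..=200; an end smaller than start is clamped up.
        (Bound::Included(start), Bound::Included(end)) => {
            (start, Some(i64::max(start as i64, end as i64)))
        },
        // "bytes=-50"     -> the last 50 bytes, which needs the total length (file metadata).
        (Bound::Unbounded, Bound::Included(suffix)) => match len {
            Some(len) => (len - u64::min(len, suffix), None),
            None => (0, None),
        },
        _ => (0, None),
    }
}

fn main() {
    assert_eq!(map_range((Bound::Included(100), Bound::Unbounded), Some(1000)), (100, None));
    assert_eq!(map_range((Bound::Included(100), Bound::Included(200)), Some(1000)), (100, Some(200)));
    assert_eq!(map_range((Bound::Unbounded, Bound::Included(50)), Some(1000)), (950, None));
}
```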

/// [Scheme fetch](https://fetch.spec.whatwg.org#scheme-fetch)
fn scheme_fetch(
request: &mut Request,
@@ -537,106 +565,35 @@ fn scheme_fetch(
}
if let Ok(file_path) = url.to_file_path() {
if let Ok(file) = File::open(file_path.clone()) {
let mime = guess_mime_type(file_path);
let mut response = Response::new(url, ResourceFetchTiming::new(request.timing_type()));

// Get range bounds (if any) and try to seek to the requested offset.
// If seeking fails, bail out with a NetworkError.
let range = get_range_bounds(request.headers.typed_get::<Range>(), file.metadata().ok());
let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
if reader.seek(SeekFrom::Start(range.start as u64)).is_err() {
*response.body.lock().unwrap() = ResponseBody::Done(vec![]);
return Response::network_error(NetworkError::Internal(
"Unexpected method for blob".into(),
));
}

let mut response =
Response::new(url, ResourceFetchTiming::new(request.timing_type()));
// Set Content-Type header.
let mime = guess_mime_type(file_path);
response.headers.typed_insert(ContentType::from(mime));

let (done_sender, done_receiver) = unbounded();
// Setup channel to receive cross-thread messages about the file fetch
// operation.
let (done_sender, done_receiver) = channel();
*done_chan = Some((done_sender.clone(), done_receiver));
*response.body.lock().unwrap() = ResponseBody::Receiving(vec![]);

let res_body = response.body.clone();

let cancellation_listener = context.cancellation_listener.clone();
fetch_file_in_chunks(done_sender,
reader,
response.body.clone(),
context.cancellation_listener.clone(),
range);
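
The chunked-read loop that used to live inline here (shown below as removed code) has moved into `fetch_file_in_chunks` in `components/net/filemanager_thread.rs`, which this hunk only imports. A minimal, self-contained sketch of that chunking behaviour under assumed names and a simplified signature (the real helper also feeds `done_chan` and checks the cancellation listener):

```rust
use std::io::{BufRead, BufReader, Cursor};

const FILE_CHUNK_SIZE: usize = 32768; // 32 KB, matching the constant moved out of this file

/// Hypothetical stand-in for the chunked reading done by `fetch_file_in_chunks`.
fn read_in_chunks<R: BufRead>(
    mut reader: R,
    range_len: Option<u64>,        // bytes to emit when the request had an end bound
    mut emit: impl FnMut(Vec<u8>), // stand-in for sending Data::Payload on done_chan
) {
    let mut sent: u64 = 0;
    loop {
        let buffer = reader.fill_buf().unwrap_or(&[]).to_vec();
        if buffer.is_empty() {
            break; // EOF: the caller then marks the body Done and sends Data::Done.
        }
        // Never emit bytes past the requested range end.
        let take = match range_len {
            Some(len) => usize::min(buffer.len(), (len - sent) as usize),
            None => buffer.len(),
        };
        emit(buffer[..take].to_vec());
        sent += take as u64;
        reader.consume(take);
        if range_len.map_or(false, |len| sent >= len) {
            break; // Range satisfied.
        }
    }
}

fn main() {
    // As if the request carried `Range: bytes=0-39999` against a 100 KB file.
    let reader = BufReader::with_capacity(FILE_CHUNK_SIZE, Cursor::new(vec![7u8; 100_000]));
    let mut total = 0;
    read_in_chunks(reader, Some(40_000), |chunk| total += chunk.len());
    assert_eq!(total, 40_000);
}
```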

let (start, end) = if let Some(ref range) = request.headers.typed_get::<Range>()
{
match range
.iter()
.collect::<Vec<(Bound<u64>, Bound<u64>)>>()
.first()
{
Some(&(Bound::Included(start), Bound::Unbounded)) => (start, None),
Some(&(Bound::Included(start), Bound::Included(end))) => {
// `end` should be greater than or equal to `start`.
(start, Some(u64::max(start, end)))
},
Some(&(Bound::Unbounded, Bound::Included(offset))) => {
if let Ok(metadata) = file.metadata() {
// `offset` cannot be bigger than the file size.
(metadata.len() - u64::min(metadata.len(), offset), None)
} else {
(0, None)
}
},
_ => (0, None),
}
} else {
(0, None)
};

thread::Builder::new()
.name("fetch file worker thread".to_string())
.spawn(move || {
let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
if reader.seek(SeekFrom::Start(start)).is_err() {
warn!("Fetch - could not seek to {:?}", start);
}

loop {
if cancellation_listener.lock().unwrap().cancelled() {
*res_body.lock().unwrap() = ResponseBody::Done(vec![]);
let _ = done_sender.send(Data::Cancelled);
return;
}
let length = {
let buffer = reader.fill_buf().unwrap().to_vec();
let mut buffer_len = buffer.len();
if let ResponseBody::Receiving(ref mut body) =
*res_body.lock().unwrap()
{
let offset = usize::min(
{
if let Some(end) = end {
let remaining_bytes =
end as usize - start as usize - body.len();
if remaining_bytes <= FILE_CHUNK_SIZE {
// This is the last chunk so we set buffer
// len to 0 to break the reading loop.
buffer_len = 0;
remaining_bytes
} else {
FILE_CHUNK_SIZE
}
} else {
FILE_CHUNK_SIZE
}
},
buffer.len(),
);
body.extend_from_slice(&buffer[0..offset]);
let _ = done_sender.send(Data::Payload(buffer));
}
buffer_len
};
if length == 0 {
let mut body = res_body.lock().unwrap();
let completed_body = match *body {
ResponseBody::Receiving(ref mut body) => {
mem::replace(body, vec![])
},
_ => vec![],
};
*body = ResponseBody::Done(completed_body);
let _ = done_sender.send(Data::Done);
break;
}
reader.consume(length);
}
})
.expect("Failed to create fetch file worker thread");
response
} else {
Response::network_error(NetworkError::Internal("Opening file failed".into()))
));
}

load_blob_async(url.clone(), context.filemanager.clone(), done_chan)
let range = get_range_bounds(request.headers.typed_get::<Range>(), None);

let (id, origin) = match parse_blob_url(&url) {
Ok((id, origin)) => (id, origin),
Err(()) => {
return Response::network_error(NetworkError::Internal("Invalid blob url".into()));
},
};

let mut response = Response::new(url);
let (done_sender, done_receiver) = channel();
*done_chan = Some((done_sender.clone(), done_receiver));
*response.body.lock().unwrap() = ResponseBody::Receiving(vec![]);
let check_url_validity = true;
if let Err(err) = context.filemanager.fetch_file(&done_sender,
context.cancellation_listener.clone(),
id,
check_url_validity,
origin,
&mut response,
range)
{
let _ = done_sender.send(Data::Done);
return Response::network_error(NetworkError::Internal(err));
};

response
},
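
Both the file and blob branches now hand a `done_chan` pair to the fetch machinery: the producer side (`fetch_file_in_chunks` or `FileManager::fetch_file`) sends one `Data::Payload` per chunk and finishes with `Data::Done` or `Data::Cancelled`, and `wait_for_response` drains the channel into the response body. Below is an illustrative, self-contained sketch of that protocol using a plain `std::sync::mpsc` channel; the local `Data` enum and variable names are stand-ins for the real types in this file.

```rust
use std::sync::mpsc::channel;

// Local stand-in for the `Data` enum defined near the top of fetch/methods.rs.
enum Data {
    Payload(Vec<u8>),
    Done,
    Cancelled,
}

fn main() {
    let (done_sender, done_receiver) = channel();

    // Producer side (normally fetch_file_in_chunks or FileManager::fetch_file).
    done_sender.send(Data::Payload(b"hello ".to_vec())).unwrap();
    done_sender.send(Data::Payload(b"world".to_vec())).unwrap();
    done_sender.send(Data::Done).unwrap();

    // Consumer side (normally wait_for_response), accumulating the response body.
    let mut body = Vec::new();
    while let Ok(msg) = done_receiver.recv() {
        match msg {
            Data::Payload(chunk) => body.extend_from_slice(&chunk),
            Data::Done | Data::Cancelled => break,
        }
    }
    assert_eq!(body, b"hello world".to_vec());
}
```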

"ftp" => {
