Skip to content

Commit

Permalink
From<Digest> no longer panics
Browse files Browse the repository at this point in the history
We often do this conversion on things returned from a remote execution
API, and we shouldn't panic based on uncontrolled external input.
  • Loading branch information
illicitonion committed May 17, 2018
1 parent ffdede5 commit e3e8821
Show file tree
Hide file tree
Showing 7 changed files with 85 additions and 39 deletions.
18 changes: 15 additions & 3 deletions src/rust/engine/fs/brfs/src/main.rs
Expand Up @@ -308,7 +308,11 @@ impl BuildResultFS {
});

for (digest, name, filetype, is_executable) in directories.chain(files) {
let child_digest = digest.into();
let child_digest_result: Result<Digest, String> = digest.into();
let child_digest = child_digest_result.map_err(|err| {
error!("Error parsing digest: {:?}", err);
libc::ENOENT
})?;
let maybe_child_inode = match filetype {
fuse::FileType::Directory => self.inode_for_directory(child_digest),
fuse::FileType::RegularFile => self.inode_for_file(child_digest, is_executable),
Expand Down Expand Up @@ -406,11 +410,19 @@ impl fuse::Filesystem for BuildResultFS {
})
.and_then(|node| match node {
Node::Directory(directory_node) => {
let digest = directory_node.get_digest().into();
let digest_result: Result<Digest, String> = directory_node.get_digest().into();
let digest = digest_result.map_err(|err| {
error!("Error parsing digest: {:?}", err);
libc::ENOENT
})?;
self.dir_attr_for(digest)
}
Node::File(file_node) => {
let digest = file_node.get_digest().into();
let digest_result: Result<Digest, String> = file_node.get_digest().into();
let digest = digest_result.map_err(|err| {
error!("Error parsing digest: {:?}", err);
libc::ENOENT
})?;
self
.inode_for_file(digest, file_node.get_is_executable())
.map_err(|err| {
Expand Down
1 change: 1 addition & 0 deletions src/rust/engine/fs/src/lib.rs
Expand Up @@ -10,6 +10,7 @@ mod pool;
pub use pool::ResettablePool;

extern crate bazel_protos;
#[macro_use]
extern crate boxfuture;
extern crate byteorder;
extern crate bytes;
Expand Down
25 changes: 15 additions & 10 deletions src/rust/engine/fs/src/snapshot.rs
Expand Up @@ -237,15 +237,18 @@ impl Snapshot {
.group_by(|d| d.name.clone())
.into_iter()
.map(move |(child_name, group)| {
Self::merge_helper(
store2.clone(),
group.map(|d| d.get_digest().into()).collect(),
).map(move |merged_digest| {
let mut child_dir = bazel_protos::remote_execution::DirectoryNode::new();
child_dir.set_name(child_name);
child_dir.set_digest((&merged_digest).into());
child_dir
})
let store2 = store2.clone();
let digests_result = group
.map(|d| d.get_digest().into())
.collect::<Result<Vec<_>, String>>();
future::done(digests_result)
.and_then(move |digests| Self::merge_helper(store2.clone(), digests))
.map(move |merged_digest| {
let mut child_dir = bazel_protos::remote_execution::DirectoryNode::new();
child_dir.set_name(child_name);
child_dir.set_digest((&merged_digest).into());
child_dir
})
})
.collect::<Vec<_>>(),
).and_then(move |child_directories| {
Expand Down Expand Up @@ -486,8 +489,10 @@ mod tests {
assert_eq!(merged_root_directory.directories.len(), 1);

let merged_child_dirnode = merged_root_directory.directories[0].clone();
let merged_child_dirnode_digest: Result<Digest, String> =
merged_child_dirnode.get_digest().into();
let merged_child_directory = store
.load_directory(merged_child_dirnode.get_digest().into())
.load_directory(merged_child_dirnode_digest.unwrap())
.wait()
.unwrap()
.unwrap();
Expand Down
33 changes: 18 additions & 15 deletions src/rust/engine/fs/src/store.rs
Expand Up @@ -383,17 +383,18 @@ impl Store {
let mut accumulator = accumulator.lock().unwrap();
accumulator.insert(digest, EntryType::Directory);
for file in directory.get_files().into_iter() {
accumulator.insert(file.get_digest().into(), EntryType::File);
accumulator.insert(try_future!(file.get_digest().into()), EntryType::File);
}
}
future::join_all(
directory
.get_directories()
.into_iter()
.map(move |subdir| {
store
.clone()
.expand_directory_helper(subdir.get_digest().into(), accumulator.clone())
store.clone().expand_directory_helper(
try_future!(subdir.get_digest().into()),
accumulator.clone(),
)
})
.collect::<Vec<_>>(),
).map(|_| ())
Expand Down Expand Up @@ -428,7 +429,7 @@ impl Store {
.map(|file_node| {
let store = store.clone();
let path = destination.join(file_node.get_name());
let digest = file_node.get_digest().into();
let digest = try_future!(file_node.get_digest().into());
store.materialize_file(path, digest, file_node.is_executable)
})
.collect::<Vec<_>>();
Expand All @@ -438,7 +439,7 @@ impl Store {
.map(|directory_node| {
let store = store.clone();
let path = destination.join(directory_node.get_name());
let digest = directory_node.get_digest().into();
let digest = try_future!(directory_node.get_digest().into());
store.materialize_directory(path, digest)
})
.collect::<Vec<_>>();
Expand Down Expand Up @@ -512,7 +513,7 @@ impl Store {
let path = path_so_far_copy.join(file_node.get_name());
let contents_wrapped_copy = contents_wrapped_copy.clone();
store_copy
.load_file_bytes_with(file_node.get_digest().into(), |b| b)
.load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b)
.and_then(move |maybe_bytes| {
maybe_bytes
.ok_or_else(|| format!("Couldn't find file contents for {:?}", path))
Expand All @@ -521,6 +522,7 @@ impl Store {
contents.insert(path, bytes);
})
})
.to_boxed()
})
.collect::<Vec<_>>(),
);
Expand All @@ -530,7 +532,7 @@ impl Store {
.get_directories()
.into_iter()
.map(move |dir_node| {
let digest = dir_node.get_digest().into();
let digest = try_future!(dir_node.get_digest().into());
let path = path_so_far.join(dir_node.get_name());
let store = store.clone();
let contents_wrapped = contents_wrapped.clone();
Expand All @@ -541,6 +543,7 @@ impl Store {
.ok_or_else(|| format!("Could not find sub-directory with digest {:?}", digest))
})
.and_then(move |dir| store.contents_for_directory_helper(dir, path, contents_wrapped))
.to_boxed()
})
.collect::<Vec<_>>(),
);
Expand Down Expand Up @@ -1718,19 +1721,19 @@ mod remote {
.cas_client
.get()
.find_missing_blobs(&request)
.map(|response| {
response
.get_missing_blob_digests()
.iter()
.map(|digest| digest.into())
.collect()
})
.map_err(|err| {
format!(
"Error from server in response to find_missing_blobs_request: {:?}",
err
)
})
.and_then(|response| {
response
.get_missing_blob_digests()
.iter()
.map(|digest| digest.into())
.collect()
})
}
}

Expand Down
27 changes: 20 additions & 7 deletions src/rust/engine/process_execution/bazel_protos/src/conversions.rs
Expand Up @@ -9,12 +9,11 @@ impl<'a> From<&'a hashing::Digest> for super::remote_execution::Digest {
}
}

impl<'a> From<&'a super::remote_execution::Digest> for hashing::Digest {
impl<'a> From<&'a super::remote_execution::Digest> for Result<hashing::Digest, String> {
fn from(d: &super::remote_execution::Digest) -> Self {
hashing::Digest(
hashing::Fingerprint::from_hex_string(d.get_hash()).expect("Bad fingerprint in Digest"),
d.get_size_bytes() as usize,
)
hashing::Fingerprint::from_hex_string(d.get_hash())
.map_err(|err| format!("Bad fingerprint in Digest {:?}: {:?}", d.get_hash(), err))
.map(|fingerprint| hashing::Digest(fingerprint, d.get_size_bytes() as usize))
}
}

Expand Down Expand Up @@ -43,13 +42,27 @@ mod tests {
bazel_digest
.set_hash("0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_owned());
bazel_digest.set_size_bytes(10);
let converted: hashing::Digest = (&bazel_digest).into();
let converted: Result<hashing::Digest, String> = (&bazel_digest).into();
let want = hashing::Digest(
hashing::Fingerprint::from_hex_string(
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff",
).unwrap(),
10,
);
assert_eq!(converted, want);
assert_eq!(converted, Ok(want));
}

#[test]
fn from_bad_bazel_digest() {
  // A Digest whose hash field is not a valid hex fingerprint must convert to
  // an Err (with a descriptive message) rather than panicking.
  let mut digest = super::super::remote_execution::Digest::new();
  digest.set_hash("0".to_owned());
  digest.set_size_bytes(10);
  let conversion: Result<hashing::Digest, String> = (&digest).into();
  let error = conversion.expect_err("Want Err converting bad digest");
  assert!(
    error.starts_with("Bad fingerprint in Digest \"0\":"),
    "Bad error message: {}",
    error
  );
}
}
17 changes: 14 additions & 3 deletions src/rust/engine/process_execution/src/remote.rs
Expand Up @@ -109,7 +109,8 @@ impl CommandRunner {
match execute_request_result {
Ok((command, execute_request)) => {
let command_runner = self.clone();
self.upload_command(&command, execute_request.get_action().get_command_digest().into())
let command_digest = try_future!(execute_request.get_action().get_command_digest().into());
self.upload_command(&command, command_digest)
.and_then(move |_| {
debug!("Executing remotely request: {:?} (command: {:?})", execute_request, command);

Expand Down Expand Up @@ -334,7 +335,12 @@ impl CommandRunner {
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, ExecutionError> {
let stdout = if execute_response.get_result().has_stdout_digest() {
let stdout_digest = execute_response.get_result().get_stdout_digest().into();
let stdout_digest_result: Result<Digest, String> =
execute_response.get_result().get_stdout_digest().into();
let stdout_digest = try_future!(
stdout_digest_result
.map_err(|err| ExecutionError::Fatal(format!("Error extracting stdout: {}", err)))
);
self
.store
.load_file_bytes_with(stdout_digest, |v| v)
Expand Down Expand Up @@ -365,7 +371,12 @@ impl CommandRunner {
execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, ExecutionError> {
let stderr = if execute_response.get_result().has_stderr_digest() {
let stderr_digest = execute_response.get_result().get_stderr_digest().into();
let stderr_digest_result: Result<Digest, String> =
execute_response.get_result().get_stderr_digest().into();
let stderr_digest = try_future!(
stderr_digest_result
.map_err(|err| ExecutionError::Fatal(format!("Error extracting stderr: {}", err)))
);
self
.store
.load_file_bytes_with(stderr_digest, |v| v)
Expand Down
3 changes: 2 additions & 1 deletion src/rust/engine/testutil/mock/src/cas.rs
Expand Up @@ -376,7 +376,8 @@ impl bazel_protos::remote_execution_grpc::ContentAddressableStorage for StubCASR
let blobs = self.blobs.lock().unwrap();
let mut response = bazel_protos::remote_execution::FindMissingBlobsResponse::new();
for digest in req.get_blob_digests() {
let hashing_digest: Digest = digest.into();
let hashing_digest_result: Result<Digest, String> = digest.into();
let hashing_digest = hashing_digest_result.expect("Bad digest");
if !blobs.contains_key(&hashing_digest.0) {
response.mut_missing_blob_digests().push(digest.clone())
}
Expand Down

0 comments on commit e3e8821

Please sign in to comment.