fix(RUN-896): Raise unzipped Wasm limit
adambratschikaye committed Jan 31, 2024
1 parent 821b642 commit 5705126
Showing 4 changed files with 14 additions and 11 deletions.
6 changes: 4 additions & 2 deletions rs/embedders/src/wasm_utils/decoding.rs
@@ -1,9 +1,11 @@
+use ic_replicated_state::canister_state::system_state::wasm_chunk_store;
+use ic_types::NumBytes;
 use ic_wasm_types::{BinaryEncodedWasm, WasmValidationError};
 use std::io::Read;
 use std::sync::Arc;
 
 /// Maximum size of a WebAssembly module.
-pub const MAX_WASM_MODULE_SIZE_BYTES: usize = 30 * 1024 * 1024;
+pub const MAX_WASM_MODULE_SIZE_BYTES: NumBytes = wasm_chunk_store::DEFAULT_MAX_SIZE;
 
 fn make_module_too_large_error() -> WasmValidationError {
     WasmValidationError::DecodingError(format!(
@@ -72,7 +74,7 @@ pub fn decoded_wasm_size(module_bytes: &[u8]) -> Result<usize, WasmValidationError> {
 pub fn decode_wasm(module: Arc<Vec<u8>>) -> Result<BinaryEncodedWasm, WasmValidationError> {
     let module_bytes = module.as_slice();
     let (encoding, uncompressed_size) = wasm_encoding_and_size(module_bytes)?;
-    if uncompressed_size > MAX_WASM_MODULE_SIZE_BYTES {
+    if uncompressed_size as u64 > MAX_WASM_MODULE_SIZE_BYTES.get() {
         return Err(make_module_too_large_error());
     }
 
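The change above ties the decoder's limit to the chunk store's 100 MiB constant instead of a hard-coded 30 MiB usize, and compares through the wrapper's `get()`. Below is a standalone sketch of the resulting check; the `NumBytes` stand-in only mimics the `new`/`get` methods of `ic_types::NumBytes` that appear in the diff, and `check_uncompressed_size` is a hypothetical helper, not a function in the crate.

// Standalone sketch; NumBytes here is a minimal stand-in for ic_types::NumBytes.
#[derive(Clone, Copy)]
struct NumBytes(u64);

impl NumBytes {
    const fn new(n: u64) -> Self {
        NumBytes(n)
    }
    const fn get(self) -> u64 {
        self.0
    }
}

// Mirrors wasm_chunk_store::DEFAULT_MAX_SIZE and the new decoder constant.
const DEFAULT_MAX_SIZE: NumBytes = NumBytes::new(100 * 1024 * 1024); // 100 MiB
const MAX_WASM_MODULE_SIZE_BYTES: NumBytes = DEFAULT_MAX_SIZE;

// Hypothetical helper mirroring the comparison inside decode_wasm.
fn check_uncompressed_size(uncompressed_size: usize) -> Result<(), String> {
    if uncompressed_size as u64 > MAX_WASM_MODULE_SIZE_BYTES.get() {
        return Err("failed to decode wasm module: module too large".to_string());
    }
    Ok(())
}

fn main() {
    // 30 MiB was the old ceiling; it now fits comfortably under 100 MiB.
    assert!(check_uncompressed_size(30 * 1024 * 1024).is_ok());
    assert!(check_uncompressed_size(101 * 1024 * 1024).is_err());
}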
Binary file modified rs/embedders/tests/compressed/zeros.gz
17 changes: 9 additions & 8 deletions rs/embedders/tests/misc_tests.rs
@@ -131,21 +131,22 @@ fn compressed_test_contents(name: &str) -> Vec<u8> {
 #[test]
 #[should_panic(expected = "too large")]
 fn test_decode_large_compressed_module() {
-    // Try decoding 12MB of zeros
-    decode_wasm(Arc::new(compressed_test_contents("zeros.gz"))).unwrap();
-}
-
-#[test]
-#[should_panic(expected = "specified uncompressed size 100 does not match extracted size 101")]
-fn test_decode_large_compressed_module_with_tweaked_size() {
+    // Try decoding 101MB of zeros
     //
     // We also tested decoding with a much larger file.
     // To save space and CI time, we do not include the larger archive file and
     // do not generate it in the test. To reproduce the test, execute the following
     // command:
     //
-    // dd if=/dev/zero of=/dev/stdout bs=1048576 count=10240 | gzip -9 > zeroes.gz
+    // dd if=/dev/zero bs=1024 count=$((500 * 1024)) | gzip -9 > zeroes.gz
     //
     // and replace the zeroes.gz file used in the test.
+    decode_wasm(Arc::new(compressed_test_contents("zeros.gz"))).unwrap();
+}
+
+#[test]
+#[should_panic(expected = "specified uncompressed size 100 does not match extracted size 101")]
+fn test_decode_large_compressed_module_with_tweaked_size() {
     let mut contents = compressed_test_contents("zeros.gz");
     let n = contents.len();
     contents[n - 4..n].copy_from_slice(&100u32.to_le_bytes());
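The tweaked-size test works because a gzip stream ends with an ISIZE trailer: its last four bytes hold the uncompressed size modulo 2^32 in little-endian order (RFC 1952), which is what decode_wasm reads as the declared size. Overwriting them with 100u32 makes the declared size disagree with what actually inflates. A standalone sketch of that trailer manipulation follows; read_gzip_isize and patch_gzip_isize are hypothetical helper names, not part of the test suite.

// Read the declared uncompressed size (ISIZE field) from a gzip byte buffer.
fn read_gzip_isize(gz: &[u8]) -> Option<u32> {
    let n = gz.len();
    if n < 4 {
        return None;
    }
    let mut tail = [0u8; 4];
    tail.copy_from_slice(&gz[n - 4..n]);
    Some(u32::from_le_bytes(tail))
}

// Overwrite the ISIZE field, as the test does with 100u32.
fn patch_gzip_isize(gz: &mut [u8], declared: u32) {
    let n = gz.len();
    gz[n - 4..n].copy_from_slice(&declared.to_le_bytes());
}

fn main() {
    // Placeholder bytes standing in for compressed_test_contents("zeros.gz");
    // only the four-byte trailer matters for this illustration.
    let mut gz = vec![0u8; 32];
    patch_gzip_isize(&mut gz, 100);
    assert_eq!(read_gzip_isize(&gz), Some(100));
}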
2 changes: 1 addition & 1 deletion rs/replicated_state/src/canister_state/system_state/wasm_chunk_store.rs
@@ -8,7 +8,7 @@ use crate::{page_map::PageAllocatorFileDescriptor, PageMap};

 const PAGES_PER_CHUNK: u64 = 256;
 const CHUNK_SIZE: u64 = PAGES_PER_CHUNK * (PAGE_SIZE as u64);
-pub(crate) const DEFAULT_MAX_SIZE: NumBytes = NumBytes::new(100 * 1024 * 1024); // 100 MiB
+pub const DEFAULT_MAX_SIZE: NumBytes = NumBytes::new(100 * 1024 * 1024); // 100 MiB
 
 #[test]
 fn check_chunk_size() {
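For a sense of scale of the constant being made public: assuming PAGE_SIZE here is the 4 KiB host page size, each chunk in the store spans 1 MiB, so the 100 MiB DEFAULT_MAX_SIZE that now also caps unzipped Wasm corresponds to 100 chunks. A quick arithmetic check under that assumption:

// PAGE_SIZE is imported from elsewhere in the real module; 4 KiB is an assumption here.
const PAGE_SIZE: u64 = 4096;
const PAGES_PER_CHUNK: u64 = 256;
const CHUNK_SIZE: u64 = PAGES_PER_CHUNK * PAGE_SIZE;
const DEFAULT_MAX_SIZE_BYTES: u64 = 100 * 1024 * 1024;

fn main() {
    assert_eq!(CHUNK_SIZE, 1024 * 1024); // one 1 MiB chunk per 256 pages
    assert_eq!(DEFAULT_MAX_SIZE_BYTES / CHUNK_SIZE, 100); // 100 chunks at the default cap
}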
