feat: forbid most collections from containing zst elements/keys
dj8yfo committed Aug 21, 2023
1 parent 1953864 commit 1902665
Showing 6 changed files with 182 additions and 66 deletions.
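In short, after this change both serializing and deserializing a collection whose element (or key) type is zero-sized fail with ERROR_ZST_FORBIDDEN. A minimal sketch of the new behavior (assuming the `to_vec`/`from_slice` helpers exercised by the tests in this commit, and using the built-in zero-sized unit type `()`):

    // Serializing a Vec of a zero-sized type is rejected up front...
    let ser = borsh::to_vec(&vec![(), ()]);
    assert!(ser.is_err());

    // ...and so is deserializing one, before any length-driven work happens.
    let de = borsh::from_slice::<Vec<()>>(&2u32.to_le_bytes());
    assert!(de.is_err());
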
11 changes: 5 additions & 6 deletions borsh/src/de/mod.rs
@@ -21,6 +21,7 @@ use crate::__private::maybestd::{

#[cfg(feature = "rc")]
use crate::__private::maybestd::{rc::Rc, sync::Arc};
use crate::error::check_zst;

mod hint;

@@ -388,12 +389,7 @@ where
{
#[inline]
fn deserialize_reader<R: Read>(reader: &mut R) -> Result<Self> {
if size_of::<T>() == 0 {
return Err(Error::new(
ErrorKind::InvalidData,
"Vectors of zero-sized types are not allowed due to deny-of-service concerns on deserialization.",
));
}
check_zst::<T>()?;

let len = u32::deserialize_reader(reader)?;
if len == 0 {
@@ -493,6 +489,7 @@ pub mod hashes {
const ERROR_WRONG_ORDER_OF_KEYS: &str = "keys were not serialized in ascending order";
#[cfg(feature = "de_strict_order")]
use crate::__private::maybestd::io::{Error, ErrorKind};
use crate::error::check_zst;
#[cfg(feature = "de_strict_order")]
use core::cmp::Ordering;

@@ -537,6 +534,7 @@ pub mod hashes {
{
#[inline]
fn deserialize_reader<R: Read>(reader: &mut R) -> Result<Self> {
check_zst::<K>()?;
// NOTE: deserialize-as-you-go approach as once was in HashSet is better in the sense
// that it allows to fail early, and not allocate memory for all the entries
// which may fail `cmp()` checks
@@ -604,6 +602,7 @@ where
{
#[inline]
fn deserialize_reader<R: Read>(reader: &mut R) -> Result<Self> {
check_zst::<K>()?;
// NOTE: deserialize-as-you-go approach as once was in HashSet is better in the sense
// that it allows to fail early, and not allocate memory for all the entries
// which may fail `cmp()` checks
10 changes: 10 additions & 0 deletions borsh/src/error.rs
@@ -0,0 +1,10 @@
use crate::__private::maybestd::io::{Error, ErrorKind, Result};
use core::mem::size_of;
pub const ERROR_ZST_FORBIDDEN: &str = "Collections of zero-sized types are not allowed due to deny-of-service concerns on deserialization.";

pub(crate) fn check_zst<T>() -> Result<()> {
if size_of::<T>() == 0 {
return Err(Error::new(ErrorKind::InvalidData, ERROR_ZST_FORBIDDEN));
}
Ok(())
}
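The "deny-of-service" concern that check_zst guards against: a zero-sized element occupies no bytes on the wire, so a tiny payload can claim an enormous element count, and neither remaining-input checks nor allocation limits bound the resulting work. A rough illustration, reusing the 4-byte payload from the tests added below:

    // Little-endian u32 length prefix, as used by borsh collections.
    let payload = [0u8, 0u8, 0u8, 64u8];
    let claimed = u32::from_le_bytes(payload); // 0x4000_0000 = 1_073_741_824 elements
    // A non-zero-sized element type could never supply that many elements from
    // 4 bytes of input; a ZST "supplies" them for free, so check_zst rejects
    // the element type outright instead.
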
1 change: 1 addition & 0 deletions borsh/src/lib.rs
@@ -87,6 +87,7 @@ pub use schema::BorshSchema;
pub use schema_helpers::{try_from_slice_with_schema, try_to_vec_with_schema};
pub use ser::helpers::{to_vec, to_writer};
pub use ser::BorshSerialize;
pub mod error;

#[cfg(all(feature = "std", feature = "hashbrown"))]
compile_error!("feature \"std\" and feature \"hashbrown\" don't make sense at the same time");
39 changes: 24 additions & 15 deletions borsh/src/ser/mod.rs
@@ -1,14 +1,16 @@
use core::convert::TryFrom;
use core::marker::PhantomData;
use core::mem::size_of;

use crate::__private::maybestd::{
borrow::{Cow, ToOwned},
boxed::Box,
collections::{BTreeMap, BTreeSet, LinkedList, VecDeque},
io::{Error, ErrorKind, Result, Write},
string::String,
vec::Vec,

use crate::{
__private::maybestd::{
borrow::{Cow, ToOwned},
boxed::Box,
collections::{BTreeMap, BTreeSet, LinkedList, VecDeque},
io::{ErrorKind, Result, Write},
string::String,
vec::Vec,
},
error::check_zst,
};

#[cfg(feature = "rc")]
@@ -278,12 +280,8 @@ where
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
if size_of::<T>() == 0 {
return Err(Error::new(
ErrorKind::InvalidData,
"Vectors of zero-sized types are not allowed due to deny-of-service concerns on deserialization.",
));
}
check_zst::<T>()?;

self.as_slice().serialize(writer)
}
}
@@ -318,6 +316,8 @@ where
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<T>()?;

writer.write_all(
&(u32::try_from(self.len()).map_err(|_| ErrorKind::InvalidData)?).to_le_bytes(),
)?;
@@ -333,6 +333,8 @@
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<T>()?;

writer.write_all(
&(u32::try_from(self.len()).map_err(|_| ErrorKind::InvalidData)?).to_le_bytes(),
)?;
@@ -350,6 +352,7 @@ pub mod hashes {
//! Module defines [BorshSerialize](crate::ser::BorshSerialize) implementation for
//! [HashMap](std::collections::HashMap)/[HashSet](std::collections::HashSet).
use crate::__private::maybestd::vec::Vec;
use crate::error::check_zst;
use crate::{
BorshSerialize,
__private::maybestd::collections::{HashMap, HashSet},
@@ -367,6 +370,8 @@
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<K>()?;

let mut vec = self.iter().collect::<Vec<_>>();
vec.sort_by(|(a, _), (b, _)| a.partial_cmp(b).unwrap());
u32::try_from(vec.len())
@@ -387,6 +392,8 @@
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<T>()?;

let mut vec = self.iter().collect::<Vec<_>>();
vec.sort_by(|a, b| a.partial_cmp(b).unwrap());
u32::try_from(vec.len())
@@ -407,6 +414,7 @@ where
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<K>()?;
// NOTE: BTreeMap iterates over the entries that are sorted by key, so the serialization
// result will be consistent without a need to sort the entries as we do for HashMap
// serialization.
@@ -427,6 +435,7 @@ where
{
#[inline]
fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
check_zst::<T>()?;
// NOTE: BTreeSet iterates over the items that are sorted, so the serialization result will
// be consistent without a need to sort the entries as we do for HashSet serialization.
u32::try_from(self.len())
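
As the NOTE comments above point out, BTreeMap and BTreeSet already iterate in sorted order, so their encoding is canonical without the explicit sort that the HashMap/HashSet impls perform. A small sketch of that property (assuming borsh's `to_vec` helper together with its BorshSerialize impls for String, u32 and BTreeMap):

    use std::collections::BTreeMap;

    let mut a = BTreeMap::new();
    a.insert("k2".to_string(), 2u32);
    a.insert("k1".to_string(), 1u32);

    let mut b = BTreeMap::new();
    b.insert("k1".to_string(), 1u32);
    b.insert("k2".to_string(), 2u32);

    // Same entries, different insertion order, identical bytes.
    assert_eq!(borsh::to_vec(&a).unwrap(), borsh::to_vec(&b).unwrap());
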
45 changes: 0 additions & 45 deletions borsh/tests/test_zero_size.rs

This file was deleted.

142 changes: 142 additions & 0 deletions borsh/tests/test_zero_sized_types.rs
@@ -0,0 +1,142 @@
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg(feature = "derive")]

#[cfg(not(feature = "std"))]
extern crate alloc;
#[cfg(not(feature = "std"))]
use alloc::{string::ToString, vec, vec::Vec};

#[cfg(feature = "std")]
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, LinkedList, VecDeque};

#[cfg(not(feature = "std"))]
use alloc::collections::{BTreeMap, BTreeSet, LinkedList, VecDeque};
#[cfg(feature = "hashbrown")]
use hashbrown::{HashMap, HashSet};

use borsh::from_slice;
use borsh::to_vec;
use borsh::BorshDeserialize;
use borsh::BorshSerialize;

use borsh::error::ERROR_ZST_FORBIDDEN;
#[derive(BorshDeserialize, BorshSerialize, PartialEq, Debug, Eq, PartialOrd, Ord, Hash)]
struct A();

#[test]
fn test_deserialize_vec_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<Vec<A>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_serialize_vec_of_zst() {
let v = vec![A()];
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_deserialize_vec_deque_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<VecDeque<A>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_serialize_vec_deque_of_zst() {
let v: VecDeque<A> = vec![A()].into();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_deserialize_linked_list_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<LinkedList<A>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_serialize_linked_list_of_zst() {
let v: LinkedList<A> = vec![A()].into_iter().collect();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_deserialize_btreeset_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<BTreeSet<A>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_serialize_btreeset_of_zst() {
let v: BTreeSet<A> = vec![A()].into_iter().collect();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[cfg(hash_collections)]
#[test]
fn test_deserialize_hashset_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<HashSet<A>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[cfg(hash_collections)]
#[test]
fn test_serialize_hashset_of_zst() {
let v: HashSet<A> = vec![A()].into_iter().collect();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_deserialize_btreemap_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<BTreeMap<A, u64>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[test]
fn test_serialize_btreemap_of_zst() {
let v: BTreeMap<A, u64> = vec![(A(), 42u64)].into_iter().collect();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[cfg(hash_collections)]
#[test]
fn test_deserialize_hashmap_of_zst() {
let v = [0u8, 0u8, 0u8, 64u8];
let res = from_slice::<HashMap<A, u64>>(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[cfg(hash_collections)]
#[test]
fn test_serialize_hashmap_of_zst() {
let v: HashMap<A, u64> = vec![(A(), 42u64)].into_iter().collect();
let res = to_vec(&v);
assert_eq!(res.unwrap_err().to_string(), ERROR_ZST_FORBIDDEN);
}

#[derive(BorshDeserialize, BorshSerialize, PartialEq, Debug)]
struct B(u32);
#[test]
fn test_deserialize_non_zst() {
let v = [1, 0, 0, 0, 64, 0, 0, 0];
let res = Vec::<B>::try_from_slice(&v);
assert!(res.is_ok());
}

#[test]
fn test_serialize_non_zst() {
let v = vec![B(1)];
let res = to_vec(&v);
assert!(res.is_ok());
}
