diff --git a/src/array/binary/mutable.rs b/src/array/binary/mutable.rs
index 51e578945d0..5e9a97f28cd 100644
--- a/src/array/binary/mutable.rs
+++ b/src/array/binary/mutable.rs
@@ -410,8 +410,8 @@ impl<O: Offset, T: AsRef<[u8]>> TryPush<Option<T>> for MutableBinaryArray<O> {
             Some(value) => {
                 let bytes = value.as_ref();

-                let size = O::from_usize(self.values.len() + bytes.len())
-                    .ok_or(ArrowError::KeyOverflowError)?;
+                let size =
+                    O::from_usize(self.values.len() + bytes.len()).ok_or(ArrowError::Overflow)?;

                 self.values.extend_from_slice(bytes);
diff --git a/src/array/dictionary/mutable.rs b/src/array/dictionary/mutable.rs
index 623b6ad4ead..0527be6ad1b 100644
--- a/src/array/dictionary/mutable.rs
+++ b/src/array/dictionary/mutable.rs
@@ -70,7 +70,7 @@ impl<K: DictionaryKey, M: MutableArray> MutableDictionaryArray<K, M> {
                 Ok(false)
             }
             None => {
-                let key = K::from_usize(self.map.len()).ok_or(ArrowError::KeyOverflowError)?;
+                let key = K::from_usize(self.map.len()).ok_or(ArrowError::Overflow)?;
                 self.map.insert(hash, key);
                 self.keys.push(Some(key));
                 Ok(true)
diff --git a/src/array/fixed_size_list/mutable.rs b/src/array/fixed_size_list/mutable.rs
index cf0850ba92b..9c0644a59d1 100644
--- a/src/array/fixed_size_list/mutable.rs
+++ b/src/array/fixed_size_list/mutable.rs
@@ -58,7 +58,7 @@ impl<M: MutableArray> MutableFixedSizeListArray<M> {
     #[inline]
     fn try_push_valid(&mut self) -> Result<()> {
         if self.values.len() % self.size != 0 {
-            return Err(ArrowError::KeyOverflowError);
+            return Err(ArrowError::Overflow);
         };
         if let Some(validity) = &mut self.validity {
             validity.push(true)
diff --git a/src/array/list/mutable.rs b/src/array/list/mutable.rs
index 57a3590122e..64b0de8ce30 100644
--- a/src/array/list/mutable.rs
+++ b/src/array/list/mutable.rs
@@ -130,7 +130,7 @@ impl<O: Offset, M: MutableArray> MutableListArray<O, M> {
     /// This is a relatively low level function, prefer `try_push` when you can.
     pub fn try_push_valid(&mut self) -> Result<()> {
         let size = self.values.len();
-        let size = O::from_usize(size).ok_or(ArrowError::KeyOverflowError)?; // todo: make this error
+        let size = O::from_usize(size).ok_or(ArrowError::Overflow)?;
         assert!(size >= *self.offsets.last().unwrap());
         self.offsets.push(size);
diff --git a/src/array/utf8/mutable.rs b/src/array/utf8/mutable.rs
index 52b0a859608..4d33a3ef7af 100644
--- a/src/array/utf8/mutable.rs
+++ b/src/array/utf8/mutable.rs
@@ -448,7 +448,7 @@ impl<O: Offset, T: AsRef<str>> TryPush<Option<T>> for MutableUtf8Array<O> {
                 let bytes = value.as_ref().as_bytes();
                 self.values.extend_from_slice(bytes);

-                let size = O::from_usize(self.values.len()).ok_or(ArrowError::KeyOverflowError)?;
+                let size = O::from_usize(self.values.len()).ok_or(ArrowError::Overflow)?;

                 self.offsets.push(size);
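All five `Mutable*` containers above now map a failed `from_usize` conversion to the payload-free `ArrowError::Overflow`. A minimal sketch of that pattern outside the crate's `Offset` trait, using plain `i32` and `TryFrom` (the `to_offset` helper is mine, not arrow2's):

```rust
use arrow2::error::ArrowError;

/// Sketch: convert a `usize` length into a narrower offset type, reporting a
/// failed conversion as the new `ArrowError::Overflow`.
fn to_offset(len: usize) -> Result<i32, ArrowError> {
    i32::try_from(len).map_err(|_| ArrowError::Overflow)
}

fn main() {
    assert!(to_offset(10).is_ok());
    // lengths beyond i32::MAX do not fit and now surface as `Overflow`
    assert!(matches!(to_offset(usize::MAX), Err(ArrowError::Overflow)));
}
```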
diff --git a/src/compute/cast/dictionary_to.rs b/src/compute/cast/dictionary_to.rs
index 375d69546d0..88fc33e8870 100644
--- a/src/compute/cast/dictionary_to.rs
+++ b/src/compute/cast/dictionary_to.rs
@@ -13,7 +13,7 @@ macro_rules! key_cast {
         // Failure to cast keys (because they don't fit in the
         // target type) results in NULL values;
         if cast_keys.null_count() > $keys.null_count() {
-            return Err(ArrowError::KeyOverflowError);
+            return Err(ArrowError::Overflow);
         }
         Ok(Box::new(DictionaryArray::<$to_type>::from_data(
             cast_keys, $values,
@@ -74,7 +74,7 @@ where
     let casted_keys = primitive_to_primitive::<K1, K2>(keys, &K2::DATA_TYPE);

     if casted_keys.null_count() > keys.null_count() {
-        Err(ArrowError::KeyOverflowError)
+        Err(ArrowError::Overflow)
     } else {
         Ok(DictionaryArray::from_data(casted_keys, values.clone()))
     }
@@ -94,7 +94,7 @@ where
     let casted_keys = primitive_as_primitive::<K1, K2>(keys, &K2::DATA_TYPE);

     if casted_keys.null_count() > keys.null_count() {
-        Err(ArrowError::KeyOverflowError)
+        Err(ArrowError::Overflow)
     } else {
         Ok(DictionaryArray::from_data(casted_keys, values.clone()))
     }
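The casts above reuse an existing signal instead of a dedicated variant: a key that does not fit in the target type becomes null, so a higher null count after casting means at least one key overflowed. A self-contained model of that check, with the arrays simplified to `Vec<Option<_>>` (this is not the arrow2 API):

```rust
use arrow2::error::ArrowError;

/// Simplified model of `key_cast!`: cast i64 keys to i8, turning lossy casts
/// into nulls, then compare null counts to detect overflow.
fn cast_keys(keys: &[Option<i64>]) -> Result<Vec<Option<i8>>, ArrowError> {
    let nulls_before = keys.iter().filter(|k| k.is_none()).count();
    let cast: Vec<Option<i8>> = keys
        .iter()
        .copied()
        .map(|k| k.and_then(|v| i8::try_from(v).ok()))
        .collect();
    let nulls_after = cast.iter().filter(|k| k.is_none()).count();
    if nulls_after > nulls_before {
        // at least one key did not fit in the target type
        Err(ArrowError::Overflow)
    } else {
        Ok(cast)
    }
}

fn main() {
    assert!(cast_keys(&[Some(1), None]).is_ok());
    assert!(cast_keys(&[Some(1000)]).is_err()); // 1000 > i8::MAX
}
```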
diff --git a/src/datatypes/field.rs b/src/datatypes/field.rs
index f6ba2f84e76..fcd3b43bb76 100644
--- a/src/datatypes/field.rs
+++ b/src/datatypes/field.rs
@@ -177,7 +177,7 @@ impl Field {
         for (key, from_value) in from_metadata {
             if let Some(self_value) = self_metadata.get(key) {
                 if self_value != from_value {
-                    return Err(ArrowError::Schema(format!(
+                    return Err(ArrowError::InvalidArgumentError(format!(
                         "Fail to merge field due to conflicting metadata data value for key {}", key),
                     ));
                 }
@@ -193,12 +193,12 @@ impl Field {
             _ => {}
         }
         if from.dict_id != self.dict_id {
-            return Err(ArrowError::Schema(
+            return Err(ArrowError::InvalidArgumentError(
                 "Fail to merge schema Field due to conflicting dict_id".to_string(),
             ));
         }
         if from.dict_is_ordered != self.dict_is_ordered {
-            return Err(ArrowError::Schema(
+            return Err(ArrowError::InvalidArgumentError(
                 "Fail to merge schema Field due to conflicting dict_is_ordered".to_string(),
             ));
         }
@@ -220,7 +220,7 @@ impl Field {
                 }
             }
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::InvalidArgumentError(
                     "Fail to merge schema Field due to conflicting datatype".to_string(),
                 ));
             }
@@ -241,7 +241,7 @@ impl Field {
                 }
             }
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::InvalidArgumentError(
                     "Fail to merge schema Field due to conflicting datatype".to_string(),
                 ));
             }
@@ -279,7 +279,7 @@ impl Field {
             | DataType::Map(_, _)
             | DataType::Decimal(_, _) => {
                 if self.data_type != from.data_type {
-                    return Err(ArrowError::Schema(
+                    return Err(ArrowError::InvalidArgumentError(
                         "Fail to merge schema Field due to conflicting datatype".to_string(),
                     ));
                 }
diff --git a/src/datatypes/schema.rs b/src/datatypes/schema.rs
index 4ab3488a63d..70bcb7b21a7 100644
--- a/src/datatypes/schema.rs
+++ b/src/datatypes/schema.rs
@@ -120,7 +120,7 @@ impl Schema {
             // merge metadata
             if let Some(old_val) = merged.metadata.get(&key) {
                 if old_val != &value {
-                    return Err(ArrowError::Schema(
+                    return Err(ArrowError::InvalidArgumentError(
                         "Fail to merge schema due to conflicting metadata.".to_string(),
                     ));
                 }
diff --git a/src/error.rs b/src/error.rs
index c1938453494..48c51e45ba6 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -5,31 +5,23 @@ use std::error::Error;

 /// Enum with all errors in this crate.
 #[derive(Debug)]
+#[non_exhaustive]
 pub enum ArrowError {
     /// Returned when functionality is not yet available.
     NotYetImplemented(String),
-    /// Triggered by an external error, such as CSV, serde, chrono.
+    /// Wrapper for an error triggered by a dependency
     External(String, Box<dyn Error + Send + Sync>),
-    /// Error associated with incompatible schemas.
-    Schema(String),
-    /// Errors associated with IO
+    /// Wrapper for IO errors
     Io(std::io::Error),
     /// When an invalid argument is passed to a function.
     InvalidArgumentError(String),
-    /// Error during import or export to/from C Data Interface
-    Ffi(String),
-    /// Error during import or export to/from IPC
-    Ipc(String),
     /// Error during import or export to/from a format
     ExternalFormat(String),
     /// Whenever pushing to a container fails because it does not support more entries.
-    /// (e.g. maximum size of the keys of a dictionary overflowed)
-    KeyOverflowError,
-    /// Error during arithmetic operation. Normally returned
-    /// during checked operations
-    ArithmeticError(String),
-    /// Any other error.
-    Other(String),
+    /// The solution is usually to use a higher-capacity container-backing type.
+    Overflow,
+    /// Whenever incoming data from the C data interface, IPC or Flight does not fulfil the Arrow specification.
+    OutOfSpec(String),
 }

 impl ArrowError {
@@ -66,27 +58,17 @@ impl Display for ArrowError {
             ArrowError::External(message, source) => {
                 write!(f, "External error{}: {}", message, &source)
             }
-            ArrowError::Schema(desc) => write!(f, "Schema error: {}", desc),
             ArrowError::Io(desc) => write!(f, "Io error: {}", desc),
             ArrowError::InvalidArgumentError(desc) => {
                 write!(f, "Invalid argument error: {}", desc)
             }
-            ArrowError::Ffi(desc) => {
-                write!(f, "FFI error: {}", desc)
-            }
-            ArrowError::Ipc(desc) => {
-                write!(f, "IPC error: {}", desc)
-            }
             ArrowError::ExternalFormat(desc) => {
                 write!(f, "External format error: {}", desc)
             }
-            ArrowError::KeyOverflowError => {
-                write!(f, "Dictionary key bigger than the key type")
-            }
-            ArrowError::ArithmeticError(desc) => {
-                write!(f, "Arithmetic error: {}", desc)
+            ArrowError::Overflow => {
+                write!(f, "Operation overflowed the backing container.")
             }
-            ArrowError::Other(message) => {
+            ArrowError::OutOfSpec(message) => {
                 write!(f, "{}", message)
             }
         }
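Because the enum is now `#[non_exhaustive]`, downstream matches must include a wildcard arm; that is what allows adding variants later without a breaking release. A sketch of consuming code (the `describe` helper is hypothetical):

```rust
use arrow2::error::ArrowError;

/// Hypothetical helper mapping the new variants to short descriptions.
fn describe(err: &ArrowError) -> &'static str {
    match err {
        ArrowError::Overflow => "the container's backing type is too small",
        ArrowError::OutOfSpec(_) => "incoming data violates the Arrow specification",
        ArrowError::InvalidArgumentError(_) => "invalid argument",
        ArrowError::Io(_) => "I/O failure",
        // mandatory: the enum is #[non_exhaustive]
        _ => "other error",
    }
}

fn main() {
    assert_eq!(describe(&ArrowError::Overflow), "the container's backing type is too small");
}
```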
diff --git a/src/ffi/ffi.rs b/src/ffi/ffi.rs
index f05e30de1c6..24b06ae8e14 100644
--- a/src/ffi/ffi.rs
+++ b/src/ffi/ffi.rs
@@ -184,7 +184,9 @@ unsafe fn create_buffer<T: NativeType>(
     index: usize,
 ) -> Result<Buffer<T>> {
     if array.buffers.is_null() {
-        return Err(ArrowError::Ffi("The array buffers are null".to_string()));
+        return Err(ArrowError::OutOfSpec(
+            "The array buffers are null".to_string(),
+        ));
     }

     let buffers = array.buffers as *mut *const u8;
@@ -197,7 +199,9 @@ unsafe fn create_buffer<T: NativeType>(
     let offset = buffer_offset(array, data_type, index);
     let bytes = ptr
         .map(|ptr| Bytes::new(ptr, len, deallocation))
-        .ok_or_else(|| ArrowError::Ffi(format!("The buffer at position {} is null", index)))?;
+        .ok_or_else(|| {
+            ArrowError::OutOfSpec(format!("The buffer at position {} is null", index))
+        })?;

     Ok(Buffer::from_bytes(bytes).slice(offset, len - offset))
 }
@@ -215,7 +219,9 @@ unsafe fn create_bitmap(
     index: usize,
 ) -> Result<Bitmap> {
     if array.buffers.is_null() {
-        return Err(ArrowError::Ffi("The array buffers are null".to_string()));
+        return Err(ArrowError::OutOfSpec(
+            "The array buffers are null".to_string(),
+        ));
     }
     let len = array.length as usize;
     let offset = array.offset as usize;
@@ -229,7 +235,7 @@ unsafe fn create_bitmap(
     let bytes = ptr
         .map(|ptr| Bytes::new(ptr, bytes_len, deallocation))
         .ok_or_else(|| {
-            ArrowError::Ffi(format!(
+            ArrowError::OutOfSpec(format!(
                 "The buffer {} is a null pointer and cannot be interpreted as a bitmap",
                 index
             ))
diff --git a/src/ffi/schema.rs b/src/ffi/schema.rs
index 45fd6d9a587..bf84963bcb7 100644
--- a/src/ffi/schema.rs
+++ b/src/ffi/schema.rs
@@ -243,7 +243,7 @@ fn to_integer_type(format: &str) -> Result<IntegerType> {
         "l" => Int64,
         "L" => UInt64,
         _ => {
-            return Err(ArrowError::Ffi(
+            return Err(ArrowError::OutOfSpec(
                 "Dictionary indices can only be integers".to_string(),
             ))
         }
@@ -312,36 +312,40 @@ unsafe fn to_data_type(schema: &Ffi_ArrowSchema) -> Result<DataType> {
         } else if parts.len() == 2 && parts[0] == "tsn" {
             DataType::Timestamp(TimeUnit::Nanosecond, Some(parts[1].to_string()))
         } else if parts.len() == 2 && parts[0] == "w" {
-            let size = parts[1]
-                .parse::<usize>()
-                .map_err(|_| ArrowError::Ffi("size is not a valid integer".to_string()))?;
+            let size = parts[1].parse::<usize>().map_err(|_| {
+                ArrowError::OutOfSpec("size is not a valid integer".to_string())
+            })?;
             DataType::FixedSizeBinary(size)
         } else if parts.len() == 2 && parts[0] == "+w" {
-            let size = parts[1]
-                .parse::<usize>()
-                .map_err(|_| ArrowError::Ffi("size is not a valid integer".to_string()))?;
+            let size = parts[1].parse::<usize>().map_err(|_| {
+                ArrowError::OutOfSpec("size is not a valid integer".to_string())
+            })?;
             let child = to_field(schema.child(0))?;
             DataType::FixedSizeList(Box::new(child), size)
         } else if parts.len() == 2 && parts[0] == "d" {
             let parts = parts[1].split(',').collect::<Vec<_>>();
             if parts.len() < 2 || parts.len() > 3 {
-                return Err(ArrowError::Ffi(
+                return Err(ArrowError::OutOfSpec(
                     "Decimal must contain 2 or 3 comma-separated values".to_string(),
                 ));
             };
             if parts.len() == 3 {
                 let bit_width = parts[0].parse::<usize>().map_err(|_| {
-                    ArrowError::Ffi("Decimal bit width is not a valid integer".to_string())
+                    ArrowError::OutOfSpec(
+                        "Decimal bit width is not a valid integer".to_string(),
+                    )
                 })?;
                 if bit_width != 128 {
-                    return Err(ArrowError::Ffi("Decimal256 is not supported".to_string()));
+                    return Err(ArrowError::OutOfSpec(
+                        "Decimal256 is not supported".to_string(),
+                    ));
                 }
             }
             let precision = parts[0].parse::<usize>().map_err(|_| {
-                ArrowError::Ffi("Decimal precision is not a valid integer".to_string())
+                ArrowError::OutOfSpec("Decimal precision is not a valid integer".to_string())
             })?;
             let scale = parts[1].parse::<usize>().map_err(|_| {
-                ArrowError::Ffi("Decimal scale is not a valid integer".to_string())
+                ArrowError::OutOfSpec("Decimal scale is not a valid integer".to_string())
             })?;
             DataType::Decimal(precision, scale)
         } else if !parts.is_empty() && ((parts[0] == "+us") || (parts[0] == "+ud")) {
@@ -351,7 +355,9 @@ unsafe fn to_data_type(schema: &Ffi_ArrowSchema) -> Result<DataType> {
                 .split(',')
                 .map(|x| {
                     x.parse::<i32>().map_err(|_| {
-                        ArrowError::Ffi("Union type id is not a valid integer".to_string())
+                        ArrowError::OutOfSpec(
+                            "Union type id is not a valid integer".to_string(),
+                        )
                     })
                 })
                 .collect::<Result<Vec<_>>>()?;
@@ -360,7 +366,7 @@ unsafe fn to_data_type(schema: &Ffi_ArrowSchema) -> Result<DataType> {
                 .collect::<Result<Vec<_>>>()?;
             DataType::Union(fields, Some(type_ids), mode)
         } else {
-            return Err(ArrowError::Ffi(format!(
+            return Err(ArrowError::OutOfSpec(format!(
                 "The datatype \"{}\" is still not supported in Rust implementation",
                 other
             )));
@@ -456,7 +462,7 @@ pub(super) fn get_field_child(field: &Field, index: usize) -> Result<Field> {
         (0, DataType::Map(field, _)) => Ok(field.as_ref().clone()),
         (index, DataType::Struct(fields)) => Ok(fields[index].clone()),
         (index, DataType::Union(fields, _, _)) => Ok(fields[index].clone()),
-        (child, data_type) => Err(ArrowError::Ffi(format!(
+        (child, data_type) => Err(ArrowError::OutOfSpec(format!(
             "Requested child {} to type {:?} that has no such child",
             child, data_type
         ))),
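The decimal branch above parses the C data interface format string `d:<precision>,<scale>[,<bitWidth>]`. A standalone sketch of just that step, mirroring the error messages but not taken verbatim from the file (note it reads the bit width from the third field, per the Arrow spec):

```rust
use arrow2::error::ArrowError;

/// Sketch: parse "d:<precision>,<scale>[,<bitWidth>]" into (precision, scale).
fn parse_decimal(format: &str) -> Result<(usize, usize), ArrowError> {
    let rest = format
        .strip_prefix("d:")
        .ok_or_else(|| ArrowError::OutOfSpec("not a decimal format string".to_string()))?;
    let parts: Vec<&str> = rest.split(',').collect();
    if parts.len() < 2 || parts.len() > 3 {
        return Err(ArrowError::OutOfSpec(
            "Decimal must contain 2 or 3 comma-separated values".to_string(),
        ));
    }
    if parts.len() == 3 && parts[2] != "128" {
        return Err(ArrowError::OutOfSpec("Decimal256 is not supported".to_string()));
    }
    let precision = parts[0].parse::<usize>().map_err(|_| {
        ArrowError::OutOfSpec("Decimal precision is not a valid integer".to_string())
    })?;
    let scale = parts[1].parse::<usize>().map_err(|_| {
        ArrowError::OutOfSpec("Decimal scale is not a valid integer".to_string())
    })?;
    Ok((precision, scale))
}

fn main() {
    assert_eq!(parse_decimal("d:38,10").unwrap(), (38, 10));
    assert!(parse_decimal("d:38").is_err());
}
```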
diff --git a/src/io/avro/read/decompress.rs b/src/io/avro/read/decompress.rs
index 2fdeb613d40..e6bce777035 100644
--- a/src/io/avro/read/decompress.rs
+++ b/src/io/avro/read/decompress.rs
@@ -30,12 +30,12 @@ fn decompress_block(
         #[cfg(feature = "io_avro_compression")]
         Some(Compression::Snappy) => {
             let len = snap::raw::decompress_len(&block[..block.len() - 4])
-                .map_err(|_| ArrowError::Other("Failed to decompress snap".to_string()))?;
+                .map_err(|_| ArrowError::ExternalFormat("Failed to decompress snap".to_string()))?;
             decompress.clear();
             decompress.resize(len, 0);
             snap::raw::Decoder::new()
                 .decompress(&block[..block.len() - 4], decompress)
-                .map_err(|_| ArrowError::Other("Failed to decompress snap".to_string()))?;
+                .map_err(|_| ArrowError::ExternalFormat("Failed to decompress snap".to_string()))?;
             Ok(false)
         }
         #[cfg(not(feature = "io_avro_compression"))]
diff --git a/src/io/avro/read/nested.rs b/src/io/avro/read/nested.rs
index 30d6496f4a5..f60281c72b2 100644
--- a/src/io/avro/read/nested.rs
+++ b/src/io/avro/read/nested.rs
@@ -43,7 +43,7 @@ impl<O: Offset> DynMutableListArray<O> {
     #[inline]
     pub fn try_push_valid(&mut self) -> Result<()> {
         let size = self.values.len();
-        let size = O::from_usize(size).ok_or(ArrowError::KeyOverflowError)?; // todo: make this error
+        let size = O::from_usize(size).ok_or(ArrowError::Overflow)?;
         assert!(size >= *self.offsets.last().unwrap());
         self.offsets.push(size);
diff --git a/src/io/flight/mod.rs b/src/io/flight/mod.rs
index 75b270c7c68..8ff4bcda5c3 100644
--- a/src/io/flight/mod.rs
+++ b/src/io/flight/mod.rs
@@ -87,10 +87,14 @@ fn schema_from_bytes(bytes: &[u8]) -> Result<Schema> {
         if let Some((schema, _)) = ipc.header_as_schema().map(fb_to_schema) {
             Ok(schema)
         } else {
-            Err(ArrowError::Ipc("Unable to get head as schema".to_string()))
+            Err(ArrowError::OutOfSpec(
+                "Unable to get head as schema".to_string(),
+            ))
         }
     } else {
-        Err(ArrowError::Ipc("Unable to get root as message".to_string()))
+        Err(ArrowError::OutOfSpec(
+            "Unable to get root as message".to_string(),
+        ))
     }
 }

@@ -98,7 +102,7 @@ impl TryFrom<&FlightData> for Schema {
     type Error = ArrowError;
     fn try_from(data: &FlightData) -> Result<Self> {
         schema_from_bytes(&data.data_header[..]).map_err(|err| {
-            ArrowError::Ipc(format!(
+            ArrowError::OutOfSpec(format!(
                 "Unable to convert flight data to Arrow schema: {}",
                 err
             ))
@@ -110,7 +114,7 @@ impl TryFrom<&SchemaResult> for Schema {
     type Error = ArrowError;
     fn try_from(data: &SchemaResult) -> Result<Self> {
         schema_from_bytes(&data.schema[..]).map_err(|err| {
-            ArrowError::Ipc(format!(
+            ArrowError::OutOfSpec(format!(
                 "Unable to convert schema result to Arrow schema: {}",
                 err
             ))
@@ -126,15 +130,18 @@ pub fn deserialize_batch(
     dictionaries: &HashMap<usize, Arc<dyn Array>>,
 ) -> Result<RecordBatch> {
     // check that the data_header is a record batch message
-    let message = ipc::Message::root_as_message(&data.data_header[..])
-        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as message: {:?}", err)))?;
+    let message = ipc::Message::root_as_message(&data.data_header[..]).map_err(|err| {
+        ArrowError::OutOfSpec(format!("Unable to get root as message: {:?}", err))
+    })?;

     let mut reader = std::io::Cursor::new(&data.data_body);

     message
         .header_as_record_batch()
         .ok_or_else(|| {
-            ArrowError::Ipc("Unable to convert flight data header to a record batch".to_string())
+            ArrowError::OutOfSpec(
+                "Unable to convert flight data header to a record batch".to_string(),
+            )
         })
         .map(|batch| {
             read_record_batch(
diff --git a/src/io/ipc/compression.rs b/src/io/ipc/compression.rs
index b98d670b799..ab60ba19711 100644
--- a/src/io/ipc/compression.rs
+++ b/src/io/ipc/compression.rs
@@ -19,13 +19,13 @@ pub fn decompress_zstd(input_buf: &[u8], output_buf: &mut [u8]) -> Result<()> {
 #[cfg(not(feature = "io_ipc_compression"))]
 pub fn decompress_lz4(_input_buf: &[u8], _output_buf: &mut [u8]) -> Result<()> {
     use crate::error::ArrowError;
-    Err(ArrowError::Ipc("The crate was compiled without IPC compression. Use `io_ipc_compression` to read compressed IPC.".to_string()))
+    Err(ArrowError::OutOfSpec("The crate was compiled without IPC compression. Use `io_ipc_compression` to read compressed IPC.".to_string()))
 }

 #[cfg(not(feature = "io_ipc_compression"))]
 pub fn decompress_zstd(_input_buf: &[u8], _output_buf: &mut [u8]) -> Result<()> {
     use crate::error::ArrowError;
-    Err(ArrowError::Ipc("The crate was compiled without IPC compression. Use `io_ipc_compression` to read compressed IPC.".to_string()))
+    Err(ArrowError::OutOfSpec("The crate was compiled without IPC compression. Use `io_ipc_compression` to read compressed IPC.".to_string()))
 }

 #[cfg(feature = "io_ipc_compression")]
@@ -48,13 +48,13 @@ pub fn compress_zstd(input_buf: &[u8], output_buf: &mut Vec<u8>) -> Result<()> {
 #[cfg(not(feature = "io_ipc_compression"))]
 pub fn compress_lz4(_input_buf: &[u8], _output_buf: &mut Vec<u8>) -> Result<()> {
     use crate::error::ArrowError;
-    Err(ArrowError::Ipc("The crate was compiled without IPC compression. Use `io_ipc_compression` to write compressed IPC.".to_string()))
+    Err(ArrowError::OutOfSpec("The crate was compiled without IPC compression. Use `io_ipc_compression` to write compressed IPC.".to_string()))
 }

 #[cfg(not(feature = "io_ipc_compression"))]
 pub fn compress_zstd(_input_buf: &[u8], _output_buf: &mut Vec<u8>) -> Result<()> {
     use crate::error::ArrowError;
-    Err(ArrowError::Ipc("The crate was compiled without IPC compression. Use `io_ipc_compression` to write compressed IPC.".to_string()))
+    Err(ArrowError::OutOfSpec("The crate was compiled without IPC compression. Use `io_ipc_compression` to write compressed IPC.".to_string()))
 }

 #[cfg(test)]
Valid ids: {:?}", id, valid_ids )) diff --git a/src/io/ipc/read/common.rs b/src/io/ipc/read/common.rs index 055b31b3705..a7de2f599be 100644 --- a/src/io/ipc/read/common.rs +++ b/src/io/ipc/read/common.rs @@ -103,12 +103,12 @@ pub fn read_record_batch( reader: &mut R, block_offset: u64, ) -> Result { - let buffers = batch - .buffers() - .ok_or_else(|| ArrowError::Ipc("Unable to get buffers from IPC RecordBatch".to_string()))?; + let buffers = batch.buffers().ok_or_else(|| { + ArrowError::OutOfSpec("Unable to get buffers from IPC RecordBatch".to_string()) + })?; let mut buffers: VecDeque<&ipc::Schema::Buffer> = buffers.iter().collect(); let field_nodes = batch.nodes().ok_or_else(|| { - ArrowError::Ipc("Unable to get field nodes from IPC RecordBatch".to_string()) + ArrowError::OutOfSpec("Unable to get field nodes from IPC RecordBatch".to_string()) })?; let mut field_nodes = field_nodes.iter().collect::>(); @@ -205,7 +205,7 @@ fn first_dict_field(id: usize, fields: &[Field]) -> Result<&Field> { return Ok(field); } } - Err(ArrowError::Schema(format!( + Err(ArrowError::OutOfSpec(format!( "dictionary id {} not found in schema", id ))) diff --git a/src/io/ipc/read/read_basic.rs b/src/io/ipc/read/read_basic.rs index be9562ff8b4..356938ec4f8 100644 --- a/src/io/ipc/read/read_basic.rs +++ b/src/io/ipc/read/read_basic.rs @@ -52,7 +52,7 @@ fn read_uncompressed_buffer( ) -> Result> { let bytes = length * std::mem::size_of::(); if bytes > buffer_length { - return Err(ArrowError::Ipc( + return Err(ArrowError::OutOfSpec( format!("The slots of the array times the physical size must \ be smaller or equal to the length of the IPC buffer. \ However, this array reports {} slots, which, for physical type \"{}\", corresponds to {} bytes, \ diff --git a/src/io/ipc/read/reader.rs b/src/io/ipc/read/reader.rs index 212006f155a..be8668de929 100644 --- a/src/io/ipc/read/reader.rs +++ b/src/io/ipc/read/reader.rs @@ -93,14 +93,14 @@ pub fn read_file_metadata(reader: &mut R) -> Result(reader: &mut R) -> Result(reader: &mut R) -> Result { @@ -167,7 +168,7 @@ pub fn read_file_metadata(reader: &mut R) -> Result { - return Err(ArrowError::Ipc(format!( + return Err(ArrowError::OutOfSpec(format!( "Expecting DictionaryBatch in dictionary blocks, found {:?}.", t ))); @@ -187,15 +188,15 @@ fn get_serialized_batch<'a>( message: &'a ipc::Message::Message, ) -> Result> { match message.header_type() { - ipc::Message::MessageHeader::Schema => Err(ArrowError::Ipc( + ipc::Message::MessageHeader::Schema => Err(ArrowError::OutOfSpec( "Not expecting a schema when messages are read".to_string(), )), ipc::Message::MessageHeader::RecordBatch => { message.header_as_record_batch().ok_or_else(|| { - ArrowError::Ipc("Unable to read IPC message as record batch".to_string()) + ArrowError::OutOfSpec("Unable to read IPC message as record batch".to_string()) }) } - t => Err(ArrowError::Ipc(format!( + t => Err(ArrowError::OutOfSpec(format!( "Reading types other than record batches not yet supported, unable to read {:?}", t ))), @@ -227,12 +228,12 @@ pub fn read_batch( reader.read_exact(block_data)?; let message = ipc::Message::root_as_message(&block_data[..]) - .map_err(|err| ArrowError::Ipc(format!("Unable to get root as footer: {:?}", err)))?; + .map_err(|err| ArrowError::OutOfSpec(format!("Unable to get root as footer: {:?}", err)))?; // some old test data's footer metadata is not set, so we account for that if metadata.version != ipc::Schema::MetadataVersion::V1 && message.version() != metadata.version { - return Err(ArrowError::Ipc( + return 
diff --git a/src/io/ipc/read/reader.rs b/src/io/ipc/read/reader.rs
index 212006f155a..be8668de929 100644
--- a/src/io/ipc/read/reader.rs
+++ b/src/io/ipc/read/reader.rs
@@ -93,14 +93,14 @@ pub fn read_file_metadata<R: Read + Seek>(reader: &mut R) -> Result<FileMetadata> {
@@ -167,7 +168,7 @@ pub fn read_file_metadata<R: Read + Seek>(reader: &mut R) -> Result<FileMetadata> {
             t => {
-                return Err(ArrowError::Ipc(format!(
+                return Err(ArrowError::OutOfSpec(format!(
                     "Expecting DictionaryBatch in dictionary blocks, found {:?}.",
                     t
                 )));
@@ -187,15 +188,15 @@ fn get_serialized_batch<'a>(
     message: &'a ipc::Message::Message,
 ) -> Result<ipc::Message::RecordBatch<'a>> {
     match message.header_type() {
-        ipc::Message::MessageHeader::Schema => Err(ArrowError::Ipc(
+        ipc::Message::MessageHeader::Schema => Err(ArrowError::OutOfSpec(
             "Not expecting a schema when messages are read".to_string(),
         )),
         ipc::Message::MessageHeader::RecordBatch => {
             message.header_as_record_batch().ok_or_else(|| {
-                ArrowError::Ipc("Unable to read IPC message as record batch".to_string())
+                ArrowError::OutOfSpec("Unable to read IPC message as record batch".to_string())
             })
         }
-        t => Err(ArrowError::Ipc(format!(
+        t => Err(ArrowError::OutOfSpec(format!(
             "Reading types other than record batches not yet supported, unable to read {:?}",
             t
         ))),
@@ -227,12 +228,12 @@ pub fn read_batch(
     reader.read_exact(block_data)?;

     let message = ipc::Message::root_as_message(&block_data[..])
-        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as footer: {:?}", err)))?;
+        .map_err(|err| ArrowError::OutOfSpec(format!("Unable to get root as footer: {:?}", err)))?;

     // some old test data's footer metadata is not set, so we account for that
     if metadata.version != ipc::Schema::MetadataVersion::V1
         && message.version() != metadata.version
     {
-        return Err(ArrowError::Ipc(
+        return Err(ArrowError::OutOfSpec(
             "Could not read IPC message as metadata versions mismatch".to_string(),
         ));
     }
diff --git a/src/io/ipc/read/stream.rs b/src/io/ipc/read/stream.rs
index 7e96e78c229..f9e5e34c2b4 100644
--- a/src/io/ipc/read/stream.rs
+++ b/src/io/ipc/read/stream.rs
@@ -59,13 +59,14 @@ pub fn read_stream_metadata<R: Read>(reader: &mut R) -> Result<StreamMetadata> {
     let mut meta_buffer = vec![0; meta_len as usize];
     reader.read_exact(&mut meta_buffer)?;

-    let message = ipc::Message::root_as_message(meta_buffer.as_slice())
-        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as message: {:?}", err)))?;
+    let message = ipc::Message::root_as_message(meta_buffer.as_slice()).map_err(|err| {
+        ArrowError::OutOfSpec(format!("Unable to get root as message: {:?}", err))
+    })?;
     let version = message.version();
     // message header is a Schema, so read it
     let ipc_schema: ipc::Schema::Schema = message
         .header_as_schema()
-        .ok_or_else(|| ArrowError::Ipc("Unable to read IPC message as schema".to_string()))?;
+        .ok_or_else(|| ArrowError::OutOfSpec("Unable to read IPC message as schema".to_string()))?;
     let (schema, is_little_endian) = convert::fb_to_schema(ipc_schema);
     let schema = Arc::new(schema);

@@ -151,16 +152,17 @@ fn read_next<R: Read>(
     message_buffer.resize(meta_length, 0);
     reader.read_exact(message_buffer)?;

-    let message = ipc::Message::root_as_message(message_buffer)
-        .map_err(|err| ArrowError::Ipc(format!("Unable to get root as message: {:?}", err)))?;
+    let message = ipc::Message::root_as_message(message_buffer).map_err(|err| {
+        ArrowError::OutOfSpec(format!("Unable to get root as message: {:?}", err))
+    })?;

     match message.header_type() {
-        ipc::Message::MessageHeader::Schema => Err(ArrowError::Ipc(
+        ipc::Message::MessageHeader::Schema => Err(ArrowError::OutOfSpec(
             "Not expecting a schema when messages are read".to_string(),
         )),
         ipc::Message::MessageHeader::RecordBatch => {
             let batch = message.header_as_record_batch().ok_or_else(|| {
-                ArrowError::Ipc("Unable to read IPC message as record batch".to_string())
+                ArrowError::OutOfSpec("Unable to read IPC message as record batch".to_string())
             })?;
             // read the block that makes up the record batch into a buffer
             data_buffer.clear();
@@ -183,7 +185,7 @@ fn read_next<R: Read>(
         }
         ipc::Message::MessageHeader::DictionaryBatch => {
             let batch = message.header_as_dictionary_batch().ok_or_else(|| {
-                ArrowError::Ipc("Unable to read IPC message as dictionary batch".to_string())
+                ArrowError::OutOfSpec("Unable to read IPC message as dictionary batch".to_string())
             })?;
             // read the block that makes up the dictionary batch into a buffer
             let mut buf = vec![0; message.bodyLength() as usize];
@@ -204,7 +206,7 @@ fn read_next<R: Read>(
             read_next(reader, metadata, dictionaries, message_buffer, data_buffer)
         }
         ipc::Message::MessageHeader::NONE => Ok(Some(StreamState::Waiting)),
-        t => Err(ArrowError::Ipc(format!(
+        t => Err(ArrowError::OutOfSpec(format!(
             "Reading types other than record batches not yet supported, unable to read {:?} ",
             t
         ))),
diff --git a/src/io/ipc/write/common_async.rs b/src/io/ipc/write/common_async.rs
index 5880fce32ea..d9e28a0e386 100644
--- a/src/io/ipc/write/common_async.rs
+++ b/src/io/ipc/write/common_async.rs
@@ -1,7 +1,7 @@
 use futures::AsyncWrite;
 use futures::AsyncWriteExt;

-use crate::error::{ArrowError, Result};
+use crate::error::Result;

 use super::super::CONTINUATION_MARKER;
 use super::common::pad_to_8;
@@ -13,9 +13,7 @@ pub async fn write_message<W: AsyncWrite + Unpin + Send>(
     encoded: EncodedData,
 ) -> Result<(usize, usize)> {
     let arrow_data_len = encoded.arrow_data.len();
-    if arrow_data_len % 8 != 0 {
-        return Err(ArrowError::Ipc("Arrow data not aligned".to_string()));
-    }
+    assert_eq!(arrow_data_len % 8, 0, "Arrow data not aligned");

     let a = 8 - 1;
     let buffer = encoded.ipc_message;
diff --git a/src/io/ipc/write/common_sync.rs b/src/io/ipc/write/common_sync.rs
index 9e80d446eb1..60e47a65142 100644
--- a/src/io/ipc/write/common_sync.rs
+++ b/src/io/ipc/write/common_sync.rs
@@ -1,6 +1,6 @@
 use std::io::Write;

-use crate::error::{ArrowError, Result};
+use crate::error::Result;

 use super::super::CONTINUATION_MARKER;
 use super::common::pad_to_8;
@@ -9,9 +9,7 @@ use super::common::EncodedData;
 /// Write a message's IPC data and buffers, returning metadata and buffer data lengths written
 pub fn write_message<W: Write>(writer: &mut W, encoded: EncodedData) -> Result<(usize, usize)> {
     let arrow_data_len = encoded.arrow_data.len();
-    if arrow_data_len % 8 != 0 {
-        return Err(ArrowError::Ipc("Arrow data not aligned".to_string()));
-    }
+    assert_eq!(arrow_data_len % 8, 0, "Arrow data not aligned");

     let a = 8 - 1;
     let buffer = encoded.ipc_message;
diff --git a/src/io/ipc/write/stream.rs b/src/io/ipc/write/stream.rs
index c6d4f9bd950..bdc09a44951 100644
--- a/src/io/ipc/write/stream.rs
+++ b/src/io/ipc/write/stream.rs
@@ -41,7 +41,7 @@ pub struct StreamWriter<W: Write> {
     writer: W,
     /// IPC write options
     write_options: WriteOptions,
-    /// Whether the writer footer has been written, and the writer is finished
+    /// Whether the stream has been finished
     finished: bool,
     /// Keeps track of dictionaries that have been written
     dictionary_tracker: DictionaryTracker,
@@ -67,9 +67,10 @@ impl<W: Write> StreamWriter<W> {
     /// Write a record batch to the stream
     pub fn write(&mut self, batch: &RecordBatch) -> Result<()> {
         if self.finished {
-            return Err(ArrowError::Ipc(
-                "Cannot write record batch to stream writer as it is closed".to_string(),
-            ));
+            return Err(ArrowError::Io(std::io::Error::new(
+                std::io::ErrorKind::UnexpectedEof,
+                "Cannot write to a finished stream".to_string(),
+            )));
         }

         let (encoded_dictionaries, encoded_message) =
diff --git a/src/io/ipc/write/stream_async.rs b/src/io/ipc/write/stream_async.rs
index 34395724e2b..6af81fb6fce 100644
--- a/src/io/ipc/write/stream_async.rs
+++ b/src/io/ipc/write/stream_async.rs
@@ -16,7 +16,7 @@ pub struct StreamWriter<W: AsyncWrite + Unpin + Send> {
     writer: W,
     /// IPC write options
     write_options: WriteOptions,
-    /// Whether the writer footer has been written, and the writer is finished
+    /// Whether the stream has been finished
     finished: bool,
     /// Keeps track of dictionaries that have been written
     dictionary_tracker: DictionaryTracker,
@@ -46,9 +46,10 @@ impl<W: AsyncWrite + Unpin + Send> StreamWriter<W> {
     /// Writes a [`RecordBatch`] to the stream
     pub async fn write(&mut self, batch: &RecordBatch) -> Result<()> {
         if self.finished {
-            return Err(ArrowError::Ipc(
-                "Cannot write record batch to stream writer as it is closed".to_string(),
-            ));
+            return Err(ArrowError::Io(std::io::Error::new(
+                std::io::ErrorKind::UnexpectedEof,
+                "Cannot write to a finished stream".to_string(),
+            )));
         }

         // todo: move this out of the `async` since this is blocking.
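Writing to a finished stream is now reported through the `Io` variant with `ErrorKind::UnexpectedEof` rather than the removed `Ipc` variant, so callers can branch on `std::io::ErrorKind`. A sketch of the caller side:

```rust
use arrow2::error::ArrowError;

fn main() {
    // the error value the stream writers above now construct
    let err = ArrowError::Io(std::io::Error::new(
        std::io::ErrorKind::UnexpectedEof,
        "Cannot write to a finished stream".to_string(),
    ));
    if let ArrowError::Io(io_err) = &err {
        assert_eq!(io_err.kind(), std::io::ErrorKind::UnexpectedEof);
    }
}
```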
diff --git a/src/io/ipc/write/writer.rs b/src/io/ipc/write/writer.rs
index 10d58d063ca..6a66fb3a1a8 100644
--- a/src/io/ipc/write/writer.rs
+++ b/src/io/ipc/write/writer.rs
@@ -89,9 +89,10 @@ impl<W: Write> FileWriter<W> {
     /// Write a record batch to the file
     pub fn write(&mut self, batch: &RecordBatch) -> Result<()> {
         if self.finished {
-            return Err(ArrowError::Ipc(
-                "Cannot write record batch to file writer as it is closed".to_string(),
-            ));
+            return Err(ArrowError::Io(std::io::Error::new(
+                std::io::ErrorKind::UnexpectedEof,
+                "Cannot write to a finished file".to_string(),
+            )));
         }

         let (encoded_dictionaries, encoded_message) =
diff --git a/src/io/json/read/infer_schema.rs b/src/io/json/read/infer_schema.rs
index c54e05195dd..56a851bad18 100644
--- a/src/io/json/read/infer_schema.rs
+++ b/src/io/json/read/infer_schema.rs
@@ -236,7 +236,7 @@ where
             }
         }
         value => {
-            return Err(ArrowError::Other(format!(
+            return Err(ArrowError::ExternalFormat(format!(
                 "Expected JSON record to be an object, found {:?}",
                 value
             )));
diff --git a/src/io/json/read/reader.rs b/src/io/json/read/reader.rs
index 3f1c55f32b8..fe5fb252a4f 100644
--- a/src/io/json/read/reader.rs
+++ b/src/io/json/read/reader.rs
@@ -84,7 +84,7 @@ impl Decoder {
                 let v = value?;
                 match v {
                     Value::Object(_) => Ok(v),
-                    _ => Err(ArrowError::Other(format!(
+                    _ => Err(ArrowError::ExternalFormat(format!(
                         "Row needs to be of type object, got: {:?}",
                         v
                     ))),
diff --git a/src/io/json_integration/read.rs b/src/io/json_integration/read.rs
index f6aabfff9dc..10711fdd3af 100644
--- a/src/io/json_integration/read.rs
+++ b/src/io/json_integration/read.rs
@@ -255,9 +255,9 @@ fn to_dictionary<K: DictionaryKey>(
     dictionaries: &HashMap<i64, ArrowJsonDictionaryBatch>,
 ) -> Result<Arc<dyn Array>> {
     // find dictionary
-    let dictionary = dictionaries
-        .get(&dict_id)
-        .ok_or_else(|| ArrowError::Ipc(format!("Unable to find any dictionary id {}", dict_id)))?;
+    let dictionary = dictionaries.get(&dict_id).ok_or_else(|| {
+        ArrowError::OutOfSpec(format!("Unable to find any dictionary id {}", dict_id))
+    })?;

     let keys = to_primitive(json_col, K::DATA_TYPE);
diff --git a/src/io/json_integration/schema.rs b/src/io/json_integration/schema.rs
index dfffd55bed7..8d55a76c676 100644
--- a/src/io/json_integration/schema.rs
+++ b/src/io/json_integration/schema.rs
@@ -166,7 +166,7 @@ fn to_time_unit(item: Option<&Value>) -> Result<TimeUnit> {
         Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond),
         Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond),
         Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond),
-        _ => Err(ArrowError::Schema(
+        _ => Err(ArrowError::OutOfSpec(
             "time unit missing or invalid".to_string(),
         )),
     }
@@ -181,13 +181,13 @@ fn to_int(item: &Value) -> Result<IntegerType> {
                 Some(32) => IntegerType::Int32,
                 Some(64) => IntegerType::Int64,
                 _ => {
-                    return Err(ArrowError::Schema(
+                    return Err(ArrowError::OutOfSpec(
                         "int bitWidth missing or invalid".to_string(),
                     ))
                 }
             },
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "int bitWidth missing or invalid".to_string(),
                 ))
             }
         },
@@ -199,19 +199,19 @@ fn to_int(item: &Value) -> Result<IntegerType> {
                 Some(32) => IntegerType::UInt32,
                 Some(64) => IntegerType::UInt64,
                 _ => {
-                    return Err(ArrowError::Schema(
+                    return Err(ArrowError::OutOfSpec(
                         "int bitWidth missing or invalid".to_string(),
                     ))
                 }
             },
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "int bitWidth missing or invalid".to_string(),
                 ))
             }
         },
         _ => {
-            return Err(ArrowError::Schema(
+            return Err(ArrowError::OutOfSpec(
                 "int signed missing or invalid".to_string(),
             ))
         }
@@ -227,7 +229,9 @@ fn children(children: Option<&Value>) -> Result<Vec<Field>> {
                 .map(Field::try_from)
                 .collect::<Result<Vec<Field>>>()
         } else {
-            Err(ArrowError::Schema("children must be an array".to_string()))
+            Err(ArrowError::OutOfSpec(
+                "children must be an array".to_string(),
+            ))
         }
     })
     .unwrap_or_else(|| Ok(vec![]))
@@ -241,7 +243,7 @@ fn read_metadata(metadata: &Value) -> Result<HashMap<String, String>> {
                 match value.as_object() {
                     Some(map) => {
                         if map.len() != 2 {
-                            return Err(ArrowError::Schema(
+                            return Err(ArrowError::OutOfSpec(
                                 "Field 'metadata' must have exact two entries for each key-value map".to_string(),
                             ));
                         }
                         if let (Some(k_str), Some(v_str)) = (k.as_str(), v.as_str()) {
                             res.insert(k_str.to_string().clone(), v_str.to_string().clone());
                         } else {
-                            return Err(ArrowError::Schema(
+                            return Err(ArrowError::OutOfSpec(
                                 "Field 'metadata' must have map value of string type"
                                     .to_string(),
                             ));
                         }
                     } else {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field 'metadata' lacks map keys named \"key\" or \"value\""
                                 .to_string(),
                         ));
                     }
                 }
                 _ => {
-                    return Err(ArrowError::Schema(
+                    return Err(ArrowError::OutOfSpec(
                         "Field 'metadata' contains non-object key-value pair".to_string(),
                     ));
                 }
@@ -276,7 +278,7 @@ fn read_metadata(metadata: &Value) -> Result<HashMap<String, String>> {
                 if let Some(str_value) = v.as_str() {
                     res.insert(k.clone(), str_value.to_string().clone());
                 } else {
-                    return Err(ArrowError::Schema(format!(
+                    return Err(ArrowError::OutOfSpec(format!(
                         "Field 'metadata' contains non-string value for key {}",
                         k
                     )));
                 }
             }
             Ok(res)
         }
-        _ => Err(ArrowError::Schema(
+        _ => Err(ArrowError::OutOfSpec(
             "Invalid json value type for field".to_string(),
         )),
     }
@@ -293,12 +295,12 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
     let type_ = item
         .get("name")
-        .ok_or_else(|| ArrowError::Schema("type missing".to_string()))?;
+        .ok_or_else(|| ArrowError::OutOfSpec("type missing".to_string()))?;

     let type_ = if let Value::String(name) = type_ {
         name.as_str()
     } else {
-        return Err(ArrowError::Schema("type is not a string".to_string()));
+        return Err(ArrowError::OutOfSpec("type is not a string".to_string()));
     };

     use DataType::*;
@@ -312,7 +314,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             if let Some(Value::Number(size)) = item.get("byteWidth") {
                 DataType::FixedSizeBinary(size.as_i64().unwrap() as usize)
             } else {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "Expecting a byteWidth for fixedsizebinary".to_string(),
                 ));
             }
@@ -323,13 +325,13 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             // return a list with any type as its child isn't defined in the map
             let precision = match item.get("precision") {
                 Some(p) => Ok(p.as_u64().unwrap() as usize),
-                None => Err(ArrowError::Schema(
+                None => Err(ArrowError::OutOfSpec(
                     "Expecting a precision for decimal".to_string(),
                 )),
             };
             let scale = match item.get("scale") {
                 Some(s) => Ok(s.as_u64().unwrap() as usize),
-                _ => Err(ArrowError::Schema(
+                _ => Err(ArrowError::OutOfSpec(
                     "Expecting a scale for decimal".to_string(),
                 )),
             };
@@ -341,7 +343,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             Some(p) if p == "SINGLE" => DataType::Float32,
             Some(p) if p == "DOUBLE" => DataType::Float64,
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "floatingpoint precision missing or invalid".to_string(),
                 ))
             }
@@ -351,7 +353,9 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             let tz = match item.get("timezone") {
                 None => Ok(None),
                 Some(Value::String(tz)) => Ok(Some(tz.clone())),
-                _ => Err(ArrowError::Schema("timezone must be a string".to_string())),
+                _ => Err(ArrowError::OutOfSpec(
+                    "timezone must be a string".to_string(),
+                )),
             }?;
             DataType::Timestamp(unit, tz)
         }
@@ -359,7 +363,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             Some(p) if p == "DAY" => DataType::Date32,
             Some(p) if p == "MILLISECOND" => DataType::Date64,
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "date unit missing or invalid".to_string(),
                 ))
             }
@@ -370,7 +374,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             Some(p) if p == 32 => DataType::Time32(unit),
             Some(p) if p == 64 => DataType::Time64(unit),
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "time bitWidth missing or invalid".to_string(),
                 ))
             }
@@ -385,7 +389,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             Some(p) if p == "YEAR_MONTH" => DataType::Interval(IntervalUnit::YearMonth),
             Some(p) if p == "MONTH_DAY_NANO" => DataType::Interval(IntervalUnit::MonthDayNano),
             _ => {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "interval unit missing or invalid".to_string(),
                 ))
             }
@@ -400,7 +404,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
                     size.as_i64().unwrap() as usize,
                 )
             } else {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "Expecting a listSize for fixedsizelist".to_string(),
                 ));
             }
@@ -410,12 +414,12 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             let mode = if let Some(Value::String(mode)) = item.get("mode") {
                 UnionMode::sparse(mode == "SPARSE")
             } else {
-                return Err(ArrowError::Schema("union requires mode".to_string()));
+                return Err(ArrowError::OutOfSpec("union requires mode".to_string()));
             };
             let ids = if let Some(Value::Array(ids)) = item.get("typeIds") {
                 Some(ids.iter().map(|x| x.as_i64().unwrap() as i32).collect())
             } else {
-                return Err(ArrowError::Schema("union requires ids".to_string()));
+                return Err(ArrowError::OutOfSpec("union requires ids".to_string()));
             };
             DataType::Union(children, ids, mode)
         }
@@ -423,7 +427,7 @@ fn to_data_type(item: &Value, mut children: Vec<Field>) -> Result<DataType> {
             let sorted_keys = if let Some(Value::Bool(sorted_keys)) = item.get("keysSorted") {
                 *sorted_keys
             } else {
-                return Err(ArrowError::Schema("sorted keys not defined".to_string()));
+                return Err(ArrowError::OutOfSpec("sorted keys not defined".to_string()));
             };
             DataType::Map(Box::new(children.pop().unwrap()), sorted_keys)
         }
@@ -445,7 +449,7 @@ impl TryFrom<&Value> for Field {
                 let name = match map.get("name") {
                     Some(&Value::String(ref name)) => name.to_string(),
                     _ => {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field missing 'name' attribute".to_string(),
                         ));
                     }
@@ -453,7 +457,7 @@ impl TryFrom<&Value> for Field {
                 let nullable = match map.get("nullable") {
                     Some(&Value::Bool(b)) => b,
                     _ => {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field missing 'nullable' attribute".to_string(),
                         ));
                     }
@@ -471,7 +475,7 @@ impl TryFrom<&Value> for Field {

                 let type_ = map
                     .get("type")
-                    .ok_or_else(|| ArrowError::Schema("type missing".to_string()))?;
+                    .ok_or_else(|| ArrowError::OutOfSpec("type missing".to_string()))?;

                 let data_type = to_data_type(type_, children)?;

@@ -485,7 +489,7 @@ impl TryFrom<&Value> for Field {
                 let index_type = match dictionary.get("indexType") {
                     Some(t) => to_int(t)?,
                     _ => {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field missing 'indexType' attribute".to_string(),
                         ));
                     }
@@ -499,7 +503,7 @@ impl TryFrom<&Value> for Field {
                 let dict_id = match dictionary.get("id") {
                     Some(Value::Number(n)) => n.as_i64().unwrap(),
                     _ => {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field missing 'id' attribute".to_string(),
                         ));
                     }
@@ -507,7 +511,7 @@ impl TryFrom<&Value> for Field {
                 let dict_is_ordered = match dictionary.get("isOrdered") {
                     Some(&Value::Bool(n)) => n,
                     _ => {
-                        return Err(ArrowError::Schema(
+                        return Err(ArrowError::OutOfSpec(
                             "Field missing 'isOrdered' attribute".to_string(),
                         ));
                     }
@@ -520,7 +524,7 @@ impl TryFrom<&Value> for Field {
                 f.set_metadata(metadata);
                 Ok(f)
             }
-            _ => Err(ArrowError::Schema(
+            _ => Err(ArrowError::OutOfSpec(
                 "Invalid json value type for field".to_string(),
             )),
         }
@@ -560,13 +564,13 @@ fn from_metadata(json: &Value) -> Result<HashMap<String, String>> {
                     if let Value::String(v) = v {
                         Ok((k.to_string(), v.to_string()))
                     } else {
-                        Err(ArrowError::Schema(
+                        Err(ArrowError::OutOfSpec(
                             "metadata `value` field must be a string".to_string(),
                         ))
                     }
                 })
                 .collect::<Result<_>>(),
-        _ => Err(ArrowError::Schema(
+        _ => Err(ArrowError::OutOfSpec(
             "`metadata` field must be an object".to_string(),
         )),
     }
@@ -581,7 +585,7 @@ impl TryFrom<&Value> for Schema {
             let fields = if let Some(Value::Array(fields)) = schema.get("fields") {
                 fields.iter().map(Field::try_from).collect::<Result<_>>()?
             } else {
-                return Err(ArrowError::Schema(
+                return Err(ArrowError::OutOfSpec(
                     "Schema fields should be an array".to_string(),
                 ));
             };
@@ -594,7 +598,7 @@ impl TryFrom<&Value> for Schema {

             Ok(Self { fields, metadata })
         }
-        _ => Err(ArrowError::Schema(
+        _ => Err(ArrowError::OutOfSpec(
             "Invalid json value type for schema".to_string(),
         )),
     }
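A hedged usage sketch of the `TryFrom<&Value>` impl above, assuming the `io_json_integration` feature and `serde_json` are available: a field object missing required attributes is rejected, and because `OutOfSpec` is displayed without a prefix, the message comes through verbatim.

```rust
use std::convert::TryFrom;

use arrow2::datatypes::Field;
use serde_json::json;

fn main() {
    // "nullable" (and "type") are missing, so the conversion must fail
    let value = json!({ "name": "c1" });
    let err = Field::try_from(&value).unwrap_err();
    assert_eq!(err.to_string(), "Field missing 'nullable' attribute");
}
```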
diff --git a/src/io/parquet/read/schema/metadata.rs b/src/io/parquet/read/schema/metadata.rs
index 188144bb17a..02ebbae9925 100644
--- a/src/io/parquet/read/schema/metadata.rs
+++ b/src/io/parquet/read/schema/metadata.rs
@@ -38,10 +38,12 @@ fn get_arrow_schema_from_metadata(encoded_meta: &str) -> Result<Schema> {
                 .header_as_schema()
                 .map(fb_to_schema)
                 .map(|x| x.0)
-                .ok_or_else(|| ArrowError::Ipc("the message is not Arrow Schema".to_string())),
+                .ok_or_else(|| {
+                    ArrowError::OutOfSpec("the message is not Arrow Schema".to_string())
+                }),
             Err(err) => {
                 // The flatbuffers implementation returns an error on verification error.
-                Err(ArrowError::Ipc(format!(
+                Err(ArrowError::OutOfSpec(format!(
                     "Unable to get root as message stored in {}: {:?}",
                     ARROW_SCHEMA_META_KEY, err
                 )))
diff --git a/src/io/parquet/read/statistics/fixlen.rs b/src/io/parquet/read/statistics/fixlen.rs
index 8ef9a7de197..6bd3e960021 100644
--- a/src/io/parquet/read/statistics/fixlen.rs
+++ b/src/io/parquet/read/statistics/fixlen.rs
@@ -53,8 +53,8 @@ impl TryFrom<(&ParquetFixedLenStatistics, DataType)> for PrimitiveStatistics<i128> {
             _ => unreachable!(),
         };
         if byte_lens > 16 {
-            Err(ArrowError::Other(format!(
-                "Can't deserialize i128 from Fixed Len Byte array with lengtg {:?}",
+            Err(ArrowError::ExternalFormat(format!(
+                "Can't deserialize i128 from Fixed Len Byte array with length {:?}",
                 byte_lens
             )))
         } else {
diff --git a/tests/it/io/json/read.rs b/tests/it/io/json/read.rs
index 4a7c0b0d3d3..0ba1e8528e3 100644
--- a/tests/it/io/json/read.rs
+++ b/tests/it/io/json/read.rs
@@ -211,7 +211,7 @@ fn row_type_validation() {
     let re = builder.build(Cursor::new(content));
     assert_eq!(
         re.err().unwrap().to_string(),
-        r#"Expected JSON record to be an object, found Array([Number(1), String("hello")])"#,
+        r#"External format error: Expected JSON record to be an object, found Array([Number(1), String("hello")])"#,
     );
 }
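The updated expectation follows directly from the new `Display` impl: `ExternalFormat` messages gain the `External format error: ` prefix. A quick standalone check:

```rust
use arrow2::error::ArrowError;

fn main() {
    let err = ArrowError::ExternalFormat("Expected JSON record to be an object".to_string());
    assert_eq!(
        err.to_string(),
        "External format error: Expected JSON record to be an object"
    );
}
```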