From c5b77d233308cb222c6f917259faa8ec1f85de4e Mon Sep 17 00:00:00 2001
From: IGI-111
Date: Tue, 18 Jul 2023 06:22:13 +0200
Subject: [PATCH] wip

---
 packages/fuels-core/src/codec/abi_decoder.rs  |  8 ++++----
 packages/fuels-core/src/codec/abi_encoder.rs  |  8 ++++----
 packages/fuels-core/src/traits/tokenizable.rs | 10 +++++-----
 packages/fuels-core/src/types.rs              |  7 ++-----
 packages/fuels-programs/src/contract.rs       |  2 +-
 5 files changed, 16 insertions(+), 19 deletions(-)

diff --git a/packages/fuels-core/src/codec/abi_decoder.rs b/packages/fuels-core/src/codec/abi_decoder.rs
index 550f48b4fe..75a2764486 100644
--- a/packages/fuels-core/src/codec/abi_decoder.rs
+++ b/packages/fuels-core/src/codec/abi_decoder.rs
@@ -179,7 +179,7 @@ impl ABIDecoder {
         let decoded = str::from_utf8(&encoded_str[..length])?;

         let result = DecodeResult {
-            token: Token::String(StringToken::new(decoded.into(), Some(length))),
+            token: Token::StringArray(StringToken::new(decoded.into(), Some(length))),
             bytes_read: encoded_len,
         };
         Ok(result)
@@ -457,8 +457,8 @@ mod tests {
         let decoded = ABIDecoder::decode(&types, &data)?;

         let expected = vec![
-            Token::String(StringToken::new("This is a full sentence".into(), Some(23))),
-            Token::String(StringToken::new("Hello".into(), Some(5))),
+            Token::StringArray(StringToken::new("This is a full sentence".into(), Some(23))),
+            Token::StringArray(StringToken::new("Hello".into(), Some(5))),
         ];

         assert_eq!(decoded, expected);
@@ -733,7 +733,7 @@ mod tests {

         let ss = Token::StringSlice(StringToken::new("This is a full sentence".into(), None));

-        let s = Token::String(StringToken::new("foo".into(), Some(3)));
+        let s = Token::StringArray(StringToken::new("foo".into(), Some(3)));

         let expected: Vec<Token> = vec![foo, u8_arr, b256, s, ss];

diff --git a/packages/fuels-core/src/codec/abi_encoder.rs b/packages/fuels-core/src/codec/abi_encoder.rs
index 3290c200b8..0d7a56069b 100644
--- a/packages/fuels-core/src/codec/abi_encoder.rs
+++ b/packages/fuels-core/src/codec/abi_encoder.rs
@@ -43,7 +43,7 @@ impl ABIEncoder {
             Token::Array(arg_array) => Self::encode_array(arg_array)?,
             Token::Vector(data) => Self::encode_vector(data)?,
             Token::StringSlice(arg_string) => Self::encode_string_slice(arg_string)?,
-            Token::String(arg_string) => vec![Self::encode_string_array(arg_string)?],
+            Token::StringArray(arg_string) => vec![Self::encode_string_array(arg_string)?],
             Token::Struct(arg_struct) => Self::encode_struct(arg_struct)?,
             Token::Enum(arg_enum) => Self::encode_enum(arg_enum)?,
             Token::Tuple(arg_tuple) => Self::encode_tuple(arg_tuple)?,
@@ -499,7 +499,7 @@ mod tests {

         let fn_signature = "takes_string(str[23])";

-        let args: Vec<Token> = vec![Token::String(StringToken::new(
+        let args: Vec<Token> = vec![Token::StringArray(StringToken::new(
             "This is a full sentence".into(),
             Some(23),
         ))];
@@ -695,7 +695,7 @@ mod tests {
         */
         let types = vec![ParamType::Bool, ParamType::String(10)];
         let deeper_enum_variants = EnumVariants::new(types)?;
-        let deeper_enum_token = Token::String(StringToken::new("0123456789".into(), Some(10)));
+        let deeper_enum_token = Token::StringArray(StringToken::new("0123456789".into(), Some(10)));

         let str_enc = vec![
             b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', 0x0, 0x0, 0x0, 0x0, 0x0,
@@ -868,7 +868,7 @@ mod tests {

         let b256 = Token::B256(hasher.finalize().into());

-        let s = Token::String(StringToken::new("This is a full sentence".into(), Some(23)));
+        let s = Token::StringArray(StringToken::new("This is a full sentence".into(), Some(23)));

         let args: Vec<Token> = vec![foo, u8_arr, b256, s];

diff --git a/packages/fuels-core/src/traits/tokenizable.rs b/packages/fuels-core/src/traits/tokenizable.rs
index 3a7c809b77..07fb33cd83 100644
--- a/packages/fuels-core/src/traits/tokenizable.rs
+++ b/packages/fuels-core/src/traits/tokenizable.rs
@@ -474,7 +474,7 @@ impl<const LEN: usize> Tokenizable for SizedAsciiString<LEN> {
         Self: Sized,
     {
         match token {
-            Token::String(contents) => {
+            Token::StringArray(contents) => {
                 let expected_len = contents.get_encodable_str()?.len() ;
                 if expected_len!= LEN {
                     return Err(error!(InvalidData,"SizedAsciiString<{LEN}>::from_token got a Token::StringArray whose expected length({}) is != {LEN}", expected_len))
@@ -488,7 +488,7 @@ impl<const LEN: usize> Tokenizable for SizedAsciiString<LEN> {
     }

     fn into_token(self) -> Token {
-        Token::String(StringToken::new(self.into(), Some(LEN)))
+        Token::StringArray(StringToken::new(self.into(), Some(LEN)))
     }
 }

@@ -508,7 +508,7 @@ impl Tokenizable for AsciiString {
     }

     fn into_token(self) -> Token {
-        Token::String(StringToken::new(self.into(), None))
+        Token::StringArray(StringToken::new(self.into(), None))
     }
 }

@@ -567,7 +567,7 @@ mod tests {
         let token = sut.into_token();

         match token {
-            Token::String(string_token) => {
+            Token::StringArray(string_token) => {
                 let contents = string_token.get_encodable_str()?;
                 assert_eq!(contents, "abc");
             }
@@ -581,7 +581,7 @@ mod tests {

     #[test]
     fn sized_ascii_string_is_detokenized_correctly() -> Result<()> {
-        let token = Token::String(StringToken::new("abc".to_string(), Some(3)));
+        let token = Token::StringArray(StringToken::new("abc".to_string(), Some(3)));

         let sized_ascii_string =
             SizedAsciiString::<3>::from_token(token).expect("Should have succeeded");
diff --git a/packages/fuels-core/src/types.rs b/packages/fuels-core/src/types.rs
index 19410f514a..32e0a5d4d8 100644
--- a/packages/fuels-core/src/types.rs
+++ b/packages/fuels-core/src/types.rs
@@ -4,7 +4,6 @@
 pub use fuel_tx::{Address, AssetId, ContractId, TxPointer, UtxoId};
 pub use fuel_types::Nonce;
 use fuel_types::bytes::padded_len;
-use strum_macros::EnumString;

 pub use crate::types::{core::*, wrappers::*};
 use crate::types::{
@@ -72,8 +71,7 @@ impl TryFrom<StringToken> for String {
     }
 }

-#[derive(Debug, Clone, PartialEq, EnumString)]
-#[strum(ascii_case_insensitive)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum Token {
     // Used for unit type variants in Enum. An "empty" enum is not represented as Enum,
     // because this way we can have both unit and non-unit type variants.
@@ -89,9 +87,8 @@ pub enum Token {
     Array(Vec<Token>),
     Vector(Vec<Token>),
     StringSlice(StringToken),
-    String(StringToken),
+    StringArray(StringToken),
     Struct(Vec<Token>),
-    #[strum(disabled)]
     Enum(Box<EnumSelector>),
     Tuple(Vec<Token>),
     RawSlice(Vec<u64>),
diff --git a/packages/fuels-programs/src/contract.rs b/packages/fuels-programs/src/contract.rs
index 4660b1cfed..782f6b6f50 100644
--- a/packages/fuels-programs/src/contract.rs
+++ b/packages/fuels-programs/src/contract.rs
@@ -635,7 +635,7 @@ fn should_compute_custom_input_offset(args: &[Token]) -> bool {
                     | Token::U128(_)
                     | Token::U256(_)
                     | Token::Vector(_)
-                    | Token::String(_)
+                    | Token::StringArray(_)
                     | Token::StringSlice(_)
             )
         })
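
A usage note (not part of the patch itself): downstream code that constructs or matches the old `Token::String` variant has to switch to `Token::StringArray`, and dropping the strum `EnumString` derive removes the generated `FromStr` impl for `Token`. Below is a minimal sketch of the adjustment, assuming `Token` and `StringToken` are publicly exported from `fuels_core::types` as in this revision:

    use fuels_core::types::{StringToken, Token};

    // Distinguish the two string-like tokens after the rename.
    fn describe(token: &Token) -> &'static str {
        match token {
            // Fixed-length string array, i.e. Sway's str[N]
            // (was Token::String before this patch).
            Token::StringArray(_) => "string array",
            // Dynamically sized string slice, i.e. Sway's str.
            Token::StringSlice(_) => "string slice",
            _ => "other",
        }
    }

    fn main() {
        // What used to be Token::String(..) is now spelled Token::StringArray(..).
        let token = Token::StringArray(StringToken::new("abc".to_string(), Some(3)));
        assert_eq!(describe(&token), "string array");
    }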