diff --git a/Cargo.lock b/Cargo.lock index d73b2d0a5ad..4e53ade5d82 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8492,7 +8492,6 @@ dependencies = [ "assert_matches", "async-trait", "axum", - "bigdecimal", "bitflags 1.3.2", "chrono", "ctrlc", @@ -8504,7 +8503,6 @@ dependencies = [ "lru", "metrics", "multivm", - "num 0.3.1", "once_cell", "prometheus_exporter", "prost", @@ -8581,7 +8579,6 @@ dependencies = [ "chrono", "hex", "itertools 0.10.5", - "num 0.4.1", "once_cell", "prost", "rand 0.8.5", @@ -9023,6 +9020,7 @@ dependencies = [ "itertools 0.10.5", "metrics", "num 0.4.1", + "rand 0.8.5", "reqwest", "serde", "serde_json", diff --git a/core/bin/snapshots_creator/src/tests.rs b/core/bin/snapshots_creator/src/tests.rs index b8e6234817b..b12002e945e 100644 --- a/core/bin/snapshots_creator/src/tests.rs +++ b/core/bin/snapshots_creator/src/tests.rs @@ -157,7 +157,8 @@ async fn create_miniblock( .unwrap(); conn.storage_logs_dal() .insert_storage_logs(miniblock_number, &[(H256::zero(), block_logs)]) - .await; + .await + .unwrap(); } async fn create_l1_batch( @@ -179,7 +180,8 @@ async fn create_l1_batch( written_keys.sort_unstable(); conn.storage_logs_dedup_dal() .insert_initial_writes(l1_batch_number, &written_keys) - .await; + .await + .unwrap(); } async fn prepare_postgres( diff --git a/core/lib/dal/.sqlx/query-12ab8ba692a42f528450f2adf8d263298abc0521734f807fbf45484158b167b2.json b/core/lib/dal/.sqlx/query-12ab8ba692a42f528450f2adf8d263298abc0521734f807fbf45484158b167b2.json deleted file mode 100644 index 556867a21ff..00000000000 --- a/core/lib/dal/.sqlx/query-12ab8ba692a42f528450f2adf8d263298abc0521734f807fbf45484158b167b2.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_address\n FROM\n tokens\n WHERE\n well_known = FALSE\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_address", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "12ab8ba692a42f528450f2adf8d263298abc0521734f807fbf45484158b167b2" -} diff --git a/core/lib/dal/.sqlx/query-1f25016c41169aa4ab14db2faf7b2d0413d0f89c309de4b31254c309116ea60c.json b/core/lib/dal/.sqlx/query-1f25016c41169aa4ab14db2faf7b2d0413d0f89c309de4b31254c309116ea60c.json deleted file mode 100644 index b535ae5a863..00000000000 --- a/core/lib/dal/.sqlx/query-1f25016c41169aa4ab14db2faf7b2d0413d0f89c309de4b31254c309116ea60c.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tokens\n SET\n token_list_name = $2,\n token_list_symbol = $3,\n token_list_decimals = $4,\n well_known = TRUE,\n updated_at = NOW()\n WHERE\n l1_address = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Varchar", - "Varchar", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "1f25016c41169aa4ab14db2faf7b2d0413d0f89c309de4b31254c309116ea60c" -} diff --git a/core/lib/dal/.sqlx/query-35c78c8f04e2db43285bbe5f485149cdcb6c7ee74047567785a38265a03720be.json b/core/lib/dal/.sqlx/query-35c78c8f04e2db43285bbe5f485149cdcb6c7ee74047567785a38265a03720be.json new file mode 100644 index 00000000000..2e8ef0ae4ec --- /dev/null +++ b/core/lib/dal/.sqlx/query-35c78c8f04e2db43285bbe5f485149cdcb6c7ee74047567785a38265a03720be.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM tokens\n WHERE\n l2_address IN (\n SELECT\n SUBSTRING(key, 12, 20)\n FROM\n storage_logs\n WHERE\n storage_logs.address = $1\n AND miniblock_number > $2\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n storage_logs AS s\n WHERE\n 
s.hashed_key = storage_logs.hashed_key\n AND (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number)\n AND s.value = $3\n )\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "35c78c8f04e2db43285bbe5f485149cdcb6c7ee74047567785a38265a03720be" +} diff --git a/core/lib/dal/.sqlx/query-3e170eea3a5ea5c7389c15f76c6489745438eae73a07b577aa25bd08adf95354.json b/core/lib/dal/.sqlx/query-3e170eea3a5ea5c7389c15f76c6489745438eae73a07b577aa25bd08adf95354.json deleted file mode 100644 index 2290d558cea..00000000000 --- a/core/lib/dal/.sqlx/query-3e170eea3a5ea5c7389c15f76c6489745438eae73a07b577aa25bd08adf95354.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM tokens\n WHERE\n l2_address IN (\n SELECT\n SUBSTRING(key, 12, 20)\n FROM\n storage_logs\n WHERE\n storage_logs.address = $1\n AND miniblock_number > $2\n AND NOT EXISTS (\n SELECT\n 1\n FROM\n storage_logs AS s\n WHERE\n s.hashed_key = storage_logs.hashed_key\n AND (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number)\n AND s.value = $3\n )\n )\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Bytea" - ] - }, - "nullable": [] - }, - "hash": "3e170eea3a5ea5c7389c15f76c6489745438eae73a07b577aa25bd08adf95354" -} diff --git a/core/lib/dal/.sqlx/query-59cb0dd78fadc121e2b1ebbc8a063f089c91aead2bc9abb284697e65840f1e8f.json b/core/lib/dal/.sqlx/query-59cb0dd78fadc121e2b1ebbc8a063f089c91aead2bc9abb284697e65840f1e8f.json deleted file mode 100644 index 8a0cb19b390..00000000000 --- a/core/lib/dal/.sqlx/query-59cb0dd78fadc121e2b1ebbc8a063f089c91aead2bc9abb284697e65840f1e8f.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE tokens\n SET\n usd_price = $2,\n usd_price_updated_at = $3,\n updated_at = NOW()\n WHERE\n l1_address = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Numeric", - "Timestamp" - ] - }, - "nullable": [] - }, - "hash": "59cb0dd78fadc121e2b1ebbc8a063f089c91aead2bc9abb284697e65840f1e8f" -} diff --git a/core/lib/dal/.sqlx/query-f39372e37160df4897f62a800694867ed765dcb9dc60754df9df8700d4244bfb.json b/core/lib/dal/.sqlx/query-5f7034d22251a893249208c5ff8fa5c8bf46bc0cea4ac2b25ecde236c30ae32d.json similarity index 60% rename from core/lib/dal/.sqlx/query-f39372e37160df4897f62a800694867ed765dcb9dc60754df9df8700d4244bfb.json rename to core/lib/dal/.sqlx/query-5f7034d22251a893249208c5ff8fa5c8bf46bc0cea4ac2b25ecde236c30ae32d.json index 9495f8f7c82..0ae6e3bff4b 100644 --- a/core/lib/dal/.sqlx/query-f39372e37160df4897f62a800694867ed765dcb9dc60754df9df8700d4244bfb.json +++ b/core/lib/dal/.sqlx/query-5f7034d22251a893249208c5ff8fa5c8bf46bc0cea4ac2b25ecde236c30ae32d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_address,\n l2_address,\n NAME,\n symbol,\n decimals\n FROM\n tokens\n WHERE\n well_known = TRUE\n ORDER BY\n symbol\n ", + "query": "\n SELECT\n l1_address,\n l2_address,\n NAME,\n symbol,\n decimals\n FROM\n tokens\n WHERE\n well_known = TRUE\n ORDER BY\n symbol\n ", "describe": { "columns": [ { @@ -40,5 +40,5 @@ false ] }, - "hash": "f39372e37160df4897f62a800694867ed765dcb9dc60754df9df8700d4244bfb" + "hash": "5f7034d22251a893249208c5ff8fa5c8bf46bc0cea4ac2b25ecde236c30ae32d" } diff --git 
a/core/lib/dal/.sqlx/query-6c46fb3d51aeb069c09ff0cae3ea82f1af696fcc8f7484eaf255d5311589ee39.json b/core/lib/dal/.sqlx/query-6c46fb3d51aeb069c09ff0cae3ea82f1af696fcc8f7484eaf255d5311589ee39.json new file mode 100644 index 00000000000..35e1168dc8f --- /dev/null +++ b/core/lib/dal/.sqlx/query-6c46fb3d51aeb069c09ff0cae3ea82f1af696fcc8f7484eaf255d5311589ee39.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE tokens\n SET\n well_known = TRUE,\n updated_at = NOW()\n WHERE\n l1_address = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "6c46fb3d51aeb069c09ff0cae3ea82f1af696fcc8f7484eaf255d5311589ee39" +} diff --git a/core/lib/dal/.sqlx/query-e71c39b93ceba5416ff3d988290cb35d4d07d47f33fe1a5b9e9fe1f0ae09b705.json b/core/lib/dal/.sqlx/query-e71c39b93ceba5416ff3d988290cb35d4d07d47f33fe1a5b9e9fe1f0ae09b705.json deleted file mode 100644 index b61fbc645a0..00000000000 --- a/core/lib/dal/.sqlx/query-e71c39b93ceba5416ff3d988290cb35d4d07d47f33fe1a5b9e9fe1f0ae09b705.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n usd_price,\n usd_price_updated_at\n FROM\n tokens\n WHERE\n l2_address = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "usd_price", - "type_info": "Numeric" - }, - { - "ordinal": 1, - "name": "usd_price_updated_at", - "type_info": "Timestamp" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - true, - true - ] - }, - "hash": "e71c39b93ceba5416ff3d988290cb35d4d07d47f33fe1a5b9e9fe1f0ae09b705" -} diff --git a/core/lib/dal/.sqlx/query-f22c5d136fe68bbfcee60beb304cfdc050b85e6d773b13f9699f15c335d42593.json b/core/lib/dal/.sqlx/query-f22c5d136fe68bbfcee60beb304cfdc050b85e6d773b13f9699f15c335d42593.json deleted file mode 100644 index 7ffda2c8a32..00000000000 --- a/core/lib/dal/.sqlx/query-f22c5d136fe68bbfcee60beb304cfdc050b85e6d773b13f9699f15c335d42593.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n l1_address\n FROM\n tokens\n WHERE\n market_volume > $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_address", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Numeric" - ] - }, - "nullable": [ - false - ] - }, - "hash": "f22c5d136fe68bbfcee60beb304cfdc050b85e6d773b13f9699f15c335d42593" -} diff --git a/core/lib/dal/Cargo.toml b/core/lib/dal/Cargo.toml index 7222b4f0ce8..a284690390f 100644 --- a/core/lib/dal/Cargo.toml +++ b/core/lib/dal/Cargo.toml @@ -45,7 +45,6 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" bigdecimal = "0.3.0" bincode = "1" -num = "0.4.0" hex = "0.4" once_cell = "1.7" strum = { version = "0.24", features = ["derive"] } diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs index 4e3e0853991..99c50bcd854 100644 --- a/core/lib/dal/src/models/mod.rs +++ b/core/lib/dal/src/models/mod.rs @@ -7,7 +7,6 @@ pub mod storage_log; pub mod storage_protocol_version; pub mod storage_prover_job_info; pub mod storage_sync; -pub mod storage_token; pub mod storage_transaction; pub mod storage_verification_request; pub mod storage_witness_job_info; diff --git a/core/lib/dal/src/models/storage_token.rs b/core/lib/dal/src/models/storage_token.rs deleted file mode 100644 index 3acd7e03bc9..00000000000 --- a/core/lib/dal/src/models/storage_token.rs +++ /dev/null @@ -1,32 +0,0 @@ -use sqlx::types::{ - chrono::{DateTime, NaiveDateTime, Utc}, - BigDecimal, -}; -use zksync_types::tokens::TokenPrice; -use 
zksync_utils::big_decimal_to_ratio; - -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct StorageTokenPrice { - pub usd_price: Option, - pub usd_price_updated_at: Option, -} - -impl From for Option { - fn from(price: StorageTokenPrice) -> Option { - match (&price.usd_price, price.usd_price_updated_at) { - (Some(usd_price), Some(updated_at)) => Some(TokenPrice { - usd_price: big_decimal_to_ratio(usd_price).unwrap(), - last_updated: DateTime::::from_naive_utc_and_offset(updated_at, Utc), - }), - (None, None) => None, - _ => { - tracing::warn!( - "Found storage token with {:?} `usd_price` and {:?} `usd_price_updated_at`", - price.usd_price, - price.usd_price_updated_at - ); - None - } - } - } -} diff --git a/core/lib/dal/src/storage_logs_dal.rs b/core/lib/dal/src/storage_logs_dal.rs index 15fc26c0d86..12d958ed859 100644 --- a/core/lib/dal/src/storage_logs_dal.rs +++ b/core/lib/dal/src/storage_logs_dal.rs @@ -21,8 +21,8 @@ impl StorageLogsDal<'_, '_> { &mut self, block_number: MiniblockNumber, logs: &[(H256, Vec)], - ) { - self.insert_storage_logs_inner(block_number, logs, 0).await; + ) -> sqlx::Result<()> { + self.insert_storage_logs_inner(block_number, logs, 0).await } async fn insert_storage_logs_inner( @@ -30,7 +30,7 @@ impl StorageLogsDal<'_, '_> { block_number: MiniblockNumber, logs: &[(H256, Vec)], mut operation_number: u32, - ) { + ) -> sqlx::Result<()> { let mut copy = self .storage .conn() @@ -41,8 +41,7 @@ impl StorageLogsDal<'_, '_> { ) FROM STDIN WITH (DELIMITER '|')", ) - .await - .unwrap(); + .await?; let mut buffer = String::new(); let now = Utc::now().naive_utc().to_string(); @@ -64,8 +63,9 @@ impl StorageLogsDal<'_, '_> { operation_number += 1; } } - copy.send(buffer.as_bytes()).await.unwrap(); - copy.finish().await.unwrap(); + copy.send(buffer.as_bytes()).await?; + copy.finish().await?; + Ok(()) } pub async fn insert_storage_logs_from_snapshot( @@ -112,7 +112,7 @@ impl StorageLogsDal<'_, '_> { &mut self, block_number: MiniblockNumber, logs: &[(H256, Vec)], - ) { + ) -> sqlx::Result<()> { let operation_number = sqlx::query!( r#" SELECT @@ -125,14 +125,13 @@ impl StorageLogsDal<'_, '_> { block_number.0 as i64 ) .fetch_one(self.storage.conn()) - .await - .unwrap() + .await? .max .map(|max| max as u32 + 1) .unwrap_or(0); self.insert_storage_logs_inner(block_number, logs, operation_number) - .await; + .await } /// Rolls back storage to the specified point in time. 
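Note on the change above: `insert_storage_logs`, `insert_storage_logs_inner`, and `append_storage_logs` now return `sqlx::Result<()>` instead of unwrapping internally, so database failures reach the caller. A minimal caller-side sketch (the wrapper name is hypothetical; the types are the ones used throughout this diff):

// Hypothetical helper showing how a call site propagates the new error type.
async fn persist_miniblock_logs(
    conn: &mut StorageProcessor<'_>,
    miniblock_number: MiniblockNumber,
    block_logs: Vec<StorageLog>,
) -> sqlx::Result<()> {
    conn.storage_logs_dal()
        .insert_storage_logs(miniblock_number, &[(H256::zero(), block_logs)])
        .await?; // previously the future resolved to `()` and panicked on failure
    Ok(())
}

The tests below instead call `.await.unwrap()`, keeping the old panic behavior explicit at each call site.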
@@ -811,7 +810,8 @@ mod tests { let logs = [(H256::zero(), logs)]; conn.storage_logs_dal() .insert_storage_logs(MiniblockNumber(number), &logs) - .await; + .await + .unwrap(); #[allow(deprecated)] conn.storage_dal().apply_storage_logs(&logs).await; conn.blocks_dal() @@ -849,7 +849,8 @@ mod tests { let more_logs = [(H256::repeat_byte(1), vec![third_log])]; conn.storage_logs_dal() .append_storage_logs(MiniblockNumber(1), &more_logs) - .await; + .await + .unwrap(); #[allow(deprecated)] conn.storage_dal().apply_storage_logs(&more_logs).await; @@ -961,7 +962,8 @@ mod tests { let written_keys: Vec<_> = logs.iter().map(|log| log.key).collect(); conn.storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(1), &written_keys) - .await; + .await + .unwrap(); let new_logs: Vec<_> = (5_u64..20) .map(|i| { @@ -973,7 +975,8 @@ mod tests { let new_written_keys: Vec<_> = new_logs[5..].iter().map(|log| log.key).collect(); conn.storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(2), &new_written_keys) - .await; + .await + .unwrap(); let logs_for_revert = conn .storage_logs_dal() @@ -1032,7 +1035,8 @@ mod tests { assert!(initial_keys.len() < logs.len()); conn.storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(l1_batch), &initial_keys) - .await; + .await + .unwrap(); } let logs_for_revert = conn @@ -1109,7 +1113,8 @@ mod tests { initial_keys.sort_unstable(); conn.storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(1), &initial_keys) - .await; + .await + .unwrap(); let mut sorted_hashed_keys: Vec<_> = logs.iter().map(|log| log.key.hashed_key()).collect(); sorted_hashed_keys.sort_unstable(); diff --git a/core/lib/dal/src/storage_logs_dedup_dal.rs b/core/lib/dal/src/storage_logs_dedup_dal.rs index 9ca17176e8b..3409588aabc 100644 --- a/core/lib/dal/src/storage_logs_dedup_dal.rs +++ b/core/lib/dal/src/storage_logs_dedup_dal.rs @@ -20,7 +20,7 @@ impl StorageLogsDedupDal<'_, '_> { &mut self, l1_batch_number: L1BatchNumber, read_logs: &[LogQuery], - ) { + ) -> sqlx::Result<()> { let mut copy = self .storage .conn() @@ -28,8 +28,7 @@ impl StorageLogsDedupDal<'_, '_> { "COPY protective_reads (l1_batch_number, address, key, created_at, updated_at) \ FROM STDIN WITH (DELIMITER '|')", ) - .await - .unwrap(); + .await?; let mut bytes: Vec = Vec::new(); let now = Utc::now().naive_utc().to_string(); @@ -42,8 +41,9 @@ impl StorageLogsDedupDal<'_, '_> { ); bytes.extend_from_slice(row.as_bytes()); } - copy.send(bytes).await.unwrap(); - copy.finish().await.unwrap(); + copy.send(bytes).await?; + copy.finish().await?; + Ok(()) } /// Insert initial writes and assigns indices to them. 
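The protective-reads writer above uses the same Postgres `COPY ... FROM STDIN` fast path as the storage-log writers, now with errors propagated rather than unwrapped. A condensed sketch of the protocol under that assumption (table and columns are illustrative, not a real schema):

// Illustrative only: stream '|'-delimited rows through a raw COPY.
let mut copy = self
    .storage
    .conn()
    .copy_in_raw("COPY example_table (col_a, col_b) FROM STDIN WITH (DELIMITER '|')")
    .await?; // fails here if Postgres rejects the COPY statement
let mut buffer = String::new();
buffer.push_str("a|b\n"); // one row per line, columns separated by '|'
copy.send(buffer.as_bytes()).await?; // sends the whole batch in one round trip
copy.finish().await?; // completes the COPY; late errors surface as sqlx::Error
Ok(())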
@@ -84,7 +84,7 @@ impl StorageLogsDedupDal<'_, '_> { &mut self, l1_batch_number: L1BatchNumber, written_storage_keys: &[StorageKey], - ) { + ) -> sqlx::Result<()> { let hashed_keys: Vec<_> = written_storage_keys .iter() .map(|key| StorageKey::raw_hashed_key(key.address(), key.key()).to_vec()) @@ -113,8 +113,9 @@ impl StorageLogsDedupDal<'_, '_> { l1_batch_number.0 as i64, ) .execute(self.storage.conn()) - .await - .unwrap(); + .await?; + + Ok(()) } pub async fn get_protective_reads_for_l1_batch( diff --git a/core/lib/dal/src/tokens_dal.rs b/core/lib/dal/src/tokens_dal.rs index a912656c083..8308a91b0e2 100644 --- a/core/lib/dal/src/tokens_dal.rs +++ b/core/lib/dal/src/tokens_dal.rs @@ -1,88 +1,64 @@ -use num::{rational::Ratio, BigUint}; use sqlx::types::chrono::Utc; use zksync_types::{ - tokens::{TokenInfo, TokenMetadata, TokenPrice}, - Address, MiniblockNumber, ACCOUNT_CODE_STORAGE_ADDRESS, + tokens::TokenInfo, Address, MiniblockNumber, ACCOUNT_CODE_STORAGE_ADDRESS, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, }; -use zksync_utils::ratio_to_big_decimal; use crate::StorageProcessor; -// Precision of the USD price per token -pub(crate) const STORED_USD_PRICE_PRECISION: usize = 6; - #[derive(Debug)] pub struct TokensDal<'a, 'c> { pub(crate) storage: &'a mut StorageProcessor<'c>, } impl TokensDal<'_, '_> { - pub async fn add_tokens(&mut self, tokens: Vec) { - { - let mut copy = self + pub async fn add_tokens(&mut self, tokens: &[TokenInfo]) -> sqlx::Result<()> { + let mut copy = self .storage .conn() .copy_in_raw( "COPY tokens (l1_address, l2_address, name, symbol, decimals, well_known, created_at, updated_at) FROM STDIN WITH (DELIMITER '|')", ) - .await - .unwrap(); + .await?; - let mut bytes: Vec = Vec::new(); - let now = Utc::now().naive_utc().to_string(); - for TokenInfo { - l1_address, - l2_address, - metadata: - TokenMetadata { - name, - symbol, - decimals, - }, - } in tokens - { - let l1_address_str = format!("\\\\x{}", hex::encode(l1_address.0)); - let l2_address_str = format!("\\\\x{}", hex::encode(l2_address.0)); - let row = format!( - "{}|{}|{}|{}|{}|FALSE|{}|{}\n", - l1_address_str, l2_address_str, name, symbol, decimals, now, now - ); - bytes.extend_from_slice(row.as_bytes()); - } - copy.send(bytes).await.unwrap(); - copy.finish().await.unwrap(); + let mut buffer = String::new(); + let now = Utc::now().naive_utc().to_string(); + for token_info in tokens { + write_str!( + &mut buffer, + "\\\\x{:x}|\\\\x{:x}|", + token_info.l1_address, + token_info.l2_address + ); + writeln_str!( + &mut buffer, + "{}|{}|{}|FALSE|{now}|{now}", + token_info.metadata.name, + token_info.metadata.symbol, + token_info.metadata.decimals + ); } + copy.send(buffer.as_bytes()).await?; + copy.finish().await?; + Ok(()) } - pub async fn update_well_known_l1_token( - &mut self, - l1_address: &Address, - metadata: TokenMetadata, - ) { - { - sqlx::query!( - r#" - UPDATE tokens - SET - token_list_name = $2, - token_list_symbol = $3, - token_list_decimals = $4, - well_known = TRUE, - updated_at = NOW() - WHERE - l1_address = $1 - "#, - l1_address.as_bytes(), - metadata.name, - metadata.symbol, - metadata.decimals as i32, - ) - .execute(self.storage.conn()) - .await - .unwrap(); - } + pub async fn mark_token_as_well_known(&mut self, l1_address: Address) -> sqlx::Result<()> { + sqlx::query!( + r#" + UPDATE tokens + SET + well_known = TRUE, + updated_at = NOW() + WHERE + l1_address = $1 + "#, + l1_address.as_bytes() + ) + .execute(self.storage.conn()) + .await?; + Ok(()) } pub async fn get_all_l2_token_addresses(&mut self) 
-> sqlx::Result<Vec<Address>> { @@ -103,109 +79,105 @@ impl TokensDal<'_, '_> { .collect()) } - pub async fn get_unknown_l1_token_addresses(&mut self) -> Vec<Address>
{ - { - let records = sqlx::query!( - r#" - SELECT - l1_address - FROM - tokens - WHERE - well_known = FALSE - "# - ) - .fetch_all(self.storage.conn()) - .await - .unwrap(); - let addresses: Vec<Address>
= records - .into_iter() - .map(|record| Address::from_slice(&record.l1_address)) - .collect(); - addresses - } - } + pub async fn rollback_tokens(&mut self, block_number: MiniblockNumber) -> sqlx::Result<()> { + sqlx::query!( + r#" + DELETE FROM tokens + WHERE + l2_address IN ( + SELECT + SUBSTRING(key, 12, 20) + FROM + storage_logs + WHERE + storage_logs.address = $1 + AND miniblock_number > $2 + AND NOT EXISTS ( + SELECT + 1 + FROM + storage_logs AS s + WHERE + s.hashed_key = storage_logs.hashed_key + AND (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) + AND s.value = $3 + ) + ) + "#, + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + block_number.0 as i64, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .execute(self.storage.conn()) + .await?; - pub async fn get_l1_tokens_by_volume(&mut self, min_volume: &Ratio<BigUint>) -> Vec<Address>
{ - { - let min_volume = ratio_to_big_decimal(min_volume, STORED_USD_PRICE_PRECISION); - let records = sqlx::query!( - r#" - SELECT - l1_address - FROM - tokens - WHERE - market_volume > $1 - "#, - min_volume - ) - .fetch_all(self.storage.conn()) - .await - .unwrap(); - let addresses: Vec<Address>
= records - .into_iter() - .map(|record| Address::from_slice(&record.l1_address)) - .collect(); - addresses - } + Ok(()) } +} - pub async fn set_l1_token_price(&mut self, l1_address: &Address, price: TokenPrice) { - { - sqlx::query!( - r#" - UPDATE tokens - SET - usd_price = $2, - usd_price_updated_at = $3, - updated_at = NOW() - WHERE - l1_address = $1 - "#, - l1_address.as_bytes(), - ratio_to_big_decimal(&price.usd_price, STORED_USD_PRICE_PRECISION), - price.last_updated.naive_utc(), - ) - .execute(self.storage.conn()) +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use zksync_types::tokens::TokenMetadata; + + use super::*; + use crate::ConnectionPool; + + #[tokio::test] + async fn adding_and_getting_tokens() { + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.access_storage().await.unwrap(); + let tokens = [ + TokenInfo { + l1_address: Address::repeat_byte(1), + l2_address: Address::repeat_byte(2), + metadata: TokenMetadata { + name: "Test".to_string(), + symbol: "TST".to_string(), + decimals: 10, + }, + }, + TokenInfo { + l1_address: Address::repeat_byte(0), + l2_address: Address::repeat_byte(0), + metadata: TokenMetadata { + name: "Ether".to_string(), + symbol: "ETH".to_string(), + decimals: 18, + }, + }, + ]; + storage.tokens_dal().add_tokens(&tokens).await.unwrap(); + + let token_addresses = storage + .tokens_dal() + .get_all_l2_token_addresses() .await .unwrap(); + assert_eq!( + token_addresses.into_iter().collect::>(), + tokens + .iter() + .map(|token| token.l2_address) + .collect::>(), + ); + + for token in &tokens { + storage + .tokens_dal() + .mark_token_as_well_known(token.l1_address) + .await + .unwrap(); } - } - pub async fn rollback_tokens(&mut self, block_number: MiniblockNumber) { - { - sqlx::query!( - r#" - DELETE FROM tokens - WHERE - l2_address IN ( - SELECT - SUBSTRING(key, 12, 20) - FROM - storage_logs - WHERE - storage_logs.address = $1 - AND miniblock_number > $2 - AND NOT EXISTS ( - SELECT - 1 - FROM - storage_logs AS s - WHERE - s.hashed_key = storage_logs.hashed_key - AND (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) - AND s.value = $3 - ) - ) - "#, - ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), - block_number.0 as i64, - FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() - ) - .execute(self.storage.conn()) + let well_known_tokens = storage + .tokens_web3_dal() + .get_well_known_tokens() .await .unwrap(); - } + assert_eq!(well_known_tokens.len(), 2); + assert!(well_known_tokens.contains(&tokens[0])); + assert!(well_known_tokens.contains(&tokens[1])); } } diff --git a/core/lib/dal/src/tokens_web3_dal.rs b/core/lib/dal/src/tokens_web3_dal.rs index c3421c64679..36f58c2711b 100644 --- a/core/lib/dal/src/tokens_web3_dal.rs +++ b/core/lib/dal/src/tokens_web3_dal.rs @@ -1,9 +1,9 @@ use zksync_types::{ - tokens::{TokenInfo, TokenMetadata, TokenPrice}, + tokens::{TokenInfo, TokenMetadata}, Address, }; -use crate::{models::storage_token::StorageTokenPrice, SqlxError, StorageProcessor}; +use crate::StorageProcessor; #[derive(Debug)] pub struct TokensWeb3Dal<'a, 'c> { @@ -11,64 +11,37 @@ pub struct TokensWeb3Dal<'a, 'c> { } impl TokensWeb3Dal<'_, '_> { - pub async fn get_well_known_tokens(&mut self) -> Result, SqlxError> { - { - let records = sqlx::query!( - r#" - SELECT - l1_address, - l2_address, - NAME, - symbol, - decimals - FROM - tokens - WHERE - well_known = TRUE - ORDER BY - symbol - "# - ) - .fetch_all(self.storage.conn()) - .await?; - let result: Vec = records - 
.into_iter() - .map(|record| TokenInfo { - l1_address: Address::from_slice(&record.l1_address), - l2_address: Address::from_slice(&record.l2_address), - metadata: TokenMetadata { - name: record.name, - symbol: record.symbol, - decimals: record.decimals as u8, - }, - }) - .collect(); - Ok(result) - } - } - - pub async fn get_token_price( - &mut self, - l2_address: &Address, - ) -> Result, SqlxError> { - { - let storage_price = sqlx::query_as!( - StorageTokenPrice, - r#" - SELECT - usd_price, - usd_price_updated_at - FROM - tokens - WHERE - l2_address = $1 - "#, - l2_address.as_bytes(), - ) - .fetch_optional(self.storage.conn()) - .await?; + pub async fn get_well_known_tokens(&mut self) -> sqlx::Result> { + let records = sqlx::query!( + r#" + SELECT + l1_address, + l2_address, + NAME, + symbol, + decimals + FROM + tokens + WHERE + well_known = TRUE + ORDER BY + symbol + "# + ) + .fetch_all(self.storage.conn()) + .await?; - Ok(storage_price.and_then(Into::into)) - } + Ok(records + .into_iter() + .map(|record| TokenInfo { + l1_address: Address::from_slice(&record.l1_address), + l2_address: Address::from_slice(&record.l2_address), + metadata: TokenMetadata { + name: record.name, + symbol: record.symbol, + decimals: record.decimals as u8, + }, + }) + .collect()) } } diff --git a/core/lib/state/src/test_utils.rs b/core/lib/state/src/test_utils.rs index b21eeb196c5..77c9e9029ea 100644 --- a/core/lib/state/src/test_utils.rs +++ b/core/lib/state/src/test_utils.rs @@ -93,7 +93,8 @@ pub(crate) async fn create_miniblock( .unwrap(); conn.storage_logs_dal() .insert_storage_logs(miniblock_number, &[(H256::zero(), block_logs)]) - .await; + .await + .unwrap(); } #[allow(clippy::default_trait_access)] @@ -117,7 +118,8 @@ pub(crate) async fn create_l1_batch( written_keys.sort_unstable(); conn.storage_logs_dedup_dal() .insert_initial_writes(l1_batch_number, &written_keys) - .await; + .await + .unwrap(); } pub(crate) async fn prepare_postgres_for_snapshot_recovery( @@ -148,11 +150,13 @@ pub(crate) async fn prepare_postgres_for_snapshot_recovery( snapshot_recovery.miniblock_number, &[(H256::zero(), snapshot_storage_logs.clone())], ) - .await; + .await + .unwrap(); let mut written_keys: Vec<_> = snapshot_storage_logs.iter().map(|log| log.key).collect(); written_keys.sort_unstable(); conn.storage_logs_dedup_dal() .insert_initial_writes(snapshot_recovery.l1_batch_number, &written_keys) - .await; + .await + .unwrap(); (snapshot_recovery, snapshot_storage_logs) } diff --git a/core/lib/types/src/tokens.rs b/core/lib/types/src/tokens.rs index c4b85cb0e21..26aec479498 100644 --- a/core/lib/types/src/tokens.rs +++ b/core/lib/types/src/tokens.rs @@ -1,9 +1,6 @@ -use chrono::{DateTime, Utc}; -use num::{rational::Ratio, BigUint}; use serde::{Deserialize, Serialize}; use zksync_basic_types::Address; pub use zksync_system_constants::ETHEREUM_ADDRESS; -use zksync_utils::UnsignedRatioSerializeAsDecimal; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct TokenInfo { @@ -35,11 +32,3 @@ impl TokenMetadata { } } } - -/// Token price known to the zkSync network. 
-#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct TokenPrice { - #[serde(with = "UnsignedRatioSerializeAsDecimal")] - pub usd_price: Ratio, - pub last_updated: DateTime, -} diff --git a/core/lib/utils/Cargo.toml b/core/lib/utils/Cargo.toml index 5561f9b36da..0ed901bacac 100644 --- a/core/lib/utils/Cargo.toml +++ b/core/lib/utils/Cargo.toml @@ -29,3 +29,4 @@ metrics = "0.21" [dev-dependencies] serde_json = "1.0.0" +rand = "0.8" diff --git a/core/lib/utils/src/convert.rs b/core/lib/utils/src/convert.rs index cc4699448e6..e086e385c8e 100644 --- a/core/lib/utils/src/convert.rs +++ b/core/lib/utils/src/convert.rs @@ -1,54 +1,18 @@ use std::convert::TryInto; use bigdecimal::BigDecimal; -use num::{ - bigint::ToBigInt, - rational::Ratio, - traits::{sign::Signed, Pow}, - BigUint, -}; +use num::BigUint; use zksync_basic_types::{Address, H256, U256}; pub fn u256_to_big_decimal(value: U256) -> BigDecimal { - let ratio = Ratio::new_raw(u256_to_biguint(value), BigUint::from(1u8)); - ratio_to_big_decimal(&ratio, 80) -} - -pub fn ratio_to_big_decimal(num: &Ratio, precision: usize) -> BigDecimal { - let bigint = round_precision_raw_no_div(num, precision) - .to_bigint() - .unwrap(); - BigDecimal::new(bigint, precision as i64) -} - -pub fn ratio_to_big_decimal_normalized( - num: &Ratio, - precision: usize, - min_precision: usize, -) -> BigDecimal { - let normalized = ratio_to_big_decimal(num, precision).normalized(); - let min_scaled = normalized.with_scale(min_precision as i64); - normalized.max(min_scaled) -} - -pub fn big_decimal_to_ratio(num: &BigDecimal) -> Result, anyhow::Error> { - let (big_int, exp) = num.as_bigint_and_exponent(); - anyhow::ensure!(!big_int.is_negative(), "BigDecimal should be unsigned"); - let big_uint = big_int.to_biguint().unwrap(); - let ten_pow = BigUint::from(10_u32).pow(exp as u128); - Ok(Ratio::new(big_uint, ten_pow)) -} - -fn round_precision_raw_no_div(num: &Ratio, precision: usize) -> BigUint { - let ten_pow = BigUint::from(10u32).pow(precision); - (num * ten_pow).round().to_integer() -} - -/// Converts `U256` into the corresponding `BigUint` value. -fn u256_to_biguint(value: U256) -> BigUint { - let mut bytes = [0u8; 32]; - value.to_little_endian(&mut bytes); - BigUint::from_bytes_le(&bytes) + let mut u32_digits = vec![0_u32; 8]; + // `u64_digit`s from `U256` are little-endian + for (i, &u64_digit) in value.0.iter().enumerate() { + u32_digits[2 * i] = u64_digit as u32; + u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; + } + let value = BigUint::new(u32_digits); + BigDecimal::new(value.into(), 0) } /// Converts `BigUint` value into the corresponding `U256` value. @@ -171,60 +135,41 @@ pub fn u256_to_bytes_be(value: &U256) -> Vec { #[cfg(test)] mod test { - use std::str::FromStr; - use num::BigInt; + use rand::{rngs::StdRng, Rng, SeedableRng}; use super::*; #[test] - fn test_ratio_to_big_decimal() { - let ratio = Ratio::from_integer(BigUint::from(0u32)); - let dec = ratio_to_big_decimal(&ratio, 1); - assert_eq!(dec.to_string(), "0.0"); - let ratio = Ratio::from_integer(BigUint::from(1234u32)); - let dec = ratio_to_big_decimal(&ratio, 7); - assert_eq!(dec.to_string(), "1234.0000000"); - // 4 divided by 9 is 0.(4). - let ratio = Ratio::new(BigUint::from(4u32), BigUint::from(9u32)); - let dec = ratio_to_big_decimal(&ratio, 12); - assert_eq!(dec.to_string(), "0.444444444444"); - // First 7 decimal digits of pi. 
- let ratio = Ratio::new(BigUint::from(52163u32), BigUint::from(16604u32)); - let dec = ratio_to_big_decimal(&ratio, 6); - assert_eq!(dec.to_string(), "3.141592"); - } - - #[test] - fn test_ratio_to_big_decimal_normalized() { - let ratio = Ratio::from_integer(BigUint::from(10u32)); - let dec = ratio_to_big_decimal_normalized(&ratio, 100, 2); - assert_eq!(dec.to_string(), "10.00"); - - // First 7 decimal digits of pi. - let ratio = Ratio::new(BigUint::from(52163u32), BigUint::from(16604u32)); - let dec = ratio_to_big_decimal_normalized(&ratio, 6, 2); - assert_eq!(dec.to_string(), "3.141592"); - - // 4 divided by 9 is 0.(4). - let ratio = Ratio::new(BigUint::from(4u32), BigUint::from(9u32)); - let dec = ratio_to_big_decimal_normalized(&ratio, 12, 2); - assert_eq!(dec.to_string(), "0.444444444444"); - } - - #[test] - fn test_big_decimal_to_ratio() { - // Expect unsigned number. - let dec = BigDecimal::from(-1); - assert!(big_decimal_to_ratio(&dec).is_err()); - let expected = Ratio::from_integer(BigUint::from(0u32)); - let dec = BigDecimal::from(0); - let ratio = big_decimal_to_ratio(&dec).unwrap(); - assert_eq!(ratio, expected); - let expected = Ratio::new(BigUint::from(1234567u32), BigUint::from(10000u32)); - let dec = BigDecimal::from_str("123.4567").unwrap(); - let ratio = big_decimal_to_ratio(&dec).unwrap(); - assert_eq!(ratio, expected); + fn test_u256_to_bigdecimal() { + const RNG_SEED: u64 = 123; + + let mut rng = StdRng::seed_from_u64(RNG_SEED); + // Small values. + for _ in 0..10_000 { + let value: u64 = rng.gen(); + let expected = BigDecimal::from(value); + assert_eq!(u256_to_big_decimal(value.into()), expected); + } + + // Arbitrary values + for _ in 0..10_000 { + let u64_digits: [u64; 4] = rng.gen(); + let value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| U256::from(digit) << (i * 64)) + .fold(U256::zero(), |acc, x| acc + x); + let expected_value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| BigInt::from(digit) << (i * 64)) + .fold(BigInt::from(0), |acc, x| acc + x); + assert_eq!( + u256_to_big_decimal(value), + BigDecimal::new(expected_value, 0) + ); + } } #[test] diff --git a/core/lib/utils/src/serde_wrappers.rs b/core/lib/utils/src/serde_wrappers.rs index 4721436f4d9..3c7876a5bbb 100644 --- a/core/lib/utils/src/serde_wrappers.rs +++ b/core/lib/utils/src/serde_wrappers.rs @@ -1,52 +1,5 @@ -use std::str::FromStr; - -use bigdecimal::BigDecimal; -use num::{rational::Ratio, BigUint}; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use crate::convert::*; - -#[derive(Clone, Debug)] -pub struct UnsignedRatioSerializeAsDecimal; -impl UnsignedRatioSerializeAsDecimal { - pub fn serialize(value: &Ratio, serializer: S) -> Result - where - S: Serializer, - { - if serializer.is_human_readable() { - BigDecimal::serialize(&ratio_to_big_decimal(value, 18), serializer) - } else { - value.serialize(serializer) - } - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - if deserializer.is_human_readable() { - // First, deserialize a string value. It is expected to be a - // hexadecimal representation of `BigDecimal`. - let big_decimal_string = BigDecimal::deserialize(deserializer)?; - - big_decimal_to_ratio(&big_decimal_string).map_err(de::Error::custom) - } else { - Ratio::::deserialize(deserializer) - } - } - - pub fn deserialize_from_str_with_dot(input: &str) -> Result, anyhow::Error> { - big_decimal_to_ratio(&BigDecimal::from_str(input)?) 
- } - - pub fn serialize_to_str_with_dot(num: &Ratio<BigUint>, precision: usize) -> String { - ratio_to_big_decimal(num, precision) - .to_string() - .trim_end_matches('0') - .to_string() - } -} - /// Trait for specifying prefix for bytes to hex serialization pub trait Prefix { fn prefix() -> &'static str; } @@ -97,28 +50,3 @@ impl<P: Prefix> BytesToHexSerde<P>

{ } pub type ZeroPrefixHexSerde = BytesToHexSerde; - -#[cfg(test)] -mod test { - use super::*; - - /// Tests that `Ratio` serializer works correctly. - #[test] - fn test_ratio_serialize_as_decimal() { - #[derive(Clone, Serialize, Deserialize)] - struct RatioSerdeWrapper( - #[serde(with = "UnsignedRatioSerializeAsDecimal")] pub Ratio, - ); - // It's essential that this number is a finite decimal, otherwise the precision will be lost - // and the assertion will fail. - let expected = RatioSerdeWrapper(Ratio::new( - BigUint::from(120315391195132u64), - BigUint::from(1250000000u64), - )); - let value = - serde_json::to_value(expected.clone()).expect("cannot serialize Ratio as Decimal"); - let ratio: RatioSerdeWrapper = - serde_json::from_value(value).expect("cannot deserialize Ratio from Decimal"); - assert_eq!(expected.0, ratio.0); - } -} diff --git a/core/lib/web3_decl/src/namespaces/zks.rs b/core/lib/web3_decl/src/namespaces/zks.rs index a9eddaa0312..66ca97cb03a 100644 --- a/core/lib/web3_decl/src/namespaces/zks.rs +++ b/core/lib/web3_decl/src/namespaces/zks.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use bigdecimal::BigDecimal; use jsonrpsee::{core::RpcResult, proc_macros::rpc}; use zksync_types::{ api::{ @@ -48,8 +47,6 @@ pub trait ZksNamespace { #[method(name = "getConfirmedTokens")] async fn get_confirmed_tokens(&self, from: u32, limit: u8) -> RpcResult>; - #[method(name = "getTokenPrice")] - async fn get_token_price(&self, token_address: Address) -> RpcResult; #[method(name = "getAllAccountBalances")] async fn get_all_account_balances(&self, address: Address) diff --git a/core/lib/zksync_core/Cargo.toml b/core/lib/zksync_core/Cargo.toml index 032e58d2bcd..c28878fe96d 100644 --- a/core/lib/zksync_core/Cargo.toml +++ b/core/lib/zksync_core/Cargo.toml @@ -70,8 +70,6 @@ thiserror = "1.0" async-trait = "0.1" bitflags = "1.3.2" -num = { version = "0.3.1", features = ["serde"] } -bigdecimal = { version = "0.3.0", features = ["serde"] } reqwest = { version = "0.11", features = ["blocking", "json"] } hex = "0.4" lru = { version = "0.12.1", default-features = false } diff --git a/core/lib/zksync_core/src/api_server/tx_sender/tests.rs b/core/lib/zksync_core/src/api_server/tx_sender/tests.rs index ace647de8cb..e8dc8d7595f 100644 --- a/core/lib/zksync_core/src/api_server/tx_sender/tests.rs +++ b/core/lib/zksync_core/src/api_server/tx_sender/tests.rs @@ -57,7 +57,8 @@ async fn getting_nonce_for_account() { storage .storage_logs_dal() .append_storage_logs(MiniblockNumber(0), &[(H256::default(), vec![nonce_log])]) - .await; + .await + .unwrap(); let tx_executor = MockTransactionExecutor::default().into(); let (tx_sender, _) = create_test_tx_sender(pool.clone(), l2_chain_id, tx_executor).await; @@ -78,7 +79,8 @@ async fn getting_nonce_for_account() { storage .storage_logs_dal() .insert_storage_logs(MiniblockNumber(1), &[(H256::default(), vec![nonce_log])]) - .await; + .await + .unwrap(); let nonce = tx_sender.get_expected_nonce(test_address).await.unwrap(); assert_eq!(nonce, Nonce(321)); @@ -134,7 +136,8 @@ async fn getting_nonce_for_account_after_snapshot_recovery() { SNAPSHOT_MINIBLOCK_NUMBER + 1, &[(H256::default(), new_nonce_logs)], ) - .await; + .await + .unwrap(); let nonce = tx_sender.get_expected_nonce(test_address).await.unwrap(); assert_eq!(nonce, Nonce(321)); diff --git a/core/lib/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs b/core/lib/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs index db0760a66c7..e6ef5ddeb90 100644 --- 
a/core/lib/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs +++ b/core/lib/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use bigdecimal::BigDecimal; use zksync_types::{ api::{ BlockDetails, BridgeAddresses, L1BatchDetails, L2ToL1LogProof, Proof, ProtocolVersion, @@ -53,12 +52,6 @@ impl ZksNamespaceServer for ZksNamespace { .map_err(into_jsrpc_error) } - async fn get_token_price(&self, token_address: Address) -> RpcResult { - self.get_token_price_impl(token_address) - .await - .map_err(into_jsrpc_error) - } - async fn get_all_account_balances( &self, address: Address, diff --git a/core/lib/zksync_core/src/api_server/web3/namespaces/zks.rs b/core/lib/zksync_core/src/api_server/web3/namespaces/zks.rs index c4b3042503a..61746a20b03 100644 --- a/core/lib/zksync_core/src/api_server/web3/namespaces/zks.rs +++ b/core/lib/zksync_core/src/api_server/web3/namespaces/zks.rs @@ -1,6 +1,5 @@ use std::{collections::HashMap, convert::TryInto}; -use bigdecimal::{BigDecimal, Zero}; use zksync_dal::StorageProcessor; use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; @@ -20,7 +19,7 @@ use zksync_types::{ AccountTreeId, L1BatchNumber, MiniblockNumber, StorageKey, Transaction, L1_MESSENGER_ADDRESS, L2_ETH_TOKEN_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, U256, U64, }; -use zksync_utils::{address_to_h256, h256_to_u256, ratio_to_big_decimal_normalized}; +use zksync_utils::{address_to_h256, h256_to_u256}; use zksync_web3_decl::{ error::Web3Error, types::{Address, Token, H256}, @@ -176,37 +175,6 @@ impl ZksNamespace { Ok(tokens) } - #[tracing::instrument(skip(self))] - pub async fn get_token_price_impl(&self, l2_token: Address) -> Result { - const METHOD_NAME: &str = "get_token_price"; - - /// Amount of possible symbols after the decimal dot in the USD. - /// Used to convert `Ratio` to `BigDecimal`. - const USD_PRECISION: usize = 100; - /// Minimum amount of symbols after the decimal dot in the USD. - /// Used to convert `Ratio` to `BigDecimal`. 
- const MIN_PRECISION: usize = 2; - - let method_latency = API_METRICS.start_call(METHOD_NAME); - let token_price_result = { - let mut storage = self.access_storage(METHOD_NAME).await?; - storage.tokens_web3_dal().get_token_price(&l2_token).await - }; - - let result = match token_price_result { - Ok(Some(price)) => Ok(ratio_to_big_decimal_normalized( - &price.usd_price, - USD_PRECISION, - MIN_PRECISION, - )), - Ok(None) => Ok(BigDecimal::zero()), - Err(err) => Err(internal_error(METHOD_NAME, err)), - }; - - method_latency.observe(); - result - } - #[tracing::instrument(skip(self))] pub async fn get_all_account_balances_impl( &self, diff --git a/core/lib/zksync_core/src/api_server/web3/tests/mod.rs b/core/lib/zksync_core/src/api_server/web3/tests/mod.rs index 1aa04fee519..a2e8998482b 100644 --- a/core/lib/zksync_core/src/api_server/web3/tests/mod.rs +++ b/core/lib/zksync_core/src/api_server/web3/tests/mod.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, pin::Pin, time::Instant}; +use std::{collections::HashMap, pin::Pin, slice, time::Instant}; use assert_matches::assert_matches; use async_trait::async_trait; @@ -684,7 +684,7 @@ impl HttpTest for TransactionCountTest { storage .storage_logs_dal() .insert_storage_logs(miniblock_number, &[(H256::zero(), vec![nonce_log])]) - .await; + .await?; } let pending_count = client.get_transaction_count(test_address, None).await?; @@ -880,7 +880,7 @@ impl HttpTest for AllAccountBalancesTest { storage .storage_logs_dal() .insert_storage_logs(MiniblockNumber(1), &[(H256::zero(), vec![eth_balance_log])]) - .await; + .await?; // Create a custom token, but don't set balance for it yet. let custom_token = TokenInfo { l1_address: Address::repeat_byte(0xfe), @@ -889,8 +889,8 @@ impl HttpTest for AllAccountBalancesTest { }; storage .tokens_dal() - .add_tokens(vec![custom_token.clone()]) - .await; + .add_tokens(slice::from_ref(&custom_token)) + .await?; let balances = client.get_all_account_balances(Self::ADDRESS).await?; assert_eq!(balances, HashMap::from([(Address::zero(), eth_balance)])); @@ -909,7 +909,7 @@ impl HttpTest for AllAccountBalancesTest { MiniblockNumber(2), &[(H256::zero(), vec![token_balance_log])], ) - .await; + .await?; let balances = client.get_all_account_balances(Self::ADDRESS).await?; assert_eq!( diff --git a/core/lib/zksync_core/src/api_server/web3/tests/vm.rs b/core/lib/zksync_core/src/api_server/web3/tests/vm.rs index d2832349ae9..22760c37feb 100644 --- a/core/lib/zksync_core/src/api_server/web3/tests/vm.rs +++ b/core/lib/zksync_core/src/api_server/web3/tests/vm.rs @@ -228,7 +228,7 @@ impl HttpTest for SendRawTransactionTest { MiniblockNumber(0), &[(H256::zero(), vec![Self::balance_storage_log()])], ) - .await; + .await?; } let (tx_bytes, tx_hash) = Self::transaction_bytes_and_hash(); @@ -456,7 +456,7 @@ impl HttpTest for EstimateGasTest { storage .storage_logs_dal() .append_storage_logs(MiniblockNumber(0), &[(H256::zero(), vec![storage_log])]) - .await; + .await?; } let mut call_request = CallRequest::from(l2_transaction); call_request.from = Some(SendRawTransactionTest::private_key_and_address().1); diff --git a/core/lib/zksync_core/src/block_reverter/mod.rs b/core/lib/zksync_core/src/block_reverter/mod.rs index 06ed7d5c239..3b1ab41becc 100644 --- a/core/lib/zksync_core/src/block_reverter/mod.rs +++ b/core/lib/zksync_core/src/block_reverter/mod.rs @@ -255,7 +255,8 @@ impl BlockReverter { transaction .tokens_dal() .rollback_tokens(last_miniblock_to_keep) - .await; + .await + .expect("failed rolling back created tokens"); 
tracing::info!("rolling back factory deps...."); transaction .factory_deps_dal() diff --git a/core/lib/zksync_core/src/genesis.rs b/core/lib/zksync_core/src/genesis.rs index e76d57c0b37..9dfd9b952ce 100644 --- a/core/lib/zksync_core/src/genesis.rs +++ b/core/lib/zksync_core/src/genesis.rs @@ -60,17 +60,17 @@ pub async fn ensure_genesis_state( zksync_chain_id: L2ChainId, genesis_params: &GenesisParams, ) -> anyhow::Result { - let mut transaction = storage.start_transaction().await.unwrap(); + let mut transaction = storage.start_transaction().await?; // return if genesis block was already processed - if !transaction.blocks_dal().is_genesis_needed().await.unwrap() { + if !transaction.blocks_dal().is_genesis_needed().await? { tracing::debug!("genesis is not needed!"); return transaction .blocks_dal() .get_l1_batch_state_root(L1BatchNumber(0)) .await - .unwrap() - .context("genesis block hash is empty"); + .context("failed fetching state root hash for genesis L1 batch")? + .context("genesis L1 batch hash is empty"); } tracing::info!("running regenesis"); @@ -95,11 +95,13 @@ pub async fn ensure_genesis_state( *first_l1_verifier_config, *first_verifier_address, ) - .await; + .await?; tracing::info!("chain_schema_genesis is complete"); let storage_logs = L1BatchWithLogs::new(&mut transaction, L1BatchNumber(0)).await; - let storage_logs = storage_logs.unwrap().storage_logs; + let storage_logs = storage_logs + .context("genesis L1 batch disappeared from Postgres")? + .storage_logs; let metadata = ZkSyncTree::process_genesis_batch(&storage_logs); let genesis_root_hash = metadata.root_hash; let rollup_last_leaf_index = metadata.leaf_count + 1; @@ -118,10 +120,10 @@ pub async fn ensure_genesis_state( genesis_root_hash, rollup_last_leaf_index, ) - .await; + .await?; tracing::info!("operations_schema_genesis is complete"); - transaction.commit().await.unwrap(); + transaction.commit().await?; // We need to `println` this value because it will be used to initialize the smart contract. 
println!("CONTRACTS_GENESIS_ROOT={:?}", genesis_root_hash); @@ -154,7 +156,7 @@ pub async fn ensure_genesis_state( async fn insert_base_system_contracts_to_factory_deps( storage: &mut StorageProcessor<'_>, contracts: &BaseSystemContracts, -) { +) -> anyhow::Result<()> { let factory_deps = [&contracts.bootloader, &contracts.default_aa] .iter() .map(|c| (c.hash, be_words_to_bytes(&c.code))) @@ -164,36 +166,35 @@ async fn insert_base_system_contracts_to_factory_deps( .factory_deps_dal() .insert_factory_deps(MiniblockNumber(0), &factory_deps) .await - .unwrap(); + .context("failed inserting base system contracts to Postgres") } async fn insert_system_contracts( storage: &mut StorageProcessor<'_>, contracts: &[DeployedContract], chain_id: L2ChainId, -) { +) -> anyhow::Result<()> { let system_context_init_logs = (H256::default(), get_system_context_init_logs(chain_id)); - let storage_logs: Vec<(H256, Vec)> = contracts + let storage_logs: Vec<_> = contracts .iter() .map(|contract| { let hash = hash_bytecode(&contract.bytecode); let code_key = get_code_key(contract.account_id.address()); - ( - Default::default(), + H256::default(), vec![StorageLog::new_write_log(code_key, hash)], ) }) .chain(Some(system_context_init_logs)) .collect(); - let mut transaction = storage.start_transaction().await.unwrap(); - + let mut transaction = storage.start_transaction().await?; transaction .storage_logs_dal() .insert_storage_logs(MiniblockNumber(0), &storage_logs) - .await; + .await + .context("failed inserting genesis storage logs")?; // we don't produce proof for the genesis block, // but we still need to populate the table @@ -221,7 +222,7 @@ async fn insert_system_contracts( is_service: false, } }) - .collect::>() + .collect::>() }) .collect(); @@ -249,7 +250,8 @@ async fn insert_system_contracts( transaction .storage_logs_dedup_dal() .insert_protective_reads(L1BatchNumber(0), &protective_reads) - .await; + .await + .context("failed inserting genesis protective reads")?; let written_storage_keys: Vec<_> = deduplicated_writes .iter() @@ -258,7 +260,8 @@ async fn insert_system_contracts( transaction .storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(0), &written_storage_keys) - .await; + .await + .context("failed inserting genesis initial writes")?; #[allow(deprecated)] transaction @@ -274,9 +277,10 @@ async fn insert_system_contracts( .factory_deps_dal() .insert_factory_deps(MiniblockNumber(0), &factory_deps) .await - .unwrap(); + .context("failed inserting bytecodes for genesis smart contracts")?; - transaction.commit().await.unwrap(); + transaction.commit().await?; + Ok(()) } #[allow(clippy::too_many_arguments)] @@ -289,7 +293,7 @@ pub(crate) async fn create_genesis_l1_batch( system_contracts: &[DeployedContract], l1_verifier_config: L1VerifierConfig, verifier_address: Address, -) { +) -> anyhow::Result<()> { let version = ProtocolVersion { id: protocol_version, timestamp: 0, @@ -321,7 +325,7 @@ pub(crate) async fn create_genesis_l1_batch( virtual_blocks: 0, }; - let mut transaction = storage.start_transaction().await.unwrap(); + let mut transaction = storage.start_transaction().await?; transaction .protocol_versions_dal() @@ -338,27 +342,30 @@ pub(crate) async fn create_genesis_l1_batch( Default::default(), ) .await - .unwrap(); + .context("failed inserting genesis L1 batch")?; transaction .blocks_dal() .insert_miniblock(&genesis_miniblock_header) .await - .unwrap(); + .context("failed inserting genesis miniblock")?; transaction .blocks_dal() 
.mark_miniblocks_as_executed_in_l1_batch(L1BatchNumber(0)) .await - .unwrap(); + .context("failed assigning genesis miniblock to L1 batch")?; - insert_base_system_contracts_to_factory_deps(&mut transaction, base_system_contracts).await; - insert_system_contracts(&mut transaction, system_contracts, chain_id).await; - - add_eth_token(&mut transaction).await; + insert_base_system_contracts_to_factory_deps(&mut transaction, base_system_contracts).await?; + insert_system_contracts(&mut transaction, system_contracts, chain_id) + .await + .context("cannot insert system contracts")?; + add_eth_token(&mut transaction).await?; - transaction.commit().await.unwrap(); + transaction.commit().await?; + Ok(()) } -pub(crate) async fn add_eth_token(storage: &mut StorageProcessor<'_>) { +async fn add_eth_token(transaction: &mut StorageProcessor<'_>) -> anyhow::Result<()> { + assert!(transaction.in_transaction()); // sanity check let eth_token = TokenInfo { l1_address: ETHEREUM_ADDRESS, l2_address: ETHEREUM_ADDRESS, @@ -369,27 +376,26 @@ pub(crate) async fn add_eth_token(storage: &mut StorageProcessor<'_>) { }, }; - let mut transaction = storage.start_transaction().await.unwrap(); - transaction .tokens_dal() - .add_tokens(vec![eth_token.clone()]) - .await; + .add_tokens(&[eth_token]) + .await + .context("failed adding Ether token")?; transaction .tokens_dal() - .update_well_known_l1_token(&ETHEREUM_ADDRESS, eth_token.metadata) - .await; - - transaction.commit().await.unwrap(); + .mark_token_as_well_known(ETHEREUM_ADDRESS) + .await + .context("failed marking Ether token as well-known")?; + Ok(()) } -pub(crate) async fn save_genesis_l1_batch_metadata( +async fn save_genesis_l1_batch_metadata( storage: &mut StorageProcessor<'_>, commitment: L1BatchCommitment, genesis_root_hash: H256, rollup_last_leaf_index: u64, -) { - let mut transaction = storage.start_transaction().await.unwrap(); +) -> anyhow::Result<()> { + let mut transaction = storage.start_transaction().await?; let tree_data = L1BatchTreeData { hash: genesis_root_hash, @@ -399,7 +405,7 @@ pub(crate) async fn save_genesis_l1_batch_metadata( .blocks_dal() .save_l1_batch_tree_data(L1BatchNumber(0), &tree_data) .await - .unwrap(); + .context("failed saving tree data for genesis L1 batch")?; let mut commitment_artifacts = commitment.artifacts(); // `l2_l1_merkle_root` for genesis batch is set to 0 on L1 contract, same must be here.
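The genesis path above now threads `anyhow::Result` through every step, attaching a message to each fallible call instead of unwrapping. A minimal sketch of the pattern (the step itself is hypothetical):

// Hedged sketch: `Context::context` wraps the underlying error with a readable
// message while preserving the source error chain for logging.
use anyhow::Context as _;

async fn run_genesis_step(storage: &mut StorageProcessor<'_>) -> anyhow::Result<()> {
    let transaction = storage.start_transaction().await?;
    // ... hypothetical genesis work on `transaction` ...
    transaction
        .commit()
        .await
        .context("failed committing genesis transaction")?; // was `.unwrap()`
    Ok(())
}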
@@ -409,9 +415,10 @@ pub(crate) async fn save_genesis_l1_batch_metadata( .blocks_dal() .save_l1_batch_commitment_artifacts(L1BatchNumber(0), &commitment_artifacts) .await - .unwrap(); + .context("failed saving commitment for genesis L1 batch")?; - transaction.commit().await.unwrap(); + transaction.commit().await?; + Ok(()) } #[cfg(test)] diff --git a/core/lib/zksync_core/src/metadata_calculator/helpers.rs b/core/lib/zksync_core/src/metadata_calculator/helpers.rs index b16e48d7302..1f51746bd99 100644 --- a/core/lib/zksync_core/src/metadata_calculator/helpers.rs +++ b/core/lib/zksync_core/src/metadata_calculator/helpers.rs @@ -714,7 +714,8 @@ mod tests { storage .storage_logs_dedup_dal() .insert_protective_reads(L1BatchNumber(2), &read_logs) - .await; + .await + .unwrap(); let l1_batch_with_logs = L1BatchWithLogs::new(&mut storage, L1BatchNumber(2)) .await diff --git a/core/lib/zksync_core/src/metadata_calculator/tests.rs b/core/lib/zksync_core/src/metadata_calculator/tests.rs index 795883b5a19..ece4c4e66fc 100644 --- a/core/lib/zksync_core/src/metadata_calculator/tests.rs +++ b/core/lib/zksync_core/src/metadata_calculator/tests.rs @@ -510,7 +510,8 @@ pub(super) async fn extend_db_state_from_l1_batch( storage .storage_logs_dal() .insert_storage_logs(miniblock_number, &[(H256::zero(), batch_logs)]) - .await; + .await + .unwrap(); storage .blocks_dal() .mark_miniblocks_as_executed_in_l1_batch(batch_number) @@ -549,7 +550,8 @@ async fn insert_initial_writes_for_batch( connection .storage_logs_dedup_dal() .insert_initial_writes(l1_batch_number, &keys_to_insert) - .await; + .await + .unwrap(); } pub(crate) fn gen_storage_logs( diff --git a/core/lib/zksync_core/src/state_keeper/batch_executor/tests/tester.rs b/core/lib/zksync_core/src/state_keeper/batch_executor/tests/tester.rs index a4cb9f676f2..9f30a9245aa 100644 --- a/core/lib/zksync_core/src/state_keeper/batch_executor/tests/tester.rs +++ b/core/lib/zksync_core/src/state_keeper/batch_executor/tests/tester.rs @@ -173,7 +173,8 @@ impl Tester { Default::default(), Default::default(), ) - .await; + .await + .unwrap(); } } @@ -199,7 +200,8 @@ impl Tester { storage .storage_logs_dal() .append_storage_logs(MiniblockNumber(0), &[(H256::zero(), vec![storage_log])]) - .await; + .await + .unwrap(); if storage .storage_logs_dedup_dal() .filter_written_slots(&[storage_log.key.hashed_key()]) @@ -210,6 +212,7 @@ impl Tester { .storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(0), &[storage_log.key]) .await + .unwrap(); } } } diff --git a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs index d9098bbabac..f835d776ba6 100644 --- a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs +++ b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs @@ -182,7 +182,8 @@ impl UpdatesManager { transaction .storage_logs_dedup_dal() .insert_protective_reads(l1_batch_env.number, &protective_reads) - .await; + .await + .unwrap(); progress.observe(protective_reads.len()); let progress = L1_BATCH_METRICS.start(L1BatchSealStage::FilterWrittenSlots); @@ -213,7 +214,8 @@ impl UpdatesManager { transaction .storage_logs_dedup_dal() .insert_initial_writes(l1_batch_env.number, &written_storage_keys) - .await; + .await + .unwrap(); progress.observe(deduplicated_writes.len()); let progress = L1_BATCH_METRICS.start(L1BatchSealStage::CommitL1Batch); @@ -373,7 +375,8 @@ impl MiniblockSealCommand { transaction .storage_logs_dal() .insert_storage_logs(miniblock_number, &write_logs) - .await; + .await + .unwrap(); 
progress.observe(write_log_count); #[allow(deprecated)] // Will be removed shortly @@ -405,7 +408,11 @@ impl MiniblockSealCommand { let progress = MINIBLOCK_METRICS.start(MiniblockSealStage::InsertTokens, is_fictive); let added_tokens_len = added_tokens.len(); if !added_tokens.is_empty() { - transaction.tokens_dal().add_tokens(added_tokens).await; + transaction + .tokens_dal() + .add_tokens(&added_tokens) + .await + .unwrap(); } progress.observe(added_tokens_len); diff --git a/core/lib/zksync_core/src/state_keeper/io/tests/tester.rs b/core/lib/zksync_core/src/state_keeper/io/tests/tester.rs index ca30aa1454e..892d6153b66 100644 --- a/core/lib/zksync_core/src/state_keeper/io/tests/tester.rs +++ b/core/lib/zksync_core/src/state_keeper/io/tests/tester.rs @@ -141,7 +141,8 @@ impl Tester { L1VerifierConfig::default(), Address::zero(), ) - .await; + .await + .unwrap(); } } diff --git a/core/lib/zksync_core/src/state_keeper/mempool_actor.rs b/core/lib/zksync_core/src/state_keeper/mempool_actor.rs index f3e566f53a6..1e864b06347 100644 --- a/core/lib/zksync_core/src/state_keeper/mempool_actor.rs +++ b/core/lib/zksync_core/src/state_keeper/mempool_actor.rs @@ -193,7 +193,8 @@ mod tests { storage .storage_logs_dal() .insert_storage_logs(MiniblockNumber(0), &[(H256::zero(), vec![nonce_log])]) - .await; + .await + .unwrap(); let other_transaction = create_l2_transaction(10, 100); let other_transaction_initiator = other_transaction.initiator_account(); @@ -347,7 +348,8 @@ mod tests { storage .storage_logs_dal() .append_storage_logs(MiniblockNumber(0), &[(H256::zero(), vec![nonce_log])]) - .await; + .await + .unwrap(); storage .transactions_dal() .insert_transaction_l2(transaction, TransactionExecutionMetrics::default()) diff --git a/core/lib/zksync_core/src/utils/testonly.rs b/core/lib/zksync_core/src/utils/testonly.rs index e3a8e834995..978b342a4b8 100644 --- a/core/lib/zksync_core/src/utils/testonly.rs +++ b/core/lib/zksync_core/src/utils/testonly.rs @@ -210,11 +210,13 @@ pub(crate) async fn prepare_recovery_snapshot( storage .storage_logs_dedup_dal() .insert_initial_writes(l1_batch.number, &written_keys) - .await; + .await + .unwrap(); storage .storage_logs_dal() .insert_storage_logs(miniblock.number, &[(H256::zero(), snapshot_logs.to_vec())]) - .await; + .await + .unwrap(); let snapshot_recovery = SnapshotRecoveryStatus { l1_batch_number: l1_batch.number, diff --git a/core/tests/ts-integration/tests/api/web3.test.ts b/core/tests/ts-integration/tests/api/web3.test.ts index 70ea34816e2..185c9d3dd07 100644 --- a/core/tests/ts-integration/tests/api/web3.test.ts +++ b/core/tests/ts-integration/tests/api/web3.test.ts @@ -215,9 +215,6 @@ describe('web3 API compatibility tests', () => { test('Should test various token methods', async () => { const tokens = await alice.provider.getConfirmedTokens(); expect(tokens).not.toHaveLength(0); // Should not be an empty array. - - const price = await alice.provider.getTokenPrice(l2Token); - expect(+price!).toEqual(expect.any(Number)); }); test('Should check transactions from API / Legacy tx', async () => { diff --git a/prover/Cargo.lock b/prover/Cargo.lock index b241bfb1cff..7d2468f95c5 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -6844,7 +6844,6 @@ dependencies = [ "chrono", "hex", "itertools 0.10.5", - "num", "once_cell", "prost", "rand 0.8.5",
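For reference, the `u256_to_big_decimal` rewrite earlier in this diff splits each little-endian `u64` limb of a `U256` into two `u32` digits and hands them to `BigUint::new`. A standalone sketch of the same digit-splitting (the function name is hypothetical):

// Mirrors the limb-splitting in the reworked `u256_to_big_decimal`.
use bigdecimal::BigDecimal;
use num::BigUint;

fn limbs_to_big_decimal(u64_digits: [u64; 4]) -> BigDecimal {
    let mut u32_digits = vec![0_u32; 8];
    for (i, &u64_digit) in u64_digits.iter().enumerate() {
        u32_digits[2 * i] = u64_digit as u32; // low 32 bits of the limb
        u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; // high 32 bits
    }
    // Scale 0 yields an integer-valued decimal.
    BigDecimal::new(BigUint::new(u32_digits).into(), 0)
}

// Example: [1, 0, 0, 0] yields "1"; [0, 1, 0, 0] yields 2^64 = 18446744073709551616.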