chore: upgrade rust to 1.73.0 (#14167)
## Description 

The main motivation is to pick up new toolchain changes such as
rust-lang/rust#115659.
Most of the changes here are autogenerated by the new formatter, so this is
NFC (no functional change).
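
For illustration, a minimal before/after sketch of the `let ... else` reformatting that accounts for most of this diff (the function and map below are invented for the example):

```rust
use std::collections::HashMap;

// Before this upgrade the formatter kept short `let ... else` statements on one line:
//     let Some(value) = map.get(&key) else { return Err(()) };
//
// The new formatter breaks the else branch into a block, as in the hunks below:
fn get_or_err(map: &HashMap<u32, String>, key: u32) -> Result<String, ()> {
    let Some(value) = map.get(&key) else {
        return Err(());
    };
    Ok(value.clone())
}

fn main() {
    let map = HashMap::from([(1, "one".to_string())]);
    assert_eq!(get_or_err(&map, 1), Ok("one".to_string()));
    assert_eq!(get_or_err(&map, 2), Err(()));
}
```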

## Test Plan 

Existing

---
If your changes are not user-facing and not a breaking change, you can
skip the following section. Otherwise, please indicate what changed, and
then add to the Release Notes section as highlighted during the release
process.

### Type of Change (Check all that apply)

- [ ] protocol change
- [ ] user-visible impact
- [ ] breaking change for client SDKs
- [ ] breaking change for FNs (FN binary must upgrade)
- [ ] breaking change for validators or node operators (must upgrade
binaries)
- [ ] breaking change for on-chain data layout
- [ ] necessitates a data wipe or data migration

### Release notes
oxade committed Oct 12, 2023
1 parent 2b532bc commit 6cf5727
Showing 196 changed files with 926 additions and 740 deletions.
5 changes: 5 additions & 0 deletions .cargo/config
@@ -22,6 +22,11 @@ move-clippy = [
"-Aclippy::new_without_default",
"-Aclippy::box_default",
"-Aclippy::manual_slice_size_calculation",
"-Aclippy::unwrap-or-default",
"-Aclippy::incorrect_partial_ord_impl_on_ord_type",
"-Aclippy::useless_attribute",
"-Aclippy::manual_while_let_some",
"-Aclippy::redundant_closure",
]

[build]
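For context, each `-Aclippy::...` entry added above allows (silences) a lint that the newer clippy would otherwise report for this workspace. A minimal sketch of the per-item equivalent, using the `unwrap_or_default` lint as an example (the function is hypothetical):

```rust
// Per-item form of the same allowance configured globally in .cargo/config.
#[allow(clippy::unwrap_or_default)]
fn label_or_empty(label: Option<String>) -> String {
    // clippy's `unwrap_or_default` lint would suggest `label.unwrap_or_default()` here.
    label.unwrap_or_else(String::new)
}

fn main() {
    assert_eq!(label_or_empty(None), "");
    assert_eq!(label_or_empty(Some("pct".into())), "pct");
}
```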
4 changes: 3 additions & 1 deletion crates/mysten-common/src/sync/notify_once.rs
@@ -35,7 +35,9 @@ impl NotifyOnce {
/// This method returns errors if called more then once
#[allow(clippy::result_unit_err)]
pub fn notify(&self) -> Result<(), ()> {
let Some(notify) = self.notify.lock().take() else { return Err(()) };
let Some(notify) = self.notify.lock().take() else {
return Err(());
};
// At this point all `register` either registered with current notify,
// or will be returning immediately
notify.notify_waiters();
4 changes: 3 additions & 1 deletion crates/mysten-common/src/sync/notify_read.rs
@@ -96,7 +96,9 @@ impl<K: Eq + Hash + Clone, V: Clone> NotifyRead<K, V> {
fn cleanup(&self, key: &K) {
let mut pending = self.pending(key);
// it is possible that registration was fulfilled before we get here
let Some(registrations) = pending.get_mut(key) else { return; };
let Some(registrations) = pending.get_mut(key) else {
return;
};
let mut count_deleted = 0usize;
registrations.retain(|s| {
let delete = s.is_closed();
2 changes: 1 addition & 1 deletion crates/mysten-metrics/src/histogram.rs
@@ -109,7 +109,7 @@ impl HistogramVec {
register_int_counter_vec_with_registry!(sum_name, desc, labels, registry).unwrap();
let count =
register_int_counter_vec_with_registry!(count_name, desc, labels, registry).unwrap();
let labels: Vec<_> = labels.iter().cloned().chain(["pct"].into_iter()).collect();
let labels: Vec<_> = labels.iter().cloned().chain(["pct"]).collect();
let gauge = register_int_gauge_vec_with_registry!(name, desc, &labels, registry).unwrap();
Self::new(gauge, sum, count, percentiles, name)
}
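The `histogram.rs` hunk above relies on `Iterator::chain` accepting any `IntoIterator`; arrays iterate by value since the 2021 edition, so the explicit `.into_iter()` was redundant. A standalone sketch:

```rust
fn main() {
    let labels: &[&str] = &["host", "route"];
    // `chain` takes an `IntoIterator`, so the array can be passed directly,
    // exactly as `["pct"].into_iter()` would be.
    let with_pct: Vec<&str> = labels.iter().cloned().chain(["pct"]).collect();
    assert_eq!(with_pct, ["host", "route", "pct"]);
}
```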
4 changes: 2 additions & 2 deletions crates/mysten-metrics/src/metered_channel.rs
@@ -17,7 +17,7 @@ use tokio::sync::mpsc::{
#[path = "tests/metered_channel_tests.rs"]
mod metered_channel_tests;

/// An [`mpsc::Sender`](tokio::sync::mpsc::Sender) with an [`IntGauge`]
/// An [`mpsc::Sender`] with an [`IntGauge`]
/// counting the number of currently queued items.
#[derive(Debug)]
pub struct Sender<T> {
@@ -34,7 +34,7 @@ impl<T> Clone for Sender<T> {
}
}

/// An [`mpsc::Receiver`](tokio::sync::mpsc::Receiver) with an [`IntGauge`]
/// An [`mpsc::Receiver`] with an [`IntGauge`]
/// counting the number of currently queued items.
#[derive(Debug)]
pub struct Receiver<T> {
2 changes: 1 addition & 1 deletion crates/sui-aws-orchestrator/src/measurement.rs
@@ -198,7 +198,7 @@ impl<T: BenchmarkType> MeasurementsCollection<T> {
pub fn add(&mut self, scraper_id: ScraperId, measurement: Measurement) {
self.scrapers
.entry(scraper_id)
.or_insert_with(Vec::new)
.or_default()
.push(measurement);
}

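`or_default()` is the shorter equivalent of `or_insert_with(Vec::new)` whenever the map's value type implements `Default`. A self-contained sketch with made-up key/value types:

```rust
use std::collections::HashMap;

fn main() {
    let mut by_scraper: HashMap<u32, Vec<f64>> = HashMap::new();
    // Same behaviour as `.entry(7).or_insert_with(Vec::new)`, because
    // `Vec::default()` produces an empty Vec just like `Vec::new()`.
    by_scraper.entry(7).or_default().push(1.25);
    by_scraper.entry(7).or_default().push(2.5);
    assert_eq!(by_scraper[&7], vec![1.25, 2.5]);
}
```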
2 changes: 1 addition & 1 deletion crates/sui-aws-orchestrator/src/protocol/sui.rs
@@ -259,7 +259,7 @@ impl ProtocolMetrics for SuiProtocol {
.validator_config_info
.expect("No validator in genesis")
.iter()
.zip(instances.into_iter())
.zip(instances)
.map(|(config, instance)| {
let path = format!(
"{}:{}{}",
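`Iterator::zip` also takes an `IntoIterator`, so passing the `Vec` directly is equivalent and the explicit `.into_iter()` adds nothing; the same cleanup recurs in several hunks further down. A minimal sketch:

```rust
fn main() {
    let configs = ["validator-0", "validator-1"];
    let instances = vec![10, 20];
    // `zip` consumes any `IntoIterator`, so the Vec can be passed by value.
    let pairs: Vec<(&str, i32)> = configs.iter().copied().zip(instances).collect();
    assert_eq!(pairs, [("validator-0", 10), ("validator-1", 20)]);
}
```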
2 changes: 1 addition & 1 deletion crates/sui-benchmark/src/bank.rs
@@ -98,7 +98,7 @@ impl BenchmarkBank {
async fn pay_sui(
&mut self,
coin_configs: &[GasCoinConfig],
mut init_coin: &mut Gas,
init_coin: &mut Gas,
gas_price: u64,
) -> Result<UpdatedAndNewlyMintedGasCoins> {
let recipient_addresses: Vec<SuiAddress> = coin_configs.iter().map(|g| g.address).collect();
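The `bank.rs` hunk above drops a `mut` that the binding never needed: the parameter is already a `&mut` reference, so the data can be mutated through it, and `mut` on the binding itself is only required when the reference is reassigned (otherwise the compiler's `unused_mut` lint fires). A sketch with invented types:

```rust
fn push_through_ref(coin: &mut Vec<u64>) {
    // Mutating through the reference needs no `mut` on the binding.
    coin.push(42);
}

fn rebind_ref<'a>(mut coin: &'a mut Vec<u64>, spare: &'a mut Vec<u64>) {
    coin.push(3); // use the original reference first
    // `mut coin` is required because the binding itself is reassigned below.
    coin = spare;
    coin.push(7);
}

fn main() {
    let (mut a, mut b) = (vec![1], vec![2]);
    push_through_ref(&mut a);
    rebind_ref(&mut a, &mut b);
    assert_eq!((a, b), (vec![1, 42, 3], vec![2, 7]));
}
```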
7 changes: 5 additions & 2 deletions crates/sui-cluster-test/src/test_case/shared_object_test.rs
@@ -63,8 +63,11 @@ impl TestCaseImpl for SharedCounterTest {
.mutated()
.iter()
.find_map(|obj| {
let Owner::Shared { initial_shared_version } = obj.owner else {
return None
let Owner::Shared {
initial_shared_version,
} = obj.owner
else {
return None;
};

if obj.reference.object_id == counter_id
51 changes: 36 additions & 15 deletions crates/sui-core/src/authority.rs
@@ -1670,8 +1670,10 @@ impl AuthorityState {
);
assert_eq!(new_object.version(), oref.1, "tx_digest={:?} error processing object owner index, object {:?} from written has mismatched version. Actual: {}, expected: {}", tx_digest, id, new_object.version(), oref.1);

let Some(df_info) = self.try_create_dynamic_field_info(new_object, written, module_resolver)
.expect("try_create_dynamic_field_info should not fail.") else {
let Some(df_info) = self
.try_create_dynamic_field_info(new_object, written, module_resolver)
.expect("try_create_dynamic_field_info should not fail.")
else {
// Skip indexing for non dynamic field objects.
continue;
};
@@ -2109,8 +2111,8 @@ impl AuthorityState {
genesis_objects: &[Object],
epoch_store: &Arc<AuthorityPerEpochStore>,
) -> SuiResult {
let Some(index_store) = &self.indexes else{
return Ok(())
let Some(index_store) = &self.indexes else {
return Ok(());
};
if !index_store.is_empty() {
return Ok(());
@@ -2126,7 +2128,12 @@
)),
Owner::ObjectOwner(object_id) => {
let id = o.id();
let Some(info) = self.try_create_dynamic_field_info(o, &BTreeMap::new(), epoch_store.module_cache())? else{
let Some(info) = self.try_create_dynamic_field_info(
o,
&BTreeMap::new(),
epoch_store.module_cache(),
)?
else {
continue;
};
new_dynamic_fields.push(((ObjectID::from(object_id), id), info));
@@ -2532,15 +2539,19 @@ impl AuthorityState {
epoch_store: &AuthorityPerEpochStore,
) -> SuiResult<Option<VerifiedCheckpoint>> {
let checkpoint = self.get_transaction_checkpoint_sequence(digest, epoch_store)?;
let Some(checkpoint) = checkpoint else { return Ok(None); };
let Some(checkpoint) = checkpoint else {
return Ok(None);
};
let checkpoint = self
.checkpoint_store
.get_checkpoint_by_sequence_number(checkpoint)?;
Ok(checkpoint)
}

pub fn get_object_read(&self, object_id: &ObjectID) -> SuiResult<ObjectRead> {
let Some((object_key, store_object)) = self.database.get_latest_object_or_tombstone(*object_id)? else {
let Some((object_key, store_object)) =
self.database.get_latest_object_or_tombstone(*object_id)?
else {
return Ok(ObjectRead::NotExists(*object_id));
};
if let Some(object_ref) = self
@@ -2608,7 +2619,10 @@ impl AuthorityState {
version: SequenceNumber,
) -> SuiResult<PastObjectRead> {
// Firstly we see if the object ever existed by getting its latest data
let Some(obj_ref) = self.database.get_latest_object_ref_or_tombstone(*object_id)? else {
let Some(obj_ref) = self
.database
.get_latest_object_ref_or_tombstone(*object_id)?
else {
return Ok(PastObjectRead::ObjectNotExists(*object_id));
};

@@ -3626,7 +3640,9 @@ impl AuthorityState {
system_package.dependencies().to_vec(),
max_binary_format_version,
no_extraneous_module_bytes,
).await else {
)
.await
else {
return vec![];
};
results.push(obj_ref);
@@ -3908,11 +3924,14 @@ impl AuthorityState {
// since system packages are created during the current epoch, they should abide by the
// rules of the current epoch, including the current epoch's max Move binary format version
let config = epoch_store.protocol_config();
let Some(next_epoch_system_package_bytes) = self.get_system_package_bytes(
next_epoch_system_packages.clone(),
config.move_binary_format_version(),
config.no_extraneous_module_bytes(),
).await else {
let Some(next_epoch_system_package_bytes) = self
.get_system_package_bytes(
next_epoch_system_packages.clone(),
config.move_binary_format_version(),
config.no_extraneous_module_bytes(),
)
.await
else {
error!(
"upgraded system packages {:?} are not locally available, cannot create \
ChangeEpochTx. validator binary must be upgraded to the correct version!",
@@ -3926,7 +3945,9 @@
// state sync, and execute it. This will upgrade the framework packages, reconfigure,
// and most likely shut down in the new epoch (this validator likely doesn't support
// the new protocol version, or else it should have had the packages.)
return Err(anyhow!("missing system packages: cannot form ChangeEpochTx"));
return Err(anyhow!(
"missing system packages: cannot form ChangeEpochTx"
));
};

let tx = if epoch_store
4 changes: 2 additions & 2 deletions crates/sui-core/src/authority/authority_notify_read.rs
@@ -55,7 +55,7 @@ impl EffectsNotifyRead for Arc<AuthorityStore> {
let mut needs_wait = false;
let mut results: FuturesUnordered<_> = effects
.into_iter()
.zip(registrations.into_iter())
.zip(registrations)
.map(|(e, r)| match e {
// Note that Some() clause also drops registration that is already fulfilled
Some(ready) => Either::Left(futures::future::ready(ready)),
@@ -100,7 +100,7 @@ impl EffectsNotifyRead for Arc<AuthorityStore> {

let results = effects_digests
.into_iter()
.zip(registrations.into_iter())
.zip(registrations)
.map(|(a, r)| match a {
// Note that Some() clause also drops registration that is already fulfilled
Some(ready) => Either::Left(futures::future::ready(ready)),
8 changes: 7 additions & 1 deletion crates/sui-core/src/authority/authority_per_epoch_store.rs
@@ -1296,7 +1296,13 @@ impl AuthorityPerEpochStore {
let mut result = Vec::with_capacity(digests.len());
for (signatures, digest) in signatures.into_iter().zip(digests.iter()) {
let Some(signatures) = signatures else {
return Err(SuiError::from(format!("Can not find user signature for checkpoint for transaction {:?}", digest).as_str()));
return Err(SuiError::from(
format!(
"Can not find user signature for checkpoint for transaction {:?}",
digest
)
.as_str(),
));
};
result.push(signatures);
}
10 changes: 6 additions & 4 deletions crates/sui-core/src/authority/authority_store.rs
@@ -1496,7 +1496,7 @@ impl AuthorityStore {
pub async fn revert_state_update(&self, tx_digest: &TransactionDigest) -> SuiResult {
let Some(effects) = self.get_executed_effects(tx_digest)? else {
debug!("Not reverting {:?} as it was not executed", tx_digest);
return Ok(())
return Ok(());
};

info!(?tx_digest, ?effects, "reverting transaction");
@@ -1991,7 +1991,7 @@ impl ChildObjectResolver for AuthorityStore {
let Some(child_object) =
self.find_object_lt_or_eq_version(*child, child_version_upper_bound)
else {
return Ok(None)
return Ok(None);
};

let parent = *parent;
@@ -2012,8 +2012,10 @@
receive_object_at_version: SequenceNumber,
epoch_id: EpochId,
) -> SuiResult<Option<Object>> {
let Some(recv_object) = self.get_object_by_key(receiving_object_id, receive_object_at_version)? else {
return Ok(None)
let Some(recv_object) =
self.get_object_by_key(receiving_object_id, receive_object_at_version)?
else {
return Ok(None);
};

// Check for:
11 changes: 9 additions & 2 deletions crates/sui-core/src/authority/authority_store_pruner.rs
@@ -339,7 +339,12 @@ impl AuthorityStorePruner {
let mut effects_to_prune = vec![];

loop {
let Some(ckpt) = checkpoint_store.certified_checkpoints.get(&(checkpoint_number + 1))? else {break;};
let Some(ckpt) = checkpoint_store
.certified_checkpoints
.get(&(checkpoint_number + 1))?
else {
break;
};
let checkpoint = ckpt.into_inner();
// Skipping because checkpoint's epoch or checkpoint number is too new.
// We have to respect the highest executed checkpoint watermark because there might be
@@ -450,7 +455,9 @@
}
sst_file_for_compaction = Some(sst_file);
}
let Some(sst_file) = sst_file_for_compaction else {return Ok(None);};
let Some(sst_file) = sst_file_for_compaction else {
return Ok(None);
};
info!(
"Manual compaction of sst file {:?}. Size: {:?}, level: {:?}",
sst_file.name, sst_file.size, sst_file.level
12 changes: 8 additions & 4 deletions crates/sui-core/src/authority/authority_store_tables.rs
@@ -154,10 +154,12 @@ impl AuthorityPerpetualTables {
object_id: ObjectID,
version: SequenceNumber,
) -> Option<Object> {
let Ok(iter) = self.objects
let Ok(iter) = self
.objects
.range_iter(ObjectKey::min_for_id(&object_id)..=ObjectKey::max_for_id(&object_id))
.skip_prior_to(&ObjectKey(object_id, version))else {
return None
.skip_prior_to(&ObjectKey(object_id, version))
else {
return None;
};
iter.reverse()
.next()
@@ -186,7 +188,9 @@
object_key: &ObjectKey,
store_object: StoreObjectWrapper,
) -> Result<Option<Object>, SuiError> {
let StoreObject::Value(store_object) = store_object.migrate().into_inner() else {return Ok(None)};
let StoreObject::Value(store_object) = store_object.migrate().into_inner() else {
return Ok(None);
};
Ok(Some(self.construct_object(object_key, store_object)?))
}

8 changes: 6 additions & 2 deletions crates/sui-core/src/checkpoints/causal_order.rs
@@ -151,9 +151,13 @@ impl RWLockDependencyBuilder {
digest: TransactionDigest,
v: &mut BTreeSet<TransactionDigest>,
) {
let Some(overwrites) = self.overwrite_versions.get(&digest) else {return;};
let Some(overwrites) = self.overwrite_versions.get(&digest) else {
return;
};
for obj_ver in overwrites {
let Some(reads) = self.read_version.get(obj_ver) else {continue;};
let Some(reads) = self.read_version.get(obj_ver) else {
continue;
};
for dep in reads {
trace!(
"Assuming additional dependency when constructing checkpoint {:?} -> {:?}",
11 changes: 5 additions & 6 deletions crates/sui-core/src/checkpoints/checkpoint_executor/mod.rs
@@ -290,10 +290,9 @@ impl CheckpointExecutor {
let Some(latest_synced_checkpoint) = self
.checkpoint_store
.get_highest_synced_checkpoint()
.expect("Failed to read highest synced checkpoint") else {
debug!(
"No checkpoints to schedule, highest synced checkpoint is None",
);
.expect("Failed to read highest synced checkpoint")
else {
debug!("No checkpoints to schedule, highest synced checkpoint is None",);
return;
};

@@ -936,7 +935,7 @@ fn get_unexecuted_transactions(
let executable_txns: Vec<_> = if let Some(full_contents_txns) = full_contents_txns {
unexecuted_txns
.into_iter()
.zip(expected_effects_digests.into_iter())
.zip(expected_effects_digests)
.map(|(tx_digest, expected_effects_digest)| {
let tx = &full_contents_txns.get(&tx_digest).unwrap().transaction;
(
Expand All @@ -954,7 +953,7 @@ fn get_unexecuted_transactions(
.multi_get_transaction_blocks(&unexecuted_txns)
.expect("Failed to get checkpoint txes from store")
.into_iter()
.zip(expected_effects_digests.into_iter())
.zip(expected_effects_digests)
.enumerate()
.map(|(i, (tx, expected_effects_digest))| {
let tx = tx.unwrap_or_else(||