2 changes: 1 addition & 1 deletion packages/rs-dapi-client/src/dump.rs
@@ -105,7 +105,7 @@ impl<T: TransportRequest> DumpData<T> {
// Return request type (T) name without module prefix
fn request_type() -> String {
let req_type = std::any::type_name::<T>();
req_type.split(':').last().unwrap_or(req_type).to_string()
req_type.rsplit(':').next().unwrap_or(req_type).to_string()
}
/// Generate unique filename for this dump.
///
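The hunk above swaps `split(':').last()` for `rsplit(':').next()`. Both return the text after the final `:`, but `rsplit` walks from the end of the string, so `next()` answers immediately instead of scanning every segment. A minimal standalone sketch, using an illustrative type name rather than a real request type:

```rust
fn main() {
    // Illustrative type name only; `std::any::type_name::<T>()` produces strings
    // of this general shape (module path, then the bare type name).
    let req_type = "dapi_grpc::platform::v0::GetIdentityRequest";

    // Both expressions yield the segment after the last ':', but `rsplit` starts
    // at the end, so `next()` returns without traversing the whole iterator.
    let via_split = req_type.split(':').last().unwrap_or(req_type);
    let via_rsplit = req_type.rsplit(':').next().unwrap_or(req_type);

    assert_eq!(via_split, via_rsplit);
    assert_eq!(via_rsplit, "GetIdentityRequest");
}
```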
@@ -119,10 +119,10 @@ pub enum DistributionFunction {
/// - `step_count`: The number of periods between each step.
/// - `decrease_per_interval_numerator` and `decrease_per_interval_denominator`: Define the reduction factor per step.
/// - `start_decreasing_offset`: Optional start period offset (e.g., start block or time). If not provided, the contract creation start is used.
/// If this is provided before this number we give out the distribution start amount every interval.
/// If this is provided before this number we give out the distribution start amount every interval.
/// - `max_interval_count`: The maximum amount of intervals there can be. Can be up to 1024.
/// !!!Very important!!! -> This will default to 128 is default if not set.
/// This means that after 128 cycles we will be distributing trailing_distribution_interval_amount per interval.
/// !!!Very important!!! -> This will default to 128 is default if not set.
/// This means that after 128 cycles we will be distributing trailing_distribution_interval_amount per interval.
/// - `distribution_start_amount`: The initial token emission.
/// - `trailing_distribution_interval_amount`: The token emission after all decreasing intervals.
/// - `min_value`: Optional minimum emission value.
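As a reading aid for the parameter list above, here is a rough, hypothetical sketch of how a step-decreasing emission could be computed from these fields. It is not the real `DistributionFunction` evaluation: `start_decreasing_offset`, `min_value`, and overflow handling are ignored, and only the field names are taken from the docs; everything else is assumption.

```rust
/// Rough, hypothetical sketch only -- not the real `DistributionFunction` logic.
fn step_decreasing_emission(
    interval: u64,                              // intervals elapsed since the start
    step_count: u64,                            // periods between each reduction step
    decrease_num: u64,                          // reduction numerator per step
    decrease_den: u64,                          // reduction denominator per step
    max_interval_count: u64,                    // per the docs, defaults to 128 when unset
    distribution_start_amount: f64,             // initial emission
    trailing_distribution_interval_amount: f64, // emission once decreasing is done
) -> f64 {
    if interval >= max_interval_count {
        // After the decreasing cycles are exhausted, the trailing amount is emitted.
        return trailing_distribution_interval_amount;
    }
    let steps_taken = interval / step_count;
    let factor = (1.0 - decrease_num as f64 / decrease_den as f64).powi(steps_taken as i32);
    distribution_start_amount * factor
}

fn main() {
    // Example: halve the emission every 10 intervals, starting from 1000 tokens.
    let at = |i| step_decreasing_emission(i, 10, 1, 2, 128, 1000.0, 5.0);
    assert_eq!(at(0), 1000.0);
    assert_eq!(at(10), 500.0);
    assert_eq!(at(25), 250.0);
    assert_eq!(at(200), 5.0); // past `max_interval_count`: trailing amount
}
```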
@@ -524,6 +524,7 @@ pub enum DistributionFunction {
/// f(x) = 10000 * ln(5000 / x)
/// ```
/// - Values: a = 10000 n = 5000 m = 1 o = 0 b = 0 d = 0
/// ```text
/// y
/// ↑
/// 10000 |*
@@ -538,6 +539,7 @@
/// 1000 | *
/// 0 +-------------------*----------→ x
/// 0 2000 4000 6000 8000
/// ```
///
/// - The emission **starts high** and **gradually decreases**, ensuring early adopters receive
/// more tokens while later participants still get rewards.
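The other change in this file wraps the ASCII emission chart in a `text` fence, so rustdoc renders it verbatim instead of trying to compile it as a Rust doctest. For the logarithmic example quoted above (`f(x) = 10000 * ln(5000 / x)`), a tiny sketch confirming the documented shape: high early emission that decreases and reaches zero at `x = n`. Parameter names follow the doc comment; the rest is illustrative.

```rust
fn main() {
    // Example parameters from the doc comment: a = 10000, n = 5000 (m = 1, o = 0, b = 0).
    let (a, n) = (10_000.0_f64, 5_000.0_f64);
    let f = |x: f64| a * (n / x).ln();

    // Emission decreases monotonically and hits zero exactly at x = n.
    assert!(f(100.0) > f(1_000.0) && f(1_000.0) > f(4_000.0));
    assert!(f(5_000.0).abs() < 1e-9);

    println!("f(2000) = {:.0}, f(4000) = {:.0}", f(2_000.0), f(4_000.0));
}
```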
@@ -18,7 +18,7 @@ impl DocumentType {
token_configurations: &BTreeMap<TokenContractPosition, TokenConfiguration>,
data_contact_config: &DataContractConfig,
full_validation: bool,
validation_operations: &mut Vec<ProtocolValidationOperation>,
validation_operations: &mut impl Extend<ProtocolValidationOperation>,
platform_version: &PlatformVersion,
) -> Result<BTreeMap<String, DocumentType>, ProtocolError> {
let mut contract_document_types: BTreeMap<String, DocumentType> = BTreeMap::new();
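Several hunks in this PR relax `validation_operations: &mut Vec<ProtocolValidationOperation>` to `&mut impl Extend<ProtocolValidationOperation>`, and correspondingly rewrite `push(x)` as `extend(std::iter::once(x))`. Since `Vec<T>` already implements `Extend<T>`, existing callers keep working, while any other collector can now be passed. A small self-contained sketch with toy types (not the dpp ones):

```rust
use std::collections::BTreeSet;

// With `&mut impl Extend<T>` the caller decides how operations are collected;
// `push(x)` on a Vec becomes `extend(std::iter::once(x))` on any `Extend` sink.
fn record_operations(ops: &mut impl Extend<u64>) {
    ops.extend(std::iter::once(42));
}

fn main() {
    let mut as_vec: Vec<u64> = Vec::new();
    record_operations(&mut as_vec); // Vec<T>: Extend<T>, so old call sites still compile

    let mut as_set: BTreeSet<u64> = BTreeSet::new();
    record_operations(&mut as_set); // other collectors now work too

    assert_eq!(as_vec, vec![42]);
    assert!(as_set.contains(&42));
}
```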
@@ -46,7 +46,7 @@ impl DocumentType {
token_configurations: &BTreeMap<TokenContractPosition, TokenConfiguration>,
data_contact_config: &DataContractConfig,
full_validation: bool,
validation_operations: &mut Vec<ProtocolValidationOperation>,
validation_operations: &mut impl Extend<ProtocolValidationOperation>,
platform_version: &PlatformVersion,
) -> Result<Self, ProtocolError> {
match platform_version
@@ -68,7 +68,7 @@ impl DocumentTypeV0 {
schema_defs: Option<&BTreeMap<String, Value>>,
data_contact_config: &DataContractConfig,
full_validation: bool, // we don't need to validate if loaded from state
validation_operations: &mut Vec<ProtocolValidationOperation>,
validation_operations: &mut impl Extend<ProtocolValidationOperation>,
platform_version: &PlatformVersion,
) -> Result<Self, ProtocolError> {
// Create a full root JSON Schema from shorten contract document type schema
@@ -82,9 +82,7 @@
if full_validation {
// TODO we are silently dropping this error when we shouldn't be
// but returning this error causes tests to fail; investigate more.
ProtocolError::CorruptedCodeExecution(
"validation is not enabled but is being called on try_from_schema".to_string(),
);
"validation is not enabled but is being called on try_from_schema".to_string();
}

#[cfg(feature = "validation")]
@@ -112,18 +110,18 @@ impl DocumentTypeV0 {

let schema_size = result.into_data()?.size;

validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaValidationForSize(schema_size),
);
));

return Err(ProtocolError::ConsensusError(Box::new(error)));
}

let schema_size = result.into_data()?.size;

validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaValidationForSize(schema_size),
);
));

// Make sure JSON Schema is compilable
let root_json_schema = root_schema.try_to_validating_json().map_err(|e| {
@@ -200,11 +198,11 @@ impl DocumentTypeV0 {

#[cfg(feature = "validation")]
if full_validation {
validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaPropertyValidation(
property_values.values().len() as u64,
),
);
));

// We should validate that the positions are continuous
for (pos, value) in property_values.values().enumerate() {
@@ -302,12 +300,12 @@ impl DocumentTypeV0 {

#[cfg(feature = "validation")]
if full_validation {
validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaIndexValidation(
index.properties.len() as u64,
index.unique,
),
);
));

// Unique indices produces significant load on the system during state validation
// so we need to limit their number to prevent of spikes and DoS attacks
@@ -85,7 +85,7 @@ impl DocumentTypeV1 {
token_configurations: &BTreeMap<TokenContractPosition, TokenConfiguration>,
data_contact_config: &DataContractConfig,
full_validation: bool, // we don't need to validate if loaded from state
validation_operations: &mut Vec<ProtocolValidationOperation>,
validation_operations: &mut impl Extend<ProtocolValidationOperation>,
platform_version: &PlatformVersion,
) -> Result<Self, ProtocolError> {
// Create a full root JSON Schema from shorten contract document type schema
@@ -99,9 +99,7 @@
if full_validation {
// TODO we are silently dropping this error when we shouldn't be
// but returning this error causes tests to fail; investigate more.
ProtocolError::CorruptedCodeExecution(
"validation is not enabled but is being called on try_from_schema".to_string(),
);
"validation is not enabled but is being called on try_from_schema".to_string();
}

#[cfg(feature = "validation")]
@@ -129,18 +127,18 @@ impl DocumentTypeV1 {

let schema_size = result.into_data()?.size;

validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaValidationForSize(schema_size),
);
));

return Err(ProtocolError::ConsensusError(Box::new(error)));
}

let schema_size = result.into_data()?.size;

validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaValidationForSize(schema_size),
);
));

// Make sure JSON Schema is compilable
let root_json_schema = root_schema.try_to_validating_json().map_err(|e| {
@@ -217,11 +215,11 @@ impl DocumentTypeV1 {

#[cfg(feature = "validation")]
if full_validation {
validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaPropertyValidation(
property_values.values().len() as u64,
),
);
));

// We should validate that the positions are continuous
for (pos, value) in property_values.values().enumerate() {
@@ -319,12 +317,12 @@ impl DocumentTypeV1 {

#[cfg(feature = "validation")]
if full_validation {
validation_operations.push(
validation_operations.extend(std::iter::once(
ProtocolValidationOperation::DocumentTypeSchemaIndexValidation(
index.properties.len() as u64,
index.unique,
),
);
));

// Unique indices produces significant load on the system during state validation
// so we need to limit their number to prevent of spikes and DoS attacks
14 changes: 7 additions & 7 deletions packages/rs-dpp/src/data_contract/document_type/methods/mod.rs
@@ -212,15 +212,15 @@ pub trait DocumentTypeV0Methods: DocumentTypeV0Getters + DocumentTypeV0MethodsVe
/// - `id`: An identifier for the document. Unique within the context of the document's type.
/// - `owner_id`: The identifier of the entity that will own this document.
/// - `block_height`: The block height at which this document is considered to have been created.
/// While this value is recorded in the document, it is ignored when the document is broadcasted
/// to the network. This is because the actual block height at the time of broadcast may differ.
/// This parameter is included to fulfill schema requirements that specify a block height; you may
/// use the current block height, a placeholder value of 0, or any other value as necessary.
/// While this value is recorded in the document, it is ignored when the document is broadcasted
/// to the network. This is because the actual block height at the time of broadcast may differ.
/// This parameter is included to fulfill schema requirements that specify a block height; you may
/// use the current block height, a placeholder value of 0, or any other value as necessary.
/// - `core_block_height`: Similar to `block_height`, this represents the core network's block height
/// at the document's creation time. It is handled the same way as `block_height` regarding broadcast
/// and schema requirements.
/// at the document's creation time. It is handled the same way as `block_height` regarding broadcast
/// and schema requirements.
/// - `properties`: A collection of properties for the document, structured as a `BTreeMap<String, Value>`.
/// These must be pre-validated to match the document's schema definitions.
/// These must be pre-validated to match the document's schema definitions.
/// - `platform_version`: A reference to the current version of the platform for which the document is created.
///
/// # Returns:
1 change: 1 addition & 0 deletions packages/rs-dpp/src/data_contract/document_type/mod.rs
@@ -88,6 +88,7 @@ pub enum DocumentTypeMutRef<'a> {
V1(&'a mut DocumentTypeV1),
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, PartialEq, From)]
pub enum DocumentType {
V0(DocumentTypeV0),
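This `DocumentType` enum, like the error enums further down (`consensus_error.rs`, `protocol_error.rs`, and the data-contract error module), gains `#[allow(clippy::large_enum_variant)]`. The lint fires when one variant is much bigger than the rest, because every value of the enum is sized for the largest variant; the usual fix is to box the large payload, and the `allow` opts out of that. A toy illustration, not the dpp enums:

```rust
// Toy example of what clippy::large_enum_variant warns about: the enum is as big
// as its largest variant, so a rarely used large variant inflates every value.
#[allow(clippy::large_enum_variant)]
enum Message {
    Ping,
    // Boxing this payload (Box<[u8; 4096]>) would shrink the enum to roughly
    // pointer size at the cost of a heap allocation; here the lint is allowed instead.
    Bulk([u8; 4096]),
}

fn main() {
    println!("size_of::<Message>() = {}", std::mem::size_of::<Message>());
    let msg = Message::Bulk([0u8; 4096]);
    if let Message::Bulk(bytes) = &msg {
        println!("bulk payload length: {}", bytes.len());
    }
    let _ = Message::Ping;
}
```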
@@ -306,7 +306,7 @@ impl DocumentPropertyType {
let min_size = self.min_size()?;
let max_size = self.max_size()?;
if platform_version.protocol_version > 8 {
Some(((min_size as u32 + max_size as u32 + 1) / 2) as u16)
Some(((min_size as u32 + max_size as u32).div_ceil(2)) as u16)
} else {
Some(min_size.wrapping_add(max_size).wrapping_add(1) / 2)
}
@@ -342,7 +342,9 @@
return Ok(None);
};
if platform_version.protocol_version > 8 {
Ok(Some(((min_size as u32 + max_size as u32 + 1) / 2) as u16))
Ok(Some(
((min_size as u32 + max_size as u32).div_ceil(2)) as u16,
))
} else {
Ok(Some(min_size.wrapping_add(max_size).wrapping_add(1) / 2))
}
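In the two hunks above, for protocol versions above 8 the middle size is now computed with `div_ceil(2)` instead of the manual `(min + max + 1) / 2`. On unsigned integers the two are equivalent (average rounded up); `div_ceil` simply states the intent and drops the hand-written `+ 1`. A quick check:

```rust
fn main() {
    // For unsigned integers, (a + b).div_ceil(2) == (a + b + 1) / 2.
    for (min_size, max_size) in [(0u32, 0u32), (1, 2), (3, 8), (7, 7), (255, 256)] {
        assert_eq!(
            (min_size + max_size).div_ceil(2),
            (min_size + max_size + 1) / 2,
        );
    }
    // Middle of a 3..=8 byte range, rounded up:
    println!("{}", (3u32 + 8).div_ceil(2)); // prints 6
}
```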
10 changes: 4 additions & 6 deletions packages/rs-dpp/src/data_contract/extra/drive_api_tests.rs
@@ -40,7 +40,6 @@ mod test {
&[("$ownerId", "asc"), ("$updatedAt", "asc")],
),
],
..Default::default()
},
ExpectedDocumentsData {
document_name: "contactInfo",
@@ -49,7 +48,6 @@
("index1", true, &[("$ownerId", "asc")]),
("index2", false, &[("$ownerId", "asc"), ("lastName", "asc")]),
],
..Default::default()
},
ExpectedDocumentsData {
document_name: "contactRequest",
@@ -150,25 +148,25 @@
assert!(!contract.config().readonly()); // the contract shouldn't be readonly
assert!(!contract.config.documents_keep_history_contract_default());
assert_eq!(contract.document_types.len(), 3);
assert!(contract.document_types.get("profile").is_some());
assert!(contract.document_types.contains_key("profile"));
assert!(contract
.document_types
.get("profile")
.unwrap()
.documents_mutable());
assert!(contract.document_types.get("contactInfo").is_some());
assert!(contract.document_types.contains_key("contactInfo"));
assert!(contract
.document_types
.get("contactInfo")
.unwrap()
.documents_mutable());
assert!(contract.document_types.get("contactRequest").is_some());
assert!(contract.document_types.contains_key("contactRequest"));
assert!(!contract
.document_types
.get("contactRequest")
.unwrap()
.documents_mutable());
assert!(contract.document_types.get("non_existent_key").is_none());
assert!(!contract.document_types.contains_key("non_existent_key"));

let contact_info_indices = &contract
.document_types
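The assertions in this test switch from `get(key).is_some()` / `get(key).is_none()` to `contains_key(key)`. Both are equivalent on a `BTreeMap`; `contains_key` reads as the intended membership check and avoids building an `Option` only to discard it. A minimal illustration with a stand-in map:

```rust
use std::collections::BTreeMap;

fn main() {
    // Stand-in for `contract.document_types`; the values are irrelevant here.
    let document_types: BTreeMap<&str, ()> =
        BTreeMap::from([("profile", ()), ("contactInfo", ())]);

    assert_eq!(
        document_types.contains_key("profile"),
        document_types.get("profile").is_some()
    );
    assert!(document_types.contains_key("contactInfo"));
    assert!(!document_types.contains_key("non_existent_key"));
}
```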
@@ -103,6 +103,7 @@ use crate::consensus::basic::{
use crate::consensus::state::identity::master_public_key_update_error::MasterPublicKeyUpdateError;
use crate::data_contract::errors::DataContractError;

#[allow(clippy::large_enum_variant)]
#[derive(
Error, Debug, PlatformSerialize, PlatformDeserialize, Encode, Decode, PartialEq, Clone,
)]
1 change: 1 addition & 0 deletions packages/rs-dpp/src/errors/consensus/consensus_error.rs
@@ -27,6 +27,7 @@ use crate::errors::consensus::basic::BasicError;
)]
#[platform_serialize(limit = 2000)]
#[error(transparent)]
#[allow(clippy::large_enum_variant)]
pub enum ConsensusError {
/*

1 change: 1 addition & 0 deletions packages/rs-dpp/src/errors/protocol_error.rs
@@ -42,6 +42,7 @@ use crate::version::FeatureVersion;
use platform_value::{Error as ValueError, Value};
use platform_version::error::PlatformVersionError;

#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum ProtocolError {
#[error("Identifier Error: {0}")]
5 changes: 1 addition & 4 deletions packages/rs-dpp/src/fee/fee_result/refunds.rs
@@ -158,10 +158,7 @@ impl FeeRefunds {
) -> Option<Credits> {
let credits_per_epoch = self.get(identity_id.as_bytes())?;

let credits = credits_per_epoch
.iter()
.map(|(_epoch_index, credits)| credits)
.sum();
let credits = credits_per_epoch.values().sum();

Some(credits)
}
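The refund total is now summed with `credits_per_epoch.values().sum()` instead of iterating entries and mapping away the key: the same value with less noise. A standalone sketch with an illustrative map (epoch index to credits):

```rust
use std::collections::BTreeMap;

fn main() {
    // Illustrative refunds map: epoch index -> credits refunded in that epoch.
    let credits_per_epoch: BTreeMap<u16, u64> = BTreeMap::from([(0, 10), (3, 25), (7, 5)]);

    let via_values: u64 = credits_per_epoch.values().sum();
    let via_map: u64 = credits_per_epoch
        .iter()
        .map(|(_epoch_index, credits)| credits)
        .sum();

    assert_eq!(via_values, via_map);
    assert_eq!(via_values, 40);
}
```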