Address multiple cargo clippy warnings #26
chore: Address multiple cargo clippy warnings
DarkingLee authored Oct 2, 2023
2 parents 7cea3b7 + ac6f61c commit 8d6fe25
Showing 10 changed files with 51 additions and 54 deletions.
2 changes: 1 addition & 1 deletion crates/core-primitives/src/testing.rs
@@ -81,7 +81,7 @@ impl HeaderCommitList for CommitListTestWithData {
impl CommitListTestWithData {
/// Converts the static `TEST_COMMITMENTS` into bytes.
pub fn commit_bytes() -> Vec<u8> {
-		TEST_COMMITMENTS.iter().map(|c| c.to_bytes()).flatten().collect()
+		TEST_COMMITMENTS.iter().flat_map(|c| c.to_bytes()).collect()
}

/// Creates a `HeaderExtension` with the bytes representation of `TEST_COMMITMENTS`.
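The change above is the usual fix for clippy's `map_flatten` lint: a `map` immediately followed by `flatten` collapses into a single `flat_map`. A minimal standalone illustration (not code from this repository):

```rust
fn main() {
    let words = ["alpha", "beta"];

    // Before: `map` followed by `flatten` is what clippy::map_flatten flags.
    let chars_before: Vec<char> = words.iter().map(|w| w.chars()).flatten().collect();

    // After: one combinator, same result.
    let chars_after: Vec<char> = words.iter().flat_map(|w| w.chars()).collect();

    assert_eq!(chars_before, chars_after);
}
```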
1 change: 1 addition & 0 deletions crates/core-primitives/src/traits.rs
@@ -63,6 +63,7 @@ pub trait HeaderCommitList {

sp_api::decl_runtime_apis! {
/// Extracts the `data` field from some types of extrinsics.
+	#[allow(clippy::ptr_arg, clippy::type_complexity)]
pub trait Extractor {
fn extract(
extrinsic: &Vec<u8>,
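The new `#[allow(clippy::ptr_arg, clippy::type_complexity)]` sits on a trait generated inside `sp_api::decl_runtime_apis!`, where the `extrinsic: &Vec<u8>` signature cannot easily change, so the lints are allowed rather than fixed. A standalone sketch (hypothetical function names, not from this repository) of what `ptr_arg` flags and the rewrite clippy suggests when a signature is free to change:

```rust
// Taking `&Vec<u8>` forces callers to own a Vec, while `&[u8]` accepts more types.
#[allow(clippy::ptr_arg)]
fn data_len_fixed_signature(extrinsic: &Vec<u8>) -> usize {
    // Signature kept as-is (e.g. dictated by a macro), so the lint is allowed instead.
    extrinsic.len()
}

// The rewrite clippy suggests when the signature can change.
fn data_len(extrinsic: &[u8]) -> usize {
    extrinsic.len()
}

fn main() {
    let xt = vec![1u8, 2, 3];
    assert_eq!(data_len_fixed_signature(&xt), data_len(&xt));
}
```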
71 changes: 34 additions & 37 deletions crates/das-network/src/dht_work.rs
@@ -118,36 +118,33 @@ where
for (key, value) in values {
let maybe_sidecar =
Sidecar::from_local_outside::<B, BE>(key.as_ref(), &mut self.offchain_db);
-				match maybe_sidecar {
-					Some(sidecar) => {
-						if sidecar.status.is_none() {
-							let data_hash = Sidecar::calculate_id(&value);
-							let mut new_sidecar = sidecar.clone();
-							if data_hash != sidecar.metadata.blobs_hash.as_bytes() {
-								new_sidecar.status = Some(SidecarStatus::ProofError);
-							} else {
-								let kzg = KZG::default_embedded();
-								// TODO bytes to blobs
-								let blobs = bytes_vec_to_blobs(&[value.clone()], 1).unwrap();
-								let encoding_valid = Blob::verify_batch(
-									&blobs,
-									&sidecar.metadata.commitments,
-									&sidecar.metadata.proofs,
-									&kzg,
-									FIELD_ELEMENTS_PER_BLOB,
-								)
-								.unwrap();
-								if encoding_valid {
-									new_sidecar.blobs = Some(value.clone());
-									new_sidecar.status = Some(SidecarStatus::Success);
-								} else {
-									new_sidecar.status = Some(SidecarStatus::ProofError);
-								}
-							}
-							new_sidecar.save_to_local_outside::<B, BE>(&mut self.offchain_db)
-						}
-					},
-					None => {},
-				}
+				if let Some(sidecar) = maybe_sidecar {
+					if sidecar.status.is_none() {
+						let data_hash = Sidecar::calculate_id(&value);
+						let mut new_sidecar = sidecar.clone();
+						if data_hash != sidecar.metadata.blobs_hash.as_bytes() {
+							new_sidecar.status = Some(SidecarStatus::ProofError);
+						} else {
+							let kzg = KZG::default_embedded();
+							// TODO bytes to blobs
+							let blobs = bytes_vec_to_blobs(&[value.clone()], 1).unwrap();
+							let encoding_valid = Blob::verify_batch(
+								&blobs,
+								&sidecar.metadata.commitments,
+								&sidecar.metadata.proofs,
+								&kzg,
+								FIELD_ELEMENTS_PER_BLOB,
+							)
+							.unwrap();
+							if encoding_valid {
+								new_sidecar.blobs = Some(value.clone());
+								new_sidecar.status = Some(SidecarStatus::Success);
+							} else {
+								new_sidecar.status = Some(SidecarStatus::ProofError);
+							}
+						}
+						new_sidecar.save_to_local_outside::<B, BE>(&mut self.offchain_db)
+					}
+				}
			}
		}
@@ -156,23 +153,23 @@
fn handle_dht_value_not_found_event(&mut self, key: KademliaKey) {
let maybe_sidecar =
Sidecar::from_local_outside::<B, BE>(key.as_ref(), &mut self.offchain_db);
-		match maybe_sidecar {
-			Some(sidecar) => {
-				if sidecar.status.is_none() {
-					let mut new_sidecar = sidecar.clone();
-					new_sidecar.status = Some(SidecarStatus::NotFound);
-					new_sidecar.save_to_local_outside::<B, BE>(&mut self.offchain_db)
-				}
-			},
-			None => {},
-		}
+		if let Some(sidecar) = maybe_sidecar {
+			if sidecar.status.is_none() {
+				let mut new_sidecar = sidecar.clone();
+				new_sidecar.status = Some(SidecarStatus::NotFound);
+				new_sidecar.save_to_local_outside::<B, BE>(&mut self.offchain_db)
+			}
+		}
}

// Processes messages coming from the main service.
fn process_message_from_service(&self, msg: ServicetoWorkerMsg) {
match msg {
ServicetoWorkerMsg::PutValueToDht(key, value, sender) => {
-				let _ = sender.send(Some(self.network.put_value(key, value)));
+				let _ = sender.send({
+					self.network.put_value(key, value);
+					Some(())
+				});
},
}
}
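Both hunks in this file replace a `match` whose `None` arm does nothing with `if let` (clippy's `single_match` lint); the data-hash check and KZG batch verification are unchanged, only de-nested. The `sender.send` change appears to address `unit_arg`, since `put_value` returns `()` and its result was being passed inside `Some(...)`. A self-contained sketch of both patterns with made-up types (not from this repository):

```rust
struct Sidecar {
    status: Option<u8>,
}

fn lookup() -> Option<Sidecar> {
    Some(Sidecar { status: None })
}

fn main() {
    // Before: the empty `None` arm is what clippy::single_match flags.
    match lookup() {
        Some(sidecar) => {
            if sidecar.status.is_none() {
                println!("pending");
            }
        },
        None => {},
    }

    // After: the same logic with `if let`.
    if let Some(sidecar) = lookup() {
        if sidecar.status.is_none() {
            println!("pending");
        }
    }

    // clippy::unit_arg: `do_work()` returns `()`, so `Some(do_work())` passes a unit
    // value as an argument; the fix runs the call in a block and sends `Some(())`.
    fn do_work() {}
    let _before = Some(do_work()); // flagged
    let _after = {
        do_work();
        Some(())
    };
}
```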
1 change: 1 addition & 0 deletions crates/das-network/src/lib.rs
@@ -72,6 +72,7 @@ pub fn new_service(to_worker: mpsc::Sender<ServicetoWorkerMsg>) -> Service {
}

/// Conveniently creates both a Worker and Service with the given parameters.
+#[allow(clippy::type_complexity)]
pub fn new_worker_and_service<B, Client, Network, DhtEventStream, BE>(
client: Arc<Client>,
network: Arc<Network>,
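`clippy::type_complexity` fires on signatures whose types nest past a threshold, presumably the case for `new_worker_and_service`, which returns several generic components at once; the allow keeps the signature as-is. An illustrative sketch with hypothetical types, showing the allow and the type-alias alternative clippy usually suggests:

```rust
use std::collections::HashMap;

// The kind of return type clippy::type_complexity complains about once the
// nesting crosses its threshold (hypothetical types, not the crate's).
#[allow(clippy::type_complexity)]
fn build_parts() -> (
    Vec<(String, u64)>,
    HashMap<String, Vec<u8>>,
    Option<Box<dyn Fn(u32) -> u32>>,
) {
    (Vec::new(), HashMap::new(), None)
}

// The usual alternative to the allow: name the type once with an alias.
type Parts = (
    Vec<(String, u64)>,
    HashMap<String, Vec<u8>>,
    Option<Box<dyn Fn(u32) -> u32>>,
);

fn build_parts_aliased() -> Parts {
    (Vec::new(), HashMap::new(), None)
}

fn main() {
    let (_pairs, _map, _hook) = build_parts();
    let _parts: Parts = build_parts_aliased();
}
```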
2 changes: 1 addition & 1 deletion crates/das-rpc/src/lib.rs
@@ -136,7 +136,7 @@ where
let tx_hash = self.pool.submit_one(&at, TX_SOURCE, xt).await.map_err(|e| {
e.into_pool_error()
.map(|e| Error::TransactionPushFailed(Box::new(e)))
-				.unwrap_or_else(|e| Error::TransactionPushFailed(Box::new(e)).into())
+				.unwrap_or_else(|e| Error::TransactionPushFailed(Box::new(e)))
})?;

let metadata = SidecarMetadata { data_len, blobs_hash: data_hash, commitments, proofs };
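Dropping the trailing `.into()` looks like a `useless_conversion` fix: the closure already yields the error type the surrounding call must return, so converting it again is a no-op. A standalone sketch with a hypothetical `Error` enum standing in for das-rpc's error type:

```rust
#[derive(Debug)]
enum Error {
    TransactionPushFailed(String),
}

fn submit(raw: &str) -> Result<u32, String> {
    raw.parse::<u32>().map_err(|e| e.to_string())
}

fn main() -> Result<(), Error> {
    let value = submit("42").map_err(|e| {
        // Before: `Error::TransactionPushFailed(e).into()` converted `Error`
        // into `Error`, which clippy reports as a useless conversion.
        // After: return the value directly.
        Error::TransactionPushFailed(e)
    })?;
    assert_eq!(value, 42);
    Ok(())
}
```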
4 changes: 2 additions & 2 deletions crates/meloxt/src/run_examples.rs
@@ -44,7 +44,7 @@ async fn main() -> Result<()> {
async fn run_example(example: &str) -> Result<()> {
// Execute the example using cargo. It assumes that the example can be run using the cargo command.
let status = TokioCommand::new("cargo")
-		.args(&["run", "--release", "--example", example])
+		.args(["run", "--release", "--example", example])
.status()
.await?;

@@ -63,7 +63,7 @@ async fn fetch_all_examples() -> Result<Vec<String>> {
// Use tokio's spawn_blocking to run a blocking operation in the context of an asynchronous function.
let output = tokio::task::spawn_blocking(move || {
std::process::Command::new("cargo")
-			.args(&["run", "--release", "--example"])
+			.args(["run", "--release", "--example"])
.stderr(Stdio::piped())
.output()
})
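Passing `&[...]` to `Command::args` is reported as an unnecessary borrow (`needless_borrow`) on recent toolchains: `args` accepts any `IntoIterator` whose items are `AsRef<OsStr>`, and an array satisfies that directly. A runnable sketch (not from this repository):

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Before: `.args(&["--version"])` — the `&` is unnecessary.
    // After: pass the array directly.
    let output = Command::new("cargo").args(["--version"]).output()?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```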
10 changes: 4 additions & 6 deletions crates/pallet-melo-store/src/lib.rs
@@ -416,7 +416,7 @@ pub mod pallet {
let app_id = AppId::<T>::get() + 1;
AppId::<T>::put(app_id);
Self::deposit_event(Event::AppIdRegistered { app_id, from: who });
-			Ok(().into())
+			Ok(())
}
}

@@ -468,7 +468,7 @@ pub mod pallet {
let keys = Keys::<T>::get();

let authority_id =
-				match keys.get(unavailable_data_report.authority_index.clone() as usize) {
+				match keys.get(unavailable_data_report.authority_index as usize) {
Some(id) => id,
None => return InvalidTransaction::Stale.into(),
};
@@ -535,8 +535,7 @@ impl<T: Config> Pallet<T> {
pub fn get_commitment_list(at_block: BlockNumberFor<T>) -> Vec<KZGCommitment> {
Metadata::<T>::get(at_block)
.iter()
-			.map(|metadata| metadata.commitments.clone())
-			.flatten()
+			.flat_map(|metadata| metadata.commitments.clone())
.collect::<Vec<_>>()
}

@@ -548,7 +547,6 @@ impl<T: Config> Pallet<T> {
now: BlockNumberFor<T>,
) -> OffchainResult<T, impl Iterator<Item = OffchainResult<T, ()>>> {
let reports = (0..DELAY_CHECK_THRESHOLD)
-			.into_iter()
.filter_map(move |gap| {
if T::BlockNumber::from(gap) > now {
return None;
@@ -569,7 +567,7 @@ impl<T: Config> Pallet<T> {
None
}
})
-			.flat_map(|it| it);
+			.flatten();

Ok(reports)
}
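The pallet edits bundle several small lints: `Ok(().into())` → `Ok(())` and the removed `.into_iter()` on a range are both reported as useless conversions, dropping `.clone()` on the `Copy` authority index is `clone_on_copy`, `.map(..).flatten()` becomes `flat_map`, and `.flat_map(|it| it)` becomes `.flatten()` (`flat_map_identity`). A compact standalone sketch of the less obvious ones (illustrative values, not the pallet's):

```rust
fn main() {
    // clippy::clone_on_copy: `u32` is Copy, so `.clone()` is just a copy.
    let authority_index: u32 = 3;
    let _idx = authority_index as usize; // before: `authority_index.clone() as usize`

    // A range is already an iterator, so `.into_iter()` is a useless conversion.
    let nested: Vec<Vec<u32>> = (0..3).map(|gap| vec![gap, gap + 1]).collect();
    // before: `(0..3).into_iter().map(...)`

    // clippy::flat_map_identity: `flat_map(|it| it)` is just `flatten()`.
    let flat: Vec<u32> = nested.into_iter().flatten().collect();
    assert_eq!(flat, vec![0, 1, 1, 2, 2, 3]);
}
```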
3 changes: 1 addition & 2 deletions node/src/chain_spec.rs
@@ -84,7 +84,6 @@ pub fn testnet_genesis(
let nominations = initial_authorities
.as_slice()
.choose_multiple(&mut rng, count)
-		.into_iter()
.map(|choice| choice.0.clone())
.collect::<Vec<_>>();
(x.clone(), x.clone(), STASH, StakerStatus::Nominator(nominations))
@@ -155,7 +154,7 @@ pub fn testnet_genesis(
},
nomination_pools: NominationPoolsConfig {
min_create_bond: 10 * DOLLARS,
-			min_join_bond: 1 * DOLLARS,
+			min_join_bond: DOLLARS,
..Default::default()
},
}
1 change: 1 addition & 0 deletions node/src/service.rs
@@ -52,6 +52,7 @@ pub type TransactionPool = sc_transaction_pool::FullPool<Block, FullClient>;
type FullGrandpaBlockImport =
grandpa::GrandpaBlockImport<FullBackend, Block, FullClient, FullSelectChain>;

+#[allow(clippy::type_complexity)]
pub fn new_partial(
config: &Configuration,
) -> Result<
10 changes: 5 additions & 5 deletions runtime/src/lib.rs
@@ -725,9 +725,9 @@ where

parameter_types! {
pub const PreimageMaxSize: u32 = 4096 * 1024;
-	pub const PreimageBaseDeposit: Balance = 1 * DOLLARS;
+	pub const PreimageBaseDeposit: Balance = DOLLARS;
// One cent: $10,000 / MB
-	pub const PreimageByteDeposit: Balance = 1 * CENTS;
+	pub const PreimageByteDeposit: Balance = CENTS;
}

#[auto_config(include_currency)]
@@ -779,10 +779,10 @@ impl pallet_elections_phragmen::Config for Runtime {

parameter_types! {
pub const AssetDeposit: Balance = 100 * DOLLARS;
-	pub const ApprovalDeposit: Balance = 1 * DOLLARS;
+	pub const ApprovalDeposit: Balance = DOLLARS;
pub const StringLimit: u32 = 50;
pub const MetadataDepositBase: Balance = 10 * DOLLARS;
-	pub const MetadataDepositPerByte: Balance = 1 * DOLLARS;
+	pub const MetadataDepositPerByte: Balance = DOLLARS;
}

impl pallet_assets::Config for Runtime {
@@ -809,7 +809,7 @@ impl pallet_assets::Config for Runtime {
}

parameter_types! {
-	pub const IndexDeposit: Balance = 1 * DOLLARS;
+	pub const IndexDeposit: Balance = DOLLARS;
}

// #[auto_config()]
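The remaining chain-spec and runtime edits drop multiplications by one (`1 * DOLLARS`, `1 * CENTS`), which clippy reports as `identity_op`. A standalone sketch with hypothetical constants standing in for the runtime's currency units:

```rust
type Balance = u128;
const DOLLARS: Balance = 1_000_000_000_000;
const CENTS: Balance = DOLLARS / 100;

fn main() {
    // Before: `1 * DOLLARS` and `1 * CENTS` trigger clippy::identity_op.
    // After: the identity factor is dropped; amounts with real factors keep the `*`.
    let preimage_base_deposit: Balance = DOLLARS;
    let preimage_byte_deposit: Balance = CENTS;
    let asset_deposit: Balance = 100 * DOLLARS;

    // Same value; only the lint-triggering form on the right is rewritten.
    assert_eq!(preimage_base_deposit, 1 * DOLLARS);
    assert!(asset_deposit > preimage_byte_deposit);
}
```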
