Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/compiler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ fn main() -> Result<(), Box<dyn Error>> {

let network = matches
.value_of("network")
.and_then(|n| Some(Network::from_str(n)))
.map(|n| Network::from_str(n))
.transpose()
.unwrap()
.unwrap_or(Network::Testnet);
Expand Down
31 changes: 31 additions & 0 deletions scripts/cargo-check.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#!/bin/bash
#
# Run various invocations of cargo check

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: I'd add a set -e to make the whole script fail as soon as there's an error

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice, cheers!

# Fail fast: abort on the first failing command, on use of an unset
# variable, and on failures inside pipelines. Without this the script
# always reached "exit 0" even when a clippy invocation failed, which
# defeats its purpose as a CI gate (also requested in review).
set -euo pipefail

# Feature sets and toolchains to sweep clippy over.
features=( "default" "compiler" "electrum" "esplora" "compact_filters" "key-value-db" "async-interface" "all-keys" "keys-bip39" )
toolchains=( "+stable" "+1.45" "+nightly" )

main() {
    check_src
    # NOTE(review): check_all_targets is not defined anywhere in this
    # file — with `set -e` this call now fails loudly (exit 127) instead
    # of being silently swallowed. TODO: define it or drop the call.
    check_all_targets
}

# Check with all features, with various toolchains.
check_src() {
    for toolchain in "${toolchains[@]}"; do
        cmd="cargo $toolchain clippy --all-targets --no-default-features"

        for feature in "${features[@]}"; do
            touch_files
            # $cmd is intentionally unquoted: word splitting builds argv.
            $cmd --features "$feature"
        done
    done
}

# Touch files to prevent cached warnings from not showing up.
touch_files() {
    # Quote the pattern so the shell doesn't glob-expand '*.rs' against
    # the current directory before find sees it, and use -exec so paths
    # containing whitespace survive (touch $(find …) would split them).
    find . -name '*.rs' -exec touch {} +
}

main
exit 0
1 change: 1 addition & 0 deletions src/blockchain/compact_filters/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,7 @@ impl Blockchain for CompactFiltersBlockchain {
vec![Capability::FullHistory].into_iter().collect()
}

#[allow(clippy::mutex_atomic)] // Mutex is easier to understand than a CAS loop.
fn setup<D: BatchDatabase, P: 'static + Progress>(
&self,
_stop_gap: Option<usize>, // TODO: move to electrum and esplora only
Expand Down
46 changes: 23 additions & 23 deletions src/blockchain/compact_filters/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ use bitcoin::BlockHash;
use bitcoin::BlockHeader;
use bitcoin::Network;

use lazy_static::lazy_static;

use super::CompactFiltersError;

lazy_static! {
Expand Down Expand Up @@ -119,7 +121,7 @@ where
}

fn deserialize(data: &[u8]) -> Result<Self, CompactFiltersError> {
Ok(deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)?)
deserialize(data).map_err(|_| CompactFiltersError::DataCorruption)
}
}

Expand Down Expand Up @@ -436,15 +438,14 @@ impl ChainStore<Full> {

let key = StoreEntry::BlockHeaderIndex(Some(*block_hash)).get_key();
let data = read_store.get_pinned_cf(cf_handle, key)?;
Ok(data
.map(|data| {
Ok::<_, CompactFiltersError>(usize::from_be_bytes(
data.as_ref()
.try_into()
.map_err(|_| CompactFiltersError::DataCorruption)?,
))
})
.transpose()?)
data.map(|data| {
Ok::<_, CompactFiltersError>(usize::from_be_bytes(
data.as_ref()
.try_into()
.map_err(|_| CompactFiltersError::DataCorruption)?,
))
})
.transpose()
}

pub fn get_block_hash(&self, height: usize) -> Result<Option<BlockHash>, CompactFiltersError> {
Expand All @@ -453,13 +454,12 @@ impl ChainStore<Full> {

let key = StoreEntry::BlockHeader(Some(height)).get_key();
let data = read_store.get_pinned_cf(cf_handle, key)?;
Ok(data
.map(|data| {
let (header, _): (BlockHeader, Uint256) =
deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
Ok::<_, CompactFiltersError>(header.block_hash())
})
.transpose()?)
data.map(|data| {
let (header, _): (BlockHeader, Uint256) =
deserialize(&data).map_err(|_| CompactFiltersError::DataCorruption)?;
Ok::<_, CompactFiltersError>(header.block_hash())
})
.transpose()
}

pub fn save_full_block(&self, block: &Block, height: usize) -> Result<(), CompactFiltersError> {
Expand All @@ -475,10 +475,10 @@ impl ChainStore<Full> {
let key = StoreEntry::Block(Some(height)).get_key();
let opt_block = read_store.get_pinned(key)?;

Ok(opt_block
opt_block
.map(|data| deserialize(&data))
.transpose()
.map_err(|_| CompactFiltersError::DataCorruption)?)
.map_err(|_| CompactFiltersError::DataCorruption)
}

pub fn delete_blocks_until(&self, height: usize) -> Result<(), CompactFiltersError> {
Expand Down Expand Up @@ -565,14 +565,14 @@ impl<T: StoreType> ChainStore<T> {
let prefix = StoreEntry::BlockHeader(None).get_key();
let iterator = read_store.prefix_iterator_cf(cf_handle, prefix);

Ok(iterator
iterator
.last()
.map(|(_, v)| -> Result<_, CompactFiltersError> {
let (header, _): (BlockHeader, Uint256) = SerializeDb::deserialize(&v)?;

Ok(header.block_hash())
})
.transpose()?)
.transpose()
}

pub fn apply(
Expand Down Expand Up @@ -716,11 +716,11 @@ impl CFStore {

// FIXME: we have to filter manually because rocksdb sometimes returns stuff that doesn't
// have the right prefix
Ok(iterator
iterator
.filter(|(k, _)| k.starts_with(&prefix))
.skip(1)
.map(|(_, data)| Ok::<_, CompactFiltersError>(BundleEntry::deserialize(&data)?.1))
.collect::<Result<_, _>>()?)
.collect::<Result<_, _>>()
}

pub fn replace_checkpoints(
Expand Down
25 changes: 9 additions & 16 deletions src/database/any.rs
Original file line number Diff line number Diff line change
Expand Up @@ -312,24 +312,17 @@ impl BatchDatabase for AnyDatabase {
}
}
fn commit_batch(&mut self, batch: Self::Batch) -> Result<(), Error> {
// TODO: refactor once `move_ref_pattern` is stable
#[allow(irrefutable_let_patterns)]
match self {
AnyDatabase::Memory(db) => {
if let AnyBatch::Memory(batch) = batch {
db.commit_batch(batch)
} else {
unimplemented!()
}
}
AnyDatabase::Memory(db) => match batch {
AnyBatch::Memory(batch) => db.commit_batch(batch),
#[cfg(feature = "key-value-db")]
_ => unimplemented!("Sled batch shouldn't be used with Memory db."),
},
#[cfg(feature = "key-value-db")]
AnyDatabase::Sled(db) => {
if let AnyBatch::Sled(batch) = batch {
db.commit_batch(batch)
} else {
unimplemented!()
}
}
AnyDatabase::Sled(db) => match batch {
AnyBatch::Sled(batch) => db.commit_batch(batch),
_ => unimplemented!("Memory batch shouldn't be used with Sled db."),
},
}
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/database/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ pub mod test {
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((keychain, path.clone()))
Some((keychain, path))
);
}

Expand Down Expand Up @@ -256,7 +256,7 @@ pub mod test {
);
assert_eq!(
tree.get_path_from_script_pubkey(&script).unwrap(),
Some((keychain, path.clone()))
Some((keychain, path))
);
}

Expand Down
33 changes: 16 additions & 17 deletions src/descriptor/dsl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -228,10 +228,11 @@ macro_rules! impl_sortedmulti {
use $crate::keys::IntoDescriptorKey;
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();

let mut keys = vec![];
$(
keys.push($key.into_descriptor_key());
)*
let keys = vec![
$(
$key.into_descriptor_key(),
)*
];

keys.into_iter().collect::<Result<Vec<_>, _>>()
.map_err($crate::descriptor::DescriptorError::Key)
Expand Down Expand Up @@ -656,10 +657,11 @@ macro_rules! fragment {
use $crate::keys::IntoDescriptorKey;
let secp = $crate::bitcoin::secp256k1::Secp256k1::new();

let mut keys = vec![];
$(
keys.push($key.into_descriptor_key());
)*
let keys = vec![
$(
$key.into_descriptor_key(),
)*
];

keys.into_iter().collect::<Result<Vec<_>, _>>()
.map_err($crate::descriptor::DescriptorError::Key)
Expand Down Expand Up @@ -968,7 +970,7 @@ mod test {
fn test_valid_networks() {
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
let path = bip32::DerivationPath::from_str("m/0").unwrap();
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key = (xprv, path).into_descriptor_key().unwrap();

let (_desc, _key_map, valid_networks) = descriptor!(pkh(desc_key)).unwrap();
assert_eq!(
Expand All @@ -978,7 +980,7 @@ mod test {

let xprv = bip32::ExtendedPrivKey::from_str("xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi").unwrap();
let path = bip32::DerivationPath::from_str("m/10/20/30/40").unwrap();
let desc_key = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key = (xprv, path).into_descriptor_key().unwrap();

let (_desc, _key_map, valid_networks) = descriptor!(wpkh(desc_key)).unwrap();
assert_eq!(valid_networks, [Bitcoin].iter().cloned().collect());
Expand All @@ -1005,12 +1007,9 @@ mod test {
descriptor!(sh(wsh(multi(2, desc_key1, desc_key2, desc_key3)))).unwrap();
assert_eq!(key_map.len(), 3);

let desc_key1: DescriptorKey<Segwitv0> =
(xprv1, path1.clone()).into_descriptor_key().unwrap();
let desc_key2: DescriptorKey<Segwitv0> =
(xprv2, path2.clone()).into_descriptor_key().unwrap();
let desc_key3: DescriptorKey<Segwitv0> =
(xprv3, path3.clone()).into_descriptor_key().unwrap();
let desc_key1: DescriptorKey<Segwitv0> = (xprv1, path1).into_descriptor_key().unwrap();
let desc_key2: DescriptorKey<Segwitv0> = (xprv2, path2).into_descriptor_key().unwrap();
let desc_key3: DescriptorKey<Segwitv0> = (xprv3, path3).into_descriptor_key().unwrap();

let (key1, _key_map, _valid_networks) = desc_key1.extract(&secp).unwrap();
let (key2, _key_map, _valid_networks) = desc_key2.extract(&secp).unwrap();
Expand All @@ -1026,7 +1025,7 @@ mod test {
// this compiles
let xprv = bip32::ExtendedPrivKey::from_str("tprv8ZgxMBicQKsPcx5nBGsR63Pe8KnRUqmbJNENAfGftF3yuXoMMoVJJcYeUw5eVkm9WBPjWYt6HMWYJNesB5HaNVBaFc1M6dRjWSYnmewUMYy").unwrap();
let path = bip32::DerivationPath::from_str("m/0").unwrap();
let desc_key: DescriptorKey<Legacy> = (xprv, path.clone()).into_descriptor_key().unwrap();
let desc_key: DescriptorKey<Legacy> = (xprv, path).into_descriptor_key().unwrap();

let (desc, _key_map, _valid_networks) = descriptor!(pkh(desc_key)).unwrap();
assert_eq!(desc.to_string(), "pkh(tpubD6NzVbkrYhZ4WR7a4vY1VT3khMJMeAxVsfq9TBJyJWrNk247zCJtV7AWf6UJP7rAVsn8NNKdJi3gFyKPTmWZS9iukb91xbn2HbFSMQm2igY/0/*)#yrnz9pp2");
Expand Down
20 changes: 10 additions & 10 deletions src/descriptor/policy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -938,7 +938,7 @@ mod test {
.unwrap();

assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(matches!(&policy.contribution, Satisfaction::None));

Expand All @@ -953,7 +953,7 @@ mod test {
.unwrap();

assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(
matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
Expand Down Expand Up @@ -1039,8 +1039,8 @@ mod test {

assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);
assert!(
matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2
Expand Down Expand Up @@ -1071,8 +1071,8 @@ mod test {

assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &2
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);

assert!(
Expand Down Expand Up @@ -1103,7 +1103,7 @@ mod test {
.unwrap();

assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(matches!(&policy.contribution, Satisfaction::None));

Expand All @@ -1119,7 +1119,7 @@ mod test {
.unwrap();

assert!(
matches!(&policy.item, Signature(pk_or_f) if &pk_or_f.fingerprint.unwrap() == &fingerprint)
matches!(&policy.item, Signature(pk_or_f) if pk_or_f.fingerprint.unwrap() == fingerprint)
);
assert!(
matches!(&policy.contribution, Satisfaction::Complete {condition} if condition.csv == None && condition.timelock == None)
Expand Down Expand Up @@ -1147,8 +1147,8 @@ mod test {

assert!(
matches!(&policy.item, Multisig { keys, threshold } if threshold == &1
&& &keys[0].fingerprint.unwrap() == &fingerprint0
&& &keys[1].fingerprint.unwrap() == &fingerprint1)
&& keys[0].fingerprint.unwrap() == fingerprint0
&& keys[1].fingerprint.unwrap() == fingerprint1)
);
assert!(
matches!(&policy.contribution, Satisfaction::PartialComplete { n, m, items, conditions, .. } if n == &2
Expand Down
Loading