Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion bounded-collections/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ The format is based on [Keep a Changelog].

[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/

## [0.1.4] - 2023-01-28
- Fixed unnecessary decoding and allocations for bounded types when the decoded length is greater than the allowed bound.
- Add `Hash` derivation (when `feature = "std"`) for bounded types.

## [0.1.3] - 2023-01-27
- Removed non-existent `bounded` mod reference. [#715](https://github.com/paritytech/parity-common/pull/715)

Expand All @@ -16,4 +20,4 @@ The format is based on [Keep a Changelog].

## [0.1.0] - 2023-01-26
- Wrote better description for `bounded-collections`. [#709](https://github.com/paritytech/parity-common/pull/709)
- Added `bounded-collections` crate. [#708](https://github.com/paritytech/parity-common/pull/708)
- Added `bounded-collections` crate. [#708](https://github.com/paritytech/parity-common/pull/708)
4 changes: 2 additions & 2 deletions bounded-collections/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "bounded-collections"
version = "0.1.3"
version = "0.1.4"
authors = ["Parity Technologies <[email protected]>"]
license = "MIT OR Apache-2.0"
homepage = "https://github.com/paritytech/parity-common"
Expand All @@ -10,7 +10,7 @@ rust-version = "1.60.0"

[dependencies]
serde = { version = "1.0.101", default-features = false, optional = true }
codec = { version = "3.0.0", default-features = false, features = ["max-encoded-len"], package = "parity-scale-codec" }
codec = { version = "3.3.0", default-features = false, features = ["max-encoded-len"], package = "parity-scale-codec" }
scale-info = { version = ">=1.0, <3", features = ["derive"], default-features = false }
log = { version = "0.4.17", default-features = false }

Expand Down
31 changes: 28 additions & 3 deletions bounded-collections/src/bounded_btree_map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

use crate::{Get, TryCollect};
use alloc::collections::BTreeMap;
use codec::{Decode, Encode, MaxEncodedLen};
use codec::{Compact, Decode, Encode, MaxEncodedLen};
use core::{borrow::Borrow, marker::PhantomData, ops::Deref};

/// A bounded map based on a B-Tree.
Expand All @@ -29,6 +29,7 @@ use core::{borrow::Borrow, marker::PhantomData, ops::Deref};
///
/// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the
/// map. All internal operations ensure this bound is respected.
#[cfg_attr(feature = "std", derive(Hash))]
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
#[cfg_attr(feature = "std", derive(Hash))]
#[cfg_attr(feature = "std", derive(Hash, serde::Serialize, serde::Deserialize), serde(transparent))]

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure about deriving these; Deserialize would need a custom impl to add bounds checks, like we do now with BoundedVec. And the custom impl looks annoying to implement, so I'm thinking we don't add it until there's actually a need for it.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@bkchr I'm going to go ahead and merge -- these derives can be a follow-up if we really want them.

#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
pub struct BoundedBTreeMap<K, V, S>(BTreeMap<K, V>, PhantomData<S>);
Expand All @@ -40,10 +41,15 @@ where
S: Get<u32>,
{
fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
let inner = BTreeMap::<K, V>::decode(input)?;
if inner.len() > S::get() as usize {
// Same as the underlying implementation for `Decode` on `BTreeMap`, except we fail early if
// the len is too big.
let len: u32 = <Compact<u32>>::decode(input)?.into();
if len > S::get() {
return Err("BoundedBTreeMap exceeds its limit".into())
}
input.descend_ref()?;
let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?;
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No idea if rust handles this in the most efficient way, otherwise we should maybe pre-allocate as optimization.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I mean this is now debatable with the bound, but for the normal BTreeMap we haven't done this, as someone could send you a fake encoded BTreeMap that pretends to be u32::MAX in size, maybe leading to an OOM.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Interesting, would have to compile with optimizations and see. At any rate, I don't want to deviate too much from the standard Decode for the unbounded types, at least in this PR.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should be fine to merge as is.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this just another way of calling collect?

Suggested change
let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?;
let inner = (0..len).map(|_| Decode::decode(input)).collect::<Result<Vec<_>, codec::Error>>()?;

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I think it is. http://doc.rust-lang.org/1.66.1/std/result/enum.Result.html#method.from_iter

Currently this impl is the same as the unbounded version, apart from the length check. I think it should be kept the same unless there's a good reason not to.

input.ascend_ref();
Ok(Self(inner, PhantomData))
}

Expand Down Expand Up @@ -394,6 +400,7 @@ mod test {
use super::*;
use crate::ConstU32;
use alloc::{vec, vec::Vec};
use codec::CompactLen;

fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()>
where
Expand All @@ -410,6 +417,14 @@ mod test {
map_from_keys(keys).try_into().unwrap()
}

#[test]
fn encoding_same_as_unbounded_map() {
let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);

assert_eq!(b.encode(), m.encode());
}

#[test]
fn try_insert_works() {
let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
Expand Down Expand Up @@ -460,6 +475,16 @@ mod test {
);
}

#[test]
fn dont_consume_more_data_than_bounded_len() {
let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);
let data = m.encode();
let data_input = &mut &data[..];

BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(data_input).unwrap_err();
assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
}

#[test]
fn unequal_eq_impl_insert_works() {
// given a struct with a strange notion of equality
Expand Down
31 changes: 28 additions & 3 deletions bounded-collections/src/bounded_btree_set.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

use crate::{Get, TryCollect};
use alloc::collections::BTreeSet;
use codec::{Decode, Encode, MaxEncodedLen};
use codec::{Compact, Decode, Encode, MaxEncodedLen};
use core::{borrow::Borrow, marker::PhantomData, ops::Deref};

/// A bounded set based on a B-Tree.
Expand All @@ -29,6 +29,7 @@ use core::{borrow::Borrow, marker::PhantomData, ops::Deref};
///
/// Unlike a standard `BTreeSet`, there is an enforced upper limit to the number of items in the
/// set. All internal operations ensure this bound is respected.
#[cfg_attr(feature = "std", derive(Hash))]
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
#[cfg_attr(feature = "std", derive(Hash))]
#[cfg_attr(feature = "std", derive(Hash, serde::Serialize, serde::Deserialize), serde(transparent))]

#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
pub struct BoundedBTreeSet<T, S>(BTreeSet<T>, PhantomData<S>);
Expand All @@ -39,10 +40,15 @@ where
S: Get<u32>,
{
fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
let inner = BTreeSet::<T>::decode(input)?;
if inner.len() > S::get() as usize {
// Same as the underlying implementation for `Decode` on `BTreeSet`, except we fail early if
// the len is too big.
let len: u32 = <Compact<u32>>::decode(input)?.into();
if len > S::get() {
return Err("BoundedBTreeSet exceeds its limit".into())
}
input.descend_ref()?;
let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?;
input.ascend_ref();
Ok(Self(inner, PhantomData))
}

Expand Down Expand Up @@ -324,6 +330,7 @@ mod test {
use super::*;
use crate::ConstU32;
use alloc::{vec, vec::Vec};
use codec::CompactLen;

fn set_from_keys<T>(keys: &[T]) -> BTreeSet<T>
where
Expand All @@ -340,6 +347,14 @@ mod test {
set_from_keys(keys).try_into().unwrap()
}

#[test]
fn encoding_same_as_unbounded_set() {
let b = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
let m = set_from_keys(&[1, 2, 3, 4, 5, 6]);

assert_eq!(b.encode(), m.encode());
}

#[test]
fn try_insert_works() {
let mut bounded = boundedset_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
Expand Down Expand Up @@ -390,6 +405,16 @@ mod test {
);
}

#[test]
fn dont_consume_more_data_than_bounded_len() {
let s = set_from_keys(&[1, 2, 3, 4, 5, 6]);
let data = s.encode();
let data_input = &mut &data[..];

BoundedBTreeSet::<u32, ConstU32<4>>::decode(data_input).unwrap_err();
assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
}

#[test]
fn unequal_eq_impl_insert_works() {
// given a struct with a strange notion of equality
Expand Down
31 changes: 27 additions & 4 deletions bounded-collections/src/bounded_vec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
use super::WeakBoundedVec;
use crate::{Get, TryCollect};
use alloc::{boxed::Box, vec::Vec};
use codec::{Decode, Encode, EncodeLike, MaxEncodedLen};
use codec::{decode_vec_with_len, Compact, Decode, Encode, EncodeLike, MaxEncodedLen};
use core::{
marker::PhantomData,
ops::{Deref, Index, IndexMut, RangeBounds},
Expand All @@ -40,7 +40,7 @@ use serde::{
///
/// As the name suggests, the length of the queue is always bounded. All internal operations ensure
/// this bound is respected.
#[cfg_attr(feature = "std", derive(Serialize), serde(transparent))]
#[cfg_attr(feature = "std", derive(Hash, Serialize), serde(transparent))]
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
#[cfg_attr(feature = "std", derive(Hash, Serialize), serde(transparent))]
#[cfg_attr(feature = "std", derive(Hash, serde::Serialize, serde::Deserialize), serde(transparent))]

#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
pub struct BoundedVec<T, S>(pub(super) Vec<T>, #[cfg_attr(feature = "std", serde(skip_serializing))] PhantomData<S>);
Expand Down Expand Up @@ -108,6 +108,7 @@ where
/// A bounded slice.
///
/// Similar to a `BoundedVec`, but not owned and cannot be decoded.
#[cfg_attr(feature = "std", derive(Hash))]
#[derive(Encode)]
pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData<S>);

Expand Down Expand Up @@ -290,10 +291,13 @@ impl<'a, T, S: Get<u32>> BoundedSlice<'a, T, S> {

impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
let inner = Vec::<T>::decode(input)?;
if inner.len() > S::get() as usize {
// Same as the underlying implementation for `Decode` on `Vec`, except we fail early if the
// len is too big.
let len: u32 = <Compact<u32>>::decode(input)?.into();
if len > S::get() {
return Err("BoundedVec exceeds its limit".into())
}
let inner = decode_vec_with_len(input, len as usize)?;
Ok(Self(inner, PhantomData))
}

Expand Down Expand Up @@ -904,6 +908,15 @@ where
mod test {
use super::*;
use crate::{bounded_vec, ConstU32};
use codec::CompactLen;

#[test]
fn encoding_same_as_unbounded_vec() {
let b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
let v: Vec<u32> = vec![0, 1, 2, 3, 4, 5];

assert_eq!(b.encode(), v.encode());
}

#[test]
fn slice_truncate_from_works() {
Expand Down Expand Up @@ -1101,6 +1114,16 @@ mod test {
);
}

#[test]
fn dont_consume_more_data_than_bounded_len() {
let v: Vec<u32> = vec![1, 2, 3, 4, 5];
let data = v.encode();
let data_input = &mut &data[..];

BoundedVec::<u32, ConstU32<4>>::decode(data_input).unwrap_err();
assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
}

#[test]
fn eq_works() {
// of same type
Expand Down
2 changes: 1 addition & 1 deletion primitive-types/impls/codec/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ edition = "2021"
rust-version = "1.56.1"

[dependencies]
parity-scale-codec = { version = "3.0.0", default-features = false, features = ["max-encoded-len"] }
parity-scale-codec = { version = "3.3.0", default-features = false, features = ["max-encoded-len"] }

[features]
default = ["std"]
Expand Down