-
Notifications
You must be signed in to change notification settings - Fork 246
bounded_vec: Avoid unnecessary allocation when decoding #713
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
75ec16c
a0b7f5d
87e5556
7a02d51
e89f7bc
72dec2a
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,6 +1,6 @@ | ||
| [package] | ||
| name = "bounded-collections" | ||
| version = "0.1.3" | ||
| version = "0.1.4" | ||
| authors = ["Parity Technologies <[email protected]>"] | ||
| license = "MIT OR Apache-2.0" | ||
| homepage = "https://github.com/paritytech/parity-common" | ||
|
|
@@ -10,7 +10,7 @@ rust-version = "1.60.0" | |
|
|
||
| [dependencies] | ||
| serde = { version = "1.0.101", default-features = false, optional = true } | ||
| codec = { version = "3.0.0", default-features = false, features = ["max-encoded-len"], package = "parity-scale-codec" } | ||
| codec = { version = "3.3.0", default-features = false, features = ["max-encoded-len"], package = "parity-scale-codec" } | ||
| scale-info = { version = ">=1.0, <3", features = ["derive"], default-features = false } | ||
| log = { version = "0.4.17", default-features = false } | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -19,7 +19,7 @@ | |||||
|
|
||||||
| use crate::{Get, TryCollect}; | ||||||
| use alloc::collections::BTreeMap; | ||||||
| use codec::{Decode, Encode, MaxEncodedLen}; | ||||||
| use codec::{Compact, Decode, Encode, MaxEncodedLen}; | ||||||
| use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; | ||||||
|
|
||||||
| /// A bounded map based on a B-Tree. | ||||||
|
|
@@ -29,6 +29,7 @@ use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; | |||||
| /// | ||||||
| /// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the | ||||||
| /// map. All internal operations ensure this bound is respected. | ||||||
| #[cfg_attr(feature = "std", derive(Hash))] | ||||||
| #[derive(Encode, scale_info::TypeInfo)] | ||||||
| #[scale_info(skip_type_params(S))] | ||||||
| pub struct BoundedBTreeMap<K, V, S>(BTreeMap<K, V>, PhantomData<S>); | ||||||
|
|
@@ -40,10 +41,15 @@ where | |||||
| S: Get<u32>, | ||||||
| { | ||||||
| fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> { | ||||||
| let inner = BTreeMap::<K, V>::decode(input)?; | ||||||
| if inner.len() > S::get() as usize { | ||||||
| // Same as the underlying implementation for `Decode` on `BTreeMap`, except we fail early if | ||||||
| // the len is too big. | ||||||
| let len: u32 = <Compact<u32>>::decode(input)?.into(); | ||||||
| if len > S::get() { | ||||||
| return Err("BoundedBTreeMap exceeds its limit".into()) | ||||||
| } | ||||||
| input.descend_ref()?; | ||||||
| let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?; | ||||||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. No idea if Rust handles this in the most efficient way; otherwise we should maybe pre-allocate as an optimization.
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I mean this is now discussable with the bound, but for the normal
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Interesting, would have to compile with optimizations and see. At any rate, I don't want to deviate too much from the standard
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Should be fine to merge as is.
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Is this just another way of calling
Suggested change
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Yes, I think it is. http://doc.rust-lang.org/1.66.1/std/result/enum.Result.html#method.from_iter Currently this impl is the same as the unbounded version, apart from the length check. I think it should be kept the same unless there's a good reason not to. |
||||||
| input.ascend_ref(); | ||||||
| Ok(Self(inner, PhantomData)) | ||||||
| } | ||||||
|
|
||||||
|
|
@@ -394,6 +400,7 @@ mod test { | |||||
| use super::*; | ||||||
| use crate::ConstU32; | ||||||
| use alloc::{vec, vec::Vec}; | ||||||
| use codec::CompactLen; | ||||||
|
|
||||||
| fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()> | ||||||
| where | ||||||
|
|
@@ -410,6 +417,14 @@ mod test { | |||||
| map_from_keys(keys).try_into().unwrap() | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn encoding_same_as_unbounded_map() { | ||||||
| let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]); | ||||||
| let m = map_from_keys(&[1, 2, 3, 4, 5, 6]); | ||||||
|
|
||||||
| assert_eq!(b.encode(), m.encode()); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn try_insert_works() { | ||||||
| let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]); | ||||||
|
|
@@ -460,6 +475,16 @@ mod test { | |||||
| ); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn dont_consume_more_data_than_bounded_len() { | ||||||
| let m = map_from_keys(&[1, 2, 3, 4, 5, 6]); | ||||||
| let data = m.encode(); | ||||||
| let data_input = &mut &data[..]; | ||||||
|
|
||||||
| BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(data_input).unwrap_err(); | ||||||
| assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32))); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn unequal_eq_impl_insert_works() { | ||||||
| // given a struct with a strange notion of equality | ||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -19,7 +19,7 @@ | |||||
|
|
||||||
| use crate::{Get, TryCollect}; | ||||||
| use alloc::collections::BTreeSet; | ||||||
| use codec::{Decode, Encode, MaxEncodedLen}; | ||||||
| use codec::{Compact, Decode, Encode, MaxEncodedLen}; | ||||||
| use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; | ||||||
|
|
||||||
| /// A bounded set based on a B-Tree. | ||||||
|
|
@@ -29,6 +29,7 @@ use core::{borrow::Borrow, marker::PhantomData, ops::Deref}; | |||||
| /// | ||||||
| /// Unlike a standard `BTreeSet`, there is an enforced upper limit to the number of items in the | ||||||
| /// set. All internal operations ensure this bound is respected. | ||||||
| #[cfg_attr(feature = "std", derive(Hash))] | ||||||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||
| #[derive(Encode, scale_info::TypeInfo)] | ||||||
| #[scale_info(skip_type_params(S))] | ||||||
| pub struct BoundedBTreeSet<T, S>(BTreeSet<T>, PhantomData<S>); | ||||||
|
|
@@ -39,10 +40,15 @@ where | |||||
| S: Get<u32>, | ||||||
| { | ||||||
| fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> { | ||||||
| let inner = BTreeSet::<T>::decode(input)?; | ||||||
| if inner.len() > S::get() as usize { | ||||||
| // Same as the underlying implementation for `Decode` on `BTreeSet`, except we fail early if | ||||||
| // the len is too big. | ||||||
| let len: u32 = <Compact<u32>>::decode(input)?.into(); | ||||||
| if len > S::get() { | ||||||
| return Err("BoundedBTreeSet exceeds its limit".into()) | ||||||
| } | ||||||
| input.descend_ref()?; | ||||||
ggwpez marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
| let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?; | ||||||
| input.ascend_ref(); | ||||||
| Ok(Self(inner, PhantomData)) | ||||||
| } | ||||||
|
|
||||||
|
|
@@ -324,6 +330,7 @@ mod test { | |||||
| use super::*; | ||||||
| use crate::ConstU32; | ||||||
| use alloc::{vec, vec::Vec}; | ||||||
| use codec::CompactLen; | ||||||
|
|
||||||
| fn set_from_keys<T>(keys: &[T]) -> BTreeSet<T> | ||||||
| where | ||||||
|
|
@@ -340,6 +347,14 @@ mod test { | |||||
| set_from_keys(keys).try_into().unwrap() | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn encoding_same_as_unbounded_set() { | ||||||
| let b = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]); | ||||||
| let m = set_from_keys(&[1, 2, 3, 4, 5, 6]); | ||||||
|
|
||||||
| assert_eq!(b.encode(), m.encode()); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn try_insert_works() { | ||||||
| let mut bounded = boundedset_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]); | ||||||
|
|
@@ -390,6 +405,16 @@ mod test { | |||||
| ); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn dont_consume_more_data_than_bounded_len() { | ||||||
| let s = set_from_keys(&[1, 2, 3, 4, 5, 6]); | ||||||
| let data = s.encode(); | ||||||
| let data_input = &mut &data[..]; | ||||||
|
|
||||||
| BoundedBTreeSet::<u32, ConstU32<4>>::decode(data_input).unwrap_err(); | ||||||
| assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32))); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn unequal_eq_impl_insert_works() { | ||||||
| // given a struct with a strange notion of equality | ||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -21,7 +21,7 @@ | |||||
| use super::WeakBoundedVec; | ||||||
| use crate::{Get, TryCollect}; | ||||||
| use alloc::{boxed::Box, vec::Vec}; | ||||||
| use codec::{Decode, Encode, EncodeLike, MaxEncodedLen}; | ||||||
| use codec::{decode_vec_with_len, Compact, Decode, Encode, EncodeLike, MaxEncodedLen}; | ||||||
| use core::{ | ||||||
| marker::PhantomData, | ||||||
| ops::{Deref, Index, IndexMut, RangeBounds}, | ||||||
|
|
@@ -40,7 +40,7 @@ use serde::{ | |||||
| /// | ||||||
| /// As the name suggests, the length of the queue is always bounded. All internal operations ensure | ||||||
| /// this bound is respected. | ||||||
| #[cfg_attr(feature = "std", derive(Serialize), serde(transparent))] | ||||||
| #[cfg_attr(feature = "std", derive(Hash, Serialize), serde(transparent))] | ||||||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||
| #[derive(Encode, scale_info::TypeInfo)] | ||||||
| #[scale_info(skip_type_params(S))] | ||||||
| pub struct BoundedVec<T, S>(pub(super) Vec<T>, #[cfg_attr(feature = "std", serde(skip_serializing))] PhantomData<S>); | ||||||
|
|
@@ -108,6 +108,7 @@ where | |||||
| /// A bounded slice. | ||||||
| /// | ||||||
| /// Similar to a `BoundedVec`, but not owned and cannot be decoded. | ||||||
| #[cfg_attr(feature = "std", derive(Hash))] | ||||||
| #[derive(Encode)] | ||||||
| pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData<S>); | ||||||
|
|
||||||
|
|
@@ -290,10 +291,13 @@ impl<'a, T, S: Get<u32>> BoundedSlice<'a, T, S> { | |||||
|
|
||||||
| impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> { | ||||||
| fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> { | ||||||
| let inner = Vec::<T>::decode(input)?; | ||||||
| if inner.len() > S::get() as usize { | ||||||
| // Same as the underlying implementation for `Decode` on `Vec`, except we fail early if the | ||||||
| // len is too big. | ||||||
| let len: u32 = <Compact<u32>>::decode(input)?.into(); | ||||||
| if len > S::get() { | ||||||
ggwpez marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
| return Err("BoundedVec exceeds its limit".into()) | ||||||
| } | ||||||
| let inner = decode_vec_with_len(input, len as usize)?; | ||||||
| Ok(Self(inner, PhantomData)) | ||||||
| } | ||||||
|
|
||||||
|
|
@@ -904,6 +908,15 @@ where | |||||
| mod test { | ||||||
| use super::*; | ||||||
| use crate::{bounded_vec, ConstU32}; | ||||||
| use codec::CompactLen; | ||||||
|
|
||||||
| #[test] | ||||||
| fn encoding_same_as_unbounded_vec() { | ||||||
| let b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5]; | ||||||
| let v: Vec<u32> = vec![0, 1, 2, 3, 4, 5]; | ||||||
|
|
||||||
| assert_eq!(b.encode(), v.encode()); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn slice_truncate_from_works() { | ||||||
|
|
@@ -1101,6 +1114,16 @@ mod test { | |||||
| ); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn dont_consume_more_data_than_bounded_len() { | ||||||
| let v: Vec<u32> = vec![1, 2, 3, 4, 5]; | ||||||
| let data = v.encode(); | ||||||
| let data_input = &mut &data[..]; | ||||||
|
|
||||||
| BoundedVec::<u32, ConstU32<4>>::decode(data_input).unwrap_err(); | ||||||
| assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32))); | ||||||
| } | ||||||
|
|
||||||
| #[test] | ||||||
| fn eq_works() { | ||||||
| // of same type | ||||||
|
|
||||||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I'm not sure about deriving these;
`Deserialize` would need a custom `impl` to add bounds checks, like we do now with `BoundedVec`. And the custom `impl` looks annoying to implement, so I'm thinking we don't add it until there's actually a need for it. There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@bkchr I'm going to go ahead and merge -- these derives can be a follow-up if we really want them.