From 2d6b0ecc21a9e0f1eece3a612db860e24c9b5f90 Mon Sep 17 00:00:00 2001
From: Keith Yeung <kungfukeith11@gmail.com>
Date: Mon, 13 Jun 2022 14:31:42 +0200
Subject: [PATCH] Move bounded type definitions to sp-runtime (#11645)

* Move bounded type definitions to sp-runtime

* cargo fmt

* Fix compile error

Signed-off-by: Oliver Tale-Yazdi <oliver.tale-yazdi@parity.io>

* Move TryCollect to sp-runtime

* Write some docs

* Import missing types

Co-authored-by: Oliver Tale-Yazdi <oliver.tale-yazdi@parity.io>
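
For downstream code this is a pure move: the bounded collections now live in
sp-runtime and frame-support only re-exports them. A minimal sketch of the
resulting usage, assuming a std-enabled, edition-2021 crate that depends on
both `sp-runtime` and `frame-support`; the `sp_runtime::traits::TryCollect`
path is an assumption based on the "Move TryCollect to sp-runtime" commit
above:

    use frame_support::traits::ConstU32;
    // The bounded collections are now defined in sp-runtime; frame-support re-exports them.
    use sp_runtime::{bounded_vec, BoundedBTreeMap, BoundedVec};

    fn demo() {
        // `bounded_vec!` (std-only) panics if the literals exceed the bound,
        // so it remains suitable only for tests and non-consensus code.
        let mut v: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];

        // Runtime code keeps using the fallible API; the bound of 4 is enforced.
        v.try_push(4).expect("still within the bound");
        assert!(v.try_push(5).is_err());

        // `TryCollect` moved along with the types (assumed path, see above).
        use sp_runtime::traits::TryCollect;
        let m: BoundedBTreeMap<u32, u32, ConstU32<4>> =
            (0u32..3).map(|k| (k, k * 2)).try_collect().unwrap();
        assert_eq!(m.len(), 3);
    }

Existing imports through `frame_support` (e.g. `frame_support::BoundedVec` or
the `frame_support::bounded_vec!` macro) keep working via the re-exports added
in this patch, so no downstream changes should be required.
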
---
 substrate/frame/support/src/lib.rs            |  43 +-
 .../support/src/storage/bounded_btree_map.rs  | 489 +--------
 .../support/src/storage/bounded_btree_set.rs  | 451 +-------
 .../frame/support/src/storage/bounded_vec.rs  | 976 +----------------
 .../support/src/storage/weak_bounded_vec.rs   | 367 +------
 substrate/frame/support/src/traits/misc.rs    |  12 +-
 substrate/primitives/runtime/src/bounded.rs   |  28 +
 .../runtime/src/bounded/bounded_btree_map.rs  | 517 +++++++++
 .../runtime/src/bounded/bounded_btree_set.rs  | 479 +++++++++
 .../runtime/src/bounded/bounded_vec.rs        | 998 ++++++++++++++++++
 .../runtime/src/bounded/weak_bounded_vec.rs   | 393 +++++++
 substrate/primitives/runtime/src/lib.rs       |  43 +
 substrate/primitives/runtime/src/traits.rs    |  11 +
 13 files changed, 2484 insertions(+), 2323 deletions(-)
 create mode 100644 substrate/primitives/runtime/src/bounded.rs
 create mode 100644 substrate/primitives/runtime/src/bounded/bounded_btree_map.rs
 create mode 100644 substrate/primitives/runtime/src/bounded/bounded_btree_set.rs
 create mode 100644 substrate/primitives/runtime/src/bounded/bounded_vec.rs
 create mode 100644 substrate/primitives/runtime/src/bounded/weak_bounded_vec.rs

diff --git a/substrate/frame/support/src/lib.rs b/substrate/frame/support/src/lib.rs
index 96bc1257c75..f2cd7d1e165 100644
--- a/substrate/frame/support/src/lib.rs
+++ b/substrate/frame/support/src/lib.rs
@@ -45,6 +45,9 @@ pub use sp_core::Void;
 pub use sp_core_hashing_proc_macro;
 #[doc(hidden)]
 pub use sp_io::{self, storage::root as storage_root};
+#[cfg(feature = "std")]
+#[doc(hidden)]
+pub use sp_runtime::{bounded_btree_map, bounded_vec};
 #[doc(hidden)]
 pub use sp_runtime::{RuntimeDebug, StateVersion};
 #[cfg(feature = "std")]
@@ -119,46 +122,6 @@ impl TypeId for PalletId {
 	const TYPE_ID: [u8; 4] = *b"modl";
 }
 
-/// Build a bounded vec from the given literals.
-///
-/// The type of the outcome must be known.
-///
-/// Will not handle any errors and just panic if the given literals cannot fit in the corresponding
-/// bounded vec type. Thus, this is only suitable for testing and non-consensus code.
-#[macro_export]
-#[cfg(feature = "std")]
-macro_rules! bounded_vec {
-	($ ($values:expr),* $(,)?) => {
-		{
-			$crate::sp_std::vec![$($values),*].try_into().unwrap()
-		}
-	};
-	( $value:expr ; $repetition:expr ) => {
-		{
-			$crate::sp_std::vec![$value ; $repetition].try_into().unwrap()
-		}
-	}
-}
-
-/// Build a bounded btree-map from the given literals.
-///
-/// The type of the outcome must be known.
-///
-/// Will not handle any errors and just panic if the given literals cannot fit in the corresponding
-/// bounded vec type. Thus, this is only suitable for testing and non-consensus code.
-#[macro_export]
-#[cfg(feature = "std")]
-macro_rules! bounded_btree_map {
-	($ ( $key:expr => $value:expr ),* $(,)?) => {
-		{
-			$crate::traits::TryCollect::<$crate::BoundedBTreeMap<_, _, _>>::try_collect(
-				$crate::sp_std::vec![$(($key, $value)),*].into_iter()
-			).unwrap()
-		}
-	};
-
-}
-
 /// Generate a new type alias for [`storage::types::StorageValue`],
 /// [`storage::types::StorageMap`], [`storage::types::StorageDoubleMap`]
 /// and [`storage::types::StorageNMap`].
diff --git a/substrate/frame/support/src/storage/bounded_btree_map.rs b/substrate/frame/support/src/storage/bounded_btree_map.rs
index fd086f1fb23..d567faa38fd 100644
--- a/substrate/frame/support/src/storage/bounded_btree_map.rs
+++ b/substrate/frame/support/src/storage/bounded_btree_map.rs
@@ -17,354 +17,18 @@
 
 //! Traits, types and structs to support a bounded BTreeMap.
 
-use crate::{
-	storage::StorageDecodeLength,
-	traits::{Get, TryCollect},
-};
-use codec::{Decode, Encode, MaxEncodedLen};
-use sp_std::{borrow::Borrow, collections::btree_map::BTreeMap, marker::PhantomData, ops::Deref};
-
-/// A bounded map based on a B-Tree.
-///
-/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
-/// the amount of work performed in a search. See [`BTreeMap`] for more details.
-///
-/// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the
-/// map. All internal operations ensure this bound is respected.
-#[derive(Encode, scale_info::TypeInfo)]
-#[scale_info(skip_type_params(S))]
-pub struct BoundedBTreeMap<K, V, S>(BTreeMap<K, V>, PhantomData<S>);
-
-impl<K, V, S> Decode for BoundedBTreeMap<K, V, S>
-where
-	K: Decode + Ord,
-	V: Decode,
-	S: Get<u32>,
-{
-	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
-		let inner = BTreeMap::<K, V>::decode(input)?;
-		if inner.len() > S::get() as usize {
-			return Err("BoundedBTreeMap exceeds its limit".into())
-		}
-		Ok(Self(inner, PhantomData))
-	}
-
-	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
-		BTreeMap::<K, V>::skip(input)
-	}
-}
-
-impl<K, V, S> BoundedBTreeMap<K, V, S>
-where
-	S: Get<u32>,
-{
-	/// Get the bound of the type in `usize`.
-	pub fn bound() -> usize {
-		S::get() as usize
-	}
-}
-
-impl<K, V, S> BoundedBTreeMap<K, V, S>
-where
-	K: Ord,
-	S: Get<u32>,
-{
-	/// Create `Self` from `t` without any checks.
-	fn unchecked_from(t: BTreeMap<K, V>) -> Self {
-		Self(t, Default::default())
-	}
-
-	/// Exactly the same semantics as `BTreeMap::retain`.
-	///
-	/// The is a safe `&mut self` borrow because `retain` can only ever decrease the length of the
-	/// inner map.
-	pub fn retain<F: FnMut(&K, &mut V) -> bool>(&mut self, f: F) {
-		self.0.retain(f)
-	}
-
-	/// Create a new `BoundedBTreeMap`.
-	///
-	/// Does not allocate.
-	pub fn new() -> Self {
-		BoundedBTreeMap(BTreeMap::new(), PhantomData)
-	}
-
-	/// Consume self, and return the inner `BTreeMap`.
-	///
-	/// This is useful when a mutating API of the inner type is desired, and closure-based mutation
-	/// such as provided by [`try_mutate`][Self::try_mutate] is inconvenient.
-	pub fn into_inner(self) -> BTreeMap<K, V> {
-		debug_assert!(self.0.len() <= Self::bound());
-		self.0
-	}
-
-	/// Consumes self and mutates self via the given `mutate` function.
-	///
-	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
-	/// returned.
-	///
-	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
-	/// [`Self::try_from`].
-	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeMap<K, V>)) -> Option<Self> {
-		mutate(&mut self.0);
-		(self.0.len() <= Self::bound()).then(move || self)
-	}
-
-	// Clears the map, removing all elements.
-	pub fn clear(&mut self) {
-		self.0.clear()
-	}
-
-	/// Return a mutable reference to the value corresponding to the key.
-	///
-	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
-	/// form _must_ match the ordering on the key type.
-	pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
-	where
-		K: Borrow<Q>,
-		Q: Ord + ?Sized,
-	{
-		self.0.get_mut(key)
-	}
-
-	/// Exactly the same semantics as [`BTreeMap::insert`], but returns an `Err` (and is a noop) if
-	/// the new length of the map exceeds `S`.
-	///
-	/// In the `Err` case, returns the inserted pair so it can be further used without cloning.
-	pub fn try_insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
-		if self.len() < Self::bound() || self.0.contains_key(&key) {
-			Ok(self.0.insert(key, value))
-		} else {
-			Err((key, value))
-		}
-	}
-
-	/// Remove a key from the map, returning the value at the key if the key was previously in the
-	/// map.
-	///
-	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
-	/// form _must_ match the ordering on the key type.
-	pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
-	where
-		K: Borrow<Q>,
-		Q: Ord + ?Sized,
-	{
-		self.0.remove(key)
-	}
-
-	/// Remove a key from the map, returning the value at the key if the key was previously in the
-	/// map.
-	///
-	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
-	/// form _must_ match the ordering on the key type.
-	pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
-	where
-		K: Borrow<Q>,
-		Q: Ord + ?Sized,
-	{
-		self.0.remove_entry(key)
-	}
-}
-
-impl<K, V, S> Default for BoundedBTreeMap<K, V, S>
-where
-	K: Ord,
-	S: Get<u32>,
-{
-	fn default() -> Self {
-		Self::new()
-	}
-}
-
-impl<K, V, S> Clone for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: Clone,
-{
-	fn clone(&self) -> Self {
-		BoundedBTreeMap(self.0.clone(), PhantomData)
-	}
-}
-
-#[cfg(feature = "std")]
-impl<K, V, S> std::fmt::Debug for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: std::fmt::Debug,
-	S: Get<u32>,
-{
-	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		f.debug_tuple("BoundedBTreeMap").field(&self.0).field(&Self::bound()).finish()
-	}
-}
-
-impl<K, V, S1, S2> PartialEq<BoundedBTreeMap<K, V, S1>> for BoundedBTreeMap<K, V, S2>
-where
-	BTreeMap<K, V>: PartialEq,
-	S1: Get<u32>,
-	S2: Get<u32>,
-{
-	fn eq(&self, other: &BoundedBTreeMap<K, V, S1>) -> bool {
-		S1::get() == S2::get() && self.0 == other.0
-	}
-}
-
-impl<K, V, S> Eq for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: Eq,
-	S: Get<u32>,
-{
-}
-
-impl<K, V, S> PartialEq<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: PartialEq,
-{
-	fn eq(&self, other: &BTreeMap<K, V>) -> bool {
-		self.0 == *other
-	}
-}
-
-impl<K, V, S> PartialOrd for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: PartialOrd,
-	S: Get<u32>,
-{
-	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
-		self.0.partial_cmp(&other.0)
-	}
-}
-
-impl<K, V, S> Ord for BoundedBTreeMap<K, V, S>
-where
-	BTreeMap<K, V>: Ord,
-	S: Get<u32>,
-{
-	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
-		self.0.cmp(&other.0)
-	}
-}
-
-impl<K, V, S> IntoIterator for BoundedBTreeMap<K, V, S> {
-	type Item = (K, V);
-	type IntoIter = sp_std::collections::btree_map::IntoIter<K, V>;
-
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.into_iter()
-	}
-}
-
-impl<'a, K, V, S> IntoIterator for &'a BoundedBTreeMap<K, V, S> {
-	type Item = (&'a K, &'a V);
-	type IntoIter = sp_std::collections::btree_map::Iter<'a, K, V>;
-
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter()
-	}
-}
-
-impl<'a, K, V, S> IntoIterator for &'a mut BoundedBTreeMap<K, V, S> {
-	type Item = (&'a K, &'a mut V);
-	type IntoIter = sp_std::collections::btree_map::IterMut<'a, K, V>;
-
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter_mut()
-	}
-}
-
-impl<K, V, S> MaxEncodedLen for BoundedBTreeMap<K, V, S>
-where
-	K: MaxEncodedLen,
-	V: MaxEncodedLen,
-	S: Get<u32>,
-{
-	fn max_encoded_len() -> usize {
-		Self::bound()
-			.saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len()))
-			.saturating_add(codec::Compact(S::get()).encoded_size())
-	}
-}
-
-impl<K, V, S> Deref for BoundedBTreeMap<K, V, S>
-where
-	K: Ord,
-{
-	type Target = BTreeMap<K, V>;
-
-	fn deref(&self) -> &Self::Target {
-		&self.0
-	}
-}
-
-impl<K, V, S> AsRef<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
-where
-	K: Ord,
-{
-	fn as_ref(&self) -> &BTreeMap<K, V> {
-		&self.0
-	}
-}
-
-impl<K, V, S> From<BoundedBTreeMap<K, V, S>> for BTreeMap<K, V>
-where
-	K: Ord,
-{
-	fn from(map: BoundedBTreeMap<K, V, S>) -> Self {
-		map.0
-	}
-}
-
-impl<K, V, S> TryFrom<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
-where
-	K: Ord,
-	S: Get<u32>,
-{
-	type Error = ();
-
-	fn try_from(value: BTreeMap<K, V>) -> Result<Self, Self::Error> {
-		(value.len() <= Self::bound())
-			.then(move || BoundedBTreeMap(value, PhantomData))
-			.ok_or(())
-	}
-}
-
-impl<K, V, S> codec::DecodeLength for BoundedBTreeMap<K, V, S> {
-	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
-		// `BoundedBTreeMap<K, V, S>` is stored just a `BTreeMap<K, V>`, which is stored as a
-		// `Compact<u32>` with its length followed by an iteration of its items. We can just use
-		// the underlying implementation.
-		<BTreeMap<K, V> as codec::DecodeLength>::len(self_encoded)
-	}
-}
+use crate::storage::StorageDecodeLength;
+pub use sp_runtime::BoundedBTreeMap;
 
 impl<K, V, S> StorageDecodeLength for BoundedBTreeMap<K, V, S> {}
 
-impl<K, V, S> codec::EncodeLike<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S> where
-	BTreeMap<K, V>: Encode
-{
-}
-
-impl<I, K, V, Bound> TryCollect<BoundedBTreeMap<K, V, Bound>> for I
-where
-	K: Ord,
-	I: ExactSizeIterator + Iterator<Item = (K, V)>,
-	Bound: Get<u32>,
-{
-	type Error = &'static str;
-
-	fn try_collect(self) -> Result<BoundedBTreeMap<K, V, Bound>, Self::Error> {
-		if self.len() > Bound::get() as usize {
-			Err("iterator length too big")
-		} else {
-			Ok(BoundedBTreeMap::<K, V, Bound>::unchecked_from(self.collect::<BTreeMap<K, V>>()))
-		}
-	}
-}
-
 #[cfg(test)]
 pub mod test {
 	use super::*;
 	use crate::Twox128;
-	use frame_support::traits::ConstU32;
+	use frame_support::traits::{ConstU32, Get};
 	use sp_io::TestExternalities;
+	use sp_std::collections::btree_map::BTreeMap;
 
 	#[crate::storage_alias]
 	type Foo = StorageValue<Prefix, BoundedBTreeMap<u32, (), ConstU32<7>>>;
@@ -416,149 +80,4 @@ pub mod test {
 			assert!(FooDoubleMap::decode_len(2, 2).is_none());
 		});
 	}
-
-	#[test]
-	fn try_insert_works() {
-		let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
-		bounded.try_insert(0, ()).unwrap();
-		assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
-
-		assert!(bounded.try_insert(9, ()).is_err());
-		assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
-	}
-
-	#[test]
-	fn deref_coercion_works() {
-		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
-		// these methods come from deref-ed vec.
-		assert_eq!(bounded.len(), 3);
-		assert!(bounded.iter().next().is_some());
-		assert!(!bounded.is_empty());
-	}
-
-	#[test]
-	fn try_mutate_works() {
-		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
-		let bounded = bounded
-			.try_mutate(|v| {
-				v.insert(7, ());
-			})
-			.unwrap();
-		assert_eq!(bounded.len(), 7);
-		assert!(bounded
-			.try_mutate(|v| {
-				v.insert(8, ());
-			})
-			.is_none());
-	}
-
-	#[test]
-	fn btree_map_eq_works() {
-		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
-		assert_eq!(bounded, map_from_keys(&[1, 2, 3, 4, 5, 6]));
-	}
-
-	#[test]
-	fn too_big_fail_to_decode() {
-		let v: Vec<(u32, u32)> = vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)];
-		assert_eq!(
-			BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
-			Err("BoundedBTreeMap exceeds its limit".into()),
-		);
-	}
-
-	#[test]
-	fn unequal_eq_impl_insert_works() {
-		// given a struct with a strange notion of equality
-		#[derive(Debug)]
-		struct Unequal(u32, bool);
-
-		impl PartialEq for Unequal {
-			fn eq(&self, other: &Self) -> bool {
-				self.0 == other.0
-			}
-		}
-		impl Eq for Unequal {}
-
-		impl Ord for Unequal {
-			fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-				self.0.cmp(&other.0)
-			}
-		}
-
-		impl PartialOrd for Unequal {
-			fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-				Some(self.cmp(other))
-			}
-		}
-
-		let mut map = BoundedBTreeMap::<Unequal, u32, ConstU32<4>>::new();
-
-		// when the set is full
-
-		for i in 0..4 {
-			map.try_insert(Unequal(i, false), i).unwrap();
-		}
-
-		// can't insert a new distinct member
-		map.try_insert(Unequal(5, false), 5).unwrap_err();
-
-		// but _can_ insert a distinct member which compares equal, though per the documentation,
-		// neither the set length nor the actual member are changed, but the value is
-		map.try_insert(Unequal(0, true), 6).unwrap();
-		assert_eq!(map.len(), 4);
-		let (zero_key, zero_value) = map.get_key_value(&Unequal(0, true)).unwrap();
-		assert_eq!(zero_key.0, 0);
-		assert_eq!(zero_key.1, false);
-		assert_eq!(*zero_value, 6);
-	}
-
-	#[test]
-	fn can_be_collected() {
-		let b1 = boundedmap_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
-		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
-		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);
-
-		// can also be collected into a collection of length 4.
-		let b2: BoundedBTreeMap<u32, (), ConstU32<4>> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
-		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);
-
-		// can be mutated further into iterators that are `ExactSizedIterator`.
-		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).rev().skip(2).try_collect().unwrap();
-		// note that the binary tree will re-sort this, so rev() is not really seen
-		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);
-
-		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).take(2).try_collect().unwrap();
-		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);
-
-		// but these worn't work
-		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<3>>, _> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect();
-		assert!(b2.is_err());
-
-		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<1>>, _> =
-			b1.iter().map(|(k, v)| (k + 1, *v)).skip(2).try_collect();
-		assert!(b2.is_err());
-	}
-
-	#[test]
-	fn eq_works() {
-		// of same type
-		let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
-		let b2 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
-		assert_eq!(b1, b2);
-
-		// of different type, but same value and bound.
-		crate::parameter_types! {
-			B1: u32 = 7;
-			B2: u32 = 7;
-		}
-		let b1 = boundedmap_from_keys::<u32, B1>(&[1, 2]);
-		let b2 = boundedmap_from_keys::<u32, B2>(&[1, 2]);
-		assert_eq!(b1, b2);
-	}
 }
diff --git a/substrate/frame/support/src/storage/bounded_btree_set.rs b/substrate/frame/support/src/storage/bounded_btree_set.rs
index 77e1c6f1c96..9ed129e67c4 100644
--- a/substrate/frame/support/src/storage/bounded_btree_set.rs
+++ b/substrate/frame/support/src/storage/bounded_btree_set.rs
@@ -17,319 +17,18 @@
 
 //! Traits, types and structs to support a bounded `BTreeSet`.
 
-use crate::{
-	storage::StorageDecodeLength,
-	traits::{Get, TryCollect},
-};
-use codec::{Decode, Encode, MaxEncodedLen};
-use sp_std::{borrow::Borrow, collections::btree_set::BTreeSet, marker::PhantomData, ops::Deref};
-
-/// A bounded set based on a B-Tree.
-///
-/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
-/// the amount of work performed in a search. See [`BTreeSet`] for more details.
-///
-/// Unlike a standard `BTreeSet`, there is an enforced upper limit to the number of items in the
-/// set. All internal operations ensure this bound is respected.
-#[derive(Encode, scale_info::TypeInfo)]
-#[scale_info(skip_type_params(S))]
-pub struct BoundedBTreeSet<T, S>(BTreeSet<T>, PhantomData<S>);
-
-impl<T, S> Decode for BoundedBTreeSet<T, S>
-where
-	T: Decode + Ord,
-	S: Get<u32>,
-{
-	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
-		let inner = BTreeSet::<T>::decode(input)?;
-		if inner.len() > S::get() as usize {
-			return Err("BoundedBTreeSet exceeds its limit".into())
-		}
-		Ok(Self(inner, PhantomData))
-	}
-
-	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
-		BTreeSet::<T>::skip(input)
-	}
-}
-
-impl<T, S> BoundedBTreeSet<T, S>
-where
-	S: Get<u32>,
-{
-	/// Get the bound of the type in `usize`.
-	pub fn bound() -> usize {
-		S::get() as usize
-	}
-}
-
-impl<T, S> BoundedBTreeSet<T, S>
-where
-	T: Ord,
-	S: Get<u32>,
-{
-	/// Create `Self` from `t` without any checks.
-	fn unchecked_from(t: BTreeSet<T>) -> Self {
-		Self(t, Default::default())
-	}
-
-	/// Create a new `BoundedBTreeSet`.
-	///
-	/// Does not allocate.
-	pub fn new() -> Self {
-		BoundedBTreeSet(BTreeSet::new(), PhantomData)
-	}
-
-	/// Consume self, and return the inner `BTreeSet`.
-	///
-	/// This is useful when a mutating API of the inner type is desired, and closure-based mutation
-	/// such as provided by [`try_mutate`][Self::try_mutate] is inconvenient.
-	pub fn into_inner(self) -> BTreeSet<T> {
-		debug_assert!(self.0.len() <= Self::bound());
-		self.0
-	}
-
-	/// Consumes self and mutates self via the given `mutate` function.
-	///
-	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
-	/// returned.
-	///
-	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
-	/// [`Self::try_from`].
-	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeSet<T>)) -> Option<Self> {
-		mutate(&mut self.0);
-		(self.0.len() <= Self::bound()).then(move || self)
-	}
-
-	// Clears the set, removing all elements.
-	pub fn clear(&mut self) {
-		self.0.clear()
-	}
-
-	/// Exactly the same semantics as [`BTreeSet::insert`], but returns an `Err` (and is a noop) if
-	/// the new length of the set exceeds `S`.
-	///
-	/// In the `Err` case, returns the inserted item so it can be further used without cloning.
-	pub fn try_insert(&mut self, item: T) -> Result<bool, T> {
-		if self.len() < Self::bound() || self.0.contains(&item) {
-			Ok(self.0.insert(item))
-		} else {
-			Err(item)
-		}
-	}
-
-	/// Remove an item from the set, returning whether it was previously in the set.
-	///
-	/// The item may be any borrowed form of the set's item type, but the ordering on the borrowed
-	/// form _must_ match the ordering on the item type.
-	pub fn remove<Q>(&mut self, item: &Q) -> bool
-	where
-		T: Borrow<Q>,
-		Q: Ord + ?Sized,
-	{
-		self.0.remove(item)
-	}
-
-	/// Removes and returns the value in the set, if any, that is equal to the given one.
-	///
-	/// The value may be any borrowed form of the set's value type, but the ordering on the borrowed
-	/// form _must_ match the ordering on the value type.
-	pub fn take<Q>(&mut self, value: &Q) -> Option<T>
-	where
-		T: Borrow<Q> + Ord,
-		Q: Ord + ?Sized,
-	{
-		self.0.take(value)
-	}
-}
-
-impl<T, S> Default for BoundedBTreeSet<T, S>
-where
-	T: Ord,
-	S: Get<u32>,
-{
-	fn default() -> Self {
-		Self::new()
-	}
-}
-
-impl<T, S> Clone for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: Clone,
-{
-	fn clone(&self) -> Self {
-		BoundedBTreeSet(self.0.clone(), PhantomData)
-	}
-}
-
-#[cfg(feature = "std")]
-impl<T, S> std::fmt::Debug for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: std::fmt::Debug,
-	S: Get<u32>,
-{
-	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		f.debug_tuple("BoundedBTreeSet").field(&self.0).field(&Self::bound()).finish()
-	}
-}
-
-impl<T, S1, S2> PartialEq<BoundedBTreeSet<T, S1>> for BoundedBTreeSet<T, S2>
-where
-	BTreeSet<T>: PartialEq,
-	S1: Get<u32>,
-	S2: Get<u32>,
-{
-	fn eq(&self, other: &BoundedBTreeSet<T, S1>) -> bool {
-		S1::get() == S2::get() && self.0 == other.0
-	}
-}
-
-impl<T, S> Eq for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: Eq,
-	S: Get<u32>,
-{
-}
-
-impl<T, S> PartialEq<BTreeSet<T>> for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: PartialEq,
-	S: Get<u32>,
-{
-	fn eq(&self, other: &BTreeSet<T>) -> bool {
-		self.0 == *other
-	}
-}
-
-impl<T, S> PartialOrd for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: PartialOrd,
-	S: Get<u32>,
-{
-	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
-		self.0.partial_cmp(&other.0)
-	}
-}
-
-impl<T, S> Ord for BoundedBTreeSet<T, S>
-where
-	BTreeSet<T>: Ord,
-	S: Get<u32>,
-{
-	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
-		self.0.cmp(&other.0)
-	}
-}
-
-impl<T, S> IntoIterator for BoundedBTreeSet<T, S> {
-	type Item = T;
-	type IntoIter = sp_std::collections::btree_set::IntoIter<T>;
-
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.into_iter()
-	}
-}
-
-impl<'a, T, S> IntoIterator for &'a BoundedBTreeSet<T, S> {
-	type Item = &'a T;
-	type IntoIter = sp_std::collections::btree_set::Iter<'a, T>;
-
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter()
-	}
-}
-
-impl<T, S> MaxEncodedLen for BoundedBTreeSet<T, S>
-where
-	T: MaxEncodedLen,
-	S: Get<u32>,
-{
-	fn max_encoded_len() -> usize {
-		Self::bound()
-			.saturating_mul(T::max_encoded_len())
-			.saturating_add(codec::Compact(S::get()).encoded_size())
-	}
-}
-
-impl<T, S> Deref for BoundedBTreeSet<T, S>
-where
-	T: Ord,
-{
-	type Target = BTreeSet<T>;
-
-	fn deref(&self) -> &Self::Target {
-		&self.0
-	}
-}
-
-impl<T, S> AsRef<BTreeSet<T>> for BoundedBTreeSet<T, S>
-where
-	T: Ord,
-{
-	fn as_ref(&self) -> &BTreeSet<T> {
-		&self.0
-	}
-}
-
-impl<T, S> From<BoundedBTreeSet<T, S>> for BTreeSet<T>
-where
-	T: Ord,
-{
-	fn from(set: BoundedBTreeSet<T, S>) -> Self {
-		set.0
-	}
-}
-
-impl<T, S> TryFrom<BTreeSet<T>> for BoundedBTreeSet<T, S>
-where
-	T: Ord,
-	S: Get<u32>,
-{
-	type Error = ();
-
-	fn try_from(value: BTreeSet<T>) -> Result<Self, Self::Error> {
-		(value.len() <= Self::bound())
-			.then(move || BoundedBTreeSet(value, PhantomData))
-			.ok_or(())
-	}
-}
-
-impl<T, S> codec::DecodeLength for BoundedBTreeSet<T, S> {
-	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
-		// `BoundedBTreeSet<T, S>` is stored just a `BTreeSet<T>`, which is stored as a
-		// `Compact<u32>` with its length followed by an iteration of its items. We can just use
-		// the underlying implementation.
-		<BTreeSet<T> as codec::DecodeLength>::len(self_encoded)
-	}
-}
+use crate::storage::StorageDecodeLength;
+pub use sp_runtime::BoundedBTreeSet;
 
 impl<T, S> StorageDecodeLength for BoundedBTreeSet<T, S> {}
 
-impl<T, S> codec::EncodeLike<BTreeSet<T>> for BoundedBTreeSet<T, S> where BTreeSet<T>: Encode {}
-
-impl<I, T, Bound> TryCollect<BoundedBTreeSet<T, Bound>> for I
-where
-	T: Ord,
-	I: ExactSizeIterator + Iterator<Item = T>,
-	Bound: Get<u32>,
-{
-	type Error = &'static str;
-
-	fn try_collect(self) -> Result<BoundedBTreeSet<T, Bound>, Self::Error> {
-		if self.len() > Bound::get() as usize {
-			Err("iterator length too big")
-		} else {
-			Ok(BoundedBTreeSet::<T, Bound>::unchecked_from(self.collect::<BTreeSet<T>>()))
-		}
-	}
-}
-
 #[cfg(test)]
 pub mod test {
 	use super::*;
 	use crate::Twox128;
-	use frame_support::traits::ConstU32;
+	use frame_support::traits::{ConstU32, Get};
 	use sp_io::TestExternalities;
+	use sp_std::collections::btree_set::BTreeSet;
 
 	#[crate::storage_alias]
 	type Foo = StorageValue<Prefix, BoundedBTreeSet<u32, ConstU32<7>>>;
@@ -381,146 +80,4 @@ pub mod test {
 			assert!(FooDoubleMap::decode_len(2, 2).is_none());
 		});
 	}
-
-	#[test]
-	fn try_insert_works() {
-		let mut bounded = boundedset_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
-		bounded.try_insert(0).unwrap();
-		assert_eq!(*bounded, set_from_keys(&[1, 0, 2, 3]));
-
-		assert!(bounded.try_insert(9).is_err());
-		assert_eq!(*bounded, set_from_keys(&[1, 0, 2, 3]));
-	}
-
-	#[test]
-	fn deref_coercion_works() {
-		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
-		// these methods come from deref-ed vec.
-		assert_eq!(bounded.len(), 3);
-		assert!(bounded.iter().next().is_some());
-		assert!(!bounded.is_empty());
-	}
-
-	#[test]
-	fn try_mutate_works() {
-		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
-		let bounded = bounded
-			.try_mutate(|v| {
-				v.insert(7);
-			})
-			.unwrap();
-		assert_eq!(bounded.len(), 7);
-		assert!(bounded
-			.try_mutate(|v| {
-				v.insert(8);
-			})
-			.is_none());
-	}
-
-	#[test]
-	fn btree_map_eq_works() {
-		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
-		assert_eq!(bounded, set_from_keys(&[1, 2, 3, 4, 5, 6]));
-	}
-
-	#[test]
-	fn too_big_fail_to_decode() {
-		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
-		assert_eq!(
-			BoundedBTreeSet::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
-			Err("BoundedBTreeSet exceeds its limit".into()),
-		);
-	}
-
-	#[test]
-	fn unequal_eq_impl_insert_works() {
-		// given a struct with a strange notion of equality
-		#[derive(Debug)]
-		struct Unequal(u32, bool);
-
-		impl PartialEq for Unequal {
-			fn eq(&self, other: &Self) -> bool {
-				self.0 == other.0
-			}
-		}
-		impl Eq for Unequal {}
-
-		impl Ord for Unequal {
-			fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-				self.0.cmp(&other.0)
-			}
-		}
-
-		impl PartialOrd for Unequal {
-			fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-				Some(self.cmp(other))
-			}
-		}
-
-		let mut set = BoundedBTreeSet::<Unequal, ConstU32<4>>::new();
-
-		// when the set is full
-
-		for i in 0..4 {
-			set.try_insert(Unequal(i, false)).unwrap();
-		}
-
-		// can't insert a new distinct member
-		set.try_insert(Unequal(5, false)).unwrap_err();
-
-		// but _can_ insert a distinct member which compares equal, though per the documentation,
-		// neither the set length nor the actual member are changed
-		set.try_insert(Unequal(0, true)).unwrap();
-		assert_eq!(set.len(), 4);
-		let zero_item = set.get(&Unequal(0, true)).unwrap();
-		assert_eq!(zero_item.0, 0);
-		assert_eq!(zero_item.1, false);
-	}
-
-	#[test]
-	fn can_be_collected() {
-		let b1 = boundedset_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
-		let b2: BoundedBTreeSet<u32, ConstU32<5>> = b1.iter().map(|k| k + 1).try_collect().unwrap();
-		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3, 4, 5]);
-
-		// can also be collected into a collection of length 4.
-		let b2: BoundedBTreeSet<u32, ConstU32<4>> = b1.iter().map(|k| k + 1).try_collect().unwrap();
-		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3, 4, 5]);
-
-		// can be mutated further into iterators that are `ExactSizedIterator`.
-		let b2: BoundedBTreeSet<u32, ConstU32<5>> =
-			b1.iter().map(|k| k + 1).rev().skip(2).try_collect().unwrap();
-		// note that the binary tree will re-sort this, so rev() is not really seen
-		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3]);
-
-		let b2: BoundedBTreeSet<u32, ConstU32<5>> =
-			b1.iter().map(|k| k + 1).take(2).try_collect().unwrap();
-		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3]);
-
-		// but these worn't work
-		let b2: Result<BoundedBTreeSet<u32, ConstU32<3>>, _> =
-			b1.iter().map(|k| k + 1).try_collect();
-		assert!(b2.is_err());
-
-		let b2: Result<BoundedBTreeSet<u32, ConstU32<1>>, _> =
-			b1.iter().map(|k| k + 1).skip(2).try_collect();
-		assert!(b2.is_err());
-	}
-
-	#[test]
-	fn eq_works() {
-		// of same type
-		let b1 = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2]);
-		let b2 = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2]);
-		assert_eq!(b1, b2);
-
-		// of different type, but same value and bound.
-		crate::parameter_types! {
-			B1: u32 = 7;
-			B2: u32 = 7;
-		}
-		let b1 = boundedset_from_keys::<u32, B1>(&[1, 2]);
-		let b2 = boundedset_from_keys::<u32, B2>(&[1, 2]);
-		assert_eq!(b1, b2);
-	}
 }
diff --git a/substrate/frame/support/src/storage/bounded_vec.rs b/substrate/frame/support/src/storage/bounded_vec.rs
index 82ae36a82bf..1fa01b44ae6 100644
--- a/substrate/frame/support/src/storage/bounded_vec.rs
+++ b/substrate/frame/support/src/storage/bounded_vec.rs
@@ -20,641 +20,9 @@
 
 use crate::{
 	storage::{StorageDecodeLength, StorageTryAppend},
-	traits::{Get, TryCollect},
-	WeakBoundedVec,
+	traits::Get,
 };
-use codec::{Decode, Encode, EncodeLike, MaxEncodedLen};
-use core::{
-	ops::{Deref, Index, IndexMut, RangeBounds},
-	slice::SliceIndex,
-};
-#[cfg(feature = "std")]
-use serde::{
-	de::{Error, SeqAccess, Visitor},
-	Deserialize, Deserializer, Serialize,
-};
-use sp_std::{marker::PhantomData, prelude::*};
-
-/// A bounded vector.
-///
-/// It has implementations for efficient append and length decoding, as with a normal `Vec<_>`, once
-/// put into storage as a raw value, map or double-map.
-///
-/// As the name suggests, the length of the queue is always bounded. All internal operations ensure
-/// this bound is respected.
-#[cfg_attr(feature = "std", derive(Serialize), serde(transparent))]
-#[derive(Encode, scale_info::TypeInfo)]
-#[scale_info(skip_type_params(S))]
-pub struct BoundedVec<T, S>(
-	Vec<T>,
-	#[cfg_attr(feature = "std", serde(skip_serializing))] PhantomData<S>,
-);
-
-#[cfg(feature = "std")]
-impl<'de, T, S: Get<u32>> Deserialize<'de> for BoundedVec<T, S>
-where
-	T: Deserialize<'de>,
-{
-	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-	where
-		D: Deserializer<'de>,
-	{
-		struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);
-
-		impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
-		where
-			T: Deserialize<'de>,
-		{
-			type Value = Vec<T>;
-
-			fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
-				formatter.write_str("a sequence")
-			}
-
-			fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
-			where
-				A: SeqAccess<'de>,
-			{
-				let size = seq.size_hint().unwrap_or(0);
-				let max = match usize::try_from(S::get()) {
-					Ok(n) => n,
-					Err(_) => return Err(A::Error::custom("can't convert to usize")),
-				};
-				if size > max {
-					Err(A::Error::custom("out of bounds"))
-				} else {
-					let mut values = Vec::with_capacity(size);
-
-					while let Some(value) = seq.next_element()? {
-						values.push(value);
-						if values.len() > max {
-							return Err(A::Error::custom("out of bounds"))
-						}
-					}
-
-					Ok(values)
-				}
-			}
-		}
-
-		let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
-		deserializer
-			.deserialize_seq(visitor)
-			.map(|v| BoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
-	}
-}
-
-/// A bounded slice.
-///
-/// Similar to a `BoundedVec`, but not owned and cannot be decoded.
-#[derive(Encode, scale_info::TypeInfo)]
-#[scale_info(skip_type_params(S))]
-pub struct BoundedSlice<'a, T, S>(&'a [T], PhantomData<S>);
-
-// `BoundedSlice`s encode to something which will always decode into a `BoundedVec`,
-// `WeakBoundedVec`, or a `Vec`.
-impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<BoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
-impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<WeakBoundedVec<T, S>>
-	for BoundedSlice<'a, T, S>
-{
-}
-impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedSlice<'a, T, S> {}
-
-impl<T: PartialOrd, Bound: Get<u32>> PartialOrd for BoundedVec<T, Bound> {
-	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
-		self.0.partial_cmp(&other.0)
-	}
-}
-
-impl<T: Ord, Bound: Get<u32>> Ord for BoundedVec<T, Bound> {
-	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
-		self.0.cmp(&other.0)
-	}
-}
-
-impl<'a, T, S: Get<u32>> TryFrom<&'a [T]> for BoundedSlice<'a, T, S> {
-	type Error = ();
-	fn try_from(t: &'a [T]) -> Result<Self, Self::Error> {
-		if t.len() <= S::get() as usize {
-			Ok(BoundedSlice(t, PhantomData))
-		} else {
-			Err(())
-		}
-	}
-}
-
-impl<'a, T, S> From<BoundedSlice<'a, T, S>> for &'a [T] {
-	fn from(t: BoundedSlice<'a, T, S>) -> Self {
-		t.0
-	}
-}
-
-impl<'a, T, S> sp_std::iter::IntoIterator for BoundedSlice<'a, T, S> {
-	type Item = &'a T;
-	type IntoIter = sp_std::slice::Iter<'a, T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter()
-	}
-}
-
-impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
-	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
-		let inner = Vec::<T>::decode(input)?;
-		if inner.len() > S::get() as usize {
-			return Err("BoundedVec exceeds its limit".into())
-		}
-		Ok(Self(inner, PhantomData))
-	}
-
-	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
-		Vec::<T>::skip(input)
-	}
-}
-
-// `BoundedVec`s encode to something which will always decode as a `Vec`.
-impl<T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedVec<T, S> {}
-
-impl<T, S> BoundedVec<T, S> {
-	/// Create `Self` from `t` without any checks.
-	fn unchecked_from(t: Vec<T>) -> Self {
-		Self(t, Default::default())
-	}
-
-	/// Consume self, and return the inner `Vec`. Henceforth, the `Vec<_>` can be altered in an
-	/// arbitrary way. At some point, if the reverse conversion is required, `TryFrom<Vec<_>>` can
-	/// be used.
-	///
-	/// This is useful for cases if you need access to an internal API of the inner `Vec<_>` which
-	/// is not provided by the wrapper `BoundedVec`.
-	pub fn into_inner(self) -> Vec<T> {
-		self.0
-	}
-
-	/// Exactly the same semantics as [`slice::sort_by`].
-	///
-	/// This is safe since sorting cannot change the number of elements in the vector.
-	pub fn sort_by<F>(&mut self, compare: F)
-	where
-		F: FnMut(&T, &T) -> sp_std::cmp::Ordering,
-	{
-		self.0.sort_by(compare)
-	}
-
-	/// Exactly the same semantics as [`slice::sort`].
-	///
-	/// This is safe since sorting cannot change the number of elements in the vector.
-	pub fn sort(&mut self)
-	where
-		T: sp_std::cmp::Ord,
-	{
-		self.0.sort()
-	}
-
-	/// Exactly the same semantics as `Vec::remove`.
-	///
-	/// # Panics
-	///
-	/// Panics if `index` is out of bounds.
-	pub fn remove(&mut self, index: usize) -> T {
-		self.0.remove(index)
-	}
-
-	/// Exactly the same semantics as `slice::swap_remove`.
-	///
-	/// # Panics
-	///
-	/// Panics if `index` is out of bounds.
-	pub fn swap_remove(&mut self, index: usize) -> T {
-		self.0.swap_remove(index)
-	}
-
-	/// Exactly the same semantics as `Vec::retain`.
-	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
-		self.0.retain(f)
-	}
-
-	/// Exactly the same semantics as `slice::get_mut`.
-	pub fn get_mut<I: SliceIndex<[T]>>(
-		&mut self,
-		index: I,
-	) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
-		self.0.get_mut(index)
-	}
-
-	/// Exactly the same semantics as `Vec::truncate`.
-	///
-	/// This is safe because `truncate` can never increase the length of the internal vector.
-	pub fn truncate(&mut self, s: usize) {
-		self.0.truncate(s);
-	}
-
-	/// Exactly the same semantics as `Vec::pop`.
-	///
-	/// This is safe since popping can only shrink the inner vector.
-	pub fn pop(&mut self) -> Option<T> {
-		self.0.pop()
-	}
-
-	/// Exactly the same semantics as [`slice::iter_mut`].
-	pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
-		self.0.iter_mut()
-	}
-
-	/// Exactly the same semantics as [`slice::last_mut`].
-	pub fn last_mut(&mut self) -> Option<&mut T> {
-		self.0.last_mut()
-	}
-
-	/// Exact same semantics as [`Vec::drain`].
-	pub fn drain<R>(&mut self, range: R) -> sp_std::vec::Drain<'_, T>
-	where
-		R: RangeBounds<usize>,
-	{
-		self.0.drain(range)
-	}
-}
-
-impl<T, S: Get<u32>> From<BoundedVec<T, S>> for Vec<T> {
-	fn from(x: BoundedVec<T, S>) -> Vec<T> {
-		x.0
-	}
-}
-
-impl<T, S: Get<u32>> BoundedVec<T, S> {
-	/// Pre-allocate `capacity` items in self.
-	///
-	/// If `capacity` is greater than [`Self::bound`], then the minimum of the two is used.
-	pub fn with_bounded_capacity(capacity: usize) -> Self {
-		let capacity = capacity.min(Self::bound());
-		Self(Vec::with_capacity(capacity), Default::default())
-	}
-
-	/// Allocate self with the maximum possible capacity.
-	pub fn with_max_capacity() -> Self {
-		Self::with_bounded_capacity(Self::bound())
-	}
-
-	/// Consume and truncate the vector `v` in order to create a new instance of `Self` from it.
-	pub fn truncate_from(mut v: Vec<T>) -> Self {
-		v.truncate(Self::bound());
-		Self::unchecked_from(v)
-	}
-
-	/// Get the bound of the type in `usize`.
-	pub fn bound() -> usize {
-		S::get() as usize
-	}
-
-	/// Returns true of this collection is full.
-	pub fn is_full(&self) -> bool {
-		self.len() >= Self::bound()
-	}
-
-	/// Forces the insertion of `element` into `self` retaining all items with index at least
-	/// `index`.
-	///
-	/// If `index == 0` and `self.len() == Self::bound()`, then this is a no-op.
-	///
-	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
-	///
-	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
-	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(())` if
-	/// `element` cannot be inserted.
-	pub fn force_insert_keep_right(
-		&mut self,
-		index: usize,
-		mut element: T,
-	) -> Result<Option<T>, ()> {
-		// Check against panics.
-		if Self::bound() < index || self.len() < index {
-			Err(())
-		} else if self.len() < Self::bound() {
-			// Cannot panic since self.len() >= index;
-			self.0.insert(index, element);
-			Ok(None)
-		} else {
-			if index == 0 {
-				return Err(())
-			}
-			sp_std::mem::swap(&mut self[0], &mut element);
-			// `[0..index] cannot panic since self.len() >= index.
-			// `rotate_left(1)` cannot panic because there is at least 1 element.
-			self[0..index].rotate_left(1);
-			Ok(Some(element))
-		}
-	}
-
-	/// Forces the insertion of `element` into `self` retaining all items with index at most
-	/// `index`.
-	///
-	/// If `index == Self::bound()` and `self.len() == Self::bound()`, then this is a no-op.
-	///
-	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
-	///
-	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
-	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(())` if
-	/// `element` cannot be inserted.
-	pub fn force_insert_keep_left(&mut self, index: usize, element: T) -> Result<Option<T>, ()> {
-		// Check against panics.
-		if Self::bound() < index || self.len() < index || Self::bound() == 0 {
-			return Err(())
-		}
-		// Noop condition.
-		if Self::bound() == index && self.len() <= Self::bound() {
-			return Err(())
-		}
-		let maybe_removed = if self.is_full() {
-			// defensive-only: since we are at capacity, this is a noop.
-			self.0.truncate(Self::bound());
-			// if we truncate anything, it will be the last one.
-			self.0.pop()
-		} else {
-			None
-		};
-
-		// Cannot panic since `self.len() >= index`;
-		self.0.insert(index, element);
-		Ok(maybe_removed)
-	}
-
-	/// Move the position of an item from one location to another in the slice.
-	///
-	/// Except for the item being moved, the order of the slice remains the same.
-	///
-	/// - `index` is the location of the item to be moved.
-	/// - `insert_position` is the index of the item in the slice which should *immediately follow*
-	///   the item which is being moved.
-	///
-	/// Returns `true` of the operation was successful, otherwise `false` if a noop.
-	pub fn slide(&mut self, index: usize, insert_position: usize) -> bool {
-		// Check against panics.
-		if self.len() <= index || self.len() < insert_position || index == usize::MAX {
-			return false
-		}
-		// Noop conditions.
-		if index == insert_position || index + 1 == insert_position {
-			return false
-		}
-		if insert_position < index && index < self.len() {
-			// --- --- --- === === === === @@@ --- --- ---
-			//            ^-- N            ^O^
-			// ...
-			//               /-----<<<-----\
-			// --- --- --- === === === === @@@ --- --- ---
-			//               >>> >>> >>> >>>
-			// ...
-			// --- --- --- @@@ === === === === --- --- ---
-			//             ^N^
-			self[insert_position..index + 1].rotate_right(1);
-			return true
-		} else if insert_position > 0 && index + 1 < insert_position {
-			// Note that the apparent asymmetry of these two branches is due to the
-			// fact that the "new" position is the position to be inserted *before*.
-			// --- --- --- @@@ === === === === --- --- ---
-			//             ^O^                ^-- N
-			// ...
-			//               /----->>>-----\
-			// --- --- --- @@@ === === === === --- --- ---
-			//               <<< <<< <<< <<<
-			// ...
-			// --- --- --- === === === === @@@ --- --- ---
-			//                             ^N^
-			self[index..insert_position].rotate_left(1);
-			return true
-		}
-
-		debug_assert!(false, "all noop conditions should have been covered above");
-		false
-	}
-
-	/// Forces the insertion of `s` into `self` truncating first if necessary.
-	///
-	/// Infallible, but if the bound is zero, then it's a no-op.
-	pub fn force_push(&mut self, element: T) {
-		if Self::bound() > 0 {
-			self.0.truncate(Self::bound() as usize - 1);
-			self.0.push(element);
-		}
-	}
-
-	/// Same as `Vec::resize`, but if `size` is more than [`Self::bound`], then [`Self::bound`] is
-	/// used.
-	pub fn bounded_resize(&mut self, size: usize, value: T)
-	where
-		T: Clone,
-	{
-		let size = size.min(Self::bound());
-		self.0.resize(size, value);
-	}
-
-	/// Exactly the same semantics as [`Vec::extend`], but returns an error and does nothing if the
-	/// length of the outcome is larger than the bound.
-	pub fn try_extend(
-		&mut self,
-		with: impl IntoIterator<Item = T> + ExactSizeIterator,
-	) -> Result<(), ()> {
-		if with.len().saturating_add(self.len()) <= Self::bound() {
-			self.0.extend(with);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-
-	/// Exactly the same semantics as [`Vec::append`], but returns an error and does nothing if the
-	/// length of the outcome is larger than the bound.
-	pub fn try_append(&mut self, other: &mut Vec<T>) -> Result<(), ()> {
-		if other.len().saturating_add(self.len()) <= Self::bound() {
-			self.0.append(other);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-
-	/// Consumes self and mutates self via the given `mutate` function.
-	///
-	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
-	/// returned.
-	///
-	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
-	/// [`Self::try_from`].
-	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
-		mutate(&mut self.0);
-		(self.0.len() <= Self::bound()).then(move || self)
-	}
-
-	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
-	/// new length of the vector exceeds `S`.
-	///
-	/// # Panics
-	///
-	/// Panics if `index > len`.
-	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), ()> {
-		if self.len() < Self::bound() {
-			self.0.insert(index, element);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-
-	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
-	/// new length of the vector exceeds `S`.
-	///
-	/// # Panics
-	///
-	/// Panics if the new capacity exceeds isize::MAX bytes.
-	pub fn try_push(&mut self, element: T) -> Result<(), ()> {
-		if self.len() < Self::bound() {
-			self.0.push(element);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-}
-
-impl<T, S> Default for BoundedVec<T, S> {
-	fn default() -> Self {
-		// the bound cannot be below 0, which is satisfied by an empty vector
-		Self::unchecked_from(Vec::default())
-	}
-}
-
-impl<T, S> sp_std::fmt::Debug for BoundedVec<T, S>
-where
-	T: sp_std::fmt::Debug,
-	S: Get<u32>,
-{
-	fn fmt(&self, f: &mut sp_std::fmt::Formatter<'_>) -> sp_std::fmt::Result {
-		f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish()
-	}
-}
-
-impl<T, S> Clone for BoundedVec<T, S>
-where
-	T: Clone,
-{
-	fn clone(&self) -> Self {
-		// bound is retained
-		Self::unchecked_from(self.0.clone())
-	}
-}
-
-impl<T, S: Get<u32>> TryFrom<Vec<T>> for BoundedVec<T, S> {
-	type Error = ();
-	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
-		if t.len() <= Self::bound() {
-			// explicit check just above
-			Ok(Self::unchecked_from(t))
-		} else {
-			Err(())
-		}
-	}
-}
-
-// It is okay to give a non-mutable reference of the inner vec to anyone.
-impl<T, S> AsRef<Vec<T>> for BoundedVec<T, S> {
-	fn as_ref(&self) -> &Vec<T> {
-		&self.0
-	}
-}
-
-impl<T, S> AsRef<[T]> for BoundedVec<T, S> {
-	fn as_ref(&self) -> &[T] {
-		&self.0
-	}
-}
-
-impl<T, S> AsMut<[T]> for BoundedVec<T, S> {
-	fn as_mut(&mut self) -> &mut [T] {
-		&mut self.0
-	}
-}
-
-// will allow for immutable all operations of `Vec<T>` on `BoundedVec<T>`.
-impl<T, S> Deref for BoundedVec<T, S> {
-	type Target = Vec<T>;
-
-	fn deref(&self) -> &Self::Target {
-		&self.0
-	}
-}
-
-// Allows for indexing similar to a normal `Vec`. Can panic if out of bound.
-impl<T, S, I> Index<I> for BoundedVec<T, S>
-where
-	I: SliceIndex<[T]>,
-{
-	type Output = I::Output;
-
-	#[inline]
-	fn index(&self, index: I) -> &Self::Output {
-		self.0.index(index)
-	}
-}
-
-impl<T, S, I> IndexMut<I> for BoundedVec<T, S>
-where
-	I: SliceIndex<[T]>,
-{
-	#[inline]
-	fn index_mut(&mut self, index: I) -> &mut Self::Output {
-		self.0.index_mut(index)
-	}
-}
-
-impl<T, S> sp_std::iter::IntoIterator for BoundedVec<T, S> {
-	type Item = T;
-	type IntoIter = sp_std::vec::IntoIter<T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.into_iter()
-	}
-}
-
-impl<'a, T, S> sp_std::iter::IntoIterator for &'a BoundedVec<T, S> {
-	type Item = &'a T;
-	type IntoIter = sp_std::slice::Iter<'a, T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter()
-	}
-}
-
-impl<'a, T, S> sp_std::iter::IntoIterator for &'a mut BoundedVec<T, S> {
-	type Item = &'a mut T;
-	type IntoIter = sp_std::slice::IterMut<'a, T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter_mut()
-	}
-}
-
-impl<T, S> codec::DecodeLength for BoundedVec<T, S> {
-	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
-		// `BoundedVec<T, _>` stored just a `Vec<T>`, thus the length is at the beginning in
-		// `Compact` form, and same implementation as `Vec<T>` can be used.
-		<Vec<T> as codec::DecodeLength>::len(self_encoded)
-	}
-}
-
-impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
-where
-	T: PartialEq,
-	BoundSelf: Get<u32>,
-	BoundRhs: Get<u32>,
-{
-	fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
-		BoundSelf::get() == BoundRhs::get() && self.0 == rhs.0
-	}
-}
-
-impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for BoundedVec<T, S> {
-	fn eq(&self, other: &Vec<T>) -> bool {
-		&self.0 == other
-	}
-}
-
-impl<T, S: Get<u32>> Eq for BoundedVec<T, S> where T: Eq {}
+pub use sp_runtime::{BoundedSlice, BoundedVec};
 
 impl<T, S> StorageDecodeLength for BoundedVec<T, S> {}
 
@@ -664,38 +32,6 @@ impl<T, S: Get<u32>> StorageTryAppend<T> for BoundedVec<T, S> {
 	}
 }
 
-impl<T, S> MaxEncodedLen for BoundedVec<T, S>
-where
-	T: MaxEncodedLen,
-	S: Get<u32>,
-	BoundedVec<T, S>: Encode,
-{
-	fn max_encoded_len() -> usize {
-		// BoundedVec<T, S> encodes like Vec<T> which encodes like [T], which is a compact u32
-		// plus each item in the slice:
-		// https://docs.substrate.io/v3/advanced/scale-codec
-		codec::Compact(S::get())
-			.encoded_size()
-			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
-	}
-}
-
-impl<I, T, Bound> TryCollect<BoundedVec<T, Bound>> for I
-where
-	I: ExactSizeIterator + Iterator<Item = T>,
-	Bound: Get<u32>,
-{
-	type Error = &'static str;
-
-	fn try_collect(self) -> Result<BoundedVec<T, Bound>, Self::Error> {
-		if self.len() > Bound::get() as usize {
-			Err("iterator length too big")
-		} else {
-			Ok(BoundedVec::<T, Bound>::unchecked_from(self.collect::<Vec<T>>()))
-		}
-	}
-}
-
 #[cfg(test)]
 pub mod test {
 	use super::*;
@@ -712,108 +48,6 @@ pub mod test {
 	type FooDoubleMap =
 		StorageDoubleMap<Prefix, Twox128, u32, Twox128, u32, BoundedVec<u32, ConstU32<7>>>;
 
-	#[test]
-	fn slide_works() {
-		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
-		assert!(b.slide(1, 5));
-		assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
-		assert!(b.slide(4, 0));
-		assert_eq!(*b, vec![1, 0, 2, 3, 4, 5]);
-		assert!(b.slide(0, 2));
-		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
-		assert!(b.slide(1, 6));
-		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
-		assert!(b.slide(0, 6));
-		assert_eq!(*b, vec![2, 3, 4, 5, 1, 0]);
-		assert!(b.slide(5, 0));
-		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
-		assert!(!b.slide(6, 0));
-		assert!(!b.slide(7, 0));
-		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
-
-		let mut c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
-		assert!(!c.slide(1, 5));
-		assert_eq!(*c, vec![0, 1, 2]);
-		assert!(!c.slide(4, 0));
-		assert_eq!(*c, vec![0, 1, 2]);
-		assert!(!c.slide(3, 0));
-		assert_eq!(*c, vec![0, 1, 2]);
-		assert!(c.slide(2, 0));
-		assert_eq!(*c, vec![2, 0, 1]);
-	}
-
-	#[test]
-	fn slide_noops_work() {
-		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
-		assert!(!b.slide(3, 3));
-		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
-		assert!(!b.slide(3, 4));
-		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
-	}
-
-	#[test]
-	fn force_insert_keep_left_works() {
-		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
-		assert_eq!(b.force_insert_keep_left(1, 10), Err(()));
-		assert!(b.is_empty());
-
-		assert_eq!(b.force_insert_keep_left(0, 30), Ok(None));
-		assert_eq!(b.force_insert_keep_left(0, 10), Ok(None));
-		assert_eq!(b.force_insert_keep_left(1, 20), Ok(None));
-		assert_eq!(b.force_insert_keep_left(3, 40), Ok(None));
-		assert_eq!(*b, vec![10, 20, 30, 40]);
-		// at capacity.
-		assert_eq!(b.force_insert_keep_left(4, 41), Err(()));
-		assert_eq!(*b, vec![10, 20, 30, 40]);
-		assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
-		assert_eq!(*b, vec![10, 20, 30, 31]);
-		assert_eq!(b.force_insert_keep_left(1, 11), Ok(Some(31)));
-		assert_eq!(*b, vec![10, 11, 20, 30]);
-		assert_eq!(b.force_insert_keep_left(0, 1), Ok(Some(30)));
-		assert_eq!(*b, vec![1, 10, 11, 20]);
-
-		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
-		assert!(z.is_empty());
-		assert_eq!(z.force_insert_keep_left(0, 10), Err(()));
-		assert!(z.is_empty());
-	}
-
-	#[test]
-	fn force_insert_keep_right_works() {
-		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
-		assert_eq!(b.force_insert_keep_right(1, 10), Err(()));
-		assert!(b.is_empty());
-
-		assert_eq!(b.force_insert_keep_right(0, 30), Ok(None));
-		assert_eq!(b.force_insert_keep_right(0, 10), Ok(None));
-		assert_eq!(b.force_insert_keep_right(1, 20), Ok(None));
-		assert_eq!(b.force_insert_keep_right(3, 40), Ok(None));
-		assert_eq!(*b, vec![10, 20, 30, 40]);
-
-		// at capacity.
-		assert_eq!(b.force_insert_keep_right(0, 0), Err(()));
-		assert_eq!(*b, vec![10, 20, 30, 40]);
-		assert_eq!(b.force_insert_keep_right(1, 11), Ok(Some(10)));
-		assert_eq!(*b, vec![11, 20, 30, 40]);
-		assert_eq!(b.force_insert_keep_right(3, 31), Ok(Some(11)));
-		assert_eq!(*b, vec![20, 30, 31, 40]);
-		assert_eq!(b.force_insert_keep_right(4, 41), Ok(Some(20)));
-		assert_eq!(*b, vec![30, 31, 40, 41]);
-
-		assert_eq!(b.force_insert_keep_right(5, 69), Err(()));
-		assert_eq!(*b, vec![30, 31, 40, 41]);
-
-		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
-		assert!(z.is_empty());
-		assert_eq!(z.force_insert_keep_right(0, 10), Err(()));
-		assert!(z.is_empty());
-	}
-
-	#[test]
-	fn bound_returns_correct_value() {
-		assert_eq!(BoundedVec::<u32, ConstU32<7>>::bound(), 7);
-	}
-
 	#[test]
 	fn decode_len_works() {
 		TestExternalities::default().execute_with(|| {
@@ -839,210 +73,4 @@ pub mod test {
 			assert!(FooDoubleMap::decode_len(2, 2).is_none());
 		});
 	}
-
-	#[test]
-	fn try_insert_works() {
-		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
-		bounded.try_insert(1, 0).unwrap();
-		assert_eq!(*bounded, vec![1, 0, 2, 3]);
-
-		assert!(bounded.try_insert(0, 9).is_err());
-		assert_eq!(*bounded, vec![1, 0, 2, 3]);
-	}
-
-	#[test]
-	fn constructor_macro_works() {
-		use frame_support::bounded_vec;
-
-		// With values. Use some brackets to make sure the macro doesn't expand.
-		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2), (1, 2), (1, 2)];
-		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
-
-		// With repetition.
-		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2); 3];
-		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
-	}
-
-	#[test]
-	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
-	fn try_inert_panics_if_oob() {
-		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
-		bounded.try_insert(9, 0).unwrap();
-	}
-
-	#[test]
-	fn try_push_works() {
-		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
-		bounded.try_push(0).unwrap();
-		assert_eq!(*bounded, vec![1, 2, 3, 0]);
-
-		assert!(bounded.try_push(9).is_err());
-	}
-
-	#[test]
-	fn deref_coercion_works() {
-		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
-		// these methods come from deref-ed vec.
-		assert_eq!(bounded.len(), 3);
-		assert!(bounded.iter().next().is_some());
-		assert!(!bounded.is_empty());
-	}
-
-	#[test]
-	fn try_mutate_works() {
-		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
-		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
-		assert_eq!(bounded.len(), 7);
-		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
-	}
-
-	#[test]
-	fn slice_indexing_works() {
-		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
-		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
-	}
-
-	#[test]
-	fn vec_eq_works() {
-		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
-		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
-	}
-
-	#[test]
-	fn too_big_vec_fail_to_decode() {
-		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
-		assert_eq!(
-			BoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
-			Err("BoundedVec exceeds its limit".into()),
-		);
-	}
-
-	#[test]
-	fn can_be_collected() {
-		let b1: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3, 4];
-		let b2: BoundedVec<u32, ConstU32<5>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
-		assert_eq!(b2, vec![2, 3, 4, 5]);
-
-		// can also be collected into a collection of length 4.
-		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
-		assert_eq!(b2, vec![2, 3, 4, 5]);
-
-		// can be mutated further into iterators that are `ExactSizedIterator`.
-		let b2: BoundedVec<u32, ConstU32<4>> =
-			b1.iter().map(|x| x + 1).rev().try_collect().unwrap();
-		assert_eq!(b2, vec![5, 4, 3, 2]);
-
-		let b2: BoundedVec<u32, ConstU32<4>> =
-			b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
-		assert_eq!(b2, vec![3, 2]);
-		let b2: BoundedVec<u32, ConstU32<2>> =
-			b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
-		assert_eq!(b2, vec![3, 2]);
-
-		let b2: BoundedVec<u32, ConstU32<4>> =
-			b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
-		assert_eq!(b2, vec![5, 4]);
-		let b2: BoundedVec<u32, ConstU32<2>> =
-			b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
-		assert_eq!(b2, vec![5, 4]);
-
-		// but these worn't work
-		let b2: Result<BoundedVec<u32, ConstU32<3>>, _> = b1.iter().map(|x| x + 1).try_collect();
-		assert!(b2.is_err());
-
-		let b2: Result<BoundedVec<u32, ConstU32<1>>, _> =
-			b1.iter().map(|x| x + 1).rev().take(2).try_collect();
-		assert!(b2.is_err());
-	}
-
-	#[test]
-	fn eq_works() {
-		// of same type
-		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
-		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
-		assert_eq!(b1, b2);
-
-		// of different type, but same value and bound.
-		crate::parameter_types! {
-			B1: u32 = 7;
-			B2: u32 = 7;
-		}
-		let b1: BoundedVec<u32, B1> = bounded_vec![1, 2, 3];
-		let b2: BoundedVec<u32, B2> = bounded_vec![1, 2, 3];
-		assert_eq!(b1, b2);
-	}
-
-	#[test]
-	fn ord_works() {
-		use std::cmp::Ordering;
-		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
-		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 3, 2];
-
-		// ordering for vec is lexicographic.
-		assert_eq!(b1.cmp(&b2), Ordering::Less);
-		assert_eq!(b1.cmp(&b2), b1.into_inner().cmp(&b2.into_inner()));
-	}
-
-	#[test]
-	fn try_extend_works() {
-		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
-
-		assert!(b.try_extend(vec![4].into_iter()).is_ok());
-		assert_eq!(*b, vec![1, 2, 3, 4]);
-
-		assert!(b.try_extend(vec![5].into_iter()).is_ok());
-		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
-
-		assert!(b.try_extend(vec![6].into_iter()).is_err());
-		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
-
-		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
-		assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
-		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
-
-		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
-		assert!(b.try_extend(vec![4, 5, 6].into_iter()).is_err());
-		assert_eq!(*b, vec![1, 2, 3]);
-	}
-
-	#[test]
-	fn test_serializer() {
-		let c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
-		assert_eq!(serde_json::json!(&c).to_string(), r#"[0,1,2]"#);
-	}
-
-	#[test]
-	fn test_deserializer() {
-		let c: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();
-
-		assert_eq!(c.len(), 3);
-		assert_eq!(c[0], 0);
-		assert_eq!(c[1], 1);
-		assert_eq!(c[2], 2);
-	}
-
-	#[test]
-	fn test_deserializer_failed() {
-		let c: Result<BoundedVec<u32, ConstU32<4>>, serde_json::error::Error> =
-			serde_json::from_str(r#"[0,1,2,3,4,5]"#);
-
-		match c {
-			Err(msg) => assert_eq!(msg.to_string(), "out of bounds at line 1 column 11"),
-			_ => unreachable!("deserializer must raise error"),
-		}
-	}
-
-	#[test]
-	fn bounded_vec_try_from_works() {
-		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0]).is_ok());
-		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1]).is_ok());
-		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1, 2]).is_err());
-	}
-
-	#[test]
-	fn bounded_slice_try_from_works() {
-		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0][..]).is_ok());
-		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1][..]).is_ok());
-		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1, 2][..]).is_err());
-	}
 }
diff --git a/substrate/frame/support/src/storage/weak_bounded_vec.rs b/substrate/frame/support/src/storage/weak_bounded_vec.rs
index bf9b9a10172..72ba8d775a1 100644
--- a/substrate/frame/support/src/storage/weak_bounded_vec.rs
+++ b/substrate/frame/support/src/storage/weak_bounded_vec.rs
@@ -22,289 +22,7 @@ use crate::{
 	storage::{StorageDecodeLength, StorageTryAppend},
 	traits::Get,
 };
-use codec::{Decode, Encode, MaxEncodedLen};
-use core::{
-	ops::{Deref, Index, IndexMut},
-	slice::SliceIndex,
-};
-use sp_std::{marker::PhantomData, prelude::*};
-
-/// A weakly bounded vector.
-///
-/// It has implementations for efficient append and length decoding, as with a normal `Vec<_>`, once
-/// put into storage as a raw value, map or double-map.
-///
-/// The length of the vec is not strictly bounded. Decoding a vec with more element that the bound
-/// is accepted, and some method allow to bypass the restriction with warnings.
-#[derive(Encode, scale_info::TypeInfo)]
-#[scale_info(skip_type_params(S))]
-pub struct WeakBoundedVec<T, S>(Vec<T>, PhantomData<S>);
-
-impl<T: Decode, S: Get<u32>> Decode for WeakBoundedVec<T, S> {
-	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
-		let inner = Vec::<T>::decode(input)?;
-		Ok(Self::force_from(inner, Some("decode")))
-	}
-
-	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
-		Vec::<T>::skip(input)
-	}
-}
-
-impl<T, S> WeakBoundedVec<T, S> {
-	/// Create `Self` from `t` without any checks.
-	fn unchecked_from(t: Vec<T>) -> Self {
-		Self(t, Default::default())
-	}
-
-	/// Consume self, and return the inner `Vec`. Henceforth, the `Vec<_>` can be altered in an
-	/// arbitrary way. At some point, if the reverse conversion is required, `TryFrom<Vec<_>>` can
-	/// be used.
-	///
-	/// This is useful for cases if you need access to an internal API of the inner `Vec<_>` which
-	/// is not provided by the wrapper `WeakBoundedVec`.
-	pub fn into_inner(self) -> Vec<T> {
-		self.0
-	}
-
-	/// Exactly the same semantics as [`Vec::remove`].
-	///
-	/// # Panics
-	///
-	/// Panics if `index` is out of bounds.
-	pub fn remove(&mut self, index: usize) -> T {
-		self.0.remove(index)
-	}
-
-	/// Exactly the same semantics as [`Vec::swap_remove`].
-	///
-	/// # Panics
-	///
-	/// Panics if `index` is out of bounds.
-	pub fn swap_remove(&mut self, index: usize) -> T {
-		self.0.swap_remove(index)
-	}
-
-	/// Exactly the same semantics as [`Vec::retain`].
-	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
-		self.0.retain(f)
-	}
-
-	/// Exactly the same semantics as [`slice::get_mut`].
-	pub fn get_mut<I: SliceIndex<[T]>>(
-		&mut self,
-		index: I,
-	) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
-		self.0.get_mut(index)
-	}
-}
-
-impl<T, S: Get<u32>> WeakBoundedVec<T, S> {
-	/// Get the bound of the type in `usize`.
-	pub fn bound() -> usize {
-		S::get() as usize
-	}
-
-	/// Create `Self` from `t` without any checks. Logs warnings if the bound is not being
-	/// respected. The additional scope can be used to indicate where a potential overflow is
-	/// happening.
-	pub fn force_from(t: Vec<T>, scope: Option<&'static str>) -> Self {
-		if t.len() > Self::bound() {
-			log::warn!(
-				target: crate::LOG_TARGET,
-				"length of a bounded vector in scope {} is not respected.",
-				scope.unwrap_or("UNKNOWN"),
-			);
-		}
-
-		Self::unchecked_from(t)
-	}
-
-	/// Consumes self and mutates self via the given `mutate` function.
-	///
-	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
-	/// returned.
-	///
-	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
-	/// [`Self::try_from`].
-	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
-		mutate(&mut self.0);
-		(self.0.len() <= Self::bound()).then(move || self)
-	}
-
-	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
-	/// new length of the vector exceeds `S`.
-	///
-	/// # Panics
-	///
-	/// Panics if `index > len`.
-	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), ()> {
-		if self.len() < Self::bound() {
-			self.0.insert(index, element);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-
-	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
-	/// new length of the vector exceeds `S`.
-	///
-	/// # Panics
-	///
-	/// Panics if the new capacity exceeds isize::MAX bytes.
-	pub fn try_push(&mut self, element: T) -> Result<(), ()> {
-		if self.len() < Self::bound() {
-			self.0.push(element);
-			Ok(())
-		} else {
-			Err(())
-		}
-	}
-}
-
-impl<T, S> Default for WeakBoundedVec<T, S> {
-	fn default() -> Self {
-		// the bound cannot be below 0, which is satisfied by an empty vector
-		Self::unchecked_from(Vec::default())
-	}
-}
-
-#[cfg(feature = "std")]
-impl<T, S> std::fmt::Debug for WeakBoundedVec<T, S>
-where
-	T: std::fmt::Debug,
-	S: Get<u32>,
-{
-	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		f.debug_tuple("WeakBoundedVec").field(&self.0).field(&Self::bound()).finish()
-	}
-}
-
-impl<T, S> Clone for WeakBoundedVec<T, S>
-where
-	T: Clone,
-{
-	fn clone(&self) -> Self {
-		// bound is retained
-		Self::unchecked_from(self.0.clone())
-	}
-}
-
-impl<T, S: Get<u32>> TryFrom<Vec<T>> for WeakBoundedVec<T, S> {
-	type Error = ();
-	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
-		if t.len() <= Self::bound() {
-			// explicit check just above
-			Ok(Self::unchecked_from(t))
-		} else {
-			Err(())
-		}
-	}
-}
-
-// It is okay to give a non-mutable reference of the inner vec to anyone.
-impl<T, S> AsRef<Vec<T>> for WeakBoundedVec<T, S> {
-	fn as_ref(&self) -> &Vec<T> {
-		&self.0
-	}
-}
-
-impl<T, S> AsRef<[T]> for WeakBoundedVec<T, S> {
-	fn as_ref(&self) -> &[T] {
-		&self.0
-	}
-}
-
-impl<T, S> AsMut<[T]> for WeakBoundedVec<T, S> {
-	fn as_mut(&mut self) -> &mut [T] {
-		&mut self.0
-	}
-}
-
-// will allow for immutable all operations of `Vec<T>` on `WeakBoundedVec<T>`.
-impl<T, S> Deref for WeakBoundedVec<T, S> {
-	type Target = Vec<T>;
-
-	fn deref(&self) -> &Self::Target {
-		&self.0
-	}
-}
-
-// Allows for indexing similar to a normal `Vec`. Can panic if out of bound.
-impl<T, S, I> Index<I> for WeakBoundedVec<T, S>
-where
-	I: SliceIndex<[T]>,
-{
-	type Output = I::Output;
-
-	#[inline]
-	fn index(&self, index: I) -> &Self::Output {
-		self.0.index(index)
-	}
-}
-
-impl<T, S, I> IndexMut<I> for WeakBoundedVec<T, S>
-where
-	I: SliceIndex<[T]>,
-{
-	#[inline]
-	fn index_mut(&mut self, index: I) -> &mut Self::Output {
-		self.0.index_mut(index)
-	}
-}
-
-impl<T, S> sp_std::iter::IntoIterator for WeakBoundedVec<T, S> {
-	type Item = T;
-	type IntoIter = sp_std::vec::IntoIter<T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.into_iter()
-	}
-}
-
-impl<'a, T, S> sp_std::iter::IntoIterator for &'a WeakBoundedVec<T, S> {
-	type Item = &'a T;
-	type IntoIter = sp_std::slice::Iter<'a, T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter()
-	}
-}
-
-impl<'a, T, S> sp_std::iter::IntoIterator for &'a mut WeakBoundedVec<T, S> {
-	type Item = &'a mut T;
-	type IntoIter = sp_std::slice::IterMut<'a, T>;
-	fn into_iter(self) -> Self::IntoIter {
-		self.0.iter_mut()
-	}
-}
-
-impl<T, S> codec::DecodeLength for WeakBoundedVec<T, S> {
-	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
-		// `WeakBoundedVec<T, _>` stored just a `Vec<T>`, thus the length is at the beginning in
-		// `Compact` form, and same implementation as `Vec<T>` can be used.
-		<Vec<T> as codec::DecodeLength>::len(self_encoded)
-	}
-}
-
-// NOTE: we could also implement this as:
-// impl<T: Value, S1: Get<u32>, S2: Get<u32>> PartialEq<WeakBoundedVec<T, S2>> for WeakBoundedVec<T,
-// S1> to allow comparison of bounded vectors with different bounds.
-impl<T, S> PartialEq for WeakBoundedVec<T, S>
-where
-	T: PartialEq,
-{
-	fn eq(&self, rhs: &Self) -> bool {
-		self.0 == rhs.0
-	}
-}
-
-impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for WeakBoundedVec<T, S> {
-	fn eq(&self, other: &Vec<T>) -> bool {
-		&self.0 == other
-	}
-}
-
-impl<T, S> Eq for WeakBoundedVec<T, S> where T: Eq {}
+pub use sp_runtime::WeakBoundedVec;
 
 impl<T, S> StorageDecodeLength for WeakBoundedVec<T, S> {}
 
@@ -314,22 +32,6 @@ impl<T, S: Get<u32>> StorageTryAppend<T> for WeakBoundedVec<T, S> {
 	}
 }
 
-impl<T, S> MaxEncodedLen for WeakBoundedVec<T, S>
-where
-	T: MaxEncodedLen,
-	S: Get<u32>,
-	WeakBoundedVec<T, S>: Encode,
-{
-	fn max_encoded_len() -> usize {
-		// WeakBoundedVec<T, S> encodes like Vec<T> which encodes like [T], which is a compact u32
-		// plus each item in the slice:
-		// https://docs.substrate.io/v3/advanced/scale-codec
-		codec::Compact(S::get())
-			.encoded_size()
-			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
-	}
-}
-
 #[cfg(test)]
 pub mod test {
 	use super::*;
@@ -345,11 +47,6 @@ pub mod test {
 	type FooDoubleMap =
 		StorageDoubleMap<Prefix, Twox128, u32, Twox128, u32, WeakBoundedVec<u32, ConstU32<7>>>;
 
-	#[test]
-	fn bound_returns_correct_value() {
-		assert_eq!(WeakBoundedVec::<u32, ConstU32<7>>::bound(), 7);
-	}
-
 	#[test]
 	fn decode_len_works() {
 		TestExternalities::default().execute_with(|| {
@@ -375,66 +72,4 @@ pub mod test {
 			assert!(FooDoubleMap::decode_len(2, 2).is_none());
 		});
 	}
-
-	#[test]
-	fn try_insert_works() {
-		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
-		bounded.try_insert(1, 0).unwrap();
-		assert_eq!(*bounded, vec![1, 0, 2, 3]);
-
-		assert!(bounded.try_insert(0, 9).is_err());
-		assert_eq!(*bounded, vec![1, 0, 2, 3]);
-	}
-
-	#[test]
-	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
-	fn try_inert_panics_if_oob() {
-		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
-		bounded.try_insert(9, 0).unwrap();
-	}
-
-	#[test]
-	fn try_push_works() {
-		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
-		bounded.try_push(0).unwrap();
-		assert_eq!(*bounded, vec![1, 2, 3, 0]);
-
-		assert!(bounded.try_push(9).is_err());
-	}
-
-	#[test]
-	fn deref_coercion_works() {
-		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3].try_into().unwrap();
-		// these methods come from deref-ed vec.
-		assert_eq!(bounded.len(), 3);
-		assert!(bounded.iter().next().is_some());
-		assert!(!bounded.is_empty());
-	}
-
-	#[test]
-	fn try_mutate_works() {
-		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
-		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
-		assert_eq!(bounded.len(), 7);
-		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
-	}
-
-	#[test]
-	fn slice_indexing_works() {
-		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
-		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
-	}
-
-	#[test]
-	fn vec_eq_works() {
-		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
-		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
-	}
-
-	#[test]
-	fn too_big_succeed_to_decode() {
-		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
-		let w = WeakBoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]).unwrap();
-		assert_eq!(v, *w);
-	}
 }
diff --git a/substrate/frame/support/src/traits/misc.rs b/substrate/frame/support/src/traits/misc.rs
index ced6df8f971..1f0ba1e769c 100644
--- a/substrate/frame/support/src/traits/misc.rs
+++ b/substrate/frame/support/src/traits/misc.rs
@@ -24,7 +24,7 @@ use sp_arithmetic::traits::{CheckedAdd, CheckedMul, CheckedSub, Saturating};
 #[doc(hidden)]
 pub use sp_runtime::traits::{
 	ConstBool, ConstI128, ConstI16, ConstI32, ConstI64, ConstI8, ConstU128, ConstU16, ConstU32,
-	ConstU64, ConstU8, Get, GetDefault, TypedGet,
+	ConstU64, ConstU8, Get, GetDefault, TryCollect, TypedGet,
 };
 use sp_runtime::{traits::Block as BlockT, DispatchError};
 use sp_std::{cmp::Ordering, prelude::*};
@@ -367,16 +367,6 @@ impl<T: Saturating + CheckedAdd + CheckedMul + CheckedSub> DefensiveSaturating f
 	}
 }
 
-/// Try and collect into a collection `C`.
-pub trait TryCollect<C> {
-	type Error;
-	/// Consume self and try to collect the results into `C`.
-	///
-	/// This is useful in preventing the undesirable `.collect().try_into()` call chain on
-	/// collections that need to be converted into a bounded type (e.g. `BoundedVec`).
-	fn try_collect(self) -> Result<C, Self::Error>;
-}
-
 /// Anything that can have a `::len()` method.
 pub trait Len {
 	/// Return the length of data type.
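// Illustrative sketch (editorial note, not part of the patch): `TryCollect` now lives in
// `sp_runtime::traits` and is re-exported above, so it can be used from either crate. The
// function name and exact import paths below are assumptions for the example only.
use sp_runtime::bounded::BoundedVec;
use sp_runtime::traits::{ConstU32, TryCollect};

fn try_collect_example() {
	// A `Range<u32>` is an `ExactSizeIterator`, so it can be fallibly collected.
	let bounded: BoundedVec<u32, ConstU32<4>> =
		(0..4u32).try_collect().expect("4 items fit a bound of 4");
	assert_eq!(bounded.into_inner(), vec![0, 1, 2, 3]);
}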
diff --git a/substrate/primitives/runtime/src/bounded.rs b/substrate/primitives/runtime/src/bounded.rs
new file mode 100644
index 00000000000..45b4a9ca623
--- /dev/null
+++ b/substrate/primitives/runtime/src/bounded.rs
@@ -0,0 +1,28 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2022 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Bounded collection types.
+
+pub mod bounded_btree_map;
+pub mod bounded_btree_set;
+pub mod bounded_vec;
+pub mod weak_bounded_vec;
+
+pub use bounded_btree_map::BoundedBTreeMap;
+pub use bounded_btree_set::BoundedBTreeSet;
+pub use bounded_vec::{BoundedSlice, BoundedVec};
+pub use weak_bounded_vec::WeakBoundedVec;
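// Illustrative sketch (editorial note, not part of the patch): with this module in place,
// downstream code can import the bounded collections from `sp-runtime` directly, assuming the
// module is exposed from the crate root as `sp_runtime::bounded`.
use sp_runtime::bounded::{BoundedBTreeMap, BoundedBTreeSet, BoundedSlice, BoundedVec, WeakBoundedVec};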
diff --git a/substrate/primitives/runtime/src/bounded/bounded_btree_map.rs b/substrate/primitives/runtime/src/bounded/bounded_btree_map.rs
new file mode 100644
index 00000000000..f4fd4275beb
--- /dev/null
+++ b/substrate/primitives/runtime/src/bounded/bounded_btree_map.rs
@@ -0,0 +1,517 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2017-2022 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Traits, types and structs to support a bounded BTreeMap.
+
+use crate::traits::{Get, TryCollect};
+use codec::{Decode, Encode, MaxEncodedLen};
+use sp_std::{borrow::Borrow, collections::btree_map::BTreeMap, marker::PhantomData, ops::Deref};
+
+/// A bounded map based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. See [`BTreeMap`] for more details.
+///
+/// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the
+/// map. All internal operations ensure this bound is respected.
+#[derive(Encode, scale_info::TypeInfo)]
+#[scale_info(skip_type_params(S))]
+pub struct BoundedBTreeMap<K, V, S>(BTreeMap<K, V>, PhantomData<S>);
+
+impl<K, V, S> Decode for BoundedBTreeMap<K, V, S>
+where
+	K: Decode + Ord,
+	V: Decode,
+	S: Get<u32>,
+{
+	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
+		let inner = BTreeMap::<K, V>::decode(input)?;
+		if inner.len() > S::get() as usize {
+			return Err("BoundedBTreeMap exceeds its limit".into())
+		}
+		Ok(Self(inner, PhantomData))
+	}
+
+	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
+		BTreeMap::<K, V>::skip(input)
+	}
+}
+
+impl<K, V, S> BoundedBTreeMap<K, V, S>
+where
+	S: Get<u32>,
+{
+	/// Get the bound of the type in `usize`.
+	pub fn bound() -> usize {
+		S::get() as usize
+	}
+}
+
+impl<K, V, S> BoundedBTreeMap<K, V, S>
+where
+	K: Ord,
+	S: Get<u32>,
+{
+	/// Exactly the same semantics as `BTreeMap::retain`.
+	///
+	/// This is a safe `&mut self` borrow because `retain` can only ever decrease the length of the
+	/// inner map.
+	pub fn retain<F: FnMut(&K, &mut V) -> bool>(&mut self, f: F) {
+		self.0.retain(f)
+	}
+
+	/// Create a new `BoundedBTreeMap`.
+	///
+	/// Does not allocate.
+	pub fn new() -> Self {
+		BoundedBTreeMap(BTreeMap::new(), PhantomData)
+	}
+
+	/// Consume self, and return the inner `BTreeMap`.
+	///
+	/// This is useful when a mutating API of the inner type is desired, and closure-based mutation
+	/// such as provided by [`try_mutate`][Self::try_mutate] is inconvenient.
+	pub fn into_inner(self) -> BTreeMap<K, V> {
+		debug_assert!(self.0.len() <= Self::bound());
+		self.0
+	}
+
+	/// Consumes self and mutates self via the given `mutate` function.
+	///
+	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
+	/// returned.
+	///
+	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
+	/// [`Self::try_from`].
+	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeMap<K, V>)) -> Option<Self> {
+		mutate(&mut self.0);
+		(self.0.len() <= Self::bound()).then(move || self)
+	}
+
+	/// Clears the map, removing all elements.
+	pub fn clear(&mut self) {
+		self.0.clear()
+	}
+
+	/// Return a mutable reference to the value corresponding to the key.
+	///
+	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
+	/// form _must_ match the ordering on the key type.
+	pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
+	where
+		K: Borrow<Q>,
+		Q: Ord + ?Sized,
+	{
+		self.0.get_mut(key)
+	}
+
+	/// Exactly the same semantics as [`BTreeMap::insert`], but returns an `Err` (and is a noop) if
+	/// the new length of the map exceeds `S`.
+	///
+	/// In the `Err` case, returns the pair that could not be inserted, so it can be reused without cloning.
+	pub fn try_insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
+		if self.len() < Self::bound() || self.0.contains_key(&key) {
+			Ok(self.0.insert(key, value))
+		} else {
+			Err((key, value))
+		}
+	}
+
+	/// Remove a key from the map, returning the value at the key if the key was previously in the
+	/// map.
+	///
+	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
+	/// form _must_ match the ordering on the key type.
+	pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
+	where
+		K: Borrow<Q>,
+		Q: Ord + ?Sized,
+	{
+		self.0.remove(key)
+	}
+
+	/// Remove a key from the map, returning the value at the key if the key was previously in the
+	/// map.
+	///
+	/// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
+	/// form _must_ match the ordering on the key type.
+	pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
+	where
+		K: Borrow<Q>,
+		Q: Ord + ?Sized,
+	{
+		self.0.remove_entry(key)
+	}
+}
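// Illustrative sketch (editorial note, not part of the patch) of the fallible API above; the
// function name is hypothetical and `ConstU32` comes from `crate::traits`.
fn bounded_map_example() {
	use crate::traits::ConstU32;
	let mut map = BoundedBTreeMap::<u32, &'static str, ConstU32<2>>::new();
	assert_eq!(map.try_insert(1, "one"), Ok(None));
	assert_eq!(map.try_insert(2, "two"), Ok(None));
	// The map is full: a new key is handed back instead of growing the map.
	assert_eq!(map.try_insert(3, "three"), Err((3, "three")));
	// Overwriting an existing key is always allowed and returns the previous value.
	assert_eq!(map.try_insert(1, "uno"), Ok(Some("one")));
}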
+
+impl<K, V, S> Default for BoundedBTreeMap<K, V, S>
+where
+	K: Ord,
+	S: Get<u32>,
+{
+	fn default() -> Self {
+		Self::new()
+	}
+}
+
+impl<K, V, S> Clone for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: Clone,
+{
+	fn clone(&self) -> Self {
+		BoundedBTreeMap(self.0.clone(), PhantomData)
+	}
+}
+
+#[cfg(feature = "std")]
+impl<K, V, S> std::fmt::Debug for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: std::fmt::Debug,
+	S: Get<u32>,
+{
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		f.debug_tuple("BoundedBTreeMap").field(&self.0).field(&Self::bound()).finish()
+	}
+}
+
+impl<K, V, S1, S2> PartialEq<BoundedBTreeMap<K, V, S1>> for BoundedBTreeMap<K, V, S2>
+where
+	BTreeMap<K, V>: PartialEq,
+	S1: Get<u32>,
+	S2: Get<u32>,
+{
+	fn eq(&self, other: &BoundedBTreeMap<K, V, S1>) -> bool {
+		S1::get() == S2::get() && self.0 == other.0
+	}
+}
+
+impl<K, V, S> Eq for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: Eq,
+	S: Get<u32>,
+{
+}
+
+impl<K, V, S> PartialEq<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: PartialEq,
+{
+	fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+		self.0 == *other
+	}
+}
+
+impl<K, V, S> PartialOrd for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: PartialOrd,
+	S: Get<u32>,
+{
+	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
+		self.0.partial_cmp(&other.0)
+	}
+}
+
+impl<K, V, S> Ord for BoundedBTreeMap<K, V, S>
+where
+	BTreeMap<K, V>: Ord,
+	S: Get<u32>,
+{
+	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
+		self.0.cmp(&other.0)
+	}
+}
+
+impl<K, V, S> IntoIterator for BoundedBTreeMap<K, V, S> {
+	type Item = (K, V);
+	type IntoIter = sp_std::collections::btree_map::IntoIter<K, V>;
+
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.into_iter()
+	}
+}
+
+impl<'a, K, V, S> IntoIterator for &'a BoundedBTreeMap<K, V, S> {
+	type Item = (&'a K, &'a V);
+	type IntoIter = sp_std::collections::btree_map::Iter<'a, K, V>;
+
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter()
+	}
+}
+
+impl<'a, K, V, S> IntoIterator for &'a mut BoundedBTreeMap<K, V, S> {
+	type Item = (&'a K, &'a mut V);
+	type IntoIter = sp_std::collections::btree_map::IterMut<'a, K, V>;
+
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter_mut()
+	}
+}
+
+impl<K, V, S> MaxEncodedLen for BoundedBTreeMap<K, V, S>
+where
+	K: MaxEncodedLen,
+	V: MaxEncodedLen,
+	S: Get<u32>,
+{
+	fn max_encoded_len() -> usize {
+		Self::bound()
+			.saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len()))
+			.saturating_add(codec::Compact(S::get()).encoded_size())
+	}
+}
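// Illustrative sketch (editorial note, not part of the patch): a worked instance of the bound
// computed above, assuming the standard `MaxEncodedLen` impls for `u32` (4 bytes) and `u64`
// (8 bytes), plus a one-byte `Compact` length prefix since 10 < 2^6.
fn max_encoded_len_example() {
	use crate::traits::ConstU32;
	assert_eq!(
		BoundedBTreeMap::<u32, u64, ConstU32<10>>::max_encoded_len(),
		10 * (4 + 8) + 1,
	);
}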
+
+impl<K, V, S> Deref for BoundedBTreeMap<K, V, S>
+where
+	K: Ord,
+{
+	type Target = BTreeMap<K, V>;
+
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
+}
+
+impl<K, V, S> AsRef<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
+where
+	K: Ord,
+{
+	fn as_ref(&self) -> &BTreeMap<K, V> {
+		&self.0
+	}
+}
+
+impl<K, V, S> From<BoundedBTreeMap<K, V, S>> for BTreeMap<K, V>
+where
+	K: Ord,
+{
+	fn from(map: BoundedBTreeMap<K, V, S>) -> Self {
+		map.0
+	}
+}
+
+impl<K, V, S> TryFrom<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
+where
+	K: Ord,
+	S: Get<u32>,
+{
+	type Error = ();
+
+	fn try_from(value: BTreeMap<K, V>) -> Result<Self, Self::Error> {
+		(value.len() <= Self::bound())
+			.then(move || BoundedBTreeMap(value, PhantomData))
+			.ok_or(())
+	}
+}
+
+impl<K, V, S> codec::DecodeLength for BoundedBTreeMap<K, V, S> {
+	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
+		// `BoundedBTreeMap<K, V, S>` is stored as just a `BTreeMap<K, V>`, which is stored as a
+		// `Compact<u32>` with its length followed by an iteration of its items. We can just use
+		// the underlying implementation.
+		<BTreeMap<K, V> as codec::DecodeLength>::len(self_encoded)
+	}
+}
+
+impl<K, V, S> codec::EncodeLike<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S> where
+	BTreeMap<K, V>: Encode
+{
+}
+
+impl<I, K, V, Bound> TryCollect<BoundedBTreeMap<K, V, Bound>> for I
+where
+	K: Ord,
+	I: ExactSizeIterator + Iterator<Item = (K, V)>,
+	Bound: Get<u32>,
+{
+	type Error = &'static str;
+
+	fn try_collect(self) -> Result<BoundedBTreeMap<K, V, Bound>, Self::Error> {
+		if self.len() > Bound::get() as usize {
+			Err("iterator length too big")
+		} else {
+			Ok(BoundedBTreeMap::<K, V, Bound>::try_from(self.collect::<BTreeMap<K, V>>())
+				.expect("length checked above; qed"))
+		}
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+	use super::*;
+	use crate::traits::ConstU32;
+
+	fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()>
+	where
+		K: Ord + Copy,
+	{
+		keys.iter().copied().zip(std::iter::repeat(())).collect()
+	}
+
+	fn boundedmap_from_keys<K, S>(keys: &[K]) -> BoundedBTreeMap<K, (), S>
+	where
+		K: Ord + Copy,
+		S: Get<u32>,
+	{
+		map_from_keys(keys).try_into().unwrap()
+	}
+
+	#[test]
+	fn try_insert_works() {
+		let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
+		bounded.try_insert(0, ()).unwrap();
+		assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
+
+		assert!(bounded.try_insert(9, ()).is_err());
+		assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
+	}
+
+	#[test]
+	fn deref_coercion_works() {
+		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
+		// These methods come from the deref-ed `BTreeMap`.
+		assert_eq!(bounded.len(), 3);
+		assert!(bounded.iter().next().is_some());
+		assert!(!bounded.is_empty());
+	}
+
+	#[test]
+	fn try_mutate_works() {
+		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
+		let bounded = bounded
+			.try_mutate(|v| {
+				v.insert(7, ());
+			})
+			.unwrap();
+		assert_eq!(bounded.len(), 7);
+		assert!(bounded
+			.try_mutate(|v| {
+				v.insert(8, ());
+			})
+			.is_none());
+	}
+
+	#[test]
+	fn btree_map_eq_works() {
+		let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
+		assert_eq!(bounded, map_from_keys(&[1, 2, 3, 4, 5, 6]));
+	}
+
+	#[test]
+	fn too_big_fail_to_decode() {
+		let v: Vec<(u32, u32)> = vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)];
+		assert_eq!(
+			BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
+			Err("BoundedBTreeMap exceeds its limit".into()),
+		);
+	}
+
+	#[test]
+	fn unequal_eq_impl_insert_works() {
+		// given a struct with a strange notion of equality
+		#[derive(Debug)]
+		struct Unequal(u32, bool);
+
+		impl PartialEq for Unequal {
+			fn eq(&self, other: &Self) -> bool {
+				self.0 == other.0
+			}
+		}
+		impl Eq for Unequal {}
+
+		impl Ord for Unequal {
+			fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+				self.0.cmp(&other.0)
+			}
+		}
+
+		impl PartialOrd for Unequal {
+			fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+				Some(self.cmp(other))
+			}
+		}
+
+		let mut map = BoundedBTreeMap::<Unequal, u32, ConstU32<4>>::new();
+
+		// when the set is full
+
+		for i in 0..4 {
+			map.try_insert(Unequal(i, false), i).unwrap();
+		}
+
+		// can't insert a new distinct member
+		map.try_insert(Unequal(5, false), 5).unwrap_err();
+
+		// but _can_ insert a distinct member which compares equal, though per the documentation,
+		// neither the map length nor the existing key is changed, but the value is
+		map.try_insert(Unequal(0, true), 6).unwrap();
+		assert_eq!(map.len(), 4);
+		let (zero_key, zero_value) = map.get_key_value(&Unequal(0, true)).unwrap();
+		assert_eq!(zero_key.0, 0);
+		assert_eq!(zero_key.1, false);
+		assert_eq!(*zero_value, 6);
+	}
+
+	#[test]
+	fn eq_works() {
+		// of same type
+		let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
+		let b2 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
+		assert_eq!(b1, b2);
+
+		// of different type, but same value and bound.
+		crate::parameter_types! {
+			B1: u32 = 7;
+			B2: u32 = 7;
+		}
+		let b1 = boundedmap_from_keys::<u32, B1>(&[1, 2]);
+		let b2 = boundedmap_from_keys::<u32, B2>(&[1, 2]);
+		assert_eq!(b1, b2);
+	}
+
+	#[test]
+	fn can_be_collected() {
+		let b1 = boundedmap_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
+		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
+		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);
+
+		// can also be collected into a collection of length 4.
+		let b2: BoundedBTreeMap<u32, (), ConstU32<4>> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
+		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);
+
+		// can also be collected after further adaptors, as long as the iterator remains an `ExactSizeIterator`.
+		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).rev().skip(2).try_collect().unwrap();
+		// note that the b-tree map will re-sort the keys, so the effect of rev() is not visible
+		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);
+
+		let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).take(2).try_collect().unwrap();
+		assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);
+
+		// but these won't work
+		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<3>>, _> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).try_collect();
+		assert!(b2.is_err());
+
+		let b2: Result<BoundedBTreeMap<u32, (), ConstU32<1>>, _> =
+			b1.iter().map(|(k, v)| (k + 1, *v)).skip(2).try_collect();
+		assert!(b2.is_err());
+	}
+}
diff --git a/substrate/primitives/runtime/src/bounded/bounded_btree_set.rs b/substrate/primitives/runtime/src/bounded/bounded_btree_set.rs
new file mode 100644
index 00000000000..40b95165da1
--- /dev/null
+++ b/substrate/primitives/runtime/src/bounded/bounded_btree_set.rs
@@ -0,0 +1,479 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2022 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Traits, types and structs to support a bounded `BTreeSet`.
+
+use crate::traits::{Get, TryCollect};
+use codec::{Decode, Encode, MaxEncodedLen};
+use sp_std::{borrow::Borrow, collections::btree_set::BTreeSet, marker::PhantomData, ops::Deref};
+
+/// A bounded set based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. See [`BTreeSet`] for more details.
+///
+/// Unlike a standard `BTreeSet`, there is an enforced upper limit to the number of items in the
+/// set. All internal operations ensure this bound is respected.
+#[derive(Encode, scale_info::TypeInfo)]
+#[scale_info(skip_type_params(S))]
+pub struct BoundedBTreeSet<T, S>(BTreeSet<T>, PhantomData<S>);
+
+impl<T, S> Decode for BoundedBTreeSet<T, S>
+where
+	T: Decode + Ord,
+	S: Get<u32>,
+{
+	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
+		let inner = BTreeSet::<T>::decode(input)?;
+		if inner.len() > S::get() as usize {
+			return Err("BoundedBTreeSet exceeds its limit".into())
+		}
+		Ok(Self(inner, PhantomData))
+	}
+
+	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
+		BTreeSet::<T>::skip(input)
+	}
+}
+
+impl<T, S> BoundedBTreeSet<T, S>
+where
+	S: Get<u32>,
+{
+	/// Get the bound of the type in `usize`.
+	pub fn bound() -> usize {
+		S::get() as usize
+	}
+}
+
+impl<T, S> BoundedBTreeSet<T, S>
+where
+	T: Ord,
+	S: Get<u32>,
+{
+	/// Create a new `BoundedBTreeSet`.
+	///
+	/// Does not allocate.
+	pub fn new() -> Self {
+		BoundedBTreeSet(BTreeSet::new(), PhantomData)
+	}
+
+	/// Consume self, and return the inner `BTreeSet`.
+	///
+	/// This is useful when a mutating API of the inner type is desired, and closure-based mutation
+	/// such as provided by [`try_mutate`][Self::try_mutate] is inconvenient.
+	pub fn into_inner(self) -> BTreeSet<T> {
+		debug_assert!(self.0.len() <= Self::bound());
+		self.0
+	}
+
+	/// Consumes self and mutates self via the given `mutate` function.
+	///
+	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
+	/// returned.
+	///
+	/// This is essentially a *consuming* shorthand [`Self::into_inner`] -> `...` ->
+	/// [`Self::try_from`].
+	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeSet<T>)) -> Option<Self> {
+		mutate(&mut self.0);
+		(self.0.len() <= Self::bound()).then(move || self)
+	}
+
+	/// Clears the set, removing all elements.
+	pub fn clear(&mut self) {
+		self.0.clear()
+	}
+
+	/// Exactly the same semantics as [`BTreeSet::insert`], but returns an `Err` (and is a noop) if
+	/// the new length of the set exceeds `S`.
+	///
+	/// In the `Err` case, returns the item that could not be inserted, so it can be reused without cloning.
+	pub fn try_insert(&mut self, item: T) -> Result<bool, T> {
+		if self.len() < Self::bound() || self.0.contains(&item) {
+			Ok(self.0.insert(item))
+		} else {
+			Err(item)
+		}
+	}
+
+	/// Remove an item from the set, returning whether it was previously in the set.
+	///
+	/// The item may be any borrowed form of the set's item type, but the ordering on the borrowed
+	/// form _must_ match the ordering on the item type.
+	pub fn remove<Q>(&mut self, item: &Q) -> bool
+	where
+		T: Borrow<Q>,
+		Q: Ord + ?Sized,
+	{
+		self.0.remove(item)
+	}
+
+	/// Removes and returns the value in the set, if any, that is equal to the given one.
+	///
+	/// The value may be any borrowed form of the set's value type, but the ordering on the borrowed
+	/// form _must_ match the ordering on the value type.
+	pub fn take<Q>(&mut self, value: &Q) -> Option<T>
+	where
+		T: Borrow<Q> + Ord,
+		Q: Ord + ?Sized,
+	{
+		self.0.take(value)
+	}
+}
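// Illustrative sketch (editorial note, not part of the patch) of the fallible set API above;
// the function name is hypothetical.
fn bounded_set_example() {
	use crate::traits::ConstU32;
	let mut set = BoundedBTreeSet::<u32, ConstU32<2>>::new();
	assert_eq!(set.try_insert(1), Ok(true));
	assert_eq!(set.try_insert(1), Ok(false)); // already present, not a capacity failure
	assert_eq!(set.try_insert(2), Ok(true));
	assert_eq!(set.try_insert(3), Err(3)); // full: the rejected item is handed back
	assert_eq!(set.take(&1), Some(1));
}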
+
+impl<T, S> Default for BoundedBTreeSet<T, S>
+where
+	T: Ord,
+	S: Get<u32>,
+{
+	fn default() -> Self {
+		Self::new()
+	}
+}
+
+impl<T, S> Clone for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: Clone,
+{
+	fn clone(&self) -> Self {
+		BoundedBTreeSet(self.0.clone(), PhantomData)
+	}
+}
+
+#[cfg(feature = "std")]
+impl<T, S> std::fmt::Debug for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: std::fmt::Debug,
+	S: Get<u32>,
+{
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		f.debug_tuple("BoundedBTreeSet").field(&self.0).field(&Self::bound()).finish()
+	}
+}
+
+impl<T, S1, S2> PartialEq<BoundedBTreeSet<T, S1>> for BoundedBTreeSet<T, S2>
+where
+	BTreeSet<T>: PartialEq,
+	S1: Get<u32>,
+	S2: Get<u32>,
+{
+	fn eq(&self, other: &BoundedBTreeSet<T, S1>) -> bool {
+		S1::get() == S2::get() && self.0 == other.0
+	}
+}
+
+impl<T, S> Eq for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: Eq,
+	S: Get<u32>,
+{
+}
+
+impl<T, S> PartialEq<BTreeSet<T>> for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: PartialEq,
+	S: Get<u32>,
+{
+	fn eq(&self, other: &BTreeSet<T>) -> bool {
+		self.0 == *other
+	}
+}
+
+impl<T, S> PartialOrd for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: PartialOrd,
+	S: Get<u32>,
+{
+	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
+		self.0.partial_cmp(&other.0)
+	}
+}
+
+impl<T, S> Ord for BoundedBTreeSet<T, S>
+where
+	BTreeSet<T>: Ord,
+	S: Get<u32>,
+{
+	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
+		self.0.cmp(&other.0)
+	}
+}
+
+impl<T, S> IntoIterator for BoundedBTreeSet<T, S> {
+	type Item = T;
+	type IntoIter = sp_std::collections::btree_set::IntoIter<T>;
+
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.into_iter()
+	}
+}
+
+impl<'a, T, S> IntoIterator for &'a BoundedBTreeSet<T, S> {
+	type Item = &'a T;
+	type IntoIter = sp_std::collections::btree_set::Iter<'a, T>;
+
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter()
+	}
+}
+
+impl<T, S> MaxEncodedLen for BoundedBTreeSet<T, S>
+where
+	T: MaxEncodedLen,
+	S: Get<u32>,
+{
+	fn max_encoded_len() -> usize {
+		Self::bound()
+			.saturating_mul(T::max_encoded_len())
+			.saturating_add(codec::Compact(S::get()).encoded_size())
+	}
+}
+
+impl<T, S> Deref for BoundedBTreeSet<T, S>
+where
+	T: Ord,
+{
+	type Target = BTreeSet<T>;
+
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
+}
+
+impl<T, S> AsRef<BTreeSet<T>> for BoundedBTreeSet<T, S>
+where
+	T: Ord,
+{
+	fn as_ref(&self) -> &BTreeSet<T> {
+		&self.0
+	}
+}
+
+impl<T, S> From<BoundedBTreeSet<T, S>> for BTreeSet<T>
+where
+	T: Ord,
+{
+	fn from(set: BoundedBTreeSet<T, S>) -> Self {
+		set.0
+	}
+}
+
+impl<T, S> TryFrom<BTreeSet<T>> for BoundedBTreeSet<T, S>
+where
+	T: Ord,
+	S: Get<u32>,
+{
+	type Error = ();
+
+	fn try_from(value: BTreeSet<T>) -> Result<Self, Self::Error> {
+		(value.len() <= Self::bound())
+			.then(move || BoundedBTreeSet(value, PhantomData))
+			.ok_or(())
+	}
+}
+
+impl<T, S> codec::DecodeLength for BoundedBTreeSet<T, S> {
+	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
+		// `BoundedBTreeSet<T, S>` is stored as just a `BTreeSet<T>`, which is stored as a
+		// `Compact<u32>` with its length followed by an iteration of its items. We can just use
+		// the underlying implementation.
+		<BTreeSet<T> as codec::DecodeLength>::len(self_encoded)
+	}
+}
+
+impl<T, S> codec::EncodeLike<BTreeSet<T>> for BoundedBTreeSet<T, S> where BTreeSet<T>: Encode {}
+
+impl<I, T, Bound> TryCollect<BoundedBTreeSet<T, Bound>> for I
+where
+	T: Ord,
+	I: ExactSizeIterator + Iterator<Item = T>,
+	Bound: Get<u32>,
+{
+	type Error = &'static str;
+
+	fn try_collect(self) -> Result<BoundedBTreeSet<T, Bound>, Self::Error> {
+		if self.len() > Bound::get() as usize {
+			Err("iterator length too big")
+		} else {
+			Ok(BoundedBTreeSet::<T, Bound>::try_from(self.collect::<BTreeSet<T>>())
+				.expect("length is checked above; qed"))
+		}
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+	use super::*;
+	use crate::traits::ConstU32;
+
+	fn set_from_keys<T>(keys: &[T]) -> BTreeSet<T>
+	where
+		T: Ord + Copy,
+	{
+		keys.iter().copied().collect()
+	}
+
+	fn boundedset_from_keys<T, S>(keys: &[T]) -> BoundedBTreeSet<T, S>
+	where
+		T: Ord + Copy,
+		S: Get<u32>,
+	{
+		set_from_keys(keys).try_into().unwrap()
+	}
+
+	#[test]
+	fn try_insert_works() {
+		let mut bounded = boundedset_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
+		bounded.try_insert(0).unwrap();
+		assert_eq!(*bounded, set_from_keys(&[1, 0, 2, 3]));
+
+		assert!(bounded.try_insert(9).is_err());
+		assert_eq!(*bounded, set_from_keys(&[1, 0, 2, 3]));
+	}
+
+	#[test]
+	fn deref_coercion_works() {
+		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
+		// These methods come from the deref-ed `BTreeSet`.
+		assert_eq!(bounded.len(), 3);
+		assert!(bounded.iter().next().is_some());
+		assert!(!bounded.is_empty());
+	}
+
+	#[test]
+	fn try_mutate_works() {
+		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
+		let bounded = bounded
+			.try_mutate(|v| {
+				v.insert(7);
+			})
+			.unwrap();
+		assert_eq!(bounded.len(), 7);
+		assert!(bounded
+			.try_mutate(|v| {
+				v.insert(8);
+			})
+			.is_none());
+	}
+
+	#[test]
+	fn btree_map_eq_works() {
+		let bounded = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
+		assert_eq!(bounded, set_from_keys(&[1, 2, 3, 4, 5, 6]));
+	}
+
+	#[test]
+	fn too_big_fail_to_decode() {
+		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
+		assert_eq!(
+			BoundedBTreeSet::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
+			Err("BoundedBTreeSet exceeds its limit".into()),
+		);
+	}
+
+	#[test]
+	fn unequal_eq_impl_insert_works() {
+		// given a struct with a strange notion of equality
+		#[derive(Debug)]
+		struct Unequal(u32, bool);
+
+		impl PartialEq for Unequal {
+			fn eq(&self, other: &Self) -> bool {
+				self.0 == other.0
+			}
+		}
+		impl Eq for Unequal {}
+
+		impl Ord for Unequal {
+			fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+				self.0.cmp(&other.0)
+			}
+		}
+
+		impl PartialOrd for Unequal {
+			fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+				Some(self.cmp(other))
+			}
+		}
+
+		let mut set = BoundedBTreeSet::<Unequal, ConstU32<4>>::new();
+
+		// when the set is full
+
+		for i in 0..4 {
+			set.try_insert(Unequal(i, false)).unwrap();
+		}
+
+		// can't insert a new distinct member
+		set.try_insert(Unequal(5, false)).unwrap_err();
+
+		// but _can_ insert a distinct member which compares equal, though per the documentation,
+		// neither the set length nor the existing member is changed
+		set.try_insert(Unequal(0, true)).unwrap();
+		assert_eq!(set.len(), 4);
+		let zero_item = set.get(&Unequal(0, true)).unwrap();
+		assert_eq!(zero_item.0, 0);
+		assert_eq!(zero_item.1, false);
+	}
+
+	#[test]
+	fn eq_works() {
+		// of same type
+		let b1 = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2]);
+		let b2 = boundedset_from_keys::<u32, ConstU32<7>>(&[1, 2]);
+		assert_eq!(b1, b2);
+
+		// of different type, but same value and bound.
+		crate::parameter_types! {
+			B1: u32 = 7;
+			B2: u32 = 7;
+		}
+		let b1 = boundedset_from_keys::<u32, B1>(&[1, 2]);
+		let b2 = boundedset_from_keys::<u32, B2>(&[1, 2]);
+		assert_eq!(b1, b2);
+	}
+
+	#[test]
+	fn can_be_collected() {
+		let b1 = boundedset_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
+		let b2: BoundedBTreeSet<u32, ConstU32<5>> = b1.iter().map(|k| k + 1).try_collect().unwrap();
+		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3, 4, 5]);
+
+		// can also be collected into a collection of length 4.
+		let b2: BoundedBTreeSet<u32, ConstU32<4>> = b1.iter().map(|k| k + 1).try_collect().unwrap();
+		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3, 4, 5]);
+
+		// can also be collected after further adaptors, as long as the iterator remains an `ExactSizeIterator`.
+		let b2: BoundedBTreeSet<u32, ConstU32<5>> =
+			b1.iter().map(|k| k + 1).rev().skip(2).try_collect().unwrap();
+		// note that the b-tree set will re-sort the items, so the effect of rev() is not visible
+		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3]);
+
+		let b2: BoundedBTreeSet<u32, ConstU32<5>> =
+			b1.iter().map(|k| k + 1).take(2).try_collect().unwrap();
+		assert_eq!(b2.into_iter().collect::<Vec<_>>(), vec![2, 3]);
+
+		// but these won't work
+		let b2: Result<BoundedBTreeSet<u32, ConstU32<3>>, _> =
+			b1.iter().map(|k| k + 1).try_collect();
+		assert!(b2.is_err());
+
+		let b2: Result<BoundedBTreeSet<u32, ConstU32<1>>, _> =
+			b1.iter().map(|k| k + 1).skip(2).try_collect();
+		assert!(b2.is_err());
+	}
+}
diff --git a/substrate/primitives/runtime/src/bounded/bounded_vec.rs b/substrate/primitives/runtime/src/bounded/bounded_vec.rs
new file mode 100644
index 00000000000..4493d9f8b01
--- /dev/null
+++ b/substrate/primitives/runtime/src/bounded/bounded_vec.rs
@@ -0,0 +1,998 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2017-2022 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Traits, types and structs to support putting a bounded vector into storage, as a raw value, map
+//! or a double map.
+
+use super::WeakBoundedVec;
+use crate::traits::{Get, TryCollect};
+use codec::{Decode, Encode, EncodeLike, MaxEncodedLen};
+use core::{
+	ops::{Deref, Index, IndexMut, RangeBounds},
+	slice::SliceIndex,
+};
+#[cfg(feature = "std")]
+use serde::{
+	de::{Error, SeqAccess, Visitor},
+	Deserialize, Deserializer, Serialize,
+};
+use sp_std::{marker::PhantomData, prelude::*};
+
+/// A bounded vector.
+///
+/// It has implementations for efficient append and length decoding, as with a normal `Vec<_>`, once
+/// put into storage as a raw value, map or double-map.
+///
+/// As the name suggests, the length of the vector is always bounded. All internal operations ensure
+/// this bound is respected.
+#[cfg_attr(feature = "std", derive(Serialize), serde(transparent))]
+#[derive(Encode, scale_info::TypeInfo)]
+#[scale_info(skip_type_params(S))]
+pub struct BoundedVec<T, S>(
+	Vec<T>,
+	#[cfg_attr(feature = "std", serde(skip_serializing))] PhantomData<S>,
+);
+
+#[cfg(feature = "std")]
+impl<'de, T, S: Get<u32>> Deserialize<'de> for BoundedVec<T, S>
+where
+	T: Deserialize<'de>,
+{
+	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+	where
+		D: Deserializer<'de>,
+	{
+		struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);
+
+		impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
+		where
+			T: Deserialize<'de>,
+		{
+			type Value = Vec<T>;
+
+			fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+				formatter.write_str("a sequence")
+			}
+
+			fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+			where
+				A: SeqAccess<'de>,
+			{
+				let size = seq.size_hint().unwrap_or(0);
+				let max = match usize::try_from(S::get()) {
+					Ok(n) => n,
+					Err(_) => return Err(A::Error::custom("can't convert to usize")),
+				};
+				if size > max {
+					Err(A::Error::custom("out of bounds"))
+				} else {
+					let mut values = Vec::with_capacity(size);
+
+					while let Some(value) = seq.next_element()? {
+						values.push(value);
+						if values.len() > max {
+							return Err(A::Error::custom("out of bounds"))
+						}
+					}
+
+					Ok(values)
+				}
+			}
+		}
+
+		let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
+		deserializer
+			.deserialize_seq(visitor)
+			.map(|v| BoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
+	}
+}
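// Illustrative sketch (editorial note, not part of the patch), mirroring the `std`-only serde
// tests that previously lived in frame-support; assumes `serde_json` is available as a
// dev-dependency and the function name is hypothetical.
fn serde_example() {
	use crate::traits::ConstU32;
	let ok: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();
	assert_eq!(*ok, vec![0, 1, 2]);
	// A sequence longer than the bound is rejected during deserialization.
	let too_long: Result<BoundedVec<u32, ConstU32<4>>, _> = serde_json::from_str(r#"[0,1,2,3,4,5]"#);
	assert!(too_long.is_err());
}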
+
+/// A bounded slice.
+///
+/// Similar to a `BoundedVec`, but not owned and cannot be decoded.
+#[derive(Encode, scale_info::TypeInfo)]
+#[scale_info(skip_type_params(S))]
+pub struct BoundedSlice<'a, T, S>(&'a [T], PhantomData<S>);
+
+// `BoundedSlice`s encode to something which will always decode into a `BoundedVec`,
+// `WeakBoundedVec`, or a `Vec`.
+impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<BoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
+impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<WeakBoundedVec<T, S>>
+	for BoundedSlice<'a, T, S>
+{
+}
+impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedSlice<'a, T, S> {}
+
+impl<T: PartialOrd, Bound: Get<u32>> PartialOrd for BoundedVec<T, Bound> {
+	fn partial_cmp(&self, other: &Self) -> Option<sp_std::cmp::Ordering> {
+		self.0.partial_cmp(&other.0)
+	}
+}
+
+impl<T: Ord, Bound: Get<u32>> Ord for BoundedVec<T, Bound> {
+	fn cmp(&self, other: &Self) -> sp_std::cmp::Ordering {
+		self.0.cmp(&other.0)
+	}
+}
+
+impl<'a, T, S: Get<u32>> TryFrom<&'a [T]> for BoundedSlice<'a, T, S> {
+	type Error = ();
+	fn try_from(t: &'a [T]) -> Result<Self, Self::Error> {
+		if t.len() <= S::get() as usize {
+			Ok(BoundedSlice(t, PhantomData))
+		} else {
+			Err(())
+		}
+	}
+}
+
+impl<'a, T, S> From<BoundedSlice<'a, T, S>> for &'a [T] {
+	fn from(t: BoundedSlice<'a, T, S>) -> Self {
+		t.0
+	}
+}
+
+impl<'a, T, S> sp_std::iter::IntoIterator for BoundedSlice<'a, T, S> {
+	type Item = &'a T;
+	type IntoIter = sp_std::slice::Iter<'a, T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter()
+	}
+}
+
+impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
+	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
+		let inner = Vec::<T>::decode(input)?;
+		if inner.len() > S::get() as usize {
+			return Err("BoundedVec exceeds its limit".into())
+		}
+		Ok(Self(inner, PhantomData))
+	}
+
+	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
+		Vec::<T>::skip(input)
+	}
+}
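// Illustrative sketch (editorial note, not part of the patch), condensed from the decode test
// that previously lived in frame-support: an over-long `Vec` encoding is rejected, not
// silently truncated. The function name is hypothetical.
fn decode_example() {
	use crate::traits::ConstU32;
	let raw = vec![1u32, 2, 3, 4, 5].encode();
	assert!(BoundedVec::<u32, ConstU32<4>>::decode(&mut &raw[..]).is_err());
	assert!(BoundedVec::<u32, ConstU32<5>>::decode(&mut &raw[..]).is_ok());
}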
+
+// `BoundedVec`s encode to something which will always decode as a `Vec`.
+impl<T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedVec<T, S> {}
+
+impl<T, S> BoundedVec<T, S> {
+	/// Create `Self` from `t` without any checks.
+	fn unchecked_from(t: Vec<T>) -> Self {
+		Self(t, Default::default())
+	}
+
+	/// Consume self, and return the inner `Vec`. Henceforth, the `Vec<_>` can be altered in an
+	/// arbitrary way. At some point, if the reverse conversion is required, `TryFrom<Vec<_>>` can
+	/// be used.
+	///
+	/// This is useful for cases if you need access to an internal API of the inner `Vec<_>` which
+	/// is not provided by the wrapper `BoundedVec`.
+	pub fn into_inner(self) -> Vec<T> {
+		self.0
+	}
+
+	/// Exactly the same semantics as [`slice::sort_by`].
+	///
+	/// This is safe since sorting cannot change the number of elements in the vector.
+	pub fn sort_by<F>(&mut self, compare: F)
+	where
+		F: FnMut(&T, &T) -> sp_std::cmp::Ordering,
+	{
+		self.0.sort_by(compare)
+	}
+
+	/// Exactly the same semantics as [`slice::sort`].
+	///
+	/// This is safe since sorting cannot change the number of elements in the vector.
+	pub fn sort(&mut self)
+	where
+		T: sp_std::cmp::Ord,
+	{
+		self.0.sort()
+	}
+
+	/// Exactly the same semantics as `Vec::remove`.
+	///
+	/// # Panics
+	///
+	/// Panics if `index` is out of bounds.
+	pub fn remove(&mut self, index: usize) -> T {
+		self.0.remove(index)
+	}
+
+	/// Exactly the same semantics as `slice::swap_remove`.
+	///
+	/// # Panics
+	///
+	/// Panics if `index` is out of bounds.
+	pub fn swap_remove(&mut self, index: usize) -> T {
+		self.0.swap_remove(index)
+	}
+
+	/// Exactly the same semantics as `Vec::retain`.
+	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
+		self.0.retain(f)
+	}
+
+	/// Exactly the same semantics as `slice::get_mut`.
+	pub fn get_mut<I: SliceIndex<[T]>>(
+		&mut self,
+		index: I,
+	) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
+		self.0.get_mut(index)
+	}
+
+	/// Exactly the same semantics as `Vec::truncate`.
+	///
+	/// This is safe because `truncate` can never increase the length of the internal vector.
+	pub fn truncate(&mut self, s: usize) {
+		self.0.truncate(s);
+	}
+
+	/// Exactly the same semantics as `Vec::pop`.
+	///
+	/// This is safe since popping can only shrink the inner vector.
+	pub fn pop(&mut self) -> Option<T> {
+		self.0.pop()
+	}
+
+	/// Exactly the same semantics as [`slice::iter_mut`].
+	pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
+		self.0.iter_mut()
+	}
+
+	/// Exactly the same semantics as [`slice::last_mut`].
+	pub fn last_mut(&mut self) -> Option<&mut T> {
+		self.0.last_mut()
+	}
+
+	/// Exact same semantics as [`Vec::drain`].
+	pub fn drain<R>(&mut self, range: R) -> sp_std::vec::Drain<'_, T>
+	where
+		R: RangeBounds<usize>,
+	{
+		self.0.drain(range)
+	}
+}
+
+impl<T, S: Get<u32>> From<BoundedVec<T, S>> for Vec<T> {
+	fn from(x: BoundedVec<T, S>) -> Vec<T> {
+		x.0
+	}
+}
+
+impl<T, S: Get<u32>> BoundedVec<T, S> {
+	/// Pre-allocate `capacity` items in self.
+	///
+	/// If `capacity` is greater than [`Self::bound`], then the minimum of the two is used.
+	pub fn with_bounded_capacity(capacity: usize) -> Self {
+		let capacity = capacity.min(Self::bound());
+		Self(Vec::with_capacity(capacity), Default::default())
+	}
+
+	/// Allocate self with the maximum possible capacity.
+	pub fn with_max_capacity() -> Self {
+		Self::with_bounded_capacity(Self::bound())
+	}
+
+	/// Consume and truncate the vector `v` in order to create a new instance of `Self` from it.
+	pub fn truncate_from(mut v: Vec<T>) -> Self {
+		v.truncate(Self::bound());
+		Self::unchecked_from(v)
+	}
+
+	/// Get the bound of the type in `usize`.
+	pub fn bound() -> usize {
+		S::get() as usize
+	}
+
+	/// Returns true if this collection is full.
+	pub fn is_full(&self) -> bool {
+		self.len() >= Self::bound()
+	}
+
+	/// Forces the insertion of `element` into `self` retaining all items with index at least
+	/// `index`.
+	///
+	/// If `index == 0` and `self.len() == Self::bound()`, then this is a no-op.
+	///
+	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
+	///
+	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
+	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(())` if
+	/// `element` cannot be inserted.
+	pub fn force_insert_keep_right(
+		&mut self,
+		index: usize,
+		mut element: T,
+	) -> Result<Option<T>, ()> {
+		// Check against panics.
+		if Self::bound() < index || self.len() < index {
+			Err(())
+		} else if self.len() < Self::bound() {
+			// Cannot panic since self.len() >= index;
+			self.0.insert(index, element);
+			Ok(None)
+		} else {
+			if index == 0 {
+				return Err(())
+			}
+			sp_std::mem::swap(&mut self[0], &mut element);
+			// `self[0..index]` cannot panic since self.len() >= index.
+			// `rotate_left(1)` cannot panic because there is at least 1 element.
+			self[0..index].rotate_left(1);
+			Ok(Some(element))
+		}
+	}
+
+	/// Forces the insertion of `element` into `self` retaining all items with index at most
+	/// `index`.
+	///
+	/// If `index == Self::bound()` and `self.len() == Self::bound()`, then this is a no-op.
+	///
+	/// If `Self::bound() < index` or `self.len() < index`, then this is also a no-op.
+	///
+	/// Returns `Ok(maybe_removed)` if the item was inserted, where `maybe_removed` is
+	/// `Some(removed)` if an item was removed to make room for the new one. Returns `Err(())` if
+	/// `element` cannot be inserted.
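+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` and the `bounded_vec!` macro are in scope; mirrors
+	/// the unit tests below):
+	/// ```ignore
+	/// let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![10, 20, 30, 40];
+	/// // At capacity: the last element is dropped so that everything up to `index` is kept.
+	/// assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
+	/// assert_eq!(*b, vec![10, 20, 30, 31]);
+	/// ```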
+	pub fn force_insert_keep_left(&mut self, index: usize, element: T) -> Result<Option<T>, ()> {
+		// Check against panics.
+		if Self::bound() < index || self.len() < index || Self::bound() == 0 {
+			return Err(())
+		}
+		// Noop condition.
+		if Self::bound() == index && self.len() <= Self::bound() {
+			return Err(())
+		}
+		let maybe_removed = if self.is_full() {
+			// defensive-only: since we are at capacity, this is a noop.
+			self.0.truncate(Self::bound());
+			// if we truncate anything, it will be the last one.
+			self.0.pop()
+		} else {
+			None
+		};
+
+		// Cannot panic since `self.len() >= index`;
+		self.0.insert(index, element);
+		Ok(maybe_removed)
+	}
+
+	/// Move the position of an item from one location to another in the slice.
+	///
+	/// Except for the item being moved, the order of the slice remains the same.
+	///
+	/// - `index` is the location of the item to be moved.
+	/// - `insert_position` is the index of the item in the slice which should *immediately follow*
+	///   the item which is being moved.
+	///
+	/// Returns `true` if the operation was successful, otherwise `false` if it was a noop.
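+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` and the `bounded_vec!` macro are in scope; mirrors
+	/// the unit tests below):
+	/// ```ignore
+	/// let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
+	/// // Move the item at index 1 so that it ends up just before the item currently at index 5.
+	/// assert!(b.slide(1, 5));
+	/// assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
+	/// ```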
+	pub fn slide(&mut self, index: usize, insert_position: usize) -> bool {
+		// Check against panics.
+		if self.len() <= index || self.len() < insert_position || index == usize::MAX {
+			return false
+		}
+		// Noop conditions.
+		if index == insert_position || index + 1 == insert_position {
+			return false
+		}
+		if insert_position < index && index < self.len() {
+			// --- --- --- === === === === @@@ --- --- ---
+			//            ^-- N            ^O^
+			// ...
+			//               /-----<<<-----\
+			// --- --- --- === === === === @@@ --- --- ---
+			//               >>> >>> >>> >>>
+			// ...
+			// --- --- --- @@@ === === === === --- --- ---
+			//             ^N^
+			self[insert_position..index + 1].rotate_right(1);
+			return true
+		} else if insert_position > 0 && index + 1 < insert_position {
+			// Note that the apparent asymmetry of these two branches is due to the
+			// fact that the "new" position is the position to be inserted *before*.
+			// --- --- --- @@@ === === === === --- --- ---
+			//             ^O^                ^-- N
+			// ...
+			//               /----->>>-----\
+			// --- --- --- @@@ === === === === --- --- ---
+			//               <<< <<< <<< <<<
+			// ...
+			// --- --- --- === === === === @@@ --- --- ---
+			//                             ^N^
+			self[index..insert_position].rotate_left(1);
+			return true
+		}
+
+		debug_assert!(false, "all noop conditions should have been covered above");
+		false
+	}
+
+	/// Forces the insertion of `element` into `self`, truncating first if necessary.
+	///
+	/// Infallible, but if the bound is zero, then it's a no-op.
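+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` and the `bounded_vec!` macro are in scope):
+	/// ```ignore
+	/// let mut b: BoundedVec<u32, ConstU32<2>> = bounded_vec![1, 2];
+	/// // Already full: the tail is truncated to make room for the new element.
+	/// b.force_push(3);
+	/// assert_eq!(*b, vec![1, 3]);
+	/// ```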
+	pub fn force_push(&mut self, element: T) {
+		if Self::bound() > 0 {
+			self.0.truncate(Self::bound() as usize - 1);
+			self.0.push(element);
+		}
+	}
+
+	/// Same as `Vec::resize`, but if `size` is more than [`Self::bound`], then [`Self::bound`] is
+	/// used.
+	pub fn bounded_resize(&mut self, size: usize, value: T)
+	where
+		T: Clone,
+	{
+		let size = size.min(Self::bound());
+		self.0.resize(size, value);
+	}
+
+	/// Exactly the same semantics as [`Vec::extend`], but returns an error and does nothing if the
+	/// length of the outcome is larger than the bound.
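+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` and the `bounded_vec!` macro are in scope; mirrors
+	/// the unit tests below):
+	/// ```ignore
+	/// let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
+	/// assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
+	/// // Extending past the bound fails and leaves the vector untouched.
+	/// assert!(b.try_extend(vec![6].into_iter()).is_err());
+	/// assert_eq!(*b, vec![1, 2, 3, 4, 5]);
+	/// ```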
+	pub fn try_extend(
+		&mut self,
+		with: impl IntoIterator<Item = T> + ExactSizeIterator,
+	) -> Result<(), ()> {
+		if with.len().saturating_add(self.len()) <= Self::bound() {
+			self.0.extend(with);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+
+	/// Exactly the same semantics as [`Vec::append`], but returns an error and does nothing if the
+	/// length of the outcome is larger than the bound.
+	pub fn try_append(&mut self, other: &mut Vec<T>) -> Result<(), ()> {
+		if other.len().saturating_add(self.len()) <= Self::bound() {
+			self.0.append(other);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+
+	/// Consumes self and mutates self via the given `mutate` function.
+	///
+	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
+	/// returned.
+	///
+	/// This is essentially a *consuming* shorthand for [`Self::into_inner`] -> `...` ->
+	/// [`Self::try_from`].
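+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` and the `bounded_vec!` macro are in scope):
+	/// ```ignore
+	/// let b: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
+	/// let b = b.try_mutate(|v| v.push(4)).unwrap();
+	/// assert_eq!(*b, vec![1, 2, 3, 4]);
+	/// // A mutation that exceeds the bound yields `None`.
+	/// assert!(b.try_mutate(|v| v.push(5)).is_none());
+	/// ```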
+	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
+		mutate(&mut self.0);
+		(self.0.len() <= Self::bound()).then(move || self)
+	}
+
+	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
+	/// new length of the vector exceeds `S`.
+	///
+	/// # Panics
+	///
+	/// Panics if `index > len`.
+	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), ()> {
+		if self.len() < Self::bound() {
+			self.0.insert(index, element);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+
+	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
+	/// new length of the vector exceeds `S`.
+	///
+	/// # Panics
+	///
+	/// Panics if the new capacity exceeds isize::MAX bytes.
+	pub fn try_push(&mut self, element: T) -> Result<(), ()> {
+		if self.len() < Self::bound() {
+			self.0.push(element);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+}
+
+impl<T, S> Default for BoundedVec<T, S> {
+	fn default() -> Self {
+		// the bound cannot be below 0, which is satisfied by an empty vector
+		Self::unchecked_from(Vec::default())
+	}
+}
+
+impl<T, S> sp_std::fmt::Debug for BoundedVec<T, S>
+where
+	T: sp_std::fmt::Debug,
+	S: Get<u32>,
+{
+	fn fmt(&self, f: &mut sp_std::fmt::Formatter<'_>) -> sp_std::fmt::Result {
+		f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish()
+	}
+}
+
+impl<T, S> Clone for BoundedVec<T, S>
+where
+	T: Clone,
+{
+	fn clone(&self) -> Self {
+		// bound is retained
+		Self::unchecked_from(self.0.clone())
+	}
+}
+
+impl<T, S: Get<u32>> TryFrom<Vec<T>> for BoundedVec<T, S> {
+	type Error = ();
+	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
+		if t.len() <= Self::bound() {
+			// explicit check just above
+			Ok(Self::unchecked_from(t))
+		} else {
+			Err(())
+		}
+	}
+}
+
+// It is okay to give a non-mutable reference of the inner vec to anyone.
+impl<T, S> AsRef<Vec<T>> for BoundedVec<T, S> {
+	fn as_ref(&self) -> &Vec<T> {
+		&self.0
+	}
+}
+
+impl<T, S> AsRef<[T]> for BoundedVec<T, S> {
+	fn as_ref(&self) -> &[T] {
+		&self.0
+	}
+}
+
+impl<T, S> AsMut<[T]> for BoundedVec<T, S> {
+	fn as_mut(&mut self) -> &mut [T] {
+		&mut self.0
+	}
+}
+
+// Allows for all immutable operations of `Vec<T>` on `BoundedVec<T>`.
+impl<T, S> Deref for BoundedVec<T, S> {
+	type Target = Vec<T>;
+
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
+}
+
+// Allows for indexing similar to a normal `Vec`. Can panic if out of bound.
+impl<T, S, I> Index<I> for BoundedVec<T, S>
+where
+	I: SliceIndex<[T]>,
+{
+	type Output = I::Output;
+
+	#[inline]
+	fn index(&self, index: I) -> &Self::Output {
+		self.0.index(index)
+	}
+}
+
+impl<T, S, I> IndexMut<I> for BoundedVec<T, S>
+where
+	I: SliceIndex<[T]>,
+{
+	#[inline]
+	fn index_mut(&mut self, index: I) -> &mut Self::Output {
+		self.0.index_mut(index)
+	}
+}
+
+impl<T, S> sp_std::iter::IntoIterator for BoundedVec<T, S> {
+	type Item = T;
+	type IntoIter = sp_std::vec::IntoIter<T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.into_iter()
+	}
+}
+
+impl<'a, T, S> sp_std::iter::IntoIterator for &'a BoundedVec<T, S> {
+	type Item = &'a T;
+	type IntoIter = sp_std::slice::Iter<'a, T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter()
+	}
+}
+
+impl<'a, T, S> sp_std::iter::IntoIterator for &'a mut BoundedVec<T, S> {
+	type Item = &'a mut T;
+	type IntoIter = sp_std::slice::IterMut<'a, T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter_mut()
+	}
+}
+
+impl<T, S> codec::DecodeLength for BoundedVec<T, S> {
+	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
+		// `BoundedVec<T, _>` stores just a `Vec<T>`, thus the length is at the beginning in
+		// `Compact` form, and the same implementation as `Vec<T>` can be used.
+		<Vec<T> as codec::DecodeLength>::len(self_encoded)
+	}
+}
+
+impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
+where
+	T: PartialEq,
+	BoundSelf: Get<u32>,
+	BoundRhs: Get<u32>,
+{
+	fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
+		BoundSelf::get() == BoundRhs::get() && self.0 == rhs.0
+	}
+}
+
+impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for BoundedVec<T, S> {
+	fn eq(&self, other: &Vec<T>) -> bool {
+		&self.0 == other
+	}
+}
+
+impl<T, S: Get<u32>> Eq for BoundedVec<T, S> where T: Eq {}
+
+impl<T, S> MaxEncodedLen for BoundedVec<T, S>
+where
+	T: MaxEncodedLen,
+	S: Get<u32>,
+	BoundedVec<T, S>: Encode,
+{
+	fn max_encoded_len() -> usize {
+		// BoundedVec<T, S> encodes like Vec<T> which encodes like [T], which is a compact u32
+		// plus each item in the slice:
+		// https://docs.substrate.io/v3/advanced/scale-codec
+		codec::Compact(S::get())
+			.encoded_size()
+			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
+	}
+}
+
+impl<I, T, Bound> TryCollect<BoundedVec<T, Bound>> for I
+where
+	I: ExactSizeIterator + Iterator<Item = T>,
+	Bound: Get<u32>,
+{
+	type Error = &'static str;
+
+	fn try_collect(self) -> Result<BoundedVec<T, Bound>, Self::Error> {
+		if self.len() > Bound::get() as usize {
+			Err("iterator length too big")
+		} else {
+			Ok(BoundedVec::<T, Bound>::unchecked_from(self.collect::<Vec<T>>()))
+		}
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+	use super::*;
+	use crate::{bounded_vec, traits::ConstU32};
+
+	#[test]
+	fn slide_works() {
+		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
+		assert!(b.slide(1, 5));
+		assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
+		assert!(b.slide(4, 0));
+		assert_eq!(*b, vec![1, 0, 2, 3, 4, 5]);
+		assert!(b.slide(0, 2));
+		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
+		assert!(b.slide(1, 6));
+		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
+		assert!(b.slide(0, 6));
+		assert_eq!(*b, vec![2, 3, 4, 5, 1, 0]);
+		assert!(b.slide(5, 0));
+		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
+		assert!(!b.slide(6, 0));
+		assert!(!b.slide(7, 0));
+		assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
+
+		let mut c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
+		assert!(!c.slide(1, 5));
+		assert_eq!(*c, vec![0, 1, 2]);
+		assert!(!c.slide(4, 0));
+		assert_eq!(*c, vec![0, 1, 2]);
+		assert!(!c.slide(3, 0));
+		assert_eq!(*c, vec![0, 1, 2]);
+		assert!(c.slide(2, 0));
+		assert_eq!(*c, vec![2, 0, 1]);
+	}
+
+	#[test]
+	fn slide_noops_work() {
+		let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
+		assert!(!b.slide(3, 3));
+		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
+		assert!(!b.slide(3, 4));
+		assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
+	}
+
+	#[test]
+	fn force_insert_keep_left_works() {
+		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
+		assert_eq!(b.force_insert_keep_left(1, 10), Err(()));
+		assert!(b.is_empty());
+
+		assert_eq!(b.force_insert_keep_left(0, 30), Ok(None));
+		assert_eq!(b.force_insert_keep_left(0, 10), Ok(None));
+		assert_eq!(b.force_insert_keep_left(1, 20), Ok(None));
+		assert_eq!(b.force_insert_keep_left(3, 40), Ok(None));
+		assert_eq!(*b, vec![10, 20, 30, 40]);
+		// at capacity.
+		assert_eq!(b.force_insert_keep_left(4, 41), Err(()));
+		assert_eq!(*b, vec![10, 20, 30, 40]);
+		assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
+		assert_eq!(*b, vec![10, 20, 30, 31]);
+		assert_eq!(b.force_insert_keep_left(1, 11), Ok(Some(31)));
+		assert_eq!(*b, vec![10, 11, 20, 30]);
+		assert_eq!(b.force_insert_keep_left(0, 1), Ok(Some(30)));
+		assert_eq!(*b, vec![1, 10, 11, 20]);
+
+		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
+		assert!(z.is_empty());
+		assert_eq!(z.force_insert_keep_left(0, 10), Err(()));
+		assert!(z.is_empty());
+	}
+
+	#[test]
+	fn force_insert_keep_right_works() {
+		let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
+		assert_eq!(b.force_insert_keep_right(1, 10), Err(()));
+		assert!(b.is_empty());
+
+		assert_eq!(b.force_insert_keep_right(0, 30), Ok(None));
+		assert_eq!(b.force_insert_keep_right(0, 10), Ok(None));
+		assert_eq!(b.force_insert_keep_right(1, 20), Ok(None));
+		assert_eq!(b.force_insert_keep_right(3, 40), Ok(None));
+		assert_eq!(*b, vec![10, 20, 30, 40]);
+
+		// at capacity.
+		assert_eq!(b.force_insert_keep_right(0, 0), Err(()));
+		assert_eq!(*b, vec![10, 20, 30, 40]);
+		assert_eq!(b.force_insert_keep_right(1, 11), Ok(Some(10)));
+		assert_eq!(*b, vec![11, 20, 30, 40]);
+		assert_eq!(b.force_insert_keep_right(3, 31), Ok(Some(11)));
+		assert_eq!(*b, vec![20, 30, 31, 40]);
+		assert_eq!(b.force_insert_keep_right(4, 41), Ok(Some(20)));
+		assert_eq!(*b, vec![30, 31, 40, 41]);
+
+		assert_eq!(b.force_insert_keep_right(5, 69), Err(()));
+		assert_eq!(*b, vec![30, 31, 40, 41]);
+
+		let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
+		assert!(z.is_empty());
+		assert_eq!(z.force_insert_keep_right(0, 10), Err(()));
+		assert!(z.is_empty());
+	}
+
+	#[test]
+	fn bound_returns_correct_value() {
+		assert_eq!(BoundedVec::<u32, ConstU32<7>>::bound(), 7);
+	}
+
+	#[test]
+	fn try_insert_works() {
+		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
+		bounded.try_insert(1, 0).unwrap();
+		assert_eq!(*bounded, vec![1, 0, 2, 3]);
+
+		assert!(bounded.try_insert(0, 9).is_err());
+		assert_eq!(*bounded, vec![1, 0, 2, 3]);
+	}
+
+	#[test]
+	fn constructor_macro_works() {
+		// With values. Use some brackets to make sure the macro doesn't expand.
+		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2), (1, 2), (1, 2)];
+		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
+
+		// With repetition.
+		let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2); 3];
+		assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
+	}
+
+	#[test]
+	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
+	fn try_insert_panics_if_oob() {
+		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
+		bounded.try_insert(9, 0).unwrap();
+	}
+
+	#[test]
+	fn try_push_works() {
+		let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
+		bounded.try_push(0).unwrap();
+		assert_eq!(*bounded, vec![1, 2, 3, 0]);
+
+		assert!(bounded.try_push(9).is_err());
+	}
+
+	#[test]
+	fn deref_coercion_works() {
+		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
+		// these methods come from deref-ed vec.
+		assert_eq!(bounded.len(), 3);
+		assert!(bounded.iter().next().is_some());
+		assert!(!bounded.is_empty());
+	}
+
+	#[test]
+	fn try_mutate_works() {
+		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
+		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
+		assert_eq!(bounded.len(), 7);
+		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
+	}
+
+	#[test]
+	fn slice_indexing_works() {
+		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
+		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
+	}
+
+	#[test]
+	fn vec_eq_works() {
+		let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
+		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
+	}
+
+	#[test]
+	fn too_big_vec_fail_to_decode() {
+		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
+		assert_eq!(
+			BoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
+			Err("BoundedVec exceeds its limit".into()),
+		);
+	}
+
+	#[test]
+	fn eq_works() {
+		// of same type
+		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
+		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
+		assert_eq!(b1, b2);
+
+		// of different type, but same value and bound.
+		crate::parameter_types! {
+			B1: u32 = 7;
+			B2: u32 = 7;
+		}
+		let b1: BoundedVec<u32, B1> = bounded_vec![1, 2, 3];
+		let b2: BoundedVec<u32, B2> = bounded_vec![1, 2, 3];
+		assert_eq!(b1, b2);
+	}
+
+	#[test]
+	fn ord_works() {
+		use std::cmp::Ordering;
+		let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
+		let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 3, 2];
+
+		// ordering for vec is lexicographic.
+		assert_eq!(b1.cmp(&b2), Ordering::Less);
+		assert_eq!(b1.cmp(&b2), b1.into_inner().cmp(&b2.into_inner()));
+	}
+
+	#[test]
+	fn try_extend_works() {
+		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
+
+		assert!(b.try_extend(vec![4].into_iter()).is_ok());
+		assert_eq!(*b, vec![1, 2, 3, 4]);
+
+		assert!(b.try_extend(vec![5].into_iter()).is_ok());
+		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
+
+		assert!(b.try_extend(vec![6].into_iter()).is_err());
+		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
+
+		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
+		assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
+		assert_eq!(*b, vec![1, 2, 3, 4, 5]);
+
+		let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
+		assert!(b.try_extend(vec![4, 5, 6].into_iter()).is_err());
+		assert_eq!(*b, vec![1, 2, 3]);
+	}
+
+	#[test]
+	fn test_serializer() {
+		let c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
+		assert_eq!(serde_json::json!(&c).to_string(), r#"[0,1,2]"#);
+	}
+
+	#[test]
+	fn test_deserializer() {
+		let c: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();
+
+		assert_eq!(c.len(), 3);
+		assert_eq!(c[0], 0);
+		assert_eq!(c[1], 1);
+		assert_eq!(c[2], 2);
+	}
+
+	#[test]
+	fn test_deserializer_failed() {
+		let c: Result<BoundedVec<u32, ConstU32<4>>, serde_json::error::Error> =
+			serde_json::from_str(r#"[0,1,2,3,4,5]"#);
+
+		match c {
+			Err(msg) => assert_eq!(msg.to_string(), "out of bounds at line 1 column 11"),
+			_ => unreachable!("deserializer must raise error"),
+		}
+	}
+
+	#[test]
+	fn bounded_vec_try_from_works() {
+		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0]).is_ok());
+		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1]).is_ok());
+		assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1, 2]).is_err());
+	}
+
+	#[test]
+	fn bounded_slice_try_from_works() {
+		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0][..]).is_ok());
+		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1][..]).is_ok());
+		assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1, 2][..]).is_err());
+	}
+
+	#[test]
+	fn can_be_collected() {
+		let b1: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3, 4];
+		let b2: BoundedVec<u32, ConstU32<5>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
+		assert_eq!(b2, vec![2, 3, 4, 5]);
+
+		// can also be collected into a collection of length 4.
+		let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
+		assert_eq!(b2, vec![2, 3, 4, 5]);
+
+		// can be chained with further adaptors, as long as the result is still an `ExactSizeIterator`.
+		let b2: BoundedVec<u32, ConstU32<4>> =
+			b1.iter().map(|x| x + 1).rev().try_collect().unwrap();
+		assert_eq!(b2, vec![5, 4, 3, 2]);
+
+		let b2: BoundedVec<u32, ConstU32<4>> =
+			b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
+		assert_eq!(b2, vec![3, 2]);
+		let b2: BoundedVec<u32, ConstU32<2>> =
+			b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
+		assert_eq!(b2, vec![3, 2]);
+
+		let b2: BoundedVec<u32, ConstU32<4>> =
+			b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
+		assert_eq!(b2, vec![5, 4]);
+		let b2: BoundedVec<u32, ConstU32<2>> =
+			b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
+		assert_eq!(b2, vec![5, 4]);
+
+		// but these won't work
+		let b2: Result<BoundedVec<u32, ConstU32<3>>, _> = b1.iter().map(|x| x + 1).try_collect();
+		assert!(b2.is_err());
+
+		let b2: Result<BoundedVec<u32, ConstU32<1>>, _> =
+			b1.iter().map(|x| x + 1).rev().take(2).try_collect();
+		assert!(b2.is_err());
+	}
+}
diff --git a/substrate/primitives/runtime/src/bounded/weak_bounded_vec.rs b/substrate/primitives/runtime/src/bounded/weak_bounded_vec.rs
new file mode 100644
index 00000000000..9b88ad27e42
--- /dev/null
+++ b/substrate/primitives/runtime/src/bounded/weak_bounded_vec.rs
@@ -0,0 +1,393 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2017-2022 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Traits, types and structs to support putting a bounded vector into storage, as a raw value, map
+//! or a double map.
+
+use crate::traits::Get;
+use codec::{Decode, Encode, MaxEncodedLen};
+use core::{
+	ops::{Deref, Index, IndexMut},
+	slice::SliceIndex,
+};
+use sp_std::{marker::PhantomData, prelude::*};
+
+/// A weakly bounded vector.
+///
+/// It has implementations for efficient append and length decoding, as with a normal `Vec<_>`, once
+/// put into storage as a raw value, map or double-map.
+///
+/// The length of the vec is not strictly bounded. Decoding a vec with more elements than the
+/// bound is accepted, and some methods allow bypassing the restriction with warnings.
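+///
+/// # Example
+///
+/// A minimal sketch (assumes `codec::{Decode, Encode}` and `ConstU32` are in scope; mirrors the
+/// unit tests below):
+/// ```ignore
+/// let raw: Vec<u32> = vec![1, 2, 3, 4, 5];
+/// // Decoding more elements than the bound still succeeds; only a warning is logged.
+/// let weak = WeakBoundedVec::<u32, ConstU32<4>>::decode(&mut &raw.encode()[..]).unwrap();
+/// assert_eq!(*weak, raw);
+/// ```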
+#[derive(Encode, scale_info::TypeInfo)]
+#[scale_info(skip_type_params(S))]
+pub struct WeakBoundedVec<T, S>(Vec<T>, PhantomData<S>);
+
+impl<T: Decode, S: Get<u32>> Decode for WeakBoundedVec<T, S> {
+	fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
+		let inner = Vec::<T>::decode(input)?;
+		Ok(Self::force_from(inner, Some("decode")))
+	}
+
+	fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
+		Vec::<T>::skip(input)
+	}
+}
+
+impl<T, S> WeakBoundedVec<T, S> {
+	/// Create `Self` from `t` without any checks.
+	fn unchecked_from(t: Vec<T>) -> Self {
+		Self(t, Default::default())
+	}
+
+	/// Consume self, and return the inner `Vec`. Henceforth, the `Vec<_>` can be altered in an
+	/// arbitrary way. At some point, if the reverse conversion is required, `TryFrom<Vec<_>>` can
+	/// be used.
+	///
+	/// This is useful in cases where you need access to an internal API of the inner `Vec<_>` which
+	/// is not provided by the wrapper `WeakBoundedVec`.
+	pub fn into_inner(self) -> Vec<T> {
+		self.0
+	}
+
+	/// Exactly the same semantics as [`Vec::remove`].
+	///
+	/// # Panics
+	///
+	/// Panics if `index` is out of bounds.
+	pub fn remove(&mut self, index: usize) -> T {
+		self.0.remove(index)
+	}
+
+	/// Exactly the same semantics as [`Vec::swap_remove`].
+	///
+	/// # Panics
+	///
+	/// Panics if `index` is out of bounds.
+	pub fn swap_remove(&mut self, index: usize) -> T {
+		self.0.swap_remove(index)
+	}
+
+	/// Exactly the same semantics as [`Vec::retain`].
+	pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
+		self.0.retain(f)
+	}
+
+	/// Exactly the same semantics as [`slice::get_mut`].
+	pub fn get_mut<I: SliceIndex<[T]>>(
+		&mut self,
+		index: I,
+	) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
+		self.0.get_mut(index)
+	}
+}
+
+impl<T, S: Get<u32>> WeakBoundedVec<T, S> {
+	/// Get the bound of the type in `usize`.
+	pub fn bound() -> usize {
+		S::get() as usize
+	}
+
+	/// Create `Self` from `t` without any checks. Logs warnings if the bound is not being
+	/// respected. The additional scope can be used to indicate where a potential overflow is
+	/// happening.
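+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `ConstU32` is in scope):
+	/// ```ignore
+	/// // Three elements with a bound of two: the value is kept as-is, but a warning is logged.
+	/// let v = WeakBoundedVec::<u32, ConstU32<2>>::force_from(vec![1, 2, 3], Some("example"));
+	/// assert_eq!(v.len(), 3);
+	/// ```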
+	pub fn force_from(t: Vec<T>, scope: Option<&'static str>) -> Self {
+		if t.len() > Self::bound() {
+			log::warn!(
+				target: "runtime",
+				"length of a bounded vector in scope {} is not respected.",
+				scope.unwrap_or("UNKNOWN"),
+			);
+		}
+
+		Self::unchecked_from(t)
+	}
+
+	/// Consumes self and mutates self via the given `mutate` function.
+	///
+	/// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
+	/// returned.
+	///
+	/// This is essentially a *consuming* shorthand for [`Self::into_inner`] -> `...` ->
+	/// [`Self::try_from`].
+	pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
+		mutate(&mut self.0);
+		(self.0.len() <= Self::bound()).then(move || self)
+	}
+
+	/// Exactly the same semantics as [`Vec::insert`], but returns an `Err` (and is a noop) if the
+	/// new length of the vector exceeds `S`.
+	///
+	/// # Panics
+	///
+	/// Panics if `index > len`.
+	pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), ()> {
+		if self.len() < Self::bound() {
+			self.0.insert(index, element);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+
+	/// Exactly the same semantics as [`Vec::push`], but returns an `Err` (and is a noop) if the
+	/// new length of the vector exceeds `S`.
+	///
+	/// # Panics
+	///
+	/// Panics if the new capacity exceeds isize::MAX bytes.
+	pub fn try_push(&mut self, element: T) -> Result<(), ()> {
+		if self.len() < Self::bound() {
+			self.0.push(element);
+			Ok(())
+		} else {
+			Err(())
+		}
+	}
+}
+
+impl<T, S> Default for WeakBoundedVec<T, S> {
+	fn default() -> Self {
+		// the bound cannot be below 0, which is satisfied by an empty vector
+		Self::unchecked_from(Vec::default())
+	}
+}
+
+#[cfg(feature = "std")]
+impl<T, S> std::fmt::Debug for WeakBoundedVec<T, S>
+where
+	T: std::fmt::Debug,
+	S: Get<u32>,
+{
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		f.debug_tuple("WeakBoundedVec").field(&self.0).field(&Self::bound()).finish()
+	}
+}
+
+impl<T, S> Clone for WeakBoundedVec<T, S>
+where
+	T: Clone,
+{
+	fn clone(&self) -> Self {
+		// bound is retained
+		Self::unchecked_from(self.0.clone())
+	}
+}
+
+impl<T, S: Get<u32>> TryFrom<Vec<T>> for WeakBoundedVec<T, S> {
+	type Error = ();
+	fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
+		if t.len() <= Self::bound() {
+			// explicit check just above
+			Ok(Self::unchecked_from(t))
+		} else {
+			Err(())
+		}
+	}
+}
+
+// It is okay to give a non-mutable reference of the inner vec to anyone.
+impl<T, S> AsRef<Vec<T>> for WeakBoundedVec<T, S> {
+	fn as_ref(&self) -> &Vec<T> {
+		&self.0
+	}
+}
+
+impl<T, S> AsRef<[T]> for WeakBoundedVec<T, S> {
+	fn as_ref(&self) -> &[T] {
+		&self.0
+	}
+}
+
+impl<T, S> AsMut<[T]> for WeakBoundedVec<T, S> {
+	fn as_mut(&mut self) -> &mut [T] {
+		&mut self.0
+	}
+}
+
+// Allows for all immutable operations of `Vec<T>` on `WeakBoundedVec<T>`.
+impl<T, S> Deref for WeakBoundedVec<T, S> {
+	type Target = Vec<T>;
+
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
+}
+
+// Allows for indexing similar to a normal `Vec`. Can panic if out of bound.
+impl<T, S, I> Index<I> for WeakBoundedVec<T, S>
+where
+	I: SliceIndex<[T]>,
+{
+	type Output = I::Output;
+
+	#[inline]
+	fn index(&self, index: I) -> &Self::Output {
+		self.0.index(index)
+	}
+}
+
+impl<T, S, I> IndexMut<I> for WeakBoundedVec<T, S>
+where
+	I: SliceIndex<[T]>,
+{
+	#[inline]
+	fn index_mut(&mut self, index: I) -> &mut Self::Output {
+		self.0.index_mut(index)
+	}
+}
+
+impl<T, S> sp_std::iter::IntoIterator for WeakBoundedVec<T, S> {
+	type Item = T;
+	type IntoIter = sp_std::vec::IntoIter<T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.into_iter()
+	}
+}
+
+impl<'a, T, S> sp_std::iter::IntoIterator for &'a WeakBoundedVec<T, S> {
+	type Item = &'a T;
+	type IntoIter = sp_std::slice::Iter<'a, T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter()
+	}
+}
+
+impl<'a, T, S> sp_std::iter::IntoIterator for &'a mut WeakBoundedVec<T, S> {
+	type Item = &'a mut T;
+	type IntoIter = sp_std::slice::IterMut<'a, T>;
+	fn into_iter(self) -> Self::IntoIter {
+		self.0.iter_mut()
+	}
+}
+
+impl<T, S> codec::DecodeLength for WeakBoundedVec<T, S> {
+	fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
+		// `WeakBoundedVec<T, _>` stores just a `Vec<T>`, thus the length is at the beginning in
+		// `Compact` form, and the same implementation as `Vec<T>` can be used.
+		<Vec<T> as codec::DecodeLength>::len(self_encoded)
+	}
+}
+
+// NOTE: we could also implement this as
+// `impl<T: PartialEq, S1: Get<u32>, S2: Get<u32>> PartialEq<WeakBoundedVec<T, S2>> for WeakBoundedVec<T, S1>`
+// to allow comparison of bounded vectors with different bounds.
+impl<T, S> PartialEq for WeakBoundedVec<T, S>
+where
+	T: PartialEq,
+{
+	fn eq(&self, rhs: &Self) -> bool {
+		self.0 == rhs.0
+	}
+}
+
+impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for WeakBoundedVec<T, S> {
+	fn eq(&self, other: &Vec<T>) -> bool {
+		&self.0 == other
+	}
+}
+
+impl<T, S> Eq for WeakBoundedVec<T, S> where T: Eq {}
+
+impl<T, S> MaxEncodedLen for WeakBoundedVec<T, S>
+where
+	T: MaxEncodedLen,
+	S: Get<u32>,
+	WeakBoundedVec<T, S>: Encode,
+{
+	fn max_encoded_len() -> usize {
+		// WeakBoundedVec<T, S> encodes like Vec<T> which encodes like [T], which is a compact u32
+		// plus each item in the slice:
+		// https://docs.substrate.io/v3/advanced/scale-codec
+		codec::Compact(S::get())
+			.encoded_size()
+			.saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+	use super::*;
+	use crate::traits::ConstU32;
+
+	#[test]
+	fn bound_returns_correct_value() {
+		assert_eq!(WeakBoundedVec::<u32, ConstU32<7>>::bound(), 7);
+	}
+
+	#[test]
+	fn try_insert_works() {
+		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
+		bounded.try_insert(1, 0).unwrap();
+		assert_eq!(*bounded, vec![1, 0, 2, 3]);
+
+		assert!(bounded.try_insert(0, 9).is_err());
+		assert_eq!(*bounded, vec![1, 0, 2, 3]);
+	}
+
+	#[test]
+	#[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
+	fn try_insert_panics_if_oob() {
+		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
+		bounded.try_insert(9, 0).unwrap();
+	}
+
+	#[test]
+	fn try_push_works() {
+		let mut bounded: WeakBoundedVec<u32, ConstU32<4>> = vec![1, 2, 3].try_into().unwrap();
+		bounded.try_push(0).unwrap();
+		assert_eq!(*bounded, vec![1, 2, 3, 0]);
+
+		assert!(bounded.try_push(9).is_err());
+	}
+
+	#[test]
+	fn deref_coercion_works() {
+		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3].try_into().unwrap();
+		// these methods come from deref-ed vec.
+		assert_eq!(bounded.len(), 3);
+		assert!(bounded.iter().next().is_some());
+		assert!(!bounded.is_empty());
+	}
+
+	#[test]
+	fn try_mutate_works() {
+		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
+		let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
+		assert_eq!(bounded.len(), 7);
+		assert!(bounded.try_mutate(|v| v.push(8)).is_none());
+	}
+
+	#[test]
+	fn slice_indexing_works() {
+		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
+		assert_eq!(&bounded[0..=2], &[1, 2, 3]);
+	}
+
+	#[test]
+	fn vec_eq_works() {
+		let bounded: WeakBoundedVec<u32, ConstU32<7>> = vec![1, 2, 3, 4, 5, 6].try_into().unwrap();
+		assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
+	}
+
+	#[test]
+	fn too_big_succeed_to_decode() {
+		let v: Vec<u32> = vec![1, 2, 3, 4, 5];
+		let w = WeakBoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]).unwrap();
+		assert_eq!(v, *w);
+	}
+}
diff --git a/substrate/primitives/runtime/src/lib.rs b/substrate/primitives/runtime/src/lib.rs
index 39e606eb9b5..b41c605d8a3 100644
--- a/substrate/primitives/runtime/src/lib.rs
+++ b/substrate/primitives/runtime/src/lib.rs
@@ -55,6 +55,7 @@ use sp_std::prelude::*;
 use codec::{Decode, Encode, MaxEncodedLen};
 use scale_info::TypeInfo;
 
+pub mod bounded;
 pub mod curve;
 pub mod generic;
 pub mod legacy;
@@ -69,6 +70,9 @@ pub mod transaction_validity;
 
 pub use crate::runtime_string::*;
 
+// Re-export bounded types
+pub use bounded::{BoundedBTreeMap, BoundedBTreeSet, BoundedSlice, BoundedVec, WeakBoundedVec};
+
 // Re-export Multiaddress
 pub use multiaddress::MultiAddress;
 
@@ -825,6 +829,45 @@ macro_rules! assert_eq_error_rate {
 	};
 }
 
+/// Build a bounded vec from the given literals.
+///
+/// The type of the outcome must be known.
+///
+/// Will not handle any errors and just panic if the given literals cannot fit in the corresponding
+/// bounded vec type. Thus, this is only suitable for testing and non-consensus code.
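+///
+/// # Example
+///
+/// A minimal sketch (assumes `BoundedVec` and `ConstU32` are in scope):
+/// ```ignore
+/// let v: BoundedVec<u32, ConstU32<3>> = bounded_vec![1, 2, 3];
+/// // Repetition syntax, as with `vec!`.
+/// let w: BoundedVec<u32, ConstU32<3>> = bounded_vec![7; 3];
+/// ```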
+#[macro_export]
+#[cfg(feature = "std")]
+macro_rules! bounded_vec {
+	($ ($values:expr),* $(,)?) => {
+		{
+			$crate::sp_std::vec![$($values),*].try_into().unwrap()
+		}
+	};
+	( $value:expr ; $repetition:expr ) => {
+		{
+			$crate::sp_std::vec![$value ; $repetition].try_into().unwrap()
+		}
+	}
+}
+
+/// Build a bounded btree-map from the given literals.
+///
+/// The type of the outcome must be known.
+///
+/// Will not handle any errors and just panic if the given literals cannot fit in the corresponding
+/// bounded btree-map type. Thus, this is only suitable for testing and non-consensus code.
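+///
+/// # Example
+///
+/// A minimal sketch (assumes `BoundedBTreeMap` and `ConstU32` are in scope):
+/// ```ignore
+/// let m: BoundedBTreeMap<u32, u32, ConstU32<2>> = bounded_btree_map![1 => 10, 2 => 20];
+/// // A third entry would exceed the bound and make the macro panic.
+/// ```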
+#[macro_export]
+#[cfg(feature = "std")]
+macro_rules! bounded_btree_map {
+	($ ( $key:expr => $value:expr ),* $(,)?) => {
+		{
+			$crate::traits::TryCollect::<$crate::BoundedBTreeMap<_, _, _>>::try_collect(
+				$crate::sp_std::vec![$(($key, $value)),*].into_iter()
+			).unwrap()
+		}
+	};
+}
+
 /// Simple blob to hold an extrinsic without committing to its format and ensure it is serialized
 /// correctly.
 #[derive(PartialEq, Eq, Clone, Default, Encode, Decode, TypeInfo)]
diff --git a/substrate/primitives/runtime/src/traits.rs b/substrate/primitives/runtime/src/traits.rs
index aa5908526b8..fba3117c416 100644
--- a/substrate/primitives/runtime/src/traits.rs
+++ b/substrate/primitives/runtime/src/traits.rs
@@ -308,6 +308,17 @@ impl<T: Default> Get<T> for GetDefault {
 	}
 }
 
+/// Try and collect into a collection `C`.
+pub trait TryCollect<C> {
+	/// The error type that gets returned when a collection can't be made from `self`.
+	type Error;
+	/// Consume self and try to collect the results into `C`.
+	///
+	/// This is useful in preventing the undesirable `.collect().try_into()` call chain on
+	/// collections that need to be converted into a bounded type (e.g. `BoundedVec`).
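+	///
+	/// # Example
+	///
+	/// A minimal sketch (assumes `BoundedVec` and `ConstU32` are in scope, using the
+	/// `TryCollect` implementation provided for `BoundedVec`):
+	/// ```ignore
+	/// let b: BoundedVec<u32, ConstU32<4>> = vec![1u32, 2, 3].into_iter().try_collect().unwrap();
+	/// assert_eq!(*b, vec![1, 2, 3]);
+	/// ```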
+	fn try_collect(self) -> Result<C, Self::Error>;
+}
+
 macro_rules! impl_const_get {
 	($name:ident, $t:ty) => {
 		#[doc = "Const getter for a basic type."]
-- 
GitLab