Unverified Commit 718b9f5e authored by Hero Bird's avatar Hero Bird Committed by GitHub

Eliminate some unsafe in storage2 module (#413)

* [core] apply rustfmt

* [core] add CacheCell abstraction

* [core] re-export CacheCell for other lazy abstractions

* [core] make use of CacheCell in LazyIndexMap

* [core] make use of CacheCell in LazyHashMap

* [core] make use of CacheCell in LazyCell

* [core] make use of CacheCell in LazyArray

* [core] apply rustfmt
parent 3628b5f7
Pipeline #93767 passed with stages
in 7 minutes and 33 seconds
......@@ -49,7 +49,8 @@ where
{
fn assert_index_within_bounds(&self, index: u32) {
if cfg!(debug_assertions) {
assert!(index < self.len(),
assert!(
index < self.len(),
"index out of bounds: the len is {} but the index is {}",
self.len(),
index
......
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use core::{
cell::UnsafeCell,
fmt,
fmt::Debug,
ptr::NonNull,
};
/// A cell caching a `T` that allows mutating the inner value through `&self`.
///
/// This is a thin wrapper around an [`UnsafeCell`].
/// Unlike a bare `UnsafeCell`, it ships with a ready-made API for safely
/// accessing the inner `T` in single-threaded contexts.
pub struct CacheCell<T: ?Sized> {
    /// The wrapped value, which may be mutated behind shared references.
    inner: UnsafeCell<T>,
}

impl<T> CacheCell<T> {
    /// Wraps the given value in a new cache cell.
    pub fn new(value: T) -> Self {
        Self {
            inner: UnsafeCell::new(value),
        }
    }

    /// Consumes the cell and returns the wrapped value.
    pub fn into_inner(self) -> T {
        self.inner.into_inner()
    }
}

impl<T> Debug for CacheCell<T>
where
    T: ?Sized + Debug,
{
    /// Forwards formatting to the inner value's `Debug` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        <T as Debug>::fmt(self.as_inner(), f)
    }
}

impl<T> From<T> for CacheCell<T> {
    /// Converts a value into a cache cell wrapping it.
    fn from(value: T) -> Self {
        Self::new(value)
    }
}

impl<T> Default for CacheCell<T>
where
    T: Default,
{
    /// Creates a cache cell holding `T`'s default value.
    fn default() -> Self {
        Self::new(T::default())
    }
}

impl<T> CacheCell<T>
where
    T: ?Sized,
{
    /// Returns a shared reference to the inner value.
    pub fn as_inner(&self) -> &T {
        // SAFETY: Handing out a `&T` from a `&self` receiver cannot create
        //         aliased exclusive references on its own, so this access
        //         is sound.
        unsafe { &*self.inner.get() }
    }

    /// Returns an exclusive reference to the inner value.
    pub fn as_inner_mut(&mut self) -> &mut T {
        // SAFETY: The `&mut self` receiver guarantees unique access to the
        //         cell, so producing a unique `&mut T` from it cannot
        //         introduce aliasing of exclusive references.
        unsafe { &mut *self.inner.get() }
    }

    /// Returns a mutable pointer to the inner value.
    ///
    /// # Safety
    ///
    /// This is unsafe since it allows to mutably borrow the inner value through
    /// a shared reference. The same rules apply here as with [`UnsafeCell::get`].
    pub fn get_ptr(&self) -> NonNull<T> {
        // SAFETY: `UnsafeCell::get` returns a pointer to the wrapped `T`,
        //         which always exists, so the pointer can never be `null`.
        unsafe { NonNull::new_unchecked(self.inner.get()) }
    }
}
......@@ -13,6 +13,7 @@
// limitations under the License.
use super::{
CacheCell,
Entry,
EntryState,
};
......@@ -24,7 +25,6 @@ use crate::storage2::traits::{
SpreadLayout,
};
use core::{
cell::UnsafeCell,
fmt,
fmt::Debug,
mem,
......@@ -48,12 +48,12 @@ pub type Index = u32;
/// Utility trait for helping with lazy array construction.
pub trait LazyArrayLength<T>:
ArrayLength<UnsafeCell<Option<Entry<T>>>> + Unsigned
ArrayLength<CacheCell<Option<Entry<T>>>> + Unsigned
{
}
impl<T> LazyArrayLength<T> for UTerm {}
impl<T, N: ArrayLength<UnsafeCell<Option<Entry<T>>>>> LazyArrayLength<T> for UInt<N, B0> {}
impl<T, N: ArrayLength<UnsafeCell<Option<Entry<T>>>>> LazyArrayLength<T> for UInt<N, B1> {}
impl<T, N: ArrayLength<CacheCell<Option<Entry<T>>>>> LazyArrayLength<T> for UInt<N, B0> {}
impl<T, N: ArrayLength<CacheCell<Option<Entry<T>>>>> LazyArrayLength<T> for UInt<N, B1> {}
/// A lazy storage array that spans over N storage cells.
///
......@@ -171,12 +171,12 @@ where
N: LazyArrayLength<T>,
{
/// The cache entries of the entry array.
entries: GenericArray<UnsafeCell<Option<Entry<T>>>, N>,
entries: GenericArray<CacheCell<Option<Entry<T>>>, N>,
}
#[derive(Debug)]
pub struct EntriesIter<'a, T> {
iter: core::slice::Iter<'a, UnsafeCell<Option<Entry<T>>>>,
iter: core::slice::Iter<'a, CacheCell<Option<Entry<T>>>>,
}
impl<'a, T> EntriesIter<'a, T> {
......@@ -194,7 +194,7 @@ impl<'a, T> Iterator for EntriesIter<'a, T> {
type Item = &'a Option<Entry<T>>;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map(|cell| unsafe { &*cell.get() })
self.iter.next().map(|cell| cell.as_inner())
}
fn size_hint(&self) -> (usize, Option<usize>) {
......@@ -211,7 +211,7 @@ impl<'a, T> Iterator for EntriesIter<'a, T> {
impl<'a, T> DoubleEndedIterator for EntriesIter<'a, T> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back().map(|cell| unsafe { &*cell.get() })
self.iter.next_back().map(|cell| cell.as_inner())
}
}
......@@ -252,7 +252,7 @@ where
/// returns the old value if any.
fn put(&self, at: Index, new_value: Option<T>) -> Option<T> {
mem::replace(
unsafe { &mut *self.entries.as_slice()[at as usize].get() },
unsafe { self.entries.as_slice()[at as usize].get_ptr().as_mut() },
Some(Entry::new(new_value, EntryState::Mutated)),
)
.map(Entry::into_value)
......@@ -262,7 +262,7 @@ where
/// Inserts a new entry into the cache and returns an exclusive reference to it.
unsafe fn insert_entry(&self, at: Index, new_entry: Entry<T>) -> NonNull<Entry<T>> {
let entry: &mut Option<Entry<T>> =
&mut *UnsafeCell::get(&self.entries[at as usize]);
&mut *CacheCell::get_ptr(&self.entries[at as usize]).as_ptr();
*entry = Some(new_entry);
entry
.as_mut()
......@@ -275,7 +275,7 @@ where
if at >= Self::capacity() {
return None
}
(&mut *UnsafeCell::get(&self.entries[at as usize])).as_mut()
(&mut *CacheCell::get_ptr(&self.entries[at as usize]).as_ptr()).as_mut()
}
/// Returns an iterator that yields shared references to all cached entries.
......
......@@ -13,6 +13,7 @@
// limitations under the License.
use super::{
CacheCell,
Entry,
EntryState,
};
......@@ -23,7 +24,6 @@ use crate::storage2::traits::{
SpreadLayout,
};
use core::{
cell::UnsafeCell,
fmt,
fmt::Debug,
ptr::NonNull,
......@@ -60,7 +60,7 @@ where
/// and the fact that ink! code is always run single-threaded.
/// Being efficient is important here because this is intended to be
/// a low-level primitive with lots of dependencies.
cache: UnsafeCell<Option<Entry<T>>>,
cache: CacheCell<Option<Entry<T>>>,
}
impl<T> Debug for LazyCell<T>
......@@ -70,7 +70,7 @@ where
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("LazyCell")
.field("key", &self.key)
.field("cache", unsafe { &*self.cache.get() })
.field("cache", self.cache.as_inner())
.finish()
}
}
......@@ -178,7 +178,7 @@ where
pub fn new(value: Option<T>) -> Self {
Self {
key: None,
cache: UnsafeCell::new(Some(Entry::new(value, EntryState::Mutated))),
cache: CacheCell::new(Some(Entry::new(value, EntryState::Mutated))),
}
}
......@@ -192,7 +192,7 @@ where
pub fn lazy(key: Key) -> Self {
Self {
key: Some(key),
cache: UnsafeCell::new(None),
cache: CacheCell::new(None),
}
}
......@@ -208,7 +208,7 @@ where
/// Returns the cached entry.
fn entry(&self) -> Option<&Entry<T>> {
unsafe { &*self.cache.get() }.as_ref()
self.cache.as_inner().as_ref()
}
}
......@@ -225,7 +225,7 @@ where
// However, we mutate the entry only if it is vacant.
// If the entry is occupied by a value we return early.
// This way we do not invalidate pointers to this value.
let cache = &mut *self.cache.get();
let cache = &mut *self.cache.get_ptr().as_ptr();
if cache.is_none() {
// Load value from storage and then return the cached entry.
let value = self
......
......@@ -13,6 +13,7 @@
// limitations under the License.
use super::{
CacheCell,
Entry,
EntryState,
};
......@@ -31,10 +32,7 @@ use crate::{
};
use core::{
borrow::Borrow,
cell::{
RefCell,
UnsafeCell,
},
cell::RefCell,
cmp::{
Eq,
Ord,
......@@ -82,12 +80,12 @@ pub struct LazyHashMap<K, V, H> {
///
/// This normally only represents a subset of the total set of elements.
/// An entry is cached as soon as it is loaded or written.
cached_entries: UnsafeCell<EntryMap<K, V>>,
cached_entries: CacheCell<EntryMap<K, V>>,
/// The used hash builder.
hash_builder: RefCell<HashBuilder<H, Vec<u8>>>,
}
struct DebugEntryMap<'a, K, V>(&'a UnsafeCell<EntryMap<K, V>>);
struct DebugEntryMap<'a, K, V>(&'a CacheCell<EntryMap<K, V>>);
impl<'a, K, V> Debug for DebugEntryMap<'a, K, V>
where
......@@ -95,9 +93,7 @@ where
V: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_map()
.entries(unsafe { &*self.0.get() }.iter())
.finish()
f.debug_map().entries(self.0.as_inner().iter()).finish()
}
}
......@@ -211,7 +207,7 @@ where
pub fn new() -> Self {
Self {
key: None,
cached_entries: UnsafeCell::new(EntryMap::new()),
cached_entries: CacheCell::new(EntryMap::new()),
hash_builder: RefCell::new(HashBuilder::from(Vec::new())),
}
}
......@@ -227,7 +223,7 @@ where
fn lazy(key: Key) -> Self {
Self {
key: Some(key),
cached_entries: UnsafeCell::new(EntryMap::new()),
cached_entries: CacheCell::new(EntryMap::new()),
hash_builder: RefCell::new(HashBuilder::from(Vec::new())),
}
}
......@@ -239,14 +235,12 @@ where
/// Returns a shared reference to the underlying entries.
fn entries(&self) -> &EntryMap<K, V> {
// SAFETY: It is safe to return a `&` reference from a `&self` receiver.
unsafe { &*self.cached_entries.get() }
self.cached_entries.as_inner()
}
/// Returns an exclusive reference to the underlying entries.
fn entries_mut(&mut self) -> &mut EntryMap<K, V> {
// SAFETY: It is safe to return a `&mut` reference from a `&mut self` receiver.
unsafe { &mut *self.cached_entries.get() }
self.cached_entries.as_inner_mut()
}
/// Puts the new value under the given key.
......@@ -350,7 +344,7 @@ where
// By returning a raw pointer we enforce an `unsafe` block at
// the caller site to underline that guarantees are given by the
// caller.
let cached_entries = &mut *self.cached_entries.get();
let cached_entries = &mut *self.cached_entries.get_ptr().as_ptr();
use ink_prelude::collections::btree_map::Entry as BTreeMapEntry;
// We have to clone the key here because we do not have access to the unsafe
// raw entry API for Rust hash maps, yet since it is unstable. We can remove
......
......@@ -13,6 +13,7 @@
// limitations under the License.
use super::{
CacheCell,
Entry,
EntryState,
};
......@@ -24,7 +25,6 @@ use crate::storage2::traits::{
SpreadLayout,
};
use core::{
cell::UnsafeCell,
fmt,
fmt::Debug,
ptr::NonNull,
......@@ -59,19 +59,17 @@ pub struct LazyIndexMap<V> {
/// The subset of currently cached entries of the lazy storage chunk.
///
/// An entry is cached as soon as it is loaded or written.
cached_entries: UnsafeCell<EntryMap<V>>,
cached_entries: CacheCell<EntryMap<V>>,
}
struct DebugEntryMap<'a, V>(&'a UnsafeCell<EntryMap<V>>);
struct DebugEntryMap<'a, V>(&'a CacheCell<EntryMap<V>>);
impl<'a, V> Debug for DebugEntryMap<'a, V>
where
V: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_map()
.entries(unsafe { &*self.0.get() }.iter())
.finish()
f.debug_map().entries(self.0.as_inner().iter()).finish()
}
}
......@@ -146,7 +144,7 @@ impl<V> LazyIndexMap<V> {
pub fn new() -> Self {
Self {
key: None,
cached_entries: UnsafeCell::new(EntryMap::new()),
cached_entries: CacheCell::new(EntryMap::new()),
}
}
......@@ -161,7 +159,7 @@ impl<V> LazyIndexMap<V> {
fn lazy(key: Key) -> Self {
Self {
key: Some(key),
cached_entries: UnsafeCell::new(EntryMap::new()),
cached_entries: CacheCell::new(EntryMap::new()),
}
}
......@@ -172,14 +170,12 @@ impl<V> LazyIndexMap<V> {
/// Returns a shared reference to the underlying entries.
fn entries(&self) -> &EntryMap<V> {
// SAFETY: It is safe to return a `&` reference from a `&self` receiver.
unsafe { &*self.cached_entries.get() }
self.cached_entries.as_inner()
}
/// Returns an exclusive reference to the underlying entries.
fn entries_mut(&mut self) -> &mut EntryMap<V> {
// SAFETY: It is safe to return a `&mut` reference from a `&mut self` receiver.
unsafe { &mut *self.cached_entries.get() }
self.cached_entries.as_inner_mut()
}
/// Puts the new value at the given index.
......@@ -297,7 +293,7 @@ where
// By returning a raw pointer we enforce an `unsafe` block at
// the caller site to underline that guarantees are given by the
// caller.
let cached_entries = &mut *self.cached_entries.get();
let cached_entries = &mut *self.cached_entries.get_ptr().as_ptr();
use ink_prelude::collections::btree_map::Entry as BTreeMapEntry;
match cached_entries.entry(index) {
BTreeMapEntry::Occupied(occupied) => {
......
......@@ -24,15 +24,19 @@
//! These low-level collections are not aware of the elements they manage thus
//! extra care has to be taken when operating directly on them.
mod cache_cell;
mod entry;
mod lazy_array;
mod lazy_cell;
mod lazy_hmap;
mod lazy_imap;
use self::entry::{
Entry,
EntryState,
use self::{
cache_cell::CacheCell,
entry::{
Entry,
EntryState,
},
};
pub use self::{
lazy_array::{
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment