diff --git a/Cargo.lock b/Cargo.lock index 63a3f5dd03773..87b2b26a90c5d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1491,7 +1491,6 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ - "allocator-api2", "foldhash", "serde", ] @@ -3502,7 +3501,6 @@ dependencies = [ "either", "elsa", "ena", - "hashbrown 0.15.2", "indexmap", "jobserver", "libc", diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index fcaf2750507dd..df3bee6ee9cc8 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -29,11 +29,6 @@ thin-vec = "0.2.12" tracing = "0.1" # tidy-alphabetical-end -[dependencies.hashbrown] -version = "0.15.2" -default-features = false -features = ["nightly"] # for may_dangle - [dependencies.parking_lot] version = "0.12" diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 865424fd6bbdc..a3b62b469196a 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -24,6 +24,7 @@ #![feature(dropck_eyepatch)] #![feature(extend_one)] #![feature(file_buffered)] +#![feature(hash_raw_entry)] #![feature(macro_metavar_expr)] #![feature(map_try_insert)] #![feature(min_specialization)] diff --git a/compiler/rustc_data_structures/src/marker.rs b/compiler/rustc_data_structures/src/marker.rs index 64c64bfa3c296..4cacc269709a9 100644 --- a/compiler/rustc_data_structures/src/marker.rs +++ b/compiler/rustc_data_structures/src/marker.rs @@ -76,7 +76,6 @@ impl_dyn_send!( [crate::sync::RwLock where T: DynSend] [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag] [rustc_arena::TypedArena where T: DynSend] - [hashbrown::HashTable where T: DynSend] [indexmap::IndexSet where V: DynSend, S: DynSend] [indexmap::IndexMap where K: DynSend, V: DynSend, S: DynSend] [thin_vec::ThinVec where T: DynSend] @@ -154,7 +153,6 @@ impl_dyn_sync!( [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag] [parking_lot::lock_api::Mutex where R: DynSync, T: ?Sized + DynSend] [parking_lot::lock_api::RwLock where R: DynSync, T: ?Sized + DynSend + DynSync] - [hashbrown::HashTable where T: DynSync] [indexmap::IndexSet where V: DynSync, S: DynSync] [indexmap::IndexMap where K: DynSync, V: DynSync, S: DynSync] [smallvec::SmallVec where A: smallvec::Array + DynSync] diff --git a/compiler/rustc_data_structures/src/sharded.rs b/compiler/rustc_data_structures/src/sharded.rs index 49cafcb17a03e..3016348f22469 100644 --- a/compiler/rustc_data_structures/src/sharded.rs +++ b/compiler/rustc_data_structures/src/sharded.rs @@ -1,11 +1,11 @@ use std::borrow::Borrow; +use std::collections::hash_map::RawEntryMut; use std::hash::{Hash, Hasher}; -use std::{iter, mem}; +use std::iter; use either::Either; -use hashbrown::hash_table::{Entry, HashTable}; -use crate::fx::FxHasher; +use crate::fx::{FxHashMap, FxHasher}; use crate::sync::{CacheAligned, Lock, LockGuard, Mode, is_dyn_thread_safe}; // 32 shards is sufficient to reduce contention on an 8-core Ryzen 7 1700, @@ -140,67 +140,17 @@ pub fn shards() -> usize { 1 } -pub type ShardedHashMap = Sharded>; +pub type ShardedHashMap = Sharded>; impl ShardedHashMap { pub fn with_capacity(cap: usize) -> Self { - Self::new(|| HashTable::with_capacity(cap)) + Self::new(|| FxHashMap::with_capacity_and_hasher(cap, 
rustc_hash::FxBuildHasher::default())) } pub fn len(&self) -> usize { self.lock_shards().map(|shard| shard.len()).sum() } } -impl ShardedHashMap { - #[inline] - pub fn get(&self, key: &Q) -> Option - where - K: Borrow, - Q: Hash + Eq, - V: Clone, - { - let hash = make_hash(key); - let shard = self.lock_shard_by_hash(hash); - let (_, value) = shard.find(hash, |(k, _)| k.borrow() == key)?; - Some(value.clone()) - } - - #[inline] - pub fn get_or_insert_with(&self, key: K, default: impl FnOnce() -> V) -> V - where - V: Copy, - { - let hash = make_hash(&key); - let mut shard = self.lock_shard_by_hash(hash); - - match table_entry(&mut shard, hash, &key) { - Entry::Occupied(e) => e.get().1, - Entry::Vacant(e) => { - let value = default(); - e.insert((key, value)); - value - } - } - } - - #[inline] - pub fn insert(&self, key: K, value: V) -> Option { - let hash = make_hash(&key); - let mut shard = self.lock_shard_by_hash(hash); - - match table_entry(&mut shard, hash, &key) { - Entry::Occupied(e) => { - let previous = mem::replace(&mut e.into_mut().1, value); - Some(previous) - } - Entry::Vacant(e) => { - e.insert((key, value)); - None - } - } - } -} - impl ShardedHashMap { #[inline] pub fn intern_ref(&self, value: &Q, make: impl FnOnce() -> K) -> K @@ -210,12 +160,13 @@ impl ShardedHashMap { { let hash = make_hash(value); let mut shard = self.lock_shard_by_hash(hash); + let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, value); - match table_entry(&mut shard, hash, value) { - Entry::Occupied(e) => e.get().0, - Entry::Vacant(e) => { + match entry { + RawEntryMut::Occupied(e) => *e.key(), + RawEntryMut::Vacant(e) => { let v = make(); - e.insert((v, ())); + e.insert_hashed_nocheck(hash, v, ()); v } } @@ -229,12 +180,13 @@ impl ShardedHashMap { { let hash = make_hash(&value); let mut shard = self.lock_shard_by_hash(hash); + let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, &value); - match table_entry(&mut shard, hash, &value) { - Entry::Occupied(e) => e.get().0, - Entry::Vacant(e) => { + match entry { + RawEntryMut::Occupied(e) => *e.key(), + RawEntryMut::Vacant(e) => { let v = make(value); - e.insert((v, ())); + e.insert_hashed_nocheck(hash, v, ()); v } } @@ -251,30 +203,17 @@ impl ShardedHashMap { let hash = make_hash(&value); let shard = self.lock_shard_by_hash(hash); let value = value.into_pointer(); - shard.find(hash, |(k, ())| k.into_pointer() == value).is_some() + shard.raw_entry().from_hash(hash, |entry| entry.into_pointer() == value).is_some() } } #[inline] -fn make_hash(val: &K) -> u64 { +pub fn make_hash(val: &K) -> u64 { let mut state = FxHasher::default(); val.hash(&mut state); state.finish() } -#[inline] -fn table_entry<'a, K, V, Q>( - table: &'a mut HashTable<(K, V)>, - hash: u64, - key: &Q, -) -> Entry<'a, (K, V)> -where - K: Hash + Borrow, - Q: ?Sized + Eq, -{ - table.entry(hash, move |(k, _)| k.borrow() == key, |(k, _)| make_hash(k)) -} - /// Get a shard with a pre-computed hash value. If `get_shard_by_value` is /// ever used in combination with `get_shard_by_hash` on a single `Sharded` /// instance, then `hash` must be computed with `FxHasher`. 
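The new `sharded.rs` code above keys everything off a single up-front hash: `make_hash` runs `FxHasher` once, the hash picks the shard, and `from_key_hashed_nocheck` / `insert_hashed_nocheck` reuse it so the interned key is never hashed a second time. A minimal standalone sketch of that pattern on a plain `HashMap` (no sharding; the `intern` helper and `String` key are illustrative, not rustc's types), assuming a nightly toolchain for `hash_raw_entry`:

#![feature(hash_raw_entry)] // nightly-only, the same gate the diff enables

use std::collections::HashMap;
use std::collections::hash_map::RawEntryMut;
use std::hash::{BuildHasher, Hash, Hasher};

/// Intern `value`, hashing it exactly once with the map's own hasher.
fn intern(map: &mut HashMap<String, ()>, value: &str) -> String {
    // Hash up front; rustc's ShardedHashMap also uses this hash to pick the shard.
    let mut state = map.hasher().build_hasher();
    value.hash(&mut state);
    let hash = state.finish();

    match map.raw_entry_mut().from_key_hashed_nocheck(hash, value) {
        // Already interned: hand back the stored key.
        RawEntryMut::Occupied(e) => e.key().clone(),
        // Not yet interned: insert under the precomputed hash, skipping a rehash.
        RawEntryMut::Vacant(e) => {
            let (k, _) = e.insert_hashed_nocheck(hash, value.to_string(), ());
            k.clone()
        }
    }
}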
Otherwise, diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index c2438af6a1e19..6b1d04addf343 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -452,7 +452,12 @@ impl<'tcx> TyCtxt<'tcx> { } let id = self.alloc_map.reserve(); debug!("creating alloc {:?} with id {id:?}", alloc_salt.0); - let had_previous = self.alloc_map.to_alloc.insert(id, alloc_salt.0.clone()).is_some(); + let had_previous = self + .alloc_map + .to_alloc + .lock_shard_by_value(&id) + .insert(id, alloc_salt.0.clone()) + .is_some(); // We just reserved, so should always be unique. assert!(!had_previous); dedup.insert(alloc_salt, id); @@ -505,7 +510,7 @@ impl<'tcx> TyCtxt<'tcx> { /// local dangling pointers and allocations in constants/statics. #[inline] pub fn try_get_global_alloc(self, id: AllocId) -> Option> { - self.alloc_map.to_alloc.get(&id) + self.alloc_map.to_alloc.lock_shard_by_value(&id).get(&id).cloned() } #[inline] @@ -524,7 +529,9 @@ impl<'tcx> TyCtxt<'tcx> { /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to /// call this function twice, even with the same `Allocation` will ICE the compiler. pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) { - if let Some(old) = self.alloc_map.to_alloc.insert(id, GlobalAlloc::Memory(mem)) { + if let Some(old) = + self.alloc_map.to_alloc.lock_shard_by_value(&id).insert(id, GlobalAlloc::Memory(mem)) + { bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}"); } } @@ -532,8 +539,11 @@ impl<'tcx> TyCtxt<'tcx> { /// Freezes an `AllocId` created with `reserve` by pointing it at a static item. Trying to /// call this function twice, even with the same `DefId` will ICE the compiler. pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) { - if let Some(old) = - self.alloc_map.to_alloc.insert(id, GlobalAlloc::Static(def_id.to_def_id())) + if let Some(old) = self + .alloc_map + .to_alloc + .lock_shard_by_value(&id) + .insert(id, GlobalAlloc::Static(def_id.to_def_id())) { bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}"); } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index f54dd2b0040ae..335c74862545a 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -2335,8 +2335,8 @@ macro_rules! sty_debug_print { $(let mut $variant = total;)* for shard in tcx.interners.type_.lock_shards() { - let types = shard.iter(); - for &(InternedInSet(t), ()) in types { + let types = shard.keys(); + for &InternedInSet(t) in types { let variant = match t.internee { ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) | ty::Float(..) 
| ty::Str | ty::Never => continue, diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs index 3109d53cd2caa..6ece01c211bac 100644 --- a/compiler/rustc_query_system/src/dep_graph/graph.rs +++ b/compiler/rustc_query_system/src/dep_graph/graph.rs @@ -1,4 +1,5 @@ use std::assert_matches::assert_matches; +use std::collections::hash_map::Entry; use std::fmt::Debug; use std::hash::Hash; use std::marker::PhantomData; @@ -8,7 +9,7 @@ use std::sync::atomic::{AtomicU32, Ordering}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::profiling::{QueryInvocationId, SelfProfilerRef}; -use rustc_data_structures::sharded::{self, ShardedHashMap}; +use rustc_data_structures::sharded::{self, Sharded}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::{AtomicU64, Lock}; use rustc_data_structures::unord::UnordMap; @@ -618,7 +619,7 @@ impl DepGraphData { if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) { self.current.prev_index_to_index.lock()[prev_index] } else { - self.current.new_node_to_index.get(dep_node) + self.current.new_node_to_index.lock_shard_by_value(dep_node).get(dep_node).copied() } } @@ -1047,7 +1048,7 @@ rustc_index::newtype_index! { /// first, and `data` second. pub(super) struct CurrentDepGraph { encoder: GraphEncoder, - new_node_to_index: ShardedHashMap, + new_node_to_index: Sharded>, prev_index_to_index: Lock>>, /// This is used to verify that fingerprints do not change between the creation of a node @@ -1116,9 +1117,12 @@ impl CurrentDepGraph { profiler, previous, ), - new_node_to_index: ShardedHashMap::with_capacity( - new_node_count_estimate / sharded::shards(), - ), + new_node_to_index: Sharded::new(|| { + FxHashMap::with_capacity_and_hasher( + new_node_count_estimate / sharded::shards(), + Default::default(), + ) + }), prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)), anon_id_seed, #[cfg(debug_assertions)] @@ -1148,9 +1152,14 @@ impl CurrentDepGraph { edges: EdgesVec, current_fingerprint: Fingerprint, ) -> DepNodeIndex { - let dep_node_index = self - .new_node_to_index - .get_or_insert_with(key, || self.encoder.send(key, current_fingerprint, edges)); + let dep_node_index = match self.new_node_to_index.lock_shard_by_value(&key).entry(key) { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => { + let dep_node_index = self.encoder.send(key, current_fingerprint, edges); + entry.insert(dep_node_index); + dep_node_index + } + }; #[cfg(debug_assertions)] self.record_edge(dep_node_index, key, current_fingerprint); @@ -1248,7 +1257,7 @@ impl CurrentDepGraph { ) { let node = &prev_graph.index_to_node(prev_index); debug_assert!( - !self.new_node_to_index.get(node).is_some(), + !self.new_node_to_index.lock_shard_by_value(node).contains_key(node), "node from previous graph present in new node collection" ); } @@ -1373,7 +1382,7 @@ fn panic_on_forbidden_read(data: &DepGraphData, dep_node_index: DepN if dep_node.is_none() { // Try to find it among the new nodes for shard in data.current.new_node_to_index.lock_shards() { - if let Some((node, _)) = shard.iter().find(|(_, index)| *index == dep_node_index) { + if let Some((node, _)) = shard.iter().find(|(_, index)| **index == dep_node_index) { dep_node = Some(*node); break; } diff --git a/compiler/rustc_query_system/src/lib.rs b/compiler/rustc_query_system/src/lib.rs index 
2aedd365adc89..a546362414cf4 100644 --- a/compiler/rustc_query_system/src/lib.rs +++ b/compiler/rustc_query_system/src/lib.rs @@ -3,6 +3,7 @@ #![feature(assert_matches)] #![feature(core_intrinsics)] #![feature(dropck_eyepatch)] +#![feature(hash_raw_entry)] #![feature(let_chains)] #![feature(min_specialization)] // tidy-alphabetical-end diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs index 30b5d7e595491..3b47e7eba0ff8 100644 --- a/compiler/rustc_query_system/src/query/caches.rs +++ b/compiler/rustc_query_system/src/query/caches.rs @@ -2,7 +2,8 @@ use std::fmt::Debug; use std::hash::Hash; use std::sync::OnceLock; -use rustc_data_structures::sharded::ShardedHashMap; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sharded::{self, Sharded}; pub use rustc_data_structures::vec_cache::VecCache; use rustc_hir::def_id::LOCAL_CRATE; use rustc_index::Idx; @@ -35,7 +36,7 @@ pub trait QueryCache: Sized { /// In-memory cache for queries whose keys aren't suitable for any of the /// more specialized kinds of cache. Backed by a sharded hashmap. pub struct DefaultCache { - cache: ShardedHashMap, + cache: Sharded>, } impl Default for DefaultCache { @@ -54,14 +55,19 @@ where #[inline(always)] fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> { - self.cache.get(key) + let key_hash = sharded::make_hash(key); + let lock = self.cache.lock_shard_by_hash(key_hash); + let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key); + + if let Some((_, value)) = result { Some(*value) } else { None } } #[inline] fn complete(&self, key: K, value: V, index: DepNodeIndex) { + let mut lock = self.cache.lock_shard_by_value(&key); // We may be overwriting another value. This is all right, since the dep-graph // will check that the fingerprint matches. - self.cache.insert(key, (value, index)); + lock.insert(key, (value, index)); } fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) { diff --git a/library/std/src/collections/hash/map.rs b/library/std/src/collections/hash/map.rs index 2487f5a2a503f..eda6a8b6b0f3b 100644 --- a/library/std/src/collections/hash/map.rs +++ b/library/std/src/collections/hash/map.rs @@ -1290,6 +1290,69 @@ where } } +impl HashMap +where + S: BuildHasher, +{ + /// Creates a raw entry builder for the `HashMap`. + /// + /// Raw entries provide the lowest level of control for searching and + /// manipulating a map. They must be manually initialized with a hash and + /// then manually searched. After this, insertions into a vacant entry + /// still require an owned key to be provided. + /// + /// Raw entries are useful for such exotic situations as: + /// + /// * Hash memoization + /// * Deferring the creation of an owned key until it is known to be required + /// * Using a search key that doesn't work with the Borrow trait + /// * Using custom comparison logic without newtype wrappers + /// + /// Because raw entries provide much more low-level control, it's much easier + /// to put the `HashMap` into an inconsistent state which, while memory-safe, + /// will cause the map to produce seemingly random results. Higher-level and + /// more foolproof APIs like `entry` should be preferred when possible. + /// + /// In particular, the hash used to initialize the raw entry must still be + /// consistent with the hash of the key that is ultimately stored in the entry. 
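The `DefaultCache` lookup in `caches.rs` above has the same shape: `sharded::make_hash` computes the key's hash once, the hash selects the shard, and `from_key_hashed_nocheck` probes that shard's map. A rough standalone analogue, with `Mutex`, `RandomState`, and a hypothetical `ShardedCache` standing in for rustc's `Lock`, `FxHasher`, and `Sharded` (the high-bit shard selection is a simplification, not rustc's exact formula):

#![feature(hash_raw_entry)] // nightly-only

use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, Hasher, RandomState};
use std::sync::Mutex;

const SHARDS: usize = 4;

struct ShardedCache<K, V> {
    hasher: RandomState,
    shards: [Mutex<HashMap<K, V, RandomState>>; SHARDS],
}

impl<K: Hash + Eq, V: Copy> ShardedCache<K, V> {
    fn new() -> Self {
        let hasher = RandomState::new();
        // Every shard clones the same RandomState, so a hash computed once up front
        // is valid for whichever shard it ends up probing.
        let shards = std::array::from_fn(|_| Mutex::new(HashMap::with_hasher(hasher.clone())));
        Self { hasher, shards }
    }

    fn hash_of(&self, key: &K) -> u64 {
        let mut state = self.hasher.build_hasher();
        key.hash(&mut state);
        state.finish()
    }

    fn lookup(&self, key: &K) -> Option<V> {
        let hash = self.hash_of(key);
        // High bits pick the shard; the table itself derives bucket indices from the low bits.
        let shard = &self.shards[(hash >> 62) as usize % SHARDS];
        let guard = shard.lock().unwrap();
        guard.raw_entry().from_key_hashed_nocheck(hash, key).map(|(_, v)| *v)
    }

    fn complete(&self, key: K, value: V) {
        let hash = self.hash_of(&key);
        let shard = &self.shards[(hash >> 62) as usize % SHARDS];
        // Overwriting an existing value is fine here, mirroring DefaultCache::complete.
        shard.lock().unwrap().insert(key, value);
    }
}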
+ /// This is because implementations of `HashMap` may need to recompute hashes + /// when resizing, at which point only the keys are available. + /// + /// Raw entries give mutable access to the keys. This must not be used + /// to modify how the key would compare or hash, as the map will not re-evaluate + /// where the key should go, meaning the keys may become "lost" if their + /// location does not reflect their state. For instance, if you change a key + /// so that the map now contains keys which compare equal, search may start + /// acting erratically, with two keys randomly masking each other. Implementations + /// are free to assume this doesn't happen (within the limits of memory-safety). + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> { + RawEntryBuilderMut { map: self } + } + + /// Creates a raw immutable entry builder for the `HashMap`. + /// + /// Raw entries provide the lowest level of control for searching and + /// manipulating a map. They must be manually initialized with a hash and + /// then manually searched. + /// + /// This is useful for + /// * Hash memoization + /// * Using a search key that doesn't work with the Borrow trait + /// * Using custom comparison logic without newtype wrappers + /// + /// Unless you are in such a situation, higher-level and more foolproof APIs like + /// `get` should be preferred. + /// + /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> { + RawEntryBuilder { map: self } + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Clone for HashMap where @@ -1777,6 +1840,404 @@ impl Default for IntoValues { } } +/// A builder for computing where in a HashMap a key-value pair would be stored. +/// +/// See the [`HashMap::raw_entry_mut`] docs for usage examples. +#[unstable(feature = "hash_raw_entry", issue = "56167")] +pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> { + map: &'a mut HashMap, +} + +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This is a lower-level version of [`Entry`]. +/// +/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`], +/// then calling one of the methods of that [`RawEntryBuilderMut`]. +/// +/// [`raw_entry_mut`]: HashMap::raw_entry_mut +#[unstable(feature = "hash_raw_entry", issue = "56167")] +pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> { + /// An occupied entry. + Occupied(RawOccupiedEntryMut<'a, K, V, S>), + /// A vacant entry. + Vacant(RawVacantEntryMut<'a, K, V, S>), +} + +/// A view into an occupied entry in a `HashMap`. +/// It is part of the [`RawEntryMut`] enum. +#[unstable(feature = "hash_raw_entry", issue = "56167")] +pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a, S: 'a> { + base: base::RawOccupiedEntryMut<'a, K, V, S>, +} + +/// A view into a vacant entry in a `HashMap`. +/// It is part of the [`RawEntryMut`] enum. +#[unstable(feature = "hash_raw_entry", issue = "56167")] +pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> { + base: base::RawVacantEntryMut<'a, K, V, S>, +} + +/// A builder for computing where in a HashMap a key-value pair would be stored. +/// +/// See the [`HashMap::raw_entry`] docs for usage examples. 
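The three lookup methods on these builders differ only in how much work the caller has already done: `from_key` hashes and compares, `from_key_hashed_nocheck` takes a caller-supplied hash but still compares keys with `Eq`, and `from_hash` trusts a caller-supplied predicate entirely. A small sketch (hypothetical values, nightly feature gate) where all three resolve the same entry:

#![feature(hash_raw_entry)]

use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, Hasher};

fn main() {
    let mut map: HashMap<String, u32> = HashMap::new();
    map.insert("poneyland".to_string(), 3);

    // Compute the hash with the map's own hasher so it matches what the map stored.
    let mut state = map.hasher().build_hasher();
    "poneyland".hash(&mut state);
    let hash = state.finish();

    let expected = map.get_key_value("poneyland");
    // Hashes internally, compares via Borrow<str> + Eq.
    assert_eq!(map.raw_entry().from_key("poneyland"), expected);
    // Reuses the precomputed hash, still compares keys with Eq.
    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, "poneyland"), expected);
    // No Borrow or Eq at all: the closure decides what counts as a match.
    assert_eq!(map.raw_entry().from_hash(hash, |k| k.as_str() == "poneyland"), expected);
}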
+#[unstable(feature = "hash_raw_entry", issue = "56167")] +pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> { + map: &'a HashMap, +} + +impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> +where + S: BuildHasher, +{ + /// Creates a `RawEntryMut` from the given key. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_key(self, k: &Q) -> RawEntryMut<'a, K, V, S> + where + K: Borrow, + Q: Hash + Eq, + { + map_raw_entry(self.map.base.raw_entry_mut().from_key(k)) + } + + /// Creates a `RawEntryMut` from the given key and its hash. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S> + where + K: Borrow, + Q: Eq, + { + map_raw_entry(self.map.base.raw_entry_mut().from_key_hashed_nocheck(hash, k)) + } + + /// Creates a `RawEntryMut` from the given hash. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S> + where + for<'b> F: FnMut(&'b K) -> bool, + { + map_raw_entry(self.map.base.raw_entry_mut().from_hash(hash, is_match)) + } +} + +impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> +where + S: BuildHasher, +{ + /// Access an entry by key. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_key(self, k: &Q) -> Option<(&'a K, &'a V)> + where + K: Borrow, + Q: Hash + Eq, + { + self.map.base.raw_entry().from_key(k) + } + + /// Access an entry by a key and its hash. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> + where + K: Borrow, + Q: Hash + Eq, + { + self.map.base.raw_entry().from_key_hashed_nocheck(hash, k) + } + + /// Access an entry by hash. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn from_hash(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)> + where + F: FnMut(&K) -> bool, + { + self.map.base.raw_entry().from_hash(hash, is_match) + } +} + +impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { + /// Ensures a value is in the entry by inserting the default if empty, and returns + /// mutable references to the key and value in the entry. + /// + /// # Examples + /// + /// ``` + /// #![feature(hash_raw_entry)] + /// use std::collections::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3); + /// assert_eq!(map["poneyland"], 3); + /// + /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2; + /// assert_eq!(map["poneyland"], 6); + /// ``` + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V) + where + K: Hash, + S: BuildHasher, + { + match self { + RawEntryMut::Occupied(entry) => entry.into_key_value(), + RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val), + } + } + + /// Ensures a value is in the entry by inserting the result of the default function if empty, + /// and returns mutable references to the key and value in the entry. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(hash_raw_entry)] + /// use std::collections::HashMap; + /// + /// let mut map: HashMap<&str, String> = HashMap::new(); + /// + /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| { + /// ("poneyland", "hoho".to_string()) + /// }); + /// + /// assert_eq!(map["poneyland"], "hoho".to_string()); + /// ``` + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn or_insert_with(self, default: F) -> (&'a mut K, &'a mut V) + where + F: FnOnce() -> (K, V), + K: Hash, + S: BuildHasher, + { + match self { + RawEntryMut::Occupied(entry) => entry.into_key_value(), + RawEntryMut::Vacant(entry) => { + let (k, v) = default(); + entry.insert(k, v) + } + } + } + + /// Provides in-place mutable access to an occupied entry before any + /// potential inserts into the map. + /// + /// # Examples + /// + /// ``` + /// #![feature(hash_raw_entry)] + /// use std::collections::HashMap; + /// + /// let mut map: HashMap<&str, u32> = HashMap::new(); + /// + /// map.raw_entry_mut() + /// .from_key("poneyland") + /// .and_modify(|_k, v| { *v += 1 }) + /// .or_insert("poneyland", 42); + /// assert_eq!(map["poneyland"], 42); + /// + /// map.raw_entry_mut() + /// .from_key("poneyland") + /// .and_modify(|_k, v| { *v += 1 }) + /// .or_insert("poneyland", 0); + /// assert_eq!(map["poneyland"], 43); + /// ``` + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn and_modify(self, f: F) -> Self + where + F: FnOnce(&mut K, &mut V), + { + match self { + RawEntryMut::Occupied(mut entry) => { + { + let (k, v) = entry.get_key_value_mut(); + f(k, v); + } + RawEntryMut::Occupied(entry) + } + RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry), + } + } +} + +impl<'a, K, V, S> RawOccupiedEntryMut<'a, K, V, S> { + /// Gets a reference to the key in the entry. + #[inline] + #[must_use] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn key(&self) -> &K { + self.base.key() + } + + /// Gets a mutable reference to the key in the entry. + #[inline] + #[must_use] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn key_mut(&mut self) -> &mut K { + self.base.key_mut() + } + + /// Converts the entry into a mutable reference to the key in the entry + /// with a lifetime bound to the map itself. + #[inline] + #[must_use = "`self` will be dropped if the result is not used"] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn into_key(self) -> &'a mut K { + self.base.into_key() + } + + /// Gets a reference to the value in the entry. + #[inline] + #[must_use] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn get(&self) -> &V { + self.base.get() + } + + /// Converts the `OccupiedEntry` into a mutable reference to the value in the entry + /// with a lifetime bound to the map itself. + #[inline] + #[must_use = "`self` will be dropped if the result is not used"] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn into_mut(self) -> &'a mut V { + self.base.into_mut() + } + + /// Gets a mutable reference to the value in the entry. + #[inline] + #[must_use] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn get_mut(&mut self) -> &mut V { + self.base.get_mut() + } + + /// Gets a reference to the key and value in the entry. 
+ #[inline] + #[must_use] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn get_key_value(&mut self) -> (&K, &V) { + self.base.get_key_value() + } + + /// Gets a mutable reference to the key and value in the entry. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) { + self.base.get_key_value_mut() + } + + /// Converts the `OccupiedEntry` into a mutable reference to the key and value in the entry + /// with a lifetime bound to the map itself. + #[inline] + #[must_use = "`self` will be dropped if the result is not used"] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn into_key_value(self) -> (&'a mut K, &'a mut V) { + self.base.into_key_value() + } + + /// Sets the value of the entry, and returns the entry's old value. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn insert(&mut self, value: V) -> V { + self.base.insert(value) + } + + /// Sets the value of the entry, and returns the entry's old value. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn insert_key(&mut self, key: K) -> K { + self.base.insert_key(key) + } + + /// Takes the value out of the entry, and returns it. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn remove(self) -> V { + self.base.remove() + } + + /// Take the ownership of the key and value from the map. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn remove_entry(self) -> (K, V) { + self.base.remove_entry() + } +} + +impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { + /// Sets the value of the entry with the `VacantEntry`'s key, + /// and returns a mutable reference to it. + #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V) + where + K: Hash, + S: BuildHasher, + { + self.base.insert(key, value) + } + + /// Sets the value of the entry with the VacantEntry's key, + /// and returns a mutable reference to it. 
+ #[inline] + #[unstable(feature = "hash_raw_entry", issue = "56167")] + pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) + where + K: Hash, + S: BuildHasher, + { + self.base.insert_hashed_nocheck(hash, key, value) + } +} + +#[unstable(feature = "hash_raw_entry", issue = "56167")] +impl Debug for RawEntryBuilderMut<'_, K, V, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RawEntryBuilder").finish_non_exhaustive() + } +} + +#[unstable(feature = "hash_raw_entry", issue = "56167")] +impl Debug for RawEntryMut<'_, K, V, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(), + RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(), + } + } +} + +#[unstable(feature = "hash_raw_entry", issue = "56167")] +impl Debug for RawOccupiedEntryMut<'_, K, V, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RawOccupiedEntryMut") + .field("key", self.key()) + .field("value", self.get()) + .finish_non_exhaustive() + } +} + +#[unstable(feature = "hash_raw_entry", issue = "56167")] +impl Debug for RawVacantEntryMut<'_, K, V, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RawVacantEntryMut").finish_non_exhaustive() + } +} + +#[unstable(feature = "hash_raw_entry", issue = "56167")] +impl Debug for RawEntryBuilder<'_, K, V, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("RawEntryBuilder").finish_non_exhaustive() + } +} + /// A view into a single entry in a map, which may either be vacant or occupied. /// /// This `enum` is constructed from the [`entry`] method on [`HashMap`]. 
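One use case listed in the `raw_entry_mut` docs above, deferring creation of an owned key, is awkward with the stable `Entry` API because `HashMap::entry` takes the key by value up front. A small sketch (the `bump` helper is illustrative) that probes with a borrowed `&str` and only allocates a `String` key when the entry turns out to be vacant:

#![feature(hash_raw_entry)]

use std::collections::HashMap;
use std::collections::hash_map::RawEntryMut;

fn bump(counts: &mut HashMap<String, u32>, word: &str) {
    match counts.raw_entry_mut().from_key(word) {
        RawEntryMut::Occupied(mut e) => *e.get_mut() += 1,
        RawEntryMut::Vacant(e) => {
            // The String is allocated only the first time `word` is seen.
            e.insert(word.to_string(), 1);
        }
    }
}

The stable equivalent, `counts.entry(word.to_string())`, would allocate on every call even when the word is already counted.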
@@ -2849,6 +3310,16 @@ pub(super) fn map_try_reserve_error(err: hashbrown::TryReserveError) -> TryReser } } +#[inline] +fn map_raw_entry<'a, K: 'a, V: 'a, S: 'a>( + raw: base::RawEntryMut<'a, K, V, S>, +) -> RawEntryMut<'a, K, V, S> { + match raw { + base::RawEntryMut::Occupied(base) => RawEntryMut::Occupied(RawOccupiedEntryMut { base }), + base::RawEntryMut::Vacant(base) => RawEntryMut::Vacant(RawVacantEntryMut { base }), + } +} + #[allow(dead_code)] fn assert_covariance() { fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> { diff --git a/library/std/src/collections/hash/map/tests.rs b/library/std/src/collections/hash/map/tests.rs index 9f7df20a1d7e4..a275488a55602 100644 --- a/library/std/src/collections/hash/map/tests.rs +++ b/library/std/src/collections/hash/map/tests.rs @@ -852,6 +852,99 @@ fn test_try_reserve() { } } +#[test] +fn test_raw_entry() { + use super::RawEntryMut::{Occupied, Vacant}; + + let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)]; + + let mut map: HashMap<_, _> = xs.iter().cloned().collect(); + + let compute_hash = |map: &HashMap, k: i32| -> u64 { + use core::hash::{BuildHasher, Hash, Hasher}; + + let mut hasher = map.hasher().build_hasher(); + k.hash(&mut hasher); + hasher.finish() + }; + + // Existing key (insert) + match map.raw_entry_mut().from_key(&1) { + Vacant(_) => unreachable!(), + Occupied(mut view) => { + assert_eq!(view.get(), &10); + assert_eq!(view.insert(100), 10); + } + } + let hash1 = compute_hash(&map, 1); + assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100)); + assert_eq!(map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(), (&1, &100)); + assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(), (&1, &100)); + assert_eq!(map.len(), 6); + + // Existing key (update) + match map.raw_entry_mut().from_key(&2) { + Vacant(_) => unreachable!(), + Occupied(mut view) => { + let v = view.get_mut(); + let new_v = (*v) * 10; + *v = new_v; + } + } + let hash2 = compute_hash(&map, 2); + assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200)); + assert_eq!(map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(), (&2, &200)); + assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(), (&2, &200)); + assert_eq!(map.len(), 6); + + // Existing key (take) + let hash3 = compute_hash(&map, 3); + match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) { + Vacant(_) => unreachable!(), + Occupied(view) => { + assert_eq!(view.remove_entry(), (3, 30)); + } + } + assert_eq!(map.raw_entry().from_key(&3), None); + assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None); + assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None); + assert_eq!(map.len(), 5); + + // Nonexistent key (insert) + match map.raw_entry_mut().from_key(&10) { + Occupied(_) => unreachable!(), + Vacant(view) => { + assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000)); + } + } + assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000)); + assert_eq!(map.len(), 6); + + // Ensure all lookup methods produce equivalent results. 
+ for k in 0..12 { + let hash = compute_hash(&map, k); + let v = map.get(&k).cloned(); + let kv = v.as_ref().map(|v| (&k, v)); + + assert_eq!(map.raw_entry().from_key(&k), kv); + assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv); + assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv); + + match map.raw_entry_mut().from_key(&k) { + Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), + Vacant(_) => assert_eq!(v, None), + } + match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) { + Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), + Vacant(_) => assert_eq!(v, None), + } + match map.raw_entry_mut().from_hash(hash, |q| *q == k) { + Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv), + Vacant(_) => assert_eq!(v, None), + } + } +} + mod test_extract_if { use super::*; use crate::panic::{AssertUnwindSafe, catch_unwind};
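For contrast, the `intern_node` change in `graph.rs` above drops down to the stable `Entry` API on the locked shard once `ShardedHashMap::get_or_insert_with` is gone, hashing the key once for shard selection and once more inside `entry`, while keeping the "encode only if vacant" behaviour. A rough standalone analogue with simplified types (`u64` keys, `u32` indices, and `send` standing in for the graph encoder):

use std::collections::HashMap;
use std::collections::hash_map::Entry;

fn intern_node(
    map: &mut HashMap<u64, u32>,
    key: u64,
    send: impl FnOnce(u64) -> u32,
) -> u32 {
    match map.entry(key) {
        // Node already has an index: reuse it without touching the encoder.
        Entry::Occupied(entry) => *entry.get(),
        // New node: allocate an index exactly once and remember it.
        Entry::Vacant(entry) => {
            let index = send(key);
            *entry.insert(index)
        }
    }
}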