@@ -71,15 +71,35 @@ impl fmt::Debug for Item {
 }
 
 /// Extra per-location state.
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug)]
 pub struct Stack {
     /// Used *mostly* as a stack; never empty.
     /// Invariants:
     /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
     /// * Except for `Untagged`, no tag occurs in the stack more than once.
     borrows: Vec<Item>,
+    /// We often want to know what (if any) tag in the stack exists for some `SbTag`.
+    /// The tag we're looking for does not occur often enough at the end of the stack for a linear
+    /// search to be efficient, so we use the uniqueness guarantee to keep a map from tag to
+    /// position in the stack.
+    cache: FxHashMap<PtrId, usize>,
+    /// `Untagged` may occur multiple times so we store it outside of the map.
+    untagged: Vec<usize>,
+    /// On a read, we need to disable all `Unique` above the granting item. We can avoid most of
+    /// this scan by keeping track of the region of the borrow stack that may contain `Unique`s.
+    first_unique: usize,
+    last_unique: usize,
+}
+
+impl PartialEq for Stack {
+    fn eq(&self, other: &Self) -> bool {
+        // All the semantics of Stack are in self.borrows, everything else is caching
+        self.borrows == other.borrows
+    }
 }
 
+impl Eq for Stack {}
+
 /// Extra per-allocation state.
 #[derive(Clone, Debug)]
 pub struct Stacks {
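
The new fields are redundant state derived from `borrows`, which is why `PartialEq`/`Eq` are now written by hand to compare only `borrows`. Below is a minimal, self-contained sketch of that pattern with illustrative names (`std::collections::HashMap` stands in for Miri's `FxHashMap`); it is not Miri's code, just the same shape: a vector of semantic data plus a tag-to-position map that equality ignores.

    use std::collections::HashMap;

    // Illustrative stand-in for Miri's `Item`; the real cache is an
    // `FxHashMap<PtrId, usize>` plus a separate `untagged: Vec<usize>`.
    #[derive(Clone, Debug, PartialEq, Eq)]
    struct Entry {
        tag: u64,
    }

    #[derive(Clone, Debug)]
    struct CachedStack {
        /// The semantic contents; everything below is derived from it.
        entries: Vec<Entry>,
        /// tag -> position in `entries`, valid because tags are unique.
        positions: HashMap<u64, usize>,
    }

    impl PartialEq for CachedStack {
        fn eq(&self, other: &Self) -> bool {
            // The cache is redundant, so equality only looks at the semantic data.
            self.entries == other.entries
        }
    }

    impl Eq for CachedStack {}

    impl CachedStack {
        fn push(&mut self, entry: Entry) {
            self.positions.insert(entry.tag, self.entries.len());
            self.entries.push(entry);
        }
    }

    fn main() {
        let mut a = CachedStack { entries: Vec::new(), positions: HashMap::new() };
        let mut b = a.clone();
        a.push(Entry { tag: 1 });
        b.push(Entry { tag: 1 });
        assert_eq!(a, b); // equal regardless of the maps' internal state
    }
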
@@ -256,17 +276,23 @@ impl<'tcx> Stack {
     /// Find the item granting the given kind of access to the given tag, and return where
     /// it is on the stack.
     fn find_granting(&self, access: AccessKind, tag: SbTag) -> Option<usize> {
-        self.borrows
-            .iter()
-            .enumerate() // we also need to know *where* in the stack
-            .rev() // search top-to-bottom
-            // Return permission of first item that grants access.
-            // We require a permission with the right tag, ensuring U3 and F3.
-            .find_map(
-                |(idx, item)| {
-                    if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
-                },
-            )
+        match tag {
+            SbTag::Untagged => {
+                // Search top-to-bottom
+                for idx in self.untagged.iter().rev() {
+                    // Return permission of the first item that grants access.
+                    // We require a permission with the right tag, ensuring U3 and F3.
+                    if self.borrows[*idx].perm.grants(access) {
+                        return Some(*idx);
+                    }
+                }
+                return None;
+            }
+            SbTag::Tagged(id) =>
+                self.cache.get(&id).and_then(|idx| {
+                    if self.borrows[*idx].perm.grants(access) { Some(*idx) } else { None }
+                }),
+        }
     }
 
     /// Find the first write-incompatible item above the given one --
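
With the cache in place, `find_granting` no longer walks the whole stack: a tagged lookup is a single map probe, and an untagged lookup only scans the recorded untagged positions from the top down. A standalone sketch of that split, with hypothetical names and a closure standing in for `self.borrows[idx].perm.grants(access)`:

    use std::collections::HashMap;

    // Illustrative stand-ins; Miri distinguishes `SbTag::Tagged(PtrId)` from `SbTag::Untagged`.
    #[derive(Clone, Copy)]
    enum Tag {
        Tagged(u64),
        Untagged,
    }

    /// Look up the granting position without scanning the whole stack.
    fn find(
        tag: Tag,
        cache: &HashMap<u64, usize>,
        untagged: &[usize], // sorted ascending, like `Stack::untagged`
        grants: impl Fn(usize) -> bool,
    ) -> Option<usize> {
        match tag {
            Tag::Tagged(id) => cache.get(&id).copied().filter(|&idx| grants(idx)),
            Tag::Untagged => untagged.iter().rev().copied().find(|&idx| grants(idx)),
        }
    }

    fn main() {
        let cache = HashMap::from([(7u64, 2usize)]);
        let untagged = [0usize, 3];
        assert_eq!(find(Tag::Tagged(7), &cache, &untagged, |_| true), Some(2));
        // The topmost untagged item (index 3) does not grant, so the scan keeps going down.
        assert_eq!(find(Tag::Untagged, &cache, &untagged, |idx| idx == 0), Some(0));
    }
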
@@ -356,6 +382,23 @@ impl<'tcx> Stack {
             for item in self.borrows.drain(first_incompatible_idx..).rev() {
                 trace!("access: popping item {:?}", item);
                 Stack::check_protector(&item, Some((tag, access)), global)?;
+                // The drain removes elements from `borrows`, but we need to remove those elements
+                // from the lookup cache too.
+                if let SbTag::Tagged(id) = item.tag {
+                    assert!(self.cache.remove(&id).is_some());
+                }
+            }
+
+            while self.untagged.last() >= Some(&first_incompatible_idx) {
+                self.untagged.pop();
+            }
+            if first_incompatible_idx <= self.first_unique {
+                // We removed all the Unique items
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Ensure the range doesn't extend past the new top of the stack
+                self.last_unique = self.last_unique.min(first_incompatible_idx.saturating_sub(1));
             }
         } else {
             // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read accesses.
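
When a write access pops items, the cache has to shrink with the stack: each drained tagged item loses its map entry, the sorted `untagged` vector is popped from the back while its last position lies at or above the cut, and the possibly-unique range is clamped. The `while self.untagged.last() >= Some(&first_incompatible_idx)` condition also terminates cleanly on an empty vector, because `None` compares less than `Some(_)`. A small standalone sketch of that truncation step (the function name is illustrative):

    /// Drop cached positions at or above `cut` after the backing vector was
    /// truncated to `cut` elements. `positions` is sorted ascending, so popping
    /// from the back is enough; the comparison is also safe on an empty vector
    /// because `None` compares less than `Some(_)`.
    fn retain_below(positions: &mut Vec<usize>, cut: usize) {
        while positions.last() >= Some(&cut) {
            positions.pop();
        }
    }

    fn main() {
        let mut positions = vec![0, 2, 5, 9];
        retain_below(&mut positions, 5); // the stack now keeps only indices 0..5
        assert_eq!(positions, vec![0, 2]);

        let mut empty: Vec<usize> = Vec::new();
        retain_below(&mut empty, 3); // terminates immediately, no panic
        assert!(empty.is_empty());
    }
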
@@ -366,14 +409,26 @@ impl<'tcx> Stack {
             // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared
             // reference and use that.
             // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring blocks of SRWs.
-            for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
-                let item = &mut self.borrows[idx];
-                if item.perm == Permission::Unique {
-                    trace!("access: disabling item {:?}", item);
-                    Stack::check_protector(item, Some((tag, access)), global)?;
-                    item.perm = Permission::Disabled;
+            if granting_idx < self.last_unique {
+                // add 1 so we don't disable the granting item
+                let lower = self.first_unique.max(granting_idx + 1);
+                //let upper = (self.last_unique + 1).min(self.borrows.len());
+                for item in &mut self.borrows[lower..=self.last_unique] {
+                    if item.perm == Permission::Unique {
+                        trace!("access: disabling item {:?}", item);
+                        Stack::check_protector(item, Some((tag, access)), global)?;
+                        item.perm = Permission::Disabled;
+                    }
                 }
             }
+            if granting_idx < self.first_unique {
+                // We disabled all Unique items
+                self.first_unique = 0;
+                self.last_unique = 0;
+            } else {
+                // Truncate the range to granting_idx
+                self.last_unique = self.last_unique.min(granting_idx);
+            }
         }
 
         // Done.
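
On reads, the expensive part of the old code was scanning every item above the granting one looking for `Unique`s. The tracked `[first_unique, last_unique]` range bounds that scan: if all `Unique` items are known to live inside the range, it is enough to disable the slice from `max(first_unique, granting_idx + 1)` through `last_unique`. A simplified sketch of the windowed scan, with boolean flags standing in for `Permission::Unique` and protector checks omitted:

    /// Disable every `unique` flag strictly above `granting_idx`, scanning only the
    /// window `[first_unique, last_unique]` that is guaranteed to contain all
    /// `unique` entries. Illustrative only: Miri disables `Permission::Unique`
    /// items and also runs protector checks inside this loop.
    fn disable_above(unique: &mut [bool], granting_idx: usize, first_unique: usize, last_unique: usize) {
        if granting_idx < last_unique {
            // Add 1 so the granting item itself stays enabled.
            let lower = first_unique.max(granting_idx + 1);
            for flag in &mut unique[lower..=last_unique] {
                *flag = false;
            }
        }
    }

    fn main() {
        // "Unique" items at positions 2 and 4, so the tracked window is [2, 4].
        let mut stack = vec![false, false, true, false, true, false];
        disable_above(&mut stack, 1, 2, 4);
        assert_eq!(stack, vec![false; 6]);

        // Granting item at or above the window: nothing above it can be Unique, so no scan at all.
        let mut stack = vec![false, false, true];
        disable_above(&mut stack, 2, 2, 2);
        assert_eq!(stack, vec![false, false, true]);
    }
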
@@ -401,6 +456,11 @@ impl<'tcx> Stack {
             Stack::check_protector(&item, None, global)?;
         }
 
+        self.cache.clear();
+        self.untagged.clear();
+        self.first_unique = 0;
+        self.last_unique = 0;
+
         Ok(())
     }
 
@@ -459,6 +519,43 @@ impl<'tcx> Stack {
         } else {
             trace!("reborrow: adding item {:?}", new);
             self.borrows.insert(new_idx, new);
+            // The above insert changes the meaning of every index in the cache >= new_idx, so now
+            // we need to find every one of those indexes and increment it.
+
+            // Adjust the possibly-unique range if an insert occurs before or within it
+            if self.first_unique >= new_idx {
+                self.first_unique += 1;
+            }
+            if self.last_unique >= new_idx {
+                self.last_unique += 1;
+            }
+            if new.perm == Permission::Unique {
+                // Make sure the possibly-unique range contains the new borrow
+                self.first_unique = self.first_unique.min(new_idx);
+                self.last_unique = self.last_unique.max(new_idx);
+            }
+            // Repair the lookup cache; indices have shifted if an insert was not at the end
+            if new_idx != self.borrows.len() - 1 {
+                for item in &self.borrows[new_idx + 1..] {
+                    if let SbTag::Tagged(id) = item.tag {
+                        *self.cache.get_mut(&id).unwrap() += 1;
+                    }
+                }
+                for idx in self.untagged.iter_mut().rev().take_while(|idx| **idx >= new_idx) {
+                    *idx += 1;
+                }
+            }
+            // We've now made a (conceptual) hole in the tag lookup cache.
+            // Nothing maps to new_idx, because we've adjusted everything >= it.
+            match new.tag {
+                SbTag::Untagged => {
+                    self.untagged.push(new_idx);
+                    self.untagged.sort();
+                }
+                SbTag::Tagged(id) => {
+                    self.cache.insert(id, new_idx);
+                }
+            }
         }
 
         Ok(())
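
Inserting a new item anywhere but the top shifts every later position by one, so every cached index at or above `new_idx` has to be incremented before the new tag is recorded. The commit does this by walking `borrows[new_idx + 1..]` and the tail of the sorted `untagged` vector; the sketch below keeps the same invariant the simpler (but slower) way, by scanning the whole map. Names are illustrative, not Miri's:

    use std::collections::HashMap;

    /// After inserting an element at `new_idx` in the backing vector, bump every
    /// cached position at or above `new_idx` so the map keeps pointing at the same
    /// elements. The commit avoids this full-map walk by iterating
    /// `borrows[new_idx + 1..]` and the tail of `untagged` instead.
    fn shift_for_insert(positions: &mut HashMap<u64, usize>, new_idx: usize) {
        for pos in positions.values_mut() {
            if *pos >= new_idx {
                *pos += 1;
            }
        }
    }

    fn main() {
        let mut tags: Vec<u64> = vec![10, 20, 30];
        let mut positions: HashMap<u64, usize> =
            tags.iter().enumerate().map(|(i, &t)| (t, i)).collect();

        // Insert tag 15 between 10 and 20, then repair the cache.
        tags.insert(1, 15);
        shift_for_insert(&mut positions, 1);
        positions.insert(15, 1);

        for (tag, &pos) in &positions {
            assert_eq!(tags[pos], *tag); // the cache still agrees with the vector
        }
    }
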
@@ -471,7 +568,17 @@ impl<'tcx> Stacks {
     /// Creates new stack with initial tag.
     fn new(size: Size, perm: Permission, tag: SbTag) -> Self {
         let item = Item { perm, tag, protector: None };
-        let stack = Stack { borrows: vec![item] };
+        let mut cache = FxHashMap::default();
+        let mut untagged = Vec::new();
+        match item.tag {
+            SbTag::Untagged => {
+                untagged.push(0);
+            }
+            SbTag::Tagged(id) => {
+                cache.insert(id, 0);
+            }
+        }
+        let stack = Stack { borrows: vec![item], cache, untagged, first_unique: 0, last_unique: 0 };
 
         Stacks { stacks: RefCell::new(RangeMap::new(size, stack)) }
     }
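
Not part of the commit, but a natural companion to this kind of change: since `cache`, `untagged`, and the unique range are all derived from `borrows`, a debug-only consistency check can catch a maintenance bug in any of the paths above. A hypothetical sketch on illustrative types:

    use std::collections::HashMap;

    // Illustrative tag type; the real code uses `SbTag` and `PtrId`.
    enum Tag {
        Tagged(u64),
        Untagged,
    }

    /// Hypothetical debug helper (not in the commit): assert that the lookup
    /// structures are exactly the information derivable from the borrow stack.
    fn check_cache(borrows: &[Tag], cache: &HashMap<u64, usize>, untagged: &[usize]) {
        let mut tagged_count = 0;
        for (idx, tag) in borrows.iter().enumerate() {
            match tag {
                Tag::Tagged(id) => {
                    assert_eq!(cache.get(id), Some(&idx));
                    tagged_count += 1;
                }
                Tag::Untagged => assert!(untagged.contains(&idx)),
            }
        }
        assert_eq!(cache.len(), tagged_count);
        assert_eq!(untagged.len(), borrows.len() - tagged_count);
        assert!(untagged.windows(2).all(|w| w[0] < w[1])); // sorted, no duplicates
    }

    fn main() {
        let borrows = [Tag::Tagged(1), Tag::Untagged, Tag::Tagged(9)];
        let cache = HashMap::from([(1u64, 0usize), (9, 2)]);
        check_cache(&borrows, &cache, &[1]);
    }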