From 496eb0c95c5ee8198e6ae60b153d53fc56ca1b10 Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Tue, 27 Jun 2023 08:20:57 -0700 Subject: [PATCH 1/2] [MoveOnlyAddressChecker] NFC: Extracted helpers. In preparation for having a third instance getting or creating the bits affected by an instruction, introduce a typealias for maps SILInstruction->SmallBitVector and a helper function that returns a reference to the SmallBitVector for an instruction and two others that set into those bits the bits from another bit vector or from a range. --- .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 40 +++++++++++-------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 96f5baa9dd923..3e5123934ca95 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -563,6 +563,9 @@ namespace { struct UseState { MarkMustCheckInst *address; + using InstToBitMap = + llvm::SmallMapVector; + Optional cachedNumSubelements; /// The blocks that consume fields of the value. @@ -576,7 +579,7 @@ struct UseState { /// A map from a liveness requiring use to the part of the type that it /// requires liveness for. - llvm::SmallMapVector livenessUses; + InstToBitMap livenessUses; /// A map from a load [copy] or load [take] that we determined must be /// converted to a load_borrow to the part of the type tree that it needs to @@ -626,7 +629,7 @@ struct UseState { /// memInstMustReinitialize insts. Contains both insts like copy_addr/store /// [assign] that are reinits that we will convert to inits and true reinits. 
- llvm::SmallMapVector reinitInsts; + InstToBitMap reinitInsts; /// The set of drop_deinits of this mark_must_check SmallSetVector dropDeinitInsts; @@ -653,32 +656,35 @@ struct UseState { return *cachedNumSubelements; } - SmallBitVector &getOrCreateLivenessUse(SILInstruction *inst) { - auto iter = livenessUses.find(inst); - if (iter == livenessUses.end()) { - iter = livenessUses.insert({inst, SmallBitVector(getNumSubelements())}) - .first; + SmallBitVector &getOrCreateAffectedBits(SILInstruction *inst, + InstToBitMap &map) { + auto iter = map.find(inst); + if (iter == map.end()) { + iter = map.insert({inst, SmallBitVector(getNumSubelements())}).first; } return iter->second; } + void setAffectedBits(SILInstruction *inst, SmallBitVector const &bits, + InstToBitMap &map) { + getOrCreateAffectedBits(inst, map) |= bits; + } + + void setAffectedBits(SILInstruction *inst, TypeTreeLeafTypeRange range, + InstToBitMap &map) { + range.setBits(getOrCreateAffectedBits(inst, map)); + } + void recordLivenessUse(SILInstruction *inst, SmallBitVector const &bits) { - getOrCreateLivenessUse(inst) |= bits; + setAffectedBits(inst, bits, livenessUses); } void recordLivenessUse(SILInstruction *inst, TypeTreeLeafTypeRange range) { - auto &bits = getOrCreateLivenessUse(inst); - range.setBits(bits); + setAffectedBits(inst, range, livenessUses); } void recordReinitUse(SILInstruction *inst, TypeTreeLeafTypeRange range) { - auto iter = reinitInsts.find(inst); - if (iter == reinitInsts.end()) { - iter = - reinitInsts.insert({inst, SmallBitVector(getNumSubelements())}).first; - } - auto &bits = iter->second; - range.setBits(bits); + setAffectedBits(inst, range, reinitInsts); } /// Returns true if this is a terminator instruction that although it doesn't From cdf4307193b7cb6addb8de7a10c1cd8dbb5777ae Mon Sep 17 00:00:00 2001 From: Nate Chandler Date: Tue, 27 Jun 2023 08:21:31 -0700 Subject: [PATCH 2/2] [MoveOnlyAddressChecker] Fix repr for initInsts. 
The address checker records instructions that initialize fields in its initInsts map. Previously, that map mapped from an instruction to a range of fields of the type. But an instruction can initialize multiple discontiguous fields of a single value. (Indeed an attempt to add a second range that was initialized by an already initializing instruction--even if it were overlapping or adjacent--would have no effect and the map wouldn't be updated.) Here, this is fixed by fixing the representation and updating the storage whenever a new range is seen to be initialized by the instruction. As in https://github.com/apple/swift/pull/66728 , a SmallBitVector is the representation chosen. rdar://111391893 --- .../Mandatory/MoveOnlyAddressCheckerUtils.cpp | 21 ++++++----- .../moveonly_addresschecker_unmaximized.sil | 35 +++++++++++++++++++ 2 files changed, 47 insertions(+), 9 deletions(-) diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 3e5123934ca95..6578e69ce8972 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -625,7 +625,7 @@ struct UseState { /// A map from an instruction that initializes memory to the description of /// the part of the type tree that it initializes. - llvm::SmallMapVector initInsts; + InstToBitMap initInsts; /// memInstMustReinitialize insts. Contains both insts like copy_addr/store /// [assign] that are reinits that we will convert to inits and true reinits. 
@@ -687,6 +687,10 @@ struct UseState { setAffectedBits(inst, range, reinitInsts); } + void recordInitUse(SILInstruction *inst, TypeTreeLeafTypeRange range) { + setAffectedBits(inst, range, initInsts); + } + /// Returns true if this is a terminator instruction that although it doesn't /// use our inout argument directly is used by the pass to ensure that we /// reinit said argument if we consumed it in the body of the function. @@ -881,7 +885,7 @@ struct UseState { { auto iter = initInsts.find(inst); if (iter != initInsts.end()) { - if (span.setIntersection(iter->second)) + if (span.intersects(iter->second)) return true; } } @@ -1026,7 +1030,7 @@ void UseState::initializeLiveness( llvm::dbgs() << "Found in/in_guaranteed/inout/inout_aliasable argument as " "an init... adding mark_must_check as init!\n"); - initInsts.insert({address, liveness.getTopLevelSpan()}); + recordInitUse(address, liveness.getTopLevelSpan()); liveness.initializeDef(address, liveness.getTopLevelSpan()); break; case swift::SILArgumentConvention::Indirect_Out: @@ -1057,14 +1061,14 @@ void UseState::initializeLiveness( // later invariants that we assert upon remain true. LLVM_DEBUG(llvm::dbgs() << "Found move only arg closure box use... " "adding mark_must_check as init!\n"); - initInsts.insert({address, liveness.getTopLevelSpan()}); + recordInitUse(address, liveness.getTopLevelSpan()); liveness.initializeDef(address, liveness.getTopLevelSpan()); } } else if (auto *box = dyn_cast( lookThroughOwnershipInsts(projectBox->getOperand()))) { LLVM_DEBUG(llvm::dbgs() << "Found move only var allocbox use... " "adding mark_must_check as init!\n"); - initInsts.insert({address, liveness.getTopLevelSpan()}); + recordInitUse(address, liveness.getTopLevelSpan()); liveness.initializeDef(address, liveness.getTopLevelSpan()); } } @@ -1075,7 +1079,7 @@ void UseState::initializeLiveness( stripAccessMarkers(address->getOperand()))) { LLVM_DEBUG(llvm::dbgs() << "Found ref_element_addr use... 
" "adding mark_must_check as init!\n"); - initInsts.insert({address, liveness.getTopLevelSpan()}); + recordInitUse(address, liveness.getTopLevelSpan()); liveness.initializeDef(address, liveness.getTopLevelSpan()); } @@ -1085,7 +1089,7 @@ void UseState::initializeLiveness( dyn_cast(stripAccessMarkers(address->getOperand()))) { LLVM_DEBUG(llvm::dbgs() << "Found global_addr use... " "adding mark_must_check as init!\n"); - initInsts.insert({address, liveness.getTopLevelSpan()}); + recordInitUse(address, liveness.getTopLevelSpan()); liveness.initializeDef(address, liveness.getTopLevelSpan()); } @@ -1758,8 +1762,7 @@ bool GatherUsesVisitor::visitUse(Operand *op) { if (!leafRange) return false; - assert(!useState.initInsts.count(user)); - useState.initInsts.insert({user, *leafRange}); + useState.recordInitUse(user, *leafRange); return true; } diff --git a/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil b/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil index 68fe941976da4..af00f06daa9c7 100644 --- a/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil +++ b/test/SILOptimizer/moveonly_addresschecker_unmaximized.sil @@ -17,6 +17,7 @@ sil @get_M4 : $@convention(thin) () -> @owned M4 sil @end_2 : $@convention(thin) (@owned M, @owned M) -> () sil @see_addr_2 : $@convention(thin) (@in_guaranteed M, @in_guaranteed M) -> () sil @replace_2 : $@convention(thin) (@inout M, @inout M) -> () +sil @get_out_2 : $@convention(thin) () -> (@out M, @out M) /// Two non-contiguous fields (#M4.s2, #M4.s4) are borrowed by @see_addr_2. /// Two non-contiguous fields (#M4.s1, #M$.s3) are consumed by @end_2. @@ -107,3 +108,37 @@ bb0: %22 = tuple () return %22 : $() } + +// Verify that M4.s4 is not leaked after it is set. 
+// CHECK-LABEL: sil [ossa] @rdar111391893 : $@convention(thin) () -> () { +// CHECK: [[GET_OUT_2:%[^,]+]] = function_ref @get_out_2 +// CHECK: [[STACK:%[^,]+]] = alloc_stack +// CHECK: [[S2_ADDR_1:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: [[S4_ADDR_1:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: apply [[GET_OUT_2]]([[S2_ADDR_1]], [[S4_ADDR_1]]) +// CHECK: [[S2_ADDR_2:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: [[S4_ADDR_4:%[^,]+]] = struct_element_addr [[STACK]] +// CHECK: destroy_addr [[S2_ADDR_2]] +// CHECK: destroy_addr [[S4_ADDR_4]] +// CHECK-LABEL: } // end sil function 'rdar111391893' +sil [ossa] @rdar111391893 : $@convention(thin) () -> () { + %get_out_2 = function_ref @get_out_2 : $@convention(thin) () -> (@out M, @out M) + %end_2 = function_ref @end_2 : $@convention(thin) (@owned M, @owned M) -> () + %stack_addr = alloc_stack $M4 + %stack = mark_must_check [consumable_and_assignable] %stack_addr : $*M4 + %s2_addr = struct_element_addr %stack : $*M4, #M4.s2 + %s4_addr = struct_element_addr %stack : $*M4, #M4.s4 + apply %get_out_2(%s2_addr, %s4_addr) : $@convention(thin) () -> (@out M, @out M) + %s1_addr = struct_element_addr %stack : $*M4, #M4.s1 + %s3_addr = struct_element_addr %stack : $*M4, #M4.s3 + apply %get_out_2(%s1_addr, %s3_addr) : $@convention(thin) () -> (@out M, @out M) + %12 = struct_element_addr %stack : $*M4, #M4.s1 + %13 = load [copy] %12 : $*M + %14 = struct_element_addr %stack : $*M4, #M4.s3 + %15 = load [copy] %14 : $*M + %17 = apply %end_2(%13, %15) : $@convention(thin) (@owned M, @owned M) -> () + destroy_addr %stack : $*M4 + dealloc_stack %stack_addr : $*M4 + %22 = tuple () + return %22 : $() +}