[opt] Update alloc_refs on the stack to have stack memory kind. #31423

Closed
4 changes: 1 addition & 3 deletions include/swift/SIL/MemAccessUtils.h
@@ -582,9 +582,7 @@ class AccessUseDefChainVisitor {
// Visitors for specific identified access kinds. These default to calling out
// to visitIdentified.

Result visitClassAccess(RefElementAddrInst *field) {
return asImpl().visitBase(field, AccessedStorage::Class);
}
Result visitClassAccess(RefElementAddrInst *field);
Result visitArgumentAccess(SILFunctionArgument *arg) {
return asImpl().visitBase(arg, AccessedStorage::Argument);
}
20 changes: 19 additions & 1 deletion lib/SIL/Utils/MemAccessUtils.cpp
@@ -100,7 +100,6 @@ AccessedStorage::AccessedStorage(SILValue base, Kind kind) {
value = base;
break;
case Stack:
assert(isa<AllocStackInst>(base));

Contributor Author:
I'd like to keep the assertion here, but it would require duplicating the code in visitClassAccess, and I don't think that's worth it just for the assertion. WDYT?
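For illustration, a widened check along these lines could preserve the assertion. This is only a sketch of the trade-off being discussed; the helper name and the simplified walk are assumptions, not code from this patch:

// Hypothetical helper, not part of this patch: it repeats (in simplified
// form) the classification that visitClassAccess performs, which is the
// duplication the author wants to avoid.
#include "swift/SIL/MemAccessUtils.h"
#include "swift/SIL/SILInstruction.h"

static bool isStackAllocatedBase(swift::SILValue base) {
  using namespace swift;
  if (isa<AllocStackInst>(base))
    return true;
  // A class-property base whose object is a [stack] alloc_ref. A real check
  // would also have to look through the projections handled in
  // visitClassAccess (struct_element_addr, tuple_element_addr, load, ...).
  if (auto *rea = dyn_cast<RefElementAddrInst>(base)) {
    if (auto *allocRef = dyn_cast<AllocRefInst>(rea->getOperand()))
      return allocRef->isAllocatingStack();
  }
  return false;
}

// The Stack case could then keep an assertion such as:
//   assert(isStackAllocatedBase(base) &&
//          "stack storage should be an alloc_stack or a [stack] alloc_ref");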

value = base;
break;
case Nested:
@@ -772,3 +771,22 @@ void swift::visitAccessedAddress(SILInstruction *I,
return;
}
}

template <typename Impl, typename Result>
Result swift::AccessUseDefChainVisitor<Impl, Result>::visitClassAccess(
RefElementAddrInst *field) {
auto operand = field->getOperand();
while (isa<StructElementAddrInst>(operand) ||
isa<RefElementAddrInst>(operand) ||
isa<TupleElementAddrInst>(operand) || isa<LoadInst>(operand))
operand = operand.getDefiningInstruction()->getOperand(0);

Contributor:
I'm not sure what's happening here with peeking through operands. The UseDefChainVisitor needs to visit each instruction on the "access path" between the address of the memory access and the address of the access to formal storage.
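To make that concrete, here is a rough sketch of the one-step-at-a-time structure the visitor is meant to have. It is illustrative only; the function names and the simplified handling are assumptions, not the actual MemAccessUtils API:

// Illustrative sketch: each address projection is classified in its own step,
// and the caller advances the use-def walk from the returned source value,
// rather than one callback looping over several instruction kinds itself.
#include "swift/SIL/SILInstruction.h"
#include "swift/SIL/SILValue.h"

static swift::SILValue classifyOneStep(swift::SILValue addr) {
  using namespace swift;
  // Address projections within one formal access: step to their source and
  // let the caller keep walking.
  if (auto *sea = dyn_cast<StructElementAddrInst>(addr))
    return sea->getOperand();
  if (auto *tea = dyn_cast<TupleElementAddrInst>(addr))
    return tea->getOperand();
  // Anything else (alloc_stack, ref_element_addr, global_addr, an argument,
  // ...) is treated as the base of the formal access in this sketch.
  return SILValue();
}

static swift::SILValue findFormalBase(swift::SILValue addr) {
  while (swift::SILValue next = classifyOneStep(addr))
    addr = next;
  return addr;
}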


if (isa<AllocStackInst>(operand))
return asImpl().visitBase(field, AccessedStorage::Stack);

Contributor:
If an access is to a class property, it doesn't matter where the reference to the class is stored. Once you reach the base of the formal access, you've found the AccessedStorage. This corresponds to language-level constructs: global variable, class property, local variable.
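As a rough illustration of that mapping from formal-access bases to storage kinds (simplified and incomplete; not the real findAccessedStorage logic):

// Simplified sketch of the correspondence the reviewer describes: the base of
// the formal access determines the storage kind, mirroring language-level
// constructs, independent of where a class reference happens to be stored.
#include "swift/SIL/MemAccessUtils.h"
#include "swift/SIL/SILInstruction.h"

static swift::AccessedStorage::Kind classifyFormalBase(swift::SILValue base) {
  using namespace swift;
  if (isa<RefElementAddrInst>(base))
    return AccessedStorage::Class;    // class property
  if (isa<GlobalAddrInst>(base))
    return AccessedStorage::Global;   // global variable
  if (isa<AllocStackInst>(base))
    return AccessedStorage::Stack;    // local variable on the stack
  if (isa<ProjectBoxInst>(base))
    return AccessedStorage::Box;      // boxed local variable
  return AccessedStorage::Unidentified;
}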

if (auto allocRef = dyn_cast<AllocRefInst>(operand)) {
if (allocRef->isAllocatingStack())
return asImpl().visitBase(field, AccessedStorage::Stack);

Contributor:
Class properties have specific semantics at the language level and specific structural properties in SIL. Whether the object is allocated on the heap or stack is irrelevant to any of this. If we have an optimization that can somehow benefit from knowing that an object is stack-allocated, then we can teach that optimization about stack-allocated objects. But we can never pretend that a class property is a local variable.
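One way to read that suggestion, sketched with a hypothetical helper (not existing API): keep the storage kind as Class, and let an optimization that cares ask separately whether the object is stack-allocated.

// Hypothetical helper illustrating the suggested direction: storage kind stays
// Class, and an interested optimization asks a separate question about the
// underlying object. A real query would also look through casts, borrows, and
// copies before giving up.
#include "swift/SIL/SILInstruction.h"

static bool isKnownStackAllocatedObject(swift::SILValue ref) {
  using namespace swift;
  if (auto *allocRef = dyn_cast<AllocRefInst>(ref))
    return allocRef->isAllocatingStack();
  return false;
}

// Usage sketch: given Class storage rooted at a ref_element_addr `field`,
//   bool onStack = isKnownStackAllocatedObject(field->getOperand());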

}

return asImpl().visitBase(field, AccessedStorage::Class);
}
4 changes: 4 additions & 0 deletions lib/SILOptimizer/PassManager/PassPipeline.cpp
@@ -588,6 +588,10 @@ static void addClosureSpecializePassPipeline(SILPassPipelinePlan &P) {
static void addLowLevelPassPipeline(SILPassPipelinePlan &P) {
P.startPipeline("LowLevel,Function", true /*isFunctionPassPipeline*/);

P.addAccessEnforcementOpts();
P.addAccessEnforcementWMO();
// P.addAccessMarkerElimination();

// Should be after FunctionSignatureOpts and before the last inliner.
P.addReleaseDevirtualizer();

133 changes: 133 additions & 0 deletions test/SILOptimizer/access_enforcement_opts.sil
@@ -1737,3 +1737,136 @@ bb0(%0 : ${ var Int64 }):
end_access %access : $*Int64
return %val : $Int64
}

// CHECK-LABEL: sil @promote_local_class_to_static
// CHECK: begin_access [modify] [static] [no_nested_conflict]
// CHECK: begin_access [read] [static] [no_nested_conflict]
// CHECK-LABEL: end sil function 'promote_local_class_to_static'
sil @promote_local_class_to_static : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Int32, 0
%1 = struct $Int32 (%0 : $Builtin.Int32)
%4 = alloc_ref [stack] $RefElemClass

%8 = ref_element_addr %4 : $RefElemClass, #RefElemClass.y
%9 = begin_access [modify] [dynamic] [no_nested_conflict] %8 : $*Int32
store %1 to %9 : $*Int32
end_access %9 : $*Int32

%17 = begin_access [read] [dynamic] [no_nested_conflict] %8 : $*Int32
%18 = struct_element_addr %17 : $*Int32, #Int32._value
%19 = load %18 : $*Builtin.Int32
end_access %17 : $*Int32

set_deallocating %4 : $RefElemClass
dealloc_ref %4 : $RefElemClass
dealloc_ref [stack] %4 : $RefElemClass

%9999 = tuple ()
return %9999 : $()
}

class Class1 {
@_hasStorage @_hasInitialValue var x: Int32 { get set }
deinit
init()
}

struct Struct1 {
@_hasStorage @_hasInitialValue var c: Class1 { get set }
init(c: Class1 = Class1())
init()
}

class Class2 {
@_hasStorage @_hasInitialValue var s: Struct1 { get set }
deinit
init()
}

// CHECK-LABEL: sil @promote_local_class_in_struct_in_class_to_static
// CHECK: begin_access [modify] [static] [no_nested_conflict]
// CHECK: begin_access [read] [static] [no_nested_conflict]
// CHECK-LABEL: end sil function 'promote_local_class_in_struct_in_class_to_static'
sil @promote_local_class_in_struct_in_class_to_static : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Int32, 0
%1 = struct $Int32 (%0 : $Builtin.Int32)
%4 = alloc_ref [stack] $Class2

%5 = ref_element_addr %4 : $Class2, #Class2.s
%6 = struct_element_addr %5 : $*Struct1, #Struct1.c
%7 = load %6 : $*Class1
%8 = ref_element_addr %7 : $Class1, #Class1.x
%9 = begin_access [modify] [dynamic] [no_nested_conflict] %8 : $*Int32
store %1 to %9 : $*Int32
end_access %9 : $*Int32

%17 = begin_access [read] [dynamic] [no_nested_conflict] %8 : $*Int32
%18 = struct_element_addr %17 : $*Int32, #Int32._value
%19 = load %18 : $*Builtin.Int32
end_access %17 : $*Int32

set_deallocating %4 : $Class2
dealloc_ref %4 : $Class2
dealloc_ref [stack] %4 : $Class2

%9999 = tuple ()
return %9999 : $()
}

// CHECK-LABEL: sil @promote_local_class_in_struct_to_static
// CHECK: begin_access [modify] [static] [no_nested_conflict]
// CHECK: begin_access [read] [static] [no_nested_conflict]
// CHECK-LABEL: end sil function 'promote_local_class_in_struct_to_static'
sil @promote_local_class_in_struct_to_static : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Int32, 0
%1 = struct $Int32 (%0 : $Builtin.Int32)
%4 = alloc_stack $Struct1

%6 = struct_element_addr %4 : $*Struct1, #Struct1.c
%7 = load %6 : $*Class1
%8 = ref_element_addr %7 : $Class1, #Class1.x
%9 = begin_access [modify] [dynamic] [no_nested_conflict] %8 : $*Int32
store %1 to %9 : $*Int32
end_access %9 : $*Int32

%17 = begin_access [read] [dynamic] [no_nested_conflict] %8 : $*Int32
%18 = struct_element_addr %17 : $*Int32, #Int32._value
%19 = load %18 : $*Builtin.Int32
end_access %17 : $*Int32

dealloc_stack %4 : $*Struct1

%9999 = tuple ()
return %9999 : $()
}

// CHECK-LABEL: sil @promote_local_class_in_tuple_to_static
// CHECK: begin_access [modify] [static] [no_nested_conflict]
// CHECK: begin_access [read] [static] [no_nested_conflict]
// CHECK-LABEL: end sil function 'promote_local_class_in_tuple_to_static'
sil @promote_local_class_in_tuple_to_static : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Int32, 0
%1 = struct $Int32 (%0 : $Builtin.Int32)
%4 = alloc_stack $(Class1, Int)

%6 = tuple_element_addr %4 : $*(Class1, Int), 0
%7 = load %6 : $*Class1
%8 = ref_element_addr %7 : $Class1, #Class1.x
%9 = begin_access [modify] [dynamic] [no_nested_conflict] %8 : $*Int32
store %1 to %9 : $*Int32
end_access %9 : $*Int32

%17 = begin_access [read] [dynamic] [no_nested_conflict] %8 : $*Int32
%18 = struct_element_addr %17 : $*Int32, #Int32._value
%19 = load %18 : $*Builtin.Int32
end_access %17 : $*Int32

dealloc_stack %4 : $*(Class1, Int)

%9999 = tuple ()
return %9999 : $()
}