Add LLVM attributes in batches instead of individually #94221


Merged (4 commits) on Feb 27, 2022
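In short: where the old code made one FFI call per attribute (llvm::Attribute::NoReturn.apply_llfn(...), LLVMRustAddDereferenceableAttr(...), and so on), the new code first creates the attribute values with AttributeKind::create_attr or the Create*Attr helpers, collects them in a SmallVec, and applies the whole batch with a single attributes::apply_to_llfn or attributes::apply_to_callsite call. A minimal sketch of the pattern, using names that appear in the diff below (illustrative only, not the PR's exact code):

    // Before: one FFI round trip per attribute.
    llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn);
    llvm::Attribute::NoUnwind.apply_llfn(llvm::AttributePlace::Function, llfn);

    // After: create the attributes up front, then apply them in one batch.
    let mut attrs = SmallVec::<[_; 2]>::new();
    attrs.push(llvm::AttributeKind::NoReturn.create_attr(cx.llcx));
    attrs.push(llvm::AttributeKind::NoUnwind.create_attr(cx.llcx));
    attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &attrs);
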
211 changes: 82 additions & 129 deletions compiler/rustc_codegen_llvm/src/abi.rs
@@ -1,6 +1,7 @@
use crate::attributes;
use crate::builder::Builder;
use crate::context::CodegenCx;
use crate::llvm::{self, AttributePlace};
use crate::llvm::{self, Attribute, AttributePlace};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
@@ -20,6 +21,7 @@ use rustc_target::abi::{self, HasDataLayout, Int};
pub use rustc_target::spec::abi::Abi;

use libc::c_uint;
use smallvec::SmallVec;

pub trait ArgAttributesExt {
fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value);
@@ -38,57 +40,65 @@ fn should_use_mutable_noalias(cx: &CodegenCx<'_, '_>) -> bool {
cx.tcx.sess.opts.debugging_opts.mutable_noalias.unwrap_or(true)
}

const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::Attribute); 1] =
[(ArgAttribute::InReg, llvm::Attribute::InReg)];
const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
[(ArgAttribute::InReg, llvm::AttributeKind::InReg)];

const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::Attribute); 5] = [
(ArgAttribute::NoAlias, llvm::Attribute::NoAlias),
(ArgAttribute::NoCapture, llvm::Attribute::NoCapture),
(ArgAttribute::NonNull, llvm::Attribute::NonNull),
(ArgAttribute::ReadOnly, llvm::Attribute::ReadOnly),
(ArgAttribute::NoUndef, llvm::Attribute::NoUndef),
const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 5] = [
(ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
(ArgAttribute::NoCapture, llvm::AttributeKind::NoCapture),
(ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
(ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
(ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
];

impl ArgAttributesExt for ArgAttributes {
fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value) {
let mut regular = self.regular;
unsafe {
// ABI-affecting attributes must always be applied
for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
if regular.contains(attr) {
llattr.apply_llfn(idx, llfn);
}
}
if let Some(align) = self.pointee_align {
llvm::LLVMRustAddAlignmentAttr(llfn, idx.as_uint(), align.bytes() as u32);
}
match self.arg_ext {
ArgExtension::None => {}
ArgExtension::Zext => llvm::Attribute::ZExt.apply_llfn(idx, llfn),
ArgExtension::Sext => llvm::Attribute::SExt.apply_llfn(idx, llfn),
}
// Only apply remaining attributes when optimizing
if cx.sess().opts.optimize == config::OptLevel::No {
return;
}
let deref = self.pointee_size.bytes();
if deref != 0 {
if regular.contains(ArgAttribute::NonNull) {
llvm::LLVMRustAddDereferenceableAttr(llfn, idx.as_uint(), deref);
} else {
llvm::LLVMRustAddDereferenceableOrNullAttr(llfn, idx.as_uint(), deref);
}
regular -= ArgAttribute::NonNull;
}
for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
if regular.contains(attr) {
llattr.apply_llfn(idx, llfn);
}
fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> {
let mut regular = this.regular;

let mut attrs = SmallVec::new();

// ABI-affecting attributes must always be applied
for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
if regular.contains(attr) {
attrs.push(llattr.create_attr(cx.llcx));
}
}
if let Some(align) = this.pointee_align {
attrs.push(llvm::CreateAlignmentAttr(cx.llcx, align.bytes()));
}
match this.arg_ext {
ArgExtension::None => {}
ArgExtension::Zext => attrs.push(llvm::AttributeKind::ZExt.create_attr(cx.llcx)),
ArgExtension::Sext => attrs.push(llvm::AttributeKind::SExt.create_attr(cx.llcx)),
}

// Only apply remaining attributes when optimizing
if cx.sess().opts.optimize != config::OptLevel::No {
let deref = this.pointee_size.bytes();
if deref != 0 {
if regular.contains(ArgAttribute::NonNull) {
attrs.push(llvm::CreateDereferenceableAttr(cx.llcx, deref));
} else {
attrs.push(llvm::CreateDereferenceableOrNullAttr(cx.llcx, deref));
}
if regular.contains(ArgAttribute::NoAliasMutRef) && should_use_mutable_noalias(cx) {
llvm::Attribute::NoAlias.apply_llfn(idx, llfn);
regular -= ArgAttribute::NonNull;
}
for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
if regular.contains(attr) {
attrs.push(llattr.create_attr(cx.llcx));
}
}
if regular.contains(ArgAttribute::NoAliasMutRef) && should_use_mutable_noalias(cx) {
attrs.push(llvm::AttributeKind::NoAlias.create_attr(cx.llcx));
}
}

attrs
}

impl ArgAttributesExt for ArgAttributes {
fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value) {
let attrs = get_attrs(self, cx);
attributes::apply_to_llfn(llfn, idx, &attrs);
}

fn apply_attrs_to_callsite(
@@ -97,52 +107,8 @@ impl ArgAttributesExt for ArgAttributes {
cx: &CodegenCx<'_, '_>,
callsite: &Value,
) {
let mut regular = self.regular;
unsafe {
// ABI-affecting attributes must always be applied
for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
if regular.contains(attr) {
llattr.apply_callsite(idx, callsite);
}
}
if let Some(align) = self.pointee_align {
llvm::LLVMRustAddAlignmentCallSiteAttr(
callsite,
idx.as_uint(),
align.bytes() as u32,
);
}
match self.arg_ext {
ArgExtension::None => {}
ArgExtension::Zext => llvm::Attribute::ZExt.apply_callsite(idx, callsite),
ArgExtension::Sext => llvm::Attribute::SExt.apply_callsite(idx, callsite),
}
// Only apply remaining attributes when optimizing
if cx.sess().opts.optimize == config::OptLevel::No {
return;
}
let deref = self.pointee_size.bytes();
if deref != 0 {
if regular.contains(ArgAttribute::NonNull) {
llvm::LLVMRustAddDereferenceableCallSiteAttr(callsite, idx.as_uint(), deref);
} else {
llvm::LLVMRustAddDereferenceableOrNullCallSiteAttr(
callsite,
idx.as_uint(),
deref,
);
}
regular -= ArgAttribute::NonNull;
}
for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
if regular.contains(attr) {
llattr.apply_callsite(idx, callsite);
}
}
if regular.contains(ArgAttribute::NoAliasMutRef) && should_use_mutable_noalias(cx) {
llvm::Attribute::NoAlias.apply_callsite(idx, callsite);
}
}
let attrs = get_attrs(self, cx);
attributes::apply_to_callsite(callsite, idx, &attrs);
}
}

@@ -444,15 +410,14 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
}

fn apply_attrs_llfn(&self, cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value) {
// FIXME(eddyb) can this also be applied to callsites?
let mut func_attrs = SmallVec::<[_; 2]>::new();
if self.ret.layout.abi.is_uninhabited() {
llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn);
func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(cx.llcx));
}

// FIXME(eddyb, wesleywiser): apply this to callsites as well?
if !self.can_unwind {
llvm::Attribute::NoUnwind.apply_llfn(llvm::AttributePlace::Function, llfn);
func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(cx.llcx));
}
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &{ func_attrs });
Review comment (Contributor): Why does this need braces?

Reply (Contributor Author): Not necessary, but it causes the SmallVec to be moved, which makes func_attrs.push() after this point an error.
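For illustration (not part of the PR): wrapping the argument in braces turns it into a block expression, which moves the SmallVec into a temporary, so any later push on the original binding is rejected by the compiler. A self-contained sketch of the same effect with a plain Vec:

    fn take(_: &[u32]) {}

    fn main() {
        let v: Vec<u32> = vec![1, 2];
        take(&{ v }); // `{ v }` moves `v` into a temporary, and we borrow that temporary
        // v.push(3); // error[E0382]: borrow of moved value: `v` (and `v` would also need `mut`)
    }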


let mut i = 0;
let mut apply = |attrs: &ArgAttributes| {
@@ -467,13 +432,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => {
assert!(!on_stack);
let i = apply(attrs);
unsafe {
llvm::LLVMRustAddStructRetAttr(
llfn,
llvm::AttributePlace::Argument(i).as_uint(),
self.ret.layout.llvm_type(cx),
);
}
let sret = llvm::CreateStructRetAttr(cx.llcx, self.ret.layout.llvm_type(cx));
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]);
}
PassMode::Cast(cast) => {
cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
@@ -488,13 +448,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Ignore => {}
PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => {
let i = apply(attrs);
unsafe {
llvm::LLVMRustAddByValAttr(
llfn,
llvm::AttributePlace::Argument(i).as_uint(),
arg.layout.llvm_type(cx),
);
}
let byval = llvm::CreateByValAttr(cx.llcx, arg.layout.llvm_type(cx));
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]);
}
PassMode::Direct(ref attrs)
| PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => {
Expand All @@ -517,12 +472,14 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
}

fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value) {
let mut func_attrs = SmallVec::<[_; 2]>::new();
if self.ret.layout.abi.is_uninhabited() {
llvm::Attribute::NoReturn.apply_callsite(llvm::AttributePlace::Function, callsite);
func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(bx.cx.llcx));
}
if !self.can_unwind {
llvm::Attribute::NoUnwind.apply_callsite(llvm::AttributePlace::Function, callsite);
func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(bx.cx.llcx));
}
attributes::apply_to_callsite(callsite, llvm::AttributePlace::Function, &{ func_attrs });

let mut i = 0;
let mut apply = |cx: &CodegenCx<'_, '_>, attrs: &ArgAttributes| {
@@ -537,13 +494,8 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => {
assert!(!on_stack);
let i = apply(bx.cx, attrs);
unsafe {
llvm::LLVMRustAddStructRetCallSiteAttr(
callsite,
llvm::AttributePlace::Argument(i).as_uint(),
self.ret.layout.llvm_type(bx),
);
}
let sret = llvm::CreateStructRetAttr(bx.cx.llcx, self.ret.layout.llvm_type(bx));
attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]);
}
PassMode::Cast(cast) => {
cast.attrs.apply_attrs_to_callsite(
@@ -572,13 +524,12 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Ignore => {}
PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => {
let i = apply(bx.cx, attrs);
unsafe {
llvm::LLVMRustAddByValCallSiteAttr(
callsite,
llvm::AttributePlace::Argument(i).as_uint(),
arg.layout.llvm_type(bx),
);
}
let byval = llvm::CreateByValAttr(bx.cx.llcx, arg.layout.llvm_type(bx));
attributes::apply_to_callsite(
callsite,
llvm::AttributePlace::Argument(i),
&[byval],
);
}
PassMode::Direct(ref attrs)
| PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => {
@@ -610,10 +561,12 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
if self.conv == Conv::CCmseNonSecureCall {
// This will probably get ignored on all targets but those supporting the TrustZone-M
// extension (thumbv8m targets).
llvm::AddCallSiteAttrString(
let cmse_nonsecure_call =
llvm::CreateAttrString(bx.cx.llcx, cstr::cstr!("cmse_nonsecure_call"));
attributes::apply_to_callsite(
callsite,
llvm::AttributePlace::Function,
cstr::cstr!("cmse_nonsecure_call"),
&[cmse_nonsecure_call],
);
}
}
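A side note on the buffer types used above: argument attributes are collected into a SmallVec<[&Attribute; 8]> and function-level ones into a SmallVec<[_; 2]>, so the common case stays on the stack and pays no heap allocation per argument. A self-contained illustration of that property using the smallvec crate (unrelated to rustc's own types):

    use smallvec::SmallVec;

    fn main() {
        // Inline capacity of 8: up to 8 elements live on the stack.
        let mut v: SmallVec<[u32; 8]> = SmallVec::new();
        for i in 0..8 {
            v.push(i);
        }
        assert!(!v.spilled()); // still inline, no heap allocation yet
        v.push(8);
        assert!(v.spilled()); // a ninth element spills to the heap
    }
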
11 changes: 7 additions & 4 deletions compiler/rustc_codegen_llvm/src/allocator.rs
@@ -64,7 +64,8 @@ pub(crate) unsafe fn codegen(
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
if tcx.sess.must_emit_unwind_tables() {
attributes::emit_uwtable(llfn);
let uwtable = attributes::uwtable_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}

let callee = kind.fn_name(method.name);
@@ -105,20 +106,22 @@ pub(crate) unsafe fn codegen(
let name = "__rust_alloc_error_handler";
let llfn = llvm::LLVMRustGetOrInsertFunction(llmod, name.as_ptr().cast(), name.len(), ty);
// -> ! DIFlagNoReturn
llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn);
let no_return = llvm::AttributeKind::NoReturn.create_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[no_return]);

if tcx.sess.target.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
if tcx.sess.must_emit_unwind_tables() {
attributes::emit_uwtable(llfn);
let uwtable = attributes::uwtable_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}

let kind = if has_alloc_error_handler { AllocatorKind::Global } else { AllocatorKind::Default };
let callee = kind.fn_name(sym::oom);
let callee = llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
// -> ! DIFlagNoReturn
llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, callee);
attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);
llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);

let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
13 changes: 8 additions & 5 deletions compiler/rustc_codegen_llvm/src/asm.rs
@@ -1,3 +1,4 @@
use crate::attributes;
use crate::builder::Builder;
use crate::common::Funclet;
use crate::context::CodegenCx;
@@ -18,6 +19,7 @@ use rustc_target::abi::*;
use rustc_target::asm::*;

use libc::{c_char, c_uint};
use smallvec::SmallVec;
use tracing::debug;

impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
@@ -273,19 +275,20 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
)
.unwrap_or_else(|| span_bug!(line_spans[0], "LLVM asm constraint validation failed"));

let mut attrs = SmallVec::<[_; 2]>::new();
if options.contains(InlineAsmOptions::PURE) {
if options.contains(InlineAsmOptions::NOMEM) {
llvm::Attribute::ReadNone.apply_callsite(llvm::AttributePlace::Function, result);
attrs.push(llvm::AttributeKind::ReadNone.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::READONLY) {
llvm::Attribute::ReadOnly.apply_callsite(llvm::AttributePlace::Function, result);
attrs.push(llvm::AttributeKind::ReadOnly.create_attr(self.cx.llcx));
}
llvm::Attribute::WillReturn.apply_callsite(llvm::AttributePlace::Function, result);
attrs.push(llvm::AttributeKind::WillReturn.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::NOMEM) {
llvm::Attribute::InaccessibleMemOnly
.apply_callsite(llvm::AttributePlace::Function, result);
attrs.push(llvm::AttributeKind::InaccessibleMemOnly.create_attr(self.cx.llcx));
} else {
// LLVM doesn't have an attribute to represent ReadOnly + SideEffect
}
attributes::apply_to_callsite(result, llvm::AttributePlace::Function, &{ attrs });

// Write results to outputs
for (idx, op) in operands.iter().enumerate() {
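The attributes::apply_to_llfn and attributes::apply_to_callsite helpers (and the batched FFI entry points behind them) are defined outside the hunks shown on this page. Judging only from the call sites above, they take a slice of attributes and hand it to LLVM in one call; a rough sketch of the expected shape, where the FFI names and exact signatures are assumptions rather than something this diff confirms:

    // Sketch only: helper signatures inferred from the call sites; the
    // LLVMRust* FFI shim names below are assumptions.
    pub fn apply_to_llfn(llfn: &Value, idx: AttributePlace, attrs: &[&Attribute]) {
        if !attrs.is_empty() {
            // One FFI call for the whole slice instead of one per attribute.
            unsafe { LLVMRustAddFunctionAttributes(llfn, idx.as_uint(), attrs.as_ptr(), attrs.len()); }
        }
    }

    pub fn apply_to_callsite(callsite: &Value, idx: AttributePlace, attrs: &[&Attribute]) {
        if !attrs.is_empty() {
            unsafe { LLVMRustAddCallSiteAttributes(callsite, idx.as_uint(), attrs.as_ptr(), attrs.len()); }
        }
    }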