Skip to content

Keep valid scalar ranges attached to newtypes when dealing with their inner field. #97261

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 7 commits into from
26 changes: 14 additions & 12 deletions compiler/rustc_codegen_gcc/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -675,15 +675,14 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
return OperandRef::new_zst(self, place.layout);
}

fn scalar_load_metadata<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>, load: RValue<'gcc>, scalar: &abi::Scalar) {
let vr = scalar.valid_range(bx);
match scalar.primitive() {
abi::Int(..) => {
if !scalar.is_always_valid(bx) {
fn scalar_load_metadata<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>, load: RValue<'gcc>, scalar: &abi::Scalar, vr: Option<WrappingRange>) {
match (vr, scalar.primitive()) {
(Some(vr), abi::Int(..)) => {
if !vr.is_full_for(scalar.size(bx)) {
bx.range_metadata(load, vr);
}
}
abi::Pointer if vr.start < vr.end && !vr.contains(0) => {
(Some(vr), abi::Pointer) if vr.start < vr.end && !vr.contains(0) => {
bx.nonnull_metadata(load);
}
_ => {}
Expand All @@ -697,24 +696,26 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
else if place.layout.is_gcc_immediate() {
let load = self.load(place.llval.get_type(), place.llval, place.align);
if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
scalar_load_metadata(self, load, scalar);
let vr = place.scalar_valid_range.map(|ranges| ranges.single()).flatten();
scalar_load_metadata(self, load, scalar, vr);
}
OperandValue::Immediate(self.to_immediate(load, place.layout))
}
else if let abi::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
let b_offset = a.size(self).align_to(b.align(self).abi);
let pair_type = place.layout.gcc_type(self, false);

let mut load = |i, scalar: &abi::Scalar, align| {
let mut load = |i, scalar: &abi::Scalar, align, vr| {
let llptr = self.struct_gep(pair_type, place.llval, i as u64);
let load = self.load(llptr.get_type(), llptr, align);
scalar_load_metadata(self, load, scalar);
scalar_load_metadata(self, load, scalar, vr);
if scalar.is_bool() { self.trunc(load, self.type_i1()) } else { load }
};

let (vr_a, vr_b) = place.scalar_valid_range.map(|ranges| ranges.pair()).flatten().unzip();
OperandValue::Pair(
load(0, a, place.align),
load(1, b, place.align.restrict_for_offset(b_offset)),
load(0, a, place.align, vr_a),
load(1, b, place.align.restrict_for_offset(b_offset), vr_b),
)
}
else {
Expand Down Expand Up @@ -747,7 +748,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {

self.switch_to_block(body_bb);
let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
cg_elem.val.store(&mut self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));
let dest = PlaceRef::new_sized_aligned(self.cx(), current_val, cg_elem.layout, align);
cg_elem.val.store(&mut self, dest);

let next = self.inbounds_gep(self.backend_type(cg_elem.layout), current.to_rvalue(), &[self.const_usize(1)]);
self.llbb().add_assignment(None, current, next);
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_gcc/src/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
let value = self.context.new_array_access(None, array, self.const_usize(offset.bytes())).get_address(None);
self.const_bitcast(value, ty)
};
PlaceRef::new_sized(value, layout)
PlaceRef::new_sized(self, value, layout)
}

fn const_ptrcast(&self, val: RValue<'gcc>, ty: Type<'gcc>) -> RValue<'gcc> {
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
let name_str = name.as_str();

let llret_ty = self.layout_of(ret_ty).gcc_type(self, true);
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
let result = PlaceRef::new_sized(self, llresult, fn_abi.ret.layout);

let simple = get_simple_intrinsic(self, name);
let llval =
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_gcc/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
* TODO(antoyo): remove the patches.
*/

#![feature(rustc_private, decl_macro, associated_type_bounds, never_type, trusted_len)]
#![feature(rustc_private, decl_macro, associated_type_bounds, never_type, trusted_len, unzip_option)]
#![allow(broken_intra_doc_links)]
#![recursion_limit="256"]
#![warn(rust_2018_idioms)]
Expand Down
34 changes: 19 additions & 15 deletions compiler/rustc_codegen_llvm/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -478,20 +478,21 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
load: &'ll Value,
scalar: abi::Scalar,
layout: TyAndLayout<'tcx>,
vr: Option<WrappingRange>,
offset: Size,
) {
if !scalar.is_always_valid(bx) {
bx.noundef_metadata(load);
}

match scalar.primitive() {
abi::Int(..) => {
if !scalar.is_always_valid(bx) {
bx.range_metadata(load, scalar.valid_range(bx));
match (vr, scalar.primitive()) {
(Some(vr), abi::Int(..)) => {
if !vr.is_full_for(scalar.size(bx)) {
bx.range_metadata(load, vr);
}
}
abi::Pointer => {
if !scalar.valid_range(bx).contains(0) {
(Some(vr), abi::Pointer) => {
if !vr.contains(0) {
bx.nonnull_metadata(load);
}

Expand All @@ -501,7 +502,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
}
}
abi::F32 | abi::F64 => {}
_ => {}
}
}

Expand All @@ -524,7 +525,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
let llval = const_llval.unwrap_or_else(|| {
let load = self.load(llty, place.llval, place.align);
if let abi::Abi::Scalar(scalar) = place.layout.abi {
scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
let vr = place.scalar_valid_range.map(|range| range.single()).flatten();
scalar_load_metadata(self, load, scalar, place.layout, vr, Size::ZERO);
}
load
});
Expand All @@ -533,17 +535,18 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
let b_offset = a.size(self).align_to(b.align(self).abi);
let pair_ty = place.layout.llvm_type(self);

let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
let mut load = |i, scalar: abi::Scalar, layout, align, vr, offset| {
let llptr = self.struct_gep(pair_ty, place.llval, i as u64);
let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
let load = self.load(llty, llptr, align);
scalar_load_metadata(self, load, scalar, layout, offset);
scalar_load_metadata(self, load, scalar, layout, vr, offset);
self.to_immediate_scalar(load, scalar)
};

let (vr_a, vr_b) = place.scalar_valid_range.map(|vrs| vrs.pair()).flatten().unzip();
OperandValue::Pair(
load(0, a, place.layout, place.align, Size::ZERO),
load(1, b, place.layout, place.align.restrict_for_offset(b_offset), b_offset),
load(0, a, place.layout, place.align, vr_a, Size::ZERO),
load(1, b, place.layout, place.align.restrict_for_offset(b_offset), vr_b, b_offset),
)
} else {
OperandValue::Ref(place.llval, None, place.align)
Expand Down Expand Up @@ -577,9 +580,10 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {

let mut body_bx = Self::build(self.cx, body_bb);
let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
cg_elem
.val
.store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));
cg_elem.val.store(
&mut body_bx,
PlaceRef::new_sized_aligned(self.cx, current, cg_elem.layout, align),
);

let next = body_bx.inbounds_gep(
self.backend_type(cg_elem.layout),
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -308,7 +308,7 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
};
self.const_bitcast(llval, llty)
};
PlaceRef::new_sized(llval, layout)
PlaceRef::new_sized(self, llval, layout)
}

fn const_ptrcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/intrinsic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let name = tcx.item_name(def_id);

let llret_ty = self.layout_of(ret_ty).llvm_type(self);
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
let result = PlaceRef::new_sized(self, llresult, fn_abi.ret.layout);

let simple = get_simple_intrinsic(self, name);
let llval = match name {
Expand Down
1 change: 1 addition & 0 deletions compiler/rustc_codegen_llvm/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
#![feature(once_cell)]
#![feature(nll)]
#![feature(iter_intersperse)]
#![feature(unzip_option)]
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]

Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_ssa/src/mir/block.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1240,7 +1240,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

// Handle both by-ref and immediate tuples.
if let Ref(llval, None, align) = tuple.val {
let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
let tuple_ptr = PlaceRef::new_sized_aligned(bx, llval, tuple.layout, align);
for i in 0..tuple.layout.fields.count() {
let field_ptr = tuple_ptr.project_field(bx, i);
let field = bx.load_operand(field_ptr);
Expand Down Expand Up @@ -1581,7 +1581,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let llty = bx.backend_type(src.layout);
let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
let align = src.layout.align.abi.min(dst.align);
src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
src.val.store(bx, PlaceRef::new_sized_aligned(bx, cast_ptr, src.layout, align));
}

// Stores the return value of a function call into its final location.
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let name_str = name.as_str();

let llret_ty = bx.backend_type(bx.layout_of(ret_ty));
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
let result = PlaceRef::new_sized(bx, llresult, fn_abi.ret.layout);

let llval = match name {
sym::assume => {
Expand Down
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_ssa/src/mir/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
debug!("alloc: {:?} (return place) -> place", local);
let llretptr = bx.get_param(0);
return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
return LocalRef::Place(PlaceRef::new_sized(&bx, llretptr, layout));
}

if memory_locals.contains(local) {
Expand Down Expand Up @@ -356,7 +356,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// FIXME: lifetimes
let llarg = bx.get_param(llarg_idx);
llarg_idx += 1;
LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
LocalRef::Place(PlaceRef::new_sized(bx, llarg, arg.layout))
} else if arg.is_unsized_indirect() {
// As the storage for the indirect argument lives during
// the whole function call, we just copy the fat pointer.
Expand Down
8 changes: 7 additions & 1 deletion compiler/rustc_codegen_ssa/src/mir/operand.rs
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,13 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
};
let layout = cx.layout_of(projected_ty);
PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
PlaceRef {
llval: llptr,
llextra,
layout,
align: layout.align.abi,
scalar_valid_range: layout.abi.scalar_valid_range(cx),
}
}

/// If this operand is a `Pair`, we return an aggregate with the two values.
Expand Down
36 changes: 30 additions & 6 deletions compiler/rustc_codegen_ssa/src/mir/place.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use rustc_middle::mir;
use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::{Abi, Align, FieldsShape, Int, TagEncoding};
use rustc_target::abi::{Abi, Align, FieldsShape, Int, ScalarRanges, TagEncoding};
use rustc_target::abi::{VariantIdx, Variants};

#[derive(Copy, Clone, Debug)]
Expand All @@ -26,21 +26,30 @@ pub struct PlaceRef<'tcx, V> {

/// The alignment we know for this place.
pub align: Align,

/// If the place refers to a `Scalar` value, this represents the range
/// of valid values.
pub scalar_valid_range: Option<ScalarRanges>,
}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
assert!(!layout.is_unsized());
PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
pub fn new_sized(
cx: &impl HasTyCtxt<'tcx>,
llval: V,
layout: TyAndLayout<'tcx>,
) -> PlaceRef<'tcx, V> {
Self::new_sized_aligned(cx, llval, layout, layout.align.abi)
}

pub fn new_sized_aligned(
cx: &impl HasTyCtxt<'tcx>,
llval: V,
layout: TyAndLayout<'tcx>,
align: Align,
) -> PlaceRef<'tcx, V> {
assert!(!layout.is_unsized());
PlaceRef { llval, llextra: None, layout, align }
let scalar_valid_range = layout.abi.scalar_valid_range(cx);
PlaceRef { llval, llextra: None, layout, align, scalar_valid_range }
}

// FIXME(eddyb) pass something else for the name so no work is done
Expand All @@ -51,7 +60,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
) -> Self {
assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
let tmp = bx.alloca(bx.cx().backend_type(layout), layout.align.abi);
Self::new_sized(tmp, layout)
Self::new_sized(bx, tmp, layout)
}

/// Returns a place for an indirect reference to an unsized place.
Expand Down Expand Up @@ -125,12 +134,24 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
bx.struct_gep(ty, self.llval, bx.cx().backend_field_index(self.layout, ix))
}
};

// If this field is a primitive type of a newtype, propagate the scalar valid range
let scalar_valid_range =
if let (0, Variants::Single { .. }, Abi::Scalar(_) | Abi::ScalarPair(..)) =
(offset.bytes(), &self.layout.variants, field.abi)
{
self.scalar_valid_range
} else {
field.abi.scalar_valid_range(bx)
};

PlaceRef {
// HACK(eddyb): have to bitcast pointers until LLVM removes pointee types.
llval: bx.pointercast(llval, bx.cx().type_ptr_to(bx.cx().backend_type(field))),
llextra: if bx.cx().type_has_metadata(field.ty) { self.llextra } else { None },
layout: field,
align: effective_field_align,
scalar_valid_range,
}
};

Expand Down Expand Up @@ -200,6 +221,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
llextra: self.llextra,
layout: field,
align: effective_field_align,
scalar_valid_range: field.abi.scalar_valid_range(bx),
}
}

Expand Down Expand Up @@ -392,6 +414,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
llextra: None,
layout,
align: self.align.restrict_for_offset(offset),
scalar_valid_range: layout.abi.scalar_valid_range(bx),
}
}

Expand All @@ -402,6 +425,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
) -> Self {
let mut downcast = *self;
downcast.layout = self.layout.for_variant(bx.cx(), variant_index);
downcast.scalar_valid_range = downcast.layout.abi.scalar_valid_range(bx);

// Cast to the appropriate variant struct type.
let variant_ty = bx.cx().backend_type(downcast.layout);
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/mir/rvalue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
scratch.storage_dead(&mut bx);
}
OperandValue::Ref(llref, None, align) => {
let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
let source = PlaceRef::new_sized_aligned(&bx, llref, operand.layout, align);
base::coerce_unsized_into(&mut bx, source, dest);
}
OperandValue::Ref(_, Some(_), _) => {
Expand Down
Loading