-use super::BackendTypes;
+use rustc_middle::bug;
+use rustc_middle::ty::layout::TyAndLayout;
+use rustc_target::abi::call::CastTarget;
+use rustc_target::abi::Align;
+
+use super::consts::ConstMethods;
+use super::type_::BaseTypeMethods;
+use super::{BackendTypes, BuilderMethods, LayoutTypeMethods};
 
 pub trait AbiBuilderMethods<'tcx>: BackendTypes {
     fn get_param(&mut self, index: usize) -> Self::Value;
 }
+
+/// The ABI mandates that the value is passed as a different struct representation.
+pub trait CastTargetAbiExt<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
+    /// Spill the value to the stack and reload it to convert from the Rust representation to the ABI representation.
+    fn cast_rust_abi_to_other(&self, bx: &mut Bx, src: Bx::Value, align: Align) -> Bx::Value;
+    /// Spill the value to the stack and reload it to convert from the ABI representation to the Rust representation.
+    fn cast_other_abi_to_rust(
+        &self,
+        bx: &mut Bx,
+        src: Bx::Value,
+        dst: Bx::Value,
+        layout: TyAndLayout<'tcx>,
+    );
+}
+
+impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> CastTargetAbiExt<'a, 'tcx, Bx> for CastTarget {
+    fn cast_rust_abi_to_other(&self, bx: &mut Bx, src: Bx::Value, align: Align) -> Bx::Value {
+        let cast_ty = bx.cast_backend_type(self);
+        match bx.type_kind(cast_ty) {
+            crate::common::TypeKind::Struct | crate::common::TypeKind::Array => {
+                let mut index = 0;
+                let mut offset = 0;
+                let mut target = bx.const_poison(cast_ty);
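+                // Load each prefix register from the spilled Rust value and
+                // insert it into the cast struct, tracking the byte offset.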
+                for reg in self.prefix.iter().filter_map(|&x| x) {
+                    let ptr = if offset == 0 {
+                        src
+                    } else {
+                        bx.inbounds_ptradd(src, bx.const_usize(offset))
+                    };
+                    let load = bx.load(bx.reg_backend_type(&reg), ptr, align);
+                    target = bx.insert_value(target, load, index);
+                    index += 1;
+                    offset += reg.size.bytes();
+                }
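+                // The remaining bytes are passed as `rest_count` full copies of
+                // the unit register, plus `rem_bytes` trailing bytes that do not
+                // fill a whole unit.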
+                let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
+                    (0, 0)
+                } else {
+                    (
+                        self.rest.total.bytes() / self.rest.unit.size.bytes(),
+                        self.rest.total.bytes() % self.rest.unit.size.bytes(),
+                    )
+                };
+                for _ in 0..rest_count {
+                    let ptr = if offset == 0 {
+                        src
+                    } else {
+                        bx.inbounds_ptradd(src, bx.const_usize(offset))
+                    };
+                    let load = bx.load(bx.reg_backend_type(&self.rest.unit), ptr, align);
+                    target = bx.insert_value(target, load, index);
+                    index += 1;
+                    offset += self.rest.unit.size.bytes();
+                }
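+                // A trailing partial unit is still loaded with the full unit
+                // type, so the spill slot must be large enough to read a whole
+                // unit at this offset.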
+                if rem_bytes != 0 {
+                    let ptr = bx.inbounds_ptradd(src, bx.const_usize(offset));
+                    let load = bx.load(bx.reg_backend_type(&self.rest.unit), ptr, align);
+                    target = bx.insert_value(target, load, index);
+                }
+                target
+            }
+            ty_kind if bx.type_kind(bx.reg_backend_type(&self.rest.unit)) == ty_kind => {
+                bx.load(cast_ty, src, align)
+            }
+            ty_kind => bug!("cannot cast {ty_kind:?} to the ABI representation in CastTarget"),
+        }
+    }
+
+    fn cast_other_abi_to_rust(
+        &self,
+        bx: &mut Bx,
+        src: Bx::Value,
+        dst: Bx::Value,
+        layout: TyAndLayout<'tcx>,
+    ) {
+        let align = layout.align.abi;
+        match bx.type_kind(bx.val_ty(src)) {
+            crate::common::TypeKind::Struct | crate::common::TypeKind::Array => {
+                let mut index = 0;
+                let mut offset = 0;
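+                // Mirror image of `cast_rust_abi_to_other`: extract each register
+                // from the ABI value and store it back at its byte offset in the
+                // Rust representation.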
+                for reg in self.prefix.iter().filter_map(|&x| x) {
+                    let from = bx.extract_value(src, index);
+                    let ptr = if offset == 0 {
+                        dst
+                    } else {
+                        bx.inbounds_ptradd(dst, bx.const_usize(offset))
+                    };
+                    bx.store(from, ptr, align);
+                    index += 1;
+                    offset += reg.size.bytes();
+                }
+                let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
+                    (0, 0)
+                } else {
+                    (
+                        self.rest.total.bytes() / self.rest.unit.size.bytes(),
+                        self.rest.total.bytes() % self.rest.unit.size.bytes(),
+                    )
+                };
+                for _ in 0..rest_count {
+                    let from = bx.extract_value(src, index);
+                    let ptr = if offset == 0 {
+                        dst
+                    } else {
+                        bx.inbounds_ptradd(dst, bx.const_usize(offset))
+                    };
+                    bx.store(from, ptr, align);
+                    index += 1;
+                    offset += self.rest.unit.size.bytes();
+                }
+                if rem_bytes != 0 {
+                    let from = bx.extract_value(src, index);
+                    let ptr = bx.inbounds_ptradd(dst, bx.const_usize(offset));
+                    bx.store(from, ptr, align);
+                }
+            }
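+            // The ABI passes the value in a single register whose type matches
+            // the unit register, so a plain store suffices.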
+            ty_kind if bx.type_kind(bx.reg_backend_type(&self.rest.unit)) == ty_kind => {
+                let scratch_size = self.size(bx);
+                let src = if scratch_size > layout.size {
+                    // The single register is wider than the Rust type, and we use
+                    // direct load and store instructions here, so we must spill to
+                    // a sufficiently large scratch slot and reload only the bytes
+                    // of the Rust type.
+                    let scratch_align = self.align(bx);
+                    let llscratch = bx.alloca(scratch_size, scratch_align);
+                    bx.lifetime_start(llscratch, scratch_size);
+                    bx.store(src, llscratch, scratch_align);
+                    let tmp = bx.load(bx.backend_type(layout), llscratch, scratch_align);
+                    bx.lifetime_end(llscratch, scratch_size);
+                    tmp
+                } else {
+                    src
+                };
+                bx.store(src, dst, align);
+            }
+            ty_kind => bug!("cannot cast {ty_kind:?} to the Rust representation in CastTarget"),
+        };
+    }
+}