
Commit 5567b2a

Move scalar_to_backend to ssa
1 parent cf3fb76 commit 5567b2a

14 files changed: +229 -234 lines changed
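
The shared replacement for the two deleted scalar_to_backend bodies lives in rustc_codegen_ssa and is not among the hunks shown below. What the diffs do show is the backend side of the split: each backend drops its copy of scalar_to_backend and instead exposes small hooks (const_bitcast, const_pointercast, const_int_to_ptr, const_ptr_to_int, static_addr_of_impl, static_addr_of_mut, type_ix, get_static, get_value_name, set_value_name) as trait methods. As a rough sketch of the pattern, with toy traits whose names merely mirror those hooks (not the real rustc interfaces), the integer case of the shared logic can look like this:

// Illustrative sketch only: toy trait whose method names mirror the hooks
// added in this commit; real rustc signatures and types differ.
trait ConstHooks {
    type Value: Copy;
    type Type: Copy;

    fn type_ix(&self, num_bits: u64) -> Self::Type;
    fn const_uint_big(&self, ty: Self::Type, data: u128) -> Self::Value;
    fn const_bitcast(&self, val: Self::Value, ty: Self::Type) -> Self::Value;
    fn const_int_to_ptr(&self, val: Self::Value, ty: Self::Type) -> Self::Value;
}

// Shared, backend-agnostic logic: instead of each backend duplicating the
// scalar lowering, one generic function handles it (integer case only here).
fn int_scalar_to_backend<Bx: ConstHooks>(
    bx: &Bx,
    bits: u64,
    data: u128,
    is_pointer: bool,
    backend_ty: Bx::Type,
) -> Bx::Value {
    // Build the raw integer constant at the scalar's bit width...
    let raw = bx.const_uint_big(bx.type_ix(bits), data);
    // ...then hand the cast decision back to a hook, so the int-to-ptr vs.
    // plain bitcast choice lives in shared code rather than in each backend.
    if is_pointer { bx.const_int_to_ptr(raw, backend_ty) } else { bx.const_bitcast(raw, backend_ty) }
}

The point of the split is that the scalar-lowering decisions are written once in the shared crate, while each backend only answers primitive questions such as "cast this constant" or "give me an N-bit integer type".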

Cargo.lock

Lines changed: 0 additions & 1 deletion
@@ -3536,7 +3536,6 @@ dependencies = [
  "rustc_errors",
  "rustc_fluent_macro",
  "rustc_fs_util",
- "rustc_hashes",
  "rustc_hir",
  "rustc_index",
  "rustc_llvm",

compiler/rustc_codegen_gcc/src/common.rs

Lines changed: 30 additions & 93 deletions
@@ -1,11 +1,10 @@
 use gccjit::{LValue, RValue, ToRValue, Type};
-use rustc_abi::Primitive::Pointer;
-use rustc_abi::{self as abi, HasDataLayout};
+use rustc_abi as abi;
+use rustc_abi::{Align, HasDataLayout};
 use rustc_codegen_ssa::traits::{
-    BaseTypeCodegenMethods, ConstCodegenMethods, MiscCodegenMethods, StaticCodegenMethods,
+    BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods,
 };
-use rustc_middle::mir::Mutability;
-use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
+use rustc_middle::mir::interpret::ConstAllocation;
 use rustc_middle::ty::layout::LayoutOf;

 use crate::context::CodegenCx;
@@ -109,7 +108,7 @@ pub fn type_is_pointer(typ: Type<'_>) -> bool {
     typ.get_pointee().is_some()
 }

-impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
+impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
     fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> {
         if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
     }
@@ -220,93 +219,6 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
         None
     }

-    fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> {
-        let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
-        match cv {
-            Scalar::Int(int) => {
-                let data = int.to_bits(layout.size(self));
-                let value = self.const_uint_big(self.type_ix(bitsize), data);
-                let bytesize = layout.size(self).bytes();
-                if bitsize > 1 && ty.is_integral() && bytesize as u32 == ty.get_size() {
-                    // NOTE: since the intrinsic _xabort is called with a bitcast, which
-                    // is non-const, but expects a constant, do a normal cast instead of a bitcast.
-                    // FIXME(antoyo): fix bitcast to work in constant contexts.
-                    // TODO(antoyo): perhaps only use bitcast for pointers?
-                    self.context.new_cast(None, value, ty)
-                } else {
-                    // TODO(bjorn3): assert size is correct
-                    self.const_bitcast(value, ty)
-                }
-            }
-            Scalar::Ptr(ptr, _size) => {
-                let (prov, offset) = ptr.prov_and_relative_offset();
-                let alloc_id = prov.alloc_id();
-                let base_addr = match self.tcx.global_alloc(alloc_id) {
-                    GlobalAlloc::Memory(alloc) => {
-                        // For ZSTs directly codegen an aligned pointer.
-                        // This avoids generating a zero-sized constant value and actually needing a
-                        // real address at runtime.
-                        if alloc.inner().len() == 0 {
-                            assert_eq!(offset.bytes(), 0);
-                            let val = self.const_usize(alloc.inner().align.bytes());
-                            return if matches!(layout.primitive(), Pointer(_)) {
-                                self.context.new_cast(None, val, ty)
-                            } else {
-                                self.const_bitcast(val, ty)
-                            };
-                        }
-
-                        let init = self.const_data_from_alloc(alloc);
-                        let alloc = alloc.inner();
-                        let value = match alloc.mutability {
-                            Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
-                            _ => self.static_addr_of(init, alloc.align, None),
-                        };
-                        if !self.sess().fewer_names() {
-                            // TODO(antoyo): set value name.
-                        }
-                        value
-                    }
-                    GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance),
-                    GlobalAlloc::VTable(ty, dyn_ty) => {
-                        let alloc = self
-                            .tcx
-                            .global_alloc(self.tcx.vtable_allocation((
-                                ty,
-                                dyn_ty.principal().map(|principal| {
-                                    self.tcx.instantiate_bound_regions_with_erased(principal)
-                                }),
-                            )))
-                            .unwrap_memory();
-                        let init = self.const_data_from_alloc(alloc);
-                        self.static_addr_of(init, alloc.inner().align, None)
-                    }
-                    GlobalAlloc::TypeId { .. } => {
-                        let val = self.const_usize(offset.bytes());
-                        // This is still a variable of pointer type, even though we only use the provenance
-                        // of that pointer in CTFE and Miri. But to make LLVM's type system happy,
-                        // we need an int-to-ptr cast here (it doesn't matter at all which provenance that picks).
-                        return self.context.new_cast(None, val, ty);
-                    }
-                    GlobalAlloc::Static(def_id) => {
-                        assert!(self.tcx.is_static(def_id));
-                        self.get_static(def_id).get_address(None)
-                    }
-                };
-                let ptr_type = base_addr.get_type();
-                let base_addr = self.context.new_cast(None, base_addr, self.usize_type);
-                let offset =
-                    self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
-                let ptr = self.context.new_cast(None, base_addr + offset, ptr_type);
-                if !matches!(layout.primitive(), Pointer(_)) {
-                    self.const_bitcast(ptr.dereference(None).to_rvalue(), ty)
-                } else {
-                    self.context.new_cast(None, ptr, ty)
-                }
-            }
-        }
-    }
-
     fn const_data_from_alloc(&self, alloc: ConstAllocation<'_>) -> Self::Value {
         // We ignore the alignment for the purpose of deduping RValues
         // The alignment is not handled / used in any way by `const_alloc_to_gcc`,
@@ -328,6 +240,31 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> {
             .new_array_access(None, base_addr, self.const_usize(offset.bytes()))
             .get_address(None)
     }
+    fn const_bitcast(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        self.const_bitcast(val, ty)
+    }
+    fn const_pointercast(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        self.context.new_cast(None, val, ty)
+    }
+    fn const_int_to_ptr(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        self.context.new_cast(None, val, ty)
+    }
+    fn const_ptr_to_int(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        self.context.new_cast(None, val, ty)
+    }
+
+    fn static_addr_of_impl(
+        &self,
+        cv: Self::Value,
+        align: Align,
+        kind: Option<&str>,
+    ) -> Self::Value {
+        self.static_addr_of(cv, align, kind)
+    }
+
+    fn static_addr_of_mut(&self, cv: Self::Value, align: Align, kind: Option<&str>) -> Self::Value {
+        self.static_addr_of_mut(cv, align, kind)
+    }
 }

 pub trait SignType<'gcc, 'tcx> {

compiler/rustc_codegen_gcc/src/consts.rs

Lines changed: 11 additions & 0 deletions
@@ -150,6 +150,17 @@ impl<'gcc, 'tcx> StaticCodegenMethods for CodegenCx<'gcc, 'tcx> {
             self.add_used_global(global.to_rvalue());
         }
     }
+
+    fn get_value_name(&self, _val: Self::Value) -> &[u8] {
+        // TODO(antoyo)
+        &[]
+    }
+    fn set_value_name(&self, _val: Self::Value, _name: &[u8]) {
+        // TODO(antoyo)
+    }
+    fn get_static(&self, def_id: DefId) -> Self::Value {
+        self.get_static(def_id).get_address(None)
+    }
 }

 impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
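
The gcc backend only stubs get_value_name and set_value_name, while the LLVM diff further down removes the hash-based naming of constant allocations from its scalar_to_backend; this suggests that the naming step now runs in shared code through these hooks. A hedged sketch of that step, again with a toy trait rather than the real StaticCodegenMethods signatures:

// Illustrative sketch only: name an otherwise-anonymous constant allocation,
// but only if the backend has not already assigned a name to it.
trait StaticHooks {
    type Value: Copy;
    fn get_value_name(&self, val: Self::Value) -> &[u8];
    fn set_value_name(&self, val: Self::Value, name: &[u8]);
}

fn maybe_name_alloc<Bx: StaticHooks>(bx: &Bx, value: Bx::Value, hash_hex: &str) {
    if bx.get_value_name(value).is_empty() {
        // A backend like the gcc one can make this a no-op: its stub above
        // returns an empty name and ignores set_value_name.
        bx.set_value_name(value, format!("alloc_{hash_hex}").as_bytes());
    }
}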

compiler/rustc_codegen_gcc/src/type_.rs

Lines changed: 16 additions & 16 deletions
@@ -17,22 +17,6 @@ use crate::context::CodegenCx;
 use crate::type_of::LayoutGccExt;

 impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
-    pub fn type_ix(&self, num_bits: u64) -> Type<'gcc> {
-        // gcc only supports 1, 2, 4 or 8-byte integers.
-        // FIXME(antoyo): this is misleading to use the next power of two as rustc_codegen_ssa
-        // sometimes use 96-bit numbers and the following code will give an integer of a different
-        // size.
-        let bytes = (num_bits / 8).next_power_of_two() as i32;
-        match bytes {
-            1 => self.i8_type,
-            2 => self.i16_type,
-            4 => self.i32_type,
-            8 => self.i64_type,
-            16 => self.i128_type,
-            _ => panic!("unexpected num_bits: {}", num_bits),
-        }
-    }
-
     pub fn type_void(&self) -> Type<'gcc> {
         self.context.new_type::<()>()
     }
@@ -148,6 +132,22 @@ impl<'gcc, 'tcx> BaseTypeCodegenMethods for CodegenCx<'gcc, 'tcx> {
         self.isize_type
     }

+    fn type_ix(&self, num_bits: u64) -> Type<'gcc> {
+        // gcc only supports 1, 2, 4 or 8-byte integers.
+        // FIXME(antoyo): this is misleading to use the next power of two as rustc_codegen_ssa
+        // sometimes use 96-bit numbers and the following code will give an integer of a different
+        // size.
+        let bytes = (num_bits / 8).next_power_of_two() as i32;
+        match bytes {
+            1 => self.i8_type,
+            2 => self.i16_type,
+            4 => self.i32_type,
+            8 => self.i64_type,
+            16 => self.i128_type,
+            _ => panic!("unexpected num_bits: {}", num_bits),
+        }
+    }
+
     fn type_f16(&self) -> Type<'gcc> {
         #[cfg(feature = "master")]
         if self.supports_f16_type {
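
Moving type_ix from an inherent method into BaseTypeCodegenMethods is what lets backend-independent code request an integer type by bit width, as the removed scalar_to_backend bodies do with self.type_ix(bitsize). A minimal illustration of the call pattern against a toy trait (not the real one):

// Illustrative sketch only: generic code can now pick an integer type by
// bit width without knowing which backend it is talking to.
trait BaseTypeHooks {
    type Type: Copy;
    fn type_ix(&self, num_bits: u64) -> Self::Type;
}

fn scalar_int_type<Bx: BaseTypeHooks>(bx: &Bx, is_bool: bool, size_bits: u64) -> Bx::Type {
    // Booleans are lowered as 1-bit integers, everything else at its full width,
    // matching the `if layout.is_bool() { 1 } else { ... }` check in the removed code.
    bx.type_ix(if is_bool { 1 } else { size_bits })
}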

compiler/rustc_codegen_llvm/Cargo.toml

Lines changed: 0 additions & 1 deletion
@@ -25,7 +25,6 @@ rustc_data_structures = { path = "../rustc_data_structures" }
 rustc_errors = { path = "../rustc_errors" }
 rustc_fluent_macro = { path = "../rustc_fluent_macro" }
 rustc_fs_util = { path = "../rustc_fs_util" }
-rustc_hashes = { path = "../rustc_hashes" }
 rustc_hir = { path = "../rustc_hir" }
 rustc_index = { path = "../rustc_index" }
 rustc_llvm = { path = "../rustc_llvm" }

compiler/rustc_codegen_llvm/src/common.rs

Lines changed: 30 additions & 102 deletions
@@ -3,16 +3,13 @@
 use std::borrow::Borrow;

 use libc::{c_char, c_uint};
-use rustc_abi::Primitive::Pointer;
-use rustc_abi::{self as abi, HasDataLayout as _};
-use rustc_ast::Mutability;
+use rustc_abi as abi;
+use rustc_abi::{Align, HasDataLayout};
 use rustc_codegen_ssa::common::TypeKind;
 use rustc_codegen_ssa::traits::*;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_hashes::Hash128;
 use rustc_hir::def_id::DefId;
 use rustc_middle::bug;
-use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
+use rustc_middle::mir::interpret::ConstAllocation;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::cstore::DllImport;
 use tracing::debug;
@@ -120,7 +117,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
     }
 }

-impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
+impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
     fn const_null(&self, t: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMConstNull(t) }
     }
@@ -254,101 +251,6 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
         })
     }

-    fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: &'ll Type) -> &'ll Value {
-        let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
-        match cv {
-            Scalar::Int(int) => {
-                let data = int.to_bits(layout.size(self));
-                let llval = self.const_uint_big(self.type_ix(bitsize), data);
-                if matches!(layout.primitive(), Pointer(_)) {
-                    unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
-                } else {
-                    self.const_bitcast(llval, llty)
-                }
-            }
-            Scalar::Ptr(ptr, _size) => {
-                let (prov, offset) = ptr.prov_and_relative_offset();
-                let global_alloc = self.tcx.global_alloc(prov.alloc_id());
-                let base_addr = match global_alloc {
-                    GlobalAlloc::Memory(alloc) => {
-                        // For ZSTs directly codegen an aligned pointer.
-                        // This avoids generating a zero-sized constant value and actually needing a
-                        // real address at runtime.
-                        if alloc.inner().len() == 0 {
-                            assert_eq!(offset.bytes(), 0);
-                            let llval = self.const_usize(alloc.inner().align.bytes());
-                            return if matches!(layout.primitive(), Pointer(_)) {
-                                unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
-                            } else {
-                                self.const_bitcast(llval, llty)
-                            };
-                        } else {
-                            let init =
-                                const_alloc_to_llvm(self, alloc.inner(), /*static*/ false);
-                            let alloc = alloc.inner();
-                            let value = match alloc.mutability {
-                                Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
-                                _ => self.static_addr_of_impl(init, alloc.align, None),
-                            };
-                            if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty()
-                            {
-                                let hash = self.tcx.with_stable_hashing_context(|mut hcx| {
-                                    let mut hasher = StableHasher::new();
-                                    alloc.hash_stable(&mut hcx, &mut hasher);
-                                    hasher.finish::<Hash128>()
-                                });
-                                llvm::set_value_name(
-                                    value,
-                                    format!("alloc_{hash:032x}").as_bytes(),
-                                );
-                            }
-                            value
-                        }
-                    }
-                    GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance),
-                    GlobalAlloc::VTable(ty, dyn_ty) => {
-                        let alloc = self
-                            .tcx
-                            .global_alloc(self.tcx.vtable_allocation((
-                                ty,
-                                dyn_ty.principal().map(|principal| {
-                                    self.tcx.instantiate_bound_regions_with_erased(principal)
-                                }),
-                            )))
-                            .unwrap_memory();
-                        let init = const_alloc_to_llvm(self, alloc.inner(), /*static*/ false);
-                        self.static_addr_of_impl(init, alloc.inner().align, None)
-                    }
-                    GlobalAlloc::Static(def_id) => {
-                        assert!(self.tcx.is_static(def_id));
-                        assert!(!self.tcx.is_thread_local_static(def_id));
-                        self.get_static(def_id)
-                    }
-                    GlobalAlloc::TypeId { .. } => {
-                        // Drop the provenance, the offset contains the bytes of the hash
-                        let llval = self.const_usize(offset.bytes());
-                        return unsafe { llvm::LLVMConstIntToPtr(llval, llty) };
-                    }
-                };
-                let base_addr_space = global_alloc.address_space(self);
-                let llval = unsafe {
-                    llvm::LLVMConstInBoundsGEP2(
-                        self.type_i8(),
-                        // Cast to the required address space if necessary
-                        self.const_pointercast(base_addr, self.type_ptr_ext(base_addr_space)),
-                        &self.const_usize(offset.bytes()),
-                        1,
-                    )
-                };
-                if !matches!(layout.primitive(), Pointer(_)) {
-                    unsafe { llvm::LLVMConstPtrToInt(llval, llty) }
-                } else {
-                    self.const_bitcast(llval, llty)
-                }
-            }
-        }
-    }
-
     fn const_data_from_alloc(&self, alloc: ConstAllocation<'_>) -> Self::Value {
         const_alloc_to_llvm(self, alloc.inner(), /*static*/ false)
     }
@@ -363,6 +265,32 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
             )
         }
     }
+
+    fn const_bitcast(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        unsafe { llvm::LLVMConstBitCast(val, ty) }
+    }
+    fn const_pointercast(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        unsafe { llvm::LLVMConstPointerCast(val, ty) }
+    }
+    fn const_int_to_ptr(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        unsafe { llvm::LLVMConstIntToPtr(val, ty) }
+    }
+    fn const_ptr_to_int(&self, val: Self::Value, ty: Self::Type) -> Self::Value {
+        unsafe { llvm::LLVMConstPtrToInt(val, ty) }
+    }
+
+    fn static_addr_of_impl(
+        &self,
+        cv: Self::Value,
+        align: Align,
+        kind: Option<&str>,
+    ) -> Self::Value {
+        self.static_addr_of_impl(cv, align, kind)
+    }
+
+    fn static_addr_of_mut(&self, cv: Self::Value, align: Align, kind: Option<&str>) -> Self::Value {
+        self.static_addr_of_mut(cv, align, kind)
+    }
 }

 /// Get the [LLVM type][Type] of a [`Value`].
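
Both deleted bodies end the Scalar::Ptr case the same way: compute the allocation's base address, apply the relative byte offset, and either keep the result as a pointer or turn it into an integer of the requested type. With the hooks added above, that tail can be expressed once in shared code; the following is only a hedged sketch with toy names, and the actual rustc_codegen_ssa implementation may differ in detail:

// Illustrative sketch only (toy trait): the tail of the pointer case that
// both removed backend bodies implement in their own dialect.
trait PtrHooks {
    type Value: Copy;
    type Type: Copy;

    fn const_ptr_byte_offset(&self, base: Self::Value, offset_bytes: u64) -> Self::Value;
    fn const_pointercast(&self, val: Self::Value, ty: Self::Type) -> Self::Value;
    fn const_ptr_to_int(&self, val: Self::Value, ty: Self::Type) -> Self::Value;
}

fn finish_ptr_scalar<Bx: PtrHooks>(
    bx: &Bx,
    base_addr: Bx::Value,
    offset_bytes: u64,
    scalar_is_pointer: bool,
    backend_ty: Bx::Type,
) -> Bx::Value {
    // Apply the relative offset to the allocation's base address.
    let ptr = bx.const_ptr_byte_offset(base_addr, offset_bytes);
    if scalar_is_pointer {
        // The scalar really is a pointer: only adjust its backend type.
        bx.const_pointercast(ptr, backend_ty)
    } else {
        // The scalar is an integer that merely carries a pointer's bits.
        bx.const_ptr_to_int(ptr, backend_ty)
    }
}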
