Skip to content

Commit e03382d

Browse files
committed
callconv: adapt mips padding logic to mips64
MIPS64 needs to insert a padding argument before an aggregate argument when that argument is in an odd-numbered position (counting from 0) and has an alignment of 16 bytes or higher. For example, `void foo(int a, max_align_t b);` is passed the same way as `void foo(int a, long _padding, max_align_t b);`. This fix uses an i32 for the padding, which works just fine because i32 is aligned like i64 when passed as an argument.
1 parent 6d41834 commit e03382d

File tree

1 file changed

+65
-56
lines changed

1 file changed

+65
-56
lines changed

compiler/rustc_target/src/callconv/mips64.rs

Lines changed: 65 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ where
3434
}
3535
}
3636

37-
fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>)
37+
fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, offset: &mut Size)
3838
where
3939
Ty: TyAbiInterface<'a, C> + Copy,
4040
C: HasDataLayout,
@@ -70,92 +70,101 @@ where
7070
ret.cast_to(Uniform::new(Reg::i64(), size));
7171
} else {
7272
ret.make_indirect();
73+
*offset += cx.data_layout().pointer_size();
7374
}
7475
}
7576

76-
fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>)
77+
fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, offset: &mut Size)
7778
where
7879
Ty: TyAbiInterface<'a, C> + Copy,
7980
C: HasDataLayout,
8081
{
81-
if !arg.layout.is_aggregate() {
82-
extend_integer_width_mips(arg, 64);
83-
return;
84-
}
85-
if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
86-
arg.make_indirect();
87-
return;
88-
}
89-
9082
let dl = cx.data_layout();
9183
let size = arg.layout.size;
9284
let mut prefix = [None; 8];
9385
let mut prefix_index = 0;
9486

95-
match arg.layout.fields {
96-
FieldsShape::Primitive => unreachable!(),
97-
FieldsShape::Array { .. } => {
98-
// Arrays are passed indirectly
99-
arg.make_indirect();
100-
return;
101-
}
102-
FieldsShape::Union(_) => {
103-
// Unions are always treated as a series of 64-bit integer chunks
104-
}
105-
FieldsShape::Arbitrary { .. } => {
106-
// Structures are split up into a series of 64-bit integer chunks, but any aligned
107-
// doubles not part of another aggregate are passed as floats.
108-
let mut last_offset = Size::ZERO;
109-
110-
for i in 0..arg.layout.fields.count() {
111-
let field = arg.layout.field(cx, i);
112-
let offset = arg.layout.fields.offset(i);
113-
114-
// We only care about aligned doubles
115-
if let BackendRepr::Scalar(scalar) = field.backend_repr {
116-
if scalar.primitive() == Primitive::Float(Float::F64) {
117-
if offset.is_aligned(dl.f64_align) {
118-
// Insert enough integers to cover [last_offset, offset)
119-
assert!(last_offset.is_aligned(dl.f64_align));
120-
for _ in 0..((offset - last_offset).bits() / 64)
121-
.min((prefix.len() - prefix_index) as u64)
122-
{
123-
prefix[prefix_index] = Some(Reg::i64());
124-
prefix_index += 1;
125-
}
87+
// Detect need for padding
88+
let align = arg.layout.align.abi.max(dl.i64_align).min(dl.i128_align);
89+
let pad_i32 = !offset.is_aligned(align);
12690

127-
if prefix_index == prefix.len() {
128-
break;
91+
if !arg.layout.is_aggregate() {
92+
extend_integer_width_mips(arg, 64);
93+
} else if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
94+
arg.make_indirect();
95+
} else {
96+
match arg.layout.fields {
97+
FieldsShape::Primitive => unreachable!(),
98+
FieldsShape::Array { .. } => {
99+
// Arrays are passed indirectly
100+
arg.make_indirect();
101+
}
102+
FieldsShape::Union(_) => {
103+
// Unions are always treated as a series of 64-bit integer chunks
104+
}
105+
FieldsShape::Arbitrary { .. } => {
106+
// Structures are split up into a series of 64-bit integer chunks, but any aligned
107+
// doubles not part of another aggregate are passed as floats.
108+
let mut last_offset = Size::ZERO;
109+
110+
for i in 0..arg.layout.fields.count() {
111+
let field = arg.layout.field(cx, i);
112+
let offset = arg.layout.fields.offset(i);
113+
114+
// We only care about aligned doubles
115+
if let BackendRepr::Scalar(scalar) = field.backend_repr {
116+
if scalar.primitive() == Primitive::Float(Float::F64) {
117+
if offset.is_aligned(dl.f64_align) {
118+
// Insert enough integers to cover [last_offset, offset)
119+
assert!(last_offset.is_aligned(dl.f64_align));
120+
for _ in 0..((offset - last_offset).bits() / 64)
121+
.min((prefix.len() - prefix_index) as u64)
122+
{
123+
prefix[prefix_index] = Some(Reg::i64());
124+
prefix_index += 1;
125+
}
126+
127+
if prefix_index == prefix.len() {
128+
break;
129+
}
130+
131+
prefix[prefix_index] = Some(Reg::f64());
132+
prefix_index += 1;
133+
last_offset = offset + Reg::f64().size;
129134
}
130-
131-
prefix[prefix_index] = Some(Reg::f64());
132-
prefix_index += 1;
133-
last_offset = offset + Reg::f64().size;
134135
}
135136
}
136137
}
137138
}
138-
}
139-
};
140-
141-
// Extract first 8 chunks as the prefix
142-
let rest_size = size - Size::from_bytes(8) * prefix_index as u64;
143-
arg.cast_to(CastTarget::prefixed(prefix, Uniform::new(Reg::i64(), rest_size)));
139+
};
140+
141+
// Extract first 8 chunks as the prefix
142+
let rest_size = size - Size::from_bytes(8) * prefix_index as u64;
143+
arg.cast_to_and_pad_i32(
144+
CastTarget::prefixed(prefix, Uniform::new(Reg::i64(), rest_size)),
145+
pad_i32,
146+
);
147+
}
148+
*offset = offset.align_to(align) + size.align_to(align);
144149
}
145150

146151
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
147152
where
148153
Ty: TyAbiInterface<'a, C> + Copy,
149154
C: HasDataLayout,
150155
{
156+
// mips64 argument passing is also affected by the alignment of aggregates.
157+
// see mips.rs for how the offset is used
158+
let mut offset = Size::ZERO;
159+
151160
if !fn_abi.ret.is_ignore() {
152-
classify_ret(cx, &mut fn_abi.ret);
161+
classify_ret(cx, &mut fn_abi.ret, &mut offset);
153162
}
154163

155164
for arg in fn_abi.args.iter_mut() {
156165
if arg.is_ignore() {
157166
continue;
158167
}
159-
classify_arg(cx, arg);
168+
classify_arg(cx, arg, &mut offset);
160169
}
161170
}

0 commit comments

Comments
 (0)