cranelift_codegen/isa/pulley_shared/lower/isle.rs

pub mod generated_code;
use generated_code::MInst;
use inst::InstAndKind;

use crate::ir::{condcodes::*, immediates::*, types::*, *};
use crate::isa::pulley_shared::{
    inst::{
        FReg, OperandSize, PulleyCall, ReturnCallInfo, VReg, WritableFReg, WritableVReg,
        WritableXReg, XReg,
    },
    lower::{Cond, regs},
    *,
};
use crate::machinst::{
    CallArgList, CallInfo, CallRetList, MachInst, Reg, VCodeConstant, VCodeConstantData,
    abi::{ArgPair, RetPair, StackAMode},
    isle::*,
};
use alloc::boxed::Box;
use pulley_interpreter::U6;
use regalloc2::PReg;
use smallvec::SmallVec;

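// Type aliases matching the names that the generated ISLE code expects.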
type Unit = ();
type VecArgPair = Vec<ArgPair>;
type VecRetPair = Vec<RetPair>;
type BoxCallInfo = Box<CallInfo<PulleyCall>>;
type BoxCallIndInfo = Box<CallInfo<XReg>>;
type BoxCallIndirectHostInfo = Box<CallInfo<ExternalName>>;
type BoxReturnCallInfo = Box<ReturnCallInfo<ExternalName>>;
type BoxReturnCallIndInfo = Box<ReturnCallInfo<XReg>>;
type BoxExternalName = Box<ExternalName>;
type UpperXRegSet = pulley_interpreter::UpperRegSet<pulley_interpreter::XReg>;
type PcRelOffset = pulley_interpreter::PcRelOffset;

#[expect(
    unused_imports,
    reason = "used on other backends, used here to suppress warning elsewhere"
)]
use crate::machinst::isle::UnwindInst as _;

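/// Bundles the target-independent `Lower` context with the Pulley backend so
/// that the ISLE-generated `Context` methods below can reach both.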
pub(crate) struct PulleyIsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    pub lower_ctx: &'a mut Lower<'b, I>,
    pub backend: &'a B,
}

impl<'a, 'b, P> PulleyIsleContext<'a, 'b, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    fn new(lower_ctx: &'a mut Lower<'b, InstAndKind<P>>, backend: &'a PulleyBackend<P>) -> Self {
        Self { lower_ctx, backend }
    }

    pub(crate) fn dfg(&self) -> &crate::ir::DataFlowGraph {
        &self.lower_ctx.f.dfg
    }
}

impl<P> generated_code::Context for PulleyIsleContext<'_, '_, InstAndKind<P>, PulleyBackend<P>>
where
    P: PulleyTargetKind,
{
    crate::isle_lower_prelude_methods!(InstAndKind<P>);

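    // Direct-call variant: the callee is a statically known `ExternalName`.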
    fn gen_call_info(
        &mut self,
        sig: Sig,
        name: ExternalName,
        mut uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        let mut args = SmallVec::new();
        // Move arguments destined for `x0`..`x3` out of `uses` and onto the
        // `PulleyCall` itself, in ascending physical-register order; all other
        // arguments remain in `uses`.
        uses.sort_by_key(|arg| arg.preg);
        uses.retain(|arg| {
            if arg.preg != regs::x0()
                && arg.preg != regs::x1()
                && arg.preg != regs::x2()
                && arg.preg != regs::x3()
            {
                return true;
            }
            args.push(XReg::new(arg.vreg).unwrap());
            false
        });
        let dest = PulleyCall { name, args };
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

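    // Indirect-call variant: the callee is an `XReg` rather than an
    // `ExternalName`, and no arguments are folded into the call instruction.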
    fn gen_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        let dest = XReg::new(dest).unwrap();
        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

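    // Variant for calls that leave Pulley and are handled by the host (note
    // the `BoxCallIndirectHostInfo` return type); the destination remains an
    // `ExternalName`.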
    fn gen_call_host_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
        defs: CallRetList,
        try_call_info: Option<TryCallInfo>,
    ) -> BoxCallIndirectHostInfo {
        let stack_ret_space = self.lower_ctx.sigs()[sig].sized_stack_ret_space();
        let stack_arg_space = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_outgoing_args_size(stack_ret_space + stack_arg_space);

        Box::new(
            self.lower_ctx
                .gen_call_info(sig, dest, uses, defs, try_call_info),
        )
    }

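    // Tail (return) call to a statically known callee: the stack argument
    // area is accounted via `accumulate_tail_args_size` rather than the
    // outgoing-args size.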
    fn gen_return_call_info(
        &mut self,
        sig: Sig,
        dest: ExternalName,
        uses: CallArgList,
    ) -> BoxReturnCallInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest,
            uses,
            new_stack_arg_size,
        })
    }

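    // Indirect tail-call variant: the destination is a register.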
    fn gen_return_call_ind_info(
        &mut self,
        sig: Sig,
        dest: Reg,
        uses: CallArgList,
    ) -> BoxReturnCallIndInfo {
        let new_stack_arg_size = self.lower_ctx.sigs()[sig].sized_stack_arg_space();
        self.lower_ctx
            .abi_mut()
            .accumulate_tail_args_size(new_stack_arg_size);

        Box::new(ReturnCallInfo {
            dest: XReg::new(dest).unwrap(),
            uses,
            new_stack_arg_size,
        })
    }

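    // Conversions between untyped `Reg`/`WritableReg` values and the typed
    // register newtypes (`XReg`, `FReg`, `VReg`) used by the ISLE rules. The
    // `unwrap()`s assert that each register is in the expected class.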
    fn vreg_new(&mut self, r: Reg) -> VReg {
        VReg::new(r).unwrap()
    }
    fn writable_vreg_new(&mut self, r: WritableReg) -> WritableVReg {
        r.map(|wr| VReg::new(wr).unwrap())
    }
    fn writable_vreg_to_vreg(&mut self, arg0: WritableVReg) -> VReg {
        arg0.to_reg()
    }
    fn writable_vreg_to_writable_reg(&mut self, arg0: WritableVReg) -> WritableReg {
        arg0.map(|vr| vr.to_reg())
    }
    fn vreg_to_reg(&mut self, arg0: VReg) -> Reg {
        *arg0
    }
    fn xreg_new(&mut self, r: Reg) -> XReg {
        XReg::new(r).unwrap()
    }
    fn writable_xreg_new(&mut self, r: WritableReg) -> WritableXReg {
        r.map(|wr| XReg::new(wr).unwrap())
    }
    fn writable_xreg_to_xreg(&mut self, arg0: WritableXReg) -> XReg {
        arg0.to_reg()
    }
    fn writable_xreg_to_writable_reg(&mut self, arg0: WritableXReg) -> WritableReg {
        arg0.map(|xr| xr.to_reg())
    }
    fn xreg_to_reg(&mut self, arg0: XReg) -> Reg {
        *arg0
    }
    fn freg_new(&mut self, r: Reg) -> FReg {
        FReg::new(r).unwrap()
    }
    fn writable_freg_new(&mut self, r: WritableReg) -> WritableFReg {
        r.map(|wr| FReg::new(wr).unwrap())
    }
    fn writable_freg_to_freg(&mut self, arg0: WritableFReg) -> FReg {
        arg0.to_reg()
    }
    fn writable_freg_to_writable_reg(&mut self, arg0: WritableFReg) -> WritableReg {
        arg0.map(|fr| fr.to_reg())
    }
    fn freg_to_reg(&mut self, arg0: FReg) -> Reg {
        *arg0
    }

    #[inline]
    fn emit(&mut self, arg0: &MInst) -> Unit {
        self.lower_ctx.emit(arg0.clone().into());
    }

    fn sp_reg(&mut self) -> XReg {
        XReg::new(regs::stack_reg()).unwrap()
    }

    fn cond_invert(&mut self, cond: &Cond) -> Cond {
        cond.invert()
    }

    fn u6_from_u8(&mut self, imm: u8) -> Option<U6> {
        U6::new(imm)
    }

    fn endianness(&mut self, flags: MemFlags) -> Endianness {
        flags.endianness(self.backend.isa_flags.endianness())
    }

    fn is_native_endianness(&mut self, endianness: &Endianness) -> bool {
        *endianness == self.backend.isa_flags.endianness()
    }

    fn pointer_width(&mut self) -> PointerWidth {
        P::pointer_width()
    }

    fn memflags_nontrapping(&mut self, flags: MemFlags) -> bool {
        flags.trap_code().is_none()
    }

    fn memflags_is_wasm(&mut self, flags: MemFlags) -> bool {
        flags.trap_code() == Some(TrapCode::HEAP_OUT_OF_BOUNDS)
            && self.endianness(flags) == Endianness::Little
    }

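    // Returns the load offset as a `u16` for Pulley's `*_g32` addressing
    // forms, but only when `bound_check_offset` equals the load offset plus
    // the access size, i.e. the preceding bounds check already covers the
    // entire access. Negative offsets never match.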
    fn g32_offset(
        &mut self,
        load_offset: i32,
        load_ty: Type,
        bound_check_offset: u64,
    ) -> Option<u16> {
        let load_offset = u64::try_from(load_offset).ok()?;
        let load_bytes = u64::from(load_ty.bytes());
        if bound_check_offset != load_offset + load_bytes {
            return None;
        }
        u16::try_from(load_offset).ok()
    }
}

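/// Lowers `inst` through the ISLE-generated `constructor_lower`, returning
/// `None` when no lowering rule matches.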
pub(crate) fn lower<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    inst: Inst,
) -> Option<InstOutput>
where
    P: PulleyTargetKind,
{
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower(&mut isle_ctx, inst)
}

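/// Lowers the branch instruction `branch`, with `targets` giving the
/// `MachLabel` of each successor block.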
pub(crate) fn lower_branch<P>(
    lower_ctx: &mut Lower<InstAndKind<P>>,
    backend: &PulleyBackend<P>,
    branch: Inst,
    targets: &[MachLabel],
) -> Option<()>
where
    P: PulleyTargetKind,
{
    let mut isle_ctx = PulleyIsleContext::new(lower_ctx, backend);
    generated_code::constructor_lower_branch(&mut isle_ctx, branch, targets)
}