/**
 * \file
 * ARM64 backend for the Mono code generator
 *
 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
 *
 * Based on mini-arm.c:
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * (C) 2003 Ximian, Inc.
 * Copyright 2003-2011 Novell, Inc (http://www.novell.com)
 * Copyright 2011 Xamarin, Inc (http://www.xamarin.com)
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */

#include "mini.h"
#include "cpu-arm64.h"
#include "ir-emit.h"
#include "aot-runtime.h"
#include "mini-runtime.h"

#include <mono/arch/arm64/arm64-codegen.h>
#include <mono/utils/mono-mmap.h>
#include <mono/utils/mono-memory-model.h>
#include <mono/metadata/abi-details.h>

#include "interp/interp.h"

/*
 * Documentation:
 *
 * - ARM(R) Architecture Reference Manual, ARMv8, for ARMv8-A architecture profile (DDI0487A_a_armv8_arm.pdf)
 * - Procedure Call Standard for the ARM 64-bit Architecture (AArch64) (IHI0055B_aapcs64.pdf)
 * - ELF for the ARM 64-bit Architecture (IHI0056B_aaelf64.pdf)
 *
 * Register usage:
 * - ip0/ip1/lr are used as temporary registers
 * - r27 is used as the rgctx/imt register
 * - r28 is used to access arguments passed on the stack
 * - d16/d17 are used as fp temporary registers (see the defines below)
 */

#define FP_TEMP_REG ARMREG_D16
#define FP_TEMP_REG2 ARMREG_D17

#define THUNK_SIZE (4 * 4)

/* The single step trampoline */
static gpointer ss_trampoline;

/* The breakpoint trampoline */
static gpointer bp_trampoline;

static gboolean ios_abi;

static __attribute__ ((__warn_unused_result__)) guint8* emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset);

const char*
mono_arch_regname (int reg)
{
	static const char * rnames[] = {
		"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
		"r10", "r11", "r12", "r13", "r14", "r15", "r16", "r17", "r18", "r19",
		"r20", "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "fp",
		"lr", "sp"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown";
}

const char*
mono_arch_fregname (int reg)
{
	static const char * rnames[] = {
		"d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9",
		"d10", "d11", "d12", "d13", "d14", "d15", "d16", "d17", "d18", "d19",
		"d20", "d21", "d22", "d23", "d24", "d25", "d26", "d27", "d28", "d29",
		"d30", "d31"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown fp";
}

int
mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
{
	NOT_IMPLEMENTED;
	return 0;
}

#define MAX_ARCH_DELEGATE_PARAMS 7

static gpointer
get_delegate_invoke_impl (gboolean has_target, int param_count, guint32 *code_size)
{
	guint8 *code, *start;

	if (has_target) {
		start = code = mono_global_codeman_reserve (12);

		/* Replace the this argument with the target */
		arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
		arm_ldrx (code, ARMREG_R0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, target));
		arm_brx (code, ARMREG_IP0);

		g_assert ((code - start) <= 12);

		mono_arch_flush_icache (start, 12);
		MONO_PROFILER_RAISE (jit_code_buffer, (start, code - start, MONO_PROFILER_CODE_BUFFER_DELEGATE_INVOKE, NULL));
	} else {
		int size, i;

		size = 8 + param_count * 4;
		start = code = mono_global_codeman_reserve (size);

		arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
		/* slide down the arguments */
		for (i = 0; i < param_count; ++i)
			arm_movx (code, i, i + 1);
		arm_brx (code, ARMREG_IP0);

		g_assert ((code - start) <= size);

		mono_arch_flush_icache (start, size);
		MONO_PROFILER_RAISE (jit_code_buffer, (start, code - start, MONO_PROFILER_CODE_BUFFER_DELEGATE_INVOKE, NULL));
	}

	if (code_size)
		*code_size = code - start;

	return start;
}

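/*
 * For illustration, the has_target stub emitted above is just three
 * instructions (the MonoDelegate* arrives in r0):
 *
 *   ldr ip0, [x0, #MONO_STRUCT_OFFSET (MonoDelegate, method_ptr)]
 *   ldr x0,  [x0, #MONO_STRUCT_OFFSET (MonoDelegate, target)]    ; replace 'this'
 *   br  ip0
 *
 * The no-target variant instead slides each argument register down by one
 * (mov x0, x1; mov x1, x2; ...) to drop the delegate argument before
 * branching to method_ptr.
 */
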
/*
 * mono_arch_get_delegate_invoke_impls:
 *
 *   Return a list of MonoAotTrampInfo structures for the delegate invoke impl
 * trampolines.
 */
GSList*
mono_arch_get_delegate_invoke_impls (void)
{
	GSList *res = NULL;
	guint8 *code;
	guint32 code_len;
	int i;
	char *tramp_name;

	code = (guint8*)get_delegate_invoke_impl (TRUE, 0, &code_len);
	res = g_slist_prepend (res, mono_tramp_info_create ("delegate_invoke_impl_has_target", code, code_len, NULL, NULL));

	for (i = 0; i <= MAX_ARCH_DELEGATE_PARAMS; ++i) {
		code = (guint8*)get_delegate_invoke_impl (FALSE, i, &code_len);
		tramp_name = g_strdup_printf ("delegate_invoke_impl_target_%d", i);
		res = g_slist_prepend (res, mono_tramp_info_create (tramp_name, code, code_len, NULL, NULL));
		g_free (tramp_name);
	}

	return res;
}

gpointer
mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
{
	guint8 *code, *start;

	/*
	 * vtypes are returned in registers, or using the dedicated r8 register, so
	 * they can be supported by delegate invokes.
	 */

	if (has_target) {
		static guint8* cached = NULL;

		if (cached)
			return cached;

		if (mono_ee_features.use_aot_trampolines)
			start = (guint8*)mono_aot_get_trampoline ("delegate_invoke_impl_has_target");
		else
			start = (guint8*)get_delegate_invoke_impl (TRUE, 0, NULL);
		mono_memory_barrier ();
		cached = start;
		return cached;
	} else {
		static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
		int i;

		if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
			return NULL;
		for (i = 0; i < sig->param_count; ++i)
			if (!mono_is_regsize_var (sig->params [i]))
				return NULL;

		code = cache [sig->param_count];
		if (code)
			return code;

		if (mono_ee_features.use_aot_trampolines) {
			char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
			start = (guint8*)mono_aot_get_trampoline (name);
			g_free (name);
		} else {
			start = (guint8*)get_delegate_invoke_impl (FALSE, sig->param_count, NULL);
		}
		mono_memory_barrier ();
		cache [sig->param_count] = start;
		return start;
	}

	return NULL;
}

gpointer
mono_arch_get_delegate_virtual_invoke_impl (MonoMethodSignature *sig, MonoMethod *method, int offset, gboolean load_imt_reg)
{
	return NULL;
}

gpointer
mono_arch_get_this_arg_from_call (host_mgreg_t *regs, guint8 *code)
{
	return (gpointer)regs [ARMREG_R0];
}

void
mono_arch_cpu_init (void)
{
}

void
mono_arch_init (void)
{
	if (!mono_aot_only)
		bp_trampoline = mini_get_breakpoint_trampoline ();

	mono_arm_gsharedvt_init ();

#if defined(TARGET_IOS)
	ios_abi = TRUE;
#endif
}

void
mono_arch_cleanup (void)
{
}

guint32
mono_arch_cpu_optimizations (guint32 *exclude_mask)
{
	*exclude_mask = 0;
	return 0;
}

guint32
mono_arch_cpu_enumerate_simd_versions (void)
{
	return 0;
}

void
mono_arch_register_lowlevel_calls (void)
{
}

void
mono_arch_finish_init (void)
{
}

/* The maximum length is 2 instructions */
static guint8*
emit_imm (guint8 *code, int dreg, int imm)
{
	// FIXME: Optimize this
	if (imm < 0) {
		gint64 limm = imm;
		arm_movnx (code, dreg, (~limm) & 0xffff, 0);
		arm_movkx (code, dreg, (limm >> 16) & 0xffff, 16);
	} else {
		arm_movzx (code, dreg, imm & 0xffff, 0);
		if (imm >> 16)
			arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
	}

	return code;
}

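/*
 * Example: emit_imm (code, ARMREG_IP0, 0x12345) yields
 *
 *   movz ip0, #0x2345
 *   movk ip0, #0x1, lsl #16
 *
 * while a negative immediate such as -2 starts from movn instead:
 *
 *   movn ip0, #0x1               ; ip0 = ~1 = 0xfffffffffffffffe
 *   movk ip0, #0xffff, lsl #16
 */
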
/* The maximum length is 4 instructions */
static guint8*
emit_imm64 (guint8 *code, int dreg, guint64 imm)
{
	// FIXME: Optimize this
	arm_movzx (code, dreg, imm & 0xffff, 0);
	if ((imm >> 16) & 0xffff)
		arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
	if ((imm >> 32) & 0xffff)
		arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
	if ((imm >> 48) & 0xffff)
		arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);

	return code;
}

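/*
 * Example: emit_imm64 (code, dreg, 0x1122334455667788) expands to the full
 * four instruction sequence
 *
 *   movz dreg, #0x7788
 *   movk dreg, #0x5566, lsl #16
 *   movk dreg, #0x3344, lsl #32
 *   movk dreg, #0x1122, lsl #48
 *
 * while zero halfwords above the lowest one are simply skipped.
 */
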
guint8*
mono_arm_emit_imm64 (guint8 *code, int dreg, gint64 imm)
{
	return emit_imm64 (code, dreg, imm);
}

/*
 * emit_imm64_template:
 *
 *   Emit a patchable code sequence for constructing a 64 bit immediate.
 */
static guint8*
emit_imm64_template (guint8 *code, int dreg)
{
	arm_movzx (code, dreg, 0, 0);
	arm_movkx (code, dreg, 0, 16);
	arm_movkx (code, dreg, 0, 32);
	arm_movkx (code, dreg, 0, 48);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_addw_imm (guint8 *code, int dreg, int sreg, int imm)
{
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_addw (code, dreg, sreg, ARMREG_LR);
	} else {
		arm_addw_imm (code, dreg, sreg, imm);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_addx_imm (guint8 *code, int dreg, int sreg, int imm)
{
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_addx (code, dreg, sreg, ARMREG_LR);
	} else {
		arm_addx_imm (code, dreg, sreg, imm);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_subw_imm (guint8 *code, int dreg, int sreg, int imm)
{
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_subw (code, dreg, sreg, ARMREG_LR);
	} else {
		arm_subw_imm (code, dreg, sreg, imm);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_subx_imm (guint8 *code, int dreg, int sreg, int imm)
{
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_subx (code, dreg, sreg, ARMREG_LR);
	} else {
		arm_subx_imm (code, dreg, sreg, imm);
	}
	return code;
}

/* Emit sp+=imm. Clobbers ip0/ip1 */
static __attribute__ ((__warn_unused_result__)) guint8*
emit_addx_sp_imm (guint8 *code, int imm)
{
	code = emit_imm (code, ARMREG_IP0, imm);
	arm_movspx (code, ARMREG_IP1, ARMREG_SP);
	arm_addx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
	arm_movspx (code, ARMREG_SP, ARMREG_IP1);
	return code;
}

/* Emit sp-=imm. Clobbers ip0/ip1 */
static __attribute__ ((__warn_unused_result__)) guint8*
emit_subx_sp_imm (guint8 *code, int imm)
{
	code = emit_imm (code, ARMREG_IP0, imm);
	arm_movspx (code, ARMREG_IP1, ARMREG_SP);
	arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
	arm_movspx (code, ARMREG_SP, ARMREG_IP1);
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_andw_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_andw (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_andx_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_andx (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_orrw_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_orrw (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_orrx_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_orrx (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_eorw_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_eorw (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_eorx_imm (guint8 *code, int dreg, int sreg, int imm)
{
	// FIXME:
	code = emit_imm (code, ARMREG_LR, imm);
	arm_eorx (code, dreg, sreg, ARMREG_LR);

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_cmpw_imm (guint8 *code, int sreg, int imm)
{
	if (imm == 0) {
		arm_cmpw (code, sreg, ARMREG_RZR);
	} else {
		// FIXME:
		code = emit_imm (code, ARMREG_LR, imm);
		arm_cmpw (code, sreg, ARMREG_LR);
	}

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_cmpx_imm (guint8 *code, int sreg, int imm)
{
	if (imm == 0) {
		arm_cmpx (code, sreg, ARMREG_RZR);
	} else {
		// FIXME:
		code = emit_imm (code, ARMREG_LR, imm);
		arm_cmpx (code, sreg, ARMREG_LR);
	}

	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strb (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strb_imm (imm)) {
		arm_strb (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strb_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strh (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strh_imm (imm)) {
		arm_strh (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strh_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strw (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strw_imm (imm)) {
		arm_strw (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strw_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strfpw (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strw_imm (imm)) {
		arm_strfpw (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_strfpw (code, rt, ARMREG_IP0, 0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strfpx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strx_imm (imm)) {
		arm_strfpx (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_strfpx (code, rt, ARMREG_IP0, 0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_strx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_strx_imm (imm)) {
		arm_strx (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strx_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrb (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 1)) {
		arm_ldrb (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrb_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrsbx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 1)) {
		arm_ldrsbx (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrsbx_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrh (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 2)) {
		arm_ldrh (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrh_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrshx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 2)) {
		arm_ldrshx (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrshx_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrswx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrswx (code, rt, rn, imm);
	} else {
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrswx_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrw (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrw (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrw_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 8)) {
		arm_ldrx (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrx_reg (code, rt, rn, ARMREG_IP0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrfpw (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrfpw (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_ldrfpw (code, rt, ARMREG_IP0, 0);
	}
	return code;
}

static __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrfpx (guint8 *code, int rt, int rn, int imm)
{
	if (arm_is_pimm12_scaled (imm, 8)) {
		arm_ldrfpx (code, rt, rn, imm);
	} else {
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_ldrfpx (code, rt, ARMREG_IP0, 0);
	}
	return code;
}

guint8*
mono_arm_emit_ldrx (guint8 *code, int rt, int rn, int imm)
{
	return emit_ldrx (code, rt, rn, imm);
}

static guint8*
emit_call (MonoCompile *cfg, guint8* code, MonoJumpInfoType patch_type, gconstpointer data)
{
	/*
	mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_IMM);
	code = emit_imm64_template (code, ARMREG_LR);
	arm_blrx (code, ARMREG_LR);
	*/
	mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_BL);
	arm_bl (code, code);
	cfg->thunk_area += THUNK_SIZE;
	return code;
}

static guint8*
emit_aotconst_full (MonoCompile *cfg, MonoJumpInfo **ji, guint8 *code, guint8 *start, int dreg, guint32 patch_type, gconstpointer data)
{
	if (cfg)
		mono_add_patch_info (cfg, code - cfg->native_code, (MonoJumpInfoType)patch_type, data);
	else
		*ji = mono_patch_info_list_prepend (*ji, code - start, (MonoJumpInfoType)patch_type, data);
	/* See arch_emit_got_access () in aot-compiler.c */
	arm_ldrx_lit (code, dreg, 0);
	arm_nop (code);
	arm_nop (code);
	return code;
}

static guint8*
emit_aotconst (MonoCompile *cfg, guint8 *code, int dreg, guint32 patch_type, gconstpointer data)
{
	return emit_aotconst_full (cfg, NULL, code, NULL, dreg, patch_type, data);
}

/*
 * mono_arm_emit_aotconst:
 *
 *   Emit code to load an AOT constant into DREG. Usable from trampolines.
 */
guint8*
mono_arm_emit_aotconst (gpointer ji, guint8 *code, guint8 *code_start, int dreg, guint32 patch_type, gconstpointer data)
{
	return emit_aotconst_full (NULL, (MonoJumpInfo**)ji, code, code_start, dreg, patch_type, data);
}

gboolean
mono_arch_have_fast_tls (void)
{
#ifdef TARGET_IOS
	return FALSE;
#else
	return TRUE;
#endif
}

static guint8*
emit_tls_get (guint8 *code, int dreg, int tls_offset)
{
	arm_mrs (code, dreg, ARM_MRS_REG_TPIDR_EL0);
	if (tls_offset < 256) {
		arm_ldrx (code, dreg, dreg, tls_offset);
	} else {
		code = emit_addx_imm (code, dreg, dreg, tls_offset);
		arm_ldrx (code, dreg, dreg, 0);
	}
	return code;
}

static guint8*
emit_tls_set (guint8 *code, int sreg, int tls_offset)
{
	int tmpreg = ARMREG_IP0;

	g_assert (sreg != tmpreg);
	arm_mrs (code, tmpreg, ARM_MRS_REG_TPIDR_EL0);
	if (tls_offset < 256) {
		arm_strx (code, sreg, tmpreg, tls_offset);
	} else {
		code = emit_addx_imm (code, tmpreg, tmpreg, tls_offset);
		arm_strx (code, sreg, tmpreg, 0);
	}
	return code;
}

/*
 * Emits
 * - mov sp, fp
 * - ldp fp, lr, [sp], #stack_offset
 * Clobbers TEMP_REGS.
 */
__attribute__ ((__warn_unused_result__)) guint8*
mono_arm_emit_destroy_frame (guint8 *code, int stack_offset, guint64 temp_regs)
{
	// At least one of these registers must be available, or both.
	gboolean const temp0 = (temp_regs & (1 << ARMREG_IP0)) != 0;
	gboolean const temp1 = (temp_regs & (1 << ARMREG_IP1)) != 0;
	g_assert (temp0 || temp1);
	int const temp = temp0 ? ARMREG_IP0 : ARMREG_IP1;

	arm_movspx (code, ARMREG_SP, ARMREG_FP);

	if (arm_is_ldpx_imm (stack_offset)) {
		arm_ldpx_post (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, stack_offset);
	} else {
		arm_ldpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
		/* sp += stack_offset */
		if (temp0 && temp1) {
			code = emit_addx_sp_imm (code, stack_offset);
		} else {
			int imm = stack_offset;

			/* Can't use addx_sp_imm () since we can't clobber both ip0/ip1 */
			arm_addx_imm (code, temp, ARMREG_SP, 0);
			while (imm > 256) {
				arm_addx_imm (code, temp, temp, 256);
				imm -= 256;
			}
			arm_addx_imm (code, ARMREG_SP, temp, imm);
		}
	}
	return code;
}

#define is_call_imm(diff) ((gint)(diff) >= -33554432 && (gint)(diff) <= 33554431)

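/*
 * 33554432 is 2^25. b/bl encode a signed 26 bit word offset (a reach of
 * +-128 MB); treating the displacement as bytes, this check conservatively
 * accepts anything within +-32 MB, which is always encodable.
 */
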
static guint8*
emit_thunk (guint8 *code, gconstpointer target)
{
	guint8 *p = code;

	arm_ldrx_lit (code, ARMREG_IP0, code + 8);
	arm_brx (code, ARMREG_IP0);
	*(guint64*)code = (guint64)target;
	code += sizeof (guint64);

	mono_arch_flush_icache (p, code - p);
	return code;
}

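/*
 * The resulting thunk occupies THUNK_SIZE (16) bytes:
 *
 *   ldr   ip0, <literal below>   ; pc relative load
 *   br    ip0
 *   .quad target                 ; rewritten to retarget the thunk
 *
 * A zero first word marks a free slot, which is what create_thunk () below
 * scans for.
 */
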
static gpointer
create_thunk (MonoCompile *cfg, MonoDomain *domain, guchar *code, const guchar *target)
{
	MonoJitInfo *ji;
	MonoThunkJitInfo *info;
	guint8 *thunks, *p;
	int thunks_size;
	guint8 *orig_target;
	guint8 *target_thunk;

	if (!domain)
		domain = mono_domain_get ();

	if (cfg) {
		/*
		 * This can be called multiple times during JITting,
		 * save the current position in cfg->arch to avoid
		 * doing an O(n^2) search.
		 */
		if (!cfg->arch.thunks) {
			cfg->arch.thunks = cfg->thunks;
			cfg->arch.thunks_size = cfg->thunk_area;
		}
		thunks = cfg->arch.thunks;
		thunks_size = cfg->arch.thunks_size;
		if (!thunks_size) {
			g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, mono_method_full_name (cfg->method, TRUE));
			g_assert_not_reached ();
		}

		g_assert (*(guint32*)thunks == 0);
		emit_thunk (thunks, target);

		cfg->arch.thunks += THUNK_SIZE;
		cfg->arch.thunks_size -= THUNK_SIZE;

		return thunks;
	} else {
		ji = mini_jit_info_table_find (domain, (char*)code, NULL);
		g_assert (ji);
		info = mono_jit_info_get_thunk_info (ji);
		g_assert (info);

		thunks = (guint8*)ji->code_start + info->thunks_offset;
		thunks_size = info->thunks_size;

		orig_target = mono_arch_get_call_target (code + 4);

		mono_domain_lock (domain);

		target_thunk = NULL;
		if (orig_target >= thunks && orig_target < thunks + thunks_size) {
			/* The call already points to a thunk, because of trampolines etc. */
			target_thunk = orig_target;
		} else {
			for (p = thunks; p < thunks + thunks_size; p += THUNK_SIZE) {
				if (((guint32*)p) [0] == 0) {
					/* Free entry */
					target_thunk = p;
					break;
				} else if (((guint64*)p) [1] == (guint64)target) {
					/* Thunk already points to target */
					target_thunk = p;
					break;
				}
			}
		}

		//printf ("THUNK: %p %p %p\n", code, target, target_thunk);

		if (!target_thunk) {
			mono_domain_unlock (domain);
			g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, cfg ? mono_method_full_name (cfg->method, TRUE) : mono_method_full_name (jinfo_get_method (ji), TRUE));
			g_assert_not_reached ();
		}

		emit_thunk (target_thunk, target);

		mono_domain_unlock (domain);

		return target_thunk;
	}
}

static void
arm_patch_full (MonoCompile *cfg, MonoDomain *domain, guint8 *code, guint8 *target, int relocation)
{
	switch (relocation) {
	case MONO_R_ARM64_B:
		if (arm_is_bl_disp (code, target)) {
			arm_b (code, target);
		} else {
			gpointer thunk;

			thunk = create_thunk (cfg, domain, code, target);
			g_assert (arm_is_bl_disp (code, thunk));
			arm_b (code, thunk);
		}
		break;
	case MONO_R_ARM64_BCC: {
		int cond;

		cond = arm_get_bcc_cond (code);
		arm_bcc (code, cond, target);
		break;
	}
	case MONO_R_ARM64_CBZ:
		arm_set_cbz_target (code, target);
		break;
	case MONO_R_ARM64_IMM: {
		guint64 imm = (guint64)target;
		int dreg;

		/* emit_imm64_template () */
		dreg = arm_get_movzx_rd (code);
		arm_movzx (code, dreg, imm & 0xffff, 0);
		arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
		arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
		arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);
		break;
	}
	case MONO_R_ARM64_BL:
		if (arm_is_bl_disp (code, target)) {
			arm_bl (code, target);
		} else {
			gpointer thunk;

			thunk = create_thunk (cfg, domain, code, target);
			g_assert (arm_is_bl_disp (code, thunk));
			arm_bl (code, thunk);
		}
		break;
	default:
		g_assert_not_reached ();
	}
}

static void
arm_patch_rel (guint8 *code, guint8 *target, int relocation)
{
	arm_patch_full (NULL, NULL, code, target, relocation);
}

void
mono_arm_patch (guint8 *code, guint8 *target, int relocation)
{
	arm_patch_rel (code, target, relocation);
}

void
mono_arch_patch_code_new (MonoCompile *cfg, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gpointer target)
{
	guint8 *ip;

	ip = ji->ip.i + code;

	switch (ji->type) {
	case MONO_PATCH_INFO_METHOD_JUMP:
		/* ji->relocation is not set by the caller */
		arm_patch_full (cfg, domain, ip, (guint8*)target, MONO_R_ARM64_B);
		break;
	case MONO_PATCH_INFO_NONE:
		break;
	default:
		arm_patch_full (cfg, domain, ip, (guint8*)target, ji->relocation);
		break;
	}
}

void
mono_arch_flush_register_windows (void)
{
}

MonoMethod*
mono_arch_find_imt_method (host_mgreg_t *regs, guint8 *code)
{
	return (MonoMethod*)regs [MONO_ARCH_RGCTX_REG];
}

MonoVTable*
mono_arch_find_static_call_vtable (host_mgreg_t *regs, guint8 *code)
{
	return (MonoVTable*)regs [MONO_ARCH_RGCTX_REG];
}

host_mgreg_t
mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
{
	return ctx->regs [reg];
}

void
mono_arch_context_set_int_reg (MonoContext *ctx, int reg, host_mgreg_t val)
{
	ctx->regs [reg] = val;
}

/*
 * mono_arch_set_target:
 *
 *   Set the target architecture the JIT backend should generate code for, in the form
 * of a GNU target triplet. Only used in AOT mode.
 */
void
mono_arch_set_target (char *mtriple)
{
	if (strstr (mtriple, "darwin") || strstr (mtriple, "ios")) {
		ios_abi = TRUE;
	}
}

static void
add_general (CallInfo *cinfo, ArgInfo *ainfo, int size, gboolean sign)
{
	if (cinfo->gr >= PARAM_REGS) {
		ainfo->storage = ArgOnStack;
		if (ios_abi) {
			/* Assume size == align */
		} else {
			/* Put arguments into 8 byte aligned stack slots */
			size = 8;
			sign = FALSE;
		}
		cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
		ainfo->offset = cinfo->stack_usage;
		ainfo->slot_size = size;
		ainfo->sign = sign;
		cinfo->stack_usage += size;
	} else {
		ainfo->storage = ArgInIReg;
		ainfo->reg = cinfo->gr;
		cinfo->gr ++;
	}
}

static void
add_fp (CallInfo *cinfo, ArgInfo *ainfo, gboolean single)
{
	int size = single ? 4 : 8;

	if (cinfo->fr >= FP_PARAM_REGS) {
		ainfo->storage = single ? ArgOnStackR4 : ArgOnStackR8;
		if (ios_abi) {
			cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = size;
			cinfo->stack_usage += size;
		} else {
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = 8;
			/* Put arguments into 8 byte aligned stack slots */
			cinfo->stack_usage += 8;
		}
	} else {
		if (single)
			ainfo->storage = ArgInFRegR4;
		else
			ainfo->storage = ArgInFReg;
		ainfo->reg = cinfo->fr;
		cinfo->fr ++;
	}
}

static gboolean
is_hfa (MonoType *t, int *out_nfields, int *out_esize, int *field_offsets)
{
	MonoClass *klass;
	gpointer iter;
	MonoClassField *field;
	MonoType *ftype, *prev_ftype = NULL;
	int i, nfields = 0;

	klass = mono_class_from_mono_type_internal (t);
	iter = NULL;
	while ((field = mono_class_get_fields_internal (klass, &iter))) {
		if (field->type->attrs & FIELD_ATTRIBUTE_STATIC)
			continue;
		ftype = mono_field_get_type_internal (field);
		ftype = mini_get_underlying_type (ftype);

		if (MONO_TYPE_ISSTRUCT (ftype)) {
			int nested_nfields, nested_esize;
			int nested_field_offsets [16];

			if (!is_hfa (ftype, &nested_nfields, &nested_esize, nested_field_offsets))
				return FALSE;
			if (nested_esize == 4)
				ftype = m_class_get_byval_arg (mono_defaults.single_class);
			else
				ftype = m_class_get_byval_arg (mono_defaults.double_class);
			if (prev_ftype && prev_ftype->type != ftype->type)
				return FALSE;
			prev_ftype = ftype;
			for (i = 0; i < nested_nfields; ++i) {
				if (nfields + i < 4)
					field_offsets [nfields + i] = field->offset - MONO_ABI_SIZEOF (MonoObject) + nested_field_offsets [i];
			}
			nfields += nested_nfields;
		} else {
			if (!(!ftype->byref && (ftype->type == MONO_TYPE_R4 || ftype->type == MONO_TYPE_R8)))
				return FALSE;
			if (prev_ftype && prev_ftype->type != ftype->type)
				return FALSE;
			prev_ftype = ftype;
			if (nfields < 4)
				field_offsets [nfields] = field->offset - MONO_ABI_SIZEOF (MonoObject);
			nfields ++;
		}
	}
	if (nfields == 0 || nfields > 4)
		return FALSE;
	*out_nfields = nfields;
	*out_esize = prev_ftype->type == MONO_TYPE_R4 ? 4 : 8;
	return TRUE;
}

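/*
 * Worked example: a struct with two float fields (say struct { float x, y; })
 * is an HFA with nfields == 2, esize == 4 and field_offsets == { 0, 4 }, so it
 * can travel in two consecutive fp registers. Mixing float and double fields,
 * or having more than 4 elements, disqualifies the type.
 */
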
static void
add_valuetype (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
{
	int i, size, align_size, nregs, nfields, esize;
	int field_offsets [16];
	guint32 align;

	size = mini_type_stack_size_full (t, &align, cinfo->pinvoke);
	align_size = ALIGN_TO (size, 8);

	nregs = align_size / 8;
	if (is_hfa (t, &nfields, &esize, field_offsets)) {
		/*
		 * The struct might include nested float structs aligned at 8,
		 * so we need to keep track of the offsets of the individual fields.
		 */
		if (cinfo->fr + nfields <= FP_PARAM_REGS) {
			ainfo->storage = ArgHFA;
			ainfo->reg = cinfo->fr;
			ainfo->nregs = nfields;
			ainfo->size = size;
			ainfo->esize = esize;
			for (i = 0; i < nfields; ++i)
				ainfo->foffsets [i] = field_offsets [i];
			cinfo->fr += ainfo->nregs;
		} else {
			ainfo->nfregs_to_skip = FP_PARAM_REGS > cinfo->fr ? FP_PARAM_REGS - cinfo->fr : 0;
			cinfo->fr = FP_PARAM_REGS;
			size = ALIGN_TO (size, 8);
			ainfo->storage = ArgVtypeOnStack;
			cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, align);
			ainfo->offset = cinfo->stack_usage;
			ainfo->size = size;
			ainfo->hfa = TRUE;
			ainfo->nregs = nfields;
			ainfo->esize = esize;
			cinfo->stack_usage += size;
		}
		return;
	}

	if (align_size > 16) {
		ainfo->storage = ArgVtypeByRef;
		ainfo->size = size;
		return;
	}

	if (cinfo->gr + nregs > PARAM_REGS) {
		size = ALIGN_TO (size, 8);
		ainfo->storage = ArgVtypeOnStack;
		cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, align);
		ainfo->offset = cinfo->stack_usage;
		ainfo->size = size;
		cinfo->stack_usage += size;
		cinfo->gr = PARAM_REGS;
	} else {
		ainfo->storage = ArgVtypeInIRegs;
		ainfo->reg = cinfo->gr;
		ainfo->nregs = nregs;
		ainfo->size = size;
		cinfo->gr += nregs;
	}
}

static void
add_param (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
{
	MonoType *ptype;

	ptype = mini_get_underlying_type (t);
	switch (ptype->type) {
	case MONO_TYPE_I1:
		add_general (cinfo, ainfo, 1, TRUE);
		break;
	case MONO_TYPE_U1:
		add_general (cinfo, ainfo, 1, FALSE);
		break;
	case MONO_TYPE_I2:
		add_general (cinfo, ainfo, 2, TRUE);
		break;
	case MONO_TYPE_U2:
		add_general (cinfo, ainfo, 2, FALSE);
		break;
	case MONO_TYPE_I4:
		add_general (cinfo, ainfo, 4, TRUE);
		break;
	case MONO_TYPE_U4:
		add_general (cinfo, ainfo, 4, FALSE);
		break;
	case MONO_TYPE_I:
	case MONO_TYPE_U:
	case MONO_TYPE_PTR:
	case MONO_TYPE_FNPTR:
	case MONO_TYPE_OBJECT:
	case MONO_TYPE_U8:
	case MONO_TYPE_I8:
		add_general (cinfo, ainfo, 8, FALSE);
		break;
	case MONO_TYPE_R8:
		add_fp (cinfo, ainfo, FALSE);
		break;
	case MONO_TYPE_R4:
		add_fp (cinfo, ainfo, TRUE);
		break;
	case MONO_TYPE_VALUETYPE:
	case MONO_TYPE_TYPEDBYREF:
		add_valuetype (cinfo, ainfo, ptype);
		break;
	case MONO_TYPE_VOID:
		ainfo->storage = ArgNone;
		break;
	case MONO_TYPE_GENERICINST:
		if (!mono_type_generic_inst_is_valuetype (ptype)) {
			add_general (cinfo, ainfo, 8, FALSE);
		} else if (mini_is_gsharedvt_variable_type (ptype)) {
			/*
			 * Treat gsharedvt arguments as large vtypes
			 */
			ainfo->storage = ArgVtypeByRef;
			ainfo->gsharedvt = TRUE;
		} else {
			add_valuetype (cinfo, ainfo, ptype);
		}
		break;
	case MONO_TYPE_VAR:
	case MONO_TYPE_MVAR:
		g_assert (mini_is_gsharedvt_type (ptype));
		ainfo->storage = ArgVtypeByRef;
		ainfo->gsharedvt = TRUE;
		break;
	default:
		g_assert_not_reached ();
		break;
	}
}

/*
 * get_call_info:
 *
 *   Obtain information about a call according to the calling convention.
 */
static CallInfo*
get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
{
	CallInfo *cinfo;
	ArgInfo *ainfo;
	int n, pstart, pindex;

	n = sig->hasthis + sig->param_count;

	if (mp)
		cinfo = mono_mempool_alloc0 (mp, sizeof (CallInfo) + (sizeof (ArgInfo) * n));
	else
		cinfo = g_malloc0 (sizeof (CallInfo) + (sizeof (ArgInfo) * n));

	cinfo->nargs = n;
	cinfo->pinvoke = sig->pinvoke;

	/* Return value */
	add_param (cinfo, &cinfo->ret, sig->ret);
	if (cinfo->ret.storage == ArgVtypeByRef)
		cinfo->ret.reg = ARMREG_R8;
	/* Reset state */
	cinfo->gr = 0;
	cinfo->fr = 0;
	cinfo->stack_usage = 0;

	/* Parameters */
	if (sig->hasthis)
		add_general (cinfo, cinfo->args + 0, 8, FALSE);
	pstart = 0;
	for (pindex = pstart; pindex < sig->param_count; ++pindex) {
		ainfo = cinfo->args + sig->hasthis + pindex;

		if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
			/* Prevent implicit arguments and sig_cookie from
			   being passed in registers */
			cinfo->gr = PARAM_REGS;
			cinfo->fr = FP_PARAM_REGS;
			/* Emit the signature cookie just before the implicit arguments */
			add_param (cinfo, &cinfo->sig_cookie, mono_get_int_type ());
		}

		add_param (cinfo, ainfo, sig->params [pindex]);
		if (ainfo->storage == ArgVtypeByRef) {
			/* Pass the argument address in the next register */
			if (cinfo->gr >= PARAM_REGS) {
				ainfo->storage = ArgVtypeByRefOnStack;
				cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, 8);
				ainfo->offset = cinfo->stack_usage;
				cinfo->stack_usage += 8;
			} else {
				ainfo->reg = cinfo->gr;
				cinfo->gr ++;
			}
		}
	}

	/* Handle the case where there are no implicit arguments */
	if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
		/* Prevent implicit arguments and sig_cookie from
		   being passed in registers */
		cinfo->gr = PARAM_REGS;
		cinfo->fr = FP_PARAM_REGS;
		/* Emit the signature cookie just before the implicit arguments */
		add_param (cinfo, &cinfo->sig_cookie, mono_get_int_type ());
	}

	cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, MONO_ARCH_FRAME_ALIGNMENT);

	return cinfo;
}

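/*
 * Worked example (AAPCS64, non-iOS): for a static signature
 * (object, int, double, <two-float HFA>) the assignments are r0 and r1 for
 * the first two arguments (ArgInIReg), d0 for the double (ArgInFReg) and
 * d1/d2 for the HFA (ArgHFA), leaving stack_usage at 0.
 */
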
static int
arg_need_temp (ArgInfo *ainfo)
{
	if (ainfo->storage == ArgHFA && ainfo->esize == 4)
		return ainfo->size;
	return 0;
}

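/*
 * Only single precision HFAs need a temporary: each float occupies its own
 * 64 bit fregs slot in CallContext, while in memory the floats are packed
 * 4 bytes apart. arg_get_val ()/arg_set_val () below do that repacking
 * through the temporary.
 */
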
static gpointer
arg_get_storage (CallContext *ccontext, ArgInfo *ainfo)
{
	switch (ainfo->storage) {
	case ArgVtypeInIRegs:
	case ArgInIReg:
		return &ccontext->gregs [ainfo->reg];
	case ArgInFReg:
	case ArgInFRegR4:
	case ArgHFA:
		return &ccontext->fregs [ainfo->reg];
	case ArgOnStack:
	case ArgOnStackR4:
	case ArgOnStackR8:
	case ArgVtypeOnStack:
		return ccontext->stack + ainfo->offset;
	case ArgVtypeByRef:
		return (gpointer) ccontext->gregs [ainfo->reg];
	default:
		g_error ("Arg storage type not yet supported");
	}
}

static void
arg_get_val (CallContext *ccontext, ArgInfo *ainfo, gpointer dest)
{
	g_assert (arg_need_temp (ainfo));

	float *dest_float = (float*)dest;
	for (int k = 0; k < ainfo->nregs; k++) {
		*dest_float = *(float*)&ccontext->fregs [ainfo->reg + k];
		dest_float++;
	}
}

static void
arg_set_val (CallContext *ccontext, ArgInfo *ainfo, gpointer src)
{
	g_assert (arg_need_temp (ainfo));

	float *src_float = (float*)src;
	for (int k = 0; k < ainfo->nregs; k++) {
		*(float*)&ccontext->fregs [ainfo->reg + k] = *src_float;
		src_float++;
	}
}

/* Set arguments in the ccontext (for i2n entry) */
void
mono_arch_set_native_call_context_args (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
{
	const MonoEECallbacks *interp_cb = mini_get_interp_callbacks ();
	CallInfo *cinfo = get_call_info (NULL, sig);
	gpointer storage;
	ArgInfo *ainfo;

	memset (ccontext, 0, sizeof (CallContext));

	ccontext->stack_size = ALIGN_TO (cinfo->stack_usage, MONO_ARCH_FRAME_ALIGNMENT);
	if (ccontext->stack_size)
		ccontext->stack = (guint8*)g_calloc (1, ccontext->stack_size);

	if (sig->ret->type != MONO_TYPE_VOID) {
		ainfo = &cinfo->ret;
		if (ainfo->storage == ArgVtypeByRef) {
			storage = interp_cb->frame_arg_to_storage ((MonoInterpFrameHandle)frame, sig, -1);
			ccontext->gregs [cinfo->ret.reg] = (gsize)storage;
		}
	}

	g_assert (!sig->hasthis);

	for (int i = 0; i < sig->param_count; i++) {
		ainfo = &cinfo->args [i];

		if (ainfo->storage == ArgVtypeByRef) {
			ccontext->gregs [ainfo->reg] = (host_mgreg_t)interp_cb->frame_arg_to_storage ((MonoInterpFrameHandle)frame, sig, i);
			continue;
		}

		int temp_size = arg_need_temp (ainfo);

		if (temp_size)
			storage = alloca (temp_size); // FIXME? alloca in a loop
		else
			storage = arg_get_storage (ccontext, ainfo);

		interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, i, storage);
		if (temp_size)
			arg_set_val (ccontext, ainfo, storage);
	}

	g_free (cinfo);
}

/* Set return value in the ccontext (for n2i return) */
void
mono_arch_set_native_call_context_ret (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
{
	const MonoEECallbacks *interp_cb;
	CallInfo *cinfo;
	gpointer storage;
	ArgInfo *ainfo;

	if (sig->ret->type == MONO_TYPE_VOID)
		return;

	interp_cb = mini_get_interp_callbacks ();
	cinfo = get_call_info (NULL, sig);
	ainfo = &cinfo->ret;

	if (ainfo->storage != ArgVtypeByRef) {
		int temp_size = arg_need_temp (ainfo);

		if (temp_size)
			storage = alloca (temp_size);
		else
			storage = arg_get_storage (ccontext, ainfo);
		memset (ccontext, 0, sizeof (CallContext)); // FIXME
		interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, storage);
		if (temp_size)
			arg_set_val (ccontext, ainfo, storage);
	}

	g_free (cinfo);
}

/* Gets the arguments from ccontext (for n2i entry) */
void
mono_arch_get_native_call_context_args (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
{
	const MonoEECallbacks *interp_cb = mini_get_interp_callbacks ();
	CallInfo *cinfo = get_call_info (NULL, sig);
	gpointer storage;
	ArgInfo *ainfo;

	if (sig->ret->type != MONO_TYPE_VOID) {
		ainfo = &cinfo->ret;
		if (ainfo->storage == ArgVtypeByRef) {
			storage = (gpointer) ccontext->gregs [cinfo->ret.reg];
			interp_cb->frame_arg_set_storage ((MonoInterpFrameHandle)frame, sig, -1, storage);
		}
	}

	for (int i = 0; i < sig->param_count + sig->hasthis; i++) {
		ainfo = &cinfo->args [i];
		int temp_size = arg_need_temp (ainfo);

		if (temp_size) {
			storage = alloca (temp_size); // FIXME? alloca in a loop
			arg_get_val (ccontext, ainfo, storage);
		} else {
			storage = arg_get_storage (ccontext, ainfo);
		}
		interp_cb->data_to_frame_arg ((MonoInterpFrameHandle)frame, sig, i, storage);
	}

	g_free (cinfo);
}

/* Gets the return value from ccontext (for i2n exit) */
void
mono_arch_get_native_call_context_ret (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
{
	const MonoEECallbacks *interp_cb;
	CallInfo *cinfo;
	ArgInfo *ainfo;
	gpointer storage;

	if (sig->ret->type == MONO_TYPE_VOID)
		return;

	interp_cb = mini_get_interp_callbacks ();
	cinfo = get_call_info (NULL, sig);
	ainfo = &cinfo->ret;

	if (ainfo->storage != ArgVtypeByRef) {
		int temp_size = arg_need_temp (ainfo);

		if (temp_size) {
			storage = alloca (temp_size);
			arg_get_val (ccontext, ainfo, storage);
		} else {
			storage = arg_get_storage (ccontext, ainfo);
		}
		interp_cb->data_to_frame_arg ((MonoInterpFrameHandle)frame, sig, -1, storage);
	}

	g_free (cinfo);
}

typedef struct {
	MonoMethodSignature *sig;
	CallInfo *cinfo;
	MonoType *rtype;
	MonoType **param_types;
	int n_fpargs, n_fpret, nullable_area;
} ArchDynCallInfo;

static gboolean
dyn_call_supported (CallInfo *cinfo, MonoMethodSignature *sig)
{
	int i;

	// FIXME: Add more cases
	switch (cinfo->ret.storage) {
	case ArgNone:
	case ArgInIReg:
	case ArgInFReg:
	case ArgInFRegR4:
	case ArgVtypeByRef:
		break;
	case ArgVtypeInIRegs:
		if (cinfo->ret.nregs > 2)
			return FALSE;
		break;
	case ArgHFA:
		break;
	default:
		return FALSE;
	}

	for (i = 0; i < cinfo->nargs; ++i) {
		ArgInfo *ainfo = &cinfo->args [i];

		switch (ainfo->storage) {
		case ArgInIReg:
		case ArgVtypeInIRegs:
		case ArgInFReg:
		case ArgInFRegR4:
		case ArgHFA:
		case ArgVtypeByRef:
		case ArgVtypeByRefOnStack:
		case ArgOnStack:
		case ArgVtypeOnStack:
			break;
		default:
			return FALSE;
		}
	}

	return TRUE;
}

MonoDynCallInfo*
mono_arch_dyn_call_prepare (MonoMethodSignature *sig)
{
	ArchDynCallInfo *info;
	CallInfo *cinfo;
	int i, aindex;

	cinfo = get_call_info (NULL, sig);

	if (!dyn_call_supported (cinfo, sig)) {
		g_free (cinfo);
		return NULL;
	}

	info = g_new0 (ArchDynCallInfo, 1);
	// FIXME: Preprocess the info to speed up start_dyn_call ()
	info->sig = sig;
	info->cinfo = cinfo;
	info->rtype = mini_get_underlying_type (sig->ret);
	info->param_types = g_new0 (MonoType*, sig->param_count);
	for (i = 0; i < sig->param_count; ++i)
		info->param_types [i] = mini_get_underlying_type (sig->params [i]);

	switch (cinfo->ret.storage) {
	case ArgInFReg:
	case ArgInFRegR4:
		info->n_fpret = 1;
		break;
	case ArgHFA:
		info->n_fpret = cinfo->ret.nregs;
		break;
	default:
		break;
	}

	for (aindex = 0; aindex < sig->param_count; aindex++) {
		MonoType *t = info->param_types [aindex];

		if (t->byref)
			continue;

		switch (t->type) {
		case MONO_TYPE_GENERICINST:
			if (t->type == MONO_TYPE_GENERICINST && mono_class_is_nullable (mono_class_from_mono_type_internal (t))) {
				MonoClass *klass = mono_class_from_mono_type_internal (t);
				int size;

				/* Nullables need a temporary buffer, it's stored at the end of DynCallArgs.regs after the stack args */
				size = mono_class_value_size (klass, NULL);
				info->nullable_area += size;
			}
			break;
		default:
			break;
		}
	}

	return (MonoDynCallInfo*)info;
}

void
mono_arch_dyn_call_free (MonoDynCallInfo *info)
{
	ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;

	g_free (ainfo->cinfo);
	g_free (ainfo->param_types);
	g_free (ainfo);
}

int
mono_arch_dyn_call_get_buf_size (MonoDynCallInfo *info)
{
	ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;

	g_assert (ainfo->cinfo->stack_usage % MONO_ARCH_FRAME_ALIGNMENT == 0);
	return sizeof (DynCallArgs) + ainfo->cinfo->stack_usage + ainfo->nullable_area;
}

static double
bitcast_r4_to_r8 (float f)
{
	float *p = &f;

	return *(double*)p;
}

static float
bitcast_r8_to_r4 (double f)
{
	double *p = &f;

	return *(float*)p;
}

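/*
 * These are bit pattern copies, not value conversions: DynCallArgs.fpregs
 * stores every fp argument in a full 64 bit slot, so an R4 value is moved in
 * and out of the low 32 bits of its slot (on little endian) without any
 * rounding.
 */
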
void
mono_arch_start_dyn_call (MonoDynCallInfo *info, gpointer **args, guint8 *ret, guint8 *buf)
{
	ArchDynCallInfo *dinfo = (ArchDynCallInfo*)info;
	DynCallArgs *p = (DynCallArgs*)buf;
	int aindex, arg_index, greg, i, pindex;
	MonoMethodSignature *sig = dinfo->sig;
	CallInfo *cinfo = dinfo->cinfo;
	int buffer_offset = 0;
	guint8 *nullable_buffer;

	p->res = 0;
	p->ret = ret;
	p->n_fpargs = dinfo->n_fpargs;
	p->n_fpret = dinfo->n_fpret;
	p->n_stackargs = cinfo->stack_usage / sizeof (host_mgreg_t);

	arg_index = 0;
	greg = 0;
	pindex = 0;

	/* Stored after the stack arguments */
	nullable_buffer = (guint8*)&(p->regs [PARAM_REGS + 1 + (cinfo->stack_usage / sizeof (host_mgreg_t))]);

	if (sig->hasthis)
		p->regs [greg ++] = (host_mgreg_t)*(args [arg_index ++]);

	if (cinfo->ret.storage == ArgVtypeByRef)
		p->regs [ARMREG_R8] = (host_mgreg_t)ret;

	for (aindex = pindex; aindex < sig->param_count; aindex++) {
		MonoType *t = dinfo->param_types [aindex];
		gpointer *arg = args [arg_index ++];
		ArgInfo *ainfo = &cinfo->args [aindex + sig->hasthis];
		int slot = -1;

		if (ainfo->storage == ArgOnStack || ainfo->storage == ArgVtypeOnStack || ainfo->storage == ArgVtypeByRefOnStack) {
			slot = PARAM_REGS + 1 + (ainfo->offset / sizeof (host_mgreg_t));
		} else {
			slot = ainfo->reg;
		}

		if (t->byref) {
			p->regs [slot] = (host_mgreg_t)*arg;
			continue;
		}

		if (ios_abi && ainfo->storage == ArgOnStack) {
			guint8 *stack_arg = (guint8*)&(p->regs [PARAM_REGS + 1]) + ainfo->offset;
			gboolean handled = TRUE;

			/* Special case arguments smaller than 1 machine word */
			switch (t->type) {
			case MONO_TYPE_U1:
				*(guint8*)stack_arg = *(guint8*)arg;
				break;
			case MONO_TYPE_I1:
				*(gint8*)stack_arg = *(gint8*)arg;
				break;
			case MONO_TYPE_U2:
				*(guint16*)stack_arg = *(guint16*)arg;
				break;
			case MONO_TYPE_I2:
				*(gint16*)stack_arg = *(gint16*)arg;
				break;
			case MONO_TYPE_I4:
				*(gint32*)stack_arg = *(gint32*)arg;
				break;
			case MONO_TYPE_U4:
				*(guint32*)stack_arg = *(guint32*)arg;
				break;
			default:
				handled = FALSE;
				break;
			}
			if (handled)
				continue;
		}

		switch (t->type) {
		case MONO_TYPE_OBJECT:
		case MONO_TYPE_PTR:
		case MONO_TYPE_I:
		case MONO_TYPE_U:
		case MONO_TYPE_I8:
		case MONO_TYPE_U8:
			p->regs [slot] = (host_mgreg_t)*arg;
			break;
		case MONO_TYPE_U1:
			p->regs [slot] = *(guint8*)arg;
			break;
		case MONO_TYPE_I1:
			p->regs [slot] = *(gint8*)arg;
			break;
		case MONO_TYPE_I2:
			p->regs [slot] = *(gint16*)arg;
			break;
		case MONO_TYPE_U2:
			p->regs [slot] = *(guint16*)arg;
			break;
		case MONO_TYPE_I4:
			p->regs [slot] = *(gint32*)arg;
			break;
		case MONO_TYPE_U4:
			p->regs [slot] = *(guint32*)arg;
			break;
		case MONO_TYPE_R4:
			p->fpregs [ainfo->reg] = bitcast_r4_to_r8 (*(float*)arg);
			p->n_fpargs ++;
			break;
		case MONO_TYPE_R8:
			p->fpregs [ainfo->reg] = *(double*)arg;
			p->n_fpargs ++;
			break;
		case MONO_TYPE_GENERICINST:
			if (MONO_TYPE_IS_REFERENCE (t)) {
				p->regs [slot] = (host_mgreg_t)*arg;
				break;
			} else {
				if (t->type == MONO_TYPE_GENERICINST && mono_class_is_nullable (mono_class_from_mono_type_internal (t))) {
					MonoClass *klass = mono_class_from_mono_type_internal (t);
					guint8 *nullable_buf;
					int size;

					/*
					 * Use p->buffer as a temporary buffer since the data needs to be available after this call
					 * if the nullable param is passed by ref.
					 */
					size = mono_class_value_size (klass, NULL);
					nullable_buf = nullable_buffer + buffer_offset;
					buffer_offset += size;
					g_assert (buffer_offset <= dinfo->nullable_area);

					/* The argument pointed to by arg is either a boxed vtype or null */
					mono_nullable_init (nullable_buf, (MonoObject*)arg, klass);

					arg = (gpointer*)nullable_buf;
					/* Fall through */
				} else {
					/* Fall through */
				}
			}
		case MONO_TYPE_VALUETYPE:
			switch (ainfo->storage) {
			case ArgVtypeInIRegs:
				for (i = 0; i < ainfo->nregs; ++i)
					p->regs [slot ++] = ((host_mgreg_t*)arg) [i];
				break;
			case ArgHFA:
				if (ainfo->esize == 4) {
					for (i = 0; i < ainfo->nregs; ++i)
						p->fpregs [ainfo->reg + i] = bitcast_r4_to_r8 (((float*)arg) [ainfo->foffsets [i] / 4]);
				} else {
					for (i = 0; i < ainfo->nregs; ++i)
						p->fpregs [ainfo->reg + i] = ((double*)arg) [ainfo->foffsets [i] / 8];
				}
				p->n_fpargs += ainfo->nregs;
				break;
			case ArgVtypeByRef:
			case ArgVtypeByRefOnStack:
				p->regs [slot] = (host_mgreg_t)arg;
				break;
			case ArgVtypeOnStack:
				for (i = 0; i < ainfo->size / 8; ++i)
					p->regs [slot ++] = ((host_mgreg_t*)arg) [i];
				break;
			default:
				g_assert_not_reached ();
				break;
			}
			break;
		default:
			g_assert_not_reached ();
		}
	}
}

void
mono_arch_finish_dyn_call (MonoDynCallInfo *info, guint8 *buf)
{
	ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;
	CallInfo *cinfo = ainfo->cinfo;
	DynCallArgs *args = (DynCallArgs*)buf;
	MonoType *ptype = ainfo->rtype;
	guint8 *ret = args->ret;
	host_mgreg_t res = args->res;
	host_mgreg_t res2 = args->res2;
	int i;

	if (cinfo->ret.storage == ArgVtypeByRef)
		return;

	switch (ptype->type) {
	case MONO_TYPE_VOID:
		*(gpointer*)ret = NULL;
		break;
	case MONO_TYPE_OBJECT:
	case MONO_TYPE_I:
	case MONO_TYPE_U:
	case MONO_TYPE_PTR:
		*(gpointer*)ret = (gpointer)res;
		break;
	case MONO_TYPE_I1:
		*(gint8*)ret = res;
		break;
	case MONO_TYPE_U1:
		*(guint8*)ret = res;
		break;
	case MONO_TYPE_I2:
		*(gint16*)ret = res;
		break;
	case MONO_TYPE_U2:
		*(guint16*)ret = res;
		break;
	case MONO_TYPE_I4:
		*(gint32*)ret = res;
		break;
	case MONO_TYPE_U4:
		*(guint32*)ret = res;
		break;
	case MONO_TYPE_I8:
	case MONO_TYPE_U8:
		*(guint64*)ret = res;
		break;
	case MONO_TYPE_R4:
		*(float*)ret = bitcast_r8_to_r4 (args->fpregs [0]);
		break;
	case MONO_TYPE_R8:
		*(double*)ret = args->fpregs [0];
		break;
	case MONO_TYPE_GENERICINST:
		if (MONO_TYPE_IS_REFERENCE (ptype)) {
			*(gpointer*)ret = (gpointer)res;
			break;
		} else {
			/* Fall through */
		}
	case MONO_TYPE_VALUETYPE:
		switch (ainfo->cinfo->ret.storage) {
		case ArgVtypeInIRegs:
			*(host_mgreg_t*)ret = res;
			if (ainfo->cinfo->ret.nregs > 1)
				((host_mgreg_t*)ret) [1] = res2;
			break;
		case ArgHFA:
			/* Use the same area for returning fp values */
			if (cinfo->ret.esize == 4) {
				for (i = 0; i < cinfo->ret.nregs; ++i)
					((float*)ret) [cinfo->ret.foffsets [i] / 4] = bitcast_r8_to_r4 (args->fpregs [i]);
			} else {
				for (i = 0; i < cinfo->ret.nregs; ++i)
					((double*)ret) [cinfo->ret.foffsets [i] / 8] = args->fpregs [i];
			}
			break;
		default:
			g_assert_not_reached ();
			break;
		}
		break;
	default:
		g_assert_not_reached ();
	}
}

#if __APPLE__
G_BEGIN_DECLS
void sys_icache_invalidate (void *start, size_t len);
G_END_DECLS
#endif

void
mono_arch_flush_icache (guint8 *code, gint size)
{
#ifndef MONO_CROSS_COMPILE
#if __APPLE__
	sys_icache_invalidate (code, size);
#else
	/* Don't rely on GCC's __clear_cache implementation, as it caches
	 * the icache/dcache cache line sizes, which can vary between cores on
	 * big.LITTLE architectures. */
	guint64 end = (guint64) (code + size);
	guint64 addr;
	/* Always use a cache line size of 4 bytes, as this code isn't perf
	 * critical anyway. Reading the cache line size from a machine register
	 * can be racy on a big.LITTLE architecture if the cores don't have the
	 * same cache line sizes. */
	const size_t icache_line_size = 4;
	const size_t dcache_line_size = 4;

	addr = (guint64) code & ~(guint64) (dcache_line_size - 1);
	for (; addr < end; addr += dcache_line_size)
		asm volatile("dc civac, %0" : : "r" (addr) : "memory");
	asm volatile("dsb ish" : : : "memory");

	addr = (guint64) code & ~(guint64) (icache_line_size - 1);
	for (; addr < end; addr += icache_line_size)
		asm volatile("ic ivau, %0" : : "r" (addr) : "memory");

	asm volatile ("dsb ish" : : : "memory");
	asm volatile ("isb" : : : "memory");
#endif
#endif
}

#ifndef DISABLE_JIT

gboolean
mono_arch_opcode_needs_emulation (MonoCompile *cfg, int opcode)
{
	NOT_IMPLEMENTED;
	return FALSE;
}

GList *
mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
{
	GList *vars = NULL;
	int i;

	for (i = 0; i < cfg->num_varinfo; i++) {
		MonoInst *ins = cfg->varinfo [i];
		MonoMethodVar *vmv = MONO_VARINFO (cfg, i);

		/* unused vars */
		if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
			continue;

		if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) ||
		    (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
			continue;

		if (mono_is_regsize_var (ins->inst_vtype)) {
			g_assert (MONO_VARINFO (cfg, i)->reg == -1);
			g_assert (i == vmv->idx);
			vars = g_list_prepend (vars, vmv);
		}
	}

	vars = mono_varlist_sort (cfg, vars, 0);

	return vars;
}

GList *
mono_arch_get_global_int_regs (MonoCompile *cfg)
{
	GList *regs = NULL;
	int i;

	/* r28 is reserved for cfg->arch.args_reg */
	/* r27 is reserved for the imt argument */
	for (i = ARMREG_R19; i <= ARMREG_R26; ++i)
		regs = g_list_prepend (regs, GUINT_TO_POINTER (i));

	return regs;
}

guint32
mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
{
	MonoInst *ins = cfg->varinfo [vmv->idx];

	if (ins->opcode == OP_ARG)
		return 1;
	else
		return 2;
}

void
mono_arch_create_vars (MonoCompile *cfg)
{
	MonoMethodSignature *sig;
	CallInfo *cinfo;

	sig = mono_method_signature_internal (cfg->method);
	if (!cfg->arch.cinfo)
		cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
	cinfo = cfg->arch.cinfo;

	if (cinfo->ret.storage == ArgVtypeByRef) {
		cfg->vret_addr = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
		cfg->vret_addr->flags |= MONO_INST_VOLATILE;
	}

	if (cfg->gen_sdb_seq_points) {
		MonoInst *ins;

		if (cfg->compile_aot) {
			ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
			ins->flags |= MONO_INST_VOLATILE;
			cfg->arch.seq_point_info_var = ins;
		}

		ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
		ins->flags |= MONO_INST_VOLATILE;
		cfg->arch.ss_tramp_var = ins;

		ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
		ins->flags |= MONO_INST_VOLATILE;
		cfg->arch.bp_tramp_var = ins;
	}

	if (cfg->method->save_lmf) {
		cfg->create_lmf_var = TRUE;
		cfg->lmf_ir = TRUE;
	}
}

2129 void
2130 mono_arch_allocate_vars (MonoCompile *cfg)
2132 MonoMethodSignature *sig;
2133 MonoInst *ins;
2134 CallInfo *cinfo;
2135 ArgInfo *ainfo;
2136 int i, offset, size, align;
2137 guint32 locals_stack_size, locals_stack_align;
2138 gint32 *offsets;
2141 * Allocate arguments and locals to either register (OP_REGVAR) or to a stack slot (OP_REGOFFSET).
2142 * Compute cfg->stack_offset and update cfg->used_int_regs.
2145 sig = mono_method_signature_internal (cfg->method);
2147 if (!cfg->arch.cinfo)
2148 cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
2149 cinfo = cfg->arch.cinfo;
2152 * The ARM64 ABI always uses a frame pointer.
2153 * The instruction set prefers positive offsets, so fp points to the bottom of the
2154 * frame, and stack slots are at positive offsets.
2155 * If some arguments are received on the stack, their offsets relative to fp can
2156 * not be computed right now because the stack frame might grow due to spilling
2157 * done by the local register allocator. To solve this, we reserve a register
2158 * which points to them.
2159 * The stack frame looks like this:
2160 * args_reg -> <bottom of parent frame>
2161 * <locals etc>
2162 * fp -> <saved fp+lr>
2163 * sp -> <localloc/params area>
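/*
 * In other words, the code below walks the frame upwards from fp: 16 bytes
 * for the saved fp/lr pair, then the callee-saved registers (when no LMF is
 * used), the return value area, the argument slots, the sequence-point
 * variables, and finally the locals, with the total rounded up to
 * MONO_ARCH_FRAME_ALIGNMENT before it becomes cfg->stack_offset.
 */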
2165 cfg->frame_reg = ARMREG_FP;
2166 cfg->flags |= MONO_CFG_HAS_SPILLUP;
2167 offset = 0;
2169 /* Saved fp+lr */
2170 offset += 16;
2172 if (cinfo->stack_usage) {
2173 g_assert (!(cfg->used_int_regs & (1 << ARMREG_R28)));
2174 cfg->arch.args_reg = ARMREG_R28;
2175 cfg->used_int_regs |= 1 << ARMREG_R28;
2178 if (cfg->method->save_lmf) {
2179 /* The LMF var is allocated normally */
2180 } else {
2181 /* Callee saved regs */
2182 cfg->arch.saved_gregs_offset = offset;
2183 for (i = 0; i < 32; ++i)
2184 if ((MONO_ARCH_CALLEE_SAVED_REGS & (1 << i)) && (cfg->used_int_regs & (1 << i)))
2185 offset += 8;
2188 /* Return value */
2189 switch (cinfo->ret.storage) {
2190 case ArgNone:
2191 break;
2192 case ArgInIReg:
2193 case ArgInFReg:
2194 case ArgInFRegR4:
2195 cfg->ret->opcode = OP_REGVAR;
2196 cfg->ret->dreg = cinfo->ret.reg;
2197 break;
2198 case ArgVtypeInIRegs:
2199 case ArgHFA:
2200 /* Allocate a local to hold the result, the epilog will copy it to the correct place */
2201 cfg->ret->opcode = OP_REGOFFSET;
2202 cfg->ret->inst_basereg = cfg->frame_reg;
2203 cfg->ret->inst_offset = offset;
2204 if (cinfo->ret.storage == ArgHFA)
2205 // FIXME:
2206 offset += 64;
2207 else
2208 offset += 16;
2209 break;
2210 case ArgVtypeByRef:
2211 /* This variable will be initialized in the prolog from R8 */
2212 cfg->vret_addr->opcode = OP_REGOFFSET;
2213 cfg->vret_addr->inst_basereg = cfg->frame_reg;
2214 cfg->vret_addr->inst_offset = offset;
2215 offset += 8;
2216 if (G_UNLIKELY (cfg->verbose_level > 1)) {
2217 printf ("vret_addr =");
2218 mono_print_ins (cfg->vret_addr);
2220 break;
2221 default:
2222 g_assert_not_reached ();
2223 break;
2226 /* Arguments */
2227 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2228 ainfo = cinfo->args + i;
2230 ins = cfg->args [i];
2231 if (ins->opcode == OP_REGVAR)
2232 continue;
2234 ins->opcode = OP_REGOFFSET;
2235 ins->inst_basereg = cfg->frame_reg;
2237 switch (ainfo->storage) {
2238 case ArgInIReg:
2239 case ArgInFReg:
2240 case ArgInFRegR4:
2241 // FIXME: Use nregs/size
2242 /* These will be copied to the stack in the prolog */
2243 ins->inst_offset = offset;
2244 offset += 8;
2245 break;
2246 case ArgOnStack:
2247 case ArgOnStackR4:
2248 case ArgOnStackR8:
2249 case ArgVtypeOnStack:
2250 /* These are in the parent frame */
2251 g_assert (cfg->arch.args_reg);
2252 ins->inst_basereg = cfg->arch.args_reg;
2253 ins->inst_offset = ainfo->offset;
2254 break;
2255 case ArgVtypeInIRegs:
2256 case ArgHFA:
2257 ins->opcode = OP_REGOFFSET;
2258 ins->inst_basereg = cfg->frame_reg;
2259 /* These arguments are saved to the stack in the prolog */
2260 ins->inst_offset = offset;
2261 if (cfg->verbose_level >= 2)
2262 printf ("arg %d allocated to %s+0x%0x.\n", i, mono_arch_regname (ins->inst_basereg), (int)ins->inst_offset);
2263 if (ainfo->storage == ArgHFA)
2264 // FIXME:
2265 offset += 64;
2266 else
2267 offset += 16;
2268 break;
2269 case ArgVtypeByRefOnStack: {
2270 MonoInst *vtaddr;
2272 if (ainfo->gsharedvt) {
2273 ins->opcode = OP_REGOFFSET;
2274 ins->inst_basereg = cfg->arch.args_reg;
2275 ins->inst_offset = ainfo->offset;
2276 break;
2279 /* The vtype address is in the parent frame */
2280 g_assert (cfg->arch.args_reg);
2281 MONO_INST_NEW (cfg, vtaddr, 0);
2282 vtaddr->opcode = OP_REGOFFSET;
2283 vtaddr->inst_basereg = cfg->arch.args_reg;
2284 vtaddr->inst_offset = ainfo->offset;
2286 /* Need an indirection */
2287 ins->opcode = OP_VTARG_ADDR;
2288 ins->inst_left = vtaddr;
2289 break;
2291 case ArgVtypeByRef: {
2292 MonoInst *vtaddr;
2294 if (ainfo->gsharedvt) {
2295 ins->opcode = OP_REGOFFSET;
2296 ins->inst_basereg = cfg->frame_reg;
2297 ins->inst_offset = offset;
2298 offset += 8;
2299 break;
2302 /* The vtype address is in a register, will be copied to the stack in the prolog */
2303 MONO_INST_NEW (cfg, vtaddr, 0);
2304 vtaddr->opcode = OP_REGOFFSET;
2305 vtaddr->inst_basereg = cfg->frame_reg;
2306 vtaddr->inst_offset = offset;
2307 offset += 8;
2309 /* Need an indirection */
2310 ins->opcode = OP_VTARG_ADDR;
2311 ins->inst_left = vtaddr;
2312 break;
2314 default:
2315 g_assert_not_reached ();
2316 break;
2320 /* Allocate these first so they have a small offset, OP_SEQ_POINT depends on this */
2321 // FIXME: Allocate these to registers
2322 ins = cfg->arch.seq_point_info_var;
2323 if (ins) {
2324 size = 8;
2325 align = 8;
2326 offset += align - 1;
2327 offset &= ~(align - 1);
2328 ins->opcode = OP_REGOFFSET;
2329 ins->inst_basereg = cfg->frame_reg;
2330 ins->inst_offset = offset;
2331 offset += size;
2333 ins = cfg->arch.ss_tramp_var;
2334 if (ins) {
2335 size = 8;
2336 align = 8;
2337 offset += align - 1;
2338 offset &= ~(align - 1);
2339 ins->opcode = OP_REGOFFSET;
2340 ins->inst_basereg = cfg->frame_reg;
2341 ins->inst_offset = offset;
2342 offset += size;
2344 ins = cfg->arch.bp_tramp_var;
2345 if (ins) {
2346 size = 8;
2347 align = 8;
2348 offset += align - 1;
2349 offset &= ~(align - 1);
2350 ins->opcode = OP_REGOFFSET;
2351 ins->inst_basereg = cfg->frame_reg;
2352 ins->inst_offset = offset;
2353 offset += size;
2356 /* Locals */
2357 offsets = mono_allocate_stack_slots (cfg, FALSE, &locals_stack_size, &locals_stack_align);
2358 if (locals_stack_align)
2359 offset = ALIGN_TO (offset, locals_stack_align);
2361 for (i = cfg->locals_start; i < cfg->num_varinfo; i++) {
2362 if (offsets [i] != -1) {
2363 ins = cfg->varinfo [i];
2364 ins->opcode = OP_REGOFFSET;
2365 ins->inst_basereg = cfg->frame_reg;
2366 ins->inst_offset = offset + offsets [i];
2367 //printf ("allocated local %d to ", i); mono_print_tree_nl (ins);
2370 offset += locals_stack_size;
2372 offset = ALIGN_TO (offset, MONO_ARCH_FRAME_ALIGNMENT);
2374 cfg->stack_offset = offset;
2377 #ifdef ENABLE_LLVM
2378 LLVMCallInfo*
2379 mono_arch_get_llvm_call_info (MonoCompile *cfg, MonoMethodSignature *sig)
2381 int i, n;
2382 CallInfo *cinfo;
2383 ArgInfo *ainfo;
2384 LLVMCallInfo *linfo;
2386 n = sig->param_count + sig->hasthis;
2388 cinfo = get_call_info (cfg->mempool, sig);
2390 linfo = mono_mempool_alloc0 (cfg->mempool, sizeof (LLVMCallInfo) + (sizeof (LLVMArgInfo) * n));
2392 switch (cinfo->ret.storage) {
2393 case ArgInIReg:
2394 case ArgInFReg:
2395 case ArgInFRegR4:
2396 case ArgNone:
2397 break;
2398 case ArgVtypeByRef:
2399 linfo->ret.storage = LLVMArgVtypeByRef;
2400 break;
2402 // FIXME: This doesn't work yet since the llvm backend represents these types as an i8
2403 // array which is returned in int regs
2405 case ArgHFA:
2406 linfo->ret.storage = LLVMArgFpStruct;
2407 linfo->ret.nslots = cinfo->ret.nregs;
2408 linfo->ret.esize = cinfo->ret.esize;
2409 break;
2410 case ArgVtypeInIRegs:
2411 /* LLVM models this by returning an int */
2412 linfo->ret.storage = LLVMArgVtypeAsScalar;
2413 linfo->ret.nslots = cinfo->ret.nregs;
2414 linfo->ret.esize = cinfo->ret.esize;
2415 break;
2416 default:
2417 g_assert_not_reached ();
2418 break;
2421 for (i = 0; i < n; ++i) {
2422 LLVMArgInfo *lainfo = &linfo->args [i];
2424 ainfo = cinfo->args + i;
2426 lainfo->storage = LLVMArgNone;
2428 switch (ainfo->storage) {
2429 case ArgInIReg:
2430 case ArgInFReg:
2431 case ArgInFRegR4:
2432 case ArgOnStack:
2433 case ArgOnStackR4:
2434 case ArgOnStackR8:
2435 lainfo->storage = LLVMArgNormal;
2436 break;
2437 case ArgVtypeByRef:
2438 case ArgVtypeByRefOnStack:
2439 lainfo->storage = LLVMArgVtypeByRef;
2440 break;
2441 case ArgHFA: {
2442 int j;
2444 lainfo->storage = LLVMArgAsFpArgs;
2445 lainfo->nslots = ainfo->nregs;
2446 lainfo->esize = ainfo->esize;
2447 for (j = 0; j < ainfo->nregs; ++j)
2448 lainfo->pair_storage [j] = LLVMArgInFPReg;
2449 break;
2451 case ArgVtypeInIRegs:
2452 lainfo->storage = LLVMArgAsIArgs;
2453 lainfo->nslots = ainfo->nregs;
2454 break;
2455 case ArgVtypeOnStack:
2456 if (ainfo->hfa) {
2457 int j;
2458 /* Same as above */
2459 lainfo->storage = LLVMArgAsFpArgs;
2460 lainfo->nslots = ainfo->nregs;
2461 lainfo->esize = ainfo->esize;
2462 lainfo->ndummy_fpargs = ainfo->nfregs_to_skip;
2463 for (j = 0; j < ainfo->nregs; ++j)
2464 lainfo->pair_storage [j] = LLVMArgInFPReg;
2465 } else {
2466 lainfo->storage = LLVMArgAsIArgs;
2467 lainfo->nslots = ainfo->size / 8;
2469 break;
2470 default:
2471 g_assert_not_reached ();
2472 break;
2476 return linfo;
2478 #endif
2480 static void
2481 add_outarg_reg (MonoCompile *cfg, MonoCallInst *call, ArgStorage storage, int reg, MonoInst *arg)
2483 MonoInst *ins;
2485 switch (storage) {
2486 case ArgInIReg:
2487 MONO_INST_NEW (cfg, ins, OP_MOVE);
2488 ins->dreg = mono_alloc_ireg_copy (cfg, arg->dreg);
2489 ins->sreg1 = arg->dreg;
2490 MONO_ADD_INS (cfg->cbb, ins);
2491 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, FALSE);
2492 break;
2493 case ArgInFReg:
2494 MONO_INST_NEW (cfg, ins, OP_FMOVE);
2495 ins->dreg = mono_alloc_freg (cfg);
2496 ins->sreg1 = arg->dreg;
2497 MONO_ADD_INS (cfg->cbb, ins);
2498 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
2499 break;
2500 case ArgInFRegR4:
2501 if (COMPILE_LLVM (cfg))
2502 MONO_INST_NEW (cfg, ins, OP_FMOVE);
2503 else if (cfg->r4fp)
2504 MONO_INST_NEW (cfg, ins, OP_RMOVE);
2505 else
2506 MONO_INST_NEW (cfg, ins, OP_ARM_SETFREG_R4);
2507 ins->dreg = mono_alloc_freg (cfg);
2508 ins->sreg1 = arg->dreg;
2509 MONO_ADD_INS (cfg->cbb, ins);
2510 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
2511 break;
2512 default:
2513 g_assert_not_reached ();
2514 break;
2518 static void
2519 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
2521 MonoMethodSignature *tmp_sig;
2522 int sig_reg;
2524 if (MONO_IS_TAILCALL_OPCODE (call))
2525 NOT_IMPLEMENTED;
2527 g_assert (cinfo->sig_cookie.storage == ArgOnStack);
2530 * mono_ArgIterator_Setup assumes the signature cookie is
2531 * passed first and all the arguments which were before it are
2532 * passed on the stack after the signature. So compensate by
2533 * passing a different signature.
2535 tmp_sig = mono_metadata_signature_dup (call->signature);
2536 tmp_sig->param_count -= call->signature->sentinelpos;
2537 tmp_sig->sentinelpos = 0;
2538 memcpy (tmp_sig->params, call->signature->params + call->signature->sentinelpos, tmp_sig->param_count * sizeof (MonoType*));
2540 sig_reg = mono_alloc_ireg (cfg);
2541 MONO_EMIT_NEW_SIGNATURECONST (cfg, sig_reg, tmp_sig);
2543 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, cinfo->sig_cookie.offset, sig_reg);
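/*
 * Illustrative example (hypothetical signature): for a varargs call like
 * Foo (1, __arglist (2, 3.0)) with sentinelpos == 1, tmp_sig describes only
 * the (int, double) varargs tail, and its handle is stored at the stack
 * offset that get_call_info () reserved for the cookie.
 */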
2546 void
2547 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
2549 MonoMethodSignature *sig;
2550 MonoInst *arg, *vtarg;
2551 CallInfo *cinfo;
2552 ArgInfo *ainfo;
2553 int i;
2555 sig = call->signature;
2557 cinfo = get_call_info (cfg->mempool, sig);
2559 switch (cinfo->ret.storage) {
2560 case ArgVtypeInIRegs:
2561 case ArgHFA:
2562 if (MONO_IS_TAILCALL_OPCODE (call))
2563 break;
2565 * The vtype is returned in registers; save the return area address in a local, and after the call
2566 * save the vtype into the location it points to, in emit_move_return_value ().
2568 if (!cfg->arch.vret_addr_loc) {
2569 cfg->arch.vret_addr_loc = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
2570 /* Prevent it from being register allocated or optimized away */
2571 cfg->arch.vret_addr_loc->flags |= MONO_INST_VOLATILE;
2574 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->arch.vret_addr_loc->dreg, call->vret_var->dreg);
2575 break;
2576 case ArgVtypeByRef:
2577 /* Pass the vtype return address in R8 */
2578 g_assert (!MONO_IS_TAILCALL_OPCODE (call) || call->vret_var == cfg->vret_addr);
2579 MONO_INST_NEW (cfg, vtarg, OP_MOVE);
2580 vtarg->sreg1 = call->vret_var->dreg;
2581 vtarg->dreg = mono_alloc_preg (cfg);
2582 MONO_ADD_INS (cfg->cbb, vtarg);
2584 mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->ret.reg, FALSE);
2585 break;
2586 default:
2587 break;
2590 for (i = 0; i < cinfo->nargs; ++i) {
2591 ainfo = cinfo->args + i;
2592 arg = call->args [i];
2594 if ((sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
2595 /* Emit the signature cookie just before the implicit arguments */
2596 emit_sig_cookie (cfg, call, cinfo);
2599 switch (ainfo->storage) {
2600 case ArgInIReg:
2601 case ArgInFReg:
2602 case ArgInFRegR4:
2603 add_outarg_reg (cfg, call, ainfo->storage, ainfo->reg, arg);
2604 break;
2605 case ArgOnStack:
2606 switch (ainfo->slot_size) {
2607 case 8:
2608 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2609 break;
2610 case 4:
2611 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2612 break;
2613 case 2:
2614 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI2_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2615 break;
2616 case 1:
2617 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI1_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2618 break;
2619 default:
2620 g_assert_not_reached ();
2621 break;
2623 break;
2624 case ArgOnStackR8:
2625 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2626 break;
2627 case ArgOnStackR4:
2628 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
2629 break;
2630 case ArgVtypeInIRegs:
2631 case ArgVtypeByRef:
2632 case ArgVtypeByRefOnStack:
2633 case ArgVtypeOnStack:
2634 case ArgHFA: {
2635 MonoInst *ins;
2636 guint32 align;
2637 guint32 size;
2639 size = mono_class_value_size (arg->klass, &align);
2641 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
2642 ins->sreg1 = arg->dreg;
2643 ins->klass = arg->klass;
2644 ins->backend.size = size;
2645 ins->inst_p0 = call;
2646 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
2647 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
2648 MONO_ADD_INS (cfg->cbb, ins);
2649 break;
2651 default:
2652 g_assert_not_reached ();
2653 break;
2657 /* Handle the case where there are no implicit arguments */
2658 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (cinfo->nargs == sig->sentinelpos))
2659 emit_sig_cookie (cfg, call, cinfo);
2661 call->call_info = cinfo;
2662 call->stack_usage = cinfo->stack_usage;
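/*
 * Note that vtype arguments are not marshalled here: the OP_OUTARG_VT
 * pseudo-op queued above carries a private copy of the ArgInfo and is
 * expanded later by mono_arch_emit_outarg_vt ().
 */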
2665 void
2666 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
2668 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
2669 ArgInfo *ainfo = (ArgInfo*)ins->inst_p1;
2670 MonoInst *load;
2671 int i;
2673 if (ins->backend.size == 0 && !ainfo->gsharedvt)
2674 return;
2676 switch (ainfo->storage) {
2677 case ArgVtypeInIRegs:
2678 for (i = 0; i < ainfo->nregs; ++i) {
2679 // FIXME: Smaller sizes
2680 MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
2681 load->dreg = mono_alloc_ireg (cfg);
2682 load->inst_basereg = src->dreg;
2683 load->inst_offset = i * sizeof (target_mgreg_t);
2684 MONO_ADD_INS (cfg->cbb, load);
2685 add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg + i, load);
2687 break;
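/*
 * ArgHFA is a homogeneous floating-point aggregate in the AAPCS64 sense: a
 * struct with up to four fields of one FP type, passed in consecutive
 * s/d registers; foffsets [] holds each field's offset inside the struct.
 */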
2688 case ArgHFA:
2689 for (i = 0; i < ainfo->nregs; ++i) {
2690 if (ainfo->esize == 4)
2691 MONO_INST_NEW (cfg, load, OP_LOADR4_MEMBASE);
2692 else
2693 MONO_INST_NEW (cfg, load, OP_LOADR8_MEMBASE);
2694 load->dreg = mono_alloc_freg (cfg);
2695 load->inst_basereg = src->dreg;
2696 load->inst_offset = ainfo->foffsets [i];
2697 MONO_ADD_INS (cfg->cbb, load);
2698 add_outarg_reg (cfg, call, ainfo->esize == 4 ? ArgInFRegR4 : ArgInFReg, ainfo->reg + i, load);
2700 break;
2701 case ArgVtypeByRef:
2702 case ArgVtypeByRefOnStack: {
2703 MonoInst *vtaddr, *load, *arg;
2705 /* Pass the vtype address in a reg/on the stack */
2706 if (ainfo->gsharedvt) {
2707 load = src;
2708 } else {
2709 /* Make a copy of the argument */
2710 vtaddr = mono_compile_create_var (cfg, m_class_get_byval_arg (ins->klass), OP_LOCAL);
2712 MONO_INST_NEW (cfg, load, OP_LDADDR);
2713 load->inst_p0 = vtaddr;
2714 vtaddr->flags |= MONO_INST_INDIRECT;
2715 load->type = STACK_MP;
2716 load->klass = vtaddr->klass;
2717 load->dreg = mono_alloc_ireg (cfg);
2718 MONO_ADD_INS (cfg->cbb, load);
2719 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, ainfo->size, 8);
2722 if (ainfo->storage == ArgVtypeByRef) {
2723 MONO_INST_NEW (cfg, arg, OP_MOVE);
2724 arg->dreg = mono_alloc_preg (cfg);
2725 arg->sreg1 = load->dreg;
2726 MONO_ADD_INS (cfg->cbb, arg);
2727 add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg, arg);
2728 } else {
2729 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, load->dreg);
2731 break;
2733 case ArgVtypeOnStack:
2734 for (i = 0; i < ainfo->size / 8; ++i) {
2735 MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
2736 load->dreg = mono_alloc_ireg (cfg);
2737 load->inst_basereg = src->dreg;
2738 load->inst_offset = i * 8;
2739 MONO_ADD_INS (cfg->cbb, load);
2740 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ARMREG_SP, ainfo->offset + (i * 8), load->dreg);
2742 break;
2743 default:
2744 g_assert_not_reached ();
2745 break;
2749 void
2750 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
2752 MonoMethodSignature *sig;
2753 CallInfo *cinfo;
2755 sig = mono_method_signature_internal (cfg->method);
2756 if (!cfg->arch.cinfo)
2757 cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
2758 cinfo = cfg->arch.cinfo;
2760 switch (cinfo->ret.storage) {
2761 case ArgNone:
2762 break;
2763 case ArgInIReg:
2764 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
2765 break;
2766 case ArgInFReg:
2767 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
2768 break;
2769 case ArgInFRegR4:
2770 if (COMPILE_LLVM (cfg))
2771 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
2772 else if (cfg->r4fp)
2773 MONO_EMIT_NEW_UNALU (cfg, OP_RMOVE, cfg->ret->dreg, val->dreg);
2774 else
2775 MONO_EMIT_NEW_UNALU (cfg, OP_ARM_SETFREG_R4, cfg->ret->dreg, val->dreg);
2776 break;
2777 default:
2778 g_assert_not_reached ();
2779 break;
2783 #ifndef DISABLE_JIT
2785 gboolean
2786 mono_arch_tailcall_supported (MonoCompile *cfg, MonoMethodSignature *caller_sig, MonoMethodSignature *callee_sig, gboolean virtual_)
2788 g_assert (caller_sig);
2789 g_assert (callee_sig);
2791 CallInfo *caller_info = get_call_info (NULL, caller_sig);
2792 CallInfo *callee_info = get_call_info (NULL, callee_sig);
2794 gboolean res = IS_SUPPORTED_TAILCALL (callee_info->stack_usage <= caller_info->stack_usage)
2795 && IS_SUPPORTED_TAILCALL (caller_info->ret.storage == callee_info->ret.storage);
2797 // FIXME Limit stack_usage to 1G. emit_ldrx / strx has 32bit limits.
2798 res &= IS_SUPPORTED_TAILCALL (callee_info->stack_usage < (1 << 30));
2799 res &= IS_SUPPORTED_TAILCALL (caller_info->stack_usage < (1 << 30));
2801 // valuetype parameters are the address of a local in the caller's frame; a tailcall would reuse that frame, so reject them
2802 const ArgInfo *ainfo;
2803 ainfo = callee_info->args + callee_sig->hasthis;
2804 for (int i = 0; res && i < callee_sig->param_count; ++i) {
2805 res = IS_SUPPORTED_TAILCALL (ainfo [i].storage != ArgVtypeByRef)
2806 && IS_SUPPORTED_TAILCALL (ainfo [i].storage != ArgVtypeByRefOnStack);
2809 g_free (caller_info);
2810 g_free (callee_info);
2812 return res;
2815 #endif
2817 gboolean
2818 mono_arch_is_inst_imm (int opcode, int imm_opcode, gint64 imm)
2820 return (imm >= -((gint64)1<<31) && imm <= (((gint64)1<<31)-1));
2823 void
2824 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
2826 //NOT_IMPLEMENTED;
2829 void
2830 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
2832 //NOT_IMPLEMENTED;
2835 #define ADD_NEW_INS(cfg,dest,op) do { \
2836 MONO_INST_NEW ((cfg), (dest), (op)); \
2837 mono_bblock_insert_before_ins (bb, ins, (dest)); \
2838 } while (0)
2840 void
2841 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2843 MonoInst *ins, *temp, *last_ins = NULL;
2845 MONO_BB_FOR_EACH_INS (bb, ins) {
2846 switch (ins->opcode) {
2847 case OP_SBB:
2848 case OP_ISBB:
2849 case OP_SUBCC:
2850 case OP_ISUBCC:
2851 if (ins->next && (ins->next->opcode == OP_COND_EXC_C || ins->next->opcode == OP_COND_EXC_IC))
2852 /* ARM sets the C flag to 1 if the subtraction produced _no_ borrow */
2853 ins->next->opcode = OP_COND_EXC_NC;
2854 break;
2855 case OP_IDIV_IMM:
2856 case OP_IREM_IMM:
2857 case OP_IDIV_UN_IMM:
2858 case OP_IREM_UN_IMM:
2859 case OP_LREM_IMM:
2860 mono_decompose_op_imm (cfg, bb, ins);
2861 break;
2862 case OP_LOCALLOC_IMM:
2863 if (ins->inst_imm > 32) {
2864 ADD_NEW_INS (cfg, temp, OP_ICONST);
2865 temp->inst_c0 = ins->inst_imm;
2866 temp->dreg = mono_alloc_ireg (cfg);
2867 ins->sreg1 = temp->dreg;
2868 ins->opcode = mono_op_imm_to_op (ins->opcode);
2870 break;
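/*
 * Fuse a compare against zero with the following conditional branch into a
 * single compare-and-branch instruction, e.g.
 *
 *   cmp w0, #0 ; beq target   ==>   cbz w0, target
 *
 * and likewise for bne/cbnz and for the 64-bit forms handled below.
 */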
2871 case OP_ICOMPARE_IMM:
2872 if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBEQ) {
2873 ins->next->opcode = OP_ARM64_CBZW;
2874 ins->next->sreg1 = ins->sreg1;
2875 NULLIFY_INS (ins);
2876 } else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBNE_UN) {
2877 ins->next->opcode = OP_ARM64_CBNZW;
2878 ins->next->sreg1 = ins->sreg1;
2879 NULLIFY_INS (ins);
2881 break;
2882 case OP_LCOMPARE_IMM:
2883 case OP_COMPARE_IMM:
2884 if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBEQ) {
2885 ins->next->opcode = OP_ARM64_CBZX;
2886 ins->next->sreg1 = ins->sreg1;
2887 NULLIFY_INS (ins);
2888 } else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBNE_UN) {
2889 ins->next->opcode = OP_ARM64_CBNZX;
2890 ins->next->sreg1 = ins->sreg1;
2891 NULLIFY_INS (ins);
2893 break;
2894 case OP_FCOMPARE:
2895 case OP_RCOMPARE: {
2896 gboolean swap = FALSE;
2897 int reg;
2899 if (!ins->next) {
2900 /* Optimized away */
2901 NULLIFY_INS (ins);
2902 break;
2906 * FP compares with unordered operands set the flags
2907 * to NZCV=0011, which matches some non-unordered compares
2908 * as well, like LE, so we have to swap the operands.
2910 switch (ins->next->opcode) {
2911 case OP_FBLT:
2912 ins->next->opcode = OP_FBGT;
2913 swap = TRUE;
2914 break;
2915 case OP_FBLE:
2916 ins->next->opcode = OP_FBGE;
2917 swap = TRUE;
2918 break;
2919 case OP_RBLT:
2920 ins->next->opcode = OP_RBGT;
2921 swap = TRUE;
2922 break;
2923 case OP_RBLE:
2924 ins->next->opcode = OP_RBGE;
2925 swap = TRUE;
2926 break;
2927 default:
2928 break;
2930 if (swap) {
2931 reg = ins->sreg1;
2932 ins->sreg1 = ins->sreg2;
2933 ins->sreg2 = reg;
2935 break;
2937 default:
2938 break;
2941 last_ins = ins;
2943 bb->last_ins = last_ins;
2944 bb->max_vreg = cfg->next_vreg;
2947 void
2948 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *long_ins)
2952 static int
2953 opcode_to_armcond (int opcode)
2955 switch (opcode) {
2956 case OP_IBEQ:
2957 case OP_LBEQ:
2958 case OP_FBEQ:
2959 case OP_CEQ:
2960 case OP_ICEQ:
2961 case OP_LCEQ:
2962 case OP_FCEQ:
2963 case OP_RCEQ:
2964 case OP_COND_EXC_IEQ:
2965 case OP_COND_EXC_EQ:
2966 return ARMCOND_EQ;
2967 case OP_IBGE:
2968 case OP_LBGE:
2969 case OP_FBGE:
2970 case OP_ICGE:
2971 case OP_FCGE:
2972 case OP_RCGE:
2973 return ARMCOND_GE;
2974 case OP_IBGT:
2975 case OP_LBGT:
2976 case OP_FBGT:
2977 case OP_CGT:
2978 case OP_ICGT:
2979 case OP_LCGT:
2980 case OP_FCGT:
2981 case OP_RCGT:
2982 case OP_COND_EXC_IGT:
2983 case OP_COND_EXC_GT:
2984 return ARMCOND_GT;
2985 case OP_IBLE:
2986 case OP_LBLE:
2987 case OP_FBLE:
2988 case OP_ICLE:
2989 case OP_FCLE:
2990 case OP_RCLE:
2991 return ARMCOND_LE;
2992 case OP_IBLT:
2993 case OP_LBLT:
2994 case OP_FBLT:
2995 case OP_CLT:
2996 case OP_ICLT:
2997 case OP_LCLT:
2998 case OP_COND_EXC_ILT:
2999 case OP_COND_EXC_LT:
3000 return ARMCOND_LT;
3001 case OP_IBNE_UN:
3002 case OP_LBNE_UN:
3003 case OP_FBNE_UN:
3004 case OP_ICNEQ:
3005 case OP_FCNEQ:
3006 case OP_RCNEQ:
3007 case OP_COND_EXC_INE_UN:
3008 case OP_COND_EXC_NE_UN:
3009 return ARMCOND_NE;
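/*
 * Unsigned comparisons map to the carry-based conditions:
 * HS (>=), HI (>), LS (<=), LO (<).
 */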
3010 case OP_IBGE_UN:
3011 case OP_LBGE_UN:
3012 case OP_FBGE_UN:
3013 case OP_ICGE_UN:
3014 case OP_COND_EXC_IGE_UN:
3015 case OP_COND_EXC_GE_UN:
3016 return ARMCOND_HS;
3017 case OP_IBGT_UN:
3018 case OP_LBGT_UN:
3019 case OP_FBGT_UN:
3020 case OP_CGT_UN:
3021 case OP_ICGT_UN:
3022 case OP_LCGT_UN:
3023 case OP_FCGT_UN:
3024 case OP_RCGT_UN:
3025 case OP_COND_EXC_IGT_UN:
3026 case OP_COND_EXC_GT_UN:
3027 return ARMCOND_HI;
3028 case OP_IBLE_UN:
3029 case OP_LBLE_UN:
3030 case OP_FBLE_UN:
3031 case OP_ICLE_UN:
3032 case OP_COND_EXC_ILE_UN:
3033 case OP_COND_EXC_LE_UN:
3034 return ARMCOND_LS;
3035 case OP_IBLT_UN:
3036 case OP_LBLT_UN:
3037 case OP_FBLT_UN:
3038 case OP_CLT_UN:
3039 case OP_ICLT_UN:
3040 case OP_LCLT_UN:
3041 case OP_COND_EXC_ILT_UN:
3042 case OP_COND_EXC_LT_UN:
3043 return ARMCOND_LO;
3045 * FCMP sets the NZCV condition bits as follows:
3046 * eq = 0110
3047 * < = 1000
3048 * > = 0010
3049 * unordered = 0011
3050 * ARMCOND_LT is N!=V, so it matches unordered too, so
3051 * fclt and fclt_un need to be special cased.
3053 case OP_FCLT:
3054 case OP_RCLT:
3055 /* N==1 */
3056 return ARMCOND_MI;
3057 case OP_FCLT_UN:
3058 case OP_RCLT_UN:
3059 return ARMCOND_LT;
3060 case OP_COND_EXC_C:
3061 case OP_COND_EXC_IC:
3062 return ARMCOND_CS;
3063 case OP_COND_EXC_OV:
3064 case OP_COND_EXC_IOV:
3065 return ARMCOND_VS;
3066 case OP_COND_EXC_NC:
3067 case OP_COND_EXC_INC:
3068 return ARMCOND_CC;
3069 case OP_COND_EXC_NO:
3070 case OP_COND_EXC_INO:
3071 return ARMCOND_VC;
3072 default:
3073 printf ("%s\n", mono_inst_name (opcode));
3074 g_assert_not_reached ();
3075 return -1;
3079 /* This clobbers LR */
3080 static __attribute__ ((__warn_unused_result__)) guint8*
3081 emit_cond_exc (MonoCompile *cfg, guint8 *code, int opcode, const char *exc_name)
3083 int cond;
3085 cond = opcode_to_armcond (opcode);
3086 /* Capture PC */
3087 arm_adrx (code, ARMREG_IP1, code);
3088 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC, exc_name, MONO_R_ARM64_BCC);
3089 arm_bcc (code, cond, 0);
3090 return code;
3093 static guint8*
3094 emit_move_return_value (MonoCompile *cfg, guint8 * code, MonoInst *ins)
3096 CallInfo *cinfo;
3097 MonoCallInst *call;
3099 call = (MonoCallInst*)ins;
3100 cinfo = call->call_info;
3101 g_assert (cinfo);
3102 switch (cinfo->ret.storage) {
3103 case ArgNone:
3104 break;
3105 case ArgInIReg:
3106 /* LLVM compiled code might only set the bottom bits */
3107 if (call->signature && mini_get_underlying_type (call->signature->ret)->type == MONO_TYPE_I4)
3108 arm_sxtwx (code, call->inst.dreg, cinfo->ret.reg);
3109 else if (call->inst.dreg != cinfo->ret.reg)
3110 arm_movx (code, call->inst.dreg, cinfo->ret.reg);
3111 break;
3112 case ArgInFReg:
3113 if (call->inst.dreg != cinfo->ret.reg)
3114 arm_fmovd (code, call->inst.dreg, cinfo->ret.reg);
3115 break;
3116 case ArgInFRegR4:
3117 if (cfg->r4fp)
3118 arm_fmovs (code, call->inst.dreg, cinfo->ret.reg);
3119 else
3120 arm_fcvt_sd (code, call->inst.dreg, cinfo->ret.reg);
3121 break;
3122 case ArgVtypeInIRegs: {
3123 MonoInst *loc = cfg->arch.vret_addr_loc;
3124 int i;
3126 /* Load the destination address */
3127 g_assert (loc && loc->opcode == OP_REGOFFSET);
3128 code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
3129 for (i = 0; i < cinfo->ret.nregs; ++i)
3130 arm_strx (code, cinfo->ret.reg + i, ARMREG_LR, i * 8);
3131 break;
3133 case ArgHFA: {
3134 MonoInst *loc = cfg->arch.vret_addr_loc;
3135 int i;
3137 /* Load the destination address */
3138 g_assert (loc && loc->opcode == OP_REGOFFSET);
3139 code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
3140 for (i = 0; i < cinfo->ret.nregs; ++i) {
3141 if (cinfo->ret.esize == 4)
3142 arm_strfpw (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
3143 else
3144 arm_strfpx (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
3146 break;
3148 case ArgVtypeByRef:
3149 break;
3150 default:
3151 g_assert_not_reached ();
3152 break;
3154 return code;
3158 * emit_branch_island:
3160 * Emit a branch island for the conditional branches from cfg->native_code + start_offset to code.
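 *
 * A conditional branch (bcc/cbz) on arm64 only reaches +/-1 MB, while an
 * unconditional b reaches +/-128 MB. In a very large basic block a pending
 * bcc could end up out of range of its target, so roughly every 512 KB the
 * caller emits an island of unconditional branches, repoints each pending
 * bcc at its island slot, and moves the relocation onto the island entry.
 */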
3162 static guint8*
3163 emit_branch_island (MonoCompile *cfg, guint8 *code, int start_offset)
3165 MonoJumpInfo *ji;
3167 /* Iterate over the patch infos added so far by this bb */
3168 int island_size = 0;
3169 for (ji = cfg->patch_info; ji; ji = ji->next) {
3170 if (ji->ip.i < start_offset)
3171 /* The patch infos are in reverse order, so this means the end */
3172 break;
3173 if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ)
3174 island_size += 4;
3177 if (island_size) {
3178 code = realloc_code (cfg, island_size);
3180 /* Branch over the island */
3181 arm_b (code, code + 4 + island_size);
3183 for (ji = cfg->patch_info; ji; ji = ji->next) {
3184 if (ji->ip.i < start_offset)
3185 break;
3186 if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ) {
3187 /* Rewrite the cond branch so it branches to an unconditional branch in the branch island */
3188 arm_patch_rel (cfg->native_code + ji->ip.i, code, ji->relocation);
3189 /* Rewrite the patch so it points to the unconditional branch */
3190 ji->ip.i = code - cfg->native_code;
3191 ji->relocation = MONO_R_ARM64_B;
3192 arm_b (code, code);
3195 set_code_cursor (cfg, code);
3197 return code;
3200 void
3201 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3203 MonoInst *ins;
3204 MonoCallInst *call;
3205 guint8 *code = cfg->native_code + cfg->code_len;
3206 int start_offset, max_len, dreg, sreg1, sreg2;
3207 target_mgreg_t imm;
3209 if (cfg->verbose_level > 2)
3210 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3212 start_offset = code - cfg->native_code;
3213 g_assert (start_offset <= cfg->code_size);
3215 MONO_BB_FOR_EACH_INS (bb, ins) {
3216 guint offset = code - cfg->native_code;
3217 set_code_cursor (cfg, code);
3218 max_len = ins_get_size (ins->opcode);
3219 code = realloc_code (cfg, max_len);
3221 if (G_UNLIKELY (cfg->arch.cond_branch_islands && offset - start_offset > 4 * 0x1ffff)) {
3222 /* Emit a branch island for large basic blocks */
3223 code = emit_branch_island (cfg, code, start_offset);
3224 offset = code - cfg->native_code;
3225 start_offset = offset;
3228 mono_debug_record_line_number (cfg, ins, offset);
3230 dreg = ins->dreg;
3231 sreg1 = ins->sreg1;
3232 sreg2 = ins->sreg2;
3233 imm = ins->inst_imm;
3235 switch (ins->opcode) {
3236 case OP_ICONST:
3237 code = emit_imm (code, dreg, ins->inst_c0);
3238 break;
3239 case OP_I8CONST:
3240 code = emit_imm64 (code, dreg, ins->inst_c0);
3241 break;
3242 case OP_MOVE:
3243 if (dreg != sreg1)
3244 arm_movx (code, dreg, sreg1);
3245 break;
3246 case OP_NOP:
3247 case OP_RELAXED_NOP:
3248 break;
3249 case OP_JUMP_TABLE:
3250 mono_add_patch_info_rel (cfg, offset, (MonoJumpInfoType)(gsize)ins->inst_i1, ins->inst_p0, MONO_R_ARM64_IMM);
3251 code = emit_imm64_template (code, dreg);
3252 break;
3253 case OP_BREAK:
3255 * gdb does not like encountering the hw breakpoint ins in the debugged code.
3256 * So instead of emitting a trap, we emit a call to a C function and place a
3257 * breakpoint there.
3259 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_break));
3260 break;
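/*
 * localloc: round the requested size up to MONO_ARCH_FRAME_ALIGNMENT (sp
 * must stay 16-byte aligned on arm64), carve the block out of the stack,
 * then zero it 16 bytes at a time with stp xzr, xzr. The outgoing-parameter
 * area is re-reserved below the new allocation at the end.
 */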
3261 case OP_LOCALLOC: {
3262 guint8 *buf [16];
3264 arm_addx_imm (code, ARMREG_IP0, sreg1, (MONO_ARCH_FRAME_ALIGNMENT - 1));
3265 // FIXME: andx_imm doesn't work yet
3266 code = emit_imm (code, ARMREG_IP1, -MONO_ARCH_FRAME_ALIGNMENT);
3267 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3268 //arm_andx_imm (code, ARMREG_IP0, sreg1, - MONO_ARCH_FRAME_ALIGNMENT);
3269 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
3270 arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
3271 arm_movspx (code, ARMREG_SP, ARMREG_IP1);
3273 /* Init */
3274 /* ip1 = pointer, ip0 = end */
3275 arm_addx (code, ARMREG_IP0, ARMREG_IP1, ARMREG_IP0);
3276 buf [0] = code;
3277 arm_cmpx (code, ARMREG_IP1, ARMREG_IP0);
3278 buf [1] = code;
3279 arm_bcc (code, ARMCOND_EQ, 0);
3280 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_IP1, 0);
3281 arm_addx_imm (code, ARMREG_IP1, ARMREG_IP1, 16);
3282 arm_b (code, buf [0]);
3283 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3285 arm_movspx (code, dreg, ARMREG_SP);
3286 if (cfg->param_area)
3287 code = emit_subx_sp_imm (code, cfg->param_area);
3288 break;
3290 case OP_LOCALLOC_IMM: {
3291 int imm, offset;
3293 imm = ALIGN_TO (ins->inst_imm, MONO_ARCH_FRAME_ALIGNMENT);
3294 g_assert (arm_is_arith_imm (imm));
3295 arm_subx_imm (code, ARMREG_SP, ARMREG_SP, imm);
3297 /* Init */
3298 g_assert (MONO_ARCH_FRAME_ALIGNMENT == 16);
3299 offset = 0;
3300 while (offset < imm) {
3301 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_SP, offset);
3302 offset += 16;
3304 arm_movspx (code, dreg, ARMREG_SP);
3305 if (cfg->param_area)
3306 code = emit_subx_sp_imm (code, cfg->param_area);
3307 break;
3309 case OP_AOTCONST:
3310 code = emit_aotconst (cfg, code, dreg, (MonoJumpInfoType)(gsize)ins->inst_i1, ins->inst_p0);
3311 break;
3312 case OP_OBJC_GET_SELECTOR:
3313 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_OBJC_SELECTOR_REF, ins->inst_p0);
3314 /* See arch_emit_objc_selector_ref () in aot-compiler.c */
3315 arm_ldrx_lit (code, ins->dreg, 0);
3316 arm_nop (code);
3317 arm_nop (code);
3318 break;
3319 case OP_SEQ_POINT: {
3320 MonoInst *info_var = cfg->arch.seq_point_info_var;
3323 * For AOT, we use one got slot per method, which will point to a
3324 * SeqPointInfo structure, containing all the information required
3325 * by the code below.
3327 if (cfg->compile_aot) {
3328 g_assert (info_var);
3329 g_assert (info_var->opcode == OP_REGOFFSET);
3332 if (ins->flags & MONO_INST_SINGLE_STEP_LOC) {
3333 MonoInst *var = cfg->arch.ss_tramp_var;
3335 g_assert (var);
3336 g_assert (var->opcode == OP_REGOFFSET);
3337 /* Load ss_tramp_var */
3338 /* This is equal to &ss_trampoline */
3339 arm_ldrx (code, ARMREG_IP1, var->inst_basereg, var->inst_offset);
3340 /* Load the trampoline address */
3341 arm_ldrx (code, ARMREG_IP1, ARMREG_IP1, 0);
3342 /* Call it if it is non-null */
3343 arm_cbzx (code, ARMREG_IP1, code + 8);
3344 arm_blrx (code, ARMREG_IP1);
3347 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3349 if (cfg->compile_aot) {
3350 const guint32 offset = code - cfg->native_code;
3351 guint32 val;
3353 arm_ldrx (code, ARMREG_IP1, info_var->inst_basereg, info_var->inst_offset);
3354 /* Add the offset */
3355 val = ((offset / 4) * sizeof (target_mgreg_t)) + MONO_STRUCT_OFFSET (SeqPointInfo, bp_addrs);
3356 /* Load the info->bp_addrs [offset], which is either 0 or the address of the bp trampoline */
3357 code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP1, val);
3358 /* Skip the call if the loaded address is 0 */
3359 arm_cbzx (code, ARMREG_IP1, code + 8);
3360 /* Call the breakpoint trampoline */
3361 arm_blrx (code, ARMREG_IP1);
3362 } else {
3363 MonoInst *var = cfg->arch.bp_tramp_var;
3365 g_assert (var);
3366 g_assert (var->opcode == OP_REGOFFSET);
3367 /* Load the address of the bp trampoline into IP0 */
3368 arm_ldrx (code, ARMREG_IP0, var->inst_basereg, var->inst_offset);
3370 * A placeholder for a possible breakpoint inserted by
3371 * mono_arch_set_breakpoint ().
3373 arm_nop (code);
3375 break;
3378 /* BRANCH */
3379 case OP_BR:
3380 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_B);
3381 arm_b (code, code);
3382 break;
3383 case OP_BR_REG:
3384 arm_brx (code, sreg1);
3385 break;
3386 case OP_IBEQ:
3387 case OP_IBGE:
3388 case OP_IBGT:
3389 case OP_IBLE:
3390 case OP_IBLT:
3391 case OP_IBNE_UN:
3392 case OP_IBGE_UN:
3393 case OP_IBGT_UN:
3394 case OP_IBLE_UN:
3395 case OP_IBLT_UN:
3396 case OP_LBEQ:
3397 case OP_LBGE:
3398 case OP_LBGT:
3399 case OP_LBLE:
3400 case OP_LBLT:
3401 case OP_LBNE_UN:
3402 case OP_LBGE_UN:
3403 case OP_LBGT_UN:
3404 case OP_LBLE_UN:
3405 case OP_LBLT_UN:
3406 case OP_FBEQ:
3407 case OP_FBNE_UN:
3408 case OP_FBLT:
3409 case OP_FBGT:
3410 case OP_FBGT_UN:
3411 case OP_FBLE:
3412 case OP_FBGE:
3413 case OP_FBGE_UN: {
3414 int cond;
3416 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3417 cond = opcode_to_armcond (ins->opcode);
3418 arm_bcc (code, cond, 0);
3419 break;
3421 case OP_FBLT_UN:
3422 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3423 /* For fp compares, ARMCOND_LT is lt or unordered */
3424 arm_bcc (code, ARMCOND_LT, 0);
3425 break;
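/* fble_un is "<= or unordered", i.e. (eq || lt-or-unordered), hence the two branches below */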
3426 case OP_FBLE_UN:
3427 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3428 arm_bcc (code, ARMCOND_EQ, 0);
3429 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3430 /* For fp compares, ARMCOND_LT is lt or unordered */
3431 arm_bcc (code, ARMCOND_LT, 0);
3432 break;
3433 case OP_ARM64_CBZW:
3434 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3435 arm_cbzw (code, sreg1, 0);
3436 break;
3437 case OP_ARM64_CBZX:
3438 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3439 arm_cbzx (code, sreg1, 0);
3440 break;
3441 case OP_ARM64_CBNZW:
3442 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3443 arm_cbnzw (code, sreg1, 0);
3444 break;
3445 case OP_ARM64_CBNZX:
3446 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3447 arm_cbnzx (code, sreg1, 0);
3448 break;
3449 /* ALU */
3450 case OP_IADD:
3451 arm_addw (code, dreg, sreg1, sreg2);
3452 break;
3453 case OP_LADD:
3454 arm_addx (code, dreg, sreg1, sreg2);
3455 break;
3456 case OP_ISUB:
3457 arm_subw (code, dreg, sreg1, sreg2);
3458 break;
3459 case OP_LSUB:
3460 arm_subx (code, dreg, sreg1, sreg2);
3461 break;
3462 case OP_IAND:
3463 arm_andw (code, dreg, sreg1, sreg2);
3464 break;
3465 case OP_LAND:
3466 arm_andx (code, dreg, sreg1, sreg2);
3467 break;
3468 case OP_IOR:
3469 arm_orrw (code, dreg, sreg1, sreg2);
3470 break;
3471 case OP_LOR:
3472 arm_orrx (code, dreg, sreg1, sreg2);
3473 break;
3474 case OP_IXOR:
3475 arm_eorw (code, dreg, sreg1, sreg2);
3476 break;
3477 case OP_LXOR:
3478 arm_eorx (code, dreg, sreg1, sreg2);
3479 break;
3480 case OP_INEG:
3481 arm_negw (code, dreg, sreg1);
3482 break;
3483 case OP_LNEG:
3484 arm_negx (code, dreg, sreg1);
3485 break;
3486 case OP_INOT:
3487 arm_mvnw (code, dreg, sreg1);
3488 break;
3489 case OP_LNOT:
3490 arm_mvnx (code, dreg, sreg1);
3491 break;
3492 case OP_IADDCC:
3493 arm_addsw (code, dreg, sreg1, sreg2);
3494 break;
3495 case OP_ADDCC:
3496 case OP_LADDCC:
3497 arm_addsx (code, dreg, sreg1, sreg2);
3498 break;
3499 case OP_ISUBCC:
3500 arm_subsw (code, dreg, sreg1, sreg2);
3501 break;
3502 case OP_LSUBCC:
3503 case OP_SUBCC:
3504 arm_subsx (code, dreg, sreg1, sreg2);
3505 break;
3506 case OP_ICOMPARE:
3507 arm_cmpw (code, sreg1, sreg2);
3508 break;
3509 case OP_COMPARE:
3510 case OP_LCOMPARE:
3511 arm_cmpx (code, sreg1, sreg2);
3512 break;
3513 case OP_IADD_IMM:
3514 code = emit_addw_imm (code, dreg, sreg1, imm);
3515 break;
3516 case OP_LADD_IMM:
3517 case OP_ADD_IMM:
3518 code = emit_addx_imm (code, dreg, sreg1, imm);
3519 break;
3520 case OP_ISUB_IMM:
3521 code = emit_subw_imm (code, dreg, sreg1, imm);
3522 break;
3523 case OP_LSUB_IMM:
3524 code = emit_subx_imm (code, dreg, sreg1, imm);
3525 break;
3526 case OP_IAND_IMM:
3527 code = emit_andw_imm (code, dreg, sreg1, imm);
3528 break;
3529 case OP_LAND_IMM:
3530 case OP_AND_IMM:
3531 code = emit_andx_imm (code, dreg, sreg1, imm);
3532 break;
3533 case OP_IOR_IMM:
3534 code = emit_orrw_imm (code, dreg, sreg1, imm);
3535 break;
3536 case OP_LOR_IMM:
3537 code = emit_orrx_imm (code, dreg, sreg1, imm);
3538 break;
3539 case OP_IXOR_IMM:
3540 code = emit_eorw_imm (code, dreg, sreg1, imm);
3541 break;
3542 case OP_LXOR_IMM:
3543 code = emit_eorx_imm (code, dreg, sreg1, imm);
3544 break;
3545 case OP_ICOMPARE_IMM:
3546 code = emit_cmpw_imm (code, sreg1, imm);
3547 break;
3548 case OP_LCOMPARE_IMM:
3549 case OP_COMPARE_IMM:
3550 if (imm == 0) {
3551 arm_cmpx (code, sreg1, ARMREG_RZR);
3552 } else {
3553 // FIXME: 32 vs 64 bit issues for 0xffffffff
3554 code = emit_imm64 (code, ARMREG_LR, imm);
3555 arm_cmpx (code, sreg1, ARMREG_LR);
3557 break;
3558 case OP_ISHL:
3559 arm_lslvw (code, dreg, sreg1, sreg2);
3560 break;
3561 case OP_LSHL:
3562 arm_lslvx (code, dreg, sreg1, sreg2);
3563 break;
3564 case OP_ISHR:
3565 arm_asrvw (code, dreg, sreg1, sreg2);
3566 break;
3567 case OP_LSHR:
3568 arm_asrvx (code, dreg, sreg1, sreg2);
3569 break;
3570 case OP_ISHR_UN:
3571 arm_lsrvw (code, dreg, sreg1, sreg2);
3572 break;
3573 case OP_LSHR_UN:
3574 arm_lsrvx (code, dreg, sreg1, sreg2);
3575 break;
3576 case OP_ISHL_IMM:
3577 if (imm == 0)
3578 arm_movx (code, dreg, sreg1);
3579 else
3580 arm_lslw (code, dreg, sreg1, imm);
3581 break;
3582 case OP_SHL_IMM:
3583 case OP_LSHL_IMM:
3584 if (imm == 0)
3585 arm_movx (code, dreg, sreg1);
3586 else
3587 arm_lslx (code, dreg, sreg1, imm);
3588 break;
3589 case OP_ISHR_IMM:
3590 if (imm == 0)
3591 arm_movx (code, dreg, sreg1);
3592 else
3593 arm_asrw (code, dreg, sreg1, imm);
3594 break;
3595 case OP_LSHR_IMM:
3596 case OP_SHR_IMM:
3597 if (imm == 0)
3598 arm_movx (code, dreg, sreg1);
3599 else
3600 arm_asrx (code, dreg, sreg1, imm);
3601 break;
3602 case OP_ISHR_UN_IMM:
3603 if (imm == 0)
3604 arm_movx (code, dreg, sreg1);
3605 else
3606 arm_lsrw (code, dreg, sreg1, imm);
3607 break;
3608 case OP_SHR_UN_IMM:
3609 case OP_LSHR_UN_IMM:
3610 if (imm == 0)
3611 arm_movx (code, dreg, sreg1);
3612 else
3613 arm_lsrx (code, dreg, sreg1, imm);
3614 break;
3616 /* 64BIT ALU */
3617 case OP_SEXT_I4:
3618 arm_sxtwx (code, dreg, sreg1);
3619 break;
3620 case OP_ZEXT_I4:
3621 /* Clean out the upper word */
3622 arm_movw (code, dreg, sreg1);
3623 break;
3625 /* MULTIPLY/DIVISION */
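/*
 * The CLR requires integer division to throw DivideByZeroException on a
 * zero divisor and OverflowException for INT_MIN / -1 (the one quotient
 * that does not fit), whereas the arm64 sdiv instruction silently yields 0
 * resp. INT_MIN, so both conditions are checked explicitly up front. rem
 * is computed as dividend - quotient * divisor via msub.
 */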
3626 case OP_IDIV:
3627 case OP_IREM:
3628 // FIXME: Optimize this
3629 /* Check for zero */
3630 arm_cmpx_imm (code, sreg2, 0);
3631 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3632 /* Check for INT_MIN/-1 */
3633 code = emit_imm (code, ARMREG_IP0, 0x80000000);
3634 arm_cmpx (code, sreg1, ARMREG_IP0);
3635 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3636 code = emit_imm (code, ARMREG_IP0, 0xffffffff);
3637 arm_cmpx (code, sreg2, ARMREG_IP0);
3638 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3639 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3640 arm_cmpx_imm (code, ARMREG_IP0, 1);
3641 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "OverflowException");
3642 if (ins->opcode == OP_IREM) {
3643 arm_sdivw (code, ARMREG_LR, sreg1, sreg2);
3644 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3645 } else {
3646 arm_sdivw (code, dreg, sreg1, sreg2);
3648 break;
3649 case OP_IDIV_UN:
3650 arm_cmpx_imm (code, sreg2, 0);
3651 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3652 arm_udivw (code, dreg, sreg1, sreg2);
3653 break;
3654 case OP_IREM_UN:
3655 arm_cmpx_imm (code, sreg2, 0);
3656 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3657 arm_udivw (code, ARMREG_LR, sreg1, sreg2);
3658 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3659 break;
3660 case OP_LDIV:
3661 case OP_LREM:
3662 // FIXME: Optimize this
3663 /* Check for zero */
3664 arm_cmpx_imm (code, sreg2, 0);
3665 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3666 /* Check for INT64_MIN/-1 */
3667 code = emit_imm64 (code, ARMREG_IP0, 0x8000000000000000);
3668 arm_cmpx (code, sreg1, ARMREG_IP0);
3669 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3670 code = emit_imm64 (code, ARMREG_IP0, 0xffffffffffffffff);
3671 arm_cmpx (code, sreg2, ARMREG_IP0);
3672 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3673 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3674 arm_cmpx_imm (code, ARMREG_IP0, 1);
3675 /* 64 bit uses OverflowException */
3676 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "OverflowException");
3677 if (ins->opcode == OP_LREM) {
3678 arm_sdivx (code, ARMREG_LR, sreg1, sreg2);
3679 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3680 } else {
3681 arm_sdivx (code, dreg, sreg1, sreg2);
3683 break;
3684 case OP_LDIV_UN:
3685 arm_cmpx_imm (code, sreg2, 0);
3686 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3687 arm_udivx (code, dreg, sreg1, sreg2);
3688 break;
3689 case OP_LREM_UN:
3690 arm_cmpx_imm (code, sreg2, 0);
3691 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3692 arm_udivx (code, ARMREG_LR, sreg1, sreg2);
3693 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3694 break;
3695 case OP_IMUL:
3696 arm_mulw (code, dreg, sreg1, sreg2);
3697 break;
3698 case OP_LMUL:
3699 arm_mulx (code, dreg, sreg1, sreg2);
3700 break;
3701 case OP_IMUL_IMM:
3702 code = emit_imm (code, ARMREG_LR, imm);
3703 arm_mulw (code, dreg, sreg1, ARMREG_LR);
3704 break;
3705 case OP_MUL_IMM:
3706 case OP_LMUL_IMM:
3707 code = emit_imm (code, ARMREG_LR, imm);
3708 arm_mulx (code, dreg, sreg1, ARMREG_LR);
3709 break;
3711 /* CONVERSIONS */
3712 case OP_ICONV_TO_I1:
3713 case OP_LCONV_TO_I1:
3714 arm_sxtbx (code, dreg, sreg1);
3715 break;
3716 case OP_ICONV_TO_I2:
3717 case OP_LCONV_TO_I2:
3718 arm_sxthx (code, dreg, sreg1);
3719 break;
3720 case OP_ICONV_TO_U1:
3721 case OP_LCONV_TO_U1:
3722 arm_uxtbw (code, dreg, sreg1);
3723 break;
3724 case OP_ICONV_TO_U2:
3725 case OP_LCONV_TO_U2:
3726 arm_uxthw (code, dreg, sreg1);
3727 break;
3729 /* CSET */
3730 case OP_CEQ:
3731 case OP_ICEQ:
3732 case OP_LCEQ:
3733 case OP_CLT:
3734 case OP_ICLT:
3735 case OP_LCLT:
3736 case OP_CGT:
3737 case OP_ICGT:
3738 case OP_LCGT:
3739 case OP_CLT_UN:
3740 case OP_ICLT_UN:
3741 case OP_LCLT_UN:
3742 case OP_CGT_UN:
3743 case OP_ICGT_UN:
3744 case OP_LCGT_UN:
3745 case OP_ICNEQ:
3746 case OP_ICGE:
3747 case OP_ICLE:
3748 case OP_ICGE_UN:
3749 case OP_ICLE_UN: {
3750 int cond;
3752 cond = opcode_to_armcond (ins->opcode);
3753 arm_cset (code, cond, dreg);
3754 break;
3756 case OP_FCEQ:
3757 case OP_FCLT:
3758 case OP_FCLT_UN:
3759 case OP_FCGT:
3760 case OP_FCGT_UN:
3761 case OP_FCNEQ:
3762 case OP_FCLE:
3763 case OP_FCGE: {
3764 int cond;
3766 cond = opcode_to_armcond (ins->opcode);
3767 arm_fcmpd (code, sreg1, sreg2);
3768 arm_cset (code, cond, dreg);
3769 break;
3772 /* MEMORY */
3773 case OP_LOADI1_MEMBASE:
3774 code = emit_ldrsbx (code, dreg, ins->inst_basereg, ins->inst_offset);
3775 break;
3776 case OP_LOADU1_MEMBASE:
3777 code = emit_ldrb (code, dreg, ins->inst_basereg, ins->inst_offset);
3778 break;
3779 case OP_LOADI2_MEMBASE:
3780 code = emit_ldrshx (code, dreg, ins->inst_basereg, ins->inst_offset);
3781 break;
3782 case OP_LOADU2_MEMBASE:
3783 code = emit_ldrh (code, dreg, ins->inst_basereg, ins->inst_offset);
3784 break;
3785 case OP_LOADI4_MEMBASE:
3786 code = emit_ldrswx (code, dreg, ins->inst_basereg, ins->inst_offset);
3787 break;
3788 case OP_LOADU4_MEMBASE:
3789 code = emit_ldrw (code, dreg, ins->inst_basereg, ins->inst_offset);
3790 break;
3791 case OP_LOAD_MEMBASE:
3792 case OP_LOADI8_MEMBASE:
3793 code = emit_ldrx (code, dreg, ins->inst_basereg, ins->inst_offset);
3794 break;
3795 case OP_STOREI1_MEMBASE_IMM:
3796 case OP_STOREI2_MEMBASE_IMM:
3797 case OP_STOREI4_MEMBASE_IMM:
3798 case OP_STORE_MEMBASE_IMM:
3799 case OP_STOREI8_MEMBASE_IMM: {
3800 int immreg;
3802 if (imm != 0) {
3803 code = emit_imm (code, ARMREG_LR, imm);
3804 immreg = ARMREG_LR;
3805 } else {
3806 immreg = ARMREG_RZR;
3809 switch (ins->opcode) {
3810 case OP_STOREI1_MEMBASE_IMM:
3811 code = emit_strb (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3812 break;
3813 case OP_STOREI2_MEMBASE_IMM:
3814 code = emit_strh (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3815 break;
3816 case OP_STOREI4_MEMBASE_IMM:
3817 code = emit_strw (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3818 break;
3819 case OP_STORE_MEMBASE_IMM:
3820 case OP_STOREI8_MEMBASE_IMM:
3821 code = emit_strx (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3822 break;
3823 default:
3824 g_assert_not_reached ();
3825 break;
3827 break;
3829 case OP_STOREI1_MEMBASE_REG:
3830 code = emit_strb (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3831 break;
3832 case OP_STOREI2_MEMBASE_REG:
3833 code = emit_strh (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3834 break;
3835 case OP_STOREI4_MEMBASE_REG:
3836 code = emit_strw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3837 break;
3838 case OP_STORE_MEMBASE_REG:
3839 case OP_STOREI8_MEMBASE_REG:
3840 code = emit_strx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3841 break;
3842 case OP_TLS_GET:
3843 code = emit_tls_get (code, dreg, ins->inst_offset);
3844 break;
3845 case OP_TLS_SET:
3846 code = emit_tls_set (code, sreg1, ins->inst_offset);
3847 break;
3848 /* Atomic */
3849 case OP_MEMORY_BARRIER:
3850 arm_dmb (code, ARM_DMB_ISH);
3851 break;
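/*
 * The atomic RMW opcodes below all use the same ARMv8.0 load-exclusive /
 * store-exclusive retry loop (the LSE atomics such as ldadd are not used
 * here). For OP_ATOMIC_ADD_I4 the emitted sequence is roughly:
 *
 *   retry: ldxrw  ip0, [sreg1]       ; load-exclusive the current value
 *          addx   ip0, ip0, sreg2    ; compute the new value
 *          stlxrw ip1, ip0, [sreg1]  ; try to store; ip1 == 0 on success
 *          cbnzw  ip1, retry         ; lost the exclusive monitor, retry
 *          dmb    ish                ; make the result seq-cst
 *          movx   dreg, ip0          ; return the new value
 */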
3852 case OP_ATOMIC_ADD_I4: {
3853 guint8 *buf [16];
3855 buf [0] = code;
3856 arm_ldxrw (code, ARMREG_IP0, sreg1);
3857 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3858 arm_stlxrw (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3859 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3861 arm_dmb (code, ARM_DMB_ISH);
3862 arm_movx (code, dreg, ARMREG_IP0);
3863 break;
3865 case OP_ATOMIC_ADD_I8: {
3866 guint8 *buf [16];
3868 buf [0] = code;
3869 arm_ldxrx (code, ARMREG_IP0, sreg1);
3870 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3871 arm_stlxrx (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3872 arm_cbnzx (code, ARMREG_IP1, buf [0]);
3874 arm_dmb (code, ARM_DMB_ISH);
3875 arm_movx (code, dreg, ARMREG_IP0);
3876 break;
3878 case OP_ATOMIC_EXCHANGE_I4: {
3879 guint8 *buf [16];
3881 buf [0] = code;
3882 arm_ldxrw (code, ARMREG_IP0, sreg1);
3883 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3884 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3886 arm_dmb (code, ARM_DMB_ISH);
3887 arm_movx (code, dreg, ARMREG_IP0);
3888 break;
3890 case OP_ATOMIC_EXCHANGE_I8: {
3891 guint8 *buf [16];
3893 buf [0] = code;
3894 arm_ldxrx (code, ARMREG_IP0, sreg1);
3895 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3896 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3898 arm_dmb (code, ARM_DMB_ISH);
3899 arm_movx (code, dreg, ARMREG_IP0);
3900 break;
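/*
 * The CAS variants add a comparison inside the loop: if the loaded value
 * differs from the comparand (sreg3) the loop is exited at once, otherwise
 * the exclusive store is attempted and retried on contention. Either way
 * dreg ends up holding the previously observed value, which is what
 * Interlocked.CompareExchange () returns.
 */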
3902 case OP_ATOMIC_CAS_I4: {
3903 guint8 *buf [16];
3905 /* sreg2 is the value, sreg3 is the comparand */
3906 buf [0] = code;
3907 arm_ldxrw (code, ARMREG_IP0, sreg1);
3908 arm_cmpw (code, ARMREG_IP0, ins->sreg3);
3909 buf [1] = code;
3910 arm_bcc (code, ARMCOND_NE, 0);
3911 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3912 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3913 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3915 arm_dmb (code, ARM_DMB_ISH);
3916 arm_movx (code, dreg, ARMREG_IP0);
3917 break;
3919 case OP_ATOMIC_CAS_I8: {
3920 guint8 *buf [16];
3922 buf [0] = code;
3923 arm_ldxrx (code, ARMREG_IP0, sreg1);
3924 arm_cmpx (code, ARMREG_IP0, ins->sreg3);
3925 buf [1] = code;
3926 arm_bcc (code, ARMCOND_NE, 0);
3927 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3928 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3929 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3931 arm_dmb (code, ARM_DMB_ISH);
3932 arm_movx (code, dreg, ARMREG_IP0);
3933 break;
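/*
 * The atomic loads and stores below rely on the acquire/release
 * instructions (ldarb/ldarh/ldarw/ldarx and stlrb/stlrh/stlrw/stlrx). That
 * alone gives acquire/release semantics; only when the opcode requests
 * sequential consistency (MONO_MEMORY_BARRIER_SEQ) is an extra "dmb ish"
 * emitted, before the load resp. after the store.
 */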
3935 case OP_ATOMIC_LOAD_I1: {
3936 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3937 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3938 arm_dmb (code, ARM_DMB_ISH);
3939 arm_ldarb (code, ins->dreg, ARMREG_LR);
3940 arm_sxtbx (code, ins->dreg, ins->dreg);
3941 break;
3943 case OP_ATOMIC_LOAD_U1: {
3944 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3945 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3946 arm_dmb (code, ARM_DMB_ISH);
3947 arm_ldarb (code, ins->dreg, ARMREG_LR);
3948 arm_uxtbx (code, ins->dreg, ins->dreg);
3949 break;
3951 case OP_ATOMIC_LOAD_I2: {
3952 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3953 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3954 arm_dmb (code, ARM_DMB_ISH);
3955 arm_ldarh (code, ins->dreg, ARMREG_LR);
3956 arm_sxthx (code, ins->dreg, ins->dreg);
3957 break;
3959 case OP_ATOMIC_LOAD_U2: {
3960 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3961 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3962 arm_dmb (code, ARM_DMB_ISH);
3963 arm_ldarh (code, ins->dreg, ARMREG_LR);
3964 arm_uxthx (code, ins->dreg, ins->dreg);
3965 break;
3967 case OP_ATOMIC_LOAD_I4: {
3968 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3969 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3970 arm_dmb (code, ARM_DMB_ISH);
3971 arm_ldarw (code, ins->dreg, ARMREG_LR);
3972 arm_sxtwx (code, ins->dreg, ins->dreg);
3973 break;
3975 case OP_ATOMIC_LOAD_U4: {
3976 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3977 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3978 arm_dmb (code, ARM_DMB_ISH);
3979 arm_ldarw (code, ins->dreg, ARMREG_LR);
3980 arm_movw (code, ins->dreg, ins->dreg); /* Clear upper half of the register. */
3981 break;
3983 case OP_ATOMIC_LOAD_I8:
3984 case OP_ATOMIC_LOAD_U8: {
3985 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3986 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3987 arm_dmb (code, ARM_DMB_ISH);
3988 arm_ldarx (code, ins->dreg, ARMREG_LR);
3989 break;
3991 case OP_ATOMIC_LOAD_R4: {
3992 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3993 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3994 arm_dmb (code, ARM_DMB_ISH);
3995 if (cfg->r4fp) {
3996 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
3997 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
3998 } else {
3999 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
4000 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4001 arm_fcvt_sd (code, ins->dreg, FP_TEMP_REG);
4003 break;
4005 case OP_ATOMIC_LOAD_R8: {
4006 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4007 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4008 arm_dmb (code, ARM_DMB_ISH);
4009 arm_ldarx (code, ARMREG_LR, ARMREG_LR);
4010 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
4011 break;
4013 case OP_ATOMIC_STORE_I1:
4014 case OP_ATOMIC_STORE_U1: {
4015 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4016 arm_stlrb (code, ARMREG_LR, ins->sreg1);
4017 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4018 arm_dmb (code, ARM_DMB_ISH);
4019 break;
4021 case OP_ATOMIC_STORE_I2:
4022 case OP_ATOMIC_STORE_U2: {
4023 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4024 arm_stlrh (code, ARMREG_LR, ins->sreg1);
4025 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4026 arm_dmb (code, ARM_DMB_ISH);
4027 break;
4029 case OP_ATOMIC_STORE_I4:
4030 case OP_ATOMIC_STORE_U4: {
4031 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4032 arm_stlrw (code, ARMREG_LR, ins->sreg1);
4033 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4034 arm_dmb (code, ARM_DMB_ISH);
4035 break;
4037 case OP_ATOMIC_STORE_I8:
4038 case OP_ATOMIC_STORE_U8: {
4039 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4040 arm_stlrx (code, ARMREG_LR, ins->sreg1);
4041 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4042 arm_dmb (code, ARM_DMB_ISH);
4043 break;
4045 case OP_ATOMIC_STORE_R4: {
4046 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4047 if (cfg->r4fp) {
4048 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
4049 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
4050 } else {
4051 arm_fcvt_ds (code, FP_TEMP_REG, ins->sreg1);
4052 arm_fmov_double_to_rx (code, ARMREG_IP0, FP_TEMP_REG);
4053 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
4055 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4056 arm_dmb (code, ARM_DMB_ISH);
4057 break;
4059 case OP_ATOMIC_STORE_R8: {
4060 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4061 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
4062 arm_stlrx (code, ARMREG_LR, ARMREG_IP0);
4063 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4064 arm_dmb (code, ARM_DMB_ISH);
4065 break;
4068 /* FP */
4069 case OP_R8CONST: {
4070 guint64 imm = *(guint64*)ins->inst_p0;
4072 if (imm == 0) {
4073 arm_fmov_rx_to_double (code, dreg, ARMREG_RZR);
4074 } else {
4075 code = emit_imm64 (code, ARMREG_LR, imm);
4076 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
4078 break;
4080 case OP_R4CONST: {
4081 guint64 imm = *(guint32*)ins->inst_p0;
4083 code = emit_imm64 (code, ARMREG_LR, imm);
4084 if (cfg->r4fp) {
4085 arm_fmov_rx_to_double (code, dreg, ARMREG_LR);
4086 } else {
4087 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4088 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4089 }
4090 break;
4091 }
4092 case OP_LOADR8_MEMBASE:
4093 code = emit_ldrfpx (code, dreg, ins->inst_basereg, ins->inst_offset);
4094 break;
4095 case OP_LOADR4_MEMBASE:
4096 if (cfg->r4fp) {
4097 code = emit_ldrfpw (code, dreg, ins->inst_basereg, ins->inst_offset);
4098 } else {
4099 code = emit_ldrfpw (code, FP_TEMP_REG, ins->inst_basereg, ins->inst_offset);
4100 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4101 }
4102 break;
4103 case OP_STORER8_MEMBASE_REG:
4104 code = emit_strfpx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
4105 break;
4106 case OP_STORER4_MEMBASE_REG:
4107 if (cfg->r4fp) {
4108 code = emit_strfpw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
4109 } else {
4110 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
4111 code = emit_strfpw (code, FP_TEMP_REG, ins->inst_destbasereg, ins->inst_offset);
4112 }
4113 break;
4114 case OP_FMOVE:
4115 if (dreg != sreg1)
4116 arm_fmovd (code, dreg, sreg1);
4117 break;
4118 case OP_RMOVE:
4119 if (dreg != sreg1)
4120 arm_fmovs (code, dreg, sreg1);
4121 break;
4122 case OP_MOVE_F_TO_I4:
4123 if (cfg->r4fp) {
4124 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
4125 } else {
4126 arm_fcvt_ds (code, ins->dreg, ins->sreg1);
4127 arm_fmov_double_to_rx (code, ins->dreg, ins->dreg);
4128 }
4129 break;
4130 case OP_MOVE_I4_TO_F:
4131 if (cfg->r4fp) {
4132 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4133 } else {
4134 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4135 arm_fcvt_sd (code, ins->dreg, ins->dreg);
4136 }
4137 break;
4138 case OP_MOVE_F_TO_I8:
4139 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
4140 break;
4141 case OP_MOVE_I8_TO_F:
4142 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4143 break;
4144 case OP_FCOMPARE:
4145 arm_fcmpd (code, sreg1, sreg2);
4146 break;
4147 case OP_RCOMPARE:
4148 arm_fcmps (code, sreg1, sreg2);
4149 break;
4150 case OP_FCONV_TO_I1:
4151 arm_fcvtzs_dx (code, dreg, sreg1);
4152 arm_sxtbx (code, dreg, dreg);
4153 break;
4154 case OP_FCONV_TO_U1:
4155 arm_fcvtzu_dx (code, dreg, sreg1);
4156 arm_uxtbw (code, dreg, dreg);
4157 break;
4158 case OP_FCONV_TO_I2:
4159 arm_fcvtzs_dx (code, dreg, sreg1);
4160 arm_sxthx (code, dreg, dreg);
4161 break;
4162 case OP_FCONV_TO_U2:
4163 arm_fcvtzu_dx (code, dreg, sreg1);
4164 arm_uxthw (code, dreg, dreg);
4165 break;
4166 case OP_FCONV_TO_I4:
4167 arm_fcvtzs_dx (code, dreg, sreg1);
4168 arm_sxtwx (code, dreg, dreg);
4169 break;
4170 case OP_FCONV_TO_U4:
4171 arm_fcvtzu_dx (code, dreg, sreg1);
4172 break;
4173 case OP_FCONV_TO_I8:
4174 arm_fcvtzs_dx (code, dreg, sreg1);
4175 break;
4176 case OP_FCONV_TO_U8:
4177 arm_fcvtzu_dx (code, dreg, sreg1);
4178 break;
4179 case OP_FCONV_TO_R4:
4180 if (cfg->r4fp) {
4181 arm_fcvt_ds (code, dreg, sreg1);
4182 } else {
4183 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
4184 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4185 }
4186 break;
4187 case OP_ICONV_TO_R4:
4188 if (cfg->r4fp) {
4189 arm_scvtf_rw_to_s (code, dreg, sreg1);
4190 } else {
4191 arm_scvtf_rw_to_s (code, FP_TEMP_REG, sreg1);
4192 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4193 }
4194 break;
4195 case OP_LCONV_TO_R4:
4196 if (cfg->r4fp) {
4197 arm_scvtf_rx_to_s (code, dreg, sreg1);
4198 } else {
4199 arm_scvtf_rx_to_s (code, FP_TEMP_REG, sreg1);
4200 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4201 }
4202 break;
4203 case OP_ICONV_TO_R8:
4204 arm_scvtf_rw_to_d (code, dreg, sreg1);
4205 break;
4206 case OP_LCONV_TO_R8:
4207 arm_scvtf_rx_to_d (code, dreg, sreg1);
4208 break;
4209 case OP_ICONV_TO_R_UN:
4210 arm_ucvtf_rw_to_d (code, dreg, sreg1);
4211 break;
4212 case OP_LCONV_TO_R_UN:
4213 arm_ucvtf_rx_to_d (code, dreg, sreg1);
4214 break;
4215 case OP_FADD:
4216 arm_fadd_d (code, dreg, sreg1, sreg2);
4217 break;
4218 case OP_FSUB:
4219 arm_fsub_d (code, dreg, sreg1, sreg2);
4220 break;
4221 case OP_FMUL:
4222 arm_fmul_d (code, dreg, sreg1, sreg2);
4223 break;
4224 case OP_FDIV:
4225 arm_fdiv_d (code, dreg, sreg1, sreg2);
4226 break;
4227 case OP_FREM:
4228 /* Emulated */
4229 g_assert_not_reached ();
4230 break;
4231 case OP_FNEG:
4232 arm_fneg_d (code, dreg, sreg1);
4233 break;
4234 case OP_ARM_SETFREG_R4:
4235 arm_fcvt_ds (code, dreg, sreg1);
4236 break;
4237 case OP_CKFINITE:
4238 /* Check for infinity */
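/* 0x7fefffffffffffff is the bit pattern of DBL_MAX, the largest finite double:
 * any value whose absolute value compares greater than it is an infinity
 * (NaNs are caught separately by the unordered self-compare below). */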
4239 code = emit_imm64 (code, ARMREG_LR, 0x7fefffffffffffffLL);
4240 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4241 arm_fabs_d (code, FP_TEMP_REG2, sreg1);
4242 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG);
4243 code = emit_cond_exc (cfg, code, OP_COND_EXC_GT, "ArithmeticException");
4244 /* Check for nans */
4245 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG2);
4246 code = emit_cond_exc (cfg, code, OP_COND_EXC_OV, "ArithmeticException");
4247 arm_fmovd (code, dreg, sreg1);
4248 break;
4250 /* R4 */
4251 case OP_RADD:
4252 arm_fadd_s (code, dreg, sreg1, sreg2);
4253 break;
4254 case OP_RSUB:
4255 arm_fsub_s (code, dreg, sreg1, sreg2);
4256 break;
4257 case OP_RMUL:
4258 arm_fmul_s (code, dreg, sreg1, sreg2);
4259 break;
4260 case OP_RDIV:
4261 arm_fdiv_s (code, dreg, sreg1, sreg2);
4262 break;
4263 case OP_RNEG:
4264 arm_fneg_s (code, dreg, sreg1);
4265 break;
4266 case OP_RCONV_TO_I1:
4267 arm_fcvtzs_sx (code, dreg, sreg1);
4268 arm_sxtbx (code, dreg, dreg);
4269 break;
4270 case OP_RCONV_TO_U1:
4271 arm_fcvtzu_sx (code, dreg, sreg1);
4272 arm_uxtbw (code, dreg, dreg);
4273 break;
4274 case OP_RCONV_TO_I2:
4275 arm_fcvtzs_sx (code, dreg, sreg1);
4276 arm_sxthx (code, dreg, dreg);
4277 break;
4278 case OP_RCONV_TO_U2:
4279 arm_fcvtzu_sx (code, dreg, sreg1);
4280 arm_uxthw (code, dreg, dreg);
4281 break;
4282 case OP_RCONV_TO_I4:
4283 arm_fcvtzs_sx (code, dreg, sreg1);
4284 arm_sxtwx (code, dreg, dreg);
4285 break;
4286 case OP_RCONV_TO_U4:
4287 arm_fcvtzu_sx (code, dreg, sreg1);
4288 break;
4289 case OP_RCONV_TO_I8:
4290 arm_fcvtzs_sx (code, dreg, sreg1);
4291 break;
4292 case OP_RCONV_TO_U8:
4293 arm_fcvtzu_sx (code, dreg, sreg1);
4294 break;
4295 case OP_RCONV_TO_R8:
4296 arm_fcvt_sd (code, dreg, sreg1);
4297 break;
4298 case OP_RCONV_TO_R4:
4299 if (dreg != sreg1)
4300 arm_fmovs (code, dreg, sreg1);
4301 break;
4302 case OP_RCEQ:
4303 case OP_RCLT:
4304 case OP_RCLT_UN:
4305 case OP_RCGT:
4306 case OP_RCGT_UN:
4307 case OP_RCNEQ:
4308 case OP_RCLE:
4309 case OP_RCGE: {
4310 int cond;
4312 cond = opcode_to_armcond (ins->opcode);
4313 arm_fcmps (code, sreg1, sreg2);
4314 arm_cset (code, cond, dreg);
4315 break;
4316 }
4318 /* CALLS */
4319 case OP_VOIDCALL:
4320 case OP_CALL:
4321 case OP_LCALL:
4322 case OP_FCALL:
4323 case OP_RCALL:
4324 case OP_VCALL2: {
4326 call = (MonoCallInst*)ins;
4327 const MonoJumpInfoTarget patch = mono_call_to_patch (call);
4328 code = emit_call (cfg, code, patch.type, patch.target);
4329 code = emit_move_return_value (cfg, code, ins);
4330 break;
4331 }
4332 case OP_VOIDCALL_REG:
4333 case OP_CALL_REG:
4334 case OP_LCALL_REG:
4335 case OP_FCALL_REG:
4336 case OP_RCALL_REG:
4337 case OP_VCALL2_REG:
4338 arm_blrx (code, sreg1);
4339 code = emit_move_return_value (cfg, code, ins);
4340 break;
4341 case OP_VOIDCALL_MEMBASE:
4342 case OP_CALL_MEMBASE:
4343 case OP_LCALL_MEMBASE:
4344 case OP_FCALL_MEMBASE:
4345 case OP_RCALL_MEMBASE:
4346 case OP_VCALL2_MEMBASE:
4347 code = emit_ldrx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4348 arm_blrx (code, ARMREG_IP0);
4349 code = emit_move_return_value (cfg, code, ins);
4350 break;
4352 case OP_TAILCALL_PARAMETER:
4353 // This opcode helps compute sizes, i.e.
4354 // of the subsequent OP_TAILCALL, but contributes no code.
4355 g_assert (ins->next);
4356 break;
4358 case OP_TAILCALL:
4359 case OP_TAILCALL_MEMBASE:
4360 case OP_TAILCALL_REG: {
4361 int branch_reg = ARMREG_IP0;
4362 guint64 free_reg = 1 << ARMREG_IP1;
4363 call = (MonoCallInst*)ins;
4365 g_assert (!cfg->method->save_lmf);
4367 max_len += call->stack_usage / sizeof (target_mgreg_t) * ins_get_size (OP_TAILCALL_PARAMETER);
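/* Worst-case size estimate: the argument-copy loop below emits a fixed-size
 * ldr/str pair per stack slot, so max_len grows by one OP_TAILCALL_PARAMETER's
 * worth of code for each of the call->stack_usage / sizeof (target_mgreg_t) slots. */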
4368 while (G_UNLIKELY (offset + max_len > cfg->code_size)) {
4369 cfg->code_size *= 2;
4370 cfg->native_code = (unsigned char *)mono_realloc_native_code (cfg);
4371 code = cfg->native_code + offset;
4372 cfg->stat_code_reallocs++;
4373 }
4375 switch (ins->opcode) {
4376 case OP_TAILCALL:
4377 free_reg = (1 << ARMREG_IP0) | (1 << ARMREG_IP1);
4378 break;
4380 case OP_TAILCALL_REG:
4381 g_assert (sreg1 != -1);
4382 g_assert (sreg1 != ARMREG_IP0);
4383 g_assert (sreg1 != ARMREG_IP1);
4384 g_assert (sreg1 != ARMREG_LR);
4385 g_assert (sreg1 != ARMREG_SP);
4386 g_assert (sreg1 != ARMREG_R28);
4387 if ((1 << sreg1) & MONO_ARCH_CALLEE_SAVED_REGS) { /* sreg1 would be clobbered by the callee-saved register restore below */
4388 arm_movx (code, branch_reg, sreg1);
4389 } else {
4390 free_reg = (1 << ARMREG_IP0) | (1 << ARMREG_IP1);
4391 branch_reg = sreg1;
4392 }
4393 break;
4395 case OP_TAILCALL_MEMBASE:
4396 g_assert (ins->inst_basereg != -1);
4397 g_assert (ins->inst_basereg != ARMREG_IP0);
4398 g_assert (ins->inst_basereg != ARMREG_IP1);
4399 g_assert (ins->inst_basereg != ARMREG_LR);
4400 g_assert (ins->inst_basereg != ARMREG_SP);
4401 g_assert (ins->inst_basereg != ARMREG_R28);
4402 code = emit_ldrx (code, branch_reg, ins->inst_basereg, ins->inst_offset);
4403 break;
4405 default:
4406 g_assert_not_reached ();
4407 }
4409 // Copy stack arguments.
4410 // FIXME a fixed size memcpy is desirable here,
4411 // at least for larger values of stack_usage.
4412 for (int i = 0; i < call->stack_usage; i += sizeof (target_mgreg_t)) {
4413 code = emit_ldrx (code, ARMREG_LR, ARMREG_SP, i);
4414 code = emit_strx (code, ARMREG_LR, ARMREG_R28, i);
4415 }
4417 /* Restore registers */
4418 code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
4420 /* Destroy frame */
4421 code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, free_reg);
4423 switch (ins->opcode) {
4424 case OP_TAILCALL:
4425 if (cfg->compile_aot) {
4426 /* This is not a PLT patch */
4427 code = emit_aotconst (cfg, code, branch_reg, MONO_PATCH_INFO_METHOD_JUMP, call->method);
4428 } else {
4429 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, call->method, MONO_R_ARM64_B);
4430 arm_b (code, code);
4431 cfg->thunk_area += THUNK_SIZE;
4432 break;
4433 }
4434 // fallthrough
4435 case OP_TAILCALL_MEMBASE:
4436 case OP_TAILCALL_REG:
4437 arm_brx (code, branch_reg);
4438 break;
4440 default:
4441 g_assert_not_reached ();
4442 }
4444 ins->flags |= MONO_INST_GC_CALLSITE;
4445 ins->backend.pc_offset = code - cfg->native_code;
4446 break;
4447 }
4448 case OP_ARGLIST:
4449 g_assert (cfg->arch.cinfo);
4450 code = emit_addx_imm (code, ARMREG_IP0, cfg->arch.args_reg, cfg->arch.cinfo->sig_cookie.offset);
4451 arm_strx (code, ARMREG_IP0, sreg1, 0);
4452 break;
4453 case OP_DYN_CALL: {
4454 MonoInst *var = cfg->dyn_call_var;
4455 guint8 *labels [16];
4456 int i;
4459 /* sreg1 points to a DynCallArgs structure initialized by mono_arch_start_dyn_call ().
4460 * sreg2 is the function to call. */
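/* The DynCallArgs fields consumed below, as filled in by
 * mono_arch_start_dyn_call (): n_fpargs/fpregs (d0-d7 arguments), regs
 * (r0-r8, including the r8 vret address), n_stackargs (slots copied onto the
 * callee area allocated below), and res/res2/n_fpret for the return value. */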
4463 g_assert (var->opcode == OP_REGOFFSET);
4465 arm_movx (code, ARMREG_LR, sreg1);
4466 arm_movx (code, ARMREG_IP1, sreg2);
4468 /* Save args buffer */
4469 code = emit_strx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4471 /* Set fp argument regs */
4472 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpargs));
4473 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4474 labels [0] = code;
4475 arm_bcc (code, ARMCOND_EQ, 0);
4476 for (i = 0; i < 8; ++i)
4477 code = emit_ldrfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4478 arm_patch_rel (labels [0], code, MONO_R_ARM64_BCC);
4480 /* Allocate callee area */
4481 code = emit_ldrx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_stackargs));
4482 arm_lslw (code, ARMREG_R0, ARMREG_R0, 3);
4483 arm_movspx (code, ARMREG_R1, ARMREG_SP);
4484 arm_subx (code, ARMREG_R1, ARMREG_R1, ARMREG_R0);
4485 arm_movspx (code, ARMREG_SP, ARMREG_R1);
4487 /* Set stack args */
4488 /* R1 = limit */
4489 code = emit_ldrx (code, ARMREG_R1, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_stackargs));
4490 /* R2 = pointer into 'regs' */
4491 code = emit_imm (code, ARMREG_R2, MONO_STRUCT_OFFSET (DynCallArgs, regs) + ((PARAM_REGS + 1) * sizeof (target_mgreg_t)));
4492 arm_addx (code, ARMREG_R2, ARMREG_LR, ARMREG_R2);
4493 /* R3 = pointer to stack */
4494 arm_movspx (code, ARMREG_R3, ARMREG_SP);
4495 labels [0] = code;
4496 arm_b (code, code);
4497 labels [1] = code;
4498 code = emit_ldrx (code, ARMREG_R5, ARMREG_R2, 0);
4499 code = emit_strx (code, ARMREG_R5, ARMREG_R3, 0);
4500 code = emit_addx_imm (code, ARMREG_R2, ARMREG_R2, sizeof (target_mgreg_t));
4501 code = emit_addx_imm (code, ARMREG_R3, ARMREG_R3, sizeof (target_mgreg_t));
4502 code = emit_subx_imm (code, ARMREG_R1, ARMREG_R1, 1);
4503 arm_patch_rel (labels [0], code, MONO_R_ARM64_B);
4504 arm_cmpw (code, ARMREG_R1, ARMREG_RZR);
4505 arm_bcc (code, ARMCOND_GT, labels [1]);
4507 /* Set argument registers + r8 */
4508 code = mono_arm_emit_load_regarray (code, 0x1ff, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, regs));
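/* 0x1ff = r0-r8: the eight integer argument registers plus r8, which the
 * AAPCS64 uses as the indirect (struct) return address register. */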
4510 /* Make the call */
4511 arm_blrx (code, ARMREG_IP1);
4513 /* Save result */
4514 code = emit_ldrx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4515 arm_strx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res));
4516 arm_strx (code, ARMREG_R1, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res2));
4517 /* Save fp result */
4518 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpret));
4519 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4520 labels [1] = code;
4521 arm_bcc (code, ARMCOND_EQ, 0);
4522 for (i = 0; i < 8; ++i)
4523 code = emit_strfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4524 arm_patch_rel (labels [1], code, MONO_R_ARM64_BCC);
4525 break;
4526 }
4528 case OP_GENERIC_CLASS_INIT: {
4529 int byte_offset;
4530 guint8 *jump;
4532 byte_offset = MONO_STRUCT_OFFSET (MonoVTable, initialized);
4534 /* Load vtable->initialized */
4535 arm_ldrsbx (code, ARMREG_IP0, sreg1, byte_offset);
4536 jump = code;
4537 arm_cbnzx (code, ARMREG_IP0, 0);
4539 /* Slowpath */
4540 g_assert (sreg1 == ARMREG_R0);
4541 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4542 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_generic_class_init));
4544 mono_arm_patch (jump, code, MONO_R_ARM64_CBZ);
4545 break;
4546 }
4548 case OP_CHECK_THIS:
4549 arm_ldrb (code, ARMREG_LR, sreg1, 0);
4550 break;
4551 case OP_NOT_NULL:
4552 case OP_NOT_REACHED:
4553 case OP_DUMMY_USE:
4554 case OP_DUMMY_ICONST:
4555 case OP_DUMMY_I8CONST:
4556 case OP_DUMMY_R8CONST:
4557 case OP_DUMMY_R4CONST:
4558 break;
4559 case OP_IL_SEQ_POINT:
4560 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
4561 break;
4563 /* EH */
4564 case OP_COND_EXC_C:
4565 case OP_COND_EXC_IC:
4566 case OP_COND_EXC_OV:
4567 case OP_COND_EXC_IOV:
4568 case OP_COND_EXC_NC:
4569 case OP_COND_EXC_INC:
4570 case OP_COND_EXC_NO:
4571 case OP_COND_EXC_INO:
4572 case OP_COND_EXC_EQ:
4573 case OP_COND_EXC_IEQ:
4574 case OP_COND_EXC_NE_UN:
4575 case OP_COND_EXC_INE_UN:
4576 case OP_COND_EXC_ILT:
4577 case OP_COND_EXC_LT:
4578 case OP_COND_EXC_ILT_UN:
4579 case OP_COND_EXC_LT_UN:
4580 case OP_COND_EXC_IGT:
4581 case OP_COND_EXC_GT:
4582 case OP_COND_EXC_IGT_UN:
4583 case OP_COND_EXC_GT_UN:
4584 case OP_COND_EXC_IGE:
4585 case OP_COND_EXC_GE:
4586 case OP_COND_EXC_IGE_UN:
4587 case OP_COND_EXC_GE_UN:
4588 case OP_COND_EXC_ILE:
4589 case OP_COND_EXC_LE:
4590 case OP_COND_EXC_ILE_UN:
4591 case OP_COND_EXC_LE_UN:
4592 code = emit_cond_exc (cfg, code, ins->opcode, (const char*)ins->inst_p1);
4593 break;
4594 case OP_THROW:
4595 if (sreg1 != ARMREG_R0)
4596 arm_movx (code, ARMREG_R0, sreg1);
4597 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4598 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arch_throw_exception));
4599 break;
4600 case OP_RETHROW:
4601 if (sreg1 != ARMREG_R0)
4602 arm_movx (code, ARMREG_R0, sreg1);
4603 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4604 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arch_rethrow_exception));
4605 break;
4606 case OP_CALL_HANDLER:
4607 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_BL);
4608 arm_bl (code, 0);
4609 cfg->thunk_area += THUNK_SIZE;
4610 for (GList *tmp = ins->inst_eh_blocks; tmp != bb->clause_holes; tmp = tmp->prev)
4611 mono_cfg_add_try_hole (cfg, ((MonoLeaveClause *) tmp->data)->clause, code, bb);
4612 break;
4613 case OP_START_HANDLER: {
4614 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4616 /* Save caller address */
4617 code = emit_strx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4620 /* Reserve a param area, see test_0_finally_param_area ().
4621 * This is needed because the param area is not set up when
4622 * we are called from EH code. */
4624 if (cfg->param_area)
4625 code = emit_subx_sp_imm (code, cfg->param_area);
4626 break;
4627 }
4628 case OP_ENDFINALLY:
4629 case OP_ENDFILTER: {
4630 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4632 if (cfg->param_area)
4633 code = emit_addx_sp_imm (code, cfg->param_area);
4635 if (ins->opcode == OP_ENDFILTER && sreg1 != ARMREG_R0)
4636 arm_movx (code, ARMREG_R0, sreg1);
4638 /* Return either to the instruction after the branch in OP_CALL_HANDLER, or to the EH code */
4639 code = emit_ldrx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4640 arm_brx (code, ARMREG_LR);
4641 break;
4642 }
4643 case OP_GET_EX_OBJ:
4644 if (ins->dreg != ARMREG_R0)
4645 arm_movx (code, ins->dreg, ARMREG_R0);
4646 break;
4647 case OP_LIVERANGE_START: {
4648 if (cfg->verbose_level > 1)
4649 printf ("R%d START=0x%x\n", MONO_VARINFO (cfg, ins->inst_c0)->vreg, (int)(code - cfg->native_code));
4650 MONO_VARINFO (cfg, ins->inst_c0)->live_range_start = code - cfg->native_code;
4651 break;
4652 }
4653 case OP_LIVERANGE_END: {
4654 if (cfg->verbose_level > 1)
4655 printf ("R%d END=0x%x\n", MONO_VARINFO (cfg, ins->inst_c0)->vreg, (int)(code - cfg->native_code));
4656 MONO_VARINFO (cfg, ins->inst_c0)->live_range_end = code - cfg->native_code;
4657 break;
4658 }
4659 case OP_GC_SAFE_POINT: {
4660 guint8 *buf [1];
4662 arm_ldrx (code, ARMREG_IP1, ins->sreg1, 0);
4663 /* Call it if it is non-null */
4664 buf [0] = code;
4665 arm_cbzx (code, ARMREG_IP1, 0);
4666 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_threads_state_poll));
4667 mono_arm_patch (buf [0], code, MONO_R_ARM64_CBZ);
4668 break;
4669 }
4670 case OP_FILL_PROF_CALL_CTX:
4671 for (int i = 0; i < MONO_MAX_IREGS; i++)
4672 if ((MONO_ARCH_CALLEE_SAVED_REGS & (1 << i)) || i == ARMREG_SP || i == ARMREG_FP)
4673 arm_strx (code, i, ins->sreg1, MONO_STRUCT_OFFSET (MonoContext, regs) + i * sizeof (target_mgreg_t));
4674 break;
4675 default:
4676 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4677 g_assert_not_reached ();
4678 }
4680 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4681 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %d)",
4682 mono_inst_name (ins->opcode), max_len, (int)(code - cfg->native_code - offset));
4683 g_assert_not_reached ();
4684 }
4686 set_code_cursor (cfg, code);
4687 }
4689 /* If the compiled code size is larger than the bcc displacement (19 bits signed),
4690 * insert branch islands between/inside basic blocks. */
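/* A signed 19-bit word displacement reaches +/-2^18 instructions, i.e.
 * +/-1 MiB of code, which is the limit the check below guards against. */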
4692 if (cfg->arch.cond_branch_islands)
4693 code = emit_branch_island (cfg, code, start_offset);
4694 }
4696 static guint8*
4697 emit_move_args (MonoCompile *cfg, guint8 *code)
4698 {
4699 MonoInst *ins;
4700 CallInfo *cinfo;
4701 ArgInfo *ainfo;
4702 int i, part;
4703 MonoMethodSignature *sig = mono_method_signature_internal (cfg->method);
4705 cinfo = cfg->arch.cinfo;
4706 g_assert (cinfo);
4707 for (i = 0; i < cinfo->nargs; ++i) {
4708 ainfo = cinfo->args + i;
4709 ins = cfg->args [i];
4711 if (ins->opcode == OP_REGVAR) {
4712 switch (ainfo->storage) {
4713 case ArgInIReg:
4714 arm_movx (code, ins->dreg, ainfo->reg);
4715 if (i == 0 && sig->hasthis) {
4716 mono_add_var_location (cfg, ins, TRUE, ainfo->reg, 0, 0, code - cfg->native_code);
4717 mono_add_var_location (cfg, ins, TRUE, ins->dreg, 0, code - cfg->native_code, 0);
4718 }
4719 break;
4720 case ArgOnStack:
4721 switch (ainfo->slot_size) {
4722 case 1:
4723 if (ainfo->sign)
4724 code = emit_ldrsbx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4725 else
4726 code = emit_ldrb (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4727 break;
4728 case 2:
4729 if (ainfo->sign)
4730 code = emit_ldrshx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4731 else
4732 code = emit_ldrh (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4733 break;
4734 case 4:
4735 if (ainfo->sign)
4736 code = emit_ldrswx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4737 else
4738 code = emit_ldrw (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4739 break;
4740 default:
4741 code = emit_ldrx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4742 break;
4743 }
4744 break;
4745 default:
4746 g_assert_not_reached ();
4747 break;
4748 }
4749 } else {
4750 if (ainfo->storage != ArgVtypeByRef && ainfo->storage != ArgVtypeByRefOnStack)
4751 g_assert (ins->opcode == OP_REGOFFSET);
4753 switch (ainfo->storage) {
4754 case ArgInIReg:
4755 /* Stack slots for arguments have size 8 */
4756 code = emit_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4757 if (i == 0 && sig->hasthis) {
4758 mono_add_var_location (cfg, ins, TRUE, ainfo->reg, 0, 0, code - cfg->native_code);
4759 mono_add_var_location (cfg, ins, FALSE, ins->inst_basereg, ins->inst_offset, code - cfg->native_code, 0);
4760 }
4761 break;
4762 case ArgInFReg:
4763 code = emit_strfpx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4764 break;
4765 case ArgInFRegR4:
4766 code = emit_strfpw (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4767 break;
4768 case ArgOnStack:
4769 case ArgOnStackR4:
4770 case ArgOnStackR8:
4771 case ArgVtypeByRefOnStack:
4772 case ArgVtypeOnStack:
4773 break;
4774 case ArgVtypeByRef: {
4775 MonoInst *addr_arg = ins->inst_left;
4777 if (ainfo->gsharedvt) {
4778 g_assert (ins->opcode == OP_GSHAREDVT_ARG_REGOFFSET);
4779 arm_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4780 } else {
4781 g_assert (ins->opcode == OP_VTARG_ADDR);
4782 g_assert (addr_arg->opcode == OP_REGOFFSET);
4783 arm_strx (code, ainfo->reg, addr_arg->inst_basereg, addr_arg->inst_offset);
4784 }
4785 break;
4786 }
4787 case ArgVtypeInIRegs:
4788 for (part = 0; part < ainfo->nregs; part ++) {
4789 code = emit_strx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + (part * 8));
4790 }
4791 break;
4792 case ArgHFA:
4793 for (part = 0; part < ainfo->nregs; part ++) {
4794 if (ainfo->esize == 4)
4795 code = emit_strfpw (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
4796 else
4797 code = emit_strfpx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
4798 }
4799 break;
4800 default:
4801 g_assert_not_reached ();
4802 break;
4803 }
4804 }
4805 }
4807 return code;
4808 }
4811 /* emit_store_regarray:
4813 * Emit code to store the registers in REGS into the appropriate elements of
4814 * the register array at BASEREG+OFFSET. */
4816 static __attribute__ ((__warn_unused_result__)) guint8*
4817 emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4818 {
4819 int i;
4821 for (i = 0; i < 32; ++i) {
4822 if (regs & (1 << i)) {
4823 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4824 arm_stpx (code, i, i + 1, basereg, offset + (i * 8));
4825 i++;
4826 } else if (i == ARMREG_SP) {
4827 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4828 arm_strx (code, ARMREG_IP1, basereg, offset + (i * 8));
4829 } else {
4830 arm_strx (code, i, basereg, offset + (i * 8));
4831 }
4832 }
4833 }
4834 return code;
4835 }
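/* Usage sketch (hypothetical offset value): with regs = (1 << ARMREG_R19) |
 * (1 << ARMREG_R20) this emits a single stpx into array slots 19/20, since
 * adjacent registers are paired; an isolated register falls back to strx,
 * and sp is copied through ip1 first because it cannot be stored directly. */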
4838 /* emit_load_regarray:
4840 * Emit code to load the registers in REGS from the appropriate elements of
4841 * the register array at BASEREG+OFFSET. */
4843 static __attribute__ ((__warn_unused_result__)) guint8*
4844 emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4845 {
4846 int i;
4848 for (i = 0; i < 32; ++i) {
4849 if (regs & (1 << i)) {
4850 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4851 if (offset + (i * 8) < 500)
4852 arm_ldpx (code, i, i + 1, basereg, offset + (i * 8));
4853 else {
4854 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4855 code = emit_ldrx (code, i + 1, basereg, offset + ((i + 1) * 8));
4856 }
4857 i++;
4858 } else if (i == ARMREG_SP) {
4859 g_assert_not_reached ();
4860 } else {
4861 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4862 }
4863 }
4864 }
4865 return code;
4866 }
4869 /* emit_store_regset:
4871 * Emit code to store the registers in REGS into consecutive memory locations starting
4872 * at BASEREG+OFFSET. */
4874 static __attribute__ ((__warn_unused_result__)) guint8*
4875 emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4876 {
4877 int i, pos;
4879 pos = 0;
4880 for (i = 0; i < 32; ++i) {
4881 if (regs & (1 << i)) {
4882 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4883 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
4884 i++;
4885 pos++;
4886 } else if (i == ARMREG_SP) {
4887 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4888 arm_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
4889 } else {
4890 arm_strx (code, i, basereg, offset + (pos * 8));
4891 }
4892 pos++;
4893 }
4894 }
4895 return code;
4896 }
4899 /* emit_load_regset:
4901 * Emit code to load the registers in REGS from consecutive memory locations starting
4902 * at BASEREG+OFFSET. */
4904 static __attribute__ ((__warn_unused_result__)) guint8*
4905 emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset)
4906 {
4907 int i, pos;
4909 pos = 0;
4910 for (i = 0; i < 32; ++i) {
4911 if (regs & (1 << i)) {
4912 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4913 arm_ldpx (code, i, i + 1, basereg, offset + (pos * 8));
4914 i++;
4915 pos++;
4916 } else if (i == ARMREG_SP) {
4917 g_assert_not_reached ();
4918 } else {
4919 arm_ldrx (code, i, basereg, offset + (pos * 8));
4920 }
4921 pos++;
4922 }
4923 }
4924 return code;
4925 }
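/* Note the layout difference between the two helper families: the regarray
 * versions address each register at offset + reg * 8 (a MonoContext-style
 * array), while the regset versions pack only the requested registers into
 * consecutive slots at offset + pos * 8. */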
4927 __attribute__ ((__warn_unused_result__)) guint8*
4928 mono_arm_emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4929 {
4930 return emit_load_regarray (code, regs, basereg, offset);
4931 }
4933 __attribute__ ((__warn_unused_result__)) guint8*
4934 mono_arm_emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4935 {
4936 return emit_store_regarray (code, regs, basereg, offset);
4937 }
4939 __attribute__ ((__warn_unused_result__)) guint8*
4940 mono_arm_emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4941 {
4942 return emit_store_regset (code, regs, basereg, offset);
4943 }
4945 /* Same as emit_store_regset, but emit unwind info too */
4946 /* CFA_OFFSET is the offset between the CFA and basereg */
4947 static __attribute__ ((__warn_unused_result__)) guint8*
4948 emit_store_regset_cfa (MonoCompile *cfg, guint8 *code, guint64 regs, int basereg, int offset, int cfa_offset, guint64 no_cfa_regset)
4949 {
4950 int i, j, pos, nregs;
4951 guint32 cfa_regset = regs & ~no_cfa_regset;
4953 pos = 0;
4954 for (i = 0; i < 32; ++i) {
4955 nregs = 1;
4956 if (regs & (1 << i)) {
4957 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4958 if (offset < 256) {
4959 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
4960 } else {
4961 code = emit_strx (code, i, basereg, offset + (pos * 8));
4962 code = emit_strx (code, i + 1, basereg, offset + (pos * 8) + 8);
4963 }
4964 nregs = 2;
4965 } else if (i == ARMREG_SP) {
4966 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4967 code = emit_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
4968 } else {
4969 code = emit_strx (code, i, basereg, offset + (pos * 8));
4970 }
4972 for (j = 0; j < nregs; ++j) {
4973 if (cfa_regset & (1 << (i + j)))
4974 mono_emit_unwind_op_offset (cfg, code, i + j, (- cfa_offset) + offset + ((pos + j) * 8));
4975 }
4977 i += nregs - 1;
4978 pos += nregs;
4979 }
4980 }
4981 return code;
4982 }
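/* Worked example with hypothetical numbers: for cfa_offset = 0x140 and
 * offset = 0x20, a register stored at basereg + 0x20 gets the unwind rule
 * "saved at CFA - 0x140 + 0x20", which is consistent because the CFA is
 * basereg + cfa_offset here. */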
4985 /* emit_setup_lmf:
4987 * Emit code to initialize an LMF structure at LMF_OFFSET.
4988 * Clobbers ip0/ip1. */
4990 static guint8*
4991 emit_setup_lmf (MonoCompile *cfg, guint8 *code, gint32 lmf_offset, int cfa_offset)
4992 {
4994 /* The LMF should contain all the state required to be able to reconstruct the machine state
4995 * at the current point of execution. Since the LMF is only read during EH, only callee
4996 * saved etc. registers need to be saved.
4997 * FIXME: Save callee saved fp regs, JITted code doesn't use them, but native code does, and they
4998 * need to be restored during EH. */
5001 /* pc */
5002 arm_adrx (code, ARMREG_LR, code);
5003 code = emit_strx (code, ARMREG_LR, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, pc));
5004 /* gregs + fp + sp */
5005 /* Don't emit unwind info for sp/fp; they are already handled in the prolog */
5006 code = emit_store_regset_cfa (cfg, code, MONO_ARCH_LMF_REGS, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs), cfa_offset, (1 << ARMREG_FP) | (1 << ARMREG_SP));
5008 return code;
5009 }
5011 guint8 *
5012 mono_arch_emit_prolog (MonoCompile *cfg)
5013 {
5014 MonoMethod *method = cfg->method;
5015 MonoMethodSignature *sig;
5016 MonoBasicBlock *bb;
5017 guint8 *code;
5018 int cfa_offset, max_offset;
5020 sig = mono_method_signature_internal (method);
5021 cfg->code_size = 256 + sig->param_count * 64;
5022 code = cfg->native_code = g_malloc (cfg->code_size);
5024 /* This can be unaligned */
5025 cfg->stack_offset = ALIGN_TO (cfg->stack_offset, MONO_ARCH_FRAME_ALIGNMENT);
5028 /* - Setup frame */
5030 cfa_offset = 0;
5031 mono_emit_unwind_op_def_cfa (cfg, code, ARMREG_SP, 0);
5033 /* Setup frame */
5034 if (arm_is_ldpx_imm (-cfg->stack_offset)) {
5035 arm_stpx_pre (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, -cfg->stack_offset);
5036 } else {
5037 /* sp -= cfg->stack_offset */
5038 /* This clobbers ip0/ip1 */
5039 code = emit_subx_sp_imm (code, cfg->stack_offset);
5040 arm_stpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
5041 }
5042 cfa_offset += cfg->stack_offset;
5043 mono_emit_unwind_op_def_cfa_offset (cfg, code, cfa_offset);
5044 mono_emit_unwind_op_offset (cfg, code, ARMREG_FP, (- cfa_offset) + 0);
5045 mono_emit_unwind_op_offset (cfg, code, ARMREG_LR, (- cfa_offset) + 8);
5046 arm_movspx (code, ARMREG_FP, ARMREG_SP);
5047 mono_emit_unwind_op_def_cfa_reg (cfg, code, ARMREG_FP);
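/* At this point sp == fp, [fp] holds the caller's fp, [fp + 8] holds lr,
 * and the CFA is fp + cfg->stack_offset; the rest of the prolog addresses
 * the frame fp-relative. */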
5048 if (cfg->param_area) {
5049 /* The param area is below the frame pointer */
5050 code = emit_subx_sp_imm (code, cfg->param_area);
5051 }
5053 if (cfg->method->save_lmf) {
5054 code = emit_setup_lmf (cfg, code, cfg->lmf_var->inst_offset, cfa_offset);
5055 } else {
5056 /* Save gregs */
5057 code = emit_store_regset_cfa (cfg, code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset, cfa_offset, 0);
5058 }
5060 /* Setup args reg */
5061 if (cfg->arch.args_reg) {
5062 /* The register was already saved above */
5063 code = emit_addx_imm (code, cfg->arch.args_reg, ARMREG_FP, cfg->stack_offset);
5064 }
5066 /* Save return area addr received in R8 */
5067 if (cfg->vret_addr) {
5068 MonoInst *ins = cfg->vret_addr;
5070 g_assert (ins->opcode == OP_REGOFFSET);
5071 code = emit_strx (code, ARMREG_R8, ins->inst_basereg, ins->inst_offset);
5072 }
5074 /* Save mrgctx received in MONO_ARCH_RGCTX_REG */
5075 if (cfg->rgctx_var) {
5076 MonoInst *ins = cfg->rgctx_var;
5078 g_assert (ins->opcode == OP_REGOFFSET);
5080 code = emit_strx (code, MONO_ARCH_RGCTX_REG, ins->inst_basereg, ins->inst_offset);
5082 mono_add_var_location (cfg, cfg->rgctx_var, TRUE, MONO_ARCH_RGCTX_REG, 0, 0, code - cfg->native_code);
5083 mono_add_var_location (cfg, cfg->rgctx_var, FALSE, ins->inst_basereg, ins->inst_offset, code - cfg->native_code, 0);
5084 }
5087 /* Move arguments to their registers/stack locations. */
5089 code = emit_move_args (cfg, code);
5091 /* Initialize seq_point_info_var */
5092 if (cfg->arch.seq_point_info_var) {
5093 MonoInst *ins = cfg->arch.seq_point_info_var;
5095 /* Initialize the variable from a GOT slot */
5096 code = emit_aotconst (cfg, code, ARMREG_IP0, MONO_PATCH_INFO_SEQ_POINT_INFO, cfg->method);
5097 g_assert (ins->opcode == OP_REGOFFSET);
5098 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
5100 /* Initialize ss_tramp_var */
5101 ins = cfg->arch.ss_tramp_var;
5102 g_assert (ins->opcode == OP_REGOFFSET);
5104 code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP0, MONO_STRUCT_OFFSET (SeqPointInfo, ss_tramp_addr));
5105 code = emit_strx (code, ARMREG_IP1, ins->inst_basereg, ins->inst_offset);
5106 } else {
5107 MonoInst *ins;
5109 if (cfg->arch.ss_tramp_var) {
5110 /* Initialize ss_tramp_var */
5111 ins = cfg->arch.ss_tramp_var;
5112 g_assert (ins->opcode == OP_REGOFFSET);
5114 code = emit_imm64 (code, ARMREG_IP0, (guint64)&ss_trampoline);
5115 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
5116 }
5118 if (cfg->arch.bp_tramp_var) {
5119 /* Initialize bp_tramp_var */
5120 ins = cfg->arch.bp_tramp_var;
5121 g_assert (ins->opcode == OP_REGOFFSET);
5123 code = emit_imm64 (code, ARMREG_IP0, (guint64)bp_trampoline);
5124 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
5125 }
5126 }
5128 max_offset = 0;
5129 if (cfg->opt & MONO_OPT_BRANCH) {
5130 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
5131 MonoInst *ins;
5132 bb->max_offset = max_offset;
5134 MONO_BB_FOR_EACH_INS (bb, ins) {
5135 max_offset += ins_get_size (ins->opcode);
5136 }
5137 }
5138 }
5139 if (max_offset > 0x3ffff * 4)
5140 cfg->arch.cond_branch_islands = TRUE;
5142 return code;
5143 }
5145 void
5146 mono_arch_emit_epilog (MonoCompile *cfg)
5147 {
5148 CallInfo *cinfo;
5149 int max_epilog_size;
5150 guint8 *code;
5151 int i;
5153 max_epilog_size = 16 + 20*4;
5154 code = realloc_code (cfg, max_epilog_size);
5156 if (cfg->method->save_lmf) {
5157 code = mono_arm_emit_load_regarray (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->lmf_var->inst_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs) - (MONO_ARCH_FIRST_LMF_REG * 8));
5158 } else {
5159 /* Restore gregs */
5160 code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
5161 }
5163 /* Load returned vtypes into registers if needed */
5164 cinfo = cfg->arch.cinfo;
5165 switch (cinfo->ret.storage) {
5166 case ArgVtypeInIRegs: {
5167 MonoInst *ins = cfg->ret;
5169 for (i = 0; i < cinfo->ret.nregs; ++i)
5170 code = emit_ldrx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + (i * 8));
5171 break;
5172 }
5173 case ArgHFA: {
5174 MonoInst *ins = cfg->ret;
5176 for (i = 0; i < cinfo->ret.nregs; ++i) {
5177 if (cinfo->ret.esize == 4)
5178 code = emit_ldrfpw (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
5179 else
5180 code = emit_ldrfpx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
5181 }
5182 break;
5183 }
5184 default:
5185 break;
5186 }
5188 /* Destroy frame */
5189 code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, (1 << ARMREG_IP0) | (1 << ARMREG_IP1));
5191 arm_retx (code, ARMREG_LR);
5193 g_assert (code - (cfg->native_code + cfg->code_len) < max_epilog_size);
5195 set_code_cursor (cfg, code);
5196 }
5198 void
5199 mono_arch_emit_exceptions (MonoCompile *cfg)
5200 {
5201 MonoJumpInfo *ji;
5202 MonoClass *exc_class;
5203 guint8 *code, *ip;
5204 guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM];
5205 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM];
5206 int i, id, size = 0;
5208 for (i = 0; i < MONO_EXC_INTRINS_NUM; i++) {
5209 exc_throw_pos [i] = NULL;
5210 exc_throw_found [i] = 0;
5211 }
5213 for (ji = cfg->patch_info; ji; ji = ji->next) {
5214 if (ji->type == MONO_PATCH_INFO_EXC) {
5215 i = mini_exception_id_by_name ((const char*)ji->data.target);
5216 if (!exc_throw_found [i]) {
5217 size += 32;
5218 exc_throw_found [i] = TRUE;
5219 }
5220 }
5221 }
5223 code = realloc_code (cfg, size);
5225 /* Emit code to raise corlib exceptions */
5226 for (ji = cfg->patch_info; ji; ji = ji->next) {
5227 if (ji->type != MONO_PATCH_INFO_EXC)
5228 continue;
5230 ip = cfg->native_code + ji->ip.i;
5232 id = mini_exception_id_by_name ((const char*)ji->data.target);
5234 if (exc_throw_pos [id]) {
5235 /* ip points to the bcc () in OP_COND_EXC_... */
5236 arm_patch_rel (ip, exc_throw_pos [id], ji->relocation);
5237 ji->type = MONO_PATCH_INFO_NONE;
5238 continue;
5239 }
5241 exc_throw_pos [id] = code;
5242 arm_patch_rel (ip, code, ji->relocation);
5244 /* We are being branched to from the code generated by emit_cond_exc (); the pc is in ip1 */
5246 /* r0 = type token */
5247 exc_class = mono_class_load_from_name (mono_defaults.corlib, "System", ji->data.name);
5248 code = emit_imm (code, ARMREG_R0, m_class_get_type_token (exc_class) - MONO_TOKEN_TYPE_DEF);
5249 /* r1 = throw ip */
5250 arm_movx (code, ARMREG_R1, ARMREG_IP1);
5251 /* Branch to the corlib exception throwing trampoline */
5252 ji->ip.i = code - cfg->native_code;
5253 ji->type = MONO_PATCH_INFO_JIT_ICALL_ID;
5254 ji->data.jit_icall_id = MONO_JIT_ICALL_mono_arch_throw_corlib_exception;
5255 ji->relocation = MONO_R_ARM64_BL;
5256 arm_bl (code, 0);
5257 cfg->thunk_area += THUNK_SIZE;
5258 set_code_cursor (cfg, code);
5259 }
5261 set_code_cursor (cfg, code);
5262 }
5264 MonoInst*
5265 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
5266 {
5267 return NULL;
5268 }
5270 guint32
5271 mono_arch_get_patch_offset (guint8 *code)
5272 {
5273 return 0;
5274 }
5276 gpointer
5277 mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5278 gpointer fail_tramp)
5279 {
5280 int i, buf_len, imt_reg;
5281 guint8 *buf, *code;
5283 #if DEBUG_IMT
5284 printf ("building IMT trampoline for class %s %s entries %d code size %d code at %p end %p vtable %p\n", m_class_get_name_space (vtable->klass), m_class_get_name (vtable->klass), count, size, start, ((guint8*)start) + size, vtable);
5285 for (i = 0; i < count; ++i) {
5286 MonoIMTCheckItem *item = imt_entries [i];
5287 printf ("method %d (%p) %s vtable slot %p is_equals %d chunk size %d\n", i, item->key, item->key->name, &vtable->vtable [item->value.vtable_slot], item->is_equals, item->chunk_size);
5289 #endif
5291 buf_len = 0;
5292 for (i = 0; i < count; ++i) {
5293 MonoIMTCheckItem *item = imt_entries [i];
5294 if (item->is_equals) {
5295 gboolean fail_case = !item->check_target_idx && fail_tramp;
5297 if (item->check_target_idx || fail_case) {
5298 if (!item->compare_done || fail_case) {
5299 buf_len += 4 * 4 + 4;
5300 }
5301 buf_len += 4;
5302 if (item->has_target_code) {
5303 buf_len += 5 * 4;
5304 } else {
5305 buf_len += 6 * 4;
5306 }
5307 if (fail_case) {
5308 buf_len += 5 * 4;
5309 }
5310 } else {
5311 buf_len += 6 * 4;
5312 }
5313 } else {
5314 buf_len += 6 * 4;
5315 }
5316 }
5318 if (fail_tramp)
5319 buf = (guint8*)mono_method_alloc_generic_virtual_trampoline (domain, buf_len);
5320 else
5321 buf = mono_domain_code_reserve (domain, buf_len);
5322 code = buf;
5325 /* We are called by JITted code, which passes in the IMT argument in
5326 * MONO_ARCH_RGCTX_REG (r27). We need to preserve all caller saved regs
5327 * except ip0/ip1. */
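/* Per entry the loop below emits, roughly (pseudo-assembly sketch):
 *     load ip0 with #key ; cmp r27, ip0 ; b.ne <next> ; load ip0 with #target ; br ip0
 * where the vtable-slot variant adds an ldr to fetch the target, and a
 * fail_tramp chain uses the not-equal edge instead of falling through. */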
5329 imt_reg = MONO_ARCH_RGCTX_REG;
5330 for (i = 0; i < count; ++i) {
5331 MonoIMTCheckItem *item = imt_entries [i];
5333 item->code_target = code;
5335 if (item->is_equals) {
5337 /* Check the imt argument against item->key; if equal, jump to either
5338 * item->value.target_code or to vtable [item->value.vtable_slot].
5339 * If fail_tramp is set, jump to it if not-equal. */
5341 gboolean fail_case = !item->check_target_idx && fail_tramp;
5343 if (item->check_target_idx || fail_case) {
5344 /* Compare imt_reg with item->key */
5345 if (!item->compare_done || fail_case) {
5346 // FIXME: Optimize this
5347 code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
5348 arm_cmpx (code, imt_reg, ARMREG_IP0);
5349 }
5350 item->jmp_code = code;
5351 arm_bcc (code, ARMCOND_NE, 0);
5352 /* Jump to target if equals */
5353 if (item->has_target_code) {
5354 code = emit_imm64 (code, ARMREG_IP0, (guint64)item->value.target_code);
5355 arm_brx (code, ARMREG_IP0);
5356 } else {
5357 guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);
5359 code = emit_imm64 (code, ARMREG_IP0, imm);
5360 arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
5361 arm_brx (code, ARMREG_IP0);
5362 }
5364 if (fail_case) {
5365 arm_patch_rel (item->jmp_code, code, MONO_R_ARM64_BCC);
5366 item->jmp_code = NULL;
5367 code = emit_imm64 (code, ARMREG_IP0, (guint64)fail_tramp);
5368 arm_brx (code, ARMREG_IP0);
5369 }
5370 } else {
5371 guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);
5373 code = emit_imm64 (code, ARMREG_IP0, imm);
5374 arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
5375 arm_brx (code, ARMREG_IP0);
5376 }
5377 } else {
5378 code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
5379 arm_cmpx (code, imt_reg, ARMREG_IP0);
5380 item->jmp_code = code;
5381 arm_bcc (code, ARMCOND_HS, 0);
5382 }
5383 }
5384 /* Patch the branches */
5385 for (i = 0; i < count; ++i) {
5386 MonoIMTCheckItem *item = imt_entries [i];
5387 if (item->jmp_code && item->check_target_idx)
5388 arm_patch_rel (item->jmp_code, imt_entries [item->check_target_idx]->code_target, MONO_R_ARM64_BCC);
5389 }
5391 g_assert ((code - buf) < buf_len);
5393 mono_arch_flush_icache (buf, code - buf);
5394 MONO_PROFILER_RAISE (jit_code_buffer, (buf, code - buf, MONO_PROFILER_CODE_BUFFER_IMT_TRAMPOLINE, NULL));
5396 return buf;
5397 }
5399 GSList *
5400 mono_arch_get_trampolines (gboolean aot)
5401 {
5402 return mono_arm_get_exception_trampolines (aot);
5403 }
5405 #else /* DISABLE_JIT */
5407 gpointer
5408 mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5409 gpointer fail_tramp)
5410 {
5411 g_assert_not_reached ();
5412 return NULL;
5413 }
5415 #endif /* !DISABLE_JIT */
5417 #ifdef MONO_ARCH_SOFT_DEBUG_SUPPORTED
5419 void
5420 mono_arch_set_breakpoint (MonoJitInfo *ji, guint8 *ip)
5421 {
5422 guint8 *code = ip;
5423 guint32 native_offset = ip - (guint8*)ji->code_start;
5425 if (ji->from_aot) {
5426 SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), (guint8*)ji->code_start);
5428 g_assert (native_offset % 4 == 0);
5429 g_assert (info->bp_addrs [native_offset / 4] == 0);
5430 info->bp_addrs [native_offset / 4] = (guint8*)mini_get_breakpoint_trampoline ();
5431 } else {
5432 /* ip points to an ldrx */
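/* The sequence at a seq point is assumed to be "ldr ip0, <tramp addr> ; nop";
 * patching the nop at ip + 4 into "blr ip0" arms the breakpoint, and
 * mono_arch_clear_breakpoint () below rewrites it back to a nop. */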
5433 code += 4;
5434 arm_blrx (code, ARMREG_IP0);
5435 mono_arch_flush_icache (ip, code - ip);
5436 }
5437 }
5439 void
5440 mono_arch_clear_breakpoint (MonoJitInfo *ji, guint8 *ip)
5441 {
5442 guint8 *code = ip;
5444 if (ji->from_aot) {
5445 guint32 native_offset = ip - (guint8*)ji->code_start;
5446 SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), (guint8*)ji->code_start);
5448 g_assert (native_offset % 4 == 0);
5449 info->bp_addrs [native_offset / 4] = NULL;
5450 } else {
5451 /* ip points to an ldrx */
5452 code += 4;
5453 arm_nop (code);
5454 mono_arch_flush_icache (ip, code - ip);
5455 }
5456 }
5458 void
5459 mono_arch_start_single_stepping (void)
5460 {
5461 ss_trampoline = mini_get_single_step_trampoline ();
5462 }
5464 void
5465 mono_arch_stop_single_stepping (void)
5466 {
5467 ss_trampoline = NULL;
5468 }
5470 gboolean
5471 mono_arch_is_single_step_event (void *info, void *sigctx)
5472 {
5473 /* We use soft breakpoints on arm64 */
5474 return FALSE;
5475 }
5477 gboolean
5478 mono_arch_is_breakpoint_event (void *info, void *sigctx)
5479 {
5480 /* We use soft breakpoints on arm64 */
5481 return FALSE;
5482 }
5484 void
5485 mono_arch_skip_breakpoint (MonoContext *ctx, MonoJitInfo *ji)
5486 {
5487 g_assert_not_reached ();
5488 }
5490 void
5491 mono_arch_skip_single_step (MonoContext *ctx)
5492 {
5493 g_assert_not_reached ();
5494 }
5496 SeqPointInfo*
5497 mono_arch_get_seq_point_info (MonoDomain *domain, guint8 *code)
5498 {
5499 SeqPointInfo *info;
5500 MonoJitInfo *ji;
5502 // FIXME: Add a free function
5504 mono_domain_lock (domain);
5505 info = (SeqPointInfo*)g_hash_table_lookup (domain_jit_info (domain)->arch_seq_points,
5506 code);
5507 mono_domain_unlock (domain);
5509 if (!info) {
5510 ji = mono_jit_info_table_find (domain, code);
5511 g_assert (ji);
5513 info = g_malloc0 (sizeof (SeqPointInfo) + (ji->code_size / 4) * sizeof(guint8*));
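/* One bp_addrs slot per 32-bit instruction word; mono_arch_set_breakpoint ()
 * above indexes the table with native_offset / 4 when patching AOT'd code. */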
5515 info->ss_tramp_addr = &ss_trampoline;
5517 mono_domain_lock (domain);
5518 g_hash_table_insert (domain_jit_info (domain)->arch_seq_points,
5519 code, info);
5520 mono_domain_unlock (domain);
5521 }
5523 return info;
5524 }
5526 #endif /* MONO_ARCH_SOFT_DEBUG_SUPPORTED */
5528 gboolean
5529 mono_arch_opcode_supported (int opcode)
5530 {
5531 switch (opcode) {
5532 case OP_ATOMIC_ADD_I4:
5533 case OP_ATOMIC_ADD_I8:
5534 case OP_ATOMIC_EXCHANGE_I4:
5535 case OP_ATOMIC_EXCHANGE_I8:
5536 case OP_ATOMIC_CAS_I4:
5537 case OP_ATOMIC_CAS_I8:
5538 case OP_ATOMIC_LOAD_I1:
5539 case OP_ATOMIC_LOAD_I2:
5540 case OP_ATOMIC_LOAD_I4:
5541 case OP_ATOMIC_LOAD_I8:
5542 case OP_ATOMIC_LOAD_U1:
5543 case OP_ATOMIC_LOAD_U2:
5544 case OP_ATOMIC_LOAD_U4:
5545 case OP_ATOMIC_LOAD_U8:
5546 case OP_ATOMIC_LOAD_R4:
5547 case OP_ATOMIC_LOAD_R8:
5548 case OP_ATOMIC_STORE_I1:
5549 case OP_ATOMIC_STORE_I2:
5550 case OP_ATOMIC_STORE_I4:
5551 case OP_ATOMIC_STORE_I8:
5552 case OP_ATOMIC_STORE_U1:
5553 case OP_ATOMIC_STORE_U2:
5554 case OP_ATOMIC_STORE_U4:
5555 case OP_ATOMIC_STORE_U8:
5556 case OP_ATOMIC_STORE_R4:
5557 case OP_ATOMIC_STORE_R8:
5558 return TRUE;
5559 default:
5560 return FALSE;
5561 }
5562 }
5564 CallInfo*
5565 mono_arch_get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
5566 {
5567 return get_call_info (mp, sig);
5568 }
5570 gpointer
5571 mono_arch_load_function (MonoJitICallId jit_icall_id)
5572 {
5573 gpointer target = NULL;
5574 switch (jit_icall_id) {
5575 #undef MONO_AOT_ICALL
5576 #define MONO_AOT_ICALL(x) case MONO_JIT_ICALL_ ## x: target = (gpointer)x; break;
5577 MONO_AOT_ICALL (mono_arm_resume_unwind)
5578 MONO_AOT_ICALL (mono_arm_start_gsharedvt_call)
5579 MONO_AOT_ICALL (mono_arm_throw_exception)
5580 }
5581 return target;
5582 }