Revert "[mono][debugger] First PR to implement iCorDebug on mono (#20757)"
[mono-project.git] / mono / mini / mini-arm64.c
blob7592743e8bb6a7844d529a9b111bc9ef376100e5
1 /**
2 * \file
3 * ARM64 backend for the Mono code generator
5 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
6 *
7 * Based on mini-arm.c:
9 * Authors:
10 * Paolo Molaro (lupus@ximian.com)
11 * Dietmar Maurer (dietmar@ximian.com)
13 * (C) 2003 Ximian, Inc.
14 * Copyright 2003-2011 Novell, Inc (http://www.novell.com)
15 * Copyright 2011 Xamarin, Inc (http://www.xamarin.com)
16 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
19 #include "mini.h"
20 #include "cpu-arm64.h"
21 #include "ir-emit.h"
22 #include "aot-runtime.h"
23 #include "mini-runtime.h"
25 #include <mono/arch/arm64/arm64-codegen.h>
26 #include <mono/utils/mono-mmap.h>
27 #include <mono/utils/mono-memory-model.h>
28 #include <mono/metadata/abi-details.h>
30 #include "interp/interp.h"
33 * Documentation:
35 * - ARM(R) Architecture Reference Manual, ARMv8, for ARMv8-A architecture profile (DDI0487A_a_armv8_arm.pdf)
36 * - Procedure Call Standard for the ARM 64-bit Architecture (AArch64) (IHI0055B_aapcs64.pdf)
37 * - ELF for the ARM 64-bit Architecture (IHI0056B_aaelf64.pdf)
39 * Register usage:
40 * - ip0/ip1/lr are used as temporary registers
41 * - r27 is used as the rgctx/imt register
42 * - r28 is used to access arguments passed on the stack
43 * - d15/d16 are used as fp temporary registers
46 #define FP_TEMP_REG ARMREG_D16
47 #define FP_TEMP_REG2 ARMREG_D17
49 #define THUNK_SIZE (4 * 4)
51 /* The single step trampoline */
52 static gpointer ss_trampoline;
54 /* The breakpoint trampoline */
55 static gpointer bp_trampoline;
57 static gboolean ios_abi;
58 static gboolean enable_ptrauth;
60 static __attribute__ ((__warn_unused_result__)) guint8* emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset);
61 static guint8* emit_brx (guint8 *code, int reg);
62 static guint8* emit_blrx (guint8 *code, int reg);
/* Return the human-readable name of integer register REG (0-31), or "unknown". */
const char*
mono_arch_regname (int reg)
{
	static const char *int_reg_names[] = {
		"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
		"r10", "r11", "r12", "r13", "r14", "r15", "r16", "r17", "r18", "r19",
		"r20", "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "fp",
		"lr", "sp"
	};

	if (reg < 0 || reg >= 32)
		return "unknown";
	return int_reg_names [reg];
}
/* Return the human-readable name of FP register REG (0-31), or "unknown fp". */
const char*
mono_arch_fregname (int reg)
{
	static const char *fp_reg_names[] = {
		"d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9",
		"d10", "d11", "d12", "d13", "d14", "d15", "d16", "d17", "d18", "d19",
		"d20", "d21", "d22", "d23", "d24", "d25", "d26", "d27", "d28", "d29",
		"d30", "d31"
	};

	if (reg < 0 || reg >= 32)
		return "unknown fp";
	return fp_reg_names [reg];
}
/* Return the human-readable name of SIMD register REG (0-31), or "unknown". */
const char *
mono_arch_xregname (int reg)
{
	static const char *simd_reg_names[] = {
		"v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",
		"v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19",
		"v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29",
		"v30", "v31"
	};

	if (reg < 0 || reg >= 32)
		return "unknown";
	return simd_reg_names [reg];
}
107 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
109 NOT_IMPLEMENTED;
110 return 0;
113 #define MAX_ARCH_DELEGATE_PARAMS 7
115 static gpointer
116 get_delegate_invoke_impl (gboolean has_target, gboolean param_count, guint32 *code_size)
118 guint8 *code, *start;
120 MINI_BEGIN_CODEGEN ();
122 if (has_target) {
123 start = code = mono_global_codeman_reserve (12);
125 /* Replace the this argument with the target */
126 arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
127 arm_ldrx (code, ARMREG_R0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, target));
128 code = mono_arm_emit_brx (code, ARMREG_IP0);
130 g_assert ((code - start) <= 12);
131 } else {
132 int size, i;
134 size = 8 + param_count * 4;
135 start = code = mono_global_codeman_reserve (size);
137 arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
138 /* slide down the arguments */
139 for (i = 0; i < param_count; ++i)
140 arm_movx (code, i, i + 1);
141 code = mono_arm_emit_brx (code, ARMREG_IP0);
143 g_assert ((code - start) <= size);
145 MINI_END_CODEGEN (start, code - start, MONO_PROFILER_CODE_BUFFER_DELEGATE_INVOKE, NULL);
147 if (code_size)
148 *code_size = code - start;
150 return MINI_ADDR_TO_FTNPTR (start);
154 * mono_arch_get_delegate_invoke_impls:
156 * Return a list of MonoAotTrampInfo structures for the delegate invoke impl
157 * trampolines.
159 GSList*
160 mono_arch_get_delegate_invoke_impls (void)
162 GSList *res = NULL;
163 guint8 *code;
164 guint32 code_len;
165 int i;
166 char *tramp_name;
168 code = (guint8*)get_delegate_invoke_impl (TRUE, 0, &code_len);
169 res = g_slist_prepend (res, mono_tramp_info_create ("delegate_invoke_impl_has_target", code, code_len, NULL, NULL));
171 for (i = 0; i <= MAX_ARCH_DELEGATE_PARAMS; ++i) {
172 code = (guint8*)get_delegate_invoke_impl (FALSE, i, &code_len);
173 tramp_name = g_strdup_printf ("delegate_invoke_impl_target_%d", i);
174 res = g_slist_prepend (res, mono_tramp_info_create (tramp_name, code, code_len, NULL, NULL));
175 g_free (tramp_name);
178 return res;
181 gpointer
182 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
184 guint8 *code, *start;
187 * vtypes are returned in registers, or using the dedicated r8 register, so
188 * they can be supported by delegate invokes.
191 if (has_target) {
192 static guint8* cached = NULL;
194 if (cached)
195 return cached;
197 if (mono_ee_features.use_aot_trampolines)
198 start = (guint8*)mono_aot_get_trampoline ("delegate_invoke_impl_has_target");
199 else
200 start = (guint8*)get_delegate_invoke_impl (TRUE, 0, NULL);
201 mono_memory_barrier ();
202 cached = start;
203 return cached;
204 } else {
205 static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
206 int i;
208 if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
209 return NULL;
210 for (i = 0; i < sig->param_count; ++i)
211 if (!mono_is_regsize_var (sig->params [i]))
212 return NULL;
214 code = cache [sig->param_count];
215 if (code)
216 return code;
218 if (mono_ee_features.use_aot_trampolines) {
219 char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
220 start = (guint8*)mono_aot_get_trampoline (name);
221 g_free (name);
222 } else {
223 start = (guint8*)get_delegate_invoke_impl (FALSE, sig->param_count, NULL);
225 mono_memory_barrier ();
226 cache [sig->param_count] = start;
227 return start;
230 return NULL;
233 gpointer
234 mono_arch_get_delegate_virtual_invoke_impl (MonoMethodSignature *sig, MonoMethod *method, int offset, gboolean load_imt_reg)
236 return NULL;
239 gpointer
240 mono_arch_get_this_arg_from_call (host_mgreg_t *regs, guint8 *code)
242 return (gpointer)regs [ARMREG_R0];
/* Per-CPU initialization hook; nothing to do on arm64. */
void
mono_arch_cpu_init (void)
{
}
250 void
251 mono_arch_init (void)
253 #if defined(TARGET_IOS) || defined(TARGET_WATCHOS) || defined(TARGET_OSX)
254 ios_abi = TRUE;
255 #endif
256 #ifdef MONO_ARCH_ENABLE_PTRAUTH
257 enable_ptrauth = TRUE;
258 #endif
260 if (!mono_aot_only)
261 bp_trampoline = mini_get_breakpoint_trampoline ();
263 mono_arm_gsharedvt_init ();
/* Backend teardown hook; nothing to release on arm64. */
void
mono_arch_cleanup (void)
{
}
271 guint32
272 mono_arch_cpu_optimizations (guint32 *exclude_mask)
274 *exclude_mask = 0;
275 return 0;
/* No low-level icalls to register on arm64. */
void
mono_arch_register_lowlevel_calls (void)
{
}
/* Late initialization hook; nothing to do on arm64. */
void
mono_arch_finish_init (void)
{
}
288 /* The maximum length is 2 instructions */
289 static guint8*
290 emit_imm (guint8 *code, int dreg, int imm)
292 // FIXME: Optimize this
293 if (imm < 0) {
294 gint64 limm = imm;
295 arm_movnx (code, dreg, (~limm) & 0xffff, 0);
296 arm_movkx (code, dreg, (limm >> 16) & 0xffff, 16);
297 } else {
298 arm_movzx (code, dreg, imm & 0xffff, 0);
299 if (imm >> 16)
300 arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
303 return code;
306 /* The maximum length is 4 instructions */
307 static guint8*
308 emit_imm64 (guint8 *code, int dreg, guint64 imm)
310 // FIXME: Optimize this
311 arm_movzx (code, dreg, imm & 0xffff, 0);
312 if ((imm >> 16) & 0xffff)
313 arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
314 if ((imm >> 32) & 0xffff)
315 arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
316 if ((imm >> 48) & 0xffff)
317 arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);
319 return code;
322 guint8*
323 mono_arm_emit_imm64 (guint8 *code, int dreg, gint64 imm)
325 return emit_imm64 (code, dreg, imm);
329 * emit_imm_template:
331 * Emit a patchable code sequence for constructing a 64 bit immediate.
333 static guint8*
334 emit_imm64_template (guint8 *code, int dreg)
336 arm_movzx (code, dreg, 0, 0);
337 arm_movkx (code, dreg, 0, 16);
338 arm_movkx (code, dreg, 0, 32);
339 arm_movkx (code, dreg, 0, 48);
341 return code;
344 static __attribute__ ((__warn_unused_result__)) guint8*
345 emit_addw_imm (guint8 *code, int dreg, int sreg, int imm)
347 if (!arm_is_arith_imm (imm)) {
348 code = emit_imm (code, ARMREG_LR, imm);
349 arm_addw (code, dreg, sreg, ARMREG_LR);
350 } else {
351 arm_addw_imm (code, dreg, sreg, imm);
353 return code;
356 static __attribute__ ((__warn_unused_result__)) guint8*
357 emit_addx_imm (guint8 *code, int dreg, int sreg, int imm)
359 if (!arm_is_arith_imm (imm)) {
360 code = emit_imm (code, ARMREG_LR, imm);
361 arm_addx (code, dreg, sreg, ARMREG_LR);
362 } else {
363 arm_addx_imm (code, dreg, sreg, imm);
365 return code;
368 static __attribute__ ((__warn_unused_result__)) guint8*
369 emit_subw_imm (guint8 *code, int dreg, int sreg, int imm)
371 if (!arm_is_arith_imm (imm)) {
372 code = emit_imm (code, ARMREG_LR, imm);
373 arm_subw (code, dreg, sreg, ARMREG_LR);
374 } else {
375 arm_subw_imm (code, dreg, sreg, imm);
377 return code;
380 static __attribute__ ((__warn_unused_result__)) guint8*
381 emit_subx_imm (guint8 *code, int dreg, int sreg, int imm)
383 if (!arm_is_arith_imm (imm)) {
384 code = emit_imm (code, ARMREG_LR, imm);
385 arm_subx (code, dreg, sreg, ARMREG_LR);
386 } else {
387 arm_subx_imm (code, dreg, sreg, imm);
389 return code;
392 /* Emit sp+=imm. Clobbers ip0/ip1 */
393 static __attribute__ ((__warn_unused_result__)) guint8*
394 emit_addx_sp_imm (guint8 *code, int imm)
396 code = emit_imm (code, ARMREG_IP0, imm);
397 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
398 arm_addx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
399 arm_movspx (code, ARMREG_SP, ARMREG_IP1);
400 return code;
403 /* Emit sp-=imm. Clobbers ip0/ip1 */
404 static __attribute__ ((__warn_unused_result__)) guint8*
405 emit_subx_sp_imm (guint8 *code, int imm)
407 code = emit_imm (code, ARMREG_IP0, imm);
408 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
409 arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
410 arm_movspx (code, ARMREG_SP, ARMREG_IP1);
411 return code;
414 static __attribute__ ((__warn_unused_result__)) guint8*
415 emit_andw_imm (guint8 *code, int dreg, int sreg, int imm)
417 // FIXME:
418 code = emit_imm (code, ARMREG_LR, imm);
419 arm_andw (code, dreg, sreg, ARMREG_LR);
421 return code;
424 static __attribute__ ((__warn_unused_result__)) guint8*
425 emit_andx_imm (guint8 *code, int dreg, int sreg, int imm)
427 // FIXME:
428 code = emit_imm (code, ARMREG_LR, imm);
429 arm_andx (code, dreg, sreg, ARMREG_LR);
431 return code;
434 static __attribute__ ((__warn_unused_result__)) guint8*
435 emit_orrw_imm (guint8 *code, int dreg, int sreg, int imm)
437 // FIXME:
438 code = emit_imm (code, ARMREG_LR, imm);
439 arm_orrw (code, dreg, sreg, ARMREG_LR);
441 return code;
444 static __attribute__ ((__warn_unused_result__)) guint8*
445 emit_orrx_imm (guint8 *code, int dreg, int sreg, int imm)
447 // FIXME:
448 code = emit_imm (code, ARMREG_LR, imm);
449 arm_orrx (code, dreg, sreg, ARMREG_LR);
451 return code;
454 static __attribute__ ((__warn_unused_result__)) guint8*
455 emit_eorw_imm (guint8 *code, int dreg, int sreg, int imm)
457 // FIXME:
458 code = emit_imm (code, ARMREG_LR, imm);
459 arm_eorw (code, dreg, sreg, ARMREG_LR);
461 return code;
464 static __attribute__ ((__warn_unused_result__)) guint8*
465 emit_eorx_imm (guint8 *code, int dreg, int sreg, int imm)
467 // FIXME:
468 code = emit_imm (code, ARMREG_LR, imm);
469 arm_eorx (code, dreg, sreg, ARMREG_LR);
471 return code;
474 static __attribute__ ((__warn_unused_result__)) guint8*
475 emit_cmpw_imm (guint8 *code, int sreg, int imm)
477 if (imm == 0) {
478 arm_cmpw (code, sreg, ARMREG_RZR);
479 } else {
480 // FIXME:
481 code = emit_imm (code, ARMREG_LR, imm);
482 arm_cmpw (code, sreg, ARMREG_LR);
485 return code;
488 static __attribute__ ((__warn_unused_result__)) guint8*
489 emit_cmpx_imm (guint8 *code, int sreg, int imm)
491 if (imm == 0) {
492 arm_cmpx (code, sreg, ARMREG_RZR);
493 } else {
494 // FIXME:
495 code = emit_imm (code, ARMREG_LR, imm);
496 arm_cmpx (code, sreg, ARMREG_LR);
499 return code;
502 static __attribute__ ((__warn_unused_result__)) guint8*
503 emit_strb (guint8 *code, int rt, int rn, int imm)
505 if (arm_is_strb_imm (imm)) {
506 arm_strb (code, rt, rn, imm);
507 } else {
508 g_assert (rt != ARMREG_IP0);
509 g_assert (rn != ARMREG_IP0);
510 code = emit_imm (code, ARMREG_IP0, imm);
511 arm_strb_reg (code, rt, rn, ARMREG_IP0);
513 return code;
516 static __attribute__ ((__warn_unused_result__)) guint8*
517 emit_strh (guint8 *code, int rt, int rn, int imm)
519 if (arm_is_strh_imm (imm)) {
520 arm_strh (code, rt, rn, imm);
521 } else {
522 g_assert (rt != ARMREG_IP0);
523 g_assert (rn != ARMREG_IP0);
524 code = emit_imm (code, ARMREG_IP0, imm);
525 arm_strh_reg (code, rt, rn, ARMREG_IP0);
527 return code;
530 static __attribute__ ((__warn_unused_result__)) guint8*
531 emit_strw (guint8 *code, int rt, int rn, int imm)
533 if (arm_is_strw_imm (imm)) {
534 arm_strw (code, rt, rn, imm);
535 } else {
536 g_assert (rt != ARMREG_IP0);
537 g_assert (rn != ARMREG_IP0);
538 code = emit_imm (code, ARMREG_IP0, imm);
539 arm_strw_reg (code, rt, rn, ARMREG_IP0);
541 return code;
544 static __attribute__ ((__warn_unused_result__)) guint8*
545 emit_strfpw (guint8 *code, int rt, int rn, int imm)
547 if (arm_is_strw_imm (imm)) {
548 arm_strfpw (code, rt, rn, imm);
549 } else {
550 g_assert (rn != ARMREG_IP0);
551 code = emit_imm (code, ARMREG_IP0, imm);
552 arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
553 arm_strfpw (code, rt, ARMREG_IP0, 0);
555 return code;
558 static __attribute__ ((__warn_unused_result__)) guint8*
559 emit_strfpx (guint8 *code, int rt, int rn, int imm)
561 if (arm_is_strx_imm (imm)) {
562 arm_strfpx (code, rt, rn, imm);
563 } else {
564 g_assert (rn != ARMREG_IP0);
565 code = emit_imm (code, ARMREG_IP0, imm);
566 arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
567 arm_strfpx (code, rt, ARMREG_IP0, 0);
569 return code;
572 static __attribute__ ((__warn_unused_result__)) guint8*
573 emit_strx (guint8 *code, int rt, int rn, int imm)
575 if (arm_is_strx_imm (imm)) {
576 arm_strx (code, rt, rn, imm);
577 } else {
578 g_assert (rt != ARMREG_IP0);
579 g_assert (rn != ARMREG_IP0);
580 code = emit_imm (code, ARMREG_IP0, imm);
581 arm_strx_reg (code, rt, rn, ARMREG_IP0);
583 return code;
586 static __attribute__ ((__warn_unused_result__)) guint8*
587 emit_ldrb (guint8 *code, int rt, int rn, int imm)
589 if (arm_is_pimm12_scaled (imm, 1)) {
590 arm_ldrb (code, rt, rn, imm);
591 } else {
592 g_assert (rt != ARMREG_IP0);
593 g_assert (rn != ARMREG_IP0);
594 code = emit_imm (code, ARMREG_IP0, imm);
595 arm_ldrb_reg (code, rt, rn, ARMREG_IP0);
597 return code;
600 static __attribute__ ((__warn_unused_result__)) guint8*
601 emit_ldrsbx (guint8 *code, int rt, int rn, int imm)
603 if (arm_is_pimm12_scaled (imm, 1)) {
604 arm_ldrsbx (code, rt, rn, imm);
605 } else {
606 g_assert (rt != ARMREG_IP0);
607 g_assert (rn != ARMREG_IP0);
608 code = emit_imm (code, ARMREG_IP0, imm);
609 arm_ldrsbx_reg (code, rt, rn, ARMREG_IP0);
611 return code;
614 static __attribute__ ((__warn_unused_result__)) guint8*
615 emit_ldrh (guint8 *code, int rt, int rn, int imm)
617 if (arm_is_pimm12_scaled (imm, 2)) {
618 arm_ldrh (code, rt, rn, imm);
619 } else {
620 g_assert (rt != ARMREG_IP0);
621 g_assert (rn != ARMREG_IP0);
622 code = emit_imm (code, ARMREG_IP0, imm);
623 arm_ldrh_reg (code, rt, rn, ARMREG_IP0);
625 return code;
628 static __attribute__ ((__warn_unused_result__)) guint8*
629 emit_ldrshx (guint8 *code, int rt, int rn, int imm)
631 if (arm_is_pimm12_scaled (imm, 2)) {
632 arm_ldrshx (code, rt, rn, imm);
633 } else {
634 g_assert (rt != ARMREG_IP0);
635 g_assert (rn != ARMREG_IP0);
636 code = emit_imm (code, ARMREG_IP0, imm);
637 arm_ldrshx_reg (code, rt, rn, ARMREG_IP0);
639 return code;
642 static __attribute__ ((__warn_unused_result__)) guint8*
643 emit_ldrswx (guint8 *code, int rt, int rn, int imm)
645 if (arm_is_pimm12_scaled (imm, 4)) {
646 arm_ldrswx (code, rt, rn, imm);
647 } else {
648 g_assert (rt != ARMREG_IP0);
649 g_assert (rn != ARMREG_IP0);
650 code = emit_imm (code, ARMREG_IP0, imm);
651 arm_ldrswx_reg (code, rt, rn, ARMREG_IP0);
653 return code;
656 static __attribute__ ((__warn_unused_result__)) guint8*
657 emit_ldrw (guint8 *code, int rt, int rn, int imm)
659 if (arm_is_pimm12_scaled (imm, 4)) {
660 arm_ldrw (code, rt, rn, imm);
661 } else {
662 g_assert (rn != ARMREG_IP0);
663 code = emit_imm (code, ARMREG_IP0, imm);
664 arm_ldrw_reg (code, rt, rn, ARMREG_IP0);
666 return code;
669 static __attribute__ ((__warn_unused_result__)) guint8*
670 emit_ldrx (guint8 *code, int rt, int rn, int imm)
672 if (arm_is_pimm12_scaled (imm, 8)) {
673 arm_ldrx (code, rt, rn, imm);
674 } else {
675 g_assert (rn != ARMREG_IP0);
676 code = emit_imm (code, ARMREG_IP0, imm);
677 arm_ldrx_reg (code, rt, rn, ARMREG_IP0);
679 return code;
682 static __attribute__ ((__warn_unused_result__)) guint8*
683 emit_ldrfpw (guint8 *code, int rt, int rn, int imm)
685 if (arm_is_pimm12_scaled (imm, 4)) {
686 arm_ldrfpw (code, rt, rn, imm);
687 } else {
688 g_assert (rn != ARMREG_IP0);
689 code = emit_imm (code, ARMREG_IP0, imm);
690 arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
691 arm_ldrfpw (code, rt, ARMREG_IP0, 0);
693 return code;
696 static __attribute__ ((__warn_unused_result__)) guint8*
697 emit_ldrfpx (guint8 *code, int rt, int rn, int imm)
699 if (arm_is_pimm12_scaled (imm, 8)) {
700 arm_ldrfpx (code, rt, rn, imm);
701 } else {
702 g_assert (rn != ARMREG_IP0);
703 code = emit_imm (code, ARMREG_IP0, imm);
704 arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
705 arm_ldrfpx (code, rt, ARMREG_IP0, 0);
707 return code;
710 guint8*
711 mono_arm_emit_ldrx (guint8 *code, int rt, int rn, int imm)
713 return emit_ldrx (code, rt, rn, imm);
716 static guint8*
717 emit_call (MonoCompile *cfg, guint8* code, MonoJumpInfoType patch_type, gconstpointer data)
720 mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_IMM);
721 code = emit_imm64_template (code, ARMREG_LR);
722 arm_blrx (code, ARMREG_LR);
724 mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_BL);
725 arm_bl (code, code);
726 cfg->thunk_area += THUNK_SIZE;
727 return code;
730 static guint8*
731 emit_aotconst_full (MonoCompile *cfg, MonoJumpInfo **ji, guint8 *code, guint8 *start, int dreg, guint32 patch_type, gconstpointer data)
733 if (cfg)
734 mono_add_patch_info (cfg, code - cfg->native_code, (MonoJumpInfoType)patch_type, data);
735 else
736 *ji = mono_patch_info_list_prepend (*ji, code - start, (MonoJumpInfoType)patch_type, data);
737 /* See arch_emit_got_access () in aot-compiler.c */
738 arm_ldrx_lit (code, dreg, 0);
739 arm_nop (code);
740 arm_nop (code);
741 return code;
744 static guint8*
745 emit_aotconst (MonoCompile *cfg, guint8 *code, int dreg, guint32 patch_type, gconstpointer data)
747 return emit_aotconst_full (cfg, NULL, code, NULL, dreg, patch_type, data);
751 * mono_arm_emit_aotconst:
753 * Emit code to load an AOT constant into DREG. Usable from trampolines.
755 guint8*
756 mono_arm_emit_aotconst (gpointer ji, guint8 *code, guint8 *code_start, int dreg, guint32 patch_type, gconstpointer data)
758 return emit_aotconst_full (NULL, (MonoJumpInfo**)ji, code, code_start, dreg, patch_type, data);
761 gboolean
762 mono_arch_have_fast_tls (void)
764 #ifdef TARGET_IOS
765 return FALSE;
766 #else
767 return TRUE;
768 #endif
771 static guint8*
772 emit_tls_get (guint8 *code, int dreg, int tls_offset)
774 arm_mrs (code, dreg, ARM_MRS_REG_TPIDR_EL0);
775 if (tls_offset < 256) {
776 arm_ldrx (code, dreg, dreg, tls_offset);
777 } else {
778 code = emit_addx_imm (code, dreg, dreg, tls_offset);
779 arm_ldrx (code, dreg, dreg, 0);
781 return code;
784 static guint8*
785 emit_tls_set (guint8 *code, int sreg, int tls_offset)
787 int tmpreg = ARMREG_IP0;
789 g_assert (sreg != tmpreg);
790 arm_mrs (code, tmpreg, ARM_MRS_REG_TPIDR_EL0);
791 if (tls_offset < 256) {
792 arm_strx (code, sreg, tmpreg, tls_offset);
793 } else {
794 code = emit_addx_imm (code, tmpreg, tmpreg, tls_offset);
795 arm_strx (code, sreg, tmpreg, 0);
797 return code;
801 * Emits
802 * - mov sp, fp
803 * - ldrp [fp, lr], [sp], !stack_offfset
804 * Clobbers TEMP_REGS.
806 __attribute__ ((__warn_unused_result__)) guint8*
807 mono_arm_emit_destroy_frame (guint8 *code, int stack_offset, guint64 temp_regs)
809 // At least one of these registers must be available, or both.
810 gboolean const temp0 = (temp_regs & (1 << ARMREG_IP0)) != 0;
811 gboolean const temp1 = (temp_regs & (1 << ARMREG_IP1)) != 0;
812 g_assert (temp0 || temp1);
813 int const temp = temp0 ? ARMREG_IP0 : ARMREG_IP1;
815 arm_movspx (code, ARMREG_SP, ARMREG_FP);
817 if (arm_is_ldpx_imm (stack_offset)) {
818 arm_ldpx_post (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, stack_offset);
819 } else {
820 arm_ldpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
821 /* sp += stack_offset */
822 if (temp0 && temp1) {
823 code = emit_addx_sp_imm (code, stack_offset);
824 } else {
825 int imm = stack_offset;
827 /* Can't use addx_sp_imm () since we can't clobber both ip0/ip1 */
828 arm_addx_imm (code, temp, ARMREG_SP, 0);
829 while (imm > 256) {
830 arm_addx_imm (code, temp, temp, 256);
831 imm -= 256;
833 arm_addx_imm (code, ARMREG_SP, temp, imm);
836 return code;
839 #define is_call_imm(diff) ((gint)(diff) >= -33554432 && (gint)(diff) <= 33554431)
841 static guint8*
842 emit_thunk (guint8 *code, gconstpointer target)
844 guint8 *p = code;
846 arm_ldrx_lit (code, ARMREG_IP0, code + 8);
847 arm_brx (code, ARMREG_IP0);
848 *(guint64*)code = (guint64)target;
849 code += sizeof (guint64);
851 mono_arch_flush_icache (p, code - p);
852 return code;
855 static gpointer
856 create_thunk (MonoCompile *cfg, MonoDomain *domain, guchar *code, const guchar *target)
858 MonoJitInfo *ji;
859 MonoThunkJitInfo *info;
860 guint8 *thunks, *p;
861 int thunks_size;
862 guint8 *orig_target;
863 guint8 *target_thunk;
865 if (!domain)
866 domain = mono_domain_get ();
868 if (cfg) {
870 * This can be called multiple times during JITting,
871 * save the current position in cfg->arch to avoid
872 * doing a O(n^2) search.
874 if (!cfg->arch.thunks) {
875 cfg->arch.thunks = cfg->thunks;
876 cfg->arch.thunks_size = cfg->thunk_area;
878 thunks = cfg->arch.thunks;
879 thunks_size = cfg->arch.thunks_size;
880 if (!thunks_size) {
881 g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, mono_method_full_name (cfg->method, TRUE));
882 g_assert_not_reached ();
885 g_assert (*(guint32*)thunks == 0);
886 emit_thunk (thunks, target);
888 cfg->arch.thunks += THUNK_SIZE;
889 cfg->arch.thunks_size -= THUNK_SIZE;
891 return thunks;
892 } else {
893 ji = mini_jit_info_table_find (domain, (char*)code, NULL);
894 g_assert (ji);
895 info = mono_jit_info_get_thunk_info (ji);
896 g_assert (info);
898 thunks = (guint8*)ji->code_start + info->thunks_offset;
899 thunks_size = info->thunks_size;
901 orig_target = mono_arch_get_call_target (code + 4);
903 mono_domain_lock (domain);
905 target_thunk = NULL;
906 if (orig_target >= thunks && orig_target < thunks + thunks_size) {
907 /* The call already points to a thunk, because of trampolines etc. */
908 target_thunk = orig_target;
909 } else {
910 for (p = thunks; p < thunks + thunks_size; p += THUNK_SIZE) {
911 if (((guint32*)p) [0] == 0) {
912 /* Free entry */
913 target_thunk = p;
914 break;
915 } else if (((guint64*)p) [1] == (guint64)target) {
916 /* Thunk already points to target */
917 target_thunk = p;
918 break;
923 //printf ("THUNK: %p %p %p\n", code, target, target_thunk);
925 if (!target_thunk) {
926 mono_domain_unlock (domain);
927 g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, cfg ? mono_method_full_name (cfg->method, TRUE) : mono_method_full_name (jinfo_get_method (ji), TRUE));
928 g_assert_not_reached ();
931 emit_thunk (target_thunk, target);
933 mono_domain_unlock (domain);
935 return target_thunk;
939 static void
940 arm_patch_full (MonoCompile *cfg, MonoDomain *domain, guint8 *code, guint8 *target, int relocation)
942 switch (relocation) {
943 case MONO_R_ARM64_B:
944 target = MINI_FTNPTR_TO_ADDR (target);
945 if (arm_is_bl_disp (code, target)) {
946 arm_b (code, target);
947 } else {
948 gpointer thunk;
950 thunk = create_thunk (cfg, domain, code, target);
951 g_assert (arm_is_bl_disp (code, thunk));
952 arm_b (code, thunk);
954 break;
955 case MONO_R_ARM64_BCC: {
956 int cond;
958 cond = arm_get_bcc_cond (code);
959 arm_bcc (code, cond, target);
960 break;
962 case MONO_R_ARM64_CBZ:
963 arm_set_cbz_target (code, target);
964 break;
965 case MONO_R_ARM64_IMM: {
966 guint64 imm = (guint64)target;
967 int dreg;
969 /* emit_imm64_template () */
970 dreg = arm_get_movzx_rd (code);
971 arm_movzx (code, dreg, imm & 0xffff, 0);
972 arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
973 arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
974 arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);
975 break;
977 case MONO_R_ARM64_BL:
978 target = MINI_FTNPTR_TO_ADDR (target);
979 if (arm_is_bl_disp (code, target)) {
980 arm_bl (code, target);
981 } else {
982 gpointer thunk;
984 thunk = create_thunk (cfg, domain, code, target);
985 g_assert (arm_is_bl_disp (code, thunk));
986 arm_bl (code, thunk);
988 break;
989 default:
990 g_assert_not_reached ();
994 static void
995 arm_patch_rel (guint8 *code, guint8 *target, int relocation)
997 arm_patch_full (NULL, NULL, code, target, relocation);
1000 void
1001 mono_arm_patch (guint8 *code, guint8 *target, int relocation)
1003 arm_patch_rel (code, target, relocation);
1006 void
1007 mono_arch_patch_code_new (MonoCompile *cfg, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gpointer target)
1009 guint8 *ip;
1011 ip = ji->ip.i + code;
1013 switch (ji->type) {
1014 case MONO_PATCH_INFO_METHOD_JUMP:
1015 /* ji->relocation is not set by the caller */
1016 arm_patch_full (cfg, domain, ip, (guint8*)target, MONO_R_ARM64_B);
1017 mono_arch_flush_icache (ip, 8);
1018 break;
1019 default:
1020 arm_patch_full (cfg, domain, ip, (guint8*)target, ji->relocation);
1021 break;
1022 case MONO_PATCH_INFO_NONE:
1023 break;
/* Register windows are a SPARC concept; no-op on arm64. */
void
mono_arch_flush_register_windows (void)
{
}
1032 MonoMethod*
1033 mono_arch_find_imt_method (host_mgreg_t *regs, guint8 *code)
1035 return (MonoMethod*)regs [MONO_ARCH_RGCTX_REG];
1038 MonoVTable*
1039 mono_arch_find_static_call_vtable (host_mgreg_t *regs, guint8 *code)
1041 return (MonoVTable*)regs [MONO_ARCH_RGCTX_REG];
1044 GSList*
1045 mono_arch_get_cie_program (void)
1047 GSList *l = NULL;
1049 mono_add_unwind_op_def_cfa (l, (guint8*)NULL, (guint8*)NULL, ARMREG_SP, 0);
1051 return l;
1054 host_mgreg_t
1055 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
1057 return ctx->regs [reg];
1060 void
1061 mono_arch_context_set_int_reg (MonoContext *ctx, int reg, host_mgreg_t val)
1063 ctx->regs [reg] = val;
1067 * mono_arch_set_target:
1069 * Set the target architecture the JIT backend should generate code for, in the form
1070 * of a GNU target triplet. Only used in AOT mode.
1072 void
1073 mono_arch_set_target (char *mtriple)
1075 if (strstr (mtriple, "darwin") || strstr (mtriple, "ios")) {
1076 ios_abi = TRUE;
1080 static void
1081 add_general (CallInfo *cinfo, ArgInfo *ainfo, int size, gboolean sign)
1083 if (cinfo->gr >= PARAM_REGS) {
1084 ainfo->storage = ArgOnStack;
1086 * FIXME: The vararg argument handling code in ves_icall_System_ArgIterator_IntGetNextArg
1087 * assumes every argument is allocated to a separate full size stack slot.
1089 if (ios_abi && !cinfo->vararg) {
1090 /* Assume size == align */
1091 } else {
1092 /* Put arguments into 8 byte aligned stack slots */
1093 size = 8;
1094 sign = FALSE;
1096 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
1097 ainfo->offset = cinfo->stack_usage;
1098 ainfo->slot_size = size;
1099 ainfo->sign = sign;
1100 cinfo->stack_usage += size;
1101 } else {
1102 ainfo->storage = ArgInIReg;
1103 ainfo->reg = cinfo->gr;
1104 cinfo->gr ++;
1108 static void
1109 add_fp (CallInfo *cinfo, ArgInfo *ainfo, gboolean single)
1111 int size = single ? 4 : 8;
1113 if (cinfo->fr >= FP_PARAM_REGS) {
1114 ainfo->storage = single ? ArgOnStackR4 : ArgOnStackR8;
1115 if (ios_abi) {
1116 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
1117 ainfo->offset = cinfo->stack_usage;
1118 ainfo->slot_size = size;
1119 cinfo->stack_usage += size;
1120 } else {
1121 ainfo->offset = cinfo->stack_usage;
1122 ainfo->slot_size = 8;
1123 /* Put arguments into 8 byte aligned stack slots */
1124 cinfo->stack_usage += 8;
1126 } else {
1127 if (single)
1128 ainfo->storage = ArgInFRegR4;
1129 else
1130 ainfo->storage = ArgInFReg;
1131 ainfo->reg = cinfo->fr;
1132 cinfo->fr ++;
1136 static gboolean
1137 is_hfa (MonoType *t, int *out_nfields, int *out_esize, int *field_offsets)
1139 MonoClass *klass;
1140 gpointer iter;
1141 MonoClassField *field;
1142 MonoType *ftype, *prev_ftype = NULL;
1143 int i, nfields = 0;
1145 klass = mono_class_from_mono_type_internal (t);
1146 iter = NULL;
1147 while ((field = mono_class_get_fields_internal (klass, &iter))) {
1148 if (field->type->attrs & FIELD_ATTRIBUTE_STATIC)
1149 continue;
1150 ftype = mono_field_get_type_internal (field);
1151 ftype = mini_get_underlying_type (ftype);
1153 if (MONO_TYPE_ISSTRUCT (ftype)) {
1154 int nested_nfields, nested_esize;
1155 int nested_field_offsets [16];
1157 if (!is_hfa (ftype, &nested_nfields, &nested_esize, nested_field_offsets))
1158 return FALSE;
1159 if (nested_esize == 4)
1160 ftype = m_class_get_byval_arg (mono_defaults.single_class);
1161 else
1162 ftype = m_class_get_byval_arg (mono_defaults.double_class);
1163 if (prev_ftype && prev_ftype->type != ftype->type)
1164 return FALSE;
1165 prev_ftype = ftype;
1166 for (i = 0; i < nested_nfields; ++i) {
1167 if (nfields + i < 4)
1168 field_offsets [nfields + i] = field->offset - MONO_ABI_SIZEOF (MonoObject) + nested_field_offsets [i];
1170 nfields += nested_nfields;
1171 } else {
1172 if (!(!ftype->byref && (ftype->type == MONO_TYPE_R4 || ftype->type == MONO_TYPE_R8)))
1173 return FALSE;
1174 if (prev_ftype && prev_ftype->type != ftype->type)
1175 return FALSE;
1176 prev_ftype = ftype;
1177 if (nfields < 4)
1178 field_offsets [nfields] = field->offset - MONO_ABI_SIZEOF (MonoObject);
1179 nfields ++;
1182 if (nfields == 0 || nfields > 4)
1183 return FALSE;
1184 *out_nfields = nfields;
1185 *out_esize = prev_ftype->type == MONO_TYPE_R4 ? 4 : 8;
1186 return TRUE;
/*
 * add_valuetype:
 *
 *   Decide how the value type T is passed and record the decision in AINFO,
 * updating the running register/stack counters in CINFO. HFAs (homogeneous
 * float aggregates, detected by is_hfa ()) are passed in fp registers when
 * enough remain; other vtypes go into integer registers, onto the stack, or,
 * when larger than 16 bytes, are passed by reference (AAPCS64 composite rules).
 */
1189 static void
1190 add_valuetype (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
1192 int i, size, align_size, nregs, nfields, esize;
1193 int field_offsets [16];
1194 guint32 align;
1196 size = mini_type_stack_size_full (t, &align, cinfo->pinvoke);
1197 align_size = ALIGN_TO (size, 8);
1199 nregs = align_size / 8;
1200 if (is_hfa (t, &nfields, &esize, field_offsets)) {
1202 * The struct might include nested float structs aligned at 8,
1203 * so need to keep track of the offsets of the individual fields.
1205 if (cinfo->fr + nfields <= FP_PARAM_REGS) {
1206 ainfo->storage = ArgHFA;
1207 ainfo->reg = cinfo->fr;
1208 ainfo->nregs = nfields;
1209 ainfo->size = size;
1210 ainfo->esize = esize;
1211 for (i = 0; i < nfields; ++i)
1212 ainfo->foffsets [i] = field_offsets [i];
1213 cinfo->fr += ainfo->nregs;
1214 } else {
/* Not enough fp regs left: the whole HFA goes on the stack and the
 * remaining fp param regs are marked as used up. */
1215 ainfo->nfregs_to_skip = FP_PARAM_REGS > cinfo->fr ? FP_PARAM_REGS - cinfo->fr : 0;
1216 cinfo->fr = FP_PARAM_REGS;
1217 size = ALIGN_TO (size, 8);
1218 ainfo->storage = ArgVtypeOnStack;
1219 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, align);
1220 ainfo->offset = cinfo->stack_usage;
1221 ainfo->size = size;
1222 ainfo->hfa = TRUE;
1223 ainfo->nregs = nfields;
1224 ainfo->esize = esize;
1225 cinfo->stack_usage += size;
1227 return;
/* Non-HFA composites larger than 16 bytes are passed by reference */
1230 if (align_size > 16) {
1231 ainfo->storage = ArgVtypeByRef;
1232 ainfo->size = size;
1233 return;
1236 if (cinfo->gr + nregs > PARAM_REGS) {
1237 size = ALIGN_TO (size, 8);
1238 ainfo->storage = ArgVtypeOnStack;
1239 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, align);
1240 ainfo->offset = cinfo->stack_usage;
1241 ainfo->size = size;
1242 cinfo->stack_usage += size;
1243 cinfo->gr = PARAM_REGS;
1244 } else {
1245 ainfo->storage = ArgVtypeInIRegs;
1246 ainfo->reg = cinfo->gr;
1247 ainfo->nregs = nregs;
1248 ainfo->size = size;
1249 cinfo->gr += nregs;
/*
 * add_param:
 *
 *   Assign a location to a parameter of type T and record it in AINFO.
 * Dispatches on the underlying type: small integers and pointers go to
 * general registers via add_general () (with sign/zero-extension flag),
 * R4/R8 to fp registers via add_fp (), value types to add_valuetype (),
 * and gsharedvt types are passed by reference like large vtypes.
 * The MONO_ARCH_ILP32 blocks route native-int/pointer types to the 4 byte
 * path on ILP32 and the 8 byte path otherwise.
 */
1253 static void
1254 add_param (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
1256 MonoType *ptype;
1258 ptype = mini_get_underlying_type (t);
1259 switch (ptype->type) {
1260 case MONO_TYPE_I1:
1261 add_general (cinfo, ainfo, 1, TRUE);
1262 break;
1263 case MONO_TYPE_U1:
1264 add_general (cinfo, ainfo, 1, FALSE);
1265 break;
1266 case MONO_TYPE_I2:
1267 add_general (cinfo, ainfo, 2, TRUE);
1268 break;
1269 case MONO_TYPE_U2:
1270 add_general (cinfo, ainfo, 2, FALSE);
1271 break;
1272 #ifdef MONO_ARCH_ILP32
1273 case MONO_TYPE_I:
1274 #endif
1275 case MONO_TYPE_I4:
1276 add_general (cinfo, ainfo, 4, TRUE);
1277 break;
1278 #ifdef MONO_ARCH_ILP32
1279 case MONO_TYPE_U:
1280 case MONO_TYPE_PTR:
1281 case MONO_TYPE_FNPTR:
1282 case MONO_TYPE_OBJECT:
1283 #endif
1284 case MONO_TYPE_U4:
1285 add_general (cinfo, ainfo, 4, FALSE);
1286 break;
1287 #ifndef MONO_ARCH_ILP32
1288 case MONO_TYPE_I:
1289 case MONO_TYPE_U:
1290 case MONO_TYPE_PTR:
1291 case MONO_TYPE_FNPTR:
1292 case MONO_TYPE_OBJECT:
1293 #endif
1294 case MONO_TYPE_U8:
1295 case MONO_TYPE_I8:
1296 add_general (cinfo, ainfo, 8, FALSE);
1297 break;
1298 case MONO_TYPE_R8:
1299 add_fp (cinfo, ainfo, FALSE);
1300 break;
1301 case MONO_TYPE_R4:
1302 add_fp (cinfo, ainfo, TRUE);
1303 break;
1304 case MONO_TYPE_VALUETYPE:
1305 case MONO_TYPE_TYPEDBYREF:
1306 add_valuetype (cinfo, ainfo, ptype);
1307 break;
1308 case MONO_TYPE_VOID:
1309 ainfo->storage = ArgNone;
1310 break;
1311 case MONO_TYPE_GENERICINST:
1312 if (!mono_type_generic_inst_is_valuetype (ptype)) {
1313 add_general (cinfo, ainfo, 8, FALSE);
1314 } else if (mini_is_gsharedvt_variable_type (ptype)) {
1316 * Treat gsharedvt arguments as large vtypes
1318 ainfo->storage = ArgVtypeByRef;
1319 ainfo->gsharedvt = TRUE;
1320 } else {
1321 add_valuetype (cinfo, ainfo, ptype);
1323 break;
1324 case MONO_TYPE_VAR:
1325 case MONO_TYPE_MVAR:
1326 g_assert (mini_is_gsharedvt_type (ptype));
1327 ainfo->storage = ArgVtypeByRef;
1328 ainfo->gsharedvt = TRUE;
1329 break;
1330 default:
1331 g_assert_not_reached ();
1332 break;
1337 * get_call_info:
1339 * Obtain information about a call according to the calling convention.
/*
 * Allocates the CallInfo from MP when given, otherwise from the heap
 * (caller then frees with g_free ()). The return value is classified first
 * (an ArgVtypeByRef result is returned through x8 per AAPCS64), then the
 * register counters are reset and `this` plus each parameter is classified
 * in order. A vararg sentinel forces everything after it onto the stack and
 * inserts the signature cookie.
 */
1341 static CallInfo*
1342 get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
1344 CallInfo *cinfo;
1345 ArgInfo *ainfo;
1346 int n, pstart, pindex;
1348 n = sig->hasthis + sig->param_count;
1350 if (mp)
1351 cinfo = mono_mempool_alloc0 (mp, sizeof (CallInfo) + (sizeof (ArgInfo) * n));
1352 else
1353 cinfo = g_malloc0 (sizeof (CallInfo) + (sizeof (ArgInfo) * n));
1355 cinfo->nargs = n;
1356 cinfo->pinvoke = sig->pinvoke;
1357 // Constrain this to OSX only for now
1358 #ifdef TARGET_OSX
1359 cinfo->vararg = sig->call_convention == MONO_CALL_VARARG;
1360 #endif
1362 /* Return value */
1363 add_param (cinfo, &cinfo->ret, sig->ret);
1364 if (cinfo->ret.storage == ArgVtypeByRef)
1365 cinfo->ret.reg = ARMREG_R8;
1366 /* Reset state */
1367 cinfo->gr = 0;
1368 cinfo->fr = 0;
1369 cinfo->stack_usage = 0;
1371 /* Parameters */
1372 if (sig->hasthis)
1373 add_general (cinfo, cinfo->args + 0, 8, FALSE);
1374 pstart = 0;
1375 for (pindex = pstart; pindex < sig->param_count; ++pindex) {
1376 ainfo = cinfo->args + sig->hasthis + pindex;
1378 if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
1379 /* Prevent implicit arguments and sig_cookie from
1380 being passed in registers */
1381 cinfo->gr = PARAM_REGS;
1382 cinfo->fr = FP_PARAM_REGS;
1383 /* Emit the signature cookie just before the implicit arguments */
1384 add_param (cinfo, &cinfo->sig_cookie, mono_get_int_type ());
1387 add_param (cinfo, ainfo, sig->params [pindex]);
1388 if (ainfo->storage == ArgVtypeByRef) {
1389 /* Pass the argument address in the next register */
1390 if (cinfo->gr >= PARAM_REGS) {
1391 ainfo->storage = ArgVtypeByRefOnStack;
1392 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, 8);
1393 ainfo->offset = cinfo->stack_usage;
1394 cinfo->stack_usage += 8;
1395 } else {
1396 ainfo->reg = cinfo->gr;
1397 cinfo->gr ++;
1402 /* Handle the case where there are no implicit arguments */
1403 if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
1404 /* Prevent implicit arguments and sig_cookie from
1405 being passed in registers */
1406 cinfo->gr = PARAM_REGS;
1407 cinfo->fr = FP_PARAM_REGS;
1408 /* Emit the signature cookie just before the implicit arguments */
1409 add_param (cinfo, &cinfo->sig_cookie, mono_get_int_type ());
1412 cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, MONO_ARCH_FRAME_ALIGNMENT);
1414 return cinfo;
1417 static int
1418 arg_need_temp (ArgInfo *ainfo)
1420 if (ainfo->storage == ArgHFA && ainfo->esize == 4)
1421 return ainfo->size;
1422 return 0;
/*
 * arg_get_storage:
 *
 *   Return a pointer to the location inside CCONTEXT where the argument
 * described by AINFO lives: a general register slot, an fp register slot,
 * a stack offset, or (for ArgVtypeByRef) the caller-provided buffer whose
 * address is held in the general register.
 */
1425 static gpointer
1426 arg_get_storage (CallContext *ccontext, ArgInfo *ainfo)
1428 switch (ainfo->storage) {
1429 case ArgVtypeInIRegs:
1430 case ArgInIReg:
1431 return &ccontext->gregs [ainfo->reg];
1432 case ArgInFReg:
1433 case ArgInFRegR4:
1434 case ArgHFA:
1435 return &ccontext->fregs [ainfo->reg];
1436 case ArgOnStack:
1437 case ArgOnStackR4:
1438 case ArgOnStackR8:
1439 case ArgVtypeOnStack:
1440 return ccontext->stack + ainfo->offset;
1441 case ArgVtypeByRef:
1442 return (gpointer) ccontext->gregs [ainfo->reg];
1443 default:
1444 g_error ("Arg storage type not yet supported");
1448 static void
1449 arg_get_val (CallContext *ccontext, ArgInfo *ainfo, gpointer dest)
1451 g_assert (arg_need_temp (ainfo));
1453 float *dest_float = (float*)dest;
1454 for (int k = 0; k < ainfo->nregs; k++) {
1455 *dest_float = *(float*)&ccontext->fregs [ainfo->reg + k];
1456 dest_float++;
1460 static void
1461 arg_set_val (CallContext *ccontext, ArgInfo *ainfo, gpointer src)
1463 g_assert (arg_need_temp (ainfo));
1465 float *src_float = (float*)src;
1466 for (int k = 0; k < ainfo->nregs; k++) {
1467 *(float*)&ccontext->fregs [ainfo->reg + k] = *src_float;
1468 src_float++;
1472 /* Set arguments in the ccontext (for i2n entry) */
/*
 * Zeroes CCONTEXT, allocates its stack area if the callee needs stack
 * arguments, stores the hidden return buffer address (ArgVtypeByRef) in the
 * return register, then copies every interpreter frame argument into its
 * native location via the interp callbacks. R4 HFAs are marshalled through
 * a temporary buffer (arg_need_temp ()/arg_set_val ()).
 * NOTE(review): ccontext->stack is g_calloc'ed here — presumably freed by
 * the caller/native-to-managed transition; not visible in this file chunk.
 */
1473 void
1474 mono_arch_set_native_call_context_args (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
1476 const MonoEECallbacks *interp_cb = mini_get_interp_callbacks ();
1477 CallInfo *cinfo = get_call_info (NULL, sig);
1478 gpointer storage;
1479 ArgInfo *ainfo;
1481 memset (ccontext, 0, sizeof (CallContext));
1483 ccontext->stack_size = ALIGN_TO (cinfo->stack_usage, MONO_ARCH_FRAME_ALIGNMENT);
1484 if (ccontext->stack_size)
1485 ccontext->stack = (guint8*)g_calloc (1, ccontext->stack_size);
1487 if (sig->ret->type != MONO_TYPE_VOID) {
1488 ainfo = &cinfo->ret;
1489 if (ainfo->storage == ArgVtypeByRef) {
1490 storage = interp_cb->frame_arg_to_storage ((MonoInterpFrameHandle)frame, sig, -1);
1491 ccontext->gregs [cinfo->ret.reg] = (gsize)storage;
1495 g_assert (!sig->hasthis);
1497 for (int i = 0; i < sig->param_count; i++) {
1498 ainfo = &cinfo->args [i];
1500 if (ainfo->storage == ArgVtypeByRef) {
1501 ccontext->gregs [ainfo->reg] = (host_mgreg_t)interp_cb->frame_arg_to_storage ((MonoInterpFrameHandle)frame, sig, i);
1502 continue;
1505 int temp_size = arg_need_temp (ainfo);
1507 if (temp_size)
1508 storage = alloca (temp_size); // FIXME? alloca in a loop
1509 else
1510 storage = arg_get_storage (ccontext, ainfo);
1512 interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, i, storage);
1513 if (temp_size)
1514 arg_set_val (ccontext, ainfo, storage);
1517 g_free (cinfo);
1520 /* Set return value in the ccontext (for n2i return) */
/*
 * When RETP is non-NULL the result was already written into the hidden
 * return buffer (ArgVtypeByRef) and the interp callback copies into it
 * directly; otherwise the value is marshalled into the return register(s)
 * in CCONTEXT, going through a temporary for R4 HFAs.
 */
1521 void
1522 mono_arch_set_native_call_context_ret (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig, gpointer retp)
1524 const MonoEECallbacks *interp_cb;
1525 CallInfo *cinfo;
1526 gpointer storage;
1527 ArgInfo *ainfo;
1529 if (sig->ret->type == MONO_TYPE_VOID)
1530 return;
1532 interp_cb = mini_get_interp_callbacks ();
1533 cinfo = get_call_info (NULL, sig);
1534 ainfo = &cinfo->ret;
1536 if (retp) {
1537 g_assert (ainfo->storage == ArgVtypeByRef);
1538 interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, retp);
1539 } else {
1540 g_assert (ainfo->storage != ArgVtypeByRef);
1541 int temp_size = arg_need_temp (ainfo);
1543 if (temp_size)
1544 storage = alloca (temp_size);
1545 else
1546 storage = arg_get_storage (ccontext, ainfo);
/* Note: the context is cleared *after* computing storage, so the callback
 * below writes the return value into the freshly zeroed context. */
1547 memset (ccontext, 0, sizeof (CallContext)); // FIXME
1548 interp_cb->frame_arg_to_data ((MonoInterpFrameHandle)frame, sig, -1, storage);
1549 if (temp_size)
1550 arg_set_val (ccontext, ainfo, storage);
1553 g_free (cinfo);
1556 /* Gets the arguments from ccontext (for n2i entry) */
/*
 * Copies every native argument (including `this`) from CCONTEXT into the
 * interpreter frame via the interp callbacks, unpacking R4 HFAs through a
 * temporary. Returns the hidden return buffer address when the result is
 * ArgVtypeByRef, otherwise NULL.
 */
1557 gpointer
1558 mono_arch_get_native_call_context_args (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
1560 const MonoEECallbacks *interp_cb = mini_get_interp_callbacks ();
1561 CallInfo *cinfo = get_call_info (NULL, sig);
1562 gpointer storage;
1563 ArgInfo *ainfo;
1565 for (int i = 0; i < sig->param_count + sig->hasthis; i++) {
1566 ainfo = &cinfo->args [i];
1567 int temp_size = arg_need_temp (ainfo);
1569 if (temp_size) {
1570 storage = alloca (temp_size); // FIXME? alloca in a loop
1571 arg_get_val (ccontext, ainfo, storage);
1572 } else {
1573 storage = arg_get_storage (ccontext, ainfo);
1575 interp_cb->data_to_frame_arg ((MonoInterpFrameHandle)frame, sig, i, storage);
1578 storage = NULL;
1579 if (sig->ret->type != MONO_TYPE_VOID) {
1580 ainfo = &cinfo->ret;
1581 if (ainfo->storage == ArgVtypeByRef)
1582 storage = (gpointer) ccontext->gregs [cinfo->ret.reg];
1584 g_free (cinfo);
1585 return storage;
1588 /* Gets the return value from ccontext (for i2n exit) */
/*
 * Copies the native return value out of CCONTEXT into the interpreter frame.
 * Nothing to do for void or ArgVtypeByRef (the callee already wrote the
 * caller-provided buffer). R4 HFA results are repacked through a temporary.
 */
1589 void
1590 mono_arch_get_native_call_context_ret (CallContext *ccontext, gpointer frame, MonoMethodSignature *sig)
1592 const MonoEECallbacks *interp_cb;
1593 CallInfo *cinfo;
1594 ArgInfo *ainfo;
1595 gpointer storage;
1597 if (sig->ret->type == MONO_TYPE_VOID)
1598 return;
1600 interp_cb = mini_get_interp_callbacks ();
1601 cinfo = get_call_info (NULL, sig);
1602 ainfo = &cinfo->ret;
1604 if (ainfo->storage != ArgVtypeByRef) {
1605 int temp_size = arg_need_temp (ainfo);
1607 if (temp_size) {
1608 storage = alloca (temp_size);
1609 arg_get_val (ccontext, ainfo, storage);
1610 } else {
1611 storage = arg_get_storage (ccontext, ainfo);
1613 interp_cb->data_to_frame_arg ((MonoInterpFrameHandle)frame, sig, -1, storage);
1616 g_free (cinfo);
/* Precomputed per-signature data for the dyn call path
 * (mono_arch_start_dyn_call ()/mono_arch_finish_dyn_call ()). */
1619 typedef struct {
1620 MonoMethodSignature *sig; /* the signature this info was prepared for */
1621 CallInfo *cinfo; /* argument/return classification, owned (freed in mono_arch_dyn_call_free ()) */
1622 MonoType *rtype; /* underlying return type */
1623 MonoType **param_types; /* underlying parameter types, owned array */
1624 int n_fpargs, n_fpret, nullable_area; /* fp arg/ret register counts; bytes of Nullable<T> temp space */
1625 } ArchDynCallInfo;
/*
 * dyn_call_supported:
 *
 *   Return TRUE if the dyn call path can handle calls with this
 * classification: whitelists the supported return/argument storage kinds
 * and rejects multi-register struct returns with more than 2 registers.
 */
1627 static gboolean
1628 dyn_call_supported (CallInfo *cinfo, MonoMethodSignature *sig)
1630 int i;
1632 // FIXME: Add more cases
1633 switch (cinfo->ret.storage) {
1634 case ArgNone:
1635 case ArgInIReg:
1636 case ArgInFReg:
1637 case ArgInFRegR4:
1638 case ArgVtypeByRef:
1639 break;
1640 case ArgVtypeInIRegs:
1641 if (cinfo->ret.nregs > 2)
1642 return FALSE;
1643 break;
1644 case ArgHFA:
1645 break;
1646 default:
1647 return FALSE;
1650 for (i = 0; i < cinfo->nargs; ++i) {
1651 ArgInfo *ainfo = &cinfo->args [i];
1653 switch (ainfo->storage) {
1654 case ArgInIReg:
1655 case ArgVtypeInIRegs:
1656 case ArgInFReg:
1657 case ArgInFRegR4:
1658 case ArgHFA:
1659 case ArgVtypeByRef:
1660 case ArgVtypeByRefOnStack:
1661 case ArgOnStack:
1662 case ArgVtypeOnStack:
1663 break;
1664 default:
1665 return FALSE;
1669 return TRUE;
/*
 * mono_arch_dyn_call_prepare:
 *
 *   Build the per-signature ArchDynCallInfo used by the dyn call path, or
 * return NULL if the signature is not supported (see dyn_call_supported ()).
 * Precomputes underlying param/return types, the number of fp return regs,
 * and the total temp space needed for Nullable<T> arguments.
 * The result is freed with mono_arch_dyn_call_free ().
 */
1672 MonoDynCallInfo*
1673 mono_arch_dyn_call_prepare (MonoMethodSignature *sig)
1675 ArchDynCallInfo *info;
1676 CallInfo *cinfo;
1677 int i, aindex;
1679 cinfo = get_call_info (NULL, sig);
1681 if (!dyn_call_supported (cinfo, sig)) {
1682 g_free (cinfo);
1683 return NULL;
1686 info = g_new0 (ArchDynCallInfo, 1);
1687 // FIXME: Preprocess the info to speed up start_dyn_call ()
1688 info->sig = sig;
1689 info->cinfo = cinfo;
1690 info->rtype = mini_get_underlying_type (sig->ret);
1691 info->param_types = g_new0 (MonoType*, sig->param_count);
1692 for (i = 0; i < sig->param_count; ++i)
1693 info->param_types [i] = mini_get_underlying_type (sig->params [i]);
1695 switch (cinfo->ret.storage) {
1696 case ArgInFReg:
1697 case ArgInFRegR4:
1698 info->n_fpret = 1;
1699 break;
1700 case ArgHFA:
1701 info->n_fpret = cinfo->ret.nregs;
1702 break;
1703 default:
1704 break;
1707 for (aindex = 0; aindex < sig->param_count; aindex++) {
1708 MonoType *t = info->param_types [aindex];
1710 if (t->byref)
1711 continue;
1713 switch (t->type) {
1714 case MONO_TYPE_GENERICINST:
1715 if (t->type == MONO_TYPE_GENERICINST && mono_class_is_nullable (mono_class_from_mono_type_internal (t))) {
1716 MonoClass *klass = mono_class_from_mono_type_internal (t);
1717 int size;
1719 /* Nullables need a temporary buffer, its stored at the end of DynCallArgs.regs after the stack args */
1720 size = mono_class_value_size (klass, NULL);
1721 info->nullable_area += size;
1723 break;
1724 default:
1725 break;
1729 return (MonoDynCallInfo*)info;
1732 void
1733 mono_arch_dyn_call_free (MonoDynCallInfo *info)
1735 ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;
1737 g_free (ainfo->cinfo);
1738 g_free (ainfo->param_types);
1739 g_free (ainfo);
/*
 * mono_arch_dyn_call_get_buf_size:
 *
 *   Return the size of the buffer the caller must allocate for
 * mono_arch_start_dyn_call (): the DynCallArgs header, the stack argument
 * area, and the Nullable<T> temporary area computed in
 * mono_arch_dyn_call_prepare ().
 */
1743 mono_arch_dyn_call_get_buf_size (MonoDynCallInfo *info)
1745 ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;
1747 g_assert (ainfo->cinfo->stack_usage % MONO_ARCH_FRAME_ALIGNMENT == 0);
1748 return sizeof (DynCallArgs) + ainfo->cinfo->stack_usage + ainfo->nullable_area;
/*
 * bitcast_r4_to_r8:
 *
 *   Return a double-sized value whose low 32 bits hold the bit pattern of F,
 * matching how a single float sits in an 8 byte fp register image
 * (DynCallArgs.fpregs). The previous implementation dereferenced a double*
 * pointing at a 4 byte float object — an out-of-bounds read and a
 * strict-aliasing violation, both undefined behavior, and it left the high
 * 32 bits as stack garbage. The union form is well-defined type punning and
 * zero-fills the unused high bits.
 */
static double
bitcast_r4_to_r8 (float f)
{
	union { double d; float f; } u;

	u.d = 0.0;
	u.f = f;
	return u.d;
}
/*
 * bitcast_r8_to_r4:
 *
 *   Extract the float stored in the low 32 bits of an 8 byte fp register
 * image (the inverse of bitcast_r4_to_r8 ()). The previous implementation
 * read the double through a float* — undefined behavior under strict
 * aliasing; the union form is well-defined type punning with identical
 * results on this little-endian target.
 */
static float
bitcast_r8_to_r4 (double f)
{
	union { double d; float f; } u;

	u.d = f;
	return u.f;
}
/*
 * mono_arch_start_dyn_call:
 *
 *   Marshal the argument array ARGS into the DynCallArgs buffer BUF according
 * to the precomputed classification in INFO, ready for the dyn call
 * trampoline: integer args into p->regs (slot index = register number, or
 * PARAM_REGS+1 + stack offset for stack args), fp args into p->fpregs,
 * ArgVtypeByRef results via x8, and Nullable<T> args through the temp area
 * placed after the stack arguments. The GENERICINST case deliberately falls
 * through into MONO_TYPE_VALUETYPE for non-reference instantiations.
 */
1767 void
1768 mono_arch_start_dyn_call (MonoDynCallInfo *info, gpointer **args, guint8 *ret, guint8 *buf)
1770 ArchDynCallInfo *dinfo = (ArchDynCallInfo*)info;
1771 DynCallArgs *p = (DynCallArgs*)buf;
1772 int aindex, arg_index, greg, i, pindex;
1773 MonoMethodSignature *sig = dinfo->sig;
1774 CallInfo *cinfo = dinfo->cinfo;
1775 int buffer_offset = 0;
1776 guint8 *nullable_buffer;
1778 p->res = 0;
1779 p->ret = ret;
1780 p->n_fpargs = dinfo->n_fpargs;
1781 p->n_fpret = dinfo->n_fpret;
1782 p->n_stackargs = cinfo->stack_usage / sizeof (host_mgreg_t);
1784 arg_index = 0;
1785 greg = 0;
1786 pindex = 0;
1788 /* Stored after the stack arguments */
1789 nullable_buffer = (guint8*)&(p->regs [PARAM_REGS + 1 + (cinfo->stack_usage / sizeof (host_mgreg_t))]);
1791 if (sig->hasthis)
1792 p->regs [greg ++] = (host_mgreg_t)*(args [arg_index ++]);
1794 if (cinfo->ret.storage == ArgVtypeByRef)
1795 p->regs [ARMREG_R8] = (host_mgreg_t)ret;
1797 for (aindex = pindex; aindex < sig->param_count; aindex++) {
1798 MonoType *t = dinfo->param_types [aindex];
1799 gpointer *arg = args [arg_index ++];
1800 ArgInfo *ainfo = &cinfo->args [aindex + sig->hasthis];
1801 int slot = -1;
1803 if (ainfo->storage == ArgOnStack || ainfo->storage == ArgVtypeOnStack || ainfo->storage == ArgVtypeByRefOnStack) {
1804 slot = PARAM_REGS + 1 + (ainfo->offset / sizeof (host_mgreg_t));
1805 } else {
1806 slot = ainfo->reg;
1809 if (t->byref) {
1810 p->regs [slot] = (host_mgreg_t)*arg;
1811 continue;
/* On the iOS ABI stack arguments are packed at their natural size, so
 * sub-word values must be stored with a narrow store instead of a full
 * host_mgreg_t write. */
1814 if (ios_abi && ainfo->storage == ArgOnStack) {
1815 guint8 *stack_arg = (guint8*)&(p->regs [PARAM_REGS + 1]) + ainfo->offset;
1816 gboolean handled = TRUE;
1818 /* Special case arguments smaller than 1 machine word */
1819 switch (t->type) {
1820 case MONO_TYPE_U1:
1821 *(guint8*)stack_arg = *(guint8*)arg;
1822 break;
1823 case MONO_TYPE_I1:
1824 *(gint8*)stack_arg = *(gint8*)arg;
1825 break;
1826 case MONO_TYPE_U2:
1827 *(guint16*)stack_arg = *(guint16*)arg;
1828 break;
1829 case MONO_TYPE_I2:
1830 *(gint16*)stack_arg = *(gint16*)arg;
1831 break;
1832 case MONO_TYPE_I4:
1833 *(gint32*)stack_arg = *(gint32*)arg;
1834 break;
1835 case MONO_TYPE_U4:
1836 *(guint32*)stack_arg = *(guint32*)arg;
1837 break;
1838 default:
1839 handled = FALSE;
1840 break;
1842 if (handled)
1843 continue;
1846 switch (t->type) {
1847 case MONO_TYPE_OBJECT:
1848 case MONO_TYPE_PTR:
1849 case MONO_TYPE_I:
1850 case MONO_TYPE_U:
1851 case MONO_TYPE_I8:
1852 case MONO_TYPE_U8:
1853 p->regs [slot] = (host_mgreg_t)*arg;
1854 break;
1855 case MONO_TYPE_U1:
1856 p->regs [slot] = *(guint8*)arg;
1857 break;
1858 case MONO_TYPE_I1:
1859 p->regs [slot] = *(gint8*)arg;
1860 break;
1861 case MONO_TYPE_I2:
1862 p->regs [slot] = *(gint16*)arg;
1863 break;
1864 case MONO_TYPE_U2:
1865 p->regs [slot] = *(guint16*)arg;
1866 break;
1867 case MONO_TYPE_I4:
1868 p->regs [slot] = *(gint32*)arg;
1869 break;
1870 case MONO_TYPE_U4:
1871 p->regs [slot] = *(guint32*)arg;
1872 break;
1873 case MONO_TYPE_R4:
1874 p->fpregs [ainfo->reg] = bitcast_r4_to_r8 (*(float*)arg);
1875 p->n_fpargs ++;
1876 break;
1877 case MONO_TYPE_R8:
1878 p->fpregs [ainfo->reg] = *(double*)arg;
1879 p->n_fpargs ++;
1880 break;
1881 case MONO_TYPE_GENERICINST:
1882 if (MONO_TYPE_IS_REFERENCE (t)) {
1883 p->regs [slot] = (host_mgreg_t)*arg;
1884 break;
1885 } else {
1886 if (t->type == MONO_TYPE_GENERICINST && mono_class_is_nullable (mono_class_from_mono_type_internal (t))) {
1887 MonoClass *klass = mono_class_from_mono_type_internal (t);
1888 guint8 *nullable_buf;
1889 int size;
1892 * Use p->buffer as a temporary buffer since the data needs to be available after this call
1893 * if the nullable param is passed by ref.
1895 size = mono_class_value_size (klass, NULL);
1896 nullable_buf = nullable_buffer + buffer_offset;
1897 buffer_offset += size;
1898 g_assert (buffer_offset <= dinfo->nullable_area);
1900 /* The argument pointed to by arg is either a boxed vtype or null */
1901 mono_nullable_init (nullable_buf, (MonoObject*)arg, klass);
1903 arg = (gpointer*)nullable_buf;
1904 /* Fall though */
1905 } else {
1906 /* Fall though */
1909 case MONO_TYPE_VALUETYPE:
1910 switch (ainfo->storage) {
1911 case ArgVtypeInIRegs:
1912 for (i = 0; i < ainfo->nregs; ++i)
1913 p->regs [slot ++] = ((host_mgreg_t*)arg) [i];
1914 break;
1915 case ArgHFA:
1916 if (ainfo->esize == 4) {
1917 for (i = 0; i < ainfo->nregs; ++i)
1918 p->fpregs [ainfo->reg + i] = bitcast_r4_to_r8 (((float*)arg) [ainfo->foffsets [i] / 4]);
1919 } else {
1920 for (i = 0; i < ainfo->nregs; ++i)
1921 p->fpregs [ainfo->reg + i] = ((double*)arg) [ainfo->foffsets [i] / 8];
1923 p->n_fpargs += ainfo->nregs;
1924 break;
1925 case ArgVtypeByRef:
1926 case ArgVtypeByRefOnStack:
1927 p->regs [slot] = (host_mgreg_t)arg;
1928 break;
1929 case ArgVtypeOnStack:
1930 for (i = 0; i < ainfo->size / 8; ++i)
1931 p->regs [slot ++] = ((host_mgreg_t*)arg) [i];
1932 break;
1933 default:
1934 g_assert_not_reached ();
1935 break;
1937 break;
1938 default:
1939 g_assert_not_reached ();
/*
 * mono_arch_finish_dyn_call:
 *
 *   Copy the dyn call result out of the DynCallArgs buffer BUF into the
 * return buffer recorded by mono_arch_start_dyn_call (): integer results
 * from args->res/res2, fp results from args->fpregs (R4 via
 * bitcast_r8_to_r4 ()), and HFA struct results repacked field by field.
 * ArgVtypeByRef results were already written directly by the callee.
 * The GENERICINST case deliberately falls through into MONO_TYPE_VALUETYPE
 * for non-reference instantiations.
 */
1944 void
1945 mono_arch_finish_dyn_call (MonoDynCallInfo *info, guint8 *buf)
1947 ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;
1948 CallInfo *cinfo = ainfo->cinfo;
1949 DynCallArgs *args = (DynCallArgs*)buf;
1950 MonoType *ptype = ainfo->rtype;
1951 guint8 *ret = args->ret;
1952 host_mgreg_t res = args->res;
1953 host_mgreg_t res2 = args->res2;
1954 int i;
1956 if (cinfo->ret.storage == ArgVtypeByRef)
1957 return;
1959 switch (ptype->type) {
1960 case MONO_TYPE_VOID:
1961 *(gpointer*)ret = NULL;
1962 break;
1963 case MONO_TYPE_OBJECT:
1964 case MONO_TYPE_I:
1965 case MONO_TYPE_U:
1966 case MONO_TYPE_PTR:
1967 *(gpointer*)ret = (gpointer)res;
1968 break;
1969 case MONO_TYPE_I1:
1970 *(gint8*)ret = res;
1971 break;
1972 case MONO_TYPE_U1:
1973 *(guint8*)ret = res;
1974 break;
1975 case MONO_TYPE_I2:
1976 *(gint16*)ret = res;
1977 break;
1978 case MONO_TYPE_U2:
1979 *(guint16*)ret = res;
1980 break;
1981 case MONO_TYPE_I4:
1982 *(gint32*)ret = res;
1983 break;
1984 case MONO_TYPE_U4:
1985 *(guint32*)ret = res;
1986 break;
1987 case MONO_TYPE_I8:
1988 case MONO_TYPE_U8:
1989 *(guint64*)ret = res;
1990 break;
1991 case MONO_TYPE_R4:
1992 *(float*)ret = bitcast_r8_to_r4 (args->fpregs [0]);
1993 break;
1994 case MONO_TYPE_R8:
1995 *(double*)ret = args->fpregs [0];
1996 break;
1997 case MONO_TYPE_GENERICINST:
1998 if (MONO_TYPE_IS_REFERENCE (ptype)) {
1999 *(gpointer*)ret = (gpointer)res;
2000 break;
2001 } else {
2002 /* Fall though */
2004 case MONO_TYPE_VALUETYPE:
2005 switch (ainfo->cinfo->ret.storage) {
2006 case ArgVtypeInIRegs:
2007 *(host_mgreg_t*)ret = res;
2008 if (ainfo->cinfo->ret.nregs > 1)
2009 ((host_mgreg_t*)ret) [1] = res2;
2010 break;
2011 case ArgHFA:
2012 /* Use the same area for returning fp values */
2013 if (cinfo->ret.esize == 4) {
2014 for (i = 0; i < cinfo->ret.nregs; ++i)
2015 ((float*)ret) [cinfo->ret.foffsets [i] / 4] = bitcast_r8_to_r4 (args->fpregs [i]);
2016 } else {
2017 for (i = 0; i < cinfo->ret.nregs; ++i)
2018 ((double*)ret) [cinfo->ret.foffsets [i] / 8] = args->fpregs [i];
2020 break;
2021 default:
2022 g_assert_not_reached ();
2023 break;
2025 break;
2026 default:
2027 g_assert_not_reached ();
2031 #if __APPLE__
2032 G_BEGIN_DECLS
2033 void sys_icache_invalidate (void *start, size_t len);
2034 G_END_DECLS
2035 #endif
/*
 * mono_arch_flush_icache:
 *
 *   Make SIZE bytes of freshly written code at CODE visible to instruction
 * fetch. On Apple platforms this defers to sys_icache_invalidate (); on
 * other targets it performs the standard AArch64 sequence by hand:
 * clean+invalidate dcache lines, dsb, invalidate icache lines, dsb, isb.
 */
2037 void
2038 mono_arch_flush_icache (guint8 *code, gint size)
2040 #ifndef MONO_CROSS_COMPILE
2041 #if __APPLE__
2042 sys_icache_invalidate (code, size);
2043 #else
2044 /* Don't rely on GCC's __clear_cache implementation, as it caches
2045 * icache/dcache cache line sizes, that can vary between cores on
2046 * big.LITTLE architectures. */
2047 guint64 end = (guint64) (code + size);
2048 guint64 addr;
2049 /* always go with cacheline size of 4 bytes as this code isn't perf critical
2050 * anyway. Reading the cache line size from a machine register can be racy
2051 * on a big.LITTLE architecture if the cores don't have the same cache line
2052 * sizes. */
2053 const size_t icache_line_size = 4;
2054 const size_t dcache_line_size = 4;
2056 addr = (guint64) code & ~(guint64) (dcache_line_size - 1);
2057 for (; addr < end; addr += dcache_line_size)
2058 asm volatile("dc civac, %0" : : "r" (addr) : "memory");
2059 asm volatile("dsb ish" : : : "memory");
2061 addr = (guint64) code & ~(guint64) (icache_line_size - 1);
2062 for (; addr < end; addr += icache_line_size)
2063 asm volatile("ic ivau, %0" : : "r" (addr) : "memory");
2065 asm volatile ("dsb ish" : : : "memory");
2066 asm volatile ("isb" : : : "memory");
2067 #endif
2068 #endif
2071 #ifndef DISABLE_JIT
/*
 * mono_arch_opcode_needs_emulation:
 *
 *   Not implemented on ARM64: NOT_IMPLEMENTED fires first; the FALSE return
 * only satisfies the compiler.
 */
2073 gboolean
2074 mono_arch_opcode_needs_emulation (MonoCompile *cfg, int opcode)
2076 NOT_IMPLEMENTED;
2077 return FALSE;
/*
 * mono_arch_get_allocatable_int_vars:
 *
 *   Return the list of variables in CFG eligible for global integer register
 * allocation: live (used at least once), not dead/volatile/indirect, an
 * OP_LOCAL or OP_ARG, and of register size. The list is sorted by
 * mono_varlist_sort () before returning.
 */
2080 GList *
2081 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
2083 GList *vars = NULL;
2084 int i;
2086 for (i = 0; i < cfg->num_varinfo; i++) {
2087 MonoInst *ins = cfg->varinfo [i];
2088 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
2090 /* unused vars */
2091 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
2092 continue;
2094 if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) ||
2095 (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
2096 continue;
2098 if (mono_is_regsize_var (ins->inst_vtype)) {
2099 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
2100 g_assert (i == vmv->idx);
2101 vars = g_list_prepend (vars, vmv);
2105 vars = mono_varlist_sort (cfg, vars, 0);
2107 return vars;
2110 GList *
2111 mono_arch_get_global_int_regs (MonoCompile *cfg)
2113 GList *regs = NULL;
2114 int i;
2116 /* r28 is reserved for cfg->arch.args_reg */
2117 /* r27 is reserved for the imt argument */
2118 for (i = ARMREG_R19; i <= ARMREG_R26; ++i)
2119 regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
2121 return regs;
2124 guint32
2125 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
2127 MonoInst *ins = cfg->varinfo [vmv->idx];
2129 if (ins->opcode == OP_ARG)
2130 return 1;
2131 else
2132 return 2;
/*
 * mono_arch_create_vars:
 *
 *   Create the arch-specific compile-time variables for CFG: the vret
 * address local when the return value is passed by reference, the
 * seq-point/single-step/breakpoint trampoline locals when sdb sequence
 * points are enabled, and the LMF var when the method saves the LMF.
 */
2135 void
2136 mono_arch_create_vars (MonoCompile *cfg)
2138 MonoMethodSignature *sig;
2139 CallInfo *cinfo;
2141 sig = mono_method_signature_internal (cfg->method);
2142 if (!cfg->arch.cinfo)
2143 cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
2144 cinfo = cfg->arch.cinfo;
2146 if (cinfo->ret.storage == ArgVtypeByRef) {
2147 cfg->vret_addr = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
2148 cfg->vret_addr->flags |= MONO_INST_VOLATILE;
2151 if (cfg->gen_sdb_seq_points) {
2152 MonoInst *ins;
2154 if (cfg->compile_aot) {
2155 ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
2156 ins->flags |= MONO_INST_VOLATILE;
2157 cfg->arch.seq_point_info_var = ins;
2160 ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
2161 ins->flags |= MONO_INST_VOLATILE;
2162 cfg->arch.ss_tramp_var = ins;
2164 ins = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
2165 ins->flags |= MONO_INST_VOLATILE;
2166 cfg->arch.bp_tramp_var = ins;
2169 if (cfg->method->save_lmf) {
2170 cfg->create_lmf_var = TRUE;
2171 cfg->lmf_ir = TRUE;
/*
 * mono_arch_allocate_vars:
 *
 *   Assign every argument and local of CFG either a register (OP_REGVAR) or
 * a frame slot (OP_REGOFFSET), laying out the frame bottom-up from fp:
 * saved fp+lr, callee-saved regs, return value area, register arguments
 * spilled in the prolog, seq-point trampoline vars, then locals. Stack
 * arguments stay in the parent frame and are addressed through
 * cfg->arch.args_reg (r28). Finally cfg->stack_offset is aligned to the
 * frame alignment.
 */
2175 void
2176 mono_arch_allocate_vars (MonoCompile *cfg)
2178 MonoMethodSignature *sig;
2179 MonoInst *ins;
2180 CallInfo *cinfo;
2181 ArgInfo *ainfo;
2182 int i, offset, size, align;
2183 guint32 locals_stack_size, locals_stack_align;
2184 gint32 *offsets;
2187 * Allocate arguments and locals to either register (OP_REGVAR) or to a stack slot (OP_REGOFFSET).
2188 * Compute cfg->stack_offset and update cfg->used_int_regs.
2191 sig = mono_method_signature_internal (cfg->method);
2193 if (!cfg->arch.cinfo)
2194 cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
2195 cinfo = cfg->arch.cinfo;
2198 * The ARM64 ABI always uses a frame pointer.
2199 * The instruction set prefers positive offsets, so fp points to the bottom of the
2200 * frame, and stack slots are at positive offsets.
2201 * If some arguments are received on the stack, their offsets relative to fp can
2202 * not be computed right now because the stack frame might grow due to spilling
2203 * done by the local register allocator. To solve this, we reserve a register
2204 * which points to them.
2205 * The stack frame looks like this:
2206 * args_reg -> <bottom of parent frame>
2207 * <locals etc>
2208 * fp -> <saved fp+lr>
2209 * sp -> <localloc/params area>
2211 cfg->frame_reg = ARMREG_FP;
2212 cfg->flags |= MONO_CFG_HAS_SPILLUP;
2213 offset = 0;
2215 /* Saved fp+lr */
2216 offset += 16;
2218 if (cinfo->stack_usage) {
2219 g_assert (!(cfg->used_int_regs & (1 << ARMREG_R28)));
2220 cfg->arch.args_reg = ARMREG_R28;
2221 cfg->used_int_regs |= 1 << ARMREG_R28;
2224 if (cfg->method->save_lmf) {
2225 /* The LMF var is allocated normally */
2226 } else {
2227 /* Callee saved regs */
2228 cfg->arch.saved_gregs_offset = offset;
2229 for (i = 0; i < 32; ++i)
2230 if ((MONO_ARCH_CALLEE_SAVED_REGS & (1 << i)) && (cfg->used_int_regs & (1 << i)))
2231 offset += 8;
2234 /* Return value */
2235 switch (cinfo->ret.storage) {
2236 case ArgNone:
2237 break;
2238 case ArgInIReg:
2239 case ArgInFReg:
2240 case ArgInFRegR4:
2241 cfg->ret->opcode = OP_REGVAR;
2242 cfg->ret->dreg = cinfo->ret.reg;
2243 break;
2244 case ArgVtypeInIRegs:
2245 case ArgHFA:
2246 /* Allocate a local to hold the result, the epilog will copy it to the correct place */
2247 cfg->ret->opcode = OP_REGOFFSET;
2248 cfg->ret->inst_basereg = cfg->frame_reg;
2249 cfg->ret->inst_offset = offset;
2250 if (cinfo->ret.storage == ArgHFA)
2251 // FIXME:
2252 offset += 64;
2253 else
2254 offset += 16;
2255 break;
2256 case ArgVtypeByRef:
2257 /* This variable will be initalized in the prolog from R8 */
2258 cfg->vret_addr->opcode = OP_REGOFFSET;
2259 cfg->vret_addr->inst_basereg = cfg->frame_reg;
2260 cfg->vret_addr->inst_offset = offset;
2261 offset += 8;
2262 if (G_UNLIKELY (cfg->verbose_level > 1)) {
2263 printf ("vret_addr =");
2264 mono_print_ins (cfg->vret_addr);
2266 break;
2267 default:
2268 g_assert_not_reached ();
2269 break;
2272 /* Arguments */
2273 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2274 ainfo = cinfo->args + i;
2276 ins = cfg->args [i];
2277 if (ins->opcode == OP_REGVAR)
2278 continue;
2280 ins->opcode = OP_REGOFFSET;
2281 ins->inst_basereg = cfg->frame_reg;
2283 switch (ainfo->storage) {
2284 case ArgInIReg:
2285 case ArgInFReg:
2286 case ArgInFRegR4:
2287 // FIXME: Use nregs/size
2288 /* These will be copied to the stack in the prolog */
2289 ins->inst_offset = offset;
2290 offset += 8;
2291 break;
2292 case ArgOnStack:
2293 case ArgOnStackR4:
2294 case ArgOnStackR8:
2295 case ArgVtypeOnStack:
2296 /* These are in the parent frame */
2297 g_assert (cfg->arch.args_reg);
2298 ins->inst_basereg = cfg->arch.args_reg;
2299 ins->inst_offset = ainfo->offset;
2300 break;
2301 case ArgVtypeInIRegs:
2302 case ArgHFA:
2303 ins->opcode = OP_REGOFFSET;
2304 ins->inst_basereg = cfg->frame_reg;
2305 /* These arguments are saved to the stack in the prolog */
2306 ins->inst_offset = offset;
2307 if (cfg->verbose_level >= 2)
2308 printf ("arg %d allocated to %s+0x%0x.\n", i, mono_arch_regname (ins->inst_basereg), (int)ins->inst_offset);
2309 if (ainfo->storage == ArgHFA)
2310 // FIXME:
2311 offset += 64;
2312 else
2313 offset += 16;
2314 break;
2315 case ArgVtypeByRefOnStack: {
2316 MonoInst *vtaddr;
2318 if (ainfo->gsharedvt) {
2319 ins->opcode = OP_REGOFFSET;
2320 ins->inst_basereg = cfg->arch.args_reg;
2321 ins->inst_offset = ainfo->offset;
2322 break;
2325 /* The vtype address is in the parent frame */
2326 g_assert (cfg->arch.args_reg);
2327 MONO_INST_NEW (cfg, vtaddr, 0);
2328 vtaddr->opcode = OP_REGOFFSET;
2329 vtaddr->inst_basereg = cfg->arch.args_reg;
2330 vtaddr->inst_offset = ainfo->offset;
2332 /* Need an indirection */
2333 ins->opcode = OP_VTARG_ADDR;
2334 ins->inst_left = vtaddr;
2335 break;
2337 case ArgVtypeByRef: {
2338 MonoInst *vtaddr;
2340 if (ainfo->gsharedvt) {
2341 ins->opcode = OP_REGOFFSET;
2342 ins->inst_basereg = cfg->frame_reg;
2343 ins->inst_offset = offset;
2344 offset += 8;
2345 break;
2348 /* The vtype address is in a register, will be copied to the stack in the prolog */
2349 MONO_INST_NEW (cfg, vtaddr, 0);
2350 vtaddr->opcode = OP_REGOFFSET;
2351 vtaddr->inst_basereg = cfg->frame_reg;
2352 vtaddr->inst_offset = offset;
2353 offset += 8;
2355 /* Need an indirection */
2356 ins->opcode = OP_VTARG_ADDR;
2357 ins->inst_left = vtaddr;
2358 break;
2360 default:
2361 g_assert_not_reached ();
2362 break;
2366 /* Allocate these first so they have a small offset, OP_SEQ_POINT depends on this */
2367 // FIXME: Allocate these to registers
2368 ins = cfg->arch.seq_point_info_var;
2369 if (ins) {
2370 size = 8;
2371 align = 8;
2372 offset += align - 1;
2373 offset &= ~(align - 1);
2374 ins->opcode = OP_REGOFFSET;
2375 ins->inst_basereg = cfg->frame_reg;
2376 ins->inst_offset = offset;
2377 offset += size;
2379 ins = cfg->arch.ss_tramp_var;
2380 if (ins) {
2381 size = 8;
2382 align = 8;
2383 offset += align - 1;
2384 offset &= ~(align - 1);
2385 ins->opcode = OP_REGOFFSET;
2386 ins->inst_basereg = cfg->frame_reg;
2387 ins->inst_offset = offset;
2388 offset += size;
2390 ins = cfg->arch.bp_tramp_var;
2391 if (ins) {
2392 size = 8;
2393 align = 8;
2394 offset += align - 1;
2395 offset &= ~(align - 1);
2396 ins->opcode = OP_REGOFFSET;
2397 ins->inst_basereg = cfg->frame_reg;
2398 ins->inst_offset = offset;
2399 offset += size;
2402 /* Locals */
2403 offsets = mono_allocate_stack_slots (cfg, FALSE, &locals_stack_size, &locals_stack_align);
2404 if (locals_stack_align)
2405 offset = ALIGN_TO (offset, locals_stack_align);
2407 for (i = cfg->locals_start; i < cfg->num_varinfo; i++) {
2408 if (offsets [i] != -1) {
2409 ins = cfg->varinfo [i];
2410 ins->opcode = OP_REGOFFSET;
2411 ins->inst_basereg = cfg->frame_reg;
2412 ins->inst_offset = offset + offsets [i];
2413 //printf ("allocated local %d to ", i); mono_print_tree_nl (ins);
2416 offset += locals_stack_size;
2418 offset = ALIGN_TO (offset, MONO_ARCH_FRAME_ALIGNMENT);
2420 cfg->stack_offset = offset;
#ifdef ENABLE_LLVM
/*
 * mono_arch_get_llvm_call_info:
 *
 *   Translate the native ARM64 calling-convention information computed by
 * get_call_info () for SIG into the LLVMCallInfo representation used by the
 * LLVM backend. Allocated from cfg->mempool, so the result does not need to
 * be freed explicitly.
 */
LLVMCallInfo*
mono_arch_get_llvm_call_info (MonoCompile *cfg, MonoMethodSignature *sig)
{
	int i, n;
	CallInfo *cinfo;
	ArgInfo *ainfo;
	LLVMCallInfo *linfo;

	/* Total number of arguments, including the implicit 'this' */
	n = sig->param_count + sig->hasthis;

	cinfo = get_call_info (cfg->mempool, sig);

	/* linfo->args [] is a trailing array of N entries */
	linfo = mono_mempool_alloc0 (cfg->mempool, sizeof (LLVMCallInfo) + (sizeof (LLVMArgInfo) * n));

	/* Map the return value storage */
	switch (cinfo->ret.storage) {
	case ArgInIReg:
	case ArgInFReg:
	case ArgInFRegR4:
	case ArgNone:
		/* Scalar/void returns need no special handling (linfo is zeroed) */
		break;
	case ArgVtypeByRef:
		linfo->ret.storage = LLVMArgVtypeByRef;
		break;
	/*
	 * FIXME: This doesn't work yet since the llvm backend represents these types as an i8
	 * array which is returned in int regs
	 */
	case ArgHFA:
		linfo->ret.storage = LLVMArgFpStruct;
		linfo->ret.nslots = cinfo->ret.nregs;
		linfo->ret.esize = cinfo->ret.esize;
		break;
	case ArgVtypeInIRegs:
		/* LLVM models this by returning an int */
		linfo->ret.storage = LLVMArgVtypeAsScalar;
		linfo->ret.nslots = cinfo->ret.nregs;
		linfo->ret.esize = cinfo->ret.esize;
		break;
	default:
		g_assert_not_reached ();
		break;
	}

	/* Map each argument's storage */
	for (i = 0; i < n; ++i) {
		LLVMArgInfo *lainfo = &linfo->args [i];

		ainfo = cinfo->args + i;

		lainfo->storage = LLVMArgNone;

		switch (ainfo->storage) {
		case ArgInIReg:
		case ArgInFReg:
		case ArgInFRegR4:
		case ArgOnStack:
		case ArgOnStackR4:
		case ArgOnStackR8:
			/* Plain scalars: LLVM handles the reg/stack placement itself */
			lainfo->storage = LLVMArgNormal;
			break;
		case ArgVtypeByRef:
		case ArgVtypeByRefOnStack:
			lainfo->storage = LLVMArgVtypeByRef;
			break;
		case ArgHFA: {
			int j;

			/* Homogeneous float aggregate: passed as a series of fp regs */
			lainfo->storage = LLVMArgAsFpArgs;
			lainfo->nslots = ainfo->nregs;
			lainfo->esize = ainfo->esize;
			for (j = 0; j < ainfo->nregs; ++j)
				lainfo->pair_storage [j] = LLVMArgInFPReg;
			break;
		}
		case ArgVtypeInIRegs:
			lainfo->storage = LLVMArgAsIArgs;
			lainfo->nslots = ainfo->nregs;
			break;
		case ArgVtypeOnStack:
			if (ainfo->hfa) {
				int j;
				/* Same as above */
				lainfo->storage = LLVMArgAsFpArgs;
				lainfo->nslots = ainfo->nregs;
				lainfo->esize = ainfo->esize;
				/* fp regs already consumed by earlier args must be skipped */
				lainfo->ndummy_fpargs = ainfo->nfregs_to_skip;
				for (j = 0; j < ainfo->nregs; ++j)
					lainfo->pair_storage [j] = LLVMArgInFPReg;
			} else {
				lainfo->storage = LLVMArgAsIArgs;
				/* One 8 byte slot per 8 bytes of the vtype */
				lainfo->nslots = ainfo->size / 8;
			}
			break;
		default:
			g_assert_not_reached ();
			break;
		}
	}

	return linfo;
}
#endif
/*
 * add_outarg_reg:
 *
 *   Emit IR which moves ARG into a fresh vreg and records that vreg with CALL
 * so the register allocator assigns it to the fixed argument register REG.
 * STORAGE selects between integer moves, fp moves and r4 (single precision)
 * moves.
 */
static void
add_outarg_reg (MonoCompile *cfg, MonoCallInst *call, ArgStorage storage, int reg, MonoInst *arg)
{
	MonoInst *ins;

	switch (storage) {
	case ArgInIReg:
		MONO_INST_NEW (cfg, ins, OP_MOVE);
		ins->dreg = mono_alloc_ireg_copy (cfg, arg->dreg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, FALSE);
		break;
	case ArgInFReg:
		MONO_INST_NEW (cfg, ins, OP_FMOVE);
		ins->dreg = mono_alloc_freg (cfg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
		break;
	case ArgInFRegR4:
		/* The opcode depends on how r4 values are represented in this config */
		if (COMPILE_LLVM (cfg))
			MONO_INST_NEW (cfg, ins, OP_FMOVE);
		else if (cfg->r4fp)
			MONO_INST_NEW (cfg, ins, OP_RMOVE);
		else
			MONO_INST_NEW (cfg, ins, OP_ARM_SETFREG_R4);
		ins->dreg = mono_alloc_freg (cfg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
		break;
	default:
		g_assert_not_reached ();
		break;
	}
}
/*
 * emit_sig_cookie:
 *
 *   Emit IR which stores the vararg signature cookie at its stack slot
 * (cinfo->sig_cookie.offset relative to SP). The cookie describes only the
 * arguments after the sentinel position, see the comment below.
 * Not supported for tailcalls.
 */
static void
emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
{
	MonoMethodSignature *tmp_sig;
	int sig_reg;

	if (MONO_IS_TAILCALL_OPCODE (call))
		NOT_IMPLEMENTED;

	g_assert (cinfo->sig_cookie.storage == ArgOnStack);

	/*
	 * mono_ArgIterator_Setup assumes the signature cookie is
	 * passed first and all the arguments which were before it are
	 * passed on the stack after the signature. So compensate by
	 * passing a different signature.
	 */
	tmp_sig = mono_metadata_signature_dup (call->signature);
	tmp_sig->param_count -= call->signature->sentinelpos;
	tmp_sig->sentinelpos = 0;
	memcpy (tmp_sig->params, call->signature->params + call->signature->sentinelpos, tmp_sig->param_count * sizeof (MonoType*));

	sig_reg = mono_alloc_ireg (cfg);
	MONO_EMIT_NEW_SIGNATURECONST (cfg, sig_reg, tmp_sig);

	MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, cinfo->sig_cookie.offset, sig_reg);
}
/*
 * mono_arch_emit_call:
 *
 *   Emit the IR which sets up the arguments (and the vtype return buffer, if
 * any) for CALL according to the ARM64 calling convention computed by
 * get_call_info (). The CallInfo is stored in call->call_info for use by
 * later passes (e.g. emit_move_return_value ()).
 */
void
mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
{
	MonoMethodSignature *sig;
	MonoInst *arg, *vtarg;
	CallInfo *cinfo;
	ArgInfo *ainfo;
	int i;

	sig = call->signature;

	cinfo = get_call_info (cfg->mempool, sig);

	switch (cinfo->ret.storage) {
	case ArgVtypeInIRegs:
	case ArgHFA:
		if (MONO_IS_TAILCALL_OPCODE (call))
			break;
		/*
		 * The vtype is returned in registers, save the return area address in a local, and save the vtype into
		 * the location pointed to by it after call in emit_move_return_value ().
		 */
		if (!cfg->arch.vret_addr_loc) {
			cfg->arch.vret_addr_loc = mono_compile_create_var (cfg, mono_get_int_type (), OP_LOCAL);
			/* Prevent it from being register allocated or optimized away */
			cfg->arch.vret_addr_loc->flags |= MONO_INST_VOLATILE;
		}

		MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->arch.vret_addr_loc->dreg, call->vret_var->dreg);
		break;
	case ArgVtypeByRef:
		/* Pass the vtype return address in R8 */
		g_assert (!MONO_IS_TAILCALL_OPCODE (call) || call->vret_var == cfg->vret_addr);
		MONO_INST_NEW (cfg, vtarg, OP_MOVE);
		vtarg->sreg1 = call->vret_var->dreg;
		vtarg->dreg = mono_alloc_preg (cfg);
		MONO_ADD_INS (cfg->cbb, vtarg);

		mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->ret.reg, FALSE);
		break;
	default:
		break;
	}

	for (i = 0; i < cinfo->nargs; ++i) {
		ainfo = cinfo->args + i;
		arg = call->args [i];

		if ((sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
			/* Emit the signature cookie just before the implicit arguments */
			emit_sig_cookie (cfg, call, cinfo);
		}

		switch (ainfo->storage) {
		case ArgInIReg:
		case ArgInFReg:
		case ArgInFRegR4:
			add_outarg_reg (cfg, call, ainfo->storage, ainfo->reg, arg);
			break;
		case ArgOnStack:
			/* Store with the width the callee expects for this slot */
			switch (ainfo->slot_size) {
			case 8:
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				break;
			case 4:
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				break;
			case 2:
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI2_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				break;
			case 1:
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI1_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				break;
			default:
				g_assert_not_reached ();
				break;
			}
			break;
		case ArgOnStackR8:
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
			break;
		case ArgOnStackR4:
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
			break;
		case ArgVtypeInIRegs:
		case ArgVtypeByRef:
		case ArgVtypeByRefOnStack:
		case ArgVtypeOnStack:
		case ArgHFA: {
			MonoInst *ins;
			guint32 align;
			guint32 size;

			size = mono_class_value_size (arg->klass, &align);

			/*
			 * Vtypes are handled later by mono_arch_emit_outarg_vt (); pass it the
			 * call and a copy of the ArgInfo via inst_p0/inst_p1.
			 */
			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
			ins->sreg1 = arg->dreg;
			ins->klass = arg->klass;
			ins->backend.size = size;
			ins->inst_p0 = call;
			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
			MONO_ADD_INS (cfg->cbb, ins);
			break;
		}
		default:
			g_assert_not_reached ();
			break;
		}
	}

	/* Handle the case where there are no implicit arguments */
	if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (cinfo->nargs == sig->sentinelpos))
		emit_sig_cookie (cfg, call, cinfo);

	call->call_info = cinfo;
	call->stack_usage = cinfo->stack_usage;
}
/*
 * mono_arch_emit_outarg_vt:
 *
 *   Emit the IR which passes the vtype in SRC as an argument, using the
 * ArgInfo stashed in ins->inst_p1 by mono_arch_emit_call (). Depending on
 * the storage class, the vtype is loaded into integer regs, fp regs, passed
 * by reference (after copying it to a local unless gsharedvt), or copied to
 * the outgoing stack area.
 */
void
mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
{
	MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
	ArgInfo *ainfo = (ArgInfo*)ins->inst_p1;
	MonoInst *load;
	int i;

	/* Empty vtypes need no code, except for gsharedvt where the size is not static */
	if (ins->backend.size == 0 && !ainfo->gsharedvt)
		return;

	switch (ainfo->storage) {
	case ArgVtypeInIRegs:
		for (i = 0; i < ainfo->nregs; ++i) {
			// FIXME: Smaller sizes
			MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
			load->dreg = mono_alloc_ireg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = i * sizeof (target_mgreg_t);
			MONO_ADD_INS (cfg->cbb, load);
			add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg + i, load);
		}
		break;
	case ArgHFA:
		/* Homogeneous float aggregate: one fp reg per element */
		for (i = 0; i < ainfo->nregs; ++i) {
			if (ainfo->esize == 4)
				MONO_INST_NEW (cfg, load, OP_LOADR4_MEMBASE);
			else
				MONO_INST_NEW (cfg, load, OP_LOADR8_MEMBASE);
			load->dreg = mono_alloc_freg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = ainfo->foffsets [i];
			MONO_ADD_INS (cfg->cbb, load);
			add_outarg_reg (cfg, call, ainfo->esize == 4 ? ArgInFRegR4 : ArgInFReg, ainfo->reg + i, load);
		}
		break;
	case ArgVtypeByRef:
	case ArgVtypeByRefOnStack: {
		MonoInst *vtaddr, *load, *arg;

		/* Pass the vtype address in a reg/on the stack */
		if (ainfo->gsharedvt) {
			load = src;
		} else {
			/* Make a copy of the argument */
			vtaddr = mono_compile_create_var (cfg, m_class_get_byval_arg (ins->klass), OP_LOCAL);

			MONO_INST_NEW (cfg, load, OP_LDADDR);
			load->inst_p0 = vtaddr;
			vtaddr->flags |= MONO_INST_INDIRECT;
			load->type = STACK_MP;
			load->klass = vtaddr->klass;
			load->dreg = mono_alloc_ireg (cfg);
			MONO_ADD_INS (cfg->cbb, load);
			mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, ainfo->size, 8);
		}

		if (ainfo->storage == ArgVtypeByRef) {
			MONO_INST_NEW (cfg, arg, OP_MOVE);
			arg->dreg = mono_alloc_preg (cfg);
			arg->sreg1 = load->dreg;
			MONO_ADD_INS (cfg->cbb, arg);
			add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg, arg);
		} else {
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, load->dreg);
		}
		break;
	}
	case ArgVtypeOnStack:
		/* Copy the vtype to the outgoing stack area in 8 byte chunks */
		for (i = 0; i < ainfo->size / 8; ++i) {
			MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
			load->dreg = mono_alloc_ireg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = i * 8;
			MONO_ADD_INS (cfg->cbb, load);
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ARMREG_SP, ainfo->offset + (i * 8), load->dreg);
		}
		break;
	default:
		g_assert_not_reached ();
		break;
	}
}
/*
 * mono_arch_emit_setret:
 *
 *   Emit the IR which moves VAL into the return location of the current
 * method (cfg->ret) according to the return storage computed by
 * get_call_info (). Vtype returns are handled elsewhere, so only scalar
 * storages appear here.
 */
void
mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
{
	MonoMethodSignature *sig;
	CallInfo *cinfo;

	sig = mono_method_signature_internal (cfg->method);
	/* Cache the CallInfo for the method being compiled */
	if (!cfg->arch.cinfo)
		cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
	cinfo = cfg->arch.cinfo;

	switch (cinfo->ret.storage) {
	case ArgNone:
		break;
	case ArgInIReg:
		MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
		break;
	case ArgInFReg:
		MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
		break;
	case ArgInFRegR4:
		/* Same r4 representation choices as in add_outarg_reg () */
		if (COMPILE_LLVM (cfg))
			MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
		else if (cfg->r4fp)
			MONO_EMIT_NEW_UNALU (cfg, OP_RMOVE, cfg->ret->dreg, val->dreg);
		else
			MONO_EMIT_NEW_UNALU (cfg, OP_ARM_SETFREG_R4, cfg->ret->dreg, val->dreg);
		break;
	default:
		g_assert_not_reached ();
		break;
	}
}
#ifndef DISABLE_JIT

/*
 * mono_arch_tailcall_supported:
 *
 *   Return whether a tailcall from a method with signature CALLER_SIG to one
 * with CALLEE_SIG is supported on arm64. Requires the callee to use no more
 * stack than the caller, matching return storage, bounded stack usage, and
 * no byref vtype parameters (those point into the caller's frame, which is
 * gone after a tailcall).
 */
gboolean
mono_arch_tailcall_supported (MonoCompile *cfg, MonoMethodSignature *caller_sig, MonoMethodSignature *callee_sig, gboolean virtual_)
{
	g_assert (caller_sig);
	g_assert (callee_sig);

	CallInfo *caller_info = get_call_info (NULL, caller_sig);
	CallInfo *callee_info = get_call_info (NULL, callee_sig);

	gboolean res = IS_SUPPORTED_TAILCALL (callee_info->stack_usage <= caller_info->stack_usage)
		&& IS_SUPPORTED_TAILCALL (caller_info->ret.storage == callee_info->ret.storage);

	// FIXME Limit stack_usage to 1G. emit_ldrx / strx has 32bit limits.
	res &= IS_SUPPORTED_TAILCALL (callee_info->stack_usage < (1 << 30));
	res &= IS_SUPPORTED_TAILCALL (caller_info->stack_usage < (1 << 30));

	// valuetype parameters are the address of a local
	const ArgInfo *ainfo;
	ainfo = callee_info->args + callee_sig->hasthis;
	for (int i = 0; res && i < callee_sig->param_count; ++i) {
		res = IS_SUPPORTED_TAILCALL (ainfo [i].storage != ArgVtypeByRef)
			&& IS_SUPPORTED_TAILCALL (ainfo [i].storage != ArgVtypeByRefOnStack);
	}

	/* get_call_info () was called with a NULL mempool, so free explicitly */
	g_free (caller_info);
	g_free (callee_info);

	return res;
}

#endif
2863 gboolean
2864 mono_arch_is_inst_imm (int opcode, int imm_opcode, gint64 imm)
2866 return (imm >= -((gint64)1<<31) && imm <= (((gint64)1<<31)-1));
2869 void
2870 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
2872 //NOT_IMPLEMENTED;
2875 void
2876 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
2878 //NOT_IMPLEMENTED;
/*
 * ADD_NEW_INS:
 *
 *   Allocate a new instruction DEST with opcode OP and insert it into the
 * basic block `bb` right before `ins` (both captured from the enclosing
 * scope of the macro user, i.e. mono_arch_lowering_pass ()).
 */
#define ADD_NEW_INS(cfg,dest,op) do {	\
		MONO_INST_NEW ((cfg), (dest), (op)); \
		mono_bblock_insert_before_ins (bb, ins, (dest)); \
	} while (0)
/*
 * mono_arch_lowering_pass:
 *
 *   Lower IR in BB into forms the arm64 code generator can handle directly:
 * fix up carry-flag semantics after subtractions, decompose div/rem
 * immediates, turn compare-with-zero + branch pairs into cbz/cbnz opcodes,
 * and swap operands of fp compares whose condition would otherwise match
 * unordered operands.
 */
void
mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
{
	MonoInst *ins, *temp, *last_ins = NULL;

	MONO_BB_FOR_EACH_INS (bb, ins) {
		switch (ins->opcode) {
		case OP_SBB:
		case OP_ISBB:
		case OP_SUBCC:
		case OP_ISUBCC:
			if (ins->next && (ins->next->opcode == OP_COND_EXC_C || ins->next->opcode == OP_COND_EXC_IC))
				/* ARM sets the C flag to 1 if there was _no_ overflow */
				ins->next->opcode = OP_COND_EXC_NC;
			break;
		case OP_IDIV_IMM:
		case OP_IREM_IMM:
		case OP_IDIV_UN_IMM:
		case OP_IREM_UN_IMM:
		case OP_LREM_IMM:
			/* No immediate forms for these; load the imm into a reg first */
			mono_decompose_op_imm (cfg, bb, ins);
			break;
		case OP_LOCALLOC_IMM:
			if (ins->inst_imm > 32) {
				/* Large allocations go through the non-imm OP_LOCALLOC path */
				ADD_NEW_INS (cfg, temp, OP_ICONST);
				temp->inst_c0 = ins->inst_imm;
				temp->dreg = mono_alloc_ireg (cfg);
				ins->sreg1 = temp->dreg;
				ins->opcode = mono_op_imm_to_op (ins->opcode);
			}
			break;
		case OP_ICOMPARE_IMM:
			/* compare-with-0 + beq/bne -> single cbz/cbnz instruction */
			if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBEQ) {
				ins->next->opcode = OP_ARM64_CBZW;
				ins->next->sreg1 = ins->sreg1;
				NULLIFY_INS (ins);
			} else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBNE_UN) {
				ins->next->opcode = OP_ARM64_CBNZW;
				ins->next->sreg1 = ins->sreg1;
				NULLIFY_INS (ins);
			}
			break;
		case OP_LCOMPARE_IMM:
		case OP_COMPARE_IMM:
			if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBEQ) {
				ins->next->opcode = OP_ARM64_CBZX;
				ins->next->sreg1 = ins->sreg1;
				NULLIFY_INS (ins);
			} else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBNE_UN) {
				ins->next->opcode = OP_ARM64_CBNZX;
				ins->next->sreg1 = ins->sreg1;
				NULLIFY_INS (ins);
			}
			break;
		case OP_FCOMPARE:
		case OP_RCOMPARE: {
			gboolean swap = FALSE;
			int reg;

			if (!ins->next) {
				/* Optimized away */
				NULLIFY_INS (ins);
				break;
			}

			/*
			 * FP compares with unordered operands set the flags
			 * to NZCV=0011, which matches some non-unordered compares
			 * as well, like LE, so have to swap the operands.
			 */
			switch (ins->next->opcode) {
			case OP_FBLT:
				ins->next->opcode = OP_FBGT;
				swap = TRUE;
				break;
			case OP_FBLE:
				ins->next->opcode = OP_FBGE;
				swap = TRUE;
				break;
			case OP_RBLT:
				ins->next->opcode = OP_RBGT;
				swap = TRUE;
				break;
			case OP_RBLE:
				ins->next->opcode = OP_RBGE;
				swap = TRUE;
				break;
			default:
				break;
			}
			if (swap) {
				reg = ins->sreg1;
				ins->sreg1 = ins->sreg2;
				ins->sreg2 = reg;
			}
			break;
		}
		default:
			break;
		}

		last_ins = ins;
	}
	bb->last_ins = last_ins;
	bb->max_vreg = cfg->next_vreg;
}
2993 void
2994 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *long_ins)
/*
 * opcode_to_armcond:
 *
 *   Map a mono branch/compare/cond-exception OPCODE to the ARM condition
 * code (ARMCOND_*) which should follow the compare instruction that set
 * the flags. Asserts on opcodes with no mapping.
 */
static int
opcode_to_armcond (int opcode)
{
	switch (opcode) {
	case OP_IBEQ:
	case OP_LBEQ:
	case OP_FBEQ:
	case OP_CEQ:
	case OP_ICEQ:
	case OP_LCEQ:
	case OP_FCEQ:
	case OP_RCEQ:
	case OP_COND_EXC_IEQ:
	case OP_COND_EXC_EQ:
		return ARMCOND_EQ;
	case OP_IBGE:
	case OP_LBGE:
	case OP_FBGE:
	case OP_ICGE:
	case OP_FCGE:
	case OP_RCGE:
		return ARMCOND_GE;
	case OP_IBGT:
	case OP_LBGT:
	case OP_FBGT:
	case OP_CGT:
	case OP_ICGT:
	case OP_LCGT:
	case OP_FCGT:
	case OP_RCGT:
	case OP_COND_EXC_IGT:
	case OP_COND_EXC_GT:
		return ARMCOND_GT;
	case OP_IBLE:
	case OP_LBLE:
	case OP_FBLE:
	case OP_ICLE:
	case OP_FCLE:
	case OP_RCLE:
		return ARMCOND_LE;
	case OP_IBLT:
	case OP_LBLT:
	case OP_FBLT:
	case OP_CLT:
	case OP_ICLT:
	case OP_LCLT:
	case OP_COND_EXC_ILT:
	case OP_COND_EXC_LT:
		return ARMCOND_LT;
	case OP_IBNE_UN:
	case OP_LBNE_UN:
	case OP_FBNE_UN:
	case OP_ICNEQ:
	case OP_FCNEQ:
	case OP_RCNEQ:
	case OP_COND_EXC_INE_UN:
	case OP_COND_EXC_NE_UN:
		return ARMCOND_NE;
	/* Unsigned comparisons use the HS/HI/LS/LO conditions */
	case OP_IBGE_UN:
	case OP_LBGE_UN:
	case OP_FBGE_UN:
	case OP_ICGE_UN:
	case OP_COND_EXC_IGE_UN:
	case OP_COND_EXC_GE_UN:
		return ARMCOND_HS;
	case OP_IBGT_UN:
	case OP_LBGT_UN:
	case OP_FBGT_UN:
	case OP_CGT_UN:
	case OP_ICGT_UN:
	case OP_LCGT_UN:
	case OP_FCGT_UN:
	case OP_RCGT_UN:
	case OP_COND_EXC_IGT_UN:
	case OP_COND_EXC_GT_UN:
		return ARMCOND_HI;
	case OP_IBLE_UN:
	case OP_LBLE_UN:
	case OP_FBLE_UN:
	case OP_ICLE_UN:
	case OP_COND_EXC_ILE_UN:
	case OP_COND_EXC_LE_UN:
		return ARMCOND_LS;
	case OP_IBLT_UN:
	case OP_LBLT_UN:
	case OP_FBLT_UN:
	case OP_CLT_UN:
	case OP_ICLT_UN:
	case OP_LCLT_UN:
	case OP_COND_EXC_ILT_UN:
	case OP_COND_EXC_LT_UN:
		return ARMCOND_LO;
	/*
	 * FCMP sets the NZCV condition bits as follows:
	 * eq = 0110
	 * < = 1000
	 * > = 0010
	 * unordered = 0011
	 * ARMCOND_LT is N!=V, so it matches unordered too, so
	 * fclt and fclt_un need to be special cased.
	 */
	case OP_FCLT:
	case OP_RCLT:
		/* N==1 */
		return ARMCOND_MI;
	case OP_FCLT_UN:
	case OP_RCLT_UN:
		return ARMCOND_LT;
	case OP_COND_EXC_C:
	case OP_COND_EXC_IC:
		return ARMCOND_CS;
	case OP_COND_EXC_OV:
	case OP_COND_EXC_IOV:
		return ARMCOND_VS;
	case OP_COND_EXC_NC:
	case OP_COND_EXC_INC:
		return ARMCOND_CC;
	case OP_COND_EXC_NO:
	case OP_COND_EXC_INO:
		return ARMCOND_VC;
	default:
		printf ("%s\n", mono_inst_name (opcode));
		g_assert_not_reached ();
		return -1;
	}
}
/*
 * emit_cond_exc:
 *
 *   Emit a conditional branch (with condition derived from OPCODE) to the
 * throw-exception code for EXC_NAME; the branch target is filled in later
 * via the MONO_PATCH_INFO_EXC patch. The adrx captures the current PC into
 * ip1 for the exception machinery.
 * This clobbers LR.
 */
static __attribute__ ((__warn_unused_result__)) guint8*
emit_cond_exc (MonoCompile *cfg, guint8 *code, int opcode, const char *exc_name)
{
	int cond;

	cond = opcode_to_armcond (opcode);
	/* Capture PC */
	arm_adrx (code, ARMREG_IP1, code);
	mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC, exc_name, MONO_R_ARM64_BCC);
	arm_bcc (code, cond, 0);
	return code;
}
/*
 * emit_move_return_value:
 *
 *   Emit native code which moves the return value of the call INS from the
 * convention-mandated return registers into the call's destination vreg, or,
 * for vtypes returned in registers, stores it into the return area whose
 * address was saved in cfg->arch.vret_addr_loc by mono_arch_emit_call ().
 */
static guint8*
emit_move_return_value (MonoCompile *cfg, guint8 * code, MonoInst *ins)
{
	CallInfo *cinfo;
	MonoCallInst *call;

	call = (MonoCallInst*)ins;
	cinfo = call->call_info;
	g_assert (cinfo);
	switch (cinfo->ret.storage) {
	case ArgNone:
		break;
	case ArgInIReg:
		/* LLVM compiled code might only set the bottom bits */
		if (call->signature && mini_get_underlying_type (call->signature->ret)->type == MONO_TYPE_I4)
			arm_sxtwx (code, call->inst.dreg, cinfo->ret.reg);
		else if (call->inst.dreg != cinfo->ret.reg)
			arm_movx (code, call->inst.dreg, cinfo->ret.reg);
		break;
	case ArgInFReg:
		if (call->inst.dreg != cinfo->ret.reg)
			arm_fmovd (code, call->inst.dreg, cinfo->ret.reg);
		break;
	case ArgInFRegR4:
		/* Without r4fp, r4 values are stored widened to double */
		if (cfg->r4fp)
			arm_fmovs (code, call->inst.dreg, cinfo->ret.reg);
		else
			arm_fcvt_sd (code, call->inst.dreg, cinfo->ret.reg);
		break;
	case ArgVtypeInIRegs: {
		MonoInst *loc = cfg->arch.vret_addr_loc;
		int i;

		/* Load the destination address */
		g_assert (loc && loc->opcode == OP_REGOFFSET);
		code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
		for (i = 0; i < cinfo->ret.nregs; ++i)
			arm_strx (code, cinfo->ret.reg + i, ARMREG_LR, i * 8);
		break;
	}
	case ArgHFA: {
		MonoInst *loc = cfg->arch.vret_addr_loc;
		int i;

		/* Load the destination address */
		g_assert (loc && loc->opcode == OP_REGOFFSET);
		code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
		for (i = 0; i < cinfo->ret.nregs; ++i) {
			if (cinfo->ret.esize == 4)
				arm_strfpw (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
			else
				arm_strfpx (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
		}
		break;
	}
	case ArgVtypeByRef:
		/* The callee stored the result through the pointer passed in r8 */
		break;
	default:
		g_assert_not_reached ();
		break;
	}
	return code;
}
/*
 * emit_branch_island:
 *
 *   Emit a branch island for the conditional branches from cfg->native_code + start_offset to code.
 * Conditional branches (bcc/cbz) have a limited range; each one emitted so
 * far in this bb is redirected to an unconditional branch in the island,
 * which can reach anywhere. The patch infos are rewritten accordingly.
 */
static guint8*
emit_branch_island (MonoCompile *cfg, guint8 *code, int start_offset)
{
	MonoJumpInfo *ji;

	/* Iterate over the patch infos added so far by this bb */
	int island_size = 0;
	for (ji = cfg->patch_info; ji; ji = ji->next) {
		if (ji->ip.i < start_offset)
			/* The patch infos are in reverse order, so this means the end */
			break;
		if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ)
			island_size += 4;
	}

	if (island_size) {
		code = realloc_code (cfg, island_size);

		/* Branch over the island */
		arm_b (code, code + 4 + island_size);

		for (ji = cfg->patch_info; ji; ji = ji->next) {
			if (ji->ip.i < start_offset)
				break;
			if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ) {
				/* Rewrite the cond branch so it branches to an unconditional branch in the branch island */
				arm_patch_rel (cfg->native_code + ji->ip.i, code, ji->relocation);
				/* Rewrite the patch so it points to the unconditional branch */
				ji->ip.i = code - cfg->native_code;
				ji->relocation = MONO_R_ARM64_B;
				arm_b (code, code);
			}
		}
		set_code_cursor (cfg, code);
	}
	return code;
}
3246 void
3247 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3249 MonoInst *ins;
3250 MonoCallInst *call;
3251 guint8 *code = cfg->native_code + cfg->code_len;
3252 int start_offset, max_len, dreg, sreg1, sreg2;
3253 target_mgreg_t imm;
3255 if (cfg->verbose_level > 2)
3256 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3258 start_offset = code - cfg->native_code;
3259 g_assert (start_offset <= cfg->code_size);
3261 MONO_BB_FOR_EACH_INS (bb, ins) {
3262 guint offset = code - cfg->native_code;
3263 set_code_cursor (cfg, code);
3264 max_len = ins_get_size (ins->opcode);
3265 code = realloc_code (cfg, max_len);
3267 if (G_UNLIKELY (cfg->arch.cond_branch_islands && offset - start_offset > 4 * 0x1ffff)) {
3268 /* Emit a branch island for large basic blocks */
3269 code = emit_branch_island (cfg, code, start_offset);
3270 offset = code - cfg->native_code;
3271 start_offset = offset;
3274 mono_debug_record_line_number (cfg, ins, offset);
3276 dreg = ins->dreg;
3277 sreg1 = ins->sreg1;
3278 sreg2 = ins->sreg2;
3279 imm = ins->inst_imm;
3281 switch (ins->opcode) {
3282 case OP_ICONST:
3283 code = emit_imm (code, dreg, ins->inst_c0);
3284 break;
3285 case OP_I8CONST:
3286 code = emit_imm64 (code, dreg, ins->inst_c0);
3287 break;
3288 case OP_MOVE:
3289 if (dreg != sreg1)
3290 arm_movx (code, dreg, sreg1);
3291 break;
3292 case OP_NOP:
3293 case OP_RELAXED_NOP:
3294 break;
3295 case OP_JUMP_TABLE:
3296 mono_add_patch_info_rel (cfg, offset, (MonoJumpInfoType)(gsize)ins->inst_i1, ins->inst_p0, MONO_R_ARM64_IMM);
3297 code = emit_imm64_template (code, dreg);
3298 break;
3299 case OP_BREAK:
3301 * gdb does not like encountering the hw breakpoint ins in the debugged code.
3302 * So instead of emitting a trap, we emit a call a C function and place a
3303 * breakpoint there.
3305 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_break));
3306 break;
3307 case OP_LOCALLOC: {
3308 guint8 *buf [16];
3310 arm_addx_imm (code, ARMREG_IP0, sreg1, (MONO_ARCH_FRAME_ALIGNMENT - 1));
3311 // FIXME: andx_imm doesn't work yet
3312 code = emit_imm (code, ARMREG_IP1, -MONO_ARCH_FRAME_ALIGNMENT);
3313 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3314 //arm_andx_imm (code, ARMREG_IP0, sreg1, - MONO_ARCH_FRAME_ALIGNMENT);
3315 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
3316 arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
3317 arm_movspx (code, ARMREG_SP, ARMREG_IP1);
3319 /* Init */
3320 /* ip1 = pointer, ip0 = end */
3321 arm_addx (code, ARMREG_IP0, ARMREG_IP1, ARMREG_IP0);
3322 buf [0] = code;
3323 arm_cmpx (code, ARMREG_IP1, ARMREG_IP0);
3324 buf [1] = code;
3325 arm_bcc (code, ARMCOND_EQ, 0);
3326 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_IP1, 0);
3327 arm_addx_imm (code, ARMREG_IP1, ARMREG_IP1, 16);
3328 arm_b (code, buf [0]);
3329 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3331 arm_movspx (code, dreg, ARMREG_SP);
3332 if (cfg->param_area)
3333 code = emit_subx_sp_imm (code, cfg->param_area);
3334 break;
3336 case OP_LOCALLOC_IMM: {
3337 int imm, offset;
3339 imm = ALIGN_TO (ins->inst_imm, MONO_ARCH_FRAME_ALIGNMENT);
3340 g_assert (arm_is_arith_imm (imm));
3341 arm_subx_imm (code, ARMREG_SP, ARMREG_SP, imm);
3343 /* Init */
3344 g_assert (MONO_ARCH_FRAME_ALIGNMENT == 16);
3345 offset = 0;
3346 while (offset < imm) {
3347 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_SP, offset);
3348 offset += 16;
3350 arm_movspx (code, dreg, ARMREG_SP);
3351 if (cfg->param_area)
3352 code = emit_subx_sp_imm (code, cfg->param_area);
3353 break;
3355 case OP_AOTCONST:
3356 code = emit_aotconst (cfg, code, dreg, (MonoJumpInfoType)(gsize)ins->inst_i1, ins->inst_p0);
3357 break;
3358 case OP_OBJC_GET_SELECTOR:
3359 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_OBJC_SELECTOR_REF, ins->inst_p0);
3360 /* See arch_emit_objc_selector_ref () in aot-compiler.c */
3361 arm_ldrx_lit (code, ins->dreg, 0);
3362 arm_nop (code);
3363 arm_nop (code);
3364 break;
3365 case OP_SEQ_POINT: {
3366 MonoInst *info_var = cfg->arch.seq_point_info_var;
3369 * For AOT, we use one got slot per method, which will point to a
3370 * SeqPointInfo structure, containing all the information required
3371 * by the code below.
3373 if (cfg->compile_aot) {
3374 g_assert (info_var);
3375 g_assert (info_var->opcode == OP_REGOFFSET);
3378 if (ins->flags & MONO_INST_SINGLE_STEP_LOC) {
3379 MonoInst *var = cfg->arch.ss_tramp_var;
3381 g_assert (var);
3382 g_assert (var->opcode == OP_REGOFFSET);
3383 /* Load ss_tramp_var */
3384 /* This is equal to &ss_trampoline */
3385 arm_ldrx (code, ARMREG_IP1, var->inst_basereg, var->inst_offset);
3386 /* Load the trampoline address */
3387 arm_ldrx (code, ARMREG_IP1, ARMREG_IP1, 0);
3388 /* Call it if it is non-null */
3389 arm_cbzx (code, ARMREG_IP1, code + 8);
3390 code = mono_arm_emit_blrx (code, ARMREG_IP1);
3393 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3395 if (cfg->compile_aot) {
3396 const guint32 offset = code - cfg->native_code;
3397 guint32 val;
3399 arm_ldrx (code, ARMREG_IP1, info_var->inst_basereg, info_var->inst_offset);
3400 /* Add the offset */
3401 val = ((offset / 4) * sizeof (target_mgreg_t)) + MONO_STRUCT_OFFSET (SeqPointInfo, bp_addrs);
3402 /* Load the info->bp_addrs [offset], which is either 0 or the address of the bp trampoline */
3403 code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP1, val);
3404 /* Skip the load if its 0 */
3405 arm_cbzx (code, ARMREG_IP1, code + 8);
3406 /* Call the breakpoint trampoline */
3407 code = mono_arm_emit_blrx (code, ARMREG_IP1);
3408 } else {
3409 MonoInst *var = cfg->arch.bp_tramp_var;
3411 g_assert (var);
3412 g_assert (var->opcode == OP_REGOFFSET);
3413 /* Load the address of the bp trampoline into IP0 */
3414 arm_ldrx (code, ARMREG_IP0, var->inst_basereg, var->inst_offset);
3416 * A placeholder for a possible breakpoint inserted by
3417 * mono_arch_set_breakpoint ().
3419 arm_nop (code);
3421 break;
3424 /* BRANCH */
3425 case OP_BR:
3426 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_B);
3427 arm_b (code, code);
3428 break;
3429 case OP_BR_REG:
3430 arm_brx (code, sreg1);
3431 break;
3432 case OP_IBEQ:
3433 case OP_IBGE:
3434 case OP_IBGT:
3435 case OP_IBLE:
3436 case OP_IBLT:
3437 case OP_IBNE_UN:
3438 case OP_IBGE_UN:
3439 case OP_IBGT_UN:
3440 case OP_IBLE_UN:
3441 case OP_IBLT_UN:
3442 case OP_LBEQ:
3443 case OP_LBGE:
3444 case OP_LBGT:
3445 case OP_LBLE:
3446 case OP_LBLT:
3447 case OP_LBNE_UN:
3448 case OP_LBGE_UN:
3449 case OP_LBGT_UN:
3450 case OP_LBLE_UN:
3451 case OP_LBLT_UN:
3452 case OP_FBEQ:
3453 case OP_FBNE_UN:
3454 case OP_FBLT:
3455 case OP_FBGT:
3456 case OP_FBGT_UN:
3457 case OP_FBLE:
3458 case OP_FBGE:
3459 case OP_FBGE_UN: {
3460 int cond;
3462 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3463 cond = opcode_to_armcond (ins->opcode);
3464 arm_bcc (code, cond, 0);
3465 break;
3467 case OP_FBLT_UN:
3468 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3469 /* For fp compares, ARMCOND_LT is lt or unordered */
3470 arm_bcc (code, ARMCOND_LT, 0);
3471 break;
3472 case OP_FBLE_UN:
3473 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3474 arm_bcc (code, ARMCOND_EQ, 0);
3475 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3476 /* For fp compares, ARMCOND_LT is lt or unordered */
3477 arm_bcc (code, ARMCOND_LT, 0);
3478 break;
3479 case OP_ARM64_CBZW:
3480 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3481 arm_cbzw (code, sreg1, 0);
3482 break;
3483 case OP_ARM64_CBZX:
3484 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3485 arm_cbzx (code, sreg1, 0);
3486 break;
3487 case OP_ARM64_CBNZW:
3488 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3489 arm_cbnzw (code, sreg1, 0);
3490 break;
3491 case OP_ARM64_CBNZX:
3492 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3493 arm_cbnzx (code, sreg1, 0);
3494 break;
3495 /* ALU */
3496 case OP_IADD:
3497 arm_addw (code, dreg, sreg1, sreg2);
3498 break;
3499 case OP_LADD:
3500 arm_addx (code, dreg, sreg1, sreg2);
3501 break;
3502 case OP_ISUB:
3503 arm_subw (code, dreg, sreg1, sreg2);
3504 break;
3505 case OP_LSUB:
3506 arm_subx (code, dreg, sreg1, sreg2);
3507 break;
3508 case OP_IAND:
3509 arm_andw (code, dreg, sreg1, sreg2);
3510 break;
3511 case OP_LAND:
3512 arm_andx (code, dreg, sreg1, sreg2);
3513 break;
3514 case OP_IOR:
3515 arm_orrw (code, dreg, sreg1, sreg2);
3516 break;
3517 case OP_LOR:
3518 arm_orrx (code, dreg, sreg1, sreg2);
3519 break;
3520 case OP_IXOR:
3521 arm_eorw (code, dreg, sreg1, sreg2);
3522 break;
3523 case OP_LXOR:
3524 arm_eorx (code, dreg, sreg1, sreg2);
3525 break;
3526 case OP_INEG:
3527 arm_negw (code, dreg, sreg1);
3528 break;
3529 case OP_LNEG:
3530 arm_negx (code, dreg, sreg1);
3531 break;
3532 case OP_INOT:
3533 arm_mvnw (code, dreg, sreg1);
3534 break;
3535 case OP_LNOT:
3536 arm_mvnx (code, dreg, sreg1);
3537 break;
3538 case OP_IADDCC:
3539 arm_addsw (code, dreg, sreg1, sreg2);
3540 break;
3541 case OP_ADDCC:
3542 case OP_LADDCC:
3543 arm_addsx (code, dreg, sreg1, sreg2);
3544 break;
3545 case OP_ISUBCC:
3546 arm_subsw (code, dreg, sreg1, sreg2);
3547 break;
3548 case OP_LSUBCC:
3549 case OP_SUBCC:
3550 arm_subsx (code, dreg, sreg1, sreg2);
3551 break;
3552 case OP_ICOMPARE:
3553 arm_cmpw (code, sreg1, sreg2);
3554 break;
3555 case OP_COMPARE:
3556 case OP_LCOMPARE:
3557 arm_cmpx (code, sreg1, sreg2);
3558 break;
3559 case OP_IADD_IMM:
3560 code = emit_addw_imm (code, dreg, sreg1, imm);
3561 break;
3562 case OP_LADD_IMM:
3563 case OP_ADD_IMM:
3564 code = emit_addx_imm (code, dreg, sreg1, imm);
3565 break;
3566 case OP_ISUB_IMM:
3567 code = emit_subw_imm (code, dreg, sreg1, imm);
3568 break;
3569 case OP_LSUB_IMM:
3570 code = emit_subx_imm (code, dreg, sreg1, imm);
3571 break;
3572 case OP_IAND_IMM:
3573 code = emit_andw_imm (code, dreg, sreg1, imm);
3574 break;
3575 case OP_LAND_IMM:
3576 case OP_AND_IMM:
3577 code = emit_andx_imm (code, dreg, sreg1, imm);
3578 break;
3579 case OP_IOR_IMM:
3580 code = emit_orrw_imm (code, dreg, sreg1, imm);
3581 break;
3582 case OP_LOR_IMM:
3583 code = emit_orrx_imm (code, dreg, sreg1, imm);
3584 break;
3585 case OP_IXOR_IMM:
3586 code = emit_eorw_imm (code, dreg, sreg1, imm);
3587 break;
3588 case OP_LXOR_IMM:
3589 code = emit_eorx_imm (code, dreg, sreg1, imm);
3590 break;
3591 case OP_ICOMPARE_IMM:
3592 code = emit_cmpw_imm (code, sreg1, imm);
3593 break;
3594 case OP_LCOMPARE_IMM:
3595 case OP_COMPARE_IMM:
3596 if (imm == 0) {
3597 arm_cmpx (code, sreg1, ARMREG_RZR);
3598 } else {
3599 // FIXME: 32 vs 64 bit issues for 0xffffffff
3600 code = emit_imm64 (code, ARMREG_LR, imm);
3601 arm_cmpx (code, sreg1, ARMREG_LR);
3603 break;
3604 case OP_ISHL:
3605 arm_lslvw (code, dreg, sreg1, sreg2);
3606 break;
3607 case OP_LSHL:
3608 arm_lslvx (code, dreg, sreg1, sreg2);
3609 break;
3610 case OP_ISHR:
3611 arm_asrvw (code, dreg, sreg1, sreg2);
3612 break;
3613 case OP_LSHR:
3614 arm_asrvx (code, dreg, sreg1, sreg2);
3615 break;
3616 case OP_ISHR_UN:
3617 arm_lsrvw (code, dreg, sreg1, sreg2);
3618 break;
3619 case OP_LSHR_UN:
3620 arm_lsrvx (code, dreg, sreg1, sreg2);
3621 break;
3622 case OP_ISHL_IMM:
3623 if (imm == 0)
3624 arm_movx (code, dreg, sreg1);
3625 else
3626 arm_lslw (code, dreg, sreg1, imm);
3627 break;
3628 case OP_SHL_IMM:
3629 case OP_LSHL_IMM:
3630 if (imm == 0)
3631 arm_movx (code, dreg, sreg1);
3632 else
3633 arm_lslx (code, dreg, sreg1, imm);
3634 break;
3635 case OP_ISHR_IMM:
3636 if (imm == 0)
3637 arm_movx (code, dreg, sreg1);
3638 else
3639 arm_asrw (code, dreg, sreg1, imm);
3640 break;
3641 case OP_LSHR_IMM:
3642 case OP_SHR_IMM:
3643 if (imm == 0)
3644 arm_movx (code, dreg, sreg1);
3645 else
3646 arm_asrx (code, dreg, sreg1, imm);
3647 break;
3648 case OP_ISHR_UN_IMM:
3649 if (imm == 0)
3650 arm_movx (code, dreg, sreg1);
3651 else
3652 arm_lsrw (code, dreg, sreg1, imm);
3653 break;
3654 case OP_SHR_UN_IMM:
3655 case OP_LSHR_UN_IMM:
3656 if (imm == 0)
3657 arm_movx (code, dreg, sreg1);
3658 else
3659 arm_lsrx (code, dreg, sreg1, imm);
3660 break;
3662 /* 64BIT ALU */
3663 case OP_SEXT_I4:
3664 arm_sxtwx (code, dreg, sreg1);
3665 break;
3666 case OP_ZEXT_I4:
3667 /* Clean out the upper word */
3668 arm_movw (code, dreg, sreg1);
3669 break;
3671 /* MULTIPLY/DIVISION */
3672 case OP_IDIV:
3673 case OP_IREM:
3674 // FIXME: Optimize this
3675 /* Check for zero */
3676 arm_cmpx_imm (code, sreg2, 0);
3677 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3678 /* Check for INT_MIN/-1 */
3679 code = emit_imm (code, ARMREG_IP0, 0x80000000);
3680 arm_cmpx (code, sreg1, ARMREG_IP0);
3681 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3682 code = emit_imm (code, ARMREG_IP0, 0xffffffff);
3683 arm_cmpx (code, sreg2, ARMREG_IP0);
3684 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3685 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3686 arm_cmpx_imm (code, ARMREG_IP0, 1);
3687 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "OverflowException");
3688 if (ins->opcode == OP_IREM) {
3689 arm_sdivw (code, ARMREG_LR, sreg1, sreg2);
3690 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3691 } else {
3692 arm_sdivw (code, dreg, sreg1, sreg2);
3694 break;
3695 case OP_IDIV_UN:
3696 arm_cmpx_imm (code, sreg2, 0);
3697 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3698 arm_udivw (code, dreg, sreg1, sreg2);
3699 break;
3700 case OP_IREM_UN:
3701 arm_cmpx_imm (code, sreg2, 0);
3702 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3703 arm_udivw (code, ARMREG_LR, sreg1, sreg2);
3704 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3705 break;
3706 case OP_LDIV:
3707 case OP_LREM:
3708 // FIXME: Optimize this
3709 /* Check for zero */
3710 arm_cmpx_imm (code, sreg2, 0);
3711 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3712 /* Check for INT64_MIN/-1 */
3713 code = emit_imm64 (code, ARMREG_IP0, 0x8000000000000000);
3714 arm_cmpx (code, sreg1, ARMREG_IP0);
3715 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3716 code = emit_imm64 (code, ARMREG_IP0, 0xffffffffffffffff);
3717 arm_cmpx (code, sreg2, ARMREG_IP0);
3718 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3719 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3720 arm_cmpx_imm (code, ARMREG_IP0, 1);
3721 /* 64 bit uses OverflowException */
3722 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "OverflowException");
3723 if (ins->opcode == OP_LREM) {
3724 arm_sdivx (code, ARMREG_LR, sreg1, sreg2);
3725 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3726 } else {
3727 arm_sdivx (code, dreg, sreg1, sreg2);
3729 break;
3730 case OP_LDIV_UN:
3731 arm_cmpx_imm (code, sreg2, 0);
3732 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3733 arm_udivx (code, dreg, sreg1, sreg2);
3734 break;
3735 case OP_LREM_UN:
3736 arm_cmpx_imm (code, sreg2, 0);
3737 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3738 arm_udivx (code, ARMREG_LR, sreg1, sreg2);
3739 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3740 break;
3741 case OP_IMUL:
3742 arm_mulw (code, dreg, sreg1, sreg2);
3743 break;
3744 case OP_LMUL:
3745 arm_mulx (code, dreg, sreg1, sreg2);
3746 break;
3747 case OP_IMUL_IMM:
3748 code = emit_imm (code, ARMREG_LR, imm);
3749 arm_mulw (code, dreg, sreg1, ARMREG_LR);
3750 break;
3751 case OP_MUL_IMM:
3752 case OP_LMUL_IMM:
3753 code = emit_imm (code, ARMREG_LR, imm);
3754 arm_mulx (code, dreg, sreg1, ARMREG_LR);
3755 break;
3757 /* CONVERSIONS */
3758 case OP_ICONV_TO_I1:
3759 case OP_LCONV_TO_I1:
3760 arm_sxtbx (code, dreg, sreg1);
3761 break;
3762 case OP_ICONV_TO_I2:
3763 case OP_LCONV_TO_I2:
3764 arm_sxthx (code, dreg, sreg1);
3765 break;
3766 case OP_ICONV_TO_U1:
3767 case OP_LCONV_TO_U1:
3768 arm_uxtbw (code, dreg, sreg1);
3769 break;
3770 case OP_ICONV_TO_U2:
3771 case OP_LCONV_TO_U2:
3772 arm_uxthw (code, dreg, sreg1);
3773 break;
3775 /* CSET */
3776 case OP_CEQ:
3777 case OP_ICEQ:
3778 case OP_LCEQ:
3779 case OP_CLT:
3780 case OP_ICLT:
3781 case OP_LCLT:
3782 case OP_CGT:
3783 case OP_ICGT:
3784 case OP_LCGT:
3785 case OP_CLT_UN:
3786 case OP_ICLT_UN:
3787 case OP_LCLT_UN:
3788 case OP_CGT_UN:
3789 case OP_ICGT_UN:
3790 case OP_LCGT_UN:
3791 case OP_ICNEQ:
3792 case OP_ICGE:
3793 case OP_ICLE:
3794 case OP_ICGE_UN:
3795 case OP_ICLE_UN: {
3796 int cond;
3798 cond = opcode_to_armcond (ins->opcode);
3799 arm_cset (code, cond, dreg);
3800 break;
3802 case OP_FCEQ:
3803 case OP_FCLT:
3804 case OP_FCLT_UN:
3805 case OP_FCGT:
3806 case OP_FCGT_UN:
3807 case OP_FCNEQ:
3808 case OP_FCLE:
3809 case OP_FCGE: {
3810 int cond;
3812 cond = opcode_to_armcond (ins->opcode);
3813 arm_fcmpd (code, sreg1, sreg2);
3814 arm_cset (code, cond, dreg);
3815 break;
3818 /* MEMORY */
3819 case OP_LOADI1_MEMBASE:
3820 code = emit_ldrsbx (code, dreg, ins->inst_basereg, ins->inst_offset);
3821 break;
3822 case OP_LOADU1_MEMBASE:
3823 code = emit_ldrb (code, dreg, ins->inst_basereg, ins->inst_offset);
3824 break;
3825 case OP_LOADI2_MEMBASE:
3826 code = emit_ldrshx (code, dreg, ins->inst_basereg, ins->inst_offset);
3827 break;
3828 case OP_LOADU2_MEMBASE:
3829 code = emit_ldrh (code, dreg, ins->inst_basereg, ins->inst_offset);
3830 break;
3831 case OP_LOADI4_MEMBASE:
3832 code = emit_ldrswx (code, dreg, ins->inst_basereg, ins->inst_offset);
3833 break;
3834 case OP_LOADU4_MEMBASE:
3835 code = emit_ldrw (code, dreg, ins->inst_basereg, ins->inst_offset);
3836 break;
3837 case OP_LOAD_MEMBASE:
3838 case OP_LOADI8_MEMBASE:
3839 code = emit_ldrx (code, dreg, ins->inst_basereg, ins->inst_offset);
3840 break;
3841 case OP_STOREI1_MEMBASE_IMM:
3842 case OP_STOREI2_MEMBASE_IMM:
3843 case OP_STOREI4_MEMBASE_IMM:
3844 case OP_STORE_MEMBASE_IMM:
3845 case OP_STOREI8_MEMBASE_IMM: {
3846 int immreg;
3848 if (imm != 0) {
3849 code = emit_imm (code, ARMREG_LR, imm);
3850 immreg = ARMREG_LR;
3851 } else {
3852 immreg = ARMREG_RZR;
3855 switch (ins->opcode) {
3856 case OP_STOREI1_MEMBASE_IMM:
3857 code = emit_strb (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3858 break;
3859 case OP_STOREI2_MEMBASE_IMM:
3860 code = emit_strh (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3861 break;
3862 case OP_STOREI4_MEMBASE_IMM:
3863 code = emit_strw (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3864 break;
3865 case OP_STORE_MEMBASE_IMM:
3866 case OP_STOREI8_MEMBASE_IMM:
3867 code = emit_strx (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3868 break;
3869 default:
3870 g_assert_not_reached ();
3871 break;
3873 break;
3875 case OP_STOREI1_MEMBASE_REG:
3876 code = emit_strb (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3877 break;
3878 case OP_STOREI2_MEMBASE_REG:
3879 code = emit_strh (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3880 break;
3881 case OP_STOREI4_MEMBASE_REG:
3882 code = emit_strw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3883 break;
3884 case OP_STORE_MEMBASE_REG:
3885 case OP_STOREI8_MEMBASE_REG:
3886 code = emit_strx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3887 break;
3888 case OP_TLS_GET:
3889 code = emit_tls_get (code, dreg, ins->inst_offset);
3890 break;
3891 case OP_TLS_SET:
3892 code = emit_tls_set (code, sreg1, ins->inst_offset);
3893 break;
3894 /* Atomic */
3895 case OP_MEMORY_BARRIER:
3896 arm_dmb (code, ARM_DMB_ISH);
3897 break;
3898 case OP_ATOMIC_ADD_I4: {
3899 guint8 *buf [16];
3901 buf [0] = code;
3902 arm_ldxrw (code, ARMREG_IP0, sreg1);
3903 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3904 arm_stlxrw (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3905 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3907 arm_dmb (code, ARM_DMB_ISH);
3908 arm_movx (code, dreg, ARMREG_IP0);
3909 break;
3911 case OP_ATOMIC_ADD_I8: {
3912 guint8 *buf [16];
3914 buf [0] = code;
3915 arm_ldxrx (code, ARMREG_IP0, sreg1);
3916 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3917 arm_stlxrx (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3918 arm_cbnzx (code, ARMREG_IP1, buf [0]);
3920 arm_dmb (code, ARM_DMB_ISH);
3921 arm_movx (code, dreg, ARMREG_IP0);
3922 break;
3924 case OP_ATOMIC_EXCHANGE_I4: {
3925 guint8 *buf [16];
3927 buf [0] = code;
3928 arm_ldxrw (code, ARMREG_IP0, sreg1);
3929 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3930 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3932 arm_dmb (code, ARM_DMB_ISH);
3933 arm_movx (code, dreg, ARMREG_IP0);
3934 break;
3936 case OP_ATOMIC_EXCHANGE_I8: {
3937 guint8 *buf [16];
3939 buf [0] = code;
3940 arm_ldxrx (code, ARMREG_IP0, sreg1);
3941 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3942 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3944 arm_dmb (code, ARM_DMB_ISH);
3945 arm_movx (code, dreg, ARMREG_IP0);
3946 break;
3948 case OP_ATOMIC_CAS_I4: {
3949 guint8 *buf [16];
3951 /* sreg2 is the value, sreg3 is the comparand */
3952 buf [0] = code;
3953 arm_ldxrw (code, ARMREG_IP0, sreg1);
3954 arm_cmpw (code, ARMREG_IP0, ins->sreg3);
3955 buf [1] = code;
3956 arm_bcc (code, ARMCOND_NE, 0);
3957 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3958 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3959 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3961 arm_dmb (code, ARM_DMB_ISH);
3962 arm_movx (code, dreg, ARMREG_IP0);
3963 break;
3965 case OP_ATOMIC_CAS_I8: {
3966 guint8 *buf [16];
3968 buf [0] = code;
3969 arm_ldxrx (code, ARMREG_IP0, sreg1);
3970 arm_cmpx (code, ARMREG_IP0, ins->sreg3);
3971 buf [1] = code;
3972 arm_bcc (code, ARMCOND_NE, 0);
3973 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3974 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3975 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3977 arm_dmb (code, ARM_DMB_ISH);
3978 arm_movx (code, dreg, ARMREG_IP0);
3979 break;
3981 case OP_ATOMIC_LOAD_I1: {
3982 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3983 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3984 arm_dmb (code, ARM_DMB_ISH);
3985 arm_ldarb (code, ins->dreg, ARMREG_LR);
3986 arm_sxtbx (code, ins->dreg, ins->dreg);
3987 break;
3989 case OP_ATOMIC_LOAD_U1: {
3990 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3991 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3992 arm_dmb (code, ARM_DMB_ISH);
3993 arm_ldarb (code, ins->dreg, ARMREG_LR);
3994 arm_uxtbx (code, ins->dreg, ins->dreg);
3995 break;
3997 case OP_ATOMIC_LOAD_I2: {
3998 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3999 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4000 arm_dmb (code, ARM_DMB_ISH);
4001 arm_ldarh (code, ins->dreg, ARMREG_LR);
4002 arm_sxthx (code, ins->dreg, ins->dreg);
4003 break;
4005 case OP_ATOMIC_LOAD_U2: {
4006 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4007 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4008 arm_dmb (code, ARM_DMB_ISH);
4009 arm_ldarh (code, ins->dreg, ARMREG_LR);
4010 arm_uxthx (code, ins->dreg, ins->dreg);
4011 break;
4013 case OP_ATOMIC_LOAD_I4: {
4014 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4015 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4016 arm_dmb (code, ARM_DMB_ISH);
4017 arm_ldarw (code, ins->dreg, ARMREG_LR);
4018 arm_sxtwx (code, ins->dreg, ins->dreg);
4019 break;
4021 case OP_ATOMIC_LOAD_U4: {
4022 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4023 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4024 arm_dmb (code, ARM_DMB_ISH);
4025 arm_ldarw (code, ins->dreg, ARMREG_LR);
4026 arm_movw (code, ins->dreg, ins->dreg); /* Clear upper half of the register. */
4027 break;
4029 case OP_ATOMIC_LOAD_I8:
4030 case OP_ATOMIC_LOAD_U8: {
4031 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4032 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4033 arm_dmb (code, ARM_DMB_ISH);
4034 arm_ldarx (code, ins->dreg, ARMREG_LR);
4035 break;
4037 case OP_ATOMIC_LOAD_R4: {
4038 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4039 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4040 arm_dmb (code, ARM_DMB_ISH);
4041 if (cfg->r4fp) {
4042 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
4043 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
4044 } else {
4045 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
4046 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4047 arm_fcvt_sd (code, ins->dreg, FP_TEMP_REG);
4049 break;
4051 case OP_ATOMIC_LOAD_R8: {
4052 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
4053 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4054 arm_dmb (code, ARM_DMB_ISH);
4055 arm_ldarx (code, ARMREG_LR, ARMREG_LR);
4056 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
4057 break;
4059 case OP_ATOMIC_STORE_I1:
4060 case OP_ATOMIC_STORE_U1: {
4061 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4062 arm_stlrb (code, ARMREG_LR, ins->sreg1);
4063 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4064 arm_dmb (code, ARM_DMB_ISH);
4065 break;
4067 case OP_ATOMIC_STORE_I2:
4068 case OP_ATOMIC_STORE_U2: {
4069 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4070 arm_stlrh (code, ARMREG_LR, ins->sreg1);
4071 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4072 arm_dmb (code, ARM_DMB_ISH);
4073 break;
4075 case OP_ATOMIC_STORE_I4:
4076 case OP_ATOMIC_STORE_U4: {
4077 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4078 arm_stlrw (code, ARMREG_LR, ins->sreg1);
4079 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4080 arm_dmb (code, ARM_DMB_ISH);
4081 break;
4083 case OP_ATOMIC_STORE_I8:
4084 case OP_ATOMIC_STORE_U8: {
4085 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4086 arm_stlrx (code, ARMREG_LR, ins->sreg1);
4087 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4088 arm_dmb (code, ARM_DMB_ISH);
4089 break;
4091 case OP_ATOMIC_STORE_R4: {
4092 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4093 if (cfg->r4fp) {
4094 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
4095 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
4096 } else {
4097 arm_fcvt_ds (code, FP_TEMP_REG, ins->sreg1);
4098 arm_fmov_double_to_rx (code, ARMREG_IP0, FP_TEMP_REG);
4099 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
4101 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4102 arm_dmb (code, ARM_DMB_ISH);
4103 break;
4105 case OP_ATOMIC_STORE_R8: {
4106 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
4107 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
4108 arm_stlrx (code, ARMREG_LR, ARMREG_IP0);
4109 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
4110 arm_dmb (code, ARM_DMB_ISH);
4111 break;
4114 /* FP */
4115 case OP_R8CONST: {
4116 guint64 imm = *(guint64*)ins->inst_p0;
4118 if (imm == 0) {
4119 arm_fmov_rx_to_double (code, dreg, ARMREG_RZR);
4120 } else {
4121 code = emit_imm64 (code, ARMREG_LR, imm);
4122 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
4124 break;
4126 case OP_R4CONST: {
4127 guint64 imm = *(guint32*)ins->inst_p0;
4129 code = emit_imm64 (code, ARMREG_LR, imm);
4130 if (cfg->r4fp) {
4131 arm_fmov_rx_to_double (code, dreg, ARMREG_LR);
4132 } else {
4133 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4134 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4136 break;
4138 case OP_LOADR8_MEMBASE:
4139 code = emit_ldrfpx (code, dreg, ins->inst_basereg, ins->inst_offset);
4140 break;
4141 case OP_LOADR4_MEMBASE:
4142 if (cfg->r4fp) {
4143 code = emit_ldrfpw (code, dreg, ins->inst_basereg, ins->inst_offset);
4144 } else {
4145 code = emit_ldrfpw (code, FP_TEMP_REG, ins->inst_basereg, ins->inst_offset);
4146 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4148 break;
4149 case OP_STORER8_MEMBASE_REG:
4150 code = emit_strfpx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
4151 break;
4152 case OP_STORER4_MEMBASE_REG:
4153 if (cfg->r4fp) {
4154 code = emit_strfpw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
4155 } else {
4156 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
4157 code = emit_strfpw (code, FP_TEMP_REG, ins->inst_destbasereg, ins->inst_offset);
4159 break;
4160 case OP_FMOVE:
4161 if (dreg != sreg1)
4162 arm_fmovd (code, dreg, sreg1);
4163 break;
4164 case OP_RMOVE:
4165 if (dreg != sreg1)
4166 arm_fmovs (code, dreg, sreg1);
4167 break;
4168 case OP_MOVE_F_TO_I4:
4169 if (cfg->r4fp) {
4170 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
4171 } else {
4172 arm_fcvt_ds (code, ins->dreg, ins->sreg1);
4173 arm_fmov_double_to_rx (code, ins->dreg, ins->dreg);
4175 break;
4176 case OP_MOVE_I4_TO_F:
4177 if (cfg->r4fp) {
4178 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4179 } else {
4180 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4181 arm_fcvt_sd (code, ins->dreg, ins->dreg);
4183 break;
4184 case OP_MOVE_F_TO_I8:
4185 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
4186 break;
4187 case OP_MOVE_I8_TO_F:
4188 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
4189 break;
4190 case OP_FCOMPARE:
4191 arm_fcmpd (code, sreg1, sreg2);
4192 break;
4193 case OP_RCOMPARE:
4194 arm_fcmps (code, sreg1, sreg2);
4195 break;
4196 case OP_FCONV_TO_I1:
4197 arm_fcvtzs_dx (code, dreg, sreg1);
4198 arm_sxtbx (code, dreg, dreg);
4199 break;
4200 case OP_FCONV_TO_U1:
4201 arm_fcvtzu_dx (code, dreg, sreg1);
4202 arm_uxtbw (code, dreg, dreg);
4203 break;
4204 case OP_FCONV_TO_I2:
4205 arm_fcvtzs_dx (code, dreg, sreg1);
4206 arm_sxthx (code, dreg, dreg);
4207 break;
4208 case OP_FCONV_TO_U2:
4209 arm_fcvtzu_dx (code, dreg, sreg1);
4210 arm_uxthw (code, dreg, dreg);
4211 break;
4212 case OP_FCONV_TO_I4:
4213 case OP_FCONV_TO_I:
4214 arm_fcvtzs_dx (code, dreg, sreg1);
4215 arm_sxtwx (code, dreg, dreg);
4216 break;
4217 case OP_FCONV_TO_U4:
4218 arm_fcvtzu_dx (code, dreg, sreg1);
4219 break;
4220 case OP_FCONV_TO_I8:
4221 arm_fcvtzs_dx (code, dreg, sreg1);
4222 break;
4223 case OP_FCONV_TO_U8:
4224 arm_fcvtzu_dx (code, dreg, sreg1);
4225 break;
4226 case OP_FCONV_TO_R4:
4227 if (cfg->r4fp) {
4228 arm_fcvt_ds (code, dreg, sreg1);
4229 } else {
4230 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
4231 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4233 break;
4234 case OP_ICONV_TO_R4:
4235 if (cfg->r4fp) {
4236 arm_scvtf_rw_to_s (code, dreg, sreg1);
4237 } else {
4238 arm_scvtf_rw_to_s (code, FP_TEMP_REG, sreg1);
4239 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4241 break;
4242 case OP_LCONV_TO_R4:
4243 if (cfg->r4fp) {
4244 arm_scvtf_rx_to_s (code, dreg, sreg1);
4245 } else {
4246 arm_scvtf_rx_to_s (code, FP_TEMP_REG, sreg1);
4247 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
4249 break;
4250 case OP_ICONV_TO_R8:
4251 arm_scvtf_rw_to_d (code, dreg, sreg1);
4252 break;
4253 case OP_LCONV_TO_R8:
4254 arm_scvtf_rx_to_d (code, dreg, sreg1);
4255 break;
4256 case OP_ICONV_TO_R_UN:
4257 arm_ucvtf_rw_to_d (code, dreg, sreg1);
4258 break;
4259 case OP_LCONV_TO_R_UN:
4260 arm_ucvtf_rx_to_d (code, dreg, sreg1);
4261 break;
4262 case OP_FADD:
4263 arm_fadd_d (code, dreg, sreg1, sreg2);
4264 break;
4265 case OP_FSUB:
4266 arm_fsub_d (code, dreg, sreg1, sreg2);
4267 break;
4268 case OP_FMUL:
4269 arm_fmul_d (code, dreg, sreg1, sreg2);
4270 break;
4271 case OP_FDIV:
4272 arm_fdiv_d (code, dreg, sreg1, sreg2);
4273 break;
4274 case OP_FREM:
4275 /* Emulated */
4276 g_assert_not_reached ();
4277 break;
4278 case OP_FNEG:
4279 arm_fneg_d (code, dreg, sreg1);
4280 break;
4281 case OP_ARM_SETFREG_R4:
4282 arm_fcvt_ds (code, dreg, sreg1);
4283 break;
4284 case OP_CKFINITE:
4285 /* Check for infinity */
4286 code = emit_imm64 (code, ARMREG_LR, 0x7fefffffffffffffLL);
4287 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4288 arm_fabs_d (code, FP_TEMP_REG2, sreg1);
4289 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG);
4290 code = emit_cond_exc (cfg, code, OP_COND_EXC_GT, "ArithmeticException");
4291 /* Check for nans */
4292 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG2);
4293 code = emit_cond_exc (cfg, code, OP_COND_EXC_OV, "ArithmeticException");
4294 arm_fmovd (code, dreg, sreg1);
4295 break;
4297 /* R4 */
4298 case OP_RADD:
4299 arm_fadd_s (code, dreg, sreg1, sreg2);
4300 break;
4301 case OP_RSUB:
4302 arm_fsub_s (code, dreg, sreg1, sreg2);
4303 break;
4304 case OP_RMUL:
4305 arm_fmul_s (code, dreg, sreg1, sreg2);
4306 break;
4307 case OP_RDIV:
4308 arm_fdiv_s (code, dreg, sreg1, sreg2);
4309 break;
4310 case OP_RNEG:
4311 arm_fneg_s (code, dreg, sreg1);
4312 break;
4313 case OP_RCONV_TO_I1:
4314 arm_fcvtzs_sx (code, dreg, sreg1);
4315 arm_sxtbx (code, dreg, dreg);
4316 break;
4317 case OP_RCONV_TO_U1:
4318 arm_fcvtzu_sx (code, dreg, sreg1);
4319 arm_uxtbw (code, dreg, dreg);
4320 break;
4321 case OP_RCONV_TO_I2:
4322 arm_fcvtzs_sx (code, dreg, sreg1);
4323 arm_sxthx (code, dreg, dreg);
4324 break;
4325 case OP_RCONV_TO_U2:
4326 arm_fcvtzu_sx (code, dreg, sreg1);
4327 arm_uxthw (code, dreg, dreg);
4328 break;
4329 case OP_RCONV_TO_I4:
4330 arm_fcvtzs_sx (code, dreg, sreg1);
4331 arm_sxtwx (code, dreg, dreg);
4332 break;
4333 case OP_RCONV_TO_U4:
4334 arm_fcvtzu_sx (code, dreg, sreg1);
4335 break;
4336 case OP_RCONV_TO_I8:
4337 arm_fcvtzs_sx (code, dreg, sreg1);
4338 break;
4339 case OP_RCONV_TO_U8:
4340 arm_fcvtzu_sx (code, dreg, sreg1);
4341 break;
4342 case OP_RCONV_TO_R8:
4343 arm_fcvt_sd (code, dreg, sreg1);
4344 break;
4345 case OP_RCONV_TO_R4:
4346 if (dreg != sreg1)
4347 arm_fmovs (code, dreg, sreg1);
4348 break;
4349 case OP_RCEQ:
4350 case OP_RCLT:
4351 case OP_RCLT_UN:
4352 case OP_RCGT:
4353 case OP_RCGT_UN:
4354 case OP_RCNEQ:
4355 case OP_RCLE:
4356 case OP_RCGE: {
4357 int cond;
4359 cond = opcode_to_armcond (ins->opcode);
4360 arm_fcmps (code, sreg1, sreg2);
4361 arm_cset (code, cond, dreg);
4362 break;
4365 /* CALLS */
4366 case OP_VOIDCALL:
4367 case OP_CALL:
4368 case OP_LCALL:
4369 case OP_FCALL:
4370 case OP_RCALL:
4371 case OP_VCALL2: {
4373 call = (MonoCallInst*)ins;
4374 const MonoJumpInfoTarget patch = mono_call_to_patch (call);
4375 code = emit_call (cfg, code, patch.type, patch.target);
4376 code = emit_move_return_value (cfg, code, ins);
4377 break;
4379 case OP_VOIDCALL_REG:
4380 case OP_CALL_REG:
4381 case OP_LCALL_REG:
4382 case OP_FCALL_REG:
4383 case OP_RCALL_REG:
4384 case OP_VCALL2_REG:
4385 code = mono_arm_emit_blrx (code, sreg1);
4386 code = emit_move_return_value (cfg, code, ins);
4387 break;
4388 case OP_VOIDCALL_MEMBASE:
4389 case OP_CALL_MEMBASE:
4390 case OP_LCALL_MEMBASE:
4391 case OP_FCALL_MEMBASE:
4392 case OP_RCALL_MEMBASE:
4393 case OP_VCALL2_MEMBASE:
4394 code = emit_ldrx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4395 code = mono_arm_emit_blrx (code, ARMREG_IP0);
4396 code = emit_move_return_value (cfg, code, ins);
4397 break;
4399 case OP_TAILCALL_PARAMETER:
4400 // This opcode helps compute sizes, i.e.
4401 // of the subsequent OP_TAILCALL, but contributes no code.
4402 g_assert (ins->next);
4403 break;
4405 case OP_TAILCALL:
4406 case OP_TAILCALL_MEMBASE:
4407 case OP_TAILCALL_REG: {
4408 int branch_reg = ARMREG_IP0;
4409 guint64 free_reg = 1 << ARMREG_IP1;
4410 call = (MonoCallInst*)ins;
4412 g_assert (!cfg->method->save_lmf);
4414 max_len += call->stack_usage / sizeof (target_mgreg_t) * ins_get_size (OP_TAILCALL_PARAMETER);
4415 while (G_UNLIKELY (offset + max_len > cfg->code_size)) {
4416 cfg->code_size *= 2;
4417 cfg->native_code = (unsigned char *)mono_realloc_native_code (cfg);
4418 code = cfg->native_code + offset;
4419 cfg->stat_code_reallocs++;
4422 switch (ins->opcode) {
4423 case OP_TAILCALL:
4424 free_reg = (1 << ARMREG_IP0) | (1 << ARMREG_IP1);
4425 break;
4427 case OP_TAILCALL_REG:
4428 g_assert (sreg1 != -1);
4429 g_assert (sreg1 != ARMREG_IP0);
4430 g_assert (sreg1 != ARMREG_IP1);
4431 g_assert (sreg1 != ARMREG_LR);
4432 g_assert (sreg1 != ARMREG_SP);
4433 g_assert (sreg1 != ARMREG_R28);
4434 if ((sreg1 << 1) & MONO_ARCH_CALLEE_SAVED_REGS) {
4435 arm_movx (code, branch_reg, sreg1);
4436 } else {
4437 free_reg = (1 << ARMREG_IP0) | (1 << ARMREG_IP1);
4438 branch_reg = sreg1;
4440 break;
4442 case OP_TAILCALL_MEMBASE:
4443 g_assert (ins->inst_basereg != -1);
4444 g_assert (ins->inst_basereg != ARMREG_IP0);
4445 g_assert (ins->inst_basereg != ARMREG_IP1);
4446 g_assert (ins->inst_basereg != ARMREG_LR);
4447 g_assert (ins->inst_basereg != ARMREG_SP);
4448 g_assert (ins->inst_basereg != ARMREG_R28);
4449 code = emit_ldrx (code, branch_reg, ins->inst_basereg, ins->inst_offset);
4450 break;
4452 default:
4453 g_assert_not_reached ();
4456 // Copy stack arguments.
4457 // FIXME a fixed size memcpy is desirable here,
4458 // at least for larger values of stack_usage.
4459 for (int i = 0; i < call->stack_usage; i += sizeof (target_mgreg_t)) {
4460 code = emit_ldrx (code, ARMREG_LR, ARMREG_SP, i);
4461 code = emit_strx (code, ARMREG_LR, ARMREG_R28, i);
4464 /* Restore registers */
4465 code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
4467 /* Destroy frame */
4468 code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, free_reg);
4470 if (enable_ptrauth)
4471 /* There is no retab to authenticate lr */
4472 arm_autibsp (code);
4474 switch (ins->opcode) {
4475 case OP_TAILCALL:
4476 if (cfg->compile_aot) {
4477 /* This is not a PLT patch */
4478 code = emit_aotconst (cfg, code, branch_reg, MONO_PATCH_INFO_METHOD_JUMP, call->method);
4479 } else {
4480 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, call->method, MONO_R_ARM64_B);
4481 arm_b (code, code);
4482 cfg->thunk_area += THUNK_SIZE;
4483 break;
4485 // fallthrough
4486 case OP_TAILCALL_MEMBASE:
4487 case OP_TAILCALL_REG:
4488 code = mono_arm_emit_brx (code, branch_reg);
4489 break;
4491 default:
4492 g_assert_not_reached ();
4495 ins->flags |= MONO_INST_GC_CALLSITE;
4496 ins->backend.pc_offset = code - cfg->native_code;
4497 break;
4499 case OP_ARGLIST:
4500 g_assert (cfg->arch.cinfo);
4501 code = emit_addx_imm (code, ARMREG_IP0, cfg->arch.args_reg, cfg->arch.cinfo->sig_cookie.offset);
4502 arm_strx (code, ARMREG_IP0, sreg1, 0);
4503 break;
4504 case OP_DYN_CALL: {
4505 MonoInst *var = cfg->dyn_call_var;
4506 guint8 *labels [16];
4507 int i;
4510 * sreg1 points to a DynCallArgs structure initialized by mono_arch_start_dyn_call ().
4511 * sreg2 is the function to call.
4514 g_assert (var->opcode == OP_REGOFFSET);
4516 arm_movx (code, ARMREG_LR, sreg1);
4517 arm_movx (code, ARMREG_IP1, sreg2);
4519 /* Save args buffer */
4520 code = emit_strx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4522 /* Set fp argument regs */
4523 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpargs));
4524 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4525 labels [0] = code;
4526 arm_bcc (code, ARMCOND_EQ, 0);
4527 for (i = 0; i < 8; ++i)
4528 code = emit_ldrfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4529 arm_patch_rel (labels [0], code, MONO_R_ARM64_BCC);
4531 /* Allocate callee area */
4532 code = emit_ldrx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_stackargs));
4533 arm_lslw (code, ARMREG_R0, ARMREG_R0, 3);
4534 arm_movspx (code, ARMREG_R1, ARMREG_SP);
4535 arm_subx (code, ARMREG_R1, ARMREG_R1, ARMREG_R0);
4536 arm_movspx (code, ARMREG_SP, ARMREG_R1);
4538 /* Set stack args */
4539 /* R1 = limit */
4540 code = emit_ldrx (code, ARMREG_R1, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_stackargs));
4541 /* R2 = pointer into 'regs' */
4542 code = emit_imm (code, ARMREG_R2, MONO_STRUCT_OFFSET (DynCallArgs, regs) + ((PARAM_REGS + 1) * sizeof (target_mgreg_t)));
4543 arm_addx (code, ARMREG_R2, ARMREG_LR, ARMREG_R2);
4544 /* R3 = pointer to stack */
4545 arm_movspx (code, ARMREG_R3, ARMREG_SP);
4546 labels [0] = code;
4547 arm_b (code, code);
4548 labels [1] = code;
4549 code = emit_ldrx (code, ARMREG_R5, ARMREG_R2, 0);
4550 code = emit_strx (code, ARMREG_R5, ARMREG_R3, 0);
4551 code = emit_addx_imm (code, ARMREG_R2, ARMREG_R2, sizeof (target_mgreg_t));
4552 code = emit_addx_imm (code, ARMREG_R3, ARMREG_R3, sizeof (target_mgreg_t));
4553 code = emit_subx_imm (code, ARMREG_R1, ARMREG_R1, 1);
4554 arm_patch_rel (labels [0], code, MONO_R_ARM64_B);
4555 arm_cmpw (code, ARMREG_R1, ARMREG_RZR);
4556 arm_bcc (code, ARMCOND_GT, labels [1]);
4558 /* Set argument registers + r8 */
4559 code = mono_arm_emit_load_regarray (code, 0x1ff, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, regs));
4561 /* Make the call */
4562 code = mono_arm_emit_blrx (code, ARMREG_IP1);
4564 /* Save result */
4565 code = emit_ldrx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4566 arm_strx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res));
4567 arm_strx (code, ARMREG_R1, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res2));
4568 /* Save fp result */
4569 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpret));
4570 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4571 labels [1] = code;
4572 arm_bcc (code, ARMCOND_EQ, 0);
4573 for (i = 0; i < 8; ++i)
4574 code = emit_strfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4575 arm_patch_rel (labels [1], code, MONO_R_ARM64_BCC);
4576 break;
4579 case OP_GENERIC_CLASS_INIT: {
4580 int byte_offset;
4581 guint8 *jump;
4583 byte_offset = MONO_STRUCT_OFFSET (MonoVTable, initialized);
4585 /* Load vtable->initialized */
4586 arm_ldrsbx (code, ARMREG_IP0, sreg1, byte_offset);
4587 jump = code;
4588 arm_cbnzx (code, ARMREG_IP0, 0);
4590 /* Slowpath */
4591 g_assert (sreg1 == ARMREG_R0);
4592 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4593 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_generic_class_init));
4595 mono_arm_patch (jump, code, MONO_R_ARM64_CBZ);
4596 break;
4599 case OP_CHECK_THIS:
4600 arm_ldrb (code, ARMREG_LR, sreg1, 0);
4601 break;
4602 case OP_NOT_NULL:
4603 case OP_NOT_REACHED:
4604 case OP_DUMMY_USE:
4605 case OP_DUMMY_ICONST:
4606 case OP_DUMMY_I8CONST:
4607 case OP_DUMMY_R8CONST:
4608 case OP_DUMMY_R4CONST:
4609 break;
4610 case OP_IL_SEQ_POINT:
4611 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
4612 break;
4614 /* EH */
4615 case OP_COND_EXC_C:
4616 case OP_COND_EXC_IC:
4617 case OP_COND_EXC_OV:
4618 case OP_COND_EXC_IOV:
4619 case OP_COND_EXC_NC:
4620 case OP_COND_EXC_INC:
4621 case OP_COND_EXC_NO:
4622 case OP_COND_EXC_INO:
4623 case OP_COND_EXC_EQ:
4624 case OP_COND_EXC_IEQ:
4625 case OP_COND_EXC_NE_UN:
4626 case OP_COND_EXC_INE_UN:
4627 case OP_COND_EXC_ILT:
4628 case OP_COND_EXC_LT:
4629 case OP_COND_EXC_ILT_UN:
4630 case OP_COND_EXC_LT_UN:
4631 case OP_COND_EXC_IGT:
4632 case OP_COND_EXC_GT:
4633 case OP_COND_EXC_IGT_UN:
4634 case OP_COND_EXC_GT_UN:
4635 case OP_COND_EXC_IGE:
4636 case OP_COND_EXC_GE:
4637 case OP_COND_EXC_IGE_UN:
4638 case OP_COND_EXC_GE_UN:
4639 case OP_COND_EXC_ILE:
4640 case OP_COND_EXC_LE:
4641 case OP_COND_EXC_ILE_UN:
4642 case OP_COND_EXC_LE_UN:
4643 code = emit_cond_exc (cfg, code, ins->opcode, (const char*)ins->inst_p1);
4644 break;
4645 case OP_THROW:
4646 if (sreg1 != ARMREG_R0)
4647 arm_movx (code, ARMREG_R0, sreg1);
4648 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4649 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arch_throw_exception));
4650 break;
4651 case OP_RETHROW:
4652 if (sreg1 != ARMREG_R0)
4653 arm_movx (code, ARMREG_R0, sreg1);
4654 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID,
4655 GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arch_rethrow_exception));
4656 break;
4657 case OP_CALL_HANDLER:
4658 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_BL);
4659 arm_bl (code, 0);
4660 cfg->thunk_area += THUNK_SIZE;
4661 for (GList *tmp = ins->inst_eh_blocks; tmp != bb->clause_holes; tmp = tmp->prev)
4662 mono_cfg_add_try_hole (cfg, ((MonoLeaveClause *) tmp->data)->clause, code, bb);
4663 break;
4664 case OP_START_HANDLER: {
4665 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4667 /* Save caller address */
4668 code = emit_strx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4671 * Reserve a param area, see test_0_finally_param_area ().
4672 * This is needed because the param area is not set up when
4673 * we are called from EH code.
4675 if (cfg->param_area)
4676 code = emit_subx_sp_imm (code, cfg->param_area);
4677 break;
4679 case OP_ENDFINALLY:
4680 case OP_ENDFILTER: {
4681 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4683 if (cfg->param_area)
4684 code = emit_addx_sp_imm (code, cfg->param_area);
4686 if (ins->opcode == OP_ENDFILTER && sreg1 != ARMREG_R0)
4687 arm_movx (code, ARMREG_R0, sreg1);
4689 /* Return to either after the branch in OP_CALL_HANDLER, or to the EH code */
4690 code = emit_ldrx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4691 arm_brx (code, ARMREG_LR);
4692 break;
4694 case OP_GET_EX_OBJ:
4695 if (ins->dreg != ARMREG_R0)
4696 arm_movx (code, ins->dreg, ARMREG_R0);
4697 break;
4698 case OP_LIVERANGE_START: {
4699 if (cfg->verbose_level > 1)
4700 printf ("R%d START=0x%x\n", MONO_VARINFO (cfg, ins->inst_c0)->vreg, (int)(code - cfg->native_code));
4701 MONO_VARINFO (cfg, ins->inst_c0)->live_range_start = code - cfg->native_code;
4702 break;
4704 case OP_LIVERANGE_END: {
4705 if (cfg->verbose_level > 1)
4706 printf ("R%d END=0x%x\n", MONO_VARINFO (cfg, ins->inst_c0)->vreg, (int)(code - cfg->native_code));
4707 MONO_VARINFO (cfg, ins->inst_c0)->live_range_end = code - cfg->native_code;
4708 break;
4710 case OP_GC_SAFE_POINT: {
4711 guint8 *buf [1];
4713 arm_ldrx (code, ARMREG_IP1, ins->sreg1, 0);
4714 /* Call it if it is non-null */
4715 buf [0] = code;
4716 arm_cbzx (code, ARMREG_IP1, 0);
4717 code = emit_call (cfg, code, MONO_PATCH_INFO_JIT_ICALL_ID, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_threads_state_poll));
4718 mono_arm_patch (buf [0], code, MONO_R_ARM64_CBZ);
4719 break;
4721 case OP_FILL_PROF_CALL_CTX:
4722 for (int i = 0; i < MONO_MAX_IREGS; i++)
4723 if ((MONO_ARCH_CALLEE_SAVED_REGS & (1 << i)) || i == ARMREG_SP || i == ARMREG_FP)
4724 arm_strx (code, i, ins->sreg1, MONO_STRUCT_OFFSET (MonoContext, regs) + i * sizeof (target_mgreg_t));
4725 break;
4726 default:
4727 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4728 g_assert_not_reached ();
4731 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4732 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %d)",
4733 mono_inst_name (ins->opcode), max_len, code - cfg->native_code - offset);
4734 g_assert_not_reached ();
4737 set_code_cursor (cfg, code);
4740 * If the compiled code size is larger than the bcc displacement (19 bits signed),
4741 * insert branch islands between/inside basic blocks.
4743 if (cfg->arch.cond_branch_islands)
4744 code = emit_branch_island (cfg, code, start_offset);
/*
 * emit_move_args:
 *
 * Emit code at the start of the method to move incoming arguments from their
 * ABI locations (registers / caller stack slots described by cfg->arch.cinfo)
 * into the locations the JIT allocated for them (registers for OP_REGVAR
 * variables, frame slots for OP_REGOFFSET variables).
 * Returns the updated code cursor.
 */
static guint8*
emit_move_args (MonoCompile *cfg, guint8 *code)
{
	MonoInst *ins;
	CallInfo *cinfo;
	ArgInfo *ainfo;
	int i, part;
	MonoMethodSignature *sig = mono_method_signature_internal (cfg->method);

	cinfo = cfg->arch.cinfo;
	g_assert (cinfo);
	for (i = 0; i < cinfo->nargs; ++i) {
		ainfo = cinfo->args + i;
		ins = cfg->args [i];

		if (ins->opcode == OP_REGVAR) {
			/* The argument lives in a register for its whole lifetime */
			switch (ainfo->storage) {
			case ArgInIReg:
				arm_movx (code, ins->dreg, ainfo->reg);
				if (i == 0 && sig->hasthis) {
					/* Record both locations of 'this' for the debugger */
					mono_add_var_location (cfg, ins, TRUE, ainfo->reg, 0, 0, code - cfg->native_code);
					mono_add_var_location (cfg, ins, TRUE, ins->dreg, 0, code - cfg->native_code, 0);
				}
				break;
			case ArgOnStack:
				/* Load from the incoming stack slot, sign/zero-extending per slot_size */
				switch (ainfo->slot_size) {
				case 1:
					if (ainfo->sign)
						code = emit_ldrsbx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					else
						code = emit_ldrb (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					break;
				case 2:
					if (ainfo->sign)
						code = emit_ldrshx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					else
						code = emit_ldrh (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					break;
				case 4:
					if (ainfo->sign)
						code = emit_ldrswx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					else
						code = emit_ldrw (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					break;
				default:
					code = emit_ldrx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
					break;
				}
				break;
			default:
				g_assert_not_reached ();
				break;
			}
		} else {
			/* The argument lives in a frame slot; spill it from its incoming location */
			if (ainfo->storage != ArgVtypeByRef && ainfo->storage != ArgVtypeByRefOnStack)
				g_assert (ins->opcode == OP_REGOFFSET);

			switch (ainfo->storage) {
			case ArgInIReg:
				/* Stack slots for arguments have size 8 */
				code = emit_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
				if (i == 0 && sig->hasthis) {
					mono_add_var_location (cfg, ins, TRUE, ainfo->reg, 0, 0, code - cfg->native_code);
					mono_add_var_location (cfg, ins, FALSE, ins->inst_basereg, ins->inst_offset, code - cfg->native_code, 0);
				}
				break;
			case ArgInFReg:
				code = emit_strfpx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
				break;
			case ArgInFRegR4:
				code = emit_strfpw (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
				break;
			case ArgOnStack:
			case ArgOnStackR4:
			case ArgOnStackR8:
			case ArgVtypeByRefOnStack:
			case ArgVtypeOnStack:
				/* Already in its home stack location; nothing to do */
				break;
			case ArgVtypeByRef: {
				MonoInst *addr_arg = ins->inst_left;

				if (ainfo->gsharedvt) {
					g_assert (ins->opcode == OP_GSHAREDVT_ARG_REGOFFSET);
					arm_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
				} else {
					g_assert (ins->opcode == OP_VTARG_ADDR);
					g_assert (addr_arg->opcode == OP_REGOFFSET);
					arm_strx (code, ainfo->reg, addr_arg->inst_basereg, addr_arg->inst_offset);
				}
				break;
			}
			case ArgVtypeInIRegs:
				/* Spill each 8-byte chunk of the vtype from consecutive registers */
				for (part = 0; part < ainfo->nregs; part ++) {
					code = emit_strx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + (part * 8));
				}
				break;
			case ArgHFA:
				/* Homogeneous FP aggregate: spill each element at its field offset */
				for (part = 0; part < ainfo->nregs; part ++) {
					if (ainfo->esize == 4)
						code = emit_strfpw (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
					else
						code = emit_strfpx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
				}
				break;
			default:
				g_assert_not_reached ();
				break;
			}
		}
	}

	return code;
}
4862 * emit_store_regarray:
4864 * Emit code to store the registers in REGS into the appropriate elements of
4865 * the register array at BASEREG+OFFSET.
4867 static __attribute__ ((__warn_unused_result__)) guint8*
4868 emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4870 int i;
4872 for (i = 0; i < 32; ++i) {
4873 if (regs & (1 << i)) {
4874 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4875 arm_stpx (code, i, i + 1, basereg, offset + (i * 8));
4876 i++;
4877 } else if (i == ARMREG_SP) {
4878 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4879 arm_strx (code, ARMREG_IP1, basereg, offset + (i * 8));
4880 } else {
4881 arm_strx (code, i, basereg, offset + (i * 8));
4885 return code;
4889 * emit_load_regarray:
4891 * Emit code to load the registers in REGS from the appropriate elements of
4892 * the register array at BASEREG+OFFSET.
4894 static __attribute__ ((__warn_unused_result__)) guint8*
4895 emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4897 int i;
4899 for (i = 0; i < 32; ++i) {
4900 if (regs & (1 << i)) {
4901 if ((regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4902 if (offset + (i * 8) < 500)
4903 arm_ldpx (code, i, i + 1, basereg, offset + (i * 8));
4904 else {
4905 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4906 code = emit_ldrx (code, i + 1, basereg, offset + ((i + 1) * 8));
4908 i++;
4909 } else if (i == ARMREG_SP) {
4910 g_assert_not_reached ();
4911 } else {
4912 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4916 return code;
4920 * emit_store_regset:
4922 * Emit code to store the registers in REGS into consecutive memory locations starting
4923 * at BASEREG+OFFSET.
4925 static __attribute__ ((__warn_unused_result__)) guint8*
4926 emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4928 int i, pos;
4930 pos = 0;
4931 for (i = 0; i < 32; ++i) {
4932 if (regs & (1 << i)) {
4933 if ((regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4934 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
4935 i++;
4936 pos++;
4937 } else if (i == ARMREG_SP) {
4938 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4939 arm_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
4940 } else {
4941 arm_strx (code, i, basereg, offset + (pos * 8));
4943 pos++;
4946 return code;
4950 * emit_load_regset:
4952 * Emit code to load the registers in REGS from consecutive memory locations starting
4953 * at BASEREG+OFFSET.
4955 static __attribute__ ((__warn_unused_result__)) guint8*
4956 emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset)
4958 int i, pos;
4960 pos = 0;
4961 for (i = 0; i < 32; ++i) {
4962 if (regs & (1 << i)) {
4963 if ((regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4964 arm_ldpx (code, i, i + 1, basereg, offset + (pos * 8));
4965 i++;
4966 pos++;
4967 } else if (i == ARMREG_SP) {
4968 g_assert_not_reached ();
4969 } else {
4970 arm_ldrx (code, i, basereg, offset + (pos * 8));
4972 pos++;
4975 return code;
4978 __attribute__ ((__warn_unused_result__)) guint8*
4979 mono_arm_emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4981 return emit_load_regarray (code, regs, basereg, offset);
4984 __attribute__ ((__warn_unused_result__)) guint8*
4985 mono_arm_emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4987 return emit_store_regarray (code, regs, basereg, offset);
4990 __attribute__ ((__warn_unused_result__)) guint8*
4991 mono_arm_emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4993 return emit_store_regset (code, regs, basereg, offset);
4996 /* Same as emit_store_regset, but emit unwind info too */
4997 /* CFA_OFFSET is the offset between the CFA and basereg */
4998 static __attribute__ ((__warn_unused_result__)) guint8*
4999 emit_store_regset_cfa (MonoCompile *cfg, guint8 *code, guint64 regs, int basereg, int offset, int cfa_offset, guint64 no_cfa_regset)
5001 int i, j, pos, nregs;
5002 guint32 cfa_regset = regs & ~no_cfa_regset;
5004 pos = 0;
5005 for (i = 0; i < 32; ++i) {
5006 nregs = 1;
5007 if (regs & (1 << i)) {
5008 if ((regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
5009 if (offset < 256) {
5010 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
5011 } else {
5012 code = emit_strx (code, i, basereg, offset + (pos * 8));
5013 code = emit_strx (code, i + 1, basereg, offset + (pos * 8) + 8);
5015 nregs = 2;
5016 } else if (i == ARMREG_SP) {
5017 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
5018 code = emit_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
5019 } else {
5020 code = emit_strx (code, i, basereg, offset + (pos * 8));
5023 for (j = 0; j < nregs; ++j) {
5024 if (cfa_regset & (1 << (i + j)))
5025 mono_emit_unwind_op_offset (cfg, code, i + j, (- cfa_offset) + offset + ((pos + j) * 8));
5028 i += nregs - 1;
5029 pos += nregs;
5032 return code;
5036 * emit_setup_lmf:
5038 * Emit code to initialize an LMF structure at LMF_OFFSET.
5039 * Clobbers ip0/ip1.
/*
 * Initializes the MonoLMF structure located at frame offset LMF_OFFSET
 * (relative to FP). CFA_OFFSET is forwarded to emit_store_regset_cfa so the
 * saved registers also get unwind info. Returns the updated code cursor.
 * NOTE(review): the header comment above says this clobbers ip0/ip1; the
 * visible body also clobbers lr (used for the pc store) — confirm callers
 * do not rely on lr across this.
 */
static guint8*
emit_setup_lmf (MonoCompile *cfg, guint8 *code, gint32 lmf_offset, int cfa_offset)
{
	/*
	 * The LMF should contain all the state required to be able to reconstruct the machine state
	 * at the current point of execution. Since the LMF is only read during EH, only callee
	 * saved etc. registers need to be saved.
	 * FIXME: Save callee saved fp regs, JITted code doesn't use them, but native code does, and they
	 * need to be restored during EH.
	 */

	/* pc: materialize the current code address into lr and store it */
	arm_adrx (code, ARMREG_LR, code);
	code = emit_strx (code, ARMREG_LR, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, pc));
	/* gregs + fp + sp */
	/* Don't emit unwind info for sp/fp, they are already handled in the prolog */
	code = emit_store_regset_cfa (cfg, code, MONO_ARCH_LMF_REGS, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs), cfa_offset, (1 << ARMREG_FP) | (1 << ARMREG_SP));

	return code;
}
/*
 * mono_arch_emit_prolog:
 *
 * Allocates the initial native code buffer and emits the method prolog:
 * frame setup (with pointer-auth signing when enabled), unwind info,
 * callee-saved register / LMF saving, argument homing, soft-debug variable
 * initialization, and finally estimates the method size to decide whether
 * conditional-branch islands are needed.
 */
guint8 *
mono_arch_emit_prolog (MonoCompile *cfg)
{
	MonoMethod *method = cfg->method;
	MonoMethodSignature *sig;
	MonoBasicBlock *bb;
	guint8 *code;
	int cfa_offset, max_offset;

	sig = mono_method_signature_internal (method);
	cfg->code_size = 256 + sig->param_count * 64;
	code = cfg->native_code = g_malloc (cfg->code_size);

	/* This can be unaligned */
	cfg->stack_offset = ALIGN_TO (cfg->stack_offset, MONO_ARCH_FRAME_ALIGNMENT);

	/*
	 * - Setup frame
	 */
	cfa_offset = 0;
	mono_emit_unwind_op_def_cfa (cfg, code, ARMREG_SP, 0);

	/* Sign the return address before it is spilled */
	if (enable_ptrauth)
		arm_pacibsp (code);

	/* Setup frame */
	if (arm_is_ldpx_imm (-cfg->stack_offset)) {
		/* Allocate the frame and save fp/lr in one pre-indexed stp */
		arm_stpx_pre (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, -cfg->stack_offset);
	} else {
		/* sp -= cfg->stack_offset */
		/* This clobbers ip0/ip1 */
		code = emit_subx_sp_imm (code, cfg->stack_offset);
		arm_stpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
	}
	cfa_offset += cfg->stack_offset;
	mono_emit_unwind_op_def_cfa_offset (cfg, code, cfa_offset);
	mono_emit_unwind_op_offset (cfg, code, ARMREG_FP, (- cfa_offset) + 0);
	mono_emit_unwind_op_offset (cfg, code, ARMREG_LR, (- cfa_offset) + 8);
	arm_movspx (code, ARMREG_FP, ARMREG_SP);
	mono_emit_unwind_op_def_cfa_reg (cfg, code, ARMREG_FP);
	if (cfg->param_area) {
		/* The param area is below the frame pointer */
		code = emit_subx_sp_imm (code, cfg->param_area);
	}

	if (cfg->method->save_lmf) {
		code = emit_setup_lmf (cfg, code, cfg->lmf_var->inst_offset, cfa_offset);
	} else {
		/* Save gregs */
		code = emit_store_regset_cfa (cfg, code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset, cfa_offset, 0);
	}

	/* Setup args reg: points at the incoming stack arguments above the frame */
	if (cfg->arch.args_reg) {
		/* The register was already saved above */
		code = emit_addx_imm (code, cfg->arch.args_reg, ARMREG_FP, cfg->stack_offset);
	}

	/* Save return area addr received in R8 */
	if (cfg->vret_addr) {
		MonoInst *ins = cfg->vret_addr;

		g_assert (ins->opcode == OP_REGOFFSET);
		code = emit_strx (code, ARMREG_R8, ins->inst_basereg, ins->inst_offset);
	}

	/* Save mrgctx received in MONO_ARCH_RGCTX_REG */
	if (cfg->rgctx_var) {
		MonoInst *ins = cfg->rgctx_var;

		g_assert (ins->opcode == OP_REGOFFSET);

		code = emit_strx (code, MONO_ARCH_RGCTX_REG, ins->inst_basereg, ins->inst_offset);

		mono_add_var_location (cfg, cfg->rgctx_var, TRUE, MONO_ARCH_RGCTX_REG, 0, 0, code - cfg->native_code);
		mono_add_var_location (cfg, cfg->rgctx_var, FALSE, ins->inst_basereg, ins->inst_offset, code - cfg->native_code, 0);
	}

	/*
	 * Move arguments to their registers/stack locations.
	 */
	code = emit_move_args (cfg, code);

	/* Initialize seq_point_info_var */
	if (cfg->arch.seq_point_info_var) {
		MonoInst *ins = cfg->arch.seq_point_info_var;

		/* Initialize the variable from a GOT slot */
		code = emit_aotconst (cfg, code, ARMREG_IP0, MONO_PATCH_INFO_SEQ_POINT_INFO, cfg->method);
		g_assert (ins->opcode == OP_REGOFFSET);
		code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);

		/* Initialize ss_tramp_var from the SeqPointInfo just loaded */
		ins = cfg->arch.ss_tramp_var;
		g_assert (ins->opcode == OP_REGOFFSET);

		code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP0, MONO_STRUCT_OFFSET (SeqPointInfo, ss_tramp_addr));
		code = emit_strx (code, ARMREG_IP1, ins->inst_basereg, ins->inst_offset);
	} else {
		MonoInst *ins;

		if (cfg->arch.ss_tramp_var) {
			/* Initialize ss_tramp_var: stores the ADDRESS of the trampoline pointer */
			ins = cfg->arch.ss_tramp_var;
			g_assert (ins->opcode == OP_REGOFFSET);

			code = emit_imm64 (code, ARMREG_IP0, (guint64)&ss_trampoline);
			code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
		}

		if (cfg->arch.bp_tramp_var) {
			/* Initialize bp_tramp_var: stores the trampoline pointer VALUE */
			ins = cfg->arch.bp_tramp_var;
			g_assert (ins->opcode == OP_REGOFFSET);

			code = emit_imm64 (code, ARMREG_IP0, (guint64)bp_trampoline);
			code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
		}
	}

	/* Estimate the total code size to decide if branch islands are needed */
	max_offset = 0;
	if (cfg->opt & MONO_OPT_BRANCH) {
		for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
			MonoInst *ins;
			bb->max_offset = max_offset;

			MONO_BB_FOR_EACH_INS (bb, ins) {
				max_offset += ins_get_size (ins->opcode);
			}
		}
	}
	/* bcc has a 19-bit signed displacement (0x3ffff * 4 bytes) */
	if (max_offset > 0x3ffff * 4)
		cfg->arch.cond_branch_islands = TRUE;

	return code;
}
/*
 * mono_arch_emit_epilog:
 *
 * Emits the method epilog: restores callee-saved registers (from the LMF if
 * one was set up, otherwise from the saved-gregs area), loads value-type /
 * HFA return values into their ABI registers, tears down the frame and
 * returns (with pointer-auth verification when enabled).
 */
void
mono_arch_emit_epilog (MonoCompile *cfg)
{
	CallInfo *cinfo;
	int max_epilog_size;
	guint8 *code;
	int i;

	max_epilog_size = 16 + 20*4;
	code = realloc_code (cfg, max_epilog_size);

	if (cfg->method->save_lmf) {
		/* Restore gregs from the LMF's register array */
		code = mono_arm_emit_load_regarray (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->lmf_var->inst_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs) - (MONO_ARCH_FIRST_LMF_REG * 8));
	} else {
		/* Restore gregs */
		code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
	}

	/* Load returned vtypes into registers if needed */
	cinfo = cfg->arch.cinfo;
	switch (cinfo->ret.storage) {
	case ArgVtypeInIRegs: {
		MonoInst *ins = cfg->ret;

		for (i = 0; i < cinfo->ret.nregs; ++i)
			code = emit_ldrx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + (i * 8));
		break;
	}
	case ArgHFA: {
		MonoInst *ins = cfg->ret;

		/* Homogeneous FP aggregate: load each element at its field offset */
		for (i = 0; i < cinfo->ret.nregs; ++i) {
			if (cinfo->ret.esize == 4)
				code = emit_ldrfpw (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
			else
				code = emit_ldrfpx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
		}
		break;
	}
	default:
		break;
	}

	/* Destroy frame */
	code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, (1 << ARMREG_IP0) | (1 << ARMREG_IP1));

	/* retab authenticates lr against the signature created by pacibsp in the prolog */
	if (enable_ptrauth)
		arm_retab (code);
	else
		arm_retx (code, ARMREG_LR);

	g_assert (code - (cfg->native_code + cfg->code_len) < max_epilog_size);

	set_code_cursor (cfg, code);
}
/*
 * mono_arch_emit_exceptions:
 *
 * Emits, after the method body, one throw sequence per distinct corlib
 * exception referenced by MONO_PATCH_INFO_EXC patches, and re-points each
 * conditional-exception branch at the (shared) sequence for its exception.
 */
void
mono_arch_emit_exceptions (MonoCompile *cfg)
{
	MonoJumpInfo *ji;
	MonoClass *exc_class;
	guint8 *code, *ip;
	guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM];
	guint8 exc_throw_found [MONO_EXC_INTRINS_NUM];
	int i, id, size = 0;

	for (i = 0; i < MONO_EXC_INTRINS_NUM; i++) {
		exc_throw_pos [i] = NULL;
		exc_throw_found [i] = 0;
	}

	/* First pass: compute how much code to reserve (32 bytes per distinct exception) */
	for (ji = cfg->patch_info; ji; ji = ji->next) {
		if (ji->type == MONO_PATCH_INFO_EXC) {
			i = mini_exception_id_by_name ((const char*)ji->data.target);
			if (!exc_throw_found [i]) {
				size += 32;
				exc_throw_found [i] = TRUE;
			}
		}
	}

	code = realloc_code (cfg, size);

	/* Emit code to raise corlib exceptions */
	for (ji = cfg->patch_info; ji; ji = ji->next) {
		if (ji->type != MONO_PATCH_INFO_EXC)
			continue;

		ip = cfg->native_code + ji->ip.i;

		id = mini_exception_id_by_name ((const char*)ji->data.target);

		if (exc_throw_pos [id]) {
			/* ip points to the bcc () in OP_COND_EXC_... */
			/* Reuse the already-emitted throw sequence for this exception */
			arm_patch_rel (ip, exc_throw_pos [id], ji->relocation);
			ji->type = MONO_PATCH_INFO_NONE;
			continue;
		}

		exc_throw_pos [id] = code;
		arm_patch_rel (ip, code, ji->relocation);

		/* We are being branched to from the code generated by emit_cond_exc (), the pc is in ip1 */

		/* r0 = type token */
		exc_class = mono_class_load_from_name (mono_defaults.corlib, "System", ji->data.name);
		code = emit_imm (code, ARMREG_R0, m_class_get_type_token (exc_class) - MONO_TOKEN_TYPE_DEF);
		/* r1 = throw ip */
		arm_movx (code, ARMREG_R1, ARMREG_IP1);
		/* Branch to the corlib exception throwing trampoline */
		ji->ip.i = code - cfg->native_code;
		ji->type = MONO_PATCH_INFO_JIT_ICALL_ID;
		ji->data.jit_icall_id = MONO_JIT_ICALL_mono_arch_throw_corlib_exception;
		ji->relocation = MONO_R_ARM64_BL;
		arm_bl (code, 0);
		cfg->thunk_area += THUNK_SIZE;
		set_code_cursor (cfg, code);
	}

	set_code_cursor (cfg, code);
}
5321 MonoInst*
5322 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
5324 return NULL;
5327 guint32
5328 mono_arch_get_patch_offset (guint8 *code)
5330 return 0;
/*
 * mono_arch_build_imt_trampoline:
 *
 * Builds an IMT/generic-virtual dispatch thunk: a chain of compares of the
 * incoming IMT argument (in MONO_ARCH_RGCTX_REG) against each entry's key,
 * branching to the target code or through the vtable slot on a match, and to
 * FAIL_TRAMP (if set) on a miss. Sizing is done in a first pass with the
 * per-case byte counts matching the instruction sequences emitted below.
 */
gpointer
mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
								gpointer fail_tramp)
{
	int i, buf_len, imt_reg;
	guint8 *buf, *code;

#if DEBUG_IMT
	printf ("building IMT trampoline for class %s %s entries %d code size %d code at %p end %p vtable %p\n", m_class_get_name_space (vtable->klass), m_class_get_name (vtable->klass), count, size, start, ((guint8*)start) + size, vtable);
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		printf ("method %d (%p) %s vtable slot %p is_equals %d chunk size %d\n", i, item->key, item->key->name, &vtable->vtable [item->value.vtable_slot], item->is_equals, item->chunk_size);
	}
#endif

	/* First pass: compute the buffer size (must stay in sync with the emission pass) */
	buf_len = 0;
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		if (item->is_equals) {
			gboolean fail_case = !item->check_target_idx && fail_tramp;

			if (item->check_target_idx || fail_case) {
				if (!item->compare_done || fail_case) {
					buf_len += 4 * 4 + 4;
				}
				buf_len += 4;
				if (item->has_target_code) {
					buf_len += 5 * 4;
				} else {
					buf_len += 6 * 4;
				}
				if (fail_case) {
					buf_len += 5 * 4;
				}
			} else {
				buf_len += 6 * 4;
			}
		} else {
			buf_len += 6 * 4;
		}
	}

	if (fail_tramp) {
		buf = (guint8*)mono_method_alloc_generic_virtual_trampoline (mono_domain_ambient_memory_manager (domain), buf_len);
	} else {
		MonoMemoryManager *mem_manager = m_class_get_mem_manager (domain, vtable->klass);
		buf = mono_mem_manager_code_reserve (mem_manager, buf_len);
	}
	code = buf;

	MINI_BEGIN_CODEGEN ();

	/*
	 * We are called by JITted code, which passes in the IMT argument in
	 * MONO_ARCH_RGCTX_REG (r27). We need to preserve all caller saved regs
	 * except ip0/ip1.
	 */
	imt_reg = MONO_ARCH_RGCTX_REG;
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];

		item->code_target = code;

		if (item->is_equals) {
			/*
			 * Check the imt argument against item->key, if equals, jump to either
			 * item->value.target_code or to vtable [item->value.vtable_slot].
			 * If fail_tramp is set, jump to it if not-equals.
			 */
			gboolean fail_case = !item->check_target_idx && fail_tramp;

			if (item->check_target_idx || fail_case) {
				/* Compare imt_reg with item->key */
				if (!item->compare_done || fail_case) {
					// FIXME: Optimize this
					code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
					arm_cmpx (code, imt_reg, ARMREG_IP0);
				}
				item->jmp_code = code;
				arm_bcc (code, ARMCOND_NE, 0);
				/* Jump to target if equals */
				if (item->has_target_code) {
					code = emit_imm64 (code, ARMREG_IP0, (guint64)item->value.target_code);
					code = mono_arm_emit_brx (code, ARMREG_IP0);
				} else {
					guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);

					code = emit_imm64 (code, ARMREG_IP0, imm);
					arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
					code = mono_arm_emit_brx (code, ARMREG_IP0);
				}

				if (fail_case) {
					/* The not-equals branch falls through to the fail trampoline */
					arm_patch_rel (item->jmp_code, code, MONO_R_ARM64_BCC);
					item->jmp_code = NULL;
					code = emit_imm64 (code, ARMREG_IP0, (guint64)fail_tramp);
					code = mono_arm_emit_brx (code, ARMREG_IP0);
				}
			} else {
				/* Unconditional dispatch through the vtable slot */
				guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);

				code = emit_imm64 (code, ARMREG_IP0, imm);
				arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
				code = mono_arm_emit_brx (code, ARMREG_IP0);
			}
		} else {
			/* Range-check node of the search tree: branch on key comparison */
			code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
			arm_cmpx (code, imt_reg, ARMREG_IP0);
			item->jmp_code = code;
			arm_bcc (code, ARMCOND_HS, 0);
		}
	}
	/* Patch the branches */
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		if (item->jmp_code && item->check_target_idx)
			arm_patch_rel (item->jmp_code, imt_entries [item->check_target_idx]->code_target, MONO_R_ARM64_BCC);
	}

	g_assert ((code - buf) <= buf_len);

	MINI_END_CODEGEN (buf, code - buf, MONO_PROFILER_CODE_BUFFER_IMT_TRAMPOLINE, NULL);

	return MINI_ADDR_TO_FTNPTR (buf);
}
5459 GSList *
5460 mono_arch_get_trampolines (gboolean aot)
5462 return mono_arm_get_exception_trampolines (aot);
5465 #else /* DISABLE_JIT */
5467 gpointer
5468 mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5469 gpointer fail_tramp)
5471 g_assert_not_reached ();
5472 return NULL;
5475 #endif /* !DISABLE_JIT */
5477 #ifdef MONO_ARCH_SOFT_DEBUG_SUPPORTED
/*
 * mono_arch_set_breakpoint:
 *
 * Activates the soft breakpoint at IP inside JI. For AOT code the per-method
 * SeqPointInfo table slot is filled in with the breakpoint trampoline; for
 * JITted code the nop following the ldrx at IP is patched into an indirect
 * call through IP0.
 */
void
mono_arch_set_breakpoint (MonoJitInfo *ji, guint8 *ip)
{
	guint8 *code = MINI_FTNPTR_TO_ADDR (ip);
	guint32 native_offset = ip - (guint8*)ji->code_start;

	if (ji->from_aot) {
		SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), (guint8*)ji->code_start);

		if (enable_ptrauth)
			NOT_IMPLEMENTED;
		g_assert (native_offset % 4 == 0);
		g_assert (info->bp_addrs [native_offset / 4] == 0);
		info->bp_addrs [native_offset / 4] = (guint8*)mini_get_breakpoint_trampoline ();
	} else {
		/* ip points to an ldrx */
		code += 4;
		/* The code memory is normally mapped read/execute only */
		mono_codeman_enable_write ();
		code = mono_arm_emit_blrx (code, ARMREG_IP0);
		mono_codeman_disable_write ();
		mono_arch_flush_icache (ip, code - ip);
	}
}
/*
 * mono_arch_clear_breakpoint:
 *
 * Deactivates the soft breakpoint at IP inside JI; the inverse of
 * mono_arch_set_breakpoint. For AOT code the SeqPointInfo slot is cleared;
 * for JITted code the patched call is rewritten back into a nop.
 */
void
mono_arch_clear_breakpoint (MonoJitInfo *ji, guint8 *ip)
{
	guint8 *code = MINI_FTNPTR_TO_ADDR (ip);

	if (ji->from_aot) {
		guint32 native_offset = ip - (guint8*)ji->code_start;
		SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), (guint8*)ji->code_start);

		if (enable_ptrauth)
			NOT_IMPLEMENTED;

		g_assert (native_offset % 4 == 0);
		info->bp_addrs [native_offset / 4] = NULL;
	} else {
		/* ip points to an ldrx */
		code += 4;
		/* The code memory is normally mapped read/execute only */
		mono_codeman_enable_write ();
		arm_nop (code);
		mono_codeman_disable_write ();
		mono_arch_flush_icache (ip, code - ip);
	}
}
5527 void
5528 mono_arch_start_single_stepping (void)
5530 ss_trampoline = mini_get_single_step_trampoline ();
5533 void
5534 mono_arch_stop_single_stepping (void)
5536 ss_trampoline = NULL;
5539 gboolean
5540 mono_arch_is_single_step_event (void *info, void *sigctx)
5542 /* We use soft breakpoints on arm64 */
5543 return FALSE;
5546 gboolean
5547 mono_arch_is_breakpoint_event (void *info, void *sigctx)
5549 /* We use soft breakpoints on arm64 */
5550 return FALSE;
5553 void
5554 mono_arch_skip_breakpoint (MonoContext *ctx, MonoJitInfo *ji)
5556 g_assert_not_reached ();
5559 void
5560 mono_arch_skip_single_step (MonoContext *ctx)
5562 g_assert_not_reached ();
/*
 * mono_arch_get_seq_point_info:
 *
 * Returns the per-method SeqPointInfo for the method starting at CODE,
 * creating and caching it in the domain's arch_seq_points hash on first use.
 * NOTE(review): the domain lock is dropped between lookup and insert, so two
 * threads can race to create the entry; g_hash_table_insert would replace
 * one copy and leak it — confirm whether an outer lock serializes callers.
 */
SeqPointInfo*
mono_arch_get_seq_point_info (MonoDomain *domain, guint8 *code)
{
	SeqPointInfo *info;
	MonoJitInfo *ji;

	// FIXME: Add a free function

	mono_domain_lock (domain);
	info = (SeqPointInfo*)g_hash_table_lookup (domain_jit_info (domain)->arch_seq_points,
		code);
	mono_domain_unlock (domain);

	if (!info) {
		ji = mono_jit_info_table_find (domain, code);
		g_assert (ji);

		/* One bp_addrs slot per 4-byte instruction in the method */
		info = g_malloc0 (sizeof (SeqPointInfo) + (ji->code_size / 4) * sizeof(guint8*));

		info->ss_tramp_addr = &ss_trampoline;

		mono_domain_lock (domain);
		g_hash_table_insert (domain_jit_info (domain)->arch_seq_points,
			code, info);
		mono_domain_unlock (domain);
	}

	return info;
}
5595 #endif /* MONO_ARCH_SOFT_DEBUG_SUPPORTED */
/*
 * mono_arch_opcode_supported:
 *
 * Returns whether the arm64 backend can generate code for OPCODE.
 * Only the atomic opcodes listed below are arch-specific; everything else
 * falls through to FALSE.
 */
gboolean
mono_arch_opcode_supported (int opcode)
{
	switch (opcode) {
	/* Atomic read-modify-write operations */
	case OP_ATOMIC_ADD_I4:
	case OP_ATOMIC_ADD_I8:
	case OP_ATOMIC_EXCHANGE_I4:
	case OP_ATOMIC_EXCHANGE_I8:
	case OP_ATOMIC_CAS_I4:
	case OP_ATOMIC_CAS_I8:
	/* Atomic loads */
	case OP_ATOMIC_LOAD_I1:
	case OP_ATOMIC_LOAD_I2:
	case OP_ATOMIC_LOAD_I4:
	case OP_ATOMIC_LOAD_I8:
	case OP_ATOMIC_LOAD_U1:
	case OP_ATOMIC_LOAD_U2:
	case OP_ATOMIC_LOAD_U4:
	case OP_ATOMIC_LOAD_U8:
	case OP_ATOMIC_LOAD_R4:
	case OP_ATOMIC_LOAD_R8:
	/* Atomic stores */
	case OP_ATOMIC_STORE_I1:
	case OP_ATOMIC_STORE_I2:
	case OP_ATOMIC_STORE_I4:
	case OP_ATOMIC_STORE_I8:
	case OP_ATOMIC_STORE_U1:
	case OP_ATOMIC_STORE_U2:
	case OP_ATOMIC_STORE_U4:
	case OP_ATOMIC_STORE_U8:
	case OP_ATOMIC_STORE_R4:
	case OP_ATOMIC_STORE_R8:
		return TRUE;
	default:
		return FALSE;
	}
}
5633 CallInfo*
5634 mono_arch_get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
5636 return get_call_info (mp, sig);
/*
 * mono_arch_load_function:
 *
 * Returns the native address for JIT_ICALL_ID, for the arch-specific icalls
 * listed below; NULL for everything else. The MONO_AOT_ICALL macro expands
 * each name into a `case MONO_JIT_ICALL_<name>: target = (gpointer)<name>;`.
 */
gpointer
mono_arch_load_function (MonoJitICallId jit_icall_id)
{
	gpointer target = NULL;
	switch (jit_icall_id) {
#undef MONO_AOT_ICALL
#define MONO_AOT_ICALL(x) case MONO_JIT_ICALL_ ## x: target = (gpointer)x; break;
	MONO_AOT_ICALL (mono_arm_resume_unwind)
	MONO_AOT_ICALL (mono_arm_start_gsharedvt_call)
	MONO_AOT_ICALL (mono_arm_throw_exception)
	}
	return target;
}
5653 static guint8*
5654 emit_blrx (guint8 *code, int reg)
5656 if (enable_ptrauth)
5657 arm_blraaz (code, reg);
5658 else
5659 arm_blrx (code, reg);
5660 return code;
5663 static guint8*
5664 emit_brx (guint8 *code, int reg)
5666 if (enable_ptrauth)
5667 arm_braaz (code, reg);
5668 else
5669 arm_brx (code, reg);
5670 return code;
5673 guint8*
5674 mono_arm_emit_blrx (guint8 *code, int reg)
5676 return emit_blrx (code, reg);
5679 guint8*
5680 mono_arm_emit_brx (guint8 *code, int reg)
5682 return emit_brx (code, reg);