/*
 * tramp-arm.c: JIT trampoline code for ARM
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *
 * (C) 2001-2003 Ximian, Inc.
 * Copyright 2003-2011 Novell Inc
 * Copyright 2011 Xamarin Inc
 */

#include <config.h>
#include <glib.h>

#include <mono/metadata/abi-details.h>
#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/metadata/profiler-private.h>
#include <mono/arch/arm/arm-codegen.h>
#include <mono/arch/arm/arm-vfp-codegen.h>

#include "mini.h"
#include "mini-arm.h"
#include "debugger-agent.h"
#include "jit-icalls.h"

#define ALIGN_TO(val,align) ((((guint64)val) + ((align) - 1)) & ~((align) - 1))

#ifdef USE_JUMP_TABLES

static guint16
decode_imm16 (guint32 insn)
{
	return (((insn >> 16) & 0xf) << 12) | (insn & 0xfff);
}

#define INSN_MASK 0xff00000
#define MOVW_MASK ((3 << 24) | (0 << 20))
#define MOVT_MASK ((3 << 24) | (4 << 20))

gpointer*
mono_arch_jumptable_entry_from_code (guint8 *code)
{
	guint32 insn1 = ((guint32*)code) [0];
	guint32 insn2 = ((guint32*)code) [1];

	if (((insn1 & INSN_MASK) == MOVW_MASK) &&
	    ((insn2 & INSN_MASK) == MOVT_MASK)) {
		guint32 imm_lo = decode_imm16 (insn1);
		guint32 imm_hi = decode_imm16 (insn2);
		return (gpointer*) GUINT_TO_POINTER (imm_lo | (imm_hi << 16));
	} else {
		g_assert_not_reached ();
		return NULL;
	}
}

#undef INSN_MASK
#undef MOVW_MASK
#undef MOVT_MASK
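
/*
 * mono_arch_patch_callsite:
 *
 *   Patch the call site at CODE_PTR inside METHOD_START so that it transfers
 * control to ADDR. With jump tables this updates the jumptable entry addressed
 * by the MOVW/MOVT pair emitted before the call; otherwise the branch
 * instruction itself is patched.
 */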
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	gpointer *jte;
	/*
	 * code_ptr is 4 instructions after MOVW/MOVT used to address
	 * jumptable entry.
	 */
	jte = mono_jumptable_get_entry (code_ptr - 16);
	g_assert (jte != NULL);
	*jte = addr;
}
#else
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;

	/* This is the 'bl' or the 'mov pc' instruction */
	--code;

	/*
	 * Note that methods are called also with the bl opcode.
	 */
	if ((((*code) >> 25) & 7) == 5) {
		/*g_print ("direct patching\n");*/
		arm_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	if ((((*code) >> 20) & 0xFF) == 0x12) {
		/*g_print ("patching bx\n");*/
		arm_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)(code - 2), 4);
		return;
	}

	g_assert_not_reached ();
}
#endif
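
/*
 * mono_arch_patch_plt_entry:
 *
 *   Patch the PLT entry CODE so that future calls through it go to ADDR. Both
 * the ARM and the Thumb PLT entries load their target from a jump table slot,
 * so only that slot needs to be updated here.
 */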
void
mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
{
	guint8 *jump_entry;

	/* Patch the jump table entry used by the plt entry */
	if (*(guint32*)code == 0xe59fc000) {
		/* ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0); */
		guint32 offset = ((guint32*)code)[2];

		jump_entry = code + offset + 12;
	} else if (*(guint16*)(code - 4) == 0xf8df) {
		/*
		 * Thumb PLT entry, begins with ldr.w ip, [pc, #8], code points to entry + 4, see
		 * mono_arm_get_thumb_plt_entry ().
		 */
		guint32 offset;

		code -= 4;
		offset = *(guint32*)(code + 12);
		jump_entry = code + offset + 8;
	} else {
		g_assert_not_reached ();
	}

	*(guint8**)jump_entry = addr;
}

#ifndef DISABLE_JIT

#define arm_is_imm12(v) ((int)(v) > -4096 && (int)(v) < 4096)

#ifndef USE_JUMP_TABLES
/*
 * Return the instruction to jump from code to target, 0 if not
 * reachable with a single instruction
 */
static guint32
branch_for_target_reachable (guint8 *branch, guint8 *target)
{
	gint diff = target - branch - 8;
	g_assert ((diff & 3) == 0);
	if (diff >= 0) {
		if (diff <= 33554431)
			return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | (diff >> 2);
	} else {
		/* diff between 0 and -33554432 */
		if (diff >= -33554432)
			return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | ((diff >> 2) & ~0xff000000);
	}
	return 0;
}
#endif

static inline guint8*
emit_bx (guint8* code, int reg)
{
	if (mono_arm_thumb_supported ())
		ARM_BX (code, reg);
	else
		ARM_MOV_REG_REG (code, ARMREG_PC, reg);
	return code;
}

/* Stack size for trampoline function */
#define STACK ALIGN_TO (sizeof (MonoLMF), MONO_ARCH_FRAME_ALIGNMENT)

/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE 64
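
/*
 * mono_arch_create_generic_trampoline:
 *
 *   Create the generic (per trampoline type) part of the trampoline code: it
 * saves the caller state into a MonoLMF built on the stack, calls the C
 * trampoline function for TRAMP_TYPE, then restores the state and branches to
 * the address returned by that function (or returns to the caller for
 * trampoline types that must return).
 */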
guchar*
mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
{
	char *tramp_name;
	guint8 *buf, *code = NULL;
#ifdef USE_JUMP_TABLES
	gpointer *load_get_lmf_addr = NULL, *load_trampoline = NULL;
#else
	guint8 *load_get_lmf_addr = NULL, *load_trampoline = NULL;
	gpointer *constants;
#endif
	int i, cfa_offset, regsave_size, lr_offset;
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;
	int buf_len;

#ifdef USE_JUMP_TABLES
	g_assert (!aot);
#endif

	/* Now we'll create in 'buf' the ARM trampoline code. This
	   is the trampoline code common to all methods */

	buf_len = 272;

	/* Add space for saving/restoring VFP regs. */
	if (mono_arm_is_hard_float ())
		buf_len += 8 * 2;

	code = buf = mono_global_codeman_reserve (buf_len);

	/*
	 * At this point lr points to the specific arg and sp points to the saved
	 * regs on the stack (all but PC and SP). The original LR value has been
	 * saved as sp + LR_OFFSET by the push in the specific trampoline.
	 */

	/* The size of the area already allocated by the push in the specific trampoline */
	regsave_size = 14 * sizeof (mgreg_t);
	/* The offset where lr was saved inside the regsave area */
	lr_offset = 13 * sizeof (mgreg_t);

	// CFA = SP + (num registers pushed) * 4
	cfa_offset = 14 * sizeof (mgreg_t);
	mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, cfa_offset);
	// PC saved at sp+LR_OFFSET
	mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_LR, -4);
	/* Callee saved regs */
	for (i = 0; i < 8; ++i)
		mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_R4 + i, -regsave_size + ((4 + i) * 4));

	if (aot) {
		/*
		 * For page trampolines the data is in r1, so just move it, otherwise use the got slot as below.
		 * The trampoline contains a pc-relative offset to the got slot
		 * preceding the got slot where the value is stored. The offset can be
		 * found at [lr + 0].
		 */
		/* See emit_trampolines () in aot-compiler.c for the '2' */
		if (aot == 2) {
			ARM_MOV_REG_REG (code, ARMREG_V2, ARMREG_R1);
		} else {
			ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
			ARM_ADD_REG_IMM (code, ARMREG_V2, ARMREG_V2, 4, 0);
			ARM_LDR_REG_REG (code, ARMREG_V2, ARMREG_V2, ARMREG_LR);
		}
	} else {
		ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
	}
	ARM_LDR_IMM (code, ARMREG_V3, ARMREG_SP, lr_offset);

	/* we build the MonoLMF structure on the stack - see mini-arm.h
	 * The pointer to the struct is put in v1.
	 * the iregs array is already allocated on the stack by push.
	 */
	code = mono_arm_emit_load_imm (code, ARMREG_R2, STACK - regsave_size);
	ARM_SUB_REG_REG (code, ARMREG_SP, ARMREG_SP, ARMREG_R2);
	cfa_offset += STACK - regsave_size;
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	/* V1 == lmf */
	code = mono_arm_emit_load_imm (code, ARMREG_R2, STACK - sizeof (MonoLMF));
	ARM_ADD_REG_REG (code, ARMREG_V1, ARMREG_SP, ARMREG_R2);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-arm.c
	 * This is a synthesized call to mono_get_lmf_addr ()
	 */
	if (aot) {
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
		ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_R0, ARMREG_PC, ARMREG_R0);
	} else {
#ifdef USE_JUMP_TABLES
		load_get_lmf_addr = mono_jumptable_add_entry ();
		code = mono_arm_load_jumptable_entry (code, load_get_lmf_addr, ARMREG_R0);
#else
		load_get_lmf_addr = code;
		code += 4;
#endif
	}
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_R0);

	/*
	 * The stack now looks like:
	 *       <saved regs>
	 * v1 -> <rest of LMF>
	 * sp -> <alignment>
	 */

	/* r0 is the result from mono_get_lmf_addr () */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, lmf_addr));
	/* new_lmf->previous_lmf = *lmf_addr */
	ARM_LDR_IMM (code, ARMREG_R2, ARMREG_R0, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* *(lmf_addr) = v1 */
	ARM_STR_IMM (code, ARMREG_V1, ARMREG_R0, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* save method info (it's in v2) */
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
		ARM_STR_IMM (code, ARMREG_V2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, method));
	else {
		ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
		ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, method));
	}
	/* save caller SP */
	code = mono_arm_emit_load_imm (code, ARMREG_R2, cfa_offset);
	ARM_ADD_REG_REG (code, ARMREG_R2, ARMREG_SP, ARMREG_R2);
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, sp));
	/* save caller FP */
	ARM_LDR_IMM (code, ARMREG_R2, ARMREG_V1, (MONO_STRUCT_OFFSET (MonoLMF, iregs) + ARMREG_FP*4));
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fp));
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
	} else {
		ARM_LDR_IMM (code, ARMREG_R2, ARMREG_V1, (MONO_STRUCT_OFFSET (MonoLMF, iregs) + 13*4));
	}
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, ip));

	/* Save VFP registers. */
	if (mono_arm_is_hard_float ()) {
		/*
		 * Strictly speaking, we don't have to save d0-d7 in the LMF, but
		 * it's easier than attempting to store them on the stack since
		 * this trampoline code is pretty messy.
		 */
		ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fregs));
		ARM_FSTMD (code, ARM_VFP_D0, 8, ARMREG_R0);
	}

	/*
	 * Now we're ready to call xxx_trampoline ().
	 */
	/* Arg 1: the saved registers */
	ARM_ADD_REG_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, iregs), 0);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ARM_MOV_REG_IMM8 (code, ARMREG_R1, 0);
	} else {
		ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_V3);
	}

	/* Arg 3: the specific argument, stored in v2 */
	ARM_MOV_REG_REG (code, ARMREG_R2, ARMREG_V2);

	if (aot) {
		char *icall_name = g_strdup_printf ("trampoline_func_%d", tramp_type);
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, icall_name);
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
#ifdef USE_JUMP_TABLES
		load_trampoline = mono_jumptable_add_entry ();
		code = mono_arm_load_jumptable_entry (code, load_trampoline, ARMREG_IP);
#else
		load_trampoline = code;
		code += 4;
#endif
	}

	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);

	/* OK, code address is now in r0. Move it to the place on the stack
	 * where IP was saved (it is now no more useful to us and it can be
	 * clobbered). This way we can just restore all the regs in one inst
	 * and branch to IP.
	 */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, iregs) + (ARMREG_R12 * sizeof (mgreg_t)));

	/* Check for thread interruption */
	/* This is not perf critical code so no need to check the interrupt flag */
	/*
	 * Have to call the _force_ variant, since there could be a protected wrapper on the top of the stack.
	 */
	if (aot) {
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_interruption_checkpoint_from_trampoline");
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
#ifdef USE_JUMP_TABLES
		gpointer *jte = mono_jumptable_add_entry ();
		code = mono_arm_load_jumptable_entry (code, jte, ARMREG_IP);
		jte [0] = mono_interruption_checkpoint_from_trampoline;
#else
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = mono_interruption_checkpoint_from_trampoline;
		code += 4;
#endif
	}
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-arm.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 */
	/* ip = previous_lmf */
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* lr = lmf_addr */
	ARM_LDR_IMM (code, ARMREG_LR, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, lmf_addr));
	/* *(lmf_addr) = previous_lmf */
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_LR, MONO_STRUCT_OFFSET (MonoLMF, previous_lmf));

	/* Restore VFP registers. */
	if (mono_arm_is_hard_float ()) {
		ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_V1, MONO_STRUCT_OFFSET (MonoLMF, fregs));
		ARM_FLDMD (code, ARM_VFP_D0, 8, ARMREG_R0);
	}

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore the registers and jump to the code:
	 * Note that IP has been conveniently set to the method addr.
	 */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, STACK - regsave_size);
	cfa_offset -= STACK - regsave_size;
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	ARM_POP_NWB (code, 0x5fff);
	mono_add_unwind_op_same_value (unwind_ops, code, buf, ARMREG_LR);
	if (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
		ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_IP);
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, regsave_size);
	cfa_offset -= regsave_size;
	g_assert (cfa_offset == 0);
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
		code = emit_bx (code, ARMREG_LR);
	else
		code = emit_bx (code, ARMREG_IP);

#ifdef USE_JUMP_TABLES
	load_get_lmf_addr [0] = mono_get_lmf_addr;
	load_trampoline [0] = (gpointer)mono_get_trampoline_func (tramp_type);
#else
	constants = (gpointer*)code;
	constants [0] = mono_get_lmf_addr;
	constants [1] = (gpointer)mono_get_trampoline_func (tramp_type);

	if (!aot) {
		/* backpatch by emitting the missing instructions skipped above */
		ARM_LDR_IMM (load_get_lmf_addr, ARMREG_R0, ARMREG_PC, (code - load_get_lmf_addr - 8));
		ARM_LDR_IMM (load_trampoline, ARMREG_IP, ARMREG_PC, (code + 4 - load_trampoline - 8));
	}

	code += 8;
#endif

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);

	/* Sanity check */
	g_assert ((code - buf) <= buf_len);

	g_assert (info);
	tramp_name = mono_get_generic_trampoline_name (tramp_type);
	*info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);
	g_free (tramp_name);

	return buf;
}

#define SPEC_TRAMP_SIZE 24
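
/*
 * mono_arch_create_specific_trampoline:
 *
 *   Create a small trampoline which pushes all registers except PC and SP,
 * loads ARG1 and the address of the generic trampoline for TRAMP_TYPE from
 * inlined literals (or a jump table entry), and jumps to the generic code.
 */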
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	gpointer *constants;
#ifndef USE_JUMP_TABLES
	guint32 short_branch = FALSE;
#endif
	guint32 size = SPEC_TRAMP_SIZE;

	tramp = mono_get_trampoline_code (tramp_type);

	if (domain) {
		mono_domain_lock (domain);
#ifdef USE_JUMP_TABLES
		code = buf = mono_domain_code_reserve_align (domain, size, 4);
#else
		code = buf = mono_domain_code_reserve_align (domain, size, 4);
		if ((short_branch = branch_for_target_reachable (code + 4, tramp))) {
			size = 12;
			mono_domain_code_commit (domain, code, SPEC_TRAMP_SIZE, size);
		}
#endif
		mono_domain_unlock (domain);
	} else {
		code = buf = mono_global_codeman_reserve (size);
		short_branch = FALSE;
	}

#ifdef USE_JUMP_TABLES
	/* For jumptables case we always generate the same code for trampolines,
	 * namely
	 *   push {r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, lr}
	 *   movw lr, lo(jte)
	 *   movt lr, hi(jte)
	 *   ldr r1, [lr + 4]
	 *   bx r1
	 */
	ARM_PUSH (code, 0x5fff);
	constants = mono_jumptable_add_entries (2);
	code = mono_arm_load_jumptable_entry_addr (code, constants, ARMREG_LR);
	ARM_LDR_IMM (code, ARMREG_R1, ARMREG_LR, 4);
	code = emit_bx (code, ARMREG_R1);
	constants [0] = arg1;
	constants [1] = tramp;
#else
	/* we could reduce this to 12 bytes if tramp is within reach:
	 * ARM_PUSH ()
	 * ARM_BL ()
	 * method-literal
	 * The called code can access method using the lr register
	 * A 20 byte sequence could be:
	 * ARM_PUSH ()
	 * ARM_MOV_REG_REG (lr, pc)
	 * ARM_LDR_IMM (pc, pc, 0)
	 * method-literal
	 * tramp-literal
	 */
	/* We save all the registers, except PC and SP */
	ARM_PUSH (code, 0x5fff);
	if (short_branch) {
		constants = (gpointer*)code;
		constants [0] = GUINT_TO_POINTER (short_branch | (1 << 24));
		constants [1] = arg1;
		code += 8;
	} else {
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 8); /* temp reg */
		ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
		code = emit_bx (code, ARMREG_R1);

		constants = (gpointer*)code;
		constants [0] = arg1;
		constants [1] = tramp;
		code += 8;
	}
#endif

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_SPECIFIC_TRAMPOLINE, mono_get_generic_trampoline_simple_name (tramp_type));

	g_assert ((code - buf) <= size);

	if (code_len)
		*code_len = code - buf;

	return buf;
}

/*
 * mono_arch_get_unbox_trampoline:
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * When value type methods are called through the vtable we need to unbox the
 * this argument. This method returns a pointer to a trampoline which does
 * unboxing before calling the method.
 */
gpointer
mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	MonoDomain *domain = mono_domain_get ();
	GSList *unwind_ops;
#ifdef USE_JUMP_TABLES
	gpointer *jte;
	guint32 size = 20;
#else
	guint32 size = 16;
#endif

	start = code = mono_domain_code_reserve (domain, size);

	unwind_ops = mono_arch_get_cie_program ();

#ifdef USE_JUMP_TABLES
	jte = mono_jumptable_add_entry ();
	code = mono_arm_load_jumptable_entry (code, jte, ARMREG_IP);
	ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_R0, sizeof (MonoObject));
	code = emit_bx (code, ARMREG_IP);
	jte [0] = addr;
#else
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 4);
	ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_R0, sizeof (MonoObject));
	code = emit_bx (code, ARMREG_IP);
	*(guint32*)code = (guint32)addr;
	code += 4;
#endif
	mono_arch_flush_icache (start, code - start);
	mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_UNBOX_TRAMPOLINE, m);
	g_assert ((code - start) <= size);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, unwind_ops), domain);

	return start;
}
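
/*
 * mono_arch_get_static_rgctx_trampoline:
 *
 *   Return a trampoline which loads MRGCTX into MONO_ARCH_RGCTX_REG and then
 * jumps to ADDR.
 */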
gpointer
mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
{
	guint8 *code, *start;
	GSList *unwind_ops;
#ifdef USE_JUMP_TABLES
	int buf_len = 20;
	gpointer *jte;
#else
	int buf_len = 16;
#endif
	MonoDomain *domain = mono_domain_get ();

	start = code = mono_domain_code_reserve (domain, buf_len);

	unwind_ops = mono_arch_get_cie_program ();

#ifdef USE_JUMP_TABLES
	jte = mono_jumptable_add_entries (2);
	code = mono_arm_load_jumptable_entry_addr (code, jte, ARMREG_IP);
	ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_IP, 0);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 4);
	ARM_BX (code, ARMREG_IP);
	jte [0] = mrgctx;
	jte [1] = addr;
#else
	ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_PC, 0);
	ARM_LDR_IMM (code, ARMREG_PC, ARMREG_PC, 0);
	*(guint32*)code = (guint32)mrgctx;
	code += 4;
	*(guint32*)code = (guint32)addr;
	code += 4;
#endif

	g_assert ((code - start) <= buf_len);

	mono_arch_flush_icache (start, code - start);
	mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);

	mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, unwind_ops), domain);

	return start;
}
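
/*
 * mono_arch_create_rgctx_lazy_fetch_trampoline:
 *
 *   Create a trampoline which looks up rgctx SLOT in the vtable/mrgctx passed
 * in R0 and returns it if it is already initialized; otherwise it falls back
 * to the RGCTX_LAZY_FETCH trampoline which computes the slot contents.
 */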
gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	int tramp_size;
	guint32 code_len;
	guint8 **rgctx_null_jumps;
	int depth, index;
	int i, njumps;
	gboolean mrgctx;
	MonoJumpInfo *ji = NULL;
	GSList *unwind_ops = NULL;
#ifdef USE_JUMP_TABLES
	gpointer *jte;
#endif

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	tramp_size = 64 + 16 * depth;

	code = buf = mono_global_codeman_reserve (tramp_size);

	unwind_ops = mono_arch_get_cie_program ();

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
	njumps = 0;

	/* The vtable/mrgctx is in R0 */
	g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);

	if (mrgctx) {
		/* get mrgctx ptr */
		ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_R0);
	} else {
		/* load rgctx ptr from vtable */
		g_assert (arm_is_imm12 (MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context)));
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R0, MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context));
		/* is the rgctx ptr null? */
		ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [njumps ++] = code;
		ARM_B_COND (code, ARMCOND_EQ, 0);
	}

	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0) {
			g_assert (arm_is_imm12 (MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT));
			ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT);
		} else {
			ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, 0);
		}
		/* is the ptr null? */
		ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [njumps ++] = code;
		ARM_B_COND (code, ARMCOND_EQ, 0);
	}

	/* fetch slot */
	code = mono_arm_emit_load_imm (code, ARMREG_R2, sizeof (gpointer) * (index + 1));
	ARM_LDR_REG_REG (code, ARMREG_R1, ARMREG_R1, ARMREG_R2);
	/* is the slot null? */
	ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [njumps ++] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	/* otherwise return, result is in R1 */
	ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_R1);
	code = emit_bx (code, ARMREG_LR);

	g_assert (njumps <= depth + 2);
	for (i = 0; i < njumps; ++i)
		arm_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* Slowpath */

	/* The vtable/mrgctx is still in R0 */

	if (aot) {
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
	} else {
		tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), &code_len);

		/* Jump to the actual trampoline */
#ifdef USE_JUMP_TABLES
		jte = mono_jumptable_add_entry ();
		jte [0] = tramp;
		code = mono_arm_load_jumptable_entry (code, jte, ARMREG_R1);
		code = emit_bx (code, ARMREG_R1);
#else
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
		code = emit_bx (code, ARMREG_R1);
		*(gpointer*)code = tramp;
		code += 4;
#endif
	}

	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);

	g_assert (code - buf <= tramp_size);

	char *name = mono_get_rgctx_fetch_trampoline_name (slot);
	*info = mono_tramp_info_create (name, buf, code - buf, ji, unwind_ops);
	g_free (name);

	return buf;
}
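
/*
 * mono_arch_create_general_rgctx_lazy_fetch_trampoline:
 *
 *   AOT-only variant of the rgctx fetch trampoline: it loads the trampoline
 * address from the slot info pointed to by MONO_ARCH_RGCTX_REG and jumps to
 * it, i.e. it currently always takes the slow path (see the FIXME below).
 */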
gpointer
mono_arch_create_general_rgctx_lazy_fetch_trampoline (MonoTrampInfo **info, gboolean aot)
{
	guint8 *code, *buf;
	int tramp_size;
	MonoJumpInfo *ji = NULL;
	GSList *unwind_ops = NULL;

	g_assert (aot);

	tramp_size = 32;

	code = buf = mono_global_codeman_reserve (tramp_size);

	unwind_ops = mono_arch_get_cie_program ();

	// FIXME: Currently, we always go to the slow path.
	/* Load trampoline addr */
	ARM_LDR_IMM (code, ARMREG_R1, MONO_ARCH_RGCTX_REG, 4);
	/* The vtable/mrgctx is in R0 */
	g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);
	code = emit_bx (code, ARMREG_R1);

	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);

	g_assert (code - buf <= tramp_size);

	*info = mono_tramp_info_create ("rgctx_fetch_trampoline_general", buf, code - buf, ji, unwind_ops);

	return buf;
}

static gpointer
handler_block_trampoline_helper (gpointer *ptr)
{
	MonoJitTlsData *jit_tls = mono_native_tls_get_value (mono_jit_tls_id);
	return jit_tls->handler_block_return_address;
}
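
/*
 * mono_arch_create_handler_block_trampoline:
 *
 *   Create the trampoline used by handler block guards: it fetches
 * jit_tls->handler_block_return_address through handler_block_trampoline_helper (),
 * installs it as the return address, then calls the HANDLER_BLOCK_GUARD
 * specific trampoline.
 */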
gpointer
mono_arch_create_handler_block_trampoline (MonoTrampInfo **info, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	int tramp_size = 64;
	MonoJumpInfo *ji = NULL;
	GSList *unwind_ops = NULL;

	g_assert (!aot);

	code = buf = mono_global_codeman_reserve (tramp_size);

	unwind_ops = mono_arch_get_cie_program ();

	tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_HANDLER_BLOCK_GUARD, NULL, NULL);

	/*
	 * This trampoline restores the call chain of the handler block, then jumps into the code that deals with it.
	 */

	/*
	 * We are in a method frame after the call emitted by OP_CALL_HANDLER.
	 */
	/* Obtain jit_tls->handler_block_return_address */
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
	ARM_B (code, 0);
	*(gpointer*)code = handler_block_trampoline_helper;
	code += 4;

	/* Set it as the return address so the trampoline will return to it */
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_R0);

	/* Call the trampoline */
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
	code = emit_bx (code, ARMREG_R0);
	*(gpointer*)code = tramp;
	code += 4;

	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
	g_assert (code - buf <= tramp_size);

	*info = mono_tramp_info_create ("handler_block_trampoline", buf, code - buf, ji, unwind_ops);

	return buf;
}
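
/*
 * mono_arch_create_sdb_trampoline:
 *
 *   Create the single step/breakpoint trampoline used by the soft debugger:
 * it builds a MonoContext on the stack, passes it to
 * debugger_agent_single_step_from_context ()/debugger_agent_breakpoint_from_context (),
 * then restores the (possibly modified) context and resumes execution.
 */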
guint8*
mono_arch_create_sdb_trampoline (gboolean single_step, MonoTrampInfo **info, gboolean aot)
{
	guint8 *buf, *code;
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;
	int frame_size;

	buf = code = mono_global_codeman_reserve (96);

	/*
	 * Construct the MonoContext structure on the stack.
	 */

	frame_size = sizeof (MonoContext);
	frame_size = ALIGN_TO (frame_size, MONO_ARCH_FRAME_ALIGNMENT);
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, frame_size);

	/* save ip, lr and pc into their corresponding ctx.regs slots. */
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_SP, MONO_STRUCT_OFFSET (MonoContext, regs) + sizeof (mgreg_t) * ARMREG_IP);
	ARM_STR_IMM (code, ARMREG_LR, ARMREG_SP, MONO_STRUCT_OFFSET (MonoContext, regs) + 4 * ARMREG_LR);
	ARM_STR_IMM (code, ARMREG_LR, ARMREG_SP, MONO_STRUCT_OFFSET (MonoContext, regs) + 4 * ARMREG_PC);

	/* save r0..r10 and fp */
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_SP, MONO_STRUCT_OFFSET (MonoContext, regs));
	ARM_STM (code, ARMREG_IP, 0x0fff);

	/* now we can update fp. */
	ARM_MOV_REG_REG (code, ARMREG_FP, ARMREG_SP);

	/* make ctx.esp hold the actual value of sp at the beginning of this method. */
	ARM_ADD_REG_IMM8 (code, ARMREG_R0, ARMREG_FP, frame_size);
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_IP, 4 * ARMREG_SP);
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, regs) + 4 * ARMREG_SP);

	/* make ctx.eip hold the address of the call. */
	ARM_SUB_REG_IMM8 (code, ARMREG_LR, ARMREG_LR, 4);
	ARM_STR_IMM (code, ARMREG_LR, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, pc));

	/* r0 now points to the MonoContext */
	ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_FP);

	/* call */
	// FIXME: AOT
#ifdef USE_JUMP_TABLES
	{
		gpointer *jte = mono_jumptable_add_entry ();
		code = mono_arm_load_jumptable_entry (code, jte, ARMREG_IP);
		jte [0] = function;
	}
#else
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
	ARM_B (code, 0);
	if (single_step)
		*(gpointer*)code = debugger_agent_single_step_from_context;
	else
		*(gpointer*)code = debugger_agent_breakpoint_from_context;
	code += 4;
#endif
	ARM_BLX_REG (code, ARMREG_IP);

	/* we're back; save ctx.eip and ctx.esp into the corresponding regs slots. */
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, pc));
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, regs) + 4 * ARMREG_LR);
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, regs) + 4 * ARMREG_PC);

	/* make ip point to the regs array, then restore everything, including pc. */
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_FP, MONO_STRUCT_OFFSET (MonoContext, regs));
	ARM_LDM (code, ARMREG_IP, 0xffff);

	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);

	const char *tramp_name = single_step ? "sdb_single_step_trampoline" : "sdb_breakpoint_trampoline";
	*info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);

	return buf;
}

#else

guchar*
mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_handler_block_trampoline (MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

guint8*
mono_arch_create_sdb_trampoline (gboolean single_step, MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

#endif /* DISABLE_JIT */
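
/*
 * mono_arch_get_call_target:
 *
 *   Return the target of the branch/call instruction immediately preceding
 * CODE, or NULL if that instruction is not a direct branch.
 */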
guint8*
mono_arch_get_call_target (guint8 *code)
{
	guint32 ins = ((guint32*)(gpointer)code) [-1];

#if MONOTOUCH
	/* Should be a 'bl' or a 'b' */
	if (((ins >> 25) & 0x7) == 0x5) {
#else
	/* Should be a 'bl' */
	if ((((ins >> 25) & 0x7) == 0x5) && (((ins >> 24) & 0x1) == 0x1)) {
#endif
		gint32 disp = ((((gint32)ins) & 0xffffff) << 8) >> 8;
		guint8 *target = code - 4 + 8 + (disp * 4);

		return target;
	} else {
		return NULL;
	}
}

guint32
mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code)
{
	/* The offset is stored as the 4th word of the plt entry */
	return ((guint32*)plt_entry) [3];
}

/*
 * Return the address of the PLT entry called by the thumb code CODE.
 */
guint8*
mono_arm_get_thumb_plt_entry (guint8 *code)
{
	int s, j1, j2, imm10, imm11, i1, i2, imm32;
	guint8 *bl, *base;
	guint16 t1, t2;
	guint8 *target;

	/* code should be right after a BL */
	code = (guint8*)((mgreg_t)code & ~1);
	base = (guint8*)((mgreg_t)code & ~3);
	bl = code - 4;
	t1 = ((guint16*)bl) [0];
	t2 = ((guint16*)bl) [1];

	g_assert ((t1 >> 11) == 0x1e);

	s = (t1 >> 10) & 0x1;
	imm10 = (t1 >> 0) & 0x3ff;
	j1 = (t2 >> 13) & 0x1;
	j2 = (t2 >> 11) & 0x1;
	imm11 = t2 & 0x7ff;

	i1 = (s ^ j1) ? 0 : 1;
	i2 = (s ^ j2) ? 0 : 1;

	imm32 = (imm11 << 1) | (imm10 << 12) | (i2 << 22) | (i1 << 23);
	if (s)
		/* Sign extend from 24 bits to 32 bits */
		imm32 = ((gint32)imm32 << 8) >> 8;

	target = code + imm32;

	/* target now points to the thumb plt entry */
	/* ldr.w r12, [pc, #8] */
	g_assert (((guint16*)target) [0] == 0xf8df);
	g_assert (((guint16*)target) [1] == 0xc008);

	/*
	 * The PLT info offset is at offset 16, but mono_arch_get_plt_entry_offset () returns
	 * the 3rd word, so compensate by returning a different value.
	 */
	target += 4;

	return target;
}

#ifndef DISABLE_JIT

/*
 * mono_arch_get_gsharedvt_arg_trampoline:
 *
 *   See tramp-x86.c for documentation.
 */
gpointer
mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr)
{
	guint8 *code, *buf;
	int buf_len;
	gpointer *constants;

	buf_len = 24;

	buf = code = mono_domain_code_reserve (domain, buf_len);

	/* Similar to the specialized trampoline code */
	ARM_PUSH (code, (1 << ARMREG_R0) | (1 << ARMREG_R1) | (1 << ARMREG_R2) | (1 << ARMREG_R3) | (1 << ARMREG_LR));
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 8);
	/* arg is passed in LR */
	ARM_LDR_IMM (code, ARMREG_LR, ARMREG_PC, 0);
	code = emit_bx (code, ARMREG_IP);
	constants = (gpointer*)code;
	constants [0] = arg;
	constants [1] = addr;
	code += 8;

	g_assert ((code - buf) <= buf_len);

	nacl_domain_code_validate (domain, &buf, buf_len, &code);
	mono_arch_flush_icache (buf, code - buf);
	mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);

	mono_tramp_info_register (mono_tramp_info_create (NULL, buf, code - buf, NULL, NULL), domain);

	return buf;
}

#else

gpointer
mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr)
{
	g_assert_not_reached ();
	return NULL;
}

#endif

#if defined(ENABLE_GSHAREDVT)

#include "../../../mono-extensions/mono/mini/tramp-arm-gsharedvt.c"

#else

gpointer
mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	*info = NULL;
	return NULL;
}

#endif /* ENABLE_GSHAREDVT */