/*
 * tramp-arm.c: JIT trampoline code for ARM
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *
 * (C) 2001 Ximian, Inc.
 */

#include <config.h>
#include <glib.h>

#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/arch/arm/arm-codegen.h>

#include "mini.h"
#include "mini-arm.h"

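/*
 * Trampoline which does nothing but return to the caller; call sites to class
 * init trampolines are redirected here once initialization has run (see
 * mono_arch_get_nullified_class_init_trampoline ()).
 */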
static guint8* nullified_class_init_trampoline;

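/*
 * mono_arch_patch_callsite:
 *
 *   Patch the call instruction ending at CODE_PTR (either a 'bl' or a
 * 'mov pc'/'bx' style indirect call) so that it calls ADDR.
 */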
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;

	/* This is the 'bl' or the 'mov pc' instruction */
	--code;

	/*
	 * Note that methods are called also with the bl opcode.
	 */
	if ((((*code) >> 25) & 7) == 5) {
		/*g_print ("direct patching\n");*/
		arm_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	if ((((*code) >> 20) & 0xFF) == 0x12) {
		/*g_print ("patching bx\n");*/
		arm_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)(code - 2), 4);
		return;
	}

	g_assert_not_reached ();
}

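/*
 * mono_arch_patch_plt_entry:
 *
 *   A PLT entry begins with 'ldr ip, [pc, #0]' (0xe59fc000); its third word
 * holds an offset used to locate the jump table slot the entry branches
 * through (jump_entry = code + offset + 12). Patching that slot redirects the
 * entry to ADDR.
 */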
void
mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
{
	guint8 *jump_entry;

	/* Patch the jump table entry used by the plt entry */
	if (*(guint32*)code == 0xe59fc000) {
		/* ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0); */
		guint32 offset = ((guint32*)code)[2];

		jump_entry = code + offset + 12;
	} else {
		g_assert_not_reached ();
	}

	*(guint8**)jump_entry = addr;
}

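/*
 * mono_arch_nullify_class_init_trampoline:
 *
 *   Patch the call site at CODE to call the nullified trampoline (a plain
 * return) instead of the class init trampoline, so the check is not done
 * again.
 */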
void
mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs)
{
	mono_arch_patch_callsite (NULL, code, nullified_class_init_trampoline);
}

void
mono_arch_nullify_plt_entry (guint8 *code, mgreg_t *regs)
{
	if (mono_aot_only && !nullified_class_init_trampoline)
		nullified_class_init_trampoline = mono_aot_get_named_code ("nullified_class_init_trampoline");

	mono_arch_patch_plt_entry (code, NULL, regs, nullified_class_init_trampoline);
}

#ifndef DISABLE_JIT

#define arm_is_imm12(v) ((int)(v) > -4096 && (int)(v) < 4096)

/*
 * Return the instruction to jump from code to target, 0 if not
 * reachable with a single instruction. The B instruction encodes a signed
 * 24-bit word offset, so the target must lie within +/-32MB of the branch.
 */
static guint32
branch_for_target_reachable (guint8 *branch, guint8 *target)
{
	gint diff = target - branch - 8;

	g_assert ((diff & 3) == 0);
	if (diff >= 0) {
		if (diff <= 33554431)
			return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | (diff >> 2);
	} else {
		/* diff between 0 and -33554432 */
		if (diff >= -33554432)
			return (ARMCOND_AL << ARMCOND_SHIFT) | (ARM_BR_TAG) | ((diff >> 2) & ~0xff000000);
	}
	return 0;
}

/* Emit an indirect branch to the address in REG: BX when Thumb interworking
 * is supported, a plain 'mov pc, reg' otherwise. */
static inline guint8*
emit_bx (guint8* code, int reg)
{
	if (mono_arm_thumb_supported ())
		ARM_BX (code, reg);
	else
		ARM_MOV_REG_REG (code, ARMREG_PC, reg);
	return code;
}

/* Stack size for trampoline function */
#define STACK (sizeof (MonoLMF))

/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE   64

#define GEN_TRAMP_SIZE 196

/*
 * Stack frame description when the generic trampoline is called.
 * caller frame
 * ------------------- old sp
 *  MonoLMF
 * ------------------- sp
 */
guchar*
mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
{
	MonoJumpInfo *ji;
	guint32 code_size;
	guchar *code;
	GSList *unwind_ops, *l;

	code = mono_arch_create_trampoline_code_full (tramp_type, &code_size, &ji, &unwind_ops, FALSE);

	mono_save_trampoline_xdebug_info ("<generic_trampoline>", code, code_size, unwind_ops);

	for (l = unwind_ops; l; l = l->next)
		g_free (l->data);
	g_slist_free (unwind_ops);

	return code;
}

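/*
 * mono_arch_create_trampoline_code_full:
 *
 *   Create the generic trampoline: build a MonoLMF on top of the registers
 * pushed by the specific trampoline, call the C trampoline function for
 * TRAMP_TYPE, then unlink the LMF, restore the registers and branch to the
 * address the trampoline function returned (or back to the caller for the
 * class init / rgctx fetch cases).
 */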
guchar*
mono_arch_create_trampoline_code_full (MonoTrampolineType tramp_type, guint32 *code_size, MonoJumpInfo **ji, GSList **out_unwind_ops, gboolean aot)
{
	guint8 *buf, *code = NULL;
	guint8 *load_get_lmf_addr, *load_trampoline;
	gpointer *constants;
	GSList *unwind_ops = NULL;
	int cfa_offset;

	*ji = NULL;

	/* Now we'll create in 'buf' the ARM trampoline code. This
	   is the trampoline code common to all methods */

	code = buf = mono_global_codeman_reserve (GEN_TRAMP_SIZE);

	/*
	 * At this point lr points to the specific arg and sp points to the saved
	 * regs on the stack (all but PC and SP). The original LR value has been
	 * saved as sp + LR_OFFSET by the push in the specific trampoline.
	 */
#define LR_OFFSET (sizeof (gpointer) * 13)

	// FIXME: Finish the unwind info, the current info allows us to unwind
	// when the trampoline is not in the epilog

	// CFA = SP + (num registers pushed) * 4
	cfa_offset = 14 * sizeof (gpointer);
	mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, cfa_offset);
	// PC saved at sp + LR_OFFSET
	mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_LR, -4);
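
	/*
	 * Stash the trampoline state in callee-saved registers before making any
	 * calls: v1 = pointer to the saved register area (the current sp), v2 =
	 * the trampoline-specific argument, v3 = the caller's original lr.
	 */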
	ARM_MOV_REG_REG (code, ARMREG_V1, ARMREG_SP);
	if (aot && tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT) {
		/*
		 * The trampoline contains a pc-relative offset to the got slot
		 * preceding the got slot where the value is stored. The offset can be
		 * found at [lr + 0].
		 */
		ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
		ARM_ADD_REG_IMM (code, ARMREG_V2, ARMREG_V2, 4, 0);
		ARM_LDR_REG_REG (code, ARMREG_V2, ARMREG_V2, ARMREG_LR);
	} else {
		if (tramp_type != MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
			ARM_LDR_IMM (code, ARMREG_V2, ARMREG_LR, 0);
		else
			ARM_MOV_REG_REG (code, ARMREG_V2, MONO_ARCH_VTABLE_REG);
	}
	ARM_LDR_IMM (code, ARMREG_V3, ARMREG_SP, LR_OFFSET);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-arm.c
	 * This is a synthesized call to mono_get_lmf_addr ()
	 */
	if (aot) {
		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
		ARM_LDR_IMM (code, ARMREG_R0, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_R0, ARMREG_PC, ARMREG_R0);
	} else {
		load_get_lmf_addr = code;
		code += 4;
	}
	/* Manual call: reading pc yields the current instruction + 8, so lr ends
	 * up pointing at the instruction right after the bx */
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_R0);

	/* we build the MonoLMF structure on the stack - see mini-arm.h
	 * The pointer to the struct is put in r1.
	 * the iregs array is already allocated on the stack by push.
	 */
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, sizeof (MonoLMF) - sizeof (guint) * 14);
	cfa_offset += sizeof (MonoLMF) - sizeof (guint) * 14;
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	ARM_ADD_REG_IMM8 (code, ARMREG_R1, ARMREG_SP, STACK - sizeof (MonoLMF));
	/* r0 is the result from mono_get_lmf_addr () */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, lmf_addr));
	/* new_lmf->previous_lmf = *lmf_addr */
	ARM_LDR_IMM (code, ARMREG_R2, ARMREG_R0, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* *(lmf_addr) = r1 */
	ARM_STR_IMM (code, ARMREG_R1, ARMREG_R0, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* save method info (it's in v2) */
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
		ARM_STR_IMM (code, ARMREG_V2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, method));
	else {
		ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
		ARM_STR_IMM (code, ARMREG_R2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, method));
	}
	ARM_STR_IMM (code, ARMREG_SP, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, ebp));
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ARM_MOV_REG_IMM8 (code, ARMREG_R2, 0);
	} else {
		/* assumes STACK == sizeof (MonoLMF) */
		ARM_LDR_IMM (code, ARMREG_R2, ARMREG_SP, (G_STRUCT_OFFSET (MonoLMF, iregs) + 13*4));
	}
	ARM_STR_IMM (code, ARMREG_R2, ARMREG_R1, G_STRUCT_OFFSET (MonoLMF, eip));

	/*
	 * Now we're ready to call xxx_trampoline ().
	 */
	/* Arg 1: the saved registers. It was put in v1 */
	ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_V1);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ARM_MOV_REG_IMM8 (code, ARMREG_R1, 0);
	} else {
		ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_V3);
	}

	/* Arg 3: the specific argument, stored in v2 */
	ARM_MOV_REG_REG (code, ARMREG_R2, ARMREG_V2);

	if (aot) {
		char *icall_name = g_strdup_printf ("trampoline_func_%d", tramp_type);
		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, icall_name);
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
		load_trampoline = code;
		code += 4;
	}

	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);

	/* OK, the code address is now in r0. Move it to the place on the stack
	 * where IP was saved (it is now no more useful to us and it can be
	 * clobbered). This way we can just restore all the regs in one inst
	 * and branch to IP.
	 */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_V1, (ARMREG_R12 * 4));

	/* Check for thread interruption */
	/* This is not perf critical code so no need to check the interrupt flag */
	/*
	 * Have to call the _force_ variant, since there could be a protected wrapper on the top of the stack.
	 */
	if (aot) {
		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_thread_force_interruption_checkpoint");
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = mono_thread_force_interruption_checkpoint;
		code += 4;
	}
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-arm.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 * The pointer to MonoLMF is in r2.
	 */
	ARM_MOV_REG_REG (code, ARMREG_R2, ARMREG_SP);
	/* ip = previous_lmf */
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_R2, G_STRUCT_OFFSET (MonoLMF, previous_lmf));
	/* lr = lmf_addr */
	ARM_LDR_IMM (code, ARMREG_LR, ARMREG_R2, G_STRUCT_OFFSET (MonoLMF, lmf_addr));
	/* *(lmf_addr) = previous_lmf */
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_LR, G_STRUCT_OFFSET (MonoLMF, previous_lmf));

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore the registers and jump to the code:
	 * Note that IP has been conveniently set to the method addr.
	 */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, sizeof (MonoLMF) - sizeof (guint) * 14);
	ARM_POP_NWB (code, 0x5fff);
	if (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
		ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_IP);
	/* do we need to set sp? */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, (14 * 4));
	if ((tramp_type == MONO_TRAMPOLINE_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT) || (tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
		code = emit_bx (code, ARMREG_LR);
	else
		code = emit_bx (code, ARMREG_IP);

	constants = (gpointer*)code;
	constants [0] = mono_get_lmf_addr;
	constants [1] = (gpointer)mono_get_trampoline_func (tramp_type);

	if (!aot) {
		/* backpatch by emitting the missing instructions skipped above */
		ARM_LDR_IMM (load_get_lmf_addr, ARMREG_R0, ARMREG_PC, (code - load_get_lmf_addr - 8));
		ARM_LDR_IMM (load_trampoline, ARMREG_IP, ARMREG_PC, (code + 4 - load_trampoline - 8));
	}

	code += 8;

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);

	/* Sanity check */
	g_assert ((code - buf) <= GEN_TRAMP_SIZE);

	*code_size = code - buf;

	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
		guint32 code_len;

		/* Initialize the nullified class init trampoline used in the AOT case */
		nullified_class_init_trampoline = mono_arch_get_nullified_class_init_trampoline (&code_len);
	}

	*out_unwind_ops = unwind_ops;

	return buf;
}

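/*
 * mono_arch_get_nullified_class_init_trampoline:
 *
 *   Return a trampoline which simply returns to the caller; it is used to
 * replace class init trampolines once the class has been initialized.
 */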
gpointer
mono_arch_get_nullified_class_init_trampoline (guint32 *code_len)
{
	guint8 *buf, *code;

	code = buf = mono_global_codeman_reserve (16);

	code = emit_bx (code, ARMREG_LR);

	mono_arch_flush_icache (buf, code - buf);

	*code_len = code - buf;

	return buf;
}

#define SPEC_TRAMP_SIZE 24

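/*
 * mono_arch_create_specific_trampoline:
 *
 *   Create a small per-instance trampoline: it pushes every register except
 * pc and sp, then branches to the generic trampoline for TRAMP_TYPE, leaving
 * ARG1 embedded as a literal right after the branch so the generic code can
 * load it through lr.
 */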
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	gpointer *constants;
	guint32 short_branch, size = SPEC_TRAMP_SIZE;

	tramp = mono_get_trampoline_code (tramp_type);

	mono_domain_lock (domain);
	code = buf = mono_domain_code_reserve_align (domain, size, 4);
	if ((short_branch = branch_for_target_reachable (code + 8, tramp))) {
		size = 12;
		mono_domain_code_commit (domain, code, SPEC_TRAMP_SIZE, size);
	}
	mono_domain_unlock (domain);

	/* we could reduce this to 12 bytes if tramp is within reach:
	 * ARM_PUSH ()
	 * ARM_BL ()
	 * method-literal
	 * The called code can access method using the lr register
	 * A 20 byte sequence could be:
	 * ARM_PUSH ()
	 * ARM_MOV_REG_REG (lr, pc)
	 * ARM_LDR_IMM (pc, pc, 0)
	 * method-literal
	 * tramp-literal
	 */
	/* We save all the registers, except PC and SP */
	ARM_PUSH (code, 0x5fff);
	if (short_branch) {
		constants = (gpointer*)code;
		constants [0] = GUINT_TO_POINTER (short_branch | (1 << 24));
		constants [1] = arg1;
		code += 8;
	} else {
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 8); /* temp reg */
		ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
		code = emit_bx (code, ARMREG_R1);

		constants = (gpointer*)code;
		constants [0] = arg1;
		constants [1] = tramp;
		code += 8;
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);

	g_assert ((code - buf) <= size);

	if (code_len)
		*code_len = code - buf;

	return buf;
}

/*
 * mono_arch_get_unbox_trampoline:
 * @gsctx: the generic sharing context
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * when value type methods are called through the vtable we need to unbox the
 * this argument. This method returns a pointer to a trampoline which does
 * unboxing before calling the method
 */
gpointer
mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_pos = 0;
	MonoDomain *domain = mono_domain_get ();

	if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
		this_pos = 1;

	start = code = mono_domain_code_reserve (domain, 16);

	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 4);
	ARM_ADD_REG_IMM8 (code, this_pos, this_pos, sizeof (MonoObject));
	code = emit_bx (code, ARMREG_IP);
	*(guint32*)code = (guint32)addr;
	code += 4;
	mono_arch_flush_icache (start, code - start);
	g_assert ((code - start) <= 16);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	return start;
}

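/*
 * mono_arch_get_static_rgctx_trampoline:
 *
 *   Create a trampoline which sets MONO_ARCH_RGCTX_REG to MRGCTX, then jumps
 * to ADDR.
 */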
gpointer
mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
{
	guint8 *code, *start;
	int buf_len;

	MonoDomain *domain = mono_domain_get ();

	buf_len = 16;

	start = code = mono_domain_code_reserve (domain, buf_len);

	ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, ARMREG_PC, 0);
	ARM_LDR_IMM (code, ARMREG_PC, ARMREG_PC, 0);
	*(guint32*)code = (guint32)mrgctx;
	code += 4;
	*(guint32*)code = (guint32)addr;
	code += 4;

	g_assert ((code - start) <= buf_len);

	mono_arch_flush_icache (start, code - start);

	return start;
}

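/*
 * mono_arch_create_rgctx_lazy_fetch_trampoline:
 *
 *   Create a trampoline which looks up SLOT in the runtime generic context
 * (or MRGCTX) passed in r0 and returns its contents in r0. If any pointer in
 * the chain, or the slot itself, is still NULL, it falls back to a specific
 * trampoline which computes and caches the value.
 */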
gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
{
	guint32 code_size;
	MonoJumpInfo *ji;

	return mono_arch_create_rgctx_lazy_fetch_trampoline_full (slot, &code_size, &ji, FALSE);
}

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline_full (guint32 slot, guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	int tramp_size;
	guint32 code_len;
	guint8 **rgctx_null_jumps;
	int depth, index;
	int i, njumps;
	gboolean mrgctx;

	*ji = NULL;

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	tramp_size = 64 + 16 * depth;

	code = buf = mono_global_codeman_reserve (tramp_size);

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
	njumps = 0;

	/* The vtable/mrgctx is in R0 */
	g_assert (MONO_ARCH_VTABLE_REG == ARMREG_R0);

	if (mrgctx) {
		/* get mrgctx ptr */
		ARM_MOV_REG_REG (code, ARMREG_R1, ARMREG_R0);
	} else {
		/* load rgctx ptr from vtable */
		g_assert (arm_is_imm12 (G_STRUCT_OFFSET (MonoVTable, runtime_generic_context)));
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R0, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context));
		/* is the rgctx ptr null? */
		ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [njumps ++] = code;
		ARM_B_COND (code, ARMCOND_EQ, 0);
	}

	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0) {
			g_assert (arm_is_imm12 (MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT));
			ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT);
		} else {
			ARM_LDR_IMM (code, ARMREG_R1, ARMREG_R1, 0);
		}
		/* is the ptr null? */
		ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [njumps ++] = code;
		ARM_B_COND (code, ARMCOND_EQ, 0);
	}

	/* fetch slot */
	code = mono_arm_emit_load_imm (code, ARMREG_R2, sizeof (gpointer) * (index + 1));
	ARM_LDR_REG_REG (code, ARMREG_R1, ARMREG_R1, ARMREG_R2);
	/* is the slot null? */
	ARM_CMP_REG_IMM (code, ARMREG_R1, 0, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [njumps ++] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	/* otherwise return, result is in R1 */
	ARM_MOV_REG_REG (code, ARMREG_R0, ARMREG_R1);
	code = emit_bx (code, ARMREG_LR);

	g_assert (njumps <= depth + 2);
	for (i = 0; i < njumps; ++i)
		arm_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* Slowpath */

	/* The vtable/mrgctx is still in R0 */

	if (aot) {
		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
	} else {
		tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), &code_len);

		/* Jump to the actual trampoline */
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
		code = emit_bx (code, ARMREG_R1);
		*(gpointer*)code = tramp;
		code += 4;
	}

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	*code_size = code - buf;

	return buf;
}

#define arm_is_imm8(v) ((v) > -256 && (v) < 256)

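/*
 * mono_arch_create_generic_class_init_trampoline:
 *
 *   Create a trampoline which checks the 'initialized' bit of the vtable
 * passed in MONO_ARCH_VTABLE_REG and returns immediately if it is set,
 * otherwise it jumps to the generic class init trampoline to run the class
 * initializer.
 */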
gpointer
mono_arch_create_generic_class_init_trampoline (void)
{
	guint32 code_size;
	MonoJumpInfo *ji;

	return mono_arch_create_generic_class_init_trampoline_full (&code_size, &ji, FALSE);
}

gpointer
mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	static int byte_offset = -1;
	static guint8 bitmask;
	guint8 *jump;
	int tramp_size;
	guint32 code_len, imm8;
	gint rot_amount;

	*ji = NULL;

	tramp_size = 64;

	code = buf = mono_global_codeman_reserve (tramp_size);

	if (byte_offset < 0)
		mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);

	g_assert (arm_is_imm8 (byte_offset));
	ARM_LDRSB_IMM (code, ARMREG_IP, MONO_ARCH_VTABLE_REG, byte_offset);
	imm8 = mono_arm_is_rotated_imm8 (bitmask, &rot_amount);
	g_assert (imm8 >= 0);
	ARM_AND_REG_IMM (code, ARMREG_IP, ARMREG_IP, imm8, rot_amount);
	ARM_CMP_REG_IMM (code, ARMREG_IP, 0, 0);
	jump = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);

	/* Initialized case */
	ARM_MOV_REG_REG (code, ARMREG_PC, ARMREG_LR);

	/* Uninitialized case */
	arm_patch (jump, code);

	if (aot) {
		*ji = mono_patch_info_list_prepend (*ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_PC, ARMREG_PC, ARMREG_R1);
	} else {
		tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT, mono_get_root_domain (), &code_len);

		/* Jump to the actual trampoline */
		ARM_LDR_IMM (code, ARMREG_R1, ARMREG_PC, 0); /* temp reg */
		code = emit_bx (code, ARMREG_R1);
		*(gpointer*)code = tramp;
		code += 4;
	}

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	*code_size = code - buf;

	return buf;
}

#else

guchar*
mono_arch_create_trampoline_code_full (MonoTrampolineType tramp_type, guint32 *code_size, MonoJumpInfo **ji, GSList **out_unwind_ops, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

guchar*
mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_nullified_class_init_trampoline (guint32 *code_len)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline_full (guint32 slot, guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_generic_class_init_trampoline (void)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

#endif /* DISABLE_JIT */