/*
 * tramp-ppc.c: JIT trampoline code for PowerPC
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Paolo Molaro (lupus@ximian.com)
 *   Carlos Valiente <yo@virutass.net>
 *   Andreas Faerber <andreas.faerber@web.de>
 *
 * (C) 2001 Ximian, Inc.
 * (C) 2007-2008 Andreas Faerber
 */

#include <config.h>
#include <glib.h>

#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/arch/ppc/ppc-codegen.h>

#include "mini.h"
#include "mini-ppc.h"

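/*
 * Note: this points at a trampoline that does nothing but return
 * (see mono_arch_get_nullified_class_init_trampoline below). Class-init
 * call sites are patched to it once the class has been initialized.
 */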
static guint8* nullified_class_init_trampoline;

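/*
 * Background note: on ppc64 ABIs that use function descriptors (ELFv1-style),
 * a "function pointer" is really a pointer to a {code, TOC, environment}
 * triple, which is what MonoPPCFunctionDescriptor models here; with toc/env
 * left NULL the descriptor only carries the code address.
 */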
/* Same as mono_create_ftnptr, but doesn't require a domain */
static gpointer
mono_ppc_create_ftnptr (guint8 *code)
{
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
	MonoPPCFunctionDescriptor *ftnptr = mono_global_codeman_reserve (sizeof (MonoPPCFunctionDescriptor));

	ftnptr->code = code;
	ftnptr->toc = NULL;
	ftnptr->env = NULL;

	return ftnptr;
#else
	return code;
#endif
}

/*
 * Return the instruction to jump from code to target, 0 if not
 * reachable with a single instruction
 */
static guint32
branch_for_target_reachable (guint8 *branch, guint8 *target)
{
	gint diff = target - branch;
	g_assert ((diff & 3) == 0);
	if (diff >= 0) {
		if (diff <= 33554431)
			return (18 << 26) | (diff);
	} else {
		/* diff between 0 and -33554432 */
		if (diff >= -33554432)
			return (18 << 26) | (diff & ~0xfc000000);
	}
	return 0;
}

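/*
 * Encoding note for branch_for_target_reachable: the value built above is a
 * PowerPC I-form branch ("b target"): primary opcode 18 in the top 6 bits
 * plus the signed, word-aligned byte displacement. The +/-32 MB limits
 * (33554431 / -33554432) are the reach of that 26-bit displacement.
 * Worked example (illustrative, not from the original source):
 *   diff = -8  =>  (18 << 26) | (-8 & ~0xfc000000) = 0x4bfffff8, i.e. "b .-8".
 */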
/*
 * get_unbox_trampoline:
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * When value type methods are called through the vtable we need to unbox the
 * 'this' argument. This method returns a pointer to a trampoline which does
 * unboxing before calling the method.
 */
gpointer
mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_pos = 3;
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();
	int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;

	addr = mono_get_addr_from_ftnptr (addr);

	mono_domain_lock (domain);
	start = code = mono_domain_code_reserve (domain, size);
	code = mono_ppc_create_pre_code_ftnptr (code);
	short_branch = branch_for_target_reachable (code + 4, addr);
	if (short_branch)
		mono_domain_code_commit (domain, code, size, 8);
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	g_assert ((code - start) <= size);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	return start;
}

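/*
 * Note on mono_arch_get_unbox_trampoline: short-branch reachability is
 * checked at code + 4 because, in the short form, the branch is the second
 * instruction, right after the single addi that advances the 'this'
 * register (r3) past the MonoObject header.
 */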
/*
 * mono_arch_get_static_rgctx_trampoline:
 *
 * Create a trampoline which sets RGCTX_REG to MRGCTX, then jumps to ADDR.
 */
gpointer
mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
{
	guint8 *code, *start, *p;
	guint8 imm_buf [128];
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();
	int imm_size;
	int size = MONO_PPC_32_64_CASE (24, (PPC_LOAD_SEQUENCE_LENGTH * 2) + 8) + PPC_FTNPTR_SIZE;

	addr = mono_get_addr_from_ftnptr (addr);

	/* Compute size of code needed to emit mrgctx */
	p = imm_buf;
	ppc_load_ptr (p, MONO_ARCH_RGCTX_REG, mrgctx);
	imm_size = p - imm_buf;

	mono_domain_lock (domain);
	start = code = mono_domain_code_reserve (domain, size);
	code = mono_ppc_create_pre_code_ftnptr (code);
	short_branch = branch_for_target_reachable (code + imm_size, addr);
	if (short_branch)
		mono_domain_code_commit (domain, code, size, imm_size + 4);
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	g_assert ((code - start) <= size);

	return start;
}

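/*
 * Note on mono_arch_get_static_rgctx_trampoline: the mrgctx load is first
 * emitted into imm_buf only to measure its length (imm_size), so the short
 * branch can be computed at the right offset. In the long form, r0 is used
 * as scratch for the target address so that MONO_ARCH_RGCTX_REG stays free
 * for the mrgctx argument itself.
 */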
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;

	addr = mono_get_addr_from_ftnptr (addr);

	/* This is the 'blrl' instruction */
	--code;

	/*
	 * Note that methods are called also with the bl opcode.
	 */
	if (((*code) >> 26) == 18) {
		/*g_print ("direct patching\n");*/
		ppc_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	/* Sanity check */
	g_assert (mono_ppc_is_direct_call_sequence (code));

	ppc_patch ((guint8*)code, addr);
}

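/*
 * Note on mono_arch_patch_plt_entry below: the asserts check for an
 * addis/ori pair (primary opcodes 15 and 24), whose two 16-bit immediates
 * are reassembled into the 32-bit offset of the jump table slot. When no
 * GOT pointer is passed in, the base is taken from the saved r30, which is
 * presumably where AOT-compiled code keeps it.
 */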
void
mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
{
	guint32 ins1, ins2, offset;

	/* Patch the jump table entry used by the plt entry */

	/* Should be a lis+ori */
	ins1 = ((guint32*)code)[0];
	g_assert (ins1 >> 26 == 15);
	ins2 = ((guint32*)code)[1];
	g_assert (ins2 >> 26 == 24);
	offset = ((ins1 & 0xffff) << 16) | (ins2 & 0xffff);

	/* Either got or regs is set */
	if (!got)
		got = (gpointer*)(gsize) regs [30];
	*(guint8**)((guint8*)got + offset) = addr;
}

void
mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs)
{
	mono_arch_patch_callsite (NULL, code, nullified_class_init_trampoline);
}

void
mono_arch_nullify_plt_entry (guint8 *code, mgreg_t *regs)
{
	if (mono_aot_only && !nullified_class_init_trampoline)
		nullified_class_init_trampoline = mono_aot_get_trampoline ("nullified_class_init_trampoline");

	mono_arch_patch_plt_entry (code, NULL, regs, nullified_class_init_trampoline);
}

/* Stack size for trampoline function
 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
 * + MonoLMF + 14 fp regs + 31 gregs + alignment
 */
#define STACK (((PPC_MINIMAL_STACK_SIZE + 4 * sizeof (mgreg_t) + sizeof (MonoLMF) + 14 * sizeof (double) + 31 * sizeof (mgreg_t)) + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~(MONO_ARCH_FRAME_ALIGNMENT - 1))

/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE 64

#ifdef PPC_USES_FUNCTION_DESCRIPTOR
#define PPC_TOC_REG ppc_r2
#else
#define PPC_TOC_REG -1
#endif

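/*
 * Note: under function-descriptor ABIs r2 is the TOC register, so the
 * register-restore loop in the generic trampoline skips it; on the other
 * targets PPC_TOC_REG is -1 and the check never matches.
 */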
234 * Stack frame description when the generic trampoline is called.
235 * caller frame
236 * --------------------
237 * MonoLMF
238 * -------------------
239 * Saved FP registers 0-13
240 * -------------------
241 * Saved general registers 0-30
242 * -------------------
243 * param area for 3 args to ppc_magic_trampoline
244 * -------------------
245 * linkage area
246 * -------------------
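/*
 * Summary of the generic trampoline: it saves the full register state and
 * builds a MonoLMF on the stack, calls the C-level handler returned by
 * mono_get_trampoline_func () (or resolved through the AOT constant), then
 * restores the saved state and either returns to the caller or tail-jumps
 * through ctr to the code address the handler returned in r3.
 */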
guchar*
mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
{
	guint8 *buf, *code = NULL;
	int i, offset;
	gconstpointer tramp_handler;
	int size = MONO_PPC_32_64_CASE (600, 800);
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;

	/* Now we'll create in 'buf' the PowerPC trampoline code. This
	   is the trampoline code common to all methods */

	code = buf = mono_global_codeman_reserve (size);

	ppc_str_update (code, ppc_r1, -STACK, ppc_r1);

	/* start building the MonoLMF on the stack */
	offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
	for (i = 14; i < 32; i++) {
		ppc_stfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}

	/*
	 * now the integer registers.
	 */
	offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
	ppc_str_multiple (code, ppc_r13, offset, ppc_r1);

	/* Now save the rest of the registers below the MonoLMF struct, first 14
	 * fp regs and then the 31 gregs.
	 */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_stfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}
#define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t)))
	offset = GREGS_OFFSET;
	for (i = 0; i < 31; i++) {
		ppc_str (code, i, offset, ppc_r1);
		offset += sizeof (mgreg_t);
	}

	/* we got here through a jump to the ctr reg, we must save the lr
	 * in the parent frame (we do it here to reduce the size of the
	 * method-specific trampoline)
	 */
	ppc_mflr (code, ppc_r0);
	ppc_str (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-ppc.c
	 */
	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r11);
		ppc_ldptr (code, ppc_r11, 0, ppc_r11);
#endif
		ppc_mtlr (code, ppc_r11);
		ppc_blrl (code);
	} else {
		ppc_load_func (code, ppc_r0, mono_get_lmf_addr);
		ppc_mtlr (code, ppc_r0);
		ppc_blrl (code);
	}
	/* we build the MonoLMF structure on the stack - see mini-ppc.h
	 * The pointer to the struct is put in ppc_r11.
	 */
	ppc_addi (code, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
	ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* new_lmf->previous_lmf = *lmf_addr */
	ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* *(lmf_addr) = r11 */
	ppc_stptr (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	/* save method info (it's stored on the stack, so get it first). */
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP)) {
		ppc_ldr (code, ppc_r0, GREGS_OFFSET, ppc_r1);
		ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
	} else {
		ppc_load (code, ppc_r0, 0);
		ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
	}
	/* store the frame pointer of the calling method */
	ppc_addi (code, ppc_r0, ppc_sp, STACK);
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ppc_li (code, ppc_r0, 0);
	} else {
		ppc_ldr (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
	}
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);

	/*
	 * Now we're ready to call trampoline (mgreg_t *regs, guint8 *code, gpointer value, guint8 *tramp)
	 * Note that the last argument is unused.
	 */
	/* Arg 1: a pointer to the registers */
	ppc_addi (code, ppc_r3, ppc_r1, GREGS_OFFSET);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP)
		ppc_li (code, ppc_r4, 0);
	else
		ppc_ldr (code, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* Arg 3: trampoline argument */
	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
		ppc_ldr (code, ppc_r5, GREGS_OFFSET + MONO_ARCH_VTABLE_REG * sizeof (mgreg_t), ppc_r1);
	else
		ppc_ldr (code, ppc_r5, GREGS_OFFSET, ppc_r1);

	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("trampoline_func_%d", tramp_type));
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r11);
		ppc_ldptr (code, ppc_r11, 0, ppc_r11);
#endif
		ppc_mtlr (code, ppc_r11);
		ppc_blrl (code);
	} else {
		tramp_handler = mono_get_trampoline_func (tramp_type);
		ppc_load_func (code, ppc_r0, tramp_handler);
		ppc_mtlr (code, ppc_r0);
		ppc_blrl (code);
	}

	/* OK, code address is now on r3. Move it to the counter reg
	 * so it will be ready for the final jump: this is safe since we
	 * won't do any more calls.
	 */
	if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r3);
		ppc_ldptr (code, ppc_r3, 0, ppc_r3);
#endif
		ppc_mtctr (code, ppc_r3);
	}

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 * The pointer to MonoLMF is in ppc_r11.
	 */
	ppc_addi (code, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
	/* r5 = previous_lmf */
	ppc_ldptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* r6 = lmf_addr */
	ppc_ldptr (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* *(lmf_addr) = previous_lmf */
	ppc_stptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
	/* restore iregs */
	ppc_ldr_multiple (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
	/* restore fregs */
	for (i = 14; i < 32; i++)
		ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);

	/* restore the volatile registers, we skip r1, of course */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_lfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t));
	ppc_ldr (code, ppc_r0, offset, ppc_r1);
	offset += 2 * sizeof (mgreg_t);
	for (i = 2; i < 13; i++) {
		if (i != PPC_TOC_REG && (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
			ppc_ldr (code, i, offset, ppc_r1);
		offset += sizeof (mgreg_t);
	}

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore stack pointer and LR and jump to the code */
	ppc_ldr (code, ppc_r1, 0, ppc_r1);
	ppc_ldr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
	ppc_mtlr (code, ppc_r11);
	if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
		ppc_blr (code);
	else
		ppc_bcctr (code, 20, 0);

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);

	/* Sanity check */
	g_assert ((code - buf) <= size);

	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
		/* Initialize the nullified class init trampoline */
		nullified_class_init_trampoline = mono_ppc_create_ftnptr (mono_arch_get_nullified_class_init_trampoline (NULL));
	}

	if (info)
		*info = mono_tramp_info_create (mono_get_generic_trampoline_name (tramp_type), buf, code - buf, ji, unwind_ops);

	return buf;
}

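/*
 * A specific trampoline is a small per-argument stub: it materializes arg1
 * (e.g. a MonoMethod* or an rgctx slot) in r0 and branches to the generic
 * trampoline, which spills r0 together with the other registers and (for
 * most trampoline types) reads it back from GREGS_OFFSET as the handler's
 * third argument.
 */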
#define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))

gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	guint32 short_branch;

	tramp = mono_get_trampoline_code (tramp_type);

	mono_domain_lock (domain);
	code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
	short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
#ifdef __mono_ppc64__
	/* FIXME: make shorter if possible */
#else
	if (short_branch)
		mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
#endif
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_load_sequence (code, ppc_r0, (mgreg_t)(gsize) arg1);
		ppc_emit32 (code, short_branch);
	} else {
		/* Prepare the jump to the generic trampoline code.*/
		ppc_load_ptr (code, ppc_r0, tramp);
		ppc_mtctr (code, ppc_r0);

		/* And finally put 'arg1' in r0 and fly! */
		ppc_load_ptr (code, ppc_r0, arg1);
		ppc_bcctr (code, 20, 0);
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);

	g_assert ((code - buf) <= TRAMPOLINE_SIZE);

	if (code_len)
		*code_len = code - buf;

	return buf;
}

static guint8*
emit_trampoline_jump (guint8 *code, guint8 *tramp)
{
	guint32 short_branch = branch_for_target_reachable (code, tramp);

	/* FIXME: we can save a few bytes here by committing if the
	   short branch is possible */
	if (short_branch) {
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, tramp);
		ppc_mtctr (code, ppc_r0);
		ppc_bcctr (code, 20, 0);
	}

	return code;
}

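/*
 * The rgctx lazy fetch trampoline walks the runtime-generic-context arrays:
 * 'depth' is the number of "next array" links that must be followed and
 * 'index' the position of the slot in the final array. If any pointer on
 * the way (or the slot itself) is still NULL, it falls back to the actual
 * RGCTX_LAZY_FETCH trampoline to fill it in.
 */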
gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
{
#ifdef MONO_ARCH_VTABLE_REG
	guint8 *tramp;
	guint8 *code, *buf;
	guint8 **rgctx_null_jumps;
	int tramp_size;
	int depth, index;
	int i;
	gboolean mrgctx;
	MonoJumpInfo *ji = NULL;
	GSList *unwind_ops = NULL;

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
	if (mrgctx)
		tramp_size += 4;
	else
		tramp_size += 12;
	if (aot)
		tramp_size += 32;

	code = buf = mono_global_codeman_reserve (tramp_size);

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));

	if (mrgctx) {
		/* get mrgctx ptr */
		ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
	} else {
		/* load rgctx ptr from vtable */
		ppc_ldptr (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
		/* is the rgctx ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [0] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0)
			ppc_ldptr (code, ppc_r4, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, ppc_r4);
		else
			ppc_ldptr (code, ppc_r4, 0, ppc_r4);
		/* is the ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [i + 1] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	/* fetch slot */
	ppc_ldptr (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
	/* is the slot null? */
	ppc_compare_reg_imm (code, 0, ppc_r4, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [depth + 1] = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	/* otherwise return r4 */
	/* FIXME: if we use r3 as the work register we can avoid this copy */
	ppc_mr (code, ppc_r3, ppc_r4);
	ppc_blr (code);

	for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
		ppc_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* move the rgctx pointer to the VTABLE register */
	ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);

	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
		/* Branch to the trampoline */
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r11, 0, ppc_r11);
#endif
		ppc_mtctr (code, ppc_r11);
		ppc_bcctr (code, PPC_BR_ALWAYS, 0);
	} else {
		tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
			MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);

		/* jump to the actual trampoline */
		code = emit_trampoline_jump (code, tramp);
	}

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	if (info)
		*info = mono_tramp_info_create (mono_get_rgctx_fetch_trampoline_name (slot), buf, code - buf, ji, unwind_ops);

	return buf;
#else
	g_assert_not_reached ();
#endif
}

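/*
 * The generic class init trampoline tests the 'initialized' bit of the
 * MonoVTable passed in MONO_ARCH_VTABLE_REG and returns immediately when it
 * is already set; only uninitialized vtables fall through to the slow-path
 * GENERIC_CLASS_INIT trampoline.
 */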
gpointer
mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	static int byte_offset = -1;
	static guint8 bitmask;
	guint8 *jump;
	int tramp_size;
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;

	tramp_size = MONO_PPC_32_64_CASE (32, 44);
	if (aot)
		tramp_size += 32;

	code = buf = mono_global_codeman_reserve (tramp_size);

	if (byte_offset < 0)
		mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);

	ppc_lbz (code, ppc_r4, byte_offset, MONO_ARCH_VTABLE_REG);
	ppc_andid (code, ppc_r4, ppc_r4, bitmask);
	jump = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);

	ppc_blr (code);

	ppc_patch (jump, code);

	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
		/* Branch to the trampoline */
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r11, 0, ppc_r11);
#endif
		ppc_mtctr (code, ppc_r11);
		ppc_bcctr (code, PPC_BR_ALWAYS, 0);
	} else {
		tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
			mono_get_root_domain (), NULL);

		/* jump to the actual trampoline */
		code = emit_trampoline_jump (code, tramp);
	}

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	if (info)
		*info = mono_tramp_info_create (g_strdup_printf ("generic_class_init_trampoline"), buf, code - buf, ji, unwind_ops);

	return buf;
}

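/*
 * The nullified class init trampoline is a single blr: it exists only so
 * that patched class-init call sites have something harmless to call.
 */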
gpointer
mono_arch_get_nullified_class_init_trampoline (MonoTrampInfo **info)
{
	guint8 *code, *buf;
	guint32 tramp_size = 64;

	code = buf = mono_global_codeman_reserve (tramp_size);
	ppc_blr (code);

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	if (info)
		*info = mono_tramp_info_create (g_strdup_printf ("nullified_class_init_trampoline"), buf, code - buf, NULL, NULL);

	return buf;
}

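/*
 * mono_arch_get_call_target decodes the instruction just before 'code': if
 * it is a relative call (primary opcode 18 with the LK bit set and AA clear,
 * i.e. a 'bl'), it recovers the branch target from the displacement field,
 * otherwise it returns NULL.
 */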
guint8*
mono_arch_get_call_target (guint8 *code)
{
	/* Should be a bl */
	guint32 ins = ((guint32*)(gpointer)code) [-1];

	if ((ins >> 26 == 18) && ((ins & 1) == 1) && ((ins & 2) == 0)) {
		gint32 disp = (((gint32)ins) >> 2) & 0xffffff;
		guint8 *target = code - 4 + (disp * 4);

		return target;
	} else {
		return NULL;
	}
}

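/*
 * Note on mono_arch_get_plt_info_offset below: the offset is read from a
 * fixed word index into the PLT entry; the two indices differ presumably
 * because the function-descriptor variant of the entry is longer (an
 * assumption based on the two constants, not something stated here).
 */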
guint32
mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code)
{
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
	return ((guint32*)plt_entry) [8];
#else
	return ((guint32*)plt_entry) [6];
#endif
}