[mono-project.git] / mono / mini / tramp-ppc.c
/**
 * \file
 * JIT trampoline code for PowerPC
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Paolo Molaro (lupus@ximian.com)
 *   Carlos Valiente <yo@virutass.net>
 *   Andreas Faerber <andreas.faerber@web.de>
 *
 * (C) 2001 Ximian, Inc.
 * (C) 2007-2008 Andreas Faerber
 */

#include <config.h>
#include <glib.h>

#include <mono/metadata/abi-details.h>
#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/arch/ppc/ppc-codegen.h>

#include "mini.h"
#include "mini-ppc.h"
#include "mini-runtime.h"
#include "mono/utils/mono-tls-inline.h"

#if 0
/* Same as mono_create_ftnptr, but doesn't require a domain */
static gpointer
mono_ppc_create_ftnptr (guint8 *code)
{
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
	MonoPPCFunctionDescriptor *ftnptr = mono_global_codeman_reserve (sizeof (MonoPPCFunctionDescriptor));

	ftnptr->code = code;
	ftnptr->toc = NULL;
	ftnptr->env = NULL;

	MONO_PROFILER_RAISE (jit_code_buffer, (ftnptr, sizeof (MonoPPCFunctionDescriptor), MONO_PROFILER_CODE_BUFFER_HELPER, NULL));

	return ftnptr;
#else
	return code;
#endif
}
#endif

/*
 * Return the instruction to jump from code to target, 0 if not
 * reachable with a single instruction
 */
static guint32
branch_for_target_reachable (guint8 *branch, guint8 *target)
{
	gint diff = target - branch;
	g_assert ((diff & 3) == 0);
	if (diff >= 0) {
		if (diff <= 33554431)
			return (18 << 26) | (diff);
	} else {
		/* diff between 0 and -33554432 */
		if (diff >= -33554432)
			return (18 << 26) | (diff & ~0xfc000000);
	}
	return 0;
}

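/*
 * Worked example (illustrative only): the word returned above is a PowerPC
 * I-form unconditional branch "b target" with AA = LK = 0.  For a forward
 * displacement of 0x1234 bytes the encoding is
 *   (18 << 26) | 0x1234 = 0x48001234,
 * and for a backward displacement of -8 bytes the masked two's-complement
 * immediate gives
 *   (18 << 26) | (-8 & ~0xfc000000) = 0x4bfffff8.
 * Displacements outside [-33554432, 33554431] (roughly +/- 32 MB) do not fit
 * in the branch immediate, so the callers below fall back to loading the full
 * address into a register and branching through CTR.
 */
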
/*
 * get_unbox_trampoline:
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * when value type methods are called through the vtable we need to unbox the
 * this argument. This method returns a pointer to a trampoline which does
 * unboxing before calling the method
 */
gpointer
mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_pos = 3;
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();
	int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;

	addr = mono_get_addr_from_ftnptr (addr);

	mono_domain_lock (domain);
	start = code = mono_domain_code_reserve (domain, size);
	code = mono_ppc_create_pre_code_ftnptr (code);
	short_branch = branch_for_target_reachable (code + 4, (guint8*)addr);
	if (short_branch)
		mono_domain_code_commit (domain, code, size, 8);
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_addi (code, this_pos, this_pos, MONO_ABI_SIZEOF (MonoObject));
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_addi (code, this_pos, this_pos, MONO_ABI_SIZEOF (MonoObject));
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	MONO_PROFILER_RAISE (jit_code_buffer, (start, code - start, MONO_PROFILER_CODE_BUFFER_UNBOX_TRAMPOLINE, m));
	g_assert ((code - start) <= size);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, NULL), domain);

	return start;
}

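/*
 * For reference, the short-branch form generated above is just two
 * instructions (sketched here; the branch target is filled in per call site):
 *
 *   addi r3, r3, sizeof (MonoObject)   ; skip the object header, so the
 *                                      ; callee sees a pointer to the raw
 *                                      ; value type data as 'this'
 *   b    addr                          ; tail-jump into the compiled method
 *
 * The long form replaces the single branch with a full address load into r0
 * followed by mtctr/bcctr.
 */
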
/*
 * mono_arch_get_static_rgctx_trampoline:
 *
 *   Create a trampoline which sets RGCTX_REG to ARG, then jumps to ADDR.
 */
gpointer
mono_arch_get_static_rgctx_trampoline (gpointer arg, gpointer addr)
{
	guint8 *code, *start, *p;
	guint8 imm_buf [128];
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();
	int imm_size;
	int size = MONO_PPC_32_64_CASE (24, (PPC_LOAD_SEQUENCE_LENGTH * 2) + 8) + PPC_FTNPTR_SIZE;

	addr = mono_get_addr_from_ftnptr (addr);

	/* Compute size of code needed to emit the arg */
	p = imm_buf;
	ppc_load_ptr (p, MONO_ARCH_RGCTX_REG, arg);
	imm_size = p - imm_buf;

	mono_domain_lock (domain);
	start = code = mono_domain_code_reserve (domain, size);
	code = mono_ppc_create_pre_code_ftnptr (code);
	short_branch = branch_for_target_reachable (code + imm_size, (guint8*)addr);
	if (short_branch)
		mono_domain_code_commit (domain, code, size, imm_size + 4);
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, arg);
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, arg);
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	MONO_PROFILER_RAISE (jit_code_buffer, (start, code - start, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL));
	g_assert ((code - start) <= size);

	mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, NULL), domain);

	return start;
}

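/*
 * Note on the imm_buf trick above: ppc_load_ptr () is first emitted into a
 * throw-away buffer purely to measure how many bytes the immediate load of
 * 'arg' takes on this target (the sequence length differs between 32-bit and
 * 64-bit PowerPC), so the short-branch reachability test can be made against
 * the real address of the branch instruction that follows it.
 */
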
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;

	addr = (guint8*)mono_get_addr_from_ftnptr (addr);

	/* This is the 'blrl' instruction */
	--code;

	/*
	 * Note that methods are called also with the bl opcode.
	 */
	if (((*code) >> 26) == 18) {
		/*g_print ("direct patching\n");*/
		ppc_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	/* Sanity check */
	g_assert (mono_ppc_is_direct_call_sequence (code));

	ppc_patch ((guint8*)code, addr);
}

void
mono_arch_patch_plt_entry (guint8 *code, gpointer *got, host_mgreg_t *regs, guint8 *addr)
{
	guint32 ins1, ins2, offset;

	/* Patch the jump table entry used by the plt entry */

	/* Should be a lis+ori */
	ins1 = ((guint32*)code)[0];
	g_assert (ins1 >> 26 == 15);
	ins2 = ((guint32*)code)[1];
	g_assert (ins2 >> 26 == 24);
	offset = ((ins1 & 0xffff) << 16) | (ins2 & 0xffff);

	/* Either got or regs is set */
	if (!got)
		got = (gpointer*)(gsize) regs [30];
	*(guint8**)((guint8*)got + offset) = addr;
}

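/*
 * Worked example (illustrative values; the code above only checks the primary
 * opcodes, not the register numbers): if the PLT entry starts with
 *   lis r11, 0x0001        ; ins1 = 0x3d600001  (primary opcode 15)
 *   ori r11, r11, 0x2340   ; ins2 = 0x616b2340  (primary opcode 24)
 * then offset = (0x0001 << 16) | 0x2340 = 0x12340, i.e. the jump table slot
 * patched above lives 0x12340 bytes into the GOT.
 */
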
/* Stack size for trampoline function
 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
 * + MonoLMF + 14 fp regs + 31 gregs + alignment
 */
#define STACK (((PPC_MINIMAL_STACK_SIZE + 4 * sizeof (target_mgreg_t) + sizeof (MonoLMF) + 14 * sizeof (double) + 31 * sizeof (target_mgreg_t)) + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~(MONO_ARCH_FRAME_ALIGNMENT - 1))

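/*
 * The trailing "+ (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~(MONO_ARCH_FRAME_ALIGNMENT - 1)"
 * rounds the raw size up to the next frame-alignment boundary.  For example,
 * assuming a 16-byte alignment and a hypothetical raw size of 700 bytes:
 *   (700 + 15) & ~15 = 715 & ~15 = 704.
 */
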
/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE 64

#ifdef PPC_USES_FUNCTION_DESCRIPTOR
#define PPC_TOC_REG ppc_r2
#else
#define PPC_TOC_REG -1
#endif

/*
 * Stack frame description when the generic trampoline is called.
 * caller frame
 * --------------------
 * MonoLMF
 * -------------------
 * Saved FP registers 0-13
 * -------------------
 * Saved general registers 0-30
 * -------------------
 * param area for 3 args to ppc_magic_trampoline
 * -------------------
 * linkage area
 * -------------------
 */
guchar*
mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
{
	const char *tramp_name;
	guint8 *buf, *code = NULL, *exception_branch;
	int i, offset, offset_r14 = 0;
	gconstpointer tramp_handler;
	int size = MONO_PPC_32_64_CASE (700, 900);
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;

	/* Now we'll create in 'buf' the PowerPC trampoline code. This
	   is the trampoline code common to all methods */

	code = buf = mono_global_codeman_reserve (size);

	ppc_str_update (code, ppc_r1, -STACK, ppc_r1);

	/* start building the MonoLMF on the stack */
	offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
	for (i = 14; i < 32; i++) {
		ppc_stfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	/*
	 * now the integer registers.
	 */
	offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
	ppc_str_multiple (code, ppc_r13, offset, ppc_r1);

	/* Now save the rest of the registers below the MonoLMF struct, first 14
	 * fp regs and then the 31 gregs.
	 */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_stfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}
#define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (target_mgreg_t)))
	offset = GREGS_OFFSET;
	for (i = 0; i < 31; i++) {
		ppc_str (code, i, offset, ppc_r1);
		if (i == ppc_r14) {
			offset_r14 = offset;
		}
		offset += sizeof (target_mgreg_t);
	}

	/* we got here through a jump to the ctr reg, we must save the lr
	 * in the parent frame (we do it here to reduce the size of the
	 * method-specific trampoline)
	 */
	ppc_mflr (code, ppc_r0);
	ppc_str (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-ppc.c
	 */
	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_get_lmf_addr));
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (target_mgreg_t), ppc_r12);
		ppc_ldptr (code, ppc_r12, 0, ppc_r12);
#endif
		ppc_mtlr (code, ppc_r12);
		ppc_blrl (code);
	} else {
		ppc_load_func (code, PPC_CALL_REG, mono_get_lmf_addr);
		ppc_mtlr (code, PPC_CALL_REG);
		ppc_blrl (code);
	}
	/* we build the MonoLMF structure on the stack - see mini-ppc.h
	 * The pointer to the struct is put in ppc_r12.
	 */
	ppc_addi (code, ppc_r12, ppc_sp, STACK - sizeof (MonoLMF));
	ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r12);
	/* new_lmf->previous_lmf = *lmf_addr */
	ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r12);
	/* *(lmf_addr) = r12 */
	ppc_stptr (code, ppc_r12, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	/* save method info (it's stored on the stack, so get it first). */
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP)) {
		ppc_ldr (code, ppc_r0, GREGS_OFFSET, ppc_r1);
		ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r12);
	} else {
		ppc_load (code, ppc_r0, 0);
		ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r12);
	}
	/* store the frame pointer of the calling method */
	ppc_addi (code, ppc_r0, ppc_sp, STACK);
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r12);
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ppc_li (code, ppc_r0, 0);
	} else {
		ppc_ldr (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
	}
	ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r12);

	/*
	 * Now we are ready to call trampoline (target_mgreg_t *regs, guint8 *code, gpointer value, guint8 *tramp)
	 * Note that the last argument is unused.
	 */
	/* Arg 1: a pointer to the registers */
	ppc_addi (code, ppc_r3, ppc_r1, GREGS_OFFSET);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP)
		ppc_li (code, ppc_r4, 0);
	else
		ppc_ldr (code, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* Arg 3: trampoline argument */
	ppc_ldr (code, ppc_r5, GREGS_OFFSET, ppc_r1);

	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, GINT_TO_POINTER (mono_trampoline_type_to_jit_icall_id (tramp_type)));
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (target_mgreg_t), ppc_r12);
		ppc_ldptr (code, ppc_r12, 0, ppc_r12);
#endif
		ppc_mtlr (code, ppc_r12);
		ppc_blrl (code);
	} else {
		tramp_handler = mono_get_trampoline_func (tramp_type);
		ppc_load_func (code, PPC_CALL_REG, tramp_handler);
		ppc_mtlr (code, PPC_CALL_REG);
		ppc_blrl (code);
	}

	/* OK, the code address is now in r3, move it to r14 for now. */
	if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r2, sizeof (target_mgreg_t), ppc_r3);
		ppc_ldptr (code, ppc_r3, 0, ppc_r3);
#endif
		ppc_mr (code, ppc_r14, ppc_r3);
	} else {
		// TODO: is function descriptor unpacking necessary here?
		/* we clobber r3 during interruption checking, so move it somewhere else */
		ppc_mr (code, ppc_r14, ppc_r3);
	}

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 * The pointer to MonoLMF is in ppc_r12.
	 */
	ppc_addi (code, ppc_r12, ppc_r1, STACK - sizeof (MonoLMF));
	/* r3 = previous_lmf */
	ppc_ldptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r12);
	/* r12 = lmf_addr */
	ppc_ldptr (code, ppc_r12, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r12);
	/* *(lmf_addr) = previous_lmf */
	ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r12);

	/* thread interruption check */
	if (aot) {
		g_error ("Not implemented");
	} else {
		gconstpointer checkpoint = (gconstpointer)mono_thread_force_interruption_checkpoint_noraise;
		ppc_load_func (code, PPC_CALL_REG, checkpoint);
		ppc_mtlr (code, PPC_CALL_REG);
	}
	ppc_blrl (code);

	ppc_compare_reg_imm (code, 0, ppc_r3, 0);
	exception_branch = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);

	/* exception case */

	/* restore caller frame, as we want to throw from there */
	ppc_ldr (code, ppc_r14, offset_r14, ppc_r1); /* unclobber r14 */
	ppc_ldr (code, ppc_r1, 0, ppc_r1);
	ppc_ldr (code, ppc_r12, PPC_RET_ADDR_OFFSET, ppc_r1);
	ppc_mtlr (code, ppc_r12);

	if (aot) {
		g_error ("Not implemented");
	} else {
		ppc_load_func (code, PPC_CALL_REG, mono_get_rethrow_preserve_exception_addr ());
		ppc_ldr (code, PPC_CALL_REG, 0, PPC_CALL_REG);
		ppc_mtctr (code, PPC_CALL_REG);
	}
	ppc_bcctr (code, 20, 0);
	ppc_break (code); /* never reached */

	ppc_patch (exception_branch, code);

	/* no exception case */
	if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
		/* we don't do any calls anymore, so code address can be safely moved
		 * into counter register */
		ppc_mtctr (code, ppc_r14);
	} else {
		ppc_mr (code, ppc_r3, ppc_r14);
	}

	ppc_addi (code, ppc_r12, ppc_r1, STACK - sizeof (MonoLMF));
	/* restore iregs */
	ppc_ldr_multiple (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r12);
	/* restore fregs */
	for (i = 14; i < 32; i++)
		ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r12);

	/* restore the volatile registers, we skip r1, of course */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_lfd (code, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (target_mgreg_t));
	ppc_ldr (code, ppc_r0, offset, ppc_r1);
	offset += 2 * sizeof (target_mgreg_t);
	for (i = 2; i < 13; i++) {
		if (i != PPC_TOC_REG && (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
			ppc_ldr (code, i, offset, ppc_r1);
		offset += sizeof (target_mgreg_t);
	}

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore stack pointer and LR and jump to the code */
	ppc_ldr (code, ppc_r1, 0, ppc_r1);
	ppc_ldr (code, ppc_r12, PPC_RET_ADDR_OFFSET, ppc_r1);
	ppc_mtlr (code, ppc_r12);
	if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
		ppc_blr (code);
	else
		ppc_bcctr (code, 20, 0);

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);
	MONO_PROFILER_RAISE (jit_code_buffer, (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL));

	/* Sanity check */
	g_assert ((code - buf) <= size);

	g_assert (info);
	tramp_name = mono_get_generic_trampoline_name (tramp_type);
	*info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);

	return buf;
}

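/*
 * Rough sketch of what the code emitted above does at run time (illustrative
 * pseudo-code only; AOT and interruption-checkpoint details omitted):
 *
 *   allocate a STACK-byte frame, save all registers and build a MonoLMF;
 *   res = trampoline_func (&saved_regs, caller_ip, saved_r0, NULL);
 *          (saved_r0 holds the per-trampoline argument, see below)
 *   if (an exception is pending)
 *       rethrow it from the caller's frame;
 *   pop the MonoLMF and restore the saved registers;
 *   if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
 *       return res to the caller;
 *   else
 *       jump to res, the freshly compiled or resolved code;
 */
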
#define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	guint32 short_branch;

	tramp = mono_get_trampoline_code (tramp_type);

	mono_domain_lock (domain);
	code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
	short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
#ifdef __mono_ppc64__
	/* FIXME: make shorter if possible */
#else
	if (short_branch)
		mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
#endif
	mono_domain_unlock (domain);

	if (short_branch) {
		ppc_load_sequence (code, ppc_r0, (target_mgreg_t)(gsize) arg1);
		ppc_emit32 (code, short_branch);
	} else {
		/* Prepare the jump to the generic trampoline code. */
		ppc_load_ptr (code, ppc_r0, tramp);
		ppc_mtctr (code, ppc_r0);

		/* And finally put 'arg1' in r0 and fly! */
		ppc_load_ptr (code, ppc_r0, arg1);
		ppc_bcctr (code, 20, 0);
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (buf, code - buf);
	MONO_PROFILER_RAISE (jit_code_buffer, (buf, code - buf, MONO_PROFILER_CODE_BUFFER_SPECIFIC_TRAMPOLINE, mono_get_generic_trampoline_simple_name (tramp_type)));

	g_assert ((code - buf) <= TRAMPOLINE_SIZE);

	if (code_len)
		*code_len = code - buf;

	return buf;
}

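/*
 * The specific trampoline emitted above therefore boils down to (long form
 * shown; the multi-instruction load sequences are abbreviated):
 *
 *   <load address of the generic trampoline into r0>
 *   mtctr r0
 *   <load arg1 into r0>
 *   bcctr                  ; enter the generic trampoline
 *
 * The only per-instance state is arg1, which the generic trampoline later
 * recovers from the saved copy of r0 in its register area on the stack.
 */
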
static guint8*
emit_trampoline_jump (guint8 *code, guint8 *tramp)
{
	guint32 short_branch = branch_for_target_reachable (code, tramp);

	/* FIXME: we can save a few bytes here by committing if the
	   short branch is possible */
	if (short_branch) {
		ppc_emit32 (code, short_branch);
	} else {
		ppc_load_ptr (code, ppc_r0, tramp);
		ppc_mtctr (code, ppc_r0);
		ppc_bcctr (code, 20, 0);
	}

	return code;
}

gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	guint8 **rgctx_null_jumps;
	int tramp_size;
	int depth, index;
	int i;
	gboolean mrgctx;
	MonoJumpInfo *ji = NULL;
	GSList *unwind_ops = NULL;

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (target_mgreg_t);
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
	if (mrgctx)
		tramp_size += 4;
	else
		tramp_size += 12;
	if (aot)
		tramp_size += 32;

	code = buf = mono_global_codeman_reserve (tramp_size);

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));

	if (mrgctx) {
		/* get mrgctx ptr */
		ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
	} else {
		/* load rgctx ptr from vtable */
		ppc_ldptr (code, ppc_r4, MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
		/* is the rgctx ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [0] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0)
			ppc_ldptr (code, ppc_r4, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, ppc_r4);
		else
			ppc_ldptr (code, ppc_r4, 0, ppc_r4);
		/* is the ptr null? */
		ppc_compare_reg_imm (code, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [i + 1] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	/* fetch slot */
	ppc_ldptr (code, ppc_r4, sizeof (target_mgreg_t) * (index + 1), ppc_r4);
	/* is the slot null? */
	ppc_compare_reg_imm (code, 0, ppc_r4, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [depth + 1] = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	/* otherwise return r4 */
	/* FIXME: if we use r3 as the work register we can avoid this copy */
	ppc_mr (code, ppc_r3, ppc_r4);
	ppc_blr (code);

	for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
		ppc_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* move the rgctx pointer to the VTABLE register */
	ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);

	if (aot) {
		code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_SPECIFIC_TRAMPOLINE_LAZY_FETCH_ADDR, GUINT_TO_POINTER (slot));
		/* Branch to the trampoline */
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
		ppc_ldptr (code, ppc_r12, 0, ppc_r12);
#endif
		ppc_mtctr (code, ppc_r12);
		ppc_bcctr (code, PPC_BR_ALWAYS, 0);
	} else {
		tramp = (guint8*)mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
			MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);

		/* jump to the actual trampoline */
		code = emit_trampoline_jump (code, tramp);
	}

	mono_arch_flush_icache (buf, code - buf);
	MONO_PROFILER_RAISE (jit_code_buffer, (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL));

	g_assert (code - buf <= tramp_size);

	char *name = mono_get_rgctx_fetch_trampoline_name (slot);
	*info = mono_tramp_info_create (name, buf, code - buf, ji, unwind_ops);
	g_free (name);

	return buf;
}

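/*
 * For clarity, the fast path emitted above is roughly equivalent to the
 * following C, with 'depth' and 'index' being the values computed from 'slot'
 * at the top of the function (sketch only, never compiled; the function and
 * variable names are made up for this example):
 */
#if 0
static gpointer
rgctx_lazy_fetch_fast_path (gpointer vtable_or_mrgctx, gboolean mrgctx, int depth, int index)
{
	gpointer *p;
	int i;

	if (mrgctx)
		p = (gpointer*)vtable_or_mrgctx;
	else if (!(p = (gpointer*)((MonoVTable*)vtable_or_mrgctx)->runtime_generic_context))
		return NULL;	/* fall back to the real RGCTX_LAZY_FETCH trampoline */
	for (i = 0; i < depth; ++i) {
		if (mrgctx && i == 0)
			p = *(gpointer**)((guint8*)p + MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT);
		else
			p = *(gpointer**)p;
		if (!p)
			return NULL;	/* fall back */
	}
	return p [index + 1];	/* a NULL slot also falls back */
}
#endif
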
guint8*
mono_arch_get_call_target (guint8 *code)
{
	/* Should be a bl */
	guint32 ins = ((guint32*)code) [-1];

	if ((ins >> 26 == 18) && ((ins & 1) == 1) && ((ins & 2) == 0)) {
		gint32 disp = (((gint32)ins) >> 2) & 0xffffff;
		guint8 *target = code - 4 + (disp * 4);

		return target;
	} else {
		return NULL;
	}
}

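/*
 * Worked example (illustrative): 'code' points just past the call, so the
 * bl being decoded sits at code - 4.  For ins = 0x48000011 ("bl +16"):
 *   ins >> 26 = 18, AA = 0, LK = 1, so the instruction is accepted;
 *   disp = (0x48000011 >> 2) & 0xffffff = 4;
 *   target = code - 4 + 4 * 4 = (address of the bl) + 16.
 */
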
guint32
mono_arch_get_plt_info_offset (guint8 *plt_entry, host_mgreg_t *regs, guint8 *code)
{
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
	return ((guint32*)plt_entry) [8];
#else
	return ((guint32*)plt_entry) [6];
#endif
}