2009-01-07 Zoltan Varga <vargaz@gmail.com>
[mono-project.git] / mono / mini / tramp-ppc.c
blob29773ca7a47ef0cf6766b9327ef6953d75fe17a4
/*
 * tramp-ppc.c: JIT trampoline code for PowerPC
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Paolo Molaro (lupus@ximian.com)
 *   Carlos Valiente <yo@virutass.net>
 *
 * (C) 2001 Ximian, Inc.
 */
12 #include <config.h>
13 #include <glib.h>
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/marshal.h>
17 #include <mono/metadata/tabledefs.h>
18 #include <mono/arch/ppc/ppc-codegen.h>
20 #include "mini.h"
21 #include "mini-ppc.h"
24 * Return the instruction to jump from code to target, 0 if not
25 * reachable with a single instruction
27 static guint32
28 branch_for_target_reachable (guint8 *branch, guint8 *target)
30 gint diff = target - branch;
31 g_assert ((diff & 3) == 0);
32 if (diff >= 0) {
33 if (diff <= 33554431)
34 return (18 << 26) | (diff);
35 } else {
36 /* diff between 0 and -33554432 */
37 if (diff >= -33554432)
38 return (18 << 26) | (diff & ~0xfc000000);
40 return 0;
/**
 * get_unbox_trampoline:
 * @gsctx: the generic sharing context
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * when value type methods are called through the vtable we need to unbox the
 * this argument. This method returns a pointer to a trampoline which does
 * unboxing before calling the method
 */
gpointer
mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_pos = 3;	/* 'this' normally arrives in r3 */
	guint32 short_branch;
	MonoDomain *domain = mono_domain_get ();

	/* if the method returns a value type, 'this' is shifted to r4 —
	 * presumably because r3 carries a return-buffer pointer; confirm
	 * against the PPC calling convention code in mini-ppc.c */
	if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
		this_pos = 4;

	mono_domain_lock (domain);
	/* reserve the worst-case 20 bytes up front */
	start = code = mono_code_manager_reserve (domain->code_mp, 20);
	/* code + 4: the branch will be the second instruction, after the addi */
	short_branch = branch_for_target_reachable (code + 4, addr);
	if (short_branch)
		/* only 8 bytes (addi + b) are needed; give the rest back */
		mono_code_manager_commit (domain->code_mp, code, 20, 8);
	mono_domain_unlock (domain);

	if (short_branch) {
		/* skip the MonoObject header so 'this' points at the raw value */
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_emit32 (code, short_branch);
	} else {
		/* target out of direct-branch range: jump through the count register */
		ppc_load (code, ppc_r0, addr);
		ppc_mtctr (code, ppc_r0);
		ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
		ppc_bcctr (code, 20, 0);
	}
	mono_arch_flush_icache (start, code - start);
	g_assert ((code - start) <= 20);
	/*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
	g_print ("unbox code is at %p for method at %p\n", start, addr);*/

	return start;
}
/*
 * Patch the call site whose return address is @code_ptr so that it
 * transfers control to @addr. Handles two call shapes: a direct
 * 'bl' (patched in place) and the thunk-less lis/ori/mtlr/blrl
 * sequence. Anything else is a bug.
 */
void
mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
{
	guint32 *code = (guint32*)code_ptr;
	/* This is the 'blrl' instruction */
	--code;

	/*
	 * Note that methods are called also with the bl opcode.
	 */
	if (((*code) >> 26) == 18) {	/* primary opcode 18 == b/bl */
		/*g_print ("direct patching\n");*/
		ppc_patch ((guint8*)code, addr);
		mono_arch_flush_icache ((guint8*)code, 4);
		return;
	}

	/* Sanity check: instruction must be 'blrl' */
	g_assert(*code == 0x4e800021);

	/* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
	if ((code [-1] >> 26) == 31 && (code [-2] >> 26) == 24 && (code [-3] >> 26) == 15) {
		/* NOTE(review): unlike the bl case above there is no explicit
		 * icache flush here — presumably ppc_patch flushes; verify. */
		ppc_patch ((guint8*)code, addr);
		return;
	}
	g_assert_not_reached ();
}
/* PLT entries are not supported by this backend: reaching here is a bug. */
void
mono_arch_patch_plt_entry (guint8 *code, guint8 *addr)
{
	g_assert_not_reached ();
}
122 void
123 mono_arch_nullify_class_init_trampoline (guint8 *code, gssize *regs)
125 return;
/* PLT entries are not supported by this backend: reaching here is a bug. */
void
mono_arch_nullify_plt_entry (guint8 *code)
{
	g_assert_not_reached ();
}
134 /* Stack size for trampoline function
135 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
136 * + MonoLMF + 14 fp regs + 13 gregs + alignment
137 * #define STACK (PPC_MINIMAL_STACK_SIZE + 4 * sizeof (gulong) + sizeof (MonoLMF) + 14 * sizeof (double) + 13 * (sizeof (gulong)))
138 * STACK would be 444 for 32 bit darwin
140 #define STACK (448)
142 /* Method-specific trampoline code fragment size */
143 #define METHOD_TRAMPOLINE_SIZE 64
145 /* Jump-specific trampoline code fragment size */
146 #define JUMP_TRAMPOLINE_SIZE 64
/*
 * Stack frame description when the generic trampoline is called.
 * caller frame
 * --------------------
 *  MonoLMF
 * -------------------
 *  Saved FP registers 0-13
 * -------------------
 *  Saved general registers 0-12
 * -------------------
 *  param area for 3 args to ppc_magic_trampoline
 * -------------------
 *  linkage area
 * -------------------
 */

/*
 * mono_arch_create_trampoline_code:
 * @tramp_type: which kind of trampoline to generate
 *
 * Emits the generic trampoline shared by all methods for a given
 * trampoline type: it saves the full register state into a MonoLMF
 * plus a spill area on the stack, calls the C trampoline handler,
 * restores the state and jumps to (or returns to) the resolved code.
 * Returns a pointer to the generated code.
 */
guchar*
mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
{
	guint8 *buf, *code = NULL;
	int i, offset;
	gconstpointer tramp_handler;

	/* Now we'll create in 'buf' the PowerPC trampoline code. This
	   is the trampoline code common to all methods */

	code = buf = mono_global_codeman_reserve (512);

	/* allocate our frame and save the old r1 at its base */
	ppc_stwu (buf, ppc_r1, -STACK, ppc_r1);

	/* start building the MonoLMF on the stack */
	offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
	/* non-volatile fp regs f14-f31 go into the LMF fregs area */
	for (i = 14; i < 32; i++) {
		ppc_stfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	/*
	 * now the integer registers.
	 */
	offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
	/* stmw stores r13..r31 in one instruction */
	ppc_stmw (buf, ppc_r13, ppc_r1, offset);

	/* Now save the rest of the registers below the MonoLMF struct, first 14
	 * fp regs and then the 13 gregs.
	 */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_stfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
/* offset (from r1) of the saved r0..r12 block; the trampoline handler
 * receives a pointer to this block as its 'regs' argument */
#define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (13 * sizeof (gulong)))
	offset = GREGS_OFFSET;
	for (i = 0; i < 13; i++) {
		ppc_stw (buf, i, offset, ppc_r1);
		offset += sizeof (gulong);
	}
	/* we got here through a jump to the ctr reg, we must save the lr
	 * in the parent frame (we do it here to reduce the size of the
	 * method-specific trampoline)
	 */
	ppc_mflr (buf, ppc_r0);
	ppc_stw (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);

	/* ok, now we can continue with the MonoLMF setup, mostly untouched
	 * from emit_prolog in mini-ppc.c
	 */
	ppc_load (buf, ppc_r0, mono_get_lmf_addr);
	ppc_mtlr (buf, ppc_r0);
	ppc_blrl (buf);
	/* we build the MonoLMF structure on the stack - see mini-ppc.h
	 * The pointer to the struct is put in ppc_r11.
	 */
	ppc_addi (buf, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
	/* mono_get_lmf_addr's result is in r3: link the new LMF in */
	ppc_stw (buf, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* new_lmf->previous_lmf = *lmf_addr */
	ppc_lwz (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	ppc_stw (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* *(lmf_addr) = r11 */
	ppc_stw (buf, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
	/* save method info (it's stored on the stack, so get it first and put it
	 * in r5 as it's the third argument to the function)
	 */
	if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
		/* for generic class init the argument is the saved first-arg register */
		ppc_lwz (buf, ppc_r5, GREGS_OFFSET + PPC_FIRST_ARG_REG * sizeof (gpointer), ppc_r1);
	else
		/* otherwise it is the saved r0 (loaded by the specific trampoline) */
		ppc_lwz (buf, ppc_r5, GREGS_OFFSET, ppc_r1);
	if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
		ppc_stw (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
	ppc_stw (buf, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
	/* save the IP (caller ip) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		/* jump trampolines have no meaningful return address */
		ppc_li (buf, ppc_r0, 0);
	} else {
		ppc_lwz (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
	}
	ppc_stw (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);

	/*
	 * Now we're ready to call trampoline (gssize *regs, guint8 *code, gpointer value, guint8 *tramp)
	 * Note that the last argument is unused.
	 */
	/* Arg 1: a pointer to the registers */
	ppc_addi (buf, ppc_r3, ppc_r1, GREGS_OFFSET);

	/* Arg 2: code (next address to the instruction that called us) */
	if (tramp_type == MONO_TRAMPOLINE_JUMP) {
		ppc_li (buf, ppc_r4, 0);
	} else {
		ppc_lwz (buf, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
	}

	/* Arg 3: MonoMethod *method. It was put in r5 already above */
	/*ppc_mr (buf, ppc_r5, ppc_r5);*/

	/* load the C handler address with lis/ori and call it through lr */
	tramp_handler = mono_get_trampoline_func (tramp_type);
	ppc_lis (buf, ppc_r0, (guint32) tramp_handler >> 16);
	ppc_ori (buf, ppc_r0, ppc_r0, (guint32) tramp_handler & 0xffff);
	ppc_mtlr (buf, ppc_r0);
	ppc_blrl (buf);

	/* OK, code address is now on r3. Move it to the counter reg
	 * so it will be ready for the final jump: this is safe since we
	 * won't do any more calls.
	 */
	if (tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
		ppc_mtctr (buf, ppc_r3);

	/*
	 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
	 * and the rest of the registers, so the method called will see
	 * the same state as before we executed.
	 * The pointer to MonoLMF is in ppc_r11.
	 */
	ppc_addi (buf, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
	/* r5 = previous_lmf */
	ppc_lwz (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
	/* r6 = lmf_addr */
	ppc_lwz (buf, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
	/* *(lmf_addr) = previous_lmf */
	ppc_stw (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
	/* restore iregs */
	ppc_lmw (buf, ppc_r13, ppc_r11, G_STRUCT_OFFSET(MonoLMF, iregs));
	/* restore fregs */
	for (i = 14; i < 32; i++) {
		ppc_lfd (buf, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
	}

	/* restore the volatile registers, we skip r1, of course */
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
	for (i = 0; i < 14; i++) {
		ppc_lfd (buf, i, offset, ppc_r1);
		offset += sizeof (double);
	}
	offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (13 * sizeof (gulong));
	ppc_lwz (buf, ppc_r0, offset, ppc_r1);
	/* skip the saved r1 slot: resume at r2 */
	offset += 2 * sizeof (gulong);
	for (i = 2; i < 13; i++) {
		/* for RGCTX lazy fetch r3 carries the handler's result, so keep it */
		if (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH)
			ppc_lwz (buf, i, offset, ppc_r1);
		offset += sizeof (gulong);
	}

	/* Non-standard function epilogue. Instead of doing a proper
	 * return, we just jump to the compiled code.
	 */
	/* Restore stack pointer and LR and jump to the code */
	ppc_lwz (buf, ppc_r1, 0, ppc_r1);
	ppc_lwz (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
	ppc_mtlr (buf, ppc_r11);
	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT ||
			tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT ||
			tramp_type == MONO_TRAMPOLINE_RGCTX_LAZY_FETCH) {
		/* these trampolines return to the caller instead of retrying the call */
		ppc_blr (buf);
	} else {
		ppc_bcctr (buf, 20, 0);
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (code, buf - code);

	/* Sanity check */
	g_assert ((buf - code) <= 512);

	return code;
}
/* Worst-case size in bytes of one specific trampoline (6 instructions) */
#define TRAMPOLINE_SIZE 24

/*
 * mono_arch_create_specific_trampoline:
 * @arg1: trampoline-specific argument, loaded into r0 for the generic code
 * @tramp_type: which generic trampoline to chain to
 * @domain: domain whose code manager the trampoline is allocated from
 * @code_len: if non-NULL, receives the size of the emitted code
 *
 * Emits a tiny per-call-site stub that loads @arg1 into r0 and jumps to
 * the generic trampoline for @tramp_type. Returns the stub's address.
 */
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	guint32 short_branch;

	tramp = mono_get_trampoline_code (tramp_type);

	mono_domain_lock (domain);
	code = buf = mono_code_manager_reserve_align (domain->code_mp, TRAMPOLINE_SIZE, 4);
	/* code + 8: the branch follows the two-instruction arg1 load */
	short_branch = branch_for_target_reachable (code + 8, tramp);
	if (short_branch)
		/* short form needs only 12 bytes (lis/ori/b); return the rest */
		mono_code_manager_commit (domain->code_mp, code, TRAMPOLINE_SIZE, 12);
	mono_domain_unlock (domain);

	if (short_branch) {
		/* load arg1 into r0 and branch directly to the generic code */
		ppc_lis (buf, ppc_r0, (guint32) arg1 >> 16);
		ppc_ori (buf, ppc_r0, ppc_r0, (guint32) arg1 & 0xffff);
		ppc_emit32 (buf, short_branch);
	} else {
		/* Prepare the jump to the generic trampoline code.*/
		ppc_lis (buf, ppc_r0, (guint32) tramp >> 16);
		ppc_ori (buf, ppc_r0, ppc_r0, (guint32) tramp & 0xffff);
		ppc_mtctr (buf, ppc_r0);

		/* And finally put 'arg1' in r0 and fly! */
		ppc_lis (buf, ppc_r0, (guint32) arg1 >> 16);
		ppc_ori (buf, ppc_r0, ppc_r0, (guint32) arg1 & 0xffff);
		ppc_bcctr (buf, 20, 0);
	}

	/* Flush instruction cache, since we've generated code */
	mono_arch_flush_icache (code, buf - code);

	g_assert ((buf - code) <= TRAMPOLINE_SIZE);
	if (code_len)
		*code_len = buf - code;

	return code;
}
375 static guint8*
376 emit_trampoline_jump (guint8 *code, guint8 *tramp)
378 guint32 short_branch = branch_for_target_reachable (code, tramp);
380 /* FIXME: we can save a few bytes here by committing if the
381 short branch is possible */
382 if (short_branch) {
383 ppc_emit32 (code, short_branch);
384 } else {
385 ppc_load (code, ppc_r0, tramp);
386 ppc_mtctr (code, ppc_r0);
387 ppc_bcctr (code, 20, 0);
390 return code;
/*
 * mono_arch_create_rgctx_lazy_fetch_trampoline:
 * @slot: encoded RGCTX slot (MRGCTX flag + index)
 *
 * Emits a trampoline that walks the runtime-generic-context array
 * chain and returns the value in @slot if it is already filled in;
 * when the rgctx pointer, any intermediate array, or the slot itself
 * is still NULL it falls through to the generic RGCTX_LAZY_FETCH
 * trampoline which computes the value. Returns the trampoline address.
 */
gpointer
mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
{
	guint8 *tramp;
	guint8 *code, *buf;
	guint8 **rgctx_null_jumps;	/* branches to patch to the slow path */
	int tramp_size;
	int depth, index;
	int i;
	gboolean mrgctx;

	mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
	index = MONO_RGCTX_SLOT_INDEX (slot);
	if (mrgctx)
		/* the MRGCTX header occupies the first slots of the array */
		index += sizeof (MonoMethodRuntimeGenericContext) / sizeof (gpointer);
	/* translate the flat index into (array depth, index within array);
	 * slot 0 of each array is the link to the next array */
	for (depth = 0; ; ++depth) {
		int size = mono_class_rgctx_get_array_size (depth, mrgctx);

		if (index < size - 1)
			break;
		index -= size - 1;
	}

	/* size estimate checked by the g_assert at the end */
	tramp_size = 40 + 12 * depth;
	if (mrgctx)
		tramp_size += 4;
	else
		tramp_size += 12;

	code = buf = mono_global_codeman_reserve (tramp_size);

	rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));

	if (mrgctx) {
		/* get mrgctx ptr */
		ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
	} else {
		/* load rgctx ptr from vtable */
		ppc_lwz (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
		/* is the rgctx ptr null? */
		ppc_cmpi (code, 0, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [0] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	for (i = 0; i < depth; ++i) {
		/* load ptr to next array */
		if (mrgctx && i == 0)
			/* in the MRGCTX case the first link lives past the header */
			ppc_lwz (code, ppc_r4, sizeof (MonoMethodRuntimeGenericContext), ppc_r4);
		else
			ppc_lwz (code, ppc_r4, 0, ppc_r4);
		/* is the ptr null? */
		ppc_cmpi (code, 0, 0, ppc_r4, 0);
		/* if yes, jump to actual trampoline */
		rgctx_null_jumps [i + 1] = code;
		ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	}

	/* fetch slot */
	ppc_lwz (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
	/* is the slot null? */
	ppc_cmpi (code, 0, 0, ppc_r4, 0);
	/* if yes, jump to actual trampoline */
	rgctx_null_jumps [depth + 1] = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
	/* otherwise return r4 */
	/* FIXME: if we use r3 as the work register we can avoid this copy */
	ppc_mr (code, ppc_r3, ppc_r4);
	ppc_blr (code);

	/* point all slow-path branches here (slot 0 is unused for mrgctx) */
	for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
		ppc_patch (rgctx_null_jumps [i], code);

	g_free (rgctx_null_jumps);

	/* move the rgctx pointer to the VTABLE register */
	ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);

	tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
		MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);

	/* jump to the actual trampoline */
	code = emit_trampoline_jump (code, tramp);

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	return buf;
}
/*
 * mono_arch_create_generic_class_init_trampoline:
 *
 * Emits a trampoline that checks the 'initialized' bit of the vtable
 * passed in the first argument register: if the class is already
 * initialized it returns immediately, otherwise it falls through to
 * the generic GENERIC_CLASS_INIT trampoline. Returns the trampoline
 * address.
 */
gpointer
mono_arch_create_generic_class_init_trampoline (void)
{
	guint8 *tramp;
	guint8 *code, *buf;
	/* offset/mask of the 'initialized' bitfield, computed once and cached */
	static int byte_offset = -1;
	static guint8 bitmask;
	guint8 *jump;
	int tramp_size;

	tramp_size = 32;

	code = buf = mono_global_codeman_reserve (tramp_size);

	if (byte_offset < 0)
		mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);

	/* load the byte containing the flag and test the bit */
	ppc_lbz (code, ppc_r4, byte_offset, PPC_FIRST_ARG_REG);
	ppc_andid (code, ppc_r4, ppc_r4, bitmask);
	/* bit clear -> class not initialized -> branch to the slow path */
	jump = code;
	ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);

	/* already initialized: nothing to do, return to the caller */
	ppc_blr (code);

	ppc_patch (jump, code);

	tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
		mono_get_root_domain (), NULL);

	/* jump to the actual trampoline */
	code = emit_trampoline_jump (code, tramp);

	mono_arch_flush_icache (buf, code - buf);

	g_assert (code - buf <= tramp_size);

	return buf;
}