mono/mini/tramp-arm-gsharedvt.c
/**
 * \file
 * gsharedvt support code for arm
 *
 * Authors:
 *   Zoltan Varga <vargaz@gmail.com>
 *
 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */
#include <config.h>
#include <glib.h>

#include <mono/metadata/abi-details.h>
#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/metadata/profiler-private.h>
#include <mono/arch/arm/arm-codegen.h>
#include <mono/arch/arm/arm-vfp-codegen.h>

#include "mini.h"
#include "mini-arm.h"
#include "mini-runtime.h"

#ifdef MONO_ARCH_GSHAREDVT_SUPPORTED
static guint8*
emit_bx (guint8* code, int reg)
{
	if (mono_arm_thumb_supported ())
		ARM_BX (code, reg);
	else
		ARM_MOV_REG_REG (code, ARMREG_PC, reg);
	return code;
}
gpointer
mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg,
	double *caller_fregs, double *callee_fregs)
{
	int i;

	/*
	 * The caller/callee regs are mapped to slot 0..3, stack slot 0 is mapped to slot 4, etc.
	 */
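	/*
	 * Illustration (an assumption spelled out from the comment above): an argument the
	 * caller passes in r0-r3 occupies caller [0]..caller [3], and an argument at caller
	 * stack offset N * 4 occupies caller [4 + N]. The callee array uses the same
	 * numbering for the callee's registers and stack area.
	 */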
	/* Set vtype ret arg */
	if (info->vret_slot != -1) {
		callee [info->vret_arg_reg] = &callee [info->vret_slot];
	}

	for (i = 0; i < info->map_count; ++i) {
		int src = info->map [i * 2];
		int dst = info->map [(i * 2) + 1];
		int arg_marshal = (src >> 24) & 0xff;

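		/*
		 * Map entry encoding, as inferred from the decoding here and in the cases
		 * below (not an authoritative description): bits 24-31 of src hold the
		 * marshalling kind, bits 8-15 hold the slot count for the by-value case, and
		 * the low bits hold the source slot index.
		 */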
		switch (arg_marshal) {
		case GSHAREDVT_ARG_NONE:
			callee [dst] = caller [src];
			break;
		case GSHAREDVT_ARG_BYVAL_TO_BYREF:
			/* gsharedvt argument passed by addr in reg/stack slot */
			src = src & 0xffff;
			callee [dst] = caller + src;
			break;
		case GSHAREDVT_ARG_BYREF_TO_BYVAL: {
			/* gsharedvt argument passed by value */
			int nslots = (src >> 8) & 0xff;
			int src_slot = src & 0xff;
			int j;
			gpointer *addr = (gpointer*)caller [src_slot];

			for (j = 0; j < nslots; ++j)
				callee [dst + j] = addr [j];
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_I1: {
			int src_slot = src & 0xff;
			gpointer *addr = (gpointer*)caller [src_slot];

			callee [dst] = GINT_TO_POINTER ((int)*(gint8*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_I2: {
			int src_slot = src & 0xff;
			gpointer *addr = (gpointer*)caller [src_slot];

			callee [dst] = GINT_TO_POINTER ((int)*(gint16*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_U1: {
			int src_slot = src & 0xff;
			gpointer *addr = (gpointer*)caller [src_slot];

			callee [dst] = GUINT_TO_POINTER ((guint)*(guint8*)addr);
			break;
		}
		case GSHAREDVT_ARG_BYREF_TO_BYVAL_U2: {
			int src_slot = src & 0xff;
			gpointer *addr = (gpointer*)caller [src_slot];

			callee [dst] = GUINT_TO_POINTER ((guint)*(guint16*)addr);
			break;
		}
		default:
			g_assert_not_reached ();
			break;
		}
	}
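
	/*
	 * Worked example with hypothetical values, following the decoding above: a map
	 * entry with src == (GSHAREDVT_ARG_BYREF_TO_BYVAL << 24) | (2 << 8) | 1 and
	 * dst == 5 copies two pointer-sized slots from the address stored in caller
	 * slot 1 into callee slots 5 and 6.
	 */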
	/* The slot based approach above is very complicated, use a nested switch instead for fp regs */
	// FIXME: Use this for the other cases as well
	if (info->have_fregs) {
		CallInfo *caller_cinfo = info->caller_cinfo;
		CallInfo *callee_cinfo = info->callee_cinfo;
		int aindex;

		for (aindex = 0; aindex < caller_cinfo->nargs; ++aindex) {
			ArgInfo *ainfo = &caller_cinfo->args [aindex];
			ArgInfo *ainfo2 = &callee_cinfo->args [aindex];

			switch (ainfo->storage) {
			case RegTypeFP: {
				switch (ainfo2->storage) {
				case RegTypeFP:
					callee_fregs [ainfo2->reg / 2] = caller_fregs [ainfo->reg / 2];
					break;
				case RegTypeGSharedVtInReg:
					callee [ainfo2->reg] = &caller_fregs [ainfo->reg / 2];
					break;
				case RegTypeGSharedVtOnStack: {
					int sslot = ainfo2->offset / 4;
					callee [sslot + 4] = &caller_fregs [ainfo->reg / 2];
					break;
				}
				default:
					g_assert_not_reached ();
					break;
				}
				break;
			}
			case RegTypeGSharedVtInReg: {
				switch (ainfo2->storage) {
				case RegTypeFP: {
					callee_fregs [ainfo2->reg / 2] = *(double*)caller [ainfo->reg];
					break;
				}
				default:
					break;
				}
				break;
			}
			case RegTypeGSharedVtOnStack: {
				switch (ainfo2->storage) {
				case RegTypeFP: {
					int sslot = ainfo->offset / 4;
					callee_fregs [ainfo2->reg / 2] = *(double*)caller [sslot + 4];
					break;
				}
				default:
					break;
				}
				break;
			}
			default:
				break;
			}
		}
	}
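
	/*
	 * Example, assumed from the nested switch above: a double passed by the caller in
	 * d0 (RegTypeFP, reg 0) and expected by the callee by address in r1
	 * (RegTypeGSharedVtInReg, reg 1) ends up as callee [1] = &caller_fregs [0].
	 */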
	if (info->vcall_offset != -1) {
		MonoObject *this_obj = (MonoObject*)caller [0];

		if (G_UNLIKELY (!this_obj))
			return NULL;
		if (info->vcall_offset == MONO_GSHAREDVT_DEL_INVOKE_VT_OFFSET)
			/* delegate invoke */
			return ((MonoDelegate*)this_obj)->invoke_impl;
		else
			return *(gpointer*)((char*)this_obj->vtable + info->vcall_offset);
	} else if (info->calli) {
		/* The address to call is passed in the mrgctx reg */
		return mrgctx_reg;
	} else {
		return info->addr;
	}
}

#ifndef DISABLE_JIT
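
/*
 * mono_arch_get_gsharedvt_trampoline:
 *
 *   Rough summary, inferred from the code below: emit a trampoline which saves the
 *   caller's argument registers, allocates a callee argument area, calls
 *   mono_arm_start_gsharedvt_call () to marshal the arguments and obtain the address
 *   to call, makes the call, and then marshals the return value for the IN/OUT cases.
 */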
gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	guint8 *code, *buf;
	int buf_len, cfa_offset;
	GSList *unwind_ops = NULL;
	MonoJumpInfo *ji = NULL;
	guint8 *br_out, *br [16], *br_ret [16];
	int i, offset, arg_reg, npushed, info_offset, mrgctx_offset;
	int caller_reg_area_offset, caller_freg_area_offset, callee_reg_area_offset, callee_freg_area_offset;
	int lr_offset, fp, br_ret_index, args_size;

	buf_len = 784;
	buf = code = mono_global_codeman_reserve (buf_len);

	arg_reg = ARMREG_R0;
	/* Registers pushed by the arg trampoline */
	npushed = 4;

	// ios abi compatible frame
	fp = ARMREG_R7;
	cfa_offset = npushed * TARGET_SIZEOF_VOID_P;
	mono_add_unwind_op_def_cfa (unwind_ops, code, buf, ARMREG_SP, cfa_offset);
	ARM_PUSH (code, (1 << fp) | (1 << ARMREG_LR));
	cfa_offset += 2 * TARGET_SIZEOF_VOID_P;
	mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
	mono_add_unwind_op_offset (unwind_ops, code, buf, fp, (- cfa_offset));
	mono_add_unwind_op_offset (unwind_ops, code, buf, ARMREG_LR, ((- cfa_offset) + 4));
	ARM_MOV_REG_REG (code, fp, ARMREG_SP);
	mono_add_unwind_op_def_cfa_reg (unwind_ops, code, buf, fp);
	/* Allocate stack frame */
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 32 + (16 * sizeof (double)));
	if (MONO_ARCH_FRAME_ALIGNMENT > 8)
		ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, (MONO_ARCH_FRAME_ALIGNMENT - 8));
	offset = 4;
	info_offset = -offset;
	offset += 4;
	mrgctx_offset = -offset;
	offset += 4 * 4;
	callee_reg_area_offset = -offset;
	offset += 8 * 8;
	caller_freg_area_offset = -offset;
	offset += 8 * 8;
	callee_freg_area_offset = -offset;

	caller_reg_area_offset = cfa_offset - (npushed * TARGET_SIZEOF_VOID_P);
	lr_offset = 4;
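
	/*
	 * Frame layout sketch, derived from the offsets computed above (not an
	 * authoritative diagram): info_offset and mrgctx_offset are single words just
	 * below fp, followed by a 4-word callee register area and two 8-double areas for
	 * the caller and callee fregs. caller_reg_area_offset points above fp, at the
	 * r0-r3 block pushed by the arg trampoline on top of the saved fp/lr pair.
	 */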
	/* Save info struct which is in r0 */
	ARM_STR_IMM (code, arg_reg, fp, info_offset);
	/* Save rgctx reg */
	ARM_STR_IMM (code, MONO_ARCH_RGCTX_REG, fp, mrgctx_offset);
	/* Allocate callee area */
	ARM_LDR_IMM (code, ARMREG_IP, arg_reg, MONO_STRUCT_OFFSET (GSharedVtCallInfo, stack_usage));
	ARM_SUB_REG_REG (code, ARMREG_SP, ARMREG_SP, ARMREG_IP);
	/* Allocate callee register area just below the callee area so the slots are correct */
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 4 * TARGET_SIZEOF_VOID_P);
	if (mono_arm_is_hard_float ()) {
		/* Save caller fregs */
		ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -caller_freg_area_offset);
		for (i = 0; i < 8; ++i)
			ARM_FSTD (code, i * 2, ARMREG_IP, (i * sizeof (double)));
	}

	/*
	 * The stack now looks like this:
	 * <caller frame>
	 * <saved r0-r3, lr>
	 * <saved fp> <- fp
	 * <our frame>
	 * <callee area> <- sp
	 */
	g_assert (mono_arm_thumb_supported ());

	/* Call start_gsharedvt_call () */
	/* 6 arguments, need 2 stack slots, need to clean them up after the call */
	args_size = 2 * TARGET_SIZEOF_VOID_P;
	ARM_SUB_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, args_size);
	/* arg1 == info */
	ARM_LDR_IMM (code, ARMREG_R0, fp, info_offset);
	/* arg2 == caller stack area */
	ARM_ADD_REG_IMM8 (code, ARMREG_R1, fp, cfa_offset - 4 * TARGET_SIZEOF_VOID_P);
	/* arg3 == callee stack area */
	ARM_ADD_REG_IMM8 (code, ARMREG_R2, ARMREG_SP, args_size);
	/* arg4 == mrgctx reg */
	ARM_LDR_IMM (code, ARMREG_R3, fp, mrgctx_offset);
	/* arg5 == caller freg area */
	ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -caller_freg_area_offset);
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_SP, 0);
	/* arg6 == callee freg area */
	ARM_SUB_REG_IMM8 (code, ARMREG_IP, fp, -callee_freg_area_offset);
	ARM_STR_IMM (code, ARMREG_IP, ARMREG_SP, 4);
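	/*
	 * Note, based on the prototype above: the argument order matches
	 * mono_arm_start_gsharedvt_call (info, caller, callee, mrgctx_reg, caller_fregs,
	 * callee_fregs); the first four go in r0-r3, the last two in the two stack slots
	 * reserved above, per the AAPCS.
	 */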
	/* Make the call */
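	/*
	 * What follows appears to be the usual Mono inline-literal call sequence: the
	 * target address is stored in a literal word right after a branch which skips it,
	 * loaded PC-relative into ip (in the AOT case the embedded word is presumably
	 * patched so the following PC-relative load fetches the real address), lr is set
	 * by hand and the call goes through ip via emit_bx ().
	 */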
	if (aot) {
		ji = mono_patch_info_list_prepend (ji, code - buf, MONO_PATCH_INFO_JIT_ICALL_ADDR, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arm_start_gsharedvt_call));
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = NULL;
		code += 4;
		ARM_LDR_REG_REG (code, ARMREG_IP, ARMREG_PC, ARMREG_IP);
	} else {
		ARM_LDR_IMM (code, ARMREG_IP, ARMREG_PC, 0);
		ARM_B (code, 0);
		*(gpointer*)code = (gpointer)mono_arm_start_gsharedvt_call;
		code += 4;
	}
	ARM_MOV_REG_REG (code, ARMREG_LR, ARMREG_PC);
	code = emit_bx (code, ARMREG_IP);
	/* Clean up stack */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, args_size);

	/* Make the real method call */
	/* R0 contains the addr to call */
	ARM_MOV_REG_REG (code, ARMREG_IP, ARMREG_R0);
	/* Load argument registers */
	ARM_LDM (code, ARMREG_SP, (1 << ARMREG_R0) | (1 << ARMREG_R1) | (1 << ARMREG_R2) | (1 << ARMREG_R3));
	if (mono_arm_is_hard_float ()) {
		/* Load argument fregs */
		ARM_SUB_REG_IMM8 (code, ARMREG_LR, fp, -callee_freg_area_offset);
		for (i = 0; i < 8; ++i)
			ARM_FLDD (code, i * 2, ARMREG_LR, (i * sizeof (double)));
	}
	/* Pop callee register area */
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, 4 * TARGET_SIZEOF_VOID_P);
	/* Load rgctx */
	ARM_LDR_IMM (code, MONO_ARCH_RGCTX_REG, fp, mrgctx_offset);
	/* Make the call */
#if 0
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, addr));
#endif
	/* mono_arch_find_imt_method () depends on this */
	ARM_ADD_REG_IMM8 (code, ARMREG_LR, ARMREG_PC, 4);
	ARM_BX (code, ARMREG_IP);
	*((gpointer*)code) = NULL;
	code += 4;

	br_ret_index = 0;

	/* Branch between IN/OUT cases */
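	/*
	 * Note, inferred from the code below: in the IN case the callee is the gsharedvt
	 * variant, so its return value sits in the vret area on the stack and has to be
	 * loaded into r0/r1/d0; in the OUT case the callee is a normal method, so the
	 * value it returns in r0/r1/d0 has to be stored into the buffer whose address the
	 * caller passed in vret_arg_reg.
	 */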
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, gsharedvt_in));

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, 1);
	br_out = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* IN CASE */

	/* IP == return marshalling type */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, ret_marshal));

	/* Continue if no marshalling required */
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_NONE);
	br_ret [br_ret_index ++] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);

	/* Compute vret area address in LR */
	ARM_LDR_IMM (code, ARMREG_LR, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_LR, ARMREG_LR, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_slot));
	/*
	 * The slot value is off by 4: slots 0-3 map to the register area, which was popped
	 * off before the call, so stack slot N is at sp + (N - 4) * 4.
	 */
	ARM_SUB_REG_IMM8 (code, ARMREG_LR, ARMREG_LR, 4);
	ARM_SHL_IMM (code, ARMREG_LR, ARMREG_LR, 2);
	ARM_ADD_REG_REG (code, ARMREG_LR, ARMREG_LR, ARMREG_SP);
	/* Branch to specific marshalling code */
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREG);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREGS);
	br [1] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_I1);
	br [2] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U1);
	br [3] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_I2);
	br [4] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U2);
	br [5] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R4);
	br [6] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R8);
	br [7] = code;
	ARM_B_COND (code, ARMCOND_EQ, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	/* IN IREG case */
	arm_patch (br [0], code);
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* IN IREGS case */
	arm_patch (br [1], code);
	ARM_LDR_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	ARM_LDR_IMM (code, ARMREG_R1, ARMREG_LR, 4);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* I1 case */
	arm_patch (br [2], code);
	ARM_LDRSB_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* U1 case */
	arm_patch (br [3], code);
	ARM_LDRB_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* I2 case */
	arm_patch (br [4], code);
	ARM_LDRSH_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* U2 case */
	arm_patch (br [5], code);
	ARM_LDRH_IMM (code, ARMREG_R0, ARMREG_LR, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* R4 case */
	arm_patch (br [6], code);
	ARM_FLDS (code, ARM_VFP_D0, ARMREG_LR, 0);
	code += 4;
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	/* R8 case */
	arm_patch (br [7], code);
	ARM_FLDD (code, ARM_VFP_D0, ARMREG_LR, 0);
	code += 4;
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	/* OUT CASE */
	arm_patch (br_out, code);

	/* Marshal return value */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, ret_marshal));
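
	/*
	 * Note, inferred from the code below: each OUT case recovers the vret address from
	 * the caller's saved argument registers (vret_arg_reg indexes the r0-r3 block the
	 * arg trampoline pushed above the saved fp/lr) and stores the return value there
	 * with the appropriate width.
	 */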
	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREGS);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT IREGS case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save both registers for simplicity */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	ARM_STR_IMM (code, ARMREG_R1, ARMREG_IP, 4);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_IREG);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT IREG case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_STR_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_U1);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT U1 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_STRB_IMM (code, ARMREG_R0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R4);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT R4 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_FSTS (code, ARM_VFP_D0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	ARM_CMP_REG_IMM8 (code, ARMREG_IP, GSHAREDVT_RET_VFP_R8);
	br [0] = code;
	ARM_B_COND (code, ARMCOND_NE, 0);

	/* OUT R8 case */
	/* Load vtype ret addr from the caller arg regs */
	ARM_LDR_IMM (code, ARMREG_IP, fp, info_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, MONO_STRUCT_OFFSET (GSharedVtCallInfo, vret_arg_reg));
	ARM_SHL_IMM (code, ARMREG_IP, ARMREG_IP, 2);
	ARM_ADD_REG_REG (code, ARMREG_IP, ARMREG_IP, fp);
	ARM_ADD_REG_IMM8 (code, ARMREG_IP, ARMREG_IP, caller_reg_area_offset);
	ARM_LDR_IMM (code, ARMREG_IP, ARMREG_IP, 0);
	/* Save the return value to the buffer pointed to by the vret addr */
	ARM_FSTD (code, ARM_VFP_D0, ARMREG_IP, 0);
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);
	arm_patch (br [0], code);

	/* OUT other cases */
	br_ret [br_ret_index ++] = code;
	ARM_B (code, 0);

	for (i = 0; i < br_ret_index; ++i)
		arm_patch (br_ret [i], code);

	/* Normal return */
	/* Restore fp/lr and unwind the frame; adding cfa_offset also pops the r0-r3 block pushed by the arg trampoline */
	ARM_MOV_REG_REG (code, ARMREG_SP, fp);
	ARM_LDM (code, fp, (1 << fp) | (1 << ARMREG_LR));
	ARM_ADD_REG_IMM8 (code, ARMREG_SP, ARMREG_SP, cfa_offset);
	/* Return */
	ARM_BX (code, ARMREG_LR);

	g_assert ((code - buf) < buf_len);

	if (info)
		*info = mono_tramp_info_create ("gsharedvt_trampoline", buf, code - buf, ji, unwind_ops);

	mono_arch_flush_icache (buf, code - buf);
	return buf;
}

#else

gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	g_assert_not_reached ();
	return NULL;
}

#endif

#else

gpointer
mono_arm_start_gsharedvt_call (GSharedVtCallInfo *info, gpointer *caller, gpointer *callee, gpointer mrgctx_reg)
{
	g_assert_not_reached ();
	return NULL;
}

gpointer
mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
{
	*info = NULL;
	return NULL;
}

#endif