/*
 * gsharedvt support code for arm
 *
 * Authors:
 *   Zoltan Varga <vargaz@gmail.com>
 *
 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */
14 #include <mono/metadata/abi-details.h>
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/marshal.h>
17 #include <mono/metadata/tabledefs.h>
18 #include <mono/metadata/profiler-private.h>
19 #include <mono/arch/arm/arm-codegen.h>
20 #include <mono/arch/arm/arm-vfp-codegen.h>
24 #include "mini-runtime.h"
26 #ifdef MONO_ARCH_GSHAREDVT_SUPPORTED
29 emit_bx (guint8
* code
, int reg
)
31 if (mono_arm_thumb_supported ())
34 ARM_MOV_REG_REG (code
, ARMREG_PC
, reg
);
/*
 * mono_arm_start_gsharedvt_call:
 *
 *   Runtime helper called from the gsharedvt arg trampoline. Transfers
 *   arguments from the CALLER argument area to the CALLEE argument area
 *   according to the slot map in INFO, marshals VFP register arguments,
 *   and (in the tail, partly missing from this extraction) computes the
 *   address to branch to.
 *
 * NOTE(review): this chunk is a shattered extraction — every original
 * statement is split across several lines and prefixed with its original
 * line number, and some original lines (braces, `break` statements, the
 * declarations of i/j/aindex, the masking of `src`, and the trailing
 * `return`s) are missing entirely. The code below is left byte-identical;
 * only comments were added. Restore from the upstream file before building.
 */
39 mono_arm_start_gsharedvt_call (GSharedVtCallInfo
*info
, gpointer
*caller
, gpointer
*callee
, gpointer mrgctx_reg
,
40 double *caller_fregs
, double *callee_fregs
)
45 * The caller/callee regs are mapped to slot 0..3, stack slot 0 is mapped to slot 4, etc.
/* Hidden vtype return buffer: point the callee's vret arg register slot at the callee-side slot reserved for it. */
48 /* Set vtype ret arg */
49 if (info
->vret_slot
!= -1) {
50 callee
[info
->vret_arg_reg
] = &callee
[info
->vret_slot
];
/* Copy each argument: info->map holds (src, dst) slot pairs; the top byte of src encodes the marshalling kind. */
53 for (i
= 0; i
< info
->map_count
; ++i
) {
54 int src
= info
->map
[i
* 2];
55 int dst
= info
->map
[(i
* 2) + 1];
56 int arg_marshal
= (src
>> 24) & 0xff;
58 switch (arg_marshal
) {
/* No marshalling: plain slot-to-slot copy. */
59 case GSHAREDVT_ARG_NONE
:
60 callee
[dst
] = caller
[src
];
62 case GSHAREDVT_ARG_BYVAL_TO_BYREF
:
63 /* gsharedvt argument passed by addr in reg/stack slot */
/* NOTE(review): the original line masking the marshal byte out of `src` (line 64) is missing here. */
65 callee
[dst
] = caller
+ src
;
67 case GSHAREDVT_ARG_BYREF_TO_BYVAL
: {
68 /* gsharedvt argument passed by value */
/* src encodes the slot count in bits 8-15 and the source slot in bits 0-7. */
69 int nslots
= (src
>> 8) & 0xff;
70 int src_slot
= src
& 0xff;
72 gpointer
*addr
= (gpointer
*)caller
[src_slot
];
74 for (j
= 0; j
< nslots
; ++j
)
75 callee
[dst
+ j
] = addr
[j
];
/* Byref -> byval for small integer types: load through the pointer with the proper width and sign/zero extension. */
78 case GSHAREDVT_ARG_BYREF_TO_BYVAL_I1
: {
79 int src_slot
= src
& 0xff;
80 gpointer
*addr
= (gpointer
*)caller
[src_slot
];
82 callee
[dst
] = GINT_TO_POINTER ((int)*(gint8
*)addr
);
85 case GSHAREDVT_ARG_BYREF_TO_BYVAL_I2
: {
86 int src_slot
= src
& 0xff;
87 gpointer
*addr
= (gpointer
*)caller
[src_slot
];
89 callee
[dst
] = GINT_TO_POINTER ((int)*(gint16
*)addr
);
92 case GSHAREDVT_ARG_BYREF_TO_BYVAL_U1
: {
93 int src_slot
= src
& 0xff;
94 gpointer
*addr
= (gpointer
*)caller
[src_slot
];
96 callee
[dst
] = GUINT_TO_POINTER ((guint
)*(guint8
*)addr
);
99 case GSHAREDVT_ARG_BYREF_TO_BYVAL_U2
: {
100 int src_slot
= src
& 0xff;
101 gpointer
*addr
= (gpointer
*)caller
[src_slot
];
103 callee
[dst
] = GUINT_TO_POINTER ((guint
)*(guint16
*)addr
);
/* Unknown marshal kind: hard failure. */
107 g_assert_not_reached ();
/* VFP arguments: marshalled by cross-matching the caller and callee ArgInfo storage kinds (fp reg / int reg / stack). */
112 /* The slot based approach above is very complicated, use a nested switch instead for fp regs */
113 // FIXME: Use this for the other cases as well
114 if (info
->have_fregs
) {
115 CallInfo
*caller_cinfo
= info
->caller_cinfo
;
116 CallInfo
*callee_cinfo
= info
->callee_cinfo
;
119 for (aindex
= 0; aindex
< caller_cinfo
->nargs
; ++aindex
) {
120 ArgInfo
*ainfo
= &caller_cinfo
->args
[aindex
];
121 ArgInfo
*ainfo2
= &callee_cinfo
->args
[aindex
];
123 switch (ainfo
->storage
) {
/* NOTE(review): the outer/inner `case RegTypeFP:` labels (original lines 124/126) were lost in extraction. */
125 switch (ainfo2
->storage
) {
/* fp reg -> fp reg: doubles occupy register pairs, hence reg / 2. */
127 callee_fregs
[ainfo2
->reg
/ 2] = caller_fregs
[ainfo
->reg
/ 2];
/* fp reg -> gsharedvt: pass the address of the saved caller freg. */
129 case RegTypeGSharedVtInReg
:
130 callee
[ainfo2
->reg
] = &caller_fregs
[ainfo
->reg
/ 2];
132 case RegTypeGSharedVtOnStack
: {
/* Stack slot index; +4 skips the 4 register slots (see slot mapping note above). */
133 int sslot
= ainfo2
->offset
/ 4;
134 callee
[sslot
+ 4] = &caller_fregs
[ainfo
->reg
/ 2];
138 g_assert_not_reached ();
/* gsharedvt in reg -> fp reg: dereference the byref slot as a double. */
143 case RegTypeGSharedVtInReg
: {
144 switch (ainfo2
->storage
) {
146 callee_fregs
[ainfo2
->reg
/ 2] = *(double*)caller
[ainfo
->reg
];
/* gsharedvt on stack -> fp reg. */
154 case RegTypeGSharedVtOnStack
: {
155 switch (ainfo2
->storage
) {
157 int sslot
= ainfo
->offset
/ 4;
158 callee_fregs
[ainfo2
->reg
/ 2] = *(double*)caller
[sslot
+ 4];
/*
 * Compute the address to call: a virtual call through this_obj's vtable,
 * a delegate invoke, or (in the missing tail) presumably the mrgctx reg
 * for calli and info->addr otherwise — TODO confirm against upstream.
 */
172 if (info
->vcall_offset
!= -1) {
173 MonoObject
*this_obj
= (MonoObject
*)caller
[0];
/* NOTE(review): the body of this null check (original line 176) is missing from the extraction. */
175 if (G_UNLIKELY (!this_obj
))
177 if (info
->vcall_offset
== MONO_GSHAREDVT_DEL_INVOKE_VT_OFFSET
)
178 /* delegate invoke */
179 return ((MonoDelegate
*)this_obj
)->invoke_impl
;
181 return *(gpointer
*)((char*)this_obj
->vtable
+ info
->vcall_offset
);
182 } else if (info
->calli
) {
183 /* The address to call is passed in the mrgctx reg */
/*
 * mono_arch_get_gsharedvt_trampoline:
 *
 *   Emit the gsharedvt argument trampoline for ARM: build a frame, save the
 *   info struct and rgctx reg, allocate the callee argument area, call
 *   mono_arm_start_gsharedvt_call () to marshal the arguments, make the real
 *   call, then marshal the return value according to info->ret_marshal.
 *
 * NOTE(review): shattered extraction — statements are split across lines and
 * prefixed with their original line numbers, and many original lines
 * (constant setup such as buf_len/arg_reg/npushed/fp, the offset chain,
 * `br [n] = code;` bookkeeping, braces and the final return) are missing.
 * Code left byte-identical; comments only. Restore from upstream before use.
 */
193 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo
**info
, gboolean aot
)
/* Locals: code buffer bookkeeping, unwind ops, AOT patch list, forward branch arrays, frame offsets. */
196 int buf_len
, cfa_offset
;
197 GSList
*unwind_ops
= NULL
;
198 MonoJumpInfo
*ji
= NULL
;
199 guint8
*br_out
, *br
[16], *br_ret
[16];
200 int i
, offset
, arg_reg
, npushed
, info_offset
, mrgctx_offset
;
201 int caller_reg_area_offset
, caller_freg_area_offset
, callee_reg_area_offset
, callee_freg_area_offset
;
202 int lr_offset
, fp
, br_ret_index
, args_size
;
205 buf
= code
= mono_global_codeman_reserve (buf_len
);
208 /* Registers pushed by the arg trampoline */
211 // ios abi compatible frame
/* Prologue: push fp/lr, establish fp, and record the matching CFA unwind ops. */
213 cfa_offset
= npushed
* TARGET_SIZEOF_VOID_P
;
214 mono_add_unwind_op_def_cfa (unwind_ops
, code
, buf
, ARMREG_SP
, cfa_offset
);
215 ARM_PUSH (code
, (1 << fp
) | (1 << ARMREG_LR
));
216 cfa_offset
+= 2 * TARGET_SIZEOF_VOID_P
;
217 mono_add_unwind_op_def_cfa_offset (unwind_ops
, code
, buf
, cfa_offset
);
218 mono_add_unwind_op_offset (unwind_ops
, code
, buf
, fp
, (- cfa_offset
));
219 mono_add_unwind_op_offset (unwind_ops
, code
, buf
, ARMREG_LR
, ((- cfa_offset
) + 4));
220 ARM_MOV_REG_REG (code
, fp
, ARMREG_SP
);
221 mono_add_unwind_op_def_cfa_reg (unwind_ops
, code
, buf
, fp
);
222 /* Allocate stack frame */
223 ARM_SUB_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, 32 + (16 * sizeof (double)));
224 if (MONO_ARCH_FRAME_ALIGNMENT
> 8)
225 ARM_SUB_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, (MONO_ARCH_FRAME_ALIGNMENT
- 8));
/* Frame offsets, negative and fp-relative. NOTE(review): the `offset = ...; offset += ...;` chain between these assignments is missing from the extraction. */
227 info_offset
= -offset
;
229 mrgctx_offset
= -offset
;
231 callee_reg_area_offset
= -offset
;
233 caller_freg_area_offset
= -offset
;
235 callee_freg_area_offset
= -offset
;
237 caller_reg_area_offset
= cfa_offset
- (npushed
* TARGET_SIZEOF_VOID_P
);
239 /* Save info struct which is in r0 */
240 ARM_STR_IMM (code
, arg_reg
, fp
, info_offset
);
/* Save the rgctx register so it can be reloaded after the marshalling call. */
242 ARM_STR_IMM (code
, MONO_ARCH_RGCTX_REG
, fp
, mrgctx_offset
);
243 /* Allocate callee area */
244 ARM_LDR_IMM (code
, ARMREG_IP
, arg_reg
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, stack_usage
));
245 ARM_SUB_REG_REG (code
, ARMREG_SP
, ARMREG_SP
, ARMREG_IP
);
246 /* Allocate callee register area just below the callee area so the slots are correct */
247 ARM_SUB_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, 4 * TARGET_SIZEOF_VOID_P
);
248 if (mono_arm_is_hard_float ()) {
249 /* Save caller fregs */
250 ARM_SUB_REG_IMM8 (code
, ARMREG_IP
, fp
, -caller_freg_area_offset
);
251 for (i
= 0; i
< 8; ++i
)
252 ARM_FSTD (code
, i
* 2, ARMREG_IP
, (i
* sizeof (double)));
256 * The stack now looks like this:
261 * <callee area> <- sp
263 g_assert (mono_arm_thumb_supported ());
265 /* Call start_gsharedvt_call () */
266 /* 6 arguments, needs 2 stack slot, need to clean it up after the call */
267 args_size
= 2 * TARGET_SIZEOF_VOID_P
;
268 ARM_SUB_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, args_size
);
/* arg1 == info struct */
270 ARM_LDR_IMM (code
, ARMREG_R0
, fp
, info_offset
);
271 /* arg2 == caller stack area */
272 ARM_ADD_REG_IMM8 (code
, ARMREG_R1
, fp
, cfa_offset
- 4 * TARGET_SIZEOF_VOID_P
);
273 /* arg3 == callee stack area */
274 ARM_ADD_REG_IMM8 (code
, ARMREG_R2
, ARMREG_SP
, args_size
);
275 /* arg4 == mrgctx reg */
276 ARM_LDR_IMM (code
, ARMREG_R3
, fp
, mrgctx_offset
);
277 /* arg5 == caller freg area */
278 ARM_SUB_REG_IMM8 (code
, ARMREG_IP
, fp
, -caller_freg_area_offset
);
279 ARM_STR_IMM (code
, ARMREG_IP
, ARMREG_SP
, 0);
280 /* arg6 == callee freg area */
281 ARM_SUB_REG_IMM8 (code
, ARMREG_IP
, fp
, -callee_freg_area_offset
);
282 ARM_STR_IMM (code
, ARMREG_IP
, ARMREG_SP
, 4);
/* Load the helper address: AOT path loads through a patched PC-relative slot, JIT path embeds the address inline. NOTE(review): the surrounding `if (aot) { ... } else { ... }` lines are missing here. */
285 ji
= mono_patch_info_list_prepend (ji
, code
- buf
, MONO_PATCH_INFO_JIT_ICALL_ADDR
, GUINT_TO_POINTER (MONO_JIT_ICALL_mono_arm_start_gsharedvt_call
));
286 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_PC
, 0);
288 *(gpointer
*)code
= NULL
;
290 ARM_LDR_REG_REG (code
, ARMREG_IP
, ARMREG_PC
, ARMREG_IP
);
292 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_PC
, 0);
294 *(gpointer
*)code
= (gpointer
)mono_arm_start_gsharedvt_call
;
/* Manual call: set lr to the return address then branch via ip. */
297 ARM_MOV_REG_REG (code
, ARMREG_LR
, ARMREG_PC
);
298 code
= emit_bx (code
, ARMREG_IP
);
/* Pop the two outgoing stack arguments. */
300 ARM_ADD_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, args_size
);
302 /* Make the real method call */
303 /* R0 contains the addr to call */
304 ARM_MOV_REG_REG (code
, ARMREG_IP
, ARMREG_R0
);
305 /* Load argument registers */
306 ARM_LDM (code
, ARMREG_SP
, (1 << ARMREG_R0
) | (1 << ARMREG_R1
) | (1 << ARMREG_R2
) | (1 << ARMREG_R3
));
307 if (mono_arm_is_hard_float ()) {
308 /* Load argument fregs */
309 ARM_SUB_REG_IMM8 (code
, ARMREG_LR
, fp
, -callee_freg_area_offset
);
310 for (i
= 0; i
< 8; ++i
)
311 ARM_FLDD (code
, i
* 2, ARMREG_LR
, (i
* sizeof (double)));
313 /* Pop callee register area */
314 ARM_ADD_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, 4 * TARGET_SIZEOF_VOID_P
);
/* Restore the rgctx register for the callee. */
316 ARM_LDR_IMM (code
, MONO_ARCH_RGCTX_REG
, fp
, mrgctx_offset
);
/* The real call target was stored into info->addr; fetch it. */
319 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
320 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, addr
));
322 /* mono_arch_find_imt_method () depends on this */
323 ARM_ADD_REG_IMM8 (code
, ARMREG_LR
, ARMREG_PC
, 4);
324 ARM_BX (code
, ARMREG_IP
);
325 *((gpointer
*)code
) = NULL
;
/* After the call: dispatch on info->gsharedvt_in to pick the return marshalling path. */
330 /* Branch between IN/OUT cases */
331 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
332 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, gsharedvt_in
));
334 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, 1);
336 ARM_B_COND (code
, ARMCOND_NE
, 0);
/* IN case: load the concrete return value out of the vret area. */
340 /* LR == return marshalling type */
341 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
342 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, ret_marshal
));
344 /* Continue if no marshalling required */
345 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_NONE
);
346 br_ret
[br_ret_index
++] = code
;
347 ARM_B_COND (code
, ARMCOND_EQ
, 0);
349 /* Compute vret area address in LR */
350 ARM_LDR_IMM (code
, ARMREG_LR
, fp
, info_offset
);
351 ARM_LDR_IMM (code
, ARMREG_LR
, ARMREG_LR
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_slot
));
352 /* The slot value is off by 4 */
353 ARM_SUB_REG_IMM8 (code
, ARMREG_LR
, ARMREG_LR
, 4);
354 ARM_SHL_IMM (code
, ARMREG_LR
, ARMREG_LR
, 2);
355 ARM_ADD_REG_REG (code
, ARMREG_LR
, ARMREG_LR
, ARMREG_SP
);
/* Compare against each marshal kind and branch; NOTE(review): the `br [n] = code;` recordings between the compares are missing from the extraction. */
357 /* Branch to specific marshalling code */
358 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_IREG
);
360 ARM_B_COND (code
, ARMCOND_EQ
, 0);
361 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_IREGS
);
363 ARM_B_COND (code
, ARMCOND_EQ
, 0);
364 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_I1
);
366 ARM_B_COND (code
, ARMCOND_EQ
, 0);
367 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_U1
);
369 ARM_B_COND (code
, ARMCOND_EQ
, 0);
370 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_I2
);
372 ARM_B_COND (code
, ARMCOND_EQ
, 0);
373 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_U2
);
375 ARM_B_COND (code
, ARMCOND_EQ
, 0);
376 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_VFP_R4
);
378 ARM_B_COND (code
, ARMCOND_EQ
, 0);
379 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_VFP_R8
);
381 ARM_B_COND (code
, ARMCOND_EQ
, 0);
382 br_ret
[br_ret_index
++] = code
;
/* IREG: single word from the vret area into r0. */
386 arm_patch (br
[0], code
);
387 ARM_LDR_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
388 br_ret
[br_ret_index
++] = code
;
/* IREGS: two words into r0/r1. */
391 arm_patch (br
[1], code
);
392 ARM_LDR_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
393 ARM_LDR_IMM (code
, ARMREG_R1
, ARMREG_LR
, 4);
394 br_ret
[br_ret_index
++] = code
;
/* I1: sign-extended byte. */
397 arm_patch (br
[2], code
);
398 ARM_LDRSB_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
399 br_ret
[br_ret_index
++] = code
;
/* U1: zero-extended byte. */
402 arm_patch (br
[3], code
);
403 ARM_LDRB_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
404 br_ret
[br_ret_index
++] = code
;
/* I2: sign-extended halfword. */
407 arm_patch (br
[4], code
);
408 ARM_LDRSH_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
409 br_ret
[br_ret_index
++] = code
;
/* U2: zero-extended halfword. */
412 arm_patch (br
[5], code
);
413 ARM_LDRH_IMM (code
, ARMREG_R0
, ARMREG_LR
, 0);
414 br_ret
[br_ret_index
++] = code
;
/* VFP R4: single-precision VFP return. */
417 arm_patch (br
[6], code
);
418 ARM_FLDS (code
, ARM_VFP_D0
, ARMREG_LR
, 0);
420 br_ret
[br_ret_index
++] = code
;
/* VFP R8: double-precision VFP return. */
423 arm_patch (br
[7], code
);
424 ARM_FLDD (code
, ARM_VFP_D0
, ARMREG_LR
, 0);
426 br_ret
[br_ret_index
++] = code
;
/* OUT case: store the concrete return value into the buffer whose address was passed in the caller's vret arg register. */
430 arm_patch (br_out
, code
);
432 /* Marshal return value */
433 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
434 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, ret_marshal
));
436 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_IREGS
);
438 ARM_B_COND (code
, ARMCOND_NE
, 0);
441 /* Load vtype ret addr from the caller arg regs */
442 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
443 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_arg_reg
));
444 ARM_SHL_IMM (code
, ARMREG_IP
, ARMREG_IP
, 2);
445 ARM_ADD_REG_REG (code
, ARMREG_IP
, ARMREG_IP
, fp
);
446 ARM_ADD_REG_IMM8 (code
, ARMREG_IP
, ARMREG_IP
, caller_reg_area_offset
);
447 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, 0);
448 /* Save both registers for simplicity */
449 ARM_STR_IMM (code
, ARMREG_R0
, ARMREG_IP
, 0);
450 ARM_STR_IMM (code
, ARMREG_R1
, ARMREG_IP
, 4);
451 br_ret
[br_ret_index
++] = code
;
453 arm_patch (br
[0], code
);
/* Single-register return. */
455 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_IREG
);
457 ARM_B_COND (code
, ARMCOND_NE
, 0);
460 /* Load vtype ret addr from the caller arg regs */
461 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
462 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_arg_reg
));
463 ARM_SHL_IMM (code
, ARMREG_IP
, ARMREG_IP
, 2);
464 ARM_ADD_REG_REG (code
, ARMREG_IP
, ARMREG_IP
, fp
);
465 ARM_ADD_REG_IMM8 (code
, ARMREG_IP
, ARMREG_IP
, caller_reg_area_offset
);
466 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, 0);
467 /* Save the return value to the buffer pointed to by the vret addr */
468 ARM_STR_IMM (code
, ARMREG_R0
, ARMREG_IP
, 0);
469 br_ret
[br_ret_index
++] = code
;
471 arm_patch (br
[0], code
);
/* Byte-sized return. */
473 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_U1
);
475 ARM_B_COND (code
, ARMCOND_NE
, 0);
478 /* Load vtype ret addr from the caller arg regs */
479 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
480 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_arg_reg
));
481 ARM_SHL_IMM (code
, ARMREG_IP
, ARMREG_IP
, 2);
482 ARM_ADD_REG_REG (code
, ARMREG_IP
, ARMREG_IP
, fp
);
483 ARM_ADD_REG_IMM8 (code
, ARMREG_IP
, ARMREG_IP
, caller_reg_area_offset
);
484 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, 0);
485 /* Save the return value to the buffer pointed to by the vret addr */
486 ARM_STRB_IMM (code
, ARMREG_R0
, ARMREG_IP
, 0);
487 br_ret
[br_ret_index
++] = code
;
489 arm_patch (br
[0], code
);
/* Single-precision VFP return. */
491 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_VFP_R4
);
493 ARM_B_COND (code
, ARMCOND_NE
, 0);
496 /* Load vtype ret addr from the caller arg regs */
497 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
498 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_arg_reg
));
499 ARM_SHL_IMM (code
, ARMREG_IP
, ARMREG_IP
, 2);
500 ARM_ADD_REG_REG (code
, ARMREG_IP
, ARMREG_IP
, fp
);
501 ARM_ADD_REG_IMM8 (code
, ARMREG_IP
, ARMREG_IP
, caller_reg_area_offset
);
502 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, 0);
503 /* Save the return value to the buffer pointed to by the vret addr */
504 ARM_FSTS (code
, ARM_VFP_D0
, ARMREG_IP
, 0);
505 br_ret
[br_ret_index
++] = code
;
507 arm_patch (br
[0], code
);
/* Double-precision VFP return. */
509 ARM_CMP_REG_IMM8 (code
, ARMREG_IP
, GSHAREDVT_RET_VFP_R8
);
511 ARM_B_COND (code
, ARMCOND_NE
, 0);
514 /* Load vtype ret addr from the caller arg regs */
515 ARM_LDR_IMM (code
, ARMREG_IP
, fp
, info_offset
);
516 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, MONO_STRUCT_OFFSET (GSharedVtCallInfo
, vret_arg_reg
));
517 ARM_SHL_IMM (code
, ARMREG_IP
, ARMREG_IP
, 2);
518 ARM_ADD_REG_REG (code
, ARMREG_IP
, ARMREG_IP
, fp
);
519 ARM_ADD_REG_IMM8 (code
, ARMREG_IP
, ARMREG_IP
, caller_reg_area_offset
);
520 ARM_LDR_IMM (code
, ARMREG_IP
, ARMREG_IP
, 0);
521 /* Save the return value to the buffer pointed to by the vret addr */
522 ARM_FSTD (code
, ARM_VFP_D0
, ARMREG_IP
, 0);
523 br_ret
[br_ret_index
++] = code
;
525 arm_patch (br
[0], code
);
527 /* OUT other cases */
528 br_ret
[br_ret_index
++] = code
;
/* Patch every recorded forward branch to land on the common epilogue. */
531 for (i
= 0; i
< br_ret_index
; ++i
)
532 arm_patch (br_ret
[i
], code
);
535 /* Restore registers + stack */
536 ARM_MOV_REG_REG (code
, ARMREG_SP
, fp
);
537 ARM_LDM (code
, fp
, (1 << fp
) | (1 << ARMREG_LR
));
538 ARM_ADD_REG_IMM8 (code
, ARMREG_SP
, ARMREG_SP
, cfa_offset
);
540 ARM_BX (code
, ARMREG_LR
);
/* Sanity-check the reserved buffer was large enough, publish the tramp info, flush the icache. */
542 g_assert ((code
- buf
) < buf_len
);
545 *info
= mono_tramp_info_create ("gsharedvt_trampoline", buf
, code
- buf
, ji
, unwind_ops
);
547 mono_arch_flush_icache (buf
, code
- buf
);
554 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo
**info
, gboolean aot
)
556 g_assert_not_reached ();
567 mono_arm_start_gsharedvt_call (GSharedVtCallInfo
*info
, gpointer
*caller
, gpointer
*callee
, gpointer mrgctx_reg
)
569 g_assert_not_reached ();
574 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo
**info
, gboolean aot
)