6 #include <mono/utils/mono-compiler.h>
12 #include "mini-runtime.h"
14 #include "jit-icalls.h"
15 #include "debugger-agent.h"
17 #include <mono/metadata/abi-details.h>
18 #include <mono/metadata/gc-internals.h>
19 #include <mono/metadata/monitor.h>
20 #include <mono/utils/mono-memory-model.h>
22 static GENERATE_GET_CLASS_WITH_CACHE (runtime_helpers
, "System.Runtime.CompilerServices", "RuntimeHelpers")
23 static GENERATE_TRY_GET_CLASS_WITH_CACHE (math
, "System", "Math")
25 /* optimize the simple GetGenericValueImpl/SetGenericValueImpl generic icalls */
27 emit_array_generic_access (MonoCompile
*cfg
, MonoMethodSignature
*fsig
, MonoInst
**args
, int is_set
)
29 MonoInst
*addr
, *store
, *load
;
30 MonoClass
*eklass
= mono_class_from_mono_type_internal (fsig
->params
[2]);
32 /* the bounds check is already done by the callers */
33 addr
= mini_emit_ldelema_1_ins (cfg
, eklass
, args
[0], args
[1], FALSE
);
34 MonoType
*etype
= m_class_get_byval_arg (eklass
);
36 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg
, load
, etype
, args
[2]->dreg
, 0);
37 if (mini_debug_options
.clr_memory_model
&& mini_type_is_reference (etype
))
38 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_REL
);
39 EMIT_NEW_STORE_MEMBASE_TYPE (cfg
, store
, etype
, addr
->dreg
, 0, load
->dreg
);
40 if (mini_type_is_reference (etype
))
41 mini_emit_write_barrier (cfg
, addr
, load
);
43 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg
, load
, etype
, addr
->dreg
, 0);
44 EMIT_NEW_STORE_MEMBASE_TYPE (cfg
, store
, etype
, args
[2]->dreg
, 0, load
->dreg
);
50 mono_type_is_native_blittable (MonoType
*t
)
52 if (MONO_TYPE_IS_REFERENCE (t
))
55 if (MONO_TYPE_IS_PRIMITIVE_SCALAR (t
))
58 MonoClass
*klass
= mono_class_from_mono_type_internal (t
);
60 //MonoClass::blittable depends on mono_class_setup_fields being done.
61 mono_class_setup_fields (klass
);
62 if (!m_class_is_blittable (klass
))
65 // If the native marshal size is different we can't convert PtrToStructure to a type load
66 if (mono_class_native_size (klass
, NULL
) != mono_class_value_size (klass
, NULL
))
73 mini_emit_inst_for_ctor (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
75 const char* cmethod_klass_name_space
= m_class_get_name_space (cmethod
->klass
);
76 const char* cmethod_klass_name
= m_class_get_name (cmethod
->klass
);
77 MonoImage
*cmethod_klass_image
= m_class_get_image (cmethod
->klass
);
78 gboolean in_corlib
= cmethod_klass_image
== mono_defaults
.corlib
;
81 /* Required intrinsics are always used even with -O=-intrins */
83 !strcmp (cmethod_klass_name_space
, "System") &&
84 !strcmp (cmethod_klass_name
, "ByReference`1")) {
85 /* public ByReference(ref T value) */
86 g_assert (fsig
->hasthis
&& fsig
->param_count
== 1);
87 EMIT_NEW_STORE_MEMBASE (cfg
, ins
, OP_STORE_MEMBASE_REG
, args
[0]->dreg
, 0, args
[1]->dreg
);
91 ins
= mono_emit_native_types_intrinsics (cfg
, cmethod
, fsig
, args
);
95 if (!(cfg
->opt
& MONO_OPT_INTRINS
))
98 #ifdef MONO_ARCH_SIMD_INTRINSICS
99 if (cfg
->opt
& MONO_OPT_SIMD
) {
100 ins
= mono_emit_simd_intrinsics (cfg
, cmethod
, fsig
, args
);
110 llvm_emit_inst_for_method (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
, gboolean in_corlib
)
112 MonoInst
*ins
= NULL
;
114 // Convert Math and MathF methods into LLVM intrinsics, e.g. MathF.Sin -> @llvm.sin.f32
115 if (in_corlib
&& !strcmp (m_class_get_name (cmethod
->klass
), "MathF") && cfg
->r4fp
) {
117 if (fsig
->param_count
== 1 && fsig
->params
[0]->type
== MONO_TYPE_R4
) {
118 if (!strcmp (cmethod
->name
, "Ceiling")) {
120 } else if (!strcmp (cmethod
->name
, "Cos")) {
122 } else if (!strcmp (cmethod
->name
, "Exp")) {
124 } else if (!strcmp (cmethod
->name
, "Floor")) {
126 } else if (!strcmp (cmethod
->name
, "Log2")) {
128 } else if (!strcmp (cmethod
->name
, "Log10")) {
130 } else if (!strcmp (cmethod
->name
, "Sin")) {
132 } else if (!strcmp (cmethod
->name
, "Sqrt")) {
134 } else if (!strcmp (cmethod
->name
, "Truncate")) {
137 #if defined(TARGET_X86) || defined(TARGET_AMD64)
138 else if (!strcmp (cmethod
->name
, "Round") && !cfg
->compile_aot
&& (mono_arch_cpu_enumerate_simd_versions () & SIMD_VERSION_SSE41
)) {
139 // special case: emit vroundss for MathF.Round directly instead of what llvm.round.f32 emits
140 // to align with CoreCLR behavior
141 int xreg
= alloc_xreg (cfg
);
142 EMIT_NEW_UNALU (cfg
, ins
, OP_FCONV_TO_R4_X
, xreg
, args
[0]->dreg
);
143 EMIT_NEW_UNALU (cfg
, ins
, OP_SSE41_ROUNDSS
, xreg
, xreg
);
144 ins
->inst_c0
= 0x4; // vroundss xmm0, xmm0, xmm0, 0x4 (mode for rounding)
145 int dreg
= alloc_freg (cfg
);
146 EMIT_NEW_UNALU (cfg
, ins
, OP_EXTRACT_R4
, dreg
, xreg
);
152 if (fsig
->param_count
== 2 && fsig
->params
[0]->type
== MONO_TYPE_R4
&& fsig
->params
[1]->type
== MONO_TYPE_R4
) {
153 if (!strcmp (cmethod
->name
, "Pow")) {
155 } else if (!strcmp (cmethod
->name
, "CopySign")) {
156 opcode
= OP_RCOPYSIGN
;
159 // (float, float, float)
160 if (fsig
->param_count
== 3 && fsig
->params
[0]->type
== MONO_TYPE_R4
&& fsig
->params
[1]->type
== MONO_TYPE_R4
&& fsig
->params
[2]->type
== MONO_TYPE_R4
) {
161 if (!strcmp (cmethod
->name
, "FusedMultiplyAdd")) {
167 MONO_INST_NEW (cfg
, ins
, opcode
);
168 ins
->type
= STACK_R8
;
169 ins
->dreg
= mono_alloc_dreg (cfg
, (MonoStackType
)ins
->type
);
170 ins
->sreg1
= args
[0]->dreg
;
171 if (fsig
->param_count
> 1) {
172 ins
->sreg2
= args
[1]->dreg
;
174 if (fsig
->param_count
> 2) {
175 ins
->sreg3
= args
[2]->dreg
;
177 g_assert (fsig
->param_count
<= 3);
178 MONO_ADD_INS (cfg
->cbb
, ins
);
182 if (cmethod
->klass
== mono_class_try_get_math_class ()) {
184 if (fsig
->param_count
== 1 && fsig
->params
[0]->type
== MONO_TYPE_R8
) {
185 if (!strcmp (cmethod
->name
, "Abs")) {
187 } else if (!strcmp (cmethod
->name
, "Ceiling")) {
189 } else if (!strcmp (cmethod
->name
, "Cos")) {
191 } else if (!strcmp (cmethod
->name
, "Exp")) {
193 } else if (!strcmp (cmethod
->name
, "Floor")) {
195 } else if (!strcmp (cmethod
->name
, "Log")) {
197 } else if (!strcmp (cmethod
->name
, "Log2")) {
199 } else if (!strcmp (cmethod
->name
, "Log10")) {
201 } else if (!strcmp (cmethod
->name
, "Sin")) {
203 } else if (!strcmp (cmethod
->name
, "Sqrt")) {
205 } else if (!strcmp (cmethod
->name
, "Truncate")) {
210 if (fsig
->param_count
== 2 && fsig
->params
[0]->type
== MONO_TYPE_R8
&& fsig
->params
[1]->type
== MONO_TYPE_R8
) {
211 // Max and Min can only be optimized in fast math mode
212 if (!strcmp (cmethod
->name
, "Max") && mono_use_fast_math
) {
214 } else if (!strcmp (cmethod
->name
, "Min") && mono_use_fast_math
) {
216 } else if (!strcmp (cmethod
->name
, "Pow")) {
218 } else if (!strcmp (cmethod
->name
, "CopySign")) {
219 opcode
= OP_FCOPYSIGN
;
222 // (double, double, double)
223 if (fsig
->param_count
== 3 && fsig
->params
[0]->type
== MONO_TYPE_R8
&& fsig
->params
[1]->type
== MONO_TYPE_R8
&& fsig
->params
[2]->type
== MONO_TYPE_R8
) {
224 if (!strcmp (cmethod
->name
, "FusedMultiplyAdd")) {
229 // Math also contains overloads for floats (MathF inlines them)
231 if (fsig
->param_count
== 1 && fsig
->params
[0]->type
== MONO_TYPE_R4
) {
232 if (!strcmp (cmethod
->name
, "Abs")) {
237 if (fsig
->param_count
== 2 && fsig
->params
[0]->type
== MONO_TYPE_R4
&& fsig
->params
[1]->type
== MONO_TYPE_R4
) {
238 if (!strcmp (cmethod
->name
, "Max") && mono_use_fast_math
) {
240 } else if (!strcmp (cmethod
->name
, "Min") && mono_use_fast_math
) {
242 } else if (!strcmp (cmethod
->name
, "Pow")) {
247 if (opcode
&& fsig
->param_count
> 0) {
248 MONO_INST_NEW (cfg
, ins
, opcode
);
249 ins
->type
= STACK_R8
;
250 ins
->dreg
= mono_alloc_dreg (cfg
, (MonoStackType
)ins
->type
);
251 ins
->sreg1
= args
[0]->dreg
;
252 if (fsig
->param_count
> 1) {
253 ins
->sreg2
= args
[1]->dreg
;
255 if (fsig
->param_count
> 2) {
256 ins
->sreg3
= args
[2]->dreg
;
258 g_assert (fsig
->param_count
<= 3);
259 MONO_ADD_INS (cfg
->cbb
, ins
);
263 if (cfg
->opt
& MONO_OPT_CMOV
) {
264 if (strcmp (cmethod
->name
, "Min") == 0) {
265 if (fsig
->params
[0]->type
== MONO_TYPE_I4
)
267 if (fsig
->params
[0]->type
== MONO_TYPE_U4
)
269 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
)
271 else if (fsig
->params
[0]->type
== MONO_TYPE_U8
)
273 } else if (strcmp (cmethod
->name
, "Max") == 0) {
274 if (fsig
->params
[0]->type
== MONO_TYPE_I4
)
276 if (fsig
->params
[0]->type
== MONO_TYPE_U4
)
278 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
)
280 else if (fsig
->params
[0]->type
== MONO_TYPE_U8
)
285 if (opcode
&& fsig
->param_count
== 2) {
286 MONO_INST_NEW (cfg
, ins
, opcode
);
287 ins
->type
= fsig
->params
[0]->type
== MONO_TYPE_I4
? STACK_I4
: STACK_I8
;
288 ins
->dreg
= mono_alloc_dreg (cfg
, (MonoStackType
)ins
->type
);
289 ins
->sreg1
= args
[0]->dreg
;
290 ins
->sreg2
= args
[1]->dreg
;
291 MONO_ADD_INS (cfg
->cbb
, ins
);
295 if (in_corlib
&& !strcmp (m_class_get_name (cmethod
->klass
), "Buffer")) {
296 if (!strcmp (cmethod
->name
, "Memmove") && fsig
->param_count
== 3 && fsig
->params
[0]->type
== MONO_TYPE_PTR
&& fsig
->params
[1]->type
== MONO_TYPE_PTR
) {
297 // throw NRE if src or dst are nulls
298 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_COMPARE_IMM
, -1, args
[0]->dreg
, 0);
299 MONO_EMIT_NEW_COND_EXC (cfg
, EQ
, "NullReferenceException");
300 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_COMPARE_IMM
, -1, args
[1]->dreg
, 0);
301 MONO_EMIT_NEW_COND_EXC (cfg
, EQ
, "NullReferenceException");
303 MONO_INST_NEW (cfg
, ins
, OP_MEMMOVE
);
304 ins
->sreg1
= args
[0]->dreg
; // i1* dst
305 ins
->sreg2
= args
[1]->dreg
; // i1* src
306 ins
->sreg3
= args
[2]->dreg
; // i32/i64 len
307 MONO_ADD_INS (cfg
->cbb
, ins
);
315 emit_span_intrinsics (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
319 MonoClassField
*ptr_field
= mono_class_get_field_from_name_full (cmethod
->klass
, "_pointer", NULL
);
321 /* Portable Span<T> */
324 if (!strcmp (cmethod
->name
, "get_Item")) {
325 MonoClassField
*length_field
= mono_class_get_field_from_name_full (cmethod
->klass
, "_length", NULL
);
327 g_assert (length_field
);
329 MonoGenericClass
*gclass
= mono_class_get_generic_class (cmethod
->klass
);
330 MonoClass
*param_class
= mono_class_from_mono_type_internal (gclass
->context
.class_inst
->type_argv
[0]);
332 if (mini_is_gsharedvt_variable_klass (param_class
))
335 int span_reg
= args
[0]->dreg
;
336 /* Load _pointer.Value */
337 int base_reg
= alloc_preg (cfg
);
338 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOAD_MEMBASE
, base_reg
, span_reg
, ptr_field
->offset
- MONO_ABI_SIZEOF (MonoObject
));
339 /* Similar to mini_emit_ldelema_1_ins () */
340 int size
= mono_class_array_element_size (param_class
);
342 int index_reg
= mini_emit_sext_index_reg (cfg
, args
[1]);
344 mini_emit_bounds_check_offset (cfg
, span_reg
, length_field
->offset
- MONO_ABI_SIZEOF (MonoObject
), index_reg
, NULL
);
346 // FIXME: Sign extend index ?
348 int mult_reg
= alloc_preg (cfg
);
349 int add_reg
= alloc_preg (cfg
);
351 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_MUL_IMM
, mult_reg
, index_reg
, size
);
352 EMIT_NEW_BIALU (cfg
, ins
, OP_PADD
, add_reg
, base_reg
, mult_reg
);
353 ins
->klass
= param_class
;
354 ins
->type
= STACK_MP
;
357 } else if (!strcmp (cmethod
->name
, "get_Length")) {
358 MonoClassField
*length_field
= mono_class_get_field_from_name_full (cmethod
->klass
, "_length", NULL
);
359 g_assert (length_field
);
362 * FIXME: This doesn't work with abcrem, since the src is a unique LDADDR not
363 * the same array object.
365 MONO_INST_NEW (cfg
, ins
, OP_LDLEN
);
366 ins
->dreg
= alloc_preg (cfg
);
367 ins
->sreg1
= args
[0]->dreg
;
368 ins
->inst_imm
= length_field
->offset
- MONO_ABI_SIZEOF (MonoObject
);
369 ins
->type
= STACK_I4
;
370 MONO_ADD_INS (cfg
->cbb
, ins
);
372 cfg
->flags
|= MONO_CFG_NEEDS_DECOMPOSE
;
373 cfg
->cbb
->needs_decompose
= TRUE
;
382 emit_unsafe_intrinsics (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
386 MonoGenericContext
*ctx
= mono_method_get_context (cmethod
);
389 if (!strcmp (cmethod
->name
, "As")) {
391 g_assert (ctx
->method_inst
);
393 t
= ctx
->method_inst
->type_argv
[0];
394 if (mini_is_gsharedvt_variable_type (t
))
396 if (ctx
->method_inst
->type_argc
== 2) {
397 dreg
= alloc_preg (cfg
);
398 EMIT_NEW_UNALU (cfg
, ins
, OP_MOVE
, dreg
, args
[0]->dreg
);
399 ins
->type
= STACK_OBJ
;
400 ins
->klass
= mono_get_object_class ();
402 } else if (ctx
->method_inst
->type_argc
== 1) {
403 // Casts the given object to the specified type, performs no dynamic type checking.
404 g_assert (fsig
->param_count
== 1);
405 g_assert (fsig
->params
[0]->type
== MONO_TYPE_OBJECT
);
406 dreg
= alloc_preg (cfg
);
407 EMIT_NEW_UNALU (cfg
, ins
, OP_MOVE
, dreg
, args
[0]->dreg
);
408 ins
->type
= STACK_OBJ
;
409 ins
->klass
= mono_class_from_mono_type_internal (ctx
->method_inst
->type_argv
[0]);
412 } else if (!strcmp (cmethod
->name
, "AsPointer")) {
414 g_assert (ctx
->method_inst
);
415 g_assert (ctx
->method_inst
->type_argc
== 1);
416 g_assert (fsig
->param_count
== 1);
418 dreg
= alloc_preg (cfg
);
419 EMIT_NEW_UNALU (cfg
, ins
, OP_MOVE
, dreg
, args
[0]->dreg
);
420 ins
->type
= STACK_PTR
;
422 } else if (!strcmp (cmethod
->name
, "AsRef")) {
424 g_assert (ctx
->method_inst
);
425 g_assert (ctx
->method_inst
->type_argc
== 1);
426 g_assert (fsig
->param_count
== 1);
428 dreg
= alloc_preg (cfg
);
429 EMIT_NEW_UNALU (cfg
, ins
, OP_MOVE
, dreg
, args
[0]->dreg
);
430 ins
->type
= STACK_OBJ
;
431 ins
->klass
= mono_get_object_class ();
433 } else if (!strcmp (cmethod
->name
, "AreSame")) {
435 g_assert (ctx
->method_inst
);
436 g_assert (ctx
->method_inst
->type_argc
== 1);
437 g_assert (fsig
->param_count
== 2);
439 dreg
= alloc_ireg (cfg
);
440 EMIT_NEW_BIALU (cfg
, ins
, OP_COMPARE
, -1, args
[0]->dreg
, args
[1]->dreg
);
441 EMIT_NEW_UNALU (cfg
, ins
, OP_PCEQ
, dreg
, -1);
443 } else if (!strcmp (cmethod
->name
, "IsAddressLessThan")) {
445 g_assert (ctx
->method_inst
);
446 g_assert (ctx
->method_inst
->type_argc
== 1);
447 g_assert (fsig
->param_count
== 2);
449 dreg
= alloc_ireg (cfg
);
450 EMIT_NEW_BIALU (cfg
, ins
, OP_COMPARE
, -1, args
[0]->dreg
, args
[1]->dreg
);
451 EMIT_NEW_UNALU (cfg
, ins
, OP_PCLT_UN
, dreg
, -1);
453 } else if (!strcmp (cmethod
->name
, "IsAddressGreaterThan")) {
455 g_assert (ctx
->method_inst
);
456 g_assert (ctx
->method_inst
->type_argc
== 1);
457 g_assert (fsig
->param_count
== 2);
459 dreg
= alloc_ireg (cfg
);
460 EMIT_NEW_BIALU (cfg
, ins
, OP_COMPARE
, -1, args
[0]->dreg
, args
[1]->dreg
);
461 EMIT_NEW_UNALU (cfg
, ins
, OP_PCGT_UN
, dreg
, -1);
463 } else if (!strcmp (cmethod
->name
, "Add")) {
465 g_assert (ctx
->method_inst
);
466 g_assert (ctx
->method_inst
->type_argc
== 1);
467 g_assert (fsig
->param_count
== 2);
469 int mul_reg
= alloc_preg (cfg
);
471 t
= ctx
->method_inst
->type_argv
[0];
473 if (mini_is_gsharedvt_variable_type (t
)) {
474 esize_ins
= mini_emit_get_gsharedvt_info_klass (cfg
, mono_class_from_mono_type_internal (t
), MONO_RGCTX_INFO_CLASS_SIZEOF
);
475 if (SIZEOF_REGISTER
== 8)
476 MONO_EMIT_NEW_UNALU (cfg
, OP_SEXT_I4
, esize_ins
->dreg
, esize_ins
->dreg
);
478 t
= mini_type_get_underlying_type (t
);
479 int esize
= mono_class_array_element_size (mono_class_from_mono_type_internal (t
));
480 EMIT_NEW_ICONST (cfg
, esize_ins
, esize
);
482 esize_ins
->type
= STACK_I4
;
484 EMIT_NEW_BIALU (cfg
, ins
, OP_PMUL
, mul_reg
, args
[1]->dreg
, esize_ins
->dreg
);
485 ins
->type
= STACK_PTR
;
487 dreg
= alloc_preg (cfg
);
488 EMIT_NEW_BIALU (cfg
, ins
, OP_PADD
, dreg
, args
[0]->dreg
, mul_reg
);
489 ins
->type
= STACK_PTR
;
491 } else if (!strcmp (cmethod
->name
, "AddByteOffset")) {
493 g_assert (ctx
->method_inst
);
494 g_assert (ctx
->method_inst
->type_argc
== 1);
495 g_assert (fsig
->param_count
== 2);
497 if (fsig
->params
[1]->type
== MONO_TYPE_I
) {
498 int dreg
= alloc_preg (cfg
);
499 EMIT_NEW_BIALU (cfg
, ins
, OP_PADD
, dreg
, args
[0]->dreg
, args
[1]->dreg
);
500 ins
->type
= STACK_PTR
;
502 } else if (fsig
->params
[1]->type
== MONO_TYPE_U8
) {
503 int sreg
= args
[1]->dreg
;
504 if (SIZEOF_REGISTER
== 4) {
505 sreg
= alloc_ireg (cfg
);
506 EMIT_NEW_UNALU (cfg
, ins
, OP_LCONV_TO_U4
, sreg
, args
[1]->dreg
);
508 int dreg
= alloc_preg (cfg
);
509 EMIT_NEW_BIALU (cfg
, ins
, OP_PADD
, dreg
, args
[0]->dreg
, sreg
);
510 ins
->type
= STACK_PTR
;
513 } else if (!strcmp (cmethod
->name
, "SizeOf")) {
515 g_assert (ctx
->method_inst
);
516 g_assert (ctx
->method_inst
->type_argc
== 1);
517 g_assert (fsig
->param_count
== 0);
519 t
= ctx
->method_inst
->type_argv
[0];
520 if (mini_is_gsharedvt_variable_type (t
)) {
521 ins
= mini_emit_get_gsharedvt_info_klass (cfg
, mono_class_from_mono_type_internal (t
), MONO_RGCTX_INFO_CLASS_SIZEOF
);
523 int esize
= mono_type_size (t
, &align
);
524 EMIT_NEW_ICONST (cfg
, ins
, esize
);
526 ins
->type
= STACK_I4
;
528 } else if (!strcmp (cmethod
->name
, "ReadUnaligned")) {
530 g_assert (ctx
->method_inst
);
531 g_assert (ctx
->method_inst
->type_argc
== 1);
532 g_assert (fsig
->param_count
== 1);
534 t
= ctx
->method_inst
->type_argv
[0];
535 t
= mini_get_underlying_type (t
);
536 if (MONO_TYPE_IS_PRIMITIVE (t
) && t
->type
!= MONO_TYPE_R4
&& t
->type
!= MONO_TYPE_R8
) {
537 dreg
= alloc_ireg (cfg
);
538 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg
, ins
, t
, args
[0]->dreg
, 0);
539 ins
->type
= STACK_I4
;
540 ins
->flags
|= MONO_INST_UNALIGNED
;
543 } else if (!strcmp (cmethod
->name
, "WriteUnaligned")) {
545 g_assert (ctx
->method_inst
);
546 g_assert (ctx
->method_inst
->type_argc
== 1);
547 g_assert (fsig
->param_count
== 2);
549 t
= ctx
->method_inst
->type_argv
[0];
550 t
= mini_get_underlying_type (t
);
551 if (MONO_TYPE_IS_PRIMITIVE (t
) && t
->type
!= MONO_TYPE_R4
&& t
->type
!= MONO_TYPE_R8
) {
552 dreg
= alloc_ireg (cfg
);
553 EMIT_NEW_STORE_MEMBASE_TYPE (cfg
, ins
, t
, args
[0]->dreg
, 0, args
[1]->dreg
);
554 ins
->flags
|= MONO_INST_UNALIGNED
;
557 } else if (!strcmp (cmethod
->name
, "ByteOffset")) {
559 g_assert (ctx
->method_inst
);
560 g_assert (ctx
->method_inst
->type_argc
== 1);
561 g_assert (fsig
->param_count
== 2);
563 int dreg
= alloc_preg (cfg
);
564 EMIT_NEW_BIALU (cfg
, ins
, OP_PSUB
, dreg
, args
[1]->dreg
, args
[0]->dreg
);
565 ins
->type
= STACK_PTR
;
568 #ifdef ENABLE_NETCORE
569 else if (!strcmp (cmethod
->name
, "InitBlockUnaligned")) {
570 g_assert (fsig
->param_count
== 3);
572 mini_emit_memory_init_bytes (cfg
, args
[0], args
[1], args
[2], MONO_INST_UNALIGNED
);
573 MONO_INST_NEW (cfg
, ins
, OP_NOP
);
574 MONO_ADD_INS (cfg
->cbb
, ins
);
583 emit_jit_helpers_intrinsics (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
587 MonoGenericContext
*ctx
= mono_method_get_context (cmethod
);
590 if (!strcmp (cmethod
->name
, "EnumEquals") || !strcmp (cmethod
->name
, "EnumCompareTo")) {
592 g_assert (ctx
->method_inst
);
593 g_assert (ctx
->method_inst
->type_argc
== 1);
594 g_assert (fsig
->param_count
== 2);
596 t
= ctx
->method_inst
->type_argv
[0];
597 t
= mini_get_underlying_type (t
);
598 if (mini_is_gsharedvt_variable_type (t
))
601 gboolean is_i8
= (t
->type
== MONO_TYPE_I8
|| t
->type
== MONO_TYPE_U8
);
602 gboolean is_unsigned
= (t
->type
== MONO_TYPE_U1
|| t
->type
== MONO_TYPE_U2
|| t
->type
== MONO_TYPE_U4
|| t
->type
== MONO_TYPE_U8
|| t
->type
== MONO_TYPE_U
);
603 int cmp_op
, ceq_op
, cgt_op
, clt_op
;
606 cmp_op
= OP_LCOMPARE
;
608 cgt_op
= is_unsigned
? OP_LCGT_UN
: OP_LCGT
;
609 clt_op
= is_unsigned
? OP_LCLT_UN
: OP_LCLT
;
611 cmp_op
= OP_ICOMPARE
;
613 cgt_op
= is_unsigned
? OP_ICGT_UN
: OP_ICGT
;
614 clt_op
= is_unsigned
? OP_ICLT_UN
: OP_ICLT
;
617 if (!strcmp (cmethod
->name
, "EnumEquals")) {
618 dreg
= alloc_ireg (cfg
);
619 EMIT_NEW_BIALU (cfg
, ins
, cmp_op
, -1, args
[0]->dreg
, args
[1]->dreg
);
620 EMIT_NEW_UNALU (cfg
, ins
, ceq_op
, dreg
, -1);
622 // Use the branchless code (a > b) - (a < b)
625 reg1
= alloc_ireg (cfg
);
626 reg2
= alloc_ireg (cfg
);
627 dreg
= alloc_ireg (cfg
);
629 EMIT_NEW_BIALU (cfg
, ins
, cmp_op
, -1, args
[0]->dreg
, args
[1]->dreg
);
630 EMIT_NEW_UNALU (cfg
, ins
, cgt_op
, reg1
, -1);
631 EMIT_NEW_BIALU (cfg
, ins
, cmp_op
, -1, args
[0]->dreg
, args
[1]->dreg
);
632 EMIT_NEW_UNALU (cfg
, ins
, clt_op
, reg2
, -1);
633 EMIT_NEW_BIALU (cfg
, ins
, OP_ISUB
, dreg
, reg1
, reg2
);
642 mini_emit_inst_for_method (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
644 MonoInst
*ins
= NULL
;
645 MonoClass
*runtime_helpers_class
= mono_class_get_runtime_helpers_class ();
647 const char* cmethod_klass_name_space
= m_class_get_name_space (cmethod
->klass
);
648 const char* cmethod_klass_name
= m_class_get_name (cmethod
->klass
);
649 MonoImage
*cmethod_klass_image
= m_class_get_image (cmethod
->klass
);
650 gboolean in_corlib
= cmethod_klass_image
== mono_defaults
.corlib
;
652 /* Required intrinsics are always used even with -O=-intrins */
654 !strcmp (cmethod_klass_name_space
, "System") &&
655 !strcmp (cmethod_klass_name
, "ByReference`1") &&
656 !strcmp (cmethod
->name
, "get_Value")) {
657 g_assert (fsig
->hasthis
&& fsig
->param_count
== 0);
658 int dreg
= alloc_preg (cfg
);
659 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOAD_MEMBASE
, dreg
, args
[0]->dreg
, 0);
661 } else if (in_corlib
&& cmethod
->klass
== mono_defaults
.object_class
) {
662 if (!strcmp (cmethod
->name
, "GetRawData")) {
663 int dreg
= alloc_preg (cfg
);
664 EMIT_NEW_BIALU_IMM (cfg
, ins
, OP_PADD_IMM
, dreg
, args
[0]->dreg
, MONO_ABI_SIZEOF (MonoObject
));
669 if (!(cfg
->opt
& MONO_OPT_INTRINS
))
672 if (cmethod
->klass
== mono_defaults
.string_class
) {
673 if (strcmp (cmethod
->name
, "get_Chars") == 0 && fsig
->param_count
+ fsig
->hasthis
== 2) {
674 int dreg
= alloc_ireg (cfg
);
675 int index_reg
= alloc_preg (cfg
);
676 int add_reg
= alloc_preg (cfg
);
678 #if SIZEOF_REGISTER == 8
679 if (COMPILE_LLVM (cfg
)) {
680 MONO_EMIT_NEW_UNALU (cfg
, OP_ZEXT_I4
, index_reg
, args
[1]->dreg
);
682 /* The array reg is 64 bits but the index reg is only 32 */
683 MONO_EMIT_NEW_UNALU (cfg
, OP_SEXT_I4
, index_reg
, args
[1]->dreg
);
686 index_reg
= args
[1]->dreg
;
688 MONO_EMIT_BOUNDS_CHECK (cfg
, args
[0]->dreg
, MonoString
, length
, index_reg
);
690 #if defined(TARGET_X86) || defined(TARGET_AMD64)
691 EMIT_NEW_X86_LEA (cfg
, ins
, args
[0]->dreg
, index_reg
, 1, MONO_STRUCT_OFFSET (MonoString
, chars
));
693 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADU2_MEMBASE
, dreg
,
696 int mult_reg
= alloc_preg (cfg
);
697 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHL_IMM
, mult_reg
, index_reg
, 1);
698 MONO_EMIT_NEW_BIALU (cfg
, OP_PADD
, add_reg
, mult_reg
, args
[0]->dreg
);
699 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADU2_MEMBASE
, dreg
,
700 add_reg
, MONO_STRUCT_OFFSET (MonoString
, chars
));
702 mini_type_from_op (cfg
, ins
, NULL
, NULL
);
704 } else if (strcmp (cmethod
->name
, "get_Length") == 0 && fsig
->param_count
+ fsig
->hasthis
== 1) {
705 int dreg
= alloc_ireg (cfg
);
706 /* Decompose later to allow more optimizations */
707 EMIT_NEW_UNALU (cfg
, ins
, OP_STRLEN
, dreg
, args
[0]->dreg
);
708 ins
->type
= STACK_I4
;
709 ins
->flags
|= MONO_INST_FAULT
;
710 cfg
->cbb
->needs_decompose
= TRUE
;
711 cfg
->flags
|= MONO_CFG_NEEDS_DECOMPOSE
;
716 } else if (cmethod
->klass
== mono_defaults
.object_class
) {
717 if (strcmp (cmethod
->name
, "GetType") == 0 && fsig
->param_count
+ fsig
->hasthis
== 1) {
718 int dreg
= alloc_ireg_ref (cfg
);
719 int vt_reg
= alloc_preg (cfg
);
720 MONO_EMIT_NEW_LOAD_MEMBASE_FAULT (cfg
, vt_reg
, args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoObject
, vtable
));
721 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOAD_MEMBASE
, dreg
, vt_reg
, MONO_STRUCT_OFFSET (MonoVTable
, type
));
722 mini_type_from_op (cfg
, ins
, NULL
, NULL
);
725 } else if (!cfg
->backend
->emulate_mul_div
&& strcmp (cmethod
->name
, "InternalGetHashCode") == 0 && fsig
->param_count
== 1 && !mono_gc_is_moving ()) {
726 int dreg
= alloc_ireg (cfg
);
727 int t1
= alloc_ireg (cfg
);
729 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_SHL_IMM
, t1
, args
[0]->dreg
, 3);
730 EMIT_NEW_BIALU_IMM (cfg
, ins
, OP_MUL_IMM
, dreg
, t1
, 2654435761u);
731 ins
->type
= STACK_I4
;
734 } else if (strcmp (cmethod
->name
, ".ctor") == 0 && fsig
->param_count
== 0) {
735 MONO_INST_NEW (cfg
, ins
, OP_NOP
);
736 MONO_ADD_INS (cfg
->cbb
, ins
);
740 } else if (cmethod
->klass
== mono_defaults
.array_class
) {
741 if (strcmp (cmethod
->name
, "GetGenericValueImpl") == 0 && fsig
->param_count
+ fsig
->hasthis
== 3 && !cfg
->gsharedvt
)
742 return emit_array_generic_access (cfg
, fsig
, args
, FALSE
);
743 else if (strcmp (cmethod
->name
, "SetGenericValueImpl") == 0 && fsig
->param_count
+ fsig
->hasthis
== 3 && !cfg
->gsharedvt
)
744 return emit_array_generic_access (cfg
, fsig
, args
, TRUE
);
745 else if (!strcmp (cmethod
->name
, "GetRawSzArrayData")) {
746 int dreg
= alloc_preg (cfg
);
747 EMIT_NEW_BIALU_IMM (cfg
, ins
, OP_PADD_IMM
, dreg
, args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoArray
, vector
));
751 #ifndef MONO_BIG_ARRAYS
753 * This is an inline version of GetLength/GetLowerBound(0) used frequently in
756 else if (((strcmp (cmethod
->name
, "GetLength") == 0 && fsig
->param_count
+ fsig
->hasthis
== 2) ||
757 (strcmp (cmethod
->name
, "GetLowerBound") == 0 && fsig
->param_count
+ fsig
->hasthis
== 2)) &&
758 args
[1]->opcode
== OP_ICONST
&& args
[1]->inst_c0
== 0) {
759 int dreg
= alloc_ireg (cfg
);
760 int bounds_reg
= alloc_ireg_mp (cfg
);
761 MonoBasicBlock
*end_bb
, *szarray_bb
;
762 gboolean get_length
= strcmp (cmethod
->name
, "GetLength") == 0;
764 NEW_BBLOCK (cfg
, end_bb
);
765 NEW_BBLOCK (cfg
, szarray_bb
);
767 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg
, ins
, OP_LOAD_MEMBASE
, bounds_reg
,
768 args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoArray
, bounds
));
769 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_COMPARE_IMM
, -1, bounds_reg
, 0);
770 MONO_EMIT_NEW_BRANCH_BLOCK (cfg
, OP_IBEQ
, szarray_bb
);
771 /* Non-szarray case */
773 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADI4_MEMBASE
, dreg
,
774 bounds_reg
, MONO_STRUCT_OFFSET (MonoArrayBounds
, length
));
776 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADI4_MEMBASE
, dreg
,
777 bounds_reg
, MONO_STRUCT_OFFSET (MonoArrayBounds
, lower_bound
));
778 MONO_EMIT_NEW_BRANCH_BLOCK (cfg
, OP_BR
, end_bb
);
779 MONO_START_BB (cfg
, szarray_bb
);
782 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADI4_MEMBASE
, dreg
,
783 args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoArray
, max_length
));
785 MONO_EMIT_NEW_ICONST (cfg
, dreg
, 0);
786 MONO_START_BB (cfg
, end_bb
);
788 EMIT_NEW_UNALU (cfg
, ins
, OP_MOVE
, dreg
, dreg
);
789 ins
->type
= STACK_I4
;
795 if (cmethod
->name
[0] != 'g')
798 if (strcmp (cmethod
->name
, "get_Rank") == 0 && fsig
->param_count
+ fsig
->hasthis
== 1) {
799 int dreg
= alloc_ireg (cfg
);
800 int vtable_reg
= alloc_preg (cfg
);
801 MONO_EMIT_NEW_LOAD_MEMBASE_OP_FAULT (cfg
, OP_LOAD_MEMBASE
, vtable_reg
,
802 args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoObject
, vtable
));
803 EMIT_NEW_LOAD_MEMBASE (cfg
, ins
, OP_LOADU1_MEMBASE
, dreg
,
804 vtable_reg
, MONO_STRUCT_OFFSET (MonoVTable
, rank
));
805 mini_type_from_op (cfg
, ins
, NULL
, NULL
);
808 } else if (strcmp (cmethod
->name
, "get_Length") == 0 && fsig
->param_count
+ fsig
->hasthis
== 1) {
809 int dreg
= alloc_ireg (cfg
);
811 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg
, ins
, OP_LOADI4_MEMBASE
, dreg
,
812 args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoArray
, max_length
));
813 mini_type_from_op (cfg
, ins
, NULL
, NULL
);
818 } else if (cmethod
->klass
== runtime_helpers_class
) {
819 if (strcmp (cmethod
->name
, "get_OffsetToStringData") == 0 && fsig
->param_count
== 0) {
820 EMIT_NEW_ICONST (cfg
, ins
, MONO_STRUCT_OFFSET (MonoString
, chars
));
822 } else if (strcmp (cmethod
->name
, "IsReferenceOrContainsReferences") == 0 && fsig
->param_count
== 0) {
823 MonoGenericContext
*ctx
= mono_method_get_context (cmethod
);
825 g_assert (ctx
->method_inst
);
826 g_assert (ctx
->method_inst
->type_argc
== 1);
827 MonoType
*arg_type
= ctx
->method_inst
->type_argv
[0];
833 /* Resolve the argument class as possible so we can handle common cases fast */
834 t
= mini_get_underlying_type (arg_type
);
835 klass
= mono_class_from_mono_type_internal (t
);
836 mono_class_init_internal (klass
);
837 if (MONO_TYPE_IS_REFERENCE (t
))
838 EMIT_NEW_ICONST (cfg
, ins
, 1);
839 else if (MONO_TYPE_IS_PRIMITIVE (t
))
840 EMIT_NEW_ICONST (cfg
, ins
, 0);
841 else if (cfg
->gshared
&& (t
->type
== MONO_TYPE_VAR
|| t
->type
== MONO_TYPE_MVAR
) && !mini_type_var_is_vt (t
))
842 EMIT_NEW_ICONST (cfg
, ins
, 1);
843 else if (!cfg
->gshared
|| !mini_class_check_context_used (cfg
, klass
))
844 EMIT_NEW_ICONST (cfg
, ins
, m_class_has_references (klass
) ? 1 : 0);
846 g_assert (cfg
->gshared
);
848 /* Have to use the original argument class here */
849 MonoClass
*arg_class
= mono_class_from_mono_type_internal (arg_type
);
850 int context_used
= mini_class_check_context_used (cfg
, arg_class
);
852 /* This returns 1 or 2 */
853 MonoInst
*info
= mini_emit_get_rgctx_klass (cfg
, context_used
, arg_class
, MONO_RGCTX_INFO_CLASS_IS_REF_OR_CONTAINS_REFS
);
854 int dreg
= alloc_ireg (cfg
);
855 EMIT_NEW_BIALU_IMM (cfg
, ins
, OP_ISUB_IMM
, dreg
, info
->dreg
, 1);
859 } else if (strcmp (cmethod
->name
, "IsBitwiseEquatable") == 0 && fsig
->param_count
== 0) {
860 MonoGenericContext
*ctx
= mono_method_get_context (cmethod
);
862 g_assert (ctx
->method_inst
);
863 g_assert (ctx
->method_inst
->type_argc
== 1);
864 MonoType
*arg_type
= ctx
->method_inst
->type_argv
[0];
868 /* Resolve the argument class as possible so we can handle common cases fast */
869 t
= mini_get_underlying_type (arg_type
);
871 if (MONO_TYPE_IS_PRIMITIVE (t
) && t
->type
!= MONO_TYPE_R4
&& t
->type
!= MONO_TYPE_R8
)
872 EMIT_NEW_ICONST (cfg
, ins
, 1);
874 EMIT_NEW_ICONST (cfg
, ins
, 0);
876 } else if (!strcmp (cmethod
->name
, "ObjectHasComponentSize")) {
877 g_assert (fsig
->param_count
== 1);
878 g_assert (fsig
->params
[0]->type
== MONO_TYPE_OBJECT
);
879 // Return true for arrays and string
882 dreg
= alloc_ireg (cfg
);
884 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOAD_MEMBASE
, dreg
, args
[0]->dreg
, MONO_STRUCT_OFFSET (MonoObject
, vtable
));
885 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg
, OP_LOADU1_MEMBASE
, dreg
, dreg
, MONO_STRUCT_OFFSET (MonoVTable
, flags
));
886 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_IAND_IMM
, dreg
, dreg
, MONO_VT_FLAG_ARRAY_OR_STRING
);
887 EMIT_NEW_BIALU_IMM (cfg
, ins
, OP_COMPARE_IMM
, -1, dreg
, 0);
888 EMIT_NEW_UNALU (cfg
, ins
, OP_ICGT
, dreg
, -1);
889 ins
->type
= STACK_I4
;
893 } else if (cmethod
->klass
== mono_defaults
.monitor_class
) {
894 gboolean is_enter
= FALSE
;
895 gboolean is_v4
= FALSE
;
897 if (!strcmp (cmethod
->name
, "Enter") && fsig
->param_count
== 2 && fsig
->params
[1]->byref
) {
901 if (!strcmp (cmethod
->name
, "Enter") && fsig
->param_count
== 1)
906 * To make async stack traces work, icalls which can block should have a wrapper.
907 * For Monitor.Enter, emit two calls: a fastpath which doesn't have a wrapper, and a slowpath, which does.
909 MonoBasicBlock
*end_bb
;
911 NEW_BBLOCK (cfg
, end_bb
);
914 ins
= mono_emit_jit_icall (cfg
, mono_monitor_enter_v4_fast
, args
);
916 ins
= mono_emit_jit_icall (cfg
, mono_monitor_enter_fast
, args
);
918 MONO_EMIT_NEW_BIALU_IMM (cfg
, OP_ICOMPARE_IMM
, -1, ins
->dreg
, 0);
919 MONO_EMIT_NEW_BRANCH_BLOCK (cfg
, OP_IBNE_UN
, end_bb
);
922 ins
= mono_emit_jit_icall (cfg
, mono_monitor_enter_v4_internal
, args
);
924 ins
= mono_emit_jit_icall (cfg
, mono_monitor_enter_internal
, args
);
926 MONO_START_BB (cfg
, end_bb
);
929 } else if (cmethod
->klass
== mono_defaults
.thread_class
) {
930 if (strcmp (cmethod
->name
, "SpinWait_nop") == 0 && fsig
->param_count
== 0) {
931 MONO_INST_NEW (cfg
, ins
, OP_RELAXED_NOP
);
932 MONO_ADD_INS (cfg
->cbb
, ins
);
934 } else if (strcmp (cmethod
->name
, "MemoryBarrier") == 0 && fsig
->param_count
== 0) {
935 return mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
936 } else if (!strcmp (cmethod
->name
, "VolatileRead") && fsig
->param_count
== 1) {
938 gboolean is_ref
= mini_type_is_reference (fsig
->params
[0]);
940 if (fsig
->params
[0]->type
== MONO_TYPE_I1
)
941 opcode
= OP_LOADI1_MEMBASE
;
942 else if (fsig
->params
[0]->type
== MONO_TYPE_U1
)
943 opcode
= OP_LOADU1_MEMBASE
;
944 else if (fsig
->params
[0]->type
== MONO_TYPE_I2
)
945 opcode
= OP_LOADI2_MEMBASE
;
946 else if (fsig
->params
[0]->type
== MONO_TYPE_U2
)
947 opcode
= OP_LOADU2_MEMBASE
;
948 else if (fsig
->params
[0]->type
== MONO_TYPE_I4
)
949 opcode
= OP_LOADI4_MEMBASE
;
950 else if (fsig
->params
[0]->type
== MONO_TYPE_U4
)
951 opcode
= OP_LOADU4_MEMBASE
;
952 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
|| fsig
->params
[0]->type
== MONO_TYPE_U8
)
953 opcode
= OP_LOADI8_MEMBASE
;
954 else if (fsig
->params
[0]->type
== MONO_TYPE_R4
)
955 opcode
= OP_LOADR4_MEMBASE
;
956 else if (fsig
->params
[0]->type
== MONO_TYPE_R8
)
957 opcode
= OP_LOADR8_MEMBASE
;
958 else if (is_ref
|| fsig
->params
[0]->type
== MONO_TYPE_I
|| fsig
->params
[0]->type
== MONO_TYPE_U
)
959 opcode
= OP_LOAD_MEMBASE
;
962 MONO_INST_NEW (cfg
, ins
, opcode
);
963 ins
->inst_basereg
= args
[0]->dreg
;
964 ins
->inst_offset
= 0;
965 MONO_ADD_INS (cfg
->cbb
, ins
);
967 switch (fsig
->params
[0]->type
) {
974 ins
->dreg
= mono_alloc_ireg (cfg
);
975 ins
->type
= STACK_I4
;
979 ins
->dreg
= mono_alloc_lreg (cfg
);
980 ins
->type
= STACK_I8
;
984 ins
->dreg
= mono_alloc_ireg (cfg
);
985 #if SIZEOF_REGISTER == 8
986 ins
->type
= STACK_I8
;
988 ins
->type
= STACK_I4
;
993 ins
->dreg
= mono_alloc_freg (cfg
);
994 ins
->type
= STACK_R8
;
997 g_assert (mini_type_is_reference (fsig
->params
[0]));
998 ins
->dreg
= mono_alloc_ireg_ref (cfg
);
999 ins
->type
= STACK_OBJ
;
1003 if (opcode
== OP_LOADI8_MEMBASE
)
1004 ins
= mono_decompose_opcode (cfg
, ins
);
1006 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
1010 } else if (!strcmp (cmethod
->name
, "VolatileWrite") && fsig
->param_count
== 2) {
1012 gboolean is_ref
= mini_type_is_reference (fsig
->params
[0]);
1014 if (fsig
->params
[0]->type
== MONO_TYPE_I1
|| fsig
->params
[0]->type
== MONO_TYPE_U1
)
1015 opcode
= OP_STOREI1_MEMBASE_REG
;
1016 else if (fsig
->params
[0]->type
== MONO_TYPE_I2
|| fsig
->params
[0]->type
== MONO_TYPE_U2
)
1017 opcode
= OP_STOREI2_MEMBASE_REG
;
1018 else if (fsig
->params
[0]->type
== MONO_TYPE_I4
|| fsig
->params
[0]->type
== MONO_TYPE_U4
)
1019 opcode
= OP_STOREI4_MEMBASE_REG
;
1020 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
|| fsig
->params
[0]->type
== MONO_TYPE_U8
)
1021 opcode
= OP_STOREI8_MEMBASE_REG
;
1022 else if (fsig
->params
[0]->type
== MONO_TYPE_R4
)
1023 opcode
= OP_STORER4_MEMBASE_REG
;
1024 else if (fsig
->params
[0]->type
== MONO_TYPE_R8
)
1025 opcode
= OP_STORER8_MEMBASE_REG
;
1026 else if (is_ref
|| fsig
->params
[0]->type
== MONO_TYPE_I
|| fsig
->params
[0]->type
== MONO_TYPE_U
)
1027 opcode
= OP_STORE_MEMBASE_REG
;
1030 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
1032 MONO_INST_NEW (cfg
, ins
, opcode
);
1033 ins
->sreg1
= args
[1]->dreg
;
1034 ins
->inst_destbasereg
= args
[0]->dreg
;
1035 ins
->inst_offset
= 0;
1036 MONO_ADD_INS (cfg
->cbb
, ins
);
1038 if (opcode
== OP_STOREI8_MEMBASE_REG
)
1039 ins
= mono_decompose_opcode (cfg
, ins
);
1044 } else if (in_corlib
&&
1045 (strcmp (cmethod_klass_name_space
, "System.Threading") == 0) &&
1046 (strcmp (cmethod_klass_name
, "Interlocked") == 0)) {
1049 #if SIZEOF_REGISTER == 8
1050 if (!cfg
->llvm_only
&& strcmp (cmethod
->name
, "Read") == 0 && fsig
->param_count
== 1 && (fsig
->params
[0]->type
== MONO_TYPE_I8
)) {
1051 if (!cfg
->llvm_only
&& mono_arch_opcode_supported (OP_ATOMIC_LOAD_I8
)) {
1052 MONO_INST_NEW (cfg
, ins
, OP_ATOMIC_LOAD_I8
);
1053 ins
->dreg
= mono_alloc_preg (cfg
);
1054 ins
->sreg1
= args
[0]->dreg
;
1055 ins
->type
= STACK_I8
;
1056 ins
->backend
.memory_barrier_kind
= MONO_MEMORY_BARRIER_SEQ
;
1057 MONO_ADD_INS (cfg
->cbb
, ins
);
1061 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
1063 /* 64 bit reads are already atomic */
1064 MONO_INST_NEW (cfg
, load_ins
, OP_LOADI8_MEMBASE
);
1065 load_ins
->dreg
= mono_alloc_preg (cfg
);
1066 load_ins
->inst_basereg
= args
[0]->dreg
;
1067 load_ins
->inst_offset
= 0;
1068 load_ins
->type
= STACK_I8
;
1069 MONO_ADD_INS (cfg
->cbb
, load_ins
);
1071 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
1078 if (strcmp (cmethod
->name
, "Increment") == 0 && fsig
->param_count
== 1) {
1079 MonoInst
*ins_iconst
;
1082 if (fsig
->params
[0]->type
== MONO_TYPE_I4
) {
1083 opcode
= OP_ATOMIC_ADD_I4
;
1084 cfg
->has_atomic_add_i4
= TRUE
;
1086 #if SIZEOF_REGISTER == 8
1087 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
)
1088 opcode
= OP_ATOMIC_ADD_I8
;
1091 if (!mono_arch_opcode_supported (opcode
))
1093 MONO_INST_NEW (cfg
, ins_iconst
, OP_ICONST
);
1094 ins_iconst
->inst_c0
= 1;
1095 ins_iconst
->dreg
= mono_alloc_ireg (cfg
);
1096 MONO_ADD_INS (cfg
->cbb
, ins_iconst
);
1098 MONO_INST_NEW (cfg
, ins
, opcode
);
1099 ins
->dreg
= mono_alloc_ireg (cfg
);
1100 ins
->inst_basereg
= args
[0]->dreg
;
1101 ins
->inst_offset
= 0;
1102 ins
->sreg2
= ins_iconst
->dreg
;
1103 ins
->type
= (opcode
== OP_ATOMIC_ADD_I4
) ? STACK_I4
: STACK_I8
;
1104 MONO_ADD_INS (cfg
->cbb
, ins
);
1106 } else if (strcmp (cmethod
->name
, "Decrement") == 0 && fsig
->param_count
== 1) {
1107 MonoInst
*ins_iconst
;
1110 if (fsig
->params
[0]->type
== MONO_TYPE_I4
) {
1111 opcode
= OP_ATOMIC_ADD_I4
;
1112 cfg
->has_atomic_add_i4
= TRUE
;
1114 #if SIZEOF_REGISTER == 8
1115 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
)
1116 opcode
= OP_ATOMIC_ADD_I8
;
1119 if (!mono_arch_opcode_supported (opcode
))
1121 MONO_INST_NEW (cfg
, ins_iconst
, OP_ICONST
);
1122 ins_iconst
->inst_c0
= -1;
1123 ins_iconst
->dreg
= mono_alloc_ireg (cfg
);
1124 MONO_ADD_INS (cfg
->cbb
, ins_iconst
);
1126 MONO_INST_NEW (cfg
, ins
, opcode
);
1127 ins
->dreg
= mono_alloc_ireg (cfg
);
1128 ins
->inst_basereg
= args
[0]->dreg
;
1129 ins
->inst_offset
= 0;
1130 ins
->sreg2
= ins_iconst
->dreg
;
1131 ins
->type
= (opcode
== OP_ATOMIC_ADD_I4
) ? STACK_I4
: STACK_I8
;
1132 MONO_ADD_INS (cfg
->cbb
, ins
);
1134 } else if (strcmp (cmethod
->name
, "Add") == 0 && fsig
->param_count
== 2) {
1137 if (fsig
->params
[0]->type
== MONO_TYPE_I4
) {
1138 opcode
= OP_ATOMIC_ADD_I4
;
1139 cfg
->has_atomic_add_i4
= TRUE
;
1141 #if SIZEOF_REGISTER == 8
1142 else if (fsig
->params
[0]->type
== MONO_TYPE_I8
)
1143 opcode
= OP_ATOMIC_ADD_I8
;
1146 if (!mono_arch_opcode_supported (opcode
))
1148 MONO_INST_NEW (cfg
, ins
, opcode
);
1149 ins
->dreg
= mono_alloc_ireg (cfg
);
1150 ins
->inst_basereg
= args
[0]->dreg
;
1151 ins
->inst_offset
= 0;
1152 ins
->sreg2
= args
[1]->dreg
;
1153 ins
->type
= (opcode
== OP_ATOMIC_ADD_I4
) ? STACK_I4
: STACK_I8
;
1154 MONO_ADD_INS (cfg
->cbb
, ins
);
1157 else if (strcmp (cmethod
->name
, "Exchange") == 0 && fsig
->param_count
== 2) {
1158 MonoInst
*f2i
= NULL
, *i2f
;
1159 guint32 opcode
, f2i_opcode
, i2f_opcode
;
1160 gboolean is_ref
= mini_type_is_reference (fsig
->params
[0]);
1161 gboolean is_float
= fsig
->params
[0]->type
== MONO_TYPE_R4
|| fsig
->params
[0]->type
== MONO_TYPE_R8
;
1163 if (fsig
->params
[0]->type
== MONO_TYPE_I4
||
1164 fsig
->params
[0]->type
== MONO_TYPE_R4
) {
1165 opcode
= OP_ATOMIC_EXCHANGE_I4
;
1166 f2i_opcode
= OP_MOVE_F_TO_I4
;
1167 i2f_opcode
= OP_MOVE_I4_TO_F
;
1168 cfg
->has_atomic_exchange_i4
= TRUE
;
1170 #if SIZEOF_REGISTER == 8
1172 fsig
->params
[0]->type
== MONO_TYPE_I8
||
1173 fsig
->params
[0]->type
== MONO_TYPE_R8
||
1174 fsig
->params
[0]->type
== MONO_TYPE_I
) {
1175 opcode
= OP_ATOMIC_EXCHANGE_I8
;
1176 f2i_opcode
= OP_MOVE_F_TO_I8
;
1177 i2f_opcode
= OP_MOVE_I8_TO_F
;
1180 else if (is_ref
|| fsig
->params
[0]->type
== MONO_TYPE_I
) {
1181 opcode
= OP_ATOMIC_EXCHANGE_I4
;
1182 cfg
->has_atomic_exchange_i4
= TRUE
;
1188 if (!mono_arch_opcode_supported (opcode
))
1192 /* TODO: Decompose these opcodes instead of bailing here. */
1193 if (COMPILE_SOFT_FLOAT (cfg
))
1196 MONO_INST_NEW (cfg
, f2i
, f2i_opcode
);
1197 f2i
->dreg
= mono_alloc_ireg (cfg
);
1198 f2i
->sreg1
= args
[1]->dreg
;
1199 if (f2i_opcode
== OP_MOVE_F_TO_I4
)
1200 f2i
->backend
.spill_var
= mini_get_int_to_float_spill_area (cfg
);
1201 MONO_ADD_INS (cfg
->cbb
, f2i
);
1204 if (is_ref
&& mini_debug_options
.clr_memory_model
)
1205 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_REL
);
1207 MONO_INST_NEW (cfg
, ins
, opcode
);
1208 ins
->dreg
= is_ref
? mono_alloc_ireg_ref (cfg
) : mono_alloc_ireg (cfg
);
1209 ins
->inst_basereg
= args
[0]->dreg
;
1210 ins
->inst_offset
= 0;
1211 ins
->sreg2
= is_float
? f2i
->dreg
: args
[1]->dreg
;
1212 MONO_ADD_INS (cfg
->cbb
, ins
);
1214 switch (fsig
->params
[0]->type
) {
1216 ins
->type
= STACK_I4
;
1219 ins
->type
= STACK_I8
;
1222 #if SIZEOF_REGISTER == 8
1223 ins
->type
= STACK_I8
;
1225 ins
->type
= STACK_I4
;
1230 ins
->type
= STACK_R8
;
1233 g_assert (mini_type_is_reference (fsig
->params
[0]));
1234 ins
->type
= STACK_OBJ
;
1239 MONO_INST_NEW (cfg
, i2f
, i2f_opcode
);
1240 i2f
->dreg
= mono_alloc_freg (cfg
);
1241 i2f
->sreg1
= ins
->dreg
;
1242 i2f
->type
= STACK_R8
;
1243 if (i2f_opcode
== OP_MOVE_I4_TO_F
)
1244 i2f
->backend
.spill_var
= mini_get_int_to_float_spill_area (cfg
);
1245 MONO_ADD_INS (cfg
->cbb
, i2f
);
1250 if (cfg
->gen_write_barriers
&& is_ref
)
1251 mini_emit_write_barrier (cfg
, args
[0], args
[1]);
1253 else if ((strcmp (cmethod
->name
, "CompareExchange") == 0) && fsig
->param_count
== 3) {
1254 MonoInst
*f2i_new
= NULL
, *f2i_cmp
= NULL
, *i2f
;
1255 guint32 opcode
, f2i_opcode
, i2f_opcode
;
1256 gboolean is_ref
= mini_type_is_reference (fsig
->params
[1]);
1257 gboolean is_float
= fsig
->params
[1]->type
== MONO_TYPE_R4
|| fsig
->params
[1]->type
== MONO_TYPE_R8
;
1259 if (fsig
->params
[1]->type
== MONO_TYPE_I4
||
1260 fsig
->params
[1]->type
== MONO_TYPE_R4
) {
1261 opcode
= OP_ATOMIC_CAS_I4
;
1262 f2i_opcode
= OP_MOVE_F_TO_I4
;
1263 i2f_opcode
= OP_MOVE_I4_TO_F
;
1264 cfg
->has_atomic_cas_i4
= TRUE
;
1266 #if SIZEOF_REGISTER == 8
1268 fsig
->params
[1]->type
== MONO_TYPE_I8
||
1269 fsig
->params
[1]->type
== MONO_TYPE_R8
||
1270 fsig
->params
[1]->type
== MONO_TYPE_I
) {
1271 opcode
= OP_ATOMIC_CAS_I8
;
1272 f2i_opcode
= OP_MOVE_F_TO_I8
;
1273 i2f_opcode
= OP_MOVE_I8_TO_F
;
1276 else if (is_ref
|| fsig
->params
[1]->type
== MONO_TYPE_I
) {
1277 opcode
= OP_ATOMIC_CAS_I4
;
1278 cfg
->has_atomic_cas_i4
= TRUE
;
1284 if (!mono_arch_opcode_supported (opcode
))
1288 /* TODO: Decompose these opcodes instead of bailing here. */
1289 if (COMPILE_SOFT_FLOAT (cfg
))
1292 MONO_INST_NEW (cfg
, f2i_new
, f2i_opcode
);
1293 f2i_new
->dreg
= mono_alloc_ireg (cfg
);
1294 f2i_new
->sreg1
= args
[1]->dreg
;
1295 if (f2i_opcode
== OP_MOVE_F_TO_I4
)
1296 f2i_new
->backend
.spill_var
= mini_get_int_to_float_spill_area (cfg
);
1297 MONO_ADD_INS (cfg
->cbb
, f2i_new
);
1299 MONO_INST_NEW (cfg
, f2i_cmp
, f2i_opcode
);
1300 f2i_cmp
->dreg
= mono_alloc_ireg (cfg
);
1301 f2i_cmp
->sreg1
= args
[2]->dreg
;
1302 if (f2i_opcode
== OP_MOVE_F_TO_I4
)
1303 f2i_cmp
->backend
.spill_var
= mini_get_int_to_float_spill_area (cfg
);
1304 MONO_ADD_INS (cfg
->cbb
, f2i_cmp
);
1307 if (is_ref
&& mini_debug_options
.clr_memory_model
)
1308 mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_REL
);
1310 MONO_INST_NEW (cfg
, ins
, opcode
);
1311 ins
->dreg
= is_ref
? alloc_ireg_ref (cfg
) : alloc_ireg (cfg
);
1312 ins
->sreg1
= args
[0]->dreg
;
1313 ins
->sreg2
= is_float
? f2i_new
->dreg
: args
[1]->dreg
;
1314 ins
->sreg3
= is_float
? f2i_cmp
->dreg
: args
[2]->dreg
;
1315 MONO_ADD_INS (cfg
->cbb
, ins
);
1317 switch (fsig
->params
[1]->type
) {
1319 ins
->type
= STACK_I4
;
1322 ins
->type
= STACK_I8
;
1325 #if SIZEOF_REGISTER == 8
1326 ins
->type
= STACK_I8
;
1328 ins
->type
= STACK_I4
;
1332 ins
->type
= cfg
->r4_stack_type
;
1335 ins
->type
= STACK_R8
;
1338 g_assert (mini_type_is_reference (fsig
->params
[1]));
1339 ins
->type
= STACK_OBJ
;
1344 MONO_INST_NEW (cfg
, i2f
, i2f_opcode
);
1345 i2f
->dreg
= mono_alloc_freg (cfg
);
1346 i2f
->sreg1
= ins
->dreg
;
1347 i2f
->type
= STACK_R8
;
1348 if (i2f_opcode
== OP_MOVE_I4_TO_F
)
1349 i2f
->backend
.spill_var
= mini_get_int_to_float_spill_area (cfg
);
1350 MONO_ADD_INS (cfg
->cbb
, i2f
);
1355 if (cfg
->gen_write_barriers
&& is_ref
)
1356 mini_emit_write_barrier (cfg
, args
[0], args
[1]);
1358 else if ((strcmp (cmethod
->name
, "CompareExchange") == 0) && fsig
->param_count
== 4 &&
1359 fsig
->params
[1]->type
== MONO_TYPE_I4
) {
1360 MonoInst
*cmp
, *ceq
;
1362 if (!mono_arch_opcode_supported (OP_ATOMIC_CAS_I4
))
1365 /* int32 r = CAS (location, value, comparand); */
1366 MONO_INST_NEW (cfg
, ins
, OP_ATOMIC_CAS_I4
);
1367 ins
->dreg
= alloc_ireg (cfg
);
1368 ins
->sreg1
= args
[0]->dreg
;
1369 ins
->sreg2
= args
[1]->dreg
;
1370 ins
->sreg3
= args
[2]->dreg
;
1371 ins
->type
= STACK_I4
;
1372 MONO_ADD_INS (cfg
->cbb
, ins
);
1374 /* bool result = r == comparand; */
1375 MONO_INST_NEW (cfg
, cmp
, OP_ICOMPARE
);
1376 cmp
->sreg1
= ins
->dreg
;
1377 cmp
->sreg2
= args
[2]->dreg
;
1378 cmp
->type
= STACK_I4
;
1379 MONO_ADD_INS (cfg
->cbb
, cmp
);
1381 MONO_INST_NEW (cfg
, ceq
, OP_ICEQ
);
1382 ceq
->dreg
= alloc_ireg (cfg
);
1383 ceq
->type
= STACK_I4
;
1384 MONO_ADD_INS (cfg
->cbb
, ceq
);
1386 /* *success = result; */
1387 MONO_EMIT_NEW_STORE_MEMBASE (cfg
, OP_STOREI1_MEMBASE_REG
, args
[3]->dreg
, 0, ceq
->dreg
);
1389 cfg
->has_atomic_cas_i4
= TRUE
;
1391 else if (strcmp (cmethod
->name
, "MemoryBarrier") == 0 && fsig
->param_count
== 0)
1392 ins
= mini_emit_memory_barrier (cfg
, MONO_MEMORY_BARRIER_SEQ
);
1396 } else if (in_corlib
&&
1397 (strcmp (cmethod_klass_name_space
, "System.Threading") == 0) &&
1398 (strcmp (cmethod_klass_name
, "Volatile") == 0)) {
1401 if (!cfg
->llvm_only
&& !strcmp (cmethod
->name
, "Read") && fsig
->param_count
== 1) {
1403 MonoType
*t
= fsig
->params
[0];
1405 gboolean is_float
= t
->type
== MONO_TYPE_R4
|| t
->type
== MONO_TYPE_R8
;
1407 g_assert (t
->byref
);
1408 /* t is a byref type, so the reference check is more complicated */
1409 is_ref
= mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t
)));
1410 if (t
->type
== MONO_TYPE_I1
)
1411 opcode
= OP_ATOMIC_LOAD_I1
;
1412 else if (t
->type
== MONO_TYPE_U1
|| t
->type
== MONO_TYPE_BOOLEAN
)
1413 opcode
= OP_ATOMIC_LOAD_U1
;
1414 else if (t
->type
== MONO_TYPE_I2
)
1415 opcode
= OP_ATOMIC_LOAD_I2
;
1416 else if (t
->type
== MONO_TYPE_U2
)
1417 opcode
= OP_ATOMIC_LOAD_U2
;
1418 else if (t
->type
== MONO_TYPE_I4
)
1419 opcode
= OP_ATOMIC_LOAD_I4
;
1420 else if (t
->type
== MONO_TYPE_U4
)
1421 opcode
= OP_ATOMIC_LOAD_U4
;
1422 else if (t
->type
== MONO_TYPE_R4
)
1423 opcode
= OP_ATOMIC_LOAD_R4
;
1424 else if (t
->type
== MONO_TYPE_R8
)
1425 opcode
= OP_ATOMIC_LOAD_R8
;
1426 #if SIZEOF_REGISTER == 8
1427 else if (t
->type
== MONO_TYPE_I8
|| t
->type
== MONO_TYPE_I
)
1428 opcode
= OP_ATOMIC_LOAD_I8
;
1429 else if (is_ref
|| t
->type
== MONO_TYPE_U8
|| t
->type
== MONO_TYPE_U
)
1430 opcode
= OP_ATOMIC_LOAD_U8
;
1432 else if (t
->type
== MONO_TYPE_I
)
1433 opcode
= OP_ATOMIC_LOAD_I4
;
1434 else if (is_ref
|| t
->type
== MONO_TYPE_U
)
1435 opcode
= OP_ATOMIC_LOAD_U4
;
1439 if (!mono_arch_opcode_supported (opcode
))
1442 MONO_INST_NEW (cfg
, ins
, opcode
);
1443 ins
->dreg
= is_ref
? mono_alloc_ireg_ref (cfg
) : (is_float
? mono_alloc_freg (cfg
) : mono_alloc_ireg (cfg
));
1444 ins
->sreg1
= args
[0]->dreg
;
1445 ins
->backend
.memory_barrier_kind
= MONO_MEMORY_BARRIER_ACQ
;
1446 MONO_ADD_INS (cfg
->cbb
, ins
);
1449 case MONO_TYPE_BOOLEAN
:
1456 ins
->type
= STACK_I4
;
1460 ins
->type
= STACK_I8
;
1464 #if SIZEOF_REGISTER == 8
1465 ins
->type
= STACK_I8
;
1467 ins
->type
= STACK_I4
;
1471 ins
->type
= cfg
->r4_stack_type
;
1474 ins
->type
= STACK_R8
;
1478 ins
->type
= STACK_OBJ
;
1484 if (!cfg
->llvm_only
&& !strcmp (cmethod
->name
, "Write") && fsig
->param_count
== 2) {
1486 MonoType
*t
= fsig
->params
[0];
1489 g_assert (t
->byref
);
1490 is_ref
= mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t
)));
1491 if (t
->type
== MONO_TYPE_I1
)
1492 opcode
= OP_ATOMIC_STORE_I1
;
1493 else if (t
->type
== MONO_TYPE_U1
|| t
->type
== MONO_TYPE_BOOLEAN
)
1494 opcode
= OP_ATOMIC_STORE_U1
;
1495 else if (t
->type
== MONO_TYPE_I2
)
1496 opcode
= OP_ATOMIC_STORE_I2
;
1497 else if (t
->type
== MONO_TYPE_U2
)
1498 opcode
= OP_ATOMIC_STORE_U2
;
1499 else if (t
->type
== MONO_TYPE_I4
)
1500 opcode
= OP_ATOMIC_STORE_I4
;
1501 else if (t
->type
== MONO_TYPE_U4
)
1502 opcode
= OP_ATOMIC_STORE_U4
;
1503 else if (t
->type
== MONO_TYPE_R4
)
1504 opcode
= OP_ATOMIC_STORE_R4
;
1505 else if (t
->type
== MONO_TYPE_R8
)
1506 opcode
= OP_ATOMIC_STORE_R8
;
1507 #if SIZEOF_REGISTER == 8
1508 else if (t
->type
== MONO_TYPE_I8
|| t
->type
== MONO_TYPE_I
)
1509 opcode
= OP_ATOMIC_STORE_I8
;
1510 else if (is_ref
|| t
->type
== MONO_TYPE_U8
|| t
->type
== MONO_TYPE_U
)
1511 opcode
= OP_ATOMIC_STORE_U8
;
1513 else if (t
->type
== MONO_TYPE_I
)
1514 opcode
= OP_ATOMIC_STORE_I4
;
1515 else if (is_ref
|| t
->type
== MONO_TYPE_U
)
1516 opcode
= OP_ATOMIC_STORE_U4
;
1520 if (!mono_arch_opcode_supported (opcode
))
1523 MONO_INST_NEW (cfg
, ins
, opcode
);
1524 ins
->dreg
= args
[0]->dreg
;
1525 ins
->sreg1
= args
[1]->dreg
;
1526 ins
->backend
.memory_barrier_kind
= MONO_MEMORY_BARRIER_REL
;
1527 MONO_ADD_INS (cfg
->cbb
, ins
);
1529 if (cfg
->gen_write_barriers
&& is_ref
)
1530 mini_emit_write_barrier (cfg
, args
[0], args
[1]);
1536 } else if (in_corlib
&&
1537 (strcmp (cmethod_klass_name_space
, "System.Diagnostics") == 0) &&
1538 (strcmp (cmethod_klass_name
, "Debugger") == 0)) {
1539 if (!strcmp (cmethod
->name
, "Break") && fsig
->param_count
== 0) {
1540 if (mini_should_insert_breakpoint (cfg
->method
)) {
1541 ins
= mono_emit_jit_icall (cfg
, mono_debugger_agent_user_break
, NULL
);
1543 MONO_INST_NEW (cfg
, ins
, OP_NOP
);
1544 MONO_ADD_INS (cfg
->cbb
, ins
);
1548 } else if (in_corlib
&&
1549 (strcmp (cmethod_klass_name_space
, "System") == 0) &&
1550 (strcmp (cmethod_klass_name
, "Environment") == 0)) {
1551 if (!strcmp (cmethod
->name
, "get_IsRunningOnWindows") && fsig
->param_count
== 0) {
1553 EMIT_NEW_ICONST (cfg
, ins
, 1);
1555 EMIT_NEW_ICONST (cfg
, ins
, 0);
1558 } else if (in_corlib
&&
1559 (strcmp (cmethod_klass_name_space
, "System.Reflection") == 0) &&
1560 (strcmp (cmethod_klass_name
, "Assembly") == 0)) {
1561 if (cfg
->llvm_only
&& !strcmp (cmethod
->name
, "GetExecutingAssembly")) {
1562 /* No stack walks are currently available, so implement this as an intrinsic */
1563 MonoInst
*assembly_ins
;
1565 EMIT_NEW_AOTCONST (cfg
, assembly_ins
, MONO_PATCH_INFO_IMAGE
, m_class_get_image (cfg
->method
->klass
));
1566 ins
= mono_emit_jit_icall (cfg
, mono_get_assembly_object
, &assembly_ins
);
1570 // While it is not required per
1571 // https://msdn.microsoft.com/en-us/library/system.reflection.assembly.getcallingassembly(v=vs.110).aspx.
1572 // have GetCallingAssembly be consistent independently of varying optimization.
1573 // This fixes mono/tests/test-inline-call-stack.cs under FullAOT+LLVM.
1574 cfg
->no_inline
|= COMPILE_LLVM (cfg
) && strcmp (cmethod
->name
, "GetCallingAssembly") == 0;
1576 } else if (in_corlib
&&
1577 (strcmp (cmethod_klass_name_space
, "System.Reflection") == 0) &&
1578 (strcmp (cmethod_klass_name
, "MethodBase") == 0)) {
1579 if (cfg
->llvm_only
&& !strcmp (cmethod
->name
, "GetCurrentMethod")) {
1580 /* No stack walks are currently available, so implement this as an intrinsic */
1581 MonoInst
*method_ins
;
1582 MonoMethod
*declaring
= cfg
->method
;
1584 /* This returns the declaring generic method */
1585 if (declaring
->is_inflated
)
1586 declaring
= ((MonoMethodInflated
*)cfg
->method
)->declaring
;
1587 EMIT_NEW_AOTCONST (cfg
, method_ins
, MONO_PATCH_INFO_METHODCONST
, declaring
);
1588 ins
= mono_emit_jit_icall (cfg
, mono_get_method_object
, &method_ins
);
1589 cfg
->no_inline
= TRUE
;
1590 if (cfg
->method
!= cfg
->current_method
)
1591 mini_set_inline_failure (cfg
, "MethodBase:GetCurrentMethod ()");
1594 } else if (cmethod
->klass
== mono_class_try_get_math_class ()) {
1596 * There is general branchless code for Min/Max, but it does not work for
1598 * http://everything2.com/?node_id=1051618
1602 * Constant folding for various Math methods.
1603 * we avoid folding constants that when computed would raise an error, in
1604 * case the user code was expecting to get that error raised
1606 if (fsig
->param_count
== 1 && args
[0]->opcode
== OP_R8CONST
){
1607 double source
= *(double *)args
[0]->inst_p0
;
1609 const char *mname
= cmethod
->name
;
1613 if (strcmp (mname
, "Abs") == 0 && fsig
->params
[0]->type
== MONO_TYPE_R8
) {
1615 } else if (strcmp (mname
, "Asin") == 0){
1616 if (fabs (source
) <= 1)
1618 } else if (strcmp (mname
, "Asinh") == 0){
1620 } else if (strcmp (mname
, "Acos") == 0){
1621 if (fabs (source
) <= 1)
1623 } else if (strcmp (mname
, "Acosh") == 0){
1626 } else if (strcmp (mname
, "Atan") == 0){
1628 } else if (strcmp (mname
, "Atanh") == 0){
1629 if (fabs (source
) < 1)
1632 } else if (c
== 'C'){
1633 if (strcmp (mname
, "Cos") == 0) {
1634 if (!isinf (source
))
1636 } else if (strcmp (mname
, "Cbrt") == 0){
1638 } else if (strcmp (mname
, "Cosh") == 0){
1641 } else if (c
== 'R'){
1642 if (strcmp (mname
, "Round") == 0)
1644 } else if (c
== 'S'){
1645 if (strcmp (mname
, "Sin") == 0) {
1646 if (!isinf (source
))
1648 } else if (strcmp (mname
, "Sqrt") == 0) {
1651 } else if (strcmp (mname
, "Sinh") == 0){
1654 } else if (c
== 'T'){
1655 if (strcmp (mname
, "Tan") == 0){
1656 if (!isinf (source
))
1658 } else if (strcmp (mname
, "Tanh") == 0){
1664 double *dest
= (double *) mono_domain_alloc (cfg
->domain
, sizeof (double));
1666 MONO_INST_NEW (cfg
, ins
, OP_R8CONST
);
1667 ins
->type
= STACK_R8
;
1668 ins
->dreg
= mono_alloc_dreg (cfg
, (MonoStackType
) ins
->type
);
1669 ins
->inst_p0
= dest
;
1673 result
= fabs (source
);
1676 result
= acos (source
);
1679 result
= acosh (source
);
1682 result
= asin (source
);
1685 result
= asinh (source
);
1688 result
= atan (source
);
1691 result
= atanh (source
);
1694 result
= cbrt (source
);
1697 result
= cos (source
);
1700 result
= cosh (source
);
1703 result
= round (source
);
1706 result
= sin (source
);
1709 result
= sinh (source
);
1712 result
= sqrt (source
);
1715 result
= tan (source
);
1718 result
= tanh (source
);
1721 g_error ("invalid opcode %d", (int)opcode
);
1724 MONO_ADD_INS (cfg
->cbb
, ins
);
1725 NULLIFY_INS (args
[0]);
1729 } else if (cmethod
->klass
== mono_defaults
.systemtype_class
&& !strcmp (cmethod
->name
, "op_Equality")) {
1730 EMIT_NEW_BIALU (cfg
, ins
, OP_COMPARE
, -1, args
[0]->dreg
, args
[1]->dreg
);
1731 MONO_INST_NEW (cfg
, ins
, OP_PCEQ
);
1732 ins
->dreg
= alloc_preg (cfg
);
1733 ins
->type
= STACK_I4
;
1734 MONO_ADD_INS (cfg
->cbb
, ins
);
1736 } else if (((!strcmp (cmethod_klass_image
->assembly
->aname
.name
, "MonoMac") ||
1737 !strcmp (cmethod_klass_image
->assembly
->aname
.name
, "monotouch")) &&
1738 !strcmp (cmethod_klass_name_space
, "XamCore.ObjCRuntime") &&
1739 !strcmp (cmethod_klass_name
, "Selector")) ||
1740 ((!strcmp (cmethod_klass_image
->assembly
->aname
.name
, "Xamarin.iOS") ||
1741 !strcmp (cmethod_klass_image
->assembly
->aname
.name
, "Xamarin.Mac")) &&
1742 !strcmp (cmethod_klass_name_space
, "ObjCRuntime") &&
1743 !strcmp (cmethod_klass_name
, "Selector"))
1745 if ((cfg
->backend
->have_objc_get_selector
|| cfg
->compile_llvm
) &&
1746 !strcmp (cmethod
->name
, "GetHandle") && fsig
->param_count
== 1 &&
1747 (args
[0]->opcode
== OP_GOT_ENTRY
|| args
[0]->opcode
== OP_AOTCONST
) &&
1750 MonoJumpInfoToken
*ji
;
1753 if (args
[0]->opcode
== OP_GOT_ENTRY
) {
1754 pi
= (MonoInst
*)args
[0]->inst_p1
;
1755 g_assert (pi
->opcode
== OP_PATCH_INFO
);
1756 g_assert (GPOINTER_TO_INT (pi
->inst_p1
) == MONO_PATCH_INFO_LDSTR
);
1757 ji
= (MonoJumpInfoToken
*)pi
->inst_p0
;
1759 g_assert (GPOINTER_TO_INT (args
[0]->inst_p1
) == MONO_PATCH_INFO_LDSTR
);
1760 ji
= (MonoJumpInfoToken
*)args
[0]->inst_p0
;
1763 NULLIFY_INS (args
[0]);
1765 s
= mono_ldstr_utf8 (ji
->image
, mono_metadata_token_index (ji
->token
), cfg
->error
);
1766 return_val_if_nok (cfg
->error
, NULL
);
1768 MONO_INST_NEW (cfg
, ins
, OP_OBJC_GET_SELECTOR
);
1769 ins
->dreg
= mono_alloc_ireg (cfg
);
1772 MONO_ADD_INS (cfg
->cbb
, ins
);
1775 } else if (in_corlib
&&
1776 (strcmp (cmethod_klass_name_space
, "System.Runtime.InteropServices") == 0) &&
1777 (strcmp (cmethod_klass_name
, "Marshal") == 0)) {
1778 //Convert Marshal.PtrToStructure<T> of blittable T to direct loads
1779 if (strcmp (cmethod
->name
, "PtrToStructure") == 0 &&
1780 cmethod
->is_inflated
&&
1781 fsig
->param_count
== 1 &&
1782 !mini_method_check_context_used (cfg
, cmethod
)) {
1784 MonoGenericContext
*method_context
= mono_method_get_context (cmethod
);
1785 MonoType
*arg0
= method_context
->method_inst
->type_argv
[0];
1786 if (mono_type_is_native_blittable (arg0
))
1787 return mini_emit_memory_load (cfg
, arg0
, args
[0], 0, 0);
1789 } else if (cmethod
->klass
== mono_defaults
.enum_class
&& !strcmp (cmethod
->name
, "HasFlag") &&
1790 args
[0]->opcode
== OP_BOX
&& args
[1]->opcode
== OP_BOX_ICONST
&& args
[0]->klass
== args
[1]->klass
) {
1791 args
[1]->opcode
= OP_ICONST
;
1792 ins
= mini_handle_enum_has_flag (cfg
, args
[0]->klass
, NULL
, args
[0]->sreg1
, args
[1]);
1793 NULLIFY_INS (args
[0]);
1795 } else if (in_corlib
&&
1796 !strcmp (cmethod_klass_name_space
, "System") &&
1797 (!strcmp (cmethod_klass_name
, "Span`1") || !strcmp (cmethod_klass_name
, "ReadOnlySpan`1"))) {
1798 return emit_span_intrinsics (cfg
, cmethod
, fsig
, args
);
1799 } else if (in_corlib
&&
1800 !strcmp (cmethod_klass_name_space
, "Internal.Runtime.CompilerServices") &&
1801 !strcmp (cmethod_klass_name
, "Unsafe")) {
1802 return emit_unsafe_intrinsics (cfg
, cmethod
, fsig
, args
);
1803 } else if (!strcmp (cmethod_klass_name_space
, "System.Runtime.CompilerServices") &&
1804 !strcmp (cmethod_klass_name
, "Unsafe") &&
1805 (in_corlib
|| !strcmp (cmethod_klass_image
->assembly
->aname
.name
, "System.Runtime.CompilerServices.Unsafe"))) {
1806 return emit_unsafe_intrinsics (cfg
, cmethod
, fsig
, args
);
1807 } else if (in_corlib
&&
1808 !strcmp (cmethod_klass_name_space
, "System.Runtime.CompilerServices") &&
1809 !strcmp (cmethod_klass_name
, "JitHelpers")) {
1810 return emit_jit_helpers_intrinsics (cfg
, cmethod
, fsig
, args
);
1813 #ifdef MONO_ARCH_SIMD_INTRINSICS
1814 if (cfg
->opt
& MONO_OPT_SIMD
) {
1815 ins
= mono_emit_simd_intrinsics (cfg
, cmethod
, fsig
, args
);
1821 /* Fallback if SIMD is disabled */
1822 if (in_corlib
&& !strcmp ("System.Numerics", cmethod_klass_name_space
) && !strcmp ("Vector", cmethod_klass_name
)) {
1823 if (!strcmp (cmethod
->name
, "get_IsHardwareAccelerated")) {
1824 EMIT_NEW_ICONST (cfg
, ins
, 0);
1825 ins
->type
= STACK_I4
;
1830 /* Workaround for the compiler server IsMemoryAvailable. */
1831 if (!strcmp ("Microsoft.CodeAnalysis.CompilerServer", cmethod_klass_name_space
) && !strcmp ("MemoryHelper", cmethod_klass_name
)) {
1832 if (!strcmp (cmethod
->name
, "IsMemoryAvailable")) {
1833 EMIT_NEW_ICONST (cfg
, ins
, 1);
1834 ins
->type
= STACK_I4
;
1839 #ifdef ENABLE_NETCORE
1840 // Return false for IsSupported for all types in System.Runtime.Intrinsics.X86
1841 // as we don't support them now
1843 !strcmp ("System.Runtime.Intrinsics.X86", cmethod_klass_name_space
) &&
1844 !strcmp (cmethod
->name
, "get_IsSupported")) {
1845 EMIT_NEW_ICONST (cfg
, ins
, 0);
1846 ins
->type
= STACK_I4
;
1851 ins
= mono_emit_native_types_intrinsics (cfg
, cmethod
, fsig
, args
);
1855 if (COMPILE_LLVM (cfg
)) {
1856 ins
= llvm_emit_inst_for_method (cfg
, cmethod
, fsig
, args
, in_corlib
);
1861 return mono_arch_emit_inst_for_method (cfg
, cmethod
, fsig
, args
);
1866 emit_array_unsafe_access (MonoCompile
*cfg
, MonoMethodSignature
*fsig
, MonoInst
**args
, int is_set
)
1871 eklass
= mono_class_from_mono_type_internal (fsig
->params
[2]);
1873 eklass
= mono_class_from_mono_type_internal (fsig
->ret
);
1876 return mini_emit_array_store (cfg
, eklass
, args
, FALSE
);
1878 MonoInst
*ins
, *addr
= mini_emit_ldelema_1_ins (cfg
, eklass
, args
[0], args
[1], FALSE
);
1879 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg
, ins
, m_class_get_byval_arg (eklass
), addr
->dreg
, 0);
1885 is_unsafe_mov_compatible (MonoCompile
*cfg
, MonoClass
*param_klass
, MonoClass
*return_klass
)
1888 int param_size
, return_size
;
1890 param_klass
= mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (param_klass
)));
1891 return_klass
= mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (return_klass
)));
1893 if (cfg
->verbose_level
> 3)
1894 printf ("[UNSAFE-MOV-INTRISIC] %s <- %s\n", m_class_get_name (return_klass
), m_class_get_name (param_klass
));
1896 //Don't allow mixing reference types with value types
1897 if (m_class_is_valuetype (param_klass
) != m_class_is_valuetype (return_klass
)) {
1898 if (cfg
->verbose_level
> 3)
1899 printf ("[UNSAFE-MOV-INTRISIC]\tone of the args is a valuetype and the other is not\n");
1903 if (!m_class_is_valuetype (param_klass
)) {
1904 if (cfg
->verbose_level
> 3)
1905 printf ("[UNSAFE-MOV-INTRISIC]\targs are reference types\n");
1910 if (m_class_has_references (param_klass
) || m_class_has_references (return_klass
))
1913 MonoType
*param_type
= m_class_get_byval_arg (param_klass
);
1914 MonoType
*return_type
= m_class_get_byval_arg (return_klass
);
1916 /* Avoid mixing structs and primitive types/enums, they need to be handled differently in the JIT */
1917 if ((MONO_TYPE_ISSTRUCT (param_type
) && !MONO_TYPE_ISSTRUCT (return_type
)) ||
1918 (!MONO_TYPE_ISSTRUCT (param_type
) && MONO_TYPE_ISSTRUCT (return_type
))) {
1919 if (cfg
->verbose_level
> 3)
1920 printf ("[UNSAFE-MOV-INTRISIC]\tmixing structs and scalars\n");
1924 if (param_type
->type
== MONO_TYPE_R4
|| param_type
->type
== MONO_TYPE_R8
||
1925 return_type
->type
== MONO_TYPE_R4
|| return_type
->type
== MONO_TYPE_R8
) {
1926 if (cfg
->verbose_level
> 3)
1927 printf ("[UNSAFE-MOV-INTRISIC]\tfloat or double are not supported\n");
1931 param_size
= mono_class_value_size (param_klass
, &align
);
1932 return_size
= mono_class_value_size (return_klass
, &align
);
1934 //We can do it if sizes match
1935 if (param_size
== return_size
) {
1936 if (cfg
->verbose_level
> 3)
1937 printf ("[UNSAFE-MOV-INTRISIC]\tsame size\n");
1941 //No simple way to handle struct if sizes don't match
1942 if (MONO_TYPE_ISSTRUCT (param_type
)) {
1943 if (cfg
->verbose_level
> 3)
1944 printf ("[UNSAFE-MOV-INTRISIC]\tsize mismatch and type is a struct\n");
1949 * Same reg size category.
1950 * A quick note on why we don't require widening here.
1951 * The intrinsic is "R Array.UnsafeMov<S,R> (S s)".
1953 * Since the source value comes from a function argument, the JIT will already have
1954 * the value in a VREG and performed any widening needed before (say, when loading from a field).
1956 if (param_size
<= 4 && return_size
<= 4) {
1957 if (cfg
->verbose_level
> 3)
1958 printf ("[UNSAFE-MOV-INTRISIC]\tsize mismatch but both are of the same reg class\n");
1966 emit_array_unsafe_mov (MonoCompile
*cfg
, MonoMethodSignature
*fsig
, MonoInst
**args
)
1968 MonoClass
*param_klass
= mono_class_from_mono_type_internal (fsig
->params
[0]);
1969 MonoClass
*return_klass
= mono_class_from_mono_type_internal (fsig
->ret
);
1971 if (mini_is_gsharedvt_variable_type (fsig
->ret
))
1974 //Valuetypes that are semantically equivalent or numbers than can be widened to
1975 if (is_unsafe_mov_compatible (cfg
, param_klass
, return_klass
))
1978 //Arrays of valuetypes that are semantically equivalent
1979 if (m_class_get_rank (param_klass
) == 1 && m_class_get_rank (return_klass
) == 1 && is_unsafe_mov_compatible (cfg
, m_class_get_element_class (param_klass
), m_class_get_element_class (return_klass
)))
1986 mini_emit_inst_for_sharable_method (MonoCompile
*cfg
, MonoMethod
*cmethod
, MonoMethodSignature
*fsig
, MonoInst
**args
)
1988 if (cmethod
->klass
== mono_defaults
.array_class
) {
1989 if (strcmp (cmethod
->name
, "UnsafeStore") == 0)
1990 return emit_array_unsafe_access (cfg
, fsig
, args
, TRUE
);
1991 else if (strcmp (cmethod
->name
, "UnsafeLoad") == 0)
1992 return emit_array_unsafe_access (cfg
, fsig
, args
, FALSE
);
1993 else if (strcmp (cmethod
->name
, "UnsafeMov") == 0)
1994 return emit_array_unsafe_mov (cfg
, fsig
, args
);
2001 mini_emit_inst_for_field_load (MonoCompile
*cfg
, MonoClassField
*field
)
2003 MonoClass
*klass
= field
->parent
;
2004 const char *klass_name_space
= m_class_get_name_space (klass
);
2005 const char *klass_name
= m_class_get_name (klass
);
2006 MonoImage
*klass_image
= m_class_get_image (klass
);
2007 gboolean in_corlib
= klass_image
== mono_defaults
.corlib
;
2011 if (in_corlib
&& !strcmp (klass_name_space
, "System") && !strcmp (klass_name
, "BitConverter") && !strcmp (field
->name
, "IsLittleEndian")) {
2012 is_le
= (TARGET_BYTE_ORDER
== G_LITTLE_ENDIAN
);
2013 EMIT_NEW_ICONST (cfg
, ins
, is_le
);
2016 #ifdef ENABLE_NETCORE
2017 else if ((klass
== mono_defaults
.int_class
|| klass
== mono_defaults
.uint_class
) && strcmp (field
->name
, "Zero") == 0) {
2018 EMIT_NEW_PCONST (cfg
, ins
, 0);
2025 MONO_EMPTY_SOURCE_FILE (intrinsics
);