mono/mini/intrinsics.c
/**
 * Intrinsics support
 */

#include <config.h>
#include <mono/utils/mono-compiler.h>
#include <math.h>

#ifndef DISABLE_JIT

#include "mini.h"
#include "mini-runtime.h"
#include "ir-emit.h"
#include "jit-icalls.h"
#include "debugger-agent.h"

#include <mono/metadata/abi-details.h>
#include <mono/metadata/gc-internals.h>
#include <mono/metadata/monitor.h>
#include <mono/utils/mono-memory-model.h>

static GENERATE_GET_CLASS_WITH_CACHE (runtime_helpers, "System.Runtime.CompilerServices", "RuntimeHelpers")
static GENERATE_TRY_GET_CLASS_WITH_CACHE (math, "System", "Math")
/* optimize the simple GetGenericValueImpl/SetGenericValueImpl generic icalls */
static MonoInst*
emit_array_generic_access (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args, int is_set)
{
	MonoInst *addr, *store, *load;
	MonoClass *eklass = mono_class_from_mono_type_internal (fsig->params [2]);

	/* the bounds check is already done by the callers */
	addr = mini_emit_ldelema_1_ins (cfg, eklass, args [0], args [1], FALSE);
	MonoType *etype = m_class_get_byval_arg (eklass);
	if (is_set) {
		EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, load, etype, args [2]->dreg, 0);
		EMIT_NEW_STORE_MEMBASE_TYPE (cfg, store, etype, addr->dreg, 0, load->dreg);
		if (mini_type_is_reference (etype))
			mini_emit_write_barrier (cfg, addr, load);
	} else {
		EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, load, etype, addr->dreg, 0);
		EMIT_NEW_STORE_MEMBASE_TYPE (cfg, store, etype, args [2]->dreg, 0, load->dreg);
	}
	return store;
}
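/*
 * For orientation: the managed counterparts optimized here are the
 * System.Array icalls, roughly (signatures paraphrased from corlib, not
 * verified against this exact revision):
 *
 *   void GetGenericValueImpl<T> (int pos, out T value);
 *   void SetGenericValueImpl<T> (int pos, ref T value);
 *
 * so args [0] is the array, args [1] the index and args [2] the byref value
 * that is loaded from or stored to.
 */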
static gboolean
mono_type_is_native_blittable (MonoType *t)
{
	if (MONO_TYPE_IS_REFERENCE (t))
		return FALSE;

	if (MONO_TYPE_IS_PRIMITIVE_SCALAR (t))
		return TRUE;

	MonoClass *klass = mono_class_from_mono_type_internal (t);

	// MonoClass::blittable depends on mono_class_setup_fields being done.
	mono_class_setup_fields (klass);
	if (!m_class_is_blittable (klass))
		return FALSE;

	// If the native marshal size is different we can't convert PtrToStructure to a type load
	if (mono_class_native_size (klass, NULL) != mono_class_value_size (klass, NULL))
		return FALSE;

	return TRUE;
}
MonoInst*
mini_emit_inst_for_ctor (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	const char* cmethod_klass_name_space = m_class_get_name_space (cmethod->klass);
	const char* cmethod_klass_name = m_class_get_name (cmethod->klass);
	MonoImage *cmethod_klass_image = m_class_get_image (cmethod->klass);
	gboolean in_corlib = cmethod_klass_image == mono_defaults.corlib;
	MonoInst *ins = NULL;

	/* Required intrinsics are always used even with -O=-intrins */
	if (in_corlib &&
		!strcmp (cmethod_klass_name_space, "System") &&
		!strcmp (cmethod_klass_name, "ByReference`1")) {
		/* public ByReference(ref T value) */
		g_assert (fsig->hasthis && fsig->param_count == 1);
		EMIT_NEW_STORE_MEMBASE (cfg, ins, OP_STORE_MEMBASE_REG, args [0]->dreg, 0, args [1]->dreg);
		return ins;
	}

	ins = mono_emit_native_types_intrinsics (cfg, cmethod, fsig, args);
	if (ins)
		return ins;

	if (!(cfg->opt & MONO_OPT_INTRINS))
		return NULL;

#ifdef MONO_ARCH_SIMD_INTRINSICS
	if (cfg->opt & MONO_OPT_SIMD) {
		ins = mono_emit_simd_intrinsics (cfg, cmethod, fsig, args);
		if (ins)
			return ins;
	}
#endif

	return NULL;
}
static MonoInst*
llvm_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args, gboolean in_corlib)
{
	MonoInst *ins = NULL;
	int opcode = 0;

	if (in_corlib && !strcmp (m_class_get_name (cmethod->klass), "MathF") && fsig->param_count && fsig->params [0]->type == MONO_TYPE_R4 && cfg->r4fp) {
		if (!strcmp (cmethod->name, "Sin"))
			opcode = OP_SINF;
		else if (!strcmp (cmethod->name, "Cos"))
			opcode = OP_COSF;
		else if (!strcmp (cmethod->name, "Abs"))
			opcode = OP_ABSF;
		else if (!strcmp (cmethod->name, "Sqrt"))
			opcode = OP_SQRTF;
		else if (!strcmp (cmethod->name, "Max"))
			opcode = OP_RMAX;
		else if (!strcmp (cmethod->name, "Pow"))
			opcode = OP_RPOW;
		if (opcode) {
			MONO_INST_NEW (cfg, ins, opcode);
			ins->type = STACK_R8;
			ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
			ins->sreg1 = args [0]->dreg;
			if (fsig->param_count == 2)
				ins->sreg2 = args [1]->dreg;
			MONO_ADD_INS (cfg->cbb, ins);
		}
	}

	/* The LLVM backend supports these intrinsics */
	if (cmethod->klass == mono_class_try_get_math_class ()) {
		if (strcmp (cmethod->name, "Sin") == 0) {
			opcode = OP_SIN;
		} else if (strcmp (cmethod->name, "Cos") == 0) {
			opcode = OP_COS;
		} else if (strcmp (cmethod->name, "Sqrt") == 0) {
			opcode = OP_SQRT;
		} else if (strcmp (cmethod->name, "Abs") == 0 && fsig->params [0]->type == MONO_TYPE_R8) {
			opcode = OP_ABS;
		}

		if (opcode && fsig->param_count == 1) {
			MONO_INST_NEW (cfg, ins, opcode);
			ins->type = STACK_R8;
			ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
			ins->sreg1 = args [0]->dreg;
			MONO_ADD_INS (cfg->cbb, ins);
		}

		opcode = 0;
		if (cfg->opt & MONO_OPT_CMOV) {
			if (strcmp (cmethod->name, "Min") == 0) {
				if (fsig->params [0]->type == MONO_TYPE_I4)
					opcode = OP_IMIN;
				if (fsig->params [0]->type == MONO_TYPE_U4)
					opcode = OP_IMIN_UN;
				else if (fsig->params [0]->type == MONO_TYPE_I8)
					opcode = OP_LMIN;
				else if (fsig->params [0]->type == MONO_TYPE_U8)
					opcode = OP_LMIN_UN;
			} else if (strcmp (cmethod->name, "Max") == 0) {
				if (fsig->params [0]->type == MONO_TYPE_I4)
					opcode = OP_IMAX;
				if (fsig->params [0]->type == MONO_TYPE_U4)
					opcode = OP_IMAX_UN;
				else if (fsig->params [0]->type == MONO_TYPE_I8)
					opcode = OP_LMAX;
				else if (fsig->params [0]->type == MONO_TYPE_U8)
					opcode = OP_LMAX_UN;
			}
		}

		if (opcode && fsig->param_count == 2) {
			MONO_INST_NEW (cfg, ins, opcode);
			ins->type = fsig->params [0]->type == MONO_TYPE_I4 ? STACK_I4 : STACK_I8;
			ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
			ins->sreg1 = args [0]->dreg;
			ins->sreg2 = args [1]->dreg;
			MONO_ADD_INS (cfg->cbb, ins);
		}
	}

	return ins;
}
static MonoInst*
emit_span_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	MonoInst *ins;

	MonoClassField *ptr_field = mono_class_get_field_from_name_full (cmethod->klass, "_pointer", NULL);
	if (!ptr_field)
		/* Portable Span<T> */
		return NULL;

	if (!strcmp (cmethod->name, "get_Item")) {
		MonoClassField *length_field = mono_class_get_field_from_name_full (cmethod->klass, "_length", NULL);

		g_assert (length_field);

		MonoGenericClass *gclass = mono_class_get_generic_class (cmethod->klass);
		MonoClass *param_class = mono_class_from_mono_type_internal (gclass->context.class_inst->type_argv [0]);

		if (mini_is_gsharedvt_variable_klass (param_class))
			return NULL;

		int span_reg = args [0]->dreg;
		/* Load _pointer.Value */
		int base_reg = alloc_preg (cfg);
		EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, base_reg, span_reg, ptr_field->offset - MONO_ABI_SIZEOF (MonoObject));
		/* Similar to mini_emit_ldelema_1_ins () */
		int size = mono_class_array_element_size (param_class);

		int index_reg = mini_emit_sext_index_reg (cfg, args [1]);

		MONO_EMIT_BOUNDS_CHECK_OFFSET (cfg, span_reg, length_field->offset - MONO_ABI_SIZEOF (MonoObject), index_reg);

		// FIXME: Sign extend index ?

		int mult_reg = alloc_preg (cfg);
		int add_reg = alloc_preg (cfg);

		MONO_EMIT_NEW_BIALU_IMM (cfg, OP_MUL_IMM, mult_reg, index_reg, size);
		EMIT_NEW_BIALU (cfg, ins, OP_PADD, add_reg, base_reg, mult_reg);
		ins->klass = param_class;
		ins->type = STACK_MP;

		return ins;
	} else if (!strcmp (cmethod->name, "get_Length")) {
		MonoClassField *length_field = mono_class_get_field_from_name_full (cmethod->klass, "_length", NULL);
		g_assert (length_field);

		/*
		 * FIXME: This doesn't work with abcrem, since the src is a unique LDADDR not
		 * the same array object.
		 */
		MONO_INST_NEW (cfg, ins, OP_LDLEN);
		ins->dreg = alloc_preg (cfg);
		ins->sreg1 = args [0]->dreg;
		ins->inst_imm = length_field->offset - MONO_ABI_SIZEOF (MonoObject);
		ins->type = STACK_I4;
		MONO_ADD_INS (cfg->cbb, ins);

		cfg->flags |= MONO_CFG_NEEDS_DECOMPOSE;
		cfg->cbb->needs_decompose = TRUE;

		return ins;
	}

	return NULL;
}
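/*
 * Illustrative sketch of what the get_Item fast path above reduces to
 * (pseudo-code, not literal emitted IR):
 *
 *   if ((guint32) index >= (guint32) span._length)
 *       <bounds-check failure>;
 *   return span._pointer.Value + index * sizeof (T);
 *
 * i.e. a bounds check against the _length field followed by plain pointer
 * arithmetic on the _pointer field, bypassing the managed getter.
 */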
static MonoInst*
emit_unsafe_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	MonoInst *ins;
	int dreg, align;
	MonoGenericContext *ctx = mono_method_get_context (cmethod);
	MonoType *t;

	if (!strcmp (cmethod->name, "As")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);

		t = ctx->method_inst->type_argv [0];
		if (mini_is_gsharedvt_variable_type (t))
			return NULL;
		if (ctx->method_inst->type_argc == 2) {
			dreg = alloc_preg (cfg);
			EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
			ins->type = STACK_OBJ;
			ins->klass = mono_get_object_class ();
			return ins;
		} else if (ctx->method_inst->type_argc == 1) {
			// Casts the given object to the specified type, performs no dynamic type checking.
			g_assert (fsig->param_count == 1);
			g_assert (fsig->params [0]->type == MONO_TYPE_OBJECT);
			dreg = alloc_preg (cfg);
			EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
			ins->type = STACK_OBJ;
			ins->klass = mono_class_from_mono_type_internal (ctx->method_inst->type_argv [0]);
			return ins;
		}
	} else if (!strcmp (cmethod->name, "AsPointer")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 1);

		dreg = alloc_preg (cfg);
		EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
		ins->type = STACK_PTR;
		return ins;
	} else if (!strcmp (cmethod->name, "AsRef")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 1);

		dreg = alloc_preg (cfg);
		EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
		ins->type = STACK_OBJ;
		ins->klass = mono_get_object_class ();
		return ins;
	} else if (!strcmp (cmethod->name, "AreSame")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		dreg = alloc_ireg (cfg);
		EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
		EMIT_NEW_UNALU (cfg, ins, OP_PCEQ, dreg, -1);
		return ins;
	} else if (!strcmp (cmethod->name, "IsAddressLessThan")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		dreg = alloc_ireg (cfg);
		EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
		EMIT_NEW_UNALU (cfg, ins, OP_PCLT_UN, dreg, -1);
		return ins;
	} else if (!strcmp (cmethod->name, "IsAddressGreaterThan")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		dreg = alloc_ireg (cfg);
		EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
		EMIT_NEW_UNALU (cfg, ins, OP_PCGT_UN, dreg, -1);
		return ins;
	} else if (!strcmp (cmethod->name, "Add")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		int mul_reg = alloc_preg (cfg);

		t = ctx->method_inst->type_argv [0];
		MonoInst *esize_ins;
		if (mini_is_gsharedvt_variable_type (t)) {
			esize_ins = mini_emit_get_gsharedvt_info_klass (cfg, mono_class_from_mono_type_internal (t), MONO_RGCTX_INFO_CLASS_SIZEOF);
			if (SIZEOF_REGISTER == 8)
				MONO_EMIT_NEW_UNALU (cfg, OP_SEXT_I4, esize_ins->dreg, esize_ins->dreg);
		} else {
			t = mini_type_get_underlying_type (t);
			int esize = mono_class_array_element_size (mono_class_from_mono_type_internal (t));
			EMIT_NEW_ICONST (cfg, esize_ins, esize);
		}
		esize_ins->type = STACK_I4;

		EMIT_NEW_BIALU (cfg, ins, OP_PMUL, mul_reg, args [1]->dreg, esize_ins->dreg);
		ins->type = STACK_PTR;

		dreg = alloc_preg (cfg);
		EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, mul_reg);
		ins->type = STACK_PTR;
		return ins;
	} else if (!strcmp (cmethod->name, "AddByteOffset")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		if (fsig->params [1]->type == MONO_TYPE_I) {
			int dreg = alloc_preg (cfg);
			EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, args [1]->dreg);
			ins->type = STACK_PTR;
			return ins;
		} else if (fsig->params [1]->type == MONO_TYPE_U8) {
			int sreg = args [1]->dreg;
			if (SIZEOF_REGISTER == 4) {
				sreg = alloc_ireg (cfg);
				EMIT_NEW_UNALU (cfg, ins, OP_LCONV_TO_U4, sreg, args [1]->dreg);
			}
			int dreg = alloc_preg (cfg);
			EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, sreg);
			ins->type = STACK_PTR;
			return ins;
		}
	} else if (!strcmp (cmethod->name, "SizeOf")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 0);

		t = ctx->method_inst->type_argv [0];
		if (mini_is_gsharedvt_variable_type (t)) {
			ins = mini_emit_get_gsharedvt_info_klass (cfg, mono_class_from_mono_type_internal (t), MONO_RGCTX_INFO_CLASS_SIZEOF);
		} else {
			int esize = mono_type_size (t, &align);
			EMIT_NEW_ICONST (cfg, ins, esize);
		}
		ins->type = STACK_I4;
		return ins;
	} else if (!strcmp (cmethod->name, "ReadUnaligned")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 1);

		t = ctx->method_inst->type_argv [0];
		t = mini_get_underlying_type (t);
		if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8) {
			dreg = alloc_ireg (cfg);
			EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, ins, t, args [0]->dreg, 0);
			ins->type = STACK_I4;
			ins->flags |= MONO_INST_UNALIGNED;
			return ins;
		}
	} else if (!strcmp (cmethod->name, "WriteUnaligned")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		t = ctx->method_inst->type_argv [0];
		t = mini_get_underlying_type (t);
		if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8) {
			dreg = alloc_ireg (cfg);
			EMIT_NEW_STORE_MEMBASE_TYPE (cfg, ins, t, args [0]->dreg, 0, args [1]->dreg);
			ins->flags |= MONO_INST_UNALIGNED;
			return ins;
		}
	} else if (!strcmp (cmethod->name, "ByteOffset")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		int dreg = alloc_preg (cfg);
		EMIT_NEW_BIALU (cfg, ins, OP_PSUB, dreg, args [1]->dreg, args [0]->dreg);
		ins->type = STACK_PTR;
		return ins;
	}

	return NULL;
}
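/*
 * Note on the Unsafe.* lowerings above: As/AsPointer/AsRef all reduce to a
 * plain OP_MOVE, since managed pointers, object references and native
 * pointers share the same machine representation, while Add computes
 * ptr + index * sizeof (T), with the element size coming from the RGCTX in
 * the gsharedvt case.
 */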
static MonoInst*
emit_jit_helpers_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	MonoInst *ins;
	int dreg;
	MonoGenericContext *ctx = mono_method_get_context (cmethod);
	MonoType *t;

	if (!strcmp (cmethod->name, "EnumEquals") || !strcmp (cmethod->name, "EnumCompareTo")) {
		g_assert (ctx);
		g_assert (ctx->method_inst);
		g_assert (ctx->method_inst->type_argc == 1);
		g_assert (fsig->param_count == 2);

		t = ctx->method_inst->type_argv [0];
		t = mini_get_underlying_type (t);
		if (mini_is_gsharedvt_variable_type (t))
			return NULL;

		gboolean is_i8 = (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_U8);
		gboolean is_unsigned = (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_U2 || t->type == MONO_TYPE_U4 || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U);
		int cmp_op, ceq_op, cgt_op, clt_op;

		if (is_i8) {
			cmp_op = OP_LCOMPARE;
			ceq_op = OP_LCEQ;
			cgt_op = is_unsigned ? OP_LCGT_UN : OP_LCGT;
			clt_op = is_unsigned ? OP_LCLT_UN : OP_LCLT;
		} else {
			cmp_op = OP_ICOMPARE;
			ceq_op = OP_ICEQ;
			cgt_op = is_unsigned ? OP_ICGT_UN : OP_ICGT;
			clt_op = is_unsigned ? OP_ICLT_UN : OP_ICLT;
		}

		if (!strcmp (cmethod->name, "EnumEquals")) {
			dreg = alloc_ireg (cfg);
			EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
			EMIT_NEW_UNALU (cfg, ins, ceq_op, dreg, -1);
		} else {
			// Use the branchless code (a > b) - (a < b)
			int reg1, reg2;

			reg1 = alloc_ireg (cfg);
			reg2 = alloc_ireg (cfg);
			dreg = alloc_ireg (cfg);

			EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
			EMIT_NEW_UNALU (cfg, ins, cgt_op, reg1, -1);
			EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
			EMIT_NEW_UNALU (cfg, ins, clt_op, reg2, -1);
			EMIT_NEW_BIALU (cfg, ins, OP_ISUB, dreg, reg1, reg2);
		}
		return ins;
	}

	return NULL;
}
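/*
 * The branchless EnumCompareTo sequence above computes (a > b) - (a < b),
 * which produces the usual CompareTo result without branching: for a > b it
 * yields 1 - 0 = 1, for a == b it yields 0 - 0 = 0, and for a < b it yields
 * 0 - 1 = -1.
 */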
509 MonoInst*
510 mini_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
512 MonoInst *ins = NULL;
513 MonoClass *runtime_helpers_class = mono_class_get_runtime_helpers_class ();
515 const char* cmethod_klass_name_space = m_class_get_name_space (cmethod->klass);
516 const char* cmethod_klass_name = m_class_get_name (cmethod->klass);
517 MonoImage *cmethod_klass_image = m_class_get_image (cmethod->klass);
518 gboolean in_corlib = cmethod_klass_image == mono_defaults.corlib;
520 /* Required intrinsics are always used even with -O=-intrins */
521 if (in_corlib &&
522 !strcmp (cmethod_klass_name_space, "System") &&
523 !strcmp (cmethod_klass_name, "ByReference`1") &&
524 !strcmp (cmethod->name, "get_Value")) {
525 g_assert (fsig->hasthis && fsig->param_count == 0);
526 int dreg = alloc_preg (cfg);
527 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, dreg, args [0]->dreg, 0);
528 return ins;
529 } else if (in_corlib && cmethod->klass == mono_defaults.object_class) {
530 if (!strcmp (cmethod->name, "GetRawData")) {
531 int dreg = alloc_preg (cfg);
532 EMIT_NEW_BIALU_IMM (cfg, ins, OP_PADD_IMM, dreg, args [0]->dreg, MONO_ABI_SIZEOF (MonoObject));
533 return ins;
537 if (!(cfg->opt & MONO_OPT_INTRINS))
538 return NULL;
540 if (cmethod->klass == mono_defaults.string_class) {
541 if (strcmp (cmethod->name, "get_Chars") == 0 && fsig->param_count + fsig->hasthis == 2) {
542 int dreg = alloc_ireg (cfg);
543 int index_reg = alloc_preg (cfg);
544 int add_reg = alloc_preg (cfg);
546 #if SIZEOF_REGISTER == 8
547 if (COMPILE_LLVM (cfg)) {
548 MONO_EMIT_NEW_UNALU (cfg, OP_ZEXT_I4, index_reg, args [1]->dreg);
549 } else {
550 /* The array reg is 64 bits but the index reg is only 32 */
551 MONO_EMIT_NEW_UNALU (cfg, OP_SEXT_I4, index_reg, args [1]->dreg);
553 #else
554 index_reg = args [1]->dreg;
555 #endif
556 MONO_EMIT_BOUNDS_CHECK (cfg, args [0]->dreg, MonoString, length, index_reg);
558 #if defined(TARGET_X86) || defined(TARGET_AMD64)
559 EMIT_NEW_X86_LEA (cfg, ins, args [0]->dreg, index_reg, 1, MONO_STRUCT_OFFSET (MonoString, chars));
560 add_reg = ins->dreg;
561 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU2_MEMBASE, dreg,
562 add_reg, 0);
563 #else
564 int mult_reg = alloc_preg (cfg);
565 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, mult_reg, index_reg, 1);
566 MONO_EMIT_NEW_BIALU (cfg, OP_PADD, add_reg, mult_reg, args [0]->dreg);
567 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU2_MEMBASE, dreg,
568 add_reg, MONO_STRUCT_OFFSET (MonoString, chars));
569 #endif
570 mini_type_from_op (cfg, ins, NULL, NULL);
571 return ins;
572 } else if (strcmp (cmethod->name, "get_Length") == 0 && fsig->param_count + fsig->hasthis == 1) {
573 int dreg = alloc_ireg (cfg);
574 /* Decompose later to allow more optimizations */
575 EMIT_NEW_UNALU (cfg, ins, OP_STRLEN, dreg, args [0]->dreg);
576 ins->type = STACK_I4;
577 ins->flags |= MONO_INST_FAULT;
578 cfg->cbb->needs_decompose = TRUE;
579 cfg->flags |= MONO_CFG_NEEDS_DECOMPOSE;
581 return ins;
582 } else
583 return NULL;
584 } else if (cmethod->klass == mono_defaults.object_class) {
585 if (strcmp (cmethod->name, "GetType") == 0 && fsig->param_count + fsig->hasthis == 1) {
586 int dreg = alloc_ireg_ref (cfg);
587 int vt_reg = alloc_preg (cfg);
588 MONO_EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, vt_reg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
589 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, dreg, vt_reg, MONO_STRUCT_OFFSET (MonoVTable, type));
590 mini_type_from_op (cfg, ins, NULL, NULL);
592 return ins;
593 } else if (!cfg->backend->emulate_mul_div && strcmp (cmethod->name, "InternalGetHashCode") == 0 && fsig->param_count == 1 && !mono_gc_is_moving ()) {
594 int dreg = alloc_ireg (cfg);
595 int t1 = alloc_ireg (cfg);
597 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, t1, args [0]->dreg, 3);
598 EMIT_NEW_BIALU_IMM (cfg, ins, OP_MUL_IMM, dreg, t1, 2654435761u);
599 ins->type = STACK_I4;
601 return ins;
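/* 2654435761 is the well-known Knuth multiplicative-hash constant
 * (approximately 2^32 / phi), so this is a simple multiplicative hash of
 * the object address; it is only usable here because this path is taken
 * solely when the GC is non-moving. */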
602 } else if (strcmp (cmethod->name, ".ctor") == 0 && fsig->param_count == 0) {
603 MONO_INST_NEW (cfg, ins, OP_NOP);
604 MONO_ADD_INS (cfg->cbb, ins);
605 return ins;
606 } else
607 return NULL;
608 } else if (cmethod->klass == mono_defaults.array_class) {
609 if (strcmp (cmethod->name, "GetGenericValueImpl") == 0 && fsig->param_count + fsig->hasthis == 3 && !cfg->gsharedvt)
610 return emit_array_generic_access (cfg, fsig, args, FALSE);
611 else if (strcmp (cmethod->name, "SetGenericValueImpl") == 0 && fsig->param_count + fsig->hasthis == 3 && !cfg->gsharedvt)
612 return emit_array_generic_access (cfg, fsig, args, TRUE);
613 else if (!strcmp (cmethod->name, "GetRawSzArrayData")) {
614 int dreg = alloc_preg (cfg);
615 EMIT_NEW_BIALU_IMM (cfg, ins, OP_PADD_IMM, dreg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, vector));
616 return ins;
619 #ifndef MONO_BIG_ARRAYS
621 * This is an inline version of GetLength/GetLowerBound(0) used frequently in
622 * Array methods.
624 else if (((strcmp (cmethod->name, "GetLength") == 0 && fsig->param_count + fsig->hasthis == 2) ||
625 (strcmp (cmethod->name, "GetLowerBound") == 0 && fsig->param_count + fsig->hasthis == 2)) &&
626 args [1]->opcode == OP_ICONST && args [1]->inst_c0 == 0) {
627 int dreg = alloc_ireg (cfg);
628 int bounds_reg = alloc_ireg_mp (cfg);
629 MonoBasicBlock *end_bb, *szarray_bb;
630 gboolean get_length = strcmp (cmethod->name, "GetLength") == 0;
632 NEW_BBLOCK (cfg, end_bb);
633 NEW_BBLOCK (cfg, szarray_bb);
635 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, ins, OP_LOAD_MEMBASE, bounds_reg,
636 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, bounds));
637 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_COMPARE_IMM, -1, bounds_reg, 0);
638 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_IBEQ, szarray_bb);
639 /* Non-szarray case */
640 if (get_length)
641 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
642 bounds_reg, MONO_STRUCT_OFFSET (MonoArrayBounds, length));
643 else
644 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
645 bounds_reg, MONO_STRUCT_OFFSET (MonoArrayBounds, lower_bound));
646 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_BR, end_bb);
647 MONO_START_BB (cfg, szarray_bb);
648 /* Szarray case */
649 if (get_length)
650 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
651 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, max_length));
652 else
653 MONO_EMIT_NEW_ICONST (cfg, dreg, 0);
654 MONO_START_BB (cfg, end_bb);
656 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, dreg);
657 ins->type = STACK_I4;
659 return ins;
661 #endif
663 if (cmethod->name [0] != 'g')
664 return NULL;
666 if (strcmp (cmethod->name, "get_Rank") == 0 && fsig->param_count + fsig->hasthis == 1) {
667 int dreg = alloc_ireg (cfg);
668 int vtable_reg = alloc_preg (cfg);
669 MONO_EMIT_NEW_LOAD_MEMBASE_OP_FAULT (cfg, OP_LOAD_MEMBASE, vtable_reg,
670 args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
671 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU1_MEMBASE, dreg,
672 vtable_reg, MONO_STRUCT_OFFSET (MonoVTable, rank));
673 mini_type_from_op (cfg, ins, NULL, NULL);
675 return ins;
676 } else if (strcmp (cmethod->name, "get_Length") == 0 && fsig->param_count + fsig->hasthis == 1) {
677 int dreg = alloc_ireg (cfg);
679 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, ins, OP_LOADI4_MEMBASE, dreg,
680 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, max_length));
681 mini_type_from_op (cfg, ins, NULL, NULL);
683 return ins;
684 } else
685 return NULL;
686 } else if (cmethod->klass == runtime_helpers_class) {
687 if (strcmp (cmethod->name, "get_OffsetToStringData") == 0 && fsig->param_count == 0) {
688 EMIT_NEW_ICONST (cfg, ins, MONO_STRUCT_OFFSET (MonoString, chars));
689 return ins;
690 } else if (strcmp (cmethod->name, "IsReferenceOrContainsReferences") == 0 && fsig->param_count == 0) {
691 MonoGenericContext *ctx = mono_method_get_context (cmethod);
692 g_assert (ctx);
693 g_assert (ctx->method_inst);
694 g_assert (ctx->method_inst->type_argc == 1);
695 MonoType *arg_type = ctx->method_inst->type_argv [0];
696 MonoType *t;
697 MonoClass *klass;
699 ins = NULL;
701 /* Resolve the argument class as possible so we can handle common cases fast */
702 t = mini_get_underlying_type (arg_type);
703 klass = mono_class_from_mono_type_internal (t);
704 mono_class_init_internal (klass);
705 if (MONO_TYPE_IS_REFERENCE (t))
706 EMIT_NEW_ICONST (cfg, ins, 1);
707 else if (MONO_TYPE_IS_PRIMITIVE (t))
708 EMIT_NEW_ICONST (cfg, ins, 0);
709 else if (cfg->gshared && (t->type == MONO_TYPE_VAR || t->type == MONO_TYPE_MVAR) && !mini_type_var_is_vt (t))
710 EMIT_NEW_ICONST (cfg, ins, 1);
711 else if (!cfg->gshared || !mini_class_check_context_used (cfg, klass))
712 EMIT_NEW_ICONST (cfg, ins, m_class_has_references (klass) ? 1 : 0);
713 else {
714 g_assert (cfg->gshared);
716 /* Have to use the original argument class here */
717 MonoClass *arg_class = mono_class_from_mono_type_internal (arg_type);
718 int context_used = mini_class_check_context_used (cfg, arg_class);
720 /* This returns 1 or 2 */
721 MonoInst *info = mini_emit_get_rgctx_klass (cfg, context_used, arg_class, MONO_RGCTX_INFO_CLASS_IS_REF_OR_CONTAINS_REFS);
722 int dreg = alloc_ireg (cfg);
723 EMIT_NEW_BIALU_IMM (cfg, ins, OP_ISUB_IMM, dreg, info->dreg, 1);
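/* The RGCTX slot yields 1 or 2 (see the comment above); subtracting 1 maps
 * that onto the 0/1 boolean result the other branches produce directly. */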
726 return ins;
727 } else if (strcmp (cmethod->name, "IsBitwiseEquatable") == 0 && fsig->param_count == 0) {
728 MonoGenericContext *ctx = mono_method_get_context (cmethod);
729 g_assert (ctx);
730 g_assert (ctx->method_inst);
731 g_assert (ctx->method_inst->type_argc == 1);
732 MonoType *arg_type = ctx->method_inst->type_argv [0];
733 MonoType *t;
734 ins = NULL;
736 /* Resolve the argument class as possible so we can handle common cases fast */
737 t = mini_get_underlying_type (arg_type);
739 if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8)
740 EMIT_NEW_ICONST (cfg, ins, 1);
741 else
742 EMIT_NEW_ICONST (cfg, ins, 0);
743 return ins;
744 } else if (!strcmp (cmethod->name, "ObjectHasComponentSize")) {
745 g_assert (fsig->param_count == 1);
746 g_assert (fsig->params [0]->type == MONO_TYPE_OBJECT);
747 // Return true for arrays and string
748 int dreg;
750 dreg = alloc_ireg (cfg);
752 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOAD_MEMBASE, dreg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
753 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADU1_MEMBASE, dreg, dreg, MONO_STRUCT_OFFSET (MonoVTable, flags));
754 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IAND_IMM, dreg, dreg, MONO_VT_FLAG_ARRAY_OR_STRING);
755 EMIT_NEW_BIALU_IMM (cfg, ins, OP_COMPARE_IMM, -1, dreg, 0);
756 EMIT_NEW_UNALU (cfg, ins, OP_ICGT, dreg, -1);
757 ins->type = STACK_I4;
758 return ins;
759 } else
760 return NULL;
761 } else if (cmethod->klass == mono_defaults.monitor_class) {
762 gboolean is_enter = FALSE;
763 gboolean is_v4 = FALSE;
765 if (!strcmp (cmethod->name, "Enter") && fsig->param_count == 2 && fsig->params [1]->byref) {
766 is_enter = TRUE;
767 is_v4 = TRUE;
769 if (!strcmp (cmethod->name, "Enter") && fsig->param_count == 1)
770 is_enter = TRUE;
772 if (is_enter) {
774 * To make async stack traces work, icalls which can block should have a wrapper.
775 * For Monitor.Enter, emit two calls: a fastpath which doesn't have a wrapper, and a slowpath, which does.
777 MonoBasicBlock *end_bb;
779 NEW_BBLOCK (cfg, end_bb);
781 if (is_v4)
782 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_v4_fast, args);
783 else
784 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_fast, args);
786 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ICOMPARE_IMM, -1, ins->dreg, 0);
787 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_IBNE_UN, end_bb);
789 if (is_v4)
790 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_v4_internal, args);
791 else
792 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_internal, args);
794 MONO_START_BB (cfg, end_bb);
795 return ins;
797 } else if (cmethod->klass == mono_defaults.thread_class) {
798 if (strcmp (cmethod->name, "SpinWait_nop") == 0 && fsig->param_count == 0) {
799 MONO_INST_NEW (cfg, ins, OP_RELAXED_NOP);
800 MONO_ADD_INS (cfg->cbb, ins);
801 return ins;
802 } else if (strcmp (cmethod->name, "MemoryBarrier") == 0 && fsig->param_count == 0) {
803 return mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
804 } else if (!strcmp (cmethod->name, "VolatileRead") && fsig->param_count == 1) {
805 guint32 opcode = 0;
806 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
808 if (fsig->params [0]->type == MONO_TYPE_I1)
809 opcode = OP_LOADI1_MEMBASE;
810 else if (fsig->params [0]->type == MONO_TYPE_U1)
811 opcode = OP_LOADU1_MEMBASE;
812 else if (fsig->params [0]->type == MONO_TYPE_I2)
813 opcode = OP_LOADI2_MEMBASE;
814 else if (fsig->params [0]->type == MONO_TYPE_U2)
815 opcode = OP_LOADU2_MEMBASE;
816 else if (fsig->params [0]->type == MONO_TYPE_I4)
817 opcode = OP_LOADI4_MEMBASE;
818 else if (fsig->params [0]->type == MONO_TYPE_U4)
819 opcode = OP_LOADU4_MEMBASE;
820 else if (fsig->params [0]->type == MONO_TYPE_I8 || fsig->params [0]->type == MONO_TYPE_U8)
821 opcode = OP_LOADI8_MEMBASE;
822 else if (fsig->params [0]->type == MONO_TYPE_R4)
823 opcode = OP_LOADR4_MEMBASE;
824 else if (fsig->params [0]->type == MONO_TYPE_R8)
825 opcode = OP_LOADR8_MEMBASE;
826 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I || fsig->params [0]->type == MONO_TYPE_U)
827 opcode = OP_LOAD_MEMBASE;
829 if (opcode) {
830 MONO_INST_NEW (cfg, ins, opcode);
831 ins->inst_basereg = args [0]->dreg;
832 ins->inst_offset = 0;
833 MONO_ADD_INS (cfg->cbb, ins);
835 switch (fsig->params [0]->type) {
836 case MONO_TYPE_I1:
837 case MONO_TYPE_U1:
838 case MONO_TYPE_I2:
839 case MONO_TYPE_U2:
840 case MONO_TYPE_I4:
841 case MONO_TYPE_U4:
842 ins->dreg = mono_alloc_ireg (cfg);
843 ins->type = STACK_I4;
844 break;
845 case MONO_TYPE_I8:
846 case MONO_TYPE_U8:
847 ins->dreg = mono_alloc_lreg (cfg);
848 ins->type = STACK_I8;
849 break;
850 case MONO_TYPE_I:
851 case MONO_TYPE_U:
852 ins->dreg = mono_alloc_ireg (cfg);
853 #if SIZEOF_REGISTER == 8
854 ins->type = STACK_I8;
855 #else
856 ins->type = STACK_I4;
857 #endif
858 break;
859 case MONO_TYPE_R4:
860 case MONO_TYPE_R8:
861 ins->dreg = mono_alloc_freg (cfg);
862 ins->type = STACK_R8;
863 break;
864 default:
865 g_assert (mini_type_is_reference (fsig->params [0]));
866 ins->dreg = mono_alloc_ireg_ref (cfg);
867 ins->type = STACK_OBJ;
868 break;
871 if (opcode == OP_LOADI8_MEMBASE)
872 ins = mono_decompose_opcode (cfg, ins);
874 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
876 return ins;
878 } else if (!strcmp (cmethod->name, "VolatileWrite") && fsig->param_count == 2) {
879 guint32 opcode = 0;
880 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
882 if (fsig->params [0]->type == MONO_TYPE_I1 || fsig->params [0]->type == MONO_TYPE_U1)
883 opcode = OP_STOREI1_MEMBASE_REG;
884 else if (fsig->params [0]->type == MONO_TYPE_I2 || fsig->params [0]->type == MONO_TYPE_U2)
885 opcode = OP_STOREI2_MEMBASE_REG;
886 else if (fsig->params [0]->type == MONO_TYPE_I4 || fsig->params [0]->type == MONO_TYPE_U4)
887 opcode = OP_STOREI4_MEMBASE_REG;
888 else if (fsig->params [0]->type == MONO_TYPE_I8 || fsig->params [0]->type == MONO_TYPE_U8)
889 opcode = OP_STOREI8_MEMBASE_REG;
890 else if (fsig->params [0]->type == MONO_TYPE_R4)
891 opcode = OP_STORER4_MEMBASE_REG;
892 else if (fsig->params [0]->type == MONO_TYPE_R8)
893 opcode = OP_STORER8_MEMBASE_REG;
894 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I || fsig->params [0]->type == MONO_TYPE_U)
895 opcode = OP_STORE_MEMBASE_REG;
897 if (opcode) {
898 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
900 MONO_INST_NEW (cfg, ins, opcode);
901 ins->sreg1 = args [1]->dreg;
902 ins->inst_destbasereg = args [0]->dreg;
903 ins->inst_offset = 0;
904 MONO_ADD_INS (cfg->cbb, ins);
906 if (opcode == OP_STOREI8_MEMBASE_REG)
907 ins = mono_decompose_opcode (cfg, ins);
909 return ins;
912 } else if (in_corlib &&
913 (strcmp (cmethod_klass_name_space, "System.Threading") == 0) &&
914 (strcmp (cmethod_klass_name, "Interlocked") == 0)) {
915 ins = NULL;
917 #if SIZEOF_REGISTER == 8
918 if (!cfg->llvm_only && strcmp (cmethod->name, "Read") == 0 && fsig->param_count == 1 && (fsig->params [0]->type == MONO_TYPE_I8)) {
919 if (!cfg->llvm_only && mono_arch_opcode_supported (OP_ATOMIC_LOAD_I8)) {
920 MONO_INST_NEW (cfg, ins, OP_ATOMIC_LOAD_I8);
921 ins->dreg = mono_alloc_preg (cfg);
922 ins->sreg1 = args [0]->dreg;
923 ins->type = STACK_I8;
924 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_SEQ;
925 MONO_ADD_INS (cfg->cbb, ins);
926 } else {
927 MonoInst *load_ins;
929 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
931 /* 64 bit reads are already atomic */
932 MONO_INST_NEW (cfg, load_ins, OP_LOADI8_MEMBASE);
933 load_ins->dreg = mono_alloc_preg (cfg);
934 load_ins->inst_basereg = args [0]->dreg;
935 load_ins->inst_offset = 0;
936 load_ins->type = STACK_I8;
937 MONO_ADD_INS (cfg->cbb, load_ins);
939 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
941 ins = load_ins;
944 #endif
946 if (strcmp (cmethod->name, "Increment") == 0 && fsig->param_count == 1) {
947 MonoInst *ins_iconst;
948 guint32 opcode = 0;
950 if (fsig->params [0]->type == MONO_TYPE_I4) {
951 opcode = OP_ATOMIC_ADD_I4;
952 cfg->has_atomic_add_i4 = TRUE;
954 #if SIZEOF_REGISTER == 8
955 else if (fsig->params [0]->type == MONO_TYPE_I8)
956 opcode = OP_ATOMIC_ADD_I8;
957 #endif
958 if (opcode) {
959 if (!mono_arch_opcode_supported (opcode))
960 return NULL;
961 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
962 ins_iconst->inst_c0 = 1;
963 ins_iconst->dreg = mono_alloc_ireg (cfg);
964 MONO_ADD_INS (cfg->cbb, ins_iconst);
966 MONO_INST_NEW (cfg, ins, opcode);
967 ins->dreg = mono_alloc_ireg (cfg);
968 ins->inst_basereg = args [0]->dreg;
969 ins->inst_offset = 0;
970 ins->sreg2 = ins_iconst->dreg;
971 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
972 MONO_ADD_INS (cfg->cbb, ins);
974 } else if (strcmp (cmethod->name, "Decrement") == 0 && fsig->param_count == 1) {
975 MonoInst *ins_iconst;
976 guint32 opcode = 0;
978 if (fsig->params [0]->type == MONO_TYPE_I4) {
979 opcode = OP_ATOMIC_ADD_I4;
980 cfg->has_atomic_add_i4 = TRUE;
982 #if SIZEOF_REGISTER == 8
983 else if (fsig->params [0]->type == MONO_TYPE_I8)
984 opcode = OP_ATOMIC_ADD_I8;
985 #endif
986 if (opcode) {
987 if (!mono_arch_opcode_supported (opcode))
988 return NULL;
989 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
990 ins_iconst->inst_c0 = -1;
991 ins_iconst->dreg = mono_alloc_ireg (cfg);
992 MONO_ADD_INS (cfg->cbb, ins_iconst);
994 MONO_INST_NEW (cfg, ins, opcode);
995 ins->dreg = mono_alloc_ireg (cfg);
996 ins->inst_basereg = args [0]->dreg;
997 ins->inst_offset = 0;
998 ins->sreg2 = ins_iconst->dreg;
999 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
1000 MONO_ADD_INS (cfg->cbb, ins);
1002 } else if (strcmp (cmethod->name, "Add") == 0 && fsig->param_count == 2) {
1003 guint32 opcode = 0;
1005 if (fsig->params [0]->type == MONO_TYPE_I4) {
1006 opcode = OP_ATOMIC_ADD_I4;
1007 cfg->has_atomic_add_i4 = TRUE;
1009 #if SIZEOF_REGISTER == 8
1010 else if (fsig->params [0]->type == MONO_TYPE_I8)
1011 opcode = OP_ATOMIC_ADD_I8;
1012 #endif
1013 if (opcode) {
1014 if (!mono_arch_opcode_supported (opcode))
1015 return NULL;
1016 MONO_INST_NEW (cfg, ins, opcode);
1017 ins->dreg = mono_alloc_ireg (cfg);
1018 ins->inst_basereg = args [0]->dreg;
1019 ins->inst_offset = 0;
1020 ins->sreg2 = args [1]->dreg;
1021 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
1022 MONO_ADD_INS (cfg->cbb, ins);
1025 else if (strcmp (cmethod->name, "Exchange") == 0 && fsig->param_count == 2) {
1026 MonoInst *f2i = NULL, *i2f;
1027 guint32 opcode, f2i_opcode, i2f_opcode;
1028 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
1029 gboolean is_float = fsig->params [0]->type == MONO_TYPE_R4 || fsig->params [0]->type == MONO_TYPE_R8;
1031 if (fsig->params [0]->type == MONO_TYPE_I4 ||
1032 fsig->params [0]->type == MONO_TYPE_R4) {
1033 opcode = OP_ATOMIC_EXCHANGE_I4;
1034 f2i_opcode = OP_MOVE_F_TO_I4;
1035 i2f_opcode = OP_MOVE_I4_TO_F;
1036 cfg->has_atomic_exchange_i4 = TRUE;
1038 #if SIZEOF_REGISTER == 8
1039 else if (is_ref ||
1040 fsig->params [0]->type == MONO_TYPE_I8 ||
1041 fsig->params [0]->type == MONO_TYPE_R8 ||
1042 fsig->params [0]->type == MONO_TYPE_I) {
1043 opcode = OP_ATOMIC_EXCHANGE_I8;
1044 f2i_opcode = OP_MOVE_F_TO_I8;
1045 i2f_opcode = OP_MOVE_I8_TO_F;
1047 #else
1048 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I) {
1049 opcode = OP_ATOMIC_EXCHANGE_I4;
1050 cfg->has_atomic_exchange_i4 = TRUE;
1052 #endif
1053 else
1054 return NULL;
1056 if (!mono_arch_opcode_supported (opcode))
1057 return NULL;
1059 if (is_float) {
1060 /* TODO: Decompose these opcodes instead of bailing here. */
1061 if (COMPILE_SOFT_FLOAT (cfg))
1062 return NULL;
1064 MONO_INST_NEW (cfg, f2i, f2i_opcode);
1065 f2i->dreg = mono_alloc_ireg (cfg);
1066 f2i->sreg1 = args [1]->dreg;
1067 if (f2i_opcode == OP_MOVE_F_TO_I4)
1068 f2i->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1069 MONO_ADD_INS (cfg->cbb, f2i);
1072 MONO_INST_NEW (cfg, ins, opcode);
1073 ins->dreg = is_ref ? mono_alloc_ireg_ref (cfg) : mono_alloc_ireg (cfg);
1074 ins->inst_basereg = args [0]->dreg;
1075 ins->inst_offset = 0;
1076 ins->sreg2 = is_float ? f2i->dreg : args [1]->dreg;
1077 MONO_ADD_INS (cfg->cbb, ins);
1079 switch (fsig->params [0]->type) {
1080 case MONO_TYPE_I4:
1081 ins->type = STACK_I4;
1082 break;
1083 case MONO_TYPE_I8:
1084 ins->type = STACK_I8;
1085 break;
1086 case MONO_TYPE_I:
1087 #if SIZEOF_REGISTER == 8
1088 ins->type = STACK_I8;
1089 #else
1090 ins->type = STACK_I4;
1091 #endif
1092 break;
1093 case MONO_TYPE_R4:
1094 case MONO_TYPE_R8:
1095 ins->type = STACK_R8;
1096 break;
1097 default:
1098 g_assert (mini_type_is_reference (fsig->params [0]));
1099 ins->type = STACK_OBJ;
1100 break;
1103 if (is_float) {
1104 MONO_INST_NEW (cfg, i2f, i2f_opcode);
1105 i2f->dreg = mono_alloc_freg (cfg);
1106 i2f->sreg1 = ins->dreg;
1107 i2f->type = STACK_R8;
1108 if (i2f_opcode == OP_MOVE_I4_TO_F)
1109 i2f->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1110 MONO_ADD_INS (cfg->cbb, i2f);
1112 ins = i2f;
1115 if (cfg->gen_write_barriers && is_ref)
1116 mini_emit_write_barrier (cfg, args [0], args [1]);
1118 else if ((strcmp (cmethod->name, "CompareExchange") == 0) && fsig->param_count == 3) {
1119 MonoInst *f2i_new = NULL, *f2i_cmp = NULL, *i2f;
1120 guint32 opcode, f2i_opcode, i2f_opcode;
1121 gboolean is_ref = mini_type_is_reference (fsig->params [1]);
1122 gboolean is_float = fsig->params [1]->type == MONO_TYPE_R4 || fsig->params [1]->type == MONO_TYPE_R8;
1124 if (fsig->params [1]->type == MONO_TYPE_I4 ||
1125 fsig->params [1]->type == MONO_TYPE_R4) {
1126 opcode = OP_ATOMIC_CAS_I4;
1127 f2i_opcode = OP_MOVE_F_TO_I4;
1128 i2f_opcode = OP_MOVE_I4_TO_F;
1129 cfg->has_atomic_cas_i4 = TRUE;
1131 #if SIZEOF_REGISTER == 8
1132 else if (is_ref ||
1133 fsig->params [1]->type == MONO_TYPE_I8 ||
1134 fsig->params [1]->type == MONO_TYPE_R8 ||
1135 fsig->params [1]->type == MONO_TYPE_I) {
1136 opcode = OP_ATOMIC_CAS_I8;
1137 f2i_opcode = OP_MOVE_F_TO_I8;
1138 i2f_opcode = OP_MOVE_I8_TO_F;
1140 #else
1141 else if (is_ref || fsig->params [1]->type == MONO_TYPE_I) {
1142 opcode = OP_ATOMIC_CAS_I4;
1143 cfg->has_atomic_cas_i4 = TRUE;
1145 #endif
1146 else
1147 return NULL;
1149 if (!mono_arch_opcode_supported (opcode))
1150 return NULL;
1152 if (is_float) {
1153 /* TODO: Decompose these opcodes instead of bailing here. */
1154 if (COMPILE_SOFT_FLOAT (cfg))
1155 return NULL;
1157 MONO_INST_NEW (cfg, f2i_new, f2i_opcode);
1158 f2i_new->dreg = mono_alloc_ireg (cfg);
1159 f2i_new->sreg1 = args [1]->dreg;
1160 if (f2i_opcode == OP_MOVE_F_TO_I4)
1161 f2i_new->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1162 MONO_ADD_INS (cfg->cbb, f2i_new);
1164 MONO_INST_NEW (cfg, f2i_cmp, f2i_opcode);
1165 f2i_cmp->dreg = mono_alloc_ireg (cfg);
1166 f2i_cmp->sreg1 = args [2]->dreg;
1167 if (f2i_opcode == OP_MOVE_F_TO_I4)
1168 f2i_cmp->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1169 MONO_ADD_INS (cfg->cbb, f2i_cmp);
1172 MONO_INST_NEW (cfg, ins, opcode);
1173 ins->dreg = is_ref ? alloc_ireg_ref (cfg) : alloc_ireg (cfg);
1174 ins->sreg1 = args [0]->dreg;
1175 ins->sreg2 = is_float ? f2i_new->dreg : args [1]->dreg;
1176 ins->sreg3 = is_float ? f2i_cmp->dreg : args [2]->dreg;
1177 MONO_ADD_INS (cfg->cbb, ins);
1179 switch (fsig->params [1]->type) {
1180 case MONO_TYPE_I4:
1181 ins->type = STACK_I4;
1182 break;
1183 case MONO_TYPE_I8:
1184 ins->type = STACK_I8;
1185 break;
1186 case MONO_TYPE_I:
1187 #if SIZEOF_REGISTER == 8
1188 ins->type = STACK_I8;
1189 #else
1190 ins->type = STACK_I4;
1191 #endif
1192 break;
1193 case MONO_TYPE_R4:
1194 ins->type = cfg->r4_stack_type;
1195 break;
1196 case MONO_TYPE_R8:
1197 ins->type = STACK_R8;
1198 break;
1199 default:
1200 g_assert (mini_type_is_reference (fsig->params [1]));
1201 ins->type = STACK_OBJ;
1202 break;
1205 if (is_float) {
1206 MONO_INST_NEW (cfg, i2f, i2f_opcode);
1207 i2f->dreg = mono_alloc_freg (cfg);
1208 i2f->sreg1 = ins->dreg;
1209 i2f->type = STACK_R8;
1210 if (i2f_opcode == OP_MOVE_I4_TO_F)
1211 i2f->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1212 MONO_ADD_INS (cfg->cbb, i2f);
1214 ins = i2f;
1217 if (cfg->gen_write_barriers && is_ref)
1218 mini_emit_write_barrier (cfg, args [0], args [1]);
1220 else if ((strcmp (cmethod->name, "CompareExchange") == 0) && fsig->param_count == 4 &&
1221 fsig->params [1]->type == MONO_TYPE_I4) {
1222 MonoInst *cmp, *ceq;
1224 if (!mono_arch_opcode_supported (OP_ATOMIC_CAS_I4))
1225 return NULL;
1227 /* int32 r = CAS (location, value, comparand); */
1228 MONO_INST_NEW (cfg, ins, OP_ATOMIC_CAS_I4);
1229 ins->dreg = alloc_ireg (cfg);
1230 ins->sreg1 = args [0]->dreg;
1231 ins->sreg2 = args [1]->dreg;
1232 ins->sreg3 = args [2]->dreg;
1233 ins->type = STACK_I4;
1234 MONO_ADD_INS (cfg->cbb, ins);
1236 /* bool result = r == comparand; */
1237 MONO_INST_NEW (cfg, cmp, OP_ICOMPARE);
1238 cmp->sreg1 = ins->dreg;
1239 cmp->sreg2 = args [2]->dreg;
1240 cmp->type = STACK_I4;
1241 MONO_ADD_INS (cfg->cbb, cmp);
1243 MONO_INST_NEW (cfg, ceq, OP_ICEQ);
1244 ceq->dreg = alloc_ireg (cfg);
1245 ceq->type = STACK_I4;
1246 MONO_ADD_INS (cfg->cbb, ceq);
1248 /* *success = result; */
1249 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI1_MEMBASE_REG, args [3]->dreg, 0, ceq->dreg);
1251 cfg->has_atomic_cas_i4 = TRUE;
1253 else if (strcmp (cmethod->name, "MemoryBarrier") == 0 && fsig->param_count == 0)
1254 ins = mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
1256 if (ins)
1257 return ins;
1258 } else if (in_corlib &&
1259 (strcmp (cmethod_klass_name_space, "System.Threading") == 0) &&
1260 (strcmp (cmethod_klass_name, "Volatile") == 0)) {
1261 ins = NULL;
1263 if (!cfg->llvm_only && !strcmp (cmethod->name, "Read") && fsig->param_count == 1) {
1264 guint32 opcode = 0;
1265 MonoType *t = fsig->params [0];
1266 gboolean is_ref;
1267 gboolean is_float = t->type == MONO_TYPE_R4 || t->type == MONO_TYPE_R8;
1269 g_assert (t->byref);
1270 /* t is a byref type, so the reference check is more complicated */
1271 is_ref = mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t)));
1272 if (t->type == MONO_TYPE_I1)
1273 opcode = OP_ATOMIC_LOAD_I1;
1274 else if (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_BOOLEAN)
1275 opcode = OP_ATOMIC_LOAD_U1;
1276 else if (t->type == MONO_TYPE_I2)
1277 opcode = OP_ATOMIC_LOAD_I2;
1278 else if (t->type == MONO_TYPE_U2)
1279 opcode = OP_ATOMIC_LOAD_U2;
1280 else if (t->type == MONO_TYPE_I4)
1281 opcode = OP_ATOMIC_LOAD_I4;
1282 else if (t->type == MONO_TYPE_U4)
1283 opcode = OP_ATOMIC_LOAD_U4;
1284 else if (t->type == MONO_TYPE_R4)
1285 opcode = OP_ATOMIC_LOAD_R4;
1286 else if (t->type == MONO_TYPE_R8)
1287 opcode = OP_ATOMIC_LOAD_R8;
1288 #if SIZEOF_REGISTER == 8
1289 else if (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_I)
1290 opcode = OP_ATOMIC_LOAD_I8;
1291 else if (is_ref || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U)
1292 opcode = OP_ATOMIC_LOAD_U8;
1293 #else
1294 else if (t->type == MONO_TYPE_I)
1295 opcode = OP_ATOMIC_LOAD_I4;
1296 else if (is_ref || t->type == MONO_TYPE_U)
1297 opcode = OP_ATOMIC_LOAD_U4;
1298 #endif
1300 if (opcode) {
1301 if (!mono_arch_opcode_supported (opcode))
1302 return NULL;
1304 MONO_INST_NEW (cfg, ins, opcode);
1305 ins->dreg = is_ref ? mono_alloc_ireg_ref (cfg) : (is_float ? mono_alloc_freg (cfg) : mono_alloc_ireg (cfg));
1306 ins->sreg1 = args [0]->dreg;
1307 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_ACQ;
1308 MONO_ADD_INS (cfg->cbb, ins);
1310 switch (t->type) {
1311 case MONO_TYPE_BOOLEAN:
1312 case MONO_TYPE_I1:
1313 case MONO_TYPE_U1:
1314 case MONO_TYPE_I2:
1315 case MONO_TYPE_U2:
1316 case MONO_TYPE_I4:
1317 case MONO_TYPE_U4:
1318 ins->type = STACK_I4;
1319 break;
1320 case MONO_TYPE_I8:
1321 case MONO_TYPE_U8:
1322 ins->type = STACK_I8;
1323 break;
1324 case MONO_TYPE_I:
1325 case MONO_TYPE_U:
1326 #if SIZEOF_REGISTER == 8
1327 ins->type = STACK_I8;
1328 #else
1329 ins->type = STACK_I4;
1330 #endif
1331 break;
1332 case MONO_TYPE_R4:
1333 ins->type = cfg->r4_stack_type;
1334 break;
1335 case MONO_TYPE_R8:
1336 ins->type = STACK_R8;
1337 break;
1338 default:
1339 g_assert (is_ref);
1340 ins->type = STACK_OBJ;
1341 break;
1346 if (!cfg->llvm_only && !strcmp (cmethod->name, "Write") && fsig->param_count == 2) {
1347 guint32 opcode = 0;
1348 MonoType *t = fsig->params [0];
1349 gboolean is_ref;
1351 g_assert (t->byref);
1352 is_ref = mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t)));
1353 if (t->type == MONO_TYPE_I1)
1354 opcode = OP_ATOMIC_STORE_I1;
1355 else if (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_BOOLEAN)
1356 opcode = OP_ATOMIC_STORE_U1;
1357 else if (t->type == MONO_TYPE_I2)
1358 opcode = OP_ATOMIC_STORE_I2;
1359 else if (t->type == MONO_TYPE_U2)
1360 opcode = OP_ATOMIC_STORE_U2;
1361 else if (t->type == MONO_TYPE_I4)
1362 opcode = OP_ATOMIC_STORE_I4;
1363 else if (t->type == MONO_TYPE_U4)
1364 opcode = OP_ATOMIC_STORE_U4;
1365 else if (t->type == MONO_TYPE_R4)
1366 opcode = OP_ATOMIC_STORE_R4;
1367 else if (t->type == MONO_TYPE_R8)
1368 opcode = OP_ATOMIC_STORE_R8;
1369 #if SIZEOF_REGISTER == 8
1370 else if (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_I)
1371 opcode = OP_ATOMIC_STORE_I8;
1372 else if (is_ref || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U)
1373 opcode = OP_ATOMIC_STORE_U8;
1374 #else
1375 else if (t->type == MONO_TYPE_I)
1376 opcode = OP_ATOMIC_STORE_I4;
1377 else if (is_ref || t->type == MONO_TYPE_U)
1378 opcode = OP_ATOMIC_STORE_U4;
1379 #endif
1381 if (opcode) {
1382 if (!mono_arch_opcode_supported (opcode))
1383 return NULL;
1385 MONO_INST_NEW (cfg, ins, opcode);
1386 ins->dreg = args [0]->dreg;
1387 ins->sreg1 = args [1]->dreg;
1388 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_REL;
1389 MONO_ADD_INS (cfg->cbb, ins);
1391 if (cfg->gen_write_barriers && is_ref)
1392 mini_emit_write_barrier (cfg, args [0], args [1]);
1396 if (ins)
1397 return ins;
1398 } else if (in_corlib &&
1399 (strcmp (cmethod_klass_name_space, "System.Diagnostics") == 0) &&
1400 (strcmp (cmethod_klass_name, "Debugger") == 0)) {
1401 if (!strcmp (cmethod->name, "Break") && fsig->param_count == 0) {
1402 if (mini_should_insert_breakpoint (cfg->method)) {
1403 ins = mono_emit_jit_icall (cfg, mono_debugger_agent_user_break, NULL);
1404 } else {
1405 MONO_INST_NEW (cfg, ins, OP_NOP);
1406 MONO_ADD_INS (cfg->cbb, ins);
1408 return ins;
1410 } else if (in_corlib &&
1411 (strcmp (cmethod_klass_name_space, "System") == 0) &&
1412 (strcmp (cmethod_klass_name, "Environment") == 0)) {
1413 if (!strcmp (cmethod->name, "get_IsRunningOnWindows") && fsig->param_count == 0) {
1414 #ifdef TARGET_WIN32
1415 EMIT_NEW_ICONST (cfg, ins, 1);
1416 #else
1417 EMIT_NEW_ICONST (cfg, ins, 0);
1418 #endif
1420 } else if (in_corlib &&
1421 (strcmp (cmethod_klass_name_space, "System.Reflection") == 0) &&
1422 (strcmp (cmethod_klass_name, "Assembly") == 0)) {
1423 if (cfg->llvm_only && !strcmp (cmethod->name, "GetExecutingAssembly")) {
1424 /* No stack walks are currently available, so implement this as an intrinsic */
1425 MonoInst *assembly_ins;
1427 EMIT_NEW_AOTCONST (cfg, assembly_ins, MONO_PATCH_INFO_IMAGE, m_class_get_image (cfg->method->klass));
1428 ins = mono_emit_jit_icall (cfg, mono_get_assembly_object, &assembly_ins);
1429 return ins;
1432 // While it is not required per
1433 // https://msdn.microsoft.com/en-us/library/system.reflection.assembly.getcallingassembly(v=vs.110).aspx.
1434 // have GetCallingAssembly be consistent independently of varying optimization.
1435 // This fixes mono/tests/test-inline-call-stack.cs under FullAOT+LLVM.
1436 cfg->no_inline |= COMPILE_LLVM (cfg) && strcmp (cmethod->name, "GetCallingAssembly") == 0;
1438 } else if (in_corlib &&
1439 (strcmp (cmethod_klass_name_space, "System.Reflection") == 0) &&
1440 (strcmp (cmethod_klass_name, "MethodBase") == 0)) {
1441 if (cfg->llvm_only && !strcmp (cmethod->name, "GetCurrentMethod")) {
1442 /* No stack walks are currently available, so implement this as an intrinsic */
1443 MonoInst *method_ins;
1444 MonoMethod *declaring = cfg->method;
1446 /* This returns the declaring generic method */
1447 if (declaring->is_inflated)
1448 declaring = ((MonoMethodInflated*)cfg->method)->declaring;
1449 EMIT_NEW_AOTCONST (cfg, method_ins, MONO_PATCH_INFO_METHODCONST, declaring);
1450 ins = mono_emit_jit_icall (cfg, mono_get_method_object, &method_ins);
1451 cfg->no_inline = TRUE;
1452 if (cfg->method != cfg->current_method)
1453 mini_set_inline_failure (cfg, "MethodBase:GetCurrentMethod ()");
1454 return ins;
1456 } else if (cmethod->klass == mono_class_try_get_math_class ()) {
1458 * There is general branchless code for Min/Max, but it does not work for
1459 * all inputs:
1460 * http://everything2.com/?node_id=1051618
1464 * Constant folding for various Math methods.
1465 * we avoid folding constants that when computed would raise an error, in
1466 * case the user code was expecting to get that error raised
1468 if (fsig->param_count == 1 && args [0]->opcode == OP_R8CONST){
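/* Concretely, the guards below only fold arguments inside each function's
 * domain (|x| <= 1 for Asin/Acos, x >= 1 for Acosh, |x| < 1 for Atanh,
 * x >= 0 for Sqrt, finite x for Sin/Cos/Tan); anything else is left to the
 * runtime call so behaviour matches unoptimized code. */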
1469 double source = *(double *)args [0]->inst_p0;
1470 int opcode = 0;
1471 const char *mname = cmethod->name;
1472 char c = mname [0];
1474 if (c == 'A'){
1475 if (strcmp (mname, "Abs") == 0 && fsig->params [0]->type == MONO_TYPE_R8) {
1476 opcode = OP_ABS;
1477 } else if (strcmp (mname, "Asin") == 0){
1478 if (fabs (source) <= 1)
1479 opcode = OP_ASIN;
1480 } else if (strcmp (mname, "Asinh") == 0){
1481 opcode = OP_ASINH;
1482 } else if (strcmp (mname, "Acos") == 0){
1483 if (fabs (source) <= 1)
1484 opcode = OP_ACOS;
1485 } else if (strcmp (mname, "Acosh") == 0){
1486 if (source >= 1)
1487 opcode = OP_ACOSH;
1488 } else if (strcmp (mname, "Atan") == 0){
1489 opcode = OP_ATAN;
1490 } else if (strcmp (mname, "Atanh") == 0){
1491 if (fabs (source) < 1)
1492 opcode = OP_ATANH;
1494 } else if (c == 'C'){
1495 if (strcmp (mname, "Cos") == 0) {
1496 if (!isinf (source))
1497 opcode = OP_COS;
1498 } else if (strcmp (mname, "Cbrt") == 0){
1499 opcode = OP_CBRT;
1500 } else if (strcmp (mname, "Cosh") == 0){
1501 opcode = OP_COSH;
1503 } else if (c == 'R'){
1504 if (strcmp (mname, "Round") == 0)
1505 opcode = OP_ROUND;
1506 } else if (c == 'S'){
1507 if (strcmp (mname, "Sin") == 0) {
1508 if (!isinf (source))
1509 opcode = OP_SIN;
1510 } else if (strcmp (mname, "Sqrt") == 0) {
1511 if (source >= 0)
1512 opcode = OP_SQRT;
1513 } else if (strcmp (mname, "Sinh") == 0){
1514 opcode = OP_SINH;
1516 } else if (c == 'T'){
1517 if (strcmp (mname, "Tan") == 0){
1518 if (!isinf (source))
1519 opcode = OP_TAN;
1520 } else if (strcmp (mname, "Tanh") == 0){
1521 opcode = OP_TANH;
1525 if (opcode) {
1526 double *dest = (double *) mono_domain_alloc (cfg->domain, sizeof (double));
1527 double result = 0;
1528 MONO_INST_NEW (cfg, ins, OP_R8CONST);
1529 ins->type = STACK_R8;
1530 ins->dreg = mono_alloc_dreg (cfg, (MonoStackType) ins->type);
1531 ins->inst_p0 = dest;
1533 switch (opcode){
1534 case OP_ABS:
1535 result = fabs (source);
1536 break;
1537 case OP_ACOS:
1538 result = acos (source);
1539 break;
1540 case OP_ACOSH:
1541 result = acosh (source);
1542 break;
1543 case OP_ASIN:
1544 result = asin (source);
1545 break;
1546 case OP_ASINH:
1547 result= asinh (source);
1548 break;
1549 case OP_ATAN:
1550 result = atan (source);
1551 break;
1552 case OP_ATANH:
1553 result = atanh (source);
1554 break;
1555 case OP_CBRT:
1556 result = cbrt (source);
1557 break;
1558 case OP_COS:
1559 result = cos (source);
1560 break;
1561 case OP_COSH:
1562 result = cosh (source);
1563 break;
1564 case OP_ROUND:
1565 result = round (source);
1566 break;
1567 case OP_SIN:
1568 result = sin (source);
1569 break;
1570 case OP_SINH:
1571 result = sinh (source);
1572 break;
1573 case OP_SQRT:
1574 result = sqrt (source);
1575 break;
1576 case OP_TAN:
1577 result = tan (source);
1578 break;
1579 case OP_TANH:
1580 result = tanh (source);
1581 break;
1582 default:
1583 g_error ("invalid opcode %d", (int)opcode);
1585 *dest = result;
1586 MONO_ADD_INS (cfg->cbb, ins);
1587 NULLIFY_INS (args [0]);
1588 return ins;
1591 } else if (cmethod->klass == mono_defaults.systemtype_class && !strcmp (cmethod->name, "op_Equality")) {
1592 EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
1593 MONO_INST_NEW (cfg, ins, OP_PCEQ);
1594 ins->dreg = alloc_preg (cfg);
1595 ins->type = STACK_I4;
1596 MONO_ADD_INS (cfg->cbb, ins);
1597 return ins;
1598 } else if (((!strcmp (cmethod_klass_image->assembly->aname.name, "MonoMac") ||
1599 !strcmp (cmethod_klass_image->assembly->aname.name, "monotouch")) &&
1600 !strcmp (cmethod_klass_name_space, "XamCore.ObjCRuntime") &&
1601 !strcmp (cmethod_klass_name, "Selector")) ||
1602 ((!strcmp (cmethod_klass_image->assembly->aname.name, "Xamarin.iOS") ||
1603 !strcmp (cmethod_klass_image->assembly->aname.name, "Xamarin.Mac")) &&
1604 !strcmp (cmethod_klass_name_space, "ObjCRuntime") &&
1605 !strcmp (cmethod_klass_name, "Selector"))
1607 if ((cfg->backend->have_objc_get_selector || cfg->compile_llvm) &&
1608 !strcmp (cmethod->name, "GetHandle") && fsig->param_count == 1 &&
1609 (args [0]->opcode == OP_GOT_ENTRY || args [0]->opcode == OP_AOTCONST) &&
1610 cfg->compile_aot) {
1611 MonoInst *pi;
1612 MonoJumpInfoToken *ji;
1613 char *s;
1615 if (args [0]->opcode == OP_GOT_ENTRY) {
1616 pi = (MonoInst *)args [0]->inst_p1;
1617 g_assert (pi->opcode == OP_PATCH_INFO);
1618 g_assert (GPOINTER_TO_INT (pi->inst_p1) == MONO_PATCH_INFO_LDSTR);
1619 ji = (MonoJumpInfoToken *)pi->inst_p0;
1620 } else {
1621 g_assert (GPOINTER_TO_INT (args [0]->inst_p1) == MONO_PATCH_INFO_LDSTR);
1622 ji = (MonoJumpInfoToken *)args [0]->inst_p0;
1625 NULLIFY_INS (args [0]);
1627 s = mono_ldstr_utf8 (ji->image, mono_metadata_token_index (ji->token), &cfg->error);
1628 return_val_if_nok (&cfg->error, NULL);
1630 MONO_INST_NEW (cfg, ins, OP_OBJC_GET_SELECTOR);
1631 ins->dreg = mono_alloc_ireg (cfg);
1632 // FIXME: Leaks
1633 ins->inst_p0 = s;
1634 MONO_ADD_INS (cfg->cbb, ins);
1635 return ins;
1637 } else if (in_corlib &&
1638 (strcmp (cmethod_klass_name_space, "System.Runtime.InteropServices") == 0) &&
1639 (strcmp (cmethod_klass_name, "Marshal") == 0)) {
1640 //Convert Marshal.PtrToStructure<T> of blittable T to direct loads
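// (For example, a Marshal.PtrToStructure<SomeBlittableStruct> (ptr) call
// becomes a plain typed load from ptr; this is only done when
// mono_type_is_native_blittable () confirms that the managed and native
// sizes agree.)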
1641 if (strcmp (cmethod->name, "PtrToStructure") == 0 &&
1642 cmethod->is_inflated &&
1643 fsig->param_count == 1 &&
1644 !mini_method_check_context_used (cfg, cmethod)) {
1646 MonoGenericContext *method_context = mono_method_get_context (cmethod);
1647 MonoType *arg0 = method_context->method_inst->type_argv [0];
1648 if (mono_type_is_native_blittable (arg0))
1649 return mini_emit_memory_load (cfg, arg0, args [0], 0, 0);
1651 } else if (cmethod->klass == mono_defaults.enum_class && !strcmp (cmethod->name, "HasFlag") &&
1652 args [0]->opcode == OP_BOX && args [1]->opcode == OP_BOX_ICONST && args [0]->klass == args [1]->klass) {
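/* Both operands are boxes of the same enum type, so the flag test can run on the unboxed value and the integer constant */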
1653 args [1]->opcode = OP_ICONST;
1654 ins = mini_handle_enum_has_flag (cfg, args [0]->klass, NULL, args [0]->sreg1, args [1]);
1655 NULLIFY_INS (args [0]);
1656 return ins;
1657 } else if (in_corlib &&
1658 !strcmp (cmethod_klass_name_space, "System") &&
1659 (!strcmp (cmethod_klass_name, "Span`1") || !strcmp (cmethod_klass_name, "ReadOnlySpan`1"))) {
1660 return emit_span_intrinsics (cfg, cmethod, fsig, args);
1661 } else if (in_corlib &&
1662 !strcmp (cmethod_klass_name_space, "Internal.Runtime.CompilerServices") &&
1663 !strcmp (cmethod_klass_name, "Unsafe")) {
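/* The corlib-internal Unsafe class and the standalone System.Runtime.CompilerServices.Unsafe assembly (handled below) share the same intrinsics */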
1664 return emit_unsafe_intrinsics (cfg, cmethod, fsig, args);
1665 } else if (!strcmp (cmethod_klass_name_space, "System.Runtime.CompilerServices") &&
1666 !strcmp (cmethod_klass_name, "Unsafe") &&
1667 (in_corlib || !strcmp (cmethod_klass_image->assembly->aname.name, "System.Runtime.CompilerServices.Unsafe"))) {
1668 return emit_unsafe_intrinsics (cfg, cmethod, fsig, args);
1669 } else if (in_corlib &&
1670 !strcmp (cmethod_klass_name_space, "System.Runtime.CompilerServices") &&
1671 !strcmp (cmethod_klass_name, "JitHelpers")) {
1672 return emit_jit_helpers_intrinsics (cfg, cmethod, fsig, args);
1675 #ifdef MONO_ARCH_SIMD_INTRINSICS
1676 if (cfg->opt & MONO_OPT_SIMD) {
1677 ins = mono_emit_simd_intrinsics (cfg, cmethod, fsig, args);
1678 if (ins)
1679 return ins;
1681 #endif
1683 /* Fallback if SIMD is disabled */
1684 if (in_corlib && !strcmp ("System.Numerics", cmethod_klass_name_space) && !strcmp ("Vector", cmethod_klass_name)) {
1685 if (!strcmp (cmethod->name, "get_IsHardwareAccelerated")) {
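/* Without SIMD intrinsics, hardware acceleration is reported as unavailable */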
1686 EMIT_NEW_ICONST (cfg, ins, 0);
1687 ins->type = STACK_I4;
1688 return ins;
1692 ins = mono_emit_native_types_intrinsics (cfg, cmethod, fsig, args);
1693 if (ins)
1694 return ins;
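/* Lowerings that are only available when the method is compiled through LLVM */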
1696 if (COMPILE_LLVM (cfg)) {
1697 ins = llvm_emit_inst_for_method (cfg, cmethod, fsig, args, in_corlib);
1698 if (ins)
1699 return ins;
1702 return mono_arch_emit_inst_for_method (cfg, cmethod, fsig, args);
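/* Array.UnsafeLoad/UnsafeStore: load or store an array element without emitting a bounds check */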
1706 static MonoInst*
1707 emit_array_unsafe_access (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args, int is_set)
1709 MonoClass *eklass;
1711 if (is_set)
1712 eklass = mono_class_from_mono_type_internal (fsig->params [2]);
1713 else
1714 eklass = mono_class_from_mono_type_internal (fsig->ret);
1716 if (is_set) {
1717 return mini_emit_array_store (cfg, eklass, args, FALSE);
1718 } else {
1719 MonoInst *ins, *addr = mini_emit_ldelema_1_ins (cfg, eklass, args [0], args [1], FALSE);
1720 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, ins, m_class_get_byval_arg (eklass), addr->dreg, 0);
1721 return ins;
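/* Decide whether Array.UnsafeMov can be treated as a plain move: both types must be reference types, or blittable valuetypes whose sizes are compatible (floats are excluded) */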
1725 static gboolean
1726 is_unsafe_mov_compatible (MonoCompile *cfg, MonoClass *param_klass, MonoClass *return_klass)
1728 uint32_t align;
1729 int param_size, return_size;
1731 param_klass = mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (param_klass)));
1732 return_klass = mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (return_klass)));
1734 if (cfg->verbose_level > 3)
1735 printf ("[UNSAFE-MOV-INTRINSIC] %s <- %s\n", m_class_get_name (return_klass), m_class_get_name (param_klass));
1737 //Don't allow mixing reference types with value types
1738 if (m_class_is_valuetype (param_klass) != m_class_is_valuetype (return_klass)) {
1739 if (cfg->verbose_level > 3)
1740 printf ("[UNSAFE-MOV-INTRINSIC]\tone of the args is a valuetype and the other is not\n");
1741 return FALSE;
1744 if (!m_class_is_valuetype (param_klass)) {
1745 if (cfg->verbose_level > 3)
1746 printf ("[UNSAFE-MOV-INTRINSIC]\targs are reference types\n");
1747 return TRUE;
1750 //Both types must be blittable (no managed references)
1751 if (m_class_has_references (param_klass) || m_class_has_references (return_klass))
1752 return FALSE;
1754 MonoType *param_type = m_class_get_byval_arg (param_klass);
1755 MonoType *return_type = m_class_get_byval_arg (return_klass);
1757 /* Avoid mixing structs and primitive types/enums; they need to be handled differently in the JIT */
1758 if ((MONO_TYPE_ISSTRUCT (param_type) && !MONO_TYPE_ISSTRUCT (return_type)) ||
1759 (!MONO_TYPE_ISSTRUCT (param_type) && MONO_TYPE_ISSTRUCT (return_type))) {
1760 if (cfg->verbose_level > 3)
1761 printf ("[UNSAFE-MOV-INTRINSIC]\tmixing structs and scalars\n");
1762 return FALSE;
1765 if (param_type->type == MONO_TYPE_R4 || param_type->type == MONO_TYPE_R8 ||
1766 return_type->type == MONO_TYPE_R4 || return_type->type == MONO_TYPE_R8) {
1767 if (cfg->verbose_level > 3)
1768 printf ("[UNSAFE-MOV-INTRINSIC]\tfloat or double are not supported\n");
1769 return FALSE;
1772 param_size = mono_class_value_size (param_klass, &align);
1773 return_size = mono_class_value_size (return_klass, &align);
1775 //We can do it if sizes match
1776 if (param_size == return_size) {
1777 if (cfg->verbose_level > 3)
1778 printf ("[UNSAFE-MOV-INTRINSIC]\tsame size\n");
1779 return TRUE;
1782 //No simple way to handle struct if sizes don't match
1783 if (MONO_TYPE_ISSTRUCT (param_type)) {
1784 if (cfg->verbose_level > 3)
1785 printf ("[UNSAFE-MOV-INTRINSIC]\tsize mismatch and type is a struct\n");
1786 return FALSE;
1790 * Same reg size category.
1791 * A quick note on why we don't require widening here.
1792 * The intrinsic is "R Array.UnsafeMov<S,R> (S s)".
1794 * Since the source value comes from a function argument, the JIT will already have
1795 * the value in a VREG, with any widening needed already performed (say, when loading it from a field).
1797 if (param_size <= 4 && return_size <= 4) {
1798 if (cfg->verbose_level > 3)
1799 printf ("[UNSAFE-MOV-INTRINSIC]\tsize mismatch but both are of the same reg class\n");
1800 return TRUE;
1803 return FALSE;
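/* Array.UnsafeMov<S,R>: reinterpret the argument as the return type when the two types (or their array element types) are move-compatible */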
1806 static MonoInst*
1807 emit_array_unsafe_mov (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args)
1809 MonoClass *param_klass = mono_class_from_mono_type_internal (fsig->params [0]);
1810 MonoClass *return_klass = mono_class_from_mono_type_internal (fsig->ret);
1812 if (mini_is_gsharedvt_variable_type (fsig->ret))
1813 return NULL;
1815 //Valuetypes that are semantically equivalent, or numbers that can be widened
1816 if (is_unsafe_mov_compatible (cfg, param_klass, return_klass))
1817 return args [0];
1819 //Arrays of valuetypes that are semantically equivalent
1820 if (m_class_get_rank (param_klass) == 1 && m_class_get_rank (return_klass) == 1 && is_unsafe_mov_compatible (cfg, m_class_get_element_class (param_klass), m_class_get_element_class (return_klass)))
1821 return args [0];
1823 return NULL;
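/* Intrinsics for the internal Array.UnsafeStore/UnsafeLoad/UnsafeMov helpers, usable even from shared generic code */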
1826 MonoInst*
1827 mini_emit_inst_for_sharable_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
1829 if (cmethod->klass == mono_defaults.array_class) {
1830 if (strcmp (cmethod->name, "UnsafeStore") == 0)
1831 return emit_array_unsafe_access (cfg, fsig, args, TRUE);
1832 else if (strcmp (cmethod->name, "UnsafeLoad") == 0)
1833 return emit_array_unsafe_access (cfg, fsig, args, FALSE);
1834 else if (strcmp (cmethod->name, "UnsafeMov") == 0)
1835 return emit_array_unsafe_mov (cfg, fsig, args);
1838 return NULL;
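/* Intrinsic loads of well-known fields: BitConverter.IsLittleEndian folds to a constant matching the target byte order */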
1841 MonoInst*
1842 mini_emit_inst_for_field_load (MonoCompile *cfg, MonoClassField *field)
1844 MonoClass *klass = field->parent;
1845 const char *klass_name_space = m_class_get_name_space (klass);
1846 const char *klass_name = m_class_get_name (klass);
1847 MonoImage *klass_image = m_class_get_image (klass);
1848 gboolean in_corlib = klass_image == mono_defaults.corlib;
1849 gboolean is_le;
1850 MonoInst *ins;
1852 if (in_corlib && !strcmp (klass_name_space, "System") && !strcmp (klass_name, "BitConverter") && !strcmp (field->name, "IsLittleEndian")) {
1853 is_le = (TARGET_BYTE_ORDER == G_LITTLE_ENDIAN);
1854 EMIT_NEW_ICONST (cfg, ins, is_le);
1855 return ins;
1857 return NULL;
1859 #else
1860 MONO_EMPTY_SOURCE_FILE (intrinsics);
1861 #endif