/**
 * Intrinsics support
 */

#include <config.h>
#include <mono/utils/mono-compiler.h>
#include <math.h>

#ifndef DISABLE_JIT

#include "mini.h"
#include "mini-runtime.h"
#include "ir-emit.h"
#include "jit-icalls.h"
#include "debugger-agent.h"

#include <mono/metadata/abi-details.h>
#include <mono/metadata/gc-internals.h>
#include <mono/metadata/monitor.h>
#include <mono/utils/mono-memory-model.h>

static GENERATE_GET_CLASS_WITH_CACHE (runtime_helpers, "System.Runtime.CompilerServices", "RuntimeHelpers")
static GENERATE_TRY_GET_CLASS_WITH_CACHE (math, "System", "Math")
/* optimize the simple GetGenericValueImpl/SetGenericValueImpl generic icalls */
static MonoInst*
emit_array_generic_access (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args, int is_set)
{
	MonoInst *addr, *store, *load;
	MonoClass *eklass = mono_class_from_mono_type_internal (fsig->params [2]);

	/* the bounds check is already done by the callers */
	addr = mini_emit_ldelema_1_ins (cfg, eklass, args [0], args [1], FALSE);
	MonoType *etype = m_class_get_byval_arg (eklass);
	if (is_set) {
		EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, load, etype, args [2]->dreg, 0);
		EMIT_NEW_STORE_MEMBASE_TYPE (cfg, store, etype, addr->dreg, 0, load->dreg);
		if (mini_type_is_reference (etype))
			mini_emit_write_barrier (cfg, addr, load);
	} else {
		EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, load, etype, addr->dreg, 0);
		EMIT_NEW_STORE_MEMBASE_TYPE (cfg, store, etype, args [2]->dreg, 0, load->dreg);
	}
	return store;
}
static gboolean
mono_type_is_native_blittable (MonoType *t)
{
	if (MONO_TYPE_IS_REFERENCE (t))
		return FALSE;

	if (MONO_TYPE_IS_PRIMITIVE_SCALAR (t))
		return TRUE;

	MonoClass *klass = mono_class_from_mono_type_internal (t);

	// MonoClass::blittable depends on mono_class_setup_fields being done.
	mono_class_setup_fields (klass);
	if (!m_class_is_blittable (klass))
		return FALSE;

	// If the native marshal size is different we can't convert PtrToStructure to a type load
	if (mono_class_native_size (klass, NULL) != mono_class_value_size (klass, NULL))
		return FALSE;

	return TRUE;
}
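/*
 * Note: this check is what lets the Marshal.PtrToStructure<T> intrinsic further down
 * replace the call with a direct memory load: T must be blittable and its managed and
 * native layouts must have the same size.
 */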
MonoInst*
mini_emit_inst_for_ctor (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	const char* cmethod_klass_name_space = m_class_get_name_space (cmethod->klass);
	const char* cmethod_klass_name = m_class_get_name (cmethod->klass);
	MonoImage *cmethod_klass_image = m_class_get_image (cmethod->klass);
	gboolean in_corlib = cmethod_klass_image == mono_defaults.corlib;
	MonoInst *ins = NULL;

	/* Required intrinsics are always used even with -O=-intrins */
	if (in_corlib &&
		!strcmp (cmethod_klass_name_space, "System") &&
		!strcmp (cmethod_klass_name, "ByReference`1")) {
		/* public ByReference(ref T value) */
		g_assert (fsig->hasthis && fsig->param_count == 1);
		EMIT_NEW_STORE_MEMBASE (cfg, ins, OP_STORE_MEMBASE_REG, args [0]->dreg, 0, args [1]->dreg);
		return ins;
	}

	ins = mono_emit_native_types_intrinsics (cfg, cmethod, fsig, args);
	if (ins)
		return ins;

	if (!(cfg->opt & MONO_OPT_INTRINS))
		return NULL;

#ifdef MONO_ARCH_SIMD_INTRINSICS
	if (cfg->opt & MONO_OPT_SIMD) {
		ins = mono_emit_simd_intrinsics (cfg, cmethod, fsig, args);
		if (ins)
			return ins;
	}
#endif

	return NULL;
}
107 static MonoInst*
108 llvm_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args, gboolean in_corlib)
110 MonoInst *ins = NULL;
111 int opcode = 0;
113 if (in_corlib && !strcmp (m_class_get_name (cmethod->klass), "MathF") && fsig->param_count && fsig->params [0]->type == MONO_TYPE_R4 && cfg->r4fp) {
114 if (!strcmp (cmethod->name, "Sin"))
115 opcode = OP_SINF;
116 else if (!strcmp (cmethod->name, "Cos"))
117 opcode = OP_COSF;
118 else if (!strcmp (cmethod->name, "Abs"))
119 opcode = OP_ABSF;
120 else if (!strcmp (cmethod->name, "Sqrt"))
121 opcode = OP_SQRTF;
122 else if (!strcmp (cmethod->name, "Max"))
123 opcode = OP_RMAX;
124 else if (!strcmp (cmethod->name, "Pow"))
125 opcode = OP_RPOW;
126 if (opcode) {
127 MONO_INST_NEW (cfg, ins, opcode);
128 ins->type = STACK_R8;
129 ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
130 ins->sreg1 = args [0]->dreg;
131 if (fsig->param_count == 2)
132 ins->sreg2 = args [1]->dreg;
133 MONO_ADD_INS (cfg->cbb, ins);
136 /* The LLVM backend supports these intrinsics */
137 if (cmethod->klass == mono_class_try_get_math_class ()) {
138 if (strcmp (cmethod->name, "Sin") == 0) {
139 opcode = OP_SIN;
140 } else if (strcmp (cmethod->name, "Cos") == 0) {
141 opcode = OP_COS;
142 } else if (strcmp (cmethod->name, "Sqrt") == 0) {
143 opcode = OP_SQRT;
144 } else if (strcmp (cmethod->name, "Abs") == 0 && fsig->params [0]->type == MONO_TYPE_R8) {
145 opcode = OP_ABS;
148 if (opcode && fsig->param_count == 1) {
149 MONO_INST_NEW (cfg, ins, opcode);
150 ins->type = STACK_R8;
151 ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
152 ins->sreg1 = args [0]->dreg;
153 MONO_ADD_INS (cfg->cbb, ins);
156 opcode = 0;
157 if (cfg->opt & MONO_OPT_CMOV) {
158 if (strcmp (cmethod->name, "Min") == 0) {
159 if (fsig->params [0]->type == MONO_TYPE_I4)
160 opcode = OP_IMIN;
161 if (fsig->params [0]->type == MONO_TYPE_U4)
162 opcode = OP_IMIN_UN;
163 else if (fsig->params [0]->type == MONO_TYPE_I8)
164 opcode = OP_LMIN;
165 else if (fsig->params [0]->type == MONO_TYPE_U8)
166 opcode = OP_LMIN_UN;
167 } else if (strcmp (cmethod->name, "Max") == 0) {
168 if (fsig->params [0]->type == MONO_TYPE_I4)
169 opcode = OP_IMAX;
170 if (fsig->params [0]->type == MONO_TYPE_U4)
171 opcode = OP_IMAX_UN;
172 else if (fsig->params [0]->type == MONO_TYPE_I8)
173 opcode = OP_LMAX;
174 else if (fsig->params [0]->type == MONO_TYPE_U8)
175 opcode = OP_LMAX_UN;
179 if (opcode && fsig->param_count == 2) {
180 MONO_INST_NEW (cfg, ins, opcode);
181 ins->type = fsig->params [0]->type == MONO_TYPE_I4 ? STACK_I4 : STACK_I8;
182 ins->dreg = mono_alloc_dreg (cfg, (MonoStackType)ins->type);
183 ins->sreg1 = args [0]->dreg;
184 ins->sreg2 = args [1]->dreg;
185 MONO_ADD_INS (cfg->cbb, ins);
189 return ins;
192 static MonoInst*
193 emit_span_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
195 MonoInst *ins;
197 MonoClassField *ptr_field = mono_class_get_field_from_name_full (cmethod->klass, "_pointer", NULL);
198 if (!ptr_field)
199 /* Portable Span<T> */
200 return NULL;
202 if (!strcmp (cmethod->name, "get_Item")) {
203 MonoClassField *length_field = mono_class_get_field_from_name_full (cmethod->klass, "_length", NULL);
205 g_assert (length_field);
207 MonoGenericClass *gclass = mono_class_get_generic_class (cmethod->klass);
208 MonoClass *param_class = mono_class_from_mono_type_internal (gclass->context.class_inst->type_argv [0]);
210 if (mini_is_gsharedvt_variable_klass (param_class))
211 return NULL;
213 int span_reg = args [0]->dreg;
214 /* Load _pointer.Value */
215 int base_reg = alloc_preg (cfg);
216 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, base_reg, span_reg, ptr_field->offset - MONO_ABI_SIZEOF (MonoObject));
217 /* Similar to mini_emit_ldelema_1_ins () */
218 int size = mono_class_array_element_size (param_class);
220 int index_reg = mini_emit_sext_index_reg (cfg, args [1]);
222 MONO_EMIT_BOUNDS_CHECK_OFFSET(cfg, span_reg, length_field->offset - MONO_ABI_SIZEOF (MonoObject), index_reg);
224 // FIXME: Sign extend index ?
226 int mult_reg = alloc_preg (cfg);
227 int add_reg = alloc_preg (cfg);
229 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_MUL_IMM, mult_reg, index_reg, size);
230 EMIT_NEW_BIALU (cfg, ins, OP_PADD, add_reg, base_reg, mult_reg);
231 ins->klass = param_class;
232 ins->type = STACK_MP;
234 return ins;
235 } else if (!strcmp (cmethod->name, "get_Length")) {
236 MonoClassField *length_field = mono_class_get_field_from_name_full (cmethod->klass, "_length", NULL);
237 g_assert (length_field);
/*
 * FIXME: This doesn't work with abcrem, since the src is a unique LDADDR not
 * the same array object.
 */
243 MONO_INST_NEW (cfg, ins, OP_LDLEN);
244 ins->dreg = alloc_preg (cfg);
245 ins->sreg1 = args [0]->dreg;
246 ins->inst_imm = length_field->offset - MONO_ABI_SIZEOF (MonoObject);
247 ins->type = STACK_I4;
248 MONO_ADD_INS (cfg->cbb, ins);
250 cfg->flags |= MONO_CFG_NEEDS_DECOMPOSE;
251 cfg->cbb->needs_decompose = TRUE;
253 return ins;
256 return NULL;
259 static MonoInst*
260 emit_unsafe_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
262 MonoInst *ins;
263 int dreg, align;
264 MonoGenericContext *ctx = mono_method_get_context (cmethod);
265 MonoType *t;
267 if (!strcmp (cmethod->name, "As")) {
268 g_assert (ctx);
269 g_assert (ctx->method_inst);
271 if (ctx->method_inst->type_argc == 2) {
272 dreg = alloc_preg (cfg);
273 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
274 ins->type = STACK_OBJ;
275 ins->klass = mono_get_object_class ();
276 return ins;
277 } else if (ctx->method_inst->type_argc == 1) {
278 // Casts the given object to the specified type, performs no dynamic type checking.
279 g_assert (fsig->param_count == 1);
280 g_assert (fsig->params [0]->type == MONO_TYPE_OBJECT);
281 dreg = alloc_preg (cfg);
282 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
283 ins->type = STACK_OBJ;
284 ins->klass = mono_class_from_mono_type_internal (ctx->method_inst->type_argv [0]);
285 return ins;
287 } else if (!strcmp (cmethod->name, "AsPointer")) {
288 g_assert (ctx);
289 g_assert (ctx->method_inst);
290 g_assert (ctx->method_inst->type_argc == 1);
291 g_assert (fsig->param_count == 1);
293 dreg = alloc_preg (cfg);
294 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
295 ins->type = STACK_PTR;
296 return ins;
297 } else if (!strcmp (cmethod->name, "AsRef")) {
298 g_assert (ctx);
299 g_assert (ctx->method_inst);
300 g_assert (ctx->method_inst->type_argc == 1);
301 g_assert (fsig->param_count == 1);
303 dreg = alloc_preg (cfg);
304 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, args [0]->dreg);
305 ins->type = STACK_OBJ;
306 ins->klass = mono_get_object_class ();
307 return ins;
308 } else if (!strcmp (cmethod->name, "AreSame")) {
309 g_assert (ctx);
310 g_assert (ctx->method_inst);
311 g_assert (ctx->method_inst->type_argc == 1);
312 g_assert (fsig->param_count == 2);
314 dreg = alloc_ireg (cfg);
315 EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
316 EMIT_NEW_UNALU (cfg, ins, OP_PCEQ, dreg, -1);
317 return ins;
318 } else if (!strcmp (cmethod->name, "IsAddressLessThan")) {
319 g_assert (ctx);
320 g_assert (ctx->method_inst);
321 g_assert (ctx->method_inst->type_argc == 1);
322 g_assert (fsig->param_count == 2);
324 dreg = alloc_ireg (cfg);
325 EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
326 EMIT_NEW_UNALU (cfg, ins, OP_PCLT_UN, dreg, -1);
327 return ins;
328 } else if (!strcmp (cmethod->name, "IsAddressGreaterThan")) {
329 g_assert (ctx);
330 g_assert (ctx->method_inst);
331 g_assert (ctx->method_inst->type_argc == 1);
332 g_assert (fsig->param_count == 2);
334 dreg = alloc_ireg (cfg);
335 EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
336 EMIT_NEW_UNALU (cfg, ins, OP_PCGT_UN, dreg, -1);
337 return ins;
338 } else if (!strcmp (cmethod->name, "Add")) {
339 g_assert (ctx);
340 g_assert (ctx->method_inst);
341 g_assert (ctx->method_inst->type_argc == 1);
342 g_assert (fsig->param_count == 2);
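/*
 * Unsafe.Add<T>: the sequence below computes args [0] + args [1] * sizeof (T),
 * loading sizeof (T) from the rgctx when T is a gsharedvt variable type.
 */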
344 int mul_reg = alloc_preg (cfg);
346 t = ctx->method_inst->type_argv [0];
347 MonoInst *esize_ins;
348 if (mini_is_gsharedvt_variable_type (t)) {
349 esize_ins = mini_emit_get_gsharedvt_info_klass (cfg, mono_class_from_mono_type_internal (t), MONO_RGCTX_INFO_CLASS_SIZEOF);
350 if (SIZEOF_REGISTER == 8)
351 MONO_EMIT_NEW_UNALU (cfg, OP_SEXT_I4, esize_ins->dreg, esize_ins->dreg);
352 } else {
353 t = mini_type_get_underlying_type (t);
354 int esize = mono_class_array_element_size (mono_class_from_mono_type_internal (t));
355 EMIT_NEW_ICONST (cfg, esize_ins, esize);
357 esize_ins->type = STACK_I4;
359 EMIT_NEW_BIALU (cfg, ins, OP_PMUL, mul_reg, args [1]->dreg, esize_ins->dreg);
360 ins->type = STACK_PTR;
362 dreg = alloc_preg (cfg);
363 EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, mul_reg);
364 ins->type = STACK_PTR;
365 return ins;
366 } else if (!strcmp (cmethod->name, "AddByteOffset")) {
367 g_assert (ctx);
368 g_assert (ctx->method_inst);
369 g_assert (ctx->method_inst->type_argc == 1);
370 g_assert (fsig->param_count == 2);
372 if (fsig->params [1]->type == MONO_TYPE_I) {
373 int dreg = alloc_preg (cfg);
374 EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, args [1]->dreg);
375 ins->type = STACK_PTR;
376 return ins;
377 } else if (fsig->params [1]->type == MONO_TYPE_U8) {
378 int sreg = args [1]->dreg;
379 if (SIZEOF_REGISTER == 4) {
380 sreg = alloc_ireg (cfg);
381 EMIT_NEW_UNALU (cfg, ins, OP_LCONV_TO_U4, sreg, args [1]->dreg);
383 int dreg = alloc_preg (cfg);
384 EMIT_NEW_BIALU (cfg, ins, OP_PADD, dreg, args [0]->dreg, sreg);
385 ins->type = STACK_PTR;
386 return ins;
388 } else if (!strcmp (cmethod->name, "SizeOf")) {
389 g_assert (ctx);
390 g_assert (ctx->method_inst);
391 g_assert (ctx->method_inst->type_argc == 1);
392 g_assert (fsig->param_count == 0);
394 t = ctx->method_inst->type_argv [0];
395 if (mini_is_gsharedvt_variable_type (t)) {
396 ins = mini_emit_get_gsharedvt_info_klass (cfg, mono_class_from_mono_type_internal (t), MONO_RGCTX_INFO_CLASS_SIZEOF);
397 } else {
398 int esize = mono_type_size (t, &align);
399 EMIT_NEW_ICONST (cfg, ins, esize);
401 ins->type = STACK_I4;
402 return ins;
403 } else if (!strcmp (cmethod->name, "ReadUnaligned")) {
404 g_assert (ctx);
405 g_assert (ctx->method_inst);
406 g_assert (ctx->method_inst->type_argc == 1);
407 g_assert (fsig->param_count == 1);
409 t = ctx->method_inst->type_argv [0];
410 t = mini_get_underlying_type (t);
411 if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8) {
412 dreg = alloc_ireg (cfg);
413 EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, ins, t, args [0]->dreg, 0);
414 ins->type = STACK_I4;
415 ins->flags |= MONO_INST_UNALIGNED;
416 return ins;
418 } else if (!strcmp (cmethod->name, "WriteUnaligned")) {
419 g_assert (ctx);
420 g_assert (ctx->method_inst);
421 g_assert (ctx->method_inst->type_argc == 1);
422 g_assert (fsig->param_count == 2);
424 t = ctx->method_inst->type_argv [0];
425 t = mini_get_underlying_type (t);
426 if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8) {
427 dreg = alloc_ireg (cfg);
428 EMIT_NEW_STORE_MEMBASE_TYPE (cfg, ins, t, args [0]->dreg, 0, args [1]->dreg);
429 ins->flags |= MONO_INST_UNALIGNED;
430 return ins;
432 } else if (!strcmp (cmethod->name, "ByteOffset")) {
433 g_assert (ctx);
434 g_assert (ctx->method_inst);
435 g_assert (ctx->method_inst->type_argc == 1);
436 g_assert (fsig->param_count == 2);
438 int dreg = alloc_preg (cfg);
439 EMIT_NEW_BIALU (cfg, ins, OP_PSUB, dreg, args [1]->dreg, args [0]->dreg);
440 ins->type = STACK_PTR;
441 return ins;
444 return NULL;
447 static MonoInst*
448 emit_jit_helpers_intrinsics (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
450 MonoInst *ins;
451 int dreg;
452 MonoGenericContext *ctx = mono_method_get_context (cmethod);
453 MonoType *t;
455 if (!strcmp (cmethod->name, "EnumEquals") || !strcmp (cmethod->name, "EnumCompareTo")) {
456 g_assert (ctx);
457 g_assert (ctx->method_inst);
458 g_assert (ctx->method_inst->type_argc == 1);
459 g_assert (fsig->param_count == 2);
461 t = ctx->method_inst->type_argv [0];
462 t = mini_get_underlying_type (t);
463 if (mini_is_gsharedvt_variable_type (t))
464 return NULL;
466 gboolean is_i8 = (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_U8);
467 gboolean is_unsigned = (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_U2 || t->type == MONO_TYPE_U4 || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U);
468 int cmp_op, ceq_op, cgt_op, clt_op;
470 if (is_i8) {
471 cmp_op = OP_LCOMPARE;
472 ceq_op = OP_LCEQ;
473 cgt_op = is_unsigned ? OP_LCGT_UN : OP_LCGT;
474 clt_op = is_unsigned ? OP_LCLT_UN : OP_LCLT;
475 } else {
476 cmp_op = OP_ICOMPARE;
477 ceq_op = OP_ICEQ;
478 cgt_op = is_unsigned ? OP_ICGT_UN : OP_ICGT;
479 clt_op = is_unsigned ? OP_ICLT_UN : OP_ICLT;
482 if (!strcmp (cmethod->name, "EnumEquals")) {
483 dreg = alloc_ireg (cfg);
484 EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
485 EMIT_NEW_UNALU (cfg, ins, ceq_op, dreg, -1);
486 } else {
487 // Use the branchless code (a > b) - (a < b)
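/*
 * Illustrative example (not from the original source): for a = 5, b = 3 the sequence
 * below computes (a > b) - (a < b) = 1 - 0 = 1; equal inputs give 0 and a < b gives -1,
 * matching the sign contract expected from a CompareTo result.
 */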
488 int reg1, reg2;
490 reg1 = alloc_ireg (cfg);
491 reg2 = alloc_ireg (cfg);
492 dreg = alloc_ireg (cfg);
494 EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
495 EMIT_NEW_UNALU (cfg, ins, cgt_op, reg1, -1);
496 EMIT_NEW_BIALU (cfg, ins, cmp_op, -1, args [0]->dreg, args [1]->dreg);
497 EMIT_NEW_UNALU (cfg, ins, clt_op, reg2, -1);
498 EMIT_NEW_BIALU (cfg, ins, OP_ISUB, dreg, reg1, reg2);
500 return ins;
503 return NULL;
506 MonoInst*
507 mini_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
509 MonoInst *ins = NULL;
510 MonoClass *runtime_helpers_class = mono_class_get_runtime_helpers_class ();
512 const char* cmethod_klass_name_space = m_class_get_name_space (cmethod->klass);
513 const char* cmethod_klass_name = m_class_get_name (cmethod->klass);
514 MonoImage *cmethod_klass_image = m_class_get_image (cmethod->klass);
515 gboolean in_corlib = cmethod_klass_image == mono_defaults.corlib;
517 /* Required intrinsics are always used even with -O=-intrins */
518 if (in_corlib &&
519 !strcmp (cmethod_klass_name_space, "System") &&
520 !strcmp (cmethod_klass_name, "ByReference`1") &&
521 !strcmp (cmethod->name, "get_Value")) {
522 g_assert (fsig->hasthis && fsig->param_count == 0);
523 int dreg = alloc_preg (cfg);
524 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, dreg, args [0]->dreg, 0);
525 return ins;
526 } else if (in_corlib && cmethod->klass == mono_defaults.object_class) {
527 if (!strcmp (cmethod->name, "GetRawData")) {
528 int dreg = alloc_preg (cfg);
529 EMIT_NEW_BIALU_IMM (cfg, ins, OP_PADD_IMM, dreg, args [0]->dreg, MONO_ABI_SIZEOF (MonoObject));
530 return ins;
534 if (!(cfg->opt & MONO_OPT_INTRINS))
535 return NULL;
537 if (cmethod->klass == mono_defaults.string_class) {
538 if (strcmp (cmethod->name, "get_Chars") == 0 && fsig->param_count + fsig->hasthis == 2) {
539 int dreg = alloc_ireg (cfg);
540 int index_reg = alloc_preg (cfg);
541 int add_reg = alloc_preg (cfg);
543 #if SIZEOF_REGISTER == 8
544 if (COMPILE_LLVM (cfg)) {
545 MONO_EMIT_NEW_UNALU (cfg, OP_ZEXT_I4, index_reg, args [1]->dreg);
546 } else {
547 /* The array reg is 64 bits but the index reg is only 32 */
548 MONO_EMIT_NEW_UNALU (cfg, OP_SEXT_I4, index_reg, args [1]->dreg);
550 #else
551 index_reg = args [1]->dreg;
552 #endif
553 MONO_EMIT_BOUNDS_CHECK (cfg, args [0]->dreg, MonoString, length, index_reg);
555 #if defined(TARGET_X86) || defined(TARGET_AMD64)
556 EMIT_NEW_X86_LEA (cfg, ins, args [0]->dreg, index_reg, 1, MONO_STRUCT_OFFSET (MonoString, chars));
557 add_reg = ins->dreg;
558 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU2_MEMBASE, dreg,
559 add_reg, 0);
560 #else
561 int mult_reg = alloc_preg (cfg);
562 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, mult_reg, index_reg, 1);
563 MONO_EMIT_NEW_BIALU (cfg, OP_PADD, add_reg, mult_reg, args [0]->dreg);
564 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU2_MEMBASE, dreg,
565 add_reg, MONO_STRUCT_OFFSET (MonoString, chars));
566 #endif
567 mini_type_from_op (cfg, ins, NULL, NULL);
568 return ins;
569 } else if (strcmp (cmethod->name, "get_Length") == 0 && fsig->param_count + fsig->hasthis == 1) {
570 int dreg = alloc_ireg (cfg);
571 /* Decompose later to allow more optimizations */
572 EMIT_NEW_UNALU (cfg, ins, OP_STRLEN, dreg, args [0]->dreg);
573 ins->type = STACK_I4;
574 ins->flags |= MONO_INST_FAULT;
575 cfg->cbb->needs_decompose = TRUE;
576 cfg->flags |= MONO_CFG_NEEDS_DECOMPOSE;
578 return ins;
579 } else
580 return NULL;
581 } else if (cmethod->klass == mono_defaults.object_class) {
582 if (strcmp (cmethod->name, "GetType") == 0 && fsig->param_count + fsig->hasthis == 1) {
583 int dreg = alloc_ireg_ref (cfg);
584 int vt_reg = alloc_preg (cfg);
585 MONO_EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, vt_reg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
586 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOAD_MEMBASE, dreg, vt_reg, MONO_STRUCT_OFFSET (MonoVTable, type));
587 mini_type_from_op (cfg, ins, NULL, NULL);
589 return ins;
590 } else if (!cfg->backend->emulate_mul_div && strcmp (cmethod->name, "InternalGetHashCode") == 0 && fsig->param_count == 1 && !mono_gc_is_moving ()) {
591 int dreg = alloc_ireg (cfg);
592 int t1 = alloc_ireg (cfg);
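/*
 * Note: this hashes the (non-moving, per the mono_gc_is_moving () check above) object
 * address by mixing it with 2654435761, the classic Knuth multiplicative-hash constant
 * (a prime close to 2^32 / phi).
 */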
594 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, t1, args [0]->dreg, 3);
595 EMIT_NEW_BIALU_IMM (cfg, ins, OP_MUL_IMM, dreg, t1, 2654435761u);
596 ins->type = STACK_I4;
598 return ins;
599 } else if (strcmp (cmethod->name, ".ctor") == 0 && fsig->param_count == 0) {
600 MONO_INST_NEW (cfg, ins, OP_NOP);
601 MONO_ADD_INS (cfg->cbb, ins);
602 return ins;
603 } else
604 return NULL;
605 } else if (cmethod->klass == mono_defaults.array_class) {
606 if (strcmp (cmethod->name, "GetGenericValueImpl") == 0 && fsig->param_count + fsig->hasthis == 3 && !cfg->gsharedvt)
607 return emit_array_generic_access (cfg, fsig, args, FALSE);
608 else if (strcmp (cmethod->name, "SetGenericValueImpl") == 0 && fsig->param_count + fsig->hasthis == 3 && !cfg->gsharedvt)
609 return emit_array_generic_access (cfg, fsig, args, TRUE);
610 else if (!strcmp (cmethod->name, "GetRawSzArrayData")) {
611 int dreg = alloc_preg (cfg);
612 EMIT_NEW_BIALU_IMM (cfg, ins, OP_PADD_IMM, dreg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, vector));
613 return ins;
616 #ifndef MONO_BIG_ARRAYS
/*
 * This is an inline version of GetLength/GetLowerBound(0) used frequently in
 * Array methods.
 */
621 else if (((strcmp (cmethod->name, "GetLength") == 0 && fsig->param_count + fsig->hasthis == 2) ||
622 (strcmp (cmethod->name, "GetLowerBound") == 0 && fsig->param_count + fsig->hasthis == 2)) &&
623 args [1]->opcode == OP_ICONST && args [1]->inst_c0 == 0) {
624 int dreg = alloc_ireg (cfg);
625 int bounds_reg = alloc_ireg_mp (cfg);
626 MonoBasicBlock *end_bb, *szarray_bb;
627 gboolean get_length = strcmp (cmethod->name, "GetLength") == 0;
629 NEW_BBLOCK (cfg, end_bb);
630 NEW_BBLOCK (cfg, szarray_bb);
632 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, ins, OP_LOAD_MEMBASE, bounds_reg,
633 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, bounds));
634 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_COMPARE_IMM, -1, bounds_reg, 0);
635 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_IBEQ, szarray_bb);
636 /* Non-szarray case */
637 if (get_length)
638 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
639 bounds_reg, MONO_STRUCT_OFFSET (MonoArrayBounds, length));
640 else
641 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
642 bounds_reg, MONO_STRUCT_OFFSET (MonoArrayBounds, lower_bound));
643 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_BR, end_bb);
644 MONO_START_BB (cfg, szarray_bb);
645 /* Szarray case */
646 if (get_length)
647 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADI4_MEMBASE, dreg,
648 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, max_length));
649 else
650 MONO_EMIT_NEW_ICONST (cfg, dreg, 0);
651 MONO_START_BB (cfg, end_bb);
653 EMIT_NEW_UNALU (cfg, ins, OP_MOVE, dreg, dreg);
654 ins->type = STACK_I4;
656 return ins;
658 #endif
660 if (cmethod->name [0] != 'g')
661 return NULL;
663 if (strcmp (cmethod->name, "get_Rank") == 0 && fsig->param_count + fsig->hasthis == 1) {
664 int dreg = alloc_ireg (cfg);
665 int vtable_reg = alloc_preg (cfg);
666 MONO_EMIT_NEW_LOAD_MEMBASE_OP_FAULT (cfg, OP_LOAD_MEMBASE, vtable_reg,
667 args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
668 EMIT_NEW_LOAD_MEMBASE (cfg, ins, OP_LOADU1_MEMBASE, dreg,
669 vtable_reg, MONO_STRUCT_OFFSET (MonoVTable, rank));
670 mini_type_from_op (cfg, ins, NULL, NULL);
672 return ins;
673 } else if (strcmp (cmethod->name, "get_Length") == 0 && fsig->param_count + fsig->hasthis == 1) {
674 int dreg = alloc_ireg (cfg);
676 EMIT_NEW_LOAD_MEMBASE_FAULT (cfg, ins, OP_LOADI4_MEMBASE, dreg,
677 args [0]->dreg, MONO_STRUCT_OFFSET (MonoArray, max_length));
678 mini_type_from_op (cfg, ins, NULL, NULL);
680 return ins;
681 } else
682 return NULL;
683 } else if (cmethod->klass == runtime_helpers_class) {
684 if (strcmp (cmethod->name, "get_OffsetToStringData") == 0 && fsig->param_count == 0) {
685 EMIT_NEW_ICONST (cfg, ins, MONO_STRUCT_OFFSET (MonoString, chars));
686 return ins;
687 } else if (strcmp (cmethod->name, "IsReferenceOrContainsReferences") == 0 && fsig->param_count == 0) {
688 MonoGenericContext *ctx = mono_method_get_context (cmethod);
689 g_assert (ctx);
690 g_assert (ctx->method_inst);
691 g_assert (ctx->method_inst->type_argc == 1);
692 MonoType *arg_type = ctx->method_inst->type_argv [0];
693 MonoType *t;
694 MonoClass *klass;
696 ins = NULL;
/* Resolve the argument class as far as possible so we can handle common cases fast */
699 t = mini_get_underlying_type (arg_type);
700 klass = mono_class_from_mono_type_internal (t);
701 mono_class_init_internal (klass);
702 if (MONO_TYPE_IS_REFERENCE (t))
703 EMIT_NEW_ICONST (cfg, ins, 1);
704 else if (MONO_TYPE_IS_PRIMITIVE (t))
705 EMIT_NEW_ICONST (cfg, ins, 0);
706 else if (cfg->gshared && (t->type == MONO_TYPE_VAR || t->type == MONO_TYPE_MVAR) && !mini_type_var_is_vt (t))
707 EMIT_NEW_ICONST (cfg, ins, 1);
708 else if (!cfg->gshared || !mini_class_check_context_used (cfg, klass))
709 EMIT_NEW_ICONST (cfg, ins, m_class_has_references (klass) ? 1 : 0);
710 else {
711 g_assert (cfg->gshared);
713 /* Have to use the original argument class here */
714 MonoClass *arg_class = mono_class_from_mono_type_internal (arg_type);
715 int context_used = mini_class_check_context_used (cfg, arg_class);
717 /* This returns 1 or 2 */
718 MonoInst *info = mini_emit_get_rgctx_klass (cfg, context_used, arg_class, MONO_RGCTX_INFO_CLASS_IS_REF_OR_CONTAINS_REFS);
719 int dreg = alloc_ireg (cfg);
720 EMIT_NEW_BIALU_IMM (cfg, ins, OP_ISUB_IMM, dreg, info->dreg, 1);
723 return ins;
724 } else if (strcmp (cmethod->name, "IsBitwiseEquatable") == 0 && fsig->param_count == 0) {
725 MonoGenericContext *ctx = mono_method_get_context (cmethod);
726 g_assert (ctx);
727 g_assert (ctx->method_inst);
728 g_assert (ctx->method_inst->type_argc == 1);
729 MonoType *arg_type = ctx->method_inst->type_argv [0];
730 MonoType *t;
731 ins = NULL;
/* Resolve the argument class as far as possible so we can handle common cases fast */
734 t = mini_get_underlying_type (arg_type);
736 if (MONO_TYPE_IS_PRIMITIVE (t) && t->type != MONO_TYPE_R4 && t->type != MONO_TYPE_R8)
737 EMIT_NEW_ICONST (cfg, ins, 1);
738 else
739 EMIT_NEW_ICONST (cfg, ins, 0);
740 return ins;
741 } else if (!strcmp (cmethod->name, "ObjectHasComponentSize")) {
742 g_assert (fsig->param_count == 1);
743 g_assert (fsig->params [0]->type == MONO_TYPE_OBJECT);
744 // Return true for arrays and string
745 int dreg;
747 dreg = alloc_ireg (cfg);
749 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOAD_MEMBASE, dreg, args [0]->dreg, MONO_STRUCT_OFFSET (MonoObject, vtable));
750 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADU1_MEMBASE, dreg, dreg, MONO_STRUCT_OFFSET (MonoVTable, flags));
751 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IAND_IMM, dreg, dreg, MONO_VT_FLAG_ARRAY_OR_STRING);
752 EMIT_NEW_BIALU_IMM (cfg, ins, OP_COMPARE_IMM, -1, dreg, 0);
753 EMIT_NEW_UNALU (cfg, ins, OP_ICGT, dreg, -1);
754 ins->type = STACK_I4;
755 return ins;
756 } else
757 return NULL;
758 } else if (cmethod->klass == mono_defaults.monitor_class) {
759 gboolean is_enter = FALSE;
760 gboolean is_v4 = FALSE;
762 if (!strcmp (cmethod->name, "Enter") && fsig->param_count == 2 && fsig->params [1]->byref) {
763 is_enter = TRUE;
764 is_v4 = TRUE;
766 if (!strcmp (cmethod->name, "Enter") && fsig->param_count == 1)
767 is_enter = TRUE;
769 if (is_enter) {
/*
 * To make async stack traces work, icalls which can block should have a wrapper.
 * For Monitor.Enter, emit two calls: a fastpath which doesn't have a wrapper, and a slowpath, which does.
 */
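/*
 * Roughly the control flow emitted below (illustrative pseudocode, not literal source):
 *
 *   if (mono_monitor_enter_fast (obj) == 0)   // fast path did not take the lock
 *       mono_monitor_enter_internal (obj);    // wrapped slow path, may block
 */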
774 MonoBasicBlock *end_bb;
776 NEW_BBLOCK (cfg, end_bb);
778 if (is_v4)
779 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_v4_fast, args);
780 else
781 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_fast, args);
783 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ICOMPARE_IMM, -1, ins->dreg, 0);
784 MONO_EMIT_NEW_BRANCH_BLOCK (cfg, OP_IBNE_UN, end_bb);
786 if (is_v4)
787 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_v4_internal, args);
788 else
789 ins = mono_emit_jit_icall (cfg, mono_monitor_enter_internal, args);
791 MONO_START_BB (cfg, end_bb);
792 return ins;
794 } else if (cmethod->klass == mono_defaults.thread_class) {
795 if (strcmp (cmethod->name, "SpinWait_nop") == 0 && fsig->param_count == 0) {
796 MONO_INST_NEW (cfg, ins, OP_RELAXED_NOP);
797 MONO_ADD_INS (cfg->cbb, ins);
798 return ins;
799 } else if (strcmp (cmethod->name, "MemoryBarrier") == 0 && fsig->param_count == 0) {
800 return mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
801 } else if (!strcmp (cmethod->name, "VolatileRead") && fsig->param_count == 1) {
802 guint32 opcode = 0;
803 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
805 if (fsig->params [0]->type == MONO_TYPE_I1)
806 opcode = OP_LOADI1_MEMBASE;
807 else if (fsig->params [0]->type == MONO_TYPE_U1)
808 opcode = OP_LOADU1_MEMBASE;
809 else if (fsig->params [0]->type == MONO_TYPE_I2)
810 opcode = OP_LOADI2_MEMBASE;
811 else if (fsig->params [0]->type == MONO_TYPE_U2)
812 opcode = OP_LOADU2_MEMBASE;
813 else if (fsig->params [0]->type == MONO_TYPE_I4)
814 opcode = OP_LOADI4_MEMBASE;
815 else if (fsig->params [0]->type == MONO_TYPE_U4)
816 opcode = OP_LOADU4_MEMBASE;
817 else if (fsig->params [0]->type == MONO_TYPE_I8 || fsig->params [0]->type == MONO_TYPE_U8)
818 opcode = OP_LOADI8_MEMBASE;
819 else if (fsig->params [0]->type == MONO_TYPE_R4)
820 opcode = OP_LOADR4_MEMBASE;
821 else if (fsig->params [0]->type == MONO_TYPE_R8)
822 opcode = OP_LOADR8_MEMBASE;
823 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I || fsig->params [0]->type == MONO_TYPE_U)
824 opcode = OP_LOAD_MEMBASE;
826 if (opcode) {
827 MONO_INST_NEW (cfg, ins, opcode);
828 ins->inst_basereg = args [0]->dreg;
829 ins->inst_offset = 0;
830 MONO_ADD_INS (cfg->cbb, ins);
832 switch (fsig->params [0]->type) {
833 case MONO_TYPE_I1:
834 case MONO_TYPE_U1:
835 case MONO_TYPE_I2:
836 case MONO_TYPE_U2:
837 case MONO_TYPE_I4:
838 case MONO_TYPE_U4:
839 ins->dreg = mono_alloc_ireg (cfg);
840 ins->type = STACK_I4;
841 break;
842 case MONO_TYPE_I8:
843 case MONO_TYPE_U8:
844 ins->dreg = mono_alloc_lreg (cfg);
845 ins->type = STACK_I8;
846 break;
847 case MONO_TYPE_I:
848 case MONO_TYPE_U:
849 ins->dreg = mono_alloc_ireg (cfg);
850 #if SIZEOF_REGISTER == 8
851 ins->type = STACK_I8;
852 #else
853 ins->type = STACK_I4;
854 #endif
855 break;
856 case MONO_TYPE_R4:
857 case MONO_TYPE_R8:
858 ins->dreg = mono_alloc_freg (cfg);
859 ins->type = STACK_R8;
860 break;
861 default:
862 g_assert (mini_type_is_reference (fsig->params [0]));
863 ins->dreg = mono_alloc_ireg_ref (cfg);
864 ins->type = STACK_OBJ;
865 break;
868 if (opcode == OP_LOADI8_MEMBASE)
869 ins = mono_decompose_opcode (cfg, ins);
871 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
873 return ins;
875 } else if (!strcmp (cmethod->name, "VolatileWrite") && fsig->param_count == 2) {
876 guint32 opcode = 0;
877 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
879 if (fsig->params [0]->type == MONO_TYPE_I1 || fsig->params [0]->type == MONO_TYPE_U1)
880 opcode = OP_STOREI1_MEMBASE_REG;
881 else if (fsig->params [0]->type == MONO_TYPE_I2 || fsig->params [0]->type == MONO_TYPE_U2)
882 opcode = OP_STOREI2_MEMBASE_REG;
883 else if (fsig->params [0]->type == MONO_TYPE_I4 || fsig->params [0]->type == MONO_TYPE_U4)
884 opcode = OP_STOREI4_MEMBASE_REG;
885 else if (fsig->params [0]->type == MONO_TYPE_I8 || fsig->params [0]->type == MONO_TYPE_U8)
886 opcode = OP_STOREI8_MEMBASE_REG;
887 else if (fsig->params [0]->type == MONO_TYPE_R4)
888 opcode = OP_STORER4_MEMBASE_REG;
889 else if (fsig->params [0]->type == MONO_TYPE_R8)
890 opcode = OP_STORER8_MEMBASE_REG;
891 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I || fsig->params [0]->type == MONO_TYPE_U)
892 opcode = OP_STORE_MEMBASE_REG;
894 if (opcode) {
895 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
897 MONO_INST_NEW (cfg, ins, opcode);
898 ins->sreg1 = args [1]->dreg;
899 ins->inst_destbasereg = args [0]->dreg;
900 ins->inst_offset = 0;
901 MONO_ADD_INS (cfg->cbb, ins);
903 if (opcode == OP_STOREI8_MEMBASE_REG)
904 ins = mono_decompose_opcode (cfg, ins);
906 return ins;
909 } else if (in_corlib &&
910 (strcmp (cmethod_klass_name_space, "System.Threading") == 0) &&
911 (strcmp (cmethod_klass_name, "Interlocked") == 0)) {
912 ins = NULL;
914 #if SIZEOF_REGISTER == 8
915 if (!cfg->llvm_only && strcmp (cmethod->name, "Read") == 0 && fsig->param_count == 1 && (fsig->params [0]->type == MONO_TYPE_I8)) {
916 if (!cfg->llvm_only && mono_arch_opcode_supported (OP_ATOMIC_LOAD_I8)) {
917 MONO_INST_NEW (cfg, ins, OP_ATOMIC_LOAD_I8);
918 ins->dreg = mono_alloc_preg (cfg);
919 ins->sreg1 = args [0]->dreg;
920 ins->type = STACK_I8;
921 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_SEQ;
922 MONO_ADD_INS (cfg->cbb, ins);
923 } else {
924 MonoInst *load_ins;
926 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
928 /* 64 bit reads are already atomic */
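/*
 * Note: on this fallback path the plain 64-bit load is bracketed by the full memory
 * barriers emitted just above and below, which is what provides the sequentially
 * consistent semantics when OP_ATOMIC_LOAD_I8 is not available.
 */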
929 MONO_INST_NEW (cfg, load_ins, OP_LOADI8_MEMBASE);
930 load_ins->dreg = mono_alloc_preg (cfg);
931 load_ins->inst_basereg = args [0]->dreg;
932 load_ins->inst_offset = 0;
933 load_ins->type = STACK_I8;
934 MONO_ADD_INS (cfg->cbb, load_ins);
936 mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
938 ins = load_ins;
941 #endif
943 if (strcmp (cmethod->name, "Increment") == 0 && fsig->param_count == 1) {
944 MonoInst *ins_iconst;
945 guint32 opcode = 0;
947 if (fsig->params [0]->type == MONO_TYPE_I4) {
948 opcode = OP_ATOMIC_ADD_I4;
949 cfg->has_atomic_add_i4 = TRUE;
951 #if SIZEOF_REGISTER == 8
952 else if (fsig->params [0]->type == MONO_TYPE_I8)
953 opcode = OP_ATOMIC_ADD_I8;
954 #endif
955 if (opcode) {
956 if (!mono_arch_opcode_supported (opcode))
957 return NULL;
958 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
959 ins_iconst->inst_c0 = 1;
960 ins_iconst->dreg = mono_alloc_ireg (cfg);
961 MONO_ADD_INS (cfg->cbb, ins_iconst);
963 MONO_INST_NEW (cfg, ins, opcode);
964 ins->dreg = mono_alloc_ireg (cfg);
965 ins->inst_basereg = args [0]->dreg;
966 ins->inst_offset = 0;
967 ins->sreg2 = ins_iconst->dreg;
968 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
969 MONO_ADD_INS (cfg->cbb, ins);
971 } else if (strcmp (cmethod->name, "Decrement") == 0 && fsig->param_count == 1) {
972 MonoInst *ins_iconst;
973 guint32 opcode = 0;
975 if (fsig->params [0]->type == MONO_TYPE_I4) {
976 opcode = OP_ATOMIC_ADD_I4;
977 cfg->has_atomic_add_i4 = TRUE;
979 #if SIZEOF_REGISTER == 8
980 else if (fsig->params [0]->type == MONO_TYPE_I8)
981 opcode = OP_ATOMIC_ADD_I8;
982 #endif
983 if (opcode) {
984 if (!mono_arch_opcode_supported (opcode))
985 return NULL;
986 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
987 ins_iconst->inst_c0 = -1;
988 ins_iconst->dreg = mono_alloc_ireg (cfg);
989 MONO_ADD_INS (cfg->cbb, ins_iconst);
991 MONO_INST_NEW (cfg, ins, opcode);
992 ins->dreg = mono_alloc_ireg (cfg);
993 ins->inst_basereg = args [0]->dreg;
994 ins->inst_offset = 0;
995 ins->sreg2 = ins_iconst->dreg;
996 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
997 MONO_ADD_INS (cfg->cbb, ins);
999 } else if (strcmp (cmethod->name, "Add") == 0 && fsig->param_count == 2) {
1000 guint32 opcode = 0;
1002 if (fsig->params [0]->type == MONO_TYPE_I4) {
1003 opcode = OP_ATOMIC_ADD_I4;
1004 cfg->has_atomic_add_i4 = TRUE;
1006 #if SIZEOF_REGISTER == 8
1007 else if (fsig->params [0]->type == MONO_TYPE_I8)
1008 opcode = OP_ATOMIC_ADD_I8;
1009 #endif
1010 if (opcode) {
1011 if (!mono_arch_opcode_supported (opcode))
1012 return NULL;
1013 MONO_INST_NEW (cfg, ins, opcode);
1014 ins->dreg = mono_alloc_ireg (cfg);
1015 ins->inst_basereg = args [0]->dreg;
1016 ins->inst_offset = 0;
1017 ins->sreg2 = args [1]->dreg;
1018 ins->type = (opcode == OP_ATOMIC_ADD_I4) ? STACK_I4 : STACK_I8;
1019 MONO_ADD_INS (cfg->cbb, ins);
1022 else if (strcmp (cmethod->name, "Exchange") == 0 && fsig->param_count == 2) {
1023 MonoInst *f2i = NULL, *i2f;
1024 guint32 opcode, f2i_opcode, i2f_opcode;
1025 gboolean is_ref = mini_type_is_reference (fsig->params [0]);
1026 gboolean is_float = fsig->params [0]->type == MONO_TYPE_R4 || fsig->params [0]->type == MONO_TYPE_R8;
1028 if (fsig->params [0]->type == MONO_TYPE_I4 ||
1029 fsig->params [0]->type == MONO_TYPE_R4) {
1030 opcode = OP_ATOMIC_EXCHANGE_I4;
1031 f2i_opcode = OP_MOVE_F_TO_I4;
1032 i2f_opcode = OP_MOVE_I4_TO_F;
1033 cfg->has_atomic_exchange_i4 = TRUE;
1035 #if SIZEOF_REGISTER == 8
1036 else if (is_ref ||
1037 fsig->params [0]->type == MONO_TYPE_I8 ||
1038 fsig->params [0]->type == MONO_TYPE_R8 ||
1039 fsig->params [0]->type == MONO_TYPE_I) {
1040 opcode = OP_ATOMIC_EXCHANGE_I8;
1041 f2i_opcode = OP_MOVE_F_TO_I8;
1042 i2f_opcode = OP_MOVE_I8_TO_F;
1044 #else
1045 else if (is_ref || fsig->params [0]->type == MONO_TYPE_I) {
1046 opcode = OP_ATOMIC_EXCHANGE_I4;
1047 cfg->has_atomic_exchange_i4 = TRUE;
1049 #endif
1050 else
1051 return NULL;
1053 if (!mono_arch_opcode_supported (opcode))
1054 return NULL;
1056 if (is_float) {
1057 /* TODO: Decompose these opcodes instead of bailing here. */
1058 if (COMPILE_SOFT_FLOAT (cfg))
1059 return NULL;
1061 MONO_INST_NEW (cfg, f2i, f2i_opcode);
1062 f2i->dreg = mono_alloc_ireg (cfg);
1063 f2i->sreg1 = args [1]->dreg;
1064 if (f2i_opcode == OP_MOVE_F_TO_I4)
1065 f2i->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1066 MONO_ADD_INS (cfg->cbb, f2i);
1069 MONO_INST_NEW (cfg, ins, opcode);
1070 ins->dreg = is_ref ? mono_alloc_ireg_ref (cfg) : mono_alloc_ireg (cfg);
1071 ins->inst_basereg = args [0]->dreg;
1072 ins->inst_offset = 0;
1073 ins->sreg2 = is_float ? f2i->dreg : args [1]->dreg;
1074 MONO_ADD_INS (cfg->cbb, ins);
1076 switch (fsig->params [0]->type) {
1077 case MONO_TYPE_I4:
1078 ins->type = STACK_I4;
1079 break;
1080 case MONO_TYPE_I8:
1081 ins->type = STACK_I8;
1082 break;
1083 case MONO_TYPE_I:
1084 #if SIZEOF_REGISTER == 8
1085 ins->type = STACK_I8;
1086 #else
1087 ins->type = STACK_I4;
1088 #endif
1089 break;
1090 case MONO_TYPE_R4:
1091 case MONO_TYPE_R8:
1092 ins->type = STACK_R8;
1093 break;
1094 default:
1095 g_assert (mini_type_is_reference (fsig->params [0]));
1096 ins->type = STACK_OBJ;
1097 break;
1100 if (is_float) {
1101 MONO_INST_NEW (cfg, i2f, i2f_opcode);
1102 i2f->dreg = mono_alloc_freg (cfg);
1103 i2f->sreg1 = ins->dreg;
1104 i2f->type = STACK_R8;
1105 if (i2f_opcode == OP_MOVE_I4_TO_F)
1106 i2f->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1107 MONO_ADD_INS (cfg->cbb, i2f);
1109 ins = i2f;
1112 if (cfg->gen_write_barriers && is_ref)
1113 mini_emit_write_barrier (cfg, args [0], args [1]);
1115 else if ((strcmp (cmethod->name, "CompareExchange") == 0) && fsig->param_count == 3) {
1116 MonoInst *f2i_new = NULL, *f2i_cmp = NULL, *i2f;
1117 guint32 opcode, f2i_opcode, i2f_opcode;
1118 gboolean is_ref = mini_type_is_reference (fsig->params [1]);
1119 gboolean is_float = fsig->params [1]->type == MONO_TYPE_R4 || fsig->params [1]->type == MONO_TYPE_R8;
1121 if (fsig->params [1]->type == MONO_TYPE_I4 ||
1122 fsig->params [1]->type == MONO_TYPE_R4) {
1123 opcode = OP_ATOMIC_CAS_I4;
1124 f2i_opcode = OP_MOVE_F_TO_I4;
1125 i2f_opcode = OP_MOVE_I4_TO_F;
1126 cfg->has_atomic_cas_i4 = TRUE;
1128 #if SIZEOF_REGISTER == 8
1129 else if (is_ref ||
1130 fsig->params [1]->type == MONO_TYPE_I8 ||
1131 fsig->params [1]->type == MONO_TYPE_R8 ||
1132 fsig->params [1]->type == MONO_TYPE_I) {
1133 opcode = OP_ATOMIC_CAS_I8;
1134 f2i_opcode = OP_MOVE_F_TO_I8;
1135 i2f_opcode = OP_MOVE_I8_TO_F;
1137 #else
1138 else if (is_ref || fsig->params [1]->type == MONO_TYPE_I) {
1139 opcode = OP_ATOMIC_CAS_I4;
1140 cfg->has_atomic_cas_i4 = TRUE;
1142 #endif
1143 else
1144 return NULL;
1146 if (!mono_arch_opcode_supported (opcode))
1147 return NULL;
1149 if (is_float) {
1150 /* TODO: Decompose these opcodes instead of bailing here. */
1151 if (COMPILE_SOFT_FLOAT (cfg))
1152 return NULL;
1154 MONO_INST_NEW (cfg, f2i_new, f2i_opcode);
1155 f2i_new->dreg = mono_alloc_ireg (cfg);
1156 f2i_new->sreg1 = args [1]->dreg;
1157 if (f2i_opcode == OP_MOVE_F_TO_I4)
1158 f2i_new->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1159 MONO_ADD_INS (cfg->cbb, f2i_new);
1161 MONO_INST_NEW (cfg, f2i_cmp, f2i_opcode);
1162 f2i_cmp->dreg = mono_alloc_ireg (cfg);
1163 f2i_cmp->sreg1 = args [2]->dreg;
1164 if (f2i_opcode == OP_MOVE_F_TO_I4)
1165 f2i_cmp->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1166 MONO_ADD_INS (cfg->cbb, f2i_cmp);
1169 MONO_INST_NEW (cfg, ins, opcode);
1170 ins->dreg = is_ref ? alloc_ireg_ref (cfg) : alloc_ireg (cfg);
1171 ins->sreg1 = args [0]->dreg;
1172 ins->sreg2 = is_float ? f2i_new->dreg : args [1]->dreg;
1173 ins->sreg3 = is_float ? f2i_cmp->dreg : args [2]->dreg;
1174 MONO_ADD_INS (cfg->cbb, ins);
1176 switch (fsig->params [1]->type) {
1177 case MONO_TYPE_I4:
1178 ins->type = STACK_I4;
1179 break;
1180 case MONO_TYPE_I8:
1181 ins->type = STACK_I8;
1182 break;
1183 case MONO_TYPE_I:
1184 #if SIZEOF_REGISTER == 8
1185 ins->type = STACK_I8;
1186 #else
1187 ins->type = STACK_I4;
1188 #endif
1189 break;
1190 case MONO_TYPE_R4:
1191 ins->type = cfg->r4_stack_type;
1192 break;
1193 case MONO_TYPE_R8:
1194 ins->type = STACK_R8;
1195 break;
1196 default:
1197 g_assert (mini_type_is_reference (fsig->params [1]));
1198 ins->type = STACK_OBJ;
1199 break;
1202 if (is_float) {
1203 MONO_INST_NEW (cfg, i2f, i2f_opcode);
1204 i2f->dreg = mono_alloc_freg (cfg);
1205 i2f->sreg1 = ins->dreg;
1206 i2f->type = STACK_R8;
1207 if (i2f_opcode == OP_MOVE_I4_TO_F)
1208 i2f->backend.spill_var = mini_get_int_to_float_spill_area (cfg);
1209 MONO_ADD_INS (cfg->cbb, i2f);
1211 ins = i2f;
1214 if (cfg->gen_write_barriers && is_ref)
1215 mini_emit_write_barrier (cfg, args [0], args [1]);
1217 else if ((strcmp (cmethod->name, "CompareExchange") == 0) && fsig->param_count == 4 &&
1218 fsig->params [1]->type == MONO_TYPE_I4) {
1219 MonoInst *cmp, *ceq;
1221 if (!mono_arch_opcode_supported (OP_ATOMIC_CAS_I4))
1222 return NULL;
1224 /* int32 r = CAS (location, value, comparand); */
1225 MONO_INST_NEW (cfg, ins, OP_ATOMIC_CAS_I4);
1226 ins->dreg = alloc_ireg (cfg);
1227 ins->sreg1 = args [0]->dreg;
1228 ins->sreg2 = args [1]->dreg;
1229 ins->sreg3 = args [2]->dreg;
1230 ins->type = STACK_I4;
1231 MONO_ADD_INS (cfg->cbb, ins);
1233 /* bool result = r == comparand; */
1234 MONO_INST_NEW (cfg, cmp, OP_ICOMPARE);
1235 cmp->sreg1 = ins->dreg;
1236 cmp->sreg2 = args [2]->dreg;
1237 cmp->type = STACK_I4;
1238 MONO_ADD_INS (cfg->cbb, cmp);
1240 MONO_INST_NEW (cfg, ceq, OP_ICEQ);
1241 ceq->dreg = alloc_ireg (cfg);
1242 ceq->type = STACK_I4;
1243 MONO_ADD_INS (cfg->cbb, ceq);
1245 /* *success = result; */
1246 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI1_MEMBASE_REG, args [3]->dreg, 0, ceq->dreg);
1248 cfg->has_atomic_cas_i4 = TRUE;
1250 else if (strcmp (cmethod->name, "MemoryBarrier") == 0 && fsig->param_count == 0)
1251 ins = mini_emit_memory_barrier (cfg, MONO_MEMORY_BARRIER_SEQ);
1253 if (ins)
1254 return ins;
1255 } else if (in_corlib &&
1256 (strcmp (cmethod_klass_name_space, "System.Threading") == 0) &&
1257 (strcmp (cmethod_klass_name, "Volatile") == 0)) {
1258 ins = NULL;
1260 if (!cfg->llvm_only && !strcmp (cmethod->name, "Read") && fsig->param_count == 1) {
1261 guint32 opcode = 0;
1262 MonoType *t = fsig->params [0];
1263 gboolean is_ref;
1264 gboolean is_float = t->type == MONO_TYPE_R4 || t->type == MONO_TYPE_R8;
1266 g_assert (t->byref);
1267 /* t is a byref type, so the reference check is more complicated */
1268 is_ref = mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t)));
1269 if (t->type == MONO_TYPE_I1)
1270 opcode = OP_ATOMIC_LOAD_I1;
1271 else if (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_BOOLEAN)
1272 opcode = OP_ATOMIC_LOAD_U1;
1273 else if (t->type == MONO_TYPE_I2)
1274 opcode = OP_ATOMIC_LOAD_I2;
1275 else if (t->type == MONO_TYPE_U2)
1276 opcode = OP_ATOMIC_LOAD_U2;
1277 else if (t->type == MONO_TYPE_I4)
1278 opcode = OP_ATOMIC_LOAD_I4;
1279 else if (t->type == MONO_TYPE_U4)
1280 opcode = OP_ATOMIC_LOAD_U4;
1281 else if (t->type == MONO_TYPE_R4)
1282 opcode = OP_ATOMIC_LOAD_R4;
1283 else if (t->type == MONO_TYPE_R8)
1284 opcode = OP_ATOMIC_LOAD_R8;
1285 #if SIZEOF_REGISTER == 8
1286 else if (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_I)
1287 opcode = OP_ATOMIC_LOAD_I8;
1288 else if (is_ref || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U)
1289 opcode = OP_ATOMIC_LOAD_U8;
1290 #else
1291 else if (t->type == MONO_TYPE_I)
1292 opcode = OP_ATOMIC_LOAD_I4;
1293 else if (is_ref || t->type == MONO_TYPE_U)
1294 opcode = OP_ATOMIC_LOAD_U4;
1295 #endif
1297 if (opcode) {
1298 if (!mono_arch_opcode_supported (opcode))
1299 return NULL;
1301 MONO_INST_NEW (cfg, ins, opcode);
1302 ins->dreg = is_ref ? mono_alloc_ireg_ref (cfg) : (is_float ? mono_alloc_freg (cfg) : mono_alloc_ireg (cfg));
1303 ins->sreg1 = args [0]->dreg;
1304 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_ACQ;
1305 MONO_ADD_INS (cfg->cbb, ins);
1307 switch (t->type) {
1308 case MONO_TYPE_BOOLEAN:
1309 case MONO_TYPE_I1:
1310 case MONO_TYPE_U1:
1311 case MONO_TYPE_I2:
1312 case MONO_TYPE_U2:
1313 case MONO_TYPE_I4:
1314 case MONO_TYPE_U4:
1315 ins->type = STACK_I4;
1316 break;
1317 case MONO_TYPE_I8:
1318 case MONO_TYPE_U8:
1319 ins->type = STACK_I8;
1320 break;
1321 case MONO_TYPE_I:
1322 case MONO_TYPE_U:
1323 #if SIZEOF_REGISTER == 8
1324 ins->type = STACK_I8;
1325 #else
1326 ins->type = STACK_I4;
1327 #endif
1328 break;
1329 case MONO_TYPE_R4:
1330 ins->type = cfg->r4_stack_type;
1331 break;
1332 case MONO_TYPE_R8:
1333 ins->type = STACK_R8;
1334 break;
1335 default:
1336 g_assert (is_ref);
1337 ins->type = STACK_OBJ;
1338 break;
1343 if (!cfg->llvm_only && !strcmp (cmethod->name, "Write") && fsig->param_count == 2) {
1344 guint32 opcode = 0;
1345 MonoType *t = fsig->params [0];
1346 gboolean is_ref;
1348 g_assert (t->byref);
1349 is_ref = mini_type_is_reference (m_class_get_byval_arg (mono_class_from_mono_type_internal (t)));
1350 if (t->type == MONO_TYPE_I1)
1351 opcode = OP_ATOMIC_STORE_I1;
1352 else if (t->type == MONO_TYPE_U1 || t->type == MONO_TYPE_BOOLEAN)
1353 opcode = OP_ATOMIC_STORE_U1;
1354 else if (t->type == MONO_TYPE_I2)
1355 opcode = OP_ATOMIC_STORE_I2;
1356 else if (t->type == MONO_TYPE_U2)
1357 opcode = OP_ATOMIC_STORE_U2;
1358 else if (t->type == MONO_TYPE_I4)
1359 opcode = OP_ATOMIC_STORE_I4;
1360 else if (t->type == MONO_TYPE_U4)
1361 opcode = OP_ATOMIC_STORE_U4;
1362 else if (t->type == MONO_TYPE_R4)
1363 opcode = OP_ATOMIC_STORE_R4;
1364 else if (t->type == MONO_TYPE_R8)
1365 opcode = OP_ATOMIC_STORE_R8;
1366 #if SIZEOF_REGISTER == 8
1367 else if (t->type == MONO_TYPE_I8 || t->type == MONO_TYPE_I)
1368 opcode = OP_ATOMIC_STORE_I8;
1369 else if (is_ref || t->type == MONO_TYPE_U8 || t->type == MONO_TYPE_U)
1370 opcode = OP_ATOMIC_STORE_U8;
1371 #else
1372 else if (t->type == MONO_TYPE_I)
1373 opcode = OP_ATOMIC_STORE_I4;
1374 else if (is_ref || t->type == MONO_TYPE_U)
1375 opcode = OP_ATOMIC_STORE_U4;
1376 #endif
1378 if (opcode) {
1379 if (!mono_arch_opcode_supported (opcode))
1380 return NULL;
1382 MONO_INST_NEW (cfg, ins, opcode);
1383 ins->dreg = args [0]->dreg;
1384 ins->sreg1 = args [1]->dreg;
1385 ins->backend.memory_barrier_kind = MONO_MEMORY_BARRIER_REL;
1386 MONO_ADD_INS (cfg->cbb, ins);
1388 if (cfg->gen_write_barriers && is_ref)
1389 mini_emit_write_barrier (cfg, args [0], args [1]);
1393 if (ins)
1394 return ins;
1395 } else if (in_corlib &&
1396 (strcmp (cmethod_klass_name_space, "System.Diagnostics") == 0) &&
1397 (strcmp (cmethod_klass_name, "Debugger") == 0)) {
1398 if (!strcmp (cmethod->name, "Break") && fsig->param_count == 0) {
1399 if (mini_should_insert_breakpoint (cfg->method)) {
1400 ins = mono_emit_jit_icall (cfg, mono_debugger_agent_user_break, NULL);
1401 } else {
1402 MONO_INST_NEW (cfg, ins, OP_NOP);
1403 MONO_ADD_INS (cfg->cbb, ins);
1405 return ins;
1407 } else if (in_corlib &&
1408 (strcmp (cmethod_klass_name_space, "System") == 0) &&
1409 (strcmp (cmethod_klass_name, "Environment") == 0)) {
1410 if (!strcmp (cmethod->name, "get_IsRunningOnWindows") && fsig->param_count == 0) {
1411 #ifdef TARGET_WIN32
1412 EMIT_NEW_ICONST (cfg, ins, 1);
1413 #else
1414 EMIT_NEW_ICONST (cfg, ins, 0);
1415 #endif
1417 } else if (in_corlib &&
1418 (strcmp (cmethod_klass_name_space, "System.Reflection") == 0) &&
1419 (strcmp (cmethod_klass_name, "Assembly") == 0)) {
1420 if (cfg->llvm_only && !strcmp (cmethod->name, "GetExecutingAssembly")) {
1421 /* No stack walks are currently available, so implement this as an intrinsic */
1422 MonoInst *assembly_ins;
1424 EMIT_NEW_AOTCONST (cfg, assembly_ins, MONO_PATCH_INFO_IMAGE, m_class_get_image (cfg->method->klass));
1425 ins = mono_emit_jit_icall (cfg, mono_get_assembly_object, &assembly_ins);
1426 return ins;
// While it is not required per
// https://msdn.microsoft.com/en-us/library/system.reflection.assembly.getcallingassembly(v=vs.110).aspx,
// have GetCallingAssembly be consistent independently of varying optimization.
// This fixes mono/tests/test-inline-call-stack.cs under FullAOT+LLVM.
1433 cfg->no_inline |= COMPILE_LLVM (cfg) && strcmp (cmethod->name, "GetCallingAssembly") == 0;
1435 } else if (in_corlib &&
1436 (strcmp (cmethod_klass_name_space, "System.Reflection") == 0) &&
1437 (strcmp (cmethod_klass_name, "MethodBase") == 0)) {
1438 if (cfg->llvm_only && !strcmp (cmethod->name, "GetCurrentMethod")) {
1439 /* No stack walks are currently available, so implement this as an intrinsic */
1440 MonoInst *method_ins;
1441 MonoMethod *declaring = cfg->method;
1443 /* This returns the declaring generic method */
1444 if (declaring->is_inflated)
1445 declaring = ((MonoMethodInflated*)cfg->method)->declaring;
1446 EMIT_NEW_AOTCONST (cfg, method_ins, MONO_PATCH_INFO_METHODCONST, declaring);
1447 ins = mono_emit_jit_icall (cfg, mono_get_method_object, &method_ins);
1448 cfg->no_inline = TRUE;
1449 if (cfg->method != cfg->current_method)
1450 mini_set_inline_failure (cfg, "MethodBase:GetCurrentMethod ()");
1451 return ins;
1453 } else if (cmethod->klass == mono_class_try_get_math_class ()) {
/*
 * There is general branchless code for Min/Max, but it does not work for
 * all inputs:
 * http://everything2.com/?node_id=1051618
 */

/*
 * Constant folding for various Math methods.
 * we avoid folding constants that when computed would raise an error, in
 * case the user code was expecting to get that error raised
 */
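/*
 * For example (per the guards below): Asin/Acos are only folded when |x| <= 1 and Sqrt
 * only when x >= 0, so out-of-domain constants still go through the runtime
 * implementation and keep whatever error or NaN behaviour it has.
 */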
1465 if (fsig->param_count == 1 && args [0]->opcode == OP_R8CONST) {
1466 double source = *(double *)args [0]->inst_p0;
1467 int opcode = 0;
1468 const char *mname = cmethod->name;
1469 char c = mname [0];
1471 if (c == 'A'){
1472 if (strcmp (mname, "Abs") == 0 && fsig->params [0]->type == MONO_TYPE_R8) {
1473 opcode = OP_ABS;
1474 } else if (strcmp (mname, "Asin") == 0) {
1475 if (fabs (source) <= 1)
1476 opcode = OP_ASIN;
1477 } else if (strcmp (mname, "Asinh") == 0) {
1478 opcode = OP_ASINH;
1479 } else if (strcmp (mname, "Acos") == 0) {
1480 if (fabs (source) <= 1)
1481 opcode = OP_ACOS;
1482 } else if (strcmp (mname, "Acosh") == 0) {
1483 if (source >= 1)
1484 opcode = OP_ACOSH;
1485 } else if (strcmp (mname, "Atan") == 0) {
1486 opcode = OP_ATAN;
1487 } else if (strcmp (mname, "Atanh") == 0) {
1488 if (fabs (source) < 1)
1489 opcode = OP_ATANH;
1491 } else if (c == 'C') {
1492 if (strcmp (mname, "Cos") == 0) {
1493 if (!isinf (source))
1494 opcode = OP_COS;
1495 } else if (strcmp (mname, "Cbrt") == 0) {
1496 opcode = OP_CBRT;
1497 } else if (strcmp (mname, "Cosh") == 0) {
1498 opcode = OP_COSH;
1500 } else if (c == 'R') {
1501 if (strcmp (mname, "Round") == 0)
1502 opcode = OP_ROUND;
1503 } else if (c == 'S') {
1504 if (strcmp (mname, "Sin") == 0) {
1505 if (!isinf (source))
1506 opcode = OP_SIN;
1507 } else if (strcmp (mname, "Sqrt") == 0) {
1508 if (source >= 0)
1509 opcode = OP_SQRT;
1510 } else if (strcmp (mname, "Sinh") == 0) {
1511 opcode = OP_SINH;
1513 } else if (c == 'T') {
1514 if (strcmp (mname, "Tan") == 0){
1515 if (!isinf (source))
1516 opcode = OP_TAN;
1517 } else if (strcmp (mname, "Tanh") == 0) {
1518 opcode = OP_TANH;
1522 if (opcode) {
1523 double *dest = (double *) mono_domain_alloc (cfg->domain, sizeof (double));
1524 double result = 0;
1525 MONO_INST_NEW (cfg, ins, OP_R8CONST);
1526 ins->type = STACK_R8;
1527 ins->dreg = mono_alloc_dreg (cfg, (MonoStackType) ins->type);
1528 ins->inst_p0 = dest;
1530 switch (opcode) {
1531 case OP_ABS:
1532 result = fabs (source);
1533 break;
1534 case OP_ACOS:
1535 result = acos (source);
1536 break;
1537 case OP_ACOSH:
1538 result = acosh (source);
1539 break;
1540 case OP_ASIN:
1541 result = asin (source);
1542 break;
1543 case OP_ASINH:
1544 result= asinh (source);
1545 break;
1546 case OP_ATAN:
1547 result = atan (source);
1548 break;
1549 case OP_ATANH:
1550 result = atanh (source);
1551 break;
1552 case OP_CBRT:
1553 result = cbrt (source);
1554 break;
1555 case OP_COS:
1556 result = cos (source);
1557 break;
1558 case OP_COSH:
1559 result = cosh (source);
1560 break;
1561 case OP_ROUND:
1562 result = round (source);
1563 break;
1564 case OP_SIN:
1565 result = sin (source);
1566 break;
1567 case OP_SINH:
1568 result = sinh (source);
1569 break;
1570 case OP_SQRT:
1571 result = sqrt (source);
1572 break;
1573 case OP_TAN:
1574 result = tan (source);
1575 break;
1576 case OP_TANH:
1577 result = tanh (source);
1578 break;
1579 default:
1580 g_error ("invalid opcode %d", (int)opcode);
1582 *dest = result;
1583 MONO_ADD_INS (cfg->cbb, ins);
1584 NULLIFY_INS (args [0]);
1585 return ins;
1588 } else if (cmethod->klass == mono_defaults.systemtype_class && !strcmp (cmethod->name, "op_Equality")) {
1589 EMIT_NEW_BIALU (cfg, ins, OP_COMPARE, -1, args [0]->dreg, args [1]->dreg);
1590 MONO_INST_NEW (cfg, ins, OP_PCEQ);
1591 ins->dreg = alloc_preg (cfg);
1592 ins->type = STACK_I4;
1593 MONO_ADD_INS (cfg->cbb, ins);
1594 return ins;
1595 } else if (((!strcmp (cmethod_klass_image->assembly->aname.name, "MonoMac") ||
1596 !strcmp (cmethod_klass_image->assembly->aname.name, "monotouch")) &&
1597 !strcmp (cmethod_klass_name_space, "XamCore.ObjCRuntime") &&
1598 !strcmp (cmethod_klass_name, "Selector")) ||
1599 ((!strcmp (cmethod_klass_image->assembly->aname.name, "Xamarin.iOS") ||
1600 !strcmp (cmethod_klass_image->assembly->aname.name, "Xamarin.Mac")) &&
1601 !strcmp (cmethod_klass_name_space, "ObjCRuntime") &&
1602 !strcmp (cmethod_klass_name, "Selector"))
1604 if ((cfg->backend->have_objc_get_selector || cfg->compile_llvm) &&
1605 !strcmp (cmethod->name, "GetHandle") && fsig->param_count == 1 &&
1606 (args [0]->opcode == OP_GOT_ENTRY || args [0]->opcode == OP_AOTCONST) &&
1607 cfg->compile_aot) {
1608 MonoInst *pi;
1609 MonoJumpInfoToken *ji;
1610 char *s;
1612 if (args [0]->opcode == OP_GOT_ENTRY) {
1613 pi = (MonoInst *)args [0]->inst_p1;
1614 g_assert (pi->opcode == OP_PATCH_INFO);
1615 g_assert (GPOINTER_TO_INT (pi->inst_p1) == MONO_PATCH_INFO_LDSTR);
1616 ji = (MonoJumpInfoToken *)pi->inst_p0;
1617 } else {
1618 g_assert (GPOINTER_TO_INT (args [0]->inst_p1) == MONO_PATCH_INFO_LDSTR);
1619 ji = (MonoJumpInfoToken *)args [0]->inst_p0;
1622 NULLIFY_INS (args [0]);
1624 s = mono_ldstr_utf8 (ji->image, mono_metadata_token_index (ji->token), &cfg->error);
1625 return_val_if_nok (&cfg->error, NULL);
1627 MONO_INST_NEW (cfg, ins, OP_OBJC_GET_SELECTOR);
1628 ins->dreg = mono_alloc_ireg (cfg);
1629 // FIXME: Leaks
1630 ins->inst_p0 = s;
1631 MONO_ADD_INS (cfg->cbb, ins);
1632 return ins;
1634 } else if (in_corlib &&
1635 (strcmp (cmethod_klass_name_space, "System.Runtime.InteropServices") == 0) &&
1636 (strcmp (cmethod_klass_name, "Marshal") == 0)) {
1637 //Convert Marshal.PtrToStructure<T> of blittable T to direct loads
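/*
 * Illustrative example (not from the original source): Marshal.PtrToStructure<int> (ptr)
 * can become a plain 4-byte load, since mono_type_is_native_blittable () guarantees the
 * managed and native layouts have the same size.
 */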
1638 if (strcmp (cmethod->name, "PtrToStructure") == 0 &&
1639 cmethod->is_inflated &&
1640 fsig->param_count == 1 &&
1641 !mini_method_check_context_used (cfg, cmethod)) {
1643 MonoGenericContext *method_context = mono_method_get_context (cmethod);
1644 MonoType *arg0 = method_context->method_inst->type_argv [0];
1645 if (mono_type_is_native_blittable (arg0))
1646 return mini_emit_memory_load (cfg, arg0, args [0], 0, 0);
	} else if (cmethod->klass == mono_defaults.enum_class && !strcmp (cmethod->name, "HasFlag") &&
			args [0]->opcode == OP_BOX && args [1]->opcode == OP_BOX_ICONST && args [0]->klass == args [1]->klass) {
		args [1]->opcode = OP_ICONST;
		ins = mini_handle_enum_has_flag (cfg, args [0]->klass, NULL, args [0]->sreg1, args [1]);
		NULLIFY_INS (args [0]);
		return ins;
	} else if (in_corlib &&
			!strcmp (cmethod_klass_name_space, "System") &&
			(!strcmp (cmethod_klass_name, "Span`1") || !strcmp (cmethod_klass_name, "ReadOnlySpan`1"))) {
		return emit_span_intrinsics (cfg, cmethod, fsig, args);
	} else if (in_corlib &&
			!strcmp (cmethod_klass_name_space, "Internal.Runtime.CompilerServices") &&
			!strcmp (cmethod_klass_name, "Unsafe")) {
		return emit_unsafe_intrinsics (cfg, cmethod, fsig, args);
	} else if (!strcmp (cmethod_klass_name_space, "System.Runtime.CompilerServices") &&
			!strcmp (cmethod_klass_name, "Unsafe") &&
			(in_corlib || !strcmp (cmethod_klass_image->assembly->aname.name, "System.Runtime.CompilerServices.Unsafe"))) {
		return emit_unsafe_intrinsics (cfg, cmethod, fsig, args);
	} else if (in_corlib &&
			!strcmp (cmethod_klass_name_space, "System.Runtime.CompilerServices") &&
			!strcmp (cmethod_klass_name, "JitHelpers")) {
		return emit_jit_helpers_intrinsics (cfg, cmethod, fsig, args);
	}

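	/* None of the class-specific handlers above returned an instruction; try arch SIMD intrinsics */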
#ifdef MONO_ARCH_SIMD_INTRINSICS
	if (cfg->opt & MONO_OPT_SIMD) {
		ins = mono_emit_simd_intrinsics (cfg, cmethod, fsig, args);
		if (ins)
			return ins;
	}
#endif

	/* Fallback if SIMD is disabled */
	if (in_corlib && !strcmp ("System.Numerics", cmethod_klass_name_space) && !strcmp ("Vector", cmethod_klass_name)) {
		if (!strcmp (cmethod->name, "get_IsHardwareAccelerated")) {
			EMIT_NEW_ICONST (cfg, ins, 0);
			ins->type = STACK_I4;
			return ins;
		}
	}

	ins = mono_emit_native_types_intrinsics (cfg, cmethod, fsig, args);
	if (ins)
		return ins;

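	/* Give the LLVM backend a chance to intrinsify the call before falling back to mono_arch_emit_inst_for_method () */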
	if (COMPILE_LLVM (cfg)) {
		ins = llvm_emit_inst_for_method (cfg, cmethod, fsig, args, in_corlib);
		if (ins)
			return ins;
	}

	return mono_arch_emit_inst_for_method (cfg, cmethod, fsig, args);
}

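/*
 * emit_array_unsafe_access:
 *
 *   Implements the Array.UnsafeLoad/UnsafeStore icalls. Loads read the element through the
 * address computed by mini_emit_ldelema_1_ins () without a bounds check; stores go through
 * mini_emit_array_store ().
 */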
static MonoInst*
emit_array_unsafe_access (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args, int is_set)
{
	MonoClass *eklass;

	if (is_set)
		eklass = mono_class_from_mono_type_internal (fsig->params [2]);
	else
		eklass = mono_class_from_mono_type_internal (fsig->ret);

	if (is_set) {
		return mini_emit_array_store (cfg, eklass, args, FALSE);
	} else {
		MonoInst *ins, *addr = mini_emit_ldelema_1_ins (cfg, eklass, args [0], args [1], FALSE);
		EMIT_NEW_LOAD_MEMBASE_TYPE (cfg, ins, m_class_get_byval_arg (eklass), addr->dreg, 0);
		return ins;
	}
}

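/*
 * is_unsafe_mov_compatible:
 *
 *   Return TRUE if a value of PARAM_KLASS can be reinterpreted as RETURN_KLASS by a plain
 * register move: both reference types, or blittable valuetypes of equal size, or small
 * scalars that share the same register size class.
 */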
static gboolean
is_unsafe_mov_compatible (MonoCompile *cfg, MonoClass *param_klass, MonoClass *return_klass)
{
	uint32_t align;
	int param_size, return_size;

	param_klass = mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (param_klass)));
	return_klass = mono_class_from_mono_type_internal (mini_get_underlying_type (m_class_get_byval_arg (return_klass)));

	if (cfg->verbose_level > 3)
		printf ("[UNSAFE-MOV-INTRISIC] %s <- %s\n", m_class_get_name (return_klass), m_class_get_name (param_klass));

	// Don't allow mixing reference types with value types
	if (m_class_is_valuetype (param_klass) != m_class_is_valuetype (return_klass)) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tone of the args is a valuetype and the other is not\n");
		return FALSE;
	}

	if (!m_class_is_valuetype (param_klass)) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\targs are reference types\n");
		return TRUE;
	}

	// Both value types must be blittable (no managed references)
	if (m_class_has_references (param_klass) || m_class_has_references (return_klass))
		return FALSE;

	MonoType *param_type = m_class_get_byval_arg (param_klass);
	MonoType *return_type = m_class_get_byval_arg (return_klass);

	/* Avoid mixing structs and primitive types/enums, they need to be handled differently in the JIT */
	if ((MONO_TYPE_ISSTRUCT (param_type) && !MONO_TYPE_ISSTRUCT (return_type)) ||
			(!MONO_TYPE_ISSTRUCT (param_type) && MONO_TYPE_ISSTRUCT (return_type))) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tmixing structs and scalars\n");
		return FALSE;
	}

	if (param_type->type == MONO_TYPE_R4 || param_type->type == MONO_TYPE_R8 ||
			return_type->type == MONO_TYPE_R4 || return_type->type == MONO_TYPE_R8) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tfloat or double are not supported\n");
		return FALSE;
	}

	param_size = mono_class_value_size (param_klass, &align);
	return_size = mono_class_value_size (return_klass, &align);

	// We can do it if sizes match
	if (param_size == return_size) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tsame size\n");
		return TRUE;
	}

	// No simple way to handle struct if sizes don't match
	if (MONO_TYPE_ISSTRUCT (param_type)) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tsize mismatch and type is a struct\n");
		return FALSE;
	}

	/*
	 * Same reg size category.
	 * A quick note on why we don't require widening here.
	 * The intrinsic is "R Array.UnsafeMov<S,R> (S s)".
	 *
	 * Since the source value comes from a function argument, the JIT will already have
	 * the value in a VREG and performed any widening needed before (say, when loading from a field).
	 */
	if (param_size <= 4 && return_size <= 4) {
		if (cfg->verbose_level > 3)
			printf ("[UNSAFE-MOV-INTRISIC]\tsize mismatch but both are of the same reg class\n");
		return TRUE;
	}

	return FALSE;
}

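/*
 * emit_array_unsafe_mov:
 *
 *   Implements "R Array.UnsafeMov<S,R> (S s)" by returning the argument unchanged when the
 * parameter and return types (or their element types, for rank-1 arrays) are move-compatible.
 */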
static MonoInst*
emit_array_unsafe_mov (MonoCompile *cfg, MonoMethodSignature *fsig, MonoInst **args)
{
	MonoClass *param_klass = mono_class_from_mono_type_internal (fsig->params [0]);
	MonoClass *return_klass = mono_class_from_mono_type_internal (fsig->ret);

	if (mini_is_gsharedvt_variable_type (fsig->ret))
		return NULL;

	// Valuetypes that are semantically equivalent, or numbers that can be widened
	if (is_unsafe_mov_compatible (cfg, param_klass, return_klass))
		return args [0];

	// Arrays of valuetypes that are semantically equivalent
	if (m_class_get_rank (param_klass) == 1 && m_class_get_rank (return_klass) == 1 && is_unsafe_mov_compatible (cfg, m_class_get_element_class (param_klass), m_class_get_element_class (return_klass)))
		return args [0];

	return NULL;
}

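/*
 * mini_emit_inst_for_sharable_method:
 *
 *   Intrinsify the Array.UnsafeStore/UnsafeLoad/UnsafeMov icalls.
 */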
MonoInst*
mini_emit_inst_for_sharable_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
{
	if (cmethod->klass == mono_defaults.array_class) {
		if (strcmp (cmethod->name, "UnsafeStore") == 0)
			return emit_array_unsafe_access (cfg, fsig, args, TRUE);
		else if (strcmp (cmethod->name, "UnsafeLoad") == 0)
			return emit_array_unsafe_access (cfg, fsig, args, FALSE);
		else if (strcmp (cmethod->name, "UnsafeMov") == 0)
			return emit_array_unsafe_mov (cfg, fsig, args);
	}

	return NULL;
}

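/*
 * mini_emit_inst_for_field_load:
 *
 *   Intrinsify loads of well-known fields; currently only BitConverter.IsLittleEndian, which is
 * folded to a constant based on the target byte order.
 */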
MonoInst*
mini_emit_inst_for_field_load (MonoCompile *cfg, MonoClassField *field)
{
	MonoClass *klass = field->parent;
	const char *klass_name_space = m_class_get_name_space (klass);
	const char *klass_name = m_class_get_name (klass);
	MonoImage *klass_image = m_class_get_image (klass);
	gboolean in_corlib = klass_image == mono_defaults.corlib;
	gboolean is_le;
	MonoInst *ins;

	if (in_corlib && !strcmp (klass_name_space, "System") && !strcmp (klass_name, "BitConverter") && !strcmp (field->name, "IsLittleEndian")) {
		is_le = (TARGET_BYTE_ORDER == G_LITTLE_ENDIAN);
		EMIT_NEW_ICONST (cfg, ins, is_le);
		return ins;
	}

	return NULL;
}

#else
MONO_EMPTY_SOURCE_FILE (intrinsics);
#endif