5 #ifndef __MONO_SUPPORT_S390X_H__
6 #define __MONO_SUPPORT_S390X_H__
/*
 * S390_SET: load the 64-bit constant v into register dr at code
 * location loc, picking the shortest instruction that can encode the
 * value (16-bit signed, 16-bit unsigned, 32-bit signed, 32-bit
 * unsigned); otherwise fall back to inserting the high and low 32-bit
 * halves with IIHF/IILF.
 */
#define S390_SET(loc, dr, v)					\
	do {							\
		guint64 val = (guint64) v;			\
		if (s390_is_imm16(val)) {			\
			s390_lghi(loc, dr, val);		\
		} else if (s390_is_uimm16(val)) {		\
			s390_llill(loc, dr, val);		\
		} else if (s390_is_imm32(val)) {		\
			s390_lgfi(loc, dr, val);		\
		} else if (s390_is_uimm32(val)) {		\
			s390_llilf(loc, dr, val);		\
		} else {					\
			guint32 hi = (val) >> 32;		\
			guint32 lo = (val) & 0xffffffff;	\
			s390_iihf(loc, dr, hi);			\
			s390_iilf(loc, dr, lo);			\
		}						\
	} while (0)
/*
 * S390_LONG: emit a base+index+displacement memory operation.  When
 * the displacement fits in 20 signed bits, use the long-displacement
 * ("y") form directly; otherwise materialize the effective address in
 * r13 and issue the plain form with a zero displacement.
 * NOTE(review): the long path clobbers s390_r13 (and s390_r0 when
 * ix != 0) — callers must not have live values there.
 */
#define S390_LONG(loc, opy, op, r, ix, br, off)			\
	do {							\
		if (s390_is_imm20(off)) {			\
			s390_##opy (loc, r, ix, br, off);	\
		} else {					\
			if (ix == 0) {				\
				S390_SET(loc, s390_r13, off);	\
				s390_la (loc, s390_r13, s390_r13, br, 0); \
			} else {				\
				s390_la (loc, s390_r13, ix, br, 0); \
				S390_SET (loc, s390_r0, off);	\
				s390_agr (loc, s390_r13, s390_r0); \
			}					\
			s390_##op (loc, r, 0, s390_r13, 0);	\
		}						\
	} while (0)
/*
 * S390_SET_MASK: load the (sign-extendable) mask constant v into
 * register dr.  Uses a single LGHI/LGFI when the value fits in a
 * 16/32-bit signed immediate; otherwise inserts the low then the high
 * 32-bit halves with IILF/IIHF.
 */
#define S390_SET_MASK(loc, dr, v)				\
	do {							\
		if (s390_is_imm16 (v)) {			\
			s390_lghi (loc, dr, v);			\
		} else if (s390_is_imm32 (v)) {			\
			s390_lgfi (loc, dr, v);			\
		} else {					\
			gint64 val = (gint64) v;		\
			guint32 hi = (val) >> 32;		\
			guint32 lo = (val) & 0xffffffff;	\
			s390_iilf(loc, dr, lo);			\
			s390_iihf(loc, dr, hi);			\
		}						\
	} while (0)
/*
 * S390_CALL_TEMPLATE: emit a patchable call sequence — load a 64-bit
 * target of zero into r via IIHF/IILF, then branch-and-save through r.
 * S390_EMIT_CALL later patches the two 32-bit immediates in place.
 */
#define S390_CALL_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
		s390_basr (loc, s390_r14, r);			\
	} while (0)
/*
 * S390_BR_TEMPLATE: emit a patchable unconditional-branch sequence —
 * load a 64-bit target of zero into r, then branch through r.  The
 * IIHF/IILF immediates are patched later (cf. S390_EMIT_CALL).
 * NOTE(review): the s390_br line was absent from the damaged source;
 * without it the macro would load the template but never branch.
 */
#define S390_BR_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
		s390_br   (loc, r);				\
	} while (0)
/*
 * S390_LOAD_TEMPLATE: emit a patchable 64-bit constant load into r
 * (both IIHF/IILF immediates zero).  S390_EMIT_LOAD patches the real
 * value in afterwards.
 */
#define S390_LOAD_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
	} while (0)
/*
 * S390_EMIT_CALL: patch the call target t into a sequence previously
 * emitted with S390_CALL_TEMPLATE.  Each of IIHF/IILF is a 6-byte
 * RIL-format instruction whose 32-bit immediate starts 2 bytes in, so
 * the high half is written at loc+2 and the low half at loc+8.
 * NOTE(review): without the pointer advances both stores would hit the
 * same address — the advances are restored from the canonical source.
 */
#define S390_EMIT_CALL(loc, t)					\
	do {							\
		gint64 val = (gint64) t;			\
		guint32 hi = (val) >> 32;			\
		guint32 lo = (val) & 0xffffffff;		\
		uintptr_t p = (uintptr_t) loc;			\
		p += 2;						\
		*(guint32 *) p = hi;				\
		p += 6;						\
		*(guint32 *) p = lo;				\
	} while (0)
/*
 * S390_EMIT_LOAD: patch the 64-bit constant v into a sequence
 * previously emitted with S390_LOAD_TEMPLATE.  Mirrors S390_EMIT_CALL:
 * the IIHF immediate (high half) lives at loc+2 and the IILF immediate
 * (low half) at loc+8, 2 bytes into each 6-byte RIL instruction.
 */
#define S390_EMIT_LOAD(loc, v)					\
	do {							\
		gint64 val = (gint64) v;			\
		guint32 hi = (val) >> 32;			\
		guint32 lo = (val) & 0xffffffff;		\
		uintptr_t p = (uintptr_t) loc;			\
		p += 2;						\
		*(guint32 *) p = hi;				\
		p += 6;						\
		*(guint32 *) p = lo;				\
	} while (0)
101 #endif /* __MONO_SUPPORT_S390X_H__ */