#ifndef __MONO_SUPPORT_S390X_H__
#define __MONO_SUPPORT_S390X_H__
/*------------------------------------------------------------------*/
/* S390_SET - load the 64-bit constant v into register dr using the */
/* shortest encoding the value permits: LGHI (signed 16-bit), LLILL */
/* (unsigned 16-bit), LGFI (signed 32-bit), LLILF (unsigned 32-bit) */
/* or, for a full 64-bit value, an IIHF/IILF pair that inserts the  */
/* high and low words separately.  loc is the code emission cursor. */
/* Wrapped in do/while(0) so the macro expands to one statement.    */
/*------------------------------------------------------------------*/
#define S390_SET(loc, dr, v)					\
	do {							\
		guint64 val = (guint64) v;			\
		if (s390_is_imm16(val)) {			\
			s390_lghi(loc, dr, val);		\
		} else if (s390_is_uimm16(val)) {		\
			s390_llill(loc, dr, val);		\
		} else if (s390_is_imm32(val)) {		\
			s390_lgfi(loc, dr, val);		\
		} else if (s390_is_uimm32(val)) {		\
			s390_llilf(loc, dr, val);		\
		} else {					\
			guint32 hi = (val) >> 32;		\
			guint32 lo = (val) & 0xffffffff;	\
			s390_iihf(loc, dr, hi);			\
			s390_iilf(loc, dr, lo);			\
		}						\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_LONG - emit a base+index+displacement memory operation for  */
/* register r.  When off fits a signed 20-bit displacement the      */
/* long-displacement "y" form (opy) is emitted directly; otherwise  */
/* the effective address is materialized in s390_r13 (clobbering    */
/* s390_r13, and also s390_r0 when ix != 0) and the plain op form   */
/* is emitted with a zero displacement.                             */
/* Wrapped in do/while(0) so the if/else cannot capture a caller's  */
/* dangling else and the macro is a single statement.               */
/*------------------------------------------------------------------*/
#define S390_LONG(loc, opy, op, r, ix, br, off)				\
	do {								\
		if (s390_is_imm20(off)) {				\
			s390_##opy (loc, r, ix, br, off);		\
		} else {						\
			if (ix == 0) {					\
				S390_SET (loc, s390_r13, off);		\
				s390_la (loc, s390_r13, s390_r13, br, 0); \
			} else {					\
				s390_la  (loc, s390_r13, ix, br, 0);	\
				S390_SET (loc, s390_r0, off);		\
				s390_agr (loc, s390_r13, s390_r0);	\
			}						\
			s390_##op (loc, r, 0, s390_r13, 0);		\
		}							\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_SET_MASK - load the (sign-extendable) mask value v into     */
/* register dr: LGHI for signed 16-bit, LGFI for signed 32-bit,     */
/* otherwise an IILF/IIHF pair inserting low word first.  Unlike    */
/* S390_SET this tests v directly, so sign extension of small       */
/* negative masks is preserved.                                     */
/*------------------------------------------------------------------*/
#define S390_SET_MASK(loc, dr, v)				\
	do {							\
		if (s390_is_imm16 (v)) {			\
			s390_lghi (loc, dr, v);			\
		} else if (s390_is_imm32 (v)) {			\
			s390_lgfi (loc, dr, v);			\
		} else {					\
			gint64 val = (gint64) v;		\
			guint32 hi = (val) >> 32;		\
			guint32 lo = (val) & 0xffffffff;	\
			s390_iilf(loc, dr, lo);			\
			s390_iihf(loc, dr, hi);			\
		}						\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_CALL_TEMPLATE - emit a fixed-size call sequence: load a     */
/* placeholder 64-bit target (zero) into r via IIHF/IILF, then      */
/* BASR through r with the return address in s390_r14.  The zero    */
/* immediates are patched later by S390_EMIT_CALL.                  */
/*------------------------------------------------------------------*/
#define S390_CALL_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
		s390_basr (loc, s390_r14, r);			\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_BR_TEMPLATE - emit a fixed-size branch sequence: load a     */
/* placeholder 64-bit target (zero) into r via IIHF/IILF, then      */
/* branch unconditionally through r.  The zero immediates are       */
/* patched later with the real target address.                      */
/* NOTE(review): the trailing s390_br was reconstructed from the    */
/* surrounding template macros - confirm against upstream.          */
/*------------------------------------------------------------------*/
#define S390_BR_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
		s390_br   (loc, r);				\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_LOAD_TEMPLATE - emit a fixed-size 64-bit constant load:     */
/* IIHF/IILF of r with zero immediates, patched later by            */
/* S390_EMIT_LOAD with the real value.                              */
/*------------------------------------------------------------------*/
#define S390_LOAD_TEMPLATE(loc, r)				\
	do {							\
		s390_iihf (loc, r, 0);				\
		s390_iilf (loc, r, 0);				\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_EMIT_CALL - patch the target address t into the IIHF/IILF   */
/* pair previously emitted by S390_CALL_TEMPLATE at loc.  IIHF and  */
/* IILF are 6-byte instructions whose 32-bit immediate starts at    */
/* byte offset 2, so the high word goes at loc+2 and the low word   */
/* 6 bytes further on (skipping into the IILF immediate field).     */
/* NOTE(review): the p += 2 / p += 6 advances were reconstructed    */
/* from the instruction encoding - confirm against upstream.        */
/*------------------------------------------------------------------*/
#define S390_EMIT_CALL(loc, t)					\
	do {							\
		gint64 val = (gint64) t;			\
		guint32 hi = (val) >> 32;			\
		guint32 lo = (val) & 0xffffffff;		\
		uintptr_t p = (uintptr_t) loc;			\
		p += 2;						\
		*(guint32 *) p = hi;				\
		p += 6;						\
		*(guint32 *) p = lo;				\
	} while (0)
/*------------------------------------------------------------------*/
/* S390_EMIT_LOAD - patch the constant v into the IIHF/IILF pair    */
/* previously emitted by S390_LOAD_TEMPLATE at loc.  As in          */
/* S390_EMIT_CALL, each instruction is 6 bytes with its 32-bit      */
/* immediate at byte offset 2: high word at loc+2, low word 6       */
/* bytes later inside the IILF immediate field.                     */
/* NOTE(review): the p += 2 / p += 6 advances were reconstructed    */
/* from the instruction encoding - confirm against upstream.        */
/*------------------------------------------------------------------*/
#define S390_EMIT_LOAD(loc, v)					\
	do {							\
		gint64 val = (gint64) v;			\
		guint32 hi = (val) >> 32;			\
		guint32 lo = (val) & 0xffffffff;		\
		uintptr_t p = (uintptr_t) loc;			\
		p += 2;						\
		*(guint32 *) p = hi;				\
		p += 6;						\
		*(guint32 *) p = lo;				\
	} while (0)
#endif /* __MONO_SUPPORT_S390X_H__ */