Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- $Id: codegen.h 7848 2007-05-01 21:40:26Z pm $
+ $Id: codegen.h 8240 2007-07-29 20:36:47Z pm $
*/
#define PATCHER_NOPS \
do { \
- M_NOP; \
- M_NOP; \
- M_NOP; \
+ /* do not generate additional nops for long patcher branches */ \
+ if (! CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) { \
+ M_NOP; \
+ M_NOP; \
+ M_NOP; \
+ } \
} while (0)
+#define PATCHER_LONGBRANCHES_NOPS \
+ do { \
+ M_BR(SZ_BRC + (10 * 2)); \
+ M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; /* ild */ \
+ M_NOP2; /* aadd */ \
+ M_NOP2; /* jmp */ \
+ } while (0)
+
#define PATCHER_NOPS_SKIP 12
+#define PATCHER_LONGBRANCHES_NOPS_SKIP 24
/* branch defines ************************************************************/
-#define BRANCH_NOPS M_NOP /* Size of at least M_BRC */
+#define BRANCH_NOPS \
+ do { \
+ if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) { \
+ M_NOP2; M_NOP2; /* brc */ \
+ M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; M_NOP2; /* ild */ \
+ M_NOP2; /* ar, bcr */ \
+ } else { \
+ M_NOP; /* brc */ \
+ } \
+ } while (0)
/* stub defines **************************************************************/
#define N_VALID_DISP(x) ((N_DISP_MIN <= (x)) && ((x) <= N_DISP_MAX))
#define ASSERT_VALID_DISP(x) assert(N_VALID_DISP(x))
+#define N_PV_OFFSET (-0xFFC)
+#define N_DSEG_DISP(x) ((x) - N_PV_OFFSET)
+#define N_VALID_DSEG_DISP(x) N_VALID_DISP(N_DSEG_DISP(x))
+
#define N_BRANCH_MIN -32768
#define N_BRANCH_MAX 32767
#define N_VALID_BRANCH(x) ((N_BRANCH_MIN <= (x)) && ((x) <= N_BRANCH_MAX))
#define DD_NO 14
#define DD_ANY 15
+#define DD_0 8
+#define DD_1 4
+#define DD_2 2
+#define DD_3 1
+
/* Misc */
/* Trap instruction.
# define N_J(i2) N_BRC(DD_ANY, i2)
# define SZ_BRC SZ_RI
# define SZ_J SZ_RI
+# define N_BRC_BACK_PATCH(brc_pos) \
+ do { \
+ *(u4 *)(brc_pos) |= (u4)(cd->mcodeptr - (brc_pos)) / 2; \
+ } while (0)
#define N_BRCT(r1, i2) N_RI(0xA7, 0x6, r1, (i2) / 2)
#define N_BRXH(r1, r3, i2) N_RSI(0x84, r1, r3, (i2) / 2)
#define N_BRXLE(r1, r3, i2) N_RSI(0x85, r1, r2, (i2) / 2)
#define N_LCXBR(r1, r2) N_RRE(0xB343, r1, r2)
#define N_LDEBR(r1, r2) N_RRE(0xB304, r1, r2)
+# define SZ_LDEBR SZ_RRE
#define N_LXDBR(r1, r2) N_RRE(0xB305, r1, r2)
#define N_LXEBR(r1, r2) N_RRE(0xB306, r1, r2)
#define N_LDXBR(r1, r2) N_RRE(0xB345, r1, r2)
#define N_LEXBR(r1, r2) N_RRE(0xB346, r1, r2)
+#define N_LTEBR(r1, r2) N_RRE(0xB302, r1, r2)
+#define N_LTDBR(r1, r2) N_RRE(0xB312, r1, r2)
+#define N_LTXBR(r1, r2) N_RRE(0xB342, r1, r2)
+
#define N_MEEBR(r1, r2) N_RRE(0xB317, r1, r2)
#define N_MDBR(r1, r2) N_RRE(0xB31C, r1, r2)
#define N_MXBR(r1, r2) N_RRE(0xB34C, r1, r2)
do { \
if (N_VALID_DISP(d)) { \
N_L(r, d, RN, b); \
- } else if (r == R0) { \
+ } else if ((r == R0) && N_VALID_IMM(d)) { \
N_LR(R0, R1); \
N_LHI(R1, d); \
N_L(R1, 0, R1, b); \
N_XR(R1, R0); \
N_XR(R0, R1); \
N_XR(R1, R0); \
- } else { \
+ } else if ((r != R0) && N_VALID_IMM(d)) { \
N_LHI(r, d); N_L(r, 0, r, b); \
+ } else { \
+ N_BRAS(r, SZ_BRAS + SZ_LONG); \
+ N_LONG(d); \
+ N_L(r, 0, RN, r); \
+ N_L(r, 0, r, b); \
} \
} while (0)
+#define M_ILD_DSEG(r, d) M_ILD(r, REG_PV, N_DSEG_DISP(d))
+
#define M_ALD(r, b, d) M_ILD(r, b, d)
+#define M_ALD_DSEG(r, d) M_ALD(r, REG_PV, N_DSEG_DISP(d))
-#define M_LDA(r, b, d) _IFNEG( \
- d, \
- N_LHI(r, d); N_LA(r, 0, r, b), \
- N_LA(r, d, RN, b) \
-)
+#define M_LDA(r, b, d) \
+ do { \
+ if (N_VALID_DISP(d)) { \
+ N_LA(r, d, RN, b); \
+ } else if (N_VALID_IMM(d)) { \
+ N_LHI(r, d); \
+ N_LA(r, 0, r, b); \
+ } else { \
+ N_BRAS(r, SZ_BRAS + SZ_LONG); \
+ N_LONG(d); \
+ N_L(r, 0, RN, r); \
+ N_LA(r, 0, r, b); \
+ } \
+ } while (0)
+#define M_LDA_DSEG(r, d) M_LDA(r, REG_PV, N_DSEG_DISP(d))
#define M_FLD(r, b, d) N_LE(r, d, RN, b)
-
#define M_FLDN(r, b, d, t) _IFNEG( \
d, \
N_LHI(t, d); N_LE(r, 0, t, b), \
N_LE(r, d, RN, b) \
)
-
+#define M_FLD_DSEG(r, d, t) M_FLDN(r, REG_PV, N_DSEG_DISP(d), t)
+
#define M_DLD(r, b, d) N_LD(r, d, RN, b)
#define M_DLDN(r, b, d, t) _IFNEG( \
d, \
N_LHI(t, d); N_LD(r, 0, t, b), \
N_LD(r, d, RN, b) \
)
+#define M_DLD_DSEG(r, d, t) M_DLDN(r, REG_PV, N_DSEG_DISP(d), t)
#define M_LLD(r, b, d) _IFNEG( \
d, \
N_L(GET_LOW_REG(r), 4, GET_LOW_REG(r), b), \
N_L(GET_HIGH_REG(r), (d) + 0, RN, b); N_L(GET_LOW_REG(r), (d) + 4, RN, b) \
)
+#define M_LLD_DSEG(r, d) M_LLD(r, REG_PV, N_DSEG_DISP(d))
/* MOV(a, b) -> mov from A to B */
#define M_BR(disp) N_BRC(DD_ANY, disp)
#define M_JMP(rs, rd) _IF(rs == RN, N_BCR(DD_ANY, rd), N_BASR(rs, rd))
#define M_NOP N_BC(0, 0, RN, RN)
+#define M_NOP2 N_BCR(0, RN)
#define M_JSR(reg_ret, reg_addr) N_BASR(reg_ret, reg_addr)
#define M_ICMP(a, b) N_CR(a, b)
#define M_ICMPU(a, b) N_CLR(a, b)
#define M_CVTFI(src, dst) N_CFEBR(dst, 5, src)
#define M_CVTDI(src, dst) N_CFDBR(dst, 5, src)
#define M_IADD(a, dest) N_AR(dest, a)
+#define M_AADD(a, dest) N_AR(dest, a)
#define M_ISUB(a, dest) N_SR(dest, a)
#define M_ASUB(a, dest) N_SR(dest, a)
#define M_IAND(a, dest) N_NR(dest, a)
N_LHI(reg, i); \
} else { \
disp = dseg_add_s4(cd, (i)); \
- M_ILD(reg, REG_PV, disp); \
+ M_ILD_DSEG(reg, disp); \
} \
} while (0)
#define M_AST_IMM32(a,b,c) _DEPR( M_AST_IMM32(a,b,c) )
-#define M_AADD(a,b) _DEPR( M_AADD(a,b) )
-
#define M_LADD_IMM32(a,b) _DEPR( M_LADD_IMM32(a,b) )
#define M_AADD_IMM32(a,b) _DEPR( M_AADD_IMM32(a,b) )
#define M_LSUB_IMM32(a,b) _DEPR( M_LSUB_IMM32(a,b) )