Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- $Id: codegen.c 7353 2007-02-13 23:14:35Z twisti $
+ $Id: codegen.c 7505 2007-03-12 13:34:37Z twisti $
*/
M_MOV_S(REG_ITMP1, s1);
M_RSBMI_IMM(REG_ITMP1, REG_ITMP1, 0);
if (IS_IMM(iptr->sx.val.i))
- M_AND_IMM(d, REG_ITMP1, iptr->sx.val.i);
+ M_AND_IMM(REG_ITMP1, iptr->sx.val.i, d);
else {
ICONST(REG_ITMP3, iptr->sx.val.i);
- M_AND(d, REG_ITMP1, REG_ITMP3);
+ M_AND(REG_ITMP1, REG_ITMP3, d);
}
M_RSBMI_IMM(d, d, 0);
emit_store_dst(jd, iptr, d);
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_AND_IMM(REG_ITMP2, s2, 0x1f);
+ M_AND_IMM(s2, 0x1f, REG_ITMP2);
M_MOV(d, REG_LSL_REG(s1, REG_ITMP2));
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_AND_IMM(REG_ITMP2, s2, 0x1f);
+ M_AND_IMM(s2, 0x1f, REG_ITMP2);
M_MOV(d, REG_ASR_REG(s1, REG_ITMP2));
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_AND_IMM(REG_ITMP2, s2, 0x1f);
+ M_AND_IMM(s2, 0x1f, REG_ITMP2);
M_MOV(d, REG_LSR_REG(s1, REG_ITMP2));
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_AND(d, s1, s2);
+ M_AND(s1, s2, d);
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1_low(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_low(jd, iptr, REG_ITMP1);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
- M_AND(GET_LOW_REG(d), s1, s2);
+ M_AND(s1, s2, GET_LOW_REG(d));
s1 = emit_load_s1_high(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
- M_AND(GET_HIGH_REG(d), s1, s2);
+ M_AND(s1, s2, GET_HIGH_REG(d));
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_ORR(d, s1, s2);
+ M_ORR(s1, s2, d);
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1_low(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_low(jd, iptr, REG_ITMP1);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
- M_ORR(GET_LOW_REG(d), s1, s2);
+ M_ORR(s1, s2, GET_LOW_REG(d));
s1 = emit_load_s1_high(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
- M_ORR(GET_HIGH_REG(d), s1, s2);
+ M_ORR(s1, s2, GET_HIGH_REG(d));
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1(jd, iptr, REG_ITMP1);
s2 = emit_load_s2(jd, iptr, REG_ITMP2);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
- M_EOR(d, s1, s2);
+ M_EOR(s1, s2, d);
emit_store_dst(jd, iptr, d);
break;
s1 = emit_load_s1_low(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_low(jd, iptr, REG_ITMP1);
d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
- M_EOR(GET_LOW_REG(d), s1, s2);
+ M_EOR(s1, s2, GET_LOW_REG(d));
s1 = emit_load_s1_high(jd, iptr, REG_ITMP3);
s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
- M_EOR(GET_HIGH_REG(d), s1, s2);
+ M_EOR(s1, s2, GET_HIGH_REG(d));
emit_store_dst(jd, iptr, d);
break;
M_FIX(d, s1);
/* this checks for NaN; to return zero as Java likes it */
M_CMF(s1, 0x8);
- M_MOVVS_IMM(d, 0);
+ M_MOVVS_IMM(0, d);
emit_store_dst(jd, iptr, d);
break;
M_FIX(d, s1);
/* this checks for NaN; to return zero as Java likes it */
M_CMF(s1, 0x8);
- M_MOVVS_IMM(d, 0);
+ M_MOVVS_IMM(0, d);
emit_store_dst(jd, iptr, d);
break;
codegen_addreference(cd, iptr->dst.block);
break;
+ case ICMD_IF_LEQ: /* ..., value ==> ... */
+
+ s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
+ s2 = emit_load_s1_low(jd, iptr, REG_ITMP2);
+ if (iptr->sx.val.l == 0) {
+ M_ORR_S(s1, s2, REG_ITMP3);
+ }
+ else {
+ ICONST(REG_ITMP3, iptr->sx.val.l >> 32);
+ M_CMP(s1, REG_ITMP3);
+ ICONST(REG_ITMP3, iptr->sx.val.l & 0xffffffff);
+ M_CMPEQ(s2, REG_ITMP3);
+ }
+ M_BEQ(0);
+ codegen_addreference(cd, iptr->dst.block);
+ break;
+
+#if 0
case ICMD_IF_LLT: /* ..., value ==> ... */
- case ICMD_IF_LLE: /* op1 = target JavaVM pc, val.l = constant */
+
+ s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
+ s2 = emit_load_s1_low(jd, iptr, REG_ITMP2);
+ if (iptr->sx.val.l == 0) {
+ /* if high word is less than zero, the whole long is too */
+ M_CMP_IMM(s1, 0);
+ M_BLT(0);
+ codegen_add_branch_ref(cd, iptr->dst.block);
+ }
+ else {
+ ICONST(REG_ITMP3, iptr->sx.val.l >> 32);
+ M_CMP(s1, REG_ITMP3);
+ M_BLT(0);
+ codegen_add_branch_ref(cd, iptr->dst.block);
+
+/* M_BGT(3); */
+ ICONST(REG_ITMP3, iptr->sx.val.l & 0xffffffff);
+ M_CMPLE(s2, REG_ITMP3);
+ M_BLO(0);
+ codegen_add_branch_ref(cd, iptr->dst.block);
+ }
+ break;
+#endif
+
+ case ICMD_IF_LNE: /* ..., value ==> ... */
+
+ s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
+ s2 = emit_load_s1_low(jd, iptr, REG_ITMP2);
+ if (iptr->sx.val.l == 0) {
+ M_ORR_S(s1, s2, REG_ITMP3);
+ }
+ else {
+ ICONST(REG_ITMP3, iptr->sx.val.l >> 32);
+ M_CMP(s1, REG_ITMP3);
+ ICONST(REG_ITMP3, iptr->sx.val.l & 0xffffffff);
+ M_CMPEQ(s2, REG_ITMP3);
+ }
+ M_BNE(0);
+ codegen_add_branch_ref(cd, iptr->dst.block);
+ break;
+
+ case ICMD_IF_LLT:
+ case ICMD_IF_LLE:
case ICMD_IF_LGT:
case ICMD_IF_LGE:
- case ICMD_IF_LEQ:
- case ICMD_IF_LNE:
/* ATTENTION: compare high words signed and low words unsigned */
M_LDR_INTERN(REG_ITMP3, REG_ITMP1, s2);
M_TST(REG_ITMP3, REG_ITMP3);
- M_MOVNE_IMM(d, 1);
+ M_MOVNE_IMM(1, d);
if (super == NULL) {
M_B(0);
/* If d == REG_ITMP2, then it's destroyed */
if (d == REG_ITMP2)
M_EOR(d, d, d);
- M_MOVLS_IMM(d, 1);
+ M_MOVLS_IMM(1, d);
}
if (branch1) {
/* check for exception */
M_TST(REG_ITMP1_XPTR, REG_ITMP1_XPTR);
- M_MOVEQ(REG_PC, REG_LR); /* if no exception, return to caller */
+ M_MOVEQ(REG_LR, REG_PC); /* if no exception, return to caller */
/* handle exception here */
Authors: Michael Starzinger
Christian Thalinger
- $Id: codegen.h 7276 2007-02-02 11:58:18Z michi $
+ $Id: codegen.h 7505 2007-03-12 13:34:37Z twisti $
*/
#define M_ADC(d,a,b) M_DAT(UNCOND,0x05,d,a,0,0,b) /* d = a + b (with Carry) */
#define M_SUB(d,a,b) M_DAT(UNCOND,0x02,d,a,0,0,b) /* d = a - b */
#define M_SBC(d,a,b) M_DAT(UNCOND,0x06,d,a,0,0,b) /* d = a - b (with Carry) */
-#define M_AND(d,a,b) M_DAT(UNCOND,0x00,d,a,0,0,b) /* d = a & b */
-#define M_ORR(d,a,b) M_DAT(UNCOND,0x0c,d,a,0,0,b) /* d = a | b */
-#define M_EOR(d,a,b) M_DAT(UNCOND,0x01,d,a,0,0,b) /* d = a ^ b */
+#define M_AND(a,b,d) M_DAT(UNCOND,0x00,d,a,0,0,b) /* d = a & b */
+#define M_ORR(a,b,d) M_DAT(UNCOND,0x0c,d,a,0,0,b) /* d = a | b */
+#define M_EOR(a,b,d) M_DAT(UNCOND,0x01,d,a,0,0,b) /* d = a ^ b */
#define M_TST(a,b) M_DAT(UNCOND,0x08,0,a,1,0,b) /* TST a & b */
#define M_TEQ(a,b) M_DAT(UNCOND,0x09,0,a,1,0,b) /* TST a ^ b */
#define M_CMP(a,b) M_DAT(UNCOND,0x0a,0,a,1,0,b) /* TST a - b */
#define M_MOV(d,b) M_DAT(UNCOND,0x0d,d,0,0,0,b) /* d = b */
#define M_ADD_S(d,a,b) M_DAT(UNCOND,0x04,d,a,1,0,b) /* d = a + b (update Flags) */
#define M_SUB_S(d,a,b) M_DAT(UNCOND,0x02,d,a,1,0,b) /* d = a - b (update Flags) */
+#define M_ORR_S(a,b,d)      M_DAT(UNCOND,0x0c,d,a,1,0,b)     /* d = a | b (update Flags) */
#define M_MOV_S(d,b) M_DAT(UNCOND,0x0d,d,0,1,0,b) /* d = b (update Flags) */
#define M_ADD_IMM(d,a,i) M_DAT(UNCOND,0x04,d,a,0,1,i) /* d = a + i */
#define M_SBC_IMM(d,a,i) M_DAT(UNCOND,0x06,d,a,0,1,i) /* d = a - i (with Carry) */
#define M_RSB_IMM(d,a,i) M_DAT(UNCOND,0x03,d,a,0,1,i) /* d = -a + i */
#define M_RSC_IMM(d,a,i) M_DAT(UNCOND,0x07,d,a,0,1,i) /* d = -a + i (with Carry) */
-#define M_AND_IMM(d,a,i) M_DAT(UNCOND,0x00,d,a,0,1,i) /* d = a & i */
+#define M_AND_IMM(a,i,d) M_DAT(UNCOND,0x00,d,a,0,1,i) /* d = a & i */
#define M_TST_IMM(a,i) M_DAT(UNCOND,0x08,0,a,1,1,i) /* TST a & i */
#define M_TEQ_IMM(a,i) M_DAT(UNCOND,0x09,0,a,1,1,i) /* TST a ^ i */
#define M_CMP_IMM(a,i) M_DAT(UNCOND,0x0a,0,a,1,1,i) /* TST a - i */
#define M_RSB_IMMS(d,a,i) M_DAT(UNCOND,0x03,d,a,1,1,i) /* d = -a + i (update Flags) */
#define M_ADDSUB_IMM(d,a,i) if((i)>=0) M_ADD_IMM(d,a,i); else M_SUB_IMM(d,a,-(i))
-#define M_MOVEQ(d,b) M_DAT(COND_EQ,0x0d,d,0,0,0,b)
-#define M_MOVVS_IMM(d,i) M_DAT(COND_VS,0x0d,d,0,0,1,i)
-#define M_MOVNE_IMM(d,i) M_DAT(COND_NE,0x0d,d,0,0,1,i)
-#define M_MOVLS_IMM(d,i) M_DAT(COND_LS,0x0d,d,0,0,1,i)
+#define M_MOVEQ(a,d) M_DAT(COND_EQ,0x0d,d,0,0,0,a)
+
+#define M_MOVVS_IMM(i,d) M_DAT(COND_VS,0x0d,d,0,0,1,i)
+#define M_MOVNE_IMM(i,d) M_DAT(COND_NE,0x0d,d,0,0,1,i)
+#define M_MOVLT_IMM(i,d) M_DAT(COND_LT,0x0d,d,0,0,1,i)
+#define M_MOVLS_IMM(i,d) M_DAT(COND_LS,0x0d,d,0,0,1,i)
+
#define M_ADDLT_IMM(d,a,i) M_DAT(COND_LT,0x04,d,a,0,1,i)
#define M_ADDGT_IMM(d,a,i) M_DAT(COND_GT,0x04,d,a,0,1,i)
#define M_SUBLT_IMM(d,a,i) M_DAT(COND_LT,0x02,d,a,0,1,i)
#define M_RSBMI_IMM(d,a,i) M_DAT(COND_MI,0x03,d,a,0,1,i)
#define M_ADCMI_IMM(d,a,i) M_DAT(COND_MI,0x05,d,a,0,1,i)
+#define M_CMPEQ(a,b) M_DAT(COND_EQ,0x0a,0,a,1,0,b) /* TST a - b */
+#define M_CMPLE(a,b) M_DAT(COND_LE,0x0a,0,a,1,0,b) /* TST a - b */
+
#define M_MUL(d,a,b) M_MULT(UNCOND,d,a,b,0,0,0x0) /* d = a * b */