/* src/vm/jit/stack.c - stack analysis
- Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
- C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
- E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
- J. Wenninger, Institut f. Computersprachen - TU Wien
+ Copyright (C) 1996-2005, 2006, 2007, 2008
+ CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
This file is part of CACAO.
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- $Id: stack.c 7486 2007-03-08 13:50:07Z twisti $
-
*/
#include "vm/jit/jit.h"
#include "vm/jit/stack.h"
+#if 0
#if defined(ENABLE_SSA)
# include "vm/jit/optimizing/lsra.h"
# include "vm/jit/optimizing/ssa.h"
#elif defined(ENABLE_LSRA)
# include "vm/jit/allocator/lsra.h"
#endif
+#endif
#include "vmcore/options.h"
#include "vm/resolve.h"
/* stackdata_t: per-invocation working state of the stack analysis pass.
 * NOTE(review): this region is a diff hunk; the '-'/'+' prefixed lines are
 * the pre-/post-patch variants of the same field (the stackptr typedef is
 * being replaced by an explicit stackelement_t pointer) and are preserved
 * verbatim here. */
struct stackdata_t {
basicblock *bptr; /* the current basic block being analysed */
- stackptr new; /* next free stackelement */
+ stackelement_t *new; /* next free stackelement */
s4 vartop; /* next free variable index */
s4 localcount; /* number of locals (at the start of var) */
s4 varcount; /* maximum number of variables expected */
bool repeat; /* if true, iterate the analysis again */
exception_entry **handlers; /* exception handlers for the current block */
exception_entry *extableend; /* points to the last exception entry */
- stackelement exstack; /* instack for exception handlers */
+ stackelement_t exstack; /* instack for exception handlers */
};
#define BRANCH_TARGET(bt, tempbptr) \
do { \
- tempbptr = BLOCK_OF((bt).insindex); \
+ tempbptr = (bt).block; \
tempbptr = stack_mark_reached(&sd, tempbptr, curstack, \
stackdepth); \
if (tempbptr == NULL) \
/* forward declarations *******************************************************/
static void stack_create_invars(stackdata_t *sd, basicblock *b,
- stackptr curstack, int stackdepth);
+ stackelement_t * curstack, int stackdepth);
static void stack_create_invars_from_outvars(stackdata_t *sd, basicblock *b);
#if defined(STACK_VERBOSE)
static void stack_verbose_block_enter(stackdata_t *sd, bool reanalyse);
static void stack_verbose_block_exit(stackdata_t *sd, bool superblockend);
static void stack_verbose_show_state(stackdata_t *sd, instruction *iptr,
- stackptr curstack);
+ stackelement_t * curstack);
#endif
*******************************************************************************/
static void stack_create_invars(stackdata_t *sd, basicblock *b,
- stackptr curstack, int stackdepth)
+ stackelement_t * curstack, int stackdepth)
{
- stackptr sp;
+ stackelement_t * sp;
int i;
int index;
varinfo *dv;
*******************************************************************************/
static basicblock * stack_check_invars(stackdata_t *sd, basicblock *b,
- stackptr curstack, int stackdepth)
+ stackelement_t * curstack, int stackdepth)
{
int i;
- stackptr sp;
+ stackelement_t * sp;
basicblock *orig;
bool separable;
varinfo *sv;
*******************************************************************************/
-static stackptr stack_create_instack(stackdata_t *sd)
+static stackelement_t * stack_create_instack(stackdata_t *sd)
{
- stackptr sp;
+ stackelement_t * sp;
int depth;
int index;
*******************************************************************************/
-static basicblock *stack_mark_reached(stackdata_t *sd, basicblock *b, stackptr curstack, int stackdepth)
+static basicblock *stack_mark_reached(stackdata_t *sd, basicblock *b, stackelement_t * curstack, int stackdepth)
{
+ assert(b != NULL);
+
#if defined(STACK_VERBOSE)
printf("stack_mark_reached(L%03d from L%03d)\n", b->nr, sd->bptr->nr);
#endif
+
/* mark targets of backward branches */
+
if (b->nr <= sd->bptr->nr)
b->bitflags |= BBFLAG_REPLACEMENT;
static basicblock *stack_mark_reached_from_outvars(stackdata_t *sd, basicblock *b)
{
+ assert(b != NULL);
+
#if defined(STACK_VERBOSE)
printf("stack_mark_reached_from_outvars(L%03d from L%03d)\n", b->nr, sd->bptr->nr);
#endif
+
/* mark targets of backward branches */
+
if (b->nr <= sd->bptr->nr)
b->bitflags |= BBFLAG_REPLACEMENT;
/* stack_reach_next_block ******************************************************
- Mark the following block reached and propagate the outvars of the current block
- and the current locals to it. This function specializes the target block,
- if necessary, and returns a pointer to the specialized target.
+ Mark the following block reached and propagate the outvars of the
+ current block and the current locals to it. This function
+ specializes the target block, if necessary, and returns a pointer
+ to the specialized target.
IN:
sd...........stack analysis data
tbptr = (sd->bptr->original) ? sd->bptr->original : sd->bptr;
tbptr = stack_mark_reached_from_outvars(sd, tbptr->next);
- if (!tbptr)
+
+ if (tbptr == NULL)
return false;
if (tbptr != sd->bptr->next) {
assert(iptr->opc == ICMD_NOP);
iptr->opc = ICMD_GOTO;
iptr->dst.block = tbptr;
+#if defined(STACK_VERBOSE)
+ if (iptr->line == 0) printf("goto with line 0 in L%03d\n", sd->bptr->nr);
+#endif
if (tbptr->flags < BBFINISHED)
sd->repeat = true; /* XXX check if we really need to repeat */
s4 blockvarstart;
s4 invarshift;
s4 blockvarshift;
- s4 i, j;
+ s4 i, varindex;
s4 *argp;
branch_target_t *table;
lookup_target_t *lookup;
switch (iptr->opc) {
case ICMD_RET:
- j = iptr->s1.varindex;
+ varindex = iptr->s1.varindex;
#if defined(ENABLE_VERIFIER)
- if (sd->var[j].type != TYPE_RET) {
+ if (sd->var[varindex].type != TYPE_RET) {
exceptions_throw_verifyerror(sd->m, "RET with non-returnAddress value");
return false;
}
#endif
- iptr->dst.block = stack_mark_reached_from_outvars(sd, sd->var[j].vv.retaddr);
+ iptr->dst.block = stack_mark_reached_from_outvars(sd, sd->var[varindex].vv.retaddr);
superblockend = true;
break;
case ICMD_ASTORE:
RELOCATE(iptr->s1.varindex);
- j = iptr->dst.varindex;
- COPY_VAL_AND_TYPE(*sd, iptr->s1.varindex, j);
+ varindex = iptr->dst.varindex;
+ COPY_VAL_AND_TYPE(*sd, iptr->s1.varindex, varindex);
i = iptr->sx.s23.s3.javaindex;
if (iptr->flags.bits & INS_FLAG_RETADDR) {
iptr->sx.s23.s2.retaddrnr =
- UNUSED - (1 + sd->var[j].vv.retaddr->nr);
+ JAVALOCAL_FROM_RETADDR(sd->var[varindex].vv.retaddr->nr);
sd->javalocals[i] = iptr->sx.s23.s2.retaddrnr;
}
else
- sd->javalocals[i] = j;
+ sd->javalocals[i] = varindex;
if (iptr->flags.bits & INS_FLAG_KILL_PREV)
sd->javalocals[i-1] = UNUSED;
if (iptr->flags.bits & INS_FLAG_KILL_NEXT)
case ICMD_IF_LCMPGT:
case ICMD_IF_LCMPLE:
- case ICMD_IF_FCMPEQ:
- case ICMD_IF_FCMPNE:
-
- case ICMD_IF_FCMPL_LT:
- case ICMD_IF_FCMPL_GE:
- case ICMD_IF_FCMPL_GT:
- case ICMD_IF_FCMPL_LE:
-
- case ICMD_IF_FCMPG_LT:
- case ICMD_IF_FCMPG_GE:
- case ICMD_IF_FCMPG_GT:
- case ICMD_IF_FCMPG_LE:
-
- case ICMD_IF_DCMPEQ:
- case ICMD_IF_DCMPNE:
-
- case ICMD_IF_DCMPL_LT:
- case ICMD_IF_DCMPL_GE:
- case ICMD_IF_DCMPL_GT:
- case ICMD_IF_DCMPL_LE:
-
- case ICMD_IF_DCMPG_LT:
- case ICMD_IF_DCMPG_GE:
- case ICMD_IF_DCMPG_GT:
- case ICMD_IF_DCMPG_LE:
-
case ICMD_IF_ACMPEQ:
case ICMD_IF_ACMPNE:
RELOCATE(iptr->sx.s23.s2.varindex);
*******************************************************************************/
-static void stack_change_to_tempvar(stackdata_t *sd, stackptr sp,
+static void stack_change_to_tempvar(stackdata_t *sd, stackelement_t * sp,
instruction *ilimit)
{
s4 newindex;
static void stack_init_javalocals(stackdata_t *sd)
{
s4 *jl;
- s4 t,i,j;
+ s4 type,i,j;
methoddesc *md;
jitdata *jd;
md = jd->m->parseddesc;
j = 0;
for (i=0; i<md->paramcount; ++i) {
- t = md->paramtypes[i].type;
- jl[j] = jd->local_map[5*j + t];
+ type = md->paramtypes[i].type;
+ jl[j] = jd->local_map[5*j + type];
j++;
- if (IS_2_WORD_TYPE(t))
+ if (IS_2_WORD_TYPE(type))
j++;
}
}
bool stack_analyse(jitdata *jd)
{
methodinfo *m; /* method being analyzed */
+ codeinfo *code;
registerdata *rd;
stackdata_t sd;
-#if defined(ENABLE_SSA)
- lsradata *ls;
-#endif
int stackdepth;
- stackptr curstack; /* current stack top */
- stackptr copy;
+ stackelement_t *curstack; /* current stack top */
+ stackelement_t *copy;
int opcode; /* opcode of current instruction */
- int i, j;
+ int i, varindex;
int javaindex;
+ int type; /* operand type */
int len; /* # of instructions after the current one */
bool superblockend; /* if true, no fallthrough to next block */
bool deadcode; /* true if no live code has been reached */
basicblock *original;
exception_entry *ex;
- stackptr *last_store_boundary;
- stackptr coalescing_boundary;
+ stackelement_t **last_store_boundary;
+ stackelement_t *coalescing_boundary;
- stackptr src1, src2, src3, src4, dst1, dst2;
+ stackelement_t *src1, *src2, *src3, *src4, *dst1, *dst2;
branch_target_t *table;
lookup_target_t *lookup;
/* get required compiler data - initialization */
m = jd->m;
+ code = jd->code;
rd = jd->rd;
-#if defined(ENABLE_SSA)
- ls = jd->ls;
-#endif
/* initialize the stackdata_t struct */
sd.exstack.varnum = sd.localcount;
sd.var[sd.exstack.varnum].type = TYPE_ADR;
-#if defined(ENABLE_LSRA)
- m->maxlifetimes = 0;
-#endif
-
#if defined(ENABLE_STATISTICS)
iteration_count = 0;
#endif
for (i = 0; i < m->maxstack * 5; i++)
jd->interface_map[i].flags = UNUSED;
- last_store_boundary = DMNEW(stackptr, m->maxlocals);
+ last_store_boundary = DMNEW(stackelement_t *, m->maxlocals);
/* initialize flags and invars (none) of first block */
/* initialize loop over basic blocks */
- sd.bptr = jd->basicblocks;
+ sd.bptr = jd->basicblocks;
superblockend = true;
- sd.repeat = false;
- curstack = NULL; stackdepth = 0;
- deadcode = true;
+ sd.repeat = false;
+ curstack = NULL;
+ stackdepth = 0;
+ deadcode = true;
/* iterate over basic blocks *****************************************/
if (sd.bptr->flags == BBTYPECHECK_REACHED) {
/* re-analyse a block because its input changed */
+
deadcode = false;
+
if (!stack_reanalyse_block(&sd))
return false;
+
superblockend = true; /* XXX */
continue;
}
if (superblockend && (sd.bptr->flags < BBREACHED)) {
- /* This block has not been reached so far, and we */
- /* don't fall into it, so we'll have to iterate again. */
+ /* This block has not been reached so far, and we
+ don't fall into it, so we'll have to iterate
+ again. */
sd.repeat = true;
continue;
}
if (sd.bptr->original && sd.bptr->original->flags < BBFINISHED) {
- /* This block is a clone and the original has not been */
- /* analysed, yet. Analyse it on the next iteration. */
+ /* This block is a clone and the original has not been
+ analysed, yet. Analyse it on the next
+ iteration. */
sd.repeat = true;
/* XXX superblockend? */
/* automatically replace some ICMDs with builtins */
-#if defined(USEBUILTINTABLE)
bte = builtintable_get_automatic(opcode);
- if (bte && bte->opcode == opcode) {
- iptr->opc = ICMD_BUILTIN;
+ if ((bte != NULL) && (bte->opcode == opcode)) {
+ iptr->opc = ICMD_BUILTIN;
iptr->flags.bits &= INS_FLAG_ID_MASK;
- iptr->sx.s23.s3.bte = bte;
+ iptr->sx.s23.s3.bte = bte;
/* iptr->line is already set */
- jd->isleafmethod = false;
+ code_unflag_leafmethod(code);
goto icmd_BUILTIN;
}
-#endif /* defined(USEBUILTINTABLE) */
/* main opcode switch *************************************/
break;
case ICMD_RET:
- j = iptr->s1.varindex =
+ varindex = iptr->s1.varindex =
jd->local_map[iptr->s1.varindex * 5 + TYPE_ADR];
#if defined(ENABLE_VERIFIER)
- if (sd.var[j].type != TYPE_RET) {
+ if (sd.var[varindex].type != TYPE_RET) {
exceptions_throw_verifyerror(m, "RET with non-returnAddress value");
return false;
}
CLR_SX;
- iptr->dst.block = stack_mark_reached(&sd, sd.var[j].vv.retaddr, curstack, stackdepth);
+ iptr->dst.block = stack_mark_reached(&sd, sd.var[varindex].vv.retaddr, curstack, stackdepth);
superblockend = true;
break;
icmd_lconst_lcmp_tail:
/* convert LCONST, LCMP, IFXX to IF_LXX */
- iptr->dst.insindex = iptr[2].dst.insindex;
+ iptr->dst.block = iptr[2].dst.block;
iptr[1].opc = ICMD_NOP;
iptr[2].opc = ICMD_NOP;
case ICMD_DLOAD:
case ICMD_ALOAD:
COUNT(count_load_instruction);
- i = opcode - ICMD_ILOAD; /* type */
+ type = opcode - ICMD_ILOAD;
- j = iptr->s1.varindex =
- jd->local_map[iptr->s1.varindex * 5 + i];
+ varindex = iptr->s1.varindex =
+ jd->local_map[iptr->s1.varindex * 5 + type];
#if defined(ENABLE_VERIFIER)
- if (sd.var[j].type == TYPE_RET) {
+ if (sd.var[varindex].type == TYPE_RET) {
exceptions_throw_verifyerror(m, "forbidden load of returnAddress");
return false;
}
#endif
-
-#if defined(ENABLE_SSA)
- if (ls != NULL) {
- GET_NEW_VAR(sd, new_index, i);
- DST(i, new_index);
- stackdepth++;
- }
- else
-
-#else
- LOAD(i, j);
-#endif
+ LOAD(type, varindex);
break;
/* pop 2 push 1 */
case ICMD_IINC:
STATISTICS_STACKDEPTH_DISTRIBUTION(count_store_depth);
-#if defined(ENABLE_SSA)
- if (ls != NULL) {
- iptr->s1.varindex =
- jd->local_map[iptr->s1.varindex * 5 +TYPE_INT];
- }
- else {
-#endif
- last_store_boundary[iptr->s1.varindex] = sd.new;
+ javaindex = iptr->s1.varindex;
+ last_store_boundary[javaindex] = sd.new;
iptr->s1.varindex =
- jd->local_map[iptr->s1.varindex * 5 + TYPE_INT];
+ jd->local_map[javaindex * 5 + TYPE_INT];
copy = curstack;
i = stackdepth - 1;
while (copy) {
if ((copy->varkind == LOCALVAR) &&
- (copy->varnum == iptr->s1.varindex))
+ (jd->reverselocalmap[copy->varnum] == javaindex))
{
assert(IS_LOCALVAR(copy));
SET_TEMPVAR(copy);
i--;
copy = copy->prev;
}
-#if defined(ENABLE_SSA)
- }
-#endif
iptr->dst.varindex = iptr->s1.varindex;
break;
case ICMD_ASTORE:
REQUIRE(1);
- i = opcode - ICMD_ISTORE; /* type */
+ type = opcode - ICMD_ISTORE;
javaindex = iptr->dst.varindex;
- j = iptr->dst.varindex =
- jd->local_map[javaindex * 5 + i];
+ varindex = iptr->dst.varindex =
+ jd->local_map[javaindex * 5 + type];
- COPY_VAL_AND_TYPE(sd, curstack->varnum, j);
+ COPY_VAL_AND_TYPE(sd, curstack->varnum, varindex);
iptr->sx.s23.s3.javaindex = javaindex;
if (curstack->type == TYPE_RET) {
iptr->flags.bits |= INS_FLAG_RETADDR;
iptr->sx.s23.s2.retaddrnr =
- UNUSED - (1 + sd.var[j].vv.retaddr->nr);
+ JAVALOCAL_FROM_RETADDR(sd.var[varindex].vv.retaddr->nr);
sd.javalocals[javaindex] = iptr->sx.s23.s2.retaddrnr;
}
else
- sd.javalocals[javaindex] = j;
+ sd.javalocals[javaindex] = varindex;
/* invalidate the following javalocal for 2-word types */
- if (IS_2_WORD_TYPE(i)) {
+ if (IS_2_WORD_TYPE(type)) {
sd.javalocals[javaindex+1] = UNUSED;
iptr->flags.bits |= INS_FLAG_KILL_NEXT;
}
/* invalidate 2-word types if second half was overwritten */
- if (javaindex > 0 && (i = sd.javalocals[javaindex-1]) != UNUSED) {
+ if (javaindex > 0 && (i = sd.javalocals[javaindex-1]) >= 0) {
if (IS_2_WORD_TYPE(sd.var[i].type)) {
sd.javalocals[javaindex-1] = UNUSED;
iptr->flags.bits |= INS_FLAG_KILL_PREV;
}
#endif
-#if defined(ENABLE_SSA)
- if (ls != NULL) {
-#endif
/* check for conflicts as described in Figure 5.2 */
copy = curstack->prev;
i = stackdepth - 2;
while (copy) {
if ((copy->varkind == LOCALVAR) &&
- (copy->varnum == j))
+ (jd->reverselocalmap[copy->varnum] == javaindex))
{
- copy->varkind = TEMPVAR;
assert(IS_LOCALVAR(copy));
SET_TEMPVAR(copy);
}
if (curstack < coalescing_boundary)
goto assume_conflict;
- /* there is no DEF LOCALVAR(j) while curstack is live */
+ /* there is no DEF LOCALVAR(varindex) while curstack is live */
copy = sd.new; /* most recent stackslot created + 1 */
while (--copy > curstack) {
- if (copy->varkind == LOCALVAR && copy->varnum == j)
+ if (copy->varkind == LOCALVAR && jd->reverselocalmap[copy->varnum] == javaindex)
goto assume_conflict;
}
assert(!(curstack->flags & PASSTHROUGH));
RELEASE_INDEX(sd, curstack);
curstack->varkind = LOCALVAR;
- curstack->varnum = j;
- curstack->creator->dst.varindex = j;
+ curstack->varnum = varindex;
+ curstack->creator->dst.varindex = varindex;
goto store_tail;
/* revert the coalescing, if it has been done earlier */
assume_conflict:
if ((curstack->varkind == LOCALVAR)
- && (curstack->varnum == j))
+ && (jd->reverselocalmap[curstack->varnum] == javaindex))
{
assert(IS_LOCALVAR(curstack));
SET_TEMPVAR(curstack);
/* remember the stack boundary at this store */
store_tail:
last_store_boundary[javaindex] = sd.new;
-#if defined(ENABLE_SSA)
- } /* if (ls != NULL) */
-#endif
if (opcode == ICMD_ASTORE && curstack->type == TYPE_RET)
- STORE(TYPE_RET, j);
+ STORE(TYPE_RET, varindex);
else
- STORE(opcode - ICMD_ISTORE, j);
+ STORE(opcode - ICMD_ISTORE, varindex);
break;
/* pop 3 push 0 */
COUNT(count_check_bound);
COUNT(count_pcmd_mem);
- bte = builtintable_get_internal(BUILTIN_canstore);
+ bte = builtintable_get_internal(BUILTIN_FAST_canstore);
md = bte->md;
if (md->memuse > rd->memuse)
case ICMD_IFEQ:
iptr->opc = ICMD_IF_LCMPEQ;
icmd_lcmp_if_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
+ iptr->dst.block = iptr[1].dst.block;
iptr[1].opc = ICMD_NOP;
OP2_BRANCH(TYPE_LNG, TYPE_LNG);
OP2_1(TYPE_LNG, TYPE_LNG, TYPE_INT);
break;
- /* XXX why is this deactivated? */
-#if 0
- case ICMD_FCMPL:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_FCMPL;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_FCMPEQ;
- icmd_if_fcmpl_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_FLT, TYPE_FLT);
- BRANCH(tbptr);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_FCMPNE;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_FCMPL_LT;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_FCMPL_GT;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_FCMPL_LE;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_FCMPL_GE;
- goto icmd_if_fcmpl_tail;
- default:
- goto normal_FCMPL;
- }
- break;
-
-normal_FCMPL:
- OPTT2_1(TYPE_FLT, TYPE_FLT, TYPE_INT);
- break;
-
- case ICMD_FCMPG:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_FCMPG;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_FCMPEQ;
- icmd_if_fcmpg_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_FLT, TYPE_FLT);
- BRANCH(tbptr);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_FCMPNE;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_FCMPG_LT;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_FCMPG_GT;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_FCMPG_LE;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_FCMPG_GE;
- goto icmd_if_fcmpg_tail;
- default:
- goto normal_FCMPG;
- }
- break;
-
-normal_FCMPG:
- OP2_1(TYPE_FLT, TYPE_FLT, TYPE_INT);
- break;
-
- case ICMD_DCMPL:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_DCMPL;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_DCMPEQ;
- icmd_if_dcmpl_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_DBL, TYPE_DBL);
- BRANCH(tbptr);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_DCMPNE;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_DCMPL_LT;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_DCMPL_GT;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_DCMPL_LE;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_DCMPL_GE;
- goto icmd_if_dcmpl_tail;
- default:
- goto normal_DCMPL;
- }
- break;
-
-normal_DCMPL:
- OPTT2_1(TYPE_DBL, TYPE_INT);
- break;
-
- case ICMD_DCMPG:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_DCMPG;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_DCMPEQ;
- icmd_if_dcmpg_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_DBL, TYPE_DBL);
- BRANCH(tbptr);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_DCMPNE;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_DCMPG_LT;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_DCMPG_GT;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_DCMPG_LE;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_DCMPG_GE;
- goto icmd_if_dcmpg_tail;
- default:
- goto normal_DCMPG;
- }
- break;
-
-normal_DCMPG:
- OP2_1(TYPE_DBL, TYPE_DBL, TYPE_INT);
- break;
-#else
case ICMD_FCMPL:
case ICMD_FCMPG:
COUNT(count_pcmd_op);
COUNT(count_pcmd_op);
OP2_1(TYPE_DBL, TYPE_DBL, TYPE_INT);
break;
-#endif
/* pop 1 push 1 */
case ICMD_JSR:
OP0_1(TYPE_RET);
- tbptr = BLOCK_OF(iptr->sx.s23.s3.jsrtarget.insindex);
+ tbptr = iptr->sx.s23.s3.jsrtarget.block;
tbptr->type = BBTYPE_SBR;
assert(sd.bptr->next); /* XXX exception */
#endif
tbptr = stack_mark_reached(&sd, tbptr, curstack, stackdepth);
- if (!tbptr)
+ if (tbptr == NULL)
return false;
iptr->sx.s23.s3.jsrtarget.block = tbptr;
assert(0); /* XXX is this assert ok? */
#else
sd.var[copy->varnum].vv.regoff =
- rd->argfltregs[md->params[i].regoff];
+ md->params[i].regoff;
#endif /* SUPPORT_PASS_FLOATARGS_IN_INTREGS */
}
else {
#if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
if (IS_2_WORD_TYPE(copy->type))
sd.var[copy->varnum].vv.regoff =
- PACK_REGS( rd->argintregs[GET_LOW_REG(md->params[i].regoff)],
- rd->argintregs[GET_HIGH_REG(md->params[i].regoff)]);
+ PACK_REGS(GET_LOW_REG(md->params[i].regoff),
+ GET_HIGH_REG(md->params[i].regoff));
else
#endif /* SUPPORT_COMBINE_INTEGER_REGISTERS */
sd.var[copy->varnum].vv.regoff =
- rd->argintregs[md->params[i].regoff];
+ md->params[i].regoff;
}
}
}
i = stackdepth - 1;
for (copy = curstack; copy; i--, copy = copy->prev) {
varinfo *v;
- s4 t;
/* with the new vars rd->interfaces will be removed */
/* and all in and outvars have to be STACKVARS! */
/* create an unresolvable conflict */
SET_TEMPVAR(copy);
- t = copy->type;
+ type = copy->type;
v = sd.var + copy->varnum;
v->flags |= INOUT;
/* do not allocate variables for returnAddresses */
- if (t != TYPE_RET) {
- if (jd->interface_map[i*5 + t].flags == UNUSED) {
+ if (type != TYPE_RET) {
+ if (jd->interface_map[i*5 + type].flags == UNUSED) {
/* no interface var until now for this depth and */
/* type */
- jd->interface_map[i*5 + t].flags = v->flags;
+ jd->interface_map[i*5 + type].flags = v->flags;
}
else {
- jd->interface_map[i*5 + t].flags |= v->flags;
+ jd->interface_map[i*5 + type].flags |= v->flags;
}
}
for (i=0; i<sd.bptr->indepth; ++i) {
varinfo *v = sd.var + sd.bptr->invars[i];
- s4 t;
- t = v->type;
+ type = v->type;
- if (t != TYPE_RET) {
- if (jd->interface_map[i*5 + t].flags == UNUSED) {
+ if (type != TYPE_RET) {
+ if (jd->interface_map[i*5 + type].flags == UNUSED) {
/* no interface var until now for this depth and */
/* type */
- jd->interface_map[i*5 + t].flags = v->flags;
+ jd->interface_map[i*5 + type].flags = v->flags;
}
else {
- jd->interface_map[i*5 + t].flags |= v->flags;
+ jd->interface_map[i*5 + type].flags |= v->flags;
}
}
}
/* stack_javalocals_store: apply the javalocal effect of a *STORE instruction
 * to the given javalocals array. Writes the destination variable index (or,
 * when INS_FLAG_RETADDR is set, the encoded returnAddress number) into
 * javalocals[javaindex], and invalidates the neighbouring slot when the
 * store overwrites half of a 2-word local (KILL_PREV / KILL_NEXT flags).
 * NOTE(review): this region is a diff hunk; '-'/'+' lines are the pre-/post-
 * patch variants (j/idx renamed to javaindex/varindex, an assert added) and
 * are preserved verbatim. */
void stack_javalocals_store(instruction *iptr, s4 *javalocals)
{
- s4 idx; /* index into the jd->var array */
- s4 j; /* java local index */
+ s4 varindex; /* index into the jd->var array */
+ s4 javaindex; /* java local index */
- idx = iptr->dst.varindex;
- j = iptr->sx.s23.s3.javaindex;
+ varindex = iptr->dst.varindex;
+ javaindex = iptr->sx.s23.s3.javaindex;
- if (j != UNUSED) {
+ if (javaindex != UNUSED) {
+ assert(javaindex >= 0);
if (iptr->flags.bits & INS_FLAG_RETADDR)
- javalocals[j] = iptr->sx.s23.s2.retaddrnr;
+ javalocals[javaindex] = iptr->sx.s23.s2.retaddrnr;
else
- javalocals[j] = idx;
+ javalocals[javaindex] = varindex;
if (iptr->flags.bits & INS_FLAG_KILL_PREV)
- javalocals[j-1] = UNUSED;
+ javalocals[javaindex-1] = UNUSED;
if (iptr->flags.bits & INS_FLAG_KILL_NEXT)
- javalocals[j+1] = UNUSED;
+ javalocals[javaindex+1] = UNUSED;
}
}
printf("\n");
}
-static void stack_verbose_show_state(stackdata_t *sd, instruction *iptr, stackptr curstack)
+static void stack_verbose_show_state(stackdata_t *sd, instruction *iptr, stackelement_t *curstack)
{
- stackptr sp;
+ stackelement_t *sp;
s4 i;
s4 depth;
varinfo *v;
- stackptr *stack;
+ stackelement_t **stack;
printf(" javalocals ");
show_javalocals_array(sd->jd, sd->javalocals, sd->maxlocals, SHOW_STACK);
i++;
depth = i;
- stack = MNEW(stackptr, depth);
+ stack = MNEW(stackelement_t *, depth);
for(sp = curstack; sp; sp = sp->prev)
stack[--i] = sp;