Christian Thalinger
Christian Ullrich
- $Id: stack.c 5517 2006-09-15 15:52:02Z edwin $
+ $Id: stack.c 5675 2006-10-04 19:38:28Z edwin $
*/
/* macro for saving #ifdefs ***************************************************/
-#if defined(ENABLE_INTRP)
-#define IF_INTRP(x) if (opt_intrp) { x }
-#define IF_NO_INTRP(x) if (!opt_intrp) { x }
-#else
-#define IF_INTRP(x)
-#define IF_NO_INTRP(x) { x }
-#endif
-
-#if defined(ENABLE_INTRP)
-#if defined(ENABLE_JIT)
-#define IF_JIT(x) if (!opt_intrp) { x }
-#else
-#define IF_JIT(x)
-#endif
-#else /* !defined(ENABLE_INTRP) */
-#define IF_JIT(x) { x }
-#endif /* defined(ENABLE_INTRP) */
-
#if defined(ENABLE_STATISTICS)
#define STATISTICS_STACKDEPTH_DISTRIBUTION(distr) \
do { \
/* macros for querying variable properties **************************/
-#define IS_OUTVAR(sp) \
- (sd.var[(sp)->varnum].flags & OUTVAR)
+#define IS_INOUT(sp) \
+ (sd.var[(sp)->varnum].flags & INOUT)
#define IS_PREALLOC(sp) \
(sd.var[(sp)->varnum].flags & PREALLOC)
#define IS_TEMPVAR(sp) \
( ((sp)->varnum >= sd.localcount) \
- && !(sd.var[(sp)->varnum].flags & (OUTVAR | PREALLOC)) )
+ && !(sd.var[(sp)->varnum].flags & (INOUT | PREALLOC)) )
#define IS_LOCALVAR_SD(sd, sp) \
if ((sp)->creator) \
(sp)->creator->dst.varindex = new_index; \
} \
- sd.var[(sp)->varnum].flags &= ~(OUTVAR | PREALLOC); \
+ sd.var[(sp)->varnum].flags &= ~(INOUT | PREALLOC); \
} while (0);
#define SET_PREALLOC(sp) \
(d)->creator = iptr; \
} while (0)
+#define MOVE_TO_TEMP(sp) \
+ do { \
+ GET_NEW_INDEX(sd, new_index); \
+ iptr->opc = ICMD_MOVE; \
+ iptr->s1.varindex = (sp)->varnum; \
+ iptr->dst.varindex = new_index; \
+ COPY_VAL_AND_TYPE(sd, (sp)->varnum, new_index); \
+ (sp)->varnum = new_index; \
+ (sp)->varkind = TEMPVAR; \
+ } while (0)
/* macros for branching / reaching basic blocks *********************/
b->next = sd->last_real_block->next;
sd->last_real_block->next = b;
sd->last_real_block = b;
- sd->jd->new_basicblockcount++;
+ sd->jd->basicblockcount++;
}
clone->original = (b->original) ? b->original : b;
clone->copied_to = clone->original->copied_to;
clone->original->copied_to = clone;
- clone->nr = sd->m->c_debug_nr++;
+ clone->nr = sd->m->c_block_nr++;
clone->next = NULL;
clone->flags = BBREACHED;
b->invars[i] = --index;
v = sd->var + index;
v->type = sp->type;
- v->flags = OUTVAR;
+ v->flags = INOUT;
v->vv = sd->var[sp->varnum].vv;
#if defined(STACK_VERBOSE) && 0
printf("\tinvar[%d]: %d\n", i, sd->var[b->invars[i]]);
sv = sd->var + sd->bptr->outvars[i];
b->invars[i] = sd->vartop++;
dv->type = sv->type;
- dv->flags = OUTVAR;
+ dv->flags = INOUT;
dv->vv = sv->vv;
}
}
ex = DNEW(exceptiontable);
ex->handler = sd->handlers[i]->handler;
ex->start = b;
- ex->end = b; /* XXX hack, see end of new_stack_analyse */
+ ex->end = b; /* XXX hack, see end of stack_analyse */
ex->catchtype = sd->handlers[i]->catchtype;
ex->down = NULL;
for (len = b->icount; len--; iptr++) {
#if defined(STACK_VERBOSE)
- new_show_icmd(sd->jd, iptr, false, SHOW_STACK);
+ show_icmd(sd->jd, iptr, false, SHOW_STACK);
printf("\n");
#endif
} /* switch */
#if defined(STACK_VERBOSE)
- new_show_icmd(sd->jd, iptr, false, SHOW_STACK);
+ show_icmd(sd->jd, iptr, false, SHOW_STACK);
printf("\n");
#endif
}
*******************************************************************************/
-bool new_stack_analyse(jitdata *jd)
+bool stack_analyse(jitdata *jd)
{
methodinfo *m; /* method being analyzed */
codeinfo *code;
int new_index; /* used to get a new var index with GET_NEW_INDEX*/
#if defined(STACK_VERBOSE)
- new_show_method(jd, SHOW_PARSE);
+ show_method(jd, SHOW_PARSE);
#endif
/* get required compiler data - initialization */
/* find the last real basic block */
sd.last_real_block = NULL;
- tbptr = jd->new_basicblocks;
+ tbptr = jd->basicblocks;
while (tbptr->next) {
sd.last_real_block = tbptr;
tbptr = tbptr->next;
/* initialize flags and invars (none) of first block */
- jd->new_basicblocks[0].flags = BBREACHED;
- jd->new_basicblocks[0].invars = NULL;
- jd->new_basicblocks[0].indepth = 0;
- jd->new_basicblocks[0].inlocals =
+ jd->basicblocks[0].flags = BBREACHED;
+ jd->basicblocks[0].invars = NULL;
+ jd->basicblocks[0].indepth = 0;
+ jd->basicblocks[0].inlocals =
DMNEW(varinfo, jd->localcount + VERIFIER_EXTRA_LOCALS);
- MCOPY(jd->new_basicblocks[0].inlocals, jd->var, varinfo,
+ MCOPY(jd->basicblocks[0].inlocals, jd->var, varinfo,
jd->localcount + VERIFIER_EXTRA_LOCALS);
/* stack analysis loop (until fixpoint reached) **************************/
/* initialize loop over basic blocks */
- sd.bptr = jd->new_basicblocks;
+ sd.bptr = jd->basicblocks;
superblockend = true;
sd.repeat = false;
curstack = NULL; stackdepth = 0;
/* reset the new pointer for allocating stackslots */
- sd.new = jd->new_stack;
+ sd.new = jd->stack;
/* create the instack of this block */
superblockend = false;
len = sd.bptr->icount;
iptr = sd.bptr->iinstr;
- b_index = sd.bptr - jd->new_basicblocks;
+ b_index = sd.bptr - jd->basicblocks;
/* mark the block as analysed */
while (--len >= 0) {
#if defined(STACK_VERBOSE)
- new_show_icmd(jd, iptr, false, SHOW_PARSE); printf("\n");
+ show_icmd(jd, iptr, false, SHOW_PARSE); printf("\n");
for( copy = curstack; copy; copy = copy->prev ) {
printf("%2d(%d", copy->varnum, copy->type);
- if (IS_OUTVAR(copy))
+ if (IS_INOUT(copy))
printf("S");
if (IS_PREALLOC(copy))
printf("A");
/* automatically replace some ICMDs with builtins */
#if defined(USEBUILTINTABLE)
- IF_NO_INTRP(
- bte = builtintable_get_automatic(opcode);
-
- if (bte && bte->opcode == opcode) {
- iptr->opc = ICMD_BUILTIN;
- iptr->flags.bits = 0;
- iptr->sx.s23.s3.bte = bte;
- /* iptr->line is already set */
- jd->isleafmethod = false;
- goto icmd_BUILTIN;
- }
- );
+ bte = builtintable_get_automatic(opcode);
+
+ if (bte && bte->opcode == opcode) {
+ iptr->opc = ICMD_BUILTIN;
+ iptr->flags.bits = 0;
+ iptr->sx.s23.s3.bte = bte;
+ /* iptr->line is already set */
+ jd->isleafmethod = false;
+ goto icmd_BUILTIN;
+ }
#endif /* defined(USEBUILTINTABLE) */
/* main opcode switch *************************************/
CLR_SX;
iptr->dst.block = stack_mark_reached(&sd, sd.var[j].vv.retaddr, curstack, stackdepth);
-#if 0
- IF_NO_INTRP( rd->locals[iptr->s1.localindex/*XXX invalid here*/][TYPE_ADR].type = TYPE_ADR; );
-#endif
superblockend = true;
break;
case ICMD_BASTORE:
case ICMD_CASTORE:
case ICMD_SASTORE:
- IF_INTRP( goto normal_ICONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.i != 0)
goto normal_ICONST;
case ICMD_PUTSTATIC:
case ICMD_PUTFIELD:
- IF_INTRP( goto normal_ICONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.i != 0)
goto normal_ICONST;
#if SUPPORT_CONST_STORE
case ICMD_LASTORE:
- IF_INTRP( goto normal_LCONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.l != 0)
goto normal_LCONST;
case ICMD_PUTSTATIC:
case ICMD_PUTFIELD:
- IF_INTRP( goto normal_LCONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.l != 0)
goto normal_LCONST;
coalescing_boundary = sd.new;
COUNT(count_pcmd_load);
#if SUPPORT_CONST_STORE
- IF_INTRP( goto normal_ACONST; )
-
/* We can only optimize if the ACONST is resolved
* and there is an instruction after it. */
/* if the variable is already coalesced, don't bother */
- /* We do not need to check against OUTVAR, as invars */
+ /* We do not need to check against INOUT, as invars */
/* are always before the coalescing boundary. */
if (curstack->varkind == LOCALVAR)
assert((curstack->varkind == TEMPVAR)
|| (curstack->varkind == UNDEFVAR));
assert(!IS_LOCALVAR(curstack)); /* XXX correct? */
- assert(!IS_OUTVAR(curstack));
+ assert(!IS_INOUT(curstack));
assert(!IS_PREALLOC(curstack));
assert(curstack->creator);
case ICMD_DRETURN:
case ICMD_ARETURN:
coalescing_boundary = sd.new;
- /* Assert here that no LOCAL or OUTVARS get */
+ /* Assert here that no LOCAL or INOUTS get */
                        /* preallocated, since the macros are not       */
                        /* available in md-abi.c!                       */
- IF_JIT( if (IS_TEMPVAR(curstack)) \
- md_return_alloc(jd, curstack); )
+ if (IS_TEMPVAR(curstack))
+ md_return_alloc(jd, curstack);
COUNT(count_pcmd_return);
OP1_0(opcode - ICMD_IRETURN);
superblockend = true;
POPANY; POPANY;
stackdepth -= 2;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+
DUP_SLOT(src2); dst1 = curstack; stackdepth++;
MOVE_UP(src1); iptr++; len--;
POPANY; POPANY; POPANY;
stackdepth -= 3;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+
DUP_SLOT(src2); dst1 = curstack; stackdepth++;
DUP_SLOT(src3); dst2 = curstack; stackdepth++;
POPANY; POPANY; POPANY;
stackdepth -= 3;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+
DUP_SLOT(src3); dst1 = curstack; stackdepth++;
MOVE_UP(src1); iptr++; len--;
POPANY; POPANY; POPANY; POPANY;
stackdepth -= 4;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src4)) {
+ MOVE_TO_TEMP(src4); iptr++; len--;
+ }
+
DUP_SLOT(src3); dst1 = curstack; stackdepth++;
DUP_SLOT(src4); dst2 = curstack; stackdepth++;
POPANY; POPANY;
stackdepth -= 2;
+ /* move non-temporary sources out of the way */
if (!IS_TEMPVAR(src1)) {
- /* move src1 out of the way into a temporary */
- GET_NEW_INDEX(sd, new_index);
- iptr->opc = ICMD_MOVE;
- iptr->s1.varindex = src1->varnum;
- iptr->dst.varindex = new_index;
- COPY_VAL_AND_TYPE(sd, src1->varnum, new_index);
- iptr++; len--;
- src1->varnum = new_index;
+ MOVE_TO_TEMP(src1); iptr++; len--;
}
MOVE_UP(src2); iptr++; len--;
/* do not change STACKVARs or LOCALVARS to ARGVAR*/
/* -> won't help anyway */
- if (!(IS_OUTVAR(copy) || IS_LOCALVAR(copy))) {
+ if (!(IS_INOUT(copy) || IS_LOCALVAR(copy))) {
#if defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
/* If we pass float arguments in integer argument registers, we
SET_PREALLOC(copy);
-#if defined(ENABLE_INTRP)
- if (!opt_intrp) {
-#endif
- if (md->params[i].inmemory) {
- sd.var[copy->varnum].vv.regoff =
- md->params[i].regoff;
- sd.var[copy->varnum].flags |=
- INMEMORY;
- }
- else {
- if (IS_FLT_DBL_TYPE(copy->type)) {
+ if (md->params[i].inmemory) {
+ sd.var[copy->varnum].vv.regoff =
+ md->params[i].regoff;
+ sd.var[copy->varnum].flags |=
+ INMEMORY;
+ }
+ else {
+ if (IS_FLT_DBL_TYPE(copy->type)) {
#if defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
- assert(0); /* XXX is this assert ok? */
+ assert(0); /* XXX is this assert ok? */
#else
- sd.var[copy->varnum].vv.regoff =
- rd->argfltregs[md->params[i].regoff];
+ sd.var[copy->varnum].vv.regoff =
+ rd->argfltregs[md->params[i].regoff];
#endif /* SUPPORT_PASS_FLOATARGS_IN_INTREGS */
- }
- else {
+ }
+ else {
#if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
- if (IS_2_WORD_TYPE(copy->type))
- sd.var[copy->varnum].vv.regoff =
- PACK_REGS( rd->argintregs[GET_LOW_REG(md->params[i].regoff)],
- rd->argintregs[GET_HIGH_REG(md->params[i].regoff)]);
+ if (IS_2_WORD_TYPE(copy->type))
+ sd.var[copy->varnum].vv.regoff =
+ PACK_REGS( rd->argintregs[GET_LOW_REG(md->params[i].regoff)],
+ rd->argintregs[GET_HIGH_REG(md->params[i].regoff)]);
- else
+ else
#endif /* SUPPORT_COMBINE_INTEGER_REGISTERS */
- sd.var[copy->varnum].vv.regoff =
- rd->argintregs[md->params[i].regoff];
- }
+ sd.var[copy->varnum].vv.regoff =
+ rd->argintregs[md->params[i].regoff];
}
-#if defined(ENABLE_INTRP)
- } /* end if (!opt_intrp) */
-#endif
+ }
}
}
copy = copy->prev;
/* check INT type here? Currently typecheck does this. */
iptr->sx.s23.s2.args[i] = copy->varnum;
if (!(sd.var[copy->varnum].flags & SAVEDVAR)
- && (!IS_OUTVAR(copy))
+ && (!IS_INOUT(copy))
&& (!IS_LOCALVAR(copy)) ) {
copy->varkind = ARGVAR;
sd.var[copy->varnum].flags |=
}
while (copy) {
sd.var[copy->varnum].flags |= SAVEDVAR;
+ copy->flags |= SAVEDVAR;
copy = copy->prev;
}
t = TYPE_ADR;
v = sd.var + copy->varnum;
- v->flags |= OUTVAR;
+ v->flags |= INOUT;
if (jd->interface_map[i*5 + t].flags == UNUSED) {
/* no interface var until now for this depth and */
}
/* check if interface slots at basic block begin must be saved */
- IF_NO_INTRP(
- for (i=0; i<sd.bptr->indepth; ++i) {
- varinfo *v = sd.var + sd.bptr->invars[i];
- s4 t;
-
- t = v->type;
- if (t == TYPE_RET)
- t = TYPE_ADR;
-
- if (jd->interface_map[i*5 + t].flags == UNUSED) {
- /* no interface var until now for this depth and */
- /* type */
- jd->interface_map[i*5 + t].flags = v->flags;
- }
- else {
- jd->interface_map[i*5 + t].flags |= v->flags;
- }
+
+ for (i=0; i<sd.bptr->indepth; ++i) {
+ varinfo *v = sd.var + sd.bptr->invars[i];
+ s4 t;
+
+ t = v->type;
+ if (t == TYPE_RET)
+ t = TYPE_ADR;
+
+ if (jd->interface_map[i*5 + t].flags == UNUSED) {
+ /* no interface var until now for this depth and */
+ /* type */
+ jd->interface_map[i*5 + t].flags = v->flags;
+ }
+ else {
+ jd->interface_map[i*5 + t].flags |= v->flags;
}
- );
+ }
/* store the number of this block's variables */
#if defined(ENABLE_STATISTICS)
if (opt_stat) {
- if (jd->new_basicblockcount > count_max_basic_blocks)
- count_max_basic_blocks = jd->new_basicblockcount;
- count_basic_blocks += jd->new_basicblockcount;
- if (jd->new_instructioncount > count_max_javainstr)
- count_max_javainstr = jd->new_instructioncount;
- count_javainstr += jd->new_instructioncount;
- if (jd->new_stackcount > count_upper_bound_new_stack)
- count_upper_bound_new_stack = jd->new_stackcount;
- if ((sd.new - jd->new_stack) > count_max_new_stack)
- count_max_new_stack = (sd.new - jd->new_stack);
-
- sd.bptr = jd->new_basicblocks;
+ if (jd->basicblockcount > count_max_basic_blocks)
+ count_max_basic_blocks = jd->basicblockcount;
+ count_basic_blocks += jd->basicblockcount;
+ if (jd->instructioncount > count_max_javainstr)
+ count_max_javainstr = jd->instructioncount;
+ count_javainstr += jd->instructioncount;
+ if (jd->stackcount > count_upper_bound_new_stack)
+ count_upper_bound_new_stack = jd->stackcount;
+ if ((sd.new - jd->stack) > count_max_new_stack)
+ count_max_new_stack = (sd.new - jd->stack);
+
+ sd.bptr = jd->basicblocks;
for (; sd.bptr; sd.bptr = sd.bptr->next) {
if (sd.bptr->flags > BBREACHED) {
if (sd.bptr->indepth >= 10)
else
count_analyse_iterations[4]++;
- if (jd->new_basicblockcount <= 5)
+ if (jd->basicblockcount <= 5)
count_method_bb_distribution[0]++;
- else if (jd->new_basicblockcount <= 10)
+ else if (jd->basicblockcount <= 10)
count_method_bb_distribution[1]++;
- else if (jd->new_basicblockcount <= 15)
+ else if (jd->basicblockcount <= 15)
count_method_bb_distribution[2]++;
- else if (jd->new_basicblockcount <= 20)
+ else if (jd->basicblockcount <= 20)
count_method_bb_distribution[3]++;
- else if (jd->new_basicblockcount <= 30)
+ else if (jd->basicblockcount <= 30)
count_method_bb_distribution[4]++;
- else if (jd->new_basicblockcount <= 40)
+ else if (jd->basicblockcount <= 40)
count_method_bb_distribution[5]++;
- else if (jd->new_basicblockcount <= 50)
+ else if (jd->basicblockcount <= 50)
count_method_bb_distribution[6]++;
- else if (jd->new_basicblockcount <= 75)
+ else if (jd->basicblockcount <= 75)
count_method_bb_distribution[7]++;
else
count_method_bb_distribution[8]++;
exceptions_throw_verifyerror(m, "Stack size too large");
return false;
-throw_stack_depth_error:
- exceptions_throw_verifyerror(m,"Stack depth mismatch");
- return false;
-
throw_stack_type_error:
exceptions_throw_verifyerror_for_stack(m, expectedtype);
return false;