/* src/vm/jit/stack.c - stack analysis
- Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
- C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
- E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
- J. Wenninger, Institut f. Computersprachen - TU Wien
+ Copyright (C) 1996-2005, 2006, 2007, 2008
+ CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
This file is part of CACAO.
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- Contact: cacao@cacaojvm.org
-
- Authors: Andreas Krall
-
- Changes: Edwin Steiner
- Christian Thalinger
- Christian Ullrich
-
- $Id: stack.c 5463 2006-09-11 14:37:06Z edwin $
-
*/
#include <assert.h>
#include <stdio.h>
#include <string.h>
-
-#include "vm/types.h"
+#include <limits.h>
#include "arch.h"
#include "md-abi.h"
#include "mm/memory.h"
+
#include "native/native.h"
+
#include "toolbox/logging.h"
+
#include "vm/global.h"
#include "vm/builtin.h"
-#include "vm/options.h"
-#include "vm/resolve.h"
-#include "vm/statistics.h"
#include "vm/stringlocal.h"
+#include "vm/types.h"
+
+#include "vm/jit/abi.h"
#include "vm/jit/cfg.h"
#include "vm/jit/codegen-common.h"
-#include "vm/jit/abi.h"
+#include "vm/jit/parse.h"
#include "vm/jit/show.h"
#if defined(ENABLE_DISASSEMBLER)
#include "vm/jit/jit.h"
#include "vm/jit/stack.h"
-#if defined(ENABLE_LSRA)
+#if 0
+#if defined(ENABLE_SSA)
+# include "vm/jit/optimizing/lsra.h"
+# include "vm/jit/optimizing/ssa.h"
+#elif defined(ENABLE_LSRA)
# include "vm/jit/allocator/lsra.h"
#endif
+#endif
-/*#define STACK_VERBOSE*/
+#include "vmcore/options.h"
+#include "vm/resolve.h"
+#if defined(ENABLE_STATISTICS)
+# include "vmcore/statistics.h"
+#endif
-/* macro for saving #ifdefs ***************************************************/
+/*#define STACK_VERBOSE*/
-#if defined(ENABLE_INTRP)
-#define IF_INTRP(x) if (opt_intrp) { x }
-#define IF_NO_INTRP(x) if (!opt_intrp) { x }
-#else
-#define IF_INTRP(x)
-#define IF_NO_INTRP(x) { x }
-#endif
-#if defined(ENABLE_INTRP)
-#if defined(ENABLE_JIT)
-#define IF_JIT(x) if (!opt_intrp) { x }
-#else
-#define IF_JIT(x)
-#endif
-#else /* !defined(ENABLE_INTRP) */
-#define IF_JIT(x) { x }
-#endif /* defined(ENABLE_INTRP) */
+/* macro for saving #ifdefs ***************************************************/
#if defined(ENABLE_STATISTICS)
#define STATISTICS_STACKDEPTH_DISTRIBUTION(distr) \
#define STATISTICS_STACKDEPTH_DISTRIBUTION(distr)
#endif
-/* stackdata_t ****************************************************************/
+
+#define MIN(a,b) (((a) < (b)) ? (a) : (b))
+
+
+/* For returnAddresses we use a field of the typeinfo to store from which */
+/* subroutine the returnAddress will return, if used. */
+/* XXX It would be nicer to use typeinfo.typeclass, but the verifier seems */
+/* to need it initialised to NULL. This should be investigated. */
+
+#if defined(ENABLE_VERIFIER)
+#define SBRSTART typeinfo.elementclass.any
+#endif
+
+
+/* stackdata_t *****************************************************************
+
+ This struct holds internal data during stack analysis.
+
+*******************************************************************************/
typedef struct stackdata_t stackdata_t;
struct stackdata_t {
- basicblock *bptr;
- stackptr new;
- s4 vartop;
- s4 localcount;
- s4 varcount;
- varinfo *var;
- methodinfo *m;
+ basicblock *bptr; /* the current basic block being analysed */
+ stackelement_t *new; /* next free stackelement */
+ s4 vartop; /* next free variable index */
+ s4 localcount; /* number of locals (at the start of var) */
+ s4 varcount; /* maximum number of variables expected */
+ s4 varsallocated; /* total number of variables allocated */
+ s4 maxlocals; /* max. number of Java locals */
+ varinfo *var; /* variable array (same as jd->var) */
+ s4 *javalocals; /* map from Java locals to jd->var indices */
+ methodinfo *m; /* the method being analysed */
+ jitdata *jd; /* current jitdata */
+ basicblock *last_real_block; /* the last block before the empty one */
+ bool repeat; /* if true, iterate the analysis again */
+ exception_entry **handlers; /* exception handlers for the current block */
+ exception_entry *extableend; /* points to the last exception entry */
+ stackelement_t exstack; /* instack for exception handlers */
};
-/* macros for allocating/releasing variable indices */
+/* macros for allocating/releasing variable indices *****************/
#define GET_NEW_INDEX(sd, new_varindex) \
do { \
(new_varindex) = ((sd).vartop)++; \
} while (0)
-/* not implemented now, can be used to reuse varindices */
-/* pay attention to not release a localvar once implementing it! */
+/* Not implemented now - could be used to reuse varindices. */
+/* Pay attention to not release a localvar once implementing it! */
#define RELEASE_INDEX(sd, varindex)
-#define GET_NEW_VAR(sd, new_varindex, newtype) \
+#define GET_NEW_VAR(sd, newvarindex, newtype) \
do { \
- GET_NEW_INDEX((sd), (new_varindex)); \
- (sd).var[new_index].type = (newtype); \
+ GET_NEW_INDEX((sd), (newvarindex)); \
+ (sd).var[newvarindex].type = (newtype); \
} while (0)
+
/* macros for querying variable properties **************************/
-#define IS_OUTVAR(sp) \
- (sd.var[(sp)->varnum].flags & OUTVAR)
+#define IS_INOUT(sp) \
+ (sd.var[(sp)->varnum].flags & INOUT)
#define IS_PREALLOC(sp) \
(sd.var[(sp)->varnum].flags & PREALLOC)
-#define IS_TEMPVAR(sp) \
- ( ((sp)->varnum >= sd.localcount) \
- && !(sd.var[(sp)->varnum].flags & (OUTVAR | PREALLOC)) )
+#define IS_TEMPVAR(sp) \
+ ( ((sp)->varnum >= sd.localcount) \
+ && !(sd.var[(sp)->varnum].flags & (INOUT | PREALLOC)) )
+
#define IS_LOCALVAR_SD(sd, sp) \
((sp)->varnum < (sd).localcount)
#define SET_TEMPVAR(sp) \
do { \
if (IS_LOCALVAR((sp))) { \
- GET_NEW_VAR(sd, new_index, (sp)->type); \
- sd.var[new_index].flags = (sp)->flags; \
- (sp)->varnum = new_index; \
- (sp)->varkind = TEMPVAR; \
- if ((sp)->creator) \
- (sp)->creator->dst.varindex = new_index; \
+ stack_change_to_tempvar(&sd, (sp), iptr); \
} \
- sd.var[(sp)->varnum].flags &= ~(OUTVAR | PREALLOC); \
+ sd.var[(sp)->varnum].flags &= ~(INOUT | PREALLOC); \
} while (0);
#define SET_PREALLOC(sp) \
sd.var[(sp)->varnum].flags |= PREALLOC; \
} while (0);
+
/* macros for source operands ***************************************/
#define CLR_S1 \
(iptr->s1.varindex = -1)
-#define USE_S1_LOCAL(type1)
-
#define USE_S1(type1) \
do { \
REQUIRE(1); \
} while (0)
/* macro for propagating constant values ****************************/

/* Copy the type of *sv to *dv. For returnAddresses (TYPE_RET) the
   value is propagated as well, and - when the verifier is enabled -
   the subroutine start (SBRSTART) too.                              */

#if defined(ENABLE_VERIFIER)
#define COPY_VAL_AND_TYPE_VAR(sv, dv)                                \
	do {                                                             \
		if (((dv)->type = (sv)->type) == TYPE_RET) {                 \
			(dv)->vv = (sv)->vv;                                     \
			(dv)->SBRSTART = (sv)->SBRSTART;                         \
		}                                                            \
	} while (0)
#else
#define COPY_VAL_AND_TYPE_VAR(sv, dv)                                \
	do {                                                             \
		if (((dv)->type = (sv)->type) == TYPE_RET) {                 \
			(dv)->vv = (sv)->vv;                                     \
		}                                                            \
	} while (0)
#endif

#define COPY_VAL_AND_TYPE(sd, sindex, dindex) \
	COPY_VAL_AND_TYPE_VAR((sd).var + (sindex), (sd).var + (dindex))
+
+
/* stack modelling macros *******************************************/
#define OP0_1(typed) \
do { \
CLR_S1; \
GET_NEW_VAR(sd, new_index, (typed)); \
- DST(typed, new_index); \
+ DST((typed), new_index); \
stackdepth++; \
} while (0)
/* macros for DUP elimination ***************************************/
+/* XXX replace NEW_VAR with NEW_INDEX */
#define DUP_SLOT(sp) \
do { \
- GET_NEW_VAR(sd, new_index, (sp)->type); \
- NEWSTACK((sp)->type, TEMPVAR, new_index); \
+ GET_NEW_VAR(sd, new_index, (sp)->type); \
+ COPY_VAL_AND_TYPE(sd, (sp)->varnum, new_index); \
+ NEWSTACK((sp)->type, TEMPVAR, new_index); \
} while(0)
/* does not check input stackdepth */
(d)->creator = iptr; \
} while (0)
+#define MOVE_TO_TEMP(sp) \
+ do { \
+ GET_NEW_INDEX(sd, new_index); \
+ iptr->opc = ICMD_MOVE; \
+ iptr->s1.varindex = (sp)->varnum; \
+ iptr->dst.varindex = new_index; \
+ COPY_VAL_AND_TYPE(sd, (sp)->varnum, new_index); \
+ (sp)->varnum = new_index; \
+ (sp)->varkind = TEMPVAR; \
+ } while (0)
/* macros for branching / reaching basic blocks *********************/
-#if defined(ENABLE_VERIFIER)
-#define MARKREACHED(b, c) \
+#define BRANCH_TARGET(bt, tempbptr) \
do { \
- if (!stack_mark_reached(&sd, (b), curstack, stackdepth)) \
+ tempbptr = (bt).block; \
+ tempbptr = stack_mark_reached(&sd, tempbptr, curstack, \
+ stackdepth); \
+ if (tempbptr == NULL) \
return false; \
+ (bt).block = tempbptr; \
} while (0)
-#else
-#define MARKREACHED(b, c) \
- do { \
- (void) stack_mark_reached(&sd, (b), curstack, stackdepth); \
- } while (0)
-#endif
-#define BRANCH_TARGET(bt, tempbptr, tempsp) \
- do { \
- (bt).block = tempbptr = BLOCK_OF((bt).insindex); \
- MARKREACHED(tempbptr, tempsp); \
- } while (0)
+#define BRANCH(tempbptr) \
+ BRANCH_TARGET(iptr->dst, tempbptr)
-#define BRANCH(tempbptr, tempsp) \
- do { \
- iptr->dst.block = tempbptr = BLOCK_OF(iptr->dst.insindex); \
- MARKREACHED(tempbptr, tempsp); \
- } while (0)
+
+/* forward declarations *******************************************************/
+
+static void stack_create_invars(stackdata_t *sd, basicblock *b,
+ stackelement_t * curstack, int stackdepth);
+static void stack_create_invars_from_outvars(stackdata_t *sd, basicblock *b);
+
+#if defined(STACK_VERBOSE)
+static void stack_verbose_show_varinfo(stackdata_t *sd, varinfo *v);
+static void stack_verbose_show_variable(stackdata_t *sd, s4 index);
+static void stack_verbose_show_block(stackdata_t *sd, basicblock *bptr);
+static void stack_verbose_block_enter(stackdata_t *sd, bool reanalyse);
+static void stack_verbose_block_exit(stackdata_t *sd, bool superblockend);
+static void stack_verbose_show_state(stackdata_t *sd, instruction *iptr,
+ stackelement_t * curstack);
+#endif
/* stack_init ******************************************************************
}
-/* stack_create_invars *********************************************************
+/* stack_grow_variable_array ***************************************************
- Create the invars for the given basic block.
+ Grow the variable array so the given number of additional variables fits in.
+ The number is added to `varcount`, which is the maximum number of variables
+ we expect to need at this point. The actual number of variables
+ (`varsallocated`) may be larger than that, in order to avoid too many
+ reallocations.
IN:
sd...........stack analysis data
- b............block to create the invars for
- curstack.....current stack top
- stackdepth...current stack depth
-
- This function creates STACKDEPTH invars and sets their types to the
- types to the types of the corresponding slot in the current stack.
+ num..........number of additional variables
*******************************************************************************/
-static void stack_create_invars(stackdata_t *sd, basicblock *b,
- stackptr curstack, int stackdepth)
+static void stack_grow_variable_array(stackdata_t *sd, s4 num)
{
- stackptr sp;
- int i;
- int index;
- varinfo *v;
-
- assert(sd->vartop + stackdepth <= sd->varcount);
+ s4 newsize;
- b->indepth = stackdepth;
- b->invars = DMNEW(s4, stackdepth);
+ assert(num >= 0);
- /* allocate the variable indices */
- index = (sd->vartop += stackdepth);
+ if (sd->varcount + num > sd->varsallocated) {
+ newsize = 2*sd->varsallocated + num;
- i = stackdepth;
- for (sp = curstack; i--; sp = sp->prev) {
- b->invars[i] = --index;
- v = sd->var + index;
- v->type = sp->type;
- v->flags = OUTVAR;
+ sd->var = DMREALLOC(sd->var, varinfo, sd->varsallocated, newsize);
+ MZERO(sd->var + sd->varsallocated, varinfo, (newsize - sd->varsallocated));
+ sd->varsallocated = newsize;
+ sd->jd->var = sd->var;
}
+
+ sd->varcount += num;
+ sd->jd->varcount += num;
+
+ assert(sd->varcount <= sd->varsallocated);
}
-/* stack_check_invars **********************************************************
+/* stack_append_block **********************************************************
- Check the current stack against the invars of the given basic block.
- Depth and types must match.
+ Append the given block after the last real block of the method (before
+ the pseudo-block at the end).
IN:
sd...........stack analysis data
- b............block which invars to check against
- curstack.....current stack top
- stackdepth...current stack depth
-
- RETURN VALUE:
- true.........everything ok
- false........a VerifyError has been thrown
+ b............the block to append
*******************************************************************************/
-/* XXX only if ENABLE_VERIFIER */
-static bool stack_check_invars(stackdata_t *sd, basicblock *b,
- stackptr curstack, int stackdepth)
+static void stack_append_block(stackdata_t *sd, basicblock *b)
{
- int depth;
-
- depth = b->indepth;
-
- if (depth != stackdepth) {
- exceptions_throw_verifyerror(sd->m, "Stack depth mismatch");
- return false;
- }
-
- while (depth--) {
- if (sd->var[b->invars[depth]].type != curstack->type) {
- exceptions_throw_verifyerror_for_stack(sd->m,
- sd->var[b->invars[depth]].type);
- return false;
- }
- curstack = curstack->prev;
- }
+#if defined(STACK_VERBOSE)
+ printf("APPENDING BLOCK L%0d\n", b->nr);
+#endif
- return true;
+ b->next = sd->last_real_block->next;
+ sd->last_real_block->next = b;
+ sd->last_real_block = b;
+ b->nr = sd->jd->basicblockcount++;
+ b->next->nr = b->nr + 1;
}
-/* stack_create_instack ********************************************************
+/* stack_clone_block ***********************************************************
- Create the instack of the current basic block.
+ Create a copy of the given block and insert it at the end of the method.
+
+ CAUTION: This function does not copy the any variables or the instruction
+ list. It _does_, however, reserve space for the block's invars in the
+ variable array.
IN:
sd...........stack analysis data
+ b............the block to clone
RETURN VALUE:
- the current stack top at the start of the basic block.
+ a pointer to the copy
*******************************************************************************/
-static stackptr stack_create_instack(stackdata_t *sd)
+static basicblock * stack_clone_block(stackdata_t *sd, basicblock *b)
{
- stackptr sp;
- int depth;
- int index;
-
- if ((depth = sd->bptr->indepth) == 0)
- return NULL;
+ basicblock *clone;
- sp = (sd->new += depth);
-
- while (depth--) {
- sp--;
- index = sd->bptr->invars[depth];
- sp->varnum = index;
- sp->type = sd->var[index].type;
- sp->prev = sp - 1;
- sp->creator = NULL;
- sp->flags = 0;
- sp->varkind = STACKVAR;
- }
- sp->prev = NULL;
+ clone = DNEW(basicblock);
+ *clone = *b;
- /* return the top of the created stack */
- return sd->new - 1;
-}
+ clone->iinstr = NULL;
+ clone->inlocals = NULL;
+ clone->javalocals = NULL;
+ clone->invars = NULL;
+ clone->original = (b->original) ? b->original : b;
+ clone->copied_to = clone->original->copied_to;
+ clone->original->copied_to = clone;
+ clone->next = NULL;
+ clone->flags = BBREACHED;
-/* MARKREACHED marks the destination block <b> as reached. If this
- * block has been reached before we check if stack depth and types
- * match. Otherwise the destination block receives a copy of the
- * current stack as its input stack.
- *
- * b...destination block
- * c...current stack
- */
+ stack_append_block(sd, clone);
-static bool stack_mark_reached(stackdata_t *sd, basicblock *b, stackptr curstack, int stackdepth)
-{
- /* mark targets of backward branches */
- if (b <= sd->bptr)
- b->bitflags |= BBFLAG_REPLACEMENT;
+ /* reserve space for the invars of the clone */
- if (b->flags < BBREACHED) {
- /* b is reached for the first time. Create its instack */
- stack_create_invars(sd, b, curstack, stackdepth);
+ stack_grow_variable_array(sd, b->indepth);
- b->flags = BBREACHED;
- }
- else {
- /* b has been reached before. Check that its instack matches */
- if (!stack_check_invars(sd, b, curstack, stackdepth))
- return false;
- }
+#if defined(STACK_VERBOSE)
+ printf("cloning block L%03d ------> L%03d\n", b->nr, clone->nr);
+#endif
- return true;
+ return clone;
}
-/* stack_analyse ***************************************************************
+/* stack_create_locals *********************************************************
+
+ Create the local variables for the start of the given basic block.
- Analyse_stack uses the intermediate code created by parse.c to
- build a model of the JVM operand stack for the current method.
-
- The following checks are performed:
- - check for operand stack underflow (before each instruction)
- - check for operand stack overflow (after[1] each instruction)
- - check for matching stack depth at merging points
- - check for matching basic types[2] at merging points
- - check basic types for instruction input (except for BUILTIN*
- opcodes, INVOKE* opcodes and MULTIANEWARRAY)
-
- [1]) Checking this after the instruction should be ok. parse.c
- counts the number of required stack slots in such a way that it is
- only vital that we don't exceed `maxstack` at basic block
- boundaries.
-
- [2]) 'basic types' means the distinction between INT, LONG, FLOAT,
- DOUBLE and ADDRESS types. Subtypes of INT and different ADDRESS
- types are not discerned.
+ IN:
+ sd...........stack analysis data
+ b............block to create the locals for
*******************************************************************************/
-bool new_stack_analyse(jitdata *jd)
+static void stack_create_locals(stackdata_t *sd, basicblock *b)
{
- methodinfo *m; /* method being analyzed */
- codeinfo *code;
- codegendata *cd;
- registerdata *rd;
- stackdata_t sd;
- int b_count; /* basic block counter */
- int b_index; /* basic block index */
- int stackdepth;
- stackptr curstack; /* current stack top */
- stackptr copy;
- int opcode; /* opcode of current instruction */
- int i, j;
- int javaindex;
- int len; /* # of instructions after the current one */
- bool superblockend; /* if true, no fallthrough to next block */
- bool repeat; /* if true, outermost loop must run again */
- bool deadcode; /* true if no live code has been reached */
- instruction *iptr; /* the current instruction */
- basicblock *tbptr;
+ s4 i;
+ s4 *jl;
+ varinfo *dv;
- stackptr *last_store_boundary;
- stackptr coalescing_boundary;
+ /* copy the current state of the local variables */
+ /* (one extra local is needed by the verifier) */
- stackptr src1, src2, src3, src4, dst1, dst2;
+ dv = DMNEW(varinfo, sd->localcount + VERIFIER_EXTRA_LOCALS);
+ b->inlocals = dv;
+ for (i=0; i<sd->localcount; ++i)
+ *dv++ = sd->var[i];
- branch_target_t *table;
- lookup_target_t *lookup;
-#if defined(ENABLE_VERIFIER)
- int expectedtype; /* used by CHECK_BASIC_TYPE */
-#endif
- builtintable_entry *bte;
- methoddesc *md;
- constant_FMIref *fmiref;
-#if defined(ENABLE_STATISTICS)
- int iteration_count; /* number of iterations of analysis */
-#endif
- int new_index; /* used to get a new var index with GET_NEW_INDEX*/
+ /* the current map from java locals to cacao variables */
-#if defined(STACK_VERBOSE)
- new_show_method(jd, SHOW_PARSE);
-#endif
+ jl = DMNEW(s4, sd->maxlocals);
+ b->javalocals = jl;
+ MCOPY(jl, sd->javalocals, s4, sd->maxlocals);
+}
- /* get required compiler data - initialization */
- m = jd->m;
- code = jd->code;
- cd = jd->cd;
- rd = jd->rd;
+/* stack_merge_locals **********************************************************
+
+ Merge local variables at the beginning of the given basic block.
- /* initialize the stackdata_t struct */
+ IN:
+ sd...........stack analysis data
+ b............the block that is reached
- sd.m = m;
- sd.varcount = jd->varcount;
- sd.vartop = jd->vartop;
- sd.localcount = jd->localcount;
- sd.var = jd->var;
+*******************************************************************************/
-#if defined(ENABLE_LSRA)
- m->maxlifetimes = 0;
-#endif
+static void stack_merge_locals(stackdata_t *sd, basicblock *b)
+{
+ s4 i;
+ varinfo *dv;
+ varinfo *sv;
+
+ /* If a javalocal is mapped to different cacao locals along the */
+ /* incoming control-flow edges, it becomes undefined. */
+
+ for (i=0; i<sd->maxlocals; ++i) {
+ if (b->javalocals[i] != UNUSED && b->javalocals[i] != sd->javalocals[i]) {
+ b->javalocals[i] = UNUSED;
+ if (b->flags >= BBFINISHED)
+ b->flags = BBTYPECHECK_REACHED;
+ if (b->nr <= sd->bptr->nr)
+ sd->repeat = true;
+ }
+ }
-#if defined(ENABLE_STATISTICS)
- iteration_count = 0;
+#if defined(ENABLE_VERIFIER)
+ if (b->inlocals) {
+ for (i=0; i<sd->localcount; ++i) {
+ dv = b->inlocals + i;
+ sv = sd->var + i;
+ if ((sv->type == TYPE_RET && dv->type == TYPE_RET)
+ && (sv->SBRSTART != dv->SBRSTART))
+ {
+#if defined(STACK_VERBOSE)
+ printf("JSR MISMATCH: setting variable %d to VOID\n", i);
#endif
+ dv->type = TYPE_VOID;
+ if (b->flags >= BBFINISHED)
+ b->flags = BBTYPECHECK_REACHED;
+ sd->repeat = true; /* This is very rare, so just repeat */
+ }
+ }
+ }
+#endif /* defined(ENABLE_VERIFIER) */
+}
- /* init jd->interface_map */
-
- jd->interface_map = DMNEW(interface_info, m->maxstack * 5);
- for (i = 0; i < m->maxstack * 5; i++)
- jd->interface_map[i].flags = UNUSED;
- last_store_boundary = DMNEW(stackptr, cd->maxlocals);
+/* stack_create_invars *********************************************************
- /* initialize flags and invars (none) of first block */
+ Create the invars for the given basic block. Also make a copy of the locals.
- jd->new_basicblocks[0].flags = BBREACHED;
- jd->new_basicblocks[0].invars = NULL;
- jd->new_basicblocks[0].indepth = 0;
+ IN:
+ sd...........stack analysis data
+ b............block to create the invars for
+ curstack.....current stack top
+ stackdepth...current stack depth
- /* initialize invars of exception handlers */
+ This function creates STACKDEPTH invars and sets their types to the
+ types to the types of the corresponding slot in the current stack.
- for (i = 0; i < cd->exceptiontablelength; i++) {
- sd.bptr = BLOCK_OF(cd->exceptiontable[i].handlerpc);
- sd.bptr->flags = BBREACHED;
- sd.bptr->type = BBTYPE_EXH;
- sd.bptr->predecessorcount = CFG_UNKNOWN_PREDECESSORS;
+*******************************************************************************/
- GET_NEW_VAR(sd, new_index, TYPE_ADR);
- sd.bptr->invars = DMNEW(s4, 1);
- sd.bptr->invars[0] = new_index;
- sd.bptr->indepth = 1;
- sd.var[new_index].flags |= OUTVAR;
+static void stack_create_invars(stackdata_t *sd, basicblock *b,
+ stackelement_t * curstack, int stackdepth)
+{
+ stackelement_t * sp;
+ int i;
+ int index;
+ varinfo *dv;
+ varinfo *sv;
- /* mark this interface variable used */
- jd->interface_map[0 * 5 + TYPE_ADR].flags = 0;
- }
+ assert(sd->vartop + stackdepth <= sd->varcount);
- /* stack analysis loop (until fixpoint reached) **************************/
+ b->indepth = stackdepth;
+ b->invars = DMNEW(s4, stackdepth);
+
+ /* allocate the variable indices */
+ index = (sd->vartop += stackdepth);
+
+ i = stackdepth;
+ for (sp = curstack; i--; sp = sp->prev) {
+ b->invars[i] = --index;
+ dv = sd->var + index;
+ sv = sd->var + sp->varnum;
+ dv->flags = INOUT;
+ COPY_VAL_AND_TYPE_VAR(sv, dv);
+ }
+
+ stack_create_locals(sd, b);
+}
+
+
+/* stack_create_invars_from_outvars ********************************************
+
+ Create the invars for the given basic block. Also make a copy of the locals.
+ Types are propagated from the outvars of the current block.
+
+ IN:
+ sd...........stack analysis data
+ b............block to create the invars for
+
+*******************************************************************************/
+
+static void stack_create_invars_from_outvars(stackdata_t *sd, basicblock *b)
+{
+ int i;
+ int n;
+ varinfo *sv, *dv;
+
+ n = sd->bptr->outdepth;
+ assert(sd->vartop + n <= sd->varcount);
+
+ b->indepth = n;
+ b->invars = DMNEW(s4, n);
+
+ if (n) {
+ dv = sd->var + sd->vartop;
+
+ /* allocate the invars */
+
+ for (i=0; i<n; ++i, ++dv) {
+ sv = sd->var + sd->bptr->outvars[i];
+ b->invars[i] = sd->vartop++;
+ dv->flags = INOUT;
+ COPY_VAL_AND_TYPE_VAR(sv, dv);
+ }
+ }
+
+ stack_create_locals(sd, b);
+}
+
+
+/* stack_check_invars **********************************************************
+
+ Check the current stack against the invars of the given basic block.
+ Depth and types must match.
+
+ IN:
+ sd...........stack analysis data
+ b............block which invars to check against
+ curstack.....current stack top
+ stackdepth...current stack depth
+
+ RETURN VALUE:
+ the destinaton block
+ NULL.........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static basicblock * stack_check_invars(stackdata_t *sd, basicblock *b,
+ stackelement_t * curstack, int stackdepth)
+{
+ int i;
+ stackelement_t * sp;
+ basicblock *orig;
+ bool separable;
+ varinfo *sv;
+ varinfo *dv;
+
+#if defined(STACK_VERBOSE)
+ printf("stack_check_invars(L%03d)\n", b->nr);
+#endif
+
+ /* find original of b */
+ if (b->original)
+ b = b->original;
+ orig = b;
+
+#if defined(STACK_VERBOSE)
+ printf("original is L%03d\n", orig->nr);
+#endif
+
+ i = orig->indepth;
+
+#if defined(ENABLE_VERIFIER)
+ if (i != stackdepth) {
+ exceptions_throw_verifyerror(sd->m, "Stack depth mismatch");
+ return NULL;
+ }
+#endif
+
+ do {
+ separable = false;
+
+#if defined(STACK_VERBOSE)
+ printf("checking against ");
+ stack_verbose_show_block(sd, b); printf("\n");
+#endif
+
+ sp = curstack;
+ for (i = orig->indepth; i--; sp = sp->prev) {
+ dv = sd->var + b->invars[i];
+ sv = sd->var + sp->varnum;
+
+#if defined(ENABLE_VERIFIER)
+ if (dv->type != sp->type) {
+ exceptions_throw_verifyerror_for_stack(sd->m, dv->type);
+ return NULL;
+ }
+#endif
+
+ if (sp->type == TYPE_RET) {
+#if defined(ENABLE_VERIFIER)
+ if (dv->SBRSTART != sv->SBRSTART) {
+ exceptions_throw_verifyerror(sd->m, "Mismatched stack types");
+ return NULL;
+ }
+#endif
+ if (dv->vv.retaddr != sv->vv.retaddr) {
+ separable = true;
+ /* don't break! have to check the remaining stackslots */
+ }
+ }
+ }
+
+ if (b->inlocals) {
+ for (i=0; i<sd->localcount; ++i) {
+ dv = b->inlocals + i;
+ sv = sd->var + i;
+ if (sv->type == TYPE_RET && dv->type == TYPE_RET) {
+ if (
+#if defined(ENABLE_VERIFIER)
+ (sv->SBRSTART == dv->SBRSTART) &&
+#endif
+ (sv->vv.retaddr != dv->vv.retaddr))
+ {
+ separable = true;
+ break;
+ }
+ }
+ }
+ }
+
+ if (!separable) {
+ /* XXX cascading collapse? */
+
+ stack_merge_locals(sd, b);
+
+#if defined(STACK_VERBOSE)
+ printf("------> using L%03d\n", b->nr);
+#endif
+ return b;
+ }
+ } while ((b = b->copied_to) != NULL);
+
+ b = stack_clone_block(sd, orig);
+ if (!b)
+ return NULL;
+
+ stack_create_invars(sd, b, curstack, stackdepth);
+ return b;
+}
+
+
+/* stack_check_invars_from_outvars *********************************************
+
+ Check the outvars of the current block against the invars of the given block.
+ Depth and types must match.
+
+ IN:
+ sd...........stack analysis data
+ b............block which invars to check against
+
+ RETURN VALUE:
+ the destinaton block
+ NULL.........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static basicblock * stack_check_invars_from_outvars(stackdata_t *sd, basicblock *b)
+{
+ int i;
+ int n;
+ varinfo *sv, *dv;
+ basicblock *orig;
+ bool separable;
+
+#if defined(STACK_VERBOSE)
+ printf("stack_check_invars_from_outvars(L%03d)\n", b->nr);
+#endif
+
+ /* find original of b */
+ if (b->original)
+ b = b->original;
+ orig = b;
+
+#if defined(STACK_VERBOSE)
+ printf("original is L%03d\n", orig->nr);
+#endif
+
+ i = orig->indepth;
+ n = sd->bptr->outdepth;
+
+#if defined(ENABLE_VERIFIER)
+ if (i != n) {
+ exceptions_throw_verifyerror(sd->m, "Stack depth mismatch");
+ return NULL;
+ }
+#endif
+
+ do {
+ separable = false;
+
+#if defined(STACK_VERBOSE)
+ printf("checking against ");
+ stack_verbose_show_block(sd, b); printf("\n");
+#endif
+
+ if (n) {
+ dv = sd->var + b->invars[0];
+
+ for (i=0; i<n; ++i, ++dv) {
+ sv = sd->var + sd->bptr->outvars[i];
+
+#if defined(ENABLE_VERIFIER)
+ if (sv->type != dv->type) {
+ exceptions_throw_verifyerror_for_stack(sd->m, dv->type);
+ return NULL;
+ }
+#endif
+
+ if (dv->type == TYPE_RET) {
+#if defined(ENABLE_VERIFIER)
+ if (sv->SBRSTART != dv->SBRSTART) {
+ exceptions_throw_verifyerror(sd->m, "Mismatched stack types");
+ return NULL;
+ }
+#endif
+ if (sv->vv.retaddr != dv->vv.retaddr) {
+ separable = true;
+ /* don't break! have to check the remaining stackslots */
+ }
+ }
+ }
+ }
+
+ if (b->inlocals) {
+ for (i=0; i<sd->localcount; ++i) {
+ dv = b->inlocals + i;
+ sv = sd->var + i;
+ if (
+#if defined(ENABLE_VERIFIER)
+ (sv->SBRSTART == dv->SBRSTART) &&
+#endif
+ (sv->type == TYPE_RET && dv->type == TYPE_RET))
+ {
+ if (sv->vv.retaddr != dv->vv.retaddr) {
+ separable = true;
+ break;
+ }
+ }
+ }
+ }
+
+ if (!separable) {
+ /* XXX cascading collapse? */
+
+ stack_merge_locals(sd, b);
+
+#if defined(STACK_VERBOSE)
+ printf("------> using L%03d\n", b->nr);
+#endif
+ return b;
+ }
+ } while ((b = b->copied_to) != NULL);
+
+ b = stack_clone_block(sd, orig);
+ if (!b)
+ return NULL;
+
+ stack_create_invars_from_outvars(sd, b);
+ return b;
+}
+
+
+/* stack_create_instack ********************************************************
+
+ Create the instack of the current basic block.
+
+ IN:
+ sd...........stack analysis data
+
+ RETURN VALUE:
+ the current stack top at the start of the basic block.
+
+*******************************************************************************/
+
+static stackelement_t * stack_create_instack(stackdata_t *sd)
+{
+ stackelement_t * sp;
+ int depth;
+ int index;
+
+ if ((depth = sd->bptr->indepth) == 0)
+ return NULL;
+
+ sp = (sd->new += depth);
+
+ while (depth--) {
+ sp--;
+ index = sd->bptr->invars[depth];
+ sp->varnum = index;
+ sp->type = sd->var[index].type;
+ sp->prev = sp - 1;
+ sp->creator = NULL;
+ sp->flags = 0;
+ sp->varkind = STACKVAR;
+ }
+ sp->prev = NULL;
+
+ /* return the top of the created stack */
+ return sd->new - 1;
+}
+
+
+/* stack_mark_reached **********************************************************
+
+ Mark the given block reached and propagate the current stack and locals to
+ it. This function specializes the target block, if necessary, and returns
+ a pointer to the specialized target.
+
+ IN:
+ sd...........stack analysis data
+ b............the block to reach
+ curstack.....the current stack top
+ stackdepth...the current stack depth
+
+ RETURN VALUE:
+ a pointer to (a specialized version of) the target
+ NULL.........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static basicblock *stack_mark_reached(stackdata_t *sd, basicblock *b, stackelement_t * curstack, int stackdepth)
+{
+ assert(b != NULL);
+
+#if defined(STACK_VERBOSE)
+ printf("stack_mark_reached(L%03d from L%03d)\n", b->nr, sd->bptr->nr);
+#endif
+
+ /* mark targets of backward branches */
+
+ if (b->nr <= sd->bptr->nr)
+ b->bitflags |= BBFLAG_REPLACEMENT;
+
+ if (b->flags < BBREACHED) {
+ /* b is reached for the first time. Create its invars. */
+
+#if defined(STACK_VERBOSE)
+ printf("reached L%03d for the first time\n", b->nr);
+#endif
+
+ stack_create_invars(sd, b, curstack, stackdepth);
+
+ b->flags = BBREACHED;
+
+ return b;
+ }
+ else {
+ /* b has been reached before. Check that its invars match. */
+
+ return stack_check_invars(sd, b, curstack, stackdepth);
+ }
+}
+
+
+/* stack_mark_reached_from_outvars *********************************************
+
+ Mark the given block reached and propagate the outvars of the current block
+ and the current locals to it. This function specializes the target block,
+ if necessary, and returns a pointer to the specialized target.
+
+ IN:
+ sd...........stack analysis data
+ b............the block to reach
+
+ RETURN VALUE:
+ a pointer to (a specialized version of) the target
+ NULL.........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static basicblock *stack_mark_reached_from_outvars(stackdata_t *sd, basicblock *b)
+{
+	assert(b != NULL);
+
+#if defined(STACK_VERBOSE)
+	printf("stack_mark_reached_from_outvars(L%03d from L%03d)\n", b->nr, sd->bptr->nr);
+#endif
+
+	/* mark targets of backward branches */
+	/* (same criterion as in stack_mark_reached: block number not greater
+	   than the current block's) */
+
+	if (b->nr <= sd->bptr->nr)
+		b->bitflags |= BBFLAG_REPLACEMENT;
+
+	if (b->flags < BBREACHED) {
+		/* b is reached for the first time. Create its invars. */
+		/* Unlike stack_mark_reached, the invars are derived from the
+		   current block's outvars rather than from a stack model. */
+
+#if defined(STACK_VERBOSE)
+		printf("reached L%03d for the first time\n", b->nr);
+#endif
+
+		stack_create_invars_from_outvars(sd, b);
+
+		b->flags = BBREACHED;
+
+		return b;
+	}
+	else {
+		/* b has been reached before. Check that its invars match. */
+		/* May return a specialized clone of b, or NULL on VerifyError. */
+
+		return stack_check_invars_from_outvars(sd, b);
+	}
+}
+
+
+/* stack_reach_next_block ******************************************************
+
+   Mark the following block reached and propagate the outvars of the
+   current block and the current locals to it. This function
+   specializes the target block, if necessary. If the (specialized)
+   successor is not the directly following block, the trailing NOP of
+   the current block is turned into a GOTO to the specialized target.
+
+   IN:
+      sd...........stack analysis data
+
+   RETURN VALUE:
+      true.........everything ok
+      false........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static bool stack_reach_next_block(stackdata_t *sd)
+{
+	basicblock *tbptr;
+	instruction *iptr;
+
+	/* if the current block is a clone, fall through to the ORIGINAL's
+	   successor */
+
+	tbptr = (sd->bptr->original) ? sd->bptr->original : sd->bptr;
+	tbptr = stack_mark_reached_from_outvars(sd, tbptr->next);
+
+	if (tbptr == NULL)
+		return false;
+
+	if (tbptr != sd->bptr->next) {
+		/* the successor was specialized to a non-consecutive clone:
+		   replace the reserved trailing NOP with an explicit GOTO */
+
+#if defined(STACK_VERBOSE)
+		printf("NEXT IS NON-CONSEQUITIVE L%03d\n", tbptr->nr);
+#endif
+		iptr = sd->bptr->iinstr + sd->bptr->icount - 1;
+		assert(iptr->opc == ICMD_NOP);
+		iptr->opc = ICMD_GOTO;
+		iptr->dst.block = tbptr;
+#if defined(STACK_VERBOSE)
+		if (iptr->line == 0) printf("goto with line 0 in L%03d\n", sd->bptr->nr);
+#endif
+
+		if (tbptr->flags < BBFINISHED)
+			sd->repeat = true; /* XXX check if we really need to repeat */
+	}
+
+	return true;
+}
+
+
+/* stack_reach_handlers ********************************************************
+
+ Reach the exception handlers for the current block.
+
+ IN:
+ sd...........stack analysis data
+
+ RETURN VALUE:
+ true.........everything ok
+ false........a VerifyError has been thrown
+
+*******************************************************************************/
+
+static bool stack_reach_handlers(stackdata_t *sd)
+{
+	s4 i;
+	basicblock *tbptr;
+
+#if defined(STACK_VERBOSE)
+	printf("reaching exception handlers...\n");
+#endif
+
+	/* sd->handlers is a NULL-terminated array of the exception entries
+	   active for the current block */
+
+	for (i=0; sd->handlers[i]; ++i) {
+		tbptr = sd->handlers[i]->handler;
+
+		tbptr->type = BBTYPE_EXH;
+		tbptr->predecessorcount = CFG_UNKNOWN_PREDECESSORS;
+
+		/* reach (and specialize) the handler block */
+		/* the handler is entered with exactly one stack slot: the
+		   exception object (sd->exstack, depth 1) */
+
+		tbptr = stack_mark_reached(sd, tbptr, &(sd->exstack), 1);
+
+		if (tbptr == NULL)
+			return false;
+
+		/* store back the (possibly specialized) handler block */
+
+		sd->handlers[i]->handler = tbptr;
+	}
+
+	return true;
+}
+
+
+/* stack_reanalyse_block ******************************************************
+
+ Re-analyse the current block. This is called if either the block itself
+ has already been analysed before, or the current block is a clone of an
+ already analysed block, and this clone is reached for the first time.
+ In the latter case, this function does all that is necessary for fully
+ cloning the block (cloning the instruction list and variables, etc.).
+
+ IN:
+ sd...........stack analysis data
+
+ RETURN VALUE:
+ true.........everything ok
+ false........a VerifyError has been thrown
+
+*******************************************************************************/
+
+/* RELOCATE: shift a variable index into the cloned block's variable space.
+   Indices >= blockvarstart are block variables (shift by blockvarshift);
+   indices >= invarstart (but below blockvarstart) are invars (shift by
+   invarshift); lower indices are locals and stay unchanged. */
+
+#define RELOCATE(index) \
+    do { \
+        if ((index) >= blockvarstart) \
+            (index) += blockvarshift; \
+        else if ((index) >= invarstart) \
+            (index) += invarshift; \
+    } while (0)
+
+bool stack_reanalyse_block(stackdata_t *sd)
+{
+	instruction *iptr;
+	basicblock *b;
+	basicblock *orig;
+	s4 len;
+	s4 invarstart;
+	s4 blockvarstart;
+	s4 invarshift;
+	s4 blockvarshift;
+	s4 i, varindex;
+	s4 *argp;
+	branch_target_t *table;
+	lookup_target_t *lookup;
+	bool superblockend;
+	bool cloneinstructions;
+	exception_entry *ex;
+
+#if defined(STACK_VERBOSE)
+	stack_verbose_block_enter(sd, true);
+#endif
+
+	b = sd->bptr;
+
+	if (!b->iinstr) {
+		/* first-time analysis of a clone: materialize its own copy of
+		   the instruction list and variables */
+
+		orig = b->original;
+		assert(orig != NULL);
+
+		/* clone the instruction list */
+
+		cloneinstructions = true;
+
+		assert(orig->iinstr);
+		len = orig->icount;
+		iptr = DMNEW(instruction, len + 1);
+
+		MCOPY(iptr, orig->iinstr, instruction, len);
+		/* reserve a trailing NOP; stack_reach_next_block may turn it
+		   into a GOTO */
+		iptr[len].opc = ICMD_NOP;
+		iptr[len].line = 0;
+		iptr[len].flags.bits = 0;
+		b->iinstr = iptr;
+		b->icount = ++len;
+
+		/* reserve space for the clone's block variables */
+
+		stack_grow_variable_array(sd, orig->varcount);
+
+		/* we already have the invars set */
+
+		assert(b->indepth == orig->indepth);
+
+		/* calculate relocation shifts for invars and block variables */
+
+		if (orig->indepth) {
+			invarstart = orig->invars[0];
+			invarshift = b->invars[0] - invarstart;
+		}
+		else {
+			/* no invars: make the invar range empty */
+			invarstart = INT_MAX;
+			invarshift = 0;
+		}
+		blockvarstart = orig->varstart;
+		blockvarshift = sd->vartop - blockvarstart;
+
+		/* copy block variables */
+
+		b->varstart = sd->vartop;
+		b->varcount = orig->varcount;
+		sd->vartop += b->varcount;
+		MCOPY(sd->var + b->varstart, sd->var + orig->varstart, varinfo, b->varcount);
+
+		/* copy outvars */
+
+		b->outdepth = orig->outdepth;
+		b->outvars = DMNEW(s4, orig->outdepth);
+		MCOPY(b->outvars, orig->outvars, s4, orig->outdepth);
+
+		/* clone exception handlers */
+
+		for (i=0; sd->handlers[i]; ++i) {
+			ex = DNEW(exception_entry);
+			ex->handler = sd->handlers[i]->handler;
+			ex->start = b;
+			ex->end = b; /* XXX hack, see end of stack_analyse */
+			ex->catchtype = sd->handlers[i]->catchtype;
+			ex->down = NULL;
+
+			/* append to the exception table */
+			assert(sd->extableend->down == NULL);
+			sd->extableend->down = ex;
+			sd->extableend = ex;
+			sd->jd->exceptiontablelength++;
+
+			sd->handlers[i] = ex;
+		}
+	}
+	else {
+		/* re-analysis of a block that already has instructions:
+		   nothing to relocate */
+		cloneinstructions = false;
+		invarshift = 0;
+		blockvarshift = 0;
+		invarstart = sd->vartop;
+		blockvarstart = sd->vartop;
+		iptr = b->iinstr;
+	}
+
+	if (b->original) {
+		/* find exception handlers for the cloned block */
+		len = 0;
+		ex = sd->jd->exceptiontable;
+		for (; ex != NULL; ex = ex->down) {
+			/* XXX the cloned exception handlers have identical */
+			/* start and end blocks.                            */
+			if ((ex->start == b) && (ex->end == b)) {
+				sd->handlers[len++] = ex;
+			}
+		}
+		sd->handlers[len] = NULL;
+	}
+
+#if defined(STACK_VERBOSE)
+	printf("invarstart = %d, blockvarstart = %d\n", invarstart, blockvarstart);
+	printf("invarshift = %d, blockvarshift = %d\n", invarshift, blockvarshift);
+#endif
+
+	/* mark block as finished */
+
+	b->flags = BBFINISHED;
+
+	/* initialize locals at the start of this block */
+
+	if (b->inlocals)
+		MCOPY(sd->var, b->inlocals, varinfo, sd->localcount);
+
+	MCOPY(sd->javalocals, b->javalocals, s4, sd->maxlocals);
+
+	/* reach exception handlers for this block */
+
+	if (!stack_reach_handlers(sd))
+		return false;
+
+	superblockend = false;
+
+	/* walk all ICMDs, relocating variable indices and re-reaching
+	   branch targets */
+
+	for (len = b->icount; len--; iptr++) {
+#if defined(STACK_VERBOSE)
+		show_icmd(sd->jd, iptr, false, SHOW_STACK);
+		printf("\n");
+#endif
+
+		switch (iptr->opc) {
+		case ICMD_RET:
+			varindex = iptr->s1.varindex;
+
+#if defined(ENABLE_VERIFIER)
+			if (sd->var[varindex].type != TYPE_RET) {
+				exceptions_throw_verifyerror(sd->m, "RET with non-returnAddress value");
+				return false;
+			}
+#endif
+
+			/* NOTE(review): stack_mark_reached_from_outvars returns NULL
+			   on VerifyError, which is stored here without a check --
+			   confirm that callers detect the pending exception. */
+			iptr->dst.block = stack_mark_reached_from_outvars(sd, sd->var[varindex].vv.retaddr);
+			superblockend = true;
+			break;
+
+		case ICMD_JSR:
+			iptr->sx.s23.s3.jsrtarget.block = stack_mark_reached_from_outvars(sd, iptr->sx.s23.s3.jsrtarget.block);
+			RELOCATE(iptr->dst.varindex);
+			superblockend = true;
+			break;
+
+		case ICMD_RETURN:
+			superblockend = true;
+			break;
+
+		case ICMD_CHECKNULL:
+		case ICMD_PUTSTATICCONST:
+			break;
+
+		case ICMD_NOP:
+		case ICMD_IINC:
+			break;
+
+		case ICMD_GOTO:
+			iptr->dst.block = stack_mark_reached_from_outvars(sd, iptr->dst.block);
+			superblockend = true;
+			break;
+
+			/* pop 0 push 1 const */
+
+		case ICMD_ACONST:
+		case ICMD_ICONST:
+		case ICMD_LCONST:
+		case ICMD_FCONST:
+		case ICMD_DCONST:
+
+			/* pop 0 push 1 load */
+
+		case ICMD_ILOAD:
+		case ICMD_LLOAD:
+		case ICMD_FLOAD:
+		case ICMD_DLOAD:
+		case ICMD_ALOAD:
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+			/* pop 2 push 1 */
+
+		case ICMD_IALOAD:
+		case ICMD_LALOAD:
+		case ICMD_FALOAD:
+		case ICMD_DALOAD:
+		case ICMD_AALOAD:
+		case ICMD_BALOAD:
+		case ICMD_CALOAD:
+		case ICMD_SALOAD:
+			RELOCATE(iptr->sx.s23.s2.varindex);
+			RELOCATE(iptr->s1.varindex);
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+			/* pop 3 push 0 */
+
+		case ICMD_IASTORE:
+		case ICMD_LASTORE:
+		case ICMD_FASTORE:
+		case ICMD_DASTORE:
+		case ICMD_AASTORE:
+		case ICMD_BASTORE:
+		case ICMD_CASTORE:
+		case ICMD_SASTORE:
+			RELOCATE(iptr->sx.s23.s3.varindex);
+			RELOCATE(iptr->sx.s23.s2.varindex);
+			RELOCATE(iptr->s1.varindex);
+			break;
+
+			/* pop 1 push 0 store */
+
+		case ICMD_ISTORE:
+		case ICMD_LSTORE:
+		case ICMD_FSTORE:
+		case ICMD_DSTORE:
+		case ICMD_ASTORE:
+			RELOCATE(iptr->s1.varindex);
+
+			/* update the java-local mapping; returnAddresses are encoded
+			   specially via JAVALOCAL_FROM_RETADDR */
+			varindex = iptr->dst.varindex;
+			COPY_VAL_AND_TYPE(*sd, iptr->s1.varindex, varindex);
+			i = iptr->sx.s23.s3.javaindex;
+			if (iptr->flags.bits & INS_FLAG_RETADDR) {
+				iptr->sx.s23.s2.retaddrnr =
+					JAVALOCAL_FROM_RETADDR(sd->var[varindex].vv.retaddr->nr);
+				sd->javalocals[i] = iptr->sx.s23.s2.retaddrnr;
+			}
+			else
+				sd->javalocals[i] = varindex;
+			/* a 2-word store kills the neighbouring java local(s) */
+			if (iptr->flags.bits & INS_FLAG_KILL_PREV)
+				sd->javalocals[i-1] = UNUSED;
+			if (iptr->flags.bits & INS_FLAG_KILL_NEXT)
+				sd->javalocals[i+1] = UNUSED;
+			break;
+
+			/* pop 1 push 0 */
+
+		case ICMD_ARETURN:
+		case ICMD_ATHROW:
+		case ICMD_IRETURN:
+		case ICMD_LRETURN:
+		case ICMD_FRETURN:
+		case ICMD_DRETURN:
+			RELOCATE(iptr->s1.varindex);
+			superblockend = true;
+			break;
+
+		case ICMD_PUTSTATIC:
+		case ICMD_PUTFIELDCONST:
+		case ICMD_POP:
+			RELOCATE(iptr->s1.varindex);
+			break;
+
+			/* pop 1 push 0 branch */
+
+		case ICMD_IFNULL:
+		case ICMD_IFNONNULL:
+
+		case ICMD_IFEQ:
+		case ICMD_IFNE:
+		case ICMD_IFLT:
+		case ICMD_IFGE:
+		case ICMD_IFGT:
+		case ICMD_IFLE:
+
+		case ICMD_IF_LEQ:
+		case ICMD_IF_LNE:
+		case ICMD_IF_LLT:
+		case ICMD_IF_LGE:
+		case ICMD_IF_LGT:
+		case ICMD_IF_LLE:
+			RELOCATE(iptr->s1.varindex);
+			iptr->dst.block = stack_mark_reached_from_outvars(sd, iptr->dst.block);
+			break;
+
+			/* pop 1 push 0 table branch */
+
+		case ICMD_TABLESWITCH:
+			/* number of targets: high - low + 1 entries plus the default */
+			i = iptr->sx.s23.s3.tablehigh - iptr->sx.s23.s2.tablelow + 1 + 1;
+
+			if (cloneinstructions) {
+				table = DMNEW(branch_target_t, i);
+				MCOPY(table, iptr->dst.table, branch_target_t, i);
+				iptr->dst.table = table;
+			}
+			else {
+				table = iptr->dst.table;
+			}
+
+			RELOCATE(iptr->s1.varindex);
+			while (i--) {
+				table->block = stack_mark_reached_from_outvars(sd, table->block);
+				table++;
+			}
+			superblockend = true;
+			break;
+
+		case ICMD_LOOKUPSWITCH:
+			i = iptr->sx.s23.s2.lookupcount;
+			if (cloneinstructions) {
+				lookup = DMNEW(lookup_target_t, i);
+				MCOPY(lookup, iptr->dst.lookup, lookup_target_t, i);
+				iptr->dst.lookup = lookup;
+			}
+			else {
+				lookup = iptr->dst.lookup;
+			}
+			RELOCATE(iptr->s1.varindex);
+			while (i--) {
+				lookup->target.block = stack_mark_reached_from_outvars(sd, lookup->target.block);
+				lookup++;
+			}
+			iptr->sx.s23.s3.lookupdefault.block = stack_mark_reached_from_outvars(sd, iptr->sx.s23.s3.lookupdefault.block);
+			superblockend = true;
+			break;
+
+		case ICMD_MONITORENTER:
+		case ICMD_MONITOREXIT:
+			RELOCATE(iptr->s1.varindex);
+			break;
+
+			/* pop 2 push 0 branch */
+
+		case ICMD_IF_ICMPEQ:
+		case ICMD_IF_ICMPNE:
+		case ICMD_IF_ICMPLT:
+		case ICMD_IF_ICMPGE:
+		case ICMD_IF_ICMPGT:
+		case ICMD_IF_ICMPLE:
+
+		case ICMD_IF_LCMPEQ:
+		case ICMD_IF_LCMPNE:
+		case ICMD_IF_LCMPLT:
+		case ICMD_IF_LCMPGE:
+		case ICMD_IF_LCMPGT:
+		case ICMD_IF_LCMPLE:
+
+		case ICMD_IF_ACMPEQ:
+		case ICMD_IF_ACMPNE:
+			RELOCATE(iptr->sx.s23.s2.varindex);
+			RELOCATE(iptr->s1.varindex);
+			iptr->dst.block = stack_mark_reached_from_outvars(sd, iptr->dst.block);
+			break;
+
+			/* pop 2 push 0 */
+
+		case ICMD_PUTFIELD:
+		case ICMD_IASTORECONST:
+		case ICMD_LASTORECONST:
+		case ICMD_AASTORECONST:
+		case ICMD_BASTORECONST:
+		case ICMD_CASTORECONST:
+		case ICMD_SASTORECONST:
+		case ICMD_POP2:
+			RELOCATE(iptr->sx.s23.s2.varindex);
+			RELOCATE(iptr->s1.varindex);
+			break;
+
+			/* pop 0 push 1 copy */
+
+		case ICMD_COPY:
+		case ICMD_MOVE:
+			RELOCATE(iptr->dst.varindex);
+			RELOCATE(iptr->s1.varindex);
+			COPY_VAL_AND_TYPE(*sd, iptr->s1.varindex, iptr->dst.varindex);
+			break;
+
+			/* pop 2 push 1 */
+
+		case ICMD_IDIV:
+		case ICMD_IREM:
+		case ICMD_LDIV:
+		case ICMD_LREM:
+		case ICMD_IADD:
+		case ICMD_ISUB:
+		case ICMD_IMUL:
+		case ICMD_ISHL:
+		case ICMD_ISHR:
+		case ICMD_IUSHR:
+		case ICMD_IAND:
+		case ICMD_IOR:
+		case ICMD_IXOR:
+		case ICMD_LADD:
+		case ICMD_LSUB:
+		case ICMD_LMUL:
+		case ICMD_LOR:
+		case ICMD_LAND:
+		case ICMD_LXOR:
+		case ICMD_LSHL:
+		case ICMD_LSHR:
+		case ICMD_LUSHR:
+		case ICMD_FADD:
+		case ICMD_FSUB:
+		case ICMD_FMUL:
+		case ICMD_FDIV:
+		case ICMD_FREM:
+		case ICMD_DADD:
+		case ICMD_DSUB:
+		case ICMD_DMUL:
+		case ICMD_DDIV:
+		case ICMD_DREM:
+		case ICMD_LCMP:
+		case ICMD_FCMPL:
+		case ICMD_FCMPG:
+		case ICMD_DCMPL:
+		case ICMD_DCMPG:
+			RELOCATE(iptr->sx.s23.s2.varindex);
+			RELOCATE(iptr->s1.varindex);
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+			/* pop 1 push 1 */
+
+		case ICMD_CHECKCAST:
+		case ICMD_ARRAYLENGTH:
+		case ICMD_INSTANCEOF:
+		case ICMD_NEWARRAY:
+		case ICMD_ANEWARRAY:
+		case ICMD_GETFIELD:
+		case ICMD_IADDCONST:
+		case ICMD_ISUBCONST:
+		case ICMD_IMULCONST:
+		case ICMD_IMULPOW2:
+		case ICMD_IDIVPOW2:
+		case ICMD_IREMPOW2:
+		case ICMD_IANDCONST:
+		case ICMD_IORCONST:
+		case ICMD_IXORCONST:
+		case ICMD_ISHLCONST:
+		case ICMD_ISHRCONST:
+		case ICMD_IUSHRCONST:
+		case ICMD_LADDCONST:
+		case ICMD_LSUBCONST:
+		case ICMD_LMULCONST:
+		case ICMD_LMULPOW2:
+		case ICMD_LDIVPOW2:
+		case ICMD_LREMPOW2:
+		case ICMD_LANDCONST:
+		case ICMD_LORCONST:
+		case ICMD_LXORCONST:
+		case ICMD_LSHLCONST:
+		case ICMD_LSHRCONST:
+		case ICMD_LUSHRCONST:
+		case ICMD_INEG:
+		case ICMD_INT2BYTE:
+		case ICMD_INT2CHAR:
+		case ICMD_INT2SHORT:
+		case ICMD_LNEG:
+		case ICMD_FNEG:
+		case ICMD_DNEG:
+		case ICMD_I2L:
+		case ICMD_I2F:
+		case ICMD_I2D:
+		case ICMD_L2I:
+		case ICMD_L2F:
+		case ICMD_L2D:
+		case ICMD_F2I:
+		case ICMD_F2L:
+		case ICMD_F2D:
+		case ICMD_D2I:
+		case ICMD_D2L:
+		case ICMD_D2F:
+			RELOCATE(iptr->s1.varindex);
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+			/* pop 0 push 1 */
+
+		case ICMD_GETSTATIC:
+		case ICMD_NEW:
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+			/* pop many push any */
+
+		case ICMD_INVOKESTATIC:
+		case ICMD_INVOKESPECIAL:
+		case ICMD_INVOKEVIRTUAL:
+		case ICMD_INVOKEINTERFACE:
+		case ICMD_BUILTIN:
+		case ICMD_MULTIANEWARRAY:
+			i = iptr->s1.argcount;
+			if (cloneinstructions) {
+				/* a clone needs its own argument array before relocation */
+				argp = DMNEW(s4, i);
+				MCOPY(argp, iptr->sx.s23.s2.args, s4, i);
+				iptr->sx.s23.s2.args = argp;
+			}
+			else {
+				argp = iptr->sx.s23.s2.args;
+			}
+
+			while (--i >= 0) {
+				RELOCATE(*argp);
+				argp++;
+			}
+			RELOCATE(iptr->dst.varindex);
+			break;
+
+		default:
+			exceptions_throw_internalerror("Unknown ICMD %d during stack re-analysis",
+										   iptr->opc);
+			return false;
+		} /* switch */
+
+#if defined(STACK_VERBOSE)
+		show_icmd(sd->jd, iptr, false, SHOW_STACK);
+		printf("\n");
+#endif
+	}
+
+	/* relocate outvars */
+
+	for (i=0; i<b->outdepth; ++i) {
+		RELOCATE(b->outvars[i]);
+	}
+
+#if defined(STACK_VERBOSE)
+	stack_verbose_block_exit(sd, superblockend);
+#endif
+
+	/* propagate to the next block */
+
+	if (!superblockend)
+		if (!stack_reach_next_block(sd))
+			return false;
+
+	return true;
+}
+
+
+/* stack_change_to_tempvar *****************************************************
+
+ Change the given stackslot to a TEMPVAR. This includes creating a new
+ temporary variable and changing the dst.varindex of the creator of the
+ stacklot to the new variable index. If this stackslot has been passed
+ through ICMDs between the point of its creation and the current point,
+ then the variable index is also changed in these ICMDs.
+
+ IN:
+ sd...........stack analysis data
+ sp...........stackslot to change
+ ilimit.......instruction up to which to look for ICMDs passing-through
+ the stackslot (exclusive). This may point exactly after the
+ last instruction, in which case the search is done to the
+ basic block end.
+
+*******************************************************************************/
+
+static void stack_change_to_tempvar(stackdata_t *sd, stackelement_t * sp,
+									instruction *ilimit)
+{
+	s4 newindex;
+	s4 oldindex;
+	instruction *iptr;
+	s4 depth;
+	s4 i;
+
+	oldindex = sp->varnum;
+
+	/* create a new temporary variable */
+
+	GET_NEW_VAR(*sd, newindex, sp->type);
+
+	sd->var[newindex].flags = sp->flags;
+
+	/* change the stackslot */
+
+	sp->varnum = newindex;
+	sp->varkind = TEMPVAR;
+
+	/* change the dst.varindex of the stackslot's creator */
+
+	if (sp->creator)
+		sp->creator->dst.varindex = newindex;
+
+	/* handle ICMDs this stackslot passed through, if any */
+
+	if (sp->flags & PASSTHROUGH) {
+		/* start scanning right after the creator, or at the block start
+		   if the slot has no creator (it is an invar) */
+		iptr = (sp->creator) ? (sp->creator + 1) : sd->bptr->iinstr;
+
+		/* assert that the limit points to an ICMD, or after the last one */
+
+		assert(ilimit >= sd->bptr->iinstr);
+		assert(ilimit <= sd->bptr->iinstr + sd->bptr->icount);
+
+		/* find the stackdepth under sp plus one */
+		/* Note: This number is usually known when this function is called, */
+		/* but calculating it here is less error-prone and should not be    */
+		/* a performance problem.                                           */
+
+		for (depth = 0; sp != NULL; sp = sp->prev)
+			depth++;
+
+		/* iterate over all instructions in the range and replace */
+
+		for (; iptr < ilimit; ++iptr) {
+			switch (iptr->opc) {
+				case ICMD_INVOKESTATIC:
+				case ICMD_INVOKESPECIAL:
+				case ICMD_INVOKEVIRTUAL:
+				case ICMD_INVOKEINTERFACE:
+				case ICMD_BUILTIN:
+					/* the argument at depth `depth` (counted from the
+					   stack bottom of the call) corresponds to args
+					   index argcount - depth */
+					i = iptr->s1.argcount - depth;
+					if (iptr->sx.s23.s2.args[i] == oldindex) {
+						iptr->sx.s23.s2.args[i] = newindex;
+					}
+					break;
+				/* IMPORTANT: If any ICMD sets the PASSTHROUGH flag of a */
+				/* stackslot, it must be added in this switch!           */
+			}
+		}
+	}
+}
+
+
+/* stack_init_javalocals *******************************************************
+
+ Initialize the mapping from Java locals to cacao variables at method entry.
+
+ IN:
+ sd...........stack analysis data
+
+*******************************************************************************/
+
+static void stack_init_javalocals(stackdata_t *sd)
+{
+	s4 *jl;
+	s4 type,i,j;
+	methoddesc *md;
+	jitdata *jd;
+
+	jd = sd->jd;
+
+	/* all java locals start out unmapped */
+
+	jl = DMNEW(s4, sd->maxlocals);
+	jd->basicblocks[0].javalocals = jl;
+
+	for (i=0; i<sd->maxlocals; ++i)
+		jl[i] = UNUSED;
+
+	/* map the method parameters; local_map appears to hold 5 entries
+	   (one per basic type) per java local index -- see the *5 indexing
+	   used elsewhere in this file */
+
+	md = jd->m->parseddesc;
+	j = 0;
+	for (i=0; i<md->paramcount; ++i) {
+		type = md->paramtypes[i].type;
+		jl[j] = jd->local_map[5*j + type];
+		j++;
+		/* long/double parameters occupy two java local slots */
+		if (IS_2_WORD_TYPE(type))
+			j++;
+	}
+}
+
+
+/* stack_analyse ***************************************************************
+
+ Analyse_stack uses the intermediate code created by parse.c to
+ build a model of the JVM operand stack for the current method.
+
+ The following checks are performed:
+ - check for operand stack underflow (before each instruction)
+ - check for operand stack overflow (after[1] each instruction)
+ - check for matching stack depth at merging points
+ - check for matching basic types[2] at merging points
+ - check basic types for instruction input (except for BUILTIN*
+ opcodes, INVOKE* opcodes and MULTIANEWARRAY)
+
+ [1]) Checking this after the instruction should be ok. parse.c
+ counts the number of required stack slots in such a way that it is
+ only vital that we don't exceed `maxstack` at basic block
+ boundaries.
+
+ [2]) 'basic types' means the distinction between INT, LONG, FLOAT,
+ DOUBLE and ADDRESS types. Subtypes of INT and different ADDRESS
+ types are not discerned.
+
+*******************************************************************************/
+
+bool stack_analyse(jitdata *jd)
+{
+ methodinfo *m; /* method being analyzed */
+ codeinfo *code;
+ registerdata *rd;
+ stackdata_t sd;
+ int stackdepth;
+ stackelement_t *curstack; /* current stack top */
+ stackelement_t *copy;
+ int opcode; /* opcode of current instruction */
+ int i, varindex;
+ int javaindex;
+ int type; /* operand type */
+ int len; /* # of instructions after the current one */
+ bool superblockend; /* if true, no fallthrough to next block */
+ bool deadcode; /* true if no live code has been reached */
+ instruction *iptr; /* the current instruction */
+ basicblock *tbptr;
+ basicblock *original;
+ exception_entry *ex;
+
+ stackelement_t **last_store_boundary;
+ stackelement_t *coalescing_boundary;
+
+ stackelement_t *src1, *src2, *src3, *src4, *dst1, *dst2;
+
+ branch_target_t *table;
+ lookup_target_t *lookup;
+#if defined(ENABLE_VERIFIER)
+ int expectedtype; /* used by CHECK_BASIC_TYPE */
+#endif
+ builtintable_entry *bte;
+ methoddesc *md;
+ constant_FMIref *fmiref;
+#if defined(ENABLE_STATISTICS)
+ int iteration_count; /* number of iterations of analysis */
+#endif
+ int new_index; /* used to get a new var index with GET_NEW_INDEX*/
+
+#if defined(STACK_VERBOSE)
+ show_method(jd, SHOW_PARSE);
+#endif
+
+ /* get required compiler data - initialization */
+
+ m = jd->m;
+ code = jd->code;
+ rd = jd->rd;
+
+ /* initialize the stackdata_t struct */
+
+ sd.m = m;
+ sd.jd = jd;
+ sd.varcount = jd->varcount;
+ sd.vartop = jd->vartop;
+ sd.localcount = jd->localcount;
+ sd.var = jd->var;
+ sd.varsallocated = sd.varcount;
+ sd.maxlocals = m->maxlocals;
+ sd.javalocals = DMNEW(s4, sd.maxlocals);
+ sd.handlers = DMNEW(exception_entry *, jd->exceptiontablelength + 1);
+
+ /* prepare the variable for exception handler stacks */
+ /* (has been reserved by STACK_EXTRA_VARS, or VERIFIER_EXTRA_VARS) */
+
+ sd.exstack.type = TYPE_ADR;
+ sd.exstack.prev = NULL;
+ sd.exstack.varnum = sd.localcount;
+ sd.var[sd.exstack.varnum].type = TYPE_ADR;
+
+#if defined(ENABLE_STATISTICS)
+ iteration_count = 0;
+#endif
+
+ /* find the last real basic block */
+
+ sd.last_real_block = NULL;
+ tbptr = jd->basicblocks;
+ while (tbptr->next) {
+ sd.last_real_block = tbptr;
+ tbptr = tbptr->next;
+ }
+ assert(sd.last_real_block);
+
+ /* find the last exception handler */
+
+ if (jd->exceptiontablelength)
+ sd.extableend = jd->exceptiontable + jd->exceptiontablelength - 1;
+ else
+ sd.extableend = NULL;
+
+ /* init jd->interface_map */
+
+ jd->maxinterfaces = m->maxstack;
+ jd->interface_map = DMNEW(interface_info, m->maxstack * 5);
+ for (i = 0; i < m->maxstack * 5; i++)
+ jd->interface_map[i].flags = UNUSED;
+
+ last_store_boundary = DMNEW(stackelement_t *, m->maxlocals);
+
+ /* initialize flags and invars (none) of first block */
+
+ jd->basicblocks[0].flags = BBREACHED;
+ jd->basicblocks[0].invars = NULL;
+ jd->basicblocks[0].indepth = 0;
+ jd->basicblocks[0].inlocals =
+ DMNEW(varinfo, jd->localcount + VERIFIER_EXTRA_LOCALS);
+ MCOPY(jd->basicblocks[0].inlocals, jd->var, varinfo,
+ jd->localcount + VERIFIER_EXTRA_LOCALS);
+
+ /* initialize java local mapping of first block */
+
+ stack_init_javalocals(&sd);
+
+ /* stack analysis loop (until fixpoint reached) **************************/
do {
#if defined(ENABLE_STATISTICS)
/* initialize loop over basic blocks */
- b_count = jd->new_basicblockcount;
- sd.bptr = jd->new_basicblocks;
+ sd.bptr = jd->basicblocks;
superblockend = true;
- repeat = false;
- curstack = NULL; stackdepth = 0;
- deadcode = true;
+ sd.repeat = false;
+ curstack = NULL;
+ stackdepth = 0;
+ deadcode = true;
/* iterate over basic blocks *****************************************/
- for (; --b_count >= 0; ++sd.bptr) {
-
-#if defined(STACK_VERBOSE)
- printf("----\nANALYZING BLOCK L%03d ", sd.bptr->nr);
- if (sd.bptr->type == BBTYPE_EXH) printf("EXH\n");
- else if (sd.bptr->type == BBTYPE_SBR) printf("SBR\n");
- else printf("STD\n");
-#endif
+ for (; sd.bptr; sd.bptr = sd.bptr->next) {
if (sd.bptr->flags == BBDELETED) {
/* This block has been deleted - do nothing. */
continue;
}
+ if (sd.bptr->flags == BBTYPECHECK_REACHED) {
+ /* re-analyse a block because its input changed */
+
+ deadcode = false;
+
+ if (!stack_reanalyse_block(&sd))
+ return false;
+
+ superblockend = true; /* XXX */
+ continue;
+ }
+
if (superblockend && (sd.bptr->flags < BBREACHED)) {
- /* This block has not been reached so far, and we */
- /* don't fall into it, so we'll have to iterate again. */
+ /* This block has not been reached so far, and we
+ don't fall into it, so we'll have to iterate
+ again. */
- repeat = true;
+ sd.repeat = true;
continue;
}
continue;
}
+ if (sd.bptr->original && sd.bptr->original->flags < BBFINISHED) {
+ /* This block is a clone and the original has not been
+ analysed, yet. Analyse it on the next
+ iteration. */
+
+ sd.repeat = true;
+ /* XXX superblockend? */
+ continue;
+ }
+
/* This block has to be analysed now. */
+ deadcode = false;
+
/* XXX The rest of this block is still indented one level too */
/* much in order to avoid a giant diff by changing that. */
- if (superblockend) {
- /* We know that sd.bptr->flags == BBREACHED. */
- /* This block has been reached before. */
+ /* We know that sd.bptr->flags == BBREACHED. */
+ /* This block has been reached before. */
- stackdepth = sd.bptr->indepth;
- }
- else if (sd.bptr->flags < BBREACHED) {
- /* This block is reached for the first time now */
- /* by falling through from the previous block. */
- /* Create the instack (propagated). */
+ assert(sd.bptr->flags == BBREACHED);
+ stackdepth = sd.bptr->indepth;
+
+ /* find exception handlers for this block */
+
+ /* determine the active exception handlers for this block */
+ /* XXX could use a faster algorithm with sorted lists or */
+ /* something? */
+
+ original = (sd.bptr->original) ? sd.bptr->original : sd.bptr;
- stack_create_invars(&sd, sd.bptr, curstack, stackdepth);
+ len = 0;
+ ex = jd->exceptiontable;
+ for (; ex != NULL; ex = ex->down) {
+ if ((ex->start <= original) && (ex->end > original)) {
+ sd.handlers[len++] = ex;
+ }
}
- else {
- /* This block has been reached before. now we are */
- /* falling into it from the previous block. */
- /* Check that stack depth is well-defined. */
+ sd.handlers[len] = NULL;
+
+
+ /* reanalyse cloned block */
- if (!stack_check_invars(&sd, sd.bptr, curstack, stackdepth))
+ if (sd.bptr->original) {
+ if (!stack_reanalyse_block(&sd))
return false;
+ continue;
}
/* reset the new pointer for allocating stackslots */
- sd.new = jd->new_stack;
+ sd.new = jd->stack;
/* create the instack of this block */
curstack = stack_create_instack(&sd);
+ /* initialize locals at the start of this block */
+
+ if (sd.bptr->inlocals)
+ MCOPY(sd.var, sd.bptr->inlocals, varinfo, sd.localcount);
+
+ MCOPY(sd.javalocals, sd.bptr->javalocals, s4, sd.maxlocals);
+
/* set up local variables for analyzing this block */
- deadcode = false;
superblockend = false;
len = sd.bptr->icount;
iptr = sd.bptr->iinstr;
- b_index = sd.bptr - jd->new_basicblocks;
/* mark the block as analysed */
/* reset variables for dependency checking */
coalescing_boundary = sd.new;
- for( i = 0; i < cd->maxlocals; i++)
+ for( i = 0; i < m->maxlocals; i++)
last_store_boundary[i] = sd.new;
/* remember the start of this block's variables */
sd.bptr->varstart = sd.vartop;
-
+
#if defined(STACK_VERBOSE)
- printf("INVARS - indices:\t\n");
- for (i=0; i<sd.bptr->indepth; ++i) {
- printf("%d ", sd.bptr->invars[i]);
- }
- printf("\n\n");
+ stack_verbose_block_enter(&sd, false);
#endif
+
+ /* reach exception handlers for this block */
+
+ if (!stack_reach_handlers(&sd))
+ return false;
/* iterate over ICMDs ****************************************/
while (--len >= 0) {
#if defined(STACK_VERBOSE)
- new_show_icmd(jd, iptr, false, SHOW_PARSE); printf("\n");
- for( copy = curstack; copy; copy = copy->prev ) {
- printf("%2d(%d", copy->varnum, copy->type);
- if (IS_OUTVAR(copy))
- printf("S");
- if (IS_PREALLOC(copy))
- printf("A");
- printf(") ");
- }
- printf("\n");
+ stack_verbose_show_state(&sd, iptr, curstack);
#endif
/* fetch the current opcode */
/* automatically replace some ICMDs with builtins */
-#if defined(USEBUILTINTABLE)
- IF_NO_INTRP(
- bte = builtintable_get_automatic(opcode);
+ bte = builtintable_get_automatic(opcode);
- if (bte && bte->opcode == opcode) {
- iptr->opc = ICMD_BUILTIN;
- iptr->flags.bits = 0;
- iptr->sx.s23.s3.bte = bte;
- /* iptr->line is already set */
- jd->isleafmethod = false;
- goto icmd_BUILTIN;
- }
- );
-#endif /* defined(USEBUILTINTABLE) */
+ if ((bte != NULL) && (bte->opcode == opcode)) {
+ iptr->opc = ICMD_BUILTIN;
+ iptr->flags.bits &= INS_FLAG_ID_MASK;
+ iptr->sx.s23.s3.bte = bte;
+ /* iptr->line is already set */
+ code_unflag_leafmethod(code);
+ goto icmd_BUILTIN;
+ }
/* main opcode switch *************************************/
COUNT(count_check_null);
USE_S1(TYPE_ADR);
CLR_SX;
- CLR_DST; /* XXX live through? */
+ iptr->dst.varindex = iptr->s1.varindex;
break;
case ICMD_RET:
- iptr->s1.varindex =
+ varindex = iptr->s1.varindex =
jd->local_map[iptr->s1.varindex * 5 + TYPE_ADR];
+
+#if defined(ENABLE_VERIFIER)
+ if (sd.var[varindex].type != TYPE_RET) {
+ exceptions_throw_verifyerror(m, "RET with non-returnAddress value");
+ return false;
+ }
+#endif
- USE_S1_LOCAL(TYPE_ADR);
CLR_SX;
- CLR_DST;
-#if 0
- IF_NO_INTRP( rd->locals[iptr->s1.localindex/*XXX invalid here*/][TYPE_ADR].type = TYPE_ADR; );
-#endif
+
+ iptr->dst.block = stack_mark_reached(&sd, sd.var[varindex].vv.retaddr, curstack, stackdepth);
superblockend = true;
break;
CLR_SX;
OP0_0;
superblockend = true;
+ sd.jd->returncount++;
+ sd.jd->returnblock = sd.bptr;
break;
case ICMD_BASTORE:
case ICMD_CASTORE:
case ICMD_SASTORE:
- IF_INTRP( goto normal_ICONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.i != 0)
goto normal_ICONST;
case ICMD_PUTSTATIC:
case ICMD_PUTFIELD:
- IF_INTRP( goto normal_ICONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.i != 0)
goto normal_ICONST;
if (iptr[1].flags.bits & INS_FLAG_UNRESOLVED) {
iptr->sx.s23.s3.uf = iptr[1].sx.s23.s3.uf;
iptr->flags.bits |= INS_FLAG_UNRESOLVED;
+ fmiref = iptr->sx.s23.s3.uf->fieldref;
}
else {
- iptr->sx.s23.s3.fmiref = iptr[1].sx.s23.s3.fmiref;
+ fmiref = iptr[1].sx.s23.s3.fmiref;
+ iptr->sx.s23.s3.fmiref = fmiref;
+ }
+
+#if defined(ENABLE_VERIFIER)
+ expectedtype = fmiref->parseddesc.fd->type;
+ switch (iptr[0].opc) {
+ case ICMD_ICONST:
+ if (expectedtype != TYPE_INT)
+ goto throw_stack_type_error;
+ break;
+ case ICMD_LCONST:
+ if (expectedtype != TYPE_LNG)
+ goto throw_stack_type_error;
+ break;
+ case ICMD_ACONST:
+ if (expectedtype != TYPE_ADR)
+ goto throw_stack_type_error;
+ break;
+ default:
+ assert(0);
}
+#endif /* defined(ENABLE_VERIFIER) */
switch (iptr[1].opc) {
case ICMD_PUTSTATIC:
icmd_lconst_lcmp_tail:
/* convert LCONST, LCMP, IFXX to IF_LXX */
- iptr->dst.insindex = iptr[2].dst.insindex;
+ iptr->dst.block = iptr[2].dst.block;
iptr[1].opc = ICMD_NOP;
iptr[2].opc = ICMD_NOP;
OP1_BRANCH(TYPE_LNG);
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
COUNT(count_pcmd_bra);
COUNT(count_pcmd_op);
break;
#if SUPPORT_CONST_STORE
case ICMD_LASTORE:
- IF_INTRP( goto normal_LCONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.l != 0)
goto normal_LCONST;
case ICMD_PUTSTATIC:
case ICMD_PUTFIELD:
- IF_INTRP( goto normal_LCONST; )
# if SUPPORT_CONST_STORE_ZERO_ONLY
if (iptr->sx.val.l != 0)
goto normal_LCONST;
coalescing_boundary = sd.new;
COUNT(count_pcmd_load);
#if SUPPORT_CONST_STORE
- IF_INTRP( goto normal_ACONST; )
-
/* We can only optimize if the ACONST is resolved
* and there is an instruction after it. */
case ICMD_DLOAD:
case ICMD_ALOAD:
COUNT(count_load_instruction);
- i = opcode - ICMD_ILOAD; /* type */
+ type = opcode - ICMD_ILOAD;
- iptr->s1.varindex =
- jd->local_map[iptr->s1.varindex * 5 + i];
-
- LOAD(i, iptr->s1.varindex);
+ varindex = iptr->s1.varindex =
+ jd->local_map[iptr->s1.varindex * 5 + type];
+
+#if defined(ENABLE_VERIFIER)
+ if (sd.var[varindex].type == TYPE_RET) {
+ exceptions_throw_verifyerror(m, "forbidden load of returnAddress");
+ return false;
+ }
+#endif
+ LOAD(type, varindex);
break;
/* pop 2 push 1 */
case ICMD_IINC:
STATISTICS_STACKDEPTH_DISTRIBUTION(count_store_depth);
-
- last_store_boundary[iptr->s1.varindex] = sd.new;
+ javaindex = iptr->s1.varindex;
+ last_store_boundary[javaindex] = sd.new;
iptr->s1.varindex =
- jd->local_map[iptr->s1.varindex * 5 + TYPE_INT];
+ jd->local_map[javaindex * 5 + TYPE_INT];
copy = curstack;
i = stackdepth - 1;
while (copy) {
if ((copy->varkind == LOCALVAR) &&
- (copy->varnum == iptr->s1.varindex))
+ (jd->reverselocalmap[copy->varnum] == javaindex))
{
assert(IS_LOCALVAR(copy));
SET_TEMPVAR(copy);
case ICMD_ASTORE:
REQUIRE(1);
- i = opcode - ICMD_ISTORE; /* type */
+ type = opcode - ICMD_ISTORE;
javaindex = iptr->dst.varindex;
- j = iptr->dst.varindex =
- jd->local_map[javaindex * 5 + i];
+ varindex = iptr->dst.varindex =
+ jd->local_map[javaindex * 5 + type];
+
+ COPY_VAL_AND_TYPE(sd, curstack->varnum, varindex);
+
+ iptr->sx.s23.s3.javaindex = javaindex;
+ if (curstack->type == TYPE_RET) {
+ iptr->flags.bits |= INS_FLAG_RETADDR;
+ iptr->sx.s23.s2.retaddrnr =
+ JAVALOCAL_FROM_RETADDR(sd.var[varindex].vv.retaddr->nr);
+ sd.javalocals[javaindex] = iptr->sx.s23.s2.retaddrnr;
+ }
+ else
+ sd.javalocals[javaindex] = varindex;
+
+ /* invalidate the following javalocal for 2-word types */
+
+ if (IS_2_WORD_TYPE(type)) {
+ sd.javalocals[javaindex+1] = UNUSED;
+ iptr->flags.bits |= INS_FLAG_KILL_NEXT;
+ }
+
+ /* invalidate 2-word types if second half was overwritten */
+
+ if (javaindex > 0 && (i = sd.javalocals[javaindex-1]) >= 0) {
+ if (IS_2_WORD_TYPE(sd.var[i].type)) {
+ sd.javalocals[javaindex-1] = UNUSED;
+ iptr->flags.bits |= INS_FLAG_KILL_PREV;
+ }
+ }
#if defined(ENABLE_STATISTICS)
if (opt_stat) {
count_store_depth[i]++;
}
#endif
+
/* check for conflicts as described in Figure 5.2 */
copy = curstack->prev;
i = stackdepth - 2;
while (copy) {
if ((copy->varkind == LOCALVAR) &&
- (copy->varnum == j))
+ (jd->reverselocalmap[copy->varnum] == javaindex))
{
- copy->varkind = TEMPVAR;
assert(IS_LOCALVAR(copy));
SET_TEMPVAR(copy);
}
/* if the variable is already coalesced, don't bother */
- if (IS_OUTVAR(curstack)
- || (curstack->varkind == LOCALVAR
- && curstack->varnum != j))
+ /* We do not need to check against INOUT, as invars */
+ /* are always before the coalescing boundary. */
+
+ if (curstack->varkind == LOCALVAR)
goto store_tail;
/* there is no STORE Lj while curstack is live */
if (curstack < coalescing_boundary)
goto assume_conflict;
- /* there is no DEF LOCALVAR(j) while curstack is live */
+ /* there is no DEF LOCALVAR(varindex) while curstack is live */
copy = sd.new; /* most recent stackslot created + 1 */
while (--copy > curstack) {
- if (copy->varkind == LOCALVAR && copy->varnum == j)
+ if (copy->varkind == LOCALVAR && jd->reverselocalmap[copy->varnum] == javaindex)
goto assume_conflict;
}
/* coalesce the temporary variable with Lj */
assert((curstack->varkind == TEMPVAR)
|| (curstack->varkind == UNDEFVAR));
- assert(!IS_LOCALVAR(curstack));
- assert(!IS_OUTVAR(curstack));
+ assert(!IS_LOCALVAR(curstack)); /* XXX correct? */
+ assert(!IS_INOUT(curstack));
assert(!IS_PREALLOC(curstack));
assert(curstack->creator);
assert(curstack->creator->dst.varindex == curstack->varnum);
+ assert(!(curstack->flags & PASSTHROUGH));
RELEASE_INDEX(sd, curstack);
curstack->varkind = LOCALVAR;
- curstack->varnum = j;
- curstack->creator->dst.varindex = j;
+ curstack->varnum = varindex;
+ curstack->creator->dst.varindex = varindex;
goto store_tail;
/* revert the coalescing, if it has been done earlier */
assume_conflict:
if ((curstack->varkind == LOCALVAR)
- && (curstack->varnum == j))
+ && (jd->reverselocalmap[curstack->varnum] == javaindex))
{
assert(IS_LOCALVAR(curstack));
SET_TEMPVAR(curstack);
store_tail:
last_store_boundary[javaindex] = sd.new;
- STORE(opcode - ICMD_ISTORE, j);
+ if (opcode == ICMD_ASTORE && curstack->type == TYPE_RET)
+ STORE(TYPE_RET, varindex);
+ else
+ STORE(opcode - ICMD_ISTORE, varindex);
break;
/* pop 3 push 0 */
COUNT(count_check_bound);
COUNT(count_pcmd_mem);
- bte = builtintable_get_internal(BUILTIN_canstore);
+ bte = builtintable_get_internal(BUILTIN_FAST_canstore);
md = bte->md;
if (md->memuse > rd->memuse)
case ICMD_DRETURN:
case ICMD_ARETURN:
coalescing_boundary = sd.new;
- IF_JIT( md_return_alloc(jd, curstack); )
+					/* Assert here that no LOCALs or INOUTs get */
+					/* preallocated, since the macros are not */
+					/* available in md-abi.c! */
+ if (IS_TEMPVAR(curstack))
+ md_return_alloc(jd, curstack);
COUNT(count_pcmd_return);
OP1_0(opcode - ICMD_IRETURN);
superblockend = true;
+ sd.jd->returncount++;
+ sd.jd->returnblock = sd.bptr;
break;
case ICMD_ATHROW:
case ICMD_IFNONNULL:
COUNT(count_pcmd_bra);
OP1_BRANCH(TYPE_ADR);
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
break;
case ICMD_IFEQ:
OP1_BRANCH(TYPE_INT);
/* iptr->sx.val.i = 0; */
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
break;
/* pop 0 push 0 branch */
case ICMD_GOTO:
COUNT(count_pcmd_bra);
OP0_BRANCH;
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
superblockend = true;
break;
OP1_BRANCH(TYPE_INT);
table = iptr->dst.table;
- BRANCH_TARGET(*table, tbptr, copy);
+ BRANCH_TARGET(*table, tbptr);
table++;
i = iptr->sx.s23.s3.tablehigh
- iptr->sx.s23.s2.tablelow + 1;
while (--i >= 0) {
- BRANCH_TARGET(*table, tbptr, copy);
+ BRANCH_TARGET(*table, tbptr);
table++;
}
superblockend = true;
COUNT(count_pcmd_table);
OP1_BRANCH(TYPE_INT);
- BRANCH_TARGET(iptr->sx.s23.s3.lookupdefault, tbptr, copy);
+ BRANCH_TARGET(iptr->sx.s23.s3.lookupdefault, tbptr);
lookup = iptr->dst.lookup;
i = iptr->sx.s23.s2.lookupcount;
while (--i >= 0) {
- BRANCH_TARGET(lookup->target, tbptr, copy);
+ BRANCH_TARGET(lookup->target, tbptr);
lookup++;
}
superblockend = true;
case ICMD_IF_ICMPLE:
COUNT(count_pcmd_bra);
OP2_BRANCH(TYPE_INT, TYPE_INT);
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
break;
case ICMD_IF_ACMPEQ:
case ICMD_IF_ACMPNE:
COUNT(count_pcmd_bra);
OP2_BRANCH(TYPE_ADR, TYPE_ADR);
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
break;
/* pop 2 push 0 */
POPANY; POPANY;
stackdepth -= 2;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+
DUP_SLOT(src2); dst1 = curstack; stackdepth++;
MOVE_UP(src1); iptr++; len--;
POPANY; POPANY; POPANY;
stackdepth -= 3;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+
DUP_SLOT(src2); dst1 = curstack; stackdepth++;
DUP_SLOT(src3); dst2 = curstack; stackdepth++;
POPANY; POPANY; POPANY;
stackdepth -= 3;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+
DUP_SLOT(src3); dst1 = curstack; stackdepth++;
MOVE_UP(src1); iptr++; len--;
POPANY; POPANY; POPANY; POPANY;
stackdepth -= 4;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src2)) {
+ MOVE_TO_TEMP(src2); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src3)) {
+ MOVE_TO_TEMP(src3); iptr++; len--;
+ }
+ if (!IS_TEMPVAR(src4)) {
+ MOVE_TO_TEMP(src4); iptr++; len--;
+ }
+
DUP_SLOT(src3); dst1 = curstack; stackdepth++;
DUP_SLOT(src4); dst2 = curstack; stackdepth++;
POPANY; POPANY;
stackdepth -= 2;
+ /* move non-temporary sources out of the way */
+ if (!IS_TEMPVAR(src1)) {
+ MOVE_TO_TEMP(src1); iptr++; len--;
+ }
+
MOVE_UP(src2); iptr++; len--;
MOVE_UP(src1);
case ICMD_IFEQ:
iptr->opc = ICMD_IF_LCMPEQ;
icmd_lcmp_if_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
+ iptr->dst.block = iptr[1].dst.block;
iptr[1].opc = ICMD_NOP;
OP2_BRANCH(TYPE_LNG, TYPE_LNG);
- BRANCH(tbptr, copy);
+ BRANCH(tbptr);
COUNT(count_pcmd_bra);
break;
OP2_1(TYPE_LNG, TYPE_LNG, TYPE_INT);
break;
- /* XXX why is this deactivated? */
-#if 0
- case ICMD_FCMPL:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_FCMPL;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_FCMPEQ;
- icmd_if_fcmpl_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_FLT, TYPE_FLT);
- BRANCH(tbptr, copy);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_FCMPNE;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_FCMPL_LT;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_FCMPL_GT;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_FCMPL_LE;
- goto icmd_if_fcmpl_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_FCMPL_GE;
- goto icmd_if_fcmpl_tail;
- default:
- goto normal_FCMPL;
- }
- break;
-
-normal_FCMPL:
- OPTT2_1(TYPE_FLT, TYPE_FLT, TYPE_INT);
- break;
-
- case ICMD_FCMPG:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_FCMPG;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_FCMPEQ;
- icmd_if_fcmpg_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_FLT, TYPE_FLT);
- BRANCH(tbptr, copy);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_FCMPNE;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_FCMPG_LT;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_FCMPG_GT;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_FCMPG_LE;
- goto icmd_if_fcmpg_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_FCMPG_GE;
- goto icmd_if_fcmpg_tail;
- default:
- goto normal_FCMPG;
- }
- break;
-
-normal_FCMPG:
- OP2_1(TYPE_FLT, TYPE_FLT, TYPE_INT);
- break;
-
- case ICMD_DCMPL:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_DCMPL;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_DCMPEQ;
- icmd_if_dcmpl_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_DBL, TYPE_DBL);
- BRANCH(tbptr, copy);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_DCMPNE;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_DCMPL_LT;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_DCMPL_GT;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_DCMPL_LE;
- goto icmd_if_dcmpl_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_DCMPL_GE;
- goto icmd_if_dcmpl_tail;
- default:
- goto normal_DCMPL;
- }
- break;
-
-normal_DCMPL:
- OPTT2_1(TYPE_DBL, TYPE_INT);
- break;
-
- case ICMD_DCMPG:
- COUNT(count_pcmd_op);
- if ((len == 0) || (iptr[1].sx.val.i != 0))
- goto normal_DCMPG;
-
- switch (iptr[1].opc) {
- case ICMD_IFEQ:
- iptr->opc = ICMD_IF_DCMPEQ;
- icmd_if_dcmpg_tail:
- iptr->dst.insindex = iptr[1].dst.insindex;
- iptr[1].opc = ICMD_NOP;
-
- OP2_BRANCH(TYPE_DBL, TYPE_DBL);
- BRANCH(tbptr, copy);
-
- COUNT(count_pcmd_bra);
- break;
- case ICMD_IFNE:
- iptr->opc = ICMD_IF_DCMPNE;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFLT:
- iptr->opc = ICMD_IF_DCMPG_LT;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFGT:
- iptr->opc = ICMD_IF_DCMPG_GT;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFLE:
- iptr->opc = ICMD_IF_DCMPG_LE;
- goto icmd_if_dcmpg_tail;
- case ICMD_IFGE:
- iptr->opc = ICMD_IF_DCMPG_GE;
- goto icmd_if_dcmpg_tail;
- default:
- goto normal_DCMPG;
- }
- break;
-
-normal_DCMPG:
- OP2_1(TYPE_DBL, TYPE_DBL, TYPE_INT);
- break;
-#else
case ICMD_FCMPL:
case ICMD_FCMPG:
COUNT(count_pcmd_op);
COUNT(count_pcmd_op);
OP2_1(TYPE_DBL, TYPE_DBL, TYPE_INT);
break;
-#endif
/* pop 1 push 1 */
break;
case ICMD_JSR:
- OP0_1(TYPE_ADR);
-
- BRANCH_TARGET(iptr->sx.s23.s3.jsrtarget, tbptr, copy);
+ OP0_1(TYPE_RET);
+ tbptr = iptr->sx.s23.s3.jsrtarget.block;
tbptr->type = BBTYPE_SBR;
+ assert(sd.bptr->next); /* XXX exception */
+ sd.var[curstack->varnum].vv.retaddr = sd.bptr->next;
+#if defined(ENABLE_VERIFIER)
+ sd.var[curstack->varnum].SBRSTART = (void*) tbptr;
+#endif
+
+ tbptr = stack_mark_reached(&sd, tbptr, curstack, stackdepth);
+ if (tbptr == NULL)
+ return false;
+
+ iptr->sx.s23.s3.jsrtarget.block = tbptr;
+
				/* We need to check for overflow right here because
				 * the pushed value is popped afterwards */
CHECKOVERFLOW;
- /* calculate stack after return */
- POPANY;
- stackdepth--;
+ superblockend = true;
+ /* XXX should not be marked as interface, as it does not need to be */
+ /* allocated. Same for the invar of the target. */
break;
/* pop many push any */
REQUIRE(i);
- /* XXX optimize for <= 2 args */
- /* XXX not for ICMD_BUILTIN */
iptr->s1.argcount = stackdepth;
iptr->sx.s23.s2.args = DMNEW(s4, stackdepth);
/* do not change STACKVARs or LOCALVARS to ARGVAR*/
/* -> won't help anyway */
- if (!(IS_OUTVAR(copy) || IS_LOCALVAR(copy))) {
+ if (!(IS_INOUT(copy) || IS_LOCALVAR(copy))) {
#if defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
/* If we pass float arguments in integer argument registers, we
SET_PREALLOC(copy);
-#if defined(ENABLE_INTRP)
- if (!opt_intrp) {
-#endif
- if (md->params[i].inmemory) {
- sd.var[copy->varnum].vv.regoff =
- md->params[i].regoff;
- sd.var[copy->varnum].flags |=
- INMEMORY;
- }
- else {
- if (IS_FLT_DBL_TYPE(copy->type)) {
+ if (md->params[i].inmemory) {
+ sd.var[copy->varnum].vv.regoff =
+ md->params[i].regoff;
+ sd.var[copy->varnum].flags |=
+ INMEMORY;
+ }
+ else {
+ if (IS_FLT_DBL_TYPE(copy->type)) {
#if defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
- assert(0); /* XXX is this assert ok? */
+ assert(0); /* XXX is this assert ok? */
#else
- sd.var[copy->varnum].vv.regoff =
- rd->argfltregs[md->params[i].regoff];
+ sd.var[copy->varnum].vv.regoff =
+ md->params[i].regoff;
#endif /* SUPPORT_PASS_FLOATARGS_IN_INTREGS */
- }
- else {
+ }
+ else {
#if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
- if (IS_2_WORD_TYPE(copy->type))
- sd.var[copy->varnum].vv.regoff =
- PACK_REGS( rd->argintregs[GET_LOW_REG(md->params[i].regoff)],
- rd->argintregs[GET_HIGH_REG(md->params[i].regoff)]);
+ if (IS_2_WORD_TYPE(copy->type))
+ sd.var[copy->varnum].vv.regoff =
+ PACK_REGS(GET_LOW_REG(md->params[i].regoff),
+ GET_HIGH_REG(md->params[i].regoff));
- else
+ else
#endif /* SUPPORT_COMBINE_INTEGER_REGISTERS */
- sd.var[copy->varnum].vv.regoff =
- rd->argintregs[md->params[i].regoff];
- }
+ sd.var[copy->varnum].vv.regoff =
+ md->params[i].regoff;
}
-#if defined(ENABLE_INTRP)
- } /* end if (!opt_intrp) */
-#endif
+ }
}
}
copy = copy->prev;
/* deal with live-through stack slots "under" the */
/* arguments */
- /* XXX not for ICMD_BUILTIN */
i = md->paramcount;
while (copy) {
- SET_TEMPVAR(copy);
iptr->sx.s23.s2.args[i++] = copy->varnum;
sd.var[copy->varnum].flags |= SAVEDVAR;
+ copy->flags |= SAVEDVAR | PASSTHROUGH;
copy = copy->prev;
}
}
break;
- case ICMD_INLINE_START:
- case ICMD_INLINE_END:
- CLR_S1;
- CLR_DST;
- break;
-
case ICMD_MULTIANEWARRAY:
coalescing_boundary = sd.new;
- if (rd->argintreguse < 3)
- rd->argintreguse = 3;
+ if (rd->argintreguse < MIN(3, INT_ARG_CNT))
+ rd->argintreguse = MIN(3, INT_ARG_CNT);
i = iptr->s1.argcount;
/* check INT type here? Currently typecheck does this. */
iptr->sx.s23.s2.args[i] = copy->varnum;
if (!(sd.var[copy->varnum].flags & SAVEDVAR)
- && (!IS_OUTVAR(copy))
+ && (!IS_INOUT(copy))
&& (!IS_LOCALVAR(copy)) ) {
copy->varkind = ARGVAR;
sd.var[copy->varnum].flags |=
}
while (copy) {
sd.var[copy->varnum].flags |= SAVEDVAR;
+ copy->flags |= SAVEDVAR;
copy = copy->prev;
}
break;
default:
- *exceptionptr =
- new_internalerror("Unknown ICMD %d", opcode);
+ exceptions_throw_internalerror("Unknown ICMD %d during stack analysis",
+ opcode);
return false;
} /* switch */
iptr++;
} /* while instructions */
+ /* show state after last instruction */
+
+#if defined(STACK_VERBOSE)
+ stack_verbose_show_state(&sd, NULL, curstack);
+#endif
+
/* stack slots at basic block end become interfaces */
sd.bptr->outdepth = stackdepth;
/* create an unresolvable conflict */
SET_TEMPVAR(copy);
+ type = copy->type;
v = sd.var + copy->varnum;
- v->flags |= OUTVAR;
+ v->flags |= INOUT;
- if (jd->interface_map[i*5 + copy->type].flags == UNUSED) {
- /* no interface var until now for this depth and */
- /* type */
- jd->interface_map[i*5 + copy->type].flags = v->flags;
- }
- else {
- jd->interface_map[i*5 + copy->type].flags |= v->flags;
+ /* do not allocate variables for returnAddresses */
+
+ if (type != TYPE_RET) {
+ if (jd->interface_map[i*5 + type].flags == UNUSED) {
+ /* no interface var until now for this depth and */
+ /* type */
+ jd->interface_map[i*5 + type].flags = v->flags;
+ }
+ else {
+ jd->interface_map[i*5 + type].flags |= v->flags;
+ }
}
sd.bptr->outvars[i] = copy->varnum;
}
/* check if interface slots at basic block begin must be saved */
- IF_NO_INTRP(
- for (i=0; i<sd.bptr->indepth; ++i) {
- varinfo *v = sd.var + sd.bptr->invars[i];
- if (jd->interface_map[i*5 + v->type].flags == UNUSED) {
+ for (i=0; i<sd.bptr->indepth; ++i) {
+ varinfo *v = sd.var + sd.bptr->invars[i];
+
+ type = v->type;
+
+ if (type != TYPE_RET) {
+ if (jd->interface_map[i*5 + type].flags == UNUSED) {
/* no interface var until now for this depth and */
/* type */
- jd->interface_map[i*5 + v->type].flags = v->flags;
+ jd->interface_map[i*5 + type].flags = v->flags;
}
else {
- jd->interface_map[i*5 + v->type].flags |= v->flags;
+ jd->interface_map[i*5 + type].flags |= v->flags;
}
}
- );
+ }
/* store the number of this block's variables */
sd.bptr->varcount = sd.vartop - sd.bptr->varstart;
#if defined(STACK_VERBOSE)
- printf("OUTVARS\n");
- /* XXX print something useful here */
- printf("\n");
+ stack_verbose_block_exit(&sd, superblockend);
#endif
+ /* reach the following block, if any */
+
+ if (!superblockend)
+ if (!stack_reach_next_block(&sd))
+ return false;
+
} /* for blocks */
- } while (repeat && !deadcode);
+ } while (sd.repeat && !deadcode);
+
+ /* reset locals of TYPE_RET|VOID to TYPE_ADR */
+
+ /* A local variable may be used as both a returnAddress and a reference */
+ /* type variable, as we do not split variables between these types when */
+ /* renaming locals. While returnAddresses have been eliminated now, we */
+ /* must assume that the variable is still used as TYPE_ADR. */
+ /* The only way that a local can be TYPE_VOID at this point, is that it */
+ /* was a TYPE_RET variable for which incompatible returnAddresses were */
+ /* merged. Thus we must treat TYPE_VOID in the same way as TYPE_RET */
+ /* here. */
+ /* XXX: It would be nice to remove otherwise unused returnAddress */
+ /* variables from the local variable array, so they are not */
+ /* allocated by simplereg. (For LSRA this is not needed). */
+
+ for (i=0; i<sd.localcount; ++i) {
+ if (sd.var[i].type == TYPE_RET || sd.var[i].type == TYPE_VOID)
+ sd.var[i].type = TYPE_ADR;
+ }
+
+ /* mark temporaries of TYPE_RET as PREALLOC to avoid allocation */
+
+ for (i=sd.localcount; i<sd.vartop; ++i) {
+ if (sd.var[i].type == TYPE_RET)
+ sd.var[i].flags |= PREALLOC;
+ }
+
+ /* XXX hack to fix up the ranges of the cloned single-block handlers */
+
+ ex = jd->exceptiontable;
+ for (; ex != NULL; ex = ex->down) {
+ if (ex->start == ex->end) {
+ assert(ex->end->next);
+ ex->end = ex->end->next;
+ }
+ }
+
+ /* store number of created variables */
+
+ jd->vartop = sd.vartop;
/* gather statistics *****************************************************/
#if defined(ENABLE_STATISTICS)
if (opt_stat) {
- if (jd->new_basicblockcount > count_max_basic_blocks)
- count_max_basic_blocks = jd->new_basicblockcount;
- count_basic_blocks += jd->new_basicblockcount;
- if (jd->new_instructioncount > count_max_javainstr)
- count_max_javainstr = jd->new_instructioncount;
- count_javainstr += jd->new_instructioncount;
- if (jd->new_stackcount > count_upper_bound_new_stack)
- count_upper_bound_new_stack = jd->new_stackcount;
- if ((sd.new - jd->new_stack) > count_max_new_stack)
- count_max_new_stack = (sd.new - jd->new_stack);
-
- b_count = jd->new_basicblockcount;
- sd.bptr = jd->new_basicblocks;
- while (--b_count >= 0) {
+ if (jd->basicblockcount > count_max_basic_blocks)
+ count_max_basic_blocks = jd->basicblockcount;
+ count_basic_blocks += jd->basicblockcount;
+ if (jd->instructioncount > count_max_javainstr)
+ count_max_javainstr = jd->instructioncount;
+ count_javainstr += jd->instructioncount;
+ if (jd->stackcount > count_upper_bound_new_stack)
+ count_upper_bound_new_stack = jd->stackcount;
+ if ((sd.new - jd->stack) > count_max_new_stack)
+ count_max_new_stack = (sd.new - jd->stack);
+
+ sd.bptr = jd->basicblocks;
+ for (; sd.bptr; sd.bptr = sd.bptr->next) {
if (sd.bptr->flags > BBREACHED) {
if (sd.bptr->indepth >= 10)
count_block_stack[10]++;
else
count_block_size_distribution[17]++;
}
- sd.bptr++;
}
if (iteration_count == 1)
else
count_analyse_iterations[4]++;
- if (jd->new_basicblockcount <= 5)
+ if (jd->basicblockcount <= 5)
count_method_bb_distribution[0]++;
- else if (jd->new_basicblockcount <= 10)
+ else if (jd->basicblockcount <= 10)
count_method_bb_distribution[1]++;
- else if (jd->new_basicblockcount <= 15)
+ else if (jd->basicblockcount <= 15)
count_method_bb_distribution[2]++;
- else if (jd->new_basicblockcount <= 20)
+ else if (jd->basicblockcount <= 20)
count_method_bb_distribution[3]++;
- else if (jd->new_basicblockcount <= 30)
+ else if (jd->basicblockcount <= 30)
count_method_bb_distribution[4]++;
- else if (jd->new_basicblockcount <= 40)
+ else if (jd->basicblockcount <= 40)
count_method_bb_distribution[5]++;
- else if (jd->new_basicblockcount <= 50)
+ else if (jd->basicblockcount <= 50)
count_method_bb_distribution[6]++;
- else if (jd->new_basicblockcount <= 75)
+ else if (jd->basicblockcount <= 75)
count_method_bb_distribution[7]++;
else
count_method_bb_distribution[8]++;
exceptions_throw_verifyerror(m, "Stack size too large");
return false;
-throw_stack_depth_error:
- exceptions_throw_verifyerror(m,"Stack depth mismatch");
- return false;
-
throw_stack_type_error:
exceptions_throw_verifyerror_for_stack(m, expectedtype);
return false;
}
+/* stack_javalocals_store ******************************************************
+
+ Model the effect of a ?STORE instruction upon the given javalocals array.
+
+ IN:
+ iptr.............the ?STORE instruction
+ javalocals.......the javalocals array to modify
+
+*******************************************************************************/
+
+void stack_javalocals_store(instruction *iptr, s4 *javalocals)
+{
+	s4 varindex;     /* index into the jd->var array */
+	s4 javaindex;    /* java local index */
+
+	varindex = iptr->dst.varindex;
+	javaindex = iptr->sx.s23.s3.javaindex;
+
+	if (javaindex != UNUSED) {
+		assert(javaindex >= 0);
+		/* returnAddress stores record a retaddr number instead of a */
+		/* variable index (see INS_FLAG_RETADDR handling in the      */
+		/* ?STORE analysis)                                          */
+		if (iptr->flags.bits & INS_FLAG_RETADDR)
+			javalocals[javaindex] = iptr->sx.s23.s2.retaddrnr;
+		else
+			javalocals[javaindex] = varindex;
+
+		/* a store may overwrite the second half of a preceding      */
+		/* 2-word local (KILL_PREV), and a 2-word store invalidates  */
+		/* the following java local (KILL_NEXT)                      */
+		if (iptr->flags.bits & INS_FLAG_KILL_PREV)
+			javalocals[javaindex-1] = UNUSED;
+
+		if (iptr->flags.bits & INS_FLAG_KILL_NEXT)
+			javalocals[javaindex+1] = UNUSED;
+	}
+}
+
+
+/* functions for verbose stack analysis output ********************************/
+
+#if defined(STACK_VERBOSE)
+/* Print the one-letter type of the given variable. For a TYPE_RET   */
+/* variable, also print the basic block its returnAddress refers to  */
+/* (and, with the verifier enabled, the subroutine start block).     */
+
+static void stack_verbose_show_varinfo(stackdata_t *sd, varinfo *v)
+{
+	printf("%c", show_jit_type_letters[v->type]);
+	if (v->type == TYPE_RET) {
+		printf("{L%03d}", v->vv.retaddr->nr);
+#if defined(ENABLE_VERIFIER)
+		printf("{start=L%03d}", ((basicblock *)v->SBRSTART)->nr);
+#endif
+	}
+}
+
+
+/* Bounds-checked wrapper: print the variable at the given index of  */
+/* the sd->var array.                                                */
+
+static void stack_verbose_show_variable(stackdata_t *sd, s4 index)
+{
+	assert(index >= 0 && index < sd->vartop);
+	stack_verbose_show_varinfo(sd, sd->var + index);
+}
+
+
+/* Print a one-line summary of a basic block: its number, type,      */
+/* in-stack (invars), java locals, in-locals, and out-stack          */
+/* (outvars), plus clone/copy relationships, if any.                 */
+
+static void stack_verbose_show_block(stackdata_t *sd, basicblock *bptr)
+{
+	s4 i;
+
+	printf("L%03d type:%d in:%d [", bptr->nr, bptr->type, bptr->indepth);
+	if (bptr->invars) {
+		for (i=0; i<bptr->indepth; ++i) {
+			if (i)
+				putchar(' ');
+			stack_verbose_show_variable(sd, bptr->invars[i]);
+		}
+	}
+	else
+		putchar('-');
+	printf("] javalocals ");
+	show_javalocals_array(sd->jd, sd->javalocals, sd->maxlocals, SHOW_STACK);
+	printf(" inlocals [");
+	if (bptr->inlocals) {
+		for (i=0; i<sd->localcount; ++i) {
+			if (i)
+				putchar(' ');
+			stack_verbose_show_varinfo(sd, bptr->inlocals + i);
+		}
+	}
+	else
+		putchar('-');
+	printf("] out:%d [", bptr->outdepth);
+	if (bptr->outvars) {
+		for (i=0; i<bptr->outdepth; ++i) {
+			if (i)
+				putchar(' ');
+			stack_verbose_show_variable(sd, bptr->outvars[i]);
+		}
+	}
+	else
+		putchar('-');
+	printf("]");
+
+	/* show whether this block was cloned from, or copied to,        */
+	/* another block                                                 */
+	if (bptr->original)
+		printf(" (clone of L%03d)", bptr->original->nr);
+	else {
+		basicblock *b = bptr->copied_to;
+		if (b) {
+			printf(" (copied to ");
+			for (; b; b = b->copied_to)
+				printf("L%03d ", b->nr);
+			printf(")");
+		}
+	}
+}
+
+
+/* Print a banner when the analysis enters a basic block. The banner */
+/* distinguishes a first analysis, a re-analysis, and the analysis   */
+/* of a cloned block (a reanalysed block with no instructions), and  */
+/* lists the block's active exception handlers.                      */
+
+static void stack_verbose_block_enter(stackdata_t *sd, bool reanalyse)
+{
+	int i;
+
+	printf("======================================== STACK %sANALYSE BLOCK ",
+		   (reanalyse) ? ((sd->bptr->iinstr == NULL) ? "CLONE-" : "RE-") : "");
+	stack_verbose_show_block(sd, sd->bptr);
+	printf("\n");
+
+	if (sd->handlers[0]) {
+		printf("HANDLERS: ");
+		for (i=0; sd->handlers[i]; ++i) {
+			printf("L%03d ", sd->handlers[i]->handler->nr);
+		}
+		printf("\n");
+	}
+	printf("\n");
+}
+
+
+/* Print a trailer when the analysis leaves a basic block, marking   */
+/* whether the block ended a superblock.                             */
+
+static void stack_verbose_block_exit(stackdata_t *sd, bool superblockend)
+{
+	printf("%s ", (superblockend) ? "SUPERBLOCKEND" : "END");
+	stack_verbose_show_block(sd, sd->bptr);
+	printf("\n");
+}
+
+/* Print the current analysis state: the javalocals array and the    */
+/* operand stack from bottom to top, followed by the instruction     */
+/* about to be processed (iptr may be NULL, e.g. at block end).      */
+
+static void stack_verbose_show_state(stackdata_t *sd, instruction *iptr, stackelement_t *curstack)
+{
+	stackelement_t *sp;
+	s4 i;
+	s4 depth;
+	varinfo *v;
+	stackelement_t **stack;
+
+	printf(" javalocals ");
+	show_javalocals_array(sd->jd, sd->javalocals, sd->maxlocals, SHOW_STACK);
+	printf(" stack [");
+
+	/* count the stack depth */
+	for(i = 0, sp = curstack; sp; sp = sp->prev)
+		i++;
+	depth = i;
+
+	/* copy the linked stack into an array so it can be printed      */
+	/* bottom-up (NOTE(review): MNEW presumably allocates from dump  */
+	/* memory, hence no matching free -- confirm)                    */
+	stack = MNEW(stackelement_t *, depth);
+	for(sp = curstack; sp; sp = sp->prev)
+		stack[--i] = sp;
+
+	for(i=0; i<depth; ++i) {
+		if (i)
+			putchar(' ');
+		sp = stack[i];
+		v = &(sd->var[sp->varnum]);
+
+		/* flag letters: I = INOUT, A = PREALLOC */
+		if (v->flags & INOUT)
+			putchar('I');
+		if (v->flags & PREALLOC)
+			putchar('A');
+		printf("%d:%c", sp->varnum, show_jit_type_letters[sp->type]);
+		if (v->type == TYPE_RET) {
+			printf("(L%03d)", v->vv.retaddr->nr);
+		}
+	}
+	printf("] ... ");
+	if (iptr)
+		show_icmd(sd->jd, iptr, false, SHOW_PARSE);
+	printf("\n");
+}
+#endif
+
+
/*
* These are local overrides for various environment variables in Emacs.
* Please do not remove this and leave it at the end of the file, where