memory. All functions writing values into the data area return the offset
relative to the beginning of the code area (start of procedure).
- $Id: codegen-common.c 4921 2006-05-15 14:24:36Z twisti $
+ $Id: codegen-common.c 5404 2006-09-07 13:29:05Z christian $
*/
/* in this tree we store all method addresses *********************************/
-#if defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC)
static avl_tree *methodtree = NULL;
static s4 methodtree_comparator(const void *pc, const void *element);
-#endif
/* codegen_init ****************************************************************
void codegen_init(void)
{
-#if defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC)
/* this tree is global, not method specific */
if (!methodtree) {
avl_insert(methodtree, mte);
#endif /* defined(ENABLE_JIT) */
}
-#endif /* defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC) */
}
cd->superstarts = NULL;
}
#endif
-
- cd->dsegtop = DMNEW(u1, DSEGINITSIZE);
- cd->dsegsize = DSEGINITSIZE;
- cd->dsegtop += cd->dsegsize;
- cd->dseglen = 0;
+
+ cd->dseg = NULL;
+ cd->dseglen = 0;
cd->jumpreferences = NULL;
*******************************************************************************/
-void codegen_add_classcastexception_ref(codegendata *cd)
+void codegen_add_classcastexception_ref(codegendata *cd, s4 reg)
{
- codegen_add_exception_ref(cd, -1, STACKTRACE_inline_classcastexception);
+ codegen_add_exception_ref(cd, reg, STACKTRACE_inline_classcastexception);
}
patchref *pr;
s4 branchpos;
- branchpos = (u1 *) cd->mcodeptr - cd->mcodebase;
+ branchpos = cd->mcodeptr - cd->mcodebase;
pr = DNEW(patchref);
the basic block code generation is completed, we check the
range and maybe generate some nop's. */
- cd->lastmcodeptr = ((u1 *) cd->mcodeptr) + PATCHER_CALL_SIZE;
+ cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
#endif
}
-#if defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC)
/* methodtree_comparator *******************************************************
- XXX
+ Comparator function used for the AVL tree of methods.
*******************************************************************************/
/* codegen_insertmethod ********************************************************
- XXX
+ Insert the machine code range of a method into the AVL tree of methods.
*******************************************************************************/
}
-/* codegen_findmethod **********************************************************
+/* codegen_get_pv_from_pc ******************************************************
- XXX
+ Find the PV for the given PC by searching in the AVL tree of
+ methods.
*******************************************************************************/
-u1 *codegen_findmethod(u1 *pc)
+u1 *codegen_get_pv_from_pc(u1 *pc)
{
methodtree_element mtepc;
methodtree_element *mte;
mte = avl_find(methodtree, &mtepc);
- if (!mte) {
- printf("Cannot find Java function at %p\n", (void *) (ptrint) pc);
- assert(0);
+ if (mte == NULL) {
+ /* No method was found. Let's dump a stacktrace. */
+
+ log_println("We received a SIGSEGV and tried to handle it, but we were");
+ log_println("unable to find a Java method at:");
+ log_println("");
+#if SIZEOF_VOID_P == 8
+ log_println("PC=0x%016lx", pc);
+#else
+ log_println("PC=0x%08x", pc);
+#endif
+ log_println("");
+ log_println("Dumping the current stacktrace:");
- throw_cacao_exception_exit(string_java_lang_InternalError,
- "Cannot find Java function at %p", pc);
+ stacktrace_dump_trace();
+
+ vm_abort("Exiting...");
}
return mte->startpc;
}
-#endif /* defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC) */
+
+
+/* codegen_get_pv_from_pc_nocheck **********************************************
+
+   Find the PV for the given PC by searching in the AVL tree of
+   methods.  This method does not check the return value and is used
+   by the profiler.
+
+*******************************************************************************/
+
+u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
+{
+	methodtree_element  lookup;
+	methodtree_element *found;
+
+	/* allocation of the search structure on the stack is much faster */
+
+	lookup.startpc = pc;
+	lookup.endpc   = pc;
+
+	found = avl_find(methodtree, &lookup);
+
+	/* a failed lookup is not fatal here; simply report it via NULL */
+
+	return (found != NULL) ? found->startpc : NULL;
+}
/* codegen_finish **************************************************************
/* calculate the code length */
- mcodelen = (s4) ((u1 *) cd->mcodeptr - cd->mcodebase);
+ mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
#if defined(ENABLE_THREADS)
extralen = sizeof(critical_section_node_t) * cd->threadcritcount;
/* allocate new memory */
code->mcodelength = mcodelen + cd->dseglen;
- code->mcode = CNEW(u1, alignedlen + extralen);
-
- /* copy data and code to their new location */
-
- MCOPY((void *) code->mcode, cd->dsegtop - cd->dseglen, u1, cd->dseglen);
- MCOPY((void *) (code->mcode + cd->dseglen), cd->mcodebase, u1, mcodelen);
+ code->mcode = CNEW(u1, alignedlen + extralen);
/* set the entrypoint of the method */
assert(code->entrypoint == NULL);
code->entrypoint = epoint = (code->mcode + cd->dseglen);
+ /* fill the data segment (code->entrypoint must already be set!) */
+
+ dseg_finish(jd);
+
+ /* copy code to the new location */
+
+ MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
+
#if defined(ENABLE_INTRP)
/* relocate native dynamic superinstruction code (if any) */
}
}
-#if defined(__I386__) || defined(__X86_64__) || defined(ENABLE_INTRP) || defined(DISABLE_GC)
/* add method into methodtree to find the entrypoint */
codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
-#endif
-
#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(ENABLE_INTRP)
/* resolve data segment references */
dseg_resolve_datareferences(jd);
#endif
-
#if defined(ENABLE_THREADS)
{
critical_section_node_t *n = (critical_section_node_t *) ((ptrint) code->mcode + alignedlen);
code = jd->code;
+ /* set the flags for the current JIT run */
+
+ if (opt_prof)
+ jd->flags |= JITDATA_FLAG_INSTRUMENT;
+
+ if (opt_verbosecall)
+ jd->flags |= JITDATA_FLAG_VERBOSECALL;
+
/* setup code generation stuff */
#if defined(ENABLE_JIT)
/* codegen_finish_native_call **************************************************
Removes the stuff required for a native (JNI) function call.
+ Additionally it checks for an exception and, in case there is one,
+ gets the exception object and clears the exception pointer.
*******************************************************************************/
-void codegen_finish_native_call(u1 *datasp)
+java_objectheader *codegen_finish_native_call(u1 *datasp)
{
- stackframeinfo *sfi;
- stackframeinfo **psfi;
- localref_table *lrt;
- localref_table *plrt;
- s4 localframes;
+ stackframeinfo *sfi;
+ stackframeinfo **psfi;
+ localref_table *lrt;
+ localref_table *plrt;
+ s4 localframes;
+ java_objectheader *e;
/* get data structures from stack */
/* now store the previous local frames in the thread structure */
LOCALREFTABLE = lrt;
+
+ /* get the exception and return it */
+
+ e = exceptions_get_and_clear_exception();
+
+ return e;
}
*******************************************************************************/
-s4 codegen_reg_of_var(registerdata *rd, u2 opcode, stackptr v, s4 tempregnum)
+#if defined(NEW_VAR)
+/* codegen_reg_of_var **********************************************************
+
+   Returns the register in which the value of the given variable should
+   be computed: the register assigned to the variable if it is held in
+   a register, or the given temporary register if the variable is
+   spilled to memory.
+
+*******************************************************************************/
+
+s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
+{
+
+#if 0
+	/* Do we have to generate a conditional move? Yes, then always
+	   return the temporary register. The real register is identified
+	   during the store. */
+
+	if (opcode & ICMD_CONDITION_MASK)
+		return tempregnum;
+#endif
+
+	if (!(v->flags & INMEMORY)) {
+#if defined(__ARM__) && defined(__ARMEL__)
+		/* on ARM a long/double may be split across a register and the
+		   stack (HIGH_REG == REG_SPLIT); combine the variable's real
+		   low register with the temporary high register */
+		if (IS_2_WORD_TYPE(v->type) && (GET_HIGH_REG(v->regoff) == REG_SPLIT))
+			return(PACK_REGS(GET_LOW_REG(v->regoff),
+							 GET_HIGH_REG(tempregnum)));
+#endif
+#if defined(__ARM__) && defined(__ARMEB__)
+		if (IS_2_WORD_TYPE(v->type) && (GET_LOW_REG(v->regoff) == REG_SPLIT))
+			return(PACK_REGS(GET_LOW_REG(tempregnum),
+							 GET_HIGH_REG(v->regoff)));
+#endif
+		return(v->regoff);
+	}
+
+#if defined(ENABLE_STATISTICS)
+	if (opt_stat)
+		count_spills_read++;
+#endif
+
+	return tempregnum;
+}
+#else
+s4 codegen_reg_of_var(u2 opcode, stackptr v, s4 tempregnum)
{
varinfo *var;
+#if 0
/* Do we have to generate a conditional move? Yes, then always
return the temporary register. The real register is identified
during the store. */
if (opcode & ICMD_CONDITION_MASK)
return tempregnum;
+#endif
switch (v->varkind) {
case TEMPVAR:
return tempregnum;
}
+#endif
+
+
+/* codegen_reg_of_dst **********************************************************
+
+   This function determines a register, to which the result of an
+   operation should go, when it is ultimately intended to store the
+   result in iptr->dst.var.  If dst.var is assigned to an actual
+   register, this register will be returned.  Otherwise (when it is
+   spilled) this function returns tempregnum.  If not already done,
+   regoff and flags are set in the stack location.
+
+   On ARM we have to check if a long/double variable is split
+   across reg/stack (HIGH_REG == REG_SPLIT).  We return the actual
+   register of dst.var for LOW_REG and the tempregnum for HIGH_REG in
+   such cases.  (michi 2005/07/24)
+
+*******************************************************************************/
+
+s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
+{
+	varinfo *dstvar;
+
+	dstvar = &jd->var[iptr->dst.varindex];
+
+	/* spilled destination: the result goes into the temporary register */
+
+	if (dstvar->flags & INMEMORY) {
+#if defined(ENABLE_STATISTICS)
+		if (opt_stat)
+			count_spills_read++;
+#endif
+		/* Not necessary anymore - either dstvar is in memory or not.
+		   Setting the flag again won't change anything. */
+		dstvar->flags |= INMEMORY;
+
+		return tempregnum;
+	}
+
+#if defined(__ARM__) && defined(__ARMEL__)
+	if (IS_2_WORD_TYPE(dstvar->type) && (GET_HIGH_REG(dstvar->regoff) == REG_SPLIT))
+		return(PACK_REGS(GET_LOW_REG(dstvar->regoff),
+						 GET_HIGH_REG(tempregnum)));
+#endif
+#if defined(__ARM__) && defined(__ARMEB__)
+	if (IS_2_WORD_TYPE(dstvar->type) && (GET_LOW_REG(dstvar->regoff) == REG_SPLIT))
+		return(PACK_REGS(GET_LOW_REG(tempregnum),
+						 GET_HIGH_REG(dstvar->regoff)));
+#endif
+	return(dstvar->regoff);
+}
#if defined(ENABLE_THREADS)