memory. All functions writing values into the data area return the offset
relative the begin of the code area (start of procedure).
- $Id: codegen-common.c 7300 2007-02-07 22:06:53Z pm $
+ $Id: codegen-common.c 7797 2007-04-23 20:12:39Z michi $
*/
#include "native/jni.h"
#include "native/native.h"
-#if defined(ENABLE_THREADS)
-# include "threads/native/threads.h"
-#endif
+#include "threads/threads-common.h"
+#include "vm/builtin.h"
#include "vm/exceptions.h"
#include "vm/stringlocal.h"
+#include "vm/jit/abi.h"
#include "vm/jit/asmpart.h"
#include "vm/jit/codegen-common.h"
#endif
#include "vm/jit/dseg.h"
+#include "vm/jit/emit-common.h"
#include "vm/jit/jit.h"
#include "vm/jit/md.h"
-#include "vm/jit/stacktrace.h"
#include "vm/jit/replace.h"
+#include "vm/jit/stacktrace.h"
#if defined(ENABLE_INTRP)
#include "vm/jit/intrp/intrp.h"
#include "vmcore/method.h"
#include "vmcore/options.h"
-#if defined(ENABLE_STATISTICS)
# include "vmcore/statistics.h"
+
+#if defined(ENABLE_VMLOG)
+#include <vmlog_cacao.h>
#endif
mte = NEW(methodtree_element);
mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
- mte->endpc = (u1 *) ((ptrint) asm_call_jit_compiler - 1);
+ mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
avl_insert(methodtree, mte);
#endif /* defined(ENABLE_JIT) */
m = jd->m;
cd = jd->cd;
- cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
- cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
- cd->mcodesize = MCODEINITSIZE;
+ /* initialize members */
+
+ cd->flags = 0;
+
+ cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
+ cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
+ cd->mcodesize = MCODEINITSIZE;
/* initialize mcode variables */
cd->jumpreferences = NULL;
-#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(ENABLE_INTRP)
+#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
cd->datareferences = NULL;
#endif
- cd->exceptionrefs = NULL;
/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
cd->patchrefs = NULL;
+ cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
cd->linenumberreferences = NULL;
cd->linenumbertablesizepos = 0;
cd->linenumbertablestartpos = 0;
cd->linenumbertab = 0;
- cd->method = m;
+#if defined(ENABLE_THREADS)
+ cd->threadcritcurrent.next = NULL;
+ cd->threadcritcount = 0;
+#endif
+}
+
+
+/* codegen_reset ***************************************************************
+
+ Resets the codegen data structure so we can recompile the method.
+
+*******************************************************************************/
+
+static void codegen_reset(jitdata *jd)
+{
+ codeinfo *code;
+ codegendata *cd;
+ basicblock *bptr;
+
+ /* get required compiler data */
+
+ code = jd->code;
+ cd = jd->cd;
+
+ /* reset error flag */
+
+ cd->flags &= ~CODEGENDATA_FLAG_ERROR;
+
+ /* reset some members, we reuse the code memory already allocated
+ as this should have almost the correct size */
- cd->maxstack = m->maxstack;
+ cd->mcodeptr = cd->mcodebase;
+ cd->lastmcodeptr = cd->mcodebase;
+ cd->dseg = NULL;
+ cd->dseglen = 0;
+
+ cd->jumpreferences = NULL;
+
+#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
+ cd->datareferences = NULL;
+#endif
+
+/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
+ cd->patchrefs = NULL;
+ cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
+
+ cd->linenumberreferences = NULL;
+ cd->linenumbertablesizepos = 0;
+ cd->linenumbertablestartpos = 0;
+ cd->linenumbertab = 0;
+
#if defined(ENABLE_THREADS)
cd->threadcritcurrent.next = NULL;
- cd->threadcritcount = 0;
+ cd->threadcritcount = 0;
+#endif
+
+ /* We need to clear the mpc and the branch references from all
+ basic blocks as they will definitely change. */
+
+ for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
+ bptr->mpc = -1;
+ bptr->branchrefs = NULL;
+ }
+
+#if defined(ENABLE_REPLACEMENT)
+ code->rplpoints = NULL;
+ code->rplpointcount = 0;
+ code->regalloc = NULL;
+ code->regalloccount = 0;
+ code->globalcount = 0;
+#endif
+}
+
+
+/* codegen_generate ************************************************************
+
+ Generates the code for the currently compiled method.
+
+*******************************************************************************/
+
+bool codegen_generate(jitdata *jd)
+{
+ codegendata *cd;
+
+ /* get required compiler data */
+
+ cd = jd->cd;
+
+ /* call the machine-dependent code generation function */
+
+ if (!codegen_emit(jd))
+ return false;
+
+ /* check for an error */
+
+ if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
+ /* check for the long-branches flag; if it is set we recompile the
+ method */
+
+#if !defined(NDEBUG)
+ if (compileverbose)
+ log_message_method("Re-generating code: ", jd->m);
+#endif
+
+ /* XXX maybe we should tag long-branches-methods for recompilation */
+
+ if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
+ /* we have to reset the codegendata structure first */
+
+ codegen_reset(jd);
+
+ /* and restart the compiler run */
+
+ if (!codegen_emit(jd))
+ return false;
+ }
+ else {
+ vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
+ }
+
+#if !defined(NDEBUG)
+ if (compileverbose)
+ log_message_method("Re-generating code done: ", jd->m);
#endif
+ }
+
+ /* reallocate the memory and finish the code generation */
+
+ codegen_finish(jd);
+
+ /* everything's ok */
+
+ return true;
}
*******************************************************************************/
-void codegen_add_branch_ref(codegendata *cd, basicblock *target)
+void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
{
- s4 branchmpc;
+ branchref *br;
+ s4 branchmpc;
+
+ STATISTICS(count_branches_unresolved++);
/* calculate the mpc of the branch instruction */
branchmpc = cd->mcodeptr - cd->mcodebase;
-#if defined(ENABLE_JIT)
- /* Check if the target basicblock has already a start pc, so the
- jump is backward and we can resolve it immediately. */
-
- if ((target->mpc >= 0)
-# if defined(ENABLE_INTRP)
- /* The interpreter uses absolute branches, so we do branch
- resolving after the code and data segment move. */
-
- && !opt_intrp
-# endif
- )
- {
- md_codegen_patch_branch(cd, branchmpc, target->mpc);
- }
- else
-#endif
- {
- branchref *br = DNEW(branchref);
+ br = DNEW(branchref);
- br->branchpos = branchmpc;
- br->next = target->branchrefs;
+ br->branchmpc = branchmpc;
+ br->condition = condition;
+ br->reg = reg;
+ br->options = options;
+ br->next = target->branchrefs;
- target->branchrefs = br;
- }
+ target->branchrefs = br;
}
void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
{
branchref *br;
- s4 branchmpc;
- s4 targetmpc;
-
- /* set target */
-
- targetmpc = bptr->mpc;
-
- for (br = bptr->branchrefs; br != NULL; br = br->next) {
- branchmpc = br->branchpos;
-
- md_codegen_patch_branch(cd, branchmpc, targetmpc);
- }
-}
-
-
-/* codegen_add_exception_ref ***************************************************
-
- Prepends an exception branch to the list.
-
-*******************************************************************************/
-
-static void codegen_add_exception_ref(codegendata *cd, s4 reg,
- functionptr function)
-{
- s4 branchmpc;
- exceptionref *er;
-
- branchmpc = cd->mcodeptr - cd->mcodebase;
-
- er = DNEW(exceptionref);
-
- er->branchpos = branchmpc;
- er->reg = reg;
- er->function = function;
-
- er->next = cd->exceptionrefs;
-
- cd->exceptionrefs = er;
-}
-
-
-/* codegen_add_arithmeticexception_ref *****************************************
-
- Adds an ArithmeticException branch to the list.
-
-*******************************************************************************/
+ u1 *mcodeptr;
-void codegen_add_arithmeticexception_ref(codegendata *cd)
-{
- codegen_add_exception_ref(cd, -1, STACKTRACE_inline_arithmeticexception);
-}
+ /* Save the mcodeptr because in the branch emitting functions
+ we generate code somewhere inside already generated code,
+ but we're still in the actual code generation phase. */
+ mcodeptr = cd->mcodeptr;
-/* codegen_add_arrayindexoutofboundsexception_ref ******************************
+ /* just to make sure */
- Adds an ArrayIndexOutOfBoundsException branch to the list.
+ assert(bptr->mpc >= 0);
-*******************************************************************************/
-
-void codegen_add_arrayindexoutofboundsexception_ref(codegendata *cd, s4 reg)
-{
- codegen_add_exception_ref(cd, reg,
- STACKTRACE_inline_arrayindexoutofboundsexception);
-}
+ for (br = bptr->branchrefs; br != NULL; br = br->next) {
+ /* temporarily set the mcodeptr */
+ cd->mcodeptr = cd->mcodebase + br->branchmpc;
-/* codegen_add_arraystoreexception_ref *****************************************
+ /* emit_bccz and emit_branch emit the correct code, even if we
+ pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
- Adds an ArrayStoreException branch to the list.
+ emit_bccz(cd, bptr, br->condition, br->reg, br->options);
+ }
-*******************************************************************************/
+ /* restore mcodeptr */
-void codegen_add_arraystoreexception_ref(codegendata *cd)
-{
- codegen_add_exception_ref(cd, -1, STACKTRACE_inline_arraystoreexception);
+ cd->mcodeptr = mcodeptr;
}
-/* codegen_add_classcastexception_ref ******************************************
+/* codegen_branch_label_add ****************************************************
- Adds an ClassCastException branch to the list.
+ Append a branch to the label-branch list.
*******************************************************************************/
-void codegen_add_classcastexception_ref(codegendata *cd, s4 reg)
+void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
{
- codegen_add_exception_ref(cd, reg, STACKTRACE_inline_classcastexception);
-}
-
-
-/* codegen_add_nullpointerexception_ref ****************************************
+ list_t *list;
+ branch_label_ref_t *br;
+ s4 mpc;
- Adds an NullPointerException branch to the list.
-
-*******************************************************************************/
+ /* get the label list */
-void codegen_add_nullpointerexception_ref(codegendata *cd)
-{
- codegen_add_exception_ref(cd, -1, STACKTRACE_inline_nullpointerexception);
-}
+ list = cd->brancheslabel;
+
+ /* calculate the current mpc */
+ mpc = cd->mcodeptr - cd->mcodebase;
-/* codegen_add_fillinstacktrace_ref ********************************************
+ br = DNEW(branch_label_ref_t);
- Adds a fillInStackTrace branch to the list.
+ br->mpc = mpc;
+ br->label = label;
+ br->condition = condition;
+ br->reg = reg;
+ br->options = options;
-*******************************************************************************/
+ /* add the branch to the list */
-void codegen_add_fillinstacktrace_ref(codegendata *cd)
-{
- codegen_add_exception_ref(cd, -1, STACKTRACE_inline_fillInStackTrace);
+ list_add_last_unsynced(list, br);
}
pr->next = cd->patchrefs;
cd->patchrefs = pr;
-#if defined(ENABLE_JIT) && (defined(__ALPHA__) || defined(__MIPS__) || defined(__POWERPC__) || defined(__X86_64__) || defined(__S390__))
/* Generate NOPs for opt_shownops. */
if (opt_shownops)
PATCHER_NOPS;
-#endif
#if defined(ENABLE_JIT) && (defined(__I386__) || defined(__MIPS__) || defined(__X86_64__))
/* On some architectures the patcher stub call instruction might
otherwise the avl_probe sometimes thinks the element is already in the
tree */
- if ((long) mte->startpc <= (long) mtepc->startpc &&
- (long) mtepc->startpc <= (long) mte->endpc &&
- (long) mte->startpc <= (long) mtepc->endpc &&
- (long) mtepc->endpc <= (long) mte->endpc) {
+#ifdef __S390__
+ /* On S390 addresses are 31 bit. Compare only 31 bits of value.
+ */
+# define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
+#else
+# define ADDR_MASK(a) (a)
+#endif
+
+ if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
+ ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
+ ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
+ ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
return 0;
- } else if ((long) mtepc->startpc < (long) mte->startpc) {
+ } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
return -1;
} else {
return 1;
}
+
+# undef ADDR_MASK
}
if (mte == NULL) {
/* No method was found. Let's dump a stacktrace. */
+#if defined(ENABLE_VMLOG)
+ vmlog_cacao_signl("SIGSEGV");
+#endif
+
log_println("We received a SIGSEGV and tried to handle it, but we were");
log_println("unable to find a Java method at:");
log_println("");
log_println("");
log_println("Dumping the current stacktrace:");
- stacktrace_dump_trace(THREADOBJECT);
+#if defined(ENABLE_THREADS)
+ /* XXX michi: This should be available even without threads! */
+ threads_print_stacktrace();
+#endif
vm_abort("Exiting...");
}
codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
-#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(ENABLE_INTRP)
+#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
/* resolve data segment references */
dseg_resolve_datareferences(jd);
}
-/* codegen_createnativestub ****************************************************
+/* codegen_generate_stub_compiler **********************************************
+
+ Wrapper for codegen_emit_stub_compiler.
+
+ Returns:
+ pointer to the compiler stub code.
+
+*******************************************************************************/
+
+u1 *codegen_generate_stub_compiler(methodinfo *m)
+{
+ jitdata *jd;
+ codegendata *cd;
+ ptrint *d; /* pointer to data memory */
+ u1 *c; /* pointer to code memory */
+ s4 dumpsize;
+
+ /* mark dump memory */
+
+ dumpsize = dump_size();
+
+ /* allocate required data structures */
+
+ jd = DNEW(jitdata);
+
+ jd->m = m;
+ jd->cd = DNEW(codegendata);
+ jd->flags = 0;
+
+ /* get required compiler data */
+
+ cd = jd->cd;
+
+ /* allocate code memory */
+
+ c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* set pointers correctly */
+
+ d = (ptrint *) c;
+
+ cd->mcodebase = c;
+
+ c = c + 3 * SIZEOF_VOID_P;
+ cd->mcodeptr = c;
+
+ /* NOTE: The codeinfo pointer is actually a pointer to the
+ methodinfo (this fakes a codeinfo structure). */
+
+ d[0] = (ptrint) asm_call_jit_compiler;
+ d[1] = (ptrint) m;
+ d[2] = (ptrint) &d[1]; /* fake code->m */
+
+ /* call the emit function */
+
+ codegen_emit_stub_compiler(jd);
+
+#if defined(ENABLE_STATISTICS)
+ if (opt_stat)
+ count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
+#endif
+
+ /* flush caches */
+
+ md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* release dump memory */
+
+ dump_release(dumpsize);
+
+ /* return the compiler stub code */
+
+ return c;
+}
+
+
+/* codegen_generate_stub_builtin ***********************************************
+
+ Wrapper for codegen_emit_stub_builtin.
+
+ Returns:
+ Pointer to the entrypoint of the stub.
+
+*******************************************************************************/
+
+void codegen_generate_stub_builtin(builtintable_entry *bte)
+{
+ jitdata *jd;
+ codeinfo *code;
+ s4 dumpsize;
+
+ /* mark dump memory */
+
+ dumpsize = dump_size();
+
+ jd = DNEW(jitdata);
+
+ jd->m = NULL;
+ jd->cd = DNEW(codegendata);
+ jd->rd = NULL;
+ jd->flags = 0;
+
+ /* Allocate codeinfo memory from the heap as we need to keep them. */
+
+ jd->code = code_codeinfo_new(NULL);
+
+ /* get required compiler data */
+
+ code = jd->code;
+
+ /* setup code generation stuff */
+
+ codegen_setup(jd);
+
+ /* generate the code */
+
+#if defined(ENABLE_JIT)
+# if defined(ENABLE_INTRP)
+ if (!opt_intrp)
+# endif
+ codegen_emit_stub_builtin(jd, bte);
+#endif
+
+ /* reallocate the memory and finish the code generation */
+
+ codegen_finish(jd);
+
+ /* set the stub entry point in the builtin table */
+
+ bte->stub = code->entrypoint;
+
+#if defined(ENABLE_STATISTICS)
+ if (opt_stat)
+ count_nstub_len += code->mcodelength;
+#endif
+
+ /* release memory */
+
+ dump_release(dumpsize);
+}
+
+
+/* codegen_generate_stub_native ************************************************
- Wrapper for createnativestub.
+ Wrapper for codegen_emit_stub_native.
Returns:
the codeinfo representing the stub code.
*******************************************************************************/
-codeinfo *codegen_createnativestub(functionptr f, methodinfo *m)
+codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
{
jitdata *jd;
codeinfo *code;
# if defined(ENABLE_INTRP)
if (!opt_intrp)
# endif
- md_param_alloc(nmd);
+ /* pre-allocate the arguments for the native ABI */
+
+ md_param_alloc_native(nmd);
#endif
/* generate the code */
#if defined(ENABLE_JIT)
# if defined(ENABLE_INTRP)
if (opt_intrp)
- code->entrypoint = intrp_createnativestub(f, jd, nmd);
+ intrp_createnativestub(f, jd, nmd);
else
# endif
- code->entrypoint = createnativestub(f, jd, nmd);
+ codegen_emit_stub_native(jd, nmd, f);
#else
- code->entrypoint = intrp_createnativestub(f, jd, nmd);
+ intrp_createnativestub(f, jd, nmd);
#endif
+ /* reallocate the memory and finish the code generation */
+
+ codegen_finish(jd);
+
#if defined(ENABLE_STATISTICS)
if (opt_stat)
count_nstub_len += code->mcodelength;
#endif
+/* codegen_stub_builtin_enter **************************************************
+
+ Prepares the stuff required for a builtin function call:
+
+ - adds a stackframe info structure to the chain, for stacktraces
+
+ The layout of the builtin stub stackframe should look like this:
+
+ +---------------------------+ <- SP (of parent Java function)
+ | return address |
+ +---------------------------+
+ | |
+ | stackframe info structure |
+ | |
+ +---------------------------+
+ | |
+ | arguments (if any) |
+ | |
+ +---------------------------+ <- SP (native stub)
+
+*******************************************************************************/
+
+void codegen_stub_builtin_enter(u1 *datasp, u1 *pv, u1 *sp, u1 *ra)
+{
+ stackframeinfo *sfi;
+
+ /* get data structures from stack */
+
+ sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
+
+ /* add a stackframeinfo to the chain */
+
+ stacktrace_create_native_stackframeinfo(sfi, pv, sp, ra);
+
+#if defined(ENABLE_THREADS) && defined(ENABLE_GC_CACAO)
+ /* set the native world flag */
+
+ THREADOBJECT->flags |= THREAD_FLAG_IN_NATIVE;
+#endif
+}
+
+
+/* codegen_stub_builtin_exit ***************************************************
+
+ Removes the stuff required for a builtin function call.
+
+*******************************************************************************/
+
+void codegen_stub_builtin_exit(u1 *datasp)
+{
+ stackframeinfo *sfi;
+ stackframeinfo **psfi;
+
+ /* get data structures from stack */
+
+ sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
+
+ /* remove current stackframeinfo from chain */
+
+ psfi = &STACKFRAMEINFO;
+
+ *psfi = sfi->prev;
+
+#if defined(ENABLE_THREADS) && defined(ENABLE_GC_CACAO)
+ /* clear the native world flag */
+
+ THREADOBJECT->flags &= ~THREAD_FLAG_IN_NATIVE;
+#endif
+}
+
+
/* codegen_start_native_call ***************************************************
Prepares the stuff required for a native (JNI) function call:
LOCALREFTABLE = lrt;
#endif
+
+#if defined(ENABLE_THREADS) && defined(ENABLE_GC_CACAO)
+ /* set the native world flag */
+
+ THREADOBJECT->flags |= THREAD_FLAG_IN_NATIVE;
+#endif
}
{
stackframeinfo *sfi;
stackframeinfo **psfi;
+#if defined(ENABLE_JAVASE)
localref_table *lrt;
localref_table *plrt;
s4 localframes;
+#endif
java_objectheader *e;
/* get data structures from stack */
sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
- lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
- sizeof(localref_table));
+
+#if defined(ENABLE_THREADS) && defined(ENABLE_GC_CACAO)
+ /* clear the native world flag */
+
+ THREADOBJECT->flags &= ~THREAD_FLAG_IN_NATIVE;
+#endif
/* remove current stackframeinfo from chain */
- psfi = STACKFRAMEINFO;
+ psfi = &STACKFRAMEINFO;
*psfi = sfi->prev;