/* src/vm/jit/codegen-common.c - architecture independent code generator stuff
- Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
- C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
- E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
- J. Wenninger, Institut f. Computersprachen - TU Wien
+ Copyright (C) 1996-2005, 2006, 2007, 2008
+ CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
This file is part of CACAO.
#include "vm/types.h"
-#if defined(ENABLE_JIT)
-/* this is required PATCHER_CALL_SIZE */
-# include "codegen.h"
-#endif
-
+#include "codegen.h"
+#include "md.h"
#include "md-abi.h"
#include "mm/memory.h"
#include "native/localref.h"
#include "native/native.h"
+#if defined(WITH_JAVA_RUNTIME_LIBRARY_OPENJDK)
+# include "native/include/java_lang_Object.h"
+# include "native/include/java_lang_String.h" /* required by java_lang_ClassLoader.h */
+# include "native/include/java_nio_ByteBuffer.h" /* required by java_lang_ClassLoader.h */
+# include "native/include/java_lang_ClassLoader.h"
+#endif
+
+#if defined(WITH_JAVA_RUNTIME_LIBRARY_CLDC1_1)
+# include "native/include/java_lang_String.h"
+#endif
+
#include "native/include/java_lang_Class.h"
-#include "threads/threads-common.h"
+#include "threads/thread.h"
#include "vm/builtin.h"
#include "vm/exceptions.h"
#include "vm/jit/abi.h"
#include "vm/jit/asmpart.h"
+#include "vm/jit/code.h"
#include "vm/jit/codegen-common.h"
#if defined(ENABLE_DISASSEMBLER)
#include "vm/jit/dseg.h"
#include "vm/jit/emit-common.h"
#include "vm/jit/jit.h"
-#include "vm/jit/md.h"
+#include "vm/jit/linenumbertable.h"
#include "vm/jit/methodheader.h"
+#include "vm/jit/methodtree.h"
#include "vm/jit/patcher-common.h"
#include "vm/jit/replace.h"
#if defined(ENABLE_SSA)
#include "show.h"
-/* in this tree we store all method addresses *********************************/
-
-static avl_tree_t *methodtree = NULL;
-static s4 methodtree_comparator(const void *treenode, const void *node);
-
/* codegen_init ****************************************************************
void codegen_init(void)
{
- /* this tree is global, not method specific */
-
- if (!methodtree) {
-#if defined(ENABLE_JIT)
- methodtree_element *mte;
-#endif
-
- methodtree = avl_create(&methodtree_comparator);
-
-#if defined(ENABLE_JIT)
- /* insert asm_vm_call_method */
-
- mte = NEW(methodtree_element);
-
- mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
- mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
-
- avl_insert(methodtree, mte);
-#endif /* defined(ENABLE_JIT) */
-
- }
-
}
cd->datareferences = NULL;
#endif
-/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
- cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
-
- cd->linenumberreferences = NULL;
- cd->linenumbertablesizepos = 0;
- cd->linenumbertablestartpos = 0;
- cd->linenumbertab = 0;
+ cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
}
cd->datareferences = NULL;
#endif
-/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
- cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
-
- cd->linenumberreferences = NULL;
- cd->linenumbertablesizepos = 0;
- cd->linenumbertablestartpos = 0;
- cd->linenumbertab = 0;
+ cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
/* We need to clear the mpc and the branch references from all
basic blocks as they will definitely change. */
void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
{
- list_t *list;
+ list_t *l;
branch_label_ref_t *br;
s4 mpc;
- /* get the label list */
+ /* Get the label list. */
- list = cd->brancheslabel;
+ l = cd->brancheslabel;
/* calculate the current mpc */
br->reg = reg;
br->options = options;
- /* add the branch to the list */
-
- list_add_last_unsynced(list, br);
-}
-
-
-/* codegen_add_patch_ref *******************************************************
-
- Appends a new patcher reference to the list of patching positions.
-
-*******************************************************************************/
-
-void codegen_add_patch_ref(codegendata *cd, functionptr patcher, voidptr ref,
- s4 disp)
-{
- patchref *pr;
- s4 branchmpc;
-
- branchmpc = cd->mcodeptr - cd->mcodebase;
-
- pr = DNEW(patchref);
-
- pr->branchpos = branchmpc;
- pr->disp = disp;
- pr->patcher = patcher;
- pr->ref = ref;
+ /* Add the branch to the list. */
-/* list_add_first(cd->patchrefs, pr); */
- pr->next = cd->patchrefs;
- cd->patchrefs = pr;
-
- /* Generate NOPs for opt_shownops. */
-
- if (opt_shownops)
- PATCHER_NOPS;
-
-#if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
- || defined(__SPARC_64__) || defined(__X86_64__))
-
- /* On some architectures the patcher stub call instruction might
- be longer than the actual instruction generated. On this
- architectures we store the last patcher call position and after
- the basic block code generation is completed, we check the
- range and maybe generate some nop's. */
- /* The nops are generated in codegen_emit in each codegen */
-
- cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
-#endif
+ list_add_last(l, br);
}
#if defined(ENABLE_THREADS)
void codegen_critical_section_new(codegendata *cd)
{
- list_t *list;
+ list_t *l;
critical_section_ref_t *csr;
s4 mpc;
- /* get the critical section list */
+ /* Get the critical section list. */
- list = cd->listcritical;
+ l = cd->listcritical;
/* calculate the current mpc */
csr->end = -1;
csr->restart = mpc;
- /* add the branch to the list */
+ /* Add the branch to the list. */
- list_add_last_unsynced(list, csr);
+ list_add_last(l, csr);
}
#endif
#if defined(ENABLE_THREADS)
void codegen_critical_section_start(codegendata *cd)
{
- list_t *list;
+ list_t *l;
critical_section_ref_t *csr;
s4 mpc;
- /* get the critical section list */
+ /* Get the critical section list. */
- list = cd->listcritical;
+ l = cd->listcritical;
/* calculate the current mpc */
mpc = cd->mcodeptr - cd->mcodebase;
- /* get the current critical section */
+ /* Get the current critical section. */
- csr = list_last_unsynced(list);
+ csr = list_last(l);
/* set the start point */
#if defined(ENABLE_THREADS)
void codegen_critical_section_end(codegendata *cd)
{
- list_t *list;
+ list_t *l;
critical_section_ref_t *csr;
s4 mpc;
- /* get the critical section list */
+ /* Get the critical section list. */
- list = cd->listcritical;
+ l = cd->listcritical;
/* calculate the current mpc */
mpc = cd->mcodeptr - cd->mcodebase;
- /* get the current critical section */
+ /* Get the current critical section. */
- csr = list_last_unsynced(list);
+ csr = list_last(l);
/* set the end point */
{
codeinfo *code;
codegendata *cd;
- list_t *list;
+ list_t *l;
critical_section_ref_t *csr;
critical_section_node_t *csn;
code = jd->code;
cd = jd->cd;
- /* get the critical section list */
+ /* Get the critical section list. */
- list = cd->listcritical;
+ l = cd->listcritical;
/* iterate over all critical sections */
- for (csr = list_first_unsynced(list); csr != NULL;
- csr = list_next_unsynced(list, csr)) {
+ for (csr = list_first(l); csr != NULL; csr = list_next(l, csr)) {
/* check if all points are set */
assert(csr->start != -1);
#endif
-/* methodtree_comparator *******************************************************
-
- Comparator function used for the AVL tree of methods.
-
- ARGUMENTS:
- treenode....the node from the tree
- node........the node to compare to the tree-node
-
-*******************************************************************************/
-
-static s4 methodtree_comparator(const void *treenode, const void *node)
-{
- methodtree_element *mte;
- methodtree_element *mtepc;
-
- mte = (methodtree_element *) treenode;
- mtepc = (methodtree_element *) node;
-
- /* compare both startpc and endpc of pc, even if they have the same value,
- otherwise the avl_probe sometimes thinks the element is already in the
- tree */
-
-#ifdef __S390__
- /* On S390 addresses are 31 bit. Compare only 31 bits of value.
- */
-# define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
-#else
-# define ADDR_MASK(a) (a)
-#endif
-
- if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
- ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
- ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
- ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
- return 0;
-
- } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
- return -1;
-
- } else {
- return 1;
- }
-
-# undef ADDR_MASK
-}
-
-
-/* codegen_insertmethod ********************************************************
-
- Insert the machine code range of a method into the AVL tree of methods.
-
-*******************************************************************************/
-
-void codegen_insertmethod(u1 *startpc, u1 *endpc)
-{
- methodtree_element *mte;
-
- /* allocate new method entry */
-
- mte = NEW(methodtree_element);
-
- mte->startpc = startpc;
- mte->endpc = endpc;
-
- /* this function does not return an error, but asserts for
- duplicate entries */
-
- avl_insert(methodtree, mte);
-}
-
-
-/* codegen_get_pv_from_pc ******************************************************
-
- Find the PV for the given PC by searching in the AVL tree of
- methods.
-
-*******************************************************************************/
-
-u1 *codegen_get_pv_from_pc(u1 *pc)
-{
- methodtree_element mtepc;
- methodtree_element *mte;
-
- /* allocation of the search structure on the stack is much faster */
-
- mtepc.startpc = pc;
- mtepc.endpc = pc;
-
- mte = avl_find(methodtree, &mtepc);
-
- if (mte == NULL) {
- /* No method was found. Let's dump a stacktrace. */
-
-#if defined(ENABLE_VMLOG)
- vmlog_cacao_signl("SIGSEGV");
-#endif
-
- log_println("We received a SIGSEGV and tried to handle it, but we were");
- log_println("unable to find a Java method at:");
- log_println("");
-#if SIZEOF_VOID_P == 8
- log_println("PC=0x%016lx", pc);
-#else
- log_println("PC=0x%08x", pc);
-#endif
- log_println("");
- assert(0);
- log_println("Dumping the current stacktrace:");
-
-#if defined(ENABLE_THREADS)
- /* XXX michi: This should be available even without threads! */
- threads_print_stacktrace();
-#endif
-
- vm_abort("Exiting...");
- }
-
- return mte->startpc;
-}
-
-
-/* codegen_get_pv_from_pc_nocheck **********************************************
-
- Find the PV for the given PC by searching in the AVL tree of
- methods. This method does not check the return value and is used
- by the profiler.
-
-*******************************************************************************/
-
-u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
-{
- methodtree_element mtepc;
- methodtree_element *mte;
-
- /* allocation of the search structure on the stack is much faster */
-
- mtepc.startpc = pc;
- mtepc.endpc = pc;
-
- mte = avl_find(methodtree, &mtepc);
-
- if (mte == NULL)
- return NULL;
- else
- return mte->startpc;
-}
-
-
/* codegen_set_replacement_point_notrap ****************************************
Record the position of a non-trappable replacement point.
cd->replacementpoint++;
+#if !defined(NDEBUG)
+	/* XXX actually we should use our own REPLACEMENT_NOPS here! */
+ if (opt_TestReplacement)
+ PATCHER_NOPS;
+#endif
+
/* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
#endif
s4 alignedmcodelen;
jumpref *jr;
- patchref_t *pr;
u1 *epoint;
s4 alignedlen;
}
#endif
+ /* Create the exception table. */
+
+ exceptiontable_create(jd);
+
+ /* Create the linenumber table. */
+
+ linenumbertable_create(jd);
+
/* jump table resolving */
for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
*((functionptr *) ((ptrint) epoint + jr->tablepos)) =
(functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
- /* line number table resolving */
- {
- linenumberref *lr;
- ptrint lrtlen = 0;
- ptrint target;
-
- for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
- lrtlen++;
- target = lr->targetmpc;
- /* if the entry contains an mcode pointer (normal case), resolve it */
- /* (see doc/inlining_stacktrace.txt for details) */
- if (lr->linenumber >= -2) {
- target += (ptrint) epoint;
- }
- *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
- (functionptr) target;
- }
-
- *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
- (functionptr) ((ptrint) epoint + cd->linenumbertab);
-
- *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
- }
-
/* patcher resolving */
- pr = list_first_unsynced(code->patchers);
- while (pr) {
- pr->mpc += (ptrint) epoint;
- pr->datap = (ptrint) (pr->disp + epoint);
- pr = list_next_unsynced(code->patchers, pr);
- }
+ patcher_resolve(jd);
#if defined(ENABLE_REPLACEMENT)
/* replacement point resolving */
}
#endif /* defined(ENABLE_REPLACEMENT) */
- /* add method into methodtree to find the entrypoint */
+ /* Insert method into methodtree to find the entrypoint. */
- codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
+ methodtree_insert(code->entrypoint, code->entrypoint + mcodelen);
#if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
/* resolve data segment references */
codegendata *cd;
ptrint *d; /* pointer to data memory */
u1 *c; /* pointer to code memory */
- s4 dumpsize;
+ int32_t dumpmarker;
/* mark dump memory */
- dumpsize = dump_size();
+ DMARKER;
/* allocate required data structures */
cd = jd->cd;
+#if !defined(JIT_COMPILER_VIA_SIGNAL)
/* allocate code memory */
c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
/* flush caches */
md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+#else
+ /* Allocate code memory. */
+
+ c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* Set pointers correctly. */
+
+ d = (ptrint *) c;
+
+ cd->mcodebase = c;
+
+ c = c + 2 * SIZEOF_VOID_P;
+ cd->mcodeptr = c;
+
+ /* NOTE: The codeinfo pointer is actually a pointer to the
+ methodinfo (this fakes a codeinfo structure). */
+
+ d[0] = (ptrint) m;
+ d[1] = (ptrint) &d[0]; /* fake code->m */
+
+ /* Emit the trap instruction. */
+
+ emit_trap_compiler(cd);
+
+#if defined(ENABLE_STATISTICS)
+ if (opt_stat)
+ count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
+#endif
+
+ /* Flush caches. */
+
+ md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+#endif
/* release dump memory */
- dump_release(dumpsize);
+ DRELEASE;
/* return native stub code */
/* codegen_generate_stub_builtin ***********************************************
- Wrapper for codegen_emit_stub_builtin.
-
- Returns:
- Pointer to the entrypoint of the stub.
+ Wrapper for codegen_emit_stub_native.
*******************************************************************************/
void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
{
-#if defined(__ARM__) || defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__SPARC64__) || defined(__X86_64__)
jitdata *jd;
codeinfo *code;
- s4 dumpsize;
+ int skipparams;
+ int32_t dumpmarker;
/* mark dump memory */
- dumpsize = dump_size();
+ DMARKER;
- jd = DNEW(jitdata);
+ /* Create JIT data structure. */
- jd->m = m;
- jd->cd = DNEW(codegendata);
- jd->rd = NULL;
- jd->flags = 0;
+ jd = jit_jitdata_new(m);
- /* Allocate codeinfo memory from the heap as we need to keep them. */
+ /* Get required compiler data. */
- jd->code = code_codeinfo_new(m); /* XXX check allocation */
+ code = jd->code;
- /* get required compiler data */
+ /* Stubs are non-leaf methods. */
- code = jd->code;
+ code_unflag_leafmethod(code);
/* setup code generation stuff */
codegen_setup(jd);
+ /* Set the number of native arguments we need to skip. */
+
+ skipparams = 0;
+
/* generate the code */
#if defined(ENABLE_JIT)
# if defined(ENABLE_INTRP)
if (!opt_intrp) {
# endif
- /* XXX This is only a hack for builtin_arraycopy and should be done better! */
- if (bte->flags & BUILTINTABLE_FLAG_EXCEPTION) {
- assert(bte->md->returntype.type == TYPE_VOID);
- bte->md->returntype.type = TYPE_INT;
- }
-
- codegen_emit_stub_builtin(jd, bte);
-
- /* XXX see above */
- if (bte->flags & BUILTINTABLE_FLAG_EXCEPTION) {
- bte->md->returntype.type = TYPE_VOID;
- }
+ assert(bte->fp != NULL);
+ codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
# if defined(ENABLE_INTRP)
}
# endif
size_stub_native += code->mcodelength;
#endif
+#if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
+ /* disassemble native stub */
+
+ if (opt_DisassembleStubs) {
+ codegen_disassemble_stub(m,
+ (u1 *) (ptrint) code->entrypoint,
+ (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
+
+ /* show data segment */
+
+ if (opt_showddatasegment)
+ dseg_display(jd);
+ }
+#endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
+
/* release memory */
- dump_release(dumpsize);
-#endif /* architecture list */
+ DRELEASE;
}
{
jitdata *jd;
codeinfo *code;
- s4 dumpsize;
methoddesc *md;
methoddesc *nmd;
- s4 nativeparams;
+ int skipparams;
+ int32_t dumpmarker;
/* mark dump memory */
- dumpsize = dump_size();
+ DMARKER;
- jd = DNEW(jitdata);
+ /* Create JIT data structure. */
- jd->m = m;
- jd->cd = DNEW(codegendata);
- jd->rd = DNEW(registerdata);
- jd->flags = 0;
+ jd = jit_jitdata_new(m);
- /* Allocate codeinfo memory from the heap as we need to keep them. */
+ /* Get required compiler data. */
- jd->code = code_codeinfo_new(m); /* XXX check allocation */
+ code = jd->code;
- /* get required compiler data */
+ /* Stubs are non-leaf methods. */
- code = jd->code;
+ code_unflag_leafmethod(code);
/* set the flags for the current JIT run */
/* create new method descriptor with additional native parameters */
md = m->parseddesc;
- nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
+
+ /* Set the number of native arguments we need to skip. */
+
+ if (m->flags & ACC_STATIC)
+ skipparams = 2;
+ else
+ skipparams = 1;
nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
md->paramcount * sizeof(typedesc) +
- nativeparams * sizeof(typedesc));
+ skipparams * sizeof(typedesc));
- nmd->paramcount = md->paramcount + nativeparams;
+ nmd->paramcount = md->paramcount + skipparams;
nmd->params = DMNEW(paramdesc, nmd->paramcount);
if (m->flags & ACC_STATIC)
nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
- MCOPY(nmd->paramtypes + nativeparams, md->paramtypes, typedesc,
+ MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
md->paramcount);
#if defined(ENABLE_JIT)
intrp_createnativestub(f, jd, nmd);
else
# endif
- codegen_emit_stub_native(jd, nmd, f);
+ codegen_emit_stub_native(jd, nmd, f, skipparams);
#else
intrp_createnativestub(f, jd, nmd);
#endif
size_stub_native += code->mcodelength;
#endif
-#if !defined(NDEBUG)
+#if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
/* disassemble native stub */
- if (opt_shownativestub) {
-#if defined(ENABLE_DEBUG_FILTER)
+ if (opt_DisassembleStubs) {
+# if defined(ENABLE_DEBUG_FILTER)
if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
-#endif
+# endif
{
-#if defined(ENABLE_DISASSEMBLER)
- codegen_disassemble_nativestub(m,
- (u1 *) (ptrint) code->entrypoint,
- (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
-#endif
+ codegen_disassemble_stub(m,
+ (u1 *) (ptrint) code->entrypoint,
+ (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
/* show data segment */
dseg_display(jd);
}
}
-#endif /* !defined(NDEBUG) */
+#endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
/* release memory */
- dump_release(dumpsize);
+ DRELEASE;
/* return native stub code */
/* codegen_disassemble_nativestub **********************************************
- Disassembles the generated native stub.
+ Disassembles the generated builtin or native stub.
*******************************************************************************/
#if defined(ENABLE_DISASSEMBLER)
-void codegen_disassemble_nativestub(methodinfo *m, u1 *start, u1 *end)
+void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
{
- printf("Native stub: ");
- utf_fprint_printable_ascii_classname(stdout, m->class->name);
+ printf("Stub code: ");
+ if (m->clazz != NULL)
+ utf_fprint_printable_ascii_classname(stdout, m->clazz->name);
+ else
+ printf("NULL");
printf(".");
utf_fprint_printable_ascii(stdout, m->name);
utf_fprint_printable_ascii(stdout, m->descriptor);
- printf("\n\nLength: %d\n\n", (s4) (end - start));
+ printf("\nLength: %d\n\n", (s4) (end - start));
DISASSEMBLE(start, end);
}
#endif
-/* codegen_stub_builtin_enter **************************************************
-
- Prepares the stuff required for a builtin function call:
-
- - adds a stackframe info structure to the chain, for stacktraces
-
- The layout of the builtin stub stackframe should look like this:
-
- +---------------------------+ <- SP (of parent Java function)
- | return address |
- +---------------------------+
- | |
- | stackframe info structure |
- | |
- +---------------------------+
- | |
- | arguments (if any) |
- | |
- +---------------------------+ <- SP (native stub)
-
-*******************************************************************************/
-
-void codegen_stub_builtin_enter(u1 *datasp, u1 *pv, u1 *sp, u1 *ra)
-{
- stackframeinfo *sfi;
-
- /* get data structures from stack */
-
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
-
- /* add a stackframeinfo to the chain */
-
- stacktrace_create_native_stackframeinfo(sfi, pv, sp, ra);
-}
-
-
-/* codegen_stub_builtin_exit ***************************************************
-
- Removes the stuff required for a builtin function call.
-
-*******************************************************************************/
-
-void codegen_stub_builtin_exit(u1 *datasp)
-{
- stackframeinfo *sfi;
-
- /* get data structures from stack */
-
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
-
- /* remove current stackframeinfo from chain */
-
- stacktrace_remove_stackframeinfo(sfi);
-}
-
-
/* codegen_start_native_call ***************************************************
Prepares the stuff required for a native (JNI) function call:
*******************************************************************************/
-java_handle_t *codegen_start_native_call(u1 *currentsp, u1 *pv)
+java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
{
- stackframeinfo *sfi;
- localref_table *lrt;
- codeinfo *code;
- methodinfo *m;
- int32_t framesize;
+ stackframeinfo_t *sfi;
+ localref_table *lrt;
+ methodinfo *m;
+ int32_t framesize;
uint8_t *datasp;
uint8_t *javasp;
- uint8_t *javara;
uint64_t *arg_regs;
uint64_t *arg_stack;
STATISTICS(count_calls_java_to_native++);
- /* get information from method header */
-
- code = *((codeinfo **) (pv + CodeinfoPointer));
- framesize = *((int32_t *) (pv + FrameSize));
- assert(code);
- assert(framesize > sizeof(stackframeinfo) + sizeof(localref_table));
+ /* Get the methodinfo. */
- /* get the methodinfo */
+ m = code_get_methodinfo_for_pv(pv);
- m = code->m;
assert(m);
+ framesize = *((int32_t *) (pv + FrameSize));
+
+ assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
+
/* calculate needed values */
#if defined(__ALPHA__) || defined(__ARM__)
- datasp = currentsp + framesize - SIZEOF_VOID_P;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) datasp);
- arg_regs = (uint64_t *) currentsp;
+ datasp = sp + framesize - SIZEOF_VOID_P;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) sp;
+ arg_stack = (uint64_t *) javasp;
+#elif defined(__MIPS__)
+ /* MIPS always uses 8 bytes to store the RA */
+ datasp = sp + framesize - 8;
+ javasp = sp + framesize;
+#elif defined(__S390__)
+ datasp = sp + framesize - 8;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + 96);
+ arg_stack = (uint64_t *) javasp;
+#elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
+ datasp = sp + framesize;
+ javasp = sp + framesize + SIZEOF_VOID_P;
+ arg_regs = (uint64_t *) sp;
arg_stack = (uint64_t *) javasp;
-#elif defined(__MIPS__) || defined(__S390__)
- /* MIPS and S390 always uses 8 bytes to store the RA */
- datasp = currentsp + framesize - 8;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) datasp);
-#elif defined(__I386__) || defined (__M68K__) || defined (__X86_64__)
- datasp = currentsp + framesize;
- javasp = currentsp + framesize + SIZEOF_VOID_P;
- javara = *((uint8_t **) datasp);
- arg_regs = (uint64_t *) currentsp;
+#elif defined(__POWERPC__)
+ datasp = sp + framesize;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
arg_stack = (uint64_t *) javasp;
-#elif defined(__POWERPC__) || defined(__POWERPC64__)
- datasp = currentsp + framesize;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
- arg_regs = (uint64_t *) (currentsp + LA_SIZE + 4 * SIZEOF_VOID_P);
+#elif defined(__POWERPC64__)
+ datasp = sp + framesize;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
arg_stack = (uint64_t *) javasp;
#else
/* XXX is was unable to do this port for SPARC64, sorry. (-michi) */
vm_abort("codegen_start_native_call: unsupported architecture");
#endif
-#if !defined(NDEBUG)
-# if defined(__POWERPC__) || defined (__X86_64__)
- /* print the call-trace if necesarry */
-
- if (opt_TraceJavaCalls)
- trace_java_call_enter(m, arg_regs, arg_stack);
-# endif
-#endif
-
/* get data structures from stack */
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
- lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
- sizeof(localref_table));
+ sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
+ lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
+ sizeof(localref_table));
#if defined(ENABLE_JNI)
/* add current JNI local references table to this thread */
localref_table_add(lrt);
#endif
+#if !defined(NDEBUG)
+# if defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__S390__) || defined(__X86_64__)
+	/* print the call-trace if necessary */
+ /* BEFORE: filling the local reference table */
+
+ if (opt_TraceJavaCalls)
+ trace_java_call_enter(m, arg_regs, arg_stack);
+# endif
+#endif
+
#if defined(ENABLE_HANDLES)
/* place all references into the local reference table */
+ /* BEFORE: creating stackframeinfo */
- localref_fill(m, arg_regs, arg_stack);
+ localref_native_enter(m, arg_regs, arg_stack);
#endif
- /* add a stackframeinfo to the chain */
+ /* Add a stackframeinfo for this native method. We don't have RA
+ and XPC here. These are determined in
+ stacktrace_stackframeinfo_add. */
- stacktrace_create_native_stackframeinfo(sfi, pv, javasp, javara);
+ stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
- /* return a wrapped classinfo for static methods */
+ /* Return a wrapped classinfo for static methods. */
if (m->flags & ACC_STATIC)
- return LLNI_classinfo_wrap(m->class);
+ return (java_handle_t *) LLNI_classinfo_wrap(m->clazz);
else
return NULL;
}
*******************************************************************************/
-java_object_t *codegen_finish_native_call(u1 *currentsp, u1 *pv)
+java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
{
- stackframeinfo *sfi;
- java_handle_t *e;
- java_object_t *o;
- codeinfo *code;
- methodinfo *m;
- int32_t framesize;
+ stackframeinfo_t *sfi;
+ java_handle_t *e;
+ java_object_t *o;
+ codeinfo *code;
+ methodinfo *m;
+ int32_t framesize;
uint8_t *datasp;
uint64_t *ret_regs;
/* get information from method header */
- code = *((codeinfo **) (pv + CodeinfoPointer));
- framesize = *((int32_t *) (pv + FrameSize));
+ code = code_get_codeinfo_for_pv(pv);
+
+ framesize = *((int32_t *) (pv + FrameSize));
+
assert(code);
/* get the methodinfo */
/* calculate needed values */
#if defined(__ALPHA__) || defined(__ARM__)
- datasp = currentsp + framesize - SIZEOF_VOID_P;
- ret_regs = (uint64_t *) currentsp;
-#elif defined(__MIPS__) || defined(__S390__)
- /* MIPS and S390 always uses 8 bytes to store the RA */
- datasp = currentsp + framesize - 8;
+ datasp = sp + framesize - SIZEOF_VOID_P;
+ ret_regs = (uint64_t *) sp;
+#elif defined(__MIPS__)
+ /* MIPS always uses 8 bytes to store the RA */
+ datasp = sp + framesize - 8;
+#elif defined(__S390__)
+ datasp = sp + framesize - 8;
+ ret_regs = (uint64_t *) (sp + 96);
#elif defined(__I386__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) (currentsp + 2 * SIZEOF_VOID_P);
-#elif defined (__M68K__) || defined (__X86_64__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) currentsp;
-#elif defined(__POWERPC__) || defined(__POWERPC64__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) (currentsp + LA_SIZE + 2 * SIZEOF_VOID_P);
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
+#elif defined(__M68K__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + 2 * 8);
+#elif defined(__X86_64__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) sp;
+#elif defined(__POWERPC__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
+#elif defined(__POWERPC64__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
#else
vm_abort("codegen_finish_native_call: unsupported architecture");
#endif
-
-#if !defined(NDEBUG)
-# if defined(__POWERPC__) || defined (__X86_64__)
- /* print the call-trace if necesarry */
-
- if (opt_TraceJavaCalls)
- trace_java_call_exit(m, ret_regs);
-# endif
-#endif
-
/* get data structures from stack */
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
+ sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
- /* remove current stackframeinfo from chain */
+ /* Remove current stackframeinfo from chain. */
- stacktrace_remove_stackframeinfo(sfi);
+ stacktrace_stackframeinfo_remove(sfi);
- /* XXX unfill lrt here!!! */
+#if defined(ENABLE_HANDLES)
+ /* unwrap the return value from the local reference table */
+ /* AFTER: removing the stackframeinfo */
+ /* BEFORE: releasing the local reference table */
+
+ localref_native_exit(m, ret_regs);
+#endif
/* get and unwrap the exception */
- /* ATTENTION: do the this _after_ the stackframeinfo was
- removed but _before_ the localref_table gets removed! */
+ /* AFTER: removing the stackframe info */
+ /* BEFORE: releasing the local reference table */
e = exceptions_get_and_clear_exception();
o = LLNI_UNWRAP(e);
localref_table_remove();
#endif
+#if !defined(NDEBUG)
+# if defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__S390__) || defined(__X86_64__)
+	/* print the call-trace if necessary */
+ /* AFTER: unwrapping the return value */
+
+ if (opt_TraceJavaCalls)
+ trace_java_call_exit(m, ret_regs);
+# endif
+#endif
+
return o;
}
s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
{
-
-#if 0
- /* Do we have to generate a conditional move? Yes, then always
- return the temporary register. The real register is identified
- during the store. */
-
- if (opcode & ICMD_CONDITION_MASK)
- return tempregnum;
-#endif
-
if (!(v->flags & INMEMORY))
return v->vv.regoff;
if (compileverbose)
printf("...returning - phi lifetimes where joined\n");
#endif
- return;
+ continue;
}
if (s->type == -1) {
if (compileverbose)
printf("...returning - phi lifetimes where joined\n");
#endif
- return;
+ continue;
}
tmp_i.opc = 0;
#endif /* defined(ENABLE_SSA) */
+/* REMOVEME When we have exception handling in C. */
+
+/* md_asm_codegen_get_pv_from_pc **********************************************
+
+   C-callable wrapper around md_codegen_get_pv_from_pc: given the
+   return address ra, delegates the PV (procedure vector) lookup to
+   md_codegen_get_pv_from_pc and returns its result unchanged.
+
+   NOTE(review): judging by the md_asm_ prefix and the REMOVEME note
+   above, this is presumably an entry point for assembler code until
+   exception handling is done in C — confirm against the asm callers.
+
+*******************************************************************************/
+
+void *md_asm_codegen_get_pv_from_pc(void *ra)
+{
+	return md_codegen_get_pv_from_pc(ra);
+}
+
/*
* These are local overrides for various environment variables in Emacs.