#include "vm/types.h"
-#if defined(ENABLE_JIT)
-/* this is required PATCHER_CALL_SIZE */
-# include "codegen.h"
-#endif
-
+#include "codegen.h"
+#include "md.h"
#include "md-abi.h"
#include "mm/memory.h"
#include "native/localref.h"
#include "native/native.h"
+#if defined(WITH_CLASSPATH_SUN)
+# include "native/include/java_lang_Object.h"
+# include "native/include/java_lang_String.h"
+# include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
+# include "native/include/java_lang_ClassLoader.h"
+#endif
+
#include "native/include/java_lang_Class.h"
#include "threads/threads-common.h"
#include "vm/jit/abi.h"
#include "vm/jit/asmpart.h"
+#include "vm/jit/code.h"
#include "vm/jit/codegen-common.h"
#if defined(ENABLE_DISASSEMBLER)
#include "vm/jit/dseg.h"
#include "vm/jit/emit-common.h"
#include "vm/jit/jit.h"
-#include "vm/jit/md.h"
+#include "vm/jit/linenumbertable.h"
#include "vm/jit/methodheader.h"
#include "vm/jit/patcher-common.h"
#include "vm/jit/replace.h"
cd->datareferences = NULL;
#endif
-/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
- cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
-
- cd->linenumberreferences = NULL;
- cd->linenumbertablesizepos = 0;
- cd->linenumbertablestartpos = 0;
- cd->linenumbertab = 0;
+ cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
}
cd->datareferences = NULL;
#endif
-/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
- cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
-
- cd->linenumberreferences = NULL;
- cd->linenumbertablesizepos = 0;
- cd->linenumbertablestartpos = 0;
- cd->linenumbertab = 0;
+ cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
/* We need to clear the mpc and the branch references from all
basic blocks as they will definitely change. */
}
-/* codegen_add_patch_ref *******************************************************
-
- Appends a new patcher reference to the list of patching positions.
-
-*******************************************************************************/
-
-void codegen_add_patch_ref(codegendata *cd, functionptr patcher, voidptr ref,
- s4 disp)
-{
- patchref *pr;
- s4 branchmpc;
-
- branchmpc = cd->mcodeptr - cd->mcodebase;
-
- pr = DNEW(patchref);
-
- pr->branchpos = branchmpc;
- pr->disp = disp;
- pr->patcher = patcher;
- pr->ref = ref;
-
-/* list_add_first(cd->patchrefs, pr); */
- pr->next = cd->patchrefs;
- cd->patchrefs = pr;
-
- /* Generate NOPs for opt_shownops. */
-
- if (opt_shownops)
- PATCHER_NOPS;
-
-#if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
- || defined(__SPARC_64__) || defined(__X86_64__))
-
- /* On some architectures the patcher stub call instruction might
- be longer than the actual instruction generated. On this
- architectures we store the last patcher call position and after
- the basic block code generation is completed, we check the
- range and maybe generate some nop's. */
- /* The nops are generated in codegen_emit in each codegen */
-
- cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
-#endif
-}
-
-
/* codegen_critical_section_new ************************************************
Allocates a new critical-section reference and adds it to the
cd->replacementpoint++;
+#if !defined(NDEBUG)
+ /* XXX actually we should use an own REPLACEMENT_NOPS here! */
+ if (opt_TestReplacement)
+ PATCHER_NOPS;
+#endif
+
/* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
}
#endif
+ /* Create the exception table. */
+
+ exceptiontable_create(jd);
+
+ /* Create the linenumber table. */
+
+ linenumbertable_create(jd);
+
/* jump table resolving */
for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
*((functionptr *) ((ptrint) epoint + jr->tablepos)) =
(functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
- /* line number table resolving */
- {
- linenumberref *lr;
- ptrint lrtlen = 0;
- ptrint target;
-
- for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
- lrtlen++;
- target = lr->targetmpc;
- /* if the entry contains an mcode pointer (normal case), resolve it */
- /* (see doc/inlining_stacktrace.txt for details) */
- if (lr->linenumber >= -2) {
- target += (ptrint) epoint;
- }
- *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
- (functionptr) target;
- }
-
- *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
- (functionptr) ((ptrint) epoint + cd->linenumbertab);
-
- *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
- }
-
/* patcher resolving */
pr = list_first_unsynced(code->patchers);
cd = jd->cd;
+#if !defined(JIT_COMPILER_VIA_SIGNAL)
/* allocate code memory */
c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
/* flush caches */
md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+#else
+ /* Allocate code memory. */
+
+ c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* Set pointers correctly. */
+
+ d = (ptrint *) c;
+
+ cd->mcodebase = c;
+
+ c = c + 2 * SIZEOF_VOID_P;
+ cd->mcodeptr = c;
+
+ /* NOTE: The codeinfo pointer is actually a pointer to the
+ methodinfo (this fakes a codeinfo structure). */
+
+ d[0] = (ptrint) m;
+ d[1] = (ptrint) &d[0]; /* fake code->m */
+
+ /* Emit the trap instruction. */
+
+ emit_trap_compiler(cd);
+
+#if defined(ENABLE_STATISTICS)
+ if (opt_stat)
+ count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
+#endif
+
+ /* Flush caches. */
+
+ md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+#endif
/* release dump memory */
/* codegen_generate_stub_builtin ***********************************************
- Wrapper for codegen_emit_stub_builtin.
-
- Returns:
- Pointer to the entrypoint of the stub.
+ Wrapper for codegen_emit_stub_native.
*******************************************************************************/
-void codegen_generate_stub_builtin(builtintable_entry *bte)
+void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
{
-#if defined(__ARM__) || defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__SPARC64__) || defined(__X86_64__)
jitdata *jd;
codeinfo *code;
+ int skipparams;
s4 dumpsize;
/* mark dump memory */
dumpsize = dump_size();
- jd = DNEW(jitdata);
-
- jd->m = NULL;
- jd->cd = DNEW(codegendata);
- jd->rd = NULL;
- jd->flags = 0;
-
- /* Allocate codeinfo memory from the heap as we need to keep them. */
+ /* Create JIT data structure. */
- jd->code = code_codeinfo_new(NULL);
+ jd = jit_jitdata_new(m);
- /* get required compiler data */
+ /* Get required compiler data. */
code = jd->code;
codegen_setup(jd);
+ /* Set the number of native arguments we need to skip. */
+
+ skipparams = 0;
+
/* generate the code */
#if defined(ENABLE_JIT)
# if defined(ENABLE_INTRP)
if (!opt_intrp) {
# endif
- /* XXX This is only a hack for builtin_arraycopy and should be done better! */
- if (bte->flags & BUILTINTABLE_FLAG_EXCEPTION) {
- assert(bte->md->returntype.type == TYPE_VOID);
- bte->md->returntype.type = TYPE_INT;
- }
-
- codegen_emit_stub_builtin(jd, bte);
-
- /* XXX see above */
- if (bte->flags & BUILTINTABLE_FLAG_EXCEPTION) {
- bte->md->returntype.type = TYPE_VOID;
- }
+ assert(bte->fp != NULL);
+ codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
# if defined(ENABLE_INTRP)
}
# endif
size_stub_native += code->mcodelength;
#endif
+#if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
+ /* disassemble native stub */
+
+ if (opt_DisassembleStubs) {
+ codegen_disassemble_stub(m,
+ (u1 *) (ptrint) code->entrypoint,
+ (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
+
+ /* show data segment */
+
+ if (opt_showddatasegment)
+ dseg_display(jd);
+ }
+#endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
+
/* release memory */
dump_release(dumpsize);
-#endif /* architecture list */
}
s4 dumpsize;
methoddesc *md;
methoddesc *nmd;
- s4 nativeparams;
+ int skipparams;
/* mark dump memory */
dumpsize = dump_size();
- jd = DNEW(jitdata);
-
- jd->m = m;
- jd->cd = DNEW(codegendata);
- jd->rd = DNEW(registerdata);
- jd->flags = 0;
-
- /* Allocate codeinfo memory from the heap as we need to keep them. */
+ /* Create JIT data structure. */
- jd->code = code_codeinfo_new(m); /* XXX check allocation */
+ jd = jit_jitdata_new(m);
- /* get required compiler data */
+ /* Get required compiler data. */
code = jd->code;
/* create new method descriptor with additional native parameters */
md = m->parseddesc;
- nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
+
+ /* Set the number of native arguments we need to skip. */
+
+ if (m->flags & ACC_STATIC)
+ skipparams = 2;
+ else
+ skipparams = 1;
nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
md->paramcount * sizeof(typedesc) +
- nativeparams * sizeof(typedesc));
+ skipparams * sizeof(typedesc));
- nmd->paramcount = md->paramcount + nativeparams;
+ nmd->paramcount = md->paramcount + skipparams;
nmd->params = DMNEW(paramdesc, nmd->paramcount);
if (m->flags & ACC_STATIC)
nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
- MCOPY(nmd->paramtypes + nativeparams, md->paramtypes, typedesc,
+ MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
md->paramcount);
#if defined(ENABLE_JIT)
intrp_createnativestub(f, jd, nmd);
else
# endif
- codegen_emit_stub_native(jd, nmd, f);
+ codegen_emit_stub_native(jd, nmd, f, skipparams);
#else
intrp_createnativestub(f, jd, nmd);
#endif
size_stub_native += code->mcodelength;
#endif
-#if !defined(NDEBUG)
+#if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
/* disassemble native stub */
- if (opt_shownativestub) {
-#if defined(ENABLE_DEBUG_FILTER)
+ if (opt_DisassembleStubs) {
+# if defined(ENABLE_DEBUG_FILTER)
if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
-#endif
+# endif
{
-#if defined(ENABLE_DISASSEMBLER)
- codegen_disassemble_nativestub(m,
- (u1 *) (ptrint) code->entrypoint,
- (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
-#endif
+ codegen_disassemble_stub(m,
+ (u1 *) (ptrint) code->entrypoint,
+ (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
/* show data segment */
dseg_display(jd);
}
}
-#endif /* !defined(NDEBUG) */
+#endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
/* release memory */
/* codegen_disassemble_nativestub **********************************************
- Disassembles the generated native stub.
+ Disassembles the generated builtin or native stub.
*******************************************************************************/
#if defined(ENABLE_DISASSEMBLER)
-void codegen_disassemble_nativestub(methodinfo *m, u1 *start, u1 *end)
+void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
{
- printf("Native stub: ");
- utf_fprint_printable_ascii_classname(stdout, m->class->name);
+ printf("Stub code: ");
+ if (m->class != NULL)
+ utf_fprint_printable_ascii_classname(stdout, m->class->name);
+ else
+ printf("NULL");
printf(".");
utf_fprint_printable_ascii(stdout, m->name);
utf_fprint_printable_ascii(stdout, m->descriptor);
- printf("\n\nLength: %d\n\n", (s4) (end - start));
+ printf("\nLength: %d\n\n", (s4) (end - start));
DISASSEMBLE(start, end);
}
#endif
-/* codegen_stub_builtin_enter **************************************************
-
- Prepares the stuff required for a builtin function call:
-
- - adds a stackframe info structure to the chain, for stacktraces
-
- The layout of the builtin stub stackframe should look like this:
-
- +---------------------------+ <- SP (of parent Java function)
- | return address |
- +---------------------------+
- | |
- | stackframe info structure |
- | |
- +---------------------------+
- | |
- | arguments (if any) |
- | |
- +---------------------------+ <- SP (native stub)
-
-*******************************************************************************/
-
-void codegen_stub_builtin_enter(u1 *datasp, u1 *pv, u1 *sp, u1 *ra)
-{
- stackframeinfo *sfi;
-
- /* get data structures from stack */
-
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
-
- /* add a stackframeinfo to the chain */
-
- stacktrace_create_native_stackframeinfo(sfi, pv, sp, ra);
-}
-
-
-/* codegen_stub_builtin_exit ***************************************************
-
- Removes the stuff required for a builtin function call.
-
-*******************************************************************************/
-
-void codegen_stub_builtin_exit(u1 *datasp)
-{
- stackframeinfo *sfi;
-
- /* get data structures from stack */
-
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
-
- /* remove current stackframeinfo from chain */
-
- stacktrace_remove_stackframeinfo(sfi);
-}
-
-
/* codegen_start_native_call ***************************************************
Prepares the stuff required for a native (JNI) function call:
*******************************************************************************/
-java_handle_t *codegen_start_native_call(u1 *currentsp, u1 *pv)
+java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
{
- stackframeinfo *sfi;
- localref_table *lrt;
- codeinfo *code;
- methodinfo *m;
- int32_t framesize;
+ stackframeinfo_t *sfi;
+ localref_table *lrt;
+ methodinfo *m;
+ int32_t framesize;
uint8_t *datasp;
uint8_t *javasp;
- uint8_t *javara;
uint64_t *arg_regs;
uint64_t *arg_stack;
STATISTICS(count_calls_java_to_native++);
- /* get information from method header */
-
- code = *((codeinfo **) (pv + CodeinfoPointer));
- framesize = *((int32_t *) (pv + FrameSize));
- assert(code);
- assert(framesize > sizeof(stackframeinfo) + sizeof(localref_table));
+ /* Get the methodinfo. */
- /* get the methodinfo */
+ m = code_get_methodinfo_for_pv(pv);
- m = code->m;
assert(m);
+ framesize = *((int32_t *) (pv + FrameSize));
+
+ assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
+
/* calculate needed values */
#if defined(__ALPHA__) || defined(__ARM__)
- datasp = currentsp + framesize - SIZEOF_VOID_P;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) datasp);
- arg_regs = (uint64_t *) currentsp;
+ datasp = sp + framesize - SIZEOF_VOID_P;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) sp;
+ arg_stack = (uint64_t *) javasp;
+#elif defined(__MIPS__)
+ /* MIPS always uses 8 bytes to store the RA */
+ datasp = sp + framesize - 8;
+ javasp = sp + framesize;
+#elif defined(__S390__)
+ datasp = sp + framesize - 8;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + 96);
+ arg_stack = (uint64_t *) javasp;
+#elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
+ datasp = sp + framesize;
+ javasp = sp + framesize + SIZEOF_VOID_P;
+ arg_regs = (uint64_t *) sp;
arg_stack = (uint64_t *) javasp;
-#elif defined(__MIPS__) || defined(__S390__)
- /* MIPS and S390 always uses 8 bytes to store the RA */
- datasp = currentsp + framesize - 8;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) datasp);
-#elif defined(__I386__) || defined (__M68K__) || defined (__X86_64__)
- datasp = currentsp + framesize;
- javasp = currentsp + framesize + SIZEOF_VOID_P;
- javara = *((uint8_t **) datasp);
- arg_regs = (uint64_t *) currentsp;
+#elif defined(__POWERPC__)
+ datasp = sp + framesize;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
arg_stack = (uint64_t *) javasp;
-#elif defined(__POWERPC__) || defined(__POWERPC64__)
- datasp = currentsp + framesize;
- javasp = currentsp + framesize;
- javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
- arg_regs = (uint64_t *) (currentsp + LA_SIZE + 4 * SIZEOF_VOID_P);
+#elif defined(__POWERPC64__)
+ datasp = sp + framesize;
+ javasp = sp + framesize;
+ arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
arg_stack = (uint64_t *) javasp;
#else
/* XXX is was unable to do this port for SPARC64, sorry. (-michi) */
vm_abort("codegen_start_native_call: unsupported architecture");
#endif
-#if !defined(NDEBUG)
-# if defined(__POWERPC__) || defined (__X86_64__)
- /* print the call-trace if necesarry */
-
- if (opt_TraceJavaCalls)
- trace_java_call_enter(m, arg_regs, arg_stack);
-# endif
-#endif
-
/* get data structures from stack */
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
- lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
- sizeof(localref_table));
+ sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
+ lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
+ sizeof(localref_table));
#if defined(ENABLE_JNI)
/* add current JNI local references table to this thread */
localref_table_add(lrt);
#endif
+#if !defined(NDEBUG)
+# if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
+	/* print the call-trace if necessary */
+ /* BEFORE: filling the local reference table */
+
+ if (opt_TraceJavaCalls)
+ trace_java_call_enter(m, arg_regs, arg_stack);
+# endif
+#endif
+
#if defined(ENABLE_HANDLES)
/* place all references into the local reference table */
+ /* BEFORE: creating stackframeinfo */
- localref_fill(m, arg_regs, arg_stack);
+ localref_native_enter(m, arg_regs, arg_stack);
#endif
- /* add a stackframeinfo to the chain */
+ /* Add a stackframeinfo for this native method. We don't have RA
+ and XPC here. These are determined in
+ stacktrace_stackframeinfo_add. */
- stacktrace_create_native_stackframeinfo(sfi, pv, javasp, javara);
+ stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
- /* return a wrapped classinfo for static methods */
+ /* Return a wrapped classinfo for static methods. */
if (m->flags & ACC_STATIC)
return LLNI_classinfo_wrap(m->class);
*******************************************************************************/
-java_object_t *codegen_finish_native_call(u1 *currentsp, u1 *pv)
+java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
{
- stackframeinfo *sfi;
- java_handle_t *e;
- java_object_t *o;
- codeinfo *code;
- methodinfo *m;
- int32_t framesize;
+ stackframeinfo_t *sfi;
+ java_handle_t *e;
+ java_object_t *o;
+ codeinfo *code;
+ methodinfo *m;
+ int32_t framesize;
uint8_t *datasp;
uint64_t *ret_regs;
/* get information from method header */
- code = *((codeinfo **) (pv + CodeinfoPointer));
- framesize = *((int32_t *) (pv + FrameSize));
+ code = code_get_codeinfo_for_pv(pv);
+
+ framesize = *((int32_t *) (pv + FrameSize));
+
assert(code);
/* get the methodinfo */
/* calculate needed values */
#if defined(__ALPHA__) || defined(__ARM__)
- datasp = currentsp + framesize - SIZEOF_VOID_P;
- ret_regs = (uint64_t *) currentsp;
-#elif defined(__MIPS__) || defined(__S390__)
- /* MIPS and S390 always uses 8 bytes to store the RA */
- datasp = currentsp + framesize - 8;
+ datasp = sp + framesize - SIZEOF_VOID_P;
+ ret_regs = (uint64_t *) sp;
+#elif defined(__MIPS__)
+ /* MIPS always uses 8 bytes to store the RA */
+ datasp = sp + framesize - 8;
+#elif defined(__S390__)
+ datasp = sp + framesize - 8;
+ ret_regs = (uint64_t *) (sp + 96);
#elif defined(__I386__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) (currentsp + 2 * SIZEOF_VOID_P);
-#elif defined (__M68K__) || defined (__X86_64__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) currentsp;
-#elif defined(__POWERPC__) || defined(__POWERPC64__)
- datasp = currentsp + framesize;
- ret_regs = (uint64_t *) (currentsp + LA_SIZE + 2 * SIZEOF_VOID_P);
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
+#elif defined(__M68K__) || defined(__X86_64__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) sp;
+#elif defined(__POWERPC__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
+#elif defined(__POWERPC64__)
+ datasp = sp + framesize;
+ ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
#else
vm_abort("codegen_finish_native_call: unsupported architecture");
#endif
-
-#if !defined(NDEBUG)
-# if defined(__POWERPC__) || defined (__X86_64__)
- /* print the call-trace if necesarry */
-
- if (opt_TraceJavaCalls)
- trace_java_call_exit(m, ret_regs);
-# endif
-#endif
-
/* get data structures from stack */
- sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
+ sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
- /* remove current stackframeinfo from chain */
+ /* Remove current stackframeinfo from chain. */
- stacktrace_remove_stackframeinfo(sfi);
+ stacktrace_stackframeinfo_remove(sfi);
- /* XXX unfill lrt here!!! */
+#if defined(ENABLE_HANDLES)
+ /* unwrap the return value from the local reference table */
+ /* AFTER: removing the stackframeinfo */
+ /* BEFORE: releasing the local reference table */
+
+ localref_native_exit(m, ret_regs);
+#endif
/* get and unwrap the exception */
- /* ATTENTION: do the this _after_ the stackframeinfo was
- removed but _before_ the localref_table gets removed! */
+ /* AFTER: removing the stackframe info */
+ /* BEFORE: releasing the local reference table */
e = exceptions_get_and_clear_exception();
o = LLNI_UNWRAP(e);
localref_table_remove();
#endif
+#if !defined(NDEBUG)
+# if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
+	/* print the call-trace if necessary */
+ /* AFTER: unwrapping the return value */
+
+ if (opt_TraceJavaCalls)
+ trace_java_call_exit(m, ret_regs);
+# endif
+#endif
+
return o;
}
#endif /* defined(ENABLE_SSA) */
+/* REMOVEME When we have exception handling in C. */
+
+void *md_asm_codegen_get_pv_from_pc(void *ra)
+{
+ return md_codegen_get_pv_from_pc(ra);
+}
+
/*
* These are local overrides for various environment variables in Emacs.