1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 All functions assume the following code area / data area layout:
27 | code area | code area grows to higher addresses
29 +-----------+ <-- start of procedure
31 | data area | data area grows to lower addresses
35 The functions first write into a temporary code/data area allocated by
36 "codegen_init". "codegen_finish" copies the code and data area into permanent
37 memory. All functions writing values into the data area return the offset
38 relative to the beginning of the code area (start of procedure).
54 #include "mm/memory.h"
56 #include "toolbox/avl.h"
57 #include "toolbox/list.h"
58 #include "toolbox/logging.h"
60 #include "native/jni.h"
61 #include "native/llni.h"
62 #include "native/localref.h"
63 #include "native/native.h"
65 #if defined(WITH_JAVA_RUNTIME_LIBRARY_OPENJDK)
66 # include "native/include/java_lang_Object.h"
67 # include "native/include/java_lang_String.h" /* required by j.l.CL */
68 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
69 # include "native/include/java_lang_ClassLoader.h"
72 #if defined(WITH_JAVA_RUNTIME_LIBRARY_CLDC1_1)
73 # include "native/include/java_lang_String.h"
76 #include "native/include/java_lang_Class.h"
78 #include "threads/thread.h"
80 #include "vm/builtin.h"
81 #include "vm/exceptions.h"
82 #include "vm/stringlocal.h"
84 #include "vm/jit/abi.h"
85 #include "vm/jit/asmpart.h"
86 #include "vm/jit/code.h"
87 #include "vm/jit/codegen-common.h"
89 #if defined(ENABLE_DISASSEMBLER)
90 # include "vm/jit/disass.h"
93 #include "vm/jit/dseg.h"
94 #include "vm/jit/emit-common.h"
95 #include "vm/jit/jit.h"
96 #include "vm/jit/linenumbertable.h"
97 #include "vm/jit/methodheader.h"
98 #include "vm/jit/methodtree.h"
99 #include "vm/jit/patcher-common.h"
100 #include "vm/jit/replace.h"
101 #if defined(ENABLE_SSA)
102 # include "vm/jit/optimizing/lsra.h"
103 # include "vm/jit/optimizing/ssa.h"
105 #include "vm/jit/stacktrace.h"
106 #include "vm/jit/trace.h"
108 #if defined(ENABLE_INTRP)
109 #include "vm/jit/intrp/intrp.h"
112 #include "vmcore/method.h"
113 #include "vmcore/options.h"
115 # include "vmcore/statistics.h"
117 #if defined(ENABLE_VMLOG)
118 #include <vmlog_cacao.h>
124 /* codegen_init ****************************************************************
128 *******************************************************************************/
130 void codegen_init(void)
135 /* codegen_setup ***************************************************************
137 Allocates and initialises code area, data area and references.
139 *******************************************************************************/
141 void codegen_setup(jitdata *jd)
146 /* get required compiler data */
151 /* initialize members */
155 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
156 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
157 cd->mcodesize = MCODEINITSIZE;
159 /* initialize mcode variables */
161 cd->mcodeptr = cd->mcodebase;
162 cd->lastmcodeptr = cd->mcodebase;
164 #if defined(ENABLE_INTRP)
165 /* native dynamic superinstructions variables */
168 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
169 cd->ncodesize = NCODEINITSIZE;
171 /* initialize ncode variables */
173 cd->ncodeptr = cd->ncodebase;
175 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
176 cd->superstarts = NULL;
183 cd->jumpreferences = NULL;
185 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
186 cd->datareferences = NULL;
189 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
190 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
191 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
195 /* codegen_reset ***************************************************************
197 Resets the codegen data structure so we can recompile the method.
199 *******************************************************************************/
201 static void codegen_reset(jitdata *jd)
207 /* get required compiler data */
212 /* reset error flag */
214 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
216 /* reset some members, we reuse the code memory already allocated
217 as this should have almost the correct size */
219 cd->mcodeptr = cd->mcodebase;
220 cd->lastmcodeptr = cd->mcodebase;
225 cd->jumpreferences = NULL;
227 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
228 cd->datareferences = NULL;
231 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
232 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
233 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
235 /* We need to clear the mpc and the branch references from all
236 basic blocks as they will definitely change. */
238 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
240 bptr->branchrefs = NULL;
243 /* We need to clear all the patcher references from the codeinfo
244 since they all will be regenerated */
246 patcher_list_reset(code);
248 #if defined(ENABLE_REPLACEMENT)
249 code->rplpoints = NULL;
250 code->rplpointcount = 0;
251 code->regalloc = NULL;
252 code->regalloccount = 0;
253 code->globalcount = 0;
258 /* codegen_generate ************************************************************
260 Generates the code for the currently compiled method.
262 *******************************************************************************/
264 bool codegen_generate(jitdata *jd)
268 /* get required compiler data */
272 /* call the machine-dependent code generation function */
274 if (!codegen_emit(jd))
277 /* check for an error */
279 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
280 /* check for long-branches flag, if it is set we recompile the
285 log_message_method("Re-generating code: ", jd->m);
288 /* XXX maybe we should tag long-branches-methods for recompilation */
290 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
291 /* we have to reset the codegendata structure first */
295 /* and restart the compiler run */
297 if (!codegen_emit(jd))
301 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
306 log_message_method("Re-generating code done: ", jd->m);
310 /* reallocate the memory and finish the code generation */
314 /* everything's ok */
320 /* codegen_close ***************************************************************
324 *******************************************************************************/
326 void codegen_close(void)
328 /* TODO: release avl tree on i386 and x86_64 */
332 /* codegen_increase ************************************************************
336 *******************************************************************************/
338 void codegen_increase(codegendata *cd)
342 /* save old mcodebase pointer */
344 oldmcodebase = cd->mcodebase;
346 /* reallocate to new, doubled memory */
348 cd->mcodebase = DMREALLOC(cd->mcodebase,
353 cd->mcodeend = cd->mcodebase + cd->mcodesize;
355 /* set new mcodeptr */
357 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
359 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
360 || defined(__SPARC_64__)
361 /* adjust the pointer to the last patcher position */
363 if (cd->lastmcodeptr != NULL)
364 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
369 /* codegen_ncode_increase ******************************************************
373 *******************************************************************************/
375 #if defined(ENABLE_INTRP)
376 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
380 /* save old ncodebase pointer */
382 oldncodebase = cd->ncodebase;
384 /* reallocate to new, doubled memory */
386 cd->ncodebase = DMREALLOC(cd->ncodebase,
392 /* return the new ncodeptr */
394 return (cd->ncodebase + (ncodeptr - oldncodebase));
399 /* codegen_add_branch_ref ******************************************************
401 Prepends a branch to the list.
403 *******************************************************************************/
405 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
410 STATISTICS(count_branches_unresolved++);
412 /* calculate the mpc of the branch instruction */
414 branchmpc = cd->mcodeptr - cd->mcodebase;
416 br = DNEW(branchref);
418 br->branchmpc = branchmpc;
419 br->condition = condition;
421 br->options = options;
422 br->next = target->branchrefs;
424 target->branchrefs = br;
428 /* codegen_resolve_branchrefs **************************************************
430 Resolves and patches the branch references of a given basic block.
432 *******************************************************************************/
434 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
439 /* Save the mcodeptr because in the branch emitting functions
440 we generate code somewhere inside already generated code,
441 but we're still in the actual code generation phase. */
443 mcodeptr = cd->mcodeptr;
445 /* just to make sure */
447 assert(bptr->mpc >= 0);
449 for (br = bptr->branchrefs; br != NULL; br = br->next) {
450 /* temporarily set the mcodeptr */
452 cd->mcodeptr = cd->mcodebase + br->branchmpc;
454 /* emit_bccz and emit_branch emit the correct code, even if we
455 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
457 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
460 /* restore mcodeptr */
462 cd->mcodeptr = mcodeptr;
466 /* codegen_branch_label_add ****************************************************
468 Appends a branch to the label-branch list.
470 *******************************************************************************/
472 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
475 branch_label_ref_t *br;
478 /* Get the label list. */
480 l = cd->brancheslabel;
482 /* calculate the current mpc */
484 mpc = cd->mcodeptr - cd->mcodebase;
486 br = DNEW(branch_label_ref_t);
490 br->condition = condition;
492 br->options = options;
494 /* Add the branch to the list. */
496 list_add_last(l, br);
500 /* codegen_critical_section_new ************************************************
502 Allocates a new critical-section reference and adds it to the
503 critical-section list.
505 *******************************************************************************/
507 #if defined(ENABLE_THREADS)
508 void codegen_critical_section_new(codegendata *cd)
511 critical_section_ref_t *csr;
514 /* Get the critical section list. */
516 l = cd->listcritical;
518 /* calculate the current mpc */
520 mpc = cd->mcodeptr - cd->mcodebase;
522 csr = DNEW(critical_section_ref_t);
524 /* We only can set restart right now, as start and end are set by
525 the following, corresponding functions. */
531 /* Add the branch to the list. */
533 list_add_last(l, csr);
538 /* codegen_critical_section_start **********************************************
540 Set the start-point of the current critical section (which is the
541 last element of the list).
543 *******************************************************************************/
545 #if defined(ENABLE_THREADS)
546 void codegen_critical_section_start(codegendata *cd)
549 critical_section_ref_t *csr;
552 /* Get the critical section list. */
554 l = cd->listcritical;
556 /* calculate the current mpc */
558 mpc = cd->mcodeptr - cd->mcodebase;
560 /* Get the current critical section. */
564 /* set the start point */
566 assert(csr->start == -1);
573 /* codegen_critical_section_end ************************************************
575 Set the end-point of the current critical section (which is the
576 last element of the list).
578 *******************************************************************************/
580 #if defined(ENABLE_THREADS)
581 void codegen_critical_section_end(codegendata *cd)
584 critical_section_ref_t *csr;
587 /* Get the critical section list. */
589 l = cd->listcritical;
591 /* calculate the current mpc */
593 mpc = cd->mcodeptr - cd->mcodebase;
595 /* Get the current critical section. */
599 /* set the end point */
601 assert(csr->end == -1);
608 /* codegen_critical_section_finish *********************************************
610 Finish the critical sections, create the critical section nodes for
611 the AVL tree and insert them into the tree.
613 *******************************************************************************/
615 #if defined(ENABLE_THREADS)
616 static void codegen_critical_section_finish(jitdata *jd)
621 critical_section_ref_t *csr;
622 critical_section_node_t *csn;
624 /* get required compiler data */
629 /* Get the critical section list. */
631 l = cd->listcritical;
633 /* iterate over all critical sections */
635 for (csr = list_first(l); csr != NULL; csr = list_next(l, csr)) {
636 /* check if all points are set */
638 assert(csr->start != -1);
639 assert(csr->end != -1);
640 assert(csr->restart != -1);
642 /* allocate tree node */
644 csn = NEW(critical_section_node_t);
646 csn->start = code->entrypoint + csr->start;
647 csn->end = code->entrypoint + csr->end;
648 csn->restart = code->entrypoint + csr->restart;
650 /* insert into the tree */
652 critical_section_register(csn);
658 /* codegen_set_replacement_point_notrap ****************************************
660 Record the position of a non-trappable replacement point.
662 *******************************************************************************/
664 #if defined(ENABLE_REPLACEMENT)
666 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
668 void codegen_set_replacement_point_notrap(codegendata *cd)
671 assert(cd->replacementpoint);
672 assert(cd->replacementpoint->type == type);
673 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
675 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
677 cd->replacementpoint++;
679 #endif /* defined(ENABLE_REPLACEMENT) */
682 /* codegen_set_replacement_point ***********************************************
684 Record the position of a trappable replacement point.
686 *******************************************************************************/
688 #if defined(ENABLE_REPLACEMENT)
690 void codegen_set_replacement_point(codegendata *cd, s4 type)
692 void codegen_set_replacement_point(codegendata *cd)
695 assert(cd->replacementpoint);
696 assert(cd->replacementpoint->type == type);
697 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
699 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
701 cd->replacementpoint++;
704 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
705 if (opt_TestReplacement)
709 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
711 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
713 #endif /* defined(ENABLE_REPLACEMENT) */
716 /* codegen_finish **************************************************************
718 Finishes the code generation. A new memory, large enough for both
719 data and code, is allocated and data and code are copied together
720 to their final layout, unresolved jumps are resolved, ...
722 *******************************************************************************/
724 void codegen_finish(jitdata *jd)
729 #if defined(ENABLE_INTRP)
737 /* get required compiler data */
742 /* prevent compiler warning */
744 #if defined(ENABLE_INTRP)
748 /* calculate the code length */
750 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
752 #if defined(ENABLE_STATISTICS)
754 count_code_len += mcodelen;
755 count_data_len += cd->dseglen;
759 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
761 #if defined(ENABLE_INTRP)
763 ncodelen = cd->ncodeptr - cd->ncodebase;
765 ncodelen = 0; /* avoid compiler warning */
769 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
770 alignedlen = alignedmcodelen + cd->dseglen;
772 #if defined(ENABLE_INTRP)
774 alignedlen += ncodelen;
778 /* allocate new memory */
780 code->mcodelength = mcodelen + cd->dseglen;
781 code->mcode = CNEW(u1, alignedlen);
783 /* set the entrypoint of the method */
785 assert(code->entrypoint == NULL);
786 code->entrypoint = epoint = (code->mcode + cd->dseglen);
788 /* fill the data segment (code->entrypoint must already be set!) */
792 /* copy code to the new location */
794 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
796 #if defined(ENABLE_INTRP)
797 /* relocate native dynamic superinstruction code (if any) */
800 cd->mcodebase = code->entrypoint;
803 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
805 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
807 /* flush the instruction and data caches */
809 md_cacheflush(ncodebase, ncodelen);
811 /* set some cd variables for dynamic_super_rewrite */
813 cd->ncodebase = ncodebase;
816 cd->ncodebase = NULL;
819 dynamic_super_rewrite(cd);
823 /* Create the exception table. */
825 exceptiontable_create(jd);
827 /* Create the linenumber table. */
829 linenumbertable_create(jd);
831 /* jump table resolving */
833 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
834 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
835 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
837 /* patcher resolving */
841 #if defined(ENABLE_REPLACEMENT)
842 /* replacement point resolving */
847 rp = code->rplpoints;
848 for (i=0; i<code->rplpointcount; ++i, ++rp) {
849 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
852 #endif /* defined(ENABLE_REPLACEMENT) */
854 /* Insert method into methodtree to find the entrypoint. */
856 methodtree_insert(code->entrypoint, code->entrypoint + mcodelen);
858 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
859 /* resolve data segment references */
861 dseg_resolve_datareferences(jd);
864 #if defined(ENABLE_THREADS)
865 /* create critical sections */
867 codegen_critical_section_finish(jd);
870 /* flush the instruction and data caches */
872 md_cacheflush(code->mcode, code->mcodelength);
876 /* codegen_generate_stub_compiler **********************************************
878 Wrapper for codegen_emit_stub_compiler.
881 pointer to the compiler stub code.
883 *******************************************************************************/
885 u1 *codegen_generate_stub_compiler(methodinfo *m)
889 ptrint *d; /* pointer to data memory */
890 u1 *c; /* pointer to code memory */
893 /* mark dump memory */
897 /* allocate required data structures */
902 jd->cd = DNEW(codegendata);
905 /* get required compiler data */
909 #if !defined(JIT_COMPILER_VIA_SIGNAL)
910 /* allocate code memory */
912 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
914 /* set pointers correctly */
920 c = c + 3 * SIZEOF_VOID_P;
923 /* NOTE: The codeinfo pointer is actually a pointer to the
924 methodinfo (this fakes a codeinfo structure). */
926 d[0] = (ptrint) asm_call_jit_compiler;
928 d[2] = (ptrint) &d[1]; /* fake code->m */
930 /* call the emit function */
932 codegen_emit_stub_compiler(jd);
934 #if defined(ENABLE_STATISTICS)
936 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
941 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
943 /* Allocate code memory. */
945 c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
947 /* Set pointers correctly. */
953 c = c + 2 * SIZEOF_VOID_P;
956 /* NOTE: The codeinfo pointer is actually a pointer to the
957 methodinfo (this fakes a codeinfo structure). */
960 d[1] = (ptrint) &d[0]; /* fake code->m */
962 /* Emit the trap instruction. */
964 emit_trap_compiler(cd);
966 #if defined(ENABLE_STATISTICS)
968 count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
973 md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
976 /* release dump memory */
980 /* return native stub code */
986 /* codegen_generate_stub_builtin ***********************************************
988 Wrapper for codegen_emit_stub_native.
990 *******************************************************************************/
992 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
999 /* mark dump memory */
1003 /* Create JIT data structure. */
1005 jd = jit_jitdata_new(m);
1007 /* Get required compiler data. */
1011 /* Stubs are non-leaf methods. */
1013 code_unflag_leafmethod(code);
1015 /* setup code generation stuff */
1019 /* Set the number of native arguments we need to skip. */
1023 /* generate the code */
1025 #if defined(ENABLE_JIT)
1026 # if defined(ENABLE_INTRP)
1029 assert(bte->fp != NULL);
1030 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1031 # if defined(ENABLE_INTRP)
1036 /* reallocate the memory and finish the code generation */
1040 /* set the stub entry point in the builtin table */
1042 bte->stub = code->entrypoint;
1044 #if defined(ENABLE_STATISTICS)
1046 size_stub_native += code->mcodelength;
1049 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1050 /* disassemble native stub */
1052 if (opt_DisassembleStubs) {
1053 codegen_disassemble_stub(m,
1054 (u1 *) (ptrint) code->entrypoint,
1055 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1057 /* show data segment */
1059 if (opt_showddatasegment)
1062 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1064 /* release memory */
1070 /* codegen_generate_stub_native ************************************************
1072 Wrapper for codegen_emit_stub_native.
1075 the codeinfo representing the stub code.
1077 *******************************************************************************/
1079 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1088 /* mark dump memory */
1092 /* Create JIT data structure. */
1094 jd = jit_jitdata_new(m);
1096 /* Get required compiler data. */
1100 /* Stubs are non-leaf methods. */
1102 code_unflag_leafmethod(code);
1104 /* set the flags for the current JIT run */
1106 #if defined(ENABLE_PROFILING)
1108 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1111 if (opt_verbosecall)
1112 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1114 /* setup code generation stuff */
1116 #if defined(ENABLE_JIT)
1117 # if defined(ENABLE_INTRP)
1125 /* create new method descriptor with additional native parameters */
1129 /* Set the number of native arguments we need to skip. */
1131 if (m->flags & ACC_STATIC)
1136 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1137 md->paramcount * sizeof(typedesc) +
1138 skipparams * sizeof(typedesc));
1140 nmd->paramcount = md->paramcount + skipparams;
1142 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1144 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1146 if (m->flags & ACC_STATIC)
1147 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1149 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1152 #if defined(ENABLE_JIT)
1153 # if defined(ENABLE_INTRP)
1156 /* pre-allocate the arguments for the native ABI */
1158 md_param_alloc_native(nmd);
1161 /* generate the code */
1163 #if defined(ENABLE_JIT)
1164 # if defined(ENABLE_INTRP)
1166 intrp_createnativestub(f, jd, nmd);
1169 codegen_emit_stub_native(jd, nmd, f, skipparams);
1171 intrp_createnativestub(f, jd, nmd);
1174 /* reallocate the memory and finish the code generation */
1178 #if defined(ENABLE_STATISTICS)
1179 /* must be done after codegen_finish() */
1182 size_stub_native += code->mcodelength;
1185 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1186 /* disassemble native stub */
1188 if (opt_DisassembleStubs) {
1189 # if defined(ENABLE_DEBUG_FILTER)
1190 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1193 codegen_disassemble_stub(m,
1194 (u1 *) (ptrint) code->entrypoint,
1195 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1197 /* show data segment */
1199 if (opt_showddatasegment)
1203 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1205 /* release memory */
1209 /* return native stub code */
1215 /* codegen_disassemble_stub ****************************************************
1217 Disassembles the generated builtin or native stub.
1219 *******************************************************************************/
1221 #if defined(ENABLE_DISASSEMBLER)
1222 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1224 printf("Stub code: ");
1225 if (m->clazz != NULL)
1226 utf_fprint_printable_ascii_classname(stdout, m->clazz->name);
1230 utf_fprint_printable_ascii(stdout, m->name);
1231 utf_fprint_printable_ascii(stdout, m->descriptor);
1232 printf("\nLength: %d\n\n", (s4) (end - start));
1234 DISASSEMBLE(start, end);
1239 /* codegen_start_native_call ***************************************************
1241 Prepares the stuff required for a native (JNI) function call:
1243 - adds a stackframe info structure to the chain, for stacktraces
1244 - prepares the local references table on the stack
1246 The layout of the native stub stackframe should look like this:
1248 +---------------------------+ <- java SP (of parent Java function)
1250 +---------------------------+ <- data SP
1252 | stackframe info structure |
1254 +---------------------------+
1256 | local references table |
1258 +---------------------------+
1260 | saved registers (if any) |
1262 +---------------------------+
1264 | arguments (if any) |
1266 +---------------------------+ <- current SP (native stub)
1268 *******************************************************************************/
1270 java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
1272 stackframeinfo_t *sfi;
1273 localref_table *lrt;
1280 uint64_t *arg_stack;
1282 STATISTICS(count_calls_java_to_native++);
1284 /* Get the methodinfo. */
1286 m = code_get_methodinfo_for_pv(pv);
1290 framesize = *((int32_t *) (pv + FrameSize));
1292 assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
1294 /* calculate needed values */
1296 #if defined(__ALPHA__) || defined(__ARM__)
1297 datasp = sp + framesize - SIZEOF_VOID_P;
1298 javasp = sp + framesize;
1299 arg_regs = (uint64_t *) sp;
1300 arg_stack = (uint64_t *) javasp;
1301 #elif defined(__MIPS__)
1302 /* MIPS always uses 8 bytes to store the RA */
1303 datasp = sp + framesize - 8;
1304 javasp = sp + framesize;
1305 #elif defined(__S390__)
1306 datasp = sp + framesize - 8;
1307 javasp = sp + framesize;
1308 arg_regs = (uint64_t *) (sp + 96);
1309 arg_stack = (uint64_t *) javasp;
1310 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1311 datasp = sp + framesize;
1312 javasp = sp + framesize + SIZEOF_VOID_P;
1313 arg_regs = (uint64_t *) sp;
1314 arg_stack = (uint64_t *) javasp;
1315 #elif defined(__POWERPC__)
1316 datasp = sp + framesize;
1317 javasp = sp + framesize;
1318 arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
1319 arg_stack = (uint64_t *) javasp;
1320 #elif defined(__POWERPC64__)
1321 datasp = sp + framesize;
1322 javasp = sp + framesize;
1323 arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1324 arg_stack = (uint64_t *) javasp;
1326 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1327 /* XXX maybe we need to pass the RA as argument there */
1328 vm_abort("codegen_start_native_call: unsupported architecture");
1331 /* get data structures from stack */
1333 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1334 lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
1335 sizeof(localref_table));
1337 #if defined(ENABLE_JNI)
1338 /* add current JNI local references table to this thread */
1340 localref_table_add(lrt);
1343 #if !defined(NDEBUG)
1344 # if defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__S390__) || defined(__X86_64__)
1345 /* print the call-trace if necessary */
1346 /* BEFORE: filling the local reference table */
1348 if (opt_TraceJavaCalls)
1349 trace_java_call_enter(m, arg_regs, arg_stack);
1353 #if defined(ENABLE_HANDLES)
1354 /* place all references into the local reference table */
1355 /* BEFORE: creating stackframeinfo */
1357 localref_native_enter(m, arg_regs, arg_stack);
1360 /* Add a stackframeinfo for this native method. We don't have RA
1361 and XPC here. These are determined in
1362 stacktrace_stackframeinfo_add. */
1364 stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
1366 /* Return a wrapped classinfo for static methods. */
1368 if (m->flags & ACC_STATIC)
1369 return (java_handle_t *) LLNI_classinfo_wrap(m->clazz);
1375 /* codegen_finish_native_call **************************************************
1377 Removes the stuff required for a native (JNI) function call.
1378 Additionally it checks for an exceptions and in case, get the
1379 exception object and clear the pointer.
1381 *******************************************************************************/
1383 java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
1385 stackframeinfo_t *sfi;
1395 /* get information from method header */
1397 code = code_get_codeinfo_for_pv(pv);
1399 framesize = *((int32_t *) (pv + FrameSize));
1403 /* get the methodinfo */
1408 /* calculate needed values */
1410 #if defined(__ALPHA__) || defined(__ARM__)
1411 datasp = sp + framesize - SIZEOF_VOID_P;
1412 ret_regs = (uint64_t *) sp;
1413 #elif defined(__MIPS__)
1414 /* MIPS always uses 8 bytes to store the RA */
1415 datasp = sp + framesize - 8;
1416 #elif defined(__S390__)
1417 datasp = sp + framesize - 8;
1418 ret_regs = (uint64_t *) (sp + 96);
1419 #elif defined(__I386__)
1420 datasp = sp + framesize;
1421 ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
1422 #elif defined(__M68K__)
1423 datasp = sp + framesize;
1424 ret_regs = (uint64_t *) (sp + 2 * 8);
1425 #elif defined(__X86_64__)
1426 datasp = sp + framesize;
1427 ret_regs = (uint64_t *) sp;
1428 #elif defined(__POWERPC__)
1429 datasp = sp + framesize;
1430 ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
1431 #elif defined(__POWERPC64__)
1432 datasp = sp + framesize;
1433 ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
1435 vm_abort("codegen_finish_native_call: unsupported architecture");
1438 /* get data structures from stack */
1440 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1442 /* Remove current stackframeinfo from chain. */
1444 stacktrace_stackframeinfo_remove(sfi);
1446 #if defined(ENABLE_HANDLES)
1447 /* unwrap the return value from the local reference table */
1448 /* AFTER: removing the stackframeinfo */
1449 /* BEFORE: releasing the local reference table */
1451 localref_native_exit(m, ret_regs);
1454 /* get and unwrap the exception */
1455 /* AFTER: removing the stackframe info */
1456 /* BEFORE: releasing the local reference table */
1458 e = exceptions_get_and_clear_exception();
1461 #if defined(ENABLE_JNI)
1462 /* release JNI local references table for this thread */
1464 localref_frame_pop_all();
1465 localref_table_remove();
1468 #if !defined(NDEBUG)
1469 # if defined(__ALPHA__) || defined(__I386__) || defined(__M68K__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__S390__) || defined(__X86_64__)
/* print the call-trace if necessary */
1471 /* AFTER: unwrapping the return value */
1473 if (opt_TraceJavaCalls)
1474 trace_java_call_exit(m, ret_regs);
1482 /* removecompilerstub **********************************************************
1484 Deletes a compilerstub from memory (simply by freeing it).
1486 *******************************************************************************/
1488 void removecompilerstub(u1 *stub)
1490 /* pass size 1 to keep the intern function happy */
1492 CFREE((void *) stub, 1);
1496 /* removenativestub ************************************************************
1498 Removes a previously created native-stub from memory.
1500 *******************************************************************************/
1502 void removenativestub(u1 *stub)
1504 /* pass size 1 to keep the intern function happy */
1506 CFREE((void *) stub, 1);
1510 /* codegen_reg_of_var **********************************************************
1512 This function determines a register, to which the result of an
1513 operation should go, when it is ultimatively intended to store the
1514 result in pseudoregister v. If v is assigned to an actual
1515 register, this register will be returned. Otherwise (when v is
1516 spilled) this function returns tempregnum. If not already done,
1517 regoff and flags are set in the stack location.
1519 *******************************************************************************/
1521 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1523 if (!(v->flags & INMEMORY))
1524 return v->vv.regoff;
1530 /* codegen_reg_of_dst **********************************************************
1532 This function determines a register, to which the result of an
1533 operation should go, when it is ultimatively intended to store the
1534 result in iptr->dst.var. If dst.var is assigned to an actual
1535 register, this register will be returned. Otherwise (when it is
1536 spilled) this function returns tempregnum. If not already done,
1537 regoff and flags are set in the stack location.
1539 *******************************************************************************/
1541 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1543 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
/* codegen_emit_phi_moves ****************************************************

   Emits phi moves at the end of the basicblock.

*******************************************************************************/

#if defined(ENABLE_SSA)
/* Resolves the phi functions for basicblock bptr by emitting one copy
   per recorded phi move pair.  NOTE(review): the move list appears to
   be precomputed in ls->phi_moves by the SSA register allocator —
   confirm against the allocator code. */
void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)

	/* Moves from phi functions with highest indices have to be */
	/* inserted first, since this is the order as is used for */
	/* conflict resolution */

	for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
		/* lifetime indices of the destination and source of this move */
		lt_d = ls->phi_moves[bptr->nr][i][0];
		lt_s = ls->phi_moves[bptr->nr][i][1];
#if defined(SSA_DEBUG_VERBOSE)
		printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
		/* a move with no source lifetime is skipped entirely */
		if (lt_s == UNUSED) {
#if defined(SSA_DEBUG_VERBOSE)
			printf(" ... not processed \n");

		/* map the lifetime indices to the jit variables they describe */
		d = VAR(ls->lifetime[lt_d].v_index);
		s = VAR(ls->lifetime[lt_s].v_index);

		/* type == -1 marks a lifetime that was joined with another one;
		   no move is required in that case */
		if (d->type == -1) {
#if defined(SSA_DEBUG_VERBOSE)
			printf("...returning - phi lifetimes where joined\n");

		if (s->type == -1) {
#if defined(SSA_DEBUG_VERBOSE)
			printf("...returning - phi lifetimes where joined\n");

		/* build a synthetic instruction so the generic copy emitter can
		   be reused for the actual register/memory move */
		tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
		tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
		emit_copy(jd, &tmp_i);

#if defined(SSA_DEBUG_VERBOSE)
		/* trace whether each side of the move lives in a register (R)
		   or is spilled to memory (M) */
		if (compileverbose) {
			if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
				printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
			else if (IS_INMEMORY(s->flags)) {
				printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
			else if (IS_INMEMORY(d->flags)) {
				printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
				printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
#endif /* defined(SSA_DEBUG_VERBOSE) */
#endif /* defined(ENABLE_SSA) */
/* REMOVEME When we have exception handling in C. */

/* Thin C-callable shim around the machine-dependent PV lookup: given a
   return address, yields the procedure vector of the enclosing method. */
void *md_asm_codegen_get_pv_from_pc(void *ra)
{
	void *pv = md_codegen_get_pv_from_pc(ra);

	return pv;
}
1646 * These are local overrides for various environment variables in Emacs.
1647 * Please do not remove this and leave it at the end of the file, where
1648 * Emacs will automagically detect them.
1649 * ---------------------------------------------------------------------
1652 * indent-tabs-mode: t
1656 * vim:noexpandtab:sw=4:ts=4: