1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 All functions assume the following code area / data area layout:
27 | code area | code area grows to higher addresses
29 +-----------+ <-- start of procedure
31 | data area | data area grows to lower addresses
35 The functions first write into a temporary code/data area allocated by
36 "codegen_init". "codegen_finish" copies the code and data area into permanent
37 memory. All functions writing values into the data area return the offset
38 relative the begin of the code area (start of procedure).
54 #include "mm/memory.h"
56 #include "toolbox/avl.h"
57 #include "toolbox/list.h"
58 #include "toolbox/logging.h"
60 #include "native/jni.h"
61 #include "native/llni.h"
62 #include "native/localref.h"
63 #include "native/native.h"
65 #if defined(WITH_CLASSPATH_SUN)
66 # include "native/include/java_lang_Object.h"
67 # include "native/include/java_lang_String.h"
68 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
69 # include "native/include/java_lang_ClassLoader.h"
72 #include "native/include/java_lang_Class.h"
74 #include "threads/threads-common.h"
76 #include "vm/builtin.h"
77 #include "vm/exceptions.h"
78 #include "vm/stringlocal.h"
80 #include "vm/jit/abi.h"
81 #include "vm/jit/asmpart.h"
82 #include "vm/jit/code.h"
83 #include "vm/jit/codegen-common.h"
85 #if defined(ENABLE_DISASSEMBLER)
86 # include "vm/jit/disass.h"
89 #include "vm/jit/dseg.h"
90 #include "vm/jit/emit-common.h"
91 #include "vm/jit/jit.h"
92 #include "vm/jit/linenumbertable.h"
93 #include "vm/jit/methodheader.h"
94 #include "vm/jit/patcher-common.h"
95 #include "vm/jit/replace.h"
96 #if defined(ENABLE_SSA)
97 # include "vm/jit/optimizing/lsra.h"
98 # include "vm/jit/optimizing/ssa.h"
100 #include "vm/jit/stacktrace.h"
101 #include "vm/jit/trace.h"
103 #if defined(ENABLE_INTRP)
104 #include "vm/jit/intrp/intrp.h"
107 #include "vmcore/method.h"
108 #include "vmcore/options.h"
110 # include "vmcore/statistics.h"
112 #if defined(ENABLE_VMLOG)
113 #include <vmlog_cacao.h>
118 /* in this tree we store all method addresses *********************************/
/* Global AVL tree with one node per generated method, keyed by its
   machine-code range [startpc, endpc].  Queried by
   codegen_get_pv_from_pc (and the _nocheck variant) to map a PC back
   to the owning method's code range. */
120 static avl_tree_t *methodtree = NULL;
/* range comparator for methodtree nodes; defined further below */
121 static s4 methodtree_comparator(const void *treenode, const void *node);
124 /* codegen_init ****************************************************************
128 *******************************************************************************/
/* One-time initialisation of the code generator's global state: creates
   the (global, not per-method) method tree and, with ENABLE_JIT,
   registers the address range of the asm_vm_call_method stub so stack
   walking can recognise it. */
130 void codegen_init(void)
132 /* this tree is global, not method specific */
135 #if defined(ENABLE_JIT)
136 methodtree_element *mte;
139 methodtree = avl_create(&methodtree_comparator);
141 #if defined(ENABLE_JIT)
142 /* insert asm_vm_call_method */
144 mte = NEW(methodtree_element);
146 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
147 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
149 avl_insert(methodtree, mte);
150 #endif /* defined(ENABLE_JIT) */
157 /* codegen_setup ***************************************************************
159 Allocates and initialises code area, data area and references.
161 *******************************************************************************/
/* Prepares a fresh codegendata for one compilation run: allocates the
   temporary machine-code buffer (grown later by codegen_increase), the
   interpreter ncode buffer under ENABLE_INTRP, and the dump-allocated
   per-method lists.  NOTE(review): the "cd = jd->cd"-style assignments
   are elided in this dump -- confirm against the full source. */
163 void codegen_setup(jitdata *jd)
168 /* get required compiler data */
173 /* initialize members */
177 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
178 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
179 cd->mcodesize = MCODEINITSIZE;
181 /* initialize mcode variables */
183 cd->mcodeptr = cd->mcodebase;
184 cd->lastmcodeptr = cd->mcodebase;
186 #if defined(ENABLE_INTRP)
187 /* native dynamic superinstructions variables */
190 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
191 cd->ncodesize = NCODEINITSIZE;
193 /* initialize ncode variables */
195 cd->ncodeptr = cd->ncodebase;
197 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
198 cd->superstarts = NULL;
205 cd->jumpreferences = NULL;
207 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
208 cd->datareferences = NULL;
/* dump-allocated lists; released wholesale with the dump area */
211 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
212 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
213 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
217 /* codegen_reset ***************************************************************
219 Resets the codegen data structure so we can recompile the method.
221 *******************************************************************************/
/* Rewinds the codegendata for a second emission pass (used for the
   long-branches recompile in codegen_generate): clears the error flag,
   rewinds the code pointers into the already-allocated buffer, recreates
   the per-method lists, and drops all stale per-block / per-codeinfo
   references that the next pass will regenerate. */
223 static void codegen_reset(jitdata *jd)
229 /* get required compiler data */
234 /* reset error flag */
236 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
238 /* reset some members, we reuse the code memory already allocated
239 as this should have almost the correct size */
241 cd->mcodeptr = cd->mcodebase;
242 cd->lastmcodeptr = cd->mcodebase;
247 cd->jumpreferences = NULL;
249 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
250 cd->datareferences = NULL;
253 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
254 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
255 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
257 /* We need to clear the mpc and the branch references from all
258 basic blocks as they will definitely change. */
260 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
262 bptr->branchrefs = NULL;
265 /* We need to clear all the patcher references from the codeinfo
266 since they all will be regenerated */
268 patcher_list_reset(code);
270 #if defined(ENABLE_REPLACEMENT)
271 code->rplpoints = NULL;
272 code->rplpointcount = 0;
273 code->regalloc = NULL;
274 code->regalloccount = 0;
275 code->globalcount = 0;
280 /* codegen_generate ************************************************************
282 Generates the code for the currently compiled method.
284 *******************************************************************************/
/* Drives machine-dependent code emission.  If codegen_emit flags an
   error and the cause is the long-branches condition, the codegendata
   is reset (codegen_reset) and emission is retried once with long
   branches enabled; any other error flag aborts the VM. */
286 bool codegen_generate(jitdata *jd)
290 /* get required compiler data */
294 /* call the machine-dependent code generation function */
296 if (!codegen_emit(jd))
299 /* check for an error */
301 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
302 /* check for long-branches flag, if it is set we recompile the
307 log_message_method("Re-generating code: ", jd->m);
310 /* XXX maybe we should tag long-branches-methods for recompilation */
312 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
313 /* we have to reset the codegendata structure first */
317 /* and restart the compiler run */
319 if (!codegen_emit(jd))
/* error set but not the long-branches case: nothing we can recover */
323 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
328 log_message_method("Re-generating code done: ", jd->m);
332 /* reallocate the memory and finish the code generation */
336 /* everything's ok */
342 /* codegen_close ***************************************************************
346 *******************************************************************************/
/* Shutdown hook for the code generator; currently does nothing. */
348 void codegen_close(void)
350 /* TODO: release avl tree on i386 and x86_64 */
354 /* codegen_increase ************************************************************
358 *******************************************************************************/
/* Grows the temporary machine-code area (reallocated to double size --
   the DMREALLOC argument lines are elided in this dump) and rebases
   mcodeptr, mcodeend and, on architectures that patch code backwards,
   lastmcodeptr into the new buffer. */
360 void codegen_increase(codegendata *cd)
364 /* save old mcodebase pointer */
366 oldmcodebase = cd->mcodebase;
368 /* reallocate to new, doubled memory */
370 cd->mcodebase = DMREALLOC(cd->mcodebase,
375 cd->mcodeend = cd->mcodebase + cd->mcodesize;
377 /* set new mcodeptr */
379 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
381 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
382 || defined(__SPARC_64__)
383 /* adjust the pointer to the last patcher position */
385 if (cd->lastmcodeptr != NULL)
386 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
391 /* codegen_ncode_increase ******************************************************
395 *******************************************************************************/
397 #if defined(ENABLE_INTRP)
/* Interpreter only: grows the native-code (ncode) area and returns the
   caller's ncodeptr translated into the reallocated buffer. */
398 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
402 /* save old ncodebase pointer */
404 oldncodebase = cd->ncodebase;
406 /* reallocate to new, doubled memory */
408 cd->ncodebase = DMREALLOC(cd->ncodebase,
414 /* return the new ncodeptr */
416 return (cd->ncodebase + (ncodeptr - oldncodebase));
421 /* codegen_add_branch_ref ******************************************************
423 Prepends a branch to the list.
425 *******************************************************************************/
/* Records a branch whose target address is not yet known: stores the
   branch instruction's code offset (mpc), condition, register and
   options on the target block's branchrefs list, to be patched by
   codegen_resolve_branchrefs once the target's mpc is fixed. */
427 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
432 STATISTICS(count_branches_unresolved++);
434 /* calculate the mpc of the branch instruction */
436 branchmpc = cd->mcodeptr - cd->mcodebase;
438 br = DNEW(branchref);
440 br->branchmpc = branchmpc;
441 br->condition = condition;
443 br->options = options;
/* prepend to the target's singly-linked reference list */
444 br->next = target->branchrefs;
446 target->branchrefs = br;
450 /* codegen_resolve_branchrefs **************************************************
452 Resolves and patches the branch references of a given basic block.
454 *******************************************************************************/
/* Now that bptr->mpc is known, re-emits every branch recorded against
   bptr at its original code offset.  cd->mcodeptr is rewound to each
   branch site for the emit and restored at the end. */
456 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
461 /* Save the mcodeptr because in the branch emitting functions
462 we generate code somewhere inside already generated code,
463 but we're still in the actual code generation phase. */
465 mcodeptr = cd->mcodeptr;
467 /* just to make sure */
469 assert(bptr->mpc >= 0);
471 for (br = bptr->branchrefs; br != NULL; br = br->next) {
472 /* temporarily set the mcodeptr */
474 cd->mcodeptr = cd->mcodebase + br->branchmpc;
476 /* emit_bccz and emit_branch emit the correct code, even if we
477 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
479 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
482 /* restore mcodeptr */
484 cd->mcodeptr = mcodeptr;
488 /* codegen_branch_label_add ****************************************************
490 Append a branch to the label-branch list.
492 *******************************************************************************/
/* Records a branch to a numeric label: the current code offset together
   with label/condition/register/options is appended to
   cd->brancheslabel for later resolution. */
494 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
497 branch_label_ref_t *br;
500 /* get the label list */
502 list = cd->brancheslabel;
504 /* calculate the current mpc */
506 mpc = cd->mcodeptr - cd->mcodebase;
508 br = DNEW(branch_label_ref_t);
512 br->condition = condition;
514 br->options = options;
516 /* add the branch to the list */
518 list_add_last_unsynced(list, br);
522 /* codegen_critical_section_new ************************************************
524 Allocates a new critical-section reference and adds it to the
525 critical-section list.
527 *******************************************************************************/
529 #if defined(ENABLE_THREADS)
/* Opens a new critical-section record at the current code offset.  Only
   the restart point can be recorded here; start and end are filled in
   by codegen_critical_section_start/_end below. */
530 void codegen_critical_section_new(codegendata *cd)
533 critical_section_ref_t *csr;
536 /* get the critical section list */
538 list = cd->listcritical;
540 /* calculate the current mpc */
542 mpc = cd->mcodeptr - cd->mcodebase;
544 csr = DNEW(critical_section_ref_t);
546 /* We only can set restart right now, as start and end are set by
547 the following, corresponding functions. */
553 /* add the critical-section reference to the list */
555 list_add_last_unsynced(list, csr);
560 /* codegen_critical_section_start **********************************************
562 Set the start-point of the current critical section (which is the
563 last element of the list).
565 *******************************************************************************/
567 #if defined(ENABLE_THREADS)
/* Marks the current code offset as the start of the most recently
   opened critical section (the last list element). */
568 void codegen_critical_section_start(codegendata *cd)
571 critical_section_ref_t *csr;
574 /* get the critical section list */
576 list = cd->listcritical;
578 /* calculate the current mpc */
580 mpc = cd->mcodeptr - cd->mcodebase;
582 /* get the current critical section */
584 csr = list_last_unsynced(list);
586 /* set the start point */
/* must not have been set already */
588 assert(csr->start == -1);
595 /* codegen_critical_section_end ************************************************
597 Set the end-point of the current critical section (which is the
598 last element of the list).
600 *******************************************************************************/
602 #if defined(ENABLE_THREADS)
/* Marks the current code offset as the end of the most recently opened
   critical section (the last list element). */
603 void codegen_critical_section_end(codegendata *cd)
606 critical_section_ref_t *csr;
609 /* get the critical section list */
611 list = cd->listcritical;
613 /* calculate the current mpc */
615 mpc = cd->mcodeptr - cd->mcodebase;
617 /* get the current critical section */
619 csr = list_last_unsynced(list);
621 /* set the end point */
/* must not have been set already */
623 assert(csr->end == -1);
630 /* codegen_critical_section_finish *********************************************
632 Finish the critical sections, create the critical section nodes for
633 the AVL tree and insert them into the tree.
635 *******************************************************************************/
637 #if defined(ENABLE_THREADS)
/* Runs at the end of code generation: converts every per-method
   critical-section reference (stored as code offsets) into a node with
   absolute addresses based at code->entrypoint and registers it in the
   global critical-section tree. */
638 static void codegen_critical_section_finish(jitdata *jd)
643 critical_section_ref_t *csr;
644 critical_section_node_t *csn;
646 /* get required compiler data */
651 /* get the critical section list */
653 list = cd->listcritical;
655 /* iterate over all critical sections */
657 for (csr = list_first_unsynced(list); csr != NULL;
658 csr = list_next_unsynced(list, csr)) {
659 /* check if all points are set */
661 assert(csr->start != -1);
662 assert(csr->end != -1);
663 assert(csr->restart != -1);
665 /* allocate tree node */
667 csn = NEW(critical_section_node_t);
/* translate offsets into absolute code addresses */
669 csn->start = code->entrypoint + csr->start;
670 csn->end = code->entrypoint + csr->end;
671 csn->restart = code->entrypoint + csr->restart;
673 /* insert into the tree */
675 critical_section_register(csn);
681 /* methodtree_comparator *******************************************************
683 Comparator function used for the AVL tree of methods.
686 treenode....the node from the tree
687 node........the node to compare to the tree-node
689 *******************************************************************************/
/* Returns 0 (equal) when the probe's [startpc, endpc] range lies within
   the tree node's range, so that a single-PC probe matches the method
   containing it; otherwise orders nodes by (masked) startpc.  On S390
   only the low 31 address bits are significant, hence ADDR_MASK. */
691 static s4 methodtree_comparator(const void *treenode, const void *node)
693 methodtree_element *mte;
694 methodtree_element *mtepc;
696 mte = (methodtree_element *) treenode;
697 mtepc = (methodtree_element *) node;
699 /* compare both startpc and endpc of pc, even if they have the same value,
700 otherwise the avl_probe sometimes thinks the element is already in the
704 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
706 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
708 # define ADDR_MASK(a) (a)
/* probe range fully contained in the tree node's range -> "equal" */
711 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
712 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
713 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
714 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
717 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
728 /* codegen_insertmethod ********************************************************
730 Insert the machine code range of a method into the AVL tree of methods.
732 *******************************************************************************/
/* Registers [startpc, endpc] in the global methodtree so that a PC
   inside the range can later be resolved via codegen_get_pv_from_pc. */
734 void codegen_insertmethod(u1 *startpc, u1 *endpc)
736 methodtree_element *mte;
738 /* allocate new method entry */
740 mte = NEW(methodtree_element);
742 mte->startpc = startpc;
745 /* this function does not return an error, but asserts for
748 avl_insert(methodtree, mte);
752 /* codegen_get_pv_from_pc ******************************************************
754 Find the PV for the given PC by searching in the AVL tree of
757 *******************************************************************************/
/* Looks up the method whose code range contains pc.  On lookup failure
   -- typically while handling a SIGSEGV outside generated code -- it
   logs diagnostics, prints a stacktrace and aborts the VM. */
759 u1 *codegen_get_pv_from_pc(u1 *pc)
761 methodtree_element mtepc;
762 methodtree_element *mte;
764 /* allocation of the search structure on the stack is much faster */
769 mte = avl_find(methodtree, &mtepc);
772 /* No method was found. Let's dump a stacktrace. */
774 #if defined(ENABLE_VMLOG)
775 vmlog_cacao_signl("SIGSEGV");
778 log_println("We received a SIGSEGV and tried to handle it, but we were");
779 log_println("unable to find a Java method at:");
781 #if SIZEOF_VOID_P == 8
782 log_println("PC=0x%016lx", pc);
784 log_println("PC=0x%08x", pc);
788 log_println("Dumping the current stacktrace:");
790 #if defined(ENABLE_THREADS)
791 /* XXX michi: This should be available even without threads! */
792 threads_print_stacktrace();
795 vm_abort("Exiting...");
802 /* codegen_get_pv_from_pc_nocheck **********************************************
804 Find the PV for the given PC by searching in the AVL tree of
805 methods. This method does not check the return value and is used
808 *******************************************************************************/
/* Like codegen_get_pv_from_pc but without the fatal-error path; the
   caller must handle a failed lookup itself. */
810 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
812 methodtree_element mtepc;
813 methodtree_element *mte;
815 /* allocation of the search structure on the stack is much faster */
820 mte = avl_find(methodtree, &mtepc);
829 /* codegen_set_replacement_point_notrap ****************************************
831 Record the position of a non-trappable replacement point.
833 *******************************************************************************/
835 #if defined(ENABLE_REPLACEMENT)
/* Stores the current code offset in the next replacement point (which
   must carry RPLPOINT_FLAG_NOTRAP) and advances the cursor.  The two
   prototypes below are alternatives selected by a build option whose
   #if line is elided in this dump. */
837 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
839 void codegen_set_replacement_point_notrap(codegendata *cd)
842 assert(cd->replacementpoint);
843 assert(cd->replacementpoint->type == type);
844 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
/* pc is stored as a code offset here; made absolute in codegen_finish */
846 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
848 cd->replacementpoint++;
850 #endif /* defined(ENABLE_REPLACEMENT) */
853 /* codegen_set_replacement_point ***********************************************
855 Record the position of a trappable replacement point.
857 *******************************************************************************/
859 #if defined(ENABLE_REPLACEMENT)
/* Trappable variant: records the current code offset in the next
   replacement point (which must NOT carry RPLPOINT_FLAG_NOTRAP),
   advances the cursor and keeps room behind the point free for a
   patcher call by bumping lastmcodeptr. */
861 void codegen_set_replacement_point(codegendata *cd, s4 type)
863 void codegen_set_replacement_point(codegendata *cd)
866 assert(cd->replacementpoint);
867 assert(cd->replacementpoint->type == type);
868 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
870 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
872 cd->replacementpoint++;
875 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
876 if (opt_TestReplacement)
880 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
/* reserve PATCHER_CALL_SIZE bytes behind the point */
882 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
884 #endif /* defined(ENABLE_REPLACEMENT) */
887 /* codegen_finish **************************************************************
889 Finishes the code generation. A new memory, large enough for both
890 data and code, is allocated and data and code are copied together
891 to their final layout, unresolved jumps are resolved, ...
893 *******************************************************************************/
/* Final pass after emission: copies data segment + code into one
   permanent, aligned allocation, makes all offset-based references
   (jump tables, patchers, replacement points, critical sections)
   absolute, registers the method in the methodtree and flushes caches. */
895 void codegen_finish(jitdata *jd)
900 #if defined(ENABLE_INTRP)
909 /* get required compiler data */
914 /* prevent compiler warning */
916 #if defined(ENABLE_INTRP)
920 /* calculate the code length */
922 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
924 #if defined(ENABLE_STATISTICS)
926 count_code_len += mcodelen;
927 count_data_len += cd->dseglen;
931 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
933 #if defined(ENABLE_INTRP)
935 ncodelen = cd->ncodeptr - cd->ncodebase;
937 ncodelen = 0; /* avoid compiler warning */
941 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
942 alignedlen = alignedmcodelen + cd->dseglen;
944 #if defined(ENABLE_INTRP)
946 alignedlen += ncodelen;
950 /* allocate new memory */
952 code->mcodelength = mcodelen + cd->dseglen;
953 code->mcode = CNEW(u1, alignedlen);
955 /* set the entrypoint of the method */
957 assert(code->entrypoint == NULL);
/* entrypoint sits right after the (downward-growing) data segment */
958 code->entrypoint = epoint = (code->mcode + cd->dseglen);
960 /* fill the data segment (code->entrypoint must already be set!) */
964 /* copy code to the new location */
966 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
968 #if defined(ENABLE_INTRP)
969 /* relocate native dynamic superinstruction code (if any) */
972 cd->mcodebase = code->entrypoint;
975 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
977 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
979 /* flush the instruction and data caches */
981 md_cacheflush(ncodebase, ncodelen);
983 /* set some cd variables for dynamic_super_rewrite */
985 cd->ncodebase = ncodebase;
988 cd->ncodebase = NULL;
991 dynamic_super_rewrite(cd);
995 /* Create the exception table. */
997 exceptiontable_create(jd);
999 /* Create the linenumber table. */
1001 linenumbertable_create(jd);
1003 /* jump table resolving */
/* turn table offsets into absolute targets relative to the entrypoint */
1005 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1006 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1007 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
1009 /* patcher resolving */
1011 pr = list_first_unsynced(code->patchers);
1013 pr->mpc += (ptrint) epoint;
1014 pr->datap = (ptrint) (pr->disp + epoint);
1015 pr = list_next_unsynced(code->patchers, pr);
1018 #if defined(ENABLE_REPLACEMENT)
1019 /* replacement point resolving */
1024 rp = code->rplpoints;
1025 for (i=0; i<code->rplpointcount; ++i, ++rp) {
1026 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1029 #endif /* defined(ENABLE_REPLACEMENT) */
1031 /* add method into methodtree to find the entrypoint */
1033 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1035 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1036 /* resolve data segment references */
1038 dseg_resolve_datareferences(jd);
1041 #if defined(ENABLE_THREADS)
1042 /* create critical sections */
1044 codegen_critical_section_finish(jd);
1047 /* flush the instruction and data caches */
1049 md_cacheflush(code->mcode, code->mcodelength);
1053 /* codegen_generate_stub_compiler **********************************************
1055 Wrapper for codegen_emit_stub_compiler.
1058 pointer to the compiler stub code.
1060 *******************************************************************************/
/* Builds the small per-method compiler stub: a few data words (compiler
   entry, methodinfo, fake codeinfo) followed by COMPILERSTUB_CODESIZE
   bytes of code.  The two layouts below are alternatives selected by
   JIT_COMPILER_VIA_SIGNAL (3 vs. 2 data words). */
1062 u1 *codegen_generate_stub_compiler(methodinfo *m)
1066 ptrint *d; /* pointer to data memory */
1067 u1 *c; /* pointer to code memory */
1070 /* mark dump memory */
1074 /* allocate required data structures */
1079 jd->cd = DNEW(codegendata);
1082 /* get required compiler data */
1086 #if !defined(JIT_COMPILER_VIA_SIGNAL)
1087 /* allocate code memory */
1089 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1091 /* set pointers correctly */
/* skip the data words; code starts behind them */
1097 c = c + 3 * SIZEOF_VOID_P;
1100 /* NOTE: The codeinfo pointer is actually a pointer to the
1101 methodinfo (this fakes a codeinfo structure). */
1103 d[0] = (ptrint) asm_call_jit_compiler;
1105 d[2] = (ptrint) &d[1]; /* fake code->m */
1107 /* call the emit function */
1109 codegen_emit_stub_compiler(jd);
1111 #if defined(ENABLE_STATISTICS)
1113 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1118 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1120 /* Allocate code memory. */
1122 c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1124 /* Set pointers correctly. */
1130 c = c + 2 * SIZEOF_VOID_P;
1133 /* NOTE: The codeinfo pointer is actually a pointer to the
1134 methodinfo (this fakes a codeinfo structure). */
1137 d[1] = (ptrint) &d[0]; /* fake code->m */
1139 /* Emit the trap instruction. */
1141 emit_trap_compiler(cd);
1143 #if defined(ENABLE_STATISTICS)
1145 count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1150 md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1153 /* release dump memory */
1157 /* return native stub code */
1163 /* codegen_generate_stub_builtin ***********************************************
1165 Wrapper for codegen_emit_stub_native.
1167 *******************************************************************************/
/* Generates the call-through stub for a builtin-table entry: sets up a
   jitdata, emits a native stub around bte->fp, finishes code generation
   and stores the resulting entrypoint in bte->stub. */
1169 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
1176 /* mark dump memory */
1180 /* Create JIT data structure. */
1182 jd = jit_jitdata_new(m);
1184 /* Get required compiler data. */
1188 /* Stubs are non-leaf methods. */
1190 code_unflag_leafmethod(code);
1192 /* setup code generation stuff */
1196 /* Set the number of native arguments we need to skip. */
1200 /* generate the code */
1202 #if defined(ENABLE_JIT)
1203 # if defined(ENABLE_INTRP)
/* a builtin must have a function pointer to wrap */
1206 assert(bte->fp != NULL);
1207 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1208 # if defined(ENABLE_INTRP)
1213 /* reallocate the memory and finish the code generation */
1217 /* set the stub entry point in the builtin table */
1219 bte->stub = code->entrypoint;
1221 #if defined(ENABLE_STATISTICS)
1223 size_stub_native += code->mcodelength;
1226 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1227 /* disassemble native stub */
1229 if (opt_DisassembleStubs) {
1230 codegen_disassemble_stub(m,
1231 (u1 *) (ptrint) code->entrypoint,
1232 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1234 /* show data segment */
1236 if (opt_showddatasegment)
1239 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1241 /* release memory */
1247 /* codegen_generate_stub_native ************************************************
1249 Wrapper for codegen_emit_stub_native.
1252 the codeinfo representing the stub code.
1254 *******************************************************************************/
/* Generates the JNI transition stub for native method m calling f:
   builds an extended method descriptor with the hidden native
   parameters (JNI environment and, for static methods, the class
   pointer), pre-allocates the native ABI argument locations and emits
   the stub (interpreter variant under ENABLE_INTRP). */
1256 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1265 /* mark dump memory */
1269 /* Create JIT data structure. */
1271 jd = jit_jitdata_new(m);
1273 /* Get required compiler data. */
1277 /* Stubs are non-leaf methods. */
1279 code_unflag_leafmethod(code);
1281 /* set the flags for the current JIT run */
1283 #if defined(ENABLE_PROFILING)
1285 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1288 if (opt_verbosecall)
1289 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1291 /* setup code generation stuff */
1293 #if defined(ENABLE_JIT)
1294 # if defined(ENABLE_INTRP)
1302 /* create new method descriptor with additional native parameters */
1306 /* Set the number of native arguments we need to skip. */
1308 if (m->flags & ACC_STATIC)
/* methoddesc already contains one typedesc, hence the subtraction */
1313 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1314 md->paramcount * sizeof(typedesc) +
1315 skipparams * sizeof(typedesc));
1317 nmd->paramcount = md->paramcount + skipparams;
1319 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1321 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1323 if (m->flags & ACC_STATIC)
1324 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1326 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1329 #if defined(ENABLE_JIT)
1330 # if defined(ENABLE_INTRP)
1333 /* pre-allocate the arguments for the native ABI */
1335 md_param_alloc_native(nmd);
1338 /* generate the code */
1340 #if defined(ENABLE_JIT)
1341 # if defined(ENABLE_INTRP)
1343 intrp_createnativestub(f, jd, nmd);
1346 codegen_emit_stub_native(jd, nmd, f, skipparams);
1348 intrp_createnativestub(f, jd, nmd);
1351 /* reallocate the memory and finish the code generation */
1355 #if defined(ENABLE_STATISTICS)
1356 /* must be done after codegen_finish() */
1359 size_stub_native += code->mcodelength;
1362 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1363 /* disassemble native stub */
1365 if (opt_DisassembleStubs) {
1366 # if defined(ENABLE_DEBUG_FILTER)
1367 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1370 codegen_disassemble_stub(m,
1371 (u1 *) (ptrint) code->entrypoint,
1372 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1374 /* show data segment */
1376 if (opt_showddatasegment)
1380 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1382 /* release memory */
1386 /* return native stub code */
1392 /* codegen_disassemble_stub ****************************************************
1394 Disassembles the generated builtin or native stub.
1396 *******************************************************************************/
1398 #if defined(ENABLE_DISASSEMBLER)
/* Prints a "Stub code:" header with the method's class (if any), name,
   descriptor and code length, then disassembles [start, end). */
1399 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1401 printf("Stub code: ");
1402 if (m->class != NULL)
1403 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1407 utf_fprint_printable_ascii(stdout, m->name);
1408 utf_fprint_printable_ascii(stdout, m->descriptor);
1409 printf("\nLength: %d\n\n", (s4) (end - start));
1411 DISASSEMBLE(start, end);
1416 /* codegen_start_native_call ***************************************************
1418 Prepares the stuff required for a native (JNI) function call:
1420 - adds a stackframe info structure to the chain, for stacktraces
1421 - prepares the local references table on the stack
1423 The layout of the native stub stackframe should look like this:
1425 +---------------------------+ <- java SP (of parent Java function)
1427 +---------------------------+ <- data SP
1429 | stackframe info structure |
1431 +---------------------------+
1433 | local references table |
1435 +---------------------------+
1437 | saved registers (if any) |
1439 +---------------------------+
1441 | arguments (if any) |
1443 +---------------------------+ <- current SP (native stub)
1445 *******************************************************************************/
/* Called from the native stub before entering the JNI function.  sp is
   the stub's stack pointer, pv the procedure vector; the frame size is
   read from the method header at pv + FrameSize.  Returns the wrapped
   classinfo for static methods (the hidden class argument), otherwise
   falls through (return for non-static case not visible in this dump). */
1447 java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
1449 stackframeinfo_t *sfi;
1450 localref_table *lrt;
1457 uint64_t *arg_stack;
1459 STATISTICS(count_calls_java_to_native++);
1461 /* Get the methodinfo. */
1463 m = code_get_methodinfo_for_pv(pv);
/* frame size is stored in the method header in front of the code */
1467 framesize = *((int32_t *) (pv + FrameSize));
1469 assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
1471 /* calculate needed values */
/* per-architecture frame layout: where sfi/lrt live (datasp), where the
   parent's Java frame starts (javasp), and where register/stack
   arguments were spilled by the stub */
1473 #if defined(__ALPHA__) || defined(__ARM__)
1474 datasp = sp + framesize - SIZEOF_VOID_P;
1475 javasp = sp + framesize;
1476 arg_regs = (uint64_t *) sp;
1477 arg_stack = (uint64_t *) javasp;
1478 #elif defined(__MIPS__)
1479 /* MIPS always uses 8 bytes to store the RA */
1480 datasp = sp + framesize - 8;
1481 javasp = sp + framesize;
1482 #elif defined(__S390__)
1483 datasp = sp + framesize - 8;
1484 javasp = sp + framesize;
1485 arg_regs = (uint64_t *) (sp + 96);
1486 arg_stack = (uint64_t *) javasp;
1487 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1488 datasp = sp + framesize;
1489 javasp = sp + framesize + SIZEOF_VOID_P;
1490 arg_regs = (uint64_t *) sp;
1491 arg_stack = (uint64_t *) javasp;
1492 #elif defined(__POWERPC__)
1493 datasp = sp + framesize;
1494 javasp = sp + framesize;
1495 arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
1496 arg_stack = (uint64_t *) javasp;
1497 #elif defined(__POWERPC64__)
1498 datasp = sp + framesize;
1499 javasp = sp + framesize;
1500 arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1501 arg_stack = (uint64_t *) javasp;
1503 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1504 /* XXX maybe we need to pass the RA as argument there */
1505 vm_abort("codegen_start_native_call: unsupported architecture");
1508 /* get data structures from stack */
1510 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1511 lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
1512 sizeof(localref_table));
1514 #if defined(ENABLE_JNI)
1515 /* add current JNI local references table to this thread */
1517 localref_table_add(lrt);
1520 #if !defined(NDEBUG)
1521 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1522 /* print the call-trace if necessary */
1523 /* BEFORE: filling the local reference table */
1525 if (opt_TraceJavaCalls)
1526 trace_java_call_enter(m, arg_regs, arg_stack);
1530 #if defined(ENABLE_HANDLES)
1531 /* place all references into the local reference table */
1532 /* BEFORE: creating stackframeinfo */
1534 localref_native_enter(m, arg_regs, arg_stack);
1537 /* Add a stackframeinfo for this native method. We don't have RA
1538 and XPC here. These are determined in
1539 stacktrace_stackframeinfo_add. */
1541 stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
1543 /* Return a wrapped classinfo for static methods. */
1545 if (m->flags & ACC_STATIC)
1546 return LLNI_classinfo_wrap(m->class);
1552 /* codegen_finish_native_call **************************************************
1554 Removes the stuff required for a native (JNI) function call.
Additionally it checks for a pending exception and, if one is present,
gets the exception object and clears the exception pointer.
1558 *******************************************************************************/
/* Tears down what codegen_start_native_call set up: removes the
   stackframe info from the thread's chain, releases the JNI local
   reference table, and returns the pending exception (cleared) so the
   stub can branch to the exception handler.  NOTE(review): several
   declarations (code, framesize, datasp, ret_regs, m, e) and matching
   #endif lines are not visible in this excerpt — confirm against the
   full file. */
java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
	stackframeinfo_t *sfi;  /* stackframe info registered on entry */

	/* get information from method header */

	code = code_get_codeinfo_for_pv(pv);

	framesize = *((int32_t *) (pv + FrameSize));

	/* get the methodinfo */

	/* calculate needed values */
	/* Per-architecture layout: datasp mirrors the computation in
	   codegen_start_native_call; ret_regs points at the saved native
	   return-value registers. */

#if defined(__ALPHA__) || defined(__ARM__)
	datasp   = sp + framesize - SIZEOF_VOID_P;
	ret_regs = (uint64_t *) sp;
#elif defined(__MIPS__)
	/* MIPS always uses 8 bytes to store the RA */
	datasp = sp + framesize - 8;
#elif defined(__S390__)
	datasp   = sp + framesize - 8;
	ret_regs = (uint64_t *) (sp + 96);  /* past the S390 register save area */
#elif defined(__I386__)
	datasp   = sp + framesize;
	ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
#elif defined(__M68K__) || defined(__X86_64__)
	datasp   = sp + framesize;
	ret_regs = (uint64_t *) sp;
#elif defined(__POWERPC__)
	datasp   = sp + framesize;
	ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
#elif defined(__POWERPC64__)
	datasp   = sp + framesize;
	ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
	vm_abort("codegen_finish_native_call: unsupported architecture");

	/* get data structures from stack */

	sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));

	/* Remove current stackframeinfo from chain. */

	stacktrace_stackframeinfo_remove(sfi);

#if defined(ENABLE_HANDLES)
	/* unwrap the return value from the local reference table */
	/* AFTER: removing the stackframeinfo */
	/* BEFORE: releasing the local reference table */

	localref_native_exit(m, ret_regs);

	/* get and unwrap the exception */
	/* AFTER: removing the stackframe info */
	/* BEFORE: releasing the local reference table */

	e = exceptions_get_and_clear_exception();

#if defined(ENABLE_JNI)
	/* release JNI local references table for this thread */

	localref_frame_pop_all();
	localref_table_remove();

#if !defined(NDEBUG)
# if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
	/* print the call-trace if necessary */
	/* AFTER: unwrapping the return value */

	if (opt_TraceJavaCalls)
		trace_java_call_exit(m, ret_regs);
1656 /* removecompilerstub **********************************************************
1658 Deletes a compilerstub from memory (simply by freeing it).
1660 *******************************************************************************/
1662 void removecompilerstub(u1 *stub)
1664 /* pass size 1 to keep the intern function happy */
1666 CFREE((void *) stub, 1);
1670 /* removenativestub ************************************************************
1672 Removes a previously created native-stub from memory.
1674 *******************************************************************************/
1676 void removenativestub(u1 *stub)
1678 /* pass size 1 to keep the intern function happy */
1680 CFREE((void *) stub, 1);
1684 /* codegen_reg_of_var **********************************************************
1686 This function determines a register, to which the result of an
operation should go, when it is ultimately intended to store the
1688 result in pseudoregister v. If v is assigned to an actual
1689 register, this register will be returned. Otherwise (when v is
1690 spilled) this function returns tempregnum. If not already done,
1691 regoff and flags are set in the stack location.
1693 *******************************************************************************/
1695 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1699 /* Do we have to generate a conditional move? Yes, then always
1700 return the temporary register. The real register is identified
1701 during the store. */
1703 if (opcode & ICMD_CONDITION_MASK)
1707 if (!(v->flags & INMEMORY))
1708 return v->vv.regoff;
1714 /* codegen_reg_of_dst **********************************************************
1716 This function determines a register, to which the result of an
operation should go, when it is ultimately intended to store the
1718 result in iptr->dst.var. If dst.var is assigned to an actual
1719 register, this register will be returned. Otherwise (when it is
1720 spilled) this function returns tempregnum. If not already done,
1721 regoff and flags are set in the stack location.
1723 *******************************************************************************/
1725 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1727 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1731 /* codegen_emit_phi_moves ****************************************************
1733 Emits phi moves at the end of the basicblock.
1735 *******************************************************************************/
#if defined(ENABLE_SSA)
/* Emit the copy instructions realizing the phi functions at the end of
   basicblock bptr.  NOTE(review): several declarations (ls, i, lt_d,
   lt_s, d, s, tmp_i), some braces and `continue` statements are not
   visible in this excerpt — confirm against the full file. */
void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
	/* Moves from phi functions with highest indices have to be */
	/* inserted first, since this is the order as is used for */
	/* conflict resolution */

	for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
		/* lt_d/lt_s are the destination/source lifetime indices of
		   this phi move. */
		lt_d = ls->phi_moves[bptr->nr][i][0];
		lt_s = ls->phi_moves[bptr->nr][i][1];
#if defined(SSA_DEBUG_VERBOSE)
		printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
		/* An UNUSED source means there is nothing to copy. */
		if (lt_s == UNUSED) {
#if defined(SSA_DEBUG_VERBOSE)
			printf(" ... not processed \n");
		d = VAR(ls->lifetime[lt_d].v_index);
		s = VAR(ls->lifetime[lt_s].v_index);
		/* type == -1 marks a lifetime eliminated by joining; skip. */
		if (d->type == -1) {
#if defined(SSA_DEBUG_VERBOSE)
			printf("...returning - phi lifetimes where joined\n");
		if (s->type == -1) {
#if defined(SSA_DEBUG_VERBOSE)
			printf("...returning - phi lifetimes where joined\n");
		/* Build a dummy instruction carrying the source/destination
		   variable indices and let emit_copy generate the move. */
		tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
		tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
		emit_copy(jd, &tmp_i);
#if defined(SSA_DEBUG_VERBOSE)
		/* Debug dump: M = in-memory (spilled), R = in a register. */
		if (compileverbose) {
			if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
				printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
			else if (IS_INMEMORY(s->flags)) {
				printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
			else if (IS_INMEMORY(d->flags)) {
				printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
				printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
#endif /* defined(SSA_DEBUG_VERBOSE) */
#endif /* defined(ENABLE_SSA) */
1821 /* REMOVEME When we have exception handling in C. */
/* C-callable trampoline so that assembler code can reach the
   machine-dependent PV lookup.  Simply forwards the return address. */
void *md_asm_codegen_get_pv_from_pc(void *ra)
{
	return md_codegen_get_pv_from_pc(ra);
}
1830 * These are local overrides for various environment variables in Emacs.
1831 * Please do not remove this and leave it at the end of the file, where
1832 * Emacs will automagically detect them.
1833 * ---------------------------------------------------------------------
1836 * indent-tabs-mode: t
1840 * vim:noexpandtab:sw=4:ts=4: