1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 All functions assume the following code area / data area layout:
29 | code area | code area grows to higher addresses
31 +-----------+ <-- start of procedure
33 | data area | data area grows to lower addresses
37 The functions first write into a temporary code/data area allocated by
38 "codegen_init". "codegen_finish" copies the code and data area into permanent
39 memory. All functions writing values into the data area return the offset
40 relative to the beginning of the code area (start of procedure).
55 #include "mm/memory.h"
57 #include "toolbox/avl.h"
58 #include "toolbox/list.h"
59 #include "toolbox/logging.h"
61 #include "native/jni.h"
62 #include "native/llni.h"
63 #include "native/localref.h"
64 #include "native/native.h"
66 #if defined(WITH_CLASSPATH_SUN)
67 # include "native/include/java_lang_Object.h"
68 # include "native/include/java_lang_String.h"
69 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
70 # include "native/include/java_lang_ClassLoader.h"
73 #include "native/include/java_lang_Class.h"
75 #include "threads/threads-common.h"
77 #include "vm/builtin.h"
78 #include "vm/exceptions.h"
79 #include "vm/stringlocal.h"
81 #include "vm/jit/abi.h"
82 #include "vm/jit/asmpart.h"
83 #include "vm/jit/codegen-common.h"
85 #if defined(ENABLE_DISASSEMBLER)
86 # include "vm/jit/disass.h"
89 #include "vm/jit/dseg.h"
90 #include "vm/jit/emit-common.h"
91 #include "vm/jit/jit.h"
92 #include "vm/jit/md.h"
93 #include "vm/jit/methodheader.h"
94 #include "vm/jit/patcher-common.h"
95 #include "vm/jit/replace.h"
96 #if defined(ENABLE_SSA)
97 # include "vm/jit/optimizing/lsra.h"
98 # include "vm/jit/optimizing/ssa.h"
100 #include "vm/jit/stacktrace.h"
101 #include "vm/jit/trace.h"
103 #if defined(ENABLE_INTRP)
104 #include "vm/jit/intrp/intrp.h"
107 #include "vmcore/method.h"
108 #include "vmcore/options.h"
110 # include "vmcore/statistics.h"
112 #if defined(ENABLE_VMLOG)
113 #include <vmlog_cacao.h>
118 /* in this tree we store all method addresses *********************************/
/* Global AVL tree mapping machine-code address ranges (startpc..endpc) to
   methods; queried by codegen_get_pv_from_pc* to resolve a PC to its PV. */
120 static avl_tree_t *methodtree = NULL;
/* Forward declaration: range/containment comparator for methodtree nodes. */
121 static s4 methodtree_comparator(const void *treenode, const void *node);
124 /* codegen_init ****************************************************************
   Initialises global (VM-wide, not per-method) code generator state:
   creates the method-address AVL tree and, when the JIT is enabled,
   registers the asm_vm_call_method code range so stack walking can
   recognise that assembler stub.
128 *******************************************************************************/
130 void codegen_init(void)
132 /* this tree is global, not method specific */
135 #if defined(ENABLE_JIT)
136 methodtree_element *mte;
139 methodtree = avl_create(&methodtree_comparator);
141 #if defined(ENABLE_JIT)
142 /* insert asm_vm_call_method */
/* NOTE(review): NEW allocates from permanent (non-dump) memory; the entry
   lives for the whole VM lifetime — never freed. */
144 mte = NEW(methodtree_element);
146 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
147 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
149 avl_insert(methodtree, mte);
150 #endif /* defined(ENABLE_JIT) */
157 /* codegen_setup ***************************************************************
159 Allocates and initialises code area, data area and references.
   All buffers come from dump memory (DMNEW) and are therefore released
   wholesale with the compiler's dump area, not individually.
161 *******************************************************************************/
163 void codegen_setup(jitdata *jd)
168 /* get required compiler data */
173 /* initialize members */
/* temporary machine-code buffer; grown on demand by codegen_increase */
177 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
178 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
179 cd->mcodesize = MCODEINITSIZE;
181 /* initialize mcode variables */
183 cd->mcodeptr = cd->mcodebase;
184 cd->lastmcodeptr = cd->mcodebase;
186 #if defined(ENABLE_INTRP)
187 /* native dynamic superinstructions variables */
190 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
191 cd->ncodesize = NCODEINITSIZE;
193 /* initialize ncode variables */
195 cd->ncodeptr = cd->ncodebase;
197 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
198 cd->superstarts = NULL;
205 cd->jumpreferences = NULL;
/* data-segment references only exist on architectures that embed
   data addresses directly in code */
207 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
208 cd->datareferences = NULL;
211 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
212 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
214 cd->linenumberreferences = NULL;
215 cd->linenumbertablesizepos = 0;
216 cd->linenumbertablestartpos = 0;
217 cd->linenumbertab = 0;
221 /* codegen_reset ***************************************************************
223 Resets the codegen data structure so we can recompile the method.
   Used by codegen_generate when the first pass failed with the
   long-branches condition; the existing mcode buffer is reused.
225 *******************************************************************************/
227 static void codegen_reset(jitdata *jd)
233 /* get required compiler data */
238 /* reset error flag */
240 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
242 /* reset some members, we reuse the code memory already allocated
243 as this should have almost the correct size */
245 cd->mcodeptr = cd->mcodebase;
246 cd->lastmcodeptr = cd->mcodebase;
251 cd->jumpreferences = NULL;
253 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
254 cd->datareferences = NULL;
/* fresh dump-allocated lists; the old ones are abandoned in dump memory */
257 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
258 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
260 cd->linenumberreferences = NULL;
261 cd->linenumbertablesizepos = 0;
262 cd->linenumbertablestartpos = 0;
263 cd->linenumbertab = 0;
265 /* We need to clear the mpc and the branch references from all
266 basic blocks as they will definitely change. */
268 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
270 bptr->branchrefs = NULL;
273 /* We need to clear all the patcher references from the codeinfo
274 since they all will be regenerated */
276 patcher_list_reset(code);
278 #if defined(ENABLE_REPLACEMENT)
/* discard replacement-point data; it is rebuilt during the re-run */
279 code->rplpoints = NULL;
280 code->rplpointcount = 0;
281 code->regalloc = NULL;
282 code->regalloccount = 0;
283 code->globalcount = 0;
288 /* codegen_generate ************************************************************
290 Generates the code for the currently compiled method.
   Drives the machine-dependent codegen_emit; if the first pass fails
   because branches were out of range (long-branches flag), the codegen
   state is reset and the emit pass is run a second time.
292 *******************************************************************************/
294 bool codegen_generate(jitdata *jd)
298 /* get required compiler data */
302 /* call the machine-dependent code generation function */
304 if (!codegen_emit(jd))
307 /* check for an error */
309 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
310 /* check for long-branches flag, if it is set we recompile the
315 log_message_method("Re-generating code: ", jd->m);
318 /* XXX maybe we should tag long-branches-methods for recompilation */
320 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
321 /* we have to reset the codegendata structure first */
325 /* and restart the compiler run */
327 if (!codegen_emit(jd))
/* any error other than long-branches at this point is fatal */
331 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
336 log_message_method("Re-generating code done: ", jd->m);
340 /* reallocate the memory and finish the code generation */
344 /* everything's ok */
350 /* codegen_close ***************************************************************
   Global code generator shutdown hook. Currently a no-op: the method
   AVL tree is intentionally leaked at VM exit (see TODO below).
354 *******************************************************************************/
356 void codegen_close(void)
358 /* TODO: release avl tree on i386 and x86_64 */
362 /* codegen_increase ************************************************************
   Doubles the temporary machine-code buffer when it is full and
   rebases mcodeptr (and, on some architectures, lastmcodeptr) into the
   reallocated memory.
366 *******************************************************************************/
368 void codegen_increase(codegendata *cd)
372 /* save old mcodebase pointer */
374 oldmcodebase = cd->mcodebase;
376 /* reallocate to new, doubled memory */
/* NOTE(review): DMREALLOC copies into fresh dump memory; all raw pointers
   into the old buffer become stale and must be rebased below. */
378 cd->mcodebase = DMREALLOC(cd->mcodebase,
383 cd->mcodeend = cd->mcodebase + cd->mcodesize;
385 /* set new mcodeptr */
387 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
389 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
390 || defined(__SPARC_64__)
391 /* adjust the pointer to the last patcher position */
393 if (cd->lastmcodeptr != NULL)
394 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
399 /* codegen_ncode_increase ******************************************************
   Interpreter only: grows the native dynamic-superinstruction code
   buffer and returns the caller's ncodeptr rebased into the new memory.
403 *******************************************************************************/
405 #if defined(ENABLE_INTRP)
406 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
410 /* save old ncodebase pointer */
412 oldncodebase = cd->ncodebase;
414 /* reallocate to new, doubled memory */
416 cd->ncodebase = DMREALLOC(cd->ncodebase,
422 /* return the new ncodeptr */
424 return (cd->ncodebase + (ncodeptr - oldncodebase));
429 /* codegen_add_branch_ref ******************************************************
431 Prepends an branch to the list.
   Records a forward branch to `target` that cannot be resolved yet; it
   is patched later by codegen_resolve_branchrefs once target->mpc is
   known.
433 *******************************************************************************/
435 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
440 STATISTICS(count_branches_unresolved++);
442 /* calculate the mpc of the branch instruction */
/* mpc = offset of the branch site relative to mcodebase, stable across
   buffer reallocations (unlike a raw pointer) */
444 branchmpc = cd->mcodeptr - cd->mcodebase;
446 br = DNEW(branchref);
448 br->branchmpc = branchmpc;
449 br->condition = condition;
451 br->options = options;
/* push onto the target block's singly-linked branchref list */
452 br->next = target->branchrefs;
454 target->branchrefs = br;
458 /* codegen_resolve_branchrefs **************************************************
460 Resolves and patches the branch references of a given basic block.
   Called once bptr->mpc is known; rewinds mcodeptr to each recorded
   branch site, re-emits the branch with the now-known displacement,
   then restores mcodeptr.
462 *******************************************************************************/
464 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
469 /* Save the mcodeptr because in the branch emitting functions
470 we generate code somewhere inside already generated code,
471 but we're still in the actual code generation phase. */
473 mcodeptr = cd->mcodeptr;
475 /* just to make sure */
477 assert(bptr->mpc >= 0);
479 for (br = bptr->branchrefs; br != NULL; br = br->next) {
480 /* temporary set the mcodeptr */
482 cd->mcodeptr = cd->mcodebase + br->branchmpc;
484 /* emit_bccz and emit_branch emit the correct code, even if we
485 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
487 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
490 /* restore mcodeptr */
492 cd->mcodeptr = mcodeptr;
496 /* codegen_branch_label_add ****************************************************
498 Append an branch to the label-branch list.
   Like codegen_add_branch_ref, but for branches to numeric labels
   rather than basic blocks; entries are appended (FIFO) to
   cd->brancheslabel for later resolution.
500 *******************************************************************************/
502 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
505 branch_label_ref_t *br;
508 /* get the label list */
510 list = cd->brancheslabel;
512 /* calculate the current mpc */
514 mpc = cd->mcodeptr - cd->mcodebase;
516 br = DNEW(branch_label_ref_t);
520 br->condition = condition;
522 br->options = options;
524 /* add the branch to the list */
/* _unsynced: dump-memory list, no locking — compilation is single-threaded
   per jitdata */
526 list_add_last_unsynced(list, br);
530 /* codegen_critical_section_new ************************************************
532 Allocates a new critical-section reference and adds it to the
533 critical-section list.
   Only the restart point (current mpc) is recorded here; start/end are
   filled in by codegen_critical_section_start/_end.
535 *******************************************************************************/
537 #if defined(ENABLE_THREADS)
538 void codegen_critical_section_new(codegendata *cd)
541 critical_section_ref_t *csr;
544 /* get the critical section list */
546 list = cd->listcritical;
548 /* calculate the current mpc */
550 mpc = cd->mcodeptr - cd->mcodebase;
552 csr = DNEW(critical_section_ref_t);
554 /* We only can set restart right now, as start and end are set by
555 the following, corresponding functions. */
561 /* add the branch to the list */
563 list_add_last_unsynced(list, csr);
568 /* codegen_critical_section_start **********************************************
570 Set the start-point of the current critical section (which is the
571 last element of the list).
   Must follow a codegen_critical_section_new for the same section; the
   assert guards against setting the start twice.
573 *******************************************************************************/
575 #if defined(ENABLE_THREADS)
576 void codegen_critical_section_start(codegendata *cd)
579 critical_section_ref_t *csr;
582 /* get the critical section list */
584 list = cd->listcritical;
586 /* calculate the current mpc */
588 mpc = cd->mcodeptr - cd->mcodebase;
590 /* get the current critical section */
592 csr = list_last_unsynced(list);
594 /* set the start point */
596 assert(csr->start == -1);
603 /* codegen_critical_section_end ************************************************
605 Set the end-point of the current critical section (which is the
606 last element of the list).
   Counterpart of codegen_critical_section_start; the assert guards
   against closing the same section twice.
608 *******************************************************************************/
610 #if defined(ENABLE_THREADS)
611 void codegen_critical_section_end(codegendata *cd)
614 critical_section_ref_t *csr;
617 /* get the critical section list */
619 list = cd->listcritical;
621 /* calculate the current mpc */
623 mpc = cd->mcodeptr - cd->mcodebase;
625 /* get the current critical section */
627 csr = list_last_unsynced(list);
629 /* set the end point */
631 assert(csr->end == -1);
638 /* codegen_critical_section_finish *********************************************
640 Finish the critical sections, create the critical section nodes for
641 the AVL tree and insert them into the tree.
   Runs in codegen_finish, after code->entrypoint is final, so the
   recorded mpc offsets can be turned into absolute code addresses.
643 *******************************************************************************/
645 #if defined(ENABLE_THREADS)
646 static void codegen_critical_section_finish(jitdata *jd)
651 critical_section_ref_t *csr;
652 critical_section_node_t *csn;
654 /* get required compiler data */
659 /* get the critical section list */
661 list = cd->listcritical;
663 /* iterate over all critical sections */
665 for (csr = list_first_unsynced(list); csr != NULL;
666 csr = list_next_unsynced(list, csr)) {
667 /* check if all points are set */
669 assert(csr->start != -1);
670 assert(csr->end != -1);
671 assert(csr->restart != -1);
673 /* allocate tree node */
/* permanent allocation: the node must outlive compilation */
675 csn = NEW(critical_section_node_t);
/* convert code-area offsets into absolute addresses */
677 csn->start = code->entrypoint + csr->start;
678 csn->end = code->entrypoint + csr->end;
679 csn->restart = code->entrypoint + csr->restart;
681 /* insert into the tree */
683 critical_section_register(csn);
689 /* methodtree_comparator *******************************************************
691 Comparator function used for the AVL tree of methods.
   Treats containment as equality: a probe node whose [startpc,endpc]
   lies inside a tree node's range compares equal, which is what makes
   PC-in-range lookups via avl_find work.
694 treenode....the node from the tree
695 node........the node to compare to the tree-node
697 *******************************************************************************/
699 static s4 methodtree_comparator(const void *treenode, const void *node)
701 methodtree_element *mte;
702 methodtree_element *mtepc;
704 mte = (methodtree_element *) treenode;
705 mtepc = (methodtree_element *) node;
707 /* compare both startpc and endpc of pc, even if they have the same value,
708 otherwise the avl_probe sometimes thinks the element is already in the
712 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
714 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
716 # define ADDR_MASK(a) (a)
/* equal iff the probe's whole range is contained in the tree node's range */
719 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
720 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
721 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
722 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
725 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
736 /* codegen_insertmethod ********************************************************
738 Insert the machine code range of a method into the AVL tree of methods.
   The entry is allocated from permanent memory and never removed.
740 *******************************************************************************/
742 void codegen_insertmethod(u1 *startpc, u1 *endpc)
744 methodtree_element *mte;
746 /* allocate new method entry */
748 mte = NEW(methodtree_element);
750 mte->startpc = startpc;
753 /* this function does not return an error, but asserts for
756 avl_insert(methodtree, mte);
760 /* codegen_get_pv_from_pc ******************************************************
762 Find the PV for the given PC by searching in the AVL tree of
   methods. If no method contains the PC, this is treated as a stray
   SIGSEGV: diagnostics are printed and the VM aborts (no return).
765 *******************************************************************************/
767 u1 *codegen_get_pv_from_pc(u1 *pc)
769 methodtree_element mtepc;
770 methodtree_element *mte;
772 /* allocation of the search structure on the stack is much faster */
777 mte = avl_find(methodtree, &mtepc);
780 /* No method was found. Let's dump a stacktrace. */
782 #if defined(ENABLE_VMLOG)
783 vmlog_cacao_signl("SIGSEGV");
786 log_println("We received a SIGSEGV and tried to handle it, but we were");
787 log_println("unable to find a Java method at:");
789 #if SIZEOF_VOID_P == 8
/* NOTE(review): passing a pointer for %lx/%x relies on pointer/long size
   matching — strictly a format-specifier mismatch; %p (or a ptrint cast)
   would be the conforming form. Verify before changing. */
790 log_println("PC=0x%016lx", pc);
792 log_println("PC=0x%08x", pc);
796 log_println("Dumping the current stacktrace:");
798 #if defined(ENABLE_THREADS)
799 /* XXX michi: This should be available even without threads! */
800 threads_print_stacktrace();
803 vm_abort("Exiting...");
810 /* codegen_get_pv_from_pc_nocheck **********************************************
812 Find the PV for the given PC by searching in the AVL tree of
813 methods. This method does not check the return value and is used
   by callers that can tolerate a miss (unlike codegen_get_pv_from_pc,
   which aborts the VM on failure).
816 *******************************************************************************/
818 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
820 methodtree_element mtepc;
821 methodtree_element *mte;
823 /* allocation of the search structure on the stack is much faster */
828 mte = avl_find(methodtree, &mtepc);
837 /* codegen_set_replacement_point_notrap ****************************************
839 Record the position of a non-trappable replacement point.
   Stores the current code offset into the active replacement point and
   advances cd->replacementpoint to the next one. The pc field holds an
   offset (mcodeptr - mcodebase) at this stage; it is rebased to an
   absolute address in codegen_finish.
841 *******************************************************************************/
843 #if defined(ENABLE_REPLACEMENT)
845 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
847 void codegen_set_replacement_point_notrap(codegendata *cd)
850 assert(cd->replacementpoint);
851 assert(cd->replacementpoint->type == type);
852 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
854 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
856 cd->replacementpoint++;
858 #endif /* defined(ENABLE_REPLACEMENT) */
861 /* codegen_set_replacement_point ***********************************************
863 Record the position of a trappable replacement point.
   As codegen_set_replacement_point_notrap, but for points that may be
   patched with a trap; reserves room for the patcher call by bumping
   lastmcodeptr past PATCHER_CALL_SIZE.
865 *******************************************************************************/
867 #if defined(ENABLE_REPLACEMENT)
869 void codegen_set_replacement_point(codegendata *cd, s4 type)
871 void codegen_set_replacement_point(codegendata *cd)
874 assert(cd->replacementpoint);
875 assert(cd->replacementpoint->type == type);
876 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
878 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
880 cd->replacementpoint++;
883 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
884 if (opt_TestReplacement)
888 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
890 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
892 #endif /* defined(ENABLE_REPLACEMENT) */
895 /* codegen_finish **************************************************************
897 Finishes the code generation. A new memory, large enough for both
898 data and code, is allocated and data and code are copied together
899 to their final layout, unresolved jumps are resolved, ...
   Also resolves line-number and patcher references, rebases
   replacement points, registers the method range in the methodtree,
   and flushes the instruction cache.
901 *******************************************************************************/
903 void codegen_finish(jitdata *jd)
908 #if defined(ENABLE_INTRP)
917 /* get required compiler data */
922 /* prevent compiler warning */
924 #if defined(ENABLE_INTRP)
928 /* calculate the code length */
930 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
932 #if defined(ENABLE_STATISTICS)
934 count_code_len += mcodelen;
935 count_data_len += cd->dseglen;
939 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
941 #if defined(ENABLE_INTRP)
943 ncodelen = cd->ncodeptr - cd->ncodebase;
945 ncodelen = 0; /* avoid compiler warning */
949 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
950 alignedlen = alignedmcodelen + cd->dseglen;
952 #if defined(ENABLE_INTRP)
954 alignedlen += ncodelen;
958 /* allocate new memory */
/* final layout: [data segment][code]; entrypoint sits at the boundary */
960 code->mcodelength = mcodelen + cd->dseglen;
961 code->mcode = CNEW(u1, alignedlen);
963 /* set the entrypoint of the method */
965 assert(code->entrypoint == NULL);
966 code->entrypoint = epoint = (code->mcode + cd->dseglen);
968 /* fill the data segment (code->entrypoint must already be set!) */
972 /* copy code to the new location */
974 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
976 #if defined(ENABLE_INTRP)
977 /* relocate native dynamic superinstruction code (if any) */
980 cd->mcodebase = code->entrypoint;
983 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
985 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
987 /* flush the instruction and data caches */
989 md_cacheflush(ncodebase, ncodelen);
991 /* set some cd variables for dynamic_super_rerwite */
993 cd->ncodebase = ncodebase;
996 cd->ncodebase = NULL;
999 dynamic_super_rewrite(cd);
1003 /* Create the exception table. */
1005 exceptiontable_create(jd);
1007 /* jump table resolving */
/* turn table offsets into absolute target addresses inside the new code */
1009 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1010 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1011 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
1013 /* line number table resolving */
1019 for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
1021 target = lr->targetmpc;
1022 /* if the entry contains an mcode pointer (normal case), resolve it */
1023 /* (see doc/inlining_stacktrace.txt for details) */
1024 if (lr->linenumber >= -2) {
1025 target += (ptrint) epoint;
1027 *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
1028 (functionptr) target;
1031 *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
1032 (functionptr) ((ptrint) epoint + cd->linenumbertab);
1034 *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
1037 /* patcher resolving */
/* rebase each patcher's mpc/data pointer from offsets to addresses */
1039 pr = list_first_unsynced(code->patchers);
1041 pr->mpc += (ptrint) epoint;
1042 pr->datap = (ptrint) (pr->disp + epoint);
1043 pr = list_next_unsynced(code->patchers, pr);
1046 #if defined(ENABLE_REPLACEMENT)
1047 /* replacement point resolving */
1052 rp = code->rplpoints;
1053 for (i=0; i<code->rplpointcount; ++i, ++rp) {
1054 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1057 #endif /* defined(ENABLE_REPLACEMENT) */
1059 /* add method into methodtree to find the entrypoint */
1061 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1063 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1064 /* resolve data segment references */
1066 dseg_resolve_datareferences(jd);
1069 #if defined(ENABLE_THREADS)
1070 /* create cirtical sections */
1072 codegen_critical_section_finish(jd);
1075 /* flush the instruction and data caches */
1077 md_cacheflush(code->mcode, code->mcodelength);
1081 /* codegen_generate_stub_compiler **********************************************
1083 Wrapper for codegen_emit_stub_compiler.
   Builds a tiny per-method trampoline that enters the JIT compiler on
   first call. The returned pointer addresses the code part; the data
   words preceding it fake a codeinfo so generic stack walking works.
1086 pointer to the compiler stub code.
1088 *******************************************************************************/
1090 u1 *codegen_generate_stub_compiler(methodinfo *m)
1094 ptrint *d; /* pointer to data memory */
1095 u1 *c; /* pointer to code memory */
1098 /* mark dump memory */
1100 dumpsize = dump_size();
1102 /* allocate required data structures */
1107 jd->cd = DNEW(codegendata);
1110 /* get required compiler data */
/* two variants follow: an explicit asm trampoline, or (with
   JIT_COMPILER_VIA_SIGNAL) a trap instruction handled by the signal path */
1114 #if !defined(JIT_COMPILER_VIA_SIGNAL)
1115 /* allocate code memory */
1117 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1119 /* set pointers correctly */
1125 c = c + 3 * SIZEOF_VOID_P;
1128 /* NOTE: The codeinfo pointer is actually a pointer to the
1129 methodinfo (this fakes a codeinfo structure). */
1131 d[0] = (ptrint) asm_call_jit_compiler;
1133 d[2] = (ptrint) &d[1]; /* fake code->m */
1135 /* call the emit function */
1137 codegen_emit_stub_compiler(jd);
1139 #if defined(ENABLE_STATISTICS)
1141 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1146 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1148 /* Allocate code memory. */
1150 c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1152 /* Set pointers correctly. */
1158 c = c + 2 * SIZEOF_VOID_P;
1161 /* NOTE: The codeinfo pointer is actually a pointer to the
1162 methodinfo (this fakes a codeinfo structure). */
1165 d[1] = (ptrint) &d[0]; /* fake code->m */
1167 /* Emit the trap instruction. */
1169 emit_trap_compiler(cd);
1171 #if defined(ENABLE_STATISTICS)
1173 count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1178 md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1181 /* release dump memory */
1183 dump_release(dumpsize);
1185 /* return native stub code */
1191 /* codegen_generate_stub_builtin ***********************************************
1193 Wrapper for codegen_emit_stub_native.
   Generates a native-style stub for a builtin-table function and
   stores its entrypoint back into the builtin table entry.
1195 *******************************************************************************/
1197 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
1204 /* mark dump memory */
1206 dumpsize = dump_size();
1208 /* Create JIT data structure. */
1210 jd = jit_jitdata_new(m);
1212 /* Get required compiler data. */
1216 /* setup code generation stuff */
1220 /* Set the number of native arguments we need to skip. */
1224 /* generate the code */
1226 #if defined(ENABLE_JIT)
1227 # if defined(ENABLE_INTRP)
/* builtins always have a native function pointer to call */
1230 assert(bte->fp != NULL);
1231 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1232 # if defined(ENABLE_INTRP)
1237 /* reallocate the memory and finish the code generation */
1241 /* set the stub entry point in the builtin table */
1243 bte->stub = code->entrypoint;
1245 #if defined(ENABLE_STATISTICS)
1247 size_stub_native += code->mcodelength;
1250 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1251 /* disassemble native stub */
1253 if (opt_DisassembleStubs) {
1254 codegen_disassemble_stub(m,
1255 (u1 *) (ptrint) code->entrypoint,
1256 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1258 /* show data segment */
1260 if (opt_showddatasegment)
1263 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1265 /* release memory */
1267 dump_release(dumpsize);
1271 /* codegen_generate_stub_native ************************************************
1273 Wrapper for codegen_emit_stub_native.
   Builds a JNI wrapper stub for native method m with implementation f:
   constructs an extended method descriptor that prepends the JNI
   environment pointer (and the class pointer for static methods),
   pre-allocates the native ABI argument layout, then emits the stub.
1276 the codeinfo representing the stub code.
1278 *******************************************************************************/
1280 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1289 /* mark dump memory */
1291 dumpsize = dump_size();
1293 /* Create JIT data structure. */
1295 jd = jit_jitdata_new(m);
1297 /* Get required compiler data. */
1301 /* set the flags for the current JIT run */
1303 #if defined(ENABLE_PROFILING)
1305 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1308 if (opt_verbosecall)
1309 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1311 /* setup code generation stuff */
1313 #if defined(ENABLE_JIT)
1314 # if defined(ENABLE_INTRP)
1322 /* create new method descriptor with additional native parameters */
1326 /* Set the number of native arguments we need to skip. */
1328 if (m->flags & ACC_STATIC)
/* methoddesc has a flexible typedesc tail: size it for the original
   parameters plus the extra skipped (env/class) slots */
1333 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1334 md->paramcount * sizeof(typedesc) +
1335 skipparams * sizeof(typedesc));
1337 nmd->paramcount = md->paramcount + skipparams;
1339 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1341 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1343 if (m->flags & ACC_STATIC)
1344 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1346 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1349 #if defined(ENABLE_JIT)
1350 # if defined(ENABLE_INTRP)
1353 /* pre-allocate the arguments for the native ABI */
1355 md_param_alloc_native(nmd);
1358 /* generate the code */
1360 #if defined(ENABLE_JIT)
1361 # if defined(ENABLE_INTRP)
1363 intrp_createnativestub(f, jd, nmd);
1366 codegen_emit_stub_native(jd, nmd, f, skipparams);
1368 intrp_createnativestub(f, jd, nmd);
1371 /* reallocate the memory and finish the code generation */
1375 #if defined(ENABLE_STATISTICS)
1376 /* must be done after codegen_finish() */
1379 size_stub_native += code->mcodelength;
1382 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1383 /* disassemble native stub */
1385 if (opt_DisassembleStubs) {
1386 # if defined(ENABLE_DEBUG_FILTER)
1387 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1390 codegen_disassemble_stub(m,
1391 (u1 *) (ptrint) code->entrypoint,
1392 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1394 /* show data segment */
1396 if (opt_showddatasegment)
1400 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1402 /* release memory */
1404 dump_release(dumpsize);
1406 /* return native stub code */
1412 /* codegen_disassemble_nativestub **********************************************
1414 Disassembles the generated builtin or native stub.
   Debug helper: prints the method identification (class, name,
   descriptor), the stub length, and the disassembly of [start, end).
1416 *******************************************************************************/
1418 #if defined(ENABLE_DISASSEMBLER)
1419 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1421 printf("Stub code: ");
/* class may be NULL, e.g. for stubs not tied to a loaded class */
1422 if (m->class != NULL)
1423 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1427 utf_fprint_printable_ascii(stdout, m->name);
1428 utf_fprint_printable_ascii(stdout, m->descriptor);
1429 printf("\nLength: %d\n\n", (s4) (end - start));
1431 DISASSEMBLE(start, end);
1436 /* codegen_start_native_call ***************************************************
1438 Prepares the stuff required for a native (JNI) function call:
1440 - adds a stackframe info structure to the chain, for stacktraces
1441 - prepares the local references table on the stack
1443 The layout of the native stub stackframe should look like this:
1445 +---------------------------+ <- java SP (of parent Java function)
1447 +---------------------------+ <- data SP
1449 | stackframe info structure |
1451 +---------------------------+
1453 | local references table |
1455 +---------------------------+
1457 | saved registers (if any) |
1459 +---------------------------+
1461 | arguments (if any) |
1463 +---------------------------+ <- current SP (native stub)
1465 *******************************************************************************/
/* See the layout diagram above: datasp marks the top of the data area
   holding the stackframe info and local reference table; javasp is the
   Java SP of the calling frame.  Returns a wrapped classinfo for static
   methods (used as the JNI jclass argument), otherwise falls through
   (return for the non-static case is outside this view). */
1467 java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
1469 stackframeinfo_t *sfi;
1470 localref_table *lrt;
1477 uint64_t *arg_stack;
1479 STATISTICS(count_calls_java_to_native++);
1481 /* Get the methodinfo. */
1483 m = code_get_methodinfo_for_pv(pv);
/* the frame size is stored in the method header at offset FrameSize */
1487 framesize = *((int32_t *) (pv + FrameSize));
/* the frame must at least hold the sfi and the local reference table */
1489 assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
1491 /* calculate needed values */
/* Per-architecture frame layout: where the data area ends, where the
   caller's Java SP is, and where argument registers were spilled.
   NOTE(review): the exact offsets (96 on S390, LA_SIZE/PA_SIZE on
   PowerPC) mirror the native stub's codegen for each arch -- confirm
   against the respective codegen.c when changing. */
1493 #if defined(__ALPHA__) || defined(__ARM__)
1494 datasp = sp + framesize - SIZEOF_VOID_P;
1495 javasp = sp + framesize;
1496 arg_regs = (uint64_t *) sp;
1497 arg_stack = (uint64_t *) javasp;
1498 #elif defined(__MIPS__)
1499 /* MIPS always uses 8 bytes to store the RA */
1500 datasp = sp + framesize - 8;
1501 javasp = sp + framesize;
1502 #elif defined(__S390__)
1503 datasp = sp + framesize - 8;
1504 javasp = sp + framesize;
1505 arg_regs = (uint64_t *) (sp + 96);
1506 arg_stack = (uint64_t *) javasp;
1507 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1508 datasp = sp + framesize;
1509 javasp = sp + framesize + SIZEOF_VOID_P;
1510 arg_regs = (uint64_t *) sp;
1511 arg_stack = (uint64_t *) javasp;
1512 #elif defined(__POWERPC__)
1513 datasp = sp + framesize;
1514 javasp = sp + framesize;
1515 arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
1516 arg_stack = (uint64_t *) javasp;
1517 #elif defined(__POWERPC64__)
1518 datasp = sp + framesize;
1519 javasp = sp + framesize;
1520 arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1521 arg_stack = (uint64_t *) javasp;
1523 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1524 /* XXX maybe we need to pass the RA as argument there */
/* unsupported architectures abort the VM outright */
1525 vm_abort("codegen_start_native_call: unsupported architecture");
1528 /* get data structures from stack */
/* both structures sit just below datasp: sfi first, then the lrt */
1530 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1531 lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
1532 sizeof(localref_table));
1534 #if defined(ENABLE_JNI)
1535 /* add current JNI local references table to this thread */
1537 localref_table_add(lrt);
1540 #if !defined(NDEBUG)
1541 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1542 /* print the call-trace if necessary */
1543 /* BEFORE: filling the local reference table */
1545 if (opt_TraceJavaCalls)
1546 trace_java_call_enter(m, arg_regs, arg_stack);
1550 #if defined(ENABLE_HANDLES)
1551 /* place all references into the local reference table */
1552 /* BEFORE: creating stackframeinfo */
1554 localref_native_enter(m, arg_regs, arg_stack);
1557 /* Add a stackframeinfo for this native method. We don't have RA
1558 and XPC here. These are determined in
1559 stacktrace_stackframeinfo_add. */
1561 stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
1563 /* Return a wrapped classinfo for static methods. */
1565 if (m->flags & ACC_STATIC)
1566 return LLNI_classinfo_wrap(m->class);
1572 /* codegen_finish_native_call **************************************************
1574 Removes the stuff required for a native (JNI) function call.
1575 Additionally it checks for a pending exception and, if there is
1576 one, gets the exception object and clears the exception pointer.
1578 *******************************************************************************/
/* Counterpart to codegen_start_native_call: tears down the sfi and the
   JNI local reference table, unwraps the native return value, and
   returns the pending exception object (cleared), if any. */
1580 java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
1582 stackframeinfo_t *sfi;
1592 /* get information from method header */
1594 code = *((codeinfo **) (pv + CodeinfoPointer));
1595 framesize = *((int32_t *) (pv + FrameSize));
1598 /* get the methodinfo */
1603 /* calculate needed values */
/* Per-architecture layout, mirroring codegen_start_native_call; here
   only datasp and the location of the spilled return registers matter.
   NOTE(review): keep these offsets in sync with the start-call variant
   and each arch's native stub codegen. */
1605 #if defined(__ALPHA__) || defined(__ARM__)
1606 datasp = sp + framesize - SIZEOF_VOID_P;
1607 ret_regs = (uint64_t *) sp;
1608 #elif defined(__MIPS__)
1609 /* MIPS always uses 8 bytes to store the RA */
1610 datasp = sp + framesize - 8;
1611 #elif defined(__S390__)
1612 datasp = sp + framesize - 8;
1613 ret_regs = (uint64_t *) (sp + 96);
1614 #elif defined(__I386__)
1615 datasp = sp + framesize;
1616 ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
1617 #elif defined(__M68K__) || defined(__X86_64__)
1618 datasp = sp + framesize;
1619 ret_regs = (uint64_t *) sp;
1620 #elif defined(__POWERPC__)
1621 datasp = sp + framesize;
1622 ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
1623 #elif defined(__POWERPC64__)
1624 datasp = sp + framesize;
1625 ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
/* unsupported architectures abort the VM outright */
1627 vm_abort("codegen_finish_native_call: unsupported architecture");
1630 /* get data structures from stack */
1632 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1634 /* Remove current stackframeinfo from chain. */
1636 stacktrace_stackframeinfo_remove(sfi);
1638 #if defined(ENABLE_HANDLES)
1639 /* unwrap the return value from the local reference table */
1640 /* AFTER: removing the stackframeinfo */
1641 /* BEFORE: releasing the local reference table */
1643 localref_native_exit(m, ret_regs);
1646 /* get and unwrap the exception */
1647 /* AFTER: removing the stackframe info */
1648 /* BEFORE: releasing the local reference table */
1650 e = exceptions_get_and_clear_exception();
1653 #if defined(ENABLE_JNI)
1654 /* release JNI local references table for this thread */
1656 localref_frame_pop_all();
1657 localref_table_remove();
1660 #if !defined(NDEBUG)
1661 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1662 /* print the call-trace if necessary */
1663 /* AFTER: unwrapping the return value */
1665 if (opt_TraceJavaCalls)
1666 trace_java_call_exit(m, ret_regs);
1674 /* removecompilerstub **********************************************************
1676 Deletes a compilerstub from memory (simply by freeing it).
1678 *******************************************************************************/
/* Free the memory occupied by a compiler stub. */
1680 void removecompilerstub(u1 *stub)
1682 /* pass size 1 to keep the intern function happy */
1684 CFREE((void *) stub, 1);
1688 /* removenativestub ************************************************************
1690 Removes a previously created native-stub from memory.
1692 *******************************************************************************/
/* Free the memory occupied by a previously created native stub. */
1694 void removenativestub(u1 *stub)
1696 /* pass size 1 to keep the intern function happy */
1698 CFREE((void *) stub, 1);
1702 /* codegen_reg_of_var **********************************************************
1704 This function determines a register, to which the result of an
1705 operation should go, when it is ultimatively intended to store the
1706 result in pseudoregister v. If v is assigned to an actual
1707 register, this register will be returned. Otherwise (when v is
1708 spilled) this function returns tempregnum. If not already done,
1709 regoff and flags are set in the stack location.
1711 *******************************************************************************/
/* Pick the register that the result of an operation should be computed
   into: the variable's own register when it has one, otherwise (spilled
   or conditional-move cases) the supplied temporary register. */
1713 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1717 /* Do we have to generate a conditional move? Yes, then always
1718 return the temporary register. The real register is identified
1719 during the store. */
1721 if (opcode & ICMD_CONDITION_MASK)
/* v is register-allocated: hand back its assigned register directly */
1725 if (!(v->flags & INMEMORY))
1726 return v->vv.regoff;
1732 /* codegen_reg_of_dst **********************************************************
1734 This function determines a register, to which the result of an
1735 operation should go, when it is ultimatively intended to store the
1736 result in iptr->dst.var. If dst.var is assigned to an actual
1737 register, this register will be returned. Otherwise (when it is
1738 spilled) this function returns tempregnum. If not already done,
1739 regoff and flags are set in the stack location.
1741 *******************************************************************************/
/* Convenience wrapper: resolve the destination variable of iptr and
   delegate the register choice to codegen_reg_of_var. */
1743 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1745 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1749 /* codegen_emit_phi_moves ****************************************************
1751 Emits phi moves at the end of the basicblock.
1753 *******************************************************************************/
1755 #if defined(ENABLE_SSA)
/* Emit the moves required by the phi functions at the end of basic
   block bptr, iterating the recorded phi-move pairs in reverse order.
   Each pair maps a destination lifetime (index 0) to a source lifetime
   (index 1); the actual move is emitted via emit_copy on a synthesized
   instruction.  SSA builds only. */
1756 void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
1769 /* Moves from phi functions with highest indices have to be */
1770 /* inserted first, since this is the order as is used for */
1771 /* conflict resolution */
1773 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
1774 lt_d = ls->phi_moves[bptr->nr][i][0];
1775 lt_s = ls->phi_moves[bptr->nr][i][1];
1776 #if defined(SSA_DEBUG_VERBOSE)
1778 printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
/* a source of UNUSED marks a move that needs no processing */
1780 if (lt_s == UNUSED) {
1781 #if defined(SSA_DEBUG_VERBOSE)
1783 printf(" ... not processed \n");
1788 d = VAR(ls->lifetime[lt_d].v_index);
1789 s = VAR(ls->lifetime[lt_s].v_index);
/* type == -1 indicates the phi lifetimes were joined; skip the move
   (note: the debug string's "where" is a historic typo for "were") */
1792 if (d->type == -1) {
1793 #if defined(SSA_DEBUG_VERBOSE)
1795 printf("...returning - phi lifetimes where joined\n");
1800 if (s->type == -1) {
1801 #if defined(SSA_DEBUG_VERBOSE)
1803 printf("...returning - phi lifetimes where joined\n");
/* build a minimal copy instruction and let the backend emit it */
1809 tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
1810 tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
1811 emit_copy(jd, &tmp_i);
1813 #if defined(SSA_DEBUG_VERBOSE)
/* verbose trace: R = register-resident, M = memory-resident operand */
1814 if (compileverbose) {
1815 if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
1817 printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1819 else if (IS_INMEMORY(s->flags)) {
1821 printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1823 else if (IS_INMEMORY(d->flags)) {
1825 printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1829 printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1833 #endif /* defined(SSA_DEBUG_VERBOSE) */
1836 #endif /* defined(ENABLE_SSA) */
1841 * These are local overrides for various environment variables in Emacs.
1842 * Please do not remove this and leave it at the end of the file, where
1843 * Emacs will automagically detect them.
1844 * ---------------------------------------------------------------------
1847 * indent-tabs-mode: t
1851 * vim:noexpandtab:sw=4:ts=4: