1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 All functions assume the following code area / data area layout:
29 | code area | code area grows to higher addresses
31 +-----------+ <-- start of procedure
33 | data area | data area grows to lower addresses
37 The functions first write into a temporary code/data area allocated by
38 "codegen_init". "codegen_finish" copies the code and data area into permanent
39 memory. All functions writing values into the data area return the offset
40 relative the begin of the code area (start of procedure).
52 #if defined(ENABLE_JIT)
53 /* this is required PATCHER_CALL_SIZE */
59 #include "mm/memory.h"
61 #include "toolbox/avl.h"
62 #include "toolbox/list.h"
63 #include "toolbox/logging.h"
65 #include "native/jni.h"
66 #include "native/llni.h"
67 #include "native/localref.h"
68 #include "native/native.h"
70 #if defined(WITH_CLASSPATH_SUN)
71 # include "native/include/java_lang_Object.h"
72 # include "native/include/java_lang_String.h"
73 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
74 # include "native/include/java_lang_ClassLoader.h"
77 #include "native/include/java_lang_Class.h"
79 #include "threads/threads-common.h"
81 #include "vm/builtin.h"
82 #include "vm/exceptions.h"
83 #include "vm/stringlocal.h"
85 #include "vm/jit/abi.h"
86 #include "vm/jit/asmpart.h"
87 #include "vm/jit/codegen-common.h"
89 #if defined(ENABLE_DISASSEMBLER)
90 # include "vm/jit/disass.h"
93 #include "vm/jit/dseg.h"
94 #include "vm/jit/emit-common.h"
95 #include "vm/jit/jit.h"
96 #include "vm/jit/md.h"
97 #include "vm/jit/methodheader.h"
98 #include "vm/jit/patcher-common.h"
99 #include "vm/jit/replace.h"
100 #if defined(ENABLE_SSA)
101 # include "vm/jit/optimizing/lsra.h"
102 # include "vm/jit/optimizing/ssa.h"
104 #include "vm/jit/stacktrace.h"
105 #include "vm/jit/trace.h"
107 #if defined(ENABLE_INTRP)
108 #include "vm/jit/intrp/intrp.h"
111 #include "vmcore/method.h"
112 #include "vmcore/options.h"
114 # include "vmcore/statistics.h"
116 #if defined(ENABLE_VMLOG)
117 #include <vmlog_cacao.h>
122 /* in this tree we store all method addresses *********************************/
124 static avl_tree_t *methodtree = NULL;
125 static s4 methodtree_comparator(const void *treenode, const void *node);
128 /* codegen_init ****************************************************************
132 *******************************************************************************/
134 void codegen_init(void)
/* Creates the global (VM-wide, not per-method) AVL tree that maps machine
   code address ranges back to methods, and for JIT builds registers the
   asm_vm_call_method trampoline range so stack walking recognizes it.
   NOTE(review): this listing is elided -- braces and some statements of
   the original file are not visible here; only comments were added. */
136 /* this tree is global, not method specific */
139 #if defined(ENABLE_JIT)
140 methodtree_element *mte;
143 methodtree = avl_create(&methodtree_comparator);
145 #if defined(ENABLE_JIT)
146 /* insert asm_vm_call_method */
148 mte = NEW(methodtree_element);
150 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
151 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
153 avl_insert(methodtree, mte);
154 #endif /* defined(ENABLE_JIT) */
161 /* codegen_setup ***************************************************************
163 Allocates and initialises code area, data area and references.
165 *******************************************************************************/
167 void codegen_setup(jitdata *jd)
/* Allocates and initializes the per-compilation codegendata: the temporary
   machine-code buffer (dump memory), the interpreter ncode buffer when
   ENABLE_INTRP is set, and all reference/label/line-number bookkeeping
   lists used during code generation.  NOTE(review): elided listing --
   declarations and braces are missing from this view. */
172 /* get required compiler data */
177 /* initialize members */
181 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
182 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
183 cd->mcodesize = MCODEINITSIZE;
185 /* initialize mcode variables */
187 cd->mcodeptr = cd->mcodebase;
188 cd->lastmcodeptr = cd->mcodebase;
190 #if defined(ENABLE_INTRP)
191 /* native dynamic superinstructions variables */
194 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
195 cd->ncodesize = NCODEINITSIZE;
197 /* initialize ncode variables */
199 cd->ncodeptr = cd->ncodebase;
201 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
202 cd->superstarts = NULL;
209 cd->jumpreferences = NULL;
/* data-segment references only exist on architectures that address the
   data segment PC/PV-relatively */
211 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
212 cd->datareferences = NULL;
215 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
216 cd->patchrefs = NULL;
217 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
218 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
220 cd->linenumberreferences = NULL;
221 cd->linenumbertablesizepos = 0;
222 cd->linenumbertablestartpos = 0;
223 cd->linenumbertab = 0;
227 /* codegen_reset ***************************************************************
229 Resets the codegen data structure so we can recompile the method.
231 *******************************************************************************/
233 static void codegen_reset(jitdata *jd)
/* Resets the codegendata for a second code-generation pass over the same
   method (e.g. after the long-branches flag was raised).  Reuses the
   already allocated code buffer and clears all per-pass bookkeeping so
   codegen_emit can run again from scratch.  NOTE(review): elided listing
   -- declarations and braces are missing from this view. */
239 /* get required compiler data */
244 /* reset error flag */
246 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
248 /* reset some members, we reuse the code memory already allocated
249 as this should have almost the correct size */
251 cd->mcodeptr = cd->mcodebase;
252 cd->lastmcodeptr = cd->mcodebase;
257 cd->jumpreferences = NULL;
259 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
260 cd->datareferences = NULL;
263 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
264 cd->patchrefs = NULL;
265 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
266 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
268 cd->linenumberreferences = NULL;
269 cd->linenumbertablesizepos = 0;
270 cd->linenumbertablestartpos = 0;
271 cd->linenumbertab = 0;
273 /* We need to clear the mpc and the branch references from all
274 basic blocks as they will definitely change. */
276 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
278 bptr->branchrefs = NULL;
281 /* We need to clear all the patcher references from the codeinfo
282 since they all will be regenerated */
284 patcher_list_reset(code);
286 #if defined(ENABLE_REPLACEMENT)
287 code->rplpoints = NULL;
288 code->rplpointcount = 0;
289 code->regalloc = NULL;
290 code->regalloccount = 0;
291 code->globalcount = 0;
296 /* codegen_generate ************************************************************
298 Generates the code for the currently compiled method.
300 *******************************************************************************/
302 bool codegen_generate(jitdata *jd)
/* Drives code generation for the method currently being compiled: calls
   the machine-dependent codegen_emit, and on a long-branches error resets
   the codegendata and re-runs codegen_emit once with long branches
   enabled.  Any other codegen error aborts the VM.  NOTE(review): elided
   listing -- return statements and braces are missing from this view. */
306 /* get required compiler data */
310 /* call the machine-dependent code generation function */
312 if (!codegen_emit(jd))
315 /* check for an error */
317 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
318 /* check for long-branches flag, if it is set we recompile the
323 log_message_method("Re-generating code: ", jd->m);
326 /* XXX maybe we should tag long-branches-methods for recompilation */
328 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
329 /* we have to reset the codegendata structure first */
333 /* and restart the compiler run */
335 if (!codegen_emit(jd))
339 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
344 log_message_method("Re-generating code done: ", jd->m);
348 /* reallocate the memory and finish the code generation */
352 /* everything's ok */
358 /* codegen_close ***************************************************************
362 *******************************************************************************/
364 void codegen_close(void)
/* Shutdown hook for the code generator; currently intentionally empty. */
366 /* TODO: release avl tree on i386 and x86_64 */
370 /* codegen_increase ************************************************************
374 *******************************************************************************/
376 void codegen_increase(codegendata *cd)
/* Grows the temporary machine-code buffer (doubling it via DMREALLOC) and
   rebases mcodeptr -- and on architectures that track it, lastmcodeptr --
   so they keep pointing at the same offsets inside the new buffer.
   NOTE(review): elided listing -- declarations and the DMREALLOC size
   arguments are missing from this view. */
380 /* save old mcodebase pointer */
382 oldmcodebase = cd->mcodebase;
384 /* reallocate to new, doubled memory */
386 cd->mcodebase = DMREALLOC(cd->mcodebase,
391 cd->mcodeend = cd->mcodebase + cd->mcodesize;
393 /* set new mcodeptr */
395 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
397 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
398 || defined(__SPARC_64__)
399 /* adjust the pointer to the last patcher position */
401 if (cd->lastmcodeptr != NULL)
402 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
407 /* codegen_ncode_increase ******************************************************
411 *******************************************************************************/
413 #if defined(ENABLE_INTRP)
414 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
/* Interpreter-only: grows the native dynamic-superinstruction buffer and
   returns the caller's ncodeptr rebased into the new buffer.
   NOTE(review): elided listing -- declarations and the DMREALLOC size
   arguments are missing from this view. */
418 /* save old ncodebase pointer */
420 oldncodebase = cd->ncodebase;
422 /* reallocate to new, doubled memory */
424 cd->ncodebase = DMREALLOC(cd->ncodebase,
430 /* return the new ncodeptr */
432 return (cd->ncodebase + (ncodeptr - oldncodebase));
437 /* codegen_add_branch_ref ******************************************************
439 Prepends a branch to the list.
441 *******************************************************************************/
443 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
/* Records a forward branch to a basic block whose machine-code position is
   not yet known: allocates a branchref holding the branch instruction's
   mpc (offset from mcodebase) and prepends it to the target block's
   branchrefs list, to be patched by codegen_resolve_branchrefs.
   NOTE(review): elided listing -- declarations, braces and the br->reg
   assignment are not visible here; TODO confirm against the full file. */
448 STATISTICS(count_branches_unresolved++);
450 /* calculate the mpc of the branch instruction */
452 branchmpc = cd->mcodeptr - cd->mcodebase;
454 br = DNEW(branchref);
456 br->branchmpc = branchmpc;
457 br->condition = condition;
459 br->options = options;
460 br->next = target->branchrefs;
462 target->branchrefs = br;
466 /* codegen_resolve_branchrefs **************************************************
468 Resolves and patches the branch references of a given basic block.
470 *******************************************************************************/
472 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
/* Patches every pending branch recorded against bptr now that its machine
   code position (bptr->mpc) is known: temporarily rewinds cd->mcodeptr to
   each branch site, re-emits the branch with the real displacement, then
   restores mcodeptr.  NOTE(review): elided listing -- declarations and
   braces are missing from this view. */
477 /* Save the mcodeptr because in the branch emitting functions
478 we generate code somewhere inside already generated code,
479 but we're still in the actual code generation phase. */
481 mcodeptr = cd->mcodeptr;
483 /* just to make sure */
485 assert(bptr->mpc >= 0);
487 for (br = bptr->branchrefs; br != NULL; br = br->next) {
488 /* temporary set the mcodeptr */
490 cd->mcodeptr = cd->mcodebase + br->branchmpc;
492 /* emit_bccz and emit_branch emit the correct code, even if we
493 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
495 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
498 /* restore mcodeptr */
500 cd->mcodeptr = mcodeptr;
504 /* codegen_branch_label_add ****************************************************
506 Appends a branch to the label-branch list.
508 *******************************************************************************/
510 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
/* Records a branch to a named label: allocates a branch_label_ref_t with
   the current mpc and appends it to cd->brancheslabel for later
   resolution.  NOTE(review): elided listing -- the br->mpc, br->label and
   br->reg assignments are not visible here; TODO confirm against the
   full file. */
513 branch_label_ref_t *br;
516 /* get the label list */
518 list = cd->brancheslabel;
520 /* calculate the current mpc */
522 mpc = cd->mcodeptr - cd->mcodebase;
524 br = DNEW(branch_label_ref_t);
528 br->condition = condition;
530 br->options = options;
532 /* add the branch to the list */
534 list_add_last_unsynced(list, br);
538 /* codegen_add_patch_ref *******************************************************
540 Appends a new patcher reference to the list of patching positions.
542 *******************************************************************************/
544 void codegen_add_patch_ref(codegendata *cd, functionptr patcher, voidptr ref,
/* Appends a new patcher reference (lazy-resolution callback + position of
   the instruction to patch) to cd->patchrefs.  On architectures where the
   patcher call stub can be longer than the patched instruction, also
   remembers the end of the stub in cd->lastmcodeptr so codegen_emit can
   pad with NOPs.  NOTE(review): elided listing -- the parameter list is
   cut off and several assignments/braces are missing from this view. */
550 branchmpc = cd->mcodeptr - cd->mcodebase;
554 pr->branchpos = branchmpc;
556 pr->patcher = patcher;
559 /* list_add_first(cd->patchrefs, pr); */
560 pr->next = cd->patchrefs;
563 /* Generate NOPs for opt_shownops. */
568 #if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
569 || defined(__SPARC_64__) || defined(__X86_64__))
571 /* On some architectures the patcher stub call instruction might
572 be longer than the actual instruction generated. On this
573 architectures we store the last patcher call position and after
574 the basic block code generation is completed, we check the
575 range and maybe generate some nop's. */
576 /* The nops are generated in codegen_emit in each codegen */
578 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
583 /* codegen_critical_section_new ************************************************
585 Allocates a new critical-section reference and adds it to the
586 critical-section list.
588 *******************************************************************************/
590 #if defined(ENABLE_THREADS)
591 void codegen_critical_section_new(codegendata *cd)
/* Threads-only: opens a new critical-section record.  Allocates a
   critical_section_ref_t, records the current mpc as the restart point,
   and appends it to cd->listcritical; start/end are filled in later by
   codegen_critical_section_start/end.  NOTE(review): elided listing --
   the csr->start/end/restart initializations are not visible here. */
594 critical_section_ref_t *csr;
597 /* get the critical section list */
599 list = cd->listcritical;
601 /* calculate the current mpc */
603 mpc = cd->mcodeptr - cd->mcodebase;
605 csr = DNEW(critical_section_ref_t);
607 /* We only can set restart right now, as start and end are set by
608 the following, corresponding functions. */
614 /* add the critical-section reference to the list */
616 list_add_last_unsynced(list, csr);
621 /* codegen_critical_section_start **********************************************
623 Set the start-point of the current critical section (which is the
624 last element of the list).
626 *******************************************************************************/
628 #if defined(ENABLE_THREADS)
629 void codegen_critical_section_start(codegendata *cd)
/* Threads-only: records the current mpc as the start point of the most
   recently opened critical section (the last list element).  The assert
   guards against setting a start point twice.  NOTE(review): elided
   listing -- the csr->start assignment is not visible here. */
632 critical_section_ref_t *csr;
635 /* get the critical section list */
637 list = cd->listcritical;
639 /* calculate the current mpc */
641 mpc = cd->mcodeptr - cd->mcodebase;
643 /* get the current critical section */
645 csr = list_last_unsynced(list);
647 /* set the start point */
649 assert(csr->start == -1);
656 /* codegen_critical_section_end ************************************************
658 Set the end-point of the current critical section (which is the
659 last element of the list).
661 *******************************************************************************/
663 #if defined(ENABLE_THREADS)
664 void codegen_critical_section_end(codegendata *cd)
/* Threads-only: records the current mpc as the end point of the most
   recently opened critical section (the last list element).  The assert
   guards against setting an end point twice.  NOTE(review): elided
   listing -- the csr->end assignment is not visible here. */
667 critical_section_ref_t *csr;
670 /* get the critical section list */
672 list = cd->listcritical;
674 /* calculate the current mpc */
676 mpc = cd->mcodeptr - cd->mcodebase;
678 /* get the current critical section */
680 csr = list_last_unsynced(list);
682 /* set the end point */
684 assert(csr->end == -1);
691 /* codegen_critical_section_finish *********************************************
693 Finish the critical sections, create the critical section nodes for
694 the AVL tree and insert them into the tree.
696 *******************************************************************************/
698 #if defined(ENABLE_THREADS)
699 static void codegen_critical_section_finish(jitdata *jd)
/* Threads-only: after the final code layout is known, converts every
   recorded critical-section reference (offsets relative to the code
   start) into a critical_section_node_t with absolute addresses based on
   code->entrypoint, and registers each node globally.  NOTE(review):
   elided listing -- braces are missing from this view. */
704 critical_section_ref_t *csr;
705 critical_section_node_t *csn;
707 /* get required compiler data */
712 /* get the critical section list */
714 list = cd->listcritical;
716 /* iterate over all critical sections */
718 for (csr = list_first_unsynced(list); csr != NULL;
719 csr = list_next_unsynced(list, csr)) {
720 /* check if all points are set */
722 assert(csr->start != -1);
723 assert(csr->end != -1);
724 assert(csr->restart != -1);
726 /* allocate tree node */
728 csn = NEW(critical_section_node_t);
730 csn->start = code->entrypoint + csr->start;
731 csn->end = code->entrypoint + csr->end;
732 csn->restart = code->entrypoint + csr->restart;
734 /* insert into the tree */
736 critical_section_register(csn);
742 /* methodtree_comparator *******************************************************
744 Comparator function used for the AVL tree of methods.
747 treenode....the node from the tree
748 node........the node to compare to the tree-node
750 *******************************************************************************/
752 static s4 methodtree_comparator(const void *treenode, const void *node)
/* AVL comparator for the method address tree.  Treats the probe node as
   "equal" when its [startpc, endpc] range lies within the tree node's
   range, so a single PC (stored as a degenerate range) matches the method
   that contains it.  ADDR_MASK strips the high bit on S390, where
   addresses are 31 bit.  NOTE(review): elided listing -- the return
   statements and #if/#else lines are missing from this view. */
754 methodtree_element *mte;
755 methodtree_element *mtepc;
757 mte = (methodtree_element *) treenode;
758 mtepc = (methodtree_element *) node;
760 /* compare both startpc and endpc of pc, even if they have the same value,
761 otherwise the avl_probe sometimes thinks the element is already in the
765 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
767 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
769 # define ADDR_MASK(a) (a)
772 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
773 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
774 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
775 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
778 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
789 /* codegen_insertmethod ********************************************************
791 Insert the machine code range of a method into the AVL tree of methods.
793 *******************************************************************************/
795 void codegen_insertmethod(u1 *startpc, u1 *endpc)
/* Inserts the machine-code range [startpc, endpc] of a compiled method
   into the global method AVL tree so codegen_get_pv_from_pc can find it.
   NOTE(review): elided listing -- the mte->endpc assignment is not
   visible here; TODO confirm against the full file. */
797 methodtree_element *mte;
799 /* allocate new method entry */
801 mte = NEW(methodtree_element);
803 mte->startpc = startpc;
806 /* this function does not return an error, but asserts for
809 avl_insert(methodtree, mte);
813 /* codegen_get_pv_from_pc ******************************************************
815 Find the PV for the given PC by searching in the AVL tree of
818 *******************************************************************************/
820 u1 *codegen_get_pv_from_pc(u1 *pc)
/* Looks up the procedure vector (method entry point) containing pc in the
   method AVL tree.  If no method is found we are in the middle of signal
   handling and cannot recover: a diagnostic and stacktrace are printed
   and the VM aborts.  NOTE(review): elided listing -- the mtepc setup and
   the return statement are not visible here. */
822 methodtree_element mtepc;
823 methodtree_element *mte;
825 /* allocation of the search structure on the stack is much faster */
830 mte = avl_find(methodtree, &mtepc);
833 /* No method was found. Let's dump a stacktrace. */
835 #if defined(ENABLE_VMLOG)
836 vmlog_cacao_signl("SIGSEGV");
839 log_println("We received a SIGSEGV and tried to handle it, but we were");
840 log_println("unable to find a Java method at:");
842 #if SIZEOF_VOID_P == 8
843 log_println("PC=0x%016lx", pc);
845 log_println("PC=0x%08x", pc);
849 log_println("Dumping the current stacktrace:");
851 #if defined(ENABLE_THREADS)
852 /* XXX michi: This should be available even without threads! */
853 threads_print_stacktrace();
856 vm_abort("Exiting...");
863 /* codegen_get_pv_from_pc_nocheck **********************************************
865 Find the PV for the given PC by searching in the AVL tree of
866 methods. This method does not check the return value and is used
869 *******************************************************************************/
871 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
/* Like codegen_get_pv_from_pc, but without the fatal-error path: performs
   the AVL lookup and leaves handling of a missing entry to the caller.
   NOTE(review): elided listing -- the mtepc setup and the return
   statement are not visible here. */
873 methodtree_element mtepc;
874 methodtree_element *mte;
876 /* allocation of the search structure on the stack is much faster */
881 mte = avl_find(methodtree, &mtepc);
890 /* codegen_set_replacement_point_notrap ****************************************
892 Record the position of a non-trappable replacement point.
894 *******************************************************************************/
896 #if defined(ENABLE_REPLACEMENT)
898 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
900 void codegen_set_replacement_point_notrap(codegendata *cd)
/* Records the current code offset in the next replacement point (which
   must be flagged NOTRAP) and advances to the following one.  The two
   signatures above presumably belong to an elided NDEBUG #if/#else --
   only the debug variant takes `type` for the assert; TODO confirm
   against the full file. */
903 assert(cd->replacementpoint);
904 assert(cd->replacementpoint->type == type);
905 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
907 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
909 cd->replacementpoint++;
911 #endif /* defined(ENABLE_REPLACEMENT) */
914 /* codegen_set_replacement_point ***********************************************
916 Record the position of a trappable replacement point.
918 *******************************************************************************/
920 #if defined(ENABLE_REPLACEMENT)
922 void codegen_set_replacement_point(codegendata *cd, s4 type)
924 void codegen_set_replacement_point(codegendata *cd)
/* Records the current code offset in the next trappable replacement point,
   advances to the following one, and reserves room after the site for the
   patcher call by bumping lastmcodeptr.  The two signatures above
   presumably belong to an elided NDEBUG #if/#else -- only the debug
   variant takes `type`; TODO confirm against the full file. */
927 assert(cd->replacementpoint);
928 assert(cd->replacementpoint->type == type);
929 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
931 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
933 cd->replacementpoint++;
935 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
937 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
942 /* codegen_finish **************************************************************
944 Finishes the code generation. A new memory, large enough for both
945 data and code, is allocated and data and code are copied together
946 to their final layout, unresolved jumps are resolved, ...
948 *******************************************************************************/
950 void codegen_finish(jitdata *jd)
/* Finalizes code generation: allocates one permanent block large enough
   for data segment + code (+ interpreter ncode), copies everything to its
   final layout, resolves jump tables, line-number tables, patcher and
   replacement-point offsets into absolute addresses, registers the method
   in the address tree, resolves data-segment references, finishes
   critical sections, and flushes the caches.  NOTE(review): elided
   listing -- declarations, braces and several #else/#endif lines are
   missing from this view. */
955 #if defined(ENABLE_INTRP)
964 /* get required compiler data */
969 /* prevent compiler warning */
971 #if defined(ENABLE_INTRP)
975 /* calculate the code length */
977 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
979 #if defined(ENABLE_STATISTICS)
981 count_code_len += mcodelen;
982 count_data_len += cd->dseglen;
986 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
988 #if defined(ENABLE_INTRP)
990 ncodelen = cd->ncodeptr - cd->ncodebase;
992 ncodelen = 0; /* avoid compiler warning */
996 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
997 alignedlen = alignedmcodelen + cd->dseglen;
999 #if defined(ENABLE_INTRP)
1001 alignedlen += ncodelen;
1005 /* allocate new memory */
1007 code->mcodelength = mcodelen + cd->dseglen;
1008 code->mcode = CNEW(u1, alignedlen);
1010 /* set the entrypoint of the method */
1012 assert(code->entrypoint == NULL);
1013 code->entrypoint = epoint = (code->mcode + cd->dseglen);
1015 /* fill the data segment (code->entrypoint must already be set!) */
1019 /* copy code to the new location */
1021 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
1023 #if defined(ENABLE_INTRP)
1024 /* relocate native dynamic superinstruction code (if any) */
1027 cd->mcodebase = code->entrypoint;
1030 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
1032 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
1034 /* flush the instruction and data caches */
1036 md_cacheflush(ncodebase, ncodelen);
1038 /* set some cd variables for dynamic_super_rewrite */
1040 cd->ncodebase = ncodebase;
1043 cd->ncodebase = NULL;
1046 dynamic_super_rewrite(cd);
1050 /* jump table resolving */
1052 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1053 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1054 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
1056 /* line number table resolving */
1062 for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
1064 target = lr->targetmpc;
1065 /* if the entry contains an mcode pointer (normal case), resolve it */
1066 /* (see doc/inlining_stacktrace.txt for details) */
1067 if (lr->linenumber >= -2) {
1068 target += (ptrint) epoint;
1070 *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
1071 (functionptr) target;
1074 *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
1075 (functionptr) ((ptrint) epoint + cd->linenumbertab);
1077 *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
1080 /* patcher resolving */
1082 pr = list_first_unsynced(code->patchers);
1084 pr->mpc += (ptrint) epoint;
1085 pr->datap = (ptrint) (pr->disp + epoint);
1086 pr = list_next_unsynced(code->patchers, pr);
1089 #if defined(ENABLE_REPLACEMENT)
1090 /* replacement point resolving */
1095 rp = code->rplpoints;
1096 for (i=0; i<code->rplpointcount; ++i, ++rp) {
1097 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1100 #endif /* defined(ENABLE_REPLACEMENT) */
1102 /* add method into methodtree to find the entrypoint */
1104 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1106 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1107 /* resolve data segment references */
1109 dseg_resolve_datareferences(jd);
1112 #if defined(ENABLE_THREADS)
1113 /* create critical sections */
1115 codegen_critical_section_finish(jd);
1118 /* flush the instruction and data caches */
1120 md_cacheflush(code->mcode, code->mcodelength);
1124 /* codegen_generate_stub_compiler **********************************************
1126 Wrapper for codegen_emit_stub_compiler.
1129 pointer to the compiler stub code.
1131 *******************************************************************************/
1133 u1 *codegen_generate_stub_compiler(methodinfo *m)
/* Builds the tiny compiler stub for a method: allocates 3 data words
   (asm_call_jit_compiler address, methodinfo, fake codeinfo pointer)
   followed by COMPILERSTUB_CODESIZE bytes of code, calls the
   machine-dependent emitter, flushes the caches and returns the stub's
   code address.  Uses dump (mark/release) memory for the temporary
   jitdata.  NOTE(review): elided listing -- declarations, the d[1]
   assignment and the return statement are not visible here. */
1137 ptrint *d; /* pointer to data memory */
1138 u1 *c; /* pointer to code memory */
1141 /* mark dump memory */
1143 dumpsize = dump_size();
1145 /* allocate required data structures */
1150 jd->cd = DNEW(codegendata);
1153 /* get required compiler data */
1157 /* allocate code memory */
1159 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1161 /* set pointers correctly */
/* code starts right after the three data words */
1167 c = c + 3 * SIZEOF_VOID_P;
1170 /* NOTE: The codeinfo pointer is actually a pointer to the
1171 methodinfo (this fakes a codeinfo structure). */
1173 d[0] = (ptrint) asm_call_jit_compiler;
1175 d[2] = (ptrint) &d[1]; /* fake code->m */
1177 /* call the emit function */
1179 codegen_emit_stub_compiler(jd);
1181 #if defined(ENABLE_STATISTICS)
1183 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1188 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1190 /* release dump memory */
1192 dump_release(dumpsize);
1194 /* return native stub code */
1200 /* codegen_generate_stub_builtin ***********************************************
1202 Wrapper for codegen_emit_stub_native.
1204 *******************************************************************************/
1206 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
/* Generates a native-style stub for a builtin function: sets up a
   temporary jitdata/codegendata, emits the stub around bte->fp via
   codegen_emit_stub_native, finishes code generation and stores the
   entry point in bte->stub.  Optionally disassembles the result.
   NOTE(review): elided listing -- declarations, braces and some
   #else/#endif lines are not visible here. */
1213 /* mark dump memory */
1215 dumpsize = dump_size();
1220 jd->cd = DNEW(codegendata);
1224 /* Allocate codeinfo memory from the heap as we need to keep them. */
1226 jd->code = code_codeinfo_new(m);
1228 /* get required compiler data */
1232 /* setup code generation stuff */
1236 /* Set the number of native arguments we need to skip. */
1240 /* generate the code */
1242 #if defined(ENABLE_JIT)
1243 # if defined(ENABLE_INTRP)
1246 assert(bte->fp != NULL);
1247 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1248 # if defined(ENABLE_INTRP)
1253 /* reallocate the memory and finish the code generation */
1257 /* set the stub entry point in the builtin table */
1259 bte->stub = code->entrypoint;
1261 #if defined(ENABLE_STATISTICS)
1263 size_stub_native += code->mcodelength;
1266 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1267 /* disassemble native stub */
1269 if (opt_DisassembleStubs) {
1270 codegen_disassemble_stub(m,
1271 (u1 *) (ptrint) code->entrypoint,
1272 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1274 /* show data segment */
1276 if (opt_showddatasegment)
1279 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1281 /* release memory */
1283 dump_release(dumpsize);
1287 /* codegen_generate_stub_native ************************************************
1289 Wrapper for codegen_emit_stub_native.
1292 the codeinfo representing the stub code.
1294 *******************************************************************************/
1296 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
/* Generates the JNI stub for native method m wrapping function f: builds
   an extended method descriptor with the hidden native parameters (JNI
   environment pointer, plus the class pointer for static methods),
   pre-allocates the native ABI argument locations, emits the stub
   (JIT or interpreter variant), finishes code generation and returns the
   resulting codeinfo.  NOTE(review): elided listing -- declarations,
   braces, skipparams assignments and some #else/#endif lines are not
   visible here. */
1305 /* mark dump memory */
1307 dumpsize = dump_size();
1312 jd->cd = DNEW(codegendata);
1313 jd->rd = DNEW(registerdata);
1316 /* Allocate codeinfo memory from the heap as we need to keep them. */
1318 jd->code = code_codeinfo_new(m);
1320 /* get required compiler data */
1324 /* set the flags for the current JIT run */
1326 #if defined(ENABLE_PROFILING)
1328 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1331 if (opt_verbosecall)
1332 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1334 /* setup code generation stuff */
1336 #if defined(ENABLE_JIT)
1337 # if defined(ENABLE_INTRP)
1345 /* create new method descriptor with additional native parameters */
1349 /* Set the number of native arguments we need to skip. */
1351 if (m->flags & ACC_STATIC)
/* variable-sized allocation: methoddesc has a trailing typedesc array */
1356 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1357 md->paramcount * sizeof(typedesc) +
1358 skipparams * sizeof(typedesc));
1360 nmd->paramcount = md->paramcount + skipparams;
1362 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1364 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1366 if (m->flags & ACC_STATIC)
1367 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1369 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1372 #if defined(ENABLE_JIT)
1373 # if defined(ENABLE_INTRP)
1376 /* pre-allocate the arguments for the native ABI */
1378 md_param_alloc_native(nmd);
1381 /* generate the code */
1383 #if defined(ENABLE_JIT)
1384 # if defined(ENABLE_INTRP)
1386 intrp_createnativestub(f, jd, nmd);
1389 codegen_emit_stub_native(jd, nmd, f, skipparams);
1391 intrp_createnativestub(f, jd, nmd);
1394 /* reallocate the memory and finish the code generation */
1398 #if defined(ENABLE_STATISTICS)
1399 /* must be done after codegen_finish() */
1402 size_stub_native += code->mcodelength;
1405 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1406 /* disassemble native stub */
1408 if (opt_DisassembleStubs) {
1409 # if defined(ENABLE_DEBUG_FILTER)
1410 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1413 codegen_disassemble_stub(m,
1414 (u1 *) (ptrint) code->entrypoint,
1415 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1417 /* show data segment */
1419 if (opt_showddatasegment)
1423 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1425 /* release memory */
1427 dump_release(dumpsize);
1429 /* return native stub code */
1435 /* codegen_disassemble_stub ****************************************************
1437 Disassembles the generated builtin or native stub.
1439 *******************************************************************************/
1441 #if defined(ENABLE_DISASSEMBLER)
1442 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1444 printf("Stub code: ");
1445 if (m->class != NULL)
1446 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1450 utf_fprint_printable_ascii(stdout, m->name);
1451 utf_fprint_printable_ascii(stdout, m->descriptor);
1452 printf("\nLength: %d\n\n", (s4) (end - start));
1454 DISASSEMBLE(start, end);
1459 /* codegen_start_native_call ***************************************************
1461 Prepares the stuff required for a native (JNI) function call:
1463 - adds a stackframe info structure to the chain, for stacktraces
1464 - prepares the local references table on the stack
1466 The layout of the native stub stackframe should look like this:
1468 +---------------------------+ <- java SP (of parent Java function)
1470 +---------------------------+ <- data SP
1472 | stackframe info structure |
1474 +---------------------------+
1476 | local references table |
1478 +---------------------------+
1480 | saved registers (if any) |
1482 +---------------------------+
1484 | arguments (if any) |
1486 +---------------------------+ <- current SP (native stub)
1488 *******************************************************************************/
/* See the layout diagram in the header comment: currentsp is the native
   stub's SP, pv the procedure vector of the stub (used to reach the
   method header fields stored at negative offsets from pv). */
1490 java_handle_t *codegen_start_native_call(u1 *currentsp, u1 *pv)
1492 stackframeinfo *sfi;
1493 localref_table *lrt;
1502 uint64_t *arg_stack;
1504 STATISTICS(count_calls_java_to_native++);
1506 /* get information from method header */
1508 code = *((codeinfo **) (pv + CodeinfoPointer));
1509 framesize = *((int32_t *) (pv + FrameSize));
/* The stub frame must be large enough to hold both on-stack data
   structures carved out below. */
1511 assert(framesize >= sizeof(stackframeinfo) + sizeof(localref_table));
1513 /* get the methodinfo */
1518 /* calculate needed values */
/* Per-architecture layout: datasp points at the saved return address
   (or link-register slot), javasp at the caller's Java SP, arg_regs /
   arg_stack at the spilled register and stack arguments.  NOTE(review):
   these offsets must match each backend's native-stub generator —
   verify against the corresponding codegen.c when changing them. */
1520 #if defined(__ALPHA__) || defined(__ARM__)
1521 datasp = currentsp + framesize - SIZEOF_VOID_P;
1522 javasp = currentsp + framesize;
1523 javara = *((uint8_t **) datasp);
1524 arg_regs = (uint64_t *) currentsp;
1525 arg_stack = (uint64_t *) javasp;
1526 #elif defined(__MIPS__)
1527 /* MIPS always uses 8 bytes to store the RA */
1528 datasp = currentsp + framesize - 8;
1529 javasp = currentsp + framesize;
1530 javara = *((uint8_t **) datasp);
1531 #elif defined(__S390__)
1532 datasp = currentsp + framesize - 8;
1533 javasp = currentsp + framesize;
1534 javara = *((uint8_t **) datasp);
/* 96 = size of the S/390 register save area below the outgoing args
   — presumably the ABI-mandated area; confirm against the S390 stub. */
1535 arg_regs = (uint64_t *) (currentsp + 96);
1536 arg_stack = (uint64_t *) javasp;
1537 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
/* On these targets the RA sits at the top of the frame; the Java SP is
   one slot above it. */
1538 datasp = currentsp + framesize;
1539 javasp = currentsp + framesize + SIZEOF_VOID_P;
1540 javara = *((uint8_t **) datasp);
1541 arg_regs = (uint64_t *) currentsp;
1542 arg_stack = (uint64_t *) javasp;
1543 #elif defined(__POWERPC__)
1544 datasp = currentsp + framesize;
1545 javasp = currentsp + framesize;
/* PowerPC keeps the return address in the linkage area (LR slot). */
1546 javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
1547 arg_regs = (uint64_t *) (currentsp + LA_SIZE + 4 * SIZEOF_VOID_P);
1548 arg_stack = (uint64_t *) javasp;
1549 #elif defined(__POWERPC64__)
1550 datasp = currentsp + framesize;
1551 javasp = currentsp + framesize;
1552 javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
1553 arg_regs = (uint64_t *) (currentsp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1554 arg_stack = (uint64_t *) javasp;
1556 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1557 /* XXX maybe we need to pass the RA as argument there */
1558 vm_abort("codegen_start_native_call: unsupported architecture");
1561 #if !defined(NDEBUG)
1562 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1563 /* print the call-trace if necessary */
1565 if (opt_TraceJavaCalls)
1566 trace_java_call_enter(m, arg_regs, arg_stack);
1570 /* get data structures from stack */
/* The stackframeinfo and localref_table live directly below datasp,
   in that order (matches the diagram in the header comment). */
1572 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1573 lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
1574 sizeof(localref_table));
1576 #if defined(ENABLE_JNI)
1577 /* add current JNI local references table to this thread */
1579 localref_table_add(lrt);
1582 #if defined(ENABLE_HANDLES)
1583 /* place all references into the local reference table */
1585 localref_fill(m, arg_regs, arg_stack);
1588 /* add a stackframeinfo to the chain */
1590 stacktrace_create_native_stackframeinfo(sfi, pv, javasp, javara);
1592 /* return a wrapped classinfo for static native methods */
/* JNI static natives receive the class object instead of a receiver. */
1594 if (m->flags & ACC_STATIC)
1595 return LLNI_classinfo_wrap(m->class);
1601 /* codegen_finish_native_call **************************************************
1603 Removes the stuff required for a native (JNI) function call.
1604 Additionally it checks for an exception and, if one occurred, gets
1605 the exception object and clears the pointer.
1607 *******************************************************************************/
/* Counterpart of codegen_start_native_call: tears down the stub's
   per-call data structures and returns any pending exception object
   (NULL handling of the non-exception path is outside this view). */
1609 java_object_t *codegen_finish_native_call(u1 *currentsp, u1 *pv)
1611 stackframeinfo *sfi;
1621 /* get information from method header */
1623 code = *((codeinfo **) (pv + CodeinfoPointer));
1624 framesize = *((int32_t *) (pv + FrameSize));
1627 /* get the methodinfo */
1632 /* calculate needed values */
/* Per-architecture layout: datasp mirrors the computation in
   codegen_start_native_call; ret_regs points at the saved return-value
   registers.  NOTE(review): keep in sync with each backend's stub. */
1634 #if defined(__ALPHA__) || defined(__ARM__)
1635 datasp = currentsp + framesize - SIZEOF_VOID_P;
1636 ret_regs = (uint64_t *) currentsp;
1637 #elif defined(__MIPS__)
1638 /* MIPS always uses 8 bytes to store the RA */
1639 datasp = currentsp + framesize - 8;
1640 #elif defined(__S390__)
1641 datasp = currentsp + framesize - 8;
1642 ret_regs = (uint64_t *) (currentsp + 96);
1643 #elif defined(__I386__)
1644 datasp = currentsp + framesize;
1645 ret_regs = (uint64_t *) (currentsp + 2 * SIZEOF_VOID_P);
1646 #elif defined(__M68K__) || defined(__X86_64__)
1647 datasp = currentsp + framesize;
1648 ret_regs = (uint64_t *) currentsp;
1649 #elif defined(__POWERPC__)
1650 datasp = currentsp + framesize;
1651 ret_regs = (uint64_t *) (currentsp + LA_SIZE + 2 * SIZEOF_VOID_P);
1652 #elif defined(__POWERPC64__)
1653 datasp = currentsp + framesize;
1654 ret_regs = (uint64_t *) (currentsp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
1656 vm_abort("codegen_finish_native_call: unsupported architecture");
1660 #if !defined(NDEBUG)
1661 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1662 /* print the call-trace if necessary */
1664 if (opt_TraceJavaCalls)
1665 trace_java_call_exit(m, ret_regs);
1669 /* get data structures from stack */
1671 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1673 /* remove current stackframeinfo from chain */
1675 stacktrace_remove_stackframeinfo(sfi);
1677 /* XXX unfill lrt here!!! */
1679 /* get and unwrap the exception */
1680 /* ATTENTION: do this _after_ the stackframeinfo was
1681 removed but _before_ the localref_table gets removed! */
1683 e = exceptions_get_and_clear_exception();
1686 #if defined(ENABLE_JNI)
1687 /* release JNI local references table for this thread */
1689 localref_frame_pop_all();
1690 localref_table_remove();
1697 /* removecompilerstub **********************************************************
1699 Deletes a compilerstub from memory (simply by freeing it).
1701 *******************************************************************************/
/* Free the memory of a compiler stub (see header comment above). */
1703 void removecompilerstub(u1 *stub)
1705 /* pass size 1 to keep the intern function happy */
1707 CFREE((void *) stub, 1);
1711 /* removenativestub ************************************************************
1713 Removes a previously created native-stub from memory.
1715 *******************************************************************************/
/* Free the memory of a native stub (see header comment above). */
1717 void removenativestub(u1 *stub)
1719 /* pass size 1 to keep the intern function happy */
1721 CFREE((void *) stub, 1);
1725 /* codegen_reg_of_var **********************************************************
1727 This function determines a register, to which the result of an
1728 operation should go, when it is ultimately intended to store the
1729 result in pseudoregister v. If v is assigned to an actual
1730 register, this register will be returned. Otherwise (when v is
1731 spilled) this function returns tempregnum. If not already done,
1732 regoff and flags are set in the stack location.
1734 *******************************************************************************/
/* Returns the register that should receive the result destined for
   pseudoregister v (see header comment above); tempregnum is used
   whenever v itself cannot be targeted directly. */
1736 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1740 /* Do we have to generate a conditional move? Yes, then always
1741 return the temporary register. The real register is identified
1742 during the store. */
1744 if (opcode & ICMD_CONDITION_MASK)
/* v is assigned to a real register: write the result there directly. */
1748 if (!(v->flags & INMEMORY))
1749 return v->vv.regoff;
1755 /* codegen_reg_of_dst **********************************************************
1757 This function determines a register, to which the result of an
1758 operation should go, when it is ultimately intended to store the
1759 result in iptr->dst.var. If dst.var is assigned to an actual
1760 register, this register will be returned. Otherwise (when it is
1761 spilled) this function returns tempregnum. If not already done,
1762 regoff and flags are set in the stack location.
1764 *******************************************************************************/
/* Convenience wrapper: resolve the destination variable of iptr and
   delegate to codegen_reg_of_var (see header comment above). */
1766 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1768 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1772 /* codegen_emit_phi_moves ****************************************************
1774 Emits phi moves at the end of the basicblock.
1776 *******************************************************************************/
1778 #if defined(ENABLE_SSA)
/* Emit the moves that resolve the phi functions of basicblock bptr.
   Each phi move copies a source lifetime into a destination lifetime
   via emit_copy(); moves are emitted in reverse index order so that
   conflicting moves do not clobber each other (see comment below). */
1779 void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
1792 /* Moves from phi functions with highest indices have to be */
1793 /* inserted first, since this is the order as is used for */
1794 /* conflict resolution */
1796 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* phi_moves[block][i] is a (destination, source) lifetime pair. */
1797 lt_d = ls->phi_moves[bptr->nr][i][0];
1798 lt_s = ls->phi_moves[bptr->nr][i][1];
1799 #if defined(SSA_DEBUG_VERBOSE)
1801 printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
/* UNUSED source: nothing to copy for this phi move. */
1803 if (lt_s == UNUSED) {
1804 #if defined(SSA_DEBUG_VERBOSE)
1806 printf(" ... not processed \n");
1811 d = VAR(ls->lifetime[lt_d].v_index);
1812 s = VAR(ls->lifetime[lt_s].v_index);
/* type == -1 marks a lifetime that was joined with another during
   SSA construction — no move is needed then. */
1815 if (d->type == -1) {
1816 #if defined(SSA_DEBUG_VERBOSE)
1818 printf("...returning - phi lifetimes where joined\n");
1823 if (s->type == -1) {
1824 #if defined(SSA_DEBUG_VERBOSE)
1826 printf("...returning - phi lifetimes where joined\n");
/* Build a synthetic copy instruction and let the backend emit it. */
1832 tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
1833 tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
1834 emit_copy(jd, &tmp_i);
/* Verbose trace: M = memory (spilled), R = register operand. */
1836 #if defined(SSA_DEBUG_VERBOSE)
1837 if (compileverbose) {
1838 if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
1840 printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1842 else if (IS_INMEMORY(s->flags)) {
1844 printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1846 else if (IS_INMEMORY(d->flags)) {
1848 printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1852 printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1856 #endif /* defined(SSA_DEBUG_VERBOSE) */
1859 #endif /* defined(ENABLE_SSA) */
1864 * These are local overrides for various environment variables in Emacs.
1865 * Please do not remove this and leave it at the end of the file, where
1866 * Emacs will automagically detect them.
1867 * ---------------------------------------------------------------------
1870 * indent-tabs-mode: t
1874 * vim:noexpandtab:sw=4:ts=4: