1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 All functions assume the following code area / data area layout:
29 | code area | code area grows to higher addresses
31 +-----------+ <-- start of procedure
33 | data area | data area grows to lower addresses
37 The functions first write into a temporary code/data area allocated by
38 "codegen_init". "codegen_finish" copies the code and data area into permanent
39 memory. All functions writing values into the data area return the offset
40 relative the begin of the code area (start of procedure).
52 #if defined(ENABLE_JIT)
53 /* this is required PATCHER_CALL_SIZE */
59 #include "mm/memory.h"
61 #include "toolbox/avl.h"
62 #include "toolbox/list.h"
63 #include "toolbox/logging.h"
65 #include "native/jni.h"
66 #include "native/llni.h"
67 #include "native/localref.h"
68 #include "native/native.h"
70 #if defined(WITH_CLASSPATH_SUN)
71 # include "native/include/java_lang_Object.h"
72 # include "native/include/java_lang_String.h"
73 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
74 # include "native/include/java_lang_ClassLoader.h"
77 #include "native/include/java_lang_Class.h"
79 #include "threads/threads-common.h"
81 #include "vm/builtin.h"
82 #include "vm/exceptions.h"
83 #include "vm/stringlocal.h"
85 #include "vm/jit/abi.h"
86 #include "vm/jit/asmpart.h"
87 #include "vm/jit/codegen-common.h"
89 #if defined(ENABLE_DISASSEMBLER)
90 # include "vm/jit/disass.h"
93 #include "vm/jit/dseg.h"
94 #include "vm/jit/emit-common.h"
95 #include "vm/jit/jit.h"
96 #include "vm/jit/md.h"
97 #include "vm/jit/methodheader.h"
98 #include "vm/jit/patcher-common.h"
99 #include "vm/jit/replace.h"
100 #if defined(ENABLE_SSA)
101 # include "vm/jit/optimizing/lsra.h"
102 # include "vm/jit/optimizing/ssa.h"
104 #include "vm/jit/stacktrace.h"
105 #include "vm/jit/trace.h"
107 #if defined(ENABLE_INTRP)
108 #include "vm/jit/intrp/intrp.h"
111 #include "vmcore/method.h"
112 #include "vmcore/options.h"
114 # include "vmcore/statistics.h"
116 #if defined(ENABLE_VMLOG)
117 #include <vmlog_cacao.h>
122 /* in this tree we store all method addresses *********************************/
124 static avl_tree_t *methodtree = NULL;
125 static s4 methodtree_comparator(const void *treenode, const void *node);
128 /* codegen_init ****************************************************************
132 *******************************************************************************/
/* codegen_init: initialise global (not per-method) code generator state.
   Builds the AVL tree that maps machine-code address ranges back to
   methods, and (JIT builds only) registers the asm_vm_call_method
   trampoline's range so stack walking can recognise it.
   NOTE(review): some original lines are elided in this view. */
134 void codegen_init(void)
136 /* this tree is global, not method specific */
139 #if defined(ENABLE_JIT)
140 methodtree_element *mte;
/* tree compares by [startpc, endpc] overlap — see methodtree_comparator */
143 methodtree = avl_create(&methodtree_comparator);
145 #if defined(ENABLE_JIT)
146 /* insert asm_vm_call_method */
148 mte = NEW(methodtree_element);
/* casts go through ptrint because function pointers are not u1* */
150 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
151 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
153 avl_insert(methodtree, mte);
154 #endif /* defined(ENABLE_JIT) */
161 /* codegen_setup ***************************************************************
163 Allocates and initialises code area, data area and references.
165 *******************************************************************************/
/* codegen_setup: allocate and initialise the per-compilation codegendata
   (cd) hung off jd: temporary code buffer, optional interpreter ncode
   buffer, and all reference lists used later by codegen_finish.
   Memory comes from the dump allocator (DMNEW) — released wholesale,
   never freed individually. */
167 void codegen_setup(jitdata *jd)
172 /* get required compiler data */
177 /* initialize members */
181 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
182 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
183 cd->mcodesize = MCODEINITSIZE;
185 /* initialize mcode variables */
187 cd->mcodeptr = cd->mcodebase;
188 cd->lastmcodeptr = cd->mcodebase;
190 #if defined(ENABLE_INTRP)
191 /* native dynamic superinstructions variables */
194 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
195 cd->ncodesize = NCODEINITSIZE;
197 /* initialize ncode variables */
199 cd->ncodeptr = cd->ncodebase;
201 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
202 cd->superstarts = NULL;
209 cd->jumpreferences = NULL;
/* these architectures patch data-segment references after relocation */
211 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
212 cd->datareferences = NULL;
215 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
216 cd->patchrefs = NULL;
217 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
218 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
220 cd->linenumberreferences = NULL;
221 cd->linenumbertablesizepos = 0;
222 cd->linenumbertablestartpos = 0;
223 cd->linenumbertab = 0;
227 /* codegen_reset ***************************************************************
229 Resets the codegen data structure so we can recompile the method.
231 *******************************************************************************/
/* codegen_reset: rewind the codegendata so the method can be recompiled
   (used for the long-branches retry in codegen_generate).  Reuses the
   already-allocated code buffer and re-creates the reference lists;
   must mirror the initialisation done in codegen_setup. */
233 static void codegen_reset(jitdata *jd)
239 /* get required compiler data */
244 /* reset error flag */
246 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
248 /* reset some members, we reuse the code memory already allocated
249 as this should have almost the correct size */
251 cd->mcodeptr = cd->mcodebase;
252 cd->lastmcodeptr = cd->mcodebase;
257 cd->jumpreferences = NULL;
259 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
260 cd->datareferences = NULL;
263 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
264 cd->patchrefs = NULL;
265 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
266 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
268 cd->linenumberreferences = NULL;
269 cd->linenumbertablesizepos = 0;
270 cd->linenumbertablestartpos = 0;
271 cd->linenumbertab = 0;
273 /* We need to clear the mpc and the branch references from all
274 basic blocks as they will definitely change. */
276 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
278 bptr->branchrefs = NULL;
281 /* We need to clear all the patcher references from the codeinfo
282 since they all will be regenerated */
284 patcher_list_reset(code);
/* replacement points will be re-allocated during the re-run */
286 #if defined(ENABLE_REPLACEMENT)
287 code->rplpoints = NULL;
288 code->rplpointcount = 0;
289 code->regalloc = NULL;
290 code->regalloccount = 0;
291 code->globalcount = 0;
296 /* codegen_generate ************************************************************
298 Generates the code for the currently compiled method.
300 *******************************************************************************/
/* codegen_generate: drive machine-dependent code emission for the
   method in jd.  On a LONGBRANCHES error the codegendata is reset and
   codegen_emit is retried once with long branches enabled; any other
   error flag aborts the VM.  Returns false when codegen_emit fails. */
302 bool codegen_generate(jitdata *jd)
306 /* get required compiler data */
310 /* call the machine-dependent code generation function */
312 if (!codegen_emit(jd))
315 /* check for an error */
317 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
318 /* check for long-branches flag, if it is set we recompile the
323 log_message_method("Re-generating code: ", jd->m);
326 /* XXX maybe we should tag long-branches-methods for recompilation */
328 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
329 /* we have to reset the codegendata structure first */
333 /* and restart the compiler run */
335 if (!codegen_emit(jd))
/* error flag set but not LONGBRANCHES: nothing we can recover from */
339 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
344 log_message_method("Re-generating code done: ", jd->m);
348 /* reallocate the memory and finish the code generation */
352 /* everything's ok */
358 /* codegen_close ***************************************************************
362 *******************************************************************************/
/* codegen_close: global code-generator teardown hook; currently a
   no-op (the method AVL tree is intentionally leaked at shutdown). */
364 void codegen_close(void)
366 /* TODO: release avl tree on i386 and x86_64 */
370 /* codegen_increase ************************************************************
374 *******************************************************************************/
/* codegen_increase: grow the temporary code buffer (doubling it) when
   the emitter runs out of space.  Because DMREALLOC may move the
   buffer, every pointer into it (mcodeptr, lastmcodeptr) must be
   rebased relative to the new mcodebase. */
376 void codegen_increase(codegendata *cd)
380 /* save old mcodebase pointer */
382 oldmcodebase = cd->mcodebase;
384 /* reallocate to new, doubled memory */
386 cd->mcodebase = DMREALLOC(cd->mcodebase,
391 cd->mcodeend = cd->mcodebase + cd->mcodesize;
393 /* set new mcodeptr */
395 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
/* only these architectures track a pending patcher position */
397 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
398 || defined(__SPARC_64__)
399 /* adjust the pointer to the last patcher position */
401 if (cd->lastmcodeptr != NULL)
402 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
407 /* codegen_ncode_increase ******************************************************
411 *******************************************************************************/
/* codegen_ncode_increase (interpreter builds only): grow the native
   dynamic-superinstruction buffer and return the caller's ncodeptr
   rebased into the reallocated buffer. */
413 #if defined(ENABLE_INTRP)
414 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
418 /* save old ncodebase pointer */
420 oldncodebase = cd->ncodebase;
422 /* reallocate to new, doubled memory */
424 cd->ncodebase = DMREALLOC(cd->ncodebase,
430 /* return the new ncodeptr */
432 return (cd->ncodebase + (ncodeptr - oldncodebase));
437 /* codegen_add_branch_ref ******************************************************
439 Prepends an branch to the list.
441 *******************************************************************************/
/* codegen_add_branch_ref: record a forward branch whose target basic
   block has not been emitted yet.  The branch's code position (mpc,
   relative to mcodebase) plus condition/register/options are prepended
   to the target block's branchrefs list and resolved later by
   codegen_resolve_branchrefs. */
443 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
448 STATISTICS(count_branches_unresolved++);
450 /* calculate the mpc of the branch instruction */
452 branchmpc = cd->mcodeptr - cd->mcodebase;
454 br = DNEW(branchref);
456 br->branchmpc = branchmpc;
457 br->condition = condition;
459 br->options = options;
/* prepend to the singly-linked per-target list */
460 br->next = target->branchrefs;
462 target->branchrefs = br;
466 /* codegen_resolve_branchrefs **************************************************
468 Resolves and patches the branch references of a given basic block.
470 *******************************************************************************/
/* codegen_resolve_branchrefs: once bptr's machine-code position (mpc)
   is known, re-emit every branch recorded against it at the branch's
   original position.  cd->mcodeptr is temporarily repointed into
   already-emitted code and restored afterwards. */
472 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
477 /* Save the mcodeptr because in the branch emitting functions
478 we generate code somewhere inside already generated code,
479 but we're still in the actual code generation phase. */
481 mcodeptr = cd->mcodeptr;
483 /* just to make sure */
485 assert(bptr->mpc >= 0);
487 for (br = bptr->branchrefs; br != NULL; br = br->next) {
488 /* temporary set the mcodeptr */
490 cd->mcodeptr = cd->mcodebase + br->branchmpc;
492 /* emit_bccz and emit_branch emit the correct code, even if we
493 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
495 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
498 /* restore mcodeptr */
500 cd->mcodeptr = mcodeptr;
504 /* codegen_branch_label_add ****************************************************
506 Append an branch to the label-branch list.
508 *******************************************************************************/
/* codegen_branch_label_add: append a branch-to-label record (current
   mpc, condition, options) to cd->brancheslabel for later resolution
   against the numbered label. */
510 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
513 branch_label_ref_t *br;
516 /* get the label list */
518 list = cd->brancheslabel;
520 /* calculate the current mpc */
522 mpc = cd->mcodeptr - cd->mcodebase;
524 br = DNEW(branch_label_ref_t);
528 br->condition = condition;
530 br->options = options;
532 /* add the branch to the list */
534 list_add_last_unsynced(list, br);
538 /* codegen_add_patch_ref *******************************************************
540 Appends a new patcher reference to the list of patching positions.
542 *******************************************************************************/
/* codegen_add_patch_ref: record a position that needs run-time
   patching (lazy resolution).  Stores the current mpc plus the patcher
   function and prepends the record to cd->patchrefs.  On architectures
   where the patcher stub call may be longer than the patched
   instruction, lastmcodeptr marks where trailing NOPs must be emitted. */
544 void codegen_add_patch_ref(codegendata *cd, functionptr patcher, voidptr ref,
550 branchmpc = cd->mcodeptr - cd->mcodebase;
554 pr->branchpos = branchmpc;
556 pr->patcher = patcher;
559 /* list_add_first(cd->patchrefs, pr); */
560 pr->next = cd->patchrefs;
563 /* Generate NOPs for opt_shownops. */
568 #if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
569 || defined(__SPARC_64__) || defined(__X86_64__))
571 /* On some architectures the patcher stub call instruction might
572 be longer than the actual instruction generated. On this
573 architectures we store the last patcher call position and after
574 the basic block code generation is completed, we check the
575 range and maybe generate some nop's. */
576 /* The nops are generated in codegen_emit in each codegen */
578 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
583 /* codegen_critical_section_new ************************************************
585 Allocates a new critical-section reference and adds it to the
586 critical-section list.
588 *******************************************************************************/
/* codegen_critical_section_new (threaded builds): open a new
   critical-section record at the current mpc and append it to
   cd->listcritical.  Only the restart point is known here; start/end
   are filled in by codegen_critical_section_start/_end. */
590 #if defined(ENABLE_THREADS)
591 void codegen_critical_section_new(codegendata *cd)
594 critical_section_ref_t *csr;
597 /* get the critical section list */
599 list = cd->listcritical;
601 /* calculate the current mpc */
603 mpc = cd->mcodeptr - cd->mcodebase;
605 csr = DNEW(critical_section_ref_t);
607 /* We only can set restart right now, as start and end are set by
608 the following, corresponding functions. */
614 /* add the branch to the list */
616 list_add_last_unsynced(list, csr);
621 /* codegen_critical_section_start **********************************************
623 Set the start-point of the current critical section (which is the
624 last element of the list).
626 *******************************************************************************/
/* codegen_critical_section_start (threaded builds): set the start mpc
   of the most recently opened critical section (the list tail).  The
   assert guards against setting it twice. */
628 #if defined(ENABLE_THREADS)
629 void codegen_critical_section_start(codegendata *cd)
632 critical_section_ref_t *csr;
635 /* get the critical section list */
637 list = cd->listcritical;
639 /* calculate the current mpc */
641 mpc = cd->mcodeptr - cd->mcodebase;
643 /* get the current critical section */
645 csr = list_last_unsynced(list);
647 /* set the start point */
649 assert(csr->start == -1);
656 /* codegen_critical_section_end ************************************************
658 Set the end-point of the current critical section (which is the
659 last element of the list).
661 *******************************************************************************/
/* codegen_critical_section_end (threaded builds): set the end mpc of
   the most recently opened critical section (the list tail); mirror of
   codegen_critical_section_start. */
663 #if defined(ENABLE_THREADS)
664 void codegen_critical_section_end(codegendata *cd)
667 critical_section_ref_t *csr;
670 /* get the critical section list */
672 list = cd->listcritical;
674 /* calculate the current mpc */
676 mpc = cd->mcodeptr - cd->mcodebase;
678 /* get the current critical section */
680 csr = list_last_unsynced(list);
682 /* set the end point */
684 assert(csr->end == -1);
691 /* codegen_critical_section_finish *********************************************
693 Finish the critical sections, create the critical section nodes for
694 the AVL tree and insert them into the tree.
696 *******************************************************************************/
/* codegen_critical_section_finish (threaded builds): after relocation,
   convert every recorded critical-section (mpc offsets) into absolute
   addresses relative to code->entrypoint and register the resulting
   nodes globally.  Nodes use NEW (heap), not DNEW, as they outlive the
   compilation. */
698 #if defined(ENABLE_THREADS)
699 static void codegen_critical_section_finish(jitdata *jd)
704 critical_section_ref_t *csr;
705 critical_section_node_t *csn;
707 /* get required compiler data */
712 /* get the critical section list */
714 list = cd->listcritical;
716 /* iterate over all critical sections */
718 for (csr = list_first_unsynced(list); csr != NULL;
719 csr = list_next_unsynced(list, csr)) {
720 /* check if all points are set */
722 assert(csr->start != -1);
723 assert(csr->end != -1);
724 assert(csr->restart != -1);
726 /* allocate tree node */
728 csn = NEW(critical_section_node_t);
/* offsets become absolute code addresses here */
730 csn->start = code->entrypoint + csr->start;
731 csn->end = code->entrypoint + csr->end;
732 csn->restart = code->entrypoint + csr->restart;
734 /* insert into the tree */
736 critical_section_register(csn);
742 /* methodtree_comparator *******************************************************
744 Comparator function used for the AVL tree of methods.
747 treenode....the node from the tree
748 node........the node to compare to the tree-node
750 *******************************************************************************/
/* methodtree_comparator: AVL ordering function for method address
   ranges.  Treats two ranges as "equal" when the probe range lies
   inside the tree node's range (so a PC probe with startpc == endpc
   finds its containing method); otherwise orders by startpc.  On S390
   only 31 address bits are compared (addresses are 31-bit there). */
752 static s4 methodtree_comparator(const void *treenode, const void *node)
754 methodtree_element *mte;
755 methodtree_element *mtepc;
757 mte = (methodtree_element *) treenode;
758 mtepc = (methodtree_element *) node;
760 /* compare both startpc and endpc of pc, even if they have the same value,
761 otherwise the avl_probe sometimes thinks the element is already in the
765 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
767 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
769 # define ADDR_MASK(a) (a)
/* containment test: [mtepc->startpc, mtepc->endpc] within [mte->startpc, mte->endpc] */
772 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
773 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
774 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
775 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
778 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
789 /* codegen_insertmethod ********************************************************
791 Insert the machine code range of a method into the AVL tree of methods.
793 *******************************************************************************/
/* codegen_insertmethod: register a method's machine-code range
   [startpc, endpc] in the global AVL tree so PCs can be mapped back to
   the method (used by codegen_get_pv_from_pc and stack walking).
   Element is heap-allocated (NEW) — it outlives the compilation. */
795 void codegen_insertmethod(u1 *startpc, u1 *endpc)
797 methodtree_element *mte;
799 /* allocate new method entry */
801 mte = NEW(methodtree_element);
803 mte->startpc = startpc;
806 /* this function does not return an error, but asserts for
809 avl_insert(methodtree, mte);
813 /* codegen_get_pv_from_pc ******************************************************
815 Find the PV for the given PC by searching in the AVL tree of
818 *******************************************************************************/
/* codegen_get_pv_from_pc: look up the procedure vector (PV) for a PC
   via the method AVL tree.  A failed lookup here normally means we took
   a SIGSEGV outside any Java method, so diagnostics are printed and the
   VM is aborted rather than returning NULL. */
820 u1 *codegen_get_pv_from_pc(u1 *pc)
822 methodtree_element mtepc;
823 methodtree_element *mte;
825 /* allocation of the search structure on the stack is much faster */
830 mte = avl_find(methodtree, &mtepc);
833 /* No method was found. Let's dump a stacktrace. */
835 #if defined(ENABLE_VMLOG)
836 vmlog_cacao_signl("SIGSEGV");
839 log_println("We received a SIGSEGV and tried to handle it, but we were");
840 log_println("unable to find a Java method at:");
/* pointer width decides the format used to print the faulting PC */
842 #if SIZEOF_VOID_P == 8
843 log_println("PC=0x%016lx", pc);
845 log_println("PC=0x%08x", pc);
849 log_println("Dumping the current stacktrace:");
851 #if defined(ENABLE_THREADS)
852 /* XXX michi: This should be available even without threads! */
853 threads_print_stacktrace();
856 vm_abort("Exiting...");
863 /* codegen_get_pv_from_pc_nocheck **********************************************
865 Find the PV for the given PC by searching in the AVL tree of
866 methods. This method does not check the return value and is used
869 *******************************************************************************/
/* codegen_get_pv_from_pc_nocheck: like codegen_get_pv_from_pc but
   without the abort path — callers must handle a failed lookup
   themselves. */
871 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
873 methodtree_element mtepc;
874 methodtree_element *mte;
876 /* allocation of the search structure on the stack is much faster */
881 mte = avl_find(methodtree, &mtepc);
890 /* codegen_set_replacement_point_notrap ****************************************
892 Record the position of a non-trappable replacement point.
894 *******************************************************************************/
/* codegen_set_replacement_point_notrap (replacement builds): record the
   current mpc (stored as an offset, rebased in codegen_finish) for the
   next non-trappable replacement point and advance the cursor.  The
   two prototypes are for debug (type-checked) vs release builds. */
896 #if defined(ENABLE_REPLACEMENT)
898 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
900 void codegen_set_replacement_point_notrap(codegendata *cd)
903 assert(cd->replacementpoint);
904 assert(cd->replacementpoint->type == type);
905 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
907 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
909 cd->replacementpoint++;
911 #endif /* defined(ENABLE_REPLACEMENT) */
914 /* codegen_set_replacement_point ***********************************************
916 Record the position of a trappable replacement point.
918 *******************************************************************************/
/* codegen_set_replacement_point (replacement builds): record the
   current mpc for the next trappable replacement point and advance the
   cursor.  With opt_TestReplacement, space for a patcher-sized call is
   reserved via lastmcodeptr so the point can be trapped. */
920 #if defined(ENABLE_REPLACEMENT)
922 void codegen_set_replacement_point(codegendata *cd, s4 type)
924 void codegen_set_replacement_point(codegendata *cd)
927 assert(cd->replacementpoint);
928 assert(cd->replacementpoint->type == type);
929 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
931 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
933 cd->replacementpoint++;
936 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
937 if (opt_TestReplacement)
941 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
943 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
945 #endif /* defined(ENABLE_REPLACEMENT) */
948 /* codegen_finish **************************************************************
950 Finishes the code generation. A new memory, large enough for both
951 data and code, is allocated and data and code are copied together
952 to their final layout, unresolved jumps are resolved, ...
954 *******************************************************************************/
/* codegen_finish: relocate the method into its final memory.  Allocates
   one block holding data segment + code (+ interpreter ncode), copies
   both areas in, then fixes up everything recorded as an offset during
   emission: jump tables, the line-number table, patcher positions,
   replacement points, data-segment references and critical sections.
   Finally registers the code range in the method tree and flushes the
   caches.  NOTE(review): several declarations/branches are elided in
   this view; comments cover the visible statements only. */
956 void codegen_finish(jitdata *jd)
961 #if defined(ENABLE_INTRP)
970 /* get required compiler data */
975 /* prevent compiler warning */
977 #if defined(ENABLE_INTRP)
981 /* calculate the code length */
983 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
985 #if defined(ENABLE_STATISTICS)
987 count_code_len += mcodelen;
988 count_data_len += cd->dseglen;
992 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
994 #if defined(ENABLE_INTRP)
996 ncodelen = cd->ncodeptr - cd->ncodebase;
998 ncodelen = 0; /* avoid compiler warning */
1002 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
1003 alignedlen = alignedmcodelen + cd->dseglen;
1005 #if defined(ENABLE_INTRP)
1007 alignedlen += ncodelen;
1011 /* allocate new memory */
1013 code->mcodelength = mcodelen + cd->dseglen;
1014 code->mcode = CNEW(u1, alignedlen);
1016 /* set the entrypoint of the method */
1018 assert(code->entrypoint == NULL);
/* layout: [data segment (grows down)][entrypoint -> code] */
1019 code->entrypoint = epoint = (code->mcode + cd->dseglen);
1021 /* fill the data segment (code->entrypoint must already be set!) */
1025 /* copy code to the new location */
1027 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
1029 #if defined(ENABLE_INTRP)
1030 /* relocate native dynamic superinstruction code (if any) */
1033 cd->mcodebase = code->entrypoint;
1036 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
1038 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
1040 /* flush the instruction and data caches */
1042 md_cacheflush(ncodebase, ncodelen);
1044 /* set some cd variables for dynamic_super_rerwite */
1046 cd->ncodebase = ncodebase;
1049 cd->ncodebase = NULL;
1052 dynamic_super_rewrite(cd);
1056 /* jump table resolving */
/* each table slot becomes an absolute address of its target block */
1058 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1059 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1060 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc)/
1062 /* line number table resolving */
1068 for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
1070 target = lr->targetmpc;
1071 /* if the entry contains an mcode pointer (normal case), resolve it */
1072 /* (see doc/inlining_stacktrace.txt for details) */
1073 if (lr->linenumber >= -2) {
1074 target += (ptrint) epoint;
1076 *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
1077 (functionptr) target;
1080 *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
1081 (functionptr) ((ptrint) epoint + cd->linenumbertab);
1083 *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
1086 /* patcher resolving */
1088 pr = list_first_unsynced(code->patchers);
1090 pr->mpc += (ptrint) epoint;
1091 pr->datap = (ptrint) (pr->disp + epoint);
1092 pr = list_next_unsynced(code->patchers, pr);
1095 #if defined(ENABLE_REPLACEMENT)
1096 /* replacement point resolving */
1101 rp = code->rplpoints;
1102 for (i=0; i<code->rplpointcount; ++i, ++rp) {
1103 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1106 #endif /* defined(ENABLE_REPLACEMENT) */
1108 /* add method into methodtree to find the entrypoint */
1110 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1112 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1113 /* resolve data segment references */
1115 dseg_resolve_datareferences(jd);
1118 #if defined(ENABLE_THREADS)
1119 /* create cirtical sections */
1121 codegen_critical_section_finish(jd);
1124 /* flush the instruction and data caches */
1126 md_cacheflush(code->mcode, code->mcodelength);
1130 /* codegen_generate_stub_compiler **********************************************
1132 Wrapper for codegen_emit_stub_compiler.
1135 pointer to the compiler stub code.
1137 *******************************************************************************/
/* codegen_generate_stub_compiler: build the tiny per-method compiler
   stub.  Layout: 3 machine words of data (asm_call_jit_compiler
   address, methodinfo, fake codeinfo) immediately followed by
   COMPILERSTUB_CODESIZE bytes of code.  Returns a pointer to the code
   part; temporary jitdata lives in dump memory and is released before
   return. */
1139 u1 *codegen_generate_stub_compiler(methodinfo *m)
1143 ptrint *d; /* pointer to data memory */
1144 u1 *c; /* pointer to code memory */
1147 /* mark dump memory */
1149 dumpsize = dump_size();
1151 /* allocate required data structures */
1156 jd->cd = DNEW(codegendata);
1159 /* get required compiler data */
1163 /* allocate code memory */
1165 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1167 /* set pointers correctly */
/* skip the 3 data words: c now points at the code section */
1173 c = c + 3 * SIZEOF_VOID_P;
1176 /* NOTE: The codeinfo pointer is actually a pointer to the
1177 methodinfo (this fakes a codeinfo structure). */
1179 d[0] = (ptrint) asm_call_jit_compiler;
1181 d[2] = (ptrint) &d[1]; /* fake code->m */
1183 /* call the emit function */
1185 codegen_emit_stub_compiler(jd);
1187 #if defined(ENABLE_STATISTICS)
1189 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1194 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1196 /* release dump memory */
1198 dump_release(dumpsize);
1200 /* return native stub code */
1206 /* codegen_generate_stub_builtin ***********************************************
1208 Wrapper for codegen_emit_stub_native.
1210 *******************************************************************************/
/* codegen_generate_stub_builtin: generate the native-style stub for a
   builtin-table function and store its entry point in bte->stub.
   Temporary jitdata is dump-allocated; the codeinfo is heap-allocated
   because it must persist.  Optionally disassembles the stub. */
1212 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
1219 /* mark dump memory */
1221 dumpsize = dump_size();
1226 jd->cd = DNEW(codegendata);
1230 /* Allocate codeinfo memory from the heap as we need to keep them. */
1232 jd->code = code_codeinfo_new(m);
1234 /* get required compiler data */
1238 /* setup code generation stuff */
1242 /* Set the number of native arguments we need to skip. */
1246 /* generate the code */
1248 #if defined(ENABLE_JIT)
1249 # if defined(ENABLE_INTRP)
1252 assert(bte->fp != NULL);
1253 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1254 # if defined(ENABLE_INTRP)
1259 /* reallocate the memory and finish the code generation */
1263 /* set the stub entry point in the builtin table */
1265 bte->stub = code->entrypoint;
1267 #if defined(ENABLE_STATISTICS)
1269 size_stub_native += code->mcodelength;
1272 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1273 /* disassemble native stub */
1275 if (opt_DisassembleStubs) {
/* code length excludes the data segment, hence the dseglen subtraction */
1276 codegen_disassemble_stub(m,
1277 (u1 *) (ptrint) code->entrypoint,
1278 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1280 /* show data segment */
1282 if (opt_showddatasegment)
1285 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1287 /* release memory */
1289 dump_release(dumpsize);
1293 /* codegen_generate_stub_native ************************************************
1295 Wrapper for codegen_emit_stub_native.
1298 the codeinfo representing the stub code.
1300 *******************************************************************************/
/* codegen_generate_stub_native: generate the JNI wrapper stub for
   native method m with implementation f.  Builds an extended method
   descriptor (env pointer, plus class pointer for static methods) in
   front of the Java parameters, pre-allocates the native ABI registers
   for it, emits the stub (JIT or interpreter path) and returns the
   persistent codeinfo.  Temporary structures are dump-allocated. */
1302 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1311 /* mark dump memory */
1313 dumpsize = dump_size();
1318 jd->cd = DNEW(codegendata);
1319 jd->rd = DNEW(registerdata);
1322 /* Allocate codeinfo memory from the heap as we need to keep them. */
1324 jd->code = code_codeinfo_new(m);
1326 /* get required compiler data */
1330 /* set the flags for the current JIT run */
1332 #if defined(ENABLE_PROFILING)
1334 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1337 if (opt_verbosecall)
1338 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1340 /* setup code generation stuff */
1342 #if defined(ENABLE_JIT)
1343 # if defined(ENABLE_INTRP)
1351 /* create new method descriptor with additional native parameters */
1355 /* Set the number of native arguments we need to skip. */
/* static methods also receive the class pointer, so they skip more */
1357 if (m->flags & ACC_STATIC)
/* variable-size struct: methoddesc with paramcount+skipparams typedescs */
1362 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1363 md->paramcount * sizeof(typedesc) +
1364 skipparams * sizeof(typedesc));
1366 nmd->paramcount = md->paramcount + skipparams;
1368 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1370 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1372 if (m->flags & ACC_STATIC)
1373 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1375 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1378 #if defined(ENABLE_JIT)
1379 # if defined(ENABLE_INTRP)
1382 /* pre-allocate the arguments for the native ABI */
1384 md_param_alloc_native(nmd);
1387 /* generate the code */
1389 #if defined(ENABLE_JIT)
1390 # if defined(ENABLE_INTRP)
1392 intrp_createnativestub(f, jd, nmd);
1395 codegen_emit_stub_native(jd, nmd, f, skipparams);
1397 intrp_createnativestub(f, jd, nmd);
1400 /* reallocate the memory and finish the code generation */
1404 #if defined(ENABLE_STATISTICS)
1405 /* must be done after codegen_finish() */
1408 size_stub_native += code->mcodelength;
1411 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1412 /* disassemble native stub */
1414 if (opt_DisassembleStubs) {
1415 # if defined(ENABLE_DEBUG_FILTER)
1416 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1419 codegen_disassemble_stub(m,
1420 (u1 *) (ptrint) code->entrypoint,
1421 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1423 /* show data segment */
1425 if (opt_showddatasegment)
1429 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1431 /* release memory */
1433 dump_release(dumpsize);
1435 /* return native stub code */
1441 /* codegen_disassemble_nativestub **********************************************
1443 Disassembles the generated builtin or native stub.
1445 *******************************************************************************/
/* codegen_disassemble_stub (disassembler builds): print a header with
   the method's class, name and descriptor plus the stub length, then
   disassemble the machine code in [start, end). */
1447 #if defined(ENABLE_DISASSEMBLER)
1448 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1450 printf("Stub code: ");
/* m->class may be NULL for internal stubs without a declaring class */
1451 if (m->class != NULL)
1452 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1456 utf_fprint_printable_ascii(stdout, m->name);
1457 utf_fprint_printable_ascii(stdout, m->descriptor);
1458 printf("\nLength: %d\n\n", (s4) (end - start));
1460 DISASSEMBLE(start, end);
1465 /* codegen_start_native_call ***************************************************
1467 Prepares the stuff required for a native (JNI) function call:
1469 - adds a stackframe info structure to the chain, for stacktraces
1470 - prepares the local references table on the stack
1472 The layout of the native stub stackframe should look like this:
1474 +---------------------------+ <- java SP (of parent Java function)
1476 +---------------------------+ <- data SP
1478 | stackframe info structure |
1480 +---------------------------+
1482 | local references table |
1484 +---------------------------+
1486 | saved registers (if any) |
1488 +---------------------------+
1490 | arguments (if any) |
1492 +---------------------------+ <- current SP (native stub)
1494 *******************************************************************************/
1496 java_handle_t *codegen_start_native_call(u1 *currentsp, u1 *pv)
/* Sets up per-thread bookkeeping before a native (JNI) function runs:
   adds a stackframeinfo (for stacktraces) and, with ENABLE_JNI, the
   local references table located on the native-stub stackframe.
   currentsp is the stub's current SP; pv is the procedure vector used
   to read the method-header fields below.  For static methods a
   wrapped classinfo is returned (presumably used as the JNI 'jclass'
   argument -- the non-static return path is not visible here). */
1498 stackframeinfo *sfi;
1499 localref_table *lrt;
1508 uint64_t *arg_stack;
1510 STATISTICS(count_calls_java_to_native++);
1512 /* get information from method header */
1514 code = *((codeinfo **) (pv + CodeinfoPointer));
1515 framesize = *((int32_t *) (pv + FrameSize));
/* the frame must have room for both structures (see layout above) */
1517 assert(framesize >= sizeof(stackframeinfo) + sizeof(localref_table));
1519 /* get the methodinfo */
1524 /* calculate needed values */
/* Per-architecture frame decoding: locate the data SP and java SP,
   fetch the saved return address (javara), and point arg_regs /
   arg_stack at the saved argument areas.  Compare the stackframe
   layout in the header comment above. */
1526 #if defined(__ALPHA__) || defined(__ARM__)
1527 datasp = currentsp + framesize - SIZEOF_VOID_P;
1528 javasp = currentsp + framesize;
1529 javara = *((uint8_t **) datasp);
1530 arg_regs = (uint64_t *) currentsp;
1531 arg_stack = (uint64_t *) javasp;
1532 #elif defined(__MIPS__)
1533 /* MIPS always uses 8 bytes to store the RA */
1534 datasp = currentsp + framesize - 8;
1535 javasp = currentsp + framesize;
1536 javara = *((uint8_t **) datasp);
1537 #elif defined(__S390__)
1538 datasp = currentsp + framesize - 8;
1539 javasp = currentsp + framesize;
1540 javara = *((uint8_t **) datasp);
/* NOTE(review): 96 looks like the s390 ABI register save area size --
   confirm against the s390 stub generator */
1541 arg_regs = (uint64_t *) (currentsp + 96);
1542 arg_stack = (uint64_t *) javasp;
1543 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1544 datasp = currentsp + framesize;
1545 javasp = currentsp + framesize + SIZEOF_VOID_P;
1546 javara = *((uint8_t **) datasp);
1547 arg_regs = (uint64_t *) currentsp;
1548 arg_stack = (uint64_t *) javasp;
1549 #elif defined(__POWERPC__)
1550 datasp = currentsp + framesize;
1551 javasp = currentsp + framesize;
/* PowerPC: return address lives in the link-register slot */
1552 javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
1553 arg_regs = (uint64_t *) (currentsp + LA_SIZE + 4 * SIZEOF_VOID_P);
1554 arg_stack = (uint64_t *) javasp;
1555 #elif defined(__POWERPC64__)
1556 datasp = currentsp + framesize;
1557 javasp = currentsp + framesize;
1558 javara = *((uint8_t **) (datasp + LA_LR_OFFSET));
1559 arg_regs = (uint64_t *) (currentsp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1560 arg_stack = (uint64_t *) javasp;
1562 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1563 /* XXX maybe we need to pass the RA as argument there */
1564 vm_abort("codegen_start_native_call: unsupported architecture");
1567 #if !defined(NDEBUG)
1568 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1569 /* print the call-trace if necessary */
1571 if (opt_TraceJavaCalls)
1572 trace_java_call_enter(m, arg_regs, arg_stack);
1576 /* get data structures from stack */
/* both structures sit directly below the data SP (see layout above) */
1578 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1579 lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
1580 sizeof(localref_table));
1582 #if defined(ENABLE_JNI)
1583 /* add current JNI local references table to this thread */
1585 localref_table_add(lrt);
1588 #if defined(ENABLE_HANDLES)
1589 /* place all references into the local reference table */
1591 localref_fill(m, arg_regs, arg_stack);
1594 /* add a stackframeinfo to the chain */
1596 stacktrace_create_native_stackframeinfo(sfi, pv, javasp, javara);
1598 /* return a wrapped classinfo for static native methods */
1600 if (m->flags & ACC_STATIC)
1601 return LLNI_classinfo_wrap(m->class);
1607 /* codegen_finish_native_call **************************************************
1609 Removes the stuff required for a native (JNI) function call.
1610 Additionally it checks for an exception and, if one occurred, gets
1611 the exception object and clears the pointer.
1613 *******************************************************************************/
1615 java_object_t *codegen_finish_native_call(u1 *currentsp, u1 *pv)
/* Tears down what codegen_start_native_call() set up: removes the
   stackframeinfo from the chain, fetches and clears any pending
   exception, and (with ENABLE_JNI) releases the thread's local
   references table.  currentsp/pv have the same meaning as in
   codegen_start_native_call(). */
1617 stackframeinfo *sfi;
1627 /* get information from method header */
1629 code = *((codeinfo **) (pv + CodeinfoPointer));
1630 framesize = *((int32_t *) (pv + FrameSize));
1633 /* get the methodinfo */
1638 /* calculate needed values */
/* Per-architecture frame decoding, mirroring the entry path: locate
   the data SP and the saved return-value registers (ret_regs). */
1640 #if defined(__ALPHA__) || defined(__ARM__)
1641 datasp = currentsp + framesize - SIZEOF_VOID_P;
1642 ret_regs = (uint64_t *) currentsp;
1643 #elif defined(__MIPS__)
1644 /* MIPS always uses 8 bytes to store the RA */
1645 datasp = currentsp + framesize - 8;
1646 #elif defined(__S390__)
1647 datasp = currentsp + framesize - 8;
1648 ret_regs = (uint64_t *) (currentsp + 96);
1649 #elif defined(__I386__)
1650 datasp = currentsp + framesize;
1651 ret_regs = (uint64_t *) (currentsp + 2 * SIZEOF_VOID_P);
1652 #elif defined(__M68K__) || defined(__X86_64__)
1653 datasp = currentsp + framesize;
1654 ret_regs = (uint64_t *) currentsp;
1655 #elif defined(__POWERPC__)
1656 datasp = currentsp + framesize;
1657 ret_regs = (uint64_t *) (currentsp + LA_SIZE + 2 * SIZEOF_VOID_P);
1658 #elif defined(__POWERPC64__)
1659 datasp = currentsp + framesize;
1660 ret_regs = (uint64_t *) (currentsp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
1662 vm_abort("codegen_finish_native_call: unsupported architecture");
1666 #if !defined(NDEBUG)
1667 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1668 /* print the call-trace if necessary */
1670 if (opt_TraceJavaCalls)
1671 trace_java_call_exit(m, ret_regs);
1675 /* get data structures from stack */
1677 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1679 /* remove current stackframeinfo from chain */
1681 stacktrace_remove_stackframeinfo(sfi);
1683 /* XXX unfill lrt here!!! */
1685 /* get and unwrap the exception */
1686 /* ATTENTION: do this _after_ the stackframeinfo was
1687 removed but _before_ the localref_table gets removed! */
1689 e = exceptions_get_and_clear_exception();
1692 #if defined(ENABLE_JNI)
1693 /* release JNI local references table for this thread */
1695 localref_frame_pop_all();
1696 localref_table_remove();
1703 /* removecompilerstub **********************************************************
1705 Deletes a compilerstub from memory (simply by freeing it).
1707 *******************************************************************************/
1709 void removecompilerstub(u1 *stub)
/* Frees a previously allocated compiler stub (see header comment
   above).  The stub must have been allocated with the matching
   CNEW/CFREE allocator. */
1711 /* pass size 1 to keep the intern function happy */
1713 CFREE((void *) stub, 1);
1717 /* removenativestub ************************************************************
1719 Removes a previously created native-stub from memory.
1721 *******************************************************************************/
1723 void removenativestub(u1 *stub)
/* Frees a previously created native stub (see header comment above).
   Identical to removecompilerstub(); kept separate presumably for
   symmetry with the stub-creation entry points. */
1725 /* pass size 1 to keep the intern function happy */
1727 CFREE((void *) stub, 1);
1731 /* codegen_reg_of_var **********************************************************
1733 This function determines a register, to which the result of an
1734 operation should go, when it is ultimately intended to store the
1735 result in pseudoregister v. If v is assigned to an actual
1736 register, this register will be returned. Otherwise (when v is
1737 spilled) this function returns tempregnum. If not already done,
1738 regoff and flags are set in the stack location.
1740 *******************************************************************************/
1742 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
/* Chooses the register an operation should write its result to when
   the result is destined for pseudoregister v: v's own register if v
   is register-allocated, otherwise tempregnum (see header comment
   above). */
1746 /* Do we have to generate a conditional move?  Yes, then always
1747 return the temporary register.  The real register is identified
1748 during the store. */
1750 if (opcode & ICMD_CONDITION_MASK)
/* v lives in a register: write the result there directly */
1754 if (!(v->flags & INMEMORY))
1755 return v->vv.regoff;
1761 /* codegen_reg_of_dst **********************************************************
1763 This function determines a register, to which the result of an
1764 operation should go, when it is ultimately intended to store the
1765 result in iptr->dst.var. If dst.var is assigned to an actual
1766 register, this register will be returned. Otherwise (when it is
1767 spilled) this function returns tempregnum. If not already done,
1768 regoff and flags are set in the stack location.
1770 *******************************************************************************/
1772 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
/* Convenience wrapper: resolve the result register for iptr's
   destination variable via codegen_reg_of_var(). */
1774 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1778 /* codegen_emit_phi_moves ****************************************************
1780 Emits phi moves at the end of the basicblock.
1782 *******************************************************************************/
1784 #if defined(ENABLE_SSA)
1785 void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
/* Emits the phi-function moves pending at the end of basicblock bptr
   (SSA form only).  Each entry of ls->phi_moves[bptr->nr] is a
   (destination, source) lifetime-index pair; the moves are realized
   through emit_copy(). */
1798 /* Moves from phi functions with highest indices have to be */
1799 /* inserted first, since this is the order as is used for */
1800 /* conflict resolution */
1802 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
1803 lt_d = ls->phi_moves[bptr->nr][i][0];
1804 lt_s = ls->phi_moves[bptr->nr][i][1];
1805 #if defined(SSA_DEBUG_VERBOSE)
1807 printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
/* a source of UNUSED means there is nothing to copy for this move */
1809 if (lt_s == UNUSED) {
1810 #if defined(SSA_DEBUG_VERBOSE)
1812 printf(" ... not processed \n");
1817 d = VAR(ls->lifetime[lt_d].v_index);
1818 s = VAR(ls->lifetime[lt_s].v_index);
/* type == -1 marks a joined phi lifetime; no move is required then */
1821 if (d->type == -1) {
1822 #if defined(SSA_DEBUG_VERBOSE)
1824 printf("...returning - phi lifetimes where joined\n");
1829 if (s->type == -1) {
1830 #if defined(SSA_DEBUG_VERBOSE)
1832 printf("...returning - phi lifetimes where joined\n");
/* build a synthetic instruction and let emit_copy() do the move */
1838 tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
1839 tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
1840 emit_copy(jd, &tmp_i);
1842 #if defined(SSA_DEBUG_VERBOSE)
/* debug trace: M = in-memory (spilled) operand, R = register operand */
1843 if (compileverbose) {
1844 if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
1846 printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1848 else if (IS_INMEMORY(s->flags)) {
1850 printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1852 else if (IS_INMEMORY(d->flags)) {
1854 printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1858 printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1862 #endif /* defined(SSA_DEBUG_VERBOSE) */
1865 #endif /* defined(ENABLE_SSA) */
1870 * These are local overrides for various environment variables in Emacs.
1871 * Please do not remove this and leave it at the end of the file, where
1872 * Emacs will automagically detect them.
1873 * ---------------------------------------------------------------------
1876 * indent-tabs-mode: t
1880 * vim:noexpandtab:sw=4:ts=4: