1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 All functions assume the following code area / data area layout:
29 | code area | code area grows to higher addresses
31 +-----------+ <-- start of procedure
33 | data area | data area grows to lower addresses
37 The functions first write into a temporary code/data area allocated by
38 "codegen_init". "codegen_finish" copies the code and data area into permanent
39 memory. All functions writing values into the data area return the offset
40 relative the begin of the code area (start of procedure).
52 #if defined(ENABLE_JIT)
53 /* this is required PATCHER_CALL_SIZE */
59 #include "mm/memory.h"
61 #include "toolbox/avl.h"
62 #include "toolbox/list.h"
63 #include "toolbox/logging.h"
65 #include "native/jni.h"
66 #include "native/llni.h"
67 #include "native/localref.h"
68 #include "native/native.h"
70 #if defined(WITH_CLASSPATH_SUN)
71 # include "native/include/java_lang_Object.h"
72 # include "native/include/java_lang_String.h"
73 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
74 # include "native/include/java_lang_ClassLoader.h"
77 #include "native/include/java_lang_Class.h"
79 #include "threads/threads-common.h"
81 #include "vm/builtin.h"
82 #include "vm/exceptions.h"
83 #include "vm/stringlocal.h"
85 #include "vm/jit/abi.h"
86 #include "vm/jit/asmpart.h"
87 #include "vm/jit/codegen-common.h"
89 #if defined(ENABLE_DISASSEMBLER)
90 # include "vm/jit/disass.h"
93 #include "vm/jit/dseg.h"
94 #include "vm/jit/emit-common.h"
95 #include "vm/jit/jit.h"
96 #include "vm/jit/md.h"
97 #include "vm/jit/methodheader.h"
98 #include "vm/jit/patcher-common.h"
99 #include "vm/jit/replace.h"
100 #if defined(ENABLE_SSA)
101 # include "vm/jit/optimizing/lsra.h"
102 # include "vm/jit/optimizing/ssa.h"
104 #include "vm/jit/stacktrace.h"
105 #include "vm/jit/trace.h"
107 #if defined(ENABLE_INTRP)
108 #include "vm/jit/intrp/intrp.h"
111 #include "vmcore/method.h"
112 #include "vmcore/options.h"
114 # include "vmcore/statistics.h"
116 #if defined(ENABLE_VMLOG)
117 #include <vmlog_cacao.h>
122 /* in this tree we store all method addresses *********************************/
124 static avl_tree_t *methodtree = NULL;
125 static s4 methodtree_comparator(const void *treenode, const void *node);
128 /* codegen_init ****************************************************************
132 *******************************************************************************/
/* codegen_init: one-time global initialization of the code generator.
   Creates the global AVL tree (methodtree) that maps machine-code
   address ranges back to methods and, in JIT builds, registers the
   asm_vm_call_method trampoline's code range in that tree.
   NOTE(review): this listing is a sampled excerpt; lines between the
   numbered statements (braces, #endifs) are elided. */
134 void codegen_init(void)
136 /* this tree is global, not method specific */
139 #if defined(ENABLE_JIT)
140 methodtree_element *mte;
143 methodtree = avl_create(&methodtree_comparator);
145 #if defined(ENABLE_JIT)
/* asm_vm_call_method .. asm_vm_call_method_end must be findable by the
   stack walker, so it gets a permanent (NEW, heap) tree entry. */
146 /* insert asm_vm_call_method */
148 mte = NEW(methodtree_element);
150 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
151 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
153 avl_insert(methodtree, mte);
154 #endif /* defined(ENABLE_JIT) */
161 /* codegen_setup ***************************************************************
163 Allocates and initialises code area, data area and references.
165 *******************************************************************************/
/* codegen_setup: per-compilation initialization of the codegendata.
   Allocates the initial machine-code buffer from dump (temporary)
   memory, resets the emit pointers, and clears all reference lists
   (jump/data/patch/branch-label/critical-section/line-number) that
   codegen_finish later resolves.
   NOTE(review): sampled excerpt; interior lines are elided. */
167 void codegen_setup(jitdata *jd)
172 /* get required compiler data */
177 /* initialize members */
181 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
182 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
183 cd->mcodesize = MCODEINITSIZE;
185 /* initialize mcode variables */
187 cd->mcodeptr = cd->mcodebase;
188 cd->lastmcodeptr = cd->mcodebase;
190 #if defined(ENABLE_INTRP)
191 /* native dynamic superinstructions variables */
194 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
195 cd->ncodesize = NCODEINITSIZE;
197 /* initialize ncode variables */
199 cd->ncodeptr = cd->ncodebase;
201 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
202 cd->superstarts = NULL;
209 cd->jumpreferences = NULL;
/* data references only exist on architectures that embed dseg offsets */
211 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
212 cd->datareferences = NULL;
215 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
216 cd->patchrefs = NULL;
217 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
218 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
220 cd->linenumberreferences = NULL;
221 cd->linenumbertablesizepos = 0;
222 cd->linenumbertablestartpos = 0;
223 cd->linenumbertab = 0;
227 /* codegen_reset ***************************************************************
229 Resets the codegen data structure so we can recompile the method.
231 *******************************************************************************/
/* codegen_reset: prepares the codegendata for a second code-generation
   pass over the same method (used after the long-branches error path in
   codegen_generate). Reuses the already-allocated mcode buffer, clears
   the error flag and all reference lists, and wipes per-basic-block
   branch references and per-codeinfo patcher/replacement state.
   NOTE(review): sampled excerpt; interior lines are elided. */
233 static void codegen_reset(jitdata *jd)
239 /* get required compiler data */
244 /* reset error flag */
246 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
248 /* reset some members, we reuse the code memory already allocated
249 as this should have almost the correct size */
251 cd->mcodeptr = cd->mcodebase;
252 cd->lastmcodeptr = cd->mcodebase;
257 cd->jumpreferences = NULL;
259 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
260 cd->datareferences = NULL;
263 /* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
264 cd->patchrefs = NULL;
265 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
266 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
268 cd->linenumberreferences = NULL;
269 cd->linenumbertablesizepos = 0;
270 cd->linenumbertablestartpos = 0;
271 cd->linenumbertab = 0;
273 /* We need to clear the mpc and the branch references from all
274 basic blocks as they will definitely change. */
276 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
278 bptr->branchrefs = NULL;
281 /* We need to clear all the patcher references from the codeinfo
282 since they all will be regenerated */
284 patcher_list_reset(code);
286 #if defined(ENABLE_REPLACEMENT)
287 code->rplpoints = NULL;
288 code->rplpointcount = 0;
289 code->regalloc = NULL;
290 code->regalloccount = 0;
291 code->globalcount = 0;
296 /* codegen_generate ************************************************************
298 Generates the code for the currently compiled method.
300 *******************************************************************************/
/* codegen_generate: drives machine-dependent code emission for the
   currently compiled method. If codegen_emit sets the error flag with
   LONGBRANCHES, the codegendata is reset and emission is retried once;
   any other error flag aborts the VM.
   NOTE(review): sampled excerpt; interior lines (braces, returns,
   reset/finish calls) are elided. */
302 bool codegen_generate(jitdata *jd)
306 /* get required compiler data */
310 /* call the machine-dependent code generation function */
312 if (!codegen_emit(jd))
315 /* check for an error */
317 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
318 /* check for long-branches flag, if it is set we recompile the
323 log_message_method("Re-generating code: ", jd->m);
326 /* XXX maybe we should tag long-branches-methods for recompilation */
328 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
329 /* we have to reset the codegendata structure first */
333 /* and restart the compiler run */
335 if (!codegen_emit(jd))
/* any error other than long-branches is unrecoverable here */
339 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
344 log_message_method("Re-generating code done: ", jd->m);
348 /* reallocate the memory and finish the code generation */
352 /* everything's ok */
358 /* codegen_close ***************************************************************
362 *******************************************************************************/
/* codegen_close: VM-shutdown counterpart of codegen_init. Currently a
   stub -- the methodtree AVL tree is intentionally never released. */
364 void codegen_close(void)
366 /* TODO: release avl tree on i386 and x86_64 */
370 /* codegen_increase ************************************************************
374 *******************************************************************************/
/* codegen_increase: grows the machine-code buffer (doubling via
   DMREALLOC, per the comment below) and rebases mcodeptr -- and, on
   architectures that track it, lastmcodeptr -- relative to the new
   buffer address.
   NOTE(review): sampled excerpt; the DMREALLOC argument list and
   closing lines are elided. */
376 void codegen_increase(codegendata *cd)
380 /* save old mcodebase pointer */
382 oldmcodebase = cd->mcodebase;
384 /* reallocate to new, doubled memory */
386 cd->mcodebase = DMREALLOC(cd->mcodebase,
391 cd->mcodeend = cd->mcodebase + cd->mcodesize;
393 /* set new mcodeptr */
395 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
397 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
398 || defined(__SPARC_64__)
399 /* adjust the pointer to the last patcher position */
401 if (cd->lastmcodeptr != NULL)
402 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
407 /* codegen_ncode_increase ******************************************************
411 *******************************************************************************/
413 #if defined(ENABLE_INTRP)
/* codegen_ncode_increase: interpreter-only analogue of
   codegen_increase for the native-superinstruction (ncode) buffer.
   Returns the caller's ncodeptr translated to the reallocated buffer.
   NOTE(review): sampled excerpt; the DMREALLOC argument list is
   elided. */
414 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
418 /* save old ncodebase pointer */
420 oldncodebase = cd->ncodebase;
422 /* reallocate to new, doubled memory */
424 cd->ncodebase = DMREALLOC(cd->ncodebase,
430 /* return the new ncodeptr */
432 return (cd->ncodebase + (ncodeptr - oldncodebase));
437 /* codegen_add_branch_ref ******************************************************
439 Prepends an branch to the list.
441 *******************************************************************************/
/* codegen_add_branch_ref: records an unresolved forward branch at the
   current emit position by prepending a branchref node onto the target
   basic block's list; codegen_resolve_branchrefs patches these once the
   target's mpc is known. */
443 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
448 STATISTICS(count_branches_unresolved++);
450 /* calculate the mpc of the branch instruction */
452 branchmpc = cd->mcodeptr - cd->mcodebase;
454 br = DNEW(branchref);
456 br->branchmpc = branchmpc;
457 br->condition = condition;
459 br->options = options;
/* prepend to the target block's singly-linked list of pending branches */
460 br->next = target->branchrefs;
462 target->branchrefs = br;
466 /* codegen_resolve_branchrefs **************************************************
468 Resolves and patches the branch references of a given basic block.
470 *******************************************************************************/
/* codegen_resolve_branchrefs: back-patches every pending branch that
   targets bptr. For each recorded branchref it temporarily rewinds
   cd->mcodeptr to the branch site, re-emits the branch (now that
   bptr->mpc is known), then restores the real emit position. */
472 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
477 /* Save the mcodeptr because in the branch emitting functions
478 we generate code somewhere inside already generated code,
479 but we're still in the actual code generation phase. */
481 mcodeptr = cd->mcodeptr;
483 /* just to make sure */
485 assert(bptr->mpc >= 0);
487 for (br = bptr->branchrefs; br != NULL; br = br->next) {
488 /* temporary set the mcodeptr */
490 cd->mcodeptr = cd->mcodebase + br->branchmpc;
492 /* emit_bccz and emit_branch emit the correct code, even if we
493 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
495 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
498 /* restore mcodeptr */
500 cd->mcodeptr = mcodeptr;
504 /* codegen_branch_label_add ****************************************************
506 Append an branch to the label-branch list.
508 *******************************************************************************/
/* codegen_branch_label_add: records a branch to a numeric label by
   appending a branch_label_ref_t (current mpc, condition, options) to
   cd->brancheslabel for later resolution.
   NOTE(review): sampled excerpt; assignments of br->mpc/label/reg are
   among the elided lines. */
510 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
513 branch_label_ref_t *br;
516 /* get the label list */
518 list = cd->brancheslabel;
520 /* calculate the current mpc */
522 mpc = cd->mcodeptr - cd->mcodebase;
524 br = DNEW(branch_label_ref_t);
528 br->condition = condition;
530 br->options = options;
532 /* add the branch to the list */
534 list_add_last_unsynced(list, br);
538 /* codegen_add_patch_ref *******************************************************
540 Appends a new patcher reference to the list of patching positions.
542 *******************************************************************************/
/* codegen_add_patch_ref: registers a patcher (lazy-resolution stub) at
   the current emit position; the patchref is prepended to cd->patchrefs.
   On the listed architectures, lastmcodeptr is advanced past the
   (possibly longer) patcher call so the codegen can pad with NOPs.
   NOTE(review): sampled excerpt; allocation of pr and parts of the
   parameter list are elided. */
544 void codegen_add_patch_ref(codegendata *cd, functionptr patcher, voidptr ref,
550 branchmpc = cd->mcodeptr - cd->mcodebase;
554 pr->branchpos = branchmpc;
556 pr->patcher = patcher;
559 /* list_add_first(cd->patchrefs, pr); */
560 pr->next = cd->patchrefs;
563 /* Generate NOPs for opt_shownops. */
568 #if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
569 || defined(__SPARC_64__) || defined(__X86_64__))
571 /* On some architectures the patcher stub call instruction might
572 be longer than the actual instruction generated. On this
573 architectures we store the last patcher call position and after
574 the basic block code generation is completed, we check the
575 range and maybe generate some nop's. */
576 /* The nops are generated in codegen_emit in each codegen */
578 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
583 /* codegen_critical_section_new ************************************************
585 Allocates a new critical-section reference and adds it to the
586 critical-section list.
588 *******************************************************************************/
590 #if defined(ENABLE_THREADS)
/* codegen_critical_section_new: opens a new critical-section record
   (restart point = current mpc, per the comment below) and appends it
   to cd->listcritical; start/end are filled in by the _start/_end
   functions that follow.
   NOTE(review): sampled excerpt; the csr field assignments are among
   the elided lines. */
591 void codegen_critical_section_new(codegendata *cd)
594 critical_section_ref_t *csr;
597 /* get the critical section list */
599 list = cd->listcritical;
601 /* calculate the current mpc */
603 mpc = cd->mcodeptr - cd->mcodebase;
605 csr = DNEW(critical_section_ref_t);
607 /* We only can set restart right now, as start and end are set by
608 the following, corresponding functions. */
614 /* add the branch to the list */
616 list_add_last_unsynced(list, csr);
621 /* codegen_critical_section_start **********************************************
623 Set the start-point of the current critical section (which is the
624 last element of the list).
626 *******************************************************************************/
628 #if defined(ENABLE_THREADS)
/* codegen_critical_section_start: stamps the current mpc as the start
   point of the most recently opened critical section (the last list
   element). The assert documents that start must still be unset (-1).
   NOTE(review): sampled excerpt; the assignment of csr->start is
   elided. */
629 void codegen_critical_section_start(codegendata *cd)
632 critical_section_ref_t *csr;
635 /* get the critical section list */
637 list = cd->listcritical;
639 /* calculate the current mpc */
641 mpc = cd->mcodeptr - cd->mcodebase;
643 /* get the current critical section */
645 csr = list_last_unsynced(list);
647 /* set the start point */
649 assert(csr->start == -1);
656 /* codegen_critical_section_end ************************************************
658 Set the end-point of the current critical section (which is the
659 last element of the list).
661 *******************************************************************************/
663 #if defined(ENABLE_THREADS)
/* codegen_critical_section_end: stamps the current mpc as the end
   point of the most recently opened critical section (the last list
   element). The assert documents that end must still be unset (-1).
   NOTE(review): sampled excerpt; the assignment of csr->end is
   elided. */
664 void codegen_critical_section_end(codegendata *cd)
667 critical_section_ref_t *csr;
670 /* get the critical section list */
672 list = cd->listcritical;
674 /* calculate the current mpc */
676 mpc = cd->mcodeptr - cd->mcodebase;
678 /* get the current critical section */
680 csr = list_last_unsynced(list);
682 /* set the end point */
684 assert(csr->end == -1);
691 /* codegen_critical_section_finish *********************************************
693 Finish the critical sections, create the critical section nodes for
694 the AVL tree and insert them into the tree.
696 *******************************************************************************/
698 #if defined(ENABLE_THREADS)
/* codegen_critical_section_finish: converts the per-compilation list
   of critical-section offsets into permanent (NEW, heap) nodes with
   absolute addresses (entrypoint + offset) and registers each via
   critical_section_register. All three points must be set by now,
   hence the asserts. */
699 static void codegen_critical_section_finish(jitdata *jd)
704 critical_section_ref_t *csr;
705 critical_section_node_t *csn;
707 /* get required compiler data */
712 /* get the critical section list */
714 list = cd->listcritical;
716 /* iterate over all critical sections */
718 for (csr = list_first_unsynced(list); csr != NULL;
719 csr = list_next_unsynced(list, csr)) {
720 /* check if all points are set */
722 assert(csr->start != -1);
723 assert(csr->end != -1);
724 assert(csr->restart != -1);
726 /* allocate tree node */
728 csn = NEW(critical_section_node_t);
/* translate code-area-relative offsets to absolute addresses */
730 csn->start = code->entrypoint + csr->start;
731 csn->end = code->entrypoint + csr->end;
732 csn->restart = code->entrypoint + csr->restart;
734 /* insert into the tree */
736 critical_section_register(csn);
742 /* methodtree_comparator *******************************************************
744 Comparator function used for the AVL tree of methods.
747 treenode....the node from the tree
748 node........the node to compare to the tree-node
750 *******************************************************************************/
/* methodtree_comparator: AVL ordering function for methodtree.
   Treats the two elements as address ranges: returns "equal" when the
   probe range [mtepc->startpc, mtepc->endpc] lies inside the tree
   node's range, otherwise orders by (masked) startpc. ADDR_MASK strips
   the top bit on S390, where addresses are 31-bit.
   NOTE(review): sampled excerpt; the return statements and the
   surrounding #if defined(__S390__) lines are elided. */
752 static s4 methodtree_comparator(const void *treenode, const void *node)
754 methodtree_element *mte;
755 methodtree_element *mtepc;
757 mte = (methodtree_element *) treenode;
758 mtepc = (methodtree_element *) node;
760 /* compare both startpc and endpc of pc, even if they have the same value,
761 otherwise the avl_probe sometimes thinks the element is already in the
765 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
767 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
769 # define ADDR_MASK(a) (a)
772 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
773 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
774 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
775 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
778 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
789 /* codegen_insertmethod ********************************************************
791 Insert the machine code range of a method into the AVL tree of methods.
793 *******************************************************************************/
/* codegen_insertmethod: registers the [startpc, endpc] machine-code
   range of a compiled method in the global methodtree so the PC->PV
   lookups below can find it. The entry is permanent (NEW, heap).
   NOTE(review): sampled excerpt; the endpc assignment and the lock
   handling (if any) are elided. */
795 void codegen_insertmethod(u1 *startpc, u1 *endpc)
797 methodtree_element *mte;
799 /* allocate new method entry */
801 mte = NEW(methodtree_element);
803 mte->startpc = startpc;
806 /* this function does not return an error, but asserts for
809 avl_insert(methodtree, mte);
813 /* codegen_get_pv_from_pc ******************************************************
815 Find the PV for the given PC by searching in the AVL tree of
818 *******************************************************************************/
/* codegen_get_pv_from_pc: looks up the procedure vector (method start)
   for a machine PC via the methodtree. Used from the SIGSEGV handler:
   if no method covers the PC, the fault was not ours -- log the PC,
   dump a stacktrace and abort the VM.
   NOTE(review): sampled excerpt; mtepc initialization, the NULL check
   and the return are elided. */
820 u1 *codegen_get_pv_from_pc(u1 *pc)
822 methodtree_element mtepc;
823 methodtree_element *mte;
825 /* allocation of the search structure on the stack is much faster */
830 mte = avl_find(methodtree, &mtepc);
833 /* No method was found. Let's dump a stacktrace. */
835 #if defined(ENABLE_VMLOG)
836 vmlog_cacao_signl("SIGSEGV");
839 log_println("We received a SIGSEGV and tried to handle it, but we were");
840 log_println("unable to find a Java method at:");
/* pointer width decides the PC print format */
842 #if SIZEOF_VOID_P == 8
843 log_println("PC=0x%016lx", pc);
845 log_println("PC=0x%08x", pc);
849 log_println("Dumping the current stacktrace:");
851 #if defined(ENABLE_THREADS)
852 /* XXX michi: This should be available even without threads! */
853 threads_print_stacktrace();
856 vm_abort("Exiting...");
863 /* codegen_get_pv_from_pc_nocheck **********************************************
865 Find the PV for the given PC by searching in the AVL tree of
866 methods. This method does not check the return value and is used
869 *******************************************************************************/
/* codegen_get_pv_from_pc_nocheck: like codegen_get_pv_from_pc but
   without the abort-on-miss behavior -- callers must handle a failed
   lookup themselves.
   NOTE(review): sampled excerpt; mtepc initialization and the return
   are elided. */
871 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
873 methodtree_element mtepc;
874 methodtree_element *mte;
876 /* allocation of the search structure on the stack is much faster */
881 mte = avl_find(methodtree, &mtepc);
890 /* codegen_set_replacement_point_notrap ****************************************
892 Record the position of a non-trappable replacement point.
894 *******************************************************************************/
896 #if defined(ENABLE_REPLACEMENT)
/* codegen_set_replacement_point_notrap: records the current code
   offset in the next replacement point (which must carry the NOTRAP
   flag) and advances the cursor. Two prototypes are visible; the
   type-checking variant presumably exists only with !NDEBUG -- the
   selecting #if lines are elided from this listing. */
898 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
900 void codegen_set_replacement_point_notrap(codegendata *cd)
903 assert(cd->replacementpoint);
904 assert(cd->replacementpoint->type == type);
905 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
/* pc is stored as an offset here; codegen_finish rebases it to an
   absolute address */
907 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
909 cd->replacementpoint++;
911 #endif /* defined(ENABLE_REPLACEMENT) */
914 /* codegen_set_replacement_point ***********************************************
916 Record the position of a trappable replacement point.
918 *******************************************************************************/
920 #if defined(ENABLE_REPLACEMENT)
/* codegen_set_replacement_point: records the current code offset in
   the next (trappable) replacement point and advances the cursor.
   With opt_TestReplacement, space for a patcher-sized trap is reserved
   via lastmcodeptr. Two prototypes are visible; the selecting #if
   lines are elided from this listing. */
922 void codegen_set_replacement_point(codegendata *cd, s4 type)
924 void codegen_set_replacement_point(codegendata *cd)
927 assert(cd->replacementpoint);
928 assert(cd->replacementpoint->type == type);
929 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
/* stored as an offset; rebased to an absolute pc in codegen_finish */
931 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
933 cd->replacementpoint++;
936 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
937 if (opt_TestReplacement)
941 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
943 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
945 #endif /* defined(ENABLE_REPLACEMENT) */
948 /* codegen_finish **************************************************************
950 Finishes the code generation. A new memory, large enough for both
951 data and code, is allocated and data and code are copied together
952 to their final layout, unresolved jumps are resolved, ...
954 *******************************************************************************/
/* codegen_finish: moves the generated code from temporary dump memory
   into its final, permanent layout: allocates one block holding data
   segment + code (+ ncode for the interpreter), copies both, then
   resolves everything that was recorded as an offset during emission --
   jump tables, the line-number table, patcher positions, replacement
   points and data-segment references -- registers the method in the
   methodtree, finalizes critical sections and flushes the caches.
   NOTE(review): sampled excerpt; local declarations, #else/#endif
   lines and loop braces are elided throughout. */
956 void codegen_finish(jitdata *jd)
961 #if defined(ENABLE_INTRP)
970 /* get required compiler data */
975 /* prevent compiler warning */
977 #if defined(ENABLE_INTRP)
981 /* calculate the code length */
983 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
985 #if defined(ENABLE_STATISTICS)
987 count_code_len += mcodelen;
988 count_data_len += cd->dseglen;
992 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
994 #if defined(ENABLE_INTRP)
996 ncodelen = cd->ncodeptr - cd->ncodebase;
998 ncodelen = 0; /* avoid compiler warning */
1002 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
1003 alignedlen = alignedmcodelen + cd->dseglen;
1005 #if defined(ENABLE_INTRP)
1007 alignedlen += ncodelen;
1011 /* allocate new memory */
1013 code->mcodelength = mcodelen + cd->dseglen;
1014 code->mcode = CNEW(u1, alignedlen);
/* layout: [data segment (grows down)] [code (grows up)]; the
   entrypoint sits at the boundary (see the file header comment) */
1016 /* set the entrypoint of the method */
1018 assert(code->entrypoint == NULL);
1019 code->entrypoint = epoint = (code->mcode + cd->dseglen);
1021 /* fill the data segment (code->entrypoint must already be set!) */
1025 /* copy code to the new location */
1027 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
1029 #if defined(ENABLE_INTRP)
1030 /* relocate native dynamic superinstruction code (if any) */
1033 cd->mcodebase = code->entrypoint;
1036 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
1038 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
1040 /* flush the instruction and data caches */
1042 md_cacheflush(ncodebase, ncodelen);
1044 /* set some cd variables for dynamic_super_rerwite */
1046 cd->ncodebase = ncodebase;
1049 cd->ncodebase = NULL;
1052 dynamic_super_rewrite(cd);
1056 /* jump table resolving */
1058 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1059 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1060 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
1062 /* line number table resolving */
1068 for (lr = cd->linenumberreferences; lr != NULL; lr = lr->next) {
1070 target = lr->targetmpc;
1071 /* if the entry contains an mcode pointer (normal case), resolve it */
1072 /* (see doc/inlining_stacktrace.txt for details) */
1073 if (lr->linenumber >= -2) {
1074 target += (ptrint) epoint;
1076 *((functionptr *) ((ptrint) epoint + (ptrint) lr->tablepos)) =
1077 (functionptr) target;
1080 *((functionptr *) ((ptrint) epoint + cd->linenumbertablestartpos)) =
1081 (functionptr) ((ptrint) epoint + cd->linenumbertab);
1083 *((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
1086 /* patcher resolving */
1088 pr = list_first_unsynced(code->patchers);
1090 pr->mpc += (ptrint) epoint;
1091 pr->datap = (ptrint) (pr->disp + epoint);
1092 pr = list_next_unsynced(code->patchers, pr);
1095 #if defined(ENABLE_REPLACEMENT)
1096 /* replacement point resolving */
1101 rp = code->rplpoints;
1102 for (i=0; i<code->rplpointcount; ++i, ++rp) {
/* rp->pc held an offset until now (see codegen_set_replacement_point) */
1103 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1106 #endif /* defined(ENABLE_REPLACEMENT) */
1108 /* add method into methodtree to find the entrypoint */
1110 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1112 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1113 /* resolve data segment references */
1115 dseg_resolve_datareferences(jd);
1118 #if defined(ENABLE_THREADS)
1119 /* create critical sections */
1121 codegen_critical_section_finish(jd);
1124 /* flush the instruction and data caches */
1126 md_cacheflush(code->mcode, code->mcodelength);
1130 /* codegen_generate_stub_compiler **********************************************
1132 Wrapper for codegen_emit_stub_compiler.
1135 pointer to the compiler stub code.
1137 *******************************************************************************/
/* codegen_generate_stub_compiler: builds the tiny per-method
   "compiler stub" that is installed before a method is JIT-compiled.
   The stub's data words (a fake codeinfo pointing at the methodinfo,
   plus the asm_call_jit_compiler address in the non-signal variant)
   are laid out in front of the stub code. Two build variants are
   visible -- without and with JIT_COMPILER_VIA_SIGNAL (the latter
   emits a trap instead of a call); the #else/#endif lines are elided
   from this listing, as are jd/cd setup and the final return. */
1139 u1 *codegen_generate_stub_compiler(methodinfo *m)
1143 ptrint *d; /* pointer to data memory */
1144 u1 *c; /* pointer to code memory */
1147 /* mark dump memory */
1149 dumpsize = dump_size();
1151 /* allocate required data structures */
1156 jd->cd = DNEW(codegendata);
1159 /* get required compiler data */
1163 #if !defined(JIT_COMPILER_VIA_SIGNAL)
1164 /* allocate code memory */
1166 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1168 /* set pointers correctly */
1174 c = c + 3 * SIZEOF_VOID_P;
1177 /* NOTE: The codeinfo pointer is actually a pointer to the
1178 methodinfo (this fakes a codeinfo structure). */
1180 d[0] = (ptrint) asm_call_jit_compiler;
1182 d[2] = (ptrint) &d[1]; /* fake code->m */
1184 /* call the emit function */
1186 codegen_emit_stub_compiler(jd);
1188 #if defined(ENABLE_STATISTICS)
1190 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1195 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
/* JIT_COMPILER_VIA_SIGNAL variant: only 2 data words, stub is a trap */
1197 /* Allocate code memory. */
1199 c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1201 /* Set pointers correctly. */
1207 c = c + 2 * SIZEOF_VOID_P;
1210 /* NOTE: The codeinfo pointer is actually a pointer to the
1211 methodinfo (this fakes a codeinfo structure). */
1214 d[1] = (ptrint) &d[0]; /* fake code->m */
1216 /* Emit the trap instruction. */
1218 emit_trap_compiler(cd);
1220 #if defined(ENABLE_STATISTICS)
1222 count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1227 md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1230 /* release dump memory */
1232 dump_release(dumpsize);
1234 /* return native stub code */
1240 /* codegen_generate_stub_builtin ***********************************************
1242 Wrapper for codegen_emit_stub_native.
1244 *******************************************************************************/
/* codegen_generate_stub_builtin: generates a native-call stub for a
   builtin-table entry: sets up a temporary jitdata/codegendata on dump
   memory, emits the stub via codegen_emit_stub_native with the entry's
   descriptor and function pointer, then stores the finished entrypoint
   back into bte->stub. Optionally disassembles the stub for debugging.
   NOTE(review): sampled excerpt; jd allocation, skipparams setup,
   codegen_setup/codegen_finish calls and #endif lines are elided. */
1246 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
1253 /* mark dump memory */
1255 dumpsize = dump_size();
1260 jd->cd = DNEW(codegendata);
1264 /* Allocate codeinfo memory from the heap as we need to keep them. */
1266 jd->code = code_codeinfo_new(m);
1268 /* get required compiler data */
1272 /* setup code generation stuff */
1276 /* Set the number of native arguments we need to skip. */
1280 /* generate the code */
1282 #if defined(ENABLE_JIT)
1283 # if defined(ENABLE_INTRP)
1286 assert(bte->fp != NULL);
1287 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1288 # if defined(ENABLE_INTRP)
1293 /* reallocate the memory and finish the code generation */
1297 /* set the stub entry point in the builtin table */
1299 bte->stub = code->entrypoint;
1301 #if defined(ENABLE_STATISTICS)
1303 size_stub_native += code->mcodelength;
1306 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1307 /* disassemble native stub */
1309 if (opt_DisassembleStubs) {
/* code range = [entrypoint, entrypoint + mcodelength - dseglen):
   the data segment sits below the entrypoint and is excluded */
1310 codegen_disassemble_stub(m,
1311 (u1 *) (ptrint) code->entrypoint,
1312 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1314 /* show data segment */
1316 if (opt_showddatasegment)
1319 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1321 /* release memory */
1323 dump_release(dumpsize);
1327 /* codegen_generate_stub_native ************************************************
1329 Wrapper for codegen_emit_stub_native.
1332 the codeinfo representing the stub code.
1334 *******************************************************************************/
/* codegen_generate_stub_native: generates the JNI wrapper stub for a
   native method. Builds a temporary jitdata, derives an extended
   method descriptor nmd that prepends the native-ABI parameters
   (JNIEnv* and, for static methods, the class pointer -- hence
   skipparams), pre-allocates the native argument registers, emits the
   stub (JIT path or interpreter path), finishes code generation and
   returns the resulting codeinfo.
   NOTE(review): sampled excerpt; jd allocation, skipparams values,
   #else/#endif lines, paramslots copying and the return are elided. */
1336 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1345 /* mark dump memory */
1347 dumpsize = dump_size();
1352 jd->cd = DNEW(codegendata);
1353 jd->rd = DNEW(registerdata);
1356 /* Allocate codeinfo memory from the heap as we need to keep them. */
1358 jd->code = code_codeinfo_new(m);
1360 /* get required compiler data */
1364 /* set the flags for the current JIT run */
1366 #if defined(ENABLE_PROFILING)
1368 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1371 if (opt_verbosecall)
1372 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1374 /* setup code generation stuff */
1376 #if defined(ENABLE_JIT)
1377 # if defined(ENABLE_INTRP)
1385 /* create new method descriptor with additional native parameters */
1389 /* Set the number of native arguments we need to skip. */
1391 if (m->flags & ACC_STATIC)
/* methoddesc has one inline typedesc; size for paramcount + skipparams */
1396 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1397 md->paramcount * sizeof(typedesc) +
1398 skipparams * sizeof(typedesc));
1400 nmd->paramcount = md->paramcount + skipparams;
1402 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1404 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1406 if (m->flags & ACC_STATIC)
1407 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1409 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1412 #if defined(ENABLE_JIT)
1413 # if defined(ENABLE_INTRP)
1416 /* pre-allocate the arguments for the native ABI */
1418 md_param_alloc_native(nmd);
1421 /* generate the code */
1423 #if defined(ENABLE_JIT)
1424 # if defined(ENABLE_INTRP)
1426 intrp_createnativestub(f, jd, nmd);
1429 codegen_emit_stub_native(jd, nmd, f, skipparams);
1431 intrp_createnativestub(f, jd, nmd);
1434 /* reallocate the memory and finish the code generation */
1438 #if defined(ENABLE_STATISTICS)
1439 /* must be done after codegen_finish() */
1442 size_stub_native += code->mcodelength;
1445 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1446 /* disassemble native stub */
1448 if (opt_DisassembleStubs) {
1449 # if defined(ENABLE_DEBUG_FILTER)
1450 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1453 codegen_disassemble_stub(m,
1454 (u1 *) (ptrint) code->entrypoint,
1455 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1457 /* show data segment */
1459 if (opt_showddatasegment)
1463 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1465 /* release memory */
1467 dump_release(dumpsize);
1469 /* return native stub code */
1475 /* codegen_disassemble_nativestub **********************************************
1477 Disassembles the generated builtin or native stub.
1479 *******************************************************************************/
1481 #if defined(ENABLE_DISASSEMBLER)
/* codegen_disassemble_stub: debug helper -- prints a header with the
   method's class/name/descriptor and code length, then disassembles
   the machine code in [start, end).
   NOTE(review): sampled excerpt; the else-branch for an anonymous
   class and punctuation prints are elided. */
1482 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
1484 printf("Stub code: ");
1485 if (m->class != NULL)
1486 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1490 utf_fprint_printable_ascii(stdout, m->name);
1491 utf_fprint_printable_ascii(stdout, m->descriptor);
1492 printf("\nLength: %d\n\n", (s4) (end - start));
1494 DISASSEMBLE(start, end);
1499 /* codegen_start_native_call ***************************************************
1501 Prepares the stuff required for a native (JNI) function call:
1503 - adds a stackframe info structure to the chain, for stacktraces
1504 - prepares the local references table on the stack
1506 The layout of the native stub stackframe should look like this:
1508 +---------------------------+ <- java SP (of parent Java function)
1510 +---------------------------+ <- data SP
1512 | stackframe info structure |
1514 +---------------------------+
1516 | local references table |
1518 +---------------------------+
1520 | saved registers (if any) |
1522 +---------------------------+
1524 | arguments (if any) |
1526 +---------------------------+ <- current SP (native stub)
1528 *******************************************************************************/
/* Prepares a native (JNI) call before the native code runs: locates the
   stackframe info structure and local reference table inside the stub's
   stack frame, activates them for the current thread, and adds the
   stackframeinfo to the stacktrace chain.  Returns a wrapped classinfo
   for static methods (the hidden class argument); the non-static return
   path lies outside this view — TODO confirm it returns NULL. */
1530 java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
1532 stackframeinfo *sfi;
1533 localref_table *lrt;
1540 uint64_t *arg_stack;
1542 STATISTICS(count_calls_java_to_native++);
1544 /* Get the methodinfo. */
1546 m = code_get_methodinfo_for_pv(pv);
/* read the stub's frame size from the code-area header at pv */
1550 framesize = *((int32_t *) (pv + FrameSize));
/* the frame must at least hold the sfi and the local reference table */
1552 assert(framesize >= sizeof(stackframeinfo) + sizeof(localref_table));
1554 /* calculate needed values (per-architecture stack layout) */
1556 #if defined(__ALPHA__) || defined(__ARM__)
1557 datasp = sp + framesize - SIZEOF_VOID_P;
1558 javasp = sp + framesize;
1559 arg_regs = (uint64_t *) sp;
1560 arg_stack = (uint64_t *) javasp;
1561 #elif defined(__MIPS__)
1562 /* MIPS always uses 8 bytes to store the RA */
1563 datasp = sp + framesize - 8;
1564 javasp = sp + framesize;
1565 #elif defined(__S390__)
1566 datasp = sp + framesize - 8;
1567 javasp = sp + framesize;
1568 arg_regs = (uint64_t *) (sp + 96);
1569 arg_stack = (uint64_t *) javasp;
1570 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1571 datasp = sp + framesize;
1572 javasp = sp + framesize + SIZEOF_VOID_P;
1573 arg_regs = (uint64_t *) sp;
1574 arg_stack = (uint64_t *) javasp;
1575 #elif defined(__POWERPC__)
1576 datasp = sp + framesize;
1577 javasp = sp + framesize;
1578 arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
1579 arg_stack = (uint64_t *) javasp;
1580 #elif defined(__POWERPC64__)
1581 datasp = sp + framesize;
1582 javasp = sp + framesize;
1583 arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1584 arg_stack = (uint64_t *) javasp;
1586 /* XXX I was unable to do this port for SPARC64, sorry. (-michi) */
1587 /* XXX maybe we need to pass the RA as argument there */
1588 vm_abort("codegen_start_native_call: unsupported architecture");
1591 /* get data structures from stack */
/* sfi and lrt live at the top of the stub frame, just below datasp */
1593 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1594 lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
1595 sizeof(localref_table));
1597 #if defined(ENABLE_JNI)
1598 /* add current JNI local references table to this thread */
1600 localref_table_add(lrt);
1603 #if !defined(NDEBUG)
1604 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1605 /* print the call-trace if necessary */
1606 /* BEFORE: filling the local reference table */
1608 if (opt_TraceJavaCalls)
1609 trace_java_call_enter(m, arg_regs, arg_stack);
1613 #if defined(ENABLE_HANDLES)
1614 /* place all references into the local reference table */
1615 /* BEFORE: creating stackframeinfo */
1617 localref_native_enter(m, arg_regs, arg_stack);
1620 /* Add a stackframeinfo for this native method. We don't have RA
1621 and XPC here. These are determined in
1622 stacktrace_stackframeinfo_add. */
1624 stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
1626 /* Return a wrapped classinfo for static methods. */
1628 if (m->flags & ACC_STATIC)
1629 return LLNI_classinfo_wrap(m->class);
1635 /* codegen_finish_native_call **************************************************
1637 Removes the stuff required for a native (JNI) function call.
1638 Additionally it checks for an exceptions and in case, get the
1639 exception object and clear the pointer.
1641 *******************************************************************************/
/* Tears down what codegen_start_native_call set up, after the native
   code returned: removes the stackframeinfo from the chain, releases
   the thread's JNI local reference table, and fetches (and clears) any
   pending exception.  The exception object is returned to the stub;
   the return statement lies outside this view. */
1643 java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
1645 stackframeinfo *sfi;
1655 /* get information from method header */
1657 code = *((codeinfo **) (pv + CodeinfoPointer));
1658 framesize = *((int32_t *) (pv + FrameSize));
1661 /* get the methodinfo */
1666 /* calculate needed values (per-architecture stack layout) */
1668 #if defined(__ALPHA__) || defined(__ARM__)
1669 datasp = sp + framesize - SIZEOF_VOID_P;
1670 ret_regs = (uint64_t *) sp;
1671 #elif defined(__MIPS__)
1672 /* MIPS always uses 8 bytes to store the RA */
1673 datasp = sp + framesize - 8;
1674 #elif defined(__S390__)
1675 datasp = sp + framesize - 8;
1676 ret_regs = (uint64_t *) (sp + 96);
1677 #elif defined(__I386__)
1678 datasp = sp + framesize;
1679 ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
1680 #elif defined(__M68K__) || defined(__X86_64__)
1681 datasp = sp + framesize;
1682 ret_regs = (uint64_t *) sp;
1683 #elif defined(__POWERPC__)
1684 datasp = sp + framesize;
1685 ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
1686 #elif defined(__POWERPC64__)
1687 datasp = sp + framesize;
1688 ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
1690 vm_abort("codegen_finish_native_call: unsupported architecture");
1693 /* get data structures from stack */
1695 sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
1697 /* Remove current stackframeinfo from chain. */
1699 stacktrace_stackframeinfo_remove(sfi);
1701 #if defined(ENABLE_HANDLES)
1702 /* unwrap the return value from the local reference table */
1703 /* AFTER: removing the stackframeinfo */
1704 /* BEFORE: releasing the local reference table */
1706 localref_native_exit(m, ret_regs);
1709 /* get and unwrap the exception */
1710 /* AFTER: removing the stackframe info */
1711 /* BEFORE: releasing the local reference table */
1713 e = exceptions_get_and_clear_exception();
1716 #if defined(ENABLE_JNI)
1717 /* release JNI local references table for this thread */
1719 localref_frame_pop_all();
1720 localref_table_remove();
1723 #if !defined(NDEBUG)
1724 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1725 /* print the call-trace if necessary */
1726 /* AFTER: unwrapping the return value */
1728 if (opt_TraceJavaCalls)
1729 trace_java_call_exit(m, ret_regs);
1737 /* removecompilerstub **********************************************************
1739 Deletes a compilerstub from memory (simply by freeing it).
1741 *******************************************************************************/
/* Frees the memory occupied by a compiler stub. */
1743 void removecompilerstub(u1 *stub)
1745 /* pass size 1 to keep the intern function happy */
1747 CFREE((void *) stub, 1);
1751 /* removenativestub ************************************************************
1753 Removes a previously created native-stub from memory.
1755 *******************************************************************************/
/* Frees the memory occupied by a previously created native stub. */
1757 void removenativestub(u1 *stub)
1759 /* pass size 1 to keep the intern function happy */
1761 CFREE((void *) stub, 1);
1765 /* codegen_reg_of_var **********************************************************
1767 This function determines a register, to which the result of an
1768 operation should go, when it is ultimately intended to store the
1769 result in pseudoregister v. If v is assigned to an actual
1770 register, this register will be returned. Otherwise (when v is
1771 spilled) this function returns tempregnum. If not already done,
1772 regoff and flags are set in the stack location.
1774 *******************************************************************************/
/* Returns the register the result of an operation should go to: the
   variable's own register if it is allocated to one, otherwise the
   caller-supplied temporary (fallback is outside this view). */
1776 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1780 /* Do we have to generate a conditional move? Yes, then always
1781 return the temporary register. The real register is identified
1782 during the store. */
1784 if (opcode & ICMD_CONDITION_MASK)
/* a variable not spilled to memory already owns a real register */
1788 if (!(v->flags & INMEMORY))
1789 return v->vv.regoff;
1795 /* codegen_reg_of_dst **********************************************************
1797 This function determines a register, to which the result of an
1798 operation should go, when it is ultimately intended to store the
1799 result in iptr->dst.var. If dst.var is assigned to an actual
1800 register, this register will be returned. Otherwise (when it is
1801 spilled) this function returns tempregnum. If not already done,
1802 regoff and flags are set in the stack location.
1804 *******************************************************************************/
/* Convenience wrapper: determines the result register for the
   destination variable of iptr by delegating to codegen_reg_of_var. */
1806 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1808 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1812 /* codegen_emit_phi_moves ****************************************************
1814 Emits phi moves at the end of the basicblock.
1816 *******************************************************************************/
1818 #if defined(ENABLE_SSA)
/* Emits the copy instructions required by the phi functions at the end
   of basic block bptr (SSA deconstruction). */
1819 void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
1832 /* Moves from phi functions with highest indices have to be */
1833 /* inserted first, since this is the order as is used for */
1834 /* conflict resolution */
1836 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* lt_d / lt_s are the destination / source lifetime indices of move i */
1837 lt_d = ls->phi_moves[bptr->nr][i][0];
1838 lt_s = ls->phi_moves[bptr->nr][i][1];
1839 #if defined(SSA_DEBUG_VERBOSE)
1841 printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
/* an UNUSED source means there is nothing to copy for this move */
1843 if (lt_s == UNUSED) {
1844 #if defined(SSA_DEBUG_VERBOSE)
1846 printf(" ... not processed \n");
1851 d = VAR(ls->lifetime[lt_d].v_index);
1852 s = VAR(ls->lifetime[lt_s].v_index);
/* type == -1 marks a lifetime that was joined away; skip such moves */
1855 if (d->type == -1) {
1856 #if defined(SSA_DEBUG_VERBOSE)
1858 printf("...returning - phi lifetimes where joined\n");
1863 if (s->type == -1) {
1864 #if defined(SSA_DEBUG_VERBOSE)
1866 printf("...returning - phi lifetimes where joined\n");
/* build a pseudo copy instruction and let the backend emit the move */
1872 tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
1873 tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
1874 emit_copy(jd, &tmp_i);
1876 #if defined(SSA_DEBUG_VERBOSE)
/* verbose trace: M = memory slot, R = register, for dst <- src */
1877 if (compileverbose) {
1878 if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
1880 printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1882 else if (IS_INMEMORY(s->flags)) {
1884 printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1886 else if (IS_INMEMORY(d->flags)) {
1888 printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1892 printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1896 #endif /* defined(SSA_DEBUG_VERBOSE) */
1899 #endif /* defined(ENABLE_SSA) */
1904 * These are local overrides for various environment variables in Emacs.
1905 * Please do not remove this and leave it at the end of the file, where
1906 * Emacs will automagically detect them.
1907 * ---------------------------------------------------------------------
1910 * indent-tabs-mode: t
1914 * vim:noexpandtab:sw=4:ts=4: