1 /* src/vm/jit/codegen-common.c - architecture independent code generator stuff
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 All functions assume the following code area / data area layout:
29 | code area | code area grows to higher addresses
31 +-----------+ <-- start of procedure
33 | data area | data area grows to lower addresses
37 The functions first write into a temporary code/data area allocated by
38 "codegen_init". "codegen_finish" copies the code and data area into permanent
39 memory. All functions writing values into the data area return the offset
40 relative the begin of the code area (start of procedure).
56 #include "mm/memory.h"
58 #include "toolbox/avl.h"
59 #include "toolbox/list.h"
60 #include "toolbox/logging.h"
62 #include "native/jni.h"
63 #include "native/llni.h"
64 #include "native/localref.h"
65 #include "native/native.h"
67 #if defined(WITH_CLASSPATH_SUN)
68 # include "native/include/java_lang_Object.h"
69 # include "native/include/java_lang_String.h"
70 # include "native/include/java_nio_ByteBuffer.h" /* required by j.l.CL */
71 # include "native/include/java_lang_ClassLoader.h"
74 #include "native/include/java_lang_Class.h"
76 #include "threads/threads-common.h"
78 #include "vm/builtin.h"
79 #include "vm/exceptions.h"
80 #include "vm/stringlocal.h"
82 #include "vm/jit/abi.h"
83 #include "vm/jit/asmpart.h"
84 #include "vm/jit/code.h"
85 #include "vm/jit/codegen-common.h"
87 #if defined(ENABLE_DISASSEMBLER)
88 # include "vm/jit/disass.h"
91 #include "vm/jit/dseg.h"
92 #include "vm/jit/emit-common.h"
93 #include "vm/jit/jit.h"
94 #include "vm/jit/linenumbertable.h"
95 #include "vm/jit/methodheader.h"
96 #include "vm/jit/patcher-common.h"
97 #include "vm/jit/replace.h"
98 #if defined(ENABLE_SSA)
99 # include "vm/jit/optimizing/lsra.h"
100 # include "vm/jit/optimizing/ssa.h"
102 #include "vm/jit/stacktrace.h"
103 #include "vm/jit/trace.h"
105 #if defined(ENABLE_INTRP)
106 #include "vm/jit/intrp/intrp.h"
109 #include "vmcore/method.h"
110 #include "vmcore/options.h"
112 # include "vmcore/statistics.h"
114 #if defined(ENABLE_VMLOG)
115 #include <vmlog_cacao.h>
120 /* in this tree we store all method addresses *********************************/
/* methodtree maps [startpc, endpc] machine-code ranges to compiled methods.
   It is created in codegen_init and searched by codegen_get_pv_from_pc. */
122 static avl_tree_t *methodtree = NULL;
/* comparator used by the AVL tree; defined later in this file */
123 static s4 methodtree_comparator(const void *treenode, const void *node);
126 /* codegen_init ****************************************************************
130 *******************************************************************************/
132 void codegen_init(void)
134 /* this tree is global, not method specific */
137 #if defined(ENABLE_JIT)
138 methodtree_element *mte;
141 methodtree = avl_create(&methodtree_comparator);
143 #if defined(ENABLE_JIT)
/* Register the address range of the assembler call-in stub so that
   method-tree lookups (e.g. during stack walking) also cover it. */
144 /* insert asm_vm_call_method */
146 mte = NEW(methodtree_element);
148 mte->startpc = (u1 *) (ptrint) asm_vm_call_method;
149 mte->endpc = (u1 *) (ptrint) asm_vm_call_method_end;
151 avl_insert(methodtree, mte);
152 #endif /* defined(ENABLE_JIT) */
159 /* codegen_setup ***************************************************************
161 Allocates and initialises code area, data area and references.
163 *******************************************************************************/
165 void codegen_setup(jitdata *jd)
170 /* get required compiler data */
175 /* initialize members */
/* Temporary code buffer is dump-allocated (DMNEW); codegen_finish later
   copies it into permanent memory. */
179 cd->mcodebase = DMNEW(u1, MCODEINITSIZE);
180 cd->mcodeend = cd->mcodebase + MCODEINITSIZE;
181 cd->mcodesize = MCODEINITSIZE;
183 /* initialize mcode variables */
185 cd->mcodeptr = cd->mcodebase;
186 cd->lastmcodeptr = cd->mcodebase;
188 #if defined(ENABLE_INTRP)
189 /* native dynamic superinstructions variables */
192 cd->ncodebase = DMNEW(u1, NCODEINITSIZE);
193 cd->ncodesize = NCODEINITSIZE;
195 /* initialize ncode variables */
197 cd->ncodeptr = cd->ncodebase;
199 cd->lastinstwithoutdispatch = ~0; /* no inst without dispatch */
200 cd->superstarts = NULL;
207 cd->jumpreferences = NULL;
/* data references are only collected on architectures that embed
   data-segment addresses directly in the code */
209 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
210 cd->datareferences = NULL;
213 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
214 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
215 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
219 /* codegen_reset ***************************************************************
221 Resets the codegen data structure so we can recompile the method.
223 *******************************************************************************/
225 static void codegen_reset(jitdata *jd)
231 /* get required compiler data */
236 /* reset error flag */
238 cd->flags &= ~CODEGENDATA_FLAG_ERROR;
240 /* reset some members, we reuse the code memory already allocated
241 as this should have almost the correct size */
243 cd->mcodeptr = cd->mcodebase;
244 cd->lastmcodeptr = cd->mcodebase;
249 cd->jumpreferences = NULL;
251 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
252 cd->datareferences = NULL;
/* fresh per-compilation lists; the old dump-allocated ones are discarded */
255 cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
256 cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
257 cd->linenumbers = list_create_dump(OFFSET(linenumbertable_list_entry_t, linkage));
259 /* We need to clear the mpc and the branch references from all
260 basic blocks as they will definitely change. */
262 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
264 bptr->branchrefs = NULL;
267 /* We need to clear all the patcher references from the codeinfo
268 since they all will be regenerated */
270 patcher_list_reset(code);
272 #if defined(ENABLE_REPLACEMENT)
/* replacement points are regenerated by the new codegen pass as well */
273 code->rplpoints = NULL;
274 code->rplpointcount = 0;
275 code->regalloc = NULL;
276 code->regalloccount = 0;
277 code->globalcount = 0;
282 /* codegen_generate ************************************************************
284 Generates the code for the currently compiled method.
286 *******************************************************************************/
288 bool codegen_generate(jitdata *jd)
292 /* get required compiler data */
296 /* call the machine-dependent code generation function */
298 if (!codegen_emit(jd))
301 /* check for an error */
303 if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
304 /* check for long-branches flag, if it is set we recompile the
309 log_message_method("Re-generating code: ", jd->m);
312 /* XXX maybe we should tag long-branches-methods for recompilation */
/* The only recoverable error is the long-branches condition: reset the
   codegen state and run codegen_emit a second time with long branches
   enabled. Any other error flag combination aborts the VM below. */
314 if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
315 /* we have to reset the codegendata structure first */
319 /* and restart the compiler run */
321 if (!codegen_emit(jd))
325 vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
330 log_message_method("Re-generating code done: ", jd->m);
334 /* reallocate the memory and finish the code generation */
338 /* everything's ok */
344 /* codegen_close ***************************************************************
348 *******************************************************************************/
350 void codegen_close(void)
/* intentionally (almost) empty: the global method tree lives for the
   whole VM lifetime */
352 /* TODO: release avl tree on i386 and x86_64 */
356 /* codegen_increase ************************************************************
360 *******************************************************************************/
362 void codegen_increase(codegendata *cd)
366 /* save old mcodebase pointer */
368 oldmcodebase = cd->mcodebase;
370 /* reallocate to new, doubled memory */
372 cd->mcodebase = DMREALLOC(cd->mcodebase,
377 cd->mcodeend = cd->mcodebase + cd->mcodesize;
379 /* set new mcodeptr */
/* rebase mcodeptr: the buffer may have moved, so keep the same offset */
381 cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
383 #if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
384 || defined(__SPARC_64__)
385 /* adjust the pointer to the last patcher position */
387 if (cd->lastmcodeptr != NULL)
388 cd->lastmcodeptr = cd->mcodebase + (cd->lastmcodeptr - oldmcodebase);
393 /* codegen_ncode_increase ******************************************************
397 *******************************************************************************/
399 #if defined(ENABLE_INTRP)
400 u1 *codegen_ncode_increase(codegendata *cd, u1 *ncodeptr)
404 /* save old ncodebase pointer */
406 oldncodebase = cd->ncodebase;
408 /* reallocate to new, doubled memory */
410 cd->ncodebase = DMREALLOC(cd->ncodebase,
416 /* return the new ncodeptr */
/* rebase the caller's write pointer into the possibly-moved buffer */
418 return (cd->ncodebase + (ncodeptr - oldncodebase));
423 /* codegen_add_branch_ref ******************************************************
425 Prepends an branch to the list.
427 *******************************************************************************/
429 void codegen_add_branch_ref(codegendata *cd, basicblock *target, s4 condition, s4 reg, u4 options)
434 STATISTICS(count_branches_unresolved++);
436 /* calculate the mpc of the branch instruction */
/* record the branch site as an offset into the temporary code buffer;
   it is patched later by codegen_resolve_branchrefs */
438 branchmpc = cd->mcodeptr - cd->mcodebase;
440 br = DNEW(branchref);
442 br->branchmpc = branchmpc;
443 br->condition = condition;
445 br->options = options;
/* prepend to the target block's singly-linked list of pending branches */
446 br->next = target->branchrefs;
448 target->branchrefs = br;
452 /* codegen_resolve_branchrefs **************************************************
454 Resolves and patches the branch references of a given basic block.
456 *******************************************************************************/
458 void codegen_resolve_branchrefs(codegendata *cd, basicblock *bptr)
463 /* Save the mcodeptr because in the branch emitting functions
464 we generate code somewhere inside already generated code,
465 but we're still in the actual code generation phase. */
467 mcodeptr = cd->mcodeptr;
469 /* just to make sure */
/* the target block must already have a machine-code position */
471 assert(bptr->mpc >= 0);
473 for (br = bptr->branchrefs; br != NULL; br = br->next) {
474 /* temporary set the mcodeptr */
/* point mcodeptr at the recorded branch site so the emitter
   overwrites the placeholder instruction in place */
476 cd->mcodeptr = cd->mcodebase + br->branchmpc;
478 /* emit_bccz and emit_branch emit the correct code, even if we
479 pass condition == BRANCH_UNCONDITIONAL or reg == -1. */
481 emit_bccz(cd, bptr, br->condition, br->reg, br->options);
484 /* restore mcodeptr */
486 cd->mcodeptr = mcodeptr;
490 /* codegen_branch_label_add ****************************************************
492 Append an branch to the label-branch list.
494 *******************************************************************************/
496 void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
499 branch_label_ref_t *br;
502 /* get the label list */
504 list = cd->brancheslabel;
506 /* calculate the current mpc */
/* offset of the branch site within the temporary code buffer */
508 mpc = cd->mcodeptr - cd->mcodebase;
510 br = DNEW(branch_label_ref_t);
514 br->condition = condition;
516 br->options = options;
518 /* add the branch to the list */
520 list_add_last_unsynced(list, br);
524 /* codegen_critical_section_new ************************************************
526 Allocates a new critical-section reference and adds it to the
527 critical-section list.
529 *******************************************************************************/
531 #if defined(ENABLE_THREADS)
532 void codegen_critical_section_new(codegendata *cd)
535 critical_section_ref_t *csr;
538 /* get the critical section list */
540 list = cd->listcritical;
542 /* calculate the current mpc */
544 mpc = cd->mcodeptr - cd->mcodebase;
546 csr = DNEW(critical_section_ref_t);
548 /* We only can set restart right now, as start and end are set by
549 the following, corresponding functions. */
555 /* add the branch to the list */
557 list_add_last_unsynced(list, csr);
562 /* codegen_critical_section_start **********************************************
564 Set the start-point of the current critical section (which is the
565 last element of the list).
567 *******************************************************************************/
569 #if defined(ENABLE_THREADS)
570 void codegen_critical_section_start(codegendata *cd)
573 critical_section_ref_t *csr;
576 /* get the critical section list */
578 list = cd->listcritical;
580 /* calculate the current mpc */
582 mpc = cd->mcodeptr - cd->mcodebase;
584 /* get the current critical section */
/* the section being filled in is always the most recently added one */
586 csr = list_last_unsynced(list);
588 /* set the start point */
/* -1 marks "not yet set"; starting the same section twice is a bug */
590 assert(csr->start == -1);
597 /* codegen_critical_section_end ************************************************
599 Set the end-point of the current critical section (which is the
600 last element of the list).
602 *******************************************************************************/
604 #if defined(ENABLE_THREADS)
605 void codegen_critical_section_end(codegendata *cd)
608 critical_section_ref_t *csr;
611 /* get the critical section list */
613 list = cd->listcritical;
615 /* calculate the current mpc */
617 mpc = cd->mcodeptr - cd->mcodebase;
619 /* get the current critical section */
621 csr = list_last_unsynced(list);
623 /* set the end point */
/* -1 marks "not yet set"; ending the same section twice is a bug */
625 assert(csr->end == -1);
632 /* codegen_critical_section_finish *********************************************
634 Finish the critical sections, create the critical section nodes for
635 the AVL tree and insert them into the tree.
637 *******************************************************************************/
639 #if defined(ENABLE_THREADS)
640 static void codegen_critical_section_finish(jitdata *jd)
645 critical_section_ref_t *csr;
646 critical_section_node_t *csn;
648 /* get required compiler data */
653 /* get the critical section list */
655 list = cd->listcritical;
657 /* iterate over all critical sections */
659 for (csr = list_first_unsynced(list); csr != NULL;
660 csr = list_next_unsynced(list, csr)) {
661 /* check if all points are set */
663 assert(csr->start != -1);
664 assert(csr->end != -1);
665 assert(csr->restart != -1);
667 /* allocate tree node */
669 csn = NEW(critical_section_node_t);
/* convert buffer-relative offsets into absolute code addresses,
   now that the method's final entrypoint is known */
671 csn->start = code->entrypoint + csr->start;
672 csn->end = code->entrypoint + csr->end;
673 csn->restart = code->entrypoint + csr->restart;
675 /* insert into the tree */
677 critical_section_register(csn);
683 /* methodtree_comparator *******************************************************
685 Comparator function used for the AVL tree of methods.
688 treenode....the node from the tree
689 node........the node to compare to the tree-node
691 *******************************************************************************/
693 static s4 methodtree_comparator(const void *treenode, const void *node)
695 methodtree_element *mte;
696 methodtree_element *mtepc;
698 mte = (methodtree_element *) treenode;
699 mtepc = (methodtree_element *) node;
701 /* compare both startpc and endpc of pc, even if they have the same value,
702 otherwise the avl_probe sometimes thinks the element is already in the
706 /* On S390 addresses are 31 bit. Compare only 31 bits of value.
/* ADDR_MASK strips the top bit on S390 (31-bit addresses) and is a
   no-op elsewhere */
708 # define ADDR_MASK(a) ((a) & 0x7FFFFFFF)
710 # define ADDR_MASK(a) (a)
/* treat the probe range as "equal" when it lies entirely inside the
   tree node's [startpc, endpc] range */
713 if (ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->startpc) &&
714 ADDR_MASK((long) mtepc->startpc) <= ADDR_MASK((long) mte->endpc) &&
715 ADDR_MASK((long) mte->startpc) <= ADDR_MASK((long) mtepc->endpc) &&
716 ADDR_MASK((long) mtepc->endpc) <= ADDR_MASK((long) mte->endpc)) {
719 } else if (ADDR_MASK((long) mtepc->startpc) < ADDR_MASK((long) mte->startpc)) {
730 /* codegen_insertmethod ********************************************************
732 Insert the machine code range of a method into the AVL tree of methods.
734 *******************************************************************************/
736 void codegen_insertmethod(u1 *startpc, u1 *endpc)
738 methodtree_element *mte;
740 /* allocate new method entry */
/* NEW allocates from permanent memory: the entry must outlive the
   current compilation run */
742 mte = NEW(methodtree_element);
744 mte->startpc = startpc;
747 /* this function does not return an error, but asserts for
750 avl_insert(methodtree, mte);
754 /* codegen_get_pv_from_pc ******************************************************
756 Find the PV for the given PC by searching in the AVL tree of
759 *******************************************************************************/
761 u1 *codegen_get_pv_from_pc(u1 *pc)
763 methodtree_element mtepc;
764 methodtree_element *mte;
766 /* allocation of the search structure on the stack is much faster */
771 mte = avl_find(methodtree, &mtepc);
774 /* No method was found. Let's dump a stacktrace. */
/* A failed lookup here means a PC that belongs to no known Java
   method — typically a stray SIGSEGV — so this path aborts the VM. */
776 #if defined(ENABLE_VMLOG)
777 vmlog_cacao_signl("SIGSEGV");
780 log_println("We received a SIGSEGV and tried to handle it, but we were");
781 log_println("unable to find a Java method at:");
783 #if SIZEOF_VOID_P == 8
784 log_println("PC=0x%016lx", pc);
786 log_println("PC=0x%08x", pc);
790 log_println("Dumping the current stacktrace:");
792 #if defined(ENABLE_THREADS)
793 /* XXX michi: This should be available even without threads! */
794 threads_print_stacktrace();
797 vm_abort("Exiting...");
804 /* codegen_get_pv_from_pc_nocheck **********************************************
806 Find the PV for the given PC by searching in the AVL tree of
807 methods. This method does not check the return value and is used
810 *******************************************************************************/
812 u1 *codegen_get_pv_from_pc_nocheck(u1 *pc)
814 methodtree_element mtepc;
815 methodtree_element *mte;
/* same lookup as codegen_get_pv_from_pc, but a miss is reported to the
   caller instead of aborting the VM */
817 /* allocation of the search structure on the stack is much faster */
822 mte = avl_find(methodtree, &mtepc);
831 /* codegen_set_replacement_point_notrap ****************************************
833 Record the position of a non-trappable replacement point.
835 *******************************************************************************/
837 #if defined(ENABLE_REPLACEMENT)
839 void codegen_set_replacement_point_notrap(codegendata *cd, s4 type)
841 void codegen_set_replacement_point_notrap(codegendata *cd)
844 assert(cd->replacementpoint);
845 assert(cd->replacementpoint->type == type);
846 assert(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP);
/* pc is stored as an offset for now; codegen_finish rebases it onto
   the final entrypoint address */
848 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
850 cd->replacementpoint++;
852 #endif /* defined(ENABLE_REPLACEMENT) */
855 /* codegen_set_replacement_point ***********************************************
857 Record the position of a trappable replacement point.
859 *******************************************************************************/
861 #if defined(ENABLE_REPLACEMENT)
863 void codegen_set_replacement_point(codegendata *cd, s4 type)
865 void codegen_set_replacement_point(codegendata *cd)
868 assert(cd->replacementpoint);
869 assert(cd->replacementpoint->type == type);
870 assert(!(cd->replacementpoint->flags & RPLPOINT_FLAG_NOTRAP));
/* pc is stored as an offset for now; codegen_finish rebases it onto
   the final entrypoint address */
872 cd->replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
874 cd->replacementpoint++;
877 /* XXX actually we should use an own REPLACEMENT_NOPS here! */
878 if (opt_TestReplacement)
/* reserve room so a patcher call can later be placed at this site */
882 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
884 cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
886 #endif /* defined(ENABLE_REPLACEMENT) */
889 /* codegen_finish **************************************************************
891 Finishes the code generation. A new memory, large enough for both
892 data and code, is allocated and data and code are copied together
893 to their final layout, unresolved jumps are resolved, ...
895 *******************************************************************************/
897 void codegen_finish(jitdata *jd)
902 #if defined(ENABLE_INTRP)
911 /* get required compiler data */
916 /* prevent compiler warning */
918 #if defined(ENABLE_INTRP)
922 /* calculate the code length */
924 mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
926 #if defined(ENABLE_STATISTICS)
928 count_code_len += mcodelen;
929 count_data_len += cd->dseglen;
933 alignedmcodelen = MEMORY_ALIGN(mcodelen, MAX_ALIGN);
935 #if defined(ENABLE_INTRP)
937 ncodelen = cd->ncodeptr - cd->ncodebase;
939 ncodelen = 0; /* avoid compiler warning */
943 cd->dseglen = MEMORY_ALIGN(cd->dseglen, MAX_ALIGN);
944 alignedlen = alignedmcodelen + cd->dseglen;
946 #if defined(ENABLE_INTRP)
948 alignedlen += ncodelen;
952 /* allocate new memory */
/* one permanent allocation holds data segment followed by code; the
   entrypoint sits at the data/code boundary (see file-header layout) */
954 code->mcodelength = mcodelen + cd->dseglen;
955 code->mcode = CNEW(u1, alignedlen);
957 /* set the entrypoint of the method */
959 assert(code->entrypoint == NULL);
960 code->entrypoint = epoint = (code->mcode + cd->dseglen);
962 /* fill the data segment (code->entrypoint must already be set!) */
966 /* copy code to the new location */
968 MCOPY((void *) code->entrypoint, cd->mcodebase, u1, mcodelen);
970 #if defined(ENABLE_INTRP)
971 /* relocate native dynamic superinstruction code (if any) */
974 cd->mcodebase = code->entrypoint;
977 u1 *ncodebase = code->mcode + cd->dseglen + alignedmcodelen;
979 MCOPY((void *) ncodebase, cd->ncodebase, u1, ncodelen);
981 /* flush the instruction and data caches */
983 md_cacheflush(ncodebase, ncodelen);
985 /* set some cd variables for dynamic_super_rerwite */
987 cd->ncodebase = ncodebase;
990 cd->ncodebase = NULL;
993 dynamic_super_rewrite(cd);
997 /* Create the exception table. */
999 exceptiontable_create(jd);
1001 /* Create the linenumber table. */
1003 linenumbertable_create(jd);
1005 /* jump table resolving */
/* rewrite each jump-table slot from a buffer-relative offset to the
   absolute address of its target block */
1007 for (jr = cd->jumpreferences; jr != NULL; jr = jr->next)
1008 *((functionptr *) ((ptrint) epoint + jr->tablepos)) =
1009 (functionptr) ((ptrint) epoint + (ptrint) jr->target->mpc);
1011 /* patcher resolving */
/* patcher positions were recorded relative to the code buffer; make
   them absolute now that epoint is final */
1013 pr = list_first_unsynced(code->patchers);
1015 pr->mpc += (ptrint) epoint;
1016 pr->datap = (ptrint) (pr->disp + epoint);
1017 pr = list_next_unsynced(code->patchers, pr);
1020 #if defined(ENABLE_REPLACEMENT)
1021 /* replacement point resolving */
1026 rp = code->rplpoints;
1027 for (i=0; i<code->rplpointcount; ++i, ++rp) {
1028 rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
1031 #endif /* defined(ENABLE_REPLACEMENT) */
1033 /* add method into methodtree to find the entrypoint */
1035 codegen_insertmethod(code->entrypoint, code->entrypoint + mcodelen);
1037 #if defined(__I386__) || defined(__X86_64__) || defined(__XDSPCORE__) || defined(__M68K__) || defined(ENABLE_INTRP)
1038 /* resolve data segment references */
1040 dseg_resolve_datareferences(jd);
1043 #if defined(ENABLE_THREADS)
1044 /* create cirtical sections */
1046 codegen_critical_section_finish(jd);
1049 /* flush the instruction and data caches */
1051 md_cacheflush(code->mcode, code->mcodelength);
1055 /* codegen_generate_stub_compiler **********************************************
1057 Wrapper for codegen_emit_stub_compiler.
1060 pointer to the compiler stub code.
1062 *******************************************************************************/
1064 u1 *codegen_generate_stub_compiler(methodinfo *m)
1068 ptrint *d; /* pointer to data memory */
1069 u1 *c; /* pointer to code memory */
1072 /* mark dump memory */
1074 dumpsize = dump_size();
1076 /* allocate required data structures */
1081 jd->cd = DNEW(codegendata);
1084 /* get required compiler data */
1088 #if !defined(JIT_COMPILER_VIA_SIGNAL)
1089 /* allocate code memory */
/* layout: a small data header of pointer-sized slots, followed by the
   stub code itself */
1091 c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1093 /* set pointers correctly */
1099 c = c + 3 * SIZEOF_VOID_P;
1102 /* NOTE: The codeinfo pointer is actually a pointer to the
1103 methodinfo (this fakes a codeinfo structure). */
1105 d[0] = (ptrint) asm_call_jit_compiler;
1107 d[2] = (ptrint) &d[1]; /* fake code->m */
1109 /* call the emit function */
1111 codegen_emit_stub_compiler(jd);
1113 #if defined(ENABLE_STATISTICS)
1115 count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1120 md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
/* signal-based variant: the stub traps into the JIT, so only two data
   slots are needed and the code is a single trap instruction */
1122 /* Allocate code memory. */
1124 c = CNEW(uint8_t, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1126 /* Set pointers correctly. */
1132 c = c + 2 * SIZEOF_VOID_P;
1135 /* NOTE: The codeinfo pointer is actually a pointer to the
1136 methodinfo (this fakes a codeinfo structure). */
1139 d[1] = (ptrint) &d[0]; /* fake code->m */
1141 /* Emit the trap instruction. */
1143 emit_trap_compiler(cd);
1145 #if defined(ENABLE_STATISTICS)
1147 count_cstub_len += 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
1152 md_cacheflush(cd->mcodebase, 2 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
1155 /* release dump memory */
1157 dump_release(dumpsize);
1159 /* return native stub code */
1165 /* codegen_generate_stub_builtin ***********************************************
1167 Wrapper for codegen_emit_stub_native.
1169 *******************************************************************************/
1171 void codegen_generate_stub_builtin(methodinfo *m, builtintable_entry *bte)
1178 /* mark dump memory */
1180 dumpsize = dump_size();
1182 /* Create JIT data structure. */
1184 jd = jit_jitdata_new(m);
1186 /* Get required compiler data. */
1190 /* setup code generation stuff */
1194 /* Set the number of native arguments we need to skip. */
1198 /* generate the code */
1200 #if defined(ENABLE_JIT)
1201 # if defined(ENABLE_INTRP)
/* builtin functions must have a native function pointer */
1204 assert(bte->fp != NULL);
1205 codegen_emit_stub_native(jd, bte->md, bte->fp, skipparams);
1206 # if defined(ENABLE_INTRP)
1211 /* reallocate the memory and finish the code generation */
1215 /* set the stub entry point in the builtin table */
1217 bte->stub = code->entrypoint;
1219 #if defined(ENABLE_STATISTICS)
1221 size_stub_native += code->mcodelength;
1224 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1225 /* disassemble native stub */
1227 if (opt_DisassembleStubs) {
/* the code portion runs from entrypoint to mcodelength minus the
   data-segment length */
1228 codegen_disassemble_stub(m,
1229 (u1 *) (ptrint) code->entrypoint,
1230 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1232 /* show data segment */
1234 if (opt_showddatasegment)
1237 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1239 /* release memory */
1241 dump_release(dumpsize);
1245 /* codegen_generate_stub_native ************************************************
1247 Wrapper for codegen_emit_stub_native.
1250 the codeinfo representing the stub code.
1252 *******************************************************************************/
1254 codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
1263 /* mark dump memory */
1265 dumpsize = dump_size();
1267 /* Create JIT data structure. */
1269 jd = jit_jitdata_new(m);
1271 /* Get required compiler data. */
1275 /* set the flags for the current JIT run */
1277 #if defined(ENABLE_PROFILING)
1279 jd->flags |= JITDATA_FLAG_INSTRUMENT;
1282 if (opt_verbosecall)
1283 jd->flags |= JITDATA_FLAG_VERBOSECALL;
1285 /* setup code generation stuff */
1287 #if defined(ENABLE_JIT)
1288 # if defined(ENABLE_INTRP)
1296 /* create new method descriptor with additional native parameters */
1300 /* Set the number of native arguments we need to skip. */
/* native calling convention prepends the JNI environment pointer and,
   for static methods, the class pointer */
1302 if (m->flags & ACC_STATIC)
/* methoddesc has a flexible typedesc tail, hence the byte-size
   computation instead of a plain DNEW */
1307 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
1308 md->paramcount * sizeof(typedesc) +
1309 skipparams * sizeof(typedesc));
1311 nmd->paramcount = md->paramcount + skipparams;
1313 nmd->params = DMNEW(paramdesc, nmd->paramcount);
1315 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
1317 if (m->flags & ACC_STATIC)
1318 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
1320 MCOPY(nmd->paramtypes + skipparams, md->paramtypes, typedesc,
1323 #if defined(ENABLE_JIT)
1324 # if defined(ENABLE_INTRP)
1327 /* pre-allocate the arguments for the native ABI */
1329 md_param_alloc_native(nmd);
1332 /* generate the code */
1334 #if defined(ENABLE_JIT)
1335 # if defined(ENABLE_INTRP)
1337 intrp_createnativestub(f, jd, nmd);
1340 codegen_emit_stub_native(jd, nmd, f, skipparams);
1342 intrp_createnativestub(f, jd, nmd);
1345 /* reallocate the memory and finish the code generation */
1349 #if defined(ENABLE_STATISTICS)
1350 /* must be done after codegen_finish() */
1353 size_stub_native += code->mcodelength;
1356 #if !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER)
1357 /* disassemble native stub */
1359 if (opt_DisassembleStubs) {
1360 # if defined(ENABLE_DEBUG_FILTER)
1361 if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
1364 codegen_disassemble_stub(m,
1365 (u1 *) (ptrint) code->entrypoint,
1366 (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
1368 /* show data segment */
1370 if (opt_showddatasegment)
1374 #endif /* !defined(NDEBUG) && defined(ENABLE_DISASSEMBLER) */
1376 /* release memory */
1378 dump_release(dumpsize);
1380 /* return native stub code */
1386 /* codegen_disassemble_nativestub **********************************************
1388 Disassembles the generated builtin or native stub.
1390 *******************************************************************************/
1392 #if defined(ENABLE_DISASSEMBLER)
1393 void codegen_disassemble_stub(methodinfo *m, u1 *start, u1 *end)
/* print a "Class.name(descriptor)" header, then the disassembly of the
   code in [start, end) */
1395 printf("Stub code: ");
1396 if (m->class != NULL)
1397 utf_fprint_printable_ascii_classname(stdout, m->class->name);
1401 utf_fprint_printable_ascii(stdout, m->name);
1402 utf_fprint_printable_ascii(stdout, m->descriptor);
1403 printf("\nLength: %d\n\n", (s4) (end - start));
1405 DISASSEMBLE(start, end);
1410 /* codegen_start_native_call ***************************************************
1412 Prepares the stuff required for a native (JNI) function call:
1414 - adds a stackframe info structure to the chain, for stacktraces
1415 - prepares the local references table on the stack
1417 The layout of the native stub stackframe should look like this:
1419 +---------------------------+ <- java SP (of parent Java function)
1421 +---------------------------+ <- data SP
1423 | stackframe info structure |
1425 +---------------------------+
1427 | local references table |
1429 +---------------------------+
1431 | saved registers (if any) |
1433 +---------------------------+
1435 | arguments (if any) |
1437 +---------------------------+ <- current SP (native stub)
1439 *******************************************************************************/
1441 java_handle_t *codegen_start_native_call(u1 *sp, u1 *pv)
1443 stackframeinfo_t *sfi;
1444 localref_table *lrt;
1451 uint64_t *arg_stack;
1453 STATISTICS(count_calls_java_to_native++);
1455 /* Get the methodinfo. */
1457 m = code_get_methodinfo_for_pv(pv);
/* the stub's frame size is read from the method header stored in front
   of the code (pv + FrameSize) */
1461 framesize = *((int32_t *) (pv + FrameSize));
/* the frame must be large enough to hold both on-stack structures */
1463 assert(framesize >= sizeof(stackframeinfo_t) + sizeof(localref_table));
1465 /* calculate needed values */
/* per-architecture: where the data area, parent Java SP and the saved
   argument registers / stack arguments live relative to sp */
1467 #if defined(__ALPHA__) || defined(__ARM__)
1468 datasp = sp + framesize - SIZEOF_VOID_P;
1469 javasp = sp + framesize;
1470 arg_regs = (uint64_t *) sp;
1471 arg_stack = (uint64_t *) javasp;
1472 #elif defined(__MIPS__)
1473 /* MIPS always uses 8 bytes to store the RA */
1474 datasp = sp + framesize - 8;
1475 javasp = sp + framesize;
1476 #elif defined(__S390__)
1477 datasp = sp + framesize - 8;
1478 javasp = sp + framesize;
1479 arg_regs = (uint64_t *) (sp + 96);
1480 arg_stack = (uint64_t *) javasp;
1481 #elif defined(__I386__) || defined(__M68K__) || defined(__X86_64__)
1482 datasp = sp + framesize;
1483 javasp = sp + framesize + SIZEOF_VOID_P;
1484 arg_regs = (uint64_t *) sp;
1485 arg_stack = (uint64_t *) javasp;
1486 #elif defined(__POWERPC__)
1487 datasp = sp + framesize;
1488 javasp = sp + framesize;
1489 arg_regs = (uint64_t *) (sp + LA_SIZE + 4 * SIZEOF_VOID_P);
1490 arg_stack = (uint64_t *) javasp;
1491 #elif defined(__POWERPC64__)
1492 datasp = sp + framesize;
1493 javasp = sp + framesize;
1494 arg_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 4 * SIZEOF_VOID_P);
1495 arg_stack = (uint64_t *) javasp;
1497 /* XXX is was unable to do this port for SPARC64, sorry. (-michi) */
1498 /* XXX maybe we need to pass the RA as argument there */
1499 vm_abort("codegen_start_native_call: unsupported architecture");
1502 /* get data structures from stack */
/* sfi and lrt occupy the top of the frame, below datasp (see the
   frame-layout diagram in the banner above) */
1504 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1505 lrt = (localref_table *) (datasp - sizeof(stackframeinfo_t) -
1506 sizeof(localref_table));
1508 #if defined(ENABLE_JNI)
1509 /* add current JNI local references table to this thread */
1511 localref_table_add(lrt);
1514 #if !defined(NDEBUG)
1515 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1516 /* print the call-trace if necesarry */
1517 /* BEFORE: filling the local reference table */
1519 if (opt_TraceJavaCalls)
1520 trace_java_call_enter(m, arg_regs, arg_stack);
1524 #if defined(ENABLE_HANDLES)
1525 /* place all references into the local reference table */
1526 /* BEFORE: creating stackframeinfo */
1528 localref_native_enter(m, arg_regs, arg_stack);
1531 /* Add a stackframeinfo for this native method. We don't have RA
1532 and XPC here. These are determined in
1533 stacktrace_stackframeinfo_add. */
1535 stacktrace_stackframeinfo_add(sfi, pv, sp, NULL, NULL);
1537 /* Return a wrapped classinfo for static methods. */
1539 if (m->flags & ACC_STATIC)
1540 return LLNI_classinfo_wrap(m->class);
1546 /* codegen_finish_native_call **************************************************
1548 Removes the stuff required for a native (JNI) function call.
1549 Additionally it checks for an exception and, if one occurred,
1550 gets the exception object and clears the exception pointer.
1552 *******************************************************************************/
1554 java_object_t *codegen_finish_native_call(u1 *sp, u1 *pv)
1556 stackframeinfo_t *sfi;
1566 /* get information from method header */
1568 code = code_get_codeinfo_for_pv(pv);
/* The stub's frame size is read from the method header, addressed
   relative to the procedure vector (pv). */
1570 framesize = *((int32_t *) (pv + FrameSize));
1574 /* get the methodinfo */
1579 /* calculate needed values */
/* Per-architecture layout: datasp marks the top of the data area,
   ret_regs points at the return value saved inside the stub's
   stackframe. */
1581 #if defined(__ALPHA__) || defined(__ARM__)
1582 datasp = sp + framesize - SIZEOF_VOID_P;
1583 ret_regs = (uint64_t *) sp;
1584 #elif defined(__MIPS__)
1585 /* MIPS always uses 8 bytes to store the RA */
1586 datasp = sp + framesize - 8;
1587 #elif defined(__S390__)
1588 datasp = sp + framesize - 8;
/* NOTE(review): the 96-byte offset presumably skips the S390 ABI
   register save area — confirm against the stub generator. */
1589 ret_regs = (uint64_t *) (sp + 96);
1590 #elif defined(__I386__)
1591 datasp = sp + framesize;
1592 ret_regs = (uint64_t *) (sp + 2 * SIZEOF_VOID_P);
1593 #elif defined(__M68K__) || defined(__X86_64__)
1594 datasp = sp + framesize;
1595 ret_regs = (uint64_t *) sp;
1596 #elif defined(__POWERPC__)
1597 datasp = sp + framesize;
1598 ret_regs = (uint64_t *) (sp + LA_SIZE + 2 * SIZEOF_VOID_P);
1599 #elif defined(__POWERPC64__)
1600 datasp = sp + framesize;
1601 ret_regs = (uint64_t *) (sp + PA_SIZE + LA_SIZE + 2 * SIZEOF_VOID_P);
1603 vm_abort("codegen_finish_native_call: unsupported architecture");
1606 /* get data structures from stack */
/* The stackframeinfo sits directly below datasp, mirroring the
   layout created in codegen_start_native_call. */
1608 sfi = (stackframeinfo_t *) (datasp - sizeof(stackframeinfo_t));
1610 /* Remove current stackframeinfo from chain. */
1612 stacktrace_stackframeinfo_remove(sfi);
1614 #if defined(ENABLE_HANDLES)
1615 /* unwrap the return value from the local reference table */
1616 /* AFTER: removing the stackframeinfo */
1617 /* BEFORE: releasing the local reference table */
1619 localref_native_exit(m, ret_regs);
1622 /* get and unwrap the exception */
1623 /* AFTER: removing the stackframe info */
1624 /* BEFORE: releasing the local reference table */
1626 e = exceptions_get_and_clear_exception();
1629 #if defined(ENABLE_JNI)
1630 /* release JNI local references table for this thread */
1632 localref_frame_pop_all();
1633 localref_table_remove();
1636 #if !defined(NDEBUG)
1637 # if defined(__ALPHA__) || defined(__POWERPC__) || defined(__POWERPC64__) || defined(__X86_64__) || defined(__S390__)
1638 /* print the call-trace if necessary */
1639 /* AFTER: unwrapping the return value */
1641 if (opt_TraceJavaCalls)
1642 trace_java_call_exit(m, ret_regs);
1650 /* removecompilerstub **********************************************************
1652 Deletes a compilerstub from memory (simply by freeing it).
1654 *******************************************************************************/
1656 void removecompilerstub(u1 *stub)
1658 /* pass size 1 to keep the intern function happy */
1660 CFREE((void *) stub, 1);
1664 /* removenativestub ************************************************************
1666 Removes a previously created native-stub from memory.
1668 *******************************************************************************/
1670 void removenativestub(u1 *stub)
1672 /* pass size 1 to keep the intern function happy */
1674 CFREE((void *) stub, 1);
1678 /* codegen_reg_of_var **********************************************************
1680 This function determines a register, to which the result of an
1681 operation should go, when it is ultimately intended to store the
1682 result in pseudoregister v. If v is assigned to an actual
1683 register, this register will be returned. Otherwise (when v is
1684 spilled) this function returns tempregnum. If not already done,
1685 regoff and flags are set in the stack location.
1687 *******************************************************************************/
1689 s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
1693 /* Do we have to generate a conditional move? Yes, then always
1694 return the temporary register. The real register is identified
1695 during the store. */
1697 if (opcode & ICMD_CONDITION_MASK)
1701 if (!(v->flags & INMEMORY))
1702 return v->vv.regoff;
1708 /* codegen_reg_of_dst **********************************************************
1710 This function determines a register, to which the result of an
1711 operation should go, when it is ultimately intended to store the
1712 result in iptr->dst.var. If dst.var is assigned to an actual
1713 register, this register will be returned. Otherwise (when it is
1714 spilled) this function returns tempregnum. If not already done,
1715 regoff and flags are set in the stack location.
1717 *******************************************************************************/
1719 s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
1721 return codegen_reg_of_var(iptr->opc, VAROP(iptr->dst), tempregnum);
1725 /* codegen_emit_phi_moves ****************************************************
1727 Emits phi moves at the end of the basicblock.
1729 *******************************************************************************/
1731 #if defined(ENABLE_SSA)
/* Emit the move instructions required by the phi functions at the
   end of basicblock bptr (SSA back-end only). */
1732 void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
1745 /* Moves from phi functions with highest indices have to be */
1746 /* inserted first, since this is the order as is used for */
1747 /* conflict resolution */
1749 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* Each phi move is stored as a (destination, source) pair of
   lifetime indices. */
1750 lt_d = ls->phi_moves[bptr->nr][i][0];
1751 lt_s = ls->phi_moves[bptr->nr][i][1];
1752 #if defined(SSA_DEBUG_VERBOSE)
1754 printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
/* An UNUSED source means there is nothing to copy for this phi
   operand. */
1756 if (lt_s == UNUSED) {
1757 #if defined(SSA_DEBUG_VERBOSE)
1759 printf(" ... not processed \n");
1764 d = VAR(ls->lifetime[lt_d].v_index);
1765 s = VAR(ls->lifetime[lt_s].v_index);
/* NOTE(review): type == -1 apparently marks lifetimes that were
   joined (coalesced), so no move is needed — see the debug output
   below; confirm against the lifetime analysis code. */
1768 if (d->type == -1) {
1769 #if defined(SSA_DEBUG_VERBOSE)
1771 printf("...returning - phi lifetimes where joined\n");
1776 if (s->type == -1) {
1777 #if defined(SSA_DEBUG_VERBOSE)
1779 printf("...returning - phi lifetimes where joined\n");
/* Build a synthetic instruction for emit_copy: copy the source
   variable into the destination variable. */
1785 tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
1786 tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
1787 emit_copy(jd, &tmp_i);
1789 #if defined(SSA_DEBUG_VERBOSE)
/* Trace whether each side of the move lives in memory (M) or in a
   register (R). */
1790 if (compileverbose) {
1791 if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
1793 printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1795 else if (IS_INMEMORY(s->flags)) {
1797 printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
1799 else if (IS_INMEMORY(d->flags)) {
1801 printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1805 printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
1809 #endif /* defined(SSA_DEBUG_VERBOSE) */
1812 #endif /* defined(ENABLE_SSA) */
/* REMOVEME When we have exception handling in C. */

void *md_asm_codegen_get_pv_from_pc(void *ra)
{
	/* Thin wrapper around the machine-dependent implementation;
	   the md_asm_ prefix suggests it exists to be callable from
	   assembler code — confirm at the call sites. */

	return md_codegen_get_pv_from_pc(ra);
}
1824 * These are local overrides for various environment variables in Emacs.
1825 * Please do not remove this and leave it at the end of the file, where
1826 * Emacs will automagically detect them.
1827 * ---------------------------------------------------------------------
1830 * indent-tabs-mode: t
1834 * vim:noexpandtab:sw=4:ts=4: