1 /* src/vm/jit/optimizing/ssa3.c
4 CACAOVM - Verein zu Foerderung der freien virtuellen Machine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 SSA transformation PROTOTYPE based on:
26 Adding Static Single Assignment Form and a Graph Coloring Register
27 Allocator to the Java Hotspot Client Compiler, 2000
28 (http://www.ssw.uni-linz.ac.at/Research/Reports/Report15.html)
32 * Adapt for exception handling [done]
33 * Elimination of redundant PHI functions. [in progress]
34 * Handle also inout variables. [done]
35 * Create PHI functions lazily, when they are used for the first time
36 (I suspect that currently PHIs are created that are never used).
37 * REWRITE. The code was never designed for production. [done]
38 * Unify access to phi args.
41 #include "vm/jit/jit.h"
42 #include "vm/global.h"
43 #include "mm/memory.h"
44 #include "mm/dumpmemory.h"
45 #include "toolbox/list.h"
50 /*#define ELIMINATE_NOP_LOAD_STORE*/
53 /*#define SSA_VERBOSE */
56 __attribute__((always_inline))
59 /*** phi ********************************************************************/
63 PHI_FLAG_REDUNDANT_ALL,
64 PHI_FLAG_REDUNDANT_ONE
67 static inline void phi_set_flag(instruction *iptr, phi_flags_t flag) {
68 iptr->flags.bits |= (1 << flag);
71 static inline void phi_clear_flag(instruction *iptr, phi_flags_t flag) {
72 iptr->flags.bits &= ~(1 << flag);
75 static inline bool phi_has_flag(const instruction *iptr, phi_flags_t flag) {
76 return (iptr->flags.bits & (1 << flag)) != 0;
79 static inline instruction *phi_get_subst(instruction *iptr) {
80 return iptr->sx.s23.s2.iargs[-1];
83 static inline bool phi_has_subst(const instruction *iptr) {
84 return (iptr->sx.s23.s2.iargs[-1] != NULL);
88 static inline void phi_init(instruction *iptr, unsigned argcount, s4 index) {
93 iptr->dst.varindex = 0;
94 iptr->s1.argcount = argcount;
95 iptr->sx.s23.s2.iargs = DMNEW(instruction *, argcount + 1);
96 iptr->sx.s23.s2.iargs += 1;
97 iptr->sx.s23.s3.javaindex = index;
99 /* subst field - If non-null, the phi function shall be replaced by the
100 value produced by the subst instruction. */
101 iptr->sx.s23.s2.iargs[-1] = NULL;
104 for (i = 0; i < argcount; ++i) {
105 iptr->sx.s23.s2.iargs[i] = (instruction *)0x7fffffff;
110 #define phi_assert_opc(iptr) assert(iptr->opc == ICMD_PHI)
112 #define phi_assert_arg(iptr, arg) assert(arg < iptr->s1.argcount)
114 static inline s4 phi_arg_count(const instruction *iptr) {
115 phi_assert_opc(iptr);
116 return iptr->s1.argcount;
119 static inline instruction *phi_get_arg(const instruction *iptr, unsigned arg) {
120 phi_assert_opc(iptr);
121 phi_assert_arg(iptr, arg);
122 return iptr->sx.s23.s2.iargs[arg];
125 static inline s4 phi_get_arg_var(const instruction *iptr, unsigned arg) {
126 phi_assert_opc(iptr);
127 phi_assert_arg(iptr, arg);
128 return iptr->sx.s23.s2.iargs[arg]->dst.varindex;
131 static inline void phi_set_all_args(instruction *iptr, instruction *value) {
133 phi_assert_opc(iptr);
134 for (i = 0; i < iptr->s1.argcount; ++i) {
135 iptr->sx.s23.s2.iargs[i] = value;
139 static inline s4 phi_get_index(const instruction *iptr) {
140 phi_assert_opc(iptr);
141 return iptr->sx.s23.s3.javaindex;
144 static inline s4 phi_get_dst(const instruction *iptr) {
145 phi_assert_opc(iptr);
146 return iptr->dst.varindex;
149 static inline void phi_set_dst(instruction *iptr, s4 dst) {
150 phi_assert_opc(iptr);
151 iptr->dst.varindex = dst;
154 static inline bool phi_get_used(const instruction *iptr) {
155 phi_assert_opc(iptr);
156 return phi_has_flag(iptr, PHI_FLAG_USED);
159 static void phi_set_used(instruction *iptr) {
160 phi_assert_opc(iptr);
161 if (! phi_has_flag(iptr, PHI_FLAG_USED)) {
162 phi_set_flag(iptr, PHI_FLAG_USED);
163 /* TODO recurse into arguments */
167 static instruction *phi_resolve_use(instruction *use) {
169 while (use->opc == ICMD_PHI) {
170 if (phi_has_subst(use)) {
171 use = phi_get_subst(use);
180 static inline void phi_set_arg(instruction *iptr, unsigned arg, instruction *value) {
181 phi_assert_opc(iptr);
182 phi_assert_arg(iptr, arg);
183 assert(value != NULL);
184 iptr->sx.s23.s2.iargs[arg] = value;
187 static inline bool phi_is_redundant(const instruction *iptr) {
189 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ONE) ||
190 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ALL)
194 static inline void phi_create_copy(instruction *iptr, unsigned arg, instruction *copy) {
195 phi_assert_opc(iptr);
196 phi_assert_arg(iptr, arg);
197 copy->dst.varindex = phi_get_dst(iptr);
198 copy->s1.varindex = phi_resolve_use(phi_get_arg(iptr, arg))->dst.varindex;
199 if (copy->dst.varindex == copy->s1.varindex || phi_is_redundant(iptr)) {
200 copy->opc = ICMD_NOP;
202 copy->opc = ICMD_COPY;
207 static void phi_print(const instruction *iptr) {
210 printf("%d = phi(", iptr->dst.varindex);
211 for (i = 0; i < iptr->s1.argcount; ++i) {
212 use = phi_resolve_use(iptr->sx.s23.s2.iargs[i]);
214 printf("%d, ", use->dst.varindex);
223 #define FOR_EACH_PHI_USE_CAST(iptr, it, cast) \
225 (it) = cast (iptr)->sx.s23.s2.iargs; \
226 (it) != cast (iptr)->sx.s23.s2.iargs + (iptr)->s1.argcount; \
230 #define FOR_EACH_PHI_USE(iptr, it) \
231 FOR_EACH_PHI_USE_CAST(iptr, it, )
233 #define FOR_EACH_PHI_USE_CONST(iptr, it) \
234 FOR_EACH_PHI_USE_CAST(iptr, it, (const instruction *))
236 static void phi_calculate_redundancy(instruction *iptr) {
238 s4 dst = iptr->dst.varindex;
242 unsigned num_different = 0;
243 instruction *different;
245 if (phi_is_redundant(iptr)) return; /* XXX */
247 /* x = phi(x, y, x, x) ... PHI_FLAG_REDUNDANT_ONE
248 x = phi(x, x, x, x) ... PHI_FLAG_REDUNDANT_ALL */
250 FOR_EACH_PHI_USE(iptr, ituse) {
251 iuse = phi_resolve_use(*ituse);
254 use = iuse->dst.varindex;
259 if (num_different >= 2) {
260 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
261 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
266 if (num_different == 1) {
267 /* Set the subst field of the instruction.
268 I.e. the instruction will be replaced by the value produced by y. */
269 iptr->sx.s23.s2.iargs[-1] = different;
271 phi_set_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
272 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
273 } else if (num_different == 0) {
275 /*iptr->sx.s23.s2.iargs[-1] = iptr->sx.s23.s2.iargs[0];*/
277 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
278 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
284 /*** goto *******************************************************************/
286 static inline void goto_init(instruction *iptr, basicblock *dst) {
287 iptr->opc = ICMD_GOTO;
288 iptr->dst.block = dst;
291 /*** instruction ***********************************************************/
293 static void instruction_get_uses(const instruction *iptr, s4 *buf, s4 **puses, unsigned *puses_count) {
294 unsigned uses_count = 0;
296 switch (icmd_table[iptr->opc].dataflow) {
299 buf[2] = iptr->sx.s23.s3.varindex;
304 buf[1] = iptr->sx.s23.s2.varindex;
311 buf[0] = iptr->s1.varindex;
314 *puses_count = uses_count;
322 *puses = iptr->sx.s23.s2.args;
323 *puses_count = iptr->s1.argcount;
333 static inline void instruction_set_uses(instruction *iptr, s4 *buf, s4 *uses, unsigned uses_count) {
335 switch (uses_count) {
337 iptr->sx.s23.s3.varindex = buf[2];
339 iptr->sx.s23.s2.varindex = buf[1];
341 iptr->s1.varindex = buf[0];
346 /*** vars *******************************************************************/
348 #define VARS_CATEGORY_SHIFT 28
349 #define VARS_INDEX_MASK 0x0FFFFFFF
351 #define VARS_CATEGORY_LOCAL 0
352 #define VARS_CATEGORY_STACK 1
353 #define VARS_CATEGORY_OTHERS 2
355 #define VAR_TYPE_SUBSTITUED 666
357 #define OLD_INDEX_UNUSED -2
365 vars_item_t items[9000]; /* XXX hardcoded max */
371 static inline unsigned vars_add_item(vars_t *vs, const varinfo *item) {
372 unsigned i = vs->count;
375 vs->items[i].v = *item;
376 vs->items[i].old_index = OLD_INDEX_UNUSED;
377 return (vs->category << VARS_CATEGORY_SHIFT) | i;
380 static inline unsigned vars_add(vars_t *vs) {
381 unsigned i = vs->count;
384 return (vs->category << VARS_CATEGORY_SHIFT) | i;
387 static inline varinfo *vars_back(vars_t *vs) {
388 assert(vs->count > 0);
389 return &(vs->items[vs->count - 1].v);
392 static inline void vars_init(vars_t *vs, unsigned category) {
393 vs->max = sizeof(vs->items) / sizeof(vs->items[0]);
395 assert((category & 0x3) == category);
396 vs->category = category;
399 static inline unsigned vars_get_category(unsigned varindex) {
400 return (varindex >> VARS_CATEGORY_SHIFT);
403 static inline unsigned vars_get_index(unsigned varindex) {
404 return (varindex & VARS_INDEX_MASK);
407 static void vars_subst(vars_t *vs, unsigned varindex, unsigned replacementindex) {
408 varindex = vars_get_index(varindex);
409 replacementindex = vars_get_index(replacementindex);
411 vs->items[varindex].v.type = VAR_TYPE_SUBSTITUED;
412 vs->items[varindex].v.vv.ii[1] = replacementindex;
415 static unsigned vars_resolve_subst(const vars_t *vs, unsigned varindex) {
417 unsigned loop_ctr = 0;
419 varindex = vars_get_index(varindex);
421 if (vs->items[varindex].v.type == VAR_TYPE_SUBSTITUED) /*fprintf(stderr, "*")*/;
423 while (vs->items[varindex].v.type == VAR_TYPE_SUBSTITUED) {
424 assert(loop_ctr++ != vs->count);
425 varindex = vs->items[varindex].v.vv.ii[1];
428 return (vs->category << VARS_CATEGORY_SHIFT) | varindex ;
431 static void vars_copy_to_final(vars_t *vs, varinfo *dst) {
432 const vars_item_t *it;
435 for (it = vs->items; it != vs->items + vs->count; ++it, ++dst) {
441 /* Eliminate VAR_TYPE_SUBSTITUED as it leads to problems. */
443 if (dst->type == VAR_TYPE_SUBSTITUED) {
444 subst = vars_get_index(vars_resolve_subst(vs, it - vs->items));
445 dst->type = vs->items[subst].v.type;
451 static void vars_import(vars_t *vs, varinfo *v, unsigned count, s4 old_index) {
454 assert((vs->count + count) <= vs->max);
456 it = vs->items + vs->count;
459 while (count-- > 0) {
461 it->old_index = old_index;
468 static inline void vars_record_old_index(vars_t *vs, unsigned varindex, s4 old_index) {
470 varindex = vars_get_index(varindex);
472 assert(varindex < vs->count);
474 item = vs->items + varindex;
477 item->old_index == OLD_INDEX_UNUSED ||
478 item->old_index == old_index
481 item->old_index = old_index;
484 static inline s4 vars_get_old_index(vars_t *vs, unsigned varindex) {
487 varindex = vars_get_index(varindex);
489 assert(varindex < vs->count);
490 old = vs->items[varindex].old_index;
491 assert(old != OLD_INDEX_UNUSED);
496 /*** phis *******************************************************************/
504 static inline void phis_init(phis_t *ps, unsigned max) {
507 ps->items = DMNEW(instruction, max);
510 static inline instruction *phis_add(phis_t *ps) {
511 unsigned i = ps->count;
514 return ps->items + i;
517 static inline instruction *phis_get(const phis_t *ps, unsigned i) {
518 assert(i < ps->count);
519 return ps->items + i;
522 static inline bool phis_contains(const phis_t *ps, const instruction *phi) {
523 return (ps->items <= phi) && (phi < (ps->items + ps->max));
526 #define FOR_EACH_PHI_FUNCTION_(ps, it) \
527 for ((it) = (ps)->items; (it) != (ps)->items + (ps)->count; ++(it)) \
529 #define FOR_EACH_PHI_FUNCTION(ps, it) \
530 FOR_EACH_PHI_FUNCTION_(ps, it) if (!phi_is_redundant((it)))
533 FIXME() inline void phis_print(const phis_t *ps) {
534 const instruction *iptr;
535 FOR_EACH_PHI_FUNCTION(ps, iptr) {
541 static inline unsigned phis_copy_to(const phis_t *ps, instruction *dst) {
543 instruction *out = dst;
545 FOR_EACH_PHI_FUNCTION(ps, it) {
552 /*** state_array ************************************************************/
559 static inline void state_array_init(state_array_t *sa, unsigned count) {
564 static inline bool state_array_has_items(const state_array_t *sa) {
565 return (sa->items != NULL);
568 static inline s4 state_array_get_var(const state_array_t *sa, unsigned index) {
569 assert(index < sa->count);
570 assert(sa->items[index]);
571 return sa->items[index]->dst.varindex;
574 static inline instruction *state_array_get(const state_array_t *sa, unsigned index) {
575 assert(index < sa->count);
576 return sa->items[index];
579 static inline void state_array_set(const state_array_t *sa, unsigned index, instruction *value) {
580 assert(index < sa->count);
581 sa->items[index] = value;
584 static inline void state_array_copy(state_array_t *sa, state_array_t *other) {
585 assert(sa->count == other->count);
586 MCOPY(sa->items, other->items, instruction *, sa->count);
589 #define state_array_assert_items(sa) assert(state_array_has_items(sa) || (sa->count == 0))
590 #define state_array_assert_no_items(sa) assert(! state_array_has_items(sa))
592 static inline void state_array_allocate_items(state_array_t *sa) {
593 sa->items = DMNEW(instruction *, sa->count);
594 MZERO(sa->items, instruction *, sa->count);
597 /*** basicblock_chain *******************************************************/
602 } basicblock_chain_t;
604 static void basicblock_chain_init(basicblock_chain_t *bbc) {
609 #define basicblock_chain_clear basicblock_chain_init
611 static void basicblock_chain_add(basicblock_chain_t *bbc, basicblock *bb) {
612 if (bbc->first == NULL) {
613 assert(bbc->last == NULL);
614 assert(bb->next == NULL);
618 assert(bbc->last->next == NULL);
619 bbc->last->next = bb;
624 static inline basicblock *basicblock_chain_front(basicblock_chain_t *bbc) {
629 static inline basicblock *basicblock_chain_back(basicblock_chain_t *bbc) {
634 static inline bool basicblock_chain_empty(const basicblock_chain_t *bbc) {
635 return bbc->first == NULL;
638 /*** exception_entry_chain ***************************************************/
641 exception_entry *first;
642 exception_entry *last;
643 } exception_entry_chain_t;
645 static void exception_entry_chain_init(exception_entry_chain_t *eec) {
650 #define exception_entry_chain_clear exception_entry_chain_init
652 static void exception_entry_chain_add(exception_entry_chain_t *eec, exception_entry *ee) {
653 if (eec->first == NULL) {
657 eec->last->next = ee;
658 eec->last->down = ee;
663 static inline bool exception_entry_chain_empty(const exception_entry_chain_t *eec) {
664 return eec->first == NULL;
667 static inline exception_entry *exception_entry_chain_back(exception_entry_chain_t *eec) {
672 static inline exception_entry *exception_entry_chain_front(exception_entry_chain_t *eec) {
677 /*** traversal **************************************************************/
680 unsigned (*var_num_to_index)(void *vp, s4 var);
681 varinfo *(*index_to_initial_var)(void *vp, unsigned index);
682 varinfo *(*var_num_to_varinfo)(void *vp, s4 var);
683 unsigned (*variables_count)(void *vp);
688 state_array_t *state_array;
690 traversal_ops_t *ops;
693 /*** basicblock_info ********************************************************/
695 typedef struct basicblock_info {
699 unsigned backward_branches;
704 basicblock_chain_t *subbasicblocks;
706 unsigned complete_predecessors;
708 #if defined(SSA_VERIFY)
709 unsigned num_phi_elimination;
714 /*** traversal **************************************************************/
716 void traversal_init(traversal_t *t, unsigned count, void *ops_vp, traversal_ops_t *ops) {
717 t->phis = DNEW(phis_t);
718 phis_init(t->phis, count);
720 t->state_array = DNEW(state_array_t);
721 state_array_init(t->state_array, count);
727 instruction *traversal_create_phi(traversal_t *t, vars_t *v, unsigned argcount, s4 index) {
728 instruction *phi = phis_add(t->phis);
731 phi_init(phi, argcount, index);
732 dst = vars_add_item(v, t->ops->index_to_initial_var(t->ops_vp, index));
733 phi_set_dst(phi, dst);
735 state_array_set(t->state_array, index, phi);
737 vars_record_old_index(v, phi->dst.varindex, index);
742 static void traversal_rename_def(traversal_t *t, vars_t *vars, instruction *iptr) {
747 state_array_assert_items(t->state_array);
749 v = t->ops->var_num_to_varinfo(t->ops_vp, iptr->dst.varindex);
750 index = t->ops->var_num_to_index(t->ops_vp, iptr->dst.varindex);
752 old = iptr->dst.varindex;
753 iptr->dst.varindex = vars_add_item(vars, v);
754 state_array_set(t->state_array, index, iptr);
756 vars_record_old_index(vars, iptr->dst.varindex, index);
759 static void traversal_rename_use(traversal_t *t, vars_t *vars, s4 *puse) {
763 state_array_assert_items(t->state_array);
765 index = t->ops->var_num_to_index(t->ops_vp, *puse);
767 assert(state_array_get(t->state_array, index));
769 *puse = state_array_get_var(t->state_array, index);
771 vars_record_old_index(vars, *puse, index);
774 static inline unsigned traversal_variables_count(traversal_t *t) {
775 return t->ops->variables_count(t->ops_vp);
778 unsigned local_var_num_to_index(void *vp, s4 var) {
779 return (unsigned)var;
782 varinfo *local_index_to_initial_var(void *vp, unsigned index) {
783 jitdata *jd = (jitdata *)vp;
784 return jd->var + index;
787 varinfo *local_var_num_to_varinfo(void *vp, s4 var) {
788 jitdata *jd = (jitdata *)vp;
789 return jd->var + var;
792 unsigned local_variables_count(void *vp) {
793 jitdata *jd = (jitdata *)vp;
794 return jd->localcount;
797 traversal_ops_t traversal_local_ops = {
798 local_var_num_to_index,
799 local_index_to_initial_var,
800 local_var_num_to_varinfo,
801 local_variables_count
804 unsigned inout_var_num_to_index(void *vp, s4 var) {
805 basicblock *bb = (basicblock *)vp;
808 for (i = 0; i < bb->indepth; ++i) {
809 if (bb->invars[i] == var) {
814 for (i = 0; i < bb->outdepth; ++i) {
815 if (bb->outvars[i] == var) {
823 varinfo *inout_index_to_initial_var(void *vp, unsigned index) {
824 basicblock *bb = (basicblock *)vp;
825 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
826 assert(index < bb->indepth);
827 return jd->var + bb->invars[index];
830 varinfo *inout_var_num_to_varinfo(void *vp, s4 var) {
831 basicblock *bb = (basicblock *)vp;
832 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
833 return jd->var + var;
836 unsigned inout_variables_count(void *vp) {
837 basicblock *bb = (basicblock *)vp;
841 traversal_ops_t traversal_inout_ops = {
842 inout_var_num_to_index,
843 inout_index_to_initial_var,
844 inout_var_num_to_varinfo,
845 inout_variables_count
848 /*** basicblock_info ********************************************************/
850 void basicblock_info_init(basicblock_info_t *bbi, basicblock *bb, jitdata *jd) {
851 MZERO(bbi, basicblock_info_t, 1);
853 bbi->locals = DNEW(traversal_t);
854 traversal_init(bbi->locals, jd->localcount, jd, &traversal_local_ops);
856 bbi->stack = DNEW(traversal_t);
857 traversal_init(bbi->stack, jd->maxinterfaces, bb, &traversal_inout_ops);
859 bbi->subbasicblocks = DNEW(basicblock_chain_t);
860 basicblock_chain_init(bbi->subbasicblocks);
863 /*** basicblock *************************************************************/
865 static inline basicblock_info_t *basicblock_info(basicblock *bb) {
866 return (basicblock_info_t *)(bb->vp);
869 #define bb_info basicblock_info
871 static unsigned basicblock_get_predecessor_count(basicblock *bb) {
875 ret = bb->predecessorcount;
877 FOR_EACH_EXPREDECESSOR(bb, itpred) {
878 ret += (*itpred)->exouts;
884 static unsigned basicblock_get_predecessor_index(basicblock *from, basicblock *to) {
888 FOR_EACH_PREDECESSOR(to, itpred) {
889 if (*itpred == from) break;
893 assert(j != to->predecessorcount);
898 static unsigned basicblock_get_ex_predecessor_index(basicblock *from, unsigned pei, basicblock *to) {
902 j = to->predecessorcount;
904 FOR_EACH_EXPREDECESSOR(to, itpred) {
905 if ((*itpred)->nr == from->nr) {
909 j += (*itpred)->exouts;
916 /*** ssa_info ***************************************************************/
918 typedef struct ssa_info {
927 basicblock_chain_t *new_blocks;
939 unsigned keep_in_out:1;
941 } ssa_info, ssa_info_t;
943 void ssa_info_init(ssa_info_t *ssa, jitdata *jd) {
946 ssa->locals = DNEW(vars_t);
947 vars_init(ssa->locals, VARS_CATEGORY_LOCAL);
949 /* Initialize first version of locals, that is always available. */
950 vars_import(ssa->locals, jd->var, jd->localcount, 0);
952 ssa->stack = DNEW(vars_t);
953 vars_init(ssa->stack, VARS_CATEGORY_STACK);
955 ssa->others = DNEW(vars_t);
956 vars_init(ssa->others, VARS_CATEGORY_OTHERS);
958 ssa->new_blocks = DNEW(basicblock_chain_t);
959 basicblock_chain_init(ssa->new_blocks);
961 ssa->original.maxlocals = jd->maxlocals;
962 ssa->original.maxinterfaces = jd->maxinterfaces;
963 ssa->original.local_map = jd->local_map;
964 ssa->original.var = jd->var;
965 ssa->original.vartop = jd->vartop;
966 ssa->original.varcount = jd->varcount;
967 ssa->original.localcount = jd->localcount;
969 ssa->keep_in_out = false;
972 /*** others_mapping *********************************************************/
974 static inline void others_mapping_set(ssa_info *ssa, s4 var, s4 new_var) {
975 ssa->jd->var[var].vv.ii[1] = new_var;
978 static inline s4 others_mapping_get(const ssa_info *ssa, s4 var) {
979 return ssa->jd->var[var].vv.ii[1];
982 /*** code *******************************************************************/
984 void fix_exception_handlers(jitdata *jd) {
986 basicblock *exh = NULL;
992 basicblock_chain_t chain;
993 basicblock *last = NULL;
994 basicblock *marker = NULL;
998 if (jd->exceptiontablelength == 0) {
1002 basicblock_chain_init(&chain);
1004 /* Remember, where we started adding IO variables. */
1006 vartop = jd->vartop;
1008 /* For each exception handler block, create one block with a prologue block */
1010 FOR_EACH_BASICBLOCK(jd, bptr) {
1011 if (bptr->type == BBTYPE_EXH) {
1015 +---- EXH (exh)-------+
1018 +---------------------+
1020 === TRANSFORMED TO ===>
1022 +---- PROL (exh) -------+
1024 | GETEXECEPTION => OU3 |
1027 +-----------------------+
1029 +---- REAL_EXH (std) -+
1032 +---------------------+
1036 bptr->type = BBTYPE_STD;
1037 bptr->predecessorcount = 0; /* legacy */
1039 /* Create basicblock with 2 instructions */
1041 exh = DNEW(basicblock);
1042 MZERO(exh, basicblock, 1);
1044 iptr = DMNEW(instruction, 2);
1045 MZERO(iptr, instruction, 2);
1047 /* Create outvars */
1049 exh->outdepth = bptr->indepth;
1051 if (exh->outdepth > 0) {
1052 exh->outvars = DMNEW(s4, exh->outdepth);
1053 for (i = 0; i < exh->outdepth; ++i) {
1054 exh->outvars[i] = vartop++;
1060 exh->indepth = exh->outdepth - 1;
1061 exh->invars = exh->outvars;
1064 /* Create new outvar */
1066 assert(jd->vartop < jd->varcount);
1069 jd->var[v].type = TYPE_ADR;
1070 jd->var[v].flags = INOUT;
1073 exh->nr = jd->basicblockcount;
1074 jd->basicblockcount += 1;
1076 exh->type = BBTYPE_EXH;
1081 exh->outvars = DNEW(s4);
1082 exh->outvars[0] = v;
1084 exh->predecessorcount = -1; /* legacy */
1085 exh->flags = BBFINISHED;
1086 exh->method = jd->m;
1088 basicblock_chain_add(&chain, exh);
1092 iptr->opc = ICMD_GETEXCEPTION;
1093 iptr->dst.varindex = exh->outvars[exh->outdepth - 1];
1096 /* Goto real exception handler */
1098 goto_init(iptr, bptr);
1105 if (bptr->next == NULL) {
1112 /* We need to allocate the new iovars in the var array */
1114 avail_vars = (jd->varcount - jd->vartop);
1115 add_vars = (vartop - jd->vartop);
1117 if (add_vars > avail_vars) {
1118 add_vars -= avail_vars;
1119 jd->var = DMREALLOC(jd->var, varinfo, jd->varcount, jd->varcount + add_vars);
1120 jd->varcount += add_vars;
1123 jd->vartop = vartop;
1125 /* Put the chain of exception handlers between just before the last
1126 basic block (end marker). */
1128 if (! basicblock_chain_empty(&chain)) {
1129 marker->nr = jd->basicblockcount;
1130 basicblock_chain_back(&chain)->next = marker;
1131 last->next = basicblock_chain_front(&chain);
1133 assert(last->nr + 1 == basicblock_chain_front(&chain)->nr);
1134 assert(marker->nr == jd->basicblockcount);
1137 /* Replace all exception handlers in exception table with their respective
1140 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1141 assert(ee->handler->vp);
1144 exh = (basicblock *)ee->handler->vp;
1148 /* Set up IO variables in newly craeted exception handlers. */
1150 for (i = 0; i < exh->outdepth; ++i) {
1151 v = exh->outvars[i];
1153 jd->var[v].flags = INOUT;
1154 jd->var[v].type = jd->var[ bptr->invars[i] ].type;
1160 void unfix_exception_handlers(jitdata *jd) {
1161 basicblock *bptr, *exh;
1163 exception_entry *ee;
1164 #if !defined(NDEBUG)
1168 FOR_EACH_BASICBLOCK(jd, bptr) {
1169 if (bptr->type == BBTYPE_EXH) {
1170 assert(bptr->iinstr[1].opc == ICMD_GOTO);
1171 exh = bptr->iinstr[1].dst.block;
1173 bptr->type = BBDELETED;
1177 exh->type = BBTYPE_EXH;
1180 /* bptr is no more a predecessor of exh */
1182 for (i = 0; i < exh->predecessorcount; ++i) {
1183 if (exh->predecessors[i] == bptr) {
1184 exh->predecessors[i] = exh->predecessors[exh->predecessorcount - 1];
1185 exh->predecessorcount -= 1;
1186 #if !defined(NDEBUG)
1200 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1201 assert(ee->handler->vp);
1202 ee->handler = ee->handler->vp;
1206 /*** ssa_enter ***************************************************************/
1208 static void ssa_enter_mark_loops_intern(basicblock *bb, unsigned num_branches) {
1209 basicblock_info_t *bbi = bb_info(bb);
1210 basicblock **itsucc;
1212 if (! bbi->visited) {
1213 bbi->visited = true;
1215 FOR_EACH_SUCCESSOR(bb, itsucc) {
1216 /* There is a single branch from bb into the successor. */
1217 ssa_enter_mark_loops_intern(*itsucc, 1);
1219 FOR_EACH_EXHANDLER(bb, itsucc) {
1220 /* For exception handler type successors,
1221 we count a branch into the exception handler from every PEI. */
1222 ssa_enter_mark_loops_intern(*itsucc, bb->exouts);
1224 bbi->active = false;
1225 } else if (bbi->active) {
1226 bbi->backward_branches += num_branches;
1230 static inline void ssa_enter_mark_loops(basicblock *bb) {
1231 ssa_enter_mark_loops_intern(bb, 1);
1234 static void ssa_enter_merge(
1238 unsigned predecessor_index,
1242 basicblock_info_t *dsti = bb_info(bdst);
1243 unsigned predecessor_count = basicblock_get_predecessor_count(bdst);
1248 /* We are merging for the first time into this block. */
1250 if (! state_array_has_items(dst->state_array)) {
1252 state_array_allocate_items(dst->state_array);
1254 if (dsti->backward_branches > 0) {
1255 /* Loop header, create phi functions for all variables. */
1256 for (i = 0; i < traversal_variables_count(dst); ++i) {
1257 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1258 /* No need to init, they will only be filled in later. */
1261 state_array_copy(dst->state_array, src->state_array);
1266 /* We are merging another block. */
1268 /* Process every variable. */
1270 for (i = 0; i < traversal_variables_count(dst); ++i) {
1271 if (dsti->traversed) {
1273 /* Back edge, all phi functions are already created.
1274 We only need to set their arguments. */
1277 phis_get(dst->phis, i),
1279 state_array_get(src->state_array, i)
1282 } else if (state_array_get(dst->state_array, i) != state_array_get(src->state_array, i)) {
1284 /* A different definition of this var reaches the block.
1285 We need to create a phi function. */
1287 if (phis_contains(dst->phis, state_array_get(dst->state_array, i))) {
1288 /* There is already a phi function created for this var.
1289 No need to create one. */
1291 /* Create a new phi function.
1292 Set all arguments to old value in state array. */
1293 old = state_array_get(dst->state_array, i);
1294 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1295 phi_set_all_args(phi, old);
1298 /* Set argument of phi function. */
1301 state_array_get(dst->state_array, i),
1303 state_array_get(src->state_array, i)
1309 static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb);
1310 static bool ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *bbi);
1312 #if defined(SSA_VERIFY)
1313 static void ssa_enter_verify_no_redundant_phis(ssa_info_t *ssa) {
1315 basicblock_info_t *bbi;
1321 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1322 if (basicblock_reached(bptr)) {
1323 bbi = bb_info(bptr);
1324 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1325 if (! phi_is_redundant(itph)) {
1326 phi_calculate_redundancy(itph);
1327 assert(! phi_is_redundant(itph));
1330 FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
1331 if (! phi_is_redundant(itph)) {
1332 phi_calculate_redundancy(itph);
1333 assert(! phi_is_redundant(itph));
1341 static void ssa_enter_traverse(ssa_info_t *ssa, basicblock *bb) {
1342 basicblock **itsucc;
1343 basicblock_info_t *succi;
1344 basicblock_info_t *bbi = bb_info(bb);
1345 unsigned predecessor_count;
1349 ssa_enter_process_block(ssa, bb);
1353 FOR_EACH_SUCCESSOR(bb, itsucc) {
1355 succi = bb_info(*itsucc);
1361 basicblock_get_predecessor_index(bb, *itsucc),
1369 basicblock_get_predecessor_index(bb, *itsucc),
1373 succi->complete_predecessors += 1;
1375 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1378 succi->complete_predecessors ==
1379 (predecessor_count - succi->backward_branches)
1381 ssa_enter_traverse(ssa, *itsucc);
1385 (succi->complete_predecessors == predecessor_count) &&
1386 (succi->backward_branches > 0)
1388 #if defined(SSA_VERIFY)
1389 assert(succi->num_phi_elimination < 2);
1390 succi->num_phi_elimination += 1;
1392 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1393 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1397 FOR_EACH_EXHANDLER(bb, itsucc) {
1399 succi = bb_info(*itsucc);
1401 succi->complete_predecessors += bb->exouts; /* XXX this might be 0 */
1403 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1406 succi->complete_predecessors ==
1407 (predecessor_count - succi->backward_branches)
1409 ssa_enter_traverse(ssa, *itsucc);
1413 (succi->complete_predecessors == predecessor_count) &&
1414 (succi->backward_branches > 0)
1416 #if defined(SSA_VERIFY)
1417 assert(succi->num_phi_elimination < 2);
1418 succi->num_phi_elimination += 1;
1420 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1421 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1428 static void ssa_enter_process_pei(ssa_info *ssa, basicblock *bb, unsigned pei) {
1429 basicblock_info_t *bbi = bb_info(bb);
1430 basicblock_info_t *succi;
1431 basicblock **itsucc;
1433 FOR_EACH_EXHANDLER(bb, itsucc) {
1434 succi = bb_info(*itsucc);
1440 basicblock_get_ex_predecessor_index(bb, pei, *itsucc),
1448 basicblock_get_ex_predecessor_index(bb, pei, *itsucc),
1454 static FIXME(bool) ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *FIXME(bbi)) {
1462 FOR_EACH_PHI_FUNCTION(t->phis, itph) {
1464 phi_calculate_redundancy(itph);
1466 /* If the phi function is redundant,
1467 make the variable it defines point to the value defined by the substituing
1470 if (phi_is_redundant(itph)) {
1471 vars_subst(vs, itph->dst.varindex, phi_get_subst(itph)->dst.varindex);
1472 assert(bbi->backward_branches > 0);
1481 static void ssa_enter_post_eliminate_redundand_phi(
1487 phi_calculate_redundancy(phi);
1488 phi_set_flag(PHI_FLAG_REDUNDANCY_CHECKED);
1490 /* if redundancy changed and phi function escapes block */
1492 /* for each successor */
1495 static void ssa_enter_post_eliminate_redundant_phis(ssa_info_t *ssa) {
1497 basicblock_info_t *bbi;
1500 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1501 bbi = bb_info(bptr);
1503 if (bbi->backward_branches > 0) {
1504 /* Redundant phi functions have the left hand side as operand.
1505 This can happen by definition only in loop headers. */
1507 FOR_EACH_PHI_FUNCTION(bbi->locals, itph) {
1508 if (! phi_has_flag(PHI_FLAG_REDUNDANCY_CHECKED)) {
1509 /* Calculate redundancy? */
1511 /* If yes recurse? */
1515 FOR_EACH_PHI_FUNCTION(bbi->stack, itph) {
1522 static void ssa_enter_init_locals(state_array_t *sa) {
1526 for (i = 0; i < sa->count; ++i) {
1527 iptr = DNEW(instruction);
1528 iptr->opc = ICMD_NOP;
1529 iptr->dst.varindex = i;
1530 state_array_set(sa, i, iptr);
1534 static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb) {
1535 basicblock_info_t *bbi = bb_info(bb);
1540 unsigned uses_count;
1543 /* Every basic block can be traversed only once. */
1545 assert(! bbi->traversed);
1546 bbi->traversed = true;
1548 /* The root basicblock needs special treatment. */
1550 if (bb->predecessorcount == 0 && bb->type != BBTYPE_EXH) {
1551 state_array_assert_no_items(bbi->locals->state_array);
1552 state_array_allocate_items(bbi->locals->state_array);
1553 ssa_enter_init_locals(bbi->locals->state_array);
1555 state_array_assert_no_items(bbi->stack->state_array);
1556 state_array_allocate_items(bbi->stack->state_array);
1560 /* Exception handlers have a clean stack. */
1562 /* Not true with inlining. */
1564 if (bb->type == BBTYPE_EXH) {
1565 state_array_assert_no_items(bbi->stack->state_array);
1566 state_array_allocate_items(bbi->stack->state_array);
1570 /* Some in/out vars get marked as INOUT in simplereg,
1571 and are not marked at this point.
1572 Mark them manually. */
1574 for (ituse = bb->invars; ituse != bb->invars + bb->indepth; ++ituse) {
1575 if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
1578 ssa->jd->var[*ituse].flags |= INOUT;
1579 ssa->jd->var[*ituse].flags &= ~PREALLOC;
1582 for (ituse = bb->outvars; ituse != bb->outvars + bb->outdepth; ++ituse) {
1583 if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
1586 ssa->jd->var[*ituse].flags |= INOUT;
1587 ssa->jd->var[*ituse].flags &= ~PREALLOC;
1590 /* Process instructions */
1592 state_array_assert_items(bbi->locals->state_array);
1594 FOR_EACH_INSTRUCTION(bb, iptr) {
1596 #if defined(ELIMINATE_NOP_LOAD_STORE)
1598 /* Kill NOP instructions of the form:
1601 As they create a lot of unnecessary definitions.
1602 For performance, definitely eliminate them. However, keeping them is a
1607 (icmd_table[iptr->opc].dataflow == DF_LOAD) ||
1608 (icmd_table[iptr->opc].dataflow == DF_STORE)
1610 if (iptr->dst.varindex == iptr->s1.varindex) {
1611 iptr->opc = ICMD_NOP;
1617 if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
1618 ssa_enter_process_pei(ssa, bb, pei++);
1621 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
1623 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
1624 if (var_is_local(ssa->jd, *ituse)) {
1625 traversal_rename_use(
1630 } else if (var_is_inout(ssa->jd, *ituse)) {
1631 traversal_rename_use(
1637 *ituse = others_mapping_get(ssa, *ituse);
1641 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
1643 if (instruction_has_dst(iptr)) {
1644 if (var_is_local(ssa->jd, iptr->dst.varindex)) {
1645 traversal_rename_def(
1650 } else if (var_is_inout(ssa->jd, iptr->dst.varindex)) {
1651 traversal_rename_def(
1657 old = iptr->dst.varindex;
1658 iptr->dst.varindex = vars_add_item(
1660 ssa->jd->var + iptr->dst.varindex
1662 vars_record_old_index(ssa->others, iptr->dst.varindex, old);
1663 others_mapping_set(ssa, old, iptr->dst.varindex);
1671 [locals.........................][interfaces][others]
1672 [original locals][version > 1 ]
1673 <--------------- new locals --------------->
1676 static void ssa_enter_export_variables(ssa_info *ssa) {
1681 jitdata *jd = ssa->jd;
1684 vartop = ssa->locals->count + ssa->stack->count + ssa->others->count;
1685 vars = DMNEW(varinfo, vartop);
1687 vars_copy_to_final(ssa->locals, vars);
1688 vars_copy_to_final(ssa->stack, vars + ssa->locals->count);
1689 vars_copy_to_final(ssa->others, vars + ssa->locals->count + ssa->stack->count);
1692 jd->vartop = jd->varcount = vartop;
1694 /* Grow local map to accomodate all new locals and iovars.
1695 But keep the local map for version 1 of locals, that contains the holes. */
1699 5 * (jd->maxlocals + ssa->locals->count + ssa->stack->count - jd->localcount)
1702 MCOPY(local_map, jd->local_map, s4, 5 * jd->maxlocals);
1704 jd->local_map = local_map;
1706 it = jd->local_map + (jd->maxlocals * 5); /* start adding entries here */
1708 /* Add version > 1 of all locals */
1710 for (i = jd->localcount; i < ssa->locals->count; ++i) {
1711 for (j = 0; j < 5; ++j) {
1712 if (jd->var[i].type != j) {
1721 /* Add all io vars. */
1723 for (i = ssa->locals->count; i < ssa->locals->count + ssa->stack->count; ++i) {
1724 for (j = 0; j < 5; ++j) {
1725 if (jd->var[i].type != j) {
1736 jd->maxlocals += (ssa->locals->count + ssa->stack->count - jd->localcount);
1737 jd->localcount = ssa->locals->count + ssa->stack->count;
1739 /* Eliminate interfaces. */
1741 jd->maxinterfaces = 0;
1745 static void ssa_enter_export_phis(ssa_info_t *ssa) {
1747 basicblock_info_t *bbi;
1750 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1751 bbi = bb_info(bptr);
1753 bptr->phis = DMNEW(instruction, bbi->locals->phis->count + bbi->stack->phis->count);
1757 dst += phis_copy_to(bbi->locals->phis, dst);
1759 dst += phis_copy_to(bbi->stack->phis, dst);
1761 bptr->phicount = dst - bptr->phis;
1767 static inline void ssa_enter_eliminate_category(ssa_info_t *ssa, s4 *pvar) {
1768 switch (vars_get_category(*pvar)) {
1769 case VARS_CATEGORY_LOCAL:
1770 *pvar = vars_get_index(vars_resolve_subst(ssa->locals, *pvar));
1772 case VARS_CATEGORY_STACK:
1773 *pvar = vars_get_index(vars_resolve_subst(ssa->stack, *pvar)) + ssa->locals->count;
1775 case VARS_CATEGORY_OTHERS:
1776 *pvar = vars_get_index(*pvar) + ssa->locals->count + ssa->stack->count;
1782 void ssa_enter_eliminate_categories(ssa_info_t *ssa) {
1786 unsigned uses_count;
1787 basicblock_info_t *bbi;
1790 FOR_EACH_BASICBLOCK(ssa->jd, bb) {
1794 if (! ssa->keep_in_out) {
1800 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1801 ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));
1804 FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
1805 ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));
1809 FOR_EACH_INSTRUCTION(bb, iptr) {
1810 if (instruction_has_dst(iptr)) {
1811 ssa_enter_eliminate_category(ssa, &(iptr->dst.varindex));
1813 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
1814 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
1815 ssa_enter_eliminate_category(ssa, ituse);
1817 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
1822 static void ssa_enter_create_phi_graph(ssa_info *ssa) {
1824 basicblock_info_t *bbi;
1826 instruction **ituse;
1828 char path[PATH_MAX], *ppath;
1831 snprintf(path, PATH_MAX, "|tmp|graphs|%s.%s.dot", ssa->jd->m->clazz->name->text, ssa->jd->m->name->text);
1832 for (ppath = path; *ppath; ++ppath) {
1833 if (*ppath == '|') *ppath = '/';
1834 else if (*ppath == '/') *ppath = '.';
1837 f = fopen(path, "w");
1839 if (f == NULL) return;
1841 fprintf(f, "digraph G {\n");
1843 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1844 bbi = bb_info(bptr);
1846 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1848 FOR_EACH_PHI_USE(itph, ituse) {
1849 if ((*ituse)->opc == ICMD_PHI) {
1850 fprintf(f, "%d -> %d;\n", (*ituse)->dst.varindex, itph->dst.varindex);
1863 static basicblock *ssa_leave_create_transition_block_intern(
1867 unsigned predecessor_index,
1868 unsigned reserved_insns
1873 basicblock_info_t *toi;
1877 /* Create basicblock and instruction array. */
1879 bb = DNEW(basicblock);
1880 MZERO(bb, basicblock, 1);
1882 bb->nr = ssa->jd->basicblockcount;
1883 ssa->jd->basicblockcount += 1;
1885 bb->method = ssa->jd->m;
1886 bb->type = BBTYPE_STD;
1889 toi->locals->phis->count +
1890 toi->stack->phis->count +
1892 bb->iinstr = DMNEW(instruction, bb->icount);
1893 MZERO(bb->iinstr, instruction, bb->icount);
1895 /* Populate instruction array. */
1897 iptr = bb->iinstr + reserved_insns;
1899 /* Add phi moves. */
1901 FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
1902 phi_create_copy(itph, predecessor_index, iptr++);
1905 FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
1906 phi_create_copy(itph, predecessor_index, iptr++);
1909 /* Add goto to real block. */
1911 goto_init(iptr, to);
1913 /* Add basicblock to chain of newly created basicblocks. */
1915 basicblock_chain_add(ssa->new_blocks, bb);
1920 static inline basicblock *ssa_leave_create_transition_block(
1925 return ssa_leave_create_transition_block_intern(
1929 basicblock_get_predecessor_index(from, to),
1934 static void ssa_leave_create_fallthrough(ssa_info *ssa, basicblock *bptr) {
1935 unsigned predecessor_index;
1936 basicblock_info_t *toi;
1941 if (bptr->next == NULL) {
1942 /* No fallthrough. */
1946 predecessor_index = basicblock_get_predecessor_index(bptr, bptr->next);
1947 toi = bb_info(bptr->next);
1951 /* Resize instruction array to accomodate all phi moves. */
1953 icount = bptr->icount + toi->locals->phis->count + toi->stack->phis->count;
1955 bptr->iinstr = DMREALLOC(
1962 iptr = bptr->iinstr + bptr->icount;
1963 bptr->icount = icount;
1965 /* Create phi moves. */
1967 FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
1968 phi_create_copy(itph, predecessor_index, iptr++);
1971 FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
1972 phi_create_copy(itph, predecessor_index, iptr++);
1976 static void ssa_leave_create_phi_moves(ssa_info *ssa) {
1979 basicblock *last = NULL;
1982 branch_target_t *table;
1983 lookup_target_t *lookup;
1984 bool has_fallthrough;
1986 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1988 if (bptr->next == NULL) {
1992 if (bptr->vp == NULL) {
1996 if (! basicblock_reached(bptr)) {
2000 has_fallthrough = true;
2002 for (iptr = bptr->iinstr; iptr != bptr->iinstr + bptr->icount; ++iptr) {
2003 switch (icmd_table[iptr->opc].controlflow) {
2008 ssa_leave_create_transition_block(ssa, bptr, iptr->dst.block);
2011 table = iptr->dst.table;
2012 l = iptr->sx.s23.s2.tablelow;
2013 i = iptr->sx.s23.s3.tablehigh;
2015 i += 1; /* default */
2018 ssa_leave_create_transition_block(ssa, bptr, table->block);
2023 lookup = iptr->dst.lookup;
2024 i = iptr->sx.s23.s2.lookupcount;
2026 lookup->target.block =
2027 ssa_leave_create_transition_block(ssa, bptr, lookup->target.block);
2030 iptr->sx.s23.s3.lookupdefault.block =
2031 ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.lookupdefault.block);
2034 iptr->sx.s23.s3.jsrtarget.block =
2035 ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.jsrtarget.block);
2040 (iptr->opc == ICMD_GOTO) ||
2041 (iptr->opc == ICMD_JSR) ||
2042 (iptr->opc == ICMD_RET) ||
2043 icmd_table[iptr->opc].controlflow == CF_END ||
2044 (iptr->opc == ICMD_TABLESWITCH) ||
2045 (iptr->opc == ICMD_LOOKUPSWITCH)
2047 has_fallthrough = false;
2048 } else if (iptr->opc != ICMD_NOP) {
2049 has_fallthrough = true;
2054 if (bptr->next == NULL) {
2058 if (! basicblock_reached(bptr->next)) {
2062 if (has_fallthrough) {
2063 ssa_leave_create_fallthrough(ssa, bptr);
2067 /* Add chain of new basic blocks */
2069 if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
2070 last->next = basicblock_chain_front(ssa->new_blocks);
2075 static basicblock *ssa_leave_split_basicblock_at(ssa_info *ssa, basicblock *bptr, instruction *iptr) {
2077 basicblock_info_t *bbi = bb_info(bptr);
2078 unsigned iidx = iptr - bptr->iinstr;
2079 basicblock *newblock;
2080 basicblock *tosplit;
2084 assert(iidx < bptr->icount);
2087 /* If there are no subbasicblocks yet, we initialize the first one to be a
2088 copy of the original basicblock. */
2090 if (basicblock_chain_empty(bbi->subbasicblocks)) {
2091 newblock = DNEW(basicblock);
2093 newblock->next = NULL;
2094 newblock->vp = NULL;
2095 basicblock_chain_add(bbi->subbasicblocks, newblock);
2098 /* Find the subbasicblock that will be split:
2099 the one that cointains iptr. */
2101 tosplit = basicblock_chain_front(bbi->subbasicblocks);
2104 while (tosplit->next && (iidx >= (pos + tosplit->icount))) {
2105 assert(bptr->nr == tosplit->nr);
2106 pos += tosplit->icount;
2107 tosplit = tosplit->next;
2110 assert(bptr->nr == tosplit->nr);
2112 /* Calculate number of instructions left in block to split. */
2114 ileft = iptr - tosplit->iinstr + 1;
2115 assert(ileft <= tosplit->icount);
2117 /* If there are any instructions left in the block to split, split */
2119 if (ileft < tosplit->icount) {
2120 newblock = DNEW(basicblock);
2121 *newblock = *tosplit;
2123 tosplit->next = newblock;
2124 tosplit->icount = ileft;
2126 newblock->icount -= ileft;
2127 newblock->iinstr += ileft;
2129 assert(tosplit->nr == bptr->nr);
2130 assert(newblock->nr == bptr->nr);
2131 assert(newblock->next == NULL);
2133 if (newblock->next == NULL) {
2134 bbi->subbasicblocks->last = newblock;
2138 /* We won't break pointers/references to bptr.
2139 So return bptr instread of the first fragment.
2140 Later, we will put the first fragment into the memory used by bptr.
2143 if (tosplit == basicblock_chain_front(bbi->subbasicblocks)) {
2150 static basicblock *ssa_leave_create_transition_exception_handler(
2158 /* From is a try block, to is an exception handler prologue. */
2160 /* Remove old prologue. */
2162 to->flags = BBDELETED;
2164 /* Create new exception handler. */
2166 exh = ssa_leave_create_transition_block_intern(
2170 basicblock_get_ex_predecessor_index(from, pei, to),
2173 exh->type = BBTYPE_EXH;
2175 /* Copy goto to real exception handler at the end of the exception handler
2178 assert(to->iinstr[to->icount - 1].opc == ICMD_GOTO);
2179 assert(exh->iinstr[exh->icount - 1].opc == ICMD_GOTO);
2180 exh->iinstr[exh->icount - 1] = to->iinstr[to->icount - 1];
2182 /* Copy getexception from the old prologue. */
2184 assert(to->iinstr[0].opc == ICMD_GETEXCEPTION);
2185 exh->iinstr[0] = to->iinstr[0];
2190 static exception_entry *ssa_leave_create_transition_exception_entry(
2193 basicblock *handler,
2194 classref_or_classinfo catchtype
2197 exception_entry *ee = DNEW(exception_entry);
2198 basicblock_info_t *fromi = bb_info(from);
2202 /* If the try block has subbasicblocks, the next block is the next fragment,
2203 not the successor block. */
2205 if (fromi != NULL) {
2206 ee->end = basicblock_chain_front(fromi->subbasicblocks)->next;
2208 ee->end = from->next;
2210 ee->handler = handler;
2211 ee->catchtype = catchtype;
2218 static void ssa_leave_create_exceptional_phi_moves(ssa_info *ssa) {
2221 exception_entry *ite;
2222 exception_entry_chain_t chain;
2223 classref_or_classinfo catchtype;
2228 exception_entry *ee;
2229 basicblock *last = NULL;
2231 if (! basicblock_chain_empty(ssa->new_blocks)) {
2232 last = basicblock_chain_back(ssa->new_blocks);
2235 basicblock_chain_clear(ssa->new_blocks);
2237 for (ite = ssa->jd->exceptiontable; ite; ite = ite->down) {
2238 bptr = ite->handler;
2239 catchtype = ite->catchtype;
2240 exception_entry_chain_init(&chain);
2241 for (ittry = ite->start; ittry != ite->end; ittry = ittry->next) {
2242 if (basicblock_reached(ittry)) {
2243 /* Dead code does not have a basicblock_info_t associated. */
2245 FOR_EACH_INSTRUCTION(ittry, iptr) {
2246 if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
2247 /* try is basicblock fragment till (including) the pei */
2248 try = ssa_leave_split_basicblock_at(ssa, ittry, iptr);
2249 /* ee is handler for try */
2250 exh = ssa_leave_create_transition_exception_handler(
2253 ee = ssa_leave_create_transition_exception_entry(
2254 ssa, try, exh, catchtype
2256 exception_entry_chain_add(&chain, ee);
2258 ssa->jd->exceptiontablelength += 1;
2263 if (! exception_entry_chain_empty(&chain)) {
2264 exception_entry_chain_back(&chain)->down = ite->down;
2265 exception_entry_chain_back(&chain)->next = ite->next;
2266 /* Replace original exception entry by first new one. */
2267 *ite = *exception_entry_chain_front(&chain);
2268 /* Set current iteration position to last newly created one. */
2269 ite = exception_entry_chain_back(&chain);
2274 for (last = ssa->jd->basicblocks; last->next != NULL; last = last->next);
2277 if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
2278 last->next = basicblock_chain_front(ssa->new_blocks);
2282 void ssa_simple_leave_restore(ssa_info_t *ssa, basicblock *bptr, s4 *pvar) {
2285 basicblock_info_t *bbi;
2287 if (var < ssa->locals->count) {
2288 *pvar = vars_get_old_index(ssa->locals, var);
2289 } else if (var < ssa->locals->count + ssa->stack->count) {
2291 index = vars_get_old_index(
2293 var - ssa->locals->count
2296 bbi = bb_info(bptr);
2298 /* We have to determine whether to take an invar or an outvar for
2299 the stack depth ``index''.
2300 The state array contains the last definition of the stack element
2304 if (state_array_get_var(bbi->stack->state_array, index) == var) {
2305 /* The last definition of a stack depth inside the basicblock.
2306 This is the outvar at the given depth.
2307 If there is no outvar at the given depth, it must be an invar.
2309 if (index < bptr->outdepth) {
2310 *pvar = bptr->outvars[index];
2311 } else if (index < bptr->indepth) {
2312 *pvar = bptr->invars[index];
2317 /* A different than the last definition of a stack depth.
2318 This must be an invar.
2320 assert(index < bptr->indepth);
2321 *pvar = bptr->invars[index];
2324 *pvar = vars_get_old_index(
2326 var - ssa->locals->count - ssa->stack->count
2331 void ssa_simple_leave(ssa_info_t *ssa) {
2335 unsigned uses_count;
2337 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
2338 if (bptr->type == BBTYPE_EXH) {
2339 /* (Aritifical) exception handler blocks will be eliminated. */
2342 /* In reverse order. We need to rename the definition after any use! */
2343 FOR_EACH_INSTRUCTION_REV(bptr, iptr) {
2344 if (instruction_has_dst(iptr)) {
2345 ssa_simple_leave_restore(ssa, bptr, &(iptr->dst.varindex));
2347 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
2348 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
2349 ssa_simple_leave_restore(ssa, bptr, ituse);
2351 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
2356 unfix_exception_handlers(ssa->jd);
2358 ssa->jd->maxlocals = ssa->original.maxlocals;
2359 ssa->jd->maxinterfaces = ssa->original.maxinterfaces;
2360 ssa->jd->local_map =ssa->original.local_map;
2361 ssa->jd->var = ssa->original.var;
2362 ssa->jd->vartop = ssa->original.vartop;
2363 ssa->jd->varcount = ssa->original.varcount;
2364 ssa->jd->localcount = ssa->original.localcount;
2367 #include "vm/rt-timing.h"
2369 void yssa(jitdata *jd) {
2371 basicblock_info_t *iti;
2374 struct timespec bs, es, be, ee;
2376 RT_TIMING_GET_TIME(bs);
2381 printf("=============== [ before %s ] =========================\n", jd->m->name->text);
2383 printf("=============== [ /before ] =========================\n");
2387 ssa = DNEW(ssa_info);
2389 ssa_info_init(ssa, jd);
2390 ssa->keep_in_out = true;
2392 FOR_EACH_BASICBLOCK(jd, it) {
2393 if (basicblock_reached(it)) {
2394 iti = DNEW(basicblock_info_t);
2395 basicblock_info_init(iti, it, jd);
2402 ssa_enter_mark_loops(jd->basicblocks);
2404 ssa_enter_traverse(ssa, jd->basicblocks);
2406 ssa_enter_eliminate_categories(ssa);
2408 ssa_enter_export_variables(ssa);
2410 ssa_enter_export_phis(ssa);
2412 ssa_enter_verify_no_redundant_phis(ssa);
2414 /*ssa_enter_create_phi_graph(ssa);*/
2416 RT_TIMING_GET_TIME(be);
2417 escape_analysis_perform(ssa->jd);
2418 RT_TIMING_GET_TIME(ee);
2421 ssa_leave_create_phi_moves(ssa);
2423 ssa_leave_create_exceptional_phi_moves(ssa);
2428 printf("=============== [ mid ] =========================\n");
2430 printf("=============== [ /mid ] =========================\n");
2434 ssa_simple_leave(ssa);
2438 printf("=============== [ after ] =========================\n");
2440 printf("=============== [ /after ] =========================\n");
2444 RT_TIMING_GET_TIME(es);
2446 RT_TIMING_TIME_DIFF(bs, es, RT_TIMING_1);
2447 RT_TIMING_TIME_DIFF(be, ee, RT_TIMING_2);
2450 void eliminate_subbasicblocks(jitdata *jd) {
2451 basicblock *bptr, *next;
2452 basicblock_info_t *bbi;
2454 FOR_EACH_BASICBLOCK(jd, bptr) {
2455 bbi = bb_info(bptr);
2457 if (! basicblock_chain_empty(bbi->subbasicblocks)) {
2459 /* Copy first subblock, to keep pointers intact. */
2460 *bptr = *basicblock_chain_front(bbi->subbasicblocks);
2461 bptr = basicblock_chain_back(bbi->subbasicblocks);
2468 printf("=============== [ elim ] =========================\n");
2470 printf("=============== [ /elim ] =========================\n");
2475 * These are local overrides for various environment variables in Emacs.
2476 * Please do not remove this and leave it at the end of the file, where
2477 * Emacs will automagically detect them.
2478 * ---------------------------------------------------------------------
2481 * indent-tabs-mode: t
2485 * vim:noexpandtab:sw=4:ts=4: