1 /* src/vm/optimizing/ssa3.c
4 CACAOVM - Verein zu Foerderung der freien virtuellen Machine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 SSA transformation PROTOTYPE based on:
26 Adding Static Single Assignment Form and a Graph Coloring Register
27 Allocator to the Java Hotspot Client Compiler, 2000
28 (http://www.ssw.uni-linz.ac.at/Research/Reports/Report15.html)
32 * Adapt for exception handling [done]
33 * Eliminiation of redundand PHI functions. [in progress]
34 * Handle also inout variables. [done]
35 * Create PHI functions lazyly, when they are used for the first time
36 (I suspect that currently PHIs are created that are never used).
37 * REWRITE. The code was never designed for producion. [done]
38 * Unify access to phi args.
41 #include "vm/jit/jit.h"
42 #include "vm/global.h"
43 #include "mm/memory.h"
44 #include "mm/dumpmemory.h"
45 #include "toolbox/list.h"
50 /*#define ELIMINATE_NOP_LOAD_STORE*/
53 /*#define SSA_VERBOSE */
56 __attribute__((always_inline))
59 /*** phi ********************************************************************/
63 PHI_FLAG_REDUNDANT_ALL,
64 PHI_FLAG_REDUNDANT_ONE
67 static inline void phi_set_flag(instruction *iptr, phi_flags_t flag) {
68 iptr->flags.bits |= (1 << flag);
71 static inline void phi_clear_flag(instruction *iptr, phi_flags_t flag) {
72 iptr->flags.bits &= ~(1 << flag);
75 static inline bool phi_has_flag(const instruction *iptr, phi_flags_t flag) {
76 return (iptr->flags.bits & (1 << flag)) != 0;
79 static inline instruction *phi_get_subst(instruction *iptr) {
80 return iptr->sx.s23.s2.iargs[-1];
/* True iff a substituting instruction was recorded for this phi
   (hidden slot iargs[-1] is non-NULL). */
static inline bool phi_has_subst(const instruction *iptr) {
	return NULL != iptr->sx.s23.s2.iargs[-1];
}
88 static inline void phi_init(instruction *iptr, unsigned argcount, s4 index) {
93 iptr->dst.varindex = 0;
94 iptr->s1.argcount = argcount;
95 iptr->sx.s23.s2.iargs = DMNEW(instruction *, argcount + 1);
96 iptr->sx.s23.s2.iargs += 1;
97 iptr->sx.s23.s3.javaindex = index;
99 /* subst field - If non-null, the phi function shall be replaced by the
100 value produced by the subst instruction. */
101 iptr->sx.s23.s2.iargs[-1] = NULL;
104 for (i = 0; i < argcount; ++i) {
105 iptr->sx.s23.s2.iargs[i] = (instruction *)0x7fffffff;
110 #define phi_assert_opc(iptr) assert(iptr->opc == ICMD_PHI)
112 #define phi_assert_arg(iptr, arg) assert(arg < iptr->s1.argcount)
114 static inline s4 phi_arg_count(const instruction *iptr) {
115 phi_assert_opc(iptr);
116 return iptr->s1.argcount;
119 static inline instruction *phi_get_arg(const instruction *iptr, unsigned arg) {
120 phi_assert_opc(iptr);
121 phi_assert_arg(iptr, arg);
122 return iptr->sx.s23.s2.iargs[arg];
125 static inline s4 phi_get_arg_var(const instruction *iptr, unsigned arg) {
126 phi_assert_opc(iptr);
127 phi_assert_arg(iptr, arg);
128 return iptr->sx.s23.s2.iargs[arg]->dst.varindex;
131 static inline void phi_set_all_args(instruction *iptr, instruction *value) {
133 phi_assert_opc(iptr);
134 for (i = 0; i < iptr->s1.argcount; ++i) {
135 iptr->sx.s23.s2.iargs[i] = value;
139 static inline s4 phi_get_index(const instruction *iptr) {
140 phi_assert_opc(iptr);
141 return iptr->sx.s23.s3.javaindex;
144 static inline s4 phi_get_dst(const instruction *iptr) {
145 phi_assert_opc(iptr);
146 return iptr->dst.varindex;
149 static inline void phi_set_dst(instruction *iptr, s4 dst) {
150 phi_assert_opc(iptr);
151 iptr->dst.varindex = dst;
154 static inline bool phi_get_used(const instruction *iptr) {
155 phi_assert_opc(iptr);
156 return phi_has_flag(iptr, PHI_FLAG_USED);
159 static void phi_set_used(instruction *iptr) {
160 phi_assert_opc(iptr);
161 if (! phi_has_flag(iptr, PHI_FLAG_USED)) {
162 phi_set_flag(iptr, PHI_FLAG_USED);
163 /* TODO recurse into arguments */
/* Follow the substitution chain of a use.

   If the use is a phi function that has been substituted (redundancy
   elimination), return the substituting instruction, transitively.
   Stops at the first instruction that is not a substituted phi.

   Robustness: a NULL use is passed through unchanged instead of being
   dereferenced (the previous code dereferenced unconditionally). */
static instruction *phi_resolve_use(instruction *use) {
	if (use != NULL) {
		while (use->opc == ICMD_PHI) {
			if (phi_has_subst(use)) {
				use = phi_get_subst(use);
			} else {
				break;
			}
		}
	}

	return use;
}
/* Set the arg-th operand of the phi function to the given defining
   instruction. The value must not be NULL. */
static inline void phi_set_arg(instruction *iptr, unsigned arg, instruction *value) {
	phi_assert_opc(iptr);
	phi_assert_arg(iptr, arg);
	assert(value != NULL);

	iptr->sx.s23.s2.iargs[arg] = value;
}
187 static inline bool phi_is_redundant(const instruction *iptr) {
189 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ONE) ||
190 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ALL)
194 static inline void phi_create_copy(instruction *iptr, unsigned arg, instruction *copy) {
195 phi_assert_opc(iptr);
196 phi_assert_arg(iptr, arg);
197 copy->dst.varindex = phi_get_dst(iptr);
198 copy->s1.varindex = phi_resolve_use(phi_get_arg(iptr, arg))->dst.varindex;
199 if (copy->dst.varindex == copy->s1.varindex || phi_is_redundant(iptr)) {
200 copy->opc = ICMD_NOP;
202 copy->opc = ICMD_COPY;
207 static void phi_print(const instruction *iptr) {
210 printf("%d = phi(", iptr->dst.varindex);
211 for (i = 0; i < iptr->s1.argcount; ++i) {
212 use = phi_resolve_use(iptr->sx.s23.s2.iargs[i]);
214 printf("%d, ", use->dst.varindex);
223 #define FOR_EACH_PHI_USE_CAST(iptr, it, cast) \
225 (it) = cast (iptr)->sx.s23.s2.iargs; \
226 (it) != cast (iptr)->sx.s23.s2.iargs + (iptr)->s1.argcount; \
230 #define FOR_EACH_PHI_USE(iptr, it) \
231 FOR_EACH_PHI_USE_CAST(iptr, it, )
233 #define FOR_EACH_PHI_USE_CONST(iptr, it) \
234 FOR_EACH_PHI_USE_CAST(iptr, it, (const instruction *))
236 static void phi_calculate_redundancy(instruction *iptr) {
238 s4 dst = iptr->dst.varindex;
242 unsigned num_different = 0;
243 instruction *different;
245 if (phi_is_redundant(iptr)) return; /* XXX */
247 /* x = phi(x, y, x, x) ... PHI_FLAG_REDUNDANT_ONE
248 x = phi(x, x, x, x) ... PHI_FLAG_REDUNDANT_ALL */
250 FOR_EACH_PHI_USE(iptr, ituse) {
251 iuse = phi_resolve_use(*ituse);
254 use = iuse->dst.varindex;
259 if (num_different >= 2) {
260 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
261 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
266 if (num_different == 1) {
267 /* Set the subst field of the instruction.
268 I.e. the instruction will be replaced by the value produced by y. */
269 iptr->sx.s23.s2.iargs[-1] = different;
271 phi_set_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
272 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
273 } else if (num_different == 0) {
275 /*iptr->sx.s23.s2.iargs[-1] = iptr->sx.s23.s2.iargs[0];*/
277 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
278 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
284 /*** goto *******************************************************************/
286 static inline void goto_init(instruction *iptr, basicblock *dst) {
287 iptr->opc = ICMD_GOTO;
288 iptr->dst.block = dst;
291 /*** instruction ***********************************************************/
293 static void instruction_get_uses(const instruction *iptr, s4 *buf, s4 **puses, unsigned *puses_count) {
294 unsigned uses_count = 0;
296 switch (icmd_table[iptr->opc].dataflow) {
299 buf[2] = iptr->sx.s23.s3.varindex;
304 buf[1] = iptr->sx.s23.s2.varindex;
311 buf[0] = iptr->s1.varindex;
314 *puses_count = uses_count;
322 *puses = iptr->sx.s23.s2.args;
323 *puses_count = iptr->s1.argcount;
333 static inline void instruction_set_uses(instruction *iptr, s4 *buf, s4 *uses, unsigned uses_count) {
335 switch (uses_count) {
337 iptr->sx.s23.s3.varindex = buf[2];
339 iptr->sx.s23.s2.varindex = buf[1];
341 iptr->s1.varindex = buf[0];
346 /*** vars *******************************************************************/
348 #define VARS_CATEGORY_SHIFT 29
349 #define VARS_INDEX_MASK 0x1FFFFFFF
351 #define VARS_CATEGORY_LOCAL 0
352 #define VARS_CATEGORY_STACK 1
353 #define VARS_CATEGORY_OTHERS 2
355 #define VAR_TYPE_SUBSTITUED 666
357 #define OLD_INDEX_UNUSED -2
365 vars_item_t items[9000]; /* XXX hardcoded max */
371 static inline unsigned vars_add_item(vars_t *vs, const varinfo *item) {
372 unsigned i = vs->count;
375 vs->items[i].v = *item;
376 vs->items[i].old_index = OLD_INDEX_UNUSED;
377 return (vs->category << VARS_CATEGORY_SHIFT) | i;
380 static inline unsigned vars_add(vars_t *vs) {
381 unsigned i = vs->count;
384 return (vs->category << VARS_CATEGORY_SHIFT) | i;
/* Return the varinfo of the most recently added variable.
   The container must not be empty. */
static inline varinfo *vars_back(vars_t *vs) {
	assert(vs->count > 0);

	return &vs->items[vs->count - 1].v;
}
392 static inline void vars_init(vars_t *vs, unsigned category) {
393 vs->max = sizeof(vs->items) / sizeof(vs->items[0]);
395 assert((category & 0x3) == category);
396 vs->category = category;
399 static inline unsigned vars_get_category(unsigned varindex) {
400 return (varindex >> VARS_CATEGORY_SHIFT);
403 static inline unsigned vars_get_index(unsigned varindex) {
404 return (varindex & VARS_INDEX_MASK);
407 static void vars_subst(vars_t *vs, unsigned varindex, unsigned replacementindex) {
408 varindex = vars_get_index(varindex);
409 replacementindex = vars_get_index(replacementindex);
411 vs->items[varindex].v.type = VAR_TYPE_SUBSTITUED;
412 vs->items[varindex].v.vv.ii[1] = replacementindex;
/* Resolve a (possibly chained) substitution for varindex and return the
   combined (category | index) variable index of the final replacement.

   Fix: removed a dead empty-statement `if` that only contained a
   commented-out debug fprintf. The loop counter assert guards against
   substitution cycles (a chain longer than vs->count is impossible). */
static unsigned vars_resolve_subst(const vars_t *vs, unsigned varindex) {
	unsigned loop_ctr = 0;

	varindex = vars_get_index(varindex);

	while (vs->items[varindex].v.type == VAR_TYPE_SUBSTITUED) {
		assert(loop_ctr++ != vs->count); /* cycle guard */
		varindex = vs->items[varindex].v.vv.ii[1];
	}

	return (vs->category << VARS_CATEGORY_SHIFT) | varindex;
}
431 static void vars_copy_to_final(vars_t *vs, varinfo *dst) {
432 const vars_item_t *it;
435 for (it = vs->items; it != vs->items + vs->count; ++it, ++dst) {
441 /* Eliminate VAR_TYPE_SUBSTITUED as it leads to problems. */
443 if (dst->type == VAR_TYPE_SUBSTITUED) {
444 subst = vars_get_index(vars_resolve_subst(vs, it - vs->items));
445 dst->type = vs->items[subst].v.type;
451 static void vars_import(vars_t *vs, varinfo *v, unsigned count, s4 old_index) {
454 assert((vs->count + count) <= vs->max);
456 it = vs->items + vs->count;
459 while (count-- > 0) {
461 it->old_index = old_index;
468 static inline void vars_record_old_index(vars_t *vs, unsigned varindex, s4 old_index) {
470 varindex = vars_get_index(varindex);
472 assert(varindex < vs->count);
474 item = vs->items + varindex;
477 item->old_index == OLD_INDEX_UNUSED ||
478 item->old_index == old_index
481 item->old_index = old_index;
484 static inline s4 vars_get_old_index(vars_t *vs, unsigned varindex) {
487 varindex = vars_get_index(varindex);
489 assert(varindex < vs->count);
490 old = vs->items[varindex].old_index;
491 assert(old != OLD_INDEX_UNUSED);
496 /*** phis *******************************************************************/
504 static inline void phis_init(phis_t *ps, unsigned max) {
507 ps->items = DMNEW(instruction, max);
/* Append a fresh (uninitialized) phi instruction slot to the container
   and return a pointer to it. */
static inline instruction *phis_add(phis_t *ps) {
	unsigned i = ps->count;

	assert(i < ps->max);
	ps->count += 1;

	return ps->items + i;
}
/* Return a pointer to the i-th phi instruction in the container. */
static inline instruction *phis_get(const phis_t *ps, unsigned i) {
	assert(i < ps->count);

	return &ps->items[i];
}
522 static inline bool phis_contains(const phis_t *ps, const instruction *phi) {
523 return (ps->items <= phi) && (phi < (ps->items + ps->max));
526 #define FOR_EACH_PHI_FUNCTION_(ps, it) \
527 for ((it) = (ps)->items; (it) != (ps)->items + (ps)->count; ++(it)) \
529 #define FOR_EACH_PHI_FUNCTION(ps, it) \
530 FOR_EACH_PHI_FUNCTION_(ps, it) if (!phi_is_redundant((it)))
533 FIXME() inline void phis_print(const phis_t *ps) {
534 const instruction *iptr;
535 FOR_EACH_PHI_FUNCTION(ps, iptr) {
541 static inline void phis_copy_to(const phis_t *ps, instruction *dst) {
550 /*** state_array ************************************************************/
557 static inline void state_array_init(state_array_t *sa, unsigned count) {
/* True iff backing storage for the state array has been allocated. */
static inline bool state_array_has_items(const state_array_t *sa) {
	return sa->items != NULL;
}
/* Variable index currently defined for slot `index`.
   The slot must hold a (non-NULL) defining instruction. */
static inline s4 state_array_get_var(const state_array_t *sa, unsigned index) {
	assert(index < sa->count);
	assert(sa->items[index]);

	return sa->items[index]->dst.varindex;
}
/* The instruction currently defining slot `index` (may be NULL). */
static inline instruction *state_array_get(const state_array_t *sa, unsigned index) {
	assert(index < sa->count);

	return sa->items[index];
}
/* Record `value` as the defining instruction of slot `index`.
   (The struct pointer is const; only the pointed-to items array is
   mutated.) */
static inline void state_array_set(const state_array_t *sa, unsigned index, instruction *value) {
	assert(index < sa->count);

	sa->items[index] = value;
}
/* Copy the full state (all slots) from `other` into `sa`.
   Both arrays must have the same slot count. */
static inline void state_array_copy(state_array_t *sa, state_array_t *other) {
	assert(sa->count == other->count);

	MCOPY(sa->items, other->items, instruction *, sa->count);
}
587 #define state_array_assert_items(sa) assert(state_array_has_items(sa) || (sa->count == 0))
588 #define state_array_assert_no_items(sa) assert(! state_array_has_items(sa))
590 static inline void state_array_allocate_items(state_array_t *sa) {
591 sa->items = DMNEW(instruction *, sa->count);
592 MZERO(sa->items, instruction *, sa->count);
595 /*** basicblock_chain *******************************************************/
600 } basicblock_chain_t;
602 static void basicblock_chain_init(basicblock_chain_t *bbc) {
607 #define basicblock_chain_clear basicblock_chain_init
/* Append basicblock bb to the end of the chain, linking it via the
   blocks' own ->next pointers. bb must not already be linked. */
static void basicblock_chain_add(basicblock_chain_t *bbc, basicblock *bb) {
	if (bbc->first == NULL) {
		/* Empty chain: bb becomes both head and tail. */
		assert(bbc->last == NULL);
		assert(bb->next == NULL);
		bbc->first = bb;
		bbc->last  = bb;
	} else {
		/* Non-empty: hook bb after the current tail. */
		assert(bbc->last->next == NULL);
		bbc->last->next = bb;
		bbc->last       = bb;
	}
}
622 static inline basicblock *basicblock_chain_front(basicblock_chain_t *bbc) {
627 static inline basicblock *basicblock_chain_back(basicblock_chain_t *bbc) {
/* True iff the chain holds no basic blocks. */
static inline bool basicblock_chain_empty(const basicblock_chain_t *bbc) {
	return NULL == bbc->first;
}
636 /*** exception_entry_chain ***************************************************/
639 exception_entry *first;
640 exception_entry *last;
641 } exception_entry_chain_t;
643 static void exception_entry_chain_init(exception_entry_chain_t *eec) {
648 #define exception_entry_chain_clear exception_entry_chain_init
650 static void exception_entry_chain_add(exception_entry_chain_t *eec, exception_entry *ee) {
651 if (eec->first == NULL) {
655 eec->last->next = ee;
656 eec->last->down = ee;
/* True iff the chain holds no exception entries. */
static inline bool exception_entry_chain_empty(const exception_entry_chain_t *eec) {
	return NULL == eec->first;
}
665 static inline exception_entry *exception_entry_chain_back(exception_entry_chain_t *eec) {
670 static inline exception_entry *exception_entry_chain_front(exception_entry_chain_t *eec) {
675 /*** traversal **************************************************************/
678 unsigned (*var_num_to_index)(void *vp, s4 var);
679 varinfo *(*index_to_initial_var)(void *vp, unsigned index);
680 varinfo *(*var_num_to_varinfo)(void *vp, s4 var);
681 unsigned (*variables_count)(void *vp);
686 state_array_t *state_array;
688 traversal_ops_t *ops;
691 /*** basicblock_info ********************************************************/
693 typedef struct basicblock_info {
697 unsigned backward_branches;
702 basicblock_chain_t *subbasicblocks;
704 unsigned complete_predecessors;
706 #if defined(SSA_VERIFY)
707 unsigned num_phi_elimination;
712 /*** traversal **************************************************************/
714 void traversal_init(traversal_t *t, unsigned count, void *ops_vp, traversal_ops_t *ops) {
715 t->phis = DNEW(phis_t);
716 phis_init(t->phis, count);
718 t->state_array = DNEW(state_array_t);
719 state_array_init(t->state_array, count);
725 instruction *traversal_create_phi(traversal_t *t, vars_t *v, unsigned argcount, s4 index) {
726 instruction *phi = phis_add(t->phis);
729 phi_init(phi, argcount, index);
730 dst = vars_add_item(v, t->ops->index_to_initial_var(t->ops_vp, index));
731 phi_set_dst(phi, dst);
733 state_array_set(t->state_array, index, phi);
735 vars_record_old_index(v, phi->dst.varindex, index);
740 static void traversal_rename_def(traversal_t *t, vars_t *vars, instruction *iptr) {
745 state_array_assert_items(t->state_array);
747 v = t->ops->var_num_to_varinfo(t->ops_vp, iptr->dst.varindex);
748 index = t->ops->var_num_to_index(t->ops_vp, iptr->dst.varindex);
750 old = iptr->dst.varindex;
751 iptr->dst.varindex = vars_add_item(vars, v);
752 state_array_set(t->state_array, index, iptr);
754 vars_record_old_index(vars, iptr->dst.varindex, index);
757 static void traversal_rename_use(traversal_t *t, vars_t *vars, s4 *puse) {
761 state_array_assert_items(t->state_array);
763 index = t->ops->var_num_to_index(t->ops_vp, *puse);
765 assert(state_array_get(t->state_array, index));
767 *puse = state_array_get_var(t->state_array, index);
769 vars_record_old_index(vars, *puse, index);
772 static inline unsigned traversal_variables_count(traversal_t *t) {
773 return t->ops->variables_count(t->ops_vp);
776 unsigned local_var_num_to_index(void *vp, s4 var) {
777 return (unsigned)var;
780 varinfo *local_index_to_initial_var(void *vp, unsigned index) {
781 jitdata *jd = (jitdata *)vp;
782 return jd->var + index;
785 varinfo *local_var_num_to_varinfo(void *vp, s4 var) {
786 jitdata *jd = (jitdata *)vp;
787 return jd->var + var;
790 unsigned local_variables_count(void *vp) {
791 jitdata *jd = (jitdata *)vp;
792 return jd->localcount;
795 traversal_ops_t traversal_local_ops = {
796 local_var_num_to_index,
797 local_index_to_initial_var,
798 local_var_num_to_varinfo,
799 local_variables_count
802 unsigned inout_var_num_to_index(void *vp, s4 var) {
803 basicblock *bb = (basicblock *)vp;
806 for (i = 0; i < bb->indepth; ++i) {
807 if (bb->invars[i] == var) {
812 for (i = 0; i < bb->outdepth; ++i) {
813 if (bb->outvars[i] == var) {
821 varinfo *inout_index_to_initial_var(void *vp, unsigned index) {
822 basicblock *bb = (basicblock *)vp;
823 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
824 assert(index < bb->indepth);
825 return jd->var + bb->invars[index];
828 varinfo *inout_var_num_to_varinfo(void *vp, s4 var) {
829 basicblock *bb = (basicblock *)vp;
830 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
831 return jd->var + var;
834 unsigned inout_variables_count(void *vp) {
835 basicblock *bb = (basicblock *)vp;
839 traversal_ops_t traversal_inout_ops = {
840 inout_var_num_to_index,
841 inout_index_to_initial_var,
842 inout_var_num_to_varinfo,
843 inout_variables_count
846 /*** basicblock_info ********************************************************/
848 void basicblock_info_init(basicblock_info_t *bbi, basicblock *bb, jitdata *jd) {
849 MZERO(bbi, basicblock_info_t, 1);
851 bbi->locals = DNEW(traversal_t);
852 traversal_init(bbi->locals, jd->localcount, jd, &traversal_local_ops);
854 bbi->stack = DNEW(traversal_t);
855 traversal_init(bbi->stack, jd->maxinterfaces, bb, &traversal_inout_ops);
857 bbi->subbasicblocks = DNEW(basicblock_chain_t);
858 basicblock_chain_init(bbi->subbasicblocks);
861 /*** basicblock *************************************************************/
863 static inline basicblock_info_t *basicblock_info(basicblock *bb) {
864 return (basicblock_info_t *)(bb->vp);
867 #define bb_info basicblock_info
869 static unsigned basicblock_get_predecessor_count(basicblock *bb) {
873 ret = bb->predecessorcount;
875 FOR_EACH_EXPREDECESSOR(bb, itpred) {
876 ret += (*itpred)->exouts;
882 static unsigned basicblock_get_predecessor_index(basicblock *from, basicblock *to) {
886 FOR_EACH_PREDECESSOR(to, itpred) {
887 if (*itpred == from) break;
891 assert(j != to->predecessorcount);
896 static unsigned basicblock_get_ex_predecessor_index(basicblock *from, unsigned pei, basicblock *to) {
900 j = to->predecessorcount;
902 FOR_EACH_EXPREDECESSOR(to, itpred) {
903 if ((*itpred)->nr == from->nr) {
907 j += (*itpred)->exouts;
914 /*** ssa_info ***************************************************************/
916 typedef struct ssa_info {
925 basicblock_chain_t *new_blocks;
937 unsigned keep_in_out:1;
939 } ssa_info, ssa_info_t;
941 void ssa_info_init(ssa_info_t *ssa, jitdata *jd) {
944 ssa->locals = DNEW(vars_t);
945 vars_init(ssa->locals, VARS_CATEGORY_LOCAL);
947 /* Initialize first version of locals, that is always available. */
948 vars_import(ssa->locals, jd->var, jd->localcount, 0);
950 ssa->stack = DNEW(vars_t);
951 vars_init(ssa->stack, VARS_CATEGORY_STACK);
953 ssa->others = DNEW(vars_t);
954 vars_init(ssa->others, VARS_CATEGORY_OTHERS);
956 ssa->new_blocks = DNEW(basicblock_chain_t);
957 basicblock_chain_init(ssa->new_blocks);
959 ssa->original.maxlocals = jd->maxlocals;
960 ssa->original.maxinterfaces = jd->maxinterfaces;
961 ssa->original.local_map = jd->local_map;
962 ssa->original.var = jd->var;
963 ssa->original.vartop = jd->vartop;
964 ssa->original.varcount = jd->varcount;
965 ssa->original.localcount = jd->localcount;
967 ssa->keep_in_out = false;
970 /*** others_mapping *********************************************************/
972 static inline void others_mapping_set(ssa_info *ssa, s4 var, s4 new_var) {
973 ssa->jd->var[var].vv.ii[1] = new_var;
976 static inline s4 others_mapping_get(const ssa_info *ssa, s4 var) {
977 return ssa->jd->var[var].vv.ii[1];
980 /*** code *******************************************************************/
982 void fix_exception_handlers(jitdata *jd) {
984 basicblock *exh = NULL;
990 basicblock_chain_t chain;
991 basicblock *last = NULL;
992 basicblock *marker = NULL;
994 if (jd->exceptiontablelength == 0) {
998 basicblock_chain_init(&chain);
1000 /* We need to allocate new iovars */
1002 avail_vars = (jd->varcount - jd->vartop);
1003 add_vars = jd->exceptiontablelength;
1005 if (add_vars > avail_vars) {
1006 add_vars -= avail_vars;
1007 jd->var = DMREALLOC(jd->var, varinfo, jd->varcount, jd->varcount + add_vars);
1008 jd->varcount += add_vars;
1011 /* For each exception handler block, create one block with a prologue block */
1013 FOR_EACH_BASICBLOCK(jd, bptr) {
1014 if (bptr->type == BBTYPE_EXH) {
1016 bptr->type = BBTYPE_STD;
1017 bptr->predecessorcount = 0; /* legacy */
1019 /* Create basicblock with 2 instructions */
1021 exh = DNEW(basicblock);
1022 MZERO(exh, basicblock, 1);
1024 iptr = DMNEW(instruction, 2);
1025 MZERO(iptr, instruction, 2);
1027 /* Create new outvar */
1029 assert(jd->vartop < jd->varcount);
1032 jd->var[v].type = TYPE_ADR;
1033 jd->var[v].flags = INOUT;
1035 exh->nr = jd->basicblockcount;
1036 jd->basicblockcount += 1;
1038 exh->type = BBTYPE_EXH;
1042 exh->outvars = DNEW(s4);
1043 exh->outvars[0] = v;
1044 exh->predecessorcount = -1; /* legacy */
1045 exh->flags = BBFINISHED;
1046 exh->method = jd->m;
1048 basicblock_chain_add(&chain, exh);
1052 iptr->opc = ICMD_GETEXCEPTION;
1053 iptr->dst.varindex = v;
1056 /* Goto real exception handler */
1058 goto_init(iptr, bptr);
1065 if (bptr->next == NULL) {
1072 /* Put the chain of exception handlers between just before the last
1073 basic block (end marker). */
1075 if (! basicblock_chain_empty(&chain)) {
1076 marker->nr = jd->basicblockcount;
1077 basicblock_chain_back(&chain)->next = marker;
1078 last->next = basicblock_chain_front(&chain);
1080 assert(last->nr + 1 == basicblock_chain_front(&chain)->nr);
1081 assert(marker->nr == jd->basicblockcount);
1084 /* Replace all exception handlers in exception table with their respective
1087 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1088 assert(ee->handler->vp);
1089 ee->handler = ee->handler->vp;
1094 void unfix_exception_handlers(jitdata *jd) {
1095 basicblock *bptr, *exh;
1097 exception_entry *ee;
1098 #if !defined(NDEBUG)
1102 FOR_EACH_BASICBLOCK(jd, bptr) {
1103 if (bptr->type == BBTYPE_EXH) {
1104 assert(bptr->iinstr[1].opc == ICMD_GOTO);
1105 exh = bptr->iinstr[1].dst.block;
1107 bptr->type = BBDELETED;
1111 exh->type = BBTYPE_EXH;
1114 /* bptr is no more a predecessor of exh */
1116 for (i = 0; i < exh->predecessorcount; ++i) {
1117 if (exh->predecessors[i] == bptr) {
1118 exh->predecessors[i] = exh->predecessors[exh->predecessorcount - 1];
1119 exh->predecessorcount -= 1;
1120 #if !defined(NDEBUG)
1134 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1135 assert(ee->handler->vp);
1136 ee->handler = ee->handler->vp;
1140 /*** ssa_enter ***************************************************************/
1142 static void ssa_enter_mark_loops_intern(basicblock *bb, unsigned num_branches) {
1143 basicblock_info_t *bbi = bb_info(bb);
1144 basicblock **itsucc;
1146 if (! bbi->visited) {
1147 bbi->visited = true;
1149 FOR_EACH_SUCCESSOR(bb, itsucc) {
1150 /* There is a single branch from bb into the successor. */
1151 ssa_enter_mark_loops_intern(*itsucc, 1);
1153 FOR_EACH_EXHANDLER(bb, itsucc) {
1154 /* For exception handler type successors,
1155 we count a branch into the exception handler from every PEI. */
1156 ssa_enter_mark_loops_intern(*itsucc, bb->exouts);
1158 bbi->active = false;
1159 } else if (bbi->active) {
1160 bbi->backward_branches += num_branches;
1164 static inline void ssa_enter_mark_loops(basicblock *bb) {
1165 ssa_enter_mark_loops_intern(bb, 1);
1168 static void ssa_enter_merge(
1172 unsigned predecessor_index,
1176 basicblock_info_t *dsti = bb_info(bdst);
1177 unsigned predecessor_count = basicblock_get_predecessor_count(bdst);
1182 /* We are merging for the first time into this block. */
1184 if (! state_array_has_items(dst->state_array)) {
1186 state_array_allocate_items(dst->state_array);
1188 if (dsti->backward_branches > 0) {
1189 /* Loop header, create phi functions for all variables. */
1190 for (i = 0; i < traversal_variables_count(dst); ++i) {
1191 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1192 /* No need to init, they will only be filled in later. */
1195 state_array_copy(dst->state_array, src->state_array);
1200 /* We are merging another block. */
1202 /* Process every variable. */
1204 for (i = 0; i < traversal_variables_count(dst); ++i) {
1205 if (dsti->traversed) {
1207 /* Back edge, all phi functions are already created.
1208 We only need to set their arguments. */
1211 phis_get(dst->phis, i),
1213 state_array_get(src->state_array, i)
1216 } else if (state_array_get(dst->state_array, i) != state_array_get(src->state_array, i)) {
1218 /* A different definition of this var reaches the block.
1219 We need to create a phi function. */
1221 if (phis_contains(dst->phis, state_array_get(dst->state_array, i))) {
1222 /* There is already a phi function created for this var.
1223 No need to create one. */
1225 /* Create a new phi function.
1226 Set all arguments to old value in state array. */
1227 old = state_array_get(dst->state_array, i);
1228 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1229 phi_set_all_args(phi, old);
1232 /* Set argument of phi function. */
1235 state_array_get(dst->state_array, i),
1237 state_array_get(src->state_array, i)
1243 static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb);
1244 static bool ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *bbi);
1246 #if defined(SSA_VERIFY)
1247 static void ssa_enter_verify_no_redundant_phis(ssa_info_t *ssa) {
1249 basicblock_info_t *bbi;
1251 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1252 if (basicblock_reached(bptr)) {
1253 bbi = bb_info(bptr);
1254 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1255 if (! phi_is_redundant(itph)) {
1256 phi_calculate_redundancy(itph);
1257 assert(! phi_is_redundant(itph));
1260 FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
1261 if (! phi_is_redundant(itph)) {
1262 phi_calculate_redundancy(itph);
1263 assert(! phi_is_redundant(itph));
1271 static void ssa_enter_traverse(ssa_info_t *ssa, basicblock *bb) {
1272 basicblock **itsucc;
1273 basicblock_info_t *succi;
1274 basicblock_info_t *bbi = bb_info(bb);
1275 unsigned predecessor_count;
1279 ssa_enter_process_block(ssa, bb);
1283 FOR_EACH_SUCCESSOR(bb, itsucc) {
1285 succi = bb_info(*itsucc);
1291 basicblock_get_predecessor_index(bb, *itsucc),
1299 basicblock_get_predecessor_index(bb, *itsucc),
1303 succi->complete_predecessors += 1;
1305 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1308 succi->complete_predecessors ==
1309 (predecessor_count - succi->backward_branches)
1311 ssa_enter_traverse(ssa, *itsucc);
1315 (succi->complete_predecessors == predecessor_count) &&
1316 (succi->backward_branches > 0)
1318 #if defined(SSA_VERIFY)
1319 assert(succi->num_phi_elimination < 2);
1320 succi->num_phi_elimination += 1;
1322 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1323 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1327 FOR_EACH_EXHANDLER(bb, itsucc) {
1329 succi = bb_info(*itsucc);
1331 succi->complete_predecessors += bb->exouts; /* XXX this might be 0 */
1333 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1336 succi->complete_predecessors ==
1337 (predecessor_count - succi->backward_branches)
1339 ssa_enter_traverse(ssa, *itsucc);
1343 (succi->complete_predecessors == predecessor_count) &&
1344 (succi->backward_branches > 0)
1346 #if defined(SSA_VERIFY)
1347 assert(succi->num_phi_elimination < 2);
1348 succi->num_phi_elimination += 1;
1350 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1351 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1358 static void ssa_enter_process_pei(ssa_info *ssa, basicblock *bb, unsigned pei) {
1359 basicblock_info_t *bbi = bb_info(bb);
1360 basicblock_info_t *succi;
1361 basicblock **itsucc;
1363 FOR_EACH_EXHANDLER(bb, itsucc) {
1364 succi = bb_info(*itsucc);
1370 basicblock_get_ex_predecessor_index(bb, pei, *itsucc),
1376 static FIXME(bool) ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *FIXME(bbi)) {
1381 FOR_EACH_PHI_FUNCTION(t->phis, itph) {
1383 phi_calculate_redundancy(itph);
1385 /* If the phi function is redundant,
1386 make the variable it defines point to the value defined by the substituing
1389 if (phi_is_redundant(itph)) {
1390 vars_subst(vs, itph->dst.varindex, phi_get_subst(itph)->dst.varindex);
1391 assert(bbi->backward_branches > 0);
1400 static void ssa_enter_post_eliminate_redundand_phi(
1406 phi_calculate_redundancy(phi);
1407 phi_set_flag(PHI_FLAG_REDUNDANCY_CHECKED);
1409 /* if redundancy changed and phi function escapes block */
1411 /* for each successor */
1414 static void ssa_enter_post_eliminate_redundant_phis(ssa_info_t *ssa) {
1416 basicblock_info_t *bbi;
1419 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1420 bbi = bb_info(bptr);
1422 if (bbi->backward_branches > 0) {
1423 /* Redundant phi functions have the left hand side as operand.
1424 This can happen by definition only in loop headers. */
1426 FOR_EACH_PHI_FUNCTION(bbi->locals, itph) {
1427 if (! phi_has_flag(PHI_FLAG_REDUNDANCY_CHECKED)) {
1428 /* Calculate redundancy? */
1430 /* If yes recurse? */
1434 FOR_EACH_PHI_FUNCTION(bbi->stack, itph) {
1441 static void ssa_enter_init_locals(state_array_t *sa) {
1445 for (i = 0; i < sa->count; ++i) {
1446 iptr = DNEW(instruction);
1447 iptr->opc = ICMD_NOP;
1448 iptr->dst.varindex = i;
1449 state_array_set(sa, i, iptr);
/* SSA-rename all variable uses and definitions inside one basicblock.
   Locals and in/out (stack) variables are renamed via the traversal
   helpers; all remaining ("other") variables get a fresh version for
   every definition, recorded in the others mapping.  Each block must be
   processed exactly once (asserted below). */
static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb) {
	basicblock_info_t *bbi = bb_info(bb);
	unsigned uses_count;

	/* Every basic block can be traversed only once. */

	assert(! bbi->traversed);
	bbi->traversed = true;

	/* The root basicblock needs special treatment:
	   its locals' state array is created here and seeded with dummy
	   definitions (see ssa_enter_init_locals). */

	if (bb->predecessorcount == 0 && bb->type != BBTYPE_EXH) {
		state_array_assert_no_items(bbi->locals->state_array);
		state_array_allocate_items(bbi->locals->state_array);
		ssa_enter_init_locals(bbi->locals->state_array);

		state_array_assert_no_items(bbi->stack->state_array);
		state_array_allocate_items(bbi->stack->state_array);

	/* Exception handlers have a clean stack. */

	if (bb->type == BBTYPE_EXH) {
		state_array_assert_no_items(bbi->stack->state_array);
		state_array_allocate_items(bbi->stack->state_array);

	/* Some in/out vars get marked as INOUT in simplereg,
	   and are not marked at this point.
	   Mark them manually. */

	for (ituse = bb->invars; ituse != bb->invars + bb->indepth; ++ituse) {
		if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
		ssa->jd->var[*ituse].flags |= INOUT;
		ssa->jd->var[*ituse].flags &= ~PREALLOC;

	for (ituse = bb->outvars; ituse != bb->outvars + bb->outdepth; ++ituse) {
		if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
		ssa->jd->var[*ituse].flags |= INOUT;
		ssa->jd->var[*ituse].flags &= ~PREALLOC;

	/* Process instructions */

	state_array_assert_items(bbi->locals->state_array);

	FOR_EACH_INSTRUCTION(bb, iptr) {

#if defined(ELIMINATE_NOP_LOAD_STORE)
		/* Kill NOP instructions of the form:
		   load/store where source and destination coincide.
		   As they create a lot of unnecessary definitions.
		   For performance, definitely eliminate them. However, keeping them is a
		   good stress test. */
			(icmd_table[iptr->opc].dataflow == DF_LOAD) ||
			(icmd_table[iptr->opc].dataflow == DF_STORE)
			if (iptr->dst.varindex == iptr->s1.varindex) {
				iptr->opc = ICMD_NOP;

		/* Instructions that can raise an exception end a "pei region";
		   record it (pei counts the PEIs seen so far in this block). */
		if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
			ssa_enter_process_pei(ssa, bb, pei++);

		/* Rename all uses of this instruction. */
		instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);

		for (ituse = uses; ituse != uses + uses_count; ++ituse) {
			if (var_is_local(ssa->jd, *ituse)) {
				traversal_rename_use(
			} else if (var_is_inout(ssa->jd, *ituse)) {
				traversal_rename_use(
				/* Neither local nor inout: look the current version up
				   in the others mapping. */
				*ituse = others_mapping_get(ssa, *ituse);

		instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);

		/* Rename the definition (if the instruction has one). */
		if (instruction_has_dst(iptr)) {
			if (var_is_local(ssa->jd, iptr->dst.varindex)) {
				traversal_rename_def(
			} else if (var_is_inout(ssa->jd, iptr->dst.varindex)) {
				traversal_rename_def(
				/* "Other" variable: allocate a fresh version, remember
				   its original index, and update the mapping so later
				   uses see the new version. */
				old = iptr->dst.varindex;
				iptr->dst.varindex = vars_add_item(
					ssa->jd->var + iptr->dst.varindex
				vars_record_old_index(ssa->others, iptr->dst.varindex, old);
				others_mapping_set(ssa, old, iptr->dst.varindex);
	[locals.........................][interfaces][others]
	[original locals][version > 1 ]
	<--------------- new locals --------------->
/* Publish the SSA variables into the jitdata: concatenate locals, stack
   and other variables into one final variable array, rebuild the local
   map to cover the new locals and io vars, and drop all interface
   variables (they are eliminated by the SSA transformation). */
static void ssa_enter_export_variables(ssa_info *ssa) {
	jitdata *jd = ssa->jd;

	/* Total variable count after renaming: locals + stack + others. */
	vartop = ssa->locals->count + ssa->stack->count + ssa->others->count;
	vars = DMNEW(varinfo, vartop);

	vars_copy_to_final(ssa->locals, vars);
	vars_copy_to_final(ssa->stack, vars + ssa->locals->count);
	vars_copy_to_final(ssa->others, vars + ssa->locals->count + ssa->stack->count);

	jd->vartop = jd->varcount = vartop;

	/* Grow local map to accommodate all new locals and iovars.
	   But keep the local map for version 1 of locals, that contains the holes. */
		5 * (jd->maxlocals + ssa->locals->count + ssa->stack->count - jd->localcount)
	MCOPY(local_map, jd->local_map, s4, 5 * jd->maxlocals);

	jd->local_map = local_map;

	it = jd->local_map + (jd->maxlocals * 5); /* start adding entries here */

	/* Add version > 1 of all locals */

	for (i = jd->localcount; i < ssa->locals->count; ++i) {
		for (j = 0; j < 5; ++j) {
			/* One local-map slot per JVM type (5 types); only the slot
			   matching the variable's type points at it. */
			if (jd->var[i].type != j) {

	/* Add all io vars. */

	for (i = ssa->locals->count; i < ssa->locals->count + ssa->stack->count; ++i) {
		for (j = 0; j < 5; ++j) {
			if (jd->var[i].type != j) {

	jd->maxlocals += (ssa->locals->count + ssa->stack->count - jd->localcount);
	jd->localcount = ssa->locals->count + ssa->stack->count;

	/* Eliminate interfaces. */

	jd->maxinterfaces = 0;
/* Attach the phi functions collected per basicblock to the basicblock
   itself: allocate bptr->phis large enough for the locals' and the
   stack's phi functions and copy both sets in (locals first). */
static void ssa_enter_export_phis(ssa_info_t *ssa) {
	basicblock_info_t *bbi;

	FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
		bbi = bb_info(bptr);

			bbi->locals->phis->count +
			bbi->stack->phis->count;

		bptr->phis = DMNEW(instruction, bptr->phicount);

		phis_copy_to(bbi->locals->phis, dst);

		dst += bbi->locals->phis->count;

		phis_copy_to(bbi->stack->phis, dst);
/* Translate one category-tagged variable index into its final flat index
   in the exported variable array (layout: [locals][stack][others]).
   For locals and stack variables the substitution chain (redundant phi
   elimination) is resolved first. */
static inline void ssa_enter_eliminate_category(ssa_info_t *ssa, s4 *pvar) {
	switch (vars_get_category(*pvar)) {
		case VARS_CATEGORY_LOCAL:
			*pvar = vars_get_index(vars_resolve_subst(ssa->locals, *pvar));
		case VARS_CATEGORY_STACK:
			/* Stack variables are placed right after the locals. */
			*pvar = vars_get_index(vars_resolve_subst(ssa->stack, *pvar)) + ssa->locals->count;
		case VARS_CATEGORY_OTHERS:
			/* Others come last; no substitution chain for them. */
			*pvar = vars_get_index(*pvar) + ssa->locals->count + ssa->stack->count;
/* Rewrite every variable index in the method -- phi destinations and all
   instruction defs/uses -- from the category-tagged encoding to the
   final flat index space (see ssa_enter_eliminate_category). */
void ssa_enter_eliminate_categories(ssa_info_t *ssa) {
	unsigned uses_count;
	basicblock_info_t *bbi;

	FOR_EACH_BASICBLOCK(ssa->jd, bb) {

		if (! ssa->keep_in_out) {

			/* Phi destinations of locals and stack variables. */
			FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
				ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));

			FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
				ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));

		/* Defs and uses of every instruction in the block. */
		FOR_EACH_INSTRUCTION(bb, iptr) {
			if (instruction_has_dst(iptr)) {
				ssa_enter_eliminate_category(ssa, &(iptr->dst.varindex));
			instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
			for (ituse = uses; ituse != uses + uses_count; ++ituse) {
				ssa_enter_eliminate_category(ssa, ituse);
			instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
/* Debugging aid: dump the phi dependency graph (edges from a phi's
   arguments that are themselves phis to the phi's destination) as a
   Graphviz dot file under /tmp/graphs/.  The path template uses '|' as
   a directory separator so that the '/' characters inside JVM class
   names can afterwards be rewritten to '.' -- the loop below first turns
   every '|' into '/', and only remaining '/' (from class names) into '.'
   (the order of the two branches matters). */
static void ssa_enter_create_phi_graph(ssa_info *ssa) {
	basicblock_info_t *bbi;
	instruction **ituse;
	char path[PATH_MAX], *ppath;

	snprintf(path, PATH_MAX, "|tmp|graphs|%s.%s.dot", ssa->jd->m->clazz->name->text, ssa->jd->m->name->text);
	for (ppath = path; *ppath; ++ppath) {
		if (*ppath == '|') *ppath = '/';
		else if (*ppath == '/') *ppath = '.';

	f = fopen(path, "w");

	/* Best effort: silently skip the dump if the file cannot be opened. */
	if (f == NULL) return;

	fprintf(f, "digraph G {\n");

	FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
		bbi = bb_info(bptr);

		FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {

			FOR_EACH_PHI_USE(itph, ituse) {
				if ((*ituse)->opc == ICMD_PHI) {
					fprintf(f, "%d -> %d;\n", (*ituse)->dst.varindex, itph->dst.varindex);
/* Create a new transition basicblock on the edge into `to` for the given
   predecessor: it holds one copy instruction per phi function of `to`
   (locals and stack), followed by a goto to `to`.  `reserved_insns`
   slots are left free at the start of the instruction array (used e.g.
   for a GETEXCEPTION in exception-handler transitions).  The block is
   appended to ssa->new_blocks and returned. */
static basicblock *ssa_leave_create_transition_block_intern(
	unsigned predecessor_index,
	unsigned reserved_insns
	basicblock_info_t *toi;

	/* Create basicblock and instruction array. */

	bb = DNEW(basicblock);
	MZERO(bb, basicblock, 1);

	/* Give the new block a fresh block number. */
	bb->nr = ssa->jd->basicblockcount;
	ssa->jd->basicblockcount += 1;

	bb->method = ssa->jd->m;
	bb->type = BBTYPE_STD;

	/* Instruction count: one phi move per phi function plus the goto
	   (and any reserved slots). */
		toi->locals->phis->count +
		toi->stack->phis->count +
	bb->iinstr = DMNEW(instruction, bb->icount);
	MZERO(bb->iinstr, instruction, bb->icount);

	/* Populate instruction array. */

	iptr = bb->iinstr + reserved_insns;

	/* Add phi moves. */

	FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
		phi_create_copy(itph, predecessor_index, iptr++);

	FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
		phi_create_copy(itph, predecessor_index, iptr++);

	/* Add goto to real block. */

	goto_init(iptr, to);

	/* Add basicblock to chain of newly created basicblocks. */

	basicblock_chain_add(ssa->new_blocks, bb);
/* Convenience wrapper around ssa_leave_create_transition_block_intern for
   normal (non-exceptional) control-flow edges: the predecessor index is
   derived from the from->to edge. */
static inline basicblock *ssa_leave_create_transition_block(
	return ssa_leave_create_transition_block_intern(
		basicblock_get_predecessor_index(from, to),
/* Materialize phi moves on a fall-through edge (bptr -> bptr->next).
   Unlike branch edges, no transition block is needed: the moves are
   appended in place at the end of bptr's instruction array. */
static void ssa_leave_create_fallthrough(ssa_info *ssa, basicblock *bptr) {
	unsigned predecessor_index;
	basicblock_info_t *toi;

	if (bptr->next == NULL) {
		/* No fallthrough. */

	predecessor_index = basicblock_get_predecessor_index(bptr, bptr->next);
	toi = bb_info(bptr->next);

	/* Resize instruction array to accommodate all phi moves. */

	icount = bptr->icount + toi->locals->phis->count + toi->stack->phis->count;

	bptr->iinstr = DMREALLOC(

	/* New moves are appended after the existing instructions. */
	iptr = bptr->iinstr + bptr->icount;
	bptr->icount = icount;

	/* Create phi moves. */

	FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
		phi_create_copy(itph, predecessor_index, iptr++);

	FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
		phi_create_copy(itph, predecessor_index, iptr++);
/* Leave SSA form, step 1: for every reached basicblock, insert phi moves
   on all outgoing control-flow edges.  Branch/switch/jsr targets get a
   dedicated transition block spliced onto the edge; fall-through edges
   get the moves appended to the predecessor block itself.  All newly
   created transition blocks are linked behind the last basicblock. */
static void ssa_leave_create_phi_moves(ssa_info *ssa) {
	basicblock *last = NULL;
	branch_target_t *table;
	lookup_target_t *lookup;
	bool has_fallthrough;

	FOR_EACH_BASICBLOCK(ssa->jd, bptr) {

		if (bptr->next == NULL) {

		if (bptr->vp == NULL) {

		if (! basicblock_reached(bptr)) {

		has_fallthrough = true;

		for (iptr = bptr->iinstr; iptr != bptr->iinstr + bptr->icount; ++iptr) {
			switch (icmd_table[iptr->opc].controlflow) {
					/* Single branch target. */
					ssa_leave_create_transition_block(ssa, bptr, iptr->dst.block);
					/* TABLESWITCH: one transition block per table entry
					   plus one for the default target. */
					table = iptr->dst.table;
					l = iptr->sx.s23.s2.tablelow;
					i = iptr->sx.s23.s3.tablehigh;

					i += 1; /* default */

						ssa_leave_create_transition_block(ssa, bptr, table->block);
					/* LOOKUPSWITCH: per-case targets and the default. */
					lookup = iptr->dst.lookup;
					i = iptr->sx.s23.s2.lookupcount;
						lookup->target.block =
							ssa_leave_create_transition_block(ssa, bptr, lookup->target.block);
					iptr->sx.s23.s3.lookupdefault.block =
						ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.lookupdefault.block);
					iptr->sx.s23.s3.jsrtarget.block =
						ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.jsrtarget.block);

			/* Track whether control can still fall off the end of the
			   block: unconditional transfers kill the fallthrough, any
			   later real instruction revives it. */
				(iptr->opc == ICMD_GOTO) ||
				(iptr->opc == ICMD_JSR) ||
				(iptr->opc == ICMD_RET) ||
				icmd_table[iptr->opc].controlflow == CF_END ||
				(iptr->opc == ICMD_TABLESWITCH) ||
				(iptr->opc == ICMD_LOOKUPSWITCH)
				has_fallthrough = false;
			} else if (iptr->opc != ICMD_NOP) {
				has_fallthrough = true;

		if (bptr->next == NULL) {

		if (! basicblock_reached(bptr->next)) {

		if (has_fallthrough) {
			ssa_leave_create_fallthrough(ssa, bptr);

	/* Add chain of new basic blocks */

	if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
		last->next = basicblock_chain_front(ssa->new_blocks);
/* Split the basicblock fragment containing `iptr` right after `iptr`.
   Each original block keeps a chain of sub-basicblocks (fragments that
   share the original block number); this finds the fragment holding
   iptr and, if instructions remain after it, cuts the fragment in two.
   Returns the fragment ending at iptr (or bptr itself for the first
   fragment, so existing pointers to bptr stay valid). */
static basicblock *ssa_leave_split_basicblock_at(ssa_info *ssa, basicblock *bptr, instruction *iptr) {

	basicblock_info_t *bbi = bb_info(bptr);
	unsigned iidx = iptr - bptr->iinstr;
	basicblock *newblock;
	basicblock *tosplit;

	assert(iidx < bptr->icount);

	/* If there are no subbasicblocks yet, we initialize the first one to be a
	   copy of the original basicblock. */

	if (basicblock_chain_empty(bbi->subbasicblocks)) {
		newblock = DNEW(basicblock);

		newblock->next = NULL;
		newblock->vp = NULL;
		basicblock_chain_add(bbi->subbasicblocks, newblock);

	/* Find the subbasicblock that will be split:
	   the one that contains iptr. */

	tosplit = basicblock_chain_front(bbi->subbasicblocks);

	while (tosplit->next && (iidx >= (pos + tosplit->icount))) {
		assert(bptr->nr == tosplit->nr);
		pos += tosplit->icount;
		tosplit = tosplit->next;

	assert(bptr->nr == tosplit->nr);

	/* Calculate number of instructions left in block to split. */

	ileft = iptr - tosplit->iinstr + 1;
	assert(ileft <= tosplit->icount);

	/* If there are any instructions left in the block to split, split */

	if (ileft < tosplit->icount) {
		newblock = DNEW(basicblock);
		*newblock = *tosplit;

		tosplit->next = newblock;
		tosplit->icount = ileft;

		/* The new fragment takes over the tail of the instruction array. */
		newblock->icount -= ileft;
		newblock->iinstr += ileft;

		assert(tosplit->nr == bptr->nr);
		assert(newblock->nr == bptr->nr);
		assert(newblock->next == NULL);

		if (newblock->next == NULL) {
			bbi->subbasicblocks->last = newblock;

	/* We won't break pointers/references to bptr.
	   So return bptr instead of the first fragment.
	   Later, we will put the first fragment into the memory used by bptr.
	*/

	if (tosplit == basicblock_chain_front(bbi->subbasicblocks)) {
/* Create a transition exception handler for an exceptional edge: the old
   handler prologue `to` is deleted and replaced by a new BBTYPE_EXH
   block that starts with the GETEXCEPTION copied from the old prologue,
   then performs the phi moves, and ends with the goto to the real
   handler (also copied from the old prologue). */
static basicblock *ssa_leave_create_transition_exception_handler(

	/* From is a try block, to is an exception handler prologue. */

	/* Remove old prologue. */

	to->flags = BBDELETED;

	/* Create new exception handler. */

	exh = ssa_leave_create_transition_block_intern(
		basicblock_get_ex_predecessor_index(from, pei, to),

	exh->type = BBTYPE_EXH;

	/* Copy goto to real exception handler at the end of the exception handler
	   (it was created by ssa_leave_create_transition_block_intern). */

	assert(to->iinstr[to->icount - 1].opc == ICMD_GOTO);
	assert(exh->iinstr[exh->icount - 1].opc == ICMD_GOTO);
	exh->iinstr[exh->icount - 1] = to->iinstr[to->icount - 1];

	/* Copy getexception from the old prologue. */

	assert(to->iinstr[0].opc == ICMD_GETEXCEPTION);
	exh->iinstr[0] = to->iinstr[0];
/* Build an exception_entry covering the try fragment `from` with the
   given handler and catch type.
   NOTE(review): the comment below speaks of subbasicblocks, but the
   visible condition only tests `fromi != NULL` -- parts of the condition
   may be elided here; confirm against the full source. */
static exception_entry *ssa_leave_create_transition_exception_entry(
	basicblock *handler,
	classref_or_classinfo catchtype

	exception_entry *ee = DNEW(exception_entry);
	basicblock_info_t *fromi = bb_info(from);

	/* If the try block has subbasicblocks, the next block is the next fragment,
	   not the successor block. */

	if (fromi != NULL) {
		ee->end = basicblock_chain_front(fromi->subbasicblocks)->next;
		ee->end = from->next;
	ee->handler = handler;
	ee->catchtype = catchtype;
/* Leave SSA form, step 2: insert phi moves on exceptional edges.
   For every PEI inside a protected region the try block is split right
   after the PEI, a transition exception handler with the phi moves is
   created, and a new exception entry mapping that fragment to the new
   handler replaces/extends the original entry.  New blocks are appended
   at the end of the basicblock list. */
static void ssa_leave_create_exceptional_phi_moves(ssa_info *ssa) {
	exception_entry *ite;
	exception_entry_chain_t chain;
	classref_or_classinfo catchtype;
	exception_entry *ee;
	basicblock *last = NULL;

	/* Remember where step 1's blocks ended, then reuse the chain. */
	if (! basicblock_chain_empty(ssa->new_blocks)) {
		last = basicblock_chain_back(ssa->new_blocks);

	basicblock_chain_clear(ssa->new_blocks);

	for (ite = ssa->jd->exceptiontable; ite; ite = ite->down) {
		bptr = ite->handler;
		catchtype = ite->catchtype;
		exception_entry_chain_init(&chain);
		for (ittry = ite->start; ittry != ite->end; ittry = ittry->next) {
			if (basicblock_reached(ittry)) {
				/* Dead code does not have a basicblock_info_t associated. */

				FOR_EACH_INSTRUCTION(ittry, iptr) {
					if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
						/* try is basicblock fragment till (including) the pei */
						try = ssa_leave_split_basicblock_at(ssa, ittry, iptr);
						/* ee is handler for try */
						exh = ssa_leave_create_transition_exception_handler(
						ee = ssa_leave_create_transition_exception_entry(
							ssa, try, exh, catchtype
						exception_entry_chain_add(&chain, ee);

						ssa->jd->exceptiontablelength += 1;

		if (! exception_entry_chain_empty(&chain)) {
			exception_entry_chain_back(&chain)->down = ite->down;
			exception_entry_chain_back(&chain)->next = ite->next;
			/* Replace original exception entry by first new one. */
			*ite = *exception_entry_chain_front(&chain);
			/* Set current iteration position to last newly created one. */
			ite = exception_entry_chain_back(&chain);

	/* Find the current tail of the basicblock list. */
	for (last = ssa->jd->basicblocks; last->next != NULL; last = last->next);

	if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
		last->next = basicblock_chain_front(ssa->new_blocks);
/* Map one SSA variable index back to its original (pre-SSA) index.
   The flat index space is [locals][stack][others]; locals and others
   are restored via the recorded old index.  For stack variables the old
   index is a stack depth, which must be turned back into the block's
   invar or outvar at that depth, depending on whether this variable is
   the last definition of that depth within the block. */
void ssa_simple_leave_restore(ssa_info_t *ssa, basicblock *bptr, s4 *pvar) {
	basicblock_info_t *bbi;

	if (var < ssa->locals->count) {
		*pvar = vars_get_old_index(ssa->locals, var);
	} else if (var < ssa->locals->count + ssa->stack->count) {
		/* Stack variable: recover the stack depth it lived at. */
		index = vars_get_old_index(
			var - ssa->locals->count

		bbi = bb_info(bptr);

		/* We have to determine whether to take an invar or an outvar for
		   the stack depth ``index''.
		   The state array contains the last definition of the stack element
		   at that depth. */

		if (state_array_get_var(bbi->stack->state_array, index) == var) {
			/* The last definition of a stack depth inside the basicblock.
			   This is the outvar at the given depth.
			   If there is no outvar at the given depth, it must be an invar.
			*/
			if (index < bptr->outdepth) {
				*pvar = bptr->outvars[index];
			} else if (index < bptr->indepth) {
				*pvar = bptr->invars[index];
			/* A different than the last definition of a stack depth.
			   This must be an invar.
			*/
			assert(index < bptr->indepth);
			*pvar = bptr->invars[index];
		/* Others: directly restore the recorded old index. */
		*pvar = vars_get_old_index(
			var - ssa->locals->count - ssa->stack->count
/* Leave SSA form the simple way: undo the renaming in place.  Every
   def/use is restored to its original variable index (in reverse
   instruction order so a definition is renamed only after all its uses),
   the artificial exception handlers are unfixed, and all jitdata fields
   saved before entering SSA are restored. */
void ssa_simple_leave(ssa_info_t *ssa) {
	unsigned uses_count;

	FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
		if (bptr->type == BBTYPE_EXH) {
			/* (Artificial) exception handler blocks will be eliminated. */

		/* In reverse order. We need to rename the definition after any use! */
		FOR_EACH_INSTRUCTION_REV(bptr, iptr) {
			if (instruction_has_dst(iptr)) {
				ssa_simple_leave_restore(ssa, bptr, &(iptr->dst.varindex));
			instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
			for (ituse = uses; ituse != uses + uses_count; ++ituse) {
				ssa_simple_leave_restore(ssa, bptr, ituse);
			instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);

	unfix_exception_handlers(ssa->jd);

	/* Restore the jitdata state saved before entering SSA form. */
	ssa->jd->maxlocals = ssa->original.maxlocals;
	ssa->jd->maxinterfaces = ssa->original.maxinterfaces;
	ssa->jd->local_map =ssa->original.local_map;
	ssa->jd->var = ssa->original.var;
	ssa->jd->vartop = ssa->original.vartop;
	ssa->jd->varcount = ssa->original.varcount;
	ssa->jd->localcount = ssa->original.localcount;
/* Entry point of this SSA prototype: drive the full enter/transform/leave
   pipeline for one method.  Per-basicblock info structs are allocated
   for all reached blocks, then: mark loops, traverse (rename), eliminate
   variable categories, export variables and phis, verify, create phi
   moves for normal and exceptional edges, and finally leave SSA form
   again via ssa_simple_leave.  The printf sections are SSA_VERBOSE
   debugging output. */
void yssa(jitdata *jd) {
	basicblock_info_t *iti;

	printf("=============== [ before %s ] =========================\n", jd->m->name->text);
	printf("=============== [ /before ] =========================\n");

	ssa = DNEW(ssa_info);

	ssa_info_init(ssa, jd);
	ssa->keep_in_out = true;

	/* Attach a basicblock_info_t to every reached block. */
	FOR_EACH_BASICBLOCK(jd, it) {
		if (basicblock_reached(it)) {
			iti = DNEW(basicblock_info_t);
			basicblock_info_init(iti, it, jd);

	/* Enter SSA form. */
	ssa_enter_mark_loops(jd->basicblocks);

	ssa_enter_traverse(ssa, jd->basicblocks);

	ssa_enter_eliminate_categories(ssa);

	ssa_enter_export_variables(ssa);

	ssa_enter_export_phis(ssa);

	ssa_enter_verify_no_redundant_phis(ssa);

	/*ssa_enter_create_phi_graph(ssa);*/

	/*escape_analysis_perform(ssa->jd);*/

	/* Leave SSA form: materialize phi moves, then undo renaming. */
	ssa_leave_create_phi_moves(ssa);

	ssa_leave_create_exceptional_phi_moves(ssa);

	printf("=============== [ mid ] =========================\n");
	printf("=============== [ /mid ] =========================\n");

	ssa_simple_leave(ssa);

	printf("=============== [ after ] =========================\n");
	printf("=============== [ /after ] =========================\n");
/* Collapse the per-block sub-basicblock chains back into the main
   basicblock list: the first fragment is copied over the original block
   (so existing pointers to it stay valid) and iteration continues from
   the last fragment of the chain. */
void eliminate_subbasicblocks(jitdata *jd) {
	basicblock *bptr, *next;
	basicblock_info_t *bbi;

	FOR_EACH_BASICBLOCK(jd, bptr) {
		bbi = bb_info(bptr);

		if (! basicblock_chain_empty(bbi->subbasicblocks)) {

			/* Copy first subblock, to keep pointers intact. */
			*bptr = *basicblock_chain_front(bbi->subbasicblocks);
			bptr = basicblock_chain_back(bbi->subbasicblocks);

	printf("=============== [ elim ] =========================\n");
	printf("=============== [ /elim ] =========================\n");
2381 * These are local overrides for various environment variables in Emacs.
2382 * Please do not remove this and leave it at the end of the file, where
2383 * Emacs will automagically detect them.
2384 * ---------------------------------------------------------------------
2387 * indent-tabs-mode: t
2391 * vim:noexpandtab:sw=4:ts=4: