1 /* src/vm/jit/optimizing/ssa3.c
4 CACAOVM - Verein zu Foerderung der freien virtuellen Machine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 SSA transformation PROTOTYPE based on:
26 Adding Static Single Assignment Form and a Graph Coloring Register
27 Allocator to the Java Hotspot Client Compiler, 2000
28 (http://www.ssw.uni-linz.ac.at/Research/Reports/Report15.html)
32 * Adapt for exception handling [done]
 * Elimination of redundant PHI functions. [in progress]
34 * Handle also inout variables. [done]
 * Create PHI functions lazily, when they are used for the first time
36 (I suspect that currently PHIs are created that are never used).
 * REWRITE. The code was never designed for production. [done]
38 * Unify access to phi args.
44 #include "vm/jit/jit.hpp"
45 #include "vm/global.h"
46 #include "mm/memory.h"
47 #include "mm/dumpmemory.hpp"
48 #include "toolbox/list.h"
53 /*#define ELIMINATE_NOP_LOAD_STORE*/
56 /*#define SSA_VERBOSE */
59 __attribute__((always_inline))
62 /*** phi ********************************************************************/
66 PHI_FLAG_REDUNDANT_ALL,
67 PHI_FLAG_REDUNDANT_ONE
70 static inline void phi_set_flag(instruction *iptr, phi_flags_t flag) {
71 iptr->flags.bits |= (1 << flag);
74 static inline void phi_clear_flag(instruction *iptr, phi_flags_t flag) {
75 iptr->flags.bits &= ~(1 << flag);
78 static inline bool phi_has_flag(const instruction *iptr, phi_flags_t flag) {
79 return (iptr->flags.bits & (1 << flag)) != 0;
82 static inline instruction *phi_get_subst(instruction *iptr) {
83 return iptr->sx.s23.s2.iargs[-1];
86 static inline bool phi_has_subst(const instruction *iptr) {
87 return (iptr->sx.s23.s2.iargs[-1] != NULL);
91 static inline void phi_init(instruction *iptr, unsigned argcount, s4 index) {
96 iptr->dst.varindex = 0;
97 iptr->s1.argcount = argcount;
98 iptr->sx.s23.s2.iargs = DMNEW(instruction *, argcount + 1);
99 iptr->sx.s23.s2.iargs += 1;
100 iptr->sx.s23.s3.javaindex = index;
102 /* subst field - If non-null, the phi function shall be replaced by the
103 value produced by the subst instruction. */
104 iptr->sx.s23.s2.iargs[-1] = NULL;
107 for (i = 0; i < argcount; ++i) {
108 iptr->sx.s23.s2.iargs[i] = (instruction *)0x7fffffff;
113 #define phi_assert_opc(iptr) assert(iptr->opc == ICMD_PHI)
115 #define phi_assert_arg(iptr, arg) assert(arg < iptr->s1.argcount)
117 static inline s4 phi_arg_count(const instruction *iptr) {
118 phi_assert_opc(iptr);
119 return iptr->s1.argcount;
122 static inline instruction *phi_get_arg(const instruction *iptr, unsigned arg) {
123 phi_assert_opc(iptr);
124 phi_assert_arg(iptr, arg);
125 return iptr->sx.s23.s2.iargs[arg];
128 static inline s4 phi_get_arg_var(const instruction *iptr, unsigned arg) {
129 phi_assert_opc(iptr);
130 phi_assert_arg(iptr, arg);
131 return iptr->sx.s23.s2.iargs[arg]->dst.varindex;
134 static inline void phi_set_all_args(instruction *iptr, instruction *value) {
136 phi_assert_opc(iptr);
137 for (i = 0; i < iptr->s1.argcount; ++i) {
138 iptr->sx.s23.s2.iargs[i] = value;
142 static inline s4 phi_get_index(const instruction *iptr) {
143 phi_assert_opc(iptr);
144 return iptr->sx.s23.s3.javaindex;
147 static inline s4 phi_get_dst(const instruction *iptr) {
148 phi_assert_opc(iptr);
149 return iptr->dst.varindex;
152 static inline void phi_set_dst(instruction *iptr, s4 dst) {
153 phi_assert_opc(iptr);
154 iptr->dst.varindex = dst;
157 static inline bool phi_get_used(const instruction *iptr) {
158 phi_assert_opc(iptr);
159 return phi_has_flag(iptr, PHI_FLAG_USED);
162 static void phi_set_used(instruction *iptr) {
163 phi_assert_opc(iptr);
164 if (! phi_has_flag(iptr, PHI_FLAG_USED)) {
165 phi_set_flag(iptr, PHI_FLAG_USED);
166 /* TODO recurse into arguments */
170 static instruction *phi_resolve_use(instruction *use) {
172 while (use->opc == ICMD_PHI) {
173 if (phi_has_subst(use)) {
174 use = phi_get_subst(use);
183 static inline void phi_set_arg(instruction *iptr, unsigned arg, instruction *value) {
184 phi_assert_opc(iptr);
185 phi_assert_arg(iptr, arg);
186 assert(value != NULL);
187 iptr->sx.s23.s2.iargs[arg] = value;
190 static inline bool phi_is_redundant(const instruction *iptr) {
192 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ONE) ||
193 phi_has_flag(iptr, PHI_FLAG_REDUNDANT_ALL)
197 static inline void phi_create_copy(instruction *iptr, unsigned arg, instruction *copy) {
198 phi_assert_opc(iptr);
199 phi_assert_arg(iptr, arg);
200 copy->dst.varindex = phi_get_dst(iptr);
201 copy->s1.varindex = phi_resolve_use(phi_get_arg(iptr, arg))->dst.varindex;
202 if (copy->dst.varindex == copy->s1.varindex || phi_is_redundant(iptr)) {
203 copy->opc = ICMD_NOP;
205 copy->opc = ICMD_COPY;
210 static void phi_print(const instruction *iptr) {
213 printf("%d = phi(", iptr->dst.varindex);
214 for (i = 0; i < iptr->s1.argcount; ++i) {
215 use = phi_resolve_use(iptr->sx.s23.s2.iargs[i]);
217 printf("%d, ", use->dst.varindex);
226 #define FOR_EACH_PHI_USE_CAST(iptr, it, cast) \
228 (it) = cast (iptr)->sx.s23.s2.iargs; \
229 (it) != cast (iptr)->sx.s23.s2.iargs + (iptr)->s1.argcount; \
233 #define FOR_EACH_PHI_USE(iptr, it) \
234 FOR_EACH_PHI_USE_CAST(iptr, it, )
236 #define FOR_EACH_PHI_USE_CONST(iptr, it) \
237 FOR_EACH_PHI_USE_CAST(iptr, it, (const instruction *))
239 static void phi_calculate_redundancy(instruction *iptr) {
241 s4 dst = iptr->dst.varindex;
245 unsigned num_different = 0;
246 instruction *different;
248 if (phi_is_redundant(iptr)) return; /* XXX */
250 /* x = phi(x, y, x, x) ... PHI_FLAG_REDUNDANT_ONE
251 x = phi(x, x, x, x) ... PHI_FLAG_REDUNDANT_ALL */
253 FOR_EACH_PHI_USE(iptr, ituse) {
254 iuse = phi_resolve_use(*ituse);
257 use = iuse->dst.varindex;
262 if (num_different >= 2) {
263 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
264 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
269 if (num_different == 1) {
270 /* Set the subst field of the instruction.
271 I.e. the instruction will be replaced by the value produced by y. */
272 iptr->sx.s23.s2.iargs[-1] = different;
274 phi_set_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
275 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
276 } else if (num_different == 0) {
278 /*iptr->sx.s23.s2.iargs[-1] = iptr->sx.s23.s2.iargs[0];*/
280 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ONE);
281 phi_clear_flag(iptr, PHI_FLAG_REDUNDANT_ALL);
287 /*** goto *******************************************************************/
289 static inline void goto_init(instruction *iptr, basicblock *dst) {
290 iptr->opc = ICMD_GOTO;
291 iptr->dst.block = dst;
294 /*** instruction ***********************************************************/
296 static void instruction_get_uses(const instruction *iptr, s4 *buf, s4 **puses, unsigned *puses_count) {
297 unsigned uses_count = 0;
299 switch (icmd_table[iptr->opc].dataflow) {
302 buf[2] = iptr->sx.s23.s3.varindex;
307 buf[1] = iptr->sx.s23.s2.varindex;
314 buf[0] = iptr->s1.varindex;
317 *puses_count = uses_count;
325 *puses = iptr->sx.s23.s2.args;
326 *puses_count = iptr->s1.argcount;
336 static inline void instruction_set_uses(instruction *iptr, s4 *buf, s4 *uses, unsigned uses_count) {
338 switch (uses_count) {
340 iptr->sx.s23.s3.varindex = buf[2];
342 iptr->sx.s23.s2.varindex = buf[1];
344 iptr->s1.varindex = buf[0];
349 /*** vars *******************************************************************/
351 #define VARS_CATEGORY_SHIFT 28
352 #define VARS_INDEX_MASK 0x0FFFFFFF
354 #define VARS_CATEGORY_LOCAL 0
355 #define VARS_CATEGORY_STACK 1
356 #define VARS_CATEGORY_OTHERS 2
358 #define VAR_TYPE_SUBSTITUED 666
360 #define OLD_INDEX_UNUSED -2
368 vars_item_t items[9000]; /* XXX hardcoded max */
374 static inline unsigned vars_add_item(vars_t *vs, const varinfo *item) {
375 unsigned i = vs->count;
378 vs->items[i].v = *item;
379 vs->items[i].old_index = OLD_INDEX_UNUSED;
380 return (vs->category << VARS_CATEGORY_SHIFT) | i;
383 static inline unsigned vars_add(vars_t *vs) {
384 unsigned i = vs->count;
387 return (vs->category << VARS_CATEGORY_SHIFT) | i;
390 static inline varinfo *vars_back(vars_t *vs) {
391 assert(vs->count > 0);
392 return &(vs->items[vs->count - 1].v);
395 static inline void vars_init(vars_t *vs, unsigned category) {
396 vs->max = sizeof(vs->items) / sizeof(vs->items[0]);
398 assert((category & 0x3) == category);
399 vs->category = category;
402 static inline unsigned vars_get_category(unsigned varindex) {
403 return (varindex >> VARS_CATEGORY_SHIFT);
406 static inline unsigned vars_get_index(unsigned varindex) {
407 return (varindex & VARS_INDEX_MASK);
410 static void vars_subst(vars_t *vs, unsigned varindex, unsigned replacementindex) {
411 varindex = vars_get_index(varindex);
412 replacementindex = vars_get_index(replacementindex);
414 vs->items[varindex].v.type = VAR_TYPE_SUBSTITUED;
415 vs->items[varindex].v.vv.ii[1] = replacementindex;
418 static unsigned vars_resolve_subst(const vars_t *vs, unsigned varindex) {
420 unsigned loop_ctr = 0;
422 varindex = vars_get_index(varindex);
424 if (vs->items[varindex].v.type == VAR_TYPE_SUBSTITUED) /*fprintf(stderr, "*")*/;
426 while (vs->items[varindex].v.type == VAR_TYPE_SUBSTITUED) {
427 assert(loop_ctr++ != vs->count);
428 varindex = vs->items[varindex].v.vv.ii[1];
431 return (vs->category << VARS_CATEGORY_SHIFT) | varindex ;
434 static void vars_copy_to_final(vars_t *vs, varinfo *dst) {
435 const vars_item_t *it;
438 for (it = vs->items; it != vs->items + vs->count; ++it, ++dst) {
444 /* Eliminate VAR_TYPE_SUBSTITUED as it leads to problems. */
446 if (dst->type == VAR_TYPE_SUBSTITUED) {
447 subst = vars_get_index(vars_resolve_subst(vs, it - vs->items));
448 dst->type = vs->items[subst].v.type;
454 static void vars_import(vars_t *vs, varinfo *v, unsigned count, s4 old_index) {
457 assert((vs->count + count) <= vs->max);
459 it = vs->items + vs->count;
462 while (count-- > 0) {
464 it->old_index = old_index;
471 static inline void vars_record_old_index(vars_t *vs, unsigned varindex, s4 old_index) {
473 varindex = vars_get_index(varindex);
475 assert(varindex < vs->count);
477 item = vs->items + varindex;
480 item->old_index == OLD_INDEX_UNUSED ||
481 item->old_index == old_index
484 item->old_index = old_index;
487 static inline s4 vars_get_old_index(vars_t *vs, unsigned varindex) {
490 varindex = vars_get_index(varindex);
492 assert(varindex < vs->count);
493 old = vs->items[varindex].old_index;
494 assert(old != OLD_INDEX_UNUSED);
499 /*** phis *******************************************************************/
507 static inline void phis_init(phis_t *ps, unsigned max) {
510 ps->items = DMNEW(instruction, max);
513 static inline instruction *phis_add(phis_t *ps) {
514 unsigned i = ps->count;
517 return ps->items + i;
520 static inline instruction *phis_get(const phis_t *ps, unsigned i) {
521 assert(i < ps->count);
522 return ps->items + i;
525 static inline bool phis_contains(const phis_t *ps, const instruction *phi) {
526 return (ps->items <= phi) && (phi < (ps->items + ps->max));
529 #define FOR_EACH_PHI_FUNCTION_(ps, it) \
530 for ((it) = (ps)->items; (it) != (ps)->items + (ps)->count; ++(it)) \
532 #define FOR_EACH_PHI_FUNCTION(ps, it) \
533 FOR_EACH_PHI_FUNCTION_(ps, it) if (!phi_is_redundant((it)))
536 FIXME() inline void phis_print(const phis_t *ps) {
537 const instruction *iptr;
538 FOR_EACH_PHI_FUNCTION(ps, iptr) {
544 static inline unsigned phis_copy_to(const phis_t *ps, instruction *dst) {
546 instruction *out = dst;
548 FOR_EACH_PHI_FUNCTION(ps, it) {
555 /*** state_array ************************************************************/
562 static inline void state_array_init(state_array_t *sa, unsigned count) {
567 static inline bool state_array_has_items(const state_array_t *sa) {
568 return (sa->items != NULL);
571 static inline s4 state_array_get_var(const state_array_t *sa, unsigned index) {
572 assert(index < sa->count);
573 assert(sa->items[index]);
574 return sa->items[index]->dst.varindex;
577 static inline instruction *state_array_get(const state_array_t *sa, unsigned index) {
578 assert(index < sa->count);
579 return sa->items[index];
582 static inline void state_array_set(const state_array_t *sa, unsigned index, instruction *value) {
583 assert(index < sa->count);
584 sa->items[index] = value;
587 static inline void state_array_copy(state_array_t *sa, state_array_t *other) {
588 assert(sa->count == other->count);
589 MCOPY(sa->items, other->items, instruction *, sa->count);
592 #define state_array_assert_items(sa) assert(state_array_has_items(sa) || (sa->count == 0))
593 #define state_array_assert_no_items(sa) assert(! state_array_has_items(sa))
595 static inline void state_array_allocate_items(state_array_t *sa) {
596 sa->items = DMNEW(instruction *, sa->count);
597 MZERO(sa->items, instruction *, sa->count);
600 /*** basicblock_chain *******************************************************/
605 } basicblock_chain_t;
607 static void basicblock_chain_init(basicblock_chain_t *bbc) {
612 #define basicblock_chain_clear basicblock_chain_init
614 static void basicblock_chain_add(basicblock_chain_t *bbc, basicblock *bb) {
615 if (bbc->first == NULL) {
616 assert(bbc->last == NULL);
617 assert(bb->next == NULL);
621 assert(bbc->last->next == NULL);
622 bbc->last->next = bb;
627 static inline basicblock *basicblock_chain_front(basicblock_chain_t *bbc) {
632 static inline basicblock *basicblock_chain_back(basicblock_chain_t *bbc) {
637 static inline bool basicblock_chain_empty(const basicblock_chain_t *bbc) {
638 return bbc->first == NULL;
641 /*** exception_entry_chain ***************************************************/
644 exception_entry *first;
645 exception_entry *last;
646 } exception_entry_chain_t;
648 static void exception_entry_chain_init(exception_entry_chain_t *eec) {
653 #define exception_entry_chain_clear exception_entry_chain_init
655 static void exception_entry_chain_add(exception_entry_chain_t *eec, exception_entry *ee) {
656 if (eec->first == NULL) {
660 eec->last->next = ee;
661 eec->last->down = ee;
666 static inline bool exception_entry_chain_empty(const exception_entry_chain_t *eec) {
667 return eec->first == NULL;
670 static inline exception_entry *exception_entry_chain_back(exception_entry_chain_t *eec) {
675 static inline exception_entry *exception_entry_chain_front(exception_entry_chain_t *eec) {
680 /*** traversal **************************************************************/
683 unsigned (*var_num_to_index)(void *vp, s4 var);
684 varinfo *(*index_to_initial_var)(void *vp, unsigned index);
685 varinfo *(*var_num_to_varinfo)(void *vp, s4 var);
686 unsigned (*variables_count)(void *vp);
691 state_array_t *state_array;
693 traversal_ops_t *ops;
696 /*** basicblock_info ********************************************************/
698 typedef struct basicblock_info {
702 unsigned backward_branches;
707 basicblock_chain_t *subbasicblocks;
709 unsigned complete_predecessors;
711 #if defined(SSA_VERIFY)
712 unsigned num_phi_elimination;
717 /*** traversal **************************************************************/
719 void traversal_init(traversal_t *t, unsigned count, void *ops_vp, traversal_ops_t *ops) {
720 t->phis = DNEW(phis_t);
721 phis_init(t->phis, count);
723 t->state_array = DNEW(state_array_t);
724 state_array_init(t->state_array, count);
730 instruction *traversal_create_phi(traversal_t *t, vars_t *v, unsigned argcount, s4 index) {
731 instruction *phi = phis_add(t->phis);
734 phi_init(phi, argcount, index);
735 dst = vars_add_item(v, t->ops->index_to_initial_var(t->ops_vp, index));
736 phi_set_dst(phi, dst);
738 state_array_set(t->state_array, index, phi);
740 vars_record_old_index(v, phi->dst.varindex, index);
745 static void traversal_rename_def(traversal_t *t, vars_t *vars, instruction *iptr) {
750 state_array_assert_items(t->state_array);
752 v = t->ops->var_num_to_varinfo(t->ops_vp, iptr->dst.varindex);
753 index = t->ops->var_num_to_index(t->ops_vp, iptr->dst.varindex);
755 old = iptr->dst.varindex;
756 iptr->dst.varindex = vars_add_item(vars, v);
757 state_array_set(t->state_array, index, iptr);
759 vars_record_old_index(vars, iptr->dst.varindex, index);
762 static void traversal_rename_use(traversal_t *t, vars_t *vars, s4 *puse) {
766 state_array_assert_items(t->state_array);
768 index = t->ops->var_num_to_index(t->ops_vp, *puse);
770 assert(state_array_get(t->state_array, index));
772 *puse = state_array_get_var(t->state_array, index);
774 vars_record_old_index(vars, *puse, index);
777 static inline unsigned traversal_variables_count(traversal_t *t) {
778 return t->ops->variables_count(t->ops_vp);
781 unsigned local_var_num_to_index(void *vp, s4 var) {
782 return (unsigned)var;
785 varinfo *local_index_to_initial_var(void *vp, unsigned index) {
786 jitdata *jd = (jitdata *)vp;
787 return jd->var + index;
790 varinfo *local_var_num_to_varinfo(void *vp, s4 var) {
791 jitdata *jd = (jitdata *)vp;
792 return jd->var + var;
795 unsigned local_variables_count(void *vp) {
796 jitdata *jd = (jitdata *)vp;
797 return jd->localcount;
800 traversal_ops_t traversal_local_ops = {
801 local_var_num_to_index,
802 local_index_to_initial_var,
803 local_var_num_to_varinfo,
804 local_variables_count
807 unsigned inout_var_num_to_index(void *vp, s4 var) {
808 basicblock *bb = (basicblock *)vp;
811 for (i = 0; i < bb->indepth; ++i) {
812 if (bb->invars[i] == var) {
817 for (i = 0; i < bb->outdepth; ++i) {
818 if (bb->outvars[i] == var) {
826 varinfo *inout_index_to_initial_var(void *vp, unsigned index) {
827 basicblock *bb = (basicblock *)vp;
828 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
829 assert(index < bb->indepth);
830 return jd->var + bb->invars[index];
833 varinfo *inout_var_num_to_varinfo(void *vp, s4 var) {
834 basicblock *bb = (basicblock *)vp;
835 jitdata *jd = (jitdata *)(((basicblock_info_t *)bb->vp)->locals->ops_vp); /* evil hack */
836 return jd->var + var;
839 unsigned inout_variables_count(void *vp) {
840 basicblock *bb = (basicblock *)vp;
844 traversal_ops_t traversal_inout_ops = {
845 inout_var_num_to_index,
846 inout_index_to_initial_var,
847 inout_var_num_to_varinfo,
848 inout_variables_count
851 /*** basicblock_info ********************************************************/
853 void basicblock_info_init(basicblock_info_t *bbi, basicblock *bb, jitdata *jd) {
854 MZERO(bbi, basicblock_info_t, 1);
856 bbi->locals = DNEW(traversal_t);
857 traversal_init(bbi->locals, jd->localcount, jd, &traversal_local_ops);
859 bbi->stack = DNEW(traversal_t);
860 traversal_init(bbi->stack, jd->maxinterfaces, bb, &traversal_inout_ops);
862 bbi->subbasicblocks = DNEW(basicblock_chain_t);
863 basicblock_chain_init(bbi->subbasicblocks);
866 /*** basicblock *************************************************************/
868 static inline basicblock_info_t *basicblock_info(basicblock *bb) {
869 return (basicblock_info_t *)(bb->vp);
872 #define bb_info basicblock_info
874 static unsigned basicblock_get_predecessor_count(basicblock *bb) {
878 ret = bb->predecessorcount;
880 FOR_EACH_EXPREDECESSOR(bb, itpred) {
881 ret += (*itpred)->exouts;
887 static unsigned basicblock_get_predecessor_index(basicblock *from, basicblock *to) {
891 FOR_EACH_PREDECESSOR(to, itpred) {
892 if (*itpred == from) break;
896 assert(j != to->predecessorcount);
901 static unsigned basicblock_get_ex_predecessor_index(basicblock *from, unsigned pei, basicblock *to) {
905 j = to->predecessorcount;
907 FOR_EACH_EXPREDECESSOR(to, itpred) {
908 if ((*itpred)->nr == from->nr) {
912 j += (*itpred)->exouts;
919 /*** ssa_info ***************************************************************/
921 typedef struct ssa_info {
930 basicblock_chain_t *new_blocks;
942 unsigned keep_in_out:1;
944 } ssa_info, ssa_info_t;
946 void ssa_info_init(ssa_info_t *ssa, jitdata *jd) {
949 ssa->locals = DNEW(vars_t);
950 vars_init(ssa->locals, VARS_CATEGORY_LOCAL);
952 /* Initialize first version of locals, that is always available. */
953 vars_import(ssa->locals, jd->var, jd->localcount, 0);
955 ssa->stack = DNEW(vars_t);
956 vars_init(ssa->stack, VARS_CATEGORY_STACK);
958 ssa->others = DNEW(vars_t);
959 vars_init(ssa->others, VARS_CATEGORY_OTHERS);
961 ssa->new_blocks = DNEW(basicblock_chain_t);
962 basicblock_chain_init(ssa->new_blocks);
964 ssa->original.maxlocals = jd->maxlocals;
965 ssa->original.maxinterfaces = jd->maxinterfaces;
966 ssa->original.local_map = jd->local_map;
967 ssa->original.var = jd->var;
968 ssa->original.vartop = jd->vartop;
969 ssa->original.varcount = jd->varcount;
970 ssa->original.localcount = jd->localcount;
972 ssa->keep_in_out = false;
975 /*** others_mapping *********************************************************/
977 static inline void others_mapping_set(ssa_info *ssa, s4 var, s4 new_var) {
978 ssa->jd->var[var].vv.ii[1] = new_var;
981 static inline s4 others_mapping_get(const ssa_info *ssa, s4 var) {
982 return ssa->jd->var[var].vv.ii[1];
985 /*** code *******************************************************************/
987 void fix_exception_handlers(jitdata *jd) {
989 basicblock *exh = NULL;
995 basicblock_chain_t chain;
996 basicblock *last = NULL;
997 basicblock *marker = NULL;
1001 if (jd->exceptiontablelength == 0) {
1005 basicblock_chain_init(&chain);
1007 /* Remember, where we started adding IO variables. */
1009 vartop = jd->vartop;
1011 /* For each exception handler block, create one block with a prologue block */
1013 FOR_EACH_BASICBLOCK(jd, bptr) {
1014 if (bptr->type == BBTYPE_EXH) {
1018 +---- EXH (exh)-------+
1021 +---------------------+
1023 === TRANSFORMED TO ===>
1025 +---- PROL (exh) -------+
1027 | GETEXECEPTION => OU3 |
1030 +-----------------------+
1032 +---- REAL_EXH (std) -+
1035 +---------------------+
1039 bptr->type = BBTYPE_STD;
1040 bptr->predecessorcount = 0; /* legacy */
1042 /* Create basicblock with 2 instructions */
1044 exh = DNEW(basicblock);
1045 MZERO(exh, basicblock, 1);
1047 iptr = DMNEW(instruction, 2);
1048 MZERO(iptr, instruction, 2);
1050 /* Create outvars */
1052 exh->outdepth = bptr->indepth;
1054 if (exh->outdepth > 0) {
1055 exh->outvars = DMNEW(s4, exh->outdepth);
1056 for (i = 0; i < exh->outdepth; ++i) {
1057 exh->outvars[i] = vartop++;
1063 exh->indepth = exh->outdepth - 1;
1064 exh->invars = exh->outvars;
1067 /* Create new outvar */
1069 assert(jd->vartop < jd->varcount);
1072 jd->var[v].type = TYPE_ADR;
1073 jd->var[v].flags = INOUT;
1076 exh->nr = jd->basicblockcount;
1077 jd->basicblockcount += 1;
1079 exh->type = BBTYPE_EXH;
1084 exh->outvars = DNEW(s4);
1085 exh->outvars[0] = v;
1087 exh->predecessorcount = -1; /* legacy */
1088 exh->flags = BBFINISHED;
1089 exh->method = jd->m;
1091 basicblock_chain_add(&chain, exh);
1095 iptr->opc = ICMD_GETEXCEPTION;
1096 iptr->dst.varindex = exh->outvars[exh->outdepth - 1];
1099 /* Goto real exception handler */
1101 goto_init(iptr, bptr);
1108 if (bptr->next == NULL) {
1115 /* We need to allocate the new iovars in the var array */
1117 avail_vars = (jd->varcount - jd->vartop);
1118 add_vars = (vartop - jd->vartop);
1120 if (add_vars > avail_vars) {
1121 add_vars -= avail_vars;
1122 jd->var = DMREALLOC(jd->var, varinfo, jd->varcount, jd->varcount + add_vars);
1123 jd->varcount += add_vars;
1126 jd->vartop = vartop;
1128 /* Put the chain of exception handlers between just before the last
1129 basic block (end marker). */
1131 if (! basicblock_chain_empty(&chain)) {
1132 marker->nr = jd->basicblockcount;
1133 basicblock_chain_back(&chain)->next = marker;
1134 last->next = basicblock_chain_front(&chain);
1136 assert(last->nr + 1 == basicblock_chain_front(&chain)->nr);
1137 assert(marker->nr == jd->basicblockcount);
1140 /* Replace all exception handlers in exception table with their respective
1143 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1144 assert(ee->handler->vp);
1147 exh = (basicblock *)ee->handler->vp;
/* Set up IO variables in newly created exception handlers. */
1153 for (i = 0; i < exh->outdepth; ++i) {
1154 v = exh->outvars[i];
1156 jd->var[v].flags = INOUT;
1157 jd->var[v].type = jd->var[ bptr->invars[i] ].type;
1163 void unfix_exception_handlers(jitdata *jd) {
1164 basicblock *bptr, *exh;
1166 exception_entry *ee;
1167 #if !defined(NDEBUG)
1171 FOR_EACH_BASICBLOCK(jd, bptr) {
1172 if (bptr->type == BBTYPE_EXH) {
1173 assert(bptr->iinstr[1].opc == ICMD_GOTO);
1174 exh = bptr->iinstr[1].dst.block;
1176 bptr->type = BBDELETED;
1180 exh->type = BBTYPE_EXH;
1183 /* bptr is no more a predecessor of exh */
1185 for (i = 0; i < exh->predecessorcount; ++i) {
1186 if (exh->predecessors[i] == bptr) {
1187 exh->predecessors[i] = exh->predecessors[exh->predecessorcount - 1];
1188 exh->predecessorcount -= 1;
1189 #if !defined(NDEBUG)
1203 for (ee = jd->exceptiontable; ee; ee = ee->down) {
1204 assert(ee->handler->vp);
1205 ee->handler = ee->handler->vp;
1209 /*** ssa_enter ***************************************************************/
1211 static void ssa_enter_mark_loops_intern(basicblock *bb, unsigned num_branches) {
1212 basicblock_info_t *bbi = bb_info(bb);
1213 basicblock **itsucc;
1215 if (! bbi->visited) {
1216 bbi->visited = true;
1218 FOR_EACH_SUCCESSOR(bb, itsucc) {
1219 /* There is a single branch from bb into the successor. */
1220 ssa_enter_mark_loops_intern(*itsucc, 1);
1222 FOR_EACH_EXHANDLER(bb, itsucc) {
1223 /* For exception handler type successors,
1224 we count a branch into the exception handler from every PEI. */
1225 ssa_enter_mark_loops_intern(*itsucc, bb->exouts);
1227 bbi->active = false;
1228 } else if (bbi->active) {
1229 bbi->backward_branches += num_branches;
1233 static inline void ssa_enter_mark_loops(basicblock *bb) {
1234 ssa_enter_mark_loops_intern(bb, 1);
1237 static void ssa_enter_merge(
1241 unsigned predecessor_index,
1245 basicblock_info_t *dsti = bb_info(bdst);
1246 unsigned predecessor_count = basicblock_get_predecessor_count(bdst);
1251 /* We are merging for the first time into this block. */
1253 if (! state_array_has_items(dst->state_array)) {
1255 state_array_allocate_items(dst->state_array);
1257 if (dsti->backward_branches > 0) {
1258 /* Loop header, create phi functions for all variables. */
1259 for (i = 0; i < traversal_variables_count(dst); ++i) {
1260 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1261 /* No need to init, they will only be filled in later. */
1264 state_array_copy(dst->state_array, src->state_array);
1269 /* We are merging another block. */
1271 /* Process every variable. */
1273 for (i = 0; i < traversal_variables_count(dst); ++i) {
1274 if (dsti->traversed) {
1276 /* Back edge, all phi functions are already created.
1277 We only need to set their arguments. */
1280 phis_get(dst->phis, i),
1282 state_array_get(src->state_array, i)
1285 } else if (state_array_get(dst->state_array, i) != state_array_get(src->state_array, i)) {
1287 /* A different definition of this var reaches the block.
1288 We need to create a phi function. */
1290 if (phis_contains(dst->phis, state_array_get(dst->state_array, i))) {
1291 /* There is already a phi function created for this var.
1292 No need to create one. */
1294 /* Create a new phi function.
1295 Set all arguments to old value in state array. */
1296 old = state_array_get(dst->state_array, i);
1297 phi = traversal_create_phi(dst, vdst, predecessor_count, i);
1298 phi_set_all_args(phi, old);
1301 /* Set argument of phi function. */
1304 state_array_get(dst->state_array, i),
1306 state_array_get(src->state_array, i)
1312 static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb);
1313 static bool ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *bbi);
1315 #if defined(SSA_VERIFY)
1316 static void ssa_enter_verify_no_redundant_phis(ssa_info_t *ssa) {
1318 basicblock_info_t *bbi;
1324 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1325 if (basicblock_reached(bptr)) {
1326 bbi = bb_info(bptr);
1327 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1328 if (! phi_is_redundant(itph)) {
1329 phi_calculate_redundancy(itph);
1330 assert(! phi_is_redundant(itph));
1333 FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
1334 if (! phi_is_redundant(itph)) {
1335 phi_calculate_redundancy(itph);
1336 assert(! phi_is_redundant(itph));
1344 static void ssa_enter_traverse(ssa_info_t *ssa, basicblock *bb) {
1345 basicblock **itsucc;
1346 basicblock_info_t *succi;
1347 basicblock_info_t *bbi = bb_info(bb);
1348 unsigned predecessor_count;
1352 ssa_enter_process_block(ssa, bb);
1356 FOR_EACH_SUCCESSOR(bb, itsucc) {
1358 succi = bb_info(*itsucc);
1364 basicblock_get_predecessor_index(bb, *itsucc),
1372 basicblock_get_predecessor_index(bb, *itsucc),
1376 succi->complete_predecessors += 1;
1378 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1381 succi->complete_predecessors ==
1382 (predecessor_count - succi->backward_branches)
1384 ssa_enter_traverse(ssa, *itsucc);
1388 (succi->complete_predecessors == predecessor_count) &&
1389 (succi->backward_branches > 0)
1391 #if defined(SSA_VERIFY)
1392 assert(succi->num_phi_elimination < 2);
1393 succi->num_phi_elimination += 1;
1395 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1396 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1400 FOR_EACH_EXHANDLER(bb, itsucc) {
1402 succi = bb_info(*itsucc);
1404 succi->complete_predecessors += bb->exouts; /* XXX this might be 0 */
1406 predecessor_count = basicblock_get_predecessor_count(*itsucc);
1409 succi->complete_predecessors ==
1410 (predecessor_count - succi->backward_branches)
1412 ssa_enter_traverse(ssa, *itsucc);
1416 (succi->complete_predecessors == predecessor_count) &&
1417 (succi->backward_branches > 0)
1419 #if defined(SSA_VERIFY)
1420 assert(succi->num_phi_elimination < 2);
1421 succi->num_phi_elimination += 1;
1423 ssa_enter_eliminate_redundant_phis(succi->locals, ssa->locals, succi);
1424 ssa_enter_eliminate_redundant_phis(succi->stack, ssa->stack, succi);
1431 static void ssa_enter_process_pei(ssa_info *ssa, basicblock *bb, unsigned pei) {
1432 basicblock_info_t *bbi = bb_info(bb);
1433 basicblock_info_t *succi;
1434 basicblock **itsucc;
1436 FOR_EACH_EXHANDLER(bb, itsucc) {
1437 succi = bb_info(*itsucc);
1443 basicblock_get_ex_predecessor_index(bb, pei, *itsucc),
1451 basicblock_get_ex_predecessor_index(bb, pei, *itsucc),
1457 static FIXME(bool) ssa_enter_eliminate_redundant_phis(traversal_t *t, vars_t *vs, basicblock_info_t *FIXME(bbi)) {
1465 FOR_EACH_PHI_FUNCTION(t->phis, itph) {
1467 phi_calculate_redundancy(itph);
1469 /* If the phi function is redundant,
1470 make the variable it defines point to the value defined by the substituing
1473 if (phi_is_redundant(itph)) {
1474 vars_subst(vs, itph->dst.varindex, phi_get_subst(itph)->dst.varindex);
1475 assert(bbi->backward_branches > 0);
1484 static void ssa_enter_post_eliminate_redundand_phi(
1490 phi_calculate_redundancy(phi);
1491 phi_set_flag(PHI_FLAG_REDUNDANCY_CHECKED);
1493 /* if redundancy changed and phi function escapes block */
1495 /* for each successor */
/* ssa_enter_post_eliminate_redundant_phis ************************************
   Driver for the post-pass: walks all basic blocks and, for loop headers
   only, revisits every local and stack phi function that has not yet been
   flagged PHI_FLAG_REDUNDANCY_CHECKED. The inner logic is still sketched as
   question comments in the original ("in progress", see file header TODO). */
1498 static void ssa_enter_post_eliminate_redundant_phis(ssa_info_t *ssa) {
1500 basicblock_info_t *bbi;
1503 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1504 bbi = bb_info(bptr);
1506 if (bbi->backward_branches > 0) {
1507 /* Redundant phi functions have the left hand side as operand.
1508 This can happen by definition only in loop headers. */
1510 FOR_EACH_PHI_FUNCTION(bbi->locals, itph) {
1511 if (! phi_has_flag(PHI_FLAG_REDUNDANCY_CHECKED)) {
1512 /* Calculate redundancy? */
1514 /* If yes recurse? */
/* Same (unfinished) treatment for the stack phi functions. */
1518 FOR_EACH_PHI_FUNCTION(bbi->stack, itph) {
/* ssa_enter_init_locals ******************************************************
   Seeds the locals state array of the root block: for every slot i a fresh
   ICMD_NOP instruction defining variable i is created, so each local has an
   initial "definition" before real renaming starts.
   NOTE(review): declarations of i and iptr are on lines elided from this
   listing. */
1525 static void ssa_enter_init_locals(state_array_t *sa) {
1529 for (i = 0; i < sa->count; ++i) {
1530 iptr = DNEW(instruction);
1531 iptr->opc = ICMD_NOP;
1532 iptr->dst.varindex = i;
1533 state_array_set(sa, i, iptr);
/* ssa_enter_process_block ****************************************************
   Core renaming pass for one basic block: initializes the state arrays (for
   the root block and exception handler entries), fixes up INOUT flags, then
   walks all instructions renaming uses and definitions of locals, in/out
   (stack) variables, and "other" variables into SSA form. Each PEI triggers
   propagation of the current state to the exception handlers.
   NOTE(review): several closing braces and statements are elided in this
   listing. */
1537 static void ssa_enter_process_block(ssa_info *ssa, basicblock *bb) {
1538 basicblock_info_t *bbi = bb_info(bb);
1543 unsigned uses_count;
1546 /* Every basic block can be traversed only once. */
1548 assert(! bbi->traversed);
1549 bbi->traversed = true;
1551 /* The root basicblock needs special treatment. */
1553 if (bb->predecessorcount == 0 && bb->type != BBTYPE_EXH) {
1554 state_array_assert_no_items(bbi->locals->state_array);
1555 state_array_allocate_items(bbi->locals->state_array);
1556 ssa_enter_init_locals(bbi->locals->state_array);
1558 state_array_assert_no_items(bbi->stack->state_array);
1559 state_array_allocate_items(bbi->stack->state_array);
1563 /* Exception handlers have a clean stack. */
1565 /* Not true with inlining. */
1567 if (bb->type == BBTYPE_EXH) {
1568 state_array_assert_no_items(bbi->stack->state_array);
1569 state_array_allocate_items(bbi->stack->state_array);
1573 /* Some in/out vars get marked as INOUT in simplereg,
1574 and are not marked at this point.
1575 Mark them manually. */
/* keep_in_out mode: force TYPE_RET invars to INOUT, drop PREALLOC. */
1577 for (ituse = bb->invars; ituse != bb->invars + bb->indepth; ++ituse) {
1578 if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
1581 ssa->jd->var[*ituse].flags |= INOUT;
1582 ssa->jd->var[*ituse].flags &= ~PREALLOC;
/* Same fix-up for the outvars. */
1585 for (ituse = bb->outvars; ituse != bb->outvars + bb->outdepth; ++ituse) {
1586 if (ssa->keep_in_out && ssa->jd->var[*ituse].type == TYPE_RET) {
1589 ssa->jd->var[*ituse].flags |= INOUT;
1590 ssa->jd->var[*ituse].flags &= ~PREALLOC;
1593 /* Process instructions */
1595 state_array_assert_items(bbi->locals->state_array);
1597 FOR_EACH_INSTRUCTION(bb, iptr) {
1599 #if defined(ELIMINATE_NOP_LOAD_STORE)
1601 /* Kill NOP instructions of the form:
1604 As they create a lot of unnecessary definitions.
1605 For performance, definitely eliminate them. However, keeping them is a
1610 (icmd_table[iptr->opc].dataflow == DF_LOAD) ||
1611 (icmd_table[iptr->opc].dataflow == DF_STORE)
1613 if (iptr->dst.varindex == iptr->s1.varindex) {
1614 iptr->opc = ICMD_NOP;
/* PEI: propagate current state to all reachable exception handlers. */
1620 if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
1621 ssa_enter_process_pei(ssa, bb, pei++);
/* Rename all uses of this instruction according to variable category. */
1624 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
1626 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
1627 if (var_is_local(ssa->jd, *ituse)) {
1628 traversal_rename_use(
1633 } else if (var_is_inout(ssa->jd, *ituse)) {
1634 traversal_rename_use(
1640 *ituse = others_mapping_get(ssa, *ituse);
1644 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
/* Rename the definition; "other" vars get a fresh SSA version with a
   record of their old index for later restoration. */
1646 if (instruction_has_dst(iptr)) {
1647 if (var_is_local(ssa->jd, iptr->dst.varindex)) {
1648 traversal_rename_def(
1653 } else if (var_is_inout(ssa->jd, iptr->dst.varindex)) {
1654 traversal_rename_def(
1660 old = iptr->dst.varindex;
1661 iptr->dst.varindex = vars_add_item(
1663 ssa->jd->var + iptr->dst.varindex
1665 vars_record_old_index(ssa->others, iptr->dst.varindex, old);
1666 others_mapping_set(ssa, old, iptr->dst.varindex);
/* Final variable array layout after SSA construction:
1674 [locals.........................][interfaces][others]
1675 [original locals][version > 1 ]
1676 <--------------- new locals --------------->
*/
/* ssa_enter_export_variables *************************************************
   Builds the final jitdata variable array: locals first, then stack (iovar)
   versions, then "other" variables; grows the local_map so that every new
   local version and every iovar gets a local_map entry (5 slots per local,
   one per type), and finally eliminates all interface variables. */
1679 static void ssa_enter_export_variables(ssa_info *ssa) {
1684 jitdata *jd = ssa->jd;
1687 vartop = ssa->locals->count + ssa->stack->count + ssa->others->count;
1688 vars = DMNEW(varinfo, vartop);
1690 vars_copy_to_final(ssa->locals, vars);
1691 vars_copy_to_final(ssa->stack, vars + ssa->locals->count);
1692 vars_copy_to_final(ssa->others, vars + ssa->locals->count + ssa->stack->count);
1695 jd->vartop = jd->varcount = vartop;
1697 /* Grow local map to accommodate all new locals and iovars.
1698 But keep the local map for version 1 of locals, that contains the holes. */
1702 5 * (jd->maxlocals + ssa->locals->count + ssa->stack->count - jd->localcount)
1705 MCOPY(local_map, jd->local_map, s4, 5 * jd->maxlocals);
1707 jd->local_map = local_map;
1709 it = jd->local_map + (jd->maxlocals * 5); /* start adding entries here */
1711 /* Add version > 1 of all locals */
1713 for (i = jd->localcount; i < ssa->locals->count; ++i) {
1714 for (j = 0; j < 5; ++j) {
/* Only the entry matching the variable's type is valid. */
1715 if (jd->var[i].type != j) {
1724 /* Add all io vars. */
1726 for (i = ssa->locals->count; i < ssa->locals->count + ssa->stack->count; ++i) {
1727 for (j = 0; j < 5; ++j) {
1728 if (jd->var[i].type != j) {
/* All former stack variables are now locals as well. */
1739 jd->maxlocals += (ssa->locals->count + ssa->stack->count - jd->localcount);
1740 jd->localcount = ssa->locals->count + ssa->stack->count;
1742 /* Eliminate interfaces. */
1744 jd->maxinterfaces = 0;
/* ssa_enter_export_phis ******************************************************
   Copies each block's local and stack phi functions into a single contiguous
   bptr->phis array and sets bptr->phicount accordingly.
   NOTE(review): the declaration of dst and its initialization are on lines
   elided from this listing. */
1748 static void ssa_enter_export_phis(ssa_info_t *ssa) {
1750 basicblock_info_t *bbi;
1753 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1754 bbi = bb_info(bptr);
/* One array sized for locals' and stack's phis together. */
1756 bptr->phis = DMNEW(instruction, bbi->locals->phis->count + bbi->stack->phis->count);
1760 dst += phis_copy_to(bbi->locals->phis, dst);
1762 dst += phis_copy_to(bbi->stack->phis, dst);
1764 bptr->phicount = dst - bptr->phis;
/* ssa_enter_eliminate_category ***********************************************
   Translates one variable index from its per-category numbering into the
   final flat index space (see layout comment above ssa_enter_export_variables):
   locals keep their base, stack vars are offset by locals->count, others by
   locals->count + stack->count. Local/stack indices are first resolved
   through the substitution chain built by redundant-phi elimination. */
1770 static inline void ssa_enter_eliminate_category(ssa_info_t *ssa, s4 *pvar) {
1771 switch (vars_get_category(*pvar)) {
1772 case VARS_CATEGORY_LOCAL:
1773 *pvar = vars_get_index(vars_resolve_subst(ssa->locals, *pvar));
1775 case VARS_CATEGORY_STACK:
1776 *pvar = vars_get_index(vars_resolve_subst(ssa->stack, *pvar)) + ssa->locals->count;
1778 case VARS_CATEGORY_OTHERS:
1779 *pvar = vars_get_index(*pvar) + ssa->locals->count + ssa->stack->count;
/* ssa_enter_eliminate_categories *********************************************
   Rewrites every variable reference in the method — phi destinations, and all
   instruction definitions and uses — from category-local indices to the final
   flat index space, using ssa_enter_eliminate_category(). */
1785 void ssa_enter_eliminate_categories(ssa_info_t *ssa) {
1789 unsigned uses_count;
1790 basicblock_info_t *bbi;
1793 FOR_EACH_BASICBLOCK(ssa->jd, bb) {
/* NOTE(review): the branch body for !keep_in_out is elided here. */
1797 if (! ssa->keep_in_out) {
1803 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1804 ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));
1807 FOR_EACH_PHI_FUNCTION(bbi->stack->phis, itph) {
1808 ssa_enter_eliminate_category(ssa, &(itph->dst.varindex));
/* Translate destination and all uses of every instruction. */
1812 FOR_EACH_INSTRUCTION(bb, iptr) {
1813 if (instruction_has_dst(iptr)) {
1814 ssa_enter_eliminate_category(ssa, &(iptr->dst.varindex));
1816 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
1817 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
1818 ssa_enter_eliminate_category(ssa, ituse);
1820 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
/* ssa_enter_create_phi_graph *************************************************
   Debug helper (currently called only from commented-out code in yssa):
   dumps the phi dependency graph in graphviz dot format to
   /tmp/graphs/<class>.<method>.dot. The path template uses '|' as a
   placeholder for '/' so that the '/' characters of the JVM-internal class
   name can be rewritten to '.' in the same pass. Silently returns if the
   file cannot be opened. */
1825 static void ssa_enter_create_phi_graph(ssa_info *ssa) {
1827 basicblock_info_t *bbi;
1829 instruction **ituse;
1831 char path[PATH_MAX], *ppath;
1834 snprintf(path, PATH_MAX, "|tmp|graphs|%s.%s.dot", ssa->jd->m->clazz->name->text, ssa->jd->m->name->text);
1835 for (ppath = path; *ppath; ++ppath) {
1836 if (*ppath == '|') *ppath = '/';
1837 else if (*ppath == '/') *ppath = '.';
1840 f = fopen(path, "w");
1842 if (f == NULL) return;
1844 fprintf(f, "digraph G {\n");
1846 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
1847 bbi = bb_info(bptr);
1849 FOR_EACH_PHI_FUNCTION(bbi->locals->phis, itph) {
1851 FOR_EACH_PHI_USE(itph, ituse) {
/* Edge from each phi operand that is itself a phi to this phi. */
1852 if ((*ituse)->opc == ICMD_PHI) {
1853 fprintf(f, "%d -> %d;\n", (*ituse)->dst.varindex, itph->dst.varindex);
/* ssa_leave_create_transition_block_intern ***********************************
   Allocates a new transition basic block placed on the edge identified by
   predecessor_index into block `to`: it contains reserved_insns leading
   slots (zeroed, for callers that prepend instructions), one copy/move per
   phi function of `to` (locals then stack), and a trailing goto to `to`.
   The block is appended to ssa->new_blocks and returned.
   NOTE(review): the first parameters and the icount computation line are
   elided in this listing. */
1866 static basicblock *ssa_leave_create_transition_block_intern(
1870 unsigned predecessor_index,
1871 unsigned reserved_insns
1876 basicblock_info_t *toi;
1880 /* Create basicblock and instruction array. */
1882 bb = DNEW(basicblock);
1883 MZERO(bb, basicblock, 1);
/* Give the new block a fresh, unique block number. */
1885 bb->nr = ssa->jd->basicblockcount;
1886 ssa->jd->basicblockcount += 1;
1888 bb->method = ssa->jd->m;
1889 bb->type = BBTYPE_STD;
1892 toi->locals->phis->count +
1893 toi->stack->phis->count +
1895 bb->iinstr = DMNEW(instruction, bb->icount);
1896 MZERO(bb->iinstr, instruction, bb->icount);
1898 /* Populate instruction array. */
1900 iptr = bb->iinstr + reserved_insns;
1902 /* Add phi moves. */
1904 FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
1905 phi_create_copy(itph, predecessor_index, iptr++);
1908 FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
1909 phi_create_copy(itph, predecessor_index, iptr++);
1912 /* Add goto to real block. */
1914 goto_init(iptr, to);
1916 /* Add basicblock to chain of newly created basicblocks. */
1918 basicblock_chain_add(ssa->new_blocks, bb);
/* ssa_leave_create_transition_block ******************************************
   Convenience wrapper for a normal (non-exceptional) CFG edge: looks up the
   predecessor index of `from` within `to` and delegates to the _intern
   variant. NOTE(review): the full parameter/argument lists are elided in
   this listing. */
1923 static inline basicblock *ssa_leave_create_transition_block(
1928 return ssa_leave_create_transition_block_intern(
1932 basicblock_get_predecessor_index(from, to),
/* ssa_leave_create_fallthrough ***********************************************
   For a fall-through edge from bptr to bptr->next, the phi moves cannot go
   into a separate transition block without disturbing the layout; instead
   the instruction array of bptr itself is grown and the moves for all of the
   successor's phi functions (locals then stack) are appended in place. */
1937 static void ssa_leave_create_fallthrough(ssa_info *ssa, basicblock *bptr) {
1938 unsigned predecessor_index;
1939 basicblock_info_t *toi;
1944 if (bptr->next == NULL) {
1945 /* No fallthrough. */
1949 predecessor_index = basicblock_get_predecessor_index(bptr, bptr->next);
1950 toi = bb_info(bptr->next);
1954 /* Resize instruction array to accommodate all phi moves. */
1956 icount = bptr->icount + toi->locals->phis->count + toi->stack->phis->count;
1958 bptr->iinstr = DMREALLOC(
/* Append after the existing instructions. */
1965 iptr = bptr->iinstr + bptr->icount;
1966 bptr->icount = icount;
1968 /* Create phi moves. */
1970 FOR_EACH_PHI_FUNCTION(toi->locals->phis, itph) {
1971 phi_create_copy(itph, predecessor_index, iptr++);
1974 FOR_EACH_PHI_FUNCTION(toi->stack->phis, itph) {
1975 phi_create_copy(itph, predecessor_index, iptr++);
/* ssa_leave_create_phi_moves *************************************************
   Out-of-SSA pass for normal control flow: for every reached block, rewrites
   each branch target (conditional branches, tableswitch entries,
   lookupswitch entries and default, jsr) to point at a freshly created
   transition block carrying the successor's phi moves, and tracks whether
   the block can fall through; if so, the moves for the fall-through
   successor are appended in place via ssa_leave_create_fallthrough().
   Finally the chain of newly created blocks is linked after the last
   original block.
   NOTE(review): several statements (e.g. the assignment of the rewritten
   branch target and loop headers for the switch tables) are elided in this
   listing. */
1979 static void ssa_leave_create_phi_moves(ssa_info *ssa) {
1982 basicblock *last = NULL;
1985 branch_target_t *table;
1986 lookup_target_t *lookup;
1987 bool has_fallthrough;
1989 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
/* Remember the last block so new blocks can be appended after it. */
1991 if (bptr->next == NULL) {
/* Skip dead code: it carries no basicblock_info_t. */
1995 if (bptr->vp == NULL) {
1999 if (! basicblock_reached(bptr)) {
2003 has_fallthrough = true;
2005 for (iptr = bptr->iinstr; iptr != bptr->iinstr + bptr->icount; ++iptr) {
2006 switch (icmd_table[iptr->opc].controlflow) {
2011 ssa_leave_create_transition_block(ssa, bptr, iptr->dst.block);
2014 table = iptr->dst.table;
2015 l = iptr->sx.s23.s2.tablelow;
2016 i = iptr->sx.s23.s3.tablehigh;
2018 i += 1; /* default */
2021 ssa_leave_create_transition_block(ssa, bptr, table->block);
2026 lookup = iptr->dst.lookup;
2027 i = iptr->sx.s23.s2.lookupcount;
2029 lookup->target.block =
2030 ssa_leave_create_transition_block(ssa, bptr, lookup->target.block);
2033 iptr->sx.s23.s3.lookupdefault.block =
2034 ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.lookupdefault.block);
2037 iptr->sx.s23.s3.jsrtarget.block =
2038 ssa_leave_create_transition_block(ssa, bptr, iptr->sx.s23.s3.jsrtarget.block);
/* Unconditional control transfers kill the fall-through; any other
   non-NOP instruction re-establishes it. */
2043 (iptr->opc == ICMD_GOTO) ||
2044 (iptr->opc == ICMD_JSR) ||
2045 (iptr->opc == ICMD_RET) ||
2046 icmd_table[iptr->opc].controlflow == CF_END ||
2047 (iptr->opc == ICMD_TABLESWITCH) ||
2048 (iptr->opc == ICMD_LOOKUPSWITCH)
2050 has_fallthrough = false;
2051 } else if (iptr->opc != ICMD_NOP) {
2052 has_fallthrough = true;
2057 if (bptr->next == NULL) {
2061 if (! basicblock_reached(bptr->next)) {
2065 if (has_fallthrough) {
2066 ssa_leave_create_fallthrough(ssa, bptr);
2070 /* Add chain of new basic blocks */
2072 if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
2073 last->next = basicblock_chain_front(ssa->new_blocks);
/* ssa_leave_split_basicblock_at **********************************************
   Splits basicblock bptr directly after instruction iptr, maintaining a
   chain of sub-basicblock fragments in the block's basicblock_info_t.
   The first call lazily initializes the chain with a copy of the original
   block; subsequent calls locate the fragment containing iptr and split it
   if any instructions follow iptr within that fragment. Returns the fragment
   ending at iptr — except when that fragment is the first one, in which case
   bptr itself is returned so existing pointers to the block stay valid. */
2078 static basicblock *ssa_leave_split_basicblock_at(ssa_info *ssa, basicblock *bptr, instruction *iptr) {
2080 basicblock_info_t *bbi = bb_info(bptr);
2081 unsigned iidx = iptr - bptr->iinstr;
2082 basicblock *newblock;
2083 basicblock *tosplit;
2087 assert(iidx < bptr->icount);
2090 /* If there are no subbasicblocks yet, we initialize the first one to be a
2091 copy of the original basicblock. */
2093 if (basicblock_chain_empty(bbi->subbasicblocks)) {
2094 newblock = DNEW(basicblock);
2096 newblock->next = NULL;
2097 newblock->vp = NULL;
2098 basicblock_chain_add(bbi->subbasicblocks, newblock);
2101 /* Find the subbasicblock that will be split:
2102 the one that contains iptr. */
2104 tosplit = basicblock_chain_front(bbi->subbasicblocks);
2107 while (tosplit->next && (iidx >= (pos + tosplit->icount))) {
2108 assert(bptr->nr == tosplit->nr);
2109 pos += tosplit->icount;
2110 tosplit = tosplit->next;
2113 assert(bptr->nr == tosplit->nr);
2115 /* Calculate number of instructions left in block to split. */
2117 ileft = iptr - tosplit->iinstr + 1;
2118 assert(ileft <= tosplit->icount);
2120 /* If there are any instructions left in the block to split, split */
2122 if (ileft < tosplit->icount) {
2123 newblock = DNEW(basicblock);
2124 *newblock = *tosplit;
2126 tosplit->next = newblock;
2127 tosplit->icount = ileft;
/* The tail fragment shares the instruction array, offset past iptr. */
2129 newblock->icount -= ileft;
2130 newblock->iinstr += ileft;
2132 assert(tosplit->nr == bptr->nr);
2133 assert(newblock->nr == bptr->nr);
2134 assert(newblock->next == NULL);
2136 if (newblock->next == NULL) {
2137 bbi->subbasicblocks->last = newblock;
2141 /* We won't break pointers/references to bptr.
2142 So return bptr instead of the first fragment.
2143 Later, we will put the first fragment into the memory used by bptr.
2146 if (tosplit == basicblock_chain_front(bbi->subbasicblocks)) {
/* ssa_leave_create_transition_exception_handler ******************************
   Creates a per-PEI transition exception handler on the edge from try block
   `from` (for its pei-th PEI) to the old handler prologue `to`. The old
   prologue is marked BBDELETED, a transition block is created with one
   reserved leading slot, and the GETEXCEPTION from the old prologue plus its
   trailing goto to the real handler are copied into the new block.
   NOTE(review): the parameter list is elided in this listing. */
2153 static basicblock *ssa_leave_create_transition_exception_handler(
2161 /* From is a try block, to is an exception handler prologue. */
2163 /* Remove old prologue. */
2165 to->flags = BBDELETED;
2167 /* Create new exception handler. */
2169 exh = ssa_leave_create_transition_block_intern(
2173 basicblock_get_ex_predecessor_index(from, pei, to),
2176 exh->type = BBTYPE_EXH;
2178 /* Copy goto to real exception handler at the end of the exception handler
2181 assert(to->iinstr[to->icount - 1].opc == ICMD_GOTO);
2182 assert(exh->iinstr[exh->icount - 1].opc == ICMD_GOTO);
2183 exh->iinstr[exh->icount - 1] = to->iinstr[to->icount - 1];
2185 /* Copy getexception from the old prologue. */
2187 assert(to->iinstr[0].opc == ICMD_GETEXCEPTION);
2188 exh->iinstr[0] = to->iinstr[0];
/* ssa_leave_create_transition_exception_entry ********************************
   Allocates a new exception table entry covering try block `from` with the
   given handler and catch type. If `from` has been split into sub-fragments,
   the protected region ends at the next fragment; otherwise at the block's
   successor. NOTE(review): the assignment of ee->start is on an elided
   line. */
2193 static exception_entry *ssa_leave_create_transition_exception_entry(
2196 basicblock *handler,
2197 classref_or_classinfo catchtype
2200 exception_entry *ee = DNEW(exception_entry);
2201 basicblock_info_t *fromi = bb_info(from);
2205 /* If the try block has subbasicblocks, the next block is the next fragment,
2206 not the successor block. */
2208 if (fromi != NULL) {
2209 ee->end = basicblock_chain_front(fromi->subbasicblocks)->next;
2211 ee->end = from->next;
2213 ee->handler = handler;
2214 ee->catchtype = catchtype;
/* ssa_leave_create_exceptional_phi_moves *************************************
   Out-of-SSA pass for exceptional control flow: for every exception table
   entry, every PEI inside the protected range gets its own try fragment
   (split at the PEI), its own transition exception handler carrying the phi
   moves, and a corresponding new exception table entry. The new entries
   replace the original entry in the table chain, and all newly created
   handler blocks are appended at the end of the block list.
   Note: `try` is a valid identifier in C (only C++ reserves it). */
2221 static void ssa_leave_create_exceptional_phi_moves(ssa_info *ssa) {
2224 exception_entry *ite;
2225 exception_entry_chain_t chain;
2226 classref_or_classinfo catchtype;
2231 exception_entry *ee;
2232 basicblock *last = NULL;
/* Continue appending behind blocks created by the non-exceptional pass. */
2234 if (! basicblock_chain_empty(ssa->new_blocks)) {
2235 last = basicblock_chain_back(ssa->new_blocks);
2238 basicblock_chain_clear(ssa->new_blocks);
2240 for (ite = ssa->jd->exceptiontable; ite; ite = ite->down) {
2241 bptr = ite->handler;
2242 catchtype = ite->catchtype;
2243 exception_entry_chain_init(&chain);
2244 for (ittry = ite->start; ittry != ite->end; ittry = ittry->next) {
2245 if (basicblock_reached(ittry)) {
2246 /* Dead code does not have a basicblock_info_t associated. */
2248 FOR_EACH_INSTRUCTION(ittry, iptr) {
2249 if (icmd_table[iptr->opc].flags & ICMDTABLE_PEI) {
2250 /* try is basicblock fragment till (including) the pei */
2251 try = ssa_leave_split_basicblock_at(ssa, ittry, iptr);
2252 /* ee is handler for try */
2253 exh = ssa_leave_create_transition_exception_handler(
2256 ee = ssa_leave_create_transition_exception_entry(
2257 ssa, try, exh, catchtype
2259 exception_entry_chain_add(&chain, ee);
2261 ssa->jd->exceptiontablelength += 1;
2266 if (! exception_entry_chain_empty(&chain)) {
2267 exception_entry_chain_back(&chain)->down = ite->down;
2268 exception_entry_chain_back(&chain)->next = ite->next;
2269 /* Replace original exception entry by first new one. */
2270 *ite = *exception_entry_chain_front(&chain);
2271 /* Set current iteration position to last newly created one. */
2272 ite = exception_entry_chain_back(&chain);
/* Find the current end of the block list and append the new handlers. */
2277 for (last = ssa->jd->basicblocks; last->next != NULL; last = last->next);
2280 if (last != NULL && ! basicblock_chain_empty(ssa->new_blocks)) {
2281 last->next = basicblock_chain_front(ssa->new_blocks);
/* ssa_simple_leave_restore ***************************************************
   Maps one flat SSA variable index (*pvar) back to its pre-SSA index:
   locals and "others" are looked up via the recorded old indices; stack
   variables are mapped back to bptr's invar or outvar at the recovered stack
   depth, using the block's state array to decide whether *pvar was the last
   definition at that depth (then it is the outvar, or the invar if no outvar
   exists there) or an earlier one (then it must be an invar).
   NOTE(review): the declaration/initialization of `var` is elided in this
   listing. */
2285 void ssa_simple_leave_restore(ssa_info_t *ssa, basicblock *bptr, s4 *pvar) {
2288 basicblock_info_t *bbi;
2290 if (var < ssa->locals->count) {
2291 *pvar = vars_get_old_index(ssa->locals, var);
2292 } else if (var < ssa->locals->count + ssa->stack->count) {
/* Recover the stack depth the variable was defined at. */
2294 index = vars_get_old_index(
2296 var - ssa->locals->count
2299 bbi = bb_info(bptr);
2301 /* We have to determine whether to take an invar or an outvar for
2302 the stack depth ``index''.
2303 The state array contains the last definition of the stack element
2307 if (state_array_get_var(bbi->stack->state_array, index) == var) {
2308 /* The last definition of a stack depth inside the basicblock.
2309 This is the outvar at the given depth.
2310 If there is no outvar at the given depth, it must be an invar.
2312 if (index < bptr->outdepth) {
2313 *pvar = bptr->outvars[index];
2314 } else if (index < bptr->indepth) {
2315 *pvar = bptr->invars[index];
2320 /* A different than the last definition of a stack depth.
2321 This must be an invar.
2323 assert(index < bptr->indepth);
2324 *pvar = bptr->invars[index];
/* Category "others": plain old-index lookup. */
2327 *pvar = vars_get_old_index(
2329 var - ssa->locals->count - ssa->stack->count
/* ssa_simple_leave ***********************************************************
   Cheap way out of SSA form without inserting phi moves: every definition
   and use in every (non-handler) block is renamed back to its pre-SSA
   variable, the artificial exception handlers are unfixed, and all jitdata
   fields saved in ssa->original are restored. Instructions are walked in
   reverse so a definition is renamed only after all its uses. */
2334 void ssa_simple_leave(ssa_info_t *ssa) {
2338 unsigned uses_count;
2340 FOR_EACH_BASICBLOCK(ssa->jd, bptr) {
2341 if (bptr->type == BBTYPE_EXH) {
2342 /* (Artificial) exception handler blocks will be eliminated. */
2345 /* In reverse order. We need to rename the definition after any use! */
2346 FOR_EACH_INSTRUCTION_REV(bptr, iptr) {
2347 if (instruction_has_dst(iptr)) {
2348 ssa_simple_leave_restore(ssa, bptr, &(iptr->dst.varindex));
2350 instruction_get_uses(iptr, ssa->s_buf, &uses, &uses_count);
2351 for (ituse = uses; ituse != uses + uses_count; ++ituse) {
2352 ssa_simple_leave_restore(ssa, bptr, ituse);
2354 instruction_set_uses(iptr, ssa->s_buf, uses, uses_count);
2359 unfix_exception_handlers(ssa->jd);
/* Restore the jitdata state saved before entering SSA form. */
2361 ssa->jd->maxlocals = ssa->original.maxlocals;
2362 ssa->jd->maxinterfaces = ssa->original.maxinterfaces;
2363 ssa->jd->local_map =ssa->original.local_map;
2364 ssa->jd->var = ssa->original.var;
2365 ssa->jd->vartop = ssa->original.vartop;
2366 ssa->jd->varcount = ssa->original.varcount;
2367 ssa->jd->localcount = ssa->original.localcount;
2370 #include "vm/rt-timing.h"
/* yssa ***********************************************************************
   Driver for the whole SSA round trip on one method: allocates per-block
   infos for all reached blocks, marks loops, runs the entering traversal,
   flattens variable categories, exports variables and phis, verifies no
   redundant phis remain, runs escape analysis (timed separately via
   RT_TIMING_2), creates the normal and exceptional phi moves, and finally
   leaves SSA form again with ssa_simple_leave(). The printf sections are
   SSA_VERBOSE debug output.
   NOTE(review): the statement attaching `iti` to the block (presumably via
   it->vp) is on an elided line — confirm against the full source. */
2372 void yssa(jitdata *jd) {
2374 basicblock_info_t *iti;
2377 struct timespec bs, es, be, ee;
2379 RT_TIMING_GET_TIME(bs);
2384 printf("=============== [ before %s ] =========================\n", jd->m->name->text);
2386 printf("=============== [ /before ] =========================\n");
2390 ssa = DNEW(ssa_info);
2392 ssa_info_init(ssa, jd);
2393 ssa->keep_in_out = true;
/* Attach a basicblock_info_t to every reached block. */
2395 FOR_EACH_BASICBLOCK(jd, it) {
2396 if (basicblock_reached(it)) {
2397 iti = DNEW(basicblock_info_t);
2398 basicblock_info_init(iti, it, jd);
/* Enter SSA form. */
2405 ssa_enter_mark_loops(jd->basicblocks);
2407 ssa_enter_traverse(ssa, jd->basicblocks);
2409 ssa_enter_eliminate_categories(ssa);
2411 ssa_enter_export_variables(ssa);
2413 ssa_enter_export_phis(ssa);
2415 ssa_enter_verify_no_redundant_phis(ssa);
2417 /*ssa_enter_create_phi_graph(ssa);*/
2419 RT_TIMING_GET_TIME(be);
2420 escape_analysis_perform(ssa->jd);
2421 RT_TIMING_GET_TIME(ee);
/* Leave SSA form again. */
2424 ssa_leave_create_phi_moves(ssa);
2426 ssa_leave_create_exceptional_phi_moves(ssa);
2431 printf("=============== [ mid ] =========================\n");
2433 printf("=============== [ /mid ] =========================\n");
2437 ssa_simple_leave(ssa);
2441 printf("=============== [ after ] =========================\n");
2443 printf("=============== [ /after ] =========================\n");
2447 RT_TIMING_GET_TIME(es);
2449 RT_TIMING_TIME_DIFF(bs, es, RT_TIMING_1);
2450 RT_TIMING_TIME_DIFF(be, ee, RT_TIMING_2);
/* eliminate_subbasicblocks ***************************************************
   Flattens the sub-basicblock chains created by the splitting pass back into
   the main block list: the first fragment is copied over the original block
   (keeping external pointers to bptr valid), and iteration continues from
   the last fragment of the chain. */
2453 void eliminate_subbasicblocks(jitdata *jd) {
2454 basicblock *bptr, *next;
2455 basicblock_info_t *bbi;
2457 FOR_EACH_BASICBLOCK(jd, bptr) {
2458 bbi = bb_info(bptr);
2460 if (! basicblock_chain_empty(bbi->subbasicblocks)) {
2462 /* Copy first subblock, to keep pointers intact. */
2463 *bptr = *basicblock_chain_front(bbi->subbasicblocks);
2464 bptr = basicblock_chain_back(bbi->subbasicblocks);
2471 printf("=============== [ elim ] =========================\n");
2473 printf("=============== [ /elim ] =========================\n");
2478 * These are local overrides for various environment variables in Emacs.
2479 * Please do not remove this and leave it at the end of the file, where
2480 * Emacs will automagically detect them.
2481 * ---------------------------------------------------------------------
2484 * indent-tabs-mode: t
2488 * vim:noexpandtab:sw=4:ts=4: