1 /* src/vm/jit/allocator/lsra.c - linear scan register allocator
3 Copyright (C) 2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
38 #include "mm/memory.hpp"
40 #include "toolbox/logging.hpp"
42 #include "vm/jit/builtin.hpp"
43 #include "vm/exceptions.hpp"
44 #include "vm/resolve.hpp"
45 #include "vm/options.h"
46 #include "vm/statistics.h"
48 #include "vm/jit/abi.h"
49 #include "vm/jit/reg.h"
51 #include "vm/jit/allocator/liveness.h"
52 #include "vm/jit/allocator/lsra.h"
55 extern char **prof_m_names;
56 extern u4 **prof_bb_freq;
60 /* function prototypes */
/* --- driver phases: init data structures, build CFG/lifetimes, allocate --- */
61 void lsra_init(jitdata *);
62 void lsra_setup(jitdata *);
63 void lsra_main(jitdata *);
/* --- core linear-scan machinery (register pools, interval expiry, spill) --- */
65 void lsra_reg_setup(jitdata *, struct lsra_register *, struct lsra_register * );
66 void lsra_calc_lifetime_length(jitdata *);
67 void _lsra_main( jitdata *, int *, int, struct lsra_register *, int *);
68 void lsra_expire_old_intervalls(jitdata *, struct lifetime *,
69 struct lsra_register *);
70 void spill_at_intervall(jitdata *, struct lifetime *);
71 void lsra_add_active(struct lifetime *, struct lifetime **, int *);
72 void _lsra_expire_old_intervalls(jitdata *, struct lifetime *,
73 struct lsra_register *, struct lifetime **,
/* NOTE(review): trailing parameter list of _lsra_expire_old_intervalls is cut
   off here in this view (original line 74 missing) */
75 void _spill_at_intervall(struct lifetime *, struct lifetime **, int *);
/* --- stack-slot (spill memory) allocation --- */
77 void lsra_alloc(jitdata *, int *, int, int *);
78 int lsra_getmem(struct lifetime *, struct freemem *, int *);
79 struct freemem *lsra_getnewmem(int *);
80 void lsra_setflags(int *, int);
/* --- debug helpers, only compiled with verbose tracing --- */
82 #ifdef LSRA_DEBUG_VERBOSE
83 void lsra_dump_stack(stackelement_t* );
84 void print_lifetimes(jitdata *, int *, int);
/* --- lifetime construction over stack elements and local variables --- */
88 void lsra_scan_registers_canditates(jitdata *, int);
89 void lsra_join_lifetimes(jitdata *, int);
91 void _lsra_new_stack( lsradata *, stackelement_t* , int , int, int);
92 void _lsra_from_stack(lsradata *, stackelement_t* , int , int, int);
93 void lsra_add_ss(struct lifetime *, stackelement_t* );
94 void lsra_usage_local(lsradata *, s4 , int , int , int , int );
/* lsra: entry point of the linear scan register allocator.
   Performs optional sanity checks on the in/out stacks of all reachable
   basic blocks, allocates the lsradata structure, and (under statistics)
   counts pairwise conflicts between local-variable lifetimes. */
99 bool lsra(jitdata *jd)
101 #if defined(ENABLE_STATISTICS)
106 #if defined(LSRA_DEBUG_CHECK)
/* NOTE(review): declarator binds '*' to 'in' only, so 'out' is declared as a
   plain stackelement_t here although it is assigned a pointer below — verify
   against the unsampled original (likely 'stackelement_t *in, *out;') */
109 stackelement_t* in,out;
113 #if defined(LSRA_DEBUG_CHECK)
/* walk every basic block and inspect its interface stacks */
116 while (b_index < m->basicblockcount ) {
118 if (m->basicblocks[b_index].flags >= BBREACHED) {
/* instack: ARGVAR/LOCALVAR entries are merely reported, not fatal */
120 in=m->basicblocks[b_index].instack;
121 ind=m->basicblocks[b_index].indepth;
122 for (;ind != 0;in=in->prev, ind--) {
123 /* ARGVAR or LOCALVAR in instack is ok*/
125 if (in->varkind == ARGVAR) printf("ARGVAR in instack: \n");
126 if (in->varkind == LOCALVAR) printf("LOCALVAR in instack\n");
/* outstack: ARGVAR/LOCALVAR entries violate the allocator's invariants */
129 out=m->basicblocks[b_index].outstack;
130 outd=m->basicblocks[b_index].outdepth;
131 for (;outd != 0;out=out->prev, outd--) {
132 if (out->varkind == ARGVAR)
133 { log_text("ARGVAR in outstack\n"); assert(0); }
134 if (out->varkind == LOCALVAR)
135 { log_text("LOCALVAR in outstack\n"); assert(0); }
/* allocate the per-compilation LSRA state on the dump memory area */
142 jd->ls = DNEW(lsradata);
146 #if defined(ENABLE_STATISTICS)
147 /* find conflicts between locals for statistics */
150 /* local Variable Lifetimes are at the end of the lifetime array and */
151 /* have v_index >= 0 */
152 for (locals_start = ls->lifetimecount-1; (locals_start >=0) &&
153 (ls->lifetime[ls->lt_used[locals_start]].v_index >= 0);
/* O(n^2) pairwise overlap test: two lifetimes conflict unless one ends
   before the other starts; each conflict is counted twice (symmetric) */
155 for (i=locals_start + 1; i < ls->lifetimecount; i++)
156 for (j=i+1; j < ls->lifetimecount; j++)
157 if ( !((ls->lifetime[ls->lt_used[i]].i_end
158 < ls->lifetime[ls->lt_used[j]].i_start)
159 || (ls->lifetime[ls->lt_used[j]].i_end <
160 ls->lifetime[ls->lt_used[i]].i_start)) )
161 count_locals_conflicts += 2;
167 /* everything's ok */
172 /* sort Basic Blocks using Depth First Search in reverse post order in */
172 /* sort Basic Blocks using Depth First Search in reverse post order in */
/* lsra_DFS: topologically orders the basic blocks (reverse postorder) using
   a worklist. A block is pushed only once all its predecessors were visited;
   blocks kept unreachable by backedges are forced onto the stack afterwards. */
174 void lsra_DFS(jitdata *jd) {
/* worklist and per-block visit counters, sized for bbcount + artificial exit */
188 stack = DMNEW( int, m->basicblockcount + 1);
189 visited = (int *)DMNEW( int, m->basicblockcount + 1);
190 for (i = 0; i <= m->basicblockcount; i++) {
193 ls->sorted_rev[i]=-1;
196 stack[0] = 0; /* start with Block 0 */
198 visited[0] = ls->num_pred[0]; /* Start Block is handled right and can be */
/* outer loop re-seeds the stack when backedge targets remain unpushed */
202 while (not_finished) {
203 while (stack_top != 0) {
205 i = stack[stack_top];
/* count each successor's handled predecessors; push when complete */
208 for (succ = ls->succ[i]; succ != NULL; succ = succ->next) {
209 visited[succ->value]++;
210 if (visited[succ->value] == ls->num_pred[succ->value]) {
211 /* push the node on the stack, only if all ancestors have */
213 stack[stack_top] = succ->value;
218 not_finished = false;
219 for (i=1; i <= m->basicblockcount; i++) {
220 /* search for visited blocks, which have not reached the num_pred */
221 /* and put them on the stack -> happens with backedges */
222 if ((visited[i] != 0) && (visited[i] < ls->num_pred[i])) {
223 stack[stack_top] = i;
/* saturate the counter so the block is not pushed a second time */
225 visited[i] = ls->num_pred[i];
/* lsra_get_backedges_: scan the sorted CFG for backedges (successor edges
   that jump to an equal or earlier position in the reverse-postorder) and
   collect them into the ls->backedge array, each with nesting level 1. */
233 void lsra_get_backedges_(lsradata *ls, int basicblockcount) {
/* NOTE(review): _backedges must be initialized to NULL before the loop below
   links new nodes onto it; the initialization line is not visible in this
   view (original lines 237-241 missing) — confirm against the original */
236 struct _backedge *_backedges;
242 /* now look for backedges */
243 ls->backedge_count = 0;
244 for(i=0; i < basicblockcount; i++) {
245 if (ls->sorted[i] != -1)
246 for(s=ls->succ[ls->sorted[i]]; s != NULL; s=s->next) {
/* edge i -> sorted_rev[s] is a backedge iff target is not strictly later */
247 if (i >= ls->sorted_rev[s->value]) {
248 n=DNEW(struct _backedge);
/* store edge normalized as start = later block, end = earlier block */
249 n->start = max(i, ls->sorted_rev[s->value]);
250 n->end = min(i, ls->sorted_rev[s->value]);
251 n->next = _backedges;
253 ls->backedge_count++;
257 /* put _backedges in ls->backedge array */
258 ls->backedge = DMNEW(struct _backedge *, ls->backedge_count);
259 for (n=_backedges, i=0; n != NULL; n=n->next, i++) {
261 ls->backedge[i]->nesting = 1;
/* lsra_get_nesting: derive a loop-nesting weight for every basic block from
   the backedge structure. Deeper nesting yields a larger ls->nesting[] value,
   later used to bias lifetime weights toward loop-resident variables. */
265 void lsra_get_nesting(jitdata *jd) {
/* build the inverse permutation of the reverse-postorder sort */
274 for (i=0; i <= m->basicblockcount; i++)
275 if (ls->sorted[i] != -1)
276 ls->sorted_rev[ls->sorted[i]]=i;
278 lsra_get_backedges_(ls, m->basicblockcount + 1);
279 /* - sort backedge by increasing end: */
/* simple O(n^2) selection-style sort; ties broken by increasing start */
280 for (i=0; i < ls->backedge_count; i++)
281 for (j=i+1; j < ls->backedge_count; j++)
282 if ((ls->backedge[i]->end > ls->backedge[j]->end) || /* -> swap */
283 ((ls->backedge[i]->end == ls->backedge[j]->end) &&
284 (ls->backedge[i]->start > ls->backedge[j]->start) )) {
286 ls->backedge[i]=ls->backedge[j];
290 /* create ls->nesting */
291 /* look for nesting depth (overlapping backedges*/
/* a backedge fully containing another one increases the inner one's depth */
292 for (i=0; i < ls->backedge_count - 1; i++) {
293 for (j = i + 1; (j < ls->backedge_count) &&
294 (ls->backedge[i]->start >= ls->backedge[j]->end); j++)
295 ls->backedge[j]->nesting += ls->backedge[i]->nesting;
/* sweep over the sorted block order and assign each block the nesting of
   the innermost backedge interval covering it */
300 while ( (i < m->basicblockcount + 1) ) {
301 if (j < ls->backedge_count) {
302 while ( i < ls->backedge[j]->end ) {
306 if ( (j+1) < ls->backedge_count)
307 end = min(ls->backedge[j]->start, ls->backedge[j+1]->end - 1);
309 end = ls->backedge[j]->start;
311 ls->nesting[i] = ls->backedge[j]->nesting;
321 #ifdef LSRA_DEBUG_VERBOSE
322 if (compileverbose) {
323 printf("sorted: \n");
324 for (i=0; i < ls->backedge_count; i++)
325 printf("Backedge: %i - %i, %i - %i\n", ls->sorted[ls->backedge[i]->start], ls->sorted[ls->backedge[i]->end], ls->backedge[i]->start, ls->backedge[i]->end);
326 printf("Nesting Level \n");
327 for (i=0; i<m->basicblockcount; i++) printf(" %3li", ls->nesting[i]);
/* reset sorted_rev and convert nesting depth d into weight 1 + 10*d^2 */
331 for (i=0; i <= m->basicblockcount; i++) {
332 ls->sorted_rev[i] = -1;
333 ls->nesting[i] = 1+ls->nesting[i]*ls->nesting[i]*10;
/* lsra_get_backedges: recompute the backedge set on the final CFG (after the
   artificial exit block is stripped), then sort, merge overlapping edges and
   compact the array so each remaining entry is a disjoint loop region. */
337 void lsra_get_backedges(jitdata *jd) {
348 /* first remove artificial end basicblock from ls->sorted, succ and pred */
350 for (i=0; i < m->basicblockcount; i++) {
/* unlink any successor edge pointing at the artificial exit block */
351 for (next=&(ls->succ[i]); *next != NULL; next=&((*next)->next)) {
352 if ( (*next)->value == m->basicblockcount ) {
353 /* artificial end bb found */
354 *next = (*next)->next;
355 if (*next == NULL) break;
/* same removal for predecessor lists */
358 for (next=&(ls->pred[i]); *next != NULL; next=&((*next)->next)) {
359 if ( (*next)->value == m->basicblockcount ) {
360 /* artificial end bb found */
361 *next = (*next)->next;
362 if (*next == NULL) break;
/* remember the sorted position of the artificial exit for compaction */
366 if (ls->sorted[i] == m->basicblockcount) j=i;
369 /* if an artificial end block was removed -> change ls->sorted accordingly*/
371 for (i=j+1; i <= m->basicblockcount; i++) {
372 ls->sorted[i-1] = ls->sorted[i];
373 ls->nesting[i-1] = ls->nesting[i];
/* rebuild the inverse sort permutation on the reduced block set */
376 for (i=0; i < m->basicblockcount; i++)
377 if (ls->sorted[i] != -1)
378 ls->sorted_rev[ls->sorted[i]]=i;
380 lsra_get_backedges_(ls, m->basicblockcount);
382 /* - sort backedge by increasing start */
383 for (i=0; i < ls->backedge_count; i++)
384 for (j=i+1; j < ls->backedge_count; j++)
385 if (ls->backedge[i]->start > ls->backedge[j]->start) {
388 ls->backedge[i] = ls->backedge[j];
392 #ifdef LSRA_DEBUG_VERBOSE
393 if (compileverbose) {
394 printf("sorted: \n");
395 for (i=0; i < ls->backedge_count; i++)
396 printf("Backedge: %i - %i, %i - %i\n",
397 ls->sorted[ls->backedge[i]->start],
398 ls->sorted[ls->backedge[i]->end], ls->backedge[i]->start,
399 ls->backedge[i]->end);
400 printf("Nesting Level \n");
401 for (i=0; i<m->basicblockcount; i++) printf(" %3li", ls->nesting[i]);
407 /* - merge overlapping backedges */
/* for each live entry find the next live one; if their ranges overlap,
   widen the later edge and drop the earlier one (mark NULL) */
410 for (i=0; i < ls->backedge_count-1; i++) {
411 if (ls->backedge[i] != NULL) {
412 for (j = i + 1; (j < ls->backedge_count) && (ls->backedge[j] == NULL); j++ );
413 if (j != ls->backedge_count) {
414 if (ls->backedge[i]->start >= ls->backedge[j]->end) {
416 /* overlapping -> merge */
417 ls->backedge[j]->end = min (ls->backedge[j]->end,
418 ls->backedge[i]->end);
419 ls->backedge[i] = NULL;
425 #ifdef LSRA_DEBUG_VERBOSE
426 if (compileverbose) {
427 printf("merged: \n");
428 for (i = 0; i < ls->backedge_count; i++)
429 if (ls->backedge[i] != NULL)
430 printf("Backedge: %i - %i, %i - %i\n",
431 ls->sorted[ls->backedge[i]->start],
432 ls->sorted[ls->backedge[i]->end],
433 ls->backedge[i]->start, ls->backedge[i]->end);
436 /* - remove backedge[] == NULL from array */
/* two-pointer compaction: swap NULL slots toward the tail, shrink count */
438 for (j = ls->backedge_count - 1; ((j>=0) && (ls->backedge[j] == NULL));
442 if (ls->backedge[i] == NULL) { /* swap backedge[i] and backedge[j]*/
444 ls->backedge[j] = ls->backedge[i];
448 ls->backedge_count--;
451 #ifdef LSRA_DEBUG_VERBOSE
452 if (compileverbose) {
454 for (i=0; i < ls->backedge_count; i++)
455 printf("Backedge: %i - %i, %i - %i\n",
456 ls->sorted[ls->backedge[i]->start],
457 ls->sorted[ls->backedge[i]->end],ls->backedge[i]->start,
458 ls->backedge[i]->end);
/* lsra_add_cfg: insert the CFG edge from -> to into ls->succ/ls->pred.
   Skips forward over unreachable target blocks and ignores duplicates. */
463 void lsra_add_cfg(jitdata *jd, int from, int to) {
471 /* ignore Empty, Deleted,... Basic Blocks as target */
472 /* TODO: Setup BasicBlock array before to avoid this */
473 /* best together with using the basicblock list, so lsra works */
474 /* with opt_loops, too */
475 for (;(to < m->basicblockcount) && (m->basicblocks[to].flags < BBREACHED); to++);
/* duplicate check: linear scan of the existing successor list */
477 for (n=ls->succ[from]; (n!= NULL) && (n->value != to); n=n->next);
478 if (n != NULL) return; /* edge from->to already existing */
/* prepend to successor list of 'from' */
480 n=DNEW(struct _list);
483 n->next=ls->succ[from];
/* prepend to predecessor list of 'to' */
486 n=DNEW(struct _list);
488 n->next=ls->pred[to];
493 /* add Edges from guarded Areas to Exception handlers in the CFG */
493 /* add Edges from guarded Areas to Exception handlers in the CFG */
/* lsra_add_exceptions: for every exception-table entry, add a CFG edge from
   each reachable block of the guarded range to its handler block, so the
   depth-first ordering and lifetimes account for exceptional control flow. */
494 void lsra_add_exceptions(jitdata *jd) {
504 ex = jd->exceptiontable;
506 /* add cfg edges from all bb of a try block to the start of the according */
507 /* exception handler to ensure the right order after depthfirst search */
509 #ifdef LSRA_DEBUG_VERBOSE
511 printf("ExTable(%i): ", jd->exceptiontablelength);
514 for (; ex != NULL; ex = ex->down) {
516 #ifdef LSRA_DEBUG_VERBOSE
517 if (compileverbose) {
518 printf("[%i-%i]->%i ",ex->start->nr, ex->end->nr,
/* sanity checks on the handler and guarded range */
520 if (ex->handler->nr >= m->basicblockcount) {
521 log_text("Exceptionhandler Basicblocknummer invalid\n");
524 if (m->basicblocks[ex->handler->nr].flags < BBREACHED) {
525 log_text("Exceptionhandler Basicblocknummer not reachable\n");
528 if (ex->start->nr > ex->end->nr) {
529 log_text("Guarded Area starts after its end\n");
534 /* loop all valid Basic Blocks of the guarded area and add CFG edges */
535 /* to the appropriate handler */
536 for (i=ex->start->nr; (i <= ex->end->nr) &&
537 (i < m->basicblockcount); i++)
538 if (m->basicblocks[i].flags >= BBREACHED)
539 lsra_add_cfg(jd, i, ex->handler->nr);
541 #ifdef LSRA_DEBUG_VERBOSE
542 if (compileverbose) {
/* lsra_add_jsr: record a JSR (subroutine call) edge. Adds the CFG edge to
   the subroutine header and stores the return-address block in the sorted
   ls->sbr list so lsra_add_subs can later wire up the RET edges. */
548 void lsra_add_jsr(jitdata *jd, int from, int to) {
551 struct _sbr *sbr, *n;
557 /* ignore Empty, Deleted,... Basic Blocks as target */
558 /* TODO: Setup BasicBlock array before to avoid this */
559 /* best together with using the basicblock list, so lsra works */
560 /* with opt_loops, too */
561 for (; (to < m->basicblockcount) && (m->basicblocks[to].flags < BBREACHED);
563 #ifdef LSRA_DEBUG_CHECK
564 if (to == m->basicblockcount)
565 { log_text("Invalid subroutine start index\n"); assert(0); }
568 lsra_add_cfg(jd, from, to);
570 /* from + 1 ist the return Basic Block Index */
/* skip unreachable blocks to find the actual return target after the jsr */
571 for (from++; (from < m->basicblockcount) &&
572 (m->basicblocks[from].flags < BBREACHED); from++);
573 #ifdef LSRA_DEBUG_CHECK
574 if (from == m->basicblockcount)
575 { log_text("Invalid return basic block index for jsr\n"); assert(0); }
578 /* add subroutine info in ls->sbr.next */
580 /* search for right place to insert */
/* list is kept ordered by subroutine header block index */
581 for (sbr = &(ls->sbr); (sbr->next != NULL) && (sbr->next->header < to); sbr=sbr->next);
583 if ((sbr->next!= NULL) && (sbr->next->header == to)) {
584 /* Entry for this sub already exist */
587 /* make new Entry and insert it in ls->sbr.next */
588 n = DNEW( struct _sbr );
598 /* now insert return adress in sbr->ret */
/* prepend the return block to this subroutine's return-address list */
599 ret = DNEW( struct _list);
601 ret->next = sbr->ret;
/* lsra_add_sub: recursively walk a subroutine body starting at b_index.
   On reaching a RET instruction, connects the block to every recorded
   return address; otherwise follows the CFG (or falls through for empty
   blocks). 'visited' guards against revisiting blocks. */
605 void lsra_add_sub( jitdata *jd, int b_index, struct _list *ret,
617 /* break at virtual End Block */
618 if (b_index != m->basicblockcount) {
619 visited[b_index] = true;
/* stop at unreachable blocks */
622 if (m->basicblocks[b_index].flags < BBREACHED)
/* empty block and not forced to continue -> fall through below */
624 if (!next_block && !(m->basicblocks[b_index].icount))
/* ip points at the last instruction of the block */
628 ip = m->basicblocks[b_index].iinstr
629 + m->basicblocks[b_index].icount -1;
631 if (ip->opc == ICMD_JSR) /* nested Subroutines */
636 if (ip->opc == ICMD_RET) {
637 /* subroutine return found -> add return adresses to CFG */
638 for (l = ret; l != NULL; l = l->next)
639 lsra_add_cfg(jd, b_index, l->value);
640 } else { /* follow CFG */
641 for ( l = ls->succ[b_index]; l != NULL; l = l->next)
642 if (!visited[l->value])
643 lsra_add_sub(jd, l->value, ret, visited);
645 } else { /* fall through to next block */
646 if (!visited[b_index + 1])
647 lsra_add_sub(jd, b_index + 1, ret, visited);
652 /* Add subroutines from ls->sbr list to CFG */
652 /* Add subroutines from ls->sbr list to CFG */
/* lsra_add_subs: for every recorded subroutine, traverse its body and wire
   the RET blocks to the collected return addresses (via lsra_add_sub). */
653 void lsra_add_subs(jitdata *jd) {
659 #ifdef LSRA_DEBUG_VERBOSE
/* NOTE(review): allocates an array of int but uses it through a bool*;
   over-allocates if sizeof(int) > sizeof(bool) — should presumably be
   DMNEW(bool, ...); confirm against the original source */
666 visited = (bool *)DMNEW(int, m->basicblockcount + 1);
667 for (i=0; i <= m->basicblockcount; i++) visited[i] = false;
668 for (sbr = ls->sbr.next; sbr != NULL; sbr=sbr->next) {
670 #ifdef LSRA_DEBUG_VERBOSE
671 if (compileverbose) {
672 printf("Subroutine Header: %3i Return Adresses:",sbr->header);
673 for (ret = sbr->ret; ret != NULL; ret = ret->next)
674 printf(" %3i", ret->value);
678 lsra_add_sub(jd, sbr->header, sbr->ret, visited );
682 /* Generate the Control Flow Graph */
683 /* ( pred,succ,num_pred of lsradata structure) */
682 /* Generate the Control Flow Graph */
683 /* ( pred,succ,num_pred of lsradata structure) */
/* lsra_make_cfg: build succ/pred adjacency lists by decoding the last
   (non-NOP) instruction of every reachable basic block and adding the
   appropriate edges for returns, branches, switches, jsr and fallthrough. */
685 void lsra_make_cfg(jitdata *jd) {
690 int high, low, count;
697 while (b_index < m->basicblockcount ) {
698 if ((m->basicblocks[b_index].flags >= BBREACHED) &&
699 (len = m->basicblocks[b_index].icount)) {
700 /* block is valid and contains instructions */
702 /* set ip to last instruction */
703 ip = m->basicblocks[b_index].iinstr +
704 m->basicblocks[b_index].icount -1;
/* skip trailing NOPs to find the real terminator */
705 while ((len>0) && (ip->opc == ICMD_NOP)) {
709 switch (ip->opc) { /* check type of last instruction */
/* return-style terminator -> edge to the artificial exit block */
717 lsra_add_cfg(jd, b_index, m->basicblockcount);
718 break; /* function returns -> end of graph */
747 case ICMD_IF_ACMPNE: /* branch -> add next block */
748 lsra_add_cfg(jd, b_index, b_index+1);
749 /* fall throu -> add branch target */
752 lsra_add_cfg(jd, b_index, m->basicblockindex[ip->op1]);
753 break; /* visit branch (goto) target */
755 case ICMD_TABLESWITCH: /* switch statement */
/* first entry is the default target, then high-low+1 case targets */
758 lsra_add_cfg(jd, b_index, m->basicblockindex[*s4ptr]);
765 count = (high-low+1);
767 while (--count >= 0) {
769 lsra_add_cfg(jd, b_index,
770 m->basicblockindex[*s4ptr]);
774 case ICMD_LOOKUPSWITCH: /* switch statement */
/* default target first, then (match, target) pairs */
777 lsra_add_cfg(jd, b_index, m->basicblockindex[*s4ptr]);
782 while (--count >= 0) {
783 lsra_add_cfg(jd, b_index,
784 m->basicblockindex[s4ptr[1]]);
/* jsr gets special handling to record the return address */
790 lsra_add_jsr(jd, b_index, m->basicblockindex[ip->op1]);
/* default: plain fallthrough to the following block */
797 lsra_add_cfg(jd, b_index, b_index + 1 );
799 } /* switch (ip->opc)*/
800 } /* if ((m->basicblocks[blockIndex].icount)&& */
801 /* (m->basicblocks[b_index].flags >= BBREACHED)) */
803 } /* while (b_index < m->basicblockcount ) */
/* lsra_init: allocate and clear the per-block LSRA arrays (pred/succ lists,
   sort orders, predecessor counts, nesting weights), including one extra
   slot for the artificial exit block used as DFS sink. */
806 void lsra_init(jitdata *jd) {
814 /* Init LSRA Data Structures */
815 /* allocate lifetimes for all Basicblocks */
816 /* + 1 for an artificial exit node */
817 /* which is needed as "start" point for the reverse postorder sorting */
818 ls->pred = DMNEW(struct _list *, m->basicblockcount+1);
819 ls->succ = DMNEW(struct _list *, m->basicblockcount+1);
820 ls->sorted = DMNEW(int , m->basicblockcount+1);
821 ls->sorted_rev = DMNEW(int , m->basicblockcount+1);
822 ls->num_pred = DMNEW(int , m->basicblockcount+1);
823 ls->nesting = DMNEW(long , m->basicblockcount+1);
824 for (i=0; i<m->basicblockcount; i++) {
828 ls->sorted_rev[i]=-1;
/* initialize the artificial exit node's slot explicitly */
832 ls->pred[m->basicblockcount]=NULL;
833 ls->succ[m->basicblockcount]=NULL;
834 ls->sorted[m->basicblockcount]=-1;
835 ls->sorted_rev[m->basicblockcount]=-1;
836 ls->num_pred[m->basicblockcount]=0;
/* lsra_setup: orchestrate CFG construction, nesting analysis, subroutine and
   exception edges, reverse-postorder sorting, lifetime array allocation and
   lifetime construction (scan + join + length calculation) for the method. */
843 void lsra_setup(jitdata *jd) {
848 #ifdef LSRA_DEBUG_VERBOSE
865 #if defined(ENABLE_LOOP)
866 /* Loop optimization "destroys" the basicblock array */
867 /* TODO: work with the basicblock list */
869 log_text("lsra not possible with loop optimization\n");
872 #endif /* defined(ENABLE_LOOP) */
874 /* Setup LSRA Data structures */
876 /* Generate the Control Flow Graph */
878 /* gather nesting before adding of Exceptions and Subroutines!!! */
882 lsra_get_nesting(jd);
885 #ifdef LSRA_DEBUG_VERBOSE
886 if (compileverbose) {
887 printf("Successors:\n");
888 for (i=0; i < m->basicblockcount; i++) {
890 for (nl=ls->succ[i]; nl!= NULL; nl=nl->next)
891 printf("%3i ",nl->value);
894 printf("Predecessors:\n");
895 for (i=0; i < m->basicblockcount; i++) {
897 for (nl=ls->pred[i]; nl!= NULL; nl=nl->next)
898 printf("%3i ",nl->value);
902 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted[i]);
904 printf("Sorted_rev: ");
905 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted_rev[i]);
910 /* add subroutines before exceptions! They "destroy" the CFG */
912 lsra_add_exceptions(jd);
914 /* generate reverse post order sort */
917 /* setup backedge and nested data structures*/
918 lsra_get_backedges(jd);
/* lifetime pool: stack-slot lifetimes plus one slot per (local, type) pair */
922 ls->lifetimecount = ls->maxlifetimes + jd->maxlocals * (TYPE_ADR+1);
923 ls->lifetime = DMNEW(struct lifetime, ls->lifetimecount);
924 ls->lt_used = DMNEW(int, ls->lifetimecount);
925 ls->lt_int = DMNEW(int, ls->lifetimecount);
926 ls->lt_int_count = 0;
927 ls->lt_flt = DMNEW(int, ls->lifetimecount);
928 ls->lt_flt_count = 0;
929 ls->lt_mem = DMNEW(int, ls->lifetimecount);
930 ls->lt_mem_count = 0;
/* type == -1 marks an unused lifetime slot */
932 for (i=0; i < ls->lifetimecount; i++) ls->lifetime[i].type = -1;
934 #ifdef LSRA_DEBUG_VERBOSE
935 if (compileverbose) {
936 printf("Successors:\n");
937 for (i=0; i < m->basicblockcount; i++) {
939 for (nl=ls->succ[i]; nl!= NULL; nl=nl->next)
940 printf("%3i ",nl->value);
943 printf("Predecessors:\n");
944 for (i=0; i < m->basicblockcount; i++) {
946 for (nl=ls->pred[i]; nl!= NULL; nl=nl->next)
947 printf("%3i ",nl->value);
951 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted[i]);
953 printf("Sorted_rev: ");
954 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted_rev[i]);
960 #ifdef LSRA_DEBUG_CHECK
961 /* compare m->basicblocks[] with the list basicblocks->next */
963 bptr = m->basicblocks;
964 while (bptr != NULL) {
965 if (i > m->basicblockcount){
966 { log_text("linked bb list does not correspond with bb array(1)\n");
969 if (bptr != &(m->basicblocks[i])){
970 { log_text("linked bb list does not correspond with bb array(2)\n");
977 if (i<m->basicblockcount){
978 { log_text("linked bb list does not correspond with bb array(3)\n");
991 methoddesc *md = m->parseddesc;
993 /* Create Stack Slot lifetimes over all basic blocks */
/* iterate blocks in reverse sorted order so uses are seen before defs */
994 for (i=m->basicblockcount-1; i >= 0; i--) {
995 if (ls->sorted[i] != -1) {
996 lsra_scan_registers_canditates(jd, ls->sorted[i]);
997 lsra_join_lifetimes(jd, ls->sorted[i]);
1001 /* Parameter initialisiation for locals [0 .. paramcount[ */
1002 /* -> add local var write access at (bb=0,iindex=-1) */
1003 /* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! */
1004 /* this needs a special treatment, wenn lifetimes get extended */
1005 /* over backedges, since this parameter initialisation happens */
1006 /* outside of Basic Block 0 !!!! */
1007 /* this could have been avoided by marking the read access with -1,0 */
1009 for (p = 0, i = 0; p < md->paramcount; p++) {
1010 t = md->paramtypes[p].type;
1012 if (rd->locals[i][t].type >= 0)
1013 /* Param to Local init happens before normal Code */
1014 lsra_usage_local(ls, i, t, 0, -1, LSRA_STORE);
1016 /* increment local counter a second time */
1017 /* for 2 word types */
1018 if (IS_2_WORD_TYPE(t))
/* finally compute i_start/i_end for every lifetime */
1024 lsra_calc_lifetime_length(jd);
1026 #ifdef LSRA_DEBUG_VERBOSE
1028 printf("Basicblockcount: %4i\n",m->basicblockcount);
/* lsra_reg_setup: partition the machine registers into the saved and temp
   pools used by the linear scan. Leaf methods may treat argument and temp
   registers as saved (they survive because no calls are made) and precolour
   argument registers for incoming parameters; non-leaf methods keep the
   conventional saved/temp split and reuse unused argument registers as
   temps. */
1033 void lsra_reg_setup(jitdata *jd, struct lsra_register *int_reg,
1034 struct lsra_register *flt_reg ) {
1035 int i, j, iarg, farg;
1038 bool *fltarg_used, *intarg_used;
1047 int_reg->nregdesc = nregdescint;
1048 flt_reg->nregdesc = nregdescfloat;
1049 if (code_is_leafmethod(code)) {
1050 /* Temp and Argumentregister can be used as saved registers */
1052 int_reg->sav_top = INT_ARG_CNT + INT_TMP_CNT + INT_SAV_CNT;
1053 int_reg->sav_reg = DMNEW(int, int_reg->sav_top);
1054 int_reg->tmp_reg = NULL;
1055 int_reg->tmp_top = -1;
1056 flt_reg->sav_top = FLT_ARG_CNT + FLT_TMP_CNT + FLT_SAV_CNT;
1057 flt_reg->sav_reg = DMNEW(int, flt_reg->sav_top);
1058 flt_reg->tmp_reg = NULL;
1059 flt_reg->tmp_top = -1;
1061 /* additionaly precolour registers for Local Variables acting as */
/* track which argument registers are consumed by incoming parameters */
1067 intarg_used = DMNEW(bool, INT_ARG_CNT);
1068 for (i=0; i < INT_ARG_CNT; i++)
1069 intarg_used[i]=false;
1071 fltarg_used = DMNEW(bool, FLT_ARG_CNT);
1072 for (i=0; i < FLT_ARG_CNT; i++)
1073 fltarg_used[i]=false;
1075 int_sav_top=int_reg->sav_top;
1076 flt_sav_top=flt_reg->sav_top;
/* push each register-passed parameter's register(s) onto the saved pool */
1078 for (i=0; (i < md->paramcount); i++) {
1079 if (!md->params[i].inmemory) {
1080 if (IS_INT_LNG_TYPE(md->paramtypes[i].type)) {
1081 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1082 if (IS_2_WORD_TYPE(md->paramtypes[i].type)) {
1083 int_reg->sav_reg[--int_sav_top] =
1084 rd->argintregs[GET_HIGH_REG(md->params[i].regoff)];
1085 intarg_used[GET_HIGH_REG(md->params[i].regoff)]=true;
1086 /*used -> don't copy later on */
1087 int_reg->sav_reg[--int_sav_top] =
1088 rd->argintregs[GET_LOW_REG(md->params[i].regoff)];
1089 intarg_used[GET_LOW_REG(md->params[i].regoff)]=true;
1090 /*used -> don't copy later on */
1093 { /* !IS_2_WORD_TYPE(md->paramtypes[i].type */
1094 int_reg->sav_reg[--int_sav_top] =
1095 rd->argintregs[md->params[i].regoff];
1096 intarg_used[md->params[i].regoff]=true;
1097 /*used -> don't copy later on */
1100 #if !defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
1101 /* do not precolour float arguments if they are passed in */
1102 /* integer registers. But these integer argument registers */
1103 /* still be used in the method! */
1104 else { /* IS_FLT_DBL_TYPE(md->paramtypes[i].type */
1105 flt_reg->sav_reg[--flt_sav_top] =
1106 rd->argfltregs[md->params[i].regoff];
1107 fltarg_used[md->params[i].regoff]=true;
1114 /* copy rest of argument registers to flt_reg->sav_reg and */
1115 /* int_reg->sav_reg; */
1116 for (i=0; i < INT_ARG_CNT; i++)
1117 if (!intarg_used[i])
1118 int_reg->sav_reg[--int_sav_top]=rd->argintregs[i];
1119 for (i=0; i < FLT_ARG_CNT; i++)
1120 if (!fltarg_used[i])
1121 flt_reg->sav_reg[--flt_sav_top]=rd->argfltregs[i];
1123 /* copy temp registers to flt_reg->sav_reg and int_reg->sav_reg */
1124 for (i=0; i < INT_TMP_CNT; i++)
1125 int_reg->sav_reg[--int_sav_top]=rd->tmpintregs[i];
1126 for (i=0; i < FLT_TMP_CNT; i++)
1127 flt_reg->sav_reg[--flt_sav_top]=rd->tmpfltregs[i];
1130 /* non leaf method -> use Argument Registers [arg[int|flt]reguse */
1131 /* ... [INT|FLT]_ARG_CNT[ as temp reg */
1132 /* divide temp and saved registers */
1133 int argintreguse, argfltreguse;
1135 /* with Locals as non SAVEDVAR, the used arg[int|flt] as in params */
1136 /* of the method itself have to be regarded, or mismatch before */
1137 /* block 0 with parameter copy could happen! */
1138 argintreguse = max(rd->argintreguse, md->argintreguse);
1139 argfltreguse = max(rd->argfltreguse, md->argfltreguse);
1141 argintreguse = rd->argintreguse;
1142 argfltreguse = rd->argfltreguse;
/* saved pool is the fixed callee-saved set; temp pool is temps plus
   whatever argument registers are not needed for outgoing calls */
1144 int_sav_top = int_reg->sav_top = INT_SAV_CNT;
1145 int_reg->sav_reg = DMNEW(int, int_reg->sav_top);
1146 int_reg->tmp_top = INT_TMP_CNT +
1147 max(0, (INT_ARG_CNT - argintreguse));
1148 int_reg->tmp_reg = DMNEW(int, int_reg->tmp_top);
1150 flt_sav_top =flt_reg->sav_top = FLT_SAV_CNT;
1151 flt_reg->sav_reg = DMNEW(int, flt_reg->sav_top);
1152 flt_reg->tmp_top = FLT_TMP_CNT +
1153 max(0 , (FLT_ARG_CNT - argfltreguse));
1154 flt_reg->tmp_reg = DMNEW(int, flt_reg->tmp_top);
1156 /* copy temp and unused argument registers to flt_reg->tmp_reg and */
1157 /* int_reg->tmp_reg */
1158 for (i=0; i < INT_TMP_CNT; i++)
1159 int_reg->tmp_reg[i]=rd->tmpintregs[i];
1160 for (j=argintreguse; j < INT_ARG_CNT; j++, i++)
1161 int_reg->tmp_reg[i]=rd->argintregs[j];
1162 for (i=0; i < FLT_TMP_CNT; i++)
1163 flt_reg->tmp_reg[i]=rd->tmpfltregs[i];
1164 for (j=argfltreguse; j < FLT_ARG_CNT; j++, i++)
1165 flt_reg->tmp_reg[i]=rd->argfltregs[j];
1168 /* now copy saved registers to flt_reg->sav_reg and int_reg->sav_reg */
1169 for (i = INT_SAV_CNT-1; i >= 0; i--)
1170 int_reg->sav_reg[--int_sav_top]=rd->savintregs[i];
1171 for (i = FLT_SAV_CNT-1; i >= 0; i--)
1172 flt_reg->sav_reg[--flt_sav_top]=rd->savfltregs[i];
/* lsra_insertion_sort: sort index array a[lo..hi] by ascending lifetime
   i_start; used by lsra_qsort for small partitions. */
1176 void lsra_insertion_sort( struct lsradata *ls, int *a, int lo, int hi) {
1179 for (i=lo+1; i<=hi; i++) {
/* shift larger elements right until a[i]'s insertion point is found */
1181 t=ls->lifetime[a[j]].i_start;
1183 while ((j>lo) && (ls->lifetime[a[j-1]].i_start > t)) {
/* lsra_qsort: quicksort of lifetime indices by ascending i_start, with
   middle-element pivot and insertion sort for small partitions. */
1191 void lsra_qsort( struct lsradata *ls, int *a, int lo, int hi) {
1197 x = ls->lifetime[a[(lo+hi)/2]].i_start;
/* Hoare-style partition around pivot start position x */
1200 while (ls->lifetime[a[i]].i_start < x) i++;
1201 while (ls->lifetime[a[j]].i_start > x) j--;
1203 /* exchange a[i], a[j] */
1213 if (lo < j) lsra_qsort( ls, a, lo, j);
1214 if (i < hi) lsra_qsort( ls, a, i, hi);
/* small ranges fall through to insertion sort */
1216 lsra_insertion_sort(ls, a, lo, hi);
/* lsra_param_sort: parameters are the lifetimes with i_start == -1 (they are
   live before instruction 0). They sort to the front of the array; this
   reorders that prefix by ascending local-variable index v_index. */
1220 void lsra_param_sort(struct lsradata *ls, int *lifetime, int lifetime_count) {
1225 /* count number of parameters ( .i_start == -1) */
1226 for (param_count=0; (param_count < lifetime_count) &&
1227 (ls->lifetime[lifetime[param_count]].i_start == -1); param_count++);
1229 if (param_count > 0) {
1230 /* now sort the parameters by v_index */
/* O(n^2) swap sort is fine: param_count is bounded by the ABI arg count */
1231 for (i=0; i < param_count -1; i++)
1232 for (j=i+1; j < param_count; j++)
1233 if ( ls->lifetime[lifetime[i]].v_index >
1234 ls->lifetime[lifetime[j]].v_index) {
1237 lifetime[i]=lifetime[j];
/* lsra_main: top-level allocation driver. Sorts lifetimes, sets up the
   register pools, runs the linear scan separately for the integer and float
   pools, records the saved-register high-water marks in rd, and finally
   assigns stack slots to everything that remained unallocated. */
1243 void lsra_main(jitdata *jd) {
1244 #ifdef LSRA_DEBUG_VERBOSE
1249 struct lsra_register flt_reg, int_reg;
1252 #if defined(__I386__)
1260 /* sort lifetimes by increasing start */
1261 lsra_qsort( ls, ls->lt_mem, 0, ls->lt_mem_count - 1);
1262 lsra_qsort( ls, ls->lt_int, 0, ls->lt_int_count - 1);
1263 lsra_qsort( ls, ls->lt_flt, 0, ls->lt_flt_count - 1);
1264 /* sort local vars used as parameter */
1265 lsra_param_sort( ls, ls->lt_int, ls->lt_int_count);
1266 lsra_param_sort( ls, ls->lt_flt, ls->lt_flt_count);
1267 lsra_reg_setup(jd, &int_reg, &flt_reg);
1269 #ifdef LSRA_DEBUG_VERBOSE
1270 if (compileverbose) {
1271 printf("INTSAV REG: ");
1272 for (i=0; i<int_reg.sav_top; i++)
1273 printf("%2i ",int_reg.sav_reg[i]);
1274 printf("\nINTTMP REG: ");
1275 for (i=0; i<int_reg.tmp_top; i++)
1276 printf("%2i ",int_reg.tmp_reg[i]);
1277 printf("\nFLTSAV REG: ");
1278 for (i=0; i<flt_reg.sav_top; i++)
1279 printf("%2i ",flt_reg.sav_reg[i]);
1280 printf("\nFLTTMP REG: ");
1281 for (i=0; i<flt_reg.tmp_top; i++)
1282 printf("%2i ",flt_reg.tmp_reg[i]);
/* active-interval scratch arrays shared by both scan passes */
1286 ls->active_tmp = DMNEW( struct lifetime *, max(INT_REG_CNT, FLT_REG_CNT));
1287 ls->active_sav = DMNEW( struct lifetime *, max(INT_REG_CNT, FLT_REG_CNT));
/* integer pass: lsra_reg_use tracks the minimum sav_top reached, i.e. how
   many saved registers were consumed */
1289 lsra_reg_use=INT_SAV_CNT; /* init to no saved reg used... */
1290 _lsra_main(jd, ls->lt_int, ls->lt_int_count, &int_reg, &lsra_reg_use);
1291 if (lsra_reg_use > INT_SAV_CNT)
1292 lsra_reg_use=INT_SAV_CNT;
1293 rd->savintreguse = lsra_reg_use;
/* float pass, same bookkeeping */
1295 lsra_reg_use = FLT_SAV_CNT; /* no saved reg used... */
1296 _lsra_main(jd, ls->lt_flt, ls->lt_flt_count, &flt_reg, &lsra_reg_use);
1297 if (lsra_reg_use > FLT_SAV_CNT)
1298 lsra_reg_use=FLT_SAV_CNT;
1299 rd->savfltreguse=lsra_reg_use;
1301 /* rd->memuse was already set in stack.c to allocate stack space for */
1302 /* passing arguments to called methods */
1303 #if defined(__I386__)
1304 if (checksync && code_is_synchronized(code)) {
1305 /* reserve 0(%esp) for Monitorenter/exit Argument on i386 */
/* stack-slot pass: spill everything without a register to memory */
1311 lsra_mem_use = rd->memuse; /* Init with memuse from stack.c */
1313 lsra_alloc(jd, ls->lt_mem, ls->lt_mem_count, &lsra_mem_use);
1314 lsra_alloc(jd, ls->lt_int, ls->lt_int_count, &lsra_mem_use);
1315 lsra_alloc(jd, ls->lt_flt, ls->lt_flt_count, &lsra_mem_use);
1317 rd->memuse=lsra_mem_use;
1319 #ifdef LSRA_DEBUG_VERBOSE
1320 if (compileverbose) {
1321 printf("Int RA complete \n");
1322 printf("Lifetimes after splitting int: \n");
1323 print_lifetimes(jd, ls->lt_int, ls->lt_int_count);
1325 printf("Flt RA complete \n");
1326 printf("Lifetimes after splitting flt:\n");
1327 print_lifetimes(jd, ls->lt_flt, ls->lt_flt_count);
1329 printf("Rest RA complete \n");
1330 printf("Lifetimes after leftt:\n");
1331 print_lifetimes(jd, ls->lt_mem, ls->lt_mem_count);
/* lsra_alloc: assign stack slots to all lifetimes in 'lifet' that did not
   receive a register (lt->reg == -1), reusing freed slots via the 'fmem'
   free list and propagating the resulting regoff/flags into the stack
   elements or the local-variable descriptors. */
1336 void lsra_alloc(jitdata *jd, int *lifet, int lifetimecount, int *mem_use)
1339 struct lifetime *lt;
1340 struct freemem *fmem;
1341 struct stackslot *n;
/* dummy head node for the ordered free-memory list */
1349 fmem = DNEW(struct freemem);
1353 for (lt_index = 0; lt_index < lifetimecount; lt_index ++) {
1354 lt = &(ls->lifetime[lifet[lt_index]]);
1358 if (lt->reg == -1) {
1360 regoff = lsra_getmem(lt, fmem, mem_use);
1362 flags = lt->savedvar;
/* negative v_index -> stack-slot lifetime: patch every member slot */
1366 if (lt->v_index < 0) {
1367 for (n = lt->local_ss; n != NULL; n = n->next) {
1368 lsra_setflags(&(n->s->flags), flags);
1369 n->s->regoff = regoff;
1371 } else { /* local var */
1372 if (rd->locals[lt->v_index][lt->type].type >= 0) {
1373 rd->locals[lt->v_index][lt->type].flags = flags;
1374 rd->locals[lt->v_index][lt->type].regoff = regoff;
1376 log_text("Type Data mismatch\n");
/* lsra_setflags: copy the INMEMORY and SAVEDVAR bits from newflags into
   *flags (setting or clearing INMEMORY, adding SAVEDVAR if requested). */
1384 void lsra_setflags(int *flags, int newflags)
1386 if ( newflags & INMEMORY)
1389 *flags &= ~INMEMORY;
1391 if (newflags & SAVEDVAR)
/* lsra_getmem: return a stack-slot offset for lifetime lt. Reuses the first
   free slot whose previous occupant ended before lt starts, otherwise grabs
   a fresh slot via lsra_getnewmem; the slot is re-queued into the free list
   ordered by ascending end so the earliest-expiring slot is reused first. */
1395 int lsra_getmem(struct lifetime *lt, struct freemem *fmem, int *mem_use)
1397 struct freemem *fm, *p;
1399 /* no Memory Slot allocated till now or all are still live */
1400 if ((fmem->next == NULL) || (fmem->next->end > lt->i_start)) {
1401 fm=lsra_getnewmem(mem_use);
1403 /* Memoryslot free */
1405 fmem->next = fm->next;
/* slot is occupied until this lifetime's end; keep list sorted by end */
1408 fm->end = lt->i_end;
1409 for (p = fmem; (p->next != NULL) && (p->next->end < fm->end); p = p->next);
/* lsra_getnewmem: allocate a new free-memory descriptor for a fresh stack
   slot, advancing *mem_use (the method's stack-slot high-water mark). */
1415 struct freemem *lsra_getnewmem(int *mem_use)
1419 fm = DNEW(struct freemem);
/* _lsra_main: the linear scan proper for one register class. Walks lifetimes
   in ascending start order, expires finished intervals, then hands each
   lifetime a register: saved-pool for savedvars (and everything in leaf
   methods), temp-pool first otherwise with saved-pool fallback; spills via
   spill_at_intervall when both pools are empty. *reg_use tracks the lowest
   sav_top reached (= number of saved registers consumed). */
1426 void _lsra_main(jitdata *jd, int *lifet, int lifetimecount,
1427 struct lsra_register *reg, int *reg_use)
1429 struct lifetime *lt;
1433 bool temp; /* reg from temp registers (true) or saved registers (false) */
1440 #if !defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1443 if ((reg->tmp_top+reg->sav_top) == 0) {
1445 /* no registers available */
1446 for (lt_index = 0; lt_index < lifetimecount; lt_index++)
1447 ls->lifetime[lifet[lt_index]].reg = -1;
1451 ls->active_tmp_top = 0;
1452 ls->active_sav_top = 0;
1454 for (lt_index = 0; lt_index < lifetimecount; lt_index++) {
1455 lt = &(ls->lifetime[lifet[lt_index]]);
1457 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* longs need a register pair when integer registers are combined */
1458 regsneeded = (lt->type == TYPE_LNG)?1:0;
1461 lsra_expire_old_intervalls(jd, lt, reg);
1464 if (lt->savedvar || code_is_leafmethod(code)) {
1465 /* use Saved Reg (in case of leafmethod all regs are saved regs) */
1466 if (reg->sav_top > regsneeded) {
1467 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1469 reg_index = PACK_REGS(reg->sav_reg[--reg->sav_top],
1470 reg->sav_reg[--reg->sav_top]);
1474 reg_index = reg->sav_reg[--reg->sav_top];
1476 } else { /* use Temp Reg or if none is free a Saved Reg */
1477 if (reg->tmp_top > regsneeded) {
1479 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1481 reg_index = PACK_REGS(reg->tmp_reg[--reg->tmp_top],
1482 reg->tmp_reg[--reg->tmp_top]);
1485 reg_index = reg->tmp_reg[--reg->tmp_top];
1488 if (reg->sav_top > regsneeded) {
1490 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1492 reg_index = PACK_REGS(reg->sav_reg[--reg->sav_top],
1493 reg->sav_reg[--reg->sav_top]);
1496 reg_index = reg->sav_reg[--reg->sav_top];
1499 if (reg_index == -1) /* no reg is available anymore... -> spill */
1500 spill_at_intervall(jd, lt);
1502 lt->reg = reg_index;
/* register the interval in the matching active list, sorted by i_end */
1504 lsra_add_active(lt, ls->active_tmp, &(ls->active_tmp_top));
1506 if (reg->sav_top<*reg_use) *reg_use=reg->sav_top;
1507 lsra_add_active(lt, ls->active_sav, &(ls->active_sav_top));
/* lsra_add_active: insert 'lt' into the 'active' array, which is kept
   sorted by increasing interval end (i_end).  Finds the insertion point,
   then shifts the tail one slot to the right. */
1513 void lsra_add_active(struct lifetime *lt, struct lifetime **active,
1518 for(i = 0; (i < *active_top) && (active[i]->i_end < lt->i_end); i++);
1519 for(j = *active_top; j > i; j--) active[j] = active[j-1];
/* lsra_expire_old_intervalls: expire finished intervals on both active
   lists (temp and saved registers) before allocating for 'lt'. */
1524 void lsra_expire_old_intervalls(jitdata *jd, struct lifetime *lt,
1525 struct lsra_register *reg)
1527 _lsra_expire_old_intervalls(jd, lt, reg, jd->ls->active_tmp,
1528 &(jd->ls->active_tmp_top));
1529 _lsra_expire_old_intervalls(jd, lt, reg, jd->ls->active_sav,
1530 &(jd->ls->active_sav_top));
/* _lsra_expire_old_intervalls: walk the (end-sorted) active list and
   return the registers of every interval that ends before lt starts to
   the free stacks.  In leaf methods all registers go back to the saved
   stack; otherwise the register's descriptor decides temp vs. saved.
   Finally the surviving entries are compacted to the front of 'active'. */
1533 void _lsra_expire_old_intervalls(jitdata *jd, struct lifetime *lt,
1534 struct lsra_register *reg,
1535 struct lifetime **active, int *active_top)
1539 for(i = 0; i < *active_top; i++) {
/* list is sorted by i_end -> first survivor ends the scan */
1540 if (active[i]->i_end > lt->i_start) break;
1542 /* make active[i]->reg available again */
1543 if (code_is_leafmethod(code)) {
1544 /* leafmethod -> don't care about type -> put all again into */
1546 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* a long occupies two packed single-word registers -> free both */
1547 if (active[i]->type == TYPE_LNG) {
1548 reg->sav_reg[reg->sav_top++] = GET_LOW_REG(active[i]->reg);
1549 reg->sav_reg[reg->sav_top++] = GET_HIGH_REG(active[i]->reg);
1552 reg->sav_reg[reg->sav_top++] = active[i]->reg;
1554 /* no leafmethod -> distinguish between temp and saved register */
1555 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1556 if (active[i]->type == TYPE_LNG) {
1557 /* no temp and saved regs are packed together, so looking at */
1558 /* LOW_REG is sufficient */
1559 if ( reg->nregdesc[ GET_LOW_REG(active[i]->reg)] == REG_SAV) {
1560 reg->sav_reg[reg->sav_top++] = GET_LOW_REG(active[i]->reg);
1561 reg->sav_reg[reg->sav_top++] = GET_HIGH_REG(active[i]->reg);
1563 reg->tmp_reg[reg->tmp_top++] = GET_LOW_REG(active[i]->reg);
1564 reg->tmp_reg[reg->tmp_top++] = GET_HIGH_REG(active[i]->reg);
1568 if ( reg->nregdesc[active[i]->reg] == REG_SAV) {
1569 reg->sav_reg[reg->sav_top++] = active[i]->reg;
1571 reg->tmp_reg[reg->tmp_top++] = active[i]->reg;
1576 /* active[0..i[ is to be removed */
1577 /* -> move [i..*active_top[ to [0..*active_top-i[ */
1578 for(k = 0, j = i; (j < *active_top); k++,j++)
1579 active[k] = active[j];
/* spill_at_intervall: pick the proper active list to spill from.  Saved
   vars (and everything in leaf methods) compete with the saved list;
   other lifetimes first try the temp list, falling back to the saved
   list when no temp candidate was found (lt->reg still -1). */
1585 void spill_at_intervall(jitdata *jd, struct lifetime *lt )
1587 if (lt->savedvar || code_is_leafmethod(code)) {
1588 _spill_at_intervall(lt, jd->ls->active_sav, &(jd->ls->active_sav_top));
1590 _spill_at_intervall(lt, jd->ls->active_tmp, &(jd->ls->active_tmp_top));
1591 if (lt->reg == -1) { /* no tmp free anymore */
1592 _spill_at_intervall(lt, jd->ls->active_sav,
1593 &(jd->ls->active_sav_top));
/* _spill_at_intervall: decide whether 'lt' steals a register from an
   active interval or stays spilled itself.  Heuristics:
   - USAGE_COUNT_EXACT: among active intervals ending at/after lt, take
     the one with the lowest usagecount below lt's.
   - USAGE_COUNT: compare only against the last active interval.
   - plain LSRA: steal from the last (longest-living) active interval if
     it ends after lt.
   On success lt inherits the victim's register, the victim is removed
   from 'active' and lt is inserted in its place. */
1598 void _spill_at_intervall(struct lifetime *lt, struct lifetime **active,
1602 #ifdef USAGE_COUNT_EXACT
/* nothing active -> lt itself stays spilled */
1606 if (*active_top == 0) {
1611 i = *active_top - 1;
1612 #if defined(USAGE_COUNT_EXACT)
1613 /* find intervall which ends later or equal than lt and has the lowest
1614 usagecount lower than lt */
1616 u_min = lt->usagecount;
1617 for (; (i >= 0) && (active[i]->i_end >= lt->i_end); i--) {
1618 if (active[i]->usagecount < u_min) {
1619 u_min = active[i]->usagecount;
1627 # if defined(USAGE_COUNT) && !defined(USAGE_COUNT_EXACT)
1628 if ((active[i]->i_end >= lt->i_end)
1629 && (active[i]->usagecount < lt->usagecount)) {
1630 # else /* "normal" LSRA heuristic */
1631 /* get last intervall from active */
1632 if (active[i]->i_end > lt->i_end) {
1635 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1636 /* Don't spill between one and two word int types */
1637 if ((active[i]->type == TYPE_LNG) != (lt->type == TYPE_LNG))
/* steal the victim's register and swap list membership */
1640 lt->reg = active[i]->reg;
1644 for (j = i; j < *active_top; j++)
1645 active[j] = active[j + 1];
1647 lsra_add_active(lt, active, active_top);
/* lsra_calc_lifetime_length: finalize all lifetimes before allocation.
   Compacts the used lifetimes into ls->lt_used, distributes them onto
   the memory/integer/float worklists, clamps undefined first-def /
   last-use markers, and (without exact liveness, !LV) extends intervals
   across loop backedges they intersect. */
1653 void lsra_calc_lifetime_length(jitdata *jd) {
1657 struct lifetime *lt;
1658 #if defined(LSRA_DEBUG_VERBOSE) || !defined(LV)
1663 int flags; /* 0 INMEMORY -> ls->lt_mem */
1664 /* 1 INTREG -> ls->lt_int */
1665 /* 2 FLTREG -> ls->lt_flt */
1672 #ifdef LSRA_DEBUG_VERBOSE
1673 if (compileverbose) {
1674 printf("icount_block: ");
1675 for (i=0; i < m->basicblockcount; i++)
1676 printf("(%3i-%3i) ",i, ls->icount_block[i]);
1681 /* extend lifetime over backedges (for the lsra version without exact
1683 now iterate through lifetimes and expand them */
1686 for(lt_index = 0 ;lt_index < ls->lifetimecount; lt_index++) {
1687 if ( ls->lifetime[lt_index].type != -1) { /* used lifetime */
1688 /* remember lt_index in lt_sorted */
1689 ls->lt_used[lifetimecount++] = lt_index;
1690 lt = &(ls->lifetime[lt_index]);
1691 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1692 /* prevent conflicts between lifetimes of type long by increasing
1693 the lifetime by one instruction
1696 with i==l and/or j==k
1697 to resolve this during codegeneration a temporary register
1699 if (lt->type == TYPE_LNG)
1703 /* distribute lifetimes to lt_int, lt_flt and lt_mem */
1717 #if defined(__I386__)
1719 * for i386 put all floats in memory
1727 { log_text("Unknown Type\n"); assert(0); }
1731 case 0: /* lt_used[lt_used_index] -> lt_rest */
1732 ls->lt_mem[ ls->lt_mem_count++ ] = lt_index;
1734 case 1: /* l->lifetimes -> lt_int */
1735 ls->lt_int[ ls->lt_int_count++ ] = lt_index;
1737 case 2: /* l->lifetimes -> lt_flt */
1738 ls->lt_flt[ ls->lt_flt_count++ ] = lt_index;
/* INT_MAX is the "never defined" sentinel set at lifetime creation */
1743 if (lt->i_first_def == INT_MAX) {
1744 #ifdef LSRA_DEBUG_VERBOSE
1745 printf("Warning: var not defined! vi: %i start: %i end: %i\n",
1746 lt->v_index, lt->i_start, lt->i_end);
1748 lt->bb_first_def = 0;
1749 lt->i_first_def = 0;
1752 lt->i_start = lt->i_first_def;
/* -2 is the "never used" sentinel (below the -1 parameter-init slot) */
1754 if (lt->i_last_use == -2) {
1755 #ifdef LSRA_DEBUG_VERBOSE
1756 printf("Warning: Var not used! vi: %i start: %i end: %i\n",
1757 lt->v_index, lt->i_start, lt->i_end);
1759 lt->bb_last_use = lt->bb_first_def;
1760 lt->i_last_use = lt->i_first_def;
1763 lt->i_end = lt->i_last_use;
1765 #ifdef LSRA_DEBUG_VERBOSE
1766 if (lt->i_start > lt->i_end)
1767 printf("Warning: last use before first def! vi: %i start: %i end: %i\n", lt->v_index, lt->i_start, lt->i_end);
1771 if ((lt->bb_first_def != lt->bb_last_use) ||
1772 (lt->i_first_def == -1)) {
1773 /* Lifetime goes over more than one Basic Block -> */
1774 /* check for necessary extension over backedges */
1775 /* see lsra_get_backedges */
1776 /* Arguments are set "before" Block 0, so they have */
1777 /* a lifespan of more then one block, too */
1779 for (i=0; i < ls->backedge_count; i++) {
1780 if (!( (lt->bb_first_def > ls->backedge[i]->start) ||
1781 (lt->bb_last_use < ls->backedge[i]->end) )) {
1782 /* Live intervall intersects with a backedge */
1783 /* if (lt->bb_first_def <= ls->backedge[i]->start) */
1784 if (lt->bb_last_use <= ls->backedge[i]->start)
1786 ls->icount_block[ls->backedge[i]->start] +
1787 m->basicblocks[ls->sorted[ls->backedge[i]->start]].icount;
1791 #endif /* !defined(LV) */
1793 #ifdef USAGE_PER_INSTR
/* normalize usagecount by interval length -> usage density */
1794 lt->usagecount = lt->usagecount / ( lt->i_end - lt->i_start + 1);
1798 ls->lifetimecount = lifetimecount;
#ifdef LSRA_DEBUG_VERBOSE
/* print_lifetimes: debug dump of one lifetime worklist — interval
   bounds, register/offset, variable index, type, flags, varkind and
   usage count for each entry. */
1802 void print_lifetimes(jitdata *jd, int *lt, int lifetimecount)
1806 int type,flags,regoff,varkind;
1814 for (lt_index = 0; lt_index < lifetimecount; lt_index++) {
1815 n = &(ls->lifetime[lt[lt_index]]);
1816 if (n->savedvar == SAVEDVAR)
1820 if (n->v_index < 0) { /* stackslot */
1821 type = n->local_ss->s->type;
1822 flags=n->local_ss->s->flags;
1823 regoff=n->local_ss->s->regoff;
1824 varkind=n->local_ss->s->varkind;
1825 } else { /* local var */
1826 if (rd->locals[n->v_index][n->type].type>=0) {
1827 type = rd->locals[n->v_index][n->type].type;
1828 flags=rd->locals[n->v_index][n->type].flags;
1829 regoff=rd->locals[n->v_index][n->type].regoff;
1832 { log_text("Type Data mismatch 3\n"); assert(0); }
1835 printf("i_Start: %3i(%3i,%3i) i_stop: %3i(%3i,%3i) reg: %3i VI: %3i type: %3i flags: %3i varkind: %3i usage: %3li ltflags: %xi \n",n->i_start, ls->sorted[n->bb_first_def], n->i_first_def,n->i_end, ls->sorted[n->bb_last_use], n->i_last_use,regoff,n->v_index,type,flags, varkind, n->usagecount, n->flags);
1837 printf("i_Start: %3i i_stop: %3i reg: %3i VI: %3i type: %3i flags: %3i varkind: %3i usage: %3li ltflags: %xi \n",n->i_start, n->i_end, regoff,n->v_index,type,flags, varkind, n->usagecount, n->flags);
1840 printf( "%3i Lifetimes printed \n",lt_index);
1846 /******************************************************************************
1847 Helpers for first LSRA Version without exact Liveness Analysis
1848 *****************************************************************************/
/* lsra_join_ss: try to merge the lifetimes of instack element 'in' and
   outstack element 'out' (coalescing) so both use the same storage.
   join_flag (JOIN/JOIN_BB/JOIN_DUP/JOIN_OP) encodes the joining context;
   certain flag combinations are rejected as unsafe.  On a merge, out's
   lifetime (lto) is folded into in's lifetime (lt): stack slots are
   relinked and renumbered, savedvar/flags/usagecount are combined, and
   the def/use interval is widened.  Returns whether the join happened
   (presumably — the return statements are outside the visible lines). */
1851 bool lsra_join_ss( struct lsradata *ls, struct stackelement *in,
1852 struct stackelement *out, int join_flag) {
1853 struct lifetime *lt, *lto;
1854 struct stackslot *ss, *ss_last;
/* already the same lifetime -> nothing to do */
1857 if (in->varnum != out->varnum) {
/* stack-slot lifetimes are indexed by -varnum - 1 */
1858 lt = &(ls->lifetime[-in->varnum - 1]);
1860 #ifdef LSRA_DEBUG_CHECK
1861 if (join_flag == JOIN_BB)
1862 if (lt->type == -1) {
1863 log_text("lsra_join_ss: lifetime for instack not found\n");
1868 if (out->varnum >= 0) { /* no lifetime for this slot till now */
1869 lsra_add_ss(lt, out);
1871 lto = &(ls->lifetime[-out->varnum - 1]);
/* refuse joins that would mix basic-block joins with dup/op joins */
1872 if ((join_flag == JOIN_DUP) || (join_flag == JOIN_OP))
1873 if ( (lt->flags & JOIN_BB) || (lto->flags & JOIN_BB)) {
1876 if (join_flag == JOIN_DUP)
1877 if ( (lt->flags & JOIN_OP) || (lto->flags & JOIN_OP)) {
1880 #ifdef LSRA_DEBUG_CHECK
1881 if (lto->type == -1) {
1882 log_text("lsra_join_ss: lifetime for outstack not found\n");
1886 #ifdef LSRA_DEBUG_CHECK
1887 if (lto->type != lt->type) {
1888 log_text("lsra_join_ss: in/out stack type mismatch\n");
1893 lt->flags |= JOINING;
1895 /* take Lifetime lto out of ls->lifetimes */
1898 /* merge lto into lt of in */
/* renumber all of lto's stack slots to lt's variable index */
1900 ss_last = ss = lto->local_ss;
1901 while (ss != NULL) {
1903 ss->s->varnum = lt->v_index;
/* splice lto's slot list in front of lt's */
1906 if (ss_last != NULL) {
1907 ss_last->next = lt->local_ss;
1908 lt->local_ss = lto->local_ss;
1911 lt->savedvar |= lto->savedvar;
1912 lt->flags |= lto->flags | join_flag;
1913 lt->usagecount += lto->usagecount;
1915 /*join of i_first_def and i_last_use */
1916 if (lto->i_first_def < lt->i_first_def) {
1917 lt->i_first_def = lto->i_first_def;
1919 if (lto->i_last_use > lt->i_last_use) {
1920 lt->i_last_use = lto->i_last_use;
1927 /* join instack of Basic Block b_index with outstack of predecessors */
/* Exception-handler instacks and the first element of a subroutine
   header are excluded; every predecessor's outstack is joined
   element-wise against the block's instack. */
1928 void lsra_join_lifetimes(jitdata *jd,int b_index) {
1931 struct stackelement *in, *i, *out;
1937 /* do not join instack of Exception Handler */
1938 if (m->basicblocks[b_index].type == BBTYPE_EXH)
1940 in=m->basicblocks[b_index].instack;
1941 /* do not join first instack element of a subroutine header */
1942 if (m->basicblocks[b_index].type == BBTYPE_SBR)
1946 for (pred = ls->pred[b_index]; pred != NULL; pred = pred->next) {
1947 out = m->basicblocks[pred->value].outstack;
/* walk both stacks in parallel, joining slot for slot */
1948 for (i=in; (i != NULL); i = i->prev, out=out->prev) {
1949 lsra_join_ss(ls, i, out, JOIN_BB);
/* lsra_make_ss: allocate a stackslot node wrapping stack element 's'
   (presumably recording bb_index in the node — initialization lines are
   adjacent to the visible allocation). */
1955 struct stackslot *lsra_make_ss(stackelement_t* s, int bb_index)
1957 struct stackslot *ss;
1959 ss = DNEW(struct stackslot);
/* lsra_add_ss: attach stack element 's' to lifetime 'lt': prepend a new
   stackslot node to lt->local_ss, renumber the element to lt's variable
   index, and propagate its SAVEDVAR flag into the lifetime. */
1965 void lsra_add_ss(struct lifetime *lt, stackelement_t* s) {
1966 struct stackslot *ss;
1968 /* Stackslot not in list? */
1969 if (s->varnum != lt->v_index) {
1970 ss = DNEW(struct stackslot);
1972 ss->s->varnum = lt->v_index;
1973 ss->next = lt->local_ss;
1976 lt->savedvar |= s->flags & SAVEDVAR;
/* get_ss_lifetime: return the lifetime for stack element 's'.  A
   non-negative varnum means no lifetime exists yet: a fresh one is
   created at index -ls->v_index - 1, given "undefined" sentinels
   (i_last_use = -2, i_first_def = INT_MAX) and s is renumbered.
   Otherwise the existing lifetime at -s->varnum - 1 is returned. */
1982 struct lifetime *get_ss_lifetime(lsradata *ls, stackelement_t* s) {
1985 if (s->varnum >= 0) { /* new stackslot lifetime */
1986 #ifdef LSRA_DEBUG_CHECK_VERBOSE
1987 if (-ls->v_index - 1 >= ls->maxlifetimes) {
1988 printf("%i %i\n", -ls->v_index - 1, ls->maxlifetimes);
1991 _LSRA_ASSERT(-ls->v_index - 1 < ls->maxlifetimes);
1993 n = &(ls->lifetime[-ls->v_index - 1]);
/* hand out the next negative stack-slot variable index */
1995 n->v_index = ls->v_index--;
1998 n->bb_last_use = -1;
1999 n->bb_first_def = -1;
2000 n->i_last_use = -2; /* At -1 param init happens, so -2 is below all
2001 possible instruction indices */
2002 n->i_first_def = INT_MAX;
2007 n = &(ls->lifetime[-s->varnum - 1]);
/* IS_TEMP_VAR: true for stack elements that are neither method arguments
   nor mirror a local variable — only those may be coalesced. */
2013 #define IS_TEMP_VAR(s) (((s)->varkind != ARGVAR) && ((s)->varkind != LOCALVAR))
/* lsra_join_3_stack: try to join dst with src1; if that fails, try src2.
   Types must match and all participants must be temp vars. */
2015 #define lsra_join_3_stack(ls, dst, src1, src2, join_type) \
2016 if ( IS_TEMP_VAR(dst) ) { \
2018 if ( IS_TEMP_VAR(src1) && ((src1)->type == (dst)->type)) { \
2019 join_ret = lsra_join_ss(ls, dst, src1, join_type); \
2021 if ((!join_ret) && IS_TEMP_VAR(src2) && ((src2)->type == (dst)->type)) { \
2022 lsra_join_ss(ls, dst, src2, join_type); \
/* lsra_join_2_stack: join dst with src when both are temp vars of the
   same type. */
2026 #define lsra_join_2_stack(ls, dst, src, join_type) \
2027 if ( IS_TEMP_VAR(dst) ) { \
2028 if ( (IS_TEMP_VAR(src)) && ((src)->type == (dst)->type)) { \
2029 lsra_join_ss(ls, dst, src, join_type); \
/* lsra_join_dup: joining for DUP-style ICMDs where s1 may be joined with
   s2 and/or s3; the second join is flagged JOIN_DUP only when the first
   succeeded (then s2/s3 alias), otherwise it is an ordinary JOIN. */
2033 #define lsra_join_dup(ls, s1, s2, s3) { \
2034 if (IS_TEMP_VAR(s1)) { \
2036 if (IS_TEMP_VAR(s2)) \
2037 join_ret = lsra_join_ss(ls, s1, s2, JOIN); \
2038 /* undangerous join!*/\
2039 if (IS_TEMP_VAR(s3)) { \
2040 if (join_ret) /* first join succesfull -> second of type */ \
2042 lsra_join_ss(ls, s1, s3, JOIN_DUP); \
2044 lsra_join_ss(ls, s1, s3, JOIN); /* first join did not */ \
2045 /* happen -> second undangerous */ \
2048 if (IS_TEMP_VAR(s2) && IS_TEMP_VAR(s3)) \
2049 lsra_join_ss(ls, s2, s3, JOIN_DUP); \
/* lsra_new_stack: record a definition of stack element 's' at (block,
   instr) — skipped for argument slots, which are handled elsewhere. */
2052 #define lsra_new_stack(ls, s, block, instr) \
2053 if ((s)->varkind != ARGVAR) _lsra_new_stack(ls, s, block, instr, LSRA_STORE)
/* _lsra_new_stack: definitions of LOCALVAR slots are forwarded to
   lsra_usage_local; all others get (or create) a stack-slot lifetime
   whose first-def position is overwritten on every call (instructions
   are scanned backwards, so the last call holds the earliest def). */
2054 void _lsra_new_stack(lsradata *ls, stackelement_t* s, int block, int instr, int store)
2058 if (s->varkind == LOCALVAR) {
2059 lsra_usage_local(ls, s->varnum, s->type, block, instr, LSRA_STORE);
2060 } else /* if (s->varkind != ARGVAR) */ {
2062 n=get_ss_lifetime(ls, s);
/* defs coming from a BB instack may only be joined across blocks */
2064 if (store == LSRA_BB_IN)
2065 n->flags |= JOIN_BB;
2066 /* remember first def -> overwrite everytime */
2067 n->bb_first_def = ls->sorted_rev[block];
2068 n->i_first_def = ls->icount_block[ls->sorted_rev[block]] + instr;
/* weight usage by loop-nesting depth of the block */
2070 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
/* lsra_from_stack / lsra_pop_from_stack: record a use (LSRA_LOAD) or a
   discarding pop (LSRA_POP) of stack element 's' — argument slots are
   skipped. */
2074 #define lsra_from_stack(ls, s, block, instr) \
2075 if ((s)->varkind != ARGVAR) _lsra_from_stack(ls, s, block, instr, LSRA_LOAD)
2076 #define lsra_pop_from_stack(ls, s, block, instr) \
2077 if ((s)->varkind != ARGVAR) _lsra_from_stack(ls, s, block, instr, LSRA_POP)
/* _lsra_from_stack: uses of LOCALVAR slots go to lsra_usage_local; other
   slots get (or create) a stack-slot lifetime.  Because instructions are
   scanned backwards, the last-use position is written only once (while
   bb_last_use is still the -1 sentinel), keeping the latest use. */
2078 void _lsra_from_stack(lsradata *ls, stackelement_t* s, int block, int instr, int store)
2082 if (s->varkind == LOCALVAR) {
2083 lsra_usage_local(ls, s->varnum, s->type, block, instr, LSRA_LOAD);
2084 } else /* if (s->varkind != ARGVAR) */ {
2085 if (s->varkind == STACKVAR )
2086 /* No STACKVARS possible with lsra! */
2087 s->varkind = TEMPVAR;
2089 n=get_ss_lifetime(ls, s);
2091 if (store == LSRA_BB_OUT)
2092 n->flags |= JOIN_BB;
/* a use ends any pending join sequence */
2093 if (n->flags & JOINING)
2094 n->flags &= ~JOINING;
2095 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
2097 /* remember last USE, so only write, if USE Field is undefined (==-1) */
2098 if (n->bb_last_use == -1) {
2099 n->bb_last_use = ls->sorted_rev[block];
2100 n->i_last_use = ls->icount_block[ls->sorted_rev[block]] + instr;
/* lsra_usage_local: record an access (load or store) of local variable
   v_index with the given type at (block, instr).  Local lifetimes live
   after the stack-slot lifetimes, indexed by (v_index, type); a fresh
   lifetime is initialized with SAVEDVAR and undefined sentinels.  Note
   that stores also count as uses so defined-but-unused locals keep a
   valid interval. */
2105 void lsra_usage_local(lsradata *ls, s4 v_index, int type, int block, int instr,
/* locals are placed behind all stack-slot lifetimes in the array */
2110 n = &(ls->lifetime[ ls->maxlifetimes + v_index * (TYPE_ADR+1) + type]);
2112 if (n->type == -1) { /* new local lifetime */
/* locals must survive calls -> always saved */
2116 n->savedvar = SAVEDVAR;
2120 n->bb_last_use = -1;
2121 n->bb_first_def = -1;
2123 n->i_first_def = INT_MAX;
/* weight usage by loop-nesting depth of the block */
2125 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
2126 /* add access at (block, instr) to instruction list */
2127 /* remember last USE, so only write, if USE Field is undefined (==-1) */
2128 /* count store as use, too -> defined and not used vars would overwrite */
2130 if (n->bb_last_use == -1) {
2131 n->bb_last_use = ls->sorted_rev[block];
2132 n->i_last_use = ls->icount_block[ls->sorted_rev[block]] + instr;
2134 if (store == LSRA_STORE) {
2135 /* store == LSRA_STORE, remember first def -> overwrite everytime */
2136 n->bb_first_def = ls->sorted_rev[block];
2137 n->i_first_def = ls->icount_block[ls->sorted_rev[block]] + instr;
#ifdef LSRA_DEBUG_VERBOSE
/* lsra_dump_stack: debug print of a stack element chain — address,
   register/offset, varnum, varkind, type and flags per element. */
2142 void lsra_dump_stack(stackelement_t* s)
2145 printf("%p(R%3i N%3i K%3i T%3i F%3i) ",(void *)s,s->regoff, s->varnum,
2146 s->varkind, s->type, s->flags);
/* lsra_scan_registers_canditates: scan basic block b_index BACKWARDS
   (last instruction first) and, per ICMD, record stack-slot and local
   definitions/uses so lifetimes can be built.  Also performs join
   (coalescing) attempts for DUP*/SWAP/arithmetic results where enabled
   via JOIN_DUP_STACK / JOIN_DEST_STACK.  Instack elements of non-STD
   blocks get an extra def; outstack elements get a use at block end. */
2154 void lsra_scan_registers_canditates(jitdata *jd, int b_index)
2156 /* methodinfo *lm; */
2157 builtintable_entry *bte;
2162 stackelement_t* src;
2163 stackelement_t* dst;
2165 bool join_ret; /* for lsra_join* Macros */
2172 /* get instruction count for BB and remember the max instruction count */
2174 iindex = m->basicblocks[b_index].icount - 1;
2176 src = m->basicblocks[b_index].instack;
2177 if (m->basicblocks[b_index].type != BBTYPE_STD) {
2178 lsra_new_stack(ls, src, b_index, 0);
2181 for (;src != NULL; src=src->prev) {
2182 /*******************************************************************************
2183 Check this - ? For every incoming Stack Slot a lifetime has to be created ?
2184 *******************************************************************************/
2185 _lsra_new_stack(ls, src, b_index, 0, LSRA_BB_IN);
2187 src = m->basicblocks[b_index].outstack;
2188 for (;src != NULL; src=src->prev) {
2189 _lsra_from_stack(ls, src, b_index, iindex, LSRA_BB_OUT);
2192 /* set iptr to last instruction of BB */
2193 iptr = m->basicblocks[b_index].iinstr + iindex;
2195 for (;iindex >= 0; iindex--, iptr--) {
2197 /* get source and destination Stack for the current instruction */
2198 /* destination stack is available as iptr->dst */
2202 /* source stack is either the destination stack of the previos */
2203 /* instruction, or the basicblock instack for the first instruction */
2205 if (iindex) /* != 0 is > 0 here, since iindex ist always >= 0 */
2208 src=m->basicblocks[b_index].instack;
2216 /* local read (return adress) */
2217 lsra_usage_local(ls, iptr->op1, TYPE_ADR, b_index, iindex,
2221 /* case ICMD_ELSE_ICONST: */
2222 case ICMD_CHECKNULL:
2226 case ICMD_PUTSTATICCONST:
2227 case ICMD_INLINE_START:
2228 case ICMD_INLINE_END:
2229 case ICMD_INLINE_GOTO:
2233 /* local = local+<const> */
2234 lsra_usage_local(ls, iptr->op1, TYPE_INT, b_index, iindex,
2236 lsra_usage_local(ls, iptr->op1, TYPE_INT, b_index, iindex,
2240 /* pop 0 push 1 const: const->stack */
2246 /* new stack slot */
2247 lsra_new_stack(ls, dst, b_index, iindex);
2250 /* pop 0 push 1 load: local->stack */
2256 if (dst->varkind != LOCALVAR) {
2257 /* local->value on stack */
2258 lsra_usage_local(ls, iptr->op1, opcode - ICMD_ILOAD, b_index,
2260 lsra_new_stack(ls, dst, b_index, iindex); /* value->stack */
2261 } else /* if (dst->varnum != iptr->op1) */ {
2262 /* local -> local */
2263 lsra_usage_local(ls, iptr->op1, opcode - ICMD_ILOAD, b_index,
2264 iindex,LSRA_LOAD); /* local->value */
2265 lsra_usage_local(ls, dst->varnum, opcode - ICMD_ILOAD, b_index,
2266 iindex, LSRA_STORE); /* local->value */
2272 /* Stack(arrayref,index)->stack */
2283 lsra_from_stack(ls, src, b_index, iindex);
2284 /* stack->arrayref */
2285 lsra_from_stack(ls, src->prev, b_index, iindex);
2286 /* arrayref[index]->stack */
2287 lsra_new_stack(ls, dst, b_index, iindex);
2291 /* stack(arrayref,index,value)->arrayref[index]=value */
2302 lsra_from_stack(ls, src,b_index, iindex); /* stack -> value */
2303 lsra_from_stack(ls, src->prev, b_index, iindex); /* stack -> index*/
2304 /* stack -> arrayref */
2305 lsra_from_stack(ls, src->prev->prev, b_index, iindex);
2308 /* pop 1 push 0 store: stack -> local */
2314 if (src->varkind != LOCALVAR) {
2315 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value */
2316 lsra_usage_local(ls, iptr->op1, opcode-ICMD_ISTORE, b_index,
2317 iindex, LSRA_STORE); /* local->value */
2318 } else /* if (src->varnum != iptr->op1) */ {
2319 lsra_usage_local(ls, iptr->op1, opcode-ICMD_ISTORE, b_index,
2320 iindex, LSRA_STORE); /* local->value */
2321 lsra_usage_local(ls, src->varnum, opcode-ICMD_ISTORE, b_index,
2322 iindex, LSRA_LOAD); /* local->value */
2327 case ICMD_POP: /* throw away a stackslot */
2328 /* TODO: check if used anyway (DUP...) and change codegen to */
2329 /* ignore this stackslot */
2330 lsra_pop_from_stack(ls, src, b_index, iindex);
2338 case ICMD_ARETURN: /* stack(value) -> [empty] */
2340 case ICMD_ATHROW: /* stack(objref) -> undefined */
2342 case ICMD_PUTSTATIC: /* stack(value) -> static_field */
2343 case ICMD_PUTFIELDCONST:
2345 /* pop 1 push 0 branch */
2346 case ICMD_IFNULL: /* stack(value) -> branch? */
2347 case ICMD_IFNONNULL:
2363 /* pop 1 push 0 table branch */
2364 case ICMD_TABLESWITCH:
2365 case ICMD_LOOKUPSWITCH:
2367 case ICMD_MONITORENTER:
2368 case ICMD_MONITOREXIT:
2369 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value */
2373 case ICMD_POP2: /* throw away 2 stackslots */
2374 /* TODO: check if used anyway (DUP...) and change codegen to */
2375 /* ignore this stackslot */
2376 lsra_pop_from_stack(ls, src, b_index, iindex);
2377 lsra_pop_from_stack(ls, src->prev, b_index, iindex);
2380 /* pop 2 push 0 branch */
2382 case ICMD_IF_ICMPEQ: /* stack (v1,v2) -> branch(v1,v2) */
2383 case ICMD_IF_ICMPNE:
2384 case ICMD_IF_ICMPLT:
2385 case ICMD_IF_ICMPGE:
2386 case ICMD_IF_ICMPGT:
2387 case ICMD_IF_ICMPLE:
2389 case ICMD_IF_LCMPEQ:
2390 case ICMD_IF_LCMPNE:
2391 case ICMD_IF_LCMPLT:
2392 case ICMD_IF_LCMPGE:
2393 case ICMD_IF_LCMPGT:
2394 case ICMD_IF_LCMPLE:
2396 case ICMD_IF_ACMPEQ:
2397 case ICMD_IF_ACMPNE:
2400 case ICMD_PUTFIELD: /* stack(objref,value) -> objref = value */
2402 case ICMD_IASTORECONST:
2403 case ICMD_LASTORECONST:
2404 case ICMD_AASTORECONST:
2405 case ICMD_BASTORECONST:
2406 case ICMD_CASTORECONST:
2407 case ICMD_SASTORECONST:
2408 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value*/
2409 lsra_from_stack(ls, src->prev, b_index, iindex);
2412 /* pop 0 push 1 dup */
2413 case ICMD_DUP: /* src == dst->prev, src -> dst */
2414 /* lsra_from_stack(ls, src,b_index,iindex);*/
2415 lsra_new_stack(ls, dst, b_index, iindex);
2417 #ifdef JOIN_DUP_STACK
2418 /* src is identical to dst->prev */
2419 lsra_join_2_stack(ls, src, dst, JOIN_DUP);
2423 /* pop 0 push 2 dup */
2425 /* lsra_from_stack(ls, src,b_index, iindex); */
2426 /* lsra_from_stack(ls, src->prev, b_index, iindex); */
2427 lsra_new_stack(ls, dst->prev, b_index, iindex);
2428 lsra_new_stack(ls, dst, b_index, iindex);
2430 #ifdef JOIN_DUP_STACK
2431 lsra_join_2_stack(ls, src, dst, JOIN_DUP);
2432 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN_DUP);
2433 /* src is identical to dst->prev->prev */
2434 /* src->prev is identical to dst->prev->prev->prev */
2438 /* pop 2 push 3 dup */
/* note: uses happen at iindex+1 so the copies do not conflict with
   the new defs at iindex */
2440 lsra_from_stack(ls, src, b_index, iindex+1);
2441 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2442 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2443 lsra_new_stack(ls, dst->prev, b_index, iindex);
2444 lsra_new_stack(ls, dst, b_index, iindex);
2445 #ifdef JOIN_DUP_STACK
2446 lsra_join_dup(ls, src, dst, dst->prev->prev);
2447 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN);
2451 /* pop 3 push 4 dup */
2453 lsra_from_stack(ls, src,b_index, iindex+1);
2454 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2455 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2456 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2457 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2458 lsra_new_stack(ls, dst->prev, b_index, iindex);
2459 lsra_new_stack(ls, dst, b_index, iindex);
2461 #ifdef JOIN_DUP_STACK
2462 lsra_join_dup(ls, src, dst, dst->prev->prev->prev);
2463 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN);
2464 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2468 /* pop 3 push 5 dup */
2470 lsra_from_stack(ls, src, b_index, iindex+1);
2471 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2472 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2473 lsra_new_stack(ls, dst->prev->prev->prev->prev, b_index, iindex);
2474 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2475 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2476 lsra_new_stack(ls, dst->prev, b_index, iindex);
2477 lsra_new_stack(ls, dst, b_index, iindex);
2479 #ifdef JOIN_DUP_STACK
2480 lsra_join_dup(ls, src, dst, dst->prev->prev->prev);
2481 lsra_join_dup(ls, src->prev, dst->prev,
2482 dst->prev->prev->prev->prev);
2483 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2487 /* pop 4 push 6 dup */
2489 lsra_from_stack(ls, src, b_index, iindex+1);
2490 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2491 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2492 lsra_from_stack(ls, src->prev->prev->prev, b_index, iindex+1);
2493 lsra_new_stack(ls, dst->prev->prev->prev->prev->prev, b_index,
2495 lsra_new_stack(ls, dst->prev->prev->prev->prev, b_index, iindex);
2496 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2497 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2498 lsra_new_stack(ls, dst->prev, b_index, iindex);
2499 lsra_new_stack(ls, dst, b_index, iindex);
2501 #ifdef JOIN_DUP_STACK
2502 lsra_join_dup(ls, src, dst, dst->prev->prev->prev->prev);
2503 lsra_join_dup(ls, src->prev, dst->prev,
2504 dst->prev->prev->prev->prev->prev);
2505 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2506 lsra_join_2_stack(ls, src->prev->prev->prev, dst->prev->prev->prev,
2511 /* pop 2 push 2 swap */
2513 lsra_from_stack(ls, src, b_index, iindex+1);
2514 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2515 lsra_new_stack(ls, dst->prev, b_index, iindex);
2516 lsra_new_stack(ls, dst, b_index, iindex);
/* swap: cross-join sources with the opposite destinations */
2518 lsra_join_2_stack(ls, src->prev, dst, JOIN);
2519 lsra_join_2_stack(ls, src, dst->prev, JOIN);
/* binary ops: stack(v1,v2) -> stack(result) */
2557 lsra_from_stack(ls, src, b_index, iindex);
2558 lsra_from_stack(ls, src->prev, b_index, iindex);
2559 lsra_new_stack(ls, dst, b_index, iindex);
2560 #ifdef JOIN_DEST_STACK
2561 lsra_join_3_stack(ls, dst, src->prev, src, JOIN_OP);
2566 lsra_from_stack(ls, src, b_index, iindex);
2567 lsra_from_stack(ls, src->prev,b_index,iindex);
2568 lsra_new_stack(ls, dst, b_index, iindex);
2569 #ifdef JOIN_DEST_STACK
2570 lsra_join_2_stack(ls, src, dst, JOIN_OP);
2588 lsra_from_stack(ls, src, b_index, iindex);
2589 lsra_from_stack(ls, src->prev, b_index, iindex);
2590 lsra_new_stack(ls, dst, b_index, iindex);
2594 case ICMD_LADDCONST:
2595 case ICMD_LSUBCONST:
2596 case ICMD_LMULCONST:
2600 case ICMD_LANDCONST:
2602 case ICMD_LXORCONST:
2603 case ICMD_LSHLCONST:
2604 case ICMD_LSHRCONST:
2605 case ICMD_LUSHRCONST:
2607 case ICMD_IADDCONST:
2608 case ICMD_ISUBCONST:
2609 case ICMD_IMULCONST:
2613 case ICMD_IANDCONST:
2615 case ICMD_IXORCONST:
2616 case ICMD_ISHLCONST:
2617 case ICMD_ISHRCONST:
2618 case ICMD_IUSHRCONST:
2620 /* case ICMD_IFEQ_ICONST: */
2621 /* case ICMD_IFNE_ICONST: */
2622 /* case ICMD_IFLT_ICONST: */
2623 /* case ICMD_IFGE_ICONST: */
2624 /* case ICMD_IFGT_ICONST: */
2625 /* case ICMD_IFLE_ICONST: */
2630 case ICMD_INT2SHORT:
2648 case ICMD_CHECKCAST:
/* unary ops: stack(value) -> stack(result) */
2649 lsra_from_stack(ls, src, b_index, iindex);
2650 lsra_new_stack(ls, dst, b_index, iindex);
2651 #ifdef JOIN_DEST_STACK
2652 lsra_join_2_stack(ls, src, dst, JOIN_OP);
2656 /* TODO: check if for these ICMDs JOIN_DEST_STACK works, too! */
2657 case ICMD_ARRAYLENGTH:
2658 case ICMD_INSTANCEOF:
2661 case ICMD_ANEWARRAY:
2664 lsra_from_stack(ls, src, b_index, iindex);
2665 lsra_new_stack(ls, dst, b_index, iindex);
2669 case ICMD_GETSTATIC:
2672 lsra_new_stack(ls, dst, b_index, iindex);
2675 /* pop many push any */
2677 case ICMD_INVOKESTATIC:
2678 case ICMD_INVOKESPECIAL:
2679 case ICMD_INVOKEVIRTUAL:
2680 case ICMD_INVOKEINTERFACE:
2681 INSTRUCTION_GET_METHODDESC(iptr,md);
/* consume all call arguments from the stack */
2684 lsra_from_stack(ls, src, b_index, iindex);
2687 if (md->returntype.type != TYPE_VOID)
2688 lsra_new_stack(ls, dst, b_index, iindex);
/* builtins: same shape as invokes */
2697 lsra_from_stack(ls, src, b_index, iindex);
2700 if (md->returntype.type != TYPE_VOID)
2701 lsra_new_stack(ls, dst, b_index, iindex);
2704 case ICMD_MULTIANEWARRAY:
2707 lsra_from_stack(ls, src, b_index, iindex);
2710 lsra_new_stack(ls, dst, b_index, iindex);
2714 exceptions_throw_internalerror("Unknown ICMD %d during register allocation",
2720 #endif /* defined(LV) */
2724 * These are local overrides for various environment variables in Emacs.
2725 * Please do not remove this and leave it at the end of the file, where
2726 * Emacs will automagically detect them.
2727 * ---------------------------------------------------------------------
2730 * indent-tabs-mode: t
2734 * vim:noexpandtab:sw=4:ts=4: