1 /* src/vm/jit/allocator/lsra.c - linear scan register allocator
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Christian Ullrich
29 Changes: Christian Thalinger
32 $Id: lsra.c 5234 2006-08-14 17:50:12Z christian $
49 #include "mm/memory.h"
50 #include "toolbox/logging.h"
51 #include "vm/builtin.h"
52 #include "vm/exceptions.h"
53 #include "vm/resolve.h"
54 #include "vm/options.h"
55 #include "vm/statistics.h"
56 #include "vm/stringlocal.h"
57 #include "vm/jit/abi.h"
58 #include "vm/jit/reg.h"
59 #include "vm/jit/allocator/liveness.h"
60 #include "vm/jit/allocator/lsra.h"
63 extern char **prof_m_names;
64 extern u4 **prof_bb_freq;
/* Forward declarations for the linear-scan register allocator (LSRA).    */
/* NOTE(review): the original line numbering embedded in this file skips   */
/* ranges, so some prototypes/declarations may be elided from this view.   */
68 /* function prototypes */
69 void lsra_init(jitdata *);
70 void lsra_setup(jitdata *);
71 void lsra_main(jitdata *);
/* register-set setup and the core allocation passes */
73 void lsra_reg_setup(jitdata *, struct lsra_register *, struct lsra_register * );
74 void lsra_calc_lifetime_length(jitdata *);
75 void _lsra_main( jitdata *, int *, int, struct lsra_register *, int *);
76 void lsra_expire_old_intervalls(jitdata *, struct lifetime *,
77 struct lsra_register *);
78 void spill_at_intervall(jitdata *, struct lifetime *);
79 void lsra_add_active(struct lifetime *, struct lifetime **, int *);
80 void _lsra_expire_old_intervalls(jitdata *, struct lifetime *,
81 struct lsra_register *, struct lifetime **,
83 void _spill_at_intervall(struct lifetime *, struct lifetime **, int *);
/* stack-slot (spill memory) management */
85 void lsra_alloc(jitdata *, int *, int, int *);
86 int lsra_getmem(struct lifetime *, struct freemem *, int *);
87 struct freemem *lsra_getnewmem(int *);
88 void lsra_setflags(int *, int);
/* debug helpers, compiled only with LSRA_DEBUG_VERBOSE */
90 #ifdef LSRA_DEBUG_VERBOSE
91 void lsra_dump_stack(stackptr );
92 void print_lifetimes(jitdata *, int *, int);
/* lifetime scanning/joining over basic blocks */
96 void lsra_scan_registers_canditates(jitdata *, int);
97 void lsra_join_lifetimes(jitdata *, int);
99 void _lsra_new_stack( lsradata *, stackptr , int , int, int);
100 void _lsra_from_stack(lsradata *, stackptr , int , int, int);
101 void lsra_add_ss(struct lifetime *, stackptr );
102 void lsra_usage_local(lsradata *, s4 , int , int , int , int );
/* Entry point of the linear-scan register allocator.                      */
/* Visible here: an optional debug pass that checks the in/out stacks of   */
/* every reached basic block, allocation of the lsradata structure, and a  */
/* statistics pass counting pairwise overlaps between local-var lifetimes. */
/* NOTE(review): many interior lines are elided in this chunk.             */
107 bool lsra(jitdata *jd)
109 #if defined(ENABLE_STATISTICS)
114 #if defined(LSRA_DEBUG_CHECK)
121 #if defined(LSRA_DEBUG_CHECK)
/* debug check: walk all basic blocks and inspect their stack contents */
124 while (b_index < m->basicblockcount ) {
126 if (m->basicblocks[b_index].flags >= BBREACHED) {
128 in=m->basicblocks[b_index].instack;
129 ind=m->basicblocks[b_index].indepth;
130 for (;ind != 0;in=in->prev, ind--) {
131 /* ARGVAR or LOCALVAR in instack is ok */
133 if (in->varkind == ARGVAR) printf("ARGVAR in instack: \n");
134 if (in->varkind == LOCALVAR) printf("LOCALVAR in instack\n");
/* ARGVAR/LOCALVAR in an outstack is considered a hard error */
137 out=m->basicblocks[b_index].outstack;
138 outd=m->basicblocks[b_index].outdepth;
139 for (;outd != 0;out=out->prev, outd--) {
140 if (out->varkind == ARGVAR)
141 { log_text("ARGVAR in outstack\n"); assert(0); }
142 if (out->varkind == LOCALVAR)
143 { log_text("LOCALVAR in outstack\n"); assert(0); }
150 jd->ls = DNEW(lsradata);
154 #if defined(ENABLE_STATISTICS)
155 /* find conflicts between locals for statistics */
158 /* local Variable Lifetimes are at the end of the lifetime array and */
159 /* have v_index >= 0 */
160 for (locals_start = ls->lifetimecount-1; (locals_start >=0) &&
161 (ls->lifetime[ls->lt_used[locals_start]].v_index >= 0);
/* O(n^2) pairwise overlap test: two lifetimes conflict unless one    */
/* ends strictly before the other starts (+2 counts both directions)  */
163 for (i=locals_start + 1; i < ls->lifetimecount; i++)
164 for (j=i+1; j < ls->lifetimecount; j++)
165 if ( !((ls->lifetime[ls->lt_used[i]].i_end
166 < ls->lifetime[ls->lt_used[j]].i_start)
167 || (ls->lifetime[ls->lt_used[j]].i_end <
168 ls->lifetime[ls->lt_used[i]].i_start)) )
169 count_locals_conflicts += 2;
175 /* everything's ok */
180 /* sort Basic Blocks using Depth First Search in reverse post order in */
/* Iterative DFS over the CFG (ls->succ). A block is pushed only after    */
/* all of its predecessors have been visited (visited[] == num_pred[]);   */
/* the outer loop force-pushes blocks whose predecessor count was never   */
/* reached, which happens with backedges.                                 */
182 void lsra_DFS(jitdata *jd) {
196 stack = DMNEW( int, m->basicblockcount + 1);
197 visited = (int *)DMNEW( int, m->basicblockcount + 1);
198 for (i = 0; i <= m->basicblockcount; i++) {
201 ls->sorted_rev[i]=-1;
204 stack[0] = 0; /* start with Block 0 */
206 visited[0] = ls->num_pred[0]; /* Start Block is handled right and can be */
210 while (not_finished) {
211 while (stack_top != 0) {
213 i = stack[stack_top];
/* visit successors; push each once all its ancestors are done */
216 for (succ = ls->succ[i]; succ != NULL; succ = succ->next) {
217 visited[succ->value]++;
218 if (visited[succ->value] == ls->num_pred[succ->value]) {
219 /* push the node on the stack, only if all ancestors have */
221 stack[stack_top] = succ->value;
226 not_finished = false;
227 for (i=1; i <= m->basicblockcount; i++) {
228 /* search for visited blocks, which have not reached the num_pred */
229 /* and put them on the stack -> happens with backedges */
230 if ((visited[i] != 0) && (visited[i] < ls->num_pred[i])) {
231 stack[stack_top] = i;
/* mark as fully visited so it is not pushed again */
233 visited[i] = ls->num_pred[i];
/* Collect backedges of the (reverse-postorder-sorted) CFG into a linked  */
/* list, then copy them into the ls->backedge array. An edge sorted[i] -> */
/* s->value is a backedge when the target's sorted position is <= i.      */
/* NOTE(review): _backedges is read (n->next = _backedges) before any     */
/* visible initialization — presumably initialized to NULL on an elided   */
/* line; confirm against the full source.                                 */
241 void lsra_get_backedges_(lsradata *ls, int basicblockcount) {
244 struct _backedge *_backedges;
250 /* now look for backedges */
251 ls->backedge_count = 0;
252 for(i=0; i < basicblockcount; i++) {
253 if (ls->sorted[i] != -1)
254 for(s=ls->succ[ls->sorted[i]]; s != NULL; s=s->next) {
255 if (i >= ls->sorted_rev[s->value]) {
/* backedge found: normalize so start >= end in sorted order */
256 n=DNEW(struct _backedge);
257 n->start = max(i, ls->sorted_rev[s->value]);
258 n->end = min(i, ls->sorted_rev[s->value]);
259 n->next = _backedges;
261 ls->backedge_count++;
265 /* put _backedges in ls->backedge array */
266 ls->backedge = DMNEW(struct _backedge *, ls->backedge_count);
267 for (n=_backedges, i=0; n != NULL; n=n->next, i++) {
269 ls->backedge[i]->nesting = 1;
/* Compute a loop-nesting weight for every basic block. Builds            */
/* sorted_rev, collects backedges, bubble-sorts them by increasing end    */
/* (then start), accumulates nesting counts for overlapping backedges,    */
/* and finally assigns ls->nesting[i] = 1 + nesting^2 * 10 as the block's */
/* weight used later in spill decisions.                                  */
273 void lsra_get_nesting(jitdata *jd) {
282 for (i=0; i <= m->basicblockcount; i++)
283 if (ls->sorted[i] != -1)
284 ls->sorted_rev[ls->sorted[i]]=i;
286 lsra_get_backedges_(ls, m->basicblockcount + 1);
287 /* - sort backedge by increasing end: */
288 for (i=0; i < ls->backedge_count; i++)
289 for (j=i+1; j < ls->backedge_count; j++)
290 if ((ls->backedge[i]->end > ls->backedge[j]->end) || /* -> swap */
291 ((ls->backedge[i]->end == ls->backedge[j]->end) &&
292 (ls->backedge[i]->start > ls->backedge[j]->start) )) {
294 ls->backedge[i]=ls->backedge[j];
298 /* create ls->nesting */
299 /* look for nesting depth (overlapping backedges) */
300 for (i=0; i < ls->backedge_count - 1; i++) {
301 for (j = i + 1; (j < ls->backedge_count) &&
302 (ls->backedge[i]->start >= ls->backedge[j]->end); j++)
303 ls->backedge[j]->nesting += ls->backedge[i]->nesting;
/* walk all blocks, assigning the covering backedge's nesting value */
308 while ( (i < m->basicblockcount + 1) ) {
309 if (j < ls->backedge_count) {
310 while ( i < ls->backedge[j]->end ) {
314 if ( (j+1) < ls->backedge_count)
315 end = min(ls->backedge[j]->start, ls->backedge[j+1]->end - 1);
317 end = ls->backedge[j]->start;
319 ls->nesting[i] = ls->backedge[j]->nesting;
329 #ifdef LSRA_DEBUG_VERBOSE
330 if (compileverbose) {
331 printf("sorted: \n");
332 for (i=0; i < ls->backedge_count; i++)
333 printf("Backedge: %i - %i, %i - %i\n", ls->sorted[ls->backedge[i]->start], ls->sorted[ls->backedge[i]->end], ls->backedge[i]->start, ls->backedge[i]->end);
334 printf("Nesting Level \n");
335 for (i=0; i<m->basicblockcount; i++) printf(" %3li", ls->nesting[i]);
/* reset sorted_rev and square-scale the nesting values into weights */
339 for (i=0; i <= m->basicblockcount; i++) {
340 ls->sorted_rev[i] = -1;
341 ls->nesting[i] = 1+ls->nesting[i]*ls->nesting[i]*10;
/* Recompute backedges on the final CFG: strip the artificial end block   */
/* from succ/pred/sorted, rebuild sorted_rev, collect backedges, sort     */
/* them by increasing start, merge overlapping ones and compact the       */
/* resulting array (dropping NULL entries).                               */
345 void lsra_get_backedges(jitdata *jd) {
356 /* first remove artificial end basicblock from ls->sorted, succ and pred */
358 for (i=0; i < m->basicblockcount; i++) {
359 for (next=&(ls->succ[i]); *next != NULL; next=&((*next)->next)) {
360 if ( (*next)->value == m->basicblockcount ) {
361 /* artificial end bb found */
362 *next = (*next)->next;
363 if (*next == NULL) break;
366 for (next=&(ls->pred[i]); *next != NULL; next=&((*next)->next)) {
367 if ( (*next)->value == m->basicblockcount ) {
368 /* artificial end bb found */
369 *next = (*next)->next;
370 if (*next == NULL) break;
/* remember the sorted[] position of the artificial end block */
374 if (ls->sorted[i] == m->basicblockcount) j=i;
377 /* if an artificial end block was removed -> change ls->sorted accordingly */
379 for (i=j+1; i <= m->basicblockcount; i++) {
380 ls->sorted[i-1] = ls->sorted[i];
381 ls->nesting[i-1] = ls->nesting[i];
384 for (i=0; i < m->basicblockcount; i++)
385 if (ls->sorted[i] != -1)
386 ls->sorted_rev[ls->sorted[i]]=i;
388 lsra_get_backedges_(ls, m->basicblockcount);
390 /* - sort backedge by increasing start */
391 for (i=0; i < ls->backedge_count; i++)
392 for (j=i+1; j < ls->backedge_count; j++)
393 if (ls->backedge[i]->start > ls->backedge[j]->start) {
396 ls->backedge[i] = ls->backedge[j];
400 #ifdef LSRA_DEBUG_VERBOSE
401 if (compileverbose) {
402 printf("sorted: \n");
403 for (i=0; i < ls->backedge_count; i++)
404 printf("Backedge: %i - %i, %i - %i\n",
405 ls->sorted[ls->backedge[i]->start],
406 ls->sorted[ls->backedge[i]->end], ls->backedge[i]->start,
407 ls->backedge[i]->end);
408 printf("Nesting Level \n");
409 for (i=0; i<m->basicblockcount; i++) printf(" %3li", ls->nesting[i]);
415 /* - merge overlapping backedges */
418 for (i=0; i < ls->backedge_count-1; i++) {
419 if (ls->backedge[i] != NULL) {
/* find the next surviving (non-NULL) backedge after i */
420 for (j = i + 1; (j < ls->backedge_count) && (ls->backedge[j] == NULL); j++ );
421 if (j != ls->backedge_count) {
422 if (ls->backedge[i]->start >= ls->backedge[j]->end) {
424 /* overlapping -> merge */
425 ls->backedge[j]->end = min (ls->backedge[j]->end,
426 ls->backedge[i]->end);
427 ls->backedge[i] = NULL;
433 #ifdef LSRA_DEBUG_VERBOSE
434 if (compileverbose) {
435 printf("merged: \n");
436 for (i = 0; i < ls->backedge_count; i++)
437 if (ls->backedge[i] != NULL)
438 printf("Backedge: %i - %i, %i - %i\n",
439 ls->sorted[ls->backedge[i]->start],
440 ls->sorted[ls->backedge[i]->end],
441 ls->backedge[i]->start, ls->backedge[i]->end);
444 /* - remove backedge[] == NULL from array */
446 for (j = ls->backedge_count - 1; ((j>=0) && (ls->backedge[j] == NULL));
450 if (ls->backedge[i] == NULL) { /* swap backedge[i] and backedge[j] */
452 ls->backedge[j] = ls->backedge[i];
456 ls->backedge_count--;
459 #ifdef LSRA_DEBUG_VERBOSE
460 if (compileverbose) {
462 for (i=0; i < ls->backedge_count; i++)
463 printf("Backedge: %i - %i, %i - %i\n",
464 ls->sorted[ls->backedge[i]->start],
465 ls->sorted[ls->backedge[i]->end],ls->backedge[i]->start,
466 ls->backedge[i]->end);
/* Add the CFG edge from -> to: skips forward over unreachable target     */
/* blocks, avoids duplicate edges, and links a new node into both the     */
/* successor list of `from` and the predecessor list of `to`.             */
471 void lsra_add_cfg(jitdata *jd, int from, int to) {
479 /* ignore Empty, Deleted,... Basic Blocks as target */
480 /* TODO: Setup BasicBlock array before to avoid this */
481 /* best together with using the basicblock list, so lsra works */
482 /* with opt_loops, too */
483 for (;(to < m->basicblockcount) && (m->basicblocks[to].flags < BBREACHED); to++);
/* duplicate check: scan the existing successor list of `from` */
485 for (n=ls->succ[from]; (n!= NULL) && (n->value != to); n=n->next);
486 if (n != NULL) return; /* edge from->to already existing */
488 n=DNEW(struct _list);
491 n->next=ls->succ[from];
494 n=DNEW(struct _list);
496 n->next=ls->pred[to];
501 /* add Edges from guarded Areas to Exception handlers in the CFG */
/* For every exception-table entry, add a CFG edge from each reachable    */
/* basic block inside the guarded range [start, end] to the handler, so   */
/* the reverse-postorder sort places handlers after their try blocks.     */
502 void lsra_add_exceptions(jitdata *jd) {
512 ex = jd->cd->exceptiontable;
514 /* add cfg edges from all bb of a try block to the start of the according */
515 /* exception handler to ensure the right order after depthfirst search */
517 #ifdef LSRA_DEBUG_VERBOSE
519 printf("ExTable(%i): ", jd->cd->exceptiontablelength);
522 for (; ex != NULL; ex = ex->down) {
524 #ifdef LSRA_DEBUG_VERBOSE
525 if (compileverbose) {
526 printf("[%i-%i]->%i ",ex->start->nr, ex->end->nr,
/* sanity checks on the exception-table entry */
528 if (ex->handler->nr >= m->basicblockcount) {
529 log_text("Exceptionhandler Basicblocknummer invalid\n");
532 if (m->basicblocks[ex->handler->nr].flags < BBREACHED) {
533 log_text("Exceptionhandler Basicblocknummer not reachable\n");
536 if (ex->start->nr > ex->end->nr) {
537 log_text("Guarded Area starts after its end\n");
542 /* loop all valid Basic Blocks of the guarded area and add CFG edges */
543 /* to the appropriate handler */
544 for (i=ex->start->nr; (i <= ex->end->nr) &&
545 (i < m->basicblockcount); i++)
546 if (m->basicblocks[i].flags >= BBREACHED)
547 lsra_add_cfg(jd, i, ex->handler->nr);
549 #ifdef LSRA_DEBUG_VERBOSE
550 if (compileverbose) {
/* Handle a JSR instruction: add the CFG edge to the subroutine header,   */
/* and record the subroutine (header + list of return-address blocks) in  */
/* the sorted ls->sbr list for later processing by lsra_add_subs.         */
556 void lsra_add_jsr(jitdata *jd, int from, int to) {
559 struct _sbr *sbr, *n;
565 /* ignore Empty, Deleted,... Basic Blocks as target */
566 /* TODO: Setup BasicBlock array before to avoid this */
567 /* best together with using the basicblock list, so lsra works */
568 /* with opt_loops, too */
569 for (; (to < m->basicblockcount) && (m->basicblocks[to].flags < BBREACHED);
571 #ifdef LSRA_DEBUG_CHECK
572 if (to == m->basicblockcount)
573 { log_text("Invalid subroutine start index\n"); assert(0); }
576 lsra_add_cfg(jd, from, to);
578 /* from + 1 is the return Basic Block Index */
579 for (from++; (from < m->basicblockcount) &&
580 (m->basicblocks[from].flags < BBREACHED); from++);
581 #ifdef LSRA_DEBUG_CHECK
582 if (from == m->basicblockcount)
583 { log_text("Invalid return basic block index for jsr\n"); assert(0); }
586 /* add subroutine info in ls->sbr.next */
588 /* search for right place to insert (list kept sorted by header) */
589 for (sbr = &(ls->sbr); (sbr->next != NULL) && (sbr->next->header < to); sbr=sbr->next);
591 if ((sbr->next!= NULL) && (sbr->next->header == to)) {
592 /* Entry for this sub already exists */
595 /* make new Entry and insert it in ls->sbr.next */
596 n = DNEW( struct _sbr );
606 /* now insert return address in sbr->ret */
607 ret = DNEW( struct _list);
609 ret->next = sbr->ret;
/* Recursive walk through a subroutine's blocks: on RET, add CFG edges    */
/* from the returning block to every recorded return address; otherwise   */
/* follow successors (or fall through) into unvisited blocks.             */
613 void lsra_add_sub( jitdata *jd, int b_index, struct _list *ret,
625 /* break at virtual End Block */
626 if (b_index != m->basicblockcount) {
627 visited[b_index] = true;
630 if (m->basicblocks[b_index].flags < BBREACHED)
632 if (!next_block && !(m->basicblocks[b_index].icount))
/* ip -> last instruction of the block */
636 ip = m->basicblocks[b_index].iinstr
637 + m->basicblocks[b_index].icount -1;
639 if (ip->opc == ICMD_JSR) /* nested Subroutines */
644 if (ip->opc == ICMD_RET) {
645 /* subroutine return found -> add return addresses to CFG */
646 for (l = ret; l != NULL; l = l->next)
647 lsra_add_cfg(jd, b_index, l->value);
648 } else { /* follow CFG */
649 for ( l = ls->succ[b_index]; l != NULL; l = l->next)
650 if (!visited[l->value])
651 lsra_add_sub(jd, l->value, ret, visited);
653 } else { /* fall through to next block */
654 if (!visited[b_index + 1])
655 lsra_add_sub(jd, b_index + 1, ret, visited);
660 /* Add subroutines from ls->sbr list to CFG */
/* Walks each recorded subroutine (header + return addresses) and wires   */
/* its return edges into the CFG via lsra_add_sub. The visited[] array is */
/* shared across all subroutines of the method.                           */
661 void lsra_add_subs(jitdata *jd) {
667 #ifdef LSRA_DEBUG_VERBOSE
/* NOTE(review): allocates sizeof(int) elements but uses them as bool --  */
/* over-allocates where sizeof(bool) < sizeof(int); harmless but odd.     */
674 visited = (bool *)DMNEW(int, m->basicblockcount + 1);
675 for (i=0; i <= m->basicblockcount; i++) visited[i] = false;
676 for (sbr = ls->sbr.next; sbr != NULL; sbr=sbr->next) {
678 #ifdef LSRA_DEBUG_VERBOSE
679 if (compileverbose) {
680 printf("Subroutine Header: %3i Return Adresses:",sbr->header);
681 for (ret = sbr->ret; ret != NULL; ret = ret->next)
682 printf(" %3i", ret->value);
686 lsra_add_sub(jd, sbr->header, sbr->ret, visited );
690 /* Generate the Control Flow Graph */
691 /* ( pred,succ,num_pred of lsradata structure) */
/* Scans the last real (non-NOP) instruction of every reachable basic     */
/* block and adds the corresponding CFG edges: returns -> artificial end  */
/* block, conditional branches -> fallthrough + target, switches -> all   */
/* case targets, JSR -> subroutine bookkeeping, default -> next block.    */
693 void lsra_make_cfg(jitdata *jd) {
698 int high, low, count;
705 while (b_index < m->basicblockcount ) {
706 if ((m->basicblocks[b_index].flags >= BBREACHED) &&
707 (len = m->basicblocks[b_index].icount)) {
708 /* block is valid and contains instructions */
710 /* set ip to last instruction */
711 ip = m->basicblocks[b_index].iinstr +
712 m->basicblocks[b_index].icount -1;
/* skip trailing NOPs to reach the real terminating instruction */
713 while ((len>0) && (ip->opc == ICMD_NOP)) {
717 switch (ip->opc) { /* check type of last instruction */
725 lsra_add_cfg(jd, b_index, m->basicblockcount);
726 break; /* function returns -> end of graph */
755 case ICMD_IF_ACMPNE: /* branch -> add next block */
756 lsra_add_cfg(jd, b_index, b_index+1);
757 /* fall through -> add branch target */
760 lsra_add_cfg(jd, b_index, m->basicblockindex[ip->op1]);
761 break; /* visit branch (goto) target */
763 case ICMD_TABLESWITCH: /* switch statement */
766 lsra_add_cfg(jd, b_index, m->basicblockindex[*s4ptr]);
773 count = (high-low+1);
775 while (--count >= 0) {
777 lsra_add_cfg(jd, b_index,
778 m->basicblockindex[*s4ptr]);
782 case ICMD_LOOKUPSWITCH: /* switch statement */
785 lsra_add_cfg(jd, b_index, m->basicblockindex[*s4ptr]);
790 while (--count >= 0) {
791 lsra_add_cfg(jd, b_index,
792 m->basicblockindex[s4ptr[1]]);
798 lsra_add_jsr(jd, b_index, m->basicblockindex[ip->op1]);
805 lsra_add_cfg(jd, b_index, b_index + 1 );
807 } /* switch (ip->opc)*/
808 } /* if ((m->basicblocks[blockIndex].icount)&& */
809 /* (m->basicblocks[b_index].flags >= BBREACHED)) */
811 } /* while (b_index < m->basicblockcount ) */
/* Allocate and zero/initialize the per-method LSRA CFG data structures   */
/* (pred, succ, sorted, sorted_rev, num_pred, nesting), all sized for     */
/* basicblockcount + 1 to include the artificial exit node.               */
814 void lsra_init(jitdata *jd) {
822 /* Init LSRA Data Structures */
823 /* allocate lifetimes for all Basicblocks */
824 /* + 1 for an artificial exit node */
825 /* which is needed as "start" point for the reverse postorder sorting */
826 ls->pred = DMNEW(struct _list *, m->basicblockcount+1);
827 ls->succ = DMNEW(struct _list *, m->basicblockcount+1);
828 ls->sorted = DMNEW(int , m->basicblockcount+1);
829 ls->sorted_rev = DMNEW(int , m->basicblockcount+1);
830 ls->num_pred = DMNEW(int , m->basicblockcount+1);
831 ls->nesting = DMNEW(long , m->basicblockcount+1);
832 for (i=0; i<m->basicblockcount; i++) {
836 ls->sorted_rev[i]=-1;
/* explicit init of the artificial exit node's slots */
840 ls->pred[m->basicblockcount]=NULL;
841 ls->succ[m->basicblockcount]=NULL;
842 ls->sorted[m->basicblockcount]=-1;
843 ls->sorted_rev[m->basicblockcount]=-1;
844 ls->num_pred[m->basicblockcount]=0;
/* Drives the analysis phase: builds the CFG and nesting info, adds       */
/* subroutine and exception edges, computes the reverse-postorder sort    */
/* and backedges, allocates the lifetime arrays, scans/joins lifetimes    */
/* per block, records parameter writes for locals, and finally computes   */
/* lifetime lengths.                                                      */
851 void lsra_setup(jitdata *jd) {
856 #ifdef LSRA_DEBUG_VERBOSE
873 #if defined(ENABLE_LOOP)
874 /* Loop optimization "destroys" the basicblock array */
875 /* TODO: work with the basicblock list */
877 log_text("lsra not possible with loop optimization\n");
880 #endif /* defined(ENABLE_LOOP) */
882 /* Setup LSRA Data structures */
884 /* Generate the Control Flow Graph */
886 /* gather nesting before adding of Exceptions and Subroutines!!! */
890 lsra_get_nesting(jd);
893 #ifdef LSRA_DEBUG_VERBOSE
894 if (compileverbose) {
895 printf("Successors:\n");
896 for (i=0; i < m->basicblockcount; i++) {
898 for (nl=ls->succ[i]; nl!= NULL; nl=nl->next)
899 printf("%3i ",nl->value);
902 printf("Predecessors:\n");
903 for (i=0; i < m->basicblockcount; i++) {
905 for (nl=ls->pred[i]; nl!= NULL; nl=nl->next)
906 printf("%3i ",nl->value);
910 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted[i]);
912 printf("Sorted_rev: ");
913 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted_rev[i]);
918 /* add subroutines before exceptions! They "destroy" the CFG */
920 lsra_add_exceptions(jd);
922 /* generate reverse post order sort */
925 /* setup backedge and nested data structures*/
926 lsra_get_backedges(jd);
/* lifetime pool: stack-slot lifetimes plus one slot per (local, type) */
930 ls->lifetimecount = ls->maxlifetimes + cd->maxlocals * (TYPE_ADR+1);
931 ls->lifetime = DMNEW(struct lifetime, ls->lifetimecount);
932 ls->lt_used = DMNEW(int, ls->lifetimecount);
933 ls->lt_int = DMNEW(int, ls->lifetimecount);
934 ls->lt_int_count = 0;
935 ls->lt_flt = DMNEW(int, ls->lifetimecount);
936 ls->lt_flt_count = 0;
937 ls->lt_mem = DMNEW(int, ls->lifetimecount);
938 ls->lt_mem_count = 0;
/* type == -1 marks an unused lifetime entry */
940 for (i=0; i < ls->lifetimecount; i++) ls->lifetime[i].type = -1;
942 #ifdef LSRA_DEBUG_VERBOSE
943 if (compileverbose) {
944 printf("Successors:\n");
945 for (i=0; i < m->basicblockcount; i++) {
947 for (nl=ls->succ[i]; nl!= NULL; nl=nl->next)
948 printf("%3i ",nl->value);
951 printf("Predecessors:\n");
952 for (i=0; i < m->basicblockcount; i++) {
954 for (nl=ls->pred[i]; nl!= NULL; nl=nl->next)
955 printf("%3i ",nl->value);
959 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted[i]);
961 printf("Sorted_rev: ");
962 for (i=0; i < m->basicblockcount; i++) printf("%3i ", ls->sorted_rev[i]);
968 #ifdef LSRA_DEBUG_CHECK
969 /* compare m->basicblocks[] with the list basicblocks->next */
971 bptr = m->basicblocks;
972 while (bptr != NULL) {
973 if (i > m->basicblockcount){
974 { log_text("linked bb list does not correspond with bb array(1)\n");
977 if (bptr != &(m->basicblocks[i])){
978 { log_text("linked bb list does not correspond with bb array(2)\n");
985 if (i<m->basicblockcount){
986 { log_text("linked bb list does not correspond with bb array(3)\n");
999 methoddesc *md = m->parseddesc;
1001 /* Create Stack Slot lifetimes over all basic blocks */
1002 for (i=m->basicblockcount-1; i >= 0; i--) {
1003 if (ls->sorted[i] != -1) {
1004 lsra_scan_registers_canditates(jd, ls->sorted[i]);
1005 lsra_join_lifetimes(jd, ls->sorted[i]);
1009 /* Parameter initialisation for locals [0 .. paramcount[ */
1010 /* -> add local var write access at (bb=0,iindex=-1) */
1011 /* !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! */
1012 /* this needs a special treatment, when lifetimes get extended */
1013 /* over backedges, since this parameter initialisation happens */
1014 /* outside of Basic Block 0 !!!! */
1015 /* this could have been avoided by marking the read access with -1,0 */
1017 for (p = 0, i = 0; p < md->paramcount; p++) {
1018 t = md->paramtypes[p].type;
1020 if (rd->locals[i][t].type >= 0)
1021 /* Param to Local init happens before normal Code */
1022 lsra_usage_local(ls, i, t, 0, -1, LSRA_STORE);
1024 /* increment local counter a second time */
1025 /* for 2 word types */
1026 if (IS_2_WORD_TYPE(t))
1032 lsra_calc_lifetime_length(jd);
1034 #ifdef LSRA_DEBUG_VERBOSE
1036 printf("Basicblockcount: %4i\n",m->basicblockcount);
/* Partition the machine registers into the saved/temp pools the          */
/* allocator draws from. Leaf methods treat argument and temp registers   */
/* as additional saved registers and precolour parameter registers so     */
/* the register holding an argument is preferentially reused; non-leaf    */
/* methods keep the conventional saved/temp split.                        */
1041 void lsra_reg_setup(jitdata *jd, struct lsra_register *int_reg,
1042 struct lsra_register *flt_reg ) {
1043 int i, j, iarg, farg;
1046 bool *fltarg_used, *intarg_used;
1055 int_reg->nregdesc = nregdescint;
1056 flt_reg->nregdesc = nregdescfloat;
1057 if (jd->isleafmethod) {
1058 /* Temp and Argumentregister can be used as saved registers */
1060 int_reg->sav_top = INT_ARG_CNT + INT_TMP_CNT + INT_SAV_CNT;
1061 int_reg->sav_reg = DMNEW(int, int_reg->sav_top);
1062 int_reg->tmp_reg = NULL;
1063 int_reg->tmp_top = -1;
1064 flt_reg->sav_top = FLT_ARG_CNT + FLT_TMP_CNT + FLT_SAV_CNT;
1065 flt_reg->sav_reg = DMNEW(int, flt_reg->sav_top);
1066 flt_reg->tmp_reg = NULL;
1067 flt_reg->tmp_top = -1;
1069 /* additionally precolour registers for Local Variables acting as */
1075 intarg_used = DMNEW(bool, INT_ARG_CNT);
1076 for (i=0; i < INT_ARG_CNT; i++)
1077 intarg_used[i]=false;
1079 fltarg_used = DMNEW(bool, FLT_ARG_CNT);
1080 for (i=0; i < FLT_ARG_CNT; i++)
1081 fltarg_used[i]=false;
1083 int_sav_top=int_reg->sav_top;
1084 flt_sav_top=flt_reg->sav_top;
/* place each register-passed parameter at the top of the saved pool */
1086 for (i=0; (i < md->paramcount); i++) {
1087 if (!md->params[i].inmemory) {
1088 if (IS_INT_LNG_TYPE(md->paramtypes[i].type)) {
1089 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1090 if (IS_2_WORD_TYPE(md->paramtypes[i].type)) {
1091 int_reg->sav_reg[--int_sav_top] =
1092 rd->argintregs[GET_HIGH_REG(md->params[i].regoff)];
1093 intarg_used[GET_HIGH_REG(md->params[i].regoff)]=true;
1094 /*used -> don't copy later on */
1095 int_reg->sav_reg[--int_sav_top] =
1096 rd->argintregs[GET_LOW_REG(md->params[i].regoff)];
1097 intarg_used[GET_LOW_REG(md->params[i].regoff)]=true;
1098 /*used -> don't copy later on */
1101 { /* !IS_2_WORD_TYPE(md->paramtypes[i].type */
1102 int_reg->sav_reg[--int_sav_top] =
1103 rd->argintregs[md->params[i].regoff];
1104 intarg_used[md->params[i].regoff]=true;
1105 /*used -> don't copy later on */
1108 #if !defined(SUPPORT_PASS_FLOATARGS_IN_INTREGS)
1109 /* do not precolour float arguments if they are passed in */
1110 /* integer registers. But these integer argument registers */
1111 /* still be used in the method! */
1112 else { /* IS_FLT_DBL_TYPE(md->paramtypes[i].type */
1113 flt_reg->sav_reg[--flt_sav_top] =
1114 rd->argfltregs[md->params[i].regoff];
1115 fltarg_used[md->params[i].regoff]=true;
1122 /* copy rest of argument registers to flt_reg->sav_reg and */
1123 /* int_reg->sav_reg; */
1124 for (i=0; i < INT_ARG_CNT; i++)
1125 if (!intarg_used[i])
1126 int_reg->sav_reg[--int_sav_top]=rd->argintregs[i];
1127 for (i=0; i < FLT_ARG_CNT; i++)
1128 if (!fltarg_used[i])
1129 flt_reg->sav_reg[--flt_sav_top]=rd->argfltregs[i];
1131 /* copy temp registers to flt_reg->sav_reg and int_reg->sav_reg */
1132 for (i=0; i < INT_TMP_CNT; i++)
1133 int_reg->sav_reg[--int_sav_top]=rd->tmpintregs[i];
1134 for (i=0; i < FLT_TMP_CNT; i++)
1135 flt_reg->sav_reg[--flt_sav_top]=rd->tmpfltregs[i];
1138 /* non leaf method -> use Argument Registers [arg[int|flt]reguse */
1139 /* ... [INT|FLT]_ARG_CNT[ as temp reg */
1140 /* divide temp and saved registers */
1141 int argintreguse, argfltreguse;
1143 /* with Locals as non SAVEDVAR, the used arg[int|flt] as in params */
1144 /* of the method itself have to be regarded, or mismatch before */
1145 /* block 0 with parameter copy could happen! */
1146 argintreguse = max(rd->argintreguse, md->argintreguse);
1147 argfltreguse = max(rd->argfltreguse, md->argfltreguse);
1149 argintreguse = rd->argintreguse;
1150 argfltreguse = rd->argfltreguse;
1152 int_sav_top = int_reg->sav_top = INT_SAV_CNT;
1153 int_reg->sav_reg = DMNEW(int, int_reg->sav_top);
1154 int_reg->tmp_top = INT_TMP_CNT +
1155 max(0, (INT_ARG_CNT - argintreguse));
1156 int_reg->tmp_reg = DMNEW(int, int_reg->tmp_top);
1158 flt_sav_top =flt_reg->sav_top = FLT_SAV_CNT;
1159 flt_reg->sav_reg = DMNEW(int, flt_reg->sav_top);
1160 flt_reg->tmp_top = FLT_TMP_CNT +
1161 max(0 , (FLT_ARG_CNT - argfltreguse));
1162 flt_reg->tmp_reg = DMNEW(int, flt_reg->tmp_top);
1164 /* copy temp and unused argument registers to flt_reg->tmp_reg and */
1165 /* int_reg->tmp_reg */
1166 for (i=0; i < INT_TMP_CNT; i++)
1167 int_reg->tmp_reg[i]=rd->tmpintregs[i];
1168 for (j=argintreguse; j < INT_ARG_CNT; j++, i++)
1169 int_reg->tmp_reg[i]=rd->argintregs[j];
1170 for (i=0; i < FLT_TMP_CNT; i++)
1171 flt_reg->tmp_reg[i]=rd->tmpfltregs[i];
1172 for (j=argfltreguse; j < FLT_ARG_CNT; j++, i++)
1173 flt_reg->tmp_reg[i]=rd->argfltregs[j];
1176 /* now copy saved registers to flt_reg->sav_reg and int_reg->sav_reg */
1177 for (i = INT_SAV_CNT-1; i >= 0; i--)
1178 int_reg->sav_reg[--int_sav_top]=rd->savintregs[i];
1179 for (i = FLT_SAV_CNT-1; i >= 0; i--)
1180 flt_reg->sav_reg[--flt_sav_top]=rd->savfltregs[i];
/* Insertion sort of lifetime indices a[lo..hi] by ascending i_start;     */
/* used as the small-range finisher for lsra_qsort below.                 */
1184 void lsra_insertion_sort( struct lsradata *ls, int *a, int lo, int hi) {
1187 for (i=lo+1; i<=hi; i++) {
1189 t=ls->lifetime[a[j]].i_start;
1191 while ((j>lo) && (ls->lifetime[a[j-1]].i_start > t)) {
/* Quicksort of lifetime indices a[lo..hi] by ascending i_start           */
/* (median-of-middle pivot), falling back to insertion sort for the       */
/* final small partitions.                                                */
1199 void lsra_qsort( struct lsradata *ls, int *a, int lo, int hi) {
1205 x = ls->lifetime[a[(lo+hi)/2]].i_start;
1208 while (ls->lifetime[a[i]].i_start < x) i++;
1209 while (ls->lifetime[a[j]].i_start > x) j--;
1211 /* exchange a[i], a[j] */
1221 if (lo < j) lsra_qsort( ls, a, lo, j);
1222 if (i < hi) lsra_qsort( ls, a, i, hi);
1224 lsra_insertion_sort(ls, a, lo, hi);
/* Parameters are the leading lifetimes with i_start == -1 (set before    */
/* block 0); re-sort just that prefix by ascending v_index so argument    */
/* precolouring matches parameter order.                                  */
1228 void lsra_param_sort(struct lsradata *ls, int *lifetime, int lifetime_count) {
1233 /* count number of parameters ( .i_start == -1) */
1234 for (param_count=0; (param_count < lifetime_count) &&
1235 (ls->lifetime[lifetime[param_count]].i_start == -1); param_count++);
1237 if (param_count > 0) {
1238 /* now sort the parameters by v_index (simple exchange sort) */
1239 for (i=0; i < param_count -1; i++)
1240 for (j=i+1; j < param_count; j++)
1241 if ( ls->lifetime[lifetime[i]].v_index >
1242 ls->lifetime[lifetime[j]].v_index) {
1245 lifetime[i]=lifetime[j];
/* Allocation phase driver: sorts all lifetime lists by start, sorts the  */
/* parameter prefix, sets up the register pools, runs the linear scan     */
/* over int then float lifetimes (recording saved-register usage in rd),  */
/* and finally assigns stack slots to everything left unallocated.        */
1251 void lsra_main(jitdata *jd) {
1252 #ifdef LSRA_DEBUG_VERBOSE
1257 struct lsra_register flt_reg, int_reg;
1260 #if defined(__I386__)
1268 /* sort lifetimes by increasing start */
1269 lsra_qsort( ls, ls->lt_mem, 0, ls->lt_mem_count - 1);
1270 lsra_qsort( ls, ls->lt_int, 0, ls->lt_int_count - 1);
1271 lsra_qsort( ls, ls->lt_flt, 0, ls->lt_flt_count - 1);
1272 /* sort local vars used as parameter */
1273 lsra_param_sort( ls, ls->lt_int, ls->lt_int_count);
1274 lsra_param_sort( ls, ls->lt_flt, ls->lt_flt_count);
1275 lsra_reg_setup(jd, &int_reg, &flt_reg);
1277 #ifdef LSRA_DEBUG_VERBOSE
1278 if (compileverbose) {
1279 printf("INTSAV REG: ");
1280 for (i=0; i<int_reg.sav_top; i++)
1281 printf("%2i ",int_reg.sav_reg[i]);
1282 printf("\nINTTMP REG: ");
1283 for (i=0; i<int_reg.tmp_top; i++)
1284 printf("%2i ",int_reg.tmp_reg[i]);
1285 printf("\nFLTSAV REG: ");
1286 for (i=0; i<flt_reg.sav_top; i++)
1287 printf("%2i ",flt_reg.sav_reg[i]);
1288 printf("\nFLTTMP REG: ");
1289 for (i=0; i<flt_reg.tmp_top; i++)
1290 printf("%2i ",flt_reg.tmp_reg[i]);
/* active-interval work arrays, sized for the larger register file */
1294 ls->active_tmp = DMNEW( struct lifetime *, max(INT_REG_CNT, FLT_REG_CNT));
1295 ls->active_sav = DMNEW( struct lifetime *, max(INT_REG_CNT, FLT_REG_CNT));
1297 lsra_reg_use=INT_SAV_CNT; /* init to no saved reg used... */
1298 _lsra_main(jd, ls->lt_int, ls->lt_int_count, &int_reg, &lsra_reg_use);
1299 if (lsra_reg_use > INT_SAV_CNT)
1300 lsra_reg_use=INT_SAV_CNT;
1301 rd->savintreguse = lsra_reg_use;
1303 lsra_reg_use = FLT_SAV_CNT; /* no saved reg used... */
1304 _lsra_main(jd, ls->lt_flt, ls->lt_flt_count, &flt_reg, &lsra_reg_use);
1305 if (lsra_reg_use > FLT_SAV_CNT)
1306 lsra_reg_use=FLT_SAV_CNT;
1307 rd->savfltreguse=lsra_reg_use;
1309 /* rd->memuse was already set in stack.c to allocate stack space for */
1310 /* passing arguments to called methods */
1311 #if defined(__I386__)
1312 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
1313 /* reserve 0(%esp) for Monitorenter/exit Argument on i386 */
1319 lsra_mem_use = rd->memuse; /* Init with memuse from stack.c */
/* spill/stack-slot assignment for mem, then leftover int/flt lifetimes */
1321 lsra_alloc(jd, ls->lt_mem, ls->lt_mem_count, &lsra_mem_use);
1322 lsra_alloc(jd, ls->lt_int, ls->lt_int_count, &lsra_mem_use);
1323 lsra_alloc(jd, ls->lt_flt, ls->lt_flt_count, &lsra_mem_use);
1325 rd->memuse=lsra_mem_use;
1327 #ifdef LSRA_DEBUG_VERBOSE
1328 if (compileverbose) {
1329 printf("Int RA complete \n");
1330 printf("Lifetimes after splitting int: \n");
1331 print_lifetimes(jd, ls->lt_int, ls->lt_int_count);
1333 printf("Flt RA complete \n");
1334 printf("Lifetimes after splitting flt:\n");
1335 print_lifetimes(jd, ls->lt_flt, ls->lt_flt_count);
1337 printf("Rest RA complete \n");
/* NOTE(review): "leftt" below is a typo in a runtime string (debug only) */
1338 printf("Lifetimes after leftt:\n");
1339 print_lifetimes(jd, ls->lt_mem, ls->lt_mem_count);
/* Assign stack-slot offsets to every lifetime in lifet[] that did not    */
/* receive a register (lt->reg == -1), reusing freed slots via the fmem   */
/* free list, and propagate the resulting regoff/flags to either the      */
/* lifetime's stack slots or the corresponding rd->locals entry.          */
/* On HAS_4BYTE_STACKSLOT targets a separate free list (fmem_2) serves    */
/* two-word types.                                                        */
1344 void lsra_alloc(jitdata *jd, int *lifet, int lifetimecount, int *mem_use)
1347 struct lifetime *lt;
1348 struct freemem *fmem;
1349 struct stackslot *n;
1351 #ifdef HAS_4BYTE_STACKSLOT
1352 struct freemem *fmem_2;
1360 fmem = DNEW(struct freemem);
1363 #ifdef HAS_4BYTE_STACKSLOT
1364 fmem_2=DNEW(struct freemem);
1366 fmem_2->next = NULL;
1369 for (lt_index = 0; lt_index < lifetimecount; lt_index ++) {
1370 lt = &(ls->lifetime[lifet[lt_index]]);
1374 if (lt->reg == -1) {
1376 #ifdef HAS_4BYTE_STACKSLOT
1377 if (IS_2_WORD_TYPE(lt->type))
1378 regoff = lsra_getmem(lt, fmem_2, mem_use);
1381 regoff = lsra_getmem(lt, fmem, mem_use);
1383 flags = lt->savedvar;
/* negative v_index -> stack-slot lifetime; else a local variable */
1387 if (lt->v_index < 0) {
1388 for (n = lt->local_ss; n != NULL; n = n->next) {
1389 lsra_setflags(&(n->s->flags), flags);
1390 n->s->regoff = regoff;
1392 } else { /* local var */
1393 if (rd->locals[lt->v_index][lt->type].type >= 0) {
1394 rd->locals[lt->v_index][lt->type].flags = flags;
1395 rd->locals[lt->v_index][lt->type].regoff = regoff;
1397 log_text("Type Data mismatch\n");
/* Merge allocation flags into *flags: set or clear INMEMORY according to */
/* newflags, and (per the visible tail) handle SAVEDVAR likewise.         */
1405 void lsra_setflags(int *flags, int newflags)
1407 if ( newflags & INMEMORY)
1410 *flags &= ~INMEMORY;
1412 if (newflags & SAVEDVAR)
/* Return a stack-slot offset for lifetime lt: reuse the head of the      */
/* fmem free list when its interval has expired, otherwise grab fresh     */
/* memory via lsra_getnewmem (with alignment and a second slot for        */
/* two-word types on HAS_4BYTE_STACKSLOT targets). The chosen slot is     */
/* re-inserted into the list, which is kept sorted by ascending end.      */
1416 int lsra_getmem(struct lifetime *lt, struct freemem *fmem, int *mem_use)
1418 struct freemem *fm, *p;
1420 /* no Memory Slot allocated till now or all are still live */
1421 if ((fmem->next == NULL) || (fmem->next->end > lt->i_start)) {
1422 #ifdef HAS_4BYTE_STACKSLOT
1423 if (IS_2_WORD_TYPE(lt->type))
1424 if ( (*mem_use)&1 ) /* align memory location for 2 Word Types */
1426 fm=lsra_getnewmem(mem_use);
1427 if (IS_2_WORD_TYPE(lt->type))
1428 /* allocate a second following Slot for 2 Word Types */
1431 fm=lsra_getnewmem(mem_use);
1434 /* Memoryslot free */
1436 fmem->next = fm->next;
/* slot stays blocked until this lifetime ends */
1439 fm->end = lt->i_end;
1440 for (p = fmem; (p->next != NULL) && (p->next->end < fm->end); p = p->next);
/* lsra_getnewmem **************************************************************

   Allocate a new freemem node for a fresh stack slot; *mem_use is the
   slot counter (presumably incremented in the elided body — TODO confirm).

*******************************************************************************/

1446 struct freemem *lsra_getnewmem(int *mem_use)
1450 fm = DNEW(struct freemem);
/* _lsra_main ******************************************************************

   Core linear scan over the lifetimes in lifet[] (sorted by start point).
   For each lifetime: expire ended intervals (returning their registers),
   then pick a register — saved registers for SAVEDVARs and leafmethods,
   temp registers (falling back to saved) otherwise — or spill if none is
   free.  *reg_use records the minimum sav_top reached, i.e. the number of
   saved registers actually consumed.

*******************************************************************************/

1457 void _lsra_main(jitdata *jd, int *lifet, int lifetimecount,
1458 struct lsra_register *reg, int *reg_use)
1460 struct lifetime *lt;
1464 bool temp; /* reg from temp registers (true) or saved registers (false) */
1471 #if !defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1474 if ((reg->tmp_top+reg->sav_top) == 0) {
1476 /* no registers available */
/* mark every lifetime as spilled and bail out early */
1477 for (lt_index = 0; lt_index < lifetimecount; lt_index++)
1478 ls->lifetime[lifet[lt_index]].reg = -1;
1482 ls->active_tmp_top = 0;
1483 ls->active_sav_top = 0;
1485 for (lt_index = 0; lt_index < lifetimecount; lt_index++) {
1486 lt = &(ls->lifetime[lifet[lt_index]]);
1488 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* TYPE_LNG needs two registers on targets that pack register pairs */
1489 regsneeded = (lt->type == TYPE_LNG)?1:0;
1492 lsra_expire_old_intervalls(jd, lt, reg);
1495 if (lt->savedvar || jd->isleafmethod) {
1496 /* use Saved Reg (in case of leafmethod all regs are saved regs) */
1497 if (reg->sav_top > regsneeded) {
1498 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* NOTE(review): the two --reg->sav_top side effects below are in one
   call expression; C argument evaluation order is unspecified, so
   which popped register becomes the low/high half is compiler-
   dependent — verify PACK_REGS operand order is really don't-care */
1500 reg_index = PACK_REGS(reg->sav_reg[--reg->sav_top],
1501 reg->sav_reg[--reg->sav_top]);
1505 reg_index = reg->sav_reg[--reg->sav_top];
1507 } else { /* use Temp Reg or if none is free a Saved Reg */
1508 if (reg->tmp_top > regsneeded) {
1510 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* NOTE(review): same unsequenced --tmp_top pattern as above */
1512 reg_index = PACK_REGS(reg->tmp_reg[--reg->tmp_top],
1513 reg->tmp_reg[--reg->tmp_top]);
1516 reg_index = reg->tmp_reg[--reg->tmp_top];
1519 if (reg->sav_top > regsneeded) {
1521 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1523 reg_index = PACK_REGS(reg->sav_reg[--reg->sav_top],
1524 reg->sav_reg[--reg->sav_top]);
1527 reg_index = reg->sav_reg[--reg->sav_top];
1530 if (reg_index == -1) /* no reg is available anymore... -> spill */
1531 spill_at_intervall(jd, lt);
1533 lt->reg = reg_index;
1535 lsra_add_active(lt, ls->active_tmp, &(ls->active_tmp_top));
/* track the deepest saved-register usage for the caller */
1537 if (reg->sav_top<*reg_use) *reg_use=reg->sav_top;
1538 lsra_add_active(lt, ls->active_sav, &(ls->active_sav_top));
/* lsra_add_active *************************************************************

   Insert lt into the active[] list, kept sorted by ascending interval
   end point (i_end): find the insertion index, shift the tail up by one.

*******************************************************************************/

1544 void lsra_add_active(struct lifetime *lt, struct lifetime **active,
1549 for(i = 0; (i < *active_top) && (active[i]->i_end < lt->i_end); i++);
1550 for(j = *active_top; j > i; j--) active[j] = active[j-1];
/* lsra_expire_old_intervalls **************************************************

   Convenience wrapper: expire ended intervals on both active lists
   (temp registers and saved registers).

*******************************************************************************/

1555 void lsra_expire_old_intervalls(jitdata *jd, struct lifetime *lt,
1556 struct lsra_register *reg)
1558 _lsra_expire_old_intervalls(jd, lt, reg, jd->ls->active_tmp,
1559 &(jd->ls->active_tmp_top));
1560 _lsra_expire_old_intervalls(jd, lt, reg, jd->ls->active_sav,
1561 &(jd->ls->active_sav_top));
/* _lsra_expire_old_intervalls *************************************************

   Walk active[] (sorted by i_end) and return the registers of all
   intervals that end before lt starts to the proper free stack
   (sav_reg[] or tmp_reg[]), then compact active[] by removing the
   expired prefix.

*******************************************************************************/

1564 void _lsra_expire_old_intervalls(jitdata *jd, struct lifetime *lt,
1565 struct lsra_register *reg,
1566 struct lifetime **active, int *active_top)
1570 for(i = 0; i < *active_top; i++) {
/* active[] is sorted by i_end, so the first still-live entry ends it */
1571 if (active[i]->i_end > lt->i_start) break;
1573 /* make active[i]->reg available again */
1574 if (jd->isleafmethod) {
1575 /* leafmethod -> don't care about type -> put all again into */
1577 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
/* packed TYPE_LNG register pair -> release both halves */
1578 if (active[i]->type == TYPE_LNG) {
1579 reg->sav_reg[reg->sav_top++] = GET_LOW_REG(active[i]->reg);
1580 reg->sav_reg[reg->sav_top++] = GET_HIGH_REG(active[i]->reg);
1583 reg->sav_reg[reg->sav_top++] = active[i]->reg;
1585 /* no leafmethod -> distinguish between temp and saved register */
1586 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1587 if (active[i]->type == TYPE_LNG) {
1588 /* no temp and saved regs are packed together, so looking at */
1589 /* LOW_REG is sufficient */
1590 if ( reg->nregdesc[ GET_LOW_REG(active[i]->reg)] == REG_SAV) {
1591 reg->sav_reg[reg->sav_top++] = GET_LOW_REG(active[i]->reg);
1592 reg->sav_reg[reg->sav_top++] = GET_HIGH_REG(active[i]->reg);
1594 reg->tmp_reg[reg->tmp_top++] = GET_LOW_REG(active[i]->reg);
1595 reg->tmp_reg[reg->tmp_top++] = GET_HIGH_REG(active[i]->reg);
1599 if ( reg->nregdesc[active[i]->reg] == REG_SAV) {
1600 reg->sav_reg[reg->sav_top++] = active[i]->reg;
1602 reg->tmp_reg[reg->tmp_top++] = active[i]->reg;
1607 /* active[0..i[ is to be removed */
1608 /* -> move [i..*active_top[ to [0..*active_top-i[ */
1609 for(k = 0, j = i; (j < *active_top); k++,j++)
1610 active[k] = active[j];
/* spill_at_intervall **********************************************************

   Dispatch the spill decision for lt to the matching active list:
   SAVEDVARs (and everything in leafmethods) compete only with the saved-
   register list; others try the temp list first and fall back to the
   saved list when no temp register could be taken (lt->reg still -1).

*******************************************************************************/

1616 void spill_at_intervall(jitdata *jd, struct lifetime *lt )
1618 if (lt->savedvar || jd->isleafmethod) {
1619 _spill_at_intervall(lt, jd->ls->active_sav, &(jd->ls->active_sav_top));
1621 _spill_at_intervall(lt, jd->ls->active_tmp, &(jd->ls->active_tmp_top));
1622 if (lt->reg == -1) { /* no tmp free anymore */
1623 _spill_at_intervall(lt, jd->ls->active_sav,
1624 &(jd->ls->active_sav_top));
/* _spill_at_intervall *********************************************************

   Spill heuristic: decide whether lt steals the register of an entry in
   active[] or is spilled itself.  With USAGE_COUNT the victim is the
   active interval with the lowest usagecount (below lt's); the plain
   LSRA heuristic takes the interval ending last.  On success the victim
   is removed from active[], lt inherits its register and is inserted.

*******************************************************************************/

1629 void _spill_at_intervall(struct lifetime *lt, struct lifetime **active,
1633 #ifdef USAGE_COUNT_EXACT
/* nothing active -> nothing to steal; lt stays spilled */
1637 if (*active_top == 0) {
1642 i = *active_top - 1;
1643 #if defined(USAGE_COUNT_EXACT)
1644 /* find intervall which ends later or equal than lt and has the lowest
1645 usagecount lower than lt */
1647 u_min = lt->usagecount;
1648 for (; (i >= 0) && (active[i]->i_end >= lt->i_end); i--) {
1649 if (active[i]->usagecount < u_min) {
1650 u_min = active[i]->usagecount;
1658 # if defined(USAGE_COUNT) && !defined(USAGE_COUNT_EXACT)
1659 if ((active[i]->i_end >= lt->i_end)
1660 && (active[i]->usagecount < lt->usagecount)) {
1661 # else /* "normal" LSRA heuristic */
1662 /* get last intervall from active */
1663 if (active[i]->i_end > lt->i_end) {
1666 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1667 /* Don't spill between one and two word int types */
1668 if ((active[i]->type == TYPE_LNG) != (lt->type == TYPE_LNG))
/* lt takes over the victim's register */
1671 lt->reg = active[i]->reg;
/* remove the victim from active[] ... */
1675 for (j = i; j < *active_top; j++)
1676 active[j] = active[j + 1];
/* ... and insert lt in its place (sorted by i_end) */
1678 lsra_add_active(lt, active, active_top);
/* lsra_calc_lifetime_length ***************************************************

   Post-process all collected lifetimes: compact the used ones into
   ls->lt_used[], distribute them onto the int/flt/mem worklists, fix up
   undefined first-def/last-use markers, and (for the variant without
   exact liveness, !LV) extend intervals across loop backedges so values
   live around a loop stay allocated for the whole loop body.

*******************************************************************************/

1684 void lsra_calc_lifetime_length(jitdata *jd) {
1688 struct lifetime *lt;
1689 #if defined(LSRA_DEBUG_VERBOSE) || !defined(LV)
1694 int flags; /* 0 INMEMORY -> ls->lt_mem */
1695 /* 1 INTREG -> ls->lt_int */
1696 /* 2 FLTREG -> ls->lt_flt */
1703 #ifdef LSRA_DEBUG_VERBOSE
1704 if (compileverbose) {
1705 printf("icount_block: ");
1706 for (i=0; i < m->basicblockcount; i++)
1707 printf("(%3i-%3i) ",i, ls->icount_block[i]);
1712 /* extend lifetime over backedges (for the lsra version without exact
1714 now iterate through lifetimes and expand them */
1717 for(lt_index = 0 ;lt_index < ls->lifetimecount; lt_index++) {
1718 if ( ls->lifetime[lt_index].type != -1) { /* used lifetime */
1719 /* remember lt_index in lt_sorted */
1720 ls->lt_used[lifetimecount++] = lt_index;
1721 lt = &(ls->lifetime[lt_index]);
1722 #if defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1723 /* prevent conflicts between lifetimes of type long by increasing
1724 the lifetime by one instruction
1727 with i==l and/or j==k
1728 to resolve this during codegeneration a temporary register
1730 if (lt->type == TYPE_LNG)
1734 /* distribute lifetimes to lt_int, lt_flt and lt_mem */
1740 #if defined(HAS_4BYTE_STACKSLOT) && !defined(SUPPORT_COMBINE_INTEGER_REGISTERS)
1753 #if defined(__I386__)
1755 * for i386 put all floats in memory
1763 { log_text("Unknown Type\n"); assert(0); }
1767 case 0: /* lt_used[lt_used_index] -> lt_rest */
1768 ls->lt_mem[ ls->lt_mem_count++ ] = lt_index;
1770 case 1: /* l->lifetimes -> lt_int */
1771 ls->lt_int[ ls->lt_int_count++ ] = lt_index;
1773 case 2: /* l->lifetimes -> lt_flt */
1774 ls->lt_flt[ ls->lt_flt_count++ ] = lt_index;
/* i_first_def == INT_MAX means the var was never defined -> anchor at 0 */
1779 if (lt->i_first_def == INT_MAX) {
1780 #ifdef LSRA_DEBUG_VERBOSE
1781 printf("Warning: var not defined! vi: %i start: %i end: %i\n",
1782 lt->v_index, lt->i_start, lt->i_end);
1784 lt->bb_first_def = 0;
1785 lt->i_first_def = 0;
1788 lt->i_start = lt->i_first_def;
/* i_last_use == -2 means the var was never used -> collapse to the def */
1790 if (lt->i_last_use == -2) {
1791 #ifdef LSRA_DEBUG_VERBOSE
1792 printf("Warning: Var not used! vi: %i start: %i end: %i\n",
1793 lt->v_index, lt->i_start, lt->i_end);
1795 lt->bb_last_use = lt->bb_first_def;
1796 lt->i_last_use = lt->i_first_def;
1799 lt->i_end = lt->i_last_use;
1801 #ifdef LSRA_DEBUG_VERBOSE
1802 if (lt->i_start > lt->i_end)
1803 printf("Warning: last use before first def! vi: %i start: %i end: %i\n", lt->v_index, lt->i_start, lt->i_end);
1807 if ((lt->bb_first_def != lt->bb_last_use) ||
1808 (lt->i_first_def == -1)) {
1809 /* Lifetime goes over more than one Basic Block -> */
1810 /* check for necessary extension over backedges */
1811 /* see lsra_get_backedges */
1812 /* Arguments are set "before" Block 0, so they have */
1813 /* a lifespan of more than one block, too */
1815 for (i=0; i < ls->backedge_count; i++) {
1816 if (!( (lt->bb_first_def > ls->backedge[i]->start) ||
1817 (lt->bb_last_use < ls->backedge[i]->end) )) {
1818 /* Live intervall intersects with a backedge */
1819 /* if (lt->bb_first_def <= ls->backedge[i]->start) */
1820 if (lt->bb_last_use <= ls->backedge[i]->start)
1822 ls->icount_block[ls->backedge[i]->start] +
1823 m->basicblocks[ls->sorted[ls->backedge[i]->start]].icount;
1827 #endif /* !defined(LV) */
1829 #ifdef USAGE_PER_INSTR
/* normalize usage count by interval length */
1830 lt->usagecount = lt->usagecount / ( lt->i_end - lt->i_start + 1);
1834 ls->lifetimecount = lifetimecount;
1837 #ifdef LSRA_DEBUG_VERBOSE
/* print_lifetimes *************************************************************

   Debug helper: dump every lifetime in lt[0..lifetimecount-1] — interval
   bounds, assigned register/offset, variable index, type, flags and
   usage count.  Compiled only with LSRA_DEBUG_VERBOSE.

*******************************************************************************/

1838 void print_lifetimes(jitdata *jd, int *lt, int lifetimecount)
1842 int type,flags,regoff,varkind;
1850 for (lt_index = 0; lt_index < lifetimecount; lt_index++) {
1851 n = &(ls->lifetime[lt[lt_index]]);
1852 if (n->savedvar == SAVEDVAR)
1856 if (n->v_index < 0) { /* stackslot */
1857 type = n->local_ss->s->type;
1858 flags=n->local_ss->s->flags;
1859 regoff=n->local_ss->s->regoff;
1860 varkind=n->local_ss->s->varkind;
1861 } else { /* local var */
1862 if (rd->locals[n->v_index][n->type].type>=0) {
1863 type = rd->locals[n->v_index][n->type].type;
1864 flags=rd->locals[n->v_index][n->type].flags;
1865 regoff=rd->locals[n->v_index][n->type].regoff;
1868 { log_text("Type Data mismatch 3\n"); assert(0); }
1871 printf("i_Start: %3i(%3i,%3i) i_stop: %3i(%3i,%3i) reg: %3i VI: %3i type: %3i flags: %3i varkind: %3i usage: %3li ltflags: %xi \n",n->i_start, ls->sorted[n->bb_first_def], n->i_first_def,n->i_end, ls->sorted[n->bb_last_use], n->i_last_use,regoff,n->v_index,type,flags, varkind, n->usagecount, n->flags);
1873 printf("i_Start: %3i i_stop: %3i reg: %3i VI: %3i type: %3i flags: %3i varkind: %3i usage: %3li ltflags: %xi \n",n->i_start, n->i_end, regoff,n->v_index,type,flags, varkind, n->usagecount, n->flags);
1876 printf( "%3i Lifetimes printed \n",lt_index);
1882 /******************************************************************************
1883 Helpers for first LSRA Version without exact Liveness Analysis
1884 *****************************************************************************/
/* lsra_join_ss ****************************************************************

   Try to merge the lifetimes of stack elements in and out (coalescing),
   so both end up sharing one register/slot.  join_flag (JOIN, JOIN_BB,
   JOIN_DUP, JOIN_OP) restricts which combinations are safe; unsafe
   combinations are rejected.  On a merge, all stack slots of out's
   lifetime are relinked into in's lifetime and the interval bounds,
   flags and usage counts are unioned.  Return value: presumably whether
   the join happened — TODO confirm against the elided return statements.

*******************************************************************************/

1887 bool lsra_join_ss( struct lsradata *ls, struct stackelement *in,
1888 struct stackelement *out, int join_flag) {
1889 struct lifetime *lt, *lto;
1890 struct stackslot *ss, *ss_last;
/* same varnum -> already joined, nothing to do */
1893 if (in->varnum != out->varnum) {
1894 lt = &(ls->lifetime[-in->varnum - 1]);
1896 #ifdef LSRA_DEBUG_CHECK
1897 if (join_flag == JOIN_BB)
1898 if (lt->type == -1) {
1899 log_text("lsra_join_ss: lifetime for instack not found\n");
1904 if (out->varnum >= 0) { /* no lifetime for this slot till now */
1905 lsra_add_ss(lt, out);
1907 lto = &(ls->lifetime[-out->varnum - 1]);
/* JOIN_BB lifetimes must not be combined with DUP/OP joins */
1908 if ((join_flag == JOIN_DUP) || (join_flag == JOIN_OP))
1909 if ( (lt->flags & JOIN_BB) || (lto->flags & JOIN_BB)) {
1912 if (join_flag == JOIN_DUP)
1913 if ( (lt->flags & JOIN_OP) || (lto->flags & JOIN_OP)) {
1916 #ifdef LSRA_DEBUG_CHECK
1917 if (lto->type == -1) {
1918 log_text("lsra_join_ss: lifetime for outstack not found\n");
1922 #ifdef LSRA_DEBUG_CHECK
1923 if (lto->type != lt->type) {
1924 log_text("lsra_join_ss: in/out stack type mismatch\n");
1929 lt->flags |= JOINING;
1931 /* take Lifetime lto out of ls->lifetimes */
1934 /* merge lto into lt of in */
/* retarget every stack slot of lto to lt's variable index */
1936 ss_last = ss = lto->local_ss;
1937 while (ss != NULL) {
1939 ss->s->varnum = lt->v_index;
/* splice lto's slot list in front of lt's */
1942 if (ss_last != NULL) {
1943 ss_last->next = lt->local_ss;
1944 lt->local_ss = lto->local_ss;
1947 lt->savedvar |= lto->savedvar;
1948 lt->flags |= lto->flags | join_flag;
1949 lt->usagecount += lto->usagecount;
1951 /*join of i_first_def and i_last_use */
1952 if (lto->i_first_def < lt->i_first_def) {
1953 lt->i_first_def = lto->i_first_def;
1955 if (lto->i_last_use > lt->i_last_use) {
1956 lt->i_last_use = lto->i_last_use;
1963 /* join instack of Basic Block b_index with outstack of predecessors */
/* Exception-handler instacks and the first element of a subroutine
   header are excluded; all remaining instack slots are pairwise joined
   (JOIN_BB) with the corresponding outstack slots of each predecessor. */
1964 void lsra_join_lifetimes(jitdata *jd,int b_index) {
1967 struct stackelement *in, *i, *out;
1973 /* do not join instack of Exception Handler */
1974 if (m->basicblocks[b_index].type == BBTYPE_EXH)
1976 in=m->basicblocks[b_index].instack;
1977 /* do not join first instack element of a subroutine header */
1978 if (m->basicblocks[b_index].type == BBTYPE_SBR)
1982 for (pred = ls->pred[b_index]; pred != NULL; pred = pred->next) {
1983 out = m->basicblocks[pred->value].outstack;
/* walk both stacks in lockstep; they have equal depth by construction */
1984 for (i=in; (i != NULL); i = i->prev, out=out->prev) {
1985 lsra_join_ss(ls, i, out, JOIN_BB);
/* lsra_make_ss ****************************************************************

   Allocate a stackslot wrapper for stack element s in basic block
   bb_index (field initialization is in the elided lines).

*******************************************************************************/

1991 struct stackslot *lsra_make_ss(stackptr s, int bb_index)
1993 struct stackslot *ss;
1995 ss = DNEW(struct stackslot);
/* lsra_add_ss *****************************************************************

   Attach stack element s to lifetime lt: if s is not yet a member
   (varnum differs from lt->v_index), prepend a new stackslot node to
   lt->local_ss and claim s by rewriting its varnum.  SAVEDVAR of s is
   always propagated into the lifetime.

*******************************************************************************/

2001 void lsra_add_ss(struct lifetime *lt, stackptr s) {
2002 struct stackslot *ss;
2004 /* Stackslot not in list? */
2005 if (s->varnum != lt->v_index) {
2006 ss = DNEW(struct stackslot);
2008 ss->s->varnum = lt->v_index;
2009 ss->next = lt->local_ss;
2012 lt->savedvar |= s->flags & SAVEDVAR;
/* get_ss_lifetime *************************************************************

   Return the lifetime struct for stack element s.  A non-negative
   varnum means s has no lifetime yet: take the next free stack lifetime
   (ls->v_index counts downwards, stored negatively) and initialize its
   def/use markers; otherwise look up the existing one via -varnum - 1.

*******************************************************************************/

2018 struct lifetime *get_ss_lifetime(lsradata *ls, stackptr s) {
2021 if (s->varnum >= 0) { /* new stackslot lifetime */
2023 #ifdef LSRA_DEBUG_CHECK_VERBOSE
2023 if (-ls->v_index - 1 >= ls->maxlifetimes) {
2024 printf("%i %i\n", -ls->v_index - 1, ls->maxlifetimes);
2027 _LSRA_ASSERT(-ls->v_index - 1 < ls->maxlifetimes);
2029 n = &(ls->lifetime[-ls->v_index - 1]);
/* hand out the slot and move the counter to the next free (more
   negative) index */
2031 n->v_index = ls->v_index--;
2034 n->bb_last_use = -1;
2035 n->bb_first_def = -1;
2036 n->i_last_use = -2; /* At -1 param init happens, so -2 is below all
2037 possible instruction indices */
2038 n->i_first_def = INT_MAX;
2043 n = &(ls->lifetime[-s->varnum - 1]);
/* A stack element is join-able only if it is neither a method argument
   nor mapped directly to a local variable. */
2049 #define IS_TEMP_VAR(s) (((s)->varkind != ARGVAR) && ((s)->varkind != LOCALVAR))

/* lsra_join_3_stack: try to coalesce dst with src1; if that fails, with
   src2 (types must match).  lsra_join_2_stack: two-operand variant.
   lsra_join_dup: joins for DUP-style ICMDs — s1 with s2 (plain JOIN),
   then s1/s2 with s3 using JOIN_DUP when the first join succeeded. */
2051 #define lsra_join_3_stack(ls, dst, src1, src2, join_type) \
2052 if ( IS_TEMP_VAR(dst) ) { \
2054 if ( IS_TEMP_VAR(src1) && ((src1)->type == (dst)->type)) { \
2055 join_ret = lsra_join_ss(ls, dst, src1, join_type); \
2057 if ((!join_ret) && IS_TEMP_VAR(src2) && ((src2)->type == (dst)->type)) { \
2058 lsra_join_ss(ls, dst, src2, join_type); \
2062 #define lsra_join_2_stack(ls, dst, src, join_type) \
2063 if ( IS_TEMP_VAR(dst) ) { \
2064 if ( (IS_TEMP_VAR(src)) && ((src)->type == (dst)->type)) { \
2065 lsra_join_ss(ls, dst, src, join_type); \
2069 #define lsra_join_dup(ls, s1, s2, s3) { \
2070 if (IS_TEMP_VAR(s1)) { \
2072 if (IS_TEMP_VAR(s2)) \
2073 join_ret = lsra_join_ss(ls, s1, s2, JOIN); \
2074 /* undangerous join!*/\
2075 if (IS_TEMP_VAR(s3)) { \
2076 if (join_ret) /* first join successful -> second of type */ \
2078 lsra_join_ss(ls, s1, s3, JOIN_DUP); \
2080 lsra_join_ss(ls, s1, s3, JOIN); /* first join did not */ \
2081 /* happen -> second undangerous */ \
2084 if (IS_TEMP_VAR(s2) && IS_TEMP_VAR(s3)) \
2085 lsra_join_ss(ls, s2, s3, JOIN_DUP); \
/* Record a write (definition) of stack element s; ARGVARs are handled
   elsewhere and skipped. */
2088 #define lsra_new_stack(ls, s, block, instr) \
2089 if ((s)->varkind != ARGVAR) _lsra_new_stack(ls, s, block, instr, LSRA_STORE)

/* _lsra_new_stack *************************************************************

   Register a definition of s at (block, instr).  LOCALVARs delegate to
   lsra_usage_local; everything else gets (or creates) a stack lifetime,
   whose first-def markers are overwritten on every call — the scan runs
   backwards, so the last write wins, i.e. the earliest definition.
   LSRA_BB_IN tags block-entry stack slots as JOIN_BB lifetimes.

*******************************************************************************/

2090 void _lsra_new_stack(lsradata *ls, stackptr s, int block, int instr, int store)
2094 if (s->varkind == LOCALVAR) {
2095 lsra_usage_local(ls, s->varnum, s->type, block, instr, LSRA_STORE);
2096 } else /* if (s->varkind != ARGVAR) */ {
2098 n=get_ss_lifetime(ls, s);
2100 if (store == LSRA_BB_IN)
2101 n->flags |= JOIN_BB;
2102 /* remember first def -> overwrite everytime */
2103 n->bb_first_def = ls->sorted_rev[block];
2104 n->i_first_def = ls->icount_block[ls->sorted_rev[block]] + instr;
/* weight the usage by loop-nesting depth of the block */
2106 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
/* Record a read (use) of stack element s; the POP variant marks slots
   consumed by POP/POP2.  ARGVARs are skipped. */
2110 #define lsra_from_stack(ls, s, block, instr) \
2111 if ((s)->varkind != ARGVAR) _lsra_from_stack(ls, s, block, instr, LSRA_LOAD)
2112 #define lsra_pop_from_stack(ls, s, block, instr) \
2113 if ((s)->varkind != ARGVAR) _lsra_from_stack(ls, s, block, instr, LSRA_POP)

/* _lsra_from_stack ************************************************************

   Register a use of s at (block, instr).  LOCALVARs delegate to
   lsra_usage_local; otherwise the stack lifetime is fetched/created and
   its last-use markers are set only once — the backward scan hits the
   latest use first.  STACKVARs are demoted to TEMPVAR since lsra does
   not support them.

*******************************************************************************/

2114 void _lsra_from_stack(lsradata *ls, stackptr s, int block, int instr, int store)
2118 if (s->varkind == LOCALVAR) {
2119 lsra_usage_local(ls, s->varnum, s->type, block, instr, LSRA_LOAD);
2120 } else /* if (s->varkind != ARGVAR) */ {
2121 if (s->varkind == STACKVAR )
2122 /* No STACKVARS possible with lsra! */
2123 s->varkind = TEMPVAR;
2125 n=get_ss_lifetime(ls, s);
2127 if (store == LSRA_BB_OUT)
2128 n->flags |= JOIN_BB;
2129 if (n->flags & JOINING)
2130 n->flags &= ~JOINING;
2131 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
2133 /* remember last USE, so only write, if USE Field is undefined (==-1) */
2134 if (n->bb_last_use == -1) {
2135 n->bb_last_use = ls->sorted_rev[block];
2136 n->i_last_use = ls->icount_block[ls->sorted_rev[block]] + instr;
/* lsra_usage_local ************************************************************

   Record an access (LSRA_LOAD or LSRA_STORE) of local variable v_index
   with the given type at (block, instr).  Local lifetimes live behind
   the stack lifetimes in ls->lifetime[], indexed by
   maxlifetimes + v_index*(TYPE_ADR+1) + type; they are created lazily
   and always marked SAVEDVAR.  Like the stack variants, last-use is
   written only once (backward scan) while first-def is overwritten on
   every store.

*******************************************************************************/

2141 void lsra_usage_local(lsradata *ls, s4 v_index, int type, int block, int instr,
2146 n = &(ls->lifetime[ ls->maxlifetimes + v_index * (TYPE_ADR+1) + type]);
2148 if (n->type == -1) { /* new local lifetime */
2152 n->savedvar = SAVEDVAR;
2156 n->bb_last_use = -1;
2157 n->bb_first_def = -1;
2159 n->i_first_def = INT_MAX;
2161 n->usagecount+=ls->nesting[ls->sorted_rev[block]];
2162 /* add access at (block, instr) to instruction list */
2163 /* remember last USE, so only write, if USE Field is undefined (==-1) */
2164 /* count store as use, too -> defined and not used vars would overwrite */
2166 if (n->bb_last_use == -1) {
2167 n->bb_last_use = ls->sorted_rev[block];
2168 n->i_last_use = ls->icount_block[ls->sorted_rev[block]] + instr;
2170 if (store == LSRA_STORE) {
2171 /* store == LSRA_STORE, remember first def -> overwrite everytime */
2172 n->bb_first_def = ls->sorted_rev[block];
2173 n->i_first_def = ls->icount_block[ls->sorted_rev[block]] + instr;
2177 #ifdef LSRA_DEBUG_VERBOSE
/* lsra_dump_stack *************************************************************

   Debug helper: print address, regoff, varnum, varkind, type and flags
   of each stack element.  Compiled only with LSRA_DEBUG_VERBOSE.

*******************************************************************************/

2178 void lsra_dump_stack(stackptr s)
2181 printf("%p(R%3i N%3i K%3i T%3i F%3i) ",(void *)s,s->regoff, s->varnum,
2182 s->varkind, s->type, s->flags);
/* lsra_scan_registers_canditates **********************************************

   Scan basic block b_index BACKWARDS (from its last instruction to the
   first) and record all definitions and uses of stack slots and locals
   via lsra_new_stack / lsra_from_stack / lsra_usage_local.  Block in-
   and outstacks are registered first (LSRA_BB_IN / LSRA_BB_OUT) so the
   resulting lifetimes span the whole block boundary.  The big switch
   handles each ICMD group by its stack effect ("pop n push m"); the
   optional JOIN_* macros coalesce source and destination slots where
   codegen allows operands and results to share a location.

*******************************************************************************/

2190 void lsra_scan_registers_canditates(jitdata *jd, int b_index)
2192 /* methodinfo *lm; */
2193 builtintable_entry *bte;
2201 bool join_ret; /* for lsra_join* Macros */
2208 /* get instruction count for BB and remember the max instruction count */
2210 iindex = m->basicblocks[b_index].icount - 1;
2212 src = m->basicblocks[b_index].instack;
2213 if (m->basicblocks[b_index].type != BBTYPE_STD) {
2214 lsra_new_stack(ls, src, b_index, 0);
2217 for (;src != NULL; src=src->prev) {
2218 /*******************************************************************************
2219 Check this - ? For every incoming Stack Slot a lifetime has to be created ?
2220 *******************************************************************************/
2221 _lsra_new_stack(ls, src, b_index, 0, LSRA_BB_IN);
2223 src = m->basicblocks[b_index].outstack;
2224 for (;src != NULL; src=src->prev) {
2225 _lsra_from_stack(ls, src, b_index, iindex, LSRA_BB_OUT);
2228 /* set iptr to last instruction of BB */
2229 iptr = m->basicblocks[b_index].iinstr + iindex;
/* main backward walk over the block's instructions */
2231 for (;iindex >= 0; iindex--, iptr--) {
2233 /* get source and destination Stack for the current instruction */
2234 /* destination stack is available as iptr->dst */
2238 /* source stack is either the destination stack of the previos */
2239 /* instruction, or the basicblock instack for the first instruction */
2241 if (iindex) /* != 0 is > 0 here, since iindex ist always >= 0 */
2244 src=m->basicblocks[b_index].instack;
2252 /* local read (return adress) */
2253 lsra_usage_local(ls, iptr->op1, TYPE_ADR, b_index, iindex,
2257 /* case ICMD_ELSE_ICONST: */
/* ICMDs with no stack/local effect relevant to allocation */
2258 case ICMD_CHECKNULL:
2262 case ICMD_PUTSTATICCONST:
2263 case ICMD_INLINE_START:
2264 case ICMD_INLINE_END:
2265 case ICMD_INLINE_GOTO:
2269 /* local = local+<const> */
2270 lsra_usage_local(ls, iptr->op1, TYPE_INT, b_index, iindex,
2272 lsra_usage_local(ls, iptr->op1, TYPE_INT, b_index, iindex,
2276 /* pop 0 push 1 const: const->stack */
2282 /* new stack slot */
2283 lsra_new_stack(ls, dst, b_index, iindex);
2286 /* pop 0 push 1 load: local->stack */
2292 if (dst->varkind != LOCALVAR) {
2293 /* local->value on stack */
2294 lsra_usage_local(ls, iptr->op1, opcode - ICMD_ILOAD, b_index,
2296 lsra_new_stack(ls, dst, b_index, iindex); /* value->stack */
2297 } else /* if (dst->varnum != iptr->op1) */ {
2298 /* local -> local */
2299 lsra_usage_local(ls, iptr->op1, opcode - ICMD_ILOAD, b_index,
2300 iindex,LSRA_LOAD); /* local->value */
2301 lsra_usage_local(ls, dst->varnum, opcode - ICMD_ILOAD, b_index,
2302 iindex, LSRA_STORE); /* local->value */
2308 /* Stack(arrayref,index)->stack */
2319 lsra_from_stack(ls, src, b_index, iindex);
2320 /* stack->arrayref */
2321 lsra_from_stack(ls, src->prev, b_index, iindex);
2322 /* arrayref[index]->stack */
2323 lsra_new_stack(ls, dst, b_index, iindex);
2327 /* stack(arrayref,index,value)->arrayref[index]=value */
2338 lsra_from_stack(ls, src,b_index, iindex); /* stack -> value */
2339 lsra_from_stack(ls, src->prev, b_index, iindex); /* stack -> index*/
2340 /* stack -> arrayref */
2341 lsra_from_stack(ls, src->prev->prev, b_index, iindex);
2344 /* pop 1 push 0 store: stack -> local */
2350 if (src->varkind != LOCALVAR) {
2351 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value */
2352 lsra_usage_local(ls, iptr->op1, opcode-ICMD_ISTORE, b_index,
2353 iindex, LSRA_STORE); /* local->value */
2354 } else /* if (src->varnum != iptr->op1) */ {
2355 lsra_usage_local(ls, iptr->op1, opcode-ICMD_ISTORE, b_index,
2356 iindex, LSRA_STORE); /* local->value */
2357 lsra_usage_local(ls, src->varnum, opcode-ICMD_ISTORE, b_index,
2358 iindex, LSRA_LOAD); /* local->value */
2363 case ICMD_POP: /* throw away a stackslot */
2364 /* TODO: check if used anyway (DUP...) and change codegen to */
2365 /* ignore this stackslot */
2366 lsra_pop_from_stack(ls, src, b_index, iindex);
/* pop 1 push 0: value consumers (returns, throws, field stores) */
2374 case ICMD_ARETURN: /* stack(value) -> [empty] */
2376 case ICMD_ATHROW: /* stack(objref) -> undefined */
2378 case ICMD_PUTSTATIC: /* stack(value) -> static_field */
2379 case ICMD_PUTFIELDCONST:
2381 /* pop 1 push 0 branch */
2382 case ICMD_IFNULL: /* stack(value) -> branch? */
2383 case ICMD_IFNONNULL:
2399 /* pop 1 push 0 table branch */
2400 case ICMD_TABLESWITCH:
2401 case ICMD_LOOKUPSWITCH:
2403 case ICMD_MONITORENTER:
2404 case ICMD_MONITOREXIT:
2405 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value */
2409 case ICMD_POP2: /* throw away 2 stackslots */
2410 /* TODO: check if used anyway (DUP...) and change codegen to */
2411 /* ignore this stackslot */
2412 lsra_pop_from_stack(ls, src, b_index, iindex);
2413 lsra_pop_from_stack(ls, src->prev, b_index, iindex);
2416 /* pop 2 push 0 branch */
2418 case ICMD_IF_ICMPEQ: /* stack (v1,v2) -> branch(v1,v2) */
2419 case ICMD_IF_ICMPNE:
2420 case ICMD_IF_ICMPLT:
2421 case ICMD_IF_ICMPGE:
2422 case ICMD_IF_ICMPGT:
2423 case ICMD_IF_ICMPLE:
2425 case ICMD_IF_LCMPEQ:
2426 case ICMD_IF_LCMPNE:
2427 case ICMD_IF_LCMPLT:
2428 case ICMD_IF_LCMPGE:
2429 case ICMD_IF_LCMPGT:
2430 case ICMD_IF_LCMPLE:
2432 case ICMD_IF_ACMPEQ:
2433 case ICMD_IF_ACMPNE:
2436 case ICMD_PUTFIELD: /* stack(objref,value) -> objref = value */
2438 case ICMD_IASTORECONST:
2439 case ICMD_LASTORECONST:
2440 case ICMD_AASTORECONST:
2441 case ICMD_BASTORECONST:
2442 case ICMD_CASTORECONST:
2443 case ICMD_SASTORECONST:
2444 lsra_from_stack(ls, src, b_index, iindex); /* stack -> value*/
2445 lsra_from_stack(ls, src->prev, b_index, iindex);
2448 /* pop 0 push 1 dup */
2449 case ICMD_DUP: /* src == dst->prev, src -> dst */
2450 /* lsra_from_stack(ls, src,b_index,iindex);*/
2451 lsra_new_stack(ls, dst, b_index, iindex);
2453 #ifdef JOIN_DUP_STACK
2454 /* src is identical to dst->prev */
2455 lsra_join_2_stack(ls, src, dst, JOIN_DUP);
2459 /* pop 0 push 2 dup */
2461 /* lsra_from_stack(ls, src,b_index, iindex); */
2462 /* lsra_from_stack(ls, src->prev, b_index, iindex); */
2463 lsra_new_stack(ls, dst->prev, b_index, iindex);
2464 lsra_new_stack(ls, dst, b_index, iindex);
2466 #ifdef JOIN_DUP_STACK
2467 lsra_join_2_stack(ls, src, dst, JOIN_DUP);
2468 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN_DUP);
2469 /* src is identical to dst->prev->prev */
2470 /* src->prev is identical to dst->prev->prev->prev */
2474 /* pop 2 push 3 dup */
/* uses at iindex+1: the copies are read "before" the new defs at iindex */
2476 lsra_from_stack(ls, src, b_index, iindex+1);
2477 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2478 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2479 lsra_new_stack(ls, dst->prev, b_index, iindex);
2480 lsra_new_stack(ls, dst, b_index, iindex);
2481 #ifdef JOIN_DUP_STACK
2482 lsra_join_dup(ls, src, dst, dst->prev->prev);
2483 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN);
2487 /* pop 3 push 4 dup */
2489 lsra_from_stack(ls, src,b_index, iindex+1);
2490 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2491 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2492 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2493 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2494 lsra_new_stack(ls, dst->prev, b_index, iindex);
2495 lsra_new_stack(ls, dst, b_index, iindex);
2497 #ifdef JOIN_DUP_STACK
2498 lsra_join_dup(ls, src, dst, dst->prev->prev->prev);
2499 lsra_join_2_stack(ls, src->prev, dst->prev, JOIN);
2500 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2504 /* pop 3 push 5 dup */
2506 lsra_from_stack(ls, src, b_index, iindex+1);
2507 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2508 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2509 lsra_new_stack(ls, dst->prev->prev->prev->prev, b_index, iindex);
2510 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2511 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2512 lsra_new_stack(ls, dst->prev, b_index, iindex);
2513 lsra_new_stack(ls, dst, b_index, iindex);
2515 #ifdef JOIN_DUP_STACK
2516 lsra_join_dup(ls, src, dst, dst->prev->prev->prev);
2517 lsra_join_dup(ls, src->prev, dst->prev,
2518 dst->prev->prev->prev->prev);
2519 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2523 /* pop 4 push 6 dup */
2525 lsra_from_stack(ls, src, b_index, iindex+1);
2526 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2527 lsra_from_stack(ls, src->prev->prev, b_index, iindex+1);
2528 lsra_from_stack(ls, src->prev->prev->prev, b_index, iindex+1);
2529 lsra_new_stack(ls, dst->prev->prev->prev->prev->prev, b_index,
2531 lsra_new_stack(ls, dst->prev->prev->prev->prev, b_index, iindex);
2532 lsra_new_stack(ls, dst->prev->prev->prev, b_index, iindex);
2533 lsra_new_stack(ls, dst->prev->prev, b_index, iindex);
2534 lsra_new_stack(ls, dst->prev, b_index, iindex);
2535 lsra_new_stack(ls, dst, b_index, iindex);
2537 #ifdef JOIN_DUP_STACK
2538 lsra_join_dup(ls, src, dst, dst->prev->prev->prev->prev);
2539 lsra_join_dup(ls, src->prev, dst->prev,
2540 dst->prev->prev->prev->prev->prev);
2541 lsra_join_2_stack(ls, src->prev->prev, dst->prev->prev, JOIN);
2542 lsra_join_2_stack(ls, src->prev->prev->prev, dst->prev->prev->prev,
2547 /* pop 2 push 2 swap */
2549 lsra_from_stack(ls, src, b_index, iindex+1);
2550 lsra_from_stack(ls, src->prev, b_index, iindex+1);
2551 lsra_new_stack(ls, dst->prev, b_index, iindex);
2552 lsra_new_stack(ls, dst, b_index, iindex);
/* crosswise join: swapped slots can share locations */
2554 lsra_join_2_stack(ls, src->prev, dst, JOIN);
2555 lsra_join_2_stack(ls, src, dst->prev, JOIN);
/* pop 2 push 1: binary operations (stack v1 op v2 -> stack) */
2593 lsra_from_stack(ls, src, b_index, iindex);
2594 lsra_from_stack(ls, src->prev, b_index, iindex);
2595 lsra_new_stack(ls, dst, b_index, iindex);
2596 #ifdef JOIN_DEST_STACK
2597 lsra_join_3_stack(ls, dst, src->prev, src, JOIN_OP);
2602 lsra_from_stack(ls, src, b_index, iindex);
2603 lsra_from_stack(ls, src->prev,b_index,iindex);
2604 lsra_new_stack(ls, dst, b_index, iindex);
2605 #ifdef JOIN_DEST_STACK
2606 lsra_join_2_stack(ls, src, dst, JOIN_OP);
2624 lsra_from_stack(ls, src, b_index, iindex);
2625 lsra_from_stack(ls, src->prev, b_index, iindex);
2626 lsra_new_stack(ls, dst, b_index, iindex);
/* pop 1 push 1: operation with immediate constant */
2630 case ICMD_LADDCONST:
2631 case ICMD_LSUBCONST:
2632 case ICMD_LMULCONST:
2636 case ICMD_LANDCONST:
2638 case ICMD_LXORCONST:
2639 case ICMD_LSHLCONST:
2640 case ICMD_LSHRCONST:
2641 case ICMD_LUSHRCONST:
2643 case ICMD_IADDCONST:
2644 case ICMD_ISUBCONST:
2645 case ICMD_IMULCONST:
2649 case ICMD_IANDCONST:
2651 case ICMD_IXORCONST:
2652 case ICMD_ISHLCONST:
2653 case ICMD_ISHRCONST:
2654 case ICMD_IUSHRCONST:
2656 /* case ICMD_IFEQ_ICONST: */
2657 /* case ICMD_IFNE_ICONST: */
2658 /* case ICMD_IFLT_ICONST: */
2659 /* case ICMD_IFGE_ICONST: */
2660 /* case ICMD_IFGT_ICONST: */
2661 /* case ICMD_IFLE_ICONST: */
2666 case ICMD_INT2SHORT:
2684 case ICMD_CHECKCAST:
2685 lsra_from_stack(ls, src, b_index, iindex);
2686 lsra_new_stack(ls, dst, b_index, iindex);
2687 #ifdef JOIN_DEST_STACK
2688 lsra_join_2_stack(ls, src, dst, JOIN_OP);
2692 /* TODO: check if for these ICMDs JOIN_DEST_STACK works, too! */
2693 case ICMD_ARRAYLENGTH:
2694 case ICMD_INSTANCEOF:
2697 case ICMD_ANEWARRAY:
2700 lsra_from_stack(ls, src, b_index, iindex);
2701 lsra_new_stack(ls, dst, b_index, iindex);
/* pop 0 push 1: value produced without stack input */
2705 case ICMD_GETSTATIC:
2708 lsra_new_stack(ls, dst, b_index, iindex);
2711 /* pop many push any */
2713 case ICMD_INVOKESTATIC:
2714 case ICMD_INVOKESPECIAL:
2715 case ICMD_INVOKEVIRTUAL:
2716 case ICMD_INVOKEINTERFACE:
2717 INSTRUCTION_GET_METHODDESC(iptr,md);
/* consume all argument slots; push result only for non-void returns */
2720 lsra_from_stack(ls, src, b_index, iindex);
2723 if (md->returntype.type != TYPE_VOID)
2724 lsra_new_stack(ls, dst, b_index, iindex);
/* builtins: same pattern as invokes */
2733 lsra_from_stack(ls, src, b_index, iindex);
2736 if (md->returntype.type != TYPE_VOID)
2737 lsra_new_stack(ls, dst, b_index, iindex);
2740 case ICMD_MULTIANEWARRAY:
2743 lsra_from_stack(ls, src, b_index, iindex);
2746 lsra_new_stack(ls, dst, b_index, iindex);
2751 new_internalerror("Unknown ICMD %d during register allocation",
2757 #endif /* defined(LV) */
2761 * These are local overrides for various environment variables in Emacs.
2762 * Please do not remove this and leave it at the end of the file, where
2763 * Emacs will automagically detect them.
2764 * ---------------------------------------------------------------------
2767 * indent-tabs-mode: t
2771 * vim:noexpandtab:sw=4:ts=4: