/* Compile-time debug switches: set to 1 to enable the corresponding
 * diagnostic pass output.  NOTE: DEBUG_CONSISTENCY is currently enabled.
 */
#define DEBUG_ERROR_MESSAGES 0
#define DEBUG_COLOR_GRAPH 0
#define DEBUG_CONSISTENCY 1
#define DEBUG_RANGE_CONFLICTS 0
#define DEBUG_COALESCING 0
#define DEBUG_SDP_BLOCKS 0
#define DEBUG_TRIPLE_COLOR 0
#warning "FIXME boundary cases with small types in larger registers"
#warning "FIXME give clear error messages about unused variables"
#warning "FIXME properly handle multi dimensional arrays"
27 /* Control flow graph of a loop without goto.
38 * |\ GGG HHH | continue;
66 * DFlocal(X) = { Y <- Succ(X) | idom(Y) != X }
67 * DFup(Z) = { Y <- DF(Z) | idom(Y) != X }
70 * [] == DFlocal(X) U DF(X)
73 * Dominator graph of the same nodes.
77 * BBB JJJ BBB: [ JJJ ] ( JJJ ) JJJ: [ ] ()
79 * CCC CCC: [ ] ( BBB, JJJ )
81 * DDD EEE DDD: [ ] ( BBB ) EEE: [ JJJ ] ()
83 * FFF FFF: [ ] ( BBB )
85 * GGG HHH GGG: [ ] ( BBB ) HHH: [ BBB ] ()
90 * BBB and JJJ are definitely the dominance frontier.
91 * Where do I place phi functions and how do I make that decision.
/* Print a printf-style message to stderr and terminate the compiler.
 * (Body partially elided in this view.)
 */
static void die(char *fmt, ...)
	vfprintf(stderr, fmt, args);
#define MALLOC_STRONG_DEBUG
/* malloc wrapper: allocate size bytes or die() with a message naming
 * what the allocation was for.  Never returns NULL.
 */
static void *xmalloc(size_t size, const char *name)
	/* NOTE(review): %ld paired with an unsigned long value (size + 0UL);
	 * %lu would match the type — confirm before changing output. */
	die("Cannot malloc %ld bytes to hold %s: %s\n",
		size + 0UL, name, strerror(errno));
/* Allocate zero-initialized memory via xmalloc (aborts on failure). */
static void *xcmalloc(size_t size, const char *name)
	buf = xmalloc(size, name);
	memset(buf, 0, size);
/* Release memory obtained from xmalloc/xcmalloc. */
static void xfree(const void *ptr)
/* Duplicate str into xmalloc'd storage; dies on allocation failure. */
static char *xstrdup(const char *str)
	new = xmalloc(len + 1, "xstrdup string");
	memcpy(new, str, len);
/* chdir() wrapper that dies with a diagnostic on failure. */
static void xchdir(const char *path)
	if (chdir(path) != 0) {
		die("chdir to %s failed: %s\n",
			path, strerror(errno));
/* Return nonzero if filename exists (and is plausibly readable) inside
 * dirname.  EACCES and EROFS are deliberately treated as "exists": the
 * file is there, we merely cannot use it right now.
 */
static int exists(const char *dirname, const char *filename)
{
	int does_exist;
	xchdir(dirname);
	does_exist = 1;
	/* access(2) takes R_OK/W_OK/X_OK/F_OK mode bits, not open(2)
	 * flags.  The old O_RDONLY argument only worked by accident
	 * because O_RDONLY happens to be 0 (== F_OK).
	 */
	if (access(filename, R_OK) < 0) {
		if ((errno != EACCES) && (errno != EROFS)) {
			does_exist = 0;
		}
	}
	return does_exist;
}
/* Read the whole of dirname/filename into a freshly xmalloc'd buffer.
 * The buffer gets two extra bytes: a guaranteed trailing newline and a
 * NUL terminator.  *r_size receives the file size (assumption — the
 * store is in an elided line; TODO confirm).  Dies on any I/O error.
 */
static char *slurp_file(const char *dirname, const char *filename, off_t *r_size)
	off_t size, progress;
	fd = open(filename, O_RDONLY);
		die("Cannot open '%s' : %s\n",
			filename, strerror(errno));
	result = fstat(fd, &stats);
		die("Cannot stat: %s: %s\n",
			filename, strerror(errno));
	size = stats.st_size;
	buf = xmalloc(size +2, filename);
	buf[size] = '\n'; /* Make certain the file is newline terminated */
	buf[size+1] = '\0'; /* Null terminate the file for good measure */
	/* Short reads are retried; EINTR/EAGAIN restart the read loop. */
	while(progress < size) {
		result = read(fd, buf + progress, size - progress);
			if ((errno == EINTR) || (errno == EAGAIN))
		die("read on %s of %ld bytes failed: %s\n",
			filename, (size - progress)+ 0UL, strerror(errno));
		die("Close of %s failed: %s\n",
			filename, strerror(errno));
/* Long on the destination platform (target constants and arithmetic
 * are modeled in this type).
 */
typedef unsigned long ulong_t;
215 struct file_state *prev;
216 const char *basename;
224 const char *report_name;
225 const char *report_dir;
230 struct hash_entry *ident;
238 /* I have two classes of types:
240 * Logical types. (The type the C standard says the operation is of)
242 * The operational types are:
257 * No memory is useable by the compiler.
258 * There is no floating point support.
259 * All operations take place in general purpose registers.
260 * There is one type of general purpose register.
261 * Unsigned longs are stored in that general purpose register.
264 /* Operations on general purpose registers.
283 #define OP_POS 16 /* Dummy positive operator don't use it */
293 #define OP_SLESSEQ 26
294 #define OP_ULESSEQ 27
295 #define OP_SMOREEQ 28
296 #define OP_UMOREEQ 29
#define OP_LFALSE 30 /* Test if the expression is logically false */
#define OP_LTRUE 31 /* Test if the expression is logically true */
303 /* For OP_STORE ->type holds the type
304 * RHS(0) holds the destination address
305 * RHS(1) holds the value to store.
310 #define OP_MIN_CONST 50
311 #define OP_MAX_CONST 59
312 #define IS_CONST_OP(X) (((X) >= OP_MIN_CONST) && ((X) <= OP_MAX_CONST))
313 #define OP_INTCONST 50
314 /* For OP_INTCONST ->type holds the type.
315 * ->u.cval holds the constant value.
317 #define OP_BLOBCONST 51
318 /* For OP_BLOBCONST ->type holds the layout and size
319 * information. u.blob holds a pointer to the raw binary
320 * data for the constant initializer.
322 #define OP_ADDRCONST 52
323 /* For OP_ADDRCONST ->type holds the type.
324 * MISC(0) holds the reference to the static variable.
325 * ->u.cval holds an offset from that value.
329 /* OP_WRITE moves one pseudo register to another.
330 * RHS(0) holds the destination pseudo register, which must be an OP_DECL.
 * RHS(1) holds the pseudo to move.
335 /* OP_READ reads the value of a variable and makes
336 * it available for the pseudo operation.
337 * Useful for things like def-use chains.
338 * RHS(0) holds points to the triple to read from.
/* OP_COPY makes a copy of the pseudo register or constant in RHS(0).
/* OP_PIECE returns one piece of an instruction that returns a structure.
345 * MISC(0) is the instruction
346 * u.cval is the LHS piece of the instruction to return.
349 /* OP_ASM holds a sequence of assembly instructions, the result
350 * of a C asm directive.
351 * RHS(x) holds input value x to the assembly sequence.
352 * LHS(x) holds the output value x from the assembly sequence.
353 * u.blob holds the string of assembly instructions.
357 /* OP_DEREF generates an lvalue from a pointer.
358 * RHS(0) holds the pointer value.
359 * OP_DEREF serves as a place holder to indicate all necessary
360 * checks have been done to indicate a value is an lvalue.
363 /* OP_DOT references a submember of a structure lvalue.
364 * RHS(0) holds the lvalue.
365 * ->u.field holds the name of the field we want.
367 * Not seen outside of expressions.
370 /* OP_VAL returns the value of a subexpression of the current expression.
371 * Useful for operators that have side effects.
372 * RHS(0) holds the expression.
373 * MISC(0) holds the subexpression of RHS(0) that is the
374 * value of the expression.
376 * Not seen outside of expressions.
379 /* OP_LAND performs a C logical and between RHS(0) and RHS(1).
380 * Not seen outside of expressions.
383 /* OP_LOR performs a C logical or between RHS(0) and RHS(1).
384 * Not seen outside of expressions.
/* OP_COND performs a C ? : operation.
388 * RHS(0) holds the test.
389 * RHS(1) holds the expression to evaluate if the test returns true.
390 * RHS(2) holds the expression to evaluate if the test returns false.
391 * Not seen outside of expressions.
/* OP_COMMA performs a C comma operation.
395 * That is RHS(0) is evaluated, then RHS(1)
396 * and the value of RHS(1) is returned.
397 * Not seen outside of expressions.
401 /* OP_CALL performs a procedure call.
402 * MISC(0) holds a pointer to the OP_LIST of a function
403 * RHS(x) holds argument x of a function
405 * Currently not seen outside of expressions.
407 #define OP_VAL_VEC 74
408 /* OP_VAL_VEC is an array of triples that are either variable
409 * or values for a structure or an array.
410 * RHS(x) holds element x of the vector.
411 * triple->type->elements holds the size of the vector.
416 /* OP_LIST Holds a list of statements, and a result value.
417 * RHS(0) holds the list of statements.
418 * MISC(0) holds the value of the statements.
421 #define OP_BRANCH 81 /* branch */
422 /* For branch instructions
423 * TARG(0) holds the branch target.
424 * RHS(0) if present holds the branch condition.
425 * ->next holds where to branch to if the branch is not taken.
426 * The branch target can only be a decl...
430 /* OP_LABEL is a triple that establishes an target for branches.
431 * ->use is the list of all branches that use this label.
435 /* OP_DECL is a triple that establishes an lvalue for assignments.
436 * ->use is a list of statements that use the variable.
440 /* OP_SDECL is a triple that establishes a variable of static
442 * ->use is a list of statements that use the variable.
443 * MISC(0) holds the initializer expression.
448 /* OP_PHI is a triple used in SSA form code.
449 * It is used when multiple code paths merge and a variable needs
450 * a single assignment from any of those code paths.
451 * The operation is a cross between OP_DECL and OP_WRITE, which
 * is what OP_PHI is generated from.
454 * RHS(x) points to the value from code path x
455 * The number of RHS entries is the number of control paths into the block
 * in which OP_PHI resides.  The elements of the array point
457 * to the variables OP_PHI is derived from.
 * MISC(0) holds a pointer to the original OP_DECL node.
462 /* Architecture specific instructions */
465 #define OP_SET_EQ 102
466 #define OP_SET_NOTEQ 103
467 #define OP_SET_SLESS 104
468 #define OP_SET_ULESS 105
469 #define OP_SET_SMORE 106
470 #define OP_SET_UMORE 107
471 #define OP_SET_SLESSEQ 108
472 #define OP_SET_ULESSEQ 109
473 #define OP_SET_SMOREEQ 110
474 #define OP_SET_UMOREEQ 111
477 #define OP_JMP_EQ 113
478 #define OP_JMP_NOTEQ 114
479 #define OP_JMP_SLESS 115
480 #define OP_JMP_ULESS 116
481 #define OP_JMP_SMORE 117
482 #define OP_JMP_UMORE 118
483 #define OP_JMP_SLESSEQ 119
484 #define OP_JMP_ULESSEQ 120
485 #define OP_JMP_SMOREEQ 121
486 #define OP_JMP_UMOREEQ 122
488 /* Builtin operators that it is just simpler to use the compiler for */
506 #define PURE_BITS(FLAGS) ((FLAGS) & 0x3)
508 #define BLOCK 8 /* Triple stores the current block */
509 unsigned char lhs, rhs, misc, targ;
512 #define OP(LHS, RHS, MISC, TARG, FLAGS, NAME) { \
520 static const struct op_info table_ops[] = {
521 [OP_SDIVT ] = OP( 2, 2, 0, 0, PURE | BLOCK , "sdivt"),
522 [OP_UDIVT ] = OP( 2, 2, 0, 0, PURE | BLOCK , "udivt"),
523 [OP_SMUL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smul"),
524 [OP_UMUL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umul"),
525 [OP_SDIV ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sdiv"),
526 [OP_UDIV ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "udiv"),
527 [OP_SMOD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smod"),
528 [OP_UMOD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umod"),
529 [OP_ADD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "add"),
530 [OP_SUB ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sub"),
531 [OP_SL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sl"),
532 [OP_USR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "usr"),
533 [OP_SSR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "ssr"),
534 [OP_AND ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "and"),
535 [OP_XOR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "xor"),
536 [OP_OR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "or"),
537 [OP_POS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "pos"),
538 [OP_NEG ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "neg"),
539 [OP_INVERT ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "invert"),
541 [OP_EQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "eq"),
542 [OP_NOTEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "noteq"),
543 [OP_SLESS ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sless"),
544 [OP_ULESS ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "uless"),
545 [OP_SMORE ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smore"),
546 [OP_UMORE ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umore"),
547 [OP_SLESSEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "slesseq"),
548 [OP_ULESSEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "ulesseq"),
549 [OP_SMOREEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smoreeq"),
550 [OP_UMOREEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umoreeq"),
551 [OP_LFALSE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "lfalse"),
552 [OP_LTRUE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "ltrue"),
554 [OP_LOAD ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "load"),
555 [OP_STORE ] = OP( 0, 2, 0, 0, IMPURE | BLOCK , "store"),
557 [OP_NOOP ] = OP( 0, 0, 0, 0, PURE | BLOCK, "noop"),
559 [OP_INTCONST ] = OP( 0, 0, 0, 0, PURE | DEF, "intconst"),
560 [OP_BLOBCONST ] = OP( 0, 0, 0, 0, PURE, "blobconst"),
561 [OP_ADDRCONST ] = OP( 0, 0, 1, 0, PURE | DEF, "addrconst"),
563 [OP_WRITE ] = OP( 0, 2, 0, 0, PURE | BLOCK, "write"),
564 [OP_READ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "read"),
565 [OP_COPY ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "copy"),
566 [OP_PIECE ] = OP( 0, 0, 1, 0, PURE | DEF, "piece"),
567 [OP_ASM ] = OP(-1, -1, 0, 0, IMPURE, "asm"),
568 [OP_DEREF ] = OP( 0, 1, 0, 0, 0 | DEF | BLOCK, "deref"),
569 [OP_DOT ] = OP( 0, 1, 0, 0, 0 | DEF | BLOCK, "dot"),
571 [OP_VAL ] = OP( 0, 1, 1, 0, 0 | DEF | BLOCK, "val"),
572 [OP_LAND ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "land"),
573 [OP_LOR ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "lor"),
574 [OP_COND ] = OP( 0, 3, 0, 0, 0 | DEF | BLOCK, "cond"),
575 [OP_COMMA ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "comma"),
/* Call is special: it can stand in for almost anything, so its operand counts depend on context */
577 [OP_CALL ] = OP(-1, -1, 1, 0, 0 | BLOCK, "call"),
578 /* The sizes of OP_CALL and OP_VAL_VEC depend upon context */
579 [OP_VAL_VEC ] = OP( 0, -1, 0, 0, 0 | BLOCK, "valvec"),
581 [OP_LIST ] = OP( 0, 1, 1, 0, 0 | DEF, "list"),
582 /* The number of targets for OP_BRANCH depends on context */
583 [OP_BRANCH ] = OP( 0, -1, 0, 1, PURE | BLOCK, "branch"),
584 [OP_LABEL ] = OP( 0, 0, 0, 0, PURE | BLOCK, "label"),
585 [OP_ADECL ] = OP( 0, 0, 0, 0, PURE | BLOCK, "adecl"),
586 [OP_SDECL ] = OP( 0, 0, 1, 0, PURE | BLOCK, "sdecl"),
587 /* The number of RHS elements of OP_PHI depend upon context */
588 [OP_PHI ] = OP( 0, -1, 1, 0, PURE | DEF | BLOCK, "phi"),
590 [OP_CMP ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK, "cmp"),
591 [OP_TEST ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "test"),
592 [OP_SET_EQ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_eq"),
593 [OP_SET_NOTEQ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_noteq"),
594 [OP_SET_SLESS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_sless"),
595 [OP_SET_ULESS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_uless"),
596 [OP_SET_SMORE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_smore"),
597 [OP_SET_UMORE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_umore"),
598 [OP_SET_SLESSEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_slesseq"),
599 [OP_SET_ULESSEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_ulesseq"),
600 [OP_SET_SMOREEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_smoreq"),
601 [OP_SET_UMOREEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_umoreq"),
602 [OP_JMP ] = OP( 0, 0, 0, 1, PURE | BLOCK, "jmp"),
603 [OP_JMP_EQ ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_eq"),
604 [OP_JMP_NOTEQ ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_noteq"),
605 [OP_JMP_SLESS ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_sless"),
606 [OP_JMP_ULESS ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_uless"),
607 [OP_JMP_SMORE ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_smore"),
608 [OP_JMP_UMORE ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_umore"),
609 [OP_JMP_SLESSEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_slesseq"),
610 [OP_JMP_ULESSEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_ulesseq"),
611 [OP_JMP_SMOREEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_smoreq"),
612 [OP_JMP_UMOREEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_umoreq"),
614 [OP_INB ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inb"),
615 [OP_INW ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inw"),
616 [OP_INL ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inl"),
617 [OP_OUTB ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outb"),
618 [OP_OUTW ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outw"),
619 [OP_OUTL ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outl"),
620 [OP_BSF ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "__bsf"),
621 [OP_BSR ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "__bsr"),
622 [OP_RDMSR ] = OP( 2, 1, 0, 0, IMPURE | BLOCK, "__rdmsr"),
623 [OP_WRMSR ] = OP( 0, 3, 0, 0, IMPURE | BLOCK, "__wrmsr"),
624 [OP_HLT ] = OP( 0, 0, 0, 0, IMPURE | BLOCK, "__hlt"),
627 #define OP_MAX (sizeof(table_ops)/sizeof(table_ops[0]))
629 static const char *tops(int index)
631 static const char unknown[] = "unknown op";
635 if (index > OP_MAX) {
638 return table_ops[index].name;
645 struct triple_set *next;
646 struct triple *member;
656 const char *filename;
657 const char *function;
660 struct occurance *parent;
663 struct triple *next, *prev;
664 struct triple_set *use;
667 unsigned char template_id;
668 unsigned short sizes;
669 #define TRIPLE_LHS(SIZES) (((SIZES) >> 0) & 0x0f)
670 #define TRIPLE_RHS(SIZES) (((SIZES) >> 4) & 0xff)
671 #define TRIPLE_MISC(SIZES) (((SIZES) >> 12) & 0x03)
672 #define TRIPLE_TARG(SIZES) (((SIZES) >> 14) & 0x03)
673 #define TRIPLE_SIZE(SIZES) \
674 (TRIPLE_LHS(SIZES) + \
675 TRIPLE_RHS(SIZES) + \
676 TRIPLE_MISC(SIZES) + \
678 #define TRIPLE_SIZES(LHS, RHS, MISC, TARG) \
679 ((((LHS) & 0x0f) << 0) | \
680 (((RHS) & 0xff) << 4) | \
681 (((MISC) & 0x03) << 12) | \
682 (((TARG) & 0x03) << 14))
683 #define TRIPLE_LHS_OFF(SIZES) (0)
684 #define TRIPLE_RHS_OFF(SIZES) (TRIPLE_LHS_OFF(SIZES) + TRIPLE_LHS(SIZES))
685 #define TRIPLE_MISC_OFF(SIZES) (TRIPLE_RHS_OFF(SIZES) + TRIPLE_RHS(SIZES))
686 #define TRIPLE_TARG_OFF(SIZES) (TRIPLE_MISC_OFF(SIZES) + TRIPLE_MISC(SIZES))
687 #define LHS(PTR,INDEX) ((PTR)->param[TRIPLE_LHS_OFF((PTR)->sizes) + (INDEX)])
688 #define RHS(PTR,INDEX) ((PTR)->param[TRIPLE_RHS_OFF((PTR)->sizes) + (INDEX)])
689 #define TARG(PTR,INDEX) ((PTR)->param[TRIPLE_TARG_OFF((PTR)->sizes) + (INDEX)])
690 #define MISC(PTR,INDEX) ((PTR)->param[TRIPLE_MISC_OFF((PTR)->sizes) + (INDEX)])
691 unsigned id; /* A scratch value and finally the register */
692 #define TRIPLE_FLAG_FLATTENED (1 << 31)
693 #define TRIPLE_FLAG_PRE_SPLIT (1 << 30)
694 #define TRIPLE_FLAG_POST_SPLIT (1 << 29)
695 struct occurance *occurance;
700 struct hash_entry *field;
701 struct asm_info *ainfo;
703 struct triple *param[2];
710 struct ins_template {
711 struct reg_info lhs[MAX_LHS + 1], rhs[MAX_RHS + 1];
715 struct ins_template tmpl;
720 struct block_set *next;
721 struct block *member;
724 struct block *work_next;
725 struct block *left, *right;
726 struct triple *first, *last;
728 struct block_set *use;
729 struct block_set *idominates;
730 struct block_set *domfrontier;
732 struct block_set *ipdominates;
733 struct block_set *ipdomfrontier;
741 struct hash_entry *ident;
748 struct hash_entry *ident;
754 struct hash_entry *next;
758 struct macro *sym_define;
759 struct symbol *sym_label;
760 struct symbol *sym_struct;
761 struct symbol *sym_ident;
764 #define HASH_TABLE_SIZE 2048
766 struct compile_state {
767 const char *label_prefix;
768 const char *ofilename;
770 struct file_state *file;
771 struct occurance *last_occurance;
772 const char *function;
773 struct token token[4];
774 struct hash_entry *hash_table[HASH_TABLE_SIZE];
775 struct hash_entry *i_continue;
776 struct hash_entry *i_break;
778 int if_depth, if_value;
780 struct file_state *macro_file;
781 struct triple *main_function;
782 struct block *first_block, *last_block;
789 /* visibility global/local */
790 /* static/auto duration */
791 /* typedef, register, inline */
793 #define STOR_MASK 0x000f
795 #define STOR_GLOBAL 0x0001
797 #define STOR_PERM 0x0002
798 /* Storage specifiers */
799 #define STOR_AUTO 0x0000
800 #define STOR_STATIC 0x0002
801 #define STOR_EXTERN 0x0003
802 #define STOR_REGISTER 0x0004
803 #define STOR_TYPEDEF 0x0008
804 #define STOR_INLINE 0x000c
807 #define QUAL_MASK 0x0070
808 #define QUAL_NONE 0x0000
809 #define QUAL_CONST 0x0010
810 #define QUAL_VOLATILE 0x0020
811 #define QUAL_RESTRICT 0x0040
814 #define TYPE_MASK 0x1f00
815 #define TYPE_INTEGER(TYPE) (((TYPE) >= TYPE_CHAR) && ((TYPE) <= TYPE_ULLONG))
816 #define TYPE_ARITHMETIC(TYPE) (((TYPE) >= TYPE_CHAR) && ((TYPE) <= TYPE_LDOUBLE))
817 #define TYPE_UNSIGNED(TYPE) ((TYPE) & 0x0100)
818 #define TYPE_SIGNED(TYPE) (!TYPE_UNSIGNED(TYPE))
819 #define TYPE_MKUNSIGNED(TYPE) ((TYPE) | 0x0100)
820 #define TYPE_RANK(TYPE) ((TYPE) & ~0x0100)
821 #define TYPE_PTR(TYPE) (((TYPE) & TYPE_MASK) == TYPE_POINTER)
822 #define TYPE_DEFAULT 0x0000
823 #define TYPE_VOID 0x0100
824 #define TYPE_CHAR 0x0200
825 #define TYPE_UCHAR 0x0300
826 #define TYPE_SHORT 0x0400
827 #define TYPE_USHORT 0x0500
828 #define TYPE_INT 0x0600
829 #define TYPE_UINT 0x0700
830 #define TYPE_LONG 0x0800
831 #define TYPE_ULONG 0x0900
832 #define TYPE_LLONG 0x0a00 /* long long */
833 #define TYPE_ULLONG 0x0b00
834 #define TYPE_FLOAT 0x0c00
835 #define TYPE_DOUBLE 0x0d00
836 #define TYPE_LDOUBLE 0x0e00 /* long double */
837 #define TYPE_STRUCT 0x1000
838 #define TYPE_ENUM 0x1100
839 #define TYPE_POINTER 0x1200
841 * type->left holds the type pointed to.
843 #define TYPE_FUNCTION 0x1300
844 /* For TYPE_FUNCTION:
845 * type->left holds the return type.
846 * type->right holds the...
848 #define TYPE_PRODUCT 0x1400
849 /* TYPE_PRODUCT is a basic building block when defining structures
850 * type->left holds the type that appears first in memory.
851 * type->right holds the type that appears next in memory.
853 #define TYPE_OVERLAP 0x1500
854 /* TYPE_OVERLAP is a basic building block when defining unions
 * type->left and type->right hold the two types that overlap
856 * each other in memory.
858 #define TYPE_ARRAY 0x1600
/* TYPE_ARRAY is a basic building block when defining arrays.
860 * type->left holds the type we are an array of.
 * type->elements holds the number of elements.
864 #define ELEMENT_COUNT_UNSPECIFIED (~0UL)
868 struct type *left, *right;
870 struct hash_entry *field_ident;
871 struct hash_entry *type_ident;
874 #define MAX_REGISTERS 75
875 #define MAX_REG_EQUIVS 16
876 #define REGISTER_BITS 16
877 #define MAX_VIRT_REGISTERS (1<<REGISTER_BITS)
878 #define TEMPLATE_BITS 7
879 #define MAX_TEMPLATES (1<<TEMPLATE_BITS)
/* Virtual registers live at or above MAX_REGISTERS; the register
 * allocator later maps them onto real machine registers.
 */
#define REG_UNNEEDED 1
#define REG_VIRT0 (MAX_REGISTERS + 0)
#define REG_VIRT1 (MAX_REGISTERS + 1)
#define REG_VIRT2 (MAX_REGISTERS + 2)
#define REG_VIRT3 (MAX_REGISTERS + 3)
#define REG_VIRT4 (MAX_REGISTERS + 4)
#define REG_VIRT5 (MAX_REGISTERS + 5)
/* Fix copy/paste bug: REG_VIRT6..REG_VIRT9 were all defined as
 * (MAX_REGISTERS + 5), silently aliasing four distinct virtual
 * registers to REG_VIRT5 and to each other.
 */
#define REG_VIRT6 (MAX_REGISTERS + 6)
#define REG_VIRT7 (MAX_REGISTERS + 7)
#define REG_VIRT8 (MAX_REGISTERS + 8)
#define REG_VIRT9 (MAX_REGISTERS + 9)
894 /* Provision for 8 register classes */
896 #define REGC_SHIFT REGISTER_BITS
897 #define REGC_MASK (((1 << MAX_REGC) - 1) << REGISTER_BITS)
898 #define REG_MASK (MAX_VIRT_REGISTERS -1)
899 #define ID_REG(ID) ((ID) & REG_MASK)
900 #define SET_REG(ID, REG) ((ID) = (((ID) & ~REG_MASK) | ((REG) & REG_MASK)))
901 #define ID_REGCM(ID) (((ID) & REGC_MASK) >> REGC_SHIFT)
902 #define SET_REGCM(ID, REGCM) ((ID) = (((ID) & ~REGC_MASK) | (((REGCM) << REGC_SHIFT) & REGC_MASK)))
903 #define SET_INFO(ID, INFO) ((ID) = (((ID) & ~(REG_MASK | REGC_MASK)) | \
904 (((INFO).reg) & REG_MASK) | ((((INFO).regcm) << REGC_SHIFT) & REGC_MASK)))
906 static unsigned arch_reg_regcm(struct compile_state *state, int reg);
907 static unsigned arch_regcm_normalize(struct compile_state *state, unsigned regcm);
908 static unsigned arch_regcm_reg_normalize(struct compile_state *state, unsigned regcm);
909 static void arch_reg_equivs(
910 struct compile_state *state, unsigned *equiv, int reg);
911 static int arch_select_free_register(
912 struct compile_state *state, char *used, int classes);
913 static unsigned arch_regc_size(struct compile_state *state, int class);
914 static int arch_regcm_intersect(unsigned regcm1, unsigned regcm2);
915 static unsigned arch_type_to_regcm(struct compile_state *state, struct type *type);
916 static const char *arch_reg_str(int reg);
917 static struct reg_info arch_reg_constraint(
918 struct compile_state *state, struct type *type, const char *constraint);
919 static struct reg_info arch_reg_clobber(
920 struct compile_state *state, const char *clobber);
921 static struct reg_info arch_reg_lhs(struct compile_state *state,
922 struct triple *ins, int index);
923 static struct reg_info arch_reg_rhs(struct compile_state *state,
924 struct triple *ins, int index);
925 static struct triple *transform_to_arch_instruction(
926 struct compile_state *state, struct triple *ins);
930 #define DEBUG_ABORT_ON_ERROR 0x0001
931 #define DEBUG_INTERMEDIATE_CODE 0x0002
932 #define DEBUG_CONTROL_FLOW 0x0004
933 #define DEBUG_BASIC_BLOCKS 0x0008
934 #define DEBUG_FDOMINATORS 0x0010
935 #define DEBUG_RDOMINATORS 0x0020
936 #define DEBUG_TRIPLES 0x0040
937 #define DEBUG_INTERFERENCE 0x0080
938 #define DEBUG_ARCH_CODE 0x0100
939 #define DEBUG_CODE_ELIMINATION 0x0200
940 #define DEBUG_INSERTED_COPIES 0x0400
942 #define GLOBAL_SCOPE_DEPTH 1
943 #define FUNCTION_SCOPE_DEPTH (GLOBAL_SCOPE_DEPTH + 1)
945 static void compile_file(struct compile_state *old_state, const char *filename, int local);
/* Abort-path cleanup: close and delete the partially written output
 * file so a failed compile leaves nothing behind.
 */
static void do_cleanup(struct compile_state *state)
	fclose(state->output);
	unlink(state->ofilename);
/* Compute the current column within the line being lexed, expanding
 * tabs to the next multiple of 8.
 */
static int get_col(struct file_state *file)
	ptr = file->line_start;
	for(col = 0; ptr < end; ptr++) {
			/* advance to the next 8-column tab stop */
			col = (col & ~7) + 8;
/* Print a "file:line.col: " location prefix to fp.  Prefers the
 * location recorded on the triple (walking up parent links to the
 * outermost inline site); falls back to the current lexer position.
 */
static void loc(FILE *fp, struct compile_state *state, struct triple *triple)
	if (triple && triple->occurance) {
		struct occurance *spot;
		spot = triple->occurance;
		while(spot->parent) {
		fprintf(fp, "%s:%d.%d: ",
			spot->filename, spot->line, spot->col);
	col = get_col(state->file);
	fprintf(fp, "%s:%d.%d: ",
		state->file->report_name, state->file->report_line, col);
/* Report an unrecoverable compiler bug (not a user error) at ptr's
 * source location and stop.
 */
static void __internal_error(struct compile_state *state, struct triple *ptr,
	loc(stderr, state, ptr);
		fprintf(stderr, "%p %s ", ptr, tops(ptr->op));
	fprintf(stderr, "Internal compiler error: ");
	vfprintf(stderr, fmt, args);
	fprintf(stderr, "\n");
/* Report a compiler-internal warning (non-fatal) at ptr's location. */
static void __internal_warning(struct compile_state *state, struct triple *ptr,
	va_start(args, fmt);
	loc(stderr, state, ptr);
		fprintf(stderr, "%p %s ", ptr, tops(ptr->op));
	fprintf(stderr, "Internal compiler warning: ");
	vfprintf(stderr, fmt, args);
	fprintf(stderr, "\n");
/* Report an error in the user's program at ptr's location; aborts
 * immediately when DEBUG_ABORT_ON_ERROR is set in state->debug.
 */
static void __error(struct compile_state *state, struct triple *ptr,
	va_start(args, fmt);
	loc(stderr, state, ptr);
	vfprintf(stderr, fmt, args);
	fprintf(stderr, "\n");
	if (state->debug & DEBUG_ABORT_ON_ERROR) {
/* Report a warning in the user's program; compilation continues. */
static void __warning(struct compile_state *state, struct triple *ptr,
	va_start(args, fmt);
	loc(stderr, state, ptr);
	fprintf(stderr, "warning: ");
	vfprintf(stderr, fmt, args);
	fprintf(stderr, "\n");
/* When DEBUG_ERROR_MESSAGES is enabled, every diagnostic is prefixed
 * with the file/function/line inside the compiler that raised it
 * (comma-operator trick: the fprintf runs before the real call).
 */
#if DEBUG_ERROR_MESSAGES
# define internal_error fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),__internal_error
# define internal_warning fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),__internal_warning
# define error fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),__error
# define warning fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),__warning
# define internal_error __internal_error
# define internal_warning __internal_warning
# define error __error
# define warning __warning
/* Marker for unimplemented compiler paths. */
#define FINISHME() warning(state, 0, "FINISHME @ %s.%s:%d", __FILE__, __func__, __LINE__)
1069 static void valid_op(struct compile_state *state, int op)
1071 char *fmt = "invalid op: %d";
1073 internal_error(state, 0, fmt, op);
1076 internal_error(state, 0, fmt, op);
/* Validate the op field of an instruction triple. */
static void valid_ins(struct compile_state *state, struct triple *ptr)
	valid_op(state, ptr->op);
1085 static void process_trigraphs(struct compile_state *state)
1087 char *src, *dest, *end;
1088 struct file_state *file;
1090 src = dest = file->buf;
1091 end = file->buf + file->size;
1092 while((end - src) >= 3) {
1093 if ((src[0] == '?') && (src[1] == '?')) {
1096 case '=': c = '#'; break;
1097 case '/': c = '\\'; break;
1098 case '\'': c = '^'; break;
1099 case '(': c = '['; break;
1100 case ')': c = ']'; break;
1101 case '!': c = '!'; break;
1102 case '<': c = '{'; break;
1103 case '>': c = '}'; break;
1104 case '-': c = '~'; break;
1121 file->size = dest - file->buf;
/* Delete backslash-newline pairs, joining continued lines in place
 * (C translation phase 2).
 */
static void splice_lines(struct compile_state *state)
	char *src, *dest, *end;
	struct file_state *file;
	src = dest = file->buf;
	end = file->buf + file->size;
	while((end - src) >= 2) {
		if ((src[0] == '\\') && (src[1] == '\n')) {
	file->size = dest - file->buf;
1145 static struct type void_type;
/* Record that `user` consumes the value of `used`: prepend user to
 * used's use list, unless it is already present.
 */
static void use_triple(struct triple *used, struct triple *user)
	struct triple_set **ptr, *new;
		if ((*ptr)->member == user) {
		ptr = &(*ptr)->next;
	/* Append new to the head of the list,
	 * copy_func and rename_block_variables
	 */
	new = xcmalloc(sizeof(*new), "triple_set");
	new->next = used->use;
/* Remove unuser from used's use list. */
static void unuse_triple(struct triple *used, struct triple *unuser)
	struct triple_set *use, **ptr;
		if (use->member == unuser) {
/* Drop a reference to occurance; when the count reaches zero the
 * parent chain is released recursively.
 */
static void put_occurance(struct occurance *occurance)
	occurance->count -= 1;
	if (occurance->count <= 0) {
		if (occurance->parent) {
			put_occurance(occurance->parent);
/* Take an additional reference to occurance. */
static void get_occurance(struct occurance *occurance)
	occurance->count += 1;
/* Build an occurance describing the current source position.  The
 * most recently created occurance is cached in state->last_occurance;
 * consecutive triples generated from the same spot share one object
 * (reference counted via get_occurance/put_occurance).
 */
static struct occurance *new_occurance(struct compile_state *state)
	struct occurance *result, *last;
	const char *filename;
	const char *function;
	filename = state->file->report_name;
	line = state->file->report_line;
	col = get_col(state->file);
	if (state->function) {
		function = state->function;
	last = state->last_occurance;
	/* Reuse the cached occurance when the position is unchanged. */
		(last->col == col) &&
		(last->line == line) &&
		(last->function == function) &&
		(strcmp(last->filename, filename) == 0)) {
		get_occurance(last);
	state->last_occurance = 0;
	put_occurance(last);
	result = xmalloc(sizeof(*result), "occurance");
	result->filename = filename;
	result->function = function;
	result->line = line;
	state->last_occurance = result;
/* Build an occurance for code inlined from position `new` into the
 * context `orig`; result->parent records the inline call site.  Like
 * new_occurance, the most recent result is cached and reused when the
 * position and parent are unchanged.
 */
static struct occurance *inline_occurance(struct compile_state *state,
	struct occurance *new, struct occurance *orig)
	struct occurance *result, *last;
	last = state->last_occurance;
		(last->parent == orig) &&
		(last->col == new->col) &&
		(last->line == new->line) &&
		(last->function == new->function) &&
		(last->filename == new->filename)) {
		get_occurance(last);
	state->last_occurance = 0;
	put_occurance(last);
	get_occurance(orig);
	result = xmalloc(sizeof(*result), "occurance");
	result->filename = new->filename;
	result->function = new->function;
	result->line = new->line;
	result->col = new->col;
	result->parent = orig;
	state->last_occurance = result;
1280 static struct occurance dummy_occurance = {
1282 .filename = __FILE__,
/* The zero triple is used as a place holder when we are removing pointers
 * from a triple. Having it allows certain sanity checks to pass even
 * when the original triple that was pointed to is gone.
 */
static struct triple zero_triple = {
	.next = &zero_triple,
	.prev = &zero_triple,
	.sizes = TRIPLE_SIZES(0, 0, 0, 0),
	.id = -1, /* An invalid id */
	.u = { .cval = 0, },
	.occurance = &dummy_occurance,
	.param = { [0] = 0, [1] = 0, },
/* Compute the packed sizes word (lhs/rhs/misc/targ operand counts)
 * for an instruction of the given op.  Most counts come straight from
 * table_ops; OP_CALL, OP_VAL_VEC, OP_BRANCH, OP_PHI and OP_ASM are
 * context dependent (see the -1 entries in table_ops) and derive
 * their counts from `type` or the *_wanted arguments.  Out-of-range
 * counts are internal errors.
 */
static unsigned short triple_sizes(struct compile_state *state,
	int op, struct type *type, int lhs_wanted, int rhs_wanted,
	struct occurance *occurance)
	int lhs, rhs, misc, targ;
	struct triple dummy;
	dummy.occurance = occurance;
	valid_op(state, op);
	lhs = table_ops[op].lhs;
	rhs = table_ops[op].rhs;
	misc = table_ops[op].misc;
	targ = table_ops[op].targ;
	if (op == OP_CALL) {
		/* Count the function's parameters by walking the
		 * TYPE_PRODUCT chain of the function type. */
		param = type->right;
		while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
			param = param->right;
		if ((param->type & TYPE_MASK) != TYPE_VOID) {
		/* Struct returns produce one LHS piece per element. */
		if ((type->left->type & TYPE_MASK) == TYPE_STRUCT) {
			lhs = type->left->elements;
	else if (op == OP_VAL_VEC) {
		rhs = type->elements;
	else if ((op == OP_BRANCH) || (op == OP_PHI)) {
	else if (op == OP_ASM) {
	if ((rhs < 0) || (rhs > MAX_RHS)) {
		internal_error(state, &dummy, "bad rhs %d", rhs);
	if ((lhs < 0) || (lhs > MAX_LHS)) {
		internal_error(state, &dummy, "bad lhs");
	if ((misc < 0) || (misc > MAX_MISC)) {
		internal_error(state, &dummy, "bad misc");
	if ((targ < 0) || (targ > MAX_TARG)) {
		internal_error(state, &dummy, "bad targs");
	return TRIPLE_SIZES(lhs, rhs, misc, targ);
/* Allocate a zero-initialized triple with enough trailing param slots
 * for the computed sizes.  struct triple embeds two param slots; only
 * the operands beyond those two require extra space.
 */
static struct triple *alloc_triple(struct compile_state *state,
	int op, struct type *type, int lhs, int rhs,
	struct occurance *occurance)
	size_t size, sizes, extra_count, min_count;
	sizes = triple_sizes(state, op, type, lhs, rhs, occurance);
	min_count = sizeof(ret->param)/sizeof(ret->param[0]);
	extra_count = TRIPLE_SIZE(sizes);
	extra_count = (extra_count < min_count)? 0 : extra_count - min_count;
	size = sizeof(*ret) + sizeof(ret->param[0]) * extra_count;
	ret = xcmalloc(size, "tripple");
	ret->occurance = occurance;
/* Duplicate a triple: allocate a new one of the same shape, copy the
 * header and all param[] slots.  Bumps the occurance refcount since the
 * copy shares src's source-location record.
 */
1385 struct triple *dup_triple(struct compile_state *state, struct triple *src)
1388 	int src_lhs, src_rhs, src_size;
1389 	src_lhs = TRIPLE_LHS(src->sizes);
1390 	src_rhs = TRIPLE_RHS(src->sizes);
1391 	src_size = TRIPLE_SIZE(src->sizes);
1392 	get_occurance(src->occurance);
1393 	dup = alloc_triple(state, src->op, src->type, src_lhs, src_rhs,
	/* Copy the fixed header, then the variable-length param array */
1395 	memcpy(dup, src, sizeof(*src));
1396 	memcpy(dup->param, src->param, src_size * sizeof(src->param[0]));
/* Allocate a triple stamped with the current parse position. */
1400 static struct triple *new_triple(struct compile_state *state,
1401 	int op, struct type *type, int lhs, int rhs)
1404 	struct occurance *occurance;
1405 	occurance = new_occurance(state);
1406 	ret = alloc_triple(state, op, type, lhs, rhs, occurance);
/* Allocate a triple (default counts via lhs/rhs == -1) and fill in up
 * to two operands.  param[1] is only set when the triple actually has
 * a second slot (guard elided in this listing).
 */
1410 static struct triple *build_triple(struct compile_state *state,
1411 	int op, struct type *type, struct triple *left, struct triple *right,
1412 	struct occurance *occurance)
1416 	ret = alloc_triple(state, op, type, -1, -1, occurance);
1417 	count = TRIPLE_SIZE(ret->sizes);
1419 		ret->param[0] = left;
1422 		ret->param[1] = right;
/* Convenience constructor: like build_triple() but stamps the current
 * parse position instead of taking an explicit occurance. */
1427 static struct triple *triple(struct compile_state *state,
1428 	int op, struct type *type, struct triple *left, struct triple *right)
1432 	ret = new_triple(state, op, type, -1, -1);
1433 	count = TRIPLE_SIZE(ret->sizes);
1435 		ret->param[0] = left;
1438 		ret->param[1] = right;
/* Build an OP_BRANCH to targ; with a test expression it is conditional
 * (1 rhs), without one it is unconditional (0 rhs).  The target must be
 * an OP_LABEL, and the use is recorded on the label.
 */
1443 static struct triple *branch(struct compile_state *state,
1444 	struct triple *targ, struct triple *test)
1447 	ret = new_triple(state, OP_BRANCH, &void_type, -1, test?1:0);
1451 	TARG(ret, 0) = targ;
1452 	/* record the branch target was used */
1453 	if (!targ || (targ->op != OP_LABEL)) {
1454 		internal_error(state, 0, "branch not to label");
1455 	use_triple(targ, ret);
/* Splice ptr into the doubly linked instruction list just before
 * 'first'.  A triple that is already flattened, or already linked into
 * a list (next != self), must not be inserted twice.  When inserted
 * after a conditional branch, the fall-through use is transferred from
 * 'first' to ptr.
 */
1461 static void insert_triple(struct compile_state *state,
1462 	struct triple *first, struct triple *ptr)
1465 	if ((ptr->id & TRIPLE_FLAG_FLATTENED) || (ptr->next != ptr)) {
1466 		internal_error(state, ptr, "expression already used");
1469 	ptr->prev       = first->prev;
1470 	ptr->prev->next = ptr;
1471 	ptr->next->prev = ptr;
	/* A conditional branch implicitly "uses" its fall-through successor */
1472 	if ((ptr->prev->op == OP_BRANCH) &&
1473 		TRIPLE_RHS(ptr->prev->sizes)) {
1474 		unuse_triple(first, ptr->prev);
1475 		use_triple(ptr, ptr->prev);
/* Return nonzero when ins->u.block holds the owning basic block
 * (per-op BLOCK flag in table_ops[]). */
1480 static int triple_stores_block(struct compile_state *state, struct triple *ins)
1482 	/* This function is used to determine if u.block
1483 	 * is utilized to store the current block number.
1486 	valid_ins(state, ins);
1487 	stores_block = (table_ops[ins->op].flags & BLOCK) == BLOCK;
1488 	return stores_block;
/* Find the basic block containing ins by walking backwards until a
 * triple that records its block is found.  Walking past the start of
 * main_function, or hitting a self-linked triple, is a fatal internal
 * error.
 */
1491 static struct block *block_of_triple(struct compile_state *state,
1494 	struct triple *first;
1495 	first = RHS(state->main_function, 0);
1496 	while(ins != first && !triple_stores_block(state, ins)) {
1497 		if (ins == ins->prev) {
1498 			internal_error(state, 0, "ins == ins->prev?");
1502 	if (!triple_stores_block(state, ins)) {
1503 		internal_error(state, ins, "Cannot find block");
1505 	return ins->u.block;
/* Build a new triple and insert it immediately BEFORE base, keeping it
 * in base's basic block (updating block->first when base was the first
 * instruction -- tail elided in this listing).  An OP_PIECE base is
 * first resolved to its parent instruction.
 */
1508 static struct triple *pre_triple(struct compile_state *state,
1509 	struct triple *base,
1510 	int op, struct type *type, struct triple *left, struct triple *right)
1512 	struct block *block;
1514 	/* If I am an OP_PIECE jump to the real instruction */
1515 	if (base->op == OP_PIECE) {
1516 		base = MISC(base, 0);
1518 	block = block_of_triple(state, base);
	/* The new triple shares base's source location */
1519 	get_occurance(base->occurance);
1520 	ret = build_triple(state, op, type, left, right, base->occurance);
1521 	if (triple_stores_block(state, ret)) {
1522 		ret->u.block = block;
1524 	insert_triple(state, base, ret);
1525 	if (block->first == base) {
/* Build a new triple and insert it immediately AFTER base (and after
 * base's OP_PIECE results, if any), keeping it in base's basic block
 * (updating block->last when base was the last instruction -- tail
 * elided in this listing).
 */
1531 static struct triple *post_triple(struct compile_state *state,
1532 	struct triple *base,
1533 	int op, struct type *type, struct triple *left, struct triple *right)
1535 	struct block *block;
1538 	/* If I am an OP_PIECE jump to the real instruction */
1539 	if (base->op == OP_PIECE) {
1540 		base = MISC(base, 0);
1542 	/* If I have a left hand side skip over it */
1543 	zlhs = TRIPLE_LHS(base->sizes);
1545 		base = LHS(base, zlhs - 1);
1548 	block = block_of_triple(state, base);
	/* The new triple shares base's source location */
1549 	get_occurance(base->occurance);
1550 	ret = build_triple(state, op, type, left, right, base->occurance);
1551 	if (triple_stores_block(state, ret)) {
1552 		ret->u.block = block;
1554 	insert_triple(state, base->next, ret);
1555 	if (block->last == base) {
/* Create a fresh OP_LABEL triple (labels carry no meaningful type). */
1561 static struct triple *label(struct compile_state *state)
1563 	/* Labels don't get a type */
1564 	struct triple *result;
1565 	result = triple(state, OP_LABEL, &void_type, 0, 0);
/* Debug dump of one triple: address, split flags, register, template,
 * op name, op-specific payload, parameters, and the occurance chain
 * (file:line.col for each macro-expansion parent).
 */
1569 static void display_triple(FILE *fp, struct triple *ins)
1571 	struct occurance *ptr;
1575 	if (ins->id & TRIPLE_FLAG_PRE_SPLIT) {
1578 	if (ins->id & TRIPLE_FLAG_POST_SPLIT) {
1581 	reg = arch_reg_str(ID_REG(ins->id));
1582 	if (ins->op == OP_INTCONST) {
1583 		fprintf(fp, "(%p) %c%c %-7s %-2d %-10s <0x%08lx>         ",
1584 			ins, pre, post, reg, ins->template_id, tops(ins->op),
1587 	else if (ins->op == OP_ADDRCONST) {
1588 		fprintf(fp, "(%p) %c%c %-7s %-2d %-10s %-10p <0x%08lx>",
1589 			ins, pre, post, reg, ins->template_id, tops(ins->op),
1590 			MISC(ins, 0), ins->u.cval);
	/* Generic case: print every param slot */
1594 		fprintf(fp, "(%p) %c%c %-7s %-2d %-10s",
1595 			ins, pre, post, reg, ins->template_id, tops(ins->op));
1596 		count = TRIPLE_SIZE(ins->sizes);
1597 		for(i = 0; i < count; i++) {
1598 			fprintf(fp, " %-10p", ins->param[i]);
	/* Walk the occurance chain: innermost first, then macro parents */
1605 	for(ptr = ins->occurance; ptr; ptr = ptr->parent) {
1606 		fprintf(fp, " %s,%s:%d.%d",
	/* NOTE(review): surrounding definition elided -- this use-list dump
	 * appears to belong to a separate debug helper. */
1615 	struct triple_set *user;
1616 	for(user = ptr->use; user; user = user->next) {
1617 		fprintf(fp, "use: %p\n", user->member);
/* Return nonzero when ins has no side effects (PURE flag in
 * table_ops[]); aborts if the op's purity was never classified. */
1624 static int triple_is_pure(struct compile_state *state, struct triple *ins)
1626 	/* Does the triple have no side effects.
1627 	 * I.e. Re-executing the triple with the same arguments
1628 	 * gives the same value.
1631 	valid_ins(state, ins);
1632 	pure = PURE_BITS(table_ops[ins->op].flags);
1633 	if ((pure != PURE) && (pure != IMPURE)) {
1634 		internal_error(state, 0, "Purity of %s not known\n",
1637 	return pure == PURE;
/* Branch classification predicates.  An op is a branch when its table
 * entry declares at least one branch target; the presence or absence of
 * a single rhs (the condition) distinguishes conditional from
 * unconditional branches.
 */
1640 static int triple_is_branch(struct compile_state *state, struct triple *ins)
1642 	/* This function is used to determine which triples need
1646 	valid_ins(state, ins);
1647 	is_branch = (table_ops[ins->op].targ != 0);
1651 static int triple_is_cond_branch(struct compile_state *state, struct triple *ins)
1653 	/* A conditional branch has the condition argument as a single
1656 	return triple_is_branch(state, ins) &&
1657 		(TRIPLE_RHS(ins->sizes) == 1);
1660 static int triple_is_uncond_branch(struct compile_state *state, struct triple *ins)
1662 	/* An unconditional branch has no RHS parameters.
1664 	return triple_is_branch(state, ins) &&
1665 		(TRIPLE_RHS(ins->sizes) == 0);
/* Return nonzero when ins defines a value (DEF flag in table_ops[]). */
1668 static int triple_is_def(struct compile_state *state, struct triple *ins)
1670 	/* This function is used to determine which triples need
1674 	valid_ins(state, ins);
1675 	is_def = (table_ops[ins->op].flags & DEF) == DEF;
/* Generic iterator over a param sub-vector: pass last == 0 to get the
 * first slot, or the previous return value to get the next; returns 0
 * when exhausted (start/end branches elided in this listing).
 */
1679 static struct triple **triple_iter(struct compile_state *state,
1680 	size_t count, struct triple **vector,
1681 	struct triple *ins, struct triple **last)
1683 	struct triple **ret;
1689 	else if ((last >= vector) && (last < (vector + count - 1))) {
/* Per-section iterators built on triple_iter(): walk the LHS, RHS and
 * MISC slots respectively.  triple_targ is special-cased because a
 * conditional branch also has an implicit fall-through target stored
 * past the explicit targets.
 */
1697 static struct triple **triple_lhs(struct compile_state *state,
1698 	struct triple *ins, struct triple **last)
1700 	return triple_iter(state, TRIPLE_LHS(ins->sizes), &LHS(ins,0),
1704 static struct triple **triple_rhs(struct compile_state *state,
1705 	struct triple *ins, struct triple **last)
1707 	return triple_iter(state, TRIPLE_RHS(ins->sizes), &RHS(ins,0),
1711 static struct triple **triple_misc(struct compile_state *state,
1712 	struct triple *ins, struct triple **last)
1714 	return triple_iter(state, TRIPLE_MISC(ins->sizes), &MISC(ins,0),
1718 static struct triple **triple_targ(struct compile_state *state,
1719 	struct triple *ins, struct triple **last)
1722 	struct triple **ret, **vector;
1724 	count = TRIPLE_TARG(ins->sizes);
1725 	vector = &TARG(ins, 0);
1730 	else if ((last >= vector) && (last < (vector + count - 1))) {
	/* Conditional branch: after the last explicit target, also yield
	 * the fall-through slot */
1733 	else if ((last == (vector + count - 1)) &&
1734 		TRIPLE_RHS(ins->sizes)) {
/* Sanity check: confirm 'user' really references 'used' in one of its
 * param slots (or as the fall-through of a branch); otherwise abort
 * with an internal error.
 */
1742 static void verify_use(struct compile_state *state,
1743 	struct triple *user, struct triple *used)
1746 	size = TRIPLE_SIZE(user->sizes);
1747 	for(i = 0; i < size; i++) {
1748 		if (user->param[i] == used) {
	/* A branch also implicitly uses its fall-through successor */
1752 	if (triple_is_branch(state, user)) {
1753 		if (user->next == used) {
1758 	internal_error(state, user, "%s(%p) does not use %s(%p)",
1759 		tops(user->op), user, tops(used->op), used);
/* Return the index of 'used' within user's RHS slots (verifying the
 * use relationship first); not-found handling is elided in this
 * listing.
 */
1763 static int find_rhs_use(struct compile_state *state,
1764 	struct triple *user, struct triple *used)
1766 	struct triple **param;
1768 	verify_use(state, user, used);
1769 	size = TRIPLE_RHS(user->sizes);
1770 	param = &RHS(user, 0);
1771 	for(i = 0; i < size; i++) {
1772 		if (param[i] == used) {
/* Unlink ptr from the instruction list and release it.  A triple that
 * still has users (ptr->use) must not be freed.  The memory is
 * poisoned with 0xff bytes before release to catch use-after-free.
 */
1779 static void free_triple(struct compile_state *state, struct triple *ptr)
	/* Total size = fixed header + actual param[] slot count */
1782 	size = sizeof(*ptr) - sizeof(ptr->param) +
1783 		(sizeof(ptr->param[0])*TRIPLE_SIZE(ptr->sizes));
1784 	ptr->prev->next = ptr->next;
1785 	ptr->next->prev = ptr->prev;
1787 		internal_error(state, ptr, "ptr->use != 0");
1789 	put_occurance(ptr->occurance);
1790 	memset(ptr, -1, size);
/* Fully detach ptr from the IR and free it:
 *  1. move block->first/last off ptr if it bounds its block,
 *  2. drop ptr from the use lists of everything it references,
 *  3. rewrite every remaining reference TO ptr to point at the
 *     zero_triple sentinel,
 *  4. free the triple itself.
 */
1794 static void release_triple(struct compile_state *state, struct triple *ptr)
1796 	struct triple_set *set, *next;
1797 	struct triple **expr;
1798 	struct block *block;
1799 	/* Make certain that we are not the first or last element of a block */
1800 	block = block_of_triple(state, ptr);
1801 	if (block && (block->last == ptr)) {
1802 		block->last = ptr->prev;
1804 	if (block && (block->first == ptr)) {
1805 		block->first = ptr->next;
1807 	/* Remove ptr from use chains where it is the user */
1808 	expr = triple_rhs(state, ptr, 0);
1809 	for(; expr; expr = triple_rhs(state, ptr, expr)) {
1811 			unuse_triple(*expr, ptr);
1814 	expr = triple_lhs(state, ptr, 0);
1815 	for(; expr; expr = triple_lhs(state, ptr, expr)) {
1817 			unuse_triple(*expr, ptr);
1820 	expr = triple_misc(state, ptr, 0);
1821 	for(; expr; expr = triple_misc(state, ptr, expr)) {
1823 			unuse_triple(*expr, ptr);
1826 	expr = triple_targ(state, ptr, 0);
1827 	for(; expr; expr = triple_targ(state, ptr, expr)) {
1829 			unuse_triple(*expr, ptr);
1832 	/* Remove ptr from use chains where it is used */
1833 	for(set = ptr->use; set; set = next) {
	/* Redirect each user's reference to the zero_triple sentinel so
	 * later sanity checks still find a valid pointer */
1835 		expr = triple_rhs(state, set->member, 0);
1836 		for(; expr; expr = triple_rhs(state, set->member, expr)) {
1838 				*expr = &zero_triple;
1841 		expr = triple_lhs(state, set->member, 0);
1842 		for(; expr; expr = triple_lhs(state, set->member, expr)) {
1844 				*expr = &zero_triple;
1847 		expr = triple_misc(state, set->member, 0);
1848 		for(; expr; expr = triple_misc(state, set->member, expr)) {
1850 				*expr = &zero_triple;
1853 		expr = triple_targ(state, set->member, 0);
1854 		for(; expr; expr = triple_targ(state, set->member, expr)) {
1856 				*expr = &zero_triple;
1859 		unuse_triple(ptr, set->member);
1861 	free_triple(state, ptr);
1864 static void print_triple(struct compile_state *state, struct triple *ptr);
/* Token codes for the lexer.  Ordering matters: keywords occupy the
 * contiguous range [TOK_FIRST_KEYWORD, TOK_LAST_KEYWORD] and
 * preprocessor directive names the range [TOK_FIRST_MACRO,
 * TOK_LAST_MACRO], which the ident_to_* helpers test with range
 * comparisons.
 */
1866 #define TOK_UNKNOWN       0
/* Punctuation */
1869 #define TOK_LBRACE        3
1870 #define TOK_RBRACE        4
1874 #define TOK_LBRACKET      8
1875 #define TOK_RBRACKET      9
1876 #define TOK_LPAREN        10
1877 #define TOK_RPAREN        11
/* Operators */
1882 #define TOK_TIMESEQ       16
1883 #define TOK_DIVEQ         17
1884 #define TOK_MODEQ         18
1885 #define TOK_PLUSEQ        19
1886 #define TOK_MINUSEQ       20
1889 #define TOK_ANDEQ         23
1890 #define TOK_XOREQ         24
1893 #define TOK_NOTEQ         27
1894 #define TOK_QUEST         28
1895 #define TOK_LOGOR         29
1896 #define TOK_LOGAND        30
1900 #define TOK_LESSEQ        34
1901 #define TOK_MOREEQ        35
1905 #define TOK_MINUS         39
1908 #define TOK_PLUSPLUS      42
1909 #define TOK_MINUSMINUS    43
1911 #define TOK_ARROW         45
1913 #define TOK_TILDE         47
/* Literals and identifiers */
1914 #define TOK_LIT_STRING    48
1915 #define TOK_LIT_CHAR      49
1916 #define TOK_LIT_INT       50
1917 #define TOK_LIT_FLOAT     51
1918 #define TOK_MACRO         52
1919 #define TOK_CONCATENATE   53
1921 #define TOK_IDENT         54
1922 #define TOK_STRUCT_NAME   55
1923 #define TOK_ENUM_CONST    56
1924 #define TOK_TYPE_NAME     57
/* Keywords -- must stay contiguous for the FIRST/LAST range checks */
1927 #define TOK_BREAK         59
1930 #define TOK_CONST         62
1931 #define TOK_CONTINUE      63
1932 #define TOK_DEFAULT       64
1934 #define TOK_DOUBLE        66
1937 #define TOK_EXTERN        69
1938 #define TOK_FLOAT         70
1942 #define TOK_INLINE        74
1945 #define TOK_REGISTER      77
1946 #define TOK_RESTRICT      78
1947 #define TOK_RETURN        79
1948 #define TOK_SHORT         80
1949 #define TOK_SIGNED        81
1950 #define TOK_SIZEOF        82
1951 #define TOK_STATIC        83
1952 #define TOK_STRUCT        84
1953 #define TOK_SWITCH        85
1954 #define TOK_TYPEDEF       86
1955 #define TOK_UNION         87
1956 #define TOK_UNSIGNED      88
1958 #define TOK_VOLATILE      90
1959 #define TOK_WHILE         91
1961 #define TOK_ATTRIBUTE     93
1962 #define TOK_ALIGNOF       94
1963 #define TOK_FIRST_KEYWORD TOK_AUTO
1964 #define TOK_LAST_KEYWORD  TOK_ALIGNOF
/* Preprocessor directive names -- contiguous for the same reason */
1966 #define TOK_DEFINE        100
1967 #define TOK_UNDEF         101
1968 #define TOK_INCLUDE       102
1969 #define TOK_LINE          103
1970 #define TOK_ERROR         104
1971 #define TOK_WARNING       105
1972 #define TOK_PRAGMA        106
1973 #define TOK_IFDEF         107
1974 #define TOK_IFNDEF        108
1975 #define TOK_ELIF          109
1976 #define TOK_ENDIF         110
1978 #define TOK_FIRST_MACRO   TOK_DEFINE
1979 #define TOK_LAST_MACRO    TOK_ENDIF
/* Human-readable spelling of each token code, indexed by TOK_* value;
 * used for diagnostics (see meat()).  Pseudo-tokens that have no fixed
 * spelling use a ":name:" placeholder.
 */
1983 static const char *tokens[] = {
1984 [TOK_UNKNOWN     ] = "unknown",
1985 [TOK_SPACE       ] = ":space:",
1987 [TOK_LBRACE      ] = "{",
1988 [TOK_RBRACE      ] = "}",
1992 [TOK_LBRACKET    ] = "[",
1993 [TOK_RBRACKET    ] = "]",
1994 [TOK_LPAREN      ] = "(",
1995 [TOK_RPAREN      ] = ")",
1997 [TOK_DOTS        ] = "...",
2000 [TOK_TIMESEQ     ] = "*=",
2001 [TOK_DIVEQ       ] = "/=",
2002 [TOK_MODEQ       ] = "%=",
2003 [TOK_PLUSEQ      ] = "+=",
2004 [TOK_MINUSEQ     ] = "-=",
2005 [TOK_SLEQ        ] = "<<=",
2006 [TOK_SREQ        ] = ">>=",
2007 [TOK_ANDEQ       ] = "&=",
2008 [TOK_XOREQ       ] = "^=",
2011 [TOK_NOTEQ       ] = "!=",
2013 [TOK_LOGOR       ] = "||",
2014 [TOK_LOGAND      ] = "&&",
2018 [TOK_LESSEQ      ] = "<=",
2019 [TOK_MOREEQ      ] = ">=",
2026 [TOK_PLUSPLUS    ] = "++",
2027 [TOK_MINUSMINUS  ] = "--",
2029 [TOK_ARROW       ] = "->",
2032 [TOK_LIT_STRING  ] = ":string:",
2033 [TOK_IDENT       ] = ":ident:",
2034 [TOK_TYPE_NAME   ] = ":typename:",
2035 [TOK_LIT_CHAR    ] = ":char:",
2036 [TOK_LIT_INT     ] = ":integer:",
2037 [TOK_LIT_FLOAT   ] = ":float:",
2039 [TOK_CONCATENATE ] = "##",
2041 [TOK_AUTO        ] = "auto",
2042 [TOK_BREAK       ] = "break",
2043 [TOK_CASE        ] = "case",
2044 [TOK_CHAR        ] = "char",
2045 [TOK_CONST       ] = "const",
2046 [TOK_CONTINUE    ] = "continue",
2047 [TOK_DEFAULT     ] = "default",
2049 [TOK_DOUBLE      ] = "double",
2050 [TOK_ELSE        ] = "else",
2051 [TOK_ENUM        ] = "enum",
2052 [TOK_EXTERN      ] = "extern",
2053 [TOK_FLOAT       ] = "float",
2055 [TOK_GOTO        ] = "goto",
2057 [TOK_INLINE      ] = "inline",
2059 [TOK_LONG        ] = "long",
2060 [TOK_REGISTER    ] = "register",
2061 [TOK_RESTRICT    ] = "restrict",
2062 [TOK_RETURN      ] = "return",
2063 [TOK_SHORT       ] = "short",
2064 [TOK_SIGNED      ] = "signed",
2065 [TOK_SIZEOF      ] = "sizeof",
2066 [TOK_STATIC      ] = "static",
2067 [TOK_STRUCT      ] = "struct",
2068 [TOK_SWITCH      ] = "switch",
2069 [TOK_TYPEDEF     ] = "typedef",
2070 [TOK_UNION       ] = "union",
2071 [TOK_UNSIGNED    ] = "unsigned",
2072 [TOK_VOID        ] = "void",
2073 [TOK_VOLATILE    ] = "volatile",
2074 [TOK_WHILE       ] = "while",
2076 [TOK_ATTRIBUTE   ] = "__attribute__",
2077 [TOK_ALIGNOF     ] = "__alignof__",
2079 [TOK_DEFINE      ] = "define",
2080 [TOK_UNDEF       ] = "undef",
2081 [TOK_INCLUDE     ] = "include",
2082 [TOK_LINE        ] = "line",
2083 [TOK_ERROR       ] = "error",
2084 [TOK_WARNING     ] = "warning",
2085 [TOK_PRAGMA      ] = "pragma",
2086 [TOK_IFDEF       ] = "ifdef",
2087 [TOK_IFNDEF      ] = "ifndef",
2088 [TOK_ELIF        ] = "elif",
2089 [TOK_ENDIF       ] = "endif",
/* Simple multiplicative string hash (factor 263), reduced modulo the
 * power-of-two HASH_TABLE_SIZE by masking. */
2094 static unsigned int hash(const char *str, int str_len)
2098 	end = str + str_len;
2100 	for(; str < end; str++) {
2101 		hash = (hash *263) + *str;
2103 	hash = hash & (HASH_TABLE_SIZE -1);
/* Find-or-create the hash_entry for an identifier.  On a miss, a new
 * entry is allocated with a private NUL-terminated copy of the name
 * and pushed onto the bucket chain.  Never returns NULL.
 */
2107 static struct hash_entry *lookup(
2108 	struct compile_state *state, const char *name, int name_len)
2110 	struct hash_entry *entry;
2112 	index = hash(name, name_len);
2113 	entry = state->hash_table[index];
	/* Compare both length and bytes; names are not NUL terminated here */
2115 		((entry->name_len != name_len) ||
2116 		(memcmp(entry->name, name, name_len) != 0))) {
2117 		entry = entry->next;
2121 		/* Get a private copy of the name */
2122 		new_name = xmalloc(name_len + 1, "hash_name");
2123 		memcpy(new_name, name, name_len);
2124 		new_name[name_len] = '\0';
2126 		/* Create a new hash entry */
2127 		entry = xcmalloc(sizeof(*entry), "hash_entry");
2128 		entry->next = state->hash_table[index];
2129 		entry->name = new_name;
2130 		entry->name_len = name_len;
2132 		/* Place the new entry in the hash table */
2133 		state->hash_table[index] = entry;
/* Reclassify an identifier token as a keyword, type name, or enum
 * constant when its hash entry carries such a token code; and the
 * analogous reclassification for preprocessor directive names.
 */
2138 static void ident_to_keyword(struct compile_state *state, struct token *tk)
2140 	struct hash_entry *entry;
2142 	if (entry && ((entry->tok == TOK_TYPE_NAME) ||
2143 		(entry->tok == TOK_ENUM_CONST) ||
2144 		((entry->tok >= TOK_FIRST_KEYWORD) &&
2145 			(entry->tok <= TOK_LAST_KEYWORD)))) {
2146 		tk->tok = entry->tok;
2150 static void ident_to_macro(struct compile_state *state, struct token *tk)
2152 	struct hash_entry *entry;
2155 		(entry->tok >= TOK_FIRST_MACRO) &&
2156 		(entry->tok <= TOK_LAST_MACRO)) {
2157 		tk->tok = entry->tok;
/* Bind a keyword spelling to its token code in the hash table; dies if
 * the name was already given a token code. */
2161 static void hash_keyword(
2162 	struct compile_state *state, const char *keyword, int tok)
2164 	struct hash_entry *entry;
2165 	entry = lookup(state, keyword, strlen(keyword));
2166 	if (entry && entry->tok != TOK_UNKNOWN) {
2167 		die("keyword %s already hashed", keyword);
/* NOTE(review): the function header line is elided in this listing --
 * presumably this defines a symbol and pushes it onto *chain; verify
 * against the full source.  Redefinition at the same scope depth is an
 * error.
 */
2173 	struct compile_state *state, struct hash_entry *ident,
2174 	struct symbol **chain, struct triple *def, struct type *type)
2177 	if (*chain && ((*chain)->scope_depth == state->scope_depth)) {
2178 		error(state, 0, "%s already defined", ident->name);
2180 	sym = xcmalloc(sizeof(*sym), "symbol");
2184 	sym->scope_depth = state->scope_depth;
/* Bind a goto label to its ident.  Labels live at function scope
 * (FUNCTION_SCOPE_DEPTH) and may only be defined once per function.
 */
2189 static void label_symbol(struct compile_state *state,
2190 	struct hash_entry *ident, struct triple *label)
2193 	if (ident->sym_label) {
2194 		error(state, 0, "label %s already defined", ident->name);
2196 	sym = xcmalloc(sizeof(*sym), "label");
2199 	sym->type = &void_type;
2200 	sym->scope_depth = FUNCTION_SCOPE_DEPTH;
2202 	ident->sym_label = sym;
/* Enter a new lexical scope. */
2205 static void start_scope(struct compile_state *state)
2207 	state->scope_depth++;
/* Pop and free every symbol on *chain whose scope_depth equals depth
 * (loop body elided in this listing). */
2210 static void end_scope_syms(struct symbol **chain, int depth)
2212 	struct symbol *sym, *next;
2214 	while(sym && (sym->scope_depth == depth)) {
/* Leave the current lexical scope: sweep every hash bucket and drop
 * label, struct-tag and ident symbols defined at this depth, then
 * decrement the depth.
 */
2222 static void end_scope(struct compile_state *state)
2226 	/* Walk through the hash table and remove all symbols
2227 	 * in the current scope.
2229 	depth = state->scope_depth;
2230 	for(i = 0; i < HASH_TABLE_SIZE; i++) {
2231 		struct hash_entry *entry;
2232 		entry = state->hash_table[i];
2234 			end_scope_syms(&entry->sym_label, depth);
2235 			end_scope_syms(&entry->sym_struct, depth);
2236 			end_scope_syms(&entry->sym_ident, depth);
2237 			entry = entry->next;
2240 	state->scope_depth = depth - 1;
/* Install every C keyword (plus the GNU __asm__/__volatile__/
 * __attribute__/__alignof__ spellings) into the identifier hash table.
 */
2243 static void register_keywords(struct compile_state *state)
2245 	hash_keyword(state, "auto",          TOK_AUTO);
2246 	hash_keyword(state, "break",         TOK_BREAK);
2247 	hash_keyword(state, "case",          TOK_CASE);
2248 	hash_keyword(state, "char",          TOK_CHAR);
2249 	hash_keyword(state, "const",         TOK_CONST);
2250 	hash_keyword(state, "continue",      TOK_CONTINUE);
2251 	hash_keyword(state, "default",       TOK_DEFAULT);
2252 	hash_keyword(state, "do",            TOK_DO);
2253 	hash_keyword(state, "double",        TOK_DOUBLE);
2254 	hash_keyword(state, "else",          TOK_ELSE);
2255 	hash_keyword(state, "enum",          TOK_ENUM);
2256 	hash_keyword(state, "extern",        TOK_EXTERN);
2257 	hash_keyword(state, "float",         TOK_FLOAT);
2258 	hash_keyword(state, "for",           TOK_FOR);
2259 	hash_keyword(state, "goto",          TOK_GOTO);
2260 	hash_keyword(state, "if",            TOK_IF);
2261 	hash_keyword(state, "inline",        TOK_INLINE);
2262 	hash_keyword(state, "int",           TOK_INT);
2263 	hash_keyword(state, "long",          TOK_LONG);
2264 	hash_keyword(state, "register",      TOK_REGISTER);
2265 	hash_keyword(state, "restrict",      TOK_RESTRICT);
2266 	hash_keyword(state, "return",        TOK_RETURN);
2267 	hash_keyword(state, "short",         TOK_SHORT);
2268 	hash_keyword(state, "signed",        TOK_SIGNED);
2269 	hash_keyword(state, "sizeof",        TOK_SIZEOF);
2270 	hash_keyword(state, "static",        TOK_STATIC);
2271 	hash_keyword(state, "struct",        TOK_STRUCT);
2272 	hash_keyword(state, "switch",        TOK_SWITCH);
2273 	hash_keyword(state, "typedef",       TOK_TYPEDEF);
2274 	hash_keyword(state, "union",         TOK_UNION);
2275 	hash_keyword(state, "unsigned",      TOK_UNSIGNED);
2276 	hash_keyword(state, "void",          TOK_VOID);
2277 	hash_keyword(state, "volatile",      TOK_VOLATILE);
2278 	hash_keyword(state, "__volatile__",  TOK_VOLATILE);
2279 	hash_keyword(state, "while",         TOK_WHILE);
2280 	hash_keyword(state, "asm",           TOK_ASM);
2281 	hash_keyword(state, "__asm__",       TOK_ASM);
2282 	hash_keyword(state, "__attribute__", TOK_ATTRIBUTE);
2283 	hash_keyword(state, "__alignof__",   TOK_ALIGNOF);
/* Install the preprocessor directive names into the hash table. */
2286 static void register_macro_keywords(struct compile_state *state)
2288 	hash_keyword(state, "define",        TOK_DEFINE);
2289 	hash_keyword(state, "undef",         TOK_UNDEF);
2290 	hash_keyword(state, "include",       TOK_INCLUDE);
2291 	hash_keyword(state, "line",          TOK_LINE);
2292 	hash_keyword(state, "error",         TOK_ERROR);
2293 	hash_keyword(state, "warning",       TOK_WARNING);
2294 	hash_keyword(state, "pragma",        TOK_PRAGMA);
2295 	hash_keyword(state, "ifdef",         TOK_IFDEF);
2296 	hash_keyword(state, "ifndef",        TOK_IFNDEF);
2297 	hash_keyword(state, "elif",          TOK_ELIF);
2298 	hash_keyword(state, "endif",         TOK_ENDIF);
/* Hand-rolled character classification and digit-value helpers.
 * These take int character codes and avoid <ctype.h> so behavior does
 * not depend on the host locale.  (Several case lists and returns are
 * elided in this listing.)
 */
2301 static int spacep(int c)
/* Decimal digit? */
2317 static int digitp(int c)
2321 	case '0': case '1': case '2': case '3': case '4':
2322 	case '5': case '6': case '7': case '8': case '9':
/* Value of a decimal digit. */
2328 static int digval(int c)
2331 	if ((c >= '0') && (c <= '9')) {
/* Hexadecimal digit? */
2337 static int hexdigitp(int c)
2341 	case '0': case '1': case '2': case '3': case '4':
2342 	case '5': case '6': case '7': case '8': case '9':
2343 	case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
2344 	case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
/* Value of a hexadecimal digit. */
2350 static int hexdigval(int c)
2353 	if ((c >= '0') && (c <= '9')) {
2356 	else if ((c >= 'A') && (c <= 'F')) {
2357 		val = 10 + (c - 'A');
2359 	else if ((c >= 'a') && (c <= 'f')) {
2360 		val = 10 + (c - 'a');
/* Octal digit? */
2365 static int octdigitp(int c)
2369 	case '0': case '1': case '2': case '3':
2370 	case '4': case '5': case '6': case '7':
/* Value of an octal digit. */
2376 static int octdigval(int c)
2379 	if ((c >= '0') && (c <= '7')) {
/* ASCII letter (identifier start/continue character)? */
2385 static int letterp(int c)
2389 	case 'a': case 'b': case 'c': case 'd': case 'e':
2390 	case 'f': case 'g': case 'h': case 'i': case 'j':
2391 	case 'k': case 'l': case 'm': case 'n': case 'o':
2392 	case 'p': case 'q': case 'r': case 's': case 't':
2393 	case 'u': case 'v': case 'w': case 'x': case 'y':
2395 	case 'A': case 'B': case 'C': case 'D': case 'E':
2396 	case 'F': case 'G': case 'H': case 'I': case 'J':
2397 	case 'K': case 'L': case 'M': case 'N': case 'O':
2398 	case 'P': case 'Q': case 'R': case 'S': case 'T':
2399 	case 'U': case 'V': case 'W': case 'X': case 'Y':
/* Decode one (possibly escaped) character from *strp, advancing the
 * cursor.  Handles the standard C simple escapes, \x hex escapes and
 * octal escapes; anything else is reported as an invalid character
 * constant.
 */
2408 static int char_value(struct compile_state *state,
2409 	const signed char **strp, const signed char *end)
2411 	const signed char *str;
2415 	if ((c == '\\') && (str < end)) {
2417 		case 'n':  c = '\n';  str++; break;
2418 		case 't':  c = '\t';  str++; break;
2419 		case 'v':  c = '\v';  str++; break;
2420 		case 'b':  c = '\b';  str++; break;
2421 		case 'r':  c = '\r';  str++; break;
2422 		case 'f':  c = '\f';  str++; break;
2423 		case 'a':  c = '\a';  str++; break;
2424 		case '\\': c = '\\';  str++; break;
2425 		case '?':  c = '?';   str++; break;
2426 		case '\'': c = '\'';  str++; break;
2427 		case '"':  c = '"';   break;
	/* \x : accumulate hex digits */
2431 			while((str < end) && hexdigitp(*str)) {
2433 				c += hexdigval(*str);
	/* \0-\7 : accumulate octal digits */
2437 		case '0': case '1': case '2': case '3':
2438 		case '4': case '5': case '6': case '7':
2440 			while((str < end) && octdigitp(*str)) {
2442 				c += octdigval(*str);
2447 			error(state, 0, "Invalid character constant");
/* Scan helpers: advance ptr past a run of decimal / octal / hex digits
 * (bounded by end) and return the first non-digit position. */
2455 static char *after_digits(char *ptr, char *end)
2457 	while((ptr < end) && digitp(*ptr)) {
2463 static char *after_octdigits(char *ptr, char *end)
2465 	while((ptr < end) && octdigitp(*ptr)) {
2471 static char *after_hexdigits(char *ptr, char *end)
2473 	while((ptr < end) && hexdigitp(*ptr)) {
/* Copy the [start, end] text (inclusive of *end) into a fresh
 * NUL-terminated buffer and attach it to the token.  'id' names the
 * allocation for the malloc debugging wrapper.
 */
2479 static void save_string(struct compile_state *state,
2480 	struct token *tk, char *start, char *end, const char *id)
2484 	/* Create a private copy of the string */
	/* end is inclusive, hence the +1 */
2485 	str_len = end - start + 1;
2486 	str = xmalloc(str_len + 1, id);
2487 	memcpy(str, start, str_len);
2488 	str[str_len] = '\0';
2490 	/* Store the copy in the token */
2492 	tk->str_len = str_len;
/* The lexer: scan the next token from the current file buffer into
 * state->token[index].  Handles whitespace, // and C comments, string
 * and character constants, numeric literals, identifiers/keywords,
 * all multi-character operators, C99 digraphs, and preprocessor
 * directive introduction ('#' / '%:' only at the start of a line).
 * Line accounting (file->report_line / line_start) is maintained as
 * newlines are consumed.  (Many interior lines are elided in this
 * listing; c/c1/c2/c3 are the lookahead characters at tokp.)
 */
2494 static void next_token(struct compile_state *state, int index)
2496 	struct file_state *file;
2504 	tk = &state->token[index];
2507 	token = tokp = file->pos;
2508 	end = file->buf + file->size;
	/* Gather up to three characters of lookahead, bounds-checked */
2515 	if ((tokp + 1) < end) {
2519 	if ((tokp + 2) < end) {
2523 	if ((tokp + 3) < end) {
	/* Whitespace run (tracks newlines for line numbering) */
2531 	else if (spacep(c)) {
2533 		while ((tokp < end) && spacep(c)) {
2536 				file->report_line++;
2537 				file->line_start = tokp + 1;
	/* C++ style comments */
2546 	else if ((c == '/') && (c1 == '/')) {
2548 		for(tokp += 2; tokp < end; tokp++) {
2552 				file->report_line++;
2553 				file->line_start = tokp +1;
	/* C style comments */
2559 	else if ((c == '/') && (c1 == '*')) {
2563 		line_start = file->line_start;
2564 		for(tokp += 2; (end - tokp) >= 2; tokp++) {
2568 				line_start = tokp +1;
2570 			else if ((c == '*') && (tokp[1] == '/')) {
2576 		if (tok == TOK_UNKNOWN) {
2577 			error(state, 0, "unterminated comment");
2579 		file->report_line += line - file->line;
2581 		file->line_start = line_start;
2583 	/* string constants */
2584 	else if ((c == '"') ||
2585 		((c == 'L') && (c1 == '"'))) {
2590 		line_start = file->line_start;
2596 		for(tokp += 1; tokp < end; tokp++) {
2600 				line_start = tokp + 1;
	/* Skip the character following a backslash (escape) */
2602 			else if ((c == '\\') && (tokp +1 < end)) {
2605 			else if (c == '"') {
2606 				tok = TOK_LIT_STRING;
2610 		if (tok == TOK_UNKNOWN) {
2611 			error(state, 0, "unterminated string constant");
2613 		if (line != file->line) {
2614 			warning(state, 0, "multiline string constant");
2616 		file->report_line += line - file->line;
2618 		file->line_start = line_start;
2620 		/* Save the string value */
2621 		save_string(state, tk, token, tokp, "literal string");
2623 	/* character constants */
2624 	else if ((c == '\'') ||
2625 		((c == 'L') && (c1 == '\''))) {
2630 		line_start = file->line_start;
2636 		for(tokp += 1; tokp < end; tokp++) {
2640 				line_start = tokp + 1;
2642 			else if ((c == '\\') && (tokp +1 < end)) {
2645 			else if (c == '\'') {
2650 		if (tok == TOK_UNKNOWN) {
2651 			error(state, 0, "unterminated character constant");
2653 		if (line != file->line) {
2654 			warning(state, 0, "multiline character constant");
2656 		file->report_line += line - file->line;
2658 		file->line_start = line_start;
2660 		/* Save the character value */
2661 		save_string(state, tk, token, tokp, "literal character");
2663 	/* integer and floating constants
2669 	 * Floating constants
2670 	 * {digits}.{digits}[Ee][+-]?{digits}
2672 	 * {digits}[Ee][+-]?{digits}
2673 	 * .{digits}[Ee][+-]?{digits}
2677 	else if (digitp(c) || ((c == '.') && (digitp(c1)))) {
2682 		next = after_digits(tokp, end);
	/* Fractional part makes it a float */
2687 		if (next[0] == '.') {
2688 			new = after_digits(next, end);
2689 			is_float = (new != next);
	/* Exponent (with optional sign) makes it a float */
2692 		if ((next[0] == 'e') || (next[0] == 'E')) {
2693 			if (((next + 1) < end) &&
2694 				((next[1] == '+') || (next[1] == '-'))) {
2697 			new = after_digits(next, end);
2698 			is_float = (new != next);
2702 			tok = TOK_LIT_FLOAT;
2703 			if ((next < end) && (
	/* Not a float: classify by radix prefix */
2712 		if (!is_float && digitp(c)) {
2714 			if ((c == '0') && ((c1 == 'x') || (c1 == 'X'))) {
2715 				next = after_hexdigits(tokp + 2, end);
2717 			else if (c == '0') {
2718 				next = after_octdigits(tokp, end);
2721 				next = after_digits(tokp, end);
2723 			/* crazy integer suffixes */
2725 				((next[0] == 'u') || (next[0] == 'U'))) {
2728 					((next[0] == 'l') || (next[0] == 'L'))) {
2732 			else if ((next < end) &&
2733 				((next[0] == 'l') || (next[0] == 'L'))) {
2736 					((next[0] == 'u') || (next[0] == 'U'))) {
2743 		/* Save the integer/floating point value */
2744 		save_string(state, tk, token, tokp, "literal number");
	/* identifiers */
2747 	else if (letterp(c)) {
2749 		for(tokp += 1; tokp < end; tokp++) {
2751 			if (!letterp(c) && !digitp(c)) {
2756 		tk->ident = lookup(state, token, tokp +1 - token);
2758 	/* C99 alternate macro characters */
2759 	else if ((c == '%') && (c1 == ':') && (c2 == '%') && (c3 == ':')) {
2761 		tok = TOK_CONCATENATE;
	/* Three- and two-character operators, longest match first */
2763 	else if ((c == '.') && (c1 == '.') && (c2 == '.')) { tokp += 2; tok = TOK_DOTS; }
2764 	else if ((c == '<') && (c1 == '<') && (c2 == '=')) { tokp += 2; tok = TOK_SLEQ; }
2765 	else if ((c == '>') && (c1 == '>') && (c2 == '=')) { tokp += 2; tok = TOK_SREQ; }
2766 	else if ((c == '*') && (c1 == '=')) { tokp += 1; tok = TOK_TIMESEQ; }
2767 	else if ((c == '/') && (c1 == '=')) { tokp += 1; tok = TOK_DIVEQ; }
2768 	else if ((c == '%') && (c1 == '=')) { tokp += 1; tok = TOK_MODEQ; }
2769 	else if ((c == '+') && (c1 == '=')) { tokp += 1; tok = TOK_PLUSEQ; }
2770 	else if ((c == '-') && (c1 == '=')) { tokp += 1; tok = TOK_MINUSEQ; }
2771 	else if ((c == '&') && (c1 == '=')) { tokp += 1; tok = TOK_ANDEQ; }
2772 	else if ((c == '^') && (c1 == '=')) { tokp += 1; tok = TOK_XOREQ; }
2773 	else if ((c == '|') && (c1 == '=')) { tokp += 1; tok = TOK_OREQ; }
2774 	else if ((c == '=') && (c1 == '=')) { tokp += 1; tok = TOK_EQEQ; }
2775 	else if ((c == '!') && (c1 == '=')) { tokp += 1; tok = TOK_NOTEQ; }
2776 	else if ((c == '|') && (c1 == '|')) { tokp += 1; tok = TOK_LOGOR; }
2777 	else if ((c == '&') && (c1 == '&')) { tokp += 1; tok = TOK_LOGAND; }
2778 	else if ((c == '<') && (c1 == '=')) { tokp += 1; tok = TOK_LESSEQ; }
2779 	else if ((c == '>') && (c1 == '=')) { tokp += 1; tok = TOK_MOREEQ; }
2780 	else if ((c == '<') && (c1 == '<')) { tokp += 1; tok = TOK_SL; }
2781 	else if ((c == '>') && (c1 == '>')) { tokp += 1; tok = TOK_SR; }
2782 	else if ((c == '+') && (c1 == '+')) { tokp += 1; tok = TOK_PLUSPLUS; }
2783 	else if ((c == '-') && (c1 == '-')) { tokp += 1; tok = TOK_MINUSMINUS; }
2784 	else if ((c == '-') && (c1 == '>')) { tokp += 1; tok = TOK_ARROW; }
	/* Digraphs: <: :> <% %> %: */
2785 	else if ((c == '<') && (c1 == ':')) { tokp += 1; tok = TOK_LBRACKET; }
2786 	else if ((c == ':') && (c1 == '>')) { tokp += 1; tok = TOK_RBRACKET; }
2787 	else if ((c == '<') && (c1 == '%')) { tokp += 1; tok = TOK_LBRACE; }
2788 	else if ((c == '%') && (c1 == '>')) { tokp += 1; tok = TOK_RBRACE; }
2789 	else if ((c == '%') && (c1 == ':')) { tokp += 1; tok = TOK_MACRO; }
2790 	else if ((c == '#') && (c1 == '#')) { tokp += 1; tok = TOK_CONCATENATE; }
	/* Single character tokens */
2791 	else if (c == ';') { tok = TOK_SEMI; }
2792 	else if (c == '{') { tok = TOK_LBRACE; }
2793 	else if (c == '}') { tok = TOK_RBRACE; }
2794 	else if (c == ',') { tok = TOK_COMMA; }
2795 	else if (c == '=') { tok = TOK_EQ; }
2796 	else if (c == ':') { tok = TOK_COLON; }
2797 	else if (c == '[') { tok = TOK_LBRACKET; }
2798 	else if (c == ']') { tok = TOK_RBRACKET; }
2799 	else if (c == '(') { tok = TOK_LPAREN; }
2800 	else if (c == ')') { tok = TOK_RPAREN; }
2801 	else if (c == '*') { tok = TOK_STAR; }
2802 	else if (c == '>') { tok = TOK_MORE; }
2803 	else if (c == '<') { tok = TOK_LESS; }
2804 	else if (c == '?') { tok = TOK_QUEST; }
2805 	else if (c == '|') { tok = TOK_OR; }
2806 	else if (c == '&') { tok = TOK_AND; }
2807 	else if (c == '^') { tok = TOK_XOR; }
2808 	else if (c == '+') { tok = TOK_PLUS; }
2809 	else if (c == '-') { tok = TOK_MINUS; }
2810 	else if (c == '/') { tok = TOK_DIV; }
2811 	else if (c == '%') { tok = TOK_MOD; }
2812 	else if (c == '!') { tok = TOK_BANG; }
2813 	else if (c == '.') { tok = TOK_DOT; }
2814 	else if (c == '~') { tok = TOK_TILDE; }
2815 	else if (c == '#') { tok = TOK_MACRO; }
2816 	if (tok == TOK_MACRO) {
2817 		/* Only match preprocessor directives at the start of a line */
2819 		for(ptr = file->line_start; spacep(*ptr); ptr++)
2825 	if (tok == TOK_UNKNOWN) {
2826 		error(state, 0, "unknown token");
2829 	file->pos = tokp + 1;
	/* Promote identifiers that are really keywords/typenames */
2831 	if (tok == TOK_IDENT) {
2832 		ident_to_keyword(state, tk);
2834 	/* Don't return space tokens. */
2835 	if (tok == TOK_SPACE) {
/* Expand a macro by pushing a synthetic file_state containing the
 * macro's definition text onto the input stack; the lexer then reads
 * tokens from it transparently.  A '\n' and NUL are appended so the
 * scanner terminates cleanly.
 * NOTE(review): the assignment of 'ident' is elided in this listing --
 * presumably ident = tk->ident; verify against the full source.
 */
2840 static void compile_macro(struct compile_state *state, struct token *tk)
2842 	struct file_state *file;
2843 	struct hash_entry *ident;
2845 	file = xmalloc(sizeof(*file), "file_state");
2846 	file->basename = xstrdup(tk->ident->name);
2847 	file->dirname = xstrdup("");
2848 	file->size = ident->sym_define->buf_len;
2849 	file->buf = xmalloc(file->size +2, file->basename);
2850 	memcpy(file->buf, ident->sym_define->buf, file->size);
2851 	file->buf[file->size] = '\n';
2852 	file->buf[file->size + 1] = '\0';
2853 	file->pos = file->buf;
2854 	file->line_start = file->pos;
2856 	file->report_line = 1;
2857 	file->report_name = file->basename;
2858 	file->report_dir = file->dirname;
	/* Push onto the include/macro input stack */
2859 	file->prev = state->file;
/*
 * mpeek - one-token lookahead used while evaluating preprocessor
 * constant expressions.  Fills token slot [index + 1] on demand,
 * transparently popping finished (EOF) macro pseudo-files and
 * expanding any defined identifier, then returns the token id.
 */
2864 static int mpeek(struct compile_state *state, int index)
2868 tk = &state->token[index + 1];
2869 if (tk->tok == -1) {
2870 next_token(state, index + 1);
/* At EOF of a nested (macro) pseudo-file: pop back to the including
 * file, but never past the file the directive started in. */
2874 if ((tk->tok == TOK_EOF) &&
2875 (state->file != state->macro_file) &&
2876 (state->file->prev)) {
2877 struct file_state *file = state->file;
2878 state->file = file->prev;
2879 /* file->basename is used keep it */
2880 if (file->report_dir != file->dirname) {
2881 xfree(file->report_dir);
2883 xfree(file->dirname);
2886 next_token(state, index + 1);
/* A defined identifier: expand it and fetch the next real token */
2889 else if (tk->ident && tk->ident->sym_define) {
2890 compile_macro(state, tk);
2891 next_token(state, index + 1);
2895 /* Don't show the token on the next line */
2896 if (state->macro_line < state->macro_file->line) {
2899 return state->token[index +1].tok;
/*
 * meat - macro-context "eat": require that the next token is `tok`
 * (reporting an error otherwise), then shift the lookahead token
 * queue down one slot, destroying the consumed token's string value.
 */
2902 static void meat(struct compile_state *state, int index, int tok)
2906 next_tok = mpeek(state, index);
2907 if (next_tok != tok) {
2908 const char *name1, *name2;
2909 name1 = tokens[next_tok];
2911 if (next_tok == TOK_IDENT) {
2912 name2 = state->token[index + 1].ident->name;
2914 error(state, 0, "found %s %s expected %s",
2915 name1, name2, tokens[tok]);
2917 /* Free the old token value */
/* Poison the string before freeing to catch stale references */
2918 if (state->token[index].str_len) {
2919 memset((void *)(state->token[index].val.str), -1,
2920 state->token[index].str_len);
2921 xfree(state->token[index].val.str);
/* Shift the remaining lookahead tokens down; mark the freed slot empty */
2923 for(i = index; i < sizeof(state->token)/sizeof(state->token[0]) - 1; i++) {
2924 state->token[i] = state->token[i + 1];
2926 memset(&state->token[i], 0, sizeof(state->token[i]));
2927 state->token[i].tok = -1;
/* Forward declaration: mcexpr is the top-level entry point of the
 * recursive-descent evaluator for #if/#elif constant expressions. */
2930 static long_t mcexpr(struct compile_state *state, int index);
/*
 * mprimary_expr - parse a primary expression of a preprocessor
 * constant expression: a parenthesized subexpression or an integer
 * literal.  Defined identifiers are macro-expanded in place until a
 * real token appears.
 *
 * Fix: corrected the overflow diagnostic spelling ("to large" ->
 * "too large"); the message is user-visible.
 */
2932 static long_t mprimary_expr(struct compile_state *state, int index)
2936 tok = mpeek(state, index);
/* Expand any defined identifiers before deciding what we look at */
2937 while(state->token[index + 1].ident &&
2938 state->token[index + 1].ident->sym_define) {
2939 meat(state, index, tok);
2940 compile_macro(state, &state->token[index]);
2941 tok = mpeek(state, index);
2945 meat(state, index, TOK_LPAREN);
2946 val = mcexpr(state, index);
2947 meat(state, index, TOK_RPAREN);
2952 meat(state, index, TOK_LIT_INT);
2954 val = strtol(state->token[index].val.str, &end, 0);
/* strtol clamps to LONG_MIN/LONG_MAX and sets errno on overflow */
2955 if (((val == LONG_MIN) || (val == LONG_MAX)) &&
2956 (errno == ERANGE)) {
2957 error(state, 0, "Integer constant too large");
/*
 * Recursive-descent precedence ladder for preprocessor constant
 * expressions (#if/#elif).  Each level parses the next-higher
 * precedence and folds in operators at its own level:
 *   mcexpr -> mlor -> mland -> mor -> mxor -> mand -> meq -> mrel
 *          -> mshift -> madd -> mmul -> munary -> mprimary
 *
 * NOTE(review): lines 2982 and 2987 both eat TOK_BANG; in the full
 * source one of these cases is presumably TOK_TILDE (bitwise not) --
 * confirm, the surrounding case labels are elided from this excerpt.
 * NOTE(review): the bitwise/logical levels use a single `if` rather
 * than a loop, so chains like `a & b & c` at one level may rely on
 * elided code -- verify against the full source.
 */
2967 static long_t munary_expr(struct compile_state *state, int index)
2970 switch(mpeek(state, index)) {
2972 meat(state, index, TOK_PLUS);
2973 val = munary_expr(state, index);
2977 meat(state, index, TOK_MINUS);
2978 val = munary_expr(state, index);
2982 meat(state, index, TOK_BANG);
2983 val = munary_expr(state, index);
2987 meat(state, index, TOK_BANG);
2988 val = munary_expr(state, index);
2992 val = mprimary_expr(state, index);
/* multiplicative: * / % */
2998 static long_t mmul_expr(struct compile_state *state, int index)
3002 val = munary_expr(state, index);
3006 switch(mpeek(state, index)) {
3008 meat(state, index, TOK_STAR);
3009 right = munary_expr(state, index);
3013 meat(state, index, TOK_DIV);
3014 right = munary_expr(state, index);
3018 meat(state, index, TOK_MOD);
3019 right = munary_expr(state, index);
/* additive: + - */
3031 static long_t madd_expr(struct compile_state *state, int index)
3035 val = mmul_expr(state, index);
3039 switch(mpeek(state, index)) {
3041 meat(state, index, TOK_PLUS);
3042 right = mmul_expr(state, index);
3046 meat(state, index, TOK_MINUS);
3047 right = mmul_expr(state, index);
/* shifts: << >> */
3059 static long_t mshift_expr(struct compile_state *state, int index)
3063 val = madd_expr(state, index);
3067 switch(mpeek(state, index)) {
3069 meat(state, index, TOK_SL);
3070 right = madd_expr(state, index);
3074 meat(state, index, TOK_SR);
3075 right = madd_expr(state, index);
/* relational: < > <= >= */
3087 static long_t mrel_expr(struct compile_state *state, int index)
3091 val = mshift_expr(state, index);
3095 switch(mpeek(state, index)) {
3097 meat(state, index, TOK_LESS);
3098 right = mshift_expr(state, index);
3102 meat(state, index, TOK_MORE);
3103 right = mshift_expr(state, index);
3107 meat(state, index, TOK_LESSEQ);
3108 right = mshift_expr(state, index);
3112 meat(state, index, TOK_MOREEQ);
3113 right = mshift_expr(state, index);
/* equality: == != */
3124 static long_t meq_expr(struct compile_state *state, int index)
3128 val = mrel_expr(state, index);
3132 switch(mpeek(state, index)) {
3134 meat(state, index, TOK_EQEQ);
3135 right = mrel_expr(state, index);
3139 meat(state, index, TOK_NOTEQ);
3140 right = mrel_expr(state, index);
/* bitwise and: & */
3151 static long_t mand_expr(struct compile_state *state, int index)
3154 val = meq_expr(state, index);
3155 if (mpeek(state, index) == TOK_AND) {
3157 meat(state, index, TOK_AND);
3158 right = meq_expr(state, index);
/* bitwise xor: ^ */
3164 static long_t mxor_expr(struct compile_state *state, int index)
3167 val = mand_expr(state, index);
3168 if (mpeek(state, index) == TOK_XOR) {
3170 meat(state, index, TOK_XOR);
3171 right = mand_expr(state, index);
/* bitwise or: | */
3177 static long_t mor_expr(struct compile_state *state, int index)
3180 val = mxor_expr(state, index);
3181 if (mpeek(state, index) == TOK_OR) {
3183 meat(state, index, TOK_OR);
3184 right = mxor_expr(state, index);
/* logical and: && */
3190 static long_t mland_expr(struct compile_state *state, int index)
3193 val = mor_expr(state, index);
3194 if (mpeek(state, index) == TOK_LOGAND) {
3196 meat(state, index, TOK_LOGAND);
3197 right = mor_expr(state, index);
/* logical or: || */
3202 static long_t mlor_expr(struct compile_state *state, int index)
3205 val = mland_expr(state, index);
3206 if (mpeek(state, index) == TOK_LOGOR) {
3208 meat(state, index, TOK_LOGOR);
3209 right = mland_expr(state, index);
/* mcexpr - evaluate a full preprocessor constant expression */
3215 static long_t mcexpr(struct compile_state *state, int index)
3217 return mlor_expr(state, index);
/*
 * preprocess - handle one preprocessor directive (called after the
 * lexer has returned TOK_MACRO for a '#' at the start of a line).
 * Supports cpp line markers, #line, #if/#ifdef/#ifndef/#elif/#else/
 * #endif conditionals (tracked via state->if_depth/if_value, where a
 * negative if_value marks a disabled region), simple #define,
 * #error, #warning, and #include.
 *
 * Fix: corrected the user-visible diagnostic "Unterminated included
 * directive" -> "Unterminated include directive".
 */
3219 static void preprocess(struct compile_state *state, int index)
3221 /* Doing much more with the preprocessor would require
3222 * a parser and a major restructuring.
3223 * Postpone that for later.
3225 struct file_state *file;
3231 tk = &state->token[index];
3232 state->macro_line = line = file->line;
3233 state->macro_file = file;
3235 next_token(state, index);
3236 ident_to_macro(state, tk);
3237 if (tk->tok == TOK_IDENT) {
3238 error(state, 0, "undefined preprocessing directive `%s'",
/* cpp line marker: "# <num> \"file\"" -- adopt reported line/file */
3245 override_line = strtoul(tk->val.str, 0, 10);
3246 next_token(state, index);
3247 /* I have a cpp line marker parse it */
3248 if (tk->tok == TOK_LIT_STRING) {
3249 const char *token, *base;
3251 int name_len, dir_len;
3252 name = xmalloc(tk->str_len, "report_name");
3253 token = tk->val.str + 1;
3254 base = strrchr(token, '/');
3255 name_len = tk->str_len -2;
3257 dir_len = base - token;
3259 name_len -= base - token;
3264 memcpy(name, base, name_len);
3265 name[name_len] = '\0';
3266 dir = xmalloc(dir_len + 1, "report_dir");
3267 memcpy(dir, token, dir_len);
3268 dir[dir_len] = '\0';
3269 file->report_line = override_line - 1;
3270 file->report_name = name;
3271 file->report_dir = dir;
/* #line <num> ["file"] -- override the reported position */
3276 meat(state, index, TOK_LINE);
3277 meat(state, index, TOK_LIT_INT);
3278 file->report_line = strtoul(tk->val.str, 0, 10) -1;
3279 if (mpeek(state, index) == TOK_LIT_STRING) {
3280 const char *token, *base;
3282 int name_len, dir_len;
3283 meat(state, index, TOK_LIT_STRING);
3284 name = xmalloc(tk->str_len, "report_name");
3285 token = tk->val.str + 1;
3286 name_len = tk->str_len - 2;
3288 dir_len = base - token;
3290 name_len -= base - token;
3295 memcpy(name, base, name_len);
3296 name[name_len] = '\0';
3297 dir = xmalloc(dir_len + 1, "report_dir");
3298 memcpy(dir, token, dir_len);
3299 dir[dir_len] = '\0';
3300 file->report_name = name;
3301 file->report_dir = dir;
/* unsupported directives are ignored inside disabled regions */
3306 if (state->if_value < 0) {
3309 warning(state, 0, "Ignoring preprocessor directive: %s",
/* #elif */
3313 error(state, 0, "#elif not supported");
3314 #warning "FIXME multiple #elif and #else in an #if do not work properly"
3315 if (state->if_depth == 0) {
3316 error(state, 0, "#elif without #if");
3318 /* If the #if was taken the #elif just disables the following code */
3319 if (state->if_value >= 0) {
3320 state->if_value = - state->if_value;
3322 /* If the previous #if was not taken see if the #elif enables the
3325 else if ((state->if_value < 0) &&
3326 (state->if_depth == - state->if_value))
3328 if (mcexpr(state, index) != 0) {
3329 state->if_value = state->if_depth;
3332 state->if_value = - state->if_depth;
/* #if <constant-expression> */
3338 if (state->if_value < 0) {
3341 if (mcexpr(state, index) != 0) {
3342 state->if_value = state->if_depth;
3345 state->if_value = - state->if_depth;
/* #ifdef <ident> */
3350 if (state->if_value < 0) {
3353 next_token(state, index);
3354 if ((line != file->line) || (tk->tok != TOK_IDENT)) {
3355 error(state, 0, "Invalid macro name");
3357 if (tk->ident->sym_define == 0) {
3358 state->if_value = state->if_depth;
3361 state->if_value = - state->if_depth;
/* #ifndef <ident> */
3366 if (state->if_value < 0) {
3369 next_token(state, index);
3370 if ((line != file->line) || (tk->tok != TOK_IDENT)) {
3371 error(state, 0, "Invalid macro name");
3373 if (tk->ident->sym_define != 0) {
3374 state->if_value = state->if_depth;
3377 state->if_value = - state->if_depth;
/* #else -- flip the enabled/disabled sense at this depth */
3381 if (state->if_depth == 0) {
3382 error(state, 0, "#else without #if");
3384 if ((state->if_value >= 0) ||
3385 ((state->if_value < 0) &&
3386 (state->if_depth == -state->if_value)))
3388 state->if_value = - state->if_value;
/* #endif -- close the innermost conditional */
3392 if (state->if_depth == 0) {
3393 error(state, 0, "#endif without #if");
3395 if ((state->if_value >= 0) ||
3396 ((state->if_value < 0) &&
3397 (state->if_depth == -state->if_value)))
3399 state->if_value = state->if_depth - 1;
/* #define <ident> <body> (no function-like macros) */
3405 struct hash_entry *ident;
3406 struct macro *macro;
3409 if (state->if_value < 0) /* quit early when #if'd out */
3412 meat(state, index, TOK_IDENT);
3416 if (*file->pos == '(') {
3417 #warning "FIXME macros with arguments not supported"
3418 error(state, 0, "Macros with arguments not supported");
3421 /* Find the end of the line to get an estimate of
3422 * the macro's length.
3424 for(ptr = file->pos; *ptr != '\n'; ptr++)
3427 if (ident->sym_define != 0) {
3428 error(state, 0, "macro %s already defined\n", ident->name);
3430 macro = xmalloc(sizeof(*macro), "macro");
3431 macro->ident = ident;
3432 macro->buf_len = ptr - file->pos +1;
3433 macro->buf = xmalloc(macro->buf_len +2, "macro buf");
3435 memcpy(macro->buf, file->pos, macro->buf_len);
3436 macro->buf[macro->buf_len] = '\n';
3437 macro->buf[macro->buf_len +1] = '\0';
3439 ident->sym_define = macro;
/* #error -- report the rest of the line as an error */
3446 /* Find the end of the line */
3447 for(end = file->pos; *end != '\n'; end++)
3449 len = (end - file->pos);
3450 if (state->if_value >= 0) {
3451 error(state, 0, "%*.*s", len, len, file->pos);
/* #warning -- report the rest of the line as a warning */
3460 /* Find the end of the line */
3461 for(end = file->pos; *end != '\n'; end++)
3463 len = (end - file->pos);
3464 if (state->if_value >= 0) {
3465 warning(state, 0, "%*.*s", len, len, file->pos);
/* #include "file" or #include <file> */
3477 next_token(state, index);
3478 if (tk->tok == TOK_LIT_STRING) {
3481 name = xmalloc(tk->str_len, "include");
3482 token = tk->val.str +1;
3483 name_len = tk->str_len -2;
3484 if (*token == '"') {
3488 memcpy(name, token, name_len);
3489 name[name_len] = '\0';
3492 else if (tk->tok == TOK_LESS) {
3495 for(end = start; *end != '\n'; end++) {
3501 error(state, 0, "Unterminated include directive");
3503 name = xmalloc(end - start + 1, "include");
3504 memcpy(name, start, end - start);
3505 name[end - start] = '\0';
3510 error(state, 0, "Invalid include directive");
3512 /* Error if there are any characters after the include */
3513 for(ptr = file->pos; *ptr != '\n'; ptr++) {
3520 error(state, 0, "garbage after include directive");
3523 if (state->if_value >= 0) {
3524 compile_file(state, name, local);
3527 next_token(state, index);
3531 /* Ignore # without a following ident */
3532 if (tk->tok == TOK_IDENT) {
3533 error(state, 0, "Invalid preprocessor directive: %s",
3538 /* Consume the rest of the macro line */
3540 tok = mpeek(state, index);
3541 meat(state, index, tok);
3542 } while(tok != TOK_EOF);
/*
 * token - produce the next "real" token for the C parser: pops
 * finished include files, dispatches preprocessor directives,
 * expands macros, and skips tokens inside #if'd-out regions.
 */
3546 static void token(struct compile_state *state, int index)
3548 struct file_state *file;
3552 tk = &state->token[index];
3553 next_token(state, index);
/* At end of an included file, resume the including file */
3557 if (tk->tok == TOK_EOF && file->prev) {
3558 state->file = file->prev;
3559 /* file->basename is used keep it */
3560 xfree(file->dirname);
3563 next_token(state, index);
/* '#' at start of line: hand off to the preprocessor */
3566 else if (tk->tok == TOK_MACRO) {
3567 preprocess(state, index);
/* A defined identifier: expand the macro */
3570 else if (tk->ident && tk->ident->sym_define) {
3571 compile_macro(state, tk);
3572 next_token(state, index);
/* Inside a disabled #if region: discard and keep scanning */
3575 else if (state->if_value < 0) {
3576 next_token(state, index);
/* peek - return the next parser token id, filling slot 1 on demand. */
3582 static int peek(struct compile_state *state)
3584 if (state->token[1].tok == -1) {
3587 return state->token[1].tok;
/* peek2 - two-token lookahead; fills slots 1 and 2 on demand. */
3590 static int peek2(struct compile_state *state)
3592 if (state->token[1].tok == -1) {
3595 if (state->token[2].tok == -1) {
3598 return state->token[2].tok;
/*
 * eat - parser-level counterpart of meat(): require that the next
 * token is `tok`, report an error otherwise, then shift the token
 * queue down one slot, freeing the consumed token's string value.
 */
3601 static void eat(struct compile_state *state, int tok)
3605 next_tok = peek(state);
3606 if (next_tok != tok) {
3607 const char *name1, *name2;
3608 name1 = tokens[next_tok];
3610 if (next_tok == TOK_IDENT) {
3611 name2 = state->token[1].ident->name;
3613 error(state, 0, "\tfound %s %s expected %s",
3614 name1, name2 ,tokens[tok]);
3616 /* Free the old token value */
3617 if (state->token[0].str_len) {
3618 xfree((void *)(state->token[0].val.str));
/* Shift the lookahead tokens down; mark the freed slot empty */
3620 for(i = 0; i < sizeof(state->token)/sizeof(state->token[0]) - 1; i++) {
3621 state->token[i] = state->token[i + 1];
3623 memset(&state->token[i], 0, sizeof(state->token[i]));
3624 state->token[i].tok = -1;
/* Search path for #include <...>; hardcoded developer paths, already
 * flagged by the #warning below.  Presumably NULL-terminated in the
 * full source (compile_file iterates until !*path). */
3627 #warning "FIXME do not hardcode the include paths"
3628 static char *include_paths[] = {
3629 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src/include",
3630 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src/arch/i386/include",
3631 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src",
/*
 * compile_file - open `filename` (splitting it into dirname/basename),
 * resolve it against the current directory, the including file's
 * directory (for "local" includes), or include_paths, slurp it into
 * memory, and push it as the new current input file.  Trigraphs are
 * processed and continued lines spliced before tokenizing.
 */
3635 static void compile_file(struct compile_state *state, const char *filename, int local)
3638 const char *subdir, *base;
3640 struct file_state *file;
3642 file = xmalloc(sizeof(*file), "file_state");
/* Split filename into a subdirectory part and a base part */
3644 base = strrchr(filename, '/');
3647 subdir_len = base - filename;
3654 basename = xmalloc(strlen(base) +1, "basename");
3655 strcpy(basename, base);
3656 file->basename = basename;
3658 if (getcwd(cwd, sizeof(cwd)) == 0) {
3659 die("cwd buffer to small");
/* Absolute paths are used as-is */
3662 if (subdir[0] == '/') {
3663 file->dirname = xmalloc(subdir_len + 1, "dirname");
3664 memcpy(file->dirname, subdir, subdir_len);
3665 file->dirname[subdir_len] = '\0';
3671 /* Find the appropriate directory... */
3673 if (!state->file && exists(cwd, filename)) {
3676 if (local && state->file && exists(state->file->dirname, filename)) {
3677 dir = state->file->dirname;
3679 for(path = include_paths; !dir && *path; path++) {
3680 if (exists(*path, filename)) {
3685 error(state, 0, "Cannot find `%s'\n", filename);
/* Build "<dir>/<subdir>" as the file's directory name */
3687 dirlen = strlen(dir);
3688 file->dirname = xmalloc(dirlen + 1 + subdir_len + 1, "dirname");
3689 memcpy(file->dirname, dir, dirlen);
3690 file->dirname[dirlen] = '/';
3691 memcpy(file->dirname + dirlen + 1, subdir, subdir_len);
3692 file->dirname[dirlen + 1 + subdir_len] = '\0';
3694 file->buf = slurp_file(file->dirname, file->basename, &file->size);
3697 file->pos = file->buf;
3698 file->line_start = file->pos;
3701 file->report_line = 1;
3702 file->report_name = file->basename;
3703 file->report_dir = file->dirname;
/* Push onto the input file stack */
3705 file->prev = state->file;
3708 process_trigraphs(state);
3709 splice_lines(state);
3712 /* Type helper functions */
/* new_type - allocate a type node with the given type bits and
 * left/right subtypes; field/type identifiers start out unset. */
3714 static struct type *new_type(
3715 unsigned int type, struct type *left, struct type *right)
3717 struct type *result;
3718 result = xmalloc(sizeof(*result), "type");
3719 result->type = type;
3720 result->left = left;
3721 result->right = right;
3722 result->field_ident = 0;
3723 result->type_ident = 0;
/* clone_type - copy `old`, keeping only its TYPE_MASK bits and
 * replacing the storage/qualifier bits with `specifiers`. */
3727 static struct type *clone_type(unsigned int specifiers, struct type *old)
3729 struct type *result;
3730 result = xmalloc(sizeof(*result), "type");
3731 memcpy(result, old, sizeof(*result));
3732 result->type &= TYPE_MASK;
3733 result->type |= specifiers;
/* Target-type sizes and alignments (bytes); long is sized by the
 * host's long_t representation. */
3737 #define SIZEOF_SHORT 2
3738 #define SIZEOF_INT 4
3739 #define SIZEOF_LONG (sizeof(long_t))
3741 #define ALIGNOF_SHORT 2
3742 #define ALIGNOF_INT 4
3743 #define ALIGNOF_LONG (sizeof(long_t))
/* Truncate a host ulong_t value to the target unsigned type's width */
3745 #define MASK_UCHAR(X) ((X) & ((ulong_t)0xff))
3746 #define MASK_USHORT(X) ((X) & (((ulong_t)1 << (SIZEOF_SHORT*8)) - 1))
/* mask_uint - mask to target-int width, but only when the host long
 * is wider than the target int (otherwise the shift would be UB). */
3747 static inline ulong_t mask_uint(ulong_t x)
3749 if (SIZEOF_INT < SIZEOF_LONG) {
3750 ulong_t mask = (((ulong_t)1) << ((ulong_t)(SIZEOF_INT*8))) -1;
3755 #define MASK_UINT(X) (mask_uint(X))
3756 #define MASK_ULONG(X) (X)
/* Singleton type nodes for the basic C types; shared wherever an
 * unqualified basic type is needed. */
3758 static struct type void_type = { .type = TYPE_VOID };
3759 static struct type char_type = { .type = TYPE_CHAR };
3760 static struct type uchar_type = { .type = TYPE_UCHAR };
3761 static struct type short_type = { .type = TYPE_SHORT };
3762 static struct type ushort_type = { .type = TYPE_USHORT };
3763 static struct type int_type = { .type = TYPE_INT };
3764 static struct type uint_type = { .type = TYPE_UINT };
3765 static struct type long_type = { .type = TYPE_LONG };
3766 static struct type ulong_type = { .type = TYPE_ULONG };
/*
 * variable - create the triple(s) backing a declared variable.
 * Automatic scalars become OP_ADECL; automatic structs become an
 * OP_VAL_VEC whose slots are recursively created per-member
 * variables; permanent (static) storage becomes OP_SDECL.
 */
3768 static struct triple *variable(struct compile_state *state, struct type *type)
3770 struct triple *result;
3771 if ((type->type & STOR_MASK) != STOR_PERM) {
3772 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
3773 result = triple(state, OP_ADECL, type, 0, 0);
3776 struct triple **vector;
3778 result = new_triple(state, OP_VAL_VEC, type, -1, -1);
3779 vector = &result->param[0];
/* Walk the TYPE_PRODUCT chain of struct members */
3783 while((field->type & TYPE_MASK) == TYPE_PRODUCT) {
3784 vector[index] = variable(state, field->left);
3785 field = field->right;
3788 vector[index] = variable(state, field);
3792 result = triple(state, OP_SDECL, type, 0, 0);
/* stor_of - print a type's storage-class keyword (with a trailing
 * space) to fp; one case per STOR_* value. */
3797 static void stor_of(FILE *fp, struct type *type)
3799 switch(type->type & STOR_MASK) {
3801 fprintf(fp, "auto ");
3804 fprintf(fp, "static ");
3807 fprintf(fp, "extern ");
3810 fprintf(fp, "register ");
3813 fprintf(fp, "typedef ");
3816 fprintf(fp, "inline ");
/* qual_of - print a type's qualifiers (const/volatile/restrict) */
3820 static void qual_of(FILE *fp, struct type *type)
3822 if (type->type & QUAL_CONST) {
3823 fprintf(fp, " const");
3825 if (type->type & QUAL_VOLATILE) {
3826 fprintf(fp, " volatile");
3828 if (type->type & QUAL_RESTRICT) {
3829 fprintf(fp, " restrict");
/* name_of - print a human-readable rendering of a type for
 * diagnostics, recursing into pointer/product/function/array types. */
3833 static void name_of(FILE *fp, struct type *type)
3836 switch(type->type & TYPE_MASK) {
3838 fprintf(fp, "void");
3842 fprintf(fp, "signed char");
3846 fprintf(fp, "unsigned char");
3850 fprintf(fp, "signed short");
3854 fprintf(fp, "unsigned short");
3858 fprintf(fp, "signed int");
3862 fprintf(fp, "unsigned int");
3866 fprintf(fp, "signed long");
3870 fprintf(fp, "unsigned long");
3874 name_of(fp, type->left);
3880 name_of(fp, type->left);
3882 name_of(fp, type->right);
3885 fprintf(fp, "enum %s", type->type_ident->name);
3889 fprintf(fp, "struct %s", type->type_ident->name);
3894 name_of(fp, type->left);
3895 fprintf(fp, " (*)(");
3896 name_of(fp, type->right);
3901 name_of(fp, type->left);
3902 fprintf(fp, " [%ld]", type->elements);
/* Unknown type bits: print the raw mask value for debugging */
3905 fprintf(fp, "????: %x", type->type & TYPE_MASK);
/* align_of - required alignment (bytes) of a type; for products and
 * structs this is the max alignment of the members. */
3910 static size_t align_of(struct compile_state *state, struct type *type)
3914 switch(type->type & TYPE_MASK) {
3924 align = ALIGNOF_SHORT;
3929 align = ALIGNOF_INT;
3934 align = ALIGNOF_LONG;
3939 size_t left_align, right_align;
3940 left_align = align_of(state, type->left);
3941 right_align = align_of(state, type->right);
3942 align = (left_align >= right_align) ? left_align : right_align;
3946 align = align_of(state, type->left);
3949 align = align_of(state, type->left);
3952 error(state, 0, "alignof not yet defined for type\n");
/* needed_padding - bytes to add to `offset` to reach `align` */
3958 static size_t needed_padding(size_t offset, size_t align)
3962 if (offset % align) {
3963 padding = align - (offset % align);
/* size_of - size (bytes) of a type, inserting inter-member padding
 * for products and trailing padding for structs. */
3967 static size_t size_of(struct compile_state *state, struct type *type)
3971 switch(type->type & TYPE_MASK) {
3981 size = SIZEOF_SHORT;
/* Product: sum member sizes, padding each to its alignment */
3997 while((type->type & TYPE_MASK) == TYPE_PRODUCT) {
3998 align = align_of(state, type->left);
3999 pad = needed_padding(size, align);
4000 size = size + pad + size_of(state, type->left);
4003 align = align_of(state, type);
4004 pad = needed_padding(size, align);
4005 size = size + pad + size_of(state, type);
/* Overlap (union-like): size is the largest member */
4010 size_t size_left, size_right;
4011 size_left = size_of(state, type->left);
4012 size_right = size_of(state, type->right);
4013 size = (size_left >= size_right)? size_left : size_right;
4017 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
4018 internal_error(state, 0, "Invalid array type");
4020 size = size_of(state, type->left) * type->elements;
4026 size = size_of(state, type->left);
4027 /* Pad structures so their size is a multiple of their alignment */
4028 align = align_of(state, type);
4029 pad = needed_padding(size, align);
4034 internal_error(state, 0, "sizeof not yet defined for type\n");
/* field_offset - byte offset of member `field` within struct `type`,
 * accumulating member sizes plus alignment padding along the way. */
4040 static size_t field_offset(struct compile_state *state,
4041 struct type *type, struct hash_entry *field)
4043 struct type *member;
4045 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4046 internal_error(state, 0, "field_offset only works on structures");
4049 member = type->left;
4050 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4051 align = align_of(state, member->left);
4052 size += needed_padding(size, align);
4053 if (member->left->field_ident == field) {
4054 member = member->left;
4057 size += size_of(state, member->left);
4058 member = member->right;
/* Last member of the product chain */
4060 align = align_of(state, member);
4061 size += needed_padding(size, align);
4062 if (member->field_ident != field) {
4063 error(state, 0, "member %s not present", field->name);
/* field_type - type of member `field` within struct `type` */
4068 static struct type *field_type(struct compile_state *state,
4069 struct type *type, struct hash_entry *field)
4071 struct type *member;
4072 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4073 internal_error(state, 0, "field_type only works on structures");
4075 member = type->left;
4076 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4077 if (member->left->field_ident == field) {
4078 member = member->left;
4081 member = member->right;
4083 if (member->field_ident != field) {
4084 error(state, 0, "member %s not present", field->name);
/* next_field - member following `prev_member` in struct `type`
 * (or the first member when prev_member is 0). */
4089 static struct type *next_field(struct compile_state *state,
4090 struct type *type, struct type *prev_member)
4092 struct type *member;
4093 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4094 internal_error(state, 0, "next_field only works on structures");
4096 member = type->left;
4097 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4099 member = member->left;
4102 if (member->left == prev_member) {
4105 member = member->right;
4107 if (member == prev_member) {
4111 internal_error(state, 0, "prev_member %s not present",
4112 prev_member->field_ident->name);
/* struct_field - given an OP_VAL_VEC struct value `decl`, return the
 * slot triple corresponding to member `field`. */
4117 static struct triple *struct_field(struct compile_state *state,
4118 struct triple *decl, struct hash_entry *field)
4120 struct triple **vector;
4124 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4127 if (decl->op != OP_VAL_VEC) {
4128 internal_error(state, 0, "Invalid struct variable");
4131 internal_error(state, 0, "Missing structure field");
4134 vector = &RHS(decl, 0);
/* Walk the member chain in step with the value vector slots */
4136 while((type->type & TYPE_MASK) == TYPE_PRODUCT) {
4137 if (type->left->field_ident == field) {
4144 if (type->field_ident != field) {
4145 internal_error(state, 0, "field %s not found?", field->name);
4147 return vector[index];
/* arrays_complete - error out if any (nested) array type still has
 * an unspecified element count at definition time. */
4150 static void arrays_complete(struct compile_state *state, struct type *type)
4152 if ((type->type & TYPE_MASK) == TYPE_ARRAY) {
4153 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
4154 error(state, 0, "array size not specified");
4156 arrays_complete(state, type->left);
/* do_integral_promotion - C integral promotion on the TYPE_* code:
 * integer types ranked below int are promoted (to int, presumably,
 * in the elided body -- confirm against the full source). */
4160 static unsigned int do_integral_promotion(unsigned int type)
4163 if (TYPE_INTEGER(type) &&
4164 TYPE_RANK(type) < TYPE_RANK(TYPE_INT)) {
/* do_arithmetic_conversion - usual arithmetic conversions on a pair
 * of TYPE_* codes, following the C standard's rules in order:
 * long double > double > float, then promoted-integer rules. */
4170 static unsigned int do_arithmetic_conversion(
4171 unsigned int left, unsigned int right)
4175 if ((left == TYPE_LDOUBLE) || (right == TYPE_LDOUBLE)) {
4176 return TYPE_LDOUBLE;
4178 else if ((left == TYPE_DOUBLE) || (right == TYPE_DOUBLE)) {
4181 else if ((left == TYPE_FLOAT) || (right == TYPE_FLOAT)) {
4184 left = do_integral_promotion(left);
4185 right = do_integral_promotion(right);
4186 /* If both operands have the same size done */
4187 if (left == right) {
4190 /* If both operands have the same signedness pick the larger */
4191 else if (!!TYPE_UNSIGNED(left) == !!TYPE_UNSIGNED(right)) {
4192 return (TYPE_RANK(left) >= TYPE_RANK(right)) ? left : right;
4194 /* If the signed type can hold everything use it */
4195 else if (TYPE_SIGNED(left) && (TYPE_RANK(left) > TYPE_RANK(right))) {
4198 else if (TYPE_SIGNED(right) && (TYPE_RANK(right) > TYPE_RANK(left))) {
4201 /* Convert to the unsigned type with the same rank as the signed type */
4202 else if (TYPE_SIGNED(left)) {
4203 return TYPE_MKUNSIGNED(left);
4206 return TYPE_MKUNSIGNED(right);
4210 /* see if two types are the same except for qualifiers */
/* equiv_types - structural type equality ignoring qualifiers:
 * recurses through pointers/arrays/functions/products; structs
 * compare by tag identity (type_ident). */
4211 static int equiv_types(struct type *left, struct type *right)
4214 /* Error if the basic types do not match */
4215 if ((left->type & TYPE_MASK) != (right->type & TYPE_MASK)) {
4218 type = left->type & TYPE_MASK;
4219 /* If the basic types match and it is a void type we are done */
4220 if (type == TYPE_VOID) {
4223 /* if the basic types match and it is an arithmetic type we are done */
4224 if (TYPE_ARITHMETIC(type)) {
4227 /* If it is a pointer type recurse and keep testing */
4228 if (type == TYPE_POINTER) {
4229 return equiv_types(left->left, right->left);
4231 else if (type == TYPE_ARRAY) {
4232 return (left->elements == right->elements) &&
4233 equiv_types(left->left, right->left);
4235 /* test for struct/union equality */
4236 else if (type == TYPE_STRUCT) {
4237 return left->type_ident == right->type_ident;
4239 /* Test for equivalent functions */
4240 else if (type == TYPE_FUNCTION) {
4241 return equiv_types(left->left, right->left) &&
4242 equiv_types(left->right, right->right);
4244 /* We only see TYPE_PRODUCT as part of function equivalence matching */
4245 else if (type == TYPE_PRODUCT) {
4246 return equiv_types(left->left, right->left) &&
4247 equiv_types(left->right, right->right);
4249 /* We should see TYPE_OVERLAP */
/* equiv_ptrs - both operands must be pointers with equivalent
 * pointed-to types. */
4255 static int equiv_ptrs(struct type *left, struct type *right)
4257 if (((left->type & TYPE_MASK) != TYPE_POINTER) ||
4258 ((right->type & TYPE_MASK) != TYPE_POINTER)) {
4261 return equiv_types(left->left, right->left);
/*
 * compatible_types - build the composite of two compatible types, or
 * fail (elided branches presumably return 0).  The composite carries
 * the union of both operands' qualifier bits (storage bits dropped).
 * The returned type nodes are freshly allocated.
 */
4264 static struct type *compatible_types(struct type *left, struct type *right)
4266 struct type *result;
4267 unsigned int type, qual_type;
4268 /* Error if the basic types do not match */
4269 if ((left->type & TYPE_MASK) != (right->type & TYPE_MASK)) {
4272 type = left->type & TYPE_MASK;
4273 qual_type = (left->type & ~STOR_MASK) | (right->type & ~STOR_MASK);
4275 /* if the basic types match and it is an arithmetic type we are done */
4276 if (TYPE_ARITHMETIC(type)) {
4277 result = new_type(qual_type, 0, 0);
4279 /* If it is a pointer type recurse and keep testing */
4280 else if (type == TYPE_POINTER) {
4281 result = compatible_types(left->left, right->left);
4283 result = new_type(qual_type, result, 0);
4286 /* test for struct/union equality */
4287 else if (type == TYPE_STRUCT) {
4288 if (left->type_ident == right->type_ident) {
4292 /* Test for equivalent functions */
4293 else if (type == TYPE_FUNCTION) {
4294 struct type *lf, *rf;
4295 lf = compatible_types(left->left, right->left);
4296 rf = compatible_types(left->right, right->right);
4298 result = new_type(qual_type, lf, rf);
4301 /* We only see TYPE_PRODUCT as part of function equivalence matching */
4302 else if (type == TYPE_PRODUCT) {
4303 struct type *lf, *rf;
4304 lf = compatible_types(left->left, right->left);
4305 rf = compatible_types(left->right, right->right);
4307 result = new_type(qual_type, lf, rf);
4311 /* Nothing else is compatible */
/* compatible_ptrs - composite pointer type of two compatible
 * pointers (0 when either operand is not a pointer). */
4316 static struct type *compatible_ptrs(struct type *left, struct type *right)
4318 struct type *result;
4319 if (((left->type & TYPE_MASK) != TYPE_POINTER) ||
4320 ((right->type & TYPE_MASK) != TYPE_POINTER)) {
4323 result = compatible_types(left->left, right->left);
4325 unsigned int qual_type;
4326 qual_type = (left->type & ~STOR_MASK) | (right->type & ~STOR_MASK);
4327 result = new_type(qual_type, result, 0);
/*
 * integral_promotion - apply C integral promotion to a triple by
 * rewriting only its logical type; no conversion instruction is
 * emitted because values are converted on load into registers.
 */
4332 static struct triple *integral_promotion(
4333 struct compile_state *state, struct triple *def)
4337 /* As all operations are carried out in registers
4338 * the values are converted on load I just convert
4339 * logical type of the operand.
4341 if (TYPE_INTEGER(type->type)) {
4342 unsigned int int_type;
/* Preserve qualifier/storage bits, replace only the TYPE_MASK bits */
4343 int_type = type->type & ~TYPE_MASK;
4344 int_type |= do_integral_promotion(type->type);
4345 if (int_type != type->type) {
4346 def->type = new_type(int_type, 0, 0);
/* arithmetic - require that `def` has an arithmetic type.
 * Fix: corrected the user-visible diagnostic spelling
 * ("expexted" -> "expected").
 * NOTE(review): passes 0 rather than def to error(), unlike the
 * sibling ptr_arithmetic() -- possibly intentional, left as-is. */
4353 static void arithmetic(struct compile_state *state, struct triple *def)
4355 if (!TYPE_ARITHMETIC(def->type->type)) {
4356 error(state, 0, "arithmetic type expected");
/* ptr_arithmetic - require a pointer or arithmetic type */
4360 static void ptr_arithmetic(struct compile_state *state, struct triple *def)
4362 if (!TYPE_PTR(def->type->type) && !TYPE_ARITHMETIC(def->type->type)) {
4363 error(state, def, "pointer or arithmetic type expected");
/* is_integral - nonzero when the triple's type is an integer type */
4367 static int is_integral(struct triple *ins)
4369 return TYPE_INTEGER(ins->type->type);
/* integral - require an integral type */
4372 static void integral(struct compile_state *state, struct triple *def)
4374 if (!is_integral(def)) {
4375 error(state, 0, "integral type expected");
/* bool - require a type usable in a boolean context (arithmetic or
 * pointer).  Name predates <stdbool.h>; the file does not include it. */
4380 static void bool(struct compile_state *state, struct triple *def)
4382 if (!TYPE_ARITHMETIC(def->type->type) &&
4383 ((def->type->type & TYPE_MASK) != TYPE_POINTER) {
4384 error(state, 0, "arithmetic or pointer type expected");
/* is_signed - nonzero when the type is a signed integer type */
4388 static int is_signed(struct type *type)
4390 return !!TYPE_SIGNED(type->type);
4393 /* Is this value located in a register otherwise it must be in memory */
4394 static int is_in_reg(struct compile_state *state, struct triple *def)
4397 if (def->op == OP_ADECL) {
4400 else if ((def->op == OP_SDECL) || (def->op == OP_DEREF)) {
/* Vectors and member accesses live wherever their base value lives */
4403 else if (def->op == OP_VAL_VEC) {
4404 in_reg = is_in_reg(state, RHS(def, 0));
4406 else if (def->op == OP_DOT) {
4407 in_reg = is_in_reg(state, RHS(def, 0));
4410 internal_error(state, 0, "unknown expr storage location");
4416 /* Is this a stable variable location otherwise it must be a temporary */
4417 static int is_stable(struct compile_state *state, struct triple *def)
4424 if ((def->op == OP_ADECL) ||
4425 (def->op == OP_SDECL) ||
4426 (def->op == OP_DEREF) ||
4427 (def->op == OP_BLOBCONST)) {
4430 else if (def->op == OP_DOT) {
4431 ret = is_stable(state, RHS(def, 0));
/* A value vector is stable only if every slot is stable */
4433 else if (def->op == OP_VAL_VEC) {
4434 struct triple **vector;
4437 vector = &RHS(def, 0);
4438 for(i = 0; i < def->type->elements; i++) {
4439 if (!is_stable(state, vector[i])) {
/* is_lvalue - a stable location; member accesses inherit lvalue-ness
 * from their base value. */
4448 static int is_lvalue(struct compile_state *state, struct triple *def)
4455 if (!is_stable(state, def)) {
4458 if (def->op == OP_DOT) {
4459 ret = is_lvalue(state, RHS(def, 0));
/* clvalue - require an lvalue (possibly const-qualified) */
4464 static void clvalue(struct compile_state *state, struct triple *def)
4467 internal_error(state, def, "nothing where lvalue expected?");
4469 if (!is_lvalue(state, def)) {
4470 error(state, def, "lvalue expected");
/* lvalue - require a modifiable (non-const) lvalue.
 * Fix: corrected the user-visible diagnostic spelling
 * ("modifable" -> "modifiable"). */
4473 static void lvalue(struct compile_state *state, struct triple *def)
4475 clvalue(state, def);
4476 if (def->type->type & QUAL_CONST) {
4477 error(state, def, "modifiable lvalue expected");
/* is_pointer - nonzero when the triple's type is a pointer type */
4481 static int is_pointer(struct triple *def)
4483 return (def->type->type & TYPE_MASK) == TYPE_POINTER;
/* pointer - require a pointer type */
4486 static void pointer(struct compile_state *state, struct triple *def)
4488 if (!is_pointer(def)) {
4489 error(state, def, "pointer expected");
/* int_const - build an OP_INTCONST triple holding `value` with the
 * given integer type; rejects non-integer types.
 * Fix: corrected the diagnostic spelling ("unkown" -> "unknown"). */
4493 static struct triple *int_const(
4494 struct compile_state *state, struct type *type, ulong_t value)
4496 struct triple *result;
4497 switch(type->type & TYPE_MASK) {
4499 case TYPE_INT: case TYPE_UINT:
4500 case TYPE_LONG: case TYPE_ULONG:
4503 internal_error(state, 0, "constant for unknown type");
4505 result = triple(state, OP_INTCONST, type, 0, 0);
4506 result->u.cval = value;
/*
 * do_mk_addr_expr - build &expr (+ byte offset) with pointer type
 * derived from `type`.  Static variables become OP_ADDRCONST;
 * dereferences become pointer arithmetic; taking the address of an
 * automatic variable is rejected (not supported by this backend).
 */
4511 static struct triple *do_mk_addr_expr(struct compile_state *state,
4512 struct triple *expr, struct type *type, ulong_t offset)
4514 struct triple *result;
4515 clvalue(state, expr);
/* Result type: pointer to `type`, carrying its qualifiers */
4517 type = new_type(TYPE_POINTER | (type->type & QUAL_MASK), type, 0);
4520 if (expr->op == OP_ADECL) {
4521 error(state, expr, "address of auto variables not supported");
4523 else if (expr->op == OP_SDECL) {
4524 result = triple(state, OP_ADDRCONST, type, 0, 0);
4525 MISC(result, 0) = expr;
4526 result->u.cval = offset;
4528 else if (expr->op == OP_DEREF) {
4529 result = triple(state, OP_ADD, type,
4531 int_const(state, &ulong_type, offset));
/* mk_addr_expr - &expr at a byte offset, typed from expr itself */
4536 static struct triple *mk_addr_expr(
4537 struct compile_state *state, struct triple *expr, ulong_t offset)
4539 return do_mk_addr_expr(state, expr, expr->type, offset);
4542 static struct triple *mk_deref_expr(
4543 struct compile_state *state, struct triple *expr)
4545 struct type *base_type;
4546 pointer(state, expr);
4547 base_type = expr->type->left;
4548 return triple(state, OP_DEREF, base_type, expr, 0);
/* Implement C array-to-pointer decay: an array valued expression
 * becomes a pointer to its first element.
 */
static struct triple *array_to_pointer(struct compile_state *state, struct triple *def)
	if ((def->type->type & TYPE_MASK) == TYPE_ARRAY) {
		TYPE_POINTER | (def->type->type & QUAL_MASK),
		def->type->left, 0);
		/* Static arrays and constants decay to an address constant. */
		if ((def->op == OP_SDECL) || IS_CONST_OP(def->op)) {
			struct triple *addrconst;
			if ((def->op != OP_SDECL) && (def->op != OP_BLOBCONST)) {
				internal_error(state, def, "bad array constant");
			addrconst = triple(state, OP_ADDRCONST, type, 0, 0);
			MISC(addrconst, 0) = def;
		/* Otherwise copy the value into the pointer type. */
		def = triple(state, OP_COPY, type, def, 0);
/* Select member field from a struct valued expr, producing either a
 * memory dereference (for permanently stored structs) or a symbolic
 * OP_DOT field reference.
 */
static struct triple *deref_field(
	struct compile_state *state, struct triple *expr, struct hash_entry *field)
	struct triple *result;
	struct type *type, *member;
	internal_error(state, 0, "No field passed to deref_field");
	if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
		error(state, 0, "request for member %s in something not a struct or union",
	member = field_type(state, type, field);
	/* Permanent (static) storage: compute the member's address and
	 * dereference it directly.
	 */
	if ((type->type & STOR_MASK) == STOR_PERM) {
		/* Do the pointer arithmetic to get a deref the field */
		offset = field_offset(state, type, field);
		result = do_mk_addr_expr(state, expr, member, offset);
		result = mk_deref_expr(state, result);
	/* Find the variable for the field I want. */
	result = triple(state, OP_DOT, member, expr, 0);
	result->u.field = field;
/* Generate the triples that read the value of an expression. */
static struct triple *read_expr(struct compile_state *state, struct triple *def)
	/* Temporaries already hold their value; only stable locations
	 * need an explicit read.
	 */
	if (!is_stable(state, def)) {
	/* Transform an array to a pointer to the first element */
#warning "CHECK_ME is this the right place to transform arrays to pointers?"
	if ((def->type->type & TYPE_MASK) == TYPE_ARRAY) {
		return array_to_pointer(state, def);
	/* Register resident locations are read, memory ones are loaded. */
	if (is_in_reg(state, def)) {
	return triple(state, op, def->type, def, 0);
/* May a value of type rval be assigned to an object of type dest?
 * Returns nonzero when the assignment is permitted.
 */
int is_write_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
	/* Both operands have arithmetic type */
	if (TYPE_ARITHMETIC(dest->type) && TYPE_ARITHMETIC(rval->type)) {
	/* One operand is a pointer and the other is a pointer to void */
	else if (((dest->type & TYPE_MASK) == TYPE_POINTER) &&
		((rval->type & TYPE_MASK) == TYPE_POINTER) &&
		(((dest->left->type & TYPE_MASK) == TYPE_VOID) ||
		((rval->left->type & TYPE_MASK) == TYPE_VOID))) {
	/* If both types are the same without qualifiers we are good */
	else if (equiv_ptrs(dest, rval)) {
	/* test for struct/union equality */
	else if (((dest->type & TYPE_MASK) == TYPE_STRUCT) &&
		((rval->type & TYPE_MASK) == TYPE_STRUCT) &&
		(dest->type_ident == rval->type_ident)) {
/* Abort with a diagnostic unless rval's type is assignable to dest. */
static void write_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
{
	if (is_write_compatible(state, dest, rval)) {
		return;
	}
	error(state, 0, "Incompatible types in assignment");
}
/* Initialization accepts everything assignment does, plus
 * initializing from an object of an equivalent type.
 */
static int is_init_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
	if (is_write_compatible(state, dest, rval)) {
	else if (equiv_types(dest, rval)) {
4677 static struct triple *write_expr(
4678 struct compile_state *state, struct triple *dest, struct triple *rval)
4685 internal_error(state, 0, "missing rval");
4688 if (rval->op == OP_LIST) {
4689 internal_error(state, 0, "expression of type OP_LIST?");
4691 if (!is_lvalue(state, dest)) {
4692 internal_error(state, 0, "writing to a non lvalue?");
4694 if (dest->type->type & QUAL_CONST) {
4695 internal_error(state, 0, "modifable lvalue expexted");
4698 write_compatible(state, dest->type, rval->type);
4700 /* Now figure out which assignment operator to use */
4702 if (is_in_reg(state, dest)) {
4707 def = triple(state, op, dest->type, dest, rval);
/* Generate the triples that initialize dest with rval. Automatic
 * storage is initialized with an ordinary write; permanent (static)
 * storage records rval directly as the initializer.
 */
static struct triple *init_expr(
	struct compile_state *state, struct triple *dest, struct triple *rval)
	internal_error(state, 0, "missing rval");
	if ((dest->type->type & STOR_MASK) != STOR_PERM) {
		rval = read_expr(state, rval);
		def = write_expr(state, dest, rval);
	/* Fill in the array size if necessary */
	if (((dest->type->type & TYPE_MASK) == TYPE_ARRAY) &&
		((rval->type->type & TYPE_MASK) == TYPE_ARRAY)) {
		if (dest->type->elements == ELEMENT_COUNT_UNSPECIFIED) {
			dest->type->elements = rval->type->elements;
	if (!equiv_types(dest->type, rval->type)) {
		/* NOTE(review): "inializer" typo in this user-visible message. */
		error(state, 0, "Incompatible types in inializer");
	/* Record the initializer on the declaration and mark it placed. */
	MISC(dest, 0) = rval;
	insert_triple(state, dest, rval);
	rval->id |= TRIPLE_FLAG_FLATTENED;
	use_triple(MISC(dest, 0), dest);
/* Compute the usual arithmetic conversion result type for a binary
 * operation on left and right.
 */
struct type *arithmetic_result(
	struct compile_state *state, struct triple *left, struct triple *right)
	/* Sanity checks to ensure I am working with arithmetic types */
	arithmetic(state, left);
	arithmetic(state, right);
	do_arithmetic_conversion(
		right->type->type), 0, 0);
/* Result type for (possibly pointer) arithmetic: two arithmetic
 * operands use the usual conversions; a pointer left operand keeps
 * its pointer type.
 */
struct type *ptr_arithmetic_result(
	struct compile_state *state, struct triple *left, struct triple *right)
	/* Sanity checks to ensure I am working with the proper types */
	ptr_arithmetic(state, left);
	arithmetic(state, right);
	if (TYPE_ARITHMETIC(left->type->type) &&
		TYPE_ARITHMETIC(right->type->type)) {
		type = arithmetic_result(state, left, right);
	else if (TYPE_PTR(left->type->type)) {
	internal_error(state, 0, "huh?");
/* boolean helper function */

/* Coerce expr into a boolean valued (0 or 1) int expression. */
static struct triple *ltrue_expr(struct compile_state *state,
	struct triple *expr)
	case OP_LTRUE: case OP_LFALSE: case OP_EQ: case OP_NOTEQ:
	case OP_SLESS: case OP_ULESS: case OP_SMORE: case OP_UMORE:
	case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
		/* If the expression is already boolean do nothing */
		/* Otherwise wrap it in OP_LTRUE to test for nonzero. */
		expr = triple(state, OP_LTRUE, &int_type, expr, 0);
4797 static struct triple *lfalse_expr(struct compile_state *state,
4798 struct triple *expr)
4800 return triple(state, OP_LFALSE, &int_type, expr, 0);
/* Build a conditional (?:) expression, computing the common result
 * type of the two arms per the C conditional operator rules.
 */
static struct triple *cond_expr(
	struct compile_state *state,
	struct triple *test, struct triple *left, struct triple *right)
	struct type *result_type;
	unsigned int left_type, right_type;
	left_type = left->type->type;
	right_type = right->type->type;
	/* Both operands have arithmetic type */
	if (TYPE_ARITHMETIC(left_type) && TYPE_ARITHMETIC(right_type)) {
		result_type = arithmetic_result(state, left, right);
	/* Both operands have void type */
	else if (((left_type & TYPE_MASK) == TYPE_VOID) &&
		((right_type & TYPE_MASK) == TYPE_VOID)) {
		result_type = &void_type;
	/* pointers to the same type... */
	else if ((result_type = compatible_ptrs(left->type, right->type))) {
	/* Both operands are pointers and left is a pointer to void */
	else if (((left_type & TYPE_MASK) == TYPE_POINTER) &&
		((right_type & TYPE_MASK) == TYPE_POINTER) &&
		((left->type->left->type & TYPE_MASK) == TYPE_VOID)) {
		result_type = right->type;
	/* Both operands are pointers and right is a pointer to void */
	else if (((left_type & TYPE_MASK) == TYPE_POINTER) &&
		((right_type & TYPE_MASK) == TYPE_POINTER) &&
		((right->type->left->type & TYPE_MASK) == TYPE_VOID)) {
		result_type = left->type;
	error(state, 0, "Incompatible types in conditional expression");
	/* Cleanup and invert the test */
	test = lfalse_expr(state, read_expr(state, test));
	/* OP_COND takes the inverted test plus both arms as params. */
	def = new_triple(state, OP_COND, result_type, 0, 3);
	def->param[0] = test;
	def->param[1] = left;
	def->param[2] = right;
/* Estimate the evaluation depth of an expression tree; used by
 * flatten_generic to schedule the deepest operand first.
 */
static int expr_depth(struct compile_state *state, struct triple *ins)
	/* Absent or already flattened subtrees add no depth. */
	if (!ins || (ins->id & TRIPLE_FLAG_FLATTENED)) {
	else if (ins->op == OP_DEREF) {
		count = expr_depth(state, RHS(ins, 0)) - 1;
	else if (ins->op == OP_VAL) {
		count = expr_depth(state, RHS(ins, 0)) - 1;
	/* A comma expression is as deep as its deeper side. */
	else if (ins->op == OP_COMMA) {
		ldepth = expr_depth(state, RHS(ins, 0));
		rdepth = expr_depth(state, RHS(ins, 1));
		count = (ldepth >= rdepth)? ldepth : rdepth;
	else if (ins->op == OP_CALL) {
		/* Don't figure the depth of a call just guess it is huge */
	/* Generic case: scan every rhs operand for the deepest one. */
	struct triple **expr;
	expr = triple_rhs(state, ins, 0);
	for(;expr; expr = triple_rhs(state, ins, expr)) {
		depth = expr_depth(state, *expr);
		if (depth > count) {
4891 static struct triple *flatten(
4892 struct compile_state *state, struct triple *first, struct triple *ptr);
/* Flatten an operation that has only rhs (and possibly lhs) slots:
 * flatten the deepest rhs operand first to keep the live expression
 * stack shallow, then flatten the lhs slots.
 */
static struct triple *flatten_generic(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple **ins;
	/* Only operations with just a rhs should come here */
	rhs = TRIPLE_RHS(ptr->sizes);
	lhs = TRIPLE_LHS(ptr->sizes);
	if (TRIPLE_SIZE(ptr->sizes) != lhs + rhs) {
		internal_error(state, ptr, "unexpected args for: %d %s",
			ptr->op, tops(ptr->op));
	/* Find the depth of the rhs elements */
	for(i = 0; i < rhs; i++) {
		vector[i].ins = &RHS(ptr, i);
		vector[i].depth = expr_depth(state, *vector[i].ins);
	/* Selection sort the rhs */
	for(i = 0; i < rhs; i++) {
		for(j = i + 1; j < rhs; j++ ) {
			if (vector[j].depth > vector[max].depth) {
		struct rhs_vector tmp;
		vector[i] = vector[max];
	/* Now flatten the rhs elements */
	for(i = 0; i < rhs; i++) {
		*vector[i].ins = flatten(state, first, *vector[i].ins);
		use_triple(*vector[i].ins, ptr);
	/* Now flatten the lhs elements */
	for(i = 0; i < lhs; i++) {
		struct triple **ins = &LHS(ptr, i);
		*ins = flatten(state, first, *ins);
		use_triple(*ins, ptr);
/* Flatten a short circuit logical AND: the left side is evaluated
 * into a temporary and the right side is skipped when it is false.
 */
static struct triple *flatten_land(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *left, *right;
	struct triple *val, *test, *jmp, *label1, *end;
	/* Find the triples */
	right = RHS(ptr, 1);
	/* Generate the needed triples */
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	left = flatten(state, first, write_expr(state, val, left));
	/* Branch to end when the left side is already false. */
	test = flatten(state, first,
		lfalse_expr(state, read_expr(state, val)));
	jmp = flatten(state, first, branch(state, end, test));
	label1 = flatten(state, first, label(state));
	right = flatten(state, first, write_expr(state, val, right));
	TARG(jmp, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* Flatten a short circuit logical OR: the right side is skipped
 * when the left side is already true.
 */
static struct triple *flatten_lor(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *left, *right;
	struct triple *val, *jmp, *label1, *end;
	/* Find the triples */
	right = RHS(ptr, 1);
	/* Generate the needed triples */
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	left = flatten(state, first, write_expr(state, val, left));
	/* Branch straight to end on a true left value. */
	jmp = flatten(state, first, branch(state, end, left));
	label1 = flatten(state, first, label(state));
	right = flatten(state, first, write_expr(state, val, right));
	TARG(jmp, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* Flatten a conditional (?:) expression into explicit tests,
 * branches and writes of the selected arm into a temporary.
 */
static struct triple *flatten_cond(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *test, *left, *right;
	struct triple *val, *mv1, *jmp1, *label1, *mv2, *middle, *jmp2, *end;
	/* Find the triples */
	right = RHS(ptr, 2);
	/* Generate the needed triples */
	middle = label(state);
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	test = flatten(state, first, test);
	/* jmp1 takes the already inverted test (see cond_expr). */
	jmp1 = flatten(state, first, branch(state, middle, test));
	label1 = flatten(state, first, label(state));
	left = flatten(state, first, left);
	mv1 = flatten(state, first, write_expr(state, val, left));
	jmp2 = flatten(state, first, branch(state, end, 0));
	TARG(jmp1, 0) = flatten(state, first, middle);
	right = flatten(state, first, right);
	mv2 = flatten(state, first, write_expr(state, val, right));
	TARG(jmp2, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* Deep copy a function's triple list so it can be inlined at a call
 * site. Three passes: duplicate each triple, patch cross references
 * between copies, then drop the bookkeeping use records.
 */
struct triple *copy_func(struct compile_state *state, struct triple *ofunc,
	struct occurance *base_occurance)
	struct triple *nfunc;
	struct triple *nfirst, *ofirst;
	struct triple *new, *old;
	fprintf(stdout, "\n");
	loc(stdout, state, 0);
	fprintf(stdout, "\n__________ copy_func _________\n");
	print_triple(state, ofunc);
	fprintf(stdout, "__________ copy_func _________ done\n\n");
	/* Make a new copy of the old function */
	nfunc = triple(state, OP_LIST, ofunc->type, 0, 0);
	ofirst = old = RHS(ofunc, 0);
	struct occurance *occurance;
	int old_lhs, old_rhs;
	old_lhs = TRIPLE_LHS(old->sizes);
	old_rhs = TRIPLE_RHS(old->sizes);
	occurance = inline_occurance(state, base_occurance, old->occurance);
	new = alloc_triple(state, old->op, old->type, old_lhs, old_rhs,
	if (!triple_stores_block(state, new)) {
		memcpy(&new->u, &old->u, sizeof(new->u));
	RHS(nfunc, 0) = nfirst = new;
	insert_triple(state, nfirst, new);
	new->id |= TRIPLE_FLAG_FLATTENED;
	/* During the copy remember new as user of old */
	use_triple(old, new);
	/* Populate the return type if present */
	if (old == MISC(ofunc, 0)) {
		MISC(nfunc, 0) = new;
	} while(old != ofirst);
	/* Make a second pass to fix up any unresolved references */
	struct triple **oexpr, **nexpr;
	/* Lookup where the copy is, to join pointers */
	count = TRIPLE_SIZE(old->sizes);
	for(i = 0; i < count; i++) {
		oexpr = &old->param[i];
		nexpr = &new->param[i];
		/* An unresolved slot is filled from the original's use
		 * chain, where pass one recorded its copy.
		 */
		if (!*nexpr && *oexpr && (*oexpr)->use) {
			*nexpr = (*oexpr)->use->member;
			if (*nexpr == old) {
				internal_error(state, 0, "new == old?");
			use_triple(*nexpr, new);
		if (!*nexpr && *oexpr) {
			internal_error(state, 0, "Could not copy %d\n", i);
	} while((old != ofirst) && (new != nfirst));
	/* Make a third pass to cleanup the extra uses */
	unuse_triple(old, new);
	} while ((old != ofirst) && (new != nfirst));
/* Inline a function call: copy the callee's triples, wire the call
 * arguments into the parameter slots, splice the copied body into
 * the flattened list, and return the callee's result value.
 */
static struct triple *flatten_call(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	/* Inline the function call */
	struct triple *ofunc, *nfunc, *nfirst, *param, *result;
	struct triple *end, *nend;
	/* Find the triples */
	ofunc = MISC(ptr, 0);
	if (ofunc->op != OP_LIST) {
		internal_error(state, 0, "improper function");
	nfunc = copy_func(state, ofunc, ptr->occurance);
	nfirst = RHS(nfunc, 0)->next;
	/* Prepend the parameter reading into the new function list */
	ptype = nfunc->type->right;
	param = RHS(nfunc, 0)->next;
	pvals = TRIPLE_RHS(ptr->sizes);
	for(i = 0; i < pvals; i++) {
		/* A product type lists remaining parameters on its right. */
		if ((ptype->type & TYPE_MASK) == TYPE_PRODUCT) {
			atype = ptype->left;
		/* Advance to the parameter triple of the matching base type. */
		while((param->type->type & TYPE_MASK) != (atype->type & TYPE_MASK)) {
			param = param->next;
		flatten(state, nfirst, write_expr(state, param, arg));
		ptype = ptype->right;
		param = param->next;
	/* Non void functions yield the callee's return slot as result. */
	if ((nfunc->type->left->type & TYPE_MASK) != TYPE_VOID) {
		result = read_expr(state, MISC(nfunc,0));
	fprintf(stdout, "\n");
	loc(stdout, state, 0);
	fprintf(stdout, "\n__________ flatten_call _________\n");
	print_triple(state, nfunc);
	fprintf(stdout, "__________ flatten_call _________ done\n\n");
	/* Get rid of the extra triples */
	nfirst = RHS(nfunc, 0)->next;
	free_triple(state, RHS(nfunc, 0));
	free_triple(state, nfunc);
	/* Append the new function list onto the return list */
	nend = nfirst->prev;
/* Flatten ptr (and recursively its operands) into the linear
 * instruction list ending at first. Short circuit ops, conditionals
 * and calls dispatch to specialized flatteners; plain operations go
 * through flatten_generic.
 */
static struct triple *flatten(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *orig_ptr;
	/* Only flatten triples once */
	if (ptr->id & TRIPLE_FLAG_FLATTENED) {
	RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
	RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
	return MISC(ptr, 0);
	ptr = flatten_land(state, first, ptr);
	ptr = flatten_lor(state, first, ptr);
	ptr = flatten_cond(state, first, ptr);
	ptr = flatten_call(state, first, ptr);
	RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
	use_triple(RHS(ptr, 0), ptr);
	use_triple(TARG(ptr, 0), ptr);
	if (TRIPLE_RHS(ptr->sizes)) {
		use_triple(RHS(ptr, 0), ptr);
		if (ptr->next != ptr) {
			use_triple(ptr->next, ptr);
	insert_triple(state, first, ptr);
	ptr->id |= TRIPLE_FLAG_FLATTENED;
	ptr = triple(state, OP_SDECL, ptr->type, ptr, 0);
	use_triple(MISC(ptr, 0), ptr);
	/* Since OP_DEREF is just a marker delete it when I flatten it */
	RHS(orig_ptr, 0) = 0;
	free_triple(state, orig_ptr);
	struct triple *base;
	/* Field access through a dereference becomes pointer + offset. */
	if (base->op == OP_DEREF) {
		struct triple *left;
		offset = field_offset(state, base->type, ptr->u.field);
		left = RHS(base, 0);
		ptr = triple(state, OP_ADD, left->type,
			read_expr(state, left),
			int_const(state, &ulong_type, offset));
		free_triple(state, base);
	/* Field access into a value vector selects the member triple. */
	else if (base->op == OP_VAL_VEC) {
		base = flatten(state, first, base);
		ptr = struct_field(state, base, ptr->u.field);
	MISC(ptr, 0) = flatten(state, first, MISC(ptr, 0));
	use_triple(MISC(ptr, 0), ptr);
	use_triple(ptr, MISC(ptr, 0));
	MISC(ptr, 0) = flatten(state, first, MISC(ptr, 0));
	use_triple(MISC(ptr, 0), ptr);
	/* Flatten the easy cases we don't override */
	ptr = flatten_generic(state, first, ptr);
	} while(ptr && (ptr != orig_ptr));
	insert_triple(state, first, ptr);
	ptr->id |= TRIPLE_FLAG_FLATTENED;
5283 static void release_expr(struct compile_state *state, struct triple *expr)
5285 struct triple *head;
5286 head = label(state);
5287 flatten(state, head, expr);
5288 while(head->next != head) {
5289 release_triple(state, head->next);
5291 free_triple(state, head);
/* Replace rhs references to orig inside use with new, updating the
 * use lists; returns nonzero when a replacement happened.
 */
static int replace_rhs_use(struct compile_state *state,
	struct triple *orig, struct triple *new, struct triple *use)
	struct triple **expr;
	expr = triple_rhs(state, use, 0);
	for(;expr; expr = triple_rhs(state, use, expr)) {
		if (*expr == orig) {
	unuse_triple(orig, use);
	use_triple(new, use);
/* Replace lhs references to orig inside use with new, updating the
 * use lists; returns nonzero when a replacement happened.
 */
static int replace_lhs_use(struct compile_state *state,
	struct triple *orig, struct triple *new, struct triple *use)
	struct triple **expr;
	expr = triple_lhs(state, use, 0);
	for(;expr; expr = triple_lhs(state, use, expr)) {
		if (*expr == orig) {
	unuse_triple(orig, use);
	use_triple(new, use);
/* Point every user of orig at new instead. It is an internal error
 * for orig to retain users afterwards.
 */
static void propogate_use(struct compile_state *state,
	struct triple *orig, struct triple *new)
	struct triple_set *user, *next;
	for(user = orig->use; user; user = next) {
		found |= replace_rhs_use(state, orig, new, use);
		found |= replace_lhs_use(state, orig, new, use);
		internal_error(state, use, "use without use");
	internal_error(state, orig, "used after propogate_use");
5357 * ===========================
/* Build an addition, implementing pointer arithmetic: an integer
 * added to a pointer is scaled by the pointed-to object size.
 */
static struct triple *mk_add_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
	struct type *result_type;
	/* Put pointer operands on the left */
	if (is_pointer(right)) {
	left = read_expr(state, left);
	right = read_expr(state, right);
	result_type = ptr_arithmetic_result(state, left, right);
	if (is_pointer(left)) {
		/* Scale the index by the size of the pointed-to type. */
		right = triple(state,
			is_signed(right->type)? OP_SMUL : OP_UMUL,
			int_const(state, &ulong_type,
				size_of(state, left->type->left)));
	return triple(state, OP_ADD, result_type, left, right);
/* Build a subtraction, scaling the right operand by the element
 * size when the left operand is a pointer.
 */
static struct triple *mk_sub_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
	struct type *result_type;
	result_type = ptr_arithmetic_result(state, left, right);
	left = read_expr(state, left);
	right = read_expr(state, right);
	if (is_pointer(left)) {
		/* Scale the index by the size of the pointed-to type. */
		right = triple(state,
			is_signed(right->type)? OP_SMUL : OP_UMUL,
			int_const(state, &ulong_type,
				size_of(state, left->type->left)));
	return triple(state, OP_SUB, result_type, left, right);
/* ++def: add one, write it back, and yield the updated value. */
static struct triple *mk_pre_inc_expr(
	struct compile_state *state, struct triple *def)
	val = mk_add_expr(state, def, int_const(state, &int_type, 1));
	return triple(state, OP_VAL, def->type,
		write_expr(state, def, val),
/* --def: subtract one, write it back, and yield the updated value. */
static struct triple *mk_pre_dec_expr(
	struct compile_state *state, struct triple *def)
	val = mk_sub_expr(state, def, int_const(state, &int_type, 1));
	return triple(state, OP_VAL, def->type,
		write_expr(state, def, val),
/* def++: read the old value, write old+1 back, yield the old value. */
static struct triple *mk_post_inc_expr(
	struct compile_state *state, struct triple *def)
	val = read_expr(state, def);
	return triple(state, OP_VAL, def->type,
		write_expr(state, def,
			mk_add_expr(state, val, int_const(state, &int_type, 1)))
/* def--: read the old value, write old-1 back, yield the old value. */
static struct triple *mk_post_dec_expr(
	struct compile_state *state, struct triple *def)
	val = read_expr(state, def);
	return triple(state, OP_VAL, def->type,
		write_expr(state, def,
			mk_sub_expr(state, val, int_const(state, &int_type, 1)))
/* left[right]: after reads (and array decay inside read_expr) at
 * least one operand must be a pointer; equivalent to *(left+right).
 */
static struct triple *mk_subscript_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
{
	left = read_expr(state, left);
	right = read_expr(state, right);
	if (!(is_pointer(left) || is_pointer(right))) {
		error(state, left, "subscripted value is not a pointer");
	}
	return mk_deref_expr(state, mk_add_expr(state, left, right));
}
5460 static struct triple *mk_cast_expr(
5461 struct compile_state *state, struct type *type, struct triple *expr)
5464 def = read_expr(state, expr);
5465 def = triple(state, OP_COPY, type, def, 0);
5470 * Compile time evaluation
5471 * ===========================
5473 static int is_const(struct triple *ins)
5475 return IS_CONST_OP(ins->op);
/* Do two constant triples hold the same value? They must agree in
 * op and type before their values are compared.
 */
static int constants_equal(struct compile_state *state,
	struct triple *left, struct triple *right)
	if (!is_const(left) || !is_const(right)) {
	else if (left->op != right->op) {
	else if (!equiv_types(left->type, right->type)) {
	/* Integer constants compare by value. */
	if (left->u.cval == right->u.cval) {
	/* Blob constants compare by size and then by bytes. */
	size_t lsize, rsize;
	lsize = size_of(state, left->type);
	rsize = size_of(state, right->type);
	if (lsize != rsize) {
	if (memcmp(left->u.blob, right->u.blob, lsize) == 0) {
	/* Address constants compare by base symbol and offset. */
	if ((MISC(left, 0) == MISC(right, 0)) &&
		(left->u.cval == right->u.cval)) {
	/* NOTE(review): "uknown" typo in this diagnostic message. */
	internal_error(state, left, "uknown constant type");
5526 static int is_zero(struct triple *ins)
5528 return is_const(ins) && (ins->u.cval == 0);
5531 static int is_one(struct triple *ins)
5533 return is_const(ins) && (ins->u.cval == 1);
/* Count the set bits in value, scanning from the most significant
 * position down - presumably a population count; body not fully
 * shown here, confirm against the loop interior.
 */
static long_t bit_count(ulong_t value)
	for(i = (sizeof(ulong_t)*8) -1; i >= 0; i--) {
/* Bit scan reverse: scan from the most significant bit downward,
 * presumably returning the index of the highest set bit.
 */
static long_t bsr(ulong_t value)
	for(i = (sizeof(ulong_t)*8) -1; i >= 0; i--) {
/* Bit scan forward: scan from the least significant bit upward,
 * presumably returning the index of the lowest set bit.
 */
static long_t bsf(ulong_t value)
	for(i = 0; i < (sizeof(ulong_t)*8); i++) {
/* Integer log base 2 of value (position of the highest set bit). */
static long_t log2(ulong_t value)
5585 static long_t tlog2(struct triple *ins)
5587 return log2(ins->u.cval);
/* Is ins a constant whose value is a power of two? */
static int is_pow2(struct triple *ins)
	ulong_t value, mask;
	if (!is_const(ins)) {
	value = ins->u.cval;
	return ((value & mask) == value);
/* Fetch the unsigned constant value referenced by *expr, checking
 * that its type may legally hold an integer constant.
 */
static ulong_t read_const(struct compile_state *state,
	struct triple *ins, struct triple **expr)
	switch(rhs->type->type &TYPE_MASK) {
	internal_error(state, rhs, "bad type to read_const\n");
/* Fetch the constant value referenced by *expr as a signed long_t. */
static long_t read_sconst(struct triple *ins, struct triple **expr)
	return (long_t)(rhs->u.cval);
/* Drop ins from the use lists of each of its rhs operands. */
static void unuse_rhs(struct compile_state *state, struct triple *ins)
	struct triple **expr;
	expr = triple_rhs(state, ins, 0);
	for(;expr;expr = triple_rhs(state, ins, expr)) {
		unuse_triple(*expr, ins);
/* Drop ins from the use lists of each of its lhs operands. */
static void unuse_lhs(struct compile_state *state, struct triple *ins)
	struct triple **expr;
	expr = triple_lhs(state, ins, 0);
	for(;expr;expr = triple_lhs(state, ins, expr)) {
		unuse_triple(*expr, ins);
/* Sanity check: ins must not have any lhs slots. */
static void check_lhs(struct compile_state *state, struct triple *ins)
{
	struct triple **slot;
	for(slot = triple_lhs(state, ins, 0); slot;
		slot = triple_lhs(state, ins, slot)) {
		internal_error(state, ins, "unexpected lhs");
	}
}
/* Sanity check: ins must not have any branch target slots. */
static void check_targ(struct compile_state *state, struct triple *ins)
{
	struct triple **slot;
	for(slot = triple_targ(state, ins, 0); slot;
		slot = triple_targ(state, ins, slot)) {
		internal_error(state, ins, "unexpected targ");
	}
}
/* Strip an instruction of its operands so it can be rewritten in
 * place (see mkcopy/mkconst/mkaddr_const below).
 */
static void wipe_ins(struct compile_state *state, struct triple *ins)
	/* Be careful which instructions you replace the wiped
	 * instruction with, as there are not enough slots
	 * in all instructions to hold all others.
	 */
	check_targ(state, ins);
	unuse_rhs(state, ins);
	unuse_lhs(state, ins);
/* Rewrite ins in place as a copy of rhs. */
static void mkcopy(struct compile_state *state,
	struct triple *ins, struct triple *rhs)
	wipe_ins(state, ins);
	ins->sizes = TRIPLE_SIZES(0, 1, 0, 0);
	use_triple(RHS(ins, 0), ins);
/* Rewrite ins in place as an integer constant holding value; only
 * integral and pointer typed instructions may become one.
 */
static void mkconst(struct compile_state *state,
	struct triple *ins, ulong_t value)
	if (!is_integral(ins) && !is_pointer(ins)) {
		internal_error(state, ins, "unknown type to make constant\n");
	wipe_ins(state, ins);
	ins->op = OP_INTCONST;
	ins->sizes = TRIPLE_SIZES(0, 0, 0, 0);
	ins->u.cval = value;
/* Rewrite ins in place as an address constant: base symbol sdecl
 * (which must be a static declaration) plus byte offset value.
 */
static void mkaddr_const(struct compile_state *state,
	struct triple *ins, struct triple *sdecl, ulong_t value)
	if (sdecl->op != OP_SDECL) {
		internal_error(state, ins, "bad base for addrconst");
	wipe_ins(state, ins);
	ins->op = OP_ADDRCONST;
	ins->sizes = TRIPLE_SIZES(0, 0, 1, 0);
	MISC(ins, 0) = sdecl;
	ins->u.cval = value;
	use_triple(sdecl, ins);
/* Transform multicomponent variables into simple register variables */
static void flatten_structures(struct compile_state *state)
	struct triple *ins, *first;
	first = RHS(state->main_function, 0);
	/* Pass one expand structure values into valvecs.
	struct triple *next;
	if ((ins->type->type & TYPE_MASK) == TYPE_STRUCT) {
		if (ins->op == OP_VAL_VEC) {
		/* A struct load/read becomes a value vector of
		 * per-member loads/reads.
		 */
		else if ((ins->op == OP_LOAD) || (ins->op == OP_READ)) {
			struct triple *def, **vector;
			get_occurance(ins->occurance);
			next = alloc_triple(state, OP_VAL_VEC, ins->type, -1, -1,
			vector = &RHS(next, 0);
			tptr = next->type->left;
			for(i = 0; i < next->type->elements; i++) {
				struct triple *sfield;
				if ((mtype->type & TYPE_MASK) == TYPE_PRODUCT) {
					mtype = mtype->left;
				sfield = deref_field(state, def, mtype->field_ident);
				state, op, mtype, sfield, 0);
				put_occurance(vector[i]->occurance);
				get_occurance(next->occurance);
				vector[i]->occurance = next->occurance;
			propogate_use(state, ins, next);
			flatten(state, ins, next);
			free_triple(state, ins);
		/* A struct store/write becomes per-member stores/writes. */
		else if ((ins->op == OP_STORE) || (ins->op == OP_WRITE)) {
			struct triple *src, *dst, **vector;
			get_occurance(ins->occurance);
			next = alloc_triple(state, OP_VAL_VEC, ins->type, -1, -1,
			vector = &RHS(next, 0);
			tptr = next->type->left;
			for(i = 0; i < ins->type->elements; i++) {
				struct triple *dfield, *sfield;
				if ((mtype->type & TYPE_MASK) == TYPE_PRODUCT) {
					mtype = mtype->left;
				sfield = deref_field(state, src, mtype->field_ident);
				dfield = deref_field(state, dst, mtype->field_ident);
				state, op, mtype, dfield, sfield);
				put_occurance(vector[i]->occurance);
				get_occurance(next->occurance);
				vector[i]->occurance = next->occurance;
			propogate_use(state, ins, next);
			flatten(state, ins, next);
			free_triple(state, ins);
	} while(ins != first);
	/* Pass two flatten the valvecs.
	struct triple *next;
	if (ins->op == OP_VAL_VEC) {
		release_triple(state, ins);
	} while(ins != first);
	/* Pass three verify the state and set ->id to 0.
	ins->id &= ~TRIPLE_FLAG_FLATTENED;
	if ((ins->op != OP_BLOBCONST) && (ins->op != OP_SDECL) &&
		((ins->type->type & TYPE_MASK) == TYPE_STRUCT)) {
		internal_error(state, ins, "STRUCT_TYPE remains?");
	if (ins->op == OP_DOT) {
		internal_error(state, ins, "OP_DOT remains?");
	if (ins->op == OP_VAL_VEC) {
		internal_error(state, ins, "OP_VAL_VEC remains?");
	} while(ins != first);
/* For those operations that cannot be simplified */
static void simplify_noop(struct compile_state *state, struct triple *ins)
/* Simplify a signed multiply: canonicalize a lone constant to the
 * rhs, constant fold, x*0 -> 0, x*1 -> x, and multiply by a power
 * of two is rewritten using the log2 shift count.
 */
static void simplify_smul(struct compile_state *state, struct triple *ins)
	/* Canonicalize: move a lone constant to operand 1. */
	if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
		RHS(ins, 0) = RHS(ins, 1);
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left * right);
	else if (is_zero(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Replace the constant multiplier with its shift count. */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an unsigned multiply: canonicalize a lone constant to
 * the rhs, constant fold, x*0 -> 0, x*1 -> x, and multiply by a
 * power of two is rewritten using the log2 shift count.
 */
static void simplify_umul(struct compile_state *state, struct triple *ins)
	/* Canonicalize: move a lone constant to operand 1. */
	if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
		RHS(ins, 0) = RHS(ins, 1);
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left * right);
	else if (is_zero(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Replace the constant multiplier with its shift count. */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify a signed divide: constant fold, 0/x -> 0, divide by zero
 * is a compile error, x/1 -> x, and division by a power of two is
 * rewritten using the log2 shift count.
 */
static void simplify_sdiv(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		/* NOTE(review): this fold runs before the divide-by-zero
		 * check below; a constant 0 divisor evaluates left/0 in
		 * the compiler itself - confirm intended.
		 */
		mkconst(state, ins, left / right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* NOTE(review): a shift rounds toward negative infinity,
		 * unlike C signed division for negative dividends.
		 */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an unsigned divide: constant fold, 0/x -> 0, divide by
 * zero is a compile error, x/1 -> x, and division by a power of two
 * is rewritten using the log2 shift count (exact for unsigned).
 */
static void simplify_udiv(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		/* NOTE(review): this fold runs before the divide-by-zero
		 * check below; a constant 0 divisor evaluates left/0 in
		 * the compiler itself - confirm intended.
		 */
		mkconst(state, ins, left / right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
5966 static void simplify_smod(struct compile_state *state, struct triple *ins)
5968 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
5970 left = read_const(state, ins, &RHS(ins, 0));
5971 right = read_const(state, ins, &RHS(ins, 1));
5972 mkconst(state, ins, left % right);
5974 else if (is_zero(RHS(ins, 0))) {
5975 mkconst(state, ins, 0);
5977 else if (is_zero(RHS(ins, 1))) {
5978 error(state, ins, "division by zero");
5980 else if (is_one(RHS(ins, 1))) {
5981 mkconst(state, ins, 0);
5983 else if (is_pow2(RHS(ins, 1))) {
5985 val = int_const(state, ins->type, RHS(ins, 1)->u.cval - 1);
5987 insert_triple(state, ins, val);
5988 unuse_triple(RHS(ins, 1), ins);
5989 use_triple(val, ins);
5993 static void simplify_umod(struct compile_state *state, struct triple *ins)
5995 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
5996 ulong_t left, right;
5997 left = read_const(state, ins, &RHS(ins, 0));
5998 right = read_const(state, ins, &RHS(ins, 1));
5999 mkconst(state, ins, left % right);
6001 else if (is_zero(RHS(ins, 0))) {
6002 mkconst(state, ins, 0);
6004 else if (is_zero(RHS(ins, 1))) {
6005 error(state, ins, "division by zero");
6007 else if (is_one(RHS(ins, 1))) {
6008 mkconst(state, ins, 0);
6010 else if (is_pow2(RHS(ins, 1))) {
6012 val = int_const(state, ins->type, RHS(ins, 1)->u.cval - 1);
6014 insert_triple(state, ins, val);
6015 unuse_triple(RHS(ins, 1), ins);
6016 use_triple(val, ins);
6021 static void simplify_add(struct compile_state *state, struct triple *ins)
6023 /* start with the pointer on the left */
6024 if (is_pointer(RHS(ins, 1))) {
6027 RHS(ins, 0) = RHS(ins, 1);
6030 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6031 if (RHS(ins, 0)->op == OP_INTCONST) {
6032 ulong_t left, right;
6033 left = read_const(state, ins, &RHS(ins, 0));
6034 right = read_const(state, ins, &RHS(ins, 1));
6035 mkconst(state, ins, left + right);
6037 else if (RHS(ins, 0)->op == OP_ADDRCONST) {
6038 struct triple *sdecl;
6039 ulong_t left, right;
6040 sdecl = MISC(RHS(ins, 0), 0);
6041 left = RHS(ins, 0)->u.cval;
6042 right = RHS(ins, 1)->u.cval;
6043 mkaddr_const(state, ins, sdecl, left + right);
6046 internal_warning(state, ins, "Optimize me!");
6049 else if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
6052 RHS(ins, 1) = RHS(ins, 0);
6057 static void simplify_sub(struct compile_state *state, struct triple *ins)
6059 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6060 if (RHS(ins, 0)->op == OP_INTCONST) {
6061 ulong_t left, right;
6062 left = read_const(state, ins, &RHS(ins, 0));
6063 right = read_const(state, ins, &RHS(ins, 1));
6064 mkconst(state, ins, left - right);
6066 else if (RHS(ins, 0)->op == OP_ADDRCONST) {
6067 struct triple *sdecl;
6068 ulong_t left, right;
6069 sdecl = MISC(RHS(ins, 0), 0);
6070 left = RHS(ins, 0)->u.cval;
6071 right = RHS(ins, 1)->u.cval;
6072 mkaddr_const(state, ins, sdecl, left - right);
6075 internal_warning(state, ins, "Optimize me!");
6080 static void simplify_sl(struct compile_state *state, struct triple *ins)
6082 if (is_const(RHS(ins, 1))) {
6084 right = read_const(state, ins, &RHS(ins, 1));
6085 if (right >= (size_of(state, ins->type)*8)) {
6086 warning(state, ins, "left shift count >= width of type");
6089 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6090 ulong_t left, right;
6091 left = read_const(state, ins, &RHS(ins, 0));
6092 right = read_const(state, ins, &RHS(ins, 1));
6093 mkconst(state, ins, left << right);
6097 static void simplify_usr(struct compile_state *state, struct triple *ins)
6099 if (is_const(RHS(ins, 1))) {
6101 right = read_const(state, ins, &RHS(ins, 1));
6102 if (right >= (size_of(state, ins->type)*8)) {
6103 warning(state, ins, "right shift count >= width of type");
6106 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6107 ulong_t left, right;
6108 left = read_const(state, ins, &RHS(ins, 0));
6109 right = read_const(state, ins, &RHS(ins, 1));
6110 mkconst(state, ins, left >> right);
6114 static void simplify_ssr(struct compile_state *state, struct triple *ins)
6116 if (is_const(RHS(ins, 1))) {
6118 right = read_const(state, ins, &RHS(ins, 1));
6119 if (right >= (size_of(state, ins->type)*8)) {
6120 warning(state, ins, "right shift count >= width of type");
6123 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6125 left = read_sconst(ins, &RHS(ins, 0));
6126 right = read_sconst(ins, &RHS(ins, 1));
6127 mkconst(state, ins, left >> right);
6131 static void simplify_and(struct compile_state *state, struct triple *ins)
6133 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6134 ulong_t left, right;
6135 left = read_const(state, ins, &RHS(ins, 0));
6136 right = read_const(state, ins, &RHS(ins, 1));
6137 mkconst(state, ins, left & right);
6141 static void simplify_or(struct compile_state *state, struct triple *ins)
6143 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6144 ulong_t left, right;
6145 left = read_const(state, ins, &RHS(ins, 0));
6146 right = read_const(state, ins, &RHS(ins, 1));
6147 mkconst(state, ins, left | right);
6151 static void simplify_xor(struct compile_state *state, struct triple *ins)
6153 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6154 ulong_t left, right;
6155 left = read_const(state, ins, &RHS(ins, 0));
6156 right = read_const(state, ins, &RHS(ins, 1));
6157 mkconst(state, ins, left ^ right);
6161 static void simplify_pos(struct compile_state *state, struct triple *ins)
6163 if (is_const(RHS(ins, 0))) {
6164 mkconst(state, ins, RHS(ins, 0)->u.cval);
6167 mkcopy(state, ins, RHS(ins, 0));
6171 static void simplify_neg(struct compile_state *state, struct triple *ins)
6173 if (is_const(RHS(ins, 0))) {
6175 left = read_const(state, ins, &RHS(ins, 0));
6176 mkconst(state, ins, -left);
6178 else if (RHS(ins, 0)->op == OP_NEG) {
6179 mkcopy(state, ins, RHS(RHS(ins, 0), 0));
6183 static void simplify_invert(struct compile_state *state, struct triple *ins)
6185 if (is_const(RHS(ins, 0))) {
6187 left = read_const(state, ins, &RHS(ins, 0));
6188 mkconst(state, ins, ~left);
6192 static void simplify_eq(struct compile_state *state, struct triple *ins)
6194 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6195 ulong_t left, right;
6196 left = read_const(state, ins, &RHS(ins, 0));
6197 right = read_const(state, ins, &RHS(ins, 1));
6198 mkconst(state, ins, left == right);
6200 else if (RHS(ins, 0) == RHS(ins, 1)) {
6201 mkconst(state, ins, 1);
6205 static void simplify_noteq(struct compile_state *state, struct triple *ins)
6207 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6208 ulong_t left, right;
6209 left = read_const(state, ins, &RHS(ins, 0));
6210 right = read_const(state, ins, &RHS(ins, 1));
6211 mkconst(state, ins, left != right);
6213 else if (RHS(ins, 0) == RHS(ins, 1)) {
6214 mkconst(state, ins, 0);
6218 static void simplify_sless(struct compile_state *state, struct triple *ins)
6220 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6222 left = read_sconst(ins, &RHS(ins, 0));
6223 right = read_sconst(ins, &RHS(ins, 1));
6224 mkconst(state, ins, left < right);
6226 else if (RHS(ins, 0) == RHS(ins, 1)) {
6227 mkconst(state, ins, 0);
6231 static void simplify_uless(struct compile_state *state, struct triple *ins)
6233 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6234 ulong_t left, right;
6235 left = read_const(state, ins, &RHS(ins, 0));
6236 right = read_const(state, ins, &RHS(ins, 1));
6237 mkconst(state, ins, left < right);
6239 else if (is_zero(RHS(ins, 0))) {
6240 mkconst(state, ins, 1);
6242 else if (RHS(ins, 0) == RHS(ins, 1)) {
6243 mkconst(state, ins, 0);
6247 static void simplify_smore(struct compile_state *state, struct triple *ins)
6249 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6251 left = read_sconst(ins, &RHS(ins, 0));
6252 right = read_sconst(ins, &RHS(ins, 1));
6253 mkconst(state, ins, left > right);
6255 else if (RHS(ins, 0) == RHS(ins, 1)) {
6256 mkconst(state, ins, 0);
6260 static void simplify_umore(struct compile_state *state, struct triple *ins)
6262 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6263 ulong_t left, right;
6264 left = read_const(state, ins, &RHS(ins, 0));
6265 right = read_const(state, ins, &RHS(ins, 1));
6266 mkconst(state, ins, left > right);
6268 else if (is_zero(RHS(ins, 1))) {
6269 mkconst(state, ins, 1);
6271 else if (RHS(ins, 0) == RHS(ins, 1)) {
6272 mkconst(state, ins, 0);
6277 static void simplify_slesseq(struct compile_state *state, struct triple *ins)
6279 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6281 left = read_sconst(ins, &RHS(ins, 0));
6282 right = read_sconst(ins, &RHS(ins, 1));
6283 mkconst(state, ins, left <= right);
6285 else if (RHS(ins, 0) == RHS(ins, 1)) {
6286 mkconst(state, ins, 1);
6290 static void simplify_ulesseq(struct compile_state *state, struct triple *ins)
6292 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6293 ulong_t left, right;
6294 left = read_const(state, ins, &RHS(ins, 0));
6295 right = read_const(state, ins, &RHS(ins, 1));
6296 mkconst(state, ins, left <= right);
6298 else if (is_zero(RHS(ins, 0))) {
6299 mkconst(state, ins, 1);
6301 else if (RHS(ins, 0) == RHS(ins, 1)) {
6302 mkconst(state, ins, 1);
6306 static void simplify_smoreeq(struct compile_state *state, struct triple *ins)
6308 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 0))) {
6310 left = read_sconst(ins, &RHS(ins, 0));
6311 right = read_sconst(ins, &RHS(ins, 1));
6312 mkconst(state, ins, left >= right);
6314 else if (RHS(ins, 0) == RHS(ins, 1)) {
6315 mkconst(state, ins, 1);
6319 static void simplify_umoreeq(struct compile_state *state, struct triple *ins)
6321 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6322 ulong_t left, right;
6323 left = read_const(state, ins, &RHS(ins, 0));
6324 right = read_const(state, ins, &RHS(ins, 1));
6325 mkconst(state, ins, left >= right);
6327 else if (is_zero(RHS(ins, 1))) {
6328 mkconst(state, ins, 1);
6330 else if (RHS(ins, 0) == RHS(ins, 1)) {
6331 mkconst(state, ins, 1);
6335 static void simplify_lfalse(struct compile_state *state, struct triple *ins)
6337 if (is_const(RHS(ins, 0))) {
6339 left = read_const(state, ins, &RHS(ins, 0));
6340 mkconst(state, ins, left == 0);
6342 /* Otherwise if I am the only user... */
6343 else if ((RHS(ins, 0)->use->member == ins) && (RHS(ins, 0)->use->next == 0)) {
6345 /* Invert a boolean operation */
6346 switch(RHS(ins, 0)->op) {
6347 case OP_LTRUE: RHS(ins, 0)->op = OP_LFALSE; break;
6348 case OP_LFALSE: RHS(ins, 0)->op = OP_LTRUE; break;
6349 case OP_EQ: RHS(ins, 0)->op = OP_NOTEQ; break;
6350 case OP_NOTEQ: RHS(ins, 0)->op = OP_EQ; break;
6351 case OP_SLESS: RHS(ins, 0)->op = OP_SMOREEQ; break;
6352 case OP_ULESS: RHS(ins, 0)->op = OP_UMOREEQ; break;
6353 case OP_SMORE: RHS(ins, 0)->op = OP_SLESSEQ; break;
6354 case OP_UMORE: RHS(ins, 0)->op = OP_ULESSEQ; break;
6355 case OP_SLESSEQ: RHS(ins, 0)->op = OP_SMORE; break;
6356 case OP_ULESSEQ: RHS(ins, 0)->op = OP_UMORE; break;
6357 case OP_SMOREEQ: RHS(ins, 0)->op = OP_SLESS; break;
6358 case OP_UMOREEQ: RHS(ins, 0)->op = OP_ULESS; break;
6364 mkcopy(state, ins, RHS(ins, 0));
6369 static void simplify_ltrue (struct compile_state *state, struct triple *ins)
6371 if (is_const(RHS(ins, 0))) {
6373 left = read_const(state, ins, &RHS(ins, 0));
6374 mkconst(state, ins, left != 0);
6376 else switch(RHS(ins, 0)->op) {
6377 case OP_LTRUE: case OP_LFALSE: case OP_EQ: case OP_NOTEQ:
6378 case OP_SLESS: case OP_ULESS: case OP_SMORE: case OP_UMORE:
6379 case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
6380 mkcopy(state, ins, RHS(ins, 0));
6385 static void simplify_copy(struct compile_state *state, struct triple *ins)
6387 if (is_const(RHS(ins, 0))) {
6388 switch(RHS(ins, 0)->op) {
6392 left = read_const(state, ins, &RHS(ins, 0));
6393 mkconst(state, ins, left);
6398 struct triple *sdecl;
6400 sdecl = MISC(RHS(ins, 0), 0);
6401 offset = RHS(ins, 0)->u.cval;
6402 mkaddr_const(state, ins, sdecl, offset);
6406 internal_error(state, ins, "uknown constant");
/* simplify_branch - attempt to simplify an OP_BRANCH triple.
 * When the branch condition is constant the branch can be made
 * unconditional (toward the taken target) or deleted when it targets
 * the fall-through instruction.  The control flow graph must be
 * updated in step with the instruction, which the FIXMEs below note
 * is not finished.
 * NOTE(review): this excerpt is elided (several lines are missing);
 * the comments describe only what the visible code shows.
 */
6412 static void simplify_branch(struct compile_state *state, struct triple *ins)
6414 	struct block *block;
/* Sanity checks: only OP_BRANCH triples, and a branch result must
 * never have users. */
6415 	if (ins->op != OP_BRANCH) {
6416 		internal_error(state, ins, "not branch");
6418 	if (ins->use != 0) {
6419 		internal_error(state, ins, "branch use");
6421 #warning "FIXME implement simplify branch."
6422 	/* The challenge here with simplify branch is that I need to
6423 	 * make modifications to the control flow graph as well
6424 	 * as to the branch instruction itself.
6426 	block = ins->u.block;
/* Constant condition: drop the condition operand and collapse to a
 * single unconditional target. */
6428 	if (TRIPLE_RHS(ins->sizes) && is_const(RHS(ins, 0))) {
6429 		struct triple *targ;
6431 		value = read_const(state, ins, &RHS(ins, 0));
6432 		unuse_triple(RHS(ins, 0), ins);
6433 		targ = TARG(ins, 0);
6434 		ins->sizes = TRIPLE_SIZES(0, 0, 0, 1);
6436 			unuse_triple(ins->next, ins);
6437 			TARG(ins, 0) = targ;
6440 			unuse_triple(targ, ins);
6441 			TARG(ins, 0) = ins->next;
6443 #warning "FIXME handle the case of making a branch unconditional"
/* Branch to the immediately following instruction: the branch is a
 * no-op and is degraded to a zero-operand triple. */
6445 	if (TARG(ins, 0) == ins->next) {
6446 		unuse_triple(ins->next, ins);
6447 		if (TRIPLE_RHS(ins->sizes)) {
6448 			unuse_triple(RHS(ins, 0), ins);
6449 			unuse_triple(ins->next, ins);
6451 		ins->sizes = TRIPLE_SIZES(0, 0, 0, 0);
6454 			internal_error(state, ins, "noop use != 0");
6456 #warning "FIXME handle the case of killing a branch"
/* phi_present - walk a basic block's instruction list and report
 * whether it contains an OP_PHI triple; used by simplify_label to
 * decide when two adjacent labels may be merged safely.
 * NOTE(review): this excerpt is elided — the declaration of the loop
 * cursor, the loop head, and the return statements are missing from
 * view; presumably it returns nonzero when a phi is found.
 */
6460 int phi_present(struct block *block)
6468 	if (ptr->op == OP_PHI) {
6472 	} while(ptr != block->last);
/* simplify_label - try to eliminate a redundant OP_LABEL.  An unused
 * label (no users) can go away; a label that immediately follows
 * another label can be merged into it, redirecting every user's
 * branch target, but only when neither successor block contains phi
 * functions that depend on the distinct block identity.
 * NOTE(review): excerpt is elided; comments reflect visible code only.
 */
6476 static void simplify_label(struct compile_state *state, struct triple *ins)
6478 #warning "FIXME enable simplify_label"
6479 	struct triple *first, *last;
6480 	first = RHS(state->main_function, 0);
6482 	/* Ignore the first and last instructions */
6483 	if ((ins == first) || (ins == last)) {
6486 	if (ins->use == 0) {
6489 	else if (ins->prev->op == OP_LABEL) {
6490 		struct block *block;
6491 		block = ins->prev->u.block;
6492 		/* In general it is not safe to merge one label that
6493 		 * immediately follows another. The problem is that the empty
6494 		 * looking block may have phi functions that depend on it.
6497 			(!phi_present(block->left) &&
6498 			!phi_present(block->right)))
6500 			struct triple_set *user, *next;
/* Redirect every user whose branch target is this label to the
 * preceding label, fixing use lists as we go. */
6502 			for(user = ins->use; user; user = next) {
6506 				if (TARG(use, 0) == ins) {
6507 					TARG(use, 0) = ins->prev;
6508 				unuse_triple(ins, use);
6509 				use_triple(ins->prev, use);
6513 				internal_error(state, ins, "noop use != 0");
/* simplify_phi - fold an OP_PHI whose right-hand-side slots are all
 * the SAME constant into that constant.  Walks the rhs slots with
 * triple_rhs(); bails out (early return, elided here) as soon as a
 * slot is missing, non-constant, or disagrees with the first value.
 * NOTE(review): excerpt is elided; the early `return;` statements and
 * the declaration of `value` are missing from view.
 */
6519 static void simplify_phi(struct compile_state *state, struct triple *ins)
6521 	struct triple **expr;
6523 	expr = triple_rhs(state, ins, 0);
6524 	if (!*expr || !is_const(*expr)) {
6527 	value = read_const(state, ins, expr);
6528 	for(;expr;expr = triple_rhs(state, ins, expr)) {
6529 		if (!*expr || !is_const(*expr)) {
6532 		if (value != read_const(state, ins, expr)) {
/* All slots agreed: the phi collapses to a single constant. */
6536 	mkconst(state, ins, value);
6540 static void simplify_bsf(struct compile_state *state, struct triple *ins)
6542 if (is_const(RHS(ins, 0))) {
6544 left = read_const(state, ins, &RHS(ins, 0));
6545 mkconst(state, ins, bsf(left));
6549 static void simplify_bsr(struct compile_state *state, struct triple *ins)
6551 if (is_const(RHS(ins, 0))) {
6553 left = read_const(state, ins, &RHS(ins, 0));
6554 mkconst(state, ins, bsr(left));
/* table_simplify - per-opcode dispatch table for the simplification
 * pass, indexed by triple op code.  The #define lines below alias a
 * specific simplify_* routine to simplify_noop; presumably each sits
 * under an (elided) conditional so individual simplifications can be
 * disabled for debugging — TODO confirm against the full source.
 */
6559 typedef void (*simplify_t)(struct compile_state *state, struct triple *ins);
6560 static const simplify_t table_simplify[] = {
6562 #define simplify_sdivt simplify_noop
6563 #define simplify_udivt simplify_noop
6566 #define simplify_smul simplify_noop
6567 #define simplify_umul simplify_noop
6568 #define simplify_sdiv simplify_noop
6569 #define simplify_udiv simplify_noop
6570 #define simplify_smod simplify_noop
6571 #define simplify_umod simplify_noop
6574 #define simplify_add simplify_noop
6575 #define simplify_sub simplify_noop
6578 #define simplify_sl simplify_noop
6579 #define simplify_usr simplify_noop
6580 #define simplify_ssr simplify_noop
6583 #define simplify_and simplify_noop
6584 #define simplify_xor simplify_noop
6585 #define simplify_or simplify_noop
6588 #define simplify_pos simplify_noop
6589 #define simplify_neg simplify_noop
6590 #define simplify_invert simplify_noop
6594 #define simplify_eq simplify_noop
6595 #define simplify_noteq simplify_noop
6598 #define simplify_sless simplify_noop
6599 #define simplify_uless simplify_noop
6600 #define simplify_smore simplify_noop
6601 #define simplify_umore simplify_noop
6604 #define simplify_slesseq simplify_noop
6605 #define simplify_ulesseq simplify_noop
6606 #define simplify_smoreeq simplify_noop
6607 #define simplify_umoreeq simplify_noop
6610 #define simplify_lfalse simplify_noop
6613 #define simplify_ltrue simplify_noop
6617 #define simplify_copy simplify_noop
6621 #define simplify_branch simplify_noop
6624 #define simplify_label simplify_noop
6628 #define simplify_phi simplify_noop
6632 #define simplify_bsf simplify_noop
6633 #define simplify_bsr simplify_noop
/* Designated initializers: ops without a specific simplifier map to
 * simplify_noop. */
6636 [OP_SDIVT ] = simplify_sdivt,
6637 [OP_UDIVT ] = simplify_udivt,
6638 [OP_SMUL ] = simplify_smul,
6639 [OP_UMUL ] = simplify_umul,
6640 [OP_SDIV ] = simplify_sdiv,
6641 [OP_UDIV ] = simplify_udiv,
6642 [OP_SMOD ] = simplify_smod,
6643 [OP_UMOD ] = simplify_umod,
6644 [OP_ADD ] = simplify_add,
6645 [OP_SUB ] = simplify_sub,
6646 [OP_SL ] = simplify_sl,
6647 [OP_USR ] = simplify_usr,
6648 [OP_SSR ] = simplify_ssr,
6649 [OP_AND ] = simplify_and,
6650 [OP_XOR ] = simplify_xor,
6651 [OP_OR ] = simplify_or,
6652 [OP_POS ] = simplify_pos,
6653 [OP_NEG ] = simplify_neg,
6654 [OP_INVERT ] = simplify_invert,
6656 [OP_EQ ] = simplify_eq,
6657 [OP_NOTEQ ] = simplify_noteq,
6658 [OP_SLESS ] = simplify_sless,
6659 [OP_ULESS ] = simplify_uless,
6660 [OP_SMORE ] = simplify_smore,
6661 [OP_UMORE ] = simplify_umore,
6662 [OP_SLESSEQ ] = simplify_slesseq,
6663 [OP_ULESSEQ ] = simplify_ulesseq,
6664 [OP_SMOREEQ ] = simplify_smoreeq,
6665 [OP_UMOREEQ ] = simplify_umoreeq,
6666 [OP_LFALSE ] = simplify_lfalse,
6667 [OP_LTRUE ] = simplify_ltrue,
6669 [OP_LOAD ] = simplify_noop,
6670 [OP_STORE ] = simplify_noop,
6672 [OP_NOOP ] = simplify_noop,
6674 [OP_INTCONST ] = simplify_noop,
6675 [OP_BLOBCONST ] = simplify_noop,
6676 [OP_ADDRCONST ] = simplify_noop,
6678 [OP_WRITE ] = simplify_noop,
6679 [OP_READ ] = simplify_noop,
6680 [OP_COPY ] = simplify_copy,
6681 [OP_PIECE ] = simplify_noop,
6682 [OP_ASM ] = simplify_noop,
6684 [OP_DOT ] = simplify_noop,
6685 [OP_VAL_VEC ] = simplify_noop,
6687 [OP_LIST ] = simplify_noop,
6688 [OP_BRANCH ] = simplify_branch,
6689 [OP_LABEL ] = simplify_label,
6690 [OP_ADECL ] = simplify_noop,
6691 [OP_SDECL ] = simplify_noop,
6692 [OP_PHI ] = simplify_phi,
6694 [OP_INB ] = simplify_noop,
6695 [OP_INW ] = simplify_noop,
6696 [OP_INL ] = simplify_noop,
6697 [OP_OUTB ] = simplify_noop,
6698 [OP_OUTW ] = simplify_noop,
6699 [OP_OUTL ] = simplify_noop,
6700 [OP_BSF ] = simplify_bsf,
6701 [OP_BSR ] = simplify_bsr,
6702 [OP_RDMSR ] = simplify_noop,
6703 [OP_WRMSR ] = simplify_noop,
6704 [OP_HLT ] = simplify_noop,
6707 static void simplify(struct compile_state *state, struct triple *ins)
6710 simplify_t do_simplify;
6714 if ((op < 0) || (op > sizeof(table_simplify)/sizeof(table_simplify[0]))) {
6718 do_simplify = table_simplify[op];
6721 internal_error(state, ins, "cannot simplify op: %d %s\n",
6725 do_simplify(state, ins);
6726 } while(ins->op != op);
/* simplify_all - run the simplifier over every triple in the main
 * function's circular instruction list, starting at the first triple.
 * NOTE(review): excerpt is elided — the loop head and the statement
 * that advances `ins` are missing from view.
 */
6729 static void simplify_all(struct compile_state *state)
6731 	struct triple *ins, *first;
6732 	first = RHS(state->main_function, 0);
6735 		simplify(state, ins);
6737 	}while(ins != first);
6742 * ============================
/* register_builtin_function - synthesize the triple representation of
 * a compiler builtin (e.g. __builtin_inb) and bind it to `name` in the
 * symbol table.  The variadic arguments supply one `struct type *`
 * per parameter; the parameter count comes from table_ops[op].rhs.
 * Builds: an OP_LIST function body containing labels, one variable
 * per parameter, the `op` triple itself, and (for struct returns) an
 * OP_VAL_VEC gathering the OP_PIECE result registers.
 * NOTE(review): excerpt is elided; comments reflect visible code only.
 */
6745 static void register_builtin_function(struct compile_state *state,
6746 	const char *name, int op, struct type *rtype, ...)
6748 	struct type *ftype, *atype, *param, **next;
6749 	struct triple *def, *arg, *result, *work, *last, *first;
6750 	struct hash_entry *ident;
6751 	struct file_state file;
6757 	/* Dummy file state to get debug handling right */
6758 	memset(&file, 0, sizeof(file));
6759 	file.basename = "<built-in>";
6761 	file.report_line = 1;
6762 	file.report_name = file.basename;
6763 	file.prev = state->file;
6764 	state->file = &file;
6765 	state->function = name;
6767 	/* Find the Parameter count */
6768 	valid_op(state, op);
6769 	parameters = table_ops[op].rhs;
6770 	if (parameters < 0 ) {
6771 		internal_error(state, 0, "Invalid builtin parameter count");
6774 	/* Find the function type */
6775 	ftype = new_type(TYPE_FUNCTION, rtype, 0);
6776 	next = &ftype->right;
6777 	va_start(args, rtype);
6778 	for(i = 0; i < parameters; i++) {
6779 		atype = va_arg(args, struct type *);
6783 		*next = new_type(TYPE_PRODUCT, *next, atype);
6784 		next = &((*next)->right);
6792 	/* Generate the needed triples */
6793 	def = triple(state, OP_LIST, ftype, 0, 0);
6794 	first = label(state);
6795 	RHS(def, 0) = first;
6797 	/* Now string them together */
6798 	param = ftype->right;
6799 	for(i = 0; i < parameters; i++) {
6800 		if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
6801 			atype = param->left;
6805 		arg = flatten(state, first, variable(state, atype));
6806 		param = param->right;
/* Non-void return: allocate a result variable and remember it in the
 * function's MISC slot. */
6809 	if ((rtype->type & TYPE_MASK) != TYPE_VOID) {
6810 		result = flatten(state, first, variable(state, rtype));
6812 	MISC(def, 0) = result;
6813 	work = new_triple(state, op, rtype, -1, parameters);
6814 	for(i = 0, arg = first->next; i < parameters; i++, arg = arg->next) {
6815 		RHS(work, i) = read_expr(state, arg);
6817 	if (result && ((rtype->type & TYPE_MASK) == TYPE_STRUCT)) {
6819 		/* Populate the LHS with the target registers */
6820 		work = flatten(state, first, work);
6821 		work->type = &void_type;
6822 		param = rtype->left;
6823 		if (rtype->elements != TRIPLE_LHS(work->sizes)) {
6824 			internal_error(state, 0, "Invalid result type");
6826 		val = new_triple(state, OP_VAL_VEC, rtype, -1, -1);
6827 		for(i = 0; i < rtype->elements; i++) {
6828 			struct triple *piece;
6830 			if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
6831 				atype = param->left;
6833 			if (!TYPE_ARITHMETIC(atype->type) &&
6834 				!TYPE_PTR(atype->type)) {
6835 				internal_error(state, 0, "Invalid lhs type");
6837 			piece = triple(state, OP_PIECE, atype, work, 0);
6839 			LHS(work, i) = piece;
6840 			RHS(val, i) = piece;
6845 		work = write_expr(state, result, work);
6847 	work = flatten(state, first, work);
6848 	last = flatten(state, first, label(state));
6849 	name_len = strlen(name);
6850 	ident = lookup(state, name, name_len);
6851 	symbol(state, ident, &ident->sym_ident, def, ftype);
/* Restore the file/function context saved above. */
6853 	state->file = file.prev;
6854 	state->function = 0;
6856 	fprintf(stdout, "\n");
6857 	loc(stdout, state, 0);
6858 	fprintf(stdout, "\n__________ builtin_function _________\n");
6859 	print_triple(state, def);
6860 	fprintf(stdout, "__________ builtin_function _________ done\n\n");
/* partial_struct - build one field of a builtin struct type: clone
 * `type`, tag it with `field_name`'s identifier, and (when `rest` is
 * non-null, per the elided conditional) chain it onto `rest` with a
 * TYPE_PRODUCT node.  Returns the resulting (possibly chained) type.
 */
6864 static struct type *partial_struct(struct compile_state *state,
6865 	const char *field_name, struct type *type, struct type *rest)
6867 	struct hash_entry *field_ident;
6868 	struct type *result;
6871 	field_name_len = strlen(field_name);
6872 	field_ident = lookup(state, field_name, field_name_len);
6874 	result = clone_type(0, type);
6875 	result->field_ident = field_ident;
6878 	result = new_type(TYPE_PRODUCT, result, rest);
/* register_builtin_type - bind `type` to `name` in the symbol table.
 * A TYPE_PRODUCT argument is first wrapped in a TYPE_STRUCT whose
 * element count is derived by walking the product chain; plain types
 * are registered as ordinary type names (TOK_TYPE_NAME).
 * NOTE(review): excerpt is elided; comments reflect visible code only.
 */
6883 static struct type *register_builtin_type(struct compile_state *state,
6884 	const char *name, struct type *type)
6886 	struct hash_entry *ident;
6889 	name_len = strlen(name);
6890 	ident = lookup(state, name, name_len);
6892 	if ((type->type & TYPE_MASK) == TYPE_PRODUCT) {
6893 		ulong_t elements = 0;
6895 		type = new_type(TYPE_STRUCT, type, 0);
/* Count the fields by walking the TYPE_PRODUCT chain. */
6897 		while((field->type & TYPE_MASK) == TYPE_PRODUCT) {
6899 			field = field->right;
6902 		symbol(state, ident, &ident->sym_struct, 0, type);
6903 		type->type_ident = ident;
6904 		type->elements = elements;
6906 		symbol(state, ident, &ident->sym_ident, 0, type);
6907 		ident->tok = TOK_TYPE_NAME;
/* register_builtins - install every compiler builtin: the div/ldiv/
 * udiv/uldiv quotient-remainder struct types and functions, port I/O
 * (inb/inw/inl, outb/outw/outl), bit scans (bsf/bsr), MSR access
 * (rdmsr/wrmsr), and hlt.
 */
6912 static void register_builtins(struct compile_state *state)
6914 	struct type *div_type, *ldiv_type;
6915 	struct type *udiv_type, *uldiv_type;
6916 	struct type *msr_type;
/* Quotient/remainder result structs for the divide builtins. */
6918 	div_type = register_builtin_type(state, "__builtin_div_t",
6919 		partial_struct(state, "quot", &int_type,
6920 		partial_struct(state, "rem", &int_type, 0)));
6921 	ldiv_type = register_builtin_type(state, "__builtin_ldiv_t",
6922 		partial_struct(state, "quot", &long_type,
6923 		partial_struct(state, "rem", &long_type, 0)));
6924 	udiv_type = register_builtin_type(state, "__builtin_udiv_t",
6925 		partial_struct(state, "quot", &uint_type,
6926 		partial_struct(state, "rem", &uint_type, 0)));
6927 	uldiv_type = register_builtin_type(state, "__builtin_uldiv_t",
6928 		partial_struct(state, "quot", &ulong_type,
6929 		partial_struct(state, "rem", &ulong_type, 0)));
6931 	register_builtin_function(state, "__builtin_div", OP_SDIVT, div_type,
6932 		&int_type, &int_type);
6933 	register_builtin_function(state, "__builtin_ldiv", OP_SDIVT, ldiv_type,
6934 		&long_type, &long_type);
6935 	register_builtin_function(state, "__builtin_udiv", OP_UDIVT, udiv_type,
6936 		&uint_type, &uint_type);
6937 	register_builtin_function(state, "__builtin_uldiv", OP_UDIVT, uldiv_type,
6938 		&ulong_type, &ulong_type);
/* Port input/output builtins. */
6940 	register_builtin_function(state, "__builtin_inb", OP_INB, &uchar_type,
6942 	register_builtin_function(state, "__builtin_inw", OP_INW, &ushort_type,
6944 	register_builtin_function(state, "__builtin_inl", OP_INL, &uint_type,
6947 	register_builtin_function(state, "__builtin_outb", OP_OUTB, &void_type,
6948 		&uchar_type, &ushort_type);
6949 	register_builtin_function(state, "__builtin_outw", OP_OUTW, &void_type,
6950 		&ushort_type, &ushort_type);
6951 	register_builtin_function(state, "__builtin_outl", OP_OUTL, &void_type,
6952 		&uint_type, &ushort_type);
/* Bit-scan builtins. */
6954 	register_builtin_function(state, "__builtin_bsf", OP_BSF, &int_type,
6956 	register_builtin_function(state, "__builtin_bsr", OP_BSR, &int_type,
/* Model-specific-register access and halt. */
6959 	msr_type = register_builtin_type(state, "__builtin_msr_t",
6960 		partial_struct(state, "lo", &ulong_type,
6961 		partial_struct(state, "hi", &ulong_type, 0)));
6963 	register_builtin_function(state, "__builtin_rdmsr", OP_RDMSR, msr_type,
6965 	register_builtin_function(state, "__builtin_wrmsr", OP_WRMSR, &void_type,
6966 		&ulong_type, &ulong_type, &ulong_type);
6968 	register_builtin_function(state, "__builtin_hlt", OP_HLT, &void_type,
6972 static struct type *declarator(
6973 struct compile_state *state, struct type *type,
6974 struct hash_entry **ident, int need_ident);
6975 static void decl(struct compile_state *state, struct triple *first);
6976 static struct type *specifier_qualifier_list(struct compile_state *state);
6977 static int isdecl_specifier(int tok);
6978 static struct type *decl_specifiers(struct compile_state *state);
6979 static int istype(int tok);
6980 static struct triple *expr(struct compile_state *state);
6981 static struct triple *assignment_expr(struct compile_state *state);
6982 static struct type *type_name(struct compile_state *state);
6983 static void statement(struct compile_state *state, struct triple *fist);
/* call_expr - parse the argument list of a function call to `func`
 * (an OP_LIST function definition) and build an OP_CALL triple.
 * Each argument expression is type-checked against the corresponding
 * parameter type from the function's TYPE_PRODUCT chain.
 * NOTE(review): excerpt is elided; comments reflect visible code only.
 */
6985 static struct triple *call_expr(
6986 	struct compile_state *state, struct triple *func)
6989 	struct type *param, *type;
6990 	ulong_t pvals, index;
6992 	if ((func->type->type & TYPE_MASK) != TYPE_FUNCTION) {
6993 		error(state, 0, "Called object is not a function");
6995 	if (func->op != OP_LIST) {
6996 		internal_error(state, 0, "improper function");
6998 	eat(state, TOK_LPAREN);
6999 	/* Find the return type without any specifiers */
7000 	type = clone_type(0, func->type->left);
7001 	def = new_triple(state, OP_CALL, func->type, -1, -1);
7004 	pvals = TRIPLE_RHS(def->sizes);
7005 	MISC(def, 0) = func;
7007 	param = func->type->right;
7008 	for(index = 0; index < pvals; index++) {
7010 		struct type *arg_type;
7011 		val = read_expr(state, assignment_expr(state));
7013 		if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
7014 			arg_type = param->left;
/* Check the argument is assignable to the parameter type. */
7016 		write_compatible(state, arg_type, val->type);
7017 		RHS(def, index) = val;
7018 		if (index != (pvals - 1)) {
7019 			eat(state, TOK_COMMA);
7020 			param = param->right;
7023 	eat(state, TOK_RPAREN);
/* character_constant - parse a character literal token ('x') into an
 * OP_INTCONST triple of char type.  The token's value string includes
 * the surrounding quotes, hence the +1 / -2 adjustments.  Multibyte
 * character constants are rejected.
 */
7028 static struct triple *character_constant(struct compile_state *state)
7032 	const signed char *str, *end;
7035 	eat(state, TOK_LIT_CHAR);
7036 	tk = &state->token[0];
7037 	str = tk->val.str + 1;
7038 	str_len = tk->str_len - 2;
7040 		error(state, 0, "empty character constant");
7042 	end = str + str_len;
7043 	c = char_value(state, &str, end);
/* char_value consumed fewer bytes than the literal held. */
7045 		error(state, 0, "multibyte character constant not supported");
7047 	def = int_const(state, &char_type, (ulong_t)((long_t)c));
/* string_constant - parse one or more adjacent string literal tokens,
 * concatenating them per C rules, decoding escapes with char_value,
 * and produce an OP_BLOBCONST triple of char-array type whose element
 * count includes the terminating NUL.
 * NOTE(review): excerpt is elided; comments reflect visible code only.
 */
7051 static struct triple *string_constant(struct compile_state *state)
7056 	const signed char *str, *end;
7057 	signed char *buf, *ptr;
7061 	type = new_type(TYPE_ARRAY, &char_type, 0);
7063 	/* The while loop handles string concatenation */
7065 		eat(state, TOK_LIT_STRING);
7066 		tk = &state->token[0];
7067 		str = tk->val.str + 1;
7068 		str_len = tk->str_len - 2;
7070 			error(state, 0, "negative string constant length");
7072 		end = str + str_len;
/* Grow the buffer, copy what we have so far, then decode this
 * literal's characters onto the end. */
7074 		buf = xmalloc(type->elements + str_len + 1, "string_constant");
7075 		memcpy(buf, ptr, type->elements);
7076 		ptr = buf + type->elements;
7078 			*ptr++ = char_value(state, &str, end);
7080 		type->elements = ptr - buf;
7081 	} while(peek(state) == TOK_LIT_STRING);
/* +1 for the implicit terminating NUL. */
7083 	type->elements += 1;
7084 	def = triple(state, OP_BLOBCONST, type, 0, 0);
/* integer_constant - parse an integer literal token, honoring the
 * u/U and l/L suffixes and the C type-promotion rules for decimal vs
 * octal/hex literals, and produce an int_const triple of the chosen
 * type.
 * NOTE(review): runtime string "Integer constant to large" has a typo
 * ("too large") — left unchanged here since a doc pass must not alter
 * runtime strings.
 */
7090 static struct triple *integer_constant(struct compile_state *state)
7099 	eat(state, TOK_LIT_INT);
7100 	tk = &state->token[0];
/* A leading '0' marks an octal/hex literal; they may promote to
 * unsigned types where decimal literals may not. */
7102 	decimal = (tk->val.str[0] != '0');
7103 	val = strtoul(tk->val.str, &end, 0);
7104 	if ((val == ULONG_MAX) && (errno == ERANGE)) {
7105 		error(state, 0, "Integer constant to large");
7108 	if ((*end == 'u') || (*end == 'U')) {
7112 	if ((*end == 'l') || (*end == 'L')) {
7116 	if ((*end == 'u') || (*end == 'U')) {
7121 		error(state, 0, "Junk at end of integer constant");
7128 		if (!decimal && (val > LONG_MAX)) {
7134 		if (val > UINT_MAX) {
7140 		if (!decimal && (val > INT_MAX) && (val <= UINT_MAX)) {
7143 		else if (!decimal && (val > LONG_MAX)) {
7146 		else if (val > INT_MAX) {
7150 	def = int_const(state, type, val);
/* primary_expr - parse a primary expression: an identifier (variable,
 * function, or enum constant), a parenthesized expression, or one of
 * the literal kinds.  Floating point literals are rejected.
 * NOTE(review): excerpt is elided — the switch head and several case
 * labels are missing from view.
 */
7154 static struct triple *primary_expr(struct compile_state *state)
7162 		struct hash_entry *ident;
7163 		/* Here ident is either:
7166 		 * an enumeration constant.
7168 		eat(state, TOK_IDENT);
7169 		ident = state->token[0].ident;
7170 		if (!ident->sym_ident) {
7171 			error(state, 0, "%s undeclared", ident->name);
7173 		def = ident->sym_ident->def;
7176 	case TOK_ENUM_CONST:
7177 		/* Here ident is an enumeration constant */
7178 		eat(state, TOK_ENUM_CONST);
/* Parenthesized subexpression. */
7183 		eat(state, TOK_LPAREN);
7185 		eat(state, TOK_RPAREN);
7188 		def = integer_constant(state);
7191 		eat(state, TOK_LIT_FLOAT);
7192 		error(state, 0, "Floating point constants not supported");
7197 		def = character_constant(state);
7199 	case TOK_LIT_STRING:
7200 		def = string_constant(state);
7204 		error(state, 0, "Unexpected token: %s\n", tokens[tok]);
/* postfix_expr - parse a primary expression followed by any number of
 * postfix operators: array subscript, function call, `.` and `->`
 * member access, and post-increment/decrement.
 * NOTE(review): excerpt is elided — the loop head, the `left = ...`
 * assignments, and the loop-exit default case are missing from view.
 */
7209 static struct triple *postfix_expr(struct compile_state *state)
7213 	def = primary_expr(state);
7215 		struct triple *left;
7219 		switch((tok = peek(state))) {
7221 			eat(state, TOK_LBRACKET);
7222 			def = mk_subscript_expr(state, left, expr(state));
7223 			eat(state, TOK_RBRACKET);
7226 			def = call_expr(state, def);
7230 			struct hash_entry *field;
7231 			eat(state, TOK_DOT);
7232 			eat(state, TOK_IDENT);
7233 			field = state->token[0].ident;
7234 			def = deref_field(state, def, field);
7239 			struct hash_entry *field;
7240 			eat(state, TOK_ARROW);
7241 			eat(state, TOK_IDENT);
7242 			field = state->token[0].ident;
/* p->f is (*p).f: dereference, then select the field. */
7243 			def = mk_deref_expr(state, read_expr(state, def));
7244 			def = deref_field(state, def, field);
7248 			eat(state, TOK_PLUSPLUS);
7249 			def = mk_post_inc_expr(state, left);
7251 		case TOK_MINUSMINUS:
7252 			eat(state, TOK_MINUSMINUS);
7253 			def = mk_post_dec_expr(state, left);
7263 static struct triple *cast_expr(struct compile_state *state);
/* unary_expr - parse a unary expression: prefix ++/--, address-of,
 * dereference, unary +/-, bitwise and logical NOT, sizeof and
 * alignof (on either a parenthesized type name or an expression),
 * falling through to postfix_expr otherwise.
 * NOTE(review): excerpt is elided — case labels, `break`s, and some
 * intermediate statements are missing from view.
 */
7265 static struct triple *unary_expr(struct compile_state *state)
7267 	struct triple *def, *right;
7269 	switch((tok = peek(state))) {
7271 		eat(state, TOK_PLUSPLUS);
7272 		def = mk_pre_inc_expr(state, unary_expr(state));
7274 	case TOK_MINUSMINUS:
7275 		eat(state, TOK_MINUSMINUS);
7276 		def = mk_pre_dec_expr(state, unary_expr(state));
7279 		eat(state, TOK_AND);
7280 		def = mk_addr_expr(state, cast_expr(state), 0);
7283 		eat(state, TOK_STAR);
7284 		def = mk_deref_expr(state, read_expr(state, cast_expr(state)));
/* Unary plus: arithmetic operand, integral promotion only. */
7287 		eat(state, TOK_PLUS);
7288 		right = read_expr(state, cast_expr(state));
7289 		arithmetic(state, right);
7290 		def = integral_promotion(state, right);
/* Unary minus: promote, then negate. */
7293 		eat(state, TOK_MINUS);
7294 		right = read_expr(state, cast_expr(state));
7295 		arithmetic(state, right);
7296 		def = integral_promotion(state, right);
7297 		def = triple(state, OP_NEG, def->type, def, 0);
7300 		eat(state, TOK_TILDE);
7301 		right = read_expr(state, cast_expr(state));
7302 		integral(state, right);
7303 		def = integral_promotion(state, right);
7304 		def = triple(state, OP_INVERT, def->type, def, 0);
7307 		eat(state, TOK_BANG);
7308 		right = read_expr(state, cast_expr(state));
7310 		def = lfalse_expr(state, right);
/* sizeof: either sizeof(type-name) or sizeof expr; the operand
 * expression is parsed but not evaluated (released). */
7316 		eat(state, TOK_SIZEOF);
7318 		tok2 = peek2(state);
7319 		if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7320 			eat(state, TOK_LPAREN);
7321 			type = type_name(state);
7322 			eat(state, TOK_RPAREN);
7325 			struct triple *expr;
7326 			expr = unary_expr(state);
7328 			release_expr(state, expr);
7330 		def = int_const(state, &ulong_type, size_of(state, type));
/* alignof: same two forms as sizeof. */
7337 		eat(state, TOK_ALIGNOF);
7339 		tok2 = peek2(state);
7340 		if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7341 			eat(state, TOK_LPAREN);
7342 			type = type_name(state);
7343 			eat(state, TOK_RPAREN);
7346 			struct triple *expr;
7347 			expr = unary_expr(state);
7349 			release_expr(state, expr);
7351 		def = int_const(state, &ulong_type, align_of(state, type));
7355 		def = postfix_expr(state);
/* Parse a cast expression: "( type-name ) cast-expr" when the token after
 * the open paren is a type, otherwise a plain unary expression.
 * (Source elided in this view; code left byte-identical.)
 */
7361 static struct triple *cast_expr(struct compile_state *state)
7366 tok2 = peek2(state);
7367 if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7369 eat(state, TOK_LPAREN);
7370 type = type_name(state);
7371 eat(state, TOK_RPAREN);
7372 def = mk_cast_expr(state, type, cast_expr(state));
7375 def = unary_expr(state);
/* Parse multiplicative expressions (* / %), left-associative.
 * The opcode (signed vs unsigned mul/div/mod) is chosen from the
 * signedness of the usual-arithmetic-conversion result type.
 * (Source elided in this view; code left byte-identical.)
 */
7380 static struct triple *mult_expr(struct compile_state *state)
7384 def = cast_expr(state);
7386 struct triple *left, *right;
7387 struct type *result_type;
7390 switch(tok = (peek(state))) {
7394 left = read_expr(state, def);
7395 arithmetic(state, left);
7399 right = read_expr(state, cast_expr(state));
7400 arithmetic(state, right);
7402 result_type = arithmetic_result(state, left, right);
7403 sign = is_signed(result_type);
7406 case TOK_STAR: op = sign? OP_SMUL : OP_UMUL; break;
7407 case TOK_DIV: op = sign? OP_SDIV : OP_UDIV; break;
7408 case TOK_MOD: op = sign? OP_SMOD : OP_UMOD; break;
7410 def = triple(state, op, result_type, left, right);
/* Parse additive expressions (+ -), left-associative; the mk_add/mk_sub
 * helpers handle operand conversions.
 * (Source elided in this view; code left byte-identical.)
 */
7420 static struct triple *add_expr(struct compile_state *state)
7424 def = mult_expr(state);
7427 switch( peek(state)) {
7429 eat(state, TOK_PLUS);
7430 def = mk_add_expr(state, def, mult_expr(state));
7433 eat(state, TOK_MINUS);
7434 def = mk_sub_expr(state, def, mult_expr(state));
/* Parse shift expressions (<< >>).  Both operands are integrally promoted;
 * the result takes the (promoted) left operand's type, and >> selects a
 * signed or unsigned shift based on that type's signedness.
 * (Source elided in this view; code left byte-identical.)
 */
7444 static struct triple *shift_expr(struct compile_state *state)
7448 def = add_expr(state);
7450 struct triple *left, *right;
7453 switch((tok = peek(state))) {
7456 left = read_expr(state, def);
7457 integral(state, left);
7458 left = integral_promotion(state, left);
7462 right = read_expr(state, add_expr(state));
7463 integral(state, right);
7464 right = integral_promotion(state, right);
7466 op = (tok == TOK_SL)? OP_SL :
7467 is_signed(left->type)? OP_SSR: OP_USR;
7469 def = triple(state, op, left->type, left, right);
/* Parse relational expressions (< > <= >=).  Comparison opcodes are chosen
 * from the signedness of the common arithmetic type; the result is int.
 * Per the #warning: currently limited to arithmetic operand types.
 * (Source elided in this view; code left byte-identical.)
 */
7479 static struct triple *relational_expr(struct compile_state *state)
7481 #warning "Extend relational exprs to work on more than arithmetic types"
7484 def = shift_expr(state);
7486 struct triple *left, *right;
7487 struct type *arg_type;
7490 switch((tok = peek(state))) {
7495 left = read_expr(state, def);
7496 arithmetic(state, left);
7500 right = read_expr(state, shift_expr(state));
7501 arithmetic(state, right);
7503 arg_type = arithmetic_result(state, left, right);
7504 sign = is_signed(arg_type);
7507 case TOK_LESS: op = sign? OP_SLESS : OP_ULESS; break;
7508 case TOK_MORE: op = sign? OP_SMORE : OP_UMORE; break;
7509 case TOK_LESSEQ: op = sign? OP_SLESSEQ : OP_ULESSEQ; break;
7510 case TOK_MOREEQ: op = sign? OP_SMOREEQ : OP_UMOREEQ; break;
7512 def = triple(state, op, &int_type, left, right);
/* Parse equality expressions (== !=); result type is int.
 * Per the #warning: currently limited to arithmetic operand types.
 * (Source elided in this view; code left byte-identical.)
 */
7522 static struct triple *equality_expr(struct compile_state *state)
7524 #warning "Extend equality exprs to work on more than arithmetic types"
7527 def = relational_expr(state);
7529 struct triple *left, *right;
7532 switch((tok = peek(state))) {
7535 left = read_expr(state, def);
7536 arithmetic(state, left);
7538 right = read_expr(state, relational_expr(state));
7539 arithmetic(state, right);
7540 op = (tok == TOK_EQEQ) ? OP_EQ: OP_NOTEQ;
7541 def = triple(state, op, &int_type, left, right);
/* Parse bitwise AND expressions (&), left-associative over
 * equality expressions; both operands must be integral.
 * (Source elided in this view; code left byte-identical.)
 */
7551 static struct triple *and_expr(struct compile_state *state)
7554 def = equality_expr(state);
7555 while(peek(state) == TOK_AND) {
7556 struct triple *left, *right;
7557 struct type *result_type;
7558 left = read_expr(state, def);
7559 integral(state, left);
7560 eat(state, TOK_AND);
7561 right = read_expr(state, equality_expr(state));
7562 integral(state, right);
7563 result_type = arithmetic_result(state, left, right);
7564 def = triple(state, OP_AND, result_type, left, right);
/* Parse bitwise XOR expressions (^), left-associative over AND expressions.
 * (Source elided in this view; code left byte-identical.)
 */
7569 static struct triple *xor_expr(struct compile_state *state)
7572 def = and_expr(state);
7573 while(peek(state) == TOK_XOR) {
7574 struct triple *left, *right;
7575 struct type *result_type;
7576 left = read_expr(state, def);
7577 integral(state, left);
7578 eat(state, TOK_XOR);
7579 right = read_expr(state, and_expr(state));
7580 integral(state, right);
7581 result_type = arithmetic_result(state, left, right);
7582 def = triple(state, OP_XOR, result_type, left, right);
/* Parse bitwise OR expressions (|), left-associative over XOR expressions.
 * (Source elided in this view; code left byte-identical.)
 */
7587 static struct triple *or_expr(struct compile_state *state)
7590 def = xor_expr(state);
7591 while(peek(state) == TOK_OR) {
7592 struct triple *left, *right;
7593 struct type *result_type;
7594 left = read_expr(state, def);
7595 integral(state, left);
7597 right = read_expr(state, xor_expr(state));
7598 integral(state, right);
7599 result_type = arithmetic_result(state, left, right);
7600 def = triple(state, OP_OR, result_type, left, right);
/* Parse logical AND expressions (&&).  Each operand is normalized to a
 * 0/1 truth value via ltrue_expr; result type is int.
 * NOTE(review): built as an OP_LAND triple, so short-circuit behavior
 * depends on how OP_LAND is lowered elsewhere — not visible here.
 * (Source elided in this view; code left byte-identical.)
 */
7605 static struct triple *land_expr(struct compile_state *state)
7608 def = or_expr(state);
7609 while(peek(state) == TOK_LOGAND) {
7610 struct triple *left, *right;
7611 left = read_expr(state, def);
7613 eat(state, TOK_LOGAND);
7614 right = read_expr(state, or_expr(state));
7617 def = triple(state, OP_LAND, &int_type,
7618 ltrue_expr(state, left),
7619 ltrue_expr(state, right));
/* Parse logical OR expressions (||), mirroring land_expr: operands are
 * normalized with ltrue_expr and combined with an OP_LOR int triple.
 * (Source elided in this view; code left byte-identical.)
 */
7624 static struct triple *lor_expr(struct compile_state *state)
7627 def = land_expr(state);
7628 while(peek(state) == TOK_LOGOR) {
7629 struct triple *left, *right;
7630 left = read_expr(state, def);
7632 eat(state, TOK_LOGOR);
7633 right = read_expr(state, land_expr(state));
7636 def = triple(state, OP_LOR, &int_type,
7637 ltrue_expr(state, left),
7638 ltrue_expr(state, right));
/* Parse the ternary conditional "test ? expr : conditional-expr",
 * right-associative via the recursive call for the false arm.
 * (Source elided in this view; code left byte-identical.)
 */
7643 static struct triple *conditional_expr(struct compile_state *state)
7646 def = lor_expr(state);
7647 if (peek(state) == TOK_QUEST) {
7648 struct triple *test, *left, *right;
7650 test = ltrue_expr(state, read_expr(state, def));
7651 eat(state, TOK_QUEST);
7652 left = read_expr(state, expr(state));
7653 eat(state, TOK_COLON);
7654 right = read_expr(state, conditional_expr(state));
7656 def = cond_expr(state, test, left, right);
/* Reduce an expression tree to a single constant triple.  If the input is
 * not already constant it is flattened onto a dummy label, each triple is
 * simplified in order, the (expected constant) tail is unlinked into a
 * standalone node, and all intermediates are released.  Errors out if
 * simplification does not yield a constant.
 * (Source elided in this view; code left byte-identical.)
 */
7661 static struct triple *eval_const_expr(
7662 struct compile_state *state, struct triple *expr)
7665 if (is_const(expr)) {
7669 /* If we don't start out as a constant simplify into one */
7670 struct triple *head, *ptr;
7671 head = label(state); /* dummy initial triple */
7672 flatten(state, head, expr);
7673 for(ptr = head->next; ptr != head; ptr = ptr->next) {
7674 simplify(state, ptr);
7676 /* Remove the constant value the tail of the list */
7678 def->prev->next = def->next;
7679 def->next->prev = def->prev;
7680 def->next = def->prev = def;
7681 if (!is_const(def)) {
7682 error(state, 0, "Not a constant expression");
7684 /* Free the intermediate expressions */
7685 while(head->next != head) {
7686 release_triple(state, head->next);
7688 free_triple(state, head);
/* Parse a conditional expression and require it to fold to a constant. */
7693 static struct triple *constant_expr(struct compile_state *state)
7695 return eval_const_expr(state, conditional_expr(state));
/* Parse assignment expressions: plain =, the arithmetic compound
 * assignments (*= /= %= += -=), and the integral compound assignments
 * (<<= >>= &= ^= |=).  As the comment below explains, the left-hand side
 * is deliberately parsed as a conditional expression (a superset of the
 * standard grammar); non-lvalues are rejected by lvalue().
 * (Source elided in this view; code left byte-identical.)
 */
7698 static struct triple *assignment_expr(struct compile_state *state)
7700 struct triple *def, *left, *right;
7702 /* The C grammer in K&R shows assignment expressions
7703 * only taking unary expressions as input on their
7704 * left hand side. But specifies the precedence of
7705 * assignemnt as the lowest operator except for comma.
7707 * Allowing conditional expressions on the left hand side
7708 * of an assignement results in a grammar that accepts
7709 * a larger set of statements than standard C. As long
7710 * as the subset of the grammar that is standard C behaves
7711 * correctly this should cause no problems.
7713 * For the extra token strings accepted by the grammar
7714 * none of them should produce a valid lvalue, so they
7715 * should not produce functioning programs.
7717 * GCC has this bug as well, so surprises should be minimal.
7719 def = conditional_expr(state);
7721 switch((tok = peek(state))) {
7723 lvalue(state, left);
7725 def = write_expr(state, left,
7726 read_expr(state, assignment_expr(state)));
7731 lvalue(state, left);
7732 arithmetic(state, left);
7734 right = read_expr(state, assignment_expr(state));
7735 arithmetic(state, right);
/* compound ops pick signed/unsigned opcode from the lhs type */
7737 sign = is_signed(left->type);
7740 case TOK_TIMESEQ: op = sign? OP_SMUL : OP_UMUL; break;
7741 case TOK_DIVEQ: op = sign? OP_SDIV : OP_UDIV; break;
7742 case TOK_MODEQ: op = sign? OP_SMOD : OP_UMOD; break;
7744 def = write_expr(state, left,
7745 triple(state, op, left->type,
7746 read_expr(state, left), right));
7749 lvalue(state, left);
7750 eat(state, TOK_PLUSEQ);
7751 def = write_expr(state, left,
7752 mk_add_expr(state, left, assignment_expr(state)));
7755 lvalue(state, left);
7756 eat(state, TOK_MINUSEQ);
7757 def = write_expr(state, left,
7758 mk_sub_expr(state, left, assignment_expr(state)));
7765 lvalue(state, left);
7766 integral(state, left);
7768 right = read_expr(state, assignment_expr(state));
7769 integral(state, right);
7770 right = integral_promotion(state, right);
7771 sign = is_signed(left->type);
7774 case TOK_SLEQ: op = OP_SL; break;
7775 case TOK_SREQ: op = sign? OP_SSR: OP_USR; break;
7776 case TOK_ANDEQ: op = OP_AND; break;
7777 case TOK_XOREQ: op = OP_XOR; break;
7778 case TOK_OREQ: op = OP_OR; break;
7780 def = write_expr(state, left,
7781 triple(state, op, left->type,
7782 read_expr(state, left), right));
/* Parse a full expression: assignment expressions joined by the comma
 * operator; the result takes the type and value of the rightmost operand.
 * (Source elided in this view; code left byte-identical.)
 */
7788 static struct triple *expr(struct compile_state *state)
7791 def = assignment_expr(state);
7792 while(peek(state) == TOK_COMMA) {
7793 struct triple *left, *right;
7795 eat(state, TOK_COMMA);
7796 right = assignment_expr(state);
7797 def = triple(state, OP_COMMA, right->type, left, right);
/* Parse an expression statement (possibly empty) terminated by ';',
 * flattening any expression onto the instruction list at 'first'.
 */
7802 static void expr_statement(struct compile_state *state, struct triple *first)
7804 if (peek(state) != TOK_SEMI) {
7805 flatten(state, first, expr(state));
7807 eat(state, TOK_SEMI);
/* Parse if/else.  The test is inverted (lfalse_expr) so a single
 * conditional branch skips the then-arm: branch-to-middle when false,
 * with an extra unconditional branch over the else-arm when present.
 * (Source elided in this view; code left byte-identical.)
 */
7810 static void if_statement(struct compile_state *state, struct triple *first)
7812 struct triple *test, *jmp1, *jmp2, *middle, *end;
7814 jmp1 = jmp2 = middle = 0;
7816 eat(state, TOK_LPAREN);
7819 /* Cleanup and invert the test */
7820 test = lfalse_expr(state, read_expr(state, test));
7821 eat(state, TOK_RPAREN);
7822 /* Generate the needed pieces */
7823 middle = label(state);
7824 jmp1 = branch(state, middle, test);
7825 /* Thread the pieces together */
7826 flatten(state, first, test);
7827 flatten(state, first, jmp1);
7828 flatten(state, first, label(state));
7829 statement(state, first);
7830 if (peek(state) == TOK_ELSE) {
7831 eat(state, TOK_ELSE);
7832 /* Generate the rest of the pieces */
7834 jmp2 = branch(state, end, 0);
7835 /* Thread them together */
7836 flatten(state, first, jmp2);
7837 flatten(state, first, middle);
7838 statement(state, first);
7839 flatten(state, first, end);
7842 flatten(state, first, middle);
/* Parse a for loop.  Layout: head(init); jmp to label3(test); label1(body);
 * label2(step, also the 'continue' target); label3 test + conditional
 * branch back to label1; end (the 'break' target).  An empty test becomes
 * an unconditional back-branch.  break/continue targets are bound via the
 * i_break/i_continue symbol idents for the duration of the body.
 * (Source elided in this view; code left byte-identical.)
 */
7846 static void for_statement(struct compile_state *state, struct triple *first)
7848 struct triple *head, *test, *tail, *jmp1, *jmp2, *end;
7849 struct triple *label1, *label2, *label3;
7850 struct hash_entry *ident;
7852 eat(state, TOK_FOR);
7853 eat(state, TOK_LPAREN);
7854 head = test = tail = jmp1 = jmp2 = 0;
7855 if (peek(state) != TOK_SEMI) {
7858 eat(state, TOK_SEMI);
7859 if (peek(state) != TOK_SEMI) {
7862 test = ltrue_expr(state, read_expr(state, test));
7864 eat(state, TOK_SEMI);
7865 if (peek(state) != TOK_RPAREN) {
7868 eat(state, TOK_RPAREN);
7869 /* Generate the needed pieces */
7870 label1 = label(state);
7871 label2 = label(state);
7872 label3 = label(state);
7874 jmp1 = branch(state, label3, 0);
7875 jmp2 = branch(state, label1, test);
7878 jmp2 = branch(state, label1, 0);
7881 /* Remember where break and continue go */
7883 ident = state->i_break;
7884 symbol(state, ident, &ident->sym_ident, end, end->type);
7885 ident = state->i_continue;
7886 symbol(state, ident, &ident->sym_ident, label2, label2->type);
7887 /* Now include the body */
7888 flatten(state, first, head);
7889 flatten(state, first, jmp1);
7890 flatten(state, first, label1);
7891 statement(state, first);
7892 flatten(state, first, label2);
7893 flatten(state, first, tail);
7894 flatten(state, first, label3);
7895 flatten(state, first, test);
7896 flatten(state, first, jmp2);
7897 flatten(state, first, end);
7898 /* Cleanup the break/continue scope */
/* Parse a while loop: jump to label2(test), label1(body), label2 test with
 * a conditional back-branch to label1, then end.  label2 is the continue
 * target and end is the break target.
 * (Source elided in this view; code left byte-identical.)
 */
7902 static void while_statement(struct compile_state *state, struct triple *first)
7904 struct triple *label1, *test, *label2, *jmp1, *jmp2, *end;
7905 struct hash_entry *ident;
7906 eat(state, TOK_WHILE);
7907 eat(state, TOK_LPAREN);
7910 test = ltrue_expr(state, read_expr(state, test));
7911 eat(state, TOK_RPAREN);
7912 /* Generate the needed pieces */
7913 label1 = label(state);
7914 label2 = label(state);
7915 jmp1 = branch(state, label2, 0);
7916 jmp2 = branch(state, label1, test);
7918 /* Remember where break and continue go */
7920 ident = state->i_break;
7921 symbol(state, ident, &ident->sym_ident, end, end->type);
7922 ident = state->i_continue;
7923 symbol(state, ident, &ident->sym_ident, label2, label2->type);
7924 /* Thread them together */
7925 flatten(state, first, jmp1);
7926 flatten(state, first, label1);
7927 statement(state, first);
7928 flatten(state, first, label2);
7929 flatten(state, first, test);
7930 flatten(state, first, jmp2);
7931 flatten(state, first, end);
7932 /* Cleanup the break/continue scope */
/* Parse a do/while loop: label1(body), label2(test, the continue target),
 * conditional back-branch to label1, end (the break target).  Note the
 * break/continue bindings are established before the body and cleaned up
 * before the trailing while clause is parsed.
 * (Source elided in this view; code left byte-identical.)
 */
7936 static void do_statement(struct compile_state *state, struct triple *first)
7938 struct triple *label1, *label2, *test, *end;
7939 struct hash_entry *ident;
7941 /* Generate the needed pieces */
7942 label1 = label(state);
7943 label2 = label(state);
7945 /* Remember where break and continue go */
7947 ident = state->i_break;
7948 symbol(state, ident, &ident->sym_ident, end, end->type);
7949 ident = state->i_continue;
7950 symbol(state, ident, &ident->sym_ident, label2, label2->type);
7951 /* Now include the body */
7952 flatten(state, first, label1);
7953 statement(state, first);
7954 /* Cleanup the break/continue scope */
7956 /* Eat the rest of the loop */
7957 eat(state, TOK_WHILE);
7958 eat(state, TOK_LPAREN);
7959 test = read_expr(state, expr(state));
7961 eat(state, TOK_RPAREN);
7962 eat(state, TOK_SEMI);
7963 /* Thread the pieces together */
7964 test = ltrue_expr(state, test);
7965 flatten(state, first, label2);
7966 flatten(state, first, test);
7967 flatten(state, first, branch(state, label1, test));
7968 flatten(state, first, end);
/* Parse a return statement.  Any return value is written into the
 * function's return variable (MISC slot 0) and control jumps to the
 * function's exit label — except when this is the last statement of the
 * function body, in which case the jump is elided (the #warning notes a
 * more general branch-elimination is still wanted).
 * (Source elided in this view; code left byte-identical.)
 */
7972 static void return_statement(struct compile_state *state, struct triple *first)
7974 struct triple *jmp, *mv, *dest, *var, *val;
7976 eat(state, TOK_RETURN);
7978 #warning "FIXME implement a more general excess branch elimination"
7980 /* If we have a return value do some more work */
7981 if (peek(state) != TOK_SEMI) {
7982 val = read_expr(state, expr(state));
7984 eat(state, TOK_SEMI);
7986 /* See if this last statement in a function */
7987 last = ((peek(state) == TOK_RBRACE) &&
7988 (state->scope_depth == GLOBAL_SCOPE_DEPTH +2));
7990 /* Find the return variable */
7991 var = MISC(state->main_function, 0);
7992 /* Find the return destination */
7993 dest = RHS(state->main_function, 0)->prev;
7995 /* If needed generate a jump instruction */
7997 jmp = branch(state, dest, 0);
7999 /* If needed generate an assignment instruction */
8001 mv = write_expr(state, var, val);
8003 /* Now put the code together */
8005 flatten(state, first, mv);
8006 flatten(state, first, jmp);
8009 flatten(state, first, jmp);
/* Parse 'break;' — emit an unconditional branch to the destination bound
 * to the i_break ident by the enclosing loop/switch; error if none.
 */
8013 static void break_statement(struct compile_state *state, struct triple *first)
8015 struct triple *dest;
8016 eat(state, TOK_BREAK);
8017 eat(state, TOK_SEMI);
8018 if (!state->i_break->sym_ident) {
8019 error(state, 0, "break statement not within loop or switch");
8021 dest = state->i_break->sym_ident->def;
8022 flatten(state, first, branch(state, dest, 0));
/* Parse 'continue;' — emit an unconditional branch to the destination
 * bound to the i_continue ident by the enclosing loop; error if none.
 */
8025 static void continue_statement(struct compile_state *state, struct triple *first)
8027 struct triple *dest;
8028 eat(state, TOK_CONTINUE);
8029 eat(state, TOK_SEMI);
8030 if (!state->i_continue->sym_ident) {
8031 error(state, 0, "continue statement outside of a loop");
8033 dest = state->i_continue->sym_ident->def;
8034 flatten(state, first, branch(state, dest, 0));
/* Parse 'goto label;'.  A forward reference allocates the label symbol
 * now; it is flattened into place when labeled_statement later defines it.
 * (Source elided in this view; code left byte-identical.)
 */
8037 static void goto_statement(struct compile_state *state, struct triple *first)
8039 struct hash_entry *ident;
8040 eat(state, TOK_GOTO);
8041 eat(state, TOK_IDENT);
8042 ident = state->token[0].ident;
8043 if (!ident->sym_label) {
8044 /* If this is a forward branch allocate the label now,
8045 * it will be flattend in the appropriate location later.
8049 label_symbol(state, ident, ins);
8051 eat(state, TOK_SEMI);
8053 flatten(state, first, branch(state, ident->sym_label->def, 0));
/* Parse 'ident: statement'.  Reuses a label triple pre-allocated by a
 * forward goto (refreshing its source-location occurance), rejects a
 * second definition of an already-flattened label, then flattens the
 * label and parses the following statement.
 * (Source elided in this view; code left byte-identical.)
 */
8056 static void labeled_statement(struct compile_state *state, struct triple *first)
8059 struct hash_entry *ident;
8060 eat(state, TOK_IDENT);
8062 ident = state->token[0].ident;
8063 if (ident->sym_label && ident->sym_label->def) {
8064 ins = ident->sym_label->def;
8065 put_occurance(ins->occurance);
8066 ins->occurance = new_occurance(state);
8070 label_symbol(state, ident, ins);
8072 if (ins->id & TRIPLE_FLAG_FLATTENED) {
8073 error(state, 0, "label %s already defined", ident->name);
8075 flatten(state, first, ins);
8077 eat(state, TOK_COLON);
8078 statement(state, first);
/* Parse the syntax of a switch statement, then reject it: switch is not
 * implemented in this compiler yet.
 */
8081 static void switch_statement(struct compile_state *state, struct triple *first)
8084 eat(state, TOK_SWITCH);
8085 eat(state, TOK_LPAREN);
8087 eat(state, TOK_RPAREN);
8088 statement(state, first);
8089 error(state, 0, "switch statements are not implemented");
/* Parse the syntax of a case label, then reject it: unimplemented. */
8093 static void case_statement(struct compile_state *state, struct triple *first)
8096 eat(state, TOK_CASE);
8097 constant_expr(state);
8098 eat(state, TOK_COLON);
8099 statement(state, first);
8100 error(state, 0, "case statements are not implemented");
/* Parse the syntax of a default label, then reject it: unimplemented. */
8104 static void default_statement(struct compile_state *state, struct triple *first)
8107 eat(state, TOK_DEFAULT);
8108 eat(state, TOK_COLON);
8109 statement(state, first);
8110 error(state, 0, "default statements are not implemented");
/* Parse a GCC-style extended asm statement:
 *   asm [const|volatile] ( "template" : outputs : inputs : clobbers );
 * Outputs/inputs/clobbers are gathered into the fixed-size *_param arrays,
 * then an OP_ASM triple is built: register constraints are resolved
 * (including "N" matching-digit input constraints that tie an input to
 * output N), OP_PIECE helpers expose each output, and the outputs are
 * written back to their destination expressions.
 * (Source elided in this view; code left byte-identical.)
 */
8114 static void asm_statement(struct compile_state *state, struct triple *first)
8116 struct asm_info *info;
8118 struct triple *constraint;
8119 struct triple *expr;
8120 } out_param[MAX_LHS], in_param[MAX_RHS], clob_param[MAX_LHS];
8121 struct triple *def, *asm_str;
8122 int out, in, clobbers, more, colons, i;
8124 eat(state, TOK_ASM);
8125 /* For now ignore the qualifiers */
8126 switch(peek(state)) {
8128 eat(state, TOK_CONST);
8131 eat(state, TOK_VOLATILE);
8134 eat(state, TOK_LPAREN);
8135 asm_str = string_constant(state);
8138 out = in = clobbers = 0;
8140 if ((colons == 0) && (peek(state) == TOK_COLON)) {
8141 eat(state, TOK_COLON);
8143 more = (peek(state) == TOK_LIT_STRING);
8146 struct triple *constraint;
/* NOTE(review): suspected off-by-one — out == MAX_LHS passes this
 * check and then out_param[out] writes one past the array; the bound
 * should probably be >=.  Same pattern for the input and clobber
 * checks below.  TODO confirm against how out is incremented (the
 * increment lines are elided here). */
8149 if (out > MAX_LHS) {
8150 error(state, 0, "Maximum output count exceeded.");
8152 constraint = string_constant(state);
8153 str = constraint->u.blob;
8154 if (str[0] != '=') {
8155 error(state, 0, "Output constraint does not start with =");
8157 constraint->u.blob = str + 1;
8158 eat(state, TOK_LPAREN);
8159 var = conditional_expr(state);
8160 eat(state, TOK_RPAREN);
8163 out_param[out].constraint = constraint;
8164 out_param[out].expr = var;
8165 if (peek(state) == TOK_COMMA) {
8166 eat(state, TOK_COMMA);
8173 if ((colons == 1) && (peek(state) == TOK_COLON)) {
8174 eat(state, TOK_COLON);
8176 more = (peek(state) == TOK_LIT_STRING);
8179 struct triple *constraint;
8183 error(state, 0, "Maximum input count exceeded.");
8185 constraint = string_constant(state);
8186 str = constraint->u.blob;
/* NOTE(review): BUG — misplaced parenthesis.  This evaluates
 * digitp(str[0] && str[1] == '\0'), i.e. digitp() of a boolean 0/1,
 * so matching-digit constraints are never validated here.  The
 * intended test is digitp(str[0]) && str[1] == '\0', exactly as
 * written in the constraint-resolution loop further below. */
8187 if (digitp(str[0] && str[1] == '\0')) {
8189 val = digval(str[0]);
8190 if ((val < 0) || (val >= out)) {
8191 error(state, 0, "Invalid input constraint %d", val);
8194 eat(state, TOK_LPAREN);
8195 val = conditional_expr(state);
8196 eat(state, TOK_RPAREN);
8198 in_param[in].constraint = constraint;
8199 in_param[in].expr = val;
8200 if (peek(state) == TOK_COMMA) {
8201 eat(state, TOK_COMMA);
8209 if ((colons == 2) && (peek(state) == TOK_COLON)) {
8210 eat(state, TOK_COLON);
8212 more = (peek(state) == TOK_LIT_STRING);
8214 struct triple *clobber;
8216 if ((clobbers + out) > MAX_LHS) {
8217 error(state, 0, "Maximum clobber limit exceeded.");
8219 clobber = string_constant(state);
8221 clob_param[clobbers].constraint = clobber;
8222 if (peek(state) == TOK_COMMA) {
8223 eat(state, TOK_COMMA);
8229 eat(state, TOK_RPAREN);
8230 eat(state, TOK_SEMI);
8233 info = xcmalloc(sizeof(*info), "asm_info");
8234 info->str = asm_str->u.blob;
8235 free_triple(state, asm_str);
8237 def = new_triple(state, OP_ASM, &void_type, clobbers + out, in);
8238 def->u.ainfo = info;
8240 /* Find the register constraints */
8241 for(i = 0; i < out; i++) {
8242 struct triple *constraint;
8243 constraint = out_param[i].constraint;
8244 info->tmpl.lhs[i] = arch_reg_constraint(state,
8245 out_param[i].expr->type, constraint->u.blob);
8246 free_triple(state, constraint);
8248 for(; i - out < clobbers; i++) {
8249 struct triple *constraint;
8250 constraint = clob_param[i - out].constraint;
8251 info->tmpl.lhs[i] = arch_reg_clobber(state, constraint->u.blob);
8252 free_triple(state, constraint);
8254 for(i = 0; i < in; i++) {
8255 struct triple *constraint;
8257 constraint = in_param[i].constraint;
8258 str = constraint->u.blob;
/* matching-digit constraint "N": tie this input to output N's register */
8259 if (digitp(str[0]) && str[1] == '\0') {
8260 struct reg_info cinfo;
8262 val = digval(str[0]);
8263 cinfo.reg = info->tmpl.lhs[val].reg;
8264 cinfo.regcm = arch_type_to_regcm(state, in_param[i].expr->type);
8265 cinfo.regcm &= info->tmpl.lhs[val].regcm;
8266 if (cinfo.reg == REG_UNSET) {
8267 cinfo.reg = REG_VIRT0 + val;
8269 if (cinfo.regcm == 0) {
8270 error(state, 0, "No registers for %d", val);
8272 info->tmpl.lhs[val] = cinfo;
8273 info->tmpl.rhs[i] = cinfo;
8276 info->tmpl.rhs[i] = arch_reg_constraint(state,
8277 in_param[i].expr->type, str);
8279 free_triple(state, constraint);
8282 /* Now build the helper expressions */
8283 for(i = 0; i < in; i++) {
8284 RHS(def, i) = read_expr(state,in_param[i].expr);
8286 flatten(state, first, def);
8287 for(i = 0; i < (out + clobbers); i++) {
8289 struct triple *piece;
8290 type = (i < out)? out_param[i].expr->type : &void_type;
8291 piece = triple(state, OP_PIECE, type, def, 0);
8293 LHS(def, i) = piece;
8294 flatten(state, first, piece);
8296 /* And write the helpers to their destinations */
8297 for(i = 0; i < out; i++) {
8298 struct triple *piece;
8299 piece = LHS(def, i);
8300 flatten(state, first,
8301 write_expr(state, out_param[i].expr, piece));
/* Predicate: does token 'tok' begin a declaration?  (Body heavily elided
 * in this view — only the typedef-name case is visible.) */
8306 static int isdecl(int tok)
8329 case TOK_TYPE_NAME: /* typedef name */
/* Parse a brace-enclosed compound statement: '{' statement-list-opt '}'. */
8336 static void compound_statement(struct compile_state *state, struct triple *first)
8338 eat(state, TOK_LBRACE);
8341 /* statement-list opt */
8342 while (peek(state) != TOK_RBRACE) {
8343 statement(state, first);
8346 eat(state, TOK_RBRACE);
/* Statement dispatcher: routes on the lookahead token to the specific
 * statement parser.  'ident :' (two-token lookahead) is a label; a
 * declaration-starting token handles C99 intermixed declarations; the
 * default case is an expression statement.
 * (Source elided in this view; code left byte-identical.)
 */
8349 static void statement(struct compile_state *state, struct triple *first)
8353 if (tok == TOK_LBRACE) {
8354 compound_statement(state, first);
8356 else if (tok == TOK_IF) {
8357 if_statement(state, first);
8359 else if (tok == TOK_FOR) {
8360 for_statement(state, first);
8362 else if (tok == TOK_WHILE) {
8363 while_statement(state, first);
8365 else if (tok == TOK_DO) {
8366 do_statement(state, first);
8368 else if (tok == TOK_RETURN) {
8369 return_statement(state, first);
8371 else if (tok == TOK_BREAK) {
8372 break_statement(state, first);
8374 else if (tok == TOK_CONTINUE) {
8375 continue_statement(state, first);
8377 else if (tok == TOK_GOTO) {
8378 goto_statement(state, first);
8380 else if (tok == TOK_SWITCH) {
8381 switch_statement(state, first);
8383 else if (tok == TOK_ASM) {
8384 asm_statement(state, first);
8386 else if ((tok == TOK_IDENT) && (peek2(state) == TOK_COLON)) {
8387 labeled_statement(state, first);
8389 else if (tok == TOK_CASE) {
8390 case_statement(state, first);
8392 else if (tok == TOK_DEFAULT) {
8393 default_statement(state, first);
8395 else if (isdecl(tok)) {
8396 /* This handles C99 intermixing of statements and decls */
8400 expr_statement(state, first);
/* Parse one function parameter declaration and record its name in
 * type->field_ident.  Temporarily pretends we are not at global scope so
 * the declarator picks local storage defaults.
 * (Source elided in this view; code left byte-identical.)
 */
8404 static struct type *param_decl(struct compile_state *state)
8407 struct hash_entry *ident;
8408 /* Cheat so the declarator will know we are not global */
8411 type = decl_specifiers(state);
8412 type = declarator(state, type, &ident, 0);
8413 type->field_ident = ident;
/* Parse a comma-separated parameter list into a TYPE_FUNCTION whose
 * parameters are chained as right-leaning TYPE_PRODUCT nodes.
 * '...' is parsed but rejected (no variadic support).
 * (Source elided in this view; code left byte-identical.)
 */
8418 static struct type *param_type_list(struct compile_state *state, struct type *type)
8420 struct type *ftype, **next;
8421 ftype = new_type(TYPE_FUNCTION, type, param_decl(state));
8422 next = &ftype->right;
8423 while(peek(state) == TOK_COMMA) {
8424 eat(state, TOK_COMMA);
8425 if (peek(state) == TOK_DOTS) {
8426 eat(state, TOK_DOTS);
8427 error(state, 0, "variadic functions not supported");
8430 *next = new_type(TYPE_PRODUCT, *next, param_decl(state));
8431 next = &((*next)->right);
/* Parse a type-name (as in casts and sizeof): specifier-qualifier list
 * followed by an abstract declarator (ident pointer is NULL so no
 * identifier is required or captured).
 */
8438 static struct type *type_name(struct compile_state *state)
8441 type = specifier_qualifier_list(state);
8442 /* abstract-declarator (may consume no tokens) */
8443 type = declarator(state, type, 0, 0);
/* Parse a direct declarator: an identifier or a parenthesized inner
 * declarator, followed by function parameter lists '(...)' and array
 * suffixes '[const-expr]' / '[]' (ELEMENT_COUNT_UNSPECIFIED when empty).
 * Array element counts come from an integral constant expression;
 * qualifiers/storage are propagated onto the TYPE_ARRAY node.
 * (Source elided in this view; code left byte-identical.)
 */
8447 static struct type *direct_declarator(
8448 struct compile_state *state, struct type *type,
8449 struct hash_entry **ident, int need_ident)
8454 arrays_complete(state, type);
8455 switch(peek(state)) {
8457 eat(state, TOK_IDENT);
8459 error(state, 0, "Unexpected identifier found");
8461 /* The name of what we are declaring */
8462 *ident = state->token[0].ident;
8465 eat(state, TOK_LPAREN);
8466 outer = declarator(state, type, ident, need_ident);
8467 eat(state, TOK_RPAREN);
8471 error(state, 0, "Identifier expected");
8477 arrays_complete(state, type);
8478 switch(peek(state)) {
8480 eat(state, TOK_LPAREN);
8481 type = param_type_list(state, type);
8482 eat(state, TOK_RPAREN);
8486 unsigned int qualifiers;
8487 struct triple *value;
8489 eat(state, TOK_LBRACKET);
8490 if (peek(state) != TOK_RBRACKET) {
8491 value = constant_expr(state);
8492 integral(state, value);
8494 eat(state, TOK_RBRACKET);
8496 qualifiers = type->type & (QUAL_MASK | STOR_MASK);
8497 type = new_type(TYPE_ARRAY | qualifiers, type, 0);
8499 type->elements = value->u.cval;
8500 free_triple(state, value);
8502 type->elements = ELEMENT_COUNT_UNSPECIFIED;
8514 arrays_complete(state, type);
/* graft the suffix type under the innermost node of the parenthesized
 * declarator */
8516 for(inner = outer; inner->left; inner = inner->left)
/* Parse a declarator: any number of leading '*' (each wrapping the type
 * in TYPE_POINTER, preserving storage-class bits), then the direct
 * declarator.
 */
8524 static struct type *declarator(
8525 struct compile_state *state, struct type *type,
8526 struct hash_entry **ident, int need_ident)
8528 while(peek(state) == TOK_STAR) {
8529 eat(state, TOK_STAR);
8530 type = new_type(TYPE_POINTER | (type->type & STOR_MASK), type, 0);
8532 type = direct_declarator(state, type, ident, need_ident);
/* Resolve a typedef name to its underlying type, merging in the caller's
 * qualifiers; clones the type only when the combined storage/qualifier
 * bits differ from the typedef's own.
 * (Source elided in this view; code left byte-identical.)
 */
8537 static struct type *typedef_name(
8538 struct compile_state *state, unsigned int specifiers)
8540 struct hash_entry *ident;
8542 eat(state, TOK_TYPE_NAME);
8543 ident = state->token[0].ident;
8544 type = ident->sym_ident->type;
8545 specifiers |= type->type & QUAL_MASK;
8546 if ((specifiers & (STOR_MASK | QUAL_MASK)) !=
8547 (type->type & (STOR_MASK | QUAL_MASK))) {
8548 type = clone_type(specifiers, type);
/* Parse an enum specifier: optional tag, then an optional brace-enclosed
 * enumerator list with optional '= constant-expr' initializers.
 * NOTE(review): the visible code parses but appears to discard the
 * enumerator values — how enumerators are registered is elided here.
 * (Source elided in this view; code left byte-identical.)
 */
8553 static struct type *enum_specifier(
8554 struct compile_state *state, unsigned int specifiers)
8560 eat(state, TOK_ENUM);
8562 if (tok == TOK_IDENT) {
8563 eat(state, TOK_IDENT);
8565 if ((tok != TOK_IDENT) || (peek(state) == TOK_LBRACE)) {
8566 eat(state, TOK_LBRACE);
8568 eat(state, TOK_IDENT);
8569 if (peek(state) == TOK_EQ) {
8571 constant_expr(state);
8573 if (peek(state) == TOK_COMMA) {
8574 eat(state, TOK_COMMA);
8576 } while(peek(state) != TOK_RBRACE);
8577 eat(state, TOK_RBRACE);
/* Parse one struct member declarator.  A ': width' bitfield suffix is
 * parsed (the width as a constant expression) but then rejected —
 * bitfields are not implemented yet, per the #warning.
 * (Source elided in this view; code left byte-identical.)
 */
8583 static struct type *struct_declarator(
8584 struct compile_state *state, struct type *type, struct hash_entry **ident)
8588 if (tok != TOK_COLON) {
8589 type = declarator(state, type, ident, 1);
8591 if ((tok == TOK_COLON) || (peek(state) == TOK_COLON)) {
8592 struct triple *value;
8593 eat(state, TOK_COLON);
8594 value = constant_expr(state);
8595 #warning "FIXME implement bitfields to reduce register usage"
8596 error(state, 0, "bitfields not yet implemented");
/* Parse a struct (or union) specifier.  Members are chained with
 * TYPE_PRODUCT (TYPE_OVERLAP for union, which is parsed but rejected).
 * A definition '{...}' builds a TYPE_STRUCT, records the tag ident and
 * element count, and registers the tag in sym_struct; a bare tag
 * reference clones the previously-registered type or errors if the tag
 * is undeclared.
 * (Source elided in this view; code left byte-identical.)
 */
8601 static struct type *struct_or_union_specifier(
8602 struct compile_state *state, unsigned int spec)
8604 struct type *struct_type;
8605 struct hash_entry *ident;
8606 unsigned int type_join;
8610 switch(peek(state)) {
8612 eat(state, TOK_STRUCT);
8613 type_join = TYPE_PRODUCT;
8616 eat(state, TOK_UNION);
8617 type_join = TYPE_OVERLAP;
8618 error(state, 0, "unions not yet supported\n");
8621 eat(state, TOK_STRUCT);
8622 type_join = TYPE_PRODUCT;
8626 if ((tok == TOK_IDENT) || (tok == TOK_TYPE_NAME)) {
8628 ident = state->token[0].ident;
8630 if (!ident || (peek(state) == TOK_LBRACE)) {
8634 eat(state, TOK_LBRACE);
8635 next = &struct_type;
8637 struct type *base_type;
8639 base_type = specifier_qualifier_list(state);
8642 struct hash_entry *fident;
8644 type = struct_declarator(state, base_type, &fident);
8646 if (peek(state) == TOK_COMMA) {
8648 eat(state, TOK_COMMA);
8650 type = clone_type(0, type);
8651 type->field_ident = fident;
8653 *next = new_type(type_join, *next, type);
8654 next = &((*next)->right);
8659 eat(state, TOK_SEMI);
8660 } while(peek(state) != TOK_RBRACE);
8661 eat(state, TOK_RBRACE);
8662 struct_type = new_type(TYPE_STRUCT | spec, struct_type, 0);
8663 struct_type->type_ident = ident;
8664 struct_type->elements = elements;
8666 symbol(state, ident, &ident->sym_struct, 0, struct_type);
8669 if (ident && ident->sym_struct) {
8670 struct_type = clone_type(spec, ident->sym_struct->type);
8672 else if (ident && !ident->sym_struct) {
8673 error(state, 0, "struct %s undeclared", ident->name);
/* Parse an optional storage-class specifier (auto/register/static/
 * extern/typedef).  When none is present, default to STOR_STATIC at
 * global scope and STOR_AUTO otherwise.
 * (Source elided in this view; code left byte-identical.)
 */
8678 static unsigned int storage_class_specifier_opt(struct compile_state *state)
8680 unsigned int specifiers;
8681 switch(peek(state)) {
8683 eat(state, TOK_AUTO);
8684 specifiers = STOR_AUTO;
8687 eat(state, TOK_REGISTER);
8688 specifiers = STOR_REGISTER;
8691 eat(state, TOK_STATIC);
8692 specifiers = STOR_STATIC;
8695 eat(state, TOK_EXTERN);
8696 specifiers = STOR_EXTERN;
8699 eat(state, TOK_TYPEDEF);
8700 specifiers = STOR_TYPEDEF;
8703 if (state->scope_depth <= GLOBAL_SCOPE_DEPTH) {
8704 specifiers = STOR_STATIC;
8707 specifiers = STOR_AUTO;
/* Parse an optional function specifier.  'inline' is accepted and
 * recorded as STOR_INLINE but otherwise ignored, per the comment.
 */
8713 static unsigned int function_specifier_opt(struct compile_state *state)
8715 /* Ignore the inline keyword */
8716 unsigned int specifiers;
8718 switch(peek(state)) {
8720 eat(state, TOK_INLINE);
8721 specifiers = STOR_INLINE;
/* Parse a (possibly empty) run of type qualifiers: const, volatile,
 * restrict — mapped to the QUAL_* bits.
 * NOTE(review): the visible code assigns (=) rather than ORs (|=) each
 * qualifier bit; how bits accumulate across the loop is elided — verify
 * against the full source.
 * (Source elided in this view; code left byte-identical.)
 */
8726 static unsigned int type_qualifiers(struct compile_state *state)
8728 unsigned int specifiers;
8731 specifiers = QUAL_NONE;
8733 switch(peek(state)) {
8735 eat(state, TOK_CONST);
8736 specifiers = QUAL_CONST;
8739 eat(state, TOK_VOLATILE);
8740 specifiers = QUAL_VOLATILE;
8743 eat(state, TOK_RESTRICT);
8744 specifiers = QUAL_RESTRICT;
8754 static struct type *type_specifier(
8755 struct compile_state *state, unsigned int spec)
8759 switch(peek(state)) {
8761 eat(state, TOK_VOID);
8762 type = new_type(TYPE_VOID | spec, 0, 0);
8765 eat(state, TOK_CHAR);
8766 type = new_type(TYPE_CHAR | spec, 0, 0);
8769 eat(state, TOK_SHORT);
8770 if (peek(state) == TOK_INT) {
8771 eat(state, TOK_INT);
8773 type = new_type(TYPE_SHORT | spec, 0, 0);
8776 eat(state, TOK_INT);
8777 type = new_type(TYPE_INT | spec, 0, 0);
8780 eat(state, TOK_LONG);
8781 switch(peek(state)) {
8783 eat(state, TOK_LONG);
8784 error(state, 0, "long long not supported");
8787 eat(state, TOK_DOUBLE);
8788 error(state, 0, "long double not supported");
8791 eat(state, TOK_INT);
8792 type = new_type(TYPE_LONG | spec, 0, 0);
8795 type = new_type(TYPE_LONG | spec, 0, 0);
8800 eat(state, TOK_FLOAT);
8801 error(state, 0, "type float not supported");
8804 eat(state, TOK_DOUBLE);
8805 error(state, 0, "type double not supported");
8808 eat(state, TOK_SIGNED);
8809 switch(peek(state)) {
8811 eat(state, TOK_LONG);
8812 switch(peek(state)) {
8814 eat(state, TOK_LONG);
8815 error(state, 0, "type long long not supported");
8818 eat(state, TOK_INT);
8819 type = new_type(TYPE_LONG | spec, 0, 0);
8822 type = new_type(TYPE_LONG | spec, 0, 0);
8827 eat(state, TOK_INT);
8828 type = new_type(TYPE_INT | spec, 0, 0);
8831 eat(state, TOK_SHORT);
8832 type = new_type(TYPE_SHORT | spec, 0, 0);
8835 eat(state, TOK_CHAR);
8836 type = new_type(TYPE_CHAR | spec, 0, 0);
8839 type = new_type(TYPE_INT | spec, 0, 0);
8844 eat(state, TOK_UNSIGNED);
8845 switch(peek(state)) {
8847 eat(state, TOK_LONG);
8848 switch(peek(state)) {
8850 eat(state, TOK_LONG);
8851 error(state, 0, "unsigned long long not supported");
8854 eat(state, TOK_INT);
8855 type = new_type(TYPE_ULONG | spec, 0, 0);
8858 type = new_type(TYPE_ULONG | spec, 0, 0);
8863 eat(state, TOK_INT);
8864 type = new_type(TYPE_UINT | spec, 0, 0);
8867 eat(state, TOK_SHORT);
8868 type = new_type(TYPE_USHORT | spec, 0, 0);
8871 eat(state, TOK_CHAR);
8872 type = new_type(TYPE_UCHAR | spec, 0, 0);
8875 type = new_type(TYPE_UINT | spec, 0, 0);
8879 /* struct or union specifier */
8882 type = struct_or_union_specifier(state, spec);
8884 /* enum-spefifier */
8886 type = enum_specifier(state, spec);
8890 type = typedef_name(state, spec);
8893 error(state, 0, "bad type specifier %s",
8894 tokens[peek(state)]);
8900 static int istype(int tok)
8926 static struct type *specifier_qualifier_list(struct compile_state *state)
8929 unsigned int specifiers = 0;
8931 /* type qualifiers */
8932 specifiers |= type_qualifiers(state);
8934 /* type specifier */
8935 type = type_specifier(state, specifiers);
8940 static int isdecl_specifier(int tok)
8943 /* storage class specifier */
8949 /* type qualifier */
8953 /* type specifiers */
8963 /* struct or union specifier */
8966 /* enum-spefifier */
8970 /* function specifiers */
8978 static struct type *decl_specifiers(struct compile_state *state)
8981 unsigned int specifiers;
8982 /* I am overly restrictive in the arragement of specifiers supported.
8983 * C is overly flexible in this department it makes interpreting
8984 * the parse tree difficult.
8988 /* storage class specifier */
8989 specifiers |= storage_class_specifier_opt(state);
8991 /* function-specifier */
8992 specifiers |= function_specifier_opt(state);
8994 /* type qualifier */
8995 specifiers |= type_qualifiers(state);
8997 /* type specifier */
8998 type = type_specifier(state, specifiers);
9007 static struct field_info designator(struct compile_state *state, struct type *type)
9010 struct field_info info;
9014 switch(peek(state)) {
9017 struct triple *value;
9018 if ((type->type & TYPE_MASK) != TYPE_ARRAY) {
9019 error(state, 0, "Array designator not in array initializer");
9021 eat(state, TOK_LBRACKET);
9022 value = constant_expr(state);
9023 eat(state, TOK_RBRACKET);
9025 info.type = type->left;
9026 info.offset = value->u.cval * size_of(state, info.type);
9031 struct hash_entry *field;
9032 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
9033 error(state, 0, "Struct designator not in struct initializer");
9035 eat(state, TOK_DOT);
9036 eat(state, TOK_IDENT);
9037 field = state->token[0].ident;
9038 info.offset = field_offset(state, type, field);
9039 info.type = field_type(state, type, field);
9043 error(state, 0, "Invalid designator");
9046 } while((tok == TOK_LBRACKET) || (tok == TOK_DOT));
9051 static struct triple *initializer(
9052 struct compile_state *state, struct type *type)
9054 struct triple *result;
9055 #warning "FIXME handle string pointer initializers "
9056 #warning "FIXME more consistent initializer handling (where should eval_const_expr go?"
9057 if (peek(state) != TOK_LBRACE) {
9058 result = assignment_expr(state);
9059 if (((type->type & TYPE_MASK) == TYPE_ARRAY) &&
9060 (type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9061 ((result->type->type & TYPE_MASK) == TYPE_ARRAY) &&
9062 (result->type->elements != ELEMENT_COUNT_UNSPECIFIED) &&
9063 (equiv_types(type->left, result->type->left))) {
9064 type->elements = result->type->elements;
9066 if (!is_init_compatible(state, type, result->type)) {
9067 error(state, 0, "Incompatible types in initializer");
9069 if (!equiv_types(type, result->type)) {
9070 result = mk_cast_expr(state, type, result);
9076 struct field_info info;
9078 if (((type->type & TYPE_MASK) != TYPE_ARRAY) &&
9079 ((type->type & TYPE_MASK) != TYPE_STRUCT)) {
9080 internal_error(state, 0, "unknown initializer type");
9083 info.type = type->left;
9084 if ((type->type & TYPE_MASK) == TYPE_STRUCT) {
9085 info.type = next_field(state, type, 0);
9087 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
9090 max_offset = size_of(state, type);
9092 buf = xcmalloc(max_offset, "initializer");
9093 eat(state, TOK_LBRACE);
9095 struct triple *value;
9096 struct type *value_type;
9102 if ((tok == TOK_LBRACKET) || (tok == TOK_DOT)) {
9103 info = designator(state, type);
9105 if ((type->elements != ELEMENT_COUNT_UNSPECIFIED) &&
9106 (info.offset >= max_offset)) {
9107 error(state, 0, "element beyond bounds");
9109 value_type = info.type;
9110 value = eval_const_expr(state, initializer(state, value_type));
9111 value_size = size_of(state, value_type);
9112 if (((type->type & TYPE_MASK) == TYPE_ARRAY) &&
9113 (type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9114 (max_offset <= info.offset)) {
9118 old_size = max_offset;
9119 max_offset = info.offset + value_size;
9120 buf = xmalloc(max_offset, "initializer");
9121 memcpy(buf, old_buf, old_size);
9124 dest = ((char *)buf) + info.offset;
9125 if (value->op == OP_BLOBCONST) {
9126 memcpy(dest, value->u.blob, value_size);
9128 else if ((value->op == OP_INTCONST) && (value_size == 1)) {
9129 *((uint8_t *)dest) = value->u.cval & 0xff;
9131 else if ((value->op == OP_INTCONST) && (value_size == 2)) {
9132 *((uint16_t *)dest) = value->u.cval & 0xffff;
9134 else if ((value->op == OP_INTCONST) && (value_size == 4)) {
9135 *((uint32_t *)dest) = value->u.cval & 0xffffffff;
9138 internal_error(state, 0, "unhandled constant initializer");
9140 free_triple(state, value);
9141 if (peek(state) == TOK_COMMA) {
9142 eat(state, TOK_COMMA);
9145 info.offset += value_size;
9146 if ((type->type & TYPE_MASK) == TYPE_STRUCT) {
9147 info.type = next_field(state, type, info.type);
9148 info.offset = field_offset(state, type,
9149 info.type->field_ident);
9151 } while(comma && (peek(state) != TOK_RBRACE));
9152 if ((type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9153 ((type->type & TYPE_MASK) == TYPE_ARRAY)) {
9154 type->elements = max_offset / size_of(state, type->left);
9156 eat(state, TOK_RBRACE);
9157 result = triple(state, OP_BLOBCONST, type, 0, 0);
9158 result->u.blob = buf;
9163 static void resolve_branches(struct compile_state *state)
9165 /* Make a second pass and finish anything outstanding
9166 * with respect to branches. The only outstanding item
9167 * is to see if there are goto to labels that have not
9168 * been defined and to error about them.
9171 for(i = 0; i < HASH_TABLE_SIZE; i++) {
9172 struct hash_entry *entry;
9173 for(entry = state->hash_table[i]; entry; entry = entry->next) {
9175 if (!entry->sym_label) {
9178 ins = entry->sym_label->def;
9179 if (!(ins->id & TRIPLE_FLAG_FLATTENED)) {
9180 error(state, ins, "label `%s' used but not defined",
/* Parse and flatten a complete function definition.
 * Validates the function type (every parameter must be named),
 * builds an OP_LIST triple bracketed by start/end labels, creates
 * symbol-table variables for each parameter and for the return
 * value, parses the body, and resolves outstanding branches.
 * Also records the function as state->main_function.
 * Fix: the second "No identifier" message misspelled "parameter"
 * ("paramter"); now matches the first message's spelling.
 */
9187 static struct triple *function_definition(
9188 struct compile_state *state, struct type *type)
9190 struct triple *def, *tmp, *first, *end;
9191 struct hash_entry *ident;
9194 if ((type->type &TYPE_MASK) != TYPE_FUNCTION) {
9195 error(state, 0, "Invalid function header");
9198 /* Verify the function type */
9199 if (((type->right->type & TYPE_MASK) != TYPE_VOID) &&
9200 ((type->right->type & TYPE_MASK) != TYPE_PRODUCT) &&
9201 (type->right->field_ident == 0)) {
9202 error(state, 0, "Invalid function parameters");
9204 param = type->right;
9206 while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
9208 if (!param->left->field_ident) {
9209 error(state, 0, "No identifier for parameter %d\n", i);
9211 param = param->right;
9214 if (((param->type & TYPE_MASK) != TYPE_VOID) && !param->field_ident) {
9215 error(state, 0, "No identifier for parameter %d\n", i);
9218 /* Get a list of statements for this function. */
9219 def = triple(state, OP_LIST, type, 0, 0);
9221 /* Start a new scope for the passed parameters */
9224 /* Put a label at the very start of a function */
9225 first = label(state);
9226 RHS(def, 0) = first;
9228 /* Put a label at the very end of a function */
9230 flatten(state, first, end);
9232 /* Walk through the parameters and create symbol table entries
9235 param = type->right;
9236 while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
9237 ident = param->left->field_ident;
9238 tmp = variable(state, param->left);
9239 symbol(state, ident, &ident->sym_ident, tmp, tmp->type);
9240 flatten(state, end, tmp);
9241 param = param->right;
9243 if ((param->type & TYPE_MASK) != TYPE_VOID) {
9244 /* And don't forget the last parameter */
9245 ident = param->field_ident;
9246 tmp = variable(state, param);
9247 symbol(state, ident, &ident->sym_ident, tmp, tmp->type);
9248 flatten(state, end, tmp);
9250 /* Add a variable for the return value */
9252 if ((type->left->type & TYPE_MASK) != TYPE_VOID) {
9253 /* Remove all type qualifiers from the return type */
9254 tmp = variable(state, clone_type(0, type->left));
9255 flatten(state, end, tmp);
9256 /* Remember where the return value is */
9260 /* Remember which function I am compiling.
9261 * Also assume the last defined function is the main function.
9263 state->main_function = def;
9265 /* Now get the actual function definition */
9266 compound_statement(state, end);
9268 /* Finish anything unfinished with branches */
9269 resolve_branches(state);
9271 /* Remove the parameter scope */
9275 fprintf(stdout, "\n");
9276 loc(stdout, state, 0);
9277 fprintf(stdout, "\n__________ function_definition _________\n");
9278 print_triple(state, def);
9279 fprintf(stdout, "__________ function_definition _________ done\n\n");
9285 static struct triple *do_decl(struct compile_state *state,
9286 struct type *type, struct hash_entry *ident)
9290 /* Clean up the storage types used */
9291 switch (type->type & STOR_MASK) {
9294 /* These are the good types I am aiming for */
9297 type->type &= ~STOR_MASK;
9298 type->type |= STOR_AUTO;
9301 type->type &= ~STOR_MASK;
9302 type->type |= STOR_STATIC;
9306 error(state, 0, "typedef without name");
9308 symbol(state, ident, &ident->sym_ident, 0, type);
9309 ident->tok = TOK_TYPE_NAME;
9313 internal_error(state, 0, "Undefined storage class");
9315 if ((type->type & TYPE_MASK) == TYPE_FUNCTION) {
9316 error(state, 0, "Function prototypes not supported");
9319 ((type->type & STOR_MASK) == STOR_STATIC) &&
9320 ((type->type & QUAL_CONST) == 0)) {
9321 error(state, 0, "non const static variables not supported");
9324 def = variable(state, type);
9325 symbol(state, ident, &ident->sym_ident, def, type);
9330 static void decl(struct compile_state *state, struct triple *first)
9332 struct type *base_type, *type;
9333 struct hash_entry *ident;
9336 global = (state->scope_depth <= GLOBAL_SCOPE_DEPTH);
9337 base_type = decl_specifiers(state);
9339 type = declarator(state, base_type, &ident, 0);
9340 if (global && ident && (peek(state) == TOK_LBRACE)) {
9342 state->function = ident->name;
9343 def = function_definition(state, type);
9344 symbol(state, ident, &ident->sym_ident, def, type);
9345 state->function = 0;
9349 flatten(state, first, do_decl(state, type, ident));
9350 /* type or variable definition */
9353 if (peek(state) == TOK_EQ) {
9355 error(state, 0, "cannot assign to a type");
9358 flatten(state, first,
9360 ident->sym_ident->def,
9361 initializer(state, type)));
9363 arrays_complete(state, type);
9364 if (peek(state) == TOK_COMMA) {
9365 eat(state, TOK_COMMA);
9367 type = declarator(state, base_type, &ident, 0);
9368 flatten(state, first, do_decl(state, type, ident));
9372 eat(state, TOK_SEMI);
9376 static void decls(struct compile_state *state)
9378 struct triple *list;
9380 list = label(state);
9383 if (tok == TOK_EOF) {
9386 if (tok == TOK_SPACE) {
9387 eat(state, TOK_SPACE);
9390 if (list->next != list) {
9391 error(state, 0, "global variables not supported");
9397 * Data structures for optimization.
9400 static void do_use_block(
9401 struct block *used, struct block_set **head, struct block *user,
9404 struct block_set **ptr, *new;
9411 if ((*ptr)->member == user) {
9414 ptr = &(*ptr)->next;
9416 new = xcmalloc(sizeof(*new), "block_set");
9427 static void do_unuse_block(
9428 struct block *used, struct block_set **head, struct block *unuser)
9430 struct block_set *use, **ptr;
9434 if (use->member == unuser) {
9436 memset(use, -1, sizeof(*use));
9445 static void use_block(struct block *used, struct block *user)
9447 /* Append new to the head of the list, print_block
9450 do_use_block(used, &used->use, user, 1);
9453 static void unuse_block(struct block *used, struct block *unuser)
9455 do_unuse_block(used, &used->use, unuser);
9459 static void idom_block(struct block *idom, struct block *user)
9461 do_use_block(idom, &idom->idominates, user, 0);
9464 static void unidom_block(struct block *idom, struct block *unuser)
9466 do_unuse_block(idom, &idom->idominates, unuser);
9469 static void domf_block(struct block *block, struct block *domf)
9471 do_use_block(block, &block->domfrontier, domf, 0);
9474 static void undomf_block(struct block *block, struct block *undomf)
9476 do_unuse_block(block, &block->domfrontier, undomf);
9479 static void ipdom_block(struct block *ipdom, struct block *user)
9481 do_use_block(ipdom, &ipdom->ipdominates, user, 0);
9484 static void unipdom_block(struct block *ipdom, struct block *unuser)
9486 do_unuse_block(ipdom, &ipdom->ipdominates, unuser);
9489 static void ipdomf_block(struct block *block, struct block *ipdomf)
9491 do_use_block(block, &block->ipdomfrontier, ipdomf, 0);
9494 static void unipdomf_block(struct block *block, struct block *unipdomf)
9496 do_unuse_block(block, &block->ipdomfrontier, unipdomf);
9501 static int do_walk_triple(struct compile_state *state,
9502 struct triple *ptr, int depth,
9503 int (*cb)(struct compile_state *state, struct triple *ptr, int depth))
9506 result = cb(state, ptr, depth);
9507 if ((result == 0) && (ptr->op == OP_LIST)) {
9508 struct triple *list;
9512 result = do_walk_triple(state, ptr, depth + 1, cb);
9513 if (ptr->next->prev != ptr) {
9514 internal_error(state, ptr->next, "bad prev");
9518 } while((result == 0) && (ptr != RHS(list, 0)));
9523 static int walk_triple(
9524 struct compile_state *state,
9526 int (*cb)(struct compile_state *state, struct triple *ptr, int depth))
9528 return do_walk_triple(state, ptr, 0, cb);
9531 static void do_print_prefix(int depth)
9534 for(i = 0; i < depth; i++) {
9539 #define PRINT_LIST 1
9540 static int do_print_triple(struct compile_state *state, struct triple *ins, int depth)
9544 if (op == OP_LIST) {
9549 if ((op == OP_LABEL) && (ins->use)) {
9550 printf("\n%p:\n", ins);
9552 do_print_prefix(depth);
9553 display_triple(stdout, ins);
9555 if ((ins->op == OP_BRANCH) && ins->use) {
9556 internal_error(state, ins, "branch used?");
9558 if (triple_is_branch(state, ins)) {
9564 static void print_triple(struct compile_state *state, struct triple *ins)
9566 walk_triple(state, ins, do_print_triple);
9569 static void print_triples(struct compile_state *state)
9571 print_triple(state, state->main_function);
9575 struct block *block;
9577 static void find_cf_blocks(struct cf_block *cf, struct block *block)
9579 if (!block || (cf[block->vertex].block == block)) {
9582 cf[block->vertex].block = block;
9583 find_cf_blocks(cf, block->left);
9584 find_cf_blocks(cf, block->right);
9587 static void print_control_flow(struct compile_state *state)
9589 struct cf_block *cf;
9591 printf("\ncontrol flow\n");
9592 cf = xcmalloc(sizeof(*cf) * (state->last_vertex + 1), "cf_block");
9593 find_cf_blocks(cf, state->first_block);
9595 for(i = 1; i <= state->last_vertex; i++) {
9596 struct block *block;
9597 block = cf[i].block;
9600 printf("(%p) %d:", block, block->vertex);
9602 printf(" %d", block->left->vertex);
9604 if (block->right && (block->right != block->left)) {
9605 printf(" %d", block->right->vertex);
9614 static struct block *basic_block(struct compile_state *state,
9615 struct triple *first)
9617 struct block *block;
9618 struct triple *ptr, *final;
9620 if (first->op != OP_LABEL) {
9621 internal_error(state, 0, "block does not start with a label");
9623 /* See if this basic block has already been setup */
9624 if (first->u.block != 0) {
9625 return first->u.block;
9627 /* Lookup the final instruction.
9628 * It is important that the final instruction has it's own
9631 final = RHS(state->main_function, 0)->prev;
9632 /* Allocate another basic block structure */
9633 state->last_vertex += 1;
9634 block = xcmalloc(sizeof(*block), "block");
9635 block->first = block->last = first;
9636 block->vertex = state->last_vertex;
9639 if ((ptr != first) && (ptr->op == OP_LABEL) &&
9640 ((ptr->use) || ptr == final)) {
9644 /* If ptr->u is not used remember where the baic block is */
9645 if (triple_stores_block(state, ptr)) {
9646 ptr->u.block = block;
9648 if (ptr->op == OP_BRANCH) {
9652 } while (ptr != RHS(state->main_function, 0));
9653 if (ptr == RHS(state->main_function, 0))
9656 if (op == OP_LABEL) {
9657 block->left = basic_block(state, ptr);
9659 use_block(block->left, block);
9661 else if (op == OP_BRANCH) {
9663 /* Trace the branch target */
9664 block->right = basic_block(state, TARG(ptr, 0));
9665 use_block(block->right, block);
9666 /* If there is a test trace the branch as well */
9667 if (TRIPLE_RHS(ptr->sizes)) {
9668 block->left = basic_block(state, ptr->next);
9669 use_block(block->left, block);
9673 internal_error(state, 0, "Bad basic block split");
9679 static void walk_blocks(struct compile_state *state,
9680 void (*cb)(struct compile_state *state, struct block *block, void *arg),
9683 struct triple *ptr, *first;
9684 struct block *last_block;
9686 first = RHS(state->main_function, 0);
9689 struct block *block;
9690 if (triple_stores_block(state, ptr)) {
9691 block = ptr->u.block;
9692 if (block && (block != last_block)) {
9693 cb(state, block, arg);
9697 if (block && (block->last == ptr)) {
9701 } while(ptr != first);
9704 static void print_block(
9705 struct compile_state *state, struct block *block, void *arg)
9707 struct block_set *user;
9711 fprintf(fp, "\nblock: %p (%d) %p<-%p %p<-%p\n",
9715 block->left && block->left->use?block->left->use->member : 0,
9717 block->right && block->right->use?block->right->use->member : 0);
9718 if (block->first->op == OP_LABEL) {
9719 fprintf(fp, "%p:\n", block->first);
9721 for(ptr = block->first; ; ptr = ptr->next) {
9722 display_triple(fp, ptr);
9723 if (ptr == block->last)
9726 fprintf(fp, "users %d: ", block->users);
9727 for(user = block->use; user; user = user->next) {
9728 fprintf(fp, "%p (%d) ",
9730 user->member->vertex);
9736 static void print_blocks(struct compile_state *state, FILE *fp)
9738 fprintf(fp, "--------------- blocks ---------------\n");
9739 walk_blocks(state, print_block, fp);
9742 static void prune_nonblock_triples(struct compile_state *state)
9744 struct block *block;
9745 struct triple *first, *ins, *next;
9746 /* Delete the triples not in a basic block */
9747 first = RHS(state->main_function, 0);
9752 if (ins->op == OP_LABEL) {
9753 block = ins->u.block;
9756 release_triple(state, ins);
9758 if (block && block->last == ins) {
9762 } while(ins != first);
9765 static void setup_basic_blocks(struct compile_state *state)
9767 if (!triple_stores_block(state, RHS(state->main_function, 0)) ||
9768 !triple_stores_block(state, RHS(state->main_function,0)->prev)) {
9769 internal_error(state, 0, "ins will not store block?");
9771 /* Find the basic blocks */
9772 state->last_vertex = 0;
9773 state->first_block = basic_block(state, RHS(state->main_function,0));
9774 /* Delete the triples not in a basic block */
9775 prune_nonblock_triples(state);
9776 /* Find the last basic block */
9777 state->last_block = RHS(state->main_function, 0)->prev->u.block;
9778 if (!state->last_block) {
9779 internal_error(state, 0, "end not used?");
9781 /* If we are debugging print what I have just done */
9782 if (state->debug & DEBUG_BASIC_BLOCKS) {
9783 print_blocks(state, stdout);
9784 print_control_flow(state);
9788 static void free_basic_block(struct compile_state *state, struct block *block)
9790 struct block_set *entry, *next;
9791 struct block *child;
9795 if (block->vertex == -1) {
9800 unuse_block(block->left, block);
9803 unuse_block(block->right, block);
9806 unidom_block(block->idom, block);
9810 unipdom_block(block->ipdom, block);
9813 for(entry = block->use; entry; entry = next) {
9815 child = entry->member;
9816 unuse_block(block, child);
9817 if (child->left == block) {
9820 if (child->right == block) {
9824 for(entry = block->idominates; entry; entry = next) {
9826 child = entry->member;
9827 unidom_block(block, child);
9830 for(entry = block->domfrontier; entry; entry = next) {
9832 child = entry->member;
9833 undomf_block(block, child);
9835 for(entry = block->ipdominates; entry; entry = next) {
9837 child = entry->member;
9838 unipdom_block(block, child);
9841 for(entry = block->ipdomfrontier; entry; entry = next) {
9843 child = entry->member;
9844 unipdomf_block(block, child);
9846 if (block->users != 0) {
9847 internal_error(state, 0, "block still has users");
9849 free_basic_block(state, block->left);
9851 free_basic_block(state, block->right);
9853 memset(block, -1, sizeof(*block));
9857 static void free_basic_blocks(struct compile_state *state)
9859 struct triple *first, *ins;
9860 free_basic_block(state, state->first_block);
9861 state->last_vertex = 0;
9862 state->first_block = state->last_block = 0;
9863 first = RHS(state->main_function, 0);
9866 if (triple_stores_block(state, ins)) {
9870 } while(ins != first);
9875 struct block *block;
9876 struct sdom_block *sdominates;
9877 struct sdom_block *sdom_next;
9878 struct sdom_block *sdom;
9879 struct sdom_block *label;
9880 struct sdom_block *parent;
9881 struct sdom_block *ancestor;
9886 static void unsdom_block(struct sdom_block *block)
9888 struct sdom_block **ptr;
9889 if (!block->sdom_next) {
9892 ptr = &block->sdom->sdominates;
9894 if ((*ptr) == block) {
9895 *ptr = block->sdom_next;
9898 ptr = &(*ptr)->sdom_next;
9902 static void sdom_block(struct sdom_block *sdom, struct sdom_block *block)
9904 unsdom_block(block);
9906 block->sdom_next = sdom->sdominates;
9907 sdom->sdominates = block;
9912 static int initialize_sdblock(struct sdom_block *sd,
9913 struct block *parent, struct block *block, int vertex)
9915 if (!block || (sd[block->vertex].block == block)) {
9919 /* Renumber the blocks in a convinient fashion */
9920 block->vertex = vertex;
9921 sd[vertex].block = block;
9922 sd[vertex].sdom = &sd[vertex];
9923 sd[vertex].label = &sd[vertex];
9924 sd[vertex].parent = parent? &sd[parent->vertex] : 0;
9925 sd[vertex].ancestor = 0;
9926 sd[vertex].vertex = vertex;
9927 vertex = initialize_sdblock(sd, block, block->left, vertex);
9928 vertex = initialize_sdblock(sd, block, block->right, vertex);
9932 static int initialize_sdpblock(
9933 struct compile_state *state, struct sdom_block *sd,
9934 struct block *parent, struct block *block, int vertex)
9936 struct block_set *user;
9937 if (!block || (sd[block->vertex].block == block)) {
9941 /* Renumber the blocks in a convinient fashion */
9942 block->vertex = vertex;
9943 sd[vertex].block = block;
9944 sd[vertex].sdom = &sd[vertex];
9945 sd[vertex].label = &sd[vertex];
9946 sd[vertex].parent = parent? &sd[parent->vertex] : 0;
9947 sd[vertex].ancestor = 0;
9948 sd[vertex].vertex = vertex;
9949 for(user = block->use; user; user = user->next) {
9950 vertex = initialize_sdpblock(state, sd, block, user->member, vertex);
9955 static int setup_sdpblocks(struct compile_state *state, struct sdom_block *sd)
9957 struct block *block;
9959 /* Setup as many sdpblocks as possible without using fake edges */
9960 vertex = initialize_sdpblock(state, sd, 0, state->last_block, 0);
9962 /* Walk through the graph and find unconnected blocks. If
9963 * we can, add a fake edge from the unconnected blocks to the
9966 block = state->first_block->last->next->u.block;
9967 for(; block && block != state->first_block; block = block->last->next->u.block) {
9968 if (sd[block->vertex].block == block) {
9971 if (block->left != 0) {
9975 #if DEBUG_SDP_BLOCKS
9976 fprintf(stderr, "Adding %d\n", vertex +1);
9979 block->left = state->last_block;
9980 use_block(block->left, block);
9981 vertex = initialize_sdpblock(state, sd, state->last_block, block, vertex);
9986 static void compress_ancestors(struct sdom_block *v)
9988 /* This procedure assumes ancestor(v) != 0 */
9989 /* if (ancestor(ancestor(v)) != 0) {
9990 * compress(ancestor(ancestor(v)));
9991 * if (semi(label(ancestor(v))) < semi(label(v))) {
9992 * label(v) = label(ancestor(v));
9994 * ancestor(v) = ancestor(ancestor(v));
10000 if (v->ancestor->ancestor) {
10001 compress_ancestors(v->ancestor->ancestor);
10002 if (v->ancestor->label->sdom->vertex < v->label->sdom->vertex) {
10003 v->label = v->ancestor->label;
10005 v->ancestor = v->ancestor->ancestor;
10009 static void compute_sdom(struct compile_state *state, struct sdom_block *sd)
10013 * for each v <= pred(w) {
10015 * if (semi[u] < semi[w] {
10016 * semi[w] = semi[u];
10019 * add w to bucket(vertex(semi[w]));
10020 * LINK(parent(w), w);
10023 * for each v <= bucket(parent(w)) {
10024 * delete v from bucket(parent(w));
10026 * dom(v) = (semi[u] < semi[v]) ? u : parent(w);
10029 for(i = state->last_vertex; i >= 2; i--) {
10030 struct sdom_block *v, *parent, *next;
10031 struct block_set *user;
10032 struct block *block;
10033 block = sd[i].block;
10034 parent = sd[i].parent;
10036 for(user = block->use; user; user = user->next) {
10037 struct sdom_block *v, *u;
10038 v = &sd[user->member->vertex];
10039 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10040 if (u->sdom->vertex < sd[i].sdom->vertex) {
10041 sd[i].sdom = u->sdom;
10044 sdom_block(sd[i].sdom, &sd[i]);
10045 sd[i].ancestor = parent;
10047 for(v = parent->sdominates; v; v = next) {
10048 struct sdom_block *u;
10049 next = v->sdom_next;
10051 u = (!v->ancestor) ? v : (compress_ancestors(v), v->label);
10052 v->block->idom = (u->sdom->vertex < v->sdom->vertex)?
10053 u->block : parent->block;
10058 static void compute_spdom(struct compile_state *state, struct sdom_block *sd)
10062 * for each v <= pred(w) {
10064 * if (semi[u] < semi[w] {
10065 * semi[w] = semi[u];
10068 * add w to bucket(vertex(semi[w]));
10069 * LINK(parent(w), w);
10072 * for each v <= bucket(parent(w)) {
10073 * delete v from bucket(parent(w));
10075 * dom(v) = (semi[u] < semi[v]) ? u : parent(w);
10078 for(i = state->last_vertex; i >= 2; i--) {
10079 struct sdom_block *u, *v, *parent, *next;
10080 struct block *block;
10081 block = sd[i].block;
10082 parent = sd[i].parent;
10085 v = &sd[block->left->vertex];
10086 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10087 if (u->sdom->vertex < sd[i].sdom->vertex) {
10088 sd[i].sdom = u->sdom;
10091 if (block->right && (block->right != block->left)) {
10092 v = &sd[block->right->vertex];
10093 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10094 if (u->sdom->vertex < sd[i].sdom->vertex) {
10095 sd[i].sdom = u->sdom;
10098 sdom_block(sd[i].sdom, &sd[i]);
10099 sd[i].ancestor = parent;
10101 for(v = parent->sdominates; v; v = next) {
10102 struct sdom_block *u;
10103 next = v->sdom_next;
10105 u = (!v->ancestor) ? v : (compress_ancestors(v), v->label);
10106 v->block->ipdom = (u->sdom->vertex < v->sdom->vertex)?
10107 u->block : parent->block;
10112 static void compute_idom(struct compile_state *state, struct sdom_block *sd)
10115 for(i = 2; i <= state->last_vertex; i++) {
10116 struct block *block;
10117 block = sd[i].block;
10118 if (block->idom->vertex != sd[i].sdom->vertex) {
10119 block->idom = block->idom->idom;
10121 idom_block(block->idom, block);
10123 sd[1].block->idom = 0;
10126 static void compute_ipdom(struct compile_state *state, struct sdom_block *sd)
10129 for(i = 2; i <= state->last_vertex; i++) {
10130 struct block *block;
10131 block = sd[i].block;
10132 if (block->ipdom->vertex != sd[i].sdom->vertex) {
10133 block->ipdom = block->ipdom->ipdom;
10135 ipdom_block(block->ipdom, block);
10137 sd[1].block->ipdom = 0;
10141 * Every vertex of a flowgraph G = (V, E, r) except r has
10142 * a unique immediate dominator.
10143 * The edges {(idom(w), w) |w <= V - {r}} form a directed tree
10144 * rooted at r, called the dominator tree of G, such that
10145 * v dominates w if and only if v is a proper ancestor of w in
10146 * the dominator tree.
10149 * If v and w are vertices of G such that v <= w,
10150 * then any path from v to w must contain a common ancestor
10153 /* Lemma 2: For any vertex w != r, idom(w) -> w */
10154 /* Lemma 3: For any vertex w != r, sdom(w) -> w */
10155 /* Lemma 4: For any vertex w != r, idom(w) -> sdom(w) */
10157 * Let w != r. Suppose every u for which sdom(w) -> u -> w satisfies
10158 * sdom(u) >= sdom(w). Then idom(w) = sdom(w).
10161 * Let w != r and let u be a vertex for which sdom(u) is
10162 * minimum among vertices u satisfying sdom(w) -> u -> w.
10163 * Then sdom(u) <= sdom(w) and idom(u) = idom(w).
10165 /* Lemma 5: Let vertices v,w satisfy v -> w.
10166 * Then v -> idom(w) or idom(w) -> idom(v)
/* Compute the immediate dominator of every basic block using the
 * Lengauer-Tarjan semidominator algorithm (see the lemmas quoted
 * above).  Fills in the idom information consumed by find_block_domf.
 */
static void find_immediate_dominators(struct compile_state *state)
	struct sdom_block *sd;
	/* w->sdom = min{v| there is a path v = v0,v1,...,vk = w such that:
	 * vi > w for (1 <= i <= k - 1}
	 */
	/* Theorem 4:
	 * For any vertex w != r.
	 * sdom(w) = min(
	 * {v|(v,w) <= E and v < w } U
	 * {sdom(u) | u > w and there is an edge (v, w) such that u -> v})
	 */
	/* Corollary 1:
	 * Let w != r and let u be a vertex for which sdom(u) is
	 * minimum among vertices u satisfying sdom(w) -> u -> w.
	 * Then idom(w) is:
	 * { sdom(w) if sdom(w) = sdom(u),
	 * { idom(u) otherwise
	 */
	/* The algorithm consists of the following 4 steps.
	 * Step 1. Carry out a depth-first search of the problem graph.
	 * Number the vertices from 1 to N as they are reached during
	 * the search. Initialize the variables used in succeeding steps.
	 * Step 2. Compute the semidominators of all vertices by applying
	 * theorem 4. Carry out the computation vertex by vertex in
	 * decreasing order by number.
	 * Step 3. Implicitly define the immediate dominator of each vertex
	 * by applying Corollary 1.
	 * Step 4. Explicitly define the immediate dominator of each vertex,
	 * carrying out the computation vertex by vertex in increasing order
	 */
	/* Step 1 initialize the basic block information */
	sd = xcmalloc(sizeof(*sd) * (state->last_vertex + 1), "sdom_state");
	initialize_sdblock(sd, 0, state->first_block, 0);
	/* Step 2 compute the semidominators */
	/* Step 3 implicitly define the immediate dominator of each vertex */
	compute_sdom(state, sd);
	/* Step 4 explicitly define the immediate dominator of each vertex */
	compute_idom(state, sd);
/* Compute the immediate post-dominators: the same Lengauer-Tarjan
 * machinery run over the reversed control flow graph, rooted at the
 * exit block (setup_sdpblocks / compute_spdom / compute_ipdom).
 */
static void find_post_dominators(struct compile_state *state)
	struct sdom_block *sd;
	/* Step 1 initialize the basic block information */
	sd = xcmalloc(sizeof(*sd) * (state->last_vertex + 1), "sdom_state");
	vertex = setup_sdpblocks(state, sd);
	/* Every block must be reachable walking backwards from the exit,
	 * otherwise the post-dominator information would be incomplete. */
	if (vertex != state->last_vertex) {
		internal_error(state, 0, "missing %d blocks\n",
			state->last_vertex - vertex);
	/* Step 2 compute the semidominators */
	/* Step 3 implicitly define the immediate dominator of each vertex */
	compute_spdom(state, sd);
	/* Step 4 explicitly define the immediate dominator of each vertex */
	compute_ipdom(state, sd);
10241 static void find_block_domf(struct compile_state *state, struct block *block)
10243 struct block *child;
10244 struct block_set *user;
10245 if (block->domfrontier != 0) {
10246 internal_error(state, block->first, "domfrontier present?");
10248 for(user = block->idominates; user; user = user->next) {
10249 child = user->member;
10250 if (child->idom != block) {
10251 internal_error(state, block->first, "bad idom");
10253 find_block_domf(state, child);
10255 if (block->left && block->left->idom != block) {
10256 domf_block(block, block->left);
10258 if (block->right && block->right->idom != block) {
10259 domf_block(block, block->right);
10261 for(user = block->idominates; user; user = user->next) {
10262 struct block_set *frontier;
10263 child = user->member;
10264 for(frontier = child->domfrontier; frontier; frontier = frontier->next) {
10265 if (frontier->member->idom != block) {
10266 domf_block(block, frontier->member);
10272 static void find_block_ipdomf(struct compile_state *state, struct block *block)
10274 struct block *child;
10275 struct block_set *user;
10276 if (block->ipdomfrontier != 0) {
10277 internal_error(state, block->first, "ipdomfrontier present?");
10279 for(user = block->ipdominates; user; user = user->next) {
10280 child = user->member;
10281 if (child->ipdom != block) {
10282 internal_error(state, block->first, "bad ipdom");
10284 find_block_ipdomf(state, child);
10286 if (block->left && block->left->ipdom != block) {
10287 ipdomf_block(block, block->left);
10289 if (block->right && block->right->ipdom != block) {
10290 ipdomf_block(block, block->right);
10292 for(user = block->idominates; user; user = user->next) {
10293 struct block_set *frontier;
10294 child = user->member;
10295 for(frontier = child->ipdomfrontier; frontier; frontier = frontier->next) {
10296 if (frontier->member->ipdom != block) {
10297 ipdomf_block(block, frontier->member);
/* walk_blocks() callback: print the set of blocks immediately
 * dominated by "block" and sanity-check each child's idom pointer.
 * arg carries the FILE* destination (fp is assigned from it on a
 * line not shown in this excerpt).
 */
static void print_dominated(
	struct compile_state *state, struct block *block, void *arg)
	struct block_set *user;
	fprintf(fp, "%d:", block->vertex);
	for(user = block->idominates; user; user = user->next) {
		fprintf(fp, " %d", user->member->vertex);
		if (user->member->idom != block) {
			internal_error(state, user->member->first, "bad idom");
10319 static void print_dominators(struct compile_state *state, FILE *fp)
10321 fprintf(fp, "\ndominates\n");
10322 walk_blocks(state, print_dominated, fp);
/* Recursively print each block's dominance frontier in depth-first
 * vertex order.  Returns the highest vertex visited so far; the
 * "vertex + 1" check skips null blocks and blocks already printed
 * via another control flow path.
 */
static int print_frontiers(
	struct compile_state *state, struct block *block, int vertex)
	struct block_set *user;
	if (!block || (block->vertex != vertex + 1)) {
	printf("%d:", block->vertex);
	for(user = block->domfrontier; user; user = user->next) {
		printf(" %d", user->member->vertex);
	vertex = print_frontiers(state, block->left, vertex);
	vertex = print_frontiers(state, block->right, vertex);
10346 static void print_dominance_frontiers(struct compile_state *state)
10348 printf("\ndominance frontiers\n");
10349 print_frontiers(state, state->first_block, 0);
/* Build the forward dominator tree and the dominance frontiers for
 * the current function, with optional debug dumps.
 */
static void analyze_idominators(struct compile_state *state)
	/* Find the immediate dominators */
	find_immediate_dominators(state);
	/* Find the dominance frontiers */
	find_block_domf(state, state->first_block);
	/* If debugging, print what I have just found */
	if (state->debug & DEBUG_FDOMINATORS) {
		print_dominators(state, stdout);
		print_dominance_frontiers(state);
		print_control_flow(state);
/* walk_blocks() callback: print the set of blocks immediately
 * post-dominated by "block" and sanity-check each child's ipdom
 * pointer.  arg carries the FILE* destination (fp is assigned from
 * it on a line not shown in this excerpt).
 */
static void print_ipdominated(
	struct compile_state *state, struct block *block, void *arg)
	struct block_set *user;
	fprintf(fp, "%d:", block->vertex);
	for(user = block->ipdominates; user; user = user->next) {
		fprintf(fp, " %d", user->member->vertex);
		if (user->member->ipdom != block) {
			internal_error(state, user->member->first, "bad ipdom");
10385 static void print_ipdominators(struct compile_state *state, FILE *fp)
10387 fprintf(fp, "\nipdominates\n");
10388 walk_blocks(state, print_ipdominated, fp);
/* Recursively print each block's post-dominance frontier, walking
 * control flow predecessors (block->use) backwards from the exit in
 * vertex order.  Returns the highest vertex visited so far.
 */
static int print_pfrontiers(
	struct compile_state *state, struct block *block, int vertex)
	struct block_set *user;
	if (!block || (block->vertex != vertex + 1)) {
	printf("%d:", block->vertex);
	for(user = block->ipdomfrontier; user; user = user->next) {
		printf(" %d", user->member->vertex);
	for(user = block->use; user; user = user->next) {
		vertex = print_pfrontiers(state, user->member, vertex);
10411 static void print_ipdominance_frontiers(struct compile_state *state)
10413 printf("\nipdominance frontiers\n");
10414 print_pfrontiers(state, state->last_block, 0);
/* Build the post-dominator tree and the post-dominance frontiers
 * (control dependencies) for the current function, with optional
 * debug dumps.
 */
static void analyze_ipdominators(struct compile_state *state)
	/* Find the post dominators */
	find_post_dominators(state);
	/* Find the control dependencies (post dominance frontiers) */
	find_block_ipdomf(state, state->last_block);
	/* If debugging, print what I have just found */
	if (state->debug & DEBUG_RDOMINATORS) {
		print_ipdominators(state, stdout);
		print_ipdominance_frontiers(state);
		print_control_flow(state);
/* Return nonzero if block "dom" dominates block "sub": walk sub's
 * idom chain toward the root until dom is found or the chain ends
 * (the chain-walk and return lines are not shown in this excerpt).
 */
static int bdominates(struct compile_state *state,
	struct block *dom, struct block *sub)
	while(sub && (sub != dom)) {
/* Return nonzero if instruction "dom" dominates instruction "sub".
 * Instructions in different blocks: decided by block dominance.
 * Same block: scan backwards from sub toward the block's first
 * instruction looking for dom (scan setup lines not shown here).
 */
static int tdominates(struct compile_state *state,
	struct triple *dom, struct triple *sub)
	struct block *bdom, *bsub;
	bdom = block_of_triple(state, dom);
	bsub = block_of_triple(state, sub);
	if (bdom != bsub) {
		result = bdominates(state, bdom, bsub);
		struct triple *ins;
		while((ins != bsub->first) && (ins != dom)) {
		result = (ins == dom);
/* SSA construction, step 1: phi placement (Cytron et al.).
 * For every automatic variable (OP_ADECL) with uses, seed a work list
 * with the blocks that write it, then insert an OP_PHI in each block
 * of the iterated dominance frontier.  has_already[]/work[] are
 * compared against an iteration counter (declared on a line not
 * shown) so the arrays need not be cleared per variable.
 */
static void insert_phi_operations(struct compile_state *state)
	struct triple *first;
	int *has_already, *work;
	struct block *work_list, **work_list_tail;
	struct triple *var, *vnext;
	size = sizeof(int) * (state->last_vertex + 1);
	has_already = xcmalloc(size, "has_already");
	work = xcmalloc(size, "work");
	first = RHS(state->main_function, 0);
	for(var = first->next; var != first ; var = vnext) {
		struct block *block;
		struct triple_set *user, *unext;
		/* Only variables that are actually used need phis */
		if ((var->op != OP_ADECL) || !var->use) {
		work_list_tail = &work_list;
		/* Collect the blocks that write this variable */
		for(user = var->use; user; user = unext) {
			unext = user->next;
			if (user->member->op == OP_READ) {
			if (user->member->op != OP_WRITE) {
				internal_error(state, user->member,
					"bad variable access");
			block = user->member->u.block;
				warning(state, user->member, "dead code");
				release_triple(state, user->member);
			if (work[block->vertex] >= iter) {
			work[block->vertex] = iter;
			*work_list_tail = block;
			block->work_next = 0;
			work_list_tail = &block->work_next;
		/* Place a phi in every dominance frontier block, and keep
		 * iterating as the frontier blocks themselves become
		 * definition sites.
		 */
		for(block = work_list; block; block = block->work_next) {
			struct block_set *df;
			for(df = block->domfrontier; df; df = df->next) {
				struct triple *phi;
				struct block *front;
				front = df->member;
				if (has_already[front->vertex] >= iter) {
				/* Count how many edges flow into this block */
				in_edges = front->users;
				/* Insert a phi function for this variable */
				get_occurance(var->occurance);
				phi = alloc_triple(
					state, OP_PHI, var->type, -1, in_edges,
				phi->u.block = front;
				MISC(phi, 0) = var;
				use_triple(var, phi);
				/* Insert the phi functions immediately after the label */
				insert_triple(state, front->first->next, phi);
				if (front->first == front->last) {
					front->last = front->first->next;
				has_already[front->vertex] = iter;
				/* If necessary plan to visit the basic block */
				if (work[front->vertex] >= iter) {
				work[front->vertex] = iter;
				*work_list_tail = front;
				front->work_next = 0;
				work_list_tail = &front->work_next;
	xfree(has_already);
/* Count the OP_ADECL triples in the function and assign each a small
 * integer id (used later to index the rename stacks).  Returns the
 * number of adecls (counter/assignment lines not shown here).
 */
static int count_and_number_adecls(struct compile_state *state)
	struct triple *first, *ins;
	first = RHS(state->main_function, 0);
		if (ins->op == OP_ADECL) {
	} while(ins != first);
/* Return the value on top of var's rename stack; presumably returns
 * 0 when the stack is empty (the empty path is not shown here).
 */
static struct triple *peek_triple(struct triple_set **stacks, struct triple *var)
	struct triple_set *head;
	struct triple *top_val;
	head = stacks[var->id];
		top_val = head->member;
/* Push val onto var's rename stack; stacks is indexed by var->id. */
static void push_triple(struct triple_set **stacks, struct triple *var, struct triple *val)
	struct triple_set *new;
	/* Prepend new at the head of the list,
	 * it's the only sensible behavior for a stack.
	 */
	new = xcmalloc(sizeof(*new), "triple_set");
	new->next = stacks[var->id];
	stacks[var->id] = new;
/* Remove the first (topmost) entry matching oldval from var's rename
 * stack and free its node (removal/free lines not shown here).
 */
static void pop_triple(struct triple_set **stacks, struct triple *var, struct triple *oldval)
	struct triple_set *set, **ptr;
	ptr = &stacks[var->id];
		if (set->member == oldval) {
			/* Only free one occurrence from the stack */
/* Bind the phi inputs in "block" that correspond to the control flow
 * edge coming from "parent": determine which incoming edge parent is,
 * then for each OP_PHI at the top of block store the variable's
 * current (renamed) value into the matching RHS slot.
 */
static void fixup_block_phi_variables(
	struct compile_state *state, struct triple_set **stacks, struct block *parent, struct block *block)
	struct block_set *set;
	struct triple *ptr;
	if (!parent || !block)
	/* Find the edge I am coming in on */
	for(set = block->use; set; set = set->next, edge++) {
		if (set->member == parent) {
		internal_error(state, 0, "phi input is not on a control predecessor");
	for(ptr = block->first; ; ptr = ptr->next) {
		if (ptr->op == OP_PHI) {
			struct triple *var, *val, **slot;
			var = MISC(ptr, 0);
				internal_error(state, ptr, "no var???");
			/* Find the current value of the variable */
			val = peek_triple(stacks, var);
			/* Renaming must have replaced all reads/writes */
			if (val && ((val->op == OP_WRITE) || (val->op == OP_READ))) {
				internal_error(state, val, "bad value in phi");
			if (edge >= TRIPLE_RHS(ptr->sizes)) {
				internal_error(state, ptr, "edges > phi rhs");
			slot = &RHS(ptr, edge);
			if ((*slot != 0) && (*slot != val)) {
				internal_error(state, ptr, "phi already bound on this edge");
			use_triple(val, ptr);
		if (ptr == block->last) {
/* SSA construction, step 2: variable renaming (Cytron et al.).
 * First pass over the block: replace each OP_READ with the current
 * value from the variable's stack, and push the value of each
 * OP_WRITE / OP_PHI.  Then fix up phi inputs in the control flow
 * successors, recurse into dominator-tree children, and finally a
 * second pass pops everything this block pushed and releases the
 * now-dead OP_WRITE triples.
 */
static void rename_block_variables(
	struct compile_state *state, struct triple_set **stacks, struct block *block)
	struct block_set *user;
	struct triple *ptr, *next, *last;
	last = block->first;
	for(ptr = block->first; !done; ptr = next) {
		if (ptr == block->last) {
		if (ptr->op == OP_READ) {
			struct triple *var, *val;
			unuse_triple(var, ptr);
			/* Find the current value of the variable */
			val = peek_triple(stacks, var);
				error(state, ptr, "variable used without being set");
			if ((val->op == OP_WRITE) || (val->op == OP_READ)) {
				internal_error(state, val, "bad value in read");
			propogate_use(state, ptr, val);
			release_triple(state, ptr);
		if (ptr->op == OP_WRITE) {
			struct triple *var, *val, *tval;
			tval = val = RHS(ptr, 1);
			if ((val->op == OP_WRITE) || (val->op == OP_READ)) {
				internal_error(state, ptr, "bad value in write");
			/* Insert a copy if the types differ */
			if (!equiv_types(ptr->type, val->type)) {
				/* Constants are cloned at the new type instead
				 * of copied through a register. */
				if (val->op == OP_INTCONST) {
					tval = pre_triple(state, ptr, OP_INTCONST, ptr->type, 0, 0);
					tval->u.cval = val->u.cval;
					tval = pre_triple(state, ptr, OP_COPY, ptr->type, val, 0);
					use_triple(val, tval);
				unuse_triple(val, ptr);
				RHS(ptr, 1) = tval;
				use_triple(tval, ptr);
			propogate_use(state, ptr, tval);
			unuse_triple(var, ptr);
			/* Push OP_WRITE ptr->right onto a stack of variable uses */
			push_triple(stacks, var, tval);
		if (ptr->op == OP_PHI) {
			struct triple *var;
			var = MISC(ptr, 0);
			/* Push the OP_PHI onto a stack of variable uses */
			push_triple(stacks, var, ptr);
	block->last = last;
	/* Fixup PHI functions in the cf successors */
	fixup_block_phi_variables(state, stacks, block, block->left);
	fixup_block_phi_variables(state, stacks, block, block->right);
	/* rename variables in the dominated nodes */
	for(user = block->idominates; user; user = user->next) {
		rename_block_variables(state, stacks, user->member);
	/* pop the renamed variable stack */
	last = block->first;
	for(ptr = block->first; !done ; ptr = next) {
		if (ptr == block->last) {
		if (ptr->op == OP_WRITE) {
			struct triple *var;
			/* Pop OP_WRITE ptr->right from the stack of variable uses */
			pop_triple(stacks, var, RHS(ptr, 1));
			release_triple(state, ptr);
		if (ptr->op == OP_PHI) {
			struct triple *var;
			var = MISC(ptr, 0);
			/* Pop the OP_PHI from the stack of variable uses */
			pop_triple(stacks, var, ptr);
	block->last = last;
/* After renaming, the OP_ADECL declarations are dead: each remaining
 * use must be a phi's MISC(0) back-pointer, which is unlinked before
 * the declaration is released.  Recurses over the dominator tree.
 */
static void prune_block_variables(struct compile_state *state,
	struct block *block)
	struct block_set *user;
	struct triple *next, *last, *ptr;
	last = block->first;
	for(ptr = block->first; !done; ptr = next) {
		if (ptr == block->last) {
		if (ptr->op == OP_ADECL) {
			struct triple_set *user, *next;
			for(user = ptr->use; user; user = next) {
				struct triple *use;
				use = user->member;
				if (use->op != OP_PHI) {
					internal_error(state, use, "decl still used");
				if (MISC(use, 0) != ptr) {
					internal_error(state, use, "bad phi use of decl");
				unuse_triple(ptr, use);
			release_triple(state, ptr);
	block->last = last;
	for(user = block->idominates; user; user = user->next) {
		prune_block_variables(state, user->member);
/* Bookkeeping record used by prune_unused_phis(); further fields
 * (liveness flag, saved original id) are declared on lines not
 * shown in this excerpt. */
struct phi_triple {
	struct triple *phi;	/* the OP_PHI instruction being tracked */
/* Mark phi as live and recursively mark every phi that feeds one of
 * its arguments; the already-alive check terminates phi cycles.
 */
static void keep_phi(struct compile_state *state, struct phi_triple *live, struct triple *phi)
	struct triple **slot;
	if (live[phi->id].alive) {
	live[phi->id].alive = 1;
	zrhs = TRIPLE_RHS(phi->sizes);
	slot = &RHS(phi, 0);
	for(i = 0; i < zrhs; i++) {
		struct triple *used;
		if (used && (used->op == OP_PHI)) {
			keep_phi(state, live, used);
/* Dead-phi elimination: phis used only by other phis (directly or in
 * a cycle) compute values never read by real code.  Number the phis,
 * mark alive those reachable from non-phi users via keep_phi(),
 * delete the rest, and diagnose surviving phis with an unset input
 * ("variable not set on all paths").
 */
static void prune_unused_phis(struct compile_state *state)
	struct triple *first, *phi;
	struct phi_triple *live;
	/* Find the first instruction */
	first = RHS(state->main_function, 0);
	/* Count how many phi functions I need to process */
	for(phi = first->next; phi != first; phi = phi->next) {
		if (phi->op == OP_PHI) {
	/* Mark them all dead */
	live = xcmalloc(sizeof(*live) * (phis + 1), "phi_triple");
	for(phi = first->next; phi != first; phi = phi->next) {
		if (phi->op != OP_PHI) {
		live[phis].alive = 0;
		live[phis].orig_id = phi->id;
		live[phis].phi = phi;
	/* Mark phis alive that are used by non phis */
	for(i = 0; i < phis; i++) {
		struct triple_set *set;
		for(set = live[i].phi->use; !live[i].alive && set; set = set->next) {
			if (set->member->op != OP_PHI) {
				keep_phi(state, live, live[i].phi);
	/* Delete the extraneous phis */
	for(i = 0; i < phis; i++) {
		struct triple **slot;
		if (!live[i].alive) {
			release_triple(state, live[i].phi);
		slot = &RHS(phi, 0);
		zrhs = TRIPLE_RHS(phi->sizes);
		for(j = 0; j < zrhs; j++) {
				error(state, phi, "variable not set on all paths to use");
/* Driver for SSA construction: place phis, rename variables over the
 * dominator tree, drop the variable declarations, then prune phis no
 * real instruction uses.
 */
static void transform_to_ssa_form(struct compile_state *state)
	struct triple_set **stacks;
	insert_phi_operations(state);
	/* Debug dump (conditional guard lines are not shown here) */
	printf("@%s:%d\n", __FILE__, __LINE__);
	print_blocks(state, stdout);
	/* Allocate stacks for the Variables */
	adecls = count_and_number_adecls(state);
	stacks = xcmalloc(sizeof(stacks[0])*(adecls + 1), "adecl stacks");
	rename_block_variables(state, stacks, state->first_block);
	prune_block_variables(state, state->first_block);
	prune_unused_phis(state);
/* walk_blocks() callback: reset every block's vertex number so a
 * following traversal can renumber only the reachable blocks
 * (the assignment line is not shown in this excerpt). */
static void clear_vertex(
	struct compile_state *state, struct block *block, void *arg)
/* Depth-first renumber of the blocks reachable from "block", handing
 * out fresh vertex numbers from *next_vertex.  Follows all branch
 * targets, or the fall-through successor when the last instruction
 * is not a branch.  Blocks left with vertex == 0 are dead.
 */
static void mark_live_block(
	struct compile_state *state, struct block *block, int *next_vertex)
	/* See if this is a block that has not been marked */
	if (block->vertex != 0) {
	block->vertex = *next_vertex;
	if (triple_is_branch(state, block->last)) {
		struct triple **targ;
		targ = triple_targ(state, block->last, 0);
		for(; targ; targ = triple_targ(state, block->last, targ)) {
			if (!triple_stores_block(state, *targ)) {
				internal_error(state, 0, "bad targ");
			mark_live_block(state, (*targ)->u.block, next_vertex);
	/* Fall through, unless the next instruction is the function end */
	else if (block->last->next != RHS(state->main_function, 0)) {
		struct triple *ins;
		ins = block->last->next;
		if (!triple_stores_block(state, ins)) {
			internal_error(state, 0, "bad block start");
		mark_live_block(state, ins->u.block, next_vertex);
/* Convert out of SSA form: replace every phi with a fresh variable
 * (OP_ADECL) plus an OP_READ of it, and insert an OP_WRITE of each
 * phi argument on the corresponding incoming edge.  Uses reached
 * only from dead blocks are forgotten first, and a variable that
 * ends up with no writers is freed again.
 */
static void transform_from_ssa_form(struct compile_state *state)
	/* To get out of ssa form we insert moves on the incoming
	 * edges to blocks containing phi functions.
	 */
	struct triple *first;
	struct triple *phi, *next;
	/* Walk the control flow to see which blocks remain alive */
	walk_blocks(state, clear_vertex, 0);
	mark_live_block(state, state->first_block, &next_vertex);
	/* Walk all of the operations to find the phi functions */
	first = RHS(state->main_function, 0);
	for(phi = first->next; phi != first ; phi = next) {
		struct block_set *set;
		struct block *block;
		struct triple **slot;
		struct triple *var, *read;
		struct triple_set *use, *use_next;
		if (phi->op != OP_PHI) {
		block = phi->u.block;
		slot = &RHS(phi, 0);
		/* Forget uses from code in dead blocks */
		for(use = phi->use; use; use = use_next) {
			struct block *ublock;
			struct triple **expr;
			use_next = use->next;
			ublock = block_of_triple(state, use->member);
			if ((use->member == phi) || (ublock->vertex != 0)) {
			expr = triple_rhs(state, use->member, 0);
			for(; expr; expr = triple_rhs(state, use->member, expr)) {
				if (*expr == phi) {
			unuse_triple(phi, use->member);
#warning "CHECK_ME does the OP_ADECL need to be placed somewhere that dominates all of the incoming phi edges?"
		/* A variable to replace the phi function */
		var = post_triple(state, phi, OP_ADECL, phi->type, 0,0);
		/* A read of the single value that is set into the variable */
		read = post_triple(state, var, OP_READ, phi->type, var, 0);
		use_triple(var, read);
		/* Replaces uses of the phi with variable reads */
		propogate_use(state, phi, read);
		/* Walk all of the incoming edges/blocks and insert moves.
		 */
		for(edge = 0, set = block->use; set; set = set->next, edge++) {
			struct block *eblock;
			struct triple *move;
			struct triple *val, *base;
			eblock = set->member;
			unuse_triple(val, phi);
			/* Skip inputs that need no move on this edge */
			if (!val || (val == &zero_triple) ||
				(block->vertex == 0) || (eblock->vertex == 0) ||
				(val == phi) || (val == read)) {
			/* Make certain the write is placed in the edge block... */
			base = eblock->first;
			if (block_of_triple(state, val) == eblock) {
			move = post_triple(state, base, OP_WRITE, phi->type, var, val);
			use_triple(val, move);
			use_triple(var, move);
		/* See if there are any writers of var */
		for(use = var->use; use; use = use->next) {
			if ((use->member->op == OP_WRITE) &&
				(RHS(use->member, 0) == var)) {
		/* If var is not used free it */
			unuse_triple(var, read);
			free_triple(state, read);
			free_triple(state, var);
		/* Release the phi function */
		release_triple(state, phi);
11064 * Register conflict resolution
11065 * =========================================================
/* Determine the register assignment (color) of the value "def"
 * defines: start from def's own lhs constraint and fold in the rhs
 * constraint of every user, checking that pre-assigned hard
 * registers agree and the register-class intersection stays
 * non-empty.  Registers >= MAX_REGISTERS are virtual and treated
 * as unset.
 */
static struct reg_info find_def_color(
	struct compile_state *state, struct triple *def)
	struct triple_set *set;
	struct reg_info info;
	info.reg = REG_UNSET;
	if (!triple_is_def(state, def)) {
	info = arch_reg_lhs(state, def, 0);
	if (info.reg >= MAX_REGISTERS) {
		info.reg = REG_UNSET;
	for(set = def->use; set; set = set->next) {
		struct reg_info tinfo;
		i = find_rhs_use(state, set->member, def);
		tinfo = arch_reg_rhs(state, set->member, i);
		if (tinfo.reg >= MAX_REGISTERS) {
			tinfo.reg = REG_UNSET;
		if ((tinfo.reg != REG_UNSET) &&
			(info.reg != REG_UNSET) &&
			(tinfo.reg != info.reg)) {
			internal_error(state, def, "register conflict");
		if ((info.regcm & tinfo.regcm) == 0) {
			internal_error(state, def, "regcm conflict %x & %x == 0",
				info.regcm, tinfo.regcm);
		if (info.reg == REG_UNSET) {
			info.reg = tinfo.reg;
		info.regcm &= tinfo.regcm;
	if (info.reg >= MAX_REGISTERS) {
		internal_error(state, def, "register out of range");
11113 static struct reg_info find_lhs_pre_color(
11114 struct compile_state *state, struct triple *ins, int index)
11116 struct reg_info info;
11118 zrhs = TRIPLE_RHS(ins->sizes);
11119 zlhs = TRIPLE_LHS(ins->sizes);
11120 if (!zlhs && triple_is_def(state, ins)) {
11123 if (index >= zlhs) {
11124 internal_error(state, ins, "Bad lhs %d", index);
11126 info = arch_reg_lhs(state, ins, index);
11127 for(i = 0; i < zrhs; i++) {
11128 struct reg_info rinfo;
11129 rinfo = arch_reg_rhs(state, ins, i);
11130 if ((info.reg == rinfo.reg) &&
11131 (rinfo.reg >= MAX_REGISTERS)) {
11132 struct reg_info tinfo;
11133 tinfo = find_lhs_pre_color(state, RHS(ins, index), 0);
11134 info.reg = tinfo.reg;
11135 info.regcm &= tinfo.regcm;
11139 if (info.reg >= MAX_REGISTERS) {
11140 info.reg = REG_UNSET;
/* Forward declaration: find_lhs_post_color and find_rhs_post_color
 * are mutually recursive. */
static struct reg_info find_rhs_post_color(
	struct compile_state *state, struct triple *ins, int index);
/* Compute the post-color (register constraint imposed by consumers)
 * of lhs "index" of ins: merge ins's own lhs constraint with the rhs
 * constraint of every user of that value, diagnosing hard-register
 * and register-class conflicts.
 */
static struct reg_info find_lhs_post_color(
	struct compile_state *state, struct triple *ins, int index)
	struct triple_set *set;
	struct reg_info info;
	struct triple *lhs;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_lhs_post_color(%p, %d)\n",
	/* Index 0 of a defining instruction is the instruction itself */
	if ((index == 0) && triple_is_def(state, ins)) {
	else if (index < TRIPLE_LHS(ins->sizes)) {
		lhs = LHS(ins, index);
		internal_error(state, ins, "Bad lhs %d", index);
	info = arch_reg_lhs(state, ins, index);
	if (info.reg >= MAX_REGISTERS) {
		info.reg = REG_UNSET;
	for(set = lhs->use; set; set = set->next) {
		struct reg_info rinfo;
		struct triple *user;
		user = set->member;
		zrhs = TRIPLE_RHS(user->sizes);
		for(i = 0; i < zrhs; i++) {
			if (RHS(user, i) != lhs) {
			rinfo = find_rhs_post_color(state, user, i);
			if ((info.reg != REG_UNSET) &&
				(rinfo.reg != REG_UNSET) &&
				(info.reg != rinfo.reg)) {
				internal_error(state, ins, "register conflict");
			if ((info.regcm & rinfo.regcm) == 0) {
				internal_error(state, ins, "regcm conflict %x & %x == 0",
					info.regcm, rinfo.regcm);
			if (info.reg == REG_UNSET) {
				info.reg = rinfo.reg;
			info.regcm &= rinfo.regcm;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_lhs_post_color(%p, %d) -> ( %d, %x)\n",
		ins, index, info.reg, info.regcm);
/* Compute the post-color of rhs "index" of ins, folding in the
 * post-color of any lhs tied to the same virtual register as the
 * rhs operand.
 */
static struct reg_info find_rhs_post_color(
	struct compile_state *state, struct triple *ins, int index)
	struct reg_info info, rinfo;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_rhs_post_color(%p, %d)\n",
	rinfo = arch_reg_rhs(state, ins, index);
	zlhs = TRIPLE_LHS(ins->sizes);
	/* A defining instruction with no explicit lhs has one implicit result */
	if (!zlhs && triple_is_def(state, ins)) {
	if (info.reg >= MAX_REGISTERS) {
		info.reg = REG_UNSET;
	for(i = 0; i < zlhs; i++) {
		struct reg_info linfo;
		linfo = arch_reg_lhs(state, ins, i);
		/* lhs i is tied to this rhs when they share a virtual register */
		if ((linfo.reg == rinfo.reg) &&
			(linfo.reg >= MAX_REGISTERS)) {
			struct reg_info tinfo;
			tinfo = find_lhs_post_color(state, ins, i);
			if (tinfo.reg >= MAX_REGISTERS) {
				tinfo.reg = REG_UNSET;
			info.regcm &= linfo.regcm;
			info.regcm &= tinfo.regcm;
			if (info.reg != REG_UNSET) {
				internal_error(state, ins, "register conflict");
			if (info.regcm == 0) {
				internal_error(state, ins, "regcm conflict");
			info.reg = tinfo.reg;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_rhs_post_color(%p, %d) -> ( %d, %x)\n",
		ins, index, info.reg, info.regcm);
/* Combine the pre-color (from definitions feeding ins) and the
 * post-color (from users of ins) of lhs "index", verifying the two
 * directions do not demand different hard registers.
 */
static struct reg_info find_lhs_color(
	struct compile_state *state, struct triple *ins, int index)
	struct reg_info pre, post, info;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_lhs_color(%p, %d)\n",
	pre = find_lhs_pre_color(state, ins, index);
	post = find_lhs_post_color(state, ins, index);
	if ((pre.reg != post.reg) &&
		(pre.reg != REG_UNSET) &&
		(post.reg != REG_UNSET)) {
		internal_error(state, ins, "register conflict");
	info.regcm = pre.regcm & post.regcm;
	info.reg = pre.reg;
	if (info.reg == REG_UNSET) {
		info.reg = post.reg;
#if DEBUG_TRIPLE_COLOR
	fprintf(stderr, "find_lhs_color(%p, %d) -> ( %d, %x) ... (%d, %x) (%d, %x)\n",
		ins, index, info.reg, info.regcm,
		pre.reg, pre.regcm, post.reg, post.regcm);
/* Split the live range of ins's result: insert an OP_COPY right
 * after ins and retarget ins's users to the copy — except users
 * whose rhs slot needs no register when ins's lhs needs none either.
 * Returns the copy (return line not shown in this excerpt).
 */
static struct triple *post_copy(struct compile_state *state, struct triple *ins)
	struct triple_set *entry, *next;
	struct triple *out;
	struct reg_info info, rinfo;
	info = arch_reg_lhs(state, ins, 0);
	out = post_triple(state, ins, OP_COPY, ins->type, ins, 0);
	use_triple(RHS(out, 0), out);
	/* Get the users of ins to use out instead */
	for(entry = ins->use; entry; entry = next) {
		next = entry->next;
		if (entry->member == out) {
		i = find_rhs_use(state, entry->member, ins);
		rinfo = arch_reg_rhs(state, entry->member, i);
		if ((info.reg == REG_UNNEEDED) && (rinfo.reg == REG_UNNEEDED)) {
		replace_rhs_use(state, ins, out, entry->member);
	transform_to_arch_instruction(state, out);
/* Insert an OP_COPY of the given type in front of operand "index" of
 * ins, so the operand can be materialized in a register class the
 * instruction accepts.  Phi operands must be handled elsewhere.
 */
static struct triple *typed_pre_copy(
	struct compile_state *state, struct type *type, struct triple *ins, int index)
	/* Carefully insert enough operations so that I can
	 * enter any operation with a GPR32.
	 */
	struct triple **expr;
	struct reg_info info;
	if (ins->op == OP_PHI) {
		internal_error(state, ins, "pre_copy on a phi?");
	classes = arch_type_to_regcm(state, type);
	info = arch_reg_rhs(state, ins, index);
	expr = &RHS(ins, index);
	if ((info.regcm & classes) == 0) {
		internal_error(state, ins, "pre_copy with no register classes");
	in = pre_triple(state, ins, OP_COPY, type, *expr, 0);
	unuse_triple(*expr, ins);
	use_triple(RHS(in, 0), in);
	use_triple(in, ins);
	transform_to_arch_instruction(state, in);
11337 static struct triple *pre_copy(
11338 struct compile_state *state, struct triple *ins, int index)
11340 return typed_pre_copy(state, RHS(ins, index)->type, ins, index);
/* Before register allocation, isolate phis from mandatory live range
 * joins: copy each phi's result right after it, and materialize each
 * phi input as an OP_COPY placed in (or above) the corresponding
 * incoming edge block, at a point dominated by the input value and
 * not before the value or the phi itself.
 */
static void insert_copies_to_phi(struct compile_state *state)
	/* To get out of ssa form we insert moves on the incoming
	 * edges to blocks containing phi functions.
	 */
	struct triple *first;
	struct triple *phi;
	/* Walk all of the operations to find the phi functions */
	first = RHS(state->main_function, 0);
	for(phi = first->next; phi != first ; phi = phi->next) {
		struct block_set *set;
		struct block *block;
		struct triple **slot, *copy;
		if (phi->op != OP_PHI) {
		phi->id |= TRIPLE_FLAG_POST_SPLIT;
		block = phi->u.block;
		slot = &RHS(phi, 0);
		/* Phi's that feed into mandatory live range joins
		 * cause nasty complications. Insert a copy of
		 * the phi value so I never have to deal with
		 * that in the rest of the code.
		 */
		copy = post_copy(state, phi);
		copy->id |= TRIPLE_FLAG_PRE_SPLIT;
		/* Walk all of the incoming edges/blocks and insert moves.
		 */
		for(edge = 0, set = block->use; set; set = set->next, edge++) {
			struct block *eblock;
			struct triple *move;
			struct triple *val;
			struct triple *ptr;
			eblock = set->member;
			get_occurance(val->occurance);
			move = build_triple(state, OP_COPY, phi->type, val, 0,
			move->u.block = eblock;
			move->id |= TRIPLE_FLAG_PRE_SPLIT;
			use_triple(val, move);
			unuse_triple(val, phi);
			use_triple(move, phi);
			/* Walk up the dominator tree until I have found the appropriate block */
			while(eblock && !tdominates(state, val, eblock->last)) {
				eblock = eblock->idom;
				internal_error(state, phi, "Cannot find block dominated by %p",
			/* Walk through the block backwards to find
			 * an appropriate location for the OP_COPY.
			 */
			for(ptr = eblock->last; ptr != eblock->first; ptr = ptr->prev) {
				struct triple **expr;
				if ((ptr == phi) || (ptr == val)) {
				expr = triple_rhs(state, ptr, 0);
				for(;expr; expr = triple_rhs(state, ptr, expr)) {
					if ((*expr) == phi) {
			/* The move may not be placed after a branch */
			if (triple_is_branch(state, ptr)) {
				internal_error(state, ptr,
					"Could not insert write to phi");
			insert_triple(state, ptr->next, move);
			if (eblock->last == ptr) {
				eblock->last = move;
			transform_to_arch_instruction(state, move);
/* Singly linked set node used by the register-liveness code; "new"
 * optionally records a replacement triple for "member". */
struct triple_reg_set {
	struct triple_reg_set *next;
	struct triple *member;
	struct triple *new;
	/* Per-basic-block liveness record (the struct's header line is
	 * not shown in this excerpt): in/out are the live-in and
	 * live-out triple sets of the block. */
	struct block *block;
	struct triple_reg_set *in;
	struct triple_reg_set *out;
/* Add member (with optional replacement new_member) to the set at
 * *head unless it is already present.  Returns whether the set
 * changed (the early-return path is not shown in this excerpt).
 */
static int do_triple_set(struct triple_reg_set **head,
	struct triple *member, struct triple *new_member)
	struct triple_reg_set **ptr, *new;
		if ((*ptr)->member == member) {
		ptr = &(*ptr)->next;
	new = xcmalloc(sizeof(*new), "triple_set");
	new->member = member;
	new->new = new_member;
/* Remove (and free) the entry for member from the set at *head,
 * if present. */
static void do_triple_unset(struct triple_reg_set **head, struct triple *member)
	struct triple_reg_set *entry, **ptr;
		if (entry->member == member) {
			*ptr = entry->next;
		ptr = &entry->next;
/* Convenience wrappers over do_triple_set/do_triple_unset for the
 * live-in and live-out sets of a reg_block. */
static int in_triple(struct reg_block *rb, struct triple *in)
	return do_triple_set(&rb->in, in, 0);
static void unin_triple(struct reg_block *rb, struct triple *unin)
	do_triple_unset(&rb->in, unin);
static int out_triple(struct reg_block *rb, struct triple *out)
	return do_triple_set(&rb->out, out, 0);
static void unout_triple(struct reg_block *rb, struct triple *unout)
	do_triple_unset(&rb->out, unout);
/* Recursively number the blocks reachable backwards (via block->use)
 * from "block", pointing each blocks[] slot at its block.  Returns
 * the highest vertex assigned; already-visited blocks are skipped.
 */
static int initialize_regblock(struct reg_block *blocks,
	struct block *block, int vertex)
	struct block_set *user;
	if (!block || (blocks[block->vertex].block == block)) {
	/* Renumber the blocks in a convenient fashion */
	block->vertex = vertex;
	blocks[vertex].block = block;
	blocks[vertex].vertex = vertex;
	for(user = block->use; user; user = user->next) {
		vertex = initialize_regblock(blocks, user->member, vertex);
/* Propagate liveness through the phi functions of successor "suc":
 * the phi input flowing along the rb->block -> suc edge is live-out
 * of rb->block, and also live-in unless rb->block itself defines it.
 * Returns nonzero if either set changed (change/return lines are
 * not shown in this excerpt).
 */
static int phi_in(struct compile_state *state, struct reg_block *blocks,
	struct reg_block *rb, struct block *suc)
	/* Read the conditional input set of a successor block
	 * (i.e. the input to the phi nodes) and place it in the
	 * current blocks output set.
	 */
	struct block_set *set;
	struct triple *ptr;
	/* Find the edge I am coming in on */
	for(edge = 0, set = suc->use; set; set = set->next, edge++) {
		if (set->member == rb->block) {
		internal_error(state, 0, "Not coming on a control edge?");
	for(done = 0, ptr = suc->first; !done; ptr = ptr->next) {
		struct triple **slot, *expr, *ptr2;
		int out_change, done2;
		done = (ptr == suc->last);
		if (ptr->op != OP_PHI) {
		slot = &RHS(ptr, 0);
		out_change = out_triple(rb, expr);
		/* If we don't define the variable also place it
		 * in the current blocks input set.
		 */
		ptr2 = rb->block->first;
		for(done2 = 0; !done2; ptr2 = ptr2->next) {
			if (ptr2 == expr) {
			done2 = (ptr2 == rb->block->last);
		change |= in_triple(rb, expr);
/* Standard backward-liveness transfer: merge successor suc's live-in
 * set into rb's live-out set, and into rb's live-in set as well for
 * values rb does not define locally.  Finishes by folding in the
 * phi-derived edge inputs via phi_in.  Presumably returns the
 * accumulated "changed" flag (declaration/return elided here).
 */
11574 static int reg_in(struct compile_state *state, struct reg_block *blocks,
11575 struct reg_block *rb, struct block *suc)
11577 struct triple_reg_set *in_set;
11580 /* Read the input set of a successor block
11581 * and place it in the current blocks output set.
11583 in_set = blocks[suc->vertex].in;
11584 for(; in_set; in_set = in_set->next) {
11585 int out_change, done;
11586 struct triple *first, *last, *ptr;
11587 out_change = out_triple(rb, in_set->member);
11591 /* If we don't define the variable also place it
11592 * in the current blocks input set.
11594 first = rb->block->first;
11595 last = rb->block->last;
/* Linear scan of rb's triples to see if the value is defined here. */
11597 for(ptr = first; !done; ptr = ptr->next) {
11598 if (ptr == in_set->member) {
11601 done = (ptr == last);
11606 change |= in_triple(rb, in_set->member);
11608 change |= phi_in(state, blocks, rb, suc);
/* Add to rb's live-in set every value used inside the block but not
 * defined before its use (upward-exposed uses).  Phi operands are
 * deliberately skipped -- they are control-flow dependent and handled
 * by phi_in.  The inner scans make this quadratic in block size, as
 * the #warning below already notes.
 */
11613 static int use_in(struct compile_state *state, struct reg_block *rb)
11615 /* Find the variables we use but don't define and add
11616 * it to the current blocks input set.
11618 #warning "FIXME is this O(N^2) algorithm bad?"
11619 struct block *block;
11620 struct triple *ptr;
/* Walk the block's triples backwards, last to first. */
11625 for(done = 0, ptr = block->last; !done; ptr = ptr->prev) {
11626 struct triple **expr;
11627 done = (ptr == block->first);
11628 /* The variable a phi function uses depends on the
11629 * control flow, and is handled in phi_in, not
11632 if (ptr->op == OP_PHI) {
11635 expr = triple_rhs(state, ptr, 0);
11636 for(;expr; expr = triple_rhs(state, ptr, expr)) {
11637 struct triple *rhs, *test;
11643 /* See if rhs is defined in this block */
11644 for(tdone = 0, test = ptr; !tdone; test = test->prev) {
11645 tdone = (test == block->first);
11651 /* If I still have a valid rhs add it to in */
11652 change |= in_triple(rb, rhs);
/* Compute per-block live-in/live-out sets for all values.  Allocates
 * one reg_block per flow-graph vertex, numbers the blocks starting
 * from the exit block, then iterates reg_in/use_in over every block
 * until a fixed point is reached (the enclosing change loop is elided
 * in this excerpt).  Caller owns the returned array and frees it with
 * free_variable_lifetimes.
 */
11658 static struct reg_block *compute_variable_lifetimes(
11659 struct compile_state *state)
11661 struct reg_block *blocks;
11664 sizeof(*blocks)*(state->last_vertex + 1), "reg_block");
11665 initialize_regblock(blocks, state->last_block, 0);
11669 for(i = 1; i <= state->last_vertex; i++) {
11670 struct reg_block *rb;
11672 /* Add the left successor's input set to in */
11673 if (rb->block->left) {
11674 change |= reg_in(state, blocks, rb, rb->block->left);
11676 /* Add the right successor's input set to in */
11677 if ((rb->block->right) &&
11678 (rb->block->right != rb->block->left)) {
11679 change |= reg_in(state, blocks, rb, rb->block->right);
11681 /* Add use to in... */
11682 change |= use_in(state, rb);
/* Release the live-in and live-out sets of every reg_block produced by
 * compute_variable_lifetimes (the free of the blocks array itself is
 * in lines elided from this excerpt).
 */
11688 static void free_variable_lifetimes(
11689 struct compile_state *state, struct reg_block *blocks)
11692 /* free in_set && out_set on each block */
11693 for(i = 1; i <= state->last_vertex; i++) {
11694 struct triple_reg_set *entry, *next;
11695 struct reg_block *rb;
11697 for(entry = rb->in; entry ; entry = next) {
11698 next = entry->next;
11699 do_triple_unset(&rb->in, entry->member);
11701 for(entry = rb->out; entry; entry = next) {
11702 next = entry->next;
11703 do_triple_unset(&rb->out, entry->member);
/* Callback signature for walk_variable_lifetimes: invoked once per
 * instruction with the set of values live immediately around it.
 */
11710 typedef void (*wvl_cb_t)(
11711 struct compile_state *state,
11712 struct reg_block *blocks, struct triple_reg_set *live,
11713 struct reg_block *rb, struct triple *ins, void *arg);
/* For every basic block, reconstruct the live set by walking the
 * block's instructions backwards from its live-out set, invoking cb on
 * each instruction with the current live set.  The scratch 'live' list
 * is rebuilt and torn down per block, so blocks can be processed
 * independently.
 */
11715 static void walk_variable_lifetimes(struct compile_state *state,
11716 struct reg_block *blocks, wvl_cb_t cb, void *arg)
11720 for(i = 1; i <= state->last_vertex; i++) {
11721 struct triple_reg_set *live;
11722 struct triple_reg_set *entry, *next;
11723 struct triple *ptr, *prev;
11724 struct reg_block *rb;
11725 struct block *block;
11728 /* Get the blocks */
11732 /* Copy out into live */
11734 for(entry = rb->out; entry; entry = next) {
11735 next = entry->next;
11736 do_triple_set(&live, entry->member, entry->new);
11738 /* Walk through the basic block calculating live */
11739 for(done = 0, ptr = block->last; !done; ptr = prev) {
11740 struct triple **expr;
11743 done = (ptr == block->first);
11745 /* Ensure the current definition is in live */
11746 if (triple_is_def(state, ptr)) {
11747 do_triple_set(&live, ptr, 0);
11749 /* Inform the callback function of what is
11753 cb(state, blocks, live, rb, ptr, arg);
11755 /* Remove the current definition from live */
11756 do_triple_unset(&live, ptr);
11758 /* Add the current uses to live.
11760 * It is safe to skip phi functions because they do
11761 * not have any block local uses, and the block
11762 * output sets already properly account for what
11763 * control flow dependent uses phi functions do have.
11765 if (ptr->op == OP_PHI) {
11768 expr = triple_rhs(state, ptr, 0);
11769 for(;expr; expr = triple_rhs(state, ptr, expr)) {
11770 /* If the triple is not a definition skip it. */
11771 if (!*expr || !triple_is_def(state, *expr)) {
11774 do_triple_set(&live, *expr, 0);
/* Tear down the scratch live set before the next block. */
11778 for(entry = live; entry; entry = next) {
11779 next = entry->next;
11780 do_triple_unset(&live, entry->member);
/* Count the triples in main_function's circular instruction list by
 * walking it once back around to the first instruction (the increment
 * and return are in lines elided from this excerpt).
 */
11785 static int count_triples(struct compile_state *state)
11787 struct triple *first, *ins;
11789 first = RHS(state->main_function, 0);
11794 } while (ins != first);
/* Work-list node for dead-code elimination: one per triple, linking
 * the triple to its block and carrying the ALIVE flag below.  The
 * flags/color fields referenced by eliminate_inefectual_code are in
 * lines elided from this excerpt.
 */
11799 struct dead_triple {
11800 struct triple *triple;
11801 struct dead_triple *work_next;
11802 struct block *block;
/* Set once a triple is proven reachable/effective. */
11805 #define TRIPLE_FLAG_ALIVE 1
/* Mark the triple referenced by *expr as alive and, if it was not
 * already queued, append its dead_triple node to the work list via the
 * tail pointer.  Triples are indexed by their id, so a non-positive id
 * is an internal error; awakening a NOOP only warrants a warning.
 */
11809 static void awaken(
11810 struct compile_state *state,
11811 struct dead_triple *dtriple, struct triple **expr,
11812 struct dead_triple ***work_list_tail)
11814 struct triple *triple;
11815 struct dead_triple *dt;
11823 if (triple->id <= 0) {
11824 internal_error(state, triple, "bad triple id: %d",
11827 if (triple->op == OP_NOOP) {
11828 internal_warning(state, triple, "awakening noop?");
11831 dt = &dtriple[triple->id];
11832 if (!(dt->flags & TRIPLE_FLAG_ALIVE)) {
11833 dt->flags |= TRIPLE_FLAG_ALIVE;
/* Only enqueue once; work_next doubles as the "queued" marker. */
11834 if (!dt->work_next) {
11835 **work_list_tail = dt;
11836 *work_list_tail = &dt->work_next;
/* Dead-code elimination by marking: seed the work list with triples
 * that must be kept (impure operations, branches, the final
 * instruction), then transitively awaken everything they depend on --
 * data operands, targets, and, via the inverse-postdominance frontier,
 * the branches that control whether a live triple executes.  Any
 * triple left unmarked is released.  Triple ids are temporarily reused
 * as dtriple indices; the original id is saved in 'color' and restored
 * in the final sweep.
 */
11841 static void eliminate_inefectual_code(struct compile_state *state)
11843 struct block *block;
11844 struct dead_triple *dtriple, *work_list, **work_list_tail, *dt;
11846 struct triple *first, *ins;
11848 /* Setup the work list */
11850 work_list_tail = &work_list;
11852 first = RHS(state->main_function, 0);
11854 /* Count how many triples I have */
11855 triples = count_triples(state);
11857 /* Now put them in an array and mark all of the triples dead */
11858 dtriple = xcmalloc(sizeof(*dtriple) * (triples + 1), "dtriples");
/* Track the current block as we pass each label. */
11864 if (ins->op == OP_LABEL) {
11865 block = ins->u.block;
11867 dtriple[i].triple = ins;
11868 dtriple[i].block = block;
11869 dtriple[i].flags = 0;
11870 dtriple[i].color = ins->id;
11872 /* See if it is an operation we always keep */
11873 #warning "FIXME handle the case of killing a branch instruction"
11874 if (!triple_is_pure(state, ins) || triple_is_branch(state, ins)) {
11875 awaken(state, dtriple, &ins, &work_list_tail);
11878 /* Unconditionally keep the very last instruction */
11879 else if (ins->next == first) {
11880 awaken(state, dtriple, &ins, &work_list_tail);
11885 } while(ins != first);
/* Drain the work list, awakening everything a live triple needs. */
11887 struct dead_triple *dt;
11888 struct block_set *user;
11889 struct triple **expr;
11891 work_list = dt->work_next;
11893 work_list_tail = &work_list;
11895 /* Wake up the data dependencies of this triple */
11898 expr = triple_rhs(state, dt->triple, expr);
11899 awaken(state, dtriple, expr, &work_list_tail);
11902 expr = triple_lhs(state, dt->triple, expr);
11903 awaken(state, dtriple, expr, &work_list_tail);
11906 expr = triple_misc(state, dt->triple, expr);
11907 awaken(state, dtriple, expr, &work_list_tail);
11909 /* Wake up the forward control dependencies */
11911 expr = triple_targ(state, dt->triple, expr);
11912 awaken(state, dtriple, expr, &work_list_tail);
11914 /* Wake up the reverse control dependencies of this triple */
11915 for(user = dt->block->ipdomfrontier; user; user = user->next) {
11916 awaken(state, dtriple, &user->member->last, &work_list_tail);
/* Final sweep: restore ids and release everything still dead. */
11919 for(dt = &dtriple[1]; dt <= &dtriple[triples]; dt++) {
11920 if ((dt->triple->op == OP_NOOP) &&
11921 (dt->flags & TRIPLE_FLAG_ALIVE)) {
11922 internal_error(state, dt->triple, "noop effective?");
11924 dt->triple->id = dt->color; /* Restore the color */
11925 if (!(dt->flags & TRIPLE_FLAG_ALIVE)) {
11926 #warning "FIXME handle the case of killing a basic block"
11927 if (dt->block->first == dt->triple) {
11930 if (dt->block->last == dt->triple) {
11931 dt->block->last = dt->triple->prev;
11933 release_triple(state, dt->triple);
/* Insert the copies the architecture forces on us: where a value's
 * producer and a consumer demand different fixed registers (or
 * disjoint register classes), split the live range with a pre_copy at
 * the use or a post_copy after the definition.  Two passes over each
 * definition's use list: the first decides whether pre/post copies are
 * needed, the second actually inserts pre copies; the post copy (if
 * any) is inserted at the end.
 */
11940 static void insert_mandatory_copies(struct compile_state *state)
11942 struct triple *ins, *first;
11944 /* The object is, with a minimum of inserted copies,
11945 * to resolve in fundamental register conflicts between
11946 * register value producers and consumers.
11947 * Theoretically we may be greater than minimal when we
11948 * are inserting copies before instructions but that
11949 * case should be rare.
11951 first = RHS(state->main_function, 0);
11954 struct triple_set *entry, *next;
11955 struct triple *tmp;
11956 struct reg_info info;
11957 unsigned reg, regcm;
11958 int do_post_copy, do_pre_copy;
11960 if (!triple_is_def(state, ins)) {
11963 /* Find the architecture specific color information */
11964 info = arch_reg_lhs(state, ins, 0);
/* Treat out-of-range register numbers as "no fixed register". */
11965 if (info.reg >= MAX_REGISTERS) {
11966 info.reg = REG_UNSET;
11970 regcm = arch_type_to_regcm(state, ins->type);
11971 do_post_copy = do_pre_copy = 0;
11973 /* Walk through the uses of ins and check for conflicts */
11974 for(entry = ins->use; entry; entry = next) {
11975 struct reg_info rinfo;
11977 next = entry->next;
11978 i = find_rhs_use(state, entry->member, ins);
11983 /* Find the users color requirements */
11984 rinfo = arch_reg_rhs(state, entry->member, i);
11985 if (rinfo.reg >= MAX_REGISTERS) {
11986 rinfo.reg = REG_UNSET;
11989 /* See if I need a pre_copy */
11990 if (rinfo.reg != REG_UNSET) {
11991 if ((reg != REG_UNSET) && (reg != rinfo.reg)) {
11996 regcm &= rinfo.regcm;
11997 regcm = arch_regcm_normalize(state, regcm);
12001 /* Always use pre_copies for constants.
12002 * They do not take up any registers until a
12003 * copy places them in one.
12005 if ((info.reg == REG_UNNEEDED) &&
12006 (rinfo.reg != REG_UNNEEDED)) {
/* A post copy is needed when the definition's own register or
 * class cannot satisfy the combined demands of its users.
 */
12012 (((info.reg != REG_UNSET) &&
12013 (reg != REG_UNSET) &&
12014 (info.reg != reg)) ||
12015 ((info.regcm & regcm) == 0));
12018 regcm = info.regcm;
12019 /* Walk through the uses of ins and do a pre_copy or see if a post_copy is warranted */
12020 for(entry = ins->use; entry; entry = next) {
12021 struct reg_info rinfo;
12023 next = entry->next;
12024 i = find_rhs_use(state, entry->member, ins);
12029 /* Find the users color requirements */
12030 rinfo = arch_reg_rhs(state, entry->member, i);
12031 if (rinfo.reg >= MAX_REGISTERS) {
12032 rinfo.reg = REG_UNSET;
12035 /* Now see if it is time to do the pre_copy */
12036 if (rinfo.reg != REG_UNSET) {
12037 if (((reg != REG_UNSET) && (reg != rinfo.reg)) ||
12038 ((regcm & rinfo.regcm) == 0) ||
12039 /* Don't let a mandatory coalesce sneak
12040 * into a operation that is marked to prevent
12043 ((reg != REG_UNNEEDED) &&
12044 ((ins->id & TRIPLE_FLAG_POST_SPLIT) ||
12045 (entry->member->id & TRIPLE_FLAG_PRE_SPLIT)))
12048 struct triple *user;
12049 user = entry->member;
12050 if (RHS(user, i) != ins) {
12051 internal_error(state, user, "bad rhs");
12053 tmp = pre_copy(state, user, i);
12054 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
/* Class mismatch alone also forces a pre copy at this use. */
12062 if ((regcm & rinfo.regcm) == 0) {
12064 struct triple *user;
12065 user = entry->member;
12066 if (RHS(user, i) != ins) {
12067 internal_error(state, user, "bad rhs");
12069 tmp = pre_copy(state, user, i);
12070 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
12076 regcm &= rinfo.regcm;
12079 if (do_post_copy) {
12080 struct reg_info pre, post;
12081 tmp = post_copy(state, ins);
12082 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
/* Sanity: a copy whose source and destination constraints are
 * identical accomplishes nothing and indicates a logic error.
 */
12083 pre = arch_reg_lhs(state, ins, 0);
12084 post = arch_reg_lhs(state, tmp, 0);
12085 if ((pre.reg == post.reg) && (pre.regcm == post.regcm)) {
12086 internal_error(state, tmp, "useless copy");
12091 } while(ins != first);
/* Core data structures of the graph-coloring register allocator:
 *  - live_range: a node of the interference graph (edges + circular
 *    list of defining instructions; color/degree fields are in lines
 *    elided from this excerpt).
 *  - live_range_edge: one interference-graph adjacency entry.
 *  - live_range_def: doubly linked (circular) membership of one
 *    defining triple in a live range.
 *  - lre_hash: hash-table entry recording that an edge exists between
 *    two live ranges (constant-time interference test).
 *  - reg_state fields: the allocator's working state (struct header
 *    elided in this excerpt).
 */
12095 struct live_range_edge;
12096 struct live_range_def;
12097 struct live_range {
12098 struct live_range_edge *edges;
12099 struct live_range_def *defs;
12100 /* Note. The list pointed to by defs is kept in order.
12101 * That is barring splits in the flow control
12102 * defs dominates defs->next which dominates defs->next->next
12109 struct live_range *group_next, **group_prev;
12112 struct live_range_edge {
12113 struct live_range_edge *next;
12114 struct live_range *node;
12117 struct live_range_def {
12118 struct live_range_def *next;
12119 struct live_range_def *prev;
12120 struct live_range *lr;
12121 struct triple *def;
12125 #define LRE_HASH_SIZE 2048
12127 struct lre_hash *next;
12128 struct live_range *left;
12129 struct live_range *right;
/* reg_state fields (struct header elided in this excerpt): */
12134 struct lre_hash *hash[LRE_HASH_SIZE];
12135 struct reg_block *blocks;
12136 struct live_range_def *lrd;
12137 struct live_range *lr;
12138 struct live_range *low, **low_tail;
12139 struct live_range *high, **high_tail;
12142 int passes, max_passes;
12143 #define MAX_ALLOCATION_PASSES 100
/* Argument bundle for print_interference_block when invoked through
 * walk_blocks (fp and need_edges fields are elided in this excerpt).
 */
12148 struct print_interference_block_info {
12149 struct reg_state *rstate;
/* Debug dump of one basic block for the register allocator: the
 * block's live-in set, the per-edge phi inputs, each instruction with
 * its live-range color, optionally the range membership and
 * interference edges (info->need_edges), and finally the live-out set.
 * Also performs sanity checks (valid_ins, id bounds) as it prints.
 * Designed as a walk_blocks callback; arg is a
 * print_interference_block_info.
 */
12153 static void print_interference_block(
12154 struct compile_state *state, struct block *block, void *arg)
12157 struct print_interference_block_info *info = arg;
12158 struct reg_state *rstate = info->rstate;
12159 FILE *fp = info->fp;
12160 struct reg_block *rb;
12161 struct triple *ptr;
12164 rb = &rstate->blocks[block->vertex];
12166 fprintf(fp, "\nblock: %p (%d), %p<-%p %p<-%p\n",
12170 block->left && block->left->use?block->left->use->member : 0,
12172 block->right && block->right->use?block->right->use->member : 0);
12174 struct triple_reg_set *in_set;
12175 fprintf(fp, " in:");
12176 for(in_set = rb->in; in_set; in_set = in_set->next) {
12177 fprintf(fp, " %-10p", in_set->member);
/* Dump the phi inputs once per incoming control edge. */
12182 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12183 done = (ptr == block->last);
12184 if (ptr->op == OP_PHI) {
12191 for(edge = 0; edge < block->users; edge++) {
12192 fprintf(fp, " in(%d):", edge);
12193 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12194 struct triple **slot;
12195 done = (ptr == block->last);
12196 if (ptr->op != OP_PHI) {
12199 slot = &RHS(ptr, 0);
12200 fprintf(fp, " %-10p", slot[edge]);
12205 if (block->first->op == OP_LABEL) {
12206 fprintf(fp, "%p:\n", block->first);
12208 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12209 struct live_range *lr;
12213 done = (ptr == block->last);
12214 lr = rstate->lrd[ptr->id].lr;
/* Temporarily restore the original id so the triple prints with
 * its live range's color attached.
 */
12217 ptr->id = rstate->lrd[id].orig_id;
12218 SET_REG(ptr->id, lr->color);
12219 display_triple(fp, ptr);
12222 if (triple_is_def(state, ptr) && (lr->defs == 0)) {
12223 internal_error(state, ptr, "lr has no defs!");
12225 if (info->need_edges) {
12227 struct live_range_def *lrd;
12228 fprintf(fp, " range:");
12231 fprintf(fp, " %-10p", lrd->def);
12233 } while(lrd != lr->defs);
12236 if (lr->edges > 0) {
12237 struct live_range_edge *edge;
12238 fprintf(fp, " edges:");
12239 for(edge = lr->edges; edge; edge = edge->next) {
12240 struct live_range_def *lrd;
12241 lrd = edge->node->defs;
12243 fprintf(fp, " %-10p", lrd->def);
12245 } while(lrd != edge->node->defs);
12251 /* Do a bunch of sanity checks */
12252 valid_ins(state, ptr);
12253 if ((ptr->id < 0) || (ptr->id > rstate->defs)) {
12254 internal_error(state, ptr, "Invalid triple id: %d",
12259 struct triple_reg_set *out_set;
12260 fprintf(fp, " out:");
12261 for(out_set = rb->out; out_set; out_set = out_set->next) {
12262 fprintf(fp, " %-10p", out_set->member);
/* Dump every block's liveness/interference information to fp by
 * walking all blocks with print_interference_block.
 */
12269 static void print_interference_blocks(
12270 struct compile_state *state, struct reg_state *rstate, FILE *fp, int need_edges)
12272 struct print_interference_block_info info;
12273 info.rstate = rstate;
12275 info.need_edges = need_edges;
12276 fprintf(fp, "\nlive variables by block\n");
12277 walk_blocks(state, print_interference_block, &info);
/* Return the size of the largest register class present in the
 * 'classes' bitmask (one bit per class, queried via arch_regc_size).
 */
12281 static unsigned regc_max_size(struct compile_state *state, int classes)
12286 for(i = 0; i < MAX_REGC; i++) {
12287 if (classes & (1 << i)) {
12289 size = arch_regc_size(state, i);
12290 if (size > max_size) {
/* Test whether reg1 and reg2 denote overlapping hardware registers:
 * expands reg1 into its architecture equivalence set (e.g. aliasing
 * sub-registers) and scans for reg2.  Both arguments must be valid
 * register numbers.
 */
12298 static int reg_is_reg(struct compile_state *state, int reg1, int reg2)
12300 unsigned equivs[MAX_REG_EQUIVS];
12302 if ((reg1 < 0) || (reg1 >= MAX_REGISTERS)) {
12303 internal_error(state, 0, "invalid register");
12305 if ((reg2 < 0) || (reg2 >= MAX_REGISTERS)) {
12306 internal_error(state, 0, "invalid register");
12308 arch_reg_equivs(state, equivs, reg1);
12309 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12310 if (equivs[i] == reg2) {
/* Mark reg and every register aliasing it as used (used[r] = 1) in
 * the caller's table.  REG_UNNEEDED consumes no register and is a
 * no-op.
 */
12317 static void reg_fill_used(struct compile_state *state, char *used, int reg)
12319 unsigned equivs[MAX_REG_EQUIVS];
12321 if (reg == REG_UNNEEDED) {
12324 arch_reg_equivs(state, equivs, reg);
12325 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12326 used[equivs[i]] = 1;
/* Like reg_fill_used, but increments the usage count for reg and each
 * aliasing register instead of just setting a flag.
 */
12331 static void reg_inc_used(struct compile_state *state, char *used, int reg)
12333 unsigned equivs[MAX_REG_EQUIVS];
12335 if (reg == REG_UNNEEDED) {
12338 arch_reg_equivs(state, equivs, reg);
12339 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12340 used[equivs[i]] += 1;
/* Hash an unordered pair of live ranges into an lre_hash bucket index.
 * The pointers are scaled down by the element size so consecutive
 * array slots hash distinctly; LRE_HASH_SIZE must stay a power of two
 * for the final mask to be a valid modulo.
 */
12345 static unsigned int hash_live_edge(
12346 struct live_range *left, struct live_range *right)
12348 unsigned int hash, val;
12349 unsigned long lval, rval;
12350 lval = ((unsigned long)left)/sizeof(struct live_range);
12351 rval = ((unsigned long)right)/sizeof(struct live_range);
12356 hash = (hash *263) + val;
12361 hash = (hash *263) + val;
12363 hash = hash & (LRE_HASH_SIZE - 1);
/* Look up the hash-chain slot for the edge (left, right).  The pair is
 * canonicalized (left <= right by pointer) so lookups are order
 * independent.  Returns the address of the matching chain link, or of
 * the terminating NULL link if the edge is absent -- callers test *ptr
 * to distinguish hit from miss.
 */
12367 static struct lre_hash **lre_probe(struct reg_state *rstate,
12368 struct live_range *left, struct live_range *right)
12370 struct lre_hash **ptr;
12371 unsigned int index;
12372 /* Ensure left <= right */
12373 if (left > right) {
12374 struct live_range *tmp;
12379 index = hash_live_edge(left, right);
12381 ptr = &rstate->hash[index];
12383 if (((*ptr)->left == left) && ((*ptr)->right == right)) {
12386 ptr = &(*ptr)->next;
/* True when an interference edge between left and right is recorded in
 * the hash table.
 */
12391 static int interfere(struct reg_state *rstate,
12392 struct live_range *left, struct live_range *right)
12394 struct lre_hash **ptr;
12395 ptr = lre_probe(rstate, left, right);
12396 return ptr && *ptr;
/* Record an interference edge between two live ranges: insert it in
 * the hash table (for O(1) interfere() queries) and prepend adjacency
 * entries to both ranges' edge lists, bumping each degree.  Self
 * edges and edges touching the dummy range (lr[0]) are ignored, as are
 * duplicates already present in the hash table.
 */
12399 static void add_live_edge(struct reg_state *rstate,
12400 struct live_range *left, struct live_range *right)
12402 /* FIXME the memory allocation overhead is noticeable here... */
12403 struct lre_hash **ptr, *new_hash;
12404 struct live_range_edge *edge;
12406 if (left == right) {
12409 if ((left == &rstate->lr[0]) || (right == &rstate->lr[0])) {
12412 /* Ensure left <= right */
12413 if (left > right) {
12414 struct live_range *tmp;
12419 ptr = lre_probe(rstate, left, right);
12424 fprintf(stderr, "new_live_edge(%p, %p)\n",
12427 new_hash = xmalloc(sizeof(*new_hash), "lre_hash");
12428 new_hash->next = *ptr;
12429 new_hash->left = left;
12430 new_hash->right = right;
/* Mirror the edge in both adjacency lists. */
12433 edge = xmalloc(sizeof(*edge), "live_range_edge");
12434 edge->next = left->edges;
12435 edge->node = right;
12436 left->edges = edge;
12439 edge = xmalloc(sizeof(*edge), "live_range_edge");
12440 edge->next = right->edges;
12442 right->edges = edge;
12443 right->degree += 1;
/* Delete the interference edge between left and right: unlink it from
 * the hash table and remove the matching adjacency entry from each
 * range's edge list (entries are poisoned with memset before free --
 * the frees themselves are in lines elided from this excerpt).  A
 * missing edge is silently ignored.
 */
12446 static void remove_live_edge(struct reg_state *rstate,
12447 struct live_range *left, struct live_range *right)
12449 struct live_range_edge *edge, **ptr;
12450 struct lre_hash **hptr, *entry;
12451 hptr = lre_probe(rstate, left, right);
12452 if (!hptr || !*hptr) {
12456 *hptr = entry->next;
12459 for(ptr = &left->edges; *ptr; ptr = &(*ptr)->next) {
12461 if (edge->node == right) {
12463 memset(edge, 0, sizeof(*edge));
12469 for(ptr = &right->edges; *ptr; ptr = &(*ptr)->next) {
12471 if (edge->node == left) {
12473 memset(edge, 0, sizeof(*edge));
/* Detach 'range' from the interference graph by removing every one of
 * its edges.  'next' is captured before each removal because
 * remove_live_edge frees the current entry.
 */
12481 static void remove_live_edges(struct reg_state *rstate, struct live_range *range)
12483 struct live_range_edge *edge, *next;
12484 for(edge = range->edges; edge; edge = next) {
12486 remove_live_edge(rstate, range, edge->node);
/* Move every interference edge of src over to dest (used when two
 * live ranges are coalesced).  Each edge is removed from src and
 * re-added against dest; add_live_edge drops self edges and
 * duplicates, so the result is a clean union.
 */
12490 static void transfer_live_edges(struct reg_state *rstate,
12491 struct live_range *dest, struct live_range *src)
12493 struct live_range_edge *edge, *next;
12494 for(edge = src->edges; edge; edge = next) {
12495 struct live_range *other;
12497 other = edge->node;
12498 remove_live_edge(rstate, src, other);
12499 add_live_edge(rstate, dest, other);
12504 /* Interference graph...
12506 * new(n) --- Return a graph with n nodes but no edges.
12507 * add(g,x,y) --- Return a graph including g with an edge between x and y
12508 * interfere(g, x, y) --- Return true if there exists an edge between the nodes
12509 * x and y in the graph g
12510 * degree(g, x) --- Return the degree of the node x in the graph g
12511 * neighbors(g, x, f) --- Apply function f to each neighbor of node x in the graph g
12513 * Implement with a hash table && a set of adjacency vectors.
12514 * The hash table supports constant time implementations of add and interfere.
12515 * The adjacency vectors support an efficient implementation of neighbors.
12519 * +---------------------------------------------------+
12520 * | +--------------+ |
12522 * renumber -> build graph -> coalesce -> spill_costs -> simplify -> select
12524 * -- In simplify implement optimistic coloring... (No backtracking)
12525 * -- Implement Rematerialization it is the only form of spilling we can perform
12526 * Essentially this means dropping a constant from a register because
12527 * we can regenerate it later.
12529 * --- Very conservative coalescing (don't coalesce, just mark the opportunities)
12530 * coalesce at phi points...
12531 * --- Bias coloring: if at all possible do the coalescing at compile time.
/* Consistency check: verify that ins's live range has a color distinct
 * from every one of its operands' live ranges (excluding parent and
 * ins itself).  A matching color means two simultaneously needed
 * values were merged into one register -- reported as an internal
 * error.
 */
12536 static void different_colored(
12537 struct compile_state *state, struct reg_state *rstate,
12538 struct triple *parent, struct triple *ins)
12540 struct live_range *lr;
12541 struct triple **expr;
12542 lr = rstate->lrd[ins->id].lr;
12543 expr = triple_rhs(state, ins, 0);
12544 for(;expr; expr = triple_rhs(state, ins, expr)) {
12545 struct live_range *lr2;
12546 if (!*expr || (*expr == parent) || (*expr == ins)) {
12549 lr2 = rstate->lrd[(*expr)->id].lr;
12550 if (lr->color == lr2->color) {
12551 internal_error(state, ins, "live range too big");
/* Merge live range lr2 into lr1 and return the surviving range.
 * Preconditions checked: both ranges have defs, both are colorable,
 * their colors are compatible (equal, or one still unset), and their
 * register classes intersect.  The dominating range is kept as lr1
 * (phi defs count as dominated by their inputs), lr2's def list is
 * spliced onto lr1's, lr2 is emptied and marked REG_UNNEEDED, and
 * lr2's interference edges are transferred to lr1.
 */
12557 static struct live_range *coalesce_ranges(
12558 struct compile_state *state, struct reg_state *rstate,
12559 struct live_range *lr1, struct live_range *lr2)
12561 struct live_range_def *head, *mid1, *mid2, *end, *lrd;
12567 if (!lr1->defs || !lr2->defs) {
12568 internal_error(state, 0,
12569 "cannot coalese dead live ranges");
12571 if ((lr1->color == REG_UNNEEDED) ||
12572 (lr2->color == REG_UNNEEDED)) {
12573 internal_error(state, 0,
12574 "cannot coalesce live ranges without a possible color");
12576 if ((lr1->color != lr2->color) &&
12577 (lr1->color != REG_UNSET) &&
12578 (lr2->color != REG_UNSET)) {
12579 internal_error(state, lr1->defs->def,
12580 "cannot coalesce live ranges of different colors");
12582 color = lr1->color;
12583 if (color == REG_UNSET) {
12584 color = lr2->color;
12586 classes = lr1->classes & lr2->classes;
12588 internal_error(state, lr1->defs->def,
12589 "cannot coalesce live ranges with dissimilar register classes");
12591 #if DEBUG_COALESCING
12592 fprintf(stderr, "coalescing:");
12595 fprintf(stderr, " %p", lrd->def);
12597 } while(lrd != lr1->defs);
12598 fprintf(stderr, " |");
12601 fprintf(stderr, " %p", lrd->def);
12603 } while(lrd != lr2->defs);
12604 fprintf(stderr, "\n");
12606 /* If there is a clear dominate live range put it in lr1,
12607 * For purposes of this test phi functions are
12608 * considered dominated by the definitions that feed into
/* Swap lr1/lr2 so the dominating range survives as lr1. */
12611 if ((lr1->defs->prev->def->op == OP_PHI) ||
12612 ((lr2->defs->prev->def->op != OP_PHI) &&
12613 tdominates(state, lr2->defs->def, lr1->defs->def))) {
12614 struct live_range *tmp;
12620 if (lr1->defs->orig_id & TRIPLE_FLAG_POST_SPLIT) {
12621 fprintf(stderr, "lr1 post\n");
12623 if (lr1->defs->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
12624 fprintf(stderr, "lr1 pre\n");
12626 if (lr2->defs->orig_id & TRIPLE_FLAG_POST_SPLIT) {
12627 fprintf(stderr, "lr2 post\n");
12629 if (lr2->defs->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
12630 fprintf(stderr, "lr2 pre\n");
12634 fprintf(stderr, "coalesce color1(%p): %3d color2(%p) %3d\n",
12641 /* Append lr2 onto lr1 */
12642 #warning "FIXME should this be a merge instead of a splice?"
12643 /* This FIXME item applies to the correctness of live_range_end
12644 * and to the necessity of making multiple passes of coalesce_live_ranges.
12645 * A failure to find some coalesce opportunities in coalesce_live_ranges
12646 * does not impact the correctness of the compiler just the efficiency with
12647 * which registers are allocated.
12650 mid1 = lr1->defs->prev;
12652 end = lr2->defs->prev;
12660 /* Fixup the live range in the added live range defs */
12665 } while(lrd != head);
12667 /* Mark lr2 as free. */
12669 lr2->color = REG_UNNEEDED;
12673 internal_error(state, 0, "lr1->defs == 0 ?");
12676 lr1->color = color;
12677 lr1->classes = classes;
12679 /* Keep the graph in sync by transferring the edges from lr2 to lr1 */
12680 transfer_live_edges(rstate, lr1, lr2);
/* Iterator over a live range's defs in dominance order, forward
 * direction: given the previously returned def ('last', or NULL to
 * start), yield the next def still dominated by the range head, or
 * presumably NULL when exhausted (the initial and terminal branches
 * are elided in this excerpt -- TODO confirm).
 */
12685 static struct live_range_def *live_range_head(
12686 struct compile_state *state, struct live_range *lr,
12687 struct live_range_def *last)
12689 struct live_range_def *result;
12694 else if (!tdominates(state, lr->defs->def, last->next->def)) {
12695 result = last->next;
/* Counterpart of live_range_head walking the defs list backwards from
 * the tail (lr->defs->prev): yields defs that dominate the range's
 * last def, stopping (presumably returning NULL) otherwise -- the
 * terminal branch is elided in this excerpt.
 */
12700 static struct live_range_def *live_range_end(
12701 struct compile_state *state, struct live_range *lr,
12702 struct live_range_def *last)
12704 struct live_range_def *result;
12707 result = lr->defs->prev;
12709 else if (!tdominates(state, last->prev->def, lr->defs->prev->def)) {
12710 result = last->prev;
/* Build the initial live ranges: one per value-producing triple plus
 * the dummy range lr[0] for everything else.  Triple ids are reused as
 * indices into rstate->lrd (the original id is saved in orig_id).  A
 * second pass coalesces ranges that the architecture forces into the
 * same register (matching fixed lhs/rhs register assignments in an
 * instruction template).
 */
12716 static void initialize_live_ranges(
12717 struct compile_state *state, struct reg_state *rstate)
12719 struct triple *ins, *first;
12720 size_t count, size;
12723 first = RHS(state->main_function, 0);
12724 /* First count how many instructions I have.
12726 count = count_triples(state);
12727 /* Potentially I need one live range definitions for each
12730 rstate->defs = count;
12731 /* Potentially I need one live range for each instruction
12732 * plus an extra for the dummy live range.
12734 rstate->ranges = count + 1;
12735 size = sizeof(rstate->lrd[0]) * rstate->defs;
12736 rstate->lrd = xcmalloc(size, "live_range_def");
12737 size = sizeof(rstate->lr[0]) * rstate->ranges;
12738 rstate->lr = xcmalloc(size, "live_range");
12740 /* Setup the dummy live range */
12741 rstate->lr[0].classes = 0;
12742 rstate->lr[0].color = REG_UNSET;
12743 rstate->lr[0].defs = 0;
12747 /* If the triple is a variable give it a live range */
12748 if (triple_is_def(state, ins)) {
12749 struct reg_info info;
12750 /* Find the architecture specific color information */
12751 info = find_def_color(state, ins);
12753 rstate->lr[i].defs = &rstate->lrd[j];
12754 rstate->lr[i].color = info.reg;
12755 rstate->lr[i].classes = info.regcm;
12756 rstate->lr[i].degree = 0;
12757 rstate->lrd[j].lr = &rstate->lr[i];
12759 /* Otherwise give the triple the dummy live range. */
12761 rstate->lrd[j].lr = &rstate->lr[0];
12764 /* Initialize the live_range_def */
12765 rstate->lrd[j].next = &rstate->lrd[j];
12766 rstate->lrd[j].prev = &rstate->lrd[j];
12767 rstate->lrd[j].def = ins;
12768 rstate->lrd[j].orig_id = ins->id;
12773 } while(ins != first);
12774 rstate->ranges = i;
12776 /* Make a second pass to handle architecture specific register
12781 int zlhs, zrhs, i, j;
12782 if (ins->id > rstate->defs) {
12783 internal_error(state, ins, "bad id");
12786 /* Walk through the template of ins and coalesce live ranges */
12787 zlhs = TRIPLE_LHS(ins->sizes);
/* A plain value definition acts as a single implicit lhs. */
12788 if ((zlhs == 0) && triple_is_def(state, ins)) {
12791 zrhs = TRIPLE_RHS(ins->sizes);
12793 #if DEBUG_COALESCING > 1
12794 fprintf(stderr, "mandatory coalesce: %p %d %d\n",
12797 for(i = 0; i < zlhs; i++) {
12798 struct reg_info linfo;
12799 struct live_range_def *lhs;
12800 linfo = arch_reg_lhs(state, ins, i);
12801 if (linfo.reg < MAX_REGISTERS) {
12804 if (triple_is_def(state, ins)) {
12805 lhs = &rstate->lrd[ins->id];
12807 lhs = &rstate->lrd[LHS(ins, i)->id];
12809 #if DEBUG_COALESCING > 1
12810 fprintf(stderr, "coalesce lhs(%d): %p %d\n",
12811 i, lhs, linfo.reg);
12814 for(j = 0; j < zrhs; j++) {
12815 struct reg_info rinfo;
12816 struct live_range_def *rhs;
12817 rinfo = arch_reg_rhs(state, ins, j);
12818 if (rinfo.reg < MAX_REGISTERS) {
12821 rhs = &rstate->lrd[RHS(ins, j)->id];
12822 #if DEBUG_COALESCING > 1
12823 fprintf(stderr, "coalesce rhs(%d): %p %d\n",
12824 j, rhs, rinfo.reg);
/* Same fixed register on lhs and rhs: their ranges must share it. */
12827 if (rinfo.reg == linfo.reg) {
12828 coalesce_ranges(state, rstate,
12834 } while(ins != first);
/* walk_variable_lifetimes callback that builds the interference
 * graph: for each value-defining instruction, add an edge between its
 * live range and the range of every value live across it, except
 * ranges whose register classes cannot overlap anyway.
 */
12837 static void graph_ins(
12838 struct compile_state *state,
12839 struct reg_block *blocks, struct triple_reg_set *live,
12840 struct reg_block *rb, struct triple *ins, void *arg)
12842 struct reg_state *rstate = arg;
12843 struct live_range *def;
12844 struct triple_reg_set *entry;
12846 /* If the triple is not a definition
12847 * we do not have a definition to add to
12848 * the interference graph.
12850 if (!triple_is_def(state, ins)) {
12853 def = rstate->lrd[ins->id].lr;
12855 /* Create an edge between ins and everything that is
12856 * alive, unless the live_range cannot share
12857 * a physical register with ins.
12859 for(entry = live; entry; entry = entry->next) {
12860 struct live_range *lr;
12861 if ((entry->member->id < 0) || (entry->member->id > rstate->defs)) {
12862 internal_error(state, 0, "bad entry?");
12864 lr = rstate->lrd[entry->member->id].lr;
12868 if (!arch_regcm_intersect(def->classes, lr->classes)) {
12871 add_live_edge(rstate, def, lr);
/* Fetch the live range of ins while validating the bookkeeping: the
 * triple's id must be in bounds and ins must actually appear on its
 * range's circular defs list.  Used only by the verification pass.
 */
12876 static struct live_range *get_verify_live_range(
12877 struct compile_state *state, struct reg_state *rstate, struct triple *ins)
12879 struct live_range *lr;
12880 struct live_range_def *lrd;
12882 if ((ins->id < 0) || (ins->id > rstate->defs)) {
12883 internal_error(state, ins, "bad ins?");
12885 lr = rstate->lrd[ins->id].lr;
12889 if (lrd->def == ins) {
12893 } while(lrd != lr->defs);
12895 internal_error(state, ins, "ins not in live range");
/* walk_variable_lifetimes callback that cross-checks the interference
 * graph against the recomputed live sets: every pair of distinct live
 * ranges simultaneously alive must interfere (unless their register
 * classes are disjoint), two live values must never share a range,
 * each range's stored degree must match its edge count, and the
 * reverse adjacency entry must exist.
 */
12900 static void verify_graph_ins(
12901 struct compile_state *state,
12902 struct reg_block *blocks, struct triple_reg_set *live,
12903 struct reg_block *rb, struct triple *ins, void *arg)
12905 struct reg_state *rstate = arg;
12906 struct triple_reg_set *entry1, *entry2;
12909 /* Compare live against edges and make certain the code is working */
12910 for(entry1 = live; entry1; entry1 = entry1->next) {
12911 struct live_range *lr1;
12912 lr1 = get_verify_live_range(state, rstate, entry1->member);
12913 for(entry2 = live; entry2; entry2 = entry2->next) {
12914 struct live_range *lr2;
12915 struct live_range_edge *edge2;
12918 if (entry2 == entry1) {
12921 lr2 = get_verify_live_range(state, rstate, entry2->member);
12923 internal_error(state, entry2->member,
12924 "live range with 2 values simultaneously alive");
12926 if (!arch_regcm_intersect(lr1->classes, lr2->classes)) {
12929 if (!interfere(rstate, lr1, lr2)) {
12930 internal_error(state, entry2->member,
12931 "edges don't interfere?");
/* Recount lr2's edges and look for the lr1 back-edge. */
12936 for(edge2 = lr2->edges; edge2; edge2 = edge2->next) {
12938 if (edge2->node == lr1) {
12942 if (lr2_degree != lr2->degree) {
12943 internal_error(state, entry2->member,
12944 "computed degree: %d does not match reported degree: %d\n",
12945 lr2_degree, lr2->degree);
12948 internal_error(state, entry2->member, "missing edge");
/* verify_interference_graph - full interference-graph verification.
 * Compiled in only when DEBUG_CONSISTENCY > 1 (HEAD sets it to 1, so the
 * empty inline stub below is what normally gets built).
 */
12955 #if DEBUG_CONSISTENCY > 1
12956 static void verify_interference_graph(
12957 struct compile_state *state, struct reg_state *rstate)
12960 fprintf(stderr, "verify_interference_graph...\n");
12963 walk_variable_lifetimes(state, rstate->blocks, verify_graph_ins, rstate);
12965 fprintf(stderr, "verify_interference_graph done\n");
/* No-op stub used when the expensive verification is disabled. */
12969 static inline void verify_interference_graph(
12970 struct compile_state *state, struct reg_state *rstate) {}
/* print_interference_ins - debug callback for walk_variable_lifetimes().
 * Prints each instruction with its assigned live-range color, followed by
 * that range's defs, the currently live values, and the interference
 * edges (one line of %p pointers per group).
 */
12973 static void print_interference_ins(
12974 struct compile_state *state,
12975 struct reg_block *blocks, struct triple_reg_set *live,
12976 struct reg_block *rb, struct triple *ins, void *arg)
12978 struct reg_state *rstate = arg;
12979 struct live_range *lr;
12982 lr = rstate->lrd[ins->id].lr;
/* Temporarily restore the original id so display shows user-visible ids,
 * then stamp the chosen register into the displayed id. */
12984 ins->id = rstate->lrd[id].orig_id;
12985 SET_REG(ins->id, lr->color);
12986 display_triple(stdout, ins);
/* Dump the circular def list of this live range. */
12990 struct live_range_def *lrd;
12994 printf(" %-10p", lrd->def);
12996 } while(lrd != lr->defs);
/* Dump the set of values live at this instruction. */
13000 struct triple_reg_set *entry;
13002 for(entry = live; entry; entry = entry->next) {
13003 printf(" %-10p", entry->member);
/* Dump the interference edges, one def list per neighboring range. */
13008 struct live_range_edge *entry;
13010 for(entry = lr->edges; entry; entry = entry->next) {
13011 struct live_range_def *lrd;
13012 lrd = entry->node->defs;
13014 printf(" %-10p", lrd->def);
13016 } while(lrd != entry->node->defs);
/* NOTE(review): branch handling body is elided in this listing. */
13021 if (triple_is_branch(state, ins)) {
/* coalesce_live_ranges - merge live ranges connected by OP_COPY where it is
 * safe: compatible classes, compatible (or unset) colors, no interference,
 * and neither side produced by a live range split.  Returns nonzero if any
 * coalescing happened (caller loops until a fixed point).
 */
13027 static int coalesce_live_ranges(
13028 struct compile_state *state, struct reg_state *rstate)
13030 /* At the point where a value is moved from one
13031 * register to another that value requires two
13032 * registers, thus increasing register pressure.
13033 * Live range coaleescing reduces the register
13034 * pressure by keeping a value in one register
13037 * In the case of a phi function all paths leading
13038 * into it must be allocated to the same register
13039 * otherwise the phi function may not be removed.
13041 * Forcing a value to stay in a single register
13042 * for an extended period of time does have
13043 * limitations when applied to non homogenous
13046 * The two cases I have identified are:
13047 * 1) Two forced register assignments may
13049 * 2) Registers may go unused because they
13050 * are only good for storing the value
13051 * and not manipulating it.
13053 * Because of this I need to split live ranges,
13054 * even outside of the context of coalesced live
13055 * ranges. The need to split live ranges does
13056 * impose some constraints on live range coalescing.
13058 * - Live ranges may not be coalesced across phi
13059 * functions. This creates a 2 headed live
13060 * range that cannot be sanely split.
13062 * - phi functions (coalesced in initialize_live_ranges)
13063 * are handled as pre split live ranges so we will
13064 * never attempt to split them.
/* Scan every live range... */
13070 for(i = 0; i <= rstate->ranges; i++) {
13071 struct live_range *lr1;
13072 struct live_range_def *lrd1;
13073 lr1 = &rstate->lr[i];
/* ...and every def that ends the range. */
13077 lrd1 = live_range_end(state, lr1, 0);
13078 for(; lrd1; lrd1 = live_range_end(state, lr1, lrd1)) {
13079 struct triple_set *set;
/* Only copies are candidates for coalescing. */
13080 if (lrd1->def->op != OP_COPY) {
13083 /* Skip copies that are the result of a live range split. */
13084 if (lrd1->orig_id & TRIPLE_FLAG_POST_SPLIT) {
13087 for(set = lrd1->def->use; set; set = set->next) {
13088 struct live_range_def *lrd2;
13089 struct live_range *lr2, *res;
13091 lrd2 = &rstate->lrd[set->member->id];
13093 /* Don't coalesce with instructions
13094 * that are the result of a live range
13097 if (lrd2->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
13100 lr2 = rstate->lrd[set->member->id].lr;
/* Two different fixed colors can never share one range. */
13104 if ((lr1->color != lr2->color) &&
13105 (lr1->color != REG_UNSET) &&
13106 (lr2->color != REG_UNSET)) {
/* The register class masks must overlap. */
13109 if ((lr1->classes & lr2->classes) == 0) {
/* Interfering ranges cannot be merged. */
13113 if (interfere(rstate, lr1, lr2)) {
13117 res = coalesce_ranges(state, rstate, lr1, lr2);
/* fix_coalesce_conflicts - lifetime-walk callback that inserts a pre-copy
 * when an lhs is forced into the same fixed register as an rhs whose value
 * is still live past this instruction.  arg points at the caller's
 * conflict counter.
 */
13131 static void fix_coalesce_conflicts(struct compile_state *state,
13132 struct reg_block *blocks, struct triple_reg_set *live,
13133 struct reg_block *rb, struct triple *ins, void *arg)
13135 int *conflicts = arg;
13136 int zlhs, zrhs, i, j;
13138 /* See if we have a mandatory coalesce operation between
13139 * a lhs and a rhs value. If so and the rhs value is also
13140 * alive then this triple needs to be pre copied. Otherwise
13141 * we would have two definitions in the same live range simultaneously
13144 zlhs = TRIPLE_LHS(ins->sizes);
/* A plain def with no explicit lhs entries still defines one value;
 * the elided body presumably sets zlhs = 1 — TODO confirm. */
13145 if ((zlhs == 0) && triple_is_def(state, ins)) {
13148 zrhs = TRIPLE_RHS(ins->sizes);
13149 for(i = 0; i < zlhs; i++) {
13150 struct reg_info linfo;
13151 linfo = arch_reg_lhs(state, ins, i);
/* Only fixed (specific) registers can conflict this way. */
13152 if (linfo.reg < MAX_REGISTERS) {
13155 for(j = 0; j < zrhs; j++) {
13156 struct reg_info rinfo;
13157 struct triple *rhs;
13158 struct triple_reg_set *set;
13161 rinfo = arch_reg_rhs(state, ins, j);
13162 if (rinfo.reg != linfo.reg) {
/* Is this rhs value still live after ins? */
13166 for(set = live; set && !found; set = set->next) {
13167 if (set->member == rhs) {
/* Conflict: copy the rhs so the original value survives. */
13172 struct triple *copy;
13173 copy = pre_copy(state, ins, j);
13174 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
/* correct_coalesce_conflicts - run fix_coalesce_conflicts over every
 * instruction; returns the number of conflicts repaired (counter body
 * elided in this listing).
 */
13182 static int correct_coalesce_conflicts(
13183 struct compile_state *state, struct reg_block *blocks)
13187 walk_variable_lifetimes(state, blocks, fix_coalesce_conflicts, &conflicts);
/* replace_set_use - in a triple_reg_set list, replace references to orig
 * with new (the assignment line is elided in this listing).
 */
13191 static void replace_set_use(struct compile_state *state,
13192 struct triple_reg_set *head, struct triple *orig, struct triple *new)
13194 struct triple_reg_set *set;
13195 for(set = head; set; set = set->next) {
13196 if (set->member == orig) {
/* replace_block_use - rewrite orig -> new in the in/out live sets of
 * every basic block (vertices 1..last_vertex).
 */
13202 static void replace_block_use(struct compile_state *state,
13203 struct reg_block *blocks, struct triple *orig, struct triple *new)
13206 #warning "WISHLIST visit just those blocks that need it *"
13207 for(i = 1; i <= state->last_vertex; i++) {
13208 struct reg_block *rb;
13210 replace_set_use(state, rb->in, orig, new);
13211 replace_set_use(state, rb->out, orig, new);
/* color_instructions - stamp each defining instruction's id with its
 * current lhs register color (REG_UNSET when no specific register has
 * been chosen).  Walks the circular instruction list of main_function.
 */
13215 static void color_instructions(struct compile_state *state)
13217 struct triple *ins, *first;
13218 first = RHS(state->main_function, 0);
13221 if (triple_is_def(state, ins)) {
13222 struct reg_info info;
13223 info = find_lhs_color(state, ins, 0);
/* >= MAX_REGISTERS means a register class, not a specific register. */
13224 if (info.reg >= MAX_REGISTERS) {
13225 info.reg = REG_UNSET;
13227 SET_INFO(ins->id, info);
13230 } while(ins != first);
/* read_lhs_color - return the register/regcm previously stamped into an
 * instruction's id by color_instructions().  For a multi-lhs instruction
 * recurse into the requested lhs element; out-of-range indices are an
 * internal error.
 */
13233 static struct reg_info read_lhs_color(
13234 struct compile_state *state, struct triple *ins, int index)
13236 struct reg_info info;
13237 if ((index == 0) && triple_is_def(state, ins)) {
13238 info.reg = ID_REG(ins->id);
13239 info.regcm = ID_REGCM(ins->id);
13241 else if (index < TRIPLE_LHS(ins->sizes)) {
13242 info = read_lhs_color(state, LHS(ins, index), 0);
13245 internal_error(state, ins, "Bad lhs %d", index);
13246 info.reg = REG_UNSET;
/* resolve_tangle - break a register conflict on 'tangle' (a value whose
 * fixed register is simultaneously required elsewhere).  Inserts pre-copies
 * before users that demand the same register, and a post-copy after the
 * tangle itself when its own pre-color collides.  Returns the post-copy
 * (or, per the elided tail, possibly 0 — TODO confirm).
 */
13252 static struct triple *resolve_tangle(
13253 struct compile_state *state, struct triple *tangle)
13255 struct reg_info info, uinfo;
13256 struct triple_set *set, *next;
13257 struct triple *copy;
13259 #warning "WISHLIST recalculate all affected instructions colors"
13260 info = find_lhs_color(state, tangle, 0);
/* 'next' is cached because inserting a pre-copy edits tangle->use. */
13261 for(set = tangle->use; set; set = next) {
13262 struct triple *user;
13265 user = set->member;
13266 zrhs = TRIPLE_RHS(user->sizes);
13267 for(i = 0; i < zrhs; i++) {
/* Only the operands that actually read the tangle matter. */
13268 if (RHS(user, i) != tangle) {
13271 uinfo = find_rhs_post_color(state, user, i);
/* User wants the same physical register: copy the operand. */
13272 if (uinfo.reg == info.reg) {
13273 copy = pre_copy(state, user, i);
13274 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
13275 SET_INFO(copy->id, uinfo);
/* If the tangle's own pre-color also collides, split its def with a
 * post-copy and recolor both halves. */
13280 uinfo = find_lhs_pre_color(state, tangle, 0);
13281 if (uinfo.reg == info.reg) {
13282 struct reg_info linfo;
13283 copy = post_copy(state, tangle);
13284 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
13285 linfo = find_lhs_color(state, copy, 0);
13286 SET_INFO(copy->id, linfo);
13288 info = find_lhs_color(state, tangle, 0);
13289 SET_INFO(tangle->id, info);
/* fix_tangles - lifetime-walk callback that detects a register needed by
 * two simultaneously live values (a "tangle"), picks the least dominated
 * definition, and resolves it via resolve_tangle().  arg points at the
 * caller's tangle counter.
 */
13295 static void fix_tangles(struct compile_state *state,
13296 struct reg_block *blocks, struct triple_reg_set *live,
13297 struct reg_block *rb, struct triple *ins, void *arg)
13299 int *tangles = arg;
13300 struct triple *tangle;
13302 char used[MAX_REGISTERS];
13303 struct triple_reg_set *set;
13306 /* Find out which registers have multiple uses at this point */
13307 memset(used, 0, sizeof(used));
13308 for(set = live; set; set = set->next) {
13309 struct reg_info info;
13310 info = read_lhs_color(state, set->member, 0);
13311 if (info.reg == REG_UNSET) {
13314 reg_inc_used(state, used, info.reg);
13317 /* Now find the least dominated definition of a register in
13318 * conflict I have seen so far.
13320 for(set = live; set; set = set->next) {
13321 struct reg_info info;
13322 info = read_lhs_color(state, set->member, 0);
/* A use count below 2 means no conflict on this register. */
13323 if (used[info.reg] < 2) {
13326 /* Changing copies that feed into phi functions
13329 if (set->member->use &&
13330 (set->member->use->member->op == OP_PHI)) {
13333 if (!tangle || tdominates(state, set->member, tangle)) {
13334 tangle = set->member;
13337 /* If I have found a tangle resolve it */
13339 struct triple *post_copy;
13341 post_copy = resolve_tangle(state, tangle);
/* Propagate the replacement into the block live sets and the current
 * walk's live list so later callbacks see the post-copy. */
13343 replace_block_use(state, blocks, tangle, post_copy);
13345 if (post_copy && (tangle != ins)) {
13346 replace_set_use(state, live, tangle, post_copy);
/* correct_tangles - recolor all instructions, then sweep lifetimes with
 * fix_tangles; returns the tangle count (counter body elided).
 */
13353 static int correct_tangles(
13354 struct compile_state *state, struct reg_block *blocks)
13358 color_instructions(state);
13359 walk_variable_lifetimes(state, blocks, fix_tangles, &tangles);
13364 static void ids_from_rstate(struct compile_state *state, struct reg_state *rstate);
13365 static void cleanup_rstate(struct compile_state *state, struct reg_state *rstate);
/* find_constrained_def - scan a live range's circular def list for defs
 * whose assigned register class is narrower than what their type requires
 * ("constrained").  Returns the least dominated constrained def seen so
 * far, threading 'constrained' through successive calls.
 */
13367 struct triple *find_constrained_def(
13368 struct compile_state *state, struct live_range *range, struct triple *constrained)
13370 struct live_range_def *lrd;
13373 struct reg_info info;
13375 int is_constrained;
13376 regcm = arch_type_to_regcm(state, lrd->def->type);
13377 info = find_lhs_color(state, lrd->def, 0);
/* Normalize both masks so the comparison is apples-to-apples. */
13378 regcm = arch_regcm_reg_normalize(state, regcm);
13379 info.regcm = arch_regcm_reg_normalize(state, info.regcm);
13380 /* If the 2 register class masks are not equal the
13381 * the current register class is constrained.
13383 is_constrained = regcm != info.regcm;
13385 /* Of the constrained live ranges deal with the
13386 * least dominated one first.
13388 if (is_constrained) {
13389 #if DEBUG_RANGE_CONFLICTS
13390 fprintf(stderr, "canidate: %p %-8s regcm: %x %x\n",
13391 lrd->def, tops(lrd->def->op), regcm, info.regcm);
13393 if (!constrained ||
13394 tdominates(state, lrd->def, constrained))
13396 constrained = lrd->def;
13400 } while(lrd != range->defs);
13401 return constrained;
/* split_constrained_ranges - when coloring 'range' failed, look through its
 * interference neighbors (and then the range itself) for an over-constrained
 * def and relax it with resolve_tangle().  Returns nonzero if a candidate
 * was found and split.
 */
13404 static int split_constrained_ranges(
13405 struct compile_state *state, struct reg_state *rstate,
13406 struct live_range *range)
13408 /* Walk through the edges in conflict and our current live
13409 * range, and find definitions that are more severly constrained
13410 * than they type of data they contain require.
13412 * Then pick one of those ranges and relax the constraints.
13414 struct live_range_edge *edge;
13415 struct triple *constrained;
13418 for(edge = range->edges; edge; edge = edge->next) {
13419 constrained = find_constrained_def(state, edge->node, constrained);
/* Fall back to the range being colored if no neighbor qualified. */
13421 if (!constrained) {
13422 constrained = find_constrained_def(state, range, constrained);
13424 #if DEBUG_RANGE_CONFLICTS
13425 fprintf(stderr, "constrained: %p %-8s\n",
13426 constrained, tops(constrained->op));
/* Restore instruction ids and drop the stale allocator state before
 * mutating the triple stream. */
13429 ids_from_rstate(state, rstate);
13430 cleanup_rstate(state, rstate);
13431 resolve_tangle(state, constrained);
13433 return !!constrained;
/* split_ranges - last-resort handler when select_free_color() cannot color
 * 'range'.  Gives up for REG_UNNEEDED ranges or once max_passes is hit;
 * otherwise tries split_constrained_ranges().  General live range
 * splitting is still a wishlist item (see #warning below).
 */
13436 static int split_ranges(
13437 struct compile_state *state, struct reg_state *rstate,
13438 char *used, struct live_range *range)
13441 #if DEBUG_RANGE_CONFLICTS
13442 fprintf(stderr, "split_ranges %d %s %p\n",
13443 rstate->passes, tops(range->defs->def->op), range->defs->def);
13445 if ((range->color == REG_UNNEEDED) ||
13446 (rstate->passes >= rstate->max_passes)) {
13449 split = split_constrained_ranges(state, rstate, range);
13451 /* Ideally I would split the live range that will not be used
13452 * for the longest period of time in hopes that this will
13453 * (a) allow me to spill a register or
13454 * (b) allow me to place a value in another register.
13456 * So far I don't have a test case for this, the resolving
13457 * of mandatory constraints has solved all of my
13458 * know issues. So I have choosen not to write any
13459 * code until I cat get a better feel for cases where
13460 * it would be useful to have.
13463 #warning "WISHLIST implement live range splitting..."
13464 if ((DEBUG_RANGE_CONFLICTS > 1) &&
13465 (!split || (DEBUG_RANGE_CONFLICTS > 2))) {
13466 print_interference_blocks(state, rstate, stderr, 0);
13467 print_dominators(state, stderr);
/* Color-graph debug trace macros: >1 traces to stdout, ==1 to stderr,
 * otherwise they compile away to nothing.
 */
13472 #if DEBUG_COLOR_GRAPH > 1
13473 #define cgdebug_printf(...) fprintf(stdout, __VA_ARGS__)
13474 #define cgdebug_flush() fflush(stdout)
13475 #define cgdebug_loc(STATE, TRIPLE) loc(stdout, STATE, TRIPLE)
13476 #elif DEBUG_COLOR_GRAPH == 1
13477 #define cgdebug_printf(...) fprintf(stderr, __VA_ARGS__)
13478 #define cgdebug_flush() fflush(stderr)
13479 #define cgdebug_loc(STATE, TRIPLE) loc(stderr, STATE, TRIPLE)
13481 #define cgdebug_printf(...)
13482 #define cgdebug_flush()
13483 #define cgdebug_loc(STATE, TRIPLE)
/* select_free_color - choose a register (color) for 'range'.
 * Strategy, in order: keep a pre-assigned color if still legal; reuse the
 * color of an expression/phi this range feeds; reuse a non-interfering rhs
 * color; otherwise take the first free register for its class.  If nothing
 * fits, attempt split_ranges() and report "too few registers" on failure.
 * Return value semantics: elided here — appears to signal coloring success
 * to color_graph(); TODO confirm against full source.
 */
13487 static int select_free_color(struct compile_state *state,
13488 struct reg_state *rstate, struct live_range *range)
13490 struct triple_set *entry;
13491 struct live_range_def *lrd;
13492 struct live_range_def *phi;
13493 struct live_range_edge *edge;
13494 char used[MAX_REGISTERS];
13495 struct triple **expr;
13497 /* Instead of doing just the trivial color select here I try
13498 * a few extra things because a good color selection will help reduce
13502 /* Find the registers currently in use */
13503 memset(used, 0, sizeof(used));
13504 for(edge = range->edges; edge; edge = edge->next) {
13505 if (edge->node->color == REG_UNSET) {
13508 reg_fill_used(state, used, edge->node->color)
13510 #if DEBUG_COLOR_GRAPH > 1
13514 for(edge = range->edges; edge; edge = edge->next) {
13517 cgdebug_printf("\n%s edges: %d @%s:%d.%d\n",
13518 tops(range->def->op), i,
13519 range->def->filename, range->def->line, range->def->col);
13520 for(i = 0; i < MAX_REGISTERS; i++) {
13522 cgdebug_printf("used: %s\n",
13529 /* If a color is already assigned see if it will work */
13530 if (range->color != REG_UNSET) {
13531 struct live_range_def *lrd;
13532 if (!used[range->color]) {
/* Already-used pre-color: dump the conflicting edges/defs and die. */
13535 for(edge = range->edges; edge; edge = edge->next) {
13536 if (edge->node->color != range->color) {
13539 warning(state, edge->node->defs->def, "edge: ");
13540 lrd = edge->node->defs;
13542 warning(state, lrd->def, " %p %s",
13543 lrd->def, tops(lrd->def->op));
13545 } while(lrd != edge->node->defs);
13548 warning(state, range->defs->def, "def: ");
13550 warning(state, lrd->def, " %p %s",
13551 lrd->def, tops(lrd->def->op));
13553 } while(lrd != range->defs);
13554 internal_error(state, range->defs->def,
13555 "live range with already used color %s",
13556 arch_reg_str(range->color));
13559 /* If I feed into an expression reuse it's color.
13560 * This should help remove copies in the case of 2 register instructions
13561 * and phi functions.
13564 lrd = live_range_end(state, range, 0);
13565 for(; (range->color == REG_UNSET) && lrd ; lrd = live_range_end(state, range, lrd)) {
13566 entry = lrd->def->use;
13567 for(;(range->color == REG_UNSET) && entry; entry = entry->next) {
13568 struct live_range_def *insd;
13570 insd = &rstate->lrd[entry->member->id];
13571 if (insd->lr->defs == 0) {
/* Remember the first non-interfering phi user for the fallback below. */
13574 if (!phi && (insd->def->op == OP_PHI) &&
13575 !interfere(rstate, range, insd->lr)) {
13578 if (insd->lr->color == REG_UNSET) {
13581 regcm = insd->lr->classes;
13582 if (((regcm & range->classes) == 0) ||
13583 (used[insd->lr->color])) {
13586 if (interfere(rstate, range, insd->lr)) {
13589 range->color = insd->lr->color;
13592 /* If I feed into a phi function reuse it's color or the color
13593 * of something else that feeds into the phi function.
13596 if (phi->lr->color != REG_UNSET) {
/* NOTE(review): this branch assigns the phi's color when used[] says it
 * is taken — the surrounding elided lines likely invert/augment this
 * test; confirm against the full source. */
13597 if (used[phi->lr->color]) {
13598 range->color = phi->lr->color;
/* Otherwise try the colors of the phi's other inputs. */
13602 expr = triple_rhs(state, phi->def, 0);
13603 for(; expr; expr = triple_rhs(state, phi->def, expr)) {
13604 struct live_range *lr;
13609 lr = rstate->lrd[(*expr)->id].lr;
13610 if (lr->color == REG_UNSET) {
13613 regcm = lr->classes;
13614 if (((regcm & range->classes) == 0) ||
13615 (used[lr->color])) {
13618 if (interfere(rstate, range, lr)) {
13621 range->color = lr->color;
13625 /* If I don't interfere with a rhs node reuse it's color */
13626 lrd = live_range_head(state, range, 0);
13627 for(; (range->color == REG_UNSET) && lrd ; lrd = live_range_head(state, range, lrd)) {
13628 expr = triple_rhs(state, lrd->def, 0);
13629 for(; expr; expr = triple_rhs(state, lrd->def, expr)) {
13630 struct live_range *lr;
13635 lr = rstate->lrd[(*expr)->id].lr;
13636 if (lr->color == REG_UNSET) {
13639 regcm = lr->classes;
13640 if (((regcm & range->classes) == 0) ||
13641 (used[lr->color])) {
13644 if (interfere(rstate, range, lr)) {
13647 range->color = lr->color;
13651 /* If I have not opportunitically picked a useful color
13652 * pick the first color that is free.
13654 if (range->color == REG_UNSET) {
13656 arch_select_free_register(state, used, range->classes);
13658 if (range->color == REG_UNSET) {
13659 struct live_range_def *lrd;
/* No free register: try splitting; on failure dump diagnostics. */
13661 if (split_ranges(state, rstate, used, range)) {
13664 for(edge = range->edges; edge; edge = edge->next) {
13665 warning(state, edge->node->defs->def, "edge reg %s",
13666 arch_reg_str(edge->node->color));
13667 lrd = edge->node->defs;
13669 warning(state, lrd->def, " %s %p",
13670 tops(lrd->def->op), lrd->def);
13672 } while(lrd != edge->node->defs);
13674 warning(state, range->defs->def, "range: ");
13677 warning(state, lrd->def, " %s %p",
13678 tops(lrd->def->op), lrd->def);
13680 } while(lrd != range->defs);
13682 warning(state, range->defs->def, "classes: %x",
13684 for(i = 0; i < MAX_REGISTERS; i++) {
13686 warning(state, range->defs->def, "used: %s",
13690 #if DEBUG_COLOR_GRAPH < 2
13691 error(state, range->defs->def, "too few registers");
13693 internal_error(state, range->defs->def, "too few registers");
/* Narrow the class mask to the chosen register and sanity check. */
13696 range->classes &= arch_reg_regcm(state, range->color);
13697 if ((range->color == REG_UNSET) || (range->classes == 0)) {
13698 internal_error(state, range->defs->def, "select_free_color did not?");
/* color_graph - Chaitin/Briggs style recursive simplify-and-select.
 * Pop a range from the low list (guaranteed colorable) or, failing that,
 * the high list; unlink it from its doubly linked group list, migrate
 * neighbors whose degree drops to the colorable threshold from high to
 * low, recurse on the remainder, then color this range on the way back
 * out via select_free_color().  Returns whether coloring succeeded.
 */
13703 static int color_graph(struct compile_state *state, struct reg_state *rstate)
13706 struct live_range_edge *edge;
13707 struct live_range *range;
13709 cgdebug_printf("Lo: ");
13710 range = rstate->low;
/* group_prev points at the previous node's group_next slot (or the list
 * head), so *group_prev must be this node. */
13711 if (*range->group_prev != range) {
13712 internal_error(state, 0, "lo: *prev != range?");
13714 *range->group_prev = range->group_next;
13715 if (range->group_next) {
13716 range->group_next->group_prev = range->group_prev;
13718 if (&range->group_next == rstate->low_tail) {
13719 rstate->low_tail = range->group_prev;
13721 if (rstate->low == range) {
13722 internal_error(state, 0, "low: next != prev?");
13725 else if (rstate->high) {
13726 cgdebug_printf("Hi: ");
13727 range = rstate->high;
13728 if (*range->group_prev != range) {
13729 internal_error(state, 0, "hi: *prev != range?");
13731 *range->group_prev = range->group_next;
13732 if (range->group_next) {
13733 range->group_next->group_prev = range->group_prev;
13735 if (&range->group_next == rstate->high_tail) {
13736 rstate->high_tail = range->group_prev;
13738 if (rstate->high == range) {
13739 internal_error(state, 0, "high: next != prev?");
13745 cgdebug_printf(" %d\n", range - rstate->lr);
13746 range->group_prev = 0;
13747 for(edge = range->edges; edge; edge = edge->next) {
13748 struct live_range *node;
13750 /* Move nodes from the high to the low list */
/* A node still on a list (group_prev set), uncolored, whose degree just
 * fell to the class size becomes trivially colorable. */
13751 if (node->group_prev && (node->color == REG_UNSET) &&
13752 (node->degree == regc_max_size(state, node->classes))) {
13753 if (*node->group_prev != node) {
13754 internal_error(state, 0, "move: *prev != node?");
13756 *node->group_prev = node->group_next;
13757 if (node->group_next) {
13758 node->group_next->group_prev = node->group_prev;
13760 if (&node->group_next == rstate->high_tail) {
13761 rstate->high_tail = node->group_prev;
13763 cgdebug_printf("Moving...%d to low\n", node - rstate->lr);
13764 node->group_prev = rstate->low_tail;
13765 node->group_next = 0;
13766 *rstate->low_tail = node;
13767 rstate->low_tail = &node->group_next;
13768 if (*node->group_prev != node) {
13769 internal_error(state, 0, "move2: *prev != node?");
/* Recurse on the reduced graph, then color this node ("select" phase). */
13774 colored = color_graph(state, rstate);
13776 cgdebug_printf("Coloring %d @", range - rstate->lr);
13777 cgdebug_loc(state, range->defs->def);
13779 colored = select_free_color(state, rstate, range);
13780 cgdebug_printf(" %s\n", arch_reg_str(range->color));
/* verify_colors - DEBUG_CONSISTENCY check that every defining instruction
 * got a live range, a color, and that no interference neighbor shares
 * that color.  Compiled to an empty stub when the debug flag is off.
 */
13785 #if DEBUG_CONSISTENCY
13786 static void verify_colors(struct compile_state *state, struct reg_state *rstate)
13788 struct live_range *lr;
13789 struct live_range_edge *edge;
13790 struct triple *ins, *first;
13791 char used[MAX_REGISTERS];
13792 first = RHS(state->main_function, 0);
13795 if (triple_is_def(state, ins)) {
13796 if ((ins->id < 0) || (ins->id > rstate->defs)) {
13797 internal_error(state, ins,
13798 "triple without a live range def");
13800 lr = rstate->lrd[ins->id].lr;
13801 if (lr->color == REG_UNSET) {
13802 internal_error(state, ins,
13803 "triple without a color");
13805 /* Find the registers used by the edges */
13806 memset(used, 0, sizeof(used));
13807 for(edge = lr->edges; edge; edge = edge->next) {
13808 if (edge->node->color == REG_UNSET) {
13809 internal_error(state, 0,
13810 "live range without a color");
13812 reg_fill_used(state, used, edge->node->color);
13814 if (used[lr->color]) {
13815 internal_error(state, ins,
13816 "triple with already used color");
13820 } while(ins != first);
/* No-op stub when consistency checking is disabled. */
13823 static inline void verify_colors(struct compile_state *state, struct reg_state *rstate) {}
/* color_triples - copy each live range's final color back onto its
 * instructions' ids (SET_REG), walking the circular instruction list.
 */
13826 static void color_triples(struct compile_state *state, struct reg_state *rstate)
13828 struct live_range *lr;
13829 struct triple *first, *ins;
13830 first = RHS(state->main_function, 0);
13833 if ((ins->id < 0) || (ins->id > rstate->defs)) {
13834 internal_error(state, ins,
13835 "triple without a live range");
13837 lr = rstate->lrd[ins->id].lr;
13838 SET_REG(ins->id, lr->color);
13840 } while (ins != first);
/* merge_sort_lr - sort live ranges [first..last] (a contiguous array
 * slice) into a group-linked list ordered by ascending degree, then
 * length, using recursive merge sort.  Ranges with no defs are filtered
 * (see the elided test near line 13887).  Returns the list head.
 */
13843 static struct live_range *merge_sort_lr(
13844 struct live_range *first, struct live_range *last)
13846 struct live_range *mid, *join, **join_tail, *pick;
13848 size = (last - first) + 1;
/* Split the slice in half and sort each side. */
13850 mid = first + size/2;
13851 first = merge_sort_lr(first, mid -1);
13852 mid = merge_sort_lr(mid, last);
13856 /* merge the two lists */
13857 while(first && mid) {
13858 if ((first->degree < mid->degree) ||
13859 ((first->degree == mid->degree) &&
13860 (first->length < mid->length))) {
13862 first = first->group_next;
13864 first->group_prev = 0;
13869 mid = mid->group_next;
13871 mid->group_prev = 0;
/* Append 'pick' to the joined list, maintaining back-links. */
13874 pick->group_next = 0;
13875 pick->group_prev = join_tail;
13877 join_tail = &pick->group_next;
13879 /* Splice the remaining list */
13880 pick = (first)? first : mid;
13883 pick->group_prev = join_tail;
/* Base case path (size <= 1): skip ranges without defs. */
13887 if (!first->defs) {
/* ids_from_rstate - restore each instruction's original id from the
 * allocator's live_range_def table (inverse of the id rewrite done when
 * live ranges were initialized).  No-op when no defs were recorded.
 */
13895 static void ids_from_rstate(struct compile_state *state,
13896 struct reg_state *rstate)
13898 struct triple *ins, *first;
13899 if (!rstate->defs) {
13902 /* Display the graph if desired */
13903 if (state->debug & DEBUG_INTERFERENCE) {
13904 print_blocks(state, stdout);
13905 print_control_flow(state);
13907 first = RHS(state->main_function, 0);
13911 struct live_range_def *lrd;
13912 lrd = &rstate->lrd[ins->id];
13913 ins->id = lrd->orig_id;
13916 } while(ins != first);
/* cleanup_live_edges - free the interference edges of every live range
 * (ranges are 1-based: 1..rstate->ranges).
 */
13919 static void cleanup_live_edges(struct reg_state *rstate)
13922 /* Free the edges on each node */
13923 for(i = 1; i <= rstate->ranges; i++) {
13924 remove_live_edges(rstate, &rstate->lr[i]);
/* cleanup_rstate - release all per-pass allocator state: interference
 * edges, the live_range_def table, and the variable lifetime blocks,
 * then zero the counters/pointers so the next pass starts clean.
 */
13928 static void cleanup_rstate(struct compile_state *state, struct reg_state *rstate)
13930 cleanup_live_edges(rstate);
13931 xfree(rstate->lrd);
13934 /* Free the variable lifetime information */
13935 if (rstate->blocks) {
13936 free_variable_lifetimes(state, rstate->blocks);
13939 rstate->ranges = 0;
13942 rstate->blocks = 0;
13945 static void verify_consistency(struct compile_state *state);
/* allocate_registers - top-level register allocation driver.
 * Each pass: recompute lifetimes, repair coalesce conflicts and tangles,
 * build live ranges and the interference graph, coalesce to a fixed
 * point, sort ranges by degree into low/high worklists, and run the
 * graph-coloring recursion.  Repeats (up to max_passes) until coloring
 * succeeds, then writes colors back onto the triples.
 */
13946 static void allocate_registers(struct compile_state *state)
13948 struct reg_state rstate;
13951 /* Clear out the reg_state */
13952 memset(&rstate, 0, sizeof(rstate));
13953 rstate.max_passes = MAX_ALLOCATION_PASSES;
13956 struct live_range **point, **next;
13961 #if DEBUG_RANGE_CONFLICTS
13962 fprintf(stderr, "pass: %d\n", rstate.passes);
/* Undo any id rewriting left over from a previous failed pass. */
13966 ids_from_rstate(state, &rstate);
13968 /* Cleanup the temporary data structures */
13969 cleanup_rstate(state, &rstate);
13971 /* Compute the variable lifetimes */
13972 rstate.blocks = compute_variable_lifetimes(state);
13974 /* Fix invalid mandatory live range coalesce conflicts */
13975 conflicts = correct_coalesce_conflicts(state, rstate.blocks);
13977 /* Fix two simultaneous uses of the same register.
13978 * In a few pathlogical cases a partial untangle moves
13979 * the tangle to a part of the graph we won't revisit.
13980 * So we keep looping until we have no more tangle fixes
13984 tangles = correct_tangles(state, rstate.blocks);
13987 if (state->debug & DEBUG_INSERTED_COPIES) {
13988 printf("After resolve_tangles\n");
13989 print_blocks(state, stdout);
13990 print_control_flow(state);
13992 verify_consistency(state);
13994 /* Allocate and initialize the live ranges */
13995 initialize_live_ranges(state, &rstate);
13997 /* Note current doing coalescing in a loop appears to
13998 * buys me nothing. The code is left this way in case
13999 * there is some value in it. Or if a future bugfix
14000 * yields some benefit.
14003 #if DEBUG_COALESCING
14004 fprintf(stderr, "coalescing\n");
14006 /* Remove any previous live edge calculations */
14007 cleanup_live_edges(&rstate);
14009 /* Compute the interference graph */
14010 walk_variable_lifetimes(
14011 state, rstate.blocks, graph_ins, &rstate);
14013 /* Display the interference graph if desired */
14014 if (state->debug & DEBUG_INTERFERENCE) {
14015 print_interference_blocks(state, &rstate, stdout, 1);
14016 printf("\nlive variables by instruction\n");
14017 walk_variable_lifetimes(
14018 state, rstate.blocks,
14019 print_interference_ins, &rstate);
14022 coalesced = coalesce_live_ranges(state, &rstate);
14024 #if DEBUG_COALESCING
14025 fprintf(stderr, "coalesced: %d\n", coalesced);
/* Coalescing merges edges, so rebuild the graph until stable. */
14027 } while(coalesced);
14029 /* Verify the interference graph */
14030 verify_interference_graph(state, &rstate);
14032 /* Build the groups low and high. But with the nodes
14033 * first sorted by degree order.
14035 rstate.low_tail = &rstate.low;
14036 rstate.high_tail = &rstate.high;
/* merge_sort_lr returns the degree-sorted list; start everything on
 * high, then find the real tail. */
14037 rstate.high = merge_sort_lr(&rstate.lr[1], &rstate.lr[rstate.ranges]);
14039 rstate.high->group_prev = &rstate.high;
14041 for(point = &rstate.high; *point; point = &(*point)->group_next)
14043 rstate.high_tail = point;
14044 /* Walk through the high list and move everything that needs
14047 for(point = &rstate.high; *point; point = next) {
14048 struct live_range *range;
14049 next = &(*point)->group_next;
14052 /* If it has a low degree or it already has a color
14053 * place the node in low.
14055 if ((range->degree < regc_max_size(state, range->classes)) ||
14056 (range->color != REG_UNSET)) {
14057 cgdebug_printf("Lo: %5d degree %5d%s\n",
14058 range - rstate.lr, range->degree,
14059 (range->color != REG_UNSET) ? " (colored)": "");
14060 *range->group_prev = range->group_next;
14061 if (range->group_next) {
14062 range->group_next->group_prev = range->group_prev;
14064 if (&range->group_next == rstate.high_tail) {
14065 rstate.high_tail = range->group_prev;
14067 range->group_prev = rstate.low_tail;
14068 range->group_next = 0;
14069 *rstate.low_tail = range;
14070 rstate.low_tail = &range->group_next;
14074 cgdebug_printf("hi: %5d degree %5d%s\n",
14075 range - rstate.lr, range->degree,
14076 (range->color != REG_UNSET) ? " (colored)": "");
14079 /* Color the live_ranges */
14080 colored = color_graph(state, &rstate);
14082 } while (!colored);
14084 /* Verify the graph was properly colored */
14085 verify_colors(state, &rstate);
14087 /* Move the colors from the graph to the triples */
14088 color_triples(state, &rstate);
14090 /* Cleanup the temporary data structures */
14091 cleanup_rstate(state, &rstate);
14094 /* Sparce Conditional Constant Propogation
14095 * =========================================
/* Data structures for SCCP (Wegman/Zadeck): one lattice node per
 * instruction, SSA edges between defs and uses, and flow edges between
 * basic blocks, each with an intrusive circular work-list linkage.
 */
14099 struct lattice_node {
14101 struct triple *def;
14102 struct ssa_edge *out;
14103 struct flow_block *fblock;
14104 struct triple *val;
14105 /* lattice high val && !is_const(val)
14106 * lattice const is_const(val)
14107 * lattice low val == 0
/* NOTE(review): 'struct ssa_edge {' header line is elided here; these
 * are the ssa_edge members. */
14111 struct lattice_node *src;
14112 struct lattice_node *dst;
14113 struct ssa_edge *work_next;
14114 struct ssa_edge *work_prev;
14115 struct ssa_edge *out_next;
/* NOTE(review): 'struct flow_edge {' header line is elided here; these
 * are the flow_edge members. */
14118 struct flow_block *src;
14119 struct flow_block *dst;
14120 struct flow_edge *work_next;
14121 struct flow_edge *work_prev;
14122 struct flow_edge *in_next;
14123 struct flow_edge *out_next;
14126 struct flow_block {
14127 struct block *block;
14128 struct flow_edge *in;
14129 struct flow_edge *out;
/* 'left'/'right' are embedded edges for the block's two successors. */
14130 struct flow_edge left, right;
/* scc_state members: arrays indexed by instruction/edge/block plus the
 * two circular work lists drained by the solver. */
14135 struct lattice_node *lattice;
14136 struct ssa_edge *ssa_edges;
14137 struct flow_block *flow_blocks;
14138 struct flow_edge *flow_work_list;
14139 struct ssa_edge *ssa_work_list;
/* scc_add_fedge - append a flow edge to the circular doubly linked flow
 * work list (self-linked when it becomes the only element).
 */
14143 static void scc_add_fedge(struct compile_state *state, struct scc_state *scc,
14144 struct flow_edge *fedge)
14146 if (!scc->flow_work_list) {
14147 scc->flow_work_list = fedge;
14148 fedge->work_next = fedge->work_prev = fedge;
/* Non-empty list: insert after the current tail (head->work_prev). */
14151 struct flow_edge *ftail;
14152 ftail = scc->flow_work_list->work_prev;
14153 fedge->work_next = ftail->work_next;
14154 fedge->work_prev = ftail;
14155 fedge->work_next->work_prev = fedge;
14156 fedge->work_prev->work_next = fedge;
/* scc_next_fedge - pop the head of the flow work list; empties the list
 * when the popped edge was self-linked.  Returns the edge (0 when the
 * list was already empty).
 */
14160 static struct flow_edge *scc_next_fedge(
14161 struct compile_state *state, struct scc_state *scc)
14163 struct flow_edge *fedge;
14164 fedge = scc->flow_work_list;
14166 fedge->work_next->work_prev = fedge->work_prev;
14167 fedge->work_prev->work_next = fedge->work_next;
14168 if (fedge->work_next != fedge) {
14169 scc->flow_work_list = fedge->work_next;
14171 scc->flow_work_list = 0;
/* scc_add_sedge - append an SSA edge to the circular SSA work list;
 * mirrors scc_add_fedge for the other work list.
 */
14177 static void scc_add_sedge(struct compile_state *state, struct scc_state *scc,
14178 struct ssa_edge *sedge)
14180 if (!scc->ssa_work_list) {
14181 scc->ssa_work_list = sedge;
14182 sedge->work_next = sedge->work_prev = sedge;
/* Non-empty list: insert after the current tail (head->work_prev). */
14185 struct ssa_edge *stail;
14186 stail = scc->ssa_work_list->work_prev;
14187 sedge->work_next = stail->work_next;
14188 sedge->work_prev = stail;
14189 sedge->work_next->work_prev = sedge;
14190 sedge->work_prev->work_next = sedge;
/* scc_next_sedge - pop the head of the SSA work list; mirrors
 * scc_next_fedge.  Returns the edge (0 when the list was empty).
 */
14194 static struct ssa_edge *scc_next_sedge(
14195 struct compile_state *state, struct scc_state *scc)
14197 struct ssa_edge *sedge;
14198 sedge = scc->ssa_work_list;
14200 sedge->work_next->work_prev = sedge->work_prev;
14201 sedge->work_prev->work_next = sedge->work_next;
14202 if (sedge->work_next != sedge) {
14203 scc->ssa_work_list = sedge->work_next;
14205 scc->ssa_work_list = 0;
/* Build the data structures for the SCC transform (this has the shape of
 * sparse conditional constant propagation: a lattice node per triple,
 * an ssa edge per use, a flow block/edge graph over the basic blocks).
 * Three passes over the instruction list:
 *   pass 0 - count instructions and uses to size the allocations,
 *   pass 1 - fill in lattice nodes (initialized to LATTICE HIGH) and
 *            flow blocks, remembering each triple's old id,
 *   pass 2 - wire up flow edges (left/right successors, in/out lists)
 *            and ssa edges (def -> use).
 * Finally a dummy flow block 0 is placed above the entry block and its
 * single out edge is seeded onto the flow work list.
 */
14211 static void initialize_scc_state(
14212 struct compile_state *state, struct scc_state *scc)
14214 int ins_count, ssa_edge_count;
14215 int ins_index, ssa_edge_index, fblock_index;
14216 struct triple *first, *ins;
14217 struct block *block;
14218 struct flow_block *fblock;
14220 memset(scc, 0, sizeof(*scc));
/* Initialize pass zero: find out how much memory we need */
14223 first = RHS(state->main_function, 0);
14225 ins_count = ssa_edge_count = 0;
14227 struct triple_set *edge;
/* one ssa edge per use of this instruction */
14229 for(edge = ins->use; edge; edge = edge->next) {
14233 } while(ins != first);
14235 fprintf(stderr, "ins_count: %d ssa_edge_count: %d vertex_count: %d\n",
14236 ins_count, ssa_edge_count, state->last_vertex);
14238 scc->ins_count = ins_count;
/* +1 on each allocation: index 0 is reserved (ids/vertices are 1-based) */
14240 xcmalloc(sizeof(*scc->lattice)*(ins_count + 1), "lattice");
14242 xcmalloc(sizeof(*scc->ssa_edges)*(ssa_edge_count + 1), "ssa_edges");
14244 xcmalloc(sizeof(*scc->flow_blocks)*(state->last_vertex + 1),
/* Initialize pass one collect up the nodes */
14250 ins_index = ssa_edge_index = fblock_index = 0;
/* each OP_LABEL starting a new block gets a flow_block */
14253 if ((ins->op == OP_LABEL) && (block != ins->u.block)) {
14254 block = ins->u.block;
14256 internal_error(state, ins, "label without block");
14259 block->vertex = fblock_index;
14260 fblock = &scc->flow_blocks[fblock_index];
14261 fblock->block = block;
14264 struct lattice_node *lnode;
14266 lnode = &scc->lattice[ins_index];
14269 lnode->fblock = fblock;
14270 lnode->val = ins; /* LATTICE HIGH */
/* stash the triple's id; ins->id becomes the lattice index for the
 * duration of the transform (restored in scc_writeback_values) */
14271 lnode->old_id = ins->id;
14272 ins->id = ins_index;
14275 } while(ins != first);
/* Initialize pass two collect up the edges */
14281 if ((ins->op == OP_LABEL) && (block != ins->u.block)) {
14282 struct flow_edge *fedge, **ftail;
14283 struct block_set *bedge;
14284 block = ins->u.block;
14285 fblock = &scc->flow_blocks[block->vertex];
/* out list: left edge first, then right edge if present */
14288 ftail = &fblock->out;
14290 fblock->left.dst = &scc->flow_blocks[block->left->vertex];
14291 if (fblock->left.dst->block != block->left) {
14292 internal_error(state, 0, "block mismatch");
14294 fblock->left.out_next = 0;
14295 *ftail = &fblock->left;
14296 ftail = &fblock->left.out_next;
14298 if (block->right) {
14299 fblock->right.dst = &scc->flow_blocks[block->right->vertex];
14300 if (fblock->right.dst->block != block->right) {
14301 internal_error(state, 0, "block mismatch");
14303 fblock->right.out_next = 0;
14304 *ftail = &fblock->right;
14305 ftail = &fblock->right.out_next;
/* all out edges start src'd here, self-linked, and not executable */
14307 for(fedge = fblock->out; fedge; fedge = fedge->out_next) {
14308 fedge->src = fblock;
14309 fedge->work_next = fedge->work_prev = fedge;
14310 fedge->executable = 0;
/* in list: one entry per predecessor block (block->use) */
14312 ftail = &fblock->in;
14313 for(bedge = block->use; bedge; bedge = bedge->next) {
14314 struct block *src_block;
14315 struct flow_block *sfblock;
14316 struct flow_edge *sfedge;
14317 src_block = bedge->member;
14318 sfblock = &scc->flow_blocks[src_block->vertex];
/* pick the predecessor's edge that targets this block */
14320 if (src_block->left == block) {
14321 sfedge = &sfblock->left;
14323 sfedge = &sfblock->right;
14326 ftail = &sfedge->in_next;
14327 sfedge->in_next = 0;
/* ssa edges: one per use of this instruction's value */
14331 struct triple_set *edge;
14332 struct ssa_edge **stail;
14333 struct lattice_node *lnode;
14334 lnode = &scc->lattice[ins->id];
14336 stail = &lnode->out;
14337 for(edge = ins->use; edge; edge = edge->next) {
14338 struct ssa_edge *sedge;
14339 ssa_edge_index += 1;
14340 sedge = &scc->ssa_edges[ssa_edge_index];
14342 stail = &sedge->out_next;
14343 sedge->src = lnode;
14344 sedge->dst = &scc->lattice[edge->member->id];
14345 sedge->work_next = sedge->work_prev = sedge;
14346 sedge->out_next = 0;
14350 } while(ins != first);
/* Setup a dummy block 0 as a node above the start node */
14353 struct flow_block *fblock, *dst;
14354 struct flow_edge *fedge;
14355 fblock = &scc->flow_blocks[0];
14358 fblock->out = &fblock->left;
14359 dst = &scc->flow_blocks[state->first_block->vertex];
14360 fedge = &fblock->left;
14361 fedge->src = fblock;
14363 fedge->work_next = fedge;
14364 fedge->work_prev = fedge;
14365 fedge->in_next = fedge->dst->in;
14366 fedge->out_next = 0;
14367 fedge->executable = 0;
14368 fedge->dst->in = fedge;
/* Initialize the work lists */
14371 scc->flow_work_list = 0;
14372 scc->ssa_work_list = 0;
/* seed the propagation with the dummy entry edge */
14373 scc_add_fedge(state, scc, fedge);
14376 fprintf(stderr, "ins_index: %d ssa_edge_index: %d fblock_index: %d\n",
14377 ins_index, ssa_edge_index, fblock_index);
/* Release the arrays allocated by initialize_scc_state.
 * (The scc struct itself is caller-owned, typically stack allocated.)
 */
14382 static void free_scc_state(
14383 struct compile_state *state, struct scc_state *scc)
14385 xfree(scc->flow_blocks);
14386 xfree(scc->ssa_edges);
14387 xfree(scc->lattice);
/* Map an instruction to its lattice node.  During the SCC transform
 * ins->id holds the index into scc->lattice (set by initialize_scc_state);
 * an id <= 0 means the triple was never registered and is a fatal error.
 */
14391 static struct lattice_node *triple_to_lattice(
14392 struct compile_state *state, struct scc_state *scc, struct triple *ins)
14394 if (ins->id <= 0) {
14395 internal_error(state, ins, "bad id");
14397 return &scc->lattice[ins->id];
/* Snapshot a lattice node's current value before it is recomputed,
 * so lval_changed() can compare old vs. new afterwards.
 * Returns a duplicate of lnode->val (ownership per dup_triple;
 * listing elided — confirm cleanup path against full source).
 */
14400 static struct triple *preserve_lval(
14401 struct compile_state *state, struct lattice_node *lnode)
14403 struct triple *old;
14404 /* Preserve the original value */
14406 old = dup_triple(state, lnode->val);
14407 if (lnode->val != lnode->def) {
/* Report whether a lattice node's value differs from the preserved
 * snapshot 'old'.  Lattice encoding used throughout this pass:
 *   val == 0        -> lattice low
 *   val == def      -> lattice high
 *   is_const(val)   -> a known constant
 * Two values are considered equal when op/params/u payload all match
 * (memcmp over param[] and the u union).
 */
14417 static int lval_changed(struct compile_state *state,
14418 struct triple *old, struct lattice_node *lnode)
14421 /* See if the lattice value has changed */
/* both lattice low -> unchanged */
14423 if (!old && !lnode->val) {
14426 if (changed && lnode->val && !is_const(lnode->val)) {
14430 lnode->val && old &&
14431 (memcmp(lnode->val->param, old->param,
14432 TRIPLE_SIZE(lnode->val->sizes) * sizeof(lnode->val->param[0])) == 0) &&
14433 (memcmp(&lnode->val->u, &old->u, sizeof(old->u)) == 0)) {
/* Evaluate a phi node's lattice value as the meet of the values flowing
 * in over the *executable* incoming flow edges:
 *   meet(X, low)   = low
 *   meet(X, high)  = X
 *   meet(c1, c2)   = c1 if equal, otherwise low
 * Starts from lattice high (val = def); if the result changed, every
 * outgoing ssa edge is pushed onto the ssa work list.
 */
14443 static void scc_visit_phi(struct compile_state *state, struct scc_state *scc,
14444 struct lattice_node *lnode)
14446 struct lattice_node *tmp;
14447 struct triple **slot, *old;
14448 struct flow_edge *fedge;
14450 if (lnode->def->op != OP_PHI) {
14451 internal_error(state, lnode->def, "not phi");
14453 /* Store the original value */
14454 old = preserve_lval(state, lnode);
14456 /* default to lattice high */
14457 lnode->val = lnode->def;
/* phi arguments parallel the block's incoming flow edges, by index */
14458 slot = &RHS(lnode->def, 0);
14460 for(fedge = lnode->fblock->in; fedge; index++, fedge = fedge->in_next) {
/* skip values from edges not yet proven executable */
14461 if (!fedge->executable) {
14464 if (!slot[index]) {
14465 internal_error(state, lnode->def, "no phi value");
14467 tmp = triple_to_lattice(state, scc, slot[index]);
14468 /* meet(X, lattice low) = lattice low */
14472 /* meet(X, lattice high) = X */
14473 else if (!tmp->val) {
14474 lnode->val = lnode->val;
14476 /* meet(lattice high, X) = X */
14477 else if (!is_const(lnode->val)) {
14478 lnode->val = dup_triple(state, tmp->val);
14479 lnode->val->type = lnode->def->type;
14481 /* meet(const, const) = const or lattice low */
14482 else if (!constants_equal(state, lnode->val, tmp->val)) {
14490 fprintf(stderr, "phi: %d -> %s\n",
14492 (!lnode->val)? "lo": is_const(lnode->val)? "const": "hi");
14494 /* If the lattice value has changed update the work lists. */
14495 if (lval_changed(state, old, lnode)) {
14496 struct ssa_edge *sedge;
14497 for(sedge = lnode->out; sedge; sedge = sedge->out_next) {
14498 scc_add_sedge(state, scc, sedge);
/* Recompute the lattice value of a non-phi instruction.
 * Builds a detached 'scratch' copy of the defining triple, substitutes
 * each operand with that operand's current lattice value (or its def
 * when still high), runs simplify() on the copy, and takes the result
 * as the new lattice value.  Impure definitions are forced to lattice
 * low.  Returns nonzero when the lattice value changed (per
 * lval_changed); the scratch triple is torn down before returning.
 */
14503 static int compute_lnode_val(struct compile_state *state, struct scc_state *scc,
14504 struct lattice_node *lnode)
14507 struct triple *old, *scratch;
14508 struct triple **dexpr, **vexpr;
14511 /* Store the original value */
14512 old = preserve_lval(state, lnode);
14514 /* Reinitialize the value */
14515 lnode->val = scratch = dup_triple(state, lnode->def);
14516 scratch->id = lnode->old_id;
/* keep scratch out of the instruction list: self-linked */
14517 scratch->next = scratch;
14518 scratch->prev = scratch;
14521 count = TRIPLE_SIZE(scratch->sizes);
14522 for(i = 0; i < count; i++) {
14523 dexpr = &lnode->def->param[i];
14524 vexpr = &scratch->param[i];
/* substitute value params only (skip the MISC..TARG middle range) */
14526 if (((i < TRIPLE_MISC_OFF(scratch->sizes)) ||
14527 (i >= TRIPLE_TARG_OFF(scratch->sizes))) &&
14529 struct lattice_node *tmp;
14530 tmp = triple_to_lattice(state, scc, *dexpr);
/* use the operand's lattice value; fall back to its def when high */
14531 *vexpr = (tmp->val)? tmp->val : tmp->def;
14534 if (scratch->op == OP_BRANCH) {
14535 scratch->next = lnode->def->next;
14537 /* Recompute the value */
14538 #warning "FIXME see if simplify does anything bad"
14539 /* So far it looks like only the strength reduction
14540 * optimization are things I need to worry about.
14542 simplify(state, scratch);
14543 /* Cleanup my value */
/* scratch must stay detached: nobody may use it or link to it */
14544 if (scratch->use) {
14545 internal_error(state, lnode->def, "scratch used?");
14547 if ((scratch->prev != scratch) ||
14548 ((scratch->next != scratch) &&
14549 ((lnode->def->op != OP_BRANCH) ||
14550 (scratch->next != lnode->def->next)))) {
14551 internal_error(state, lnode->def, "scratch in list?");
14553 /* undo any uses... */
14554 count = TRIPLE_SIZE(scratch->sizes);
14555 for(i = 0; i < count; i++) {
14556 vexpr = &scratch->param[i];
14558 unuse_triple(*vexpr, scratch);
/* non-constant result: decide between high/low from the operands */
14561 if (!is_const(scratch)) {
14562 for(i = 0; i < count; i++) {
14563 dexpr = &lnode->def->param[i];
14564 if (((i < TRIPLE_MISC_OFF(scratch->sizes)) ||
14565 (i >= TRIPLE_TARG_OFF(scratch->sizes))) &&
14567 struct lattice_node *tmp;
14568 tmp = triple_to_lattice(state, scc, *dexpr);
/* result identical to the original def -> keep lattice high */
14576 (lnode->val->op == lnode->def->op) &&
14577 (memcmp(lnode->val->param, lnode->def->param,
14578 count * sizeof(lnode->val->param[0])) == 0) &&
14579 (memcmp(&lnode->val->u, &lnode->def->u, sizeof(lnode->def->u)) == 0)) {
14580 lnode->val = lnode->def;
14582 /* Find the cases that are always lattice lo */
14584 triple_is_def(state, lnode->val) &&
14585 !triple_is_pure(state, lnode->val)) {
/* an OP_SDECL value other than the def itself is malformed */
14589 (lnode->val->op == OP_SDECL) &&
14590 (lnode->val != lnode->def)) {
14591 internal_error(state, lnode->def, "bad sdecl");
14593 /* See if the lattice value has changed */
14594 changed = lval_changed(state, old, lnode);
14595 if (lnode->val != scratch) {
/* Propagate reachability through a branch instruction.
 * Conditional branches only (RHS count 0 means unconditional — handled
 * elsewhere).  Edge order set up by initialize_scc_state: out is the
 * left/fall-through edge, out->out_next the right/taken edge.
 *   cond lattice low (val == 0): both successor edges may execute.
 *   cond constant nonzero:       only the taken edge.
 *   cond constant zero:          only the fall-through edge.
 */
14601 static void scc_visit_branch(struct compile_state *state, struct scc_state *scc,
14602 struct lattice_node *lnode)
14604 struct lattice_node *cond;
14607 struct flow_edge *fedge;
/* debug dump of the branch's successor vertices and condition id */
14608 fprintf(stderr, "branch: %d (",
14611 for(fedge = lnode->fblock->out; fedge; fedge = fedge->out_next) {
14612 fprintf(stderr, " %d", fedge->dst->block->vertex);
14614 fprintf(stderr, " )");
14615 if (TRIPLE_RHS(lnode->def->sizes) > 0) {
14616 fprintf(stderr, " <- %d",
14617 RHS(lnode->def, 0)->id);
14619 fprintf(stderr, "\n");
14622 if (lnode->def->op != OP_BRANCH) {
14623 internal_error(state, lnode->def, "not branch");
14625 /* This only applies to conditional branches */
14626 if (TRIPLE_RHS(lnode->def->sizes) == 0) {
14629 cond = triple_to_lattice(state, scc, RHS(lnode->def,0));
/* lattice high (non-constant) condition: nothing definite to do yet */
14630 if (cond->val && !is_const(cond->val)) {
14631 #warning "FIXME do I need to do something here?"
14632 warning(state, cond->def, "condition not constant?");
/* lattice low: either path may be taken, mark both executable */
14635 if (cond->val == 0) {
14636 scc_add_fedge(state, scc, cond->fblock->out);
14637 scc_add_fedge(state, scc, cond->fblock->out->out_next);
/* constant nonzero: branch taken, follow the second (right) edge */
14639 else if (cond->val->u.cval) {
14640 scc_add_fedge(state, scc, cond->fblock->out->out_next);
/* constant zero: fall through on the first (left) edge */
14643 scc_add_fedge(state, scc, cond->fblock->out);
/* Re-evaluate a non-phi instruction's lattice value.
 * Branches additionally propagate reachability via scc_visit_branch;
 * for other ops, a changed value pushes all outgoing ssa edges onto
 * the ssa work list so dependent instructions get revisited.
 */
14648 static void scc_visit_expr(struct compile_state *state, struct scc_state *scc,
14649 struct lattice_node *lnode)
14653 changed = compute_lnode_val(state, scc, lnode);
/* debug dump: op, rhs ids, and the resulting lattice state */
14656 struct triple **expr;
14657 fprintf(stderr, "expr: %3d %10s (",
14658 lnode->def->id, tops(lnode->def->op));
14659 expr = triple_rhs(state, lnode->def, 0);
14660 for(;expr;expr = triple_rhs(state, lnode->def, expr)) {
14662 fprintf(stderr, " %d", (*expr)->id);
14665 fprintf(stderr, " ) -> %s\n",
14666 (!lnode->val)? "lo": is_const(lnode->val)? "const": "hi");
14669 if (lnode->def->op == OP_BRANCH) {
14670 scc_visit_branch(state, scc, lnode);
14673 else if (changed) {
14674 struct ssa_edge *sedge;
14675 for(sedge = lnode->out; sedge; sedge = sedge->out_next) {
14676 scc_add_sedge(state, scc, sedge);
/* Fold the computed lattice values back into the instruction stream.
 * For each instruction: restore its pre-SCC id, warn if its lattice
 * value is still high, and when the value is a known constant rewrite
 * the instruction in place (mkconst for plain constants, mkaddr_const
 * for address constants, simplify() otherwise).
 */
14681 static void scc_writeback_values(
14682 struct compile_state *state, struct scc_state *scc)
14684 struct triple *first, *ins;
14685 first = RHS(state->main_function, 0);
14688 struct lattice_node *lnode;
14689 lnode = triple_to_lattice(state, scc, ins);
/* ins->id held the lattice index during the transform; restore it */
14691 ins->id = lnode->old_id;
/* a value still at lattice high here indicates incomplete propagation */
14693 if (lnode->val && !is_const(lnode->val)) {
14694 warning(state, lnode->def,
14695 "lattice node still high?");
14698 if (lnode->val && (lnode->val != ins)) {
14699 /* See if it something I know how to write back */
14700 switch(lnode->val->op) {
14702 mkconst(state, ins, lnode->val->u.cval);
14705 mkaddr_const(state, ins,
14706 MISC(lnode->val, 0), lnode->val->u.cval);
14709 /* By default don't copy the changes,
14710 * recompute them in place instead.
14712 simplify(state, ins);
/* sanity: the rewritten instruction must match the lattice constant */
14715 if (is_const(lnode->val) &&
14716 !constants_equal(state, lnode->val, ins)) {
14717 internal_error(state, 0, "constants not equal");
14719 /* Free the lattice nodes */
14724 } while(ins != first);
/* Driver for the SCC transform (structured like sparse conditional
 * constant propagation): alternately drain the flow-edge and ssa-edge
 * work lists until both are empty, then write the lattice results back
 * into the code and free the analysis state.
 */
14727 static void scc_transform(struct compile_state *state)
14729 struct scc_state scc;
14731 initialize_scc_state(state, &scc);
14733 while(scc.flow_work_list || scc.ssa_work_list) {
14734 struct flow_edge *fedge;
14735 struct ssa_edge *sedge;
14736 struct flow_edge *fptr;
14737 while((fedge = scc_next_fedge(state, &scc))) {
14738 struct block *block;
14739 struct triple *ptr;
14740 struct flow_block *fblock;
/* already processed this edge */
14743 if (fedge->executable) {
14747 internal_error(state, 0, "fedge without dst");
14750 internal_error(state, 0, "fedge without src");
14752 fedge->executable = 1;
14753 fblock = fedge->dst;
14754 block = fblock->block;
/* count executable predecessors ("time"; listing elided —
 * presumably incremented inside this loop, confirm in full source) */
14756 for(fptr = fblock->in; fptr; fptr = fptr->in_next) {
14757 if (fptr->executable) {
14762 fprintf(stderr, "vertex: %d time: %d\n",
14763 block->vertex, time);
14767 for(ptr = block->first; !done; ptr = ptr->next) {
14768 struct lattice_node *lnode;
14769 done = (ptr == block->last);
14770 lnode = &scc.lattice[ptr->id];
/* phis re-meet on every visit; other exprs only on first visit */
14771 if (ptr->op == OP_PHI) {
14772 scc_visit_phi(state, &scc, lnode);
14774 else if (time == 1) {
14775 scc_visit_expr(state, &scc, lnode);
/* a single unconditional successor is immediately executable */
14778 if (fblock->out && !fblock->out->out_next) {
14779 scc_add_fedge(state, &scc, fblock->out);
14782 while((sedge = scc_next_sedge(state, &scc))) {
14783 struct lattice_node *lnode;
14784 struct flow_block *fblock;
14785 lnode = sedge->dst;
14786 fblock = lnode->fblock;
14788 fprintf(stderr, "sedge: %5d (%5d -> %5d)\n",
14789 sedge - scc.ssa_edges,
14790 sedge->src->def->id,
14791 sedge->dst->def->id);
14793 if (lnode->def->op == OP_PHI) {
14794 scc_visit_phi(state, &scc, lnode);
/* only revisit an expr when its block is reachable */
14797 for(fptr = fblock->in; fptr; fptr = fptr->in_next) {
14798 if (fptr->executable) {
14803 scc_visit_expr(state, &scc, lnode);
14809 scc_writeback_values(state, &scc);
14810 free_scc_state(state, &scc);
/* Lower every triple in main_function to architecture-specific
 * instructions.  transform_to_arch_instruction returns the next
 * instruction to process, so it may expand one triple into several.
 */
14814 static void transform_to_arch_instructions(struct compile_state *state)
14816 struct triple *ins, *first;
14817 first = RHS(state->main_function, 0);
14820 ins = transform_to_arch_instruction(state, ins);
14821 } while(ins != first);
14824 #if DEBUG_CONSISTENCY
/* Consistency check: every rhs and lhs operand of every instruction
 * must list that instruction in its use chain.  internal_error aborts
 * on the first violation.
 */
14825 static void verify_uses(struct compile_state *state)
14827 struct triple *first, *ins;
14828 struct triple_set *set;
14829 first = RHS(state->main_function, 0);
14832 struct triple **expr;
/* check rhs operands */
14833 expr = triple_rhs(state, ins, 0);
14834 for(; expr; expr = triple_rhs(state, ins, expr)) {
14835 struct triple *rhs;
14837 for(set = rhs?rhs->use:0; set; set = set->next) {
14838 if (set->member == ins) {
14843 internal_error(state, ins, "rhs not used");
/* check lhs operands */
14846 expr = triple_lhs(state, ins, 0);
14847 for(; expr; expr = triple_lhs(state, ins, expr)) {
14848 struct triple *lhs;
14850 for(set = lhs?lhs->use:0; set; set = set->next) {
14851 if (set->member == ins) {
14856 internal_error(state, ins, "lhs not used");
14860 } while(ins != first);
/* Consistency check: once basic blocks exist (state->first_block set),
 * every instruction that stores a block pointer must actually have one.
 * A no-op before basic-block analysis has run.
 */
14863 static void verify_blocks_present(struct compile_state *state)
14865 struct triple *first, *ins;
14866 if (!state->first_block) {
14869 first = RHS(state->main_function, 0);
14872 valid_ins(state, ins);
14873 if (triple_stores_block(state, ins)) {
14874 if (!ins->u.block) {
14875 internal_error(state, ins,
14876 "%p not in a block?\n", ins);
14880 } while(ins != first);
/* Consistency check of the basic-block graph:
 *  - each instruction in a block points back at that block,
 *  - each entry in block->use is a real predecessor (left/right of it),
 *  - block->right matches the branch target, block->left the textual
 *    fall-through successor,
 *  - successors' use lists mention this block,
 *  - the stored user count and the global block count both match what
 *    is recomputed here.
 */
14884 static void verify_blocks(struct compile_state *state)
14886 struct triple *ins;
14887 struct block *block;
14889 block = state->first_block;
14896 struct block_set *user;
/* every instruction in the block must claim this block */
14898 for(ins = block->first; ins != block->last->next; ins = ins->next) {
14899 if (triple_stores_block(state, ins) && (ins->u.block != block)) {
14900 internal_error(state, ins, "inconsitent block specified");
14902 valid_ins(state, ins);
/* each recorded user must really have this block as a successor */
14905 for(user = block->use; user; user = user->next) {
/* the wrap-around last->first edge is allowed */
14907 if ((block == state->last_block) &&
14908 (user->member == state->first_block)) {
14911 if ((user->member->left != block) &&
14912 (user->member->right != block)) {
14913 internal_error(state, user->member->first,
14914 "user does not use block");
/* right successor must be the branch target */
14917 if (triple_is_branch(state, block->last) &&
14918 (block->right != block_of_triple(state, TARG(block->last, 0))))
14920 internal_error(state, block->last, "block->right != TARG(0)");
/* left successor must be the fall-through block */
14922 if (!triple_is_uncond_branch(state, block->last) &&
14923 (block != state->last_block) &&
14924 (block->left != block_of_triple(state, block->last->next)))
14926 internal_error(state, block->last, "block->left != block->last->next");
/* left successor's use list must mention this block */
14929 for(user = block->left->use; user; user = user->next) {
14930 if (user->member == block) {
14934 if (!user || user->member != block) {
14935 internal_error(state, block->first,
14936 "block does not use left");
/* same for the right successor, when present */
14939 if (block->right) {
14940 for(user = block->right->use; user; user = user->next) {
14941 if (user->member == block) {
14945 if (!user || user->member != block) {
14946 internal_error(state, block->first,
14947 "block does not use right");
14950 if (block->users != users) {
14951 internal_error(state, block->first,
14952 "computed users %d != stored users %d\n",
14953 users, block->users);
/* walk to the next block via the instruction list */
14955 if (!triple_stores_block(state, block->last->next)) {
14956 internal_error(state, block->last->next,
14957 "cannot find next block");
14959 block = block->last->next->u.block;
14961 internal_error(state, block->last->next,
14964 } while(block != state->first_block);
14965 if (blocks != state->last_vertex) {
14966 internal_error(state, 0, "computed blocks != stored blocks %d\n",
14967 blocks, state->last_vertex);
/* Consistency check: every definition must dominate each of its rhs
 * uses.  For a phi use, the effective use point is the end of the
 * predecessor block corresponding to the phi argument's edge, not the
 * phi itself.  A no-op before basic blocks exist.
 */
14971 static void verify_domination(struct compile_state *state)
14973 struct triple *first, *ins;
14974 struct triple_set *set;
14975 if (!state->first_block) {
14979 first = RHS(state->main_function, 0);
14982 for(set = ins->use; set; set = set->next) {
14983 struct triple **slot;
14984 struct triple *use_point;
14987 zrhs = TRIPLE_RHS(ins->sizes);
14988 slot = &RHS(set->member, 0);
14989 /* See if the use is on the right hand side */
14990 for(i = 0; i < zrhs; i++) {
14991 if (slot[i] == ins) {
14996 use_point = set->member;
14997 if (set->member->op == OP_PHI) {
14998 struct block_set *bset;
/* walk to the i-th predecessor edge of the phi's block */
15000 bset = set->member->u.block->use;
15001 for(edge = 0; bset && (edge < i); edge++) {
15005 internal_error(state, set->member,
15006 "no edge for phi rhs %d\n", i);
/* the use effectively happens at the end of that predecessor */
15008 use_point = bset->member->last;
15012 !tdominates(state, ins, use_point)) {
/* warn first so the definition site is reported too */
15013 internal_warning(state, ins,
15014 "ins does not dominate rhs use");
15015 internal_error(state, use_point,
15016 "non dominated rhs use point?");
15020 } while(ins != first);
/* Consistency check: for every multi-lhs instruction, the lhs entries
 * must be the OP_PIECE triples immediately following it, in order,
 * each with u.cval equal to its piece index.
 */
15023 static void verify_piece(struct compile_state *state)
15025 struct triple *first, *ins;
15026 first = RHS(state->main_function, 0);
15029 struct triple *ptr;
15031 lhs = TRIPLE_LHS(ins->sizes);
15032 for(ptr = ins->next, i = 0; i < lhs; i++, ptr = ptr->next) {
15033 if (ptr != LHS(ins, i)) {
15034 internal_error(state, ins, "malformed lhs on %s",
15037 if (ptr->op != OP_PIECE) {
15038 internal_error(state, ins, "bad lhs op %s at %d on %s",
15039 tops(ptr->op), i, tops(ins->op));
15041 if (ptr->u.cval != i) {
15042 internal_error(state, ins, "bad u.cval of %d %d expected",
15047 } while(ins != first);
/* Consistency check over instruction register colors.  (The per-
 * instruction check itself is elided from this listing — confirm the
 * body against the full source.)
 */
15049 static void verify_ins_colors(struct compile_state *state)
15051 struct triple *first, *ins;
15053 first = RHS(state->main_function, 0);
15057 } while(ins != first);
/* Run the full set of internal consistency checks (enabled when
 * DEBUG_CONSISTENCY is set).  Each check aborts via internal_error
 * on failure.
 */
15059 static void verify_consistency(struct compile_state *state)
15061 verify_uses(state);
15062 verify_blocks_present(state);
15063 verify_blocks(state);
15064 verify_domination(state);
15065 verify_piece(state);
15066 verify_ins_colors(state);
/* No-op stub used when consistency checking is compiled out. */
15069 static void verify_consistency(struct compile_state *state) {}
15070 #endif /* DEBUG_CONSISTENCY */
/* Top-level optimization pipeline: flatten structures, build basic
 * blocks and dominator info, convert to SSA, then (by -O level) run
 * simplification and SCC constant propagation, lower to architecture
 * instructions, insert phi/mandatory copies, and allocate registers.
 * verify_consistency() gates each stage; debug flags dump intermediate
 * representations along the way.
 */
15072 static void optimize(struct compile_state *state)
15074 if (state->debug & DEBUG_TRIPLES) {
15075 print_triples(state);
15077 /* Replace structures with simpler data types */
15078 flatten_structures(state);
15079 if (state->debug & DEBUG_TRIPLES) {
15080 print_triples(state);
15082 verify_consistency(state);
15083 /* Analyze the intermediate code */
15084 setup_basic_blocks(state);
15085 analyze_idominators(state);
15086 analyze_ipdominators(state);
15088 /* Transform the code to ssa form. */
15090 * The transformation to ssa form puts a phi function
15091 * on each of edge of a dominance frontier where that
15092 * phi function might be needed. At -O2 if we don't
15093 * eliminate the excess phi functions we can get an
15094 * exponential code size growth. So I kill the extra
15095 * phi functions early and I kill them often.
15097 transform_to_ssa_form(state);
15098 eliminate_inefectual_code(state);
15100 verify_consistency(state);
15101 if (state->debug & DEBUG_CODE_ELIMINATION) {
15102 fprintf(stdout, "After transform_to_ssa_form\n");
15103 print_blocks(state, stdout);
15105 /* Do strength reduction and simple constant optimizations */
15106 if (state->optimize >= 1) {
15107 simplify_all(state);
/* rebuild blocks/dominators and SSA after the rewrite */
15108 transform_from_ssa_form(state);
15109 free_basic_blocks(state);
15110 setup_basic_blocks(state);
15111 analyze_idominators(state);
15112 analyze_ipdominators(state);
15113 transform_to_ssa_form(state);
15114 eliminate_inefectual_code(state);
15116 if (state->debug & DEBUG_CODE_ELIMINATION) {
15117 fprintf(stdout, "After simplify_all\n");
15118 print_blocks(state, stdout);
15120 verify_consistency(state);
15121 /* Propagate constants throughout the code */
15122 if (state->optimize >= 2) {
15123 scc_transform(state);
/* again rebuild blocks/dominators and SSA after the rewrite */
15124 transform_from_ssa_form(state);
15125 free_basic_blocks(state);
15126 setup_basic_blocks(state);
15127 analyze_idominators(state);
15128 analyze_ipdominators(state);
15129 transform_to_ssa_form(state);
15130 eliminate_inefectual_code(state);
15132 verify_consistency(state);
15133 #warning "WISHLIST implement single use constants (least possible register pressure)"
15134 #warning "WISHLIST implement induction variable elimination"
15135 /* Select architecture instructions and an initial partial
15136 * coloring based on architecture constraints.
15138 transform_to_arch_instructions(state);
15139 verify_consistency(state);
15140 if (state->debug & DEBUG_ARCH_CODE) {
15141 printf("After transform_to_arch_instructions\n");
15142 print_blocks(state, stdout);
15143 print_control_flow(state);
15145 eliminate_inefectual_code(state);
15146 verify_consistency(state);
15147 if (state->debug & DEBUG_CODE_ELIMINATION) {
15148 printf("After eliminate_inefectual_code\n");
15149 print_blocks(state, stdout);
15150 print_control_flow(state);
15152 verify_consistency(state);
15153 /* Color all of the variables to see if they will fit in registers */
15154 insert_copies_to_phi(state);
15155 if (state->debug & DEBUG_INSERTED_COPIES) {
15156 printf("After insert_copies_to_phi\n");
15157 print_blocks(state, stdout);
15158 print_control_flow(state);
15160 verify_consistency(state);
15161 insert_mandatory_copies(state);
15162 if (state->debug & DEBUG_INSERTED_COPIES) {
15163 printf("After insert_mandatory_copies\n");
15164 print_blocks(state, stdout);
15165 print_control_flow(state);
15167 verify_consistency(state);
15168 allocate_registers(state);
15169 verify_consistency(state);
15170 if (state->debug & DEBUG_INTERMEDIATE_CODE) {
15171 print_blocks(state, stdout);
15173 if (state->debug & DEBUG_CONTROL_FLOW) {
15174 print_control_flow(state);
15176 /* Remove the optimization information.
15177 * This is more to check for memory consistency than to free memory.
15179 free_basic_blocks(state);
/* Emit an inline-asm triple to the output file.
 * The template string (ins->u.ainfo->str) is copied out between
 * "#ASM" / "#NOT ASM" markers, with %N parameter references replaced
 * by the name of the register allocated to that operand: indices
 * 0..lhs-1 select outputs, lhs..lhs+rhs-1 select inputs.
 */
15182 static void print_op_asm(struct compile_state *state,
15183 struct triple *ins, FILE *fp)
15185 struct asm_info *info;
15187 unsigned lhs, rhs, i;
15188 info = ins->u.ainfo;
15189 lhs = TRIPLE_LHS(ins->sizes);
15190 rhs = TRIPLE_RHS(ins->sizes);
15191 /* Don't count the clobbers in lhs */
15192 for(i = 0; i < lhs; i++) {
/* void-typed lhs entries are clobbers, not real outputs */
15193 if (LHS(ins, i)->type == &void_type) {
15198 fprintf(fp, "#ASM\n");
15200 for(ptr = info->str; *ptr; ptr++) {
15202 unsigned long param;
15203 struct triple *piece;
/* parse the decimal operand number following '%' */
15213 param = strtoul(ptr, &next, 10);
15215 error(state, ins, "Invalid asm template");
15217 if (param >= (lhs + rhs)) {
15218 error(state, ins, "Invalid param %%%u in asm template",
/* outputs come first, then inputs */
15221 piece = (param < lhs)? LHS(ins, param) : RHS(ins, param - lhs);
15223 arch_reg_str(ID_REG(piece->id)));
15226 fprintf(fp, "\n#NOT ASM\n");
15230 /* Only use the low x86 byte registers. This allows me
15231 * allocate the entire register when a byte register is used.
15233 #define X86_4_8BIT_GPRS 1
15235 /* Recognized x86 cpu variants */
15243 #define CPU_DEFAULT CPU_I386
15245 /* The x86 register classes */
15246 #define REGC_FLAGS 0
15247 #define REGC_GPR8 1
15248 #define REGC_GPR16 2
15249 #define REGC_GPR32 3
15250 #define REGC_DIVIDEND64 4
15251 #define REGC_DIVIDEND32 5
15254 #define REGC_GPR32_8 8
15255 #define REGC_GPR16_8 9
15256 #define REGC_GPR8_LO 10
15257 #define REGC_IMM32 11
15258 #define REGC_IMM16 12
15259 #define REGC_IMM8 13
15260 #define LAST_REGC REGC_IMM8
15261 #if LAST_REGC >= MAX_REGC
15262 #error "MAX_REGC is to low"
15265 /* Register class masks */
15266 #define REGCM_FLAGS (1 << REGC_FLAGS)
15267 #define REGCM_GPR8 (1 << REGC_GPR8)
15268 #define REGCM_GPR16 (1 << REGC_GPR16)
15269 #define REGCM_GPR32 (1 << REGC_GPR32)
15270 #define REGCM_DIVIDEND64 (1 << REGC_DIVIDEND64)
15271 #define REGCM_DIVIDEND32 (1 << REGC_DIVIDEND32)
15272 #define REGCM_MMX (1 << REGC_MMX)
15273 #define REGCM_XMM (1 << REGC_XMM)
15274 #define REGCM_GPR32_8 (1 << REGC_GPR32_8)
15275 #define REGCM_GPR16_8 (1 << REGC_GPR16_8)
15276 #define REGCM_GPR8_LO (1 << REGC_GPR8_LO)
15277 #define REGCM_IMM32 (1 << REGC_IMM32)
15278 #define REGCM_IMM16 (1 << REGC_IMM16)
15279 #define REGCM_IMM8 (1 << REGC_IMM8)
15280 #define REGCM_ALL ((1 << (LAST_REGC + 1)) - 1)
15282 /* The x86 registers */
15283 #define REG_EFLAGS 2
15284 #define REGC_FLAGS_FIRST REG_EFLAGS
15285 #define REGC_FLAGS_LAST REG_EFLAGS
15294 #define REGC_GPR8_LO_FIRST REG_AL
15295 #define REGC_GPR8_LO_LAST REG_DL
15296 #define REGC_GPR8_FIRST REG_AL
15297 #define REGC_GPR8_LAST REG_DH
15306 #define REGC_GPR16_FIRST REG_AX
15307 #define REGC_GPR16_LAST REG_SP
15316 #define REGC_GPR32_FIRST REG_EAX
15317 #define REGC_GPR32_LAST REG_ESP
15318 #define REG_EDXEAX 27
15319 #define REGC_DIVIDEND64_FIRST REG_EDXEAX
15320 #define REGC_DIVIDEND64_LAST REG_EDXEAX
15321 #define REG_DXAX 28
15322 #define REGC_DIVIDEND32_FIRST REG_DXAX
15323 #define REGC_DIVIDEND32_LAST REG_DXAX
15324 #define REG_MMX0 29
15325 #define REG_MMX1 30
15326 #define REG_MMX2 31
15327 #define REG_MMX3 32
15328 #define REG_MMX4 33
15329 #define REG_MMX5 34
15330 #define REG_MMX6 35
15331 #define REG_MMX7 36
15332 #define REGC_MMX_FIRST REG_MMX0
15333 #define REGC_MMX_LAST REG_MMX7
15334 #define REG_XMM0 37
15335 #define REG_XMM1 38
15336 #define REG_XMM2 39
15337 #define REG_XMM3 40
15338 #define REG_XMM4 41
15339 #define REG_XMM5 42
15340 #define REG_XMM6 43
15341 #define REG_XMM7 44
15342 #define REGC_XMM_FIRST REG_XMM0
15343 #define REGC_XMM_LAST REG_XMM7
15344 #warning "WISHLIST figure out how to use pinsrw and pextrw to better use extended regs"
15345 #define LAST_REG REG_XMM7
15347 #define REGC_GPR32_8_FIRST REG_EAX
15348 #define REGC_GPR32_8_LAST REG_EDX
15349 #define REGC_GPR16_8_FIRST REG_AX
15350 #define REGC_GPR16_8_LAST REG_DX
15352 #define REGC_IMM8_FIRST -1
15353 #define REGC_IMM8_LAST -1
15354 #define REGC_IMM16_FIRST -2
15355 #define REGC_IMM16_LAST -1
15356 #define REGC_IMM32_FIRST -4
15357 #define REGC_IMM32_LAST -1
15359 #if LAST_REG >= MAX_REGISTERS
15360 #error "MAX_REGISTERS to low"
/* Number of registers in each x86 register class, derived from the
 * REGC_*_FIRST/LAST bounds via designated initializers.
 * (Immediate-class entries are elided from this listing.)
 */
15364 static unsigned regc_size[LAST_REGC +1] = {
15365 [REGC_FLAGS] = REGC_FLAGS_LAST - REGC_FLAGS_FIRST + 1,
15366 [REGC_GPR8] = REGC_GPR8_LAST - REGC_GPR8_FIRST + 1,
15367 [REGC_GPR16] = REGC_GPR16_LAST - REGC_GPR16_FIRST + 1,
15368 [REGC_GPR32] = REGC_GPR32_LAST - REGC_GPR32_FIRST + 1,
15369 [REGC_DIVIDEND64] = REGC_DIVIDEND64_LAST - REGC_DIVIDEND64_FIRST + 1,
15370 [REGC_DIVIDEND32] = REGC_DIVIDEND32_LAST - REGC_DIVIDEND32_FIRST + 1,
15371 [REGC_MMX] = REGC_MMX_LAST - REGC_MMX_FIRST + 1,
15372 [REGC_XMM] = REGC_XMM_LAST - REGC_XMM_FIRST + 1,
15373 [REGC_GPR32_8] = REGC_GPR32_8_LAST - REGC_GPR32_8_FIRST + 1,
15374 [REGC_GPR16_8] = REGC_GPR16_8_LAST - REGC_GPR16_8_FIRST + 1,
15375 [REGC_GPR8_LO] = REGC_GPR8_LO_LAST - REGC_GPR8_LO_FIRST + 1,
/* [first, last] register-number bounds for each x86 register class.
 * Used by arch_regcm_normalize (class containment) and arch_reg_regcm
 * (register -> class mask).  Immediate classes use negative pseudo
 * register numbers.
 */
15381 static const struct {
15383 } regcm_bound[LAST_REGC + 1] = {
15384 [REGC_FLAGS] = { REGC_FLAGS_FIRST, REGC_FLAGS_LAST },
15385 [REGC_GPR8] = { REGC_GPR8_FIRST, REGC_GPR8_LAST },
15386 [REGC_GPR16] = { REGC_GPR16_FIRST, REGC_GPR16_LAST },
15387 [REGC_GPR32] = { REGC_GPR32_FIRST, REGC_GPR32_LAST },
15388 [REGC_DIVIDEND64] = { REGC_DIVIDEND64_FIRST, REGC_DIVIDEND64_LAST },
15389 [REGC_DIVIDEND32] = { REGC_DIVIDEND32_FIRST, REGC_DIVIDEND32_LAST },
15390 [REGC_MMX] = { REGC_MMX_FIRST, REGC_MMX_LAST },
15391 [REGC_XMM] = { REGC_XMM_FIRST, REGC_XMM_LAST },
15392 [REGC_GPR32_8] = { REGC_GPR32_8_FIRST, REGC_GPR32_8_LAST },
15393 [REGC_GPR16_8] = { REGC_GPR16_8_FIRST, REGC_GPR16_8_LAST },
15394 [REGC_GPR8_LO] = { REGC_GPR8_LO_FIRST, REGC_GPR8_LO_LAST },
15395 [REGC_IMM32] = { REGC_IMM32_FIRST, REGC_IMM32_LAST },
15396 [REGC_IMM16] = { REGC_IMM16_FIRST, REGC_IMM16_LAST },
15397 [REGC_IMM8] = { REGC_IMM8_FIRST, REGC_IMM8_LAST },
/* Translate a cpu name string (e.g. "i386") into its CPU_* enum value
 * by linear search of a name table.  (Table tail and the not-found
 * return are elided from this listing.)
 */
15400 static int arch_encode_cpu(const char *cpu)
15406 { "i386", CPU_I386 },
15414 for(ptr = cpus; ptr->name; ptr++) {
15415 if (strcmp(ptr->name, cpu) == 0) {
/* Return the number of registers in a register class; class indices
 * outside [0, LAST_REGC] are rejected (error path elided in listing).
 */
15422 static unsigned arch_regc_size(struct compile_state *state, int class)
15424 if ((class < 0) || (class > LAST_REGC)) {
15427 return regc_size[class];
/* Decide whether two register-class masks can share physical registers.
 * Two masks intersect when they have a class in common, or when both
 * include any GPR-family class (8/16/32-bit GPRs and the dividend
 * pseudo-registers all overlap physically).  Pure-immediate masks
 * trivially intersect with each other.
 */
15430 static int arch_regcm_intersect(unsigned regcm1, unsigned regcm2)
15432 /* See if two register classes may have overlapping registers */
15433 unsigned gpr_mask = REGCM_GPR8 | REGCM_GPR8_LO | REGCM_GPR16_8 | REGCM_GPR16 |
15434 REGCM_GPR32_8 | REGCM_GPR32 |
15435 REGCM_DIVIDEND32 | REGCM_DIVIDEND64;
15437 /* Special case for the immediates */
15438 if ((regcm1 & (REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) &&
15439 ((regcm1 & ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) == 0) &&
15440 (regcm2 & (REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) &&
15441 ((regcm2 & ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) == 0)) {
15444 return (regcm1 & regcm2) ||
15445 ((regcm1 & gpr_mask) && (regcm2 & gpr_mask));
/* Fill *equiv with the registers that physically overlap 'reg'
 * (e.g. an 8-bit GPR overlaps its 16/32-bit parents and the DXAX /
 * EDXEAX dividend pseudo-registers), terminated by REG_UNSET.
 * With X86_4_8BIT_GPRS the 8-bit cases also add their sub-register
 * aliases.  (The switch/case labels are elided from this listing;
 * only the equivalence writes are visible.)
 */
15448 static void arch_reg_equivs(
15449 struct compile_state *state, unsigned *equiv, int reg)
15451 if ((reg < 0) || (reg > LAST_REG)) {
15452 internal_error(state, 0, "invalid register");
15457 #if X86_4_8BIT_GPRS
15461 *equiv++ = REG_EAX;
15462 *equiv++ = REG_DXAX;
15463 *equiv++ = REG_EDXEAX;
15466 #if X86_4_8BIT_GPRS
15470 *equiv++ = REG_EAX;
15471 *equiv++ = REG_DXAX;
15472 *equiv++ = REG_EDXEAX;
15475 #if X86_4_8BIT_GPRS
15479 *equiv++ = REG_EBX;
15483 #if X86_4_8BIT_GPRS
15487 *equiv++ = REG_EBX;
15490 #if X86_4_8BIT_GPRS
15494 *equiv++ = REG_ECX;
15498 #if X86_4_8BIT_GPRS
15502 *equiv++ = REG_ECX;
15505 #if X86_4_8BIT_GPRS
15509 *equiv++ = REG_EDX;
15510 *equiv++ = REG_DXAX;
15511 *equiv++ = REG_EDXEAX;
15514 #if X86_4_8BIT_GPRS
15518 *equiv++ = REG_EDX;
15519 *equiv++ = REG_DXAX;
15520 *equiv++ = REG_EDXEAX;
15525 *equiv++ = REG_EAX;
15526 *equiv++ = REG_DXAX;
15527 *equiv++ = REG_EDXEAX;
15532 *equiv++ = REG_EBX;
15537 *equiv++ = REG_ECX;
15542 *equiv++ = REG_EDX;
15543 *equiv++ = REG_DXAX;
15544 *equiv++ = REG_EDXEAX;
15547 *equiv++ = REG_ESI;
15550 *equiv++ = REG_EDI;
15553 *equiv++ = REG_EBP;
15556 *equiv++ = REG_ESP;
15562 *equiv++ = REG_DXAX;
15563 *equiv++ = REG_EDXEAX;
15579 *equiv++ = REG_DXAX;
15580 *equiv++ = REG_EDXEAX;
15601 *equiv++ = REG_EAX;
15602 *equiv++ = REG_EDX;
15603 *equiv++ = REG_EDXEAX;
15612 *equiv++ = REG_EAX;
15613 *equiv++ = REG_EDX;
15614 *equiv++ = REG_DXAX;
/* terminate the equivalence list */
15617 *equiv++ = REG_UNSET;
/* Mask of register classes usable on the configured cpu.  The base set
 * excludes REGCM_GPR8 (high-byte registers); MMX and XMM classes are
 * added depending on state->cpu (case labels elided in this listing).
 */
15620 static unsigned arch_avail_mask(struct compile_state *state)
15622 unsigned avail_mask;
15623 /* REGCM_GPR8 is not available */
15624 avail_mask = REGCM_GPR8_LO | REGCM_GPR16_8 | REGCM_GPR16 |
15625 REGCM_GPR32 | REGCM_GPR32_8 |
15626 REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
15627 REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8 | REGCM_FLAGS;
15628 switch(state->cpu) {
15631 avail_mask |= REGCM_MMX;
15635 avail_mask |= REGCM_MMX | REGCM_XMM;
/* Close a register-class mask under containment: for every class set
 * in the mask, also set every class whose [first, last] register range
 * lies entirely within it, then intersect with the cpu's available
 * classes.
 */
15641 static unsigned arch_regcm_normalize(struct compile_state *state, unsigned regcm)
15643 unsigned mask, result;
15647 for(class = 0, mask = 1; mask; mask <<= 1, class++) {
15648 if ((result & mask) == 0) {
15651 if (class > LAST_REGC) {
15654 for(class2 = 0; class2 <= LAST_REGC; class2++) {
/* class2 contained in class -> add it to the mask */
15655 if ((regcm_bound[class2].first >= regcm_bound[class].first) &&
15656 (regcm_bound[class2].last <= regcm_bound[class].last)) {
15657 result |= (1 << class2);
15661 result &= arch_avail_mask(state);
/* arch_regcm_reg_normalize - normalize a register-class mask and then strip
 * the immediate-operand pseudo-classes, leaving only real registers.
 */
15665 static unsigned arch_regcm_reg_normalize(struct compile_state *state, unsigned regcm)
15667 	/* Like arch_regcm_normalize except immediate register classes are excluded */
15668 	regcm = arch_regcm_normalize(state, regcm);
15669 	/* Remove the immediate register classes */
15670 	regcm &= ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8);
/* arch_reg_regcm - return the mask of all register classes whose
 * [first,last] register range contains 'reg'.  It is an internal error for
 * a register to belong to no class.
 */
15675 static unsigned arch_reg_regcm(struct compile_state *state, int reg)
15680 	for(class = 0; class <= LAST_REGC; class++) {
15681 		if ((reg >= regcm_bound[class].first) &&
15682 			(reg <= regcm_bound[class].last)) {
15683 			mask |= (1 << class);
/* Reached only if the loop found no containing class (guard not visible
 * in this sampled view). */
15687 		internal_error(state, 0, "reg %d not in any class", reg);
/* arch_reg_constraint - translate a GCC-style asm constraint string into a
 * reg_info: a register-class mask plus (optionally) one specific register.
 * Errors out on unknown constraint letters, on constraints incompatible
 * with the operand type, and on conflicting specific-register requests.
 */
15692 static struct reg_info arch_reg_constraint(
15693 	struct compile_state *state, struct type *type, const char *constraint)
/* Table mapping constraint letters to a class mask and an optional fixed
 * register (REG_UNSET means "any register of the class"). */
15695 	static const struct {
15699 	} constraints[] = {
15700 		{ 'r', REGCM_GPR32, REG_UNSET },
15701 		{ 'g', REGCM_GPR32, REG_UNSET },
15702 		{ 'p', REGCM_GPR32, REG_UNSET },
15703 		{ 'q', REGCM_GPR8_LO, REG_UNSET },
15704 		{ 'Q', REGCM_GPR32_8, REG_UNSET },
15705 		{ 'x', REGCM_XMM, REG_UNSET },
15706 		{ 'y', REGCM_MMX, REG_UNSET },
15707 		{ 'a', REGCM_GPR32, REG_EAX },
15708 		{ 'b', REGCM_GPR32, REG_EBX },
15709 		{ 'c', REGCM_GPR32, REG_ECX },
15710 		{ 'd', REGCM_GPR32, REG_EDX },
15711 		{ 'D', REGCM_GPR32, REG_EDI },
15712 		{ 'S', REGCM_GPR32, REG_ESI },
15713 		{ '\0', 0, REG_UNSET },
15715 	unsigned int regcm;
15716 	unsigned int mask, reg;
15717 	struct reg_info result;
/* Classes legal for the operand's type; each constraint letter must
 * intersect this. */
15719 	regcm = arch_type_to_regcm(state, type);
15722 	for(ptr = constraint; *ptr; ptr++) {
/* Linear search for the constraint letter; the '\0' sentinel ends it. */
15727 		for(i = 0; constraints[i].class != '\0'; i++) {
15728 			if (constraints[i].class == *ptr) {
15732 		if (constraints[i].class == '\0') {
15733 			error(state, 0, "invalid register constraint ``%c''", *ptr);
15736 		if ((constraints[i].mask & regcm) == 0) {
15737 			error(state, 0, "invalid register class %c specified",
/* Accumulate allowed classes; at most one specific register may be named. */
15740 		mask |= constraints[i].mask;
15741 		if (constraints[i].reg != REG_UNSET) {
15742 			if ((reg != REG_UNSET) && (reg != constraints[i].reg)) {
15743 				error(state, 0, "Only one register may be specified");
15745 			reg = constraints[i].reg;
15749 	result.regcm = mask;
/* arch_reg_clobber - translate an asm clobber string ("%eax", "cc",
 * "memory", "xmmN", "mmxN") into a reg_info naming the clobbered register
 * and its class.  Unknown strings raise an error and yield REG_UNSET.
 */
15753 static struct reg_info arch_reg_clobber(
15754 	struct compile_state *state, const char *clobber)
15756 	struct reg_info result;
/* "memory" clobbers no specific register. */
15757 	if (strcmp(clobber, "memory") == 0) {
15758 		result.reg = REG_UNSET;
15761 	else if (strcmp(clobber, "%eax") == 0) {
15762 		result.reg = REG_EAX;
15763 		result.regcm = REGCM_GPR32;
15765 	else if (strcmp(clobber, "%ebx") == 0) {
15766 		result.reg = REG_EBX;
15767 		result.regcm = REGCM_GPR32;
15769 	else if (strcmp(clobber, "%ecx") == 0) {
15770 		result.reg = REG_ECX;
15771 		result.regcm = REGCM_GPR32;
15773 	else if (strcmp(clobber, "%edx") == 0) {
15774 		result.reg = REG_EDX;
15775 		result.regcm = REGCM_GPR32;
15777 	else if (strcmp(clobber, "%esi") == 0) {
15778 		result.reg = REG_ESI;
15779 		result.regcm = REGCM_GPR32;
15781 	else if (strcmp(clobber, "%edi") == 0) {
15782 		result.reg = REG_EDI;
15783 		result.regcm = REGCM_GPR32;
15785 	else if (strcmp(clobber, "%ebp") == 0) {
15786 		result.reg = REG_EBP;
15787 		result.regcm = REGCM_GPR32;
15789 	else if (strcmp(clobber, "%esp") == 0) {
15790 		result.reg = REG_ESP;
15791 		result.regcm = REGCM_GPR32;
/* "cc" is the condition-codes clobber: the EFLAGS register. */
15793 	else if (strcmp(clobber, "cc") == 0) {
15794 		result.reg = REG_EFLAGS;
15795 		result.regcm = REGCM_FLAGS;
/* "xmm0".."xmm7": the trailing digit selects the XMM register.  Note the
 * digit is parsed with the octal helpers, so only 0-7 are accepted. */
15797 	else if ((strncmp(clobber, "xmm", 3) == 0) &&
15798 		octdigitp(clobber[3]) && (clobber[4] == '\0')) {
15799 		result.reg = REG_XMM0 + octdigval(clobber[3]);
15800 		result.regcm = REGCM_XMM;
15802 	else if ((strncmp(clobber, "mmx", 3) == 0) &&
15803 		octdigitp(clobber[3]) && (clobber[4] == '\0')) {
15804 		result.reg = REG_MMX0 + octdigval(clobber[3]);
15805 		result.regcm = REGCM_MMX;
15808 		error(state, 0, "Invalid register clobber");
15809 		result.reg = REG_UNSET;
/* do_select_reg - return 'reg' if it belongs to one of the requested
 * 'classes', otherwise REG_UNSET.  'used' presumably filters out occupied
 * registers in code not visible in this sampled view — TODO confirm.
 */
15815 static int do_select_reg(struct compile_state *state,
15816 	char *used, int reg, unsigned classes)
15822 	mask = arch_reg_regcm(state, reg);
15823 	return (classes & mask) ? reg : REG_UNSET;
/* arch_select_free_register - pick a free register satisfying 'classes',
 * scanning register classes in an order chosen to exhaust plentiful classes
 * (XMM, MMX) before scarce ones (GPR32 and below), as the comment explains.
 * Returns REG_UNSET when nothing suitable is free.
 */
15826 static int arch_select_free_register(
15827 	struct compile_state *state, char *used, int classes)
15829 	/* Live ranges with the most neighbors are colored first.
15831 	 * Generally it does not matter which colors are given
15832 	 * as the register allocator attempts to color live ranges
15833 	 * in an order where you are guaranteed not to run out of colors.
15835 	 * Occasionally the register allocator cannot find an order
15836 	 * of register selection that will find a free color.  To
15837 	 * increase the odds the register allocator will work when
15838 	 * it guesses first give out registers from register classes
15839 	 * least likely to run out of registers.
15844 	for(i = REGC_XMM_FIRST; (reg == REG_UNSET) && (i <= REGC_XMM_LAST); i++) {
15845 		reg = do_select_reg(state, used, i, classes);
15847 	for(i = REGC_MMX_FIRST; (reg == REG_UNSET) && (i <= REGC_MMX_LAST); i++) {
15848 		reg = do_select_reg(state, used, i, classes);
/* GPR32 is scanned high-to-low, unlike the other classes. */
15850 	for(i = REGC_GPR32_LAST; (reg == REG_UNSET) && (i >= REGC_GPR32_FIRST); i--) {
15851 		reg = do_select_reg(state, used, i, classes);
15853 	for(i = REGC_GPR16_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR16_LAST); i++) {
15854 		reg = do_select_reg(state, used, i, classes);
15856 	for(i = REGC_GPR8_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR8_LAST); i++) {
15857 		reg = do_select_reg(state, used, i, classes);
15859 	for(i = REGC_GPR8_LO_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR8_LO_LAST); i++) {
15860 		reg = do_select_reg(state, used, i, classes);
15862 	for(i = REGC_DIVIDEND32_FIRST; (reg == REG_UNSET) && (i <= REGC_DIVIDEND32_LAST); i++) {
15863 		reg = do_select_reg(state, used, i, classes);
15865 	for(i = REGC_DIVIDEND64_FIRST; (reg == REG_UNSET) && (i <= REGC_DIVIDEND64_LAST); i++) {
15866 		reg = do_select_reg(state, used, i, classes);
15868 	for(i = REGC_FLAGS_FIRST; (reg == REG_UNSET) && (i <= REGC_FLAGS_LAST); i++) {
15869 		reg = do_select_reg(state, used, i, classes);
/* arch_type_to_regcm - map a source-language type to the mask of register
 * classes that can hold a value of that type.  Wider types exclude the
 * narrower register classes; the result is normalized for the current CPU.
 */
15875 static unsigned arch_type_to_regcm(struct compile_state *state, struct type *type)
15877 #warning "FIXME force types smaller (if legal) before I get here"
15880 	switch(type->type & TYPE_MASK) {
/* 8-bit types: any register or immediate class fits. */
15887 		mask = REGCM_GPR8 | REGCM_GPR8_LO |
15888 			REGCM_GPR16 | REGCM_GPR16_8 |
15889 			REGCM_GPR32 | REGCM_GPR32_8 |
15890 			REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
15891 			REGCM_MMX | REGCM_XMM |
15892 			REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8;
/* 16-bit types: no 8-bit registers or 8-bit immediates. */
15896 		mask = REGCM_GPR16 | REGCM_GPR16_8 |
15897 			REGCM_GPR32 | REGCM_GPR32_8 |
15898 			REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
15899 			REGCM_MMX | REGCM_XMM |
15900 			REGCM_IMM32 | REGCM_IMM16;
/* 32-bit types: 32-bit registers and wider only. */
15907 		mask = REGCM_GPR32 | REGCM_GPR32_8 |
15908 			REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
15909 			REGCM_MMX | REGCM_XMM |
15913 		internal_error(state, 0, "no register class for type");
15916 	mask = arch_regcm_normalize(state, mask);
/* is_imm32 - true if 'imm' is an integer constant fitting in 32 bits, or
 * an address constant (addresses are always 32-bit on this target).
 */
15920 static int is_imm32(struct triple *imm)
15922 	return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xffffffffUL)) ||
15923 		(imm->op == OP_ADDRCONST);
/* is_imm16 - true if 'imm' is an integer constant fitting in 16 bits. */
15926 static int is_imm16(struct triple *imm)
15928 	return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xffff));
/* is_imm8 - true if 'imm' is an integer constant fitting in 8 bits. */
15930 static int is_imm8(struct triple *imm)
15932 	return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xff));
/* get_imm32 - if the operand at *expr (chasing through OP_COPY chains) is a
 * 32-bit immediate, rewrite 'ins' to reference the constant directly and
 * fix up the use lists.  Returns nonzero on success (return statements not
 * visible in this sampled view — TODO confirm).
 */
15935 static int get_imm32(struct triple *ins, struct triple **expr)
15937 	struct triple *imm;
/* Skip copies to reach the underlying value. */
15939 	while(imm->op == OP_COPY) {
15942 	if (!is_imm32(imm)) {
/* Retarget the use: drop the old operand, use the constant. */
15945 	unuse_triple(*expr, ins);
15946 	use_triple(imm, ins);
/* get_imm8 - same as get_imm32 but requires the constant to fit in 8 bits. */
15951 static int get_imm8(struct triple *ins, struct triple **expr)
15953 	struct triple *imm;
/* Skip copies to reach the underlying value. */
15955 	while(imm->op == OP_COPY) {
15958 	if (!is_imm8(imm)) {
/* Retarget the use: drop the old operand, use the constant. */
15961 	unuse_triple(*expr, ins);
15962 	use_triple(imm, ins);
/* Instruction template identifiers.  Each generated instruction is tagged
 * with one of these (triple->template_id); the templates[] table below maps
 * the id to the register constraints of its operands.  The groups encode
 * operand width (8/16/32) and operand kind (REG vs IMM, CL vs IMM shifts,
 * DX vs IMM port I/O).
 */
15967 #define TEMPLATE_NOP         0
15968 #define TEMPLATE_INTCONST8   1
15969 #define TEMPLATE_INTCONST32  2
15970 #define TEMPLATE_COPY8_REG   3
15971 #define TEMPLATE_COPY16_REG  4
15972 #define TEMPLATE_COPY32_REG  5
15973 #define TEMPLATE_COPY_IMM8   6
15974 #define TEMPLATE_COPY_IMM16  7
15975 #define TEMPLATE_COPY_IMM32  8
15976 #define TEMPLATE_PHI8        9
15977 #define TEMPLATE_PHI16      10
15978 #define TEMPLATE_PHI32      11
15979 #define TEMPLATE_STORE8     12
15980 #define TEMPLATE_STORE16    13
15981 #define TEMPLATE_STORE32    14
15982 #define TEMPLATE_LOAD8      15
15983 #define TEMPLATE_LOAD16     16
15984 #define TEMPLATE_LOAD32     17
15985 #define TEMPLATE_BINARY8_REG  18
15986 #define TEMPLATE_BINARY16_REG 19
15987 #define TEMPLATE_BINARY32_REG 20
15988 #define TEMPLATE_BINARY8_IMM  21
15989 #define TEMPLATE_BINARY16_IMM 22
15990 #define TEMPLATE_BINARY32_IMM 23
15991 #define TEMPLATE_SL8_CL     24
15992 #define TEMPLATE_SL16_CL    25
15993 #define TEMPLATE_SL32_CL    26
15994 #define TEMPLATE_SL8_IMM    27
15995 #define TEMPLATE_SL16_IMM   28
15996 #define TEMPLATE_SL32_IMM   29
15997 #define TEMPLATE_UNARY8     30
15998 #define TEMPLATE_UNARY16    31
15999 #define TEMPLATE_UNARY32    32
16000 #define TEMPLATE_CMP8_REG   33
16001 #define TEMPLATE_CMP16_REG  34
16002 #define TEMPLATE_CMP32_REG  35
16003 #define TEMPLATE_CMP8_IMM   36
16004 #define TEMPLATE_CMP16_IMM  37
16005 #define TEMPLATE_CMP32_IMM  38
16006 #define TEMPLATE_TEST8      39
16007 #define TEMPLATE_TEST16     40
16008 #define TEMPLATE_TEST32     41
16009 #define TEMPLATE_SET        42
16010 #define TEMPLATE_JMP        43
/* Port I/O templates: the *_DX variants take the port in DX; the *_IMM
 * variants (always DX id + 1, which instruction selection relies on) take
 * an 8-bit immediate port. */
16011 #define TEMPLATE_INB_DX     44
16012 #define TEMPLATE_INB_IMM    45
16013 #define TEMPLATE_INW_DX     46
16014 #define TEMPLATE_INW_IMM    47
16015 #define TEMPLATE_INL_DX     48
16016 #define TEMPLATE_INL_IMM    49
16017 #define TEMPLATE_OUTB_DX    50
16018 #define TEMPLATE_OUTB_IMM   51
16019 #define TEMPLATE_OUTW_DX    52
16020 #define TEMPLATE_OUTW_IMM   53
16021 #define TEMPLATE_OUTL_DX    54
16022 #define TEMPLATE_OUTL_IMM   55
16023 #define TEMPLATE_BSF        56
16024 #define TEMPLATE_RDMSR      57
16025 #define TEMPLATE_WRMSR      58
16026 #define TEMPLATE_UMUL8      59
16027 #define TEMPLATE_UMUL16     60
16028 #define TEMPLATE_UMUL32     61
16029 #define TEMPLATE_DIV8       62
16030 #define TEMPLATE_DIV16      63
16031 #define TEMPLATE_DIV32      64
16032 #define LAST_TEMPLATE       TEMPLATE_DIV32
/* Compile-time guard: the template table must fit in MAX_TEMPLATES. */
16033 #if LAST_TEMPLATE >= MAX_TEMPLATES
16034 #error "MAX_TEMPLATES to low"
/* Register-class masks usable for copies of each width; wider copies can
 * use fewer classes (e.g. 8-bit copies need REGCM_GPR8_LO). */
16037 #define COPY8_REGCM  (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO | REGCM_MMX | REGCM_XMM)
16038 #define COPY16_REGCM (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_GPR16 | REGCM_MMX | REGCM_XMM)
16039 #define COPY32_REGCM (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_MMX | REGCM_XMM)
/* templates[] - per-template operand constraints.  Each entry gives, for
 * the instruction's lhs (results) and rhs (operands), either a specific
 * register (REG_EAX, REG_CL, ...), a virtual tie (REG_VIRT0 ties lhs and
 * rhs[0] to the same register, matching x86 two-address form), REG_UNSET
 * (any register of the class), or REG_UNNEEDED (immediates), together with
 * the allowed register-class mask.
 */
16042 static struct ins_template templates[] = {
16043 	[TEMPLATE_NOP]      = {},
16044 	[TEMPLATE_INTCONST8] = {
16045 		.lhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16047 	[TEMPLATE_INTCONST32] = {
16048 		.lhs = { [0] = { REG_UNNEEDED, REGCM_IMM32 } },
16050 	[TEMPLATE_COPY8_REG] = {
16051 		.lhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16052 		.rhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16054 	[TEMPLATE_COPY16_REG] = {
16055 		.lhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16056 		.rhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16058 	[TEMPLATE_COPY32_REG] = {
16059 		.lhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16060 		.rhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16062 	[TEMPLATE_COPY_IMM8] = {
16063 		.lhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16064 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16066 	[TEMPLATE_COPY_IMM16] = {
16067 		.lhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16068 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM16 | REGCM_IMM8 } },
16070 	[TEMPLATE_COPY_IMM32] = {
16071 		.lhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16072 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8 } },
/* Phi templates tie the result and all (up to 16) inputs to one virtual
 * register so every predecessor edge delivers into the same location. */
16074 	[TEMPLATE_PHI8] = {
16075 		.lhs = { [0] = { REG_VIRT0, COPY8_REGCM } },
16077 			[ 0] = { REG_VIRT0, COPY8_REGCM },
16078 			[ 1] = { REG_VIRT0, COPY8_REGCM },
16079 			[ 2] = { REG_VIRT0, COPY8_REGCM },
16080 			[ 3] = { REG_VIRT0, COPY8_REGCM },
16081 			[ 4] = { REG_VIRT0, COPY8_REGCM },
16082 			[ 5] = { REG_VIRT0, COPY8_REGCM },
16083 			[ 6] = { REG_VIRT0, COPY8_REGCM },
16084 			[ 7] = { REG_VIRT0, COPY8_REGCM },
16085 			[ 8] = { REG_VIRT0, COPY8_REGCM },
16086 			[ 9] = { REG_VIRT0, COPY8_REGCM },
16087 			[10] = { REG_VIRT0, COPY8_REGCM },
16088 			[11] = { REG_VIRT0, COPY8_REGCM },
16089 			[12] = { REG_VIRT0, COPY8_REGCM },
16090 			[13] = { REG_VIRT0, COPY8_REGCM },
16091 			[14] = { REG_VIRT0, COPY8_REGCM },
16092 			[15] = { REG_VIRT0, COPY8_REGCM },
16094 	[TEMPLATE_PHI16] = {
16095 		.lhs = { [0] = { REG_VIRT0, COPY16_REGCM } },
16097 			[ 0] = { REG_VIRT0, COPY16_REGCM },
16098 			[ 1] = { REG_VIRT0, COPY16_REGCM },
16099 			[ 2] = { REG_VIRT0, COPY16_REGCM },
16100 			[ 3] = { REG_VIRT0, COPY16_REGCM },
16101 			[ 4] = { REG_VIRT0, COPY16_REGCM },
16102 			[ 5] = { REG_VIRT0, COPY16_REGCM },
16103 			[ 6] = { REG_VIRT0, COPY16_REGCM },
16104 			[ 7] = { REG_VIRT0, COPY16_REGCM },
16105 			[ 8] = { REG_VIRT0, COPY16_REGCM },
16106 			[ 9] = { REG_VIRT0, COPY16_REGCM },
16107 			[10] = { REG_VIRT0, COPY16_REGCM },
16108 			[11] = { REG_VIRT0, COPY16_REGCM },
16109 			[12] = { REG_VIRT0, COPY16_REGCM },
16110 			[13] = { REG_VIRT0, COPY16_REGCM },
16111 			[14] = { REG_VIRT0, COPY16_REGCM },
16112 			[15] = { REG_VIRT0, COPY16_REGCM },
16114 	[TEMPLATE_PHI32] = {
16115 		.lhs = { [0] = { REG_VIRT0, COPY32_REGCM } },
16117 			[ 0] = { REG_VIRT0, COPY32_REGCM },
16118 			[ 1] = { REG_VIRT0, COPY32_REGCM },
16119 			[ 2] = { REG_VIRT0, COPY32_REGCM },
16120 			[ 3] = { REG_VIRT0, COPY32_REGCM },
16121 			[ 4] = { REG_VIRT0, COPY32_REGCM },
16122 			[ 5] = { REG_VIRT0, COPY32_REGCM },
16123 			[ 6] = { REG_VIRT0, COPY32_REGCM },
16124 			[ 7] = { REG_VIRT0, COPY32_REGCM },
16125 			[ 8] = { REG_VIRT0, COPY32_REGCM },
16126 			[ 9] = { REG_VIRT0, COPY32_REGCM },
16127 			[10] = { REG_VIRT0, COPY32_REGCM },
16128 			[11] = { REG_VIRT0, COPY32_REGCM },
16129 			[12] = { REG_VIRT0, COPY32_REGCM },
16130 			[13] = { REG_VIRT0, COPY32_REGCM },
16131 			[14] = { REG_VIRT0, COPY32_REGCM },
16132 			[15] = { REG_VIRT0, COPY32_REGCM },
/* Stores: rhs[0] is the address (any GPR32), rhs[1] the value. */
16134 	[TEMPLATE_STORE8] = {
16136 			[0] = { REG_UNSET, REGCM_GPR32 },
16137 			[1] = { REG_UNSET, REGCM_GPR8_LO },
16140 	[TEMPLATE_STORE16] = {
16142 			[0] = { REG_UNSET, REGCM_GPR32 },
16143 			[1] = { REG_UNSET, REGCM_GPR16 },
16146 	[TEMPLATE_STORE32] = {
16148 			[0] = { REG_UNSET, REGCM_GPR32 },
16149 			[1] = { REG_UNSET, REGCM_GPR32 },
/* Loads: rhs[0] is the address, lhs[0] the loaded value. */
16152 	[TEMPLATE_LOAD8] = {
16153 		.lhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16154 		.rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16156 	[TEMPLATE_LOAD16] = {
16157 		.lhs = { [0] = { REG_UNSET, REGCM_GPR16 } },
16158 		.rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16160 	[TEMPLATE_LOAD32] = {
16161 		.lhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16162 		.rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
/* Two-address binary ops: result is tied (REG_VIRT0) to rhs[0]. */
16164 	[TEMPLATE_BINARY8_REG] = {
16165 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16167 			[0] = { REG_VIRT0, REGCM_GPR8_LO },
16168 			[1] = { REG_UNSET, REGCM_GPR8_LO },
16171 	[TEMPLATE_BINARY16_REG] = {
16172 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16174 			[0] = { REG_VIRT0, REGCM_GPR16 },
16175 			[1] = { REG_UNSET, REGCM_GPR16 },
16178 	[TEMPLATE_BINARY32_REG] = {
16179 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16181 			[0] = { REG_VIRT0, REGCM_GPR32 },
16182 			[1] = { REG_UNSET, REGCM_GPR32 },
16185 	[TEMPLATE_BINARY8_IMM] = {
16186 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16188 			[0] = { REG_VIRT0, REGCM_GPR8_LO },
16189 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16192 	[TEMPLATE_BINARY16_IMM] = {
16193 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16195 			[0] = { REG_VIRT0, REGCM_GPR16 },
16196 			[1] = { REG_UNNEEDED, REGCM_IMM16 },
16199 	[TEMPLATE_BINARY32_IMM] = {
16200 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16202 			[0] = { REG_VIRT0, REGCM_GPR32 },
16203 			[1] = { REG_UNNEEDED, REGCM_IMM32 },
/* Variable shifts: x86 requires the shift count in CL. */
16206 	[TEMPLATE_SL8_CL] = {
16207 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16209 			[0] = { REG_VIRT0, REGCM_GPR8_LO },
16210 			[1] = { REG_CL, REGCM_GPR8_LO },
16213 	[TEMPLATE_SL16_CL] = {
16214 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16216 			[0] = { REG_VIRT0, REGCM_GPR16 },
16217 			[1] = { REG_CL, REGCM_GPR8_LO },
16220 	[TEMPLATE_SL32_CL] = {
16221 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16223 			[0] = { REG_VIRT0, REGCM_GPR32 },
16224 			[1] = { REG_CL, REGCM_GPR8_LO },
16227 	[TEMPLATE_SL8_IMM] = {
16228 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16230 			[0] = { REG_VIRT0, REGCM_GPR8_LO },
16231 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16234 	[TEMPLATE_SL16_IMM] = {
16235 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16237 			[0] = { REG_VIRT0, REGCM_GPR16 },
16238 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16241 	[TEMPLATE_SL32_IMM] = {
16242 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16244 			[0] = { REG_VIRT0, REGCM_GPR32 },
16245 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16248 	[TEMPLATE_UNARY8] = {
16249 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16250 		.rhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16252 	[TEMPLATE_UNARY16] = {
16253 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16254 		.rhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16256 	[TEMPLATE_UNARY32] = {
16257 		.lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16258 		.rhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
/* Compares/tests produce their result in EFLAGS. */
16260 	[TEMPLATE_CMP8_REG] = {
16261 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16263 			[0] = { REG_UNSET, REGCM_GPR8_LO },
16264 			[1] = { REG_UNSET, REGCM_GPR8_LO },
16267 	[TEMPLATE_CMP16_REG] = {
16268 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16270 			[0] = { REG_UNSET, REGCM_GPR16 },
16271 			[1] = { REG_UNSET, REGCM_GPR16 },
16274 	[TEMPLATE_CMP32_REG] = {
16275 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16277 			[0] = { REG_UNSET, REGCM_GPR32 },
16278 			[1] = { REG_UNSET, REGCM_GPR32 },
16281 	[TEMPLATE_CMP8_IMM] = {
16282 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16284 			[0] = { REG_UNSET, REGCM_GPR8_LO },
16285 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16288 	[TEMPLATE_CMP16_IMM] = {
16289 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16291 			[0] = { REG_UNSET, REGCM_GPR16 },
16292 			[1] = { REG_UNNEEDED, REGCM_IMM16 },
16295 	[TEMPLATE_CMP32_IMM] = {
16296 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16298 			[0] = { REG_UNSET, REGCM_GPR32 },
16299 			[1] = { REG_UNNEEDED, REGCM_IMM32 },
16302 	[TEMPLATE_TEST8] = {
16303 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16304 		.rhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16306 	[TEMPLATE_TEST16] = {
16307 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16308 		.rhs = { [0] = { REG_UNSET, REGCM_GPR16 } },
16310 	[TEMPLATE_TEST32] = {
16311 		.lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16312 		.rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
/* SET consumes EFLAGS into a byte register; JMP consumes EFLAGS only
 * (the [TEMPLATE_SET]/[TEMPLATE_JMP] designators fall in lines not
 * visible in this sampled view). */
16315 		.lhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16316 		.rhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16319 		.rhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
/* Port input: result in AL/AX/EAX, port in DX or an 8-bit immediate. */
16321 	[TEMPLATE_INB_DX] = {
16322 		.lhs = { [0] = { REG_AL,  REGCM_GPR8_LO } },
16323 		.rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16325 	[TEMPLATE_INB_IMM] = {
16326 		.lhs = { [0] = { REG_AL,  REGCM_GPR8_LO } },
16327 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16329 	[TEMPLATE_INW_DX]  = {
16330 		.lhs = { [0] = { REG_AX,  REGCM_GPR16 } },
16331 		.rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16333 	[TEMPLATE_INW_IMM] = {
16334 		.lhs = { [0] = { REG_AX,  REGCM_GPR16 } },
16335 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16337 	[TEMPLATE_INL_DX]  = {
16338 		.lhs = { [0] = { REG_EAX, REGCM_GPR32 } },
16339 		.rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16341 	[TEMPLATE_INL_IMM] = {
16342 		.lhs = { [0] = { REG_EAX, REGCM_GPR32 } },
16343 		.rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
/* Port output: value in AL/AX/EAX, port in DX or an 8-bit immediate. */
16345 	[TEMPLATE_OUTB_DX] = {
16347 			[0] = { REG_AL,  REGCM_GPR8_LO },
16348 			[1] = { REG_DX, REGCM_GPR16 },
16351 	[TEMPLATE_OUTB_IMM] = {
16353 			[0] = { REG_AL,  REGCM_GPR8_LO },
16354 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16357 	[TEMPLATE_OUTW_DX] = {
16359 			[0] = { REG_AX,  REGCM_GPR16 },
16360 			[1] = { REG_DX, REGCM_GPR16 },
16363 	[TEMPLATE_OUTW_IMM] = {
16365 			[0] = { REG_AX,  REGCM_GPR16 },
16366 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16369 	[TEMPLATE_OUTL_DX] = {
16371 			[0] = { REG_EAX, REGCM_GPR32 },
16372 			[1] = { REG_DX, REGCM_GPR16 },
16375 	[TEMPLATE_OUTL_IMM] = {
16377 			[0] = { REG_EAX, REGCM_GPR32 },
16378 			[1] = { REG_UNNEEDED, REGCM_IMM8 },
16382 		.lhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16383 		.rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
/* RDMSR: MSR index in ECX, result in EDX:EAX.  WRMSR: index in ECX,
 * value in EAX(low)/EDX(high). */
16385 	[TEMPLATE_RDMSR] = {
16387 			[0] = { REG_EAX, REGCM_GPR32 },
16388 			[1] = { REG_EDX, REGCM_GPR32 },
16390 		.rhs = { [0] = { REG_ECX, REGCM_GPR32 } },
16392 	[TEMPLATE_WRMSR] = {
16394 			[0] = { REG_ECX, REGCM_GPR32 },
16395 			[1] = { REG_EAX, REGCM_GPR32 },
16396 			[2] = { REG_EDX, REGCM_GPR32 },
/* Unsigned multiply: x86 MUL uses AL/AX/EAX implicitly and produces a
 * double-width result (AX, DX:AX, EDX:EAX). */
16399 	[TEMPLATE_UMUL8] = {
16400 		.lhs = { [0] = { REG_AX, REGCM_GPR16 } },
16402 			[0] = { REG_AL, REGCM_GPR8_LO },
16403 			[1] = { REG_UNSET, REGCM_GPR8_LO },
16406 	[TEMPLATE_UMUL16] = {
16407 		.lhs = { [0] = { REG_DXAX, REGCM_DIVIDEND32 } },
16409 			[0] = { REG_AX, REGCM_GPR16 },
16410 			[1] = { REG_UNSET, REGCM_GPR16 },
16413 	[TEMPLATE_UMUL32] = {
16414 		.lhs = { [0] = { REG_EDXEAX, REGCM_DIVIDEND64 } },
16416 			[0] = { REG_EAX, REGCM_GPR32 },
16417 			[1] = { REG_UNSET, REGCM_GPR32 },
/* Divide: dividend in AX / DX:AX / EDX:EAX; quotient in AL/AX/EAX,
 * remainder in AH/DX/EDX, matching x86 DIV semantics. */
16420 	[TEMPLATE_DIV8] = {
16422 			[0] = { REG_AL, REGCM_GPR8_LO },
16423 			[1] = { REG_AH, REGCM_GPR8 },
16426 			[0] = { REG_AX, REGCM_GPR16 },
16427 			[1] = { REG_UNSET, REGCM_GPR8_LO },
16430 	[TEMPLATE_DIV16] = {
16432 			[0] = { REG_AX, REGCM_GPR16 },
16433 			[1] = { REG_DX, REGCM_GPR16 },
16436 			[0] = { REG_DXAX, REGCM_DIVIDEND32 },
16437 			[1] = { REG_UNSET, REGCM_GPR16 },
16440 	[TEMPLATE_DIV32] = {
16442 			[0] = { REG_EAX, REGCM_GPR32 },
16443 			[1] = { REG_EDX, REGCM_GPR32 },
16446 			[0] = { REG_EDXEAX, REGCM_DIVIDEND64 },
16447 			[1] = { REG_UNSET, REGCM_GPR32 },
/* fixup_branches - walk the users of 'use' (the boolean result of a
 * comparison) and rewrite each OP_BRANCH user into a conditional jump:
 * re-materialize the compare/test just before the branch and replace the
 * branch's operand with it, setting the branch op to 'jmp_op'.  Copies are
 * followed recursively so branches through copies are also fixed.
 */
16452 static void fixup_branches(struct compile_state *state,
16453 	struct triple *cmp, struct triple *use, int jmp_op)
16455 	struct triple_set *entry, *next;
/* 'next' is captured first because the loop mutates the use list. */
16456 	for(entry = use->use; entry; entry = next) {
16457 		next = entry->next;
16458 		if (entry->member->op == OP_COPY) {
16459 			fixup_branches(state, cmp, entry->member, jmp_op);
16461 		else if (entry->member->op == OP_BRANCH) {
16462 			struct triple *branch, *test;
16463 			struct triple *left, *right;
16465 			left = RHS(cmp, 0);
/* OP_TEST has one operand; OP_CMP has two. */
16466 			if (TRIPLE_RHS(cmp->sizes) > 1) {
16467 				right = RHS(cmp, 1);
/* Clone the comparison immediately before the branch so EFLAGS is live
 * across no intervening instructions. */
16469 			branch = entry->member;
16470 			test = pre_triple(state, branch,
16471 				cmp->op, cmp->type, left, right);
16472 			test->template_id = TEMPLATE_TEST32;
16473 			if (cmp->op == OP_CMP) {
16474 				test->template_id = TEMPLATE_CMP32_REG;
16475 				if (get_imm32(test, &RHS(test, 1))) {
16476 					test->template_id = TEMPLATE_CMP32_IMM;
16479 			use_triple(RHS(test, 0), test);
16480 			use_triple(RHS(test, 1), test);
/* Swap the branch condition over to the new test and make the branch a
 * conditional jump. */
16481 			unuse_triple(RHS(branch, 0), branch);
16482 			RHS(branch, 0) = test;
16483 			branch->op = jmp_op;
16484 			branch->template_id = TEMPLATE_JMP;
16485 			use_triple(RHS(branch, 0), branch);
/* bool_cmp - lower a boolean-producing comparison 'ins' to x86 form:
 * turn it into a flags-producing cmp/test, append a set_op instruction
 * (setcc) that materializes the flag as a char value, redirect all other
 * users of 'ins' to that set, then rewrite branch users as conditional
 * jumps via fixup_branches.
 */
16490 static void bool_cmp(struct compile_state *state,
16491 	struct triple *ins, int cmp_op, int jmp_op, int set_op)
16493 	struct triple_set *entry, *next;
16494 	struct triple *set;
16496 	/* Put a barrier up before the cmp which preceeds the
16497 	 * copy instruction.  If a set actually occurs this gives
16498 	 * us a chance to move variables in registers out of the way.
16501 	/* Modify the comparison operator */
16503 	ins->template_id = TEMPLATE_TEST32;
16504 	if (cmp_op == OP_CMP) {
16505 		ins->template_id = TEMPLATE_CMP32_REG;
/* Prefer the immediate form when the second operand is a 32-bit const. */
16506 		if (get_imm32(ins, &RHS(ins, 1))) {
16507 			ins->template_id = TEMPLATE_CMP32_IMM;
16510 	/* Generate the instruction sequence that will transform the
16511 	 * result of the comparison into a logical value.
16513 	set = post_triple(state, ins, set_op, &char_type, ins, 0);
16514 	use_triple(ins, set);
16515 	set->template_id = TEMPLATE_SET;
/* Redirect every non-'set' user of the compare to use the set result;
 * 'next' is saved first because replace_rhs_use edits the use list. */
16517 	for(entry = ins->use; entry; entry = next) {
16518 		next = entry->next;
16519 		if (entry->member == set) {
16522 		replace_rhs_use(state, ins, set, entry->member);
16524 	fixup_branches(state, ins, set, jmp_op);
/* after_lhs - step over the OP_PIECE triples that immediately follow a
 * multi-result instruction and return the first triple after them, while
 * sanity-checking that each piece is in order, is an OP_PIECE, and carries
 * the expected index in u.cval.
 */
16527 static struct triple *after_lhs(struct compile_state *state, struct triple *ins)
16529 	struct triple *next;
16531 	lhs = TRIPLE_LHS(ins->sizes);
16532 	for(next = ins->next, i = 0; i < lhs; i++, next = next->next) {
/* The i-th triple after 'ins' must be exactly LHS(ins, i). */
16533 		if (next != LHS(ins, i)) {
16534 			internal_error(state, ins, "malformed lhs on %s",
16537 		if (next->op != OP_PIECE) {
16538 			internal_error(state, ins, "bad lhs op %s at %d on %s",
16539 				tops(next->op), i, tops(ins->op));
16541 		if (next->u.cval != i) {
16542 			internal_error(state, ins, "bad u.cval of %d %d expected",
/* arch_reg_lhs - return the register constraint (reg + class mask) for
 * result 'index' of instruction 'ins'.  OP_PIECE is redirected to its
 * parent multi-result instruction.  Constraints come either from the
 * instruction's own asm info or from the static templates[] table.
 */
16549 struct reg_info arch_reg_lhs(struct compile_state *state, struct triple *ins, int index)
16551 	struct ins_template *template;
16552 	struct reg_info result;
/* A piece is the index-th result of the instruction in MISC(ins, 0). */
16554 	if (ins->op == OP_PIECE) {
16555 		index = ins->u.cval;
16556 		ins = MISC(ins, 0);
16558 	zlhs = TRIPLE_LHS(ins->sizes);
16559 	if (triple_is_def(state, ins)) {
16562 	if (index >= zlhs) {
16563 		internal_error(state, ins, "index %d out of range for %s\n",
16564 			index, tops(ins->op));
/* Inline-asm instructions carry their own constraint template. */
16568 		template = &ins->u.ainfo->tmpl;
16571 		if (ins->template_id > LAST_TEMPLATE) {
16572 			internal_error(state, ins, "bad template number %d",
16575 		template = &templates[ins->template_id];
16578 	result = template->lhs[index];
16579 	result.regcm = arch_regcm_normalize(state, result.regcm);
/* Real (non-immediate) results cannot live in immediate classes. */
16580 	if (result.reg != REG_UNNEEDED) {
16581 		result.regcm &= ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8);
16583 	if (result.regcm == 0) {
16584 		internal_error(state, ins, "lhs %d regcm == 0", index);
/* arch_reg_rhs - return the register constraint for operand 'index' of
 * instruction 'ins'.  OP_PIECE has no rhs and is rejected; constraints come
 * from the instruction's asm info or the templates[] table, normalized for
 * the current CPU.
 */
16589 struct reg_info arch_reg_rhs(struct compile_state *state, struct triple *ins, int index)
16591 	struct reg_info result;
16592 	struct ins_template *template;
16593 	if ((index > TRIPLE_RHS(ins->sizes)) ||
16594 		(ins->op == OP_PIECE)) {
16595 		internal_error(state, ins, "index %d out of range for %s\n",
16596 			index, tops(ins->op));
/* Inline-asm instructions carry their own constraint template. */
16600 		template = &ins->u.ainfo->tmpl;
16603 		if (ins->template_id > LAST_TEMPLATE) {
16604 			internal_error(state, ins, "bad template number %d",
16607 		template = &templates[ins->template_id];
16610 	result = template->rhs[index];
16611 	result.regcm = arch_regcm_normalize(state, result.regcm);
16612 	if (result.regcm == 0) {
16613 		internal_error(state, ins, "rhs %d regcm == 0", index);
/* mod_div - lower a division or modulus 'ins' to an x86 divide triple
 * (div_op) that yields two pieces: piece0 = quotient, piece1 = remainder.
 * Uses of 'ins' are redirected to LHS(div, index) — 0 selects the quotient
 * (OP_DIV), 1 selects the remainder (OP_MOD) — and 'ins' is released.
 * Returns the instruction after the generated sequence.
 */
16618 static struct triple *mod_div(struct compile_state *state,
16619 	struct triple *ins, int div_op, int index)
16621 	struct triple *div, *piece0, *piece1;
16623 	/* Generate a piece to hold the remainder */
16624 	piece1 = post_triple(state, ins, OP_PIECE, ins->type, 0, 0);
16625 	piece1->u.cval = 1;
16627 	/* Generate a piece to hold the quotient */
16628 	piece0 = post_triple(state, ins, OP_PIECE, ins->type, 0, 0);
16629 	piece0->u.cval = 0;
16631 	/* Generate the appropriate division instruction */
16632 	div = post_triple(state, ins, div_op, ins->type, 0, 0);
16633 	RHS(div, 0) = RHS(ins, 0);
16634 	RHS(div, 1) = RHS(ins, 1);
16635 	LHS(div, 0) = piece0;
16636 	LHS(div, 1) = piece1;
16637 	div->template_id = TEMPLATE_DIV32;
16638 	use_triple(RHS(div, 0), div);
16639 	use_triple(RHS(div, 1), div);
16640 	use_triple(LHS(div, 0), div);
16641 	use_triple(LHS(div, 1), div);
16643 	/* Hook on piece0 */
16644 	MISC(piece0, 0) = div;
16645 	use_triple(div, piece0);
16647 	/* Hook on piece1 */
16648 	MISC(piece1, 0) = div;
16649 	use_triple(div, piece1);
/* (sic: "Replate" = replace) redirect all users of 'ins' to the chosen
 * piece and delete the original instruction. */
16651 	/* Replate uses of ins with the appropriate piece of the div */
16652 	propogate_use(state, ins, LHS(div, index));
16653 	release_triple(state, ins);
16655 	/* Return the address of the next instruction */
16656 	return piece1->next;
/* transform_to_arch_instruction - the instruction-selection pass for one
 * triple: assign each generic 3-address instruction an x86 template,
 * lowering where the ISA demands it (bool compares -> cmp/setcc/jcc,
 * div/mod -> two-piece divide, shifts -> CL or IMM form, port I/O ->
 * DX or IMM form).  Returns the next instruction to process.
 * NOTE(review): the switch(ins->op) and most case labels fall in lines not
 * visible in this sampled view; comments below go by the visible actions.
 */
16659 static struct triple *transform_to_arch_instruction(
16660 	struct compile_state *state, struct triple *ins)
16662 	/* Transform from generic 3 address instructions
16663 	 * to archtecture specific instructions.
16664 	 * And apply architecture specific constraints to instructions.
16665 	 * Copies are inserted to preserve the register flexibility
16666 	 * of 3 address instructions.
16668 	struct triple *next;
/* Integer constants: 8-bit template when the value fits in a byte. */
16673 		ins->template_id = TEMPLATE_INTCONST32;
16674 		if (ins->u.cval < 256) {
16675 			ins->template_id = TEMPLATE_INTCONST8;
16679 		ins->template_id = TEMPLATE_INTCONST32;
16685 		ins->template_id = TEMPLATE_NOP;
/* Copies: pick immediate vs register template by operand kind and size. */
16688 		size = size_of(state, ins->type);
16689 		if (is_imm8(RHS(ins, 0)) && (size <= 1)) {
16690 			ins->template_id = TEMPLATE_COPY_IMM8;
16692 		else if (is_imm16(RHS(ins, 0)) && (size <= 2)) {
16693 			ins->template_id = TEMPLATE_COPY_IMM16;
16695 		else if (is_imm32(RHS(ins, 0)) && (size <= 4)) {
16696 			ins->template_id = TEMPLATE_COPY_IMM32;
16698 		else if (is_const(RHS(ins, 0))) {
16699 			internal_error(state, ins, "bad constant passed to copy");
16701 		else if (size <= 1) {
16702 			ins->template_id = TEMPLATE_COPY8_REG;
16704 		else if (size <= 2) {
16705 			ins->template_id = TEMPLATE_COPY16_REG;
16707 		else if (size <= 4) {
16708 			ins->template_id = TEMPLATE_COPY32_REG;
16711 			internal_error(state, ins, "bad type passed to copy");
/* Phi: template by value size. */
16715 		size = size_of(state, ins->type);
16717 			ins->template_id = TEMPLATE_PHI8;
16719 		else if (size <= 2) {
16720 			ins->template_id = TEMPLATE_PHI16;
16722 		else if (size <= 4) {
16723 			ins->template_id = TEMPLATE_PHI32;
16726 			internal_error(state, ins, "bad type passed to phi");
/* Store: template by the stored type's width. */
16730 		switch(ins->type->type & TYPE_MASK) {
16731 		case TYPE_CHAR:  case TYPE_UCHAR:
16732 			ins->template_id = TEMPLATE_STORE8;
16734 		case TYPE_SHORT: case TYPE_USHORT:
16735 			ins->template_id = TEMPLATE_STORE16;
16737 		case TYPE_INT:   case TYPE_UINT:
16738 		case TYPE_LONG:  case TYPE_ULONG:
16740 			ins->template_id = TEMPLATE_STORE32;
16743 			internal_error(state, ins, "unknown type in store");
/* Load: template by the loaded type's width. */
16748 		switch(ins->type->type & TYPE_MASK) {
16749 		case TYPE_CHAR: case TYPE_UCHAR:
16750 			ins->template_id = TEMPLATE_LOAD8;
16754 			ins->template_id = TEMPLATE_LOAD16;
16761 			ins->template_id = TEMPLATE_LOAD32;
16764 			internal_error(state, ins, "unknown type in load");
/* Binary arithmetic: immediate form when rhs[1] is a 32-bit constant. */
16774 		ins->template_id = TEMPLATE_BINARY32_REG;
16775 		if (get_imm32(ins, &RHS(ins, 1))) {
16776 			ins->template_id = TEMPLATE_BINARY32_IMM;
16781 		ins->template_id = TEMPLATE_DIV32;
16782 		next = after_lhs(state, ins);
16784 		/* FIXME UMUL does not work yet.. */
16786 		ins->template_id = TEMPLATE_UMUL32;
/* Div/mod lower to a two-result divide: index 0 = quotient, 1 = rem. */
16789 		next = mod_div(state, ins, OP_UDIVT, 0);
16792 		next = mod_div(state, ins, OP_SDIVT, 0);
16795 		next = mod_div(state, ins, OP_UDIVT, 1);
16798 		next = mod_div(state, ins, OP_SDIVT, 1);
/* Shifts: IMM form for constant counts; otherwise force the count into
 * a char (CL) via a typed copy when it is wider than one byte. */
16803 		ins->template_id = TEMPLATE_SL32_CL;
16804 		if (get_imm8(ins, &RHS(ins, 1))) {
16805 			ins->template_id = TEMPLATE_SL32_IMM;
16806 		} else if (size_of(state, RHS(ins, 1)->type) > 1) {
16807 			typed_pre_copy(state, &char_type, ins, 1);
16812 		ins->template_id = TEMPLATE_UNARY32;
/* Boolean comparisons lower via bool_cmp to cmp/test + setcc + jcc. */
16815 		bool_cmp(state, ins, OP_CMP, OP_JMP_EQ, OP_SET_EQ);
16818 		bool_cmp(state, ins, OP_CMP, OP_JMP_NOTEQ, OP_SET_NOTEQ);
16821 		bool_cmp(state, ins, OP_CMP, OP_JMP_SLESS, OP_SET_SLESS);
16824 		bool_cmp(state, ins, OP_CMP, OP_JMP_ULESS, OP_SET_ULESS);
16827 		bool_cmp(state, ins, OP_CMP, OP_JMP_SMORE, OP_SET_SMORE);
16830 		bool_cmp(state, ins, OP_CMP, OP_JMP_UMORE, OP_SET_UMORE);
16833 		bool_cmp(state, ins, OP_CMP, OP_JMP_SLESSEQ, OP_SET_SLESSEQ);
16836 		bool_cmp(state, ins, OP_CMP, OP_JMP_ULESSEQ, OP_SET_ULESSEQ);
16839 		bool_cmp(state, ins, OP_CMP, OP_JMP_SMOREEQ, OP_SET_SMOREEQ);
16842 		bool_cmp(state, ins, OP_CMP, OP_JMP_UMOREEQ, OP_SET_UMOREEQ);
16845 		bool_cmp(state, ins, OP_TEST, OP_JMP_NOTEQ, OP_SET_NOTEQ);
16848 		bool_cmp(state, ins, OP_TEST, OP_JMP_EQ, OP_SET_EQ);
/* An unconditional branch must have no test operand. */
16851 		if (TRIPLE_RHS(ins->sizes) > 0) {
16852 			internal_error(state, ins, "bad branch test");
16855 		ins->template_id = TEMPLATE_NOP;
/* Port input: DX template, bumped to the IMM variant (id + 1) when the
 * port is an 8-bit constant. */
16861 		case OP_INB: ins->template_id = TEMPLATE_INB_DX; break;
16862 		case OP_INW: ins->template_id = TEMPLATE_INW_DX; break;
16863 		case OP_INL: ins->template_id = TEMPLATE_INL_DX; break;
16865 		if (get_imm8(ins, &RHS(ins, 0))) {
16866 			ins->template_id += 1;
/* Port output: same DX -> IMM bump, but the port is rhs[1]. */
16873 		case OP_OUTB: ins->template_id = TEMPLATE_OUTB_DX; break;
16874 		case OP_OUTW: ins->template_id = TEMPLATE_OUTW_DX; break;
16875 		case OP_OUTL: ins->template_id = TEMPLATE_OUTL_DX; break;
16877 		if (get_imm8(ins, &RHS(ins, 1))) {
16878 			ins->template_id += 1;
16883 		ins->template_id = TEMPLATE_BSF;
16886 		ins->template_id = TEMPLATE_RDMSR;
16887 		next = after_lhs(state, ins);
16890 		ins->template_id = TEMPLATE_WRMSR;
16893 		ins->template_id = TEMPLATE_NOP;
16896 		ins->template_id = TEMPLATE_NOP;
16897 		next = after_lhs(state, ins);
16899 	/* Already transformed instructions */
16901 		ins->template_id = TEMPLATE_TEST32;
16904 		ins->template_id = TEMPLATE_CMP32_REG;
16905 		if (get_imm32(ins, &RHS(ins, 1))) {
16906 			ins->template_id = TEMPLATE_CMP32_IMM;
16909 	case OP_JMP_EQ:      case OP_JMP_NOTEQ:
16910 	case OP_JMP_SLESS:   case OP_JMP_ULESS:
16911 	case OP_JMP_SMORE:   case OP_JMP_UMORE:
16912 	case OP_JMP_SLESSEQ: case OP_JMP_ULESSEQ:
16913 	case OP_JMP_SMOREEQ: case OP_JMP_UMOREEQ:
16914 		ins->template_id = TEMPLATE_JMP;
16916 	case OP_SET_EQ:      case OP_SET_NOTEQ:
16917 	case OP_SET_SLESS:   case OP_SET_ULESS:
16918 	case OP_SET_SMORE:   case OP_SET_UMORE:
16919 	case OP_SET_SLESSEQ: case OP_SET_ULESSEQ:
16920 	case OP_SET_SMOREEQ: case OP_SET_UMOREEQ:
16921 		ins->template_id = TEMPLATE_SET;
16923 	/* Unhandled instructions */
16926 		internal_error(state, ins, "unhandled ins: %d %s\n",
16927 			ins->op, tops(ins->op));
/* Return a fresh label number from a monotonically increasing static
 * counter; used to build assembler-local labels "L<prefix><n>". */
16933 static long next_label(struct compile_state *state)
16935 static long label_counter = 0;
16936 return ++label_counter;
/* Walk the circular triple list of main_function and assign every
 * OP_LABEL and OP_SDECL triple a unique label number in u.cval,
 * so later passes can emit/refer to them as local assembler labels. */
16938 static void generate_local_labels(struct compile_state *state)
16940 struct triple *first, *label;
16941 first = RHS(state->main_function, 0);
16944 if ((label->op == OP_LABEL) ||
16945 (label->op == OP_SDECL)) {
16947 label->u.cval = next_label(state);
16953 label = label->next;
16954 } while(label != first);
/* Verify that `triple` has been assigned a register and that the
 * register belongs to one of the requested register classes; calls
 * internal_error() on violation.  Presumably returns the register
 * number (return statement not visible in this listing) — confirm. */
16957 static int check_reg(struct compile_state *state,
16958 struct triple *triple, int classes)
16962 reg = ID_REG(triple->id);
16963 if (reg == REG_UNSET) {
16964 internal_error(state, triple, "register not set");
16966 mask = arch_reg_regcm(state, reg);
16967 if (!(classes & mask)) {
16968 internal_error(state, triple, "reg %d in wrong class",
/* Map an architectural register number to its AT&T-syntax name
 * (e.g. "%eax").  The #error guard presumably sits inside an #if that
 * checks the register numbering still matches the table — confirm. */
16974 static const char *arch_reg_str(int reg)
16977 #error "Registers have renumberd fix arch_reg_str"
16979 static const char *regs[] = {
16983 "%al", "%bl", "%cl", "%dl", "%ah", "%bh", "%ch", "%dh",
16984 "%ax", "%bx", "%cx", "%dx", "%si", "%di", "%bp", "%sp",
16985 "%eax", "%ebx", "%ecx", "%edx", "%esi", "%edi", "%ebp", "%esp",
16988 "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7",
16989 "%xmm0", "%xmm1", "%xmm2", "%xmm3",
16990 "%xmm4", "%xmm5", "%xmm6", "%xmm7",
16992 if (!((reg >= REG_EFLAGS) && (reg <= REG_XMM7))) {
/* Convenience wrapper: validate the triple's register against the
 * allowed classes (check_reg) and return its AT&T-syntax name. */
16999 static const char *reg(struct compile_state *state, struct triple *triple,
17003 reg = check_reg(state, triple, classes);
17004 return arch_reg_str(reg);
/* Return the AT&T operand-size suffix for a type: "b" (1 byte),
 * "w" (2 bytes) or "l" (4 bytes); internal_error() on any other size. */
17007 const char *type_suffix(struct compile_state *state, struct type *type)
17009 const char *suffix;
17010 switch(size_of(state, type)) {
17011 case 1: suffix = "b"; break;
17012 case 2: suffix = "w"; break;
17013 case 4: suffix = "l"; break;
17015 internal_error(state, 0, "unknown suffix");
/* Emit a constant as an immediate operand: " $<value> " for integer
 * constants, " $L<prefix><label>+<offset> " for address constants whose
 * base is an OP_SDECL with an assigned (non-zero) label; any other
 * constant kind is an internal error. */
17022 static void print_const_val(
17023 struct compile_state *state, struct triple *ins, FILE *fp)
17027 fprintf(fp, " $%ld ",
17028 (long_t)(ins->u.cval));
17031 if (MISC(ins, 0)->op != OP_SDECL) {
17032 internal_error(state, ins, "bad base for addrconst");
17034 if (MISC(ins, 0)->u.cval <= 0) {
17035 internal_error(state, ins, "unlabeled constant");
17037 fprintf(fp, " $L%s%lu+%lu ",
17038 state->label_prefix,
17039 MISC(ins, 0)->u.cval,
17043 internal_error(state, ins, "unknown constant type");
/* Emit a constant as initialized data: .byte/.short/.int for integer
 * constants by type size, ".int L<prefix><label>+<offset>" for address
 * constants (base must be a labeled OP_SDECL), and a .byte-per-byte
 * dump for blob constants; anything else is an internal error. */
17048 static void print_const(struct compile_state *state,
17049 struct triple *ins, FILE *fp)
17053 switch(ins->type->type & TYPE_MASK) {
17056 fprintf(fp, ".byte 0x%02lx\n", ins->u.cval);
17060 fprintf(fp, ".short 0x%04lx\n", ins->u.cval);
17066 fprintf(fp, ".int %lu\n", ins->u.cval);
17069 internal_error(state, ins, "Unknown constant type");
17073 if (MISC(ins, 0)->op != OP_SDECL) {
17074 internal_error(state, ins, "bad base for addrconst");
17076 if (MISC(ins, 0)->u.cval <= 0) {
17077 internal_error(state, ins, "unlabeled constant");
17079 fprintf(fp, ".int L%s%lu+%lu\n",
17080 state->label_prefix,
17081 MISC(ins, 0)->u.cval,
17086 unsigned char *blob;
17088 size = size_of(state, ins->type);
17089 blob = ins->u.blob;
17090 for(i = 0; i < size; i++) {
17091 fprintf(fp, ".byte 0x%02x\n",
17097 internal_error(state, ins, "Unknown constant type");
17102 #define TEXT_SECTION ".rom.text"
17103 #define DATA_SECTION ".rom.data"
/* Place a constant into the data section's constant pool: switch to
 * DATA_SECTION, align, emit a fresh label and the constant's data,
 * switch back to TEXT_SECTION, and return the new label number so the
 * caller can reference the pooled constant. */
17105 static long get_const_pool_ref(
17106 struct compile_state *state, struct triple *ins, FILE *fp)
17109 ref = next_label(state);
17110 fprintf(fp, ".section \"" DATA_SECTION "\"\n");
17111 fprintf(fp, ".balign %d\n", align_of(state, ins->type));
17112 fprintf(fp, "L%s%lu:\n", state->label_prefix, ref);
17113 print_const(state, ins, fp);
17114 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
/* Emit a two-operand ALU instruction (add/sub/and/...).  x86 is
 * destructive: RHS(0) must already share the instruction's register
 * (checked via id).  Immediate second operand prints "op $imm, reg";
 * otherwise both operands are narrowed to the intersection of their
 * register classes so the printed names have matching widths. */
17118 static void print_binary_op(struct compile_state *state,
17119 const char *op, struct triple *ins, FILE *fp)
17122 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17123 if (RHS(ins, 0)->id != ins->id) {
17124 internal_error(state, ins, "invalid register assignment");
17126 if (is_const(RHS(ins, 1))) {
17127 fprintf(fp, "\t%s ", op);
17128 print_const_val(state, RHS(ins, 1), fp);
17129 fprintf(fp, ", %s\n",
17130 reg(state, RHS(ins, 0), mask));
17133 unsigned lmask, rmask;
17135 lreg = check_reg(state, RHS(ins, 0), mask);
17136 rreg = check_reg(state, RHS(ins, 1), mask);
17137 lmask = arch_reg_regcm(state, lreg);
17138 rmask = arch_reg_regcm(state, rreg);
17139 mask = lmask & rmask;
17140 fprintf(fp, "\t%s %s, %s\n",
17142 reg(state, RHS(ins, 1), mask),
17143 reg(state, RHS(ins, 0), mask));
/* Emit a single-operand instruction (neg/not) on a general-purpose
 * register of any width. */
17146 static void print_unary_op(struct compile_state *state,
17147 const char *op, struct triple *ins, FILE *fp)
17150 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17151 fprintf(fp, "\t%s %s\n",
17153 reg(state, RHS(ins, 0), mask));
/* Emit a shift (shl/shr/sar).  Like the binary ops, RHS(0) must be the
 * destination register.  A constant count prints as an immediate; a
 * variable count must live in an 8-bit-low register (i.e. %cl per the
 * x86 shift-count convention enforced by register allocation). */
17156 static void print_op_shift(struct compile_state *state,
17157 const char *op, struct triple *ins, FILE *fp)
17160 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17161 if (RHS(ins, 0)->id != ins->id) {
17162 internal_error(state, ins, "invalid register assignment");
17164 if (is_const(RHS(ins, 1))) {
17165 fprintf(fp, "\t%s ", op);
17166 print_const_val(state, RHS(ins, 1), fp);
17167 fprintf(fp, ", %s\n",
17168 reg(state, RHS(ins, 0), mask));
17171 fprintf(fp, "\t%s %s, %s\n",
17173 reg(state, RHS(ins, 1), REGCM_GPR8_LO),
17174 reg(state, RHS(ins, 0), mask));
/* Emit an x86 port-input instruction (inb/inw/inl).  The destination
 * must be the accumulator (%eax family); the port is either an
 * immediate or must be held in %dx, per the hardware encoding. */
17178 static void print_op_in(struct compile_state *state, struct triple *ins, FILE *fp)
17185 case OP_INB: op = "inb", mask = REGCM_GPR8_LO; break;
17186 case OP_INW: op = "inw", mask = REGCM_GPR16; break;
17187 case OP_INL: op = "inl", mask = REGCM_GPR32; break;
17189 internal_error(state, ins, "not an in operation");
17193 dreg = check_reg(state, ins, mask);
17194 if (!reg_is_reg(state, dreg, REG_EAX)) {
17195 internal_error(state, ins, "dst != %%eax");
17197 if (is_const(RHS(ins, 0))) {
17198 fprintf(fp, "\t%s ", op);
17199 print_const_val(state, RHS(ins, 0), fp);
17200 fprintf(fp, ", %s\n",
17201 reg(state, ins, mask));
17205 addr_reg = check_reg(state, RHS(ins, 0), REGCM_GPR16);
17206 if (!reg_is_reg(state, addr_reg, REG_DX)) {
17207 internal_error(state, ins, "src != %%dx");
17209 fprintf(fp, "\t%s %s, %s\n",
17211 reg(state, RHS(ins, 0), REGCM_GPR16),
17212 reg(state, ins, mask));
/* Emit an x86 port-output instruction (outb/outw/outl).  The source
 * value must be in the accumulator; the port is either an immediate
 * or must be held in %dx, mirroring print_op_in. */
17216 static void print_op_out(struct compile_state *state, struct triple *ins, FILE *fp)
17223 case OP_OUTB: op = "outb", mask = REGCM_GPR8_LO; break;
17224 case OP_OUTW: op = "outw", mask = REGCM_GPR16; break;
17225 case OP_OUTL: op = "outl", mask = REGCM_GPR32; break;
17227 internal_error(state, ins, "not an out operation");
17231 lreg = check_reg(state, RHS(ins, 0), mask);
17232 if (!reg_is_reg(state, lreg, REG_EAX)) {
17233 internal_error(state, ins, "src != %%eax");
17235 if (is_const(RHS(ins, 1))) {
17236 fprintf(fp, "\t%s %s,",
17237 op, reg(state, RHS(ins, 0), mask));
17238 print_const_val(state, RHS(ins, 1), fp);
17243 addr_reg = check_reg(state, RHS(ins, 1), REGCM_GPR16);
17244 if (!reg_is_reg(state, addr_reg, REG_DX)) {
17245 internal_error(state, ins, "dst != %%dx");
17247 fprintf(fp, "\t%s %s, %s\n",
17249 reg(state, RHS(ins, 0), mask),
17250 reg(state, RHS(ins, 1), REGCM_GPR16));
/* Emit the machine code for OP_COPY (register-to-register or
 * immediate-to-register moves).  This is a large dispatch over the
 * (source class, destination class) pair: same-class GPR moves,
 * width-changing GPR moves (with sign/zero extension via movsx/movzx),
 * MMX/SSE moves (movq/movdqa/movd and the movdq2q/movq2dq bridges),
 * the 64-bit dividend pseudo-register (%edx:%eax, extended with cltd
 * or a zeroed %edx), and finally immediate loads (with an XMM/MMX
 * constant spilled to the constant pool).  Same-register copies are
 * elided when omit_copy is set, since OP_COPY must not affect flags
 * and a no-op mov would just waste space. */
17254 static void print_op_move(struct compile_state *state,
17255 struct triple *ins, FILE *fp)
17257 /* op_move is complex because there are many types
17258 * of registers we can move between.
17259 * Because OP_COPY will be introduced in arbitrary locations
17260 * OP_COPY must not affect flags.
17262 int omit_copy = 1; /* Is it o.k. to omit a noop copy? */
17263 struct triple *dst, *src;
17264 if (ins->op == OP_COPY) {
17269 internal_error(state, ins, "unknown move operation");
17272 if (!is_const(src)) {
17273 int src_reg, dst_reg;
17274 int src_regcm, dst_regcm;
17275 src_reg = ID_REG(src->id);
17276 dst_reg = ID_REG(dst->id);
17277 src_regcm = arch_reg_regcm(state, src_reg);
17278 dst_regcm = arch_reg_regcm(state, dst_reg);
17279 /* If the class is the same just move the register */
17280 if (src_regcm & dst_regcm &
17281 (REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32)) {
17282 if ((src_reg != dst_reg) || !omit_copy) {
17283 fprintf(fp, "\tmov %s, %s\n",
17284 reg(state, src, src_regcm),
17285 reg(state, dst, dst_regcm));
17288 /* Move 32bit to 16bit */
17289 else if ((src_regcm & REGCM_GPR32) &&
17290 (dst_regcm & REGCM_GPR16)) {
17291 src_reg = (src_reg - REGC_GPR32_FIRST) + REGC_GPR16_FIRST;
17292 if ((src_reg != dst_reg) || !omit_copy) {
17293 fprintf(fp, "\tmovw %s, %s\n",
17294 arch_reg_str(src_reg),
17295 arch_reg_str(dst_reg));
17298 /* Move from 32bit gprs to 16bit gprs */
/* NOTE(review): the guard below is identical to the branch above
 * (GPR32 src && GPR16 dst), so this arm is unreachable dead code.
 * The body (remapping dst_reg from the 16-bit to the 32-bit name)
 * suggests a different class pair was intended — verify against the
 * upstream romcc source. */
17299 else if ((src_regcm & REGCM_GPR32) &&
17300 (dst_regcm & REGCM_GPR16)) {
17301 dst_reg = (dst_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
17302 if ((src_reg != dst_reg) || !omit_copy) {
17303 fprintf(fp, "\tmov %s, %s\n",
17304 arch_reg_str(src_reg),
17305 arch_reg_str(dst_reg));
17308 /* Move 32bit to 8bit */
17309 else if ((src_regcm & REGCM_GPR32_8) &&
17310 (dst_regcm & REGCM_GPR8_LO))
17312 src_reg = (src_reg - REGC_GPR32_8_FIRST) + REGC_GPR8_FIRST;
17313 if ((src_reg != dst_reg) || !omit_copy) {
17314 fprintf(fp, "\tmovb %s, %s\n",
17315 arch_reg_str(src_reg),
17316 arch_reg_str(dst_reg));
17319 /* Move 16bit to 8bit */
17320 else if ((src_regcm & REGCM_GPR16_8) &&
17321 (dst_regcm & REGCM_GPR8_LO))
17323 src_reg = (src_reg - REGC_GPR16_8_FIRST) + REGC_GPR8_FIRST;
17324 if ((src_reg != dst_reg) || !omit_copy) {
17325 fprintf(fp, "\tmovb %s, %s\n",
17326 arch_reg_str(src_reg),
17327 arch_reg_str(dst_reg));
17330 /* Move 8/16bit to 16/32bit */
17331 else if ((src_regcm & (REGCM_GPR8_LO | REGCM_GPR16)) &&
17332 (dst_regcm & (REGCM_GPR16 | REGCM_GPR32))) {
17334 op = is_signed(src->type)? "movsx": "movzx";
17335 fprintf(fp, "\t%s %s, %s\n",
17337 reg(state, src, src_regcm),
17338 reg(state, dst, dst_regcm));
17340 /* Move between sse registers */
17341 else if ((src_regcm & dst_regcm & REGCM_XMM)) {
17342 if ((src_reg != dst_reg) || !omit_copy) {
17343 fprintf(fp, "\tmovdqa %s, %s\n",
17344 reg(state, src, src_regcm),
17345 reg(state, dst, dst_regcm));
17348 /* Move between mmx registers */
17349 else if ((src_regcm & dst_regcm & REGCM_MMX)) {
17350 if ((src_reg != dst_reg) || !omit_copy) {
17351 fprintf(fp, "\tmovq %s, %s\n",
17352 reg(state, src, src_regcm),
17353 reg(state, dst, dst_regcm));
17356 /* Move from sse to mmx registers */
17357 else if ((src_regcm & REGCM_XMM) && (dst_regcm & REGCM_MMX)) {
17358 fprintf(fp, "\tmovdq2q %s, %s\n",
17359 reg(state, src, src_regcm),
17360 reg(state, dst, dst_regcm));
17362 /* Move from mmx to sse registers */
17363 else if ((src_regcm & REGCM_MMX) && (dst_regcm & REGCM_XMM)) {
17364 fprintf(fp, "\tmovq2dq %s, %s\n",
17365 reg(state, src, src_regcm),
17366 reg(state, dst, dst_regcm));
17368 /* Move between 32bit gprs & mmx/sse registers */
17369 else if ((src_regcm & (REGCM_GPR32 | REGCM_MMX | REGCM_XMM)) &&
17370 (dst_regcm & (REGCM_GPR32 | REGCM_MMX | REGCM_XMM))) {
17371 fprintf(fp, "\tmovd %s, %s\n",
17372 reg(state, src, src_regcm),
17373 reg(state, dst, dst_regcm));
17375 /* Move from 16bit gprs & mmx/sse registers */
17376 else if ((src_regcm & REGCM_GPR16) &&
17377 (dst_regcm & (REGCM_MMX | REGCM_XMM))) {
17380 op = is_signed(src->type)? "movsx":"movzx";
17381 mid_reg = (src_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
17382 fprintf(fp, "\t%s %s, %s\n\tmovd %s, %s\n",
17384 arch_reg_str(src_reg),
17385 arch_reg_str(mid_reg),
17386 arch_reg_str(mid_reg),
17387 arch_reg_str(dst_reg));
17389 /* Move from mmx/sse registers to 16bit gprs */
17390 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
17391 (dst_regcm & REGCM_GPR16)) {
17392 dst_reg = (dst_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
17393 fprintf(fp, "\tmovd %s, %s\n",
17394 arch_reg_str(src_reg),
17395 arch_reg_str(dst_reg));
17397 /* Move from gpr to 64bit dividend */
17398 else if ((src_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO)) &&
17399 (dst_regcm & REGCM_DIVIDEND64)) {
17400 const char *extend;
17401 extend = is_signed(src->type)? "cltd":"movl $0, %edx";
17402 fprintf(fp, "\tmov %s, %%eax\n\t%s\n",
17403 arch_reg_str(src_reg),
17406 /* Move from 64bit gpr to gpr */
17407 else if ((src_regcm & REGCM_DIVIDEND64) &&
17408 (dst_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO))) {
17409 if (dst_regcm & REGCM_GPR32) {
17412 else if (dst_regcm & REGCM_GPR16) {
17415 else if (dst_regcm & REGCM_GPR8_LO) {
17418 fprintf(fp, "\tmov %s, %s\n",
17419 arch_reg_str(src_reg),
17420 arch_reg_str(dst_reg));
17422 /* Move from mmx/sse registers to 64bit gpr */
17423 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
17424 (dst_regcm & REGCM_DIVIDEND64)) {
17425 const char *extend;
17426 extend = is_signed(src->type)? "cltd": "movl $0, %edx";
17427 fprintf(fp, "\tmovd %s, %%eax\n\t%s\n",
17428 arch_reg_str(src_reg),
17431 /* Move from 64bit gpr to mmx/sse register */
17432 else if ((src_regcm & REGCM_DIVIDEND64) &&
17433 (dst_regcm & (REGCM_XMM | REGCM_MMX))) {
17434 fprintf(fp, "\tmovd %%eax, %s\n",
17435 arch_reg_str(dst_reg));
17437 #if X86_4_8BIT_GPRS
17438 /* Move from 8bit gprs to mmx/sse registers */
17439 else if ((src_regcm & REGCM_GPR8_LO) && (src_reg <= REG_DL) &&
17440 (dst_regcm & (REGCM_MMX | REGCM_XMM))) {
17443 op = is_signed(src->type)? "movsx":"movzx";
17444 mid_reg = (src_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
17445 fprintf(fp, "\t%s %s, %s\n\tmovd %s, %s\n",
17447 reg(state, src, src_regcm),
17448 arch_reg_str(mid_reg),
17449 arch_reg_str(mid_reg),
17450 reg(state, dst, dst_regcm));
17452 /* Move from mmx/sse registers and 8bit gprs */
17453 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
17454 (dst_regcm & REGCM_GPR8_LO) && (dst_reg <= REG_DL)) {
17456 mid_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
17457 fprintf(fp, "\tmovd %s, %s\n",
17458 reg(state, src, src_regcm),
17459 arch_reg_str(mid_reg));
17461 /* Move from 32bit gprs to 8bit gprs */
17462 else if ((src_regcm & REGCM_GPR32) &&
17463 (dst_regcm & REGCM_GPR8_LO)) {
17464 dst_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
17465 if ((src_reg != dst_reg) || !omit_copy) {
17466 fprintf(fp, "\tmov %s, %s\n",
17467 arch_reg_str(src_reg),
17468 arch_reg_str(dst_reg));
17471 /* Move from 16bit gprs to 8bit gprs */
17472 else if ((src_regcm & REGCM_GPR16) &&
17473 (dst_regcm & REGCM_GPR8_LO)) {
17474 dst_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR16_FIRST;
17475 if ((src_reg != dst_reg) || !omit_copy) {
17476 fprintf(fp, "\tmov %s, %s\n",
17477 arch_reg_str(src_reg),
17478 arch_reg_str(dst_reg));
17481 #endif /* X86_4_8BIT_GPRS */
17483 internal_error(state, ins, "unknown copy type");
17489 dst_reg = ID_REG(dst->id);
17490 dst_regcm = arch_reg_regcm(state, dst_reg);
17491 if (dst_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO)) {
17492 fprintf(fp, "\tmov ");
17493 print_const_val(state, src, fp);
17494 fprintf(fp, ", %s\n",
17495 reg(state, dst, REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO));
17497 else if (dst_regcm & REGCM_DIVIDEND64) {
17498 if (size_of(state, dst->type) > 4) {
17499 internal_error(state, ins, "64bit constant...");
17501 fprintf(fp, "\tmov $0, %%edx\n");
17502 fprintf(fp, "\tmov ");
17503 print_const_val(state, src, fp);
17504 fprintf(fp, ", %%eax\n");
17506 else if (dst_regcm & REGCM_DIVIDEND32) {
17507 if (size_of(state, dst->type) > 2) {
17508 internal_error(state, ins, "32bit constant...");
17510 fprintf(fp, "\tmov $0, %%dx\n");
17511 fprintf(fp, "\tmov ");
17512 print_const_val(state, src, fp);
17513 fprintf(fp, ", %%ax");
17515 else if (dst_regcm & (REGCM_XMM | REGCM_MMX)) {
17517 ref = get_const_pool_ref(state, src, fp);
17518 fprintf(fp, "\tmovq L%s%lu, %s\n",
17519 state->label_prefix, ref,
17520 reg(state, dst, (REGCM_XMM | REGCM_MMX)));
17523 internal_error(state, ins, "unknown copy immediate type");
/* Emit a load through a pointer: "mov (addr32), dst".  The address
 * must be in a 32-bit GPR; neither operand may be a constant. */
17528 static void print_op_load(struct compile_state *state,
17529 struct triple *ins, FILE *fp)
17531 struct triple *dst, *src;
17534 if (is_const(src) || is_const(dst)) {
17535 internal_error(state, ins, "unknown load operation");
17537 fprintf(fp, "\tmov (%s), %s\n",
17538 reg(state, src, REGCM_GPR32),
17539 reg(state, dst, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32));
/* Emit a store through a pointer, using a size suffix derived from the
 * source type: immediate value to (reg), register to absolute address
 * (constant destination), or register to (reg). */
17543 static void print_op_store(struct compile_state *state,
17544 struct triple *ins, FILE *fp)
17546 struct triple *dst, *src;
17549 if (is_const(src) && (src->op == OP_INTCONST)) {
17551 value = (long_t)(src->u.cval);
17552 fprintf(fp, "\tmov%s $%ld, (%s)\n",
17553 type_suffix(state, src->type),
17555 reg(state, dst, REGCM_GPR32));
17557 else if (is_const(dst) && (dst->op == OP_INTCONST)) {
17558 fprintf(fp, "\tmov%s %s, 0x%08lx\n",
17559 type_suffix(state, src->type),
17560 reg(state, src, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32),
17564 if (is_const(src) || is_const(dst)) {
17565 internal_error(state, ins, "unknown store operation");
17567 fprintf(fp, "\tmov%s %s, (%s)\n",
17568 type_suffix(state, src->type),
17569 reg(state, src, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32),
17570 reg(state, dst, REGCM_GPR32));
/* Emit a signed multiply: two-operand "imul reg, reg" form, or the
 * immediate form when the second operand is constant. */
17576 static void print_op_smul(struct compile_state *state,
17577 struct triple *ins, FILE *fp)
17579 if (!is_const(RHS(ins, 1))) {
17580 fprintf(fp, "\timul %s, %s\n",
17581 reg(state, RHS(ins, 1), REGCM_GPR32),
17582 reg(state, RHS(ins, 0), REGCM_GPR32));
17585 fprintf(fp, "\timul ");
17586 print_const_val(state, RHS(ins, 1), fp);
17587 fprintf(fp, ", %s\n", reg(state, RHS(ins, 0), REGCM_GPR32));
/* Emit a compare.  The instruction's "destination" is the flags
 * register (checked to be %eflags).  Constant second operand uses the
 * immediate form; otherwise both operands are narrowed to the
 * intersection of their register classes for matching widths. */
17591 static void print_op_cmp(struct compile_state *state,
17592 struct triple *ins, FILE *fp)
17596 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17597 dreg = check_reg(state, ins, REGCM_FLAGS);
17598 if (!reg_is_reg(state, dreg, REG_EFLAGS)) {
17599 internal_error(state, ins, "bad dest register for cmp");
17601 if (is_const(RHS(ins, 1))) {
17602 fprintf(fp, "\tcmp ");
17603 print_const_val(state, RHS(ins, 1), fp);
17604 fprintf(fp, ", %s\n", reg(state, RHS(ins, 0), mask));
17607 unsigned lmask, rmask;
17609 lreg = check_reg(state, RHS(ins, 0), mask);
17610 rreg = check_reg(state, RHS(ins, 1), mask);
17611 lmask = arch_reg_regcm(state, lreg);
17612 rmask = arch_reg_regcm(state, rreg);
17613 mask = lmask & rmask;
17614 fprintf(fp, "\tcmp %s, %s\n",
17615 reg(state, RHS(ins, 1), mask),
17616 reg(state, RHS(ins, 0), mask));
/* Emit "test reg, reg" against itself — sets flags for a subsequent
 * conditional jump/set on zero/non-zero. */
17620 static void print_op_test(struct compile_state *state,
17621 struct triple *ins, FILE *fp)
17624 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17625 fprintf(fp, "\ttest %s, %s\n",
17626 reg(state, RHS(ins, 0), mask),
17627 reg(state, RHS(ins, 0), mask));
/* Emit a jump: unconditional "jmp" for OP_JMP (which must carry no
 * condition), or the matching jcc for a conditional branch, which must
 * be fed by a preceding OP_CMP/OP_TEST whose flags survive — only
 * OP_COPY instructions are tolerated between the test and the branch
 * (OP_COPY is guaranteed not to clobber flags). */
17630 static void print_op_branch(struct compile_state *state,
17631 struct triple *branch, FILE *fp)
17633 const char *bop = "j";
17634 if (branch->op == OP_JMP) {
17635 if (TRIPLE_RHS(branch->sizes) != 0) {
17636 internal_error(state, branch, "jmp with condition?");
17641 struct triple *ptr;
17642 if (TRIPLE_RHS(branch->sizes) != 1) {
17643 internal_error(state, branch, "jmpcc without condition?");
17645 check_reg(state, RHS(branch, 0), REGCM_FLAGS);
17646 if ((RHS(branch, 0)->op != OP_CMP) &&
17647 (RHS(branch, 0)->op != OP_TEST)) {
17648 internal_error(state, branch, "bad branch test");
17650 #warning "FIXME I have observed instructions between the test and branch instructions"
17651 ptr = RHS(branch, 0);
17652 for(ptr = RHS(branch, 0)->next; ptr != branch; ptr = ptr->next) {
17653 if (ptr->op != OP_COPY) {
17654 internal_error(state, branch, "branch does not follow test");
17657 switch(branch->op) {
17658 case OP_JMP_EQ: bop = "jz"; break;
17659 case OP_JMP_NOTEQ: bop = "jnz"; break;
17660 case OP_JMP_SLESS: bop = "jl"; break;
17661 case OP_JMP_ULESS: bop = "jb"; break;
17662 case OP_JMP_SMORE: bop = "jg"; break;
17663 case OP_JMP_UMORE: bop = "ja"; break;
17664 case OP_JMP_SLESSEQ: bop = "jle"; break;
17665 case OP_JMP_ULESSEQ: bop = "jbe"; break;
17666 case OP_JMP_SMOREEQ: bop = "jge"; break;
17667 case OP_JMP_UMOREEQ: bop = "jae"; break;
17669 internal_error(state, branch, "Invalid branch op");
17674 fprintf(fp, "\t%s L%s%lu\n",
17676 state->label_prefix,
17677 TARG(branch, 0)->u.cval);
/* Emit a setcc into an 8-bit-low register.  Stricter than branches:
 * the OP_CMP/OP_TEST feeding the set must be the immediately preceding
 * instruction (no intervening OP_COPYs allowed). */
17680 static void print_op_set(struct compile_state *state,
17681 struct triple *set, FILE *fp)
17683 const char *sop = "set";
17684 if (TRIPLE_RHS(set->sizes) != 1) {
17685 internal_error(state, set, "setcc without condition?");
17687 check_reg(state, RHS(set, 0), REGCM_FLAGS);
17688 if ((RHS(set, 0)->op != OP_CMP) &&
17689 (RHS(set, 0)->op != OP_TEST)) {
17690 internal_error(state, set, "bad set test");
17692 if (RHS(set, 0)->next != set) {
17693 internal_error(state, set, "set does not follow test");
17696 case OP_SET_EQ: sop = "setz"; break;
17697 case OP_SET_NOTEQ: sop = "setnz"; break;
17698 case OP_SET_SLESS: sop = "setl"; break;
17699 case OP_SET_ULESS: sop = "setb"; break;
17700 case OP_SET_SMORE: sop = "setg"; break;
17701 case OP_SET_UMORE: sop = "seta"; break;
17702 case OP_SET_SLESSEQ: sop = "setle"; break;
17703 case OP_SET_ULESSEQ: sop = "setbe"; break;
17704 case OP_SET_SMOREEQ: sop = "setge"; break;
17705 case OP_SET_UMOREEQ: sop = "setae"; break;
17707 internal_error(state, set, "Invalid set op");
17710 fprintf(fp, "\t%s %s\n",
17711 sop, reg(state, set, REGCM_GPR8_LO));
/* Emit a bit scan (bsf/bsr) on 32-bit registers.  The emitted
 * sequence (middle lines not visible in this listing) references the
 * destination twice, presumably to patch up the undefined-result case
 * when the source is zero — confirm against the full source. */
17714 static void print_op_bit_scan(struct compile_state *state,
17715 struct triple *ins, FILE *fp)
17719 case OP_BSF: op = "bsf"; break;
17720 case OP_BSR: op = "bsr"; break;
17722 internal_error(state, ins, "unknown bit scan");
17732 reg(state, RHS(ins, 0), REGCM_GPR32),
17733 reg(state, ins, REGCM_GPR32),
17734 reg(state, ins, REGCM_GPR32));
/* Emit a static variable declaration: switch to the data section,
 * align, emit the variable's label (assigned by generate_local_labels)
 * and its initializer, then switch back to the text section. */
17738 static void print_sdecl(struct compile_state *state,
17739 struct triple *ins, FILE *fp)
17741 fprintf(fp, ".section \"" DATA_SECTION "\"\n");
17742 fprintf(fp, ".balign %d\n", align_of(state, ins->type));
17743 fprintf(fp, "L%s%lu:\n", state->label_prefix, ins->u.cval);
17744 print_const(state, MISC(ins, 0), fp);
17745 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
/* Central dispatcher: emit the assembly for one triple by delegating
 * to the per-operation printers above, or by emitting the short
 * single-instruction forms (rdmsr, wrmsr, hlt, idiv/div/mul) inline.
 * Constants, declarations and OP_PIECE produce no output; ops that
 * should have been lowered earlier are internal errors. */
17749 static void print_instruction(struct compile_state *state,
17750 struct triple *ins, FILE *fp)
17752 /* Assumption: after I have exited the register allocator
17753 * everything is in a valid register.
17757 print_op_asm(state, ins, fp);
17759 case OP_ADD: print_binary_op(state, "add", ins, fp); break;
17760 case OP_SUB: print_binary_op(state, "sub", ins, fp); break;
17761 case OP_AND: print_binary_op(state, "and", ins, fp); break;
17762 case OP_XOR: print_binary_op(state, "xor", ins, fp); break;
17763 case OP_OR: print_binary_op(state, "or", ins, fp); break;
17764 case OP_SL: print_op_shift(state, "shl", ins, fp); break;
17765 case OP_USR: print_op_shift(state, "shr", ins, fp); break;
17766 case OP_SSR: print_op_shift(state, "sar", ins, fp); break;
17767 case OP_POS: break;
17768 case OP_NEG: print_unary_op(state, "neg", ins, fp); break;
17769 case OP_INVERT: print_unary_op(state, "not", ins, fp); break;
17773 /* Don't generate anything here for constants */
17775 /* Don't generate anything for variable declarations. */
17778 print_sdecl(state, ins, fp);
17781 print_op_move(state, ins, fp);
17784 print_op_load(state, ins, fp);
17787 print_op_store(state, ins, fp);
17790 print_op_smul(state, ins, fp);
17792 case OP_CMP: print_op_cmp(state, ins, fp); break;
17793 case OP_TEST: print_op_test(state, ins, fp); break;
17795 case OP_JMP_EQ: case OP_JMP_NOTEQ:
17796 case OP_JMP_SLESS: case OP_JMP_ULESS:
17797 case OP_JMP_SMORE: case OP_JMP_UMORE:
17798 case OP_JMP_SLESSEQ: case OP_JMP_ULESSEQ:
17799 case OP_JMP_SMOREEQ: case OP_JMP_UMOREEQ:
17800 print_op_branch(state, ins, fp);
17802 case OP_SET_EQ: case OP_SET_NOTEQ:
17803 case OP_SET_SLESS: case OP_SET_ULESS:
17804 case OP_SET_SMORE: case OP_SET_UMORE:
17805 case OP_SET_SLESSEQ: case OP_SET_ULESSEQ:
17806 case OP_SET_SMOREEQ: case OP_SET_UMOREEQ:
17807 print_op_set(state, ins, fp);
17809 case OP_INB: case OP_INW: case OP_INL:
17810 print_op_in(state, ins, fp);
17812 case OP_OUTB: case OP_OUTW: case OP_OUTL:
17813 print_op_out(state, ins, fp);
17817 print_op_bit_scan(state, ins, fp);
17820 after_lhs(state, ins);
17821 fprintf(fp, "\trdmsr\n");
17824 fprintf(fp, "\twrmsr\n");
17827 fprintf(fp, "\thlt\n");
17830 fprintf(fp, "\tidiv %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
17833 fprintf(fp, "\tdiv %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
17836 fprintf(fp, "\tmul %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
17842 fprintf(fp, "L%s%lu:\n", state->label_prefix, ins->u.cval);
17844 /* Ignore OP_PIECE */
17847 /* Operations that should never get here */
17848 case OP_SDIV: case OP_UDIV:
17849 case OP_SMOD: case OP_UMOD:
17850 case OP_LTRUE: case OP_LFALSE: case OP_EQ: case OP_NOTEQ:
17851 case OP_SLESS: case OP_ULESS: case OP_SMORE: case OP_UMORE:
17852 case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
17854 internal_error(state, ins, "unknown op: %d %s",
17855 ins->op, tops(ins->op));
/* Emit the whole program: walk main_function's circular instruction
 * list, printing each triple via print_instruction.  When the source
 * occurrence changes, emit a source-location comment — a one-line form
 * for top-level code, or a multi-line block walking the occurrence's
 * parent chain for inlined code (also tracking the max inline depth,
 * reported at the end). */
17860 static void print_instructions(struct compile_state *state)
17862 struct triple *first, *ins;
17863 int print_location;
17864 struct occurance *last_occurance;
17866 int max_inline_depth;
17867 max_inline_depth = 0;
17868 print_location = 1;
17869 last_occurance = 0;
17870 fp = state->output;
17871 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
17872 first = RHS(state->main_function, 0);
17875 if (print_location &&
17876 last_occurance != ins->occurance) {
17877 if (!ins->occurance->parent) {
17878 fprintf(fp, "\t/* %s,%s:%d.%d */\n",
17879 ins->occurance->function,
17880 ins->occurance->filename,
17881 ins->occurance->line,
17882 ins->occurance->col);
17885 struct occurance *ptr;
17887 fprintf(fp, "\t/*\n");
17889 for(ptr = ins->occurance; ptr; ptr = ptr->parent) {
17891 fprintf(fp, "\t * %s,%s:%d.%d\n",
17897 fprintf(fp, "\t */\n");
17898 if (inline_depth > max_inline_depth) {
17899 max_inline_depth = inline_depth;
17902 if (last_occurance) {
17903 put_occurance(last_occurance);
17905 get_occurance(ins->occurance);
17906 last_occurance = ins->occurance;
17909 print_instruction(state, ins, fp);
17911 } while(ins != first);
17912 if (print_location) {
17913 fprintf(fp, "/* max inline depth %d */\n",
/* Final code-generation phase: number all labels, then print the
 * assembly for the whole program. */
17918 static void generate_code(struct compile_state *state)
17920 generate_local_labels(state);
17921 print_instructions(state);
/* Debug aid: repeatedly lex tokens and print each one (with its
 * source location) until TOK_EOF, showing the identifier name or
 * string value where available. */
17925 static void print_tokens(struct compile_state *state)
17928 tk = &state->token[0];
17933 next_token(state, 0);
17935 loc(stdout, state, 0);
17936 printf("%s <- `%s'\n",
17938 tk->ident ? tk->ident->name :
17939 tk->str_len ? tk->val.str : "");
17941 } while(tk->tok != TOK_EOF);
/* Top-level driver: initialize a compile_state (token slots, debug and
 * optimization settings, output file, label prefix, preprocessor
 * state), register keywords/macros/builtins, parse the input file in
 * the global scope, then run optimization and code generation. */
17944 static void compile(const char *filename, const char *ofilename,
17945 int cpu, int debug, int opt, const char *label_prefix)
17948 struct compile_state state;
17949 memset(&state, 0, sizeof(state));
17951 for(i = 0; i < sizeof(state.token)/sizeof(state.token[0]); i++) {
17952 memset(&state.token[i], 0, sizeof(state.token[i]));
17953 state.token[i].tok = -1;
17955 /* Remember the debug settings */
17957 state.debug = debug;
17958 state.optimize = opt;
17959 /* Remember the output filename */
17960 state.ofilename = ofilename;
17961 state.output = fopen(state.ofilename, "w");
17962 if (!state.output) {
17963 error(&state, 0, "Cannot open output file %s\n",
17966 /* Remember the label prefix */
17967 state.label_prefix = label_prefix;
17968 /* Prep the preprocessor */
17969 state.if_depth = 0;
17970 state.if_value = 0;
17971 /* register the C keywords */
17972 register_keywords(&state);
17973 /* register the keywords the macro preprocessor knows */
17974 register_macro_keywords(&state);
17975 /* Memorize where some special keywords are. */
17976 state.i_continue = lookup(&state, "continue", 8);
17977 state.i_break = lookup(&state, "break", 5);
17978 /* Enter the globl definition scope */
17979 start_scope(&state);
17980 register_builtins(&state);
17981 compile_file(&state, filename, 1);
17983 print_tokens(&state);
17986 /* Exit the global definition scope */
17989 /* Now that basic compilation has happened
17990 * optimize the intermediate code
17994 generate_code(&state);
17996 fprintf(stderr, "done\n");
/* Print the compiler's version banner. */
18000 static void version(void)
18002 printf("romcc " VERSION " released " RELEASE_DATE "\n");
/* Print the command-line usage summary. */
18005 static void usage(void)
18009 "Usage: romcc <source>.c\n"
18010 "Compile a C source file without using ram\n"
/* Report a command-line argument error on stderr (printf-style);
 * presumably prints usage and exits after the visible lines — the
 * tail of this function is not shown in this listing. */
18014 static void arg_error(char *fmt, ...)
18017 va_start(args, fmt);
18018 vfprintf(stderr, fmt, args);
18024 int main(int argc, char **argv)
18026 const char *filename;
18027 const char *ofilename;
18028 const char *label_prefix;
18035 ofilename = "auto.inc";
18039 while((argc > 1) && (argc != last_argc)) {
18041 if (strncmp(argv[1], "--debug=", 8) == 0) {
18042 debug = atoi(argv[1] + 8);
18046 else if (strncmp(argv[1], "--label-prefix=", 15) == 0) {
18047 label_prefix= argv[1] + 15;
18051 else if ((strcmp(argv[1],"-O") == 0) ||
18052 (strcmp(argv[1], "-O1") == 0)) {
18057 else if (strcmp(argv[1],"-O2") == 0) {
18062 else if ((strcmp(argv[1], "-o") == 0) && (argc > 2)) {
18063 ofilename = argv[2];
18067 else if (strncmp(argv[1], "-mcpu=", 6) == 0) {
18068 cpu = arch_encode_cpu(argv[1] + 6);
18069 if (cpu == BAD_CPU) {
18070 arg_error("Invalid cpu specified: %s\n",
18078 arg_error("Wrong argument count %d\n", argc);
18080 filename = argv[1];
18081 compile(filename, ofilename, cpu, debug, optimize, label_prefix);