/* Compile-time debug switches: 1 enables the named diagnostics, 0 disables. */
#define DEBUG_ERROR_MESSAGES 0
#define DEBUG_COLOR_GRAPH 0
/* NOTE(review): consistency checking is the only debug facility left
 * enabled — confirm this is intentional for release builds.
 */
#define DEBUG_CONSISTENCY 1
#define DEBUG_RANGE_CONFLICTS 0
#define DEBUG_COALESCING 0
#define DEBUG_SDP_BLOCKS 0
#define DEBUG_TRIPLE_COLOR 0
#define DEBUG_SIMPLIFY 0

/* Outstanding work items, surfaced at every compile. */
#warning "FIXME boundary cases with small types in larger registers"
#warning "FIXME give clear error messages about unused variables"
#warning "FIXME properly handle multi dimensional arrays"
28 /* Control flow graph of a loop without goto.
39 * |\ GGG HHH | continue;
67 * DFlocal(X) = { Y <- Succ(X) | idom(Y) != X }
68 * DFup(Z) = { Y <- DF(Z) | idom(Y) != X }
71 * [] == DFlocal(X) U DF(X)
74 * Dominator graph of the same nodes.
78 * BBB JJJ BBB: [ JJJ ] ( JJJ ) JJJ: [ ] ()
80 * CCC CCC: [ ] ( BBB, JJJ )
82 * DDD EEE DDD: [ ] ( BBB ) EEE: [ JJJ ] ()
84 * FFF FFF: [ ] ( BBB )
86 * GGG HHH GGG: [ ] ( BBB ) HHH: [ BBB ] ()
91 * BBB and JJJ are definitely the dominance frontier.
92 * Where do I place phi functions and how do I make that decision.
/* Print a printf-style diagnostic to stderr and terminate the compiler.
 * Flushes both standard streams so all pending output reaches the user.
 * (Reconstructed: parts of this routine were elided from the listing.)
 */
static void die(char *fmt, ...)
{
	va_list args;

	va_start(args, fmt);
	vfprintf(stderr, fmt, args);
	va_end(args);
	fflush(stdout);
	fflush(stderr);
	exit(1);
}
#define MALLOC_STRONG_DEBUG
/* Allocate size bytes or die; name describes the allocation for the
 * error message. Never returns NULL.
 */
static void *xmalloc(size_t size, const char *name)
{
	void *buf;
	buf = malloc(size);
	if (!buf) {
		die("Cannot malloc %ld bytes to hold %s: %s\n",
			size + 0UL, name, strerror(errno));
	}
	return buf;
}
/* Allocate size bytes of zero-filled memory or die (see xmalloc). */
static void *xcmalloc(size_t size, const char *name)
{
	void *buf;
	buf = xmalloc(size, name);
	memset(buf, 0, size);
	return buf;
}
/* Release memory obtained from xmalloc/xcmalloc.
 * Takes const void * so callers holding const pointers need no cast.
 */
static void xfree(const void *ptr)
{
	free((void *)ptr);
}
/* Duplicate a NUL-terminated string into xmalloc'd storage; dies on OOM. */
static char *xstrdup(const char *str)
{
	char *new;
	int len;
	len = strlen(str);
	new = xmalloc(len + 1, "xstrdup string");
	memcpy(new, str, len);
	new[len] = '\0';
	return new;
}
/* Change the working directory or die with a diagnostic. */
static void xchdir(const char *path)
{
	if (chdir(path) != 0) {
		die("chdir to %s failed: %s\n",
			path, strerror(errno));
	}
}
/* Report whether filename is visible from dirname.
 * Fix: access(2) takes F_OK/R_OK/W_OK/X_OK, not an open(2) flag.
 * The old code passed O_RDONLY, which only worked because O_RDONLY
 * happens to equal 0 (== F_OK); behavior is unchanged, the intent
 * (a pure existence test) is now spelled correctly.
 * EACCES/EROFS mean the file exists but is unreadable or on a
 * read-only filesystem, so those still count as "exists".
 */
static int exists(const char *dirname, const char *filename)
{
	int does_exist;
	xchdir(dirname);
	does_exist = 1;
	if (access(filename, F_OK) < 0) {
		if ((errno != EACCES) && (errno != EROFS)) {
			does_exist = 0;
		}
	}
	return does_exist;
}
/* Read the whole of dirname/filename into a freshly allocated buffer.
 * A trailing '\n' and a NUL terminator are appended; *r_size receives
 * the file size plus one (for the added newline). Dies on any I/O
 * failure. Returns 0 with *r_size == 0 when filename is NULL.
 * (Reconstructed: several lines of this routine were elided from the
 * listing — verify against the original.)
 */
static char *slurp_file(const char *dirname, const char *filename, off_t *r_size)
{
	int fd;
	char *buf;
	off_t size, progress;
	ssize_t result;
	struct stat stats;

	if (!filename) {
		*r_size = 0;
		return 0;
	}
	xchdir(dirname);
	fd = open(filename, O_RDONLY);
	if (fd < 0) {
		die("Cannot open '%s' : %s\n",
			filename, strerror(errno));
	}
	result = fstat(fd, &stats);
	if (result < 0) {
		die("Cannot stat: %s: %s\n",
			filename, strerror(errno));
	}
	size = stats.st_size;
	*r_size = size + 1;
	buf = xmalloc(size +2, filename);
	buf[size] = '\n'; /* Make certain the file is newline terminated */
	buf[size+1] = '\0'; /* Null terminate the file for good measure */
	progress = 0;
	while(progress < size) {
		result = read(fd, buf + progress, size - progress);
		if (result < 0) {
			/* Transient interruptions are simply retried. */
			if ((errno == EINTR) || (errno == EAGAIN))
				continue;
			die("read on %s of %ld bytes failed: %s\n",
				filename, (size - progress)+ 0UL, strerror(errno));
		}
		progress += result;
	}
	result = close(fd);
	if (result < 0) {
		die("Close of %s failed: %s\n",
			filename, strerror(errno));
	}
	return buf;
}
/* Types on the destination platform.
 * The compiler targets 32bit x86: char is 8 bits and signed,
 * short is 16 bits, int and long are both 32 bits.
 */
#warning "FIXME this assumes 32bit x86 is the destination"
typedef int8_t   schar_t;
typedef uint8_t  uchar_t;
typedef int8_t   char_t;
typedef int16_t  short_t;
typedef uint16_t ushort_t;
typedef int32_t  int_t;
typedef uint32_t uint_t;
typedef int32_t  long_t;
typedef uint32_t ulong_t;

/* Limits of the destination types above (mirroring <limits.h> names). */
#define SCHAR_T_MIN (-128)
#define SCHAR_T_MAX 127
#define UCHAR_T_MAX 255
#define CHAR_T_MIN SCHAR_T_MIN
#define CHAR_T_MAX SCHAR_T_MAX
#define SHRT_T_MIN (-32768)
#define SHRT_T_MAX 32767
#define USHRT_T_MAX 65535
/* Fix: INT_T_MIN was defined as (-LONG_T_MAX - 1); numerically identical
 * on this target but inconsistent — define it in terms of INT_T_MAX.
 */
#define INT_T_MIN (-INT_T_MAX - 1)
#define INT_T_MAX 2147483647
#define UINT_T_MAX 4294967295U
#define LONG_T_MIN (-LONG_T_MAX - 1)
#define LONG_T_MAX 2147483647
#define ULONG_T_MAX 4294967295U
239 struct file_state *prev;
240 const char *basename;
248 const char *report_name;
249 const char *report_dir;
254 struct hash_entry *ident;
262 /* I have two classes of types:
264 * Logical types. (The type the C standard says the operation is of)
266 * The operational types are:
281 * No memory is useable by the compiler.
282 * There is no floating point support.
283 * All operations take place in general purpose registers.
284 * There is one type of general purpose register.
285 * Unsigned longs are stored in that general purpose register.
288 /* Operations on general purpose registers.
307 #define OP_POS 16 /* Dummy positive operator don't use it */
317 #define OP_SLESSEQ 26
318 #define OP_ULESSEQ 27
319 #define OP_SMOREEQ 28
320 #define OP_UMOREEQ 29
322 #define OP_LFALSE 30 /* Test if the expression is logically false */
323 #define OP_LTRUE 31 /* Test if the expression is logically true */
327 /* For OP_STORE ->type holds the type
328 * RHS(0) holds the destination address
329 * RHS(1) holds the value to store.
334 #define OP_MIN_CONST 50
335 #define OP_MAX_CONST 59
336 #define IS_CONST_OP(X) (((X) >= OP_MIN_CONST) && ((X) <= OP_MAX_CONST))
337 #define OP_INTCONST 50
338 /* For OP_INTCONST ->type holds the type.
339 * ->u.cval holds the constant value.
341 #define OP_BLOBCONST 51
342 /* For OP_BLOBCONST ->type holds the layout and size
343 * information. u.blob holds a pointer to the raw binary
344 * data for the constant initializer.
346 #define OP_ADDRCONST 52
347 /* For OP_ADDRCONST ->type holds the type.
348 * MISC(0) holds the reference to the static variable.
349 * ->u.cval holds an offset from that value.
353 /* OP_WRITE moves one pseudo register to another.
354 * RHS(0) holds the destination pseudo register, which must be an OP_DECL.
355 * RHS(1) holds the pseudo to move.
359 /* OP_READ reads the value of a variable and makes
360 * it available for the pseudo operation.
361 * Useful for things like def-use chains.
362 * RHS(0) holds points to the triple to read from.
365 /* OP_COPY makes a copy of the pseudo register or constant in RHS(0).
368 /* OP_PIECE returns one piece of a instruction that returns a structure.
369 * MISC(0) is the instruction
370 * u.cval is the LHS piece of the instruction to return.
373 /* OP_ASM holds a sequence of assembly instructions, the result
374 * of a C asm directive.
375 * RHS(x) holds input value x to the assembly sequence.
376 * LHS(x) holds the output value x from the assembly sequence.
377 * u.blob holds the string of assembly instructions.
381 /* OP_DEREF generates an lvalue from a pointer.
382 * RHS(0) holds the pointer value.
383 * OP_DEREF serves as a place holder to indicate all necessary
384 * checks have been done to indicate a value is an lvalue.
387 /* OP_DOT references a submember of a structure lvalue.
388 * RHS(0) holds the lvalue.
389 * ->u.field holds the name of the field we want.
391 * Not seen outside of expressions.
394 /* OP_VAL returns the value of a subexpression of the current expression.
395 * Useful for operators that have side effects.
396 * RHS(0) holds the expression.
397 * MISC(0) holds the subexpression of RHS(0) that is the
398 * value of the expression.
400 * Not seen outside of expressions.
403 /* OP_LAND performs a C logical and between RHS(0) and RHS(1).
404 * Not seen outside of expressions.
407 /* OP_LOR performs a C logical or between RHS(0) and RHS(1).
408 * Not seen outside of expressions.
411 /* OP_COND performs a C ? : operation.
412 * RHS(0) holds the test.
413 * RHS(1) holds the expression to evaluate if the test returns true.
414 * RHS(2) holds the expression to evaluate if the test returns false.
415 * Not seen outside of expressions.
418 /* OP_COMMA performs a C comma operation.
419 * That is RHS(0) is evaluated, then RHS(1)
420 * and the value of RHS(1) is returned.
421 * Not seen outside of expressions.
425 /* OP_CALL performs a procedure call.
426 * MISC(0) holds a pointer to the OP_LIST of a function
427 * RHS(x) holds argument x of a function
429 * Currently not seen outside of expressions.
431 #define OP_VAL_VEC 74
432 /* OP_VAL_VEC is an array of triples that are either variable
433 * or values for a structure or an array.
434 * RHS(x) holds element x of the vector.
435 * triple->type->elements holds the size of the vector.
440 /* OP_LIST Holds a list of statements, and a result value.
441 * RHS(0) holds the list of statements.
442 * MISC(0) holds the value of the statements.
445 #define OP_BRANCH 81 /* branch */
446 /* For branch instructions
447 * TARG(0) holds the branch target.
448 * RHS(0) if present holds the branch condition.
449 * ->next holds where to branch to if the branch is not taken.
450 * The branch target can only be a decl...
454 /* OP_LABEL is a triple that establishes an target for branches.
455 * ->use is the list of all branches that use this label.
459 /* OP_DECL is a triple that establishes an lvalue for assignments.
460 * ->use is a list of statements that use the variable.
464 /* OP_SDECL is a triple that establishes a variable of static
466 * ->use is a list of statements that use the variable.
467 * MISC(0) holds the initializer expression.
472 /* OP_PHI is a triple used in SSA form code.
473 * It is used when multiple code paths merge and a variable needs
474 * a single assignment from any of those code paths.
475 * The operation is a cross between OP_DECL and OP_WRITE, which
476 * is what OP_PHI is generated from.
478 * RHS(x) points to the value from code path x
479 * The number of RHS entries is the number of control paths into the block
480 * in which OP_PHI resides. The elements of the array point to point
481 * to the variables OP_PHI is derived from.
483 * MISC(0) holds a pointer to the original OP_DECL node.
486 /* Architecture specific instructions */
489 #define OP_SET_EQ 102
490 #define OP_SET_NOTEQ 103
491 #define OP_SET_SLESS 104
492 #define OP_SET_ULESS 105
493 #define OP_SET_SMORE 106
494 #define OP_SET_UMORE 107
495 #define OP_SET_SLESSEQ 108
496 #define OP_SET_ULESSEQ 109
497 #define OP_SET_SMOREEQ 110
498 #define OP_SET_UMOREEQ 111
501 #define OP_JMP_EQ 113
502 #define OP_JMP_NOTEQ 114
503 #define OP_JMP_SLESS 115
504 #define OP_JMP_ULESS 116
505 #define OP_JMP_SMORE 117
506 #define OP_JMP_UMORE 118
507 #define OP_JMP_SLESSEQ 119
508 #define OP_JMP_ULESSEQ 120
509 #define OP_JMP_SMOREEQ 121
510 #define OP_JMP_UMOREEQ 122
512 /* Builtin operators that it is just simpler to use the compiler for */
528 #define PURE 1 /* Triple has no side effects */
529 #define IMPURE 2 /* Triple has side effects */
530 #define PURE_BITS(FLAGS) ((FLAGS) & 0x3)
531 #define DEF 4 /* Triple is a variable definition */
532 #define BLOCK 8 /* Triple stores the current block */
533 #define STRUCTURAL 16 /* Triple does not generate a machine instruction */
534 unsigned char lhs, rhs, misc, targ;
537 #define OP(LHS, RHS, MISC, TARG, FLAGS, NAME) { \
545 static const struct op_info table_ops[] = {
546 [OP_SDIVT ] = OP( 2, 2, 0, 0, PURE | BLOCK , "sdivt"),
547 [OP_UDIVT ] = OP( 2, 2, 0, 0, PURE | BLOCK , "udivt"),
548 [OP_SMUL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smul"),
549 [OP_UMUL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umul"),
550 [OP_SDIV ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sdiv"),
551 [OP_UDIV ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "udiv"),
552 [OP_SMOD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smod"),
553 [OP_UMOD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umod"),
554 [OP_ADD ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "add"),
555 [OP_SUB ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sub"),
556 [OP_SL ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sl"),
557 [OP_USR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "usr"),
558 [OP_SSR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "ssr"),
559 [OP_AND ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "and"),
560 [OP_XOR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "xor"),
561 [OP_OR ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "or"),
562 [OP_POS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "pos"),
563 [OP_NEG ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "neg"),
564 [OP_INVERT ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "invert"),
566 [OP_EQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "eq"),
567 [OP_NOTEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "noteq"),
568 [OP_SLESS ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "sless"),
569 [OP_ULESS ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "uless"),
570 [OP_SMORE ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smore"),
571 [OP_UMORE ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umore"),
572 [OP_SLESSEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "slesseq"),
573 [OP_ULESSEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "ulesseq"),
574 [OP_SMOREEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "smoreeq"),
575 [OP_UMOREEQ ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK , "umoreeq"),
576 [OP_LFALSE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "lfalse"),
577 [OP_LTRUE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK , "ltrue"),
579 [OP_LOAD ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "load"),
580 [OP_STORE ] = OP( 0, 2, 0, 0, IMPURE | BLOCK , "store"),
582 [OP_NOOP ] = OP( 0, 0, 0, 0, PURE | BLOCK | STRUCTURAL, "noop"),
584 [OP_INTCONST ] = OP( 0, 0, 0, 0, PURE | DEF, "intconst"),
585 [OP_BLOBCONST ] = OP( 0, 0, 0, 0, PURE , "blobconst"),
586 [OP_ADDRCONST ] = OP( 0, 0, 1, 0, PURE | DEF, "addrconst"),
588 [OP_WRITE ] = OP( 0, 2, 0, 0, PURE | BLOCK, "write"),
589 [OP_READ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "read"),
590 [OP_COPY ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "copy"),
591 [OP_PIECE ] = OP( 0, 0, 1, 0, PURE | DEF | STRUCTURAL, "piece"),
592 [OP_ASM ] = OP(-1, -1, 0, 0, IMPURE, "asm"),
593 [OP_DEREF ] = OP( 0, 1, 0, 0, 0 | DEF | BLOCK, "deref"),
594 [OP_DOT ] = OP( 0, 1, 0, 0, 0 | DEF | BLOCK, "dot"),
596 [OP_VAL ] = OP( 0, 1, 1, 0, 0 | DEF | BLOCK, "val"),
597 [OP_LAND ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "land"),
598 [OP_LOR ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "lor"),
599 [OP_COND ] = OP( 0, 3, 0, 0, 0 | DEF | BLOCK, "cond"),
600 [OP_COMMA ] = OP( 0, 2, 0, 0, 0 | DEF | BLOCK, "comma"),
601 /* Call is special: it can stand in for almost anything, so its sizes depend on context */
602 [OP_CALL ] = OP(-1, -1, 1, 0, 0 | BLOCK, "call"),
603 /* The sizes of OP_CALL and OP_VAL_VEC depend upon context */
604 [OP_VAL_VEC ] = OP( 0, -1, 0, 0, 0 | BLOCK | STRUCTURAL, "valvec"),
606 [OP_LIST ] = OP( 0, 1, 1, 0, 0 | DEF | STRUCTURAL, "list"),
607 /* The number of targets for OP_BRANCH depends on context */
608 [OP_BRANCH ] = OP( 0, -1, 0, 1, PURE | BLOCK, "branch"),
609 [OP_LABEL ] = OP( 0, 0, 0, 0, PURE | BLOCK | STRUCTURAL, "label"),
610 [OP_ADECL ] = OP( 0, 0, 0, 0, PURE | BLOCK | STRUCTURAL, "adecl"),
611 [OP_SDECL ] = OP( 0, 0, 1, 0, PURE | BLOCK | STRUCTURAL, "sdecl"),
612 /* The number of RHS elements of OP_PHI depend upon context */
613 [OP_PHI ] = OP( 0, -1, 1, 0, PURE | DEF | BLOCK, "phi"),
615 [OP_CMP ] = OP( 0, 2, 0, 0, PURE | DEF | BLOCK, "cmp"),
616 [OP_TEST ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "test"),
617 [OP_SET_EQ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_eq"),
618 [OP_SET_NOTEQ ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_noteq"),
619 [OP_SET_SLESS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_sless"),
620 [OP_SET_ULESS ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_uless"),
621 [OP_SET_SMORE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_smore"),
622 [OP_SET_UMORE ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_umore"),
623 [OP_SET_SLESSEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_slesseq"),
624 [OP_SET_ULESSEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_ulesseq"),
625 [OP_SET_SMOREEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_smoreq"),
626 [OP_SET_UMOREEQ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "set_umoreq"),
627 [OP_JMP ] = OP( 0, 0, 0, 1, PURE | BLOCK, "jmp"),
628 [OP_JMP_EQ ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_eq"),
629 [OP_JMP_NOTEQ ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_noteq"),
630 [OP_JMP_SLESS ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_sless"),
631 [OP_JMP_ULESS ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_uless"),
632 [OP_JMP_SMORE ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_smore"),
633 [OP_JMP_UMORE ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_umore"),
634 [OP_JMP_SLESSEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_slesseq"),
635 [OP_JMP_ULESSEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_ulesseq"),
636 [OP_JMP_SMOREEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_smoreq"),
637 [OP_JMP_UMOREEQ] = OP( 0, 1, 0, 1, PURE | BLOCK, "jmp_umoreq"),
639 [OP_INB ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inb"),
640 [OP_INW ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inw"),
641 [OP_INL ] = OP( 0, 1, 0, 0, IMPURE | DEF | BLOCK, "__inl"),
642 [OP_OUTB ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outb"),
643 [OP_OUTW ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outw"),
644 [OP_OUTL ] = OP( 0, 2, 0, 0, IMPURE| BLOCK, "__outl"),
645 [OP_BSF ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "__bsf"),
646 [OP_BSR ] = OP( 0, 1, 0, 0, PURE | DEF | BLOCK, "__bsr"),
647 [OP_RDMSR ] = OP( 2, 1, 0, 0, IMPURE | BLOCK, "__rdmsr"),
648 [OP_WRMSR ] = OP( 0, 3, 0, 0, IMPURE | BLOCK, "__wrmsr"),
649 [OP_HLT ] = OP( 0, 0, 0, 0, IMPURE | BLOCK, "__hlt"),
652 #define OP_MAX (sizeof(table_ops)/sizeof(table_ops[0]))
654 static const char *tops(int index)
656 static const char unknown[] = "unknown op";
660 if (index > OP_MAX) {
663 return table_ops[index].name;
670 struct triple_set *next;
671 struct triple *member;
681 const char *filename;
682 const char *function;
685 struct occurance *parent;
688 struct triple *next, *prev;
689 struct triple_set *use;
692 unsigned char template_id;
693 unsigned short sizes;
694 #define TRIPLE_LHS(SIZES) (((SIZES) >> 0) & 0x0f)
695 #define TRIPLE_RHS(SIZES) (((SIZES) >> 4) & 0xff)
696 #define TRIPLE_MISC(SIZES) (((SIZES) >> 12) & 0x03)
697 #define TRIPLE_TARG(SIZES) (((SIZES) >> 14) & 0x03)
698 #define TRIPLE_SIZE(SIZES) \
699 (TRIPLE_LHS(SIZES) + \
700 TRIPLE_RHS(SIZES) + \
701 TRIPLE_MISC(SIZES) + \
703 #define TRIPLE_SIZES(LHS, RHS, MISC, TARG) \
704 ((((LHS) & 0x0f) << 0) | \
705 (((RHS) & 0xff) << 4) | \
706 (((MISC) & 0x03) << 12) | \
707 (((TARG) & 0x03) << 14))
708 #define TRIPLE_LHS_OFF(SIZES) (0)
709 #define TRIPLE_RHS_OFF(SIZES) (TRIPLE_LHS_OFF(SIZES) + TRIPLE_LHS(SIZES))
710 #define TRIPLE_MISC_OFF(SIZES) (TRIPLE_RHS_OFF(SIZES) + TRIPLE_RHS(SIZES))
711 #define TRIPLE_TARG_OFF(SIZES) (TRIPLE_MISC_OFF(SIZES) + TRIPLE_MISC(SIZES))
712 #define LHS(PTR,INDEX) ((PTR)->param[TRIPLE_LHS_OFF((PTR)->sizes) + (INDEX)])
713 #define RHS(PTR,INDEX) ((PTR)->param[TRIPLE_RHS_OFF((PTR)->sizes) + (INDEX)])
714 #define TARG(PTR,INDEX) ((PTR)->param[TRIPLE_TARG_OFF((PTR)->sizes) + (INDEX)])
715 #define MISC(PTR,INDEX) ((PTR)->param[TRIPLE_MISC_OFF((PTR)->sizes) + (INDEX)])
716 unsigned id; /* A scratch value and finally the register */
717 #define TRIPLE_FLAG_FLATTENED (1 << 31)
718 #define TRIPLE_FLAG_PRE_SPLIT (1 << 30)
719 #define TRIPLE_FLAG_POST_SPLIT (1 << 29)
720 #define TRIPLE_FLAG_VOLATILE (1 << 28)
721 #define TRIPLE_FLAG_LOCAL (1 << 27)
722 struct occurance *occurance;
727 struct hash_entry *field;
728 struct asm_info *ainfo;
730 struct triple *param[2];
737 struct ins_template {
738 struct reg_info lhs[MAX_LHS + 1], rhs[MAX_RHS + 1];
742 struct ins_template tmpl;
747 struct block_set *next;
748 struct block *member;
751 struct block *work_next;
752 struct block *left, *right;
753 struct triple *first, *last;
755 struct block_set *use;
756 struct block_set *idominates;
757 struct block_set *domfrontier;
759 struct block_set *ipdominates;
760 struct block_set *ipdomfrontier;
768 struct hash_entry *ident;
775 struct hash_entry *ident;
781 struct hash_entry *next;
785 struct macro *sym_define;
786 struct symbol *sym_label;
787 struct symbol *sym_tag;
788 struct symbol *sym_ident;
791 #define HASH_TABLE_SIZE 2048
793 struct compile_state {
794 const char *label_prefix;
795 const char *ofilename;
797 struct file_state *file;
798 struct occurance *last_occurance;
799 const char *function;
800 struct token token[4];
801 struct hash_entry *hash_table[HASH_TABLE_SIZE];
802 struct hash_entry *i_switch;
803 struct hash_entry *i_case;
804 struct hash_entry *i_continue;
805 struct hash_entry *i_break;
806 struct hash_entry *i_default;
808 int if_depth, if_value;
810 struct file_state *macro_file;
811 struct triple *main_function;
812 struct triple *first;
813 struct block *first_block, *last_block;
815 unsigned long features;
820 /* visibility global/local */
821 /* static/auto duration */
822 /* typedef, register, inline */
824 #define STOR_MASK 0x000f
826 #define STOR_GLOBAL 0x0001
828 #define STOR_PERM 0x0002
829 /* Storage specifiers */
830 #define STOR_AUTO 0x0000
831 #define STOR_STATIC 0x0002
832 #define STOR_EXTERN 0x0003
833 #define STOR_REGISTER 0x0004
834 #define STOR_TYPEDEF 0x0008
835 #define STOR_INLINE 0x000c
838 #define QUAL_MASK 0x0070
839 #define QUAL_NONE 0x0000
840 #define QUAL_CONST 0x0010
841 #define QUAL_VOLATILE 0x0020
842 #define QUAL_RESTRICT 0x0040
845 #define TYPE_MASK 0x1f00
846 #define TYPE_INTEGER(TYPE) ((((TYPE) >= TYPE_CHAR) && ((TYPE) <= TYPE_ULLONG)) || ((TYPE) == TYPE_ENUM))
847 #define TYPE_ARITHMETIC(TYPE) ((((TYPE) >= TYPE_CHAR) && ((TYPE) <= TYPE_LDOUBLE)) || ((TYPE) == TYPE_ENUM))
848 #define TYPE_UNSIGNED(TYPE) ((TYPE) & 0x0100)
849 #define TYPE_SIGNED(TYPE) (!TYPE_UNSIGNED(TYPE))
850 #define TYPE_MKUNSIGNED(TYPE) (((TYPE) & ~0xF000) | 0x0100)
851 #define TYPE_RANK(TYPE) ((TYPE) & ~0xF1FF)
852 #define TYPE_PTR(TYPE) (((TYPE) & TYPE_MASK) == TYPE_POINTER)
853 #define TYPE_DEFAULT 0x0000
854 #define TYPE_VOID 0x0100
855 #define TYPE_CHAR 0x0200
856 #define TYPE_UCHAR 0x0300
857 #define TYPE_SHORT 0x0400
858 #define TYPE_USHORT 0x0500
859 #define TYPE_INT 0x0600
860 #define TYPE_UINT 0x0700
861 #define TYPE_LONG 0x0800
862 #define TYPE_ULONG 0x0900
863 #define TYPE_LLONG 0x0a00 /* long long */
864 #define TYPE_ULLONG 0x0b00
865 #define TYPE_FLOAT 0x0c00
866 #define TYPE_DOUBLE 0x0d00
867 #define TYPE_LDOUBLE 0x0e00 /* long double */
869 /* Note: TYPE_ENUM is chosen very carefully so TYPE_RANK works */
870 #define TYPE_ENUM 0x1600
871 #define TYPE_LIST 0x1700
872 /* TYPE_LIST is a basic building block when defining enumerations
873 * type->field_ident holds the name of this enumeration entry.
874 * type->right holds the entry in the list.
877 #define TYPE_STRUCT 0x1000
878 #define TYPE_UNION 0x1100
879 #define TYPE_POINTER 0x1200
881 * type->left holds the type pointed to.
883 #define TYPE_FUNCTION 0x1300
884 /* For TYPE_FUNCTION:
885 * type->left holds the return type.
886 * type->right holds the...
888 #define TYPE_PRODUCT 0x1400
889 /* TYPE_PRODUCT is a basic building block when defining structures
890 * type->left holds the type that appears first in memory.
891 * type->right holds the type that appears next in memory.
893 #define TYPE_OVERLAP 0x1500
894 /* TYPE_OVERLAP is a basic building block when defining unions
895 * type->left and type->right holds to types that overlap
896 * each other in memory.
898 #define TYPE_ARRAY 0x1800
899 /* TYPE_ARRAY is a basic building block when defining arrays.
900 * type->left holds the type we are an array of.
901 * type-> holds the number of elements.
904 #define ELEMENT_COUNT_UNSPECIFIED ULONG_T_MAX
908 struct type *left, *right;
910 struct hash_entry *field_ident;
911 struct hash_entry *type_ident;
914 #define TEMPLATE_BITS 7
915 #define MAX_TEMPLATES (1<<TEMPLATE_BITS)
916 #define MAX_REG_EQUIVS 16
918 #define MAX_REGISTERS 75
919 #define REGISTER_BITS 7
920 #define MAX_VIRT_REGISTERS (1<<REGISTER_BITS)
922 #define REG_UNNEEDED 1
923 #define REG_VIRT0 (MAX_REGISTERS + 0)
924 #define REG_VIRT1 (MAX_REGISTERS + 1)
925 #define REG_VIRT2 (MAX_REGISTERS + 2)
926 #define REG_VIRT3 (MAX_REGISTERS + 3)
927 #define REG_VIRT4 (MAX_REGISTERS + 4)
928 #define REG_VIRT5 (MAX_REGISTERS + 5)
929 #define REG_VIRT6 (MAX_REGISTERS + 6)
930 #define REG_VIRT7 (MAX_REGISTERS + 7)
931 #define REG_VIRT8 (MAX_REGISTERS + 8)
932 #define REG_VIRT9 (MAX_REGISTERS + 9)
934 #if (MAX_REGISTERS + 9) > MAX_VIRT_REGISTERS
935 #error "MAX_VIRT_REGISTERS to small"
937 #if (MAX_REGC + REGISTER_BITS) > 27
938 #error "Too many id bits used"
941 /* Provision for 8 register classes */
943 #define REGC_SHIFT REGISTER_BITS
944 #define REGC_MASK (((1 << MAX_REGC) - 1) << REGISTER_BITS)
945 #define REG_MASK (MAX_VIRT_REGISTERS -1)
946 #define ID_REG(ID) ((ID) & REG_MASK)
947 #define SET_REG(ID, REG) ((ID) = (((ID) & ~REG_MASK) | ((REG) & REG_MASK)))
948 #define ID_REGCM(ID) (((ID) & REGC_MASK) >> REGC_SHIFT)
949 #define SET_REGCM(ID, REGCM) ((ID) = (((ID) & ~REGC_MASK) | (((REGCM) << REGC_SHIFT) & REGC_MASK)))
950 #define SET_INFO(ID, INFO) ((ID) = (((ID) & ~(REG_MASK | REGC_MASK)) | \
951 (((INFO).reg) & REG_MASK) | ((((INFO).regcm) << REGC_SHIFT) & REGC_MASK)))
953 static unsigned arch_reg_regcm(struct compile_state *state, int reg);
954 static unsigned arch_regcm_normalize(struct compile_state *state, unsigned regcm);
955 static unsigned arch_regcm_reg_normalize(struct compile_state *state, unsigned regcm);
956 static void arch_reg_equivs(
957 struct compile_state *state, unsigned *equiv, int reg);
958 static int arch_select_free_register(
959 struct compile_state *state, char *used, int classes);
960 static unsigned arch_regc_size(struct compile_state *state, int class);
961 static int arch_regcm_intersect(unsigned regcm1, unsigned regcm2);
962 static unsigned arch_type_to_regcm(struct compile_state *state, struct type *type);
963 static const char *arch_reg_str(int reg);
964 static struct reg_info arch_reg_constraint(
965 struct compile_state *state, struct type *type, const char *constraint);
966 static struct reg_info arch_reg_clobber(
967 struct compile_state *state, const char *clobber);
968 static struct reg_info arch_reg_lhs(struct compile_state *state,
969 struct triple *ins, int index);
970 static struct reg_info arch_reg_rhs(struct compile_state *state,
971 struct triple *ins, int index);
972 static struct triple *transform_to_arch_instruction(
973 struct compile_state *state, struct triple *ins);
977 #define DEBUG_ABORT_ON_ERROR 0x0001
978 #define DEBUG_INTERMEDIATE_CODE 0x0002
979 #define DEBUG_CONTROL_FLOW 0x0004
980 #define DEBUG_BASIC_BLOCKS 0x0008
981 #define DEBUG_FDOMINATORS 0x0010
982 #define DEBUG_RDOMINATORS 0x0020
983 #define DEBUG_TRIPLES 0x0040
984 #define DEBUG_INTERFERENCE 0x0080
985 #define DEBUG_ARCH_CODE 0x0100
986 #define DEBUG_CODE_ELIMINATION 0x0200
987 #define DEBUG_INSERTED_COPIES 0x0400
989 #define GLOBAL_SCOPE_DEPTH 1
990 #define FUNCTION_SCOPE_DEPTH (GLOBAL_SCOPE_DEPTH + 1)
992 static void compile_file(struct compile_state *old_state, const char *filename, int local);
994 static void do_cleanup(struct compile_state *state)
997 fclose(state->output);
998 unlink(state->ofilename);
1002 static int get_col(struct file_state *file)
1006 ptr = file->line_start;
1008 for(col = 0; ptr < end; ptr++) {
1013 col = (col & ~7) + 8;
1019 static void loc(FILE *fp, struct compile_state *state, struct triple *triple)
1022 if (triple && triple->occurance) {
1023 struct occurance *spot;
1024 spot = triple->occurance;
1025 while(spot->parent) {
1026 spot = spot->parent;
1028 fprintf(fp, "%s:%d.%d: ",
1029 spot->filename, spot->line, spot->col);
1035 col = get_col(state->file);
1036 fprintf(fp, "%s:%d.%d: ",
1037 state->file->report_name, state->file->report_line, col);
1040 static void romcc_internal_error(struct compile_state *state, struct triple *ptr,
1044 va_start(args, fmt);
1045 loc(stderr, state, ptr);
1047 fprintf(stderr, "%p %s ", ptr, tops(ptr->op));
1049 fprintf(stderr, "Internal compiler error: ");
1050 vfprintf(stderr, fmt, args);
1051 fprintf(stderr, "\n");
1058 static void romcc_internal_warning(struct compile_state *state, struct triple *ptr,
1062 va_start(args, fmt);
1063 loc(stderr, state, ptr);
1065 fprintf(stderr, "%p %s ", ptr, tops(ptr->op));
1067 fprintf(stderr, "Internal compiler warning: ");
1068 vfprintf(stderr, fmt, args);
1069 fprintf(stderr, "\n");
1075 static void romcc_error(struct compile_state *state, struct triple *ptr,
1079 va_start(args, fmt);
1080 loc(stderr, state, ptr);
1081 if (ptr && (state->debug & DEBUG_ABORT_ON_ERROR)) {
1082 fprintf(stderr, "%p %s ", ptr, tops(ptr->op));
1084 vfprintf(stderr, fmt, args);
1086 fprintf(stderr, "\n");
1088 if (state->debug & DEBUG_ABORT_ON_ERROR) {
1094 static void romcc_warning(struct compile_state *state, struct triple *ptr,
1098 va_start(args, fmt);
1099 loc(stderr, state, ptr);
1100 fprintf(stderr, "warning: ");
1101 vfprintf(stderr, fmt, args);
1102 fprintf(stderr, "\n");
1106 #if DEBUG_ERROR_MESSAGES
1107 # define internal_error fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),romcc_internal_error
1108 # define internal_warning fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),romcc_internal_warning
1109 # define error fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),romcc_error
1110 # define warning fprintf(stderr, "@ %s.%s:%d \t", __FILE__, __func__, __LINE__),romcc_warning
1112 # define internal_error romcc_internal_error
1113 # define internal_warning romcc_internal_warning
1114 # define error romcc_error
1115 # define warning romcc_warning
1117 #define FINISHME() warning(state, 0, "FINISHME @ %s.%s:%d", __FILE__, __func__, __LINE__)
1119 static void valid_op(struct compile_state *state, int op)
1121 char *fmt = "invalid op: %d";
1123 internal_error(state, 0, fmt, op);
1126 internal_error(state, 0, fmt, op);
1130 static void valid_ins(struct compile_state *state, struct triple *ptr)
1132 valid_op(state, ptr->op);
1135 static void process_trigraphs(struct compile_state *state)
1137 char *src, *dest, *end;
1138 struct file_state *file;
1140 src = dest = file->buf;
1141 end = file->buf + file->size;
1142 while((end - src) >= 3) {
1143 if ((src[0] == '?') && (src[1] == '?')) {
1146 case '=': c = '#'; break;
1147 case '/': c = '\\'; break;
1148 case '\'': c = '^'; break;
1149 case '(': c = '['; break;
1150 case ')': c = ']'; break;
1151 case '!': c = '!'; break;
1152 case '<': c = '{'; break;
1153 case '>': c = '}'; break;
1154 case '-': c = '~'; break;
1171 file->size = dest - file->buf;
1174 static void splice_lines(struct compile_state *state)
1176 char *src, *dest, *end;
1177 struct file_state *file;
1179 src = dest = file->buf;
1180 end = file->buf + file->size;
1181 while((end - src) >= 2) {
1182 if ((src[0] == '\\') && (src[1] == '\n')) {
1192 file->size = dest - file->buf;
1195 static struct type void_type;
1196 static void use_triple(struct triple *used, struct triple *user)
1198 struct triple_set **ptr, *new;
1205 if ((*ptr)->member == user) {
1208 ptr = &(*ptr)->next;
1210 /* Append new to the head of the list,
1211 * copy_func and rename_block_variables
1214 new = xcmalloc(sizeof(*new), "triple_set");
1216 new->next = used->use;
1220 static void unuse_triple(struct triple *used, struct triple *unuser)
1222 struct triple_set *use, **ptr;
1229 if (use->member == unuser) {
1239 static void put_occurance(struct occurance *occurance)
1241 occurance->count -= 1;
1242 if (occurance->count <= 0) {
1243 if (occurance->parent) {
1244 put_occurance(occurance->parent);
1250 static void get_occurance(struct occurance *occurance)
1252 occurance->count += 1;
1256 static struct occurance *new_occurance(struct compile_state *state)
1258 struct occurance *result, *last;
1259 const char *filename;
1260 const char *function;
1268 filename = state->file->report_name;
1269 line = state->file->report_line;
1270 col = get_col(state->file);
1272 if (state->function) {
1273 function = state->function;
1275 last = state->last_occurance;
1277 (last->col == col) &&
1278 (last->line == line) &&
1279 (last->function == function) &&
1280 ((last->filename == filename) ||
1281 (strcmp(last->filename, filename) == 0)))
1283 get_occurance(last);
1287 state->last_occurance = 0;
1288 put_occurance(last);
1290 result = xmalloc(sizeof(*result), "occurance");
1292 result->filename = filename;
1293 result->function = function;
1294 result->line = line;
1297 state->last_occurance = result;
/* Build an occurance for an instruction copied during function inlining:
 * `new` supplies the position of the copy and `orig` becomes its parent,
 * forming a chain back to the original expansion site.  Uses the same
 * state->last_occurance cache trick as new_occurance(). */
1301 static struct occurance *inline_occurance(struct compile_state *state,
1302 struct occurance *new, struct occurance *orig)
1304 struct occurance *result, *last;
1305 last = state->last_occurance;
/* Cache hit: identical parent and position — reuse the cached record. */
1307 (last->parent == orig) &&
1308 (last->col == new->col) &&
1309 (last->line == new->line) &&
1310 (last->function == new->function) &&
1311 (last->filename == new->filename)) {
1312 get_occurance(last);
/* Cache miss: release the stale cached record. */
1316 state->last_occurance = 0;
1317 put_occurance(last);
/* The new record keeps a reference to its parent chain. */
1319 get_occurance(orig);
1320 result = xmalloc(sizeof(*result), "occurance");
1322 result->filename = new->filename;
1323 result->function = new->function;
1324 result->line = new->line;
1325 result->col = new->col;
1326 result->parent = orig;
1327 state->last_occurance = result;
/* Static placeholder occurance used where no real source position exists
 * (e.g. zero_triple below); never freed, so its refcount never matters. */
1332 static struct occurance dummy_occurance = {
1334 .filename = __FILE__,
1341 /* The zero triple is used as a place holder when we are removing pointers
1342 * from a triple. Having it allows certain sanity checks to pass even
1343 * when the original triple that was pointed to is gone.
/* Placeholder triple substituted for removed pointers (see comment above).
 * It is self-linked so list walks terminate, carries an invalid id, and
 * points at dummy_occurance so diagnostics never dereference garbage. */
1345 static struct triple zero_triple = {
1346 .next = &zero_triple,
1347 .prev = &zero_triple,
1350 .sizes = TRIPLE_SIZES(0, 0, 0, 0),
1351 .id = -1, /* An invalid id */
1352 .u = { .cval = 0, },
1353 .occurance = &dummy_occurance,
1354 .param = { [0] = 0, [1] = 0, },
/* Compute the packed lhs/rhs/misc/targ operand counts for an op.
 * Defaults come from table_ops[]; several ops override them from the type:
 * OP_CALL derives rhs from the parameter list and lhs from a struct return,
 * OP_VAL_VEC takes rhs from the vector element count, and OP_BRANCH /
 * OP_PHI / OP_ASM have their own rules (partially elided here).  Any count
 * outside its MAX_* bound is an internal error. */
1358 static unsigned short triple_sizes(struct compile_state *state,
1359 int op, struct type *type, int lhs_wanted, int rhs_wanted,
1360 struct occurance *occurance)
1362 int lhs, rhs, misc, targ;
/* dummy exists only so internal_error() below has an occurance to report. */
1363 struct triple dummy;
1365 dummy.occurance = occurance;
1366 valid_op(state, op);
1367 lhs = table_ops[op].lhs;
1368 rhs = table_ops[op].rhs;
1369 misc = table_ops[op].misc;
1370 targ = table_ops[op].targ;
1373 if (op == OP_CALL) {
/* Count the argument slots by walking the TYPE_PRODUCT chain. */
1376 param = type->right;
1377 while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
1379 param = param->right;
1381 if ((param->type & TYPE_MASK) != TYPE_VOID) {
/* A struct return needs one lhs slot per element. */
1385 if ((type->left->type & TYPE_MASK) == TYPE_STRUCT) {
1386 lhs = type->left->elements;
1389 else if (op == OP_VAL_VEC) {
1390 rhs = type->elements;
1392 else if ((op == OP_BRANCH) || (op == OP_PHI)) {
1395 else if (op == OP_ASM) {
/* Sanity-check every count against its encoding limit. */
1399 if ((rhs < 0) || (rhs > MAX_RHS)) {
1400 internal_error(state, &dummy, "bad rhs %d", rhs);
1402 if ((lhs < 0) || (lhs > MAX_LHS)) {
1403 internal_error(state, &dummy, "bad lhs");
1405 if ((misc < 0) || (misc > MAX_MISC)) {
1406 internal_error(state, &dummy, "bad misc");
1408 if ((targ < 0) || (targ > MAX_TARG)) {
1409 internal_error(state, &dummy, "bad targs");
1411 return TRIPLE_SIZES(lhs, rhs, misc, targ);
/* Allocate a zeroed triple whose trailing param[] array is sized for the
 * op's total operand count.  sizeof(*ret) already includes a small builtin
 * param array, so only the overflow beyond that minimum is added. */
1414 static struct triple *alloc_triple(struct compile_state *state,
1415 int op, struct type *type, int lhs, int rhs,
1416 struct occurance *occurance)
1418 size_t size, sizes, extra_count, min_count;
1420 sizes = triple_sizes(state, op, type, lhs, rhs, occurance);
1422 min_count = sizeof(ret->param)/sizeof(ret->param[0]);
1423 extra_count = TRIPLE_SIZE(sizes);
/* Extra slots needed beyond the builtin param array (0 if it fits). */
1424 extra_count = (extra_count < min_count)? 0 : extra_count - min_count;
1426 size = sizeof(*ret) + sizeof(ret->param[0]) * extra_count;
1427 ret = xcmalloc(size, "tripple");
/* Caller's occurance reference is transferred to the triple. */
1433 ret->occurance = occurance;
/* Duplicate a triple: allocate a same-shaped triple, copy the header and
 * all param slots.  The occurance refcount is bumped first because
 * alloc_triple takes ownership of a reference. */
1437 struct triple *dup_triple(struct compile_state *state, struct triple *src)
1440 int src_lhs, src_rhs, src_size;
1441 src_lhs = TRIPLE_LHS(src->sizes);
1442 src_rhs = TRIPLE_RHS(src->sizes);
1443 src_size = TRIPLE_SIZE(src->sizes);
1444 get_occurance(src->occurance);
1445 dup = alloc_triple(state, src->op, src->type, src_lhs, src_rhs,
/* Header copy first, then the (possibly longer) param array. */
1447 memcpy(dup, src, sizeof(*src));
1448 memcpy(dup->param, src->param, src_size * sizeof(src->param[0]));
/* Allocate a triple tagged with the current source position. */
1452 static struct triple *new_triple(struct compile_state *state,
1453 int op, struct type *type, int lhs, int rhs)
1456 struct occurance *occurance;
1457 occurance = new_occurance(state);
1458 ret = alloc_triple(state, op, type, lhs, rhs, occurance);
/* Allocate a triple with an explicit occurance and fill in up to two RHS
 * operands (lhs/rhs counts of -1 mean "use the op's defaults").  The
 * elided conditionals presumably guard on `count` before each store. */
1462 static struct triple *build_triple(struct compile_state *state,
1463 int op, struct type *type, struct triple *left, struct triple *right,
1464 struct occurance *occurance)
1468 ret = alloc_triple(state, op, type, -1, -1, occurance);
1469 count = TRIPLE_SIZE(ret->sizes);
1471 ret->param[0] = left;
1474 ret->param[1] = right;
/* Convenience wrapper: like build_triple() but stamps the triple with the
 * current source position via new_triple(). */
1479 static struct triple *triple(struct compile_state *state,
1480 int op, struct type *type, struct triple *left, struct triple *right)
1484 ret = new_triple(state, op, type, -1, -1);
1485 count = TRIPLE_SIZE(ret->sizes);
1487 ret->param[0] = left;
1490 ret->param[1] = right;
/* Build an OP_BRANCH to label `targ`; a non-NULL `test` makes it a
 * conditional branch (one RHS slot for the condition).  Branching to
 * anything but an OP_LABEL is an internal error. */
1495 static struct triple *branch(struct compile_state *state,
1496 struct triple *targ, struct triple *test)
1499 ret = new_triple(state, OP_BRANCH, &void_type, -1, test?1:0);
1503 TARG(ret, 0) = targ;
1504 /* record the branch target was used */
1505 if (!targ || (targ->op != OP_LABEL)) {
1506 internal_error(state, 0, "branch not to label");
1507 use_triple(targ, ret);
/* Splice `ptr` into the instruction list immediately before `first`.
 * A triple already flattened into a list (or not self-linked) cannot be
 * inserted again.  If the preceding instruction is a conditional branch,
 * its fall-through "use" is moved from `first` to the new instruction. */
1513 static void insert_triple(struct compile_state *state,
1514 struct triple *first, struct triple *ptr)
1517 if ((ptr->id & TRIPLE_FLAG_FLATTENED) || (ptr->next != ptr)) {
1518 internal_error(state, ptr, "expression already used");
/* Standard doubly-linked-list insert before `first`. */
1521 ptr->prev = first->prev;
1522 ptr->prev->next = ptr;
1523 ptr->next->prev = ptr;
/* Conditional branch before us: retarget its fall-through use record. */
1524 if ((ptr->prev->op == OP_BRANCH) &&
1525 TRIPLE_RHS(ptr->prev->sizes)) {
1526 unuse_triple(first, ptr->prev);
1527 use_triple(ptr, ptr->prev);
/* Does this op keep its owning basic block in ins->u.block? (BLOCK flag) */
1532 static int triple_stores_block(struct compile_state *state, struct triple *ins)
1534 /* This function is used to determine if u.block
1535 * is utilized to store the current block number.
1538 valid_ins(state, ins);
1539 stores_block = (table_ops[ins->op].flags & BLOCK) == BLOCK;
1540 return stores_block;
/* Find the basic block containing `ins` by walking backwards (elided:
 * the loop presumably steps ins = ins->prev) until an instruction that
 * stores its block is found.  A self-linked prev pointer means the list
 * is corrupt; failing to find a block-storing triple is fatal too. */
1543 static struct block *block_of_triple(struct compile_state *state,
1546 struct triple *first;
1550 first = state->first;
1551 while(ins != first && !triple_stores_block(state, ins)) {
1552 if (ins == ins->prev) {
1553 internal_error(state, 0, "ins == ins->prev?");
1557 if (!triple_stores_block(state, ins)) {
1558 internal_error(state, ins, "Cannot find block");
1560 return ins->u.block;
/* Build a new triple and insert it immediately BEFORE `base`, inheriting
 * base's block and source position.  An OP_PIECE is first resolved to the
 * real instruction it belongs to.  If base was the first instruction of
 * its block, the (elided) tail presumably updates block->first. */
1563 static struct triple *pre_triple(struct compile_state *state,
1564 struct triple *base,
1565 int op, struct type *type, struct triple *left, struct triple *right)
1567 struct block *block;
1569 /* If I am an OP_PIECE jump to the real instruction */
1570 if (base->op == OP_PIECE) {
1571 base = MISC(base, 0);
1573 block = block_of_triple(state, base);
/* build_triple consumes a reference, so take one on base's occurance. */
1574 get_occurance(base->occurance);
1575 ret = build_triple(state, op, type, left, right, base->occurance);
1576 if (triple_stores_block(state, ret)) {
1577 ret->u.block = block;
1579 insert_triple(state, base, ret);
1580 if (block->first == base) {
/* Build a new triple and insert it immediately AFTER `base`, inheriting
 * base's block and source position.  OP_PIECE is resolved to its owner,
 * and if base has LHS result pieces the insertion point is moved past the
 * last of them.  If base was the last instruction of its block, the
 * (elided) tail presumably updates block->last. */
1586 static struct triple *post_triple(struct compile_state *state,
1587 struct triple *base,
1588 int op, struct type *type, struct triple *left, struct triple *right)
1590 struct block *block;
1593 /* If I am an OP_PIECE jump to the real instruction */
1594 if (base->op == OP_PIECE) {
1595 base = MISC(base, 0);
1597 /* If I have a left hand side skip over it */
1598 zlhs = TRIPLE_LHS(base->sizes);
1600 base = LHS(base, zlhs - 1);
1603 block = block_of_triple(state, base);
/* build_triple consumes a reference, so take one on base's occurance. */
1604 get_occurance(base->occurance);
1605 ret = build_triple(state, op, type, left, right, base->occurance);
1606 if (triple_stores_block(state, ret)) {
1607 ret->u.block = block;
/* Inserting before base->next == inserting after base. */
1609 insert_triple(state, base->next, ret);
1610 if (block->last == base) {
/* Create a fresh OP_LABEL triple; labels carry no meaningful type. */
1616 static struct triple *label(struct compile_state *state)
1618 /* Labels don't get a type */
1619 struct triple *result;
1620 result = triple(state, OP_LABEL, &void_type, 0, 0);
/* Print one triple to `fp` for debugging: pointer, pre/post-split flags,
 * allocated register, template id and opcode, then an op-specific tail
 * (the constant value for OP_INTCONST / OP_ADDRCONST, otherwise every
 * param pointer), followed by the occurance chain (file,function:line.col). */
1624 static void display_triple(FILE *fp, struct triple *ins)
1626 struct occurance *ptr;
/* pre/post presumably default to a blank marker; set when split flags are on. */
1630 if (ins->id & TRIPLE_FLAG_PRE_SPLIT) {
1633 if (ins->id & TRIPLE_FLAG_POST_SPLIT) {
1636 reg = arch_reg_str(ID_REG(ins->id));
1637 if (ins->op == OP_INTCONST) {
1638 fprintf(fp, "(%p) %c%c %-7s %-2d %-10s <0x%08lx> ",
1639 ins, pre, post, reg, ins->template_id, tops(ins->op),
1640 (unsigned long)(ins->u.cval));
1642 else if (ins->op == OP_ADDRCONST) {
1643 fprintf(fp, "(%p) %c%c %-7s %-2d %-10s %-10p <0x%08lx>",
1644 ins, pre, post, reg, ins->template_id, tops(ins->op),
1645 MISC(ins, 0), (unsigned long)(ins->u.cval));
/* Generic case: dump every operand slot. */
1649 fprintf(fp, "(%p) %c%c %-7s %-2d %-10s",
1650 ins, pre, post, reg, ins->template_id, tops(ins->op));
1651 count = TRIPLE_SIZE(ins->sizes);
1652 for(i = 0; i < count; i++) {
1653 fprintf(fp, " %-10p", ins->param[i]);
/* Walk the occurance chain out through inline-expansion parents. */
1660 for(ptr = ins->occurance; ptr; ptr = ptr->parent) {
1661 fprintf(fp, " %s,%s:%d.%d",
1670 struct triple_set *user;
1671 for(user = ptr->use; user; user = user->next) {
1672 fprintf(fp, "use: %p\n", user->member);
/* Dump every instruction of a function (RHS(func,0) is the body list)
 * by walking the circular next chain back around to the first triple. */
1679 static void display_func(FILE *fp, struct triple *func)
1681 struct triple *first, *ins;
1682 first = ins = RHS(func, 0);
1684 display_triple(fp, ins);
1686 } while(ins != first);
/* Is the triple side-effect free?  True when the op's purity bits say
 * PURE and the instance is not flagged volatile.  An op whose table
 * entry is neither PURE nor IMPURE is a table bug. */
1689 static int triple_is_pure(struct compile_state *state, struct triple *ins, unsigned id)
1691 /* Does the triple have no side effects.
1692 * I.e. Rexecuting the triple with the same arguments
1693 * gives the same value.
1696 valid_ins(state, ins);
1697 pure = PURE_BITS(table_ops[ins->op].flags);
1698 if ((pure != PURE) && (pure != IMPURE)) {
1699 internal_error(state, 0, "Purity of %s not known\n",
1702 return (pure == PURE) && !(id & TRIPLE_FLAG_VOLATILE);
/* Is this a branch?  Any op with at least one branch target qualifies. */
1705 static int triple_is_branch(struct compile_state *state, struct triple *ins)
1707 /* This function is used to determine which triples need
1711 valid_ins(state, ins);
1712 is_branch = (table_ops[ins->op].targ != 0);
/* Conditional branch: a branch carrying exactly one RHS (the condition). */
1716 static int triple_is_cond_branch(struct compile_state *state, struct triple *ins)
1718 /* A conditional branch has the condition argument as a single
1721 return triple_is_branch(state, ins) &&
1722 (TRIPLE_RHS(ins->sizes) == 1);
/* Unconditional branch: a branch with no RHS operands at all. */
1725 static int triple_is_uncond_branch(struct compile_state *state, struct triple *ins)
1727 /* A unconditional branch has no RHS parameters.
1729 return triple_is_branch(state, ins) &&
1730 (TRIPLE_RHS(ins->sizes) == 0);
/* Does this op define a value (DEF flag in the op table)? */
1733 static int triple_is_def(struct compile_state *state, struct triple *ins)
1735 /* This function is used to determine which triples need
1739 valid_ins(state, ins);
1740 is_def = (table_ops[ins->op].flags & DEF) == DEF;
/* Is this a structural (no machine code) triple?  STRUCTURAL flag check. */
1744 static int triple_is_structural(struct compile_state *state, struct triple *ins)
1747 valid_ins(state, ins);
1748 is_structural = (table_ops[ins->op].flags & STRUCTURAL) == STRUCTURAL;
1749 return is_structural;
/* Generic operand iterator: given a param sub-vector of length `count`,
 * return the first slot when `last` is NULL (elided branch), the next slot
 * while `last` is inside the vector, and (presumably) NULL at the end. */
1752 static struct triple **triple_iter(struct compile_state *state,
1753 size_t count, struct triple **vector,
1754 struct triple *ins, struct triple **last)
1756 struct triple **ret;
1762 else if ((last >= vector) && (last < (vector + count - 1))) {
/* Iterate over the LHS operand slots of `ins` (see triple_iter). */
1770 static struct triple **triple_lhs(struct compile_state *state,
1771 struct triple *ins, struct triple **last)
1773 return triple_iter(state, TRIPLE_LHS(ins->sizes), &LHS(ins,0),
/* Iterate over the RHS operand slots of `ins` (see triple_iter). */
1777 static struct triple **triple_rhs(struct compile_state *state,
1778 struct triple *ins, struct triple **last)
1780 return triple_iter(state, TRIPLE_RHS(ins->sizes), &RHS(ins,0),
/* Iterate over the MISC operand slots of `ins` (see triple_iter). */
1784 static struct triple **triple_misc(struct compile_state *state,
1785 struct triple *ins, struct triple **last)
1787 return triple_iter(state, TRIPLE_MISC(ins->sizes), &MISC(ins,0),
/* Iterate over branch targets.  Unlike the other iterators this is
 * open-coded because a conditional branch (non-zero RHS) also yields its
 * fall-through successor after the explicit targets (the elided tail
 * presumably returns &ins->next or NULL). */
1791 static struct triple **triple_targ(struct compile_state *state,
1792 struct triple *ins, struct triple **last)
1795 struct triple **ret, **vector;
1797 count = TRIPLE_TARG(ins->sizes);
1798 vector = &TARG(ins, 0);
1803 else if ((last >= vector) && (last < (vector + count - 1))) {
/* Past the last explicit target: conditional branches add fall-through. */
1806 else if ((last == (vector + count - 1)) &&
1807 TRIPLE_RHS(ins->sizes)) {
/* Consistency check: assert that `user` really references `used`, either
 * in one of its param slots or (for branches) as its fall-through next.
 * Anything else is an internal error. */
1815 static void verify_use(struct compile_state *state,
1816 struct triple *user, struct triple *used)
1819 size = TRIPLE_SIZE(user->sizes);
1820 for(i = 0; i < size; i++) {
1821 if (user->param[i] == used) {
/* Branch fall-through counts as a use too. */
1825 if (triple_is_branch(state, user)) {
1826 if (user->next == used) {
1831 internal_error(state, user, "%s(%p) does not use %s(%p)",
1832 tops(user->op), user, tops(used->op), used);
/* Return the RHS slot index at which `user` references `used`
 * (after verifying the use relationship; elided tail presumably
 * returns -1 when `used` is not an RHS operand). */
1836 static int find_rhs_use(struct compile_state *state,
1837 struct triple *user, struct triple *used)
1839 struct triple **param;
1841 verify_use(state, user, used);
1842 size = TRIPLE_RHS(user->sizes);
1843 param = &RHS(user, 0);
1844 for(i = 0; i < size; i++) {
1845 if (param[i] == used) {
/* Unlink a triple from the instruction list and free it.  The triple must
 * no longer have users (ptr->use must be empty).  The memory is poisoned
 * with -1 before release so dangling references fail fast. */
1852 static void free_triple(struct compile_state *state, struct triple *ptr)
/* Actual allocated size: header minus the builtin param array plus the
 * real operand count (mirrors alloc_triple's sizing). */
1855 size = sizeof(*ptr) - sizeof(ptr->param) +
1856 (sizeof(ptr->param[0])*TRIPLE_SIZE(ptr->sizes));
1857 ptr->prev->next = ptr->next;
1858 ptr->next->prev = ptr->prev;
1860 internal_error(state, ptr, "ptr->use != 0");
1862 put_occurance(ptr->occurance);
1863 memset(ptr, -1, size);
/* Fully disconnect `ptr` from the IR and free it:
 *  1. fix up its block's first/last boundaries,
 *  2. drop every use record where ptr is the USER (rhs/lhs/misc/targ),
 *  3. for every triple that USES ptr, overwrite those operand slots with
 *     &zero_triple and drop the corresponding use record,
 *  4. free the triple itself. */
1867 static void release_triple(struct compile_state *state, struct triple *ptr)
1869 struct triple_set *set, *next;
1870 struct triple **expr;
1871 struct block *block;
1872 /* Make certain that we are not the first or last element of a block */
1873 block = block_of_triple(state, ptr);
1875 if ((block->last == ptr) && (block->first == ptr)) {
1876 block->last = block->first = 0;
1878 else if (block->last == ptr) {
1879 block->last = ptr->prev;
1881 else if (block->first == ptr) {
1882 block->first = ptr->next;
1885 /* Remove ptr from use chains where it is the user */
1886 expr = triple_rhs(state, ptr, 0);
1887 for(; expr; expr = triple_rhs(state, ptr, expr)) {
1889 unuse_triple(*expr, ptr);
1892 expr = triple_lhs(state, ptr, 0);
1893 for(; expr; expr = triple_lhs(state, ptr, expr)) {
1895 unuse_triple(*expr, ptr);
1898 expr = triple_misc(state, ptr, 0);
1899 for(; expr; expr = triple_misc(state, ptr, expr)) {
1901 unuse_triple(*expr, ptr);
1904 expr = triple_targ(state, ptr, 0);
1905 for(; expr; expr = triple_targ(state, ptr, expr)) {
1907 unuse_triple(*expr, ptr);
1910 /* Remove ptr from use chains where it is used */
1911 for(set = ptr->use; set; set = next) {
/* Replace each reference to ptr with the zero_triple placeholder so
 * the user's operand vectors stay internally consistent. */
1913 expr = triple_rhs(state, set->member, 0);
1914 for(; expr; expr = triple_rhs(state, set->member, expr)) {
1916 *expr = &zero_triple;
1919 expr = triple_lhs(state, set->member, 0);
1920 for(; expr; expr = triple_lhs(state, set->member, expr)) {
1922 *expr = &zero_triple;
1925 expr = triple_misc(state, set->member, 0);
1926 for(; expr; expr = triple_misc(state, set->member, expr)) {
1928 *expr = &zero_triple;
1931 expr = triple_targ(state, set->member, 0);
1932 for(; expr; expr = triple_targ(state, set->member, expr)) {
1934 *expr = &zero_triple;
1937 unuse_triple(ptr, set->member);
1939 free_triple(state, ptr);
1942 static void print_triple(struct compile_state *state, struct triple *ptr);
1944 #define TOK_UNKNOWN 0
1947 #define TOK_LBRACE 3
1948 #define TOK_RBRACE 4
1952 #define TOK_LBRACKET 8
1953 #define TOK_RBRACKET 9
1954 #define TOK_LPAREN 10
1955 #define TOK_RPAREN 11
1960 #define TOK_TIMESEQ 16
1961 #define TOK_DIVEQ 17
1962 #define TOK_MODEQ 18
1963 #define TOK_PLUSEQ 19
1964 #define TOK_MINUSEQ 20
1967 #define TOK_ANDEQ 23
1968 #define TOK_XOREQ 24
1971 #define TOK_NOTEQ 27
1972 #define TOK_QUEST 28
1973 #define TOK_LOGOR 29
1974 #define TOK_LOGAND 30
1978 #define TOK_LESSEQ 34
1979 #define TOK_MOREEQ 35
1983 #define TOK_MINUS 39
1986 #define TOK_PLUSPLUS 42
1987 #define TOK_MINUSMINUS 43
1989 #define TOK_ARROW 45
1991 #define TOK_TILDE 47
1992 #define TOK_LIT_STRING 48
1993 #define TOK_LIT_CHAR 49
1994 #define TOK_LIT_INT 50
1995 #define TOK_LIT_FLOAT 51
1996 #define TOK_MACRO 52
1997 #define TOK_CONCATENATE 53
1999 #define TOK_IDENT 54
2000 #define TOK_STRUCT_NAME 55
2001 #define TOK_ENUM_CONST 56
2002 #define TOK_TYPE_NAME 57
2005 #define TOK_BREAK 59
2008 #define TOK_CONST 62
2009 #define TOK_CONTINUE 63
2010 #define TOK_DEFAULT 64
2012 #define TOK_DOUBLE 66
2015 #define TOK_EXTERN 69
2016 #define TOK_FLOAT 70
2020 #define TOK_INLINE 74
2023 #define TOK_REGISTER 77
2024 #define TOK_RESTRICT 78
2025 #define TOK_RETURN 79
2026 #define TOK_SHORT 80
2027 #define TOK_SIGNED 81
2028 #define TOK_SIZEOF 82
2029 #define TOK_STATIC 83
2030 #define TOK_STRUCT 84
2031 #define TOK_SWITCH 85
2032 #define TOK_TYPEDEF 86
2033 #define TOK_UNION 87
2034 #define TOK_UNSIGNED 88
2036 #define TOK_VOLATILE 90
2037 #define TOK_WHILE 91
2039 #define TOK_ATTRIBUTE 93
2040 #define TOK_ALIGNOF 94
2041 #define TOK_FIRST_KEYWORD TOK_AUTO
2042 #define TOK_LAST_KEYWORD TOK_ALIGNOF
2044 #define TOK_DEFINE 100
2045 #define TOK_UNDEF 101
2046 #define TOK_INCLUDE 102
2047 #define TOK_LINE 103
2048 #define TOK_ERROR 104
2049 #define TOK_WARNING 105
2050 #define TOK_PRAGMA 106
2051 #define TOK_IFDEF 107
2052 #define TOK_IFNDEF 108
2053 #define TOK_ELIF 109
2054 #define TOK_ENDIF 110
2056 #define TOK_FIRST_MACRO TOK_DEFINE
2057 #define TOK_LAST_MACRO TOK_ENDIF
/* Human-readable spelling of every token, indexed by the TOK_* constants
 * above (designated initializers keep the table in sync with the defines).
 * Punctuators use their literal spelling; token classes use ":name:". */
2061 static const char *tokens[] = {
2062 [TOK_UNKNOWN ] = "unknown",
2063 [TOK_SPACE ] = ":space:",
2065 [TOK_LBRACE ] = "{",
2066 [TOK_RBRACE ] = "}",
2070 [TOK_LBRACKET ] = "[",
2071 [TOK_RBRACKET ] = "]",
2072 [TOK_LPAREN ] = "(",
2073 [TOK_RPAREN ] = ")",
2075 [TOK_DOTS ] = "...",
2078 [TOK_TIMESEQ ] = "*=",
2079 [TOK_DIVEQ ] = "/=",
2080 [TOK_MODEQ ] = "%=",
2081 [TOK_PLUSEQ ] = "+=",
2082 [TOK_MINUSEQ ] = "-=",
2083 [TOK_SLEQ ] = "<<=",
2084 [TOK_SREQ ] = ">>=",
2085 [TOK_ANDEQ ] = "&=",
2086 [TOK_XOREQ ] = "^=",
2089 [TOK_NOTEQ ] = "!=",
2091 [TOK_LOGOR ] = "||",
2092 [TOK_LOGAND ] = "&&",
2096 [TOK_LESSEQ ] = "<=",
2097 [TOK_MOREEQ ] = ">=",
2104 [TOK_PLUSPLUS ] = "++",
2105 [TOK_MINUSMINUS ] = "--",
2107 [TOK_ARROW ] = "->",
2110 [TOK_LIT_STRING ] = ":string:",
2111 [TOK_IDENT ] = ":ident:",
2112 [TOK_TYPE_NAME ] = ":typename:",
2113 [TOK_LIT_CHAR ] = ":char:",
2114 [TOK_LIT_INT ] = ":integer:",
2115 [TOK_LIT_FLOAT ] = ":float:",
2117 [TOK_CONCATENATE ] = "##",
2119 [TOK_AUTO ] = "auto",
2120 [TOK_BREAK ] = "break",
2121 [TOK_CASE ] = "case",
2122 [TOK_CHAR ] = "char",
2123 [TOK_CONST ] = "const",
2124 [TOK_CONTINUE ] = "continue",
2125 [TOK_DEFAULT ] = "default",
2127 [TOK_DOUBLE ] = "double",
2128 [TOK_ELSE ] = "else",
2129 [TOK_ENUM ] = "enum",
2130 [TOK_EXTERN ] = "extern",
2131 [TOK_FLOAT ] = "float",
2133 [TOK_GOTO ] = "goto",
2135 [TOK_INLINE ] = "inline",
2137 [TOK_LONG ] = "long",
2138 [TOK_REGISTER ] = "register",
2139 [TOK_RESTRICT ] = "restrict",
2140 [TOK_RETURN ] = "return",
2141 [TOK_SHORT ] = "short",
2142 [TOK_SIGNED ] = "signed",
2143 [TOK_SIZEOF ] = "sizeof",
2144 [TOK_STATIC ] = "static",
2145 [TOK_STRUCT ] = "struct",
2146 [TOK_SWITCH ] = "switch",
2147 [TOK_TYPEDEF ] = "typedef",
2148 [TOK_UNION ] = "union",
2149 [TOK_UNSIGNED ] = "unsigned",
2150 [TOK_VOID ] = "void",
2151 [TOK_VOLATILE ] = "volatile",
2152 [TOK_WHILE ] = "while",
2154 [TOK_ATTRIBUTE ] = "__attribute__",
2155 [TOK_ALIGNOF ] = "__alignof__",
2157 [TOK_DEFINE ] = "define",
2158 [TOK_UNDEF ] = "undef",
2159 [TOK_INCLUDE ] = "include",
2160 [TOK_LINE ] = "line",
2161 [TOK_ERROR ] = "error",
2162 [TOK_WARNING ] = "warning",
2163 [TOK_PRAGMA ] = "pragma",
2164 [TOK_IFDEF ] = "ifdef",
2165 [TOK_IFNDEF ] = "ifndef",
2166 [TOK_ELIF ] = "elif",
2167 [TOK_ENDIF ] = "endif",
/* Simple multiplicative string hash (factor 263) over str[0..str_len),
 * masked to the table size — HASH_TABLE_SIZE must be a power of two. */
2172 static unsigned int hash(const char *str, int str_len)
2176 end = str + str_len;
2178 for(; str < end; str++) {
2179 hash = (hash *263) + *str;
2181 hash = hash & (HASH_TABLE_SIZE -1);
/* Find-or-create the hash_entry for `name` (length-delimited, need not be
 * NUL-terminated).  On a miss a private NUL-terminated copy of the name is
 * made and a zeroed entry is pushed on the bucket's chain. */
2185 static struct hash_entry *lookup(
2186 struct compile_state *state, const char *name, int name_len)
2188 struct hash_entry *entry;
2190 index = hash(name, name_len);
2191 entry = state->hash_table[index];
/* Chain scan: match requires equal length AND equal bytes. */
2193 ((entry->name_len != name_len) ||
2194 (memcmp(entry->name, name, name_len) != 0))) {
2195 entry = entry->next;
2199 /* Get a private copy of the name */
2200 new_name = xmalloc(name_len + 1, "hash_name");
2201 memcpy(new_name, name, name_len);
2202 new_name[name_len] = '\0';
2204 /* Create a new hash entry */
2205 entry = xcmalloc(sizeof(*entry), "hash_entry");
2206 entry->next = state->hash_table[index];
2207 entry->name = new_name;
2208 entry->name_len = name_len;
2210 /* Place the new entry in the hash table */
2211 state->hash_table[index] = entry;
/* Promote an identifier token to its keyword / typename / enum-constant
 * token when the hash entry says it was registered as one. */
2216 static void ident_to_keyword(struct compile_state *state, struct token *tk)
2218 struct hash_entry *entry;
2220 if (entry && ((entry->tok == TOK_TYPE_NAME) ||
2221 (entry->tok == TOK_ENUM_CONST) ||
2222 ((entry->tok >= TOK_FIRST_KEYWORD) &&
2223 (entry->tok <= TOK_LAST_KEYWORD)))) {
2224 tk->tok = entry->tok;
/* Promote an identifier token to a preprocessor-directive token
 * (define/include/ifdef/...) when it was registered as one. */
2228 static void ident_to_macro(struct compile_state *state, struct token *tk)
2230 struct hash_entry *entry;
2233 (entry->tok >= TOK_FIRST_MACRO) &&
2234 (entry->tok <= TOK_LAST_MACRO)) {
2235 tk->tok = entry->tok;
/* Register `keyword` in the identifier table with token `tok`.
 * Registering the same spelling twice is a fatal programming error. */
2239 static void hash_keyword(
2240 struct compile_state *state, const char *keyword, int tok)
2242 struct hash_entry *entry;
2243 entry = lookup(state, keyword, strlen(keyword));
2244 if (entry && entry->tok != TOK_UNKNOWN) {
2245 die("keyword %s already hashed", keyword);
2251 struct compile_state *state, struct hash_entry *ident,
2252 struct symbol **chain, struct triple *def, struct type *type)
2255 if (*chain && ((*chain)->scope_depth == state->scope_depth)) {
2256 error(state, 0, "%s already defined", ident->name);
2258 sym = xcmalloc(sizeof(*sym), "symbol");
2262 sym->scope_depth = state->scope_depth;
/* Bind `label` to `ident` in the label namespace.  Labels live at
 * function scope, so redefinition within the function is an error. */
2267 static void label_symbol(struct compile_state *state,
2268 struct hash_entry *ident, struct triple *label)
2271 if (ident->sym_label) {
2272 error(state, 0, "label %s already defined", ident->name);
2274 sym = xcmalloc(sizeof(*sym), "label");
2277 sym->type = &void_type;
2278 sym->scope_depth = FUNCTION_SCOPE_DEPTH;
2280 ident->sym_label = sym;
/* Enter a new lexical scope (paired with end_scope below). */
2283 static void start_scope(struct compile_state *state)
2285 state->scope_depth++;
/* Pop every symbol at exactly `depth` from the front of one symbol chain
 * (symbols are pushed in scope order, so the head run is the current scope). */
2288 static void end_scope_syms(struct symbol **chain, int depth)
2290 struct symbol *sym, *next;
2292 while(sym && (sym->scope_depth == depth)) {
/* Leave the current lexical scope: walk the whole identifier hash table
 * popping current-depth symbols from all three namespaces (labels, tags,
 * ordinary identifiers), then decrement the depth. */
2300 static void end_scope(struct compile_state *state)
2304 /* Walk through the hash table and remove all symbols
2305 * in the current scope.
2307 depth = state->scope_depth;
2308 for(i = 0; i < HASH_TABLE_SIZE; i++) {
2309 struct hash_entry *entry;
2310 entry = state->hash_table[i];
2312 end_scope_syms(&entry->sym_label, depth);
2313 end_scope_syms(&entry->sym_tag, depth);
2314 end_scope_syms(&entry->sym_ident, depth);
2315 entry = entry->next;
2318 state->scope_depth = depth - 1;
/* Install every C keyword (plus the GCC __asm__/__volatile__/__attribute__
 * spellings) into the identifier hash table. */
2321 static void register_keywords(struct compile_state *state)
2323 hash_keyword(state, "auto", TOK_AUTO);
2324 hash_keyword(state, "break", TOK_BREAK);
2325 hash_keyword(state, "case", TOK_CASE);
2326 hash_keyword(state, "char", TOK_CHAR);
2327 hash_keyword(state, "const", TOK_CONST);
2328 hash_keyword(state, "continue", TOK_CONTINUE);
2329 hash_keyword(state, "default", TOK_DEFAULT);
2330 hash_keyword(state, "do", TOK_DO);
2331 hash_keyword(state, "double", TOK_DOUBLE);
2332 hash_keyword(state, "else", TOK_ELSE);
2333 hash_keyword(state, "enum", TOK_ENUM);
2334 hash_keyword(state, "extern", TOK_EXTERN);
2335 hash_keyword(state, "float", TOK_FLOAT);
2336 hash_keyword(state, "for", TOK_FOR);
2337 hash_keyword(state, "goto", TOK_GOTO);
2338 hash_keyword(state, "if", TOK_IF);
2339 hash_keyword(state, "inline", TOK_INLINE);
2340 hash_keyword(state, "int", TOK_INT);
2341 hash_keyword(state, "long", TOK_LONG);
2342 hash_keyword(state, "register", TOK_REGISTER);
2343 hash_keyword(state, "restrict", TOK_RESTRICT);
2344 hash_keyword(state, "return", TOK_RETURN);
2345 hash_keyword(state, "short", TOK_SHORT);
2346 hash_keyword(state, "signed", TOK_SIGNED);
2347 hash_keyword(state, "sizeof", TOK_SIZEOF);
2348 hash_keyword(state, "static", TOK_STATIC);
2349 hash_keyword(state, "struct", TOK_STRUCT);
2350 hash_keyword(state, "switch", TOK_SWITCH);
2351 hash_keyword(state, "typedef", TOK_TYPEDEF);
2352 hash_keyword(state, "union", TOK_UNION);
2353 hash_keyword(state, "unsigned", TOK_UNSIGNED);
2354 hash_keyword(state, "void", TOK_VOID);
2355 hash_keyword(state, "volatile", TOK_VOLATILE);
2356 hash_keyword(state, "__volatile__", TOK_VOLATILE);
2357 hash_keyword(state, "while", TOK_WHILE);
2358 hash_keyword(state, "asm", TOK_ASM);
2359 hash_keyword(state, "__asm__", TOK_ASM);
2360 hash_keyword(state, "__attribute__", TOK_ATTRIBUTE);
2361 hash_keyword(state, "__alignof__", TOK_ALIGNOF);
/* Install the preprocessor directive names into the identifier table. */
2364 static void register_macro_keywords(struct compile_state *state)
2366 hash_keyword(state, "define", TOK_DEFINE);
2367 hash_keyword(state, "undef", TOK_UNDEF);
2368 hash_keyword(state, "include", TOK_INCLUDE);
2369 hash_keyword(state, "line", TOK_LINE);
2370 hash_keyword(state, "error", TOK_ERROR);
2371 hash_keyword(state, "warning", TOK_WARNING);
2372 hash_keyword(state, "pragma", TOK_PRAGMA);
2373 hash_keyword(state, "ifdef", TOK_IFDEF);
2374 hash_keyword(state, "ifndef", TOK_IFNDEF);
2375 hash_keyword(state, "elif", TOK_ELIF);
2376 hash_keyword(state, "endif", TOK_ENDIF);
/* Predicate: is c a whitespace character?  (body elided in this listing) */
2379 static int spacep(int c)
/* Predicate: is c a decimal digit? */
2395 static int digitp(int c)
2399 case '0': case '1': case '2': case '3': case '4':
2400 case '5': case '6': case '7': case '8': case '9':
/* Numeric value of a decimal digit character. */
2406 static int digval(int c)
2409 if ((c >= '0') && (c <= '9')) {
/* Predicate: is c a hexadecimal digit (either case)? */
2415 static int hexdigitp(int c)
2419 case '0': case '1': case '2': case '3': case '4':
2420 case '5': case '6': case '7': case '8': case '9':
2421 case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
2422 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
/* Numeric value (0-15) of a hexadecimal digit character, either case. */
2428 static int hexdigval(int c)
2431 if ((c >= '0') && (c <= '9')) {
2434 else if ((c >= 'A') && (c <= 'F')) {
2435 val = 10 + (c - 'A');
2437 else if ((c >= 'a') && (c <= 'f')) {
2438 val = 10 + (c - 'a');
/* Predicate: is c an octal digit? */
2443 static int octdigitp(int c)
2447 case '0': case '1': case '2': case '3':
2448 case '4': case '5': case '6': case '7':
/* Numeric value (0-7) of an octal digit character. */
2454 static int octdigval(int c)
2457 if ((c >= '0') && (c <= '7')) {
/* Predicate: may c start/continue an identifier?  Letters of either case
 * (and, presumably, '_' and 'z'/'Z' on elided lines). */
2463 static int letterp(int c)
2467 case 'a': case 'b': case 'c': case 'd': case 'e':
2468 case 'f': case 'g': case 'h': case 'i': case 'j':
2469 case 'k': case 'l': case 'm': case 'n': case 'o':
2470 case 'p': case 'q': case 'r': case 's': case 't':
2471 case 'u': case 'v': case 'w': case 'x': case 'y':
2473 case 'A': case 'B': case 'C': case 'D': case 'E':
2474 case 'F': case 'G': case 'H': case 'I': case 'J':
2475 case 'K': case 'L': case 'M': case 'N': case 'O':
2476 case 'P': case 'Q': case 'R': case 'S': case 'T':
2477 case 'U': case 'V': case 'W': case 'X': case 'Y':
/* Decode one (possibly escaped) character from *strp, advancing *strp.
 * Handles the standard C escapes, \x hex escapes and octal escapes;
 * anything else after a backslash is an "Invalid character constant". */
2486 static int char_value(struct compile_state *state,
2487 const signed char **strp, const signed char *end)
2489 const signed char *str;
2493 if ((c == '\\') && (str < end)) {
2495 case 'n': c = '\n'; str++; break;
2496 case 't': c = '\t'; str++; break;
2497 case 'v': c = '\v'; str++; break;
2498 case 'b': c = '\b'; str++; break;
2499 case 'r': c = '\r'; str++; break;
2500 case 'f': c = '\f'; str++; break;
2501 case 'a': c = '\a'; str++; break;
2502 case '\\': c = '\\'; str++; break;
2503 case '?': c = '?'; str++; break;
2504 case '\'': c = '\''; str++; break;
2505 case '"': c = '"'; break;
/* \x : accumulate hex digits (shift elided; presumably c <<= 4). */
2509 while((str < end) && hexdigitp(*str)) {
2511 c += hexdigval(*str);
/* Octal escape: up to the run of octal digits. */
2515 case '0': case '1': case '2': case '3':
2516 case '4': case '5': case '6': case '7':
2518 while((str < end) && octdigitp(*str)) {
2520 c += octdigval(*str);
2525 error(state, 0, "Invalid character constant");
/* Advance ptr past a run of decimal digits (bounded by end). */
2533 static char *after_digits(char *ptr, char *end)
2535 while((ptr < end) && digitp(*ptr)) {
/* Advance ptr past a run of octal digits (bounded by end). */
2541 static char *after_octdigits(char *ptr, char *end)
2543 while((ptr < end) && octdigitp(*ptr)) {
/* Advance ptr past a run of hexadecimal digits (bounded by end). */
2549 static char *after_hexdigits(char *ptr, char *end)
2551 while((ptr < end) && hexdigitp(*ptr)) {
/* Store a private NUL-terminated copy of [start, end] in the token.
 * Note str_len is end - start + 1: the character at `end` is included
 * (the lexer passes the last character's position, not one-past). */
2557 static void save_string(struct compile_state *state,
2558 struct token *tk, char *start, char *end, const char *id)
2562 /* Create a private copy of the string */
2563 str_len = end - start + 1;
2564 str = xmalloc(str_len + 1, id);
2565 memcpy(str, start, str_len);
2566 str[str_len] = '\0';
2568 /* Store the copy in the token */
2570 tk->str_len = str_len;
2572 static void next_token(struct compile_state *state, int index)
2574 struct file_state *file;
2582 tk = &state->token[index];
2585 token = tokp = file->pos;
2586 end = file->buf + file->size;
2593 if ((tokp + 1) < end) {
2597 if ((tokp + 2) < end) {
2601 if ((tokp + 3) < end) {
2609 else if (spacep(c)) {
2611 while ((tokp < end) && spacep(c)) {
2614 file->report_line++;
2615 file->line_start = tokp + 1;
2624 else if ((c == '/') && (c1 == '/')) {
2626 for(tokp += 2; tokp < end; tokp++) {
2630 file->report_line++;
2631 file->line_start = tokp +1;
2637 else if ((c == '/') && (c1 == '*')) {
2641 line_start = file->line_start;
2642 for(tokp += 2; (end - tokp) >= 2; tokp++) {
2646 line_start = tokp +1;
2648 else if ((c == '*') && (tokp[1] == '/')) {
2654 if (tok == TOK_UNKNOWN) {
2655 error(state, 0, "unterminated comment");
2657 file->report_line += line - file->line;
2659 file->line_start = line_start;
2661 /* string constants */
2662 else if ((c == '"') ||
2663 ((c == 'L') && (c1 == '"'))) {
2668 line_start = file->line_start;
2674 for(tokp += 1; tokp < end; tokp++) {
2678 line_start = tokp + 1;
2680 else if ((c == '\\') && (tokp +1 < end)) {
2683 else if (c == '"') {
2684 tok = TOK_LIT_STRING;
2688 if (tok == TOK_UNKNOWN) {
2689 error(state, 0, "unterminated string constant");
2691 if (line != file->line) {
2692 warning(state, 0, "multiline string constant");
2694 file->report_line += line - file->line;
2696 file->line_start = line_start;
2698 /* Save the string value */
2699 save_string(state, tk, token, tokp, "literal string");
2701 /* character constants */
2702 else if ((c == '\'') ||
2703 ((c == 'L') && (c1 == '\''))) {
2708 line_start = file->line_start;
2714 for(tokp += 1; tokp < end; tokp++) {
2718 line_start = tokp + 1;
2720 else if ((c == '\\') && (tokp +1 < end)) {
2723 else if (c == '\'') {
2728 if (tok == TOK_UNKNOWN) {
2729 error(state, 0, "unterminated character constant");
2731 if (line != file->line) {
2732 warning(state, 0, "multiline character constant");
2734 file->report_line += line - file->line;
2736 file->line_start = line_start;
2738 /* Save the character value */
2739 save_string(state, tk, token, tokp, "literal character");
2741 /* integer and floating constants
2747 * Floating constants
2748 * {digits}.{digits}[Ee][+-]?{digits}
2750 * {digits}[Ee][+-]?{digits}
2751 * .{digits}[Ee][+-]?{digits}
2755 else if (digitp(c) || ((c == '.') && (digitp(c1)))) {
2760 next = after_digits(tokp, end);
2765 if (next[0] == '.') {
2766 new = after_digits(next, end);
2767 is_float = (new != next);
2770 if ((next[0] == 'e') || (next[0] == 'E')) {
2771 if (((next + 1) < end) &&
2772 ((next[1] == '+') || (next[1] == '-'))) {
2775 new = after_digits(next, end);
2776 is_float = (new != next);
2780 tok = TOK_LIT_FLOAT;
2781 if ((next < end) && (
2790 if (!is_float && digitp(c)) {
2792 if ((c == '0') && ((c1 == 'x') || (c1 == 'X'))) {
2793 next = after_hexdigits(tokp + 2, end);
2795 else if (c == '0') {
2796 next = after_octdigits(tokp, end);
2799 next = after_digits(tokp, end);
2801 /* crazy integer suffixes */
2803 ((next[0] == 'u') || (next[0] == 'U'))) {
2806 ((next[0] == 'l') || (next[0] == 'L'))) {
2810 else if ((next < end) &&
2811 ((next[0] == 'l') || (next[0] == 'L'))) {
2814 ((next[0] == 'u') || (next[0] == 'U'))) {
2821 /* Save the integer/floating point value */
2822 save_string(state, tk, token, tokp, "literal number");
2825 else if (letterp(c)) {
2827 for(tokp += 1; tokp < end; tokp++) {
2829 if (!letterp(c) && !digitp(c)) {
2834 tk->ident = lookup(state, token, tokp +1 - token);
2836 /* C99 alternate macro characters */
2837 else if ((c == '%') && (c1 == ':') && (c2 == '%') && (c3 == ':')) {
2839 tok = TOK_CONCATENATE;
2841 else if ((c == '.') && (c1 == '.') && (c2 == '.')) { tokp += 2; tok = TOK_DOTS; }
2842 else if ((c == '<') && (c1 == '<') && (c2 == '=')) { tokp += 2; tok = TOK_SLEQ; }
2843 else if ((c == '>') && (c1 == '>') && (c2 == '=')) { tokp += 2; tok = TOK_SREQ; }
2844 else if ((c == '*') && (c1 == '=')) { tokp += 1; tok = TOK_TIMESEQ; }
2845 else if ((c == '/') && (c1 == '=')) { tokp += 1; tok = TOK_DIVEQ; }
2846 else if ((c == '%') && (c1 == '=')) { tokp += 1; tok = TOK_MODEQ; }
2847 else if ((c == '+') && (c1 == '=')) { tokp += 1; tok = TOK_PLUSEQ; }
2848 else if ((c == '-') && (c1 == '=')) { tokp += 1; tok = TOK_MINUSEQ; }
2849 else if ((c == '&') && (c1 == '=')) { tokp += 1; tok = TOK_ANDEQ; }
2850 else if ((c == '^') && (c1 == '=')) { tokp += 1; tok = TOK_XOREQ; }
2851 else if ((c == '|') && (c1 == '=')) { tokp += 1; tok = TOK_OREQ; }
2852 else if ((c == '=') && (c1 == '=')) { tokp += 1; tok = TOK_EQEQ; }
2853 else if ((c == '!') && (c1 == '=')) { tokp += 1; tok = TOK_NOTEQ; }
2854 else if ((c == '|') && (c1 == '|')) { tokp += 1; tok = TOK_LOGOR; }
2855 else if ((c == '&') && (c1 == '&')) { tokp += 1; tok = TOK_LOGAND; }
2856 else if ((c == '<') && (c1 == '=')) { tokp += 1; tok = TOK_LESSEQ; }
2857 else if ((c == '>') && (c1 == '=')) { tokp += 1; tok = TOK_MOREEQ; }
2858 else if ((c == '<') && (c1 == '<')) { tokp += 1; tok = TOK_SL; }
2859 else if ((c == '>') && (c1 == '>')) { tokp += 1; tok = TOK_SR; }
2860 else if ((c == '+') && (c1 == '+')) { tokp += 1; tok = TOK_PLUSPLUS; }
2861 else if ((c == '-') && (c1 == '-')) { tokp += 1; tok = TOK_MINUSMINUS; }
2862 else if ((c == '-') && (c1 == '>')) { tokp += 1; tok = TOK_ARROW; }
2863 else if ((c == '<') && (c1 == ':')) { tokp += 1; tok = TOK_LBRACKET; }
2864 else if ((c == ':') && (c1 == '>')) { tokp += 1; tok = TOK_RBRACKET; }
2865 else if ((c == '<') && (c1 == '%')) { tokp += 1; tok = TOK_LBRACE; }
2866 else if ((c == '%') && (c1 == '>')) { tokp += 1; tok = TOK_RBRACE; }
2867 else if ((c == '%') && (c1 == ':')) { tokp += 1; tok = TOK_MACRO; }
2868 else if ((c == '#') && (c1 == '#')) { tokp += 1; tok = TOK_CONCATENATE; }
2869 else if (c == ';') { tok = TOK_SEMI; }
2870 else if (c == '{') { tok = TOK_LBRACE; }
2871 else if (c == '}') { tok = TOK_RBRACE; }
2872 else if (c == ',') { tok = TOK_COMMA; }
2873 else if (c == '=') { tok = TOK_EQ; }
2874 else if (c == ':') { tok = TOK_COLON; }
2875 else if (c == '[') { tok = TOK_LBRACKET; }
2876 else if (c == ']') { tok = TOK_RBRACKET; }
2877 else if (c == '(') { tok = TOK_LPAREN; }
2878 else if (c == ')') { tok = TOK_RPAREN; }
2879 else if (c == '*') { tok = TOK_STAR; }
2880 else if (c == '>') { tok = TOK_MORE; }
2881 else if (c == '<') { tok = TOK_LESS; }
2882 else if (c == '?') { tok = TOK_QUEST; }
2883 else if (c == '|') { tok = TOK_OR; }
2884 else if (c == '&') { tok = TOK_AND; }
2885 else if (c == '^') { tok = TOK_XOR; }
2886 else if (c == '+') { tok = TOK_PLUS; }
2887 else if (c == '-') { tok = TOK_MINUS; }
2888 else if (c == '/') { tok = TOK_DIV; }
2889 else if (c == '%') { tok = TOK_MOD; }
2890 else if (c == '!') { tok = TOK_BANG; }
2891 else if (c == '.') { tok = TOK_DOT; }
2892 else if (c == '~') { tok = TOK_TILDE; }
2893 else if (c == '#') { tok = TOK_MACRO; }
2894 if (tok == TOK_MACRO) {
2895 /* Only match preprocessor directives at the start of a line */
2897 for(ptr = file->line_start; spacep(*ptr); ptr++)
2903 if (tok == TOK_UNKNOWN) {
2904 error(state, 0, "unknown token");
2907 file->pos = tokp + 1;
2909 if (tok == TOK_IDENT) {
2910 ident_to_keyword(state, tk);
2912 /* Don't return space tokens. */
2913 if (tok == TOK_SPACE) {
/* Push a macro's replacement text onto the input stack as a pseudo "file"
 * so the lexer re-scans it in place.
 * NOTE(review): `ident` is read below but its assignment (presumably
 * ident = tk->ident) is not visible in this excerpt — confirm upstream.
 */
2918 static void compile_macro(struct compile_state *state, struct token *tk)
2920 struct file_state *file;
2921 struct hash_entry *ident;
2923 file = xmalloc(sizeof(*file), "file_state");
2924 file->basename = xstrdup(tk->ident->name);
2925 file->dirname = xstrdup("");
2926 file->size = ident->sym_define->buf_len;
/* Copy the macro body and terminate it with "\n\0" so the lexer always
 * sees a complete final line (hence the +2 on the allocation). */
2927 file->buf = xmalloc(file->size +2, file->basename);
2928 memcpy(file->buf, ident->sym_define->buf, file->size);
2929 file->buf[file->size] = '\n';
2930 file->buf[file->size + 1] = '\0';
2931 file->pos = file->buf;
2932 file->line_start = file->pos;
/* Error reporting attributes the macro text to the macro's own name. */
2934 file->report_line = 1;
2935 file->report_name = file->basename;
2936 file->report_dir = file->dirname;
/* Link the pseudo-file above the current file on the include stack. */
2937 file->prev = state->file;
/* Macro-context peek: return the token code at lookahead slot index+1,
 * refilling it on demand.  Pops finished macro pseudo-files (but never
 * past the file the current directive started in) and expands defined
 * identifiers before reporting the token.
 */
2942 static int mpeek(struct compile_state *state, int index)
2946 tk = &state->token[index + 1];
/* tok == -1 marks an empty lookahead slot. */
2947 if (tk->tok == -1) {
2948 next_token(state, index + 1);
/* At EOF of a nested macro expansion, pop back to the previous file. */
2952 if ((tk->tok == TOK_EOF) &&
2953 (state->file != state->macro_file) &&
2954 (state->file->prev)) {
2955 struct file_state *file = state->file;
2956 state->file = file->prev;
2957 /* file->basename is used keep it */
2958 if (file->report_dir != file->dirname) {
2959 xfree(file->report_dir);
2961 xfree(file->dirname);
2964 next_token(state, index + 1);
/* An identifier with a definition is macro-expanded transparently. */
2967 else if (tk->ident && tk->ident->sym_define) {
2968 compile_macro(state, tk);
2969 next_token(state, index + 1);
2973 /* Don't show the token on the next line */
2974 if (state->macro_line < state->macro_file->line) {
2977 return state->token[index +1].tok;
/* Macro-context "eat": require that the next token is `tok` (error
 * otherwise), then consume it by shifting the lookahead queue down one
 * slot.  Counterpart of eat() for use while processing # directives.
 */
2980 static void meat(struct compile_state *state, int index, int tok)
2984 next_tok = mpeek(state, index);
2985 if (next_tok != tok) {
2986 const char *name1, *name2;
2987 name1 = tokens[next_tok];
2989 if (next_tok == TOK_IDENT) {
2990 name2 = state->token[index + 1].ident->name;
2992 error(state, 0, "found %s %s expected %s",
2993 name1, name2, tokens[tok]);
2995 /* Free the old token value */
2996 if (state->token[index].str_len) {
/* Poison the freed string with -1 bytes to catch use-after-free. */
2997 memset((void *)(state->token[index].val.str), -1,
2998 state->token[index].str_len);
2999 xfree(state->token[index].val.str);
/* Shift the remaining lookahead tokens down and mark the last slot empty. */
3001 for(i = index; i < sizeof(state->token)/sizeof(state->token[0]) - 1; i++) {
3002 state->token[i] = state->token[i + 1];
3004 memset(&state->token[i], 0, sizeof(state->token[i]));
3005 state->token[i].tok = -1;
3008 static long_t mcexpr(struct compile_state *state, int index);
/* Parse a primary expression of a preprocessor constant expression:
 * a parenthesized subexpression or an integer literal.  Leading
 * identifiers with definitions are macro-expanded first.
 * Fix: error message typo "to large" -> "too large".
 */
3010 static long_t mprimary_expr(struct compile_state *state, int index)
3014 tok = mpeek(state, index);
/* Expand any macros before deciding what kind of primary this is. */
3015 while(state->token[index + 1].ident &&
3016 state->token[index + 1].ident->sym_define) {
3017 meat(state, index, tok);
3018 compile_macro(state, &state->token[index]);
3019 tok = mpeek(state, index);
/* ( expr ) */
3023 meat(state, index, TOK_LPAREN);
3024 val = mcexpr(state, index);
3025 meat(state, index, TOK_RPAREN);
/* Integer literal: parse with strtol, base auto-detected (0). */
3031 meat(state, index, TOK_LIT_INT);
3033 lval = strtol(state->token[index].val.str, &end, 0);
/* Reject values outside the target long_t range, and host strtol
 * overflow (LONG_MIN/LONG_MAX with errno == ERANGE). */
3034 if ((lval > LONG_T_MAX) || (lval < LONG_T_MIN) ||
3035 (((lval == LONG_MIN) || (lval == LONG_MAX)) &&
3036 (errno == ERANGE))) {
3037 error(state, 0, "Integer constant too large");
3043 meat(state, index, TOK_LIT_INT);
/* Parse a unary expression of a preprocessor constant expression:
 * unary +, -, !, ~ or a primary expression.
 * Fix: the fourth unary arm consumed TOK_BANG a second time; the unary
 * operator set is + - ! ~, so it must consume TOK_TILDE.
 * NOTE(review): the negating operators on the recursive results are not
 * visible in this excerpt (e.g. val = -munary_expr(...)) — confirm the
 * elided lines apply -, ! and ~ respectively.
 */
3048 static long_t munary_expr(struct compile_state *state, int index)
3051 switch(mpeek(state, index)) {
/* unary plus: identity */
3053 meat(state, index, TOK_PLUS);
3054 val = munary_expr(state, index);
/* unary minus */
3058 meat(state, index, TOK_MINUS);
3059 val = munary_expr(state, index);
/* logical not */
3063 meat(state, index, TOK_BANG);
3064 val = munary_expr(state, index);
/* bitwise complement */
3068 meat(state, index, TOK_TILDE);
3069 val = munary_expr(state, index);
/* anything else must be a primary expression */
3073 val = mprimary_expr(state, index);
/* Parse the multiplicative level (*, /, %) of a preprocessor constant
 * expression; left-associative loop over unary operands.
 */
3079 static long_t mmul_expr(struct compile_state *state, int index)
3083 val = munary_expr(state, index);
3087 switch(mpeek(state, index)) {
3089 meat(state, index, TOK_STAR);
3090 right = munary_expr(state, index);
3094 meat(state, index, TOK_DIV);
3095 right = munary_expr(state, index);
3099 meat(state, index, TOK_MOD);
3100 right = munary_expr(state, index);
/* Parse the additive level (+, -) of a preprocessor constant expression. */
3112 static long_t madd_expr(struct compile_state *state, int index)
3116 val = mmul_expr(state, index);
3120 switch(mpeek(state, index)) {
3122 meat(state, index, TOK_PLUS);
3123 right = mmul_expr(state, index);
3127 meat(state, index, TOK_MINUS);
3128 right = mmul_expr(state, index);
/* Parse the shift level (<<, >>) of a preprocessor constant expression. */
3140 static long_t mshift_expr(struct compile_state *state, int index)
3144 val = madd_expr(state, index);
3148 switch(mpeek(state, index)) {
3150 meat(state, index, TOK_SL);
3151 right = madd_expr(state, index);
3155 meat(state, index, TOK_SR);
3156 right = madd_expr(state, index);
/* Parse the relational level (<, >, <=, >=) of a preprocessor constant
 * expression.
 */
3168 static long_t mrel_expr(struct compile_state *state, int index)
3172 val = mshift_expr(state, index);
3176 switch(mpeek(state, index)) {
3178 meat(state, index, TOK_LESS);
3179 right = mshift_expr(state, index);
3183 meat(state, index, TOK_MORE);
3184 right = mshift_expr(state, index);
3188 meat(state, index, TOK_LESSEQ);
3189 right = mshift_expr(state, index);
3193 meat(state, index, TOK_MOREEQ);
3194 right = mshift_expr(state, index);
/* Parse the equality level (==, !=) of a preprocessor constant expression. */
3205 static long_t meq_expr(struct compile_state *state, int index)
3209 val = mrel_expr(state, index);
3213 switch(mpeek(state, index)) {
3215 meat(state, index, TOK_EQEQ);
3216 right = mrel_expr(state, index);
3220 meat(state, index, TOK_NOTEQ);
3221 right = mrel_expr(state, index);
/* Parse the bitwise-and level (&) of a preprocessor constant expression. */
3232 static long_t mand_expr(struct compile_state *state, int index)
3235 val = meq_expr(state, index);
3236 if (mpeek(state, index) == TOK_AND) {
3238 meat(state, index, TOK_AND);
3239 right = meq_expr(state, index);
/* Parse the bitwise-xor level (^) of a preprocessor constant expression. */
3245 static long_t mxor_expr(struct compile_state *state, int index)
3248 val = mand_expr(state, index);
3249 if (mpeek(state, index) == TOK_XOR) {
3251 meat(state, index, TOK_XOR);
3252 right = mand_expr(state, index);
/* Parse the bitwise-or level (|) of a preprocessor constant expression. */
3258 static long_t mor_expr(struct compile_state *state, int index)
3261 val = mxor_expr(state, index);
3262 if (mpeek(state, index) == TOK_OR) {
3264 meat(state, index, TOK_OR);
3265 right = mxor_expr(state, index);
/* Parse the logical-and level (&&) of a preprocessor constant expression.
 * NOTE(review): both operands are evaluated before combining, so && here
 * does not short-circuit side effects — harmless for constant expressions.
 */
3271 static long_t mland_expr(struct compile_state *state, int index)
3274 val = mor_expr(state, index);
3275 if (mpeek(state, index) == TOK_LOGAND) {
3277 meat(state, index, TOK_LOGAND);
3278 right = mor_expr(state, index);
/* Parse the logical-or level (||) of a preprocessor constant expression. */
3283 static long_t mlor_expr(struct compile_state *state, int index)
3286 val = mland_expr(state, index);
3287 if (mpeek(state, index) == TOK_LOGOR) {
3289 meat(state, index, TOK_LOGOR);
3290 right = mland_expr(state, index);
/* Evaluate a full preprocessor constant expression (used by #if/#elif);
 * entry point of the recursive-descent chain rooted at mlor_expr.
 */
3296 static long_t mcexpr(struct compile_state *state, int index)
3298 return mlor_expr(state, index);
/* Handle one preprocessor directive (#line, #if/#ifdef/#ifndef/#elif/
 * #else/#endif, #define/#undef, #error/#warning, #include, cpp line
 * markers).  Conditional state lives in state->if_depth/if_value:
 * if_value >= 0 means code is active, a negative value records the
 * depth at which code was disabled.
 * Fix: error message "Unterminated included directive" -> "include".
 */
3300 static void preprocess(struct compile_state *state, int index)
3302 /* Doing much more with the preprocessor would require
3303 * a parser and a major restructuring.
3304 * Postpone that for later.
3306 struct file_state *file;
3312 tk = &state->token[index];
/* Remember where the directive started so mpeek stops at end of line. */
3313 state->macro_line = line = file->line;
3314 state->macro_file = file;
3316 next_token(state, index);
3317 ident_to_macro(state, tk);
3318 if (tk->tok == TOK_IDENT) {
3319 error(state, 0, "undefined preprocessing directive `%s'",
/* cpp line marker: "# <num> \"file\"" emitted by an external preprocessor. */
3326 override_line = strtoul(tk->val.str, 0, 10);
3327 next_token(state, index);
3328 /* I have a cpp line marker parse it */
3329 if (tk->tok == TOK_LIT_STRING) {
3330 const char *token, *base;
3332 int name_len, dir_len;
3333 name = xmalloc(tk->str_len, "report_name");
/* token points past the opening quote; split into dir + base name. */
3334 token = tk->val.str + 1;
3335 base = strrchr(token, '/');
3336 name_len = tk->str_len -2;
3338 dir_len = base - token;
3340 name_len -= base - token;
3345 memcpy(name, base, name_len);
3346 name[name_len] = '\0';
3347 dir = xmalloc(dir_len + 1, "report_dir");
3348 memcpy(dir, token, dir_len);
3349 dir[dir_len] = '\0';
3350 file->report_line = override_line - 1;
3351 file->report_name = name;
3352 file->report_dir = dir;
/* #line <num> ["file"] */
3357 meat(state, index, TOK_LINE);
3358 meat(state, index, TOK_LIT_INT);
3359 file->report_line = strtoul(tk->val.str, 0, 10) -1;
3360 if (mpeek(state, index) == TOK_LIT_STRING) {
3361 const char *token, *base;
3363 int name_len, dir_len;
3364 meat(state, index, TOK_LIT_STRING);
3365 name = xmalloc(tk->str_len, "report_name");
3366 token = tk->val.str + 1;
3367 name_len = tk->str_len - 2;
3369 dir_len = base - token;
3371 name_len -= base - token;
3376 memcpy(name, base, name_len);
3377 name[name_len] = '\0';
3378 dir = xmalloc(dir_len + 1, "report_dir");
3379 memcpy(dir, token, dir_len);
3380 dir[dir_len] = '\0';
3381 file->report_name = name;
3382 file->report_dir = dir;
/* Directives ignored while #if'd out. */
3387 if (state->if_value < 0) {
3390 warning(state, 0, "Ignoring preprocessor directive: %s",
3394 error(state, 0, "#elif not supported");
3395 #warning "FIXME multiple #elif and #else in an #if do not work properly"
3396 if (state->if_depth == 0) {
3397 error(state, 0, "#elif without #if");
3399 /* If the #if was taken the #elif just disables the following code */
3400 if (state->if_value >= 0) {
3401 state->if_value = - state->if_value;
3403 /* If the previous #if was not taken see if the #elif enables the
3406 else if ((state->if_value < 0) &&
3407 (state->if_depth == - state->if_value))
3409 if (mcexpr(state, index) != 0) {
3410 state->if_value = state->if_depth;
3413 state->if_value = - state->if_depth;
/* #if: evaluate the constant expression (skipped when already disabled). */
3419 if (state->if_value < 0) {
3422 if (mcexpr(state, index) != 0) {
3423 state->if_value = state->if_depth;
3426 state->if_value = - state->if_depth;
/* #ifdef */
3431 if (state->if_value < 0) {
3434 next_token(state, index);
3435 if ((line != file->line) || (tk->tok != TOK_IDENT)) {
3436 error(state, 0, "Invalid macro name");
3438 if (tk->ident->sym_define == 0) {
3439 state->if_value = state->if_depth;
3442 state->if_value = - state->if_depth;
/* #ifndef */
3447 if (state->if_value < 0) {
3450 next_token(state, index);
3451 if ((line != file->line) || (tk->tok != TOK_IDENT)) {
3452 error(state, 0, "Invalid macro name");
3454 if (tk->ident->sym_define != 0) {
3455 state->if_value = state->if_depth;
3458 state->if_value = - state->if_depth;
/* #else: flip the sense of the innermost conditional. */
3462 if (state->if_depth == 0) {
3463 error(state, 0, "#else without #if");
3465 if ((state->if_value >= 0) ||
3466 ((state->if_value < 0) &&
3467 (state->if_depth == -state->if_value)))
3469 state->if_value = - state->if_value;
/* #endif: pop one level of conditional nesting. */
3473 if (state->if_depth == 0) {
3474 error(state, 0, "#endif without #if");
3476 if ((state->if_value >= 0) ||
3477 ((state->if_value < 0) &&
3478 (state->if_depth == -state->if_value)))
3480 state->if_value = state->if_depth - 1;
/* #define: record the rest of the line as the macro body. */
3486 struct hash_entry *ident;
3487 struct macro *macro;
3490 if (state->if_value < 0) /* quit early when #if'd out */
3493 meat(state, index, TOK_IDENT);
3497 if (*file->pos == '(') {
3498 #warning "FIXME macros with arguments not supported"
3499 error(state, 0, "Macros with arguments not supported");
3502 /* Find the end of the line to get an estimate of
3503 * the macro's length.
3505 for(ptr = file->pos; *ptr != '\n'; ptr++)
3508 if (ident->sym_define != 0) {
3509 error(state, 0, "macro %s already defined\n", ident->name);
3511 macro = xmalloc(sizeof(*macro), "macro");
3512 macro->ident = ident;
3513 macro->buf_len = ptr - file->pos +1;
/* +2 leaves room for the trailing "\n\0" sentinel below. */
3514 macro->buf = xmalloc(macro->buf_len +2, "macro buf");
3516 memcpy(macro->buf, file->pos, macro->buf_len);
3517 macro->buf[macro->buf_len] = '\n';
3518 macro->buf[macro->buf_len +1] = '\0';
3520 ident->sym_define = macro;
/* #error: report the rest of the line verbatim. */
3527 /* Find the end of the line */
3528 for(end = file->pos; *end != '\n'; end++)
3530 len = (end - file->pos);
3531 if (state->if_value >= 0) {
3532 error(state, 0, "%*.*s", len, len, file->pos);
/* #warning: as #error but non-fatal. */
3541 /* Find the end of the line */
3542 for(end = file->pos; *end != '\n'; end++)
3544 len = (end - file->pos);
3545 if (state->if_value >= 0) {
3546 warning(state, 0, "%*.*s", len, len, file->pos);
/* #include: accept "file" (local) or <file> forms. */
3558 next_token(state, index);
3559 if (tk->tok == TOK_LIT_STRING) {
3562 name = xmalloc(tk->str_len, "include");
3563 token = tk->val.str +1;
3564 name_len = tk->str_len -2;
3565 if (*token == '"') {
3569 memcpy(name, token, name_len);
3570 name[name_len] = '\0';
3573 else if (tk->tok == TOK_LESS) {
/* Scan raw characters up to '>' before the end of line. */
3576 for(end = start; *end != '\n'; end++) {
3582 error(state, 0, "Unterminated include directive");
3584 name = xmalloc(end - start + 1, "include");
3585 memcpy(name, start, end - start);
3586 name[end - start] = '\0';
3591 error(state, 0, "Invalid include directive");
3593 /* Error if there are any characters after the include */
3594 for(ptr = file->pos; *ptr != '\n'; ptr++) {
3601 error(state, 0, "garbage after include directive");
3604 if (state->if_value >= 0) {
3605 compile_file(state, name, local);
3608 next_token(state, index);
/* Bare '#' with no recognized keyword. */
3612 /* Ignore # without a following ident */
3613 if (tk->tok == TOK_IDENT) {
3614 error(state, 0, "Invalid preprocessor directive: %s",
3619 /* Consume the rest of the macro line */
3621 tok = mpeek(state, index);
3622 meat(state, index, tok);
3623 } while(tok != TOK_EOF);
/* Fill lookahead slot `index` with the next significant token for the
 * main compiler: pops finished include files, runs # directives,
 * expands macros, and skips tokens inside false #if regions.
 */
3627 static void token(struct compile_state *state, int index)
3629 struct file_state *file;
3633 tk = &state->token[index];
3634 next_token(state, index);
/* At EOF of an included file, pop back to the including file. */
3638 if (tk->tok == TOK_EOF && file->prev) {
3639 state->file = file->prev;
3640 /* file->basename is used keep it */
3641 xfree(file->dirname);
3644 next_token(state, index);
/* '#' at start of line: process the whole directive. */
3647 else if (tk->tok == TOK_MACRO) {
3648 preprocess(state, index);
/* Defined identifier: expand and retry. */
3651 else if (tk->ident && tk->ident->sym_define) {
3652 compile_macro(state, tk);
3653 next_token(state, index);
/* Inside a false #if region: discard and retry. */
3656 else if (state->if_value < 0) {
3657 next_token(state, index);
/* Return the token code one ahead, filling the slot lazily (-1 = empty). */
3663 static int peek(struct compile_state *state)
3665 if (state->token[1].tok == -1) {
3668 return state->token[1].tok;
/* Return the token code two ahead; slot 1 must be filled before slot 2
 * so the lookahead queue stays contiguous.
 */
3671 static int peek2(struct compile_state *state)
3673 if (state->token[1].tok == -1) {
3676 if (state->token[2].tok == -1) {
3679 return state->token[2].tok;
/* Require that the next token is `tok` (error otherwise), then consume
 * it by shifting the lookahead queue down one slot.  Main-compiler
 * counterpart of meat().
 */
3682 static void eat(struct compile_state *state, int tok)
3686 next_tok = peek(state);
3687 if (next_tok != tok) {
3688 const char *name1, *name2;
3689 name1 = tokens[next_tok];
3691 if (next_tok == TOK_IDENT) {
3692 name2 = state->token[1].ident->name;
3694 error(state, 0, "\tfound %s %s expected %s",
3695 name1, name2 ,tokens[tok]);
3697 /* Free the old token value */
3698 if (state->token[0].str_len) {
3699 xfree((void *)(state->token[0].val.str));
/* Shift remaining lookahead tokens down; mark the last slot empty (-1). */
3701 for(i = 0; i < sizeof(state->token)/sizeof(state->token[0]) - 1; i++) {
3702 state->token[i] = state->token[i + 1];
3704 memset(&state->token[i], 0, sizeof(state->token[i]));
3705 state->token[i].tok = -1;
/* Search path for #include <...>; scanned in order by compile_file().
 * NOTE(review): the array's terminating entry is not visible in this
 * excerpt — the search loop below relies on a NULL (0) terminator.
 */
3708 #warning "FIXME do not hardcode the include paths"
3709 static char *include_paths[] = {
3710 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src/include",
3711 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src/arch/i386/include",
3712 "/home/eric/projects/linuxbios/checkin/solo/freebios2/src",
/* Open `filename`, locate it on the search path (current directory for
 * the top-level file, the including file's directory for local
 * includes, then include_paths), slurp it into memory, push it on the
 * file stack, and normalize it (trigraphs, line splicing).
 * Fix: die() message typo "to small" -> "too small".
 */
3716 static void compile_file(struct compile_state *state, const char *filename, int local)
3719 const char *subdir, *base;
3721 struct file_state *file;
3723 file = xmalloc(sizeof(*file), "file_state");
/* Split filename into an optional subdirectory and a base name. */
3725 base = strrchr(filename, '/');
3728 subdir_len = base - filename;
3735 basename = xmalloc(strlen(base) +1, "basename");
3736 strcpy(basename, base);
3737 file->basename = basename;
3739 if (getcwd(cwd, sizeof(cwd)) == 0) {
3740 die("cwd buffer too small");
/* An absolute subdirectory is used verbatim as the directory name. */
3743 if (subdir[0] == '/') {
3744 file->dirname = xmalloc(subdir_len + 1, "dirname");
3745 memcpy(file->dirname, subdir, subdir_len);
3746 file->dirname[subdir_len] = '\0';
3752 /* Find the appropriate directory... */
3754 if (!state->file && exists(cwd, filename)) {
3757 if (local && state->file && exists(state->file->dirname, filename)) {
3758 dir = state->file->dirname;
3760 for(path = include_paths; !dir && *path; path++) {
3761 if (exists(*path, filename)) {
3766 error(state, 0, "Cannot find `%s'\n", filename);
/* Build "<dir>/<subdir>" as the file's directory name. */
3768 dirlen = strlen(dir);
3769 file->dirname = xmalloc(dirlen + 1 + subdir_len + 1, "dirname");
3770 memcpy(file->dirname, dir, dirlen);
3771 file->dirname[dirlen] = '/';
3772 memcpy(file->dirname + dirlen + 1, subdir, subdir_len);
3773 file->dirname[dirlen + 1 + subdir_len] = '\0';
3775 file->buf = slurp_file(file->dirname, file->basename, &file->size);
3778 file->pos = file->buf;
3779 file->line_start = file->pos;
3782 file->report_line = 1;
3783 file->report_name = file->basename;
3784 file->report_dir = file->dirname;
/* Push onto the include stack. */
3786 file->prev = state->file;
/* Translation phases 1-2: trigraphs, then backslash-newline splicing. */
3789 process_trigraphs(state);
3790 splice_lines(state);
3793 /* Type helper functions */
/* Allocate a fresh type node with the given type code and children;
 * field/type identifiers start out unset.
 */
3795 static struct type *new_type(
3796 unsigned int type, struct type *left, struct type *right)
3798 struct type *result;
3799 result = xmalloc(sizeof(*result), "type");
3800 result->type = type;
3801 result->left = left;
3802 result->right = right;
3803 result->field_ident = 0;
3804 result->type_ident = 0;
/* Copy `old` into a new node, replacing everything outside TYPE_MASK
 * (storage class and qualifiers) with `specifiers`.
 */
3808 static struct type *clone_type(unsigned int specifiers, struct type *old)
3810 struct type *result;
3811 result = xmalloc(sizeof(*result), "type");
3812 memcpy(result, old, sizeof(*result));
3813 result->type &= TYPE_MASK;
3814 result->type |= specifiers;
/* Target-type sizes and alignments (bytes); long matches the host long_t. */
3818 #define SIZEOF_SHORT 2
3819 #define SIZEOF_INT 4
3820 #define SIZEOF_LONG (sizeof(long_t))
3822 #define ALIGNOF_SHORT 2
3823 #define ALIGNOF_INT 4
3824 #define ALIGNOF_LONG (sizeof(long_t))
/* Truncate a host value to the target unsigned type's width. */
3826 #define MASK_UCHAR(X) ((X) & ((ulong_t)0xff))
3827 #define MASK_USHORT(X) ((X) & (((ulong_t)1 << (SIZEOF_SHORT*8)) - 1))
/* Function rather than a macro: the shift would be undefined when
 * SIZEOF_INT == SIZEOF_LONG, so guard it with a runtime comparison. */
3828 static inline ulong_t mask_uint(ulong_t x)
3830 if (SIZEOF_INT < SIZEOF_LONG) {
3831 ulong_t mask = (((ulong_t)1) << ((ulong_t)(SIZEOF_INT*8))) -1;
3836 #define MASK_UINT(X) (mask_uint(X))
3837 #define MASK_ULONG(X) (X)
/* Singleton nodes for the basic types; shared instead of allocated. */
3839 static struct type void_type = { .type = TYPE_VOID };
3840 static struct type char_type = { .type = TYPE_CHAR };
3841 static struct type uchar_type = { .type = TYPE_UCHAR };
3842 static struct type short_type = { .type = TYPE_SHORT };
3843 static struct type ushort_type = { .type = TYPE_USHORT };
3844 static struct type int_type = { .type = TYPE_INT };
3845 static struct type uint_type = { .type = TYPE_UINT };
3846 static struct type long_type = { .type = TYPE_LONG };
3847 static struct type ulong_type = { .type = TYPE_ULONG };
/* Type of a function taking no arguments and returning void. */
3849 static struct type void_func = {
3850 .type = TYPE_FUNCTION,
3852 .right = &void_type,
/* Create the triple(s) representing a variable of `type`.  Automatic
 * scalars become OP_ADECL; automatic structs become an OP_VAL_VEC with
 * one recursively-built element per member; permanent storage becomes
 * OP_SDECL.
 */
3855 static struct triple *variable(struct compile_state *state, struct type *type)
3857 struct triple *result;
3858 if ((type->type & STOR_MASK) != STOR_PERM) {
3859 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
3860 result = triple(state, OP_ADECL, type, 0, 0);
3863 struct triple **vector;
3865 result = new_triple(state, OP_VAL_VEC, type, -1, -1);
3866 vector = &result->param[0];
/* Walk the TYPE_PRODUCT spine, one declaration per struct member. */
3870 while((field->type & TYPE_MASK) == TYPE_PRODUCT) {
3871 vector[index] = variable(state, field->left);
3872 field = field->right;
3875 vector[index] = variable(state, field);
3879 result = triple(state, OP_SDECL, type, 0, 0);
/* Print the storage-class keyword of `type` to `fp` (debug printing). */
3884 static void stor_of(FILE *fp, struct type *type)
3886 switch(type->type & STOR_MASK) {
3888 fprintf(fp, "auto ");
3891 fprintf(fp, "static ");
3894 fprintf(fp, "extern ");
3897 fprintf(fp, "register ");
3900 fprintf(fp, "typedef ");
3903 fprintf(fp, "inline ");
/* Print the qualifier keywords (const/volatile/restrict) of `type`. */
3907 static void qual_of(FILE *fp, struct type *type)
3909 if (type->type & QUAL_CONST) {
3910 fprintf(fp, " const");
3912 if (type->type & QUAL_VOLATILE) {
3913 fprintf(fp, " volatile");
3915 if (type->type & QUAL_RESTRICT) {
3916 fprintf(fp, " restrict");
/* Print a human-readable rendering of `type` to `fp`; recurses through
 * pointer, product, function, and array constructors.
 */
3920 static void name_of(FILE *fp, struct type *type)
3923 switch(type->type & TYPE_MASK) {
3925 fprintf(fp, "void");
3929 fprintf(fp, "signed char");
3933 fprintf(fp, "unsigned char");
3937 fprintf(fp, "signed short");
3941 fprintf(fp, "unsigned short");
3945 fprintf(fp, "signed int");
3949 fprintf(fp, "unsigned int");
3953 fprintf(fp, "signed long");
3957 fprintf(fp, "unsigned long");
3961 name_of(fp, type->left);
3967 name_of(fp, type->left);
3969 name_of(fp, type->right);
3972 fprintf(fp, "enum %s", type->type_ident->name);
3976 fprintf(fp, "struct %s", type->type_ident->name);
/* Function: return type, then "(*)(" parameter list ")". */
3981 name_of(fp, type->left);
3982 fprintf(fp, " (*)(");
3983 name_of(fp, type->right);
3988 name_of(fp, type->left);
3989 fprintf(fp, " [%ld]", (long)(type->elements));
/* Unknown type code: print the raw bits for debugging. */
3992 fprintf(fp, "????: %x", type->type & TYPE_MASK);
/* Return the alignment in bytes of `type`.  A product (struct member
 * list) aligns to the stricter of its two halves; pointers and arrays
 * take the alignment of their element type.
 */
3997 static size_t align_of(struct compile_state *state, struct type *type)
4001 switch(type->type & TYPE_MASK) {
4011 align = ALIGNOF_SHORT;
4016 align = ALIGNOF_INT;
4021 align = ALIGNOF_LONG;
4026 size_t left_align, right_align;
4027 left_align = align_of(state, type->left);
4028 right_align = align_of(state, type->right);
4029 align = (left_align >= right_align) ? left_align : right_align;
4033 align = align_of(state, type->left);
4036 align = align_of(state, type->left);
4039 error(state, 0, "alignof not yet defined for type\n");
/* Return how many pad bytes bring `offset` up to a multiple of `align`
 * (zero when already aligned).
 */
4045 static size_t needed_padding(size_t offset, size_t align)
4049 if (offset % align) {
4050 padding = align - (offset % align);
/* Return the size in bytes of `type`.  Products (struct member lists)
 * accumulate member sizes with inter-member padding; overlaps (unions)
 * take the larger half; structs are padded to a multiple of their
 * alignment.
 */
4054 static size_t size_of(struct compile_state *state, struct type *type)
4058 switch(type->type & TYPE_MASK) {
4068 size = SIZEOF_SHORT;
/* Product: walk the member spine, padding each member into place. */
4084 while((type->type & TYPE_MASK) == TYPE_PRODUCT) {
4085 align = align_of(state, type->left);
4086 pad = needed_padding(size, align);
4087 size = size + pad + size_of(state, type->left);
4090 align = align_of(state, type);
4091 pad = needed_padding(size, align);
4092 size = size + pad + size_of(state, type);
/* Overlap (union): size of the larger alternative. */
4097 size_t size_left, size_right;
4098 size_left = size_of(state, type->left);
4099 size_right = size_of(state, type->right);
4100 size = (size_left >= size_right)? size_left : size_right;
4104 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
4105 internal_error(state, 0, "Invalid array type");
4107 size = size_of(state, type->left) * type->elements;
4113 size = size_of(state, type->left);
4114 /* Pad structures so their size is a multiples of their alignment */
4115 align = align_of(state, type);
4116 pad = needed_padding(size, align);
4121 internal_error(state, 0, "sizeof not yet defined for type\n");
/* Return the byte offset of struct member `field` within struct `type`,
 * accumulating member sizes and alignment padding along the member spine.
 * Errors out if `field` is not a member.
 */
4127 static size_t field_offset(struct compile_state *state,
4128 struct type *type, struct hash_entry *field)
4130 struct type *member;
4132 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4133 internal_error(state, 0, "field_offset only works on structures");
4136 member = type->left;
4137 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4138 align = align_of(state, member->left);
4139 size += needed_padding(size, align);
4140 if (member->left->field_ident == field) {
4141 member = member->left;
4144 size += size_of(state, member->left);
4145 member = member->right;
/* Last member (not a product node): check it as well. */
4147 align = align_of(state, member);
4148 size += needed_padding(size, align);
4149 if (member->field_ident != field) {
4150 error(state, 0, "member %s not present", field->name);
/* Return the type of struct member `field` within struct `type`;
 * errors out if `field` is not a member.
 */
4155 static struct type *field_type(struct compile_state *state,
4156 struct type *type, struct hash_entry *field)
4158 struct type *member;
4159 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4160 internal_error(state, 0, "field_type only works on structures");
4162 member = type->left;
4163 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4164 if (member->left->field_ident == field) {
4165 member = member->left;
4168 member = member->right;
4170 if (member->field_ident != field) {
4171 error(state, 0, "member %s not present", field->name);
/* Return the struct member following `prev_member` in declaration order
 * (internal error if `prev_member` is not found).
 * NOTE(review): the handling of a null `prev_member` (first member) is
 * in lines elided from this excerpt — confirm.
 */
4176 static struct type *next_field(struct compile_state *state,
4177 struct type *type, struct type *prev_member)
4179 struct type *member;
4180 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4181 internal_error(state, 0, "next_field only works on structures");
4183 member = type->left;
4184 while((member->type & TYPE_MASK) == TYPE_PRODUCT) {
4186 member = member->left;
4189 if (member->left == prev_member) {
4192 member = member->right;
4194 if (member == prev_member) {
4198 internal_error(state, 0, "prev_member %s not present",
4199 prev_member->field_ident->name);
/* Return the element of a struct-valued OP_VAL_VEC triple `decl` that
 * corresponds to member `field`; internal errors on malformed input.
 */
4204 static struct triple *struct_field(struct compile_state *state,
4205 struct triple *decl, struct hash_entry *field)
4207 struct triple **vector;
4211 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
4214 if (decl->op != OP_VAL_VEC) {
4215 internal_error(state, 0, "Invalid struct variable");
4218 internal_error(state, 0, "Missing structure field");
4221 vector = &RHS(decl, 0);
/* Walk the member spine in step with the element vector. */
4223 while((type->type & TYPE_MASK) == TYPE_PRODUCT) {
4224 if (type->left->field_ident == field) {
4231 if (type->field_ident != field) {
4232 internal_error(state, 0, "field %s not found?", field->name);
4234 return vector[index];
/* Verify that every array dimension of `type` has a known element count
 * (recursing through nested array types); error otherwise.
 */
4237 static void arrays_complete(struct compile_state *state, struct type *type)
4239 if ((type->type & TYPE_MASK) == TYPE_ARRAY) {
4240 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
4241 error(state, 0, "array size not specified");
4243 arrays_complete(state, type->left);
/* C integral promotion on a bare type code: enums become int, and any
 * integer type ranked below int is promoted (to int, per C semantics).
 */
4247 static unsigned int do_integral_promotion(unsigned int type)
4250 if (type == TYPE_ENUM) type = TYPE_INT;
4251 if (TYPE_INTEGER(type) && (TYPE_RANK(type) < TYPE_RANK(TYPE_INT))) {
/* Usual arithmetic conversions on two bare type codes: floating types
 * dominate (long double > double > float); otherwise both sides are
 * integrally promoted and the common type is chosen by rank and
 * signedness, per the C rules.
 */
4257 static unsigned int do_arithmetic_conversion(
4258 unsigned int left, unsigned int right)
4262 /* Convert enums to ints */
4263 if (left == TYPE_ENUM) left = TYPE_INT;
4264 if (right == TYPE_ENUM) right = TYPE_INT;
4265 if ((left == TYPE_LDOUBLE) || (right == TYPE_LDOUBLE)) {
4266 return TYPE_LDOUBLE;
4268 else if ((left == TYPE_DOUBLE) || (right == TYPE_DOUBLE)) {
4271 else if ((left == TYPE_FLOAT) || (right == TYPE_FLOAT)) {
4274 left = do_integral_promotion(left);
4275 right = do_integral_promotion(right);
4276 /* If both operands have the same size done */
4277 if (left == right) {
4280 /* If both operands have the same signedness pick the larger */
4281 else if (!!TYPE_UNSIGNED(left) == !!TYPE_UNSIGNED(right)) {
4282 return (TYPE_RANK(left) >= TYPE_RANK(right)) ? left : right;
4284 /* If the signed type can hold everything use it */
4285 else if (TYPE_SIGNED(left) && (TYPE_RANK(left) > TYPE_RANK(right))) {
4288 else if (TYPE_SIGNED(right) && (TYPE_RANK(right) > TYPE_RANK(left))) {
4291 /* Convert to the unsigned type with the same rank as the signed type */
4292 else if (TYPE_SIGNED(left)) {
4293 return TYPE_MKUNSIGNED(left);
4296 return TYPE_MKUNSIGNED(right);
4300 /* see if two types are the same except for qualifiers */
/* Structural equality: recurses through pointers, arrays (comparing
 * element counts), functions, and products; structs compare by identity
 * of their type_ident tag.
 */
4301 static int equiv_types(struct type *left, struct type *right)
4304 /* Error if the basic types do not match */
4305 if ((left->type & TYPE_MASK) != (right->type & TYPE_MASK)) {
4308 type = left->type & TYPE_MASK;
4309 /* If the basic types match and it is a void type we are done */
4310 if (type == TYPE_VOID) {
4313 /* if the basic types match and it is an arithmetic type we are done */
4314 if (TYPE_ARITHMETIC(type)) {
4317 /* If it is a pointer type recurse and keep testing */
4318 if (type == TYPE_POINTER) {
4319 return equiv_types(left->left, right->left);
4321 else if (type == TYPE_ARRAY) {
4322 return (left->elements == right->elements) &&
4323 equiv_types(left->left, right->left);
4325 /* test for struct/union equality */
4326 else if (type == TYPE_STRUCT) {
4327 return left->type_ident == right->type_ident;
4329 /* Test for equivalent functions */
4330 else if (type == TYPE_FUNCTION) {
4331 return equiv_types(left->left, right->left) &&
4332 equiv_types(left->right, right->right);
4334 /* We only see TYPE_PRODUCT as part of function equivalence matching */
4335 else if (type == TYPE_PRODUCT) {
4336 return equiv_types(left->left, right->left) &&
4337 equiv_types(left->right, right->right);
4339 /* We should see TYPE_OVERLAP */
/* True when both types are pointers to equivalent pointed-to types. */
4345 static int equiv_ptrs(struct type *left, struct type *right)
4347 if (((left->type & TYPE_MASK) != TYPE_POINTER) ||
4348 ((right->type & TYPE_MASK) != TYPE_POINTER)) {
4351 return equiv_types(left->left, right->left);
/* Compute the composite of two compatible types: the basic type codes
 * must match, and the result carries the union of both sides'
 * qualifiers (storage bits excluded).  Returns a freshly built type,
 * recursing through pointers, functions, and products; incompatible
 * inputs yield no result (handled in lines elided from this excerpt).
 */
4354 static struct type *compatible_types(struct type *left, struct type *right)
4356 struct type *result;
4357 unsigned int type, qual_type;
4358 /* Error if the basic types do not match */
4359 if ((left->type & TYPE_MASK) != (right->type & TYPE_MASK)) {
4362 type = left->type & TYPE_MASK;
/* Merge qualifiers from both sides, dropping storage-class bits. */
4363 qual_type = (left->type & ~STOR_MASK) | (right->type & ~STOR_MASK);
4365 /* if the basic types match and it is an arithmetic type we are done */
4366 if (TYPE_ARITHMETIC(type)) {
4367 result = new_type(qual_type, 0, 0);
4369 /* If it is a pointer type recurse and keep testing */
4370 else if (type == TYPE_POINTER) {
4371 result = compatible_types(left->left, right->left);
4373 result = new_type(qual_type, result, 0);
4376 /* test for struct/union equality */
4377 else if (type == TYPE_STRUCT) {
4378 if (left->type_ident == right->type_ident) {
4382 /* Test for equivalent functions */
4383 else if (type == TYPE_FUNCTION) {
4384 struct type *lf, *rf;
4385 lf = compatible_types(left->left, right->left);
4386 rf = compatible_types(left->right, right->right);
4388 result = new_type(qual_type, lf, rf);
4391 /* We only see TYPE_PRODUCT as part of function equivalence matching */
4392 else if (type == TYPE_PRODUCT) {
4393 struct type *lf, *rf;
4394 lf = compatible_types(left->left, right->left);
4395 rf = compatible_types(left->right, right->right);
4397 result = new_type(qual_type, lf, rf);
4401 /* Nothing else is compatible */
/* Composite of two pointer types: both must be pointers; the result is
 * a pointer to the composite of the pointed-to types, carrying merged
 * qualifiers from both pointer types.
 */
4406 static struct type *compatible_ptrs(struct type *left, struct type *right)
4408 struct type *result;
4409 if (((left->type & TYPE_MASK) != TYPE_POINTER) ||
4410 ((right->type & TYPE_MASK) != TYPE_POINTER)) {
4413 result = compatible_types(left->left, right->left);
4415 unsigned int qual_type;
4416 qual_type = (left->type & ~STOR_MASK) | (right->type & ~STOR_MASK);
4417 result = new_type(qual_type, result, 0);
/* Apply integral promotion to the logical type of triple `def`; the
 * stored value itself is untouched because all operations are carried
 * out in full registers (see the original comment below).
 */
4422 static struct triple *integral_promotion(
4423 struct compile_state *state, struct triple *def)
4427 /* As all operations are carried out in registers
4428 * the values are converted on load I just convert
4429 * logical type of the operand.
4431 if (TYPE_INTEGER(type->type)) {
4432 unsigned int int_type;
/* Keep the qualifier/storage bits, replace only the basic type code. */
4433 int_type = type->type & ~TYPE_MASK;
4434 int_type |= do_integral_promotion(type->type);
4435 if (int_type != type->type) {
4436 def->type = new_type(int_type, 0, 0);
/* Require that triple `def` has an arithmetic type; report an error
 * otherwise.  Fix: error message typo "expexted" -> "expected".
 */
4443 static void arithmetic(struct compile_state *state, struct triple *def)
4445 if (!TYPE_ARITHMETIC(def->type->type)) {
4446 error(state, 0, "arithmetic type expected");
/* Require that triple `def` has a pointer or arithmetic type; the error
 * is reported against `def` itself (second argument to error()).
 */
4450 static void ptr_arithmetic(struct compile_state *state, struct triple *def)
4452 if (!TYPE_PTR(def->type->type) && !TYPE_ARITHMETIC(def->type->type)) {
4453 error(state, def, "pointer or arithmetic type expected");
4457 static int is_integral(struct triple *ins)
4459 return TYPE_INTEGER(ins->type->type);
/* integral - require an integral operand; report an error otherwise. */
static void integral(struct compile_state *state, struct triple *def)
{
	if (is_integral(def)) {
		return;
	}
	error(state, 0, "integral type expected");
}
4470 static void bool(struct compile_state *state, struct triple *def)
4472 if (!TYPE_ARITHMETIC(def->type->type) &&
4473 ((def->type->type & TYPE_MASK) != TYPE_POINTER)) {
4474 error(state, 0, "arithmetic or pointer type expected");
4478 static int is_signed(struct type *type)
4480 return !!TYPE_SIGNED(type->type);
/* Is this value located in a register otherwise it must be in memory */
/* NOTE(review): extract drops lines (in_reg decl, returns, braces). */
static int is_in_reg(struct compile_state *state, struct triple *def)
	/* Auto variables live in registers */
	if (def->op == OP_ADECL) {
	/* Static declarations and dereferences are memory resident */
	else if ((def->op == OP_SDECL) || (def->op == OP_DEREF)) {
	/* Value vectors and field accesses inherit storage from operand 0 */
	else if (def->op == OP_VAL_VEC) {
		in_reg = is_in_reg(state, RHS(def, 0));
	else if (def->op == OP_DOT) {
		in_reg = is_in_reg(state, RHS(def, 0));
	internal_error(state, 0, "unknown expr storage location");
/* Is this a stable variable location otherwise it must be a temporary */
/* NOTE(review): extract drops lines (ret decl, returns, braces). */
static int is_stable(struct compile_state *state, struct triple *def)
	/* Declarations, memory dereferences and blob constants are stable */
	if ((def->op == OP_ADECL) ||
		(def->op == OP_SDECL) ||
		(def->op == OP_DEREF) ||
		(def->op == OP_BLOBCONST)) {
	/* A field access is stable iff its base is stable */
	else if (def->op == OP_DOT) {
		ret = is_stable(state, RHS(def, 0));
	/* A value vector is stable iff every element is stable */
	else if (def->op == OP_VAL_VEC) {
		struct triple **vector;
		vector = &RHS(def, 0);
		for(i = 0; i < def->type->elements; i++) {
			if (!is_stable(state, vector[i])) {
/* is_lvalue - predicate: can def appear on the left of an assignment?
 * It must at least be a stable location; OP_DOT defers to its base.
 * NOTE(review): extract drops lines (ret decl, returns, braces).
 */
static int is_lvalue(struct compile_state *state, struct triple *def)
	if (!is_stable(state, def)) {
	if (def->op == OP_DOT) {
		ret = is_lvalue(state, RHS(def, 0));
/* clvalue - demand that def is an lvalue (possibly const-qualified);
 * internal error for a missing expression, user error otherwise.
 * NOTE(review): extract drops the null-check line and braces.
 */
static void clvalue(struct compile_state *state, struct triple *def)
		internal_error(state, def, "nothing where lvalue expected?");
	if (!is_lvalue(state, def)) {
		error(state, def, "lvalue expected");
4563 static void lvalue(struct compile_state *state, struct triple *def)
4565 clvalue(state, def);
4566 if (def->type->type & QUAL_CONST) {
4567 error(state, def, "modifable lvalue expected");
4571 static int is_pointer(struct triple *def)
4573 return (def->type->type & TYPE_MASK) == TYPE_POINTER;
/* pointer - require a pointer operand; report an error otherwise. */
static void pointer(struct compile_state *state, struct triple *def)
{
	if (is_pointer(def)) {
		return;
	}
	error(state, def, "pointer expected");
}
/* int_const - build an OP_INTCONST triple of the given integer type and
 * value; internal error for non-integer types.
 * NOTE(review): extract drops lines (case labels, break/default, return);
 * the "unkown" typo is in a runtime string and is left untouched here.
 */
static struct triple *int_const(
	struct compile_state *state, struct type *type, ulong_t value)
	struct triple *result;
	switch(type->type & TYPE_MASK) {
	case TYPE_INT: case TYPE_UINT:
	case TYPE_LONG: case TYPE_ULONG:
		internal_error(state, 0, "constant for unkown type");
	result = triple(state, OP_INTCONST, type, 0, 0);
	result->u.cval = value;
4601 static struct triple *read_expr(struct compile_state *state, struct triple *def);
/* do_mk_addr_expr - build &expr + offset with result type pointer-to-type.
 * Only static declarations (constant address) and dereferences (pointer
 * arithmetic) can have their address taken; autos cannot.
 * NOTE(review): extract drops lines (braces, return, one triple arg).
 */
static struct triple *do_mk_addr_expr(struct compile_state *state,
	struct triple *expr, struct type *type, ulong_t offset)
	struct triple *result;
	clvalue(state, expr);	/* must be an lvalue to have an address */
	/* Result type: pointer to type, keeping type's qualifiers */
	type = new_type(TYPE_POINTER | (type->type & QUAL_MASK), type, 0);
	if (expr->op == OP_ADECL) {
		error(state, expr, "address of auto variables not supported");
	}
	else if (expr->op == OP_SDECL) {
		/* Static storage: address is a link-time constant */
		result = triple(state, OP_ADDRCONST, type, 0, 0);
		MISC(result, 0) = expr;
		result->u.cval = offset;
	}
	else if (expr->op == OP_DEREF) {
		/* Address of *p + offset is just pointer arithmetic */
		result = triple(state, OP_ADD, type,
			int_const(state, &ulong_type, offset));
	}
		internal_error(state, expr, "cannot take address of expression");
4631 static struct triple *mk_addr_expr(
4632 struct compile_state *state, struct triple *expr, ulong_t offset)
4634 return do_mk_addr_expr(state, expr, expr->type, offset);
4637 static struct triple *mk_deref_expr(
4638 struct compile_state *state, struct triple *expr)
4640 struct type *base_type;
4641 pointer(state, expr);
4642 base_type = expr->type->left;
4643 return triple(state, OP_DEREF, base_type, expr, 0);
/* array_to_pointer - implement C's array-to-pointer decay: an array-typed
 * value becomes a pointer to its first element.  Static/constant arrays
 * decay to an OP_ADDRCONST; otherwise a copy into the pointer type.
 * NOTE(review): extract drops lines (type decl, braces, return).
 */
static struct triple *array_to_pointer(struct compile_state *state, struct triple *def)
	if ((def->type->type & TYPE_MASK) == TYPE_ARRAY) {
		/* Pointer to the element type, keeping the array's qualifiers */
		TYPE_POINTER | (def->type->type & QUAL_MASK),
			def->type->left, 0);
		if ((def->op == OP_SDECL) || IS_CONST_OP(def->op)) {
			struct triple *addrconst;
			if ((def->op != OP_SDECL) && (def->op != OP_BLOBCONST)) {
				internal_error(state, def, "bad array constant");
			/* Address of static data is a link-time constant */
			addrconst = triple(state, OP_ADDRCONST, type, 0, 0);
			MISC(addrconst, 0) = def;
		def = triple(state, OP_COPY, type, def, 0);
/* deref_field - access member `field` of struct-typed expr.  For
 * memory-resident (STOR_PERM) structs this is pointer arithmetic plus a
 * dereference; otherwise an OP_DOT marker selecting the member variable.
 * NOTE(review): extract drops lines (null check, type assignment, return).
 */
static struct triple *deref_field(
	struct compile_state *state, struct triple *expr, struct hash_entry *field)
	struct triple *result;
	struct type *type, *member;
		internal_error(state, 0, "No field passed to deref_field");
	if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
		error(state, 0, "request for member %s in something not a struct or union",
	/* Resolve the member's type within the struct */
	member = field_type(state, type, field);
	if ((type->type & STOR_MASK) == STOR_PERM) {
		/* Do the pointer arithmetic to get a deref of the field */
		offset = field_offset(state, type, field);
		result = do_mk_addr_expr(state, expr, member, offset);
		result = mk_deref_expr(state, result);
	}
		/* Find the variable for the field I want. */
		result = triple(state, OP_DOT, member, expr, 0);
		result->u.field = field;
/* read_expr - produce the value of def.  Unstable expressions are already
 * values; arrays decay to pointers; register-resident variables are read
 * (OP_READ), memory-resident ones loaded via address + deref (OP_LOAD).
 * NOTE(review): extract drops lines (op selection, early returns, braces).
 */
static struct triple *read_expr(struct compile_state *state, struct triple *def)
	/* Temporaries are values already; nothing to read */
	if (!is_stable(state, def)) {
	/* Tranform an array to a pointer to the first element */
#warning "CHECK_ME is this the right place to transform arrays to pointers?"
	if ((def->type->type & TYPE_MASK) == TYPE_ARRAY) {
		return array_to_pointer(state, def);
	if (is_in_reg(state, def)) {
	/* Static variables are read through their address */
	if (def->op == OP_SDECL) {
		def = mk_addr_expr(state, def, 0);
		def = mk_deref_expr(state, def);
	return triple(state, op, def->type, def, 0);
/* is_write_compatible - may a value of type rval be assigned to an object
 * of type dest?  Accepts arithmetic<->arithmetic, pointer<->void-pointer,
 * equivalent pointers (qualifiers aside), and identical struct/unions.
 * NOTE(review): extract drops lines (compatible flag, returns, braces).
 */
int is_write_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
	/* Both operands have arithmetic type */
	if (TYPE_ARITHMETIC(dest->type) && TYPE_ARITHMETIC(rval->type)) {
	/* One operand is a pointer and the other is a pointer to void */
	else if (((dest->type & TYPE_MASK) == TYPE_POINTER) &&
		((rval->type & TYPE_MASK) == TYPE_POINTER) &&
		(((dest->left->type & TYPE_MASK) == TYPE_VOID) ||
		((rval->left->type & TYPE_MASK) == TYPE_VOID))) {
	/* If both types are the same without qualifiers we are good */
	else if (equiv_ptrs(dest, rval)) {
	/* test for struct/union equality */
	else if (((dest->type & TYPE_MASK) == TYPE_STRUCT) &&
		((rval->type & TYPE_MASK) == TYPE_STRUCT) &&
		(dest->type_ident == rval->type_ident)) {
/* write_compatible - error out unless a value of type rval may be
 * assigned to an object of type dest (see is_write_compatible).
 */
static void write_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
{
	int ok = is_write_compatible(state, dest, rval);
	if (!ok) {
		error(state, 0, "Incompatible types in assignment");
	}
}
/* is_init_compatible - initialization accepts everything assignment does,
 * plus exactly-equivalent types.
 * NOTE(review): extract drops lines (flag, returns, braces).
 */
static int is_init_compatible(struct compile_state *state,
	struct type *dest, struct type *rval)
	if (is_write_compatible(state, dest, rval)) {
	else if (equiv_types(dest, rval)) {
/* write_expr - store rval into dest.  Validates that dest is a writable
 * lvalue and types are assignment-compatible, then emits OP_WRITE for
 * register destinations or OP_STORE for memory destinations.
 * NOTE(review): extract drops lines (op selection, return, braces);
 * typos in internal_error strings are runtime text and left untouched.
 */
static struct triple *write_expr(
	struct compile_state *state, struct triple *dest, struct triple *rval)
		internal_error(state, 0, "missing rval");
	if (rval->op == OP_LIST) {
		internal_error(state, 0, "expression of type OP_LIST?");
	if (!is_lvalue(state, dest)) {
		internal_error(state, 0, "writing to a non lvalue?");
	if (dest->type->type & QUAL_CONST) {
		internal_error(state, 0, "modifable lvalue expexted");
	/* Reject incompatible assignments with a user error */
	write_compatible(state, dest->type, rval->type);
	/* Now figure out which assignment operator to use */
	if (is_in_reg(state, dest)) {
	def = triple(state, op, dest->type, dest, rval);
/* init_expr - initialize dest with rval.  Non-static (non STOR_PERM)
 * objects initialize via an ordinary read+write; static objects get
 * their initializer attached via MISC(dest,0), completing unspecified
 * array sizes from the initializer.
 * NOTE(review): extract drops lines (braces, returns).
 */
static struct triple *init_expr(
	struct compile_state *state, struct triple *dest, struct triple *rval)
		internal_error(state, 0, "missing rval");
	if ((dest->type->type & STOR_MASK) != STOR_PERM) {
		/* Ordinary runtime initialization: plain assignment */
		rval = read_expr(state, rval);
		def = write_expr(state, dest, rval);
	}
		/* Fill in the array size if necessary */
		if (((dest->type->type & TYPE_MASK) == TYPE_ARRAY) &&
			((rval->type->type & TYPE_MASK) == TYPE_ARRAY)) {
			if (dest->type->elements == ELEMENT_COUNT_UNSPECIFIED) {
				dest->type->elements = rval->type->elements;
		if (!equiv_types(dest->type, rval->type)) {
			error(state, 0, "Incompatible types in inializer");
		/* Attach the initializer to the static declaration */
		MISC(dest, 0) = rval;
		insert_triple(state, dest, rval);
		rval->id |= TRIPLE_FLAG_FLATTENED;
		use_triple(MISC(dest, 0), dest);
/* arithmetic_result - compute the C "usual arithmetic conversion" result
 * type of two arithmetic operands.
 * NOTE(review): extract drops lines (type decl, new_type call head, return).
 */
struct type *arithmetic_result(
	struct compile_state *state, struct triple *left, struct triple *right)
	/* Sanity checks to ensure I am working with arithmetic types */
	arithmetic(state, left);
	arithmetic(state, right);
	do_arithmetic_conversion(
		right->type->type), 0, 0);
/* ptr_arithmetic_result - result type of pointer +/- arithmetic: both
 * arithmetic -> usual conversions; pointer on the left -> the pointer
 * type itself.
 * NOTE(review): extract drops lines (type decl, assignment, return).
 */
struct type *ptr_arithmetic_result(
	struct compile_state *state, struct triple *left, struct triple *right)
	/* Sanity checks to ensure I am working with the proper types */
	ptr_arithmetic(state, left);
	arithmetic(state, right);
	if (TYPE_ARITHMETIC(left->type->type) &&
		TYPE_ARITHMETIC(right->type->type)) {
		type = arithmetic_result(state, left, right);
	}
	else if (TYPE_PTR(left->type->type)) {
		internal_error(state, 0, "huh?");
/* boolean helper function */

/* ltrue_expr - normalize expr to a boolean (0/1) int value.  Comparison
 * and logical ops already yield booleans and pass through unchanged;
 * everything else is wrapped in OP_LTRUE.
 * NOTE(review): extract drops lines (switch head, break/default, return).
 */
static struct triple *ltrue_expr(struct compile_state *state,
	struct triple *expr)
	case OP_LTRUE: case OP_LFALSE: case OP_EQ: case OP_NOTEQ:
	case OP_SLESS: case OP_ULESS: case OP_SMORE: case OP_UMORE:
	case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
		/* If the expression is already boolean do nothing */
		expr = triple(state, OP_LTRUE, &int_type, expr, 0);
4896 static struct triple *lfalse_expr(struct compile_state *state,
4897 struct triple *expr)
4899 return triple(state, OP_LFALSE, &int_type, expr, 0);
/* cond_expr - build test ? left : right as an OP_COND triple, computing
 * the result type per the C rules for the conditional operator, then
 * inverting the test (OP_COND branches on false).
 * NOTE(review): extract drops lines (braces, return).
 */
static struct triple *cond_expr(
	struct compile_state *state,
	struct triple *test, struct triple *left, struct triple *right)
	struct type *result_type;
	unsigned int left_type, right_type;
	left_type = left->type->type;
	right_type = right->type->type;
	/* Both operands have arithmetic type */
	if (TYPE_ARITHMETIC(left_type) && TYPE_ARITHMETIC(right_type)) {
		result_type = arithmetic_result(state, left, right);
	}
	/* Both operands have void type */
	else if (((left_type & TYPE_MASK) == TYPE_VOID) &&
		((right_type & TYPE_MASK) == TYPE_VOID)) {
		result_type = &void_type;
	}
	/* pointers to the same type... */
	else if ((result_type = compatible_ptrs(left->type, right->type))) {
	}
	/* Both operands are pointers and left is a pointer to void */
	else if (((left_type & TYPE_MASK) == TYPE_POINTER) &&
		((right_type & TYPE_MASK) == TYPE_POINTER) &&
		((left->type->left->type & TYPE_MASK) == TYPE_VOID)) {
		result_type = right->type;
	}
	/* Both operands are pointers and right is a pointer to void */
	else if (((left_type & TYPE_MASK) == TYPE_POINTER) &&
		((right_type & TYPE_MASK) == TYPE_POINTER) &&
		((right->type->left->type & TYPE_MASK) == TYPE_VOID)) {
		result_type = left->type;
	}
		error(state, 0, "Incompatible types in conditional expression");
	/* Cleanup and invert the test */
	test = lfalse_expr(state, read_expr(state, test));
	def = new_triple(state, OP_COND, result_type, 0, 3);
	def->param[0] = test;
	def->param[1] = left;
	def->param[2] = right;
/* expr_depth - heuristic depth of an unflattened expression tree, used
 * to order operand flattening (deepest first reduces register pressure;
 * Sethi-Ullman style).  Already-flattened or missing operands are leaves.
 * NOTE(review): extract drops lines (count init, returns, braces).
 */
static int expr_depth(struct compile_state *state, struct triple *ins)
	if (!ins || (ins->id & TRIPLE_FLAG_FLATTENED)) {
	/* DEREF/VAL are pass-through markers; discount them */
	else if (ins->op == OP_DEREF) {
		count = expr_depth(state, RHS(ins, 0)) - 1;
	}
	else if (ins->op == OP_VAL) {
		count = expr_depth(state, RHS(ins, 0)) - 1;
	}
	else if (ins->op == OP_COMMA) {
		ldepth = expr_depth(state, RHS(ins, 0));
		rdepth = expr_depth(state, RHS(ins, 1));
		count = (ldepth >= rdepth)? ldepth : rdepth;
	}
	else if (ins->op == OP_CALL) {
		/* Don't figure the depth of a call just guess it is huge */
	}
		/* Generic case: one more than the deepest rhs operand */
		struct triple **expr;
		expr = triple_rhs(state, ins, 0);
		for(;expr; expr = triple_rhs(state, ins, expr)) {
			depth = expr_depth(state, *expr);
			if (depth > count) {
4990 static struct triple *flatten(
4991 struct compile_state *state, struct triple *first, struct triple *ptr);
/* flatten_generic - default flattening for instructions that have only
 * rhs (and possibly lhs) operands: flatten the rhs deepest-first (via a
 * selection sort on expr_depth) and register use edges.
 * NOTE(review): extract drops lines (vector decl, sort swap, return).
 */
static struct triple *flatten_generic(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple **ins;
	/* Only operations with just a rhs should come here */
	rhs = TRIPLE_RHS(ptr->sizes);
	lhs = TRIPLE_LHS(ptr->sizes);
	if (TRIPLE_SIZE(ptr->sizes) != lhs + rhs) {
		internal_error(state, ptr, "unexpected args for: %d %s",
			ptr->op, tops(ptr->op));
	/* Find the depth of the rhs elements */
	for(i = 0; i < rhs; i++) {
		vector[i].ins = &RHS(ptr, i);
		vector[i].depth = expr_depth(state, *vector[i].ins);
	/* Selection sort the rhs */
	for(i = 0; i < rhs; i++) {
		for(j = i + 1; j < rhs; j++ ) {
			if (vector[j].depth > vector[max].depth) {
			struct rhs_vector tmp;
			vector[i] = vector[max];
	/* Now flatten the rhs elements */
	for(i = 0; i < rhs; i++) {
		*vector[i].ins = flatten(state, first, *vector[i].ins);
		use_triple(*vector[i].ins, ptr);
	/* Now flatten the lhs elements */
	for(i = 0; i < lhs; i++) {
		struct triple **ins = &LHS(ptr, i);
		*ins = flatten(state, first, *ins);
		use_triple(*ins, ptr);
/* flatten_land - lower short-circuit && into explicit control flow:
 * val = left; if (!val) goto end; val = right; end: yield val.
 * NOTE(review): extract drops lines (left fetch, end label creation).
 */
static struct triple *flatten_land(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *left, *right;
	struct triple *val, *test, *jmp, *label1, *end;
	/* Find the triples */
	right = RHS(ptr, 1);
	/* Generate the needed triples */
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	left = flatten(state, first, write_expr(state, val, left));
	test = flatten(state, first,
		lfalse_expr(state, read_expr(state, val)));
	jmp = flatten(state, first, branch(state, end, test));
	label1 = flatten(state, first, label(state));
	right = flatten(state, first, write_expr(state, val, right));
	TARG(jmp, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* flatten_lor - lower short-circuit || into explicit control flow:
 * val = left; if (val) goto end; val = right; end: yield val.
 * NOTE(review): extract drops lines (left fetch, end label creation).
 */
static struct triple *flatten_lor(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *left, *right;
	struct triple *val, *jmp, *label1, *end;
	/* Find the triples */
	right = RHS(ptr, 1);
	/* Generate the needed triples */
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	left = flatten(state, first, write_expr(state, val, left));
	jmp = flatten(state, first, branch(state, end, left));
	label1 = flatten(state, first, label(state));
	right = flatten(state, first, write_expr(state, val, right));
	TARG(jmp, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* flatten_cond - lower ?: into explicit control flow.  Note the test was
 * inverted by cond_expr, so the branch to `middle` takes the false path:
 * if (!test) goto middle; val = left; goto end; middle: val = right; end:.
 * NOTE(review): extract drops lines (test/left fetch, end label creation).
 */
static struct triple *flatten_cond(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *test, *left, *right;
	struct triple *val, *mv1, *jmp1, *label1, *mv2, *middle, *jmp2, *end;
	/* Find the triples */
	right = RHS(ptr, 2);
	/* Generate the needed triples */
	middle = label(state);
	/* Thread the triples together */
	val = flatten(state, first, variable(state, ptr->type));
	test = flatten(state, first, test);
	jmp1 = flatten(state, first, branch(state, middle, test));
	label1 = flatten(state, first, label(state));
	left = flatten(state, first, left);
	mv1 = flatten(state, first, write_expr(state, val, left));
	jmp2 = flatten(state, first, branch(state, end, 0));
	TARG(jmp1, 0) = flatten(state, first, middle);
	right = flatten(state, first, right);
	mv2 = flatten(state, first, write_expr(state, val, right));
	TARG(jmp2, 0) = flatten(state, first, end);
	/* Now give the caller something to chew on */
	return read_expr(state, val);
/* local_triple - is ins local to func (TRIPLE_FLAG_LOCAL set during
 * copy_func's first pass)?  Debug-prints non-local instructions.
 * NOTE(review): extract drops lines (debug guard, return, braces).
 */
static int local_triple(struct compile_state *state,
	struct triple *func, struct triple *ins)
	int local = (ins->id & TRIPLE_FLAG_LOCAL);
		fprintf(stderr, "global: ");
		display_triple(stderr, ins);
/* copy_func - deep-copy a function's instruction list (for inlining).
 * Three passes over the circular list rooted at RHS(ofunc,0):
 *   1. allocate a copy of each instruction, tagging originals
 *      TRIPLE_FLAG_LOCAL and temporarily recording new as a "user" of
 *      old so pass 2 can map old -> new;
 *   2. resolve operand pointers: non-local operands are shared,
 *      local ones are replaced with their copies via old->use->member;
 *   3. drop the temporary use edges and clear the LOCAL flags.
 * Returns the new OP_LIST function triple.
 * NOTE(review): extract drops many lines (do-loop heads, list linkage,
 * occurance argument, return); tokens preserved verbatim.
 */
struct triple *copy_func(struct compile_state *state, struct triple *ofunc,
	struct occurance *base_occurance)
	struct triple *nfunc;
	struct triple *nfirst, *ofirst;
	struct triple *new, *old;
	/* Debug dump of the function about to be copied */
	fprintf(stdout, "\n");
	loc(stdout, state, 0);
	fprintf(stdout, "\n__________ copy_func _________\n");
	display_func(stdout, ofunc);
	fprintf(stdout, "__________ copy_func _________ done\n\n");

	/* Make a new copy of the old function */
	nfunc = triple(state, OP_LIST, ofunc->type, 0, 0);
	ofirst = old = RHS(ofunc, 0);
		struct occurance *occurance;
		int old_lhs, old_rhs;
		old_lhs = TRIPLE_LHS(old->sizes);
		old_rhs = TRIPLE_RHS(old->sizes);
		occurance = inline_occurance(state, base_occurance, old->occurance);
		new = alloc_triple(state, old->op, old->type, old_lhs, old_rhs,
		if (!triple_stores_block(state, new)) {
			memcpy(&new->u, &old->u, sizeof(new->u));
			RHS(nfunc, 0) = nfirst = new;
			insert_triple(state, nfirst, new);
		new->id |= TRIPLE_FLAG_FLATTENED;

		/* During the copy remember new as user of old */
		use_triple(old, new);

		/* Populate the return type if present */
		if (old == MISC(ofunc, 0)) {
			MISC(nfunc, 0) = new;
		/* Remember which instructions are local */
		old->id |= TRIPLE_FLAG_LOCAL;
	} while(old != ofirst);

	/* Make a second pass to fix up any unresolved references */
		struct triple **oexpr, **nexpr;
		/* Lookup where the copy is, to join pointers */
		count = TRIPLE_SIZE(old->sizes);
		for(i = 0; i < count; i++) {
			oexpr = &old->param[i];
			nexpr = &new->param[i];
			if (*oexpr && !*nexpr) {
				/* Non-local operands are shared, not copied */
				if (!local_triple(state, ofunc, *oexpr)) {
				else if ((*oexpr)->use) {
					*nexpr = (*oexpr)->use->member;
				if (*nexpr == old) {
					internal_error(state, 0, "new == old?");
				use_triple(*nexpr, new);
			if (!*nexpr && *oexpr) {
				internal_error(state, 0, "Could not copy %d\n", i);
	} while((old != ofirst) && (new != nfirst));

	/* Make a third pass to cleanup the extra uses */
		unuse_triple(old, new);
		/* Forget which instructions are local */
		old->id &= ~TRIPLE_FLAG_LOCAL;
	} while ((old != ofirst) && (new != nfirst));
/* flatten_call - inline a function call: copy the callee's body
 * (copy_func), write each actual argument into the matching formal
 * (walking the TYPE_PRODUCT parameter type list), read back the return
 * value for non-void callees, then splice the copied body into the
 * caller's instruction list and free the wrapper triples.
 * NOTE(review): extract drops many lines (decls, arg fetch, splice,
 * return); tokens preserved verbatim.
 */
static struct triple *flatten_call(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	/* Inline the function call */
	struct triple *ofunc, *nfunc, *nfirst, *param, *result;
	struct triple *end, *nend;

	/* Find the triples */
	ofunc = MISC(ptr, 0);
	if (ofunc->op != OP_LIST) {
		internal_error(state, 0, "improper function");
	nfunc = copy_func(state, ofunc, ptr->occurance);
	nfirst = RHS(nfunc, 0)->next;
	/* Prepend the parameter reading into the new function list */
	ptype = nfunc->type->right;
	param = RHS(nfunc, 0)->next;
	pvals = TRIPLE_RHS(ptr->sizes);
	for(i = 0; i < pvals; i++) {
		/* Peel the next argument type off the product type list */
		if ((ptype->type & TYPE_MASK) == TYPE_PRODUCT) {
			atype = ptype->left;
		/* Skip instructions until the matching formal is found */
		while((param->type->type & TYPE_MASK) != (atype->type & TYPE_MASK)) {
			param = param->next;
		flatten(state, nfirst, write_expr(state, param, arg));
		ptype = ptype->right;
		param = param->next;
	/* Non-void callees: the call's value is the callee's result */
	if ((nfunc->type->left->type & TYPE_MASK) != TYPE_VOID) {
		result = read_expr(state, MISC(nfunc,0));
	fprintf(stdout, "\n");
	loc(stdout, state, 0);
	fprintf(stdout, "\n__________ flatten_call _________\n");
	display_func(stdout, nfunc);
	fprintf(stdout, "__________ flatten_call _________ done\n\n");

	/* Get rid of the extra triples */
	nfirst = RHS(nfunc, 0)->next;
	free_triple(state, RHS(nfunc, 0));
	free_triple(state, nfunc);

	/* Append the new function list onto the return list */
	nend = nfirst->prev;
/* flatten - the central lowering routine: convert an expression tree
 * rooted at ptr into a linear instruction sequence inserted before
 * `first`.  Dispatches per opcode (the switch/case labels appear to have
 * been dropped from this extract): COMMA/VAL collapse to an operand,
 * LAND/LOR/COND/CALL get dedicated flatteners, markers like OP_DEREF and
 * OP_DOT are resolved away, and the generic case defers to
 * flatten_generic.  Loops until the opcode no longer rewrites itself,
 * then inserts the result and marks it FLATTENED.
 * NOTE(review): heavily truncated extract; tokens preserved verbatim.
 */
static struct triple *flatten(
	struct compile_state *state, struct triple *first, struct triple *ptr)
	struct triple *orig_ptr;
	/* Only flatten triples once */
	if (ptr->id & TRIPLE_FLAG_FLATTENED) {
		RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
		RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
		return MISC(ptr, 0);
		/* Short-circuit / conditional / call lowering */
		ptr = flatten_land(state, first, ptr);
		ptr = flatten_lor(state, first, ptr);
		ptr = flatten_cond(state, first, ptr);
		ptr = flatten_call(state, first, ptr);
		RHS(ptr, 0) = flatten(state, first, RHS(ptr, 0));
		use_triple(RHS(ptr, 0), ptr);
		use_triple(TARG(ptr, 0), ptr);
		if (TRIPLE_RHS(ptr->sizes)) {
			use_triple(RHS(ptr, 0), ptr);
			if (ptr->next != ptr) {
				use_triple(ptr->next, ptr);
		/* Static declarations go on the global list, wrapped in SDECL */
		insert_triple(state, state->first, ptr);
		ptr->id |= TRIPLE_FLAG_FLATTENED;
		ptr->id &= ~TRIPLE_FLAG_LOCAL;
		ptr = triple(state, OP_SDECL, ptr->type, ptr, 0);
		use_triple(MISC(ptr, 0), ptr);
		/* Since OP_DEREF is just a marker delete it when I flatten it */
		RHS(orig_ptr, 0) = 0;
		free_triple(state, orig_ptr);
		struct triple *base;
		if (base->op == OP_DEREF) {
			/* Field of *p: fold into pointer arithmetic */
			struct triple *left;
			offset = field_offset(state, base->type, ptr->u.field);
			left = RHS(base, 0);
			ptr = triple(state, OP_ADD, left->type,
				read_expr(state, left),
				int_const(state, &ulong_type, offset));
			free_triple(state, base);
		else if (base->op == OP_VAL_VEC) {
			/* Field of a value vector: select the element */
			base = flatten(state, first, base);
			ptr = struct_field(state, base, ptr->u.field);
		MISC(ptr, 0) = flatten(state, first, MISC(ptr, 0));
		use_triple(MISC(ptr, 0), ptr);
		use_triple(ptr, MISC(ptr, 0));
		MISC(ptr, 0) = flatten(state, first, MISC(ptr, 0));
		use_triple(MISC(ptr, 0), ptr);
		first = state->first;
		MISC(ptr, 0) = flatten(state, first, MISC(ptr, 0));
		use_triple(MISC(ptr, 0), ptr);
		insert_triple(state, first, ptr);
		ptr->id |= TRIPLE_FLAG_FLATTENED;
		ptr->id &= ~TRIPLE_FLAG_LOCAL;
		/* Flatten the easy cases we don't override */
		ptr = flatten_generic(state, first, ptr);
	} while(ptr && (ptr != orig_ptr));
	insert_triple(state, first, ptr);
	ptr->id |= TRIPLE_FLAG_FLATTENED;
	ptr->id &= ~TRIPLE_FLAG_LOCAL;
5413 static void release_expr(struct compile_state *state, struct triple *expr)
5415 struct triple *head;
5416 head = label(state);
5417 flatten(state, head, expr);
5418 while(head->next != head) {
5419 release_triple(state, head->next);
5421 free_triple(state, head);
/* replace_rhs_use - in `use`, replace every rhs operand slot pointing at
 * orig with new, and move the corresponding use edge.  Returns whether a
 * replacement happened.
 * NOTE(review): extract drops lines (found flag, slot store, return).
 */
static int replace_rhs_use(struct compile_state *state,
	struct triple *orig, struct triple *new, struct triple *use)
	struct triple **expr;
	expr = triple_rhs(state, use, 0);
	for(;expr; expr = triple_rhs(state, use, expr)) {
		if (*expr == orig) {
	/* Re-point the use edge from orig to new */
	unuse_triple(orig, use);
	use_triple(new, use);
/* replace_lhs_use - lhs counterpart of replace_rhs_use: swap orig for
 * new in `use`'s lhs slots and move the use edge.
 * NOTE(review): extract drops lines (found flag, slot store, return).
 */
static int replace_lhs_use(struct compile_state *state,
	struct triple *orig, struct triple *new, struct triple *use)
	struct triple **expr;
	expr = triple_lhs(state, use, 0);
	for(;expr; expr = triple_lhs(state, use, expr)) {
		if (*expr == orig) {
	/* Re-point the use edge from orig to new */
	unuse_triple(orig, use);
	use_triple(new, use);
/* propogate_use - redirect every user of orig to reference new instead
 * (rhs and lhs slots).  Afterwards orig must have no users left.
 * (Name keeps the file's historical "propogate" spelling; callers depend
 * on it.)
 * NOTE(review): extract drops lines (use fetch, found checks, braces).
 */
static void propogate_use(struct compile_state *state,
	struct triple *orig, struct triple *new)
	struct triple_set *user, *next;
	/* `next` is saved up front because replacement unlinks `user` */
	for(user = orig->use; user; user = next) {
		found |= replace_rhs_use(state, orig, new, use);
		found |= replace_lhs_use(state, orig, new, use);
		internal_error(state, use, "use without use");
	internal_error(state, orig, "used after propogate_use");
5487 * ===========================
/* mk_add_expr - build left + right with C pointer-arithmetic semantics:
 * a pointer operand is normalized to the left, and the integer operand
 * is scaled by the size of the pointed-to type.
 * NOTE(review): extract drops lines (operand swap, scaled-multiply args).
 */
static struct triple *mk_add_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
	struct type *result_type;
	/* Put pointer operands on the left */
	if (is_pointer(right)) {
	left = read_expr(state, left);
	right = read_expr(state, right);
	result_type = ptr_arithmetic_result(state, left, right);
	if (is_pointer(left)) {
		/* Scale the index by sizeof(*left) */
		right = triple(state,
			is_signed(right->type)? OP_SMUL : OP_UMUL,
			int_const(state, &ulong_type,
				size_of(state, left->type->left)));
	return triple(state, OP_ADD, result_type, left, right);
/* mk_sub_expr - build left - right; when left is a pointer the integer
 * operand is scaled by the size of the pointed-to type.
 * NOTE(review): extract drops lines (scaled-multiply args, braces).
 */
static struct triple *mk_sub_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
	struct type *result_type;
	result_type = ptr_arithmetic_result(state, left, right);
	left = read_expr(state, left);
	right = read_expr(state, right);
	if (is_pointer(left)) {
		/* Scale the index by sizeof(*left) */
		right = triple(state,
			is_signed(right->type)? OP_SMUL : OP_UMUL,
			int_const(state, &ulong_type,
				size_of(state, left->type->left)));
	return triple(state, OP_SUB, result_type, left, right);
/* mk_pre_inc_expr - build ++def: store def+1 back into def, and yield
 * the incremented value (OP_VAL sequences the write before the read).
 * NOTE(review): extract drops lines (val decl, lvalue check, final arg).
 */
static struct triple *mk_pre_inc_expr(
	struct compile_state *state, struct triple *def)
	val = mk_add_expr(state, def, int_const(state, &int_type, 1));
	return triple(state, OP_VAL, def->type,
		write_expr(state, def, val),
/* mk_pre_dec_expr - build --def: store def-1 back into def, and yield
 * the decremented value.
 * NOTE(review): extract drops lines (val decl, lvalue check, final arg).
 */
static struct triple *mk_pre_dec_expr(
	struct compile_state *state, struct triple *def)
	val = mk_sub_expr(state, def, int_const(state, &int_type, 1));
	return triple(state, OP_VAL, def->type,
		write_expr(state, def, val),
/* mk_post_inc_expr - build def++: read the old value first, write back
 * old+1, and yield the old value.
 * NOTE(review): extract drops lines (val decl, lvalue check, final arg).
 */
static struct triple *mk_post_inc_expr(
	struct compile_state *state, struct triple *def)
	val = read_expr(state, def);
	return triple(state, OP_VAL, def->type,
		write_expr(state, def,
			mk_add_expr(state, val, int_const(state, &int_type, 1)))
/* mk_post_dec_expr - build def--: read the old value first, write back
 * old-1, and yield the old value.
 * NOTE(review): extract drops lines (val decl, lvalue check, final arg).
 */
static struct triple *mk_post_dec_expr(
	struct compile_state *state, struct triple *def)
	val = read_expr(state, def);
	return triple(state, OP_VAL, def->type,
		write_expr(state, def,
			mk_sub_expr(state, val, int_const(state, &int_type, 1)))
/* mk_subscript_expr - build left[right] as *(left + right); exactly one
 * of the operands must be a pointer.
 */
static struct triple *mk_subscript_expr(
	struct compile_state *state, struct triple *left, struct triple *right)
{
	struct triple *sum;
	left = read_expr(state, left);
	right = read_expr(state, right);
	if (!is_pointer(left) && !is_pointer(right)) {
		error(state, left, "subscripted value is not a pointer");
	}
	sum = mk_add_expr(state, left, right);
	return mk_deref_expr(state, sum);
}
5590 static struct triple *mk_cast_expr(
5591 struct compile_state *state, struct type *type, struct triple *expr)
5594 def = read_expr(state, expr);
5595 def = triple(state, OP_COPY, type, def, 0);
5600 * Compile time evaluation
5601 * ===========================
5603 static int is_const(struct triple *ins)
5605 return IS_CONST_OP(ins->op);
5608 static int is_simple_const(struct triple *ins)
5610 return IS_CONST_OP(ins->op) && (ins->op != OP_ADDRCONST);
/* constants_equal - do two constant triples denote the same value?
 * Requires same opcode and equivalent types; then compares by kind:
 * integer constants by cval, blob constants by size and bytes,
 * address constants by base symbol plus offset.
 * NOTE(review): extract drops lines (switch head, returns, braces);
 * the "uknown" typo is in a runtime string and left untouched here.
 */
static int constants_equal(struct compile_state *state,
	struct triple *left, struct triple *right)
	if (!is_const(left) || !is_const(right)) {
	else if (left->op != right->op) {
	else if (!equiv_types(left->type, right->type)) {
	/* Integer constants: compare the value */
	if (left->u.cval == right->u.cval) {
	/* Blob constants: compare size then bytes */
	size_t lsize, rsize;
	lsize = size_of(state, left->type);
	rsize = size_of(state, right->type);
	if (lsize != rsize) {
	if (memcmp(left->u.blob, right->u.blob, lsize) == 0) {
	/* Address constants: same base symbol and same offset */
	if ((MISC(left, 0) == MISC(right, 0)) &&
		(left->u.cval == right->u.cval)) {
	internal_error(state, left, "uknown constant type");
5661 static int is_zero(struct triple *ins)
5663 return is_const(ins) && (ins->u.cval == 0);
5666 static int is_one(struct triple *ins)
5668 return is_const(ins) && (ins->u.cval == 1);
/* bit_count - presumably counts the set bits in value by scanning from
 * the most significant bit down — TODO confirm; the loop body is missing
 * from this extract.
 */
static long_t bit_count(ulong_t value)
	for(i = (sizeof(ulong_t)*8) -1; i >= 0; i--) {
/* bsr - bit scan reverse: presumably returns the index of the highest
 * set bit (scan runs from the top bit down) — TODO confirm; loop body
 * missing from this extract.
 */
static long_t bsr(ulong_t value)
	for(i = (sizeof(ulong_t)*8) -1; i >= 0; i--) {
/* bsf - bit scan forward: presumably returns the index of the lowest
 * set bit (scan runs from bit 0 up) — TODO confirm; loop body missing
 * from this extract.
 */
static long_t bsf(ulong_t value)
	for(i = 0; i < (sizeof(ulong_t)*8); i++) {
/* log2 - integer base-2 logarithm; body missing from this extract
 * (presumably delegates to bsr — TODO confirm).
 */
static long_t log2(ulong_t value)
/* tlog2 - log2 of a constant triple's integer value. */
static long_t tlog2(struct triple *ins)
	return log2(ins->u.cval);
/* is_pow2 - predicate: is ins a constant power of two?  Tests the value
 * against a single-bit mask (mask construction missing in this extract).
 * NOTE(review): extract drops lines (return 0, mask setup, braces).
 */
static int is_pow2(struct triple *ins)
	ulong_t value, mask;
	if (!is_const(ins)) {
	value = ins->u.cval;
	return ((value & mask) == value);
/* read_const - fetch the unsigned constant value of the operand at
 * *expr, checking that its type and opcode are simple constants.
 * NOTE(review): extract drops lines (rhs fetch, case labels, return).
 */
static ulong_t read_const(struct compile_state *state,
	struct triple *ins, struct triple **expr)
	switch(rhs->type->type &TYPE_MASK) {
		internal_error(state, rhs, "bad type to read_const\n");
	if (!is_simple_const(rhs)) {
		internal_error(state, rhs, "bad op to read_const\n");
/* read_sconst - fetch the operand at *expr as a signed constant value.
 * NOTE(review): the rhs fetch line is missing from this extract.
 */
static long_t read_sconst(struct triple *ins, struct triple **expr)
	return (long_t)(rhs->u.cval);
/* unuse_rhs - drop ins's use edges to each of its rhs operands.
 * NOTE(review): extract drops lines (null-slot guard, slot clear).
 */
static void unuse_rhs(struct compile_state *state, struct triple *ins)
	struct triple **expr;
	expr = triple_rhs(state, ins, 0);
	for(;expr;expr = triple_rhs(state, ins, expr)) {
		unuse_triple(*expr, ins);
/* unuse_lhs - drop ins's use edges to each of its lhs operands.
 * NOTE(review): extract drops lines (slot clear, closing braces).
 */
static void unuse_lhs(struct compile_state *state, struct triple *ins)
	struct triple **expr;
	expr = triple_lhs(state, ins, 0);
	for(;expr;expr = triple_lhs(state, ins, expr)) {
		unuse_triple(*expr, ins);
/* check_lhs - assert that ins has no lhs operands; any hit in the
 * iteration is an internal error.
 */
static void check_lhs(struct compile_state *state, struct triple *ins)
{
	struct triple **slot;
	for(slot = triple_lhs(state, ins, 0); slot;
		slot = triple_lhs(state, ins, slot)) {
		internal_error(state, ins, "unexpected lhs");
	}
}
/* check_targ - assert that ins has no branch-target operands; any hit
 * in the iteration is an internal error.
 */
static void check_targ(struct compile_state *state, struct triple *ins)
{
	struct triple **slot;
	for(slot = triple_targ(state, ins, 0); slot;
		slot = triple_targ(state, ins, slot)) {
		internal_error(state, ins, "unexpected targ");
	}
}
/* wipe_ins - strip an instruction's operand use edges so it can be
 * rewritten in place (see mkcopy/mkconst/mkaddr_const).
 * Be careful which instructions you replace the wiped instruction
 * with: not every instruction has enough slots to hold all others.
 */
static void wipe_ins(struct compile_state *state, struct triple *ins)
{
	check_targ(state, ins);
	unuse_rhs(state, ins);
	unuse_lhs(state, ins);
}
/* mkcopy - rewrite ins in place as an OP_COPY of rhs, preserving the
 * block it belonged to.
 * NOTE(review): extract drops lines (op assignment, rhs slot store).
 */
static void mkcopy(struct compile_state *state,
	struct triple *ins, struct triple *rhs)
	struct block *block;
	/* Remember the block before wiping; wipe clears operand edges */
	block = block_of_triple(state, ins);
	wipe_ins(state, ins);
	ins->sizes = TRIPLE_SIZES(0, 1, 0, 0);
	ins->u.block = block;
	use_triple(RHS(ins, 0), ins);
5839 static void mkconst(struct compile_state *state,
5840 struct triple *ins, ulong_t value)
5842 if (!is_integral(ins) && !is_pointer(ins)) {
5843 internal_error(state, ins, "unknown type to make constant\n");
5845 wipe_ins(state, ins);
5846 ins->op = OP_INTCONST;
5847 ins->sizes = TRIPLE_SIZES(0, 0, 0, 0);
5848 ins->u.cval = value;
5851 static void mkaddr_const(struct compile_state *state,
5852 struct triple *ins, struct triple *sdecl, ulong_t value)
5854 if (sdecl->op != OP_SDECL) {
5855 internal_error(state, ins, "bad base for addrconst");
5857 wipe_ins(state, ins);
5858 ins->op = OP_ADDRCONST;
5859 ins->sizes = TRIPLE_SIZES(0, 0, 1, 0);
5860 MISC(ins, 0) = sdecl;
5861 ins->u.cval = value;
5862 use_triple(sdecl, ins);
/* Transform multicomponent variables into simple register variables */
/* flatten_structures - three passes over the whole instruction list:
 *   1. expand struct-typed loads/reads and stores/writes into OP_VAL_VEC
 *      triples whose elements access each member individually
 *      (via deref_field);
 *   2. flatten the value vectors away (release_triple);
 *   3. verify no struct-typed values, OP_DOT, or OP_VAL_VEC remain, and
 *      clear the FLATTENED ids.
 * NOTE(review): heavily truncated extract (do-loop heads, op selection,
 * src/dst fetch, occurance wiring); tokens preserved verbatim.
 */
static void flatten_structures(struct compile_state *state)
	struct triple *ins, *first;
	first = state->first;
	/* Pass one expand structure values into valvecs.
		struct triple *next;
		if ((ins->type->type & TYPE_MASK) == TYPE_STRUCT) {
			if (ins->op == OP_VAL_VEC) {
			else if ((ins->op == OP_LOAD) || (ins->op == OP_READ)) {
				/* Struct read: build a vector of per-member reads */
				struct triple *def, **vector;
				get_occurance(ins->occurance);
				next = alloc_triple(state, OP_VAL_VEC, ins->type, -1, -1,
				vector = &RHS(next, 0);
				tptr = next->type->left;
				for(i = 0; i < next->type->elements; i++) {
					struct triple *sfield;
					if ((mtype->type & TYPE_MASK) == TYPE_PRODUCT) {
						mtype = mtype->left;
					sfield = deref_field(state, def, mtype->field_ident);
						state, op, mtype, sfield, 0);
					put_occurance(vector[i]->occurance);
					get_occurance(next->occurance);
					vector[i]->occurance = next->occurance;
				/* Swap users over to the expansion, then drop ins */
				propogate_use(state, ins, next);
				flatten(state, ins, next);
				free_triple(state, ins);
			else if ((ins->op == OP_STORE) || (ins->op == OP_WRITE)) {
				/* Struct write: per-member writes dst.f = src.f */
				struct triple *src, *dst, **vector;
				get_occurance(ins->occurance);
				next = alloc_triple(state, OP_VAL_VEC, ins->type, -1, -1,
				vector = &RHS(next, 0);
				tptr = next->type->left;
				for(i = 0; i < ins->type->elements; i++) {
					struct triple *dfield, *sfield;
					if ((mtype->type & TYPE_MASK) == TYPE_PRODUCT) {
						mtype = mtype->left;
					sfield = deref_field(state, src, mtype->field_ident);
					dfield = deref_field(state, dst, mtype->field_ident);
						state, op, mtype, dfield, sfield);
					put_occurance(vector[i]->occurance);
					get_occurance(next->occurance);
					vector[i]->occurance = next->occurance;
				propogate_use(state, ins, next);
				flatten(state, ins, next);
				free_triple(state, ins);
	} while(ins != first);
	/* Pass two flatten the valvecs.
		struct triple *next;
		if (ins->op == OP_VAL_VEC) {
			release_triple(state, ins);
	} while(ins != first);
	/* Pass three verify the state and set ->id to 0.
		ins->id &= ~TRIPLE_FLAG_FLATTENED;
		if ((ins->op != OP_BLOBCONST) && (ins->op != OP_SDECL) &&
			((ins->type->type & TYPE_MASK) == TYPE_STRUCT)) {
			internal_error(state, ins, "STRUCT_TYPE remains?");
		if (ins->op == OP_DOT) {
			internal_error(state, ins, "OP_DOT remains?");
		if (ins->op == OP_VAL_VEC) {
			internal_error(state, ins, "OP_VAL_VEC remains?");
	} while(ins != first);
/* For those operations that cannot be simplified */
/* Intentionally empty: the shared do-nothing entry in table_simplify. */
static void simplify_noop(struct compile_state *state, struct triple *ins)
/* Simplify a signed multiply:
 * canonicalize the constant operand to the right, fold const*const,
 * x*0 -> 0, x*1 -> x, x*2^n -> x << n.
 */
static void simplify_smul(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
		/* Swap so the constant sits in operand 1 */
		RHS(ins, 0) = RHS(ins, 1);
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left * right);
	else if (is_zero(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Strength-reduce multiply by a power of two into a shift */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an unsigned multiply; same rules as simplify_smul but with
 * unsigned constant folding.
 */
static void simplify_umul(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
		/* Canonicalize the constant to operand 1 */
		RHS(ins, 0) = RHS(ins, 1);
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left * right);
	else if (is_zero(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Strength-reduce multiply by 2^n into a left shift */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify a signed divide: fold const/const, 0/x -> 0, report x/0,
 * x/1 -> x, x/2^n -> arithmetic shift right by n.
 */
static void simplify_sdiv(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left / right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		/* Compile-time diagnosable divide by zero */
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Strength-reduce divide by 2^n into a shift right */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an unsigned divide; same rules as simplify_sdiv but with
 * unsigned constant folding.
 */
static void simplify_udiv(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left / right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkcopy(state, ins, RHS(ins, 0));
	else if (is_pow2(RHS(ins, 1))) {
		/* Strength-reduce divide by 2^n into a logical shift right */
		val = int_const(state, ins->type, tlog2(RHS(ins, 1)));
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify a signed modulus: fold const%const, 0%x -> 0, report x%0,
 * x%1 -> 0, x%2^n -> x & (2^n - 1).
 */
static void simplify_smod(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left % right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_pow2(RHS(ins, 1))) {
		/* x % 2^n == x & (2^n - 1): build the mask constant */
		val = int_const(state, ins->type, RHS(ins, 1)->u.cval - 1);
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an unsigned modulus; same rules as simplify_smod with
 * unsigned constant folding.
 */
static void simplify_umod(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left % right);
	else if (is_zero(RHS(ins, 0))) {
		mkconst(state, ins, 0);
	else if (is_zero(RHS(ins, 1))) {
		error(state, ins, "division by zero");
	else if (is_one(RHS(ins, 1))) {
		mkconst(state, ins, 0);
	else if (is_pow2(RHS(ins, 1))) {
		/* x % 2^n == x & (2^n - 1): build the mask constant */
		val = int_const(state, ins->type, RHS(ins, 1)->u.cval - 1);
		insert_triple(state, ins, val);
		unuse_triple(RHS(ins, 1), ins);
		use_triple(val, ins);
/* Simplify an add: put a pointer operand on the left, fold
 * int+int and addrconst+int constants, and canonicalize a lone
 * constant to the right.
 */
static void simplify_add(struct compile_state *state, struct triple *ins)
	/* start with the pointer on the left */
	if (is_pointer(RHS(ins, 1))) {
		RHS(ins, 0) = RHS(ins, 1);
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		if (RHS(ins, 0)->op == OP_INTCONST) {
			ulong_t left, right;
			left = read_const(state, ins, &RHS(ins, 0));
			right = read_const(state, ins, &RHS(ins, 1));
			mkconst(state, ins, left + right);
		else if (RHS(ins, 0)->op == OP_ADDRCONST) {
			/* addrconst + intconst folds into the offset */
			struct triple *sdecl;
			ulong_t left, right;
			sdecl = MISC(RHS(ins, 0), 0);
			left = RHS(ins, 0)->u.cval;
			right = RHS(ins, 1)->u.cval;
			mkaddr_const(state, ins, sdecl, left + right);
			/* Other constant operand kinds are not folded yet */
			internal_warning(state, ins, "Optimize me!");
	else if (is_const(RHS(ins, 0)) && !is_const(RHS(ins, 1))) {
		/* Canonicalize the constant to the right */
		RHS(ins, 1) = RHS(ins, 0);
/* Simplify a subtract: fold int-int and addrconst-int constants.
 * Unlike add there is no operand canonicalization (sub does not
 * commute).
 */
static void simplify_sub(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		if (RHS(ins, 0)->op == OP_INTCONST) {
			ulong_t left, right;
			left = read_const(state, ins, &RHS(ins, 0));
			right = read_const(state, ins, &RHS(ins, 1));
			mkconst(state, ins, left - right);
		else if (RHS(ins, 0)->op == OP_ADDRCONST) {
			/* addrconst - intconst folds into the offset */
			struct triple *sdecl;
			ulong_t left, right;
			sdecl = MISC(RHS(ins, 0), 0);
			left = RHS(ins, 0)->u.cval;
			right = RHS(ins, 1)->u.cval;
			mkaddr_const(state, ins, sdecl, left - right);
			/* Other constant operand kinds are not folded yet */
			internal_warning(state, ins, "Optimize me!");
/* Simplify a shift left: warn on an oversized shift count and fold
 * const << const.
 */
static void simplify_sl(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 1))) {
		right = read_const(state, ins, &RHS(ins, 1));
		/* Shifting by >= the bit width is undefined behavior in C */
		if (right >= (size_of(state, ins->type)*8)) {
			warning(state, ins, "left shift count >= width of type");
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left << right);
/* Simplify an unsigned (logical) shift right: warn on an oversized
 * shift count and fold const >> const.
 */
static void simplify_usr(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 1))) {
		right = read_const(state, ins, &RHS(ins, 1));
		if (right >= (size_of(state, ins->type)*8)) {
			warning(state, ins, "right shift count >= width of type");
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left >> right);
/* Simplify a signed (arithmetic) shift right: warn on an oversized
 * shift count and fold const >> const using signed reads so the sign
 * bit propagates.
 */
static void simplify_ssr(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 1))) {
		right = read_const(state, ins, &RHS(ins, 1));
		if (right >= (size_of(state, ins->type)*8)) {
			warning(state, ins, "right shift count >= width of type");
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left >> right);
6273 static void simplify_and(struct compile_state *state, struct triple *ins)
6275 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6276 ulong_t left, right;
6277 left = read_const(state, ins, &RHS(ins, 0));
6278 right = read_const(state, ins, &RHS(ins, 1));
6279 mkconst(state, ins, left & right);
6283 static void simplify_or(struct compile_state *state, struct triple *ins)
6285 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6286 ulong_t left, right;
6287 left = read_const(state, ins, &RHS(ins, 0));
6288 right = read_const(state, ins, &RHS(ins, 1));
6289 mkconst(state, ins, left | right);
6293 static void simplify_xor(struct compile_state *state, struct triple *ins)
6295 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6296 ulong_t left, right;
6297 left = read_const(state, ins, &RHS(ins, 0));
6298 right = read_const(state, ins, &RHS(ins, 1));
6299 mkconst(state, ins, left ^ right);
/* Unary plus is a no-op: fold a constant operand, otherwise become a
 * plain copy of the operand.
 */
static void simplify_pos(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		mkconst(state, ins, RHS(ins, 0)->u.cval);
		mkcopy(state, ins, RHS(ins, 0));
/* Simplify unary minus: fold a constant operand, and collapse a
 * double negation (-(-x) -> x).
 */
static void simplify_neg(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		left = read_const(state, ins, &RHS(ins, 0));
		mkconst(state, ins, -left);
	else if (RHS(ins, 0)->op == OP_NEG) {
		/* Double negation cancels out */
		mkcopy(state, ins, RHS(RHS(ins, 0), 0));
6325 static void simplify_invert(struct compile_state *state, struct triple *ins)
6327 if (is_const(RHS(ins, 0))) {
6329 left = read_const(state, ins, &RHS(ins, 0));
6330 mkconst(state, ins, ~left);
/* Simplify equality: fold const == const, and x == x is always 1. */
static void simplify_eq(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left == right);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		/* Identical operand triples always compare equal */
		mkconst(state, ins, 1);
/* Simplify inequality: fold const != const, and x != x is always 0. */
static void simplify_noteq(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left != right);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 0);
/* Simplify signed less-than: fold const < const, and x < x is 0. */
static void simplify_sless(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left < right);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 0);
6373 static void simplify_uless(struct compile_state *state, struct triple *ins)
6375 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6376 ulong_t left, right;
6377 left = read_const(state, ins, &RHS(ins, 0));
6378 right = read_const(state, ins, &RHS(ins, 1));
6379 mkconst(state, ins, left < right);
6381 else if (is_zero(RHS(ins, 0))) {
6382 mkconst(state, ins, 1);
6384 else if (RHS(ins, 0) == RHS(ins, 1)) {
6385 mkconst(state, ins, 0);
/* Simplify signed greater-than: fold const > const, and x > x is 0. */
static void simplify_smore(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left > right);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 0);
6402 static void simplify_umore(struct compile_state *state, struct triple *ins)
6404 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
6405 ulong_t left, right;
6406 left = read_const(state, ins, &RHS(ins, 0));
6407 right = read_const(state, ins, &RHS(ins, 1));
6408 mkconst(state, ins, left > right);
6410 else if (is_zero(RHS(ins, 1))) {
6411 mkconst(state, ins, 1);
6413 else if (RHS(ins, 0) == RHS(ins, 1)) {
6414 mkconst(state, ins, 0);
/* Simplify signed less-or-equal: fold const <= const, and x <= x is 1. */
static void simplify_slesseq(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		left = read_sconst(ins, &RHS(ins, 0));
		right = read_sconst(ins, &RHS(ins, 1));
		mkconst(state, ins, left <= right);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 1);
/* Simplify unsigned less-or-equal: fold const <= const,
 * 0 <= x is always 1 for unsigned operands, and x <= x is 1.
 */
static void simplify_ulesseq(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left <= right);
	else if (is_zero(RHS(ins, 0))) {
		/* 0 <= right always holds for unsigned right */
		mkconst(state, ins, 1);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 1);
6448 static void simplify_smoreeq(struct compile_state *state, struct triple *ins)
6450 if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 0))) {
6452 left = read_sconst(ins, &RHS(ins, 0));
6453 right = read_sconst(ins, &RHS(ins, 1));
6454 mkconst(state, ins, left >= right);
6456 else if (RHS(ins, 0) == RHS(ins, 1)) {
6457 mkconst(state, ins, 1);
/* Simplify unsigned greater-or-equal: fold const >= const,
 * x >= 0 is always 1 for unsigned operands, and x >= x is 1.
 */
static void simplify_umoreeq(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0)) && is_const(RHS(ins, 1))) {
		ulong_t left, right;
		left = read_const(state, ins, &RHS(ins, 0));
		right = read_const(state, ins, &RHS(ins, 1));
		mkconst(state, ins, left >= right);
	else if (is_zero(RHS(ins, 1))) {
		/* left >= 0 always holds for unsigned left */
		mkconst(state, ins, 1);
	else if (RHS(ins, 0) == RHS(ins, 1)) {
		mkconst(state, ins, 1);
/* Simplify logical NOT: fold a constant operand, or — when this is the
 * sole user of the operand — invert the operand's comparison opcode in
 * place and degrade to a copy, avoiding an extra instruction.
 */
static void simplify_lfalse(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		left = read_const(state, ins, &RHS(ins, 0));
		mkconst(state, ins, left == 0);
	/* Otherwise if I am the only user... */
	else if ((RHS(ins, 0)->use) &&
		(RHS(ins, 0)->use->member == ins) && (RHS(ins, 0)->use->next == 0)) {
		/* Invert a boolean operation */
		switch(RHS(ins, 0)->op) {
		case OP_LTRUE:   RHS(ins, 0)->op = OP_LFALSE;  break;
		case OP_LFALSE:  RHS(ins, 0)->op = OP_LTRUE;   break;
		case OP_EQ:      RHS(ins, 0)->op = OP_NOTEQ;   break;
		case OP_NOTEQ:   RHS(ins, 0)->op = OP_EQ;      break;
		case OP_SLESS:   RHS(ins, 0)->op = OP_SMOREEQ; break;
		case OP_ULESS:   RHS(ins, 0)->op = OP_UMOREEQ; break;
		case OP_SMORE:   RHS(ins, 0)->op = OP_SLESSEQ; break;
		case OP_UMORE:   RHS(ins, 0)->op = OP_ULESSEQ; break;
		case OP_SLESSEQ: RHS(ins, 0)->op = OP_SMORE;   break;
		case OP_ULESSEQ: RHS(ins, 0)->op = OP_UMORE;   break;
		case OP_SMOREEQ: RHS(ins, 0)->op = OP_SLESS;   break;
		case OP_UMOREEQ: RHS(ins, 0)->op = OP_ULESS;   break;
		/* After the in-place inversion the NOT reduces to a copy */
		mkcopy(state, ins, RHS(ins, 0));
/* Simplify logical truth test: fold a constant operand; if the operand
 * is already a boolean-valued comparison its result is 0/1 so the test
 * degrades to a copy.
 */
static void simplify_ltrue (struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		left = read_const(state, ins, &RHS(ins, 0));
		mkconst(state, ins, left != 0);
	else switch(RHS(ins, 0)->op) {
	case OP_LTRUE:   case OP_LFALSE:  case OP_EQ:      case OP_NOTEQ:
	case OP_SLESS:   case OP_ULESS:   case OP_SMORE:   case OP_UMORE:
	case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
		/* These ops already produce 0 or 1 */
		mkcopy(state, ins, RHS(ins, 0));
/* Propagate constants through a copy: a copy of an int constant
 * becomes that constant, a copy of an addrconst becomes an addrconst.
 */
static void simplify_copy(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		switch(RHS(ins, 0)->op) {
			left = read_const(state, ins, &RHS(ins, 0));
			mkconst(state, ins, left);
			struct triple *sdecl;
			sdecl = MISC(RHS(ins, 0), 0);
			offset = RHS(ins, 0)->u.cval;
			mkaddr_const(state, ins, sdecl, offset);
			/* Any other constant kind is unexpected here */
			internal_error(state, ins, "uknown constant");
/* Return nonzero if any instruction in block is an OP_PHI.
 * Walks the block's instructions up to block->last.
 */
static int phi_present(struct block *block)
	if (ptr->op == OP_PHI) {
	} while(ptr != block->last);
/* Return nonzero if removing this block would break a phi function
 * in one of its successors.
 */
static int phi_dependency(struct block *block)
	/* A block has a phi dependency if a phi function
	 * depends on that block to exist, and makes a block
	 * that is otherwise useless unsafe to remove.
	 */
	phi_present(block->left) ||
	phi_present(block->right))) {
/* Resolve the effective target of a branch, skipping over structural
 * (no-code) triples so chained labels do not hide the real target.
 */
static struct triple *branch_target(struct compile_state *state, struct triple *ins)
	struct triple *targ;
	targ = TARG(ins, 0);
	/* During scc_transform temporary triples are allocated that
	 * loop back onto themselves. If I see one don't advance the
	 * target.
	 */
	while(triple_is_structural(state, targ) && (targ->next != targ)) {
/* Simplify a branch instruction:
 *  - thread jumps to unconditional branches (when no phi depends on
 *    the intermediate block),
 *  - turn a conditional branch with a constant condition into an
 *    unconditional branch,
 *  - turn a branch to the fall-through instruction into a no-op.
 * CFG edges themselves are repaired later when basic blocks are
 * rebuilt after simplification.
 */
static void simplify_branch(struct compile_state *state, struct triple *ins)
	if (ins->op != OP_BRANCH) {
		internal_error(state, ins, "not branch");
	if (ins->use != 0) {
		internal_error(state, ins, "branch use");
	/* The challenge here with simplify branch is that I need to
	 * make modifications to the control flow graph as well
	 * as to the branch instruction itself. That is handled
	 * by rebuilding the basic blocks after simplify all is called.
	 */
	/* If we have a branch to an unconditional branch update
	 * our target. But watch out for dependencies from phi
	 * functions.
	 */
		struct triple *targ;
		targ = branch_target(state, ins);
		if ((targ != ins) && triple_is_uncond_branch(state, targ)) {
			if (!phi_dependency(targ->u.block))
				/* Retarget this branch past the intermediate jump */
				unuse_triple(TARG(ins, 0), ins);
				TARG(ins, 0) = TARG(targ, 0);
				use_triple(TARG(ins, 0), ins);
	} while(simplified);
	/* If we have a conditional branch with a constant condition
	 * make it an unconditional branch.
	 */
	if (TRIPLE_RHS(ins->sizes) && is_const(RHS(ins, 0))) {
		struct triple *targ;
		value = read_const(state, ins, &RHS(ins, 0));
		unuse_triple(RHS(ins, 0), ins);
		targ = TARG(ins, 0);
		ins->sizes = TRIPLE_SIZES(0, 0, 0, 1);	/* now: no condition, one target */
			/* Condition true: keep the taken target, drop fall-through */
			unuse_triple(ins->next, ins);
			TARG(ins, 0) = targ;
			/* Condition false: branch to the fall-through instruction */
			unuse_triple(targ, ins);
			TARG(ins, 0) = ins->next;
	/* If we have an unconditional branch to the next instruction
	 * it is a noop.
	 */
	if (TARG(ins, 0) == ins->next) {
		unuse_triple(ins->next, ins);
		if (TRIPLE_RHS(ins->sizes)) {
			unuse_triple(RHS(ins, 0), ins);
			unuse_triple(ins->next, ins);
		ins->sizes = TRIPLE_SIZES(0, 0, 0, 0);
		internal_error(state, ins, "noop use != 0");
/* Simplify a label: an unused label becomes a no-op; a label directly
 * following another label is merged into it by retargeting all of its
 * users (unless a phi function depends on the block boundary).
 */
static void simplify_label(struct compile_state *state, struct triple *ins)
	struct triple *first;
	first = state->first;
	/* Ignore volatile labels */
	if (!triple_is_pure(state, ins, ins->id)) {
	if (ins->use == 0) {
	else if (ins->prev->op == OP_LABEL) {
		/* In general it is not safe to merge one label that
		 * immediately follows another. The problem is that the empty
		 * looking block may have phi functions that depend on it.
		 */
		if (!phi_dependency(ins->prev->u.block)) {
			struct triple_set *user, *next;
			for(user = ins->use; user; user = next) {
				/* Retarget each branch user to the preceding label */
				if (TARG(use, 0) == ins) {
					TARG(use, 0) = ins->prev;
					unuse_triple(ins, use);
					use_triple(ins->prev, use);
		internal_error(state, ins, "noop use != 0");
/* Simplify a phi function: if every rhs slot holds the same simple
 * constant the phi folds to that constant; if every slot holds the
 * same triple the phi degrades to a copy.
 */
static void simplify_phi(struct compile_state *state, struct triple *ins)
	struct triple **slot;
	struct triple *value;
	slot = &RHS(ins, 0);
	zrhs = TRIPLE_RHS(ins->sizes);	/* number of incoming edges */
	/* See if all of the rhs members of a phi have the same value */
	if (slot[0] && is_simple_const(slot[0])) {
		cvalue = read_const(state, ins, &slot[0]);
		for(i = 1; i < zrhs; i++) {
			!is_simple_const(slot[i]) ||
			(cvalue != read_const(state, ins, &slot[i]))) {
		mkconst(state, ins, cvalue);
	/* See if all of rhs members of a phi are the same */
	for(i = 1; i < zrhs; i++) {
		if (slot[i] != value) {
	/* If the phi has a single value just copy it */
	mkcopy(state, ins, value);
/* Fold bit-scan-forward (index of lowest set bit) of a constant. */
static void simplify_bsf(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		left = read_const(state, ins, &RHS(ins, 0));
		mkconst(state, ins, bsf(left));
/* Fold bit-scan-reverse (index of highest set bit) of a constant. */
static void simplify_bsr(struct compile_state *state, struct triple *ins)
	if (is_const(RHS(ins, 0))) {
		left = read_const(state, ins, &RHS(ins, 0));
		mkconst(state, ins, bsr(left));
/* Dispatch table mapping each triple opcode to its simplifier, indexed
 * by opcode via C99 designated initializers.  The "#define simplify_x
 * simplify_noop" lines disable individual simplifiers; they appear to
 * be guarded by DEBUG_* conditionals elided from this view —
 * NOTE(review): confirm the surrounding #if guards before editing.
 */
typedef void (*simplify_t)(struct compile_state *state, struct triple *ins);
static const simplify_t table_simplify[] = {
#define simplify_sdivt    simplify_noop
#define simplify_udivt    simplify_noop
#define simplify_smul     simplify_noop
#define simplify_umul     simplify_noop
#define simplify_sdiv     simplify_noop
#define simplify_udiv     simplify_noop
#define simplify_smod     simplify_noop
#define simplify_umod     simplify_noop
#define simplify_add      simplify_noop
#define simplify_sub      simplify_noop
#define simplify_sl       simplify_noop
#define simplify_usr      simplify_noop
#define simplify_ssr      simplify_noop
#define simplify_and      simplify_noop
#define simplify_xor      simplify_noop
#define simplify_or       simplify_noop
#define simplify_pos      simplify_noop
#define simplify_neg      simplify_noop
#define simplify_invert   simplify_noop
#define simplify_eq       simplify_noop
#define simplify_noteq    simplify_noop
#define simplify_sless    simplify_noop
#define simplify_uless    simplify_noop
#define simplify_smore    simplify_noop
#define simplify_umore    simplify_noop
#define simplify_slesseq  simplify_noop
#define simplify_ulesseq  simplify_noop
#define simplify_smoreeq  simplify_noop
#define simplify_umoreeq  simplify_noop
#define simplify_lfalse   simplify_noop
#define simplify_ltrue    simplify_noop
#define simplify_copy     simplify_noop
#define simplify_branch   simplify_noop
#define simplify_label    simplify_noop
#define simplify_phi      simplify_noop
#define simplify_bsf      simplify_noop
#define simplify_bsr      simplify_noop
#define simplify_piece    simplify_noop
	[OP_SDIVT      ] = simplify_sdivt,
	[OP_UDIVT      ] = simplify_udivt,
	[OP_SMUL       ] = simplify_smul,
	[OP_UMUL       ] = simplify_umul,
	[OP_SDIV       ] = simplify_sdiv,
	[OP_UDIV       ] = simplify_udiv,
	[OP_SMOD       ] = simplify_smod,
	[OP_UMOD       ] = simplify_umod,
	[OP_ADD        ] = simplify_add,
	[OP_SUB        ] = simplify_sub,
	[OP_SL         ] = simplify_sl,
	[OP_USR        ] = simplify_usr,
	[OP_SSR        ] = simplify_ssr,
	[OP_AND        ] = simplify_and,
	[OP_XOR        ] = simplify_xor,
	[OP_OR         ] = simplify_or,
	[OP_POS        ] = simplify_pos,
	[OP_NEG        ] = simplify_neg,
	[OP_INVERT     ] = simplify_invert,
	[OP_EQ         ] = simplify_eq,
	[OP_NOTEQ      ] = simplify_noteq,
	[OP_SLESS      ] = simplify_sless,
	[OP_ULESS      ] = simplify_uless,
	[OP_SMORE      ] = simplify_smore,
	[OP_UMORE      ] = simplify_umore,
	[OP_SLESSEQ    ] = simplify_slesseq,
	[OP_ULESSEQ    ] = simplify_ulesseq,
	[OP_SMOREEQ    ] = simplify_smoreeq,
	[OP_UMOREEQ    ] = simplify_umoreeq,
	[OP_LFALSE     ] = simplify_lfalse,
	[OP_LTRUE      ] = simplify_ltrue,
	[OP_LOAD       ] = simplify_noop,
	[OP_STORE      ] = simplify_noop,
	[OP_NOOP       ] = simplify_noop,
	[OP_INTCONST   ] = simplify_noop,
	[OP_BLOBCONST  ] = simplify_noop,
	[OP_ADDRCONST  ] = simplify_noop,
	[OP_WRITE      ] = simplify_noop,
	[OP_READ       ] = simplify_noop,
	[OP_COPY       ] = simplify_copy,
	[OP_PIECE      ] = simplify_piece,
	[OP_ASM        ] = simplify_noop,
	[OP_DOT        ] = simplify_noop,
	[OP_VAL_VEC    ] = simplify_noop,
	[OP_LIST       ] = simplify_noop,
	[OP_BRANCH     ] = simplify_branch,
	[OP_LABEL      ] = simplify_label,
	[OP_ADECL      ] = simplify_noop,
	[OP_SDECL      ] = simplify_noop,
	[OP_PHI        ] = simplify_phi,
	[OP_INB        ] = simplify_noop,
	[OP_INW        ] = simplify_noop,
	[OP_INL        ] = simplify_noop,
	[OP_OUTB       ] = simplify_noop,
	[OP_OUTW       ] = simplify_noop,
	[OP_OUTL       ] = simplify_noop,
	[OP_BSF        ] = simplify_bsf,
	[OP_BSR        ] = simplify_bsr,
	[OP_RDMSR      ] = simplify_noop,
	[OP_WRMSR      ] = simplify_noop,
	[OP_HLT        ] = simplify_noop,
/* Apply the opcode's simplifier to ins repeatedly until the opcode
 * stops changing.  With DEBUG_SIMPLIFY enabled, a duplicate of the
 * triple is taken before each step and any change is dumped to stderr.
 */
static void simplify(struct compile_state *state, struct triple *ins)
	simplify_t do_simplify;
	/* NOTE(review): this bounds check looks off by one — op equal to
	 * the table size would index one past the end; it should probably
	 * be ">=".  Confirm against the full source before changing.
	 */
	if ((op < 0) || (op > sizeof(table_simplify)/sizeof(table_simplify[0]))) {
	do_simplify = table_simplify[op];
		internal_error(state, ins, "cannot simplify op: %d %s\n",
	do_simplify(state, ins);
	int ins_count, dup_count;
	dup = dup_triple(state, ins);	/* snapshot for change detection */
	do_simplify(state, ins);
	ins_count = TRIPLE_SIZE(ins->sizes);
	dup_count = TRIPLE_SIZE(dup->sizes);
	/* Report when the simplifier changed the op, operand count,
	 * operand values, or the immediate union.
	 */
	if ((dup->op != ins->op) ||
		(ins_count != dup_count) ||
		(memcmp(dup->param, ins->param,
			dup_count * sizeof(dup->param[0])) != 0) ||
		(memcmp(&dup->u, &ins->u, sizeof(ins->u)) != 0))
	fprintf(stderr, "simplify: %11p", ins);
	if (dup->op == ins->op) {
		fprintf(stderr, " %-11s", tops(ins->op));
		fprintf(stderr, " [%-10s %-10s]",
			tops(dup->op), tops(ins->op));
	min_count = dup_count;
	if (min_count > ins_count) {
		min_count = ins_count;
	for(i = 0; i < min_count; i++) {
		if (dup->param[i] == ins->param[i]) {
			fprintf(stderr, " %-11p", ins->param[i]);
			fprintf(stderr, " [%-10p %-10p]",
				dup->param[i], ins->param[i]);
	/* Operands only present on one side of the diff */
	for(; i < ins_count; i++) {
		fprintf(stderr, " [%-9p]", ins->param[i]);
	for(; i < dup_count; i++) {
		fprintf(stderr, " [%-9p]", dup->param[i]);
	fprintf(stderr, "\n");
	} while(ins->op != op);	/* loop until the opcode reaches a fixed point */
/* Run simplify over every instruction in the program: one backward
 * sweep over the circular instruction list, then one forward sweep.
 */
static void simplify_all(struct compile_state *state)
	struct triple *ins, *first;
	first = state->first;
	simplify(state, ins);
	} while(ins != first->prev);	/* backward pass */
	simplify(state, ins);
	}while(ins != first);	/* forward pass */
 * ============================
/* Synthesize a builtin function: build its function type from the
 * variadic struct type arguments, generate the triple list (label,
 * parameter variables, result variable, the builtin op itself, a
 * trailing label), and bind it to name in the symbol table.  A dummy
 * file_state is pushed so diagnostics attribute the code to
 * "<built-in>".
 */
static void register_builtin_function(struct compile_state *state,
	const char *name, int op, struct type *rtype, ...)
	struct type *ftype, *atype, *param, **next;
	struct triple *def, *arg, *result, *work, *last, *first;
	struct hash_entry *ident;
	struct file_state file;
	/* Dummy file state to get debug handling right */
	memset(&file, 0, sizeof(file));
	file.basename = "<built-in>";
	file.report_line = 1;
	file.report_name = file.basename;
	file.prev = state->file;
	state->file = &file;
	state->function = name;
	/* Find the Parameter count */
	valid_op(state, op);
	parameters = table_ops[op].rhs;
	if (parameters < 0 ) {
		internal_error(state, 0, "Invalid builtin parameter count");
	/* Find the function type */
	ftype = new_type(TYPE_FUNCTION, rtype, 0);
	next = &ftype->right;
	va_start(args, rtype);
	for(i = 0; i < parameters; i++) {
		atype = va_arg(args, struct type *);
		/* Chain parameter types as a TYPE_PRODUCT list */
		*next = new_type(TYPE_PRODUCT, *next, atype);
		next = &((*next)->right);
	/* Generate the needed triples */
	def = triple(state, OP_LIST, ftype, 0, 0);
	first = label(state);
	RHS(def, 0) = first;
	/* Now string them together */
	param = ftype->right;
	for(i = 0; i < parameters; i++) {
		if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
			atype = param->left;
		arg = flatten(state, first, variable(state, atype));
		param = param->right;
	if ((rtype->type & TYPE_MASK) != TYPE_VOID) {
		result = flatten(state, first, variable(state, rtype));
	MISC(def, 0) = result;
	work = new_triple(state, op, rtype, -1, parameters);
	for(i = 0, arg = first->next; i < parameters; i++, arg = arg->next) {
		RHS(work, i) = read_expr(state, arg);
	if (result && ((rtype->type & TYPE_MASK) == TYPE_STRUCT)) {
		/* Populate the LHS with the target registers */
		work = flatten(state, first, work);
		work->type = &void_type;
		param = rtype->left;
		if (rtype->elements != TRIPLE_LHS(work->sizes)) {
			internal_error(state, 0, "Invalid result type");
		val = new_triple(state, OP_VAL_VEC, rtype, -1, -1);
		for(i = 0; i < rtype->elements; i++) {
			struct triple *piece;
			if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
				atype = param->left;
			/* Each result member must fit a register */
			if (!TYPE_ARITHMETIC(atype->type) &&
				!TYPE_PTR(atype->type)) {
				internal_error(state, 0, "Invalid lhs type");
			piece = triple(state, OP_PIECE, atype, work, 0);
			LHS(work, i) = piece;
			RHS(val, i) = piece;
		work = write_expr(state, result, work);
	work = flatten(state, first, work);
	last = flatten(state, first, label(state));
	name_len = strlen(name);
	ident = lookup(state, name, name_len);
	symbol(state, ident, &ident->sym_ident, def, ftype);
	/* Restore the real file state */
	state->file = file.prev;
	state->function = 0;
	fprintf(stdout, "\n");
	loc(stdout, state, 0);
	fprintf(stdout, "\n__________ builtin_function _________\n");
	print_triple(state, def);
	fprintf(stdout, "__________ builtin_function _________ done\n\n");
/* Build one named member of a builtin struct type: clone type, tag it
 * with field_name, and chain it onto rest (the remaining members) as a
 * TYPE_PRODUCT.  Pass rest == 0 for the last member.
 */
static struct type *partial_struct(struct compile_state *state,
	const char *field_name, struct type *type, struct type *rest)
	struct hash_entry *field_ident;
	struct type *result;
	field_name_len = strlen(field_name);
	field_ident = lookup(state, field_name, field_name_len);
	result = clone_type(0, type);
	result->field_ident = field_ident;
	result = new_type(TYPE_PRODUCT, result, rest);
/* Register a builtin type under name.  A TYPE_PRODUCT argument (from
 * partial_struct chains) is wrapped in a TYPE_STRUCT, its members are
 * counted, and it is entered as a struct tag; any other type is bound
 * directly as a typedef name.
 */
static struct type *register_builtin_type(struct compile_state *state,
	const char *name, struct type *type)
	struct hash_entry *ident;
	name_len = strlen(name);
	ident = lookup(state, name, name_len);
	if ((type->type & TYPE_MASK) == TYPE_PRODUCT) {
		ulong_t elements = 0;
		type = new_type(TYPE_STRUCT, type, 0);
		/* Count the members by walking the product chain */
		while((field->type & TYPE_MASK) == TYPE_PRODUCT) {
			field = field->right;
		symbol(state, ident, &ident->sym_tag, 0, type);
		type->type_ident = ident;
		type->elements = elements;
	symbol(state, ident, &ident->sym_ident, 0, type);
	ident->tok = TOK_TYPE_NAME;	/* make the lexer treat name as a type */
/* Register every builtin type and function: the div/ldiv/udiv/uldiv
 * result structs and division helpers, port I/O (in*/
/* out*), bit scans, MSR access, and hlt.
 */
static void register_builtins(struct compile_state *state)
	struct type *div_type, *ldiv_type;
	struct type *udiv_type, *uldiv_type;
	struct type *msr_type;
	/* Each divide variant returns a { quot, rem } struct */
	div_type = register_builtin_type(state, "__builtin_div_t",
		partial_struct(state, "quot", &int_type,
		partial_struct(state, "rem", &int_type, 0)));
	ldiv_type = register_builtin_type(state, "__builtin_ldiv_t",
		partial_struct(state, "quot", &long_type,
		partial_struct(state, "rem", &long_type, 0)));
	udiv_type = register_builtin_type(state, "__builtin_udiv_t",
		partial_struct(state, "quot", &uint_type,
		partial_struct(state, "rem", &uint_type, 0)));
	uldiv_type = register_builtin_type(state, "__builtin_uldiv_t",
		partial_struct(state, "quot", &ulong_type,
		partial_struct(state, "rem", &ulong_type, 0)));
	register_builtin_function(state, "__builtin_div",   OP_SDIVT, div_type,
		&int_type, &int_type);
	register_builtin_function(state, "__builtin_ldiv",  OP_SDIVT, ldiv_type,
		&long_type, &long_type);
	register_builtin_function(state, "__builtin_udiv",  OP_UDIVT, udiv_type,
		&uint_type, &uint_type);
	register_builtin_function(state, "__builtin_uldiv", OP_UDIVT, uldiv_type,
		&ulong_type, &ulong_type);
	/* Port input: value returned, port is the ushort argument */
	register_builtin_function(state, "__builtin_inb", OP_INB, &uchar_type,
	register_builtin_function(state, "__builtin_inw", OP_INW, &ushort_type,
	register_builtin_function(state, "__builtin_inl", OP_INL, &uint_type,
	/* Port output: (value, port) */
	register_builtin_function(state, "__builtin_outb", OP_OUTB, &void_type,
		&uchar_type, &ushort_type);
	register_builtin_function(state, "__builtin_outw", OP_OUTW, &void_type,
		&ushort_type, &ushort_type);
	register_builtin_function(state, "__builtin_outl", OP_OUTL, &void_type,
		&uint_type, &ushort_type);
	register_builtin_function(state, "__builtin_bsf", OP_BSF, &int_type,
	register_builtin_function(state, "__builtin_bsr", OP_BSR, &int_type,
	/* rdmsr returns the 64-bit MSR as a { lo, hi } struct */
	msr_type = register_builtin_type(state, "__builtin_msr_t",
		partial_struct(state, "lo", &ulong_type,
		partial_struct(state, "hi", &ulong_type, 0)));
	register_builtin_function(state, "__builtin_rdmsr", OP_RDMSR, msr_type,
	register_builtin_function(state, "__builtin_wrmsr", OP_WRMSR, &void_type,
		&ulong_type, &ulong_type, &ulong_type);
	register_builtin_function(state, "__builtin_hlt", OP_HLT, &void_type,
/* Forward declarations for the mutually recursive descent parser. */
7237 static struct type *declarator(
7238 struct compile_state *state, struct type *type,
7239 struct hash_entry **ident, int need_ident);
7240 static void decl(struct compile_state *state, struct triple *first);
7241 static struct type *specifier_qualifier_list(struct compile_state *state);
7242 static int isdecl_specifier(int tok);
7243 static struct type *decl_specifiers(struct compile_state *state);
7244 static int istype(int tok);
7245 static struct triple *expr(struct compile_state *state);
7246 static struct triple *assignment_expr(struct compile_state *state);
7247 static struct type *type_name(struct compile_state *state);
/* Fixed prototype parameter-name typo: "fist" -> "first", matching the
 * definition and every other statement-handler prototype in this file. */
7248 static void statement(struct compile_state *state, struct triple *first);
/* Parse a function-call argument list for `func` and build an OP_CALL
 * triple.  `func` must have TYPE_FUNCTION type and be an OP_LIST triple.
 * Each argument is checked for write-compatibility against the
 * corresponding parameter type.  Returns the call triple (return line
 * elided in this chunk). */
7250 static struct triple *call_expr(
7251 struct compile_state *state, struct triple *func)
7254 struct type *param, *type;
7255 ulong_t pvals, index;
7257 if ((func->type->type & TYPE_MASK) != TYPE_FUNCTION) {
7258 error(state, 0, "Called object is not a function");
7260 if (func->op != OP_LIST) {
7261 internal_error(state, 0, "improper function");
7263 eat(state, TOK_LPAREN);
7264 /* Find the return type without any specifiers */
7265 type = clone_type(0, func->type->left);
7266 def = new_triple(state, OP_CALL, func->type, -1, -1);
/* pvals = number of parameter slots the call triple expects. */
7269 pvals = TRIPLE_RHS(def->sizes);
7270 MISC(def, 0) = func;
7272 param = func->type->right;
7273 for(index = 0; index < pvals; index++) {
7275 struct type *arg_type;
7276 val = read_expr(state, assignment_expr(state));
/* A product type means more parameters follow; take its left leg. */
7278 if ((param->type & TYPE_MASK) == TYPE_PRODUCT) {
7279 arg_type = param->left;
7281 write_compatible(state, arg_type, val->type);
7282 RHS(def, index) = val;
/* Commas separate all but the last argument. */
7283 if (index != (pvals - 1)) {
7284 eat(state, TOK_COMMA);
7285 param = param->right;
7288 eat(state, TOK_RPAREN);
/* Parse a character literal token into an int constant triple of
 * char_type.  Strips the surrounding quotes, decodes escapes via
 * char_value(), and rejects empty or multibyte constants. */
7293 static struct triple *character_constant(struct compile_state *state)
7297 const signed char *str, *end;
7300 eat(state, TOK_LIT_CHAR);
7301 tk = &state->token[0];
/* Skip the opening quote; drop both quotes from the length. */
7302 str = tk->val.str + 1;
7303 str_len = tk->str_len - 2;
7305 error(state, 0, "empty character constant");
7307 end = str + str_len;
7308 c = char_value(state, &str, end);
/* char_value advances str; anything left means multiple characters. */
7310 error(state, 0, "multibyte character constant not supported");
/* Sign-extend through long_t so negative chars become negative ints. */
7312 def = int_const(state, &char_type, (ulong_t)((long_t)c));
/* Parse one or more adjacent string literal tokens, concatenating them
 * (C translation-phase 6) into a single NUL-terminated char array, and
 * return an OP_BLOBCONST triple.  Each pass reallocates a buffer large
 * enough for the accumulated characters plus the new literal. */
7316 static struct triple *string_constant(struct compile_state *state)
7321 const signed char *str, *end;
7322 signed char *buf, *ptr;
7326 type = new_type(TYPE_ARRAY, &char_type, 0);
7328 /* The while loop handles string concatenation */
7330 eat(state, TOK_LIT_STRING);
7331 tk = &state->token[0];
7332 str = tk->val.str + 1;
7333 str_len = tk->str_len - 2;
7335 error(state, 0, "negative string constant length");
7337 end = str + str_len;
/* Grow the buffer and copy what we already accumulated.
 * NOTE(review): on the first iteration `ptr` is not yet assigned;
 * presumably type->elements starts at 0 so the memcpy copies nothing,
 * but memcpy from an indeterminate pointer is formally UB -- verify
 * against the elided initialization lines. */
7339 buf = xmalloc(type->elements + str_len + 1, "string_constant");
7340 memcpy(buf, ptr, type->elements);
7341 ptr = buf + type->elements;
7343 *ptr++ = char_value(state, &str, end);
7345 type->elements = ptr - buf;
7346 } while(peek(state) == TOK_LIT_STRING);
/* Account for the terminating NUL. */
7348 type->elements += 1;
7349 def = triple(state, OP_BLOBCONST, type, 0, 0);
/* Parse an integer literal token, apply the C type-selection rules for
 * integer constants (decimal vs octal/hex, u/U and l/L suffixes), and
 * return an int constant triple of the chosen type.
 * NOTE(review): the errno reset before strtoul appears to be on an
 * elided line -- confirm it exists in the full file. */
7355 static struct triple *integer_constant(struct compile_state *state)
7364 eat(state, TOK_LIT_INT);
7365 tk = &state->token[0];
/* A leading '0' marks an octal or hex constant; those may select
 * unsigned types that plain decimal constants may not. */
7367 decimal = (tk->val.str[0] != '0');
7368 val = strtoul(tk->val.str, &end, 0);
/* Overflow check against the target's ulong range, and against the
 * host's strtoul saturation (ULONG_MAX + ERANGE). */
7369 if ((val > ULONG_T_MAX) || ((val == ULONG_MAX) && (errno == ERANGE))) {
/* Fixed diagnostic typo: "to large" -> "too large". */
7370 error(state, 0, "Integer constant too large");
/* Consume optional u/U and l/L suffixes in either order. */
7373 if ((*end == 'u') || (*end == 'U')) {
7377 if ((*end == 'l') || (*end == 'L')) {
7381 if ((*end == 'u') || (*end == 'U')) {
7386 error(state, 0, "Junk at end of integer constant");
/* Select the smallest type that can represent the value, widening to
 * unsigned only for non-decimal constants (C89 rules). */
7393 if (!decimal && (val > LONG_T_MAX)) {
7399 if (val > UINT_T_MAX) {
7405 if (!decimal && (val > INT_T_MAX) && (val <= UINT_T_MAX)) {
7408 else if (!decimal && (val > LONG_T_MAX)) {
7411 else if (val > INT_T_MAX) {
7415 def = int_const(state, type, val);
/* Parse a primary expression: identifier, enum constant, parenthesized
 * expression, or integer/float/char/string literal.  Floating point is
 * rejected outright. */
7419 static struct triple *primary_expr(struct compile_state *state)
7427 struct hash_entry *ident;
7428 /* Here ident is either:
7432 eat(state, TOK_IDENT);
7433 ident = state->token[0].ident;
7434 if (!ident->sym_ident) {
7435 error(state, 0, "%s undeclared", ident->name);
7437 def = ident->sym_ident->def;
7440 case TOK_ENUM_CONST:
7442 struct hash_entry *ident;
7443 /* Here ident is an enumeration constant */
7444 eat(state, TOK_ENUM_CONST);
7445 ident = state->token[0].ident;
7446 if (!ident->sym_ident) {
7447 error(state, 0, "%s undeclared", ident->name);
7449 def = ident->sym_ident->def;
/* Parenthesized subexpression. */
7453 eat(state, TOK_LPAREN);
7455 eat(state, TOK_RPAREN);
7458 def = integer_constant(state);
7461 eat(state, TOK_LIT_FLOAT);
7462 error(state, 0, "Floating point constants not supported");
7467 def = character_constant(state);
7469 case TOK_LIT_STRING:
7470 def = string_constant(state);
7474 error(state, 0, "Unexpected token: %s\n", tokens[tok]);
/* Parse postfix operators after a primary expression: array subscript,
 * function call, ./-> member access, and postfix ++/--.
 * NOTE(review): `left` is used before any visible assignment; the line
 * binding it (presumably left = read_expr(state, def) or similar) is
 * elided in this chunk -- confirm against the full file. */
7479 static struct triple *postfix_expr(struct compile_state *state)
7483 def = primary_expr(state);
7485 struct triple *left;
7489 switch((tok = peek(state))) {
7491 eat(state, TOK_LBRACKET);
7492 def = mk_subscript_expr(state, left, expr(state));
7493 eat(state, TOK_RBRACKET);
7496 def = call_expr(state, def);
7500 struct hash_entry *field;
7501 eat(state, TOK_DOT);
7502 eat(state, TOK_IDENT);
7503 field = state->token[0].ident;
7504 def = deref_field(state, def, field);
7509 struct hash_entry *field;
7510 eat(state, TOK_ARROW);
7511 eat(state, TOK_IDENT);
7512 field = state->token[0].ident;
/* a->b is (*a).b: dereference then select the field. */
7513 def = mk_deref_expr(state, read_expr(state, def));
7514 def = deref_field(state, def, field);
7518 eat(state, TOK_PLUSPLUS);
7519 def = mk_post_inc_expr(state, left);
7521 case TOK_MINUSMINUS:
7522 eat(state, TOK_MINUSMINUS);
7523 def = mk_post_dec_expr(state, left);
/* Forward declaration: unary_expr and cast_expr are mutually recursive. */
7533 static struct triple *cast_expr(struct compile_state *state);
/* Parse a unary expression: prefix ++/--, address-of, dereference,
 * unary +/-/~/!, sizeof, alignof (extension), or a postfix expression.
 * Arithmetic unary operators apply the integral promotions first. */
7535 static struct triple *unary_expr(struct compile_state *state)
7537 struct triple *def, *right;
7539 switch((tok = peek(state))) {
7541 eat(state, TOK_PLUSPLUS);
7542 def = mk_pre_inc_expr(state, unary_expr(state));
7544 case TOK_MINUSMINUS:
7545 eat(state, TOK_MINUSMINUS);
7546 def = mk_pre_dec_expr(state, unary_expr(state));
7549 eat(state, TOK_AND);
7550 def = mk_addr_expr(state, cast_expr(state), 0);
7553 eat(state, TOK_STAR);
7554 def = mk_deref_expr(state, read_expr(state, cast_expr(state)));
/* Unary plus: value unchanged, but promotions still apply. */
7557 eat(state, TOK_PLUS);
7558 right = read_expr(state, cast_expr(state));
7559 arithmetic(state, right);
7560 def = integral_promotion(state, right);
7563 eat(state, TOK_MINUS);
7564 right = read_expr(state, cast_expr(state));
7565 arithmetic(state, right);
7566 def = integral_promotion(state, right);
7567 def = triple(state, OP_NEG, def->type, def, 0);
7570 eat(state, TOK_TILDE);
7571 right = read_expr(state, cast_expr(state));
7572 integral(state, right);
7573 def = integral_promotion(state, right);
7574 def = triple(state, OP_INVERT, def->type, def, 0);
7577 eat(state, TOK_BANG);
7578 right = read_expr(state, cast_expr(state));
7580 def = lfalse_expr(state, right);
/* sizeof: either "sizeof ( type-name )" or "sizeof unary-expr";
 * the expression form is parsed but not evaluated (released). */
7586 eat(state, TOK_SIZEOF);
7588 tok2 = peek2(state);
7589 if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7590 eat(state, TOK_LPAREN);
7591 type = type_name(state);
7592 eat(state, TOK_RPAREN);
7595 struct triple *expr;
7596 expr = unary_expr(state);
7598 release_expr(state, expr);
7600 def = int_const(state, &ulong_type, size_of(state, type));
/* alignof: same structure as sizeof, using align_of(). */
7607 eat(state, TOK_ALIGNOF);
7609 tok2 = peek2(state);
7610 if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7611 eat(state, TOK_LPAREN);
7612 type = type_name(state);
7613 eat(state, TOK_RPAREN);
7616 struct triple *expr;
7617 expr = unary_expr(state);
7619 release_expr(state, expr);
7621 def = int_const(state, &ulong_type, align_of(state, type));
7625 def = postfix_expr(state);
/* Parse a cast expression: "( type-name ) cast-expr" when a '(' is
 * followed by a type token (two-token lookahead), else a unary-expr. */
7631 static struct triple *cast_expr(struct compile_state *state)
7636 tok2 = peek2(state);
7637 if ((tok1 == TOK_LPAREN) && istype(tok2)) {
7639 eat(state, TOK_LPAREN);
7640 type = type_name(state);
7641 eat(state, TOK_RPAREN);
7642 def = mk_cast_expr(state, type, cast_expr(state));
7645 def = unary_expr(state);
/* Parse mult_expr: cast_expr (('*'|'/'|'%') cast_expr)*.  The usual
 * arithmetic conversions pick the result type; signedness of that type
 * selects the signed vs unsigned triple opcode. */
7650 static struct triple *mult_expr(struct compile_state *state)
7654 def = cast_expr(state);
7656 struct triple *left, *right;
7657 struct type *result_type;
7660 switch(tok = (peek(state))) {
7664 left = read_expr(state, def);
7665 arithmetic(state, left);
7669 right = read_expr(state, cast_expr(state));
7670 arithmetic(state, right);
7672 result_type = arithmetic_result(state, left, right);
7673 sign = is_signed(result_type);
7676 case TOK_STAR: op = sign? OP_SMUL : OP_UMUL; break;
7677 case TOK_DIV: op = sign? OP_SDIV : OP_UDIV; break;
7678 case TOK_MOD: op = sign? OP_SMOD : OP_UMOD; break;
7680 def = triple(state, op, result_type, left, right);
/* Parse add_expr: mult_expr (('+'|'-') mult_expr)*.  Pointer vs
 * arithmetic handling is inside mk_add_expr/mk_sub_expr. */
7690 static struct triple *add_expr(struct compile_state *state)
7694 def = mult_expr(state);
7697 switch( peek(state)) {
7699 eat(state, TOK_PLUS);
7700 def = mk_add_expr(state, def, mult_expr(state));
7703 eat(state, TOK_MINUS);
7704 def = mk_sub_expr(state, def, mult_expr(state));
/* Parse shift_expr: add_expr (('<<'|'>>') add_expr)*.  Both operands
 * are integrally promoted; the result type follows the LEFT operand
 * (C rule), and right shift picks signed vs unsigned by left's type. */
7714 static struct triple *shift_expr(struct compile_state *state)
7718 def = add_expr(state);
7720 struct triple *left, *right;
7723 switch((tok = peek(state))) {
7726 left = read_expr(state, def);
7727 integral(state, left);
7728 left = integral_promotion(state, left);
7732 right = read_expr(state, add_expr(state));
7733 integral(state, right);
7734 right = integral_promotion(state, right);
7736 op = (tok == TOK_SL)? OP_SL :
7737 is_signed(left->type)? OP_SSR: OP_USR;
7739 def = triple(state, op, left->type, left, right);
/* Parse relational_expr: shift_expr (('<'|'>'|'<='|'>=') shift_expr)*.
 * Operands must be arithmetic; signedness of the common type selects
 * the signed/unsigned comparison opcode.  Result type is int. */
7749 static struct triple *relational_expr(struct compile_state *state)
7751 #warning "Extend relational exprs to work on more than arithmetic types"
7754 def = shift_expr(state);
7756 struct triple *left, *right;
7757 struct type *arg_type;
7760 switch((tok = peek(state))) {
7765 left = read_expr(state, def);
7766 arithmetic(state, left);
7770 right = read_expr(state, shift_expr(state));
7771 arithmetic(state, right);
7773 arg_type = arithmetic_result(state, left, right);
7774 sign = is_signed(arg_type);
7777 case TOK_LESS: op = sign? OP_SLESS : OP_ULESS; break;
7778 case TOK_MORE: op = sign? OP_SMORE : OP_UMORE; break;
7779 case TOK_LESSEQ: op = sign? OP_SLESSEQ : OP_ULESSEQ; break;
7780 case TOK_MOREEQ: op = sign? OP_SMOREEQ : OP_UMOREEQ; break;
7782 def = triple(state, op, &int_type, left, right);
/* Parse equality_expr: relational_expr (('=='|'!=') relational_expr)*.
 * Operands must be arithmetic (see #warning); result type is int. */
7792 static struct triple *equality_expr(struct compile_state *state)
7794 #warning "Extend equality exprs to work on more than arithmetic types"
7797 def = relational_expr(state);
7799 struct triple *left, *right;
7802 switch((tok = peek(state))) {
7805 left = read_expr(state, def);
7806 arithmetic(state, left);
7808 right = read_expr(state, relational_expr(state));
7809 arithmetic(state, right);
7810 op = (tok == TOK_EQEQ) ? OP_EQ: OP_NOTEQ;
7811 def = triple(state, op, &int_type, left, right);
/* Parse and_expr: equality_expr ('&' equality_expr)*.  Operands must
 * be integral; the result takes the common arithmetic type. */
7821 static struct triple *and_expr(struct compile_state *state)
7824 def = equality_expr(state);
7825 while(peek(state) == TOK_AND) {
7826 struct triple *left, *right;
7827 struct type *result_type;
7828 left = read_expr(state, def);
7829 integral(state, left);
7830 eat(state, TOK_AND);
7831 right = read_expr(state, equality_expr(state));
7832 integral(state, right);
7833 result_type = arithmetic_result(state, left, right);
7834 def = triple(state, OP_AND, result_type, left, right);
/* Parse xor_expr: and_expr ('^' and_expr)*.  Integral operands only. */
7839 static struct triple *xor_expr(struct compile_state *state)
7842 def = and_expr(state);
7843 while(peek(state) == TOK_XOR) {
7844 struct triple *left, *right;
7845 struct type *result_type;
7846 left = read_expr(state, def);
7847 integral(state, left);
7848 eat(state, TOK_XOR);
7849 right = read_expr(state, and_expr(state));
7850 integral(state, right);
7851 result_type = arithmetic_result(state, left, right);
7852 def = triple(state, OP_XOR, result_type, left, right);
/* Parse or_expr: xor_expr ('|' xor_expr)*.  Integral operands only.
 * (The eat(TOK_OR) line is elided in this chunk.) */
7857 static struct triple *or_expr(struct compile_state *state)
7860 def = xor_expr(state);
7861 while(peek(state) == TOK_OR) {
7862 struct triple *left, *right;
7863 struct type *result_type;
7864 left = read_expr(state, def);
7865 integral(state, left);
7867 right = read_expr(state, xor_expr(state));
7868 integral(state, right);
7869 result_type = arithmetic_result(state, left, right);
7870 def = triple(state, OP_OR, result_type, left, right);
/* Parse land_expr: or_expr ('&&' or_expr)*.  Both sides are normalized
 * to 0/1 via ltrue_expr; result is an int OP_LAND triple.
 * NOTE(review): no short-circuit branching here -- both operands are
 * evaluated; presumably acceptable for this compiler's input language. */
7875 static struct triple *land_expr(struct compile_state *state)
7878 def = or_expr(state);
7879 while(peek(state) == TOK_LOGAND) {
7880 struct triple *left, *right;
7881 left = read_expr(state, def);
7883 eat(state, TOK_LOGAND);
7884 right = read_expr(state, or_expr(state));
7887 def = triple(state, OP_LAND, &int_type,
7888 ltrue_expr(state, left),
7889 ltrue_expr(state, right));
/* Parse lor_expr: land_expr ('||' land_expr)*.  Mirrors land_expr;
 * both operands evaluated, normalized with ltrue_expr, int result. */
7894 static struct triple *lor_expr(struct compile_state *state)
7897 def = land_expr(state);
7898 while(peek(state) == TOK_LOGOR) {
7899 struct triple *left, *right;
7900 left = read_expr(state, def);
7902 eat(state, TOK_LOGOR);
7903 right = read_expr(state, land_expr(state));
7906 def = triple(state, OP_LOR, &int_type,
7907 ltrue_expr(state, left),
7908 ltrue_expr(state, right));
/* Parse conditional_expr: lor_expr ('?' expr ':' conditional_expr)?.
 * The test is normalized to 0/1; cond_expr builds the selection. */
7913 static struct triple *conditional_expr(struct compile_state *state)
7916 def = lor_expr(state);
7917 if (peek(state) == TOK_QUEST) {
7918 struct triple *test, *left, *right;
7920 test = ltrue_expr(state, read_expr(state, def));
7921 eat(state, TOK_QUEST);
7922 left = read_expr(state, expr(state));
7923 eat(state, TOK_COLON);
7924 right = read_expr(state, conditional_expr(state));
7926 def = cond_expr(state, test, left, right);
/* Reduce `expr` to a constant triple.  If it is not already constant,
 * flatten it onto a dummy label, run the simplifier over every triple,
 * detach the (hopefully constant) tail as the result, and free the
 * intermediates.  Errors out if simplification does not yield a
 * constant. */
7931 static struct triple *eval_const_expr(
7932 struct compile_state *state, struct triple *expr)
7935 if (is_const(expr)) {
7939 /* If we don't start out as a constant simplify into one */
7940 struct triple *head, *ptr;
7941 head = label(state); /* dummy initial triple */
7942 flatten(state, head, expr);
7943 for(ptr = head->next; ptr != head; ptr = ptr->next) {
7944 simplify(state, ptr);
7946 /* Remove the constant value the tail of the list */
7948 def->prev->next = def->next;
7949 def->next->prev = def->prev;
7950 def->next = def->prev = def;
7951 if (!is_const(def)) {
7952 error(state, 0, "Not a constant expression");
7954 /* Free the intermediate expressions */
7955 while(head->next != head) {
7956 release_triple(state, head->next);
7958 free_triple(state, head);
/* Parse a conditional expression that must evaluate to a constant. */
7963 static struct triple *constant_expr(struct compile_state *state)
7965 return eval_const_expr(state, conditional_expr(state));
/* Parse assignment_expr: conditional_expr optionally followed by =, or
 * a compound assignment operator (*=, /=, %=, +=, -=, <<=, >>=, &=,
 * ^=, |=).  Compound forms expand to read-modify-write triples.
 * NOTE(review): `left` is used before a visible assignment; the
 * binding line (presumably left = def) is elided in this chunk. */
7968 static struct triple *assignment_expr(struct compile_state *state)
7970 struct triple *def, *left, *right;
7972 /* The C grammar in K&R shows assignment expressions
7973 * only taking unary expressions as input on their
7974 * left hand side. But specifies the precedence of
7975 * assignment as the lowest operator except for comma.
7977 * Allowing conditional expressions on the left hand side
7978 * of an assignment results in a grammar that accepts
7979 * a larger set of statements than standard C. As long
7980 * as the subset of the grammar that is standard C behaves
7981 * correctly this should cause no problems.
7983 * For the extra token strings accepted by the grammar
7984 * none of them should produce a valid lvalue, so they
7985 * should not produce functioning programs.
7987 * GCC has this bug as well, so surprises should be minimal.
7989 def = conditional_expr(state);
7991 switch((tok = peek(state))) {
7993 lvalue(state, left);
7995 def = write_expr(state, left,
7996 read_expr(state, assignment_expr(state)));
/* Arithmetic compound assignments: *=, /=, %=. */
8001 lvalue(state, left);
8002 arithmetic(state, left);
8004 right = read_expr(state, assignment_expr(state));
8005 arithmetic(state, right);
8007 sign = is_signed(left->type);
8010 case TOK_TIMESEQ: op = sign? OP_SMUL : OP_UMUL; break;
8011 case TOK_DIVEQ: op = sign? OP_SDIV : OP_UDIV; break;
8012 case TOK_MODEQ: op = sign? OP_SMOD : OP_UMOD; break;
8014 def = write_expr(state, left,
8015 triple(state, op, left->type,
8016 read_expr(state, left), right));
8019 lvalue(state, left);
8020 eat(state, TOK_PLUSEQ);
8021 def = write_expr(state, left,
8022 mk_add_expr(state, left, assignment_expr(state)));
8025 lvalue(state, left);
8026 eat(state, TOK_MINUSEQ);
8027 def = write_expr(state, left,
8028 mk_sub_expr(state, left, assignment_expr(state)));
/* Integral compound assignments: <<=, >>=, &=, ^=, |=. */
8035 lvalue(state, left);
8036 integral(state, left);
8038 right = read_expr(state, assignment_expr(state));
8039 integral(state, right);
8040 right = integral_promotion(state, right);
8041 sign = is_signed(left->type);
8044 case TOK_SLEQ: op = OP_SL; break;
8045 case TOK_SREQ: op = sign? OP_SSR: OP_USR; break;
8046 case TOK_ANDEQ: op = OP_AND; break;
8047 case TOK_XOREQ: op = OP_XOR; break;
8048 case TOK_OREQ: op = OP_OR; break;
8050 def = write_expr(state, left,
8051 triple(state, op, left->type,
8052 read_expr(state, left), right));
/* Parse expr: assignment_expr (',' assignment_expr)*.  The comma
 * operator's result takes the type and value of the right operand.
 * NOTE(review): `left` has no visible assignment here; the binding
 * line is elided in this chunk -- confirm against the full file. */
8058 static struct triple *expr(struct compile_state *state)
8061 def = assignment_expr(state);
8062 while(peek(state) == TOK_COMMA) {
8063 struct triple *left, *right;
8065 eat(state, TOK_COMMA);
8066 right = assignment_expr(state);
8067 def = triple(state, OP_COMMA, right->type, left, right);
/* Parse an expression statement: an optional expression (flattened
 * onto `first`) followed by ';'.  Handles the empty statement too. */
8072 static void expr_statement(struct compile_state *state, struct triple *first)
8074 if (peek(state) != TOK_SEMI) {
8075 flatten(state, first, expr(state));
8077 eat(state, TOK_SEMI);
/* Parse "if (expr) stmt [else stmt]".  The test is inverted with
 * lfalse_expr so a single conditional branch (jmp1) skips the then-arm
 * when the condition is false; an unconditional jmp2 skips the
 * else-arm after the then-arm runs. */
8080 static void if_statement(struct compile_state *state, struct triple *first)
8082 struct triple *test, *jmp1, *jmp2, *middle, *end;
8084 jmp1 = jmp2 = middle = 0;
8086 eat(state, TOK_LPAREN);
8089 /* Cleanup and invert the test */
8090 test = lfalse_expr(state, read_expr(state, test));
8091 eat(state, TOK_RPAREN);
8092 /* Generate the needed pieces */
8093 middle = label(state);
8094 jmp1 = branch(state, middle, test);
8095 /* Thread the pieces together */
8096 flatten(state, first, test);
8097 flatten(state, first, jmp1);
8098 flatten(state, first, label(state));
8099 statement(state, first);
8100 if (peek(state) == TOK_ELSE) {
8101 eat(state, TOK_ELSE);
8102 /* Generate the rest of the pieces */
8104 jmp2 = branch(state, end, 0);
8105 /* Thread them together */
8106 flatten(state, first, jmp2);
8107 flatten(state, first, middle);
8108 statement(state, first);
8109 flatten(state, first, end);
/* No else: the false-branch target simply falls through here. */
8112 flatten(state, first, middle);
/* Parse "for (init; test; step) stmt".  Layout emitted:
 *   head; jmp1 -> label3; label1: body; label2: tail(step);
 *   label3: test; jmp2 -> label1 (conditional, or unconditional when
 *   there is no test); end.
 * break jumps to `end`, continue to `label2` (the step). */
8116 static void for_statement(struct compile_state *state, struct triple *first)
8118 struct triple *head, *test, *tail, *jmp1, *jmp2, *end;
8119 struct triple *label1, *label2, *label3;
8120 struct hash_entry *ident;
8122 eat(state, TOK_FOR);
8123 eat(state, TOK_LPAREN);
8124 head = test = tail = jmp1 = jmp2 = 0;
8125 if (peek(state) != TOK_SEMI) {
8128 eat(state, TOK_SEMI);
8129 if (peek(state) != TOK_SEMI) {
8132 test = ltrue_expr(state, read_expr(state, test));
8134 eat(state, TOK_SEMI);
8135 if (peek(state) != TOK_RPAREN) {
8138 eat(state, TOK_RPAREN);
8139 /* Generate the needed pieces */
8140 label1 = label(state);
8141 label2 = label(state);
8142 label3 = label(state);
8144 jmp1 = branch(state, label3, 0);
8145 jmp2 = branch(state, label1, test);
/* Without a test the back-edge is unconditional (infinite loop). */
8148 jmp2 = branch(state, label1, 0);
8151 /* Remember where break and continue go */
8153 ident = state->i_break;
8154 symbol(state, ident, &ident->sym_ident, end, end->type);
8155 ident = state->i_continue;
8156 symbol(state, ident, &ident->sym_ident, label2, label2->type);
8157 /* Now include the body */
8158 flatten(state, first, head);
8159 flatten(state, first, jmp1);
8160 flatten(state, first, label1);
8161 statement(state, first);
8162 flatten(state, first, label2);
8163 flatten(state, first, tail);
8164 flatten(state, first, label3);
8165 flatten(state, first, test);
8166 flatten(state, first, jmp2);
8167 flatten(state, first, end);
8168 /* Cleanup the break/continue scope */
/* Parse "while (expr) stmt".  Layout: jmp1 -> label2 (test first),
 * label1: body; label2: test; jmp2 -> label1 when true; end.
 * break jumps to `end`, continue to `label2`. */
8172 static void while_statement(struct compile_state *state, struct triple *first)
8174 struct triple *label1, *test, *label2, *jmp1, *jmp2, *end;
8175 struct hash_entry *ident;
8176 eat(state, TOK_WHILE);
8177 eat(state, TOK_LPAREN);
8180 test = ltrue_expr(state, read_expr(state, test));
8181 eat(state, TOK_RPAREN);
8182 /* Generate the needed pieces */
8183 label1 = label(state);
8184 label2 = label(state);
8185 jmp1 = branch(state, label2, 0);
8186 jmp2 = branch(state, label1, test);
8188 /* Remember where break and continue go */
8190 ident = state->i_break;
8191 symbol(state, ident, &ident->sym_ident, end, end->type);
8192 ident = state->i_continue;
8193 symbol(state, ident, &ident->sym_ident, label2, label2->type);
8194 /* Thread them together */
8195 flatten(state, first, jmp1);
8196 flatten(state, first, label1);
8197 statement(state, first);
8198 flatten(state, first, label2);
8199 flatten(state, first, test);
8200 flatten(state, first, jmp2);
8201 flatten(state, first, end);
8202 /* Cleanup the break/continue scope */
/* Parse "do stmt while (expr);".  Body first (label1), then the test
 * at label2 with a conditional back-branch to label1.
 * break jumps to `end`, continue to `label2` (the test). */
8206 static void do_statement(struct compile_state *state, struct triple *first)
8208 struct triple *label1, *label2, *test, *end;
8209 struct hash_entry *ident;
8211 /* Generate the needed pieces */
8212 label1 = label(state);
8213 label2 = label(state);
8215 /* Remember where break and continue go */
8217 ident = state->i_break;
8218 symbol(state, ident, &ident->sym_ident, end, end->type);
8219 ident = state->i_continue;
8220 symbol(state, ident, &ident->sym_ident, label2, label2->type);
8221 /* Now include the body */
8222 flatten(state, first, label1);
8223 statement(state, first);
8224 /* Cleanup the break/continue scope */
8226 /* Eat the rest of the loop */
8227 eat(state, TOK_WHILE);
8228 eat(state, TOK_LPAREN);
8229 test = read_expr(state, expr(state));
8231 eat(state, TOK_RPAREN);
8232 eat(state, TOK_SEMI);
8233 /* Thread the pieces together */
8234 test = ltrue_expr(state, test);
8235 flatten(state, first, label2);
8236 flatten(state, first, test);
8237 flatten(state, first, branch(state, label1, test));
8238 flatten(state, first, end);
/* Parse "return [expr];".  Writes the value (if any) into the
 * function's return variable and branches to the function's return
 * destination -- except when this is the last statement of the
 * function body, where the jump would be redundant (see #warning). */
8242 static void return_statement(struct compile_state *state, struct triple *first)
8244 struct triple *jmp, *mv, *dest, *var, *val;
8246 eat(state, TOK_RETURN);
8248 #warning "FIXME implement a more general excess branch elimination"
8250 /* If we have a return value do some more work */
8251 if (peek(state) != TOK_SEMI) {
8252 val = read_expr(state, expr(state));
8254 eat(state, TOK_SEMI);
8256 /* See if this last statement in a function */
8257 last = ((peek(state) == TOK_RBRACE) &&
8258 (state->scope_depth == GLOBAL_SCOPE_DEPTH +2));
8260 /* Find the return variable */
8261 var = MISC(state->main_function, 0);
8262 /* Find the return destination */
8263 dest = RHS(state->main_function, 0)->prev;
8265 /* If needed generate a jump instruction */
8267 jmp = branch(state, dest, 0);
8269 /* If needed generate an assignment instruction */
8271 mv = write_expr(state, var, val);
8273 /* Now put the code together */
8275 flatten(state, first, mv);
8276 flatten(state, first, jmp);
8279 flatten(state, first, jmp);
/* Parse "break;": unconditional branch to the innermost loop/switch
 * exit label recorded under state->i_break. */
8283 static void break_statement(struct compile_state *state, struct triple *first)
8285 struct triple *dest;
8286 eat(state, TOK_BREAK);
8287 eat(state, TOK_SEMI);
8288 if (!state->i_break->sym_ident) {
8289 error(state, 0, "break statement not within loop or switch");
8291 dest = state->i_break->sym_ident->def;
8292 flatten(state, first, branch(state, dest, 0));
/* Parse "continue;": unconditional branch to the innermost loop's
 * continue label recorded under state->i_continue. */
8295 static void continue_statement(struct compile_state *state, struct triple *first)
8297 struct triple *dest;
8298 eat(state, TOK_CONTINUE);
8299 eat(state, TOK_SEMI);
8300 if (!state->i_continue->sym_ident) {
8301 error(state, 0, "continue statement outside of a loop");
8303 dest = state->i_continue->sym_ident->def;
8304 flatten(state, first, branch(state, dest, 0));
/* Parse "goto ident;".  For a forward reference the label triple is
 * allocated now (flattened later where the label appears); the branch
 * always targets the label symbol's definition. */
8307 static void goto_statement(struct compile_state *state, struct triple *first)
8309 struct hash_entry *ident;
8310 eat(state, TOK_GOTO);
8311 eat(state, TOK_IDENT);
8312 ident = state->token[0].ident;
8313 if (!ident->sym_label) {
8314 /* If this is a forward branch allocate the label now,
8315 * it will be flattend in the appropriate location later.
8319 label_symbol(state, ident, ins);
8321 eat(state, TOK_SEMI);
8323 flatten(state, first, branch(state, ident->sym_label->def, 0));
/* Parse "ident: stmt".  Reuses a label triple pre-allocated by a
 * forward goto (updating its source occurance), or creates a fresh
 * one; rejects a second definition of the same label (a label already
 * flattened into the instruction stream). */
8326 static void labeled_statement(struct compile_state *state, struct triple *first)
8329 struct hash_entry *ident;
8330 eat(state, TOK_IDENT);
8332 ident = state->token[0].ident;
8333 if (ident->sym_label && ident->sym_label->def) {
8334 ins = ident->sym_label->def;
8335 put_occurance(ins->occurance);
8336 ins->occurance = new_occurance(state);
8340 label_symbol(state, ident, ins);
8342 if (ins->id & TRIPLE_FLAG_FLATTENED) {
8343 error(state, 0, "label %s already defined", ident->name);
8345 flatten(state, first, ins);
8347 eat(state, TOK_COLON);
8348 statement(state, first);
/* Parse "switch (expr) stmt".  Records four scope symbols for nested
 * case/default/break handling: i_switch = the tested value,
 * i_case = the comparison-chain insertion point (top), i_break = end,
 * i_default = the default branch (initially targeting end). */
8351 static void switch_statement(struct compile_state *state, struct triple *first)
8353 struct triple *value, *top, *end, *dbranch;
8354 struct hash_entry *ident;
8356 /* See if we have a valid switch statement */
8357 eat(state, TOK_SWITCH);
8358 eat(state, TOK_LPAREN);
8359 value = expr(state);
8360 integral(state, value);
8361 value = read_expr(state, value);
8362 eat(state, TOK_RPAREN);
8363 /* Generate the needed pieces */
8366 dbranch = branch(state, end, 0);
8367 /* Remember where case branches and break goes */
8369 ident = state->i_switch;
8370 symbol(state, ident, &ident->sym_ident, value, value->type);
8371 ident = state->i_case;
8372 symbol(state, ident, &ident->sym_ident, top, top->type);
8373 ident = state->i_break;
8374 symbol(state, ident, &ident->sym_ident, end, end->type);
8375 ident = state->i_default;
8376 symbol(state, ident, &ident->sym_ident, dbranch, dbranch->type);
8377 /* Thread them together */
8378 flatten(state, first, value);
8379 flatten(state, first, top);
8380 flatten(state, first, dbranch);
8381 statement(state, first);
8382 flatten(state, first, end);
8383 /* Cleanup the switch scope */
/* Parse "case const-expr: stmt".  Inserts an equality test plus a
 * conditional branch into the switch's comparison chain (before the
 * default branch), after checking for duplicate case values among the
 * OP_EQ triples already in the chain. */
8387 static void case_statement(struct compile_state *state, struct triple *first)
8389 struct triple *cvalue, *dest, *test, *jmp;
8390 struct triple *ptr, *value, *top, *dbranch;
8392 /* See if w have a valid case statement */
8393 eat(state, TOK_CASE);
8394 cvalue = constant_expr(state);
8395 integral(state, cvalue);
8396 if (cvalue->op != OP_INTCONST) {
8397 error(state, 0, "integer constant expected");
8399 eat(state, TOK_COLON);
8400 if (!state->i_case->sym_ident) {
8401 error(state, 0, "case statement not within a switch");
8404 /* Lookup the interesting pieces */
8405 top = state->i_case->sym_ident->def;
8406 value = state->i_switch->sym_ident->def;
8407 dbranch = state->i_default->sym_ident->def;
8409 /* See if this case label has already been used */
8410 for(ptr = top; ptr != dbranch; ptr = ptr->next) {
8411 if (ptr->op != OP_EQ) {
8414 if (RHS(ptr, 1)->u.cval == cvalue->u.cval) {
8415 error(state, 0, "duplicate case %d statement",
8419 /* Generate the needed pieces */
8420 dest = label(state);
8421 test = triple(state, OP_EQ, &int_type, value, cvalue);
8422 jmp = branch(state, dest, test);
/* Thread the test into the comparison chain (at dbranch), and the
 * case body's destination label into the statement stream. */
8424 flatten(state, dbranch, test);
8425 flatten(state, dbranch, jmp);
8426 flatten(state, dbranch, label(state));
8427 flatten(state, first, dest);
8428 statement(state, first);
/* Parse "default: stmt".  Retargets the switch's default branch from
 * the end label to a fresh label placed here; a default branch that no
 * longer targets `end` means default was already seen. */
8431 static void default_statement(struct compile_state *state, struct triple *first)
8433 struct triple *dest;
8434 struct triple *dbranch, *end;
8436 /* See if we have a valid default statement */
8437 eat(state, TOK_DEFAULT);
8438 eat(state, TOK_COLON);
8440 if (!state->i_case->sym_ident) {
8441 error(state, 0, "default statement not within a switch");
8444 /* Lookup the interesting pieces */
8445 dbranch = state->i_default->sym_ident->def;
8446 end = state->i_break->sym_ident->def;
8448 /* See if a default statement has already happened */
8449 if (TARG(dbranch, 0) != end) {
8450 error(state, 0, "duplicate default statement");
8453 /* Generate the needed pieces */
8454 dest = label(state);
8456 /* Thread the pieces together */
8457 TARG(dbranch, 0) = dest;
8458 flatten(state, first, dest);
8459 statement(state, first);
/* Parse a GCC-style extended asm statement:
 *   asm [const|volatile] ( "template" : outputs : inputs : clobbers );
 * Builds an OP_ASM triple with register constraints resolved through
 * the arch_* helpers, OP_PIECE triples for each output/clobber, and
 * write_expr stores for the outputs.  Matching-digit input constraints
 * ("0".."9") tie an input to the corresponding output's register.
 * Fixes in this revision (surrounding elided lines left untouched):
 *  - output bound check was `out > MAX_LHS`, allowing out_param[MAX_LHS]
 *    to be written (off-by-one buffer overflow); now `>=`.
 *  - clobber bound check had the same off-by-one; now `>=`.
 *  - matching-constraint test had a misplaced parenthesis,
 *    digitp(str[0] && str[1] == '\0'), which evaluated digitp() on a
 *    boolean; corrected to digitp(str[0]) && str[1] == '\0' to match
 *    the identical (correct) test used later in this function. */
8462 static void asm_statement(struct compile_state *state, struct triple *first)
8464 struct asm_info *info;
8466 struct triple *constraint;
8467 struct triple *expr;
8468 } out_param[MAX_LHS], in_param[MAX_RHS], clob_param[MAX_LHS];
8469 struct triple *def, *asm_str;
8470 int out, in, clobbers, more, colons, i;
8472 eat(state, TOK_ASM);
8473 /* For now ignore the qualifiers */
8474 switch(peek(state)) {
8476 eat(state, TOK_CONST);
8479 eat(state, TOK_VOLATILE);
8482 eat(state, TOK_LPAREN);
8483 asm_str = string_constant(state);
8486 out = in = clobbers = 0;
/* First colon section: output operands "=constraint" (lvalue). */
8488 if ((colons == 0) && (peek(state) == TOK_COLON)) {
8489 eat(state, TOK_COLON);
8491 more = (peek(state) == TOK_LIT_STRING);
8494 struct triple *constraint;
/* Fixed off-by-one: out_param[out] is written below, so reject
 * once out reaches MAX_LHS (was `>`), preventing an overflow. */
8497 if (out >= MAX_LHS) {
8498 error(state, 0, "Maximum output count exceeded.");
8500 constraint = string_constant(state);
8501 str = constraint->u.blob;
8502 if (str[0] != '=') {
8503 error(state, 0, "Output constraint does not start with =");
8505 constraint->u.blob = str + 1;
8506 eat(state, TOK_LPAREN);
8507 var = conditional_expr(state);
8508 eat(state, TOK_RPAREN);
8511 out_param[out].constraint = constraint;
8512 out_param[out].expr = var;
8513 if (peek(state) == TOK_COMMA) {
8514 eat(state, TOK_COMMA);
/* Second colon section: input operands. */
8521 if ((colons == 1) && (peek(state) == TOK_COLON)) {
8522 eat(state, TOK_COLON);
8524 more = (peek(state) == TOK_LIT_STRING);
8527 struct triple *constraint;
8531 error(state, 0, "Maximum input count exceeded.");
8533 constraint = string_constant(state);
8534 str = constraint->u.blob;
/* Fixed misplaced parenthesis: test "single digit constraint"
 * (ties this input to output #digit), not digitp(bool). */
8535 if (digitp(str[0]) && str[1] == '\0') {
8537 val = digval(str[0]);
8538 if ((val < 0) || (val >= out)) {
8539 error(state, 0, "Invalid input constraint %d", val);
8542 eat(state, TOK_LPAREN);
8543 val = conditional_expr(state);
8544 eat(state, TOK_RPAREN);
8546 in_param[in].constraint = constraint;
8547 in_param[in].expr = val;
8548 if (peek(state) == TOK_COMMA) {
8549 eat(state, TOK_COMMA);
/* Third colon section: clobbered registers. */
8557 if ((colons == 2) && (peek(state) == TOK_COLON)) {
8558 eat(state, TOK_COLON);
8560 more = (peek(state) == TOK_LIT_STRING);
8562 struct triple *clobber;
/* Fixed off-by-one: outputs and clobbers share the MAX_LHS-sized
 * lhs template, so reject once (clobbers + out) reaches MAX_LHS. */
8564 if ((clobbers + out) >= MAX_LHS) {
8565 error(state, 0, "Maximum clobber limit exceeded.");
8567 clobber = string_constant(state);
8569 clob_param[clobbers].constraint = clobber;
8570 if (peek(state) == TOK_COMMA) {
8571 eat(state, TOK_COMMA);
8577 eat(state, TOK_RPAREN);
8578 eat(state, TOK_SEMI);
8581 info = xcmalloc(sizeof(*info), "asm_info");
8582 info->str = asm_str->u.blob;
8583 free_triple(state, asm_str);
8585 def = new_triple(state, OP_ASM, &void_type, clobbers + out, in);
8586 def->u.ainfo = info;
8588 /* Find the register constraints */
8589 for(i = 0; i < out; i++) {
8590 struct triple *constraint;
8591 constraint = out_param[i].constraint;
8592 info->tmpl.lhs[i] = arch_reg_constraint(state,
8593 out_param[i].expr->type, constraint->u.blob);
8594 free_triple(state, constraint);
/* Clobbers occupy lhs slots after the outputs. */
8596 for(; i - out < clobbers; i++) {
8597 struct triple *constraint;
8598 constraint = clob_param[i - out].constraint;
8599 info->tmpl.lhs[i] = arch_reg_clobber(state, constraint->u.blob);
8600 free_triple(state, constraint);
8602 for(i = 0; i < in; i++) {
8603 struct triple *constraint;
8605 constraint = in_param[i].constraint;
8606 str = constraint->u.blob;
/* Matching constraint: intersect the tied output's register class
 * with this input's, falling back to a virtual register. */
8607 if (digitp(str[0]) && str[1] == '\0') {
8608 struct reg_info cinfo;
8610 val = digval(str[0]);
8611 cinfo.reg = info->tmpl.lhs[val].reg;
8612 cinfo.regcm = arch_type_to_regcm(state, in_param[i].expr->type);
8613 cinfo.regcm &= info->tmpl.lhs[val].regcm;
8614 if (cinfo.reg == REG_UNSET) {
8615 cinfo.reg = REG_VIRT0 + val;
8617 if (cinfo.regcm == 0) {
8618 error(state, 0, "No registers for %d", val);
8620 info->tmpl.lhs[val] = cinfo;
8621 info->tmpl.rhs[i] = cinfo;
8624 info->tmpl.rhs[i] = arch_reg_constraint(state,
8625 in_param[i].expr->type, str);
8627 free_triple(state, constraint);
8630 /* Now build the helper expressions */
8631 for(i = 0; i < in; i++) {
8632 RHS(def, i) = read_expr(state,in_param[i].expr);
8634 flatten(state, first, def);
8635 for(i = 0; i < (out + clobbers); i++) {
8637 struct triple *piece;
8638 type = (i < out)? out_param[i].expr->type : &void_type;
8639 piece = triple(state, OP_PIECE, type, def, 0);
8641 LHS(def, i) = piece;
8642 flatten(state, first, piece);
8644 /* And write the helpers to their destinations */
8645 for(i = 0; i < out; i++) {
8646 struct triple *piece;
8647 piece = LHS(def, i);
8648 flatten(state, first,
8649 write_expr(state, out_param[i].expr, piece));
/* isdecl - predicate: can token `tok` begin a declaration?
 * Used by statement() to support C99 intermixed statements and decls. */
8654 static int isdecl(int tok)
8677 case TOK_TYPE_NAME: /* typedef name */
/* compound_statement - parse `{ statement-list(opt) }`, flattening each
 * statement's triples onto the list headed by `first`. */
8684 static void compound_statement(struct compile_state *state, struct triple *first)
8686 eat(state, TOK_LBRACE);
8689 /* statement-list opt */
8690 while (peek(state) != TOK_RBRACE) {
8691 statement(state, first);
8694 eat(state, TOK_RBRACE);
/* statement - dispatch on the next token to the appropriate statement parser.
 * Falls through to expr_statement() for plain expression statements. */
8697 static void statement(struct compile_state *state, struct triple *first)
8701 if (tok == TOK_LBRACE) {
8702 compound_statement(state, first);
8704 else if (tok == TOK_IF) {
8705 if_statement(state, first);
8707 else if (tok == TOK_FOR) {
8708 for_statement(state, first);
8710 else if (tok == TOK_WHILE) {
8711 while_statement(state, first);
8713 else if (tok == TOK_DO) {
8714 do_statement(state, first);
8716 else if (tok == TOK_RETURN) {
8717 return_statement(state, first);
8719 else if (tok == TOK_BREAK) {
8720 break_statement(state, first);
8722 else if (tok == TOK_CONTINUE) {
8723 continue_statement(state, first);
8725 else if (tok == TOK_GOTO) {
8726 goto_statement(state, first);
8728 else if (tok == TOK_SWITCH) {
8729 switch_statement(state, first);
8731 else if (tok == TOK_ASM) {
8732 asm_statement(state, first);
/* `ident :` is a label, not an expression — needs two-token lookahead */
8734 else if ((tok == TOK_IDENT) && (peek2(state) == TOK_COLON)) {
8735 labeled_statement(state, first);
8737 else if (tok == TOK_CASE) {
8738 case_statement(state, first);
8740 else if (tok == TOK_DEFAULT) {
8741 default_statement(state, first);
8743 else if (isdecl(tok)) {
8744 /* This handles C99 intermixing of statements and decls */
8748 expr_statement(state, first);
/* param_decl - parse one function parameter declaration and record its
 * name in the type's field_ident. */
8752 static struct type *param_decl(struct compile_state *state)
8755 struct hash_entry *ident;
8756 /* Cheat so the declarator will know we are not global */
8759 type = decl_specifiers(state);
8760 type = declarator(state, type, &ident, 0);
8761 type->field_ident = ident;
/* param_type_list - parse a comma-separated parameter list, building a
 * TYPE_FUNCTION whose right side is a right-leaning TYPE_PRODUCT chain.
 * `...` is recognized but rejected (no variadic support). */
8766 static struct type *param_type_list(struct compile_state *state, struct type *type)
8768 struct type *ftype, **next;
8769 ftype = new_type(TYPE_FUNCTION, type, param_decl(state));
8770 next = &ftype->right;
8771 while(peek(state) == TOK_COMMA) {
8772 eat(state, TOK_COMMA);
8773 if (peek(state) == TOK_DOTS) {
8774 eat(state, TOK_DOTS);
8775 error(state, 0, "variadic functions not supported");
8778 *next = new_type(TYPE_PRODUCT, *next, param_decl(state));
8779 next = &((*next)->right);
/* type_name - parse a type-name (as used in casts/sizeof):
 * specifier-qualifier-list followed by an optional abstract declarator. */
8786 static struct type *type_name(struct compile_state *state)
8789 type = specifier_qualifier_list(state);
8790 /* abstract-declarator (may consume no tokens) */
8791 type = declarator(state, type, 0, 0);
/* direct_declarator - parse an identifier or parenthesized declarator,
 * then any trailing function parameter lists or array dimensions.
 * `ident` may be NULL for abstract declarators; `need_ident` forces a name. */
8795 static struct type *direct_declarator(
8796 struct compile_state *state, struct type *type,
8797 struct hash_entry **ident, int need_ident)
8802 arrays_complete(state, type);
8803 switch(peek(state)) {
8805 eat(state, TOK_IDENT);
8807 error(state, 0, "Unexpected identifier found");
8809 /* The name of what we are declaring */
8810 *ident = state->token[0].ident;
8813 eat(state, TOK_LPAREN);
8814 outer = declarator(state, type, ident, need_ident);
8815 eat(state, TOK_RPAREN);
8819 error(state, 0, "Identifier expected");
8825 arrays_complete(state, type);
8826 switch(peek(state)) {
8828 eat(state, TOK_LPAREN);
8829 type = param_type_list(state, type);
8830 eat(state, TOK_RPAREN);
8834 unsigned int qualifiers;
8835 struct triple *value;
8837 eat(state, TOK_LBRACKET);
8838 if (peek(state) != TOK_RBRACKET) {
8839 value = constant_expr(state);
8840 integral(state, value);
8842 eat(state, TOK_RBRACKET);
/* Propagate qualifiers/storage from the element type to the array type */
8844 qualifiers = type->type & (QUAL_MASK | STOR_MASK);
8845 type = new_type(TYPE_ARRAY | qualifiers, type, 0);
8847 type->elements = value->u.cval;
8848 free_triple(state, value);
8850 type->elements = ELEMENT_COUNT_UNSPECIFIED;
8862 arrays_complete(state, type);
/* Splice the suffix type into the innermost slot of a parenthesized
 * declarator's type chain */
8864 for(inner = outer; inner->left; inner = inner->left)
/* declarator - consume leading `*` pointer declarators (preserving the
 * storage-class bits on each pointer level) then the direct declarator. */
8872 static struct type *declarator(
8873 struct compile_state *state, struct type *type,
8874 struct hash_entry **ident, int need_ident)
8876 while(peek(state) == TOK_STAR) {
8877 eat(state, TOK_STAR);
8878 type = new_type(TYPE_POINTER | (type->type & STOR_MASK), type, 0);
8880 type = direct_declarator(state, type, ident, need_ident);
/* typedef_name - look up a typedef'd type; clone it only when the current
 * declaration adds storage/qualifier bits that differ from the original. */
8885 static struct type *typedef_name(
8886 struct compile_state *state, unsigned int specifiers)
8888 struct hash_entry *ident;
8890 eat(state, TOK_TYPE_NAME);
8891 ident = state->token[0].ident;
8892 type = ident->sym_ident->type;
8893 specifiers |= type->type & QUAL_MASK;
8894 if ((specifiers & (STOR_MASK | QUAL_MASK)) !=
8895 (type->type & (STOR_MASK | QUAL_MASK))) {
8896 type = clone_type(specifiers, type);
/* enum_specifier - parse `enum [tag] { enumerator-list }` or a reference to a
 * previously declared enum tag. Each enumerator becomes an int_const bound in
 * the symbol table, and the tag (if any) is registered via sym_tag. */
8901 static struct type *enum_specifier(
8902 struct compile_state *state, unsigned int spec)
8904 struct hash_entry *ident;
8907 struct type *enum_type;
8910 eat(state, TOK_ENUM);
8912 if ((tok == TOK_IDENT) || (tok == TOK_ENUM_CONST) || (tok == TOK_TYPE_NAME)) {
8914 ident = state->token[0].ident;
8918 if (!ident || (peek(state) == TOK_LBRACE)) {
8920 eat(state, TOK_LBRACE);
8921 enum_type = new_type(TYPE_ENUM | spec, 0, 0);
8922 enum_type->type_ident = ident;
8923 next = &enum_type->right;
8925 struct hash_entry *eident;
8926 struct triple *value;
8928 eat(state, TOK_IDENT);
8929 eident = state->token[0].ident;
8930 if (eident->sym_ident) {
8931 error(state, 0, "%s already declared",
/* Retag the identifier so the lexer sees it as an enum constant */
8934 eident->tok = TOK_ENUM_CONST;
8935 if (peek(state) == TOK_EQ) {
8938 val = constant_expr(state);
8939 integral(state, val);
8942 value = int_const(state, &int_type, base);
8943 symbol(state, eident, &eident->sym_ident, value, &int_type);
8944 entry = new_type(TYPE_LIST, 0, 0);
8945 entry->field_ident = eident;
8947 next = &entry->right;
8949 if (peek(state) == TOK_COMMA) {
8950 eat(state, TOK_COMMA);
8952 } while(peek(state) != TOK_RBRACE);
8953 eat(state, TOK_RBRACE);
8955 symbol(state, ident, &ident->sym_tag, 0, enum_type);
/* Tag reference without a body: reuse the previously declared enum */
8958 if (ident && ident->sym_tag &&
8959 ident->sym_tag->type &&
8960 ((ident->sym_tag->type->type & TYPE_MASK) == TYPE_ENUM)) {
8961 enum_type = clone_type(spec, ident->sym_tag->type);
8963 else if (ident && !enum_type) {
8964 error(state, 0, "enum %s undeclared", ident->name);
/* struct_declarator - parse one struct member declarator; bitfield syntax
 * (`: width`) is recognized but rejected as unimplemented. */
8969 static struct type *struct_declarator(
8970 struct compile_state *state, struct type *type, struct hash_entry **ident)
8974 if (tok != TOK_COLON) {
8975 type = declarator(state, type, ident, 1);
8977 if ((tok == TOK_COLON) || (peek(state) == TOK_COLON)) {
8978 struct triple *value;
8979 eat(state, TOK_COLON);
8980 value = constant_expr(state);
8981 #warning "FIXME implement bitfields to reduce register usage"
8982 error(state, 0, "bitfields not yet implemented");
/* struct_or_union_specifier - parse `struct|union [tag] { member-list }` or a
 * tag reference. Members are chained with TYPE_PRODUCT (TYPE_OVERLAP would be
 * used for unions, which are currently rejected). */
8987 static struct type *struct_or_union_specifier(
8988 struct compile_state *state, unsigned int spec)
8990 struct type *struct_type;
8991 struct hash_entry *ident;
8992 unsigned int type_join;
8996 switch(peek(state)) {
8998 eat(state, TOK_STRUCT);
8999 type_join = TYPE_PRODUCT;
9002 eat(state, TOK_UNION);
9003 type_join = TYPE_OVERLAP;
9004 error(state, 0, "unions not yet supported\n");
9007 eat(state, TOK_STRUCT);
9008 type_join = TYPE_PRODUCT;
9012 if ((tok == TOK_IDENT) || (tok == TOK_ENUM_CONST) || (tok == TOK_TYPE_NAME)) {
9014 ident = state->token[0].ident;
9016 if (!ident || (peek(state) == TOK_LBRACE)) {
9020 eat(state, TOK_LBRACE);
9021 next = &struct_type;
9023 struct type *base_type;
9025 base_type = specifier_qualifier_list(state);
9028 struct hash_entry *fident;
9030 type = struct_declarator(state, base_type, &fident);
9032 if (peek(state) == TOK_COMMA) {
9034 eat(state, TOK_COMMA);
/* Clone so each member in `a, b;` gets its own field_ident */
9036 type = clone_type(0, type);
9037 type->field_ident = fident;
9039 *next = new_type(type_join, *next, type);
9040 next = &((*next)->right);
9045 eat(state, TOK_SEMI);
9046 } while(peek(state) != TOK_RBRACE);
9047 eat(state, TOK_RBRACE);
9048 struct_type = new_type(TYPE_STRUCT | spec, struct_type, 0);
9049 struct_type->type_ident = ident;
9050 struct_type->elements = elements;
9052 symbol(state, ident, &ident->sym_tag, 0, struct_type);
/* Tag reference without a body: reuse the previously declared struct */
9055 if (ident && ident->sym_tag &&
9056 ident->sym_tag->type &&
9057 ((ident->sym_tag->type->type & TYPE_MASK) == TYPE_STRUCT)) {
9058 struct_type = clone_type(spec, ident->sym_tag->type);
9060 else if (ident && !struct_type) {
9061 error(state, 0, "struct %s undeclared", ident->name);
/* storage_class_specifier_opt - consume an optional storage-class keyword.
 * With no keyword, the default depends on scope: static at global scope,
 * auto inside a function. */
9066 static unsigned int storage_class_specifier_opt(struct compile_state *state)
9068 unsigned int specifiers;
9069 switch(peek(state)) {
9071 eat(state, TOK_AUTO);
9072 specifiers = STOR_AUTO;
9075 eat(state, TOK_REGISTER);
9076 specifiers = STOR_REGISTER;
9079 eat(state, TOK_STATIC);
9080 specifiers = STOR_STATIC;
9083 eat(state, TOK_EXTERN);
9084 specifiers = STOR_EXTERN;
9087 eat(state, TOK_TYPEDEF);
9088 specifiers = STOR_TYPEDEF;
9091 if (state->scope_depth <= GLOBAL_SCOPE_DEPTH) {
9092 specifiers = STOR_STATIC;
9095 specifiers = STOR_AUTO;
/* function_specifier_opt - consume an optional `inline` keyword and return
 * its specifier bit (the hint is otherwise ignored). */
9101 static unsigned int function_specifier_opt(struct compile_state *state)
9103 /* Ignore the inline keyword */
9104 unsigned int specifiers;
9106 switch(peek(state)) {
9108 eat(state, TOK_INLINE);
9109 specifiers = STOR_INLINE;
/* type_qualifiers - consume any run of const/volatile/restrict keywords and
 * accumulate the corresponding QUAL_* bits. */
9114 static unsigned int type_qualifiers(struct compile_state *state)
9116 unsigned int specifiers;
9119 specifiers = QUAL_NONE;
9121 switch(peek(state)) {
9123 eat(state, TOK_CONST);
9124 specifiers = QUAL_CONST;
9127 eat(state, TOK_VOLATILE);
9128 specifiers = QUAL_VOLATILE;
9131 eat(state, TOK_RESTRICT);
9132 specifiers = QUAL_RESTRICT;
/* type_specifier - parse one type specifier (void/char/short/int/long,
 * signed/unsigned combinations, struct/union, enum, or typedef name) and
 * build the corresponding type node with `spec` bits or'd in. Floating
 * point and long long are recognized but rejected as unsupported. */
9142 static struct type *type_specifier(
9143 struct compile_state *state, unsigned int spec)
9147 switch(peek(state)) {
9149 eat(state, TOK_VOID);
9150 type = new_type(TYPE_VOID | spec, 0, 0);
9153 eat(state, TOK_CHAR);
9154 type = new_type(TYPE_CHAR | spec, 0, 0);
9157 eat(state, TOK_SHORT);
9158 if (peek(state) == TOK_INT) {
9159 eat(state, TOK_INT);
9161 type = new_type(TYPE_SHORT | spec, 0, 0);
9164 eat(state, TOK_INT);
9165 type = new_type(TYPE_INT | spec, 0, 0);
9168 eat(state, TOK_LONG);
9169 switch(peek(state)) {
9171 eat(state, TOK_LONG);
9172 error(state, 0, "long long not supported");
9175 eat(state, TOK_DOUBLE);
9176 error(state, 0, "long double not supported");
9179 eat(state, TOK_INT);
9180 type = new_type(TYPE_LONG | spec, 0, 0);
9183 type = new_type(TYPE_LONG | spec, 0, 0);
9188 eat(state, TOK_FLOAT);
9189 error(state, 0, "type float not supported");
9192 eat(state, TOK_DOUBLE);
9193 error(state, 0, "type double not supported");
9196 eat(state, TOK_SIGNED);
9197 switch(peek(state)) {
9199 eat(state, TOK_LONG);
9200 switch(peek(state)) {
9202 eat(state, TOK_LONG);
9203 error(state, 0, "type long long not supported");
9206 eat(state, TOK_INT);
9207 type = new_type(TYPE_LONG | spec, 0, 0);
9210 type = new_type(TYPE_LONG | spec, 0, 0);
9215 eat(state, TOK_INT);
9216 type = new_type(TYPE_INT | spec, 0, 0);
9219 eat(state, TOK_SHORT);
9220 type = new_type(TYPE_SHORT | spec, 0, 0);
9223 eat(state, TOK_CHAR);
9224 type = new_type(TYPE_CHAR | spec, 0, 0);
9227 type = new_type(TYPE_INT | spec, 0, 0);
9232 eat(state, TOK_UNSIGNED);
9233 switch(peek(state)) {
9235 eat(state, TOK_LONG);
9236 switch(peek(state)) {
9238 eat(state, TOK_LONG);
9239 error(state, 0, "unsigned long long not supported");
9242 eat(state, TOK_INT);
9243 type = new_type(TYPE_ULONG | spec, 0, 0);
9246 type = new_type(TYPE_ULONG | spec, 0, 0);
9251 eat(state, TOK_INT);
9252 type = new_type(TYPE_UINT | spec, 0, 0);
9255 eat(state, TOK_SHORT);
9256 type = new_type(TYPE_USHORT | spec, 0, 0);
9259 eat(state, TOK_CHAR);
9260 type = new_type(TYPE_UCHAR | spec, 0, 0);
9263 type = new_type(TYPE_UINT | spec, 0, 0);
9267 /* struct or union specifier */
9270 type = struct_or_union_specifier(state, spec);
9272 /* enum-specifier */
9274 type = enum_specifier(state, spec);
9278 type = typedef_name(state, spec);
9281 error(state, 0, "bad type specifier %s",
9282 tokens[peek(state)]);
/* istype - predicate: can token `tok` begin a type specifier? */
9288 static int istype(int tok)
/* specifier_qualifier_list - parse type qualifiers followed by a type
 * specifier (the grammar used for struct members and type names). */
9314 static struct type *specifier_qualifier_list(struct compile_state *state)
9317 unsigned int specifiers = 0;
9319 /* type qualifiers */
9320 specifiers |= type_qualifiers(state);
9322 /* type specifier */
9323 type = type_specifier(state, specifiers);
/* isdecl_specifier - predicate: can token `tok` begin a declaration
 * specifier (storage class, qualifier, type specifier, or `inline`)? */
9328 static int isdecl_specifier(int tok)
9331 /* storage class specifier */
9337 /* type qualifier */
9341 /* type specifiers */
9351 /* struct or union specifier */
9354 /* enum-specifier */
9358 /* function specifiers */
/* decl_specifiers - parse the full declaration-specifier sequence:
 * storage class, function specifier, qualifiers, then the type specifier. */
9366 static struct type *decl_specifiers(struct compile_state *state)
9369 unsigned int specifiers;
9370 /* I am overly restrictive in the arrangement of specifiers supported.
9371 * C is overly flexible in this department; it makes interpreting
9372 * the parse tree difficult.
9376 /* storage class specifier */
9377 specifiers |= storage_class_specifier_opt(state);
9379 /* function-specifier */
9380 specifiers |= function_specifier_opt(state);
9382 /* type qualifier */
9383 specifiers |= type_qualifiers(state);
9385 /* type specifier */
9386 type = type_specifier(state, specifiers);
/* designator - parse a C99 initializer designator (`[index]` for arrays,
 * `.field` for structs) and return the target member's type and byte
 * offset within the aggregate being initialized. */
9395 static struct field_info designator(struct compile_state *state, struct type *type)
9398 struct field_info info;
9402 switch(peek(state)) {
9405 struct triple *value;
9406 if ((type->type & TYPE_MASK) != TYPE_ARRAY) {
9407 error(state, 0, "Array designator not in array initializer");
9409 eat(state, TOK_LBRACKET);
9410 value = constant_expr(state);
9411 eat(state, TOK_RBRACKET);
9413 info.type = type->left;
9414 info.offset = value->u.cval * size_of(state, info.type);
9419 struct hash_entry *field;
9420 if ((type->type & TYPE_MASK) != TYPE_STRUCT) {
9421 error(state, 0, "Struct designator not in struct initializer");
9423 eat(state, TOK_DOT);
9424 eat(state, TOK_IDENT);
9425 field = state->token[0].ident;
9426 info.offset = field_offset(state, type, field);
9427 info.type = field_type(state, type, field);
9431 error(state, 0, "Invalid designator");
9434 } while((tok == TOK_LBRACKET) || (tok == TOK_DOT));
/* initializer - parse an initializer for `type`. A scalar/expression
 * initializer is type-checked and cast; a braced initializer list is
 * folded at compile time into a single OP_BLOBCONST byte buffer, growing
 * the buffer as needed for arrays of unspecified element count. */
9439 static struct triple *initializer(
9440 struct compile_state *state, struct type *type)
9442 struct triple *result;
9443 #warning "FIXME more consistent initializer handling (where should eval_const_expr go?"
9444 if (peek(state) != TOK_LBRACE) {
9445 result = assignment_expr(state);
/* char x[] = "..." style: adopt the element count from the value */
9446 if (((type->type & TYPE_MASK) == TYPE_ARRAY) &&
9447 (type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9448 ((result->type->type & TYPE_MASK) == TYPE_ARRAY) &&
9449 (result->type->elements != ELEMENT_COUNT_UNSPECIFIED) &&
9450 (equiv_types(type->left, result->type->left))) {
9451 type->elements = result->type->elements;
9453 if (is_stable(state, result) &&
9454 ((result->type->type & TYPE_MASK) == TYPE_ARRAY) &&
9455 (type->type & TYPE_MASK) != TYPE_ARRAY)
9457 result = array_to_pointer(state, result);
9459 if (!is_init_compatible(state, type, result->type)) {
9460 error(state, 0, "Incompatible types in initializer");
9462 if (!equiv_types(type, result->type)) {
9463 result = mk_cast_expr(state, type, result);
9469 struct field_info info;
9471 if (((type->type & TYPE_MASK) != TYPE_ARRAY) &&
9472 ((type->type & TYPE_MASK) != TYPE_STRUCT)) {
9473 internal_error(state, 0, "unknown initializer type");
9476 info.type = type->left;
9477 if ((type->type & TYPE_MASK) == TYPE_STRUCT) {
9478 info.type = next_field(state, type, 0);
9480 if (type->elements == ELEMENT_COUNT_UNSPECIFIED) {
9483 max_offset = size_of(state, type);
9485 buf = xcmalloc(max_offset, "initializer");
9486 eat(state, TOK_LBRACE);
9488 struct triple *value;
9489 struct type *value_type;
9495 if ((tok == TOK_LBRACKET) || (tok == TOK_DOT)) {
9496 info = designator(state, type);
9498 if ((type->elements != ELEMENT_COUNT_UNSPECIFIED) &&
9499 (info.offset >= max_offset)) {
9500 error(state, 0, "element beyond bounds");
9502 value_type = info.type;
9503 value = eval_const_expr(state, initializer(state, value_type));
9504 value_size = size_of(state, value_type);
/* Open-ended array: grow the blob to hold the new element */
9505 if (((type->type & TYPE_MASK) == TYPE_ARRAY) &&
9506 (type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9507 (max_offset <= info.offset)) {
9511 old_size = max_offset;
9512 max_offset = info.offset + value_size;
9513 buf = xmalloc(max_offset, "initializer");
9514 memcpy(buf, old_buf, old_size);
9517 dest = ((char *)buf) + info.offset;
9518 if (value->op == OP_BLOBCONST) {
9519 memcpy(dest, value->u.blob, value_size);
9521 else if ((value->op == OP_INTCONST) && (value_size == 1)) {
9522 *((uint8_t *)dest) = value->u.cval & 0xff;
9524 else if ((value->op == OP_INTCONST) && (value_size == 2)) {
9525 *((uint16_t *)dest) = value->u.cval & 0xffff;
9527 else if ((value->op == OP_INTCONST) && (value_size == 4)) {
9528 *((uint32_t *)dest) = value->u.cval & 0xffffffff;
9531 internal_error(state, 0, "unhandled constant initializer");
9533 free_triple(state, value);
9534 if (peek(state) == TOK_COMMA) {
9535 eat(state, TOK_COMMA);
/* Advance to the next member for a non-designated initializer */
9538 info.offset += value_size;
9539 if ((type->type & TYPE_MASK) == TYPE_STRUCT) {
9540 info.type = next_field(state, type, info.type);
9541 info.offset = field_offset(state, type,
9542 info.type->field_ident);
9544 } while(comma && (peek(state) != TOK_RBRACE));
9545 if ((type->elements == ELEMENT_COUNT_UNSPECIFIED) &&
9546 ((type->type & TYPE_MASK) == TYPE_ARRAY)) {
9547 type->elements = max_offset / size_of(state, type->left);
9549 eat(state, TOK_RBRACE);
9550 result = triple(state, OP_BLOBCONST, type, 0, 0);
9551 result->u.blob = buf;
/* resolve_branches - after parsing a function body, scan the hash table for
 * labels that were targeted by goto but never flattened into the function
 * (i.e. never defined) and report them as errors. */
9556 static void resolve_branches(struct compile_state *state)
9558 /* Make a second pass and finish anything outstanding
9559 * with respect to branches. The only outstanding item
9560 * is to see if there are goto to labels that have not
9561 * been defined and to error about them.
9564 for(i = 0; i < HASH_TABLE_SIZE; i++) {
9565 struct hash_entry *entry;
9566 for(entry = state->hash_table[i]; entry; entry = entry->next) {
9568 if (!entry->sym_label) {
9571 ins = entry->sym_label->def;
9572 if (!(ins->id & TRIPLE_FLAG_FLATTENED)) {
9573 error(state, ins, "label `%s' used but not defined",
/* function_definition - parse a function body into an OP_LIST triple:
 * validate the function type, create variables for each named parameter and
 * the return value, parse the compound statement, and resolve forward gotos.
 * The most recently defined function is recorded as the main function.
 * FIX: corrected the "paramter" typo in the second missing-identifier
 * error message so it matches the first (line 9602). */
9580 static struct triple *function_definition(
9581 struct compile_state *state, struct type *type)
9583 struct triple *def, *tmp, *first, *end;
9584 struct hash_entry *ident;
9587 if ((type->type &TYPE_MASK) != TYPE_FUNCTION) {
9588 error(state, 0, "Invalid function header");
9591 /* Verify the function type */
9592 if (((type->right->type & TYPE_MASK) != TYPE_VOID) &&
9593 ((type->right->type & TYPE_MASK) != TYPE_PRODUCT) &&
9594 (type->right->field_ident == 0)) {
9595 error(state, 0, "Invalid function parameters");
9597 param = type->right;
9599 while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
9601 if (!param->left->field_ident) {
9602 error(state, 0, "No identifier for parameter %d\n", i);
9604 param = param->right;
9607 if (((param->type & TYPE_MASK) != TYPE_VOID) && !param->field_ident) {
9608 error(state, 0, "No identifier for parameter %d\n", i);
9611 /* Get a list of statements for this function. */
9612 def = triple(state, OP_LIST, type, 0, 0);
9614 /* Start a new scope for the passed parameters */
9617 /* Put a label at the very start of a function */
9618 first = label(state);
9619 RHS(def, 0) = first;
9621 /* Put a label at the very end of a function */
9623 flatten(state, first, end);
9625 /* Walk through the parameters and create symbol table entries
9628 param = type->right;
9629 while((param->type & TYPE_MASK) == TYPE_PRODUCT) {
9630 ident = param->left->field_ident;
9631 tmp = variable(state, param->left);
9632 symbol(state, ident, &ident->sym_ident, tmp, tmp->type);
9633 flatten(state, end, tmp);
9634 param = param->right;
9636 if ((param->type & TYPE_MASK) != TYPE_VOID) {
9637 /* And don't forget the last parameter */
9638 ident = param->field_ident;
9639 tmp = variable(state, param);
9640 symbol(state, ident, &ident->sym_ident, tmp, tmp->type);
9641 flatten(state, end, tmp);
9643 /* Add a variable for the return value */
9645 if ((type->left->type & TYPE_MASK) != TYPE_VOID) {
9646 /* Remove all type qualifiers from the return type */
9647 tmp = variable(state, clone_type(0, type->left));
9648 flatten(state, end, tmp);
9649 /* Remember where the return value is */
9653 /* Remember which function I am compiling.
9654 * Also assume the last defined function is the main function.
9656 state->main_function = def;
9658 /* Now get the actual function definition */
9659 compound_statement(state, end);
9661 /* Finish anything unfinished with branches */
9662 resolve_branches(state);
9664 /* Remove the parameter scope */
9668 fprintf(stdout, "\n");
9669 loc(stdout, state, 0);
9670 fprintf(stdout, "\n__________ function_definition _________\n");
9671 print_triple(state, def);
9672 fprintf(stdout, "__________ function_definition _________ done\n\n");
/* do_decl - act on one parsed declaration: normalize its storage class,
 * register a typedef, or create a variable triple bound in the symbol
 * table. Function prototypes and non-const statics are rejected. */
9678 static struct triple *do_decl(struct compile_state *state,
9679 struct type *type, struct hash_entry *ident)
9683 /* Clean up the storage types used */
9684 switch (type->type & STOR_MASK) {
9687 /* These are the good types I am aiming for */
9690 type->type &= ~STOR_MASK;
9691 type->type |= STOR_AUTO;
9694 type->type &= ~STOR_MASK;
9695 type->type |= STOR_STATIC;
9699 error(state, 0, "typedef without name");
9701 symbol(state, ident, &ident->sym_ident, 0, type);
9702 ident->tok = TOK_TYPE_NAME;
9706 internal_error(state, 0, "Undefined storage class");
9708 if ((type->type & TYPE_MASK) == TYPE_FUNCTION) {
9709 error(state, 0, "Function prototypes not supported");
9712 ((type->type & STOR_MASK) == STOR_STATIC) &&
9713 ((type->type & QUAL_CONST) == 0)) {
9714 error(state, 0, "non const static variables not supported");
9717 def = variable(state, type);
9718 symbol(state, ident, &ident->sym_ident, def, type);
/* decl - parse a full declaration: either a function definition (at global
 * scope, when a `{` follows the declarator) or a comma-separated list of
 * variable/typedef declarations with optional initializers. */
9723 static void decl(struct compile_state *state, struct triple *first)
9725 struct type *base_type, *type;
9726 struct hash_entry *ident;
9729 global = (state->scope_depth <= GLOBAL_SCOPE_DEPTH);
9730 base_type = decl_specifiers(state);
9732 type = declarator(state, base_type, &ident, 0);
9733 if (global && ident && (peek(state) == TOK_LBRACE)) {
/* Function definition: remember the name while compiling the body */
9735 state->function = ident->name;
9736 def = function_definition(state, type);
9737 symbol(state, ident, &ident->sym_ident, def, type);
9738 state->function = 0;
9742 flatten(state, first, do_decl(state, type, ident));
9743 /* type or variable definition */
9746 if (peek(state) == TOK_EQ) {
9748 error(state, 0, "cannot assign to a type");
9751 flatten(state, first,
9753 ident->sym_ident->def,
9754 initializer(state, type)));
9756 arrays_complete(state, type);
9757 if (peek(state) == TOK_COMMA) {
9758 eat(state, TOK_COMMA);
9760 type = declarator(state, base_type, &ident, 0);
9761 flatten(state, first, do_decl(state, type, ident));
9765 eat(state, TOK_SEMI);
/* decls - parse all top-level declarations until EOF; anything flattened
 * onto the scratch list (i.e. a global variable) is an error. */
9769 static void decls(struct compile_state *state)
9771 struct triple *list;
9773 list = label(state);
9776 if (tok == TOK_EOF) {
9779 if (tok == TOK_SPACE) {
9780 eat(state, TOK_SPACE);
9783 if (list->next != list) {
9784 error(state, 0, "global variables not supported");
9790  * Data structures for optimization.
/* do_use_block - record that `user` is a member of the list at `*head`
 * on block `used`; returns a count (0 if already present) so callers can
 * maintain reference counts. */
9793 static int do_use_block(
9794 struct block *used, struct block_set **head, struct block *user,
9797 struct block_set **ptr, *new;
9804 if ((*ptr)->member == user) {
9807 ptr = &(*ptr)->next;
9809 new = xcmalloc(sizeof(*new), "block_set");
/* do_unuse_block - remove `unuser` from the block_set list at `*head`;
 * the freed entry is poisoned with -1 to catch stale references. */
9821 static int do_unuse_block(
9822 struct block *used, struct block_set **head, struct block *unuser)
9824 struct block_set *use, **ptr;
9830 if (use->member == unuser) {
9832 memset(use, -1, sizeof(*use));
/* use_block / unuse_block - maintain a block's predecessor (use) list and
 * its reference count. */
9843 static void use_block(struct block *used, struct block *user)
9846 /* Append new to the head of the list, print_block
9849 count = do_use_block(used, &used->use, user, 1);
9850 used->users += count;
9852 static void unuse_block(struct block *used, struct block *unuser)
9855 count = do_unuse_block(used, &used->use, unuser);
9856 used->users -= count;
/* Thin wrappers maintaining the four dominance-related block_set lists:
 * immediate dominator children, dominance frontier, and their post-dominance
 * counterparts. None of these affect the reference count. */
9859 static void idom_block(struct block *idom, struct block *user)
9861 do_use_block(idom, &idom->idominates, user, 0);
9864 static void unidom_block(struct block *idom, struct block *unuser)
9866 do_unuse_block(idom, &idom->idominates, unuser);
9869 static void domf_block(struct block *block, struct block *domf)
9871 do_use_block(block, &block->domfrontier, domf, 0);
9874 static void undomf_block(struct block *block, struct block *undomf)
9876 do_unuse_block(block, &block->domfrontier, undomf);
9879 static void ipdom_block(struct block *ipdom, struct block *user)
9881 do_use_block(ipdom, &ipdom->ipdominates, user, 0);
9884 static void unipdom_block(struct block *ipdom, struct block *unuser)
9886 do_unuse_block(ipdom, &ipdom->ipdominates, unuser);
9889 static void ipdomf_block(struct block *block, struct block *ipdomf)
9891 do_use_block(block, &block->ipdomfrontier, ipdomf, 0);
9894 static void unipdomf_block(struct block *block, struct block *unipdomf)
9896 do_unuse_block(block, &block->ipdomfrontier, unipdomf);
/* walk_triples - visit every triple in the circular instruction list,
 * calling `cb` on each; stops early when cb returns non-zero. Also sanity
 * checks the doubly-linked list invariants as it goes. */
9899 static int walk_triples(
9900 struct compile_state *state,
9901 int (*cb)(struct compile_state *state, struct triple *ptr))
9907 result = cb(state, ptr);
9908 if (ptr->next->prev != ptr) {
9909 internal_error(state, ptr->next, "bad prev");
9912 } while((result == 0) && (ptr != state->first));
9916 #define PRINT_LIST 1
/* do_print_triple - walk_triples callback that pretty-prints one triple,
 * printing an address label before any used OP_LABEL and flagging
 * branches that are (incorrectly) used as values. */
9917 static int do_print_triple(struct compile_state *state, struct triple *ins)
9921 if (op == OP_LIST) {
9926 if ((op == OP_LABEL) && (ins->use)) {
9927 printf("\n%p:\n", ins);
9929 display_triple(stdout, ins);
9931 if ((ins->op == OP_BRANCH) && ins->use) {
9932 internal_error(state, ins, "branch used?");
9934 if (triple_is_branch(state, ins)) {
/* print_triples - dump the whole instruction list to stdout. */
9940 static void print_triples(struct compile_state *state)
9942 walk_triples(state, do_print_triple);
9946 struct block *block;
/* find_cf_blocks - recursively index every reachable block into the `cf`
 * array by vertex number; the membership test doubles as the visited check. */
9948 static void find_cf_blocks(struct cf_block *cf, struct block *block)
9950 if (!block || (cf[block->vertex].block == block)) {
9953 cf[block->vertex].block = block;
9954 find_cf_blocks(cf, block->left);
9955 find_cf_blocks(cf, block->right);
/* print_control_flow - print each block's vertex and its successor
 * vertices, one line per block (duplicate left/right edges suppressed). */
9958 static void print_control_flow(struct compile_state *state)
9960 struct cf_block *cf;
9962 printf("\ncontrol flow\n");
9963 cf = xcmalloc(sizeof(*cf) * (state->last_vertex + 1), "cf_block");
9964 find_cf_blocks(cf, state->first_block);
9966 for(i = 1; i <= state->last_vertex; i++) {
9967 struct block *block;
9968 block = cf[i].block;
9971 printf("(%p) %d:", block, block->vertex);
9973 printf(" %d", block->left->vertex);
9975 if (block->right && (block->right != block->left)) {
9976 printf(" %d", block->right->vertex);
/* basic_block - build (or return the cached) basic block starting at the
 * label triple `first`, scanning forward to the block-ending instruction
 * and recursively tracing fall-through and branch-target successors.
 * FIX: the debug fprintf printed block->left for both successor slots
 * (copy-paste defect at lines 10045-10046); the second pair now prints
 * block->right as the format string's layout intends. */
9985 static struct block *basic_block(struct compile_state *state,
9986 struct triple *first)
9988 struct block *block;
9990 if (first->op != OP_LABEL) {
9991 internal_error(state, 0, "block does not start with a label");
9993 /* See if this basic block has already been setup */
9994 if (first->u.block != 0) {
9995 return first->u.block;
9997 /* Allocate another basic block structure */
9998 state->last_vertex += 1;
9999 block = xcmalloc(sizeof(*block), "block");
10000 block->first = block->last = first;
10001 block->vertex = state->last_vertex;
10004 if ((ptr != first) && (ptr->op == OP_LABEL) && (ptr->use)) {
10008 /* If ptr->u is not used remember where the basic block is */
10009 if (triple_stores_block(state, ptr)) {
10010 ptr->u.block = block;
10012 if (triple_is_branch(state, ptr)) {
10016 } while (ptr != state->first);
10017 if (ptr == state->first) {
10018 /* The block has no outflowing edges */
10020 else if (ptr->op == OP_LABEL) {
10021 block->left = basic_block(state, ptr);
10023 use_block(block->left, block);
10025 else if (triple_is_branch(state, ptr)) {
10027 /* Trace the branch target */
10028 block->right = basic_block(state, TARG(ptr, 0));
10029 use_block(block->right, block);
10030 /* If there is a test trace the branch as well */
10031 if (TRIPLE_RHS(ptr->sizes)) {
10032 block->left = basic_block(state, ptr->next);
10033 use_block(block->left, block);
10037 internal_error(state, 0, "Bad basic block split");
10040 fprintf(stderr, "basic_block: %10p [%2d] ( %10p - %10p ) %10p [%2d] %10p [%2d] \n",
10041 block, block->vertex,
10042 block->first, block->last,
10043 block->left ? block->left->first : 0,
10044 block->left ? block->left->vertex : -1,
10045 block->right ? block->right->first : 0,
10046 block->right ? block->right->vertex : -1);
/* walk_blocks - iterate the instruction list in order and invoke `cb`
 * once per basic block, the first time a triple of that block is seen. */
10052 static void walk_blocks(struct compile_state *state,
10053 void (*cb)(struct compile_state *state, struct block *block, void *arg),
10056 struct triple *ptr, *first;
10057 struct block *last_block;
10059 first = state->first;
10062 struct block *block;
10063 if (triple_stores_block(state, ptr)) {
10064 block = ptr->u.block;
10065 if (block && (block != last_block)) {
10066 cb(state, block, arg);
10068 last_block = block;
10070 if (block && (block->last == ptr)) {
10074 } while(ptr != first);
/* print_block - walk_blocks callback: dump one block's header, every
 * triple from first to last, and its user (predecessor) list to the
 * FILE* passed through `arg`. */
10077 static void print_block(
10078 struct compile_state *state, struct block *block, void *arg)
10080 struct block_set *user;
10081 struct triple *ptr;
10084 fprintf(fp, "\nblock: %p (%d) %p<-%p %p<-%p\n",
10088 block->left && block->left->use?block->left->use->member : 0,
10090 block->right && block->right->use?block->right->use->member : 0);
10091 if (block->first->op == OP_LABEL) {
10092 fprintf(fp, "%p:\n", block->first);
10094 for(ptr = block->first; ; ptr = ptr->next) {
10095 display_triple(fp, ptr);
10096 if (ptr == block->last)
10099 fprintf(fp, "users %d: ", block->users);
10100 for(user = block->use; user; user = user->next) {
10101 fprintf(fp, "%p (%d) ",
10103 user->member->vertex);
10105 fprintf(fp,"\n\n");
/* print_blocks - dump every basic block to `fp`. */
10109 static void print_blocks(struct compile_state *state, FILE *fp)
10111 fprintf(fp, "--------------- blocks ---------------\n");
10112 walk_blocks(state, print_block, fp);
/* prune_nonblock_triples - release every triple that did not end up inside
 * a basic block (unreachable code left over after block construction). */
10115 static void prune_nonblock_triples(struct compile_state *state)
10117 struct block *block;
10118 struct triple *first, *ins, *next;
10119 /* Delete the triples not in a basic block */
10120 first = state->first;
10125 if (ins->op == OP_LABEL) {
10126 block = ins->u.block;
10129 release_triple(state, ins);
10131 if (block && block->last == ins) {
10135 } while(ins != first);
/* setup_basic_blocks - build the control flow graph: construct all basic
 * blocks from state->first, prune unreachable triples, and guarantee a
 * unique empty final block (appending one if needed) so reverse-flow
 * analyses have a single exit node. */
10138 static void setup_basic_blocks(struct compile_state *state)
10140 if (!triple_stores_block(state, state->first)) {
10141 internal_error(state, 0, "ins will not store block?");
10143 /* Find the basic blocks */
10144 state->last_vertex = 0;
10145 state->first_block = basic_block(state, state->first);
10146 /* Delete the triples not in a basic block */
10147 prune_nonblock_triples(state);
10149 /* Find the last basic block.
10151 * For purposes of reverse flow computation it is
10152 * important that the last basic block is empty.
10153 * This allows the control flow graph to be modified to
10154 * have one unique starting block and one unique final block.
10155 * With the insertion of a few extra edges.
10157 * If the final block contained instructions it could contain
10158 * phi functions from edges that would never contribute a
10159 * value. Which for now at least I consider a compile error.
10161 state->last_block = block_of_triple(state, state->first->prev);
10162 if ((state->last_block->first != state->last_block->last) ||
10163 (state->last_block->last->op != OP_LABEL))
10165 struct block *block, *prev_block;
10166 struct triple *final;
10167 prev_block = state->last_block;
10168 final = label(state);
10169 flatten(state, state->first, final);
10170 use_triple(final, final);
10171 block = basic_block(state, final);
10172 state->last_block = block;
10173 prev_block->left = block;
10174 use_block(prev_block->left, prev_block);
10177 /* If we are debugging print what I have just done */
10178 if (state->debug & DEBUG_BASIC_BLOCKS) {
10179 print_blocks(state, stdout);
10180 print_control_flow(state);
/* Recursively tear down a basic block: drop every CFG edge, dominator
 * link, and frontier membership, then poison the block's memory.
 * A vertex of -1 marks an already-freed block and stops the recursion.
 */
10184 static void free_basic_block(struct compile_state *state, struct block *block)
10186 struct block_set *entry, *next;
10187 struct block *child;
10191 if (block->vertex == -1) {
10194 block->vertex = -1;
10196 unuse_block(block->left, block);
10198 if (block->right) {
10199 unuse_block(block->right, block);
10202 unidom_block(block->idom, block);
10205 if (block->ipdom) {
10206 unipdom_block(block->ipdom, block);
/* Detach every predecessor that still points at this block. */
10209 for(entry = block->use; entry; entry = next) {
10210 next = entry->next;
10211 child = entry->member;
10212 unuse_block(block, child);
10213 if (child->left == block) {
10216 if (child->right == block) {
/* Drop forward-dominator children, then both frontier sets. */
10220 for(entry = block->idominates; entry; entry = next) {
10221 next = entry->next;
10222 child = entry->member;
10223 unidom_block(block, child);
10226 for(entry = block->domfrontier; entry; entry = next) {
10227 next = entry->next;
10228 child = entry->member;
10229 undomf_block(block, child);
10231 for(entry = block->ipdominates; entry; entry = next) {
10232 next = entry->next;
10233 child = entry->member;
10234 unipdom_block(block, child);
10237 for(entry = block->ipdomfrontier; entry; entry = next) {
10238 next = entry->next;
10239 child = entry->member;
10240 unipdomf_block(block, child);
10242 if (block->users != 0) {
10243 internal_error(state, 0, "block still has users");
10245 free_basic_block(state, block->left);
10247 free_basic_block(state, block->right);
/* Poison the freed block so stale references fault loudly. */
10249 memset(block, -1, sizeof(*block));
/* Free the entire CFG rooted at first_block and reset per-state block
 * bookkeeping; then sweep the triple list to clear any remaining
 * block references held by individual triples.
 */
10253 static void free_basic_blocks(struct compile_state *state)
10255 struct triple *first, *ins;
10256 free_basic_block(state, state->first_block);
10257 state->last_vertex = 0;
10258 state->first_block = state->last_block = 0;
10259 first = state->first;
10262 if (triple_stores_block(state, ins)) {
10266 } while(ins != first);
/* Per-vertex bookkeeping for the Lengauer-Tarjan style semidominator
 * computation (see compute_sdom/compute_spdom below).
 */
10270 struct sdom_block {
10271 struct block *block;       /* the basic block this vertex stands for */
10272 struct sdom_block *sdominates; /* head of this vertex's bucket */
10273 struct sdom_block *sdom_next;  /* next entry in our sdom's bucket */
10274 struct sdom_block *sdom;       /* current semidominator candidate */
10275 struct sdom_block *label;      /* path-compression label */
10276 struct sdom_block *parent;     /* DFS-tree parent */
10277 struct sdom_block *ancestor;   /* forest ancestor for LINK/EVAL */
/* Unlink block from its semidominator's bucket (sdominates list).
 * A null sdom_next is treated as "not on any list".
 */
10282 static void unsdom_block(struct sdom_block *block)
10284 struct sdom_block **ptr;
10285 if (!block->sdom_next) {
10288 ptr = &block->sdom->sdominates;
10290 if ((*ptr) == block) {
10291 *ptr = block->sdom_next;
10294 ptr = &(*ptr)->sdom_next;
/* Record sdom as block's semidominator: remove block from any previous
 * bucket, then push it on the front of sdom's bucket list.
 */
10298 static void sdom_block(struct sdom_block *sdom, struct sdom_block *block)
10300 unsdom_block(block);
10301 block->sdom = sdom;
10302 block->sdom_next = sdom->sdominates;
10303 sdom->sdominates = block;
/* Step 1 of the dominator algorithm: depth-first walk of the forward
 * CFG, assigning DFS numbers to blocks and initializing the sdom_block
 * record for each vertex. Returns the next free vertex number.
 * Already-numbered blocks (sd[vertex].block == block) are skipped.
 */
10308 static int initialize_sdblock(struct sdom_block *sd,
10309 struct block *parent, struct block *block, int vertex)
10311 if (!block || (sd[block->vertex].block == block)) {
10315 /* Renumber the blocks in a convenient fashion */
10316 block->vertex = vertex;
10317 sd[vertex].block = block;
10318 sd[vertex].sdom = &sd[vertex];
10319 sd[vertex].label = &sd[vertex];
10320 sd[vertex].parent = parent? &sd[parent->vertex] : 0;
10321 sd[vertex].ancestor = 0;
10322 sd[vertex].vertex = vertex;
10323 vertex = initialize_sdblock(sd, block, block->left, vertex);
10324 vertex = initialize_sdblock(sd, block, block->right, vertex);
/* Reverse-graph twin of initialize_sdblock: DFS over *predecessor*
 * (use) edges to number vertices for the post-dominator computation.
 * Returns the next free vertex number.
 */
10328 static int initialize_spdblock(
10329 struct compile_state *state, struct sdom_block *sd,
10330 struct block *parent, struct block *block, int vertex)
10332 struct block_set *user;
10333 if (!block || (sd[block->vertex].block == block)) {
10337 /* Renumber the blocks in a convenient fashion */
10338 block->vertex = vertex;
10339 sd[vertex].block = block;
10340 sd[vertex].sdom = &sd[vertex];
10341 sd[vertex].label = &sd[vertex];
10342 sd[vertex].parent = parent? &sd[parent->vertex] : 0;
10343 sd[vertex].ancestor = 0;
10344 sd[vertex].vertex = vertex;
/* Recurse into every predecessor of this block. */
10345 for(user = block->use; user; user = user->next) {
10346 vertex = initialize_spdblock(state, sd, block, user->member, vertex);
/* Number vertices for post-dominators starting from the exit block.
 * Blocks unreachable in the reverse graph (e.g. infinite loops) get a
 * fake edge to last_block so every block has a post-dominator path.
 * Returns the number of vertices assigned.
 */
10351 static int setup_spdblocks(struct compile_state *state, struct sdom_block *sd)
10353 struct block *block;
10355 /* Setup as many sdpblocks as possible without using fake edges */
10356 vertex = initialize_spdblock(state, sd, 0, state->last_block, 0);
10358 /* Walk through the graph and find unconnected blocks. If
10359 * we can, add a fake edge from the unconnected blocks to the
10360 * end of the graph.
10362 block = state->first_block->last->next->u.block;
10363 for(; block && block != state->first_block; block = block->last->next->u.block) {
10364 if (sd[block->vertex].block == block) {
10367 if (block->left != 0) {
10371 #if DEBUG_SDP_BLOCKS
10372 fprintf(stderr, "Adding %d\n", vertex +1);
/* Fake edge: pretend the disconnected block falls through to exit. */
10375 block->left = state->last_block;
10376 use_block(block->left, block);
10377 vertex = initialize_spdblock(state, sd, state->last_block, block, vertex);
/* Path compression (the COMPRESS step of Lengauer-Tarjan): shorten
 * v's ancestor chain while propagating the minimum-semi label down.
 */
10382 static void compress_ancestors(struct sdom_block *v)
10384 /* This procedure assumes ancestor(v) != 0 */
10385 /* if (ancestor(ancestor(v)) != 0) {
10386 * compress(ancestor(ancestor(v)));
10387 * if (semi(label(ancestor(v))) < semi(label(v))) {
10388 * label(v) = label(ancestor(v));
10390 * ancestor(v) = ancestor(ancestor(v));
10393 if (!v->ancestor) {
10396 if (v->ancestor->ancestor) {
10397 compress_ancestors(v->ancestor->ancestor);
10398 if (v->ancestor->label->sdom->vertex < v->label->sdom->vertex) {
10399 v->label = v->ancestor->label;
10401 v->ancestor = v->ancestor->ancestor;
/* Steps 2+3 of Lengauer-Tarjan on the forward CFG: compute each
 * vertex's semidominator in decreasing DFS order, bucket vertices by
 * semidominator, and implicitly set idom via the bucket sweep.
 */
10405 static void compute_sdom(struct compile_state *state, struct sdom_block *sd)
10409 * for each v <= pred(w) {
10411 * if (semi[u] < semi[w] {
10412 * semi[w] = semi[u];
10415 * add w to bucket(vertex(semi[w]));
10416 * LINK(parent(w), w);
10419 * for each v <= bucket(parent(w)) {
10420 * delete v from bucket(parent(w));
10422 * dom(v) = (semi[u] < semi[v]) ? u : parent(w);
10425 for(i = state->last_vertex; i >= 2; i--) {
10426 struct sdom_block *v, *parent, *next;
10427 struct block_set *user;
10428 struct block *block;
10429 block = sd[i].block;
10430 parent = sd[i].parent;
/* Minimize semi over EVAL of every predecessor of block. */
10432 for(user = block->use; user; user = user->next) {
10433 struct sdom_block *v, *u;
10434 v = &sd[user->member->vertex];
10435 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10436 if (u->sdom->vertex < sd[i].sdom->vertex) {
10437 sd[i].sdom = u->sdom;
10440 sdom_block(sd[i].sdom, &sd[i]);
/* LINK: attach this vertex under its DFS parent. */
10441 sd[i].ancestor = parent;
/* Sweep parent's bucket, implicitly defining idom. */
10443 for(v = parent->sdominates; v; v = next) {
10444 struct sdom_block *u;
10445 next = v->sdom_next;
10447 u = (!v->ancestor) ? v : (compress_ancestors(v), v->label);
10448 v->block->idom = (u->sdom->vertex < v->sdom->vertex)?
10449 u->block : parent->block;
/* Post-dominator twin of compute_sdom: identical algorithm run on the
 * reverse graph, so "predecessors" are the left/right successors and
 * the result lands in block->ipdom.
 */
10454 static void compute_spdom(struct compile_state *state, struct sdom_block *sd)
10458 * for each v <= pred(w) {
10460 * if (semi[u] < semi[w] {
10461 * semi[w] = semi[u];
10464 * add w to bucket(vertex(semi[w]));
10465 * LINK(parent(w), w);
10468 * for each v <= bucket(parent(w)) {
10469 * delete v from bucket(parent(w));
10471 * dom(v) = (semi[u] < semi[v]) ? u : parent(w);
10474 for(i = state->last_vertex; i >= 2; i--) {
10475 struct sdom_block *u, *v, *parent, *next;
10476 struct block *block;
10477 block = sd[i].block;
10478 parent = sd[i].parent;
/* EVAL the left successor (a reverse-graph predecessor). */
10481 v = &sd[block->left->vertex];
10482 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10483 if (u->sdom->vertex < sd[i].sdom->vertex) {
10484 sd[i].sdom = u->sdom;
/* EVAL the right successor, skipping a duplicate edge. */
10487 if (block->right && (block->right != block->left)) {
10488 v = &sd[block->right->vertex];
10489 u = !(v->ancestor)? v : (compress_ancestors(v), v->label);
10490 if (u->sdom->vertex < sd[i].sdom->vertex) {
10491 sd[i].sdom = u->sdom;
10494 sdom_block(sd[i].sdom, &sd[i]);
10495 sd[i].ancestor = parent;
/* Bucket sweep implicitly defining ipdom. */
10497 for(v = parent->sdominates; v; v = next) {
10498 struct sdom_block *u;
10499 next = v->sdom_next;
10501 u = (!v->ancestor) ? v : (compress_ancestors(v), v->label);
10502 v->block->ipdom = (u->sdom->vertex < v->sdom->vertex)?
10503 u->block : parent->block;
/* Step 4: fix up the implicit immediate dominators in increasing DFS
 * order and record each edge with idom_block. The root (vertex 1) has
 * no immediate dominator.
 */
10508 static void compute_idom(struct compile_state *state, struct sdom_block *sd)
10511 for(i = 2; i <= state->last_vertex; i++) {
10512 struct block *block;
10513 block = sd[i].block;
10514 if (block->idom->vertex != sd[i].sdom->vertex) {
10515 block->idom = block->idom->idom;
10517 idom_block(block->idom, block);
10519 sd[1].block->idom = 0;
/* Step 4 for post-dominators: finalize each block's immediate
 * post-dominator in increasing DFS order; the exit root gets none.
 */
10522 static void compute_ipdom(struct compile_state *state, struct sdom_block *sd)
10525 for(i = 2; i <= state->last_vertex; i++) {
10526 struct block *block;
10527 block = sd[i].block;
10528 if (block->ipdom->vertex != sd[i].sdom->vertex) {
10529 block->ipdom = block->ipdom->ipdom;
10531 ipdom_block(block->ipdom, block);
10533 sd[1].block->ipdom = 0;
10537 * Every vertex of a flowgraph G = (V, E, r) except r has
10538 * a unique immediate dominator.
10539 * The edges {(idom(w), w) |w <= V - {r}} form a directed tree
10540 * rooted at r, called the dominator tree of G, such that
10541 * v dominates w if and only if v is a proper ancestor of w in
10542 * the dominator tree.
10545 * If v and w are vertices of G such that v <= w,
10546 * then any path from v to w must contain a common ancestor
10549 /* Lemma 2: For any vertex w != r, idom(w) -> w */
10550 /* Lemma 3: For any vertex w != r, sdom(w) -> w */
10551 /* Lemma 4: For any vertex w != r, idom(w) -> sdom(w) */
10553 * Let w != r. Suppose every u for which sdom(w) -> u -> w satisfies
10554 * sdom(u) >= sdom(w). Then idom(w) = sdom(w).
10557 * Let w != r and let u be a vertex for which sdom(u) is
10558 * minimum among vertices u satisfying sdom(w) -> u -> w.
10559 * Then sdom(u) <= sdom(w) and idom(u) = idom(w).
10561 /* Lemma 5: Let vertices v,w satisfy v -> w.
10562 * Then v -> idom(w) or idom(w) -> idom(v)
/* Compute block->idom for every block in the forward CFG using the
 * four-step Lengauer-Tarjan algorithm described in the comments below.
 */
10565 static void find_immediate_dominators(struct compile_state *state)
10567 struct sdom_block *sd;
10568 /* w->sdom = min{v| there is a path v = v0,v1,...,vk = w such that:
10569 * vi > w for (1 <= i <= k - 1}
10572 * For any vertex w != r.
10574 * {v|(v,w) <= E and v < w } U
10575 * {sdom(u) | u > w and there is an edge (v, w) such that u -> v})
10578 * Let w != r and let u be a vertex for which sdom(u) is
10579 * minimum among vertices u satisfying sdom(w) -> u -> w.
10581 * { sdom(w) if sdom(w) = sdom(u),
10583 * { idom(u) otherwise
10585 /* The algorithm consists of the following 4 steps.
10586 * Step 1. Carry out a depth-first search of the problem graph.
10587 * Number the vertices from 1 to N as they are reached during
10588 * the search. Initialize the variables used in succeeding steps.
10589 * Step 2. Compute the semidominators of all vertices by applying
10590 * theorem 4. Carry out the computation vertex by vertex in
10591 * decreasing order by number.
10592 * Step 3. Implicitly define the immediate dominator of each vertex
10593 * by applying Corollary 1.
10594 * Step 4. Explicitly define the immediate dominator of each vertex,
10595 * carrying out the computation vertex by vertex in increasing order
10598 /* Step 1 initialize the basic block information */
10599 sd = xcmalloc(sizeof(*sd) * (state->last_vertex + 1), "sdom_state");
10600 initialize_sdblock(sd, 0, state->first_block, 0);
10606 /* Step 2 compute the semidominators */
10607 /* Step 3 implicitly define the immediate dominator of each vertex */
10608 compute_sdom(state, sd);
10609 /* Step 4 explicitly define the immediate dominator of each vertex */
10610 compute_idom(state, sd);
/* Compute block->ipdom for every block by running the dominator
 * algorithm on the reverse CFG (see find_immediate_dominators).
 * A vertex-count mismatch means some block never reached the exit.
 */
10614 static void find_post_dominators(struct compile_state *state)
10616 struct sdom_block *sd;
10618 /* Step 1 initialize the basic block information */
10619 sd = xcmalloc(sizeof(*sd) * (state->last_vertex + 1), "sdom_state");
10621 vertex = setup_spdblocks(state, sd);
10622 if (vertex != state->last_vertex) {
10623 internal_error(state, 0, "missing %d blocks\n",
10624 state->last_vertex - vertex);
10627 /* Step 2 compute the semidominators */
10628 /* Step 3 implicitly define the immediate dominator of each vertex */
10629 compute_spdom(state, sd);
10630 /* Step 4 explicitly define the immediate dominator of each vertex */
10631 compute_ipdom(state, sd);
/* Compute the dominance frontier of block and (recursively) of every
 * block it immediately dominates, bottom-up over the dominator tree:
 * DF(X) = DFlocal(X) (CFG successors not idom'd by X)
 *       U DFup(Z)    (frontier members of dominated children Z
 *                     that X does not idom).
 */
10637 static void find_block_domf(struct compile_state *state, struct block *block)
10639 struct block *child;
10640 struct block_set *user;
10641 if (block->domfrontier != 0) {
10642 internal_error(state, block->first, "domfrontier present?");
/* Recurse into dominated children first (bottom-up). */
10644 for(user = block->idominates; user; user = user->next) {
10645 child = user->member;
10646 if (child->idom != block) {
10647 internal_error(state, block->first, "bad idom");
10649 find_block_domf(state, child);
/* DFlocal: successors this block does not immediately dominate. */
10651 if (block->left && block->left->idom != block) {
10652 domf_block(block, block->left);
10654 if (block->right && block->right->idom != block) {
10655 domf_block(block, block->right);
/* DFup: inherit children's frontier members we don't dominate. */
10657 for(user = block->idominates; user; user = user->next) {
10658 struct block_set *frontier;
10659 child = user->member;
10660 for(frontier = child->domfrontier; frontier; frontier = frontier->next) {
10661 if (frontier->member->idom != block) {
10662 domf_block(block, frontier->member);
/* Post-dominance-frontier twin of find_block_domf: walks the
 * post-dominator tree bottom-up, with predecessors (use edges)
 * playing the role of successors.
 */
10668 static void find_block_ipdomf(struct compile_state *state, struct block *block)
10670 struct block *child;
10671 struct block_set *user;
10672 if (block->ipdomfrontier != 0) {
10673 internal_error(state, block->first, "ipdomfrontier present?");
10675 for(user = block->ipdominates; user; user = user->next) {
10676 child = user->member;
10677 if (child->ipdom != block) {
10678 internal_error(state, block->first, "bad ipdom");
10680 find_block_ipdomf(state, child);
/* Local part: predecessors not immediately post-dominated by us. */
10682 for(user = block->use; user; user = user->next) {
10683 if (user->member->ipdom != block) {
10684 ipdomf_block(block, user->member);
/* Up part: inherit from post-dominated children. */
10687 for(user = block->ipdominates; user; user = user->next) {
10688 struct block_set *frontier;
10689 child = user->member;
10690 for(frontier = child->ipdomfrontier; frontier; frontier = frontier->next) {
10691 if (frontier->member->ipdom != block) {
10692 ipdomf_block(block, frontier->member);
/* walk_blocks callback: print the vertices this block immediately
 * dominates, sanity-checking each child's idom back-pointer.
 */
10698 static void print_dominated(
10699 struct compile_state *state, struct block *block, void *arg)
10701 struct block_set *user;
10704 fprintf(fp, "%d:", block->vertex);
10705 for(user = block->idominates; user; user = user->next) {
10706 fprintf(fp, " %d", user->member->vertex);
10707 if (user->member->idom != block) {
10708 internal_error(state, user->member->first, "bad idom");
/* Debug helper: dump the dominator tree (one line per block) to fp. */
10714 static void print_dominators(struct compile_state *state, FILE *fp)
10716 fprintf(fp, "\ndominates\n");
10717 walk_blocks(state, print_dominated, fp);
/* Recursively print each block's dominance frontier in DFS order.
 * The vertex counter guards against revisiting blocks; returns the
 * highest vertex printed so far.
 */
10721 static int print_frontiers(
10722 struct compile_state *state, struct block *block, int vertex)
10724 struct block_set *user;
10726 if (!block || (block->vertex != vertex + 1)) {
10731 printf("%d:", block->vertex);
10732 for(user = block->domfrontier; user; user = user->next) {
10733 printf(" %d", user->member->vertex);
10737 vertex = print_frontiers(state, block->left, vertex);
10738 vertex = print_frontiers(state, block->right, vertex);
/* Debug helper: dump every block's dominance frontier to stdout. */
10741 static void print_dominance_frontiers(struct compile_state *state)
10743 printf("\ndominance frontiers\n");
10744 print_frontiers(state, state->first_block, 0);
/* Driver for forward dominance analysis: immediate dominators, then
 * dominance frontiers, then optional debug dumps.
 */
10748 static void analyze_idominators(struct compile_state *state)
10750 /* Find the immediate dominators */
10751 find_immediate_dominators(state);
10752 /* Find the dominance frontiers */
10753 find_block_domf(state, state->first_block);
10754 /* If debugging, print what I have just found */
10755 if (state->debug & DEBUG_FDOMINATORS) {
10756 print_dominators(state, stdout);
10757 print_dominance_frontiers(state);
10758 print_control_flow(state);
/* walk_blocks callback: print the vertices this block immediately
 * post-dominates, sanity-checking each child's ipdom back-pointer.
 */
10764 static void print_ipdominated(
10765 struct compile_state *state, struct block *block, void *arg)
10767 struct block_set *user;
10770 fprintf(fp, "%d:", block->vertex);
10771 for(user = block->ipdominates; user; user = user->next) {
10772 fprintf(fp, " %d", user->member->vertex);
10773 if (user->member->ipdom != block) {
10774 internal_error(state, user->member->first, "bad ipdom");
/* Debug helper: dump the post-dominator tree to fp. */
10780 static void print_ipdominators(struct compile_state *state, FILE *fp)
10782 fprintf(fp, "\nipdominates\n");
10783 walk_blocks(state, print_ipdominated, fp);
/* Recursively print each block's post-dominance frontier, walking
 * predecessor (use) edges from the exit block; returns the highest
 * vertex printed so far.
 */
10786 static int print_pfrontiers(
10787 struct compile_state *state, struct block *block, int vertex)
10789 struct block_set *user;
10791 if (!block || (block->vertex != vertex + 1)) {
10796 printf("%d:", block->vertex);
10797 for(user = block->ipdomfrontier; user; user = user->next) {
10798 printf(" %d", user->member->vertex);
10801 for(user = block->use; user; user = user->next) {
10802 vertex = print_pfrontiers(state, user->member, vertex);
/* Debug helper: dump every block's post-dominance frontier to stdout. */
10806 static void print_ipdominance_frontiers(struct compile_state *state)
10808 printf("\nipdominance frontiers\n");
10809 print_pfrontiers(state, state->last_block, 0);
/* Driver for reverse dominance analysis: post-dominators, then
 * control dependencies (post-dominance frontiers), then debug dumps.
 */
10813 static void analyze_ipdominators(struct compile_state *state)
10815 /* Find the post dominators */
10816 find_post_dominators(state);
10817 /* Find the control dependencies (post dominance frontiers) */
10818 find_block_ipdomf(state, state->last_block);
10819 /* If debugging, print what I have just found */
10820 if (state->debug & DEBUG_RDOMINATORS) {
10821 print_ipdominators(state, stdout);
10822 print_ipdominance_frontiers(state);
10823 print_control_flow(state);
/* Does block dom dominate block sub? Walks sub's idom chain upward
 * looking for dom.
 */
10827 static int bdominates(struct compile_state *state,
10828 struct block *dom, struct block *sub)
10830 while(sub && (sub != dom)) {
/* Does triple dom dominate triple sub? Across different blocks this
 * reduces to block dominance; within one block, dom must appear
 * before sub in instruction order.
 */
10836 static int tdominates(struct compile_state *state,
10837 struct triple *dom, struct triple *sub)
10839 struct block *bdom, *bsub;
10841 bdom = block_of_triple(state, dom);
10842 bsub = block_of_triple(state, sub);
10843 if (bdom != bsub) {
10844 result = bdominates(state, bdom, bsub);
10847 struct triple *ins;
/* Same block: scan backwards from sub toward the block head. */
10849 while((ins != bsub->first) && (ins != dom)) {
10852 result = (ins == dom);
/* Build basic blocks, then both dominance analyses over them. */
10857 static void analyze_basic_blocks(struct compile_state *state)
10859 setup_basic_blocks(state);
10860 analyze_idominators(state);
10861 analyze_ipdominators(state);
/* Phi placement (Cytron et al.): for every used OP_ADECL variable,
 * seed a worklist with the blocks that write it, then insert one
 * OP_PHI per block in the iterated dominance frontier. has_already[]
 * and work[] are per-vertex iteration stamps so the arrays never need
 * clearing between variables.
 */
10864 static void insert_phi_operations(struct compile_state *state)
10867 struct triple *first;
10868 int *has_already, *work;
10869 struct block *work_list, **work_list_tail;
10871 struct triple *var, *vnext;
10873 size = sizeof(int) * (state->last_vertex + 1);
10874 has_already = xcmalloc(size, "has_already");
10875 work = xcmalloc(size, "work");
10878 first = state->first;
10879 for(var = first->next; var != first ; var = vnext) {
10880 struct block *block;
10881 struct triple_set *user, *unext;
/* Only stack variables that are actually used get phis. */
10883 if ((var->op != OP_ADECL) || !var->use) {
10888 work_list_tail = &work_list;
/* Seed the worklist with every block that writes this variable. */
10889 for(user = var->use; user; user = unext) {
10890 unext = user->next;
10891 if (user->member->op == OP_READ) {
10894 if (user->member->op != OP_WRITE) {
10895 internal_error(state, user->member,
10896 "bad variable access");
10898 block = user->member->u.block;
10900 warning(state, user->member, "dead code");
10901 release_triple(state, user->member);
10904 if (work[block->vertex] >= iter) {
10907 work[block->vertex] = iter;
10908 *work_list_tail = block;
10909 block->work_next = 0;
10910 work_list_tail = &block->work_next;
/* Iterated dominance frontier: walk the worklist, adding phis. */
10912 for(block = work_list; block; block = block->work_next) {
10913 struct block_set *df;
10914 for(df = block->domfrontier; df; df = df->next) {
10915 struct triple *phi;
10916 struct block *front;
10918 front = df->member;
10920 if (has_already[front->vertex] >= iter) {
10923 /* Count how many edges flow into this block */
10924 in_edges = front->users;
10925 /* Insert a phi function for this variable */
10926 get_occurance(var->occurance);
10927 phi = alloc_triple(
10928 state, OP_PHI, var->type, -1, in_edges,
10930 phi->u.block = front;
10931 MISC(phi, 0) = var;
10932 use_triple(var, phi);
10933 /* Insert the phi functions immediately after the label */
10934 insert_triple(state, front->first->next, phi);
10935 if (front->first == front->last) {
10936 front->last = front->first->next;
10938 has_already[front->vertex] = iter;
10939 transform_to_arch_instruction(state, phi);
10941 /* If necessary plan to visit the basic block */
10942 if (work[front->vertex] >= iter) {
10945 work[front->vertex] = iter;
10946 *work_list_tail = front;
10947 front->work_next = 0;
10948 work_list_tail = &front->work_next;
10952 xfree(has_already);
10958 struct triple_set *top;
/* Count the OP_ADECL (stack variable) triples in the program. */
10962 static int count_adecls(struct compile_state *state)
10964 struct triple *first, *ins;
10966 first = state->first;
10969 if (ins->op == OP_ADECL) {
10973 } while(ins != first);
/* Give each OP_ADECL a sequential id indexing into stacks[], saving
 * the original id in stacks[].orig_id so it can be restored later.
 */
10977 static void number_adecls(struct compile_state *state, struct stack *stacks)
10979 struct triple *first, *ins;
10981 first = state->first;
10984 if (ins->op == OP_ADECL) {
10986 stacks[adecls].orig_id = ins->id;
10990 } while(ins != first);
/* Undo number_adecls: restore each OP_ADECL's original id. */
10993 static void restore_adecls(struct compile_state *state, struct stack *stacks)
10995 struct triple *first, *ins;
10996 first = state->first;
10999 if (ins->op == OP_ADECL) {
11000 ins->id = stacks[ins->id].orig_id;
11003 } while(ins != first);
/* Return the value on top of var's renaming stack without popping it
 * (the variable's current SSA definition).
 */
11006 static struct triple *peek_triple(struct stack *stacks, struct triple *var)
11008 struct triple_set *head;
11009 struct triple *top_val;
11011 head = stacks[var->id].top;
11013 top_val = head->member;
/* Push val as the new current definition of var on its renaming stack. */
11018 static void push_triple(struct stack *stacks, struct triple *var, struct triple *val)
11020 struct triple_set *new;
11021 /* Append new to the head of the list,
11022 * it's the only sensible behavior for a stack.
11024 new = xcmalloc(sizeof(*new), "triple_set");
11026 new->next = stacks[var->id].top;
11027 stacks[var->id].top = new;
/* Remove one occurrence of oldval from var's renaming stack. */
11030 static void pop_triple(struct stack *stacks, struct triple *var, struct triple *oldval)
11032 struct triple_set *set, **ptr;
11033 ptr = &stacks[var->id].top;
11036 if (set->member == oldval) {
11039 /* Only free one occurrence from the stack */
/* After renaming parent, bind the phi inputs in successor block:
 * find which incoming edge corresponds to parent and store each
 * variable's current definition into that rhs slot of every OP_PHI.
 */
11052 static void fixup_block_phi_variables(
11053 struct compile_state *state, struct stack *stacks, struct block *parent, struct block *block)
11055 struct block_set *set;
11056 struct triple *ptr;
11058 if (!parent || !block)
11060 /* Find the edge I am coming in on */
11062 for(set = block->use; set; set = set->next, edge++) {
11063 if (set->member == parent) {
11068 internal_error(state, 0, "phi input is not on a control predecessor");
11070 for(ptr = block->first; ; ptr = ptr->next) {
11071 if (ptr->op == OP_PHI) {
11072 struct triple *var, *val, **slot;
11073 var = MISC(ptr, 0);
11075 internal_error(state, ptr, "no var???");
11077 /* Find the current value of the variable */
11078 val = peek_triple(stacks, var);
11079 if (val && ((val->op == OP_WRITE) || (val->op == OP_READ))) {
11080 internal_error(state, val, "bad value in phi");
11082 if (edge >= TRIPLE_RHS(ptr->sizes)) {
11083 internal_error(state, ptr, "edges > phi rhs");
11085 slot = &RHS(ptr, edge);
11086 if ((*slot != 0) && (*slot != val)) {
11087 internal_error(state, ptr, "phi already bound on this edge");
11090 use_triple(val, ptr);
11092 if (ptr == block->last) {
/* The SSA renaming pass over one block (recurses down the dominator
 * tree): replace OP_READs with the variable's current definition,
 * push new definitions for OP_WRITEs and OP_PHIs, fix up phi inputs
 * in the CFG successors, then pop this block's definitions on exit.
 */
11099 static void rename_block_variables(
11100 struct compile_state *state, struct stack *stacks, struct block *block)
11102 struct block_set *user;
11103 struct triple *ptr, *next, *last;
11107 last = block->first;
11109 for(ptr = block->first; !done; ptr = next) {
11111 if (ptr == block->last) {
11115 if (ptr->op == OP_READ) {
11116 struct triple *var, *val;
11118 unuse_triple(var, ptr);
11119 /* Find the current value of the variable */
11120 val = peek_triple(stacks, var);
11122 error(state, ptr, "variable used without being set");
11124 if ((val->op == OP_WRITE) || (val->op == OP_READ)) {
11125 internal_error(state, val, "bad value in read");
/* Forward the read's users to the reaching definition. */
11127 propogate_use(state, ptr, val);
11128 release_triple(state, ptr);
11132 if (ptr->op == OP_WRITE) {
11133 struct triple *var, *val, *tval;
11135 tval = val = RHS(ptr, 1);
11136 if ((val->op == OP_WRITE) || (val->op == OP_READ)) {
11137 internal_error(state, ptr, "bad value in write");
11139 /* Insert a copy if the types differ */
11140 if (!equiv_types(ptr->type, val->type)) {
11141 if (val->op == OP_INTCONST) {
11142 tval = pre_triple(state, ptr, OP_INTCONST, ptr->type, 0, 0);
11143 tval->u.cval = val->u.cval;
11146 tval = pre_triple(state, ptr, OP_COPY, ptr->type, val, 0);
11147 use_triple(val, tval);
11149 transform_to_arch_instruction(state, tval);
11150 unuse_triple(val, ptr);
11151 RHS(ptr, 1) = tval;
11152 use_triple(tval, ptr);
11154 propogate_use(state, ptr, tval);
11155 unuse_triple(var, ptr);
11156 /* Push OP_WRITE ptr->right onto a stack of variable uses */
11157 push_triple(stacks, var, tval);
11159 if (ptr->op == OP_PHI) {
11160 struct triple *var;
11161 var = MISC(ptr, 0);
11162 /* Push OP_PHI onto a stack of variable uses */
11163 push_triple(stacks, var, ptr);
11167 block->last = last;
11169 /* Fixup PHI functions in the cf successors */
11170 fixup_block_phi_variables(state, stacks, block, block->left);
11171 fixup_block_phi_variables(state, stacks, block, block->right);
11172 /* rename variables in the dominated nodes */
11173 for(user = block->idominates; user; user = user->next) {
11174 rename_block_variables(state, stacks, user->member);
11176 /* pop the renamed variable stack */
11177 last = block->first;
11179 for(ptr = block->first; !done ; ptr = next) {
11181 if (ptr == block->last) {
11184 if (ptr->op == OP_WRITE) {
11185 struct triple *var;
11187 /* Pop OP_WRITE ptr->right from the stack of variable uses */
11188 pop_triple(stacks, var, RHS(ptr, 1));
11189 release_triple(state, ptr);
11192 if (ptr->op == OP_PHI) {
11193 struct triple *var;
11194 var = MISC(ptr, 0);
/* Pop the OP_PHI definition from the stack of variable uses */
11196 pop_triple(stacks, var, ptr);
11200 block->last = last;
/* Driver for SSA renaming: allocate one renaming stack per OP_ADECL,
 * recurse over the dominator tree from the entry block, then restore
 * the original adecl ids.
 */
11203 static void rename_variables(struct compile_state *state)
11205 struct stack *stacks;
11208 /* Allocate stacks for the Variables */
11209 adecls = count_adecls(state);
11210 stacks = xcmalloc(sizeof(stacks[0])*(adecls + 1), "adecl stacks");
11212 /* Give each adecl a stack */
11213 number_adecls(state, stacks);
11215 /* Rename the variables */
11216 rename_block_variables(state, stacks, state->first_block);
11218 /* Remove the stacks from the adecls */
11219 restore_adecls(state, stacks);
/* After renaming, OP_ADECLs should only be referenced by phi MISC
 * slots: sever those references and release the declarations, then
 * recurse over the dominator tree.
 */
11223 static void prune_block_variables(struct compile_state *state,
11224 struct block *block)
11226 struct block_set *user;
11227 struct triple *next, *last, *ptr;
11229 last = block->first;
11231 for(ptr = block->first; !done; ptr = next) {
11233 if (ptr == block->last) {
11236 if (ptr->op == OP_ADECL) {
11237 struct triple_set *user, *next;
11238 for(user = ptr->use; user; user = next) {
11239 struct triple *use;
11241 use = user->member;
/* Any non-phi user at this point is a renaming bug. */
11242 if (use->op != OP_PHI) {
11243 internal_error(state, use, "decl still used");
11245 if (MISC(use, 0) != ptr) {
11246 internal_error(state, use, "bad phi use of decl");
11248 unuse_triple(ptr, use);
11251 release_triple(state, ptr);
11256 block->last = last;
11257 for(user = block->idominates; user; user = user->next) {
11258 prune_block_variables(state, user->member);
/* Liveness record for one phi function during dead-phi pruning. */
11262 struct phi_triple {
11263 struct triple *phi;  /* the OP_PHI this record tracks */
/* Mark phi live and transitively mark live every phi feeding its
 * rhs slots; live[] is indexed by the temporary phi id.
 */
11268 static void keep_phi(struct compile_state *state, struct phi_triple *live, struct triple *phi)
11270 struct triple **slot;
11272 if (live[phi->id].alive) {
11275 live[phi->id].alive = 1;
11276 zrhs = TRIPLE_RHS(phi->sizes);
11277 slot = &RHS(phi, 0);
11278 for(i = 0; i < zrhs; i++) {
11279 struct triple *used;
11281 if (used && (used->op == OP_PHI)) {
11282 keep_phi(state, live, used);
/* Dead-phi elimination: number all phis, mark live those reachable
 * from any non-phi user (transitively via keep_phi), delete the rest,
 * and report phi inputs that prove a variable was not set on all
 * paths to a use.
 */
11287 static void prune_unused_phis(struct compile_state *state)
11289 struct triple *first, *phi;
11290 struct phi_triple *live;
11293 /* Find the first instruction */
11294 first = state->first;
11296 /* Count how many phi functions I need to process */
11298 for(phi = first->next; phi != first; phi = phi->next) {
11299 if (phi->op == OP_PHI) {
11304 /* Mark them all dead */
11305 live = xcmalloc(sizeof(*live) * (phis + 1), "phi_triple");
11307 for(phi = first->next; phi != first; phi = phi->next) {
11308 if (phi->op != OP_PHI) {
11311 live[phis].alive = 0;
11312 live[phis].orig_id = phi->id;
11313 live[phis].phi = phi;
11318 /* Mark phis alive that are used by non phis */
11319 for(i = 0; i < phis; i++) {
11320 struct triple_set *set;
11321 for(set = live[i].phi->use; !live[i].alive && set; set = set->next) {
11322 if (set->member->op != OP_PHI) {
11323 keep_phi(state, live, live[i].phi);
11329 /* Delete the extraneous phis */
11330 for(i = 0; i < phis; i++) {
11331 struct triple **slot;
11333 if (!live[i].alive) {
11334 release_triple(state, live[i].phi);
11338 slot = &RHS(phi, 0);
11339 zrhs = TRIPLE_RHS(phi->sizes);
/* A live phi with a missing input means an uninitialized path. */
11340 for(j = 0; j < zrhs; j++) {
11342 error(state, phi, "variable not set on all paths to use");
/* Full SSA construction: place phis, rename variables, then remove
 * the now-dead declarations and unused phi functions.
 */
11349 static void transform_to_ssa_form(struct compile_state *state)
11351 insert_phi_operations(state);
11352 rename_variables(state);
11354 prune_block_variables(state, state->first_block);
11355 prune_unused_phis(state);
/* walk_blocks callback: zero the vertex of this block and all of its
 * CFG neighbors, priming the liveness marking in mark_live_block.
 */
11359 static void clear_vertex(
11360 struct compile_state *state, struct block *block, void *arg)
11362 /* Clear the current blocks vertex and the vertex of all
11363 * of the current blocks neighbors in case there are malformed
11364 * blocks with no instructions at this point.
11366 struct block_set *user;
11369 block->left->vertex = 0;
11371 if (block->right) {
11372 block->right->vertex = 0;
11374 for(user = block->use; user; user = user->next) {
11375 user->member->vertex = 0;
/* Flood-fill reachable blocks from the entry, assigning fresh nonzero
 * vertex numbers; blocks left with vertex 0 are dead. Follows branch
 * targets for branching blocks, otherwise falls through to the next
 * instruction's block.
 */
11379 static void mark_live_block(
11380 struct compile_state *state, struct block *block, int *next_vertex)
11382 /* See if this is a block that has not been marked */
11383 if (block->vertex != 0) {
11386 block->vertex = *next_vertex;
11388 if (triple_is_branch(state, block->last)) {
11389 struct triple **targ;
11390 targ = triple_targ(state, block->last, 0);
11391 for(; targ; targ = triple_targ(state, block->last, targ)) {
11395 if (!triple_stores_block(state, *targ)) {
11396 internal_error(state, 0, "bad targ");
11398 mark_live_block(state, (*targ)->u.block, next_vertex);
/* Fall-through successor (unless we wrapped to the list head). */
11401 else if (block->last->next != state->first) {
11402 struct triple *ins;
11403 ins = block->last->next;
11404 if (!triple_stores_block(state, ins)) {
11405 internal_error(state, 0, "bad block start");
11407 mark_live_block(state, ins->u.block, next_vertex);
/* Convert the program out of SSA form.  Each OP_PHI is replaced by an
 * OP_ADECL variable, OP_WRITE moves inserted on the phi's incoming
 * edges, and OP_READ triples inserted at the former phi uses.
 * NOTE(review): interior lines are elided in this extract; comments
 * describe only the visible code.
 */
11411 static void transform_from_ssa_form(struct compile_state *state)
11413 /* To get out of ssa form we insert moves on the incoming
11414 * edges to blocks containing phi functions.
11416 struct triple *first;
11417 struct triple *phi, *var, *next;
11420 /* Walk the control flow to see which blocks remain alive */
11421 walk_blocks(state, clear_vertex, 0);
11423 mark_live_block(state, state->first_block, &next_vertex);
11426 fprintf(stderr, "@ %s:%d\n", __FILE__, __LINE__);
11427 print_blocks(state, stderr);
11430 /* Walk all of the operations to find the phi functions */
11431 first = state->first;
11432 for(phi = first->next; phi != first ; phi = next) {
11433 struct block_set *set;
11434 struct block *block;
11435 struct triple **slot;
11436 struct triple *var;
11437 struct triple_set *use, *use_next;
11440 if (phi->op != OP_PHI) {
11444 block = phi->u.block;
11445 slot = &RHS(phi, 0);
11447 /* If this phi is in a dead block just forget it */
11448 if (block->vertex == 0) {
11449 release_triple(state, phi);
11453 /* Forget uses from code in dead blocks */
11454 for(use = phi->use; use; use = use_next) {
11455 struct block *ublock;
11456 struct triple **expr;
11457 use_next = use->next;
11458 ublock = block_of_triple(state, use->member);
/* NOTE(review): ublock is dereferenced here without a null check;
 * presumably block_of_triple cannot return 0 for a phi user — confirm. */
11459 if ((use->member == phi) || (ublock->vertex != 0)) {
11462 expr = triple_rhs(state, use->member, 0);
11463 for(; expr; expr = triple_rhs(state, use->member, expr)) {
11464 if (*expr == phi) {
11468 unuse_triple(phi, use->member);
11470 /* A variable to replace the phi function */
11471 var = post_triple(state, phi, OP_ADECL, phi->type, 0,0);
11473 /* Replaces use of phi with var */
11474 propogate_use(state, phi, var);
11476 /* Walk all of the incoming edges/blocks and insert moves.
11479 for(edge = 0, set = block->use; set; set = set->next, edge++) {
11480 struct block *eblock, *vblock;
11481 struct triple *move;
11482 struct triple *val, *base;
11483 eblock = set->member;
11486 unuse_triple(val, phi);
11487 vblock = block_of_triple(state, val);
11489 /* If we don't have a value that belongs in an OP_WRITE
11492 if (!val || (val == &zero_triple) || (val == phi) ||
11493 (!vblock) || (vblock->vertex == 0)) {
11497 /* If the value occurs in a dead block see if a replacement
11498 * block can be found.
11500 while(eblock && (eblock->vertex == 0)) {
11501 eblock = eblock->idom;
11503 /* If not continue on with the next value. */
11504 if (!eblock || (eblock->vertex == 0)) {
11508 /* If we have an empty incoming block ignore it. */
11509 if (!eblock->first) {
11510 internal_error(state, 0, "empty block?");
11513 /* Make certain the write is placed in the edge block... */
11514 base = eblock->first;
11515 if (block_of_triple(state, val) == eblock) {
11518 move = post_triple(state, base, OP_WRITE, var->type, var, val);
11519 use_triple(val, move);
11520 use_triple(var, move);
11523 /* If var is not used free it */
11525 free_triple(state, var);
11528 /* Release the phi function */
11529 release_triple(state, phi);
11532 /* Walk all of the operations to find the adecls */
11533 for(var = first->next; var != first ; var = var->next) {
11534 struct triple_set *use, *use_next;
11535 if (var->op != OP_ADECL) {
11539 /* Walk through all of the rhs uses of var and
11540 * replace them with read of var.
11542 for(use = var->use; use; use = use_next) {
11543 struct triple *read, *user;
11544 struct triple **slot;
11546 use_next = use->next;
11547 user = use->member;
11549 /* Generate a read of var */
11550 read = pre_triple(state, user, OP_READ, var->type, var, 0);
11551 use_triple(var, read);
11553 /* Find the rhs uses and see if they need to be replaced */
11555 zrhs = TRIPLE_RHS(user->sizes);
11556 slot = &RHS(user, 0);
11557 for(i = 0; i < zrhs; i++) {
/* Skip the destination slot of an OP_WRITE: slot 0 there names the
 * variable written, not a value read. */
11558 if ((slot[i] == var) &&
11559 ((i != 0) || (user->op != OP_WRITE)))
11565 /* If we did use it cleanup the uses */
11567 unuse_triple(var, user);
11568 use_triple(read, user);
11570 /* If we didn't use it release the extra triple */
11572 release_triple(state, read);
/* Debug helper: print the current source location and dump all basic
 * blocks.  Relies on a variable named `state` being in scope at the
 * expansion site. */
11579 #define HI() do { fprintf(stderr, "@ %s:%d\n", __FILE__, __LINE__); print_blocks(state, stderr); } while (0)
/* Drop out of SSA form, recompute the CFG and dominators, and then
 * re-enter SSA form (phi insertion, renaming, pruning).  Used after a
 * transformation that invalidated the existing SSA properties. */
11583 static void rebuild_ssa_form(struct compile_state *state)
11586 transform_from_ssa_form(state);
11588 free_basic_blocks(state);
11589 analyze_basic_blocks(state);
11591 insert_phi_operations(state);
11593 rename_variables(state);
11596 prune_block_variables(state, state->first_block);
11598 prune_unused_phis(state);
11604 * Register conflict resolution
11605 * =========================================================
/* Compute the register constraint for the value produced by def:
 * start from the instruction's own lhs constraint and intersect it
 * with the rhs constraints of every user.  A hard-register
 * disagreement or an empty register-class intersection is an
 * internal error. */
11608 static struct reg_info find_def_color(
11609 struct compile_state *state, struct triple *def)
11611 struct triple_set *set;
11612 struct reg_info info;
11613 info.reg = REG_UNSET;
11615 if (!triple_is_def(state, def)) {
11618 info = arch_reg_lhs(state, def, 0);
/* reg values >= MAX_REGISTERS are virtual/template registers, not
 * architectural ones; treat them as unconstrained here. */
11619 if (info.reg >= MAX_REGISTERS) {
11620 info.reg = REG_UNSET;
11622 for(set = def->use; set; set = set->next) {
11623 struct reg_info tinfo;
11625 i = find_rhs_use(state, set->member, def);
11629 tinfo = arch_reg_rhs(state, set->member, i);
11630 if (tinfo.reg >= MAX_REGISTERS) {
11631 tinfo.reg = REG_UNSET;
11633 if ((tinfo.reg != REG_UNSET) &&
11634 (info.reg != REG_UNSET) &&
11635 (tinfo.reg != info.reg)) {
11636 internal_error(state, def, "register conflict");
11638 if ((info.regcm & tinfo.regcm) == 0) {
11639 internal_error(state, def, "regcm conflict %x & %x == 0",
11640 info.regcm, tinfo.regcm);
11642 if (info.reg == REG_UNSET) {
11643 info.reg = tinfo.reg;
11645 info.regcm &= tinfo.regcm;
11647 if (info.reg >= MAX_REGISTERS) {
11648 internal_error(state, def, "register out of range");
11653 static struct reg_info find_lhs_pre_color(
11654 struct compile_state *state, struct triple *ins, int index)
11656 struct reg_info info;
11658 zrhs = TRIPLE_RHS(ins->sizes);
11659 zlhs = TRIPLE_LHS(ins->sizes);
11660 if (!zlhs && triple_is_def(state, ins)) {
11663 if (index >= zlhs) {
11664 internal_error(state, ins, "Bad lhs %d", index);
11666 info = arch_reg_lhs(state, ins, index);
11667 for(i = 0; i < zrhs; i++) {
11668 struct reg_info rinfo;
11669 rinfo = arch_reg_rhs(state, ins, i);
11670 if ((info.reg == rinfo.reg) &&
11671 (rinfo.reg >= MAX_REGISTERS)) {
11672 struct reg_info tinfo;
11673 tinfo = find_lhs_pre_color(state, RHS(ins, index), 0);
11674 info.reg = tinfo.reg;
11675 info.regcm &= tinfo.regcm;
11679 if (info.reg >= MAX_REGISTERS) {
11680 info.reg = REG_UNSET;
/* Forward declaration: find_rhs_post_color and find_lhs_post_color
 * are mutually recursive. */
11685 static struct reg_info find_rhs_post_color(
11686 struct compile_state *state, struct triple *ins, int index);
/* Compute the post-pass register constraint of lhs `index` of ins by
 * intersecting the lhs's own constraint with the rhs constraints of
 * every user of that lhs value.  Conflicting hard registers or an
 * empty class intersection are internal errors. */
11688 static struct reg_info find_lhs_post_color(
11689 struct compile_state *state, struct triple *ins, int index)
11691 struct triple_set *set;
11692 struct reg_info info;
11693 struct triple *lhs;
11694 #if DEBUG_TRIPLE_COLOR
11695 fprintf(stderr, "find_lhs_post_color(%p, %d)\n",
/* index 0 of a plain definition is the triple itself; explicit lhs
 * slots are fetched with LHS(). */
11698 if ((index == 0) && triple_is_def(state, ins)) {
11701 else if (index < TRIPLE_LHS(ins->sizes)) {
11702 lhs = LHS(ins, index);
11705 internal_error(state, ins, "Bad lhs %d", index);
11708 info = arch_reg_lhs(state, ins, index);
11709 if (info.reg >= MAX_REGISTERS) {
11710 info.reg = REG_UNSET;
11712 for(set = lhs->use; set; set = set->next) {
11713 struct reg_info rinfo;
11714 struct triple *user;
11716 user = set->member;
11717 zrhs = TRIPLE_RHS(user->sizes);
11718 for(i = 0; i < zrhs; i++) {
11719 if (RHS(user, i) != lhs) {
11722 rinfo = find_rhs_post_color(state, user, i);
11723 if ((info.reg != REG_UNSET) &&
11724 (rinfo.reg != REG_UNSET) &&
11725 (info.reg != rinfo.reg)) {
11726 internal_error(state, ins, "register conflict");
11728 if ((info.regcm & rinfo.regcm) == 0) {
11729 internal_error(state, ins, "regcm conflict %x & %x == 0",
11730 info.regcm, rinfo.regcm);
11732 if (info.reg == REG_UNSET) {
11733 info.reg = rinfo.reg;
11735 info.regcm &= rinfo.regcm;
11738 #if DEBUG_TRIPLE_COLOR
11739 fprintf(stderr, "find_lhs_post_color(%p, %d) -> ( %d, %x)\n",
11740 ins, index, info.reg, info.regcm);
/* Compute the post-pass register constraint of rhs `index` of ins.
 * When that rhs is tied to an lhs slot (same virtual register
 * template), the constraint is inherited from the lhs's post color.
 * NOTE(review): the assignment initializing `info` is elided in this
 * extract (between lines 11756 and 11760). */
11745 static struct reg_info find_rhs_post_color(
11746 struct compile_state *state, struct triple *ins, int index)
11748 struct reg_info info, rinfo;
11750 #if DEBUG_TRIPLE_COLOR
11751 fprintf(stderr, "find_rhs_post_color(%p, %d)\n",
11754 rinfo = arch_reg_rhs(state, ins, index);
11755 zlhs = TRIPLE_LHS(ins->sizes);
11756 if (!zlhs && triple_is_def(state, ins)) {
11760 if (info.reg >= MAX_REGISTERS) {
11761 info.reg = REG_UNSET;
11763 for(i = 0; i < zlhs; i++) {
11764 struct reg_info linfo;
11765 linfo = arch_reg_lhs(state, ins, i);
/* Tied operand: same virtual register template on lhs i and rhs
 * index; propagate the lhs post color. */
11766 if ((linfo.reg == rinfo.reg) &&
11767 (linfo.reg >= MAX_REGISTERS)) {
11768 struct reg_info tinfo;
11769 tinfo = find_lhs_post_color(state, ins, i);
11770 if (tinfo.reg >= MAX_REGISTERS) {
11771 tinfo.reg = REG_UNSET;
11773 info.regcm &= linfo.regcm;
11774 info.regcm &= tinfo.regcm;
11775 if (info.reg != REG_UNSET) {
11776 internal_error(state, ins, "register conflict");
11778 if (info.regcm == 0) {
11779 internal_error(state, ins, "regcm conflict");
11781 info.reg = tinfo.reg;
11784 #if DEBUG_TRIPLE_COLOR
11785 fprintf(stderr, "find_rhs_post_color(%p, %d) -> ( %d, %x)\n",
11786 ins, index, info.reg, info.regcm);
/* Combine the pre- and post-pass constraints of lhs `index`:
 * intersect the class masks and take whichever hard register is set
 * (they must agree when both are set). */
11791 static struct reg_info find_lhs_color(
11792 struct compile_state *state, struct triple *ins, int index)
11794 struct reg_info pre, post, info;
11795 #if DEBUG_TRIPLE_COLOR
11796 fprintf(stderr, "find_lhs_color(%p, %d)\n",
11799 pre = find_lhs_pre_color(state, ins, index);
11800 post = find_lhs_post_color(state, ins, index);
11801 if ((pre.reg != post.reg) &&
11802 (pre.reg != REG_UNSET) &&
11803 (post.reg != REG_UNSET)) {
11804 internal_error(state, ins, "register conflict");
11806 info.regcm = pre.regcm & post.regcm;
11807 info.reg = pre.reg;
11808 if (info.reg == REG_UNSET) {
11809 info.reg = post.reg;
11811 #if DEBUG_TRIPLE_COLOR
11812 fprintf(stderr, "find_lhs_color(%p, %d) -> ( %d, %x) ... (%d, %x) (%d, %x)\n",
11813 ins, index, info.reg, info.regcm,
11814 pre.reg, pre.regcm, post.reg, post.regcm);
/* Insert an OP_COPY immediately after ins and retarget ins's users to
 * the copy.  Users whose operand needs no register stay on the
 * original when ins itself needs none.  Returns the new copy triple
 * (already lowered to an arch instruction). */
11819 static struct triple *post_copy(struct compile_state *state, struct triple *ins)
11821 struct triple_set *entry, *next;
11822 struct triple *out;
11823 struct reg_info info, rinfo;
11825 info = arch_reg_lhs(state, ins, 0);
11826 out = post_triple(state, ins, OP_COPY, ins->type, ins, 0);
11827 use_triple(RHS(out, 0), out);
11828 /* Get the users of ins to use out instead */
11829 for(entry = ins->use; entry; entry = next) {
11831 next = entry->next;
/* The freshly inserted copy is itself a user of ins; skip it. */
11832 if (entry->member == out) {
11835 i = find_rhs_use(state, entry->member, ins);
11839 rinfo = arch_reg_rhs(state, entry->member, i);
11840 if ((info.reg == REG_UNNEEDED) && (rinfo.reg == REG_UNNEEDED)) {
11843 replace_rhs_use(state, ins, out, entry->member);
11845 transform_to_arch_instruction(state, out);
/* Insert an OP_COPY of the given type before rhs operand `index` of
 * ins, rewiring ins to read the copy instead of the original operand.
 * Refuses to operate on phis (their operands belong to edges, not to
 * the instruction stream).  Returns the new copy triple. */
11849 static struct triple *typed_pre_copy(
11850 struct compile_state *state, struct type *type, struct triple *ins, int index)
11852 /* Carefully insert enough operations so that I can
11853 * enter any operation with a GPR32.
11856 struct triple **expr;
11858 struct reg_info info;
11859 if (ins->op == OP_PHI) {
11860 internal_error(state, ins, "pre_copy on a phi?");
11862 classes = arch_type_to_regcm(state, type);
11863 info = arch_reg_rhs(state, ins, index);
11864 expr = &RHS(ins, index);
/* The requested type must overlap the register classes the operand
 * slot accepts, otherwise no copy can satisfy both. */
11865 if ((info.regcm & classes) == 0) {
11866 internal_error(state, ins, "pre_copy with no register classes");
11868 in = pre_triple(state, ins, OP_COPY, type, *expr, 0);
11869 unuse_triple(*expr, ins);
11871 use_triple(RHS(in, 0), in);
11872 use_triple(in, ins);
11873 transform_to_arch_instruction(state, in);
11877 static struct triple *pre_copy(
11878 struct compile_state *state, struct triple *ins, int index)
11880 return typed_pre_copy(state, RHS(ins, index)->type, ins, index);
/* For every phi, insert an OP_COPY of each incoming value in the
 * corresponding predecessor block (walking up the dominator tree when
 * needed) and a copy of the phi result itself, so later passes never
 * face a phi feeding a mandatory live-range join directly.
 * NOTE(review): interior lines are elided in this extract. */
11884 static void insert_copies_to_phi(struct compile_state *state)
11886 /* To get out of ssa form we insert moves on the incoming
11887 * edges to blocks containing phi functions.
11889 struct triple *first;
11890 struct triple *phi;
11892 /* Walk all of the operations to find the phi functions */
11893 first = state->first;
11894 for(phi = first->next; phi != first ; phi = phi->next) {
11895 struct block_set *set;
11896 struct block *block;
11897 struct triple **slot, *copy;
11899 if (phi->op != OP_PHI) {
11902 phi->id |= TRIPLE_FLAG_POST_SPLIT;
11903 block = phi->u.block;
11904 slot = &RHS(phi, 0);
11905 /* Phi's that feed into mandatory live range joins
11906 * cause nasty complications. Insert a copy of
11907 * the phi value so I never have to deal with
11908 * that in the rest of the code.
11910 copy = post_copy(state, phi);
11911 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
11912 /* Walk all of the incoming edges/blocks and insert moves.
11914 for(edge = 0, set = block->use; set; set = set->next, edge++) {
11915 struct block *eblock;
11916 struct triple *move;
11917 struct triple *val;
11918 struct triple *ptr;
11919 eblock = set->member;
11926 get_occurance(val->occurance);
11927 move = build_triple(state, OP_COPY, phi->type, val, 0,
11929 move->u.block = eblock;
11930 move->id |= TRIPLE_FLAG_PRE_SPLIT;
11931 use_triple(val, move);
11934 unuse_triple(val, phi);
11935 use_triple(move, phi);
11937 /* Walk up the dominator tree until I have found the appropriate block */
11938 while(eblock && !tdominates(state, val, eblock->last)) {
11939 eblock = eblock->idom;
11942 internal_error(state, phi, "Cannot find block dominated by %p",
11946 /* Walk through the block backwards to find
11947 * an appropriate location for the OP_COPY.
11949 for(ptr = eblock->last; ptr != eblock->first; ptr = ptr->prev) {
11950 struct triple **expr;
11951 if ((ptr == phi) || (ptr == val)) {
11954 expr = triple_rhs(state, ptr, 0);
11955 for(;expr; expr = triple_rhs(state, ptr, expr)) {
11956 if ((*expr) == phi) {
/* The copy must execute before the block's branch; hitting one here
 * means no legal insertion point exists. */
11962 if (triple_is_branch(state, ptr)) {
11963 internal_error(state, ptr,
11964 "Could not insert write to phi");
11966 insert_triple(state, ptr->next, move);
11967 if (eblock->last == ptr) {
11968 eblock->last = move;
11970 transform_to_arch_instruction(state, move);
/* A singly linked set of triples, used for block live-in/live-out
 * sets; `new` optionally records a replacement triple. */
11975 struct triple_reg_set {
11976 struct triple_reg_set *next;
11977 struct triple *member;
11978 struct triple *new;
/* Per-block register-allocation data: the block plus its live-in and
 * live-out triple sets.
 * NOTE(review): the struct tag line (reg_block) is elided in this
 * extract. */
11982 struct block *block;
11983 struct triple_reg_set *in;
11984 struct triple_reg_set *out;
/* Add member (with optional replacement new_member) to the set at
 * *head unless it is already present.  Returns nonzero when the set
 * changed, based on the visible duplicate check. */
11988 static int do_triple_set(struct triple_reg_set **head,
11989 struct triple *member, struct triple *new_member)
11991 struct triple_reg_set **ptr, *new;
11996 if ((*ptr)->member == member) {
11999 ptr = &(*ptr)->next;
12001 new = xcmalloc(sizeof(*new), "triple_set");
12002 new->member = member;
12003 new->new = new_member;
/* Remove member from the set at *head, unlinking (and, per the elided
 * lines, presumably freeing) its entry if found. */
12009 static void do_triple_unset(struct triple_reg_set **head, struct triple *member)
12011 struct triple_reg_set *entry, **ptr;
12015 if (entry->member == member) {
12016 *ptr = entry->next;
12021 ptr = &entry->next;
/* Thin wrappers over do_triple_set/do_triple_unset for a reg_block's
 * live-in and live-out sets. */
12026 static int in_triple(struct reg_block *rb, struct triple *in)
12028 return do_triple_set(&rb->in, in, 0);
12030 static void unin_triple(struct reg_block *rb, struct triple *unin)
12032 do_triple_unset(&rb->in, unin);
12035 static int out_triple(struct reg_block *rb, struct triple *out)
12037 return do_triple_set(&rb->out, out, 0);
12039 static void unout_triple(struct reg_block *rb, struct triple *unout)
12041 do_triple_unset(&rb->out, unout);
/* Recursively number blocks (walking predecessor edges) and bind each
 * to its reg_block slot.  Returns the highest vertex assigned so far;
 * already-visited blocks are skipped. */
12044 static int initialize_regblock(struct reg_block *blocks,
12045 struct block *block, int vertex)
12047 struct block_set *user;
12048 if (!block || (blocks[block->vertex].block == block)) {
12052 /* Renumber the blocks in a convenient fashion */
12053 block->vertex = vertex;
12054 blocks[vertex].block = block;
12055 blocks[vertex].vertex = vertex;
12056 for(user = block->use; user; user = user->next) {
12057 vertex = initialize_regblock(blocks, user->member, vertex);
/* Propagate phi inputs across the edge rb -> suc: the phi operand for
 * this edge joins rb's output set, and also rb's input set when rb
 * does not define it.  Returns nonzero if either set changed. */
12062 static int phi_in(struct compile_state *state, struct reg_block *blocks,
12063 struct reg_block *rb, struct block *suc)
12065 /* Read the conditional input set of a successor block
12066 * (i.e. the input to the phi nodes) and place it in the
12067 * current blocks output set.
12069 struct block_set *set;
12070 struct triple *ptr;
12074 /* Find the edge I am coming in on */
12075 for(edge = 0, set = suc->use; set; set = set->next, edge++) {
12076 if (set->member == rb->block) {
12081 internal_error(state, 0, "Not coming on a control edge?");
12083 for(done = 0, ptr = suc->first; !done; ptr = ptr->next) {
12084 struct triple **slot, *expr, *ptr2;
12085 int out_change, done2;
12086 done = (ptr == suc->last);
12087 if (ptr->op != OP_PHI) {
12090 slot = &RHS(ptr, 0);
12092 out_change = out_triple(rb, expr);
12096 /* If we don't define the variable also place it
12097 * in the current blocks input set.
12099 ptr2 = rb->block->first;
12100 for(done2 = 0; !done2; ptr2 = ptr2->next) {
12101 if (ptr2 == expr) {
12104 done2 = (ptr2 == rb->block->last);
12109 change |= in_triple(rb, expr);
/* Standard backward dataflow step: fold successor suc's live-in set
 * into rb's live-out set, and into rb's live-in set for values rb
 * does not define; finish with the phi-edge contribution.  Returns
 * nonzero if any set changed. */
12114 static int reg_in(struct compile_state *state, struct reg_block *blocks,
12115 struct reg_block *rb, struct block *suc)
12117 struct triple_reg_set *in_set;
12120 /* Read the input set of a successor block
12121 * and place it in the current blocks output set.
12123 in_set = blocks[suc->vertex].in;
12124 for(; in_set; in_set = in_set->next) {
12125 int out_change, done;
12126 struct triple *first, *last, *ptr;
12127 out_change = out_triple(rb, in_set->member);
12131 /* If we don't define the variable also place it
12132 * in the current blocks input set.
12134 first = rb->block->first;
12135 last = rb->block->last;
12137 for(ptr = first; !done; ptr = ptr->next) {
12138 if (ptr == in_set->member) {
12141 done = (ptr == last);
12146 change |= in_triple(rb, in_set->member);
12148 change |= phi_in(state, blocks, rb, suc);
/* Add to rb's live-in set every value the block uses before (or
 * without) defining it, scanning the block backwards.  Phi operands
 * are excluded — they are edge-dependent and handled by phi_in.
 * Returns nonzero if the set changed. */
12153 static int use_in(struct compile_state *state, struct reg_block *rb)
12155 /* Find the variables we use but don't define and add
12156 * it to the current blocks input set.
12158 #warning "FIXME is this O(N^2) algorithm bad?"
12159 struct block *block;
12160 struct triple *ptr;
12165 for(done = 0, ptr = block->last; !done; ptr = ptr->prev) {
12166 struct triple **expr;
12167 done = (ptr == block->first);
12168 /* The variable a phi function uses depends on the
12169 * control flow, and is handled in phi_in, not
12172 if (ptr->op == OP_PHI) {
12175 expr = triple_rhs(state, ptr, 0);
12176 for(;expr; expr = triple_rhs(state, ptr, expr)) {
12177 struct triple *rhs, *test;
12183 /* See if rhs is defined in this block */
12184 for(tdone = 0, test = ptr; !tdone; test = test->prev) {
12185 tdone = (test == block->first);
12191 /* If I still have a valid rhs add it to in */
12192 change |= in_triple(rb, rhs);
/* Iterative backward liveness analysis: allocate one reg_block per
 * basic block, then repeat the reg_in/use_in transfer over every
 * block until (per the elided loop control) a fixed point is reached.
 * Returns the per-block live-in/live-out array; the caller frees it
 * with free_variable_lifetimes. */
12198 static struct reg_block *compute_variable_lifetimes(
12199 struct compile_state *state)
12201 struct reg_block *blocks;
12204 sizeof(*blocks)*(state->last_vertex + 1), "reg_block");
12205 initialize_regblock(blocks, state->last_block, 0);
12209 for(i = 1; i <= state->last_vertex; i++) {
12210 struct reg_block *rb;
12212 /* Add the left successor's input set to in */
12213 if (rb->block->left) {
12214 change |= reg_in(state, blocks, rb, rb->block->left);
12216 /* Add the right successor's input set to in */
12217 if ((rb->block->right) &&
12218 (rb->block->right != rb->block->left)) {
12219 change |= reg_in(state, blocks, rb, rb->block->right);
12221 /* Add use to in... */
12222 change |= use_in(state, rb);
/* Release every block's live-in and live-out set (and, per the elided
 * tail, presumably the blocks array itself). */
12228 static void free_variable_lifetimes(
12229 struct compile_state *state, struct reg_block *blocks)
12232 /* free in_set && out_set on each block */
12233 for(i = 1; i <= state->last_vertex; i++) {
12234 struct triple_reg_set *entry, *next;
12235 struct reg_block *rb;
12237 for(entry = rb->in; entry ; entry = next) {
12238 next = entry->next;
12239 do_triple_unset(&rb->in, entry->member);
12241 for(entry = rb->out; entry; entry = next) {
12242 next = entry->next;
12243 do_triple_unset(&rb->out, entry->member);
/* Callback invoked by walk_variable_lifetimes once per instruction,
 * with the set of values live at that instruction. */
12250 typedef void (*wvl_cb_t)(
12251 struct compile_state *state,
12252 struct reg_block *blocks, struct triple_reg_set *live,
12253 struct reg_block *rb, struct triple *ins, void *arg);
/* Walk every block backwards from its live-out set, maintaining the
 * running live set and invoking cb at each instruction; the live set
 * is torn down again after each block. */
12255 static void walk_variable_lifetimes(struct compile_state *state,
12256 struct reg_block *blocks, wvl_cb_t cb, void *arg)
12260 for(i = 1; i <= state->last_vertex; i++) {
12261 struct triple_reg_set *live;
12262 struct triple_reg_set *entry, *next;
12263 struct triple *ptr, *prev;
12264 struct reg_block *rb;
12265 struct block *block;
12268 /* Get the blocks */
12272 /* Copy out into live */
12274 for(entry = rb->out; entry; entry = next) {
12275 next = entry->next;
12276 do_triple_set(&live, entry->member, entry->new);
12278 /* Walk through the basic block calculating live */
12279 for(done = 0, ptr = block->last; !done; ptr = prev) {
12280 struct triple **expr;
12283 done = (ptr == block->first);
12285 /* Ensure the current definition is in live */
12286 if (triple_is_def(state, ptr)) {
12287 do_triple_set(&live, ptr, 0);
12290 /* Inform the callback function of what is
12293 cb(state, blocks, live, rb, ptr, arg);
12295 /* Remove the current definition from live */
12296 do_triple_unset(&live, ptr);
12298 /* Add the current uses to live.
12300 * It is safe to skip phi functions because they do
12301 * not have any block local uses, and the block
12302 * output sets already properly account for what
12303 * control flow dependent uses phi functions do have.
12305 if (ptr->op == OP_PHI) {
12308 expr = triple_rhs(state, ptr, 0);
12309 for(;expr; expr = triple_rhs(state, ptr, expr)) {
12310 /* If the triple is not a definition skip it. */
12311 if (!*expr || !triple_is_def(state, *expr)) {
12314 do_triple_set(&live, *expr, 0);
12318 for(entry = live; entry; entry = next) {
12319 next = entry->next;
12320 do_triple_unset(&live, entry->member);
/* Count the triples on the circular instruction list starting at
 * state->first.  NOTE(review): the counter updates are elided in
 * this extract. */
12325 static int count_triples(struct compile_state *state)
12327 struct triple *first, *ins;
12329 first = state->first;
12334 } while (ins != first);
/* Worklist node for dead-code elimination: one per triple, linked
 * into a pending-work chain; flags hold TRIPLE_FLAG_ALIVE. */
12339 struct dead_triple {
12340 struct triple *triple;
12341 struct dead_triple *work_next;
12342 struct block *block;
/* Marks a dead_triple whose instruction has been proven live. */
12345 #define TRIPLE_FLAG_ALIVE 1
/* Mark the triple at *expr alive and, if it was not already alive and
 * is not queued, append its dead_triple node to the worklist. */
12349 static void awaken(
12350 struct compile_state *state,
12351 struct dead_triple *dtriple, struct triple **expr,
12352 struct dead_triple ***work_list_tail)
12354 struct triple *triple;
12355 struct dead_triple *dt;
/* id doubles as the dtriple[] index during this pass; zero or
 * negative means the triple was never registered. */
12363 if (triple->id <= 0) {
12364 internal_error(state, triple, "bad triple id: %d",
12367 if (triple->op == OP_NOOP) {
12368 internal_error(state, triple, "awakening noop?");
12371 dt = &dtriple[triple->id];
12372 if (!(dt->flags & TRIPLE_FLAG_ALIVE)) {
12373 dt->flags |= TRIPLE_FLAG_ALIVE;
12374 if (!dt->work_next) {
12375 **work_list_tail = dt;
12376 *work_list_tail = &dt->work_next;
/* Dead-code elimination: seed the worklist with instructions that are
 * not pure (always kept), then propagate liveness through data, block
 * and control dependencies; finally release every triple never marked
 * alive.  NOTE(review): interior lines are elided in this extract. */
12381 static void eliminate_inefectual_code(struct compile_state *state)
12383 struct block *block;
12384 struct dead_triple *dtriple, *work_list, **work_list_tail, *dt;
12386 struct triple *first, *final, *ins;
12388 /* Setup the work list */
12390 work_list_tail = &work_list;
12392 first = state->first;
12393 final = state->first->prev;
12395 /* Count how many triples I have */
12396 triples = count_triples(state);
12398 /* Now put then in an array and mark all of the triples dead */
12399 dtriple = xcmalloc(sizeof(*dtriple) * (triples + 1), "dtriples");
12405 dtriple[i].triple = ins;
12406 dtriple[i].block = block_of_triple(state, ins);
12407 dtriple[i].flags = 0;
/* ins->id is borrowed as the dtriple[] index for this pass and
 * restored from old_id below. */
12408 dtriple[i].old_id = ins->id;
12410 /* See if it is an operation we always keep */
12411 if (!triple_is_pure(state, ins, dtriple[i].old_id)) {
12412 awaken(state, dtriple, &ins, &work_list_tail);
12416 } while(ins != first);
12418 struct block *block;
12419 struct dead_triple *dt;
12420 struct block_set *user;
12421 struct triple **expr;
12423 work_list = dt->work_next;
12425 work_list_tail = &work_list;
12427 /* Make certain the block the current instruction is in lives */
12428 block = block_of_triple(state, dt->triple);
12429 awaken(state, dtriple, &block->first, &work_list_tail);
12430 if (triple_is_branch(state, block->last)) {
12431 awaken(state, dtriple, &block->last, &work_list_tail);
12434 /* Wake up the data dependencies of this triple */
12437 expr = triple_rhs(state, dt->triple, expr);
12438 awaken(state, dtriple, expr, &work_list_tail);
12441 expr = triple_lhs(state, dt->triple, expr);
12442 awaken(state, dtriple, expr, &work_list_tail);
12445 expr = triple_misc(state, dt->triple, expr);
12446 awaken(state, dtriple, expr, &work_list_tail);
12448 /* Wake up the forward control dependencies */
12450 expr = triple_targ(state, dt->triple, expr);
12451 awaken(state, dtriple, expr, &work_list_tail);
12453 /* Wake up the reverse control dependencies of this triple */
12454 for(user = dt->block->ipdomfrontier; user; user = user->next) {
12455 awaken(state, dtriple, &user->member->last, &work_list_tail);
12456 if ((user->member->left != state->last_block) &&
12457 !triple_is_cond_branch(state, user->member->last)) {
12458 internal_error(state, dt->triple,
12459 "conditional branch missing");
12463 for(dt = &dtriple[1]; dt <= &dtriple[triples]; dt++) {
12464 if ((dt->triple->op == OP_NOOP) &&
12465 (dt->flags & TRIPLE_FLAG_ALIVE)) {
12466 internal_error(state, dt->triple, "noop effective?");
12468 dt->triple->id = dt->old_id; /* Restore the color */
12469 if (!(dt->flags & TRIPLE_FLAG_ALIVE)) {
12470 release_triple(state, dt->triple);
/* Scan every defining instruction and insert the minimum OP_COPYs
 * needed to resolve hard register / register-class conflicts between
 * a value's producer and its consumers.  A first pass over the users
 * decides whether a pre-copy (at a user) or a post-copy (after the
 * producer) is required; a second pass performs the pre-copies.
 * NOTE(review): interior lines are elided in this extract. */
12477 static void insert_mandatory_copies(struct compile_state *state)
12479 struct triple *ins, *first;
12481 /* The object is with a minimum of inserted copies,
12482 * to resolve in fundamental register conflicts between
12483 * register value producers and consumers.
12484 * Theoretically we may be greater than minimal when we
12485 * are inserting copies before instructions but that
12486 * case should be rare.
12488 first = state->first;
12491 struct triple_set *entry, *next;
12492 struct triple *tmp;
12493 struct reg_info info;
12494 unsigned reg, regcm;
12495 int do_post_copy, do_pre_copy;
12497 if (!triple_is_def(state, ins)) {
12500 /* Find the architecture specific color information */
12501 info = arch_reg_lhs(state, ins, 0);
12502 if (info.reg >= MAX_REGISTERS) {
12503 info.reg = REG_UNSET;
12507 regcm = arch_type_to_regcm(state, ins->type);
12508 do_post_copy = do_pre_copy = 0;
12510 /* Walk through the uses of ins and check for conflicts */
12511 for(entry = ins->use; entry; entry = next) {
12512 struct reg_info rinfo;
12514 next = entry->next;
12515 i = find_rhs_use(state, entry->member, ins);
12520 /* Find the users color requirements */
12521 rinfo = arch_reg_rhs(state, entry->member, i);
12522 if (rinfo.reg >= MAX_REGISTERS) {
12523 rinfo.reg = REG_UNSET;
12526 /* See if I need a pre_copy */
12527 if (rinfo.reg != REG_UNSET) {
12528 if ((reg != REG_UNSET) && (reg != rinfo.reg)) {
12533 regcm &= rinfo.regcm;
12534 regcm = arch_regcm_normalize(state, regcm);
12538 /* Always use pre_copies for constants.
12539 * They do not take up any registers until a
12540 * copy places them in one.
12542 if ((info.reg == REG_UNNEEDED) &&
12543 (rinfo.reg != REG_UNNEEDED)) {
12549 (((info.reg != REG_UNSET) &&
12550 (reg != REG_UNSET) &&
12551 (info.reg != reg)) ||
12552 ((info.regcm & regcm) == 0));
12555 regcm = info.regcm;
12556 /* Walk through the uses of ins and do a pre_copy or see if a post_copy is warranted */
12557 for(entry = ins->use; entry; entry = next) {
12558 struct reg_info rinfo;
12560 next = entry->next;
12561 i = find_rhs_use(state, entry->member, ins);
12566 /* Find the users color requirements */
12567 rinfo = arch_reg_rhs(state, entry->member, i);
12568 if (rinfo.reg >= MAX_REGISTERS) {
12569 rinfo.reg = REG_UNSET;
12572 /* Now see if it is time to do the pre_copy */
12573 if (rinfo.reg != REG_UNSET) {
12574 if (((reg != REG_UNSET) && (reg != rinfo.reg)) ||
12575 ((regcm & rinfo.regcm) == 0) ||
12576 /* Don't let a mandatory coalesce sneak
12577 * into a operation that is marked to prevent
12580 ((reg != REG_UNNEEDED) &&
12581 ((ins->id & TRIPLE_FLAG_POST_SPLIT) ||
12582 (entry->member->id & TRIPLE_FLAG_PRE_SPLIT)))
12585 struct triple *user;
12586 user = entry->member;
12587 if (RHS(user, i) != ins) {
12588 internal_error(state, user, "bad rhs");
12590 tmp = pre_copy(state, user, i);
12591 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
12599 if ((regcm & rinfo.regcm) == 0) {
12601 struct triple *user;
12602 user = entry->member;
12603 if (RHS(user, i) != ins) {
12604 internal_error(state, user, "bad rhs");
12606 tmp = pre_copy(state, user, i);
12607 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
12613 regcm &= rinfo.regcm;
12616 if (do_post_copy) {
12617 struct reg_info pre, post;
12618 tmp = post_copy(state, ins);
12619 tmp->id |= TRIPLE_FLAG_PRE_SPLIT;
12620 pre = arch_reg_lhs(state, ins, 0);
12621 post = arch_reg_lhs(state, tmp, 0);
/* A copy that changes neither register nor class resolves nothing —
 * the conflict analysis above must have been wrong. */
12622 if ((pre.reg == post.reg) && (pre.regcm == post.regcm)) {
12623 internal_error(state, tmp, "useless copy");
12628 } while(ins != first);
/* Graph-coloring register allocator data structures: a live range
 * (node of the interference graph), the interference edges between
 * ranges, and the defs belonging to each range. */
12632 struct live_range_edge;
12633 struct live_range_def;
12634 struct live_range {
12635 struct live_range_edge *edges;
12636 struct live_range_def *defs;
12637 /* Note. The list pointed to by defs is kept in order.
12638 * That is barring splits in the flow control
12639 * defs dominates defs->next which dominates defs->next->next
/* Doubly linked membership in a coloring group (low/high lists). */
12646 struct live_range *group_next, **group_prev;
12649 struct live_range_edge {
12650 struct live_range_edge *next;
12651 struct live_range *node;
12654 struct live_range_def {
12655 struct live_range_def *next;
12656 struct live_range_def *prev;
12657 struct live_range *lr;
12658 struct triple *def;
/* Hash table of interference edges, keyed by the (left, right) live
 * range pair; LRE_HASH_SIZE must stay a power of two (the hash is
 * masked with LRE_HASH_SIZE - 1). */
12662 #define LRE_HASH_SIZE 2048
12664 struct lre_hash *next;
12665 struct live_range *left;
12666 struct live_range *right;
/* Allocator-wide state: edge hash, per-block liveness, the arrays of
 * live_range_defs and live_ranges, and the simplify work lists.
 * NOTE(review): the struct tag lines are elided in this extract. */
12671 struct lre_hash *hash[LRE_HASH_SIZE];
12672 struct reg_block *blocks;
12673 struct live_range_def *lrd;
12674 struct live_range *lr;
12675 struct live_range *low, **low_tail;
12676 struct live_range *high, **high_tail;
12679 int passes, max_passes;
12680 #define MAX_ALLOCATION_PASSES 100
/* Argument bundle for print_interference_block (passed through
 * walk_blocks' void *arg). */
12685 struct print_interference_block_info {
12686 struct reg_state *rstate;
/* Debug dump of one basic block: its live-in/live-out sets, phi
 * inputs per incoming edge, each instruction with its live range and
 * (optionally) the range's defs and interference edges.  Also runs
 * sanity checks on each triple.
 * NOTE(review): interior lines are elided in this extract. */
12690 static void print_interference_block(
12691 struct compile_state *state, struct block *block, void *arg)
12694 struct print_interference_block_info *info = arg;
12695 struct reg_state *rstate = info->rstate;
12696 FILE *fp = info->fp;
12697 struct reg_block *rb;
12698 struct triple *ptr;
12701 rb = &rstate->blocks[block->vertex];
12703 fprintf(fp, "\nblock: %p (%d), %p<-%p %p<-%p\n",
12707 block->left && block->left->use?block->left->use->member : 0,
12709 block->right && block->right->use?block->right->use->member : 0);
12711 struct triple_reg_set *in_set;
12712 fprintf(fp, " in:");
12713 for(in_set = rb->in; in_set; in_set = in_set->next) {
12714 fprintf(fp, " %-10p", in_set->member);
12719 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12720 done = (ptr == block->last);
12721 if (ptr->op == OP_PHI) {
12728 for(edge = 0; edge < block->users; edge++) {
12729 fprintf(fp, " in(%d):", edge);
12730 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12731 struct triple **slot;
12732 done = (ptr == block->last);
12733 if (ptr->op != OP_PHI) {
12736 slot = &RHS(ptr, 0);
12737 fprintf(fp, " %-10p", slot[edge]);
12742 if (block->first->op == OP_LABEL) {
12743 fprintf(fp, "%p:\n", block->first);
12745 for(done = 0, ptr = block->first; !done; ptr = ptr->next) {
12746 struct live_range *lr;
12750 done = (ptr == block->last);
12751 lr = rstate->lrd[ptr->id].lr;
/* Temporarily restore the original id so the triple prints with its
 * register color instead of the live-range index. */
12754 ptr->id = rstate->lrd[id].orig_id;
12755 SET_REG(ptr->id, lr->color);
12756 display_triple(fp, ptr);
12759 if (triple_is_def(state, ptr) && (lr->defs == 0)) {
12760 internal_error(state, ptr, "lr has no defs!");
12762 if (info->need_edges) {
12764 struct live_range_def *lrd;
12765 fprintf(fp, " range:");
12768 fprintf(fp, " %-10p", lrd->def);
12770 } while(lrd != lr->defs);
12773 if (lr->edges > 0) {
12774 struct live_range_edge *edge;
12775 fprintf(fp, " edges:");
12776 for(edge = lr->edges; edge; edge = edge->next) {
12777 struct live_range_def *lrd;
12778 lrd = edge->node->defs;
12780 fprintf(fp, " %-10p", lrd->def);
12782 } while(lrd != edge->node->defs);
12788 /* Do a bunch of sanity checks */
12789 valid_ins(state, ptr);
12790 if ((ptr->id < 0) || (ptr->id > rstate->defs)) {
12791 internal_error(state, ptr, "Invalid triple id: %d",
12796 struct triple_reg_set *out_set;
12797 fprintf(fp, " out:");
12798 for(out_set = rb->out; out_set; out_set = out_set->next) {
12799 fprintf(fp, " %-10p", out_set->member);
/* Dump the live variables and interference data of every block to fp
 * by walking all blocks with print_interference_block. */
12806 static void print_interference_blocks(
12807 struct compile_state *state, struct reg_state *rstate, FILE *fp, int need_edges)
12809 struct print_interference_block_info info;
12810 info.rstate = rstate;
12812 info.need_edges = need_edges;
12813 fprintf(fp, "\nlive variables by block\n");
12814 walk_blocks(state, print_interference_block, &info);
/* Return the size (number of registers) of the largest register
 * class selected in the `classes` bitmask. */
12818 static unsigned regc_max_size(struct compile_state *state, int classes)
12823 for(i = 0; i < MAX_REGC; i++) {
12824 if (classes & (1 << i)) {
12826 size = arch_regc_size(state, i);
12827 if (size > max_size) {
/* Test whether reg1 and reg2 overlap, i.e. reg2 appears in reg1's
 * architectural equivalence set (e.g. sub-registers of the same
 * physical register).  Returns nonzero on overlap. */
12835 static int reg_is_reg(struct compile_state *state, int reg1, int reg2)
12837 unsigned equivs[MAX_REG_EQUIVS];
12839 if ((reg1 < 0) || (reg1 >= MAX_REGISTERS)) {
12840 internal_error(state, 0, "invalid register");
12842 if ((reg2 < 0) || (reg2 >= MAX_REGISTERS)) {
12843 internal_error(state, 0, "invalid register");
12845 arch_reg_equivs(state, equivs, reg1);
12846 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12847 if (equivs[i] == reg2) {
/* Mark reg and every register it overlaps with as used (flag set to
 * 1) in the `used` array; REG_UNNEEDED marks nothing. */
12854 static void reg_fill_used(struct compile_state *state, char *used, int reg)
12856 unsigned equivs[MAX_REG_EQUIVS];
12858 if (reg == REG_UNNEEDED) {
12861 arch_reg_equivs(state, equivs, reg);
12862 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12863 used[equivs[i]] = 1;
/* Like reg_fill_used, but increment the use count of `reg` and each of its
 * aliases instead of setting a flag — callers use counts >= 2 to detect
 * registers with multiple simultaneous uses (see fix_tangles). */
12868 static void reg_inc_used(struct compile_state *state, char *used, int reg)
12870 unsigned equivs[MAX_REG_EQUIVS];
12872 if (reg == REG_UNNEEDED) {
12875 arch_reg_equivs(state, equivs, reg);
12876 for(i = 0; (i < MAX_REG_EQUIVS) && equivs[i] != REG_UNSET; i++) {
12877 used[equivs[i]] += 1;
/* Hash an (unordered) pair of live ranges into a bucket index for the
 * interference-edge hash table. The pointers are scaled down by the
 * structure size to spread the low bits; LRE_HASH_SIZE is assumed to be a
 * power of two (the final mask relies on it). */
12882 static unsigned int hash_live_edge(
12883 struct live_range *left, struct live_range *right)
12885 unsigned int hash, val;
12886 unsigned long lval, rval;
12887 lval = ((unsigned long)left)/sizeof(struct live_range);
12888 rval = ((unsigned long)right)/sizeof(struct live_range);
12893 hash = (hash *263) + val;
12898 hash = (hash *263) + val;
12900 hash = hash & (LRE_HASH_SIZE - 1);
/* Find the hash-chain slot for the edge (left, right). The pair is first
 * canonicalized so that left <= right (by pointer value), matching the
 * ordering used by add_live_edge. Returns a pointer to the chain link so
 * the caller can either read or unlink the entry. */
12904 static struct lre_hash **lre_probe(struct reg_state *rstate,
12905 struct live_range *left, struct live_range *right)
12907 struct lre_hash **ptr;
12908 unsigned int index;
12909 /* Ensure left <= right */
12910 if (left > right) {
12911 struct live_range *tmp;
12916 index = hash_live_edge(left, right);
12918 ptr = &rstate->hash[index];
12920 if (((*ptr)->left == left) && ((*ptr)->right == right)) {
12923 ptr = &(*ptr)->next;
/* Return true when an interference edge between left and right exists in
 * the hash table (lre_probe handles the pair ordering). */
12928 static int interfere(struct reg_state *rstate,
12929 struct live_range *left, struct live_range *right)
12931 struct lre_hash **ptr;
12932 ptr = lre_probe(rstate, left, right);
12933 return ptr && *ptr;
/* Record an interference edge between left and right: insert it into the
 * edge hash table and prepend it to both adjacency lists. Self edges and
 * edges touching the dummy live range (&rstate->lr[0]) are ignored. */
12936 static void add_live_edge(struct reg_state *rstate,
12937 struct live_range *left, struct live_range *right)
12939 /* FIXME the memory allocation overhead is noticeable here... */
12940 struct lre_hash **ptr, *new_hash;
12941 struct live_range_edge *edge;
12943 if (left == right) {
12946 if ((left == &rstate->lr[0]) || (right == &rstate->lr[0])) {
12949 /* Ensure left <= right */
12950 if (left > right) {
12951 struct live_range *tmp;
12956 ptr = lre_probe(rstate, left, right);
12961 fprintf(stderr, "new_live_edge(%p, %p)\n",
12964 new_hash = xmalloc(sizeof(*new_hash), "lre_hash");
12965 new_hash->next = *ptr;
12966 new_hash->left = left;
12967 new_hash->right = right;
/* The edge is stored on both sides of the graph's adjacency lists. */
12970 edge = xmalloc(sizeof(*edge), "live_range_edge");
12971 edge->next = left->edges;
12972 edge->node = right;
12973 left->edges = edge;
12976 edge = xmalloc(sizeof(*edge), "live_range_edge");
12977 edge->next = right->edges;
12979 right->edges = edge;
12980 right->degree += 1;
/* Remove the interference edge between left and right: unlink it from the
 * hash table, then from both adjacency lists. If no such edge exists the
 * function returns without touching anything. The freed edge structures
 * are cleared first to catch dangling uses. */
12983 static void remove_live_edge(struct reg_state *rstate,
12984 struct live_range *left, struct live_range *right)
12986 struct live_range_edge *edge, **ptr;
12987 struct lre_hash **hptr, *entry;
12988 hptr = lre_probe(rstate, left, right);
12989 if (!hptr || !*hptr) {
12993 *hptr = entry->next;
12996 for(ptr = &left->edges; *ptr; ptr = &(*ptr)->next) {
12998 if (edge->node == right) {
13000 memset(edge, 0, sizeof(*edge));
13006 for(ptr = &right->edges; *ptr; ptr = &(*ptr)->next) {
13008 if (edge->node == left) {
13010 memset(edge, 0, sizeof(*edge));
/* Detach `range` from the interference graph by removing every one of its
 * edges (next is captured before each removal invalidates the node). */
13018 static void remove_live_edges(struct reg_state *rstate, struct live_range *range)
13020 struct live_range_edge *edge, *next;
13021 for(edge = range->edges; edge; edge = next) {
13023 remove_live_edge(rstate, range, edge->node);
/* Move every interference edge from src to dest, keeping the graph
 * consistent while two live ranges are being coalesced. */
13027 static void transfer_live_edges(struct reg_state *rstate,
13028 struct live_range *dest, struct live_range *src)
13030 struct live_range_edge *edge, *next;
13031 for(edge = src->edges; edge; edge = next) {
13032 struct live_range *other;
13034 other = edge->node;
13035 remove_live_edge(rstate, src, other);
13036 add_live_edge(rstate, dest, other);
13041 /* Interference graph...
13043 * new(n) --- Return a graph with n nodes but no edges.
13044 * add(g,x,y) --- Return a graph including g with an edge between x and y
13045 * interfere(g, x, y) --- Return true if there exists an edge between the nodes
13046 * x and y in the graph g
13047 * degree(g, x) --- Return the degree of the node x in the graph g
13048 * neighbors(g, x, f) --- Apply function f to each neighbor of node x in the graph g
13050 * Implement with a hash table && a set of adjacency vectors.
13051 * The hash table supports constant time implementations of add and interfere.
13052 * The adjacency vectors support an efficient implementation of neighbors.
13056 * +---------------------------------------------------+
13057 * | +--------------+ |
13059 * renumber -> build graph -> coalesce -> spill_costs -> simplify -> select
13061 * -- In simplify implement optimistic coloring... (No backtracking)
13062 * -- Implement Rematerialization it is the only form of spilling we can perform
13063 * Essentially this means dropping a constant from a register because
13064 * we can regenerate it later.
13066 * --- Very conservative coalescing (don't coalesce just mark the opportunities)
13067 * coalesce at phi points...
13068 * --- Bias coloring if at all possible do the coalescing at compile time.
/* Consistency check: verify that ins's live range was assigned a color
 * different from every right-hand-side operand's live range (operands
 * equal to parent or ins itself are skipped). Raises an internal error
 * on a clash. */
13073 static void different_colored(
13074 struct compile_state *state, struct reg_state *rstate,
13075 struct triple *parent, struct triple *ins)
13077 struct live_range *lr;
13078 struct triple **expr;
13079 lr = rstate->lrd[ins->id].lr;
13080 expr = triple_rhs(state, ins, 0);
13081 for(;expr; expr = triple_rhs(state, ins, expr)) {
13082 struct live_range *lr2;
13083 if (!*expr || (*expr == parent) || (*expr == ins)) {
13086 lr2 = rstate->lrd[(*expr)->id].lr;
13087 if (lr->color == lr2->color) {
13088 internal_error(state, ins, "live range too big");
/* Merge live range lr2 into lr1 (splicing lr2's def list onto lr1's),
 * after validating that both ranges are live, colorable, share compatible
 * colors and have intersecting register classes. The dominant range is
 * kept as lr1; lr2 is emptied and marked REG_UNNEEDED. The interference
 * graph is kept in sync via transfer_live_edges. Returns the surviving
 * range (lr1). */
13094 static struct live_range *coalesce_ranges(
13095 struct compile_state *state, struct reg_state *rstate,
13096 struct live_range *lr1, struct live_range *lr2)
13098 struct live_range_def *head, *mid1, *mid2, *end, *lrd;
13104 if (!lr1->defs || !lr2->defs) {
13105 internal_error(state, 0,
13106 "cannot coalese dead live ranges");
13108 if ((lr1->color == REG_UNNEEDED) ||
13109 (lr2->color == REG_UNNEEDED)) {
13110 internal_error(state, 0,
13111 "cannot coalesce live ranges without a possible color");
13113 if ((lr1->color != lr2->color) &&
13114 (lr1->color != REG_UNSET) &&
13115 (lr2->color != REG_UNSET)) {
13116 internal_error(state, lr1->defs->def,
13117 "cannot coalesce live ranges of different colors");
13119 color = lr1->color;
13120 if (color == REG_UNSET) {
13121 color = lr2->color;
13123 classes = lr1->classes & lr2->classes;
13125 internal_error(state, lr1->defs->def,
13126 "cannot coalesce live ranges with dissimilar register classes");
13128 #if DEBUG_COALESCING
13129 fprintf(stderr, "coalescing:");
13132 fprintf(stderr, " %p", lrd->def);
13134 } while(lrd != lr1->defs);
13135 fprintf(stderr, " |");
13138 fprintf(stderr, " %p", lrd->def);
13140 } while(lrd != lr2->defs);
13141 fprintf(stderr, "\n");
13143 /* If there is a clearly dominant live range put it in lr1,
13144 * For purposes of this test phi functions are
13145 * considered dominated by the definitions that feed into
13148 if ((lr1->defs->prev->def->op == OP_PHI) ||
13149 ((lr2->defs->prev->def->op != OP_PHI) &&
13150 tdominates(state, lr2->defs->def, lr1->defs->def))) {
13151 struct live_range *tmp;
13157 if (lr1->defs->orig_id & TRIPLE_FLAG_POST_SPLIT) {
13158 fprintf(stderr, "lr1 post\n");
13160 if (lr1->defs->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
13161 fprintf(stderr, "lr1 pre\n");
13163 if (lr2->defs->orig_id & TRIPLE_FLAG_POST_SPLIT) {
13164 fprintf(stderr, "lr2 post\n");
13166 if (lr2->defs->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
13167 fprintf(stderr, "lr2 pre\n");
13171 fprintf(stderr, "coalesce color1(%p): %3d color2(%p) %3d\n",
13178 /* Append lr2 onto lr1 */
13179 #warning "FIXME should this be a merge instead of a splice?"
13180 /* This FIXME item applies to the correctness of live_range_end
13181 * and to the necessity of making multiple passes of coalesce_live_ranges.
13182 * A failure to find some coalesce opportunities in coalesce_live_ranges
13183 * does not impact the correctness of the compiler just the efficiency with
13184 * which registers are allocated.
13187 mid1 = lr1->defs->prev;
13189 end = lr2->defs->prev;
13197 /* Fixup the live range in the added live range defs */
13202 } while(lrd != head);
13204 /* Mark lr2 as free. */
13206 lr2->color = REG_UNNEEDED;
13210 internal_error(state, 0, "lr1->defs == 0 ?");
13213 lr1->color = color;
13214 lr1->classes = classes;
13216 /* Keep the graph in sync by transferring the edges from lr2 to lr1 */
13217 transfer_live_edges(rstate, lr1, lr2);
/* Iterate over the "head" definitions of a live range in dominance order:
 * pass last == 0 to get the first head, then pass the previous result to
 * advance. Returns 0 when there are no further heads. */
13222 static struct live_range_def *live_range_head(
13223 struct compile_state *state, struct live_range *lr,
13224 struct live_range_def *last)
13226 struct live_range_def *result;
13231 else if (!tdominates(state, lr->defs->def, last->next->def)) {
13232 result = last->next;
/* Iterate over the "end" (last, in dominance order) definitions of a live
 * range, walking the def list backwards: pass last == 0 to get the final
 * definition, then pass the previous result to step back. Returns 0 when
 * exhausted. Counterpart of live_range_head. */
13237 static struct live_range_def *live_range_end(
13238 struct compile_state *state, struct live_range *lr,
13239 struct live_range_def *last)
13241 struct live_range_def *result;
13244 result = lr->defs->prev;
13246 else if (!tdominates(state, last->prev->def, lr->defs->prev->def)) {
13247 result = last->prev;
/* Build the initial live range structures: allocate one live_range_def
 * per instruction and one live_range per value-defining instruction (plus
 * the dummy range lr[0] for non-definitions), seed each range with its
 * architecture-assigned color and register-class mask, then make a second
 * pass that mandatorily coalesces ranges tied together by fixed-register
 * instruction templates (lhs register == rhs register). */
13253 static void initialize_live_ranges(
13254 struct compile_state *state, struct reg_state *rstate)
13256 struct triple *ins, *first;
13257 size_t count, size;
13260 first = state->first;
13261 /* First count how many instructions I have.
13263 count = count_triples(state);
13264 /* Potentially I need one live range definitions for each
13267 rstate->defs = count;
13268 /* Potentially I need one live range for each instruction
13269 * plus an extra for the dummy live range.
13271 rstate->ranges = count + 1;
13272 size = sizeof(rstate->lrd[0]) * rstate->defs;
13273 rstate->lrd = xcmalloc(size, "live_range_def");
13274 size = sizeof(rstate->lr[0]) * rstate->ranges;
13275 rstate->lr = xcmalloc(size, "live_range");
13277 /* Setup the dummy live range */
13278 rstate->lr[0].classes = 0;
13279 rstate->lr[0].color = REG_UNSET;
13280 rstate->lr[0].defs = 0;
13284 /* If the triple is a variable give it a live range */
13285 if (triple_is_def(state, ins)) {
13286 struct reg_info info;
13287 /* Find the architecture specific color information */
13288 info = find_def_color(state, ins);
13290 rstate->lr[i].defs = &rstate->lrd[j];
13291 rstate->lr[i].color = info.reg;
13292 rstate->lr[i].classes = info.regcm;
13293 rstate->lr[i].degree = 0;
13294 rstate->lrd[j].lr = &rstate->lr[i];
13296 /* Otherwise give the triple the dummy live range. */
13298 rstate->lrd[j].lr = &rstate->lr[0];
13301 /* Initialize the live_range_def */
13302 rstate->lrd[j].next = &rstate->lrd[j];
13303 rstate->lrd[j].prev = &rstate->lrd[j];
13304 rstate->lrd[j].def = ins;
13305 rstate->lrd[j].orig_id = ins->id;
13310 } while(ins != first);
13311 rstate->ranges = i;
13313 /* Make a second pass to handle architecture specific register
13318 int zlhs, zrhs, i, j;
13319 if (ins->id > rstate->defs) {
13320 internal_error(state, ins, "bad id");
13323 /* Walk through the template of ins and coalesce live ranges */
13324 zlhs = TRIPLE_LHS(ins->sizes);
13325 if ((zlhs == 0) && triple_is_def(state, ins)) {
13328 zrhs = TRIPLE_RHS(ins->sizes);
13330 #if DEBUG_COALESCING > 1
13331 fprintf(stderr, "mandatory coalesce: %p %d %d\n",
13334 for(i = 0; i < zlhs; i++) {
13335 struct reg_info linfo;
13336 struct live_range_def *lhs;
13337 linfo = arch_reg_lhs(state, ins, i);
13338 if (linfo.reg < MAX_REGISTERS) {
13341 if (triple_is_def(state, ins)) {
13342 lhs = &rstate->lrd[ins->id];
13344 lhs = &rstate->lrd[LHS(ins, i)->id];
13346 #if DEBUG_COALESCING > 1
13347 fprintf(stderr, "coalesce lhs(%d): %p %d\n",
13348 i, lhs, linfo.reg);
13351 for(j = 0; j < zrhs; j++) {
13352 struct reg_info rinfo;
13353 struct live_range_def *rhs;
13354 rinfo = arch_reg_rhs(state, ins, j);
13355 if (rinfo.reg < MAX_REGISTERS) {
13358 rhs = &rstate->lrd[RHS(ins, j)->id];
13359 #if DEBUG_COALESCING > 1
13360 fprintf(stderr, "coalesce rhs(%d): %p %d\n",
13361 j, rhs, rinfo.reg);
13364 if (rinfo.reg == linfo.reg) {
13365 coalesce_ranges(state, rstate,
13371 } while(ins != first);
/* walk_variable_lifetimes callback that builds the interference graph:
 * for a value-defining instruction, add an edge between its live range
 * and the range of every value currently live, unless their register
 * classes cannot intersect (in which case they can never share a
 * physical register and no edge is needed). */
13374 static void graph_ins(
13375 struct compile_state *state,
13376 struct reg_block *blocks, struct triple_reg_set *live,
13377 struct reg_block *rb, struct triple *ins, void *arg)
13379 struct reg_state *rstate = arg;
13380 struct live_range *def;
13381 struct triple_reg_set *entry;
13383 /* If the triple is not a definition
13384 * we do not have a definition to add to
13385 * the interference graph.
13387 if (!triple_is_def(state, ins)) {
13390 def = rstate->lrd[ins->id].lr;
13392 /* Create an edge between ins and everything that is
13393 * alive, unless the live_range cannot share
13394 * a physical register with ins.
13396 for(entry = live; entry; entry = entry->next) {
13397 struct live_range *lr;
13398 if ((entry->member->id < 0) || (entry->member->id > rstate->defs)) {
13399 internal_error(state, 0, "bad entry?");
13401 lr = rstate->lrd[entry->member->id].lr;
13405 if (!arch_regcm_intersect(def->classes, lr->classes)) {
13408 add_live_edge(rstate, def, lr);
/* Fetch the live range for ins and verify that ins actually appears in
 * that range's definition list; internal_error otherwise. Used by the
 * graph verification pass. */
13413 static struct live_range *get_verify_live_range(
13414 struct compile_state *state, struct reg_state *rstate, struct triple *ins)
13416 struct live_range *lr;
13417 struct live_range_def *lrd;
13419 if ((ins->id < 0) || (ins->id > rstate->defs)) {
13420 internal_error(state, ins, "bad ins?");
13422 lr = rstate->lrd[ins->id].lr;
13426 if (lrd->def == ins) {
13430 } while(lrd != lr->defs);
13432 internal_error(state, ins, "ins not in live range");
/* Debug pass (walk_variable_lifetimes callback): for every pair of
 * simultaneously live values, check that the interference graph has a
 * matching edge, that no live range holds two live values at once, and
 * that each range's recorded degree matches a recount of its edge list. */
13437 static void verify_graph_ins(
13438 struct compile_state *state,
13439 struct reg_block *blocks, struct triple_reg_set *live,
13440 struct reg_block *rb, struct triple *ins, void *arg)
13442 struct reg_state *rstate = arg;
13443 struct triple_reg_set *entry1, *entry2;
13446 /* Compare live against edges and make certain the code is working */
13447 for(entry1 = live; entry1; entry1 = entry1->next) {
13448 struct live_range *lr1;
13449 lr1 = get_verify_live_range(state, rstate, entry1->member);
13450 for(entry2 = live; entry2; entry2 = entry2->next) {
13451 struct live_range *lr2;
13452 struct live_range_edge *edge2;
13455 if (entry2 == entry1) {
13458 lr2 = get_verify_live_range(state, rstate, entry2->member);
13460 internal_error(state, entry2->member,
13461 "live range with 2 values simultaneously alive");
13463 if (!arch_regcm_intersect(lr1->classes, lr2->classes)) {
13466 if (!interfere(rstate, lr1, lr2)) {
13467 internal_error(state, entry2->member,
13468 "edges don't interfere?");
13473 for(edge2 = lr2->edges; edge2; edge2 = edge2->next) {
13475 if (edge2->node == lr1) {
13479 if (lr2_degree != lr2->degree) {
13480 internal_error(state, entry2->member,
13481 "computed degree: %d does not match reported degree: %d\n",
13482 lr2_degree, lr2->degree);
13485 internal_error(state, entry2->member, "missing edge");
/* Debug printer (walk_variable_lifetimes callback): display the triple
 * with its assigned color, then its live range's definitions, the set of
 * currently-live values, and the range's interference edges. */
13493 static void print_interference_ins(
13494 struct compile_state *state,
13495 struct reg_block *blocks, struct triple_reg_set *live,
13496 struct reg_block *rb, struct triple *ins, void *arg)
13498 struct reg_state *rstate = arg;
13499 struct live_range *lr;
13502 lr = rstate->lrd[ins->id].lr;
13504 ins->id = rstate->lrd[id].orig_id;
13505 SET_REG(ins->id, lr->color);
13506 display_triple(stdout, ins);
13510 struct live_range_def *lrd;
13514 printf(" %-10p", lrd->def);
13516 } while(lrd != lr->defs);
13520 struct triple_reg_set *entry;
13522 for(entry = live; entry; entry = entry->next) {
13523 printf(" %-10p", entry->member);
13528 struct live_range_edge *entry;
13530 for(entry = lr->edges; entry; entry = entry->next) {
13531 struct live_range_def *lrd;
13532 lrd = entry->node->defs;
13534 printf(" %-10p", lrd->def);
13536 } while(lrd != entry->node->defs);
13541 if (triple_is_branch(state, ins)) {
/* Opportunistic coalescing pass: for each live range ending in an OP_COPY
 * (that is not itself the product of a live range split), try to merge it
 * with the live range of each copy user, provided colors are compatible,
 * register classes intersect, and the ranges do not interfere. Returns a
 * count/flag the caller uses to decide whether another pass is needed. */
13547 static int coalesce_live_ranges(
13548 struct compile_state *state, struct reg_state *rstate)
13550 /* At the point where a value is moved from one
13551 * register to another that value requires two
13552 * registers, thus increasing register pressure.
13553 * Live range coalescing reduces the register
13554 * pressure by keeping a value in one register
13557 * In the case of a phi function all paths leading
13558 * into it must be allocated to the same register
13559 * otherwise the phi function may not be removed.
13561 * Forcing a value to stay in a single register
13562 * for an extended period of time does have
13563 * limitations when applied to non-homogeneous
13566 * The two cases I have identified are:
13567 * 1) Two forced register assignments may
13569 * 2) Registers may go unused because they
13570 * are only good for storing the value
13571 * and not manipulating it.
13573 * Because of this I need to split live ranges,
13574 * even outside of the context of coalesced live
13575 * ranges. The need to split live ranges does
13576 * impose some constraints on live range coalescing.
13578 * - Live ranges may not be coalesced across phi
13579 * functions. This creates a 2 headed live
13580 * range that cannot be sanely split.
13582 * - phi functions (coalesced in initialize_live_ranges)
13583 * are handled as pre split live ranges so we will
13584 * never attempt to split them.
13590 for(i = 0; i <= rstate->ranges; i++) {
13591 struct live_range *lr1;
13592 struct live_range_def *lrd1;
13593 lr1 = &rstate->lr[i];
13597 lrd1 = live_range_end(state, lr1, 0);
13598 for(; lrd1; lrd1 = live_range_end(state, lr1, lrd1)) {
13599 struct triple_set *set;
13600 if (lrd1->def->op != OP_COPY) {
13603 /* Skip copies that are the result of a live range split. */
13604 if (lrd1->orig_id & TRIPLE_FLAG_POST_SPLIT) {
13607 for(set = lrd1->def->use; set; set = set->next) {
13608 struct live_range_def *lrd2;
13609 struct live_range *lr2, *res;
13611 lrd2 = &rstate->lrd[set->member->id];
13613 /* Don't coalesce with instructions
13614 * that are the result of a live range
13617 if (lrd2->orig_id & TRIPLE_FLAG_PRE_SPLIT) {
13620 lr2 = rstate->lrd[set->member->id].lr;
13624 if ((lr1->color != lr2->color) &&
13625 (lr1->color != REG_UNSET) &&
13626 (lr2->color != REG_UNSET)) {
13629 if ((lr1->classes & lr2->classes) == 0) {
13633 if (interfere(rstate, lr1, lr2)) {
13637 res = coalesce_ranges(state, rstate, lr1, lr2);
/* walk_variable_lifetimes callback: detect mandatory-coalesce conflicts.
 * When an instruction's template forces an lhs and an rhs into the same
 * fixed register but the rhs value is still live past the instruction,
 * insert a pre-copy (flagged TRIPLE_FLAG_PRE_SPLIT) so the live range is
 * split; the conflict count is accumulated through *conflicts (arg). */
13651 static void fix_coalesce_conflicts(struct compile_state *state,
13652 struct reg_block *blocks, struct triple_reg_set *live,
13653 struct reg_block *rb, struct triple *ins, void *arg)
13655 int *conflicts = arg;
13656 int zlhs, zrhs, i, j;
13658 /* See if we have a mandatory coalesce operation between
13659 * a lhs and a rhs value. If so and the rhs value is also
13660 * alive then this triple needs to be pre copied. Otherwise
13661 * we would have two definitions in the same live range simultaneously
13664 zlhs = TRIPLE_LHS(ins->sizes);
13665 if ((zlhs == 0) && triple_is_def(state, ins)) {
13668 zrhs = TRIPLE_RHS(ins->sizes);
13669 for(i = 0; i < zlhs; i++) {
13670 struct reg_info linfo;
13671 linfo = arch_reg_lhs(state, ins, i);
13672 if (linfo.reg < MAX_REGISTERS) {
13675 for(j = 0; j < zrhs; j++) {
13676 struct reg_info rinfo;
13677 struct triple *rhs;
13678 struct triple_reg_set *set;
13681 rinfo = arch_reg_rhs(state, ins, j);
13682 if (rinfo.reg != linfo.reg) {
13686 for(set = live; set && !found; set = set->next) {
13687 if (set->member == rhs) {
13692 struct triple *copy;
13693 copy = pre_copy(state, ins, j);
13694 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
/* Run fix_coalesce_conflicts over all variable lifetimes and return the
 * number of conflicts that required a pre-copy. */
13702 static int correct_coalesce_conflicts(
13703 struct compile_state *state, struct reg_block *blocks)
13707 walk_variable_lifetimes(state, blocks, fix_coalesce_conflicts, &conflicts);
/* Replace every occurrence of triple `orig` with `new` in the given
 * triple_reg_set list (used to update in/out/live sets after a split). */
13711 static void replace_set_use(struct compile_state *state,
13712 struct triple_reg_set *head, struct triple *orig, struct triple *new)
13714 struct triple_reg_set *set;
13715 for(set = head; set; set = set->next) {
13716 if (set->member == orig) {
/* Replace `orig` with `new` in the in- and out-sets of every register
 * block (blocks are indexed 1..last_vertex). */
13722 static void replace_block_use(struct compile_state *state,
13723 struct reg_block *blocks, struct triple *orig, struct triple *new)
13726 #warning "WISHLIST visit just those blocks that need it *"
13727 for(i = 1; i <= state->last_vertex; i++) {
13728 struct reg_block *rb;
13730 replace_set_use(state, rb->in, orig, new);
13731 replace_set_use(state, rb->out, orig, new);
/* Stamp each value-defining instruction's id with its current register
 * color info (via SET_INFO); colors at or above MAX_REGISTERS are
 * recorded as REG_UNSET. */
13735 static void color_instructions(struct compile_state *state)
13737 struct triple *ins, *first;
13738 first = state->first;
13741 if (triple_is_def(state, ins)) {
13742 struct reg_info info;
13743 info = find_lhs_color(state, ins, 0);
13744 if (info.reg >= MAX_REGISTERS) {
13745 info.reg = REG_UNSET;
13747 SET_INFO(ins->id, info);
13750 } while(ins != first);
/* Read the register color info recorded (by color_instructions) in the
 * instruction id for lhs slot `index`. index 0 on a defining triple reads
 * the triple itself; other indexes recurse into LHS(ins, index). Errors
 * out on an out-of-range index. */
13753 static struct reg_info read_lhs_color(
13754 struct compile_state *state, struct triple *ins, int index)
13756 struct reg_info info;
13757 if ((index == 0) && triple_is_def(state, ins)) {
13758 info.reg = ID_REG(ins->id);
13759 info.regcm = ID_REGCM(ins->id);
13761 else if (index < TRIPLE_LHS(ins->sizes)) {
13762 info = read_lhs_color(state, LHS(ins, index), 0);
13765 internal_error(state, ins, "Bad lhs %d", index);
13766 info.reg = REG_UNSET;
/* Break a register "tangle" (two values fighting over one register):
 * insert pre-copies before users whose rhs post-color collides with the
 * tangle's color, and, if the tangle's own pre-color collides, insert a
 * post-copy after it. All inserted copies are flagged PRE_SPLIT and get
 * their color info recorded. Returns the post-copy (or the value the
 * caller should substitute for the tangle downstream). */
13772 static struct triple *resolve_tangle(
13773 struct compile_state *state, struct triple *tangle)
13775 struct reg_info info, uinfo;
13776 struct triple_set *set, *next;
13777 struct triple *copy;
13779 #warning "WISHLIST recalculate all affected instructions colors"
13780 info = find_lhs_color(state, tangle, 0);
13781 for(set = tangle->use; set; set = next) {
13782 struct triple *user;
13785 user = set->member;
13786 zrhs = TRIPLE_RHS(user->sizes);
13787 for(i = 0; i < zrhs; i++) {
13788 if (RHS(user, i) != tangle) {
13791 uinfo = find_rhs_post_color(state, user, i);
13792 if (uinfo.reg == info.reg) {
13793 copy = pre_copy(state, user, i);
13794 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
13795 SET_INFO(copy->id, uinfo);
13800 uinfo = find_lhs_pre_color(state, tangle, 0);
13801 if (uinfo.reg == info.reg) {
13802 struct reg_info linfo;
13803 copy = post_copy(state, tangle);
13804 copy->id |= TRIPLE_FLAG_PRE_SPLIT;
13805 linfo = find_lhs_color(state, copy, 0);
13806 SET_INFO(copy->id, linfo);
13808 info = find_lhs_color(state, tangle, 0);
13809 SET_INFO(tangle->id, info);
/* walk_variable_lifetimes callback: count how many live values map onto
 * each physical register (reg_inc_used), pick the least-dominated value
 * whose register has 2+ simultaneous claimants (skipping copies feeding
 * phi functions), and resolve it with resolve_tangle, patching block and
 * live sets to reference the replacement. The tangle count is accumulated
 * through *tangles (arg). */
13815 static void fix_tangles(struct compile_state *state,
13816 struct reg_block *blocks, struct triple_reg_set *live,
13817 struct reg_block *rb, struct triple *ins, void *arg)
13819 int *tangles = arg;
13820 struct triple *tangle;
13822 char used[MAX_REGISTERS];
13823 struct triple_reg_set *set;
13826 /* Find out which registers have multiple uses at this point */
13827 memset(used, 0, sizeof(used));
13828 for(set = live; set; set = set->next) {
13829 struct reg_info info;
13830 info = read_lhs_color(state, set->member, 0);
13831 if (info.reg == REG_UNSET) {
13834 reg_inc_used(state, used, info.reg);
13837 /* Now find the least dominated definition of a register in
13838 * conflict I have seen so far.
13840 for(set = live; set; set = set->next) {
13841 struct reg_info info;
13842 info = read_lhs_color(state, set->member, 0);
13843 if (used[info.reg] < 2) {
13846 /* Changing copies that feed into phi functions
13849 if (set->member->use &&
13850 (set->member->use->member->op == OP_PHI)) {
13853 if (!tangle || tdominates(state, set->member, tangle)) {
13854 tangle = set->member;
13857 /* If I have found a tangle resolve it */
13859 struct triple *post_copy;
13861 post_copy = resolve_tangle(state, tangle);
13863 replace_block_use(state, blocks, tangle, post_copy);
13865 if (post_copy && (tangle != ins)) {
13866 replace_set_use(state, live, tangle, post_copy);
/* Color all instructions, then run fix_tangles over every variable
 * lifetime; returns the number of tangles that were resolved. */
13873 static int correct_tangles(
13874 struct compile_state *state, struct reg_block *blocks)
13878 color_instructions(state);
13879 walk_variable_lifetimes(state, blocks, fix_tangles, &tangles);
13884 static void ids_from_rstate(struct compile_state *state, struct reg_state *rstate);
13885 static void cleanup_rstate(struct compile_state *state, struct reg_state *rstate);
/* Scan the definitions of `range` for one whose assigned register-class
 * mask is tighter than what its data type requires (i.e. artificially
 * constrained). Among such candidates (and any `constrained` passed in),
 * keep the least dominated one and return it; returns the incoming
 * `constrained` unchanged if no better candidate is found. */
13887 struct triple *find_constrained_def(
13888 struct compile_state *state, struct live_range *range, struct triple *constrained)
13890 struct live_range_def *lrd;
13893 struct reg_info info;
13895 int is_constrained;
13896 regcm = arch_type_to_regcm(state, lrd->def->type);
13897 info = find_lhs_color(state, lrd->def, 0);
13898 regcm = arch_regcm_reg_normalize(state, regcm);
13899 info.regcm = arch_regcm_reg_normalize(state, info.regcm);
13900 /* If the 2 register class masks are not equal then
13901 * the current register class is constrained.
13903 is_constrained = regcm != info.regcm;
13905 /* Of the constrained live ranges deal with the
13906 * least dominated one first.
13908 if (is_constrained) {
13909 #if DEBUG_RANGE_CONFLICTS
13910 fprintf(stderr, "canidate: %p %-8s regcm: %x %x\n",
13911 lrd->def, tops(lrd->def->op), regcm, info.regcm);
13913 if (!constrained ||
13914 tdominates(state, lrd->def, constrained))
13916 constrained = lrd->def;
13920 } while(lrd != range->defs);
13921 return constrained;
/* Look through `range` and all of its interference neighbors for the most
 * constrained definition (find_constrained_def) and, if one is found,
 * relax it by resolving the tangle around it (after syncing ids back and
 * tearing down the register state). Returns non-zero when something was
 * split. */
13924 static int split_constrained_ranges(
13925 struct compile_state *state, struct reg_state *rstate,
13926 struct live_range *range)
13928 /* Walk through the edges in conflict and our current live
13929 * range, and find definitions that are more severely constrained
13930 * than the type of data they contain requires.
13932 * Then pick one of those ranges and relax the constraints.
13934 struct live_range_edge *edge;
13935 struct triple *constrained;
13938 for(edge = range->edges; edge; edge = edge->next) {
13939 constrained = find_constrained_def(state, edge->node, constrained);
13941 if (!constrained) {
13942 constrained = find_constrained_def(state, range, constrained);
13944 #if DEBUG_RANGE_CONFLICTS
13945 fprintf(stderr, "constrained: %p %-8s\n",
13946 constrained, tops(constrained->op));
13949 ids_from_rstate(state, rstate);
13950 cleanup_rstate(state, rstate);
13951 resolve_tangle(state, constrained);
13953 return !!constrained;
/* Attempt to make `range` colorable by splitting live ranges. Currently
 * only constrained-definition splitting is implemented; general "furthest
 * next use" splitting is a wishlist item (see comment below). Gives up
 * when the range needs no register or the pass limit is exceeded.
 * Returns non-zero when a split was performed (caller should re-color). */
13956 static int split_ranges(
13957 struct compile_state *state, struct reg_state *rstate,
13958 char *used, struct live_range *range)
13961 #if DEBUG_RANGE_CONFLICTS
13962 fprintf(stderr, "split_ranges %d %s %p\n",
13963 rstate->passes, tops(range->defs->def->op), range->defs->def);
13965 if ((range->color == REG_UNNEEDED) ||
13966 (rstate->passes >= rstate->max_passes)) {
13969 split = split_constrained_ranges(state, rstate, range);
13971 /* Ideally I would split the live range that will not be used
13972 * for the longest period of time in hopes that this will
13973 * (a) allow me to spill a register or
13974 * (b) allow me to place a value in another register.
13976 * So far I don't have a test case for this, the resolving
13977 * of mandatory constraints has solved all of my
13978 * known issues. So I have chosen not to write any
13979 * code until I can get a better feel for cases where
13980 * it would be useful to have.
13983 #warning "WISHLIST implement live range splitting..."
13984 if ((DEBUG_RANGE_CONFLICTS > 1) &&
13985 (!split || (DEBUG_RANGE_CONFLICTS > 2))) {
13986 print_interference_blocks(state, rstate, stderr, 0);
13987 print_dominators(state, stderr);
13992 #if DEBUG_COLOR_GRAPH > 1
13993 #define cgdebug_printf(...) fprintf(stdout, __VA_ARGS__)
13994 #define cgdebug_flush() fflush(stdout)
13995 #define cgdebug_loc(STATE, TRIPLE) loc(stdout, STATE, TRIPLE)
13996 #elif DEBUG_COLOR_GRAPH == 1
13997 #define cgdebug_printf(...) fprintf(stderr, __VA_ARGS__)
13998 #define cgdebug_flush() fflush(stderr)
13999 #define cgdebug_loc(STATE, TRIPLE) loc(stderr, STATE, TRIPLE)
14001 #define cgdebug_printf(...)
14002 #define cgdebug_flush()
14003 #define cgdebug_loc(STATE, TRIPLE)
/* Pick a register color for `range`. Strategy, in order:
 *   1) keep an already-assigned color if no neighbor uses it;
 *   2) reuse the color of an expression the range feeds into;
 *   3) reuse the color of (or of an input to) a phi it feeds;
 *   4) reuse a non-interfering rhs operand's color;
 *   5) fall back to the first free register in the range's classes.
 * If nothing is free, try split_ranges; failing that, report "too few
 * registers". On success the range's class mask is narrowed to the
 * chosen register's classes. */
14007 static int select_free_color(struct compile_state *state,
14008 struct reg_state *rstate, struct live_range *range)
14010 struct triple_set *entry;
14011 struct live_range_def *lrd;
14012 struct live_range_def *phi;
14013 struct live_range_edge *edge;
14014 char used[MAX_REGISTERS];
14015 struct triple **expr;
14017 /* Instead of doing just the trivial color select here I try
14018 * a few extra things because a good color selection will help reduce
14022 /* Find the registers currently in use */
14023 memset(used, 0, sizeof(used));
14024 for(edge = range->edges; edge; edge = edge->next) {
14025 if (edge->node->color == REG_UNSET) {
14028 reg_fill_used(state, used, edge->node->color);
14030 #if DEBUG_COLOR_GRAPH > 1
14034 for(edge = range->edges; edge; edge = edge->next) {
14037 cgdebug_printf("\n%s edges: %d @%s:%d.%d\n",
14038 tops(range->def->op), i,
14039 range->def->filename, range->def->line, range->def->col);
14040 for(i = 0; i < MAX_REGISTERS; i++) {
14042 cgdebug_printf("used: %s\n",
14049 /* If a color is already assigned see if it will work */
14050 if (range->color != REG_UNSET) {
14051 struct live_range_def *lrd;
14052 if (!used[range->color]) {
14055 for(edge = range->edges; edge; edge = edge->next) {
14056 if (edge->node->color != range->color) {
14059 warning(state, edge->node->defs->def, "edge: ");
14060 lrd = edge->node->defs;
14062 warning(state, lrd->def, " %p %s",
14063 lrd->def, tops(lrd->def->op));
14065 } while(lrd != edge->node->defs);
14068 warning(state, range->defs->def, "def: ");
14070 warning(state, lrd->def, " %p %s",
14071 lrd->def, tops(lrd->def->op));
14073 } while(lrd != range->defs);
14074 internal_error(state, range->defs->def,
14075 "live range with already used color %s",
14076 arch_reg_str(range->color));
14079 /* If I feed into an expression reuse its color.
14080 * This should help remove copies in the case of 2 register instructions
14081 * and phi functions.
14084 lrd = live_range_end(state, range, 0);
14085 for(; (range->color == REG_UNSET) && lrd ; lrd = live_range_end(state, range, lrd)) {
14086 entry = lrd->def->use;
14087 for(;(range->color == REG_UNSET) && entry; entry = entry->next) {
14088 struct live_range_def *insd;
14090 insd = &rstate->lrd[entry->member->id];
14091 if (insd->lr->defs == 0) {
14094 if (!phi && (insd->def->op == OP_PHI) &&
14095 !interfere(rstate, range, insd->lr)) {
14098 if (insd->lr->color == REG_UNSET) {
14101 regcm = insd->lr->classes;
14102 if (((regcm & range->classes) == 0) ||
14103 (used[insd->lr->color])) {
14106 if (interfere(rstate, range, insd->lr)) {
14109 range->color = insd->lr->color;
14112 /* If I feed into a phi function reuse its color or the color
14113 * of something else that feeds into the phi function.
14116 if (phi->lr->color != REG_UNSET) {
14117 if (used[phi->lr->color]) {
14118 range->color = phi->lr->color;
14122 expr = triple_rhs(state, phi->def, 0);
14123 for(; expr; expr = triple_rhs(state, phi->def, expr)) {
14124 struct live_range *lr;
14129 lr = rstate->lrd[(*expr)->id].lr;
14130 if (lr->color == REG_UNSET) {
14133 regcm = lr->classes;
14134 if (((regcm & range->classes) == 0) ||
14135 (used[lr->color])) {
14138 if (interfere(rstate, range, lr)) {
14141 range->color = lr->color;
14145 /* If I don't interfere with a rhs node reuse its color */
14146 lrd = live_range_head(state, range, 0);
14147 for(; (range->color == REG_UNSET) && lrd ; lrd = live_range_head(state, range, lrd)) {
14148 expr = triple_rhs(state, lrd->def, 0);
14149 for(; expr; expr = triple_rhs(state, lrd->def, expr)) {
14150 struct live_range *lr;
14155 lr = rstate->lrd[(*expr)->id].lr;
14156 if (lr->color == REG_UNSET) {
14159 regcm = lr->classes;
14160 if (((regcm & range->classes) == 0) ||
14161 (used[lr->color])) {
14164 if (interfere(rstate, range, lr)) {
14167 range->color = lr->color;
14171 /* If I have not opportunistically picked a useful color
14172 * pick the first color that is free.
14174 if (range->color == REG_UNSET) {
14176 arch_select_free_register(state, used, range->classes);
14178 if (range->color == REG_UNSET) {
14179 struct live_range_def *lrd;
14181 if (split_ranges(state, rstate, used, range)) {
14184 for(edge = range->edges; edge; edge = edge->next) {
14185 warning(state, edge->node->defs->def, "edge reg %s",
14186 arch_reg_str(edge->node->color));
14187 lrd = edge->node->defs;
14189 warning(state, lrd->def, " %s %p",
14190 tops(lrd->def->op), lrd->def);
14192 } while(lrd != edge->node->defs);
14194 warning(state, range->defs->def, "range: ");
14197 warning(state, lrd->def, " %s %p",
14198 tops(lrd->def->op), lrd->def);
14200 } while(lrd != range->defs);
14202 warning(state, range->defs->def, "classes: %x",
14204 for(i = 0; i < MAX_REGISTERS; i++) {
14206 warning(state, range->defs->def, "used: %s",
14210 #if DEBUG_COLOR_GRAPH < 2
14211 error(state, range->defs->def, "too few registers");
14213 internal_error(state, range->defs->def, "too few registers");
14216 range->classes &= arch_reg_regcm(state, range->color);
14217 if ((range->color == REG_UNSET) || (range->classes == 0)) {
14218 internal_error(state, range->defs->def, "select_free_color did not?");
/* color_graph - recursively color the live-range interference graph
 * (Chaitin/Briggs simplify-and-select).  Removes one node: preferably from
 * the low-degree list (trivially colorable), else from the high list.
 * Neighbors whose degree drops to their register-class size move to low.
 * After recursing on the reduced graph, a free color is selected for the
 * removed range.  Returns nonzero on success (via select_free_color).
 * NOTE(review): this listing omits interleaved source lines (gaps in the
 * original numbering), so some braces and statements are not visible here.
 */
14223 static int color_graph(struct compile_state *state, struct reg_state *rstate)
14226 struct live_range_edge *edge;
14227 struct live_range *range;
/* Prefer a node from the low (guaranteed-colorable) list. */
14229 cgdebug_printf("Lo: ");
14230 range = rstate->low;
14231 if (*range->group_prev != range) {
14232 internal_error(state, 0, "lo: *prev != range?");
/* Unlink range from the low list, fixing up the tail pointer. */
14234 *range->group_prev = range->group_next;
14235 if (range->group_next) {
14236 range->group_next->group_prev = range->group_prev;
14238 if (&range->group_next == rstate->low_tail) {
14239 rstate->low_tail = range->group_prev;
14241 if (rstate->low == range) {
14242 internal_error(state, 0, "low: next != prev?");
/* Otherwise take a node from the high-degree (potential spill) list. */
14245 else if (rstate->high) {
14246 cgdebug_printf("Hi: ");
14247 range = rstate->high;
14248 if (*range->group_prev != range) {
14249 internal_error(state, 0, "hi: *prev != range?");
14251 *range->group_prev = range->group_next;
14252 if (range->group_next) {
14253 range->group_next->group_prev = range->group_prev;
14255 if (&range->group_next == rstate->high_tail) {
14256 rstate->high_tail = range->group_prev;
14258 if (rstate->high == range) {
14259 internal_error(state, 0, "high: next != prev?");
14265 cgdebug_printf(" %d\n", range - rstate->lr);
/* group_prev == 0 marks the node as removed from both lists. */
14266 range->group_prev = 0;
/* Removing this node lowers each neighbor's effective degree; migrate any
 * uncolored neighbor that just became trivially colorable to the low list.
 */
14267 for(edge = range->edges; edge; edge = edge->next) {
14268 struct live_range *node;
14270 /* Move nodes from the high to the low list */
14271 if (node->group_prev && (node->color == REG_UNSET) &&
14272 (node->degree == regc_max_size(state, node->classes))) {
14273 if (*node->group_prev != node) {
14274 internal_error(state, 0, "move: *prev != node?");
14276 *node->group_prev = node->group_next;
14277 if (node->group_next) {
14278 node->group_next->group_prev = node->group_prev;
14280 if (&node->group_next == rstate->high_tail) {
14281 rstate->high_tail = node->group_prev;
14283 cgdebug_printf("Moving...%d to low\n", node - rstate->lr);
/* Append the node to the tail of the low list. */
14284 node->group_prev = rstate->low_tail;
14285 node->group_next = 0;
14286 *rstate->low_tail = node;
14287 rstate->low_tail = &node->group_next;
14288 if (*node->group_prev != node) {
14289 internal_error(state, 0, "move2: *prev != node?");
/* Color the rest of the graph first, then pick a color for this range. */
14294 colored = color_graph(state, rstate);
14296 cgdebug_printf("Coloring %d @", range - rstate->lr);
14297 cgdebug_loc(state, range->defs->def);
14299 colored = select_free_color(state, rstate, range);
14300 cgdebug_printf(" %s\n", arch_reg_str(range->color));
/* verify_colors - sanity pass over the colored interference graph.
 * For every defining triple: its id must index a valid live range, the
 * range must have a color, and that color must not collide with the color
 * of any interfering neighbor.  Aborts via internal_error on violation.
 */
14305 static void verify_colors(struct compile_state *state, struct reg_state *rstate)
14307 struct live_range *lr;
14308 struct live_range_edge *edge;
14309 struct triple *ins, *first;
14310 char used[MAX_REGISTERS];
14311 first = state->first;
14314 if (triple_is_def(state, ins)) {
14315 if ((ins->id < 0) || (ins->id > rstate->defs)) {
14316 internal_error(state, ins,
14317 "triple without a live range def");
14319 lr = rstate->lrd[ins->id].lr;
14320 if (lr->color == REG_UNSET) {
14321 internal_error(state, ins,
14322 "triple without a color");
14324 /* Find the registers used by the edges */
14325 memset(used, 0, sizeof(used));
14326 for(edge = lr->edges; edge; edge = edge->next) {
14327 if (edge->node->color == REG_UNSET) {
14328 internal_error(state, 0,
14329 "live range without a color");
14331 reg_fill_used(state, used, edge->node->color);
/* The range's own color must be free of all neighbor colors. */
14333 if (used[lr->color]) {
14334 internal_error(state, ins,
14335 "triple with already used color");
14339 } while(ins != first);
/* color_triples - write the allocated register of each live range back
 * into the owning triples (via SET_REG on the triple id).  Walks the whole
 * circular instruction list starting at state->first.
 */
14342 static void color_triples(struct compile_state *state, struct reg_state *rstate)
14344 struct live_range *lr;
14345 struct triple *first, *ins;
14346 first = state->first;
14349 if ((ins->id < 0) || (ins->id > rstate->defs)) {
14350 internal_error(state, ins,
14351 "triple without a live range");
14353 lr = rstate->lrd[ins->id].lr;
14354 SET_REG(ins->id, lr->color);
14356 } while (ins != first);
/* merge_sort_lr - merge sort of the live-range array slice [first, last]
 * into a doubly linked list (group_next/group_prev), ordered ascending by
 * degree, with length as the tie breaker.  Recurses on each half of the
 * array, then merges the two resulting lists.  Returns the list head.
 * NOTE(review): several lines are elided from this listing (base case,
 * list-head setup, return); the visible logic is the merge loop only.
 */
14359 static struct live_range *merge_sort_lr(
14360 struct live_range *first, struct live_range *last)
14362 struct live_range *mid, *join, **join_tail, *pick;
14364 size = (last - first) + 1;
/* Split at the midpoint and sort each half. */
14366 mid = first + size/2;
14367 first = merge_sort_lr(first, mid -1);
14368 mid = merge_sort_lr(mid, last);
14372 /* merge the two lists */
14373 while(first && mid) {
/* Pick the smaller head: lower degree wins, then shorter length. */
14374 if ((first->degree < mid->degree) ||
14375 ((first->degree == mid->degree) &&
14376 (first->length < mid->length))) {
14378 first = first->group_next;
14380 first->group_prev = 0;
14385 mid = mid->group_next;
14387 mid->group_prev = 0;
/* Append the chosen node to the joined list tail. */
14390 pick->group_next = 0;
14391 pick->group_prev = join_tail;
14393 join_tail = &pick->group_next;
14395 /* Splice the remaining list */
14396 pick = (first)? first : mid;
14399 pick->group_prev = join_tail;
/* NOTE(review): the line below (14403) appears to belong to a following,
 * mostly-elided function in the original source — confirm against the
 * full file before editing.
 */
14403 if (!first->defs) {
/* ids_from_rstate - restore each triple's original id from the live-range
 * def table (lrd[id].orig_id), undoing the renumbering done for register
 * allocation.  No-op when no defs were recorded.  Optionally dumps the
 * blocks/control flow when interference debugging is enabled.
 */
14411 static void ids_from_rstate(struct compile_state *state,
14412 struct reg_state *rstate)
14414 struct triple *ins, *first;
14415 if (!rstate->defs) {
14418 /* Display the graph if desired */
14419 if (state->debug & DEBUG_INTERFERENCE) {
14420 print_blocks(state, stdout);
14421 print_control_flow(state);
14423 first = state->first;
14427 struct live_range_def *lrd;
14428 lrd = &rstate->lrd[ins->id];
14429 ins->id = lrd->orig_id;
14432 } while(ins != first);
/* cleanup_live_edges - free the interference edge lists of every live
 * range.  Note ranges are indexed from 1 (slot 0 is unused).
 */
14435 static void cleanup_live_edges(struct reg_state *rstate)
14438 /* Free the edges on each node */
14439 for(i = 1; i <= rstate->ranges; i++) {
14440 remove_live_edges(rstate, &rstate->lr[i]);
/* cleanup_rstate - release all temporary register-allocation state:
 * interference edges, the live-range def table, and the per-block
 * variable lifetime data; then reset counters/pointers for the next pass.
 */
14444 static void cleanup_rstate(struct compile_state *state, struct reg_state *rstate)
14446 cleanup_live_edges(rstate);
14447 xfree(rstate->lrd);
14450 /* Free the variable lifetime information */
14451 if (rstate->blocks) {
14452 free_variable_lifetimes(state, rstate->blocks);
14455 rstate->ranges = 0;
14458 rstate->blocks = 0;
14461 static void verify_consistency(struct compile_state *state);
/* allocate_registers - top-level driver for graph-coloring register
 * allocation.  Repeats (up to rstate.max_passes) the sequence:
 *   1. compute variable lifetimes,
 *   2. fix coalesce conflicts and register tangles,
 *   3. build live ranges and the interference graph (coalescing loop),
 *   4. sort ranges by degree, partition into low/high worklists,
 *   5. color the graph (color_graph),
 * until coloring succeeds; then verifies and writes colors back to the
 * triples.  NOTE(review): this listing omits interleaved source lines, so
 * loop headers and some braces are not visible here.
 */
14462 static void allocate_registers(struct compile_state *state)
14464 struct reg_state rstate;
14467 /* Clear out the reg_state */
14468 memset(&rstate, 0, sizeof(rstate));
14469 rstate.max_passes = MAX_ALLOCATION_PASSES;
14472 struct live_range **point, **next;
14477 #if DEBUG_RANGE_CONFLICTS
14478 fprintf(stderr, "pass: %d\n", rstate.passes);
/* Restore original triple ids from any previous pass. */
14482 ids_from_rstate(state, &rstate);
14484 /* Cleanup the temporary data structures */
14485 cleanup_rstate(state, &rstate);
14487 /* Compute the variable lifetimes */
14488 rstate.blocks = compute_variable_lifetimes(state);
14490 /* Fix invalid mandatory live range coalesce conflicts */
14491 conflicts = correct_coalesce_conflicts(state, rstate.blocks);
14493 /* Fix two simultaneous uses of the same register.
14494 * In a few pathlogical cases a partial untangle moves
14495 * the tangle to a part of the graph we won't revisit.
14496 * So we keep looping until we have no more tangle fixes
14500 tangles = correct_tangles(state, rstate.blocks);
14503 if (state->debug & DEBUG_INSERTED_COPIES) {
14504 printf("After resolve_tangles\n");
14505 print_blocks(state, stdout);
14506 print_control_flow(state);
14508 verify_consistency(state);
14510 /* Allocate and initialize the live ranges */
14511 initialize_live_ranges(state, &rstate);
14513 /* Note current doing coalescing in a loop appears to
14514 * buys me nothing. The code is left this way in case
14515 * there is some value in it. Or if a future bugfix
14516 * yields some benefit.
14519 #if DEBUG_COALESCING
14520 fprintf(stderr, "coalescing\n");
14522 /* Remove any previous live edge calculations */
14523 cleanup_live_edges(&rstate);
14525 /* Compute the interference graph */
14526 walk_variable_lifetimes(
14527 state, rstate.blocks, graph_ins, &rstate);
14529 /* Display the interference graph if desired */
14530 if (state->debug & DEBUG_INTERFERENCE) {
14531 print_interference_blocks(state, &rstate, stdout, 1);
14532 printf("\nlive variables by instruction\n");
14533 walk_variable_lifetimes(
14534 state, rstate.blocks,
14535 print_interference_ins, &rstate);
14538 coalesced = coalesce_live_ranges(state, &rstate);
14540 #if DEBUG_COALESCING
14541 fprintf(stderr, "coalesced: %d\n", coalesced);
/* Keep coalescing until a fixed point is reached. */
14543 } while(coalesced);
14545 #if DEBUG_CONSISTENCY > 1
14547 fprintf(stderr, "verify_graph_ins...\n");
14549 /* Verify the interference graph */
14550 walk_variable_lifetimes(
14551 state, rstate.blocks, verify_graph_ins, &rstate);
14553 fprintf(stderr, "verify_graph_ins done\n");
14557 /* Build the groups low and high. But with the nodes
14558 * first sorted by degree order.
14560 rstate.low_tail = &rstate.low;
14561 rstate.high_tail = &rstate.high;
/* Live ranges are indexed from 1; sort them all by (degree, length). */
14562 rstate.high = merge_sort_lr(&rstate.lr[1], &rstate.lr[rstate.ranges]);
14564 rstate.high->group_prev = &rstate.high;
14566 for(point = &rstate.high; *point; point = &(*point)->group_next)
14568 rstate.high_tail = point;
14569 /* Walk through the high list and move everything that needs
14572 for(point = &rstate.high; *point; point = next) {
14573 struct live_range *range;
/* Capture next early: the node may be unlinked below. */
14574 next = &(*point)->group_next;
14577 /* If it has a low degree or it already has a color
14578 * place the node in low.
14580 if ((range->degree < regc_max_size(state, range->classes)) ||
14581 (range->color != REG_UNSET)) {
14582 cgdebug_printf("Lo: %5d degree %5d%s\n",
14583 range - rstate.lr, range->degree,
14584 (range->color != REG_UNSET) ? " (colored)": "");
14585 *range->group_prev = range->group_next;
14586 if (range->group_next) {
14587 range->group_next->group_prev = range->group_prev;
14589 if (&range->group_next == rstate.high_tail) {
14590 rstate.high_tail = range->group_prev;
14592 range->group_prev = rstate.low_tail;
14593 range->group_next = 0;
14594 *rstate.low_tail = range;
14595 rstate.low_tail = &range->group_next;
14599 cgdebug_printf("hi: %5d degree %5d%s\n",
14600 range - rstate.lr, range->degree,
14601 (range->color != REG_UNSET) ? " (colored)": "");
14604 /* Color the live_ranges */
14605 colored = color_graph(state, &rstate);
/* Retry the whole pass if coloring failed (ranges were split). */
14607 } while (!colored);
14609 /* Verify the graph was properly colored */
14610 verify_colors(state, &rstate);
14612 /* Move the colors from the graph to the triples */
14613 color_triples(state, &rstate);
14615 /* Cleanup the temporary data structures */
14616 cleanup_rstate(state, &rstate);
14619 /* Sparce Conditional Constant Propogation
14620 * =========================================
/* Data structures for Wegman/Zadeck-style SCCP: one lattice_node per
 * instruction, one ssa_edge per def->use pair, one flow_block (with
 * embedded left/right out edges) per basic block.
 */
14624 struct lattice_node {
14626 struct triple *def;
14627 struct ssa_edge *out;
14628 struct flow_block *fblock;
/* val encodes the lattice position of this node (see table below). */
14629 struct triple *val;
14630 /* lattice high val && !is_const(val)
14631 * lattice const is_const(val)
14632 * lattice low val == 0
/* NOTE(review): the "struct ssa_edge {" header line is elided from this
 * listing; the fields below are the SSA def->use edge plus its intrusive
 * work-list links.
 */
14636 struct lattice_node *src;
14637 struct lattice_node *dst;
14638 struct ssa_edge *work_next;
14639 struct ssa_edge *work_prev;
14640 struct ssa_edge *out_next;
/* NOTE(review): the "struct flow_edge {" header line is likewise elided;
 * these are control-flow edge fields and work-list links.
 */
14643 struct flow_block *src;
14644 struct flow_block *dst;
14645 struct flow_edge *work_next;
14646 struct flow_edge *work_prev;
14647 struct flow_edge *in_next;
14648 struct flow_edge *out_next;
14651 struct flow_block {
14652 struct block *block;
14653 struct flow_edge *in;
14654 struct flow_edge *out;
/* Out edges are embedded: at most two successors (left/right). */
14655 struct flow_edge left, right;
/* NOTE(review): the "struct scc_state {" header line is elided; the
 * fields below are the SCCP driver state: node/edge arrays plus the two
 * work lists.
 */
14660 struct lattice_node *lattice;
14661 struct ssa_edge *ssa_edges;
14662 struct flow_block *flow_blocks;
14663 struct flow_edge *flow_work_list;
14664 struct ssa_edge *ssa_work_list;
/* scc_add_fedge - append a flow edge to the circular flow work list.
 * A detached edge has work_next == work_prev == itself; that (or being
 * the list head) marks it as already queued, so duplicates are skipped.
 */
14668 static void scc_add_fedge(struct compile_state *state, struct scc_state *scc,
14669 struct flow_edge *fedge)
14671 if ((fedge == scc->flow_work_list) ||
14672 (fedge->work_next != fedge) ||
14673 (fedge->work_prev != fedge)) {
14676 if (!scc->flow_work_list) {
14677 scc->flow_work_list = fedge;
14678 fedge->work_next = fedge->work_prev = fedge;
14681 struct flow_edge *ftail;
/* Insert before the head, i.e. at the tail of the circular list. */
14682 ftail = scc->flow_work_list->work_prev;
14683 fedge->work_next = ftail->work_next;
14684 fedge->work_prev = ftail;
14685 fedge->work_next->work_prev = fedge;
14686 fedge->work_prev->work_next = fedge;
/* scc_next_fedge - pop the head of the flow work list (or return the
 * elided NULL when empty) and re-self-link it to mark it dequeued.
 */
14690 static struct flow_edge *scc_next_fedge(
14691 struct compile_state *state, struct scc_state *scc)
14693 struct flow_edge *fedge;
14694 fedge = scc->flow_work_list;
14696 fedge->work_next->work_prev = fedge->work_prev;
14697 fedge->work_prev->work_next = fedge->work_next;
14698 if (fedge->work_next != fedge) {
14699 scc->flow_work_list = fedge->work_next;
14701 scc->flow_work_list = 0;
14703 fedge->work_next = fedge->work_prev = fedge;
/* scc_add_sedge - append an SSA edge to the circular SSA work list;
 * mirror image of scc_add_fedge, with optional debug tracing.
 */
14708 static void scc_add_sedge(struct compile_state *state, struct scc_state *scc,
14709 struct ssa_edge *sedge)
14712 fprintf(stderr, "adding sedge: %5d (%4d -> %5d)\n",
14713 sedge - scc->ssa_edges,
14714 sedge->src->def->id,
14715 sedge->dst->def->id);
14717 if ((sedge == scc->ssa_work_list) ||
14718 (sedge->work_next != sedge) ||
14719 (sedge->work_prev != sedge)) {
14721 fprintf(stderr, "dupped sedge: %5d\n",
14722 sedge - scc->ssa_edges);
14726 if (!scc->ssa_work_list) {
14727 scc->ssa_work_list = sedge;
14728 sedge->work_next = sedge->work_prev = sedge;
14731 struct ssa_edge *stail;
14732 stail = scc->ssa_work_list->work_prev;
14733 sedge->work_next = stail->work_next;
14734 sedge->work_prev = stail;
14735 sedge->work_next->work_prev = sedge;
14736 sedge->work_prev->work_next = sedge;
/* scc_next_sedge - pop the head of the SSA work list; mirror image of
 * scc_next_fedge.
 */
14740 static struct ssa_edge *scc_next_sedge(
14741 struct compile_state *state, struct scc_state *scc)
14743 struct ssa_edge *sedge;
14744 sedge = scc->ssa_work_list;
14746 sedge->work_next->work_prev = sedge->work_prev;
14747 sedge->work_prev->work_next = sedge->work_next;
14748 if (sedge->work_next != sedge) {
14749 scc->ssa_work_list = sedge->work_next;
14751 scc->ssa_work_list = 0;
14753 sedge->work_next = sedge->work_prev = sedge;
/* initialize_scc_state - build the SCCP data structures in three passes:
 *   pass 0: count instructions and def->use (ssa) edges to size arrays;
 *   pass 1: allocate and fill lattice nodes, renumbering triple ids to
 *           lattice indices (old id saved in old_id) and numbering blocks;
 *   pass 2: wire up flow edges (block left/right successors and in lists)
 *           and ssa edges (per use of each instruction).
 * Finally a dummy flow block 0 with one edge into the start block seeds
 * the flow work list.
 */
14758 static void initialize_scc_state(
14759 struct compile_state *state, struct scc_state *scc)
14761 int ins_count, ssa_edge_count;
14762 int ins_index, ssa_edge_index, fblock_index;
14763 struct triple *first, *ins;
14764 struct block *block;
14765 struct flow_block *fblock;
14767 memset(scc, 0, sizeof(*scc));
14769 /* Inialize pass zero find out how much memory we need */
14770 first = state->first;
14772 ins_count = ssa_edge_count = 0;
14774 struct triple_set *edge;
14776 for(edge = ins->use; edge; edge = edge->next) {
14780 } while(ins != first);
14782 fprintf(stderr, "ins_count: %d ssa_edge_count: %d vertex_count: %d\n",
14783 ins_count, ssa_edge_count, state->last_vertex);
/* Arrays are sized +1: index 0 is reserved (ids start at 1). */
14785 scc->ins_count = ins_count;
14787 xcmalloc(sizeof(*scc->lattice)*(ins_count + 1), "lattice");
14789 xcmalloc(sizeof(*scc->ssa_edges)*(ssa_edge_count + 1), "ssa_edges");
14791 xcmalloc(sizeof(*scc->flow_blocks)*(state->last_vertex + 1),
14794 /* Initialize pass one collect up the nodes */
14797 ins_index = ssa_edge_index = fblock_index = 0;
/* A label whose block differs from the current one starts a new block. */
14800 if ((ins->op == OP_LABEL) && (block != ins->u.block)) {
14801 block = ins->u.block;
14803 internal_error(state, ins, "label without block");
14806 block->vertex = fblock_index;
14807 fblock = &scc->flow_blocks[fblock_index];
14808 fblock->block = block;
14811 struct lattice_node *lnode;
14813 lnode = &scc->lattice[ins_index];
14816 lnode->fblock = fblock;
/* val == def means LATTICE HIGH (not yet proven const or varying). */
14817 lnode->val = ins; /* LATTICE HIGH */
14818 lnode->old_id = ins->id;
14819 ins->id = ins_index;
14822 } while(ins != first);
14823 /* Initialize pass two collect up the edges */
14828 if ((ins->op == OP_LABEL) && (block != ins->u.block)) {
14829 struct flow_edge *fedge, **ftail;
14830 struct block_set *bedge;
14831 block = ins->u.block;
14832 fblock = &scc->flow_blocks[block->vertex];
/* Build the out-edge list from the embedded left/right edges. */
14835 ftail = &fblock->out;
14837 fblock->left.dst = &scc->flow_blocks[block->left->vertex];
14838 if (fblock->left.dst->block != block->left) {
14839 internal_error(state, 0, "block mismatch");
14841 fblock->left.out_next = 0;
14842 *ftail = &fblock->left;
14843 ftail = &fblock->left.out_next;
14845 if (block->right) {
14846 fblock->right.dst = &scc->flow_blocks[block->right->vertex];
14847 if (fblock->right.dst->block != block->right) {
14848 internal_error(state, 0, "block mismatch");
14850 fblock->right.out_next = 0;
14851 *ftail = &fblock->right;
14852 ftail = &fblock->right.out_next;
/* Edges start detached from the work list and non-executable. */
14854 for(fedge = fblock->out; fedge; fedge = fedge->out_next) {
14855 fedge->src = fblock;
14856 fedge->work_next = fedge->work_prev = fedge;
14857 fedge->executable = 0;
/* Build the in-edge list from each predecessor's embedded edge. */
14859 ftail = &fblock->in;
14860 for(bedge = block->use; bedge; bedge = bedge->next) {
14861 struct block *src_block;
14862 struct flow_block *sfblock;
14863 struct flow_edge *sfedge;
14864 src_block = bedge->member;
14865 sfblock = &scc->flow_blocks[src_block->vertex];
14867 if (src_block->left == block) {
14868 sfedge = &sfblock->left;
14870 sfedge = &sfblock->right;
14873 ftail = &sfedge->in_next;
14874 sfedge->in_next = 0;
/* One ssa edge per use of this instruction's value. */
14878 struct triple_set *edge;
14879 struct ssa_edge **stail;
14880 struct lattice_node *lnode;
14881 lnode = &scc->lattice[ins->id];
14883 stail = &lnode->out;
14884 for(edge = ins->use; edge; edge = edge->next) {
14885 struct ssa_edge *sedge;
14886 ssa_edge_index += 1;
14887 sedge = &scc->ssa_edges[ssa_edge_index];
14889 stail = &sedge->out_next;
14890 sedge->src = lnode;
14891 sedge->dst = &scc->lattice[edge->member->id];
14892 sedge->work_next = sedge->work_prev = sedge;
14893 sedge->out_next = 0;
14897 } while(ins != first);
14898 /* Setup a dummy block 0 as a node above the start node */
14900 struct flow_block *fblock, *dst;
14901 struct flow_edge *fedge;
14902 fblock = &scc->flow_blocks[0];
14905 fblock->out = &fblock->left;
14906 dst = &scc->flow_blocks[state->first_block->vertex];
14907 fedge = &fblock->left;
14908 fedge->src = fblock;
14910 fedge->work_next = fedge;
14911 fedge->work_prev = fedge;
14912 fedge->in_next = fedge->dst->in;
14913 fedge->out_next = 0;
14914 fedge->executable = 0;
14915 fedge->dst->in = fedge;
14917 /* Initialize the work lists */
14918 scc->flow_work_list = 0;
14919 scc->ssa_work_list = 0;
/* Seed the propagation with the entry edge. */
14920 scc_add_fedge(state, scc, fedge);
14923 fprintf(stderr, "ins_index: %d ssa_edge_index: %d fblock_index: %d\n",
14924 ins_index, ssa_edge_index, fblock_index);
/* free_scc_state - release the three SCCP arrays allocated in
 * initialize_scc_state.
 */
14929 static void free_scc_state(
14930 struct compile_state *state, struct scc_state *scc)
14932 xfree(scc->flow_blocks);
14933 xfree(scc->ssa_edges);
14934 xfree(scc->lattice);
/* triple_to_lattice - map an instruction to its lattice node via the
 * renumbered id.  Valid ids start at 1 (index 0 is reserved).
 */
14938 static struct lattice_node *triple_to_lattice(
14939 struct compile_state *state, struct scc_state *scc, struct triple *ins)
14941 if (ins->id <= 0) {
14942 internal_error(state, ins, "bad id");
14944 return &scc->lattice[ins->id];
/* preserve_lval - duplicate the node's current lattice value so a later
 * lval_changed() can compare old vs. new.  Caller owns the copy.
 */
14947 static struct triple *preserve_lval(
14948 struct compile_state *state, struct lattice_node *lnode)
14950 struct triple *old;
14951 /* Preserve the original value */
14953 old = dup_triple(state, lnode->val);
14954 if (lnode->val != lnode->def) {
/* lval_changed - compare the saved old lattice value against the node's
 * current one.  Returns nonzero when the lattice position moved (e.g.
 * high -> const or const -> low).  Two const values compare equal when
 * their params and u fields are memcmp-identical.
 * NOTE(review): several branches are elided from this listing.
 */
14964 static int lval_changed(struct compile_state *state,
14965 struct triple *old, struct lattice_node *lnode)
14968 /* See if the lattice value has changed */
14970 if (!old && !lnode->val) {
14973 if (changed && lnode->val && !is_const(lnode->val)) {
14977 lnode->val && old &&
14978 (memcmp(lnode->val->param, old->param,
14979 TRIPLE_SIZE(lnode->val->sizes) * sizeof(lnode->val->param[0])) == 0) &&
14980 (memcmp(&lnode->val->u, &old->u, sizeof(old->u)) == 0)) {
/* scc_visit_phi - evaluate a phi node under the SCCP meet rules, taking
 * only executable incoming flow edges into account:
 *   meet(X, low)   = low
 *   meet(X, high)  = X
 *   meet(c1, c2)   = c1 if equal, else low
 * If the lattice value changed, all outgoing ssa edges are requeued.
 */
14990 static void scc_visit_phi(struct compile_state *state, struct scc_state *scc,
14991 struct lattice_node *lnode)
14993 struct lattice_node *tmp;
14994 struct triple **slot, *old;
14995 struct flow_edge *fedge;
14998 if (lnode->def->op != OP_PHI) {
14999 internal_error(state, lnode->def, "not phi");
15001 /* Store the original value */
15002 old = preserve_lval(state, lnode);
15004 /* default to lattice high */
15005 lnode->val = lnode->def;
/* slot[i] is the phi argument for incoming edge i. */
15006 slot = &RHS(lnode->def, 0);
15008 for(fedge = lnode->fblock->in; fedge; index++, fedge = fedge->in_next) {
15010 fprintf(stderr, "Examining edge: %d vertex: %d executable: %d\n",
15012 fedge->dst->block->vertex,
/* Non-executable predecessors contribute nothing to the meet. */
15016 if (!fedge->executable) {
15019 if (!slot[index]) {
15020 internal_error(state, lnode->def, "no phi value");
15022 tmp = triple_to_lattice(state, scc, slot[index]);
15023 /* meet(X, lattice low) = lattice low */
15027 /* meet(X, lattice high) = X */
15028 else if (!tmp->val) {
15029 lnode->val = lnode->val;
15031 /* meet(lattice high, X) = X */
15032 else if (!is_const(lnode->val)) {
15033 lnode->val = dup_triple(state, tmp->val);
15034 lnode->val->type = lnode->def->type;
15036 /* meet(const, const) = const or lattice low */
15037 else if (!constants_equal(state, lnode->val, tmp->val)) {
15044 changed = lval_changed(state, old, lnode);
15046 fprintf(stderr, "%p phi: %d -> %s %s\n",
15049 ((!lnode->val)? "lo": is_const(lnode->val)? "const": "hi"),
15050 changed? "changed" : ""
15053 /* If the lattice value has changed update the work lists. */
15055 struct ssa_edge *sedge;
15056 for(sedge = lnode->out; sedge; sedge = sedge->out_next) {
15057 scc_add_sedge(state, scc, sedge);
/* compute_lnode_val - re-evaluate a non-phi instruction's lattice value.
 * Builds a detached scratch copy of the def whose operands are replaced
 * by their current lattice values, runs simplify() on it, then decides
 * the new lattice position:
 *   - result constant        -> lattice const (keep scratch as val),
 *   - result same as def     -> lattice high (val = def),
 *   - impure/volatile defs   -> lattice low (elided branch).
 * Returns nonzero when the lattice value changed.
 */
15062 static int compute_lnode_val(struct compile_state *state, struct scc_state *scc,
15063 struct lattice_node *lnode)
15066 struct triple *old, *scratch;
15067 struct triple **dexpr, **vexpr;
15070 /* Store the original value */
15071 old = preserve_lval(state, lnode);
15073 /* Reinitialize the value */
15074 lnode->val = scratch = dup_triple(state, lnode->def);
/* Keep the scratch copy detached from the instruction list. */
15075 scratch->id = lnode->old_id;
15076 scratch->next = scratch;
15077 scratch->prev = scratch;
/* Substitute each rhs/misc operand with its current lattice value
 * (skipping the MISC..TARG window, i.e. only rhs and targ operands). */
15080 count = TRIPLE_SIZE(scratch->sizes);
15081 for(i = 0; i < count; i++) {
15082 dexpr = &lnode->def->param[i];
15083 vexpr = &scratch->param[i];
15085 if (((i < TRIPLE_MISC_OFF(scratch->sizes)) ||
15086 (i >= TRIPLE_TARG_OFF(scratch->sizes))) &&
15088 struct lattice_node *tmp;
15089 tmp = triple_to_lattice(state, scc, *dexpr);
15090 *vexpr = (tmp->val)? tmp->val : tmp->def;
/* Branches need a real next so simplify can fold the target. */
15093 if (scratch->op == OP_BRANCH) {
15094 scratch->next = lnode->def->next;
15096 /* Recompute the value */
15097 #warning "FIXME see if simplify does anything bad"
15098 /* So far it looks like only the strength reduction
15099 * optimization are things I need to worry about.
15101 simplify(state, scratch);
15102 /* Cleanup my value */
15103 if (scratch->use) {
15104 internal_error(state, lnode->def, "scratch used?");
15106 if ((scratch->prev != scratch) ||
15107 ((scratch->next != scratch) &&
15108 ((lnode->def->op != OP_BRANCH) ||
15109 (scratch->next != lnode->def->next)))) {
15110 internal_error(state, lnode->def, "scratch in list?");
15112 /* undo any uses... */
15113 count = TRIPLE_SIZE(scratch->sizes);
15114 for(i = 0; i < count; i++) {
15115 vexpr = &scratch->param[i];
15117 unuse_triple(*vexpr, scratch);
/* Not a constant: check whether it degenerated back to the def. */
15120 if (!is_const(scratch)) {
15121 for(i = 0; i < count; i++) {
15122 dexpr = &lnode->def->param[i];
15123 if (((i < TRIPLE_MISC_OFF(scratch->sizes)) ||
15124 (i >= TRIPLE_TARG_OFF(scratch->sizes))) &&
15126 struct lattice_node *tmp;
15127 tmp = triple_to_lattice(state, scc, *dexpr);
15135 (lnode->val->op == lnode->def->op) &&
15136 (memcmp(lnode->val->param, lnode->def->param,
15137 count * sizeof(lnode->val->param[0])) == 0) &&
15138 (memcmp(&lnode->val->u, &lnode->def->u, sizeof(lnode->def->u)) == 0)) {
/* Identical to the def: this is lattice high again. */
15139 lnode->val = lnode->def;
15141 /* Find the cases that are always lattice lo */
15143 triple_is_def(state, lnode->val) &&
15144 !triple_is_pure(state, lnode->val, lnode->old_id)) {
15147 /* See if the lattice value has changed */
15148 changed = lval_changed(state, old, lnode);
15149 /* See if this value should not change */
15151 (( !triple_is_def(state, lnode->def) &&
15152 !triple_is_cond_branch(state, lnode->def)) ||
15153 (lnode->def->op == OP_PIECE))) {
15154 #warning "FIXME constant propogate through expressions with multiple left hand sides"
15156 internal_warning(state, lnode->def, "non def changes value?");
15160 /* See if we need to free the scratch value */
15161 if (lnode->val != scratch) {
/* scc_visit_branch - mark successor flow edges executable-pending for a
 * branch.  Unconditional branches are handled by the caller; here, for a
 * conditional branch: if the condition is not yet const (lattice high or
 * low) both out edges are queued, otherwise only the taken edge
 * (out->out_next for true, out for false) is queued.
 */
15167 static void scc_visit_branch(struct compile_state *state, struct scc_state *scc,
15168 struct lattice_node *lnode)
15170 struct lattice_node *cond;
15173 struct flow_edge *fedge;
15174 fprintf(stderr, "branch: %d (",
15177 for(fedge = lnode->fblock->out; fedge; fedge = fedge->out_next) {
15178 fprintf(stderr, " %d", fedge->dst->block->vertex);
15180 fprintf(stderr, " )");
15181 if (TRIPLE_RHS(lnode->def->sizes) > 0) {
15182 fprintf(stderr, " <- %d",
15183 RHS(lnode->def, 0)->id);
15185 fprintf(stderr, "\n");
15188 if (lnode->def->op != OP_BRANCH) {
15189 internal_error(state, lnode->def, "not branch");
15191 /* This only applies to conditional branches */
15192 if (TRIPLE_RHS(lnode->def->sizes) == 0) {
15195 cond = triple_to_lattice(state, scc, RHS(lnode->def,0));
15196 if (cond->val && !is_const(cond->val)) {
15197 #warning "FIXME do I need to do something here?"
15198 warning(state, cond->def, "condition not constant?");
/* cond->val == 0 is lattice low (varying): both targets reachable. */
15201 if (cond->val == 0) {
15202 scc_add_fedge(state, scc, cond->fblock->out);
15203 scc_add_fedge(state, scc, cond->fblock->out->out_next);
15205 else if (cond->val->u.cval) {
15206 scc_add_fedge(state, scc, cond->fblock->out->out_next);
15209 scc_add_fedge(state, scc, cond->fblock->out);
/* scc_visit_expr - re-evaluate a non-phi instruction.  Branches delegate
 * to scc_visit_branch to propagate reachability; for other instructions,
 * a changed lattice value requeues every outgoing ssa edge.
 */
15214 static void scc_visit_expr(struct compile_state *state, struct scc_state *scc,
15215 struct lattice_node *lnode)
15219 changed = compute_lnode_val(state, scc, lnode);
15222 struct triple **expr;
15223 fprintf(stderr, "expr: %3d %10s (",
15224 lnode->def->id, tops(lnode->def->op));
15225 expr = triple_rhs(state, lnode->def, 0);
15226 for(;expr;expr = triple_rhs(state, lnode->def, expr)) {
15228 fprintf(stderr, " %d", (*expr)->id);
15231 fprintf(stderr, " ) -> %s\n",
15232 (!lnode->val)? "lo": is_const(lnode->val)? "const": "hi");
15235 if (lnode->def->op == OP_BRANCH) {
15236 scc_visit_branch(state, scc, lnode);
15239 else if (changed) {
15240 struct ssa_edge *sedge;
15241 for(sedge = lnode->out; sedge; sedge = sedge->out_next) {
15242 scc_add_sedge(state, scc, sedge);
/* scc_writeback_values - after propagation converges, fold the discovered
 * constants back into the instruction stream.  Warns about reachable
 * nodes still at lattice high, restores original triple ids, rewrites
 * int/addr constants in place (mkconst/mkaddr_const), and re-simplifies
 * everything else.
 */
15247 static void scc_writeback_values(
15248 struct compile_state *state, struct scc_state *scc)
15250 struct triple *first, *ins;
15251 first = state->first;
15254 struct lattice_node *lnode;
15255 lnode = triple_to_lattice(state, scc, ins);
/* Reachable nodes should have been driven below lattice high. */
15258 !is_const(lnode->val) &&
15259 !triple_is_uncond_branch(state, lnode->val) &&
15260 (lnode->val->op != OP_NOOP))
15262 struct flow_edge *fedge;
15265 for(fedge = lnode->fblock->in;
15266 !executable && fedge; fedge = fedge->in_next) {
15267 executable |= fedge->executable;
15270 internal_warning(state, lnode->val,
15271 "lattice node %d %s->%s still high?",
15273 tops(lnode->def->op),
15274 tops(lnode->val->op));
/* Restore the pre-SCCP id before touching the instruction. */
15279 ins->id = lnode->old_id;
15280 if (lnode->val && (lnode->val != ins)) {
15281 /* See if it something I know how to write back */
15282 switch(lnode->val->op) {
15284 mkconst(state, ins, lnode->val->u.cval);
15287 mkaddr_const(state, ins,
15288 MISC(lnode->val, 0), lnode->val->u.cval);
15291 /* By default don't copy the changes,
15292 * recompute them in place instead.
15294 simplify(state, ins);
15297 if (is_const(lnode->val) &&
15298 !constants_equal(state, lnode->val, ins)) {
15299 internal_error(state, 0, "constants not equal");
15301 /* Free the lattice nodes */
15306 } while(ins != first);
/* scc_transform - sparse conditional constant propagation driver
 * (Wegman/Zadeck).  Alternately drains the flow work list (marking edges
 * executable and visiting the newly reachable block's instructions) and
 * the ssa work list (revisiting the uses of values whose lattice position
 * changed) until both are empty, then writes the results back.
 */
15309 static void scc_transform(struct compile_state *state)
15311 struct scc_state scc;
15313 initialize_scc_state(state, &scc);
15315 while(scc.flow_work_list || scc.ssa_work_list) {
15316 struct flow_edge *fedge;
15317 struct ssa_edge *sedge;
15318 struct flow_edge *fptr;
15319 while((fedge = scc_next_fedge(state, &scc))) {
15320 struct block *block;
15321 struct triple *ptr;
15322 struct flow_block *fblock;
/* Skip edges already processed. */
15325 if (fedge->executable) {
15329 internal_error(state, 0, "fedge without dst");
15332 internal_error(state, 0, "fedge without src");
15334 fedge->executable = 1;
15335 fblock = fedge->dst;
15336 block = fblock->block;
/* Count executable in-edges: first visit evaluates everything,
 * later visits re-evaluate phis only. */
15338 for(fptr = fblock->in; fptr; fptr = fptr->in_next) {
15339 if (fptr->executable) {
15344 fprintf(stderr, "vertex: %d reps: %d\n",
15345 block->vertex, reps);
15349 for(ptr = block->first; !done; ptr = ptr->next) {
15350 struct lattice_node *lnode;
15351 done = (ptr == block->last);
15352 lnode = &scc.lattice[ptr->id];
15353 if (ptr->op == OP_PHI) {
15354 scc_visit_phi(state, &scc, lnode);
15356 else if (reps == 1) {
15357 scc_visit_expr(state, &scc, lnode);
/* A single out edge (no conditional) is unconditionally taken. */
15360 if (fblock->out && !fblock->out->out_next) {
15361 scc_add_fedge(state, &scc, fblock->out);
15364 while((sedge = scc_next_sedge(state, &scc))) {
15365 struct lattice_node *lnode;
15366 struct flow_block *fblock;
15367 lnode = sedge->dst;
15368 fblock = lnode->fblock;
15370 fprintf(stderr, "sedge: %5d (%5d -> %5d)\n",
15371 sedge - scc.ssa_edges,
15372 sedge->src->def->id,
15373 sedge->dst->def->id);
15375 if (lnode->def->op == OP_PHI) {
15376 scc_visit_phi(state, &scc, lnode);
/* Non-phi uses are only re-evaluated in reachable blocks. */
15379 for(fptr = fblock->in; fptr; fptr = fptr->in_next) {
15380 if (fptr->executable) {
15385 scc_visit_expr(state, &scc, lnode);
15391 scc_writeback_values(state, &scc);
15392 free_scc_state(state, &scc);
/* transform_to_arch_instructions - lower every triple to target-specific
 * instructions; the per-instruction hook returns the next triple to
 * process, so it may expand one triple into several.
 */
15396 static void transform_to_arch_instructions(struct compile_state *state)
15398 struct triple *ins, *first;
15399 first = state->first;
15402 ins = transform_to_arch_instruction(state, ins);
15403 } while(ins != first);
15406 #if DEBUG_CONSISTENCY
/* verify_uses - consistency check: for every rhs and lhs operand of every
 * instruction, the operand's use list must contain this instruction.
 * Aborts via internal_error on a missing back-link.
 */
15407 static void verify_uses(struct compile_state *state)
15409 struct triple *first, *ins;
15410 struct triple_set *set;
15411 first = state->first;
15414 struct triple **expr;
15415 expr = triple_rhs(state, ins, 0);
15416 for(; expr; expr = triple_rhs(state, ins, expr)) {
15417 struct triple *rhs;
/* Scan the operand's use list for a back-link to ins. */
15419 for(set = rhs?rhs->use:0; set; set = set->next) {
15420 if (set->member == ins) {
15425 internal_error(state, ins, "rhs not used");
15428 expr = triple_lhs(state, ins, 0);
15429 for(; expr; expr = triple_lhs(state, ins, expr)) {
15430 struct triple *lhs;
15432 for(set = lhs?lhs->use:0; set; set = set->next) {
15433 if (set->member == ins) {
15438 internal_error(state, ins, "lhs not used");
15442 } while(ins != first);
/* verify_blocks_present - check that every instruction which should carry
 * a block pointer actually has one.  Skipped entirely when no basic
 * blocks have been computed yet.
 */
15445 static void verify_blocks_present(struct compile_state *state)
15447 struct triple *first, *ins;
15448 if (!state->first_block) {
15451 first = state->first;
15454 valid_ins(state, ins);
15455 if (triple_stores_block(state, ins)) {
15456 if (!ins->u.block) {
15457 internal_error(state, ins,
15458 "%p not in a block?\n", ins);
15462 } while(ins != first);
/* verify_blocks - structural validation of the control-flow graph.
 * For every block: its instructions point back to it; its users really
 * branch/fall through to it; left/right successors list it as a user and
 * are non-empty; the stored user count matches; and each member of the
 * post-dominance frontier ends in a conditional branch.  Finally the
 * walked block count must equal state->last_vertex.
 */
15466 static void verify_blocks(struct compile_state *state)
15468 struct triple *ins;
15469 struct block *block;
15471 block = state->first_block;
15478 struct block_set *user;
15480 for(ins = block->first; ins != block->last->next; ins = ins->next) {
15481 if (triple_stores_block(state, ins) && (ins->u.block != block)) {
15482 internal_error(state, ins, "inconsitent block specified");
15484 valid_ins(state, ins);
/* Each user of this block must actually target it... */
15487 for(user = block->use; user; user = user->next) {
15489 if (!user->member->first) {
15490 internal_error(state, block->first, "user is empty");
/* ...except the synthetic last->first wrap edge. */
15492 if ((block == state->last_block) &&
15493 (user->member == state->first_block)) {
15496 if ((user->member->left != block) &&
15497 (user->member->right != block)) {
15498 internal_error(state, user->member->first,
15499 "user does not use block");
/* A branch's right successor must be its target block. */
15502 if (triple_is_branch(state, block->last) &&
15503 (block->right != block_of_triple(state, TARG(block->last, 0))))
15505 internal_error(state, block->last, "block->right != TARG(0)");
/* Fall-through: left successor is the textually next block. */
15507 if (!triple_is_uncond_branch(state, block->last) &&
15508 (block != state->last_block) &&
15509 (block->left != block_of_triple(state, block->last->next)))
15511 internal_error(state, block->last, "block->left != block->last->next");
15514 for(user = block->left->use; user; user = user->next) {
15515 if (user->member == block) {
15519 if (!user || user->member != block) {
15520 internal_error(state, block->first,
15521 "block does not use left");
15523 if (!block->left->first) {
15524 internal_error(state, block->first, "left block is empty");
15527 if (block->right) {
15528 for(user = block->right->use; user; user = user->next) {
15529 if (user->member == block) {
15533 if (!user || user->member != block) {
15534 internal_error(state, block->first,
15535 "block does not use right");
15537 if (!block->right->first) {
15538 internal_error(state, block->first, "right block is empty");
15541 if (block->users != users) {
15542 internal_error(state, block->first,
15543 "computed users %d != stored users %d\n",
15544 users, block->users);
15546 for(user = block->ipdomfrontier; user; user = user->next) {
15547 if ((user->member->left != state->last_block) &&
15548 !triple_is_cond_branch(state, user->member->last)) {
15549 internal_error(state, user->member->last,
15550 "conditional branch missing");
/* Advance to the next block via the instruction that follows
 * this block's last instruction. */
15553 if (!triple_stores_block(state, block->last->next)) {
15554 internal_error(state, block->last->next,
15555 "cannot find next block");
15557 block = block->last->next->u.block;
15559 internal_error(state, block->last->next,
15562 } while(block != state->first_block);
15563 if (blocks != state->last_vertex) {
15564 internal_error(state, 0, "computed blocks != stored blocks %d\n",
15565 blocks, state->last_vertex);
/* verify_domination - SSA sanity check: every definition must dominate all
 * of its uses. For each instruction, each use on a consumer's right-hand
 * side is located; for OP_PHI consumers the effective use point is not the
 * phi itself but the last instruction of the predecessor block feeding the
 * corresponding phi edge. tdominates() then checks the dominance relation
 * and a warning plus internal_error are raised on violation.
 * No-op when basic blocks have not been computed.
 */
15569 static void verify_domination(struct compile_state *state)
15571 struct triple *first, *ins;
15572 struct triple_set *set;
15573 if (!state->first_block) {
15577 first = state->first;
15580 for(set = ins->use; set; set = set->next) {
15581 struct triple **slot;
15582 struct triple *use_point;
/* Scan the consumer's RHS slots for the slot index i that refers to ins. */
15585 zrhs = TRIPLE_RHS(ins->sizes);
15586 slot = &RHS(set->member, 0);
15587 /* See if the use is on the right hand side */
15588 for(i = 0; i < zrhs; i++) {
15589 if (slot[i] == ins) {
15594 use_point = set->member;
15595 if (set->member->op == OP_PHI) {
15596 struct block_set *bset;
/* Walk the phi block's predecessor edges to the i-th edge; the use point
 * is the last instruction of that predecessor block. */
15598 bset = set->member->u.block->use;
15599 for(edge = 0; bset && (edge < i); edge++) {
15603 internal_error(state, set->member,
15604 "no edge for phi rhs %d\n", i);
15606 use_point = bset->member->last;
15610 !tdominates(state, ins, use_point)) {
15611 internal_warning(state, ins,
15612 "ins does not dominate rhs use");
15613 internal_error(state, use_point,
15614 "non dominated rhs use point?");
15618 } while(ins != first);
/* verify_rhs - sanity check on right-hand-side operands: every RHS slot of
 * every instruction must be non-NULL, and no instruction other than a phi
 * may list itself as one of its own operands. Raises internal_error with
 * the slot index and opcode name on violation.
 */
15621 static void verify_rhs(struct compile_state *state)
15623 struct triple *first, *ins;
15624 first = state->first;
15627 struct triple **slot;
15629 zrhs = TRIPLE_RHS(ins->sizes);
15630 slot = &RHS(ins, 0);
15631 for(i = 0; i < zrhs; i++) {
15632 if (slot[i] == 0) {
15633 internal_error(state, ins,
15634 "missing rhs %d on %s",
/* Self-reference is legal only for OP_PHI (a phi may merge its own value
 * around a loop); anything else is a malformed triple. */
15637 if ((ins->op != OP_PHI) && (slot[i] == ins)) {
15638 internal_error(state, ins,
15639 "ins == rhs[%d] on %s",
15644 } while(ins != first);
/* verify_piece - check multi-result instructions: the LHS entries of an
 * instruction must be exactly the triples that immediately follow it in
 * program order, each must be an OP_PIECE, and each piece's u.cval must
 * equal its ordinal index. Raises internal_error on any mismatch.
 */
15647 static void verify_piece(struct compile_state *state)
15649 struct triple *first, *ins;
15650 first = state->first;
15653 struct triple *ptr;
15655 lhs = TRIPLE_LHS(ins->sizes);
/* The i-th LHS must be the i-th successor triple. */
15656 for(ptr = ins->next, i = 0; i < lhs; i++, ptr = ptr->next) {
15657 if (ptr != LHS(ins, i)) {
15658 internal_error(state, ins, "malformed lhs on %s",
15661 if (ptr->op != OP_PIECE) {
15662 internal_error(state, ins, "bad lhs op %s at %d on %s",
15663 tops(ptr->op), i, tops(ins->op));
15665 if (ptr->u.cval != i) {
15666 internal_error(state, ins, "bad u.cval of %d %d expected",
15671 } while(ins != first);
/* verify_ins_colors - per-instruction register-color consistency check.
 * Walks the circular triple list from state->first.
 * NOTE(review): the loop body is elided in this excerpt; only the traversal
 * skeleton is visible, so the exact checks performed cannot be documented
 * from here.
 */
15674 static void verify_ins_colors(struct compile_state *state)
15676 struct triple *first, *ins;
15678 first = state->first;
15682 } while(ins != first);
/* verify_consistency - run all of the internal consistency checks in order.
 * Compiled in only when DEBUG_CONSISTENCY is enabled (see #endif below);
 * otherwise the empty stub definition is used so call sites cost nothing.
 */
15684 static void verify_consistency(struct compile_state *state)
15686 verify_uses(state);
15687 verify_blocks_present(state);
15688 verify_blocks(state);
15689 verify_domination(state);
15691 verify_piece(state);
15692 verify_ins_colors(state);
/* Non-debug build: consistency checking compiles away entirely. */
15695 static void verify_consistency(struct compile_state *state) {}
15696 #endif /* DEBUG_CONSISTENCY */
/* optimize - the main optimization pipeline driver. Stages, in order:
 *   1. flatten structures into scalar operations;
 *   2. build basic blocks and transform to SSA form;
 *   3. dead code elimination, then (at -O1+) algebraic simplification and
 *      (at -O2+) sparse conditional constant propagation (scc_transform),
 *      rebuilding SSA after each transformation;
 *   4. instruction selection (transform_to_arch_instructions) and another
 *      dead-code pass;
 *   5. phi elimination via copies, mandatory copy insertion, then graph
 *      coloring register allocation;
 *   6. free the basic block data structures.
 * verify_consistency() is called after every mutation stage, and each stage
 * can dump its output under the corresponding state->debug flag.
 */
15698 static void optimize(struct compile_state *state)
15700 if (state->debug & DEBUG_TRIPLES) {
15701 print_triples(state);
15703 /* Replace structures with simpler data types */
15704 flatten_structures(state);
15705 if (state->debug & DEBUG_TRIPLES) {
15706 print_triples(state);
15708 verify_consistency(state);
15709 /* Analize the intermediate code */
15710 analyze_basic_blocks(state);
15712 /* Transform the code to ssa form. */
15714 * The transformation to ssa form puts a phi function
15715 * on each of edge of a dominance frontier where that
15716 * phi function might be needed. At -O2 if we don't
15717 * eleminate the excess phi functions we can get an
15718 * exponential code size growth. So I kill the extra
15719 * phi functions early and I kill them often.
15721 transform_to_ssa_form(state);
15723 verify_consistency(state);
15724 if (state->debug & DEBUG_CODE_ELIMINATION) {
15725 fprintf(stdout, "After transform_to_ssa_form\n");
15726 print_blocks(state, stdout);
15728 /* Remove dead code */
15729 eliminate_inefectual_code(state);
15730 rebuild_ssa_form(state);
15731 verify_consistency(state);
15733 /* Do strength reduction and simple constant optimizations */
15734 if (state->optimize >= 1) {
15735 simplify_all(state);
15736 rebuild_ssa_form(state);
15738 if (state->debug & DEBUG_CODE_ELIMINATION) {
15739 fprintf(stdout, "After simplify_all\n");
15740 print_blocks(state, stdout);
15742 verify_consistency(state);
15743 /* Propogate constants throughout the code */
15744 if (state->optimize >= 2) {
15745 scc_transform(state);
15746 rebuild_ssa_form(state);
15748 verify_consistency(state);
15749 #warning "WISHLIST implement single use constants (least possible register pressure)"
15750 #warning "WISHLIST implement induction variable elimination"
15751 /* Select architecture instructions and an initial partial
15752 * coloring based on architecture constraints.
15754 transform_to_arch_instructions(state);
15755 verify_consistency(state);
15756 if (state->debug & DEBUG_ARCH_CODE) {
15757 printf("After transform_to_arch_instructions\n");
15758 print_blocks(state, stdout);
15759 print_control_flow(state);
15761 /* Remove dead code */
15762 eliminate_inefectual_code(state);
15763 rebuild_ssa_form(state);
15764 verify_consistency(state);
15765 if (state->debug & DEBUG_CODE_ELIMINATION) {
15766 printf("After eliminate_inefectual_code\n");
15767 print_blocks(state, stdout);
15768 print_control_flow(state);
15770 verify_consistency(state);
15771 /* Color all of the variables to see if they will fit in registers */
15772 insert_copies_to_phi(state);
15773 if (state->debug & DEBUG_INSERTED_COPIES) {
15774 printf("After insert_copies_to_phi\n");
15775 print_blocks(state, stdout);
15776 print_control_flow(state);
15778 verify_consistency(state);
15779 insert_mandatory_copies(state);
15780 if (state->debug & DEBUG_INSERTED_COPIES) {
15781 printf("After insert_mandatory_copies\n");
15782 print_blocks(state, stdout);
15783 print_control_flow(state);
15785 verify_consistency(state);
15786 allocate_registers(state);
15787 verify_consistency(state);
15788 if (state->debug & DEBUG_INTERMEDIATE_CODE) {
15789 print_blocks(state, stdout);
15791 if (state->debug & DEBUG_CONTROL_FLOW) {
15792 print_control_flow(state);
15794 /* Remove the optimization information.
15795 * This is more to check for memory consistency than to free memory.
15797 free_basic_blocks(state);
/* print_op_asm - emit an inline-asm triple to the output stream.
 * Walks the asm template string (ins->u.ainfo->str), and for each %N
 * parameter reference substitutes the architecture register name of the
 * corresponding operand: indices below lhs select output operands, higher
 * indices select inputs (RHS, offset by lhs). Clobber slots (void-typed
 * LHS entries) are excluded from the lhs count. The emitted text is
 * bracketed by "#ASM" / "#NOT ASM" marker lines.
 * Raises error() on a malformed template or out-of-range parameter index.
 */
15800 static void print_op_asm(struct compile_state *state,
15801 struct triple *ins, FILE *fp)
15803 struct asm_info *info;
15805 unsigned lhs, rhs, i;
15806 info = ins->u.ainfo;
15807 lhs = TRIPLE_LHS(ins->sizes);
15808 rhs = TRIPLE_RHS(ins->sizes);
15809 /* Don't count the clobbers in lhs */
15810 for(i = 0; i < lhs; i++) {
15811 if (LHS(ins, i)->type == &void_type) {
15816 fprintf(fp, "#ASM\n");
15818 for(ptr = info->str; *ptr; ptr++) {
15820 unsigned long param;
15821 struct triple *piece;
/* Parse the decimal operand number following '%'. */
15831 param = strtoul(ptr, &next, 10);
15833 error(state, ins, "Invalid asm template");
15835 if (param >= (lhs + rhs)) {
15836 error(state, ins, "Invalid param %%%u in asm template",
15839 piece = (param < lhs)? LHS(ins, param) : RHS(ins, param - lhs);
15841 arch_reg_str(ID_REG(piece->id)));
15844 fprintf(fp, "\n#NOT ASM\n");
15848 /* Only use the low x86 byte registers. This allows me
15849 * allocate the entire register when a byte register is used.
15851 #define X86_4_8BIT_GPRS 1
15854 #define X86_MMX_REGS (1<<0)
15855 #define X86_XMM_REGS (1<<1)
15857 /* The x86 register classes */
15858 #define REGC_FLAGS 0
15859 #define REGC_GPR8 1
15860 #define REGC_GPR16 2
15861 #define REGC_GPR32 3
15862 #define REGC_DIVIDEND64 4
15863 #define REGC_DIVIDEND32 5
15866 #define REGC_GPR32_8 8
15867 #define REGC_GPR16_8 9
15868 #define REGC_GPR8_LO 10
15869 #define REGC_IMM32 11
15870 #define REGC_IMM16 12
15871 #define REGC_IMM8 13
15872 #define LAST_REGC REGC_IMM8
15873 #if LAST_REGC >= MAX_REGC
15874 #error "MAX_REGC is to low"
15877 /* Register class masks */
15878 #define REGCM_FLAGS (1 << REGC_FLAGS)
15879 #define REGCM_GPR8 (1 << REGC_GPR8)
15880 #define REGCM_GPR16 (1 << REGC_GPR16)
15881 #define REGCM_GPR32 (1 << REGC_GPR32)
15882 #define REGCM_DIVIDEND64 (1 << REGC_DIVIDEND64)
15883 #define REGCM_DIVIDEND32 (1 << REGC_DIVIDEND32)
15884 #define REGCM_MMX (1 << REGC_MMX)
15885 #define REGCM_XMM (1 << REGC_XMM)
15886 #define REGCM_GPR32_8 (1 << REGC_GPR32_8)
15887 #define REGCM_GPR16_8 (1 << REGC_GPR16_8)
15888 #define REGCM_GPR8_LO (1 << REGC_GPR8_LO)
15889 #define REGCM_IMM32 (1 << REGC_IMM32)
15890 #define REGCM_IMM16 (1 << REGC_IMM16)
15891 #define REGCM_IMM8 (1 << REGC_IMM8)
15892 #define REGCM_ALL ((1 << (LAST_REGC + 1)) - 1)
15894 /* The x86 registers */
15895 #define REG_EFLAGS 2
15896 #define REGC_FLAGS_FIRST REG_EFLAGS
15897 #define REGC_FLAGS_LAST REG_EFLAGS
15906 #define REGC_GPR8_LO_FIRST REG_AL
15907 #define REGC_GPR8_LO_LAST REG_DL
15908 #define REGC_GPR8_FIRST REG_AL
15909 #define REGC_GPR8_LAST REG_DH
15918 #define REGC_GPR16_FIRST REG_AX
15919 #define REGC_GPR16_LAST REG_SP
15928 #define REGC_GPR32_FIRST REG_EAX
15929 #define REGC_GPR32_LAST REG_ESP
15930 #define REG_EDXEAX 27
15931 #define REGC_DIVIDEND64_FIRST REG_EDXEAX
15932 #define REGC_DIVIDEND64_LAST REG_EDXEAX
15933 #define REG_DXAX 28
15934 #define REGC_DIVIDEND32_FIRST REG_DXAX
15935 #define REGC_DIVIDEND32_LAST REG_DXAX
15936 #define REG_MMX0 29
15937 #define REG_MMX1 30
15938 #define REG_MMX2 31
15939 #define REG_MMX3 32
15940 #define REG_MMX4 33
15941 #define REG_MMX5 34
15942 #define REG_MMX6 35
15943 #define REG_MMX7 36
15944 #define REGC_MMX_FIRST REG_MMX0
15945 #define REGC_MMX_LAST REG_MMX7
15946 #define REG_XMM0 37
15947 #define REG_XMM1 38
15948 #define REG_XMM2 39
15949 #define REG_XMM3 40
15950 #define REG_XMM4 41
15951 #define REG_XMM5 42
15952 #define REG_XMM6 43
15953 #define REG_XMM7 44
15954 #define REGC_XMM_FIRST REG_XMM0
15955 #define REGC_XMM_LAST REG_XMM7
15956 #warning "WISHLIST figure out how to use pinsrw and pextrw to better use extended regs"
15957 #define LAST_REG REG_XMM7
15959 #define REGC_GPR32_8_FIRST REG_EAX
15960 #define REGC_GPR32_8_LAST REG_EDX
15961 #define REGC_GPR16_8_FIRST REG_AX
15962 #define REGC_GPR16_8_LAST REG_DX
15964 #define REGC_IMM8_FIRST -1
15965 #define REGC_IMM8_LAST -1
15966 #define REGC_IMM16_FIRST -2
15967 #define REGC_IMM16_LAST -1
15968 #define REGC_IMM32_FIRST -4
15969 #define REGC_IMM32_LAST -1
15971 #if LAST_REG >= MAX_REGISTERS
15972 #error "MAX_REGISTERS to low"
/* regc_size - number of hardware registers in each register class, derived
 * from the REGC_*_FIRST/LAST bounds above (designated initializers).
 * NOTE(review): the excerpt ends at REGC_GPR8_LO; the immediate-class
 * entries and the closing brace are elided.
 */
15976 static unsigned regc_size[LAST_REGC +1] = {
15977 [REGC_FLAGS] = REGC_FLAGS_LAST - REGC_FLAGS_FIRST + 1,
15978 [REGC_GPR8] = REGC_GPR8_LAST - REGC_GPR8_FIRST + 1,
15979 [REGC_GPR16] = REGC_GPR16_LAST - REGC_GPR16_FIRST + 1,
15980 [REGC_GPR32] = REGC_GPR32_LAST - REGC_GPR32_FIRST + 1,
15981 [REGC_DIVIDEND64] = REGC_DIVIDEND64_LAST - REGC_DIVIDEND64_FIRST + 1,
15982 [REGC_DIVIDEND32] = REGC_DIVIDEND32_LAST - REGC_DIVIDEND32_FIRST + 1,
15983 [REGC_MMX] = REGC_MMX_LAST - REGC_MMX_FIRST + 1,
15984 [REGC_XMM] = REGC_XMM_LAST - REGC_XMM_FIRST + 1,
15985 [REGC_GPR32_8] = REGC_GPR32_8_LAST - REGC_GPR32_8_FIRST + 1,
15986 [REGC_GPR16_8] = REGC_GPR16_8_LAST - REGC_GPR16_8_FIRST + 1,
15987 [REGC_GPR8_LO] = REGC_GPR8_LO_LAST - REGC_GPR8_LO_FIRST + 1,
/* regcm_bound - inclusive [first, last] register-number range of each
 * register class. Used by arch_regcm_normalize (subset detection) and
 * arch_reg_regcm (class membership of a register). Immediate classes use
 * the negative sentinel bounds defined above.
 */
15993 static const struct {
15995 } regcm_bound[LAST_REGC + 1] = {
15996 [REGC_FLAGS] = { REGC_FLAGS_FIRST, REGC_FLAGS_LAST },
15997 [REGC_GPR8] = { REGC_GPR8_FIRST, REGC_GPR8_LAST },
15998 [REGC_GPR16] = { REGC_GPR16_FIRST, REGC_GPR16_LAST },
15999 [REGC_GPR32] = { REGC_GPR32_FIRST, REGC_GPR32_LAST },
16000 [REGC_DIVIDEND64] = { REGC_DIVIDEND64_FIRST, REGC_DIVIDEND64_LAST },
16001 [REGC_DIVIDEND32] = { REGC_DIVIDEND32_FIRST, REGC_DIVIDEND32_LAST },
16002 [REGC_MMX] = { REGC_MMX_FIRST, REGC_MMX_LAST },
16003 [REGC_XMM] = { REGC_XMM_FIRST, REGC_XMM_LAST },
16004 [REGC_GPR32_8] = { REGC_GPR32_8_FIRST, REGC_GPR32_8_LAST },
16005 [REGC_GPR16_8] = { REGC_GPR16_8_FIRST, REGC_GPR16_8_LAST },
16006 [REGC_GPR8_LO] = { REGC_GPR8_LO_FIRST, REGC_GPR8_LO_LAST },
16007 [REGC_IMM32] = { REGC_IMM32_FIRST, REGC_IMM32_LAST },
16008 [REGC_IMM16] = { REGC_IMM16_FIRST, REGC_IMM16_LAST },
16009 [REGC_IMM8] = { REGC_IMM8_FIRST, REGC_IMM8_LAST },
/* arch_encode_feature - translate a command-line feature string into bits
 * OR-ed into *features. Accepts "mmx", "sse", or "cpu=<name>" where <name>
 * is looked up in the cpus[] table below (each entry carries the feature
 * bits that CPU supports). Return value indicates success/failure to the
 * option parser (exact codes elided in this excerpt).
 */
16012 static int arch_encode_feature(const char *feature, unsigned long *features)
/* CPU name -> feature-bit table; terminated by a null name entry. */
16019 { "p2", X86_MMX_REGS },
16020 { "p3", X86_MMX_REGS | X86_XMM_REGS },
16021 { "p4", X86_MMX_REGS | X86_XMM_REGS },
16022 { "k7", X86_MMX_REGS },
16023 { "k8", X86_MMX_REGS | X86_XMM_REGS },
16024 { "c3", X86_MMX_REGS },
16025 { "c3-2", X86_MMX_REGS | X86_XMM_REGS }, /* Nehemiah */
16030 if (strcmp(feature, "mmx") == 0) {
16031 *features |= X86_MMX_REGS;
16033 else if (strcmp(feature, "sse") == 0) {
16034 *features |= X86_XMM_REGS;
16036 else if (strncmp(feature, "cpu=", 4) == 0) {
16037 const char *cpu = feature + 4;
16038 for(ptr = cpus; ptr->name; ptr++) {
16039 if (strcmp(ptr->name, cpu) == 0) {
16044 *features |= ptr->cpu;
/* arch_regc_size - number of registers in register class `class`.
 * Out-of-range classes are rejected (error path elided in this excerpt)
 * before indexing the regc_size table.
 */
16056 static unsigned arch_regc_size(struct compile_state *state, int class)
16058 if ((class < 0) || (class > LAST_REGC)) {
16061 return regc_size[class];
/* arch_regcm_intersect - do two register-class masks share any hardware
 * register? All GPR-derived classes alias each other (AL aliases AX/EAX,
 * dividend pairs alias EAX/EDX), so any gpr_mask overlap counts. Two masks
 * consisting purely of immediate pseudo-classes also intersect (immediates
 * of different widths occupy the same "slot"). Otherwise a direct mask
 * intersection decides.
 */
16064 static int arch_regcm_intersect(unsigned regcm1, unsigned regcm2)
16066 /* See if two register classes may have overlapping registers */
16067 unsigned gpr_mask = REGCM_GPR8 | REGCM_GPR8_LO | REGCM_GPR16_8 | REGCM_GPR16 |
16068 REGCM_GPR32_8 | REGCM_GPR32 |
16069 REGCM_DIVIDEND32 | REGCM_DIVIDEND64;
16071 /* Special case for the immediates */
16072 if ((regcm1 & (REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) &&
16073 ((regcm1 & ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) == 0) &&
16074 (regcm2 & (REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) &&
16075 ((regcm2 & ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8)) == 0)) {
16078 return (regcm1 & regcm2) ||
16079 ((regcm1 & gpr_mask) && (regcm2 & gpr_mask));
/* arch_reg_equivs - fill *equiv with the list of registers that physically
 * alias `reg` (e.g. AL aliases AX, EAX, DXAX, EDXEAX), terminated by
 * REG_UNSET. Used by the register allocator to mark aliased registers as
 * busy together. With X86_4_8BIT_GPRS, the 8-bit registers conservatively
 * alias the whole 32-bit register so a byte use consumes the full GPR.
 * NOTE(review): the switch's case labels are elided in this excerpt; the
 * visible equivalence stores are grouped per source register.
 */
16082 static void arch_reg_equivs(
16083 struct compile_state *state, unsigned *equiv, int reg)
16085 if ((reg < 0) || (reg > LAST_REG)) {
16086 internal_error(state, 0, "invalid register");
16091 #if X86_4_8BIT_GPRS
16095 *equiv++ = REG_EAX;
16096 *equiv++ = REG_DXAX;
16097 *equiv++ = REG_EDXEAX;
16100 #if X86_4_8BIT_GPRS
16104 *equiv++ = REG_EAX;
16105 *equiv++ = REG_DXAX;
16106 *equiv++ = REG_EDXEAX;
16109 #if X86_4_8BIT_GPRS
16113 *equiv++ = REG_EBX;
16117 #if X86_4_8BIT_GPRS
16121 *equiv++ = REG_EBX;
16124 #if X86_4_8BIT_GPRS
16128 *equiv++ = REG_ECX;
16132 #if X86_4_8BIT_GPRS
16136 *equiv++ = REG_ECX;
16139 #if X86_4_8BIT_GPRS
16143 *equiv++ = REG_EDX;
16144 *equiv++ = REG_DXAX;
16145 *equiv++ = REG_EDXEAX;
16148 #if X86_4_8BIT_GPRS
16152 *equiv++ = REG_EDX;
16153 *equiv++ = REG_DXAX;
16154 *equiv++ = REG_EDXEAX;
16159 *equiv++ = REG_EAX;
16160 *equiv++ = REG_DXAX;
16161 *equiv++ = REG_EDXEAX;
16166 *equiv++ = REG_EBX;
16171 *equiv++ = REG_ECX;
16176 *equiv++ = REG_EDX;
16177 *equiv++ = REG_DXAX;
16178 *equiv++ = REG_EDXEAX;
16181 *equiv++ = REG_ESI;
16184 *equiv++ = REG_EDI;
16187 *equiv++ = REG_EBP;
16190 *equiv++ = REG_ESP;
16196 *equiv++ = REG_DXAX;
16197 *equiv++ = REG_EDXEAX;
16213 *equiv++ = REG_DXAX;
16214 *equiv++ = REG_EDXEAX;
16235 *equiv++ = REG_EAX;
16236 *equiv++ = REG_EDX;
16237 *equiv++ = REG_EDXEAX;
16246 *equiv++ = REG_EAX;
16247 *equiv++ = REG_EDX;
16248 *equiv++ = REG_DXAX;
/* The equivalence list is REG_UNSET terminated. */
16251 *equiv++ = REG_UNSET;
/* arch_avail_mask - register-class mask of all classes usable on the
 * target. REGC_GPR8 (which would include AH..DH) is deliberately excluded;
 * only the low byte registers (REGC_GPR8_LO) are allocatable. MMX and XMM
 * classes are added only when the corresponding CPU feature bit is set.
 */
16254 static unsigned arch_avail_mask(struct compile_state *state)
16256 unsigned avail_mask;
16257 /* REGCM_GPR8 is not available */
16258 avail_mask = REGCM_GPR8_LO | REGCM_GPR16_8 | REGCM_GPR16 |
16259 REGCM_GPR32 | REGCM_GPR32_8 |
16260 REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
16261 REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8 | REGCM_FLAGS;
16262 if (state->features & X86_MMX_REGS) {
16263 avail_mask |= REGCM_MMX;
16265 if (state->features & X86_XMM_REGS) {
16266 avail_mask |= REGCM_XMM;
/* arch_regcm_normalize - close a register-class mask under subclassing:
 * for every class present in the mask, also set every class whose
 * [first, last] register range lies entirely within that class's range
 * (per regcm_bound), then intersect with the target's available classes.
 */
16271 static unsigned arch_regcm_normalize(struct compile_state *state, unsigned regcm)
16273 unsigned mask, result;
16277 for(class = 0, mask = 1; mask; mask <<= 1, class++) {
16278 if ((result & mask) == 0) {
16281 if (class > LAST_REGC) {
/* Add every class fully contained in this class's register range. */
16284 for(class2 = 0; class2 <= LAST_REGC; class2++) {
16285 if ((regcm_bound[class2].first >= regcm_bound[class].first) &&
16286 (regcm_bound[class2].last <= regcm_bound[class].last)) {
16287 result |= (1 << class2);
16291 result &= arch_avail_mask(state);
/* arch_regcm_reg_normalize - normalize a class mask as above, then drop
 * the immediate pseudo-classes so only real register classes remain.
 */
16295 static unsigned arch_regcm_reg_normalize(struct compile_state *state, unsigned regcm)
16297 /* Like arch_regcm_normalize except immediate register classes are excluded */
16298 regcm = arch_regcm_normalize(state, regcm);
16299 /* Remove the immediate register classes */
16300 regcm &= ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8);
/* arch_reg_regcm - mask of every register class whose [first, last] range
 * contains hardware register `reg`. internal_error if no class matches.
 */
16305 static unsigned arch_reg_regcm(struct compile_state *state, int reg)
16310 for(class = 0; class <= LAST_REGC; class++) {
16311 if ((reg >= regcm_bound[class].first) &&
16312 (reg <= regcm_bound[class].last)) {
16313 mask |= (1 << class);
16317 internal_error(state, 0, "reg %d not in any class", reg);
/* arch_reg_constraint - resolve a GCC-style inline-asm register constraint
 * string against `type`. Each constraint letter maps (via the local table)
 * to a class mask and optionally a specific register ('a' => EAX etc.).
 * All letters in the string are OR-ed into the result mask; a letter whose
 * class does not intersect the type's natural class mask is an error, as
 * is naming two different specific registers.
 */
16322 static struct reg_info arch_reg_constraint(
16323 struct compile_state *state, struct type *type, const char *constraint)
16325 static const struct {
16329 } constraints[] = {
16330 { 'r', REGCM_GPR32, REG_UNSET },
16331 { 'g', REGCM_GPR32, REG_UNSET },
16332 { 'p', REGCM_GPR32, REG_UNSET },
16333 { 'q', REGCM_GPR8_LO, REG_UNSET },
16334 { 'Q', REGCM_GPR32_8, REG_UNSET },
16335 { 'x', REGCM_XMM, REG_UNSET },
16336 { 'y', REGCM_MMX, REG_UNSET },
16337 { 'a', REGCM_GPR32, REG_EAX },
16338 { 'b', REGCM_GPR32, REG_EBX },
16339 { 'c', REGCM_GPR32, REG_ECX },
16340 { 'd', REGCM_GPR32, REG_EDX },
16341 { 'D', REGCM_GPR32, REG_EDI },
16342 { 'S', REGCM_GPR32, REG_ESI },
16343 { '\0', 0, REG_UNSET },
16345 unsigned int regcm;
16346 unsigned int mask, reg;
16347 struct reg_info result;
16349 regcm = arch_type_to_regcm(state, type);
16352 for(ptr = constraint; *ptr; ptr++) {
/* Look the letter up in the table; '\0' terminator means unknown letter. */
16357 for(i = 0; constraints[i].class != '\0'; i++) {
16358 if (constraints[i].class == *ptr) {
16362 if (constraints[i].class == '\0') {
16363 error(state, 0, "invalid register constraint ``%c''", *ptr);
16366 if ((constraints[i].mask & regcm) == 0) {
16367 error(state, 0, "invalid register class %c specified",
16370 mask |= constraints[i].mask;
16371 if (constraints[i].reg != REG_UNSET) {
16372 if ((reg != REG_UNSET) && (reg != constraints[i].reg)) {
16373 error(state, 0, "Only one register may be specified");
16375 reg = constraints[i].reg;
16379 result.regcm = mask;
/* arch_reg_clobber - resolve an inline-asm clobber name ("%eax", "cc",
 * "memory", "xmmN", "mmxN") to a reg_info {register, class mask}.
 * "memory" yields REG_UNSET (no specific register clobbered); unknown
 * names are an error.
 */
16383 static struct reg_info arch_reg_clobber(
16384 struct compile_state *state, const char *clobber)
16386 struct reg_info result;
16387 if (strcmp(clobber, "memory") == 0) {
16388 result.reg = REG_UNSET;
16391 else if (strcmp(clobber, "%eax") == 0) {
16392 result.reg = REG_EAX;
16393 result.regcm = REGCM_GPR32;
16395 else if (strcmp(clobber, "%ebx") == 0) {
16396 result.reg = REG_EBX;
16397 result.regcm = REGCM_GPR32;
16399 else if (strcmp(clobber, "%ecx") == 0) {
16400 result.reg = REG_ECX;
16401 result.regcm = REGCM_GPR32;
16403 else if (strcmp(clobber, "%edx") == 0) {
16404 result.reg = REG_EDX;
16405 result.regcm = REGCM_GPR32;
16407 else if (strcmp(clobber, "%esi") == 0) {
16408 result.reg = REG_ESI;
16409 result.regcm = REGCM_GPR32;
16411 else if (strcmp(clobber, "%edi") == 0) {
16412 result.reg = REG_EDI;
16413 result.regcm = REGCM_GPR32;
16415 else if (strcmp(clobber, "%ebp") == 0) {
16416 result.reg = REG_EBP;
16417 result.regcm = REGCM_GPR32;
16419 else if (strcmp(clobber, "%esp") == 0) {
16420 result.reg = REG_ESP;
16421 result.regcm = REGCM_GPR32;
16423 else if (strcmp(clobber, "cc") == 0) {
16424 result.reg = REG_EFLAGS;
16425 result.regcm = REGCM_FLAGS;
/* "xmm0".."xmm7" / "mmx0".."mmx7": single octal digit selects the unit. */
16427 else if ((strncmp(clobber, "xmm", 3) == 0) &&
16428 octdigitp(clobber[3]) && (clobber[4] == '\0')) {
16429 result.reg = REG_XMM0 + octdigval(clobber[3]);
16430 result.regcm = REGCM_XMM;
16432 else if ((strncmp(clobber, "mmx", 3) == 0) &&
16433 octdigitp(clobber[3]) && (clobber[4] == '\0')) {
16434 result.reg = REG_MMX0 + octdigval(clobber[3]);
16435 result.regcm = REGCM_MMX;
16438 error(state, 0, "Invalid register clobber");
16439 result.reg = REG_UNSET;
/* do_select_reg - return `reg` if its class membership intersects the
 * requested `classes` mask, else REG_UNSET.
 * NOTE(review): the check against the used[] availability array is elided
 * in this excerpt — presumably an early REG_UNSET return; confirm against
 * the full source.
 */
16445 static int do_select_reg(struct compile_state *state,
16446 char *used, int reg, unsigned classes)
16452 mask = arch_reg_regcm(state, reg);
16453 return (classes & mask) ? reg : REG_UNSET;
/* arch_select_free_register - pick a free register satisfying `classes`.
 * Scans register classes in a deliberate order (XMM, MMX, GPR32 backwards,
 * GPR16, GPR8, GPR8_LO, dividends, flags) so that classes least likely to
 * be exhausted are handed out first; returns the first acceptable register
 * or REG_UNSET if none is free.
 */
16456 static int arch_select_free_register(
16457 struct compile_state *state, char *used, int classes)
16459 /* Live ranges with the most neighbors are colored first.
16461 * Generally it does not matter which colors are given
16462 * as the register allocator attempts to color live ranges
16463 * in an order where you are guaranteed not to run out of colors.
16465 * Occasionally the register allocator cannot find an order
16466 * of register selection that will find a free color. To
16467 * increase the odds the register allocator will work when
16468 * it guesses first give out registers from register classes
16469 * least likely to run out of registers.
16474 for(i = REGC_XMM_FIRST; (reg == REG_UNSET) && (i <= REGC_XMM_LAST); i++) {
16475 reg = do_select_reg(state, used, i, classes);
16477 for(i = REGC_MMX_FIRST; (reg == REG_UNSET) && (i <= REGC_MMX_LAST); i++) {
16478 reg = do_select_reg(state, used, i, classes);
16480 for(i = REGC_GPR32_LAST; (reg == REG_UNSET) && (i >= REGC_GPR32_FIRST); i--) {
16481 reg = do_select_reg(state, used, i, classes);
16483 for(i = REGC_GPR16_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR16_LAST); i++) {
16484 reg = do_select_reg(state, used, i, classes);
16486 for(i = REGC_GPR8_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR8_LAST); i++) {
16487 reg = do_select_reg(state, used, i, classes);
16489 for(i = REGC_GPR8_LO_FIRST; (reg == REG_UNSET) && (i <= REGC_GPR8_LO_LAST); i++) {
16490 reg = do_select_reg(state, used, i, classes);
16492 for(i = REGC_DIVIDEND32_FIRST; (reg == REG_UNSET) && (i <= REGC_DIVIDEND32_LAST); i++) {
16493 reg = do_select_reg(state, used, i, classes);
16495 for(i = REGC_DIVIDEND64_FIRST; (reg == REG_UNSET) && (i <= REGC_DIVIDEND64_LAST); i++) {
16496 reg = do_select_reg(state, used, i, classes);
16498 for(i = REGC_FLAGS_FIRST; (reg == REG_UNSET) && (i <= REGC_FLAGS_LAST); i++) {
16499 reg = do_select_reg(state, used, i, classes);
/* arch_type_to_regcm - map a source-level type to the mask of register
 * classes able to hold it, by switching on the type's size category
 * (8-bit values fit everywhere, 16-bit values exclude the 8-bit classes,
 * 32-bit values exclude both), then normalizing via arch_regcm_normalize.
 * internal_error for types with no register class.
 * NOTE(review): the case labels are elided in this excerpt.
 */
16505 static unsigned arch_type_to_regcm(struct compile_state *state, struct type *type)
16507 #warning "FIXME force types smaller (if legal) before I get here"
16510 switch(type->type & TYPE_MASK) {
16517 mask = REGCM_GPR8 | REGCM_GPR8_LO |
16518 REGCM_GPR16 | REGCM_GPR16_8 |
16519 REGCM_GPR32 | REGCM_GPR32_8 |
16520 REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
16521 REGCM_MMX | REGCM_XMM |
16522 REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8;
16526 mask = REGCM_GPR16 | REGCM_GPR16_8 |
16527 REGCM_GPR32 | REGCM_GPR32_8 |
16528 REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
16529 REGCM_MMX | REGCM_XMM |
16530 REGCM_IMM32 | REGCM_IMM16;
16537 mask = REGCM_GPR32 | REGCM_GPR32_8 |
16538 REGCM_DIVIDEND32 | REGCM_DIVIDEND64 |
16539 REGCM_MMX | REGCM_XMM |
16543 internal_error(state, 0, "no register class for type");
16546 mask = arch_regcm_normalize(state, mask);
/* is_imm32 - true if the triple is an integer constant fitting in 32 bits,
 * or any address constant (addresses are always 32-bit on this target).
 */
16550 static int is_imm32(struct triple *imm)
16552 return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xffffffffUL)) ||
16553 (imm->op == OP_ADDRCONST);
/* is_imm16 - true if the triple is an integer constant fitting in 16 bits. */
16556 static int is_imm16(struct triple *imm)
16558 return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xffff));
/* is_imm8 - true if the triple is an integer constant fitting in 8 bits. */
16560 static int is_imm8(struct triple *imm)
16562 return ((imm->op == OP_INTCONST) && (imm->u.cval <= 0xff));
/* get_imm32 - if the operand referenced through *expr resolves (following
 * any chain of OP_COPY triples) to a 32-bit immediate, rewrite the operand
 * to reference the constant directly, fixing up the use lists, and report
 * success; otherwise leave the operand untouched.
 */
16565 static int get_imm32(struct triple *ins, struct triple **expr)
16567 struct triple *imm;
16569 while(imm->op == OP_COPY) {
16572 if (!is_imm32(imm)) {
/* Retarget the use: ins no longer uses the old operand, now uses imm. */
16575 unuse_triple(*expr, ins);
16576 use_triple(imm, ins);
/* get_imm8 - same as get_imm32 but the copy-chain target must be an 8-bit
 * immediate (is_imm8) before the operand is rewritten.
 */
16581 static int get_imm8(struct triple *ins, struct triple **expr)
16583 struct triple *imm;
16585 while(imm->op == OP_COPY) {
16588 if (!is_imm8(imm)) {
16591 unuse_triple(*expr, ins);
16592 use_triple(imm, ins);
16597 #define TEMPLATE_NOP 0
16598 #define TEMPLATE_INTCONST8 1
16599 #define TEMPLATE_INTCONST32 2
16600 #define TEMPLATE_COPY8_REG 3
16601 #define TEMPLATE_COPY16_REG 4
16602 #define TEMPLATE_COPY32_REG 5
16603 #define TEMPLATE_COPY_IMM8 6
16604 #define TEMPLATE_COPY_IMM16 7
16605 #define TEMPLATE_COPY_IMM32 8
16606 #define TEMPLATE_PHI8 9
16607 #define TEMPLATE_PHI16 10
16608 #define TEMPLATE_PHI32 11
16609 #define TEMPLATE_STORE8 12
16610 #define TEMPLATE_STORE16 13
16611 #define TEMPLATE_STORE32 14
16612 #define TEMPLATE_LOAD8 15
16613 #define TEMPLATE_LOAD16 16
16614 #define TEMPLATE_LOAD32 17
16615 #define TEMPLATE_BINARY8_REG 18
16616 #define TEMPLATE_BINARY16_REG 19
16617 #define TEMPLATE_BINARY32_REG 20
16618 #define TEMPLATE_BINARY8_IMM 21
16619 #define TEMPLATE_BINARY16_IMM 22
16620 #define TEMPLATE_BINARY32_IMM 23
16621 #define TEMPLATE_SL8_CL 24
16622 #define TEMPLATE_SL16_CL 25
16623 #define TEMPLATE_SL32_CL 26
16624 #define TEMPLATE_SL8_IMM 27
16625 #define TEMPLATE_SL16_IMM 28
16626 #define TEMPLATE_SL32_IMM 29
16627 #define TEMPLATE_UNARY8 30
16628 #define TEMPLATE_UNARY16 31
16629 #define TEMPLATE_UNARY32 32
16630 #define TEMPLATE_CMP8_REG 33
16631 #define TEMPLATE_CMP16_REG 34
16632 #define TEMPLATE_CMP32_REG 35
16633 #define TEMPLATE_CMP8_IMM 36
16634 #define TEMPLATE_CMP16_IMM 37
16635 #define TEMPLATE_CMP32_IMM 38
16636 #define TEMPLATE_TEST8 39
16637 #define TEMPLATE_TEST16 40
16638 #define TEMPLATE_TEST32 41
16639 #define TEMPLATE_SET 42
16640 #define TEMPLATE_JMP 43
16641 #define TEMPLATE_INB_DX 44
16642 #define TEMPLATE_INB_IMM 45
16643 #define TEMPLATE_INW_DX 46
16644 #define TEMPLATE_INW_IMM 47
16645 #define TEMPLATE_INL_DX 48
16646 #define TEMPLATE_INL_IMM 49
16647 #define TEMPLATE_OUTB_DX 50
16648 #define TEMPLATE_OUTB_IMM 51
16649 #define TEMPLATE_OUTW_DX 52
16650 #define TEMPLATE_OUTW_IMM 53
16651 #define TEMPLATE_OUTL_DX 54
16652 #define TEMPLATE_OUTL_IMM 55
16653 #define TEMPLATE_BSF 56
16654 #define TEMPLATE_RDMSR 57
16655 #define TEMPLATE_WRMSR 58
16656 #define TEMPLATE_UMUL8 59
16657 #define TEMPLATE_UMUL16 60
16658 #define TEMPLATE_UMUL32 61
16659 #define TEMPLATE_DIV8 62
16660 #define TEMPLATE_DIV16 63
16661 #define TEMPLATE_DIV32 64
16662 #define LAST_TEMPLATE TEMPLATE_DIV32
16663 #if LAST_TEMPLATE >= MAX_TEMPLATES
16664 #error "MAX_TEMPLATES to low"
16667 #define COPY8_REGCM (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO | REGCM_MMX | REGCM_XMM)
16668 #define COPY16_REGCM (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_GPR16 | REGCM_MMX | REGCM_XMM)
16669 #define COPY32_REGCM (REGCM_DIVIDEND64 | REGCM_DIVIDEND32 | REGCM_GPR32 | REGCM_MMX | REGCM_XMM)
16672 static struct ins_template templates[] = {
16673 [TEMPLATE_NOP] = {},
16674 [TEMPLATE_INTCONST8] = {
16675 .lhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16677 [TEMPLATE_INTCONST32] = {
16678 .lhs = { [0] = { REG_UNNEEDED, REGCM_IMM32 } },
16680 [TEMPLATE_COPY8_REG] = {
16681 .lhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16682 .rhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16684 [TEMPLATE_COPY16_REG] = {
16685 .lhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16686 .rhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16688 [TEMPLATE_COPY32_REG] = {
16689 .lhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16690 .rhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16692 [TEMPLATE_COPY_IMM8] = {
16693 .lhs = { [0] = { REG_UNSET, COPY8_REGCM } },
16694 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16696 [TEMPLATE_COPY_IMM16] = {
16697 .lhs = { [0] = { REG_UNSET, COPY16_REGCM } },
16698 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM16 | REGCM_IMM8 } },
16700 [TEMPLATE_COPY_IMM32] = {
16701 .lhs = { [0] = { REG_UNSET, COPY32_REGCM } },
16702 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8 } },
16704 [TEMPLATE_PHI8] = {
16705 .lhs = { [0] = { REG_VIRT0, COPY8_REGCM } },
16707 [ 0] = { REG_VIRT0, COPY8_REGCM },
16708 [ 1] = { REG_VIRT0, COPY8_REGCM },
16709 [ 2] = { REG_VIRT0, COPY8_REGCM },
16710 [ 3] = { REG_VIRT0, COPY8_REGCM },
16711 [ 4] = { REG_VIRT0, COPY8_REGCM },
16712 [ 5] = { REG_VIRT0, COPY8_REGCM },
16713 [ 6] = { REG_VIRT0, COPY8_REGCM },
16714 [ 7] = { REG_VIRT0, COPY8_REGCM },
16715 [ 8] = { REG_VIRT0, COPY8_REGCM },
16716 [ 9] = { REG_VIRT0, COPY8_REGCM },
16717 [10] = { REG_VIRT0, COPY8_REGCM },
16718 [11] = { REG_VIRT0, COPY8_REGCM },
16719 [12] = { REG_VIRT0, COPY8_REGCM },
16720 [13] = { REG_VIRT0, COPY8_REGCM },
16721 [14] = { REG_VIRT0, COPY8_REGCM },
16722 [15] = { REG_VIRT0, COPY8_REGCM },
16724 [TEMPLATE_PHI16] = {
16725 .lhs = { [0] = { REG_VIRT0, COPY16_REGCM } },
16727 [ 0] = { REG_VIRT0, COPY16_REGCM },
16728 [ 1] = { REG_VIRT0, COPY16_REGCM },
16729 [ 2] = { REG_VIRT0, COPY16_REGCM },
16730 [ 3] = { REG_VIRT0, COPY16_REGCM },
16731 [ 4] = { REG_VIRT0, COPY16_REGCM },
16732 [ 5] = { REG_VIRT0, COPY16_REGCM },
16733 [ 6] = { REG_VIRT0, COPY16_REGCM },
16734 [ 7] = { REG_VIRT0, COPY16_REGCM },
16735 [ 8] = { REG_VIRT0, COPY16_REGCM },
16736 [ 9] = { REG_VIRT0, COPY16_REGCM },
16737 [10] = { REG_VIRT0, COPY16_REGCM },
16738 [11] = { REG_VIRT0, COPY16_REGCM },
16739 [12] = { REG_VIRT0, COPY16_REGCM },
16740 [13] = { REG_VIRT0, COPY16_REGCM },
16741 [14] = { REG_VIRT0, COPY16_REGCM },
16742 [15] = { REG_VIRT0, COPY16_REGCM },
16744 [TEMPLATE_PHI32] = {
16745 .lhs = { [0] = { REG_VIRT0, COPY32_REGCM } },
16747 [ 0] = { REG_VIRT0, COPY32_REGCM },
16748 [ 1] = { REG_VIRT0, COPY32_REGCM },
16749 [ 2] = { REG_VIRT0, COPY32_REGCM },
16750 [ 3] = { REG_VIRT0, COPY32_REGCM },
16751 [ 4] = { REG_VIRT0, COPY32_REGCM },
16752 [ 5] = { REG_VIRT0, COPY32_REGCM },
16753 [ 6] = { REG_VIRT0, COPY32_REGCM },
16754 [ 7] = { REG_VIRT0, COPY32_REGCM },
16755 [ 8] = { REG_VIRT0, COPY32_REGCM },
16756 [ 9] = { REG_VIRT0, COPY32_REGCM },
16757 [10] = { REG_VIRT0, COPY32_REGCM },
16758 [11] = { REG_VIRT0, COPY32_REGCM },
16759 [12] = { REG_VIRT0, COPY32_REGCM },
16760 [13] = { REG_VIRT0, COPY32_REGCM },
16761 [14] = { REG_VIRT0, COPY32_REGCM },
16762 [15] = { REG_VIRT0, COPY32_REGCM },
16764 [TEMPLATE_STORE8] = {
16766 [0] = { REG_UNSET, REGCM_GPR32 },
16767 [1] = { REG_UNSET, REGCM_GPR8_LO },
16770 [TEMPLATE_STORE16] = {
16772 [0] = { REG_UNSET, REGCM_GPR32 },
16773 [1] = { REG_UNSET, REGCM_GPR16 },
16776 [TEMPLATE_STORE32] = {
16778 [0] = { REG_UNSET, REGCM_GPR32 },
16779 [1] = { REG_UNSET, REGCM_GPR32 },
16782 [TEMPLATE_LOAD8] = {
16783 .lhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16784 .rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16786 [TEMPLATE_LOAD16] = {
16787 .lhs = { [0] = { REG_UNSET, REGCM_GPR16 } },
16788 .rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16790 [TEMPLATE_LOAD32] = {
16791 .lhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16792 .rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16794 [TEMPLATE_BINARY8_REG] = {
16795 .lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16797 [0] = { REG_VIRT0, REGCM_GPR8_LO },
16798 [1] = { REG_UNSET, REGCM_GPR8_LO },
16801 [TEMPLATE_BINARY16_REG] = {
16802 .lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16804 [0] = { REG_VIRT0, REGCM_GPR16 },
16805 [1] = { REG_UNSET, REGCM_GPR16 },
16808 [TEMPLATE_BINARY32_REG] = {
16809 .lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16811 [0] = { REG_VIRT0, REGCM_GPR32 },
16812 [1] = { REG_UNSET, REGCM_GPR32 },
16815 [TEMPLATE_BINARY8_IMM] = {
16816 .lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16818 [0] = { REG_VIRT0, REGCM_GPR8_LO },
16819 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16822 [TEMPLATE_BINARY16_IMM] = {
16823 .lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16825 [0] = { REG_VIRT0, REGCM_GPR16 },
16826 [1] = { REG_UNNEEDED, REGCM_IMM16 },
16829 [TEMPLATE_BINARY32_IMM] = {
16830 .lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16832 [0] = { REG_VIRT0, REGCM_GPR32 },
16833 [1] = { REG_UNNEEDED, REGCM_IMM32 },
16836 [TEMPLATE_SL8_CL] = {
16837 .lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16839 [0] = { REG_VIRT0, REGCM_GPR8_LO },
16840 [1] = { REG_CL, REGCM_GPR8_LO },
16843 [TEMPLATE_SL16_CL] = {
16844 .lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16846 [0] = { REG_VIRT0, REGCM_GPR16 },
16847 [1] = { REG_CL, REGCM_GPR8_LO },
16850 [TEMPLATE_SL32_CL] = {
16851 .lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16853 [0] = { REG_VIRT0, REGCM_GPR32 },
16854 [1] = { REG_CL, REGCM_GPR8_LO },
16857 [TEMPLATE_SL8_IMM] = {
16858 .lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16860 [0] = { REG_VIRT0, REGCM_GPR8_LO },
16861 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16864 [TEMPLATE_SL16_IMM] = {
16865 .lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16867 [0] = { REG_VIRT0, REGCM_GPR16 },
16868 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16871 [TEMPLATE_SL32_IMM] = {
16872 .lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16874 [0] = { REG_VIRT0, REGCM_GPR32 },
16875 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16878 [TEMPLATE_UNARY8] = {
16879 .lhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16880 .rhs = { [0] = { REG_VIRT0, REGCM_GPR8_LO } },
16882 [TEMPLATE_UNARY16] = {
16883 .lhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16884 .rhs = { [0] = { REG_VIRT0, REGCM_GPR16 } },
16886 [TEMPLATE_UNARY32] = {
16887 .lhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16888 .rhs = { [0] = { REG_VIRT0, REGCM_GPR32 } },
16890 [TEMPLATE_CMP8_REG] = {
16891 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16893 [0] = { REG_UNSET, REGCM_GPR8_LO },
16894 [1] = { REG_UNSET, REGCM_GPR8_LO },
16897 [TEMPLATE_CMP16_REG] = {
16898 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16900 [0] = { REG_UNSET, REGCM_GPR16 },
16901 [1] = { REG_UNSET, REGCM_GPR16 },
16904 [TEMPLATE_CMP32_REG] = {
16905 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16907 [0] = { REG_UNSET, REGCM_GPR32 },
16908 [1] = { REG_UNSET, REGCM_GPR32 },
16911 [TEMPLATE_CMP8_IMM] = {
16912 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16914 [0] = { REG_UNSET, REGCM_GPR8_LO },
16915 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16918 [TEMPLATE_CMP16_IMM] = {
16919 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16921 [0] = { REG_UNSET, REGCM_GPR16 },
16922 [1] = { REG_UNNEEDED, REGCM_IMM16 },
16925 [TEMPLATE_CMP32_IMM] = {
16926 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16928 [0] = { REG_UNSET, REGCM_GPR32 },
16929 [1] = { REG_UNNEEDED, REGCM_IMM32 },
16932 [TEMPLATE_TEST8] = {
16933 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16934 .rhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16936 [TEMPLATE_TEST16] = {
16937 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16938 .rhs = { [0] = { REG_UNSET, REGCM_GPR16 } },
16940 [TEMPLATE_TEST32] = {
16941 .lhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16942 .rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
16945 .lhs = { [0] = { REG_UNSET, REGCM_GPR8_LO } },
16946 .rhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16949 .rhs = { [0] = { REG_EFLAGS, REGCM_FLAGS } },
16951 [TEMPLATE_INB_DX] = {
16952 .lhs = { [0] = { REG_AL, REGCM_GPR8_LO } },
16953 .rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16955 [TEMPLATE_INB_IMM] = {
16956 .lhs = { [0] = { REG_AL, REGCM_GPR8_LO } },
16957 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16959 [TEMPLATE_INW_DX] = {
16960 .lhs = { [0] = { REG_AX, REGCM_GPR16 } },
16961 .rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16963 [TEMPLATE_INW_IMM] = {
16964 .lhs = { [0] = { REG_AX, REGCM_GPR16 } },
16965 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16967 [TEMPLATE_INL_DX] = {
16968 .lhs = { [0] = { REG_EAX, REGCM_GPR32 } },
16969 .rhs = { [0] = { REG_DX, REGCM_GPR16 } },
16971 [TEMPLATE_INL_IMM] = {
16972 .lhs = { [0] = { REG_EAX, REGCM_GPR32 } },
16973 .rhs = { [0] = { REG_UNNEEDED, REGCM_IMM8 } },
16975 [TEMPLATE_OUTB_DX] = {
16977 [0] = { REG_AL, REGCM_GPR8_LO },
16978 [1] = { REG_DX, REGCM_GPR16 },
16981 [TEMPLATE_OUTB_IMM] = {
16983 [0] = { REG_AL, REGCM_GPR8_LO },
16984 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16987 [TEMPLATE_OUTW_DX] = {
16989 [0] = { REG_AX, REGCM_GPR16 },
16990 [1] = { REG_DX, REGCM_GPR16 },
16993 [TEMPLATE_OUTW_IMM] = {
16995 [0] = { REG_AX, REGCM_GPR16 },
16996 [1] = { REG_UNNEEDED, REGCM_IMM8 },
16999 [TEMPLATE_OUTL_DX] = {
17001 [0] = { REG_EAX, REGCM_GPR32 },
17002 [1] = { REG_DX, REGCM_GPR16 },
17005 [TEMPLATE_OUTL_IMM] = {
17007 [0] = { REG_EAX, REGCM_GPR32 },
17008 [1] = { REG_UNNEEDED, REGCM_IMM8 },
17012 .lhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
17013 .rhs = { [0] = { REG_UNSET, REGCM_GPR32 } },
17015 [TEMPLATE_RDMSR] = {
17017 [0] = { REG_EAX, REGCM_GPR32 },
17018 [1] = { REG_EDX, REGCM_GPR32 },
17020 .rhs = { [0] = { REG_ECX, REGCM_GPR32 } },
17022 [TEMPLATE_WRMSR] = {
17024 [0] = { REG_ECX, REGCM_GPR32 },
17025 [1] = { REG_EAX, REGCM_GPR32 },
17026 [2] = { REG_EDX, REGCM_GPR32 },
17029 [TEMPLATE_UMUL8] = {
17030 .lhs = { [0] = { REG_AX, REGCM_GPR16 } },
17032 [0] = { REG_AL, REGCM_GPR8_LO },
17033 [1] = { REG_UNSET, REGCM_GPR8_LO },
17036 [TEMPLATE_UMUL16] = {
17037 .lhs = { [0] = { REG_DXAX, REGCM_DIVIDEND32 } },
17039 [0] = { REG_AX, REGCM_GPR16 },
17040 [1] = { REG_UNSET, REGCM_GPR16 },
17043 [TEMPLATE_UMUL32] = {
17044 .lhs = { [0] = { REG_EDXEAX, REGCM_DIVIDEND64 } },
17046 [0] = { REG_EAX, REGCM_GPR32 },
17047 [1] = { REG_UNSET, REGCM_GPR32 },
17050 [TEMPLATE_DIV8] = {
17052 [0] = { REG_AL, REGCM_GPR8_LO },
17053 [1] = { REG_AH, REGCM_GPR8 },
17056 [0] = { REG_AX, REGCM_GPR16 },
17057 [1] = { REG_UNSET, REGCM_GPR8_LO },
17060 [TEMPLATE_DIV16] = {
17062 [0] = { REG_AX, REGCM_GPR16 },
17063 [1] = { REG_DX, REGCM_GPR16 },
17066 [0] = { REG_DXAX, REGCM_DIVIDEND32 },
17067 [1] = { REG_UNSET, REGCM_GPR16 },
17070 [TEMPLATE_DIV32] = {
17072 [0] = { REG_EAX, REGCM_GPR32 },
17073 [1] = { REG_EDX, REGCM_GPR32 },
17076 [0] = { REG_EDXEAX, REGCM_DIVIDEND64 },
17077 [1] = { REG_UNSET, REGCM_GPR32 },
/* Rewrite a branch so it consumes the flags of an explicit compare/test
 * instruction inserted immediately before it.  The branch's old operand
 * is released, its op becomes the requested conditional jump, and its
 * template becomes TEMPLATE_JMP.
 */
17082 static void fixup_branch(struct compile_state *state,
17083 struct triple *branch, int jmp_op, int cmp_op, struct type *cmp_type,
17084 struct triple *left, struct triple *right)
17086 struct triple *test;
17088 internal_error(state, branch, "no branch test?");
17090 test = pre_triple(state, branch,
17091 cmp_op, cmp_type, left, right);
/* Default to the test template; OP_CMP gets a register compare, or the
 * immediate form when the second operand fits in a 32bit immediate.
 */
17092 test->template_id = TEMPLATE_TEST32;
17093 if (cmp_op == OP_CMP) {
17094 test->template_id = TEMPLATE_CMP32_REG;
17095 if (get_imm32(test, &RHS(test, 1))) {
17096 test->template_id = TEMPLATE_CMP32_IMM;
/* Record the new use edges and repoint the branch at the test result. */
17099 use_triple(RHS(test, 0), test);
17100 use_triple(RHS(test, 1), test);
17101 unuse_triple(RHS(branch, 0), branch);
17102 RHS(branch, 0) = test;
17103 branch->op = jmp_op;
17104 branch->template_id = TEMPLATE_JMP;
17105 use_triple(RHS(branch, 0), branch);
/* Walk every user of a comparison result: follow OP_COPY users
 * recursively, and convert OP_BRANCH users into explicit
 * compare + conditional-jump sequences via fixup_branch().
 */
17108 static void fixup_branches(struct compile_state *state,
17109 struct triple *cmp, struct triple *use, int jmp_op)
17111 struct triple_set *entry, *next;
/* Grab 'next' up front: fixup_branch() rewrites use edges while we
 * are iterating over this use list.
 */
17112 for(entry = use->use; entry; entry = next) {
17113 next = entry->next;
17114 if (entry->member->op == OP_COPY) {
17115 fixup_branches(state, cmp, entry->member, jmp_op);
17117 else if (entry->member->op == OP_BRANCH) {
17118 struct triple *branch;
17119 struct triple *left, *right;
17121 left = RHS(cmp, 0);
/* NOTE(review): 'right' is only assigned when cmp has a second
 * operand; presumably it is zero-initialized on a line elided
 * from this view -- confirm against the full source.
 */
17122 if (TRIPLE_RHS(cmp->sizes) > 1) {
17123 right = RHS(cmp, 1);
17125 branch = entry->member;
17126 fixup_branch(state, branch, jmp_op,
17127 cmp->op, cmp->type, left, right);
/* Lower a generic boolean comparison: give 'ins' the appropriate
 * test/compare template, append a setcc-style triple that materializes
 * the comparison as a logical (char) value, redirect non-branch users
 * to that value, and convert branch users into conditional jumps.
 */
17132 static void bool_cmp(struct compile_state *state,
17133 struct triple *ins, int cmp_op, int jmp_op, int set_op)
17135 struct triple_set *entry, *next;
17136 struct triple *set;
17138 /* Put a barrier up before the cmp which precedes the
17139 * copy instruction. If a set actually occurs this gives
17140 * us a chance to move variables in registers out of the way.
17143 /* Modify the comparison operator */
17145 ins->template_id = TEMPLATE_TEST32;
17146 if (cmp_op == OP_CMP) {
17147 ins->template_id = TEMPLATE_CMP32_REG;
17148 if (get_imm32(ins, &RHS(ins, 1))) {
17149 ins->template_id = TEMPLATE_CMP32_IMM;
17152 /* Generate the instruction sequence that will transform the
17153 * result of the comparison into a logical value.
17155 set = post_triple(state, ins, set_op, &char_type, ins, 0);
17156 use_triple(ins, set);
17157 set->template_id = TEMPLATE_SET;
/* Redirect all users of the comparison (other than the new set
 * itself) to the set's result, then fix up branch users.
 */
17159 for(entry = ins->use; entry; entry = next) {
17160 next = entry->next;
17161 if (entry->member == set) {
17164 replace_rhs_use(state, ins, set, entry->member);
17166 fixup_branches(state, ins, set, jmp_op);
/* Verify that the LHS result pieces of a multi-result instruction
 * immediately follow it, in order, each an OP_PIECE whose u.cval
 * matches its index; returns the triple just past the last piece.
 */
17169 static struct triple *after_lhs(struct compile_state *state, struct triple *ins)
17171 struct triple *next;
17173 lhs = TRIPLE_LHS(ins->sizes);
17174 for(next = ins->next, i = 0; i < lhs; i++, next = next->next) {
17175 if (next != LHS(ins, i)) {
17176 internal_error(state, ins, "malformed lhs on %s",
17179 if (next->op != OP_PIECE) {
17180 internal_error(state, ins, "bad lhs op %s at %d on %s",
17181 tops(next->op), i, tops(ins->op));
17183 if (next->u.cval != i) {
17184 internal_error(state, ins, "bad u.cval of %d %d expected",
/* Return the register constraint (fixed register + normalized class
 * mask) for lhs operand 'index' of ins.  An OP_PIECE is first mapped
 * back to its parent instruction and the piece number becomes the index.
 */
17191 struct reg_info arch_reg_lhs(struct compile_state *state, struct triple *ins, int index)
17193 struct ins_template *template;
17194 struct reg_info result;
17196 if (ins->op == OP_PIECE) {
17197 index = ins->u.cval;
17198 ins = MISC(ins, 0);
17200 zlhs = TRIPLE_LHS(ins->sizes);
17201 if (triple_is_def(state, ins)) {
17204 if (index >= zlhs) {
17205 internal_error(state, ins, "index %d out of range for %s\n",
17206 index, tops(ins->op));
/* Per-instruction constraint info takes precedence over the
 * generic template table.
 */
17210 template = &ins->u.ainfo->tmpl;
17213 if (ins->template_id > LAST_TEMPLATE) {
17214 internal_error(state, ins, "bad template number %d",
17217 template = &templates[ins->template_id];
17220 result = template->lhs[index];
17221 result.regcm = arch_regcm_normalize(state, result.regcm);
/* A result that needs a real register cannot live in an immediate
 * class; strip immediates from the mask.
 */
17222 if (result.reg != REG_UNNEEDED) {
17223 result.regcm &= ~(REGCM_IMM32 | REGCM_IMM16 | REGCM_IMM8);
17225 if (result.regcm == 0) {
17226 internal_error(state, ins, "lhs %d regcm == 0", index);
/* Return the register constraint (fixed register + normalized class
 * mask) for rhs operand 'index' of ins.  OP_PIECE has no rhs operands
 * and is rejected.
 */
17231 struct reg_info arch_reg_rhs(struct compile_state *state, struct triple *ins, int index)
17233 struct reg_info result;
17234 struct ins_template *template;
/* NOTE(review): '>' lets index == TRIPLE_RHS(...) through; confirm
 * whether '>=' was intended, or whether callers never pass the
 * boundary value.
 */
17235 if ((index > TRIPLE_RHS(ins->sizes)) ||
17236 (ins->op == OP_PIECE)) {
17237 internal_error(state, ins, "index %d out of range for %s\n",
17238 index, tops(ins->op));
/* Per-instruction constraint info takes precedence over the
 * generic template table.
 */
17242 template = &ins->u.ainfo->tmpl;
17245 if (ins->template_id > LAST_TEMPLATE) {
17246 internal_error(state, ins, "bad template number %d",
17249 template = &templates[ins->template_id];
17252 result = template->rhs[index];
17253 result.regcm = arch_regcm_normalize(state, result.regcm);
17254 if (result.regcm == 0) {
17255 internal_error(state, ins, "rhs %d regcm == 0", index);
/* Replace a generic divide/modulo 'ins' with an explicit two-result
 * division triple (div_op) plus two OP_PIECE triples: piece0 = quotient,
 * piece1 = remainder.  'index' selects which piece replaces the uses of
 * the original instruction (0 for divide, 1 for modulo).
 */
17260 static struct triple *mod_div(struct compile_state *state,
17261 struct triple *ins, int div_op, int index)
17263 struct triple *div, *piece0, *piece1;
17265 /* Generate a piece to hold the remainder */
17266 piece1 = post_triple(state, ins, OP_PIECE, ins->type, 0, 0);
17267 piece1->u.cval = 1;
17269 /* Generate a piece to hold the quotient */
17270 piece0 = post_triple(state, ins, OP_PIECE, ins->type, 0, 0);
17271 piece0->u.cval = 0;
17273 /* Generate the appropriate division instruction */
17274 div = post_triple(state, ins, div_op, ins->type, 0, 0);
17275 RHS(div, 0) = RHS(ins, 0);
17276 RHS(div, 1) = RHS(ins, 1);
17277 LHS(div, 0) = piece0;
17278 LHS(div, 1) = piece1;
17279 div->template_id = TEMPLATE_DIV32;
17280 use_triple(RHS(div, 0), div);
17281 use_triple(RHS(div, 1), div);
17282 use_triple(LHS(div, 0), div);
17283 use_triple(LHS(div, 1), div);
17285 /* Hook on piece0 */
17286 MISC(piece0, 0) = div;
17287 use_triple(div, piece0);
17289 /* Hook on piece1 */
17290 MISC(piece1, 0) = div;
17291 use_triple(div, piece1);
17293 /* Replace uses of ins with the appropriate piece of the div */
17294 propogate_use(state, ins, LHS(div, index));
17295 release_triple(state, ins);
17297 /* Return the address of the next instruction */
17298 return piece1->next;
/* Instruction selection for one triple: assign an x86 template id (and,
 * for comparisons / divides / multi-result ops, rewrite the triple into
 * the architecture's form).  Returns the next triple to process.
 */
17301 static struct triple *transform_to_arch_instruction(
17302 struct compile_state *state, struct triple *ins)
17304 /* Transform from generic 3 address instructions
17305 * to architecture specific instructions.
17306 * And apply architecture specific constraints to instructions.
17307 * Copies are inserted to preserve the register flexibility
17308 * of 3 address instructions.
17310 struct triple *next;
/* Integer constants: prefer the 8bit immediate template when the
 * value fits in a byte.
 */
17315 ins->template_id = TEMPLATE_INTCONST32;
17316 if (ins->u.cval < 256) {
17317 ins->template_id = TEMPLATE_INTCONST8;
17321 ins->template_id = TEMPLATE_INTCONST32;
17327 ins->template_id = TEMPLATE_NOP;
/* Copies: pick an immediate or register template by operand kind
 * and value size (1/2/4 bytes).
 */
17330 size = size_of(state, ins->type);
17331 if (is_imm8(RHS(ins, 0)) && (size <= 1)) {
17332 ins->template_id = TEMPLATE_COPY_IMM8;
17334 else if (is_imm16(RHS(ins, 0)) && (size <= 2)) {
17335 ins->template_id = TEMPLATE_COPY_IMM16;
17337 else if (is_imm32(RHS(ins, 0)) && (size <= 4)) {
17338 ins->template_id = TEMPLATE_COPY_IMM32;
17340 else if (is_const(RHS(ins, 0))) {
17341 internal_error(state, ins, "bad constant passed to copy");
17343 else if (size <= 1) {
17344 ins->template_id = TEMPLATE_COPY8_REG;
17346 else if (size <= 2) {
17347 ins->template_id = TEMPLATE_COPY16_REG;
17349 else if (size <= 4) {
17350 ins->template_id = TEMPLATE_COPY32_REG;
17353 internal_error(state, ins, "bad type passed to copy");
/* Phis: sized like copies. */
17357 size = size_of(state, ins->type);
17359 ins->template_id = TEMPLATE_PHI8;
17361 else if (size <= 2) {
17362 ins->template_id = TEMPLATE_PHI16;
17364 else if (size <= 4) {
17365 ins->template_id = TEMPLATE_PHI32;
17368 internal_error(state, ins, "bad type passed to phi");
/* Stores: template chosen by the C type being stored. */
17372 switch(ins->type->type & TYPE_MASK) {
17373 case TYPE_CHAR: case TYPE_UCHAR:
17374 ins->template_id = TEMPLATE_STORE8;
17376 case TYPE_SHORT: case TYPE_USHORT:
17377 ins->template_id = TEMPLATE_STORE16;
17379 case TYPE_INT: case TYPE_UINT:
17380 case TYPE_LONG: case TYPE_ULONG:
17382 ins->template_id = TEMPLATE_STORE32;
17385 internal_error(state, ins, "unknown type in store");
/* Loads: same idea as stores. */
17390 switch(ins->type->type & TYPE_MASK) {
17391 case TYPE_CHAR: case TYPE_UCHAR:
17392 ins->template_id = TEMPLATE_LOAD8;
17396 ins->template_id = TEMPLATE_LOAD16;
17403 ins->template_id = TEMPLATE_LOAD32;
17406 internal_error(state, ins, "unknown type in load");
/* Two-operand ALU ops: immediate form when operand 1 fits imm32. */
17416 ins->template_id = TEMPLATE_BINARY32_REG;
17417 if (get_imm32(ins, &RHS(ins, 1))) {
17418 ins->template_id = TEMPLATE_BINARY32_IMM;
17423 ins->template_id = TEMPLATE_DIV32;
17424 next = after_lhs(state, ins);
17426 /* FIXME UMUL does not work yet.. */
17428 ins->template_id = TEMPLATE_UMUL32;
/* div/mod become a two-result divide; index picks quotient (0)
 * or remainder (1).
 */
17431 next = mod_div(state, ins, OP_UDIVT, 0);
17434 next = mod_div(state, ins, OP_SDIVT, 0);
17437 next = mod_div(state, ins, OP_UDIVT, 1);
17440 next = mod_div(state, ins, OP_SDIVT, 1);
/* Shifts: count in %cl unless it is an 8bit immediate; a wider
 * count operand gets narrowed through a typed copy to char.
 */
17445 ins->template_id = TEMPLATE_SL32_CL;
17446 if (get_imm8(ins, &RHS(ins, 1))) {
17447 ins->template_id = TEMPLATE_SL32_IMM;
17448 } else if (size_of(state, RHS(ins, 1)->type) > 1) {
17449 typed_pre_copy(state, &char_type, ins, 1);
17454 ins->template_id = TEMPLATE_UNARY32;
/* All comparison ops route through bool_cmp with the matching
 * conditional-jump and set opcodes.
 */
17457 bool_cmp(state, ins, OP_CMP, OP_JMP_EQ, OP_SET_EQ);
17460 bool_cmp(state, ins, OP_CMP, OP_JMP_NOTEQ, OP_SET_NOTEQ);
17463 bool_cmp(state, ins, OP_CMP, OP_JMP_SLESS, OP_SET_SLESS);
17466 bool_cmp(state, ins, OP_CMP, OP_JMP_ULESS, OP_SET_ULESS);
17469 bool_cmp(state, ins, OP_CMP, OP_JMP_SMORE, OP_SET_SMORE);
17472 bool_cmp(state, ins, OP_CMP, OP_JMP_UMORE, OP_SET_UMORE);
17475 bool_cmp(state, ins, OP_CMP, OP_JMP_SLESSEQ, OP_SET_SLESSEQ);
17478 bool_cmp(state, ins, OP_CMP, OP_JMP_ULESSEQ, OP_SET_ULESSEQ);
17481 bool_cmp(state, ins, OP_CMP, OP_JMP_SMOREEQ, OP_SET_SMOREEQ);
17484 bool_cmp(state, ins, OP_CMP, OP_JMP_UMOREEQ, OP_SET_UMOREEQ);
17487 bool_cmp(state, ins, OP_TEST, OP_JMP_NOTEQ, OP_SET_NOTEQ);
17490 bool_cmp(state, ins, OP_TEST, OP_JMP_EQ, OP_SET_EQ);
/* Conditional branch on a plain value: materialize a test. */
17493 if (TRIPLE_RHS(ins->sizes) > 0) {
17494 struct triple *left = RHS(ins, 0);
17495 fixup_branch(state, ins, OP_JMP_NOTEQ, OP_TEST,
17496 left->type, left, 0);
17500 ins->template_id = TEMPLATE_NOP;
/* Port I/O: the *_DX templates are laid out so that the immediate
 * variant is template_id + 1 (see the += 1 below).
 */
17507 case OP_INB: ins->template_id = TEMPLATE_INB_DX; break;
17508 case OP_INW: ins->template_id = TEMPLATE_INW_DX; break;
17509 case OP_INL: ins->template_id = TEMPLATE_INL_DX; break;
17511 if (get_imm8(ins, &RHS(ins, 0))) {
17512 ins->template_id += 1;
17519 case OP_OUTB: ins->template_id = TEMPLATE_OUTB_DX; break;
17520 case OP_OUTW: ins->template_id = TEMPLATE_OUTW_DX; break;
17521 case OP_OUTL: ins->template_id = TEMPLATE_OUTL_DX; break;
17523 if (get_imm8(ins, &RHS(ins, 1))) {
17524 ins->template_id += 1;
17529 ins->template_id = TEMPLATE_BSF;
/* RDMSR produces two results; skip over its LHS pieces. */
17532 ins->template_id = TEMPLATE_RDMSR;
17533 next = after_lhs(state, ins);
17536 ins->template_id = TEMPLATE_WRMSR;
17539 ins->template_id = TEMPLATE_NOP;
17542 ins->template_id = TEMPLATE_NOP;
17543 next = after_lhs(state, ins);
17545 /* Already transformed instructions */
17547 ins->template_id = TEMPLATE_TEST32;
17550 ins->template_id = TEMPLATE_CMP32_REG;
17551 if (get_imm32(ins, &RHS(ins, 1))) {
17552 ins->template_id = TEMPLATE_CMP32_IMM;
17556 ins->template_id = TEMPLATE_NOP;
17558 case OP_JMP_EQ: case OP_JMP_NOTEQ:
17559 case OP_JMP_SLESS: case OP_JMP_ULESS:
17560 case OP_JMP_SMORE: case OP_JMP_UMORE:
17561 case OP_JMP_SLESSEQ: case OP_JMP_ULESSEQ:
17562 case OP_JMP_SMOREEQ: case OP_JMP_UMOREEQ:
17563 ins->template_id = TEMPLATE_JMP;
17565 case OP_SET_EQ: case OP_SET_NOTEQ:
17566 case OP_SET_SLESS: case OP_SET_ULESS:
17567 case OP_SET_SMORE: case OP_SET_UMORE:
17568 case OP_SET_SLESSEQ: case OP_SET_ULESSEQ:
17569 case OP_SET_SMOREEQ: case OP_SET_UMOREEQ:
17570 ins->template_id = TEMPLATE_SET;
17572 /* Unhandled instructions */
17575 internal_error(state, ins, "unhandled ins: %d %s\n",
17576 ins->op, tops(ins->op));
/* Return the next unique, monotonically increasing label number.
 * The 'state' parameter is unused; counting is via a static counter.
 */
17582 static long next_label(struct compile_state *state)
17584 static long label_counter = 0;
17585 return ++label_counter;
/* Walk the circular triple list starting at state->first and assign a
 * fresh local label number (u.cval) to each OP_LABEL / OP_SDECL triple.
 */
17587 static void generate_local_labels(struct compile_state *state)
17589 struct triple *first, *label;
17590 first = state->first;
17593 if ((label->op == OP_LABEL) ||
17594 (label->op == OP_SDECL)) {
17596 label->u.cval = next_label(state);
17602 label = label->next;
17603 } while(label != first);
/* Validate that the register assigned to 'triple' is set and belongs to
 * one of the requested register classes; callers use the returned
 * register number (see reg() below).
 */
17606 static int check_reg(struct compile_state *state,
17607 struct triple *triple, int classes)
17611 reg = ID_REG(triple->id);
17612 if (reg == REG_UNSET) {
17613 internal_error(state, triple, "register not set");
17615 mask = arch_reg_regcm(state, reg);
17616 if (!(classes & mask)) {
17617 internal_error(state, triple, "reg %d in wrong class",
/* Map a register number to its assembly name ("%eax", "%mm0", ...).
 * The table layout is tied to the register numbering; the #error guard
 * fires if the numbering assumptions change.
 */
17623 static const char *arch_reg_str(int reg)
17626 #error "Registers have renumberd fix arch_reg_str"
17628 static const char *regs[] = {
17632 "%al", "%bl", "%cl", "%dl", "%ah", "%bh", "%ch", "%dh",
17633 "%ax", "%bx", "%cx", "%dx", "%si", "%di", "%bp", "%sp",
17634 "%eax", "%ebx", "%ecx", "%edx", "%esi", "%edi", "%ebp", "%esp",
17637 "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7",
17638 "%xmm0", "%xmm1", "%xmm2", "%xmm3",
17639 "%xmm4", "%xmm5", "%xmm6", "%xmm7",
/* Reject register numbers outside the table's range. */
17641 if (!((reg >= REG_EFLAGS) && (reg <= REG_XMM7))) {
/* Convenience wrapper: validate triple's register against 'classes'
 * and return its assembly name.
 */
17648 static const char *reg(struct compile_state *state, struct triple *triple,
17652 reg = check_reg(state, triple, classes);
17653 return arch_reg_str(reg);
/* Return the AT&T-syntax operand-size suffix for a type:
 * "b" (1 byte), "w" (2 bytes), or "l" (4 bytes).
 */
17656 const char *type_suffix(struct compile_state *state, struct type *type)
17658 const char *suffix;
17659 switch(size_of(state, type)) {
17660 case 1: suffix = "b"; break;
17661 case 2: suffix = "w"; break;
17662 case 4: suffix = "l"; break;
17664 internal_error(state, 0, "unknown suffix");
/* Emit a constant operand in immediate syntax: " $<value> " for integer
 * constants, or " $L<prefix><label>+<offset> " for address constants
 * whose base is a labeled static declaration.
 */
17671 static void print_const_val(
17672 struct compile_state *state, struct triple *ins, FILE *fp)
17676 fprintf(fp, " $%ld ",
17677 (long)(ins->u.cval));
17680 if (MISC(ins, 0)->op != OP_SDECL) {
17681 internal_error(state, ins, "bad base for addrconst");
17683 if (MISC(ins, 0)->u.cval <= 0) {
17684 internal_error(state, ins, "unlabeled constant");
17686 fprintf(fp, " $L%s%lu+%lu ",
17687 state->label_prefix,
17688 (unsigned long)(MISC(ins, 0)->u.cval),
17689 (unsigned long)(ins->u.cval));
17692 internal_error(state, ins, "unknown constant type");
/* Emit a constant as assembler data directives: .byte/.short/.int for
 * integer constants by type size, .int label+offset for address
 * constants, and a run of .byte directives for blob data.
 */
17697 static void print_const(struct compile_state *state,
17698 struct triple *ins, FILE *fp)
17702 switch(ins->type->type & TYPE_MASK) {
17705 fprintf(fp, ".byte 0x%02lx\n",
17706 (unsigned long)(ins->u.cval));
17710 fprintf(fp, ".short 0x%04lx\n",
17711 (unsigned long)(ins->u.cval));
17717 fprintf(fp, ".int %lu\n",
17718 (unsigned long)(ins->u.cval));
17721 internal_error(state, ins, "Unknown constant type");
/* Address constants must point at a labeled static declaration. */
17725 if (MISC(ins, 0)->op != OP_SDECL) {
17726 internal_error(state, ins, "bad base for addrconst");
17728 if (MISC(ins, 0)->u.cval <= 0) {
17729 internal_error(state, ins, "unlabeled constant");
17731 fprintf(fp, ".int L%s%lu+%lu\n",
17732 state->label_prefix,
17733 (unsigned long)(MISC(ins, 0)->u.cval),
17734 (unsigned long)(ins->u.cval));
/* Raw blob data: one .byte per byte. */
17738 unsigned char *blob;
17740 size = size_of(state, ins->type);
17741 blob = ins->u.blob;
17742 for(i = 0; i < size; i++) {
17743 fprintf(fp, ".byte 0x%02x\n",
17749 internal_error(state, ins, "Unknown constant type");
17754 #define TEXT_SECTION ".rom.text"
17755 #define DATA_SECTION ".rom.data"
/* Place a constant into the data section's constant pool under a fresh
 * label, switch back to the text section, and return the label number.
 */
17757 static long get_const_pool_ref(
17758 struct compile_state *state, struct triple *ins, FILE *fp)
17761 ref = next_label(state);
17762 fprintf(fp, ".section \"" DATA_SECTION "\"\n");
17763 fprintf(fp, ".balign %d\n", align_of(state, ins->type));
17764 fprintf(fp, "L%s%lu:\n", state->label_prefix, ref);
17765 print_const(state, ins, fp);
17766 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
/* Emit a two-operand ALU instruction 'op'.  x86 form is destructive, so
 * the destination register must equal the first source register.  A
 * constant second operand is emitted as an immediate; otherwise both
 * operands are printed in a register class both registers satisfy.
 */
17770 static void print_binary_op(struct compile_state *state,
17771 const char *op, struct triple *ins, FILE *fp)
17774 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17775 if (ID_REG(RHS(ins, 0)->id) != ID_REG(ins->id)) {
17776 internal_error(state, ins, "invalid register assignment");
17778 if (is_const(RHS(ins, 1))) {
17779 fprintf(fp, "\t%s ", op);
17780 print_const_val(state, RHS(ins, 1), fp);
17781 fprintf(fp, ", %s\n",
17782 reg(state, RHS(ins, 0), mask));
17785 unsigned lmask, rmask;
/* Narrow the class mask to what both registers share so the two
 * operands print at the same width.
 */
17787 lreg = check_reg(state, RHS(ins, 0), mask);
17788 rreg = check_reg(state, RHS(ins, 1), mask);
17789 lmask = arch_reg_regcm(state, lreg);
17790 rmask = arch_reg_regcm(state, rreg);
17791 mask = lmask & rmask;
17792 fprintf(fp, "\t%s %s, %s\n",
17794 reg(state, RHS(ins, 1), mask),
17795 reg(state, RHS(ins, 0), mask));
/* Emit a single-operand instruction 'op' on a general purpose register. */
17798 static void print_unary_op(struct compile_state *state,
17799 const char *op, struct triple *ins, FILE *fp)
17802 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17803 fprintf(fp, "\t%s %s\n",
17805 reg(state, RHS(ins, 0), mask));
/* Emit a shift instruction.  Destination must equal the first source
 * (destructive x86 form).  The count is either an immediate or the
 * low 8bit register (%cl by register allocation).
 */
17808 static void print_op_shift(struct compile_state *state,
17809 const char *op, struct triple *ins, FILE *fp)
17812 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
17813 if (ID_REG(RHS(ins, 0)->id) != ID_REG(ins->id)) {
17814 internal_error(state, ins, "invalid register assignment");
17816 if (is_const(RHS(ins, 1))) {
17817 fprintf(fp, "\t%s ", op);
17818 print_const_val(state, RHS(ins, 1), fp);
17819 fprintf(fp, ", %s\n",
17820 reg(state, RHS(ins, 0), mask));
17823 fprintf(fp, "\t%s %s, %s\n",
17825 reg(state, RHS(ins, 1), REGCM_GPR8_LO),
17826 reg(state, RHS(ins, 0), mask));
/* Emit an inb/inw/inl port-input instruction.  The destination must be
 * the accumulator (%al/%ax/%eax); the port is either an immediate or %dx.
 */
17830 static void print_op_in(struct compile_state *state, struct triple *ins, FILE *fp)
17837 case OP_INB: op = "inb", mask = REGCM_GPR8_LO; break;
17838 case OP_INW: op = "inw", mask = REGCM_GPR16; break;
17839 case OP_INL: op = "inl", mask = REGCM_GPR32; break;
17841 internal_error(state, ins, "not an in operation");
17845 dreg = check_reg(state, ins, mask);
17846 if (!reg_is_reg(state, dreg, REG_EAX)) {
17847 internal_error(state, ins, "dst != %%eax");
17849 if (is_const(RHS(ins, 0))) {
17850 fprintf(fp, "\t%s ", op);
17851 print_const_val(state, RHS(ins, 0), fp);
17852 fprintf(fp, ", %s\n",
17853 reg(state, ins, mask));
/* Variable port: must be in %dx. */
17857 addr_reg = check_reg(state, RHS(ins, 0), REGCM_GPR16);
17858 if (!reg_is_reg(state, addr_reg, REG_DX)) {
17859 internal_error(state, ins, "src != %%dx");
17861 fprintf(fp, "\t%s %s, %s\n",
17863 reg(state, RHS(ins, 0), REGCM_GPR16),
17864 reg(state, ins, mask));
/* Emit an outb/outw/outl port-output instruction.  The source must be
 * the accumulator (%al/%ax/%eax); the port is either an immediate or %dx.
 */
17868 static void print_op_out(struct compile_state *state, struct triple *ins, FILE *fp)
17875 case OP_OUTB: op = "outb", mask = REGCM_GPR8_LO; break;
17876 case OP_OUTW: op = "outw", mask = REGCM_GPR16; break;
17877 case OP_OUTL: op = "outl", mask = REGCM_GPR32; break;
17879 internal_error(state, ins, "not an out operation");
17883 lreg = check_reg(state, RHS(ins, 0), mask);
17884 if (!reg_is_reg(state, lreg, REG_EAX)) {
17885 internal_error(state, ins, "src != %%eax");
17887 if (is_const(RHS(ins, 1))) {
17888 fprintf(fp, "\t%s %s,",
17889 op, reg(state, RHS(ins, 0), mask));
17890 print_const_val(state, RHS(ins, 1), fp);
/* Variable port: must be in %dx. */
17895 addr_reg = check_reg(state, RHS(ins, 1), REGCM_GPR16);
17896 if (!reg_is_reg(state, addr_reg, REG_DX)) {
17897 internal_error(state, ins, "dst != %%dx");
17899 fprintf(fp, "\t%s %s, %s\n",
17901 reg(state, RHS(ins, 0), mask),
17902 reg(state, RHS(ins, 1), REGCM_GPR16));
/* Emit a register-to-register or immediate-to-register move, choosing
 * the instruction by the source/destination register class pair
 * (GPR widths, MMX, SSE, and the pseudo 32/64bit dividend registers).
 * Same-register moves are elided when omit_copy is set.
 */
17906 static void print_op_move(struct compile_state *state,
17907 struct triple *ins, FILE *fp)
17909 /* op_move is complex because there are many types
17910 * of registers we can move between.
17911 * Because OP_COPY will be introduced in arbitrary locations
17912 * OP_COPY must not affect flags.
17914 int omit_copy = 1; /* Is it o.k. to omit a noop copy? */
17915 struct triple *dst, *src;
17916 if (ins->op == OP_COPY) {
17921 internal_error(state, ins, "unknown move operation");
17924 if (!is_const(src)) {
17925 int src_reg, dst_reg;
17926 int src_regcm, dst_regcm;
17927 src_reg = ID_REG(src->id);
17928 dst_reg = ID_REG(dst->id);
17929 src_regcm = arch_reg_regcm(state, src_reg);
17930 dst_regcm = arch_reg_regcm(state, dst_reg);
17931 /* If the class is the same just move the register */
17932 if (src_regcm & dst_regcm &
17933 (REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32)) {
17934 if ((src_reg != dst_reg) || !omit_copy) {
17935 fprintf(fp, "\tmov %s, %s\n",
17936 reg(state, src, src_regcm),
17937 reg(state, dst, dst_regcm));
17940 /* Move 32bit to 16bit */
17941 else if ((src_regcm & REGCM_GPR32) &&
17942 (dst_regcm & REGCM_GPR16)) {
17943 src_reg = (src_reg - REGC_GPR32_FIRST) + REGC_GPR16_FIRST;
17944 if ((src_reg != dst_reg) || !omit_copy) {
17945 fprintf(fp, "\tmovw %s, %s\n",
17946 arch_reg_str(src_reg),
17947 arch_reg_str(dst_reg));
17950 /* Move from 32bit gprs to 16bit gprs */
/* NOTE(review): this condition is identical to the branch above,
 * so as written this arm is unreachable; the body converts dst
 * 16bit -> 32bit, suggesting the intended test was
 * (src & GPR16) && (dst & GPR32).  Confirm against the full source.
 */
17951 else if ((src_regcm & REGCM_GPR32) &&
17952 (dst_regcm & REGCM_GPR16)) {
17953 dst_reg = (dst_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
17954 if ((src_reg != dst_reg) || !omit_copy) {
17955 fprintf(fp, "\tmov %s, %s\n",
17956 arch_reg_str(src_reg),
17957 arch_reg_str(dst_reg));
17960 /* Move 32bit to 8bit */
17961 else if ((src_regcm & REGCM_GPR32_8) &&
17962 (dst_regcm & REGCM_GPR8_LO))
17964 src_reg = (src_reg - REGC_GPR32_8_FIRST) + REGC_GPR8_FIRST;
17965 if ((src_reg != dst_reg) || !omit_copy) {
17966 fprintf(fp, "\tmovb %s, %s\n",
17967 arch_reg_str(src_reg),
17968 arch_reg_str(dst_reg));
17971 /* Move 16bit to 8bit */
17972 else if ((src_regcm & REGCM_GPR16_8) &&
17973 (dst_regcm & REGCM_GPR8_LO))
17975 src_reg = (src_reg - REGC_GPR16_8_FIRST) + REGC_GPR8_FIRST;
17976 if ((src_reg != dst_reg) || !omit_copy) {
17977 fprintf(fp, "\tmovb %s, %s\n",
17978 arch_reg_str(src_reg),
17979 arch_reg_str(dst_reg));
17982 /* Move 8/16bit to 16/32bit */
17983 else if ((src_regcm & (REGCM_GPR8_LO | REGCM_GPR16)) &&
17984 (dst_regcm & (REGCM_GPR16 | REGCM_GPR32))) {
/* Widening move: sign- or zero-extend by source signedness. */
17986 op = is_signed(src->type)? "movsx": "movzx";
17987 fprintf(fp, "\t%s %s, %s\n",
17989 reg(state, src, src_regcm),
17990 reg(state, dst, dst_regcm));
17992 /* Move between sse registers */
17993 else if ((src_regcm & dst_regcm & REGCM_XMM)) {
17994 if ((src_reg != dst_reg) || !omit_copy) {
17995 fprintf(fp, "\tmovdqa %s, %s\n",
17996 reg(state, src, src_regcm),
17997 reg(state, dst, dst_regcm));
18000 /* Move between mmx registers */
18001 else if ((src_regcm & dst_regcm & REGCM_MMX)) {
18002 if ((src_reg != dst_reg) || !omit_copy) {
18003 fprintf(fp, "\tmovq %s, %s\n",
18004 reg(state, src, src_regcm),
18005 reg(state, dst, dst_regcm));
18008 /* Move from sse to mmx registers */
18009 else if ((src_regcm & REGCM_XMM) && (dst_regcm & REGCM_MMX)) {
18010 fprintf(fp, "\tmovdq2q %s, %s\n",
18011 reg(state, src, src_regcm),
18012 reg(state, dst, dst_regcm));
18014 /* Move from mmx to sse registers */
18015 else if ((src_regcm & REGCM_MMX) && (dst_regcm & REGCM_XMM)) {
18016 fprintf(fp, "\tmovq2dq %s, %s\n",
18017 reg(state, src, src_regcm),
18018 reg(state, dst, dst_regcm));
18020 /* Move between 32bit gprs & mmx/sse registers */
18021 else if ((src_regcm & (REGCM_GPR32 | REGCM_MMX | REGCM_XMM)) &&
18022 (dst_regcm & (REGCM_GPR32 | REGCM_MMX | REGCM_XMM))) {
18023 fprintf(fp, "\tmovd %s, %s\n",
18024 reg(state, src, src_regcm),
18025 reg(state, dst, dst_regcm));
18027 /* Move from 16bit gprs & mmx/sse registers */
18028 else if ((src_regcm & REGCM_GPR16) &&
18029 (dst_regcm & (REGCM_MMX | REGCM_XMM))) {
/* Go via the corresponding 32bit gpr: extend, then movd. */
18032 op = is_signed(src->type)? "movsx":"movzx";
18033 mid_reg = (src_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
18034 fprintf(fp, "\t%s %s, %s\n\tmovd %s, %s\n",
18036 arch_reg_str(src_reg),
18037 arch_reg_str(mid_reg),
18038 arch_reg_str(mid_reg),
18039 arch_reg_str(dst_reg));
18041 /* Move from mmx/sse registers to 16bit gprs */
18042 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
18043 (dst_regcm & REGCM_GPR16)) {
18044 dst_reg = (dst_reg - REGC_GPR16_FIRST) + REGC_GPR32_FIRST;
18045 fprintf(fp, "\tmovd %s, %s\n",
18046 arch_reg_str(src_reg),
18047 arch_reg_str(dst_reg));
18049 /* Move from gpr to 64bit dividend */
18050 else if ((src_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO)) &&
18051 (dst_regcm & REGCM_DIVIDEND64)) {
/* Fill %edx by sign extension (cltd) or with zero. */
18052 const char *extend;
18053 extend = is_signed(src->type)? "cltd":"movl $0, %edx";
18054 fprintf(fp, "\tmov %s, %%eax\n\t%s\n",
18055 arch_reg_str(src_reg),
18058 /* Move from 64bit gpr to gpr */
18059 else if ((src_regcm & REGCM_DIVIDEND64) &&
18060 (dst_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO))) {
18061 if (dst_regcm & REGCM_GPR32) {
18064 else if (dst_regcm & REGCM_GPR16) {
18067 else if (dst_regcm & REGCM_GPR8_LO) {
18070 fprintf(fp, "\tmov %s, %s\n",
18071 arch_reg_str(src_reg),
18072 arch_reg_str(dst_reg));
18074 /* Move from mmx/sse registers to 64bit gpr */
18075 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
18076 (dst_regcm & REGCM_DIVIDEND64)) {
18077 const char *extend;
18078 extend = is_signed(src->type)? "cltd": "movl $0, %edx";
18079 fprintf(fp, "\tmovd %s, %%eax\n\t%s\n",
18080 arch_reg_str(src_reg),
18083 /* Move from 64bit gpr to mmx/sse register */
18084 else if ((src_regcm & REGCM_DIVIDEND64) &&
18085 (dst_regcm & (REGCM_XMM | REGCM_MMX))) {
18086 fprintf(fp, "\tmovd %%eax, %s\n",
18087 arch_reg_str(dst_reg));
18089 #if X86_4_8BIT_GPRS
18090 /* Move from 8bit gprs to mmx/sse registers */
18091 else if ((src_regcm & REGCM_GPR8_LO) && (src_reg <= REG_DL) &&
18092 (dst_regcm & (REGCM_MMX | REGCM_XMM))) {
18095 op = is_signed(src->type)? "movsx":"movzx";
18096 mid_reg = (src_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
18097 fprintf(fp, "\t%s %s, %s\n\tmovd %s, %s\n",
18099 reg(state, src, src_regcm),
18100 arch_reg_str(mid_reg),
18101 arch_reg_str(mid_reg),
18102 reg(state, dst, dst_regcm));
18104 /* Move from mmx/sse registers and 8bit gprs */
18105 else if ((src_regcm & (REGCM_MMX | REGCM_XMM)) &&
18106 (dst_regcm & REGCM_GPR8_LO) && (dst_reg <= REG_DL)) {
18108 mid_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
18109 fprintf(fp, "\tmovd %s, %s\n",
18110 reg(state, src, src_regcm),
18111 arch_reg_str(mid_reg));
18113 /* Move from 32bit gprs to 8bit gprs */
18114 else if ((src_regcm & REGCM_GPR32) &&
18115 (dst_regcm & REGCM_GPR8_LO)) {
18116 dst_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR32_FIRST;
18117 if ((src_reg != dst_reg) || !omit_copy) {
18118 fprintf(fp, "\tmov %s, %s\n",
18119 arch_reg_str(src_reg),
18120 arch_reg_str(dst_reg));
18123 /* Move from 16bit gprs to 8bit gprs */
18124 else if ((src_regcm & REGCM_GPR16) &&
18125 (dst_regcm & REGCM_GPR8_LO)) {
18126 dst_reg = (dst_reg - REGC_GPR8_FIRST) + REGC_GPR16_FIRST;
18127 if ((src_reg != dst_reg) || !omit_copy) {
18128 fprintf(fp, "\tmov %s, %s\n",
18129 arch_reg_str(src_reg),
18130 arch_reg_str(dst_reg));
18133 #endif /* X86_4_8BIT_GPRS */
18135 internal_error(state, ins, "unknown copy type");
/* Constant source: load an immediate into the destination class. */
18141 dst_reg = ID_REG(dst->id);
18142 dst_regcm = arch_reg_regcm(state, dst_reg);
18143 if (dst_regcm & (REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO)) {
18144 fprintf(fp, "\tmov ");
18145 print_const_val(state, src, fp);
18146 fprintf(fp, ", %s\n",
18147 reg(state, dst, REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO));
18149 else if (dst_regcm & REGCM_DIVIDEND64) {
18150 if (size_of(state, dst->type) > 4) {
18151 internal_error(state, ins, "64bit constant...");
18153 fprintf(fp, "\tmov $0, %%edx\n");
18154 fprintf(fp, "\tmov ");
18155 print_const_val(state, src, fp);
18156 fprintf(fp, ", %%eax\n");
18158 else if (dst_regcm & REGCM_DIVIDEND32) {
18159 if (size_of(state, dst->type) > 2) {
18160 internal_error(state, ins, "32bit constant...");
18162 fprintf(fp, "\tmov $0, %%dx\n");
18163 fprintf(fp, "\tmov ");
18164 print_const_val(state, src, fp);
18165 fprintf(fp, ", %%ax");
/* MMX/SSE destinations load the constant through the constant pool. */
18167 else if (dst_regcm & (REGCM_XMM | REGCM_MMX)) {
18169 ref = get_const_pool_ref(state, src, fp);
18170 fprintf(fp, "\tmovq L%s%lu, %s\n",
18171 state->label_prefix, ref,
18172 reg(state, dst, (REGCM_XMM | REGCM_MMX)));
18175 internal_error(state, ins, "unknown copy immediate type");
/* Emit an x86 load, "mov (src), dst", in AT&T syntax.
 * The source triple supplies the 32bit GPR holding the address; the
 * destination may be any 8/16/32bit general-purpose register.  By this
 * stage the register allocator must have put both operands in registers,
 * so a constant on either side is an internal error.
 */
18180 static void print_op_load(struct compile_state *state,
18181 struct triple *ins, FILE *fp)
18183 struct triple *dst, *src;
18186 if (is_const(src) || is_const(dst)) {
18187 internal_error(state, ins, "unknown load operation");
18189 fprintf(fp, "\tmov (%s), %s\n",
18190 reg(state, src, REGCM_GPR32),
18191 reg(state, dst, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32));
/* Emit an x86 store in AT&T syntax.  Three shapes are handled:
 *   - immediate -> memory:  "mov<sz> $imm, (addr_reg)" when the value
 *     being stored is an integer constant;
 *   - register -> absolute: "mov<sz> reg, 0xADDR" when the destination
 *     address itself is an integer constant;
 *   - register -> memory:   "mov<sz> reg, (addr_reg)" otherwise.
 * type_suffix() picks the b/w/l operand-size suffix from the stored type.
 * Any other constant combination is an internal error.
 */
18195 static void print_op_store(struct compile_state *state,
18196 struct triple *ins, FILE *fp)
18198 struct triple *dst, *src;
18201 if (is_const(src) && (src->op == OP_INTCONST)) {
/* Immediate store: the constant is emitted directly as $imm. */
18203 value = (long_t)(src->u.cval);
18204 fprintf(fp, "\tmov%s $%ld, (%s)\n",
18205 type_suffix(state, src->type),
18207 reg(state, dst, REGCM_GPR32));
18209 else if (is_const(dst) && (dst->op == OP_INTCONST)) {
/* Store to a constant absolute address (e.g. memory-mapped I/O). */
18210 fprintf(fp, "\tmov%s %s, 0x%08lx\n",
18211 type_suffix(state, src->type),
18212 reg(state, src, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32),
18213 (unsigned long)(dst->u.cval));
18216 if (is_const(src) || is_const(dst)) {
18217 internal_error(state, ins, "unknown store operation");
/* General case: both value and address live in registers. */
18219 fprintf(fp, "\tmov%s %s, (%s)\n",
18220 type_suffix(state, src->type),
18221 reg(state, src, REGCM_GPR8_LO | REGCM_GPR16 | REGCM_GPR32),
18222 reg(state, dst, REGCM_GPR32));
/* Emit a signed multiply.  Uses the two-operand register form
 * "imul src, dst" when the right-hand side is in a register, or the
 * immediate form "imul $imm, dst" when it is a constant.  The result
 * lands in RHS(ins, 0)'s register, per the two-operand imul semantics.
 */
18228 static void print_op_smul(struct compile_state *state,
18229 struct triple *ins, FILE *fp)
18231 if (!is_const(RHS(ins, 1))) {
18232 fprintf(fp, "\timul %s, %s\n",
18233 reg(state, RHS(ins, 1), REGCM_GPR32),
18234 reg(state, RHS(ins, 0), REGCM_GPR32));
18237 fprintf(fp, "\timul ");
18238 print_const_val(state, RHS(ins, 1), fp);
18239 fprintf(fp, ", %s\n", reg(state, RHS(ins, 0), REGCM_GPR32));
/* Emit a compare instruction.  The "destination" of an OP_CMP triple is
 * EFLAGS (verified here); the actual output is "cmp rhs1, rhs0" so a
 * following jcc/setcc can test the flags.  When both operands are in
 * registers, the register-class mask is narrowed to the intersection of
 * the two operands' classes so both are printed at the same width.
 */
18243 static void print_op_cmp(struct compile_state *state,
18244 struct triple *ins, FILE *fp)
18248 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
18249 dreg = check_reg(state, ins, REGCM_FLAGS);
18250 if (!reg_is_reg(state, dreg, REG_EFLAGS)) {
18251 internal_error(state, ins, "bad dest register for cmp");
18253 if (is_const(RHS(ins, 1))) {
/* Immediate form: "cmp $imm, reg". */
18254 fprintf(fp, "\tcmp ");
18255 print_const_val(state, RHS(ins, 1), fp);
18256 fprintf(fp, ", %s\n", reg(state, RHS(ins, 0), mask));
18259 unsigned lmask, rmask;
18261 lreg = check_reg(state, RHS(ins, 0), mask);
18262 rreg = check_reg(state, RHS(ins, 1), mask);
18263 lmask = arch_reg_regcm(state, lreg);
18264 rmask = arch_reg_regcm(state, rreg);
/* Use the common register class so both names print at one width. */
18265 mask = lmask & rmask;
18266 fprintf(fp, "\tcmp %s, %s\n",
18267 reg(state, RHS(ins, 1), mask),
18268 reg(state, RHS(ins, 0), mask));
/* Emit "test reg, reg" on the single operand — ANDing a register with
 * itself sets ZF/SF from its value without modifying it, which is the
 * idiomatic x86 way to test a value against zero for a later jcc/setcc.
 */
18272 static void print_op_test(struct compile_state *state,
18273 struct triple *ins, FILE *fp)
18276 mask = REGCM_GPR32 | REGCM_GPR16 | REGCM_GPR8_LO;
18277 fprintf(fp, "\ttest %s, %s\n",
18278 reg(state, RHS(ins, 0), mask),
18279 reg(state, RHS(ins, 0), mask));
/* Emit a branch to a local label.  OP_JMP becomes an unconditional "jmp";
 * every conditional OP_JMP_* maps onto the matching x86 jcc mnemonic
 * (signed -> jl/jg/jle/jge, unsigned -> jb/ja/jbe/jae).  A conditional
 * branch must consume the EFLAGS produced by a preceding OP_CMP/OP_TEST;
 * the loop below verifies only flag-preserving OP_COPY triples sit
 * between the test and the branch, since anything else could clobber
 * EFLAGS.  The target label number is TARG(branch, 0)->u.cval.
 */
18282 static void print_op_branch(struct compile_state *state,
18283 struct triple *branch, FILE *fp)
18285 const char *bop = "j";
18286 if (branch->op == OP_JMP) {
/* Unconditional jump: must not carry a condition operand. */
18287 if (TRIPLE_RHS(branch->sizes) != 0) {
18288 internal_error(state, branch, "jmp with condition?");
18293 struct triple *ptr;
18294 if (TRIPLE_RHS(branch->sizes) != 1) {
18295 internal_error(state, branch, "jmpcc without condition?");
18297 check_reg(state, RHS(branch, 0), REGCM_FLAGS);
18298 if ((RHS(branch, 0)->op != OP_CMP) &&
18299 (RHS(branch, 0)->op != OP_TEST)) {
18300 internal_error(state, branch, "bad branch test");
18302 #warning "FIXME I have observed instructions between the test and branch instructions"
18303 ptr = RHS(branch, 0);
/* Ensure nothing between the flag-setter and the branch clobbers
 * EFLAGS; register-to-register copies are assumed safe here.
 */
18304 for(ptr = RHS(branch, 0)->next; ptr != branch; ptr = ptr->next) {
18305 if (ptr->op != OP_COPY) {
18306 internal_error(state, branch, "branch does not follow test");
18309 switch(branch->op) {
18310 case OP_JMP_EQ: bop = "jz"; break;
18311 case OP_JMP_NOTEQ: bop = "jnz"; break;
18312 case OP_JMP_SLESS: bop = "jl"; break;
18313 case OP_JMP_ULESS: bop = "jb"; break;
18314 case OP_JMP_SMORE: bop = "jg"; break;
18315 case OP_JMP_UMORE: bop = "ja"; break;
18316 case OP_JMP_SLESSEQ: bop = "jle"; break;
18317 case OP_JMP_ULESSEQ: bop = "jbe"; break;
18318 case OP_JMP_SMOREEQ: bop = "jge"; break;
18319 case OP_JMP_UMOREEQ: bop = "jae"; break;
18321 internal_error(state, branch, "Invalid branch op");
18326 fprintf(fp, "\t%s L%s%lu\n",
18328 state->label_prefix,
18329 (unsigned long)(TARG(branch, 0)->u.cval));
/* Emit an x86 setcc instruction, materializing a comparison result as
 * 0/1 in an 8bit low GPR.  Mirrors print_op_branch's condition mapping
 * (signed -> setl/setg/..., unsigned -> setb/seta/...), but is stricter
 * about placement: the OP_CMP/OP_TEST must be the *immediately*
 * preceding triple, since no intervening-copy allowance is made here.
 */
18332 static void print_op_set(struct compile_state *state,
18333 struct triple *set, FILE *fp)
18335 const char *sop = "set";
18336 if (TRIPLE_RHS(set->sizes) != 1) {
18337 internal_error(state, set, "setcc without condition?");
18339 check_reg(state, RHS(set, 0), REGCM_FLAGS);
18340 if ((RHS(set, 0)->op != OP_CMP) &&
18341 (RHS(set, 0)->op != OP_TEST)) {
18342 internal_error(state, set, "bad set test");
18344 if (RHS(set, 0)->next != set) {
18345 internal_error(state, set, "set does not follow test");
18348 case OP_SET_EQ: sop = "setz"; break;
18349 case OP_SET_NOTEQ: sop = "setnz"; break;
18350 case OP_SET_SLESS: sop = "setl"; break;
18351 case OP_SET_ULESS: sop = "setb"; break;
18352 case OP_SET_SMORE: sop = "setg"; break;
18353 case OP_SET_UMORE: sop = "seta"; break;
18354 case OP_SET_SLESSEQ: sop = "setle"; break;
18355 case OP_SET_ULESSEQ: sop = "setbe"; break;
18356 case OP_SET_SMOREEQ: sop = "setge"; break;
18357 case OP_SET_UMOREEQ: sop = "setae"; break;
18359 internal_error(state, set, "Invalid set op");
/* setcc only writes an 8bit register, hence REGCM_GPR8_LO. */
18362 fprintf(fp, "\t%s %s\n",
18363 sop, reg(state, set, REGCM_GPR8_LO));
/* Emit a bit-scan (bsf = lowest set bit, bsr = highest set bit) on a
 * 32bit GPR.  The surrounding output (lines elided in this view)
 * presumably also handles the zero-input case, where bsf/bsr leave the
 * destination undefined — TODO confirm against the full source.
 */
18366 static void print_op_bit_scan(struct compile_state *state,
18367 struct triple *ins, FILE *fp)
18371 case OP_BSF: op = "bsf"; break;
18372 case OP_BSR: op = "bsr"; break;
18374 internal_error(state, ins, "unknown bit scan");
18384 reg(state, RHS(ins, 0), REGCM_GPR32),
18385 reg(state, ins, REGCM_GPR32),
18386 reg(state, ins, REGCM_GPR32));
/* Emit a static data declaration: switch to the data section, align to
 * the type's alignment, define the local label "L<prefix><id>:", dump
 * the initializer via print_const(), then switch back to the text
 * section so instruction emission can continue.
 */
18390 static void print_sdecl(struct compile_state *state,
18391 struct triple *ins, FILE *fp)
18393 fprintf(fp, ".section \"" DATA_SECTION "\"\n");
18394 fprintf(fp, ".balign %d\n", align_of(state, ins->type));
18395 fprintf(fp, "L%s%lu:\n",
18396 state->label_prefix, (unsigned long)(ins->u.cval));
18397 print_const(state, MISC(ins, 0), fp);
18398 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
/* Dispatch a single triple to the appropriate assembly emitter.
 * This is the back-end's central switch: arithmetic and logic ops go
 * through the generic binary/unary/shift printers, control flow through
 * the branch/set printers, and a handful of ops (rdmsr, hlt, div, mul)
 * are emitted inline.  High-level ops that earlier passes must have
 * lowered (OP_SDIV, OP_LTRUE, the relational OP_* forms, ...) fall
 * through to an internal error.
 */
18402 static void print_instruction(struct compile_state *state,
18403 struct triple *ins, FILE *fp)
18405 /* Assumption: after I have exited the register allocator
18406 * everything is in a valid register.
18410 print_op_asm(state, ins, fp);
18412 case OP_ADD: print_binary_op(state, "add", ins, fp); break;
18413 case OP_SUB: print_binary_op(state, "sub", ins, fp); break;
18414 case OP_AND: print_binary_op(state, "and", ins, fp); break;
18415 case OP_XOR: print_binary_op(state, "xor", ins, fp); break;
18416 case OP_OR: print_binary_op(state, "or", ins, fp); break;
18417 case OP_SL: print_op_shift(state, "shl", ins, fp); break;
18418 case OP_USR: print_op_shift(state, "shr", ins, fp); break;
18419 case OP_SSR: print_op_shift(state, "sar", ins, fp); break;
18420 case OP_POS: break;
18421 case OP_NEG: print_unary_op(state, "neg", ins, fp); break;
18422 case OP_INVERT: print_unary_op(state, "not", ins, fp); break;
18426 /* Don't generate anything here for constants */
18428 /* Don't generate anything for variable declarations. */
18431 print_sdecl(state, ins, fp);
18434 print_op_move(state, ins, fp);
18437 print_op_load(state, ins, fp);
18440 print_op_store(state, ins, fp);
18443 print_op_smul(state, ins, fp);
18445 case OP_CMP: print_op_cmp(state, ins, fp); break;
18446 case OP_TEST: print_op_test(state, ins, fp); break;
18448 case OP_JMP_EQ: case OP_JMP_NOTEQ:
18449 case OP_JMP_SLESS: case OP_JMP_ULESS:
18450 case OP_JMP_SMORE: case OP_JMP_UMORE:
18451 case OP_JMP_SLESSEQ: case OP_JMP_ULESSEQ:
18452 case OP_JMP_SMOREEQ: case OP_JMP_UMOREEQ:
18453 print_op_branch(state, ins, fp);
18455 case OP_SET_EQ: case OP_SET_NOTEQ:
18456 case OP_SET_SLESS: case OP_SET_ULESS:
18457 case OP_SET_SMORE: case OP_SET_UMORE:
18458 case OP_SET_SLESSEQ: case OP_SET_ULESSEQ:
18459 case OP_SET_SMOREEQ: case OP_SET_UMOREEQ:
18460 print_op_set(state, ins, fp);
18462 case OP_INB: case OP_INW: case OP_INL:
18463 print_op_in(state, ins, fp);
18465 case OP_OUTB: case OP_OUTW: case OP_OUTL:
18466 print_op_out(state, ins, fp);
18470 print_op_bit_scan(state, ins, fp);
18473 after_lhs(state, ins);
18474 fprintf(fp, "\trdmsr\n");
18477 fprintf(fp, "\twrmsr\n");
18480 fprintf(fp, "\thlt\n");
/* div/idiv/mul implicitly use edx:eax; only the divisor/multiplier
 * register is printed.
 */
18483 fprintf(fp, "\tidiv %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
18486 fprintf(fp, "\tdiv %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
18489 fprintf(fp, "\tmul %s\n", reg(state, RHS(ins, 1), REGCM_GPR32));
18495 fprintf(fp, "L%s%lu:\n",
18496 state->label_prefix, (unsigned long)(ins->u.cval));
18498 /* Ignore OP_PIECE */
18501 /* Operations that should never get here */
18502 case OP_SDIV: case OP_UDIV:
18503 case OP_SMOD: case OP_UMOD:
18504 case OP_LTRUE: case OP_LFALSE: case OP_EQ: case OP_NOTEQ:
18505 case OP_SLESS: case OP_ULESS: case OP_SMORE: case OP_UMORE:
18506 case OP_SLESSEQ: case OP_ULESSEQ: case OP_SMOREEQ: case OP_UMOREEQ:
18508 internal_error(state, ins, "unknown op: %d %s",
18509 ins->op, tops(ins->op));
/* Walk the circular triple list starting at state->first and emit
 * assembly for each instruction into state->output.  When the source
 * location (occurance) changes, a source-position comment is written
 * first: a single "function,file:line.col" line for non-inlined code,
 * or a multi-line comment listing the whole inline chain (walking
 * occurance->parent).  The deepest inline chain seen is reported at the
 * end.  Occurance reference counts are maintained via get/put_occurance
 * so the last-seen occurance stays valid across iterations.
 */
18514 static void print_instructions(struct compile_state *state)
18516 struct triple *first, *ins;
18517 int print_location;
18518 struct occurance *last_occurance;
18520 int max_inline_depth;
18521 max_inline_depth = 0;
18522 print_location = 1;
18523 last_occurance = 0;
18524 fp = state->output;
18525 fprintf(fp, ".section \"" TEXT_SECTION "\"\n");
18526 first = state->first;
/* Only emit a location comment when the location actually changed. */
18529 if (print_location &&
18530 last_occurance != ins->occurance) {
18531 if (!ins->occurance->parent) {
18532 fprintf(fp, "\t/* %s,%s:%d.%d */\n",
18533 ins->occurance->function,
18534 ins->occurance->filename,
18535 ins->occurance->line,
18536 ins->occurance->col);
18539 struct occurance *ptr;
18541 fprintf(fp, "\t/*\n");
/* Inlined code: list every frame of the inline chain. */
18543 for(ptr = ins->occurance; ptr; ptr = ptr->parent) {
18545 fprintf(fp, "\t * %s,%s:%d.%d\n",
18551 fprintf(fp, "\t */\n");
18552 if (inline_depth > max_inline_depth) {
18553 max_inline_depth = inline_depth;
18556 if (last_occurance) {
18557 put_occurance(last_occurance);
18559 get_occurance(ins->occurance);
18560 last_occurance = ins->occurance;
18563 print_instruction(state, ins, fp);
18565 } while(ins != first);
18566 if (print_location) {
18567 fprintf(fp, "/* max inline depth %d */\n",
/* Final code-generation phase: number the local labels, then print the
 * whole instruction stream as assembly to the output file.
 */
18572 static void generate_code(struct compile_state *state)
18574 generate_local_labels(state);
18575 print_instructions(state);
/* Debug aid: lex the input to completion, printing each token's name
 * and its text (identifier name, or string value for string-like
 * tokens) to stdout until TOK_EOF is reached.
 */
18579 static void print_tokens(struct compile_state *state)
18582 tk = &state->token[0];
18587 next_token(state, 0);
18589 loc(stdout, state, 0);
18590 printf("%s <- `%s'\n",
18592 tk->ident ? tk->ident->name :
18593 tk->str_len ? tk->val.str : "");
18595 } while(tk->tok != TOK_EOF);
/* Synthesize a call to the user's main() and flatten it into the
 * instruction list right after the program's initial label, so the
 * generated ROM code enters main automatically.
 */
18598 static void call_main(struct compile_state *state)
18600 struct triple *call;
18601 call = new_triple(state, OP_CALL, &void_func, -1, -1);
18602 call->type = &void_type;
18603 MISC(call, 0) = state->main_function;
18604 flatten(state, state->first, call);
/* Top-level compilation driver: initialize a compile_state, open the
 * output file, register keywords and builtins, parse the source file,
 * wire in the call to main, run the optimizer (lines elided in this
 * view), and emit the final assembly.
 *
 * filename      - C source to compile
 * ofilename     - assembly output path (opened for writing here)
 * features      - architecture feature flags from -m options
 * debug, opt    - debug bitmask and optimization level
 * label_prefix  - prefix for generated local labels
 */
18607 static void compile(const char *filename, const char *ofilename,
18608 unsigned long features, int debug, int opt, const char *label_prefix)
18611 struct compile_state state;
18612 struct triple *ptr;
18613 memset(&state, 0, sizeof(state));
/* Mark every lookahead token slot as empty. */
18615 for(i = 0; i < sizeof(state.token)/sizeof(state.token[0]); i++) {
18616 memset(&state.token[i], 0, sizeof(state.token[i]));
18617 state.token[i].tok = -1;
18619 /* Remember the debug settings */
18620 state.features = features;
18621 state.debug = debug;
18622 state.optimize = opt;
18623 /* Remember the output filename */
18624 state.ofilename = ofilename;
18625 state.output = fopen(state.ofilename, "w");
18626 if (!state.output) {
18627 error(&state, 0, "Cannot open output file %s\n",
18630 /* Remember the label prefix */
18631 state.label_prefix = label_prefix;
18632 /* Prep the preprocessor */
18633 state.if_depth = 0;
18634 state.if_value = 0;
18635 /* register the C keywords */
18636 register_keywords(&state);
18637 /* register the keywords the macro preprocessor knows */
18638 register_macro_keywords(&state);
18639 /* Memorize where some special keywords are. */
18640 state.i_switch = lookup(&state, "switch", 6);
18641 state.i_case = lookup(&state, "case", 4);
18642 state.i_continue = lookup(&state, "continue", 8);
18643 state.i_break = lookup(&state, "break", 5);
18644 state.i_default = lookup(&state, "default", 7);
18646 /* Allocate beginning bounding labels for the function list */
/* The bounding labels are volatile self-referencing triples so the
 * optimizer never deletes the list's endpoints.
 */
18647 state.first = label(&state);
18648 state.first->id |= TRIPLE_FLAG_VOLATILE;
18649 use_triple(state.first, state.first);
18650 ptr = label(&state);
18651 ptr->id |= TRIPLE_FLAG_VOLATILE;
18652 use_triple(ptr, ptr);
18653 flatten(&state, state.first, ptr);
18655 /* Enter the globl definition scope */
18656 start_scope(&state);
18657 register_builtins(&state);
18658 compile_file(&state, filename, 1);
18660 print_tokens(&state);
18663 /* Exit the global definition scope */
18666 /* Call the main function */
18669 /* Now that basic compilation has happened
18670 * optimize the intermediate code
18674 generate_code(&state);
18676 fprintf(stderr, "done\n");
/* Print the romcc version and release date banner. */
18680 static void version(void)
18682 printf("romcc " VERSION " released " RELEASE_DATE "\n");
/* Print the command-line usage summary. */
18685 static void usage(void)
18689 "Usage: romcc <source>.c\n"
18690 "Compile a C source file without using ram\n"
/* Report a command-line argument error (printf-style) to stderr;
 * presumably prints usage and exits afterwards — the trailing lines are
 * elided in this view.
 */
18694 static void arg_error(char *fmt, ...)
18697 va_start(args, fmt);
18698 vfprintf(stderr, fmt, args);
18704 int main(int argc, char **argv)
18706 const char *filename;
18707 const char *ofilename;
18708 const char *label_prefix;
18709 unsigned long features;
18715 ofilename = "auto.inc";
18719 while((argc > 1) && (argc != last_argc)) {
18721 if (strncmp(argv[1], "--debug=", 8) == 0) {
18722 debug = atoi(argv[1] + 8);
18726 else if (strncmp(argv[1], "--label-prefix=", 15) == 0) {
18727 label_prefix= argv[1] + 15;
18731 else if ((strcmp(argv[1],"-O") == 0) ||
18732 (strcmp(argv[1], "-O1") == 0)) {
18737 else if (strcmp(argv[1],"-O2") == 0) {
18742 else if ((strcmp(argv[1], "-o") == 0) && (argc > 2)) {
18743 ofilename = argv[2];
18747 else if (strncmp(argv[1], "-m", 2) == 0) {
18749 result = arch_encode_feature(argv[1] + 2, &features);
18751 arg_error("Invalid feature specified: %s\n",
18759 arg_error("Wrong argument count %d\n", argc);
18761 filename = argv[1];
18762 compile(filename, ofilename, features, debug, optimize, label_prefix);