2 * cfold.c: Constant folding support
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
8 * (C) 2003 Ximian, Inc. http://www.ximian.com
/*
 * mono_is_power_of_two:
 *   Scans the 32 bits of VAL: j walks a single-bit mask (1 << i) and k
 * masks off all bits at or below bit i, so `val & k` is nonzero whenever
 * a bit higher than the current candidate is set.
 *
 * NOTE(review): this is a sampled listing — the return statements are
 * not visible here.  Callers below treat a negative result as "not a
 * power of two" and then use the result as a shift count, so presumably
 * it returns the bit index on success and -1 otherwise; confirm against
 * the full source.
 */
13 mono_is_power_of_two (guint32 val)
17 for (i = 0, j = 1, k = 0xfffffffe; i < 32; ++i, j = j << 1, k = k << 1) {
21 if (i == 32 || val & k)
/*
 * FOLD_BINOP(name,op):
 *   Switch-case body for a non-commutative binary operator on the tree
 * IR: when both operands are OP_ICONST, the whole instruction is
 * rewritten in place into an OP_ICONST holding the folded 32-bit value.
 * (Sampled listing: the `case name:` label, early bail-out and `break`
 * are not visible in this view.)
 */
26 #define FOLD_BINOP(name,op) \
28 if (inst->inst_i0->opcode != OP_ICONST) \
30 if (inst->inst_i1->opcode == OP_ICONST) { \
31 inst->opcode = OP_ICONST; \
32 inst->inst_c0 = (gint32)(inst->inst_i0->inst_c0 op inst->inst_i1->inst_c0); \
37  * We try to put constants on the left side of a commutative operation
38  * because it reduces register pressure and it matches the usual cpu
39  * instructions with immediates.
/*
 * NOTE(review): the visible swap below moves a constant from inst_i0
 * into inst_i1 (i.e. onto the *right* side), which is what immediate-
 * form cpu instructions actually want — the wording "left side" in the
 * original comment looks inverted; confirm against the full source.
 *
 * FOLD_BINOPCOMM(name,op):
 *   Commutative variant of FOLD_BINOP.  Visible transformations:
 *   - const op const      -> OP_ICONST (folded 32-bit value)
 *   - const op x          -> operands swapped so the constant is i1
 *   - x + 0               -> x (instruction replaced by its operand)
 *   - x * 1               -> x
 *   - x * -1              -> CEE_NEG x
 *   - x * (1 << n)        -> x CEE_SHL n   (via mono_is_power_of_two)
 *   (Sampled listing: case label, returns and some braces not visible.)
 */
41 #define FOLD_BINOPCOMM(name,op) \
43 if (inst->inst_i0->opcode == OP_ICONST) {\
44 if (inst->inst_i1->opcode == OP_ICONST) { \
45 inst->opcode = OP_ICONST; \
46 inst->inst_c0 = (gint32)(inst->inst_i0->inst_c0 op inst->inst_i1->inst_c0); \
49 MonoInst *tmp = inst->inst_i0; \
50 inst->inst_i0 = inst->inst_i1; \
51 inst->inst_i1 = tmp; \
54 if (inst->inst_i1->opcode == OP_ICONST && inst->opcode == CEE_ADD) { \
55 if (inst->inst_i1->inst_c0 == 0) { \
56 *inst = *(inst->inst_i0); \
60 if (inst->inst_i1->opcode == OP_ICONST && inst->opcode == CEE_MUL) { \
62 if (inst->inst_i1->inst_c0 == 1) { \
63 *inst = *(inst->inst_i0); \
65 } else if (inst->inst_i1->inst_c0 == -1) { \
66 inst->opcode = CEE_NEG; \
69 power2 = mono_is_power_of_two (inst->inst_i1->inst_c0); \
70 if (power2 < 0) return; \
71 inst->opcode = CEE_SHL; \
72 inst->inst_i1->inst_c0 = power2; \
77  * We can't let this cause a division by zero exception since the division
78  * might not be executed during runtime.
/*
 * FOLD_BINOPZ(name,op,cast):
 *   Folding for division/remainder, with the guards the comment above
 * demands.  Visible transformations:
 *   - x %u 2              -> x & 1
 *   - const op const      -> OP_ICONST, but only after bailing out on a
 *     zero divisor and on the G_MININT32 / -1 overflow case (both would
 *     trap at compile time instead of at the guarded runtime point)
 *   - x /u (1 << n)       -> x CEE_SHR_UN n
 *   - x %u (1 << n)       -> x & ((1 << n) - 1)
 *   (Sampled listing: case label, returns and some braces not visible.)
 */
80 #define FOLD_BINOPZ(name,op,cast) \
82 if (inst->inst_i1->opcode == OP_ICONST && inst->opcode == CEE_REM_UN && inst->inst_i1->inst_c0 == 2) { \
83 inst->opcode = CEE_AND; \
84 inst->inst_i1->inst_c0 = 1; \
87 if (inst->inst_i1->opcode == OP_ICONST) { \
88 if (!inst->inst_i1->inst_c0) return; \
89 if (inst->inst_i0->opcode == OP_ICONST) { \
90 if ((inst->inst_i0->inst_c0 == G_MININT32) && (inst->inst_i1->inst_c0 == -1)) \
92 inst->inst_c0 = (cast)inst->inst_i0->inst_c0 op (cast)inst->inst_i1->inst_c0; \
93 inst->opcode = OP_ICONST; \
95 int power2 = mono_is_power_of_two (inst->inst_i1->inst_c0); \
96 if (power2 < 0) return; \
97 if (inst->opcode == CEE_REM_UN) { \
98 inst->opcode = CEE_AND; \
99 inst->inst_i1->inst_c0 = (1 << power2) - 1; \
100 } else if (inst->opcode == CEE_DIV_UN) { \
101 inst->opcode = CEE_SHR_UN; \
102 inst->inst_i1->inst_c0 = power2; \
/*
 * FOLD_CXX(name,op,cast):
 *   Folds a compare-and-set opcode (OP_CEQ and friends) whose operand
 * is an OP_COMPARE over two OP_ICONSTs: the instruction becomes an
 * OP_ICONST holding the 0/1 result of the comparison, evaluated with
 * CAST to get signed vs. unsigned semantics right.
 * (Sampled listing: case label and break not visible.)
 */
108 #define FOLD_CXX(name,op,cast) \
110 if (inst->inst_i0->opcode != OP_COMPARE) \
112 if (inst->inst_i0->inst_i0->opcode != OP_ICONST) \
114 if (inst->inst_i0->inst_i1->opcode == OP_ICONST) { \
115 inst->opcode = OP_ICONST; \
116 inst->inst_c0 = (cast)inst->inst_i0->inst_i0->inst_c0 op (cast)inst->inst_i0->inst_i1->inst_c0; \
/*
 * FOLD_UNOP(name,op):
 *   Folds a unary operator applied to a constant operand, handling both
 * 32-bit (OP_ICONST / inst_c0) and 64-bit (OP_I8CONST / inst_l)
 * constants.  (Sampled listing: case label and break not visible.)
 */
120 #define FOLD_UNOP(name,op) \
122 if (inst->inst_i0->opcode == OP_ICONST) { \
123 inst->opcode = OP_ICONST; \
124 inst->inst_c0 = op inst->inst_i0->inst_c0; \
125 } else if (inst->inst_i0->opcode == OP_I8CONST) { \
126 inst->opcode = OP_I8CONST; \
127 inst->inst_l = op inst->inst_i0->inst_l; \
/*
 * FOLD_BRBINOP(name,op,cast):
 *   Folds a conditional branch whose OP_COMPARE operand compares two
 * OP_ICONSTs: the branch is turned into an unconditional OP_BR when the
 * comparison holds, otherwise into OP_NOP.  Currently only referenced
 * from a commented-out region below (the CEE_B* opcodes don't carry
 * operands in this form — see the FIXME there).
 * (Sampled listing: case label and break not visible.)
 */
130 #define FOLD_BRBINOP(name,op,cast) \
132 if (inst->inst_i0->opcode != OP_COMPARE) \
134 if (inst->inst_i0->inst_i0->opcode != OP_ICONST) \
136 if (inst->inst_i0->inst_i1->opcode == OP_ICONST) { \
137 if ((cast)inst->inst_i0->inst_i0->inst_c0 op (cast)inst->inst_i0->inst_i1->inst_c0) \
138 inst->opcode = OP_BR; \
140 inst->opcode = OP_NOP; \
145  * Helper function to do constant expression evaluation.
146  * We do constant folding of integers only, FP stuff is much more tricky,
147  * int64 probably not worth it.
/*
 * mono_constant_fold_inst:
 *   Per-instruction folder for the tree IR, applied bottom-up via
 * mono_inst_foreach (see mono_constant_fold below).  DATA is unused in
 * the visible code.  Dispatches on inst->opcode and rewrites the
 * instruction in place using the FOLD_* macros above.
 * (Sampled listing: case labels, breaks and the closing of the switch
 * are not visible in this view.)
 */
150 mono_constant_fold_inst (MonoInst *inst, gpointer data)
152 switch (inst->opcode) {
154 /* FIXME: the CEE_B* don't contain operands, need to use the OP_COMPARE instruction */
155 /*FOLD_BRBINOP (CEE_BEQ,==,gint32)
156 FOLD_BRBINOP (CEE_BGE,>=,gint32)
157 FOLD_BRBINOP (CEE_BGT,>,gint32)
158 FOLD_BRBINOP (CEE_BLE,<=,gint32)
159 FOLD_BRBINOP (CEE_BLT,<,gint32)
160 FOLD_BRBINOP (CEE_BNE_UN,!=,guint32)
161 FOLD_BRBINOP (CEE_BGE_UN,>=,guint32)
162 FOLD_BRBINOP (CEE_BGT_UN,>,guint32)
163 FOLD_BRBINOP (CEE_BLE_UN,<=,guint32)
164 FOLD_BRBINOP (CEE_BLT_UN,<,guint32)*/
/* Arithmetic and bitwise ops, folded by the macros defined above. */
166 FOLD_BINOPCOMM (CEE_MUL,*)
168 FOLD_BINOPCOMM (CEE_ADD,+)
169 FOLD_BINOP (CEE_SUB,-)
170 FOLD_BINOPZ (CEE_DIV,/,gint32)
171 FOLD_BINOPZ (CEE_DIV_UN,/,guint32)
172 FOLD_BINOPZ (CEE_REM,%,gint32)
173 FOLD_BINOPZ (CEE_REM_UN,%,guint32)
174 FOLD_BINOPCOMM (CEE_AND,&)
175 FOLD_BINOPCOMM (CEE_OR,|)
176 FOLD_BINOPCOMM (CEE_XOR,^)
177 FOLD_BINOP (CEE_SHL,<<)
178 FOLD_BINOP (CEE_SHR,>>)
/*
 * Unsigned right shift is folded inline (not via FOLD_BINOP) because
 * both operands must be evaluated as guint32 to get a logical shift.
 */
180 if (inst->inst_i0->opcode != OP_ICONST)
182 if (inst->inst_i1->opcode == OP_ICONST) {
183 inst->opcode = OP_ICONST;
184 inst->inst_c0 = (guint32)inst->inst_i0->inst_c0 >> (guint32)inst->inst_i1->inst_c0;
187 FOLD_UNOP (CEE_NEG,-)
188 FOLD_UNOP (CEE_NOT,~)
189 FOLD_CXX (OP_CEQ,==,gint32)
190 FOLD_CXX (OP_CGT,>,gint32)
191 FOLD_CXX (OP_CGT_UN,>,guint32)
192 FOLD_CXX (OP_CLT,<,gint32)
193 FOLD_CXX (OP_CLT_UN,<,guint32)
/* Widening int32 const -> int64 const (sign-extends via assignment). */
195 if (inst->inst_i0->opcode == OP_ICONST) {
196 inst->opcode = OP_I8CONST;
197 inst->inst_l = inst->inst_i0->inst_c0;
/* Narrowing / no-op conversion of a 32-bit const, or collapsing a
 * conversion applied directly to an indirect load. */
202 if (inst->inst_i0->opcode == OP_ICONST) {
203 inst->opcode = OP_ICONST;
204 inst->inst_c0 = inst->inst_i0->inst_c0;
205 } else if (inst->inst_i0->opcode == CEE_LDIND_I) {
206 *inst = *inst->inst_i0;
209 /* we should be able to handle isinst and castclass as well */
215  * *ovf* opcodes? It's slow and hard to do in C.
216  * switch can be replaced by a simple jump
/* On 32-bit targets a native-int conversion of an I4/pointer value is
 * a no-op, so the conversion node is replaced by its operand. */
218 #if SIZEOF_VOID_P == 4
220 if ((inst->inst_left->type == STACK_I4) || (inst->inst_left->type == STACK_PTR)) {
221 *inst = *inst->inst_left;
/*
 * mono_constant_fold:
 *   Driver for the tree-IR folder: walks every basic block of CFG and
 * applies mono_constant_fold_inst to each instruction tree via
 * mono_inst_foreach (which, per its use here, visits sub-trees so that
 * operands are folded before their parents — confirm against the full
 * source, as the helper's body is not visible in this listing).
 */
231 mono_constant_fold (MonoCompile *cfg)
235 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
237 MONO_BB_FOR_EACH_INS (bb, ins)
238 mono_inst_foreach (ins, mono_constant_fold_inst, NULL);
243  * If the arguments to the cond branch are constants, eval and
244  * return BRANCH_NOT_TAKEN for not taken, BRANCH_TAKEN for taken,
245  * BRANCH_UNDEF otherwise.
246  * If this code is changed to handle also non-const values, make sure
247  * side effects are handled in optimize_branches() in mini.c, by
248  * inserting pop instructions.
/*
 * mono_eval_cond_branch:
 *   INS is a conditional branch whose inst_left is a compare node; the
 * compare's two children must both be integer/pointer constants
 * (OP_ICONST or OP_PCONST) or the branch cannot be evaluated.
 * (Sampled listing: the case labels and BRANCH_TAKEN returns are not
 * visible — each if/return pair below is presumably one CEE_B* case,
 * signed comparisons first, then the unsigned/pointer ones using gsize
 * casts; confirm against the full source.)
 */
251 mono_eval_cond_branch (MonoInst *ins)
253 MonoInst *left, *right;
254 /* FIXME: handle also 64 bit ints */
255 left = ins->inst_left->inst_left;
256 if (left->opcode != OP_ICONST && left->opcode != OP_PCONST)
258 right = ins->inst_left->inst_right;
259 if (right->opcode != OP_ICONST && right->opcode != OP_PCONST)
261 switch (ins->opcode) {
/* Signed comparisons: ==, >=, >, <=, < on the raw inst_c0 values. */
263 if (left->inst_c0 == right->inst_c0)
265 return BRANCH_NOT_TAKEN;
267 if (left->inst_c0 >= right->inst_c0)
269 return BRANCH_NOT_TAKEN;
271 if (left->inst_c0 > right->inst_c0)
273 return BRANCH_NOT_TAKEN;
275 if (left->inst_c0 <= right->inst_c0)
277 return BRANCH_NOT_TAKEN;
279 if (left->inst_c0 < right->inst_c0)
281 return BRANCH_NOT_TAKEN;
/* Unsigned comparisons: operands reinterpreted via gsize casts. */
283 if ((gsize)left->inst_c0 != (gsize)right->inst_c0)
285 return BRANCH_NOT_TAKEN;
287 if ((gsize)left->inst_c0 >= (gsize)right->inst_c0)
289 return BRANCH_NOT_TAKEN;
291 if ((gsize)left->inst_c0 > (gsize)right->inst_c0)
293 return BRANCH_NOT_TAKEN;
295 if ((gsize)left->inst_c0 <= (gsize)right->inst_c0)
297 return BRANCH_NOT_TAKEN;
299 if ((gsize)left->inst_c0 < (gsize)right->inst_c0)
301 return BRANCH_NOT_TAKEN;
/*
 * G_MININT32 is spelled as (-MAX - 1) rather than the literal
 * -2147483648: in C the literal parses as unary minus applied to
 * 2147483648, which does not fit in a 32-bit int, so the two-token
 * form is the portable way to write INT32_MIN.
 */
307 #define MYGINT32_MAX 2147483647
308 #define G_MININT32 (-MYGINT32_MAX -1)
/*
 * Folding macros for the linear (two-address) IR used by
 * mono_constant_fold_ins2 below.  Each expands inside a switch on the
 * opcode and stores the folded value into DEST (allocated via
 * ALLOC_DEST); ARG1/ARG2 are the constant argument instructions and
 * INS the instruction being folded.  (Sampled listing: the case labels
 * and breaks of each macro are not visible.)
 */
/* Unary op on a constant: dest = op arg1. */
311 #define FOLD_UNOP2(name,op) \
313 dest->inst_c0 = op arg1->inst_c0; \
/* Binary op on two constant arguments. */
316 #define FOLD_BINOP2(name, op) \
318 dest->inst_c0 = arg1->inst_c0 op arg2->inst_c0; \
/* Binary op with an explicit cast (signed vs. unsigned semantics). */
321 #define FOLD_BINOPC2(name,op,cast) \
323 dest->inst_c0 = (cast)arg1->inst_c0 op (cast)arg2->inst_c0; \
/* Binary op where the second operand is the instruction's immediate. */
326 #define FOLD_BINOP2_IMM(name, op) \
328 dest->inst_c0 = arg1->inst_c0 op ins->inst_imm; \
/* Immediate variant with an explicit cast. */
331 #define FOLD_BINOPC2_IMM(name, op, cast) \
333 dest->inst_c0 = (cast)arg1->inst_c0 op (cast)ins->inst_imm; \
/* Comparison: stores the boolean outcome into the local `res`. */
336 #define FOLD_BINOPCXX2(name,op,cast) \
338 res = (cast)arg1->inst_c0 op (cast)arg2->inst_c0; \
/*
 * MONO_INST_NEW(cfg,dest,op):
 *   Allocates a MonoInst from the compile-unit mempool (no free needed;
 * the pool is released with the compilation) and initializes its
 * pointer fields, opcode and registers (-1 = unassigned vreg).
 */
342 #define MONO_INST_NEW(cfg,dest,op) do { \
343 (dest) = mono_mempool_alloc ((cfg)->mempool, sizeof (MonoInst)); \
344 (dest)->inst_p0 = (dest)->inst_p1 = (dest)->next = NULL; \
345 (dest)->opcode = (op); \
347 (dest)->dreg = (dest)->sreg1 = (dest)->sreg2 = -1; \
/*
 * ALLOC_DEST(cfg, dest, ins):
 *   Builds the result instruction for a fold of INS, inheriting INS's
 * destination register so the rewrite is a drop-in replacement.  The
 * opcode is passed as -1 and filled in by the caller afterwards.
 * (Sampled listing: presumably guarded by `if (!overwrite)` so that in
 * overwrite mode DEST aliases INS itself — confirm against the
 * function's doc comment and full source.)
 */
350 #define ALLOC_DEST(cfg, dest, ins) do { \
352 MONO_INST_NEW ((cfg), (dest), -1); \
353 (dest)->dreg = (ins)->dreg; \
358  * mono_constant_fold_ins2:
360  * Perform constant folding on INS, using ARG1 and ARG2 as the arguments. If OVERWRITE is
361  * true, then store the result back into INS and return INS. Otherwise allocate a new ins,
362  * store the result into it and return it. If constant folding cannot be performed, return
366 mono_constant_fold_ins2 (MonoCompile *cfg, MonoInst *ins, MonoInst *arg1, MonoInst *arg2, gboolean overwrite)
368 MonoInst *dest = NULL;
373 switch (ins->opcode) {
/*
 * Commutative integer ops (mul/add/and/or/xor): fold const,const into
 * an OP_ICONST; for const,x use the commutativity to switch to the
 * _imm form of the opcode with the constant as the immediate.
 */
379 if (arg2->opcode == OP_ICONST) {
380 if (arg1->opcode == OP_ICONST) {
381 ALLOC_DEST (cfg, dest, ins);
382 switch (ins->opcode) {
383 FOLD_BINOP2 (OP_IMUL, *);
384 FOLD_BINOP2 (OP_IADD, +);
385 FOLD_BINOP2 (OP_IAND, &);
386 FOLD_BINOP2 (OP_IOR, |);
387 FOLD_BINOP2 (OP_IXOR, ^);
389 dest->opcode = OP_ICONST;
390 dest->sreg1 = dest->sreg2 = -1;
392 } else if (arg1->opcode == OP_ICONST) {
394  * This is commutative so swap the arguments, allowing the _imm variant
397 if (mono_op_to_op_imm (ins->opcode) != -1) {
398 ALLOC_DEST (cfg, dest, ins);
399 dest->opcode = mono_op_to_op_imm (ins->opcode);
400 dest->sreg1 = ins->sreg2;
402 dest->inst_imm = arg1->inst_c0;
/* _imm variants: single constant argument folded with ins->inst_imm. */
416 if (arg1->opcode == OP_ICONST) {
417 ALLOC_DEST (cfg, dest, ins);
418 switch (ins->opcode) {
419 FOLD_BINOP2_IMM (OP_IMUL_IMM, *);
420 FOLD_BINOP2_IMM (OP_IADD_IMM, +);
421 FOLD_BINOP2_IMM (OP_IAND_IMM, &);
422 FOLD_BINOP2_IMM (OP_IOR_IMM, |);
423 FOLD_BINOP2_IMM (OP_IXOR_IMM, ^);
424 FOLD_BINOP2_IMM (OP_ISUB_IMM, -);
425 FOLD_BINOP2_IMM (OP_ISHL_IMM, <<);
426 FOLD_BINOP2_IMM (OP_ISHR_IMM, >>);
427 FOLD_BINOPC2_IMM (OP_ISHR_UN_IMM, >>, guint32);
428 FOLD_BINOP2_IMM (OP_SHL_IMM, <<);
430 dest->opcode = OP_ICONST;
431 dest->sreg1 = dest->sreg2 = -1;
/* Non-commutative two-register ops (sub/shifts): only const,const. */
438 if ((arg1->opcode == OP_ICONST) && (arg2->opcode == OP_ICONST)) {
439 ALLOC_DEST (cfg, dest, ins);
440 switch (ins->opcode) {
441 FOLD_BINOP2 (OP_ISUB, -);
442 FOLD_BINOP2 (OP_ISHL, <<);
443 FOLD_BINOP2 (OP_ISHR, >>);
444 FOLD_BINOPC2 (OP_ISHR_UN, >>, guint32);
446 dest->opcode = OP_ICONST;
447 dest->sreg1 = dest->sreg2 = -1;
/*
 * Division/remainder: bail out on zero divisors and on the
 * G_MININT32 / -1 overflow case so the fold itself cannot trap.
 */
454 if ((arg1->opcode == OP_ICONST) && (arg2->opcode == OP_ICONST)) {
455 if ((arg2->inst_c0 == 0) || ((arg1->inst_c0 == G_MININT32) && (arg2->inst_c0 == -1)))
457 ALLOC_DEST (cfg, dest, ins);
458 switch (ins->opcode) {
459 FOLD_BINOPC2 (OP_IDIV, /, gint32);
460 FOLD_BINOPC2 (OP_IDIV_UN, /, guint32);
461 FOLD_BINOPC2 (OP_IREM, %, gint32);
462 FOLD_BINOPC2 (OP_IREM_UN, %, guint32);
464 dest->opcode = OP_ICONST;
465 dest->sreg1 = dest->sreg2 = -1;
/* Division/remainder _imm variants, with the same trap guards. */
472 if (arg1->opcode == OP_ICONST) {
473 if ((ins->inst_imm == 0) || ((arg1->inst_c0 == G_MININT32) && (ins->inst_imm == -1)))
475 ALLOC_DEST (cfg, dest, ins);
476 switch (ins->opcode) {
477 FOLD_BINOPC2_IMM (OP_IDIV_IMM, /, gint32);
478 FOLD_BINOPC2_IMM (OP_IDIV_UN_IMM, /, guint32);
479 FOLD_BINOPC2_IMM (OP_IREM_IMM, %, gint32);
480 FOLD_BINOPC2_IMM (OP_IREM_UN_IMM, %, guint32);
482 g_assert_not_reached ();
484 dest->opcode = OP_ICONST;
485 dest->sreg1 = dest->sreg2 = -1;
/* Unary neg/not on a constant. */
491 if (arg1->opcode == OP_ICONST) {
492 /* INEG sets cflags on x86, and the LNEG decomposition depends on that */
493 if ((ins->opcode == OP_INEG) && ins->next && (ins->next->opcode == OP_ADC_IMM))
495 ALLOC_DEST (cfg, dest, ins);
496 switch (ins->opcode) {
497 FOLD_UNOP2 (OP_INEG,-);
498 FOLD_UNOP2 (OP_INOT,~);
500 dest->opcode = OP_ICONST;
501 dest->sreg1 = dest->sreg2 = -1;
/* Moves of constants on 64-bit targets: propagate the constant. */
505 #if SIZEOF_VOID_P == 8
506 if ((arg1->opcode == OP_ICONST) || (arg1->opcode == OP_I8CONST)) {
508 if (arg1->opcode == OP_ICONST) {
510 ALLOC_DEST (cfg, dest, ins);
511 dest->opcode = arg1->opcode;
512 dest->sreg1 = dest->sreg2 = -1;
513 dest->inst_c0 = arg1->inst_c0;
/* A vtype move whose source is OP_VZERO is itself a zeroing. */
517 if (arg1->opcode == OP_VZERO) {
518 ALLOC_DEST (cfg, dest, ins);
519 dest->opcode = OP_VZERO;
/*
 * Compare opcodes: normalize the _IMM form into a synthetic OP_ICONST
 * in ARG2, then fold the compare together with the consumer in
 * ins->next (either a compare-and-set or a conditional branch).
 */
526 case OP_ICOMPARE_IMM: {
528 if (ins->sreg2 == -1) {
530 arg2->opcode = OP_ICONST;
531 arg2->inst_c0 = ins->inst_imm;
534 if ((arg1->opcode == OP_ICONST) && (arg2->opcode == OP_ICONST) && ins->next) {
535 MonoInst *next = ins->next;
536 gboolean res = FALSE;
538 switch (next->opcode) {
/* Compare-and-set consumers: compute the 0/1 result into `res`. */
549 switch (next->opcode) {
550 FOLD_BINOPCXX2 (OP_CEQ,==,gint32);
551 FOLD_BINOPCXX2 (OP_ICEQ,==,gint32);
552 FOLD_BINOPCXX2 (OP_CGT,>,gint32);
553 FOLD_BINOPCXX2 (OP_ICGT,>,gint32);
554 FOLD_BINOPCXX2 (OP_CGT_UN,>,guint32);
555 FOLD_BINOPCXX2 (OP_ICGT_UN,>,guint32);
556 FOLD_BINOPCXX2 (OP_CLT,<,gint32);
557 FOLD_BINOPCXX2 (OP_ICLT,<,gint32);
558 FOLD_BINOPCXX2 (OP_CLT_UN,<,guint32);
559 FOLD_BINOPCXX2 (OP_ICLT_UN,<,guint32);
/* In overwrite mode the consumer itself becomes the constant ... */
564 next->opcode = OP_ICONST;
566 next->sreg1 = next->sreg2 = -1;
/* ... otherwise a fresh OP_ICONST result instruction is allocated. */
568 ALLOC_DEST (cfg, dest, ins);
569 dest->opcode = OP_ICONST;
/* Conditional-branch consumers: evaluate the branch condition. */
583 switch (next->opcode) {
584 FOLD_BINOPCXX2 (OP_IBEQ,==,gint32);
585 FOLD_BINOPCXX2 (OP_IBNE_UN,!=,guint32);
586 FOLD_BINOPCXX2 (OP_IBGT,>,gint32);
587 FOLD_BINOPCXX2 (OP_IBGT_UN,>,guint32);
588 FOLD_BINOPCXX2 (OP_IBGE,>=,gint32);
589 FOLD_BINOPCXX2 (OP_IBGE_UN,>=,guint32);
590 FOLD_BINOPCXX2 (OP_IBLT,<,gint32);
591 FOLD_BINOPCXX2 (OP_IBLT_UN,<,guint32);
592 FOLD_BINOPCXX2 (OP_IBLE,<=,gint32);
593 FOLD_BINOPCXX2 (OP_IBLE_UN,<=,guint32);
598  * Can't nullify OP_COMPARE here since the decompose long branch
599  * opcodes depend on it being executed. Also, the branch might not
600  * be eliminated after all if loop opts is disabled, for example.
/* Record the outcome as a flag for optimize_branches to act on later. */
603 next->flags |= MONO_INST_CFOLD_TAKEN;
605 next->flags |= MONO_INST_CFOLD_NOT_TAKEN;
607 ALLOC_DEST (cfg, dest, ins);
608 dest->opcode = OP_ICONST;
614 /* This happens when a conditional branch is eliminated */
615 if (next->next == NULL) {
/* FP move of a constant: propagate the OP_R8CONST payload pointer. */
628 if (arg1->opcode == OP_R8CONST) {
629 ALLOC_DEST (cfg, dest, ins);
630 dest->opcode = OP_R8CONST;
632 dest->inst_p0 = arg1->inst_p0;
639 * *ovf* opcodes? It's slow and hard to do in C.
640 * switch can be replaced by a simple jump