/* jit/x86_64/codegen.h - code generation macros and definitions for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser,
5 M. Probst, S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck,
6 P. Tomsich, J. Wenninger
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.h 928 2004-02-26 00:18:36Z twisti $
41 /* x86_64 register numbers */
/* preallocated registers *****************************************************/

/* integer registers */

#define REG_RESULT      RAX      /* to deliver method results                 */

#define REG_ITMP1       RAX      /* temporary register (aliases REG_RESULT)   */
#define REG_ITMP2       R10      /* temporary register and method pointer     */
#define REG_ITMP3       R11      /* temporary register                        */

#define REG_NULL        -1       /* used for reg_of_var where d is not needed */

#define REG_ITMP1_XPTR  RAX      /* exception pointer = temporary register 1  */
#define REG_ITMP2_XPC   R10      /* exception pc = temporary register 2       */

#define REG_SP          RSP      /* stack pointer                             */

/* floating point registers */

#define REG_FRESULT     XMM0     /* to deliver floating point method results  */

#define REG_FTMP1       XMM8     /* temporary floating point register         */
#define REG_FTMP2       XMM9     /* temporary floating point register         */
#define REG_FTMP3       XMM10    /* temporary floating point register         */

/* NOTE(review): these counts must agree with the register allocator's
   argument/callee-saved tables (defined outside this chunk) — confirm there. */
#define INT_ARG_CNT      6       /* number of int argument registers          */
#define INT_SAV_CNT      5       /* number of int callee saved registers      */

#define FLT_ARG_CNT      4       /* number of flt argument registers          */
#define FLT_SAV_CNT      0       /* number of flt callee saved registers      */
112 /* macros to create code ******************************************************/
114 /* immediate data union */
126 /* opcodes for alu instructions */
150 } X86_64_Shift_Opcode;
    /* The numeric values are the x86 condition-code ('tttn') encodings that
       get combined into the Jcc/SETcc/CMOVcc opcodes.  NOTE(review): the
       enum's opening (and any members below 2) lies outside this chunk. */
    X86_64_CC_B = 2, X86_64_CC_C = 2, X86_64_CC_NAE = 2,
    X86_64_CC_BE = 6, X86_64_CC_NA = 6,
    X86_64_CC_AE = 3, X86_64_CC_NB = 3, X86_64_CC_NC = 3,
    X86_64_CC_E = 4, X86_64_CC_Z = 4,
    X86_64_CC_NE = 5, X86_64_CC_NZ = 5,
    X86_64_CC_A = 7, X86_64_CC_NBE = 7,
    X86_64_CC_S = 8, X86_64_CC_LZ = 8,
    X86_64_CC_NS = 9, X86_64_CC_GEZ = 9,
    X86_64_CC_P = 0x0a, X86_64_CC_PE = 0x0a,
    X86_64_CC_NP = 0x0b, X86_64_CC_PO = 0x0b,
    X86_64_CC_L = 0x0c, X86_64_CC_NGE = 0x0c,
    X86_64_CC_GE = 0x0d, X86_64_CC_NL = 0x0d,
    X86_64_CC_LE = 0x0e, X86_64_CC_NG = 0x0e,
    X86_64_CC_G = 0x0f, X86_64_CC_NLE = 0x0f,
174 /* modrm and stuff */
/* x86_64_address_byte: emit one ModRM (or SIB) byte into the code buffer:
 * mod (scale) in bits 7-6, reg (index) in bits 5-3, rm (base) in bits 2-0.
 * Relies on a code-output cursor 'mcodeptr' being in scope at the use site. */
#define x86_64_address_byte(mod,reg,rm) \
    *(mcodeptr++) = ((((mod) & 0x03) << 6) | (((reg) & 0x07) << 3) | ((rm) & 0x07));
/* x86_64_emit_reg: ModRM byte with mod=11 — register-direct operand
 * ('reg' is the reg/opcode-extension field, 'rm' the target register). */
#define x86_64_emit_reg(reg,rm) \
    x86_64_address_byte(3,(reg),(rm));
/* x86_64_emit_rex: emit a REX prefix when one is required — size==1 requests
 * REX.W (64-bit operand size), and any register number > 7 (R8-R15/XMM8-15)
 * needs its high bit carried in REX.R/REX.X/REX.B respectively.
 * NOTE(review): the closing brace of the 'if' was missing in this copy of the
 * file; restored here.  Wrapped in do/while(0) so the conditional cannot
 * capture a caller's 'else'.  Evaluates its arguments more than once. */
#define x86_64_emit_rex(size,reg,index,rm) \
    do { \
        if ((size) == 1 || (reg) > 7 || (index) > 7 || (rm) > 7) { \
            *(mcodeptr++) = (0x40 | (((size) & 0x01) << 3) | ((((reg) >> 3) & 0x01) << 2) | ((((index) >> 3) & 0x01) << 1) | (((rm) >> 3) & 0x01)); \
        } \
    } while (0)
/* x86_64_emit_mem: emit ModRM mod=00, rm=101 followed by a 32-bit
 * displacement ('r' is the reg/opcode-extension field).  NOTE(review): in
 * 64-bit mode this encoding is RIP-relative, not absolute — confirm callers
 * expect that.  The enclosing braces were missing in this copy; restored. */
#define x86_64_emit_mem(r,disp) \
    do { \
        x86_64_address_byte(0,(r),5); \
        x86_64_emit_imm32((disp)); \
    } while (0)
/* x86_64_emit_membase: emit the ModRM byte (plus SIB byte and displacement
 * as required) for a [basereg + disp] memory operand; 'dreg' goes into the
 * ModRM reg/opcode-extension field.
 * NOTE(review): several structural lines (inner if/else, closing braces,
 * early exits) were missing in this copy; the do/break structure below
 * restores them from the surviving fragments and the x86-64 encoding rules —
 * verify against the emitter implementations. */
#define x86_64_emit_membase(basereg,disp,dreg) \
    do { \
        /* rm=100 means "SIB follows", so SP/R12 bases always need a SIB */ \
        if ((basereg) == REG_SP || (basereg) == R12) { \
            if ((disp) == 0) { \
                x86_64_address_byte(0,(dreg),REG_SP); \
                x86_64_address_byte(0,REG_SP,REG_SP); \
            } else if (x86_64_is_imm8((disp))) { \
                x86_64_address_byte(1,(dreg),REG_SP); \
                x86_64_address_byte(0,REG_SP,REG_SP); \
                x86_64_emit_imm8((disp)); \
            } else { \
                x86_64_address_byte(2,(dreg),REG_SP); \
                x86_64_address_byte(0,REG_SP,REG_SP); \
                x86_64_emit_imm32((disp)); \
            } \
            break; \
        } \
        \
        /* mod=00 with rm=RBP/R13 would mean RIP-relative, so exclude them */ \
        if ((disp) == 0 && (basereg) != RBP && (basereg) != R13) { \
            x86_64_address_byte(0,(dreg),(basereg)); \
            break; \
        } \
        \
        /* RIP-relative addressing: mod=00, rm=101, 32-bit displacement */ \
        if ((basereg) == RIP) { \
            x86_64_address_byte(0,(dreg),RBP); \
            x86_64_emit_imm32((disp)); \
            break; \
        } \
        \
        if (x86_64_is_imm8((disp))) { \
            x86_64_address_byte(1,(dreg),(basereg)); \
            x86_64_emit_imm8((disp)); \
        } else { \
            x86_64_address_byte(2,(dreg),(basereg)); \
            x86_64_emit_imm32((disp)); \
        } \
    } while (0)
/* x86_64_emit_memindex: emit ModRM + SIB (+ displacement) for a
 * [basereg + indexreg*2^scale + disp] operand; 'reg' goes into the ModRM
 * reg/opcode-extension field.  basereg == -1 selects the no-base form
 * (SIB base=101 with mod=00, which mandates a 32-bit displacement).
 * NOTE(review): enclosing braces and two '} else' lines were missing in this
 * copy; restored from the surviving fragments — verify against the emitters. */
#define x86_64_emit_memindex(reg,disp,basereg,indexreg,scale) \
    { \
        if ((basereg) == -1) { \
            x86_64_address_byte(0,(reg),4); \
            x86_64_address_byte((scale),(indexreg),5); \
            x86_64_emit_imm32((disp)); \
        \
        } else if ((disp) == 0 && (basereg) != RBP && (basereg) != R13) { \
            x86_64_address_byte(0,(reg),4); \
            x86_64_address_byte((scale),(indexreg),(basereg)); \
        \
        } else if (x86_64_is_imm8((disp))) { \
            x86_64_address_byte(1,(reg),4); \
            x86_64_address_byte((scale),(indexreg),(basereg)); \
            x86_64_emit_imm8 ((disp)); \
        \
        } else { \
            x86_64_address_byte(2,(reg),4); \
            x86_64_address_byte((scale),(indexreg),(basereg)); \
            x86_64_emit_imm32((disp)); \
        } \
    }
/* True iff 'imm' fits in a sign-extended 8-bit immediate ([-128, 127]). */
#define x86_64_is_imm8(imm) \
    ((long)(imm) <= 127 && (long)(imm) >= -128)
/* True iff 'imm' fits in a sign-extended 32-bit immediate.
   (-2147483647-1) avoids the classic "unsigned 2147483648" literal pitfall. */
#define x86_64_is_imm32(imm) \
    ((long)(imm) <= 2147483647 && (long)(imm) >= (-2147483647-1))
/* x86_64_emit_imm8: append one immediate byte to the code buffer.
 * Relies on 'mcodeptr' and the 'u1' byte type declared elsewhere. */
#define x86_64_emit_imm8(imm) \
    *(mcodeptr++) = (u1) ((imm) & 0xff);
/* x86_64_emit_imm16: append a 16-bit immediate, least-significant byte first,
 * by staging it through x86_64_imm_buf (presumably a union of s4/s8 with a
 * byte array — its definition is outside this chunk; confirm byte order
 * matches the host's, as b[0]/b[1] here are host-endian).
 * NOTE(review): the braces around the local were missing in this copy. */
#define x86_64_emit_imm16(imm) \
    do { \
        x86_64_imm_buf imb; \
        imb.i = (s4) (imm); \
        *(mcodeptr++) = imb.b[0]; \
        *(mcodeptr++) = imb.b[1]; \
    } while (0)
/* x86_64_emit_imm32: append a 32-bit immediate byte-by-byte through the
 * x86_64_imm_buf union (declared elsewhere; b[0..3] are host-endian).
 * NOTE(review): the braces around the local were missing in this copy. */
#define x86_64_emit_imm32(imm) \
    do { \
        x86_64_imm_buf imb; \
        imb.i = (s4) (imm); \
        *(mcodeptr++) = imb.b[0]; \
        *(mcodeptr++) = imb.b[1]; \
        *(mcodeptr++) = imb.b[2]; \
        *(mcodeptr++) = imb.b[3]; \
    } while (0)
/* x86_64_emit_imm64: append a 64-bit immediate byte-by-byte through the
 * x86_64_imm_buf union (declared elsewhere; .l is the s8 view, b[0..7]
 * host-endian).  NOTE(review): braces were missing in this copy; restored. */
#define x86_64_emit_imm64(imm) \
    do { \
        x86_64_imm_buf imb; \
        imb.l = (s8) (imm); \
        *(mcodeptr++) = imb.b[0]; \
        *(mcodeptr++) = imb.b[1]; \
        *(mcodeptr++) = imb.b[2]; \
        *(mcodeptr++) = imb.b[3]; \
        *(mcodeptr++) = imb.b[4]; \
        *(mcodeptr++) = imb.b[5]; \
        *(mcodeptr++) = imb.b[6]; \
        *(mcodeptr++) = imb.b[7]; \
    } while (0)
/* code generation prototypes */

/* Instruction-selection helpers: each consumes stack element 'src' of IR
   instruction 'iptr' and emits x86_64 code for the given sub-opcode.
   The i*/l* pairs are presumably the 32-bit int vs. 64-bit long variants
   (CACAO is a JVM) — confirm against the definitions. */

void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr);
/* integer instructions */

/* Low-level emitter prototypes.  Naming follows AT&T-style suffixes:
   unsuffixed = 64-bit operand, 'l' = 32-bit, 'w' = 16-bit, 'b' = 8-bit;
   the _x_y tail gives operand order (source_destination).  All register
   numbers, displacements and immediates are passed as s8. */

/* moves (including sign-/zero-extending movs*/movz* forms) */
void x86_64_mov_reg_reg(s8 reg, s8 dreg);
void x86_64_mov_imm_reg(s8 imm, s8 reg);
void x86_64_movl_imm_reg(s8 imm, s8 reg);
void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp);
void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp);
void x86_64_movsbq_reg_reg(s8 reg, s8 dreg);
void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movswq_reg_reg(s8 reg, s8 dreg);
void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movslq_reg_reg(s8 reg, s8 dreg);
void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movzwq_reg_reg(s8 reg, s8 dreg);
void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);

/* arithmetic/logic ('opc' selects add/or/adc/sbb/and/sub/xor/cmp), test, lea */
void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp);
void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp);
void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg);
void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg);
void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_test_reg_reg(s8 reg, s8 dreg);
void x86_64_testl_reg_reg(s8 reg, s8 dreg);
void x86_64_test_imm_reg(s8 imm, s8 reg);
void x86_64_testw_imm_reg(s8 imm, s8 reg);
void x86_64_testb_imm_reg(s8 imm, s8 reg);
void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_inc_reg(s8 reg);
void x86_64_incl_reg(s8 reg);
void x86_64_inc_membase(s8 basereg, s8 disp);
void x86_64_incl_membase(s8 basereg, s8 disp);
void x86_64_dec_reg(s8 reg);
void x86_64_decl_reg(s8 reg);
void x86_64_dec_membase(s8 basereg, s8 disp);
void x86_64_decl_membase(s8 basereg, s8 disp);

/* multiply / divide */
void x86_64_imul_reg_reg(s8 reg, s8 dreg);
void x86_64_imull_reg_reg(s8 reg, s8 dreg);
void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_imul_imm_reg(s8 imm, s8 dreg);
void x86_64_imul_imm_reg_reg(s8 imm,s8 reg, s8 dreg);
void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg);
void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg);
void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg);
void x86_64_idiv_reg(s8 reg);
void x86_64_idivl_reg(s8 reg);

/* shifts, branches, conditional ops, misc */
void x86_64_shift_reg(s8 opc, s8 reg);
void x86_64_shiftl_reg(s8 opc, s8 reg);
void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_jmp_imm(s8 imm);
void x86_64_jmp_reg(s8 reg);
void x86_64_jcc(s8 opc, s8 imm);
void x86_64_setcc_reg(s8 opc, s8 reg);
void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_neg_reg(s8 reg);
void x86_64_negl_reg(s8 reg);
void x86_64_neg_membase(s8 basereg, s8 disp);
void x86_64_negl_membase(s8 basereg, s8 disp);
void x86_64_push_imm(s8 imm);
void x86_64_pop_reg(s8 reg);
void x86_64_xchg_reg_reg(s8 reg, s8 dreg);

/* calls */
void x86_64_call_reg(s8 reg);
void x86_64_call_imm(s8 imm);
/* floating point instructions (SSE2) */

/* Scalar SSE/SSE2 emitters: 'ss' = single precision, 'sd' = double;
   cvt(t)* convert between integers and floats ('q' variants are presumably
   the 64-bit integer forms — confirm against the definitions); movd/movq
   transfer between GPRs/memory and XMM registers. */

void x86_64_addsd_reg_reg(s8 reg, s8 dreg);
void x86_64_addss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg);
void x86_64_divss_reg_reg(s8 reg, s8 dreg);
void x86_64_divsd_reg_reg(s8 reg, s8 dreg);
void x86_64_movd_reg_freg(s8 reg, s8 freg);
void x86_64_movd_freg_reg(s8 freg, s8 reg);
void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_movq_reg_reg(s8 reg, s8 dreg);
void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movss_reg_reg(s8 reg, s8 dreg);
void x86_64_movsd_reg_reg(s8 reg, s8 dreg);
void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_mulss_reg_reg(s8 reg, s8 dreg);
void x86_64_mulsd_reg_reg(s8 reg, s8 dreg);
void x86_64_subss_reg_reg(s8 reg, s8 dreg);
void x86_64_subsd_reg_reg(s8 reg, s8 dreg);
void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg);
void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg);
void x86_64_xorps_reg_reg(s8 reg, s8 dreg);
void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_xorpd_reg_reg(s8 reg, s8 dreg);
void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg);
/* function gen_resolvebranch **************************************************

    backpatches a branch instruction

    parameters: ip ... pointer to instruction after branch (void*)
                so ... offset of instruction after branch (s8)
                to ... offset of branch target (s8)

*******************************************************************************/

/* The s4 store shows the rel32 displacement occupies the 4 bytes immediately
   before 'ip'; the displacement is relative to the instruction end, hence
   (to - so).  Truncation to s4 assumes the branch distance fits in 32 bits. */
#define gen_resolvebranch(ip,so,to) \
    *((s4*) ((ip) - 4)) = (s4) ((to) - (so));
481 /* function prototypes */
484 void init_exceptions();
486 void codegen_close();
487 void dseg_display(s4 *s4ptr);
489 #endif /* _CODEGEN_H */
493 * These are local overrides for various environment variables in Emacs.
494 * Please do not remove this and leave it at the end of the file, where
495 * Emacs will automagically detect them.
496 * ---------------------------------------------------------------------
499 * indent-tabs-mode: t