/* jit/x86_64/codegen.h - code generation macros and definitions for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser,
5 M. Probst, S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck,
6 P. Tomsich, J. Wenninger
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.h 573 2003-11-07 08:53:55Z twisti $
41 /* x86_64 register numbers */
/* preallocated registers *****************************************************/

/* integer registers */
/* NOTE(review): RAX, R10, RSP, XMM0, ... presumably come from the x86_64
   register-number definitions earlier in this file — confirm they are in
   scope before these macros are used. */

#define REG_RESULT      RAX      /* to deliver method results                 */

#define REG_ITMP1       RAX      /* temporary register                        */
#define REG_ITMP2       R10      /* temporary register and method pointer     */
#define REG_ITMP3       R11      /* temporary register                        */

#define REG_NULL        -1       /* used for reg_of_var where d is not needed */

#define REG_ITMP1_XPTR  RAX      /* exception pointer = temporary register 1  */
#define REG_ITMP2_XPC   R10      /* exception pc = temporary register 2       */

#define REG_SP          RSP      /* stack pointer                             */

/* floating point registers */

#define REG_FRESULT     XMM0     /* to deliver floating point method results  */

#define REG_FTMP1       XMM8     /* temporary floating point register         */
#define REG_FTMP2       XMM9     /* temporary floating point register         */
#define REG_FTMP3       XMM10    /* temporary floating point register         */
/* stackframe-infos ***********************************************************/

/* NOTE(review): this is a variable *definition* in a header; every
   translation unit that includes this file gets its own copy (multiple
   definitions at link time unless the toolchain merges common symbols).
   Presumably it should be declared 'extern' here and defined in one .c
   file — confirm against the rest of the project. */
int parentargs_base; /* offset in stackframe for the parameter from the caller*/
110 /* macros to create code ******************************************************/
112 /* immediate data union */
124 /* opcodes for alu instructions */
148 } X86_64_Shift_Opcode;
154 X86_64_CC_B = 2, X86_64_CC_C = 2, X86_64_CC_NAE = 2,
155 X86_64_CC_BE = 6, X86_64_CC_NA = 6,
156 X86_64_CC_AE = 3, X86_64_CC_NB = 3, X86_64_CC_NC = 3,
157 X86_64_CC_E = 4, X86_64_CC_Z = 4,
158 X86_64_CC_NE = 5, X86_64_CC_NZ = 5,
159 X86_64_CC_A = 7, X86_64_CC_NBE = 7,
160 X86_64_CC_S = 8, X86_64_CC_LZ = 8,
161 X86_64_CC_NS = 9, X86_64_CC_GEZ = 9,
162 X86_64_CC_P = 0x0a, X86_64_CC_PE = 0x0a,
163 X86_64_CC_NP = 0x0b, X86_64_CC_PO = 0x0b,
164 X86_64_CC_L = 0x0c, X86_64_CC_NGE = 0x0c,
165 X86_64_CC_GE = 0x0d, X86_64_CC_NL = 0x0d,
166 X86_64_CC_LE = 0x0e, X86_64_CC_NG = 0x0e,
167 X86_64_CC_G = 0x0f, X86_64_CC_NLE = 0x0f,
172 /* modrm and stuff */
/* x86_64_address_byte: append one ModRM/SIB-format byte to the code buffer
   pointed to by mcodeptr: mod in bits 7-6, reg (or SIB scale) in bits 5-3,
   r/m (or SIB base) in bits 2-0. Each field is masked to its width. */
#define x86_64_address_byte(mod,reg,rm) \
    *(mcodeptr++) = (((rm) & 0x07) | (((reg) & 0x07) << 3) | (((mod) & 0x03) << 6));
/* x86_64_emit_reg: ModRM byte with mod = 3 (register-direct operand);
   'reg' fills the reg/opcode field, 'rm' names the register operand. */
#define x86_64_emit_reg(reg,rm) \
    x86_64_address_byte(3,(reg),(rm));
/* x86_64_emit_rex: emit a REX prefix byte (0x40 | W<<3 | R<<2 | X<<1 | B)
   when one is needed: 64-bit operand size (size == 1) or any operand
   register numbered above 7 (R8-R15 / XMM8-XMM15, which need an extension
   bit). Bit 3 of each register number becomes the corresponding R/X/B bit. */
#define x86_64_emit_rex(size,reg,index,rm) \
    if ((size) == 1 || (reg) > 7 || (index) > 7 || (rm) > 7) { \
        *(mcodeptr++) = (0x40 | (((size) & 0x01) << 3) | ((((reg) >> 3) & 0x01) << 2) | ((((index) >> 3) & 0x01) << 1) | (((rm) >> 3) & 0x01)); \
/* x86_64_emit_mem: displacement-only operand — ModRM with mod = 0,
   r/m = 5, followed by a 32-bit displacement.
   NOTE(review): in 64-bit mode the CPU interprets mod=00/r/m=101 as
   RIP-relative, not absolute — confirm callers account for that. */
#define x86_64_emit_mem(r,disp) \
    x86_64_address_byte(0,(r),5); \
    x86_64_emit_imm32((disp)); \
/* x86_64_emit_membase: emit the shortest ModRM(+SIB)(+disp) encoding for a
   base-register + displacement operand, with 'dreg' in the reg field.
   Special cases visible below:
     - RSP/R12 as base: r/m = 100 selects a SIB byte, so an explicit
       SIB (base = RSP, no index) must follow;
     - disp == 0 with base != RBP/R13: mod = 0, no displacement byte
       (mod=0 with RBP/R13 would mean "no base", so they always need a
       displacement);
     - RIP as base: encoded via mod=0 / r/m=101 (RBP slot) + disp32;
     - otherwise disp8 (mod = 1) when the displacement fits in a signed
       byte, else disp32 (mod = 2). */
#define x86_64_emit_membase(basereg,disp,dreg) \
    if ((basereg) == REG_SP || (basereg) == R12) { \
        x86_64_address_byte(0,(dreg),REG_SP); \
        x86_64_address_byte(0,REG_SP,REG_SP); \
    } else if (x86_64_is_imm8((disp))) { \
        x86_64_address_byte(1,(dreg),REG_SP); \
        x86_64_address_byte(0,REG_SP,REG_SP); \
        x86_64_emit_imm8((disp)); \
        x86_64_address_byte(2,(dreg),REG_SP); \
        x86_64_address_byte(0,REG_SP,REG_SP); \
        x86_64_emit_imm32((disp)); \
    if ((disp) == 0 && (basereg) != RBP && (basereg) != R13) { \
        x86_64_address_byte(0,(dreg),(basereg)); \
    if ((basereg) == RIP) { \
        x86_64_address_byte(0,(dreg),RBP); \
        x86_64_emit_imm32((disp)); \
    if (x86_64_is_imm8((disp))) { \
        x86_64_address_byte(1,(dreg),(basereg)); \
        x86_64_emit_imm8((disp)); \
        x86_64_address_byte(2,(dreg),(basereg)); \
        x86_64_emit_imm32((disp)); \
/* x86_64_emit_memindex: emit ModRM + SIB (+displacement) for a scaled
   index operand: r/m = 100 selects the SIB byte, which carries
   scale/index/base.  basereg == -1 encodes "no base register" (SIB base =
   101 with mod = 0) and always takes a disp32; otherwise the shortest of
   no-disp / disp8 / disp32 is chosen, again avoiding the mod=0 form for
   RBP/R13 (reserved to mean "no base"). */
#define x86_64_emit_memindex(reg,disp,basereg,indexreg,scale) \
    if ((basereg) == -1) { \
        x86_64_address_byte(0,(reg),4); \
        x86_64_address_byte((scale),(indexreg),5); \
        x86_64_emit_imm32((disp)); \
    } else if ((disp) == 0 && (basereg) != RBP && (basereg) != R13) { \
        x86_64_address_byte(0,(reg),4); \
        x86_64_address_byte((scale),(indexreg),(basereg)); \
    } else if (x86_64_is_imm8((disp))) { \
        x86_64_address_byte(1,(reg),4); \
        x86_64_address_byte((scale),(indexreg),(basereg)); \
        x86_64_emit_imm8 ((disp)); \
        x86_64_address_byte(2,(reg),4); \
        x86_64_address_byte((scale),(indexreg),(basereg)); \
        x86_64_emit_imm32((disp)); \
/* x86_64_is_imm8: nonzero iff 'imm' fits into a signed 8-bit immediate
   field, i.e. lies in [-128, 127]. */
#define x86_64_is_imm8(imm) \
    ((long) (imm) > -129 && (long) (imm) < 128)
/* x86_64_is_imm32: nonzero iff 'imm' fits into a signed 32-bit immediate
   field, i.e. lies in [-2^31, 2^31 - 1].  INT32_MIN is spelled
   (-2147483647 - 1) because the literal 2147483648 would not fit in int. */
#define x86_64_is_imm32(imm) \
    (!((long) (imm) < (-2147483647 - 1) || (long) (imm) > 2147483647))
/* x86_64_emit_imm8: append the low byte of 'imm' to the code buffer. */
#define x86_64_emit_imm8(imm) \
    *(mcodeptr++) = (u1) ((imm) & 0xff);
/* x86_64_emit_imm16: append a 16-bit immediate byte-by-byte through the
   x86_64_imm_buf union (avoids an unaligned 2-byte store); b[0] first,
   which on the little-endian x86_64 target is the low byte. */
#define x86_64_emit_imm16(imm) \
    x86_64_imm_buf imb; \
    imb.i = (s4) (imm); \
    *(mcodeptr++) = imb.b[0]; \
    *(mcodeptr++) = imb.b[1]; \
/* x86_64_emit_imm32: append a 32-bit immediate byte-by-byte through the
   x86_64_imm_buf union; b[0]..b[3] gives little-endian order on the
   x86_64 target. */
#define x86_64_emit_imm32(imm) \
    x86_64_imm_buf imb; \
    imb.i = (s4) (imm); \
    *(mcodeptr++) = imb.b[0]; \
    *(mcodeptr++) = imb.b[1]; \
    *(mcodeptr++) = imb.b[2]; \
    *(mcodeptr++) = imb.b[3]; \
/* x86_64_emit_imm64: append a 64-bit immediate byte-by-byte through the
   x86_64_imm_buf union (note: assigns the union's 64-bit member 'l',
   unlike the 16/32-bit variants which assign 'i'). */
#define x86_64_emit_imm64(imm) \
    x86_64_imm_buf imb; \
    imb.l = (s8) (imm); \
    *(mcodeptr++) = imb.b[0]; \
    *(mcodeptr++) = imb.b[1]; \
    *(mcodeptr++) = imb.b[2]; \
    *(mcodeptr++) = imb.b[3]; \
    *(mcodeptr++) = imb.b[4]; \
    *(mcodeptr++) = imb.b[5]; \
    *(mcodeptr++) = imb.b[6]; \
    *(mcodeptr++) = imb.b[7]; \
/* code generation prototypes */

/* High-level emitters called from the instruction selector: each one
   translates a family of intermediate-code instructions (ALU ops, shifts,
   conditional branches) into machine code, choosing the concrete encoding
   from the opcode/condition passed in the first parameter. */

void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr);
void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr);
void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr);
void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr);
/* integer instructions */

/* Naming convention (AT&T-like): operands are listed source before
   destination, e.g. x86_64_mov_reg_membase(reg, basereg, disp) stores a
   register to memory.  Presumably emitters without a size suffix operate
   on 64-bit operands while 'l' / 'w' / 'b' select the 32/16/8-bit forms —
   confirm against the definitions in the corresponding .c file. */

/* data movement */
void x86_64_mov_reg_reg(s8 reg, s8 dreg);
void x86_64_mov_imm_reg(s8 imm, s8 reg);
void x86_64_movl_imm_reg(s8 imm, s8 reg);
void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp);
void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp);

/* sign/zero-extending moves (movs*/movz*) */
void x86_64_movsbq_reg_reg(s8 reg, s8 dreg);
void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movswq_reg_reg(s8 reg, s8 dreg);
void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movslq_reg_reg(s8 reg, s8 dreg);
void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movzwq_reg_reg(s8 reg, s8 dreg);
void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);
void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg);

/* ALU operations ('opc' selects add/sub/and/or/... per the alu opcode enum) */
void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp);
void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp);
void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg);
void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg);
void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_test_reg_reg(s8 reg, s8 dreg);
void x86_64_testl_reg_reg(s8 reg, s8 dreg);
void x86_64_test_imm_reg(s8 imm, s8 reg);
void x86_64_testw_imm_reg(s8 imm, s8 reg);
void x86_64_testb_imm_reg(s8 imm, s8 reg);
void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg);
void x86_64_inc_reg(s8 reg);
void x86_64_incl_reg(s8 reg);
void x86_64_inc_membase(s8 basereg, s8 disp);
void x86_64_incl_membase(s8 basereg, s8 disp);
void x86_64_dec_reg(s8 reg);
void x86_64_decl_reg(s8 reg);
void x86_64_dec_membase(s8 basereg, s8 disp);
void x86_64_decl_membase(s8 basereg, s8 disp);

/* multiplication / division */
void x86_64_imul_reg_reg(s8 reg, s8 dreg);
void x86_64_imull_reg_reg(s8 reg, s8 dreg);
void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_imul_imm_reg(s8 imm, s8 dreg);
void x86_64_imul_imm_reg_reg(s8 imm,s8 reg, s8 dreg);
void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg);
void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg);
void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg);
void x86_64_idiv_reg(s8 reg);
void x86_64_idivl_reg(s8 reg);

/* shifts ('opc' selects shl/sar/shr/... per the shift opcode enum) */
void x86_64_shift_reg(s8 opc, s8 reg);
void x86_64_shiftl_reg(s8 opc, s8 reg);
void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg);
void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);
void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp);

/* control flow and conditionals ('opc' is a condition code from the CC enum) */
void x86_64_jmp_imm(s8 imm);
void x86_64_jmp_reg(s8 reg);
void x86_64_jcc(s8 opc, s8 imm);
void x86_64_setcc_reg(s8 opc, s8 reg);
void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp);
void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg);
void x86_64_neg_reg(s8 reg);
void x86_64_negl_reg(s8 reg);
void x86_64_neg_membase(s8 basereg, s8 disp);
void x86_64_negl_membase(s8 basereg, s8 disp);
void x86_64_push_imm(s8 imm);
void x86_64_pop_reg(s8 reg);
void x86_64_xchg_reg_reg(s8 reg, s8 dreg);

void x86_64_call_reg(s8 reg);
void x86_64_call_imm(s8 imm);
/* floating point instructions (SSE2) */

/* Same source-before-destination operand order as the integer emitters;
   'ss'/'sd' suffixes are the scalar single/double-precision SSE2 forms,
   the cvt* emitters are the int<->float conversions used for Java's
   i2f/l2d/f2i/... bytecodes. */

void x86_64_addsd_reg_reg(s8 reg, s8 dreg);
void x86_64_addss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg);
void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg);
void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg);
void x86_64_divss_reg_reg(s8 reg, s8 dreg);
void x86_64_divsd_reg_reg(s8 reg, s8 dreg);
void x86_64_movd_reg_freg(s8 reg, s8 freg);
void x86_64_movd_freg_reg(s8 freg, s8 reg);
void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_movq_reg_reg(s8 reg, s8 dreg);
void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movss_reg_reg(s8 reg, s8 dreg);
void x86_64_movsd_reg_reg(s8 reg, s8 dreg);
void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp);
void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale);
void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg);
void x86_64_mulss_reg_reg(s8 reg, s8 dreg);
void x86_64_mulsd_reg_reg(s8 reg, s8 dreg);
void x86_64_subss_reg_reg(s8 reg, s8 dreg);
void x86_64_subsd_reg_reg(s8 reg, s8 dreg);
void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg);
void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg);
void x86_64_xorps_reg_reg(s8 reg, s8 dreg);
void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg);
void x86_64_xorpd_reg_reg(s8 reg, s8 dreg);
void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg);
/* function gen_resolvebranch **************************************************

   backpatches a branch instruction: stores the relative 32-bit branch
   displacement (to - so) into the 4 bytes immediately preceding ip.

   parameters: ip ... pointer to instruction after branch (void*)
               so ... offset of instruction after branch (s8)
               to ... offset of branch target (s8)

*******************************************************************************/

/* Fixed: '(ip) - 4' performed arithmetic on a void pointer, which is a GCC
   extension and invalid in standard C; stepping back through u1* is
   well-defined for any object pointer the callers pass. */
#define gen_resolvebranch(ip,so,to) \
    *((s4 *) ((u1 *) (ip) - 4)) = (s4) ((to) - (so));
#define SOFTNULLPTRCHECK /* soft null pointer check supported as option */
480 #endif /* _CODEGEN_H */
484 * These are local overrides for various environment variables in Emacs.
485 * Please do not remove this and leave it at the end of the file, where
486 * Emacs will automagically detect them.
487 * ---------------------------------------------------------------------
490 * indent-tabs-mode: t