Reinhard Grafl
Christian Thalinger
- $Id: asmpart.S 2733 2005-06-17 12:18:29Z twisti $
+ $Id: asmpart.S 2956 2005-07-09 14:04:34Z twisti $
*/
.globl asm_wrapper_patcher
.globl asm_builtin_arraycheckcast
- .globl asm_builtin_aastore
.globl asm_builtin_f2i
.globl asm_builtin_f2l
.globl asm_criticalsections
.globl asm_getclassvalues_atomic
- .globl asm_prepare_native_stackinfo
- .globl asm_remove_native_stackinfo
- .globl asm_throw_and_handle_exception
- .globl asm_throw_and_handle_hardware_arithmetic_exception
-
/********************* function asm_calljavafunction ***************************
* *
*******************************************************************************/
asm_wrapper_patcher:
- sub $(17*8),%rsp /* stack frame (16-byte aligned) */
+ sub $(19*8+sizestackframeinfo),sp /* stack frame (16-byte aligned) */
SAVE_ARGUMENT_REGISTERS(0)
SAVE_TEMPORARY_REGISTERS(14)
- mov itmp1,15*8(%rsp) /* save itmp1 and itmp2 */
- mov itmp2,16*8(%rsp) /* can be used by some instructions */
+ mov itmp1,15*8(sp) /* save itmp1 and itmp2 */
+ mov itmp2,16*8(sp) /* can be used by some instructions */
+
+ mov sp,a0 /* create stackframe info */
+ add $(19*8),a0
+ xor a1,a1 /* if pv is NULL, use findmethod */
+ mov sp,a2
+ add $((5+19)*8+sizestackframeinfo),a2
+ mov ((4+19)*8+sizestackframeinfo)(sp),a3
+ call stacktrace_create_inline_stackframeinfo
- mov %rsp,a0 /* pass stack pointer */
- add $(18*8),a0 /* skip patcher function pointer */
- mov 17*8(%rsp),itmp3 /* get function pointer */
+ mov sp,a0 /* pass stack pointer */
+ add $((1+19)*8+sizestackframeinfo),a0 /* skip function pointer */
+ mov (19*8+sizestackframeinfo)(sp),itmp3 /* get function pointer */
call *itmp3 /* call the patcher function */
- mov v0,itmp3 /* save return value */
+ mov v0,17*8(sp) /* save return value */
+
+ mov sp,a0 /* remove stackframe info */
+ add $(19*8),a0
+ call stacktrace_remove_stackframeinfo
RESTORE_ARGUMENT_REGISTERS(0)
RESTORE_TEMPORARY_REGISTERS(14)
- mov 15*8(%rsp),itmp1 /* restore itmp1 and itmp2 */
- mov 16*8(%rsp),itmp2 /* can be used by some instructions */
+ mov 15*8(sp),itmp1 /* restore itmp1 and itmp2 */
+ mov 16*8(sp),itmp2 /* can be used by some instructions */
+ mov 17*8(sp),itmp3 /* restore return value */
- add $((4+17)*8),%rsp /* remove stack frame, keep ra */
+ add $((4+19)*8+sizestackframeinfo),sp /* remove stack frame, keep ra */
test itmp3,itmp3 /* exception thrown? */
jz L_asm_wrapper_patcher_exception
ret /* call new patched code */
L_asm_wrapper_patcher_exception:
- /*stack bottom is xpc and it is directly below the last java stackframe*/
- push $0
- push $0
- push $0 /*padding*/
- call asm_prepare_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
-
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
call builtin_asm_get_exceptionptrptr
- mov v0,itmp2
+ mov v0,itmp2 /* v0 == xptr */
#else
lea _exceptionptr,itmp2
#endif
- mov (itmp2),a0 /* get the exception pointer */
+ mov (itmp2),xptr /* get the exception pointer */
movl $0,(itmp2) /* clear exception pointer */
- call helper_fillin_stacktrace
-
- mov v0,xptr
-
- call asm_remove_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
- add $8,%rsp
pop xpc /* get and remove return address */
jmp asm_handle_exception
ret
-/* asm_builtin_arraycheckcast **************************************************
-
- Does the cast check and eventually throws an exception.
-
-*******************************************************************************/
-
-asm_builtin_arraycheckcast:
- sub $24,%rsp /* keep stack 16-byte aligned */
- mov %rdi,(%rsp) /* save object pointer */
- call builtin_arraycheckcast
- test %rax,%rax /* if (false) throw exception */
- je nb_carray_throw
- mov (%rsp),%rax /* return object pointer */
- add $24,%rsp /* free stack space */
- ret
-
-nb_carray_throw:
- /*call new_classcastexception*/
- add $24,%rsp
- pop xpc /* delete return address */
- sub $3,xpc /* faulting address is ra - 3 */
- mov string_java_lang_ClassCastException,xptr
- jmp asm_throw_and_handle_exception
-
-
-/* asm_builtin_aastore *********************************************************
-
- Checks if the object can be stored in the given array and stores the
- address if it's possible. This function can also throw some exceptions.
-
-*******************************************************************************/
-
-asm_builtin_aastore:
- sub $(3*8),%rsp /* allocate stack space */
- test %rdi,%rdi /* if null pointer throw exception */
- je nb_aastore_null
-
- movl offarraysize(%rdi),%eax /* load size */
- cmpl %eax,%esi /* do bound check */
- jae nb_aastore_bound /* if out of bounds throw exception */
-
- shl $3,%rsi /* index * 8 */
- mov %rdi,%r10
- add %rsi,%r10 /* add index * 8 to arrayref */
-
- mov %r10,(%rsp) /* save store position */
- mov %rdx,8(%rsp) /* save object */
-
- mov %rdx,%rsi /* object is second argument */
- call builtin_canstore /* builtin_canstore(arrayref,object) */
- test %rax,%rax /* if (false) throw exception */
- je nb_aastore_throw
-
- mov (%rsp),%r10 /* restore store position */
- mov 8(%rsp),%rdx /* restore object */
- mov %rdx,offobjarrdata(%r10)/* store objectptr in array */
- add $(3*8),%rsp /* free stack space */
- ret
-
-nb_aastore_null:
- add $24,%rsp
- pop xpc /* delete return address from stack */
- sub $3,xpc /* faulting address is return adress - 3 */
- mov string_java_lang_NullPointerException,xptr
- jmp asm_throw_and_handle_exception
-
-nb_aastore_bound:
- add $24,%rsp
- push $0 /*directly below return address*/
- push $0 /*internal*/
- push $0 /*padding*/
- mov %rsi,itmp1
-
- call asm_prepare_native_stackinfo
-
- mov itmp1,%rdi /* move index into a0 */
- call new_arrayindexoutofboundsexception
-
- call asm_remove_native_stackinfo
-
- pop xpc /* just remove one quadword */
- pop xpc /* delete return address */
- sub $3,xpc /* faulting address is return adress - 3 */
- jmp asm_handle_exception
-
-nb_aastore_throw:
- /*call new_arraystoreexception*/
- add $24,%rsp
- pop xpc /* delete return address */
- sub $3,xpc /* faulting address is return adress - 3 */
- mov string_java_lang_ArrayStoreException,xptr
- jmp asm_throw_and_handle_exception
-
-
/******************* function asm_initialize_thread_stack **********************
* *
* initialized a thread stack *
ret
-
-
-/************************ function asm_prepare_native_stackinfo ****************************
-* *
-* creates a stackfame for the begin of a native function (either builtin or not ) *
-* expected stack at begin of function *
-* .... *
-* address of the jit call which invokes the native *
-* begin address of stack frame of the java method *
-* method pointer or 0 (for built ins) *
-* padding for stackframesize 16*n+8 *
-* return address *
-* *
-* at end of function: *
-* ... *
-* address of the jit call which invokes the native *
-* begin address of stack frame of the java method *
-* method pointer or 0 (for built ins) *
-* address of thread specific top of native list *
-* old value of thread specific head *
-* padding for stackframesize 16*n+8) *
-* return address *
-* *
-* .... *
-* This thing is less efficient than the original #define (callerside) *
-* destroyes REG_ITMP2, keeps REG_ITMP1 *
-********************************************************************************************/
-
-
-asm_prepare_native_stackinfo:
- sub $16,%rsp /*space for the 2 new pointers*/
- mov 16(%rsp),itmp2
- mov itmp2,(%rsp)
- push itmp1
- call builtin_asm_get_stackframeinfo
-
- mov itmp1, 32(%rsp)
- mov (itmp1),itmp2
- mov itmp2,24(%rsp)
- mov %rsp,itmp2
- add $24,itmp2
- mov itmp2,(itmp1)
- pop itmp1
- ret
-
-
-
-/************************ function asm_remove _native_stackinfo *******************************************
-* *
-* removes a stackfame for the begin of a native function (either builtin or not) *
-* expected stack at begin of function *
-* address of the jit call which invokes the native *
-* begin address of stack frame of the java method *
-* method pointer or 0 (for built ins) *
-* address thread specific top of native list *
-* old value of thread specific head *
-* padding *
-* return address *
-* *
-* at end of function: *
-* .... *
-* return adresss of the jit call which invokes the native *
-* padding *
-* return address *
-* *
-* *
-* *
-* This thing is less efficient than the original #define (callerside), uses ITMP2,uses ITMP3,keeps ITMP1 *
-***********************************************************************************************************/
-
-asm_remove_native_stackinfo:
- mov 16(%rsp),itmp2
- mov 24(%rsp),itmp3
- mov itmp2,(itmp3)
- pop itmp3
- add $32,%rsp
- push itmp3
- ret
-
-
-
-asm_throw_and_handle_exception:
- push xpc /* the pushed XPC is directly below the java frame*/
- push $0
- push $0
- push $0 /*padding*/
- call asm_prepare_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
- mov itmp1,%rdi
-
- call new_exception
-
- call asm_remove_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
-
- pop xpc
- pop xpc
-
- jmp asm_handle_exception
- ret /*should never be reached */
-
-
-asm_throw_and_handle_hardware_arithmetic_exception:
-
- push xpc
- push $0 /* the pushed XPC is directly below the java frame*/
- push $0
- push $0 /*padding*/
- call asm_prepare_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
-
- mov string_java_lang_ArithmeticException_message,%rsi
- mov string_java_lang_ArithmeticException,%rdi
-
- call new_exception_message
-
- call asm_remove_native_stackinfo /* be aware of the stack effect and calling convention explained above*/
- pop xpc
- pop xpc
-
- jmp asm_handle_exception
- ret /*should never be reached */
-
-
asm_getclassvalues_atomic:
_crit_restart2:
_crit_begin2:
Changes: Christian Ullrich
- $Id: codegen.c 2875 2005-06-30 09:16:21Z twisti $
+ $Id: codegen.c 2956 2005-07-09 14:04:34Z twisti $
*/
}
gen_div_check(src);
- x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
+ x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
- x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
- x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
+ x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
+ x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
- x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
+ x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
x86_64_cltd(cd);
x86_64_idivl_reg(cd, REG_ITMP3);
if (iptr->dst->flags & INMEMORY) {
x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
- x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
+ x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
} else {
M_INTMOVE(RAX, iptr->dst->regoff);
if (iptr->dst->regoff != RDX) {
- x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
+ x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
}
}
break;
}
gen_div_check(src);
- x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
+ x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
- x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
+ x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
- x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
- x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
- x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
+ x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
+ x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
+ x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
x86_64_cltd(cd);
x86_64_idivl_reg(cd, REG_ITMP3);
if (iptr->dst->flags & INMEMORY) {
x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
- x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
+ x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
} else {
M_INTMOVE(RDX, iptr->dst->regoff);
if (iptr->dst->regoff != RDX) {
- x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
+ x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
}
}
break;
break;
- case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
-
- var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
- var_to_reg_int(s2, src->prev, REG_ITMP2);
- if (iptr->op1 == 0) {
- gen_nullptr_check(s1);
- gen_bound_check;
- }
- var_to_reg_int(s3, src, REG_ITMP3);
- x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
- break;
-
case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
break;
+ case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
+
+ var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
+ var_to_reg_int(s2, src->prev, REG_ITMP2);
+/* if (iptr->op1 == 0) { */
+ gen_nullptr_check(s1);
+ gen_bound_check;
+/* } */
+ var_to_reg_int(s3, src, REG_ITMP3);
+
+ M_MOV(s1, rd->argintregs[0]);
+ M_MOV(s3, rd->argintregs[1]);
+ bte = iptr->val.a;
+ x86_64_mov_imm_reg(cd, (ptrint) bte->fp, REG_ITMP1);
+ x86_64_call_reg(cd, REG_ITMP1);
+ M_TEST(REG_RESULT);
+ M_BEQ(0);
+ codegen_addxstorerefs(cd, cd->mcodeptr);
+
+ var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
+ var_to_reg_int(s2, src->prev, REG_ITMP2);
+ var_to_reg_int(s3, src, REG_ITMP3);
+ x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
+ break;
+
+
case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
var_to_reg_int(s1, src->prev, REG_ITMP1);
PATCHER_get_putstatic,
(unresolved_field *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_clinit, fi->class);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
PATCHER_get_putstatic,
(unresolved_field *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_clinit, fi->class);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
PATCHER_get_putstatic,
(unresolved_field *) iptr[1].target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_clinit, fi->class);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
PATCHER_get_putfield,
(unresolved_field *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
PATCHER_get_putfield,
(unresolved_field *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
PATCHER_putfieldconst,
(unresolved_field *) iptr[1].target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
bte->fp, iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_invokestatic_special, um);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_invokevirtual, um);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_invokeinterface, um);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
3 /* test */ + 6 /* jcc */;
if (!super)
- s2 += (showdisassemble ? 5 : 0);
+ s2 += (opt_showdisassemble ? 5 : 0);
/* calculate class checkcast code size */
s3 += 3 /* cmp */ + 6 /* jcc */;
if (!super)
- s3 += (showdisassemble ? 5 : 0);
+ s3 += (opt_showdisassemble ? 5 : 0);
/* if class is not resolved, check which code to call */
if (!super) {
x86_64_test_reg_reg(cd, s1, s1);
- x86_64_jcc(cd, X86_64_CC_Z, 6 + (showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
+ x86_64_jcc(cd, X86_64_CC_Z, 6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_checkcast_instanceof_flags,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
PATCHER_checkcast_instanceof_interface,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
PATCHER_checkcast_class,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
d = reg_of_var(rd, iptr->dst, REG_ITMP3);
M_INTMOVE(s1, d);
store_reg_to_var_int(iptr->dst, d);
-/* if (iptr->dst->flags & INMEMORY) { */
-/* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
-/* } else { */
-/* M_INTMOVE(s1, iptr->dst->regoff); */
-/* } */
}
break;
+ case ICMD_ARRAYCHECKCAST: /* ..., objectref ==> ..., objectref */
+ /* op1: 1... resolved, 0... not resolved */
+
+ var_to_reg_int(s1, src, REG_ITMP1);
+ M_INTMOVE(s1, rd->argintregs[0]);
+
+ bte = iptr->val.a;
+
+ if (!iptr->op1) {
+ codegen_addpatchref(cd, cd->mcodeptr, bte->fp, iptr->target);
+
+ if (opt_showdisassemble) {
+ M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
+ }
+
+ a = 0;
+
+ } else {
+ a = (ptrint) bte->fp;
+ }
+
+ x86_64_mov_imm_reg(cd, (ptrint) iptr->target, rd->argintregs[1]);
+ x86_64_mov_imm_reg(cd, (ptrint) a, REG_ITMP1);
+ x86_64_call_reg(cd, REG_ITMP1);
+ M_TEST(REG_RESULT);
+ M_BEQ(0);
+ codegen_addxcastrefs(cd, cd->mcodeptr);
+
+ var_to_reg_int(s1, src, REG_ITMP1);
+ d = reg_of_var(rd, iptr->dst, REG_ITMP1);
+ M_INTMOVE(s1, d);
+ store_reg_to_var_int(iptr->dst, d);
+ break;
+
case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
/* op1: 0 == array, 1 == class */
3 /* test */ + 4 /* setcc */;
if (!super)
- s2 += (showdisassemble ? 5 : 0);
+ s2 += (opt_showdisassemble ? 5 : 0);
/* calculate class instanceof code size */
s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
if (!super)
- s3 += (showdisassemble ? 5 : 0);
+ s3 += (opt_showdisassemble ? 5 : 0);
x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
if (!super) {
x86_64_test_reg_reg(cd, s1, s1);
- x86_64_jcc(cd, X86_64_CC_Z, (6 + (showdisassemble ? 5 : 0) +
+ x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
7 + 6 + s2 + 5 + s3));
codegen_addpatchref(cd, cd->mcodeptr,
PATCHER_checkcast_instanceof_flags,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
PATCHER_checkcast_instanceof_interface,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
PATCHER_instanceof_class,
(constant_classref *) iptr->target);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
(functionptr) (ptrint) iptr->target,
iptr->val.a);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
/* move index register into REG_ITMP1 */
- M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
+ M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
+
+ x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
+ dseg_adddata(cd, cd->mcodeptr);
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
+
+ if (xcodeptr != NULL) {
+ x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
+
+ } else {
+ xcodeptr = cd->mcodeptr;
+
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_IST(REG_ITMP1, REG_SP, 0 * 8);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
+
+ /* create stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
+
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ /* create exception */
+
+ M_ILD(rd->argintregs[0], REG_SP, 0 * 8);
+ x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
+
+ /* remove stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+
+ x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
+ x86_64_jmp_reg(cd, REG_ITMP3);
+ }
+ }
+
+ /* generate ArrayStoreException stubs */
+
+ xcodeptr = NULL;
+
+ for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
+ if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
+ gen_resolvebranch(cd->mcodebase + bref->branchpos,
+ bref->branchpos,
+ xcodeptr - cd->mcodebase - (10 + 7));
+ continue;
+ }
+
+ gen_resolvebranch(cd->mcodebase + bref->branchpos,
+ bref->branchpos,
+ cd->mcodeptr - cd->mcodebase);
+
+ MCODECHECK(512);
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
- M_AADD(REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
- /*create stackinfo -- begin*/
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
- x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
- x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
- x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*create stackinfo -- end*/
+ /* create stackframe info */
- x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
- x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
+
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
x86_64_call_reg(cd, REG_ITMP3);
- /*remove stackinfo -- begin*/
- x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*remove stackinfo -- end*/
+ /* create exception */
- x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
+ x86_64_mov_imm_reg(cd, (ptrint) new_arraystoreexception,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
+
+ /* remove stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
gen_resolvebranch(cd->mcodebase + bref->branchpos,
bref->branchpos,
- xcodeptr - cd->mcodebase - (10 + 10 + 3));
+ xcodeptr - cd->mcodebase - (10 + 7));
continue;
}
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
- x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
+
+ /* create stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
- /*create stackinfo -- begin*/
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
- x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
- x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
- x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*create stackinfo -- end*/
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
- x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
x86_64_call_reg(cd, REG_ITMP3);
- /*remove stackinfo -- begin*/
- x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*remove stackinfo -- end*/
+ /* create exception */
+
+ x86_64_mov_imm_reg(cd, (ptrint) new_negativearraysizeexception,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
+
+ /* remove stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
- x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
- x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
+ x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
}
}
- /* generate cast check stubs */
+ /* generate ClassCastException stubs */
xcodeptr = NULL;
if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
gen_resolvebranch(cd->mcodebase + bref->branchpos,
bref->branchpos,
- xcodeptr - cd->mcodebase - (10 + 10 + 3));
+ xcodeptr - cd->mcodebase - (10 + 7));
continue;
}
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
- x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
- /*create stackinfo -- begin*/
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
- x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
- x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
- x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*create stackinfo -- end*/
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
+
+ /* create stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
+
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ /* create exception */
- x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
+ x86_64_mov_imm_reg(cd, (ptrint) new_classcastexception, REG_ITMP3);
x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
- /*remove stackinfo -- begin*/
- x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*remove stackinfo -- end*/
+ /* remove stackframe info */
- x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
- x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
+ x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
}
}
- /* generate divide by zero check stubs */
+ /* generate ArithmeticException stubs */
xcodeptr = NULL;
if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
gen_resolvebranch(cd->mcodebase + bref->branchpos,
bref->branchpos,
- xcodeptr - cd->mcodebase - (10 + 10 + 3));
+ xcodeptr - cd->mcodebase - (10 + 7));
continue;
}
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
- x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
- /*create stackinfo -- begin*/
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
- x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
- x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
- x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*create stackinfo -- end*/
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
+
+ /* create stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
- x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
x86_64_call_reg(cd, REG_ITMP3);
- /*remove stackinfo -- begin*/
- x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*remove stackinfo -- end*/
+ /* create exception */
- x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
+ x86_64_mov_imm_reg(cd, (ptrint) new_arithmeticexception, REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
+
+ /* remove stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
- x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
+ x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
}
}
- /* generate exception check stubs */
+ /* generate NullPointerException stubs */
xcodeptr = NULL;
- for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
+ for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
gen_resolvebranch(cd->mcodebase + bref->branchpos,
bref->branchpos,
- xcodeptr - cd->mcodebase - (10 + 10 + 3));
+ xcodeptr - cd->mcodebase - (10 + 7));
continue;
}
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
- x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4*8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3*8);
- x86_64_mov_imm_membase(cd, 0, REG_SP, 2*8);
- x86_64_mov_imm_membase(cd, 0, REG_SP, 1*8);
- x86_64_mov_imm_membase(cd, 0, REG_SP, 0*8);
- x86_64_mov_imm_reg(cd,(u8) asm_prepare_native_stackinfo,REG_ITMP1);
- x86_64_call_reg(cd,REG_ITMP1);
-
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
-#if defined(USE_THREADS) && defined(NATIVE_THREADS)
- x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
- x86_64_call_reg(cd, REG_ITMP1);
- x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
- x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
- x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
-#else
- x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
- x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
- x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
-#endif
- x86_64_mov_reg_reg(cd,REG_ITMP1_XPTR,RDI);
- x86_64_mov_imm_reg(cd,(u8) helper_fillin_stacktrace_always,REG_ITMP1);
- x86_64_call_reg(cd,REG_ITMP1);
- x86_64_mov_reg_reg(cd,REG_RESULT,REG_ITMP1_XPTR);
+ /* create stackframe info */
- x86_64_mov_imm_reg(cd,(u8) asm_remove_native_stackinfo,REG_ITMP2);
- x86_64_call_reg(cd,REG_ITMP2);
-
- x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
- x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
+
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ /* create exception */
+
+ x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+ M_AST(REG_RESULT, REG_SP, 0 * 8);
+	/* remove stackframe info */
- x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+
+ x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
}
}
- /* generate NullpointerException stubs */
+ /* generate ICMD_CHECKEXCEPTION stubs */
xcodeptr = NULL;
- for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
+ for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
gen_resolvebranch(cd->mcodebase + bref->branchpos,
bref->branchpos,
- xcodeptr - cd->mcodebase - (10 + 10 + 3));
+ xcodeptr - cd->mcodebase - (10 + 7));
continue;
}
x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
dseg_adddata(cd, cd->mcodeptr);
- x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
- M_AADD(REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
+ M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
if (xcodeptr != NULL) {
x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
} else {
xcodeptr = cd->mcodeptr;
- /*create stackinfo -- begin*/
- x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
- x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
- x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
- x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
- x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*create stackinfo -- end*/
+ M_ASUB_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
+ M_AST(REG_ITMP2_XPC, REG_SP, 1 * 8);
+
+ /* create stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
- x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), rd->argintregs[2]);
+ M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_inline_stackframeinfo,
+ REG_ITMP3);
x86_64_call_reg(cd, REG_ITMP3);
- /*remove stackinfo -- begin*/
- x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
- x86_64_call_reg(cd,REG_ITMP3);
- /*remove stackinfo -- end*/
+#if defined(USE_THREADS) && defined(NATIVE_THREADS)
+ x86_64_mov_imm_reg(cd, (ptrint) &builtin_get_exceptionptrptr,
+ REG_ITMP1);
+ x86_64_call_reg(cd, REG_ITMP1);
- x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
- x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
+ M_ALD(REG_ITMP3, REG_RESULT, 0);
+ x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
+ M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
+#else
+ x86_64_mov_imm_reg(cd, (ptrint) &_exceptionptr, REG_ITMP3);
+ M_ALD(REG_ITMP1_XPTR, REG_ITMP3, 0);
+ x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
+#endif
+
+ M_AST(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+
+ /* call fillInStackTrace */
+
+ M_MOV(REG_ITMP1_XPTR, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_call_fillInStackTrace,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+	/* remove stackframe info */
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(2 * 8, rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP3);
+ x86_64_call_reg(cd, REG_ITMP3);
+
+ M_ALD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
+ M_ALD(REG_ITMP2_XPC, REG_SP, 1 * 8);
+ M_AADD_IMM(2 * 8 + sizeof(stackframeinfo), REG_SP);
x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
x86_64_jmp_reg(cd, REG_ITMP3);
/* calculate stack frame size */
- stackframesize = 4 + INT_ARG_CNT + FLT_ARG_CNT + nmd->memuse;
+ stackframesize =
+ sizeof(stackframeinfo) / SIZEOF_VOID_P +
+ INT_ARG_CNT + FLT_ARG_CNT +
+ nmd->memuse;
if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
stackframesize++;
if ((m->flags & ACC_STATIC) && !m->class->initialized) {
codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
/* create dynamic stack info */
- x86_64_mov_imm_membase(cd, 0, REG_SP, (stackframesize - 1) * 8);
- x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);
- M_AST(REG_ITMP1, REG_SP, (stackframesize - 2) * 8);
- x86_64_mov_imm_reg(cd, (ptrint) builtin_asm_get_stackframeinfo, REG_ITMP1);
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(stackframesize * 8 - sizeof(stackframeinfo), rd->argintregs[0]);
+ x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
+ M_MOV(REG_SP, rd->argintregs[2]);
+ M_AADD_IMM(stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
+ M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_create_native_stackframeinfo,
+ REG_ITMP1);
x86_64_call_reg(cd, REG_ITMP1);
-
- M_AST(REG_RESULT, REG_SP, (stackframesize - 3) * 8);
- M_ALD(REG_ITMP2, REG_RESULT, 0);
- M_AST(REG_ITMP2, REG_SP, (stackframesize - 4) * 8);
- M_MOV(REG_SP, REG_ITMP2);
- M_AADD_IMM((stackframesize - 4) * 8, REG_ITMP2);
- M_AST(REG_ITMP2, REG_RESULT, 0);
-
+
STATS({
x86_64_mov_imm_reg(cd, (ptrint) nativeinvokation, REG_ITMP1);
x86_64_call_reg(cd, REG_ITMP1);
if (f == NULL) {
codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m);
- if (showdisassemble) {
+ if (opt_showdisassemble) {
M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
}
}
x86_64_call_reg(cd, REG_ITMP1);
- /* remove dynamic stack info */
+ /* remove native stackframe info */
- M_LLD(REG_ITMP2, REG_SP, (stackframesize - 4) * 8);
- M_LLD(REG_ITMP3, REG_SP, (stackframesize - 3) * 8);
- M_LST(REG_ITMP2, REG_ITMP3, 0);
+ if (IS_INT_LNG_TYPE(md->returntype.type))
+ M_LST(REG_RESULT, REG_SP, 0 * 8);
+ else
+ M_DST(REG_FRESULT, REG_SP, 0 * 8);
+
+ M_MOV(REG_SP, rd->argintregs[0]);
+ M_AADD_IMM(stackframesize * 8 - sizeof(stackframeinfo), rd->argintregs[0]);
+ x86_64_mov_imm_reg(cd, (ptrint) stacktrace_remove_stackframeinfo,
+ REG_ITMP1);
+ x86_64_call_reg(cd, REG_ITMP1);
+
+ if (IS_INT_LNG_TYPE(md->returntype.type))
+ M_LLD(REG_RESULT, REG_SP, 0 * 8);
+ else
+ M_DLD(REG_FRESULT, REG_SP, 0 * 8);
/* generate call trace */