/* src/vm/jit/x86_64/asmpart.S - Java-C interface functions for x86_64
- Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
+ Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
J. Wenninger, Institut f. Computersprachen - TU Wien
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- Contact: cacao@cacaojvm.org
-
- Authors: Andreas Krall
- Reinhard Grafl
- Christian Thalinger
-
- Changes: Edwin Steiner
-
- $Id: asmpart.S 5041 2006-06-20 09:10:05Z twisti $
+ $Id: asmpart.S 7486 2007-03-08 13:50:07Z twisti $
*/
.globl asm_vm_call_method_float
.globl asm_vm_call_method_double
.globl asm_vm_call_method_exception_handler
+ .globl asm_vm_call_method_end
.globl asm_call_jit_compiler
+
.globl asm_handle_exception
.globl asm_handle_nat_exception
- .globl asm_wrapper_patcher
+ .globl asm_abstractmethoderror
+
+ .globl asm_patcher_wrapper
+#if defined(ENABLE_REPLACEMENT)
.globl asm_replacement_out
.globl asm_replacement_in
+#endif
.globl asm_builtin_f2i
.globl asm_builtin_f2l
.globl asm_builtin_d2i
.globl asm_builtin_d2l
+ .globl asm_compare_and_swap
+ .globl asm_memory_barrier
+
.globl asm_criticalsections
.globl asm_getclassvalues_atomic
movq offvmargdata(itmp2),fa7
jmp L_register_copy
+asm_vm_call_method_end:
+	nop                                 /* end marker after asm_vm_call_method  */
+	                                    /* (presumably used to bound the method */
+	                                    /* for stack walking -- TODO confirm    */
+	                                    /* against the code registering it)     */
/****************** function asm_call_jit_compiler *****************************
* *
jmp *v0 /* ...and now call the new method */
L_asm_call_jit_compiler_exception:
-#if defined(ENABLE_THREADS)
- call builtin_asm_get_exceptionptrptr@PLT
- mov v0,itmp2 /* v0 == xptr */
-#else
- lea _no_threads_exceptionptr(%rip),itmp2
-#endif
- mov (itmp2),xptr /* get the exception pointer */
- movl $0,(itmp2) /* clear exception pointer */
-
+ call exceptions_get_and_clear_exception@PLT
pop xpc /* delete return address */
- sub $5,xpc /* faulting address is ra - 5 */
+ sub $3,xpc /* faulting address is ra - 3 */
jmp L_asm_handle_exception
mov t0,4*8(sp) /* save maybe-leaf flag */
mov xpc,a0 /* exception pc */
- call codegen_findmethod@PLT
+ call codegen_get_pv_from_pc@PLT
mov v0,2*8(sp) /* save data segment pointer */
mov 0*8(sp),a0 /* pass exception pointer */
jmp L_asm_handle_exception_stack_loop
-/* asm_wrapper_patcher *********************************************************
+/* asm_abstractmethoderror *****************************************************
+
+   Creates and throws an AbstractMethodError.
+
+*******************************************************************************/
+
+asm_abstractmethoderror:
+	mov     sp,a0                       /* pass java sp                       */
+	add     $1*8,a0                     /* skip the return-address slot       */
+	mov     0*8(sp),a1                  /* pass exception address             */
+	sub     $3,a1                       /* back up to the faulting call site; */
+	                                    /* 3 = call insn length, presumably   */
+	                                    /* -- confirm against codegen         */
+	call    exceptions_asm_new_abstractmethoderror@PLT
+	/* exception pointer is return value */
+	pop     xpc                         /* get exception address              */
+	sub     $3,xpc                      /* exception address is ra - 3        */
+	jmp     L_asm_handle_exception
+
+
+/* asm_patcher_wrapper *********************************************************
   XXX
+
+   Saves the argument and temporary registers, calls the C function
+   patcher_wrapper() (which dispatches to the actual patcher function),
+   and afterwards either returns into the freshly patched code or
+   forwards a pending exception to the exception handler.
+
*******************************************************************************/
-asm_wrapper_patcher:
+asm_patcher_wrapper:
	push    bp                          /* save base pointer                  */
	mov     sp,bp                       /* move actual sp to bp               */
-	sub     $((3+ARG_CNT+TMP_CNT)*8+sizestackframeinfo),sp
+	sub     $(3+ARG_CNT+TMP_CNT)*8,sp
	and     $0xfffffffffffffff0,sp      /* align sp to 16-byte (this is for   */
	                                    /* leaf functions)                    */
	mov     itmp1,0*8(sp)               /* save itmp1 and itmp2               */
	mov     itmp2,1*8(sp)               /* can be used by some instructions   */
-	mov     sp,a0                       /* create stackframe info             */
-	add     $((3+ARG_CNT+TMP_CNT)*8),a0
-	xor     a1,a1                       /* if pv is NULL, use findmethod      */
-	mov     bp,a2                       /* pass java sp                       */
-	add     $((6+1)*8),a2
-	mov     ((5+1)*8)(bp),a3            /* pass ra to java function           */
-	mov     a3,a4                       /* xpc is equal to ra                 */
-	call    stacktrace_create_extern_stackframeinfo@PLT
-
-	mov     bp,a0                       /* pass stack pointer                 */
-	add     $((1+1)*8),a0               /* skip function pointer              */
-	mov     1*8(bp),itmp3               /* get function pointer               */
-	call    *itmp3                      /* call the patcher function          */
+	mov     bp,a0                       /* pass SP of patcher stub            */
+	add     $(1*8),a0                   /* skip the saved bp pushed above     */
+	mov     $0,a1                       /* pass PV (if NULL, use findmethod)  */
+	mov     $0,a2                       /* pass RA (it's on the stack)        */
+	call    patcher_wrapper@PLT
	mov     v0,2*8(sp)                  /* save return value                  */
-	mov     sp,a0                       /* remove stackframe info             */
-	add     $((3+ARG_CNT+TMP_CNT)*8),a0
-	call    stacktrace_remove_stackframeinfo@PLT
-
	RESTORE_ARGUMENT_REGISTERS(3)
	RESTORE_TEMPORARY_REGISTERS(3+ARG_CNT)
	mov     bp,sp                       /* restore original sp                */
	pop     bp                          /* restore bp                         */
-	add     $(5*8),sp                   /* remove patcher stackframe, keep ra */
+	add     $(5*8),sp                   /* remove patcher stackframe, keep RA */
	test    itmp3,itmp3                 /* exception thrown?                  */
+	                                    /* NOTE(review): itmp3 presumably     */
+	                                    /* holds patcher_wrapper's result,    */
+	                                    /* reloaded from 2*8(sp) by a line    */
+	                                    /* not visible in this hunk--confirm  */
-	jz      L_asm_wrapper_patcher_exception
+	jne     L_asm_patcher_wrapper_exception
	ret                                 /* call new patched code              */
-L_asm_wrapper_patcher_exception:
-#if defined(ENABLE_THREADS)
-	call    builtin_asm_get_exceptionptrptr@PLT
-	mov     v0,itmp2                    /* v0 == xptr                         */
-#else
-	mov     _no_threads_exceptionptr,itmp2
-#endif
-	mov     (itmp2),xptr                /* get the exception pointer          */
-	movl    $0,(itmp2)                  /* clear exception pointer            */
-
+L_asm_patcher_wrapper_exception:
+	mov     itmp3,xptr                  /* get exception                      */
	pop     xpc                         /* get and remove return address      */
	jmp     L_asm_handle_exception
+#if defined(ENABLE_REPLACEMENT)
/* asm_replacement_out *********************************************************
This function never returns!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
-	mov     a0,%rbp                     /* executionstate *es */
+	/* get arguments */
+	mov     a1,s1                       /* replace_safestack_t *st            */
+	mov     a0,%rbp                     /* executionstate *es == safe stack   */
+
+	/* switch to the safe stack */
+	mov     %rbp,sp
+
+	/* call replace_build_execution_state(st) */
+	mov     s1,a0
+	call    replace_build_execution_state@PLT
	/* set new sp */
-	mov     (offes_sp)(%rbp),%rsp
-
-	/* store address of new code */
-	push    (offes_pc)(%rbp)
-
+	mov     (offes_sp)(%rbp),sp
+
+	/* push address of new code */
+	pushq   (offes_pc)(%rbp)
+
+	/* allocate an executionstate_t on the stack */
+	sub     $(sizeexecutionstate),sp
+
+	/* call replace_free_safestack(st,& of allocated executionstate_t) */
+	mov     sp,a1                       /* pass &executionstate_t (on stack)  */
+	mov     s1,a0                       /* pass st                            */
+	call    replace_free_safestack@PLT
+
+	/* the executionstate_t at sp now holds the target register values    */
+	/* (presumably filled in by replace_free_safestack -- confirm)        */
+
	/* copy registers from execution state */
-	movq    (XMM0 *8+offes_fltregs)(%rbp),%xmm0
-	movq    (XMM1 *8+offes_fltregs)(%rbp),%xmm1
-	movq    (XMM2 *8+offes_fltregs)(%rbp),%xmm2
-	movq    (XMM3 *8+offes_fltregs)(%rbp),%xmm3
-	movq    (XMM4 *8+offes_fltregs)(%rbp),%xmm4
-	movq    (XMM5 *8+offes_fltregs)(%rbp),%xmm5
-	movq    (XMM6 *8+offes_fltregs)(%rbp),%xmm6
-	movq    (XMM7 *8+offes_fltregs)(%rbp),%xmm7
-	movq    (XMM8 *8+offes_fltregs)(%rbp),%xmm8
-	movq    (XMM9 *8+offes_fltregs)(%rbp),%xmm9
-	movq    (XMM10*8+offes_fltregs)(%rbp),%xmm10
-	movq    (XMM11*8+offes_fltregs)(%rbp),%xmm11
-	movq    (XMM12*8+offes_fltregs)(%rbp),%xmm12
-	movq    (XMM13*8+offes_fltregs)(%rbp),%xmm13
-	movq    (XMM14*8+offes_fltregs)(%rbp),%xmm14
-	movq    (XMM15*8+offes_fltregs)(%rbp),%xmm15
-
-	mov     (RAX*8+offes_intregs)(%rbp),%rax
-	mov     (RBX*8+offes_intregs)(%rbp),%rbx
-	mov     (RCX*8+offes_intregs)(%rbp),%rcx
-	mov     (RDX*8+offes_intregs)(%rbp),%rdx
-	mov     (RSI*8+offes_intregs)(%rbp),%rsi
-	mov     (RDI*8+offes_intregs)(%rbp),%rdi
-	mov     (R8 *8+offes_intregs)(%rbp),%r8
-	mov     (R9 *8+offes_intregs)(%rbp),%r9
-	mov     (R10*8+offes_intregs)(%rbp),%r10
-	mov     (R11*8+offes_intregs)(%rbp),%r11
-	mov     (R12*8+offes_intregs)(%rbp),%r12
-	mov     (R13*8+offes_intregs)(%rbp),%r13
-	mov     (R14*8+offes_intregs)(%rbp),%r14
-	mov     (R15*8+offes_intregs)(%rbp),%r15
-
-	mov     (RBP*8+offes_intregs)(%rbp),%rbp
+	movq    (XMM0 *8+offes_fltregs)(sp),%xmm0
+	movq    (XMM1 *8+offes_fltregs)(sp),%xmm1
+	movq    (XMM2 *8+offes_fltregs)(sp),%xmm2
+	movq    (XMM3 *8+offes_fltregs)(sp),%xmm3
+	movq    (XMM4 *8+offes_fltregs)(sp),%xmm4
+	movq    (XMM5 *8+offes_fltregs)(sp),%xmm5
+	movq    (XMM6 *8+offes_fltregs)(sp),%xmm6
+	movq    (XMM7 *8+offes_fltregs)(sp),%xmm7
+	movq    (XMM8 *8+offes_fltregs)(sp),%xmm8
+	movq    (XMM9 *8+offes_fltregs)(sp),%xmm9
+	movq    (XMM10*8+offes_fltregs)(sp),%xmm10
+	movq    (XMM11*8+offes_fltregs)(sp),%xmm11
+	movq    (XMM12*8+offes_fltregs)(sp),%xmm12
+	movq    (XMM13*8+offes_fltregs)(sp),%xmm13
+	movq    (XMM14*8+offes_fltregs)(sp),%xmm14
+	movq    (XMM15*8+offes_fltregs)(sp),%xmm15
+
+	mov     (RAX*8+offes_intregs)(sp),%rax
+	mov     (RBX*8+offes_intregs)(sp),%rbx
+	mov     (RCX*8+offes_intregs)(sp),%rcx
+	mov     (RDX*8+offes_intregs)(sp),%rdx
+	mov     (RSI*8+offes_intregs)(sp),%rsi
+	mov     (RDI*8+offes_intregs)(sp),%rdi
+	mov     (RBP*8+offes_intregs)(sp),%rbp
+	mov     (R8 *8+offes_intregs)(sp),%r8
+	mov     (R9 *8+offes_intregs)(sp),%r9
+	mov     (R10*8+offes_intregs)(sp),%r10
+	mov     (R11*8+offes_intregs)(sp),%r11
+	mov     (R12*8+offes_intregs)(sp),%r12
+	mov     (R13*8+offes_intregs)(sp),%r13
+	mov     (R14*8+offes_intregs)(sp),%r14
+	mov     (R15*8+offes_intregs)(sp),%r15
+
+	/* pop the execution state off the stack */
+	add     $(sizeexecutionstate),sp
+
+	/* sp is now back at the pc pushed above; ret transfers to it */
	/* jump to new code */
	ret
+#endif /* defined(ENABLE_REPLACEMENT) */
+
/* asm_builtin_x2x *************************************************************
* *
ret
+/* asm_compare_and_swap ********************************************************
+
+   Does an atomic compare and swap. Required for the lock
+   implementation.
+
+   Contract (per CMPXCHG): if *a0 == a1 then *a0 = a2; in either case
+   the previous value of *a0 is returned in v0 (%rax).
+
+*******************************************************************************/
+
+asm_compare_and_swap:
+	mov     a1,v0                       /* v0 is %rax                         */
+	lock cmpxchg a2,(a0)                /* atomic: cmp %rax,(a0); eq -> store */
+	ret                                 /* old value of (a0) is in v0         */
+
+
+/* asm_memory_barrier **********************************************************
+
+   A memory barrier for the Java Memory Model.
+
+   MFENCE is a full fence: all loads and stores before it become
+   globally visible before any load or store after it.
+
+*******************************************************************************/
+
+asm_memory_barrier:
+	mfence                              /* full load/store serialization      */
+	ret
+
+
asm_getclassvalues_atomic:
_crit_restart:
_crit_begin: