/* src/vm/jit/i386/asmpart.S - Java-C interface functions for i386
- Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
+ Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
J. Wenninger, Institut f. Computersprachen - TU Wien
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- Contact: cacao@cacaojvm.org
-
- Authors: Andreas Krall
- Reinhard Grafl
- Christian Thalinger
-
- Changes: Joseph Wenninger
- Edwin Steiner
-
- $Id: asmpart.S 5145 2006-07-17 11:48:38Z twisti $
+ $Id: asmpart.S 7454 2007-03-05 15:40:48Z tbfg $
*/
#include "config.h"
+#include "md-asm.h"
+
#include "vm/jit/i386/arch.h"
#include "vm/jit/i386/md-abi.h"
-#include "vm/jit/i386/md-asm.h"
#include "vm/jit/i386/offsets.h"
#include "vm/jit/abi-asm.h"
.globl asm_vm_call_method_float
.globl asm_vm_call_method_double
.globl asm_vm_call_method_exception_handler
+ .globl asm_vm_call_method_end
.globl asm_call_jit_compiler
.globl asm_handle_nat_exception
.globl asm_patcher_wrapper
+#if defined(ENABLE_REPLACEMENT)
.globl asm_replacement_out
.globl asm_replacement_in
+#endif
.globl asm_builtin_f2i
.globl asm_builtin_f2l
.globl asm_builtin_d2i
.globl asm_builtin_d2l
+ .globl asm_compare_and_swap
+ .globl asm_memory_barrier
+
.globl asm_criticalsections
.globl asm_getclassvalues_atomic
push xptr /* pass exception pointer */
call builtin_throw_exception
add $4,sp
+asm_vm_call_method_end:
jmp L_asm_vm_call_method_return
asm_call_jit_compiler:
L_asm_call_jit_compiler: /* required for PIC code */
- sub $(4*4),sp /* create stack frame */
+ sub $(4*4),sp /* keep stack 16-byte aligned */
mov itmp1,0*4(sp) /* pass methodinfo pointer */
mov mptr,1*4(sp) /* pass method pointer */
asm_handle_exception:
L_asm_handle_exception: /* required for PIC code */
- sub $((ARG_CNT+TMP_CNT)*4),sp /* create maybe-leaf stackframe */
+ sub $((ARG_CNT+TMP_CNT+3)*4),sp /* keep stack 16-byte aligned */
SAVE_ARGUMENT_REGISTERS(0) /* we save arg and temp registers in */
SAVE_TEMPORARY_REGISTERS(ARG_CNT) /* case this is a leaf method */
- mov $((ARG_CNT+TMP_CNT)*4),itmp3/* prepare a3 for handle_exception */
+ mov $((ARG_CNT+TMP_CNT+3)*4),itmp3 /* prepare a3 for handle_exception */
mov $1,t0 /* set maybe-leaf flag */
L_asm_handle_exception_stack_loop:
- sub $(10*4),sp /* create stackframe */
+ sub $(12*4),sp /* keep stack 16-byte aligned */
mov xptr,4*4(sp) /* save exception pointer */
mov xpc,5*4(sp) /* save exception pc */
add sp,itmp3 /* calculate Java sp into a3... */
- add $(10*4),itmp3
+ add $(12*4),itmp3
mov itmp3,7*4(sp) /* ...and save it */
mov t0,8*4(sp) /* save maybe-leaf flag */
mov xpc,0*4(sp) /* pass exception pc */
- call codegen_findmethod
+ call codegen_get_pv_from_pc
mov v0,6*4(sp) /* save data segment pointer */
mov 4*4(sp),itmp3 /* pass exception pointer */
mov v0,xpc /* move handlerpc into xpc */
mov 4*4(sp),xptr /* restore exception pointer */
mov 8*4(sp),t0 /* get maybe-leaf flag */
- add $(10*4),sp /* free stackframe */
+ add $(12*4),sp /* free stackframe */
test t0,t0 /* test for maybe-leaf flag */
jz L_asm_handle_exception_no_leaf
RESTORE_ARGUMENT_REGISTERS(0) /* if this is a leaf method, we have */
RESTORE_TEMPORARY_REGISTERS(ARG_CNT)/* to restore arg and temp registers */
- add $((ARG_CNT+TMP_CNT)*4),sp /* remove maybe-leaf stackframe */
+ add $((ARG_CNT+TMP_CNT+3)*4),sp /* remove maybe-leaf stackframe */
L_asm_handle_exception_no_leaf:
jmp *xpc /* jump to exception handler */
mov 4*4(sp),xptr /* restore exception pointer */
mov 6*4(sp),itmp3 /* restore data segment pointer */
mov 8*4(sp),t0 /* get maybe-leaf flag */
- add $(10*4),sp /* free stackframe */
+ add $(12*4),sp /* free stackframe */
test t0,t0
jz L_asm_handle_exception_no_leaf_stack
- add $((ARG_CNT+TMP_CNT)*4),sp /* remove maybe-leaf stackframe */
+ add $((ARG_CNT+TMP_CNT+3)*4),sp /* remove maybe-leaf stackframe */
xor t0,t0 /* clear the maybe-leaf flag */
L_asm_handle_exception_no_leaf_stack:
*******************************************************************************/
asm_abstractmethoderror:
- sub $(2*4),sp /* create stack frame */
+ sub $(3*4),sp /* keep stack 16-byte aligned */
mov sp,itmp1 /* pass java sp */
- add $((1+2)*4),itmp1
+ add $((1+3)*4),itmp1
mov itmp1,0*4(sp)
- mov 2*4(sp),itmp2 /* pass exception address */
+ mov 3*4(sp),itmp2 /* pass exception address */
sub $2,itmp2
mov itmp2,1*4(sp)
call exceptions_asm_new_abstractmethoderror
/* exception pointer is return value */
- add $(2*4),sp /* remove stack frame */
+ add $(3*4),sp /* remove stack frame */
pop xpc /* get exception address */
sub $2,xpc /* exception address is ra - 2 */
*******************************************************************************/
asm_patcher_wrapper:
/* Trampoline around the C function patcher_wrapper(): saves itmp1/itmp2,
   passes the SP of the patcher stub (so the C side can find the patcher
   arguments above this frame), then either falls through into the freshly
   patched code via ret, or dispatches the returned exception through
   L_asm_handle_exception.  Diff markers ('-'/'+') preserved verbatim. */
- sub $((2+4)*4),sp /* create stack frame */
+ sub $((1+4+4)*4),sp /* keep stack 16-byte aligned */
mov itmp1,(0+4)*4(sp) /* save itmp1 and itmp2 */
mov itmp2,(1+4)*4(sp)
mov sp,itmp1 /* pass SP of patcher stub */
- add $((2+4)*4),itmp1
+ add $((1+4+4)*4),itmp1
mov itmp1,0*4(sp)
movl $0,1*4(sp) /* pass PV (if NULL, use findmethod) */
movl $0,2*4(sp) /* pass RA (it's on the stack) */
call patcher_wrapper
- mov v0,0*4(sp) /* save return value */
+ mov v0,itmp3 /* save return value */
mov (0+4)*4(sp),itmp1 /* restore itmp1 and itmp2 */
mov (1+4)*4(sp),itmp2
- mov 0*4(sp),itmp3 /* restore return value */
- add $((6+2+4)*4),sp /* remove stack frame, keep RA */
-
test itmp3,itmp3 /* exception thrown? non-NULL result = exception object */
jne L_asm_patcher_wrapper_exception
+ mov (5+1+4+4)*4(sp),itmp3 /* restore itmp3 */
+ add $((6+1+4+4)*4),sp /* remove stack frame, keep RA */
+
ret /* jump to new patched code */
L_asm_patcher_wrapper_exception:
+ add $((6+1+4+4)*4),sp /* remove stack frame, keep RA */
mov itmp3,xptr /* get exception */
pop xpc /* get and remove return address */
jmp L_asm_handle_exception
+#if defined(ENABLE_REPLACEMENT)
/* asm_replacement_out *********************************************************
sub $(sizeexecutionstate + REPLACEMENT_ROOM),sp
/* save registers in execution state */
- mov %eax,(EAX*8+offes_intregs)(sp)
- mov %ebx,(EBX*8+offes_intregs)(sp)
- mov %ecx,(ECX*8+offes_intregs)(sp)
- mov %edx,(EDX*8+offes_intregs)(sp)
- mov %esi,(ESI*8+offes_intregs)(sp)
- mov %edi,(EDI*8+offes_intregs)(sp)
- mov %ebp,(EBP*8+offes_intregs)(sp)
- movl $0 ,(ESP*8+offes_intregs)(sp) /* not used */
-
-#ifndef NDEBUG
- /* clear high 32bit */
- movl $0,(4+0*8+offes_intregs)(sp)
- movl $0,(4+1*8+offes_intregs)(sp)
- movl $0,(4+2*8+offes_intregs)(sp)
- movl $0,(4+3*8+offes_intregs)(sp)
- movl $0,(4+4*8+offes_intregs)(sp)
- movl $0,(4+5*8+offes_intregs)(sp)
- movl $0,(4+6*8+offes_intregs)(sp)
- movl $0,(4+7*8+offes_intregs)(sp)
-#endif
+ mov %eax,(EAX*4+offes_intregs)(sp)
+ mov %ebx,(EBX*4+offes_intregs)(sp)
+ mov %ecx,(ECX*4+offes_intregs)(sp)
+ mov %edx,(EDX*4+offes_intregs)(sp)
+ mov %esi,(ESI*4+offes_intregs)(sp)
+ mov %edi,(EDI*4+offes_intregs)(sp)
+ mov %ebp,(EBP*4+offes_intregs)(sp)
+ movl $0 ,(ESP*4+offes_intregs)(sp) /* not used */
/* calculate sp of method */
mov sp,itmp1
push sp /* arg1: execution state */
push itmp1 /* arg0: replacement point */
call replace_me /* call C function replace_me */
- call abort /* NEVER REACHED */
+
/* asm_replacement_in **********************************************************
This function never returns!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
- mov 4(sp),%ebp /* executionstate *es */
+ /* get arguments */
+ mov 8(sp),%esi /* replace_safestack_t *st */
+ mov 4(sp),%ebp /* executionstate *es == safe stack */
+
+ /* switch to the safe stack and build a stack frame */
+ mov %ebp,sp
+ sub $(1*4),sp
+
+ /* call replace_build_execution_state(st) */
+ mov %esi,(0*4)(sp)
+ call replace_build_execution_state
/* set new sp */
- mov (offes_sp)(%ebp),%esp
-
- /* store address of new code */
+ mov (offes_sp)(%ebp),sp
+
+ /* push address of new code */
push (offes_pc)(%ebp)
-
- /* copy registers from execution state */
- mov (EAX*8+offes_intregs)(%ebp),%eax
- mov (EBX*8+offes_intregs)(%ebp),%ebx
- mov (ECX*8+offes_intregs)(%ebp),%ecx
- mov (EDX*8+offes_intregs)(%ebp),%edx
- mov (ESI*8+offes_intregs)(%ebp),%esi
- mov (EDI*8+offes_intregs)(%ebp),%edi
- mov (EBP*8+offes_intregs)(%ebp),%ebp
+ /* allocate an executionstate_t on the stack */
+ sub $(sizeexecutionstate),sp
- /* jump to new code */
+ /* call replace_free_safestack(st,& of allocated executionstate_t) */
+ push sp /* tmpes */
+ push %esi /* st */
+ call replace_free_safestack
+ add $(2*4),sp
+
+ /* copy registers from execution state */
+ mov (EAX*4+offes_intregs)(sp),%eax
+ mov (EBX*4+offes_intregs)(sp),%ebx
+ mov (ECX*4+offes_intregs)(sp),%ecx
+ mov (EDX*4+offes_intregs)(sp),%edx
+ mov (ESI*4+offes_intregs)(sp),%esi
+ mov (EDI*4+offes_intregs)(sp),%edi
+ mov (EBP*4+offes_intregs)(sp),%ebp
+
+ /* pop the execution state off the stack */
+ add $(sizeexecutionstate),sp
+
+ /* jump to new code, hold your thumbs! ;) */
ret
+#endif /* defined(ENABLE_REPLACEMENT) */
+
+
/************************ function asm_builtin_x2x *****************************
* *
* Wrapper functions for corner cases *
*******************************************************************************/
asm_builtin_f2i:
/* Corner-case wrapper: converts the float in st0 to int by spilling it
   to the stack as the single C argument and calling builtin_f2i.
   Result comes back in %eax (i386 cdecl). */
- sub $4,%esp
+ sub $(3*4),%esp /* 12-byte frame keeps stack 16-byte aligned at the call */
fsts (%esp) /* store st0 as 4-byte float argument */
call builtin_f2i
- add $4,%esp
+ add $(3*4),%esp
ret
asm_builtin_d2i:
/* Corner-case wrapper: converts the double in st0 to int by spilling it
   to the stack as the single C argument and calling builtin_d2i. */
- sub $8,%esp
+ sub $(3*4),%esp /* 12-byte frame keeps stack 16-byte aligned at the call */
fstl (%esp) /* store st0 as 8-byte double argument */
call builtin_d2i
- add $8,%esp
+ add $(3*4),%esp
ret
asm_builtin_f2l:
/* Corner-case wrapper: converts the float in st0 to a 64-bit long by
   spilling it to the stack and calling builtin_f2l
   (64-bit cdecl result presumably in %edx:%eax — standard i386 ABI). */
- sub $4,%esp
+ sub $(3*4),%esp /* 12-byte frame keeps stack 16-byte aligned at the call */
fsts (%esp) /* store st0 as 4-byte float argument */
call builtin_f2l
- add $4,%esp
+ add $(3*4),%esp
ret
asm_builtin_d2l:
/* Corner-case wrapper: converts the double in st0 to a 64-bit long by
   spilling it to the stack and calling builtin_d2l. */
- sub $8,%esp
+ sub $(3*4),%esp /* 12-byte frame keeps stack 16-byte aligned at the call */
fstl (%esp) /* store st0 as 8-byte double argument */
call builtin_d2l
- add $8,%esp
+ add $(3*4),%esp
ret
+/* asm_compare_and_swap ********************************************************
+
+ Does an atomic compare and swap. Required for the lock
+ implementation.
+
+ Atomically do the following: Check if the location still contains
+ `oldval`. If so, replace it by `newval` and return `oldval`.
+
+ RETURN VALUE:
+ the old value at *p
+
+ long compare_and_swap(volatile long *p, long oldval, long newval);
+
+*******************************************************************************/
+
+asm_compare_and_swap:
+ mov 1*4(sp),%ecx /* load p into a register */
+ mov 2*4(sp),%eax /* load oldval into return register */
+ mov 3*4(sp),%edx /* load newval into a register */
+ /* cmpxchg: if (*p == %eax) { *p = %edx }; %eax receives old *p either
+ way, which is exactly the declared return value of this function */
+ lock; cmpxchgl %edx,0(%ecx)
+ ret
+
+
+/* asm_memory_barrier **********************************************************
+
+ A memory barrier for the Java Memory Model.
+
+*******************************************************************************/
+
+asm_memory_barrier:
+ /* a LOCKed read-modify-write of the stack top is a no-op that acts as
+ a full memory fence on IA-32 (cheaper than mfence on older CPUs) */
+ lock; add $0,0(sp)
+ ret
+
+
asm_getclassvalues_atomic:
_crit_restart2:
mov 4(%esp),%ecx /* super */