/* src/vm/jit/mips/asmpart.S - Java-C interface functions for MIPS
- Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
+ Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
J. Wenninger, Institut f. Computersprachen - TU Wien
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
- Contact: cacao@cacaojvm.org
-
- Authors: Andreas Krall
-
- Changes: Christian Thalinger
- Edwin Steiner
-
- $Id: asmpart.S 4706 2006-03-30 09:20:42Z twisti $
-
*/
#include "vm/jit/mips/md-abi.h"
#include "vm/jit/mips/md-asm.h"
-#include "vm/jit/mips/offsets.h"
-#include "vm/jit/abi.h"
+#include "vm/jit/abi-asm.h"
#include "vm/jit/methodheader.h"
.globl asm_vm_call_method_long
.globl asm_vm_call_method_float
.globl asm_vm_call_method_double
+ .globl asm_vm_call_method_exception_handler
+ .globl asm_vm_call_method_end
.globl asm_call_jit_compiler
+
.globl asm_handle_exception
.globl asm_handle_nat_exception
- .globl asm_wrapper_patcher
+ .globl asm_abstractmethoderror
+#if defined(ENABLE_REPLACEMENT)
.globl asm_replacement_out
.globl asm_replacement_in
-
- .globl asm_perform_threadswitch
- .globl asm_initialize_thread_stack
- .globl asm_switchstackandcall
- .globl asm_getclassvalues_atomic
- .globl asm_criticalsections
+#endif
.globl compare_and_swap
-/********************* function asm_calljavafunction ***************************
+/* asm_vm_call_method **********************************************************
* *
* This function calls a Java method (which possibly needs compilation).      *
* All argument registers are loaded from the passed argument array;          *
* any remaining arguments are copied onto the stack.                         *
.align 3
+#if SIZEOF_VOID_P == 8
+
.dword 0 /* catch type all */
- .dword calljava_xhandler2 /* handler pc */
- .dword calljava_xhandler2 /* end pc */
- .dword asm_vm_call_method /* start pc */
+ .dword 0 /* handler pc */
+ .dword 0 /* end pc */
+ .dword 0 /* start pc */
.word 1 /* extable size */
.word 0 /* 4-byte ALIGNMENT PADDING */
.dword 0 /* line number table start */
.word 0 /* isleaf */
.word 0 /* IsSync */
.word 0 /* frame size */
- .dword 0 /* method pointer (pointer to name) */
+ .dword 0 /* codeinfo pointer */
+
+#else /* SIZEOF_VOID_P == 8 */
+
+ .word 0 /* catch type all */
+ .word 0 /* handler pc */
+ .word 0 /* end pc */
+ .word 0 /* start pc */
+ .word 1 /* extable size */
+ .word 0 /* line number table start */
+ .word 0 /* line number table size */
+ .word 0 /* fltsave */
+ .word 0 /* intsave */
+ .word 0 /* isleaf */
+ .word 0 /* IsSync */
+ .word 0 /* frame size */
+ .word 0 /* method pointer (pointer to name) */
+
+#endif /* SIZEOF_VOID_P == 8 */
asm_vm_call_method:
asm_vm_call_method_int:
.set noreorder /* XXX we need to recompute pv */
aaddiu sp,sp,-12*8 /* allocate stack space (only 11 needed)*/
- ast ra,0(sp) /* save return address */
+ ast ra,0*8(sp) /* save return address */
bal L_asm_vm_call_method_compute_pv
ast pv,1*8(sp) /* procedure vector */
L_asm_vm_call_method_compute_pv:
aaddiu pv,ra,-4*4
- ast s7,3*8(sp)
+ ast s0,3*8(sp) /* save callee saved register */
+ ast a0,4*8(sp) /* save method PV */
+
+#if SIZEOF_VOID_P == 8
sdc1 fss0,5*8(sp) /* save non JavaABI saved flt registers */
sdc1 fss1,6*8(sp)
sdc1 fss2,7*8(sp)
sdc1 fss3,8*8(sp)
sdc1 fss4,9*8(sp)
sdc1 fss5,10*8(sp)
+#endif
- ast a0,4*8(sp) /* save method pointer for compiler */
-
- move t0,a2
- move s7,a1
- blez s7,calljava_argsloaded
- nop
-
- ald a0,offvmargdata(t0)
- ldc1 fa0,offvmargdata(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
-
- ald a1,offvmargdata+sizevmarg*1(t0)
- ldc1 fa1,offvmargdata+sizevmarg*1(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
-
- ald a2,offvmargdata+sizevmarg*2(t0)
- ldc1 fa2,offvmargdata+sizevmarg*2(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
+ move t0,a1 /* address of data structure */
+ move t1,a2 /* stack argument count */
+ move s0,sp /* save stack pointer */
+
+#if SIZEOF_VOID_P == 8
+
+ ld a0,0*8(t0)
+ ld a1,1*8(t0)
+ ld a2,2*8(t0)
+ ld a3,3*8(t0)
+ ld a4,4*8(t0)
+ ld a5,5*8(t0)
+ ld a6,6*8(t0)
+ ld a7,7*8(t0)
+
+ ldc1 fa0,8*8(t0)
+ ldc1 fa1,9*8(t0)
+ ldc1 fa2,10*8(t0)
+ ldc1 fa3,11*8(t0)
+ ldc1 fa4,12*8(t0)
+ ldc1 fa5,13*8(t0)
+ ldc1 fa6,14*8(t0)
+ ldc1 fa7,15*8(t0)
+
+#else /* SIZEOF_VOID_P == 8 */
+
+# if WORDS_BIGENDIAN == 1
+ lw a0,0*8+4(t0)
+ lw a1,1*8+4(t0)
+ lw a2,2*8+4(t0)
+ lw a3,3*8+4(t0)
+# else
+ lw a0,0*8(t0)
+ lw a1,1*8(t0)
+ lw a2,2*8(t0)
+ lw a3,3*8(t0)
+# endif
+
+# if !defined(ENABLE_SOFT_FLOAT)
+ ldc1 fa0,4*8(t0)
+ ldc1 fa1,5*8(t0)
+# endif
+
+#endif /* SIZEOF_VOID_P == 8 */
+
+ beqz t1,L_asm_vm_call_method_stack_copy_done
nop
- ald a3,offvmargdata+sizevmarg*3(t0)
- ldc1 fa3,offvmargdata+sizevmarg*3(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
+ sll t2,t1,3 /* calculate stackframe size (* 8) */
+ asubu sp,sp,t2 /* create stackframe */
+ move t2,sp /* temporary stack pointer */
- ald a4,offvmargdata+sizevmarg*4(t0)
- ldc1 fa4,offvmargdata+sizevmarg*4(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
-
- ald a5,offvmargdata+sizevmarg*5(t0)
- ldc1 fa5,offvmargdata+sizevmarg*5(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
+L_asm_vm_call_method_stack_copy_loop:
+#if SIZEOF_VOID_P == 8
+ ld t3,16*8(t0) /* load argument */
+ sd t3,0(t2) /* store argument on stack */
+#else
+# if !defined(ENABLE_SOFT_FLOAT)
+ lw t3,6*8+0(t0) /* load argument */
+ lw t4,6*8+4(t0)
+ sw t3,0(t2) /* store argument on stack */
+ sw t4,4(t2)
+# else
+# error implement me
+# endif
+#endif
- ald a6,offvmargdata+sizevmarg*6(t0)
- ldc1 fa6,offvmargdata+sizevmarg*6(t0)
- daddi s7,s7,-1
- blez s7,calljava_argsloaded
- nop
+ aaddi t1,t1,-1 /* subtract 1 argument */
+ aaddi t0,t0,8 /* load address of next argument */
+ aaddi t2,t2,8 /* increase stack pointer */
- ald a7,offvmargdata+sizevmarg*7(t0)
- ldc1 fa7,offvmargdata+sizevmarg*7(t0)
- daddi s7,s7,-1
-
-calljava_argsloaded:
- move t8,sp /* save stack pointer */
- blez s7,calljava_nocopy
- nop
- subu t1,zero,s7
- sll t2,t1,3
- aaddu sp,sp,t2
- aaddu t2,t2,t8
-
-calljava_copyloop:
- ald t3,offvmargdata+sizevmarg*8(t0)
- ast t3,0(t2)
- ala t1,1(t1)
- ala t0,sizevmarg(t0)
- ala t2,8(t2)
- bnez t1,calljava_copyloop
+ bgtz t1,L_asm_vm_call_method_stack_copy_loop
nop
-calljava_nocopy:
- ald itmp1,4*8(t8) /* pass method pointer via itmp1 */
-
- ala mptr,asm_call_jit_compiler/* fake virtual function call (2 instr) */
- ast mptr,2*8(t8) /* store function address */
- ala mptr,1*8(t8) /* set method pointer */
-
- ald pv,1*8(mptr) /* method call as in Java */
- jalr pv /* call JIT compiler */
+L_asm_vm_call_method_stack_copy_done:
+ ala mptr,4*8(s0) /* get address of PV */
+ ald pv,0*8(mptr) /* load PV */
+ jalr pv
nop
L_asm_vm_call_method_recompute_pv:
-/* aaddiu pv,ra,(asm_vm_call_method - L_asm_vm_call_method_recompute_pv)*/
+#if SIZEOF_VOID_P == 8
aaddiu pv,ra,-76*4 /* recompute procedure vector */
+#else
+ aaddiu pv,ra,(asm_vm_call_method - L_asm_vm_call_method_recompute_pv)
+#endif
.set reorder /* XXX we need to recompute pv */
+ move sp,s0 /* restore stack pointer */
+
calljava_return2:
- ald ra,0(sp) /* restore return address */
- ald pv,8(sp) /* restore procedure vector */
- ald s7,3*8(sp)
+ ald ra,0*8(sp) /* restore return address */
+ ald pv,1*8(sp) /* restore procedure vector */
+ ald s0,3*8(sp)
+#if SIZEOF_VOID_P == 8
ldc1 fss0,5*8(sp) /* restore non JavaABI saved flt regs */
ldc1 fss1,6*8(sp)
ldc1 fss2,7*8(sp)
ldc1 fss3,8*8(sp)
ldc1 fss4,9*8(sp)
ldc1 fss5,10*8(sp)
+#endif
aaddiu sp,sp,12*8 /* free stack space */
j ra /* return */
-calljava_xhandler2:
- asll s7,s7,3
- aaddu sp,s7,sp
+asm_vm_call_method_exception_handler:
+ move sp,s0 /* restore stack pointer */
+#if SIZEOF_VOID_P == 4
+	aaddiu	sp,sp,-4*4                  /* reserve O32 argument save area (4 words) */
+#endif
+
move a0,itmp1
jal builtin_throw_exception
- move v0,zero /* clear return value for exception */
+#if SIZEOF_VOID_P == 4
+ aaddiu sp,sp,4*4
+#endif
+asm_vm_call_method_end:
b calljava_return2
.end asm_vm_call_method
* *
*******************************************************************************/
-
.ent asm_call_jit_compiler
asm_call_jit_compiler:
- aaddiu sp,sp,-(20*8+sizestackframeinfo) /* allocate stack space */
+#if SIZEOF_VOID_P == 8
+
+ aaddiu sp,sp,-(ARG_CNT+2)*8 /* +2: keep stack 16-bytes aligned */
+
+ ast ra,0*8(sp) /* save return address */
+
+ SAVE_ARGUMENT_REGISTERS(1)
+
+ move a0,itmp1 /* pass methodinfo pointer */
+ move a1,mptr /* pass method pointer */
+ aaddiu a2,sp,(ARG_CNT+2)*8 /* pass java sp */
+ move a3,ra
+ jal jit_asm_compile /* call jit compiler */
+ move pv,v0
+
+ ald ra,0*8(sp) /* restore return address */
- SAVE_ARGUMENT_REGISTERS(0)
+ RESTORE_ARGUMENT_REGISTERS(1)
- ast mptr,16*8(sp) /* save method pointer */
- ast ra,17*8(sp) /* save return address */
- ast itmp1,18*8(sp) /* save methodinfo pointer */
+ aaddiu sp,sp,(ARG_CNT+2)*8 /* remove stack frame */
- aaddiu a0,sp,20*8 /* create stackframe info */
- move a1,zero /* we don't have pv handy */
- aaddiu a2,sp,(20*8+sizestackframeinfo) /* pass java sp */
- ald a3,17*8(sp) /* pass java ra */
- move a4,a3 /* xpc is equal to ra */
- jal stacktrace_create_extern_stackframeinfo
+#else /* SIZEOF_VOID_P == 8 */
- ald a0,18*8(sp) /* pass methodinfo pointer */
- jal jit_compile /* jit compiler */
- ast v0,18*8(sp) /* save return value */
+ aaddiu sp,sp,-(ARG_CNT+2)*8 /* +4: keep stack 16-bytes aligned */
- aaddiu a0,sp,20*8 /* remove stackframe info */
- jal stacktrace_remove_stackframeinfo
+ ast ra,4*4+0*4(sp) /* save return address */
- ald a0,17*8(sp) /* pass return address */
- aaddiu a1,sp,20*8 /* pass stackframeinfo (for PV) */
- ald a2,16*8(sp) /* pass method pointer */
- jal md_assembler_get_patch_address /* get address of patch position */
- move t0,v0 /* move offset to t0 for later use */
+ SAVE_ARGUMENT_REGISTERS(6)
- RESTORE_ARGUMENT_REGISTERS(0)
+ move a0,itmp1 /* pass methodinfo pointer */
+ move a1,mptr /* pass method pointer */
+ aaddiu a2,sp,(ARG_CNT+2)*8 /* pass java sp */
+ move a3,ra
+ jal jit_asm_compile /* call jit compiler */
+ move pv,v0
- ald ra,17*8(sp) /* restore return address */
- ald v0,18*8(sp) /* restore return value */
- aaddiu sp,sp,20*8+sizestackframeinfo /* deallocate stack area */
+ ald ra,4*4+0*4(sp) /* restore return address */
- beqz v0,L_asm_call_jit_compiler_exception
+ RESTORE_ARGUMENT_REGISTERS(6)
+
+ aaddiu sp,sp,(ARG_CNT+2)*8 /* remove stack frame */
+
+#endif /* SIZEOF_VOID_P == 8 */
+
+ beqz pv,L_asm_call_jit_compiler_exception
- ast v0,0(t0) /* store new method address */
- move pv,v0 /* move method address into pv */
jr pv /* and call method. The method returns */
/* directly to the caller (ra). */
L_asm_call_jit_compiler_exception:
-#if defined(USE_THREADS) && defined(NATIVE_THREADS)
aaddiu sp,sp,-2*8
ast ra,0*8(sp)
- jal builtin_asm_get_exceptionptrptr
+ jal exceptions_get_and_clear_exception
ald ra,0*8(sp)
aaddiu sp,sp,2*8
-#else
- la v0,_exceptionptr
-#endif
- ald xptr,0(v0) /* get the exception pointer */
- ast zero,0(v0) /* clear the exception pointer */
- aaddiu xpc,ra,-4 /* faulting address is return adress - 4 */
+ move xptr,v0 /* get exception */
+ aaddiu xpc,ra,-4 /* exception address is RA - 4 */
b asm_handle_nat_exception
.end asm_call_jit_compiler
asm_handle_nat_exception:
L_asm_handle_exception_stack_loop:
- aaddiu sp,sp,-6*8 /* allocate stack */
+#if SIZEOF_VOID_P == 8
+ aaddiu sp,sp,-6*8 /* keep stack 16-byte aligned */
ast xptr,0*8(sp) /* save exception pointer */
ast xpc,1*8(sp) /* save exception pc */
- ast ra,3*8(sp) /* save return address */
+ ast ra,3*8(sp) /* save RA */
ast zero,4*8(sp) /* save maybe-leaf flag (cleared) */
+#else
+ aaddiu sp,sp,-(4*4+6*8) /* allocate stack */
+ ast xptr,4*4+0*8(sp) /* save exception pointer */
+ ast xpc,4*4+1*8(sp) /* save exception pc */
+ ast ra,4*4+3*8(sp) /* save return address */
+ ast zero,4*4+4*8(sp) /* save maybe-leaf flag (cleared) */
+#endif
+
+ move a0,ra /* pass RA */
+ jal md_codegen_get_pv_from_pc /* get PV from RA */
+
+#if SIZEOF_VOID_P == 8
+ ast v0,2*8(sp) /* save PV */
- move a0,ra /* pass return address */
- jal md_codegen_findmethod /* get PV from RA */
- ast v0,2*8(sp) /* save data segment pointer */
+ ald a0,0*8(sp) /* pass xptr */
+ ald a1,1*8(sp) /* pass xpc */
+ move a2,v0 /* pass PV */
+ aaddiu a3,sp,6*8 /* pass Java SP */
+#else
+ ast v0,4*4+2*8(sp) /* save data segment pointer */
- ald a0,0*8(sp) /* pass exception pointer */
- ald a1,1*8(sp) /* pass exception pc */
+ ald a0,4*4+0*8(sp) /* pass exception pointer */
+ ald a1,4*4+1*8(sp) /* pass exception pc */
move a2,v0 /* pass data segment pointer */
- aaddiu a3,sp,6*8 /* pass Java stack pointer */
+ aaddiu a3,sp,(4*4+6*8) /* pass Java stack pointer */
+#endif
b L_asm_handle_exception_continue
SAVE_ARGUMENT_REGISTERS(0) /* we save arg and temp registers in */
SAVE_TEMPORARY_REGISTERS(ARG_CNT) /* case this is a leaf method */
+#if SIZEOF_VOID_P == 8
aaddiu sp,sp,-6*8 /* allocate stack */
ast xptr,0*8(sp) /* save exception pointer */
- ast xpc,1*8(sp) /* save exception pc */
- ast pv,2*8(sp) /* save data segment pointer */
- ast ra,3*8(sp) /* save return address */
+ ast pv,2*8(sp) /* save PV */
+ ast ra,3*8(sp) /* save RA */
addu t0,zero,1 /* set maybe-leaf flag */
ast t0,4*8(sp) /* save maybe-leaf flag */
+#else
+ aaddiu sp,sp,-(4*4+6*8) /* allocate stack */
+ ast xptr,4*4+0*8(sp) /* save exception pointer */
+ ast xpc,4*4+1*8(sp) /* save exception pc */
+ ast pv,4*4+2*8(sp) /* save data segment pointer */
+ ast ra,4*4+3*8(sp) /* save return address */
+ addu t0,zero,1 /* set maybe-leaf flag */
+ ast t0,4*4+4*8(sp) /* save maybe-leaf flag */
+#endif
- move a0,xptr /* pass exception pointer */
- move a1,xpc /* pass exception pc */
- move a2,pv /* pass data segment pointer */
- aaddiu a3,sp,(ARG_CNT+TMP_CNT+6)*8 /* pass Java stack pointer */
+ move a0,xptr /* pass xptr */
+ move a1,xpc /* pass xpc */
+ move a2,pv /* pass PV */
+
+#if SIZEOF_VOID_P == 8
+ aaddiu a3,sp,(ARG_CNT+TMP_CNT+6)*8 /* pass Java SP */
+#else
+ aaddiu a3,sp,4*4+(ARG_CNT+TMP_CNT+6)*8 /* pass Java stack pointer */
+#endif
L_asm_handle_exception_continue:
jal exceptions_handle_exception
beqz v0,L_asm_handle_exception_not_catched
move xpc,v0 /* move handlerpc into xpc */
+
+#if SIZEOF_VOID_P == 8
ald xptr,0*8(sp) /* restore exception pointer */
- ald pv,2*8(sp) /* restore data segment pointer */
- ald ra,3*8(sp) /* restore return address */
+ ald pv,2*8(sp) /* restore PV */
+ ald ra,3*8(sp) /* restore RA */
ald t0,4*8(sp) /* get maybe-leaf flag */
aaddiu sp,sp,6*8 /* free stackframe */
+#else
+ ald xptr,4*4+0*8(sp) /* restore exception pointer */
+ ald pv,4*4+2*8(sp) /* restore data segment pointer */
+ ald ra,4*4+3*8(sp) /* restore return address */
+ ald t0,4*4+4*8(sp) /* get maybe-leaf flag */
+ aaddiu sp,sp,4*4+6*8 /* free stackframe */
+#endif
beqz t0,L_asm_handle_exception_no_leaf
jr xpc /* jump to the handler */
L_asm_handle_exception_not_catched:
- ald xptr,0*8(sp) /* restore exception pointer */
- ald pv,2*8(sp) /* restore data segment pointer */
- ald ra,3*8(sp) /* restore return address */
+#if SIZEOF_VOID_P == 8
+ ald xptr,0*8(sp) /* restore xptr */
+ ald pv,2*8(sp) /* restore PV */
+ ald ra,3*8(sp) /* restore RA */
ald t0,4*8(sp) /* get maybe-leaf flag */
aaddiu sp,sp,6*8 /* free stackframe */
+#else
+ ald xptr,4*4+0*8(sp) /* restore xptr */
+ ald pv,4*4+2*8(sp) /* restore PV */
+ ald ra,4*4+3*8(sp) /* restore RA */
+ ald t0,4*4+4*8(sp) /* get maybe-leaf flag */
+ aaddiu sp,sp,4*4+6*8 /* free stackframe */
+#endif
beqz t0,L_asm_handle_exception_no_leaf_stack
ald s5,-3*8(t1)
ald s6,-2*8(t1)
ald s7,-1*8(t1)
+
ex_int2:
sll t2,t2,1 /* t2 = register count * 4 * 2 */
asubu t1,t1,t2 /* t1 = t0 - 8 * register count */
asubu t3,t3,t2 /* t3 = ex_int_sav - 4 * register count */
jr t3 /* jump to save position */
+#if SIZEOF_VOID_P == 8
+	ldc1	fs0,-4*8(t1)
+	ldc1	fs1,-3*8(t1)
+	ldc1	fs2,-2*8(t1)
+	ldc1	fs3,-1*8(t1)
+#else /* SIZEOF_VOID_P == 8 */
+# if !defined(ENABLE_SOFT_FLOAT)
-	ldc1	fs0,-4*8(t1)
-	ldc1	fs1,-3*8(t1)
-	ldc1	fs2,-2*8(t1)
-	ldc1	fs3,-1*8(t1)
+	/* six saved float registers: six distinct slots, last one at -1*8  */
+	/* (fix: fs4/fs5 previously aliased fs3's slot at -1*8)             */
+	ldc1	fs0,-6*8(t1)
+	ldc1	fs1,-5*8(t1)
+	ldc1	fs2,-4*8(t1)
+	ldc1	fs3,-3*8(t1)
+	ldc1	fs4,-2*8(t1)
+	ldc1	fs5,-1*8(t1)
+# endif /* !defined(ENABLE_SOFT_FLOAT) */
+#endif /* SIZEOF_VOID_P == 8 */
ex_flt2:
lw t1,FrameSize(pv) /* get frame size */
.end asm_handle_nat_exception
-/* asm_wrapper_patcher *********************************************************
+/* asm_abstractmethoderror *****************************************************
- XXX
-
- Stack layout:
- 40 return address into JIT code (patch position)
- 32 pointer to virtual java_objectheader
- 24 machine code (which is patched back later)
- 16 unresolved class/method/field reference
- 8 data segment displacement from load instructions
- 0 patcher function pointer to call
+ Creates and throws an AbstractMethodError.
*******************************************************************************/
-
- .ent asm_wrapper_patcher
-
-asm_wrapper_patcher:
- aaddiu sp,sp,-((2+16+22+4)*8+sizestackframeinfo) /* create stack frame */
-
- SAVE_RETURN_REGISTERS(0) /* save 1 int/1 float return registers */
- SAVE_ARGUMENT_REGISTERS(2) /* save 8 int/8 float argument registers */
- SAVE_TEMPORARY_REGISTERS(18) /* save 5 int/16 float temporary registers */
-
- ast itmp1,(2+16+22+0)*8(sp) /* save itmp1 */
- ast itmp2,(2+16+22+1)*8(sp) /* save itmp2 */
- ast ra,(2+16+22+2)*8(sp) /* save method return address (for leafs) */
- ast pv,(2+16+22+3)*8(sp) /* save pv of calling java function */
-
- aaddiu a0,sp,(2+16+22+4)*8 /* create stackframe info */
- move a1,pv /* pass java pv */
- aaddiu a2,sp,((6+2+16+22+4)*8+sizestackframeinfo) /* pass java sp */
- move a3,ra /* this is correct for leafs */
- ald a4,((5+2+16+22+4)*8+sizestackframeinfo)(sp) /* pass xpc */
- jal stacktrace_create_extern_stackframeinfo
-
- aaddiu a0,sp,((0+2+16+22+4)*8+sizestackframeinfo) /* pass sp */
- ald itmp3,((0+2+16+22+4)*8+sizestackframeinfo)(sp) /* get function */
- ald itmp1,(2+16+22+3)*8(sp) /* save pv to the position of fp */
- ast itmp1,((0+2+16+22+4)*8+sizestackframeinfo)(sp)
- jalr itmp3
- ast v0,((0+2+16+22+4)*8+sizestackframeinfo)(sp) /* save return value */
-
- aaddiu a0,sp,(2+16+22+4)*8 /* remove stackframe info */
- jal stacktrace_remove_stackframeinfo
-
- RESTORE_RETURN_REGISTERS(0) /* restore 1 int/1 float return registers */
- RESTORE_ARGUMENT_REGISTERS(2) /* restore 8 int/8 float argument registers */
- RESTORE_TEMPORARY_REGISTERS(18) /* restore 5 int/16 float temporary reg. */
-
- ald itmp1,(2+16+22+0)*8(sp) /* restore itmp1 */
- ald itmp2,(2+16+22+1)*8(sp) /* restore itmp2 */
- ald ra,(2+16+22+2)*8(sp) /* restore method return address (for leafs)*/
- ald pv,(2+16+22+3)*8(sp) /* restore pv of calling java function */
-
- ald itmp3,((0+2+16+22+4)*8+sizestackframeinfo)(sp) /* get return value*/
- beqz itmp3,L_asm_wrapper_patcher_exception
-
- ald itmp3,((5+2+16+22+4)*8+sizestackframeinfo)(sp) /* get RA to JIT */
- aaddiu sp,sp,((6+2+16+22+4)*8+sizestackframeinfo) /* remove stack frame */
-
- jr itmp3 /* jump to new patched code */
-
-L_asm_wrapper_patcher_exception:
- ald xpc,((5+2+16+22+4)*8+sizestackframeinfo)(sp) /* RA to JIT is xpc */
- aaddiu sp,sp,((6+2+16+22+4)*8+sizestackframeinfo) /* remove stack frame */
-
-#if defined(USE_THREADS) && defined(NATIVE_THREADS)
- daddiu sp,sp,-4*8
- sd xpc,0*8(sp)
- sd ra,1*8(sp)
- sd pv,2*8(sp)
- jal builtin_asm_get_exceptionptrptr
- ld xpc,0*8(sp)
- ld ra,1*8(sp)
- ld pv,2*8(sp)
- daddiu sp,sp,4*8
-#else
- la v0,_exceptionptr
-#endif
- ld xptr,0(v0) /* get the exception pointer */
- sd zero,0(v0) /* clear the exception pointer */
- b asm_handle_exception
- .end asm_wrapper_patcher
+	.ent	asm_abstractmethoderror
+
+asm_abstractmethoderror:
+	/* Builds an AbstractMethodError via the C helper and enters the    */
+	/* native-exception handling path; the faulting pc is taken to be   */
+	/* ra - 4 (the call instruction that reached this stub).            */
+	aaddiu	sp,sp,-2*8                  /* create stackframe */
+	ast	ra,0*8(sp)                  /* save return address */
+	aaddiu	a0,sp,2*8                   /* pass java sp (sp before our frame) */
+	move	a1,ra                       /* pass exception address */
+	jal	exceptions_asm_new_abstractmethoderror
+	ald	ra,0*8(sp)                  /* restore return address */
+	aaddiu	sp,sp,2*8                   /* remove stackframe */
+	move	xptr,v0                     /* get exception pointer */
+	aaddiu	xpc,ra,-4                   /* exception address is ra - 4 */
+	b	asm_handle_nat_exception
+
+	.end	asm_abstractmethoderror
+
+
+#if defined(ENABLE_REPLACEMENT)
/* asm_replacement_out *********************************************************
asm_replacement_out:
/* create stack frame */
- daddiu sp,sp,-REPLACEMENT_STACK_OFFSET
+ aaddiu sp,sp,-REPLACEMENT_STACK_OFFSET
/* save registers in execution state */
- sd $0 ,( 0*8+offes_intregs)(sp)
- sd $1 ,( 1*8+offes_intregs)(sp)
- sd $2 ,( 2*8+offes_intregs)(sp)
- sd $3 ,( 3*8+offes_intregs)(sp)
- sd $4 ,( 4*8+offes_intregs)(sp)
- sd $5 ,( 5*8+offes_intregs)(sp)
- sd $6 ,( 6*8+offes_intregs)(sp)
- sd $7 ,( 7*8+offes_intregs)(sp)
- sd $8 ,( 8*8+offes_intregs)(sp)
- sd $9 ,( 9*8+offes_intregs)(sp)
- sd $10,(10*8+offes_intregs)(sp)
- sd $11,(11*8+offes_intregs)(sp)
- sd $12,(12*8+offes_intregs)(sp)
- sd $13,(13*8+offes_intregs)(sp)
- sd $14,(14*8+offes_intregs)(sp)
- sd $15,(15*8+offes_intregs)(sp)
- sd $16,(16*8+offes_intregs)(sp)
- sd $17,(17*8+offes_intregs)(sp)
- sd $18,(18*8+offes_intregs)(sp)
- sd $19,(19*8+offes_intregs)(sp)
- sd $20,(20*8+offes_intregs)(sp)
- sd $21,(21*8+offes_intregs)(sp)
- sd $22,(22*8+offes_intregs)(sp)
- sd $23,(23*8+offes_intregs)(sp)
- sd $24,(24*8+offes_intregs)(sp)
- sd $25,(25*8+offes_intregs)(sp)
- sd $26,(26*8+offes_intregs)(sp)
- sd $27,(27*8+offes_intregs)(sp)
- sd $28,(28*8+offes_intregs)(sp)
- sd $29,(29*8+offes_intregs)(sp)
- sd $30,(30*8+offes_intregs)(sp)
- sd $31,(31*8+offes_intregs)(sp)
-
+ ast $0 ,( 0*8+offes_intregs)(sp)
+ ast $1 ,( 1*8+offes_intregs)(sp)
+ ast $2 ,( 2*8+offes_intregs)(sp)
+ ast $3 ,( 3*8+offes_intregs)(sp)
+ ast $4 ,( 4*8+offes_intregs)(sp)
+ ast $5 ,( 5*8+offes_intregs)(sp)
+ ast $6 ,( 6*8+offes_intregs)(sp)
+ ast $7 ,( 7*8+offes_intregs)(sp)
+ ast $8 ,( 8*8+offes_intregs)(sp)
+ ast $9 ,( 9*8+offes_intregs)(sp)
+ ast $10,(10*8+offes_intregs)(sp)
+ ast $11,(11*8+offes_intregs)(sp)
+ ast $12,(12*8+offes_intregs)(sp)
+ ast $13,(13*8+offes_intregs)(sp)
+ ast $14,(14*8+offes_intregs)(sp)
+ ast $15,(15*8+offes_intregs)(sp)
+ ast $16,(16*8+offes_intregs)(sp)
+ ast $17,(17*8+offes_intregs)(sp)
+ ast $18,(18*8+offes_intregs)(sp)
+ ast $19,(19*8+offes_intregs)(sp)
+ ast $20,(20*8+offes_intregs)(sp)
+ ast $21,(21*8+offes_intregs)(sp)
+ ast $22,(22*8+offes_intregs)(sp)
+ ast $23,(23*8+offes_intregs)(sp)
+ ast $24,(24*8+offes_intregs)(sp)
+ ast $25,(25*8+offes_intregs)(sp)
+ ast $26,(26*8+offes_intregs)(sp)
+ ast $27,(27*8+offes_intregs)(sp)
+ ast $28,(28*8+offes_intregs)(sp)
+ ast $29,(29*8+offes_intregs)(sp)
+ ast $30,(30*8+offes_intregs)(sp)
+ ast $31,(31*8+offes_intregs)(sp)
+
+#if SIZEOF_VOID_P == 8
+
sdc1 $f0 ,( 0*8+offes_fltregs)(sp)
sdc1 $f1 ,( 1*8+offes_fltregs)(sp)
sdc1 $f2 ,( 2*8+offes_fltregs)(sp)
sdc1 $f29,(29*8+offes_fltregs)(sp)
sdc1 $f30,(30*8+offes_fltregs)(sp)
sdc1 $f31,(31*8+offes_fltregs)(sp)
+
+#else /* SIZEOF_VOID_P == 8 */
+
+ sdc1 $f0 ,( 0*8+offes_fltregs)(sp)
+ sdc1 $f2 ,( 2*8+offes_fltregs)(sp)
+ sdc1 $f4 ,( 4*8+offes_fltregs)(sp)
+ sdc1 $f6 ,( 6*8+offes_fltregs)(sp)
+ sdc1 $f8 ,( 8*8+offes_fltregs)(sp)
+ sdc1 $f10,(10*8+offes_fltregs)(sp)
+ sdc1 $f12,(12*8+offes_fltregs)(sp)
+ sdc1 $f14,(14*8+offes_fltregs)(sp)
+ sdc1 $f16,(16*8+offes_fltregs)(sp)
+ sdc1 $f18,(18*8+offes_fltregs)(sp)
+ sdc1 $f20,(20*8+offes_fltregs)(sp)
+ sdc1 $f22,(22*8+offes_fltregs)(sp)
+ sdc1 $f24,(24*8+offes_fltregs)(sp)
+ sdc1 $f26,(26*8+offes_fltregs)(sp)
+ sdc1 $f28,(28*8+offes_fltregs)(sp)
+ sdc1 $f30,(30*8+offes_fltregs)(sp)
+
+#endif /* SIZEOF_VOID_P == 8 */
/* calculate sp of method */
- daddiu itmp1,sp,(REPLACEMENT_STACK_OFFSET + 2*8)
- sd itmp1,(offes_sp)(sp)
+ aaddiu itmp1,sp,(REPLACEMENT_STACK_OFFSET + 2*8)
+ ast itmp1,(offes_sp)(sp)
/* store pv */
- sd pv,(offes_pv)(sp)
+ ast pv,(offes_pv)(sp)
/* call replace_me */
- ld a0,-(2*8)(itmp1) /* arg0: rplpoint * */
+ ald a0,-(2*8)(itmp1) /* arg0: rplpoint * */
move a1,sp /* arg1: execution state */
jal replace_me /* call C function replace_me */
jal abort /* NEVER REACHED */
/* a0 == executionstate *es */
/* set new sp and pv */
- ld sp,(offes_sp)(a0)
- ld pv,(offes_pv)(a0)
+ ald sp,(offes_sp)(a0)
+ ald pv,(offes_pv)(a0)
/* copy registers from execution state */
/* $0 is zero */
-	ld	$1 ,( 1*8+offes_intregs)(a0)
-	ld	$2 ,( 2*8+offes_intregs)(a0)
-	ld	$3 ,( 2*8+offes_intregs)(a0)
+	ald	$1 ,( 1*8+offes_intregs)(a0)
+	ald	$2 ,( 2*8+offes_intregs)(a0)
+	ald	$3 ,( 3*8+offes_intregs)(a0)    /* fix: offset was 2*8, so $3 got $2's value */
/* a0 is loaded below */
- ld $5 ,( 5*8+offes_intregs)(a0)
- ld $6 ,( 6*8+offes_intregs)(a0)
- ld $7 ,( 7*8+offes_intregs)(a0)
- ld $8 ,( 8*8+offes_intregs)(a0)
- ld $9 ,( 9*8+offes_intregs)(a0)
- ld $10,(10*8+offes_intregs)(a0)
- ld $11,(11*8+offes_intregs)(a0)
- ld $12,(12*8+offes_intregs)(a0)
- ld $13,(13*8+offes_intregs)(a0)
- ld $14,(14*8+offes_intregs)(a0)
- ld $15,(15*8+offes_intregs)(a0)
- ld $16,(16*8+offes_intregs)(a0)
- ld $17,(17*8+offes_intregs)(a0)
- ld $18,(18*8+offes_intregs)(a0)
- ld $19,(19*8+offes_intregs)(a0)
- ld $20,(20*8+offes_intregs)(a0)
- ld $21,(21*8+offes_intregs)(a0)
- ld $22,(22*8+offes_intregs)(a0)
- ld $23,(23*8+offes_intregs)(a0)
- ld $24,(24*8+offes_intregs)(a0)
- ld $25,(25*8+offes_intregs)(a0)
- ld $26,(26*8+offes_intregs)(a0)
- ld $27,(27*8+offes_intregs)(a0)
- ld $28,(28*8+offes_intregs)(a0)
+ ald $5 ,( 5*8+offes_intregs)(a0)
+ ald $6 ,( 6*8+offes_intregs)(a0)
+ ald $7 ,( 7*8+offes_intregs)(a0)
+ ald $8 ,( 8*8+offes_intregs)(a0)
+ ald $9 ,( 9*8+offes_intregs)(a0)
+ ald $10,(10*8+offes_intregs)(a0)
+ ald $11,(11*8+offes_intregs)(a0)
+ ald $12,(12*8+offes_intregs)(a0)
+ ald $13,(13*8+offes_intregs)(a0)
+ ald $14,(14*8+offes_intregs)(a0)
+ ald $15,(15*8+offes_intregs)(a0)
+ ald $16,(16*8+offes_intregs)(a0)
+ ald $17,(17*8+offes_intregs)(a0)
+ ald $18,(18*8+offes_intregs)(a0)
+ ald $19,(19*8+offes_intregs)(a0)
+ ald $20,(20*8+offes_intregs)(a0)
+ ald $21,(21*8+offes_intregs)(a0)
+ ald $22,(22*8+offes_intregs)(a0)
+ ald $23,(23*8+offes_intregs)(a0)
+ ald $24,(24*8+offes_intregs)(a0)
+ ald $25,(25*8+offes_intregs)(a0)
+ ald $26,(26*8+offes_intregs)(a0)
+ ald $27,(27*8+offes_intregs)(a0)
+ ald $28,(28*8+offes_intregs)(a0)
/* $29 is sp */
/* $30 is pv */
- ld $31,(31*8+offes_intregs)(a0)
+ ald $31,(31*8+offes_intregs)(a0)
+#if SIZEOF_VOID_P == 8
+
ldc1 $f0 ,( 0*8+offes_fltregs)(a0)
ldc1 $f1 ,( 1*8+offes_fltregs)(a0)
ldc1 $f2 ,( 2*8+offes_fltregs)(a0)
ldc1 $f30,(30*8+offes_fltregs)(a0)
ldc1 $f31,(31*8+offes_fltregs)(a0)
+#else /* SIZEOF_VOID_P == 8 */
+
+ ldc1 $f0 ,( 0*8+offes_fltregs)(a0)
+ ldc1 $f2 ,( 2*8+offes_fltregs)(a0)
+ ldc1 $f4 ,( 4*8+offes_fltregs)(a0)
+ ldc1 $f6 ,( 6*8+offes_fltregs)(a0)
+ ldc1 $f8 ,( 8*8+offes_fltregs)(a0)
+ ldc1 $f10,(10*8+offes_fltregs)(a0)
+ ldc1 $f12,(12*8+offes_fltregs)(a0)
+ ldc1 $f14,(14*8+offes_fltregs)(a0)
+ ldc1 $f16,(16*8+offes_fltregs)(a0)
+ ldc1 $f18,(18*8+offes_fltregs)(a0)
+ ldc1 $f20,(20*8+offes_fltregs)(a0)
+ ldc1 $f22,(22*8+offes_fltregs)(a0)
+ ldc1 $f24,(24*8+offes_fltregs)(a0)
+ ldc1 $f26,(26*8+offes_fltregs)(a0)
+ ldc1 $f28,(28*8+offes_fltregs)(a0)
+ ldc1 $f30,(30*8+offes_fltregs)(a0)
+
+#endif /* SIZEOF_VOID_P == 8 */
+
/* load new pc */
- ld itmp3,offes_pc(a0)
+ ald itmp3,offes_pc(a0)
/* load a0 */
- ld a0,(4*8+offes_intregs)(a0)
+ ald a0,(4*8+offes_intregs)(a0)
/* jump to new code */
.end asm_replacement_in
-/******************* function asm_initialize_thread_stack **********************
-* *
-* u1* asm_initialize_thread_stack (void *func, u1 *stack); *
-* *
-* initialize a thread stack *
-* *
-*******************************************************************************/
-
- .ent asm_initialize_thread_stack
-
-asm_initialize_thread_stack:
- aaddiu a1,a1,-14*8 /* allocate save area */
- sd zero, 0*8(a1) /* s0 initalize thread area */
- sd zero, 1*8(a1) /* s1 */
- sd zero, 2*8(a1) /* s2 */
- sd zero, 3*8(a1) /* s3 */
- sd zero, 4*8(a1) /* s4 */
- sd zero, 5*8(a1) /* s5 */
- sd zero, 6*8(a1) /* s6 */
- sd zero, 7*8(a1) /* s7 */
- sd zero, 8*8(a1) /* s8 */
- sd zero, 9*8(a1) /* fs0 */
- sd zero,10*8(a1) /* fs1 */
- sd zero,11*8(a1) /* fs2 */
- sd zero,12*8(a1) /* fs3 */
- sd a0, 13*8(a1)
- move v0,a1
- j ra /* return */
-
- .end asm_initialize_thread_stack
-
-
-/******************* function asm_perform_threadswitch *************************
-* *
-* void asm_perform_threadswitch (u1 **from, u1 **to, u1 **stackTop); *
-* *
-* performs a threadswitch *
-* *
-*******************************************************************************/
-
- .ent asm_perform_threadswitch
-
-asm_perform_threadswitch:
- aaddiu sp,sp,-14*8 /* allocate new stack */
- sd s0, 0*8(sp) /* save saved registers of old thread */
- sd s1, 1*8(sp)
- sd s2, 2*8(sp)
- sd s3, 3*8(sp)
- sd s4, 4*8(sp)
- sd s5, 5*8(sp)
- sd s6, 6*8(sp)
- sd s7, 7*8(sp)
- sd s8, 8*8(sp)
- sdc1 fs0, 9*8(sp)
- sdc1 fs1,10*8(sp)
- sdc1 fs2,11*8(sp)
- sdc1 fs3,12*8(sp)
- sd ra, 13*8(sp)
- ast sp,0(a0) /* save old stack pointer */
- ast sp,0(a2) /* stackTop = old stack pointer */
- ald sp,0(a1) /* load new stack pointer */
- ld s0, 0*8(sp) /* load saved registers of new thread */
- ld s1, 1*8(sp)
- ld s2, 2*8(sp)
- ld s3, 3*8(sp)
- ld s4, 4*8(sp)
- ld s5, 5*8(sp)
- ld s6, 6*8(sp)
- ld s7, 7*8(sp)
- ld s8, 8*8(sp)
- ldc1 fs0, 9*8(sp)
- ldc1 fs1,10*8(sp)
- ldc1 fs2,11*8(sp)
- ldc1 fs3,12*8(sp)
- ld ra, 13*8(sp)
- aaddiu sp,sp,14*8 /* deallocate new stack */
- move itmp3, ra
- j ra /* return */
-
- .end asm_perform_threadswitch
-
-
-/********************* function asm_switchstackandcall *************************
-* *
-* void asm_switchstackandcall (void *stack, void *func, void **stacktopsave); *
-* *
-* Switches to a new stack, calls a function and switches back. *
-* a0 new stack pointer *
-* a1 function pointer *
-* a2 pointer to variable where stack top should be stored *
-* *
-*******************************************************************************/
-
- .ent asm_switchstackandcall
-
-asm_switchstackandcall:
- aaddiu a0,a0,-16 /* allocate new stack */
- sd ra,0(a0) /* save return address on new stack */
- sd sp,8(a0) /* save old stack pointer on new stack */
- sd sp,0(a2) /* save old stack pointer to variable */
- move sp,a0 /* switch to new stack */
-
- move itmp3,a1
- move a0,a3
- jalr itmp3 /* and call function */
-
- ld ra,0(sp) /* load return address */
- ld sp,8(sp) /* switch to old stack */
-
- j ra /* return */
-
- .end asm_switchstackandcall
+#endif /* defined(ENABLE_REPLACEMENT) */
- .ent asm_getclassvalues_atomic
-
-asm_getclassvalues_atomic:
-_crit_restart:
-_crit_begin:
- lw t0,offbaseval(a0)
- lw t1,offdiffval(a0)
- lw t2,offbaseval(a1)
-_crit_end:
- sw t0,offcast_super_baseval(a2)
- sw t1,offcast_super_diffval(a2)
- sw t2,offcast_sub_baseval(a2)
- j ra
-
- .end asm_getclassvalues_atomic
-
- .data
-
-asm_criticalsections:
-#if defined(USE_THREADS) && defined(NATIVE_THREADS)
- .dword _crit_begin
- .dword _crit_end
- .dword _crit_restart
-#endif
- .dword 0
-
-
- .text
-
.ent compare_and_swap
compare_and_swap:
.end compare_and_swap
-/* Disable exec-stacks, required for Gentoo ***********************************/
+/* disable exec-stacks ********************************************************/
-#if defined(__GCC__) && defined(__ELF__)
- .section .note.GNU-stack,"",@progbits
+#if defined(__linux__) && defined(__ELF__)
+ .section .note.GNU-stack,"",%progbits
#endif