/* src/vm/jit/powerpc/asmpart.S - Java-C interface functions for PowerPC
- Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
- R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
- C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
- Institut f. Computersprachen - TU Wien
+ Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
+ C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
+ E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
+ J. Wenninger, Institut f. Computersprachen - TU Wien
This file is part of CACAO.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
- 02111-1307, USA.
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ 02110-1301, USA.
- Contact: cacao@complang.tuwien.ac.at
+ Contact: cacao@cacaojvm.org
Authors: Andreas Krall
Reinhard Grafl
Changes: Christian Thalinger
- $Id: asmpart.S 3169 2005-09-10 20:32:22Z twisti $
+ $Id: asmpart.S 4357 2006-01-22 23:33:38Z twisti $
*/
#include "config.h"
+
#include "md-abi.h"
#include "md-asm.h"
+#include "vm/jit/abi.h"
+#include "vm/jit/methodheader.h"
#include "vm/jit/powerpc/offsets.h"
-#include "vm/jit/powerpc/asmoffsets.h"
.text
.long 0 /* intsave */
.long 0 /* isleaf */
.long 0 /* IsSync */
- .long 24 /* frame size */
+ .long 0 /* frame size */
.long 0 /* method pointer (pointer to name) */
asm_calljavafunction:
asm_calljavafunction_int:
mflr r0
- stw r31,-4(r1)
-/* stw r30,-8(r1)*/
- stw pv,-12(r1)
stw r0,LA_LR_OFFSET(r1)
- stwu r1,-148(r1)
+ stwu r1,-40*4(r1)
#if defined(__DARWIN__)
- bl 0f
-0:
- mflr r31
+ stw itmp1,10*4(sp) /* register r11 is callee saved */
#endif
+ stw pv,11*4(sp) /* save PV register */
- stw t0,40(r1)
- stw t1,44(r1)
- stw t2,48(r1)
- stw t3,52(r1)
- stw t4,56(r1)
- stw t5,60(r1)
- stw t6,64(r1)
-/* stw t7,68(r1) */
-
- stfd ftmp1,72(r1)
- stfd ftmp2,80(r1)
- stfd ft0,88(r1)
- stfd ft1,96(r1)
- stfd ft2,104(r1)
- stfd ft3,112(r1)
- stfd ft4,120(r1)
- stfd ft5,128(r1)
+ stw itmp3,12*4(sp) /* registers r14-r31 are callee saved */
+ stfd ftmp1,14*4(sp) /* registers f14-f31 are callee saved */
+ stfd ftmp2,16*4(sp)
+
+#if defined(__DARWIN__)
+ stw t1,18*4(r1)
+ stw t2,19*4(r1)
+ stw t3,20*4(r1)
+ stw t4,21*4(r1)
+ stw t5,22*4(r1)
+ stw t6,23*4(r1)
+ stw t7,24*4(r1)
+
+ stfd ft0,26*4(r1)
+ stfd ft1,28*4(r1)
+ stfd ft2,30*4(r1)
+ stfd ft3,32*4(r1)
+ stfd ft4,34*4(r1)
+ stfd ft5,36*4(r1)
+#else
+ SAVE_TEMPORARY_REGISTERS(18) /* the offset has to be even */
+#endif
stw a0,36(r1)
addi itmp1,r1,36
mr a3,a4
#if defined(__DARWIN__)
-/* addis mptr,r31,ha16(_asm_call_jit_compiler-0b)*/
- addi mptr,r31,lo16(asm_call_jit_compiler-0b)
+ lis mptr,ha16(asm_call_jit_compiler)
+ addi mptr,mptr,lo16(asm_call_jit_compiler)
#else
-/* addi mptr,r31,(asm_call_jit_compiler-0b)@l*/
lis mptr,asm_call_jit_compiler@ha
addi mptr,mptr,asm_call_jit_compiler@l
#endif
addi pv,itmp1,(asm_calljavafunction-1b)@l
#endif
-calljava_regrestore:
- lwz t0,40(r1)
- lwz t1,44(r1)
- lwz t2,48(r1)
- lwz t3,52(r1)
- lwz t4,56(r1)
- lwz t5,60(r1)
- lwz t6,64(r1)
-/* lwz t7,68(r1) */
-
- lfd ftmp1,72(r1)
- lfd ftmp2,80(r1)
- lfd ft0,88(r1)
- lfd ft1,96(r1)
- lfd ft2,104(r1)
- lfd ft3,112(r1)
- lfd ft4,120(r1)
- lfd ft5,128(r1)
-
- lwz r0,148+LA_LR_OFFSET(r1)
+L_asm_calljavafunction_return:
+#if defined(__DARWIN__)
+	lwz	itmp1,10*4(sp)              /* register r11 is callee saved */
+#endif
+	lwz	pv,11*4(sp)                 /* restore PV register */
+
+	lwz	itmp3,12*4(sp)              /* registers r14-r31 are callee saved */
+	lfd	ftmp1,14*4(sp)              /* registers f14-f31 are callee saved */
+	lfd	ftmp2,16*4(sp)
+
+#if defined(__DARWIN__)
+ lwz t1,18*4(r1)
+ lwz t2,19*4(r1)
+ lwz t3,20*4(r1)
+ lwz t4,21*4(r1)
+ lwz t5,22*4(r1)
+ lwz t6,23*4(r1)
+ lwz t7,24*4(r1)
+
+ lfd ft0,26*4(r1)
+ lfd ft1,28*4(r1)
+ lfd ft2,30*4(r1)
+ lfd ft3,32*4(r1)
+ lfd ft4,34*4(r1)
+ lfd ft5,36*4(r1)
+#else
+ RESTORE_TEMPORARY_REGISTERS(18) /* the offset has to be even */
+#endif
+
+ lwz r0,40*4+LA_LR_OFFSET(r1)
mtlr r0
- addi r1,r1,148
- lwz pv,-12(r1)
-/* lwz r30,-8(r1)*/
- lwz r31,-4(r1)
+ addi r1,r1,40*4
blr
calljava_xhandler:
mr a0,itmp1
bl builtin_throw_exception
li v0,0 /* return NULL */
- b calljava_regrestore
+ b L_asm_calljavafunction_return
.long 0 /* intsave */
.long 0 /* isleaf */
.long 0 /* IsSync */
- .long 24 /* frame size */
+ .long 0 /* frame size */
.long 0 /* method pointer (pointer to name) */
asm_calljavafunction2:
asm_calljavafunction2float:
asm_calljavafunction2double:
mflr r0
- stw r31,-4(r1)
-/* stw r30,-8(r1)*/
- stw pv,-12(r1)
stw r0,LA_LR_OFFSET(r1)
- stwu r1,-148(r1)
- bl 0f
-0:
- mflr r31
-
- stw r16,40(r1)
- stw r17,44(r1)
- stw r18,48(r1)
- stw r19,52(r1)
- stw r20,56(r1)
- stw r21,60(r1)
- stw r22,64(r1)
- stw r23,68(r1)
-
- stfd fr16,72(r1)
- stfd fr17,80(r1)
- stfd fr18,88(r1)
- stfd fr19,96(r1)
- stfd fr20,104(r1)
- stfd fr21,112(r1)
- stfd fr22,120(r1)
- stfd fr23,128(r1)
+ stwu r1,-40*4(r1)
+
+#if defined(__DARWIN__)
+ stw itmp1,10*4(sp) /* register r11 is callee saved */
+#endif
+ stw pv,11*4(sp) /* save PV register */
+
+ stw itmp3,12*4(sp) /* registers r14-r31 are callee saved */
+ stfd ftmp1,14*4(sp) /* registers f14-f31 are callee saved */
+ stfd ftmp2,16*4(sp)
+
+#if defined(__DARWIN__)
+ stw t1,18*4(r1)
+ stw t2,19*4(r1)
+ stw t3,20*4(r1)
+ stw t4,21*4(r1)
+ stw t5,22*4(r1)
+ stw t6,23*4(r1)
+ stw t7,24*4(r1)
+
+ stfd ft0,26*4(r1)
+ stfd ft1,28*4(r1)
+ stfd ft2,30*4(r1)
+ stfd ft3,32*4(r1)
+ stfd ft4,34*4(r1)
+ stfd ft5,36*4(r1)
+#else
+ SAVE_TEMPORARY_REGISTERS(18) /* the offset has to be even */
+#endif
stw r3,36(r1) /* save method pointer for compiler */
mr itmp1,r6 /* pointer to arg block */
mr itmp2,r4 /* arg count */
- mr. itmp2,itmp2
- ble calljava_argsloaded
+ addi itmp1,itmp1,-sizejniblock /* initialize pointer (smaller code) */
+ addi itmp2,itmp2,1 /* initialize argument count */
+ li r17,0 /* initialize integer argument counter */
+ li r18,0 /* initialize float argument counter */
- addi itmp2,itmp2,-1
- lwz r3,offjniitem+4(itmp1)
+L_register_copy:
+ addi itmp1,itmp1,sizejniblock /* goto next argument block */
+ addi itmp2,itmp2,-1 /* argument count - 1 */
mr. itmp2,itmp2
- ble calljava_argsloaded
+ beq L_register_copy_done
- addi itmp2,itmp2,-1
- lwz r4,offjniitem+sizejniblock*1+4(itmp1)
- mr. itmp2,itmp2
- ble calljava_argsloaded
- addi itmp2,itmp2,-1
+ lwz itmp3,offjniitemtype+4(itmp1)
+ andi. r0,itmp3,0x0002 /* is this a float/double type? */
+ bne L_register_handle_float
- addi itmp2,itmp2,-1
- lwz r5,offjniitem+sizejniblock*2+4(itmp1)
- mr. itmp2,itmp2
- ble calljava_argsloaded
- addi itmp2,itmp2,-1
+ cmpwi r17,INT_ARG_CNT /* are we out of integer argument */
+ beq L_register_copy /* registers? yes, next loop */
- addi itmp2,itmp2,-1
- lwz r6,offjniitem+sizejniblock*3+4(itmp1)
- mr. itmp2,itmp2
- ble calljava_argsloaded
- addi itmp2,itmp2,-1
+ andi. r0,itmp3,0x0001 /* is this a long type? */
+ bne L_register_handle_long
-calljava_argsloaded:
+L_register_handle_int:
+#if defined(__DARWIN__)
+ lis itmp3,ha16(jumptable_int)
+ addi itmp3,itmp3,lo16(jumptable_int)
+#else
+ lis itmp3,jumptable_int@ha
+ addi itmp3,itmp3,jumptable_int@l
+#endif
+ slwi r19,r17,2 /* multiple of 4-bytes */
+ add itmp3,itmp3,r19 /* calculate address of jumptable */
+ lwz itmp3,0(itmp3) /* load function address */
+ addi r17,r17,1 /* integer argument counter + 1 */
+ mtctr itmp3
+ bctr
+
+L_register_handle_long:
+#if defined(__DARWIN__)
+	lis	itmp3,ha16(jumptable_long)
+	addi	itmp3,itmp3,lo16(jumptable_long)
+#else
+	lis	itmp3,jumptable_long@ha
+	addi	itmp3,itmp3,jumptable_long@l
+#endif
+	addi	r19,r17,1                   /* round up to an even register number */
+	srwi	r19,r19,1
+	slwi	r19,r19,1
+	addi	r17,r19,2                   /* a long consumes an even/odd GPR pair, */
+	                                    /* so advance past BOTH registers      */
+	slwi	r19,r19,2                   /* multiple of 4-bytes */
+	add	itmp3,itmp3,r19             /* calculate address of jumptable */
+	lwz	itmp3,0(itmp3)              /* load function address */
+	mtctr	itmp3
+	bctr
+
+L_register_handle_float:
+L_register_copy_done:
+
+L_stack_copy_done:
addi itmp1,r1,36
#if defined(__DARWIN__)
-/* addis mptr,r31,ha16(_asm_call_jit_compiler-0b)*/
- addi mptr,r31,lo16(asm_call_jit_compiler-0b)
+ lis mptr,ha16(asm_call_jit_compiler)
+ addi mptr,mptr,lo16(asm_call_jit_compiler)
#else
- addi mptr,r31,(asm_call_jit_compiler-0b)@l
+ lis mptr,asm_call_jit_compiler@ha
+ addi mptr,mptr,asm_call_jit_compiler@l
#endif
stw mptr,32(r1)
addi mptr,r1,28
addi pv,itmp1,(asm_calljavafunction2-1b)@l
#endif
-calljava_regrestore2:
- lwz r16,40(r1)
- lwz r17,44(r1)
- lwz r18,48(r1)
- lwz r19,52(r1)
- lwz r20,56(r1)
- lwz r21,60(r1)
- lwz r22,64(r1)
- lwz r23,68(r1)
-
- lfd fr16,72(r1)
- lfd fr17,80(r1)
- lfd fr18,88(r1)
- lfd fr19,96(r1)
- lfd fr20,104(r1)
- lfd fr21,112(r1)
- lfd fr22,120(r1)
- lfd fr23,128(r1)
-
- lwz r0,148+LA_LR_OFFSET(r1)
+L_asm_calljavafunction2_return:
+#if defined(__DARWIN__)
+	lwz	itmp1,10*4(sp)              /* register r11 is callee saved */
+#endif
+	lwz	pv,11*4(sp)                 /* restore PV register */
+
+	lwz	itmp3,12*4(sp)              /* registers r14-r31 are callee saved */
+	lfd	ftmp1,14*4(sp)              /* registers f14-f31 are callee saved */
+	lfd	ftmp2,16*4(sp)
+
+#if defined(__DARWIN__)
+ lwz t1,18*4(r1)
+ lwz t2,19*4(r1)
+ lwz t3,20*4(r1)
+ lwz t4,21*4(r1)
+ lwz t5,22*4(r1)
+ lwz t6,23*4(r1)
+ lwz t7,24*4(r1)
+
+ lfd ft0,26*4(r1)
+ lfd ft1,28*4(r1)
+ lfd ft2,30*4(r1)
+ lfd ft3,32*4(r1)
+ lfd ft4,34*4(r1)
+ lfd ft5,36*4(r1)
+#else
+ RESTORE_TEMPORARY_REGISTERS(18) /* the offset has to be even */
+#endif
+
+ lwz r0,40*4+LA_LR_OFFSET(r1)
mtlr r0
- addi r1,r1,148
- lwz pv,-12(r1)
-/* lwz r30,-8(r1)*/
- lwz r31,-4(r1)
+ addi r1,r1,40*4
blr
calljava_xhandler2:
mr r3,itmp1
bl builtin_throw_exception
li v0,0 /* return NULL */
- b calljava_regrestore2
+ b L_asm_calljavafunction2_return
+
+
+jumptable_int:
+ .long L_handle_a0
+ .long L_handle_a1
+ .long L_handle_a2
+ .long L_handle_a3
+ .long L_handle_a4
+ .long L_handle_a5
+ .long L_handle_a6
+ .long L_handle_a7
+
+L_handle_a0:
+ lwz a0,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a1:
+ lwz a1,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a2:
+ lwz a2,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a3:
+ lwz a3,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a4:
+ lwz a4,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a5:
+ lwz a5,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a6:
+ lwz a6,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a7:
+ lwz a7,offjniitem+4(itmp1)
+ b L_register_copy
+
+
+jumptable_long:
+#if defined(__DARWIN__)
+#else
+ /* we have two entries here, so we get the even argument register
+ alignment for linux */
+
+ .long L_handle_a0_a1
+ .long 0
+ .long L_handle_a2_a3
+ .long 0
+ .long L_handle_a4_a5
+ .long 0
+ .long L_handle_a6_a7
+ .long 0
+#endif
+
+L_handle_a0_a1:
+ lwz a0,offjniitem+0(itmp1)
+ lwz a1,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a2_a3:
+ lwz a2,offjniitem+0(itmp1)
+ lwz a3,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a4_a5:
+ lwz a4,offjniitem+0(itmp1)
+ lwz a5,offjniitem+4(itmp1)
+ b L_register_copy
+L_handle_a6_a7:
+ lwz a6,offjniitem+0(itmp1)
+ lwz a7,offjniitem+4(itmp1)
+ b L_register_copy
/* asm_call_jit_compiler *******************************************************
stw mptr,(LA_SIZE + 5*4 + INT_ARG_CNT*4 + FLT_ARG_CNT*8 + 2*4)(r1)
#if defined(__DARWIN__)
- stw a0,28(r1)
- stw a1,32(r1)
- stw a2,36(r1)
- stw a3,40(r1)
- stw a4,44(r1)
- stw a5,48(r1)
- stw a6,52(r1)
- stw a7,56(r1)
-
- stfd fa0,60(r1)
- stfd fa1,68(r1)
- stfd fa2,76(r1)
- stfd fa3,84(r1)
- stfd fa4,92(r1)
- stfd fa5,100(r1)
- stfd fa6,108(r1)
- stfd fa7,116(r1)
- stfd fa8,124(r1)
- stfd fa9,132(r1)
- stfd fa10,140(r1)
- stfd fa11,148(r1)
- stfd fa12,156(r1)
+ stw a0,(LA_WORD_SIZE+5+0)*4(r1)
+ stw a1,(LA_WORD_SIZE+5+1)*4(r1)
+ stw a2,(LA_WORD_SIZE+5+2)*4(r1)
+ stw a3,(LA_WORD_SIZE+5+3)*4(r1)
+ stw a4,(LA_WORD_SIZE+5+4)*4(r1)
+ stw a5,(LA_WORD_SIZE+5+5)*4(r1)
+ stw a6,(LA_WORD_SIZE+5+6)*4(r1)
+ stw a7,(LA_WORD_SIZE+5+7)*4(r1)
+
+ stfd fa0,(LA_WORD_SIZE+5+8)*4(r1)
+ stfd fa1,(LA_WORD_SIZE+5+10)*4(r1)
+ stfd fa2,(LA_WORD_SIZE+5+12)*4(r1)
+ stfd fa3,(LA_WORD_SIZE+5+14)*4(r1)
+ stfd fa4,(LA_WORD_SIZE+5+16)*4(r1)
+ stfd fa5,(LA_WORD_SIZE+5+18)*4(r1)
+ stfd fa6,(LA_WORD_SIZE+5+20)*4(r1)
+ stfd fa7,(LA_WORD_SIZE+5+22)*4(r1)
+ stfd fa8,(LA_WORD_SIZE+5+24)*4(r1)
+ stfd fa9,(LA_WORD_SIZE+5+26)*4(r1)
+ stfd fa10,(LA_WORD_SIZE+5+28)*4(r1)
+ stfd fa11,(LA_WORD_SIZE+5+30)*4(r1)
+ stfd fa12,(LA_WORD_SIZE+5+32)*4(r1)
#else
SAVE_ARGUMENT_REGISTERS(LA_WORD_SIZE+1)
#endif
bl stacktrace_remove_stackframeinfo
#if defined(__DARWIN__)
- lwz a0,28(r1)
- lwz a1,32(r1)
- lwz a2,36(r1)
- lwz a3,40(r1)
- lwz a4,44(r1)
- lwz a5,48(r1)
- lwz a6,52(r1)
- lwz a7,56(r1)
-
- lfd fa0,60(r1)
- lfd fa1,68(r1)
- lfd fa2,76(r1)
- lfd fa3,84(r1)
- lfd fa4,92(r1)
- lfd fa5,100(r1)
- lfd fa6,108(r1)
- lfd fa7,116(r1)
- lfd fa8,124(r1)
- lfd fa9,132(r1)
- lfd fa10,140(r1)
- lfd fa11,148(r1)
- lfd fa12,156(r1)
+ lwz a0,(LA_WORD_SIZE+5+0)*4(r1)
+ lwz a1,(LA_WORD_SIZE+5+1)*4(r1)
+ lwz a2,(LA_WORD_SIZE+5+2)*4(r1)
+ lwz a3,(LA_WORD_SIZE+5+3)*4(r1)
+ lwz a4,(LA_WORD_SIZE+5+4)*4(r1)
+ lwz a5,(LA_WORD_SIZE+5+5)*4(r1)
+ lwz a6,(LA_WORD_SIZE+5+6)*4(r1)
+ lwz a7,(LA_WORD_SIZE+5+7)*4(r1)
+
+ lfd fa0,(LA_WORD_SIZE+5+8)*4(r1)
+ lfd fa1,(LA_WORD_SIZE+5+10)*4(r1)
+ lfd fa2,(LA_WORD_SIZE+5+12)*4(r1)
+ lfd fa3,(LA_WORD_SIZE+5+14)*4(r1)
+ lfd fa4,(LA_WORD_SIZE+5+16)*4(r1)
+ lfd fa5,(LA_WORD_SIZE+5+18)*4(r1)
+ lfd fa6,(LA_WORD_SIZE+5+20)*4(r1)
+ lfd fa7,(LA_WORD_SIZE+5+22)*4(r1)
+ lfd fa8,(LA_WORD_SIZE+5+24)*4(r1)
+ lfd fa9,(LA_WORD_SIZE+5+26)*4(r1)
+ lfd fa10,(LA_WORD_SIZE+5+28)*4(r1)
+ lfd fa11,(LA_WORD_SIZE+5+30)*4(r1)
+ lfd fa12,(LA_WORD_SIZE+5+32)*4(r1)
#else
RESTORE_ARGUMENT_REGISTERS(LA_WORD_SIZE+1)
#endif
L_asm_call_jit_compiler_exception:
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
mflr r0
- stw r0,LA_LR_OFFSET(r1)
- stwu r1,-LA_SIZE_ALIGNED(r1) /* preserve linkage area */
+ stw r0,LA_LR_OFFSET(sp)
+ stwu sp,-LA_SIZE_ALIGNED(sp) /* preserve linkage area */
bl builtin_asm_get_exceptionptrptr
- lwz r0,LA_SIZE_ALIGNED+LA_LR_OFFSET(r1)
+ lwz r0,LA_SIZE_ALIGNED+LA_LR_OFFSET(sp)
mtlr r0
- addi r1,r1,LA_SIZE_ALIGNED
+ addi sp,sp,LA_SIZE_ALIGNED
#else
# if defined(__DARWIN__)
- lwz v0,lo16(_exceptionptr-0b)(pv)
+ lwz v0,lo16(_no_threads_exceptionptr-0b)(pv)
# else
- lis v0,_exceptionptr@ha
- addi v0,v0,_exceptionptr@l
+ lis v0,_no_threads_exceptionptr@ha
+ addi v0,v0,_no_threads_exceptionptr@l
# endif
#endif
lwz xptr,0(v0) /* get the exception pointer */
add pv,pv,itmp3
asm_handle_exception:
- addi r1,r1,-18*4
- stw r0,0*4(r1)
- stw r2,1*4(r1)
- stw r3,2*4(r1)
- stw r4,3*4(r1)
- stw r5,4*4(r1)
- stw r6,5*4(r1)
- stw r7,6*4(r1)
- stw r8,7*4(r1)
- stw r9,8*4(r1)
- stw r10,9*4(r1)
- stw r16,10*4(r1)
- stw r17,11*4(r1)
- stw r18,12*4(r1)
- stw r19,13*4(r1)
- stw r20,14*4(r1)
- stw r21,15*4(r1)
- stw r22,16*4(r1)
- stw r23,17*4(r1)
-
- li r9,1
-ex_stack_loop:
- addi r1,r1,-4*4 /* allocate stack */
- stw xptr,0*4(r1) /* save used register */
- stw xpc,1*4(r1)
- mflr xptr
- stw xptr,2*4(r1)
- stw r9,3*4(r1)
-
- lwz r3,0*4(r1) /* exception pointer */
- lwz r4,MethodPointer(pv) /* method pointer */
- mr r5,xpc /* exception pc */
-/* mr r6,r9 */
- li r6,0 /* line number */
- li r7,4 /* set no unwind flag */
-
- /* XXX no valid stack frame chaining here */
- addi r1,r1,-(24+5*4) /* 24 linkage area + 5 argument * 4 */
- bl builtin_trace_exception
- addi r1,r1,(24+5*4)
-
- lwz xptr,2*4(r1)
- mtlr xptr
- lwz xptr,0*4(r1) /* restore xptr */
- lwz xpc,1*4(r1)
- lwz r9,3*4(r1)
- addi r1,r1,4*4
-
- lwz r3,ExTableSize(pv) /* r3 = exception table size */
- mr. r3,r3 /* if empty table skip */
- beq empty_table
-
- addi r4,pv,ExTableStart /* r4 = start of exception table */
-
-ex_table_loop:
- lwz r5,ExStartPC(r4) /* r5 = exception start pc */
- cmplw r5,xpc /* (startpc <= xpc) */
- bgt ex_table_cont
- lwz r5,ExEndPC(r4) /* r5 = exception end pc */
- cmplw xpc,r5 /* (xpc < endpc) */
- bge ex_table_cont
- lwz r7,ExCatchType(r4) /* r7 = exception catch type */
- mr. r7,r7
- beq ex_handle_it
-
- lwz itmp3,offclassloaded(r7)
- mr. itmp3,itmp3
- bne L_class_loaded
-
- /* XXX no valid stack frame chaining here */
- addi r1,r1,-16*4 /* allocate stack */
- stw r3,7*4(r1) /* save used registers */
- stw r4,8*4(r1) /* 6*4 (linkage) + 1*4 (arg1) + 7*4 (save) */
- stw r9,9*4(r1)
- stw xptr,10*4(r1)
- stw xpc,11*4(r1)
- mflr xptr
- stw xptr,12*4(r1)
- stw r7,13*4(r1)
-
- mr r3,r7 /* arg1 = exceptionclass */
- bl load_class_bootstrap
-
- lwz r3,7*4(r1)
- lwz r4,8*4(r1)
- lwz r9,9*4(r1)
- lwz xptr,10*4(r1)
- lwz xpc,11*4(r1)
- lwz itmp3,12*4(r1)
- mtlr itmp3
- lwz r7,13*4(r1)
- addi r1,r1,16*4
-
-L_class_loaded:
- lwz itmp3,offclasslinked(r7)
- mr. itmp3,itmp3
- /* XXX no valid stack frame chaining here */
- addi r1,r1,-16*4 /* allocate stack */
- stw r7,13*4(r1)
- bne L_class_linked
-
- stw r3,7*4(r1) /* save used registers */
- stw r4,8*4(r1) /* 6*4 (linkage) + 1*4 (arg1) + 7*4 (save) */
- stw r9,9*4(r1)
- stw xptr,10*4(r1)
- stw xpc,11*4(r1)
- mflr xptr
- stw xptr,12*4(r1)
-
- mr r3,r7 /* arg1 = exceptionclass */
- bl link_class
-
- lwz r3,7*4(r1)
- lwz r4,8*4(r1)
- lwz r9,9*4(r1)
- lwz xptr,10*4(r1)
- lwz xpc,11*4(r1)
- lwz itmp3,12*4(r1)
- mtlr itmp3
-
-L_class_linked:
-_crit_restart1:
- lwz r7,13*4(r1)
-_crit_begin1:
- lwz r6,offobjvftbl(xptr) /* r6 = vftblptr(xptr) */
- lwz r7,offclassvftbl(r7) /* r7 = vftblptr(catchtype) class (not obj) */
- lwz r6,offbaseval(r6) /* r6 = baseval(xptr) */
- lwz r8,offbaseval(r7) /* r8 = baseval(catchtype) */
- lwz r7,offdiffval(r7) /* r7 = diffval(catchtype) */
-_crit_end1:
- subf r6,r8,r6 /* r6 = baseval(xptr) - baseval(catchtype) */
- cmplw r6,r7 /* xptr is instanceof catchtype */
- addi r1,r1,16*4
- bgt ex_table_cont /* if (false) continue */
-
-ex_handle_it:
- lwz xpc,ExHandlerPC(r4) /* xpc = exception handler pc */
- mr. r9,r9
- beq ex_jump
-
- lwz r0,0*4(r1)
- lwz r2,1*4(r1)
- lwz r3,2*4(r1)
- lwz r4,3*4(r1)
- lwz r5,4*4(r1)
- lwz r6,5*4(r1)
- lwz r7,6*4(r1)
- lwz r8,7*4(r1)
- lwz r9,8*4(r1)
- lwz r10,9*4(r1)
- lwz r16,10*4(r1)
- lwz r17,11*4(r1)
- lwz r18,12*4(r1)
- lwz r19,13*4(r1)
- lwz r20,14*4(r1)
- lwz r21,15*4(r1)
- lwz r22,16*4(r1)
- lwz r23,17*4(r1)
- addi r1,r1,18*4
-
-ex_jump:
- mtctr xpc
- bctr
+ addi sp,sp,-(ARG_CNT+TMP_CNT)*8 /* create maybe-leaf stackframe */
-ex_table_cont:
- addi r4,r4,ExEntrySize /* next exception table entry */
- addic. r3,r3,-1 /* decrement entry counter */
- bgt ex_table_loop /* if (t0 > 0) next entry */
-
-empty_table:
- mr. r9,r9 /* if here the first time, then */
- beq ex_already_cleared
- addi r1,r1,18*4 /* deallocate stack and */
- li r9,0 /* clear the no unwind flag */
-ex_already_cleared:
- lwz r3,IsSync(pv) /* t0 = SyncOffset */
- mr. r3,r3
- beq no_monitor_exit /* if zero no monitorexit */
-
-#if defined(USE_THREADS)
- add r3,r1,r3
- lwz r6,-4(r3)
-
- addi r1,r1,-6*4
- stw r3,0*4(r1)
- stw r4,1*4(r1)
- stw r9,2*4(r1)
- stw xptr,3*4(r1)
- stw xpc,4*4(r1)
- mflr xptr
- stw xptr,5*4(r1)
-
- mr r3,r6
- /* XXX no valid stack frame chaining here */
- addi r1,r1,-40
- bl builtin_monitorexit
- addi r1,r1,40
-
- lwz xptr,5*4(r1)
- mtlr xptr
- lwz r3,0*4(r1)
- lwz r4,1*4(r1)
- lwz r9,2*4(r1)
- lwz xptr,3*4(r1)
- lwz xpc,4*4(r1)
- addi r1,r1,6*4
+#if defined(__DARWIN__)
+#else
+ SAVE_ARGUMENT_REGISTERS(0) /* we save arg and temp registers in */
+ SAVE_TEMPORARY_REGISTERS(ARG_CNT) /* case this is a leaf method */
#endif
-no_monitor_exit:
- lwz r3,FrameSize(pv) /* r3 = frame size */
- add r1,r1,r3 /* unwind stack */
- mr r3,r1 /* r3 = pointer to save area */
- lwz r4,IsLeaf(pv) /* r4 = is leaf procedure */
- mr. r4,r4
- bne ex_no_restore /* if (leaf) skip */
- lwz r4,LA_LR_OFFSET(r3) /* restore ra */
- mtlr r4 /* t0-- */
-ex_no_restore:
- mflr r4 /* the new xpc is ra */
- mr xpc,r4
- lwz r4,IntSave(pv) /* r4 = saved int register count */
+ li a3,(ARG_CNT+TMP_CNT)*8 /* prepare a3 for handle_exception */
+ li a4,1 /* set maybe-leaf flag */
+
+L_asm_handle_exception_stack_loop:
+	/* frame layout (word slots above the linkage area):                  */
+	/*   LA_SIZE+4*4 xptr | 5*4 xpc | 6*4 pv | 7*4 ra | 8*4 maybe-leaf    */
+	addi	sp,sp,-(LA_WORD_SIZE+4+5)*4 /* allocate stack */
+	stw	xptr,LA_SIZE+4*4(sp)        /* save exception pointer */
+	stw	xpc,LA_SIZE+5*4(sp)         /* save exception pc */
+	stw	pv,LA_SIZE+6*4(sp)          /* save data segment pointer */
+	mflr	r0                          /* save return address... */
+	stw	r0,LA_SIZE+7*4(sp)          /* ...in its own slot; storing it at */
+	                                    /* 5*4 would clobber the saved xpc */
+	add	a3,a3,sp                    /* calculate Java sp into a3... */
+	addi	a3,a3,(LA_WORD_SIZE+4+5)*4
+	stw	a4,LA_SIZE+8*4(sp)          /* save maybe-leaf flag */
+
+	mr	a0,xptr                     /* pass exception pointer */
+	mr	a1,xpc                      /* pass exception pc */
+	mr	a2,pv                       /* pass data segment pointer */
+	/* a3 is still set */
+	bl	exceptions_handle_exception
+
+	mr.	v0,v0
+	beq	L_asm_handle_exception_not_catched
+
+	mr	xpc,v0                      /* move handlerpc into xpc */
+	lwz	xptr,LA_SIZE+4*4(sp)        /* restore exception pointer */
+	lwz	pv,LA_SIZE+6*4(sp)          /* restore data segment pointer */
+	lwz	r0,LA_SIZE+7*4(sp)          /* restore return address */
+	mtlr	r0
+	lwz	a4,LA_SIZE+8*4(sp)          /* get maybe-leaf flag */
+	addi	sp,sp,(LA_WORD_SIZE+4+5)*4  /* free stack frame */
+
+	mr.	a4,a4
+	beq	L_asm_handle_exception_no_leaf
+
+#if defined(__DARWIN__)
+#else
+	RESTORE_ARGUMENT_REGISTERS(0)       /* if this is a leaf method, we have */
+	RESTORE_TEMPORARY_REGISTERS(ARG_CNT)/* to restore arg and temp registers */
+#endif
+
+	addi	sp,sp,(ARG_CNT+TMP_CNT)*8   /* remove maybe-leaf stackframe */
+
+L_asm_handle_exception_no_leaf:
+	mtctr	xpc                         /* jump to the handler */
+	bctr
+
+L_asm_handle_exception_not_catched:
+	lwz	xptr,LA_SIZE+4*4(sp)        /* restore exception pointer */
+	lwz	pv,LA_SIZE+6*4(sp)          /* restore data segment pointer */
+	lwz	r0,LA_SIZE+7*4(sp)          /* restore return address */
+	mtlr	r0
+	lwz	a4,LA_SIZE+8*4(sp)          /* get maybe-leaf flag */
+	addi	sp,sp,(LA_WORD_SIZE+4+5)*4  /* free stack frame */
+
+	mr.	a4,a4
+	beq	L_asm_handle_exception_no_leaf_stack
+
+	addi	sp,sp,(ARG_CNT+TMP_CNT)*8   /* remove maybe-leaf stackframe */
+	li	a4,0                        /* clear the maybe-leaf flag */
+
+L_asm_handle_exception_no_leaf_stack:
+ lwz t0,FrameSize(pv) /* get frame size */
+ add t0,sp,t0 /* pointer to save area */
+
+ lwz t1,IsLeaf(pv) /* is leaf procedure */
+ mr. t1,t1
+ bne L_asm_handle_exception_no_ra_restore
+
+ lwz r0,LA_LR_OFFSET(t0) /* restore ra */
+ mtlr r0
+
+L_asm_handle_exception_no_ra_restore:
+ mflr xpc /* the new xpc is ra */
+ lwz t1,IntSave(pv) /* t1 = saved int register count */
bl ex_int1
ex_int1:
- mflr r5
+ mflr t2 /* t2 = current pc */
#if defined(__DARWIN__)
- addi r5,r5,lo16(ex_int2-ex_int1)
+ addi t2,t2,lo16(ex_int2-ex_int1)
#else
- addi r5,r5,(ex_int2-ex_int1)@l
+ addi t2,t2,(ex_int2-ex_int1)@l
#endif
- slwi r4,r4,2
- subf r5,r4,r5
- mtctr r5
+ slwi t1,t1,2 /* t1 = register count * 4 */
+ subf t2,t1,t2 /* t2 = IntSave - t1 */
+ mtctr t2
bctr
- lwz s0,-40(r3)
- lwz s1,-36(r3)
- lwz s2,-32(r3)
- lwz s3,-28(r3)
- lwz s4,-24(r3)
- lwz s5,-20(r3)
- lwz s6,-16(r3)
- lwz s7,-12(r3)
- lwz s8,-8(r3)
- lwz s9,-4(r3)
+
+ lwz s0,-10*4(t0)
+ lwz s1,-9*4(t0)
+ lwz s2,-8*4(t0)
+ lwz s3,-7*4(t0)
+ lwz s4,-6*4(t0)
+ lwz s5,-5*4(t0)
+ lwz s6,-4*4(t0)
+ lwz s7,-3*4(t0)
+ lwz s8,-2*4(t0)
+ lwz s9,-1*4(t0)
ex_int2:
- subf r3,r4,r3
+ subf t0,t1,t0 /* t0 = t0 - register count * 4 */
- lwz r4,FltSave(pv)
+ lwz t1,FltSave(pv)
bl ex_flt1
ex_flt1:
- mflr r5
+ mflr t2
#if defined(__DARWIN__)
- addi r5,r5,lo16(ex_flt2-ex_flt1)
+ addi t2,t2,lo16(ex_flt2-ex_flt1)
#else
- addi r5,r5,(ex_flt2-ex_flt1)@l
+ addi t2,t2,(ex_flt2-ex_flt1)@l
#endif
- slwi r4,r4,2
- subf r5,r4,r5
- mtctr r5
+ slwi t1,t1,2 /* t1 = register count * 4 */
+ subf t2,t1,t2 /* t2 = FltSave - t1 */
+ mtctr t2
bctr
- lfd fs0,-80(r3)
- lfd fs1,-72(r3)
- lfd fs2,-64(r3)
- lfd fs3,-56(r3)
- lfd fs4,-48(r3)
- lfd fs5,-40(r3)
- lfd fs6,-32(r3)
- lfd fs7,-24(r3)
- lfd fs8,-16(r3)
- lfd fs9,-8(r3)
+
+ lfd fs0,-10*8(t0)
+ lfd fs1,-9*8(t0)
+ lfd fs2,-8*8(t0)
+ lfd fs3,-7*8(t0)
+ lfd fs4,-6*8(t0)
+ lfd fs5,-5*8(t0)
+ lfd fs6,-4*8(t0)
+ lfd fs7,-3*8(t0)
+ lfd fs8,-2*8(t0)
+ lfd fs9,-1*8(t0)
ex_flt2:
+ lwz t0,FrameSize(pv) /* get frame size */
+ add sp,sp,t0 /* unwind stack */
+ li a3,0 /* prepare a3 for handle_exception */
+
mtlr xpc
lwz itmp3,4(xpc)
extsh itmp3,itmp3
lwz itmp3,8(xpc)
srwi itmp3,itmp3,16
cmpwi itmp3,0x3dad
- bne ex_stack_loop
+ bne L_asm_handle_exception_stack_loop
lwz itmp3,8(xpc)
slwi itmp3,itmp3,16
add pv,pv,itmp3
- b ex_stack_loop
+
+ b L_asm_handle_exception_stack_loop
/* asm_wrapper_patcher *********************************************************
*******************************************************************************/
asm_wrapper_patcher:
- mflr r0
- stw r0,8*4+LA_LR_OFFSET(r1) /* skip stack frame of patcher stub */
+ mflr r0 /* get Java return address (leaf) */
+ stw r0,6*4(sp) /* store it in the stub stackframe */
/* keep stack 16-bytes aligned: 6+1+37 = 44 */
- stwu r1,-(LA_SIZE+(5+38)*4+sizestackframeinfo)(r1)
+ stwu sp,-(LA_SIZE+(5+58)*4+sizestackframeinfo)(sp)
-#if 1
+#if defined(__DARWIN__)
stw a0,LA_SIZE+(5+0)*4(r1) /* save argument registers */
stw a1,LA_SIZE+(5+1)*4(r1) /* preserve linkage area (24 bytes) */
stw a2,LA_SIZE+(5+2)*4(r1) /* and 4 bytes for 4 argument */
stfd fa10,LA_SIZE+(5+28)*4(sp)
stfd fa11,LA_SIZE+(5+30)*4(sp)
stfd fa12,LA_SIZE+(5+32)*4(sp)
-#else
- SAVE_ARGUMENT_REGISTERS(LA_WORD_SIZE+1) /* save 8 int/13 float arguments */
-#endif
-#if 0
- stw r2,25*8(r1)
- stw r16,26*8(r1)
- stw r17,27*8(r1)
- stw r18,28*8(r1)
- stw r19,29*8(r1)
- stw r20,30*8(r1)
- stw r21,31*8(r1)
- stw r22,32*8(r1)
- stw r23,33*8(r1)
+ stw t0,(LA_WORD_SIZE+5+33)*4(r1)
+ stw t1,(LA_WORD_SIZE+5+34)*4(r1)
+ stw t2,(LA_WORD_SIZE+5+35)*4(r1)
+ stw t3,(LA_WORD_SIZE+5+36)*4(r1)
+ stw t4,(LA_WORD_SIZE+5+37)*4(r1)
+ stw t5,(LA_WORD_SIZE+5+38)*4(r1)
+ stw t6,(LA_WORD_SIZE+5+39)*4(r1)
+ stw t7,(LA_WORD_SIZE+5+40)*4(r1)
+
+ stfd ft0,(LA_WORD_SIZE+5+42)*4(r1)
+ stfd ft1,(LA_WORD_SIZE+5+44)*4(r1)
+ stfd ft2,(LA_WORD_SIZE+5+46)*4(r1)
+ stfd ft3,(LA_WORD_SIZE+5+48)*4(r1)
+ stfd ft4,(LA_WORD_SIZE+5+50)*4(r1)
+ stfd ft5,(LA_WORD_SIZE+5+52)*4(r1)
+#else
+ SAVE_ARGUMENT_REGISTERS(LA_WORD_SIZE+1) /* save 8 int/8 float arguments */
+ SAVE_TEMPORARY_REGISTERS(LA_WORD_SIZE+1+24)
#endif
- stw itmp1,LA_SIZE+(5+34)*4(sp)
- stw itmp2,LA_SIZE+(5+35)*4(sp)
- stw pv,LA_SIZE+(5+36)*4(sp)
+ stw itmp1,LA_SIZE+(5+54)*4(sp)
+ stw itmp2,LA_SIZE+(5+55)*4(sp)
+ stw pv,LA_SIZE+(5+56)*4(sp)
- addi a0,sp,LA_SIZE+(5+38)*4 /* create stackframe info */
+ addi a0,sp,LA_SIZE+(5+58)*4 /* create stackframe info */
mr a1,pv
- addi a2,sp,(8+LA_WORD_SIZE+5+38)*4+sizestackframeinfo
+ addi a2,sp,(8+LA_WORD_SIZE+5+58)*4+sizestackframeinfo
mr a3,r0 /* this is correct for leafs */
- mr a4,a3 /* pass xpc */
+ lwz a4,((5+LA_WORD_SIZE+5+58)*4+sizestackframeinfo)(sp) /* pass xpc */
bl stacktrace_create_extern_stackframeinfo
- addi a0,sp,(0+LA_WORD_SIZE+5+38)*4+sizestackframeinfo /* pass sp */
- lwz pv,(0+LA_WORD_SIZE+5+38)*4+sizestackframeinfo(sp) /* get function */
- lwz itmp1,LA_SIZE+(5+36)*4(sp) /* move pv to position of fp */
- stw itmp1,(0+LA_WORD_SIZE+5+38)*4+sizestackframeinfo(sp)
+ addi a0,sp,(0+LA_WORD_SIZE+5+58)*4+sizestackframeinfo /* pass sp */
+ lwz pv,(0+LA_WORD_SIZE+5+58)*4+sizestackframeinfo(sp) /* get function */
+ lwz itmp1,LA_SIZE+(5+56)*4(sp) /* move pv to position of fp */
+ stw itmp1,(0+LA_WORD_SIZE+5+58)*4+sizestackframeinfo(sp)
mtctr pv /* call the patcher function */
bctrl
- stw r3,LA_SIZE+(5+37)*4(sp) /* save return value */
+ stw v0,LA_SIZE+(5+57)*4(sp) /* save return value */
- addi a0,sp,LA_SIZE+(5+38)*4
+ addi a0,sp,LA_SIZE+(5+58)*4
bl stacktrace_remove_stackframeinfo /* remove stackframe info */
- lwz itmp3,LA_SIZE+(5+37)*4(sp) /* restore return value into temp reg.*/
-
-#if 1
+#if defined(__DARWIN__)
lwz a0,LA_SIZE+(5+0)*4(r1)
lwz a1,LA_SIZE+(5+1)*4(r1)
lwz a2,LA_SIZE+(5+2)*4(r1)
lfd fa10,LA_SIZE+(5+28)*4(sp)
lfd fa11,LA_SIZE+(5+30)*4(sp)
lfd fa12,LA_SIZE+(5+32)*4(sp)
-#else
- RESTORE_ARGUMENT_REGISTERS(LA_WORD_SIZE+4)/* restore 8 int/13 float args */
-#endif
-#if 0
- lwz r2,25*8(r1)
- lwz r16,26*8(r1)
- lwz r17,27*8(r1)
- lwz r18,28*8(r1)
- lwz r19,29*8(r1)
- lwz r20,30*8(r1)
- lwz r21,31*8(r1)
- lwz r22,32*8(r1)
- lwz r23,33*8(r1)
+ lwz t0,(LA_WORD_SIZE+5+33)*4(r1)
+ lwz t1,(LA_WORD_SIZE+5+34)*4(r1)
+ lwz t2,(LA_WORD_SIZE+5+35)*4(r1)
+ lwz t3,(LA_WORD_SIZE+5+36)*4(r1)
+ lwz t4,(LA_WORD_SIZE+5+37)*4(r1)
+ lwz t5,(LA_WORD_SIZE+5+38)*4(r1)
+ lwz t6,(LA_WORD_SIZE+5+39)*4(r1)
+ lwz t7,(LA_WORD_SIZE+5+40)*4(r1)
+
+ lfd ft0,(LA_WORD_SIZE+5+42)*4(r1)
+ lfd ft1,(LA_WORD_SIZE+5+44)*4(r1)
+ lfd ft2,(LA_WORD_SIZE+5+46)*4(r1)
+ lfd ft3,(LA_WORD_SIZE+5+48)*4(r1)
+ lfd ft4,(LA_WORD_SIZE+5+50)*4(r1)
+ lfd ft5,(LA_WORD_SIZE+5+52)*4(r1)
+#else
+ RESTORE_ARGUMENT_REGISTERS(LA_WORD_SIZE+1) /* restore 8 int/8 float args */
+ RESTORE_TEMPORARY_REGISTERS(LA_WORD_SIZE+1+24)
#endif
- /* get return address (into JIT code) */
- lwz itmp1,(5+LA_WORD_SIZE+5+38)*4+sizestackframeinfo(sp)
- mtlr itmp1
+ lwz itmp1,LA_SIZE+(5+54)*4(sp)
+ lwz itmp2,LA_SIZE+(5+55)*4(sp)
+ lwz pv,LA_SIZE+(5+56)*4(sp)
+ lwz itmp3,LA_SIZE+(5+57)*4(sp) /* restore return value into temp reg.*/
- lwz itmp1,LA_SIZE+(5+34)*4(sp)
- lwz itmp2,LA_SIZE+(5+35)*4(sp)
- lwz pv,LA_SIZE+(5+36)*4(sp)
-
- /* remove stack frame + patcher stub stack */
- addi r1,r1,(8+LA_WORD_SIZE+5+38)*4+sizestackframeinfo
+ lwz r0,(6+LA_WORD_SIZE+5+58)*4+sizestackframeinfo(sp) /* restore RA */
+ mtlr r0
mr. itmp3,itmp3 /* check for an exception */
beq L_asm_wrapper_patcher_exception
- blr /* jump to new patched code */
+ /* get return address (into JIT code) */
+ lwz itmp3,(5+LA_WORD_SIZE+5+58)*4+sizestackframeinfo(sp)
+
+ /* remove stack frame + patcher stub stack */
+ addi sp,sp,(8+LA_WORD_SIZE+5+58)*4+sizestackframeinfo
+
+ mtctr itmp3
+ bctr /* jump to new patched code */
L_asm_wrapper_patcher_exception:
+ lwz xpc,(5+LA_WORD_SIZE+5+58)*4+sizestackframeinfo(sp)
+ addi sp,sp,(8+LA_WORD_SIZE+5+58)*4+sizestackframeinfo
+
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
mflr r0
- stw r0,LA_LR_OFFSET(r1)
- stwu r1,-LA_SIZE_ALIGNED(r1) /* preserve linkage area */
+ stw r0,LA_LR_OFFSET(sp)
+ stwu sp,-(LA_SIZE+1*4)(sp) /* preserve linkage area */
+ stw xpc,LA_SIZE+0*4(sp)
bl builtin_asm_get_exceptionptrptr
- lwz r0,LA_SIZE_ALIGNED+LA_LR_OFFSET(r1)
- mtlr r0
- addi r1,r1,LA_SIZE_ALIGNED
+ lwz xpc,LA_SIZE+0*4(sp)
+ lwz r0,LA_SIZE+1*4+LA_LR_OFFSET(sp)
+ mtlr r0
+ addi sp,sp,LA_SIZE+1*4
#else
# if defined(__DARWIN__)
- lwz v0,lo16(_exceptionptr-0b)(pv)
+ lwz v0,lo16(_no_threads_exceptionptr-0b)(pv)
# else
- lis v0,_exceptionptr@ha
- addi v0,v0,_exceptionptr@l
+ lis v0,_no_threads_exceptionptr@ha
+ addi v0,v0,_no_threads_exceptionptr@l
# endif
#endif
lwz xptr,0(v0) /* get the exception pointer */
li itmp3,0
stw itmp3,0(v0) /* clear the exception pointer */
-
- mflr xpc
b asm_handle_exception
asm_getclassvalues_atomic:
-_crit_restart2:
-_crit_begin2:
+_crit_restart:
+_crit_begin:
lwz r6,offbaseval(r3)
lwz r7,offdiffval(r3)
lwz r8,offbaseval(r4)
-_crit_end2:
+_crit_end:
stw r6,offcast_super_baseval(r5)
stw r7,offcast_super_diffval(r5)
stw r8,offcast_sub_baseval(r5)
asm_criticalsections:
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
- .long _crit_begin1
- .long _crit_end1
- .long _crit_restart1
- .long _crit_begin2
- .long _crit_end2
- .long _crit_restart2
+ .long _crit_begin
+ .long _crit_end
+ .long _crit_restart
#endif
.long 0