Christian Thalinger
Edwin Steiner
- $Id: asmpart.S 5272 2006-08-23 15:55:38Z twisti $
+ $Id: asmpart.S 6147 2006-12-07 23:54:40Z edwin $
*/
NOTE: itmp3 is not restored!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
/* a0 == executionstate *es */
- /* set new sp and pv */
- ldq sp,(offes_sp)(a0)
- ldq pv,(offes_pv)(a0)
+ /* get arguments */
+ mov a1,s1 /* replace_safestack_t *st */
+ mov a0,s2 /* executionstate *es == safe stack */
+
+ /* switch to the safe stack */
+ mov s2,sp
+
+ /* call replace_build_execution_state(st) */
+ mov s1,a0
+ jsr ra,replace_build_execution_state
+
+ /* set new sp */
+ ldq sp,(offes_sp)(s2)
+
+ /* build stack frame */
+ lda sp,(-sizeexecutionstate)(sp)
+
+ /* call replace_free_safestack(st, address of the executionstate_t allocated above) */
+ mov sp,a1 /* tmpes */
+ mov s1,a0 /* st */
+ jsr ra,replace_free_safestack
+
+ /* set new pv */
+ ldq pv,(offes_pv)(sp)
/* copy registers from execution state */
- ldq $0 ,( 0*8+offes_intregs)(a0)
- ldq $1 ,( 1*8+offes_intregs)(a0)
- ldq $2 ,( 2*8+offes_intregs)(a0)
- ldq $3 ,( 3*8+offes_intregs)(a0)
- ldq $4 ,( 4*8+offes_intregs)(a0)
- ldq $5 ,( 5*8+offes_intregs)(a0)
- ldq $6 ,( 6*8+offes_intregs)(a0)
- ldq $7 ,( 7*8+offes_intregs)(a0)
- ldq $8 ,( 8*8+offes_intregs)(a0)
- ldq $9 ,( 9*8+offes_intregs)(a0)
- ldq $10,(10*8+offes_intregs)(a0)
- ldq $11,(11*8+offes_intregs)(a0)
- ldq $12,(12*8+offes_intregs)(a0)
- ldq $13,(13*8+offes_intregs)(a0)
- ldq $14,(14*8+offes_intregs)(a0)
- ldq $15,(15*8+offes_intregs)(a0)
- /* a0 is loaded below */
- ldq $17,(17*8+offes_intregs)(a0)
- ldq $18,(18*8+offes_intregs)(a0)
- ldq $19,(19*8+offes_intregs)(a0)
- ldq $20,(20*8+offes_intregs)(a0)
- ldq $21,(21*8+offes_intregs)(a0)
- ldq $22,(22*8+offes_intregs)(a0)
- ldq $23,(23*8+offes_intregs)(a0)
- ldq $24,(24*8+offes_intregs)(a0)
- ldq $25,(25*8+offes_intregs)(a0)
- ldq $26,(26*8+offes_intregs)(a0)
- /* $27 is pv */
- ldq $28,(28*8+offes_intregs)(a0)
- ldq $29,(29*8+offes_intregs)(a0)
+ ldq $0 ,( 0*8+offes_intregs)(sp)
+ ldq $1 ,( 1*8+offes_intregs)(sp)
+ ldq $2 ,( 2*8+offes_intregs)(sp)
+ ldq $3 ,( 3*8+offes_intregs)(sp)
+ ldq $4 ,( 4*8+offes_intregs)(sp)
+ ldq $5 ,( 5*8+offes_intregs)(sp)
+ ldq $6 ,( 6*8+offes_intregs)(sp)
+ ldq $7 ,( 7*8+offes_intregs)(sp)
+ ldq $8 ,( 8*8+offes_intregs)(sp)
+ ldq $9 ,( 9*8+offes_intregs)(sp)
+ ldq $10,(10*8+offes_intregs)(sp)
+ ldq $11,(11*8+offes_intregs)(sp)
+ ldq $12,(12*8+offes_intregs)(sp)
+ ldq $13,(13*8+offes_intregs)(sp)
+ ldq $14,(14*8+offes_intregs)(sp)
+ ldq $15,(15*8+offes_intregs)(sp)
+ ldq a0, (16*8+offes_intregs)(sp)
+ ldq $17,(17*8+offes_intregs)(sp)
+ ldq $18,(18*8+offes_intregs)(sp)
+ ldq $19,(19*8+offes_intregs)(sp)
+ ldq $20,(20*8+offes_intregs)(sp)
+ ldq $21,(21*8+offes_intregs)(sp)
+ ldq $22,(22*8+offes_intregs)(sp)
+ ldq $23,(23*8+offes_intregs)(sp)
+ ldq $24,(24*8+offes_intregs)(sp)
+ ldq $25,(25*8+offes_intregs)(sp)
+ ldq $26,(26*8+offes_intregs)(sp)
+ /* $27 is pv */
+ ldq $28,(28*8+offes_intregs)(sp)
+ ldq $29,(29*8+offes_intregs)(sp)
/* $30 is sp */
/* $31 is zero */
- ldt $f0 ,( 0*8+offes_fltregs)(a0)
- ldt $f1 ,( 1*8+offes_fltregs)(a0)
- ldt $f2 ,( 2*8+offes_fltregs)(a0)
- ldt $f3 ,( 3*8+offes_fltregs)(a0)
- ldt $f4 ,( 4*8+offes_fltregs)(a0)
- ldt $f5 ,( 5*8+offes_fltregs)(a0)
- ldt $f6 ,( 6*8+offes_fltregs)(a0)
- ldt $f7 ,( 7*8+offes_fltregs)(a0)
- ldt $f8 ,( 8*8+offes_fltregs)(a0)
- ldt $f9 ,( 9*8+offes_fltregs)(a0)
- ldt $f10,(10*8+offes_fltregs)(a0)
- ldt $f11,(11*8+offes_fltregs)(a0)
- ldt $f12,(12*8+offes_fltregs)(a0)
- ldt $f13,(13*8+offes_fltregs)(a0)
- ldt $f14,(14*8+offes_fltregs)(a0)
- ldt $f15,(15*8+offes_fltregs)(a0)
- ldt $f16,(16*8+offes_fltregs)(a0)
- ldt $f17,(17*8+offes_fltregs)(a0)
- ldt $f18,(18*8+offes_fltregs)(a0)
- ldt $f19,(19*8+offes_fltregs)(a0)
- ldt $f20,(20*8+offes_fltregs)(a0)
- ldt $f21,(21*8+offes_fltregs)(a0)
- ldt $f22,(22*8+offes_fltregs)(a0)
- ldt $f23,(23*8+offes_fltregs)(a0)
- ldt $f24,(24*8+offes_fltregs)(a0)
- ldt $f25,(25*8+offes_fltregs)(a0)
- ldt $f26,(26*8+offes_fltregs)(a0)
- ldt $f27,(27*8+offes_fltregs)(a0)
- ldt $f28,(28*8+offes_fltregs)(a0)
- ldt $f29,(29*8+offes_fltregs)(a0)
- ldt $f30,(30*8+offes_fltregs)(a0)
- ldt $f31,(31*8+offes_fltregs)(a0)
+ ldt $f0 ,( 0*8+offes_fltregs)(sp)
+ ldt $f1 ,( 1*8+offes_fltregs)(sp)
+ ldt $f2 ,( 2*8+offes_fltregs)(sp)
+ ldt $f3 ,( 3*8+offes_fltregs)(sp)
+ ldt $f4 ,( 4*8+offes_fltregs)(sp)
+ ldt $f5 ,( 5*8+offes_fltregs)(sp)
+ ldt $f6 ,( 6*8+offes_fltregs)(sp)
+ ldt $f7 ,( 7*8+offes_fltregs)(sp)
+ ldt $f8 ,( 8*8+offes_fltregs)(sp)
+ ldt $f9 ,( 9*8+offes_fltregs)(sp)
+ ldt $f10,(10*8+offes_fltregs)(sp)
+ ldt $f11,(11*8+offes_fltregs)(sp)
+ ldt $f12,(12*8+offes_fltregs)(sp)
+ ldt $f13,(13*8+offes_fltregs)(sp)
+ ldt $f14,(14*8+offes_fltregs)(sp)
+ ldt $f15,(15*8+offes_fltregs)(sp)
+ ldt $f16,(16*8+offes_fltregs)(sp)
+ ldt $f17,(17*8+offes_fltregs)(sp)
+ ldt $f18,(18*8+offes_fltregs)(sp)
+ ldt $f19,(19*8+offes_fltregs)(sp)
+ ldt $f20,(20*8+offes_fltregs)(sp)
+ ldt $f21,(21*8+offes_fltregs)(sp)
+ ldt $f22,(22*8+offes_fltregs)(sp)
+ ldt $f23,(23*8+offes_fltregs)(sp)
+ ldt $f24,(24*8+offes_fltregs)(sp)
+ ldt $f25,(25*8+offes_fltregs)(sp)
+ ldt $f26,(26*8+offes_fltregs)(sp)
+ ldt $f27,(27*8+offes_fltregs)(sp)
+ ldt $f28,(28*8+offes_fltregs)(sp)
+ ldt $f29,(29*8+offes_fltregs)(sp)
+ ldt $f30,(30*8+offes_fltregs)(sp)
+ ldt $f31,(31*8+offes_fltregs)(sp)
/* load new pc */
- ldq itmp3,offes_pc(a0)
+ ldq itmp3,offes_pc(sp)
+
+ /* remove stack frame */
+
+ lda sp,(sizeexecutionstate)(sp)
- /* load a0 */
-
- ldq a0,(16*8+offes_intregs)(a0)
-
/* jump to new code */
jmp zero,(itmp3)
Changes: Christian Thalinger
Edwin Steiner
- $Id: asmpart.h 5950 2006-11-11 17:08:14Z edwin $
+ $Id: asmpart.h 6147 2006-12-07 23:54:40Z edwin $
*/
/* functions for on-stack replacement */
void asm_replacement_out(void);
-void asm_replacement_in(executionstate_t *es);
+void asm_replacement_in(executionstate_t *es, replace_safestack_t *st);
#if defined(ENABLE_THREADS)
extern critical_section_node_t asm_criticalsections;
Changes: Joseph Wenninger
Edwin Steiner
- $Id: asmpart.S 5256 2006-08-21 15:25:59Z twisti $
+ $Id: asmpart.S 6147 2006-12-07 23:54:40Z edwin $
*/
This function never returns!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
- mov 4(sp),%ebp /* executionstate *es */
+ /* get arguments */
+ mov 8(sp),%esi /* replace_safestack_t *st */
+ mov 4(sp),%ebp /* executionstate *es == safe stack */
+
+ /* switch to the safe stack and build a stack frame */
+ mov %ebp,sp
+ sub $(1*4),sp
+
+ /* call replace_build_execution_state(st) */
+ mov %esi,(0*4)(sp)
+ call replace_build_execution_state
/* set new sp */
- mov (offes_sp)(%ebp),%esp
-
- /* store address of new code */
+ mov (offes_sp)(%ebp),sp
+
+ /* push address of new code */
push (offes_pc)(%ebp)
-
- /* copy registers from execution state */
- mov (EAX*8+offes_intregs)(%ebp),%eax
- mov (EBX*8+offes_intregs)(%ebp),%ebx
- mov (ECX*8+offes_intregs)(%ebp),%ecx
- mov (EDX*8+offes_intregs)(%ebp),%edx
- mov (ESI*8+offes_intregs)(%ebp),%esi
- mov (EDI*8+offes_intregs)(%ebp),%edi
- mov (EBP*8+offes_intregs)(%ebp),%ebp
+ /* allocate an executionstate_t on the stack */
+ sub $(sizeexecutionstate),sp
+
+ /* call replace_free_safestack(st, address of the executionstate_t allocated above) */
+ push sp /* tmpes */
+ push %esi /* st */
+ call replace_free_safestack
+ add $(2*4),sp
- /* jump to new code */
+ /* copy registers from execution state */
+ mov (EAX*8+offes_intregs)(sp),%eax
+ mov (EBX*8+offes_intregs)(sp),%ebx
+ mov (ECX*8+offes_intregs)(sp),%ecx
+ mov (EDX*8+offes_intregs)(sp),%edx
+ mov (ESI*8+offes_intregs)(sp),%esi
+ mov (EDI*8+offes_intregs)(sp),%edi
+ mov (EBP*8+offes_intregs)(sp),%ebp
+
+ /* pop the execution state off the stack */
+ add $(sizeexecutionstate),sp
+
+ /* jump to new code, hold your thumbs! ;) */
ret
Changes: Christian Thalinger
Edwin Steiner
- $Id: asmpart.S 5578 2006-09-29 11:35:04Z twisti $
+ $Id: asmpart.S 6147 2006-12-07 23:54:40Z edwin $
*/
/* XXX we should find a cleaner solution here */
#define REPLACEMENT_ROOM 512
+#define sizeexecutionstate_ALIGNED ((sizeexecutionstate + 15) & ~15)
+
asm_replacement_out:
/* create stack frame */
- addi sp,sp,-(sizeexecutionstate + REPLACEMENT_ROOM) /* XXX align */
+ addi sp,sp,-(sizeexecutionstate_ALIGNED + REPLACEMENT_ROOM)
/* save link register */
mflr itmp3
/* save registers in execution state */
- stw r0 ,( 0*8+offes_intregs)(sp)
- stw r1 ,( 1*8+offes_intregs)(sp)
- stw r2 ,( 2*8+offes_intregs)(sp)
- stw r3 ,( 3*8+offes_intregs)(sp)
- stw r4 ,( 4*8+offes_intregs)(sp)
- stw r5 ,( 5*8+offes_intregs)(sp)
- stw r6 ,( 6*8+offes_intregs)(sp)
- stw r7 ,( 7*8+offes_intregs)(sp)
- stw r8 ,( 8*8+offes_intregs)(sp)
- stw r9 ,( 9*8+offes_intregs)(sp)
- stw r10,(10*8+offes_intregs)(sp)
- stw r11,(11*8+offes_intregs)(sp)
- stw r12,(12*8+offes_intregs)(sp)
- stw r13,(13*8+offes_intregs)(sp)
- stw r14,(14*8+offes_intregs)(sp)
- stw r15,(15*8+offes_intregs)(sp)
- stw r16,(16*8+offes_intregs)(sp) /* link register */
- stw r17,(17*8+offes_intregs)(sp)
- stw r18,(18*8+offes_intregs)(sp)
- stw r19,(19*8+offes_intregs)(sp)
- stw r20,(20*8+offes_intregs)(sp)
- stw r21,(21*8+offes_intregs)(sp)
- stw r22,(22*8+offes_intregs)(sp)
- stw r23,(23*8+offes_intregs)(sp)
- stw r24,(24*8+offes_intregs)(sp)
- stw r25,(25*8+offes_intregs)(sp)
- stw r26,(26*8+offes_intregs)(sp)
- stw r27,(27*8+offes_intregs)(sp)
- stw r28,(28*8+offes_intregs)(sp)
- stw r29,(29*8+offes_intregs)(sp)
- stw r30,(30*8+offes_intregs)(sp)
- stw r31,(31*8+offes_intregs)(sp)
+ stw r0 ,( 0*8+4+offes_intregs)(sp)
+ stw r1 ,( 1*8+4+offes_intregs)(sp)
+ stw r2 ,( 2*8+4+offes_intregs)(sp)
+ stw r3 ,( 3*8+4+offes_intregs)(sp)
+ stw r4 ,( 4*8+4+offes_intregs)(sp)
+ stw r5 ,( 5*8+4+offes_intregs)(sp)
+ stw r6 ,( 6*8+4+offes_intregs)(sp)
+ stw r7 ,( 7*8+4+offes_intregs)(sp)
+ stw r8 ,( 8*8+4+offes_intregs)(sp)
+ stw r9 ,( 9*8+4+offes_intregs)(sp)
+ stw r10,(10*8+4+offes_intregs)(sp)
+ stw r11,(11*8+4+offes_intregs)(sp)
+ stw r12,(12*8+4+offes_intregs)(sp)
+ stw r13,(13*8+4+offes_intregs)(sp)
+ stw r14,(14*8+4+offes_intregs)(sp)
+ stw r15,(15*8+4+offes_intregs)(sp)
+ stw r16,(16*8+4+offes_intregs)(sp) /* link register stored as itmp3 */
+ stw r17,(17*8+4+offes_intregs)(sp)
+ stw r18,(18*8+4+offes_intregs)(sp)
+ stw r19,(19*8+4+offes_intregs)(sp)
+ stw r20,(20*8+4+offes_intregs)(sp)
+ stw r21,(21*8+4+offes_intregs)(sp)
+ stw r22,(22*8+4+offes_intregs)(sp)
+ stw r23,(23*8+4+offes_intregs)(sp)
+ stw r24,(24*8+4+offes_intregs)(sp)
+ stw r25,(25*8+4+offes_intregs)(sp)
+ stw r26,(26*8+4+offes_intregs)(sp)
+ stw r27,(27*8+4+offes_intregs)(sp)
+ stw r28,(28*8+4+offes_intregs)(sp)
+ stw r29,(29*8+4+offes_intregs)(sp)
+ stw r30,(30*8+4+offes_intregs)(sp)
+ stw r31,(31*8+4+offes_intregs)(sp)
stfd fr0 ,( 0*8+offes_fltregs)(sp)
stfd fr1 ,( 1*8+offes_fltregs)(sp)
stfd fr31,(31*8+offes_fltregs)(sp)
/* calculate sp of method */
- addi itmp1,sp,(sizeexecutionstate + REPLACEMENT_ROOM + 4*4)
+ addi itmp1,sp,(sizeexecutionstate_ALIGNED + REPLACEMENT_ROOM + 4*4)
stw itmp1,(offes_sp)(sp)
/* store pv */
NOTE: itmp3 is not restored!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
- /* a0 == executionstate *es */
+ /* a0 == executionstate *es */
+ /* a1 == replace_safestack_t *st */
+
+ /* get arguments */
+ mr s1,a1 /* replace_safestack_t *st */
+ mr s2,a0 /* executionstate *es == safe stack */
+
+ /* switch to the safe stack */
+ mr sp,s2
+
+ /* reserve linkage area */
+ addi sp,sp,-(LA_SIZE_ALIGNED)
+
+ /* call replace_build_execution_state(st) */
+ mr a0,s1
+ bl replace_build_execution_state
- /* set new sp and pv */
- lwz sp,(offes_sp)(a0)
- lwz pv,(offes_pv)(a0)
+ /* set new sp */
+ lwz sp,(offes_sp)(s2)
+
+ /* build stack frame */
+ addi sp,sp,-(sizeexecutionstate_ALIGNED)
+
+ /* call replace_free_safestack(st, address of the executionstate_t allocated above) */
+ mr a1,sp /* tmpes */
+ mr a0,s1 /* st */
+ addi sp,sp,-(LA_SIZE_ALIGNED) /* reserve linkage area */
+ bl replace_free_safestack
+ addi sp,sp,+(LA_SIZE_ALIGNED) /* tear down linkage area */
+
+ /* set new pv */
+ lwz pv,(offes_pv)(sp)
/* copy registers from execution state */
- lwz r0 ,( 0*8+offes_intregs)(a0)
- /* r1 is sp */
- /* r2 is reserved */
- /* a0 is loaded below */
- lwz r4 ,( 4*8+offes_intregs)(a0)
- lwz r5 ,( 5*8+offes_intregs)(a0)
- lwz r6 ,( 6*8+offes_intregs)(a0)
- lwz r7 ,( 7*8+offes_intregs)(a0)
- lwz r8 ,( 8*8+offes_intregs)(a0)
- lwz r9 ,( 9*8+offes_intregs)(a0)
- lwz r10,(10*8+offes_intregs)(a0)
- lwz r11,(11*8+offes_intregs)(a0)
- lwz r12,(12*8+offes_intregs)(a0)
- /* r13 is pv */
- lwz r14,(14*8+offes_intregs)(a0)
- lwz r15,(15*8+offes_intregs)(a0)
- lwz r16,(16*8+offes_intregs)(a0) /* link register */
- lwz r17,(17*8+offes_intregs)(a0)
- lwz r18,(18*8+offes_intregs)(a0)
- lwz r19,(19*8+offes_intregs)(a0)
- lwz r20,(20*8+offes_intregs)(a0)
- lwz r21,(21*8+offes_intregs)(a0)
- lwz r22,(22*8+offes_intregs)(a0)
- lwz r23,(23*8+offes_intregs)(a0)
- lwz r24,(24*8+offes_intregs)(a0)
- lwz r25,(25*8+offes_intregs)(a0)
- lwz r26,(26*8+offes_intregs)(a0)
- lwz r27,(27*8+offes_intregs)(a0)
- lwz r28,(28*8+offes_intregs)(a0)
- lwz r29,(29*8+offes_intregs)(a0)
- lwz r30,(30*8+offes_intregs)(a0)
- lwz r31,(31*8+offes_intregs)(a0)
+ lwz r0 ,( 0*8+4+offes_intregs)(sp)
+ /* r1 is sp */
+ /* r2 is reserved */
+ lwz a0 ,( 3*8+4+offes_intregs)(sp)
+ lwz r4 ,( 4*8+4+offes_intregs)(sp)
+ lwz r5 ,( 5*8+4+offes_intregs)(sp)
+ lwz r6 ,( 6*8+4+offes_intregs)(sp)
+ lwz r7 ,( 7*8+4+offes_intregs)(sp)
+ lwz r8 ,( 8*8+4+offes_intregs)(sp)
+ lwz r9 ,( 9*8+4+offes_intregs)(sp)
+ lwz r10,(10*8+4+offes_intregs)(sp)
+ lwz r11,(11*8+4+offes_intregs)(sp)
+ lwz r12,(12*8+4+offes_intregs)(sp)
+ /* r13 is pv */
+ lwz r14,(14*8+4+offes_intregs)(sp)
+ lwz r15,(15*8+4+offes_intregs)(sp)
+ lwz r16,(16*8+4+offes_intregs)(sp) /* itmp3, later to link register */
+ lwz r17,(17*8+4+offes_intregs)(sp)
+ lwz r18,(18*8+4+offes_intregs)(sp)
+ lwz r19,(19*8+4+offes_intregs)(sp)
+ lwz r20,(20*8+4+offes_intregs)(sp)
+ lwz r21,(21*8+4+offes_intregs)(sp)
+ lwz r22,(22*8+4+offes_intregs)(sp)
+ lwz r23,(23*8+4+offes_intregs)(sp)
+ lwz r24,(24*8+4+offes_intregs)(sp)
+ lwz r25,(25*8+4+offes_intregs)(sp)
+ lwz r26,(26*8+4+offes_intregs)(sp)
+ lwz r27,(27*8+4+offes_intregs)(sp)
+ lwz r28,(28*8+4+offes_intregs)(sp)
+ lwz r29,(29*8+4+offes_intregs)(sp)
+ lwz r30,(30*8+4+offes_intregs)(sp)
+ lwz r31,(31*8+4+offes_intregs)(sp)
- lfd fr0 ,( 0*8+offes_fltregs)(a0)
- lfd fr1 ,( 1*8+offes_fltregs)(a0)
- lfd fr2 ,( 2*8+offes_fltregs)(a0)
- lfd fr3 ,( 3*8+offes_fltregs)(a0)
- lfd fr4 ,( 4*8+offes_fltregs)(a0)
- lfd fr5 ,( 5*8+offes_fltregs)(a0)
- lfd fr6 ,( 6*8+offes_fltregs)(a0)
- lfd fr7 ,( 7*8+offes_fltregs)(a0)
- lfd fr8 ,( 8*8+offes_fltregs)(a0)
- lfd fr9 ,( 9*8+offes_fltregs)(a0)
- lfd fr10,(10*8+offes_fltregs)(a0)
- lfd fr11,(11*8+offes_fltregs)(a0)
- lfd fr12,(12*8+offes_fltregs)(a0)
- lfd fr13,(13*8+offes_fltregs)(a0)
- lfd fr14,(14*8+offes_fltregs)(a0)
- lfd fr15,(15*8+offes_fltregs)(a0)
- lfd fr16,(16*8+offes_fltregs)(a0)
- lfd fr17,(17*8+offes_fltregs)(a0)
- lfd fr18,(18*8+offes_fltregs)(a0)
- lfd fr19,(19*8+offes_fltregs)(a0)
- lfd fr20,(20*8+offes_fltregs)(a0)
- lfd fr21,(21*8+offes_fltregs)(a0)
- lfd fr22,(22*8+offes_fltregs)(a0)
- lfd fr23,(23*8+offes_fltregs)(a0)
- lfd fr24,(24*8+offes_fltregs)(a0)
- lfd fr25,(25*8+offes_fltregs)(a0)
- lfd fr26,(26*8+offes_fltregs)(a0)
- lfd fr27,(27*8+offes_fltregs)(a0)
- lfd fr28,(28*8+offes_fltregs)(a0)
- lfd fr29,(29*8+offes_fltregs)(a0)
- lfd fr30,(30*8+offes_fltregs)(a0)
- lfd fr31,(31*8+offes_fltregs)(a0)
+ lfd fr0 ,( 0*8+offes_fltregs)(sp)
+ lfd fr1 ,( 1*8+offes_fltregs)(sp)
+ lfd fr2 ,( 2*8+offes_fltregs)(sp)
+ lfd fr3 ,( 3*8+offes_fltregs)(sp)
+ lfd fr4 ,( 4*8+offes_fltregs)(sp)
+ lfd fr5 ,( 5*8+offes_fltregs)(sp)
+ lfd fr6 ,( 6*8+offes_fltregs)(sp)
+ lfd fr7 ,( 7*8+offes_fltregs)(sp)
+ lfd fr8 ,( 8*8+offes_fltregs)(sp)
+ lfd fr9 ,( 9*8+offes_fltregs)(sp)
+ lfd fr10,(10*8+offes_fltregs)(sp)
+ lfd fr11,(11*8+offes_fltregs)(sp)
+ lfd fr12,(12*8+offes_fltregs)(sp)
+ lfd fr13,(13*8+offes_fltregs)(sp)
+ lfd fr14,(14*8+offes_fltregs)(sp)
+ lfd fr15,(15*8+offes_fltregs)(sp)
+ lfd fr16,(16*8+offes_fltregs)(sp)
+ lfd fr17,(17*8+offes_fltregs)(sp)
+ lfd fr18,(18*8+offes_fltregs)(sp)
+ lfd fr19,(19*8+offes_fltregs)(sp)
+ lfd fr20,(20*8+offes_fltregs)(sp)
+ lfd fr21,(21*8+offes_fltregs)(sp)
+ lfd fr22,(22*8+offes_fltregs)(sp)
+ lfd fr23,(23*8+offes_fltregs)(sp)
+ lfd fr24,(24*8+offes_fltregs)(sp)
+ lfd fr25,(25*8+offes_fltregs)(sp)
+ lfd fr26,(26*8+offes_fltregs)(sp)
+ lfd fr27,(27*8+offes_fltregs)(sp)
+ lfd fr28,(28*8+offes_fltregs)(sp)
+ lfd fr29,(29*8+offes_fltregs)(sp)
+ lfd fr30,(30*8+offes_fltregs)(sp)
+ lfd fr31,(31*8+offes_fltregs)(sp)
/* restore link register */
/* load new pc */
- lwz itmp3,offes_pc(a0)
+ lwz itmp3,offes_pc(sp)
- /* load a0 */
-
- lwz a0,(3*8+offes_intregs)(a0)
+ /* remove stack frame */
+
+ addi sp,sp,+(sizeexecutionstate_ALIGNED)
/* jump to new code */
m = code->m;
+ /* set codeinfo flags */
+
+ if (jd->isleafmethod)
+ CODE_SETFLAG_LEAFMETHOD(code);
+
/* in instance methods, we may need a rplpoint at the method entry */
#if defined(REPLACE_PATCH_DYNAMIC_CALL)
#if defined(REPLACE_PATCH_DYNAMIC_CALL)
if (topframe && !(rp->method->flags & ACC_STATIC) && rp == code->rplpoints) {
+#if 1
+ /* we are at the start of the method body, so if local 0 is set, */
+ /* it is the instance. */
+ if (frame->javalocaltype[0] == TYPE_ADR)
+ frame->instance = frame->javalocals[0];
+#else
rplalloc instra;
methoddesc *md;
instra.flags = 0;
}
replace_read_value(es, sp, &instra, &(frame->instance));
+#endif
}
#endif /* defined(REPLACE_PATCH_DYNAMIC_CALL) */
}
+/* replace_patch_method_pointer ************************************************
+
+ Patch a method pointer (may be in code, data segment, vftbl, or interface
+ table).
+
+ IN:
+ mpp..............address of the method pointer to patch
+ entrypoint.......the new entrypoint of the method
+ kind.............kind of call to patch, used only for debugging
+
+*******************************************************************************/
+
+static void replace_patch_method_pointer(methodptr *mpp,
+ methodptr entrypoint,
+ const char *kind)
+{
+#if !defined(NDEBUG)
+ codeinfo *oldcode;
+ codeinfo *newcode;
+#endif
+
+ DOLOG( printf("patch method pointer from: %p to %p\n",
+ (void*) *mpp, (void*)entrypoint); );
+
+#if !defined(NDEBUG)
+ oldcode = *(codeinfo **)((u1*)(*mpp) + CodeinfoPointer);
+ newcode = *(codeinfo **)((u1*)(entrypoint) + CodeinfoPointer);
+
+ DOLOG_SHORT( printf("\tpatch %s %p ", kind, (void*) oldcode);
+ method_println(oldcode->m);
+ printf("\t with %p ", (void*) newcode);
+ method_println(newcode->m); );
+
+ assert(oldcode->m == newcode->m);
+#endif
+
+ /* write the new entrypoint */
+
+ *mpp = (methodptr) entrypoint;
+}
+
+
/* replace_patch_future_calls **************************************************
Analyse a call site and depending on the kind of call patch the call, the
IN:
ra...............return address pointing after the call site
- calleeframe......source frame of the callee
- calleecode.......the codeinfo of the callee
+ callerframe......source frame of the caller
+ calleeframe......source frame of the callee, must have been mapped
*******************************************************************************/
-void replace_patch_future_calls(u1 *ra, sourceframe_t *calleeframe, codeinfo *calleecode)
+void replace_patch_future_calls(u1 *ra,
+ sourceframe_t *callerframe,
+ sourceframe_t *calleeframe)
{
- methodptr *mpp;
- bool atentry;
-#if !defined(NDEBUG)
- codeinfo *oldcode;
- codeinfo *newcode;
-#endif
-#if defined(REPLACE_VERBOSE)
- s4 i;
- char *logkind;
- int disas = 0;
-#endif
+ u1 *patchpos;
+ methodptr entrypoint;
+ methodptr oldentrypoint;
+ methodptr *mpp;
+ methodptr *mppend;
+ bool atentry;
+ stackframeinfo sfi;
+ codeinfo *calleecode;
+ methodinfo *calleem;
+ java_objectheader *obj;
+ struct _vftbl *vftbl;
+ s4 i;
assert(ra);
+ assert(callerframe->down == calleeframe);
+
+ /* get the new codeinfo and the method that shall be entered */
+
+ calleecode = calleeframe->tocode;
assert(calleecode);
- mpp = NULL;
+ calleem = calleeframe->method;
+ assert(calleem == calleecode->m);
+
+ entrypoint = (methodptr) calleecode->entrypoint;
+
+ /* check if we are at a method-entry rplpoint in the innermost frame */
atentry = (calleeframe->down == NULL)
- && !(calleecode->m->flags & ACC_STATIC)
+ && !(calleem->flags & ACC_STATIC)
&& (calleeframe->fromrp->id == 0); /* XXX */
- DOLOG( printf("bytes at patch position:");
- for (i=0; i<16; ++i)
- printf(" %02x", ra[-16+i]);
- printf("\n"); );
+ /* get the position to patch, in case it was a statically bound call */
- if (ra[-2] == 0xff && ra[-1] == 0xd1
- && ra[-7] == 0xb9)
- {
- DOLOG_SHORT( logkind = "static "; );
- DOLOG( printf("PATCHING static call to "); method_println(calleecode->m); disas = 7; );
- REPLACE_COUNT(stat_staticpatch);
- mpp = (methodptr*)(ra - 6);
- }
-#if defined(REPLACE_PATCH_DYNAMIC_CALL)
- else if (ra[-2] == 0xff && ra[-1] == 0xd2
- && ra[-8] == 0x8b && ra[-7] == 0x91
- && atentry)
- {
- java_objectheader *obj;
- u1 *table;
- u4 offset;
+ sfi.pv = callerframe->fromcode->entrypoint;
+ patchpos = md_get_method_patch_address(ra, &sfi, NULL);
- DOLOG_SHORT( printf("\tinstance: "); java_value_print(TYPE_ADR, calleeframe->instance);
- printf("\n"); );
+ if (patchpos == NULL) {
+ /* the call was dispatched dynamically */
- assert(calleeframe->instance != 0);
+ /* we can only patch such calls if we are at the entry point */
- obj = (java_objectheader *) (ptrint) calleeframe->instance;
- table = (u1*) obj->vftbl;
- offset = *(u4*)(ra - 6);
+ if (!atentry)
+ return;
- if (ra[-10] == 0x8b && ra[-9] == 0x08) {
- mpp = (methodptr *) (table + offset);
- DOLOG_SHORT( logkind = "virtual "; );
- DOLOG( printf("updating virtual call at %p\n", (void*) ra); disas = 8);
- }
- else {
- u4 ioffset = *(u4*)(ra - 12);
- u1 *itable = *(u1**)(table + ioffset);
+ assert((calleem->flags & ACC_STATIC) == 0);
- assert(ra[-14] == 0x8b && ra[-13] == 0x89);
- mpp = (methodptr *) (itable + offset);
- DOLOG_SHORT( logkind = "interface"; );
- DOLOG( printf("updating interface call at %p\n", (void*) ra); disas = 14);
+ /* we need to know the instance */
+
+ if (!calleeframe->instance) {
+ DOLOG_SHORT( printf("WARNING: object instance unknown!\n"); );
+ return;
}
- }
-#endif /* defined(REPLACE_PATCH_DYNAMIC_CALL) */
- if (mpp == NULL)
- return;
+ /* get the vftbl */
- DOLOG(
- u1* u1ptr = ra - disas;
- DISASSINSTR(u1ptr);
- DISASSINSTR(u1ptr);
- if (disas > 8)
- DISASSINSTR(u1ptr);
- fflush(stdout);
- );
+ obj = (java_objectheader *) (ptrint) calleeframe->instance;
+ vftbl = obj->vftbl;
- DOLOG( printf("patch method pointer from: %p to %p\n",
- (void*) *mpp, (void*)calleecode->entrypoint); );
+ assert(vftbl->class->vftbl == vftbl);
-#if !defined(NDEBUG)
- oldcode = *(codeinfo **)((u1*)(*mpp) + CodeinfoPointer);
- newcode = *(codeinfo **)((u1*)(calleecode->entrypoint) + CodeinfoPointer);
+ DOLOG_SHORT( printf("\tclass: "); class_println(vftbl->class); );
- DOLOG_SHORT( printf("\tpatch %s %p ", logkind, (void*) oldcode->entrypoint);
- method_println(oldcode->m);
- printf("\t with %p ", (void*) newcode->entrypoint);
- method_println(newcode->m); );
+ /* patch the vftbl of the class */
- assert(oldcode->m == newcode->m);
-#endif
+ replace_patch_method_pointer(vftbl->table + calleem->vftblindex,
+ entrypoint,
+ "virtual");
- /* write the new entrypoint */
+ /* patch the interface tables */
+
+ oldentrypoint = calleeframe->fromcode->entrypoint;
+ assert(oldentrypoint);
- *mpp = (methodptr) calleecode->entrypoint;
+ for (i=0; i < vftbl->interfacetablelength; ++i) {
+ mpp = vftbl->interfacetable[-i];
+ mppend = mpp + vftbl->interfacevftbllength[i];
+ for (; mpp != mppend; ++mpp)
+ if (*mpp == oldentrypoint) {
+ replace_patch_method_pointer(mpp, entrypoint, "interface");
+ }
+ }
+ }
+ else {
+ /* the call was statically bound */
+
+ replace_patch_method_pointer((methodptr *) patchpos, entrypoint, "static");
+ }
}
es...............execution state
rpcall...........the replacement point at the call site
callerframe......source frame of the caller
- calleecode.......the codeinfo of the callee
- calleeframe......source frame of the callee
+ calleeframe......source frame of the callee, must have been mapped
OUT:
*es..............the execution state after pushing the stack frame
void replace_push_activation_record(executionstate_t *es,
rplpoint *rpcall,
sourceframe_t *callerframe,
- codeinfo *calleecode,
sourceframe_t *calleeframe)
{
s4 reg;
stackslot_t *basesp;
stackslot_t *sp;
u1 *ra;
+ codeinfo *calleecode;
assert(es);
assert(rpcall && rpcall->type == RPLPOINT_TYPE_CALL);
- assert(calleecode);
assert(callerframe);
assert(calleeframe);
assert(calleeframe == callerframe->down);
+ /* the compilation unit we are entering */
+
+ calleecode = calleeframe->tocode;
+ assert(calleecode);
+
/* write the return address */
es->sp -= SIZE_OF_STACKSLOT;
/* set the new pc XXX not needed */
- es->pc = es->code->entrypoint;
+ es->pc = calleecode->entrypoint;
/* build the stackframe */
DOLOG( printf("building stackframe of %d words at %p\n",
- es->code->stackframesize, (void*)es->sp); );
+ calleecode->stackframesize, (void*)es->sp); );
sp = (stackslot_t *) es->sp;
basesp = sp;
- sp -= es->code->stackframesize;
+ sp -= calleecode->stackframesize;
es->sp = (u1*) sp;
/* in debug mode, invalidate stack frame first */
/* save int registers */
reg = INT_REG_CNT;
- for (i=0; i<es->code->savedintcount; ++i) {
+ for (i=0; i<calleecode->savedintcount; ++i) {
while (nregdescint[--reg] != REG_SAV)
;
*--basesp = es->intregs[reg];
/* XXX align? */
reg = FLT_REG_CNT;
- for (i=0; i<es->code->savedfltcount; ++i) {
+ for (i=0; i<calleecode->savedfltcount; ++i) {
while (nregdescfloat[--reg] != REG_SAV)
;
basesp -= STACK_SLOTS_PER_FLOAT;
/* write slots used for synchronization */
- count = code_get_sync_slot_count(es->code);
+ count = code_get_sync_slot_count(calleecode);
assert(count == calleeframe->syncslotcount);
for (i=0; i<count; ++i) {
- sp[es->code->memuse + i] = calleeframe->syncslots[i];
+ sp[calleecode->memuse + i] = calleeframe->syncslots[i];
}
/* set the PV */
- es->pv = es->code->entrypoint;
+ es->pv = calleecode->entrypoint;
/* redirect future invocations */
#else
if (rpcall == callerframe->fromrp)
#endif
- replace_patch_future_calls(ra, calleeframe, calleecode);
+ replace_patch_future_calls(ra, callerframe, calleeframe);
}
}
-/* replace_build_execution_state ***********************************************
+/* replace_build_execution_state_intern ****************************************
Build an execution state for the given (mapped) source state.
!!! CAUTION: This function rewrites the machine stack !!!
+ THIS FUNCTION MUST BE CALLED USING A SAFE STACK AREA!
+
IN:
ss...............the source state. Must have been mapped by
replace_map_source_state before.
*******************************************************************************/
-static void replace_build_execution_state(sourcestate_t *ss,
- executionstate_t *es)
+static void replace_build_execution_state_intern(sourcestate_t *ss,
+ executionstate_t *es)
{
rplpoint *rp;
sourceframe_t *prevframe;
- codeinfo *code;
while (true) {
- code = ss->frames->tocode;
rp = ss->frames->torp;
- assert(code);
assert(rp);
- assert(es->code == code);
+ assert(es->code == ss->frames->tocode);
DOLOG( printf("creating execution state for%s:\n",
(ss->frames->down == NULL) ? " TOPFRAME" : "");
DOLOG( printf("pushing activation record for:\n");
replace_replacement_point_println(rp, 1); );
- code = ss->frames->tocode;
- replace_push_activation_record(es, rp, prevframe, code, ss->frames);
+ replace_push_activation_record(es, rp, prevframe, ss->frames);
}
DOLOG( replace_executionstate_println(es); );
}
+/* replace_build_execution_state ***********************************************
+
+ This function contains the final phase of replacement. It builds the new
+ execution state, releases dump memory, and returns to the calling
+ assembler function which finishes replacement.
+
+ NOTE: This function is called from asm_replacement_in, with the stack
+ pointer at the start of the safe stack area.
+
+ THIS FUNCTION MUST BE CALLED USING A SAFE STACK AREA!
+
+ CAUTION: This function and its children must not use a lot of stack!
+ There are only REPLACE_SAFESTACK_SIZE bytes of C stack
+ available.
+
+ IN:
+ st...............the safestack containing the necessary data
+
+*******************************************************************************/
+
+void replace_build_execution_state(replace_safestack_t *st)
+{
+ replace_build_execution_state_intern(st->ss, &(st->es));
+
+ DOLOG( replace_executionstate_println(&(st->es)); );
+
+ /* release dump area */
+
+ dump_release(st->dumpsize);
+
+ /* new code is entered after returning */
+
+ DOLOG( printf("JUMPING IN!\n"); fflush(stdout); );
+}
+
+
+/* replace_alloc_safestack *****************************************************
+
+ Allocate a safe stack area to use during the final phase of replacement.
+ The returned area is not initialized. This must be done by the caller.
+
+ RETURN VALUE:
+ a newly allocated replace_safestack_t *
+
+*******************************************************************************/
+
+static replace_safestack_t *replace_alloc_safestack()
+{
+ u1 *mem;
+ replace_safestack_t *st;
+
+ mem = MNEW(u1, sizeof(replace_safestack_t) + REPLACE_STACK_ALIGNMENT - 1);
+
+ st = (replace_safestack_t *) ((ptrint)(mem + REPLACE_STACK_ALIGNMENT - 1)
+ & ~(REPLACE_STACK_ALIGNMENT - 1));
+
+#if !defined(NDEBUG)
+ memset(st, 0xa5, sizeof(replace_safestack_t));
+#endif
+
+ st->mem = mem;
+
+ return st;
+}
+
+
+/* replace_free_safestack ******************************************************
+
+ Free the given safestack structure, making a copy of the contained
+ execution state before freeing it.
+
+ NOTE: This function is called from asm_replacement_in.
+
+ IN:
+ st...............the safestack to free
+ tmpes............where to copy the execution state to
+
+ OUT:
+ *tmpes...........receives a copy of st->es
+
+*******************************************************************************/
+
+void replace_free_safestack(replace_safestack_t *st, executionstate_t *tmpes)
+{
+ u1 *mem;
+
+ /* copy the executionstate_t to the temporary location */
+
+ *tmpes = st->es;
+
+ /* get the memory address to free */
+
+ mem = st->mem;
+
+ /* destroy memory (in debug mode) */
+
+#if !defined(NDEBUG)
+ memset(st, 0xa5, sizeof(replace_safestack_t));
+#endif
+
+ /* free the safe stack struct */
+
+ MFREE(mem, u1, sizeof(replace_safestack_t) + REPLACE_STACK_ALIGNMENT - 1);
+}
+
+
/* replace_me ******************************************************************
This function is called by asm_replacement_out when a thread reaches
void replace_me(rplpoint *rp, executionstate_t *es)
{
- sourcestate_t *ss;
- sourceframe_t *frame;
- s4 dumpsize;
- rplpoint *origrp;
+ sourcestate_t *ss;
+ sourceframe_t *frame;
+ s4 dumpsize;
+ rplpoint *origrp;
+ replace_safestack_t *safestack;
origrp = rp;
es->code = code_find_codeinfo_for_pc(rp->pc);
DOLOG( replace_executionstate_println(es); );
- replace_build_execution_state(ss, es);
+ /* allocate a safe stack area and copy all needed data there */
- DOLOG( replace_executionstate_println(es); );
+ safestack = replace_alloc_safestack();
- /* release dump area */
+ safestack->es = *es;
+ safestack->ss = ss;
+ safestack->dumpsize = dumpsize;
- dump_release(dumpsize);
-
- /* enter new code */
-
- DOLOG( printf("JUMPING IN!\n"); fflush(stdout); );
+ /* call the assembler code for the last phase of replacement */
#if (defined(__I386__) || defined(__X86_64__) || defined(__ALPHA__) || defined(__POWERPC__) || defined(__MIPS__)) && defined(ENABLE_JIT)
- asm_replacement_in(es);
+ asm_replacement_in(&(safestack->es), safestack);
#endif
+
abort(); /* NOT REACHED */
}
printf("\t");
else
printf(" ");
+#if SIZEOF_VOID_P == 8
printf("%-3s = %016llx",regs[i],(unsigned long long)es->intregs[i]);
+#else
+ printf("%-3s = %08lx",regs[i],(unsigned long)es->intregs[i]);
+#endif
if (i%4 == 3)
printf("\n");
}
#include "vm/jit/reg.h"
+/* alignment for the safe stack used during replacement */
+
+#define REPLACE_STACK_ALIGNMENT 16
+
/* the size of the safe stack we use during replacement */
+/* Must be a multiple of REPLACE_STACK_ALIGNMENT. */
-#define REPLACE_SAFESTACK_SIZE 4096 /* bytes */
+#define REPLACE_SAFESTACK_SIZE 16384 /* bytes */
/*** structs *********************************************************/
u1 stack[REPLACE_SAFESTACK_SIZE];
executionstate_t es;
sourcestate_t *ss;
+ u1 *mem; /* start of the allocated memory chunk */
s4 dumpsize;
};
+/*** macros for the codegens *******************************************/
+
+/* Create all replacement points for jd and point the codegen data at the
+   first one; makes the enclosing (codegen) function return false on failure. */
+#define REPLACEMENT_POINTS_INIT(cd, jd) \
+	if (!replace_create_replacement_points(jd)) \
+		return false; \
+	(cd)->replacementpoint = (jd)->code->rplpoints;
+
+/* Set a replacement point at the start of a basic block, but only if the
+   block was flagged for replacement during analysis. */
+#define REPLACEMENT_POINT_BLOCK_START(cd, bptr) \
+	if ((bptr)->bitflags & BBFLAG_REPLACEMENT) \
+		codegen_set_replacement_point((cd) RPLPOINT_CHECK_BB(bptr));
+
+/* Set a replacement point of type INLINE (start of an inlined region). */
+#define REPLACEMENT_POINT_INLINE_START(cd, iptr) \
+	codegen_set_replacement_point(cd RPLPOINT_CHECK(INLINE));
+
+/* Set a BODY-type replacement point; the _notrap variant presumably emits
+   no trap instruction -- confirm against codegen_set_replacement_point_notrap. */
+#define REPLACEMENT_POINT_INLINE_BODY(cd, iptr) \
+	codegen_set_replacement_point_notrap(cd RPLPOINT_CHECK(BODY));
+
+/* Set a replacement point of type RETURN (before a method return). */
+#define REPLACEMENT_POINT_RETURN(cd, iptr) \
+	codegen_set_replacement_point(cd RPLPOINT_CHECK(RETURN));
+
+/* Set a replacement point of type CALL (at an invoke site). */
+#define REPLACEMENT_POINT_INVOKE(cd, iptr) \
+	codegen_set_replacement_point(cd RPLPOINT_CHECK(CALL));
+
+/* After a non-builtin invoke: record the size in bytes of the call code,
+   measured from the replacement point's pc to the current code pointer. */
+#define REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr) \
+	if (iptr->opc != ICMD_BUILTIN) \
+		cd->replacementpoint[-1].callsize = (cd->mcodeptr - cd->mcodebase)\
+					- (ptrint) cd->replacementpoint[-1].pc;
+
+/* Emit the out-of-line replacement stubs for all points of jd. */
+#define REPLACEMENT_EMIT_STUBS(jd) \
+	emit_replacement_stubs(jd);
+
/*** prototypes ********************************************************/
bool replace_create_replacement_points(jitdata *jd);
Changes: Edwin Steiner
- $Id: asmpart.S 5233 2006-08-14 10:59:39Z twisti $
+ $Id: asmpart.S 6147 2006-12-07 23:54:40Z edwin $
*/
This function never returns!
C prototype:
- void asm_replacement_in(executionstate *es);
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
*******************************************************************************/
asm_replacement_in:
-	mov a0,%rbp /* executionstate *es */
+	/* get arguments */
+	mov a1,s1 /* replace_safestack_t *st */
+	mov a0,%rbp /* executionstate *es == safe stack */
+	/* NOTE(review): s1 and %rbp must survive the two C calls below, so */
+	/* both must be callee-saved; s1 is a register alias (md-asm.h) --  */
+	/* assumed callee-saved, confirm against the alias definition.      */
+
+	/* switch to the safe stack */
+	mov %rbp,sp
+
+	/* call replace_build_execution_state(st) */
+	mov s1,a0
+	call replace_build_execution_state@PLT
	/* set new sp */
-	mov (offes_sp)(%rbp),%rsp
-
-	/* store address of new code */
-	push (offes_pc)(%rbp)
-
+	mov (offes_sp)(%rbp),sp
+
+	/* push address of new code */
+	pushq (offes_pc)(%rbp)
+
+	/* allocate an executionstate_t on the stack */
+	sub $(sizeexecutionstate),sp
+	/* NOTE(review): %rsp alignment at the next call depends on          */
+	/* sizeexecutionstate and the pushq above -- confirm the SysV AMD64  */
+	/* 16-byte call-site alignment is preserved.                         */
+
+	/* call replace_free_safestack(st,& of allocated executionstate_t) */
+	mov sp,a1
+	mov s1,a0
+	call replace_free_safestack@PLT
+
	/* copy registers from execution state */
-	movq (XMM0 *8+offes_fltregs)(%rbp),%xmm0
-	movq (XMM1 *8+offes_fltregs)(%rbp),%xmm1
-	movq (XMM2 *8+offes_fltregs)(%rbp),%xmm2
-	movq (XMM3 *8+offes_fltregs)(%rbp),%xmm3
-	movq (XMM4 *8+offes_fltregs)(%rbp),%xmm4
-	movq (XMM5 *8+offes_fltregs)(%rbp),%xmm5
-	movq (XMM6 *8+offes_fltregs)(%rbp),%xmm6
-	movq (XMM7 *8+offes_fltregs)(%rbp),%xmm7
-	movq (XMM8 *8+offes_fltregs)(%rbp),%xmm8
-	movq (XMM9 *8+offes_fltregs)(%rbp),%xmm9
-	movq (XMM10*8+offes_fltregs)(%rbp),%xmm10
-	movq (XMM11*8+offes_fltregs)(%rbp),%xmm11
-	movq (XMM12*8+offes_fltregs)(%rbp),%xmm12
-	movq (XMM13*8+offes_fltregs)(%rbp),%xmm13
-	movq (XMM14*8+offes_fltregs)(%rbp),%xmm14
-	movq (XMM15*8+offes_fltregs)(%rbp),%xmm15
-
-	mov (RAX*8+offes_intregs)(%rbp),%rax
-	mov (RBX*8+offes_intregs)(%rbp),%rbx
-	mov (RCX*8+offes_intregs)(%rbp),%rcx
-	mov (RDX*8+offes_intregs)(%rbp),%rdx
-	mov (RSI*8+offes_intregs)(%rbp),%rsi
-	mov (RDI*8+offes_intregs)(%rbp),%rdi
-	mov (R8 *8+offes_intregs)(%rbp),%r8
-	mov (R9 *8+offes_intregs)(%rbp),%r9
-	mov (R10*8+offes_intregs)(%rbp),%r10
-	mov (R11*8+offes_intregs)(%rbp),%r11
-	mov (R12*8+offes_intregs)(%rbp),%r12
-	mov (R13*8+offes_intregs)(%rbp),%r13
-	mov (R14*8+offes_intregs)(%rbp),%r14
-	mov (R15*8+offes_intregs)(%rbp),%r15
-
-	mov (RBP*8+offes_intregs)(%rbp),%rbp
+	/* the executionstate_t copy now lives at the stack top, so all   */
+	/* loads below are sp-relative (%rbp is restored among them)      */
+	movq (XMM0 *8+offes_fltregs)(sp),%xmm0
+	movq (XMM1 *8+offes_fltregs)(sp),%xmm1
+	movq (XMM2 *8+offes_fltregs)(sp),%xmm2
+	movq (XMM3 *8+offes_fltregs)(sp),%xmm3
+	movq (XMM4 *8+offes_fltregs)(sp),%xmm4
+	movq (XMM5 *8+offes_fltregs)(sp),%xmm5
+	movq (XMM6 *8+offes_fltregs)(sp),%xmm6
+	movq (XMM7 *8+offes_fltregs)(sp),%xmm7
+	movq (XMM8 *8+offes_fltregs)(sp),%xmm8
+	movq (XMM9 *8+offes_fltregs)(sp),%xmm9
+	movq (XMM10*8+offes_fltregs)(sp),%xmm10
+	movq (XMM11*8+offes_fltregs)(sp),%xmm11
+	movq (XMM12*8+offes_fltregs)(sp),%xmm12
+	movq (XMM13*8+offes_fltregs)(sp),%xmm13
+	movq (XMM14*8+offes_fltregs)(sp),%xmm14
+	movq (XMM15*8+offes_fltregs)(sp),%xmm15
+
+	mov (RAX*8+offes_intregs)(sp),%rax
+	mov (RBX*8+offes_intregs)(sp),%rbx
+	mov (RCX*8+offes_intregs)(sp),%rcx
+	mov (RDX*8+offes_intregs)(sp),%rdx
+	mov (RSI*8+offes_intregs)(sp),%rsi
+	mov (RDI*8+offes_intregs)(sp),%rdi
+	mov (RBP*8+offes_intregs)(sp),%rbp
+	mov (R8 *8+offes_intregs)(sp),%r8
+	mov (R9 *8+offes_intregs)(sp),%r9
+	mov (R10*8+offes_intregs)(sp),%r10
+	mov (R11*8+offes_intregs)(sp),%r11
+	mov (R12*8+offes_intregs)(sp),%r12
+	mov (R13*8+offes_intregs)(sp),%r13
+	mov (R14*8+offes_intregs)(sp),%r14
+	mov (R15*8+offes_intregs)(sp),%r15
+
+	/* pop the execution state off the stack */
+	add $(sizeexecutionstate),sp
+	/* the pc pushed above is now at the stack top; ret pops into it */
	/* jump to new code */
	ret