-/* -*- mode: asm; tab-width: 4 -*- */
-/* x86_64/asmpart.S ************************************************************
-* *
-* It contains the Java-C interface functions for x86_64 processors. *
-* *
-* Copyright (c) 1997 A. Krall, R. Grafl, M. Gschwind, M. Probst *
-* *
-* See file COPYRIGHT for information on usage and disclaimer of warranties *
-* *
-* Authors: Andreas Krall EMAIL: cacao@complang.tuwien.ac.at *
-* Reinhard Grafl EMAIL: cacao@complang.tuwien.ac.at *
-* Christian Thalinger EMAIL: cacao@complang.tuwien.ac.at *
-* *
-* Last Change: $Id: asmpart.S 560 2003-11-02 23:30:59Z twisti $ *
-* *
-*******************************************************************************/
+/* src/vm/jit/x86_64/asmpart.S - Java-C interface functions for x86_64
+
+ Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
+ C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
+ E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
+ J. Wenninger, Institut f. Computersprachen - TU Wien
+
+ This file is part of CACAO.
+
+ This program is free software; you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as
+ published by the Free Software Foundation; either version 2, or (at
+ your option) any later version.
+
+ This program is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ 02110-1301, USA.
+
+*/
+
+
+#include "config.h"
+
+#include "vm/jit/x86_64/arch.h"
+#include "vm/jit/x86_64/md-abi.h"
+#include "vm/jit/x86_64/md-asm.h"
+
+#include "vm/jit/abi-asm.h"
+#include "vm/jit/methodheader.h"
-#include "offsets.h"
.text
-/********************* exported functions and variables ***********************/
+/* export functions ***********************************************************/
+
+ .globl asm_vm_call_method
+ .globl asm_vm_call_method_int
+ .globl asm_vm_call_method_long
+ .globl asm_vm_call_method_float
+ .globl asm_vm_call_method_double
+ .globl asm_vm_call_method_exception_handler
+ .globl asm_vm_call_method_end
- .globl asm_calljavamethod
- .globl asm_calljavafunction
.globl asm_call_jit_compiler
- .globl asm_dumpregistersandcall
+
.globl asm_handle_exception
.globl asm_handle_nat_exception
- .globl asm_builtin_checkcast
- .globl asm_builtin_checkarraycast
- .globl asm_builtin_anewarray
- .globl asm_builtin_newarray_array
- .globl asm_builtin_aastore
- .globl asm_builtin_monitorenter
- .globl asm_builtin_monitorexit
- .globl asm_builtin_f2i
- .globl asm_builtin_f2l
- .globl asm_builtin_d2i
- .globl asm_builtin_d2l
- .globl asm_builtin_arrayinstanceof
- .globl asm_perform_threadswitch
- .globl asm_initialize_thread_stack
- .globl asm_switchstackandcall
- .globl asm_getcallingmethod
- .globl asm_builtin_trace
- .globl asm_builtin_exittrace
-
-/*************************** imported functions *******************************/
-
- .globl jit_compile
- .globl builtin_monitorexit
- .globl builtin_throw_exception
- .globl builtin_trace_exception
- .globl findmethod
-
-/********************* function asm_calljavamethod *****************************
-* *
-* This function calls a Java-method (which possibly needs compilation) *
-* with up to 4 parameters. *
-* *
-* This functions calls the JIT-compiler which eventually translates the *
-* method into machine code. *
-* *
-* An possibly throwed exception will be returned to the caller as function *
-* return value, so the java method cannot return a fucntion value (this *
-* function usually calls 'main' and '<clinit>' which do not return a *
-* function value). *
-* *
-* C-prototype: *
-* javaobject_header *asm_calljavamethod (methodinfo *m, *
-* void *arg1, void *arg2, void *arg3, void *arg4); *
-* *
-*******************************************************************************/
+ .globl asm_abstractmethoderror
-#define MethodPointer -8
-#define FrameSize -12
-#define IsSync -16
-#define IsLeaf -20
-#define IntSave -24
-#define FltSave -28
-#define ExTableSize -32
-#define ExTableStart -32
+ .globl asm_patcher_wrapper
-#define ExEntrySize -32
-#define ExStartPC -8
-#define ExEndPC -16
-#define ExHandlerPC -24
-#define ExCatchType -32
+#if defined(ENABLE_REPLACEMENT)
+ .globl asm_replacement_out
+ .globl asm_replacement_in
+#endif
-call_name:
- .ascii "calljavamethod\0\0"
+ .globl asm_builtin_f2i
+ .globl asm_builtin_f2l
+ .globl asm_builtin_d2i
+ .globl asm_builtin_d2l
- .align 8
- .quad 0 /* catch type all */
- .quad calljava_xhandler /* handler pc */
- .quad calljava_xhandler /* end pc */
- .quad asm_calljavamethod /* start pc */
- .long 1 /* extable size */
- .long 0 /* fltsave */
- .long 0 /* intsave */
- .long 0 /* isleaf */
- .long 0 /* IsSync */
- .long 8 /* frame size */
- .quad 0 /* method pointer (pointer to name) */
-
-asm_calljavamethod:
- sub $8,%rsp /* keep %rsp 16-byte aligned */
- mov %rbp,(%rsp)
-
- mov %rdi,%rax /* move function pointer to %rax */
- /* compilerstub uses this */
-
- mov %rsi,%rdi /* pass remaining parameters */
- mov %rdx,%rsi
- mov %rcx,%rdx
- mov %r8,%rcx
-
- lea asm_call_jit_compiler,%r11
- call *%r11 /* call JIT compiler */
-
-calljava_jit:
-calljava_return:
-calljava_ret:
- xor %rax,%rax
- mov (%rsp),%rbp
- add $8,%rsp /* keep %rsp 16-byte aligned */
- ret
-
-calljava_xhandler:
- mov %rax,%rdi /* pass exception pointer */
- call builtin_throw_exception
- mov (%rsp),%rbp
- add $8,%rsp
- ret
+ .globl asm_compare_and_swap
+ .globl asm_memory_barrier
/********************* function asm_calljavafunction ***************************
* *
*******************************************************************************/
-call_name2:
- .ascii "calljavafunction\0\0"
-
.align 8
- .quad 0 /* catch type all */
- .quad calljava_xhandler2 /* handler pc */
- .quad calljava_xhandler2 /* end pc */
- .quad asm_calljavafunction /* start pc */
- .long 1 /* extable size */
- .long 0 /* fltsave */
- .long 0 /* intsave */
- .long 0 /* isleaf */
- .long 0 /* IsSync */
- .long 8 /* frame size */
- .quad 0 /* method pointer (pointer to name) */
-
-asm_calljavafunction:
- sub $8,%rsp /* keep stack 16-byte aligned */
- mov %rbp,(%rsp)
- mov %rdi,%rax /* move function pointer to %rax */
- /* compilerstub uses this */
-
- mov %rsi,%rdi /* pass remaining parameters */
- mov %rdx,%rsi
- mov %rcx,%rdx
- mov %r8,%rcx
-
- lea asm_call_jit_compiler,%r11
- call *%r11 /* call JIT compiler */
-
-calljava_jit2:
-calljava_return2:
-calljava_ret2:
- mov (%rsp),%rbp
- add $8,%rsp /* free stack space */
- ret
-
-calljava_xhandler2:
- mov %rax,%rdi /* pass exception pointer */
- call builtin_throw_exception
- mov (%rsp),%rbp
- add $8,%rsp
- ret
-
+
+ .quad 0 /* catch type all */
+ .quad 0 /* handler pc */
+ .quad 0 /* end pc */
+ .quad 0 /* start pc */
+ .long 1 /* extable size */
+ .long 0 /* ALIGNMENT PADDING */
+ .quad 0 /* line number table start */
+ .quad 0 /* line number table size */
+ .long 0 /* ALIGNMENT PADDING */
+ .long 0 /* fltsave */
+ .long 0 /* intsave */
+ .long 0 /* isleaf */
+ .long 0 /* IsSync */
+ .long 0 /* frame size */
+ .quad 0 /* codeinfo pointer */
+
+asm_vm_call_method:
+asm_vm_call_method_int:
+asm_vm_call_method_long:
+asm_vm_call_method_float:
+asm_vm_call_method_double:
+ sub $(7*8),sp /* keep stack 16-byte aligned */
+ mov %rbx,0*8(sp) /* %rbx is not callee-saved in cacao */
+ mov s0,1*8(sp)
+ mov s1,2*8(sp)
+ mov s2,3*8(sp)
+ mov s3,4*8(sp)
+ mov s4,5*8(sp)
+
+ mov a0,6*8(sp) /* store method PV */
+
+ mov sp,s0 /* save stack pointer */
+
+ mov a1,t0 /* address of data structure */
+ mov a2,itmp1 /* number of stack arguments */
+
+ mov 0*8(t0),a0
+ mov 1*8(t0),a1
+ mov 2*8(t0),a2
+ mov 3*8(t0),a3
+ mov 4*8(t0),a4
+ mov 5*8(t0),a5
+
+ movq 6*8(t0),fa0
+ movq 7*8(t0),fa1
+ movq 8*8(t0),fa2
+ movq 9*8(t0),fa3
+ movq 10*8(t0),fa4
+ movq 11*8(t0),fa5
+ movq 12*8(t0),fa6
+ movq 13*8(t0),fa7
+
+ cmp $0,itmp1l
+ je L_asm_vm_call_method_stack_copy_done
+
+ mov itmp1,itmp2
+ add $1,itmp2 /* keep stack 16-byte aligned */
+ and $0xfffffffffffffffe,itmp2
+ shl $3,itmp2 /* calculate stack size */
+ sub itmp2,sp /* create stack frame */
+ mov sp,itmp2 /* temporary stack pointer */
+
+L_asm_vm_call_method_stack_copy_loop:
+ mov 14*8(t0),itmp3 /* load argument */
+ mov itmp3,0(itmp2) /* store argument on stack */
+
+ sub $1,itmp1l /* subtract 1 argument */
+ add $8,t0 /* set address of next argument */
+ add $8,itmp2 /* increase SP */
+
+ cmp $0,itmp1l
+ jg L_asm_vm_call_method_stack_copy_loop
+
+L_asm_vm_call_method_stack_copy_done:
+ lea (6*8-256)(s0),mptr /* We subtract 256 to force the next */
+ /* move instruction to have a 32-bit */
+ /* offset. */
+
+ mov (0*8+256)(mptr),itmp3 /* load PV */
+ call *itmp3
+
+ mov s0,sp /* restore SP */
+
+L_asm_vm_call_method_return:
+ mov 0*8(sp),%rbx /* restore callee saved registers */
+ mov 1*8(sp),s0
+ mov 2*8(sp),s1
+ mov 3*8(sp),s2
+ mov 4*8(sp),s3
+ mov 5*8(sp),s4
+ add $(7*8),sp /* free stack space */
+ ret
+
+asm_vm_call_method_exception_handler:
+ mov xptr,a0 /* pass exception pointer */
+ call builtin_throw_exception@PLT
+ jmp L_asm_vm_call_method_return
+
+asm_vm_call_method_end:
+ nop
+
/****************** function asm_call_jit_compiler *****************************
* *
* *
*******************************************************************************/
-
asm_call_jit_compiler:
- sub $8,%rsp /* keep stack 16-byte aligned */
-
- mov %rbx,(%rsp) /* save register */
-
- mov 8(%rsp),%r11 /* get return address */
- mov -1(%r11),%bl /* get function code */
- cmp $0xd2,%bl /* called with `call *REG_ITMP2' (%r10)? */
- jne L_not_static_special
+L_asm_call_jit_compiler: /* required for PIC code */
+ sub $(ARG_CNT+1)*8,sp /* +1: keep stack 16-byte aligned */
- sub $11,%r11 /* calculate address of immediate */
- jmp L_call_jit_compile
-
-L_not_static_special:
- cmp $0xd0,%bl /* called with `call *REG_ITMP1' (%rax) */
- jne L_not_virtual_interface
-
- sub $7,%r11 /* calculate address of offset */
- mov (%r11),%r11d /* get offset (32-bit) */
- add %r10,%r11 /* add base address to get method address */
- jmp L_call_jit_compile
+ SAVE_ARGUMENT_REGISTERS(0)
-L_not_virtual_interface: /* a call from asm_calljavamethod */
- xor %r11,%r11
-
-L_call_jit_compile:
- mov (%rsp),%rbx /* restore register */
-
- sub $(24*8),%rsp /* 8 + 48 + 64 + 64 */
-
- mov %r11,0*8(%rsp) /* save address for method pointer */
-
- mov %rdi,1*8(%rsp) /* save arguments */
- mov %rsi,2*8(%rsp)
- mov %rdx,3*8(%rsp)
- mov %rcx,4*8(%rsp)
- mov %r8,5*8(%rsp)
- mov %r9,6*8(%rsp)
-
- movq %xmm0,7*8(%rsp)
- movq %xmm1,8*8(%rsp)
- movq %xmm2,9*8(%rsp)
- movq %xmm3,10*8(%rsp)
- movq %xmm4,11*8(%rsp)
- movq %xmm5,12*8(%rsp)
- movq %xmm6,13*8(%rsp)
- movq %xmm7,14*8(%rsp)
-
- movq %xmm8,15*8(%rsp) /* we use them as callee saved registers */
- movq %xmm9,16*8(%rsp)
- movq %xmm10,17*8(%rsp)
- movq %xmm11,18*8(%rsp)
- movq %xmm12,19*8(%rsp)
- movq %xmm13,20*8(%rsp)
- movq %xmm14,21*8(%rsp)
- movq %xmm15,22*8(%rsp)
-
- mov %rax,%rdi /* pass method pointer */
- call jit_compile
-
- mov 0*8(%rsp),%r11
-
- mov 1*8(%rsp),%rdi
- mov 2*8(%rsp),%rsi
- mov 3*8(%rsp),%rdx
- mov 4*8(%rsp),%rcx
- mov 5*8(%rsp),%r8
- mov 6*8(%rsp),%r9
-
- movq 7*8(%rsp),%xmm0
- movq 8*8(%rsp),%xmm1
- movq 9*8(%rsp),%xmm2
- movq 10*8(%rsp),%xmm3
- movq 11*8(%rsp),%xmm4
- movq 12*8(%rsp),%xmm5
- movq 13*8(%rsp),%xmm6
- movq 14*8(%rsp),%xmm7
-
- movq 15*8(%rsp),%xmm8
- movq 16*8(%rsp),%xmm9
- movq 17*8(%rsp),%xmm10
- movq 18*8(%rsp),%xmm11
- movq 19*8(%rsp),%xmm12
- movq 20*8(%rsp),%xmm13
- movq 21*8(%rsp),%xmm14
- movq 22*8(%rsp),%xmm15
-
- add $(24*8),%rsp
-
- test %r11,%r11 /* is address == 0 (asm_calljavamethod) */
- je L_call_method
-
- mov %rax,(%r11) /* and now save the new pointer */
+ mov itmp1,a0 /* pass methodinfo pointer */
+ mov mptr,a1 /* pass method pointer */
+ mov sp,a2 /* pass java sp */
+ add $(1+ARG_CNT+1)*8,a2
+ mov (ARG_CNT+1)*8(sp),a3 /* pass ra to java function */
+ call jit_asm_compile@PLT
-L_call_method:
- add $8,%rsp /* keep stack 16-byte aligned */
- jmp *%rax /* ...and now call the new method */
+ RESTORE_ARGUMENT_REGISTERS(0)
+ add $(ARG_CNT+1)*8,sp /* remove stack frame */
+ test v0,v0 /* check for exception */
+ je L_asm_call_jit_compiler_exception
-/****************** function asm_dumpregistersandcall **************************
-* *
-* This funtion saves all callee saved registers and calls the function *
-* which is passed as parameter. *
-* *
-* This function is needed by the garbage collector, which needs to access *
-* all registers which are stored on the stack. Unused registers are *
-* cleared to avoid interferances with the GC. *
-* *
-* void asm_dumpregistersandcall (functionptr f); *
-* *
-*******************************************************************************/
-
-asm_dumpregistersandcall:
- sub $(7*8),%rsp /* allocate stack space */
-
- mov %rbx,0*8(%rsp) /* save all callee saved registers */
- mov %rsp,1*8(%rsp)
- mov %rbp,2*8(%rsp)
- mov %r12,3*8(%rsp)
- mov %r13,4*8(%rsp)
- mov %r14,5*8(%rsp)
- mov %r15,6*8(%rsp)
-
- xor %rax,%rax /* intialize the remaining registers */
- xor %rcx,%rcx
- xor %rdx,%rdx
- xor %rsi,%rsi
- xor %r8,%r8
- xor %r9,%r9
- xor %r10,%r10
- xor %r11,%r11
-
- call *%rdi /* call function */
+ jmp *v0 /* ...and now call the new method */
- mov 0*8(%rsp),%rbx
- mov 1*8(%rsp),%rsp
- mov 2*8(%rsp),%rbp
- mov 3*8(%rsp),%r12
- mov 4*8(%rsp),%r13
- mov 5*8(%rsp),%r14
- mov 6*8(%rsp),%r15
+L_asm_call_jit_compiler_exception:
+ call exceptions_get_and_clear_exception@PLT
+ pop xpc /* delete return address */
+ sub $3,xpc /* faulting address is ra - 3 */
+ jmp L_asm_handle_exception
- add $(7*8),%rsp
- ret
-
-/********************* function asm_handle_exception ***************************
+/* asm_handle_exception ********************************************************
* *
* This function handles an exception. It does not use the usual calling *
* conventions. The exception pointer is passed in REG_ITMP1 and the *
* the local exception table for a handler. If no one is found, it unwinds *
* stacks and continues searching the callers. *
* *
-* void asm_handle_exception (exceptionptr, exceptionpc); *
-* *
*******************************************************************************/
asm_handle_nat_exception:
- add $8,%rsp /* clear return address of native stub */
+ add $8,sp /* clear return address of native stub*/
asm_handle_exception:
- sub $(4*8),%rsp
- mov %rax,0*8(%rsp) /* save exception pointer */
- mov %r10,1*8(%rsp) /* save exception pc */
-
- mov %r10,%rdi /* exception pc */
- call findmethod
- mov %rax,%r11
- mov %rax,2*8(%rsp) /* save data segment pointer */
-
- mov 0*8(%rsp),%rax /* restore exception pointer */
- mov 1*8(%rsp),%r10 /* restore exception pc */
-
-ex_stack_loop:
- mov %rax,%rdi /* exception pointer */
- mov MethodPointer(%r11),%rsi /* method pointer */
- mov %r10,%rdx /* exception pc */
- mov $1,%rcx /* set no unwind flag */
- call builtin_trace_exception
-
- mov 2*8(%rsp),%r11 /* %r11 = data segment pointer */
- mov ExTableSize(%r11),%rcx /* %rcx = exception table size */
- test %rcx,%rcx /* if empty table skip */
- je empty_table
-
- lea ExTableStart(%r11),%rdi /* %rdi = start of exception table */
- mov 0*8(%rsp),%rax /* get xptr */
-
-ex_table_loop:
- mov 1*8(%rsp),%r10 /* get xpc */
-
- mov ExStartPC(%rdi),%rdx /* %rdx = exception start pc */
- cmp %r10,%rdx /* %rdx = (startpc <= xpc) */
- jg ex_table_cont /* if (false) continue */
- mov ExEndPC(%rdi),%rdx /* %rdx = exception end pc */
- cmp %rdx,%r10 /* %rdx = (xpc < endpc) */
- jge ex_table_cont /* if (false) continue */
- mov ExCatchType(%rdi),%rdx /* %rdx = exception catch type */
- test %rdx,%rdx /* NULL catches everything */
- je ex_handle_it
-
- mov offobjvftbl(%rax),%rsi /* %rsi = vftblptr(xptr) */
- mov offobjvftbl(%rdx),%rdx /* %rdx = vftblptr(catchtype) class (not obj) */
- mov offbaseval(%rsi),%esi /* %esi = baseval(xptr) */
- mov offbaseval(%rdx),%r10d /* %r10d = baseval(catchtype) */
- mov offdiffval(%rdx),%edx /* %edx = diffval(catchtype) */
- sub %r10d,%esi /* %esi = baseval(xptr) - baseval(catchtype) */
- cmp %edx,%esi /* xptr is instanceof catchtype */
- ja ex_table_cont
-
-ex_handle_it:
- mov ExHandlerPC(%rdi),%r10 /* xpc = exception handler pc */
+L_asm_handle_exception: /* required for PIC code */
+ sub $((ARG_CNT+TMP_CNT)*8),sp /* create maybe-leaf stackframe */
- mov 0*8(%rsp),%rax /* restore exception pointer */
- add $(4*8),%rsp /* free stack frame */
+ SAVE_ARGUMENT_REGISTERS(0) /* we save arg and temp registers in */
+ SAVE_TEMPORARY_REGISTERS(ARG_CNT) /* case this is a leaf method */
- jmp *%r10 /* jump to the handler */
+ mov $((ARG_CNT+TMP_CNT)*8),a3 /* prepare a3 for handle_exception */
+ mov $1,t0 /* set maybe-leaf flag */
-ex_table_cont:
- lea ExEntrySize(%rdi),%rdi /* next exception table entry */
- dec %rcx /* decrement entry counter */
- test %rcx,%rcx /* if (t0 > 0) next entry */
- jg ex_table_loop
-
-empty_table:
- mov 0*8(%rsp),%rax /* restore exception pointer */
- mov 1*8(%rsp),%r10 /* restore exception pc */
- mov 2*8(%rsp),%r11 /* restore data segment pointer */
- add $(4*8),%rsp
-
- mov %rax,%rcx /* save exception pointer */
-
-ex_already_cleared:
- movl IsSync(%r11),%eax /* %rax = SyncOffset */
- test %rax,%rax /* if zero no monitorexit */
- je no_monitor_exit
-
- add %rsp,%rax
- mov -8(%rax),%rdi
+L_asm_handle_exception_stack_loop:
+ sub $(6*8),sp
+ mov xptr,0*8(sp) /* save exception pointer */
+ mov xpc,1*8(sp) /* save exception pc */
+ add sp,a3 /* calculate Java sp into a3... */
+ add $(6*8),a3
+ mov a3,3*8(sp) /* ...and save it */
+ mov t0,4*8(sp) /* save maybe-leaf flag */
- sub $(4*8),%rsp
- mov %rcx,0*8(%rsp)
- mov %r10,1*8(%rsp)
- mov %r11,2*8(%rsp)
+ mov xpc,a0 /* exception pc */
+ call codegen_get_pv_from_pc@PLT
+ mov v0,2*8(sp) /* save data segment pointer */
- call builtin_monitorexit
+ mov 0*8(sp),a0 /* pass exception pointer */
+ mov 1*8(sp),a1 /* pass exception pc */
+ mov v0,a2 /* pass data segment pointer */
+ mov 3*8(sp),a3 /* pass Java stack pointer */
+ call exceptions_handle_exception@PLT
- mov 0*8(%rsp),%rcx
- mov 1*8(%rsp),%r10
- mov 2*8(%rsp),%r11
- add $(4*8),%rsp
-
-no_monitor_exit:
- movl FrameSize(%r11),%eax /* %eax = frame size */
- add %rax,%rsp /* unwind stack */
- mov %rsp,%rax /* %rax = pointer to save area */
-
- movl IntSave(%r11),%edx /* %edx = saved int register count */
- testl %edx,%edx
- je noint
-
- cmpl $1,%edx
- je int1
- cmpl $2,%edx
- je int2
- cmpl $3,%edx
- je int3
- cmpl $4,%edx
- je int4
- cmpl $5,%edx
- je int5
-
- mov -48(%rax),%rbx
-int5:
- mov -40(%rax),%rbp
+ test v0,v0
+ jz L_asm_handle_exception_not_catched
+
+ mov v0,xpc /* move handlerpc into xpc */
+ mov 0*8(sp),xptr /* restore exception pointer */
+ mov 4*8(sp),t0 /* get maybe-leaf flag */
+ add $(6*8),sp /* free stack frame */
+
+ test t0,t0 /* test for maybe-leaf flag */
+ jz L_asm_handle_exception_no_leaf
+
+ RESTORE_ARGUMENT_REGISTERS(0) /* if this is a leaf method, we have */
+ RESTORE_TEMPORARY_REGISTERS(ARG_CNT)/* to restore arg and temp registers */
+
+ add $((ARG_CNT+TMP_CNT)*8),sp /* remove maybe-leaf stackframe */
+
+L_asm_handle_exception_no_leaf:
+ jmp *xpc /* jump to the handler */
+
+L_asm_handle_exception_not_catched:
+ mov 0*8(sp),xptr /* restore exception pointer */
+ mov 2*8(sp),itmp3 /* restore data segment pointer */
+ mov 4*8(sp),t0 /* get maybe-leaf flag */
+ add $(6*8),sp
+
+ test t0,t0
+ jz L_asm_handle_exception_no_leaf_stack
+
+ add $((ARG_CNT+TMP_CNT)*8),sp /* remove maybe-leaf stackframe */
+ xor t0,t0 /* clear the isleaf flags */
+
+L_asm_handle_exception_no_leaf_stack:
+ mov FrameSize(itmp3),itmp2l /* get frame size */
+ add sp,itmp2 /* pointer to save area */
+
+ mov IntSave(itmp3),a0l /* a0l = saved int register count */
+ test a0l,a0l
+ je noint
+
+ cmp $1,a0l
+ je int1
+ cmp $2,a0l
+ je int2
+ cmp $3,a0l
+ je int3
+ cmp $4,a0l
+ je int4
+
+ mov -5*8(itmp2),s0
int4:
- mov -32(%rax),%r12
+ mov -4*8(itmp2),s1
int3:
- mov -24(%rax),%r13
+ mov -3*8(itmp2),s2
int2:
- mov -16(%rax),%r14
+ mov -2*8(itmp2),s3
int1:
- mov -8(%rax),%r15
+ mov -1*8(itmp2),s4
- shll $3,%edx /* multiply by 8 bytes */
- sub %rdx,%rax
+ shl $3,a0l /* multiply by 8 bytes */
+ sub a0,itmp2
noint:
- mov FltSave(%r11),%edx /* %edx = saved flt register count */
- testl %edx,%edx
- je noflt
-
- cmpl $1,%edx
- je flt1
- cmpl $2,%edx
- je flt2
- cmpl $3,%edx
- je flt3
- cmpl $4,%edx
- je flt4
- cmpl $5,%edx
- je flt5
- cmpl $6,%edx
- je flt7
- cmpl $7,%edx
- je flt7
-
- movq -64(%rax),%xmm8
-flt7:
- movq -56(%rax),%xmm9
-flt6:
- movq -48(%rax),%xmm10
-flt5:
- movq -40(%rax),%xmm11
+#if 0
+ mov FltSave(itmp3),a0l /* a0l = saved flt register count */
+ test a0l,a0l
+ je noflt
+
+ cmpl $1,a0l
+ je flt1
+ cmpl $2,a0l
+ je flt2
+ cmpl $3,a0l
+ je flt3
+ cmpl $4,a0l
+ je flt4
+
+ movq -5*8(itmp2),%xmm11
flt4:
- movq -32(%rax),%xmm12
+ movq -4*8(itmp2),%xmm12
flt3:
- movq -24(%rax),%xmm13
+ movq -3*8(itmp2),%xmm13
flt2:
- movq -16(%rax),%xmm14
+ movq -2*8(itmp2),%xmm14
flt1:
- movq -8(%rax),%xmm15
+ movq -1*8(itmp2),%xmm15
-noflt:
- pop %r10 /* the new xpc is return address */
- sub $3,%r10 /* subtract 3 bytes for call */
+noflt:
+#endif
+ mov FrameSize(itmp3),itmp2l /* get frame size */
+ add itmp2,sp /* unwind stack */
- sub $(2*8),%rsp
- mov %rcx,0*8(%rsp)
- mov %r10,1*8(%rsp)
+ /* exception pointer is still set */
+ pop xpc /* the new xpc is return address */
+ sub $3,xpc /* subtract 3 bytes for call */
- mov %r10,%rdi
- call findmethod /* get the new data segment ptr */
- mov %rax,%r11
-
- mov 0*8(%rsp),%rcx
- mov 1*8(%rsp),%r10
- add $(2*8),%rsp
+ xor a3,a3 /* prepare a3 for handle_exception */
+
+ jmp L_asm_handle_exception_stack_loop
- mov %rcx,%rax /* restore saved exception pointer */
- sub $(4*8),%rsp
-
- mov %rax,0*8(%rsp) /* save exception pointer */
- mov %r10,1*8(%rsp) /* save exception pc */
- mov %r11,2*8(%rsp) /* save data segment pointer */
-
- jmp ex_stack_loop
+/* asm_abstractmethoderror *****************************************************
+ Creates and throws an AbstractMethodError.
-/********************* function asm_builtin_monitorenter ***********************
-* *
-* Does null check and calls monitorenter or throws an exception *
-* *
*******************************************************************************/
-asm_builtin_monitorenter:
- test %rdi,%rdi
- je nb_monitorenter /* if (null) throw exception */
- jmp builtin_monitorenter /* else call builtin_monitorenter */
-
-nb_monitorenter:
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
- mov proto_java_lang_NullPointerException,%rax
- jmp asm_handle_exception
-
+asm_abstractmethoderror:
+ mov sp,a0 /* pass java sp */
+ add $1*8,a0
+ mov 0*8(sp),a1 /* pass exception address */
+ sub $3,a1
+ call exceptions_asm_new_abstractmethoderror@PLT
+ /* exception pointer is return value */
+ pop xpc /* get exception address */
+ sub $3,xpc /* exception address is ra - 3 */
+ jmp L_asm_handle_exception
-/********************* function asm_builtin_monitorexit ************************
-* *
-* Does null check and calls monitorexit or throws an exception *
-* *
-*******************************************************************************/
-asm_builtin_monitorexit:
- test %rdi,%rdi
- je nb_monitorexit /* if (null) throw exception */
- jmp builtin_monitorexit /* else call builtin_monitorenter */
+/* asm_patcher_wrapper *********************************************************
-nb_monitorexit:
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
- mov proto_java_lang_NullPointerException,%rax
- jmp asm_handle_exception
+ XXX
+ Stack layout:
+ 40 return address
+ 32 pointer to virtual java_objectheader
+ 24 machine code (which is patched back later)
+ 16 unresolved class/method/field reference
+ 8 data segment displacement from load instructions
+ 0 pointer to patcher function
+ -8 bp
-/********************* function asm_builtin_x2x ********************************
-* *
-* Wrapper functions for float to int corner cases *
-* *
*******************************************************************************/
-asm_builtin_f2i:
- sub $(14*8),%rsp
-
- mov %rdi,0*8(%rsp)
- mov %rsi,1*8(%rsp)
- mov %rdx,2*8(%rsp)
- mov %rcx,3*8(%rsp)
- mov %r8,4*8(%rsp)
- mov %r9,5*8(%rsp)
-
- movq %xmm0,6*8(%rsp)
- movq %xmm1,7*8(%rsp)
- movq %xmm2,8*8(%rsp)
- movq %xmm3,9*8(%rsp)
- movq %xmm4,10*8(%rsp)
- movq %xmm5,11*8(%rsp)
- movq %xmm6,12*8(%rsp)
- movq %xmm7,13*8(%rsp)
-
- movq %xmm8,%xmm0
- call builtin_f2i
-
- mov 0*8(%rsp),%rdi
- mov 1*8(%rsp),%rsi
- mov 2*8(%rsp),%rdx
- mov 3*8(%rsp),%rcx
- mov 4*8(%rsp),%r8
- mov 5*8(%rsp),%r9
-
- movq 6*8(%rsp),%xmm0
- movq 7*8(%rsp),%xmm1
- movq 8*8(%rsp),%xmm2
- movq 9*8(%rsp),%xmm3
- movq 10*8(%rsp),%xmm4
- movq 11*8(%rsp),%xmm5
- movq 12*8(%rsp),%xmm6
- movq 13*8(%rsp),%xmm7
-
- add $(14*8),%rsp
- ret
+asm_patcher_wrapper:
+ push bp /* save base pointer */
+ mov sp,bp /* move actual sp to bp */
+ sub $(3+ARG_CNT+TMP_CNT)*8,sp
+ and $0xfffffffffffffff0,sp /* align sp to 16-byte (this is for */
+ /* leaf functions) */
-asm_builtin_f2l:
- sub $(14*8),%rsp
-
- mov %rdi,0*8(%rsp)
- mov %rsi,1*8(%rsp)
- mov %rdx,2*8(%rsp)
- mov %rcx,3*8(%rsp)
- mov %r8,4*8(%rsp)
- mov %r9,5*8(%rsp)
-
- movq %xmm0,6*8(%rsp)
- movq %xmm1,7*8(%rsp)
- movq %xmm2,8*8(%rsp)
- movq %xmm3,9*8(%rsp)
- movq %xmm4,10*8(%rsp)
- movq %xmm5,11*8(%rsp)
- movq %xmm6,12*8(%rsp)
- movq %xmm7,13*8(%rsp)
-
- movq %xmm8,%xmm0
- call builtin_f2l
-
- mov 0*8(%rsp),%rdi
- mov 1*8(%rsp),%rsi
- mov 2*8(%rsp),%rdx
- mov 3*8(%rsp),%rcx
- mov 4*8(%rsp),%r8
- mov 5*8(%rsp),%r9
-
- movq 6*8(%rsp),%xmm0
- movq 7*8(%rsp),%xmm1
- movq 8*8(%rsp),%xmm2
- movq 9*8(%rsp),%xmm3
- movq 10*8(%rsp),%xmm4
- movq 11*8(%rsp),%xmm5
- movq 12*8(%rsp),%xmm6
- movq 13*8(%rsp),%xmm7
-
- add $(14*8),%rsp
- ret
+ SAVE_ARGUMENT_REGISTERS(3)
+ SAVE_TEMPORARY_REGISTERS(3+ARG_CNT)
-
-asm_builtin_d2i:
- sub $(14*8),%rsp
-
- mov %rdi,0*8(%rsp)
- mov %rsi,1*8(%rsp)
- mov %rdx,2*8(%rsp)
- mov %rcx,3*8(%rsp)
- mov %r8,4*8(%rsp)
- mov %r9,5*8(%rsp)
-
- movq %xmm0,6*8(%rsp)
- movq %xmm1,7*8(%rsp)
- movq %xmm2,8*8(%rsp)
- movq %xmm3,9*8(%rsp)
- movq %xmm4,10*8(%rsp)
- movq %xmm5,11*8(%rsp)
- movq %xmm6,12*8(%rsp)
- movq %xmm7,13*8(%rsp)
-
- movq %xmm8,%xmm0
- call builtin_d2i
-
- mov 0*8(%rsp),%rdi
- mov 1*8(%rsp),%rsi
- mov 2*8(%rsp),%rdx
- mov 3*8(%rsp),%rcx
- mov 4*8(%rsp),%r8
- mov 5*8(%rsp),%r9
-
- movq 6*8(%rsp),%xmm0
- movq 7*8(%rsp),%xmm1
- movq 8*8(%rsp),%xmm2
- movq 9*8(%rsp),%xmm3
- movq 10*8(%rsp),%xmm4
- movq 11*8(%rsp),%xmm5
- movq 12*8(%rsp),%xmm6
- movq 13*8(%rsp),%xmm7
-
- add $(14*8),%rsp
- ret
+ mov itmp1,0*8(sp) /* save itmp1 and itmp2 */
+ mov itmp2,1*8(sp) /* can be used by some instructions */
+ mov bp,a0 /* pass SP of patcher stub */
+ add $(1*8),a0
+ mov $0,a1 /* pass PV (if NULL, use findmethod) */
+ mov $0,a2 /* pass RA (it's on the stack) */
+ call patcher_wrapper@PLT
+ mov v0,2*8(sp) /* save return value */
-asm_builtin_d2l:
- sub $(14*8),%rsp
-
- mov %rdi,0*8(%rsp)
- mov %rsi,1*8(%rsp)
- mov %rdx,2*8(%rsp)
- mov %rcx,3*8(%rsp)
- mov %r8,4*8(%rsp)
- mov %r9,5*8(%rsp)
-
- movq %xmm0,6*8(%rsp)
- movq %xmm1,7*8(%rsp)
- movq %xmm2,8*8(%rsp)
- movq %xmm3,9*8(%rsp)
- movq %xmm4,10*8(%rsp)
- movq %xmm5,11*8(%rsp)
- movq %xmm6,12*8(%rsp)
- movq %xmm7,13*8(%rsp)
-
- movq %xmm8,%xmm0
- call builtin_d2l
-
- mov 0*8(%rsp),%rdi
- mov 1*8(%rsp),%rsi
- mov 2*8(%rsp),%rdx
- mov 3*8(%rsp),%rcx
- mov 4*8(%rsp),%r8
- mov 5*8(%rsp),%r9
-
- movq 6*8(%rsp),%xmm0
- movq 7*8(%rsp),%xmm1
- movq 8*8(%rsp),%xmm2
- movq 9*8(%rsp),%xmm3
- movq 10*8(%rsp),%xmm4
- movq 11*8(%rsp),%xmm5
- movq 12*8(%rsp),%xmm6
- movq 13*8(%rsp),%xmm7
-
- add $(14*8),%rsp
- ret
+ RESTORE_ARGUMENT_REGISTERS(3)
+ RESTORE_TEMPORARY_REGISTERS(3+ARG_CNT)
-
-/*********************** function new_builtin_checkcast ************************
-* *
-* Does the cast check and eventually throws an exception *
-* *
-*******************************************************************************/
+ mov 0*8(sp),itmp1 /* restore itmp1 and itmp2 */
+ mov 1*8(sp),itmp2 /* can be used by some instructions */
+ mov 2*8(sp),itmp3 /* restore return value */
-asm_builtin_checkcast:
- xor %rax,%rax
- mov %rax,(%rax)
- ret
+ mov bp,sp /* restore original sp */
+ pop bp /* restore bp */
+ add $(5*8),sp /* remove patcher stackframe, keep RA */
-
-/******************* function asm_builtin_checkarraycast ***********************
-* *
-* Does the cast check and eventually throws an exception *
-* *
-*******************************************************************************/
+ test itmp3,itmp3 /* exception thrown? */
+ jne L_asm_patcher_wrapper_exception
+ ret /* call new patched code */
-asm_builtin_checkarraycast:
- sub $24,%rsp /* keep stack 16-byte aligned */
- mov %rdi,(%rsp) /* save object pointer */
- call builtin_checkarraycast /* builtin_checkarraycast */
- test %rax,%rax /* if (false) throw exception */
- je nb_carray_throw
- mov (%rsp),%rax /* return object pointer */
- add $24,%rsp /* free stack space */
- ret
-
-nb_carray_throw:
- add $24,%rsp
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
- mov proto_java_lang_ClassCastException,%rax
- jmp asm_handle_exception
+L_asm_patcher_wrapper_exception:
+ mov itmp3,xptr /* get exception */
+ pop xpc /* get and remove return address */
+ jmp L_asm_handle_exception
-
-/******************* function asm_builtin_aastore ******************************
-* *
-* Does the cast check and eventually throws an exception *
-* *
-*******************************************************************************/
+#if defined(ENABLE_REPLACEMENT)
-asm_builtin_aastore:
- sub $24,%rsp /* allocate stack space */
- test %rdi,%rdi /* if null pointer throw exception */
- je nb_aastore_null
+/* asm_replacement_out *********************************************************
- movl offarraysize(%rdi),%eax /* load size */
- cmpl %eax,%esi /* do bound check */
- ja nb_aastore_bound /* if out of bounds throw exception */
+ This code is jumped to from the replacement-out stubs that are executed
+ when a thread reaches an activated replacement point.
- shl $3,%rsi /* index * 8 */
- mov %rdi,%r10
- add %rsi,%r10 /* add index * 8 to arrayref */
-
- mov %r10,(%rsp) /* save store position */
- mov %rdx,8(%rsp) /* save object */
-
- mov %rdx,%rsi /* object is second argument */
- call builtin_canstore /* builtin_canstore(arrayref,object) */
- test %rax,%rax /* if (false) throw exception */
- je nb_aastore_throw
-
- mov (%rsp),%r10 /* restore store position */
- mov 8(%rsp),%rdx /* restore object */
- mov %rdx,offobjarrdata(%r10)/* store objectptr in array */
- add $24,%rsp /* free stack space */
- ret
-
-nb_aastore_null:
- add $24,%rsp
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
-
- mov proto_java_lang_NullPointerException,%rax
- jmp asm_handle_exception
+ The purpose of asm_replacement_out is to read out the parts of the
+ execution state that cannot be accessed from C code, store this state,
+ and then call the C function replace_me.
-nb_aastore_bound:
- add $24,%rsp
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
-
- mov proto_java_lang_ArrayIndexOutOfBoundsException,%rax
- jmp asm_handle_exception
-
-nb_aastore_throw:
- add $24,%rsp
- pop %r10 /* delete return address */
- sub $3,%r10 /* faulting address is return adress - 3 */
-
- mov proto_java_lang_ArrayStoreException,%rax
- jmp asm_handle_exception
+ Stack layout:
+ 8 start of stack inside method to replace
+ 0 rplpoint * info on the replacement point that was reached
-
-/******************* function asm_initialize_thread_stack **********************
-* *
-* initialized a thread stack *
-* (to)->restorePoint = asm_initialize_thread_stack((u1*)(func), (to)->stackEnd)*
-* *
*******************************************************************************/
-asm_initialize_thread_stack:
- sub $(7*8),%rsi
-
- xor %r10,%r10
- mov %r10,0*8(%rsi)
- mov %r10,1*8(%rsi)
- mov %r10,2*8(%rsi)
- mov %r10,3*8(%rsi)
- mov %r10,4*8(%rsi)
- mov %r10,5*8(%rsi)
-
- mov %rdi,6*8(%rsi) /* save (u1*) (func) */
- mov %rsi,%rax /* return restorepoint in %rax */
- ret
+/* some room to accommodate changes of the stack frame size during replacement */
+ /* XXX we should find a cleaner solution here */
+#define REPLACEMENT_ROOM 512
+
+asm_replacement_out:
+	/* sp, itmp1, a0/a1 are register aliases from md-asm.h --
+	   presumably sp == %rsp and a0/a1 are the first two SysV
+	   integer argument registers; confirm against md-asm.h */
+	/* create stack frame */
+	sub     $(sizeexecutionstate + REPLACEMENT_ROOM),sp
+
+	/* save registers in execution state */
+	mov     %rax,(RAX*8+offes_intregs)(sp)
+	mov     %rbx,(RBX*8+offes_intregs)(sp)
+	mov     %rcx,(RCX*8+offes_intregs)(sp)
+	mov     %rdx,(RDX*8+offes_intregs)(sp)
+	mov     %rsi,(RSI*8+offes_intregs)(sp)
+	mov     %rdi,(RDI*8+offes_intregs)(sp)
+	mov     %rbp,(RBP*8+offes_intregs)(sp)
+	movq    $0 ,(RSP*8+offes_intregs)(sp)       /* not used */
+	mov     %r8 ,(R8 *8+offes_intregs)(sp)
+	mov     %r9 ,(R9 *8+offes_intregs)(sp)
+	mov     %r10,(R10*8+offes_intregs)(sp)
+	mov     %r11,(R11*8+offes_intregs)(sp)
+	mov     %r12,(R12*8+offes_intregs)(sp)
+	mov     %r13,(R13*8+offes_intregs)(sp)
+	mov     %r14,(R14*8+offes_intregs)(sp)
+	mov     %r15,(R15*8+offes_intregs)(sp)
+
+	/* the method's sp is recorded separately in offes_sp below,
+	   which is why the RSP slot above is left as 0 */
+	movq    %xmm0 ,(XMM0 *8+offes_fltregs)(sp)
+	movq    %xmm1 ,(XMM1 *8+offes_fltregs)(sp)
+	movq    %xmm2 ,(XMM2 *8+offes_fltregs)(sp)
+	movq    %xmm3 ,(XMM3 *8+offes_fltregs)(sp)
+	movq    %xmm4 ,(XMM4 *8+offes_fltregs)(sp)
+	movq    %xmm5 ,(XMM5 *8+offes_fltregs)(sp)
+	movq    %xmm6 ,(XMM6 *8+offes_fltregs)(sp)
+	movq    %xmm7 ,(XMM7 *8+offes_fltregs)(sp)
+	movq    %xmm8 ,(XMM8 *8+offes_fltregs)(sp)
+	movq    %xmm9 ,(XMM9 *8+offes_fltregs)(sp)
+	movq    %xmm10,(XMM10*8+offes_fltregs)(sp)
+	movq    %xmm11,(XMM11*8+offes_fltregs)(sp)
+	movq    %xmm12,(XMM12*8+offes_fltregs)(sp)
+	movq    %xmm13,(XMM13*8+offes_fltregs)(sp)
+	movq    %xmm14,(XMM14*8+offes_fltregs)(sp)
+	movq    %xmm15,(XMM15*8+offes_fltregs)(sp)
+
+	/* calculate sp of method */
+	/* +8 skips the rplpoint* slot pushed by the stub (stack layout
+	   in the function header: 0 = rplpoint*, 8 = method stack) */
+	mov     sp,itmp1
+	add     $(sizeexecutionstate + REPLACEMENT_ROOM + 8),itmp1
+	mov     itmp1,(offes_sp)(sp)
+
+	/* pv must be looked up via AVL tree */
+	movq    $0,(offes_pv)(sp)
+
+	/* call replace_me */
+	/* -8(itmp1) is the slot just below the method sp, i.e. the
+	   rplpoint* the stub stored (see stack layout above) */
+	mov     -8(itmp1),a0                        /* rplpoint *    */
+	mov     sp,a1                               /* arg1: execution state */
+	call    replace_me@PLT                      /* call C function replace_me */
+	call    abort@PLT                           /* NEVER REACHED - replace_me
+	                                               must not return normally */
+
+/* asm_replacement_in **********************************************************
+
+   This code restores the given execution state and jumps to the
+   replacement code.
+
+ This function never returns!
+
+ C prototype:
+ void asm_replacement_in(executionstate *es, replace_safestack_t *st);
+*******************************************************************************/
-/******************* function asm_perform_threadswitch *************************
-* *
-* void asm_perform_threadswitch (u1 **from, u1 **to, u1 **stackTop); *
+asm_replacement_in:
+	/* get arguments */
+	/* s1 is a callee-saved register alias (md-asm.h), so st survives
+	   the C calls below */
+	mov     a1,s1                       /* replace_safestack_t *st */
+	mov     a0,%rbp                     /* executionstate *es == safe stack */
+
+	/* switch to the safe stack */
+	/* apparently the executionstate_t sits at the base of the safe
+	   stack, so es doubles as the new stack pointer -- TODO confirm
+	   against the replace_safestack_t definition */
+	mov     %rbp,sp
+
+	/* call replace_build_execution_state(st) */
+	mov     s1,a0
+	call    replace_build_execution_state@PLT
+
+	/* set new sp */
+	mov     (offes_sp)(%rbp),sp
+
+	/* push address of new code */
+	/* consumed by the final ret below */
+	pushq   (offes_pc)(%rbp)
+
+	/* allocate an executionstate_t on the stack */
+	sub     $(sizeexecutionstate),sp
+
+	/* call replace_free_safestack(st,& of allocated executionstate_t) */
+	mov     sp,a1
+	mov     s1,a0
+	call    replace_free_safestack@PLT
+
+	/* copy registers from execution state */
+	movq    (XMM0 *8+offes_fltregs)(sp),%xmm0
+	movq    (XMM1 *8+offes_fltregs)(sp),%xmm1
+	movq    (XMM2 *8+offes_fltregs)(sp),%xmm2
+	movq    (XMM3 *8+offes_fltregs)(sp),%xmm3
+	movq    (XMM4 *8+offes_fltregs)(sp),%xmm4
+	movq    (XMM5 *8+offes_fltregs)(sp),%xmm5
+	movq    (XMM6 *8+offes_fltregs)(sp),%xmm6
+	movq    (XMM7 *8+offes_fltregs)(sp),%xmm7
+	movq    (XMM8 *8+offes_fltregs)(sp),%xmm8
+	movq    (XMM9 *8+offes_fltregs)(sp),%xmm9
+	movq    (XMM10*8+offes_fltregs)(sp),%xmm10
+	movq    (XMM11*8+offes_fltregs)(sp),%xmm11
+	movq    (XMM12*8+offes_fltregs)(sp),%xmm12
+	movq    (XMM13*8+offes_fltregs)(sp),%xmm13
+	movq    (XMM14*8+offes_fltregs)(sp),%xmm14
+	movq    (XMM15*8+offes_fltregs)(sp),%xmm15
+
+	/* note: restoring %rbp below clobbers the es pointer -- all
+	   remaining accesses go through sp, which is safe */
+	mov     (RAX*8+offes_intregs)(sp),%rax
+	mov     (RBX*8+offes_intregs)(sp),%rbx
+	mov     (RCX*8+offes_intregs)(sp),%rcx
+	mov     (RDX*8+offes_intregs)(sp),%rdx
+	mov     (RSI*8+offes_intregs)(sp),%rsi
+	mov     (RDI*8+offes_intregs)(sp),%rdi
+	mov     (RBP*8+offes_intregs)(sp),%rbp
+	mov     (R8 *8+offes_intregs)(sp),%r8
+	mov     (R9 *8+offes_intregs)(sp),%r9
+	mov     (R10*8+offes_intregs)(sp),%r10
+	mov     (R11*8+offes_intregs)(sp),%r11
+	mov     (R12*8+offes_intregs)(sp),%r12
+	mov     (R13*8+offes_intregs)(sp),%r13
+	mov     (R14*8+offes_intregs)(sp),%r14
+	mov     (R15*8+offes_intregs)(sp),%r15
+
+	/* pop the execution state off the stack */
+	add     $(sizeexecutionstate),sp
+
+	/* jump to new code */
+	/* "returns" to the offes_pc value pushed above */
+	ret
+
+#endif /* defined(ENABLE_REPLACEMENT) */
+
+
+/* asm_builtin_x2x *************************************************************
* *
-* performs a threadswitch *
+* Wrapper functions for float to int corner cases *
* *
*******************************************************************************/
-asm_perform_threadswitch:
- sub $(7*8),%rsp /* allocate stack frame */
+asm_builtin_f2i:
+	/* JIT-callable wrapper: takes the float operand in ftmp1 instead
+	   of the C argument register, preserves all argument registers,
+	   and forwards to the C builtin; result is left in %rax (C ABI) */
+	sub     $(ARG_CNT*8),sp             /* room to spill the argument regs */
+
+	SAVE_ARGUMENT_REGISTERS(0)
+
+	movq    ftmp1,fa0                   /* builtin_f2i expects its arg in fa0 */
+	call    builtin_f2i@PLT
+
+	RESTORE_ARGUMENT_REGISTERS(0)
+
+	add     $(ARG_CNT*8),sp
+	ret
- mov %rbx,0*8(%rsp)
- mov %rbp,1*8(%rsp)
- mov %r12,2*8(%rsp)
- mov %r13,3*8(%rsp)
- mov %r14,4*8(%rsp)
- mov %r15,5*8(%rsp)
- mov 7*8(%rsp),%rax /* save current return address */
- mov %rax,6*8(%rsp)
+asm_builtin_f2l:
+	/* JIT-callable wrapper: float operand in ftmp1, argument registers
+	   preserved across the call; result left in %rax (C ABI) */
+	sub     $(ARG_CNT*8),sp             /* room to spill the argument regs */
+
+	SAVE_ARGUMENT_REGISTERS(0)
+
+	movq    ftmp1,fa0                   /* builtin_f2l expects its arg in fa0 */
+	call    builtin_f2l@PLT
+
+	RESTORE_ARGUMENT_REGISTERS(0)
+
+	add     $(ARG_CNT*8),sp
+	ret
- mov %rsp,(%rdi) /* first argument **from */
- mov %rsp,(%rdx) /* third argument **stackTop */
- mov (%rsi),%rsp /* load new stack pointer */
+asm_builtin_d2i:
+	/* JIT-callable wrapper: double operand in ftmp1, argument registers
+	   preserved across the call; result left in %rax (C ABI) */
+	sub     $(ARG_CNT*8),sp             /* room to spill the argument regs */
+
+	SAVE_ARGUMENT_REGISTERS(0)
+
+	movq    ftmp1,fa0                   /* builtin_d2i expects its arg in fa0 */
+	call    builtin_d2i@PLT
+
+	RESTORE_ARGUMENT_REGISTERS(0)
+
+	add     $(ARG_CNT*8),sp
+	ret
- mov 0*8(%rsp),%rbx
- mov 1*8(%rsp),%rbp
- mov 2*8(%rsp),%r12
- mov 3*8(%rsp),%r13
- mov 4*8(%rsp),%r14
- mov 5*8(%rsp),%r15
- mov 6*8(%rsp),%rax /* restore return address */
- add $(7*8),%rsp /* free stack frame */
- mov %rax,(%rsp)
- ret
-
+asm_builtin_d2l:
+	/* JIT-callable wrapper: double operand in ftmp1, argument registers
+	   preserved across the call; result left in %rax (C ABI) */
+	sub     $(ARG_CNT*8),sp             /* room to spill the argument regs */
+
+	SAVE_ARGUMENT_REGISTERS(0)
+
+	movq    ftmp1,fa0                   /* builtin_d2l expects its arg in fa0 */
+	call    builtin_d2l@PLT
+
+	RESTORE_ARGUMENT_REGISTERS(0)
+
+	add     $(ARG_CNT*8),sp
+	ret
-/********************* function asm_switchstackandcall *************************
-* *
-* int asm_switchstackandcall (void *stack, void *func, void **stacktopsave, *
-* void *p); *
-* *
-* Switches to a new stack, calls a function and switches back. *
-* a0 (%rdi) new stack pointer *
-* a1 (%rsi) function pointer *
-* a2 (%rdx) pointer to variable where stack top should be stored *
-* a3 (%rcx) pointer to user data, is passed to the function *
-* *
-*******************************************************************************/
-asm_switchstackandcall:
- sub $8,%rsp /* keep stack 16-byte aligned */
- sub $16,%rdi /* allocate new stack */
+/* asm_compare_and_swap ********************************************************
- mov 8(%rsp),%rax /* save return address on new stack */
- mov %rax,(%rdi)
- mov %rsp,8(%rdi) /* save old stack pointer on new stack */
- mov %rsp,(%rdx) /* save old stack pointer to variable */
+ Does an atomic compare and swap. Required for the lock
+ implementation.
- mov %rdi,%rsp /* switch to new stack */
+*******************************************************************************/
- mov %rcx,%rdi /* pass pointer */
- call *%rsi /* and call function */
+asm_compare_and_swap:
+	/* atomic CAS: if (*a0 == a1) *a0 = a2; always returns the value
+	   that was in *a0 before the operation (in v0 == %rax) */
+	mov     a1,v0                       /* v0 is %rax; cmpxchg compares
+	                                       %rax against the memory operand */
+	lock cmpxchg a2,(a0)                /* lock prefix makes it atomic */
+	ret
- mov (%rsp),%r10 /* load return address */
- mov 8(%rsp),%rsp /* switch to old stack */
- add $8,%rsp /* free stack space */
- mov %r10,(%rsp) /* write return adress */
- ret
-
-/********************* function asm_getcallingmethod ***************************
-* *
-* classinfo *asm_getcallingmethod (); *
-* *
-* goes back stack frames to get the calling method *
-* *
-* t2 .. sp *
-* t3 .. ra *
-* t4 .. pv *
-* *
-* Stack: *
-* java function *
-* native stub *
-* Java_java_lang_System_getCallerClass *
-* *
+/* asm_memory_barrier **********************************************************
+
+ A memory barrier for the Java Memory Model.
+
*******************************************************************************/
-asm_getcallingmethod:
- mov %rbp,%rax /* return address of native function */
- add $(2*8),%rax /* %rsp, return address */
- add $(7*8),%rax /* native stub stackframe */
- mov (%rax),%rdi /* return address to java function */
- call findmethod
- mov MethodPointer(%rax),%rax
- ret
+asm_memory_barrier:
+	/* full fence: all prior loads and stores complete before any
+	   later ones (serializes memory, not instruction execution) */
+	mfence
+	ret
-/*********************** function asm_builtin_trace ****************************
-* *
-* Intended to be called from the native stub. Saves all argument registers *
-* and calls builtin_trace_args. *
-* *
-*******************************************************************************/
+/* disable exec-stacks ********************************************************/
+
+#if defined(__linux__) && defined(__ELF__)
+ .section .note.GNU-stack,"",%progbits
+#endif
-asm_builtin_trace:
- sub $(15*8),%rsp /* 14 + 1 */
-
- mov %rdi,0*8(%rsp) /* save arguments */
- mov %rsi,1*8(%rsp)
- mov %rdx,2*8(%rsp)
- mov %rcx,3*8(%rsp)
- mov %r8,4*8(%rsp)
- mov %r9,5*8(%rsp)
-
- movq %xmm0,6*8(%rsp)
- movq %xmm1,7*8(%rsp)
- movq %xmm2,8*8(%rsp)
- movq %xmm3,9*8(%rsp)
- movq %xmm4,10*8(%rsp)
- movq %xmm5,11*8(%rsp)
- movq %xmm6,12*8(%rsp)
- movq %xmm7,13*8(%rsp)
-
- call builtin_trace_args
-
- mov 0*8(%rsp),%rdi
- mov 1*8(%rsp),%rsi
- mov 2*8(%rsp),%rdx
- mov 3*8(%rsp),%rcx
- mov 4*8(%rsp),%r8
- mov 5*8(%rsp),%r9
-
- movq 6*8(%rsp),%xmm0
- movq 7*8(%rsp),%xmm1
- movq 8*8(%rsp),%xmm2
- movq 9*8(%rsp),%xmm3
- movq 10*8(%rsp),%xmm4
- movq 11*8(%rsp),%xmm5
- movq 12*8(%rsp),%xmm6
- movq 13*8(%rsp),%xmm7
-
- add $(15*8),%rsp
- ret
-
-
-/********************* function asm_builtin_exittrace **************************
-* *
-* Intended to be called from the native stub. Saves return value and calls *
-* builtin_displaymethodstop. *
-* *
-*******************************************************************************/
-asm_builtin_exittrace:
- sub $(3*8),%rsp
- mov %rax,0*8(%rsp)
- movq %xmm0,1*8(%rsp)
- movq %xmm0,%xmm1
- call builtin_displaymethodstop
- mov 0*8(%rsp),%rax
- movq 1*8(%rsp),%xmm0
- add $(3*8),%rsp
- ret
+/*
+ * These are local overrides for various environment variables in Emacs.
+ * Please do not remove this and leave it at the end of the file, where
+ * Emacs will automagically detect them.
+ * ---------------------------------------------------------------------
+ * Local variables:
+ * mode: asm
+ * indent-tabs-mode: t
+ * c-basic-offset: 4
+ * tab-width: 4
+ * End:
+ * vim:noexpandtab:sw=4:ts=4:
+ */