+ /* Allocate a fresh register stack frame: locals begin at local0, outgoing
+    args at out0; the alloc instruction itself saves ar.pfs into local0 + 0. */
+ ia64_alloc (code, local0 + 0, local0 - in0, out0 - local0, nout, 0);
+ /* Spill the return address (rp/b0) into local0 + 1 so it survives the
+    calls emitted below. */
+ ia64_mov_from_br (code, local0 + 1, IA64_RP);
+
+ /* Build dynamic unwind info for this prologue region: two save-reg ops
+    telling libunwind where ar.pfs and rp were spilled (local0 + 0 and
+    local0 + 1).  The region spans 6 instructions (insn_count), with the
+    saves occurring at instruction slots 2 and 5 (the when= values). */
+ r_pro = g_malloc0 (_U_dyn_region_info_size (2));
+ r_pro->op_count = 2;
+ r_pro->insn_count = 6;
+ i = 0;
+ _U_dyn_op_save_reg (&r_pro->op[i++], _U_QP_TRUE, /* when=*/ 2,
+ /* reg=*/ UNW_IA64_AR_PFS, /* dst=*/ UNW_IA64_GR + local0 + 0);
+ _U_dyn_op_save_reg (&r_pro->op[i++], _U_QP_TRUE, /* when=*/ 5,
+ /* reg=*/ UNW_IA64_RP, /* dst=*/ UNW_IA64_GR + local0 + 1);
+ /* Sanity check: we must not have filled in more ops than we allocated. */
+ g_assert ((unsigned) i <= r_pro->op_count);
+
+ /* Call exception_from_token */
+ /* arg0: the image that the exception-class token lives in. */
+ ia64_movl (code, out0 + 0, mono_defaults.exception_class->image);
+ /* NOTE(review): this mov appears dead -- out0 + 1 is fully recomputed
+    by the ia64_add two instructions below.  Confirm before removing (it
+    may be kept to preserve the unwind-info instruction-slot counts). */
+ ia64_mov (code, out0 + 1, in0 + 0);
+ /* arg1: form the full metadata token by adding MONO_TOKEN_TYPE_DEF to
+    the incoming value in in0 + 0 -- presumably in0 + 0 carries the type
+    index; verify against the trampoline's callers. */
+ ia64_movl (code, GP_SCRATCH_REG, MONO_TOKEN_TYPE_DEF);
+ ia64_add (code, out0 + 1, in0 + 0, GP_SCRATCH_REG);
+ /* Indirect call through the ia64 function descriptor: first 8 bytes are
+    the entry point (loaded into b6), next 8 are the callee's gp. */
+ ptr = mono_exception_from_token;
+ ia64_movl (code, GP_SCRATCH_REG, ptr);
+ ia64_ld8_inc_imm (code, GP_SCRATCH_REG2, GP_SCRATCH_REG, 8);
+ ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG2);
+ ia64_ld8 (code, IA64_GP, GP_SCRATCH_REG);
+ ia64_br_call_reg (code, IA64_B0, IA64_B6);
+ /* Stash the returned exception object (r8 is the ia64 integer
+    return-value register). */
+ ia64_mov (code, local0 + 3, IA64_R8);
+
+ /* Compute throw ip */
+ /* throw_ip = saved return address (local0 + 1) minus the offset passed
+    in in0 + 1. */
+ ia64_mov (code, local0 + 2, local0 + 1);
+ ia64_sub (code, local0 + 2, local0 + 2, in0 + 1);
+
+ /* Trick the unwind library into using throw_ip as the IP in the caller frame */
+ ia64_mov (code, local0 + 1, local0 + 2);
+
+ /* Set args */
+ /* arg0: the exception object created above; arg1: r0 (hardwired zero)
+    -- presumably a FALSE/NULL flag for throw_exception; confirm against
+    its signature. */
+ ia64_mov (code, out0 + 0, local0 + 3);
+ ia64_mov (code, out0 + 1, IA64_R0);
+
+ /* Call throw_exception */
+ /* Same function-descriptor call sequence as above.  This call is not
+    expected to return. */
+ ptr = throw_exception;
+ ia64_movl (code, GP_SCRATCH_REG, ptr);
+ ia64_ld8_inc_imm (code, GP_SCRATCH_REG2, GP_SCRATCH_REG, 8);
+ ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG2);
+ ia64_ld8 (code, IA64_GP, GP_SCRATCH_REG);
+ ia64_br_call_reg (code, IA64_B0, IA64_B6);
+
+ /* Unreachable: trap loudly if throw_exception ever returns. */
+ ia64_break_i (code, 1002);