+ /*
+ * SIMD
+ */
+#if defined(TARGET_X86) || defined(TARGET_AMD64)
+ case OP_XZERO: {
+ /* Materialize an all-zeros constant of the vector type described by ins->klass */
+ values [ins->dreg] = LLVMConstNull (type_to_llvm_type (ctx, &ins->klass->byval_arg));
+ break;
+ }
+ case OP_LOADX_MEMBASE: {
+ LLVMTypeRef t = type_to_llvm_type (ctx, &ins->klass->byval_arg);
+ LLVMValueRef src;
+
+ /* Address = basereg + offset, computed as a native int, then cast to 't*' for the vector load */
+ /* NOTE(review): no explicit alignment is set on the load, so LLVM assumes the type's
+ * natural (16 byte) alignment — presumably the JIT guarantees that here; confirm. */
+ src = convert (ctx, LLVMBuildAdd (builder, convert (ctx, values [ins->inst_basereg], IntPtrType ()), LLVMConstInt (IntPtrType (), ins->inst_offset, FALSE), ""), LLVMPointerType (t, 0));
+ values [ins->dreg] = LLVMBuildLoad (builder, src, "");
+ break;
+ }
+ case OP_ADDPD:
+ case OP_ADDPS:
+ /*
+ * The packed float/double ops must use the FP builders: LLVMBuildAdd/Sub/Mul
+ * emit integer instructions, which are invalid on FP vector operands in
+ * newer LLVM. OP_DIVPD/OP_DIVPS below already use the FP variant.
+ */
+ values [ins->dreg] = LLVMBuildFAdd (builder, lhs, rhs, "");
+ break;
+ case OP_PADDB:
+ case OP_PADDW:
+ case OP_PADDD:
+ case OP_PADDQ:
+ /* Packed integer add */
+ values [ins->dreg] = LLVMBuildAdd (builder, lhs, rhs, "");
+ break;
+ case OP_SUBPD:
+ case OP_SUBPS:
+ values [ins->dreg] = LLVMBuildFSub (builder, lhs, rhs, "");
+ break;
+ case OP_PSUBB:
+ case OP_PSUBW:
+ case OP_PSUBD:
+ case OP_PSUBQ:
+ /* Packed integer subtract */
+ values [ins->dreg] = LLVMBuildSub (builder, lhs, rhs, "");
+ break;
+ case OP_MULPD:
+ case OP_MULPS:
+ values [ins->dreg] = LLVMBuildFMul (builder, lhs, rhs, "");
+ break;
+ case OP_DIVPD:
+ case OP_DIVPS:
+ values [ins->dreg] = LLVMBuildFDiv (builder, lhs, rhs, "");
+ break;
+ case OP_PAND:
+ /* Bitwise ops on integer vectors */
+ values [ins->dreg] = LLVMBuildAnd (builder, lhs, rhs, "");
+ break;
+ case OP_POR:
+ values [ins->dreg] = LLVMBuildOr (builder, lhs, rhs, "");
+ break;
+ case OP_PXOR:
+ values [ins->dreg] = LLVMBuildXor (builder, lhs, rhs, "");
+ break;
+ case OP_ANDPS:
+ case OP_ANDNPS:
+ case OP_ORPS:
+ case OP_XORPS:
+ case OP_ANDPD:
+ case OP_ANDNPD:
+ case OP_ORPD:
+ case OP_XORPD: {
+ LLVMTypeRef t, rt;
+ LLVMValueRef v = NULL;
+
+ /*
+ * LLVM has no bitwise ops on FP vectors, so reinterpret the operands as
+ * same-width integer vectors, do the op, and cast the result back.
+ */
+ switch (ins->opcode) {
+ case OP_ANDPS:
+ case OP_ANDNPS:
+ case OP_ORPS:
+ case OP_XORPS:
+ t = LLVMVectorType (LLVMInt32Type (), 4);
+ rt = LLVMVectorType (LLVMFloatType (), 4);
+ break;
+ case OP_ANDPD:
+ case OP_ANDNPD:
+ case OP_ORPD:
+ case OP_XORPD:
+ t = LLVMVectorType (LLVMInt64Type (), 2);
+ rt = LLVMVectorType (LLVMDoubleType (), 2);
+ break;
+ default:
+ /* Not reached; the assignments only silence uninitialized-variable warnings */
+ t = LLVMInt32Type ();
+ rt = LLVMInt32Type ();
+ g_assert_not_reached ();
+ }
+
+ lhs = LLVMBuildBitCast (builder, lhs, t, "");
+ rhs = LLVMBuildBitCast (builder, rhs, t, "");
+ switch (ins->opcode) {
+ case OP_ANDPS:
+ case OP_ANDPD:
+ v = LLVMBuildAnd (builder, lhs, rhs, "");
+ break;
+ case OP_ORPS:
+ case OP_ORPD:
+ v = LLVMBuildOr (builder, lhs, rhs, "");
+ break;
+ case OP_XORPS:
+ case OP_XORPD:
+ v = LLVMBuildXor (builder, lhs, rhs, "");
+ break;
+ case OP_ANDNPS:
+ case OP_ANDNPD:
+ /*
+ * SSE andnps/andnpd compute (~first) AND second, so the first operand
+ * (lhs) is the one which gets complemented, not rhs.
+ */
+ v = LLVMBuildAnd (builder, LLVMBuildNot (builder, lhs, ""), rhs, "");
+ break;
+ }
+ values [ins->dreg] = LLVMBuildBitCast (builder, v, rt, "");
+ break;
+ }
+ case OP_MINPD:
+ case OP_MINPS:
+ case OP_MAXPD:
+ case OP_MAXPS:
+ case OP_PMIND_UN:
+ case OP_PMINW_UN:
+ case OP_PMINB_UN:
+ case OP_PMAXD_UN:
+ case OP_PMAXW_UN:
+ case OP_PMAXB_UN: {
+ LLVMValueRef args [2];
+
+ args [0] = lhs;
+ args [1] = rhs;
+
+ /* No LLVM IR equivalent for these: call the target intrinsic whose name
+ * simd_op_to_intrins () maps the opcode to.
+ * NOTE(review): LLVMGetNamedFunction returns NULL if that intrinsic was never
+ * declared in the module — presumably guaranteed elsewhere; confirm. */
+ values [ins->dreg] = LLVMBuildCall (builder, LLVMGetNamedFunction (module, simd_op_to_intrins (ins->opcode)), args, 2, dname);
+ break;
+ }
+ case OP_EXTRACT_R8:
+ case OP_EXTRACT_I8:
+ case OP_EXTRACT_I4:
+ case OP_EXTRACT_I2:
+ case OP_EXTRACT_U2:
+ case OP_EXTRACT_I1:
+ case OP_EXTRACT_U1: {
+ LLVMTypeRef t;
+
+ /* Pick the 128-bit vector type implied by the element size, reinterpret the
+ * operand as that type, then extract lane ins->inst_c0. */
+ switch (ins->opcode) {
+ case OP_EXTRACT_R8:
+ t = LLVMVectorType (LLVMDoubleType (), 2);
+ break;
+ case OP_EXTRACT_I8:
+ t = LLVMVectorType (LLVMInt64Type (), 2);
+ break;
+ case OP_EXTRACT_I4:
+ t = LLVMVectorType (LLVMInt32Type (), 4);
+ break;
+ case OP_EXTRACT_I2:
+ case OP_EXTRACT_U2:
+ t = LLVMVectorType (LLVMInt16Type (), 8);
+ break;
+ case OP_EXTRACT_I1:
+ case OP_EXTRACT_U1:
+ t = LLVMVectorType (LLVMInt8Type (), 16);
+ break;
+ default:
+ /* Not reached; the assignment silences an uninitialized-variable warning */
+ t = LLVMInt32Type ();
+ g_assert_not_reached ();
+ }
+
+ lhs = LLVMBuildBitCast (builder, lhs, t, "");
+ /* NOTE(review): the I1/U1/I2/U2 cases produce an i8/i16 value with no explicit
+ * sign/zero extension here — presumably a later conversion handles it; confirm. */
+ values [ins->dreg] = LLVMBuildExtractElement (builder, lhs, LLVMConstInt (LLVMInt32Type (), ins->inst_c0, FALSE), "");
+ break;
+ }
+#endif
+
+ case OP_DUMMY_USE:
+ /* Nothing to generate */
+ break;
+
+ /*
+ * EXCEPTION HANDLING
+ */
+ case OP_IMPLICIT_EXCEPTION:
+ /* This marks a place where an implicit exception can happen */
+ /* Not supported inside an exception region: bail out of LLVM compilation */
+ if (bb->region != -1)
+ LLVM_FAILURE (ctx, "implicit-exception");
+ break;
+ case OP_THROW: {
+ MonoMethodSignature *throw_sig;
+ LLVMValueRef callee, arg;
+
+ /* Lazily create the throw helper (signature void (object)) once and cache it */
+ if (!ctx->lmodule->throw) {
+ throw_sig = mono_metadata_signature_alloc (mono_defaults.corlib, 1);
+ throw_sig->ret = &mono_defaults.void_class->byval_arg;
+ throw_sig->params [0] = &mono_defaults.object_class->byval_arg;
+ if (cfg->compile_aot) {
+ /* AOT: go through a PLT entry resolved at load time */
+ callee = get_plt_entry (ctx, sig_to_llvm_sig (ctx, throw_sig, NULL), MONO_PATCH_INFO_INTERNAL_METHOD, "mono_arch_throw_exception");
+ } else {
+ /* JIT: declare the function and map it to the trampoline's address */
+ callee = LLVMAddFunction (module, "mono_arch_throw_exception", sig_to_llvm_sig (ctx, throw_sig, NULL));
+
+#ifdef TARGET_X86
+ /*
+ * LLVM doesn't push the exception argument, so we need a different
+ * trampoline.
+ */
+ LLVMAddGlobalMapping (ee, callee, resolve_patch (cfg, MONO_PATCH_INFO_INTERNAL_METHOD, "mono_arch_llvm_throw_exception"));
+#else
+ LLVMAddGlobalMapping (ee, callee, resolve_patch (cfg, MONO_PATCH_INFO_INTERNAL_METHOD, "mono_arch_throw_exception"));
+#endif
+ }
+
+ /* Publish callee fully before the cached pointer becomes visible.
+ * NOTE(review): presumably lmodule->throw can be read concurrently without
+ * a lock, hence the barrier — confirm. */
+ mono_memory_barrier ();
+ ctx->lmodule->throw = callee;
+ }
+ /* The helper takes the exception object as its single argument */
+ arg = convert (ctx, values [ins->sreg1], type_to_llvm_type (ctx, &mono_defaults.object_class->byval_arg));
+ emit_call (ctx, bb, &builder, ctx->lmodule->throw, &arg, 1);
+ break;
+ }
+ case OP_CALL_HANDLER: {
+ /*
+ * We don't 'call' handlers, but instead simply branch to them.
+ * The code generated by ENDFINALLY will branch back to us.
+ */
+ LLVMBasicBlockRef finally_bb, noex_bb;
+ GSList *bb_list;
+
+ finally_bb = get_bb (ctx, ins->inst_target_bb);
+
+ bb_list = bblocks [ins->inst_target_bb->block_num].call_handler_return_bbs;
+
+ /*
+ * Set the indicator variable for the finally clause.
+ */
+ /* The value (list length + 1) is the switch case index ENDFINALLY uses to
+ * branch back to the continuation bblock appended below. */
+ lhs = bblocks [ins->inst_target_bb->block_num].finally_ind;
+ g_assert (lhs);
+ LLVMBuildStore (builder, LLVMConstInt (LLVMInt32Type (), g_slist_length (bb_list) + 1, FALSE), lhs);
+
+ /* Branch to the finally clause */
+ LLVMBuildBr (builder, finally_bb);
+
+ /* Execution continues here after the handler returns through ENDFINALLY's switch */
+ noex_bb = gen_bb (ctx, "CALL_HANDLER_CONT_BB");
+ // FIXME: Use a mempool
+ bblocks [ins->inst_target_bb->block_num].call_handler_return_bbs = g_slist_append (bblocks [ins->inst_target_bb->block_num].call_handler_return_bbs, noex_bb);
+
+ /* Switch code generation to the continuation bblock */
+ builder = ctx->builder = create_builder (ctx);
+ LLVMPositionBuilderAtEnd (ctx->builder, noex_bb);
+
+ bblocks [bb->block_num].end_bblock = noex_bb;
+ break;
+ }
+ case OP_START_HANDLER: {
+ /* Nothing to generate; the handler body follows */
+ break;
+ }
+ case OP_ENDFINALLY: {
+ LLVMBasicBlockRef resume_bb;
+ MonoBasicBlock *handler_bb;
+ LLVMValueRef val, switch_ins;
+ GSList *bb_list;
+
+ /* Find the handler bblock this endfinally belongs to */
+ handler_bb = g_hash_table_lookup (ctx->region_to_handler, GUINT_TO_POINTER (mono_get_block_region_notry (cfg, bb->region)));
+ g_assert (handler_bb);
+ lhs = bblocks [handler_bb->block_num].finally_ind;
+ g_assert (lhs);
+
+ bb_list = bblocks [handler_bb->block_num].call_handler_return_bbs;
+
+ /* Default target: resume unwinding (the handler was entered during exception propagation) */
+ resume_bb = gen_bb (ctx, "ENDFINALLY_RESUME_BB");
+
+ /* Load the finally variable */
+ val = LLVMBuildLoad (builder, lhs, "");
+
+ /* Reset the variable */
+ LLVMBuildStore (builder, LLVMConstInt (LLVMInt32Type (), 0, FALSE), lhs);
+
+ /* Branch to either resume_bb, or to the bblocks in bb_list */
+ switch_ins = LLVMBuildSwitch (builder, val, resume_bb, g_slist_length (bb_list));
+ /*
+ * The other targets are added at the end to handle OP_CALL_HANDLER
+ * opcodes processed later.
+ */
+ bblocks [handler_bb->block_num].endfinally_switch = switch_ins;
+ /*
+ for (i = 0; i < g_slist_length (bb_list); ++i)
+ LLVMAddCase (switch_ins, LLVMConstInt (LLVMInt32Type (), i + 1, FALSE), g_slist_nth (bb_list, i)->data);
+ */
+
+ builder = ctx->builder = create_builder (ctx);
+ LLVMPositionBuilderAtEnd (ctx->builder, resume_bb);
+
+ LLVMBuildCall (builder, LLVMGetNamedFunction (module, "mono_resume_unwind"), NULL, 0, "");
+ LLVMBuildUnreachable (builder);
+ has_terminator = TRUE;
+ break;
+ }