inline static MonoCallInst *
mono_emit_call_args (MonoCompile *cfg, MonoMethodSignature *sig,
- MonoInst **args, int calli, int virtual_, int tail, int rgctx, int unbox_trampoline)
+ MonoInst **args, int calli, int virtual_, int tail, int rgctx, int unbox_trampoline, MonoMethod *target)
{
MonoType *sig_ret;
MonoCallInst *call;
tail = FALSE;
if (tail) {
- mini_profiler_emit_instrumentation_call (cfg, mono_profiler_raise_method_leave, FALSE, NULL, NULL);
+ mini_profiler_emit_tail_call (cfg, target);
MONO_INST_NEW_CALL (cfg, call, OP_TAILCALL);
} else
MONO_ADD_INS (cfg->cbb, ins);
}
- call = mono_emit_call_args (cfg, sig, args, TRUE, FALSE, FALSE, rgctx_arg ? TRUE : FALSE, FALSE);
+ call = mono_emit_call_args (cfg, sig, args, TRUE, FALSE, FALSE, rgctx_arg ? TRUE : FALSE, FALSE, NULL);
call->inst.sreg1 = addr->dreg;
need_unbox_trampoline = method->klass == mono_defaults.object_class || mono_class_is_interface (method->klass);
- call = mono_emit_call_args (cfg, sig, args, FALSE, virtual_, tail, rgctx_arg ? TRUE : FALSE, need_unbox_trampoline);
+ call = mono_emit_call_args (cfg, sig, args, FALSE, virtual_, tail, rgctx_arg ? TRUE : FALSE, need_unbox_trampoline, method);
#ifndef DISABLE_REMOTING
if (might_be_remote)
g_assert (sig);
- call = mono_emit_call_args (cfg, sig, args, FALSE, FALSE, FALSE, FALSE, FALSE);
+ call = mono_emit_call_args (cfg, sig, args, FALSE, FALSE, FALSE, FALSE, FALSE, NULL);
call->fptr = func;
MONO_ADD_INS (cfg->cbb, (MonoInst*)call);
/* This happens often in argument checking code, eg. throw new FooException... */
/* Avoid relocations and save some space by calling a helper function specialized to mscorlib */
EMIT_NEW_ICONST (cfg, iargs [0], mono_metadata_token_index (klass->type_token));
- return mono_emit_jit_icall (cfg, mono_helper_newobj_mscorlib, iargs);
+ alloc_ftn = mono_helper_newobj_mscorlib;
} else {
MonoVTable *vtable = mono_class_vtable (cfg->domain, klass);
MonoMethod *managed_alloc = NULL;
- gboolean pass_lw;
if (!vtable) {
mono_cfg_set_exception (cfg, MONO_EXCEPTION_TYPE_LOAD);
EMIT_NEW_ICONST (cfg, iargs [1], size);
return mono_emit_method_call (cfg, managed_alloc, iargs, NULL);
}
- alloc_ftn = mono_class_get_allocation_ftn (vtable, for_box, &pass_lw);
- if (pass_lw) {
- guint32 lw = vtable->klass->instance_size;
- lw = ((lw + (sizeof (gpointer) - 1)) & ~(sizeof (gpointer) - 1)) / sizeof (gpointer);
- EMIT_NEW_ICONST (cfg, iargs [0], lw);
- EMIT_NEW_VTABLECONST (cfg, iargs [1], vtable);
- }
- else {
- EMIT_NEW_VTABLECONST (cfg, iargs [0], vtable);
- }
+ alloc_ftn = ves_icall_object_new_specific;
+ EMIT_NEW_VTABLECONST (cfg, iargs [0], vtable);
}
return mono_emit_jit_icall (cfg, alloc_ftn, iargs);
/* Set target field */
/* Optimize away setting of NULL target */
if (!MONO_INS_IS_PCONST_NULL (target)) {
+ MONO_EMIT_NEW_BIALU_IMM (cfg, OP_COMPARE_IMM, -1, target->dreg, 0);
+ MONO_EMIT_NEW_COND_EXC (cfg, EQ, "NullReferenceException");
MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, obj->dreg, MONO_STRUCT_OFFSET (MonoDelegate, target), target->dreg);
if (cfg->gen_write_barriers) {
dreg = alloc_preg (cfg);
prev_args = cfg->args;
prev_arg_types = cfg->arg_types;
prev_inlined_method = cfg->inlined_method;
- cfg->inlined_method = cmethod;
- cfg->ret_var_set = FALSE;
- cfg->inline_depth ++;
+ prev_ret_var_set = cfg->ret_var_set;
prev_real_offset = cfg->real_offset;
prev_cbb_hash = cfg->cbb_hash;
prev_cil_offset_to_bb = cfg->cil_offset_to_bb;
prev_cbb = cfg->cbb;
prev_current_method = cfg->current_method;
prev_generic_context = cfg->generic_context;
- prev_ret_var_set = cfg->ret_var_set;
prev_disable_inline = cfg->disable_inline;
+ cfg->inlined_method = cmethod;
+ cfg->ret_var_set = FALSE;
+ cfg->inline_depth ++;
+
if (ip && *ip == CEE_CALLVIRT && !(cmethod->flags & METHOD_ATTRIBUTE_STATIC))
virtual_ = TRUE;
cfg->headers_to_free = g_slist_prepend_mempool (cfg->mempool, cfg->headers_to_free, header);
}
+/*
+ * mono_type_to_stloc_coerce:
+ *
+ *   Return the conversion opcode needed to coerce a value on the eval stack
+ * before it is stored into a local/argument of TYPE, or 0 when no coercion
+ * is needed.  Small integer types (I1/U1/I2/U2) get an explicit truncation
+ * opcode on store; wider integers, floats, pointers and reference types
+ * need none.  Byref locations are never coerced.  Enum valuetypes are
+ * unwrapped to their underlying type and re-examined via the
+ * handle_enum label.
+ */
+static guint32
+mono_type_to_stloc_coerce (MonoType *type)
+{
+ if (type->byref)
+ return 0;
+
+ type = mini_get_underlying_type (type);
+handle_enum:
+ switch (type->type) {
+ case MONO_TYPE_I1:
+ return OP_ICONV_TO_I1;
+ case MONO_TYPE_U1:
+ return OP_ICONV_TO_U1;
+ case MONO_TYPE_I2:
+ return OP_ICONV_TO_I2;
+ case MONO_TYPE_U2:
+ return OP_ICONV_TO_U2;
+ case MONO_TYPE_I4:
+ case MONO_TYPE_U4:
+ case MONO_TYPE_I:
+ case MONO_TYPE_U:
+ case MONO_TYPE_PTR:
+ case MONO_TYPE_FNPTR:
+ case MONO_TYPE_CLASS:
+ case MONO_TYPE_STRING:
+ case MONO_TYPE_OBJECT:
+ case MONO_TYPE_SZARRAY:
+ case MONO_TYPE_ARRAY:
+ case MONO_TYPE_I8:
+ case MONO_TYPE_U8:
+ case MONO_TYPE_R4:
+ case MONO_TYPE_R8:
+ case MONO_TYPE_TYPEDBYREF:
+ case MONO_TYPE_GENERICINST:
+ return 0;
+ case MONO_TYPE_VALUETYPE:
+ if (type->data.klass->enumtype) {
+ /* Enums store like their underlying integer type. */
+ type = mono_class_enum_basetype (type->data.klass);
+ goto handle_enum;
+ }
+ return 0;
+ case MONO_TYPE_VAR:
+ case MONO_TYPE_MVAR: //TODO I believe we don't need to handle gsharedvt as there won't be match and, for example, u1 is not covariant to u32
+ return 0;
+ default:
+ g_error ("unknown type 0x%02x in mono_type_to_stloc_coerce", type->type);
+ }
+ /* Unreachable: g_error () aborts; kept to silence compiler warnings. */
+ return -1;
+}
+
static void
emit_stloc_ir (MonoCompile *cfg, MonoInst **sp, MonoMethodHeader *header, int n)
{
MonoInst *ins;
+ /*
+ * Coerce the stack value to the local's (narrower) integer type before
+ * the store, as required for small-int locals.  If the previous
+ * instruction already performed the same conversion, reuse it instead
+ * of emitting a redundant one.
+ */
+ guint32 coerce_op = mono_type_to_stloc_coerce (header->locals [n]);
+
+ if (coerce_op) {
+ if (cfg->cbb->last_ins == sp [0] && sp [0]->opcode == coerce_op) {
+ if (cfg->verbose_level > 2)
+ printf ("Found existing coercing is enough for stloc\n");
+ } else {
+ MONO_INST_NEW (cfg, ins, coerce_op);
+ ins->dreg = alloc_ireg (cfg);
+ ins->sreg1 = sp [0]->dreg;
+ ins->type = STACK_I4;
+ ins->klass = mono_class_from_mono_type (header->locals [n]);
+ MONO_ADD_INS (cfg->cbb, ins);
+ /* Replace the stack top with the coerced value. */
+ *sp = mono_decompose_opcode (cfg, ins);
+ }
+ }
+
+
guint32 opcode = mono_type_to_regmove (cfg, header->locals [n]);
if ((opcode == OP_MOVE) && cfg->cbb->last_ins == sp [0] &&
((sp [0]->opcode == OP_ICONST) || (sp [0]->opcode == OP_I8CONST))) {
}
}
+/*
+ * emit_starg_ir:
+ *
+ *   Emit IR for storing the stack top *SP into argument slot N.
+ * Mirrors emit_stloc_ir: small-int argument types get an explicit
+ * truncation opcode before the store (skipped when the previous
+ * instruction already is that exact conversion), then the value is
+ * stored with EMIT_NEW_ARGSTORE.
+ */
+static void
+emit_starg_ir (MonoCompile *cfg, MonoInst **sp, int n)
+{
+ MonoInst *ins;
+ guint32 coerce_op = mono_type_to_stloc_coerce (cfg->arg_types [n]);
+
+ if (coerce_op) {
+ if (cfg->cbb->last_ins == sp [0] && sp [0]->opcode == coerce_op) {
+ if (cfg->verbose_level > 2)
+ printf ("Found existing coercing is enough for starg\n");
+ } else {
+ MONO_INST_NEW (cfg, ins, coerce_op);
+ ins->dreg = alloc_ireg (cfg);
+ ins->sreg1 = sp [0]->dreg;
+ ins->type = STACK_I4;
+ ins->klass = mono_class_from_mono_type (cfg->arg_types [n]);
+ MONO_ADD_INS (cfg->cbb, ins);
+ /* Replace the stack top with the coerced value. */
+ *sp = mono_decompose_opcode (cfg, ins);
+ }
+ }
+
+ EMIT_NEW_ARGSTORE (cfg, ins, n, *sp);
+}
+
/*
* ldloca inhibits many optimizations so try to get rid of it in common
* cases.
CHECK_ARG (n);
if (!dont_verify_stloc && target_type_is_incompatible (cfg, param_types [ip [1]], *sp))
UNVERIFIED;
- EMIT_NEW_ARGSTORE (cfg, ins, n, *sp);
+ emit_starg_ir (cfg, sp, n);
ip += 2;
break;
case CEE_LDLOC_S:
if (cfg->gshared && mono_method_check_context_used (cmethod))
GENERIC_SHARING_FAILURE (CEE_JMP);
- mini_profiler_emit_instrumentation_call (cfg, mono_profiler_raise_method_leave, FALSE, NULL, NULL);
+ mini_profiler_emit_tail_call (cfg, cmethod);
fsig = mono_method_signature (cmethod);
n = fsig->param_count + fsig->hasthis;
}
for (i = 0; i < n; ++i)
EMIT_NEW_ARGSTORE (cfg, ins, i, sp [i]);
+
+ mini_profiler_emit_tail_call (cfg, cmethod);
+
MONO_INST_NEW (cfg, ins, OP_BR);
MONO_ADD_INS (cfg->cbb, ins);
tblock = start_bblock->out_bb [0];
/* Handle tail calls similarly to normal calls */
tail_call = TRUE;
} else {
- mini_profiler_emit_instrumentation_call (cfg, mono_profiler_raise_method_leave, FALSE, NULL, NULL);
+ mini_profiler_emit_tail_call (cfg, cmethod);
MONO_INST_NEW_CALL (cfg, call, OP_JMP);
call->tail_call = TRUE;
break;
}
case CEE_RET:
- mini_profiler_emit_instrumentation_call (cfg, mono_profiler_raise_method_leave, FALSE, sp - 1, sig->ret);
+ mini_profiler_emit_leave (cfg, sig->ret->type != MONO_TYPE_VOID ? sp [-1] : NULL);
if (cfg->method != method) {
/* return from inlined method */
if (sp != stack_start)
UNVERIFIED;
+ mini_profiler_emit_leave (cfg, sp [0]);
+
MONO_INST_NEW (cfg, ins, OP_BR);
ins->inst_target_bb = end_bblock;
MONO_ADD_INS (cfg->cbb, ins);
cfg->dyn_call_var->flags |= MONO_INST_VOLATILE;
}
- /* Has to use a call inst since it local regalloc expects it */
+ /* Has to use a call inst since local regalloc expects it */
MONO_INST_NEW_CALL (cfg, call, OP_DYN_CALL);
ins = (MonoInst*)call;
sp -= 2;
MONO_ADD_INS (cfg->cbb, ins);
cfg->param_area = MAX (cfg->param_area, cfg->backend->dyn_call_param_area);
+ /* OP_DYN_CALL might need to allocate a dynamically sized param area */
+ cfg->flags |= MONO_CFG_HAS_ALLOCA;
ip += 2;
inline_costs += 10 * num_calls++;
CHECK_ARG (n);
if (!dont_verify_stloc && target_type_is_incompatible (cfg, param_types [n], *sp))
UNVERIFIED;
- EMIT_NEW_ARGSTORE (cfg, ins, n, *sp);
+ emit_starg_ir (cfg, sp, n);
ip += 4;
break;
case CEE_LDLOC:
}
cfg->cbb = init_localsbb;
- mini_profiler_emit_instrumentation_call (cfg, mono_profiler_raise_method_enter, TRUE, NULL, NULL);
+ mini_profiler_emit_enter (cfg);
if (seq_points) {
MonoBasicBlock *bb;