+/*
+ * Icall for System.Threading.Interlocked.CompareExchange(double&,double,double).
+ * Atomically compares *location with comparand and, if they are equal,
+ * stores value into *location. Always returns the original contents of
+ * *location.
+ *
+ * 64-bit path: the doubles are type-punned through LongDoubleUnion so the
+ * operation can be performed with InterlockedCompareExchangePointer.
+ * NOTE(review): this assumes sizeof(gdouble) == sizeof(gint64) ==
+ * sizeof(gpointer) when SIZEOF_VOID_P == 8 — confirm for all 64-bit targets.
+ * 32-bit path: atomicity is emulated with the global interlocked_mutex.
+ */
+gdouble
+ves_icall_System_Threading_Interlocked_CompareExchange_Double (gdouble *location, gdouble value, gdouble comparand)
+{
+#if SIZEOF_VOID_P == 8
+ LongDoubleUnion val, comp, ret;
+
+ /* Pun the doubles to integer bit patterns for the pointer-sized CAS. */
+ val.fval = value;
+ comp.fval = comparand;
+ ret.ival = (gint64)InterlockedCompareExchangePointer((gpointer *) location, (gpointer)val.ival, (gpointer)comp.ival);
+
+ return ret.fval;
+#else
+ gdouble old;
+
+ /* No 64-bit CAS available: serialize through the global mutex. */
+ EnterCriticalSection(&interlocked_mutex);
+ old = *location;
+ if (old == comparand)
+ *location = value;
+ LeaveCriticalSection(&interlocked_mutex);
+
+ return old;
+#endif
+}
+
+/*
+ * Icall for System.Threading.Interlocked.CompareExchange(long&,long,long).
+ * Atomically compares *location with comparand and, if they are equal,
+ * stores value. Always returns the original contents of *location.
+ *
+ * 64-bit path: a gint64 fits in a pointer, so the operation maps directly
+ * onto InterlockedCompareExchangePointer. 32-bit path: emulated under the
+ * global interlocked_mutex.
+ */
+gint64
+ves_icall_System_Threading_Interlocked_CompareExchange_Long (gint64 *location, gint64 value, gint64 comparand)
+{
+#if SIZEOF_VOID_P == 8
+ return (gint64)InterlockedCompareExchangePointer((gpointer *) location, (gpointer)value, (gpointer)comparand);
+#else
+ gint64 old;
+
+ /* No 64-bit CAS available: serialize through the global mutex. */
+ EnterCriticalSection(&interlocked_mutex);
+ old = *location;
+ if (old == comparand)
+ *location = value;
+ LeaveCriticalSection(&interlocked_mutex);
+
+ return old;
+#endif
+}
+
+/*
+ * Icall for System.Threading.Interlocked.Add(int&, int).
+ * Atomically adds value to *location.
+ *
+ * On 64-bit targets this icall is expected to be replaced by a JIT
+ * intrinsic, so reaching the C implementation raises NotImplementedException.
+ * On 32-bit targets the add is emulated under the global interlocked_mutex.
+ *
+ * NOTE(review): this returns the value *before* the addition, whereas the
+ * .NET Interlocked.Add contract is documented to return the new sum —
+ * confirm whether the managed caller compensates, otherwise this should
+ * return orig + value.
+ */
+gint32
+ves_icall_System_Threading_Interlocked_Add_Int (gint32 *location, gint32 value)
+{
+#if SIZEOF_VOID_P == 8
+ /* Should be implemented as a JIT intrinsic */
+ mono_raise_exception (mono_get_exception_not_implemented (NULL));
+ return 0;
+#else
+ gint32 orig;
+
+ EnterCriticalSection(&interlocked_mutex);
+ orig = *location;
+ *location = orig + value;
+ LeaveCriticalSection(&interlocked_mutex);
+
+ return orig;
+#endif
+}
+
+/*
+ * Icall for System.Threading.Interlocked.Add(long&, long).
+ * Atomically adds value to *location.
+ *
+ * On 64-bit targets this icall is expected to be replaced by a JIT
+ * intrinsic, so reaching the C implementation raises NotImplementedException.
+ * On 32-bit targets the add is emulated under the global interlocked_mutex.
+ *
+ * NOTE(review): returns the value *before* the addition; the .NET
+ * Interlocked.Add contract documents returning the new sum — confirm the
+ * managed side compensates (same concern as the Int variant above is
+ * intentional: both mirror each other).
+ */
+gint64
+ves_icall_System_Threading_Interlocked_Add_Long (gint64 *location, gint64 value)
+{
+#if SIZEOF_VOID_P == 8
+ /* Should be implemented as a JIT intrinsic */
+ mono_raise_exception (mono_get_exception_not_implemented (NULL));
+ return 0;
+#else
+ gint64 orig;
+
+ EnterCriticalSection(&interlocked_mutex);
+ orig = *location;
+ *location = orig + value;
+ LeaveCriticalSection(&interlocked_mutex);
+
+ return orig;
+#endif
+}
+
+/*
+ * Icall for System.Threading.Interlocked.Read(long&).
+ * Returns the value of *location as an atomic 64-bit read.
+ *
+ * On 64-bit targets an aligned 64-bit load is naturally atomic, so a plain
+ * dereference suffices. On 32-bit targets the read could tear, so it is
+ * performed under the global interlocked_mutex.
+ */
+gint64
+ves_icall_System_Threading_Interlocked_Read_Long (gint64 *location)
+{
+#if SIZEOF_VOID_P == 8
+ /* 64 bit reads are already atomic */
+ return *location;
+#else
+ gint64 res;
+
+ EnterCriticalSection(&interlocked_mutex);
+ res = *location;
+ LeaveCriticalSection(&interlocked_mutex);
+
+ return res;
+#endif
+}
+
+/*
+ * Icall backing MonoThread state clearing: atomically (under the thread's
+ * synch_lock monitor) clears the given ThreadState bits from this->state.
+ * If the Background bit is among the bits being cleared, signals
+ * background_change_event so the main thread can rebuild its list of
+ * threads to wait for at shutdown.
+ */
+void
+ves_icall_System_Threading_Thread_ClrState (MonoThread* this, guint32 state)
+{
+ mono_monitor_enter (this->synch_lock);
+ this->state &= ~state;
+ if (state & ThreadState_Background) {
+ /* If the thread changes the background mode, the main thread has to
+ * be notified, since it has to rebuild the list of threads to
+ * wait for.
+ */
+ SetEvent (background_change_event);
+ }
+ mono_monitor_exit (this->synch_lock);
+}
+
+/*
+ * Icall backing MonoThread state setting: atomically (under the thread's
+ * synch_lock monitor) ORs the given ThreadState bits into this->state.
+ * If the Background bit is among the bits being set, signals
+ * background_change_event so the main thread can rebuild its list of
+ * threads to wait for at shutdown. Mirrors ClrState, which removes bits.
+ */
+void
+ves_icall_System_Threading_Thread_SetState (MonoThread* this, guint32 state)
+{
+ mono_monitor_enter (this->synch_lock);
+ this->state |= state;
+ if (state & ThreadState_Background) {
+ /* If the thread changes the background mode, the main thread has to
+ * be notified, since it has to rebuild the list of threads to
+ * wait for.
+ */
+ SetEvent (background_change_event);
+ }
+ mono_monitor_exit (this->synch_lock);
+}
+
+/*
+ * Icall backing MonoThread.ThreadState reads: returns a consistent snapshot
+ * of this->state, taken under the thread's synch_lock monitor so it cannot
+ * interleave with a concurrent SetState/ClrState.
+ */
+guint32
+ves_icall_System_Threading_Thread_GetState (MonoThread* this)
+{
+ guint32 state;
+ mono_monitor_enter (this->synch_lock);
+ state = this->state;
+ mono_monitor_exit (this->synch_lock);
+ return state;
+}
+