+ guint32 offset;
+
+ /* Patch the jump table entry used by the plt entry */
+
+#if defined(__native_client_codegen__) || defined(__native_client__)
+ /* for both compiler and runtime */
+ /* A PLT entry: */
+ /* mov <DISP>(%ebx), %ecx */
+ /* and 0xffffffe0, %ecx */
+ /* jmp *%ecx */
+ /* 0x8b 0x8b: "mov r32, r/m32" whose ModRM (mod=10, reg=%ecx, rm=%ebx)
+  * selects a 32-bit %ebx-relative operand; the GOT displacement is the
+  * 4 bytes that follow these two. */
+ g_assert (code [0] == 0x8b);
+ g_assert (code [1] == 0x8b);
+
+ offset = *(guint32*)(code + 2);
+#elif defined(__default_codegen__)
+ /* A PLT entry: jmp *<DISP>(%ebx) */
+ /* 0xff 0xa3: FF /4 (indirect jmp), mod=10, rm=%ebx — i.e. the
+  * jmp *disp32(%ebx) shown above; disp32 follows at code + 2. */
+ g_assert (code [0] == 0xff);
+ g_assert (code [1] == 0xa3);
+
+ offset = *(guint32*)(code + 2);
+#endif /* __native_client_codegen__ */
+ /* If the caller did not hand us the GOT, recover it from the saved
+  * register state: MONO_ARCH_GOT_REG holds the GOT base (%ebx per the
+  * PLT sequences above). */
+ if (!got)
+ got = (gpointer*)(gsize) regs [MONO_ARCH_GOT_REG];
+ /* Redirect the GOT slot this PLT entry dispatches through, so every
+  * later call through the entry lands on 'addr'. */
+ *(guint8**)((guint8*)got + offset) = addr;
+}
+
+/*
+ * get_vcall_slot:
+ *
+ *   Decode the indirect (vtable) call instruction at the call site CODE,
+ * recovering the base register and displacement it called through, using
+ * the register state saved in REGS.  The displacement is returned in
+ * *DISPLACEMENT.  NOTE(review): the function's tail is outside this
+ * chunk, so the exact return value (presumably the slot address derived
+ * from regs[reg] + disp) cannot be confirmed here.
+ */
+static gpointer
+get_vcall_slot (guint8 *code, mgreg_t *regs, int *displacement)
+{
+ /* NACL_SIZE(a, b) presumably selects 'a' for normal builds and 'b'
+  * under Native Client, whose sandboxed call sequences are longer —
+  * TODO confirm against the macro's definition. */
+ const int kBufSize = NACL_SIZE (8, 16);
+ guint8 buf [64];
+ guint8 reg = 0;
+ gint32 disp = 0;
+
+ /* Copy the bytes around the call site into buf, with any breakpoint
+  * patching undone, so we decode the original instruction bytes. */
+ mono_breakpoint_clean_code (NULL, code, kBufSize, buf, kBufSize);
+ /* NOTE(review): rewinds a fixed 8 bytes from the buffer start even
+  * though kBufSize is 16 under NaCl — verify this matches how
+  * mono_breakpoint_clean_code fills buf. */
+ code = buf + 8;
+
+ *displacement = 0;
+
+ /* call *disp32(%reg): opcode 0xff with ModRM.mod == 2 (32-bit
+  * displacement) and a reg field of /2 or /3 (the 0x18 mask accepts
+  * both call forms).  The base register is ModRM.rm, the displacement
+  * the following 4 bytes. */
+ if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) {
+ reg = code [1] & 0x07;
+ disp = *((gint32*)(code + 2));
+#if defined(__native_client_codegen__) || defined(__native_client__)
+ /* NaCl masked indirect call with a 32-bit displacement, presumably:
+  *   mov disp32(%reg), %ecx ; and $imm8, %ecx ; call *%ecx
+  * (0x83 0xe1 = and imm8,%ecx; 0xff 0xd1 = call *%ecx; 0x8b = the
+  * earlier mov), so reg/disp are read at negative offsets, before the
+  * point 'code' was rewound to.  NOTE(review): confirm buffer layout. */
+ } else if ((code[1] == 0x83) && (code[2] == 0xe1) && (code[4] == 0xff) &&
+ (code[5] == 0xd1) && (code[-5] == 0x8b)) {
+ disp = *((gint32*)(code - 3));
+ reg = code[-4] & 0x07;
+ /* Same masked sequence but with an 8-bit displacement, sign-extended
+  * via the (signed char) cast. */
+ } else if ((code[-2] == 0x8b) && (code[1] == 0x83) && (code[4] == 0xff)) {
+ reg = code[-1] & 0x07;
+ disp = (signed char)code[0];
+#endif
+ } else {
+ /* Unrecognized call-site pattern: this is a hard runtime bug, not a
+  * recoverable condition. */
+ g_assert_not_reached ();
+ return NULL;
+ }