+
+ post_code(0x30)
+
+ /* Disable cache. */
+ movl %cr0, %eax
+ orl $(1 << 30), %eax
+ movl %eax, %cr0
+
+ post_code(0x31)
+
+ /* Disable MTRR. */
+ movl $MTRRdefType_MSR, %ecx
+ rdmsr
+ andl $(~MTRRdefTypeEn), %eax
+ wrmsr
+
+ post_code(0x32)
+
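+ /* Invalidate the cache, discarding its contents without writing them back. */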
+ invd
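+ /* Disabled: this would zero the first two variable MTRR base/mask pairs. */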
+#if 0
+ xorl %eax, %eax
+ xorl %edx, %edx
+ movl $MTRRphysBase_MSR(0), %ecx
+ wrmsr
+ movl $MTRRphysMask_MSR(0), %ecx
+ wrmsr
+ movl $MTRRphysBase_MSR(1), %ecx
+ wrmsr
+ movl $MTRRphysMask_MSR(1), %ecx
+ wrmsr
+#endif
+
+ post_code(0x33)
+
+ /* Enable cache. */
+ movl %cr0, %eax
+ andl $~((1 << 30) | (1 << 29)), %eax /* clear CD and NW */
+ movl %eax, %cr0
+
+ post_code(0x36)
+
+ /* Disable cache again while the MTRRs are reprogrammed. */
+ movl %cr0, %eax
+ orl $(1 << 30), %eax
+ movl %eax, %cr0
+
+ post_code(0x38)
+
+ /* Enable Write Back and Speculative Reads for the first 1MB. */
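+ /* MTRR pair 0: base 0, type write-back; the mask sets bits 35:20,
+  * selecting a 1MB range within the 36-bit address space. */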
+ movl $MTRRphysBase_MSR(0), %ecx
+ movl $(0x00000000 | MTRR_TYPE_WRBACK), %eax
+ xorl %edx, %edx
+ wrmsr
+ movl $MTRRphysMask_MSR(0), %ecx
+ movl $(~(1024 * 1024 - 1) | MTRRphysMaskValid), %eax
+ movl $0x0000000f, %edx /* 36-bit address space */
+ wrmsr
+
+ post_code(0x39)
+
+ /* And enable cache again after setting MTRRs. */
+ movl %cr0, %eax
+ andl $~((1 << 30) | (1 << 29)), %eax
+ movl %eax, %cr0
+
+ post_code(0x3a)
+
+ /* Enable MTRR. */
+ movl $MTRRdefType_MSR, %ecx
+ rdmsr
+ orl $MTRRdefTypeEn, %eax
+ wrmsr
+
+ post_code(0x3b)
+
+ /* Enable prefetchers. */
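+ /* MSR 0x1a0 is IA32_MISC_ENABLE; the bits cleared below are the
+  * model-specific prefetcher disable bits. */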
+ movl $0x01a0, %ecx
+ rdmsr
+ andl $~((1 << 9) | (1 << 19)), %eax /* hardware and adjacent-line prefetch */
+ andl $~((1 << 5) | (1 << 7)), %edx /* DCU and IP prefetch (bits 37, 39) */
+ wrmsr
+
+ /* Invalidate the cache again. */
+ invd
+
+ post_code(0x3c)
+
+ /* Clear boot_complete flag. */
+ xorl %ebp, %ebp
+__main:
+ post_code(POST_PREPARE_RAMSTAGE)
+ cld /* Clear direction flag. */
+
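+ /* Save the boot_complete flag; %ebp is reused for the frame pointer below. */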
+ movl %ebp, %esi
+
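+ /* Switch to the romstage stack and pass the flag to copy_and_run(). */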
+ movl $ROMSTAGE_STACK, %esp
+ movl %esp, %ebp
+ pushl %esi
+ call copy_and_run
+
+.Lhlt:
+ post_code(POST_DEAD_CODE)
+ hlt
+ jmp .Lhlt