#include <cpu/x86/cache.h>
#include <cpu/x86/msr.h>
#include <cpu/x86/mtrr.h>
8 static void disable_var_mtrr(unsigned reg)
10 /* The invalid bit is kept in the mask so we simply
11 * clear the relevent mask register to disable a
15 zero.lo = zero.hi = 0;
16 wrmsr(MTRRphysMask_MSR(reg), zero);
20 static void set_var_mtrr(
21 unsigned reg, unsigned base, unsigned size, unsigned type)
24 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
25 /* FIXME: It only support 4G less range */
27 basem.lo = base | type;
29 wrmsr(MTRRphysBase_MSR(reg), basem);
30 maskm.lo = ~(size - 1) | 0x800;
31 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
32 wrmsr(MTRRphysMask_MSR(reg), maskm);
36 static void set_var_mtrr_x(
37 unsigned reg, uint32_t base_lo, uint32_t base_hi, uint32_t size_lo, uint32_t size_hi, unsigned type)
40 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
42 basem.lo = (base_lo & 0xfffff000) | type;
43 basem.hi = base_hi & ((1<<(CONFIG_CPU_ADDR_BITS-32))-1);
44 wrmsr(MTRRphysBase_MSR(reg), basem);
45 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
47 maskm.lo = ~(size_lo - 1) | 0x800;
50 maskm.hi &= ~(size_hi - 1);
52 wrmsr(MTRRphysMask_MSR(reg), maskm);
56 static inline void cache_lbmem(int type)
58 /* Enable caching for 0 - 1MB using variable mtrr */
60 set_var_mtrr(0, 0x00000000, CONFIG_RAMTOP, type);
64 /* the fixed and variable MTTRs are power-up with random values,
65 * clear them to MTRR_TYPE_UNCACHEABLE for safty.
67 static void do_early_mtrr_init(const unsigned long *mtrr_msrs)
70 * The cache is not enabled in cr0 nor in MTRRdefType_MSR
71 * entry32.inc ensures the cache is not enabled in cr0
74 const unsigned long *msr_addr;
76 /* Inialize all of the relevant msrs to 0 */
80 for(msr_addr = mtrr_msrs; (msr_nr = *msr_addr); msr_addr++) {
84 #if defined(CONFIG_XIP_ROM_SIZE)
85 /* enable write through caching so we can do execute in place
88 set_var_mtrr(1, REAL_XIP_ROM_BASE, CONFIG_XIP_ROM_SIZE, MTRR_TYPE_WRBACK);
91 /* Set the default memory type and enable fixed and variable MTRRs
93 /* Enable Variable MTRRs */
96 wrmsr(MTRRdefType_MSR, msr);
/* Clear all fixed and variable MTRRs to a known (uncacheable)
 * state and enable the variable MTRRs.  Safe to call from early
 * (pre-RAM) code; the cache is disabled around the update.
 */
static inline void early_mtrr_init(void)
{
	static const unsigned long mtrr_msrs[] = {
		/* fixed mtrr */
		0x250, 0x258, 0x259,
		0x268, 0x269, 0x26A,
		0x26B, 0x26C, 0x26D,
		0x26E, 0x26F,
		/* var mtrr */
		0x200, 0x201, 0x202, 0x203,
		0x204, 0x205, 0x206, 0x207,
		0x208, 0x209, 0x20A, 0x20B,
		0x20C, 0x20D, 0x20E, 0x20F,
		/* NULL end of table */
		0
	};
	disable_cache();
	do_early_mtrr_init(mtrr_msrs);
	enable_cache();
}
121 static inline int early_mtrr_init_detected(void)
124 /* See if MTRR's are enabled.
125 * a #RESET disables them while an #INIT
126 * preserves their state. This works
127 * on both Intel and AMD cpus, at least
128 * according to the documentation.
130 msr = rdmsr(MTRRdefType_MSR);
131 return msr.lo & 0x00000800;
134 #endif /* EARLYMTRR_C */