3 #include <cpu/x86/cache.h>
4 #include <cpu/x86/mtrr.h>
5 #include <cpu/amd/mtrr.h>
6 #include <cpu/x86/msr.h>
9 static void disable_var_mtrr(unsigned reg)
11 /* The invalid bit is kept in the mask so we simply
12 * clear the relevent mask register to disable a
16 zero.lo = zero.hi = 0;
17 wrmsr(MTRRphysMask_MSR(reg), zero);
21 static void set_var_mtrr(
22 unsigned reg, unsigned base, unsigned size, unsigned type)
25 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
26 /* FIXME: It only support 4G less range */
28 basem.lo = base | type;
30 wrmsr(MTRRphysBase_MSR(reg), basem);
31 maskm.lo = ~(size - 1) | 0x800;
32 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
33 wrmsr(MTRRphysMask_MSR(reg), maskm);
37 static void set_var_mtrr_x(
38 unsigned reg, uint32_t base_lo, uint32_t base_hi, uint32_t size_lo, uint32_t size_hi, unsigned type)
41 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
43 basem.lo = (base_lo & 0xfffff000) | type;
44 basem.hi = base_hi & ((1<<(CONFIG_CPU_ADDR_BITS-32))-1);
45 wrmsr(MTRRphysBase_MSR(reg), basem);
46 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
48 maskm.lo = ~(size_lo - 1) | 0x800;
51 maskm.hi &= ~(size_hi - 1);
53 wrmsr(MTRRphysMask_MSR(reg), maskm);
57 static inline void cache_lbmem(int type)
59 /* Enable caching for 0 - 1MB using variable mtrr */
61 set_var_mtrr(0, 0x00000000, CONFIG_RAMTOP, type);
65 #if !defined(CONFIG_USE_DCACHE_RAM) || (CONFIG_USE_DCACHE_RAM == 0)
/* The fixed and variable MTRRs power up with random values;
 * clear them to MTRR_TYPE_UNCACHEABLE for safety.
69 static void do_early_mtrr_init(const unsigned long *mtrr_msrs)
72 * The cache is not enabled in cr0 nor in MTRRdefType_MSR
73 * entry32.inc ensures the cache is not enabled in cr0
76 const unsigned long *msr_addr;
78 /* Inialize all of the relevant msrs to 0 */
82 for(msr_addr = mtrr_msrs; (msr_nr = *msr_addr); msr_addr++) {
86 #if defined(CONFIG_XIP_ROM_SIZE)
87 /* enable write through caching so we can do execute in place
90 set_var_mtrr(1, REAL_XIP_ROM_BASE, CONFIG_XIP_ROM_SIZE, MTRR_TYPE_WRBACK);
93 /* Set the default memory type and enable fixed and variable MTRRs
95 /* Enable Variable MTRRs */
98 wrmsr(MTRRdefType_MSR, msr);
/* Reset all fixed and variable MTRRs to a known (uncacheable) state
 * early in boot.  The table lists every MTRR MSR and is terminated by 0.
 * NOTE(review): the fixed-MTRR entries below were reconstructed from a
 * damaged source — verify against the original table.
 */
static inline void early_mtrr_init(void)
{
	static const unsigned long mtrr_msrs[] = {
		/* fixed mtrr */
		0x250, 0x258, 0x259,
		0x268, 0x269, 0x26A,
		0x26B, 0x26C, 0x26D,
		0x26E, 0x26F,
		/* var mtrr */
		0x200, 0x201, 0x202, 0x203,
		0x204, 0x205, 0x206, 0x207,
		0x208, 0x209, 0x20A, 0x20B,
		0x20C, 0x20D, 0x20E, 0x20F,
		/* NULL end of table */
		0
	};
	disable_cache();
	do_early_mtrr_init(mtrr_msrs);
	enable_cache();
}
124 static inline int early_mtrr_init_detected(void)
127 /* See if MTRR's are enabled.
128 * a #RESET disables them while an #INIT
129 * preserves their state. This works
130 * on both Intel and AMD cpus, at least
131 * according to the documentation.
133 msr = rdmsr(MTRRdefType_MSR);
134 return msr.lo & 0x00000800;
137 #endif /* EARLYMTRR_C */