3 #include <cpu/x86/cache.h>
4 #include <cpu/x86/mtrr.h>
5 #include <cpu/amd/mtrr.h>
6 #include <cpu/x86/msr.h>
9 static void disable_var_mtrr(unsigned reg)
11 /* The invalid bit is kept in the mask so we simply
12 * clear the relevent mask register to disable a
16 zero.lo = zero.hi = 0;
17 wrmsr(MTRRphysMask_MSR(reg), zero);
21 static void set_var_mtrr(
22 unsigned reg, unsigned base, unsigned size, unsigned type)
25 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
26 /* FIXME: It only support 4G less range */
28 basem.lo = base | type;
30 wrmsr(MTRRphysBase_MSR(reg), basem);
31 maskm.lo = ~(size - 1) | 0x800;
32 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
33 wrmsr(MTRRphysMask_MSR(reg), maskm);
37 static void set_var_mtrr_x(
38 unsigned reg, uint32_t base_lo, uint32_t base_hi, uint32_t size_lo, uint32_t size_hi, unsigned type)
41 /* Bit Bit 32-35 of MTRRphysMask should be set to 1 */
43 basem.lo = (base_lo & 0xfffff000) | type;
44 basem.hi = base_hi & ((1<<(CONFIG_CPU_ADDR_BITS-32))-1);
45 wrmsr(MTRRphysBase_MSR(reg), basem);
46 maskm.hi = (1<<(CONFIG_CPU_ADDR_BITS-32))-1;
48 maskm.lo = ~(size_lo - 1) | 0x800;
51 maskm.hi &= ~(size_hi - 1);
53 wrmsr(MTRRphysMask_MSR(reg), maskm);
57 static inline void cache_lbmem(int type)
59 /* Enable caching for 0 - 1MB using variable mtrr */
61 set_var_mtrr(0, 0x00000000, CONFIG_RAMTOP, type);
65 /* the fixed and variable MTTRs are power-up with random values,
66 * clear them to MTRR_TYPE_UNCACHEABLE for safty.
68 static void do_early_mtrr_init(const unsigned long *mtrr_msrs)
71 * The cache is not enabled in cr0 nor in MTRRdefType_MSR
72 * entry32.inc ensures the cache is not enabled in cr0
75 const unsigned long *msr_addr;
77 /* Inialize all of the relevant msrs to 0 */
81 for(msr_addr = mtrr_msrs; (msr_nr = *msr_addr); msr_addr++) {
85 #if defined(CONFIG_XIP_ROM_SIZE)
86 /* enable write through caching so we can do execute in place
89 set_var_mtrr(1, REAL_XIP_ROM_BASE, CONFIG_XIP_ROM_SIZE, MTRR_TYPE_WRBACK);
92 /* Set the default memory type and enable fixed and variable MTRRs
94 /* Enable Variable MTRRs */
97 wrmsr(MTRRdefType_MSR, msr);
/* Reset every fixed and variable MTRR to a known (uncacheable)
 * state, then re-enable the cache.
 */
static inline void early_mtrr_init(void)
{
	/* MSR addresses of all fixed and variable MTRRs,
	 * zero-terminated for do_early_mtrr_init(). */
	static const unsigned long mtrr_msrs[] = {
		/* fixed mtrr */
		0x250, 0x258, 0x259,
		0x268, 0x269, 0x26A,
		0x26B, 0x26C, 0x26D,
		0x26E, 0x26F,
		/* var mtrr */
		0x200, 0x201, 0x202, 0x203,
		0x204, 0x205, 0x206, 0x207,
		0x208, 0x209, 0x20A, 0x20B,
		0x20C, 0x20D, 0x20E, 0x20F,
		/* NULL end of table */
		0x000
	};
	disable_cache();
	do_early_mtrr_init(mtrr_msrs);
	enable_cache();
}
122 static inline int early_mtrr_init_detected(void)
125 /* See if MTRR's are enabled.
126 * a #RESET disables them while an #INIT
127 * preserves their state. This works
128 * on both Intel and AMD cpus, at least
129 * according to the documentation.
131 msr = rdmsr(MTRRdefType_MSR);
132 return msr.lo & 0x00000800;
135 #endif /* EARLYMTRR_C */