2 * This file is part of the coreboot project.
4 * Copyright (C) 2010 Advanced Micro Devices, Inc.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 of the License.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 /* Description: Main memory controller system configuration for DDR 3 */
22 /* KNOWN ISSUES - ERRATA
24 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
25 * is 1 busclock off. No fix planned. The controller is not ordinarily in
28 * 32 Byte burst not supported. No fix planned. The controller is not
29 * ordinarily in 64-bit mode.
 * Trc precision does not use the extra JEDEC-defined fractional component.
 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
34 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
38 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
39 struct DCTStatStruc *pDCTstatA);
40 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
41 struct DCTStatStruc *pDCTstatA);
42 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
43 struct DCTStatStruc *pDCTstatA);
44 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
45 struct DCTStatStruc *pDCTstatA);
46 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
47 struct DCTStatStruc *pDCTstatA);
48 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
49 struct DCTStatStruc *pDCTstat);
50 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
51 struct DCTStatStruc *pDCTstat);
52 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
53 struct DCTStatStruc *pDCTstatA);
54 static u8 NodePresent_D(u8 Node);
55 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
56 struct DCTStatStruc *pDCTstatA);
57 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
58 struct DCTStatStruc *pDCTstat, u8 dct);
59 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
60 struct DCTStatStruc *pDCTstat, u8 dct);
61 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
62 struct DCTStatStruc *pDCTstat, u8 dct);
63 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
64 struct DCTStatStruc *pDCTstat);
65 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
66 struct DCTStatStruc *pDCTstat, u8 dct);
67 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
68 struct DCTStatStruc *pDCTstat, u8 dct);
69 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
70 struct DCTStatStruc *pDCTstat, u8 dct);
71 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
72 struct DCTStatStruc *pDCTstat, u8 dct);
73 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
74 struct DCTStatStruc *pDCTstat, u8 dct);
75 static u16 Get_Fk_D(u8 k);
76 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
77 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
78 struct DCTStatStruc *pDCTstat);
79 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
80 struct DCTStatStruc *pDCTstat, u8 dct);
81 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
82 struct DCTStatStruc *pDCTstat, u8 dct);
83 static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
84 struct DCTStatStruc *pDCTstat, u8 dct);
85 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
86 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
87 struct DCTStatStruc *pDCTstat, u8 dct);
88 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
89 struct DCTStatStruc *pDCTstat, u8 dct);
90 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,\
91 struct DCTStatStruc *pDCTstat, u8 dct);
92 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
93 struct DCTStatStruc *pDCTstat, u8 dct);
94 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
95 struct DCTStatStruc *pDCTstat, u8 dct);
96 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
97 struct DCTStatStruc *pDCTstat, u8 dct);
98 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
99 struct DCTStatStruc *pDCTstat, u8 dct);
100 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
101 struct DCTStatStruc *pDCTstat, u8 dct);
102 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
103 struct DCTStatStruc *pDCTstat, u8 dct);
104 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
105 struct DCTStatStruc *pDCTstat);
106 static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
107 u32 dev, u32 index_reg);
108 static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
109 u32 dev, u32 index_reg);
110 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
111 u32 dev, u32 index_reg, u32 index);
112 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
113 struct DCTStatStruc *pDCTstat);
114 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
115 u32 dev, u32 index_reg, u32 index);
116 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
117 struct DCTStatStruc *pDCTstat);
118 static void mct_init(struct MCTStatStruc *pMCTstat,
119 struct DCTStatStruc *pDCTstat);
120 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
121 struct DCTStatStruc *pDCTstat);
122 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
123 struct DCTStatStruc *pDCTstatA);
124 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
125 struct DCTStatStruc *pDCTstat, u8 dct);
126 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
127 struct DCTStatStruc *pDCTstat, u8 dct);
128 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
129 struct DCTStatStruc *pDCTstat, u8 dct);
130 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
131 struct DCTStatStruc *pDCTstat, u8 dct);
132 static u32 mct_NodePresent_D(void);
133 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
134 struct DCTStatStruc *pDCTstatA);
135 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
136 struct DCTStatStruc *pDCTstatA);
137 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
138 struct DCTStatStruc *pDCTstat, u8 dct);
139 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
140 struct DCTStatStruc *pDCTstat);
141 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
142 struct DCTStatStruc *pDCTstat);
143 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
144 struct DCTStatStruc *pDCTstat);
145 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
146 struct DCTStatStruc *pDCTstat);
147 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
148 struct DCTStatStruc *pDCTstatA);
149 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
150 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
151 struct DCTStatStruc *pDCTstat, u8 dct);
152 static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
153 struct DCTStatStruc *pDCTstat, u8 dct);
154 static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
155 struct DCTStatStruc *pDCTstat, u8 dct);
156 static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
157 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
158 static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
159 struct DCTStatStruc *pDCTstat, u8 dct);
161 static u32 mct_MR1Odt_RDimm(struct MCTStatStruc *pMCTstat,
162 struct DCTStatStruc *pDCTstat, u8 dct, u32 MrsChipSel);
163 static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
164 struct DCTStatStruc *pDCTstat, u8 dimm);
165 static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
166 static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
167 static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstatA);
168 static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
169 struct DCTStatStruc *pDCTstat, u8 dct);
170 static void SyncSetting(struct DCTStatStruc *pDCTstat);
171 static u8 crcCheck(u8 smbaddr);
172 static void mct_ExtMCTConfig_Bx(struct DCTStatStruc *pDCTstat);
173 static void mct_ExtMCTConfig_Cx(struct DCTStatStruc *pDCTstat);
175 /*See mctAutoInitMCT header for index relationships to CL and T*/
176 static const u16 Table_F_k[] = {00,200,266,333,400,533 };
177 static const u8 Tab_BankAddr[] = {0x3F,0x01,0x09,0x3F,0x3F,0x11,0x0A,0x19,0x12,0x1A,0x21,0x22,0x23};
178 static const u8 Table_DQSRcvEn_Offset[] = {0x00,0x01,0x10,0x11,0x2};
180 /****************************************************************************
181 Describe how platform maps MemClk pins to logical DIMMs. The MemClk pins
182 are identified based on BKDG definition of Fn2x88[MemClkDis] bitmap.
183 AGESA will base on this value to disable unused MemClk to save power.
 * If MEMCLK_MAPPING contains all zeroes, AGESA will use the
 * default MemClkDis setting based on package type.
189 BKDG definition of Fn2x88[MemClkDis] bitmap for AM3 package is like below:
190 Bit AM3/S1g3 pin name
200 And platform has the following routing:
201 CS0 M[B,A]_CLK_H/L[4]
202 CS1 M[B,A]_CLK_H/L[2]
203 CS2 M[B,A]_CLK_H/L[3]
204 CS3 M[B,A]_CLK_H/L[5]
207 ; CS0 CS1 CS2 CS3 CS4 CS5 CS6 CS7
208 MEMCLK_MAPPING EQU 00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
211 /* Note: If you are not sure about the pin mappings at initial stage, we dont have to disable MemClk.
212 * Set entries in the tables all 0xFF. */
213 static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
214 static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
215 static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
216 static const u8 Tab_ManualCLKDis[]= {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};
218 static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
219 static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
220 static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
221 static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};
223 static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
224 struct DCTStatStruc *pDCTstatA)
227 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
228 * options). It is the responsibility of PCI subsystem to create an uncacheable
229 * IO region below 4GB and to adjust TOP_MEM downward prior to any IO mapping or
230 * accesses. It is the same responsibility of the CPU sub-system prior to
233 * Slot Number is an external convention, and is determined by OEM with accompanying
234 * silk screening. OEM may choose to use Slot number convention which is consistent
235 * with DIMM number conventions. All AMD engineering platforms do.
237 * Build Requirements:
238 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
239 * defined in mcti.inc.
241 * Run-Time Requirements:
242 * 1. Complete Hypertransport Bus Configuration
243 * 2. SMBus Controller Initialized
244 * 1. BSP in Big Real Mode
245 * 2. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
246 * 3. Checksummed or Valid NVRAM bits
247 * 4. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
248 * 5. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
249 * 6. All var MTRRs reset to zero
250 * 7. State of NB_CFG.DisDatMsk set properly on all CPUs
251 * 8. All CPUs at 2Ghz Speed (unless DQS training is not installed).
252 * 9. All cHT links at max Speed/Width (unless DQS training is not installed).
255 * Global relationship between index values and item values:
257 * pDCTstat.CASL pDCTstat.Speed
259 * --------------------------
273 mctInitMemGPIOs_A_D(); /* Set any required GPIOs*/
276 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
277 struct DCTStatStruc *pDCTstat;
278 pDCTstat = pDCTstatA + Node;
279 pDCTstat->Node_ID = Node;
280 pDCTstat->dev_host = PA_HOST(Node);
281 pDCTstat->dev_map = PA_MAP(Node);
282 pDCTstat->dev_dct = PA_DCT(Node);
283 pDCTstat->dev_nbmisc = PA_NBMISC(Node);
284 pDCTstat->NodeSysBase = node_sys_base;
286 mct_init(pMCTstat, pDCTstat);
287 mctNodeIDDebugPort_D();
288 pDCTstat->NodePresent = NodePresent_D(Node);
289 if (pDCTstat->NodePresent) { /* See if Node is there*/
290 clear_legacy_Mode(pMCTstat, pDCTstat);
291 pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);
293 mct_InitialMCT_D(pMCTstat, pDCTstat);
295 mctSMBhub_Init(Node); /* Switch SMBUS crossbar to proper node*/
297 mct_initDCT(pMCTstat, pDCTstat);
298 if (pDCTstat->ErrCode == SC_FatalErr) {
299 goto fatalexit; /* any fatal errors?*/
300 } else if (pDCTstat->ErrCode < SC_StopError) {
303 } /* if Node present */
304 node_sys_base = pDCTstat->NodeSysBase;
305 node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
307 if (NodesWmem == 0) {
308 printk(BIOS_DEBUG, "No Nodes?!\n");
312 printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
313 SyncDCTsReady_D(pMCTstat, pDCTstatA); /* Make sure DCTs are ready for accesses.*/
315 printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
316 HTMemMapInit_D(pMCTstat, pDCTstatA); /* Map local memory into system address space.*/
319 printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
320 CPUMemTyping_D(pMCTstat, pDCTstatA); /* Map dram into WB/UC CPU cacheability */
321 mctHookAfterCPU(); /* Setup external northbridge(s) */
323 printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
324 DQSTiming_D(pMCTstat, pDCTstatA); /* Get Receiver Enable and DQS signal timing*/
326 printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
327 UMAMemTyping_D(pMCTstat, pDCTstatA); /* Fix up for UMA sizing */
329 printk(BIOS_DEBUG, "mctAutoInitMCT_D: :OtherTiming\n");
330 mct_OtherTiming(pMCTstat, pDCTstatA);
332 if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
336 InterleaveNodes_D(pMCTstat, pDCTstatA);
337 InterleaveChannels_D(pMCTstat, pDCTstatA);
339 printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
340 if (ECCInit_D(pMCTstat, pDCTstatA)) { /* Setup ECC control and ECC check-bits*/
341 printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
342 MCTMemClr_D(pMCTstat,pDCTstatA);
345 mct_FinalMCT_D(pMCTstat, pDCTstatA);
346 printk(BIOS_DEBUG, "mctAutoInitMCT_D Done: Global Status: %x\n", pMCTstat->GStatus);
350 die("mct_d: fatalexit");
353 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
354 struct DCTStatStruc *pDCTstatA)
358 if (mctGet_NVbits(NV_CS_SpareCTL)) {
359 if (MCT_DIMM_SPARE_NO_WARM) {
360 /* Do no warm-reset DIMM spare */
361 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
362 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
365 mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
366 pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
370 /* Do warm-reset DIMM spare */
371 if (mctGet_NVbits(NV_DQSTrainCTL))
382 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
383 struct DCTStatStruc *pDCTstatA)
387 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
391 nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
392 /* FIXME: BOZO- DQS training every time*/
395 mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
396 phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);
398 if (nv_DQSTrainCTL) {
399 mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
400 /* TODO: should be in mctHookBeforeAnyTraining */
401 _WRMSR(0x26C, 0x04040404, 0x04040404);
402 _WRMSR(0x26D, 0x04040404, 0x04040404);
403 _WRMSR(0x26E, 0x04040404, 0x04040404);
404 _WRMSR(0x26F, 0x04040404, 0x04040404);
405 mct_WriteLevelization_HW(pMCTstat, pDCTstatA);
407 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
409 mct_TrainDQSPos_D(pMCTstat, pDCTstatA);
411 /* Second Pass never used for Barcelona! */
412 /* TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass); */
414 mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);
416 /* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
417 mctHookAfterAnyTraining();
418 mctSaveDQSSigTmg_D();
420 MCTMemClr_D(pMCTstat, pDCTstatA);
422 mctGetDQSSigTmg_D(); /* get values into data structure */
423 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
424 /* mctDoWarmResetMemClr_D(); */
425 MCTMemClr_D(pMCTstat, pDCTstatA);
429 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
430 struct DCTStatStruc *pDCTstatA)
432 u8 Node, Receiver, Channel, Dir, DIMM;
441 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
442 struct DCTStatStruc *pDCTstat;
443 pDCTstat = pDCTstatA + Node;
445 if (pDCTstat->DCTSysLimit) {
446 dev = pDCTstat->dev_dct;
447 for (Channel = 0;Channel < 2; Channel++) {
448 /* there are four receiver pairs,
449 loosely associated with chipselects.*/
450 index_reg = 0x98 + Channel * 0x100;
451 for (Receiver = 0; Receiver < 8; Receiver += 2) {
452 /* Set Receiver Enable Values */
453 mct_SetRcvrEnDly_D(pDCTstat,
455 1, /* FinalValue, From stack */
459 (Receiver >> 1) * 3 + 0x10, /* Addl_Index */
460 2); /* Pass Second Pass ? */
461 /* Restore Write levelization training data */
462 for (ByteLane = 0; ByteLane < 9; ByteLane ++) {
463 txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
464 index = Table_DQSRcvEn_Offset[ByteLane >> 1];
465 index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
466 val = Get_NB32_index_wait(dev, 0x98 + 0x100*Channel, index);
467 if (ByteLane & 1) { /* odd byte lane */
468 val &= ~(0xFF << 16);
474 Set_NB32_index_wait(dev, 0x98 + 0x100*Channel, index, val);
478 for (Channel = 0; Channel<2; Channel++) {
479 SetEccDQSRcvrEn_D(pDCTstat, Channel);
482 for (Channel = 0; Channel < 2; Channel++) {
484 index_reg = 0x98 + Channel * 0x100;
487 * when 400, 533, 667, it will support dimm0/1/2/3,
488 * and set conf for dimm0, hw will copy to dimm1/2/3
489 * set for dimm1, hw will copy to dimm3
490 * Rev A/B only support DIMM0/1 when 800Mhz and above
491 * + 0x100 to next dimm
492 * Rev C support DIMM0/1/2/3 when 800Mhz and above
493 * + 0x100 to next dimm
495 for (DIMM = 0; DIMM < 4; DIMM++) {
497 index = 0; /* CHA Write Data Timing Low */
499 if (pDCTstat->Speed >= 4) {
500 index = 0x100 * DIMM;
505 for (Dir = 0; Dir < 2; Dir++) {/* RD/WR */
506 p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
507 val = stream_to_int(p); /* CHA Read Data Timing High */
508 Set_NB32_index_wait(dev, index_reg, index+1, val);
509 val = stream_to_int(p+4); /* CHA Write Data Timing High */
510 Set_NB32_index_wait(dev, index_reg, index+2, val);
511 val = *(p+8); /* CHA Write ECC Timing */
512 Set_NB32_index_wait(dev, index_reg, index+3, val);
518 for (Channel = 0; Channel<2; Channel++) {
519 reg = 0x78 + Channel * 0x100;
520 val = Get_NB32(dev, reg);
522 val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
523 val &= ~(1<<DqsRcvEnTrain);
524 Set_NB32(dev, reg, val); /* program MaxRdLatency to correspond with current delay*/
530 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
531 struct DCTStatStruc *pDCTstatA)
534 u32 NextBase, BottomIO;
535 u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
536 u32 HoleSize, DramSelBaseAddr;
542 struct DCTStatStruc *pDCTstat;
544 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
546 if (pMCTstat->HoleBase == 0) {
547 DramHoleBase = mctGet_NVbits(NV_BottomIO);
549 DramHoleBase = pMCTstat->HoleBase >> (24-8);
552 BottomIO = DramHoleBase << (24-8);
555 pDCTstat = pDCTstatA + 0;
556 dev = pDCTstat->dev_map;
558 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
559 pDCTstat = pDCTstatA + Node;
560 devx = pDCTstat->dev_map;
562 pDCTstat = pDCTstatA + Node; /* ??? */
563 if (!pDCTstat->GangedMode) {
564 DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
565 /*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
566 val = pDCTstat->NodeSysLimit;
567 if ((val & 0xFF) == 0xFE) {
571 pDCTstat->DCTSysLimit = val;
574 base = pDCTstat->DCTSysBase;
575 limit = pDCTstat->DCTSysLimit;
579 DramSelBaseAddr += NextBase;
580 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);
583 if ((base < BottomIO) && (limit >= BottomIO)) {
585 pDCTstat->Status |= 1 << SB_HWHole;
586 pMCTstat->GStatus |= 1 << GSB_HWHole;
587 pDCTstat->DCTSysBase = base;
588 pDCTstat->DCTSysLimit = limit;
589 pDCTstat->DCTHoleBase = BottomIO;
590 pMCTstat->HoleBase = BottomIO;
591 HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
592 if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
593 base = DramSelBaseAddr;
594 val = ((base + HoleSize) >> (24-8)) & 0xFF;
595 DramHoleOffset = val;
596 val <<= 8; /* shl 16, rol 24 */
597 val |= DramHoleBase << 24;
598 val |= 1 << DramHoleValid;
599 Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
600 pDCTstat->DCTSysLimit += HoleSize;
601 base = pDCTstat->DCTSysBase;
602 limit = pDCTstat->DCTSysLimit;
603 } else if (base == BottomIO) {
605 pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
606 pDCTstat->Status |= 1<<SB_SWNodeHole;
607 pMCTstat->GStatus |= 1<<GSB_SoftHole;
608 pMCTstat->HoleBase = base;
612 pDCTstat->DCTSysBase = base;
613 pDCTstat->DCTSysLimit = limit;
615 /* No Remapping. Normal Contiguous mapping */
616 pDCTstat->DCTSysBase = base;
617 pDCTstat->DCTSysLimit = limit;
620 /*No Remapping. Normal Contiguous mapping*/
621 pDCTstat->DCTSysBase = base;
622 pDCTstat->DCTSysLimit = limit;
624 base |= 3; /* set WE,RE fields*/
625 pMCTstat->SysLimit = limit;
627 Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */
629 val = limit & 0xFFFF0000;
631 Set_NB32(dev, 0x44 + (Node << 3), val); /* set DstNode */
633 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x \n", Node, base, limit);
634 limit = pDCTstat->DCTSysLimit;
636 NextBase = (limit & 0xFFFF0000) + 0x10000;
640 /* Copy dram map from Node 0 to Node 1-7 */
641 for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
643 pDCTstat = pDCTstatA + Node;
644 devx = pDCTstat->dev_map;
646 if (pDCTstat->NodePresent) {
647 reg = 0x40; /*Dram Base 0*/
649 val = Get_NB32(dev, reg);
650 Set_NB32(devx, reg, val);
652 } while ( reg < 0x80);
654 break; /* stop at first absent Node */
658 /*Copy dram map to F1x120/124*/
659 mct_HTMemMapExt(pMCTstat, pDCTstatA);
662 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
663 struct DCTStatStruc *pDCTstatA)
666 /* Initiates a memory clear operation for all node. The mem clr
667 * is done in parallel. After the memclr is complete, all processors
668 * status are checked to ensure that memclr has completed.
671 struct DCTStatStruc *pDCTstat;
673 if (!mctGet_NVbits(NV_DQSTrainCTL)){
674 /* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
675 } else { /* NV_DQSTrainCTL == 1 */
676 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
677 pDCTstat = pDCTstatA + Node;
679 if (pDCTstat->NodePresent) {
680 DCTMemClr_Init_D(pMCTstat, pDCTstat);
683 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
684 pDCTstat = pDCTstatA + Node;
686 if (pDCTstat->NodePresent) {
687 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
693 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
694 struct DCTStatStruc *pDCTstat)
700 /* Initiates a memory clear operation on one node */
701 if (pDCTstat->DCTSysLimit) {
702 dev = pDCTstat->dev_dct;
706 val = Get_NB32(dev, reg);
707 } while (val & (1 << MemClrBusy));
709 val |= (1 << MemClrInit);
710 Set_NB32(dev, reg, val);
714 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
715 struct DCTStatStruc *pDCTstatA)
717 /* Ensures that memory clear has completed on all node.*/
719 struct DCTStatStruc *pDCTstat;
721 if (!mctGet_NVbits(NV_DQSTrainCTL)){
722 /* callback to wrapper: mctDoWarmResetMemClr_D */
723 } else { /* NV_DQSTrainCTL == 1 */
724 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
725 pDCTstat = pDCTstatA + Node;
727 if (pDCTstat->NodePresent) {
728 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
734 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
735 struct DCTStatStruc *pDCTstat)
738 u32 dev = pDCTstat->dev_dct;
741 /* Ensure that a memory clear operation has completed on one node */
742 if (pDCTstat->DCTSysLimit){
746 val = Get_NB32(dev, reg);
747 } while (val & (1 << MemClrBusy));
750 val = Get_NB32(dev, reg);
751 } while (!(val & (1 << Dr_MemClrStatus)));
754 val = 0x0FE40FC0; /* BKDG recommended */
755 val |= MCCH_FlushWrOnStpGnt; /* Set for S3 */
756 Set_NB32(dev, 0x11C, val);
759 static u8 NodePresent_D(u8 Node)
762 * Determine if a single Hammer Node exists within the network.
769 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
770 val = Get_NB32(dev, 0);
771 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
772 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
773 if (oemNodePresent_D(Node, &ret))
775 /* Node ID register */
776 val = Get_NB32(dev, 0x60);
779 if (val == dword) /* current nodeID = requested nodeID ? */
786 static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
789 * Initialize DRAM on single Athlon 64/Opteron Node.
794 ClearDCT_D(pMCTstat, pDCTstat, dct);
795 stopDCTflag = 1; /*preload flag with 'disable' */
796 /* enable DDR3 support */
797 val = Get_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100);
798 val |= 1 << Ddr3Mode;
799 Set_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100, val);
800 if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
801 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
802 if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
803 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
804 if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
805 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
806 if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
807 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
808 if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
809 printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
811 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
812 printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
813 StartupDCT_D(pMCTstat, pDCTstat, dct); /*yeaahhh! */
822 u32 reg_off = dct * 0x100;
823 val = 1<<DisDramInterface;
824 Set_NB32(pDCTstat->dev_dct, reg_off+0x94, val);
825 /*To maximize power savings when DisDramInterface=1b,
826 all of the MemClkDis bits should also be set.*/
828 Set_NB32(pDCTstat->dev_dct, reg_off+0x88, val);
830 mct_EnDllShutdownSR(pMCTstat, pDCTstat, dct);
834 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
835 struct DCTStatStruc *pDCTstatA)
837 /* Wait (and block further access to dram) for all DCTs to be ready,
838 * by polling all InitDram bits and waiting for possible memory clear
839 * operations to be complete. Read MemClkFreqVal bit to see if
840 * the DIMMs are present in this node.
845 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
846 struct DCTStatStruc *pDCTstat;
847 pDCTstat = pDCTstatA + Node;
848 mct_SyncDCTsReady(pDCTstat);
851 /* re-enable phy compensation engine when dram init is completed on all nodes. */
852 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
853 struct DCTStatStruc *pDCTstat;
854 pDCTstat = pDCTstatA + Node;
855 if (pDCTstat->NodePresent) {
856 if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
857 /* re-enable phy compensation engine when dram init on both DCTs is completed. */
858 val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
859 val &= ~(1 << DisAutoComp);
860 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
864 /* wait 750us before any memory access can be made. */
868 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
869 struct DCTStatStruc *pDCTstat, u8 dct)
871 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
872 * If the DIMMs are present then set the DRAM Enable bit for this node.
874 * Setting dram init starts up the DCT state machine, initializes the
875 * dram devices with MRS commands, and kicks off any
876 * HW memory clear process that the chip is capable of. The sooner
877 * that dram init is set for all nodes, the faster the memory system
878 * initialization can complete. Thus, the init loop is unrolled into
879 * two loops so as to start the processes for non BSP nodes sooner.
880 * This procedure will not wait for the process to finish.
881 * Synchronization is handled elsewhere.
885 u32 reg_off = dct * 0x100;
887 dev = pDCTstat->dev_dct;
888 val = Get_NB32(dev, 0x94 + reg_off);
889 if (val & (1<<MemClkFreqVal)) {
890 mctHookBeforeDramInit(); /* generalized Hook */
891 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
892 mct_DramInit(pMCTstat, pDCTstat, dct);
893 AfterDramInit_D(pDCTstat, dct);
894 mctHookAfterDramInit(); /* generalized Hook*/
898 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
899 struct DCTStatStruc *pDCTstat, u8 dct)
902 u32 dev = pDCTstat->dev_dct;
903 u32 reg = 0x40 + 0x100 * dct;
906 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
907 reg_end = 0x78 + 0x100 * dct;
909 reg_end = 0xA4 + 0x100 * dct;
912 while(reg < reg_end) {
913 if ((reg & 0xFF) == 0x90) {
914 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
915 val = Get_NB32(dev, reg); /* get DRAMConfigLow */
916 val |= 0x08000000; /* preserve value of DisDllShutdownSR for only Rev.D */
919 Set_NB32(dev, reg, val);
925 dev = pDCTstat->dev_map;
927 Set_NB32(dev, reg, val);
930 static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
931 struct DCTStatStruc *pDCTstat, u8 dct)
935 u16 Trp, Trrd, Trcd, Tras, Trc;
938 u32 DramTimingLo, DramTimingHi;
950 /* Gather all DIMM mini-max values for cycle timing data */
959 for (i=0; i < 4; i++)
963 for ( i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
965 if (pDCTstat->DIMMValid & (1 << i)) {
966 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
968 val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB=Dividend/Divisor */
969 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
970 MTB16x /= val; /* transfer to MTB*16 */
972 byte = mctRead_SPD(smbaddr, SPD_tRPmin);
977 byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
982 byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
987 byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
992 byte = mctRead_SPD(smbaddr, SPD_tWRmin);
997 byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
1002 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
1005 val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
1010 byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
1011 if (Trfc[LDIMM] < byte)
1014 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
1016 val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
1021 val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
1023 val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
1027 } /* Dimm Present */
1030 /* Convert DRAM CycleTiming values and store into DCT structure */
1031 byte = pDCTstat->DIMMAutoSpeed;
1042 1. All secondary time values given in SPDs are in binary with units of ns.
1043 2. Some time values are scaled by 16, in order to have least count of 0.25 ns
1044 (more accuracy). JEDEC SPD spec. shows which ones are x1 and x4.
1045 3. Internally to this SW, cycle time, tCK16x, is scaled by 16 to match time values
1049 pDCTstat->DIMMTras = (u16)Tras;
1050 val = Tras / tCK16x;
1051 if (Tras % tCK16x) { /* round up number of busclocks */
1054 if (val < Min_TrasT)
1056 else if (val > Max_TrasT)
1058 pDCTstat->Tras = val;
1061 pDCTstat->DIMMTrp = Trp;
1063 if (Trp % tCK16x) { /* round up number of busclocks */
1068 else if (val > Max_TrpT)
1070 pDCTstat->Trp = val;
1073 pDCTstat->DIMMTrrd = Trrd;
1074 val = Trrd / tCK16x;
1075 if (Trrd % tCK16x) { /* round up number of busclocks */
1078 if (val < Min_TrrdT)
1080 else if (val > Max_TrrdT)
1082 pDCTstat->Trrd = val;
1085 pDCTstat->DIMMTrcd = Trcd;
1086 val = Trcd / tCK16x;
1087 if (Trcd % tCK16x) { /* round up number of busclocks */
1090 if (val < Min_TrcdT)
1092 else if (val > Max_TrcdT)
1094 pDCTstat->Trcd = val;
1097 pDCTstat->DIMMTrc = Trc;
1099 if (Trc % tCK16x) { /* round up number of busclocks */
1104 else if (val > Max_TrcT)
1106 pDCTstat->Trc = val;
1109 pDCTstat->DIMMTrtp = Trtp;
1110 val = Trtp / tCK16x;
1111 if (Trtp % tCK16x) {
1114 if (val < Min_TrtpT)
1116 else if (val > Max_TrtpT)
1118 pDCTstat->Trtp = val;
1121 pDCTstat->DIMMTwr = Twr;
1123 if (Twr % tCK16x) { /* round up number of busclocks */
1128 else if (val > Max_TwrT)
1130 pDCTstat->Twr = val;
1133 pDCTstat->DIMMTwtr = Twtr;
1134 val = Twtr / tCK16x;
1135 if (Twtr % tCK16x) { /* round up number of busclocks */
1138 if (val < Min_TwtrT)
1140 else if (val > Max_TwtrT)
1142 pDCTstat->Twtr = val;
1146 pDCTstat->Trfc[i] = Trfc[i];
1149 pDCTstat->DIMMTfaw = Tfaw;
1150 val = Tfaw / tCK16x;
1151 if (Tfaw % tCK16x) { /* round up number of busclocks */
1154 if (val < Min_TfawT)
1156 else if (val > Max_TfawT)
1158 pDCTstat->Tfaw = val;
1160 mctAdjustAutoCycTmg_D();
1162 /* Program DRAM Timing values */
1163 DramTimingLo = 0; /* Dram Timing Low init */
1164 val = pDCTstat->CASL - 2; /* pDCTstat.CASL to reg. definition */
1165 DramTimingLo |= val;
1167 val = pDCTstat->Trcd - Bias_TrcdT;
1168 DramTimingLo |= val<<4;
1170 val = pDCTstat->Trp - Bias_TrpT;
1171 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1172 DramTimingLo |= val<<7;
1174 val = pDCTstat->Trtp - Bias_TrtpT;
1175 DramTimingLo |= val<<10;
1177 val = pDCTstat->Tras - Bias_TrasT;
1178 DramTimingLo |= val<<12;
1180 val = pDCTstat->Trc - Bias_TrcT;
1181 DramTimingLo |= val<<16;
1183 val = pDCTstat->Trrd - Bias_TrrdT;
1184 DramTimingLo |= val<<22;
1186 DramTimingHi = 0; /* Dram Timing High init */
1187 val = pDCTstat->Twtr - Bias_TwtrT;
1188 DramTimingHi |= val<<8;
1191 DramTimingHi |= val<<16;
1198 DramTimingHi |= val << 20;
1200 dev = pDCTstat->dev_dct;
1201 reg_off = 0x100 * dct;
1203 val = pDCTstat->Twr;
1208 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1211 dword = Get_NB32(dev, 0x84 + reg_off);
1214 Set_NB32(dev, 0x84 + reg_off, dword);
1217 val = pDCTstat->Tfaw;
1218 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1222 dword = Get_NB32(dev, 0x94 + reg_off);
1223 dword &= ~0xf0000000;
1225 Set_NB32(dev, 0x94 + reg_off, dword);
1227 /* dev = pDCTstat->dev_dct; */
1228 /* reg_off = 0x100 * dct; */
1230 if (pDCTstat->Speed > 4) {
1231 val = Get_NB32(dev, 0x88 + reg_off);
1233 DramTimingLo |= val;
1235 Set_NB32(dev, 0x88 + reg_off, DramTimingLo); /*DCT Timing Low*/
1237 if (pDCTstat->Speed > 4) {
1238 DramTimingHi |= 1 << DisAutoRefresh;
1240 DramTimingHi |= 0x000018FF;
1241 Set_NB32(dev, 0x8c + reg_off, DramTimingHi); /*DCT Timing Hi*/
1243 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
/* NOTE(review): this is a line-sampled listing; statements between the
 * shown line numbers (braces, increments, declarations) are elided. */
/*
 * AutoCycTiming_D: program DCT timing for channel 'dct' from DIMM SPDs.
 * On the first pass (Speed == 0) it resolves frequency limits
 * (bus load, presets, SPD-derived T/CL) before applying secondary timings.
 * Returns pDCTstat->ErrCode.
 */
1246 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
1247 struct DCTStatStruc *pDCTstat, u8 dct)
1249 /* Initialize DCT Timing registers as per DIMM SPD.
1250 * For primary timing (T, CL) use best case T value.
1251 * For secondary timing params., use most aggressive settings
1254 * There are three components to determining "maximum frequency":
1255 * SPD component, Bus load component, and "Preset" max frequency
1258 * The SPD component is a function of the min cycle time specified
1259 * by each DIMM, and the interaction of cycle times from all DIMMs
1260 * in conjunction with CAS latency. The SPD component only applies
1261 * when user timing mode is 'Auto'.
1263 * The Bus load component is a limiting factor determined by electrical
1264 * characteristics on the bus as a result of varying number of device
1265 * loads. The Bus load component is specific to each platform but may
1266 * also be a function of other factors. The bus load component only
1267 * applies when user timing mode is 'Auto'.
1269 * The Preset component is subdivided into three items and is
1270 * the minimum of the set: Silicon revision, user limit
1271 * setting when user timing mode is 'Auto' and memclock mode
1272 * is 'Limit', OEM build specification of the maximum
1273 * frequency. The Preset component is only applies when user
1274 * timing mode is 'Auto'.
1277 /* Get primary timing (CAS Latency and Cycle Time) */
1278 if (pDCTstat->Speed == 0) {
/* Speed not yet chosen: establish the frequency ceiling, then pick T/CL. */
1279 mctGet_MaxLoadFreq(pDCTstat);
1281 /* and Factor in presets (setup options, Si cap, etc.) */
1282 GetPresetmaxF_D(pMCTstat, pDCTstat);
1284 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
1285 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
1286 /* skip callback mctForce800to1067_D */
/* Commit the SPD-derived auto values as the working Speed/CASL. */
1287 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
1288 pDCTstat->CASL = pDCTstat->DIMMCASL;
1291 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
1293 SPD2ndTiming(pMCTstat, pDCTstat, dct);
1295 printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
1296 printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
1297 printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
1298 printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");
1300 mctHookAfterAutoCycTmg();
1302 return pDCTstat->ErrCode;
/* NOTE(review): line-sampled listing; braces/declarations between the
 * shown line numbers are elided. */
/*
 * GetPresetmaxF_D: clamp pDCTstat->PresetmaxFreq to the minimum of
 * the silicon-revision limit (800), an optional user "limit mode"
 * setting, and the platform NV_MAX_MEMCLK limit.
 */
1305 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
1306 struct DCTStatStruc *pDCTstat)
1308 /* Get max frequency from OEM platform definition, from any user
1309 * override (limiting) of max frequency, and from any Si Revision
1310 * Specific information. Return the least of these three in
1311 * DCTStatStruc.PresetmaxFreq.
1313 /* TODO: Set the proper max frequency in wrappers/mcti_d.c. */
1317 /* Get CPU Si Revision defined limit (NPT) */
1318 proposedFreq = 800; /* Rev F0 programmable max memclock is */
1320 /*Get User defined limit if "limit" mode */
1321 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
1322 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
1323 if (word < proposedFreq)
1324 proposedFreq = word;
1326 /* Get Platform defined limit */
1327 word = mctGet_NVbits(NV_MAX_MEMCLK);
1328 if (word < proposedFreq)
1329 proposedFreq = word;
/* Only lower the existing preset; never raise it. */
1331 word = pDCTstat->PresetmaxFreq;
1332 if (word > proposedFreq)
1333 word = proposedFreq;
1335 pDCTstat->PresetmaxFreq = word;
1337 /* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
/* NOTE(review): line-sampled listing; several executable lines between
 * the shown line numbers (e.g. the CLdesired round-up, loop heads) are
 * elided and must be read from the full source. */
/*
 * SPDGetTCL_D: JEDEC DDR3 speed/CL selection. Walks all valid DIMMs on
 * the channel to build the common CAS-latency bitmap and the largest
 * tAAmin/tCKmin (scaled x16), then runs JEDEC steps 4-6 to pick
 * DIMMAutoSpeed and DIMMCASL.
 */
1340 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
1341 struct DCTStatStruc *pDCTstat, u8 dct)
1343 /* Find the best T and CL primary timing parameter pair, per Mfg.,
1344 * for the given set of DIMMs, and store into DCTStatStruc
1345 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
1346 * index values and item values" for definition of CAS latency
1347 * index (j) and Frequency index (k).
1349 u8 i, CASLatLow, CASLatHigh;
1354 u8 CLactual, CLdesired, CLT_Fail;
1356 u8 smbaddr, byte = 0, bytex = 0;
1364 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
1365 if (pDCTstat->DIMMValid & (1 << i)) {
1366 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
1367 /* Step 1: Determine the common set of supported CAS Latency
1368 * values for all modules on the memory channel using the CAS
1369 * Latencies Supported in SPD bytes 14 and 15.
1371 byte = mctRead_SPD(smbaddr, SPD_CASLow);
1373 byte = mctRead_SPD(smbaddr, SPD_CASHigh);
1375 /* Step 2: Determine tAAmin(all) which is the largest tAAmin
1376 value for all modules on the memory channel (SPD byte 16). */
/* MTB16x = (dividend << 4) / divisor: medium timebase scaled x16
 * for quarter-ns precision. */
1377 byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);
1379 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
1380 MTB16x /= byte; /* transfer to MTB*16 */
1382 byte = mctRead_SPD(smbaddr, SPD_tAAmin);
1383 if (tAAmin16x < byte * MTB16x)
1384 tAAmin16x = byte * MTB16x;
1385 /* Step 3: Determine tCKmin(all) which is the largest tCKmin
1386 value for all modules on the memory channel (SPD byte 12). */
1387 byte = mctRead_SPD(smbaddr, SPD_tCKmin);
1389 if (tCKmin16x < byte * MTB16x)
1390 tCKmin16x = byte * MTB16x;
1393 /* calculate tCKproposed16x */
/* 16000 / MHz gives cycle time in ns scaled by 16. */
1394 tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
1395 if (tCKmin16x > tCKproposed16x)
1396 tCKproposed16x = tCKmin16x;
1398 /* mctHookTwo1333DimmOverride(); */
1399 /* For UDIMM, if there are two DDR3-1333 on the same channel,
1400 downgrade DDR speed to 1066. */
1402 /* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
/* Map tCK16x to a frequency index: 20->DDR3-1600, 24->1333,
 * 30->1066, else 40->800. */
1403 if (tCKproposed16x == 20)
1404 pDCTstat->TargetFreq = 7;
1405 else if (tCKproposed16x <= 24) {
1406 pDCTstat->TargetFreq = 6;
1407 tCKproposed16x = 24;
1409 else if (tCKproposed16x <= 30) {
1410 pDCTstat->TargetFreq = 5;
1411 tCKproposed16x = 30;
1414 pDCTstat->TargetFreq = 4;
1415 tCKproposed16x = 40;
1417 /* Running through this loop twice:
1418 - First time find tCL at target frequency
1419 - Second tim find tCL at 400MHz */
1423 /* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
1424 determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
1425 value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
1426 next lower standard tCK value for calculating CLdesired.
1427 CLdesired = ceiling ( tAAmin(all) / tCKproposed )
1428 where tAAmin is defined in Byte 16. The ceiling function requires that the
1429 quotient be rounded up always. */
1430 CLdesired = tAAmin16x / tCKproposed16x;
1431 if (tAAmin16x % tCKproposed16x)
1433 /* Step 5: Chose an actual CAS Latency (CLactual) that is greather than or equal
1434 to CLdesired and is supported by all modules on the memory channel as
1435 determined in step 1. If no such value exists, choose a higher tCKproposed
1436 value and repeat steps 4 and 5 until a solution is found. */
/* CAS bitmap bit i corresponds to CL = i + 4 (DDR3 SPD encoding). */
1437 for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
1438 if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
1439 if (CLdesired <= CLactual)
1445 /* Step 6: Once the calculation of CLactual is completed, the BIOS must also
1446 verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
1447 for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
1448 not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
/* 320 = 20 ns * 16 (tAAmax in the x16 scale). */
1449 if (CLactual * tCKproposed16x > 320)
1453 bytex = CLactual - 2;
1454 if (tCKproposed16x == 20)
1456 else if (tCKproposed16x == 24)
1458 else if (tCKproposed16x == 30)
1463 /* mctHookManualCLOverride */
1467 if (tCKproposed16x != 40) {
1468 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
1469 pDCTstat->DIMMAutoSpeed = byte;
1470 pDCTstat->DIMMCASL = bytex;
/* Otherwise record target CASL and redo the loop at 400 MHz
 * (tCK16x = 40) for training. */
1473 pDCTstat->TargetCASL = bytex;
1474 tCKproposed16x = 40;
1477 pDCTstat->DIMMAutoSpeed = byte;
1478 pDCTstat->DIMMCASL = bytex;
1483 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
1484 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);
1486 printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
1487 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
1488 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
1489 printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
/* NOTE(review): line-sampled listing; braces/declarations between the
 * shown line numbers are elided. */
/*
 * PlatformSpec_D: apply platform-specific DRAM configuration for
 * channel 'dct' (and for DCT1 too when ganged), set 2T command mode
 * in Dram Config Hi when requested, and run PHY compensation at the
 * lowest auto speed. Returns pDCTstat->ErrCode.
 */
1492 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
1493 struct DCTStatStruc *pDCTstat, u8 dct)
1499 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
1501 if (pDCTstat->GangedMode == 1) {
/* Ganged: DCT1 mirrors DCT0, so configure it as well. */
1502 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
1503 mct_BeforePlatformSpec(pMCTstat, pDCTstat, 1);
1506 if ( pDCTstat->_2Tmode == 2) {
1507 dev = pDCTstat->dev_dct;
1508 reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
1509 val = Get_NB32(dev, reg);
1510 val |= 1 << 20; /* 2T CMD mode */
1511 Set_NB32(dev, reg, val);
1514 mct_BeforePlatformSpec(pMCTstat, pDCTstat, dct);
1515 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
/* DIMMAutoSpeed == 4 corresponds to the lowest (400 MHz) setting
 * in this code — TODO confirm against the frequency index table. */
1516 if (pDCTstat->DIMMAutoSpeed == 4)
1517 InitPhyCompensation(pMCTstat, pDCTstat, dct);
1518 mctHookAfterPSCfg();
1520 return pDCTstat->ErrCode;
/* NOTE(review): line-sampled listing; many lines between the shown
 * line numbers (declarations, else-branches, loop bodies) are elided. */
/*
 * AutoConfig_D: build and program the main DCT configuration registers
 * (Dram Control 0x78, Timing Low 0x88, Config Misc 0xA0/0xA8,
 * Config Lo 0x90, Config Hi 0x94) for channel 'dct' from the detected
 * DIMM population. Returns pDCTstat->ErrCode.
 */
1523 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
1524 struct DCTStatStruc *pDCTstat, u8 dct)
1526 u32 DramControl, DramTimingLo, Status;
1527 u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
1538 DramConfigMisc2 = 0;
1540 /* set bank addressing and Masks, plus CS pops */
1541 SPDSetBanks_D(pMCTstat, pDCTstat, dct);
1542 if (pDCTstat->ErrCode == SC_StopError)
1543 goto AutoConfig_exit;
1545 /* map chip-selects into local address space */
1546 StitchMemory_D(pMCTstat, pDCTstat, dct);
1547 InterleaveBanks_D(pMCTstat, pDCTstat, dct);
1549 /* temp image of status (for convenience). RO usage! */
1550 Status = pDCTstat->Status;
1552 dev = pDCTstat->dev_dct;
1553 reg_off = 0x100 * dct;
1556 /* Build Dram Control Register Value */
1557 DramConfigMisc2 = Get_NB32 (dev, 0xA8 + reg_off); /* Dram Control*/
1558 DramControl = Get_NB32 (dev, 0x78 + reg_off); /* Dram Control*/
1560 /* FIXME: Skip mct_checkForDxSupport */
1561 /* REV_CALL mct_DoRdPtrInit if not Dx */
1562 if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
1566 DramControl &= ~0xFF;
1567 DramControl |= val; /* RdPtrInit = 6 for Cx CPU */
1569 if (mctGet_NVbits(NV_CLKHZAltVidC3))
1570 DramControl |= 1<<16; /* check */
1572 DramControl |= 0x00002A00;
1574 /* FIXME: Skip for Ax versions */
1575 /* callback not required - if (!mctParityControl_D()) */
1576 if (Status & (1 << SB_128bitmode))
1577 DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */
1582 if (pDCTstat->Dimmx4Present & (1 << word))
1583 DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
1589 if (!(Status & (1 << SB_Registered)))
1590 DramConfigLo |= 1 << UnBuffDimm; /* Unbuffered DIMMs */
/* ECC DRAM scrub enable requires NV option, ECC-capable DIMMs,
 * and user ECC enable — all three gates. */
1592 if (mctGet_NVbits(NV_ECC_CAP))
1593 if (Status & (1 << SB_ECCDIMMs))
1594 if ( mctGet_NVbits(NV_ECC))
1595 DramConfigLo |= 1 << DimmEcEn;
1597 DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);
1599 /* Build Dram Config Hi Register Value */
1600 dword = pDCTstat->Speed;
1601 DramConfigHi |= dword - 1; /* get MemClk encoding */
1602 DramConfigHi |= 1 << MemClkFreqVal;
1604 if (Status & (1 << SB_Registered))
1605 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
1606 /* set only if x8 Registered DIMMs in System*/
1607 DramConfigHi |= 1 << RDqsEn;
1609 if (mctGet_NVbits(NV_CKE_CTL))
1610 /*Chip Select control of CKE*/
1611 DramConfigHi |= 1 << 16;
1613 /* Control Bank Swizzle */
1614 if (0) /* call back not needed mctBankSwizzleControl_D()) */
1615 DramConfigHi &= ~(1 << BankSwizzleMode);
1617 DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */
1619 /* Check for Quadrank DIMM presence */
1620 if ( pDCTstat->DimmQRPresent != 0) {
1621 byte = mctGet_NVbits(NV_4RANKType);
1623 DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
1625 DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
1628 if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
1629 val = mctGet_NVbits(NV_BYPMAX);
1631 val = 0x0f; /* recommended setting (default) */
1632 DramConfigHi |= val << 24;
1634 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Bx))
1635 DramConfigHi |= 1 << DcqArbBypassEn;
1637 /* Build MemClkDis Value from Dram Timing Lo and
1638 Dram Config Misc Registers
1639 1. We will assume that MemClkDis field has been preset prior to this
1641 2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
1642 NV_AllMemClks <>0 AND SB_DiagClks ==0 */
1644 /* Dram Timing Low (owns Clock Enable bits) */
1645 DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
1646 if (mctGet_NVbits(NV_AllMemClks) == 0) {
1647 /* Special Jedec SPD diagnostic bit - "enable all clocks" */
1648 if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
1651 p = Tab_ManualCLKDis;
/* Pick the clock-disable map for this package type. */
1654 byte = mctGet_NVbits(NV_PACK_TYPE);
1657 else if (byte == PT_M2 || byte == PT_AS)
1664 while(dword < MAX_CS_SUPPORTED) {
1665 if (pDCTstat->CSPresent & (1<<dword)){
1666 /* re-enable clocks for the enabled CS */
1672 DramTimingLo |= byte << 24;
1676 printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %x\n", DramControl);
1677 printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %x\n", DramTimingLo);
1678 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %x\n", DramConfigMisc);
1679 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %x\n", DramConfigMisc2);
1680 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %x\n", DramConfigLo);
1681 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %x\n", DramConfigHi);
1683 /* Write Values to the registers */
1684 Set_NB32(dev, 0x78 + reg_off, DramControl);
1685 Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
1686 Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
1687 DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
1688 Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
1689 Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
1690 ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
/* Merge with the current 0x94 contents before the Config Hi write,
 * which is done via the CPU-specific helper. */
1691 dword = Get_NB32(dev, 0x94 + reg_off);
1692 DramConfigHi |= dword;
1693 mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
1694 mct_EarlyArbEn_D(pMCTstat, pDCTstat, dct);
1695 mctHookAfterAutoCfg();
1697 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1699 printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
1700 printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
1701 printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
1702 printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
1704 return pDCTstat->ErrCode;
/* NOTE(review): line-sampled listing; lines between the shown numbers
 * (loop mechanics, mask math details) are elided. */
/*
 * SPDSetBanks_D: decode SPD addressing (rows/cols/banks/ranks) for each
 * DIMM pair, program the Bank Addressing register (0x80) and the
 * per-chip-select mask registers (0x60..), and build the CSPresent
 * population map. Flags SC_StopError if no chip-select survives.
 */
1707 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
1708 struct DCTStatStruc *pDCTstat, u8 dct)
1710 /* Set bank addressing, program Mask values and build a chip-select
1711 * population map. This routine programs PCI 0:24N:2x80 config register
1712 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
1714 u8 ChipSel, Rows, Cols, Ranks, Banks;
1715 u32 BankAddrReg, csMask;
1726 dev = pDCTstat->dev_dct;
1727 reg_off = 0x100 * dct;
/* Chip-selects come in pairs (one DIMM = CS n and n+1). */
1730 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
1732 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
1735 if (pDCTstat->DIMMValid & (1<<byte)) {
1736 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
1738 byte = mctRead_SPD(smbaddr, SPD_Addressing);
1739 Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
1740 Cols = byte & 0x7; /* Cols:0b=9-bit,... */
1742 byte = mctRead_SPD(smbaddr, SPD_Density);
1743 Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */
1745 byte = mctRead_SPD(smbaddr, SPD_Organization);
1746 Ranks = ((byte >> 3) & 7) + 1;
1748 /* Configure Bank encoding
1749 * Use a 6-bit key into a lookup table.
1750 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
1751 * RRR is the number of Rows minus 12, and B is the number of banks
1758 byte |= Rows << 3; /* RRRBCC internal encode */
1760 for (dword=0; dword < 13; dword++) {
1761 if (byte == Tab_BankAddr[dword])
1768 /* bit no. of CS field in address mapping reg.*/
1769 dword <<= (ChipSel<<1);
1770 BankAddrReg |= dword;
1772 /* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
1773 or 2pow(rows+cols+banks-5)-1*/
1776 byte = Rows + Cols; /* cl=rows+cols*/
1777 byte += 21; /* row:12+col:9 */
1778 byte -= 2; /* 3 banks - 5 */
1780 if (pDCTstat->Status & (1 << SB_128bitmode))
1781 byte++; /* double mask size if in 128-bit mode*/
1783 csMask |= 1 << byte;
1786 /*set ChipSelect population indicator even bits*/
1787 pDCTstat->CSPresent |= (1<<ChipSel);
1789 /*set ChipSelect population indicator odd bits*/
1790 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
1792 reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
1794 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
1795 Set_NB32(dev, reg, val);
/* Propagate SPD-checksum failures into CSTestFail. */
1797 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
1798 pDCTstat->CSTestFail |= (1<<ChipSel);
1800 } /* while ChipSel*/
1802 SetCSTriState(pMCTstat, pDCTstat, dct);
1803 SetCKETriState(pMCTstat, pDCTstat, dct);
1804 SetODTTriState(pMCTstat, pDCTstat, dct);
1806 if (pDCTstat->Status & (1 << SB_128bitmode)) {
1807 SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1808 SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1809 SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1812 word = pDCTstat->CSPresent;
1813 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
1814 word ^= pDCTstat->CSPresent;
1815 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
1816 if (!pDCTstat->CSPresent)
1817 pDCTstat->ErrCode = SC_StopError;
1819 reg = 0x80 + reg_off; /* Bank Addressing Register */
1820 Set_NB32(dev, reg, BankAddrReg);
1822 pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
1823 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1825 printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
1826 printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
1827 printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
1828 printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
1829 printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
/* NOTE(review): line-sampled listing; lines between the shown numbers
 * are elided. */
/*
 * SPDCalcWidth_D: compare each channel-A/channel-B DIMM pair's SPD
 * fields (addressing, density, organization, device width, rank count);
 * any mismatch sets SB_DimmMismatchO, which later prevents 128-bit
 * (ganged) mode.
 */
1832 static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1833 struct DCTStatStruc *pDCTstat)
1835 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1836 * matching with DIMM on Channel B), the overall DIMM population,
1837 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1840 u8 smbaddr, smbaddr1;
1843 /* Check Symmetry of Channel A and Channel B DIMMs
1844 (must be matched for 128-bit mode).*/
/* Even index = channel A slot, odd index = its channel B partner. */
1845 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
1846 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1847 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1848 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
1850 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
1851 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
1852 if (byte != byte1) {
1853 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1857 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
1858 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
1859 if (byte != byte1) {
1860 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1864 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
1865 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
1866 if (byte != byte1) {
1867 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1871 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
1872 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
1873 if (byte != byte1) {
1874 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1878 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1879 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1880 if (byte != byte1) {
1881 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/* NOTE(review): line-sampled listing; lines between the shown numbers
 * (spare-CS selection, base-address math) are elided. */
/*
 * StitchMemory_D: map enabled chip-selects into a contiguous local
 * address space. Requires SPDSetBanks_D to have programmed CS masks and
 * CSPresent first. Greedily places the biggest remaining bank at the
 * next free base (0x40.. CS Base regs), handles CS sparing and
 * on-DIMM address mirroring, and records DCTSysLimit.
 */
1890 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1891 struct DCTStatStruc *pDCTstat, u8 dct)
1893 /* Requires that Mask values for each bank be programmed first and that
1894 * the chip-select population indicator is correctly set.
1897 u32 nxtcsBase, curcsBase;
1899 u32 Sizeq, BiggestBank;
1908 dev = pDCTstat->dev_dct;
1909 reg_off = 0x100 * dct;
1913 /* CS Sparing 1=enabled, 0=disabled */
1914 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1915 if (MCT_DIMM_SPARE_NO_WARM) {
1916 /* Do no warm-reset DIMM spare */
1917 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1918 word = pDCTstat->CSPresent;
1922 /* Make sure at least two chip-selects are available */
1925 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1928 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1929 word = pDCTstat->CSPresent;
1931 word &= ~(1 << val);
1933 /* Make sure at least two chip-selects are available */
1936 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1941 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
1942 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
/* Inner pass: find the largest not-yet-enabled bank to place next. */
1944 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1945 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1946 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1947 val = Get_NB32(dev, reg);
1948 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
1949 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1950 val = Get_NB32(dev, reg);
1954 Sizeq = val; /* never used */
1955 if (val > BiggestBank) {
1956 /*Bingo! possibly Map this chip-select next! */
1961 } /*if bank present */
1963 if (BiggestBank !=0) {
1964 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1965 /* DRAM CS Base b Address Register offset */
1966 reg = 0x40 + (b << 2) + reg_off;
1969 val = 1 << Spare; /* Spare Enable*/
1972 val |= 1 << CSEnable; /* Bank Enable */
/* Odd CS on unbuffered DIMMs may need on-DIMM address mirroring. */
1974 if (((reg - 0x40) >> 2) & 1) {
1975 if (!(pDCTstat->Status & (1 << SB_Registered))) {
1977 dimValid = pDCTstat->DIMMValid;
1980 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
1981 val |= 1 << onDimmMirror;
1985 Set_NB32(dev, reg, val);
1989 /* let nxtcsBase+=Size[b] */
1990 nxtcsBase += BiggestBank;
1993 /* bank present but disabled?*/
1994 if ( pDCTstat->CSTestFail & (1 << p)) {
1995 /* DRAM CS Base b Address Register offset */
1996 reg = (p << 2) + 0x40 + reg_off;
1997 val = 1 << TestFail;
1998 Set_NB32(dev, reg, val);
2003 pDCTstat->DCTSysLimit = nxtcsBase - 1;
2004 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
2007 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
2009 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
2010 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
2011 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
2012 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
/* Get_Fk_D: look up the memclock frequency for index k in Table_F_k.
 * The existing FIXME about the index scaling is unresolved. */
2015 static u16 Get_Fk_D(u8 k)
2017 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
/* NOTE(review): line-sampled listing; lines between the shown numbers
 * (else-branches, load-count math) are elided. */
/*
 * DIMMPresence_D: probe every DIMM slot over SMBus, validate SPD CRC
 * and DDR3 type, and build the population bitmaps (valid/present,
 * registered, ECC, x4/x8/x16, dual/quad rank, mirroring, register
 * vendor) plus per-channel bus-load counters. Returns
 * pDCTstat->ErrCode (SC_StopError when nothing usable is found).
 */
2020 static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
2021 struct DCTStatStruc *pDCTstat)
2023 /* Check DIMMs present, verify checksum, flag SDRAM type,
2024 * build population indicator bitmaps, and preload bus loading
2025 * of DIMMs into DCTStatStruc.
2026 * MAAload=number of devices on the "A" bus.
2027 * MABload=number of devices on the "B" bus.
2028 * MAAdimms=number of DIMMs on the "A" bus slots.
2029 * MABdimms=number of DIMMs on the "B" bus slots.
2030 * DATAAload=number of ranks on the "A" bus slots.
2031 * DATABload=number of ranks on the "B" bus slots.
2036 u16 RegDIMMPresent, MaxDimms;
2041 /* preload data structure with addrs */
2042 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
2044 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
2046 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
2049 pDCTstat->DimmQRPresent = 0;
2051 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
/* Second half of the loop range revisits quad-rank DIMMs as
 * pseudo-slots (i >= DimmSlots), counted once QRPresent is set. */
2055 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
2057 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
2058 status = mctRead_SPD(smbaddr, SPD_ByteUse);
2059 if (status >= 0) { /* SPD access is ok */
2060 pDCTstat->DIMMPresent |= 1 << i;
2061 if (crcCheck(smbaddr)) { /* CRC is OK */
2062 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2063 if (byte == JED_DDR3SDRAM) {
2064 /*Dimm is 'Present'*/
2065 pDCTstat->DIMMValid |= 1 << i;
2068 pDCTstat->DIMMSPDCSE = 1 << i;
2070 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
2071 pDCTstat->ErrCode = SC_StopError;
2073 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
2074 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
2075 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2076 if (byte == JED_DDR3SDRAM)
2077 pDCTstat->DIMMValid |= 1 << i;
2080 /* Check module type */
2081 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
2082 if (byte == JED_RDIMM || byte == JED_MiniRDIMM)
2083 RegDIMMPresent |= 1 << i;
2084 /* Check ECC capable */
2085 byte = mctRead_SPD(smbaddr, SPD_BusWidth);
2086 if (byte & JED_ECC) {
2087 /* DIMM is ECC capable */
2088 pDCTstat->DimmECCPresent |= 1 << i;
2090 /* Check if x4 device */
2091 devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
2092 if (devwidth == 0) {
2093 /* DIMM is made with x4 or x16 drams */
2094 pDCTstat->Dimmx4Present |= 1 << i;
2095 } else if (devwidth == 1) {
2096 pDCTstat->Dimmx8Present |= 1 << i;
2097 } else if (devwidth == 2) {
2098 pDCTstat->Dimmx16Present |= 1 << i;
2101 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
2103 if (byte == 3) { /* 4ranks */
2104 /* if any DIMMs are QR, we have to make two passes through DIMMs*/
2105 if ( pDCTstat->DimmQRPresent == 0) {
2108 if (i < DimmSlots) {
2109 pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
2111 pDCTstat->MAdimms[i & 1] --;
2113 byte = 1; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
2114 } else if (byte == 1) { /* 2ranks */
2115 pDCTstat->DimmDRPresent |= 1 << i;
2120 else if (devwidth == 1)
2122 else if (devwidth == 2)
2125 byte++; /* al+1=rank# */
2127 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
/* j selects channel A/B accumulators — presumably j = i & 1;
 * the assignment is in an elided line, verify in full source. */
2130 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
2131 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
2132 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
2134 /* check address mirror support for unbuffered dimm */
2135 /* check number of registers on a dimm for registered dimm */
2136 byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
2137 if (RegDIMMPresent & (1 << i)) {
2139 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2141 if ((byte & 1) == 1)
2142 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2144 /* Get byte62: Reference Raw Card information. We dont need it now. */
2145 /* byte = mctRead_SPD(smbaddr, SPD_RefRawCard); */
2146 /* Get Byte65/66 for register manufacture ID code */
2147 if ((0x97 == mctRead_SPD(smbaddr, SPD_RegManufactureID_H)) &&
2148 (0x80 == mctRead_SPD(smbaddr, SPD_RegManufactureID_L))) {
2149 if (0x16 == mctRead_SPD(smbaddr, SPD_RegManRevID))
2150 pDCTstat->RegMan2Present |= 1 << i;
2152 pDCTstat->RegMan1Present |= 1 << i;
2154 /* Get Control word values for RC3. We dont need it. */
2155 byte = mctRead_SPD(smbaddr, 70);
2156 pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* C3 = SPD byte 70 [7:4] */
2157 /* Get Control word values for RC4, and RC5 */
2158 byte = mctRead_SPD(smbaddr, 71);
2159 pDCTstat->CtrlWrd4 |= (byte & 0xFF) << (i << 2); /* RC4 = SPD byte 71 [3:0] */
2160 pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
2164 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
2165 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
2166 printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
2167 printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
2168 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
2169 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
2170 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
2171 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
2172 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
2173 printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
2174 printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
2175 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
2176 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
2177 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
2178 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
2179 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
2180 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);
2182 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
/* Mixing registered and unbuffered on one node is a hard stop. */
2183 if (RegDIMMPresent != 0) {
2184 if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
2185 /* module type DIMM mismatch (reg'ed, unbuffered) */
2186 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
2187 pDCTstat->ErrCode = SC_StopError;
2189 /* all DIMMs are registered */
2190 pDCTstat->Status |= 1<<SB_Registered;
2193 if (pDCTstat->DimmECCPresent != 0) {
2194 if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
2195 /* all DIMMs are ECC capable */
2196 pDCTstat->Status |= 1<<SB_ECCDIMMs;
2199 if (pDCTstat->DimmPARPresent != 0) {
2200 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
2201 /*all DIMMs are Parity capable */
2202 pDCTstat->Status |= 1<<SB_PARDIMMs;
2206 /* no DIMMs present or no DIMMs that qualified. */
2207 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
2208 pDCTstat->ErrCode = SC_StopError;
2211 printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
2212 printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
2213 printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
2214 printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");
2216 mctHookAfterDIMMpre();
2218 return pDCTstat->ErrCode;
/* Look up the SMBus/SPD address for DIMM slot i from the per-node
 * DIMMAddr table. NOTE(review): this listing is missing physical lines
 * (braces/return) — code text left untouched. */
2221 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2225 p = pDCTstat->DIMMAddr;
2226 /* mct_BeforeGetDIMMAddress(); */
/* Initialize both DRAM controllers of one node: DCT0 always, DCT1 only
 * in unganged mode when it has DIMMs; otherwise DCT1's DRAM interface
 * is disabled via F2x194[DisDramInterface]. */
2230 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2231 struct DCTStatStruc *pDCTstat)
2236 /* Config. DCT0 for Ganged or unganged mode */
2237 DCTInit_D(pMCTstat, pDCTstat, 0);
2238 if (pDCTstat->ErrCode == SC_FatalErr) {
2239 /* Do nothing goto exitDCTInit; any fatal errors? */
2241 /* Configure DCT1 if unganged and enabled*/
2242 if (!pDCTstat->GangedMode) {
2243 if (pDCTstat->DIMMValidDCT[1] > 0) {
/* DCT1 init must not clobber DCT0's error state unless DCT1 itself fails */
2244 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2245 pDCTstat->ErrCode = 0;
2246 DCTInit_D(pMCTstat, pDCTstat, 1);
2247 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2248 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
/* No DIMMs on DCT1: power it down (F2x194 is DCT1's copy of F2x094) */
2250 val = 1 << DisDramInterface;
2251 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
/* Run DRAM device initialization for one DCT: production pre-init hook,
 * then the software-driven init sequence (hardware init path disabled). */
2258 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
2259 struct DCTStatStruc *pDCTstat, u8 dct)
2261 mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
2262 mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
2263 /* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
/* Decide ganged (128-bit) vs unganged mode from the DIMM population:
 * even bits of DIMMValid are channel A, odd bits channel B. Ganged mode
 * requires a matched population on both channels and NV_Unganged clear.
 * Returns pDCTstat->ErrCode. */
2266 static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
2267 struct DCTStatStruc *pDCTstat)
2274 byte = bytex = pDCTstat->DIMMValid;
2275 bytex &= 0x55; /* CHA DIMM pop */
2276 pDCTstat->DIMMValidDCT[0] = bytex;
2278 byte &= 0xAA; /* CHB DIMM pop */
2280 pDCTstat->DIMMValidDCT[1] = byte;
2282 if (byte != bytex) {
2283 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
2285 byte = mctGet_NVbits(NV_Unganged);
2287 pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temp. to avoid setting of ganged mode */
2289 if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
2290 pDCTstat->GangedMode = 1;
2291 /* valid 128-bit mode population. */
2292 pDCTstat->Status |= 1 << SB_128bitmode;
2294 val = Get_NB32(pDCTstat->dev_dct, reg);
2295 val |= 1 << DctGangEn;
2296 Set_NB32(pDCTstat->dev_dct, reg, val);
/* NV_Unganged forced unganged mode; the mismatch bit was only borrowed
 * above to suppress ganging, so clear it again. */
2298 if (byte) /* NV_Unganged */
2299 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM mismatch error */
2301 return pDCTstat->ErrCode;
/* Read a 32-bit northbridge register via PCI config space. */
2304 u32 Get_NB32(u32 dev, u32 reg)
2306 return pci_read_config32(dev, reg);
/* Write a 32-bit northbridge register via PCI config space. */
2309 void Set_NB32(u32 dev, u32 reg, u32 val)
2311 pci_write_config32(dev, reg, val);
/* Indexed NB register read: write the index to index_reg, then read the
 * data port at index_reg + 4. No access-done handshake (see the _wait
 * variant for ports that require it). */
2315 u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2319 Set_NB32(dev, index_reg, index);
2320 dword = Get_NB32(dev, index_reg+0x4);
/* Indexed NB register write: select the index, then write the data port
 * at index_reg + 4. No access-done handshake. */
2325 void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
2327 Set_NB32(dev, index_reg, index);
2328 Set_NB32(dev, index_reg + 0x4, data);
/* Indexed NB register read with handshake: issue a read request
 * (DctAccessWrite cleared), poll index_reg until DctAccessDone is set,
 * then fetch the data from index_reg + 4. */
2331 u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2337 index &= ~(1 << DctAccessWrite);
2338 Set_NB32(dev, index_reg, index);
2340 dword = Get_NB32(dev, index_reg);
2341 } while (!(dword & (1 << DctAccessDone)));
2342 dword = Get_NB32(dev, index_reg + 0x4);
/* Indexed NB register write with handshake: stage the data at
 * index_reg + 4, issue the write (DctAccessWrite set), then poll until
 * DctAccessDone indicates completion. */
2347 void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2352 Set_NB32(dev, index_reg + 0x4, data);
2353 index |= (1 << DctAccessWrite);
2354 Set_NB32(dev, index_reg, index);
2356 dword = Get_NB32(dev, index_reg);
2357 } while (!(dword & (1 << DctAccessDone)));
/* Pre-platform-spec phy workaround for revisions newer than Bx:
 * program F2x[1,0]9C_xD08E000 and xD02E001 per the BKDG sequence quoted
 * below. Speed >= 4 corresponds to MemClkFreq >= 011b (DDR-800).
 * Returns pDCTstat->ErrCode. */
2361 static u8 mct_BeforePlatformSpec(struct MCTStatStruc *pMCTstat,
2362 struct DCTStatStruc *pDCTstat, u8 dct)
2364 /* mct_checkForCxDxSupport_D */
2365 if (pDCTstat->LogicalCPUID & AMD_DR_GT_Bx) {
2366 /* 1. Write 00000000h to F2x[1,0]9C_xD08E000 */
2367 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0x0D08E000, 0);
2368 /* 2. If DRAM Configuration Register[MemClkFreq] (F2x[1,0]94[2:0]) is
2369 greater than or equal to 011b (DDR-800 and higher),
2370 then write 00000080h to F2x[1,0]9C_xD02E001,
2371 else write 00000090h to F2x[1,0]9C_xD02E001. */
2372 if (pDCTstat->Speed >= 4)
2373 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x80);
2375 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98 + dct * 0x100, 0xD02E001, 0x90);
2377 return pDCTstat->ErrCode;
/* Program platform-specific drive-strength/timing values (obtained by
 * the interface layer into CH_ODC_CTL/CH_ADDR_TMG) into the DCT phy
 * via indexed registers 0x00 and 0x04. In ganged mode both channels
 * are synchronized first. Returns pDCTstat->ErrCode. */
2380 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
2381 struct DCTStatStruc *pDCTstat, u8 dct)
2383 /* Get platform specific config/timing values from the interface layer
2384 * and program them into DCT.
2387 u32 dev = pDCTstat->dev_dct;
2389 u8 i, i_start, i_end;
2391 if (pDCTstat->GangedMode) {
2392 SyncSetting(pDCTstat);
2393 /* mct_SetupSync_D */
2400 for (i=i_start; i<i_end; i++) {
2401 index_reg = 0x98 + (i * 0x100);
2402 Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]); /* Channel A Output Driver Compensation Control */
2403 Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]); /* Channel A Address Timing Control */
2406 return pDCTstat->ErrCode;
/* Block until this node's memory is usable: if either DCT has DIMMs,
 * spin on F2x110[DramEnabled] until the hardware reports DRAM ready. */
2409 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
2414 if (pDCTstat->NodePresent) {
2415 dev = pDCTstat->dev_dct;
2417 if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
2419 val = Get_NB32(dev, 0x110);
2420 } while (!(val & (1 << DramEnabled)));
2422 } /* Node is present */
/* After CAS-latency/timing discovery, load per-DCT DIMM state into the
 * shared fields. The second branch (visible from line 2434 on) also
 * resets CSPresent/CSTestFail — presumably the ganged-mode path; the
 * branch structure is partially missing from this listing. */
2425 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
2426 struct DCTStatStruc *pDCTstat, u8 dct)
2428 if (!pDCTstat->GangedMode) {
2430 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2431 if (pDCTstat->DIMMValidDCT[dct] == 0)
2432 pDCTstat->ErrCode = SC_StopError;
2434 pDCTstat->CSPresent = 0;
2435 pDCTstat->CSTestFail = 0;
2436 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2437 if (pDCTstat->DIMMValidDCT[dct] == 0)
2438 pDCTstat->ErrCode = SC_StopError;
/* Derive data-bus width from SPD (SPDCalcWidth_D), pick ganged/unganged
 * mode, and disable the DRAM interface of any DCT that ended up with no
 * valid DIMMs (F2x[1,0]94[DisDramInterface]). Returns the error code. */
2443 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
2444 struct DCTStatStruc *pDCTstat, u8 dct)
2450 SPDCalcWidth_D(pMCTstat, pDCTstat);
2451 ret = mct_setMode(pMCTstat, pDCTstat);
2453 ret = pDCTstat->ErrCode;
2456 if (pDCTstat->DIMMValidDCT[0] == 0) {
2457 val = Get_NB32(pDCTstat->dev_dct, 0x94);
2458 val |= 1 << DisDramInterface;
2459 Set_NB32(pDCTstat->dev_dct, 0x94, val);
2461 if (pDCTstat->DIMMValidDCT[1] == 0) {
2462 val = Get_NB32(pDCTstat->dev_dct, 0x94 + 0x100);
2463 val |= 1 << DisDramInterface;
2464 Set_NB32(pDCTstat->dev_dct, 0x94 + 0x100, val);
2467 printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
2468 printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
2469 printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
2470 printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
2471 /* Disable dram interface before DRAM init */
/* After the node's DCT address maps are stitched together: compute the
 * software memory-hole base from NV_BottomIO, and in unganged mode fold
 * DCT1's range into the node limit, programming F2x110 (DctSelBaseAddr/
 * DctSelHiRngEn/DctSelHi) so accesses route to the right DCT.
 * NOTE(review): several register-offset assignments are missing from
 * this listing, so the exact F2x110 field math cannot be confirmed here. */
2476 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
2477 struct DCTStatStruc *pDCTstat, u8 dct)
2486 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
2487 DramHoleBase = mctGet_NVbits(NV_BottomIO);
2489 /* Increase hole size so;[31:24]to[31:16]
2490 * it has granularity of 128MB shl eax,8
2491 * Set 'effective' bottom IOmov DramHoleBase,eax
2493 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2495 /* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
2496 if (!pDCTstat->GangedMode) {
2497 dev = pDCTstat->dev_dct;
2498 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2499 /* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
2501 if (pDCTstat->DIMMValidDCT[1] > 0) {
2502 dword = pDCTstat->DCTSysLimit + 1;
2503 dword += pDCTstat->NodeSysBase;
2504 dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
2505 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2506 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2507 val = pMCTstat->HoleBase;
2509 val = (((~val) & 0xFF) + 1);
2514 val = Get_NB32(dev, reg);
2517 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2518 Set_NB32(dev, reg, val);
2522 Set_NB32(dev, reg, val);
2525 /* Program the DctSelBaseAddr value to 0
2526 if DCT 0 is disabled */
2527 if (pDCTstat->DIMMValidDCT[0] == 0) {
2528 dword = pDCTstat->NodeSysBase;
2530 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2531 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2532 val = pMCTstat->HoleBase;
2535 val |= (((~val) & 0xFFFF) + 1);
2540 Set_NB32(dev, reg, val);
2543 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2544 Set_NB32(dev, reg, val);
/* Ganged mode: the single 128-bit DCT's limit is the node limit */
2548 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2550 printk(BIOS_DEBUG, "AfterStitch pDCTstat->NodeSysBase = %x\n", pDCTstat->NodeSysBase);
2551 printk(BIOS_DEBUG, "mct_AfterStitchMemory: pDCTstat->NodeSysLimit = %x\n", pDCTstat->NodeSysLimit);
/* Thin wrapper: run SPD-based DIMM presence detection for this node and
 * return the resulting error code. */
2554 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
2555 struct DCTStatStruc *pDCTstat, u8 dct)
2560 ret = DIMMPresence_D(pMCTstat, pDCTstat);
2562 ret = pDCTstat->ErrCode;
2567 /* mct_BeforeGetDIMMAddress inline in C */
/* For every present node, program the secondary ("other") bus-turnaround
 * timings per populated DCT; DCT1 is skipped in ganged mode since it has
 * no independent timing registers then. */
2569 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
2570 struct DCTStatStruc *pDCTstatA)
2574 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2575 struct DCTStatStruc *pDCTstat;
2576 pDCTstat = pDCTstatA + Node;
2577 if (pDCTstat->NodePresent) {
2578 if (pDCTstat->DIMMValidDCT[0]) {
2579 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
2580 Set_OtherTiming(pMCTstat, pDCTstat, 0);
2582 if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
2583 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
2584 Set_OtherTiming(pMCTstat, pDCTstat, 1);
2586 } /* Node is present*/
/* Compute the Trdrd/Twrwr/Twrrd/TrwtTO/TrwtWB turnaround timings from
 * the measured gross-delay spreads, then merge their low bits into
 * F2x[1,0]8C (DRAM Timing High) and the upper bits into F2x[1,0]78.
 * The bit-insertion statements between each "dword = ..." pair are
 * missing from this listing. */
2590 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
2591 struct DCTStatStruc *pDCTstat, u8 dct)
2594 u32 reg_off = 0x100 * dct;
2597 u32 dev = pDCTstat->dev_dct;
2599 Get_DqsRcvEnGross_Diff(pDCTstat, dev, 0x98 + reg_off);
2600 Get_WrDatGross_Diff(pDCTstat, dct, dev, 0x98 + reg_off);
2601 Get_Trdrd(pMCTstat, pDCTstat, dct);
2602 Get_Twrwr(pMCTstat, pDCTstat, dct);
2603 Get_Twrrd(pMCTstat, pDCTstat, dct);
2604 Get_TrwtTO(pMCTstat, pDCTstat, dct);
2605 Get_TrwtWB(pMCTstat, pDCTstat);
2607 reg = 0x8C + reg_off; /* Dram Timing Hi */
2608 val = Get_NB32(dev, reg);
2610 dword = pDCTstat->TrwtTO;
2612 dword = pDCTstat->Twrrd & 3;
2614 dword = pDCTstat->Twrwr & 3;
2616 dword = pDCTstat->Trdrd & 3;
2618 dword = pDCTstat->TrwtWB;
2620 Set_NB32(dev, reg, val);
/* Upper two bits of Twrrd/Twrwr/Trdrd live in F2x[1,0]78 */
2622 reg = 0x78 + reg_off;
2623 val = Get_NB32(dev, reg);
2625 dword = pDCTstat->Twrrd >> 2;
2627 dword = pDCTstat->Twrwr >> 2;
2629 dword = pDCTstat->Trdrd >> 2;
2631 Set_NB32(dev, reg, val);
/* Trdrd (read-to-read turnaround) = half the DqsRcvEn gross-delay
 * spread, rounded down, plus 1 MEMCLK. The int8_t cast keeps the
 * max-min difference signed before the arithmetic shift. */
2634 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
2635 struct DCTStatStruc *pDCTstat, u8 dct)
2639 Trdrd = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 1;
2642 pDCTstat->Trdrd = Trdrd;
/* Twrwr (write-to-write turnaround) = half the WrDat gross-delay
 * spread plus 2 MEMCLKs. Clamping (if any) is on lines missing from
 * this listing (2651-2656). */
2645 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
2646 struct DCTStatStruc *pDCTstat, u8 dct)
2650 Twrwr = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->WrDatGrossMin) >> 1) + 2;
2657 pDCTstat->Twrwr = Twrwr;
/* Twrrd (write-to-read turnaround) from the spread between the largest
 * write gross delay and the smallest receive-enable gross delay, minus
 * the latency difference. The upper clamp branch (> 10) is visible;
 * the lower clamp is on a line missing from this listing. */
2660 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
2661 struct DCTStatStruc *pDCTstat, u8 dct)
2666 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2668 Twrrd = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 4 - LDplus1;
2672 else if (Twrrd > 10)
2674 pDCTstat->Twrrd = Twrrd;
/* TrwtTO (read-to-write turnaround, different DIMMs) = half the spread
 * between max receive-enable gross delay and min write gross delay,
 * plus the read/write latency difference. */
2677 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
2678 struct DCTStatStruc *pDCTstat, u8 dct)
2683 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2685 TrwtTO = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->WrDatGrossMin) >> 1) + LDplus1;
2687 pDCTstat->TrwtTO = TrwtTO;
/* NOTE(review): the comment says TrwtWB should be TrwtTO + 1, but the
 * visible assignment copies TrwtTO unchanged — either the +1 happens on
 * a line missing from this listing, when the register field is written,
 * or comment and code disagree. Verify against the BKDG. */
2690 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
2691 struct DCTStatStruc *pDCTstat)
2693 /* TrwtWB ensures read-to-write data-bus turnaround.
2694 This value should be one more than the programmed TrwtTO.*/
2695 pDCTstat->TrwtWB = pDCTstat->TrwtTO;
/* Latency difference used by Twrrd/TrwtTO: reads CAS latency from
 * F2x[1,0]88[3:0] and the write-latency field from F2x[1,0]84[22:20];
 * the subtraction/return is on lines missing from this listing. */
2698 static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
2699 struct DCTStatStruc *pDCTstat, u8 dct)
2701 u32 reg_off = 0x100 * dct;
2702 u32 dev = pDCTstat->dev_dct;
2705 val1 = Get_NB32(dev, reg_off + 0x88) & 0xF;
2706 val2 = (Get_NB32(dev, reg_off + 0x84) >> 20) & 7;
/* Scan the DqsRcvEn gross delays of all byte lanes (indexed phy regs
 * 0x10/0x11/0x20/0x21, plus 0x12 for the ECC lane when present) and
 * record the overall max/min in DqsRcvEnGrossMax/Min. Each MaxMin call
 * packs (min << 8) | max; the Largest-update statements are on lines
 * missing from this listing. */
2711 static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
2712 u32 dev, u32 index_reg)
2714 u8 Smallest, Largest;
2718 /* The largest DqsRcvEnGrossDelay of any DIMM minus the
2719 DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
2720 Gross Delay Difference (CGDD) */
2721 /* DqsRcvEn byte 1,0 */
2722 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x10);
2723 Largest = val & 0xFF;
2724 Smallest = (val >> 8) & 0xFF;
2726 /* DqsRcvEn byte 3,2 */
2727 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x11);
2729 bytex = (val >> 8) & 0xFF;
2730 if (bytex < Smallest)
2735 /* DqsRcvEn byte 5,4 */
2736 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x20);
2738 bytex = (val >> 8) & 0xFF;
2739 if (bytex < Smallest)
2744 /* DqsRcvEn byte 7,6 */
2745 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x21);
2747 bytex = (val >> 8) & 0xFF;
2748 if (bytex < Smallest)
/* ECC byte lane only contributes when ECC DIMMs are populated */
2753 if (pDCTstat->DimmECCPresent> 0) {
2755 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x12);
2757 bytex = (val >> 8) & 0xFF;
2758 if (bytex < Smallest)
2764 pDCTstat->DqsRcvEnGrossMax = Largest;
2765 pDCTstat->DqsRcvEnGrossMin = Smallest;
/* Scan WrDatGrossDlyByte across populated DIMMs (indexed phy reg 0x01
 * plus 0x100 per DIMM; DIMMs 2/3 exist only on Cx parts) and record the
 * overall max/min in WrDatGrossMax/Min. */
2768 static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
2769 u8 dct, u32 dev, u32 index_reg)
2771 u8 Smallest = 0, Largest = 0;
2775 /* The largest WrDatGrossDlyByte of any DIMM minus the
2776 WrDatGrossDlyByte of any other DIMM is equal to CGDD */
2777 if (pDCTstat->DIMMValid & (1 << 0)) {
2778 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
2779 Largest = val & 0xFF;
2780 Smallest = (val >> 8) & 0xFF;
2782 if (pDCTstat->DIMMValid & (1 << 2)) {
2783 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
2785 bytex = (val >> 8) & 0xFF;
2786 if (bytex < Smallest)
2792 /* If Cx, 2 more dimm need to be checked to find out the largest and smallest */
2793 if (pDCTstat->LogicalCPUID & AMD_DR_Cx) {
2794 if (pDCTstat->DIMMValid & (1 << 4)) {
2795 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x201); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM2 */
2797 bytex = (val >> 8) & 0xFF;
2798 if (bytex < Smallest)
2803 if (pDCTstat->DIMMValid & (1 << 6)) {
2804 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x301); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM3 */
2806 bytex = (val >> 8) & 0xFF;
2807 if (bytex < Smallest)
2814 pDCTstat->WrDatGrossMax = Largest;
2815 pDCTstat->WrDatGrossMin = Smallest;
/* For one DqsRcvEn register pair, walk the even DIMM bits and extract
 * the gross-delay fields of both byte lanes (bits [12:5] low lane,
 * [28:21] high lane), returning (min << 8) | max packed in a u16.
 * The index-advance and Largest-update lines are missing from this
 * listing. */
2818 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
2819 u32 dev, u32 index_reg,
2822 u8 Smallest, Largest;
2835 for (i=0; i < 8; i+=2) {
2836 if ( pDCTstat->DIMMValid & (1 << i)) {
2837 val = Get_NB32_index_wait(dev, index_reg, index);
2839 byte = (val >> 5) & 0xFF;
2840 if (byte < Smallest)
2845 byte = (val >> (16 + 5)) & 0xFF;
2846 if (byte < Smallest)
/* For one DIMM's WrDatGrossDly registers, scan 2 registers x 4 byte
 * fields each (plus the ECC register when ECC DIMMs are present) and
 * return the packed (min << 8) | max. Field extraction and the Largest
 * updates are on lines missing from this listing. */
2862 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
2863 u8 dct, u32 dev, u32 index_reg,
2866 u8 Smallest, Largest;
2874 for (i=0; i < 2; i++) {
2875 val = Get_NB32_index_wait(dev, index_reg, index);
2878 for (j=0; j < 4; j++) {
2880 if (byte < Smallest)
2889 if (pDCTstat->DimmECCPresent > 0) {
2891 val = Get_NB32_index_wait(dev, index_reg, index);
2895 if (byte < Smallest)
/* Revision-dependent phy power savings: when no x4 DIMMs are present,
 * set RxDqsUDllPowerDown (F2x[1,0]98_x0D0F0F13[7]); when no ECC DIMMs
 * are present on DA-C2/C3/RB-C3, set PwrDn (x0D0F0830[4]). */
2908 static void mct_PhyController_Config(struct MCTStatStruc *pMCTstat,
2909 struct DCTStatStruc *pDCTstat, u8 dct)
2911 u32 index_reg = 0x98 + 0x100 * dct;
2912 u32 dev = pDCTstat->dev_dct;
2915 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3 | AMD_RB_C3)) {
2916 if (pDCTstat->Dimmx4Present == 0) {
2917 /* Set bit7 RxDqsUDllPowerDown to register F2x[1, 0]98_x0D0F0F13 for power saving */
2918 val = Get_NB32_index_wait(dev, index_reg, 0x0D0F0F13); /* Agesa v3 v6 might be wrong here. */
2919 val |= 1 << 7; /* BIOS should set this bit when x4 DIMMs are not present */
2920 Set_NB32_index_wait(dev, index_reg, 0x0D0F0F13, val);
2924 if (pDCTstat->LogicalCPUID & AMD_DR_DAC2_OR_C3) {
2925 if (pDCTstat->DimmECCPresent == 0) {
2926 /* Set bit4 PwrDn to register F2x[1, 0]98_x0D0F0830 for power saving */
2927 val = Get_NB32_index_wait(dev, index_reg, 0x0D0F0830);
2928 val |= 1 << 4; /* BIOS should set this bit if ECC DIMMs are not present */
2929 Set_NB32_index_wait(dev, index_reg, 0x0D0F0830, val);
/* Final per-node MCT configuration: phy power-down tweaks, extended MCT
 * config (Bx/Cx path vs the inline Dx path programming F2x11C and
 * F2x1B0), WbEnhWsbDis cleanup, and DisFastTprWr in F3x8C when the CPU
 * reports no L3 (CPUID 0x80000006 EDX L3-size bits all clear). */
2935 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
2936 struct DCTStatStruc *pDCTstatA)
2939 struct DCTStatStruc *pDCTstat;
2942 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2943 pDCTstat = pDCTstatA + Node;
2945 if (pDCTstat->NodePresent) {
2946 mct_PhyController_Config(pMCTstat, pDCTstat, 0);
2947 mct_PhyController_Config(pMCTstat, pDCTstat, 1);
2949 if (!(pDCTstat->LogicalCPUID & AMD_DR_Dx)) { /* mct_checkForDxSupport */
2950 mct_ExtMCTConfig_Cx(pDCTstat);
2951 mct_ExtMCTConfig_Bx(pDCTstat);
2952 } else { /* For Dx CPU */
2953 val = 0x0CE00F00 | 1 << 29/* FlushWrOnStpGnt */;
2954 if (!(pDCTstat->GangedMode))
2955 val |= 0x20; /* MctWrLimit = 8 for Unganged mode */
2957 val |= 0x40; /* MctWrLimit = 16 for ganged mode */
2958 Set_NB32(pDCTstat->dev_dct, 0x11C, val);
2960 val = Get_NB32(pDCTstat->dev_dct, 0x1B0);
2962 val |= 0x101; /* BKDG recommended settings */
2963 val |= 0x0FC00000; /* Agesa V5 */
2964 if (!(pDCTstat->GangedMode))
/* F2x1B0[31:28] prefetch/arbitration value scales with MEMCLK */
2970 switch (pDCTstat->Speed) {
2972 val |= 0x50000000; /* 5 for DDR800 */
2975 val |= 0x60000000; /* 6 for DDR1066 */
2978 val |= 0x80000000; /* 8 for DDR1333 */
2981 val |= 0x90000000; /* 9 for DDR1600 */
2984 Set_NB32(pDCTstat->dev_dct, 0x1B0, val);
2988 /* ClrClToNB_D postponed until we're done executing from ROM */
2989 mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);
2991 /* set F3x8C[DisFastTprWr] on all DR, if L3Size=0 */
2992 if (pDCTstat->LogicalCPUID & AMD_DR_ALL) {
2993 if (!(cpuid_edx(0x80000006) & 0xFFFC0000)) {
2994 val = Get_NB32(pDCTstat->dev_nbmisc, 0x8C);
2996 Set_NB32(pDCTstat->dev_nbmisc, 0x8C, val);
/* Early per-node MCT setup: disable cache-line-to-NB forwarding and set
 * WbEnhWsbDis while training runs (both undone in mct_FinalMCT_D). */
3001 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
3003 mct_SetClToNB_D(pMCTstat, pDCTstat);
3004 mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
3007 static u32 mct_NodePresent_D(void)
/* Per-node state init: default to unganged, mark the node present, and
 * record (via MSR bit 46) whether extended PCI config access is
 * enabled, setting SB_ExtConfig accordingly. */
3014 static void mct_init(struct MCTStatStruc *pMCTstat,
3015 struct DCTStatStruc *pDCTstat)
3020 pDCTstat->GangedMode = 0;
3021 pDCTstat->DRPresent = 1;
3023 /* enable extend PCI configuration access */
3025 _RDMSR(addr, &lo, &hi);
3026 if (hi & (1 << (46-32))) {
3027 pDCTstat->Status |= 1 << SB_ExtConfig;
3030 _WRMSR(addr, lo, hi);
/* Clear the LegacyBiosMode bit in both DCTs' DRAM configuration
 * registers (the second read-modify-write targets the DCT1 copy of the
 * register; its reg assignment is on a missing line). */
3034 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
3035 struct DCTStatStruc *pDCTstat)
3039 u32 dev = pDCTstat->dev_dct;
3041 /* Clear Legacy BIOS Mode bit */
3043 val = Get_NB32(dev, reg);
3044 val &= ~(1<<LegacyBiosMode);
3045 Set_NB32(dev, reg, val);
3048 val = Get_NB32(dev, reg);
3049 val &= ~(1<<LegacyBiosMode);
3050 Set_NB32(dev, reg, val);
/* Extend the HT memory map: copy each node's DRAM base/limit from
 * Node0's F1x40/F1x44 map into that node's own extended F1x120/F1x124
 * registers (addresses rescaled from [39:19] fields to [47:27]), and
 * propagate the hoisted memory-hole base when GSB_HWHole is set. */
3053 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
3054 struct DCTStatStruc *pDCTstatA)
3057 u32 Drambase, Dramlimit;
3063 struct DCTStatStruc *pDCTstat;
3065 pDCTstat = pDCTstatA + 0;
3066 dev = pDCTstat->dev_map;
3068 /* Copy dram map from F1x40/44,F1x48/4c,
3069 to F1x120/124(Node0),F1x120/124(Node1),...*/
3070 for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
3071 pDCTstat = pDCTstatA + Node;
3072 devx = pDCTstat->dev_map;
3074 /* get base/limit from Node0 */
3075 reg = 0x40 + (Node << 3); /* Node0/Dram Base 0 */
3076 val = Get_NB32(dev, reg);
3077 Drambase = val >> ( 16 + 3);
3079 reg = 0x44 + (Node << 3); /* Node0/Dram Base 0 */
3080 val = Get_NB32(dev, reg);
3081 Dramlimit = val >> (16 + 3);
3083 /* set base/limit to F1x120/124 per Node */
3084 if (pDCTstat->NodePresent) {
3085 reg = 0x120; /* F1x120,DramBase[47:27] */
3086 val = Get_NB32(devx, reg);
3089 Set_NB32(devx, reg, val);
3092 val = Get_NB32(devx, reg);
3095 Set_NB32(devx, reg, val);
3097 if ( pMCTstat->GStatus & ( 1 << GSB_HWHole)) {
3099 val = Get_NB32(devx, reg);
3100 val |= (1 << DramMemHoistValid);
3101 val &= ~(0xFF << 24);
3102 dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
3105 Set_NB32(devx, reg, val);
/* Tri-state unused chip selects: build a mask of absent CS lines (for
 * registered DIMMs the odd CS of each pair follows its even partner)
 * and merge it into the phy CS/ODT tri-state indexed register. */
3112 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
3113 struct DCTStatStruc *pDCTstat, u8 dct)
3116 u32 dev = pDCTstat->dev_dct;
3117 u32 index_reg = 0x98 + 0x100 * dct;
3121 /* Tri-state unused chipselects when motherboard
3122 termination is available */
3124 /* FIXME: skip for Ax */
3126 word = pDCTstat->CSPresent;
3127 if (pDCTstat->Status & (1 << SB_Registered)) {
3128 word |= (word & 0x55) << 1;
3130 word = (~word) & 0xFF;
3132 val = Get_NB32_index_wait(dev, index_reg, index);
3134 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused CKE lines: CKE0 covers the even chip selects, CKE1
 * the odd ones; a CKE is tri-stated when none of its CS group is
 * populated. Bit positions set between the tests are on missing lines. */
3137 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
3138 struct DCTStatStruc *pDCTstat, u8 dct)
3142 u32 index_reg = 0x98 + 0x100 * dct;
3146 /* Tri-state unused CKEs when motherboard termination is available */
3148 /* FIXME: skip for Ax */
3150 dev = pDCTstat->dev_dct;
3151 word = pDCTstat->CSPresent;
3154 val = Get_NB32_index_wait(dev, index_reg, index);
3155 if ((word & 0x55) == 0)
3158 if ((word & 0xAA) == 0)
3161 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused ODT pins. Registered platforms map one ODT per CS
 * pair (plus a second for quad-rank boards per NV_4RANKType); on the
 * AM3 package path the mask is derived directly from CSPresent. The
 * final mask lands in bits 11:8 of the phy indexed register. */
3164 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
3165 struct DCTStatStruc *pDCTstat, u8 dct)
3169 u32 index_reg = 0x98 + 0x100 * dct;
3175 /* FIXME: skip for Ax */
3177 dev = pDCTstat->dev_dct;
3179 /* Tri-state unused ODTs when motherboard termination is available */
3180 max_dimms = (u8) mctGet_NVbits(NV_MAX_DIMMS);
3181 odt = 0x0F; /* ODT tri-state setting */
3183 if (pDCTstat->Status & (1 <<SB_Registered)) {
3184 for (cs = 0; cs < 8; cs += 2) {
3185 if (pDCTstat->CSPresent & (1 << cs)) {
3186 odt &= ~(1 << (cs / 2));
3187 if (mctGet_NVbits(NV_4RANKType) != 0) { /* quad-rank capable platform */
3188 if (pDCTstat->CSPresent & (1 << (cs + 1)))
3189 odt &= ~(4 << (cs / 2));
3193 } else { /* AM3 package */
3194 val = ~(pDCTstat->CSPresent);
3195 odt = val & 9; /* swap bits 1 and 2 */
3203 val = Get_NB32_index_wait(dev, index_reg, index);
3204 val |= ((odt & 0xFF) << 8); /* set bits 11:8 ODTTriState[3:0] */
3205 Set_NB32_index_wait(dev, index_reg, index, val);
/* Build the phy slew-rate compensation word from the pre-driver
 * calibration readback (indexed reg 0x00): six 5-bit fields selected
 * from the rise/fall slew tables, with a drive-strength override for
 * 4 MA-load configurations at DDR1066/DDR1333. Result is written to
 * indexed register 0x0a. The loop's case labels are on missing lines. */
3209 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
3210 struct DCTStatStruc *pDCTstat, u8 dct)
3213 u32 index_reg = 0x98 + 0x100 * dct;
3214 u32 dev = pDCTstat->dev_dct;
3220 val = Get_NB32_index_wait(dev, index_reg, 0x00);
3222 for (i=0; i < 6; i++) {
3226 p = Table_Comp_Rise_Slew_15x;
3227 valx = p[(val >> 16) & 3];
3231 p = Table_Comp_Fall_Slew_15x;
3232 valx = p[(val >> 16) & 3];
3235 p = Table_Comp_Rise_Slew_20x;
3236 valx = p[(val >> 8) & 3];
3239 p = Table_Comp_Fall_Slew_20x;
3240 valx = p[(val >> 8) & 3];
3244 dword |= valx << (5 * i);
3247 /* Override/Exception */
3248 if (!pDCTstat->GangedMode) {
3249 i = 0; /* use i for the dct setting required */
3250 if (pDCTstat->MAdimms[0] < 4)
3252 if (((pDCTstat->Speed == 2) || (pDCTstat->Speed == 3)) && (pDCTstat->MAdimms[i] == 4)) {
3253 dword &= 0xF18FFF18;
3254 index_reg = 0x98; /* force dct = 0 */
3258 Set_NB32_index_wait(dev, index_reg, 0x0a, dword);
/* Set F2x[1,0]78[EarlyArbEn]: unconditionally on Cx/Dx parts, otherwise
 * only when the NBCLK:MEMCLK ratio check says early arbitration is
 * needed (3:1 up to 4.5:1). */
3261 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
3262 struct DCTStatStruc *pDCTstat, u8 dct)
3266 u32 dev = pDCTstat->dev_dct;
3268 /* GhEnhancement #18429 modified by askar: For low NB CLK :
3269 * Memclk ratio, the DCT may need to arbitrate early to avoid
3270 * unnecessary bubbles.
3271 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
3272 * NB CLK : Memclk ratio is between 3:1 (inclusive) to 4:5 (inclusive)
3274 reg = 0x78 + 0x100 * dct;
3275 val = Get_NB32(dev, reg);
3277 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Dx))
3278 val |= (1 << EarlyArbEn);
3279 else if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
3280 val |= (1 << EarlyArbEn);
3282 Set_NB32(dev, reg, val);
/* Compute the NBCLK:MEMCLK ratio from the COFVID MSR (0xC0010071) and
 * F2x94[MemClkFreq] (falling back to DCT1's copy when DCT0's
 * MemClkFreqVal is clear), then return 1 iff the ratio is in
 * [3:1, 4.5:1]. The division/remainder math is on missing lines. */
3285 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
3286 struct DCTStatStruc *pDCTstat)
3292 u32 dev = pDCTstat->dev_dct;
3296 /* Check if NB COF >= 4*Memclk, if it is not, return a fatal error
3299 /* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
3300 _RDMSR(0xC0010071, &lo, &hi);
3305 val = Get_NB32(dev, reg);
3306 if (!(val & (1 << MemClkFreqVal)))
3307 val = Get_NB32(dev, reg + 0x100); /* get the DCT1 value */
3315 dev = pDCTstat->dev_nbmisc;
3317 val = Get_NB32(dev, reg);
3325 /* Yes this could be nicer but this was how the asm was.... */
3326 if (val < 3) { /* NClk:MemClk < 3:1 */
3328 } else if (val > 4) { /* NClk:MemClk >= 5:1 */
3330 } else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
3333 return 1; /* 3:1 <= NClk:MemClk <= 4.5:1*/
/* Zero the MCT stat structure and, for each of the 8 node stat
 * structures, zero two byte ranges of the struct (the region up to
 * CH_MaxRdLat[2] and the region from CH_D_BC_RCVRDLY[2][4] onward)
 * while preserving the HostBiosSrvc1/2 fields across the wipe. */
3337 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
3338 struct DCTStatStruc *pDCTstatA)
3342 struct DCTStatStruc *pDCTstat;
3345 u16 host_serv1, host_serv2;
3347 /* Initialize Data structures by clearing all entries to 0 */
3348 p = (u8 *) pMCTstat;
3349 for (i = 0; i < sizeof(struct MCTStatStruc); i++) {
3353 for (Node = 0; Node < 8; Node++) {
3354 pDCTstat = pDCTstatA + Node;
3355 host_serv1 = pDCTstat->HostBiosSrvc1;
3356 host_serv2 = pDCTstat->HostBiosSrvc2;
3358 p = (u8 *) pDCTstat;
/* offsetof-style computation via a null-pointer cast (pre-offsetof idiom) */
3360 stop = ((u32) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]);
3361 for (i = start; i < stop ; i++) {
3365 start = ((u32) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
3366 stop = sizeof(struct DCTStatStruc);
3367 for (i = start; i < stop; i++) {
3370 pDCTstat->HostBiosSrvc1 = host_serv1;
3371 pDCTstat->HostBiosSrvc2 = host_serv2;
/* Dx-revision pre-DRAM-init phy workaround: write a value (selected by
 * Speed == 3, i.e. DDR1066 — the dword assignments are on missing
 * lines) through the 0x98/0x9C indexed pair of both DCTs, targeting
 * phy register xD040F30. */
3375 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
3376 struct DCTStatStruc *pDCTstat)
3380 u32 dev = pDCTstat->dev_dct;
3382 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
3383 if ((pDCTstat->Speed == 3))
3387 for (i=0; i < 2; i++) {
3388 reg_off = 0x100 * i;
3389 Set_NB32(dev, 0x98 + reg_off, 0x0D000030);
3390 Set_NB32(dev, 0x9C + reg_off, dword);
3391 Set_NB32(dev, 0x98 + reg_off, 0x4D040F30);
/* Enable DLL shutdown in self-refresh on DA-C2/C3 parts: program the
 * phy registers x4D0FE006/x4D0FE007 via the 0x98/0x9C pair, then clear
 * F2x[1,0]90[27] (DisDllShutdownSR). */
3396 static void mct_EnDllShutdownSR(struct MCTStatStruc *pMCTstat,
3397 struct DCTStatStruc *pDCTstat, u8 dct)
3399 u32 reg_off = 0x100 * dct;
3400 u32 dev = pDCTstat->dev_dct, val;
3402 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
3403 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
3404 Set_NB32(dev, 0x9C + reg_off, 0x1C);
3405 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
3406 Set_NB32(dev, 0x9C + reg_off, 0x13D);
3407 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);
3409 val = Get_NB32(dev, 0x90 + reg_off);
3410 val &= ~(1 << 27/* DisDllShutdownSR */);
3411 Set_NB32(dev, 0x90 + reg_off, val);
/* Disable DLL shutdown in self-refresh (inverse of mct_EnDllShutdownSR):
 * program x4D0FE006/x4D0FE007 with the "disabled" values and return
 * DramConfigLo with bit 27 (DisDllShutdownSR) set for the caller to
 * write into F2x[1,0]90. */
3415 static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
3416 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct)
3418 u32 reg_off = 0x100 * dct;
3419 u32 dev = pDCTstat->dev_dct;
3421 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
3422 if (pDCTstat->LogicalCPUID & (AMD_DR_DAC2_OR_C3)) {
3423 Set_NB32(dev, 0x9C + reg_off, 0x7D0);
3424 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
3425 Set_NB32(dev, 0x9C + reg_off, 0x190);
3426 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);
3428 DramConfigLo |= /* DisDllShutdownSR */ 1 << 27;
3431 return DramConfigLo;
/* Set ClLinesToNbDis in the BU_CFG2 MSR — disables cache-line
 * forwarding to the NB during memory init (cleared again later by
 * mct_ClrClToNB_D). */
3434 void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
3435 struct DCTStatStruc *pDCTstat)
3440 /* FIXME: Maybe check the CPUID? - not for now. */
3441 /* pDCTstat->LogicalCPUID; */
3444 _RDMSR(msr, &lo, &hi);
3445 lo |= 1 << ClLinesToNbDis;
3446 _WRMSR(msr, lo, hi);
/* Clear ClLinesToNbDis again — unless ClToNB_flag records that the bit
 * was already set before init, in which case the original state is
 * preserved. */
3449 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
3450 struct DCTStatStruc *pDCTstat)
3456 /* FIXME: Maybe check the CPUID? - not for now. */
3457 /* pDCTstat->LogicalCPUID; */
3460 _RDMSR(msr, &lo, &hi);
3461 if (!pDCTstat->ClToNB_flag)
3462 lo &= ~(1<<ClLinesToNbDis);
3463 _WRMSR(msr, lo, hi);
/* Set WbEnhWsbDis in the high half of its MSR for the duration of
 * memory init (cleared by mct_ClrWbEnhWsbDis_D in mct_FinalMCT_D). */
3467 void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3468 struct DCTStatStruc *pDCTstat)
3473 /* FIXME: Maybe check the CPUID? - not for now. */
3474 /* pDCTstat->LogicalCPUID; */
3477 _RDMSR(msr, &lo, &hi);
3478 hi |= (1 << WbEnhWsbDis_D);
3479 _WRMSR(msr, lo, hi);
/* Clear WbEnhWsbDis again after memory init completes. */
3482 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3483 struct DCTStatStruc *pDCTstat)
3488 /* FIXME: Maybe check the CPUID? - not for now. */
3489 /* pDCTstat->LogicalCPUID; */
3492 _RDMSR(msr, &lo, &hi);
3493 hi &= ~(1 << WbEnhWsbDis_D);
3494 _WRMSR(msr, lo, hi);
/* Build the DRAM MRS register value (ODT nominal/dynamic termination,
 * CKE control, burst length, CAS write latency derived from Speed) and
 * merge it into F2x[1,0]84, preserving the fields outside mask
 * 0x00FC2F8F. Many DramMRS accumulation lines are missing from this
 * listing. */
3497 void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
3498 struct DCTStatStruc *pDCTstat, u8 dct)
3505 /* Set chip select CKE control mode */
3506 if (mctGet_NVbits(NV_CKE_CTL)) {
3507 if (pDCTstat->CSPresent == 3) {
3509 word = pDCTstat->DIMMSPDCSE;
3520 DrvImpCtrl: drive impedance control.01b(34 ohm driver; Ron34 = Rzq/7)
3523 /* Dram nominal termination: */
3524 byte = pDCTstat->MAdimms[dct];
3525 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3526 DramMRS |= 1 << 7; /* 60 ohms */
3528 if (pDCTstat->Speed < 6)
3529 DramMRS |= 1 << 8; /* 40 ohms */
3531 DramMRS |= 1 << 9; /* 30 ohms */
3534 /* Dram dynamic termination: Disable(1DIMM), 120ohm(>=2DIMM) */
3535 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3537 if (pDCTstat->Speed == 7)
3543 DramMRS |= mct_DramTermDyn_RDimm(pMCTstat, pDCTstat, byte);
3546 /* burst length control */
3547 if (pDCTstat->Status & (1 << SB_128bitmode))
3549 /* Qoff=0, output buffers enabled */
/* CAS write latency field: Speed is an encoded MEMCLK index, 4 = DDR800 */
3551 DramMRS |= (pDCTstat->Speed - 4) << 20;
3552 /* ASR=1, auto self refresh */
3556 dword = Get_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84);
3557 dword &= ~0x00FC2F8F;
3559 Set_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84, dword);
/* Write DRAM Config High (F2x[1,0]94) with the Bug#15114 / Errata#177
 * compensation-engine workaround documented below: ensure DisAutoComp
 * is set before the frequency-changing write. The compensation-reset
 * and re-enable steps are on lines missing from this listing. */
3562 void mct_SetDramConfigHi_D(struct DCTStatStruc *pDCTstat, u32 dct,
3565 /* Bug#15114: Comp. update interrupted by Freq. change can cause
3566 * subsequent update to be invalid during any MemClk frequency change:
3567 * Solution: From the bug report:
3568 * 1. A software-initiated frequency change should be wrapped into the
3569 * following sequence :
3570 * - a) Disable Compensation (F2[1, 0]9C_x08[30] )
3571 * b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
3572 * c) Do frequency change
3573 * d) Enable Compensation (F2[1, 0]9C_x08[30] )
3574 * 2. A software-initiated Disable Compensation should always be
3575 * followed by step b) of the above steps.
3576 * Silicon Status: Fixed In Rev B0
3578 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
3579 * Solution: BIOS should disable the phy automatic compensation prior
3580 * to initiating a memory clock frequency change as follows:
3581 * 1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
3582 * 2. Reset the Begin Compensation bits by writing 32'h0 to
3583 * F2x[1, 0]9C_x4D004F00
3584 * 3. Perform frequency change
3585 * 4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
3586 * In addition, any time software disables the automatic phy
3587 * compensation it should reset the begin compensation bit per step 2.
3588 * Silicon Status: Fixed in DR-B0
3591 u32 dev = pDCTstat->dev_dct;
3592 u32 index_reg = 0x98 + 0x100 * dct;
3598 val = Get_NB32_index_wait(dev, index_reg, index);
3599 if (!(val & (1 << DisAutoComp)))
3600 Set_NB32_index_wait(dev, index_reg, index, val | (1 << DisAutoComp));
3604 Set_NB32(dev, 0x94 + 0x100 * dct, DramConfigHi);
/* Pre-DQS-training pass over all nodes: apply the Bx sampling
 * workaround and reset the DDR phy DLLs on both DCTs (Bug#15115 /
 * Bug#15880, see comments below). */
3607 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
3608 struct DCTStatStruc *pDCTstatA)
3611 struct DCTStatStruc *pDCTstat;
3615 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
3617 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
3618 * 5h, (F2x[1, 0]78[3:0] = 5h).
3619 * Silicon Status: Fixed In Rev B0
3621 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
3622 * Solution: At least, set WrDqs fine delay to be 0 for DDR3 training.
3624 for (Node = 0; Node < 8; Node++) {
3625 pDCTstat = pDCTstatA + Node;
3627 if (pDCTstat->NodePresent) {
3628 mct_BeforeDQSTrainSamp(pDCTstat); /* only Bx */
3629 mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
3630 mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
/* Reset the DDR phy DLL for one DCT (skipped on B3 silicon): with
 * HWCR.wrap32dis set so 64-bit addresses work from 32-bit mode, touch
 * one rank per enabled receiver pair to fill the cache, then pulse phy
 * register x4D080F0C (write 0x8000, wait, write 0), restoring the
 * original wrap32dis state afterwards. */
3635 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
3636 struct DCTStatStruc *pDCTstat, u8 dct)
3639 u32 dev = pDCTstat->dev_dct;
3640 u32 reg_off = 0x100 * dct;
3646 /* Skip reset DLL for B3 */
3647 if (pDCTstat->LogicalCPUID & AMD_DR_B3) {
3652 _RDMSR(addr, &lo, &hi);
3653 if(lo & (1<<17)) { /* save the old value */
3656 lo |= (1<<17); /* HWCR.wrap32dis */
3657 /* Setting wrap32dis allows 64-bit memory references in 32bit mode */
3658 _WRMSR(addr, lo, hi);
3660 pDCTstat->Channel = dct;
3661 Receiver = mct_InitReceiver_D(pDCTstat, dct);
3662 /* there are four receiver pairs, loosely associated with chipselects.*/
3663 for (; Receiver < 8; Receiver += 2) {
3664 if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
3665 addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
3667 mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr); /* cache fills */
3669 /* Write 0000_8000h to register F2x[1,0]9C_xD080F0C */
3670 Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00008000);
3671 mct_Wait(80); /* wait >= 300ns */
3673 /* Write 0000_0000h to register F2x[1,0]9C_xD080F0C */
3674 Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00000000);
3675 mct_Wait(800); /* wait >= 2us */
3683 _RDMSR(addr, &lo, &hi);
3684 lo &= ~(1<<17); /* restore HWCR.wrap32dis */
3685 _WRMSR(addr, lo, hi);
/*
 * Enable DCT data interleaving (channel interleave on a 64-byte basis)
 * when the node is running its two DCTs unganged.  Sets
 * F2x110[DctDatIntlv] and clears the data-mask restriction by setting
 * NB Configuration bit 36 (DisDatMask) in the misc function's high dword.
 * No-op in ganged mode.
 */
3689 static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
3690 struct DCTStatStruc *pDCTstat)
3692 u32 dev = pDCTstat->dev_dct;
3695 /* Enable F2x110[DctDatIntlv] */
3696 /* Call back not required mctHookBeforeDatIntlv_D() */
3697 /* FIXME Skip for Ax */
3698 if (!pDCTstat->GangedMode) {
3699 val = Get_NB32(dev, 0x110);
3700 val |= 1 << 5; /* DctDatIntlv */
3701 Set_NB32(dev, 0x110, val);
3703 /* FIXME Skip for Cx */
/* Bit 36 of the 64-bit NB config lives in the high dword at 0x8C. */
3704 dev = pDCTstat->dev_nbmisc;
3705 val = Get_NB32(dev, 0x8C); /* NB Configuration Hi */
3706 val |= 1 << (36-32); /* DisDatMask */
3707 Set_NB32(dev, 0x8C, val);
/*
 * Speed-up the DDR phy DLLs for DDR3-1600 and faster (Speed code >= 7).
 * Each phy register is accessed through the indexed-data port pair
 * F2x[1,0]98 (index) / F2x[1,0]9C (data): write the index, read-modify-
 * write the data, then write the index again with bit 30 set to commit
 * the write.  The four registers touched are x0D080F10, x0D080F11,
 * x0D088F30 and x0D08CF30.
 * NOTE(review): the `val` modify step (per the comments, setting bit 13)
 * sits on lines elided from this view — confirm against the full file.
 */
3711 static void SetDllSpeedUp_D(struct MCTStatStruc *pMCTstat,
3712 struct DCTStatStruc *pDCTstat, u8 dct)
3715 u32 dev = pDCTstat->dev_dct;
3716 u32 reg_off = 0x100 * dct;
3718 if (pDCTstat->Speed >= 7) { /* DDR1600 and above */
3719 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F10 */
3720 Set_NB32(dev, reg_off + 0x98, 0x0D080F10);
3721 val = Get_NB32(dev, reg_off + 0x9C);
3723 Set_NB32(dev, reg_off + 0x9C, val);
3724 Set_NB32(dev, reg_off + 0x98, 0x4D080F10);
3726 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F11 */
3727 Set_NB32(dev, reg_off + 0x98, 0x0D080F11);
3728 val = Get_NB32(dev, reg_off + 0x9C);
3730 Set_NB32(dev, reg_off + 0x9C, val);
3731 Set_NB32(dev, reg_off + 0x98, 0x4D080F11);
3733 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D088F30 */
3734 Set_NB32(dev, reg_off + 0x98, 0x0D088F30);
3735 val = Get_NB32(dev, reg_off + 0x9C);
3737 Set_NB32(dev, reg_off + 0x9C, val);
3738 Set_NB32(dev, reg_off + 0x98, 0x4D088F30);
3740 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D08CF30 */
3741 Set_NB32(dev, reg_off + 0x98, 0x0D08CF30);
3742 val = Get_NB32(dev, reg_off + 0x9C);
3744 Set_NB32(dev, reg_off + 0x9C, val);
3745 Set_NB32(dev, reg_off + 0x98, 0x4D08CF30);
/*
 * Decide whether the two channels' command setup times need to be
 * synchronized, and if so set F2x78[ChSetupSync].
 */
3750 static void SyncSetting(struct DCTStatStruc *pDCTstat)
3752 /* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
3753 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
3754 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
3758 u32 dev = pDCTstat->dev_dct;
/* 0x0202020 isolates the AddrCmdSetup, CsOdtSetup and CkeSetup bits of
 * each channel's cached address timing value. */
3761 cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
3762 chb = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;
/* True exactly when one channel has all-zero setups and the other does not. */
3764 if ((cha != chb) && ((cha == 0) || (chb == 0))) {
3765 val = Get_NB32(dev, 0x78);
3766 val |= 1 << ChSetupSync;
3767 Set_NB32(dev, 0x78, val);
/*
 * Post-DRAM-init erratum workaround for rev B2/B3 parts: wait up to
 * 50 us for F2x110[DramEnabled]; if it never set, temporarily drop
 * Width128 (unganging the controller), do a dummy phy CSR read, and
 * then restore Width128 when the node is configured ganged.
 */
3771 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {
3774 u32 reg_off = 0x100 * dct;
3775 u32 dev = pDCTstat->dev_dct;
3777 if (pDCTstat->LogicalCPUID & (AMD_DR_B2 | AMD_DR_B3)) {
3778 mct_Wait(10000); /* Wait 50 us*/
3779 val = Get_NB32(dev, 0x110);
3780 if (!(val & (1 << DramEnabled))) {
3781 /* If 50 us expires while DramEnable =0 then do the following */
3782 val = Get_NB32(dev, 0x90 + reg_off);
3783 val &= ~(1 << Width128); /* Program Width128 = 0 */
3784 Set_NB32(dev, 0x90 + reg_off, val);
3786 val = Get_NB32_index_wait(dev, 0x98 + reg_off, 0x05); /* Perform dummy CSR read to F2x09C_x05 */
3788 if (pDCTstat->GangedMode) {
3789 val = Get_NB32(dev, 0x90 + reg_off);
3790 val |= 1 << Width128; /* Restore Width128 = 1 (ganged 128-bit mode) */
3791 Set_NB32(dev, 0x90 + reg_off, val);
3797 /* ==========================================================
3798 * 6-bit Bank Addressing Table
3801 * CCC = (number of column-address bits - 9), encoded in binary
3802 * ==========================================================
3803 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
3805 * 0000 000000 13 2 9 128MB
3806 * 0001 001000 13 2 10 256MB
3807 * 0010 001001 14 2 10 512MB
3808 * 0011 010000 13 2 11 512MB
3809 * 0100 001100 13 3 10 512MB
3810 * 0101 001101 14 3 10 1GB
3811 * 0110 010001 14 2 11 1GB
3812 * 0111 001110 15 3 10 2GB
3813 * 1000 010101 14 3 11 2GB
3814 * 1001 010110 15 3 11 4GB
3815 * 1010 001111 16 3 10 4GB
3816 * 1011 010111 16 3 11 8GB
/*
 * Verify the CRC16 of a DDR3 SPD image read over SMBus from `smbaddr`.
 * SPD byte 0 (SPD_ByteUse) bit 7 selects the CRC coverage range per the
 * JEDEC DDR3 SPD spec; NOTE(review): byte_use is reduced to the actual
 * byte count on lines elided from this view — confirm in the full file.
 * Returns nonzero when the computed CRC equals the checksum stored
 * little-endian in SPD bytes 126 (low) and 127 (high).
 */
3818 u8 crcCheck(u8 smbaddr)
3825 byte_use = mctRead_SPD(smbaddr, SPD_ByteUse);
3826 if (byte_use & 0x80)
/* Accumulate the CRC over every covered SPD byte. */
3832 for (Index = 0; Index < byte_use; Index ++) {
3833 byte = mctRead_SPD(smbaddr, Index);
/* Bitwise CRC16 inner loop (shift/XOR body is on elided lines). */
3835 for (i=0; i<8; i++) {
3843 return CRC == (mctRead_SPD(smbaddr, SPD_byte_127) << 8 | mctRead_SPD(smbaddr, SPD_byte_126));