2 * This file is part of the coreboot project.
4 * Copyright (C) 2010 Advanced Micro Devices, Inc.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 of the License.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 /* Description: Main memory controller system configuration for DDR 3 */
22 /* KNOWN ISSUES - ERRATA
24 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
25 * is 1 busclock off. No fix planned. The controller is not ordinarily in
28 * 32 Byte burst not supported. No fix planned. The controller is not
29 * ordinarily in 64-bit mode.
31 * Trc precision does not use extra Jedec defined fractional component.
32 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
34 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
38 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
39 struct DCTStatStruc *pDCTstatA);
40 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
41 struct DCTStatStruc *pDCTstatA);
42 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
43 struct DCTStatStruc *pDCTstatA);
44 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
45 struct DCTStatStruc *pDCTstatA);
46 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
47 struct DCTStatStruc *pDCTstatA);
48 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
49 struct DCTStatStruc *pDCTstat);
50 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
51 struct DCTStatStruc *pDCTstat);
52 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
53 struct DCTStatStruc *pDCTstatA);
54 static u8 NodePresent_D(u8 Node);
55 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
56 struct DCTStatStruc *pDCTstatA);
57 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
58 struct DCTStatStruc *pDCTstat, u8 dct);
59 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
60 struct DCTStatStruc *pDCTstat, u8 dct);
61 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
62 struct DCTStatStruc *pDCTstat, u8 dct);
63 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
64 struct DCTStatStruc *pDCTstat);
65 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
66 struct DCTStatStruc *pDCTstat, u8 dct);
67 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
68 struct DCTStatStruc *pDCTstat, u8 dct);
69 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
70 struct DCTStatStruc *pDCTstat, u8 dct);
71 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
72 struct DCTStatStruc *pDCTstat, u8 dct);
73 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
74 struct DCTStatStruc *pDCTstat, u8 dct);
75 static u16 Get_Fk_D(u8 k);
76 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
77 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
78 struct DCTStatStruc *pDCTstat);
79 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
80 struct DCTStatStruc *pDCTstat, u8 dct);
81 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
82 struct DCTStatStruc *pDCTstat, u8 dct);
83 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
84 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
85 struct DCTStatStruc *pDCTstat, u8 dct);
86 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
87 struct DCTStatStruc *pDCTstat, u8 dct);
88 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,\
89 struct DCTStatStruc *pDCTstat, u8 dct);
90 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
91 struct DCTStatStruc *pDCTstat, u8 dct);
92 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
93 struct DCTStatStruc *pDCTstat, u8 dct);
94 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
95 struct DCTStatStruc *pDCTstat, u8 dct);
96 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
97 struct DCTStatStruc *pDCTstat, u8 dct);
98 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
99 struct DCTStatStruc *pDCTstat, u8 dct);
100 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
101 struct DCTStatStruc *pDCTstat, u8 dct);
102 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
103 struct DCTStatStruc *pDCTstat);
104 static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
105 u32 dev, u32 index_reg);
106 static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
107 u32 dev, u32 index_reg);
108 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
109 u32 dev, u32 index_reg, u32 index);
110 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
111 struct DCTStatStruc *pDCTstat);
112 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
113 u32 dev, u32 index_reg, u32 index);
114 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
115 struct DCTStatStruc *pDCTstat);
116 static void mct_init(struct MCTStatStruc *pMCTstat,
117 struct DCTStatStruc *pDCTstat);
118 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
119 struct DCTStatStruc *pDCTstat);
120 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
121 struct DCTStatStruc *pDCTstatA);
122 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
123 struct DCTStatStruc *pDCTstat, u8 dct);
124 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
125 struct DCTStatStruc *pDCTstat, u8 dct);
126 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
127 struct DCTStatStruc *pDCTstat, u8 dct);
128 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
129 struct DCTStatStruc *pDCTstat, u8 dct);
130 static u32 mct_NodePresent_D(void);
131 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
132 struct DCTStatStruc *pDCTstatA);
133 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
134 struct DCTStatStruc *pDCTstatA);
135 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
136 struct DCTStatStruc *pDCTstat);
137 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
138 struct DCTStatStruc *pDCTstat);
139 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
140 struct DCTStatStruc *pDCTstat);
141 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
142 struct DCTStatStruc *pDCTstat);
143 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
144 struct DCTStatStruc *pDCTstat);
145 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
146 struct DCTStatStruc *pDCTstatA);
147 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
148 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
149 struct DCTStatStruc *pDCTstat, u8 dct);
150 static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
151 struct DCTStatStruc *pDCTstat, u8 dct);
152 static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
153 struct DCTStatStruc *pDCTstat, u8 dct);
154 static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
155 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
157 static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
158 struct DCTStatStruc *pDCTstat, u8 dimm);
159 static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
160 static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
161 static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstatA);
162 static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
163 struct DCTStatStruc *pDCTstat, u8 dct);
164 static void SyncSetting(struct DCTStatStruc *pDCTstat);
165 static u8 crcCheck(u8 smbaddr);
167 /*See mctAutoInitMCT header for index relationships to CL and T*/
/* Fix: first entry of Table_F_k was written as octal "00"; same value,
 * but misleading — use plain decimal 0.
 * NOTE(review): values 200..533 look like MemClk frequencies in MHz,
 * indexed by speed code — confirm against Get_Fk_D's callers. */
168 static const u16 Table_F_k[] = {0,200,266,333,400,533 };
169 static const u8 Tab_BankAddr[] = {0x3F,0x01,0x09,0x3F,0x3F,0x11,0x0A,0x19,0x12,0x1A,0x21,0x22,0x23};
170 static const u8 Table_DQSRcvEn_Offset[] = {0x00,0x01,0x10,0x11,0x2};
172 /****************************************************************************
173 Describe how platform maps MemClk pins to logical DIMMs. The MemClk pins
174 are identified based on BKDG definition of Fn2x88[MemClkDis] bitmap.
175 AGESA will base on this value to disable unused MemClk to save power.
177 If the MEMCLK_MAPPING table contains all zeroes, AGESA will use the
178 default MemClkDis setting based on package type.
181 BKDG definition of Fn2x88[MemClkDis] bitmap for AM3 package is like below:
182 Bit AM3/S1g3 pin name
192 And platform has the following routing:
193 CS0 M[B,A]_CLK_H/L[4]
194 CS1 M[B,A]_CLK_H/L[2]
195 CS2 M[B,A]_CLK_H/L[3]
196 CS3 M[B,A]_CLK_H/L[5]
199 ; CS0 CS1 CS2 CS3 CS4 CS5 CS6 CS7
200 MEMCLK_MAPPING EQU 00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
203 /* Note: If you are not sure about the pin mappings at the initial stage, you don't have to disable MemClk.
204 * Set entries in the tables all 0xFF. */
/* Per-package MemClkDis masks, one entry per chip select (CS0..CS7),
 * per the Fn2x88[MemClkDis] bitmap description above. 0xFF / all-set
 * entries leave the corresponding MemClk enabled (no disable). */
205 static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
206 static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
207 static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
208 static const u8 Tab_ManualCLKDis[]= {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};
/* NOTE(review): presumably phy compensation slew-rate codes terminated by
 * 0xFF (see InitPhyCompensation declaration above) — confirm against BKDG. */
210 static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
211 static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
212 static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
213 static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};
/* Top-level DDR3 memory init sequence: per-node setup (device handles,
 * DCT init), then DCT sync, HT memory map, CPU/UMA memory typing, DQS
 * training, other-timing programming, interleaving, ECC and mem-clear.
 * Dies via fatalexit on any SC_FatalErr from a node's DCT init. */
215 static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
216 struct DCTStatStruc *pDCTstatA)
219 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
220 * options). It is the responsibility of PCI subsystem to create an uncacheable
221 * IO region below 4GB and to adjust TOP_MEM downward prior to any IO mapping or
222 * accesses. It is the same responsibility of the CPU sub-system prior to
225 * Slot Number is an external convention, and is determined by OEM with accompanying
226 * silk screening. OEM may choose to use Slot number convention which is consistent
227 * with DIMM number conventions. All AMD engineering platforms do.
229 * Build Requirements:
230 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
231 * defined in mcti.inc.
233 * Run-Time Requirements:
234 * 1. Complete Hypertransport Bus Configuration
235 * 2. SMBus Controller Initialized
236 * 1. BSP in Big Real Mode
237 * 2. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
238 * 3. Checksummed or Valid NVRAM bits
239 * 4. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
240 * 5. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
241 * 6. All var MTRRs reset to zero
242 * 7. State of NB_CFG.DisDatMsk set properly on all CPUs
243 * 8. All CPUs at 2Ghz Speed (unless DQS training is not installed).
244 * 9. All cHT links at max Speed/Width (unless DQS training is not installed).
247 * Global relationship between index values and item values:
249 * pDCTstat.CASL pDCTstat.Speed
251 * --------------------------
265 mctInitMemGPIOs_A_D(); /* Set any required GPIOs*/
/* Per-node pass: record PCI device handles and run DCT init. */
268 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
269 struct DCTStatStruc *pDCTstat;
270 pDCTstat = pDCTstatA + Node;
271 pDCTstat->Node_ID = Node;
272 pDCTstat->dev_host = PA_HOST(Node);
273 pDCTstat->dev_map = PA_MAP(Node);
274 pDCTstat->dev_dct = PA_DCT(Node);
275 pDCTstat->dev_nbmisc = PA_NBMISC(Node);
276 pDCTstat->NodeSysBase = node_sys_base;
278 mct_init(pMCTstat, pDCTstat);
279 mctNodeIDDebugPort_D();
280 pDCTstat->NodePresent = NodePresent_D(Node);
281 if (pDCTstat->NodePresent) { /* See if Node is there*/
282 clear_legacy_Mode(pMCTstat, pDCTstat);
283 pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);
285 mct_InitialMCT_D(pMCTstat, pDCTstat);
287 mctSMBhub_Init(Node); /* Switch SMBUS crossbar to proper node*/
289 mct_initDCT(pMCTstat, pDCTstat);
290 if (pDCTstat->ErrCode == SC_FatalErr) {
291 goto fatalexit; /* any fatal errors?*/
292 } else if (pDCTstat->ErrCode < SC_StopError) {
295 } /* if Node present */
/* Next node's base = this node's limit, rounded up (16-unit aligned). */
296 node_sys_base = pDCTstat->NodeSysBase;
297 node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
299 if (NodesWmem == 0) {
300 printk(BIOS_DEBUG, "No Nodes?!\n");
304 printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
305 SyncDCTsReady_D(pMCTstat, pDCTstatA); /* Make sure DCTs are ready for accesses.*/
307 printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
308 HTMemMapInit_D(pMCTstat, pDCTstatA); /* Map local memory into system address space.*/
311 printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
312 CPUMemTyping_D(pMCTstat, pDCTstatA); /* Map dram into WB/UC CPU cacheability */
313 mctHookAfterCPU(); /* Setup external northbridge(s) */
315 printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
316 DQSTiming_D(pMCTstat, pDCTstatA); /* Get Receiver Enable and DQS signal timing*/
318 printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
319 UMAMemTyping_D(pMCTstat, pDCTstatA); /* Fix up for UMA sizing */
321 printk(BIOS_DEBUG, "mctAutoInitMCT_D: :OtherTiming\n");
322 mct_OtherTiming(pMCTstat, pDCTstatA);
324 if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
328 InterleaveNodes_D(pMCTstat, pDCTstatA);
329 InterleaveChannels_D(pMCTstat, pDCTstatA);
331 printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
332 if (ECCInit_D(pMCTstat, pDCTstatA)) { /* Setup ECC control and ECC check-bits*/
333 printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
334 MCTMemClr_D(pMCTstat,pDCTstatA);
337 mct_FinalMCT_D(pMCTstat, (pDCTstatA + 0) ); /* Node 0 */
338 printk(BIOS_DEBUG, "All Done\n");
342 die("mct_d: fatalexit");
/* Handle DIMM-spare reconfiguration (NV_CS_SpareCTL enabled). In the
 * no-warm-reset flavor it reloads DQS timing registers on the second
 * pass, or resets the data structures and sets GSB_EnDIMMSpareNW on the
 * first pass. Caller treats a nonzero return as a request for RESET#. */
345 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
346 struct DCTStatStruc *pDCTstatA)
350 if (mctGet_NVbits(NV_CS_SpareCTL)) {
351 if (MCT_DIMM_SPARE_NO_WARM) {
352 /* Do not warm-reset DIMM spare */
353 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
354 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
357 mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
358 pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
362 /* Do warm-reset DIMM spare */
363 if (mctGet_NVbits(NV_DQSTrainCTL))
/* Run DQS/receiver-enable training for all nodes when NV_DQSTrainCTL
 * is set (write levelization, receiver enable, DQS position), then the
 * memory-clear pass; otherwise restore saved DQS timing values into the
 * registers and memory-clear. Skipped entirely on the DIMM-spare
 * no-warm-reset second pass (GSB_EnDIMMSpareNW). */
374 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
375 struct DCTStatStruc *pDCTstatA)
379 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
383 nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
384 /* FIXME: BOZO- DQS training every time*/
387 mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
388 phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);
390 if (nv_DQSTrainCTL) {
391 mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
392 /* TODO: should be in mctHookBeforeAnyTraining */
/* NOTE(review): MSRs 0x26C-0x26F written with fixed pattern here —
 * presumably fixed-range MTRR setup for training; confirm against BKDG. */
393 _WRMSR(0x26C, 0x04040404, 0x04040404);
394 _WRMSR(0x26D, 0x04040404, 0x04040404);
395 _WRMSR(0x26E, 0x04040404, 0x04040404);
396 _WRMSR(0x26F, 0x04040404, 0x04040404);
397 mct_WriteLevelization_HW(pMCTstat, pDCTstatA);
399 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
401 mct_TrainDQSPos_D(pMCTstat, pDCTstatA);
403 /* Second Pass never used for Barcelona! */
404 /* TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass); */
406 mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);
408 /* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
409 mctHookAfterAnyTraining();
410 mctSaveDQSSigTmg_D();
412 MCTMemClr_D(pMCTstat, pDCTstatA);
414 mctGetDQSSigTmg_D(); /* get values into data structure */
415 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
416 /* mctDoWarmResetMemClr_D(); */
417 MCTMemClr_D(pMCTstat, pDCTstatA);
/* Load previously-obtained DQS signal timing values from the per-node
 * data structure into the DCT index registers (0x98 + 0x100*channel):
 * receiver-enable delays, write-levelization TxDqs values, read/write
 * data timing per DIMM, and finally MaxRdLatency in reg 0x78. */
421 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
422 struct DCTStatStruc *pDCTstatA)
424 u8 Node, Receiver, Channel, Dir, DIMM;
433 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
434 struct DCTStatStruc *pDCTstat;
435 pDCTstat = pDCTstatA + Node;
437 if (pDCTstat->DCTSysLimit) {
438 dev = pDCTstat->dev_dct;
439 for (Channel = 0;Channel < 2; Channel++) {
440 /* there are four receiver pairs,
441 loosely associated with chipselects.*/
442 index_reg = 0x98 + Channel * 0x100;
443 for (Receiver = 0; Receiver < 8; Receiver += 2) {
444 /* Set Receiver Enable Values */
445 mct_SetRcvrEnDly_D(pDCTstat,
447 1, /* FinalValue, From stack */
451 (Receiver >> 1) * 3 + 0x10, /* Addl_Index */
452 2); /* Pass Second Pass ? */
453 /* Restore Write levelization training data */
454 for (ByteLane = 0; ByteLane < 9; ByteLane ++) {
455 txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
456 index = Table_DQSRcvEn_Offset[ByteLane >> 1];
457 index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
458 val = Get_NB32_index_wait(dev, 0x98 + 0x100*Channel, index);
459 if (ByteLane & 1) { /* odd byte lane */
460 val &= ~(0xFF << 16);
466 Set_NB32_index_wait(dev, 0x98 + 0x100*Channel, index, val);
470 for (Channel = 0; Channel<2; Channel++) {
471 SetEccDQSRcvrEn_D(pDCTstat, Channel);
474 for (Channel = 0; Channel < 2; Channel++) {
476 index_reg = 0x98 + Channel * 0x100;
479 * when 400, 533, 667, it will support dimm0/1/2/3,
480 * and set conf for dimm0, hw will copy to dimm1/2/3
481 * set for dimm1, hw will copy to dimm3
482 * Rev A/B only support DIMM0/1 when 800Mhz and above
483 * + 0x100 to next dimm
484 * Rev C support DIMM0/1/2/3 when 800Mhz and above
485 * + 0x100 to next dimm
487 for (DIMM = 0; DIMM < 4; DIMM++) {
489 index = 0; /* CHA Write Data Timing Low */
491 if (pDCTstat->Speed >= 4) {
492 index = 0x100 * DIMM;
497 for (Dir = 0; Dir < 2; Dir++) {/* RD/WR */
498 p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
499 val = stream_to_int(p); /* CHA Read Data Timing High */
500 Set_NB32_index_wait(dev, index_reg, index+1, val);
501 val = stream_to_int(p+4); /* CHA Write Data Timing High */
502 Set_NB32_index_wait(dev, index_reg, index+2, val);
503 val = *(p+8); /* CHA Write ECC Timing */
504 Set_NB32_index_wait(dev, index_reg, index+3, val);
510 for (Channel = 0; Channel<2; Channel++) {
511 reg = 0x78 + Channel * 0x100;
512 val = Get_NB32(dev, reg);
514 val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
515 val &= ~(1<<DqsRcvEnTrain);
516 Set_NB32(dev, reg, val); /* program MaxRdLatency to correspond with current delay*/
/* Build the HyperTransport DRAM address map: walk all nodes, assign each
 * node a contiguous [base, limit] range, carve out the MMIO hole below
 * 4GB (hardware hole via F2xF0 DramHoleValid, or software hole), program
 * DRAM Base/Limit registers (0x40/0x44 + Node*8) on node 0, then copy
 * the map to all other present nodes and to F1x120/124. */
522 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
523 struct DCTStatStruc *pDCTstatA)
526 u32 NextBase, BottomIO;
527 u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
528 u32 HoleSize, DramSelBaseAddr;
534 struct DCTStatStruc *pDCTstat;
536 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
538 if (pMCTstat->HoleBase == 0) {
539 DramHoleBase = mctGet_NVbits(NV_BottomIO);
541 DramHoleBase = pMCTstat->HoleBase >> (24-8);
/* Addresses here are right-justified to [39:8] granularity (RJ8). */
544 BottomIO = DramHoleBase << (24-8);
547 pDCTstat = pDCTstatA + 0;
548 dev = pDCTstat->dev_map;
550 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
551 pDCTstat = pDCTstatA + Node;
552 devx = pDCTstat->dev_map;
554 pDCTstat = pDCTstatA + Node; /* ??? */
555 if (!pDCTstat->GangedMode) {
556 DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
557 /*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
558 val = pDCTstat->NodeSysLimit;
559 if ((val & 0xFF) == 0xFE) {
563 pDCTstat->DCTSysLimit = val;
566 base = pDCTstat->DCTSysBase;
567 limit = pDCTstat->DCTSysLimit;
571 DramSelBaseAddr += NextBase;
572 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);
575 if ((base < BottomIO) && (limit >= BottomIO)) {
/* This node's range straddles BottomIO: insert the hardware hole. */
577 pDCTstat->Status |= 1 << SB_HWHole;
578 pMCTstat->GStatus |= 1 << GSB_HWHole;
579 pDCTstat->DCTSysBase = base;
580 pDCTstat->DCTSysLimit = limit;
581 pDCTstat->DCTHoleBase = BottomIO;
582 pMCTstat->HoleBase = BottomIO;
583 HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
584 if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
585 base = DramSelBaseAddr;
586 val = ((base + HoleSize) >> (24-8)) & 0xFF;
587 DramHoleOffset = val;
588 val <<= 8; /* shl 16, rol 24 */
589 val |= DramHoleBase << 24;
590 val |= 1 << DramHoleValid;
591 Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
592 pDCTstat->DCTSysLimit += HoleSize;
593 base = pDCTstat->DCTSysBase;
594 limit = pDCTstat->DCTSysLimit;
595 } else if (base == BottomIO) {
/* Node starts exactly at the hole: use a software (remap) hole. */
597 pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
598 pDCTstat->Status |= 1<<SB_SWNodeHole;
599 pMCTstat->GStatus |= 1<<GSB_SoftHole;
600 pMCTstat->HoleBase = base;
604 pDCTstat->DCTSysBase = base;
605 pDCTstat->DCTSysLimit = limit;
607 /* No Remapping. Normal Contiguous mapping */
608 pDCTstat->DCTSysBase = base;
609 pDCTstat->DCTSysLimit = limit;
612 /*No Remapping. Normal Contiguous mapping*/
613 pDCTstat->DCTSysBase = base;
614 pDCTstat->DCTSysLimit = limit;
616 base |= 3; /* set WE,RE fields*/
617 pMCTstat->SysLimit = limit;
619 Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */
621 val = limit & 0xFFFF0000;
623 Set_NB32(dev, 0x44 + (Node << 3), val); /* set DstNode */
625 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x \n", Node, base, limit);
626 limit = pDCTstat->DCTSysLimit;
628 NextBase = (limit & 0xFFFF0000) + 0x10000;
632 /* Copy dram map from Node 0 to Node 1-7 */
633 for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
635 pDCTstat = pDCTstatA + Node;
636 devx = pDCTstat->dev_map;
638 if (pDCTstat->NodePresent) {
639 reg = 0x40; /*Dram Base 0*/
641 val = Get_NB32(dev, reg);
642 Set_NB32(devx, reg, val);
644 } while ( reg < 0x80);
646 break; /* stop at first absent Node */
650 /*Copy dram map to F1x120/124*/
651 mct_HTMemMapExt(pMCTstat, pDCTstatA);
654 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
655 struct DCTStatStruc *pDCTstatA)
658 /* Initiates a memory clear operation for all nodes. The mem clr
659 * is done in parallel. After the memclr is complete, all processors
660 * status are checked to ensure that memclr has completed.
663 struct DCTStatStruc *pDCTstat;
665 if (!mctGet_NVbits(NV_DQSTrainCTL)){
666 /* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
667 } else { /* NV_DQSTrainCTL == 1 */
/* First loop starts the clear on every present node... */
668 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
669 pDCTstat = pDCTstatA + Node;
671 if (pDCTstat->NodePresent) {
672 DCTMemClr_Init_D(pMCTstat, pDCTstat);
/* ...second loop then waits for each node's clear to finish. */
675 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
676 pDCTstat = pDCTstatA + Node;
678 if (pDCTstat->NodePresent) {
679 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
685 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
686 struct DCTStatStruc *pDCTstat)
692 /* Initiates a memory clear operation on one node: waits for any
693 * in-progress clear (MemClrBusy) to drop, then sets MemClrInit. */
693 if (pDCTstat->DCTSysLimit) {
694 dev = pDCTstat->dev_dct;
698 val = Get_NB32(dev, reg);
699 } while (val & (1 << MemClrBusy));
701 val |= (1 << MemClrInit);
702 Set_NB32(dev, reg, val);
706 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
707 struct DCTStatStruc *pDCTstatA)
709 /* Ensures that memory clear has completed on all nodes.*/
711 struct DCTStatStruc *pDCTstat;
713 if (!mctGet_NVbits(NV_DQSTrainCTL)){
714 /* callback to wrapper: mctDoWarmResetMemClr_D */
715 } else { /* NV_DQSTrainCTL == 1 */
716 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
717 pDCTstat = pDCTstatA + Node;
719 if (pDCTstat->NodePresent) {
720 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
726 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
727 struct DCTStatStruc *pDCTstat)
730 u32 dev = pDCTstat->dev_dct;
733 /* Ensure that a memory clear operation has completed on one node:
 * poll MemClrBusy clear, then Dr_MemClrStatus set; finally program
 * reg 0x11C with the BKDG-recommended value plus FlushWrOnStpGnt. */
734 if (pDCTstat->DCTSysLimit){
738 val = Get_NB32(dev, reg);
739 } while (val & (1 << MemClrBusy));
742 val = Get_NB32(dev, reg);
743 } while (!(val & (1 << Dr_MemClrStatus)));
746 val = 0x0FE40FC0; /* BKDG recommended */
747 val |= MCCH_FlushWrOnStpGnt; /* Set for S3 */
748 Set_NB32(dev, 0x11C, val);
751 static u8 NodePresent_D(u8 Node)
754 * Determine if a single Hammer Node exists within the network:
 * compare the host bridge's vendor/device ID word against the
 * expected value from mct_NodePresent_D(), then check the Node ID
 * register (0x60). oemNodePresent_D() may override the result.
761 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
762 val = Get_NB32(dev, 0);
763 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
764 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
765 if (oemNodePresent_D(Node, &ret))
767 /* Node ID register */
768 val = Get_NB32(dev, 0x60);
771 if (val == dword) /* current nodeID = requested nodeID ? */
778 static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
781 * Initialize DRAM on single Athlon 64/Opteron Node: clear the DCT
 * registers, enable DDR3 mode, then run the SPD-driven pipeline
 * (DIMM presence -> SPD width -> cycle timing -> auto config ->
 * platform spec -> startup). Any stage returning >= SC_StopError
 * falls through to disabling the DRAM interface for this DCT.
786 ClearDCT_D(pMCTstat, pDCTstat, dct);
787 stopDCTflag = 1; /*preload flag with 'disable' */
788 /* enable DDR3 support */
789 val = Get_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100);
790 val |= 1 << Ddr3Mode;
791 Set_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100, val);
792 if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
793 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
794 if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
795 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
796 if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
797 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
798 if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
799 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
800 if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
801 printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
803 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
804 printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
805 StartupDCT_D(pMCTstat, pDCTstat, dct); /*yeaahhh! */
/* Error path: disable the DRAM interface for this DCT. */
814 u32 reg_off = dct * 0x100;
815 val = 1<<DisDramInterface;
816 Set_NB32(pDCTstat->dev_dct, reg_off+0x94, val);
817 /*To maximize power savings when DisDramInterface=1b,
818 all of the MemClkDis bits should also be set.*/
820 Set_NB32(pDCTstat->dev_dct, reg_off+0x88, val);
822 /* mct_EnDllShutdownSR */
826 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
827 struct DCTStatStruc *pDCTstatA)
829 /* Wait (and block further access to dram) for all DCTs to be ready,
830 * by polling all InitDram bits and waiting for possible memory clear
831 * operations to be complete. Read MemClkFreqVal bit to see if
832 * the DIMMs are present in this node.
837 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
838 struct DCTStatStruc *pDCTstat;
839 pDCTstat = pDCTstatA + Node;
840 mct_SyncDCTsReady(pDCTstat);
843 /* re-enable phy compensation engine when dram init is completed on all nodes. */
844 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
845 struct DCTStatStruc *pDCTstat;
846 pDCTstat = pDCTstatA + Node;
847 if (pDCTstat->NodePresent) {
848 if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
849 /* re-enable phy compensation engine when dram init on both DCTs is completed. */
850 val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
851 val &= ~(1 << DisAutoComp);
852 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
856 /* wait 750us before any memory access can be made. */
860 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
861 struct DCTStatStruc *pDCTstat, u8 dct)
863 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
864 * If the DIMMs are present then set the DRAM Enable bit for this node.
866 * Setting dram init starts up the DCT state machine, initializes the
867 * dram devices with MRS commands, and kicks off any
868 * HW memory clear process that the chip is capable of. The sooner
869 * that dram init is set for all nodes, the faster the memory system
870 * initialization can complete. Thus, the init loop is unrolled into
871 * two loops so as to start the processes for non BSP nodes sooner.
872 * This procedure will not wait for the process to finish.
873 * Synchronization is handled elsewhere.
877 u32 reg_off = dct * 0x100;
879 dev = pDCTstat->dev_dct;
880 val = Get_NB32(dev, 0x94 + reg_off);
881 if (val & (1<<MemClkFreqVal)) {
882 mctHookBeforeDramInit(); /* generalized Hook */
/* Skip the actual init on the DIMM-spare no-warm-reset second pass. */
883 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
884 mct_DramInit(pMCTstat, pDCTstat, dct);
885 AfterDramInit_D(pDCTstat, dct);
886 mctHookAfterDramInit(); /* generalized Hook*/
/* Zero this DCT's register block (0x40 up to 0x78 or 0xA4, depending on
 * whether the DIMM-spare no-warm-reset pass is active), then clear one
 * register in the dev_map function as well. */
890 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
891 struct DCTStatStruc *pDCTstat, u8 dct)
894 u32 dev = pDCTstat->dev_dct;
895 u32 reg = 0x40 + 0x100 * dct;
898 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
899 reg_end = 0x78 + 0x100 * dct;
901 reg_end = 0xA4 + 0x100 * dct;
904 while(reg < reg_end) {
905 Set_NB32(dev, reg, val);
910 dev = pDCTstat->dev_map;
912 Set_NB32(dev, reg, val);
/* Compute secondary DRAM cycle timings from SPD data: gather worst-case
 * (min/max) values across all valid DIMMs on this DCT, convert each from
 * SPD time units to bus clocks (tCK16x fixed-point, x16 for 0.25 ns
 * resolution), clamp to the controller's Min_*/Max_* limits, then pack
 * the results into the DramTimingLo/Hi registers (0x88/0x8C) plus the
 * Twr field (0x84) and Tfaw field (0x94). */
915 static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
916 struct DCTStatStruc *pDCTstat, u8 dct)
920 u16 Trp, Trrd, Trcd, Tras, Trc;
923 u32 DramTimingLo, DramTimingHi;
935 /* Gather all DIMM mini-max values for cycle timing data */
944 for (i=0; i < 4; i++)
948 for ( i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
950 if (pDCTstat->DIMMValid & (1 << i)) {
951 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
/* MTB16x is the SPD medium timebase scaled by 16 (0.25 ns units). */
953 val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB=Dividend/Divisor */
954 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
955 MTB16x /= val; /* transfer to MTB*16 */
957 byte = mctRead_SPD(smbaddr, SPD_tRPmin);
962 byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
967 byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
972 byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
977 byte = mctRead_SPD(smbaddr, SPD_tWRmin);
982 byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
/* tRAS and tRC carry upper nibbles in a shared SPD byte. */
987 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
990 val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
995 byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
996 if (Trfc[LDIMM] < byte)
999 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
1001 val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
1006 val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
1008 val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
1012 } /* Dimm Present */
1015 /* Convert DRAM CycleTiming values and store into DCT structure */
1016 byte = pDCTstat->DIMMAutoSpeed;
1027 1. All secondary time values given in SPDs are in binary with units of ns.
1028 2. Some time values are scaled by 16, in order to have least count of 0.25 ns
1029 (more accuracy). JEDEC SPD spec. shows which ones are x1 and x4.
1030 3. Internally to this SW, cycle time, tCK16x, is scaled by 16 to match time values
/* Each block below: divide by tCK16x, round up on remainder, clamp. */
1034 pDCTstat->DIMMTras = (u16)Tras;
1035 val = Tras / tCK16x;
1036 if (Tras % tCK16x) { /* round up number of busclocks */
1039 if (val < Min_TrasT)
1041 else if (val > Max_TrasT)
1043 pDCTstat->Tras = val;
1046 pDCTstat->DIMMTrp = Trp;
1048 if (Trp % tCK16x) { /* round up number of busclocks */
1053 else if (val > Max_TrpT)
1055 pDCTstat->Trp = val;
1058 pDCTstat->DIMMTrrd = Trrd;
1059 val = Trrd / tCK16x;
1060 if (Trrd % tCK16x) { /* round up number of busclocks */
1063 if (val < Min_TrrdT)
1065 else if (val > Max_TrrdT)
1067 pDCTstat->Trrd = val;
1070 pDCTstat->DIMMTrcd = Trcd;
1071 val = Trcd / tCK16x;
1072 if (Trcd % tCK16x) { /* round up number of busclocks */
1075 if (val < Min_TrcdT)
1077 else if (val > Max_TrcdT)
1079 pDCTstat->Trcd = val;
1082 pDCTstat->DIMMTrc = Trc;
1084 if (Trc % tCK16x) { /* round up number of busclocks */
1089 else if (val > Max_TrcT)
1091 pDCTstat->Trc = val;
1094 pDCTstat->DIMMTrtp = Trtp;
1095 val = Trtp / tCK16x;
1096 if (Trtp % tCK16x) {
1099 if (val < Min_TrtpT)
1101 else if (val > Max_TrtpT)
1103 pDCTstat->Trtp = val;
1106 pDCTstat->DIMMTwr = Twr;
1108 if (Twr % tCK16x) { /* round up number of busclocks */
1113 else if (val > Max_TwrT)
1115 pDCTstat->Twr = val;
1118 pDCTstat->DIMMTwtr = Twtr;
1119 val = Twtr / tCK16x;
1120 if (Twtr % tCK16x) { /* round up number of busclocks */
1123 if (val < Min_TwtrT)
1125 else if (val > Max_TwtrT)
1127 pDCTstat->Twtr = val;
1131 pDCTstat->Trfc[i] = Trfc[i];
1134 pDCTstat->DIMMTfaw = Tfaw;
1135 val = Tfaw / tCK16x;
1136 if (Tfaw % tCK16x) { /* round up number of busclocks */
1139 if (val < Min_TfawT)
1141 else if (val > Max_TfawT)
1143 pDCTstat->Tfaw = val;
1145 mctAdjustAutoCycTmg_D();
1147 /* Program DRAM Timing values */
1148 DramTimingLo = 0; /* Dram Timing Low init */
1149 val = pDCTstat->CASL - 2; /* pDCTstat.CASL to reg. definition */
1150 DramTimingLo |= val;
1152 val = pDCTstat->Trcd - Bias_TrcdT;
1153 DramTimingLo |= val<<4;
1155 val = pDCTstat->Trp - Bias_TrpT;
1156 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1157 DramTimingLo |= val<<7;
1159 val = pDCTstat->Trtp - Bias_TrtpT;
1160 DramTimingLo |= val<<10;
1162 val = pDCTstat->Tras - Bias_TrasT;
1163 DramTimingLo |= val<<12;
1165 val = pDCTstat->Trc - Bias_TrcT;
1166 DramTimingLo |= val<<16;
1168 val = pDCTstat->Trrd - Bias_TrrdT;
1169 DramTimingLo |= val<<22;
1171 DramTimingHi = 0; /* Dram Timing High init */
1172 val = pDCTstat->Twtr - Bias_TwtrT;
1173 DramTimingHi |= val<<8;
1176 DramTimingHi |= val<<16;
1183 DramTimingHi |= val << 20;
1185 dev = pDCTstat->dev_dct;
1186 reg_off = 0x100 * dct;
1188 val = pDCTstat->Twr;
1193 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1196 dword = Get_NB32(dev, 0x84 + reg_off);
1199 Set_NB32(dev, 0x84 + reg_off, dword);
1202 val = pDCTstat->Tfaw;
1203 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1207 dword = Get_NB32(dev, 0x94 + reg_off);
1208 dword &= ~0xf0000000;
1210 Set_NB32(dev, 0x94 + reg_off, dword);
1212 /* dev = pDCTstat->dev_dct; */
1213 /* reg_off = 0x100 * dct; */
1215 if (pDCTstat->Speed > 4) {
1216 val = Get_NB32(dev, 0x88 + reg_off);
1218 DramTimingLo |= val;
1220 Set_NB32(dev, 0x88 + reg_off, DramTimingLo); /*DCT Timing Low*/
1222 if (pDCTstat->Speed > 4) {
1223 DramTimingLo |= 1 << DisAutoRefresh;
1225 DramTimingHi |= 0x000018FF;
1226 Set_NB32(dev, 0x8c + reg_off, DramTimingHi); /*DCT Timing Hi*/
1228 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1231 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
1232 struct DCTStatStruc *pDCTstat, u8 dct)
/* Determine DRAM cycle timing (memclock T and CAS latency CL) for one DCT
 * from DIMM SPD data, clamped by platform/silicon presets, then derive the
 * secondary timings via SPD2ndTiming(). Returns pDCTstat->ErrCode.
 * NOTE(review): several original lines are elided in this view — confirm
 * exact control flow against the full file. */
1234 /* Initialize DCT Timing registers as per DIMM SPD.
1235 * For primary timing (T, CL) use best case T value.
1236 * For secondary timing params., use most aggressive settings
1239 * There are three components to determining "maximum frequency":
1240 * SPD component, Bus load component, and "Preset" max frequency
1243 * The SPD component is a function of the min cycle time specified
1244 * by each DIMM, and the interaction of cycle times from all DIMMs
1245 * in conjunction with CAS latency. The SPD component only applies
1246 * when user timing mode is 'Auto'.
1248 * The Bus load component is a limiting factor determined by electrical
1249 * characteristics on the bus as a result of varying number of device
1250 * loads. The Bus load component is specific to each platform but may
1251 * also be a function of other factors. The bus load component only
1252 * applies when user timing mode is 'Auto'.
1254 * The Preset component is subdivided into three items and is
1255 * the minimum of the set: Silicon revision, user limit
1256 * setting when user timing mode is 'Auto' and memclock mode
1257 * is 'Limit', OEM build specification of the maximum
1258 * frequency. The Preset component is only applies when user
1259 * timing mode is 'Auto'.
1262 /* Get primary timing (CAS Latency and Cycle Time) */
/* Speed == 0 means the memclock has not been chosen yet; run the full
 * auto-selection path (bus-load limit, presets, then SPD-derived T/CL). */
1263 if (pDCTstat->Speed == 0) {
1264 mctGet_MaxLoadFreq(pDCTstat);
1266 /* and Factor in presets (setup options, Si cap, etc.) */
1267 GetPresetmaxF_D(pMCTstat, pDCTstat);
1269 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
1270 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
1271 /* skip callback mctForce800to1067_D */
/* Publish the auto-selected speed/CL as the operating values. */
1272 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
1273 pDCTstat->CASL = pDCTstat->DIMMCASL;
1276 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
/* Convert SPD secondary timings (Tras, Trp, Trcd, ...) into bus clocks. */
1278 SPD2ndTiming(pMCTstat, pDCTstat, dct);
1280 printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
1281 printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
1282 printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
1283 printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");
1285 mctHookAfterAutoCycTmg();
1287 return pDCTstat->ErrCode;
1290 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
1291 struct DCTStatStruc *pDCTstat)
/* Compute the "Preset" max memclock component: the minimum of the silicon
 * revision limit, the user "limit"-mode setting, and the platform limit,
 * folded into pDCTstat->PresetmaxFreq. Units are MHz memclock values. */
1293 /* Get max frequency from OEM platform definition, from any user
1294 * override (limiting) of max frequency, and from any Si Revision
1295 * Specific information. Return the least of these three in
1296 * DCTStatStruc.PresetmaxFreq.
1301 /* Get CPU Si Revision defined limit (NPT) */
1302 proposedFreq = 533; /* Rev F0 programmable max memclock is */
1304 /*Get User defined limit if "limit" mode */
1305 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
/* Get_Fk_D maps the NV memclock index (+1) to a frequency in MHz. */
1306 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
1307 if (word < proposedFreq)
1308 proposedFreq = word;
1310 /* Get Platform defined limit */
1311 word = mctGet_NVbits(NV_MAX_MEMCLK);
1312 if (word < proposedFreq)
1313 proposedFreq = word;
/* Only lower the existing preset, never raise it. */
1315 word = pDCTstat->PresetmaxFreq;
1316 if (word > proposedFreq)
1317 word = proposedFreq;
1319 pDCTstat->PresetmaxFreq = word;
1321 /* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
1324 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
1325 struct DCTStatStruc *pDCTstat, u8 dct)
/* Select the best (T, CL) primary timing pair for the channel, following
 * the JEDEC DDR3 SPD algorithm (steps 1-6 below), and store the result in
 * pDCTstat->DIMMAutoSpeed / DIMMCASL.  Time quantities suffixed "16x" are
 * scaled by 16 so 1/4-ns SPD resolution stays in integer arithmetic.
 * NOTE(review): some lines of this function are elided in this view
 * (e.g. the CASLatLow/High accumulation after the SPD_CASLow/High reads);
 * verify the full file before changing it. */
1327 /* Find the best T and CL primary timing parameter pair, per Mfg.,
1328 * for the given set of DIMMs, and store into DCTStatStruc
1329 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
1330 * index values and item values" for definition of CAS latency
1331 * index (j) and Frequency index (k).
1333 u8 i, CASLatLow, CASLatHigh;
1338 u8 CLactual, CLdesired, CLT_Fail;
1340 u8 smbaddr, byte, bytex;
/* Scan every valid DIMM on the channel, accumulating the worst-case
 * (largest) tAAmin and tCKmin and the intersection of supported CLs. */
1348 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
1349 if (pDCTstat->DIMMValid & (1 << i)) {
1350 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
1351 /* Step 1: Determine the common set of supported CAS Latency
1352 * values for all modules on the memory channel using the CAS
1353 * Latencies Supported in SPD bytes 14 and 15.
1355 byte = mctRead_SPD(smbaddr, SPD_CASLow);
1357 byte = mctRead_SPD(smbaddr, SPD_CASHigh);
1359 /* Step 2: Determine tAAmin(all) which is the largest tAAmin
1360 value for all modules on the memory channel (SPD byte 16). */
/* MTB (medium timebase) = dividend/divisor ns, kept scaled by 16. */
1361 byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);
1363 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
1364 MTB16x /= byte; /* transfer to MTB*16 */
1366 byte = mctRead_SPD(smbaddr, SPD_tAAmin);
1367 if (tAAmin16x < byte * MTB16x)
1368 tAAmin16x = byte * MTB16x;
1369 /* Step 3: Determine tCKmin(all) which is the largest tCKmin
1370 value for all modules on the memory channel (SPD byte 12). */
1371 byte = mctRead_SPD(smbaddr, SPD_tCKmin);
1373 if (tCKmin16x < byte * MTB16x)
1374 tCKmin16x = byte * MTB16x;
1377 /* calculate tCKproposed16x */
/* Start from the preset max frequency (16000/f == tCK*16 in 1/16 ns)
 * and never go faster than the slowest DIMM allows. */
1378 tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
1379 if (tCKmin16x > tCKproposed16x)
1380 tCKproposed16x = tCKmin16x;
1382 /* mctHookTwo1333DimmOverride(); */
1383 /* For UDIMM, if there are two DDR3-1333 on the same channel,
1384 downgrade DDR speed to 1066. */
1386 /* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
/* Snap tCK to a standard JEDEC grade and record the frequency index:
 * 20 -> 1.25ns (DDR3-1600), 24 -> 1.5ns (1333), 30 -> 1.875ns (1066),
 * 40 -> 2.5ns (800). TargetFreq is the MCT frequency index. */
1387 if (tCKproposed16x == 20)
1388 pDCTstat->TargetFreq = 7;
1389 else if (tCKproposed16x <= 24) {
1390 pDCTstat->TargetFreq = 6;
1391 tCKproposed16x = 24;
1393 else if (tCKproposed16x <= 30) {
1394 pDCTstat->TargetFreq = 5;
1395 tCKproposed16x = 30;
1398 pDCTstat->TargetFreq = 4;
1399 tCKproposed16x = 40;
1401 /* Running through this loop twice:
1402 - First time find tCL at target frequency
1403 - Second tim find tCL at 400MHz */
1407 /* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
1408 determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
1409 value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
1410 next lower standard tCK value for calculating CLdesired.
1411 CLdesired = ceiling ( tAAmin(all) / tCKproposed )
1412 where tAAmin is defined in Byte 16. The ceiling function requires that the
1413 quotient be rounded up always. */
1414 CLdesired = tAAmin16x / tCKproposed16x;
1415 if (tAAmin16x % tCKproposed16x)
1417 /* Step 5: Chose an actual CAS Latency (CLactual) that is greather than or equal
1418 to CLdesired and is supported by all modules on the memory channel as
1419 determined in step 1. If no such value exists, choose a higher tCKproposed
1420 value and repeat steps 4 and 5 until a solution is found. */
/* Bit i of the combined CAS mask corresponds to CL = i + 4. */
1421 for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
1422 if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
1423 if (CLdesired <= CLactual)
1429 /* Step 6: Once the calculation of CLactual is completed, the BIOS must also
1430 verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
1431 for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
1432 not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
/* 320 == 20 ns scaled by 16 (tAAmax for all DDR3 grades). */
1433 if (CLactual * tCKproposed16x > 320)
/* Register encoding: CL field = CL - 2; byte holds the speed index
 * for the chosen tCK grade (assignments partly elided in this view). */
1437 bytex = CLactual - 2;
1438 if (tCKproposed16x == 20)
1440 else if (tCKproposed16x == 24)
1442 else if (tCKproposed16x == 30)
1447 /* mctHookManualCLOverride */
/* First pass at the target frequency: stash the result (or adopt it
 * directly when DIMM-spare-no-warm is active), then force a second
 * pass at 400 MHz (tCK*16 == 40). */
1451 if (tCKproposed16x != 40) {
1452 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
1453 pDCTstat->DIMMAutoSpeed = byte;
1454 pDCTstat->DIMMCASL = bytex;
1457 pDCTstat->TargetCASL = bytex;
1458 tCKproposed16x = 40;
1461 pDCTstat->DIMMAutoSpeed = byte;
1462 pDCTstat->DIMMCASL = bytex;
1467 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
1468 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);
1470 printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
1471 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
1472 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
1473 printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
1476 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
1477 struct DCTStatStruc *pDCTstat, u8 dct)
/* Apply platform-specific DRAM configuration for one DCT: fetch the
 * platform config, enable 2T command mode in Dram Config Hi (F2x94) when
 * required, run the per-CPU platform hook, and (re)initialize DRAM phy
 * compensation at the 800 MHz speed index. Returns pDCTstat->ErrCode. */
1483 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
/* In ganged mode DCT1 mirrors DCT0, so its platform config is pulled too. */
1485 if (pDCTstat->GangedMode == 1) {
1486 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
1489 if ( pDCTstat->_2Tmode == 2) {
1490 dev = pDCTstat->dev_dct;
1491 reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
1492 val = Get_NB32(dev, reg);
1493 val |= 1 << 20; /* 2T CMD mode */
1494 Set_NB32(dev, reg, val);
1497 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
/* DIMMAutoSpeed == 4 is the DDR3-800 index — see SPDGetTCL_D. */
1498 if (pDCTstat->DIMMAutoSpeed == 4)
1499 InitPhyCompensation(pMCTstat, pDCTstat, dct);
1500 mctHookAfterPSCfg();
1502 return pDCTstat->ErrCode;
1505 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
1506 struct DCTStatStruc *pDCTstat, u8 dct)
/* Build and program the main DRAM controller registers for one DCT:
 * Dram Control (F2x78), Dram Timing Low (F2x88), Dram Config Misc/Misc2
 * (F2xA0/A8), Dram Config Lo/Hi (F2x90/94). Also maps chip-selects
 * (SPDSetBanks_D / StitchMemory_D) first. Returns pDCTstat->ErrCode.
 * NOTE(review): register write ordering here is deliberate — ConfigHi is
 * written last via mct_SetDramConfigHi_D; do not reorder. Several lines
 * are elided in this view. */
1508 u32 DramControl, DramTimingLo, Status;
1509 u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
1520 DramConfigMisc2 = 0;
1522 /* set bank addessing and Masks, plus CS pops */
1523 SPDSetBanks_D(pMCTstat, pDCTstat, dct);
1524 if (pDCTstat->ErrCode == SC_StopError)
1525 goto AutoConfig_exit;
1527 /* map chip-selects into local address space */
1528 StitchMemory_D(pMCTstat, pDCTstat, dct);
1529 InterleaveBanks_D(pMCTstat, pDCTstat, dct);
1531 /* temp image of status (for convenience). RO usage! */
1532 Status = pDCTstat->Status;
1534 dev = pDCTstat->dev_dct;
1535 reg_off = 0x100 * dct;
1538 /* Build Dram Control Register Value */
1539 DramConfigMisc2 = Get_NB32 (dev, 0xA8 + reg_off); /* Dram Control*/
1540 DramControl = Get_NB32 (dev, 0x78 + reg_off); /* Dram Control*/
1542 /* FIXME: Skip mct_checkForDxSupport */
1543 /* REV_CALL mct_DoRdPtrInit if not Dx */
/* RdPtrInit value differs between Bx and Cx+ silicon revisions. */
1544 if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
1548 DramControl &= ~0xFF;
1549 DramControl |= val; /* RdPrtInit = 6 for Cx CPU */
1551 if (mctGet_NVbits(NV_CLKHZAltVidC3))
1552 DramControl |= 1<<16; /* check */
1554 DramControl |= 0x00002A00;
1556 /* FIXME: Skip for Ax versions */
1557 /* callback not required - if (!mctParityControl_D()) */
1558 if (Status & (1 << SB_128bitmode))
1559 DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */
/* Flag each populated x4 DIMM in the X4Dimm[3:0] field. */
1564 if (pDCTstat->Dimmx4Present & (1 << word))
1565 DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
1571 if (!(Status & (1 << SB_Registered)))
1572 DramConfigLo |= 1 << UnBuffDimm; /* Unbufferd DIMMs */
/* ECC is enabled only if the CPU is ECC-capable, all DIMMs carry ECC,
 * and the NV option requests it. */
1574 if (mctGet_NVbits(NV_ECC_CAP))
1575 if (Status & (1 << SB_ECCDIMMs))
1576 if ( mctGet_NVbits(NV_ECC))
1577 DramConfigLo |= 1 << DimmEcEn;
1579 DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);
1581 /* Build Dram Config Hi Register Value */
1582 dword = pDCTstat->Speed;
1583 DramConfigHi |= dword - 1; /* get MemClk encoding */
1584 DramConfigHi |= 1 << MemClkFreqVal;
1586 if (Status & (1 << SB_Registered))
1587 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
1588 /* set only if x8 Registered DIMMs in System*/
1589 DramConfigHi |= 1 << RDqsEn;
1591 if (mctGet_NVbits(NV_CKE_CTL))
1592 /*Chip Select control of CKE*/
1593 DramConfigHi |= 1 << 16;
1595 /* Control Bank Swizzle */
1596 if (0) /* call back not needed mctBankSwizzleControl_D()) */
1597 DramConfigHi &= ~(1 << BankSwizzleMode);
1599 DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */
1601 /* Check for Quadrank DIMM presence */
1602 if ( pDCTstat->DimmQRPresent != 0) {
1603 byte = mctGet_NVbits(NV_4RANKType);
1605 DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
1607 DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
1610 if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
1611 val = mctGet_NVbits(NV_BYPMAX);
1613 val = 0x0f; /* recommended setting (default) */
1614 DramConfigHi |= val << 24;
1616 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Bx))
1617 DramConfigHi |= 1 << DcqArbBypassEn;
1619 /* Build MemClkDis Value from Dram Timing Lo and
1620 Dram Config Misc Registers
1621 1. We will assume that MemClkDis field has been preset prior to this
1623 2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
1624 NV_AllMemClks <>0 AND SB_DiagClks ==0 */
1626 /* Dram Timing Low (owns Clock Enable bits) */
1627 DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
1628 if (mctGet_NVbits(NV_AllMemClks) == 0) {
1629 /* Special Jedec SPD diagnostic bit - "enable all clocks" */
1630 if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
/* Per-package clock-disable lookup table; package type selects the row. */
1633 p = Tab_ManualCLKDis;
1636 byte = mctGet_NVbits(NV_PACK_TYPE);
1639 else if (byte == PT_M2 || byte == PT_AS)
1646 while(dword < MAX_CS_SUPPORTED) {
1647 if (pDCTstat->CSPresent & (1<<dword)){
1648 /* re-enable clocks for the enabled CS */
1654 DramTimingLo |= byte << 24;
1658 printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %x\n", DramControl);
1659 printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %x\n", DramTimingLo);
1660 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %x\n", DramConfigMisc);
1661 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %x\n", DramConfigMisc2);
1662 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %x\n", DramConfigLo);
1663 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %x\n", DramConfigHi);
1665 /* Write Values to the registers */
1666 Set_NB32(dev, 0x78 + reg_off, DramControl);
1667 Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
1668 Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
1669 DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
1670 Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
1671 Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
1672 ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
/* Merge the computed ConfigHi bits into the current register contents
 * before the final (platform-hooked) write. */
1673 dword = Get_NB32(dev, 0x94 + reg_off);
1674 DramConfigHi |= dword;
1675 mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
1676 mct_EarlyArbEn_D(pMCTstat, pDCTstat);
1677 mctHookAfterAutoCfg();
1679 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1681 printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
1682 printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
1683 printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
1684 printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
1686 return pDCTstat->ErrCode;
1689 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
1690 struct DCTStatStruc *pDCTstat, u8 dct)
/* Program bank addressing (F2x80) and CS mask registers (F2x60..6C) from
 * SPD geometry, and build the chip-select population map in
 * pDCTstat->CSPresent. Sets ErrCode = SC_StopError if no CS survives. */
1692 /* Set bank addressing, program Mask values and build a chip-select
1693 * population map. This routine programs PCI 0:24N:2x80 config register
1694 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
1696 u8 ChipSel, Rows, Cols, Ranks, Banks;
1697 u32 BankAddrReg, csMask;
1708 dev = pDCTstat->dev_dct;
1709 reg_off = 0x100 * dct;
/* One DIMM per CS pair: even CS = first rank, odd CS = second rank. */
1712 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
1714 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
1717 if (pDCTstat->DIMMValid & (1<<byte)) {
1718 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
/* Decode geometry from DDR3 SPD bytes (values are offsets from the
 * JEDEC minimums: 12 rows, 9 cols, 3 bank bits). */
1720 byte = mctRead_SPD(smbaddr, SPD_Addressing);
1721 Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
1722 Cols = byte & 0x7; /* Cols:0b=9-bit,... */
1724 byte = mctRead_SPD(smbaddr, SPD_Density);
1725 Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */
1727 byte = mctRead_SPD(smbaddr, SPD_Organization);
1728 Ranks = ((byte >> 3) & 7) + 1;
1730 /* Configure Bank encoding
1731 * Use a 6-bit key into a lookup table.
1732 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
1733 * RRR is the number of Rows minus 12, and B is the number of banks
1740 byte |= Rows << 3; /* RRRBCC internal encode */
1742 for (dword=0; dword < 13; dword++) {
1743 if (byte == Tab_BankAddr[dword])
1750 /* bit no. of CS field in address mapping reg.*/
/* Each CS pair owns a 4-bit field in F2x80. */
1751 dword <<= (ChipSel<<1);
1752 BankAddrReg |= dword;
1754 /* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
1755 or 2pow(rows+cols+banks-5)-1*/
1758 byte = Rows + Cols; /* cl=rows+cols*/
1759 byte += 21; /* row:12+col:9 */
1760 byte -= 2; /* 3 banks - 5 */
1762 if (pDCTstat->Status & (1 << SB_128bitmode))
1763 byte++; /* double mask size if in 128-bit mode*/
1765 csMask |= 1 << byte;
1768 /*set ChipSelect population indicator even bits*/
1769 pDCTstat->CSPresent |= (1<<ChipSel);
1771 /*set ChipSelect population indicator odd bits*/
1772 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
1774 reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
1776 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
1777 Set_NB32(dev, reg, val);
/* Carry forward any SPD checksum failures as CS test failures. */
1779 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
1780 pDCTstat->CSTestFail |= (1<<ChipSel);
1782 } /* while ChipSel*/
1784 SetCSTriState(pMCTstat, pDCTstat, dct);
1785 SetCKETriState(pMCTstat, pDCTstat, dct);
1786 SetODTTriState(pMCTstat, pDCTstat, dct);
/* In 128-bit (ganged) mode DCT1's tri-states must match DCT0's. */
1788 if (pDCTstat->Status & (1 << SB_128bitmode)) {
1789 SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1790 SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1791 SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
/* XOR before/after the exclude-map hook yields the set of chip-selects
 * the hook removed; those become "test fail" so ODT still covers them. */
1794 word = pDCTstat->CSPresent;
1795 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
1796 word ^= pDCTstat->CSPresent;
1797 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
1798 if (!pDCTstat->CSPresent)
1799 pDCTstat->ErrCode = SC_StopError;
1801 reg = 0x80 + reg_off; /* Bank Addressing Register */
1802 Set_NB32(dev, reg, BankAddrReg);
1804 pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
1805 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1807 printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
1808 printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
1809 printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
1810 printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
1811 printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
1814 static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1815 struct DCTStatStruc *pDCTstat)
/* Compare each channel-A/channel-B DIMM pair field-by-field (addressing,
 * density, device width, ranks, module banks); any mismatch sets
 * SB_DimmMismatchO in ErrStatus, which later blocks 128-bit ganged mode.
 * Even DIMM indices are channel A, odd are channel B. */
1817 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1818 * matching with DIMM on Channel B), the overall DIMM population,
1819 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1822 u8 smbaddr, smbaddr1;
1825 /* Check Symmetry of Channel A and Channel B DIMMs
1826 (must be matched for 128-bit mode).*/
1827 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
/* Only compare when both slots of the pair are populated and valid. */
1828 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1829 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1830 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
/* Row/column addressing must match. */
1832 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
1833 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
1834 if (byte != byte1) {
1835 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/* Density must match. */
1839 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
1840 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
1841 if (byte != byte1) {
1842 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/* SDRAM device width (x4/x8/x16) must match. */
1846 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
1847 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
1848 if (byte != byte1) {
1849 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/* Rank count must match. */
1853 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
1854 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
1855 if (byte != byte1) {
1856 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/* Module bank architecture must match. */
1860 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1861 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1862 if (byte != byte1) {
1863 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1872 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1873 struct DCTStatStruc *pDCTstat, u8 dct)
/* Assign contiguous base addresses to all enabled chip-selects (largest
 * bank first), programming the DRAM CS Base registers (F2x40..5C), and
 * record the DCT's address limit in pDCTstat->DCTSysLimit. Also handles
 * the CS-sparing reservation and marks failed chip-selects TestFail.
 * NOTE(review): the inner largest-bank selection is partially elided in
 * this view (assignment of 'b' and BiggestBank); see the full file. */
1875 /* Requires that Mask values for each bank be programmed first and that
1876 * the chip-select population indicator is correctly set.
1879 u32 nxtcsBase, curcsBase;
1881 u32 Sizeq, BiggestBank;
1890 dev = pDCTstat->dev_dct;
1891 reg_off = 0x100 * dct;
1895 /* CS Sparing 1=enabled, 0=disabled */
1896 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1897 if (MCT_DIMM_SPARE_NO_WARM) {
1898 /* Do no warm-reset DIMM spare */
1899 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1900 word = pDCTstat->CSPresent;
1904 /* Make sure at least two chip-selects are available */
1907 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1910 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1911 word = pDCTstat->CSPresent;
1913 word &= ~(1 << val);
1915 /* Make sure at least two chip-selects are available */
1918 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1923 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
/* Outer loop: one placement per iteration; inner loop scans all CS and
 * picks the biggest not-yet-enabled bank to map next. */
1924 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
1926 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1927 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1928 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1929 val = Get_NB32(dev, reg);
1930 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
1931 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1932 val = Get_NB32(dev, reg);
1936 Sizeq = val; /* never used */
1937 if (val > BiggestBank) {
1938 /*Bingo! possibly Map this chip-select next! */
1943 } /*if bank present */
1945 if (BiggestBank !=0) {
1946 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1947 /* DRAM CS Base b Address Register offset */
1948 reg = 0x40 + (b << 2) + reg_off;
1951 val = 1 << Spare; /* Spare Enable*/
1954 val |= 1 << CSEnable; /* Bank Enable */
/* Odd chip-selects on unbuffered mirrored DIMMs get the on-DIMM
 * address-mirror bit. */
1956 if (((reg - 0x40) >> 2) & 1) {
1957 if (!(pDCTstat->Status & (1 << SB_Registered))) {
1959 dimValid = pDCTstat->DIMMValid;
1962 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
1963 val |= 1 << onDimmMirror;
1967 Set_NB32(dev, reg, val);
1971 /* let nxtcsBase+=Size[b] */
1972 nxtcsBase += BiggestBank;
1975 /* bank present but disabled?*/
1976 if ( pDCTstat->CSTestFail & (1 << p)) {
1977 /* DRAM CS Base b Address Register offset */
1978 reg = (p << 2) + 0x40 + reg_off;
1979 val = 1 << TestFail;
1980 Set_NB32(dev, reg, val);
1985 pDCTstat->DCTSysLimit = nxtcsBase - 1;
1986 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
1989 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1991 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
1992 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
1993 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
1994 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
/* Map a memclock frequency index k to its frequency in MHz via Table_F_k.
 * NOTE(review): the FIXME below predates this review — whether the table
 * is indexed by k or k<<1 must be checked against Table_F_k's definition. */
1997 static u16 Get_Fk_D(u8 k)
1999 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
2002 static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
2003 struct DCTStatStruc *pDCTstat)
/* Probe every DIMM slot over SMBus/SPD: verify CRC, classify module type
 * (registered/unbuffered, ECC, x4/x8/x16, single/dual/quad rank), and
 * accumulate the population bitmaps and bus-load counters used by later
 * stages. Returns pDCTstat->ErrCode (SC_StopError if no usable DIMMs or
 * registered/unbuffered modules are mixed). */
2005 /* Check DIMMs present, verify checksum, flag SDRAM type,
2006 * build population indicator bitmaps, and preload bus loading
2007 * of DIMMs into DCTStatStruc.
2008 * MAAload=number of devices on the "A" bus.
2009 * MABload=number of devices on the "B" bus.
2010 * MAAdimms=number of DIMMs on the "A" bus slots.
2011 * MABdimms=number of DIMMs on the "B" bus slots.
2012 * DATAAload=number of ranks on the "A" bus slots.
2013 * DATABload=number of ranks on the "B" bus slots.
2018 u16 RegDIMMPresent, MaxDimms;
2023 /* preload data structure with addrs */
2024 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
2026 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
2028 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
2031 pDCTstat->DimmQRPresent = 0;
/* Indices >= DimmSlots are virtual slots for the upper ranks of
 * quad-rank DIMMs (see the DimmQRPresent handling below). */
2033 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
2037 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
2039 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
2040 status = mctRead_SPD(smbaddr, SPD_ByteUse);
2041 if (status >= 0) { /* SPD access is ok */
2042 pDCTstat->DIMMPresent |= 1 << i;
2043 if (crcCheck(smbaddr)) { /* CRC is OK */
2044 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2045 if (byte == JED_DDR3SDRAM) {
2046 /*Dimm is 'Present'*/
2047 pDCTstat->DIMMValid |= 1 << i;
/* Bad CRC: record it; only fatal when NV_SPDCHK_RESTRT says so. */
2050 pDCTstat->DIMMSPDCSE = 1 << i;
2052 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
2053 pDCTstat->ErrCode = SC_StopError;
2055 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
2056 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
2057 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2058 if (byte == JED_DDR3SDRAM)
2059 pDCTstat->DIMMValid |= 1 << i;
2062 /* Check module type */
2063 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
2064 if (byte == JED_RDIMM || byte == JED_MiniRDIMM)
2065 RegDIMMPresent |= 1 << i;
2066 /* Check ECC capable */
2067 byte = mctRead_SPD(smbaddr, SPD_BusWidth);
2068 if (byte & JED_ECC) {
2069 /* DIMM is ECC capable */
2070 pDCTstat->DimmECCPresent |= 1 << i;
2072 /* Check if x4 device */
2073 devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
2074 if (devwidth == 0) {
2075 /* DIMM is made with x4 or x16 drams */
2076 pDCTstat->Dimmx4Present |= 1 << i;
2077 } else if (devwidth == 1) {
2078 pDCTstat->Dimmx8Present |= 1 << i;
2079 } else if (devwidth == 2) {
2080 pDCTstat->Dimmx16Present |= 1 << i;
/* Rank count from SPD organization byte: 0=1 rank, 1=2, 3=4. */
2083 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
2085 if (byte == 3) { /* 4ranks */
2086 /* if any DIMMs are QR, we have to make two passes through DIMMs*/
2087 if ( pDCTstat->DimmQRPresent == 0) {
2090 if (i < DimmSlots) {
2091 pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
2093 pDCTstat->MAdimms[i & 1] --;
2095 byte = 1; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
2096 } else if (byte == 1) { /* 2ranks */
2097 pDCTstat->DimmDRPresent |= 1 << i;
2102 else if (devwidth == 1)
2104 else if (devwidth == 2)
2107 byte++; /* al+1=rank# */
2109 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
/* j = i & 1 selects channel A (even slots) or B (odd slots). */
2112 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
2113 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
2114 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
2116 /* check address mirror support for unbuffered dimm */
2117 /* check number of registers on a dimm for registered dimm */
2118 byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
2119 if (RegDIMMPresent & (1 << i)) {
2121 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2123 if ((byte & 1) == 1)
2124 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2126 /* Get byte62: Reference Raw Card information. We dont need it now. */
2127 /* byte = mctRead_SPD(smbaddr, SPD_RefRawCard); */
2128 /* Get Byte65/66 for register manufacture ID code */
/* 0x9780 identifies a specific register vendor; RegManRevID 0x16
 * distinguishes the second-generation part. */
2129 if ((0x97 == mctRead_SPD(smbaddr, SPD_RegManufactureID_H)) &&
2130 (0x80 == mctRead_SPD(smbaddr, SPD_RegManufactureID_L))) {
2131 if (0x16 == mctRead_SPD(smbaddr, SPD_RegManRevID))
2132 pDCTstat->RegMan2Present |= 1 << i;
2134 pDCTstat->RegMan1Present |= 1 << i;
2136 /* Get Control word values for RC3. We dont need it. */
2137 byte = mctRead_SPD(smbaddr, 70);
2138 pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* C3 = SPD byte 70 [7:4] */
2139 /* Get Control word values for RC4, and RC5 */
2140 byte = mctRead_SPD(smbaddr, 71);
2141 pDCTstat->CtrlWrd4 |= (byte & 0xFF) << (i << 2); /* RC4 = SPD byte 71 [3:0] */
2142 pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
2146 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
2147 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
2148 printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
2149 printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
2150 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
2151 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
2152 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
2153 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
2154 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
2155 printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
2156 printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
2157 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
2158 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
2159 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
2160 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
2161 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
2162 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);
/* Derive summary status bits; mixing registered and unbuffered modules
 * is a hard stop. */
2164 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
2165 if (RegDIMMPresent != 0) {
2166 if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
2167 /* module type DIMM mismatch (reg'ed, unbuffered) */
2168 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
2169 pDCTstat->ErrCode = SC_StopError;
2171 /* all DIMMs are registered */
2172 pDCTstat->Status |= 1<<SB_Registered;
2175 if (pDCTstat->DimmECCPresent != 0) {
2176 if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
2177 /* all DIMMs are ECC capable */
2178 pDCTstat->Status |= 1<<SB_ECCDIMMs;
2181 if (pDCTstat->DimmPARPresent != 0) {
2182 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
2183 /*all DIMMs are Parity capable */
2184 pDCTstat->Status |= 1<<SB_PARDIMMs;
2188 /* no DIMMs present or no DIMMs that qualified. */
2189 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
2190 pDCTstat->ErrCode = SC_StopError;
2193 printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
2194 printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
2195 printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
2196 printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");
2198 mctHookAfterDIMMpre();
2200 return pDCTstat->ErrCode;
/* Return the SMBus address of DIMM slot i, read from the per-node
 * DIMMAddr table preloaded by mctGet_DIMMAddr().
 * NOTE(review): the indexing/return lines are elided in this view. */
2203 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2207 p = pDCTstat->DIMMAddr;
2208 /* mct_BeforeGetDIMMAddress(); */
2212 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2213 struct DCTStatStruc *pDCTstat)
/* Initialize both DRAM controllers on this node: DCT0 always, DCT1 only
 * when unganged and populated. If DCT1 has no DIMMs its DRAM interface is
 * disabled outright via F2x194[DisDramInterface]. */
2218 /* Config. DCT0 for Ganged or unganged mode */
2219 DCTInit_D(pMCTstat, pDCTstat, 0);
2220 if (pDCTstat->ErrCode == SC_FatalErr) {
2221 /* Do nothing goto exitDCTInit; any fatal errors? */
2223 /* Configure DCT1 if unganged and enabled*/
2224 if (!pDCTstat->GangedMode) {
2225 if (pDCTstat->DIMMValidDCT[1] > 0) {
/* Preserve DCT0's error code across the DCT1 init so a DCT1
 * "not running" result (code 2) does not mask it. */
2226 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2227 pDCTstat->ErrCode = 0;
2228 DCTInit_D(pMCTstat, pDCTstat, 1);
2229 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2230 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
/* No DIMMs on DCT1: shut its DRAM interface down. */
2232 val = 1 << DisDramInterface;
2233 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
/* Run DRAM initialization for the given DCT using the software-driven
 * sequence; the production pre-init hook runs first.  The hardware-based
 * init path is intentionally left disabled. */
static void mct_DramInit(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
	mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
	/* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
/* Split the node's DIMM population bitmap into per-channel maps
 * (even bits = channel A, odd bits = channel B) and decide ganged
 * (128-bit) vs unganged mode.  Ganged mode requires matching A/B
 * population and the NV_Unganged option not set. */
static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	byte = bytex = pDCTstat->DIMMValid;
	bytex &= 0x55;		/* CHA DIMM pop */
	pDCTstat->DIMMValidDCT[0] = bytex;

	byte &= 0xAA;		/* CHB DIMM pop */
	pDCTstat->DIMMValidDCT[1] = byte;

	if (byte != bytex) {
		pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
	byte = mctGet_NVbits(NV_Unganged);
	pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temp. to avoid setting of ganged mode */
	if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
		pDCTstat->GangedMode = 1;
		/* valid 128-bit mode population. */
		pDCTstat->Status |= 1 << SB_128bitmode;
		val = Get_NB32(pDCTstat->dev_dct, reg);
		val |= 1 << DctGangEn;
		Set_NB32(pDCTstat->dev_dct, reg, val);
	if (byte)	/* NV_Unganged */
		pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM mismatch error */
	return pDCTstat->ErrCode;
2286 u32 Get_NB32(u32 dev, u32 reg)
2288 return pci_read_config32(dev, reg);
2291 void Set_NB32(u32 dev, u32 reg, u32 val)
2293 pci_write_config32(dev, reg, val);
2297 u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2301 Set_NB32(dev, index_reg, index);
2302 dword = Get_NB32(dev, index_reg+0x4);
2307 void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
2309 Set_NB32(dev, index_reg, index);
2310 Set_NB32(dev, index_reg + 0x4, data);
2313 u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2319 index &= ~(1 << DctAccessWrite);
2320 Set_NB32(dev, index_reg, index);
2322 dword = Get_NB32(dev, index_reg);
2323 } while (!(dword & (1 << DctAccessDone)));
2324 dword = Get_NB32(dev, index_reg + 0x4);
2329 void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2334 Set_NB32(dev, index_reg + 0x4, data);
2335 index |= (1 << DctAccessWrite);
2336 Set_NB32(dev, index_reg, index);
2338 dword = Get_NB32(dev, index_reg);
2339 } while (!(dword & (1 << DctAccessDone)));
/* Fetch platform-specific drive strength and address timing values from
 * the interface layer and program them into each enabled DCT's PHY
 * (index registers 0x00 and 0x04 of F2x[1,0]98). */
static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	/* Get platform specific config/timing values from the interface layer
	 * and program them into DCT.
	u32 dev = pDCTstat->dev_dct;
	u8 i, i_start, i_end;

	if (pDCTstat->GangedMode) {
		SyncSetting(pDCTstat);
		/* mct_SetupSync_D */
	for (i=i_start; i<i_end; i++) {
		index_reg = 0x98 + (i * 0x100);
		Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]);	/* Channel A Output Driver Compensation Control */
		Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]);	/* Channel A Address Timing Control */
	return pDCTstat->ErrCode;
/* Wait for this node's memory controller to report DramEnabled in
 * F2x110, i.e. DRAM init/sync has completed.  No-op for absent nodes or
 * nodes with no DIMMs on either DCT. */
static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
	if (pDCTstat->NodePresent) {
		dev = pDCTstat->dev_dct;

		if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
			val = Get_NB32(dev, 0x110);	/* poll F2x110 */
		} while (!(val & (1 << DramEnabled)));
	}	/* Node is present */
/* After CAS-latency/timing detection: load this DCT's DIMM-valid map
 * into the working DIMMValid field and flag a stop error when the DCT is
 * empty.  One path also resets the chip-select bookkeeping first.
 * NOTE(review): the branch structure is partially elided in this view —
 * confirm which path handles ganged mode against the full file. */
static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	if (!pDCTstat->GangedMode) {
		pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
		if (pDCTstat->DIMMValidDCT[dct] == 0)
			pDCTstat->ErrCode = SC_StopError;
	pDCTstat->CSPresent = 0;
	pDCTstat->CSTestFail = 0;
	pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
	if (pDCTstat->DIMMValidDCT[dct] == 0)
		pDCTstat->ErrCode = SC_StopError;
/* Compute DRAM data width from SPD, select ganged/unganged mode, and
 * disable the DRAM interface (F2x[1,0]94[DisDramInterface]) of any DCT
 * that ended up with no valid DIMMs, before DRAM init runs. */
static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	SPDCalcWidth_D(pMCTstat, pDCTstat);
	ret = mct_setMode(pMCTstat, pDCTstat);
	ret = pDCTstat->ErrCode;

	if (pDCTstat->DIMMValidDCT[0] == 0) {
		/* DCT0 empty: disable its DRAM interface */
		val = Get_NB32(pDCTstat->dev_dct, 0x94);
		val |= 1 << DisDramInterface;
		Set_NB32(pDCTstat->dev_dct, 0x94, val);
	if (pDCTstat->DIMMValidDCT[1] == 0) {
		/* DCT1 empty: disable its DRAM interface (register copy at +0x100) */
		val = Get_NB32(pDCTstat->dev_dct, 0x94 + 0x100);
		val |= 1 << DisDramInterface;
		Set_NB32(pDCTstat->dev_dct, 0x94 + 0x100, val);
	printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
	printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
	printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
	printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
	/* Disable dram interface before DRAM init */
/* After the per-DCT address maps are stitched into the node map: record
 * the hoisted memory-hole base, extend NodeSysLimit by this DCT's range
 * and, in unganged mode, program the DCT-select registers (DctSelBaseAddr
 * / F2x110[DctSelHiRngEn, DctSelHi]) so accesses steer to the correct
 * DCT.  When DCT0 is empty, DctSelBaseAddr is programmed to 0. */
static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	_MemHoleRemap = mctGet_NVbits(NV_MemHole);
	DramHoleBase = mctGet_NVbits(NV_BottomIO);

	/* Scale DramHoleBase from [31:24] to [31:16] (128MB granularity)
	 * to form the 'effective' bottom-IO hole base. */
	pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;

	/* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
	if (!pDCTstat->GangedMode) {
		dev = pDCTstat->dev_dct;
		pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
		/* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
		if (pDCTstat->DIMMValidDCT[1] > 0) {
			dword = pDCTstat->DCTSysLimit + 1;
			dword += pDCTstat->NodeSysBase;
			dword >>= 8;	/* scale [39:8] to [47:27],and to F2x110[31:11] */
			if ((dword >= DramHoleBase) && _MemHoleRemap) {
				pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
				val = pMCTstat->HoleBase;
			val = (((~val) & 0xFF) + 1);
			val = Get_NB32(dev, reg);
			val |= 3;	/* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
			Set_NB32(dev, reg, val);
			Set_NB32(dev, reg, val);

		/* Program the DctSelBaseAddr value to 0
		   if DCT 0 is disabled */
		if (pDCTstat->DIMMValidDCT[0] == 0) {
			dword = pDCTstat->NodeSysBase;
			if ((dword >= DramHoleBase) && _MemHoleRemap) {
				pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
				val = pMCTstat->HoleBase;
			val |= (((~val) & 0xFFFF) + 1);
			Set_NB32(dev, reg, val);
			val |= 3;	/* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
			Set_NB32(dev, reg, val);
	/* Ganged: whole DCT range counts once */
	pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;

	printk(BIOS_DEBUG, "AfterStitch pDCTstat->NodeSysBase = %x\n", pDCTstat->NodeSysBase);
	printk(BIOS_DEBUG, "mct_AfterStitchMemory: pDCTstat->NodeSysLimit = %x\n", pDCTstat->NodeSysLimit);
/* Thin wrapper: run DIMM presence detection for this node and report
 * the resulting error code from the node status. */
static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	ret = DIMMPresence_D(pMCTstat, pDCTstat);
	ret = pDCTstat->ErrCode;

/* mct_BeforeGetDIMMAddress inline in C */
/* For every present node, program the inter-command turnaround timings
 * (Trdrd/Twrwr/Twrrd/TrwtTO/TrwtWB) for each populated DCT; DCT1 is
 * handled separately only in unganged mode. */
static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
	for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
		struct DCTStatStruc *pDCTstat;
		pDCTstat = pDCTstatA + Node;
		if (pDCTstat->NodePresent) {
			if (pDCTstat->DIMMValidDCT[0]) {
				pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
				Set_OtherTiming(pMCTstat, pDCTstat, 0);
			if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
				pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
				Set_OtherTiming(pMCTstat, pDCTstat, 1);
		}	/* Node is present*/
/* Derive the turnaround timings from the DQS receiver-enable and write
 * gross-delay spreads, then merge the low bits into F2x[1,0]8C (DRAM
 * Timing High) and the high bits into F2x[1,0]78 (DRAM Control). */
static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	/* Measure the per-lane delay spreads, then compute each timing */
	Get_DqsRcvEnGross_Diff(pDCTstat, dev, 0x98 + reg_off);
	Get_WrDatGross_Diff(pDCTstat, dct, dev, 0x98 + reg_off);
	Get_Trdrd(pMCTstat, pDCTstat, dct);
	Get_Twrwr(pMCTstat, pDCTstat, dct);
	Get_Twrrd(pMCTstat, pDCTstat, dct);
	Get_TrwtTO(pMCTstat, pDCTstat, dct);
	Get_TrwtWB(pMCTstat, pDCTstat);

	reg = 0x8C + reg_off;		/* Dram Timing Hi */
	val = Get_NB32(dev, reg);
	dword = pDCTstat->TrwtTO;
	dword = pDCTstat->Twrrd & 3;
	dword = pDCTstat->Twrwr & 3;
	dword = pDCTstat->Trdrd & 3;
	dword = pDCTstat->TrwtWB;
	Set_NB32(dev, reg, val);

	/* upper timing bits live in DRAM Control */
	reg = 0x78 + reg_off;
	val = Get_NB32(dev, reg);
	dword = pDCTstat->Twrrd >> 2;
	dword = pDCTstat->Twrwr >> 2;
	dword = pDCTstat->Trdrd >> 2;
	Set_NB32(dev, reg, val);
/* Read-to-read spacing: half the DqsRcvEn gross-delay spread (signed)
 * plus one MEMCLK. */
static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	Trdrd = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 1;
	pDCTstat->Trdrd = Trdrd;
/* Write-to-write spacing: half the write-data gross-delay spread
 * (signed) plus two MEMCLKs. */
static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	Twrwr = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->WrDatGrossMin) >> 1) + 2;
	pDCTstat->Twrwr = Twrwr;
/* Write-to-read spacing: half the (WrDat max - DqsRcvEn min) spread plus
 * four, minus the latency-difference term; clamped (clamp lines are
 * partially elided here). */
static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);

	Twrrd = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 4 - LDplus1;
	else if (Twrrd > 10)
	pDCTstat->Twrrd = Twrrd;
/* Read-to-write (different DIMM) spacing: half the (DqsRcvEn max -
 * WrDat min) spread plus the latency-difference term. */
static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);

	TrwtTO = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->WrDatGrossMin) >> 1) + LDplus1;
	pDCTstat->TrwtTO = TrwtTO;
static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	/* TrwtWB ensures read-to-write data-bus turnaround.
	   This value should be one more than the programmed TrwtTO.*/
	/* NOTE(review): the stored value equals TrwtTO, not TrwtTO+1 as the
	 * comment above suggests — the +1 may be applied implicitly when the
	 * register field is encoded; confirm against the BKDG before "fixing". */
	pDCTstat->TrwtWB = pDCTstat->TrwtTO;
/* Latency-difference term for the turnaround calculations: reads the low
 * nibble of F2x[1,0]88 and bits [22:20] of F2x[1,0]84 (presumably the
 * CAS-latency and write-recovery fields — confirm against the BKDG) and
 * combines them (tail of the computation elided in this view). */
static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	val1 = Get_NB32(dev, reg_off + 0x88) & 0xF;
	val2 = (Get_NB32(dev, reg_off + 0x84) >> 20) & 7;
/* Find the largest and smallest DqsRcvEn gross delay over all byte-lane
 * pairs (PHY indices 0x10/0x11/0x20/0x21, plus 0x12 for the ECC lane
 * when present).  The spread is the Critical Gross Delay Difference
 * (CGDD) consumed by the turnaround-timing calculations. */
static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
			u32 dev, u32 index_reg)
	u8 Smallest, Largest;

	/* The largest DqsRcvEnGrossDelay of any DIMM minus the
	   DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
	   Gross Delay Difference (CGDD) */
	/* DqsRcvEn byte 1,0 */
	val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x10);
	Largest = val & 0xFF;
	Smallest = (val >> 8) & 0xFF;

	/* DqsRcvEn byte 3,2 */
	val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x11);
	bytex = (val >> 8) & 0xFF;
	if (bytex < Smallest)

	/* DqsRcvEn byte 5,4 */
	val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x20);
	bytex = (val >> 8) & 0xFF;
	if (bytex < Smallest)

	/* DqsRcvEn byte 7,6 */
	val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x21);
	bytex = (val >> 8) & 0xFF;
	if (bytex < Smallest)

	if (pDCTstat->DimmECCPresent> 0) {
		/* ECC lane */
		val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x12);
		bytex = (val >> 8) & 0xFF;
		if (bytex < Smallest)

	pDCTstat->DqsRcvEnGrossMax = Largest;
	pDCTstat->DqsRcvEnGrossMin = Smallest;
/* Find the largest and smallest WrDatGross delay over all populated
 * DIMMs (PHY indices 0x01/0x101, plus 0x201/0x301 for DIMM2/DIMM3 on
 * rev-Cx parts); the spread feeds the turnaround-timing (CGDD) math. */
static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
			u8 dct, u32 dev, u32 index_reg)
	u8 Smallest, Largest;

	/* The largest WrDatGrossDlyByte of any DIMM minus the
	   WrDatGrossDlyByte of any other DIMM is equal to CGDD */
	if (pDCTstat->DIMMValid & (1 << 0)) {
		val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01);	/* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
		Largest = val & 0xFF;
		Smallest = (val >> 8) & 0xFF;
	if (pDCTstat->DIMMValid & (1 << 2)) {
		val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101);	/* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
		bytex = (val >> 8) & 0xFF;
		if (bytex < Smallest)

	/* If Cx, 2 more dimm need to be checked to find out the largest and smallest */
	if (pDCTstat->LogicalCPUID & AMD_DR_Cx) {
		if (pDCTstat->DIMMValid & (1 << 4)) {
			val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x201);	/* WrDatGrossDlyByte byte 0,1,2,3 for DIMM2 */
			bytex = (val >> 8) & 0xFF;
			if (bytex < Smallest)
		if (pDCTstat->DIMMValid & (1 << 6)) {
			val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x301);	/* WrDatGrossDlyByte byte 0,1,2,3 for DIMM3 */
			bytex = (val >> 8) & 0xFF;
			if (bytex < Smallest)

	pDCTstat->WrDatGrossMax = Largest;
	pDCTstat->WrDatGrossMin = Smallest;
/* Scan the DqsRcvEn gross-delay fields of every valid DIMM at the given
 * PHY index (gross delay sits above bit 5 of each 16-bit lane half) and
 * return the extremes packed as (Smallest << 8) | Largest — the packing
 * the callers above unpack. */
static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
			u32 dev, u32 index_reg,
	u8 Smallest, Largest;

	for (i=0; i < 8; i+=2) {
		if ( pDCTstat->DIMMValid & (1 << i)) {
			val = Get_NB32_index_wait(dev, index_reg, index);
			/* low lane half */
			byte = (val >> 5) & 0xFF;
			if (byte < Smallest)
			/* high lane half */
			byte = (val >> (16 + 5)) & 0xFF;
			if (byte < Smallest)
/* Scan the four WrDatGross byte-lane delays (plus the ECC lane when
 * present) at the given PHY index and return the extremes packed as
 * (Smallest << 8) | Largest, matching the callers' unpacking. */
static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
			u8 dct, u32 dev, u32 index_reg,
	u8 Smallest, Largest;

	for (i=0; i < 2; i++) {
		val = Get_NB32_index_wait(dev, index_reg, index);
		for (j=0; j < 4; j++) {
			if (byte < Smallest)

	if (pDCTstat->DimmECCPresent > 0) {
		/* ECC byte lane */
		val = Get_NB32_index_wait(dev, index_reg, index);
		if (byte < Smallest)
/* Final MCT cleanup: undo the cache-line-to-NB and WB-enhancement MSR
 * settings that mct_InitialMCT_D applied before training. */
static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	mct_ClrClToNB_D(pMCTstat, pDCTstat);
	mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);
/* Initial MCT setup: set the ClLinesToNbDis and WbEnhWsbDis MSR bits
 * (cleared again in mct_FinalMCT_D). */
static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
	mct_SetClToNB_D(pMCTstat, pDCTstat);
	mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
/* Node-presence probe (body elided from this view — see full file). */
static u32 mct_NodePresent_D(void)
/* Per-node defaults before MCT runs: unganged until mct_setMode decides
 * otherwise, DR-family flag set, and extended PCI configuration access
 * enabled via the NB config MSR (bit 46), recorded in SB_ExtConfig. */
static void mct_init(struct MCTStatStruc *pMCTstat,
		struct DCTStatStruc *pDCTstat)
	pDCTstat->GangedMode = 0;
	pDCTstat->DRPresent = 1;

	/* enable extend PCI configuration access */
	_RDMSR(addr, &lo, &hi);
	if (hi & (1 << (46-32))) {
		pDCTstat->Status |= 1 << SB_ExtConfig;
	_WRMSR(addr, lo, hi);
/* Clear the LegacyBiosMode bit in both DCTs' copies of the register
 * (register offsets are set in lines elided from this view). */
static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	u32 dev = pDCTstat->dev_dct;

	/* Clear Legacy BIOS Mode bit */
	val = Get_NB32(dev, reg);
	val &= ~(1<<LegacyBiosMode);
	Set_NB32(dev, reg, val);

	/* same bit in the DCT1 register copy */
	val = Get_NB32(dev, reg);
	val &= ~(1<<LegacyBiosMode);
	Set_NB32(dev, reg, val);
/* Extend the HT memory map: read each node's DRAM base/limit from the
 * node-0 map registers (F1x40/F1x44 family), rescale them to the
 * extended-format F1x120/F1x124 registers of the owning node, and — when
 * a hardware memory hole is active — arm DramMemHoistValid with the
 * hole-base field. */
static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
	u32 Drambase, Dramlimit;
	struct DCTStatStruc *pDCTstat;

	pDCTstat = pDCTstatA + 0;
	dev = pDCTstat->dev_map;

	/* Copy dram map from F1x40/44,F1x48/4c,
	   to F1x120/124(Node0),F1x120/124(Node1),...*/
	for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
		pDCTstat = pDCTstatA + Node;
		devx = pDCTstat->dev_map;

		/* get base/limit from Node0 */
		reg = 0x40 + (Node << 3);	/* Node0/Dram Base 0 */
		val = Get_NB32(dev, reg);
		Drambase = val >> ( 16 + 3);

		reg = 0x44 + (Node << 3);	/* Node0/Dram Base 0 */
		val = Get_NB32(dev, reg);
		Dramlimit = val >> (16 + 3);

		/* set base/limit to F1x120/124 per Node */
		if (pDCTstat->NodePresent) {
			reg = 0x120;	/* F1x120,DramBase[47:27] */
			val = Get_NB32(devx, reg);
			Set_NB32(devx, reg, val);

			val = Get_NB32(devx, reg);
			Set_NB32(devx, reg, val);

			if ( pMCTstat->GStatus & ( 1 << GSB_HWHole)) {
				/* memory hoisting: publish the hole base */
				val = Get_NB32(devx, reg);
				val |= (1 << DramMemHoistValid);
				val &= ~(0xFF << 24);
				dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
				Set_NB32(devx, reg, val);
/* Tri-state unused chip selects (when motherboard termination allows):
 * build a mask of absent CS lines — for registered DIMMs the odd CS of
 * each pair mirrors the even one — and write it to the PHY through the
 * index register. */
static void SetCSTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 dev = pDCTstat->dev_dct;
	u32 index_reg = 0x98 + 0x100 * dct;

	/* Tri-state unused chipselects when motherboard
	   termination is available */

	/* FIXME: skip for Ax */

	word = pDCTstat->CSPresent;
	if (pDCTstat->Status & (1 << SB_Registered)) {
		word |= (word & 0x55) << 1;	/* odd CS follows its even partner */
	word = (~word) & 0xFF;		/* invert: 1 = unused, tri-state it */
	val = Get_NB32_index_wait(dev, index_reg, index);
	Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused CKE signals: when no chip select in a CKE group
 * (even-CS mask 0x55 / odd-CS mask 0xAA) is populated, the matching CKE
 * can be floated. */
static void SetCKETriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 index_reg = 0x98 + 0x100 * dct;

	/* Tri-state unused CKEs when motherboard termination is available */

	/* FIXME: skip for Ax */

	dev = pDCTstat->dev_dct;
	word = pDCTstat->CSPresent;

	val = Get_NB32_index_wait(dev, index_reg, index);
	if ((word & 0x55) == 0)
	if ((word & 0xAA) == 0)
	Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused ODT pins.  Registered platforms derive the mask from
 * the populated chip selects (with a quad-rank adjustment); the AM3
 * package path uses a simpler mask.  The result lands in bits [11:8]
 * (ODTTriState[3:0]) of the PHY register. */
static void SetODTTriState(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 index_reg = 0x98 + 0x100 * dct;

	/* FIXME: skip for Ax */

	dev = pDCTstat->dev_dct;

	/* Tri-state unused ODTs when motherboard termination is available */
	max_dimms = (u8) mctGet_NVbits(NV_MAX_DIMMS);
	odt = 0x0F;	/* ODT tri-state setting */

	if (pDCTstat->Status & (1 <<SB_Registered)) {
		for (cs = 0; cs < 8; cs += 2) {
			if (pDCTstat->CSPresent & (1 << cs)) {
				odt &= ~(1 << (cs / 2));
				if (mctGet_NVbits(NV_4RANKType) != 0) { /* quad-rank capable platform */
					if (pDCTstat->CSPresent & (1 << (cs + 1)))
						odt &= ~(4 << (cs / 2));
	} else {		/* AM3 package */
		val = ~(pDCTstat->CSPresent);
		odt = val & 9;	/* swap bits 1 and 2 */
	val = Get_NB32_index_wait(dev, index_reg, index);
	val |= ((odt & 0xFF) << 8);	/* set bits 11:8 ODTTriState[3:0] */
	Set_NB32_index_wait(dev, index_reg, index, val);
/* Pre-compensation PHY init: build the slew-rate override word from the
 * Table_Comp_{Rise,Fall}_Slew_{15x,20x} lookup tables, indexed by fields
 * read from PHY register 0x00, and write the result to PHY register
 * 0x0A.  A speed/DIMM-count exception forces conservative values and
 * redirects the write to DCT0's PHY. */
static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 index_reg = 0x98 + 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	val = Get_NB32_index_wait(dev, index_reg, 0x00);
	for (i=0; i < 6; i++) {
		p = Table_Comp_Rise_Slew_15x;
		valx = p[(val >> 16) & 3];
		p = Table_Comp_Fall_Slew_15x;
		valx = p[(val >> 16) & 3];
		p = Table_Comp_Rise_Slew_20x;
		valx = p[(val >> 8) & 3];
		p = Table_Comp_Fall_Slew_20x;
		valx = p[(val >> 8) & 3];
		dword |= valx << (5 * i);	/* pack 5-bit fields */

	/* Override/Exception */
	if (!pDCTstat->GangedMode) {
		i = 0;	/* use i for the dct setting required */
		if (pDCTstat->MAdimms[0] < 4)
		if (((pDCTstat->Speed == 2) || (pDCTstat->Speed == 3)) && (pDCTstat->MAdimms[i] == 4)) {
			dword &= 0xF18FFF18;
			index_reg = 0x98;	/* force dct = 0 */
	Set_NB32_index_wait(dev, index_reg, 0x0a, dword);
/* Set F2x78[EarlyArbEn] when required: unconditionally on Bx/Cx parts,
 * otherwise only when the NB-clock to MEMCLK ratio check says early
 * arbitration is needed. */
static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	u32 dev = pDCTstat->dev_dct;

	/* GhEnhancement #18429 modified by askar: For low NB CLK :
	 * Memclk ratio, the DCT may need to arbitrate early to avoid
	 * unnecessary bubbles.
	 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
	 * NB CLK : Memclk ratio is between 3:1 (inclusive) to 4:5 (inclusive)
	val = Get_NB32(dev, reg);
	if (pDCTstat->LogicalCPUID & (AMD_DR_Bx | AMD_DR_Cx))
		val |= (1 << EarlyArbEn);
	else if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
		val |= (1 << EarlyArbEn);

	Set_NB32(dev, reg, val);
/* Decide whether early DCT arbitration is needed by comparing the NB
 * core frequency to the memory clock: returns 1 when the NClk:MemClk
 * ratio is between 3:1 and 4.5:1 inclusive, 0 otherwise. */
static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	u32 dev = pDCTstat->dev_dct;

	/* Check if NB COF >= 4*Memclk, if it is not, return a fatal error
	/* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
	_RDMSR(0xC0010071, &lo, &hi);
	val = Get_NB32(dev, reg);
	if (!(val & (1 << MemClkFreqVal)))
		val = Get_NB32(dev, reg + 0x100);	/* get the DCT1 value */
	dev = pDCTstat->dev_nbmisc;
	val = Get_NB32(dev, reg);

	/* Yes this could be nicer but this was how the asm was.... */
	if (val < 3) {				/* NClk:MemClk < 3:1 */
	} else if (val > 4) {			/* NClk:MemClk >= 5:1 */
	} else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
	return 1;	/* 3:1 <= NClk:MemClk <= 4.5:1*/
/* Reset the MCT data structures: zero the whole MCTStatStruc, then for
 * each node clear two byte ranges of its DCTStatStruc (up to
 * CH_MaxRdLat[2], and from CH_D_BC_RCVRDLY[2][4] to the end), preserving
 * the region in between and restoring the two HostBiosSrvc fields. */
static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
	struct DCTStatStruc *pDCTstat;
	u16 host_serv1, host_serv2;

	/* Initialize Data structures by clearing all entries to 0 */
	p = (u8 *) pMCTstat;
	for (i = 0; i < sizeof(struct MCTStatStruc); i++) {

	for (Node = 0; Node < 8; Node++) {
		pDCTstat = pDCTstatA + Node;
		/* save fields that must survive the wipe */
		host_serv1 = pDCTstat->HostBiosSrvc1;
		host_serv2 = pDCTstat->HostBiosSrvc2;

		p = (u8 *) pDCTstat;
		stop = ((u32) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]);
		for (i = start; i < stop ; i++) {

		start = ((u32) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
		stop = sizeof(struct DCTStatStruc);
		for (i = start; i < stop; i++) {

		pDCTstat->HostBiosSrvc1 = host_serv1;
		pDCTstat->HostBiosSrvc2 = host_serv2;
/* Production hook before DRAM init: on rev-Dx silicon at a specific
 * memory speed, write a PHY register on both DCTs via the 0x98/0x9C
 * index pair (index 0x0D000030, write-back 0x4D040F30).  The data value
 * comes from an assignment elided in this view — confirm against the
 * full file. */
static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat)
	u32 dev = pDCTstat->dev_dct;

	if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
		if ((pDCTstat->Speed == 3))
		for (i=0; i < 2; i++) {
			reg_off = 0x100 * i;
			Set_NB32(dev, 0x98 + reg_off, 0x0D000030);
			Set_NB32(dev, 0x9C + reg_off, dword);
			Set_NB32(dev, 0x98 + reg_off, 0x4D040F30);
/* Disable DLL shutdown during self-refresh: on DA-C2/RB-C3 silicon poke
 * the PHY first (F2x[1,0]9C_x4D0FE006/x4D0FE007), then return
 * DramConfigLo with the DisDllShutdownSR bit (27) set. */
static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
				struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct)
	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	/* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
	if (pDCTstat->LogicalCPUID & (AMD_DA_C2 | AMD_RB_C3)) {
		Set_NB32(dev, 0x9C + reg_off, 0x1c);
		Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
		Set_NB32(dev, 0x9C + reg_off, 0x13d);
		Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);

	return DramConfigLo | /* DisDllShutdownSR */ 1 << 27;
/* Set the ClLinesToNbDis MSR bit ahead of training (undone by
 * mct_ClrClToNB_D in final cleanup). */
void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	/* FIXME: Maybe check the CPUID? - not for now. */
	/* pDCTstat->LogicalCPUID; */

	_RDMSR(msr, &lo, &hi);
	lo |= 1 << ClLinesToNbDis;
	_WRMSR(msr, lo, hi);
/* Clear ClLinesToNbDis again — unless ClToNB_flag records that the bit
 * was already set before MCT ran, in which case it is left alone. */
void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	/* FIXME: Maybe check the CPUID? - not for now. */
	/* pDCTstat->LogicalCPUID; */

	_RDMSR(msr, &lo, &hi);
	if (!pDCTstat->ClToNB_flag)
		lo &= ~(1<<ClLinesToNbDis);
	_WRMSR(msr, lo, hi);
/* Set the WbEnhWsbDis_D bit (upper MSR half) ahead of training; cleared
 * again by mct_ClrWbEnhWsbDis_D. */
void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	/* FIXME: Maybe check the CPUID? - not for now. */
	/* pDCTstat->LogicalCPUID; */

	_RDMSR(msr, &lo, &hi);
	hi |= (1 << WbEnhWsbDis_D);
	_WRMSR(msr, lo, hi);
/* Clear the WbEnhWsbDis_D bit (upper MSR half) during final cleanup. */
void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	/* FIXME: Maybe check the CPUID? - not for now. */
	/* pDCTstat->LogicalCPUID; */

	_RDMSR(msr, &lo, &hi);
	hi &= ~(1 << WbEnhWsbDis_D);
	_WRMSR(msr, lo, hi);
/* Select the dynamic-termination bits (MR2 RTT_WR field, bits 10/11 of
 * the MRS shadow) for registered DIMMs, based on the platform's maximum
 * DIMM count, the rank population (CSPresent) and the number of DIMMs
 * installed.  Several branch conditions are elided in this view. */
static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dimm)
	u8 DimmsInstalled = dimm;
	u32 DramTermDyn = 0;
	u8 Speed = pDCTstat->Speed;

	if (mctGet_NVbits(NV_MAX_DIMMS) == 4) {
		if (pDCTstat->CSPresent & 0xF0) {	/* ranks on CS4-7 */
			if (DimmsInstalled == 1)
				DramTermDyn |= 1 << 10;
			DramTermDyn |= 1 << 11;
			DramTermDyn |= 1 << 11;
			DramTermDyn |= 1 << 10;
		if (DimmsInstalled != 1) {
			DramTermDyn |= 1 << 10;
			DramTermDyn |= 1 << 11;
	if (DimmsInstalled != 1)
		DramTermDyn |= 1 << 11;
/* Build the DRAM MRS shadow value and merge it into F2x[1,0]84: chip
 * select CKE control mode, nominal termination (RTT_NOM by speed and
 * DIMM count for unbuffered DIMMs), dynamic termination (RTT_WR),
 * burst-length control for 128-bit mode, and the SRT/ASR bits. */
void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	/* Set chip select CKE control mode */
	if (mctGet_NVbits(NV_CKE_CTL)) {
		if (pDCTstat->CSPresent == 3) {
			word = pDCTstat->DIMMSPDCSE;

	DrvImpCtrl: drive impedance control.01b(34 ohm driver; Ron34 = Rzq/7)

	/* Dram nominal termination: */
	byte = pDCTstat->MAdimms[dct];
	if (!(pDCTstat->Status & (1 << SB_Registered))) {
		DramMRS |= 1 << 7;	/* 60 ohms */
		if (pDCTstat->Speed < 6)
			DramMRS |= 1 << 8;	/* 40 ohms */
			DramMRS |= 1 << 9;	/* 30 ohms */

	/* Dram dynamic termination: Disable(1DIMM), 120ohm(>=2DIMM) */
	if (!(pDCTstat->Status & (1 << SB_Registered))) {
		if (pDCTstat->Speed == 7)
	DramMRS |= mct_DramTermDyn_RDimm(pMCTstat, pDCTstat, byte);

	/* burst length control */
	if (pDCTstat->Status & (1 << SB_128bitmode))
	/* Qoff=0, output buffers enabled */
	DramMRS |= (pDCTstat->Speed - 4) << 20;	/* SRT-related speed encoding */
	/* ASR=1, auto self refresh */

	dword = Get_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84);
	dword &= ~0x00FC2F8F;	/* clear the MRS fields being rebuilt */
	Set_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84, dword);
/* Program F2x[1,0]94 (DRAM Config High), first disabling the PHY's
 * automatic compensation engine per the errata described below so a
 * MemClk frequency change embedded in the write cannot corrupt a
 * compensation update in flight. */
void mct_SetDramConfigHi_D(struct DCTStatStruc *pDCTstat, u32 dct,
	/* Bug#15114: Comp. update interrupted by Freq. change can cause
	 * subsequent update to be invalid during any MemClk frequency change:
	 * Solution: From the bug report:
	 *	1. A software-initiated frequency change should be wrapped into the
	 *	   following sequence :
	 * 	a) Disable Compensation (F2[1, 0]9C_x08[30] )
	 * 	b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
	 * 	c) Do frequency change
	 * 	d) Enable Compensation (F2[1, 0]9C_x08[30] )
	 *	2. A software-initiated Disable Compensation should always be
	 *	   followed by step b) of the above steps.
	 * Silicon Status: Fixed In Rev B0
	 *
	 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
	 * Solution: BIOS should disable the phy automatic compensation prior
	 * to initiating a memory clock frequency change as follows:
	 *	1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
	 *	2. Reset the Begin Compensation bits by writing 32'h0 to
	 *	   F2x[1, 0]9C_x4D004F00
	 *	3. Perform frequency change
	 *	4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
	 *	In addition, any time software disables the automatic phy
	 *	compensation it should reset the begin compensation bit per step 2.
	 * Silicon Status: Fixed in DR-B0
	u32 dev = pDCTstat->dev_dct;
	u32 index_reg = 0x98 + 0x100 * dct;

	/* step 1: ensure DisAutoComp is set before touching DramConfigHi */
	val = Get_NB32_index_wait(dev, index_reg, index);
	if (!(val & (1 << DisAutoComp)))
		Set_NB32_index_wait(dev, index_reg, index, val | (1 << DisAutoComp));

	Set_NB32(dev, 0x94 + 0x100 * dct, DramConfigHi);
/* Pre-DQS-training fixups applied per node: the Bx-only sample hook and
 * a DLL reset on both DCTs. */
static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstatA)
	struct DCTStatStruc *pDCTstat;

	/*
	 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
	 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
	 * 5h, (F2x[1, 0]78[3:0] = 5h).
	 * Silicon Status: Fixed In Rev B0
	 *
	 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
	 * Solution: At least, set WrDqs fine delay to be 0 for DDR3 training.
	for (Node = 0; Node < 8; Node++) {
		pDCTstat = pDCTstatA + Node;

		/* NOTE(review): only the sample call is guarded by NodePresent;
		 * the two ResetDLL calls below run for every node index —
		 * confirm this is intentional (braces may have been intended). */
		if (pDCTstat->NodePresent)
			mct_BeforeDQSTrainSamp(pDCTstat); /* only Bx */
		mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
		mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
/* Reset the DRAM PHY DLL for each enabled receiver pair on the DCT:
 * warm the path with a test-pattern read, then pulse PHY register
 * x4D080F0C (write 0x8000, wait >=300ns, write 0, wait >=2us).  Skipped
 * entirely on B3 silicon.  HWCR.wrap32dis is saved/set around the
 * 64-bit memory reads and restored afterwards. */
static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 dev = pDCTstat->dev_dct;
	u32 reg_off = 0x100 * dct;

	/* Skip reset DLL for B3 */
	if (pDCTstat->LogicalCPUID & AMD_DR_B3) {

	_RDMSR(addr, &lo, &hi);
	if(lo & (1<<17)) {	/* save the old value */
	lo |= (1<<17);	/* HWCR.wrap32dis */
	/* Setting wrap32dis allows 64-bit memory references in 32bit mode */
	_WRMSR(addr, lo, hi);

	pDCTstat->Channel = dct;
	Receiver = mct_InitReceiver_D(pDCTstat, dct);
	/* there are four receiver pairs, loosely associated with chipselects.*/
	for (; Receiver < 8; Receiver += 2) {
		if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
			addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
			mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr);	/* cache fills */

			/* Write 0000_8000h to register F2x[1,0]9C_xD080F0C */
			Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00008000);
			mct_Wait(80);	/* wait >= 300ns */

			/* Write 0000_0000h to register F2x[1,0]9C_xD080F0C */
			Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00000000);
			mct_Wait(800);	/* wait >= 2us */

	_RDMSR(addr, &lo, &hi);
	lo &= ~(1<<17);	/* restore HWCR.wrap32dis */
	_WRMSR(addr, lo, hi);
/* Enable channel data interleave (F2x110[DctDatIntlv]) in unganged mode
 * and set NB Configuration High DisDatMask (bit 36) — see the FIXME
 * notes about revision-specific applicability. */
static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat)
	u32 dev = pDCTstat->dev_dct;

	/* Enable F2x110[DctDatIntlv] */
	/* Call back not required mctHookBeforeDatIntlv_D() */
	/* FIXME Skip for Ax */
	if (!pDCTstat->GangedMode) {
		val = Get_NB32(dev, 0x110);
		val |= 1 << 5;			/* DctDatIntlv */
		Set_NB32(dev, 0x110, val);

		/* FIXME Skip for Cx */
		dev = pDCTstat->dev_nbmisc;
		val = Get_NB32(dev, 0x8C);	/* NB Configuration Hi */
		val |= 1 << (36-32);		/* DisDatMask */
		Set_NB32(dev, 0x8C, val);
/* For DDR3-1600 and faster (Speed >= 7), set the PowerDown bit (13) in
 * four PHY DLL control registers (x0D080F10, x0D080F11, x0D088F30,
 * x0D08CF30) via the F2x[1,0]98/9C index/data pair; each write-back uses
 * the index with the write-request bit (0x4D...) set. */
static void SetDllSpeedUp_D(struct MCTStatStruc *pMCTstat,
			struct DCTStatStruc *pDCTstat, u8 dct)
	u32 dev = pDCTstat->dev_dct;
	u32 reg_off = 0x100 * dct;

	if (pDCTstat->Speed >= 7) {	/* DDR1600 and above */
		/* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F10 */
		Set_NB32(dev, reg_off + 0x98, 0x0D080F10);
		val = Get_NB32(dev, reg_off + 0x9C);
		Set_NB32(dev, reg_off + 0x9C, val);
		Set_NB32(dev, reg_off + 0x98, 0x4D080F10);

		/* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F11 */
		Set_NB32(dev, reg_off + 0x98, 0x0D080F11);
		val = Get_NB32(dev, reg_off + 0x9C);
		Set_NB32(dev, reg_off + 0x9C, val);
		Set_NB32(dev, reg_off + 0x98, 0x4D080F11);

		/* Set bit13 PowerDown to register F2x[1, 0]98_x0D088F30 */
		Set_NB32(dev, reg_off + 0x98, 0x0D088F30);
		val = Get_NB32(dev, reg_off + 0x9C);
		Set_NB32(dev, reg_off + 0x9C, val);
		Set_NB32(dev, reg_off + 0x98, 0x4D088F30);

		/* Set bit13 PowerDown to register F2x[1, 0]98_x0D08CF30 */
		Set_NB32(dev, reg_off + 0x98, 0x0D08CF30);
		val = Get_NB32(dev, reg_off + 0x9C);
		Set_NB32(dev, reg_off + 0x9C, val);
		Set_NB32(dev, reg_off + 0x98, 0x4D08CF30);
/* Set F2x78[ChSetupSync] when exactly one channel has all of its
 * AddrCmdSetup/CsOdtSetup/CkeSetup bits clear and the other does not
 * (mask 0x0202020 selects those three setup bits). */
static void SyncSetting(struct DCTStatStruc *pDCTstat)
	/* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
	 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
	 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
	u32 dev = pDCTstat->dev_dct;

	cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
	cha = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;

	if ((cha != chb) && ((cha == 0) || (chb == 0))) {
		val = Get_NB32(dev, 0x78);
		val |= 1 << ChSetupSync;
		Set_NB32(dev, 0x78, val);
/* B2/B3 errata workaround after DRAM init: wait ~50us; if F2x110
 * [DramEnabled] is still 0, temporarily clear Width128, issue a dummy
 * PHY CSR read (F2x9C_x05), then set Width128 again for ganged mode. */
static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {

	u32 reg_off = 0x100 * dct;
	u32 dev = pDCTstat->dev_dct;

	if (pDCTstat->LogicalCPUID & (AMD_DR_B2 | AMD_DR_B3)) {
		mct_Wait(10000);	/* Wait 50 us*/
		val = Get_NB32(dev, 0x110);
		if (!(val & (1 << DramEnabled))) {
			/* If 50 us expires while DramEnable =0 then do the following */
			val = Get_NB32(dev, 0x90 + reg_off);
			val &= ~(1 << Width128);	/* Program Width128 = 0 */
			Set_NB32(dev, 0x90 + reg_off, val);

			val = Get_NB32_index_wait(dev, 0x98 + reg_off, 0x05);	/* Perform dummy CSR read to F2x09C_x05 */

			if (pDCTstat->GangedMode) {
				val = Get_NB32(dev, 0x90 + reg_off);
				val |= 1 << Width128;	/* Restore Width128 = 1 */
				Set_NB32(dev, 0x90 + reg_off, val);
3687 /* ==========================================================
3688 * 6-bit Bank Addressing Table
3691 * CCC=Columns-9 binary
3692 * ==========================================================
3693 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
3695 * 0000 000000 13 2 9 128MB
3696 * 0001 001000 13 2 10 256MB
3697 * 0010 001001 14 2 10 512MB
3698 * 0011 010000 13 2 11 512MB
3699 * 0100 001100 13 3 10 512MB
3700 * 0101 001101 14 3 10 1GB
3701 * 0110 010001 14 2 11 1GB
3702 * 0111 001110 15 3 10 2GB
3703 * 1000 010101 14 3 11 2GB
3704 * 1001 010110 15 3 11 4GB
3705 * 1010 001111 16 3 10 4GB
3706 * 1011 010111 16 3 11 8GB
/* Verify a DDR3 SPD's CRC: SPD_ByteUse bit 7 selects the coverage range
 * (JEDEC: bytes 0-116 or 0-125 — the range assignments are elided in
 * this view), a CRC-16 is computed over that range, and the result is
 * compared with the stored CRC in SPD bytes 126 (low) and 127 (high).
 * Returns nonzero when the CRC matches. */
u8 crcCheck(u8 smbaddr)
	byte_use = mctRead_SPD(smbaddr, SPD_ByteUse);
	if (byte_use & 0x80)
	for (Index = 0; Index < byte_use; Index ++) {
		byte = mctRead_SPD(smbaddr, Index);
		for (i=0; i<8; i++) {
	return CRC == (mctRead_SPD(smbaddr, SPD_byte_127) << 8 | mctRead_SPD(smbaddr, SPD_byte_126));