2 * This file is part of the coreboot project.
4 * Copyright (C) 2010 Advanced Micro Devices, Inc.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 of the License.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 /* Description: Main memory controller system configuration for DDR 3 */
22 /* KNOWN ISSUES - ERRATA
24 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
25 * is 1 busclock off. No fix planned. The controller is not ordinarily in
28 * 32 Byte burst not supported. No fix planned. The controller is not
29 * ordinarily in 64-bit mode.
31 * Trc precision does not use extra Jedec defined fractional component.
32 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
34 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
38 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
39 struct DCTStatStruc *pDCTstatA);
40 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
41 struct DCTStatStruc *pDCTstatA);
42 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
43 struct DCTStatStruc *pDCTstatA);
44 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
45 struct DCTStatStruc *pDCTstatA);
46 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
47 struct DCTStatStruc *pDCTstatA);
48 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
49 struct DCTStatStruc *pDCTstat);
50 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
51 struct DCTStatStruc *pDCTstat);
52 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
53 struct DCTStatStruc *pDCTstatA);
54 static u8 NodePresent_D(u8 Node);
55 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
56 struct DCTStatStruc *pDCTstatA);
57 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
58 struct DCTStatStruc *pDCTstat, u8 dct);
59 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
60 struct DCTStatStruc *pDCTstat, u8 dct);
61 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
62 struct DCTStatStruc *pDCTstat, u8 dct);
63 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
64 struct DCTStatStruc *pDCTstat);
65 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
66 struct DCTStatStruc *pDCTstat, u8 dct);
67 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
68 struct DCTStatStruc *pDCTstat, u8 dct);
69 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
70 struct DCTStatStruc *pDCTstat, u8 dct);
71 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
72 struct DCTStatStruc *pDCTstat, u8 dct);
73 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
74 struct DCTStatStruc *pDCTstat, u8 dct);
75 static u16 Get_Fk_D(u8 k);
76 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
77 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
78 struct DCTStatStruc *pDCTstat);
79 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
80 struct DCTStatStruc *pDCTstat, u8 dct);
81 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
82 struct DCTStatStruc *pDCTstat, u8 dct);
83 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
84 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
85 struct DCTStatStruc *pDCTstat, u8 dct);
86 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
87 struct DCTStatStruc *pDCTstat, u8 dct);
88 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,\
89 struct DCTStatStruc *pDCTstat, u8 dct);
90 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
91 struct DCTStatStruc *pDCTstat, u8 dct);
92 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
93 struct DCTStatStruc *pDCTstat, u8 dct);
94 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
95 struct DCTStatStruc *pDCTstat, u8 dct);
96 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
97 struct DCTStatStruc *pDCTstat, u8 dct);
98 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
99 struct DCTStatStruc *pDCTstat, u8 dct);
100 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
101 struct DCTStatStruc *pDCTstat, u8 dct);
102 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
103 struct DCTStatStruc *pDCTstat);
104 static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
105 u32 dev, u32 index_reg);
106 static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
107 u32 dev, u32 index_reg);
108 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
109 u32 dev, u32 index_reg, u32 index);
110 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
111 struct DCTStatStruc *pDCTstat);
112 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
113 u32 dev, u32 index_reg, u32 index);
114 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
115 struct DCTStatStruc *pDCTstat);
116 static void mct_init(struct MCTStatStruc *pMCTstat,
117 struct DCTStatStruc *pDCTstat);
118 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
119 struct DCTStatStruc *pDCTstat);
120 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
121 struct DCTStatStruc *pDCTstatA);
122 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
123 struct DCTStatStruc *pDCTstat, u8 dct);
124 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
125 struct DCTStatStruc *pDCTstat, u8 dct);
126 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
127 struct DCTStatStruc *pDCTstat, u8 dct);
128 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
129 struct DCTStatStruc *pDCTstat, u8 dct);
130 static u32 mct_NodePresent_D(void);
131 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
132 struct DCTStatStruc *pDCTstatA);
133 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
134 struct DCTStatStruc *pDCTstatA);
135 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
136 struct DCTStatStruc *pDCTstat);
137 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
138 struct DCTStatStruc *pDCTstat);
139 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
140 struct DCTStatStruc *pDCTstat);
141 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
142 struct DCTStatStruc *pDCTstat);
143 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
144 struct DCTStatStruc *pDCTstat);
145 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
146 struct DCTStatStruc *pDCTstatA);
147 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
148 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
149 struct DCTStatStruc *pDCTstat, u8 dct);
150 static void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
151 struct DCTStatStruc *pDCTstat, u8 dct);
152 static void mct_DramInit_Sw_D(struct MCTStatStruc *pMCTstat,
153 struct DCTStatStruc *pDCTstat, u8 dct);
154 static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
155 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct);
157 static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
158 struct DCTStatStruc *pDCTstat, u8 dimm);
159 static u32 mct_SetDramConfigMisc2(struct DCTStatStruc *pDCTstat, u8 dct, u32 misc2);
160 static void mct_BeforeDQSTrainSamp(struct DCTStatStruc *pDCTstat);
161 static void mct_WriteLevelization_HW(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstatA);
162 static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
163 struct DCTStatStruc *pDCTstat, u8 dct);
164 static void SyncSetting(struct DCTStatStruc *pDCTstat);
165 static u8 crcCheck(u8 smbaddr);
167 /*See mctAutoInitMCT header for index relationships to CL and T*/
168 static const u16 Table_F_k[] = {00,200,266,333,400,533 };
169 static const u8 Tab_BankAddr[] = {0x3F,0x01,0x09,0x3F,0x3F,0x11,0x0A,0x19,0x12,0x1A,0x21,0x22,0x23};
170 static const u8 Table_DQSRcvEn_Offset[] = {0x00,0x01,0x10,0x11,0x2};
172 /****************************************************************************
173 Describe how platform maps MemClk pins to logical DIMMs. The MemClk pins
174 are identified based on BKDG definition of Fn2x88[MemClkDis] bitmap.
175 AGESA will base on this value to disable unused MemClk to save power.
177 If MEMCLK_MAPPING contains all zeroes, AGESA will use
178 default MemClkDis setting based on package type.
181 BKDG definition of Fn2x88[MemClkDis] bitmap for AM3 package is like below:
182 Bit AM3/S1g3 pin name
192 And platform has the following routing:
193 CS0 M[B,A]_CLK_H/L[4]
194 CS1 M[B,A]_CLK_H/L[2]
195 CS2 M[B,A]_CLK_H/L[3]
196 CS3 M[B,A]_CLK_H/L[5]
199 ; CS0 CS1 CS2 CS3 CS4 CS5 CS6 CS7
200 MEMCLK_MAPPING EQU 00010000b, 00000100b, 00001000b, 00100000b, 00000000b, 00000000b, 00000000b, 00000000b
203 /* Note: If you are not sure about the pin mappings at initial stage, we dont have to disable MemClk.
204 * Set entries in the tables all 0xFF. */
205 static const u8 Tab_L1CLKDis[] = {0x20, 0x20, 0x10, 0x10, 0x08, 0x08, 0x04, 0x04};
206 static const u8 Tab_AM3CLKDis[] = {0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
207 static const u8 Tab_S1CLKDis[] = {0xA2, 0xA2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
208 static const u8 Tab_ManualCLKDis[]= {0x10, 0x04, 0x08, 0x20, 0x00, 0x00, 0x00, 0x00};
210 static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
211 static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
212 static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
213 static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};
/*
 * Top-level DDR3 init entry point. Per node: fill in PCI device handles,
 * probe node presence, then run the per-node DCT bring-up (mct_initDCT).
 * Afterwards run the system-wide phases in order: DCT-ready sync, HT memory
 * map, CPU cacheability (MTRR) typing, DQS training, UMA typing, other
 * timing, node/channel interleave, ECC init and memory clear.
 * NOTE(review): this extract is missing interior lines; comments below
 * describe only the visible statements.
 */
215 static void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
216 struct DCTStatStruc *pDCTstatA)
219 * Memory may be mapped contiguously all the way up to 4GB (depending on setup
220 * options). It is the responsibility of PCI subsystem to create an uncacheable
221 * IO region below 4GB and to adjust TOP_MEM downward prior to any IO mapping or
222 * accesses. It is the same responsibility of the CPU sub-system prior to
225 * Slot Number is an external convention, and is determined by OEM with accompanying
226 * silk screening. OEM may choose to use Slot number convention which is consistent
227 * with DIMM number conventions. All AMD engineering platforms do.
229 * Build Requirements:
230 * 1. MCT_SEG0_START and MCT_SEG0_END macros to begin and end the code segment,
231 * defined in mcti.inc.
233 * Run-Time Requirements:
234 * 1. Complete Hypertransport Bus Configuration
235 * 2. SMBus Controller Initialized
236 * 1. BSP in Big Real Mode
237 * 2. Stack at SS:SP, located somewhere between A000:0000 and F000:FFFF
238 * 3. Checksummed or Valid NVRAM bits
239 * 4. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
240 * 5. MCi_STS from shutdown/warm reset recorded (if desired) prior to entry
241 * 6. All var MTRRs reset to zero
242 * 7. State of NB_CFG.DisDatMsk set properly on all CPUs
243 * 8. All CPUs at 2Ghz Speed (unless DQS training is not installed).
244 * 9. All cHT links at max Speed/Width (unless DQS training is not installed).
247 * Global relationship between index values and item values:
249 * pDCTstat.CASL pDCTstat.Speed
251 * --------------------------
265 mctInitMemGPIOs_A_D(); /* Set any required GPIOs*/
/* Per-node loop: cache the node's PCI function handles and probe it. */
268 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
269 struct DCTStatStruc *pDCTstat;
270 pDCTstat = pDCTstatA + Node;
271 pDCTstat->Node_ID = Node;
272 pDCTstat->dev_host = PA_HOST(Node);
273 pDCTstat->dev_map = PA_MAP(Node);
274 pDCTstat->dev_dct = PA_DCT(Node);
275 pDCTstat->dev_nbmisc = PA_NBMISC(Node);
276 pDCTstat->NodeSysBase = node_sys_base;
278 mct_init(pMCTstat, pDCTstat);
279 mctNodeIDDebugPort_D();
280 pDCTstat->NodePresent = NodePresent_D(Node);
281 if (pDCTstat->NodePresent) { /* See if Node is there*/
282 clear_legacy_Mode(pMCTstat, pDCTstat);
283 pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);
285 mct_InitialMCT_D(pMCTstat, pDCTstat);
287 mctSMBhub_Init(Node); /* Switch SMBUS crossbar to proper node*/
289 mct_initDCT(pMCTstat, pDCTstat);
290 if (pDCTstat->ErrCode == SC_FatalErr) {
291 goto fatalexit; /* any fatal errors?*/
292 } else if (pDCTstat->ErrCode < SC_StopError) {
295 } /* if Node present */
/* Advance the running system base past this node's limit (16-byte aligned). */
296 node_sys_base = pDCTstat->NodeSysBase;
297 node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
299 if (NodesWmem == 0) {
300 printk(BIOS_DEBUG, "No Nodes?!\n");
/* System-wide phases, run once after all nodes were initialized. */
304 printk(BIOS_DEBUG, "mctAutoInitMCT_D: SyncDCTsReady_D\n");
305 SyncDCTsReady_D(pMCTstat, pDCTstatA); /* Make sure DCTs are ready for accesses.*/
307 printk(BIOS_DEBUG, "mctAutoInitMCT_D: HTMemMapInit_D\n");
308 HTMemMapInit_D(pMCTstat, pDCTstatA); /* Map local memory into system address space.*/
311 printk(BIOS_DEBUG, "mctAutoInitMCT_D: CPUMemTyping_D\n");
312 CPUMemTyping_D(pMCTstat, pDCTstatA); /* Map dram into WB/UC CPU cacheability */
313 mctHookAfterCPU(); /* Setup external northbridge(s) */
315 printk(BIOS_DEBUG, "mctAutoInitMCT_D: DQSTiming_D\n");
316 DQSTiming_D(pMCTstat, pDCTstatA); /* Get Receiver Enable and DQS signal timing*/
318 printk(BIOS_DEBUG, "mctAutoInitMCT_D: UMAMemTyping_D\n");
319 UMAMemTyping_D(pMCTstat, pDCTstatA); /* Fix up for UMA sizing */
321 printk(BIOS_DEBUG, "mctAutoInitMCT_D: :OtherTiming\n");
322 mct_OtherTiming(pMCTstat, pDCTstatA);
324 if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
328 InterleaveNodes_D(pMCTstat, pDCTstatA);
329 InterleaveChannels_D(pMCTstat, pDCTstatA);
331 printk(BIOS_DEBUG, "mctAutoInitMCT_D: ECCInit_D\n");
332 if (ECCInit_D(pMCTstat, pDCTstatA)) { /* Setup ECC control and ECC check-bits*/
333 printk(BIOS_DEBUG, "mctAutoInitMCT_D: MCTMemClr_D\n");
334 MCTMemClr_D(pMCTstat,pDCTstatA);
337 mct_FinalMCT_D(pMCTstat, (pDCTstatA + 0) ); /* Node 0 */
338 printk(BIOS_DEBUG, "All Done\n");
342 die("mct_d: fatalexit");
/*
 * Handle DIMM-spare reconfiguration after first-pass init, gated on the
 * NV_CS_SpareCTL NVRAM bit. In the no-warm-reset flavor
 * (MCT_DIMM_SPARE_NO_WARM) the saved DQS timings are reloaded if spare
 * setup already ran (GSB_EnDIMMSpareNW set); otherwise the data structures
 * are reset and the flag is set for a second pass. Returns nonzero when the
 * caller must issue a reset (per the call site in mctAutoInitMCT_D).
 */
345 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
346 struct DCTStatStruc *pDCTstatA)
350 if (mctGet_NVbits(NV_CS_SpareCTL)) {
351 if (MCT_DIMM_SPARE_NO_WARM) {
352 /* Do no warm-reset DIMM spare */
353 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
354 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
357 mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
358 pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
362 /* Do warm-reset DIMM spare */
363 if (mctGet_NVbits(NV_DQSTrainCTL))
/*
 * Run DQS signal training, or restore previously saved timings.
 * If NV_DQSTrainCTL is set: fence training, write levelization, receiver
 * enable training, DQS position training, ECC receiver-enable setup, then
 * save the results. Otherwise the saved values are loaded back into the
 * registers. Either path finishes with a memory clear.
 */
374 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
375 struct DCTStatStruc *pDCTstatA)
379 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
383 nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
384 /* FIXME: BOZO- DQS training every time*/
387 mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);
388 phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);
390 if (nv_DQSTrainCTL) {
391 mctHookBeforeAnyTraining(pMCTstat, pDCTstatA);
392 /* TODO: should be in mctHookBeforeAnyTraining */
/* MSRs 0x26C-0x26F written before training; purpose not visible here —
 * presumably fixed-range MTRR setup, TODO confirm against BKDG. */
393 _WRMSR(0x26C, 0x04040404, 0x04040404);
394 _WRMSR(0x26D, 0x04040404, 0x04040404);
395 _WRMSR(0x26E, 0x04040404, 0x04040404);
396 _WRMSR(0x26F, 0x04040404, 0x04040404);
397 mct_WriteLevelization_HW(pMCTstat, pDCTstatA);
399 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
401 mct_TrainDQSPos_D(pMCTstat, pDCTstatA);
403 /* Second Pass never used for Barcelona! */
404 /* TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass); */
406 mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);
408 /* FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA); */
409 mctHookAfterAnyTraining();
410 mctSaveDQSSigTmg_D();
412 MCTMemClr_D(pMCTstat, pDCTstatA);
414 mctGetDQSSigTmg_D(); /* get values into data structure */
415 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
416 /* mctDoWarmResetMemClr_D(); */
417 MCTMemClr_D(pMCTstat, pDCTstatA);
/*
 * Write saved DQS signal-timing values back into the DCT index registers
 * for every node with mapped memory: receiver-enable delays, write-
 * levelization (TxDqs) data, ECC receiver enables, per-DIMM read/write
 * data timing, and finally the MaxRdLatency field in F2x[1,0]78.
 */
421 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
422 struct DCTStatStruc *pDCTstatA)
424 u8 Node, Receiver, Channel, Dir, DIMM;
433 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
434 struct DCTStatStruc *pDCTstat;
435 pDCTstat = pDCTstatA + Node;
437 if (pDCTstat->DCTSysLimit) {
438 dev = pDCTstat->dev_dct;
439 for (Channel = 0;Channel < 2; Channel++) {
440 /* there are four receiver pairs,
441 loosely associated with chipselects.*/
442 index_reg = 0x98 + Channel * 0x100;
443 for (Receiver = 0; Receiver < 8; Receiver += 2) {
444 /* Set Receiver Enable Values */
445 mct_SetRcvrEnDly_D(pDCTstat,
447 1, /* FinalValue, From stack */
451 (Receiver >> 1) * 3 + 0x10, /* Addl_Index */
452 2); /* Pass Second Pass ? */
453 /* Restore Write levelization training data */
454 for (ByteLane = 0; ByteLane < 9; ByteLane ++) {
455 txdqs = pDCTstat->CH_D_B_TxDqs[Channel][Receiver >> 1][ByteLane];
456 index = Table_DQSRcvEn_Offset[ByteLane >> 1];
457 index += (Receiver >> 1) * 3 + 0x10 + 0x20; /* Addl_Index */
458 val = Get_NB32_index_wait(dev, 0x98 + 0x100*Channel, index);
459 if (ByteLane & 1) { /* odd byte lane */
460 val &= ~(0xFF << 16);
466 Set_NB32_index_wait(dev, 0x98 + 0x100*Channel, index, val);
470 for (Channel = 0; Channel<2; Channel++) {
471 SetEccDQSRcvrEn_D(pDCTstat, Channel);
474 for (Channel = 0; Channel < 2; Channel++) {
476 index_reg = 0x98 + Channel * 0x100;
479 * when 400, 533, 667, it will support dimm0/1/2/3,
480 * and set conf for dimm0, hw will copy to dimm1/2/3
481 * set for dimm1, hw will copy to dimm3
482 * Rev A/B only support DIMM0/1 when 800Mhz and above
483 * + 0x100 to next dimm
484 * Rev C support DIMM0/1/2/3 when 800Mhz and above
485 * + 0x100 to next dimm
487 for (DIMM = 0; DIMM < 4; DIMM++) {
489 index = 0; /* CHA Write Data Timing Low */
/* Speed index >= 4 (800 MHz+): each DIMM has its own register bank. */
491 if (pDCTstat->Speed >= 4) {
492 index = 0x100 * DIMM;
497 for (Dir = 0; Dir < 2; Dir++) {/* RD/WR */
498 p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
499 val = stream_to_int(p); /* CHA Read Data Timing High */
500 Set_NB32_index_wait(dev, index_reg, index+1, val);
501 val = stream_to_int(p+4); /* CHA Write Data Timing High */
502 Set_NB32_index_wait(dev, index_reg, index+2, val);
503 val = *(p+8); /* CHA Write ECC Timing */
504 Set_NB32_index_wait(dev, index_reg, index+3, val);
510 for (Channel = 0; Channel<2; Channel++) {
511 reg = 0x78 + Channel * 0x100;
512 val = Get_NB32(dev, reg);
514 val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
515 val &= ~(1<<DqsRcvEnTrain);
516 Set_NB32(dev, reg, val); /* program MaxRdLatency to correspond with current delay*/
/*
 * Build the HyperTransport DRAM address map: assign each present node a
 * contiguous [base, limit] range, carving out the memory hole below the
 * BottomIO boundary (hardware hole via F1xF0 DramHoleAddressReg, or a
 * software hole), then program the F1x40/F1x44 Dram Base/Limit register
 * pairs on node 0 and copy the map to all other present nodes.
 * Addresses here are right-justified to bits [39:8] (see _4GB_RJ8).
 */
522 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
523 struct DCTStatStruc *pDCTstatA)
526 u32 NextBase, BottomIO;
527 u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
528 u32 HoleSize, DramSelBaseAddr;
534 struct DCTStatStruc *pDCTstat;
536 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
538 if (pMCTstat->HoleBase == 0) {
539 DramHoleBase = mctGet_NVbits(NV_BottomIO);
541 DramHoleBase = pMCTstat->HoleBase >> (24-8);
544 BottomIO = DramHoleBase << (24-8);
547 pDCTstat = pDCTstatA + 0;
548 dev = pDCTstat->dev_map;
550 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
551 pDCTstat = pDCTstatA + Node;
552 devx = pDCTstat->dev_map;
554 pDCTstat = pDCTstatA + Node; /* ??? */
555 if (!pDCTstat->GangedMode) {
556 DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
557 /*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
558 val = pDCTstat->NodeSysLimit;
559 if ((val & 0xFF) == 0xFE) {
563 pDCTstat->DCTSysLimit = val;
566 base = pDCTstat->DCTSysBase;
567 limit = pDCTstat->DCTSysLimit;
571 DramSelBaseAddr += NextBase;
572 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);
/* Node range straddles BottomIO: create the hardware memory hole. */
575 if ((base < BottomIO) && (limit >= BottomIO)) {
577 pDCTstat->Status |= 1 << SB_HWHole;
578 pMCTstat->GStatus |= 1 << GSB_HWHole;
579 pDCTstat->DCTSysBase = base;
580 pDCTstat->DCTSysLimit = limit;
581 pDCTstat->DCTHoleBase = BottomIO;
582 pMCTstat->HoleBase = BottomIO;
583 HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
584 if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
585 base = DramSelBaseAddr;
586 val = ((base + HoleSize) >> (24-8)) & 0xFF;
587 DramHoleOffset = val;
588 val <<= 8; /* shl 16, rol 24 */
589 val |= DramHoleBase << 24;
590 val |= 1 << DramHoleValid;
591 Set_NB32(devx, 0xF0, val); /* Dram Hole Address Reg */
592 pDCTstat->DCTSysLimit += HoleSize;
593 base = pDCTstat->DCTSysBase;
594 limit = pDCTstat->DCTSysLimit;
595 } else if (base == BottomIO) {
/* Node starts exactly at BottomIO: software-remapped hole instead. */
597 pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
598 pDCTstat->Status |= 1<<SB_SWNodeHole;
599 pMCTstat->GStatus |= 1<<GSB_SoftHole;
600 pMCTstat->HoleBase = base;
604 pDCTstat->DCTSysBase = base;
605 pDCTstat->DCTSysLimit = limit;
607 /* No Remapping. Normal Contiguous mapping */
608 pDCTstat->DCTSysBase = base;
609 pDCTstat->DCTSysLimit = limit;
612 /*No Remapping. Normal Contiguous mapping*/
613 pDCTstat->DCTSysBase = base;
614 pDCTstat->DCTSysLimit = limit;
616 base |= 3; /* set WE,RE fields*/
617 pMCTstat->SysLimit = limit;
619 Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */
621 val = limit & 0xFFFF0000;
623 Set_NB32(dev, 0x44 + (Node << 3), val); /* set DstNode */
625 printk(BIOS_DEBUG, " Node: %02x base: %02x limit: %02x \n", Node, base, limit);
626 limit = pDCTstat->DCTSysLimit;
628 NextBase = (limit & 0xFFFF0000) + 0x10000;
632 /* Copy dram map from Node 0 to Node 1-7 */
633 for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
635 pDCTstat = pDCTstatA + Node;
636 devx = pDCTstat->dev_map;
638 if (pDCTstat->NodePresent) {
639 reg = 0x40; /*Dram Base 0*/
641 val = Get_NB32(dev, reg);
642 Set_NB32(devx, reg, val);
644 } while ( reg < 0x80);
646 break; /* stop at first absent Node */
650 /*Copy dram map to F1x120/124*/
651 mct_HTMemMapExt(pMCTstat, pDCTstatA);
/*
 * Start a hardware memory-clear on every present node in parallel
 * (DCTMemClr_Init_D), then wait for each one to finish
 * (DCTMemClr_Sync_D). Skipped when DQS training is disabled — the
 * warm-reset wrapper callback would handle it instead (see FIXME).
 */
654 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
655 struct DCTStatStruc *pDCTstatA)
658 /* Initiates a memory clear operation for all node. The mem clr
659 * is done in paralel. After the memclr is complete, all processors
660 * status are checked to ensure that memclr has completed.
663 struct DCTStatStruc *pDCTstat;
665 if (!mctGet_NVbits(NV_DQSTrainCTL)){
666 /* FIXME: callback to wrapper: mctDoWarmResetMemClr_D */
667 } else { /* NV_DQSTrainCTL == 1 */
668 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
669 pDCTstat = pDCTstatA + Node;
671 if (pDCTstat->NodePresent) {
672 DCTMemClr_Init_D(pMCTstat, pDCTstat);
675 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
676 pDCTstat = pDCTstatA + Node;
678 if (pDCTstat->NodePresent) {
679 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
/*
 * Kick off the hardware memory clear on one node: wait for any prior
 * clear to finish (poll MemClrBusy), then set MemClrInit. Only done when
 * the node actually has mapped memory (DCTSysLimit != 0).
 */
685 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
686 struct DCTStatStruc *pDCTstat)
692 /* Initiates a memory clear operation on one node */
693 if (pDCTstat->DCTSysLimit) {
694 dev = pDCTstat->dev_dct;
698 val = Get_NB32(dev, reg);
699 } while (val & (1 << MemClrBusy));
701 val |= (1 << MemClrInit);
702 Set_NB32(dev, reg, val);
/*
 * Wait until the hardware memory clear has completed on every present
 * node. Mirrors the second half of MCTMemClr_D; no-op when DQS training
 * is disabled (warm-reset wrapper path).
 */
706 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
707 struct DCTStatStruc *pDCTstatA)
709 /* Ensures that memory clear has completed on all node.*/
711 struct DCTStatStruc *pDCTstat;
713 if (!mctGet_NVbits(NV_DQSTrainCTL)){
714 /* callback to wrapper: mctDoWarmResetMemClr_D */
715 } else { /* NV_DQSTrainCTL == 1 */
716 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
717 pDCTstat = pDCTstatA + Node;
719 if (pDCTstat->NodePresent) {
720 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
/*
 * Block until one node's memory clear is done: poll MemClrBusy clear,
 * then poll Dr_MemClrStatus set. Afterwards program F2x11C with the
 * BKDG-recommended value plus FlushWrOnStpGnt for S3 support.
 */
726 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
727 struct DCTStatStruc *pDCTstat)
730 u32 dev = pDCTstat->dev_dct;
733 /* Ensure that a memory clear operation has completed on one node */
734 if (pDCTstat->DCTSysLimit){
738 val = Get_NB32(dev, reg);
739 } while (val & (1 << MemClrBusy));
742 val = Get_NB32(dev, reg);
743 } while (!(val & (1 << Dr_MemClrStatus)));
746 val = 0x0FE40FC0; /* BKDG recommended */
747 val |= MCCH_FlushWrOnStpGnt; /* Set for S3 */
748 Set_NB32(dev, 0x11C, val);
/*
 * Probe whether CPU node <Node> exists: compare the host bridge's
 * vendor/device ID dword against the expected value from
 * mct_NodePresent_D(), allow an OEM override hook, then verify the
 * Node ID register (F0x60). Returns the presence result in <ret>.
 */
751 static u8 NodePresent_D(u8 Node)
754 * Determine if a single Hammer Node exists within the network.
761 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
762 val = Get_NB32(dev, 0);
763 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
764 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
765 if (oemNodePresent_D(Node, &ret))
767 /* Node ID register */
768 val = Get_NB32(dev, 0x60);
771 if (val == dword) /* current nodeID = requested nodeID ? */
/*
 * Initialize one DRAM controller (DCT) on a node: enable DDR3 mode, then
 * run the init pipeline — DIMM presence, SPD width calc, auto cycle
 * timing, auto config, platform-specific config — each stage gated on the
 * previous one not reporting a stop-class error. On success the DCT state
 * machine is started (StartupDCT_D); on the failure path the DRAM
 * interface is disabled and all MemClk outputs are gated to save power.
 */
778 static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
781 * Initialize DRAM on single Athlon 64/Opteron Node.
786 ClearDCT_D(pMCTstat, pDCTstat, dct);
787 stopDCTflag = 1; /*preload flag with 'disable' */
788 /* enable DDR3 support */
789 val = Get_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100);
790 val |= 1 << Ddr3Mode;
791 Set_NB32(pDCTstat->dev_dct, 0x94 + dct * 0x100, val);
792 if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
793 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_DIMMPresence Done\n");
794 if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
795 printk(BIOS_DEBUG, "\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
796 if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
797 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoCycTiming_D Done\n");
798 if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
799 printk(BIOS_DEBUG, "\t\tDCTInit_D: AutoConfig_D Done\n");
800 if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
801 printk(BIOS_DEBUG, "\t\tDCTInit_D: PlatformSpec_D Done\n");
803 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
804 printk(BIOS_DEBUG, "\t\tDCTInit_D: StartupDCT_D\n");
805 StartupDCT_D(pMCTstat, pDCTstat, dct); /*yeaahhh! */
/* Error/empty-DCT path: turn the DRAM interface off entirely. */
814 u32 reg_off = dct * 0x100;
815 val = 1<<DisDramInterface;
816 Set_NB32(pDCTstat->dev_dct, reg_off+0x94, val);
817 /*To maximize power savings when DisDramInterface=1b,
818 all of the MemClkDis bits should also be set.*/
820 Set_NB32(pDCTstat->dev_dct, reg_off+0x88, val);
822 /* mct_EnDllShutdownSR */
/*
 * Wait for every node's DCTs to report ready (mct_SyncDCTsReady), then
 * re-enable the phy compensation engine (clear DisAutoComp in the DCT
 * index-0x08 phy register) on each node that has valid DIMMs. The 750 us
 * settle delay noted below must elapse before the first memory access.
 */
826 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
827 struct DCTStatStruc *pDCTstatA)
829 /* Wait (and block further access to dram) for all DCTs to be ready,
830 * by polling all InitDram bits and waiting for possible memory clear
831 * operations to be complete. Read MemClkFreqVal bit to see if
832 * the DIMMs are present in this node.
837 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
838 struct DCTStatStruc *pDCTstat;
839 pDCTstat = pDCTstatA + Node;
840 mct_SyncDCTsReady(pDCTstat);
843 /* re-enable phy compensation engine when dram init is completed on all nodes. */
844 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
845 struct DCTStatStruc *pDCTstat;
846 pDCTstat = pDCTstatA + Node;
847 if (pDCTstat->NodePresent) {
848 if (pDCTstat->DIMMValidDCT[0] > 0 || pDCTstat->DIMMValidDCT[1] > 0) {
849 /* re-enable phy compensation engine when dram init on both DCTs is completed. */
850 val = Get_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8);
851 val &= ~(1 << DisAutoComp);
852 Set_NB32_index_wait(pDCTstat->dev_dct, 0x98, 0x8, val);
856 /* wait 750us before any memory access can be made. */
/*
 * Start one DCT's DRAM init state machine if DIMMs are present
 * (MemClkFreqVal set in F2x[1,0]94). Wraps mct_DramInit with the
 * before/after hooks; skipped on the no-warm-reset DIMM-spare pass.
 */
860 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
861 struct DCTStatStruc *pDCTstat, u8 dct)
863 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
864 * If the DIMMs are present then set the DRAM Enable bit for this node.
866 * Setting dram init starts up the DCT state machine, initializes the
867 * dram devices with MRS commands, and kicks off any
868 * HW memory clear process that the chip is capable of. The sooner
869 * that dram init is set for all nodes, the faster the memory system
870 * initialization can complete. Thus, the init loop is unrolled into
871 * two loops so as to start the processeses for non BSP nodes sooner.
872 * This procedure will not wait for the process to finish.
873 * Synchronization is handled elsewhere.
877 u32 reg_off = dct * 0x100;
879 dev = pDCTstat->dev_dct;
880 val = Get_NB32(dev, 0x94 + reg_off);
881 if (val & (1<<MemClkFreqVal)) {
882 mctHookBeforeDramInit(); /* generalized Hook */
883 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)))
884 mct_DramInit(pMCTstat, pDCTstat, dct);
885 AfterDramInit_D(pDCTstat, dct);
886 mctHookAfterDramInit(); /* generalized Hook*/
/*
 * Zero the DCT configuration register range for one controller
 * (F2x[1,0]40 upward). On the no-warm-reset DIMM-spare pass only the
 * range up to 0x78 is cleared (to preserve trained values); otherwise
 * the full range up to 0xA4 is cleared. A final write goes through
 * dev_map (the F1 map function).
 */
890 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
891 struct DCTStatStruc *pDCTstat, u8 dct)
894 u32 dev = pDCTstat->dev_dct;
895 u32 reg = 0x40 + 0x100 * dct;
898 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
899 reg_end = 0x78 + 0x100 * dct;
901 reg_end = 0xA4 + 0x100 * dct;
904 while(reg < reg_end) {
905 Set_NB32(dev, reg, val);
910 dev = pDCTstat->dev_map;
912 Set_NB32(dev, reg, val);
/*
 * Derive the secondary DRAM cycle timings from SPD and program them.
 * Phase 1: scan all valid DIMMs on the channel, read each timing's
 * minimum from SPD in medium-timebase (MTB) units scaled x16 for 0.25 ns
 * resolution, and keep the per-channel worst case (Trp, Trrd, Trcd, Trtp,
 * Twr, Twtr, Tras, Trc, Trfc-per-DIMM, Tfaw).
 * Phase 2: convert each worst-case time to bus clocks (divide by tCK16x,
 * round up on remainder) and clamp to the register Min/Max limits.
 * Phase 3: subtract the register bias from each value and pack the fields
 * into DramTimingLo/DramTimingHi plus F2x84 (Twr) and F2x94 (Tfaw),
 * then write the F2x88/F2x8C timing registers.
 */
915 static void SPD2ndTiming(struct MCTStatStruc *pMCTstat,
916 struct DCTStatStruc *pDCTstat, u8 dct)
920 u16 Trp, Trrd, Trcd, Tras, Trc;
923 u32 DramTimingLo, DramTimingHi;
935 /* Gather all DIMM mini-max values for cycle timing data */
944 for (i=0; i < 4; i++)
948 for ( i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
950 if (pDCTstat->DIMMValid & (1 << i)) {
951 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
953 val = mctRead_SPD(smbaddr, SPD_MTBDivisor); /* MTB=Dividend/Divisor */
954 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
955 MTB16x /= val; /* transfer to MTB*16 */
957 byte = mctRead_SPD(smbaddr, SPD_tRPmin);
962 byte = mctRead_SPD(smbaddr, SPD_tRRDmin);
967 byte = mctRead_SPD(smbaddr, SPD_tRCDmin);
972 byte = mctRead_SPD(smbaddr, SPD_tRTPmin);
977 byte = mctRead_SPD(smbaddr, SPD_tWRmin);
982 byte = mctRead_SPD(smbaddr, SPD_tWTRmin);
/* tRC/tRAS carry their upper nibbles in SPD_Upper_tRAS_tRC. */
987 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xFF;
990 val |= mctRead_SPD(smbaddr, SPD_tRCmin) & 0xFF;
995 byte = mctRead_SPD(smbaddr, SPD_Density) & 0xF;
996 if (Trfc[LDIMM] < byte)
999 val = mctRead_SPD(smbaddr, SPD_Upper_tRAS_tRC) & 0xF;
1001 val |= (mctRead_SPD(smbaddr, SPD_tRASmin) & 0xFF);
1006 val = mctRead_SPD(smbaddr, SPD_Upper_tFAW) & 0xF;
1008 val |= mctRead_SPD(smbaddr, SPD_tFAWmin) & 0xFF;
1012 } /* Dimm Present */
1015 /* Convert DRAM CycleTiming values and store into DCT structure */
1016 byte = pDCTstat->DIMMAutoSpeed;
1027 1. All secondary time values given in SPDs are in binary with units of ns.
1028 2. Some time values are scaled by 16, in order to have least count of 0.25 ns
1029 (more accuracy). JEDEC SPD spec. shows which ones are x1 and x4.
1030 3. Internally to this SW, cycle time, tCK16x, is scaled by 16 to match time values
/* Tras: convert to clocks, round up, clamp to [Min_TrasT, Max_TrasT]. */
1034 pDCTstat->DIMMTras = (u16)Tras;
1035 val = Tras / tCK16x;
1036 if (Tras % tCK16x) { /* round up number of busclocks */
1039 if (val < Min_TrasT)
1041 else if (val > Max_TrasT)
1043 pDCTstat->Tras = val;
/* Trp: same convert/round/clamp pattern (as for all values below). */
1046 pDCTstat->DIMMTrp = Trp;
1048 if (Trp % tCK16x) { /* round up number of busclocks */
1053 else if (val > Max_TrpT)
1055 pDCTstat->Trp = val;
1058 pDCTstat->DIMMTrrd = Trrd;
1059 val = Trrd / tCK16x;
1060 if (Trrd % tCK16x) { /* round up number of busclocks */
1063 if (val < Min_TrrdT)
1065 else if (val > Max_TrrdT)
1067 pDCTstat->Trrd = val;
1070 pDCTstat->DIMMTrcd = Trcd;
1071 val = Trcd / tCK16x;
1072 if (Trcd % tCK16x) { /* round up number of busclocks */
1075 if (val < Min_TrcdT)
1077 else if (val > Max_TrcdT)
1079 pDCTstat->Trcd = val;
1082 pDCTstat->DIMMTrc = Trc;
1084 if (Trc % tCK16x) { /* round up number of busclocks */
1089 else if (val > Max_TrcT)
1091 pDCTstat->Trc = val;
1094 pDCTstat->DIMMTrtp = Trtp;
1095 val = Trtp / tCK16x;
1096 if (Trtp % tCK16x) {
1099 if (val < Min_TrtpT)
1101 else if (val > Max_TrtpT)
1103 pDCTstat->Trtp = val;
1106 pDCTstat->DIMMTwr = Twr;
1108 if (Twr % tCK16x) { /* round up number of busclocks */
1113 else if (val > Max_TwrT)
1115 pDCTstat->Twr = val;
1118 pDCTstat->DIMMTwtr = Twtr;
1119 val = Twtr / tCK16x;
1120 if (Twtr % tCK16x) { /* round up number of busclocks */
1123 if (val < Min_TwtrT)
1125 else if (val > Max_TwtrT)
1127 pDCTstat->Twtr = val;
1131 pDCTstat->Trfc[i] = Trfc[i];
1134 pDCTstat->DIMMTfaw = Tfaw;
1135 val = Tfaw / tCK16x;
1136 if (Tfaw % tCK16x) { /* round up number of busclocks */
1139 if (val < Min_TfawT)
1141 else if (val > Max_TfawT)
1143 pDCTstat->Tfaw = val;
1145 mctAdjustAutoCycTmg_D();
1147 /* Program DRAM Timing values */
1148 DramTimingLo = 0; /* Dram Timing Low init */
1149 val = pDCTstat->CASL - 2; /* pDCTstat.CASL to reg. definition */
1150 DramTimingLo |= val;
1152 val = pDCTstat->Trcd - Bias_TrcdT;
1153 DramTimingLo |= val<<4;
1155 val = pDCTstat->Trp - Bias_TrpT;
1156 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1157 DramTimingLo |= val<<7;
1159 val = pDCTstat->Trtp - Bias_TrtpT;
1160 DramTimingLo |= val<<10;
1162 val = pDCTstat->Tras - Bias_TrasT;
1163 DramTimingLo |= val<<12;
1165 val = pDCTstat->Trc - Bias_TrcT;
1166 DramTimingLo |= val<<16;
1168 val = pDCTstat->Trrd - Bias_TrrdT;
1169 DramTimingLo |= val<<22;
1171 DramTimingHi = 0; /* Dram Timing High init */
1172 val = pDCTstat->Twtr - Bias_TwtrT;
1173 DramTimingHi |= val<<8;
1176 DramTimingHi |= val<<16;
1183 DramTimingHi |= val << 20;
1185 dev = pDCTstat->dev_dct;
1186 reg_off = 0x100 * dct;
1188 val = pDCTstat->Twr;
1193 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1196 dword = Get_NB32(dev, 0x84 + reg_off);
1199 Set_NB32(dev, 0x84 + reg_off, dword);
1202 val = pDCTstat->Tfaw;
1203 val = mct_AdjustSPDTimings(pMCTstat, pDCTstat, val);
1207 dword = Get_NB32(dev, 0x94 + reg_off);
1208 dword &= ~0xf0000000;
1210 Set_NB32(dev, 0x94 + reg_off, dword);
1212 /* dev = pDCTstat->dev_dct; */
1213 /* reg_off = 0x100 * dct; */
1215 if (pDCTstat->Speed > 4) {
1216 val = Get_NB32(dev, 0x88 + reg_off);
1218 DramTimingLo |= val;
1220 Set_NB32(dev, 0x88 + reg_off, DramTimingLo); /*DCT Timing Low*/
1222 if (pDCTstat->Speed > 4) {
1223 DramTimingLo |= 1 << DisAutoRefresh;
1225 DramTimingHi |= 0x000018FF;
1226 Set_NB32(dev, 0x8c + reg_off, DramTimingHi); /*DCT Timing Hi*/
1228 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
/*
 * AutoCycTiming_D: program DCT timing registers for one DCT (dct) from DIMM
 * SPD data. If the speed has not been resolved yet (Speed == 0), it first
 * derives the best memclock/CAS-latency pair via GetPresetmaxF_D and
 * SPDGetTCL_D, then applies secondary timings via SPD2ndTiming.
 * Returns pDCTstat->ErrCode.
 * NOTE(review): this listing is sparse — intermediate source lines are
 * missing here, so intermediate logic (e.g. closing braces, else-arms)
 * is not visible.
 */
1231 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
1232 struct DCTStatStruc *pDCTstat, u8 dct)
1234 /* Initialize DCT Timing registers as per DIMM SPD.
1235 * For primary timing (T, CL) use best case T value.
1236 * For secondary timing params., use most aggressive settings
1239 * There are three components to determining "maximum frequency":
1240 * SPD component, Bus load component, and "Preset" max frequency
1243 * The SPD component is a function of the min cycle time specified
1244 * by each DIMM, and the interaction of cycle times from all DIMMs
1245 * in conjunction with CAS latency. The SPD component only applies
1246 * when user timing mode is 'Auto'.
1248 * The Bus load component is a limiting factor determined by electrical
1249 * characteristics on the bus as a result of varying number of device
1250 * loads. The Bus load component is specific to each platform but may
1251 * also be a function of other factors. The bus load component only
1252 * applies when user timing mode is 'Auto'.
1254 * The Preset component is subdivided into three items and is
1255 * the minimum of the set: Silicon revision, user limit
1256 * setting when user timing mode is 'Auto' and memclock mode
1257 * is 'Limit', OEM build specification of the maximum
1258 * frequency. The Preset component is only applies when user
1259 * timing mode is 'Auto'.
1262 /* Get primary timing (CAS Latency and Cycle Time) */
1263 if (pDCTstat->Speed == 0) {
1264 mctGet_MaxLoadFreq(pDCTstat);
1266 /* and Factor in presets (setup options, Si cap, etc.) */
1267 GetPresetmaxF_D(pMCTstat, pDCTstat);
1269 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
1270 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
1271 /* skip callback mctForce800to1067_D */
/* Speed/CASL were resolved above; cache them in the primary fields. */
1272 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
1273 pDCTstat->CASL = pDCTstat->DIMMCASL;
1276 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
1278 SPD2ndTiming(pMCTstat, pDCTstat, dct);
1280 printk(BIOS_DEBUG, "AutoCycTiming: Status %x\n", pDCTstat->Status);
1281 printk(BIOS_DEBUG, "AutoCycTiming: ErrStatus %x\n", pDCTstat->ErrStatus);
1282 printk(BIOS_DEBUG, "AutoCycTiming: ErrCode %x\n", pDCTstat->ErrCode);
1283 printk(BIOS_DEBUG, "AutoCycTiming: Done\n\n");
1285 mctHookAfterAutoCycTmg();
1287 return pDCTstat->ErrCode;
/*
 * GetPresetmaxF_D: clamp pDCTstat->PresetmaxFreq to the minimum of:
 * the CPU silicon-revision limit, the user "limit"-mode setting
 * (NV_MCTUSRTMGMODE == 1), and the platform limit (NV_MAX_MEMCLK).
 * NOTE(review): sparse listing — some lines between the visible ones
 * are not shown here.
 */
1290 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
1291 struct DCTStatStruc *pDCTstat)
1293 /* Get max frequency from OEM platform definition, from any user
1294 * override (limiting) of max frequency, and from any Si Revision
1295 * Specific information. Return the least of these three in
1296 * DCTStatStruc.PresetmaxFreq.
1301 /* Get CPU Si Revision defined limit (NPT) */
1302 proposedFreq = 533; /* Rev F0 programmable max memclock is */
1304 /*Get User defined limit if "limit" mode */
1305 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
1306 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
1307 if (word < proposedFreq)
1308 proposedFreq = word;
1310 /* Get Platform defined limit */
1311 word = mctGet_NVbits(NV_MAX_MEMCLK);
1312 if (word < proposedFreq)
1313 proposedFreq = word;
/* Take the lesser of the current preset and the proposed frequency. */
1315 word = pDCTstat->PresetmaxFreq;
1316 if (word > proposedFreq)
1317 word = proposedFreq;
1319 pDCTstat->PresetmaxFreq = word;
1321 /* Check F3xE8[DdrMaxRate] for maximum DRAM data rate support */
/*
 * SPDGetTCL_D: walk all valid DIMMs on the channel and compute the best
 * common cycle time (tCK) and CAS latency per the JEDEC DDR3 SPD
 * algorithm (steps 1-6 in the inline comments). All tCK/tAA values are
 * scaled by 16 (suffix "16x") to keep 1/16 ns precision in integer math.
 * Results go to pDCTstat->DIMMAutoSpeed / DIMMCASL / TargetFreq /
 * TargetCASL.
 * NOTE(review): sparse listing — loop bodies and closing braces are
 * partially missing from this view.
 */
1324 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
1325 struct DCTStatStruc *pDCTstat, u8 dct)
1327 /* Find the best T and CL primary timing parameter pair, per Mfg.,
1328 * for the given set of DIMMs, and store into DCTStatStruc
1329 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
1330 * index values and item values" for definition of CAS latency
1331 * index (j) and Frequency index (k).
1333 u8 i, CASLatLow, CASLatHigh;
1338 u8 CLactual, CLdesired, CLT_Fail;
1340 u8 smbaddr, byte, bytex;
1348 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
1349 if (pDCTstat->DIMMValid & (1 << i)) {
1350 smbaddr = Get_DIMMAddress_D(pDCTstat, (dct + i));
1351 /* Step 1: Determine the common set of supported CAS Latency
1352 * values for all modules on the memory channel using the CAS
1353 * Latencies Supported in SPD bytes 14 and 15.
1355 byte = mctRead_SPD(smbaddr, SPD_CASLow);
1357 byte = mctRead_SPD(smbaddr, SPD_CASHigh);
1359 /* Step 2: Determine tAAmin(all) which is the largest tAAmin
1360 value for all modules on the memory channel (SPD byte 16). */
/* MTB (medium timebase) = dividend/divisor in ns, scaled by 16. */
1361 byte = mctRead_SPD(smbaddr, SPD_MTBDivisor);
1363 MTB16x = ((mctRead_SPD(smbaddr, SPD_MTBDividend) & 0xFF)<<4);
1364 MTB16x /= byte; /* transfer to MTB*16 */
1366 byte = mctRead_SPD(smbaddr, SPD_tAAmin);
1367 if (tAAmin16x < byte * MTB16x)
1368 tAAmin16x = byte * MTB16x;
1369 /* Step 3: Determine tCKmin(all) which is the largest tCKmin
1370 value for all modules on the memory channel (SPD byte 12). */
1371 byte = mctRead_SPD(smbaddr, SPD_tCKmin);
1373 if (tCKmin16x < byte * MTB16x)
1374 tCKmin16x = byte * MTB16x;
1377 /* calculate tCKproposed16x */
/* 16000/freq(MHz) = tCK in ns * 16 (e.g. 16000/400 = 40 = 2.5ns*16). */
1378 tCKproposed16x = 16000 / pDCTstat->PresetmaxFreq;
1379 if (tCKmin16x > tCKproposed16x)
1380 tCKproposed16x = tCKmin16x;
1382 /* mctHookTwo1333DimmOverride(); */
1383 /* For UDIMM, if there are two DDR3-1333 on the same channel,
1384 downgrade DDR speed to 1066. */
1386 /* TODO: get user manual tCK16x(Freq.) and overwrite current tCKproposed16x if manual. */
/* Map tCK*16 to frequency index: 20->DDR3-1600(7), 24->1333(6),
 * 30->1066(5), else 40->800(4); round tCK up to the JEDEC grade. */
1387 if (tCKproposed16x == 20)
1388 pDCTstat->TargetFreq = 7;
1389 else if (tCKproposed16x <= 24) {
1390 pDCTstat->TargetFreq = 6;
1391 tCKproposed16x = 24;
1393 else if (tCKproposed16x <= 30) {
1394 pDCTstat->TargetFreq = 5;
1395 tCKproposed16x = 30;
1398 pDCTstat->TargetFreq = 4;
1399 tCKproposed16x = 40;
1401 /* Running through this loop twice:
1402 - First time find tCL at target frequency
1403 - Second tim find tCL at 400MHz */
1407 /* Step 4: For a proposed tCK value (tCKproposed) between tCKmin(all) and tCKmax,
1408 determine the desired CAS Latency. If tCKproposed is not a standard JEDEC
1409 value (2.5, 1.875, 1.5, or 1.25 ns) then tCKproposed must be adjusted to the
1410 next lower standard tCK value for calculating CLdesired.
1411 CLdesired = ceiling ( tAAmin(all) / tCKproposed )
1412 where tAAmin is defined in Byte 16. The ceiling function requires that the
1413 quotient be rounded up always. */
1414 CLdesired = tAAmin16x / tCKproposed16x;
1415 if (tAAmin16x % tCKproposed16x)
1417 /* Step 5: Chose an actual CAS Latency (CLactual) that is greather than or equal
1418 to CLdesired and is supported by all modules on the memory channel as
1419 determined in step 1. If no such value exists, choose a higher tCKproposed
1420 value and repeat steps 4 and 5 until a solution is found. */
/* Bit i of the combined CAS mask corresponds to CL = i + 4. */
1421 for (i = 0, CLactual = 4; i < 15; i++, CLactual++) {
1422 if ((CASLatHigh << 8 | CASLatLow) & (1 << i)) {
1423 if (CLdesired <= CLactual)
1429 /* Step 6: Once the calculation of CLactual is completed, the BIOS must also
1430 verify that this CAS Latency value does not exceed tAAmax, which is 20 ns
1431 for all DDR3 speed grades, by multiplying CLactual times tCKproposed. If
1432 not, choose a lower CL value and repeat steps 5 and 6 until a solution is found. */
/* 320 = 20 ns * 16 (the same 1/16 ns scaling). */
1433 if (CLactual * tCKproposed16x > 320)
1437 bytex = CLactual - 2;
1438 if (tCKproposed16x == 20)
1440 else if (tCKproposed16x == 24)
1442 else if (tCKproposed16x == 30)
1447 /* mctHookManualCLOverride */
1451 if (tCKproposed16x != 40) {
1452 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
1453 pDCTstat->DIMMAutoSpeed = byte;
1454 pDCTstat->DIMMCASL = bytex;
1457 pDCTstat->TargetCASL = bytex;
/* Force a second pass at 400 MHz (tCK*16 == 40). */
1458 tCKproposed16x = 40;
1461 pDCTstat->DIMMAutoSpeed = byte;
1462 pDCTstat->DIMMCASL = bytex;
1467 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMCASL %x\n", pDCTstat->DIMMCASL);
1468 printk(BIOS_DEBUG, "SPDGetTCL_D: DIMMAutoSpeed %x\n", pDCTstat->DIMMAutoSpeed);
1470 printk(BIOS_DEBUG, "SPDGetTCL_D: Status %x\n", pDCTstat->Status);
1471 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrStatus %x\n", pDCTstat->ErrStatus);
1472 printk(BIOS_DEBUG, "SPDGetTCL_D: ErrCode %x\n", pDCTstat->ErrCode);
1473 printk(BIOS_DEBUG, "SPDGetTCL_D: Done\n\n");
/*
 * PlatformSpec_D: apply platform-specific DRAM configuration for one DCT:
 * fetch platform config (both DCTs when ganged), enable 2T command mode
 * in Dram Configuration Hi when requested, then run the per-CPU
 * mct_PlatformSpec hook and (at DDR3-800, DIMMAutoSpeed == 4) the PHY
 * compensation init. Returns pDCTstat->ErrCode.
 */
1476 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
1477 struct DCTStatStruc *pDCTstat, u8 dct)
1483 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
1485 if (pDCTstat->GangedMode == 1) {
/* Ganged mode: DCT1 mirrors DCT0, so fetch its config too. */
1486 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
1489 if ( pDCTstat->_2Tmode == 2) {
1490 dev = pDCTstat->dev_dct;
1491 reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
1492 val = Get_NB32(dev, reg);
1493 val |= 1 << 20; /* 2T CMD mode */
1494 Set_NB32(dev, reg, val);
1497 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
1498 if (pDCTstat->DIMMAutoSpeed == 4)
1499 InitPhyCompensation(pMCTstat, pDCTstat, dct);
1500 mctHookAfterPSCfg();
1502 return pDCTstat->ErrCode;
/*
 * AutoConfig_D: build and write the main DRAM controller registers for
 * one DCT: Dram Control (F2x78), Dram Timing Low (F2x88), Dram Config
 * Misc/Misc2 (F2xA0/A8), Dram Config Lo/Hi (F2x90/94). Calls
 * SPDSetBanks_D / StitchMemory_D / InterleaveBanks_D first to map
 * chip-selects. Returns pDCTstat->ErrCode.
 * NOTE(review): sparse listing — several variable declarations,
 * initializers, and else-arms are missing from this view.
 */
1505 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
1506 struct DCTStatStruc *pDCTstat, u8 dct)
1508 u32 DramControl, DramTimingLo, Status;
1509 u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
1520 DramConfigMisc2 = 0;
1522 /* set bank addessing and Masks, plus CS pops */
1523 SPDSetBanks_D(pMCTstat, pDCTstat, dct);
1524 if (pDCTstat->ErrCode == SC_StopError)
1525 goto AutoConfig_exit;
1527 /* map chip-selects into local address space */
1528 StitchMemory_D(pMCTstat, pDCTstat, dct);
1529 InterleaveBanks_D(pMCTstat, pDCTstat, dct);
1531 /* temp image of status (for convenience). RO usage! */
1532 Status = pDCTstat->Status;
1534 dev = pDCTstat->dev_dct;
1535 reg_off = 0x100 * dct;
1538 /* Build Dram Control Register Value */
1539 DramConfigMisc2 = Get_NB32 (dev, 0xA8 + reg_off); /* Dram Control*/
1540 DramControl = Get_NB32 (dev, 0x78 + reg_off); /* Dram Control*/
1542 /* FIXME: Skip mct_checkForDxSupport */
1543 /* REV_CALL mct_DoRdPtrInit if not Dx */
1544 if (pDCTstat->LogicalCPUID & AMD_DR_Bx)
1548 DramControl &= ~0xFF;
1549 DramControl |= val; /* RdPrtInit = 6 for Cx CPU */
1551 if (mctGet_NVbits(NV_CLKHZAltVidC3))
1552 DramControl |= 1<<16; /* check */
1554 DramControl |= 0x00002A00;
1556 /* FIXME: Skip for Ax versions */
1557 /* callback not required - if (!mctParityControl_D()) */
1558 if (Status & (1 << SB_128bitmode))
1559 DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */
1564 if (pDCTstat->Dimmx4Present & (1 << word))
1565 DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
1571 if (!(Status & (1 << SB_Registered)))
1572 DramConfigLo |= 1 << UnBuffDimm; /* Unbufferd DIMMs */
/* ECC is enabled only when HW is capable, DIMMs are ECC, and user asked. */
1574 if (mctGet_NVbits(NV_ECC_CAP))
1575 if (Status & (1 << SB_ECCDIMMs))
1576 if ( mctGet_NVbits(NV_ECC))
1577 DramConfigLo |= 1 << DimmEcEn;
1579 DramConfigLo = mct_DisDllShutdownSR(pMCTstat, pDCTstat, DramConfigLo, dct);
1581 /* Build Dram Config Hi Register Value */
1582 dword = pDCTstat->Speed;
1583 DramConfigHi |= dword - 1; /* get MemClk encoding */
1584 DramConfigHi |= 1 << MemClkFreqVal;
1586 if (Status & (1 << SB_Registered))
1587 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
1588 /* set only if x8 Registered DIMMs in System*/
1589 DramConfigHi |= 1 << RDqsEn;
1591 if (mctGet_NVbits(NV_CKE_CTL))
1592 /*Chip Select control of CKE*/
1593 DramConfigHi |= 1 << 16;
1595 /* Control Bank Swizzle */
1596 if (0) /* call back not needed mctBankSwizzleControl_D()) */
1597 DramConfigHi &= ~(1 << BankSwizzleMode);
1599 DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */
1601 /* Check for Quadrank DIMM presence */
1602 if ( pDCTstat->DimmQRPresent != 0) {
1603 byte = mctGet_NVbits(NV_4RANKType);
1605 DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
1607 DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
1610 if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
1611 val = mctGet_NVbits(NV_BYPMAX);
1613 val = 0x0f; /* recommended setting (default) */
1614 DramConfigHi |= val << 24;
1616 if (pDCTstat->LogicalCPUID & (AMD_DR_Cx | AMD_DR_Bx))
1617 DramConfigHi |= 1 << DcqArbBypassEn;
1619 /* Build MemClkDis Value from Dram Timing Lo and
1620 Dram Config Misc Registers
1621 1. We will assume that MemClkDis field has been preset prior to this
1623 2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
1624 NV_AllMemClks <>0 AND SB_DiagClks ==0 */
1626 /* Dram Timing Low (owns Clock Enable bits) */
1627 DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
1628 if (mctGet_NVbits(NV_AllMemClks) == 0) {
1629 /* Special Jedec SPD diagnostic bit - "enable all clocks" */
1630 if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
/* Select the package-specific MemClkDis mapping table. */
1633 p = Tab_ManualCLKDis;
1636 byte = mctGet_NVbits(NV_PACK_TYPE);
1639 else if (byte == PT_M2 || byte == PT_AS)
1646 while(dword < MAX_CS_SUPPORTED) {
1647 if (pDCTstat->CSPresent & (1<<dword)){
1648 /* re-enable clocks for the enabled CS */
1654 DramTimingLo |= byte << 24;
1658 printk(BIOS_DEBUG, "AutoConfig_D: DramControl: %x\n", DramControl);
1659 printk(BIOS_DEBUG, "AutoConfig_D: DramTimingLo: %x\n", DramTimingLo);
1660 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc: %x\n", DramConfigMisc);
1661 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigMisc2: %x\n", DramConfigMisc2);
1662 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigLo: %x\n", DramConfigLo);
1663 printk(BIOS_DEBUG, "AutoConfig_D: DramConfigHi: %x\n", DramConfigHi);
1665 /* Write Values to the registers */
1666 Set_NB32(dev, 0x78 + reg_off, DramControl);
1667 Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
1668 Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
1669 DramConfigMisc2 = mct_SetDramConfigMisc2(pDCTstat, dct, DramConfigMisc2);
1670 Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
1671 Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
1672 ProgDramMRSReg_D(pMCTstat, pDCTstat, dct);
/* Merge with current F2x94 contents; the per-CPU hook does the write. */
1673 dword = Get_NB32(dev, 0x94 + reg_off);
1674 DramConfigHi |= dword;
1675 mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
1676 mct_EarlyArbEn_D(pMCTstat, pDCTstat);
1677 mctHookAfterAutoCfg();
1679 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1681 printk(BIOS_DEBUG, "AutoConfig: Status %x\n", pDCTstat->Status);
1682 printk(BIOS_DEBUG, "AutoConfig: ErrStatus %x\n", pDCTstat->ErrStatus);
1683 printk(BIOS_DEBUG, "AutoConfig: ErrCode %x\n", pDCTstat->ErrCode);
1684 printk(BIOS_DEBUG, "AutoConfig: Done\n\n");
1686 return pDCTstat->ErrCode;
/*
 * SPDSetBanks_D: read rows/columns/banks/ranks from each valid DIMM's
 * SPD, program the DCT bank-addressing register (F2x80) and the CS mask
 * registers (F2x60..6C), and build the chip-select population map in
 * pDCTstat->CSPresent. Sets SC_StopError if no usable chip-select
 * remains.
 * NOTE(review): sparse listing — several lines (e.g. the Get_NB32 that
 * loads `val` before line 1776) are missing from this view.
 */
1689 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
1690 struct DCTStatStruc *pDCTstat, u8 dct)
1692 /* Set bank addressing, program Mask values and build a chip-select
1693 * population map. This routine programs PCI 0:24N:2x80 config register
1694 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
1696 u8 ChipSel, Rows, Cols, Ranks, Banks;
1697 u32 BankAddrReg, csMask;
1708 dev = pDCTstat->dev_dct;
1709 reg_off = 0x100 * dct;
/* One DIMM maps to a pair of chip-selects, hence the step of 2. */
1712 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
1714 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
1717 if (pDCTstat->DIMMValid & (1<<byte)) {
1718 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
1720 byte = mctRead_SPD(smbaddr, SPD_Addressing);
1721 Rows = (byte >> 3) & 0x7; /* Rows:0b=12-bit,... */
1722 Cols = byte & 0x7; /* Cols:0b=9-bit,... */
1724 byte = mctRead_SPD(smbaddr, SPD_Density);
1725 Banks = (byte >> 4) & 7; /* Banks:0b=3-bit,... */
1727 byte = mctRead_SPD(smbaddr, SPD_Organization);
1728 Ranks = ((byte >> 3) & 7) + 1;
1730 /* Configure Bank encoding
1731 * Use a 6-bit key into a lookup table.
1732 * Key (index) = RRRBCC, where CC is the number of Columns minus 9,
1733 * RRR is the number of Rows minus 12, and B is the number of banks
1740 byte |= Rows << 3; /* RRRBCC internal encode */
1742 for (dword=0; dword < 13; dword++) {
1743 if (byte == Tab_BankAddr[dword])
1750 /* bit no. of CS field in address mapping reg.*/
1751 dword <<= (ChipSel<<1);
1752 BankAddrReg |= dword;
1754 /* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
1755 or 2pow(rows+cols+banks-5)-1*/
1758 byte = Rows + Cols; /* cl=rows+cols*/
1759 byte += 21; /* row:12+col:9 */
1760 byte -= 2; /* 3 banks - 5 */
1762 if (pDCTstat->Status & (1 << SB_128bitmode))
1763 byte++; /* double mask size if in 128-bit mode*/
1765 csMask |= 1 << byte;
1768 /*set ChipSelect population indicator even bits*/
1769 pDCTstat->CSPresent |= (1<<ChipSel);
1771 /*set ChipSelect population indicator odd bits*/
1772 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
1774 reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
1776 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
1777 Set_NB32(dev, reg, val);
1779 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
1780 pDCTstat->CSTestFail |= (1<<ChipSel);
1782 } /* while ChipSel*/
1784 SetCSTriState(pMCTstat, pDCTstat, dct);
1785 SetCKETriState(pMCTstat, pDCTstat, dct);
1786 SetODTTriState(pMCTstat, pDCTstat, dct);
1788 if (pDCTstat->Status & (1 << SB_128bitmode)) {
1789 SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1790 SetCKETriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1791 SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
/* XOR trick: bits that the exclude map removed become CSTestFail. */
1794 word = pDCTstat->CSPresent;
1795 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
1796 word ^= pDCTstat->CSPresent;
1797 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
1798 if (!pDCTstat->CSPresent)
1799 pDCTstat->ErrCode = SC_StopError;
1801 reg = 0x80 + reg_off; /* Bank Addressing Register */
1802 Set_NB32(dev, reg, BankAddrReg);
1804 pDCTstat->CSPresent_DCT[dct] = pDCTstat->CSPresent;
1805 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1807 printk(BIOS_DEBUG, "SPDSetBanks: CSPresent %x\n", pDCTstat->CSPresent_DCT[dct]);
1808 printk(BIOS_DEBUG, "SPDSetBanks: Status %x\n", pDCTstat->Status);
1809 printk(BIOS_DEBUG, "SPDSetBanks: ErrStatus %x\n", pDCTstat->ErrStatus);
1810 printk(BIOS_DEBUG, "SPDSetBanks: ErrCode %x\n", pDCTstat->ErrCode);
1811 printk(BIOS_DEBUG, "SPDSetBanks: Done\n\n");
/*
 * SPDCalcWidth_D: compare each channel-A/channel-B DIMM pair field by
 * field (addressing, density, organization width, rank count, bank
 * count) and flag SB_DimmMismatchO on any difference; symmetry is
 * required for 128-bit (ganged) operation.
 */
1814 static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1815 struct DCTStatStruc *pDCTstat)
1817 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1818 * matching with DIMM on Channel B), the overall DIMM population,
1819 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1822 u8 smbaddr, smbaddr1;
1825 /* Check Symmetry of Channel A and Channel B DIMMs
1826 (must be matched for 128-bit mode).*/
/* Even index = channel A slot, odd index = its channel B partner. */
1827 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
1828 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1829 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1830 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
1832 byte = mctRead_SPD(smbaddr, SPD_Addressing) & 0x7;
1833 byte1 = mctRead_SPD(smbaddr1, SPD_Addressing) & 0x7;
1834 if (byte != byte1) {
1835 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1839 byte = mctRead_SPD(smbaddr, SPD_Density) & 0x0f;
1840 byte1 = mctRead_SPD(smbaddr1, SPD_Density) & 0x0f;
1841 if (byte != byte1) {
1842 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1846 byte = mctRead_SPD(smbaddr, SPD_Organization) & 0x7;
1847 byte1 = mctRead_SPD(smbaddr1, SPD_Organization) & 0x7;
1848 if (byte != byte1) {
1849 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1853 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3) & 0x7;
1854 byte1 = (mctRead_SPD(smbaddr1, SPD_Organization) >> 3) & 0x7;
1855 if (byte != byte1) {
1856 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1860 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1861 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1862 if (byte != byte1) {
1863 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/*
 * StitchMemory_D: assign DRAM CS Base addresses (F2x40..5C) to present
 * chip-selects, largest bank first, building a contiguous local address
 * space; handles CS sparing (when enabled) and marks excluded/failed
 * chip-selects with the TestFail bit. DCTSysLimit ends up one below the
 * next free base.
 * NOTE(review): sparse listing — declarations of b/dimValid and several
 * statements between the visible lines are missing from this view.
 */
1872 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1873 struct DCTStatStruc *pDCTstat, u8 dct)
1875 /* Requires that Mask values for each bank be programmed first and that
1876 * the chip-select population indicator is correctly set.
1879 u32 nxtcsBase, curcsBase;
1881 u32 Sizeq, BiggestBank;
1890 dev = pDCTstat->dev_dct;
1891 reg_off = 0x100 * dct;
1895 /* CS Sparing 1=enabled, 0=disabled */
1896 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1897 if (MCT_DIMM_SPARE_NO_WARM) {
1898 /* Do no warm-reset DIMM spare */
1899 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1900 word = pDCTstat->CSPresent;
1904 /* Make sure at least two chip-selects are available */
1907 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1910 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1911 word = pDCTstat->CSPresent;
1913 word &= ~(1 << val);
1915 /* Make sure at least two chip-selects are available */
1918 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1923 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
/* Outer loop: one placement per pass; inner loop finds the largest
 * still-unmapped bank so banks are stitched biggest-first. */
1924 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
1926 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1927 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1928 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1929 val = Get_NB32(dev, reg);
1930 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
1931 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1932 val = Get_NB32(dev, reg);
/* NOTE(review): Sizeq is assigned but, per the original comment,
 * never read afterwards — candidate for removal upstream. */
1936 Sizeq = val; /* never used */
1937 if (val > BiggestBank) {
1938 /*Bingo! possibly Map this chip-select next! */
1943 } /*if bank present */
1945 if (BiggestBank !=0) {
1946 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1947 /* DRAM CS Base b Address Register offset */
1948 reg = 0x40 + (b << 2) + reg_off;
1951 val = 1 << Spare; /* Spare Enable*/
1954 val |= 1 << CSEnable; /* Bank Enable */
/* Odd CS on an unbuffered DIMM may need on-DIMM address mirroring. */
1956 if (((reg - 0x40) >> 2) & 1) {
1957 if (!(pDCTstat->Status & (1 << SB_Registered))) {
1959 dimValid = pDCTstat->DIMMValid;
1962 if ((dimValid & pDCTstat->MirrPresU_NumRegR) != 0) {
1963 val |= 1 << onDimmMirror;
1967 Set_NB32(dev, reg, val);
1971 /* let nxtcsBase+=Size[b] */
1972 nxtcsBase += BiggestBank;
1975 /* bank present but disabled?*/
1976 if ( pDCTstat->CSTestFail & (1 << p)) {
1977 /* DRAM CS Base b Address Register offset */
1978 reg = (p << 2) + 0x40 + reg_off;
1979 val = 1 << TestFail;
1980 Set_NB32(dev, reg, val);
1985 pDCTstat->DCTSysLimit = nxtcsBase - 1;
1986 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
1989 /* dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2)); */
1991 printk(BIOS_DEBUG, "StitchMemory: Status %x\n", pDCTstat->Status);
1992 printk(BIOS_DEBUG, "StitchMemory: ErrStatus %x\n", pDCTstat->ErrStatus);
1993 printk(BIOS_DEBUG, "StitchMemory: ErrCode %x\n", pDCTstat->ErrCode);
1994 printk(BIOS_DEBUG, "StitchMemory: Done\n\n");
/*
 * Get_Fk_D: translate a frequency index k into the corresponding memory
 * clock value via the Table_F_k lookup table. The in-line FIXME about
 * k vs k<<1 is from the original author and remains unresolved.
 */
1997 static u16 Get_Fk_D(u8 k)
1999 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
/*
 * DIMMPresence_D: probe every DIMM slot over SMBus/SPD, validate the SPD
 * CRC, classify modules (DDR3, registered, ECC, x4/x8/x16 width,
 * dual/quad rank), accumulate bus-loading counters, and derive the
 * channel-wide Status bits (SB_Registered, SB_ECCDIMMs, SB_PARDIMMs).
 * Sets SB_NoDimms/SC_StopError when nothing usable is found.
 * Returns pDCTstat->ErrCode.
 * NOTE(review): sparse listing — several initializers, else-arms and
 * closing braces between the visible lines are missing from this view.
 */
2002 static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
2003 struct DCTStatStruc *pDCTstat)
2005 /* Check DIMMs present, verify checksum, flag SDRAM type,
2006 * build population indicator bitmaps, and preload bus loading
2007 * of DIMMs into DCTStatStruc.
2008 * MAAload=number of devices on the "A" bus.
2009 * MABload=number of devices on the "B" bus.
2010 * MAAdimms=number of DIMMs on the "A" bus slots.
2011 * MABdimms=number of DIMMs on the "B" bus slots.
2012 * DATAAload=number of ranks on the "A" bus slots.
2013 * DATABload=number of ranks on the "B" bus slots.
2018 u16 RegDIMMPresent, MaxDimms;
2023 /* preload data structure with addrs */
2024 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
2026 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
2028 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
2031 pDCTstat->DimmQRPresent = 0;
2033 for (i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
/* Indices >= DimmSlots are virtual slots used for the upper two ranks
 * of quad-rank DIMMs (second pass), hence the DimmQRPresent check. */
2037 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
2039 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
2040 status = mctRead_SPD(smbaddr, SPD_ByteUse);
2041 if (status >= 0) { /* SPD access is ok */
2042 pDCTstat->DIMMPresent |= 1 << i;
2043 if (crcCheck(smbaddr)) { /* CRC is OK */
2044 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2045 if (byte == JED_DDR3SDRAM) {
2046 /*Dimm is 'Present'*/
2047 pDCTstat->DIMMValid |= 1 << i;
/* NOTE(review): plain '=' overwrites any previously recorded
 * SPD-checksum-error bits; '|=' looks intended — confirm upstream. */
2050 pDCTstat->DIMMSPDCSE = 1 << i;
2052 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
2053 pDCTstat->ErrCode = SC_StopError;
2055 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
2056 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
2057 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2058 if (byte == JED_DDR3SDRAM)
2059 pDCTstat->DIMMValid |= 1 << i;
2062 /* Check module type */
2063 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE) & 0x7;
2064 if (byte == JED_RDIMM || byte == JED_MiniRDIMM)
2065 RegDIMMPresent |= 1 << i;
2066 /* Check ECC capable */
2067 byte = mctRead_SPD(smbaddr, SPD_BusWidth);
2068 if (byte & JED_ECC) {
2069 /* DIMM is ECC capable */
2070 pDCTstat->DimmECCPresent |= 1 << i;
2072 /* Check if x4 device */
2073 devwidth = mctRead_SPD(smbaddr, SPD_Organization) & 0x7; /* 0:x4,1:x8,2:x16 */
2074 if (devwidth == 0) {
2075 /* DIMM is made with x4 or x16 drams */
2076 pDCTstat->Dimmx4Present |= 1 << i;
2077 } else if (devwidth == 1) {
2078 pDCTstat->Dimmx8Present |= 1 << i;
2079 } else if (devwidth == 2) {
2080 pDCTstat->Dimmx16Present |= 1 << i;
2083 byte = (mctRead_SPD(smbaddr, SPD_Organization) >> 3);
2085 if (byte == 3) { /* 4ranks */
2086 /* if any DIMMs are QR, we have to make two passes through DIMMs*/
2087 if ( pDCTstat->DimmQRPresent == 0) {
2090 if (i < DimmSlots) {
2091 pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
2093 pDCTstat->MAdimms[i & 1] --;
2095 byte = 1; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
2096 } else if (byte == 1) { /* 2ranks */
2097 pDCTstat->DimmDRPresent |= 1 << i;
2102 else if (devwidth == 1)
2104 else if (devwidth == 2)
2107 byte++; /* al+1=rank# */
2109 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
/* j = i & 1 presumably selects the A/B bus — TODO confirm (the
 * assignment to j is in a line not visible in this listing). */
2112 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
2113 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
2114 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
2116 /* check address mirror support for unbuffered dimm */
2117 /* check number of registers on a dimm for registered dimm */
2118 byte = mctRead_SPD(smbaddr, SPD_AddressMirror);
2119 if (RegDIMMPresent & (1 << i)) {
2121 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2123 if ((byte & 1) == 1)
2124 pDCTstat->MirrPresU_NumRegR |= 1 << i;
2126 /* Get byte62: Reference Raw Card information. We dont need it now. */
2127 /* byte = mctRead_SPD(smbaddr, 62); */
2128 /* Get Control word values for RC3. We dont need it. */
2129 byte = mctRead_SPD(smbaddr, 70);
2130 pDCTstat->CtrlWrd3 |= (byte >> 4) << (i << 2); /* C3 = SPD byte 70 [7:4] */
2131 /* Get Control word values for RC4, and RC5 */
2132 byte = mctRead_SPD(smbaddr, 71);
/* NOTE(review): mask 0xFF keeps all 8 bits although the comment says
 * RC4 is [3:0]; 0x0F may have been intended — confirm upstream. */
2133 pDCTstat->CtrlWrd4 |= (byte & 0xFF) << (i << 2); /* RC4 = SPD byte 71 [3:0] */
2134 pDCTstat->CtrlWrd5 |= (byte >> 4) << (i << 2); /* RC5 = SPD byte 71 [7:4] */
2138 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMValid=%x\n", pDCTstat->DIMMValid);
2139 printk(BIOS_DEBUG, "\t DIMMPresence: DIMMPresent=%x\n", pDCTstat->DIMMPresent);
2140 printk(BIOS_DEBUG, "\t DIMMPresence: RegDIMMPresent=%x\n", RegDIMMPresent);
2141 printk(BIOS_DEBUG, "\t DIMMPresence: DimmECCPresent=%x\n", pDCTstat->DimmECCPresent);
2142 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPARPresent=%x\n", pDCTstat->DimmPARPresent);
2143 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx4Present=%x\n", pDCTstat->Dimmx4Present);
2144 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx8Present=%x\n", pDCTstat->Dimmx8Present);
2145 printk(BIOS_DEBUG, "\t DIMMPresence: Dimmx16Present=%x\n", pDCTstat->Dimmx16Present);
2146 printk(BIOS_DEBUG, "\t DIMMPresence: DimmPlPresent=%x\n", pDCTstat->DimmPlPresent);
2147 printk(BIOS_DEBUG, "\t DIMMPresence: DimmDRPresent=%x\n", pDCTstat->DimmDRPresent);
2148 printk(BIOS_DEBUG, "\t DIMMPresence: DimmQRPresent=%x\n", pDCTstat->DimmQRPresent);
2149 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[0]=%x\n", pDCTstat->DATAload[0]);
2150 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[0]=%x\n", pDCTstat->MAload[0]);
2151 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[0]=%x\n", pDCTstat->MAdimms[0]);
2152 printk(BIOS_DEBUG, "\t DIMMPresence: DATAload[1]=%x\n", pDCTstat->DATAload[1]);
2153 printk(BIOS_DEBUG, "\t DIMMPresence: MAload[1]=%x\n", pDCTstat->MAload[1]);
2154 printk(BIOS_DEBUG, "\t DIMMPresence: MAdimms[1]=%x\n", pDCTstat->MAdimms[1]);
2156 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
2157 if (RegDIMMPresent != 0) {
/* Mixing registered and unbuffered modules is a fatal config error. */
2158 if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
2159 /* module type DIMM mismatch (reg'ed, unbuffered) */
2160 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
2161 pDCTstat->ErrCode = SC_StopError;
2163 /* all DIMMs are registered */
2164 pDCTstat->Status |= 1<<SB_Registered;
2167 if (pDCTstat->DimmECCPresent != 0) {
2168 if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
2169 /* all DIMMs are ECC capable */
2170 pDCTstat->Status |= 1<<SB_ECCDIMMs;
2173 if (pDCTstat->DimmPARPresent != 0) {
2174 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
2175 /*all DIMMs are Parity capable */
2176 pDCTstat->Status |= 1<<SB_PARDIMMs;
2180 /* no DIMMs present or no DIMMs that qualified. */
2181 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
2182 pDCTstat->ErrCode = SC_StopError;
2185 printk(BIOS_DEBUG, "\t DIMMPresence: Status %x\n", pDCTstat->Status);
2186 printk(BIOS_DEBUG, "\t DIMMPresence: ErrStatus %x\n", pDCTstat->ErrStatus);
2187 printk(BIOS_DEBUG, "\t DIMMPresence: ErrCode %x\n", pDCTstat->ErrCode);
2188 printk(BIOS_DEBUG, "\t DIMMPresence: Done\n\n");
2190 mctHookAfterDIMMpre();
2192 return pDCTstat->ErrCode;
/*
 * Get_DIMMAddress_D: return the SMBus address of DIMM slot i from the
 * pDCTstat->DIMMAddr table (preloaded by mctGet_DIMMAddr).
 * NOTE(review): the indexing/return statement falls in a gap of this
 * sparse listing.
 */
2195 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2199 p = pDCTstat->DIMMAddr;
2200 /* mct_BeforeGetDIMMAddress(); */
/*
 * mct_initDCT: initialize DCT0 (ganged or unganged), then — if unganged
 * and DCT1 has valid DIMMs — initialize DCT1 as well, preserving DCT0's
 * error code when DCT1 turns out not to be running; otherwise disable
 * the DCT1 DRAM interface via F2x194.
 */
2204 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2205 struct DCTStatStruc *pDCTstat)
2210 /* Config. DCT0 for Ganged or unganged mode */
2211 DCTInit_D(pMCTstat, pDCTstat, 0);
2212 if (pDCTstat->ErrCode == SC_FatalErr) {
2213 /* Do nothing goto exitDCTInit; any fatal errors? */
2215 /* Configure DCT1 if unganged and enabled*/
2216 if (!pDCTstat->GangedMode) {
2217 if ( pDCTstat->DIMMValidDCT[1] > 0) {
2218 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2219 pDCTstat->ErrCode = 0;
2220 DCTInit_D(pMCTstat, pDCTstat, 1);
2221 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2222 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
/* No DIMMs on DCT1: turn its DRAM interface off (F2x194). */
2224 val = 1 << DisDramInterface;
2225 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
/*
 * mct_DramInit: run the software-controlled DRAM device initialization
 * sequence for one DCT (production pre-init hook, then the SW init);
 * the hardware-driven variant is intentionally disabled.
 */
2232 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
2233 struct DCTStatStruc *pDCTstat, u8 dct)
2235 mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
2236 mct_DramInit_Sw_D(pMCTstat, pDCTstat, dct);
2237 /* mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct); */
/*
 * mct_setMode: split the DIMMValid bitmap into per-channel populations
 * (even bits = channel A/DCT0, odd bits = channel B/DCT1) and, when the
 * two channels match and unganged mode is not forced, enable ganged
 * 128-bit operation (DctGangEn). Returns pDCTstat->ErrCode.
 */
2240 static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
2241 struct DCTStatStruc *pDCTstat)
2248 byte = bytex = pDCTstat->DIMMValid;
2249 bytex &= 0x55; /* CHA DIMM pop */
2250 pDCTstat->DIMMValidDCT[0] = bytex;
2252 byte &= 0xAA; /* CHB DIMM popa */
2254 pDCTstat->DIMMValidDCT[1] = byte;
2256 if (byte != bytex) {
/* NOTE(review): clearing SB_DimmMismatchO when the channel populations
 * DIFFER looks inverted (compare the set at original line 2261 below);
 * missing lines in this listing may change the picture — confirm
 * against the full source. */
2257 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
2259 byte = mctGet_NVbits(NV_Unganged);
2261 pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO); /* Set temp. to avoid setting of ganged mode */
2263 if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
2264 pDCTstat->GangedMode = 1;
2265 /* valid 128-bit mode population. */
2266 pDCTstat->Status |= 1 << SB_128bitmode;
2268 val = Get_NB32(pDCTstat->dev_dct, reg);
2269 val |= 1 << DctGangEn;
2270 Set_NB32(pDCTstat->dev_dct, reg, val);
2272 if (byte) /* NV_Unganged */
2273 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO); /* Clear so that there is no DIMM missmatch error */
2275 return pDCTstat->ErrCode;
/* Read a 32-bit northbridge register via PCI config space. */
2278 u32 Get_NB32(u32 dev, u32 reg)
2280 return pci_read_config32(dev, reg);
/* Write a 32-bit northbridge register via PCI config space. */
2283 void Set_NB32(u32 dev, u32 reg, u32 val)
2285 pci_write_config32(dev, reg, val);
/* Indexed NB read: write the index to index_reg, read data at index_reg+4. */
2289 u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2293 Set_NB32(dev, index_reg, index);
2294 dword = Get_NB32(dev, index_reg+0x4);
/* Indexed NB write: write the index to index_reg, data to index_reg+4. */
2299 void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
2301 Set_NB32(dev, index_reg, index);
2302 Set_NB32(dev, index_reg + 0x4, data);
/* Indexed NB read with handshake: issue a read request (DctAccessWrite
 * cleared), poll index_reg until DctAccessDone, then fetch the data. */
2305 u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2311 index &= ~(1 << DctAccessWrite);
2312 Set_NB32(dev, index_reg, index);
2314 dword = Get_NB32(dev, index_reg);
2315 } while (!(dword & (1 << DctAccessDone)));
2316 dword = Get_NB32(dev, index_reg + 0x4);
/* Indexed NB write with handshake: stage the data, issue the write request
 * (DctAccessWrite set), then poll index_reg until DctAccessDone. */
2321 void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2326 Set_NB32(dev, index_reg + 0x4, data);
2327 index |= (1 << DctAccessWrite);
2328 Set_NB32(dev, index_reg, index);
2330 dword = Get_NB32(dev, index_reg);
2331 } while (!(dword & (1 << DctAccessDone)));
/* Program platform-specific drive strength / address timing values
 * (obtained earlier from the interface layer) into each DCT's indexed
 * register space (F2x[1,0]98). In ganged mode the two channels' setup
 * values are synchronized first. */
2335 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
2336 struct DCTStatStruc *pDCTstat, u8 dct)
2338 /* Get platform specific config/timing values from the interface layer
2339 * and program them into DCT.
2342 u32 dev = pDCTstat->dev_dct;
2344 u8 i, i_start, i_end;
2346 if (pDCTstat->GangedMode) {
2347 SyncSetting(pDCTstat);
2348 /* mct_SetupSync_D */
2355 for (i=i_start; i<i_end; i++) {
2356 index_reg = 0x98 + (i * 0x100);
2357 Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]); /* Channel A Output Driver Compensation Control */
2358 Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]); /* Channel A Address Timing Control */
2361 return pDCTstat->ErrCode;
/* Block until this node's memory is usable: if either DCT has valid DIMMs,
 * spin on F2x110[DramEnabled]. */
2365 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
2370 if (pDCTstat->NodePresent) {
2371 dev = pDCTstat->dev_dct;
2373 if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
2375 val = Get_NB32(dev, 0x110);
2376 } while (!(val & (1 << DramEnabled)));
2378 } /* Node is present */
/* After CL/T detection: reload per-DCT DIMMValid and flag a stop error if
 * the selected DCT has no valid DIMMs. The second branch additionally
 * resets CSPresent/CSTestFail (unganged path differs only in that reset). */
2381 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
2382 struct DCTStatStruc *pDCTstat, u8 dct)
2384 if (!pDCTstat->GangedMode) {
2386 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2387 if (pDCTstat->DIMMValidDCT[dct] == 0)
2388 pDCTstat->ErrCode = SC_StopError;
2390 pDCTstat->CSPresent = 0;
2391 pDCTstat->CSTestFail = 0;
2392 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2393 if (pDCTstat->DIMMValidDCT[dct] == 0)
2394 pDCTstat->ErrCode = SC_StopError;
/* Compute bus width from SPD data and set ganged/unganged mode; then
 * disable the DRAM interface (F2x[1,0]94[DisDramInterface]) of any DCT
 * that ended up with no valid DIMMs. */
2399 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
2400 struct DCTStatStruc *pDCTstat, u8 dct)
2406 SPDCalcWidth_D(pMCTstat, pDCTstat);
2407 ret = mct_setMode(pMCTstat, pDCTstat);
2409 ret = pDCTstat->ErrCode;
2412 if (pDCTstat->DIMMValidDCT[0] == 0) {
2413 val = Get_NB32(pDCTstat->dev_dct, 0x94);
2414 val |= 1 << DisDramInterface;
2415 Set_NB32(pDCTstat->dev_dct, 0x94, val);
2417 if (pDCTstat->DIMMValidDCT[1] == 0) {
2418 val = Get_NB32(pDCTstat->dev_dct, 0x94 + 0x100);
2419 val |= 1 << DisDramInterface;
2420 Set_NB32(pDCTstat->dev_dct, 0x94 + 0x100, val);
2423 printk(BIOS_DEBUG, "SPDCalcWidth: Status %x\n", pDCTstat->Status);
2424 printk(BIOS_DEBUG, "SPDCalcWidth: ErrStatus %x\n", pDCTstat->ErrStatus);
2425 printk(BIOS_DEBUG, "SPDCalcWidth: ErrCode %x\n", pDCTstat->ErrCode);
2426 printk(BIOS_DEBUG, "SPDCalcWidth: Done\n");
2427 /* Disable dram interface before DRAM init */
/* After memory stitching: account for the hoisting hole (NV_MemHole /
 * NV_BottomIO) and, in unganged mode, program the DctSelBaseAddr /
 * DctSelHi routing (F2x110) that splits the node's range between DCT0 and
 * DCT1. DramHoleBase has 128MB granularity ([31:24] scaled to [31:16]). */
2432 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
2433 struct DCTStatStruc *pDCTstat, u8 dct)
2442 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
2443 DramHoleBase = mctGet_NVbits(NV_BottomIO);
2445 /* Increase hole size so;[31:24]to[31:16]
2446 * it has granularity of 128MB shl eax,8
2447 * Set 'effective' bottom IOmov DramHoleBase,eax
2449 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2451 /* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
2452 if (!pDCTstat->GangedMode) {
2453 dev = pDCTstat->dev_dct;
2454 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2455 /* if DCT0 and DCT1 both exist, set DctSelBaseAddr[47:27] to the top of DCT0 */
2457 if (pDCTstat->DIMMValidDCT[1] > 0) {
2458 dword = pDCTstat->DCTSysLimit + 1;
2459 dword += pDCTstat->NodeSysBase;
2460 dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
2461 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2462 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2463 val = pMCTstat->HoleBase;
2465 val = (((~val) & 0xFF) + 1);
2470 val = Get_NB32(dev, reg);
2473 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2474 Set_NB32(dev, reg, val);
2478 Set_NB32(dev, reg, val);
2481 /* Program the DctSelBaseAddr value to 0
2482 if DCT 0 is disabled */
2483 if (pDCTstat->DIMMValidDCT[0] == 0) {
2484 dword = pDCTstat->NodeSysBase;
2486 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2487 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2488 val = pMCTstat->HoleBase;
2491 val |= (((~val) & 0xFFFF) + 1);
2496 Set_NB32(dev, reg, val);
2499 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2500 Set_NB32(dev, reg, val);
2504 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2506 printk(BIOS_DEBUG, "AfterStitch pDCTstat->NodeSysBase = %x\n", pDCTstat->NodeSysBase);
2507 printk(BIOS_DEBUG, "mct_AfterStitchMemory: pDCTstat->NodeSysLimit = %x\n", pDCTstat->NodeSysLimit);
/* Thin wrapper: run DIMMPresence_D and return the resulting error code. */
2510 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
2511 struct DCTStatStruc *pDCTstat, u8 dct)
2516 ret = DIMMPresence_D(pMCTstat, pDCTstat);
2518 ret = pDCTstat->ErrCode;
2523 /* mct_BeforeGetDIMMAddress inline in C */
/* For every present node, program the non-SPD inter-command timings
 * (Trdrd/Twrwr/Twrrd/TrwtTO/TrwtWB) per populated DCT; DCT1 is skipped
 * in ganged mode. */
2525 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
2526 struct DCTStatStruc *pDCTstatA)
2530 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2531 struct DCTStatStruc *pDCTstat;
2532 pDCTstat = pDCTstatA + Node;
2533 if (pDCTstat->NodePresent) {
2534 if (pDCTstat->DIMMValidDCT[0]) {
2535 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
2536 Set_OtherTiming(pMCTstat, pDCTstat, 0);
2538 if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
2539 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
2540 Set_OtherTiming(pMCTstat, pDCTstat, 1);
2542 } /* Node is present*/
/* Compute bus-turnaround timings from the measured DQS receiver-enable and
 * write-data gross-delay spreads, then merge their low bits into
 * F2x[1,0]8C (Dram Timing Hi) and the extension bits into F2x[1,0]78. */
2546 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
2547 struct DCTStatStruc *pDCTstat, u8 dct)
2550 u32 reg_off = 0x100 * dct;
2553 u32 dev = pDCTstat->dev_dct;
2555 Get_DqsRcvEnGross_Diff(pDCTstat, dev, 0x98 + reg_off);
2556 Get_WrDatGross_Diff(pDCTstat, dct, dev, 0x98 + reg_off);
2557 Get_Trdrd(pMCTstat, pDCTstat, dct);
2558 Get_Twrwr(pMCTstat, pDCTstat, dct);
2559 Get_Twrrd(pMCTstat, pDCTstat, dct);
2560 Get_TrwtTO(pMCTstat, pDCTstat, dct);
2561 Get_TrwtWB(pMCTstat, pDCTstat);
2563 reg = 0x8C + reg_off; /* Dram Timing Hi */
2564 val = Get_NB32(dev, reg);
2566 dword = pDCTstat->TrwtTO;
2568 dword = pDCTstat->Twrrd & 3;
2570 dword = pDCTstat->Twrwr & 3;
2572 dword = pDCTstat->Trdrd & 3;
2574 dword = pDCTstat->TrwtWB;
2576 Set_NB32(dev, reg, val);
2578 reg = 0x78 + reg_off;
2579 val = Get_NB32(dev, reg);
2581 dword = pDCTstat->Twrrd >> 2; /* high bits of the timing go to F2x78 */
2583 dword = pDCTstat->Twrwr >> 2;
2585 dword = pDCTstat->Trdrd >> 2;
2587 Set_NB32(dev, reg, val);
/* Trdrd = half the DqsRcvEn gross-delay spread, plus 1 (read-to-read). */
2590 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
2591 struct DCTStatStruc *pDCTstat, u8 dct)
2595 Trdrd = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 1;
2598 pDCTstat->Trdrd = Trdrd;
/* Twrwr = half the WrDat gross-delay spread, plus 2 (write-to-write). */
2601 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
2602 struct DCTStatStruc *pDCTstat, u8 dct)
2606 Twrwr = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->WrDatGrossMin) >> 1) + 2;
2613 pDCTstat->Twrwr = Twrwr;
/* Twrrd (write-to-read) from WrDat max vs DqsRcvEn min, corrected by the
 * read/write latency difference; clamped (upper bound visible at > 10). */
2616 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
2617 struct DCTStatStruc *pDCTstat, u8 dct)
2622 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2624 Twrrd = ((int8_t)(pDCTstat->WrDatGrossMax - pDCTstat->DqsRcvEnGrossMin) >> 1) + 4 - LDplus1;
2628 else if (Twrrd > 10)
2630 pDCTstat->Twrrd = Twrrd;
/* TrwtTO (read-to-write turnaround) from DqsRcvEn max vs WrDat min plus
 * the latency difference. */
2633 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
2634 struct DCTStatStruc *pDCTstat, u8 dct)
2639 LDplus1 = Get_Latency_Diff(pMCTstat, pDCTstat, dct);
2641 TrwtTO = ((int8_t)(pDCTstat->DqsRcvEnGrossMax - pDCTstat->WrDatGrossMin) >> 1) + LDplus1;
2643 pDCTstat->TrwtTO = TrwtTO;
/* NOTE(review): the comment below says TrwtWB should be TrwtTO + 1, but the
 * code copies TrwtTO unchanged — the "+1" may be applied by the register
 * encoding elsewhere; confirm against the register write in Set_OtherTiming. */
2646 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
2647 struct DCTStatStruc *pDCTstat)
2649 /* TrwtWB ensures read-to-write data-bus turnaround.
2650 This value should be one more than the programmed TrwtTO.*/
2651 pDCTstat->TrwtWB = pDCTstat->TrwtTO;
/* Latency difference from F2x[1,0]88[3:0] and F2x[1,0]84[22:20]. */
2654 static u8 Get_Latency_Diff(struct MCTStatStruc *pMCTstat,
2655 struct DCTStatStruc *pDCTstat, u8 dct)
2657 u32 reg_off = 0x100 * dct;
2658 u32 dev = pDCTstat->dev_dct;
2661 val1 = Get_NB32(dev, reg_off + 0x88) & 0xF;
2662 val2 = (Get_NB32(dev, reg_off + 0x84) >> 20) & 7;
/* Scan the DqsRcvEn gross delays of all byte lanes (indices 0x10/0x11/
 * 0x20/0x21, plus 0x12 for the ECC lane when present) and record the
 * overall max/min in DqsRcvEnGrossMax/Min — the Critical Gross Delay
 * Difference (CGDD) inputs used by the timing calculations above. */
2667 static void Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
2668 u32 dev, u32 index_reg)
2670 u8 Smallest, Largest;
2674 /* The largest DqsRcvEnGrossDelay of any DIMM minus the
2675 DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
2676 Gross Delay Difference (CGDD) */
2677 /* DqsRcvEn byte 1,0 */
2678 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x10);
2679 Largest = val & 0xFF; /* packed return: low byte = max, next byte = min */
2680 Smallest = (val >> 8) & 0xFF;
2682 /* DqsRcvEn byte 3,2 */
2683 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x11);
2685 bytex = (val >> 8) & 0xFF;
2686 if (bytex < Smallest)
2691 /* DqsRcvEn byte 5,4 */
2692 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x20);
2694 bytex = (val >> 8) & 0xFF;
2695 if (bytex < Smallest)
2700 /* DqsRcvEn byte 7,6 */
2701 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x21);
2703 bytex = (val >> 8) & 0xFF;
2704 if (bytex < Smallest)
2709 if (pDCTstat->DimmECCPresent> 0) {
2711 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x12);
2713 bytex = (val >> 8) & 0xFF;
2714 if (bytex < Smallest)
2720 pDCTstat->DqsRcvEnGrossMax = Largest;
2721 pDCTstat->DqsRcvEnGrossMin = Smallest;
/* Scan WrDatGrossDlyByte across the populated DIMMs (indices 0x01/0x101,
 * and 0x201/0x301 for DIMM2/3 on revision Cx parts) and record the overall
 * max/min in WrDatGrossMax/Min (CGDD inputs). */
2724 static void Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
2725 u8 dct, u32 dev, u32 index_reg)
2727 u8 Smallest, Largest;
2731 /* The largest WrDatGrossDlyByte of any DIMM minus the
2732 WrDatGrossDlyByte of any other DIMM is equal to CGDD */
2733 if (pDCTstat->DIMMValid & (1 << 0)) {
2734 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
2735 Largest = val & 0xFF;
2736 Smallest = (val >> 8) & 0xFF;
2738 if (pDCTstat->DIMMValid & (1 << 2)) {
2739 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
2741 bytex = (val >> 8) & 0xFF;
2742 if (bytex < Smallest)
2748 /* If Cx, 2 more dimm need to be checked to find out the largest and smallest */
2749 if (pDCTstat->LogicalCPUID & AMD_DR_Cx) {
2750 if (pDCTstat->DIMMValid & (1 << 4)) {
2751 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x201); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM2 */
2753 bytex = (val >> 8) & 0xFF;
2754 if (bytex < Smallest)
2759 if (pDCTstat->DIMMValid & (1 << 6)) {
2760 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x301); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM3 */
2762 bytex = (val >> 8) & 0xFF;
2763 if (bytex < Smallest)
2770 pDCTstat->WrDatGrossMax = Largest;
2771 pDCTstat->WrDatGrossMin = Smallest;
/* Return max/min DqsRcvEn gross delay across the valid DIMMs for one lane
 * pair (two 8-bit fields at bit 5 and bit 21 of the indexed register).
 * Packed result: low byte = largest, next byte = smallest. */
2774 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
2775 u32 dev, u32 index_reg,
2778 u8 Smallest, Largest;
2791 for (i=0; i < 8; i+=2) {
2792 if ( pDCTstat->DIMMValid & (1 << i)) {
2793 val = Get_NB32_index_wait(dev, index_reg, index);
2795 byte = (val >> 5) & 0xFF; /* lower lane's gross delay */
2796 if (byte < Smallest)
2801 byte = (val >> (16 + 5)) & 0xFF; /* upper lane's gross delay */
2802 if (byte < Smallest)
/* Return max/min WrDatGrossDlyByte across the four data byte lanes of one
 * DIMM (plus the ECC byte when present). Packed like
 * Get_DqsRcvEnGross_MaxMin: low byte = largest, next byte = smallest. */
2818 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
2819 u8 dct, u32 dev, u32 index_reg,
2822 u8 Smallest, Largest;
2830 for (i=0; i < 2; i++) {
2831 val = Get_NB32_index_wait(dev, index_reg, index);
2834 for (j=0; j < 4; j++) {
2836 if (byte < Smallest)
2845 if (pDCTstat->DimmECCPresent > 0) {
2847 val = Get_NB32_index_wait(dev, index_reg, index);
2851 if (byte < Smallest)
/* Final MCT step: undo the MSR workarounds applied by mct_InitialMCT_D. */
2864 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
2865 struct DCTStatStruc *pDCTstat)
2867 mct_ClrClToNB_D(pMCTstat, pDCTstat);
2868 mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);
/* Initial MCT step: apply the ClLinesToNbDis / WbEnhWsbDis MSR workarounds
 * needed during training (cleared again in mct_FinalMCT_D). */
2871 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
2873 mct_SetClToNB_D(pMCTstat, pDCTstat);
2874 mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
2877 static u32 mct_NodePresent_D(void)
/* Per-node init of the DCT status struct, and enable extended PCI config
 * access via the MSR bit at position 46 (read as bit 46-32 of the high word). */
2884 static void mct_init(struct MCTStatStruc *pMCTstat,
2885 struct DCTStatStruc *pDCTstat)
2890 pDCTstat->GangedMode = 0;
2891 pDCTstat->DRPresent = 1;
2893 /* enable extend PCI configuration access */
2895 _RDMSR(addr, &lo, &hi);
2896 if (hi & (1 << (46-32))) {
2897 pDCTstat->Status |= 1 << SB_ExtConfig;
2900 _WRMSR(addr, lo, hi);
/* Clear the LegacyBiosMode bit in both DCTs' configuration registers. */
2904 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
2905 struct DCTStatStruc *pDCTstat)
2909 u32 dev = pDCTstat->dev_dct;
2911 /* Clear Legacy BIOS Mode bit */
2913 val = Get_NB32(dev, reg);
2914 val &= ~(1<<LegacyBiosMode);
2915 Set_NB32(dev, reg, val);
2918 val = Get_NB32(dev, reg); /* repeat for the other DCT's register */
2919 val &= ~(1<<LegacyBiosMode);
2920 Set_NB32(dev, reg, val);
/* Copy the HT DRAM map from Node0's F1x40/44.. base/limit pairs into each
 * present node's extended map registers F1x120/124 (scaled [39:27] to
 * [47:27]); also propagate the memory-hoisting hole (F1x... with
 * DramMemHoistValid) when GSB_HWHole is set. */
2923 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
2924 struct DCTStatStruc *pDCTstatA)
2927 u32 Drambase, Dramlimit;
2933 struct DCTStatStruc *pDCTstat;
2935 pDCTstat = pDCTstatA + 0;
2936 dev = pDCTstat->dev_map;
2938 /* Copy dram map from F1x40/44,F1x48/4c,
2939 to F1x120/124(Node0),F1x120/124(Node1),...*/
2940 for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
2941 pDCTstat = pDCTstatA + Node;
2942 devx = pDCTstat->dev_map;
2944 /* get base/limit from Node0 */
2945 reg = 0x40 + (Node << 3); /* Node0/Dram Base 0 */
2946 val = Get_NB32(dev, reg);
2947 Drambase = val >> ( 16 + 3);
2949 reg = 0x44 + (Node << 3); /* Node0/Dram Limit 0 */
2950 val = Get_NB32(dev, reg);
2951 Dramlimit = val >> (16 + 3);
2953 /* set base/limit to F1x120/124 per Node */
2954 if (pDCTstat->NodePresent) {
2955 reg = 0x120; /* F1x120,DramBase[47:27] */
2956 val = Get_NB32(devx, reg);
2959 Set_NB32(devx, reg, val);
2962 val = Get_NB32(devx, reg);
2965 Set_NB32(devx, reg, val);
2967 if ( pMCTstat->GStatus & ( 1 << GSB_HWHole)) {
2969 val = Get_NB32(devx, reg);
2970 val |= (1 << DramMemHoistValid);
2971 val &= ~(0xFF << 24);
2972 dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
2975 Set_NB32(devx, reg, val);
/* Tri-state chip selects that have no DIMM behind them (motherboard
 * termination case). Registered DIMMs pair odd CS with even CS, hence the
 * (word & 0x55) << 1 merge before inverting. */
2982 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
2983 struct DCTStatStruc *pDCTstat, u8 dct)
2986 u32 dev = pDCTstat->dev_dct;
2987 u32 index_reg = 0x98 + 0x100 * dct;
2991 /* Tri-state unused chipselects when motherboard
2992 termination is available */
2994 /* FIXME: skip for Ax */
2996 word = pDCTstat->CSPresent;
2997 if (pDCTstat->Status & (1 << SB_Registered)) {
2998 word |= (word & 0x55) << 1;
3000 word = (~word) & 0xFF; /* unused CS bits become the tri-state mask */
3002 val = Get_NB32_index_wait(dev, index_reg, index);
3004 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state a CKE line when its whole CS group (even bits 0x55 or odd
 * bits 0xAA) is unpopulated. */
3007 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
3008 struct DCTStatStruc *pDCTstat, u8 dct)
3012 u32 index_reg = 0x98 + 0x100 * dct;
3016 /* Tri-state unused CKEs when motherboard termination is available */
3018 /* FIXME: skip for Ax */
3020 dev = pDCTstat->dev_dct;
3021 word = pDCTstat->CSPresent;
3024 val = Get_NB32_index_wait(dev, index_reg, index);
3025 if ((word & 0x55) == 0)
3028 if ((word & 0xAA) == 0)
3031 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused ODT pins (ODTTriState[3:0], bits 11:8 of the indexed
 * register). Registered platforms derive the mask from CSPresent per CS
 * pair, honoring quad-rank wiring (NV_4RANKType); AM3 derives it directly
 * from the inverted CSPresent bits. */
3034 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
3035 struct DCTStatStruc *pDCTstat, u8 dct)
3039 u32 index_reg = 0x98 + 0x100 * dct;
3045 /* FIXME: skip for Ax */
3047 dev = pDCTstat->dev_dct;
3049 /* Tri-state unused ODTs when motherboard termination is available */
3050 max_dimms = (u8) mctGet_NVbits(NV_MAX_DIMMS);
3051 odt = 0x0F; /* ODT tri-state setting */
3053 if (pDCTstat->Status & (1 <<SB_Registered)) {
3054 for (cs = 0; cs < 8; cs += 2) {
3055 if (pDCTstat->CSPresent & (1 << cs)) {
3056 odt &= ~(1 << (cs / 2));
3057 if (mctGet_NVbits(NV_4RANKType) != 0) { /* quad-rank capable platform */
3058 if (pDCTstat->CSPresent & (1 << (cs + 1)))
3059 odt &= ~(4 << (cs / 2));
3063 } else { /* AM3 package */
3064 val = ~(pDCTstat->CSPresent);
3065 odt = val & 9; /* swap bits 1 and 2 */
3073 val = Get_NB32_index_wait(dev, index_reg, index);
3074 val |= ((odt & 0xFF) << 8); /* set bits 11:8 ODTTriState[3:0] */
3075 Set_NB32_index_wait(dev, index_reg, index, val);
3079 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
3080 struct DCTStatStruc *pDCTstat, u8 dct)
3083 u32 index_reg = 0x98 + 0x100 * dct;
3084 u32 dev = pDCTstat->dev_dct;
3090 val = Get_NB32_index_wait(dev, index_reg, 0x00);
3092 for (i=0; i < 6; i++) {
3096 p = Table_Comp_Rise_Slew_15x;
3097 valx = p[(val >> 16) & 3];
3101 p = Table_Comp_Fall_Slew_15x;
3102 valx = p[(val >> 16) & 3];
3105 p = Table_Comp_Rise_Slew_20x;
3106 valx = p[(val >> 8) & 3];
3109 p = Table_Comp_Fall_Slew_20x;
3110 valx = p[(val >> 8) & 3];
3114 dword |= valx << (5 * i);
3117 /* Override/Exception */
3118 if (!pDCTstat->GangedMode) {
3119 i = 0; /* use i for the dct setting required */
3120 if (pDCTstat->MAdimms[0] < 4)
3122 if (((pDCTstat->Speed == 2) || (pDCTstat->Speed == 3)) && (pDCTstat->MAdimms[i] == 4))
3123 dword &= 0xF18FFF18;
3124 index_reg = 0x98; /* force dct = 0 */
3127 Set_NB32_index_wait(dev, index_reg, 0x0a, dword);
/* Set EarlyArbEn in the Dram Control Register (F2x78 bit 19) either
 * unconditionally on rev B/C parts, or when the NBCLK:MemClk ratio check
 * says early arbitration is needed (see CheckNBCOFEarlyArbEn). */
3130 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
3131 struct DCTStatStruc *pDCTstat)
3135 u32 dev = pDCTstat->dev_dct;
3137 /* GhEnhancement #18429 modified by askar: For low NB CLK :
3138 * Memclk ratio, the DCT may need to arbitrate early to avoid
3139 * unnecessary bubbles.
3140 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
3141 * NB CLK : Memclk ratio is between 3:1 (inclusive) to 4:5 (inclusive)
3144 val = Get_NB32(dev, reg);
3146 if (pDCTstat->LogicalCPUID & (AMD_DR_Bx | AMD_DR_Cx))
3147 val |= (1 << EarlyArbEn);
3148 else if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
3149 val |= (1 << EarlyArbEn);
3151 Set_NB32(dev, reg, val);
/* Compute the NBCLK:MemClk ratio from MSR 0xC0010071 (NbFid/NbDid) and the
 * active DCT's MemClkFreq, and return 1 when the ratio lies in
 * [3:1, 4.5:1] — the range where EarlyArbEn should be set. */
3154 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
3155 struct DCTStatStruc *pDCTstat)
3161 u32 dev = pDCTstat->dev_dct;
3165 /* Check if NB COF >= 4*Memclk, if it is not, return a fatal error
3168 /* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
3169 _RDMSR(0xC0010071, &lo, &hi);
3174 val = Get_NB32(dev, reg);
3175 if (!(val & (1 << MemClkFreqVal)))
3176 val = Get_NB32(dev, reg + 0x100); /* get the DCT1 value */
3184 dev = pDCTstat->dev_nbmisc;
3186 val = Get_NB32(dev, reg);
3194 /* Yes this could be nicer but this was how the asm was.... */
3195 if (val < 3) { /* NClk:MemClk < 3:1 */
3197 } else if (val > 4) { /* NClk:MemClk >= 5:1 */
3199 } else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
3202 return 1; /* 3:1 <= NClk:MemClk <= 4.5:1*/
/* Zero the MCT status struct and (selectively) each node's DCT status
 * struct, preserving HostBiosSrvc1/2 and the byte ranges outside the
 * [start, CH_MaxRdLat[2]) and [CH_D_BC_RCVRDLY[2][4], end) windows. */
3206 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
3207 struct DCTStatStruc *pDCTstatA)
3211 struct DCTStatStruc *pDCTstat;
3214 u16 host_serv1, host_serv2;
3216 /* Initialize Data structures by clearing all entries to 0 */
3217 p = (u8 *) pMCTstat;
3218 for (i = 0; i < sizeof(struct MCTStatStruc); i++) {
3222 for (Node = 0; Node < 8; Node++) {
3223 pDCTstat = pDCTstatA + Node;
3224 host_serv1 = pDCTstat->HostBiosSrvc1;
3225 host_serv2 = pDCTstat->HostBiosSrvc2;
3227 p = (u8 *) pDCTstat;
3229 stop = ((u32) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]); /* offsetof-style computation */
3230 for (i = start; i < stop ; i++) {
3234 start = ((u32) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
3235 stop = sizeof(struct DCTStatStruc);
3236 for (i = start; i < stop; i++) {
3239 pDCTstat->HostBiosSrvc1 = host_serv1;
3240 pDCTstat->HostBiosSrvc2 = host_serv2;
/* Rev-Dx production workaround before DRAM init: write a value through the
 * F2x[1,0]98/9C indexed interface on both DCTs (index 0x0D000030, commit
 * with 0x4D040F30). */
3244 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
3245 struct DCTStatStruc *pDCTstat)
3249 u32 dev = pDCTstat->dev_dct;
3251 if (pDCTstat->LogicalCPUID & AMD_DR_Dx) {
3252 if ((pDCTstat->Speed == 3))
3256 for (i=0; i < 2; i++) {
3257 reg_off = 0x100 * i;
3258 Set_NB32(dev, 0x98 + reg_off, 0x0D000030);
3259 Set_NB32(dev, 0x9C + reg_off, dword);
3260 Set_NB32(dev, 0x98 + reg_off, 0x4D040F30);
/* On DA-C2 / RB-C3 parts, program the phy workaround registers
 * (F2x[1,0]98_x4D0FE006/7), then return DramConfigLo with
 * DisDllShutdownSR (bit 27) set. */
3265 static u32 mct_DisDllShutdownSR(struct MCTStatStruc *pMCTstat,
3266 struct DCTStatStruc *pDCTstat, u32 DramConfigLo, u8 dct)
3268 u32 reg_off = 0x100 * dct;
3269 u32 dev = pDCTstat->dev_dct;
3271 /* Write 0000_07D0h to register F2x[1, 0]98_x4D0FE006 */
3272 if (pDCTstat->LogicalCPUID & (AMD_DA_C2 | AMD_RB_C3)) {
3273 Set_NB32(dev, 0x9C + reg_off, 0x1c);
3274 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE006);
3275 Set_NB32(dev, 0x9C + reg_off, 0x13d);
3276 Set_NB32(dev, 0x98 + reg_off, 0x4D0FE007);
3279 return DramConfigLo | /* DisDllShutdownSR */ 1 << 27;
/* Set the ClLinesToNbDis MSR bit (cache-line-to-NB disable workaround,
 * applied for training; cleared later by mct_ClrClToNB_D). */
3282 void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
3283 struct DCTStatStruc *pDCTstat)
3288 /* FIXME: Maybe check the CPUID? - not for now. */
3289 /* pDCTstat->LogicalCPUID; */
3292 _RDMSR(msr, &lo, &hi);
3293 lo |= 1 << ClLinesToNbDis;
3294 _WRMSR(msr, lo, hi);
/* Clear ClLinesToNbDis again — unless ClToNB_flag says the bit was
 * already set before training and must be left alone. */
3297 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
3298 struct DCTStatStruc *pDCTstat)
3304 /* FIXME: Maybe check the CPUID? - not for now. */
3305 /* pDCTstat->LogicalCPUID; */
3308 _RDMSR(msr, &lo, &hi);
3309 if (!pDCTstat->ClToNB_flag)
3310 lo &= ~(1<<ClLinesToNbDis);
3311 _WRMSR(msr, lo, hi);
/* Set WbEnhWsbDis_D in the MSR high word (write-burst workaround for
 * training; cleared by mct_ClrWbEnhWsbDis_D). */
3315 void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3316 struct DCTStatStruc *pDCTstat)
3321 /* FIXME: Maybe check the CPUID? - not for now. */
3322 /* pDCTstat->LogicalCPUID; */
3325 _RDMSR(msr, &lo, &hi);
3326 hi |= (1 << WbEnhWsbDis_D);
3327 _WRMSR(msr, lo, hi);
/* Clear WbEnhWsbDis_D in the MSR high word (undo of mct_SetWbEnhWsbDis_D). */
3330 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3331 struct DCTStatStruc *pDCTstat)
3336 /* FIXME: Maybe check the CPUID? - not for now. */
3337 /* pDCTstat->LogicalCPUID; */
3340 _RDMSR(msr, &lo, &hi);
3341 hi &= ~(1 << WbEnhWsbDis_D);
3342 _WRMSR(msr, lo, hi);
/* Select the dynamic DRAM termination bits (10/11) for registered DIMMs
 * based on platform max-DIMM count, upper-CS population, and the number of
 * DIMMs installed. Branch interiors are elided in this view; see the RDIMM
 * termination tables in the platform BKDG for the full matrix. */
3345 static u32 mct_DramTermDyn_RDimm(struct MCTStatStruc *pMCTstat,
3346 struct DCTStatStruc *pDCTstat, u8 dimm)
3348 u8 DimmsInstalled = dimm;
3349 u32 DramTermDyn = 0;
3350 u8 Speed = pDCTstat->Speed;
3352 if (mctGet_NVbits(NV_MAX_DIMMS) == 4) {
3353 if (pDCTstat->CSPresent & 0xF0) {
3354 if (DimmsInstalled == 1)
3356 DramTermDyn |= 1 << 10;
3358 DramTermDyn |= 1 << 11;
3361 DramTermDyn |= 1 << 11;
3363 DramTermDyn |= 1 << 10;
3365 if (DimmsInstalled != 1) {
3367 DramTermDyn |= 1 << 10;
3369 DramTermDyn |= 1 << 11;
3373 if (DimmsInstalled != 1)
3374 DramTermDyn |= 1 << 11;
/* Build the DRAM MRS shadow value: CKE control mode, nominal termination
 * (RTT_Nom by MAdimms/speed), dynamic termination (RTT_WR; RDIMM values
 * come from mct_DramTermDyn_RDimm), burst length, CWL (Speed - 4) and ASR;
 * merge it into F2x[1,0]84. */
3379 void ProgDramMRSReg_D(struct MCTStatStruc *pMCTstat,
3380 struct DCTStatStruc *pDCTstat, u8 dct)
3387 /* Set chip select CKE control mode */
3388 if (mctGet_NVbits(NV_CKE_CTL)) {
3389 if (pDCTstat->CSPresent == 3) {
3391 word = pDCTstat->DIMMSPDCSE;
3402 DrvImpCtrl: drive impedance control.01b(34 ohm driver; Ron34 = Rzq/7)
3405 /* Dram nominal termination: */
3406 byte = pDCTstat->MAdimms[dct];
3407 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3408 DramMRS |= 1 << 7; /* 60 ohms */
3410 if (pDCTstat->Speed < 6)
3411 DramMRS |= 1 << 8; /* 40 ohms */
3413 DramMRS |= 1 << 9; /* 30 ohms */
3416 /* Dram dynamic termination: Disable(1DIMM), 120ohm(>=2DIMM) */
3417 if (!(pDCTstat->Status & (1 << SB_Registered))) {
3419 if (pDCTstat->Speed == 7)
3425 DramMRS |= mct_DramTermDyn_RDimm(pMCTstat, pDCTstat, byte);
3428 /* burst length control */
3429 if (pDCTstat->Status & (1 << SB_128bitmode))
3431 /* Qoff=0, output buffers enabled */
3433 DramMRS |= (pDCTstat->Speed - 4) << 20;
3434 /* ASR=1, auto self refresh */
3438 dword = Get_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84);
3439 dword &= ~0x00FC2F8F;
3441 Set_NB32(pDCTstat->dev_dct, 0x100 * dct + 0x84, dword);
/* Write DramConfigHi (F2x[1,0]94) with the erratum-177 / bug-15114
 * compensation sequence: disable phy auto-compensation (DisAutoComp) before
 * the frequency-affecting write. */
3444 void mct_SetDramConfigHi_D(struct DCTStatStruc *pDCTstat, u32 dct,
3447 /* Bug#15114: Comp. update interrupted by Freq. change can cause
3448 * subsequent update to be invalid during any MemClk frequency change:
3449 * Solution: From the bug report:
3450 * 1. A software-initiated frequency change should be wrapped into the
3451 * following sequence :
3452 * - a) Disable Compensation (F2[1, 0]9C_x08[30] )
3453 * b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
3454 * c) Do frequency change
3455 * d) Enable Compensation (F2[1, 0]9C_x08[30] )
3456 * 2. A software-initiated Disable Compensation should always be
3457 * followed by step b) of the above steps.
3458 * Silicon Status: Fixed In Rev B0
3460 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
3461 * Solution: BIOS should disable the phy automatic compensation prior
3462 * to initiating a memory clock frequency change as follows:
3463 * 1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
3464 * 2. Reset the Begin Compensation bits by writing 32'h0 to
3465 * F2x[1, 0]9C_x4D004F00
3466 * 3. Perform frequency change
3467 * 4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
3468 * In addition, any time software disables the automatic phy
3469 * compensation it should reset the begin compensation bit per step 2.
3470 * Silicon Status: Fixed in DR-B0
3473 u32 dev = pDCTstat->dev_dct;
3474 u32 index_reg = 0x98 + 0x100 * dct;
3480 val = Get_NB32_index_wait(dev, index_reg, index);
3481 if (!(val & (1 << DisAutoComp)))
3482 Set_NB32_index_wait(dev, index_reg, index, val | (1 << DisAutoComp));
3486 Set_NB32(dev, 0x94 + 0x100 * dct, DramConfigHi);
/* Pre-DQS-training fixups per node (bug 15115 / 15880 workarounds).
 * NOTE(review): as written, only mct_BeforeDQSTrainSamp() is guarded by the
 * NodePresent check — the two mct_ResetDLL_D() calls run for every node
 * index; confirm whether braces were intended around all three calls. */
3489 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
3490 struct DCTStatStruc *pDCTstatA)
3493 struct DCTStatStruc *pDCTstat;
3497 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
3499 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
3500 * 5h, (F2x[1, 0]78[3:0] = 5h).
3501 * Silicon Status: Fixed In Rev B0
3503 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
3504 * Solutiuon: At least, set WrDqs fine delay to be 0 for DDR3 training.
3506 for (Node = 0; Node < 8; Node++) {
3507 pDCTstat = pDCTstatA + Node;
3509 if (pDCTstat->NodePresent)
3510 mct_BeforeDQSTrainSamp(pDCTstat); /* only Bx */
3511 mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
3512 mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
/* Reset the DDR phy DLL for one DCT (skipped on rev B3): temporarily set
 * HWCR.wrap32dis (MSR bit 17) so 64-bit memory references work in 32-bit
 * mode, prime each enabled receiver with a cache-fill read, pulse the DLL
 * reset via F2x[1,0]9C_x4D080F0C, then restore wrap32dis. */
3516 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
3517 struct DCTStatStruc *pDCTstat, u8 dct)
3520 u32 dev = pDCTstat->dev_dct;
3521 u32 reg_off = 0x100 * dct;
3527 /* Skip reset DLL for B3 */
3528 if (pDCTstat->LogicalCPUID & AMD_DR_B3) {
3533 _RDMSR(addr, &lo, &hi);
3534 if(lo & (1<<17)) { /* save the old value */
3537 lo |= (1<<17); /* HWCR.wrap32dis */
3538 /* Setting wrap32dis allows 64-bit memory references in 32bit mode */
3539 _WRMSR(addr, lo, hi);
3541 pDCTstat->Channel = dct;
3542 Receiver = mct_InitReceiver_D(pDCTstat, dct);
3543 /* there are four receiver pairs, loosely associated with chipselects.*/
3544 for (; Receiver < 8; Receiver += 2) {
3545 if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
3546 addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
3548 mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr); /* cache fills */
3550 /* Write 0000_8000h to register F2x[1,0]9C_xD080F0C */
3551 Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00008000);
3552 mct_Wait(80); /* wait >= 300ns */
3554 /* Write 0000_0000h to register F2x[1,0]9C_xD080F0C */
3555 Set_NB32_index_wait(dev, 0x98 + reg_off, 0x4D080F0C, 0x00000000);
3556 mct_Wait(800); /* wait >= 2us */
3564 _RDMSR(addr, &lo, &hi);
3565 lo &= ~(1<<17); /* restore HWCR.wrap32dis */
3566 _WRMSR(addr, lo, hi);
/* Enable channel data interleaving (F2x110[DctDatIntlv]) in unganged mode,
 * plus DisDatMask in NB Configuration Hi (F3x8C bit 36) — the latter is
 * flagged as needing a Cx-revision skip. */
3570 static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
3571 struct DCTStatStruc *pDCTstat)
3573 u32 dev = pDCTstat->dev_dct;
3576 /* Enable F2x110[DctDatIntlv] */
3577 /* Call back not required mctHookBeforeDatIntlv_D() */
3578 /* FIXME Skip for Ax */
3579 if (!pDCTstat->GangedMode) {
3580 val = Get_NB32(dev, 0x110);
3581 val |= 1 << 5; /* DctDatIntlv */
3582 Set_NB32(dev, 0x110, val);
3584 /* FIXME Skip for Cx */
3585 dev = pDCTstat->dev_nbmisc;
3586 val = Get_NB32(dev, 0x8C); /* NB Configuration Hi */
3587 val |= 1 << (36-32); /* DisDatMask */
3588 Set_NB32(dev, 0x8C, val);
/* For DDR1600+ (Speed >= 7), set the phy PowerDown (bit 13) speed-up bit
 * in four indexed phy registers (x0D080F10/11, x0D088F30, x0D08CF30) via
 * the stage-index / read-modify-write / commit-index pattern on
 * F2x[1,0]98 / 9C. */
3592 static void SetDllSpeedUp_D(struct MCTStatStruc *pMCTstat,
3593 struct DCTStatStruc *pDCTstat, u8 dct)
3596 u32 dev = pDCTstat->dev_dct;
3597 u32 reg_off = 0x100 * dct;
3599 if (pDCTstat->Speed >= 7) { /* DDR1600 and above */
3600 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F10 */
3601 Set_NB32(dev, reg_off + 0x98, 0x0D080F10);
3602 val = Get_NB32(dev, reg_off + 0x9C);
3604 Set_NB32(dev, reg_off + 0x9C, val);
3605 Set_NB32(dev, reg_off + 0x98, 0x4D080F10);
3607 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D080F11 */
3608 Set_NB32(dev, reg_off + 0x98, 0x0D080F11);
3609 val = Get_NB32(dev, reg_off + 0x9C);
3611 Set_NB32(dev, reg_off + 0x9C, val);
3612 Set_NB32(dev, reg_off + 0x98, 0x4D080F11);
3614 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D088F30 */
3615 Set_NB32(dev, reg_off + 0x98, 0x0D088F30);
3616 val = Get_NB32(dev, reg_off + 0x9C);
3618 Set_NB32(dev, reg_off + 0x9C, val);
3619 Set_NB32(dev, reg_off + 0x98, 0x4D088F30);
3621 /* Set bit13 PowerDown to register F2x[1, 0]98_x0D08CF30 */
3622 Set_NB32(dev, reg_off + 0x98, 0x0D08CF30);
3623 val = Get_NB32(dev, reg_off + 0x9C);
3625 Set_NB32(dev, reg_off + 0x9C, val);
3626 Set_NB32(dev, reg_off + 0x98, 0x4D08CF30);
/* Set F2x78[ChSetupSync] when exactly one channel has all-zero
 * AddrCmdSetup/CsOdtSetup/CkeSetup fields (mask 0x0202020) and the other
 * does not — the ganged-mode channel synchronization condition. */
3631 static void SyncSetting(struct DCTStatStruc *pDCTstat)
3633 /* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
3634 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
3635 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
3639 u32 dev = pDCTstat->dev_dct;
3642 cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
3643 chb = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;
3645 if ((cha != chb) && ((cha == 0) || (chb == 0))) {
3646 val = Get_NB32(dev, 0x78);
3647 val |= 1 << ChSetupSync;
3648 Set_NB32(dev, 0x78, val);
/* Rev B2/B3 workaround after DRAM init: if DramEnabled is still 0 after
 * ~50 us, toggle Width128 off, do a dummy indexed CSR read, and restore
 * Width128 when in ganged mode. */
3652 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {
3655 u32 reg_off = 0x100 * dct;
3656 u32 dev = pDCTstat->dev_dct;
3658 if (pDCTstat->LogicalCPUID & (AMD_DR_B2 | AMD_DR_B3)) {
3659 mct_Wait(10000); /* Wait 50 us*/
3660 val = Get_NB32(dev, 0x110);
3661 if (!(val & (1 << DramEnabled))) {
3662 /* If 50 us expires while DramEnable =0 then do the following */
3663 val = Get_NB32(dev, 0x90 + reg_off);
3664 val &= ~(1 << Width128); /* Program Width128 = 0 */
3665 Set_NB32(dev, 0x90 + reg_off, val);
3667 val = Get_NB32_index_wait(dev, 0x98 + reg_off, 0x05); /* Perform dummy CSR read to F2x09C_x05 */
3669 if (pDCTstat->GangedMode) {
3670 val = Get_NB32(dev, 0x90 + reg_off);
3671 val |= 1 << Width128; /* Program Width128 = 1 (restore ganged width) */
3672 Set_NB32(dev, 0x90 + reg_off, val);
3678 /* ==========================================================
3679 * 6-bit Bank Addressing Table
3682 * CCC=Columns-9 binary
3683 * ==========================================================
3684 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
3686 * 0000 000000 13 2 9 128MB
3687 * 0001 001000 13 2 10 256MB
3688 * 0010 001001 14 2 10 512MB
3689 * 0011 010000 13 2 11 512MB
3690 * 0100 001100 13 3 10 512MB
3691 * 0101 001101 14 3 10 1GB
3692 * 0110 010001 14 2 11 1GB
3693 * 0111 001110 15 3 10 2GB
3694 * 1000 010101 14 3 11 2GB
3695 * 1001 010110 15 3 11 4GB
3696 * 1010 001111 16 3 10 4GB
3697 * 1011 010111 16 3 11 8GB
/* Verify the DDR3 SPD CRC: compute CRC-16 over the covered bytes (count
 * taken from SPD_ByteUse; bit 7 selects the coverage variant per JEDEC)
 * and compare against the stored CRC in SPD bytes 126/127 (LSB first). */
3699 u8 crcCheck(u8 smbaddr)
3706 byte_use = mctRead_SPD(smbaddr, SPD_ByteUse);
3707 if (byte_use & 0x80)
3713 for (Index = 0; Index < byte_use; Index ++) {
3714 byte = mctRead_SPD(smbaddr, Index);
3716 for (i=0; i<8; i++) {
3724 return CRC == (mctRead_SPD(smbaddr, SPD_byte_127) << 8 | mctRead_SPD(smbaddr, SPD_byte_126));