2 * This file is part of the LinuxBIOS project.
4 * Copyright (C) 2007 Advanced Micro Devices, Inc.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; version 2 of the License.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 /* Description: Main memory controller system configuration for DDR 2 */
23 /* KNOWN ISSUES - ERRATA
25 * Trtp is not calculated correctly when the controller is in 64-bit mode, it
26 * is 1 busclock off. No fix planned. The controller is not ordinarily in
29 * 32 Byte burst not supported. No fix planned. The controller is not
30 * ordinarily in 64-bit mode.
32 * Trc precision does not use extra Jedec defined fractional component.
33 * Instead, Trc (coarse) is rounded up to the nearest 1 ns.
35 * Mini and Micro DIMM not supported. Only RDIMM, UDIMM, SO-DIMM defined types
39 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
40 struct DCTStatStruc *pDCTstatA);
41 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
42 struct DCTStatStruc *pDCTstatA);
43 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
44 struct DCTStatStruc *pDCTstatA);
45 static void ResetNBECCstat_D(struct MCTStatStruc *pMCTstat,
46 struct DCTStatStruc *pDCTstatA);
47 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
48 struct DCTStatStruc *pDCTstatA);
49 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
50 struct DCTStatStruc *pDCTstatA);
51 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
52 struct DCTStatStruc *pDCTstat);
53 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
54 struct DCTStatStruc *pDCTstat);
55 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
56 struct DCTStatStruc *pDCTstatA);
57 static u8 NodePresent_D(u8 Node);
58 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
59 struct DCTStatStruc *pDCTstatA);
60 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
61 struct DCTStatStruc *pDCTstat, u8 dct);
62 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
63 struct DCTStatStruc *pDCTstat, u8 dct);
64 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
65 struct DCTStatStruc *pDCTstat, u8 dct);
66 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
67 struct DCTStatStruc *pDCTstat);
68 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
69 struct DCTStatStruc *pDCTstat, u8 dct);
70 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
71 struct DCTStatStruc *pDCTstat, u8 dct);
72 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
73 struct DCTStatStruc *pDCTstat, u8 dct);
74 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
75 struct DCTStatStruc *pDCTstat, u8 dct);
76 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
77 struct DCTStatStruc *pDCTstat, u8 dct);
78 static u8 Get_DefTrc_k_D(u8 k);
79 static u16 Get_40Tk_D(u8 k);
80 static u16 Get_Fk_D(u8 k);
81 static u8 Dimm_Supports_D(struct DCTStatStruc *pDCTstat, u8 i, u8 j, u8 k);
82 static u8 Sys_Capability_D(struct MCTStatStruc *pMCTstat,
83 struct DCTStatStruc *pDCTstat, int j, int k);
84 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i);
85 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
86 struct DCTStatStruc *pDCTstat);
87 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
88 struct DCTStatStruc *pDCTstat, u8 dct);
89 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
90 struct DCTStatStruc *pDCTstat, u8 dct);
91 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat);
92 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
93 struct DCTStatStruc *pDCTstat, u8 dct);
94 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
95 struct DCTStatStruc *pDCTstat, u8 dct);
96 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,\
97 struct DCTStatStruc *pDCTstat, u8 dct);
98 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
99 struct DCTStatStruc *pDCTstat, u8 dct);
100 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
101 struct DCTStatStruc *pDCTstat, u8 dct);
102 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
103 struct DCTStatStruc *pDCTstat, u8 dct);
104 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
105 struct DCTStatStruc *pDCTstat, u8 dct);
106 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
107 struct DCTStatStruc *pDCTstat, u8 dct);
108 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
109 struct DCTStatStruc *pDCTstat, u8 dct);
110 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
111 struct DCTStatStruc *pDCTstat);
112 static u8 Check_DqsRcvEn_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
113 u32 dev, u32 index_reg, u32 index);
114 static u8 Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
115 u32 dev, u32 index_reg);
116 static u8 Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat, u8 dct,
117 u32 dev, u32 index_reg);
118 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
119 u32 dev, u32 index_reg, u32 index);
120 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
121 struct DCTStatStruc *pDCTstat);
122 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat, u8 dct,
123 u32 dev, u32 index_reg, u32 index);
124 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat,
125 struct DCTStatStruc *pDCTstat);
126 static void mct_init(struct MCTStatStruc *pMCTstat,
127 struct DCTStatStruc *pDCTstat);
128 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
129 struct DCTStatStruc *pDCTstat);
130 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
131 struct DCTStatStruc *pDCTstatA);
132 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
133 struct DCTStatStruc *pDCTstat, u8 dct);
134 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
135 struct DCTStatStruc *pDCTstat, u8 dct);
136 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
137 struct DCTStatStruc *pDCTstat, u8 dct);
138 static u32 mct_NodePresent_D(void);
139 static void WaitRoutine_D(u32 time);
140 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
141 struct DCTStatStruc *pDCTstatA);
142 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
143 struct DCTStatStruc *pDCTstatA);
144 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
145 struct DCTStatStruc *pDCTstat);
146 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
147 struct DCTStatStruc *pDCTstat);
148 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
149 struct DCTStatStruc *pDCTstat);
150 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
151 struct DCTStatStruc *pDCTstat);
152 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
153 struct DCTStatStruc *pDCTstat);
154 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
155 struct DCTStatStruc *pDCTstatA);
156 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct);
157 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
158 struct DCTStatStruc *pDCTstat, u8 dct);
161 /*See mctAutoInitMCT header for index relationships to CL and T*/
162 static const u16 Table_F_k[] = {00,200,266,333,400,533 };
163 static const u8 Table_T_k[] = {0x00,0x50,0x3D,0x30,0x25, 0x18 };
164 static const u8 Table_CL2_j[] = {0x04,0x08,0x10,0x20,0x40, 0x80 };
165 static const u8 Tab_defTrc_k[] = {0x0,0x41,0x3C,0x3C,0x3A, 0x3A };
166 static const u16 Tab_40T_k[] = {00,200,150,120,100,75 };
167 static const u8 Tab_TrefT_k[] = {00,0,1,1,2,2,3,4,5,6,0,0};
168 static const u8 Tab_BankAddr[] = {0x0,0x08,0x09,0x10,0x0C,0x0D,0x11,0x0E,0x15,0x16,0x0F,0x17};
169 static const u8 Tab_tCL_j[] = {0,2,3,4,5};
170 static const u8 Tab_1KTfawT_k[] = {00,8,10,13,14,20};
171 static const u8 Tab_2KTfawT_k[] = {00,10,14,17,18,24};
172 static const u8 Tab_L1CLKDis[] = {8,8,6,4,2,0,8,8};
173 static const u8 Tab_M2CLKDis[] = {2,0,8,8,2,0,2,0};
174 static const u8 Tab_S1CLKDis[] = {8,0,8,8,8,0,8,0};
175 static const u8 Table_Comp_Rise_Slew_20x[] = {7, 3, 2, 2, 0xFF};
176 static const u8 Table_Comp_Rise_Slew_15x[] = {7, 7, 3, 2, 0xFF};
177 static const u8 Table_Comp_Fall_Slew_20x[] = {7, 5, 3, 2, 0xFF};
178 static const u8 Table_Comp_Fall_Slew_15x[] = {7, 7, 5, 3, 0xFF};
180 void mctAutoInitMCT_D(struct MCTStatStruc *pMCTstat,
181 struct DCTStatStruc *pDCTstatA)
/*
 * Top-level automatic memory-controller initialization for all nodes:
 * per-node probing and DCT init, DCT-ready sync, HT memory map setup,
 * CPU cacheability typing, DQS training, secondary timings, interleave,
 * ECC setup and memory clear.
 * NOTE(review): this extraction is missing interior lines (see the gaps
 * in the embedded numbering); comments describe only the visible code.
 */
184 * Memory may be mapped contiguously all the way up to 4GB (depending
185 * on setup options). It is the responsibility of PCI subsystem to
186 * create an uncacheable IO region below 4GB and to adjust TOP_MEM
187 * downward prior to any IO mapping or accesses. It is the same
188 * responsibility of the CPU sub-system prior to accessing LAPIC.
190 * Slot Number is an external convention, and is determined by OEM with
191 * accompanying silk screening. OEM may choose to use Slot number
192 * convention which is consistent with DIMM number conventions.
193 * All AMD engineering
196 * Run-Time Requirements:
197 * 1. Complete Hypertransport Bus Configuration
198 * 2. SMBus Controller Initialized
199 * 3. Checksummed or Valid NVRAM bits
200 * 4. MCG_CTL=-1, MC4_CTL_EN=0 for all CPUs
201 * 5. MCi_STS from shutdown/warm reset recorded (if desired) prior to
203 * 6. All var MTRRs reset to zero
204 * 7. State of NB_CFG.DisDatMsk set properly on all CPUs
205 * 8. All CPUs at 2GHz Speed (unless DQS training is not installed).
206 * 9. All cHT links at max Speed/Width (unless DQS training is not
210 * Global relationship between index values and item values:
212 * --------------------------
224 mctInitMemGPIOs_A_D(); /* Set any required GPIOs*/
/* Per-node pass: cache the PCI function handles for this node, then
 * probe it and run the early MCT/DCT initialization. */
227 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
228 struct DCTStatStruc *pDCTstat;
229 pDCTstat = pDCTstatA + Node;
230 pDCTstat->Node_ID = Node;
231 pDCTstat->dev_host = PA_HOST(Node);
232 pDCTstat->dev_map = PA_MAP(Node);
233 pDCTstat->dev_dct = PA_DCT(Node);
234 pDCTstat->dev_nbmisc = PA_NBMISC(Node);
235 pDCTstat->NodeSysBase = node_sys_base;
237 print_tx("mctAutoInitMCT_D: mct_init Node ", Node);
238 mct_init(pMCTstat, pDCTstat);
239 mctNodeIDDebugPort_D();
240 pDCTstat->NodePresent = NodePresent_D(Node);
241 if (pDCTstat->NodePresent) { /* See if Node is there*/
242 print_t("mctAutoInitMCT_D: clear_legacy_Mode\n");
243 clear_legacy_Mode(pMCTstat, pDCTstat);
244 pDCTstat->LogicalCPUID = mctGetLogicalCPUID_D(Node);
246 print_t("mctAutoInitMCT_D: mct_InitialMCT_D\n");
247 mct_InitialMCT_D(pMCTstat, pDCTstat);
249 print_t("mctAutoInitMCT_D: mctSMBhub_Init\n");
250 mctSMBhub_Init(Node); /* Switch SMBUS crossbar to proper node*/
252 print_t("mctAutoInitMCT_D: mct_initDCT\n");
253 mct_initDCT(pMCTstat, pDCTstat);
254 if (pDCTstat->ErrCode == SC_FatalErr) {
255 goto fatalexit; /* any fatal errors?*/
256 } else if (pDCTstat->ErrCode < SC_StopError) {
259 } /* if Node present */
/* Advance the running system base past this node's limit, rounded up
 * (the +2 then mask aligns the 39:8-shifted address). */
260 node_sys_base = pDCTstat->NodeSysBase;
261 node_sys_base += (pDCTstat->NodeSysLimit + 2) & ~0x0F;
263 if (NodesWmem == 0) {
264 print_debug("No Nodes?!\n");
268 print_t("mctAutoInitMCT_D: SyncDCTsReady_D\n");
269 SyncDCTsReady_D(pMCTstat, pDCTstatA); /* Make sure DCTs are ready for accesses.*/
271 print_t("mctAutoInitMCT_D: HTMemMapInit_D\n");
272 HTMemMapInit_D(pMCTstat, pDCTstatA); /* Map local memory into system address space.*/
275 print_t("mctAutoInitMCT_D: CPUMemTyping_D\n");
276 CPUMemTyping_D(pMCTstat, pDCTstatA); /* Map dram into WB/UC CPU cacheability */
277 mctHookAfterCPU(); /* Setup external northbridge(s) */
279 print_t("mctAutoInitMCT_D: DQSTiming_D\n");
280 DQSTiming_D(pMCTstat, pDCTstatA); /* Get Receiver Enable and DQS signal timing*/
282 print_t("mctAutoInitMCT_D: :OtherTiming\n");
283 mct_OtherTiming(pMCTstat, pDCTstatA);
285 if (ReconfigureDIMMspare_D(pMCTstat, pDCTstatA)) { /* RESET# if 1st pass of DIMM spare enabled*/
289 InterleaveNodes_D(pMCTstat, pDCTstatA);
290 InterleaveChannels_D(pMCTstat, pDCTstatA);
292 print_t("mctAutoInitMCT_D: ECCInit_D\n");
293 if (ECCInit_D(pMCTstat, pDCTstatA)) { /* Setup ECC control and ECC check-bits*/
294 print_t("mctAutoInitMCT_D: MCTMemClr_D\n");
295 MCTMemClr_D(pMCTstat,pDCTstatA);
298 mct_FinalMCT_D(pMCTstat, (pDCTstatA + 0) ); // Node 0
299 print_t("All Done\n");
303 die("mct_d: fatalexit");
307 static u8 ReconfigureDIMMspare_D(struct MCTStatStruc *pMCTstat,
308 struct DCTStatStruc *pDCTstatA)
/*
 * First-pass handling for the chip-select DIMM-spare feature
 * (NV_CS_SpareCTL). The caller treats a nonzero return as "warm RESET#
 * required". On the no-warm-reset path the data structures are reset and
 * GSB_EnDIMMSpareNW is latched so the second pass reloads saved DQS
 * timings instead of retraining.
 * NOTE(review): the tail of this function is missing from this extraction.
 */
312 if (mctGet_NVbits(NV_CS_SpareCTL)) {
313 if (MCT_DIMM_SPARE_NO_WARM) {
314 /* Do no warm-reset DIMM spare */
315 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
316 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA);
319 mct_ResetDataStruct_D(pMCTstat, pDCTstatA);
320 pMCTstat->GStatus |= 1 << GSB_EnDIMMSpareNW;
324 /* Do warm-reset DIMM spare */
325 if (mctGet_NVbits(NV_DQSTrainCTL))
339 static void DQSTiming_D(struct MCTStatStruc *pMCTstat,
340 struct DCTStatStruc *pDCTstatA)
/*
 * Acquire DQS receiver-enable and read/write DQS position timings.
 * If NV_DQSTrainCTL is set, run the hardware-assisted training sequence
 * and save the results; otherwise restore previously saved timing values
 * into the DCT registers. Both paths finish by starting the memory clear.
 */
344 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
347 nv_DQSTrainCTL = mctGet_NVbits(NV_DQSTrainCTL);
348 /* FIXME: BOZO- DQS training every time*/
351 if (nv_DQSTrainCTL) {
352 print_t("DQSTiming_D: mct_BeforeDQSTrain_D:\n");
353 mct_BeforeDQSTrain_D(pMCTstat, pDCTstatA);;
354 phyAssistedMemFnceTraining(pMCTstat, pDCTstatA);
355 mctHookBeforeAnyTraining();
357 print_t("DQSTiming_D: TrainReceiverEn_D FirstPass:\n");
358 TrainReceiverEn_D(pMCTstat, pDCTstatA, FirstPass);
360 print_t("DQSTiming_D: mct_TrainDQSPos_D\n");
361 mct_TrainDQSPos_D(pMCTstat, pDCTstatA);
363 // Second Pass never used for Barcelona!
364 //print_t("DQSTiming_D: TrainReceiverEn_D SecondPass:\n");
365 //TrainReceiverEn_D(pMCTstat, pDCTstatA, SecondPass);
367 print_t("DQSTiming_D: mctSetEccDQSRcvrEn_D\n");
368 mctSetEccDQSRcvrEn_D(pMCTstat, pDCTstatA);
370 print_t("DQSTiming_D: TrainMaxReadLatency_D\n");
371 //FIXME - currently uses calculated value TrainMaxReadLatency_D(pMCTstat, pDCTstatA);
372 mctHookAfterAnyTraining();
373 mctSaveDQSSigTmg_D();
375 print_t("DQSTiming_D: mct_EndDQSTraining_D\n");
376 mct_EndDQSTraining_D(pMCTstat, pDCTstatA);
378 print_t("DQSTiming_D: MCTMemClr_D\n");
379 MCTMemClr_D(pMCTstat, pDCTstatA);
/* No-training path: reload saved DQS values and clear memory. */
381 mctGetDQSSigTmg_D(); /* get values into data structure */
382 LoadDQSSigTmgRegs_D(pMCTstat, pDCTstatA); /* load values into registers.*/
383 //mctDoWarmResetMemClr_D();
384 MCTMemClr_D(pMCTstat, pDCTstatA);
389 static void LoadDQSSigTmgRegs_D(struct MCTStatStruc *pMCTstat,
390 struct DCTStatStruc *pDCTstatA)
/*
 * Write the already-acquired DQS signal timings from the per-node DCT
 * status structures into the DCT additional-data index registers
 * (offset 0x98 per channel): receiver-enable delays, ECC DQS receiver
 * enables, read/write DQS data timings, and max read latency.
 */
392 u8 Node, Receiver, Channel, Dir, DIMM;
400 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
401 struct DCTStatStruc *pDCTstat;
402 pDCTstat = pDCTstatA + Node;
404 if (pDCTstat->DCTSysLimit) {
405 dev = pDCTstat->dev_dct;
406 for (Channel = 0;Channel < 2; Channel++) {
407 /* there are four receiver pairs,
408 loosely associated with chipselects.*/
409 index_reg = 0x98 + Channel * 0x100;
410 for (Receiver = 0; Receiver < 8; Receiver += 2) {
411 /* Set Receiver Enable Values */
412 mct_SetRcvrEnDly_D(pDCTstat,
414 1, /* FinalValue, From stack */
418 (Receiver >> 1) * 3 + 0x10, /* Addl_Index */
419 2); /* Pass Second Pass ? */
/* Program ECC DQS receiver enables for both channels. */
423 for (Channel = 0; Channel<2; Channel++) {
424 SetEccDQSRcvrEn_D(pDCTstat, Channel);
427 for (Channel = 0; Channel < 2; Channel++) {
429 index_reg = 0x98 + Channel * 0x100;
432 * when 400, 533, 667, it will support dimm0/1/2/3,
433 * and set conf for dimm0, hw will copy to dimm1/2/3
434 * set for dimm1, hw will copy to dimm3
435 * Rev A/B only support DIMM0/1 when 800Mhz and above
436 * + 0x100 to next dimm
437 * Rev C support DIMM0/1/2/3 when 800Mhz and above
438 * + 0x100 to next dimm
440 for (DIMM = 0; DIMM < 2; DIMM++) {
442 index = 0; /* CHA Write Data Timing Low */
/* At 800MHz and above (Speed >= 4) each DIMM has its own
 * 0x100-spaced block of timing indices. */
444 if (pDCTstat->Speed >= 4) {
445 index = 0x100 * DIMM;
450 for (Dir=0;Dir<2;Dir++) {//RD/WR
451 p = pDCTstat->CH_D_DIR_B_DQS[Channel][DIMM][Dir];
452 val = stream_to_int(p); /* CHA Read Data Timing High */
453 Set_NB32_index_wait(dev, index_reg, index+1, val);
454 val = stream_to_int(p+4); /* CHA Write Data Timing High */
455 Set_NB32_index_wait(dev, index_reg, index+2, val);
456 val = *(p+8); /* CHA Write ECC Timing */
457 Set_NB32_index_wait(dev, index_reg, index+3, val);
/* Program max read latency (placed at bit 22 of reg 0x78) and
 * clear the DqsRcvEnTrain training-mode bit. */
463 for (Channel = 0; Channel<2; Channel++) {
464 reg = 0x78 + Channel * 0x100;
465 val = Get_NB32(dev, reg);
467 val |= ((u32) pDCTstat->CH_MaxRdLat[Channel] << 22);
468 val &= ~(1<<DqsRcvEnTrain);
469 Set_NB32(dev, reg, val); /* program MaxRdLatency to correspond with current delay*/
476 static void ResetNBECCstat_D(struct MCTStatStruc *pMCTstat,
477 struct DCTStatStruc *pDCTstatA)
479 /* Clear MC4_STS for all Nodes in the system. This is required in some
480 * circumstances to clear left over garbage from cold reset, shutdown,
481 * or normal ECC memory conditioning.
484 //FIXME: this function depends on pDCTstat Array ( with Node id ) - Is this really a problem?
/* Zero both 32-bit halves of the MCA NB status register on every
 * present node, via that node's NB miscellaneous PCI function. */
489 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
490 struct DCTStatStruc *pDCTstat;
491 pDCTstat = pDCTstatA + Node;
493 if (pDCTstat->NodePresent) {
494 dev = pDCTstat->dev_nbmisc;
495 /*MCA NB Status Low (alias to MC4_STS[31:0] */
496 Set_NB32(dev, 0x48, 0);
497 /* MCA NB Status High (alias to MC4_STS[63:32] */
498 Set_NB32(dev, 0x4C, 0);
504 static void HTMemMapInit_D(struct MCTStatStruc *pMCTstat,
505 struct DCTStatStruc *pDCTstatA)
/*
 * Build the HyperTransport DRAM address map: assign each node's DRAM
 * base/limit, carve out the memory hole below 4GB (hoisting/remapping
 * per NV_MemHole), program the Node 0 DRAM Base/Limit register pairs
 * (0x40/0x44 + Node*8), then copy the completed map to all other nodes.
 * NOTE(review): interior lines are missing from this extraction;
 * comments cover only the visible statements.
 */
508 u32 NextBase, BottomIO;
509 u8 _MemHoleRemap, DramHoleBase, DramHoleOffset;
510 u32 HoleSize, DramSelBaseAddr;
516 struct DCTStatStruc *pDCTstat;
518 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
/* BottomIO: base of the IO hole, kept right-justified by 8 (addr[39:8]).
 * Use the NVRAM setting on first pass, else the previously found hole. */
520 if (pMCTstat->HoleBase == 0) {
521 DramHoleBase = mctGet_NVbits(NV_BottomIO);
523 DramHoleBase = pMCTstat->HoleBase >> (24-8);
526 BottomIO = DramHoleBase << (24-8);
529 pDCTstat = pDCTstatA + 0;
530 dev = pDCTstat->dev_map;
533 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
535 pDCTstat = pDCTstatA + Node;
536 if (!pDCTstat->GangedMode) {
537 DramSelBaseAddr = pDCTstat->NodeSysLimit - pDCTstat->DCTSysLimit;
538 /*In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
539 val = pDCTstat->NodeSysLimit;
540 if ((val & 0xFF) == 0xFE) {
544 pDCTstat->DCTSysLimit = val;
547 base = pDCTstat->DCTSysBase;
548 limit = pDCTstat->DCTSysLimit;
552 DramSelBaseAddr += NextBase;
553 printk_debug(" Node: %02x base: %02x limit: %02x BottomIO: %02x\n", Node, base, limit, BottomIO);
/* This node's range straddles the IO hole: enable the hardware
 * hoist (DRAM Hole Address Register, F1xF0) and grow the limit by
 * the hole size. */
556 if ((base < BottomIO) && (limit >= BottomIO)) {
558 pDCTstat->Status |= 1 << SB_HWHole;
559 pMCTstat->GStatus |= 1 << GSB_HWHole;
560 pDCTstat->DCTSysBase = base;
561 pDCTstat->DCTSysLimit = limit;
562 pDCTstat->DCTHoleBase = BottomIO;
563 pMCTstat->HoleBase = BottomIO;
564 HoleSize = _4GB_RJ8 - BottomIO; /* HoleSize[39:8] */
565 if ((DramSelBaseAddr > 0) && (DramSelBaseAddr < BottomIO))
566 base = DramSelBaseAddr;
567 val = ((base + HoleSize) >> (24-8)) & 0xFF;
568 DramHoleOffset = val;
569 val <<= 8; /* shl 16, rol 24 */
570 val |= DramHoleBase << 24;
571 val |= 1 << DramHoleValid;
572 Set_NB32(dev, 0xF0, val); /*Dram Hole Address Register*/
573 pDCTstat->DCTSysLimit += HoleSize;
574 base = pDCTstat->DCTSysBase;
575 limit = pDCTstat->DCTSysLimit;
576 } else if (base == BottomIO) {
/* Node base coincides with the hole: software (node) hole. */
578 pMCTstat->GStatus |= 1<<GSB_SpIntRemapHole;
579 pDCTstat->Status |= 1<<SB_SWNodeHole;
580 pMCTstat->GStatus |= 1<<GSB_SoftHole;
581 pMCTstat->HoleBase = base;
585 pDCTstat->DCTSysBase = base;
586 pDCTstat->DCTSysLimit = limit;
588 /* No Remapping. Normal Contiguous mapping */
589 pDCTstat->DCTSysBase = base;
590 pDCTstat->DCTSysLimit = limit;
593 /*No Remapping. Normal Contiguous mapping*/
594 pDCTstat->DCTSysBase = base;
595 pDCTstat->DCTSysLimit = limit;
597 base |= 3; /* set WE,RE fields*/
598 pMCTstat->SysLimit = limit;
600 Set_NB32(dev, 0x40 + (Node << 3), base); /* [Node] + Dram Base 0 */
601 val = limit & 0xffff0000;
602 val |= Node; /* set DstNode*/
603 Set_NB32(dev, 0x44 + (Node << 3), val); /* set DstNode */
605 limit = pDCTstat->DCTSysLimit;
607 NextBase = (limit & 0xffff0000) + 0x10000;
611 /* Copy dram map from Node 0 to Node 1-7 */
612 for (Node = 1; Node < MAX_NODES_SUPPORTED; Node++) {
613 pDCTstat = pDCTstatA + Node;
615 u32 devx = pDCTstat->dev_map;
617 if (pDCTstat->NodePresent) {
618 printk_debug(" Copy dram map from Node 0 to Node %02x \n", Node);
619 reg = 0x40; /*Dram Base 0*/
621 val = Get_NB32(dev, reg);
622 Set_NB32(devx, reg, val);
624 } while ( reg < 0x80);
626 break; /* stop at first absent Node */
630 /*Copy dram map to F1x120/124*/
631 mct_HTMemMapExt(pMCTstat, pDCTstatA);
635 static void MCTMemClr_D(struct MCTStatStruc *pMCTstat,
636 struct DCTStatStruc *pDCTstatA)
639 /* Initiates a memory clear operation for all nodes. The mem clr
640 * is done in parallel. After the memclr is complete, all processors
641 * status are checked to ensure that memclr has completed.
644 struct DCTStatStruc *pDCTstat;
646 if (!mctGet_NVbits(NV_DQSTrainCTL)){
647 // FIXME: callback to wrapper: mctDoWarmResetMemClr_D
648 } else { // NV_DQSTrainCTL == 1
/* First loop kicks off the clear on every present node... */
649 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
650 pDCTstat = pDCTstatA + Node;
652 if (pDCTstat->NodePresent) {
653 DCTMemClr_Init_D(pMCTstat, pDCTstat);
/* ...second loop then waits for each node to finish, so all
 * nodes clear memory concurrently. */
656 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
657 pDCTstat = pDCTstatA + Node;
659 if (pDCTstat->NodePresent) {
660 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
667 static void DCTMemClr_Init_D(struct MCTStatStruc *pMCTstat,
668 struct DCTStatStruc *pDCTstat)
674 /* Initiates a memory clear operation on one node */
/* Skip nodes with no mapped DRAM (DCTSysLimit == 0). */
675 if (pDCTstat->DCTSysLimit) {
676 dev = pDCTstat->dev_dct;
/* Wait for any in-flight clear to finish before starting a new one. */
680 val = Get_NB32(dev, reg);
681 } while (val & (1 << MemClrBusy));
/* Set MemClrInit to start the hardware memory clear; completion is
 * checked later by DCTMemClr_Sync_D(). */
683 val |= (1 << MemClrInit);
684 Set_NB32(dev, reg, val);
690 static void MCTMemClrSync_D(struct MCTStatStruc *pMCTstat,
691 struct DCTStatStruc *pDCTstatA)
693 /* Ensures that memory clear has completed on all nodes.*/
695 struct DCTStatStruc *pDCTstat;
696 // Only meaningful when DQS training ran (mirrors MCTMemClr_D).
697 if (!mctGet_NVbits(NV_DQSTrainCTL)){
698 // callback to wrapper: mctDoWarmResetMemClr_D
699 } else { // NV_DQSTrainCTL == 1
700 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
701 pDCTstat = pDCTstatA + Node;
703 if (pDCTstat->NodePresent) {
704 DCTMemClr_Sync_D(pMCTstat, pDCTstat);
711 static void DCTMemClr_Sync_D(struct MCTStatStruc *pMCTstat,
712 struct DCTStatStruc *pDCTstat)
715 u32 dev = pDCTstat->dev_dct;
718 /* Ensure that a memory clear operation has completed on one node */
719 if (pDCTstat->DCTSysLimit){
/* Poll until the controller drops MemClrBusy... */
723 val = Get_NB32(dev, reg);
724 } while (val & (1 << MemClrBusy));
/* ...then until it raises MemClrStatus, confirming completion. */
727 val = Get_NB32(dev, reg);
728 } while (!(val & (1 << Dr_MemClrStatus)));
/* Program the memory controller hit configuration (reg 0x11C). */
731 val = 0x0FE40FC0; // BKDG recommended
732 val |= MCCH_FlushWrOnStpGnt; // Set for S3
733 Set_NB32(dev, 0x11C, val);
737 static u8 NodePresent_D(u8 Node)
740 * Determine if a single Hammer Node exists within the network.
/* Probe the node's host bridge: compare the vendor/device ID at PCI
 * config offset 0 against the expected value, then verify the Node ID
 * register (offset 0x60) matches the requested node.
 * NOTE(review): the return statements are missing from this extraction. */
748 dev = PA_HOST(Node); /*test device/vendor id at host bridge */
749 val = Get_NB32(dev, 0);
750 dword = mct_NodePresent_D(); /* FIXME: BOZO -11001022h rev for F */
751 if (val == dword) { /* AMD Hammer Family CPU HT Configuration */
752 if (oemNodePresent_D(Node, &ret))
754 /* Node ID register */
755 val = Get_NB32(dev, 0x60);
758 if (val == dword) /* current nodeID = requested nodeID ? */
768 static void DCTInit_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat, u8 dct)
771 * Initialize DRAM on single Athlon 64/Opteron Node.
777 ClearDCT_D(pMCTstat, pDCTstat, dct);
778 stopDCTflag = 1; /*preload flag with 'disable' */
/* Each stage runs only if the previous one left the error level below
 * SC_StopError; a failure anywhere falls through to the disable path. */
779 if (mct_DIMMPresence(pMCTstat, pDCTstat, dct) < SC_StopError) {
780 print_t("\t\tDCTInit_D: mct_DIMMPresence Done\n");
781 if (mct_SPDCalcWidth(pMCTstat, pDCTstat, dct) < SC_StopError) {
782 print_t("\t\tDCTInit_D: mct_SPDCalcWidth Done\n");
783 if (AutoCycTiming_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
784 print_t("\t\tDCTInit_D: AutoCycTiming_D Done\n");
785 if (AutoConfig_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
786 print_t("\t\tDCTInit_D: AutoConfig_D Done\n");
787 if (PlatformSpec_D(pMCTstat, pDCTstat, dct) < SC_StopError) {
788 print_t("\t\tDCTInit_D: PlatformSpec_D Done\n");
/* Skip dram init on the DIMM-spare no-warm-reset second pass. */
790 if (!(pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW))) {
791 print_t("\t\tDCTInit_D: StartupDCT_D\n");
792 StartupDCT_D(pMCTstat, pDCTstat, dct); /*yeaahhh! */
/* Disable path: shut off the DRAM interface for this DCT
 * (DisDramInterface in reg 0x94 + dct*0x100). */
800 u32 reg_off = dct * 0x100;
801 val = 1<<DisDramInterface;
802 Set_NB32(pDCTstat->dev_dct, reg_off+0x94, val);
803 /*To maximize power savings when DisDramInterface=1b,
804 all of the MemClkDis bits should also be set.*/
806 Set_NB32(pDCTstat->dev_dct, reg_off+0x88, val);
811 static void SyncDCTsReady_D(struct MCTStatStruc *pMCTstat,
812 struct DCTStatStruc *pDCTstatA)
814 /* Wait (and block further access to dram) for all DCTs to be ready,
815 * by polling all InitDram bits and waiting for possible memory clear
816 * operations to be complete. Read MemClkFreqVal bit to see if
817 * the DIMMs are present in this node.
/* The per-node readiness poll is delegated to mct_SyncDCTsReady(). */
822 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
823 struct DCTStatStruc *pDCTstat;
824 pDCTstat = pDCTstatA + Node;
825 mct_SyncDCTsReady(pDCTstat);
830 static void StartupDCT_D(struct MCTStatStruc *pMCTstat,
831 struct DCTStatStruc *pDCTstat, u8 dct)
833 /* Read MemClkFreqVal bit to see if the DIMMs are present in this node.
834 * If the DIMMs are present then set the DRAM Enable bit for this node.
836 * Setting dram init starts up the DCT state machine, initializes the
837 * dram devices with MRS commands, and kicks off any
838 * HW memory clear process that the chip is capable of. The sooner
839 * that dram init is set for all nodes, the faster the memory system
840 * initialization can complete. Thus, the init loop is unrolled into
841 * two loops so as to start the processes for non BSP nodes sooner.
842 * This procedure will not wait for the process to finish.
843 * Synchronization is handled elsewhere.
850 u32 reg_off = dct * 0x100;
852 dev = pDCTstat->dev_dct;
853 val = Get_NB32(dev, 0x94 + reg_off);
854 if (val & (1<<MemClkFreqVal)) {
855 print_t("\t\t\tStartupDCT_D: MemClkFreqVal\n");
/* If DQS training will run, first put the DCT in receiver-enable
 * training mode (DqsRcvEnTrain in reg 0x78). */
856 byte = mctGet_NVbits(NV_DQSTrainCTL);
858 /* Enable DQSRcvEn training mode */
859 print_t("\t\t\tStartupDCT_D: DqsRcvEnTrain set \n");
860 reg = 0x78 + reg_off;
861 val = Get_NB32(dev, reg);
862 /* Setting this bit forces a 1T window with hard left
863 * pass/fail edge and a probabilistic right pass/fail
864 * edge. LEFT edge is referenced for final
865 * receiver enable position.*/
866 val |= 1 << DqsRcvEnTrain;
867 Set_NB32(dev, reg, val);
869 mctHookBeforeDramInit(); /* generalized Hook */
870 print_t("\t\t\tStartupDCT_D: DramInit \n");
871 mct_DramInit(pMCTstat, pDCTstat, dct);
872 AfterDramInit_D(pDCTstat, dct);
873 mctHookAfterDramInit(); /* generalized Hook*/
878 static void ClearDCT_D(struct MCTStatStruc *pMCTstat,
879 struct DCTStatStruc *pDCTstat, u8 dct)
/* Zero this DCT's register block (starting at 0x40 + dct*0x100) to a
 * known state before (re)configuration. */
882 u32 dev = pDCTstat->dev_dct;
883 u32 reg = 0x40 + 0x100 * dct;
/* On the DIMM-spare no-warm-reset pass, stop earlier (0x78) so the
 * trained timing registers above it are preserved. */
886 if (pMCTstat->GStatus & (1 << GSB_EnDIMMSpareNW)) {
887 reg_end = 0x78 + 0x100 * dct;
889 reg_end = 0xA4 + 0x100 * dct;
892 while(reg < reg_end) {
893 Set_NB32(dev, reg, val);
/* Also clear via the address-map function. */
898 dev = pDCTstat->dev_map;
900 Set_NB32(dev, reg, val);
904 static u8 AutoCycTiming_D(struct MCTStatStruc *pMCTstat,
905 struct DCTStatStruc *pDCTstat, u8 dct)
907 /* Initialize DCT Timing registers as per DIMM SPD.
908 * For primary timing (T, CL) use best case T value.
909 * For secondary timing params., use most aggressive settings
912 * There are three components to determining "maximum frequency":
913 * SPD component, Bus load component, and "Preset" max frequency
916 * The SPD component is a function of the min cycle time specified
917 * by each DIMM, and the interaction of cycle times from all DIMMs
918 * in conjunction with CAS latency. The SPD component only applies
919 * when user timing mode is 'Auto'.
921 * The Bus load component is a limiting factor determined by electrical
922 * characteristics on the bus as a result of varying number of device
923 * loads. The Bus load component is specific to each platform but may
924 * also be a function of other factors. The bus load component only
925 * applies when user timing mode is 'Auto'.
927 * The Preset component is subdivided into three items and is the
928 * minimum of the set: Silicon revision, user limit setting when user
929 * timing mode is 'Auto' and memclock mode is 'Limit', OEM build
930 * specification of the maximum frequency. The Preset component is only
931 * applies when user timing mode is 'Auto'.
936 u8 Trp, Trrd, Trcd, Tras, Trc, Trfc[4], Rows;
937 u32 DramTimingLo, DramTimingHi;
950 /* Get primary timing (CAS Latency and Cycle Time) */
951 if (pDCTstat->Speed == 0) {
952 mctGet_MaxLoadFreq(pDCTstat);
954 /* and Factor in presets (setup options, Si cap, etc.) */
955 GetPresetmaxF_D(pMCTstat, pDCTstat);
957 /* Go get best T and CL as specified by DIMM mfgs. and OEM */
958 SPDGetTCL_D(pMCTstat, pDCTstat, dct);
959 /* skip callback mctForce800to1067_D */
960 pDCTstat->Speed = pDCTstat->DIMMAutoSpeed;
961 pDCTstat->CASL = pDCTstat->DIMMCASL;
963 /* if "manual" memclock mode */
964 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 2)
965 pDCTstat->Speed = mctGet_NVbits(NV_MemCkVal) + 1;
967 mct_AfterGetCLT(pMCTstat, pDCTstat, dct);
970 /* Gather all DIMM mini-max values for cycle timing data */
980 for (i=0; i < 4; i++)
983 for ( i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
985 if (pDCTstat->DIMMValid & (1 << i)) {
986 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
987 byte = mctRead_SPD(smbaddr, SPD_ROWSZ);
989 Rows = byte; /* keep track of largest row sz */
991 byte = mctRead_SPD(smbaddr, SPD_TRP);
995 byte = mctRead_SPD(smbaddr, SPD_TRRD);
999 byte = mctRead_SPD(smbaddr, SPD_TRCD);
1003 byte = mctRead_SPD(smbaddr, SPD_TRTP);
1007 byte = mctRead_SPD(smbaddr, SPD_TWR);
1011 byte = mctRead_SPD(smbaddr, SPD_TWTR);
1015 val = mctRead_SPD(smbaddr, SPD_TRC);
1016 if ((val == 0) || (val == 0xFF)) {
1017 pDCTstat->ErrStatus |= 1<<SB_NoTrcTrfc;
1018 pDCTstat->ErrCode = SC_VarianceErr;
1019 val = Get_DefTrc_k_D(pDCTstat->DIMMAutoSpeed);
1021 byte = mctRead_SPD(smbaddr, SPD_TRCRFC);
1023 val++; /* round up in case fractional extension is non-zero.*/
1029 /* dev density=rank size/#devs per rank */
1030 byte = mctRead_SPD(smbaddr, SPD_BANKSZ);
1032 val = ((byte >> 5) | (byte << 3)) & 0xFF;
1035 byte = mctRead_SPD(smbaddr, SPD_DEVWIDTH) & 0xFE; /* dev density=2^(rows+columns+banks) */
1038 } else if (byte == 8) {
1040 } else if (byte == 16) {
1046 if (Trfc[LDIMM] < byte)
1049 byte = mctRead_SPD(smbaddr, SPD_TRAS);
1052 } /* Dimm Present */
1055 /* Convert DRAM CycleTiming values and store into DCT structure */
1057 byte = pDCTstat->DIMMAutoSpeed;
1060 Tk40 = Get_40Tk_D(byte);
1064 1. All secondary time values given in SPDs are in binary with units of ns.
1065 2. Some time values are scaled by four, in order to have least count of 0.25 ns
1066 (more accuracy). JEDEC SPD spec. shows which ones are x1 and x4.
1067 3. Internally to this SW, cycle time, Tk, is scaled by 10 to affect a
1068 least count of 0.1 ns (more accuracy).
1069 4. SPD values not scaled are multiplied by 10 and then divided by 10T to find
1070 equivalent minimum number of bus clocks (a remainder causes round-up of clocks).
1071 5. SPD values that are prescaled by 4 are multiplied by 10 and then divided by 40T to find
1072 equivalent minimum number of bus clocks (a remainder causes round-up of clocks).*/
1076 pDCTstat->DIMMTras = (u16)dword;
1078 if (dword % Tk40) { /* round up number of busclocks */
1082 if (val < Min_TrasT_1066)
1083 val = Min_TrasT_1066;
1084 else if (val > Max_TrasT_1066)
1085 val = Max_TrasT_1066;
1087 if (val < Min_TrasT)
1089 else if (val > Max_TrasT)
1092 pDCTstat->Tras = val;
1096 pDCTstat->DIMMTrp = dword;
1098 if (dword % Tk40) { /* round up number of busclocks */
1102 if (val < Min_TrasT_1066)
1103 val = Min_TrpT_1066;
1104 else if (val > Max_TrpT_1066)
1105 val = Max_TrpT_1066;
1109 else if (val > Max_TrpT)
1112 pDCTstat->Trp = val;
1116 pDCTstat->DIMMTrrd = dword;
1118 if (dword % Tk40) { /* round up number of busclocks */
1122 if (val < Min_TrrdT_1066)
1123 val = Min_TrrdT_1066;
1124 else if (val > Max_TrrdT_1066)
1125 val = Max_TrrdT_1066;
1127 if (val < Min_TrrdT)
1129 else if (val > Max_TrrdT)
1132 pDCTstat->Trrd = val;
1136 pDCTstat->DIMMTrcd = dword;
1138 if (dword % Tk40) { /* round up number of busclocks */
1142 if (val < Min_TrcdT_1066)
1143 val = Min_TrcdT_1066;
1144 else if (val > Max_TrcdT_1066)
1145 val = Max_TrcdT_1066;
1147 if (val < Min_TrcdT)
1149 else if (val > Max_TrcdT)
1152 pDCTstat->Trcd = val;
1156 pDCTstat->DIMMTrc = dword;
1158 if (dword % Tk40) { /* round up number of busclocks */
1162 if (val < Min_TrcT_1066)
1163 val = Min_TrcT_1066;
1164 else if (val > Max_TrcT_1066)
1165 val = Max_TrcT_1066;
1169 else if (val > Max_TrcT)
1172 pDCTstat->Trc = val;
1176 pDCTstat->DIMMTrtp = dword;
1177 val = pDCTstat->Speed;
1179 val = 2; /* Calculate by 7.75ns / Speed in ns to get clock # */
1180 } else if (val == 4) { /* Note a speed of 3 will be a Trtp of 3 */
1182 } else if (val == 5){
1185 pDCTstat->Trtp = val;
1189 pDCTstat->DIMMTwr = dword;
1191 if (dword % Tk40) { /* round up number of busclocks */
1195 if (val < Min_TwrT_1066)
1196 val = Min_TwrT_1066;
1197 else if (val > Max_TwrT_1066)
1198 val = Max_TwrT_1066;
1202 else if (val > Max_TwrT)
1205 pDCTstat->Twr = val;
1209 pDCTstat->DIMMTwtr = dword;
1211 if (dword % Tk40) { /* round up number of busclocks */
1215 if (val < Min_TwrT_1066)
1216 val = Min_TwtrT_1066;
1217 else if (val > Max_TwtrT_1066)
1218 val = Max_TwtrT_1066;
1220 if (val < Min_TwtrT)
1222 else if (val > Max_TwtrT)
1225 pDCTstat->Twtr = val;
1230 pDCTstat->Trfc[i] = Trfc[i];
1232 mctAdjustAutoCycTmg_D();
1234 /* Program DRAM Timing values */
1235 DramTimingLo = 0; /* Dram Timing Low init */
1236 val = pDCTstat->CASL;
1237 val = Tab_tCL_j[val];
1238 DramTimingLo |= val;
1240 val = pDCTstat->Trcd;
1242 val -= Bias_TrcdT_1066;
1246 DramTimingLo |= val<<4;
1248 val = pDCTstat->Trp;
1250 val -= Bias_TrpT_1066;
1255 DramTimingLo |= val<<7;
1257 val = pDCTstat->Trtp;
1259 DramTimingLo |= val<<11;
1261 val = pDCTstat->Tras;
1263 val -= Bias_TrasT_1066;
1266 DramTimingLo |= val<<12;
1268 val = pDCTstat->Trc;
1270 DramTimingLo |= val<<16;
1273 val = pDCTstat->Twr;
1275 DramTimingLo |= val<<20;
1278 val = pDCTstat->Trrd;
1280 val -= Bias_TrrdT_1066;
1283 DramTimingLo |= val<<22;
1286 DramTimingHi = 0; /* Dram Timing Low init */
1287 val = pDCTstat->Twtr;
1289 val -= Bias_TwtrT_1066;
1292 DramTimingHi |= val<<8;
1295 DramTimingHi |= val<<16;
1302 DramTimingHi |= val << 20;
1305 dev = pDCTstat->dev_dct;
1306 reg_off = 0x100 * dct;
1307 print_tx("AutoCycTiming: DramTimingLo ", DramTimingLo);
1308 print_tx("AutoCycTiming: DramTimingHi ", DramTimingHi);
1310 Set_NB32(dev, 0x88 + reg_off, DramTimingLo); /*DCT Timing Low*/
1311 DramTimingHi |=0x0000FC77;
1312 Set_NB32(dev, 0x8c + reg_off, DramTimingHi); /*DCT Timing Hi*/
1316 dword = pDCTstat->Twr;
1317 dword -= Bias_TwrT_1066;
1319 reg = 0x84 + reg_off;
1320 val = Get_NB32(dev, reg);
1323 Set_NB32(dev, reg, val);
1325 // dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2));
1327 print_tx("AutoCycTiming: Status ", pDCTstat->Status);
1328 print_tx("AutoCycTiming: ErrStatus ", pDCTstat->ErrStatus);
1329 print_tx("AutoCycTiming: ErrCode ", pDCTstat->ErrCode);
1330 print_t("AutoCycTiming: Done\n");
1332 mctHookAfterAutoCycTmg();
1334 return pDCTstat->ErrCode;
/*
 * GetPresetmaxF_D: pDCTstat->PresetmaxFreq = min(Si-revision limit,
 * user "limit mode" override, platform NV limit, previous PresetmaxFreq).
 * NOTE(review): listing is elided (source line numbers jump); intervening
 * statements are not shown here.
 */
1338 static void GetPresetmaxF_D(struct MCTStatStruc *pMCTstat,
1339 struct DCTStatStruc *pDCTstat)
1341 /* Get max frequency from OEM platform definition, from any user
1342 * override (limiting) of max frequency, and from any Si Revision
1343 * Specific information. Return the least of these three in
1344 * DCTStatStruc.PresetmaxFreq.
1350 /* Get CPU Si Revision defined limit (NPT) */
1351 proposedFreq = 533; /* Rev F0 programmable max memclock is 533 MHz */
1353 /* Get User defined limit if "limit" mode */
1354 if ( mctGet_NVbits(NV_MCTUSRTMGMODE) == 1) {
1355 word = Get_Fk_D(mctGet_NVbits(NV_MemCkVal) + 1);
1356 if (word < proposedFreq)
1357 proposedFreq = word;
1359 /* Get Platform defined limit */
1360 word = mctGet_NVbits(NV_MAX_MEMCLK);
1361 if (word < proposedFreq)
1362 proposedFreq = word;
/* Final clamp: never raise the previously stored PresetmaxFreq. */
1364 word = pDCTstat->PresetmaxFreq;
1365 if (word > proposedFreq)
1366 word = proposedFreq;
1368 pDCTstat->PresetmaxFreq = word;
/*
 * SPDGetTCL_D: search (k = fastest..slowest cycle time, j = lowest..highest
 * CAS latency) for the first (T, CL) pair every populated DIMM supports,
 * store it in .DIMMAutoSpeed/.DIMMCASL; otherwise fall back to failsafe
 * CL_DEF/T_DEF "minimum mode" and flag a variance error.
 * NOTE(review): listing is elided; loop exits and T1min/CL1min updates
 * between the shown lines are not visible here.
 */
1374 static void SPDGetTCL_D(struct MCTStatStruc *pMCTstat,
1375 struct DCTStatStruc *pDCTstat, u8 dct)
1377 /* Find the best T and CL primary timing parameter pair, per Mfg.,
1378 * for the given set of DIMMs, and store into DCTStatStruc
1379 * (.DIMMAutoSpeed and .DIMMCASL). See "Global relationship between
1380 * index values and item values" for definition of CAS latency
1381 * index (j) and Frequency index (k).
1386 /* i={0..7} (std. physical DIMM number)
1387 * j is an integer which enumerates increasing CAS latency.
1388 * k is an integer which enumerates decreasing cycle time.
1389 * CL no. {0,1,2} corresponds to CL X, CL X-.5, or CL X-1 (per individual DIMM)
1390 * Max timing values are per parameter, of all DIMMs, spec'd in ns like the SPD.
1395 for (k=K_MAX; k >= K_MIN; k--) {
1396 for (j = J_MIN; j <= J_MAX; j++) {
1397 if (Sys_Capability_D(pMCTstat, pDCTstat, j, k) ) {
1398 /* 1. check to see if DIMMi is populated.
1399 2. check if DIMMi supports CLj and Tjk */
1400 for (i = 0; i < MAX_DIMMS_SUPPORTED; i++) {
1401 if (pDCTstat->DIMMValid & (1 << i)) {
1402 if (Dimm_Supports_D(pDCTstat, i, j, k))
/* i == MAX_DIMMS_SUPPORTED means no populated DIMM rejected (j,k). */
1406 if (i == MAX_DIMMS_SUPPORTED) {
1416 if (T1min != 0xFF) {
1417 pDCTstat->DIMMCASL = CL1min; /*mfg. optimized */
1418 pDCTstat->DIMMAutoSpeed = T1min;
1419 print_tx("SPDGetTCL_D: DIMMCASL ", pDCTstat->DIMMCASL);
1420 print_tx("SPDGetTCL_D: DIMMAutoSpeed ", pDCTstat->DIMMAutoSpeed);
1423 pDCTstat->DIMMCASL = CL_DEF; /* failsafe values (running in min. mode) */
1424 pDCTstat->DIMMAutoSpeed = T_DEF;
1425 pDCTstat->ErrStatus |= 1 << SB_DimmMismatchT;
1426 pDCTstat->ErrStatus |= 1 << SB_MinimumMode;
1427 pDCTstat->ErrCode = SC_VarianceErr;
1429 print_tx("SPDGetTCL_D: Status ", pDCTstat->Status);
1430 print_tx("SPDGetTCL_D: ErrStatus ", pDCTstat->ErrStatus);
1431 print_tx("SPDGetTCL_D: ErrCode ", pDCTstat->ErrCode);
1432 print_t("SPDGetTCL_D: Done\n");
/*
 * PlatformSpec_D: fetch platform-specific DCT config (both DCTs when
 * ganged), enable 2T command mode in Dram Config Hi (F2x94, bit 20) when
 * requested, then apply platform timings and phy compensation.
 * Returns pDCTstat->ErrCode.
 */
1436 static u8 PlatformSpec_D(struct MCTStatStruc *pMCTstat,
1437 struct DCTStatStruc *pDCTstat, u8 dct)
1443 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, dct);
1445 if (pDCTstat->GangedMode) {
/* Ganged: DCT1 must carry the same platform config as DCT0. */
1446 mctGet_PS_Cfg_D(pMCTstat, pDCTstat, 1);
1449 if ( pDCTstat->_2Tmode == 2) {
1450 dev = pDCTstat->dev_dct;
1451 reg = 0x94 + 0x100 * dct; /* Dram Configuration Hi */
1452 val = Get_NB32(dev, reg);
1453 val |= 1 << 20; /* 2T CMD mode */
1454 Set_NB32(dev, reg, val);
1457 mct_PlatformSpec(pMCTstat, pDCTstat, dct);
1458 InitPhyCompensation(pMCTstat, pDCTstat, dct);
1459 mctHookAfterPSCfg();
1460 return pDCTstat->ErrCode;
/*
 * AutoConfig_D: program the per-DCT DRAM Control (F2x78), Timing Low
 * (F2x88), Config Misc/Misc2 (F2xA0/A8), Config Lo/Hi (F2x90/94) registers
 * from the gathered DIMM/status information.
 * Returns pDCTstat->ErrCode (SC_StopError from SPDSetBanks_D aborts early).
 * NOTE(review): listing is elided — some register-value construction lines
 * between the shown source line numbers are not visible here.
 */
1464 static u8 AutoConfig_D(struct MCTStatStruc *pMCTstat,
1465 struct DCTStatStruc *pDCTstat, u8 dct)
1467 u32 DramControl, DramTimingLo, Status;
1468 u32 DramConfigLo, DramConfigHi, DramConfigMisc, DramConfigMisc2;
1476 print_tx("AutoConfig_D: DCT: ", dct);
1481 DramConfigMisc2 = 0;
1483 /* set bank addressing and Masks, plus CS pops */
1484 SPDSetBanks_D(pMCTstat, pDCTstat, dct);
1485 if (pDCTstat->ErrCode == SC_StopError)
1486 goto AutoConfig_exit;
1488 /* map chip-selects into local address space */
1489 StitchMemory_D(pMCTstat, pDCTstat, dct);
1490 InterleaveBanks_D(pMCTstat, pDCTstat, dct);
1492 /* temp image of status (for convenience). RO usage! */
1493 Status = pDCTstat->Status;
1495 dev = pDCTstat->dev_dct;
1496 reg_off = 0x100 * dct;
1499 /* Build Dram Control Register Value */
1500 DramConfigMisc2 = Get_NB32 (dev, 0xA8 + reg_off); /* Dram Config Misc 2 */
1501 DramControl = Get_NB32 (dev, 0x78 + reg_off); /* Dram Control*/
1503 if (mctGet_NVbits(NV_CLKHZAltVidC3))
1504 DramControl |= 1<<16;
1506 // FIXME: Add support(skip) for Ax and Cx versions
1507 DramControl |= 5; /* RdPtrInit */
1510 /* Build Dram Config Lo Register Value */
1511 DramConfigLo |= 1 << 4; /* 75 Ohms ODT */
/* ODT strength selection depends on max-DIMM count, speed, MAdimms
 * and quad-rank presence (elided branches between the lines below). */
1512 if (mctGet_NVbits(NV_MAX_DIMMS) == 8) {
1513 if (pDCTstat->Speed == 3) {
1514 if ((pDCTstat->MAdimms[dct] == 4))
1515 DramConfigLo |= 1 << 5; /* 50 Ohms ODT */
1516 } else if (pDCTstat->Speed == 4){
1517 if ((pDCTstat->MAdimms[dct] != 1))
1518 DramConfigLo |= 1 << 5; /* 50 Ohms ODT */
1521 // FIXME: Skip for Ax versions
1522 if ((pDCTstat->MAdimms[dct] == 4)) {
1523 if ( pDCTstat->DimmQRPresent != 0) {
1524 if ((pDCTstat->Speed == 3) || (pDCTstat->Speed == 4)) {
1525 DramConfigLo |= 1 << 5; /* 50 Ohms ODT */
1527 } else if ((pDCTstat->MAdimms[dct] == 4)) {
1528 if (pDCTstat->Speed == 4) {
1529 if ( pDCTstat->DimmQRPresent != 0) {
1530 DramConfigLo |= 1 << 5; /* 50 Ohms ODT */
1534 } else if ((pDCTstat->MAdimms[dct] == 2)) {
1535 DramConfigLo |= 1 << 5; /* 50 Ohms ODT */
1540 // FIXME: Skip for Ax versions
1541 /* callback not required - if (!mctParityControl_D()) */
1542 if (Status & (1 << SB_PARDIMMs)) {
1543 DramConfigLo |= 1 << ParEn;
1544 DramConfigMisc2 |= 1 << ActiveCmdAtRst;
1546 DramConfigLo &= ~(1 << ParEn);
1547 DramConfigMisc2 &= ~(1 << ActiveCmdAtRst);
1550 if (mctGet_NVbits(NV_BurstLen32)) {
1551 if (!pDCTstat->GangedMode)
1552 DramConfigLo |= 1 << BurstLength32;
1555 if (Status & (1 << SB_128bitmode))
1556 DramConfigLo |= 1 << Width128; /* 128-bit mode (normal) */
1561 if (pDCTstat->Dimmx4Present & (1 << word))
1562 DramConfigLo |= 1 << dword; /* X4Dimm[3:0] */
1568 if (!(Status & (1 << SB_Registered)))
1569 DramConfigLo |= 1 << UnBuffDimm; /* Unbuffered DIMMs */
1571 if (mctGet_NVbits(NV_ECC_CAP))
1572 if (Status & (1 << SB_ECCDIMMs))
1573 if ( mctGet_NVbits(NV_ECC))
1574 DramConfigLo |= 1 << DimmEcEn;
1578 /* Build Dram Config Hi Register Value */
1579 dword = pDCTstat->Speed;
1580 DramConfigHi |= dword - 1; /* get MemClk encoding */
1581 DramConfigHi |= 1 << MemClkFreqVal;
1583 if (Status & (1 << SB_Registered))
/* NOTE(review): condition requires BOTH x4 and x8 DIMMs present, while
 * the comment says "only if x8 Registered DIMMs" — verify against the
 * BKDG / full source before relying on RDqsEn behavior. */
1584 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0))
1585 /* set only if x8 Registered DIMMs in System*/
1586 DramConfigHi |= 1 << RDqsEn;
1588 if (mctGet_NVbits(NV_CKE_PDEN)) {
1589 DramConfigHi |= 1 << 15; /* PowerDownEn */
1590 if (mctGet_NVbits(NV_CKE_CTL))
1591 /*Chip Select control of CKE*/
1592 DramConfigHi |= 1 << 16;
1595 /* Control Bank Swizzle */
1596 if (0) /* call back not needed mctBankSwizzleControl_D()) */
1597 DramConfigHi &= ~(1 << BankSwizzleMode);
1599 DramConfigHi |= 1 << BankSwizzleMode; /* recommended setting (default) */
1601 /* Check for Quadrank DIMM presence */
1602 if ( pDCTstat->DimmQRPresent != 0) {
1603 byte = mctGet_NVbits(NV_4RANKType);
1605 DramConfigHi |= 1 << 17; /* S4 (4-Rank SO-DIMMs) */
1607 DramConfigHi |= 1 << 18; /* R4 (4-Rank Registered DIMMs) */
1610 if (0) /* call back not needed mctOverrideDcqBypMax_D ) */
1611 val = mctGet_NVbits(NV_BYPMAX);
1613 val = 0x0f; // recommended setting (default)
1614 DramConfigHi |= val << 24;
/* Tfaw field: table indexed by Speed, 1K- vs 2K-page variants. */
1616 val = pDCTstat->DIMM2Kpage;
1617 if (pDCTstat->GangedMode != 0) {
1625 val = Tab_2KTfawT_k[pDCTstat->Speed];
1627 val = Tab_1KTfawT_k[pDCTstat->Speed];
1629 if (pDCTstat->Speed == 5)
1634 DramConfigHi |= val; /* Tfaw for 1K or 2K paged drams */
1636 // FIXME: Skip for Ax versions
1637 DramConfigHi |= 1 << DcqArbBypassEn;
1640 /* Build MemClkDis Value from Dram Timing Lo and
1641 Dram Config Misc Registers
1642 1. We will assume that MemClkDis field has been preset prior to this
1644 2. We will only set MemClkDis bits if a DIMM is NOT present AND if:
1645 NV_AllMemClks <>0 AND SB_DiagClks ==0 */
1648 /* Dram Timing Low (owns Clock Enable bits) */
1649 DramTimingLo = Get_NB32(dev, 0x88 + reg_off);
1650 if (mctGet_NVbits(NV_AllMemClks) == 0) {
1651 /* Special Jedec SPD diagnostic bit - "enable all clocks" */
1652 if (!(pDCTstat->Status & (1<<SB_DiagClks))) {
/* Package type selects the MemClkDis mapping table (elided). */
1654 byte = mctGet_NVbits(NV_PACK_TYPE);
1657 else if (byte == PT_M2)
1663 while(dword < MAX_DIMMS_SUPPORTED) {
1665 print_tx("DramTimingLo: val=", val);
1666 if (!(pDCTstat->DIMMValid & (1<<val)))
1668 DramTimingLo |= 1<<(dword+24);
1674 print_tx("AutoConfig_D: DramControl: ", DramControl);
1675 print_tx("AutoConfig_D: DramTimingLo: ", DramTimingLo);
1676 print_tx("AutoConfig_D: DramConfigMisc: ", DramConfigMisc);
1677 print_tx("AutoConfig_D: DramConfigMisc2: ", DramConfigMisc2);
1678 print_tx("AutoConfig_D: DramConfigLo: ", DramConfigLo);
1679 print_tx("AutoConfig_D: DramConfigHi: ", DramConfigHi);
1681 /* Write Values to the registers */
1682 Set_NB32(dev, 0x78 + reg_off, DramControl);
1683 Set_NB32(dev, 0x88 + reg_off, DramTimingLo);
1684 Set_NB32(dev, 0xA0 + reg_off, DramConfigMisc);
1685 Set_NB32(dev, 0xA8 + reg_off, DramConfigMisc2);
1686 Set_NB32(dev, 0x90 + reg_off, DramConfigLo);
1687 mct_SetDramConfigHi_D(pDCTstat, dct, DramConfigHi);
1688 mct_ForceAutoPrecharge_D(pDCTstat, dct);
1689 mct_EarlyArbEn_D(pMCTstat, pDCTstat);
1690 mctHookAfterAutoCfg();
1692 // dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2));
1694 print_tx("AutoConfig: Status ", pDCTstat->Status);
1695 print_tx("AutoConfig: ErrStatus ", pDCTstat->ErrStatus);
1696 print_tx("AutoConfig: ErrCode ", pDCTstat->ErrCode);
1697 print_t("AutoConfig: Done\n");
1699 return pDCTstat->ErrCode;
/*
 * SPDSetBanks_D: read geometry (rows/cols/banks/width/ranks) from each
 * populated DIMM's SPD, derive the CCCBRR bank-address encoding and CS
 * mask, program F2x80 (bank addressing) and F2x60..6C (CS masks), and
 * build the CSPresent/CSTestFail population maps.
 * NOTE(review): listing is elided; some mask-building statements between
 * the shown source line numbers are not visible here.
 */
1703 static void SPDSetBanks_D(struct MCTStatStruc *pMCTstat,
1704 struct DCTStatStruc *pDCTstat, u8 dct)
1706 /* Set bank addressing, program Mask values and build a chip-select
1707 * population map. This routine programs PCI 0:24N:2x80 config register
1708 * and PCI 0:24N:2x60,64,68,6C config registers (CS Mask 0-3).
1711 u8 ChipSel, Rows, Cols, Ranks ,Banks, DevWidth;
1712 u32 BankAddrReg, csMask;
1723 dev = pDCTstat->dev_dct;
1724 reg_off = 0x100 * dct;
/* One DIMM per chip-select pair: even CS = first rank, odd = second. */
1727 for (ChipSel = 0; ChipSel < MAX_CS_SUPPORTED; ChipSel+=2) {
1729 if ((pDCTstat->Status & (1 << SB_64MuxedMode)) && ChipSel >=4)
1732 if (pDCTstat->DIMMValid & (1<<byte)) {
1733 smbaddr = Get_DIMMAddress_D(pDCTstat, (ChipSel + dct));
1735 byte = mctRead_SPD(smbaddr, SPD_ROWSZ);
1738 byte = mctRead_SPD(smbaddr, SPD_COLSZ);
1741 Banks = mctRead_SPD(smbaddr, SPD_LBANKS);
1743 byte = mctRead_SPD(smbaddr, SPD_DEVWIDTH);
1744 DevWidth = byte & 0x7f; /* bits 0-6 = bank 0 width */
1746 byte = mctRead_SPD(smbaddr, SPD_DMBANKS);
1747 Ranks = (byte & 7) + 1;
1749 /* Configure Bank encoding
1750 * Use a 6-bit key into a lookup table.
1751 * Key (index) = CCCBRR, where CCC is the number of
1752 * Columns minus 9,RR is the number of Rows minus 13,
1753 * and B is the number of banks minus 2.
1754 * See "6-bit Bank Addressing Table" at the end of
1756 byte = Cols - 9; /* 9 Cols is smallest dev size */
1757 byte <<= 3; /* make room for row and bank bits*/
1761 /* 13 Rows is smallest dev size */
1762 byte |= Rows - 13; /* CCCBRR internal encode */
1764 for (dword=0; dword < 12; dword++) {
1765 if (byte == Tab_BankAddr[dword])
1771 /* bit no. of CS field in address mapping reg.*/
1772 dword <<= (ChipSel<<1);
1773 BankAddrReg |= dword;
1775 /* Mask value=(2pow(rows+cols+banks+3)-1)>>8,
1776 or 2pow(rows+cols+banks-5)-1*/
1779 byte = Rows + Cols; /* cl=rows+cols*/
1781 byte -= 2; /* 3 banks - 5 */
1783 byte -= 3; /* 2 banks - 5 */
1784 /* mask size (64-bit rank only) */
1786 if (pDCTstat->Status & (1 << SB_128bitmode))
1787 byte++; /* double mask size if in 128-bit mode*/
1789 csMask |= 1 << byte;
1792 /*set ChipSelect population indicator even bits*/
1793 pDCTstat->CSPresent |= (1<<ChipSel);
1795 /*set ChipSelect population indicator odd bits*/
1796 pDCTstat->CSPresent |= 1 << (ChipSel + 1);
1798 reg = 0x60+(ChipSel<<1) + reg_off; /*Dram CS Mask Register */
1800 val &= 0x1FF83FE0; /* Mask out reserved bits.*/
1801 Set_NB32(dev, reg, val);
/* SPD checksum failure recorded earlier marks this CS as failed. */
1804 if (pDCTstat->DIMMSPDCSE & (1<<ChipSel))
1805 pDCTstat->CSTestFail |= (1<<ChipSel);
1807 } /* while ChipSel*/
1809 SetCSTriState(pMCTstat, pDCTstat, dct);
1810 /* SetCKETriState */
1811 SetODTTriState(pMCTstat, pDCTstat, dct);
1813 if ( pDCTstat->Status & 1<<SB_128bitmode) {
1814 SetCSTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1815 SetODTTriState(pMCTstat, pDCTstat, 1); /* force dct1) */
1817 word = pDCTstat->CSPresent;
1818 mctGetCS_ExcludeMap(); /* mask out specified chip-selects */
1819 word ^= pDCTstat->CSPresent;
1820 pDCTstat->CSTestFail |= word; /* enable ODT to disabled DIMMs */
1821 if (!pDCTstat->CSPresent)
1822 pDCTstat->ErrCode = SC_StopError;
1824 reg = 0x80 + reg_off; /* Bank Addressing Register */
1825 Set_NB32(dev, reg, BankAddrReg);
1827 // dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2));
1829 print_tx("SPDSetBanks: Status ", pDCTstat->Status);
1830 print_tx("SPDSetBanks: ErrStatus ", pDCTstat->ErrStatus);
1831 print_tx("SPDSetBanks: ErrCode ", pDCTstat->ErrCode);
1832 print_t("SPDSetBanks: Done\n");
/*
 * SPDCalcWidth_D: compare each channel-A/channel-B DIMM pair's SPD
 * geometry fields (rows, cols, banks, device width, rank count); any
 * mismatch sets SB_DimmMismatchO, which disqualifies 128-bit mode.
 * NOTE(review): listing is elided; the per-mismatch early exits / closing
 * braces between the shown lines are not visible here.
 */
1836 static void SPDCalcWidth_D(struct MCTStatStruc *pMCTstat,
1837 struct DCTStatStruc *pDCTstat)
1839 /* Per SPDs, check the symmetry of DIMM pairs (DIMM on Channel A
1840 * matching with DIMM on Channel B), the overall DIMM population,
1841 * and determine the width mode: 64-bit, 64-bit muxed, 128-bit.
1845 u8 smbaddr, smbaddr1;
1848 /* Check Symmetry of Channel A and Channel B DIMMs
1849 (must be matched for 128-bit mode).*/
1850 for (i=0; i < MAX_DIMMS_SUPPORTED; i += 2) {
1851 if ((pDCTstat->DIMMValid & (1 << i)) && (pDCTstat->DIMMValid & (1<<(i+1)))) {
1852 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
1853 smbaddr1 = Get_DIMMAddress_D(pDCTstat, i+1);
1855 byte = mctRead_SPD(smbaddr, SPD_ROWSZ) & 0x1f;
1856 byte1 = mctRead_SPD(smbaddr1, SPD_ROWSZ) & 0x1f;
1857 if (byte != byte1) {
1858 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1862 byte = mctRead_SPD(smbaddr, SPD_COLSZ) & 0x1f;
1863 byte1 = mctRead_SPD(smbaddr1, SPD_COLSZ) & 0x1f;
1864 if (byte != byte1) {
1865 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1869 byte = mctRead_SPD(smbaddr, SPD_BANKSZ);
1870 byte1 = mctRead_SPD(smbaddr1, SPD_BANKSZ);
1871 if (byte != byte1) {
1872 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1876 byte = mctRead_SPD(smbaddr, SPD_DEVWIDTH) & 0x7f;
1877 byte1 = mctRead_SPD(smbaddr1, SPD_DEVWIDTH) & 0x7f;
1878 if (byte != byte1) {
1879 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
1883 byte = mctRead_SPD(smbaddr, SPD_DMBANKS) & 7; /* #ranks-1 */
1884 byte1 = mctRead_SPD(smbaddr1, SPD_DMBANKS) & 7; /* #ranks-1 */
1885 if (byte != byte1) {
1886 pDCTstat->ErrStatus |= (1<<SB_DimmMismatchO);
/*
 * StitchMemory_D: map enabled chip-selects into a contiguous local address
 * space — repeatedly picks the biggest remaining bank, programs its DRAM
 * CS Base register (F2x40+q*4), and advances nxtcsBase; failed CSes get
 * the TestFail bit instead. Sets DCTSysLimit to the last mapped address.
 * Precondition: CS masks and CSPresent already programmed (SPDSetBanks_D).
 * NOTE(review): listing is elided; spare-CS selection and some base/mask
 * arithmetic between the shown source line numbers are not visible here.
 */
1896 static void StitchMemory_D(struct MCTStatStruc *pMCTstat,
1897 struct DCTStatStruc *pDCTstat, u8 dct)
1899 /* Requires that Mask values for each bank be programmed first and that
1900 * the chip-select population indicator is correctly set.
1904 u32 nxtcsBase, curcsBase;
1906 u32 Sizeq, BiggestBank;
1916 dev = pDCTstat->dev_dct;
1917 reg_off = 0x100 * dct;
1921 /* CS Sparing 1=enabled, 0=disabled */
1922 if (mctGet_NVbits(NV_CS_SpareCTL) & 1) {
1923 if (MCT_DIMM_SPARE_NO_WARM) {
1924 /* Do no warm-reset DIMM spare */
1925 if (pMCTstat->GStatus & 1 << GSB_EnDIMMSpareNW) {
1926 word = pDCTstat->CSPresent;
1930 /* Make sure at least two chip-selects are available */
1933 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1936 if (!mctGet_NVbits(NV_DQSTrainCTL)) { /*DQS Training 1=enabled, 0=disabled */
1937 word = pDCTstat->CSPresent;
1939 word &= ~(1 << val);
1941 /* Make sure at least two chip-selects are available */
1944 pDCTstat->ErrStatus |= 1 << SB_SpareDis;
1949 nxtcsBase = 0; /* Next available cs base ADDR[39:8] */
1950 for (p=0; p < MAX_DIMMS_SUPPORTED; p++) {
/* Inner scan: find the largest not-yet-enabled bank to map next. */
1952 for (q = 0; q < MAX_CS_SUPPORTED; q++) { /* from DIMMS to CS */
1953 if (pDCTstat->CSPresent & (1 << q)) { /* bank present? */
1954 reg = 0x40 + (q << 2) + reg_off; /* Base[q] reg.*/
1955 val = Get_NB32(dev, reg);
1956 if (!(val & 3)) { /* (CSEnable|Spare==1)bank is enabled already? */
1957 reg = 0x60 + (q << 1) + reg_off; /*Mask[q] reg.*/
1958 val = Get_NB32(dev, reg);
1962 Sizeq = val; //never used
1963 if (val > BiggestBank) {
1964 /*Bingo! possibly Map this chip-select next! */
1969 } /*if bank present */
1971 if (BiggestBank !=0) {
1972 curcsBase = nxtcsBase; /* curcsBase=nxtcsBase*/
1973 /* DRAM CS Base b Address Register offset */
1974 reg = 0x40 + (b << 2) + reg_off;
1977 val = 1 << Spare; /* Spare Enable*/
1980 val |= 1 << CSEnable; /* Bank Enable */
1982 Set_NB32(dev, reg, val);
1986 /* let nxtcsBase+=Size[b] */
1987 nxtcsBase += BiggestBank;
1990 /* bank present but disabled?*/
1991 if ( pDCTstat->CSTestFail & (1 << p)) {
1992 /* DRAM CS Base b Address Register offset */
1993 reg = (p << 2) + 0x40 + reg_off;
1994 val = 1 << TestFail;
1995 Set_NB32(dev, reg, val);
1996 
2000 pDCTstat->DCTSysLimit = nxtcsBase - 1;
2001 mct_AfterStitchMemory(pMCTstat, pDCTstat, dct);
2004 // dump_pci_device(PCI_DEV(0, 0x18+pDCTstat->Node_ID, 2));
2006 print_tx("StitchMemory: Status ", pDCTstat->Status);
2007 print_tx("StitchMemory: ErrStatus ", pDCTstat->ErrStatus);
2008 print_tx("StitchMemory: ErrCode ", pDCTstat->ErrCode);
2009 print_t("StitchMemory: Done\n");
/* Get_Tk_D: cycle time Tk for frequency index k (table lookup; no bounds
 * check — caller must keep k within Table_T_k). */
2013 static u8 Get_Tk_D(u8 k)
2015 return Table_T_k[k];
/* Get_CLj_D: CAS-latency value for CL index j (table lookup; no bounds
 * check — caller must keep j within Table_CL2_j). */
2019 static u8 Get_CLj_D(u8 j)
2021 return Table_CL2_j[j];
/* Get_DefTrc_k_D: default Trc for frequency index k (table lookup; no
 * bounds check). */
2024 static u8 Get_DefTrc_k_D(u8 k)
2026 return Tab_defTrc_k[k];
/* Get_40Tk_D: 40 x Tk (x4-scaled cycle time, 0.25 ns least count) for
 * frequency index k. The original FIXME about the index form remains. */
2030 static u16 Get_40Tk_D(u8 k)
2032 return Tab_40T_k[k]; /* FIXME: k or k<<1 ?*/
/* Get_Fk_D: memory frequency (MHz) for frequency index k. The original
 * FIXME about the index form remains. */
2036 static u16 Get_Fk_D(u8 k)
2038 return Table_F_k[k]; /* FIXME: k or k<<1 ? */
/*
 * Dimm_Supports_D: does DIMM i support CAS-latency index j at cycle-time
 * index k? Decodes the SPD CAS-latency byte, picks the matching cycle-time
 * SPD byte (9/23/25) via EncodedTSPD, and compares against Tk.
 * Returns 0 when capable (per the comment at line 2075); byte == 0 in the
 * SPD cycle-time field flags SB_NoCycTime.
 * NOTE(review): listing is elided; the comparison against Tk and the
 * non-capable return paths are not visible here.
 */
2042 static u8 Dimm_Supports_D(struct DCTStatStruc *pDCTstat,
2052 DIMMi = Get_DIMMAddress_D(pDCTstat, i);
2056 /* check if DIMMi supports CLj */
2057 CL_i = mctRead_SPD(DIMMi, SPD_CASLAT);
2060 /*find out if its CL X, CLX-1, or CLX-2 */
2061 word = bsr(byte); /* bit position of CLj */
2062 wordx = bsr(CL_i); /* bit position of CLX of CLi */
2063 wordx -= word; /* CL number (CL no. = 0,1, 2, or 3) */
2064 wordx <<= 3; /* 8 bits per SPD byte index */
2065 /*get T from SPD byte 9, 23, 25*/
2066 word = (EncodedTSPD >> wordx) & 0xFF;
2068 byte = mctRead_SPD(DIMMi, word); /* DIMMi speed */
2071 } else if (byte == 0){
2072 pDCTstat->ErrStatus |= 1<<SB_NoCycTime;
2075 ret = 0; /* DIMM is capable! */
/*
 * DIMMPresence_D: probe SPDs of all DIMM slots — verify checksum, require
 * DDR2 type, and build the presence/capability bitmaps (valid, registered,
 * ECC, parity, x4/x8/x16, 2K-page, planar, dual/quad-rank) plus the bus
 * loading counters (MAload/DATAload/MAdimms). Cross-checks the maps at the
 * end and sets SB_Registered/SB_ECCDIMMs/SB_PARDIMMs or error codes.
 * Returns pDCTstat->ErrCode.
 * NOTE(review): listing is elided; checksum accumulation details and some
 * branch bodies between the shown source line numbers are not visible.
 */
2084 static u8 DIMMPresence_D(struct MCTStatStruc *pMCTstat,
2085 struct DCTStatStruc *pDCTstat)
2087 /* Check DIMMs present, verify checksum, flag SDRAM type,
2088 * build population indicator bitmaps, and preload bus loading
2089 * of DIMMs into DCTStatStruc.
2090 * MAAload=number of devices on the "A" bus.
2091 * MABload=number of devices on the "B" bus.
2092 * MAAdimms=number of DIMMs on the "A" bus slots.
2093 * MABdimms=number of DIMMs on the "B" bus slots.
2094 * DATAAload=number of ranks on the "A" bus slots.
2095 * DATABload=number of ranks on the "B" bus slots.
2102 u16 RegDIMMPresent, MaxDimms;
2108 /* preload data structure with addrs */
2109 mctGet_DIMMAddr(pDCTstat, pDCTstat->Node_ID);
2111 DimmSlots = MaxDimms = mctGet_NVbits(NV_MAX_DIMMS);
2113 SPDCtrl = mctGet_NVbits(NV_SPDCHK_RESTRT);
2116 pDCTstat->DimmQRPresent = 0;
2118 for (i = 0; i< MAX_DIMMS_SUPPORTED; i++) {
/* Second-pass entries (upper ranks of QR DIMMs) use i >= DimmSlots. */
2122 if ((pDCTstat->DimmQRPresent & (1 << i)) || (i < DimmSlots)) {
2123 print_tx("\t DIMMPresence: i=", i);
2124 smbaddr = Get_DIMMAddress_D(pDCTstat, i);
2125 print_tx("\t DIMMPresence: smbaddr=", smbaddr);
2128 for (Index=0; Index < 64; Index++){
2130 status = mctRead_SPD(smbaddr, Index);
2133 byte = status & 0xFF;
2139 pDCTstat->DIMMPresent |= 1 << i;
2140 if ((Checksum & 0xFF) == byte) {
2141 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2142 if (byte == JED_DDR2SDRAM) {
2143 /*Dimm is 'Present'*/
2144 pDCTstat->DIMMValid |= 1 << i;
/* NOTE(review): plain assignment (=) clobbers DIMMSPDCSE flags set for
 * previously scanned DIMMs; sibling updates in this function use |= —
 * verify against the full source whether this should be |=. */
2147 pDCTstat->DIMMSPDCSE = 1 << i;
2149 pDCTstat->ErrStatus |= 1 << SB_DIMMChkSum;
2150 pDCTstat->ErrCode = SC_StopError;
2152 /*if NV_SPDCHK_RESTRT is set to 1, ignore faulty SPD checksum*/
2153 pDCTstat->ErrStatus |= 1<<SB_DIMMChkSum;
2154 byte = mctRead_SPD(smbaddr, SPD_TYPE);
2155 if (byte == JED_DDR2SDRAM)
2156 pDCTstat->DIMMValid |= 1 << i;
2159 /* Check module type */
2160 byte = mctRead_SPD(smbaddr, SPD_DIMMTYPE);
2161 if (byte & JED_REGADCMSK)
2162 RegDIMMPresent |= 1 << i;
2163 /* Check ECC capable */
2164 byte = mctRead_SPD(smbaddr, SPD_EDCTYPE);
2165 if (byte & JED_ECC) {
2166 /* DIMM is ECC capable */
2167 pDCTstat->DimmECCPresent |= 1 << i;
2169 if (byte & JED_ADRCPAR) {
2170 /* DIMM is address/command-parity capable */
2171 pDCTstat->DimmPARPresent |= 1 << i;
2173 /* Check if x4 device */
2174 devwidth = mctRead_SPD(smbaddr, SPD_DEVWIDTH) & 0xFE;
2175 if (devwidth == 4) {
2176 /* DIMM is made with x4 or x16 drams */
2177 pDCTstat->Dimmx4Present |= 1 << i;
2178 } else if (devwidth == 8) {
2179 pDCTstat->Dimmx8Present |= 1 << i;
2180 } else if (devwidth == 16) {
2181 pDCTstat->Dimmx16Present |= 1 << i;
2183 /* check page size */
2184 byte = mctRead_SPD(smbaddr, SPD_COLSZ);
2188 word *= devwidth; /* (((2^COLBITS) / 8) * ORG) / 2048 */
2191 pDCTstat->DIMM2Kpage |= 1 << i;
2193 /*Check if SPD diag bit 'analysis probe installed' is set */
2194 byte = mctRead_SPD(smbaddr, SPD_ATTRIB);
2195 if ( byte & JED_PROBEMSK )
2196 pDCTstat->Status |= 1<<SB_DiagClks;
2198 byte = mctRead_SPD(smbaddr, SPD_DMBANKS);
2199 if (!(byte & (1<< SPDPLBit)))
2200 pDCTstat->DimmPlPresent |= 1 << i;
2204 /* if any DIMMs are QR, we have to make two passes through DIMMs*/
2205 if ( pDCTstat->DimmQRPresent == 0) {
2208 if (i < DimmSlots) {
2209 pDCTstat->DimmQRPresent |= (1 << i) | (1 << (i+4));
2211 byte = 2; /* upper two ranks of QR DIMM will be counted on another DIMM number iteration*/
2212 } else if (byte == 2) {
2213 pDCTstat->DimmDRPresent |= 1 << i;
2218 else if (devwidth == 4)
2222 bytex <<= 1; /*double Addr bus load value for dual rank DIMMs*/
/* j selects channel A (even i) or B (odd i) — elided above. */
2225 pDCTstat->DATAload[j] += byte; /*number of ranks on DATA bus*/
2226 pDCTstat->MAload[j] += bytex; /*number of devices on CMD/ADDR bus*/
2227 pDCTstat->MAdimms[j]++; /*number of DIMMs on A bus */
2228 /*check for DRAM package Year <= 06*/
2229 byte = mctRead_SPD(smbaddr, SPD_MANDATEYR);
2230 if (byte < MYEAR06) {
2231 /*Year < 06 and hence Week < 24 of 06 */
2232 pDCTstat->DimmYr06 |= 1 << i;
2233 pDCTstat->DimmWk2406 |= 1 << i;
2234 } else if (byte == MYEAR06) {
2235 /*Year = 06, check if Week <= 24 */
2236 pDCTstat->DimmYr06 |= 1 << i;
2237 byte = mctRead_SPD(smbaddr, SPD_MANDATEWK);
2238 if (byte <= MWEEK24)
2239 pDCTstat->DimmWk2406 |= 1 << i;
2245 print_tx("\t DIMMPresence: DIMMValid=", pDCTstat->DIMMValid);
2246 print_tx("\t DIMMPresence: DIMMPresent=", pDCTstat->DIMMPresent);
2247 print_tx("\t DIMMPresence: RegDIMMPresent=", RegDIMMPresent);
2248 print_tx("\t DIMMPresence: DimmECCPresent=", pDCTstat->DimmECCPresent);
2249 print_tx("\t DIMMPresence: DimmPARPresent=", pDCTstat->DimmPARPresent);
2250 print_tx("\t DIMMPresence: Dimmx4Present=", pDCTstat->Dimmx4Present);
2251 print_tx("\t DIMMPresence: Dimmx8Present=", pDCTstat->Dimmx8Present);
2252 print_tx("\t DIMMPresence: Dimmx16Present=", pDCTstat->Dimmx16Present);
2253 print_tx("\t DIMMPresence: DimmPlPresent=", pDCTstat->DimmPlPresent);
2254 print_tx("\t DIMMPresence: DimmDRPresent=", pDCTstat->DimmDRPresent);
2255 print_tx("\t DIMMPresence: DimmQRPresent=", pDCTstat->DimmQRPresent);
2256 print_tx("\t DIMMPresence: DATAload[0]=", pDCTstat->DATAload[0]);
2257 print_tx("\t DIMMPresence: MAload[0]=", pDCTstat->MAload[0]);
2258 print_tx("\t DIMMPresence: MAdimms[0]=", pDCTstat->MAdimms[0]);
2259 print_tx("\t DIMMPresence: DATAload[1]=", pDCTstat->DATAload[1]);
2260 print_tx("\t DIMMPresence: MAload[1]=", pDCTstat->MAload[1]);
2261 print_tx("\t DIMMPresence: MAdimms[1]=", pDCTstat->MAdimms[1]);
2263 if (pDCTstat->DIMMValid != 0) { /* If any DIMMs are present...*/
2264 if (RegDIMMPresent != 0) {
2265 if ((RegDIMMPresent ^ pDCTstat->DIMMValid) !=0) {
2266 /* module type DIMM mismatch (reg'ed, unbuffered) */
2267 pDCTstat->ErrStatus |= 1<<SB_DimmMismatchM;
2268 pDCTstat->ErrCode = SC_StopError;
2270 /* all DIMMs are registered */
2271 pDCTstat->Status |= 1<<SB_Registered;
2274 if (pDCTstat->DimmECCPresent != 0) {
2275 if ((pDCTstat->DimmECCPresent ^ pDCTstat->DIMMValid )== 0) {
2276 /* all DIMMs are ECC capable */
2277 pDCTstat->Status |= 1<<SB_ECCDIMMs;
2280 if (pDCTstat->DimmPARPresent != 0) {
2281 if ((pDCTstat->DimmPARPresent ^ pDCTstat->DIMMValid) == 0) {
2282 /*all DIMMs are Parity capable */
2283 pDCTstat->Status |= 1<<SB_PARDIMMs;
2287 /* no DIMMs present or no DIMMs that qualified. */
2288 pDCTstat->ErrStatus |= 1<<SB_NoDimms;
2289 pDCTstat->ErrCode = SC_StopError;
2292 print_tx("\t DIMMPresence: Status ", pDCTstat->Status);
2293 print_tx("\t DIMMPresence: ErrStatus ", pDCTstat->ErrStatus);
2294 print_tx("\t DIMMPresence: ErrCode ", pDCTstat->ErrCode);
2295 print_t("\t DIMMPresence: Done\n");
2297 mctHookAfterDIMMpre();
2299 return pDCTstat->ErrCode;
/*
 * Sys_Capability_D: can the system run at CAS-latency index j and
 * frequency index k? Rejects frequencies above PresetmaxFreq and CL
 * indices outside [J_MIN, J_MAX].
 * NOTE(review): listing is elided; the actual return statements are not
 * visible here.
 */
2303 static u8 Sys_Capability_D(struct MCTStatStruc *pMCTstat,
2304 struct DCTStatStruc *pDCTstat, int j, int k)
2306 /* Determine if system is capable of operating at given input
2307 * parameters for CL, and T. There are three components to
2308 * determining "maximum frequency" in AUTO mode: SPD component,
2309 * Bus load component, and "Preset" max frequency component.
2310 * This procedure is used to help find the SPD component and relies
2311 * on pre-determination of the bus load component and the Preset
2312 * components. The generalized algorithm for finding maximum
2313 * frequency is structured this way so as to optimize for CAS
2314 * latency (which might get better as a result of reduced frequency).
2315 * See "Global relationship between index values and item values"
2316 * for definition of CAS latency index (j) and Frequency index (k).
2321 if (Get_Fk_D(k) > pDCTstat->PresetmaxFreq)
2326 /* compare proposed CAS latency with AMD Si capabilities */
2327 if ((j < J_MIN) || (j > J_MAX))
/* Get_DIMMAddress_D: SMBus address of DIMM slot i, looked up from the
 * preloaded pDCTstat->DIMMAddr table (filled by mctGet_DIMMAddr). */
2339 static u8 Get_DIMMAddress_D(struct DCTStatStruc *pDCTstat, u8 i)
2343 p = pDCTstat->DIMMAddr;
2344 //mct_BeforeGetDIMMAddress();
/*
 * mct_initDCT: initialize DCT0 (ganged or unganged); when unganged,
 * initialize DCT1 too if it has DIMMs (preserving DCT0's error code if
 * DCT1 is merely "not running"), otherwise disable the DCT1 DRAM
 * interface via F3(1)x94 DisDramInterface.
 */
2349 static void mct_initDCT(struct MCTStatStruc *pMCTstat,
2350 struct DCTStatStruc *pDCTstat)
2355 /* Config. DCT0 for Ganged or unganged mode */
2356 print_t("\tmct_initDCT: DCTInit_D 0\n");
2357 DCTInit_D(pMCTstat, pDCTstat, 0);
2358 if (pDCTstat->ErrCode == SC_FatalErr) {
2359 // Do nothing goto exitDCTInit; /* any fatal errors? */
2361 /* Configure DCT1 if unganged and enabled*/
2362 if (!pDCTstat->GangedMode) {
2363 if ( pDCTstat->DIMMValidDCT[1] > 0) {
2364 print_t("\tmct_initDCT: DCTInit_D 1\n");
2365 err_code = pDCTstat->ErrCode; /* save DCT0 errors */
2366 pDCTstat->ErrCode = 0;
2367 DCTInit_D(pMCTstat, pDCTstat, 1);
2368 if (pDCTstat->ErrCode == 2) /* DCT1 is not Running */
2369 pDCTstat->ErrCode = err_code; /* Using DCT0 Error code to update pDCTstat.ErrCode */
/* No DIMMs on DCT1: turn its DRAM interface off entirely. */
2371 val = 1 << DisDramInterface;
2372 Set_NB32(pDCTstat->dev_dct, 0x100 + 0x94, val);
/*
 * mct_DramInit: run hardware DRAM initialization for one DCT. In ganged
 * mode, auto refresh is disabled around the init (F2x8C DisAutoRefresh)
 * and the InitDram bit in F2x90 is polled to keep both DCTs in sync.
 */
2380 static void mct_DramInit(struct MCTStatStruc *pMCTstat,
2381 struct DCTStatStruc *pDCTstat, u8 dct)
2385 mct_BeforeDramInit_Prod_D(pMCTstat, pDCTstat);
2386 // FIXME: for rev A: mct_BeforeDramInit_D(pDCTstat, dct);
2388 /* Disable auto refresh before Dram init when in ganged mode */
2389 if (pDCTstat->GangedMode) {
2390 val = Get_NB32(pDCTstat->dev_dct, 0x8C + (0x100 * dct));
2391 val |= 1 << DisAutoRefresh;
2392 Set_NB32(pDCTstat->dev_dct, 0x8C + (0x100 * dct), val);
2395 mct_DramInit_Hw_D(pMCTstat, pDCTstat, dct);
2397 /* Re-enable auto refresh after Dram init when in ganged mode
2398 * to ensure both DCTs are in sync
2401 if (pDCTstat->GangedMode) {
/* Wait for hardware DRAM init to complete (InitDram self-clears... the
 * polarity of this poll depends on elided context — see full source). */
2403 val = Get_NB32(pDCTstat->dev_dct, 0x90 + (0x100 * dct));
2404 } while (!(val & (1 << InitDram)));
2408 val = Get_NB32(pDCTstat->dev_dct, 0x8C + (0x100 * dct));
/* NOTE(review): DisAutoRefresh is cleared, set, then cleared again.
 * The listing is elided, so the Set_NB32 writes that presumably sit
 * between these toggles are not visible — verify the toggle sequence
 * against the full source before changing it. */
2409 val &= ~(1 << DisAutoRefresh);
2410 val |= 1 << DisAutoRefresh;
2411 val &= ~(1 << DisAutoRefresh);
/*
 * mct_setMode: split DIMMValid into per-channel maps (even bits = channel
 * A / DCT0, odd bits = channel B / DCT1); when the two channels match (or
 * NV_Unganged is not requested) enable ganged 128-bit mode by setting
 * DctGangEn in the DRAM Controller Select Low register.
 * Returns pDCTstat->ErrCode.
 */
2416 static u8 mct_setMode(struct MCTStatStruc *pMCTstat,
2417 struct DCTStatStruc *pDCTstat)
2424 byte = bytex = pDCTstat->DIMMValid;
2425 bytex &= 0x55; /* CHA DIMM pop */
2426 pDCTstat->DIMMValidDCT[0] = bytex;
2428 byte &= 0xAA; /* CHB DIMM pop */
2430 pDCTstat->DIMMValidDCT[1] = byte;
2432 if (byte != bytex) {
2433 pDCTstat->ErrStatus &= ~(1 << SB_DimmMismatchO);
2435 if ( mctGet_NVbits(NV_Unganged) )
/* User forced unganged mode: report it via the mismatch flag. */
2436 pDCTstat->ErrStatus |= (1 << SB_DimmMismatchO);
2438 if (!(pDCTstat->ErrStatus & (1 << SB_DimmMismatchO))) {
2439 pDCTstat->GangedMode = 1;
2440 /* valid 128-bit mode population. */
2441 pDCTstat->Status |= 1 << SB_128bitmode;
2443 val = Get_NB32(pDCTstat->dev_dct, reg);
2444 val |= 1 << DctGangEn;
2445 Set_NB32(pDCTstat->dev_dct, reg, val);
2446 print_tx("setMode: DRAM Controller Select Low Register = ", val);
2449 return pDCTstat->ErrCode;
/* Read a 32-bit PCI config register via the 0xCF8/0xCFC mechanism.
 * 'dev' encodes bus/device/function; reg bits [11:8] are placed in the
 * extended-register field (bits [27:24]) of the config address. */
2453 u32 Get_NB32(u32 dev, u32 reg)
2457 addr = (dev>>4) | (reg & 0xFF) | ((reg & 0xf00)<<16);
2458 outl((1<<31) | (addr & ~3), 0xcf8);
/* Write a 32-bit PCI config register via 0xCF8/0xCFC; address encoding
 * mirrors Get_NB32 (reg[11:8] -> extended-register field). */
2464 void Set_NB32(u32 dev, u32 reg, u32 val)
2468 addr = (dev>>4) | (reg & 0xFF) | ((reg & 0xf00)<<16);
2469 outl((1<<31) | (addr & ~3), 0xcf8);
/* Indexed read: write 'index' to the index register, then read the
 * data register at index_reg+4 (no access-done handshake). */
2474 u32 Get_NB32_index(u32 dev, u32 index_reg, u32 index)
2478 Set_NB32(dev, index_reg, index);
2479 dword = Get_NB32(dev, index_reg+0x4);
/* Indexed write: select 'index', then write 'data' to the data port at
 * index_reg+4 (no access-done handshake). */
2484 void Set_NB32_index(u32 dev, u32 index_reg, u32 index, u32 data)
2486 Set_NB32(dev, index_reg, index);
2487 Set_NB32(dev, index_reg + 0x4, data);
/* Indexed read through the DCT additional-data port (F2x[1,0]9C):
 * clear the write-request bit, issue the index, poll DctAccessDone,
 * then fetch the data register. */
2491 u32 Get_NB32_index_wait(u32 dev, u32 index_reg, u32 index)
2497 index &= ~(1 << DctAccessWrite);
2498 Set_NB32(dev, index_reg, index);
2500 dword = Get_NB32(dev, index_reg);
2501 } while (!(dword & (1 << DctAccessDone)));
2502 dword = Get_NB32(dev, index_reg + 0x4);
/* Indexed write through the DCT additional-data port: stage 'data' in
 * the data register, issue index with the write-request bit set, then
 * poll DctAccessDone before returning. */
2508 void Set_NB32_index_wait(u32 dev, u32 index_reg, u32 index, u32 data)
2513 Set_NB32(dev, index_reg + 0x4, data);
2514 index |= (1 << DctAccessWrite);
2515 Set_NB32(dev, index_reg, index);
2517 dword = Get_NB32(dev, index_reg);
2518 } while (!(dword & (1 << DctAccessDone)));
/* Program platform-specific drive strength / address timing values
 * (from the interface layer) into the DCT additional-data registers.
 * Returns pDCTstat->ErrCode. */
2523 static u8 mct_PlatformSpec(struct MCTStatStruc *pMCTstat,
2524 struct DCTStatStruc *pDCTstat, u8 dct)
2526 /* Get platform specific config/timing values from the interface layer
2527 * and program them into DCT.
2530 u32 dev = pDCTstat->dev_dct;
2532 u8 i, i_start, i_end;
/* Ganged mode: both channels must use identical settings. */
2534 if (pDCTstat->GangedMode) {
2535 SyncSetting(pDCTstat);
2542 for (i=i_start; i<i_end; i++) {
2543 index_reg = 0x98 + (i * 0x100);
2544 Set_NB32_index_wait(dev, index_reg, 0x00, pDCTstat->CH_ODC_CTL[i]); /* Channel A Output Driver Compensation Control */
2545 Set_NB32_index_wait(dev, index_reg, 0x04, pDCTstat->CH_ADDR_TMG[i]); /* Channel A Address Timing Control (comment was a copy-paste of index 0x00) */
2548 return pDCTstat->ErrCode;
/* Block until this node's memory is usable: if any DIMM is present on
 * either DCT, poll F2x110[DramEnabled] until hardware sets it. */
2553 static void mct_SyncDCTsReady(struct DCTStatStruc *pDCTstat)
2558 if (pDCTstat->NodePresent) {
2559 print_tx("mct_SyncDCTsReady: Node ", pDCTstat->Node_ID);
2560 dev = pDCTstat->dev_dct;
2562 if ((pDCTstat->DIMMValidDCT[0] ) || (pDCTstat->DIMMValidDCT[1])) { /* This Node has dram */
2564 val = Get_NB32(dev, 0x110);
2565 } while (!(val & (1 << DramEnabled)));
2566 print_t("mct_SyncDCTsReady: DramEnabled\n");
2568 } /* Node is present */
/* After CAS-latency/timing discovery: load the per-DCT DIMM population
 * back into DIMMValid and flag SC_StopError for an empty DCT. The
 * second arm additionally resets CSPresent/CSTestFail (presumably the
 * dct==1 / else path - confirm against the elided branch structure). */
2572 static void mct_AfterGetCLT(struct MCTStatStruc *pMCTstat,
2573 struct DCTStatStruc *pDCTstat, u8 dct)
2575 if (!pDCTstat->GangedMode) {
2577 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2578 if (pDCTstat->DIMMValidDCT[dct] == 0)
2579 pDCTstat->ErrCode = SC_StopError;
2581 pDCTstat->CSPresent = 0;
2582 pDCTstat->CSTestFail = 0;
2583 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[dct];
2584 if (pDCTstat->DIMMValidDCT[dct] == 0)
2585 pDCTstat->ErrCode = SC_StopError;
/* Derive bus width from SPD data, then decide ganged/unganged mode.
 * Returns the resulting error code. */
2590 static u8 mct_SPDCalcWidth(struct MCTStatStruc *pMCTstat,
2591 struct DCTStatStruc *pDCTstat, u8 dct)
2596 SPDCalcWidth_D(pMCTstat, pDCTstat);
2597 ret = mct_setMode(pMCTstat, pDCTstat);
2599 ret = pDCTstat->ErrCode;
2602 print_tx("SPDCalcWidth: Status ", pDCTstat->Status);
2603 print_tx("SPDCalcWidth: ErrStatus ", pDCTstat->ErrStatus);
2604 print_tx("SPDCalcWidth: ErrCode ", pDCTstat->ErrCode);
2605 print_t("SPDCalcWidth: Done\n");
/* After the node's memory map is stitched together: account for the
 * software memory hole (NV_BottomIO granularity 128MB) and, in
 * unganged mode, program the F2x110 DCT Select registers so accesses
 * are routed between DCT0 and DCT1 (DctSelBaseAddr, DctSelHiRngEn,
 * DctSelHi). */
2611 static void mct_AfterStitchMemory(struct MCTStatStruc *pMCTstat,
2612 struct DCTStatStruc *pDCTstat, u8 dct)
2621 _MemHoleRemap = mctGet_NVbits(NV_MemHole);
2622 DramHoleBase = mctGet_NVbits(NV_BottomIO);
2624 /* Increase hole size so;[31:24]to[31:16]
2625 * it has granularity of 128MB shl eax,8
2626 * Set 'effective' bottom IOmov DramHoleBase,eax
2628 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2630 /* In unganged mode, we must add DCT0 and DCT1 to DCTSysLimit */
2631 if (!pDCTstat->GangedMode) {
2632 dev = pDCTstat->dev_dct;
2633 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2634 /* if DCT0 and DCT1 exist both, set DctSelBaseAddr[47:27] */
2636 if (pDCTstat->DIMMValidDCT[1] > 0) {
2637 dword = pDCTstat->DCTSysLimit + 1;
2638 dword += pDCTstat->NodeSysBase;
2639 dword >>= 8; /* scale [39:8] to [47:27],and to F2x110[31:11] */
2640 if ((dword >= DramHoleBase) && _MemHoleRemap) {
2641 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2642 val = pMCTstat->HoleBase;
/* Round the select boundary up to the next 128MB granule. */
2645 val |= (((~val) & 0xFF) + 1);
2650 val = Get_NB32(dev, reg);
2653 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2654 Set_NB32(dev, reg, val);
2655 print_tx("AfterStitch DCT0 and DCT1: DRAM Controller Select Low Register = ", val);
2659 Set_NB32(dev, reg, val);
2662 /* Program the DctSelBaseAddr value to 0
2663 if DCT 0 is disabled */
2664 if (pDCTstat->DIMMValidDCT[0] == 0) {
2665 dword = pDCTstat->NodeSysBase;
2667 if (dword >= DramHoleBase) {
2668 pMCTstat->HoleBase = (DramHoleBase & 0xFFFFF800) << 8;
2669 val = pMCTstat->HoleBase;
2672 val |= (((~val) & 0xFFFF) + 1);
2677 Set_NB32(dev, reg, val);
2680 val |= 3; /* Set F2x110[DctSelHiRngEn], F2x110[DctSelHi] */
2681 Set_NB32(dev, reg, val);
2682 print_tx("AfterStitch DCT1 only: DRAM Controller Select Low Register = ", val);
2686 pDCTstat->NodeSysLimit += pDCTstat->DCTSysLimit;
2688 print_tx("AfterStitch pDCTstat->NodeSysBase = ", pDCTstat->NodeSysBase);
2689 print_tx("mct_AfterStitchMemory: pDCTstat->NodeSysLimit ", pDCTstat->NodeSysLimit);
/* Thin wrapper: detect populated DIMMs via DIMMPresence_D and return
 * the resulting error code. */
2693 static u8 mct_DIMMPresence(struct MCTStatStruc *pMCTstat,
2694 struct DCTStatStruc *pDCTstat, u8 dct)
2699 ret = DIMMPresence_D(pMCTstat, pDCTstat);
2701 ret = pDCTstat->ErrCode;
2707 /* mct_BeforeGetDIMMAddress inline in C */
/* For every present node, program the secondary bus-turnaround timings
 * (Trdrd/Twrwr/Twrrd/TrwtTO/TrwtWB) for each populated DCT; DCT1 is
 * skipped in ganged mode since DCT0 covers both channels. */
2710 static void mct_OtherTiming(struct MCTStatStruc *pMCTstat,
2711 struct DCTStatStruc *pDCTstatA)
2715 for (Node = 0; Node < MAX_NODES_SUPPORTED; Node++) {
2716 struct DCTStatStruc *pDCTstat;
2717 pDCTstat = pDCTstatA + Node;
2718 if (pDCTstat->NodePresent) {
2719 if (pDCTstat->DIMMValidDCT[0]) {
2720 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[0];
2721 Set_OtherTiming(pMCTstat, pDCTstat, 0);
2723 if (pDCTstat->DIMMValidDCT[1] && !pDCTstat->GangedMode ) {
2724 pDCTstat->DIMMValid = pDCTstat->DIMMValidDCT[1];
2725 Set_OtherTiming(pMCTstat, pDCTstat, 1);
2727 } /* Node is present*/
/* Compute the turnaround timings for one DCT and merge them into the
 * F2x[1,0]8C Dram Timing High register. */
2732 static void Set_OtherTiming(struct MCTStatStruc *pMCTstat,
2733 struct DCTStatStruc *pDCTstat, u8 dct)
2736 u32 reg_off = 0x100 * dct;
2739 u32 dev = pDCTstat->dev_dct;
2741 Get_Trdrd(pMCTstat, pDCTstat, dct);
2742 Get_Twrwr(pMCTstat, pDCTstat, dct);
2743 Get_Twrrd(pMCTstat, pDCTstat, dct);
2744 Get_TrwtTO(pMCTstat, pDCTstat, dct);
2745 Get_TrwtWB(pMCTstat, pDCTstat);
2747 reg = 0x8C + reg_off; /* Dram Timing Hi */
2748 val = Get_NB32(dev, reg);
/* Each field below is masked/merged into 'val'; trailing comments give
 * the field's bit mask within its subfield. */
2750 dword = pDCTstat->TrwtTO; //0x07
2752 dword = pDCTstat->Twrrd; //0x03
2754 dword = pDCTstat->Twrwr; //0x03
2756 dword = pDCTstat->Trdrd; //0x03
2758 dword = pDCTstat->TrwtWB; //0x07
2760 val = OtherTiming_A_D(pDCTstat, val);
2761 Set_NB32(dev, reg, val);
/* Compute Trdrd (read-to-read turnaround). Mixed x4/x8 DIMMs use the
 * DqsRcvEnGross delay spread (CGDD); otherwise Trdrd is 0 only when
 * RdDqsTime matches across DIMMs and every DqsRcvEn pair differs by
 * less than half a MEMCLK. Result is stored in pDCTstat->Trdrd. */
2766 static void Get_Trdrd(struct MCTStatStruc *pMCTstat,
2767 struct DCTStatStruc *pDCTstat, u8 dct)
2773 u32 index_reg = 0x98 + 0x100 * dct;
2774 u32 dev = pDCTstat->dev_dct;
2776 if ((pDCTstat->Dimmx4Present != 0) && (pDCTstat->Dimmx8Present != 0)) {
2777 /* mixed (x4 or x8) DIMM types
2778 the largest DqsRcvEnGrossDelay of any DIMM minus the DqsRcvEnGrossDelay
2779 of any other DIMM is equal to the Critical Gross Delay Difference (CGDD) for Trdrd.*/
2780 byte = Get_DqsRcvEnGross_Diff(pDCTstat, dev, index_reg);
2788 Trdrd with non-mixed DIMM types
2789 RdDqsTime are the same for all DIMMs and DqsRcvEn difference between
2790 any two DIMMs is less than half of a MEMCLK, BIOS should program Trdrd to 0000b,
2791 else BIOS should program Trdrd to 0001b.
2793 RdDqsTime are the same for all DIMMs
2794 DDR400~DDR667 only use one set register
2795 DDR800 have two set register for DIMM0 and DIMM1 */
2797 if (pDCTstat->Speed > 3) {
2798 /* DIMM0+DIMM1 exist */ //NOTE it should be 5
2799 val = bsf(pDCTstat->DIMMValid);
2800 dword = bsr(pDCTstat->DIMMValid);
2801 if (dword != val && dword != 0) {
2802 /* DCT Read DQS Timing Control - DIMM0 - Low */
2803 dword = Get_NB32_index_wait(dev, index_reg, 0x05);
2804 /* DCT Read DQS Timing Control - DIMM1 - Low */
2805 val = Get_NB32_index_wait(dev, index_reg, 0x105);
2809 /* DCT Read DQS Timing Control - DIMM0 - High */
2810 dword = Get_NB32_index_wait(dev, index_reg, 0x06);
2811 /* DCT Read DQS Timing Control - DIMM1 - High */
2812 val = Get_NB32_index_wait(dev, index_reg, 0x106);
2818 /* DqsRcvEn difference between any two DIMMs is
2819 less than half of a MEMCLK */
2820 /* DqsRcvEn byte 1,0*/
2821 if (Check_DqsRcvEn_Diff(pDCTstat, dct, dev, index_reg, 0x10))
2823 /* DqsRcvEn byte 3,2*/
2824 if (Check_DqsRcvEn_Diff(pDCTstat, dct, dev, index_reg, 0x11))
2826 /* DqsRcvEn byte 5,4*/
2827 if (Check_DqsRcvEn_Diff(pDCTstat, dct, dev, index_reg, 0x20))
2829 /* DqsRcvEn byte 7,6*/
2830 if (Check_DqsRcvEn_Diff(pDCTstat, dct, dev, index_reg, 0x21))
/* ECC byte lane check (index 0x12). */
2833 if (Check_DqsRcvEn_Diff(pDCTstat, dct, dev, index_reg, 0x12))
2839 pDCTstat->Trdrd = Trdrd;
/* Compute Twrwr (write-to-write turnaround) from the spread of
 * WrDatGrossDlyByte across DIMMs; DDR800 keeps separate register sets
 * per DIMM. Result is stored in pDCTstat->Twrwr. */
2844 static void Get_Twrwr(struct MCTStatStruc *pMCTstat,
2845 struct DCTStatStruc *pDCTstat, u8 dct)
2848 u32 index_reg = 0x98 + 0x100 * dct;
2849 u32 dev = pDCTstat->dev_dct;
2853 /* WrDatGrossDlyByte only use one set register when DDR400~DDR667
2854 DDR800 have two set register for DIMM0 and DIMM1 */
2855 if (pDCTstat->Speed > 3) {
2856 val = bsf(pDCTstat->DIMMValid);
2857 dword = bsr(pDCTstat->DIMMValid);
2858 if (dword != val && dword != 0) {
2859 /*the largest WrDatGrossDlyByte of any DIMM minus the
2860 WrDatGrossDlyByte of any other DIMM is equal to CGDD */
2861 val = Get_WrDatGross_Diff(pDCTstat, dct, dev, index_reg);
2868 pDCTstat->Twrwr = Twrwr;
/* Compute Twrrd (write-to-read turnaround) from the worst-case gap
 * between any DIMM's WrDatGross delay and any other DIMM's
 * DqsRcvEnGross delay. Result is stored in pDCTstat->Twrrd. */
2872 static void Get_Twrrd(struct MCTStatStruc *pMCTstat,
2873 struct DCTStatStruc *pDCTstat, u8 dct)
2876 u32 index_reg = 0x98 + 0x100 * dct;
2877 u32 dev = pDCTstat->dev_dct;
2879 /* On any given byte lane, the largest WrDatGrossDlyByte delay of
2880 any DIMM minus the DqsRcvEnGrossDelay delay of any other DIMM is
2881 equal to the Critical Gross Delay Difference (CGDD) for Twrrd.*/
2882 pDCTstat->Twrrd = 0;
/* Side effect: these two calls also latch DqsRcvEnGrossL/WrDatGrossH
 * into pDCTstat, read back just below. */
2883 Get_DqsRcvEnGross_Diff(pDCTstat, dev, index_reg);
2884 Get_WrDatGross_Diff(pDCTstat, dct, dev, index_reg);
2885 bytex = pDCTstat->DqsRcvEnGrossL;
2886 byte = pDCTstat->WrDatGrossH;
2896 pDCTstat->Twrrd = bytex;
/* Compute TrwtTO (read-to-write turnaround) analogously to Twrrd,
 * using the DqsRcvEnGross vs. WrDatGross delay spread. Result is
 * stored in pDCTstat->TrwtTO. */
2900 static void Get_TrwtTO(struct MCTStatStruc *pMCTstat,
2901 struct DCTStatStruc *pDCTstat, u8 dct)
2904 u32 index_reg = 0x98 + 0x100 * dct;
2905 u32 dev = pDCTstat->dev_dct;
2907 /* On any given byte lane, the largest WrDatGrossDlyByte delay of
2908 any DIMM minus the DqsRcvEnGrossDelay delay of any other DIMM is
2909 equal to the Critical Gross Delay Difference (CGDD) for TrwtTO. */
2910 Get_DqsRcvEnGross_Diff(pDCTstat, dev, index_reg);
2911 Get_WrDatGross_Diff(pDCTstat, dct, dev, index_reg);
2912 bytex = pDCTstat->DqsRcvEnGrossL;
2913 byte = pDCTstat->WrDatGrossH;
2916 if ((bytex == 1) || (bytex == 2))
2922 if ((byte == 0) || (byte == 1))
2928 pDCTstat->TrwtTO = bytex;
/* TrwtWB = TrwtTO + 1; must be computed after Get_TrwtTO. */
2932 static void Get_TrwtWB(struct MCTStatStruc *pMCTstat,
2933 struct DCTStatStruc *pDCTstat)
2935 /* TrwtWB ensures read-to-write data-bus turnaround.
2936 This value should be one more than the programmed TrwtTO.*/
2937 pDCTstat->TrwtWB = pDCTstat->TrwtTO + 1;
/* Scan the DqsRcvEn fine delay of every valid DIMM at 'index' and
 * return nonzero when the min/max spread of either byte lane exceeds
 * half a MEMCLK (31 units of 1/64 MEMCLK). */
2941 static u8 Check_DqsRcvEn_Diff(struct DCTStatStruc *pDCTstat,
2942 u8 dct, u32 dev, u32 index_reg,
2945 u8 Smallest_0, Largest_0, Smallest_1, Largest_1;
/* Even bits of DIMMValid select chip-select pairs (one per DIMM). */
2955 for (i=0; i < 8; i+=2) {
2956 if ( pDCTstat->DIMMValid & (1 << i)) {
2957 val = Get_NB32_index_wait(dev, index_reg, index);
2959 if (byte < Smallest_0)
2961 if (byte > Largest_0)
2963 byte = (val >> 16) & 0xFF;
2964 if (byte < Smallest_1)
2966 if (byte > Largest_1)
2972 /* check if total DqsRcvEn delay difference between any
2973 two DIMMs is less than half of a MEMCLK */
2974 if ((Largest_0 - Smallest_0) > 31)
2976 if ((Largest_1 - Smallest_1) > 31)
/* Return the Critical Gross Delay Difference (largest minus smallest
 * DqsRcvEnGross across all byte lanes, ECC lane included when
 * present). Also latches the largest value in DqsRcvEnGrossL. */
2982 static u8 Get_DqsRcvEnGross_Diff(struct DCTStatStruc *pDCTstat,
2983 u32 dev, u32 index_reg)
2985 u8 Smallest, Largest;
2989 /* The largest DqsRcvEnGrossDelay of any DIMM minus the
2990 DqsRcvEnGrossDelay of any other DIMM is equal to the Critical
2991 Gross Delay Difference (CGDD) */
2992 /* DqsRcvEn byte 1,0 */
2993 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x10);
/* Each MaxMin call packs max in bits [7:0] and min in bits [15:8]. */
2994 Largest = val & 0xFF;
2995 Smallest = (val >> 8) & 0xFF;
2997 /* DqsRcvEn byte 3,2 */
2998 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x11);
3000 bytex = (val >> 8) & 0xFF;
3001 if (bytex < Smallest)
3006 /* DqsRcvEn byte 5,4 */
3007 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x20);
3009 bytex = (val >> 8) & 0xFF;
3010 if (bytex < Smallest)
3015 /* DqsRcvEn byte 7,6 */
3016 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x21);
3018 bytex = (val >> 8) & 0xFF;
3019 if (bytex < Smallest)
3024 if (pDCTstat->DimmECCPresent> 0) {
3026 val = Get_DqsRcvEnGross_MaxMin(pDCTstat, dev, index_reg, 0x12);
3028 bytex = (val >> 8) & 0xFF;
3029 if (bytex < Smallest)
3035 pDCTstat->DqsRcvEnGrossL = Largest;
3036 return Largest - Smallest;
/* Return the CGDD of WrDatGrossDlyByte across DIMM0/DIMM1, latching
 * the largest value in WrDatGrossH. */
3040 static u8 Get_WrDatGross_Diff(struct DCTStatStruc *pDCTstat,
3041 u8 dct, u32 dev, u32 index_reg)
3043 u8 Smallest, Largest;
3047 /* The largest WrDatGrossDlyByte of any DIMM minus the
3048 WrDatGrossDlyByte of any other DIMM is equal to CGDD */
3049 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x01); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM0 */
3050 Largest = val & 0xFF;
3051 Smallest = (val >> 8) & 0xFF;
3052 val = Get_WrDatGross_MaxMin(pDCTstat, dct, dev, index_reg, 0x101); /* WrDatGrossDlyByte byte 0,1,2,3 for DIMM1 */
3054 bytex = (val >> 8) & 0xFF;
3055 if (bytex < Smallest)
3060 // FIXME: Add Cx support.
3062 pDCTstat->WrDatGrossH = Largest;
3063 return Largest - Smallest;
/* Scan valid DIMMs at 'index' and return the gross DqsRcvEn delay
 * extremes (bits [7:5] of each half-word) packed as max | (min << 8). */
3066 static u16 Get_DqsRcvEnGross_MaxMin(struct DCTStatStruc *pDCTstat,
3067 u32 dev, u32 index_reg,
3070 u8 Smallest, Largest;
3079 for (i=0; i < 8; i+=2) {
3080 if ( pDCTstat->DIMMValid & (1 << i)) {
3081 val = Get_NB32_index_wait(dev, index_reg, index);
3083 byte = (val >> 5) & 0xFF;
3084 if (byte < Smallest)
3088 byte = (val >> (16 + 5)) & 0xFF;
3089 if (byte < Smallest)
/* Scan the four WrDatGrossDlyByte fields (plus the ECC lane when
 * present) at 'index' and return the extremes packed max | (min << 8). */
3104 static u16 Get_WrDatGross_MaxMin(struct DCTStatStruc *pDCTstat,
3105 u8 dct, u32 dev, u32 index_reg,
3108 u8 Smallest, Largest;
3116 for (i=0; i < 2; i++) {
3117 val = Get_NB32_index_wait(dev, index_reg, index);
3120 for (j=0; j < 4; j++) {
3122 if (byte < Smallest)
3131 if (pDCTstat->DimmECCPresent > 0) {
3133 val = Get_NB32_index_wait(dev, index_reg, index);
3137 if (byte < Smallest)
/* Final MCT cleanup: undo the init-time MSR settings (cache-line to
 * NB disable, WB-enhancement disable) applied by mct_InitialMCT_D. */
3152 static void mct_FinalMCT_D(struct MCTStatStruc *pMCTstat,
3153 struct DCTStatStruc *pDCTstat)
3155 print_t("\tmct_FinalMCT_D: Clr Cl, Wb\n");
3158 mct_ClrClToNB_D(pMCTstat, pDCTstat);
3159 mct_ClrWbEnhWsbDis_D(pMCTstat, pDCTstat);
/* Initial MCT setup: set the MSR workarounds that mct_FinalMCT_D
 * later clears. */
3163 static void mct_InitialMCT_D(struct MCTStatStruc *pMCTstat, struct DCTStatStruc *pDCTstat)
3165 print_t("\tmct_InitialMCT_D: Set Cl, Wb\n");
3166 mct_SetClToNB_D(pMCTstat, pDCTstat);
3167 mct_SetWbEnhWsbDis_D(pMCTstat, pDCTstat);
3171 static u32 mct_NodePresent_D(void)
/* Per-node init of the DCT status structure; also enables extended
 * PCI configuration access when MSR bit 46 (EnableCf8ExtCfg range)
 * indicates support. */
3179 static void mct_init(struct MCTStatStruc *pMCTstat,
3180 struct DCTStatStruc *pDCTstat)
3185 pDCTstat->GangedMode = 0;
3186 pDCTstat->DRPresent = 1;
3188 /* enable extend PCI configuration access */
3190 _RDMSR(addr, &lo, &hi);
3191 if (hi & (1 << (46-32))) {
3192 pDCTstat->Status |= 1 << SB_ExtConfig;
3195 _WRMSR(addr, lo, hi);
/* Clear the LegacyBiosMode bit in the DCT configuration register so
 * the controller runs in native mode. */
3200 static void clear_legacy_Mode(struct MCTStatStruc *pMCTstat,
3201 struct DCTStatStruc *pDCTstat)
3205 u32 dev = pDCTstat->dev_dct;
3207 /* Clear Legacy BIOS Mode bit */
3209 val = Get_NB32(dev, reg);
3210 val &= ~(1<<LegacyBiosMode);
3211 Set_NB32(dev, reg, val);
/* Mirror each node's DRAM base/limit (F1x40..4C, read from Node0's
 * map) into the extended address map registers F1x120/F1x124 of that
 * node, applying the hardware memory-hoist hole when GSB_HWHole is
 * set. */
3215 static void mct_HTMemMapExt(struct MCTStatStruc *pMCTstat,
3216 struct DCTStatStruc *pDCTstatA)
3219 u32 Drambase, Dramlimit;
3225 struct DCTStatStruc *pDCTstat;
3227 pDCTstat = pDCTstatA + 0;
3228 dev = pDCTstat->dev_map;
3230 /* Copy dram map from F1x40/44,F1x48/4c,
3231 to F1x120/124(Node0),F1x120/124(Node1),...*/
3232 for (Node=0; Node < MAX_NODES_SUPPORTED; Node++) {
3233 pDCTstat = pDCTstatA + Node;
3234 devx = pDCTstat->dev_map;
3236 /* get base/limit from Node0 */
3237 reg = 0x40 + (Node << 3); /* Node0/Dram Base 0 */
3238 val = Get_NB32(dev, reg);
/* Scale [39:24] down to the [47:27] representation used by F1x120. */
3239 Drambase = val >> ( 16 + 3);
3241 reg = 0x44 + (Node << 3); /* Node0/Dram Base 0 */
3242 val = Get_NB32(dev, reg);
3243 Dramlimit = val >> (16 + 3);
3245 /* set base/limit to F1x120/124 per Node */
3246 if (pDCTstat->NodePresent) {
3247 reg = 0x120; /* F1x120,DramBase[47:27] */
3248 val = Get_NB32(devx, reg);
3251 Set_NB32(devx, reg, val);
3254 val = Get_NB32(devx, reg);
3257 Set_NB32(devx, reg, val);
3259 if ( pMCTstat->GStatus & ( 1 << GSB_HWHole)) {
3261 val = Get_NB32(devx, reg);
3262 val |= (1 << DramMemHoistValid);
3263 val &= ~(0xFF << 24);
3264 dword = (pMCTstat->HoleBase >> (24 - 8)) & 0xFF;
3267 Set_NB32(devx, reg, val);
/* Tri-state unused chip selects (motherboard termination present).
 * For registered DIMMs each present CS also implies its odd partner,
 * so only truly-unpopulated CS lines are masked off. */
3274 static void SetCSTriState(struct MCTStatStruc *pMCTstat,
3275 struct DCTStatStruc *pDCTstat, u8 dct)
3278 u32 dev = pDCTstat->dev_dct;
3279 u32 index_reg = 0x98 + 0x100 * dct;
3284 /* Tri-state unused chipselects when motherboard
3285 termination is available */
3287 // FIXME: skip for Ax
3289 word = pDCTstat->CSPresent;
3290 if (pDCTstat->Status & (1 << SB_Registered)) {
3291 for (cs = 0; cs < 8; cs++) {
3292 if (word & (1 << cs)) {
3294 word |= 1 << (cs + 1);
/* Invert: a set bit now means "unused CS - tri-state it". */
3298 word = (~word) & 0xFF;
3300 val = Get_NB32_index_wait(dev, index_reg, index);
3302 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused CKE signals (motherboard termination present);
 * each of the two CKE lines is released only when no chip select
 * mapped to it is populated. */
3307 static void SetCKETriState(struct MCTStatStruc *pMCTstat,
3308 struct DCTStatStruc *pDCTstat, u8 dct)
3312 u32 index_reg = 0x98 + 0x100 * dct;
3317 /* Tri-state unused CKEs when motherboard termination is available */
3319 // FIXME: skip for Ax
3321 dev = pDCTstat->dev_dct;
3323 for (cs = 0; cs < 8; cs++) {
3324 if (pDCTstat->CSPresent & (1 << cs)) {
3333 val = Get_NB32_index_wait(dev, index_reg, index);
3334 if ((word & 0x00FF) == 1)
3339 if ((word >> 8) == 1)
3344 Set_NB32_index_wait(dev, index_reg, index, val);
/* Tri-state unused ODT pins (motherboard termination present): an ODT
 * line is released only when both chip selects of its CS pair are
 * absent. */
3348 static void SetODTTriState(struct MCTStatStruc *pMCTstat,
3349 struct DCTStatStruc *pDCTstat, u8 dct)
3353 u32 index_reg = 0x98 + 0x100 * dct;
3358 /* Tri-state unused ODTs when motherboard termination is available */
3360 // FIXME: skip for Ax
3362 dev = pDCTstat->dev_dct;
3364 for (cs = 0; cs < 8; cs += 2) {
3365 if (!(pDCTstat->CSPresent & (1 << cs))) {
3366 if (!(pDCTstat->CSPresent & (1 << (cs + 1))))
3367 word |= (1 << (cs >> 1));
3372 val = Get_NB32_index_wait(dev, index_reg, index);
3374 Set_NB32_index_wait(dev, index_reg, index, val);
/* Build the DRAM phy compensation control value (additional-data
 * index 0x0a) from the slew-rate lookup tables, selecting rise/fall
 * 1.5x/2.0x tables per field, with an override for DDR533 with four
 * memory-address-bus loads. */
3378 static void InitPhyCompensation(struct MCTStatStruc *pMCTstat,
3379 struct DCTStatStruc *pDCTstat, u8 dct)
3382 u32 index_reg = 0x98 + 0x100 * dct;
3383 u32 dev = pDCTstat->dev_dct;
3389 val = Get_NB32_index_wait(dev, index_reg, 0x00);
3391 for (i=0; i < 6; i++) {
3395 p = Table_Comp_Rise_Slew_15x;
3396 valx = p[(val >> 16) & 3];
3400 p = Table_Comp_Fall_Slew_15x;
3401 valx = p[(val >> 16) & 3];
3404 p = Table_Comp_Rise_Slew_20x;
3405 valx = p[(val >> 8) & 3];
3408 p = Table_Comp_Fall_Slew_20x;
3409 valx = p[(val >> 8) & 3];
/* Pack each 5-bit slew code into its field of the result. */
3413 dword |= valx << (5 * i);
3416 /* Override/Exception */
3417 if ((pDCTstat->Speed == 2) && (pDCTstat->MAdimms[dct] == 4))
3418 dword &= 0xF18FFF18;
3420 Set_NB32_index_wait(dev, index_reg, 0x0a, dword);
3424 static void WaitRoutine_D(u32 time)
/* Set F2x78[EarlyArbEn] when the NB CLK : MEMCLK ratio requires early
 * DCT arbitration (GH enhancement #18429). */
3433 static void mct_EarlyArbEn_D(struct MCTStatStruc *pMCTstat,
3434 struct DCTStatStruc *pDCTstat)
3438 u32 dev = pDCTstat->dev_dct;
3440 /* GhEnhancement #18429 modified by askar: For low NB CLK :
3441 * Memclk ratio, the DCT may need to arbitrate early to avoid
3442 * unnecessary bubbles.
3443 * bit 19 of F2x[1,0]78 Dram Control Register, set this bit only when
3444 * NB CLK : Memclk ratio is between 3:1 (inclusive) to 4:5 (inclusive)
3448 val = Get_NB32(dev, reg);
3450 //FIXME: check for Cx
3451 if (CheckNBCOFEarlyArbEn(pMCTstat, pDCTstat))
3452 val |= (1 << EarlyArbEn);
3454 Set_NB32(dev, reg, val);
/* Return 1 when the NClk:MemClk ratio is within [3:1, 4.5:1], i.e.
 * when EarlyArbEn must be set; 0 otherwise. Ratio is derived from
 * MSR C001_0071 (COFVID status) and F2x94[MemClkFreq]. */
3459 static u8 CheckNBCOFEarlyArbEn(struct MCTStatStruc *pMCTstat,
3460 struct DCTStatStruc *pDCTstat)
3466 u32 dev = pDCTstat->dev_dct;
3470 /* Check if NB COF >= 4*Memclk, if it is not, return a fatal error
3473 /* 3*(Fn2xD4[NBFid]+4)/(2^NbDid)/(3+Fn2x94[MemClkFreq]) */
3474 _RDMSR(0xC0010071, &lo, &hi);
3480 val = Get_NB32(dev, reg);
3481 if (!(val & (1 << MemClkFreqVal)))
/* BUG(review): 'reg * 0x100' computes 0x9400, not the DCT1 copy of
 * this register; the DCT1 offset convention used throughout this file
 * is 'reg + 0x100'. Verify and fix in the full source. */
3482 val = Get_NB32(dev, reg * 0x100); /* get the DCT1 value */
3490 dev = pDCTstat->dev_nbmisc;
3492 val = Get_NB32(dev, reg);
3500 // Yes this could be nicer but this was how the asm was....
3501 if (val < 3) { /* NClk:MemClk < 3:1 */
3503 } else if (val > 4) { /* NClk:MemClk >= 5:1 */
3505 } else if ((val == 4) && (rem > tmp)) { /* NClk:MemClk > 4.5:1 */
3508 return 1; /* 3:1 <= NClk:MemClk <= 4.5:1*/
/* Zero the MCT status structure and, for each node, the DCT status
 * structure - except the host BIOS service words and the field range
 * [CH_MaxRdLat[2], CH_D_BC_RCVRDLY[2][4]) which must survive the
 * reset (offsets computed via the classic offsetof-by-null-pointer
 * idiom). */
3513 static void mct_ResetDataStruct_D(struct MCTStatStruc *pMCTstat,
3514 struct DCTStatStruc *pDCTstatA)
3518 struct DCTStatStruc *pDCTstat;
3521 u16 host_serv1, host_serv2;
3523 /* Initialize Data structures by clearing all entries to 0 */
3524 p = (u8 *) pMCTstat;
3525 for (i = 0; i < sizeof(struct MCTStatStruc); i++) {
3529 for (Node = 0; Node < 8; Node++) {
3530 pDCTstat = pDCTstatA + Node;
/* Preserve the host BIOS service words across the wipe. */
3531 host_serv1 = pDCTstat->HostBiosSrvc1;
3532 host_serv2 = pDCTstat->HostBiosSrvc2;
3534 p = (u8 *) pDCTstat;
3536 stop = ((u16) &((struct DCTStatStruc *)0)->CH_MaxRdLat[2]);
3537 for (i = start; i < stop ; i++) {
3541 start = ((u16) &((struct DCTStatStruc *)0)->CH_D_BC_RCVRDLY[2][4]);
3542 stop = sizeof(struct DCTStatStruc);
3543 for (i = start; i < stop; i++) {
3546 pDCTstat->HostBiosSrvc1 = host_serv1;
3547 pDCTstat->HostBiosSrvc2 = host_serv2;
/* Production pre-DRAM-init workaround: at DDR533/DDR667 write a fixed
 * phy debug sequence through both DCTs' additional-data ports. */
3552 static void mct_BeforeDramInit_Prod_D(struct MCTStatStruc *pMCTstat,
3553 struct DCTStatStruc *pDCTstat)
3557 u32 dev = pDCTstat->dev_dct;
3559 // FIXME: skip for Ax
3560 if ((pDCTstat->Speed == 3) || ( pDCTstat->Speed == 2)) { // MemClkFreq = 667MHz or 533Mhz
3561 for (i=0; i < 2; i++) {
3562 reg_off = 0x100 * i;
3563 Set_NB32(dev, 0x98 + reg_off, 0x0D000030);
3564 Set_NB32(dev, 0x9C + reg_off, 0x00000806);
3565 Set_NB32(dev, 0x98 + reg_off, 0x4D040F30);
/* Adjust the permissible DQS position range at DDR533/DDR667
 * (rev-dependent workaround; body partially elided here). */
3571 void mct_AdjustDelayRange_D(struct MCTStatStruc *pMCTstat,
3572 struct DCTStatStruc *pDCTstat, u8 *dqs_pos)
3574 // FIXME: Skip for Ax
3575 if ((pDCTstat->Speed == 3) || ( pDCTstat->Speed == 2)) { // MemClkFreq = 667MHz or 533Mhz
/* Set the ClLinesToNbDis MSR bit (disable cache lines to NB) for the
 * duration of memory init; cleared again by mct_ClrClToNB_D. */
3581 void mct_SetClToNB_D(struct MCTStatStruc *pMCTstat,
3582 struct DCTStatStruc *pDCTstat)
3587 // FIXME: Maybe check the CPUID? - not for now.
3588 // pDCTstat->LogicalCPUID;
3591 _RDMSR(msr, &lo, &hi);
3592 lo |= 1 << ClLinesToNbDis;
3593 _WRMSR(msr, lo, hi);
/* Clear ClLinesToNbDis again - unless the platform flagged (via
 * ClToNB_flag) that the bit must remain set. */
3597 void mct_ClrClToNB_D(struct MCTStatStruc *pMCTstat,
3598 struct DCTStatStruc *pDCTstat)
3604 // FIXME: Maybe check the CPUID? - not for now.
3605 // pDCTstat->LogicalCPUID;
3608 _RDMSR(msr, &lo, &hi);
3609 if (!pDCTstat->ClToNB_flag)
3610 lo &= ~(1<<ClLinesToNbDis);
3611 _WRMSR(msr, lo, hi);
/* Set the WbEnhWsbDis MSR bit (high dword) during memory init;
 * cleared by mct_ClrWbEnhWsbDis_D. */
3616 void mct_SetWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3617 struct DCTStatStruc *pDCTstat)
3622 // FIXME: Maybe check the CPUID? - not for now.
3623 // pDCTstat->LogicalCPUID;
3626 _RDMSR(msr, &lo, &hi);
3627 hi |= (1 << WbEnhWsbDis_D);
3628 _WRMSR(msr, lo, hi);
/* Clear the WbEnhWsbDis MSR bit set by mct_SetWbEnhWsbDis_D. */
3632 void mct_ClrWbEnhWsbDis_D(struct MCTStatStruc *pMCTstat,
3633 struct DCTStatStruc *pDCTstat)
3638 // FIXME: Maybe check the CPUID? - not for now.
3639 // pDCTstat->LogicalCPUID;
3642 _RDMSR(msr, &lo, &hi);
3643 hi &= ~(1 << WbEnhWsbDis_D);
3644 _WRMSR(msr, lo, hi);
/* Write F2x[1,0]94 (Dram Config High) with the compensation-engine
 * disable/reset/enable sequence mandated by Bug#15114 / Erratum 177
 * around any MemClk frequency change. */
3648 void mct_SetDramConfigHi_D(struct DCTStatStruc *pDCTstat, u32 dct,
3651 /* Bug#15114: Comp. update interrupted by Freq. change can cause
3652 * subsequent update to be invalid during any MemClk frequency change:
3653 * Solution: From the bug report:
3654 * 1. A software-initiated frequency change should be wrapped into the
3655 * following sequence :
3656 * - a) Disable Compensation (F2[1, 0]9C_x08[30] )
3657 * b) Reset the Begin Compensation bit (D3CMP->COMP_CONFIG[0]) in all the compensation engines
3658 * c) Do frequency change
3659 * d) Enable Compensation (F2[1, 0]9C_x08[30] )
3660 * 2. A software-initiated Disable Compensation should always be
3661 * followed by step b) of the above steps.
3662 * Silicon Status: Fixed In Rev B0
3664 * Errata#177: DRAM Phy Automatic Compensation Updates May Be Invalid
3665 * Solution: BIOS should disable the phy automatic compensation prior
3666 * to initiating a memory clock frequency change as follows:
3667 * 1. Disable PhyAutoComp by writing 1'b1 to F2x[1, 0]9C_x08[30]
3668 * 2. Reset the Begin Compensation bits by writing 32'h0 to
3669 * F2x[1, 0]9C_x4D004F00
3670 * 3. Perform frequency change
3671 * 4. Enable PhyAutoComp by writing 1'b0 to F2x[1, 0]9C_08[30]
3672 * In addition, any time software disables the automatic phy
3673 * compensation it should reset the begin compensation bit per step 2.
3674 * Silicon Status: Fixed in DR-B0
3677 u32 dev = pDCTstat->dev_dct;
3678 u32 index_reg = 0x98 + 0x100 * dct;
/* Step 1: disable automatic phy compensation. */
3684 val = Get_NB32_index_wait(dev, index_reg, index);
3685 Set_NB32_index_wait(dev, index_reg, index, val | (1 << DisAutoComp));
3687 //FIXME: check for Bx Cx CPU
3688 // if Ax mct_SetDramConfigHi_Samp_D
/* Step 2: reset the Begin Compensation bits via the phy debug port. */
3691 index = 0x4D014F00; /* F2x[1, 0]9C_x[D0FFFFF:D000000] DRAM Phy Debug Registers */
3692 index |= 1 << DctAccessWrite;
3694 Set_NB32_index_wait(dev, index_reg, index, val);
/* Step 3: perform the frequency change by writing DramConfigHi. */
3696 Set_NB32(dev, 0x94 + 0x100 * dct, DramConfigHi);
/* Step 4: re-enable automatic phy compensation. */
3699 val = Get_NB32_index_wait(dev, index_reg, index);
3700 Set_NB32_index_wait(dev, index_reg, index, val & (~(1 << DisAutoComp)));
/* Pre-DQS-training workarounds (Bug#15115, Bug#15880): per node,
 * apply the sample fix and reset the DLL on both DCTs.
 * NOTE(review): confirm all three calls below are guarded by the
 * NodePresent check (braces may be on lines elided here). */
3703 static void mct_BeforeDQSTrain_D(struct MCTStatStruc *pMCTstat,
3704 struct DCTStatStruc *pDCTstatA)
3707 struct DCTStatStruc *pDCTstat;
3711 * Bug#15115: Uncertainty In The Sync Chain Leads To Setup Violations
3713 * Solution: BIOS should program DRAM Control Register[RdPtrInit] =
3714 * 5h, (F2x[1, 0]78[3:0] = 5h).
3715 * Silicon Status: Fixed In Rev B0
3717 * Bug#15880: Determine validity of reset settings for DDR PHY timing.
3718 * Solutiuon: At least, set WrDqs fine delay to be 0 for DDR2 training.
3721 for (Node = 0; Node < 8; Node++) {
3722 pDCTstat = pDCTstatA + Node;
3724 if (pDCTstat->NodePresent)
3725 mct_BeforeDQSTrain_Samp_D(pMCTstat, pDCTstat);
3726 mct_ResetDLL_D(pMCTstat, pDCTstat, 0);
3727 mct_ResetDLL_D(pMCTstat, pDCTstat, 1);
/* Pulse the DLL reset for every enabled receiver pair of DCT 'dct':
 * prime the lane with a dummy read, assert/deassert the reset via the
 * phy debug sequence, and honor the >=300ns / >=2us settle times. */
3732 static void mct_ResetDLL_D(struct MCTStatStruc *pMCTstat,
3733 struct DCTStatStruc *pDCTstat, u8 dct)
3737 u32 dev = pDCTstat->dev_dct;
3738 u32 reg_off = 0x100 * dct;
3742 pDCTstat->Channel = dct;
3743 Receiver = mct_InitReceiver_D(pDCTstat, dct);
3744 /* there are four receiver pairs, loosely associated with chipselects.*/
3745 for (; Receiver < 8; Receiver += 2) {
3746 if (mct_RcvrRankEnabled_D(pMCTstat, pDCTstat, dct, Receiver)) {
3747 addr = mct_GetRcvrSysAddr_D(pMCTstat, pDCTstat, dct, Receiver, &valid);
3749 mct_Read1LTestPattern_D(pMCTstat, pDCTstat, addr); /* cache fills */
3750 Set_NB32(dev, 0x98 + reg_off, 0x0D00000C);
3751 val = Get_NB32(dev, 0x9C + reg_off);
3753 Set_NB32(dev, 0x9C + reg_off, val);
3754 Set_NB32(dev, 0x98 + reg_off, 0x4D0F0F0C);
3755 mct_Wait_10ns(60); /* wait >= 300ns */
3757 Set_NB32(dev, 0x98 + reg_off, 0x0D00000C);
3758 val = Get_NB32(dev, 0x9C + reg_off);
3760 Set_NB32(dev, 0x9C + reg_off, val);
3761 Set_NB32(dev, 0x98 + reg_off, 0x4D0F0F0C);
3762 mct_Wait_10ns(400); /* wait >= 2us */
3770 static void mct_EnableDatIntlv_D(struct MCTStatStruc *pMCTstat,
3771 struct DCTStatStruc *pDCTstat)
3773 u32 dev = pDCTstat->dev_dct;
3776 /* Enable F2x110[DctDatIntlv] */
3777 // Call back not required mctHookBeforeDatIntlv_D()
3778 // FIXME Skip for Ax
3779 if (!pDCTstat->GangedMode) {
3780 val = Get_NB32(dev, 0x110);
3781 val |= 1 << 5; // DctDatIntlv
3782 Set_NB32(dev, 0x110, val);
3784 // FIXME Skip for Cx
3785 dev = pDCTstat->dev_nbmisc;
3786 val = Get_NB32(dev, 0x8C); // NB Configuration Hi
3787 val |= 36-32; // DisDatMask
3788 Set_NB32(dev, 0x8C, val);
/* Set F2x78[ChSetupSync] when exactly one DCT has all of
 * AddrCmdSetup/CsOdtSetup/CkeSetup equal to zero (i.e. the two DCTs
 * use different half-MEMCLK address setup settings). */
3793 static void mct_SetupSync_D(struct MCTStatStruc *pMCTstat,
3794 struct DCTStatStruc *pDCTstat)
3796 /* set F2x78[ChSetupSync] when F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup,
3797 * CkeSetup] setups for one DCT are all 0s and at least one of the setups,
3798 * F2x[1, 0]9C_x04[AddrCmdSetup, CsOdtSetup, CkeSetup], of the other
3802 u32 dev = pDCTstat->dev_dct;
/* Mask isolates the three setup bits of each channel's ADDR_TMG. */
3805 cha = pDCTstat->CH_ADDR_TMG[0] & 0x0202020;
3806 chb = pDCTstat->CH_ADDR_TMG[1] & 0x0202020;
3808 if ((cha != chb) && ((cha == 0) || (chb == 0))) {
3809 val = Get_NB32(dev, 0x78);
3811 Set_NB32(dev, 0x78, val);
/* Erratum workaround for rev B2: if DRAM failed to enable within
 * 50us, drop to 64-bit mode (Width128 = 0), issue a dummy CSR read,
 * then restore Width128 = 1 when ganged. */
3815 static void AfterDramInit_D(struct DCTStatStruc *pDCTstat, u8 dct) {
3818 u32 reg_off = 0x100 * dct;
3819 u32 dev = pDCTstat->dev_dct;
3821 if (pDCTstat->LogicalCPUID & AMD_DR_B2) {
3822 mct_Wait_10ns(5000); /* Wait 50 us*/
3823 val = Get_NB32(dev, 0x110);
3824 if ( val & (1 << DramEnabled)) {
3825 /* If 50 us expires while DramEnable =0 then do the following */
3826 val = Get_NB32(dev, 0x90 + reg_off);
3827 val &= ~(1 << Width128); /* Program Width128 = 0 */
3828 Set_NB32(dev, 0x90 + reg_off, val);
3830 val = Get_NB32_index_wait(dev, 0x98 + reg_off, 0x05); /* Perform dummy CSR read to F2x09C_x05 */
3832 if (pDCTstat->GangedMode) {
3833 val = Get_NB32(dev, 0x90 + reg_off);
3834 val |= 1 << Width128; /* Program Width128 = 1 (comment previously said "= 0") */
3835 Set_NB32(dev, 0x90 + reg_off, val);
3842 /* ==========================================================
3843 * 6-bit Bank Addressing Table
3846 * CCC=Columns-9 binary
3847 * ==========================================================
3848 * DCT CCCBRR Rows Banks Columns 64-bit CS Size
3850 * 0000 000000 13 2 9 128MB
3851 * 0001 001000 13 2 10 256MB
3852 * 0010 001001 14 2 10 512MB
3853 * 0011 010000 13 2 11 512MB
3854 * 0100 001100 13 3 10 512MB
3855 * 0101 001101 14 3 10 1GB
3856 * 0110 010001 14 2 11 1GB
3857 * 0111 001110 15 3 10 2GB
3858 * 1000 010101 14 3 11 2GB
3859 * 1001 010110 15 3 11 4GB
3860 * 1010 001111 16 3 10 4GB
3861 * 1011 010111 16 3 11 8GB