2 * This file is part of the coreboot project.
4 * Copyright (C) 2005 Eric W. Biederman and Tom Zimmerman
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
21 #include <cpu/x86/mtrr.h>
22 #include <cpu/x86/cache.h>
27 #define BAR 0x40000000
/*
 * Program a static table of E7520 MCH PCI configuration registers.
 * Each table entry is a triple: { PCI address, AND mask, OR value }.
 * The loop reads each register, ANDs in the mask (preserving the bits
 * the mask keeps), ORs in the new value, and writes it back.
 * NOTE(review): some interior lines are elided in this view; comments
 * describe only the code shown.
 */
29 static void sdram_set_registers(const struct mem_controller *ctrl)
31 static const unsigned int register_values[] = {
33 /* CKDIS 0x8c disable clocks */
34 PCI_ADDR(0, 0x00, 0, CKDIS), 0xffff0000, 0x0000ffff,
36 /* 0x9c Device present and extended RAM control
37 * DEVPRES is very touchy, hard code the initialization
38 * of PCI-E ports here.
40 PCI_ADDR(0, 0x00, 0, DEVPRES), 0x00000000, 0x07020801 | DEVPRES_CONFIG,
42 /* 0xc8 Remap RAM base and limit off */
43 PCI_ADDR(0, 0x00, 0, REMAPLIMIT), 0x00000000, 0x03df0000,
46 PCI_ADDR(0, 0x00, 0, 0xd8), 0x00000000, 0xb5930000,
47 PCI_ADDR(0, 0x00, 0, 0xe8), 0x00000000, 0x00004a2a,
50 PCI_ADDR(0, 0x00, 0, MCHCFG0), 0xfce0ffff, 0x00006000, /* 6000 */
53 PCI_ADDR(0, 0x00, 0, PAM-1), 0xcccccc7f, 0x33333000,
54 PCI_ADDR(0, 0x00, 0, PAM+3), 0xcccccccc, 0x33333333,
57 PCI_ADDR(0, 0x00, 0, DEVPRES1), 0xffbffff, (1<<22)|(6<<2) | DEVPRES1_CONFIG,
60 PCI_ADDR(0, 0x00, 0, IURBASE), 0x00000fff, BAR |0,
/* Walk the table three entries at a time: address, mask, value. */
65 max = ARRAY_SIZE(register_values);
66 for(i = 0; i < max; i += 3) {
/* Split the packed PCI address into device handle and register offset. */
70 dev = (register_values[i] & ~0xff) - PCI_DEV(0, 0x00, 0) + PCI_DEV(0, 0x00, 0);
71 where = register_values[i] & 0xff;
72 reg = pci_read_config32(dev, where);
73 reg &= register_values[i+1];
74 reg |= register_values[i+2];
75 pci_write_config32(dev, where, reg);
77 print_spew("done.\n");
/*
 * Read SPD bytes from one DIMM and compute the log2 size (in bits) of
 * each side of the module.  Accumulates rows + columns + log2(banks) +
 * log2(data width) for side 1, then adjusts for an asymmetric side 2.
 * Jumps to val_err on malformed SPD data and hw_err on SMBus read
 * failure.  NOTE(review): interior lines are elided in this view.
 */
87 static struct dimm_size spd_get_dimm_size(unsigned device)
89 /* Calculate the log base 2 size of a DIMM in bits */
/* SPD byte 2: memory type; 8 = DDR2 (7 = DDR per the detect code below). */
97 value = spd_read_byte(device, 2); /* type */
98 if (value < 0) goto hw_err;
99 if (value == 8) ddr2 = 1;
101 /* Note it might be easier to use byte 31 here, it has the DIMM size as
102 * a multiple of 4MB. The way we do it now we can size both
103 * sides of an asymmetric dimm.
105 value = spd_read_byte(device, 3); /* rows */
106 if (value < 0) goto hw_err;
107 if ((value & 0xf) == 0) goto val_err;
108 sz.side1 += value & 0xf;
110 value = spd_read_byte(device, 4); /* columns */
111 if (value < 0) goto hw_err;
112 if ((value & 0xf) == 0) goto val_err;
113 sz.side1 += value & 0xf;
115 value = spd_read_byte(device, 17); /* banks */
116 if (value < 0) goto hw_err;
117 if ((value & 0xff) == 0) goto val_err;
118 sz.side1 += log2(value & 0xff);
120 /* Get the module data width and convert it to a power of two */
121 value = spd_read_byte(device, 7); /* (high byte) */
122 if (value < 0) goto hw_err;
126 low = spd_read_byte(device, 6); /* (low byte) */
127 if (low < 0) goto hw_err;
128 value = value | (low & 0xff);
/* Only 64-bit (non-ECC) and 72-bit (ECC) module widths are accepted. */
129 if ((value != 72) && (value != 64)) goto val_err;
130 sz.side1 += log2(value);
133 value = spd_read_byte(device, 5); /* number of physical banks */
135 if (value < 0) goto hw_err;
138 if (value == 1) goto out;
139 if (value != 2) goto val_err;
141 /* Start with the symmetrical case */
/* High nibbles of bytes 3/4 describe side 2 when it differs from side 1. */
144 value = spd_read_byte(device, 3); /* rows */
145 if (value < 0) goto hw_err;
146 if ((value & 0xf0) == 0) goto out; /* If symmetrical we are done */
147 sz.side2 -= (value & 0x0f); /* Subtract out rows on side 1 */
148 sz.side2 += ((value >> 4) & 0x0f); /* Add in rows on side 2 */
150 value = spd_read_byte(device, 4); /* columns */
151 if (value < 0) goto hw_err;
152 if ((value & 0xff) == 0) goto val_err;
153 sz.side2 -= (value & 0x0f); /* Subtract out columns on side 1 */
154 sz.side2 += ((value >> 4) & 0x0f); /* Add in columns on side 2 */
158 die("Bad SPD value\n");
159 /* If an hw_error occurs report that I have no memory */
/*
 * Size each populated DIMM and program the DRB (row boundary) registers
 * with the cumulative top address, then set TOM (top of memory) and
 * TOLM (top of low memory).  Returns -1 on SPD error.
 * NOTE(review): interior lines are elided; the scaling between the
 * log2 bit-size and the 64MB DRB granularity is not visible here.
 */
168 static long spd_set_ram_size(const struct mem_controller *ctrl, long dimm_mask)
173 for(i = cum = 0; i < DIMM_SOCKETS; i++) {
175 if (dimm_mask & (1 << i)) {
176 sz = spd_get_dimm_size(ctrl->channel0[i]);
178 return -1; /* Report SPD error */
180 /* convert bits to multiples of 64MB */
182 cum += (1 << sz.side1);
/* DRB registers are cumulative: each holds the running top boundary. */
184 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
187 cum += (1 << sz.side2);
189 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
/* Empty socket: repeat the current boundary for both sides. */
192 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
193 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
196 /* set TOM top of memory 0xcc */
197 pci_write_config16(PCI_DEV(0, 0x00, 0), TOM, cum);
198 /* set TOLM top of low memory */
204 pci_write_config16(PCI_DEV(0, 0x00, 0), TOLM, cum);
/*
 * Probe SPD byte 2 on both channels of every DIMM socket and build a
 * presence bitmask: bit i = channel0 socket i, bit (i + DIMM_SOCKETS) =
 * channel1 socket i.  Type 7 (DDR) and 8 (DDR2) are accepted.
 */
209 static unsigned int spd_detect_dimms(const struct mem_controller *ctrl)
214 for(i = 0; i < DIMM_SOCKETS; i++) {
217 device = ctrl->channel0[i];
219 byte = spd_read_byte(device, 2); /* Type */
220 if ((byte == 7) || (byte == 8)) {
221 dimm_mask |= (1 << i);
224 device = ctrl->channel1[i];
226 byte = spd_read_byte(device, 2);
227 if ((byte == 7) || (byte == 8)) {
228 dimm_mask |= (1 << (i + DIMM_SOCKETS));
/*
 * Compute per-DIMM page size from SPD geometry (rows, columns, banks,
 * device width) and pack a 4-bit code per socket into the DRA (DRAM
 * Row Attributes) register.  Jumps to val_err / hw_err on bad data.
 * NOTE(review): interior lines are elided; the mapping from 'reg' to
 * the 4-bit DRA code is not fully visible.
 */
236 static int spd_set_row_attributes(const struct mem_controller *ctrl,
246 for(cnt=0; cnt < 4; cnt++) {
247 if (!(dimm_mask & (1 << cnt))) {
251 value = spd_read_byte(ctrl->channel0[cnt], 3); /* rows */
252 if (value < 0) goto hw_err;
253 if ((value & 0xf) == 0) goto val_err;
256 value = spd_read_byte(ctrl->channel0[cnt], 4); /* columns */
257 if (value < 0) goto hw_err;
258 if ((value & 0xf) == 0) goto val_err;
261 value = spd_read_byte(ctrl->channel0[cnt], 17); /* banks */
262 if (value < 0) goto hw_err;
263 if ((value & 0xff) == 0) goto val_err;
264 reg += log2(value & 0xff);
266 /* Get the device width and convert it to a power of two */
267 value = spd_read_byte(ctrl->channel0[cnt], 13);
268 if (value < 0) goto hw_err;
269 value = log2(value & 0xff);
/* Sanity floor: anything below 2^27 bits is not a plausible side size. */
271 if(reg < 27) goto hw_err;
/* One 4-bit attribute nibble per side; second physical bank (SPD byte 5)
 * gets the high nibble of the socket's byte. */
275 dra += reg << (cnt*8);
276 value = spd_read_byte(ctrl->channel0[cnt], 5);
278 dra += reg << ((cnt*8)+4);
282 pci_write_config32(PCI_DEV(0, 0x00, 0), DRA, dra);
286 die("Bad SPD value\n");
287 /* If an hw_error occurs report that I have no memory */
/*
 * Derive DRAM timing parameters (CAS latency, Trp, Trcd, Tdal, Twr,
 * Tras, Trfc) from the worst case across all populated DIMMs' SPD data
 * and program the DRT register.  'drc' bits 1:0 select the DRAM clock
 * (index into cycle_time[]).  Returns the selected CAS latency encoded
 * as 20 / 25 / 30 (CL2 / CL2.5 / CL3 * 10).
 * NOTE(review): interior lines are elided in this view; 'index' and
 * 'index2' accumulate worst-case SPD timing bytes in packed 8-bit
 * fields, per the masking below.
 */
296 static int spd_set_drt_attributes(const struct mem_controller *ctrl,
297 long dimm_mask, uint32_t drc)
/* Cycle times in SPD units: 7.5ns (133MHz), 6.0ns (167MHz), 5.0ns (200MHz). */
308 static const unsigned char cycle_time[3] = {0x75,0x60,0x50};
/* SPD bytes holding tCK at CL max, CL max-0.5, CL max-1. */
309 static const int latency_indicies[] = { 26, 23, 9 };
312 drt = pci_read_config32(PCI_DEV(0, 0x00, 0), DRT);
313 drt &= 3; /* save bits 1:0 */
315 for(first_dimm = 0; first_dimm < 4; first_dimm++) {
316 if (dimm_mask & (1 << first_dimm))
321 value = spd_read_byte(ctrl->channel0[first_dimm], 2);
323 drt |= (3<<5); /* back to back write turn around & cycle add */
326 drt |= (3<<18); /* Trasmax */
328 for(cnt=0; cnt < 4; cnt++) {
329 if (!(dimm_mask & (1 << cnt))) {
332 reg = spd_read_byte(ctrl->channel0[cnt], 18); /* CAS Latency */
333 /* Compute the lowest cas latency supported */
334 latency = log2(reg) -2;
336 /* Loop through and find a fast clock with a low latency */
337 for(index = 0; index < 3; index++, latency++) {
338 if ((latency < 2) || (latency > 4) ||
339 (!(reg & (1 << latency)))) {
342 value = spd_read_byte(ctrl->channel0[cnt],
343 latency_indicies[index]);
345 if(value <= cycle_time[drc&3]) {
346 if( latency > cas_latency) {
347 cas_latency = latency;
/* Encode CAS latency as tenths: 2 -> 20, 2.5 -> 25, 3 -> 30. */
353 index = (cas_latency-2);
354 if((index)==0) cas_latency = 20;
355 else if((index)==1) cas_latency = 25;
356 else cas_latency = 30;
/* Collect worst-case (largest) SPD timing bytes across all DIMMs. */
358 for(cnt=0;cnt<4;cnt++) {
359 if (!(dimm_mask & (1 << cnt))) {
362 reg = spd_read_byte(ctrl->channel0[cnt], 27)&0x0ff; /* tRP */
363 if(((index>>8)&0x0ff)<reg) {
364 index &= ~(0x0ff << 8);
367 reg = spd_read_byte(ctrl->channel0[cnt], 28)&0x0ff; /* tRRD */
368 if(((index>>16)&0x0ff)<reg) {
369 index &= ~(0x0ff << 16);
372 reg = spd_read_byte(ctrl->channel0[cnt], 29)&0x0ff; /* tRCD */
373 if(((index2>>0)&0x0ff)<reg) {
374 index2 &= ~(0x0ff << 0);
377 reg = spd_read_byte(ctrl->channel0[cnt], 41)&0x0ff; /* tRC */
378 if(((index2>>8)&0x0ff)<reg) {
379 index2 &= ~(0x0ff << 8);
382 reg = spd_read_byte(ctrl->channel0[cnt], 42)&0x0ff; /* tRFC */
383 if(((index2>>16)&0x0ff)<reg) {
384 index2 &= ~(0x0ff << 16);
/* Translate the collected worst-case values into DRT fields, with one
 * branch per DRAM clock. */
390 value = cycle_time[drc&3];
391 if(value <= 0x50) { /* 200 MHz */
393 drt |= (2<<2); /* CAS latency 4 */
396 drt |= (1<<2); /* CAS latency 3 */
399 if((index&0x0ff00)<=0x03c00) {
400 drt |= (1<<8); /* Trp RAS Precharge */
402 drt |= (2<<8); /* Trp RAS Precharge */
405 /* Trcd RAS to CAS delay */
406 if((index2&0x0ff)<=0x03c) {
412 /* Tdal Write auto precharge recovery delay */
416 if((index2&0x0ff00)<=0x03700)
418 else if((index2&0xff00)<=0x03c00)
421 drt |= (2<<14); /* spd 41 */
423 drt |= (2<<16); /* Twr not defined for DDR docs say use 2 */
426 if((index&0x0ff0000)<=0x0140000) {
428 } else if((index&0x0ff0000)<=0x0280000) {
430 } else if((index&0x0ff0000)<=0x03c0000) {
436 /* Trfc Auto refresh cycle time */
437 if((index2&0x0ff0000)<=0x04b0000) {
439 } else if((index2&0x0ff0000)<=0x0690000) {
444 /* Docs say use 55 for all 200Mhz */
447 else if(value <= 0x60) { /* 167 Mhz */
448 /* according to new documentation CAS latency is 00
449 * for bits 3:2 for all 167 Mhz
450 drt |= ((index&3)<<2); */ /* set CAS latency */
451 if((index&0x0ff00)<=0x03000) {
452 drt |= (1<<8); /* Trp RAS Precharge */
454 drt |= (2<<8); /* Trp RAS Precharge */
457 /* Trcd RAS to CAS delay */
458 if((index2&0x0ff)<=0x030) {
464 /* Tdal Write auto precharge recovery delay */
468 drt |= (2<<14); /* spd 41, but only one choice */
470 drt |= (2<<16); /* Twr not defined for DDR docs say 2 */
473 if((index&0x0ff0000)<=0x0180000) {
475 } else if((index&0x0ff0000)<=0x0300000) {
481 /* Trfc Auto refresh cycle time */
482 if((index2&0x0ff0000)<=0x0480000) {
484 } else if((index2&0x0ff0000)<=0x0780000) {
489 /* Docs state to use 99 for all 167 Mhz */
492 else if(value <= 0x75) { /* 133 Mhz */
493 drt |= ((index&3)<<2); /* set CAS latency */
494 if((index&0x0ff00)<=0x03c00) {
495 drt |= (1<<8); /* Trp RAS Precharge */
497 drt |= (2<<8); /* Trp RAS Precharge */
500 /* Trcd RAS to CAS delay */
501 if((index2&0x0ff)<=0x03c) {
507 /* Tdal Write auto precharge recovery delay */
511 drt |= (2<<14); /* spd 41, but only one choice */
513 drt |= (1<<16); /* Twr not defined for DDR docs say 1 */
516 if((index&0x0ff0000)<=0x01e0000) {
518 } else if((index&0x0ff0000)<=0x03c0000) {
524 /* Trfc Auto refresh cycle time */
525 if((index2&0x0ff0000)<=0x04b0000) {
527 } else if((index2&0x0ff0000)<=0x0780000) {
533 /* Based on CAS latency */
541 die("Invalid SPD 9 bus speed.\n");
545 pci_write_config32(PCI_DEV(0, 0x00, 0), DRT, drt);
/*
 * Determine DRAM type (DDR/DDR2), speed grade, refresh rate and ECC
 * capability from SPD across all populated DIMMs, combine with the FSB
 * speed (from an MSR) and CMOS ECC preference, and program the DRC
 * (DRAM Controller Mode) register.  Dies on mixed DDR/DDR2 or non-ECC
 * modules.  NOTE(review): interior lines are elided in this view.
 */
550 static int spd_set_dram_controller_mode(const struct mem_controller *ctrl,
558 unsigned char dram_type = 0xff; /* start above any real type, minimize down */
559 unsigned char ecc = 0xff;
560 unsigned char rate = 62; /* worst (slowest) refresh code as a starting point */
561 static const unsigned char spd_rates[6] = {15,3,7,7,62,62};
562 static const unsigned char drc_rates[5] = {0,15,7,62,3};
563 static const unsigned char fsb_conversion[4] = {3,1,3,2};
566 drc = pci_read_config32(PCI_DEV(0, 0x00, 0), DRC);
567 for(cnt=0; cnt < 4; cnt++) {
568 if (!(dimm_mask & (1 << cnt))) {
571 value = spd_read_byte(ctrl->channel0[cnt], 11); /* ECC */
572 reg = spd_read_byte(ctrl->channel0[cnt], 2); /* Type */
573 if (value == 2) { /* RAM is ECC capable */
579 die("ERROR - Mixed DDR & DDR2 RAM\n");
582 else if ( reg == 7 ) {
586 else if ( ecc > 1 ) {
587 die("ERROR - Mixed DDR & DDR2 RAM\n");
591 die("ERROR - RAM not DDR\n");
595 die("ERROR - Non ECC memory dimm\n");
598 value = spd_read_byte(ctrl->channel0[cnt], 12); /*refresh rate*/
599 value &= 0x0f; /* clip self refresh bit */
600 if (value > 5) goto hw_err;
/* Keep the fastest (numerically smallest) required refresh rate. */
601 if (rate > spd_rates[value])
602 rate = spd_rates[value];
604 value = spd_read_byte(ctrl->channel0[cnt], 9); /* cycle time */
605 if (value > 0x75) goto hw_err;
/* Minimize dram_type to the slowest speed any DIMM supports. */
607 if (dram_type >= 2) {
608 if (reg == 8) { /*speed is good, is this ddr2?*/
610 } else { /* not ddr2 so use ddr333 */
615 else if (value <= 0x60) {
616 if (dram_type >= 1) dram_type = 1;
618 else dram_type = 0; /* ddr266 */
/* CMOS option can force ECC off even on ECC-capable modules. */
622 if (read_option(CMOS_VSTART_ECC_memory,CMOS_VLEN_ECC_memory,1) == 0) {
623 ecc = 0; /* ECC off in CMOS so disable it */
624 print_debug("ECC off\n");
627 print_debug("ECC on\n");
629 drc &= ~(3 << 20); /* clear the ecc bits */
630 drc |= (ecc << 20); /* or in the calculated ecc bits */
631 for ( cnt = 1; cnt < 5; cnt++)
632 if (drc_rates[cnt] == rate)
635 drc &= ~(7 << 8); /* clear the rate bits */
639 if (reg == 8) { /* independent clocks */
643 drc |= (1 << 26); /* set the overlap bit - the factory BIOS does */
644 drc |= (1 << 27); /* set DED retry enable - the factory BIOS does */
647 value = msr.lo >> 16;
649 drc &= ~(3 << 2); /* set the front side bus */
650 drc |= (fsb_conversion[value] << 2);
651 drc &= ~(3 << 0); /* set the dram type */
652 drc |= (dram_type << 0);
657 die("Bad SPD value\n");
658 /* If an hw_error occurs report that I have no memory */
/*
 * Entry point for SPD-based register setup: detect DIMMs and bail out
 * with an error message when no socket on channel 0 is populated.
 */
665 static void sdram_set_spd_registers(const struct mem_controller *ctrl)
669 /* Test if we can read the spd and if ram is ddr or ddr2 */
670 dimm_mask = spd_detect_dimms(ctrl);
671 if (!(dimm_mask & ((1 << DIMM_SOCKETS) - 1))) {
672 print_err("No memory for this cpu\n");
/* Small fixed delay helper; body is elided in this view. */
678 static void do_delay(void)
/*
 * Select clock-generator PLL strap pins from the DRC-encoded DIMM
 * speed (bits 1:0) and FSB speed (bits 3:2), then hand them to the
 * board-specific hook mainboard_set_e7520_pll().
 * NOTE(review): the 'pins' assignments themselves are elided here.
 */
686 static void pll_setup(uint32_t drc)
689 if(drc&3) { /* DDR 333 or DDR 400 */
690 if((drc&0x0c) == 0x0c) { /* FSB 200 */
693 else if((drc&0x0c) == 0x08) { /* FSB 167 */
696 else if(drc&1){ /* FSB 133 DDR 333 */
699 else { /* FSB 133 DDR 400 */
704 if((drc&0x08) == 0x08) { /* FSB 200 or 167 */
711 mainboard_set_e7520_pll(pins);
715 #define TIMEOUT_LOOPS 300000
717 #define DCALCSR 0x100
718 #define DCALADDR 0x104
719 #define DCALDATA 0x108
/*
 * Program on-DIMM termination (ODT) for DDR2.  Classifies each slot as
 * Empty, Single- or Double-sided by comparing consecutive DRB boundary
 * values, picks a canned ODT pattern word for the population, writes it
 * to register 0xb0, and issues an EMRS (DCAL opcode 3) to every rank to
 * latch the termination settings.
 * NOTE(review): interior lines (t4 accumulation, else branches) are
 * elided; the pattern selection comments (EEES etc.) come from the
 * original code.
 */
721 static void set_on_dimm_termination_enable(const struct mem_controller *ctrl)
728 /* Set up northbridge values */
730 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xf0000180);
731 /* Figure out which slots are Empty, Single, or Double sided */
732 for(i=0,t4=0,c2=0;i<8;i+=2) {
733 c1 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+i);
734 if(c1 == c2) continue; /* boundary unchanged: slot is empty */
735 c2 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+1+i);
743 if( ((t4>>8)&0x0f) == 0 ) {
744 data32 = 0x00000010; /* EEES */
747 if ( ((t4>>16)&0x0f) == 0 ) {
748 data32 = 0x00003132; /* EESS */
751 if ( ((t4>>24)&0x0f) == 0 ) {
752 data32 = 0x00335566; /* ESSS */
755 data32 = 0x77bbddee; /* SSSS */
759 if( ((t4>>8)&0x0f) == 0 ) {
760 data32 = 0x00003132; /* EEED */
763 if ( ((t4>>8)&0x0f) == 2 ) {
764 data32 = 0xb373ecdc; /* EEDD */
767 if ( ((t4>>16)&0x0f) == 0 ) {
768 data32 = 0x00b3a898; /* EESD */
771 data32 = 0x777becdc; /* ESSD */
774 die("Error - First dimm slot empty\n");
777 print_debug("ODT Value = ");
778 print_debug_hex32(data32);
781 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xb0, data32);
/* Latch ODT into each rank via EMRS; poll DCALCSR bit 31 for completion. */
783 for(dimm=0;dimm<8;dimm+=1) {
785 write32(BAR+DCALADDR, 0x0b840001);
786 write32(BAR+DCALCSR, 0x83000003 | (dimm << 20));
788 for(i=0;i<1001;i++) {
789 data32 = read32(BAR+DCALCSR);
790 if(!(data32 & (1<<31)))
/*
 * Receive-enable calibration: run the DCAL receive-enable test
 * (opcode 4) per rank, scan the returned bit pattern for signal edges
 * to derive a per-byte-lane timing value, accumulate the values into
 * recena (channel A) / recenb (channel B), apply an errata-style
 * fix-up pass, and finally write the results to MCH registers
 * 0x150/0x154.
 * NOTE(review): this routine is heavily elided in this view — loop
 * bounds, edge bookkeeping and several branches are missing — so
 * comments below only annotate the visible statements.
 */
795 static void set_receive_enable(const struct mem_controller *ctrl)
806 uint32_t data32_dram;
807 uint32_t dcal_data32_0;
808 uint32_t dcal_data32_1;
809 uint32_t dcal_data32_2;
810 uint32_t dcal_data32_3;
/* Kick off the receive-enable calibration command for each rank and
 * poll DCALCSR bit 31 until the engine is done. */
815 for(dimm=0;dimm<8;dimm+=1) {
818 write32(BAR+DCALDATA+(17*4), 0x04020000);
819 write32(BAR+DCALCSR, 0x83800004 | (dimm << 20));
821 for(i=0;i<1001;i++) {
822 data32 = read32(BAR+DCALCSR);
823 if(!(data32 & (1<<31)))
/* Fetch the 128-bit calibration result (two possible windows). */
829 dcal_data32_0 = read32(BAR+DCALDATA + 0);
830 dcal_data32_1 = read32(BAR+DCALDATA + 4);
831 dcal_data32_2 = read32(BAR+DCALDATA + 8);
832 dcal_data32_3 = read32(BAR+DCALDATA + 12);
835 dcal_data32_0 = read32(BAR+DCALDATA + 16);
836 dcal_data32_1 = read32(BAR+DCALDATA + 20);
837 dcal_data32_2 = read32(BAR+DCALDATA + 24);
838 dcal_data32_3 = read32(BAR+DCALDATA + 28);
841 /* check if bank is installed */
842 if((dcal_data32_0 == 0) && (dcal_data32_2 == 0))
844 /* Calculate the timing value */
/* Scan from the high bit downward looking for signal transitions. */
847 for(i=0,edge=0,bit=63,cnt=31,data32r=0,
848 work32l=dcal_data32_1,work32h=dcal_data32_3;
851 if(work32l & (1<<cnt))
854 work32l = dcal_data32_0;
855 work32h = dcal_data32_2;
861 if(!(work32l & (1<<cnt)))
864 work32l = dcal_data32_0;
865 work32h = dcal_data32_2;
873 data32 = ((bit%8) << 1);
874 if(work32h & (1<<cnt))
899 work32l = dcal_data32_0;
900 work32h = dcal_data32_2;
906 if(!(work32l & (1<<cnt)))
909 if(work32l & (1<<cnt))
912 data32 = (((cnt-1)%8)<<1);
913 if(work32h & (1<<(cnt-1))) {
916 /* test for frame edge cross overs */
917 if((edge == 1) && (data32 > 12) &&
918 (((recen+16)-data32) < 3)) {
922 if((edge == 2) && (data32 < 4) &&
923 ((recen - data32) > 12)) {
927 if(((recen+3) >= data32) && ((recen-3) <= data32))
937 recen+=2; /* this is not in the spec, but matches
938 the factory output, and has less failure */
939 recen <<= (dimm/2) * 8; /* place in the rank's byte of the result word */
948 /* Check for Errata problem: fix up any per-byte nibble > 7 */
949 for(i=cnt=0;i<32;i+=8) {
950 if (((recena>>i)&0x0f)>7) {
954 if((recena>>i)&0x0f) {
960 cnt = (cnt&0x0f) - (cnt>>16);
963 if(((recena>>i)&0x0f)>7) {
964 recena &= ~(0x0f<<i);
971 if(((recena>>i)&0x0f)<8) {
972 recena &= ~(0x0f<<i);
/* Same errata fix-up for channel B. */
978 for(i=cnt=0;i<32;i+=8) {
979 if (((recenb>>i)&0x0f)>7) {
983 if((recenb>>i)&0x0f) {
989 cnt = (cnt&0x0f) - (cnt>>16);
992 if(((recenb>>i)&0x0f)>7) {
993 recenb &= ~(0x0f<<i);
/* NOTE(review): 'recenb>>8' below does not mirror 'recena>>i' in the
 * matching channel-A branch above — looks like a copy/paste slip;
 * confirm against the intended errata workaround before changing. */
1000 if(((recenb>>8)&0x0f)<8) {
1001 recenb &= ~(0x0f<<i);
1008 print_debug("Receive enable A = ");
1009 print_debug_hex32(recena);
1010 print_debug(", Receive enable B = ");
1011 print_debug_hex32(recenb);
1014 /* clear out the calibration area */
1015 write32(BAR+DCALDATA+(16*4), 0x00000000);
1016 write32(BAR+DCALDATA+(17*4), 0x00000000);
1017 write32(BAR+DCALDATA+(18*4), 0x00000000);
1018 write32(BAR+DCALDATA+(19*4), 0x00000000);
1021 write32(BAR+DCALCSR, 0x0000000f);
/* Commit the calibrated receive-enable values for both channels. */
1023 write32(BAR+0x150, recena);
1024 write32(BAR+0x154, recenb);
/*
 * Main SDRAM bring-up sequence: program gearing and DRC, un-gate the
 * DIMM clocks, size RAM (DRB/DRA/DRT), then walk the JEDEC DDR/DDR2
 * init sequence per chip-select via the DCAL engine (NOP, precharge,
 * EMRS, MRS with DLL reset, refreshes, MRS normal), calibrate ODT and
 * receive enable, enable refresh, zero memory to seed ECC, and finally
 * enable scrubbing and cache over low memory.
 * NOTE(review): this function is heavily elided in this view and may
 * continue past the visible end of the chunk; comments annotate only
 * the visible statements.
 */
1028 static void sdram_enable(int controllers, const struct mem_controller *ctrl)
1039 volatile unsigned long *iptrv;
/* Gearing tables: one 4-word clkgr entry per (FSB, DIMM speed) pair. */
1044 static const struct {
1047 /* FSB 133 DIMM 266 */
1048 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1049 /* FSB 133 DIMM 333 */
1050 {{ 0x00000000, 0x00000000, 0x00000000, 0x00000000}},
1051 /* FSB 133 DIMM 400 */
1052 {{ 0x00000120, 0x00000000, 0x00000032, 0x00000010}},
1053 /* FSB 167 DIMM 266 */
1054 {{ 0x00005432, 0x00001000, 0x00004325, 0x00000000}},
1055 /* FSB 167 DIMM 333 */
1056 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1057 /* FSB 167 DIMM 400 */
1058 {{ 0x00154320, 0x00000000, 0x00065432, 0x00010000}},
1059 /* FSB 200 DIMM 266 */
1060 {{ 0x00000032, 0x00000010, 0x00000120, 0x00000000}},
1061 /* FSB 200 DIMM 333 */
1062 {{ 0x00065432, 0x00010000, 0x00154320, 0x00000000}},
1063 /* FSB 200 DIMM 400 */
1064 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
/* DQS timing pattern written to BAR+0x200..0x25c below. */
1067 static const uint32_t dqs_data[] = {
1068 0xffffffff, 0xffffffff, 0x000000ff,
1069 0xffffffff, 0xffffffff, 0x000000ff,
1070 0xffffffff, 0xffffffff, 0x000000ff,
1071 0xffffffff, 0xffffffff, 0x000000ff,
1072 0xffffffff, 0xffffffff, 0x000000ff,
1073 0xffffffff, 0xffffffff, 0x000000ff,
1074 0xffffffff, 0xffffffff, 0x000000ff,
1075 0xffffffff, 0xffffffff, 0x000000ff};
1077 mask = spd_detect_dimms(ctrl);
1078 print_debug("Starting SDRAM Enable\n");
1081 #ifdef DIMM_MAP_LOGICAL
1082 pci_write_config32(PCI_DEV(0, 0x00, 0), DRM,
1083 0x00210000 | DIMM_MAP_LOGICAL)
1085 pci_write_config32(PCI_DEV(0, 0x00, 0), DRM, 0x00211248);
1087 /* set dram type and Front Side Bus freq. */
1088 drc = spd_set_dram_controller_mode(ctrl, mask);
1090 die("Error calculating DRC\n");
1093 data32 = drc & ~(3 << 20); /* clear ECC mode */
1094 data32 = data32 & ~(7 << 8); /* clear refresh rates */
1095 data32 = data32 | (1 << 5); /* temp turn off of ODT */
1096 /* Set gearing, then dram controller mode */
1097 /* drc bits 1:0 = DIMM speed, bits 3:2 = FSB speed */
1098 for(iptr = gearing[(drc&3)+((((drc>>2)&3)-1)*3)].clkgr,cnt=0;
1100 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xa0+(cnt*4), iptr[cnt]);
1103 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1105 /* turn the clocks on */
1107 pci_write_config16(PCI_DEV(0, 0x00, 0), CKDIS, 0x0000);
1109 /* 0x9a DDRCSR Take subsystem out of idle */
1110 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), DDRCSR);
1111 data16 &= ~(7 << 12);
1112 data16 |= (3 << 12); /* use dual channel lock step */
1113 pci_write_config16(PCI_DEV(0, 0x00, 0), DDRCSR, data16);
1115 /* program row size DRB */
1116 spd_set_ram_size(ctrl, mask);
1118 /* program page size DRA */
1119 spd_set_row_attributes(ctrl, mask);
1121 /* program DRT timing values */
1122 cas_latency = spd_set_drt_attributes(ctrl, mask, drc);
1124 for(i=0;i<8;i++) { /* loop through each dimm to test for row */
1125 print_debug("DIMM ");
1126 print_debug_hex8(i);
1131 write32(BAR + 0x100, (0x03000000 | (i<<20)));
1133 write32(BAR+0x100, (0x83000000 | (i<<20)));
1135 data32 = read32(BAR+DCALCSR);
1136 while(data32 & 0x80000000)
1137 data32 = read32(BAR+DCALCSR);
/* NOP command to every chip select. */
1144 for(cs=0;cs<8;cs++) {
1145 write32(BAR + DCALCSR, (0x83000000 | (cs<<20)));
1146 data32 = read32(BAR+DCALCSR);
1147 while(data32 & 0x80000000)
1148 data32 = read32(BAR+DCALCSR);
1151 /* Precharge all banks */
1153 for(cs=0;cs<8;cs++) {
1154 if ((drc & 3) == 2) /* DDR2 */
1155 write32(BAR+DCALADDR, 0x04000000);
1157 write32(BAR+DCALADDR, 0x00000000);
1158 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1159 data32 = read32(BAR+DCALCSR);
1160 while(data32 & 0x80000000)
1161 data32 = read32(BAR+DCALCSR);
1164 /* EMRS dll's enabled */
1166 for(cs=0;cs<8;cs++) {
1167 if ((drc & 3) == 2) /* DDR2 */
1168 /* fixme hard code AL additive latency */
1169 write32(BAR+DCALADDR, 0x0b940001);
1171 write32(BAR+DCALADDR, 0x00000001);
1172 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1173 data32 = read32(BAR+DCALCSR);
1174 while(data32 & 0x80000000)
1175 data32 = read32(BAR+DCALCSR);
1177 /* MRS reset dll's */
1179 if ((drc & 3) == 2) { /* DDR2 */
1180 if(cas_latency == 30)
1181 mode_reg = 0x053a0000;
1183 mode_reg = 0x054a0000;
1186 if(cas_latency == 20)
1187 mode_reg = 0x012a0000;
1188 else /* CAS Latency 2.5 */
1189 mode_reg = 0x016a0000;
1191 for(cs=0;cs<8;cs++) {
1192 write32(BAR+DCALADDR, mode_reg);
1193 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1194 data32 = read32(BAR+DCALCSR);
1195 while(data32 & 0x80000000)
1196 data32 = read32(BAR+DCALCSR);
1199 /* Precharge all banks */
1203 for(cs=0;cs<8;cs++) {
1204 if ((drc & 3) == 2) /* DDR2 */
1205 write32(BAR+DCALADDR, 0x04000000);
1207 write32(BAR+DCALADDR, 0x00000000);
1208 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1209 data32 = read32(BAR+DCALCSR);
1210 while(data32 & 0x80000000)
1211 data32 = read32(BAR+DCALCSR);
1214 /* Do 2 refreshes */
1216 for(cs=0;cs<8;cs++) {
1217 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1218 data32 = read32(BAR+DCALCSR);
1219 while(data32 & 0x80000000)
1220 data32 = read32(BAR+DCALCSR);
1223 for(cs=0;cs<8;cs++) {
1224 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1225 data32 = read32(BAR+DCALCSR);
1226 while(data32 & 0x80000000)
1227 data32 = read32(BAR+DCALCSR);
1230 /* for good luck do 6 more */
1231 for(cs=0;cs<8;cs++) {
1232 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1235 for(cs=0;cs<8;cs++) {
1236 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1239 for(cs=0;cs<8;cs++) {
1240 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1243 for(cs=0;cs<8;cs++) {
1244 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1247 for(cs=0;cs<8;cs++) {
1248 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1251 for(cs=0;cs<8;cs++) {
1252 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1255 /* MRS reset dll's normal */
1257 for(cs=0;cs<8;cs++) {
/* Clear bit 24 (DLL reset) from the previously used mode register. */
1258 write32(BAR+DCALADDR, (mode_reg & ~(1<<24)));
1259 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1260 data32 = read32(BAR+DCALCSR);
1261 while(data32 & 0x80000000)
1262 data32 = read32(BAR+DCALCSR);
1265 /* Do only if DDR2 EMRS dll's enabled */
1266 if ((drc & 3) == 2) { /* DDR2 */
1268 for(cs=0;cs<8;cs++) {
1269 write32(BAR+DCALADDR, (0x0b940001));
1270 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1271 data32 = read32(BAR+DCALCSR);
1272 while(data32 & 0x80000000)
1273 data32 = read32(BAR+DCALCSR);
1279 write32(BAR+DCALCSR, 0x0000000f);
1281 /* DDR1 This is test code to copy some codes in the factory setup */
1283 write32(BAR, 0x00100000);
1285 if ((drc & 3) == 2) { /* DDR2 */
1286 /* enable on dimm termination */
1287 set_on_dimm_termination_enable(ctrl);
1290 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xa0000000 );
1293 /* receive enable calibration */
1294 set_receive_enable(ctrl);
1297 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1298 for(i = 0, cnt = (BAR+0x200); i < 24; i++, cnt+=4) {
1299 write32(cnt, dqs_data[i]);
1301 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1303 /* Enable refresh */
1305 data32 = drc & ~(3 << 20); /* clear ECC mode */
1306 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1307 write32(BAR+DCALCSR, 0x0008000f);
1309 /* clear memory and init ECC */
1310 print_debug("Clearing memory\n");
1311 for(i=0;i<64;i+=4) {
1312 write32(BAR+DCALDATA+i, 0x00000000);
1315 for(cs=0;cs<8;cs++) {
1316 write32(BAR+DCALCSR, (0x830831d8 | (cs<<20)));
1317 data32 = read32(BAR+DCALCSR);
1318 while(data32 & 0x80000000)
1319 data32 = read32(BAR+DCALCSR);
1322 /* Bring memory subsystem on line */
1323 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1324 data32 |= (1 << 31);
1325 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x98, data32);
1326 /* wait for completion */
1327 print_debug("Waiting for mem complete\n");
1329 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1330 if( (data32 & (1<<31)) == 0)
1333 print_debug("Done\n");
1335 /* Set initialization complete */
1338 data32 = drc & ~(3 << 20); /* clear ECC mode */
1339 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1341 /* Set the ecc mode */
1342 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, drc);
1344 /* Enable memory scrubbing */
1346 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), MCHSCRB);
1348 data16 |= ((2 << 2) | (2 << 0));
1349 pci_write_config16(PCI_DEV(0, 0x00, 0), MCHSCRB, data16);
1351 /* The memory is now setup, use it */
1352 cache_lbmem(MTRR_TYPE_WRBACK);