2 * This file is part of the coreboot project.
4 * Copyright (C) 2005 Eric W. Biederman and Tom Zimmerman
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 #include <cpu/x86/mtrr.h>
21 #include <cpu/x86/cache.h>
25 #include <pc80/mc146818rtc.h>
26 #if CONFIG_HAVE_OPTION_TABLE
27 #include "option_table.h"
30 #define BAR 0x40000000
/*
 * sdram_set_registers() - program the static memory-controller setup
 * registers from a fixed table.
 *
 * register_values[] is consumed three entries at a time:
 *   [i]   PCI address of the register (device/function + offset),
 *   [i+1] AND mask (bits to preserve),
 *   [i+2] OR value (bits to set),
 * and the loop applies reg = (reg & mask) | value for each triple.
 * NOTE(review): interior lines of this function are elided in this view;
 * variable declarations and loop close are not visible here.
 */
32 static void sdram_set_registers(const struct mem_controller *ctrl)
34 static const unsigned int register_values[] = {
36 /* CKDIS 0x8c disable clocks */
37 PCI_ADDR(0, 0x00, 0, CKDIS), 0xffff0000, 0x0000ffff,
39 /* 0x9c Device present and extended RAM control
40 * DEVPRES is very touchy, hard code the initialization
41 * of PCI-E ports here.
43 PCI_ADDR(0, 0x00, 0, DEVPRES), 0x00000000, 0x07020801 | DEVPRES_CONFIG,
45 /* 0xc8 Remap RAM base and limit off */
46 PCI_ADDR(0, 0x00, 0, REMAPLIMIT), 0x00000000, 0x03df0000,
49 PCI_ADDR(0, 0x00, 0, 0xd8), 0x00000000, 0xb5930000,
50 PCI_ADDR(0, 0x00, 0, 0xe8), 0x00000000, 0x00004a2a,
53 PCI_ADDR(0, 0x00, 0, MCHCFG0), 0xfce0ffff, 0x00006000, /* 6000 */
56 PCI_ADDR(0, 0x00, 0, PAM-1), 0xcccccc7f, 0x33333000,
57 PCI_ADDR(0, 0x00, 0, PAM+3), 0xcccccccc, 0x33333333,
60 PCI_ADDR(0, 0x00, 0, DEVPRES1), 0xffbffff, (1<<22)|(6<<2) | DEVPRES1_CONFIG,
63 PCI_ADDR(0, 0x00, 0, IURBASE), 0x00000fff, BAR |0,
/* Walk the table in steps of three (addr, mask, value). */
68 max = ARRAY_SIZE(register_values);
69 for(i = 0; i < max; i += 3) {
/* Strip the register offset out of the encoded PCI address to get the
 * device handle; the low byte is the config-space offset. */
73 dev = (register_values[i] & ~0xff) - PCI_DEV(0, 0x00, 0) + PCI_DEV(0, 0x00, 0);
74 where = register_values[i] & 0xff;
75 reg = pci_read_config32(dev, where);
76 reg &= register_values[i+1];
77 reg |= register_values[i+2];
78 pci_write_config32(dev, where, reg);
80 print_spew("done.\n");
/*
 * spd_get_dimm_size() - read SPD bytes from a DIMM and compute the
 * log2 size (in bits) of each side of the module.
 *
 * @device: SMBus address of the DIMM's SPD EEPROM.
 * Returns a struct dimm_size with side1/side2 as log2 bit counts;
 * on an SPD read error the hw_err path reports zero memory.
 * SPD bytes used: 2 (type), 3 (rows), 4 (columns), 17 (banks),
 * 6/7 (module data width), 5 (physical banks).
 * NOTE(review): interior lines are elided in this view (sz init,
 * labels, return) — verify against the full file.
 */
88 static struct dimm_size spd_get_dimm_size(unsigned device)
90 /* Calculate the log base 2 size of a DIMM in bits */
98 value = spd_read_byte(device, 2); /* type */
99 if (value < 0) goto hw_err;
/* SPD type 8 identifies DDR2 SDRAM. */
100 if (value == 8) ddr2 = 1;
102 /* Note it might be easier to use byte 31 here, it has the DIMM size as
103 * a multiple of 4MB. The way we do it now we can size both
104 * sides of an assymetric dimm.
106 value = spd_read_byte(device, 3); /* rows */
107 if (value < 0) goto hw_err;
108 if ((value & 0xf) == 0) goto val_err;
109 sz.side1 += value & 0xf;
111 value = spd_read_byte(device, 4); /* columns */
112 if (value < 0) goto hw_err;
113 if ((value & 0xf) == 0) goto val_err;
114 sz.side1 += value & 0xf;
116 value = spd_read_byte(device, 17); /* banks */
117 if (value < 0) goto hw_err;
118 if ((value & 0xff) == 0) goto val_err;
119 sz.side1 += log2(value & 0xff);
121 /* Get the module data width and convert it to a power of two */
122 value = spd_read_byte(device, 7); /* (high byte) */
123 if (value < 0) goto hw_err;
127 low = spd_read_byte(device, 6); /* (low byte) */
128 if (low < 0) goto hw_err;
129 value = value | (low & 0xff);
/* Only 64-bit (non-ECC) and 72-bit (ECC) module widths are accepted. */
130 if ((value != 72) && (value != 64)) goto val_err;
131 sz.side1 += log2(value);
134 value = spd_read_byte(device, 5); /* number of physical banks */
136 if (value < 0) goto hw_err;
139 if (value == 1) goto out;
140 if (value != 2) goto val_err;
142 /* Start with the symmetrical case */
145 value = spd_read_byte(device, 3); /* rows */
146 if (value < 0) goto hw_err;
147 if ((value & 0xf0) == 0) goto out; /* If symmetrical we are done */
148 sz.side2 -= (value & 0x0f); /* Subtract out rows on side 1 */
149 sz.side2 += ((value >> 4) & 0x0f); /* Add in rows on side 2 */
151 value = spd_read_byte(device, 4); /* columns */
152 if (value < 0) goto hw_err;
153 if ((value & 0xff) == 0) goto val_err;
154 sz.side2 -= (value & 0x0f); /* Subtract out columns on side 1 */
155 sz.side2 += ((value >> 4) & 0x0f); /* Add in columsn on side 2 */
159 die("Bad SPD value\n");
160 /* If an hw_error occurs report that I have no memory */
/*
 * spd_set_ram_size() - size each populated DIMM and program the DRB
 * (DRAM Row Boundary) registers with cumulative 64MB multiples, then
 * set TOM (top of memory) and TOLM (top of low memory).
 *
 * @ctrl:      memory-controller descriptor (channel0 SPD addresses used).
 * @dimm_mask: bitmask of populated sockets; bit i = channel0 socket i.
 * Returns -1 on SPD error.
 * NOTE(review): interior lines are elided in this view (the bits->64MB
 * conversion of sz.side1/side2 and the TOLM clamp) — confirm in full file.
 */
169 static long spd_set_ram_size(const struct mem_controller *ctrl, long dimm_mask)
174 for(i = cum = 0; i < DIMM_SOCKETS; i++) {
176 if (dimm_mask & (1 << i)) {
177 sz = spd_get_dimm_size(ctrl->channel0[i]);
179 return -1; /* Report SPD error */
181 /* convert bits to multiples of 64MB */
183 cum += (1 << sz.side1);
/* Each socket uses two DRB entries: side 1 then side 2. */
185 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
188 cum += (1 << sz.side2);
190 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
/* Empty socket: repeat the running total so the boundary is flat. */
193 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
194 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
197 /* set TOM top of memory 0xcc */
198 pci_write_config16(PCI_DEV(0, 0x00, 0), TOM, cum);
199 /* set TOLM top of low memory */
205 pci_write_config16(PCI_DEV(0, 0x00, 0), TOLM, cum);
/*
 * spd_detect_dimms() - probe SPD byte 2 (memory type) on both channels
 * and build a presence bitmask.
 *
 * Bits [DIMM_SOCKETS-1:0] mark channel0 sockets, the next DIMM_SOCKETS
 * bits mark channel1.  Type 7 (DDR) and type 8 (DDR2) count as present.
 * NOTE(review): dimm_mask init/return lines are elided in this view.
 */
209 static unsigned int spd_detect_dimms(const struct mem_controller *ctrl)
214 for(i = 0; i < DIMM_SOCKETS; i++) {
217 device = ctrl->channel0[i];
219 byte = spd_read_byte(device, 2); /* Type */
220 if ((byte == 7) || (byte == 8)) {
221 dimm_mask |= (1 << i);
224 device = ctrl->channel1[i];
226 byte = spd_read_byte(device, 2);
227 if ((byte == 7) || (byte == 8)) {
228 dimm_mask |= (1 << (i + DIMM_SOCKETS));
/*
 * spd_set_row_attributes() - compute each DIMM's page size from SPD and
 * program the DRA (DRAM Row Attribute) register.
 *
 * For every populated socket, rows+columns+banks+device-width are read
 * from SPD (bytes 3, 4, 17, 13) and folded into a per-socket nibble of
 * the 32-bit DRA value (4 bits per side, 8 bits per socket).
 * NOTE(review): interior lines are elided in this view (reg seeding,
 * the page-size encoding between lines 268-274, and the return) —
 * verify against the full file.
 */
235 static int spd_set_row_attributes(const struct mem_controller *ctrl,
245 for(cnt=0; cnt < 4; cnt++) {
246 if (!(dimm_mask & (1 << cnt))) {
250 value = spd_read_byte(ctrl->channel0[cnt], 3); /* rows */
251 if (value < 0) goto hw_err;
252 if ((value & 0xf) == 0) goto val_err;
255 value = spd_read_byte(ctrl->channel0[cnt], 4); /* columns */
256 if (value < 0) goto hw_err;
257 if ((value & 0xf) == 0) goto val_err;
260 value = spd_read_byte(ctrl->channel0[cnt], 17); /* banks */
261 if (value < 0) goto hw_err;
262 if ((value & 0xff) == 0) goto val_err;
263 reg += log2(value & 0xff);
265 /* Get the device width and convert it to a power of two */
266 value = spd_read_byte(ctrl->channel0[cnt], 13);
267 if (value < 0) goto hw_err;
268 value = log2(value & 0xff);
/* A total below 2^27 bits cannot be a valid page geometry. */
270 if(reg < 27) goto hw_err;
274 dra += reg << (cnt*8);
/* SPD byte 5: number of physical banks — double-sided DIMMs also fill
 * the upper nibble of this socket's DRA byte. */
275 value = spd_read_byte(ctrl->channel0[cnt], 5);
277 dra += reg << ((cnt*8)+4);
281 pci_write_config32(PCI_DEV(0, 0x00, 0), DRA, dra);
285 die("Bad SPD value\n");
286 /* If an hw_error occurs report that I have no memory */
/*
 * spd_set_drt_attributes() - derive DRAM timing values (CAS latency,
 * Trp, Trcd, Tdal, Tras, Trfc, Twr) from SPD data and program the DRT
 * (DRAM Timing) register.
 *
 * @ctrl:      memory-controller descriptor.
 * @dimm_mask: bitmask of populated channel0 sockets.
 * @drc:       previously computed DRAM-controller mode; bits 1:0 select
 *             the DIMM speed and index cycle_time[].
 * The worst (slowest) timing across all populated DIMMs is kept in the
 * packed accumulators `index` (bytes: CAS / SPD 27 / SPD 28) and
 * `index2` (bytes: SPD 29 / SPD 41 / SPD 42), then translated into DRT
 * bit fields per speed grade (200/167/133 MHz).
 * NOTE(review): many interior lines are elided in this view (variable
 * declarations, several else-branches, the accumulator updates after
 * each "index &= ~..." and the return of cas_latency) — do not infer
 * exact encodings beyond the visible comments.
 */
294 static int spd_set_drt_attributes(const struct mem_controller *ctrl,
295 long dimm_mask, uint32_t drc)
/* cycle_time[] is in SPD byte-9 format: 0x75=7.5ns(133MHz),
 * 0x60=6.0ns(167MHz), 0x50=5.0ns(200MHz). */
306 static const unsigned char cycle_time[3] = {0x75,0x60,0x50};
307 static const int latency_indicies[] = { 26, 23, 9 };
310 drt = pci_read_config32(PCI_DEV(0, 0x00, 0), DRT);
311 drt &= 3; /* save bits 1:0 */
/* Find the first populated socket; its SPD type drives DDR/DDR2 paths. */
313 for(first_dimm = 0; first_dimm < 4; first_dimm++) {
314 if (dimm_mask & (1 << first_dimm))
319 value = spd_read_byte(ctrl->channel0[first_dimm], 2);
321 drt |= (3<<5); /* back to bark write turn around & cycle add */
324 drt |= (3<<18); /* Trasmax */
/* Pass 1: find the lowest CAS latency every DIMM supports at the
 * selected clock. SPD byte 18 is a bitmask of supported latencies. */
326 for(cnt=0; cnt < 4; cnt++) {
327 if (!(dimm_mask & (1 << cnt))) {
330 reg = spd_read_byte(ctrl->channel0[cnt], 18); /* CAS Latency */
331 /* Compute the lowest cas latency supported */
332 latency = log2(reg) -2;
334 /* Loop through and find a fast clock with a low latency */
335 for(index = 0; index < 3; index++, latency++) {
336 if ((latency < 2) || (latency > 4) ||
337 (!(reg & (1 << latency)))) {
340 value = spd_read_byte(ctrl->channel0[cnt],
341 latency_indicies[index]);
343 if(value <= cycle_time[drc&3]) {
344 if( latency > cas_latency) {
345 cas_latency = latency;
/* Encode CAS latency as tenths: 20 = CL2.0, 25 = CL2.5, 30 = CL3.0. */
351 index = (cas_latency-2);
352 if((index)==0) cas_latency = 20;
353 else if((index)==1) cas_latency = 25;
354 else cas_latency = 30;
/* Pass 2: accumulate the worst-case raw timing bytes across DIMMs. */
356 for(cnt=0;cnt<4;cnt++) {
357 if (!(dimm_mask & (1 << cnt))) {
360 reg = spd_read_byte(ctrl->channel0[cnt], 27)&0x0ff;
361 if(((index>>8)&0x0ff)<reg) {
362 index &= ~(0x0ff << 8);
365 reg = spd_read_byte(ctrl->channel0[cnt], 28)&0x0ff;
366 if(((index>>16)&0x0ff)<reg) {
367 index &= ~(0x0ff << 16);
370 reg = spd_read_byte(ctrl->channel0[cnt], 29)&0x0ff;
371 if(((index2>>0)&0x0ff)<reg) {
372 index2 &= ~(0x0ff << 0);
375 reg = spd_read_byte(ctrl->channel0[cnt], 41)&0x0ff;
376 if(((index2>>8)&0x0ff)<reg) {
377 index2 &= ~(0x0ff << 8);
380 reg = spd_read_byte(ctrl->channel0[cnt], 42)&0x0ff;
381 if(((index2>>16)&0x0ff)<reg) {
382 index2 &= ~(0x0ff << 16);
/* Translate the accumulated worst-case timings into DRT fields,
 * branching on the selected cycle time (speed grade). */
388 value = cycle_time[drc&3];
389 if(value <= 0x50) { /* 200 MHz */
391 drt |= (2<<2); /* CAS latency 4 */
394 drt |= (1<<2); /* CAS latency 3 */
397 if((index&0x0ff00)<=0x03c00) {
398 drt |= (1<<8); /* Trp RAS Precharg */
400 drt |= (2<<8); /* Trp RAS Precharg */
403 /* Trcd RAS to CAS delay */
404 if((index2&0x0ff)<=0x03c) {
410 /* Tdal Write auto precharge recovery delay */
414 if((index2&0x0ff00)<=0x03700)
416 else if((index2&0xff00)<=0x03c00)
419 drt |= (2<<14); /* spd 41 */
421 drt |= (2<<16); /* Twr not defined for DDR docs say use 2 */
424 if((index&0x0ff0000)<=0x0140000) {
426 } else if((index&0x0ff0000)<=0x0280000) {
428 } else if((index&0x0ff0000)<=0x03c0000) {
434 /* Trfc Auto refresh cycle time */
435 if((index2&0x0ff0000)<=0x04b0000) {
437 } else if((index2&0x0ff0000)<=0x0690000) {
442 /* Docs say use 55 for all 200Mhz */
445 else if(value <= 0x60) { /* 167 Mhz */
446 /* according to new documentation CAS latency is 00
447 * for bits 3:2 for all 167 Mhz
448 drt |= ((index&3)<<2); */ /* set CAS latency */
449 if((index&0x0ff00)<=0x03000) {
450 drt |= (1<<8); /* Trp RAS Precharg */
452 drt |= (2<<8); /* Trp RAS Precharg */
455 /* Trcd RAS to CAS delay */
456 if((index2&0x0ff)<=0x030) {
462 /* Tdal Write auto precharge recovery delay */
466 drt |= (2<<14); /* spd 41, but only one choice */
468 drt |= (2<<16); /* Twr not defined for DDR docs say 2 */
471 if((index&0x0ff0000)<=0x0180000) {
473 } else if((index&0x0ff0000)<=0x0300000) {
479 /* Trfc Auto refresh cycle time */
480 if((index2&0x0ff0000)<=0x0480000) {
482 } else if((index2&0x0ff0000)<=0x0780000) {
487 /* Docs state to use 99 for all 167 Mhz */
490 else if(value <= 0x75) { /* 133 Mhz */
491 drt |= ((index&3)<<2); /* set CAS latency */
492 if((index&0x0ff00)<=0x03c00) {
493 drt |= (1<<8); /* Trp RAS Precharg */
495 drt |= (2<<8); /* Trp RAS Precharg */
498 /* Trcd RAS to CAS delay */
499 if((index2&0x0ff)<=0x03c) {
505 /* Tdal Write auto precharge recovery delay */
509 drt |= (2<<14); /* spd 41, but only one choice */
511 drt |= (1<<16); /* Twr not defined for DDR docs say 1 */
514 if((index&0x0ff0000)<=0x01e0000) {
516 } else if((index&0x0ff0000)<=0x03c0000) {
522 /* Trfc Auto refresh cycle time */
523 if((index2&0x0ff0000)<=0x04b0000) {
525 } else if((index2&0x0ff0000)<=0x0780000) {
531 /* Based on CAS latency */
539 die("Invalid SPD 9 bus speed.\n");
543 pci_write_config32(PCI_DEV(0, 0x00, 0), DRT, drt);
/*
 * spd_set_dram_controller_mode() - compute the DRC (DRAM Controller
 * mode) register value from SPD data and the CPU's FSB speed.
 *
 * Determines: ECC capability (all DIMMs must agree and be ECC), the
 * common refresh rate (slowest across DIMMs), the DRAM type/speed
 * (DDR266/333 or DDR2-400 from SPD byte 9 cycle time), and the FSB
 * field from an MSR read.  Dies on mixed DDR/DDR2 or non-ECC DIMMs.
 * NOTE(review): interior lines are elided in this view (ecc/dram_type
 * updates inside several branches, the MSR read, error labels and the
 * return of drc) — verify encodings against the full file.
 */
548 static int spd_set_dram_controller_mode(const struct mem_controller *ctrl,
556 unsigned char dram_type = 0xff;
557 unsigned char ecc = 0xff;
/* 62 is the slowest refresh code; rate only ever decreases below. */
558 unsigned char rate = 62;
559 static const unsigned char spd_rates[6] = {15,3,7,7,62,62};
560 static const unsigned char drc_rates[5] = {0,15,7,62,3};
561 static const unsigned char fsb_conversion[4] = {3,1,3,2};
564 drc = pci_read_config32(PCI_DEV(0, 0x00, 0), DRC);
565 for(cnt=0; cnt < 4; cnt++) {
566 if (!(dimm_mask & (1 << cnt))) {
569 value = spd_read_byte(ctrl->channel0[cnt], 11); /* ECC */
570 reg = spd_read_byte(ctrl->channel0[cnt], 2); /* Type */
571 if (value == 2) { /* RAM is ECC capable */
577 die("ERROR - Mixed DDR & DDR2 RAM\n");
580 else if ( reg == 7 ) {
584 else if ( ecc > 1 ) {
585 die("ERROR - Mixed DDR & DDR2 RAM\n");
589 die("ERROR - RAM not DDR\n");
593 die("ERROR - Non ECC memory dimm\n");
596 value = spd_read_byte(ctrl->channel0[cnt], 12); /*refresh rate*/
597 value &= 0x0f; /* clip self refresh bit */
598 if (value > 5) goto hw_err;
599 if (rate > spd_rates[value])
600 rate = spd_rates[value];
/* SPD byte 9: cycle time at max CAS; larger than 0x75 (7.5ns) is
 * slower than DDR266 and unsupported. */
602 value = spd_read_byte(ctrl->channel0[cnt], 9); /* cycle time */
603 if (value > 0x75) goto hw_err;
605 if (dram_type >= 2) {
606 if (reg == 8) { /*speed is good, is this ddr2?*/
608 } else { /* not ddr2 so use ddr333 */
613 else if (value <= 0x60) {
614 if (dram_type >= 1) dram_type = 1;
616 else dram_type = 0; /* ddr266 */
/* CMOS option can force ECC off even on ECC-capable DIMMs. */
620 #if CONFIG_HAVE_OPTION_TABLE
621 if (read_option(ECC_memory, 1) == 0) {
622 ecc = 0; /* ECC off in CMOS so disable it */
623 print_debug("ECC off\n");
627 print_debug("ECC on\n");
629 drc &= ~(3 << 20); /* clear the ecc bits */
630 drc |= (ecc << 20); /* or in the calculated ecc bits */
/* Map the chosen refresh rate back to a DRC rate index. */
631 for ( cnt = 1; cnt < 5; cnt++)
632 if (drc_rates[cnt] == rate)
635 drc &= ~(7 << 8); /* clear the rate bits */
639 if (reg == 8) { /* independant clocks */
643 drc |= (1 << 26); /* set the overlap bit - the factory BIOS does */
644 drc |= (1 << 27); /* set DED retry enable - the factory BIOS does */
/* FSB speed field comes from an MSR read (elided in this view). */
647 value = msr.lo >> 16;
649 drc &= ~(3 << 2); /* set the front side bus */
650 drc |= (fsb_conversion[value] << 2);
651 drc &= ~(3 << 0); /* set the dram type */
652 drc |= (dram_type << 0);
657 die("Bad SPD value\n");
658 /* If an hw_error occurs report that I have no memory */
/*
 * sdram_set_spd_registers() - entry point for SPD-driven setup; detects
 * DIMMs and aborts if channel0 is empty.
 * NOTE(review): the remainder of this function is elided in this view.
 */
665 static void sdram_set_spd_registers(const struct mem_controller *ctrl)
669 /* Test if we can read the spd and if ram is ddr or ddr2 */
670 dimm_mask = spd_detect_dimms(ctrl);
/* Require at least one DIMM on channel0 (low DIMM_SOCKETS bits). */
671 if (!(dimm_mask & ((1 << DIMM_SOCKETS) - 1))) {
672 print_err("No memory for this cpu\n");
/* do_delay() - software delay between DRAM init steps.
 * NOTE(review): body not visible in this view — confirm in full file. */
678 static void do_delay(void)
/*
 * pll_setup() - choose the external PLL pin setting from the DRC speed
 * fields and hand it to the mainboard hook.
 *
 * drc bits 1:0 = DIMM speed, bits 3:2 = FSB speed (see sdram_enable).
 * NOTE(review): the pin assignments inside each branch are elided in
 * this view.
 */
686 static void pll_setup(uint32_t drc)
689 if(drc&3) { /* DDR 333 or DDR 400 */
690 if((drc&0x0c) == 0x0c) { /* FSB 200 */
693 else if((drc&0x0c) == 0x08) { /* FSB 167 */
696 else if(drc&1){ /* FSB 133 DDR 333 */
699 else { /* FSB 133 DDR 400 */
704 if((drc&0x08) == 0x08) { /* FSB 200 or 167 */
/* Delegate the actual strap programming to the board code. */
711 mainboard_set_e7520_pll(pins);
715 #define TIMEOUT_LOOPS 300000
717 #define DCALCSR 0x100
718 #define DCALADDR 0x104
719 #define DCALDATA 0x108
/*
 * set_on_dimm_termination_enable() - program DDR2 On-Die Termination
 * (ODT) based on which slots are Empty, Single, or Double sided, then
 * run a per-DIMM calibration cycle through the DCAL registers.
 *
 * Slot population is inferred by comparing consecutive DRB boundary
 * values; the packed result in t4 selects one of several hard-coded
 * ODT patterns (comments show slot layout, e.g. EESS = two empty,
 * two single-sided).
 * NOTE(review): interior lines are elided in this view (t4 packing,
 * pattern-select fallthroughs, loop delay) — verify in full file.
 */
721 static void set_on_dimm_termination_enable(const struct mem_controller *ctrl)
728 /* Set up northbridge values */
730 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xf0000180);
731 /* Figure out which slots are Empty, Single, or Double sided */
732 for(i=0,t4=0,c2=0;i<8;i+=2) {
733 c1 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+i);
/* Same boundary as previous entry means nothing on this side. */
734 if(c1 == c2) continue;
735 c2 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+1+i);
743 if( ((t4>>8)&0x0f) == 0 ) {
744 data32 = 0x00000010; /* EEES */
747 if ( ((t4>>16)&0x0f) == 0 ) {
748 data32 = 0x00003132; /* EESS */
751 if ( ((t4>>24)&0x0f) == 0 ) {
752 data32 = 0x00335566; /* ESSS */
755 data32 = 0x77bbddee; /* SSSS */
759 if( ((t4>>8)&0x0f) == 0 ) {
760 data32 = 0x00003132; /* EEED */
763 if ( ((t4>>8)&0x0f) == 2 ) {
764 data32 = 0xb373ecdc; /* EEDD */
767 if ( ((t4>>16)&0x0f) == 0 ) {
768 data32 = 0x00b3a898; /* EESD */
771 data32 = 0x777becdc; /* ESSD */
774 die("Error - First dimm slot empty\n");
777 print_debug("ODT Value = ");
778 print_debug_hex32(data32);
781 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xb0, data32);
/* Issue an EMRS/ODT calibration command per DIMM and poll DCALCSR
 * bit 31 for completion. */
783 for(dimm=0;dimm<8;dimm+=1) {
785 write32(BAR+DCALADDR, 0x0b840001);
786 write32(BAR+DCALCSR, 0x83000003 | (dimm << 20));
788 for(i=0;i<1001;i++) {
789 data32 = read32(BAR+DCALCSR);
790 if(!(data32 & (1<<31)))
/*
 * set_receive_enable() - run the receive-enable calibration for every
 * DIMM and program the resulting per-byte-lane delays into the
 * northbridge (BAR+0x150 for channel A, BAR+0x154 for channel B).
 *
 * For each DIMM a calibration command is issued through DCALCSR and the
 * raw edge data is read back from DCALDATA; the code then scans for
 * signal edges to compute a timing value (recen), packs 8 bits per DIMM
 * pair into recena/recenb, and finally applies an errata-style fixup
 * pass that clamps outlier nibbles relative to the others.
 * NOTE(review): this view elides many interior lines (loop bounds and
 * edge-scan bodies around lines 848-939, fixup accumulation) — the
 * comments here describe only what the visible lines establish.
 */
796 static void set_receive_enable(const struct mem_controller *ctrl)
807 uint32_t data32_dram;
808 uint32_t dcal_data32_0;
809 uint32_t dcal_data32_1;
810 uint32_t dcal_data32_2;
811 uint32_t dcal_data32_3;
816 for(dimm=0;dimm<8;dimm+=1) {
/* Kick off the receive-enable calibration for this DIMM and poll
 * DCALCSR bit 31 until the engine is done. */
819 write32(BAR+DCALDATA+(17*4), 0x04020000);
820 write32(BAR+DCALCSR, 0x83800004 | (dimm << 20));
822 for(i=0;i<1001;i++) {
823 data32 = read32(BAR+DCALCSR);
824 if(!(data32 & (1<<31)))
/* Read back the calibration result words; even DIMMs use words 0-3,
 * odd DIMMs (other channel) use words 4-7. */
830 dcal_data32_0 = read32(BAR+DCALDATA + 0);
831 dcal_data32_1 = read32(BAR+DCALDATA + 4);
832 dcal_data32_2 = read32(BAR+DCALDATA + 8);
833 dcal_data32_3 = read32(BAR+DCALDATA + 12);
836 dcal_data32_0 = read32(BAR+DCALDATA + 16);
837 dcal_data32_1 = read32(BAR+DCALDATA + 20);
838 dcal_data32_2 = read32(BAR+DCALDATA + 24);
839 dcal_data32_3 = read32(BAR+DCALDATA + 28);
842 /* check if bank is installed */
843 if((dcal_data32_0 == 0) && (dcal_data32_2 == 0))
845 /* Calculate the timing value */
848 for(i=0,edge=0,bit=63,cnt=31,data32r=0,
849 work32l=dcal_data32_1,work32h=dcal_data32_3;
852 if(work32l & (1<<cnt))
855 work32l = dcal_data32_0;
856 work32h = dcal_data32_2;
862 if(!(work32l & (1<<cnt)))
865 work32l = dcal_data32_0;
866 work32h = dcal_data32_2;
874 data32 = ((bit%8) << 1);
875 if(work32h & (1<<cnt))
900 work32l = dcal_data32_0;
901 work32h = dcal_data32_2;
907 if(!(work32l & (1<<cnt)))
910 if(work32l & (1<<cnt))
913 data32 = (((cnt-1)%8)<<1);
914 if(work32h & (1<<(cnt-1))) {
917 /* test for frame edge cross overs */
918 if((edge == 1) && (data32 > 12) &&
919 (((recen+16)-data32) < 3)) {
923 if((edge == 2) && (data32 < 4) &&
924 ((recen - data32) > 12)) {
928 if(((recen+3) >= data32) && ((recen-3) <= data32))
938 recen+=2; /* this is not in the spec, but matches
939 the factory output, and has less failure */
940 recen <<= (dimm/2) * 8;
949 /* Check for Eratta problem */
/* Fixup pass: nibbles > 7 vs <= 7 are counted across the packed
 * value; the minority group is clamped toward the majority. */
950 for(i=cnt=0;i<32;i+=8) {
951 if (((recena>>i)&0x0f)>7) {
955 if((recena>>i)&0x0f) {
961 cnt = (cnt&0x0f) - (cnt>>16);
964 if(((recena>>i)&0x0f)>7) {
965 recena &= ~(0x0f<<i);
972 if(((recena>>i)&0x0f)<8) {
973 recena &= ~(0x0f<<i);
979 for(i=cnt=0;i<32;i+=8) {
980 if (((recenb>>i)&0x0f)>7) {
984 if((recenb>>i)&0x0f) {
990 cnt = (cnt&0x0f) - (cnt>>16);
993 if(((recenb>>i)&0x0f)>7) {
994 recenb &= ~(0x0f<<i);
1000 for(i=0;i<32;i+=8) {
1001 if(((recenb>>8)&0x0f)<8) {
1002 recenb &= ~(0x0f<<i);
1009 print_debug("Receive enable A = ");
1010 print_debug_hex32(recena);
1011 print_debug(", Receive enable B = ");
1012 print_debug_hex32(recenb);
1015 /* clear out the calibration area */
1016 write32(BAR+DCALDATA+(16*4), 0x00000000);
1017 write32(BAR+DCALDATA+(17*4), 0x00000000);
1018 write32(BAR+DCALDATA+(18*4), 0x00000000);
1019 write32(BAR+DCALDATA+(19*4), 0x00000000);
1022 write32(BAR+DCALCSR, 0x0000000f);
/* Commit the calibrated delays to the controller. */
1024 write32(BAR+0x150, recena);
1025 write32(BAR+0x154, recenb);
/*
 * sdram_enable() - main DRAM bring-up sequence.
 *
 * Order of operations visible here: program gearing per FSB/DIMM speed,
 * write DRC with ECC and refresh masked off, enable clocks (CKDIS),
 * take DDRCSR out of idle, program DRB/DRA/DRT from SPD, then drive the
 * JEDEC init sequence through the DCAL registers (precharge, EMRS,
 * MRS + DLL reset, refreshes, MRS normal), run ODT and receive-enable
 * calibration, clear memory to initialize ECC, and finally enable
 * refresh, ECC mode and scrubbing.
 * NOTE(review): this function continues past the end of this view and
 * many interior lines are elided; comments below describe only visible
 * steps.
 */
1028 static void sdram_enable(int controllers, const struct mem_controller *ctrl)
1039 volatile unsigned long *iptrv;
1044 static const struct {
/* Gearing tables indexed by (DIMM speed, FSB speed); four 32-bit
 * values each, written to config registers 0xa0.. below. */
1047 /* FSB 133 DIMM 266 */
1048 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1049 /* FSB 133 DIMM 333 */
1050 {{ 0x00000000, 0x00000000, 0x00000000, 0x00000000}},
1051 /* FSB 133 DIMM 400 */
1052 {{ 0x00000120, 0x00000000, 0x00000032, 0x00000010}},
1053 /* FSB 167 DIMM 266 */
1054 {{ 0x00005432, 0x00001000, 0x00004325, 0x00000000}},
1055 /* FSB 167 DIMM 333 */
1056 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1057 /* FSB 167 DIMM 400 */
1058 {{ 0x00154320, 0x00000000, 0x00065432, 0x00010000}},
1059 /* FSB 200 DIMM 266 */
1060 {{ 0x00000032, 0x00000010, 0x00000120, 0x00000000}},
1061 /* FSB 200 DIMM 333 */
1062 {{ 0x00065432, 0x00010000, 0x00154320, 0x00000000}},
1063 /* FSB 200 DIMM 400 */
1064 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
/* DQS byte-lane pattern written to BAR+0x200.. later in the flow. */
1067 static const uint32_t dqs_data[] = {
1068 0xffffffff, 0xffffffff, 0x000000ff,
1069 0xffffffff, 0xffffffff, 0x000000ff,
1070 0xffffffff, 0xffffffff, 0x000000ff,
1071 0xffffffff, 0xffffffff, 0x000000ff,
1072 0xffffffff, 0xffffffff, 0x000000ff,
1073 0xffffffff, 0xffffffff, 0x000000ff,
1074 0xffffffff, 0xffffffff, 0x000000ff,
1075 0xffffffff, 0xffffffff, 0x000000ff};
1077 mask = spd_detect_dimms(ctrl);
1078 print_debug("Starting SDRAM Enable\n");
1081 pci_write_config32(PCI_DEV(0, 0x00, 0), DRM,
1082 0x00210000 | CONFIG_DIMM_MAP_LOGICAL);
1083 /* set dram type and Front Side Bus freq. */
1084 drc = spd_set_dram_controller_mode(ctrl, mask);
1086 die("Error calculating DRC\n");
/* Start with ECC and refresh disabled; they are enabled at the end. */
1089 data32 = drc & ~(3 << 20); /* clear ECC mode */
1090 data32 = data32 & ~(7 << 8); /* clear refresh rates */
1091 data32 = data32 | (1 << 5); /* temp turn off of ODT */
1092 /* Set gearing, then dram controller mode */
1093 /* drc bits 1:0 = DIMM speed, bits 3:2 = FSB speed */
1094 for(iptr = gearing[(drc&3)+((((drc>>2)&3)-1)*3)].clkgr,cnt=0;
1096 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xa0+(cnt*4), iptr[cnt]);
1099 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1101 /* turn the clocks on */
1103 pci_write_config16(PCI_DEV(0, 0x00, 0), CKDIS, 0x0000);
1105 /* 0x9a DDRCSR Take subsystem out of idle */
1106 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), DDRCSR);
1107 data16 &= ~(7 << 12);
1108 data16 |= (3 << 12); /* use dual channel lock step */
1109 pci_write_config16(PCI_DEV(0, 0x00, 0), DDRCSR, data16);
1111 /* program row size DRB */
1112 spd_set_ram_size(ctrl, mask);
1114 /* program page size DRA */
1115 spd_set_row_attributes(ctrl, mask);
1117 /* program DRT timing values */
1118 cas_latency = spd_set_drt_attributes(ctrl, mask, drc);
1120 for(i=0;i<8;i++) { /* loop throught each dimm to test for row */
1121 print_debug("DIMM ");
1122 print_debug_hex8(i);
/* NOP command (0x03) per chip-select, then wait on DCALCSR bit 31. */
1127 write32(BAR + 0x100, (0x03000000 | (i<<20)));
1129 write32(BAR+0x100, (0x83000000 | (i<<20)));
1131 do data32 = read32(BAR+DCALCSR);
1132 while(data32 & 0x80000000);
1139 for(cs=0;cs<8;cs++) {
1140 write32(BAR + DCALCSR, (0x83000000 | (cs<<20)));
1141 do data32 = read32(BAR+DCALCSR);
1142 while(data32 & 0x80000000);
1145 /* Precharg all banks */
1147 for(cs=0;cs<8;cs++) {
1148 if ((drc & 3) == 2) /* DDR2 */
1149 write32(BAR+DCALADDR, 0x04000000);
1151 write32(BAR+DCALADDR, 0x00000000);
1152 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1153 do data32 = read32(BAR+DCALCSR);
1154 while(data32 & 0x80000000);
1157 /* EMRS dll's enabled */
1159 for(cs=0;cs<8;cs++) {
1160 if ((drc & 3) == 2) /* DDR2 */
1161 /* fixme hard code AL additive latency */
1162 write32(BAR+DCALADDR, 0x0b940001);
1164 write32(BAR+DCALADDR, 0x00000001);
1165 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1166 do data32 = read32(BAR+DCALCSR);
1167 while(data32 & 0x80000000);
1169 /* MRS reset dll's */
/* Mode-register value depends on DDR vs DDR2 and the CAS latency
 * (20/25/30 tenths) computed by spd_set_drt_attributes(). */
1171 if ((drc & 3) == 2) { /* DDR2 */
1172 if(cas_latency == 30)
1173 mode_reg = 0x053a0000;
1175 mode_reg = 0x054a0000;
1178 if(cas_latency == 20)
1179 mode_reg = 0x012a0000;
1180 else /* CAS Latency 2.5 */
1181 mode_reg = 0x016a0000;
1183 for(cs=0;cs<8;cs++) {
1184 write32(BAR+DCALADDR, mode_reg);
1185 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1186 do data32 = read32(BAR+DCALCSR);
1187 while(data32 & 0x80000000);
1190 /* Precharg all banks */
1194 for(cs=0;cs<8;cs++) {
1195 if ((drc & 3) == 2) /* DDR2 */
1196 write32(BAR+DCALADDR, 0x04000000);
1198 write32(BAR+DCALADDR, 0x00000000);
1199 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1200 do data32 = read32(BAR+DCALCSR);
1201 while(data32 & 0x80000000);
1204 /* Do 2 refreshes */
1206 for(cs=0;cs<8;cs++) {
1207 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1208 do data32 = read32(BAR+DCALCSR);
1209 while(data32 & 0x80000000);
1212 for(cs=0;cs<8;cs++) {
1213 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1214 do data32 = read32(BAR+DCALCSR);
1215 while(data32 & 0x80000000);
1218 /* for good luck do 6 more */
1219 for(cs=0;cs<8;cs++) {
1220 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1223 for(cs=0;cs<8;cs++) {
1224 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1227 for(cs=0;cs<8;cs++) {
1228 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1231 for(cs=0;cs<8;cs++) {
1232 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1235 for(cs=0;cs<8;cs++) {
1236 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1239 for(cs=0;cs<8;cs++) {
1240 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1243 /* MRS reset dll's normal */
/* Re-issue MRS with the DLL-reset bit (24) cleared. */
1245 for(cs=0;cs<8;cs++) {
1246 write32(BAR+DCALADDR, (mode_reg & ~(1<<24)));
1247 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1248 do data32 = read32(BAR+DCALCSR);
1249 while(data32 & 0x80000000);
1252 /* Do only if DDR2 EMRS dll's enabled */
1253 if ((drc & 3) == 2) { /* DDR2 */
1255 for(cs=0;cs<8;cs++) {
1256 write32(BAR+DCALADDR, (0x0b940001));
1257 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1258 do data32 = read32(BAR+DCALCSR);
1259 while(data32 & 0x80000000);
1265 write32(BAR+DCALCSR, 0x0000000f);
1267 /* DDR1 This is test code to copy some codes in the factory setup */
1269 write32(BAR, 0x00100000);
1271 if ((drc & 3) == 2) { /* DDR2 */
1272 /* enable on dimm termination */
1273 set_on_dimm_termination_enable(ctrl);
1276 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xa0000000 );
1279 /* receive enable calibration */
1280 set_receive_enable(ctrl);
1283 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1284 for(i = 0, cnt = (BAR+0x200); i < 24; i++, cnt+=4) {
1285 write32(cnt, dqs_data[i]);
1287 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1289 /* Enable refresh */
1291 data32 = drc & ~(3 << 20); /* clear ECC mode */
1292 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1293 write32(BAR+DCALCSR, 0x0008000f);
1295 /* clear memory and init ECC */
1296 print_debug("Clearing memory\n");
1297 for(i=0;i<64;i+=4) {
1298 write32(BAR+DCALDATA+i, 0x00000000);
1301 for(cs=0;cs<8;cs++) {
1302 write32(BAR+DCALCSR, (0x830831d8 | (cs<<20)));
1303 do data32 = read32(BAR+DCALCSR);
1304 while(data32 & 0x80000000);
1307 /* Bring memory subsystem on line */
1308 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1309 data32 |= (1 << 31);
1310 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x98, data32);
1311 /* wait for completion */
1312 print_debug("Waiting for mem complete\n");
/* Poll register 0x98 bit 31 until hardware clears it. */
1314 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1315 if( (data32 & (1<<31)) == 0)
1318 print_debug("Done\n");
1320 /* Set initialization complete */
1323 data32 = drc & ~(3 << 20); /* clear ECC mode */
1324 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1326 /* Set the ecc mode */
1327 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, drc);
1329 /* Enable memory scrubbing */
1331 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), MCHSCRB);
1333 data16 |= ((2 << 2) | (2 << 0));
1334 pci_write_config16(PCI_DEV(0, 0x00, 0), MCHSCRB, data16);
1336 /* The memory is now setup, use it */
1337 cache_lbmem(MTRR_TYPE_WRBACK);