2 * This file is part of the coreboot project.
4 * Copyright (C) 2005 Eric W. Biederman and Tom Zimmerman
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
21 #include <cpu/x86/mtrr.h>
22 #include <cpu/x86/cache.h>
26 #if CONFIG_HAVE_OPTION_TABLE
27 #include "option_table.h"
30 #define BAR 0x40000000
/*
 * Program a fixed table of northbridge (bus 0, dev 0, fn 0) config
 * registers.  register_values[] is consumed three entries at a time:
 * (PCI address, AND mask to keep, OR value to set).
 * NOTE(review): this capture is missing physical lines (the embedded
 * original line numbers are non-contiguous), so local declarations
 * and closing braces are absent from this view.
 */
32 static void sdram_set_registers(const struct mem_controller *ctrl)
34 static const unsigned int register_values[] = {
36 /* CKDIS 0x8c disable clocks */
37 PCI_ADDR(0, 0x00, 0, CKDIS), 0xffff0000, 0x0000ffff,
39 /* 0x9c Device present and extended RAM control
40 * DEVPRES is very touchy, hard code the initialization
41 * of PCI-E ports here.
43 PCI_ADDR(0, 0x00, 0, DEVPRES), 0x00000000, 0x07020801 | DEVPRES_CONFIG,
45 /* 0xc8 Remap RAM base and limit off */
46 PCI_ADDR(0, 0x00, 0, REMAPLIMIT), 0x00000000, 0x03df0000,
49 PCI_ADDR(0, 0x00, 0, 0xd8), 0x00000000, 0xb5930000,
50 PCI_ADDR(0, 0x00, 0, 0xe8), 0x00000000, 0x00004a2a,
53 PCI_ADDR(0, 0x00, 0, MCHCFG0), 0xfce0ffff, 0x00006000, /* 6000 */
56 PCI_ADDR(0, 0x00, 0, PAM-1), 0xcccccc7f, 0x33333000,
57 PCI_ADDR(0, 0x00, 0, PAM+3), 0xcccccccc, 0x33333333,
/* NOTE(review): mask 0xffbffff has only 7 hex digits — presumably
 * 0xffbfffff was intended; confirm against upstream coreboot. */
60 PCI_ADDR(0, 0x00, 0, DEVPRES1), 0xffbffff, (1<<22)|(6<<2) | DEVPRES1_CONFIG,
/* Point the calibration/data BAR (IURBASE) at the scratch window. */
63 PCI_ADDR(0, 0x00, 0, IURBASE), 0x00000fff, BAR |0,
68 max = ARRAY_SIZE(register_values);
69 for(i = 0; i < max; i += 3) {
/* NOTE(review): subtracting and re-adding PCI_DEV(0,0,0) is a no-op;
 * presumably this once rebased the table entry onto a controller-
 * specific device — verify against the original source. */
73 dev = (register_values[i] & ~0xff) - PCI_DEV(0, 0x00, 0) + PCI_DEV(0, 0x00, 0);
74 where = register_values[i] & 0xff;
/* Read-modify-write: keep masked bits, OR in the new value. */
75 reg = pci_read_config32(dev, where);
76 reg &= register_values[i+1];
77 reg |= register_values[i+2];
78 pci_write_config32(dev, where, reg);
80 print_spew("done.\n");
/*
 * Decode a DIMM's SPD EEPROM into log2 sizes (in bits) for each side.
 * Reads SPD bytes: 2 (memory type), 3 (rows), 4 (columns), 17 (banks),
 * 6/7 (module data width), 5 (number of physical banks/sides).
 * On a bad SPD value it dies; on an SMBus/hardware error it reports
 * zero-sized memory (see hw_err path).
 */
90 static struct dimm_size spd_get_dimm_size(unsigned device)
92 /* Calculate the log base 2 size of a DIMM in bits */
/* SPD byte 2: 7 = DDR SDRAM, 8 = DDR2 SDRAM. */
100 value = spd_read_byte(device, 2); /* type */
101 if (value < 0) goto hw_err;
102 if (value == 8) ddr2 = 1;
104 /* Note it might be easier to use byte 31 here, it has the DIMM size as
105 * a multiple of 4MB. The way we do it now we can size both
106 * sides of an asymmetric dimm.
108 value = spd_read_byte(device, 3); /* rows */
109 if (value < 0) goto hw_err;
110 if ((value & 0xf) == 0) goto val_err;
111 sz.side1 += value & 0xf;
113 value = spd_read_byte(device, 4); /* columns */
114 if (value < 0) goto hw_err;
115 if ((value & 0xf) == 0) goto val_err;
116 sz.side1 += value & 0xf;
118 value = spd_read_byte(device, 17); /* banks */
119 if (value < 0) goto hw_err;
120 if ((value & 0xff) == 0) goto val_err;
121 sz.side1 += log2(value & 0xff);
123 /* Get the module data width and convert it to a power of two */
124 value = spd_read_byte(device, 7); /* (high byte) */
125 if (value < 0) goto hw_err;
129 low = spd_read_byte(device, 6); /* (low byte) */
130 if (low < 0) goto hw_err;
131 value = value | (low & 0xff);
/* Only 64-bit (non-ECC) or 72-bit (ECC) modules are acceptable. */
132 if ((value != 72) && (value != 64)) goto val_err;
133 sz.side1 += log2(value);
136 value = spd_read_byte(device, 5); /* number of physical banks */
138 if (value < 0) goto hw_err;
/* Single-sided DIMM: side2 stays as initialized, we are done. */
141 if (value == 1) goto out;
142 if (value != 2) goto val_err;
144 /* Start with the symmetrical case */
/* High nibbles of bytes 3/4 describe side 2 when it differs. */
147 value = spd_read_byte(device, 3); /* rows */
148 if (value < 0) goto hw_err;
149 if ((value & 0xf0) == 0) goto out; /* If symmetrical we are done */
150 sz.side2 -= (value & 0x0f); /* Subtract out rows on side 1 */
151 sz.side2 += ((value >> 4) & 0x0f); /* Add in rows on side 2 */
153 value = spd_read_byte(device, 4); /* columns */
154 if (value < 0) goto hw_err;
155 if ((value & 0xff) == 0) goto val_err;
156 sz.side2 -= (value & 0x0f); /* Subtract out columns on side 1 */
157 sz.side2 += ((value >> 4) & 0x0f); /* Add in columns on side 2 */
161 die("Bad SPD value\n");
162 /* If an hw_error occurs report that I have no memory */
/*
 * Size each populated DIMM and program the row-boundary (DRB)
 * registers with the cumulative top of memory, in 64MB units.
 * Also sets TOM (top of memory) and TOLM (top of low memory).
 * Returns -1 on an SPD read error.
 */
171 static long spd_set_ram_size(const struct mem_controller *ctrl, long dimm_mask)
176 for(i = cum = 0; i < DIMM_SOCKETS; i++) {
178 if (dimm_mask & (1 << i)) {
179 sz = spd_get_dimm_size(ctrl->channel0[i]);
181 return -1; /* Report SPD error */
183 /* convert bits to multiples of 64MB */
/* Each DRB entry holds the cumulative boundary after this side. */
185 cum += (1 << sz.side1);
187 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
190 cum += (1 << sz.side2);
192 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
/* Empty socket: repeat the running boundary for both sides. */
195 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
196 pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
199 /* set TOM top of memory 0xcc */
200 pci_write_config16(PCI_DEV(0, 0x00, 0), TOM, cum);
201 /* set TOLM top of low memory */
207 pci_write_config16(PCI_DEV(0, 0x00, 0), TOLM, cum);
/*
 * Probe SPD byte 2 (memory type) on every socket of both channels.
 * Returns a bitmask: bit i set for a DDR (7) or DDR2 (8) module on
 * channel 0 socket i, bit (i + DIMM_SOCKETS) for channel 1 socket i.
 */
212 static unsigned int spd_detect_dimms(const struct mem_controller *ctrl)
217 for(i = 0; i < DIMM_SOCKETS; i++) {
220 device = ctrl->channel0[i];
222 byte = spd_read_byte(device, 2); /* Type */
223 if ((byte == 7) || (byte == 8)) {
224 dimm_mask |= (1 << i);
227 device = ctrl->channel1[i];
229 byte = spd_read_byte(device, 2);
230 if ((byte == 7) || (byte == 8)) {
231 dimm_mask |= (1 << (i + DIMM_SOCKETS));
/*
 * Build and program the DRA (DRAM Row Attributes) register: one
 * nibble pair per socket derived from SPD rows/columns/banks and the
 * device width (page size).  Dies on a bad SPD value; reports no
 * memory on a hardware (SMBus) error.
 */
239 static int spd_set_row_attributes(const struct mem_controller *ctrl,
249 for(cnt=0; cnt < 4; cnt++) {
250 if (!(dimm_mask & (1 << cnt))) {
254 value = spd_read_byte(ctrl->channel0[cnt], 3); /* rows */
255 if (value < 0) goto hw_err;
256 if ((value & 0xf) == 0) goto val_err;
259 value = spd_read_byte(ctrl->channel0[cnt], 4); /* columns */
260 if (value < 0) goto hw_err;
261 if ((value & 0xf) == 0) goto val_err;
264 value = spd_read_byte(ctrl->channel0[cnt], 17); /* banks */
265 if (value < 0) goto hw_err;
266 if ((value & 0xff) == 0) goto val_err;
267 reg += log2(value & 0xff);
269 /* Get the device width and convert it to a power of two */
270 value = spd_read_byte(ctrl->channel0[cnt], 13);
271 if (value < 0) goto hw_err;
272 value = log2(value & 0xff);
/* Sanity floor on the computed page size exponent. */
274 if(reg < 27) goto hw_err;
/* Pack this socket's attribute nibble(s) into the DRA word. */
278 dra += reg << (cnt*8);
279 value = spd_read_byte(ctrl->channel0[cnt], 5);
281 dra += reg << ((cnt*8)+4);
285 pci_write_config32(PCI_DEV(0, 0x00, 0), DRA, dra);
289 die("Bad SPD value\n");
290 /* If an hw_error occurs report that I have no memory */
/*
 * Compute and program the DRT (DRAM Timing) register.
 * Phase 1: find the highest CAS latency usable by all DIMMs at the
 *          selected clock (SPD bytes 18, 9/23/26 via latency_indicies).
 * Phase 2: gather worst-case Trp/Trcd/Tras/Trfc/Twr/Tdal timings
 *          (SPD bytes 27-29, 41, 42) into the packed words
 *          index/index2, one timing per byte lane.
 * Phase 3: translate those into DRT bit fields per bus speed
 *          (200 / 167 / 133 MHz based on cycle_time[drc&3]).
 * Returns the encoded CAS latency (20 = CL2, 25 = CL2.5, 30 = CL3).
 * NOTE(review): this capture is missing physical lines, so several
 * locals, braces and else-arms are absent from this view.
 */
299 static int spd_set_drt_attributes(const struct mem_controller *ctrl,
300 long dimm_mask, uint32_t drc)
/* Cycle times in SPD 1/100ns form: 0x75=7.5ns(133), 0x60=6.0ns(167),
 * 0x50=5.0ns(200). */
311 static const unsigned char cycle_time[3] = {0x75,0x60,0x50};
312 static const int latency_indicies[] = { 26, 23, 9 };
315 drt = pci_read_config32(PCI_DEV(0, 0x00, 0), DRT);
316 drt &= 3; /* save bits 1:0 */
/* Find the first populated socket; its SPD type seeds the defaults. */
318 for(first_dimm = 0; first_dimm < 4; first_dimm++) {
319 if (dimm_mask & (1 << first_dimm))
324 value = spd_read_byte(ctrl->channel0[first_dimm], 2);
326 drt |= (3<<5); /* back to bark write turn around & cycle add */
329 drt |= (3<<18); /* Trasmax */
/* Phase 1: common CAS latency across all populated sockets. */
331 for(cnt=0; cnt < 4; cnt++) {
332 if (!(dimm_mask & (1 << cnt))) {
335 reg = spd_read_byte(ctrl->channel0[cnt], 18); /* CAS Latency */
336 /* Compute the lowest cas latency supported */
337 latency = log2(reg) -2;
339 /* Loop through and find a fast clock with a low latency */
340 for(index = 0; index < 3; index++, latency++) {
341 if ((latency < 2) || (latency > 4) ||
342 (!(reg & (1 << latency)))) {
345 value = spd_read_byte(ctrl->channel0[cnt],
346 latency_indicies[index]);
348 if(value <= cycle_time[drc&3]) {
349 if( latency > cas_latency) {
350 cas_latency = latency;
/* Encode CAS latency: 20 = CL2, 25 = CL2.5, 30 = CL3. */
356 index = (cas_latency-2);
357 if((index)==0) cas_latency = 20;
358 else if((index)==1) cas_latency = 25;
359 else cas_latency = 30;
/* Phase 2: worst-case (largest) timing bytes across the DIMMs. */
361 for(cnt=0;cnt<4;cnt++) {
362 if (!(dimm_mask & (1 << cnt))) {
365 reg = spd_read_byte(ctrl->channel0[cnt], 27)&0x0ff; /* Trp */
366 if(((index>>8)&0x0ff)<reg) {
367 index &= ~(0x0ff << 8);
370 reg = spd_read_byte(ctrl->channel0[cnt], 28)&0x0ff; /* Trrd */
371 if(((index>>16)&0x0ff)<reg) {
372 index &= ~(0x0ff << 16);
375 reg = spd_read_byte(ctrl->channel0[cnt], 29)&0x0ff; /* Trcd */
376 if(((index2>>0)&0x0ff)<reg) {
377 index2 &= ~(0x0ff << 0);
380 reg = spd_read_byte(ctrl->channel0[cnt], 41)&0x0ff; /* Tras */
381 if(((index2>>8)&0x0ff)<reg) {
382 index2 &= ~(0x0ff << 8);
385 reg = spd_read_byte(ctrl->channel0[cnt], 42)&0x0ff; /* Trfc */
386 if(((index2>>16)&0x0ff)<reg) {
387 index2 &= ~(0x0ff << 16);
/* Phase 3: translate gathered timings into DRT fields per speed. */
393 value = cycle_time[drc&3];
394 if(value <= 0x50) { /* 200 MHz */
396 drt |= (2<<2); /* CAS latency 4 */
399 drt |= (1<<2); /* CAS latency 3 */
402 if((index&0x0ff00)<=0x03c00) {
403 drt |= (1<<8); /* Trp RAS Precharg */
405 drt |= (2<<8); /* Trp RAS Precharg */
408 /* Trcd RAS to CAS delay */
409 if((index2&0x0ff)<=0x03c) {
415 /* Tdal Write auto precharge recovery delay */
419 if((index2&0x0ff00)<=0x03700)
421 else if((index2&0xff00)<=0x03c00)
424 drt |= (2<<14); /* spd 41 */
426 drt |= (2<<16); /* Twr not defined for DDR docs say use 2 */
429 if((index&0x0ff0000)<=0x0140000) {
431 } else if((index&0x0ff0000)<=0x0280000) {
433 } else if((index&0x0ff0000)<=0x03c0000) {
439 /* Trfc Auto refresh cycle time */
440 if((index2&0x0ff0000)<=0x04b0000) {
442 } else if((index2&0x0ff0000)<=0x0690000) {
447 /* Docs say use 55 for all 200Mhz */
450 else if(value <= 0x60) { /* 167 Mhz */
451 /* according to new documentation CAS latency is 00
452 * for bits 3:2 for all 167 Mhz
453 drt |= ((index&3)<<2); */ /* set CAS latency */
454 if((index&0x0ff00)<=0x03000) {
455 drt |= (1<<8); /* Trp RAS Precharg */
457 drt |= (2<<8); /* Trp RAS Precharg */
460 /* Trcd RAS to CAS delay */
461 if((index2&0x0ff)<=0x030) {
467 /* Tdal Write auto precharge recovery delay */
471 drt |= (2<<14); /* spd 41, but only one choice */
473 drt |= (2<<16); /* Twr not defined for DDR docs say 2 */
476 if((index&0x0ff0000)<=0x0180000) {
478 } else if((index&0x0ff0000)<=0x0300000) {
484 /* Trfc Auto refresh cycle time */
485 if((index2&0x0ff0000)<=0x0480000) {
487 } else if((index2&0x0ff0000)<=0x0780000) {
492 /* Docs state to use 99 for all 167 Mhz */
495 else if(value <= 0x75) { /* 133 Mhz */
496 drt |= ((index&3)<<2); /* set CAS latency */
497 if((index&0x0ff00)<=0x03c00) {
498 drt |= (1<<8); /* Trp RAS Precharg */
500 drt |= (2<<8); /* Trp RAS Precharg */
503 /* Trcd RAS to CAS delay */
504 if((index2&0x0ff)<=0x03c) {
510 /* Tdal Write auto precharge recovery delay */
514 drt |= (2<<14); /* spd 41, but only one choice */
516 drt |= (1<<16); /* Twr not defined for DDR docs say 1 */
519 if((index&0x0ff0000)<=0x01e0000) {
521 } else if((index&0x0ff0000)<=0x03c0000) {
527 /* Trfc Auto refresh cycle time */
528 if((index2&0x0ff0000)<=0x04b0000) {
530 } else if((index2&0x0ff0000)<=0x0780000) {
536 /* Based on CAS latency */
544 die("Invalid SPD 9 bus speed.\n");
548 pci_write_config32(PCI_DEV(0, 0x00, 0), DRT, drt);
/*
 * Compute the DRC (DRAM Controller mode) register from the SPD data
 * of every populated socket: ECC capability, refresh rate, DRAM type
 * (DDR266/333/400 vs DDR2) and the front-side-bus speed (from an
 * MSR read whose setup lines are missing in this capture).  Dies on
 * mixed DDR/DDR2 or non-ECC modules; hw_err reports no memory.
 */
553 static int spd_set_dram_controller_mode(const struct mem_controller *ctrl,
561 unsigned char dram_type = 0xff;
562 unsigned char ecc = 0xff;
563 unsigned char rate = 62;
/* SPD refresh-rate code -> controller rate; drc_rates maps back. */
564 static const unsigned char spd_rates[6] = {15,3,7,7,62,62};
565 static const unsigned char drc_rates[5] = {0,15,7,62,3};
566 static const unsigned char fsb_conversion[4] = {3,1,3,2};
569 drc = pci_read_config32(PCI_DEV(0, 0x00, 0), DRC);
570 for(cnt=0; cnt < 4; cnt++) {
571 if (!(dimm_mask & (1 << cnt))) {
574 value = spd_read_byte(ctrl->channel0[cnt], 11); /* ECC */
575 reg = spd_read_byte(ctrl->channel0[cnt], 2); /* Type */
576 if (value == 2) { /* RAM is ECC capable */
/* Reject mixing DDR (type 7) and DDR2 (type 8) modules. */
582 die("ERROR - Mixed DDR & DDR2 RAM\n");
585 else if ( reg == 7 ) {
589 else if ( ecc > 1 ) {
590 die("ERROR - Mixed DDR & DDR2 RAM\n");
594 die("ERROR - RAM not DDR\n");
598 die("ERROR - Non ECC memory dimm\n");
601 value = spd_read_byte(ctrl->channel0[cnt], 12); /*refresh rate*/
602 value &= 0x0f; /* clip self refresh bit */
603 if (value > 5) goto hw_err;
/* Keep the fastest (numerically smallest) required refresh rate. */
604 if (rate > spd_rates[value])
605 rate = spd_rates[value];
607 value = spd_read_byte(ctrl->channel0[cnt], 9); /* cycle time */
608 if (value > 0x75) goto hw_err;
610 if (dram_type >= 2) {
611 if (reg == 8) { /*speed is good, is this ddr2?*/
613 } else { /* not ddr2 so use ddr333 */
618 else if (value <= 0x60) {
619 if (dram_type >= 1) dram_type = 1;
621 else dram_type = 0; /* ddr266 */
625 #if CONFIG_HAVE_OPTION_TABLE
626 if (read_option(CMOS_VSTART_ECC_memory, CMOS_VLEN_ECC_memory, 1) == 0) {
627 ecc = 0; /* ECC off in CMOS so disable it */
628 print_debug("ECC off\n");
632 print_debug("ECC on\n");
634 drc &= ~(3 << 20); /* clear the ecc bits */
635 drc |= (ecc << 20); /* or in the calculated ecc bits */
636 for ( cnt = 1; cnt < 5; cnt++)
637 if (drc_rates[cnt] == rate)
640 drc &= ~(7 << 8); /* clear the rate bits */
644 if (reg == 8) { /* independant clocks */
648 drc |= (1 << 26); /* set the overlap bit - the factory BIOS does */
649 drc |= (1 << 27); /* set DED retry enable - the factory BIOS does */
/* FSB speed from the MSR read above (missing in this capture). */
652 value = msr.lo >> 16;
654 drc &= ~(3 << 2); /* set the front side bus */
655 drc |= (fsb_conversion[value] << 2);
656 drc &= ~(3 << 0); /* set the dram type */
657 drc |= (dram_type << 0);
662 die("Bad SPD value\n");
663 /* If an hw_error occurs report that I have no memory */
/*
 * SPD-driven register setup entry point: verify that at least one
 * channel-0 DIMM responded to SPD probing before proceeding.
 * (Remainder of the body is missing from this capture.)
 */
670 static void sdram_set_spd_registers(const struct mem_controller *ctrl)
674 /* Test if we can read the spd and if ram is ddr or ddr2 */
675 dimm_mask = spd_detect_dimms(ctrl);
676 if (!(dimm_mask & ((1 << DIMM_SOCKETS) - 1))) {
677 print_err("No memory for this cpu\n");
/* Small busy-wait delay helper (body missing from this capture). */
683 static void do_delay(void)
/*
 * Select the board PLL pin configuration from the DRC speed fields
 * (bits 1:0 = DIMM speed, bits 3:2 = FSB speed) and hand it to the
 * mainboard hook.  The pin-value assignments themselves are missing
 * from this capture.
 */
691 static void pll_setup(uint32_t drc)
694 if(drc&3) { /* DDR 333 or DDR 400 */
695 if((drc&0x0c) == 0x0c) { /* FSB 200 */
698 else if((drc&0x0c) == 0x08) { /* FSB 167 */
701 else if(drc&1){ /* FSB 133 DDR 333 */
704 else { /* FSB 133 DDR 400 */
709 if((drc&0x08) == 0x08) { /* FSB 200 or 167 */
716 mainboard_set_e7520_pll(pins);
720 #define TIMEOUT_LOOPS 300000
722 #define DCALCSR 0x100
723 #define DCALADDR 0x104
724 #define DCALDATA 0x108
/*
 * Program on-DIMM termination (ODT) for DDR2.  Classify each slot as
 * Empty/Single/Double-sided from the DRB boundary registers, pick the
 * matching ODT pattern word, write it to config reg 0xb0, then issue
 * an EMRS command per rank through the DCAL mailbox to latch it.
 */
726 static void set_on_dimm_termination_enable(const struct mem_controller *ctrl)
733 /* Set up northbridge values */
735 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xf0000180);
736 /* Figure out which slots are Empty, Single, or Double sided */
/* Equal consecutive DRB boundaries mean the rank is not populated. */
737 for(i=0,t4=0,c2=0;i<8;i+=2) {
738 c1 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+i);
739 if(c1 == c2) continue;
740 c2 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+1+i);
/* t4 encodes the per-slot population; select the ODT pattern.
 * Comments name patterns far slot -> near slot (E=empty, S=single,
 * D=double). */
748 if( ((t4>>8)&0x0f) == 0 ) {
749 data32 = 0x00000010; /* EEES */
752 if ( ((t4>>16)&0x0f) == 0 ) {
753 data32 = 0x00003132; /* EESS */
756 if ( ((t4>>24)&0x0f) == 0 ) {
757 data32 = 0x00335566; /* ESSS */
760 data32 = 0x77bbddee; /* SSSS */
764 if( ((t4>>8)&0x0f) == 0 ) {
765 data32 = 0x00003132; /* EEED */
768 if ( ((t4>>8)&0x0f) == 2 ) {
769 data32 = 0xb373ecdc; /* EEDD */
772 if ( ((t4>>16)&0x0f) == 0 ) {
773 data32 = 0x00b3a898; /* EESD */
776 data32 = 0x777becdc; /* ESSD */
779 die("Error - First dimm slot empty\n");
782 print_debug("ODT Value = ");
783 print_debug_hex32(data32);
786 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xb0, data32);
/* Latch ODT: EMRS (mode 3) via DCAL for each of the 8 ranks. */
788 for(dimm=0;dimm<8;dimm+=1) {
790 write32(BAR+DCALADDR, 0x0b840001);
791 write32(BAR+DCALCSR, 0x83000003 | (dimm << 20));
/* Poll the DCAL busy bit (31) with a bounded loop. */
793 for(i=0;i<1001;i++) {
794 data32 = read32(BAR+DCALCSR);
795 if(!(data32 & (1<<31)))
/*
 * Receive-enable calibration.  For each rank, run the DCAL receive-
 * enable measurement (command 4), read back the per-byte-lane result
 * bitmaps, locate the sampling edge, and accumulate 4-bit delay
 * values into recena (channel A) / recenb (channel B).  A fix-up pass
 * then reconciles lanes that landed on opposite sides of a frame
 * boundary (the "Eratta" loops).  Finally the computed words are
 * written to MMIO offsets 0x150/0x154.
 * NOTE(review): many physical lines are missing from this capture;
 * the edge-search logic below is only partially visible.
 */
800 static void set_receive_enable(const struct mem_controller *ctrl)
811 uint32_t data32_dram;
812 uint32_t dcal_data32_0;
813 uint32_t dcal_data32_1;
814 uint32_t dcal_data32_2;
815 uint32_t dcal_data32_3;
820 for(dimm=0;dimm<8;dimm+=1) {
/* Kick off the DCAL receive-enable measurement for this rank. */
823 write32(BAR+DCALDATA+(17*4), 0x04020000);
824 write32(BAR+DCALCSR, 0x83800004 | (dimm << 20));
826 for(i=0;i<1001;i++) {
827 data32 = read32(BAR+DCALCSR);
828 if(!(data32 & (1<<31)))
/* Result bitmaps: first 16 bytes for even ranks, next 16 for odd. */
834 dcal_data32_0 = read32(BAR+DCALDATA + 0);
835 dcal_data32_1 = read32(BAR+DCALDATA + 4);
836 dcal_data32_2 = read32(BAR+DCALDATA + 8);
837 dcal_data32_3 = read32(BAR+DCALDATA + 12);
840 dcal_data32_0 = read32(BAR+DCALDATA + 16);
841 dcal_data32_1 = read32(BAR+DCALDATA + 20);
842 dcal_data32_2 = read32(BAR+DCALDATA + 24);
843 dcal_data32_3 = read32(BAR+DCALDATA + 28);
846 /* check if bank is installed */
847 if((dcal_data32_0 == 0) && (dcal_data32_2 == 0))
849 /* Calculate the timing value */
/* Scan from the top bit down for the 0->1 (then 1->0) transition. */
852 for(i=0,edge=0,bit=63,cnt=31,data32r=0,
853 work32l=dcal_data32_1,work32h=dcal_data32_3;
856 if(work32l & (1<<cnt))
859 work32l = dcal_data32_0;
860 work32h = dcal_data32_2;
866 if(!(work32l & (1<<cnt)))
869 work32l = dcal_data32_0;
870 work32h = dcal_data32_2;
878 data32 = ((bit%8) << 1);
879 if(work32h & (1<<cnt))
904 work32l = dcal_data32_0;
905 work32h = dcal_data32_2;
911 if(!(work32l & (1<<cnt)))
914 if(work32l & (1<<cnt))
917 data32 = (((cnt-1)%8)<<1);
918 if(work32h & (1<<(cnt-1))) {
921 /* test for frame edge cross overs */
922 if((edge == 1) && (data32 > 12) &&
923 (((recen+16)-data32) < 3)) {
927 if((edge == 2) && (data32 < 4) &&
928 ((recen - data32) > 12)) {
932 if(((recen+3) >= data32) && ((recen-3) <= data32))
942 recen+=2; /* this is not in the spec, but matches
943 the factory output, and has less failure */
944 recen <<= (dimm/2) * 8;
953 /* Check for Eratta problem */
/* Fix-up pass, channel A: count lanes above/below the frame midpoint
 * and clamp outliers so all lanes sit in the same frame. */
954 for(i=cnt=0;i<32;i+=8) {
955 if (((recena>>i)&0x0f)>7) {
959 if((recena>>i)&0x0f) {
965 cnt = (cnt&0x0f) - (cnt>>16);
968 if(((recena>>i)&0x0f)>7) {
969 recena &= ~(0x0f<<i);
976 if(((recena>>i)&0x0f)<8) {
977 recena &= ~(0x0f<<i);
/* Same fix-up pass for channel B. */
983 for(i=cnt=0;i<32;i+=8) {
984 if (((recenb>>i)&0x0f)>7) {
988 if((recenb>>i)&0x0f) {
994 cnt = (cnt&0x0f) - (cnt>>16);
997 if(((recenb>>i)&0x0f)>7) {
998 recenb &= ~(0x0f<<i);
/* NOTE(review): `recenb>>8` inside a loop over i looks like it was
 * meant to be `recenb>>i` — confirm against upstream coreboot. */
1004 for(i=0;i<32;i+=8) {
1005 if(((recenb>>8)&0x0f)<8) {
1006 recenb &= ~(0x0f<<i);
1013 print_debug("Receive enable A = ");
1014 print_debug_hex32(recena);
1015 print_debug(", Receive enable B = ");
1016 print_debug_hex32(recenb);
1019 /* clear out the calibration area */
1020 write32(BAR+DCALDATA+(16*4), 0x00000000);
1021 write32(BAR+DCALDATA+(17*4), 0x00000000);
1022 write32(BAR+DCALDATA+(18*4), 0x00000000);
1023 write32(BAR+DCALDATA+(19*4), 0x00000000);
1026 write32(BAR+DCALCSR, 0x0000000f);
/* Commit the calibrated receive-enable words. */
1028 write32(BAR+0x150, recena);
1029 write32(BAR+0x154, recenb);
/*
 * Main DRAM bring-up sequence (JEDEC init) for the memory controller:
 *  1. Program DRM mapping, compute DRC, set gearing for the
 *     FSB/DIMM speed combination, take DDRCSR out of idle.
 *  2. Program DRB/DRA/DRT from SPD.
 *  3. JEDEC sequence via the DCAL mailbox per chip-select: NOP,
 *     precharge-all, EMRS (DLL enable), MRS (DLL reset), precharge,
 *     8 auto-refreshes, MRS (normal), and for DDR2 a final EMRS.
 *  4. ODT setup (DDR2), receive-enable calibration, DQS tables.
 *  5. Enable refresh, clear/init ECC memory, bring the subsystem
 *     online (bit 31 of reg 0x98), enable scrubbing, cache LB mem.
 * NOTE(review): this capture is missing physical lines throughout
 * (declarations, braces, delay calls), per the embedded numbering.
 */
1033 static void sdram_enable(int controllers, const struct mem_controller *ctrl)
1044 volatile unsigned long *iptrv;
1049 static const struct {
/* Gearing tables indexed by (DIMM speed, FSB speed) combination;
 * four clkgr words are written to config regs 0xa0..0xac. */
1052 /* FSB 133 DIMM 266 */
1053 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1054 /* FSB 133 DIMM 333 */
1055 {{ 0x00000000, 0x00000000, 0x00000000, 0x00000000}},
1056 /* FSB 133 DIMM 400 */
1057 {{ 0x00000120, 0x00000000, 0x00000032, 0x00000010}},
1058 /* FSB 167 DIMM 266 */
1059 {{ 0x00005432, 0x00001000, 0x00004325, 0x00000000}},
1060 /* FSB 167 DIMM 333 */
1061 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1062 /* FSB 167 DIMM 400 */
1063 {{ 0x00154320, 0x00000000, 0x00065432, 0x00010000}},
1064 /* FSB 200 DIMM 266 */
1065 {{ 0x00000032, 0x00000010, 0x00000120, 0x00000000}},
1066 /* FSB 200 DIMM 333 */
1067 {{ 0x00065432, 0x00010000, 0x00154320, 0x00000000}},
1068 /* FSB 200 DIMM 400 */
1069 {{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
/* DQS timing table written to BAR+0x200.. (24 words). */
1072 static const uint32_t dqs_data[] = {
1073 0xffffffff, 0xffffffff, 0x000000ff,
1074 0xffffffff, 0xffffffff, 0x000000ff,
1075 0xffffffff, 0xffffffff, 0x000000ff,
1076 0xffffffff, 0xffffffff, 0x000000ff,
1077 0xffffffff, 0xffffffff, 0x000000ff,
1078 0xffffffff, 0xffffffff, 0x000000ff,
1079 0xffffffff, 0xffffffff, 0x000000ff,
1080 0xffffffff, 0xffffffff, 0x000000ff};
1082 mask = spd_detect_dimms(ctrl);
1083 print_debug("Starting SDRAM Enable\n");
1086 #ifdef DIMM_MAP_LOGICAL
1087 pci_write_config32(PCI_DEV(0, 0x00, 0), DRM,
1088 0x00210000 | DIMM_MAP_LOGICAL);
1090 pci_write_config32(PCI_DEV(0, 0x00, 0), DRM, 0x00211248);
1092 /* set dram type and Front Side Bus freq. */
1093 drc = spd_set_dram_controller_mode(ctrl, mask);
1095 die("Error calculating DRC\n");
/* Start with ECC and refresh disabled, ODT temporarily off. */
1098 data32 = drc & ~(3 << 20); /* clear ECC mode */
1099 data32 = data32 & ~(7 << 8); /* clear refresh rates */
1100 data32 = data32 | (1 << 5); /* temp turn off of ODT */
1101 /* Set gearing, then dram controller mode */
1102 /* drc bits 1:0 = DIMM speed, bits 3:2 = FSB speed */
1103 for(iptr = gearing[(drc&3)+((((drc>>2)&3)-1)*3)].clkgr,cnt=0;
1105 pci_write_config32(PCI_DEV(0, 0x00, 0), 0xa0+(cnt*4), iptr[cnt]);
1108 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1110 /* turn the clocks on */
1112 pci_write_config16(PCI_DEV(0, 0x00, 0), CKDIS, 0x0000);
1114 /* 0x9a DDRCSR Take subsystem out of idle */
1115 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), DDRCSR);
1116 data16 &= ~(7 << 12);
1117 data16 |= (3 << 12); /* use dual channel lock step */
1118 pci_write_config16(PCI_DEV(0, 0x00, 0), DDRCSR, data16);
1120 /* program row size DRB */
1121 spd_set_ram_size(ctrl, mask);
1123 /* program page size DRA */
1124 spd_set_row_attributes(ctrl, mask);
1126 /* program DRT timing values */
1127 cas_latency = spd_set_drt_attributes(ctrl, mask, drc);
/* NOP commands to each rank (JEDEC init step 1). */
1129 for(i=0;i<8;i++) { /* loop throught each dimm to test for row */
1130 print_debug("DIMM ");
1131 print_debug_hex8(i);
1136 write32(BAR + 0x100, (0x03000000 | (i<<20)));
1138 write32(BAR+0x100, (0x83000000 | (i<<20)));
1140 data32 = read32(BAR+DCALCSR);
1141 while(data32 & 0x80000000)
1142 data32 = read32(BAR+DCALCSR);
1149 for(cs=0;cs<8;cs++) {
1150 write32(BAR + DCALCSR, (0x83000000 | (cs<<20)));
1151 data32 = read32(BAR+DCALCSR);
1152 while(data32 & 0x80000000)
1153 data32 = read32(BAR+DCALCSR);
1156 /* Precharg all banks */
1158 for(cs=0;cs<8;cs++) {
1159 if ((drc & 3) == 2) /* DDR2 */
1160 write32(BAR+DCALADDR, 0x04000000);
1162 write32(BAR+DCALADDR, 0x00000000);
1163 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1164 data32 = read32(BAR+DCALCSR);
1165 while(data32 & 0x80000000)
1166 data32 = read32(BAR+DCALCSR);
1169 /* EMRS dll's enabled */
1171 for(cs=0;cs<8;cs++) {
1172 if ((drc & 3) == 2) /* DDR2 */
1173 /* fixme hard code AL additive latency */
1174 write32(BAR+DCALADDR, 0x0b940001);
1176 write32(BAR+DCALADDR, 0x00000001);
1177 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1178 data32 = read32(BAR+DCALCSR);
1179 while(data32 & 0x80000000)
1180 data32 = read32(BAR+DCALCSR);
1182 /* MRS reset dll's */
/* Mode-register value encodes the CAS latency chosen earlier
 * (20 = CL2, 25 = CL2.5, 30 = CL3); bit 24 is the DLL-reset bit. */
1184 if ((drc & 3) == 2) { /* DDR2 */
1185 if(cas_latency == 30)
1186 mode_reg = 0x053a0000;
1188 mode_reg = 0x054a0000;
1191 if(cas_latency == 20)
1192 mode_reg = 0x012a0000;
1193 else /* CAS Latency 2.5 */
1194 mode_reg = 0x016a0000;
1196 for(cs=0;cs<8;cs++) {
1197 write32(BAR+DCALADDR, mode_reg);
1198 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1199 data32 = read32(BAR+DCALCSR);
1200 while(data32 & 0x80000000)
1201 data32 = read32(BAR+DCALCSR);
1204 /* Precharg all banks */
1208 for(cs=0;cs<8;cs++) {
1209 if ((drc & 3) == 2) /* DDR2 */
1210 write32(BAR+DCALADDR, 0x04000000);
1212 write32(BAR+DCALADDR, 0x00000000);
1213 write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1214 data32 = read32(BAR+DCALCSR);
1215 while(data32 & 0x80000000)
1216 data32 = read32(BAR+DCALCSR);
1219 /* Do 2 refreshes */
1221 for(cs=0;cs<8;cs++) {
1222 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1223 data32 = read32(BAR+DCALCSR);
1224 while(data32 & 0x80000000)
1225 data32 = read32(BAR+DCALCSR);
1228 for(cs=0;cs<8;cs++) {
1229 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1230 data32 = read32(BAR+DCALCSR);
1231 while(data32 & 0x80000000)
1232 data32 = read32(BAR+DCALCSR);
1235 /* for good luck do 6 more */
/* These six extra refreshes are fire-and-forget (no busy-poll). */
1236 for(cs=0;cs<8;cs++) {
1237 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1240 for(cs=0;cs<8;cs++) {
1241 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1244 for(cs=0;cs<8;cs++) {
1245 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1248 for(cs=0;cs<8;cs++) {
1249 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1252 for(cs=0;cs<8;cs++) {
1253 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1256 for(cs=0;cs<8;cs++) {
1257 write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1260 /* MRS reset dll's normal */
/* Re-issue MRS with the DLL-reset bit (24) cleared. */
1262 for(cs=0;cs<8;cs++) {
1263 write32(BAR+DCALADDR, (mode_reg & ~(1<<24)));
1264 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1265 data32 = read32(BAR+DCALCSR);
1266 while(data32 & 0x80000000)
1267 data32 = read32(BAR+DCALCSR);
1270 /* Do only if DDR2 EMRS dll's enabled */
1271 if ((drc & 3) == 2) { /* DDR2 */
1273 for(cs=0;cs<8;cs++) {
1274 write32(BAR+DCALADDR, (0x0b940001));
1275 write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1276 data32 = read32(BAR+DCALCSR);
1277 while(data32 & 0x80000000)
1278 data32 = read32(BAR+DCALCSR);
1284 write32(BAR+DCALCSR, 0x0000000f);
1286 /* DDR1 This is test code to copy some codes in the factory setup */
1288 write32(BAR, 0x00100000);
1290 if ((drc & 3) == 2) { /* DDR2 */
1291 /* enable on dimm termination */
1292 set_on_dimm_termination_enable(ctrl);
1295 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xa0000000 );
1298 /* receive enable calibration */
1299 set_receive_enable(ctrl);
/* Load the DQS timing table at BAR+0x200 (24 words). */
1302 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1303 for(i = 0, cnt = (BAR+0x200); i < 24; i++, cnt+=4) {
1304 write32(cnt, dqs_data[i]);
1306 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1308 /* Enable refresh */
1310 data32 = drc & ~(3 << 20); /* clear ECC mode */
1311 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1312 write32(BAR+DCALCSR, 0x0008000f);
1314 /* clear memory and init ECC */
1315 print_debug("Clearing memory\n");
1316 for(i=0;i<64;i+=4) {
1317 write32(BAR+DCALDATA+i, 0x00000000);
1320 for(cs=0;cs<8;cs++) {
1321 write32(BAR+DCALCSR, (0x830831d8 | (cs<<20)));
1322 data32 = read32(BAR+DCALCSR);
1323 while(data32 & 0x80000000)
1324 data32 = read32(BAR+DCALCSR);
1327 /* Bring memory subsystem on line */
1328 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1329 data32 |= (1 << 31);
1330 pci_write_config32(PCI_DEV(0, 0x00, 0), 0x98, data32);
1331 /* wait for completion */
1332 print_debug("Waiting for mem complete\n");
/* NOTE(review): this completion poll has no timeout — it spins until
 * hardware clears bit 31 of reg 0x98. */
1334 data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1335 if( (data32 & (1<<31)) == 0)
1338 print_debug("Done\n");
1340 /* Set initialization complete */
1343 data32 = drc & ~(3 << 20); /* clear ECC mode */
1344 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1346 /* Set the ecc mode */
1347 pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, drc);
1349 /* Enable memory scrubbing */
1351 data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), MCHSCRB);
1353 data16 |= ((2 << 2) | (2 << 0));
1354 pci_write_config16(PCI_DEV(0, 0x00, 0), MCHSCRB, data16);
1356 /* The memory is now setup, use it */
1357 cache_lbmem(MTRR_TYPE_WRBACK);