2 * This file is part of the coreboot project.
4 * Copyright (C) 2005 Eric W. Biederman and Tom Zimmerman
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
21 #include <cpu/x86/mtrr.h>
22 #include <cpu/x86/cache.h>
26 #include <pc80/mc146818rtc.h>
27 #if CONFIG_HAVE_OPTION_TABLE
28 #include "option_table.h"
31 #define BAR 0x40000000
/*
 * Program the static (non-SPD-derived) northbridge registers.
 * register_values is a flat table of triples:
 *   [i]   PCI config address (PCI_ADDR encodes dev/fn + register offset)
 *   [i+1] AND mask  - bits of the current value to preserve
 *   [i+2] OR value  - bits to set
 * NOTE(review): several lines of this function are missing from this
 * view (locals, closing braces); comments describe only visible code.
 */
33 static void sdram_set_registers(const struct mem_controller *ctrl)
35 	static const unsigned int register_values[] = {
37 	/* CKDIS 0x8c disable clocks */
38 	PCI_ADDR(0, 0x00, 0, CKDIS), 0xffff0000, 0x0000ffff,
40 	/* 0x9c Device present and extended RAM control
41 	 * DEVPRES is very touchy, hard code the initialization
42 	 * of PCI-E ports here.
44 	PCI_ADDR(0, 0x00, 0, DEVPRES), 0x00000000, 0x07020801 | DEVPRES_CONFIG,
46 	/* 0xc8 Remap RAM base and limit off */
47 	PCI_ADDR(0, 0x00, 0, REMAPLIMIT), 0x00000000, 0x03df0000,
50 	PCI_ADDR(0, 0x00, 0, 0xd8), 0x00000000, 0xb5930000,
51 	PCI_ADDR(0, 0x00, 0, 0xe8), 0x00000000, 0x00004a2a,
54 	PCI_ADDR(0, 0x00, 0, MCHCFG0), 0xfce0ffff, 0x00006000, /* 6000 */
57 	PCI_ADDR(0, 0x00, 0, PAM-1), 0xcccccc7f, 0x33333000,
58 	PCI_ADDR(0, 0x00, 0, PAM+3), 0xcccccccc, 0x33333333,
61 	PCI_ADDR(0, 0x00, 0, DEVPRES1), 0xffbffff, (1<<22)|(6<<2) | DEVPRES1_CONFIG,
64 	PCI_ADDR(0, 0x00, 0, IURBASE), 0x00000fff, BAR |0,
	/* Walk the table in steps of 3 (addr, and-mask, or-value) and apply
	 * each entry as a read-modify-write of the 32-bit config register. */
69 	max = ARRAY_SIZE(register_values);
70 	for(i = 0; i < max; i += 3) {
	/* Low 8 bits of the encoded address are the register offset; the
	 * rest identify the device.  The -/+ PCI_DEV(0,0,0) here is a no-op
	 * rebase (both terms are the same device). */
74 	dev = (register_values[i] & ~0xff) - PCI_DEV(0, 0x00, 0) + PCI_DEV(0, 0x00, 0);
75 	where = register_values[i] & 0xff;
76 	reg = pci_read_config32(dev, where);
77 	reg &= register_values[i+1];
78 	reg |= register_values[i+2];
79 	pci_write_config32(dev, where, reg);
81 	print_spew("done.\n");
/*
 * Read the SPD EEPROM at 'device' and compute the log2 size (in bits)
 * of each side of the DIMM.  side1/side2 accumulate log2 terms:
 * rows + columns + log2(banks) + log2(data width).
 * Returns the sizes via struct dimm_size; on SPD read failure the
 * hw_err path (mostly elided here) reports zero memory.
 * NOTE(review): declarations and some labels are missing from this view.
 */
91 static struct dimm_size spd_get_dimm_size(unsigned device)
93 	/* Calculate the log base 2 size of a DIMM in bits */
	/* SPD byte 2: memory type; 8 == DDR2 (7 == DDR per JEDEC SPD). */
101 	value = spd_read_byte(device, 2);	/* type */
102 	if (value < 0) goto hw_err;
103 	if (value == 8) ddr2 = 1;
105 	/* Note it might be easier to use byte 31 here, it has the DIMM size as
106 	 * a multiple of 4MB.  The way we do it now we can size both
107 	 * sides of an assymetric dimm.
	/* SPD byte 3: low nibble = row address bits on side 1. */
109 	value = spd_read_byte(device, 3);	/* rows */
110 	if (value < 0) goto hw_err;
111 	if ((value & 0xf) == 0) goto val_err;
112 	sz.side1 += value & 0xf;
	/* SPD byte 4: low nibble = column address bits on side 1. */
114 	value = spd_read_byte(device, 4);	/* columns */
115 	if (value < 0) goto hw_err;
116 	if ((value & 0xf) == 0) goto val_err;
117 	sz.side1 += value & 0xf;
	/* SPD byte 17: number of internal banks (power of two expected). */
119 	value = spd_read_byte(device, 17);	/* banks */
120 	if (value < 0) goto hw_err;
121 	if ((value & 0xff) == 0) goto val_err;
122 	sz.side1 += log2(value & 0xff);
124 	/* Get the module data width and convert it to a power of two */
	/* SPD bytes 7 (high) and 6 (low) form the module data width; only
	 * 64-bit (non-ECC) and 72-bit (ECC) widths are accepted. */
125 	value = spd_read_byte(device, 7);	/* (high byte) */
126 	if (value < 0) goto hw_err;
130 	low = spd_read_byte(device, 6);	/* (low byte) */
131 	if (low < 0) goto hw_err;
132 	value = value | (low & 0xff);
133 	if ((value != 72) && (value != 64)) goto val_err;
134 	sz.side1 += log2(value);
	/* SPD byte 5: number of physical banks (sides); 1 = single sided. */
137 	value = spd_read_byte(device, 5);	/* number of physical banks */
139 	if (value < 0) goto hw_err;
142 	if (value == 1) goto out;
143 	if (value != 2) goto val_err;
145 	/* Start with the symmetrical case */
	/* For double-sided DIMMs, adjust side2 by the difference between the
	 * side-2 (high nibble) and side-1 (low nibble) row/column counts. */
148 	value = spd_read_byte(device, 3);	/* rows */
149 	if (value < 0) goto hw_err;
150 	if ((value & 0xf0) == 0) goto out;	/* If symmetrical we are done */
151 	sz.side2 -= (value & 0x0f);		/* Subtract out rows on side 1 */
152 	sz.side2 += ((value >> 4) & 0x0f);	/* Add in rows on side 2 */
154 	value = spd_read_byte(device, 4);	/* columns */
155 	if (value < 0) goto hw_err;
156 	if ((value & 0xff) == 0) goto val_err;
157 	sz.side2 -= (value & 0x0f);		/* Subtract out columns on side 1 */
158 	sz.side2 += ((value >> 4) & 0x0f);	/* Add in columsn on side 2 */
162 	die("Bad SPD value\n");
163 	/* If an hw_error occurs report that I have no memory */
/*
 * Size each populated DIMM socket and program the DRB (DRAM Row
 * Boundary) registers with the cumulative top address of each row,
 * then set TOM (top of memory) and TOLM (top of low memory).
 * 'cum' accumulates size in multiples of 64MB (sz.side1/side2 are
 * log2 bit sizes converted before the visible lines - elided here).
 * Returns -1 on SPD error.  NOTE(review): several lines are missing
 * from this view, including the conversion from bits to 64MB units.
 */
172 static long spd_set_ram_size(const struct mem_controller *ctrl, long dimm_mask)
177 	for(i = cum = 0; i < DIMM_SOCKETS; i++) {
179 		if (dimm_mask & (1 << i)) {
180 			sz = spd_get_dimm_size(ctrl->channel0[i]);
182 				return -1; /* Report SPD error */
184 			/* convert bits to multiples of 64MB */
186 			cum += (1 << sz.side1);
			/* Each socket owns two DRB slots: side 1 at DRB+2i,
			 * side 2 at DRB+2i+1 (equal to side 1 if single-sided). */
188 			pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
191 			cum += (1 << sz.side2);
193 			pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
			/* Empty socket: repeat the running total so the boundary
			 * registers stay monotonic. */
196 			pci_write_config8(PCI_DEV(0, 0x00, 0), DRB + (i*2), cum);
197 			pci_write_config8(PCI_DEV(0, 0x00, 0), DRB+1 + (i*2), cum);
200 	/* set TOM top of memory 0xcc */
201 	pci_write_config16(PCI_DEV(0, 0x00, 0), TOM, cum);
202 	/* set TOLM top of low memory */
208 	pci_write_config16(PCI_DEV(0, 0x00, 0), TOLM, cum);
/*
 * Probe SPD byte 2 (memory type) on every socket of both channels and
 * build a presence bitmask: bit i for channel 0 socket i, bit
 * (i + DIMM_SOCKETS) for the matching channel 1 socket.  Only DDR
 * (type 7) and DDR2 (type 8) modules are counted.
 */
213 static unsigned int spd_detect_dimms(const struct mem_controller *ctrl)
218 	for(i = 0; i < DIMM_SOCKETS; i++) {
221 		device = ctrl->channel0[i];
223 		byte = spd_read_byte(device, 2);	/* Type */
224 		if ((byte == 7) || (byte == 8)) {
225 			dimm_mask |= (1 << i);
228 		device = ctrl->channel1[i];
230 		byte = spd_read_byte(device, 2);
231 		if ((byte == 7) || (byte == 8)) {
232 			dimm_mask |= (1 << (i + DIMM_SOCKETS));
/*
 * Compute per-socket page-size attributes from SPD geometry (rows,
 * columns, banks, device width) and program the DRA register.
 * 'reg' accumulates a log2 page size; each socket contributes a
 * nibble at (cnt*8) for side 1 and (cnt*8)+4 for side 2.
 * NOTE(review): the lines deriving 'reg' from the width value and the
 * single-vs-double-sided handling are elided from this view.
 */
240 static int spd_set_row_attributes(const struct mem_controller *ctrl,
250 	for(cnt=0; cnt < 4; cnt++) {
251 		if (!(dimm_mask & (1 << cnt))) {
255 		value = spd_read_byte(ctrl->channel0[cnt], 3);	/* rows */
256 		if (value < 0) goto hw_err;
257 		if ((value & 0xf) == 0) goto val_err;
260 		value = spd_read_byte(ctrl->channel0[cnt], 4);	/* columns */
261 		if (value < 0) goto hw_err;
262 		if ((value & 0xf) == 0) goto val_err;
265 		value = spd_read_byte(ctrl->channel0[cnt], 17);	/* banks */
266 		if (value < 0) goto hw_err;
267 		if ((value & 0xff) == 0) goto val_err;
268 		reg += log2(value & 0xff);
270 		/* Get the device width and convert it to a power of two */
271 		value = spd_read_byte(ctrl->channel0[cnt], 13);
272 		if (value < 0) goto hw_err;
273 		value = log2(value & 0xff);
		/* Sanity floor: a valid page-size exponent must be >= 27. */
275 		if(reg < 27) goto hw_err;
279 		dra += reg << (cnt*8);
		/* SPD byte 5: physical banks - side 2 nibble presumably set
		 * only for double-sided modules (logic elided; confirm). */
280 		value = spd_read_byte(ctrl->channel0[cnt], 5);
282 			dra += reg << ((cnt*8)+4);
286 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRA, dra);
290 	die("Bad SPD value\n");
291 	/* If an hw_error occurs report that I have no memory */
/*
 * Derive the DRT (DRAM Timing) register from SPD data across all
 * populated sockets and the selected bus speed in 'drc'.
 *
 * Overview of the visible logic:
 *  1. Find the highest (slowest) CAS latency usable by every DIMM at
 *     the chosen cycle time (cycle_time[drc&3]: 0x75=133MHz, 0x60=167MHz,
 *     0x50=200MHz; latency_indicies are the SPD cycle-time-at-CL bytes).
 *  2. Fold the worst-case Trp/Trcd (SPD 27/29), Tras (SPD 28/30 area),
 *     Tdal/Trfc-related bytes (SPD 41/42) into 'index'/'index2' as
 *     packed per-byte maxima.
 *  3. Encode the timings into 'drt' with per-frequency thresholds.
 * Returns the selected CAS latency encoded as 20/25/30 (x10).
 * NOTE(review): many lines (declarations, else-branches, the final
 * encoding for some fields) are elided from this view; threshold
 * values below are taken verbatim and not independently verified.
 */
300 static int spd_set_drt_attributes(const struct mem_controller *ctrl,
301 	long dimm_mask, uint32_t drc)
312 	static const unsigned char cycle_time[3] = {0x75,0x60,0x50};
313 	static const int latency_indicies[] = { 26, 23, 9 };
316 	drt = pci_read_config32(PCI_DEV(0, 0x00, 0), DRT);
317 	drt &= 3; /* save bits 1:0 */
	/* Locate the first populated socket; its SPD type byte is read below. */
319 	for(first_dimm = 0; first_dimm < 4; first_dimm++) {
320 		if (dimm_mask & (1 << first_dimm))
325 	value = spd_read_byte(ctrl->channel0[first_dimm], 2);
327 	drt |= (3<<5); /* back to bark write turn around & cycle add */
330 	drt |= (3<<18); /* Trasmax */
	/* Pass 1: find the common (largest) CAS latency all DIMMs support
	 * at the target cycle time. */
332 	for(cnt=0; cnt < 4; cnt++) {
333 		if (!(dimm_mask & (1 << cnt))) {
336 		reg = spd_read_byte(ctrl->channel0[cnt], 18); /* CAS Latency */
337 		/* Compute the lowest cas latency supported */
338 		latency = log2(reg) -2;
340 		/* Loop through and find a fast clock with a low latency */
341 		for(index = 0; index < 3; index++, latency++) {
342 			if ((latency < 2) || (latency > 4) ||
343 				(!(reg & (1 << latency)))) {
346 			value = spd_read_byte(ctrl->channel0[cnt],
347 				latency_indicies[index]);
349 			if(value <= cycle_time[drc&3]) {
350 				if( latency > cas_latency) {
351 					cas_latency = latency;
	/* Translate CL 2/2.5-or-3/other into the 20/25/30 return encoding. */
357 	index = (cas_latency-2);
358 	if((index)==0) cas_latency = 20;
359 	else if((index)==1) cas_latency = 25;
360 	else cas_latency = 30;
	/* Pass 2: collect worst-case timing bytes.  'index' packs SPD 27
	 * (byte 1) and 28 (byte 2); 'index2' packs SPD 29, 41, 42. */
362 	for(cnt=0;cnt<4;cnt++) {
363 		if (!(dimm_mask & (1 << cnt))) {
366 		reg = spd_read_byte(ctrl->channel0[cnt], 27)&0x0ff;
367 		if(((index>>8)&0x0ff)<reg) {
368 			index &= ~(0x0ff << 8);
371 		reg = spd_read_byte(ctrl->channel0[cnt], 28)&0x0ff;
372 		if(((index>>16)&0x0ff)<reg) {
373 			index &= ~(0x0ff << 16);
376 		reg = spd_read_byte(ctrl->channel0[cnt], 29)&0x0ff;
377 		if(((index2>>0)&0x0ff)<reg) {
378 			index2 &= ~(0x0ff << 0);
381 		reg = spd_read_byte(ctrl->channel0[cnt], 41)&0x0ff;
382 		if(((index2>>8)&0x0ff)<reg) {
383 			index2 &= ~(0x0ff << 8);
386 		reg = spd_read_byte(ctrl->channel0[cnt], 42)&0x0ff;
387 		if(((index2>>16)&0x0ff)<reg) {
388 			index2 &= ~(0x0ff << 16);
	/* Pass 3: encode timings per memory clock (200/167/133 MHz). */
394 	value = cycle_time[drc&3];
395 	if(value <= 0x50) {	/* 200 MHz */
397 		drt |= (2<<2);	/* CAS latency 4 */
400 		drt |= (1<<2);	/* CAS latency 3 */
403 		if((index&0x0ff00)<=0x03c00) {
404 			drt |= (1<<8);	/* Trp RAS Precharg */
406 			drt |= (2<<8);	/* Trp RAS Precharg */
409 		/* Trcd RAS to CAS delay */
410 		if((index2&0x0ff)<=0x03c) {
416 		/* Tdal Write auto precharge recovery delay */
420 		if((index2&0x0ff00)<=0x03700)
422 		else if((index2&0xff00)<=0x03c00)
425 		drt |= (2<<14);	/* spd 41 */
427 		drt |= (2<<16);	/* Twr not defined for DDR docs say use 2 */
430 		if((index&0x0ff0000)<=0x0140000) {
432 		} else if((index&0x0ff0000)<=0x0280000) {
434 		} else if((index&0x0ff0000)<=0x03c0000) {
440 		/* Trfc Auto refresh cycle time */
441 		if((index2&0x0ff0000)<=0x04b0000) {
443 		} else if((index2&0x0ff0000)<=0x0690000) {
448 		/* Docs say use 55 for all 200Mhz */
451 	else if(value <= 0x60) { /* 167 Mhz */
452 		/* according to new documentation CAS latency is 00
453 		 * for bits 3:2 for all 167 Mhz
454 		drt |= ((index&3)<<2); */ /* set CAS latency */
455 		if((index&0x0ff00)<=0x03000) {
456 			drt |= (1<<8);	/* Trp RAS Precharg */
458 			drt |= (2<<8);	/* Trp RAS Precharg */
461 		/* Trcd RAS to CAS delay */
462 		if((index2&0x0ff)<=0x030) {
468 		/* Tdal Write auto precharge recovery delay */
472 		drt |= (2<<14);	/* spd 41, but only one choice */
474 		drt |= (2<<16);	/* Twr not defined for DDR docs say 2 */
477 		if((index&0x0ff0000)<=0x0180000) {
479 		} else if((index&0x0ff0000)<=0x0300000) {
485 		/* Trfc Auto refresh cycle time */
486 		if((index2&0x0ff0000)<=0x0480000) {
488 		} else if((index2&0x0ff0000)<=0x0780000) {
493 		/* Docs state to use 99 for all 167 Mhz */
496 	else if(value <= 0x75) { /* 133 Mhz */
497 		drt |= ((index&3)<<2);	/* set CAS latency */
498 		if((index&0x0ff00)<=0x03c00) {
499 			drt |= (1<<8);	/* Trp RAS Precharg */
501 			drt |= (2<<8);	/* Trp RAS Precharg */
504 		/* Trcd RAS to CAS delay */
505 		if((index2&0x0ff)<=0x03c) {
511 		/* Tdal Write auto precharge recovery delay */
515 		drt |= (2<<14);	/* spd 41, but only one choice */
517 		drt |= (1<<16);	/* Twr not defined for DDR docs say 1 */
520 		if((index&0x0ff0000)<=0x01e0000) {
522 		} else if((index&0x0ff0000)<=0x03c0000) {
528 		/* Trfc Auto refresh cycle time */
529 		if((index2&0x0ff0000)<=0x04b0000) {
531 		} else if((index2&0x0ff0000)<=0x0780000) {
537 		/* Based on CAS latency */
	/* Unsupported SPD byte-9 cycle time: fatal. */
545 		die("Invalid SPD 9 bus speed.\n");
549 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRT, drt);
/*
 * Compute the DRC (DRAM Controller mode) register: DRAM type
 * (DDR266/333/400 in bits 1:0), FSB speed (bits 3:2, from an MSR),
 * refresh rate (bits 10:8), and ECC mode (bits 21:20).  All populated
 * DIMMs must agree on DDR vs DDR2 and must be ECC-capable; the slowest
 * common refresh rate and DRAM speed win.
 * NOTE(review): declarations, the returned value, and several branch
 * bodies are elided from this view.
 */
554 static int spd_set_dram_controller_mode(const struct mem_controller *ctrl,
	/* 0xff sentinels mean "not yet determined". */
562 	unsigned char dram_type = 0xff;
563 	unsigned char ecc = 0xff;
564 	unsigned char rate = 62;
	/* spd_rates maps SPD byte-12 refresh codes to controller rate values;
	 * drc_rates maps DRC encodings 1..4 back to the same value space. */
565 	static const unsigned char spd_rates[6] = {15,3,7,7,62,62};
566 	static const unsigned char drc_rates[5] = {0,15,7,62,3};
	/* Maps the CPU FSB code read from the MSR to DRC bits 3:2. */
567 	static const unsigned char fsb_conversion[4] = {3,1,3,2};
570 	drc = pci_read_config32(PCI_DEV(0, 0x00, 0), DRC);
571 	for(cnt=0; cnt < 4; cnt++) {
572 		if (!(dimm_mask & (1 << cnt))) {
575 		value = spd_read_byte(ctrl->channel0[cnt], 11);	/* ECC */
576 		reg = spd_read_byte(ctrl->channel0[cnt], 2);	/* Type */
577 		if (value == 2) {	/* RAM is ECC capable */
			/* Mixing DDR (type 7) and DDR2 (type 8) is fatal. */
583 				die("ERROR - Mixed DDR & DDR2 RAM\n");
586 			else if ( reg == 7 ) {
590 			else if ( ecc > 1 ) {
591 				die("ERROR - Mixed DDR & DDR2 RAM\n");
595 			die("ERROR - RAM not DDR\n");
			/* Controller requires ECC modules on every socket. */
599 			die("ERROR - Non ECC memory dimm\n");
602 		value = spd_read_byte(ctrl->channel0[cnt], 12);	/*refresh rate*/
603 		value &= 0x0f;	/* clip self refresh bit */
604 		if (value > 5) goto hw_err;
		/* Keep the most conservative (smallest) refresh rate seen. */
605 		if (rate > spd_rates[value])
606 			rate = spd_rates[value];
		/* SPD byte 9: minimum cycle time; pick the slowest common
		 * speed class (0=266, 1=333, 2=400-capable DDR2). */
608 		value = spd_read_byte(ctrl->channel0[cnt], 9);	/* cycle time */
609 		if (value > 0x75) goto hw_err;
611 		if (dram_type >= 2) {
612 			if (reg == 8) { /*speed is good, is this ddr2?*/
614 			} else { /* not ddr2 so use ddr333 */
619 		else if (value <= 0x60) {
620 			if (dram_type >= 1) dram_type = 1;
622 		else dram_type = 0; /* ddr266 */
	/* Honor a CMOS option to force ECC off, when the option table exists. */
626 #if CONFIG_HAVE_OPTION_TABLE
627 	if (read_option(CMOS_VSTART_ECC_memory, CMOS_VLEN_ECC_memory, 1) == 0) {
628 		ecc = 0;  /* ECC off in CMOS so disable it */
629 		print_debug("ECC off\n");
633 		print_debug("ECC on\n");
635 	drc &= ~(3 << 20);		/* clear the ecc bits */
636 	drc |= (ecc << 20);	/* or in the calculated ecc bits */
	/* Translate the chosen rate back to its DRC encoding (cnt). */
637 	for ( cnt = 1; cnt < 5; cnt++)
638 		if (drc_rates[cnt] == rate)
641 	drc &= ~(7 << 8);	/* clear the rate bits */
645 	if (reg == 8) { /* independant clocks */
649 	drc |= (1 << 26);	/* set the overlap bit - the factory BIOS does */
650 	drc |= (1 << 27);	/* set DED retry enable - the factory BIOS does */
	/* FSB speed code comes from an MSR read (elided); bits 17:16. */
653 	value = msr.lo >> 16;
655 	drc &= ~(3 << 2);	/* set the front side bus */
656 	drc |= (fsb_conversion[value] << 2);
657 	drc &= ~(3 << 0);	/* set the dram type */
658 	drc |= (dram_type << 0);
663 	die("Bad SPD value\n");
664 	/* If an hw_error occurs report that I have no memory */
/*
 * Entry point for SPD-based register setup: detect populated DIMMs and
 * bail out with an error message if channel 0 has none.
 * NOTE(review): the remainder of this function is elided from this view.
 */
671 static void sdram_set_spd_registers(const struct mem_controller *ctrl)
675 	/* Test if we can read the spd and if ram is ddr or ddr2 */
676 	dimm_mask = spd_detect_dimms(ctrl);
	/* Low DIMM_SOCKETS bits of the mask correspond to channel 0. */
677 	if (!(dimm_mask & ((1 << DIMM_SOCKETS) - 1))) {
678 		print_err("No memory for this cpu\n");
/* Small busy-wait delay helper (body elided from this view - presumably
 * a fixed spin loop; confirm against the full source). */
684 static void do_delay(void)
/*
 * Select the external PLL strap pins from the DRC speed fields
 * (bits 1:0 = DIMM speed, bits 3:2 = FSB speed) and hand them to the
 * mainboard hook.  The assignments to 'pins' are elided from this view.
 */
692 static void pll_setup(uint32_t drc)
695 	if(drc&3) {	/* DDR 333 or DDR 400 */
696 		if((drc&0x0c) == 0x0c) {	/* FSB 200 */
699 		else if((drc&0x0c) == 0x08) {	/* FSB 167 */
702 		else if(drc&1){	/* FSB 133 DDR 333 */
705 		else {	/* FSB 133 DDR 400 */
	/* else: DDR 266 path, keyed only on FSB speed. */
710 		if((drc&0x08) == 0x08) {	/* FSB 200 or 167 */
717 	mainboard_set_e7520_pll(pins);
721 #define TIMEOUT_LOOPS 300000
723 #define DCALCSR 0x100
724 #define DCALADDR 0x104
725 #define DCALDATA 0x108
/*
 * Program on-DIMM termination (ODT) for DDR2.  Classify each of the
 * four slots as Empty, Single, or Double sided by comparing adjacent
 * DRB boundary values, pick a precomputed ODT pattern for that
 * population (comments: E=empty, S=single, D=double, slot order), write
 * it to config reg 0xb0, then issue an EMRS command per rank through
 * the DCAL (DDR calibration) window to apply it.
 * NOTE(review): the t4 classification accumulation and parts of the
 * decision ladder are elided from this view.
 */
727 static void set_on_dimm_termination_enable(const struct mem_controller *ctrl)
734 	/* Set up northbridge values */
736 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xf0000180);
737 	/* Figure out which slots are Empty, Single, or Double sided */
738 	for(i=0,t4=0,c2=0;i<8;i+=2) {
		/* c1 == previous boundary means the slot adds no rows: empty. */
739 		c1 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+i);
740 		if(c1 == c2) continue;
741 		c2 = pci_read_config8(PCI_DEV(0, 0x00, 0), DRB+1+i);
	/* Decode 't4' (one classification nibble per slot) into an ODT
	 * pattern word. */
749 	if( ((t4>>8)&0x0f) == 0 ) {
750 		data32 = 0x00000010; /* EEES */
753 	if ( ((t4>>16)&0x0f) == 0 ) {
754 		data32 = 0x00003132; /* EESS */
757 	if ( ((t4>>24)&0x0f) == 0 ) {
758 		data32 = 0x00335566; /* ESSS */
761 	data32 = 0x77bbddee; /* SSSS */
765 	if( ((t4>>8)&0x0f) == 0 ) {
766 		data32 = 0x00003132; /* EEED */
769 	if ( ((t4>>8)&0x0f) == 2 ) {
770 		data32 = 0xb373ecdc; /* EEDD */
773 	if ( ((t4>>16)&0x0f) == 0 ) {
774 		data32 = 0x00b3a898; /* EESD */
777 	data32 = 0x777becdc; /* ESSD */
	/* The first slot must be populated for any supported pattern. */
780 	die("Error - First dimm slot empty\n");
783 	print_debug("ODT Value = ");
784 	print_debug_hex32(data32);
787 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0xb0, data32);
	/* Issue an EMRS (mode-register) command per rank and poll DCALCSR
	 * bit 31 until the command completes (bounded by ~1000 polls). */
789 	for(dimm=0;dimm<8;dimm+=1) {
791 		write32(BAR+DCALADDR, 0x0b840001);
792 		write32(BAR+DCALCSR, 0x83000003 | (dimm << 20));
794 		for(i=0;i<1001;i++) {
795 			data32 = read32(BAR+DCALCSR);
796 			if(!(data32 & (1<<31)))
/*
 * Receive-enable calibration: for each rank, run the hardware
 * calibration command through the DCAL window, read back the four
 * result words, locate the rising/falling signal edges in the bit
 * pattern, and convert the edge position into a per-rank 4-bit delay.
 * The per-rank values are packed into 'recena' (channel A, reg 0x150)
 * and 'recenb' (channel B, reg 0x154), with an errata pass that clamps
 * outlier nibbles toward the median of the populated ranks.
 * NOTE(review): this function is heavily elided in this view (edge
 * search details, recena/recenb accumulation, parts of the errata
 * loops); comments describe only what the visible lines establish.
 */
801 static void set_receive_enable(const struct mem_controller *ctrl)
812 	uint32_t data32_dram;
813 	uint32_t dcal_data32_0;
814 	uint32_t dcal_data32_1;
815 	uint32_t dcal_data32_2;
816 	uint32_t dcal_data32_3;
	/* Run the receive-enable calibration command for each rank and
	 * poll DCALCSR bit 31 for completion (bounded poll). */
821 	for(dimm=0;dimm<8;dimm+=1) {
824 		write32(BAR+DCALDATA+(17*4), 0x04020000);
825 		write32(BAR+DCALCSR, 0x83800004 | (dimm << 20));
827 		for(i=0;i<1001;i++) {
828 			data32 = read32(BAR+DCALCSR);
829 			if(!(data32 & (1<<31)))
		/* Even ranks read result words 0-3, odd ranks words 4-7. */
835 			dcal_data32_0 = read32(BAR+DCALDATA + 0);
836 			dcal_data32_1 = read32(BAR+DCALDATA + 4);
837 			dcal_data32_2 = read32(BAR+DCALDATA + 8);
838 			dcal_data32_3 = read32(BAR+DCALDATA + 12);
841 			dcal_data32_0 = read32(BAR+DCALDATA + 16);
842 			dcal_data32_1 = read32(BAR+DCALDATA + 20);
843 			dcal_data32_2 = read32(BAR+DCALDATA + 24);
844 			dcal_data32_3 = read32(BAR+DCALDATA + 28);
847 		/* check if bank is installed */
848 		if((dcal_data32_0 == 0) && (dcal_data32_2 == 0))
850 		/* Calculate the timing value */
		/* Scan from bit 63 downward across the two 64-bit halves
		 * looking for the first set bit (signal edge). */
853 		for(i=0,edge=0,bit=63,cnt=31,data32r=0,
854 			work32l=dcal_data32_1,work32h=dcal_data32_3;
857 			if(work32l & (1<<cnt))
860 				work32l = dcal_data32_0;
861 				work32h = dcal_data32_2;
867 			if(!(work32l & (1<<cnt)))
870 					work32l = dcal_data32_0;
871 					work32h = dcal_data32_2;
		/* Convert the edge bit position into a delay value; the high
		 * word supplies an extra half-step. */
879 			data32 = ((bit%8) << 1);
880 			if(work32h & (1<<cnt))
905 			work32l = dcal_data32_0;
906 			work32h = dcal_data32_2;
912 			if(!(work32l & (1<<cnt)))
915 			if(work32l & (1<<cnt))
918 			data32 = (((cnt-1)%8)<<1);
919 			if(work32h & (1<<(cnt-1))) {
922 		/* test for frame edge cross overs */
923 		if((edge == 1) && (data32 > 12) &&
924 			(((recen+16)-data32) < 3)) {
928 		if((edge == 2) && (data32 < 4) &&
929 			((recen - data32) > 12)) {
933 		if(((recen+3) >= data32) && ((recen-3) <= data32))
943 		recen+=2;	/* this is not in the spec, but matches
944 				   the factory output, and has less failure */
		/* Shift the rank's nibble into its slot position. */
945 		recen <<= (dimm/2) * 8;
954 	/* Check for Eratta problem */
	/* Channel A: count nibbles above/below 7 and clamp outliers. */
955 	for(i=cnt=0;i<32;i+=8) {
956 		if (((recena>>i)&0x0f)>7) {
960 		if((recena>>i)&0x0f) {
966 	cnt = (cnt&0x0f) - (cnt>>16);
969 		if(((recena>>i)&0x0f)>7) {
970 			recena &= ~(0x0f<<i);
977 		if(((recena>>i)&0x0f)<8) {
978 			recena &= ~(0x0f<<i);
	/* Channel B: same errata treatment. */
984 	for(i=cnt=0;i<32;i+=8) {
985 		if (((recenb>>i)&0x0f)>7) {
989 		if((recenb>>i)&0x0f) {
995 	cnt = (cnt&0x0f) - (cnt>>16);
998 		if(((recenb>>i)&0x0f)>7) {
999 			recenb &= ~(0x0f<<i);
1005 	for(i=0;i<32;i+=8) {
1006 		if(((recenb>>8)&0x0f)<8) {
1007 			recenb &= ~(0x0f<<i);
1014 	print_debug("Receive enable A = ");
1015 	print_debug_hex32(recena);
1016 	print_debug(", Receive enable B = ");
1017 	print_debug_hex32(recenb);
1020 	/* clear out the calibration area */
1021 	write32(BAR+DCALDATA+(16*4), 0x00000000);
1022 	write32(BAR+DCALDATA+(17*4), 0x00000000);
1023 	write32(BAR+DCALDATA+(18*4), 0x00000000);
1024 	write32(BAR+DCALDATA+(19*4), 0x00000000);
	/* Reset DCAL state, then commit the calibrated delays. */
1027 	write32(BAR+DCALCSR, 0x0000000f);
1029 	write32(BAR+0x150, recena);
1030 	write32(BAR+0x154, recenb);
/*
 * Main SDRAM bring-up sequence (JEDEC init).  In order:
 *   - program DRM dimm mapping, compute and program DRC (via
 *     spd_set_dram_controller_mode), gearing table, DDRCSR;
 *   - program DRB/DRA/DRT from SPD;
 *   - run the JEDEC initialization commands per chip-select through
 *     the DCAL window: NOP, precharge-all, EMRS (DLL enable), MRS
 *     (DLL reset), precharge-all, refreshes, MRS (normal), and for
 *     DDR2 a final EMRS pass;
 *   - ODT setup (DDR2), receive-enable calibration, DQS tables;
 *   - enable refresh, clear all of memory to initialize ECC, bring
 *     the memory subsystem online (reg 0x98 bit 31), set the final
 *     ECC mode, enable scrubbing, and enable caching of low memory.
 * NOTE(review): many lines (declarations, loop headers, delays, some
 * branches) are elided from this view; comments describe only the
 * visible code.
 */
1034 static void sdram_enable(int controllers, const struct mem_controller *ctrl)
1045 	volatile unsigned long *iptrv;
	/* Gearing table indexed by FSB/DIMM speed combination; each entry
	 * is four clock-gearing words written to config regs 0xa0..0xac. */
1050 	static const struct {
1053 		/* FSB 133 DIMM 266 */
1054 		{{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1055 		/* FSB 133 DIMM 333 */
1056 		{{ 0x00000000, 0x00000000, 0x00000000, 0x00000000}},
1057 		/* FSB 133 DIMM 400 */
1058 		{{ 0x00000120, 0x00000000, 0x00000032, 0x00000010}},
1059 		/* FSB 167 DIMM 266 */
1060 		{{ 0x00005432, 0x00001000, 0x00004325, 0x00000000}},
1061 		/* FSB 167 DIMM 333 */
1062 		{{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
1063 		/* FSB 167 DIMM 400 */
1064 		{{ 0x00154320, 0x00000000, 0x00065432, 0x00010000}},
1065 		/* FSB 200 DIMM 266 */
1066 		{{ 0x00000032, 0x00000010, 0x00000120, 0x00000000}},
1067 		/* FSB 200 DIMM 333 */
1068 		{{ 0x00065432, 0x00010000, 0x00154320, 0x00000000}},
1069 		/* FSB 200 DIMM 400 */
1070 		{{ 0x00000001, 0x00000000, 0x00000001, 0x00000000}},
	/* DQS strobe-select pattern written to BAR+0x200.. (24 words). */
1073 	static const uint32_t dqs_data[] = {
1074 				0xffffffff, 0xffffffff, 0x000000ff,
1075 				0xffffffff, 0xffffffff, 0x000000ff,
1076 				0xffffffff, 0xffffffff, 0x000000ff,
1077 				0xffffffff, 0xffffffff, 0x000000ff,
1078 				0xffffffff, 0xffffffff, 0x000000ff,
1079 				0xffffffff, 0xffffffff, 0x000000ff,
1080 				0xffffffff, 0xffffffff, 0x000000ff,
1081 				0xffffffff, 0xffffffff, 0x000000ff};
1083 	mask = spd_detect_dimms(ctrl);
1084 	print_debug("Starting SDRAM Enable\n");
	/* DRM: logical-to-physical dimm mapping; boards may override. */
1087 #ifdef DIMM_MAP_LOGICAL
1088 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRM,
1089 		0x00210000 | DIMM_MAP_LOGICAL);
1091 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRM, 0x00211248);
1093 	/* set dram type and Front Side Bus freq. */
1094 	drc = spd_set_dram_controller_mode(ctrl, mask);
1096 		die("Error calculating DRC\n");
	/* Start with ECC/refresh disabled and ODT temporarily off while
	 * the init commands run; final DRC is written at the end. */
1099 	data32 = drc & ~(3 << 20);  /* clear ECC mode */
1100 	data32 = data32 & ~(7 << 8);  /* clear refresh rates */
1101 	data32 = data32 | (1 << 5);  /* temp turn off of ODT */
1102 	/* Set gearing, then dram controller mode */
1103 	/* drc bits 1:0 = DIMM speed, bits 3:2 = FSB speed */
1104 	for(iptr = gearing[(drc&3)+((((drc>>2)&3)-1)*3)].clkgr,cnt=0;
1106 		pci_write_config32(PCI_DEV(0, 0x00, 0), 0xa0+(cnt*4), iptr[cnt]);
1109 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1111 	/* turn the clocks on */
1113 	pci_write_config16(PCI_DEV(0, 0x00, 0), CKDIS, 0x0000);
1115 	/* 0x9a DDRCSR Take subsystem out of idle */
1116 	data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), DDRCSR);
1117 	data16 &= ~(7 << 12);
1118 	data16 |= (3 << 12);   /* use dual channel lock step */
1119 	pci_write_config16(PCI_DEV(0, 0x00, 0), DDRCSR, data16);
1121 	/* program row size DRB */
1122 	spd_set_ram_size(ctrl, mask);
1124 	/* program page size DRA */
1125 	spd_set_row_attributes(ctrl, mask);
1127 	/* program DRT timing values */
1128 	cas_latency = spd_set_drt_attributes(ctrl, mask, drc);
	/* NOP command to each rank; the 0x03000000 write (no go bit) arms
	 * the command, the 0x83000000 write launches it, then poll busy. */
1130 	for(i=0;i<8;i++) { /* loop throught each dimm to test for row */
1131 		print_debug("DIMM ");
1132 		print_debug_hex8(i);
1137 		write32(BAR + 0x100, (0x03000000 | (i<<20)));
1139 		write32(BAR+0x100, (0x83000000 | (i<<20)));
1141 		data32 = read32(BAR+DCALCSR);
1142 		while(data32 & 0x80000000)
1143 			data32 = read32(BAR+DCALCSR);
	/* NOP to all chip selects. */
1150 	for(cs=0;cs<8;cs++) {
1151 		write32(BAR + DCALCSR, (0x83000000 | (cs<<20)));
1152 		data32 = read32(BAR+DCALCSR);
1153 		while(data32 & 0x80000000)
1154 			data32 = read32(BAR+DCALCSR);
1157 	/* Precharg all banks */
1159 	for(cs=0;cs<8;cs++) {
1160 		if ((drc & 3) == 2) /* DDR2  */
1161 			write32(BAR+DCALADDR, 0x04000000);
1163 			write32(BAR+DCALADDR, 0x00000000);
1164 		write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1165 		data32 = read32(BAR+DCALCSR);
1166 		while(data32 & 0x80000000)
1167 			data32 = read32(BAR+DCALCSR);
1170 	/* EMRS dll's enabled */
1172 	for(cs=0;cs<8;cs++) {
1173 		if ((drc & 3) == 2) /* DDR2  */
1174 			/* fixme hard code AL additive latency */
1175 			write32(BAR+DCALADDR, 0x0b940001);
1177 			write32(BAR+DCALADDR, 0x00000001);
1178 		write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1179 		data32 = read32(BAR+DCALCSR);
1180 		while(data32 & 0x80000000)
1181 			data32 = read32(BAR+DCALCSR);
1183 	/* MRS reset dll's */
	/* Mode register value encodes burst/CAS latency; cas_latency is
	 * the 20/25/30 (x10) encoding from spd_set_drt_attributes. */
1185 	if ((drc & 3) == 2) { /* DDR2 */
1186 		if(cas_latency == 30)
1187 			mode_reg = 0x053a0000;
1189 			mode_reg = 0x054a0000;
1192 		if(cas_latency == 20)
1193 			mode_reg = 0x012a0000;
1194 		else /* CAS Latency 2.5 */
1195 			mode_reg = 0x016a0000;
1197 	for(cs=0;cs<8;cs++) {
1198 		write32(BAR+DCALADDR, mode_reg);
1199 		write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1200 		data32 = read32(BAR+DCALCSR);
1201 		while(data32 & 0x80000000)
1202 			data32 = read32(BAR+DCALCSR);
1205 	/* Precharg all banks */
1209 	for(cs=0;cs<8;cs++) {
1210 		if ((drc & 3) == 2) /* DDR2  */
1211 			write32(BAR+DCALADDR, 0x04000000);
1213 			write32(BAR+DCALADDR, 0x00000000);
1214 		write32(BAR+DCALCSR, (0x83000002 | (cs<<20)));
1215 		data32 = read32(BAR+DCALCSR);
1216 		while(data32 & 0x80000000)
1217 			data32 = read32(BAR+DCALCSR);
1220 	/* Do 2 refreshes */
1222 	for(cs=0;cs<8;cs++) {
1223 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1224 		data32 = read32(BAR+DCALCSR);
1225 		while(data32 & 0x80000000)
1226 			data32 = read32(BAR+DCALCSR);
1229 	for(cs=0;cs<8;cs++) {
1230 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1231 		data32 = read32(BAR+DCALCSR);
1232 		while(data32 & 0x80000000)
1233 			data32 = read32(BAR+DCALCSR);
1236 	/* for good luck do 6 more */
	/* These six refreshes are fire-and-forget (no completion poll). */
1237 	for(cs=0;cs<8;cs++) {
1238 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1241 	for(cs=0;cs<8;cs++) {
1242 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1245 	for(cs=0;cs<8;cs++) {
1246 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1249 	for(cs=0;cs<8;cs++) {
1250 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1253 	for(cs=0;cs<8;cs++) {
1254 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1257 	for(cs=0;cs<8;cs++) {
1258 		write32(BAR+DCALCSR, (0x83000001 | (cs<<20)));
1261 	/* MRS reset dll's normal */
	/* Clear the DLL-reset bit (bit 24) and rewrite the mode register. */
1263 	for(cs=0;cs<8;cs++) {
1264 		write32(BAR+DCALADDR, (mode_reg & ~(1<<24)));
1265 		write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1266 		data32 = read32(BAR+DCALCSR);
1267 		while(data32 & 0x80000000)
1268 			data32 = read32(BAR+DCALCSR);
1271 	/* Do only if DDR2  EMRS dll's enabled */
1272 	if ((drc & 3) == 2) { /* DDR2  */
1274 		for(cs=0;cs<8;cs++) {
1275 			write32(BAR+DCALADDR, (0x0b940001));
1276 			write32(BAR+DCALCSR, (0x83000003 | (cs<<20)));
1277 			data32 = read32(BAR+DCALCSR);
1278 			while(data32 & 0x80000000)
1279 				data32 = read32(BAR+DCALCSR);
	/* Reset the DCAL state machine. */
1285 	write32(BAR+DCALCSR, 0x0000000f);
1287 	/* DDR1 This is test code to copy some codes in the factory setup */
1289 	write32(BAR, 0x00100000);
1291 	if ((drc & 3) == 2) { /* DDR2  */
1292 		/* enable on dimm termination */
1293 		set_on_dimm_termination_enable(ctrl);
1296 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0x88, 0xa0000000 );
1299 	/* receive enable calibration */
1300 	set_receive_enable(ctrl);
	/* DQS timing: write the 24-word pattern to BAR+0x200.. with the
	 * enable register (0x94) bracketing the update. */
1303 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1304 	for(i = 0, cnt = (BAR+0x200); i < 24; i++, cnt+=4) {
1305 		write32(cnt, dqs_data[i]);
1307 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0x94, 0x3904a100 );
1309 	/* Enable refresh */
	/* Refresh on, but keep ECC off until memory has been cleared. */
1311 	data32 = drc & ~(3 << 20);  /* clear ECC mode */
1312 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1313 	write32(BAR+DCALCSR, 0x0008000f);
1315 	/* clear memory and init ECC */
1316 	print_debug("Clearing memory\n");
1317 	for(i=0;i<64;i+=4) {
1318 		write32(BAR+DCALDATA+i, 0x00000000);
	/* 0x830831d8: hardware memory-clear command per chip select. */
1321 	for(cs=0;cs<8;cs++) {
1322 		write32(BAR+DCALCSR, (0x830831d8 | (cs<<20)));
1323 		data32 = read32(BAR+DCALCSR);
1324 		while(data32 & 0x80000000)
1325 			data32 = read32(BAR+DCALCSR);
1328 	/* Bring memory subsystem on line */
1329 	data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1330 	data32 |= (1 << 31);
1331 	pci_write_config32(PCI_DEV(0, 0x00, 0), 0x98, data32);
1332 	/* wait for completion */
1333 	print_debug("Waiting for mem complete\n");
	/* Hardware clears bit 31 of reg 0x98 when bring-up is complete. */
1335 	data32 = pci_read_config32(PCI_DEV(0, 0x00, 0), 0x98);
1336 	if( (data32 & (1<<31)) == 0)
1339 	print_debug("Done\n");
1341 	/* Set initialization complete */
1344 	data32 = drc & ~(3 << 20);  /* clear ECC mode */
1345 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, data32);
1347 	/* Set the ecc mode */
1348 	pci_write_config32(PCI_DEV(0, 0x00, 0), DRC, drc);
1350 	/* Enable memory scrubbing */
	/* MCHSCRB: (2<<2)|(2<<0) selects the scrub mode the factory BIOS uses. */
1352 	data16 = pci_read_config16(PCI_DEV(0, 0x00, 0), MCHSCRB);
1354 	data16 |= ((2 << 2) | (2 << 0));
1355 	pci_write_config16(PCI_DEV(0, 0x00, 0), MCHSCRB, data16);
1357 	/* The memory is now setup, use it */
1358 	cache_lbmem(MTRR_TYPE_WRBACK);