1 // QEMU Cirrus CLGD 54xx VGABIOS Extension.
3 // Copyright (C) 2009 Kevin O'Connor <kevin@koconnor.net>
4 // Copyright (c) 2004 Makoto Suzuki (suzu)
6 // This file may be distributed under the terms of the GNU LGPLv3 license.
8 #include "clext.h" // clext_init
9 #include "vgabios.h" // VBE_VENDOR_STRING
10 #include "biosvar.h" // GET_GLOBAL
11 #include "util.h" // dprintf
12 #include "bregs.h" // struct bregs
13 #include "stdvga.h" // VGAREG_SEQU_ADDRESS
14 #include "pci.h" // pci_config_readl
15 #include "pci_regs.h" // PCI_BASE_ADDRESS_0
18 /****************************************************************
20 ****************************************************************/
// Register lists used to restore plain VGA compatibility when leaving a
// Cirrus extended mode.  Each u16 entry appears to pack the register
// index in the low byte and the value in the high byte (the format
// consumed by cirrus_switch_mode_setregs); 0xffff terminates the list.
23 static u16 cseq_vga[] VAR16 = {0x0007,0xffff};               // sequencer (0x3c4)
24 static u16 cgraph_vga[] VAR16 = {0x0009,0x000a,0x000b,0xffff}; // graphics ctrl (0x3ce)
25 static u16 ccrtc_vga[] VAR16 = {0x001a,0x001b,0x001d,0xffff};  // CRTC (0x3d4)
// Graphics-controller register list shared by all Cirrus SVGA color
// modes (every cirrus_modes[] entry below references it).
// NOTE(review): the closing brace and any later entries are not visible
// in this extract.
28 static u16 cgraph_svgacolor[] VAR16 = {
29 0x0000,0x0001,0x0002,0x0003,0x0004,0x4005,0x0506,0x0f07,0xff08,
// Sequencer (cseq_*) and CRTC (ccrtc_*) programming tables for the
// 640x480 modes at 8/16/24 bpp.  The visible per-depth differences are
// sequencer register 0x07 (0x1107 / 0x1707 / 0x1507) and the pitch
// value written to CRTC register 0x13 (0x50 / 0xa0 / 0xf0).
// NOTE(review): interior entries and closing braces are missing from
// this extract; do not assume the tables are complete as shown.
34 static u16 cseq_640x480x8[] VAR16 = {
35 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1107,
36 0x580b,0x580c,0x580d,0x580e,
38 0x331b,0x331c,0x331d,0x331e,
41 static u16 ccrtc_640x480x8[] VAR16 = {
43 0x5f00,0x4f01,0x4f02,0x8003,0x5204,0x1e05,0x0b06,0x3e07,
45 0xea10,0xdf12,0x5013,0x4014,0xdf15,0x0b16,0xc317,0xff18,
50 static u16 cseq_640x480x16[] VAR16 = {
51 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1707,
52 0x580b,0x580c,0x580d,0x580e,
54 0x331b,0x331c,0x331d,0x331e,
57 static u16 ccrtc_640x480x16[] VAR16 = {
59 0x5f00,0x4f01,0x4f02,0x8003,0x5204,0x1e05,0x0b06,0x3e07,
61 0xea10,0xdf12,0xa013,0x4014,0xdf15,0x0b16,0xc317,0xff18,
66 static u16 cseq_640x480x24[] VAR16 = {
67 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1507,
68 0x580b,0x580c,0x580d,0x580e,
70 0x331b,0x331c,0x331d,0x331e,
73 static u16 ccrtc_640x480x24[] VAR16 = {
75 0x5f00,0x4f01,0x4f02,0x8003,0x5204,0x1e05,0x0b06,0x3e07,
77 0xea10,0xdf12,0xf013,0x4014,0xdf15,0x0b16,0xc317,0xff18,
// Sequencer/CRTC programming tables for the 800x600 modes at 8/16/24
// bpp.  Same entry format as the 640x480 tables above; only sequencer
// reg 0x07 and the CRTC pitch (reg 0x13) visibly differ per depth.
// NOTE(review): interior entries and closing braces are missing from
// this extract.
82 static u16 cseq_800x600x8[] VAR16 = {
83 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1107,
84 0x230b,0x230c,0x230d,0x230e,
86 0x141b,0x141c,0x141d,0x141e,
89 static u16 ccrtc_800x600x8[] VAR16 = {
90 0x2311,0x7d00,0x6301,0x6302,0x8003,0x6b04,0x1a05,0x9806,0xf007,
92 0x7d10,0x5712,0x6413,0x4014,0x5715,0x9816,0xc317,0xff18,
97 static u16 cseq_800x600x16[] VAR16 = {
98 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1707,
99 0x230b,0x230c,0x230d,0x230e,
100 0x0412,0x0013,0x2017,
101 0x141b,0x141c,0x141d,0x141e,
104 static u16 ccrtc_800x600x16[] VAR16 = {
105 0x2311,0x7d00,0x6301,0x6302,0x8003,0x6b04,0x1a05,0x9806,0xf007,
106 0x6009,0x000c,0x000d,
107 0x7d10,0x5712,0xc813,0x4014,0x5715,0x9816,0xc317,0xff18,
108 0x001a,0x221b,0x001d,
112 static u16 cseq_800x600x24[] VAR16 = {
113 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1507,
114 0x230b,0x230c,0x230d,0x230e,
115 0x0412,0x0013,0x2017,
116 0x141b,0x141c,0x141d,0x141e,
119 static u16 ccrtc_800x600x24[] VAR16 = {
120 0x2311,0x7d00,0x6301,0x6302,0x8003,0x6b04,0x1a05,0x9806,0xf007,
121 0x6009,0x000c,0x000d,
122 0x7d10,0x5712,0x2c13,0x4014,0x5715,0x9816,0xc317,0xff18,
123 0x001a,0x321b,0x001d,
// Sequencer/CRTC programming tables for the 1024x768 modes at 8/16/24
// bpp.  Same entry format as the tables above.
// NOTE(review): interior entries and closing braces are missing from
// this extract.
127 static u16 cseq_1024x768x8[] VAR16 = {
128 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1107,
129 0x760b,0x760c,0x760d,0x760e,
130 0x0412,0x0013,0x2017,
131 0x341b,0x341c,0x341d,0x341e,
134 static u16 ccrtc_1024x768x8[] VAR16 = {
135 0x2911,0xa300,0x7f01,0x7f02,0x8603,0x8304,0x9405,0x2406,0xf507,
136 0x6009,0x000c,0x000d,
137 0x0310,0xff12,0x8013,0x4014,0xff15,0x2416,0xc317,0xff18,
138 0x001a,0x221b,0x001d,
142 static u16 cseq_1024x768x16[] VAR16 = {
143 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1707,
144 0x760b,0x760c,0x760d,0x760e,
145 0x0412,0x0013,0x2017,
146 0x341b,0x341c,0x341d,0x341e,
149 static u16 ccrtc_1024x768x16[] VAR16 = {
150 0x2911,0xa300,0x7f01,0x7f02,0x8603,0x8304,0x9405,0x2406,0xf507,
151 0x6009,0x000c,0x000d,
152 0x0310,0xff12,0x0013,0x4014,0xff15,0x2416,0xc317,0xff18,
153 0x001a,0x321b,0x001d,
157 static u16 cseq_1024x768x24[] VAR16 = {
158 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1507,
159 0x760b,0x760c,0x760d,0x760e,
160 0x0412,0x0013,0x2017,
161 0x341b,0x341c,0x341d,0x341e,
164 static u16 ccrtc_1024x768x24[] VAR16 = {
165 0x2911,0xa300,0x7f01,0x7f02,0x8603,0x8304,0x9405,0x2406,0xf507,
166 0x6009,0x000c,0x000d,
167 0x0310,0xff12,0x8013,0x4014,0xff15,0x2416,0xc317,0xff18,
168 0x001a,0x321b,0x001d,
// Sequencer/CRTC programming tables for the 1280x1024 modes at 8/16
// bpp.  Same entry format as the tables above.
// NOTE(review): interior entries and closing braces are missing from
// this extract.
172 static u16 cseq_1280x1024x8[] VAR16 = {
173 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1107,
174 0x760b,0x760c,0x760d,0x760e,
175 0x0412,0x0013,0x2017,
176 0x341b,0x341c,0x341d,0x341e,
179 static u16 ccrtc_1280x1024x8[] VAR16 = {
180 0x2911,0xc300,0x9f01,0x9f02,0x8603,0x8304,0x9405,0x2406,0xf707,
181 0x6009,0x000c,0x000d,
182 0x0310,0xff12,0xa013,0x4014,0xff15,0x2416,0xc317,0xff18,
183 0x001a,0x221b,0x001d,
187 static u16 cseq_1280x1024x16[] VAR16 = {
188 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1707,
189 0x760b,0x760c,0x760d,0x760e,
190 0x0412,0x0013,0x2017,
191 0x341b,0x341c,0x341d,0x341e,
194 static u16 ccrtc_1280x1024x16[] VAR16 = {
195 0x2911,0xc300,0x9f01,0x9f02,0x8603,0x8304,0x9405,0x2406,0xf707,
196 0x6009,0x000c,0x000d,
197 0x0310,0xff12,0x4013,0x4014,0xff15,0x2416,0xc317,0xff18,
198 0x001a,0x321b,0x001d,
// Sequencer/CRTC programming tables for 1600x1200 at 8 bpp.  Same
// entry format as the tables above.
// NOTE(review): interior entries and closing braces are missing from
// this extract.
203 static u16 cseq_1600x1200x8[] VAR16 = {
204 0x0300,0x2101,0x0f02,0x0003,0x0e04,0x1107,
205 0x760b,0x760c,0x760d,0x760e,
206 0x0412,0x0013,0x2017,
207 0x341b,0x341c,0x341d,0x341e,
210 static u16 ccrtc_1600x1200x8[] VAR16 = {
211 0x2911,0xc300,0x9f01,0x9f02,0x8603,0x8304,0x9405,0x2406,0xf707,
212 0x6009,0x000c,0x000d,
213 0x0310,0xff12,0xc813,0x4014,0xff15,0x2416,0xc317,0xff18,
214 0x001a,0x221b,0x001d,
// Descriptor for one Cirrus extended video mode: the generic vgamode_s
// info, the value for the Cirrus "hidden DAC" register, and the three
// register-programming lists.
// NOTE(review): the struct's first field -- a mode number, referenced
// as table_g->mode by cirrus_get_modeentry -- is not visible in this
// extract, nor is the closing brace.
218 struct cirrus_mode_s {
220 struct vgamode_s info;
222 u16 hidden_dac; /* 0x3c6 */
223 u16 *seq; /* 0x3c4 */
224 u16 *graph; /* 0x3ce */
225 u16 *crtc; /* 0x3d4 */
// Master table of supported Cirrus extended modes.  Each initializer
// appears to be: BIOS mode number, vgamode_s info {memmodel, width,
// height, depth, 8, 16, framebuffer segment}, hidden-DAC value, then
// the sequencer/graphics/CRTC lists.  (The 8,16 entries are presumably
// character-cell width/height -- not confirmable from this extract.)
// Note the 15-bpp and 16-bpp variants of a resolution share register
// tables and differ only in the hidden-DAC value (0xf0 vs 0xe1).
// NOTE(review): the table's closing brace is not visible here.
228 static struct cirrus_mode_s cirrus_modes[] VAR16 = {
229 {0x5f,{MM_PACKED,640,480,8,8,16,SEG_GRAPH},0x00,
230 cseq_640x480x8,cgraph_svgacolor,ccrtc_640x480x8},
231 {0x64,{MM_DIRECT,640,480,16,8,16,SEG_GRAPH},0xe1,
232 cseq_640x480x16,cgraph_svgacolor,ccrtc_640x480x16},
233 {0x66,{MM_DIRECT,640,480,15,8,16,SEG_GRAPH},0xf0,
234 cseq_640x480x16,cgraph_svgacolor,ccrtc_640x480x16},
235 {0x71,{MM_DIRECT,640,480,24,8,16,SEG_GRAPH},0xe5,
236 cseq_640x480x24,cgraph_svgacolor,ccrtc_640x480x24},
238 {0x5c,{MM_PACKED,800,600,8,8,16,SEG_GRAPH},0x00,
239 cseq_800x600x8,cgraph_svgacolor,ccrtc_800x600x8},
240 {0x65,{MM_DIRECT,800,600,16,8,16,SEG_GRAPH},0xe1,
241 cseq_800x600x16,cgraph_svgacolor,ccrtc_800x600x16},
242 {0x67,{MM_DIRECT,800,600,15,8,16,SEG_GRAPH},0xf0,
243 cseq_800x600x16,cgraph_svgacolor,ccrtc_800x600x16},
245 {0x60,{MM_PACKED,1024,768,8,8,16,SEG_GRAPH},0x00,
246 cseq_1024x768x8,cgraph_svgacolor,ccrtc_1024x768x8},
247 {0x74,{MM_DIRECT,1024,768,16,8,16,SEG_GRAPH},0xe1,
248 cseq_1024x768x16,cgraph_svgacolor,ccrtc_1024x768x16},
249 {0x68,{MM_DIRECT,1024,768,15,8,16,SEG_GRAPH},0xf0,
250 cseq_1024x768x16,cgraph_svgacolor,ccrtc_1024x768x16},
252 {0x78,{MM_DIRECT,800,600,24,8,16,SEG_GRAPH},0xe5,
253 cseq_800x600x24,cgraph_svgacolor,ccrtc_800x600x24},
254 {0x79,{MM_DIRECT,1024,768,24,8,16,SEG_GRAPH},0xe5,
255 cseq_1024x768x24,cgraph_svgacolor,ccrtc_1024x768x24},
257 {0x6d,{MM_PACKED,1280,1024,8,8,16,SEG_GRAPH},0x00,
258 cseq_1280x1024x8,cgraph_svgacolor,ccrtc_1280x1024x8},
259 {0x69,{MM_DIRECT,1280,1024,15,8,16,SEG_GRAPH},0xf0,
260 cseq_1280x1024x16,cgraph_svgacolor,ccrtc_1280x1024x16},
261 {0x75,{MM_DIRECT,1280,1024,16,8,16,SEG_GRAPH},0xe1,
262 cseq_1280x1024x16,cgraph_svgacolor,ccrtc_1280x1024x16},
264 {0x7b,{MM_PACKED,1600,1200,8,8,16,SEG_GRAPH},0x00,
265 cseq_1600x1200x8,cgraph_svgacolor,ccrtc_1600x1200x8},
// Pseudo-mode entry used by clext_set_mode to restore standard VGA
// register settings (via the *_vga lists) before delegating to stdvga.
268 static struct cirrus_mode_s mode_switchback VAR16 =
269 {0xfe,{0xff},0,cseq_vga,cgraph_vga,ccrtc_vga};
// VESA-mode-number -> Cirrus-mode-number mapping table; the struct
// declaration and the table's contents are not visible in this
// extract (fields vesamode/mode are read by cirrus_vesamode_to_mode).
273 } cirrus_vesa_modelist[] VAR16 = {
307 /****************************************************************
309 ****************************************************************/
// Report whether vmode_g points into the cirrus_modes[] table, i.e.
// the mode is a Cirrus extended mode rather than a stdvga mode.
// (Return type and braces are not visible in this extract.)
312 is_cirrus_mode(struct vgamode_s *vmode_g)
314 return (vmode_g >= &cirrus_modes[0].info
315 && vmode_g <= &cirrus_modes[ARRAY_SIZE(cirrus_modes)-1].info);
// Translate a VESA mode number into the internal Cirrus BIOS mode
// number via cirrus_vesa_modelist[].  The not-found return path is
// outside this extract.
319 cirrus_vesamode_to_mode(u16 vesamode)
322 for (i=0; i<ARRAY_SIZE(cirrus_vesa_modelist); i++)
323 if (GET_GLOBAL(cirrus_vesa_modelist[i].vesamode) == vesamode)
324 return GET_GLOBAL(cirrus_vesa_modelist[i].mode);
// Find the cirrus_mode_s entry for a legacy or VESA mode number: a
// VESA number is first translated to the internal mode number, then
// cirrus_modes[] is scanned linearly.  Comparison/return logic after
// reading tmode is outside this extract.
328 static struct cirrus_mode_s *
329 cirrus_get_modeentry(int mode)
331 int transmode = cirrus_vesamode_to_mode(mode);
334 struct cirrus_mode_s *table_g = cirrus_modes;
335 while (table_g < &cirrus_modes[ARRAY_SIZE(cirrus_modes)]) {
336 u16 tmode = GET_GLOBAL(table_g->mode);
// Mode lookup entry point: return the Cirrus extended mode's info if
// one matches, otherwise fall back to the standard VGA mode tables.
345 clext_find_mode(int mode)
347 struct cirrus_mode_s *table_g = cirrus_get_modeentry(mode);
349 return &table_g->info;
350 return stdvga_find_mode(mode);
// Write a 0xffff-terminated register list (format described at the
// *_vga tables above) to the given index/data port pair.  The loop
// body and terminator check are outside this extract.
354 cirrus_switch_mode_setregs(u16 *data, u16 port)
357 u16 val = GET_GLOBAL(*data);
// Program the hardware for one Cirrus mode: unlock the extended
// registers, write the sequencer/graphics/CRTC lists, load the hidden
// DAC register, and update the attribute controller's graphics bit.
366 cirrus_switch_mode(struct cirrus_mode_s *table)
368 // Unlock cirrus special
369 stdvga_sequ_write(0x06, 0x12);
370 cirrus_switch_mode_setregs(GET_GLOBAL(table->seq), VGAREG_SEQU_ADDRESS);
371 cirrus_switch_mode_setregs(GET_GLOBAL(table->graph), VGAREG_GRDC_ADDRESS);
372 cirrus_switch_mode_setregs(GET_GLOBAL(table->crtc), stdvga_get_crtc());
// Reading the pel mask (0x3c6) four times in a row arms the Cirrus
// "hidden DAC" register, so the following write lands there instead
// of the normal pel mask.
374 stdvga_pelmask_write(0x00);
375 stdvga_pelmask_read();
376 stdvga_pelmask_read();
377 stdvga_pelmask_read();
378 stdvga_pelmask_read();
379 stdvga_pelmask_write(GET_GLOBAL(table->hidden_dac));
380 stdvga_pelmask_write(0xff);
// Choose the attribute-controller reg 0x10 bit-0 setting from the
// memory model.  NOTE(review): the statements assigning 'on' in the
// branches below are missing from this extract.
382 u8 memmodel = GET_GLOBAL(table->info.memmodel);
384 if (memmodel == MM_PLANAR)
386 else if (memmodel != MM_TEXT)
388 stdvga_attr_mask(0x10, 0x01, on);
// Determine installed video memory from sequencer reg 0x0f: bits 3-4
// encode the DRAM bank size, with bit 7 as an extension for the
// largest configuration.  The caller (clext_init) treats the result
// as a count of 64KB units.  Return paths are outside this extract.
392 cirrus_get_memsize(void)
394 // get DRAM band width
395 u8 v = stdvga_sequ_read(0x0f);
396 u8 x = (v >> 3) & 0x03;
397 if (x == 0x03 && v & 0x80)
// VBE bank-window accessors: graphics-controller regs 0x09 and 0x0a
// hold the bank offsets for windows 0 and 1 respectively (window + 9).
// Parameter validation for 'window' is outside this extract.
404 clext_get_window(struct vgamode_s *vmode_g, int window)
406 return stdvga_grdc_read(window + 9);
410 clext_set_window(struct vgamode_s *vmode_g, int window, int val)
414 stdvga_grdc_write(window + 9, val);
// Set bit 0x20 of graphics-controller reg 0x0b -- per the function
// name this selects 16KB (rather than 4KB) bank granularity.
419 cirrus_enable_16k_granularity(void)
421 stdvga_grdc_mask(0x0b, 0x00, 0x20);
// Fill all of video memory with 'param' by stepping the GR9 bank
// register through every 16KB bank, then reset the bank to 0.
// NOTE(review): 'count' is a u8, so it wraps to 0 when total memory
// reaches 4MB (256 banks) and the loop would clear nothing; a u16
// would be safer.
425 cirrus_clear_vram(u16 param)
427 cirrus_enable_16k_granularity();
428 u8 count = GET_GLOBAL(VBE_total_memory) / (16 * 1024);
430 for (i=0; i<count; i++) {
431 stdvga_grdc_write(0x09, i);
432 memset16_far(SEG_GRAPH, 0, param, 16 * 1024);
434 stdvga_grdc_write(0x09, 0x00);
// Mode-set entry point.  For non-Cirrus modes: restore the standard
// VGA register state and delegate to stdvga_set_mode.  For Cirrus
// modes: program the chip, select banked (16KB-granularity) access
// unless a linear framebuffer was requested, and optionally clear
// video memory.  The tail of the function is outside this extract.
438 clext_set_mode(struct vgamode_s *vmode_g, int flags)
440 if (!is_cirrus_mode(vmode_g)) {
441 cirrus_switch_mode(&mode_switchback);
442 dprintf(1, "cirrus mode switch regular\n");
443 return stdvga_set_mode(vmode_g, flags);
445 struct cirrus_mode_s *table_g = container_of(
446 vmode_g, struct cirrus_mode_s, info);
447 cirrus_switch_mode(table_g);
448 if (!(flags & MF_LINEARFB))
449 cirrus_enable_16k_granularity();
450 if (!(flags & MF_NOCLEARMEM))
451 cirrus_clear_vram(0);
// Chip-detect fragment (the enclosing function's header is outside
// this extract): writing 0x92 to the unlock register SR6 reads back
// as 0x12 only on Cirrus hardware.
458 stdvga_sequ_write(0x06, 0x92);
459 return stdvga_sequ_read(0x06) == 0x12;
463 /****************************************************************
465 ****************************************************************/
// Handlers for the Cirrus extended BIOS services (int 0x10, ah=0x12).
// Most bodies are only partially visible in this extract.
468 clext_101280(struct bregs *regs)
// CRTC reg 0x27 is the Cirrus chip-ID register.
470 u8 v = stdvga_crtc_read(stdvga_get_crtc(), 0x27);
484 clext_101281(struct bregs *regs)
491 clext_101282(struct bregs *regs)
493 regs->al = stdvga_crtc_read(stdvga_get_crtc(), 0x27) & 0x03;
498 clext_101285(struct bregs *regs)
// Report installed memory in 64KB units.
500 regs->al = GET_GLOBAL(VBE_total_memory) / (64*1024);
504 clext_10129a(struct bregs *regs)
510 extern void a0h_callback(void);
512 // fatal: not implemented yet
519 clext_1012a0(struct bregs *regs)
521 struct cirrus_mode_s *table_g = cirrus_get_modeentry(regs->al & 0x7f);
522 regs->ah = (table_g ? 1 : 0);
// NOTE(review): storing the same truncated (u32) pointer value into
// di/ds/es/bx looks dubious for a real-mode far pointer -- verify.
524 regs->di = regs->ds = regs->es = regs->bx = (u32)a0h_callback;
528 clext_1012a1(struct bregs *regs)
530 regs->bx = 0x0e00; // IBM 8512/8513, color
534 clext_1012a2(struct bregs *regs)
536 regs->al = 0x07; // HSync 31.5 - 64.0 kHz
540 clext_1012ae(struct bregs *regs)
542 regs->al = 0x01; // High Refresh 75Hz
546 clext_1012XX(struct bregs *regs)
// Dispatcher for the subfunctions above (presumably switching on
// regs->bl; the switch header is outside this extract).
552 clext_1012(struct bregs *regs)
555 case 0x80: clext_101280(regs); break;
556 case 0x81: clext_101281(regs); break;
557 case 0x82: clext_101282(regs); break;
558 case 0x85: clext_101285(regs); break;
559 case 0x9a: clext_10129a(regs); break;
560 case 0xa0: clext_1012a0(regs); break;
561 case 0xa1: clext_1012a1(regs); break;
562 case 0xa2: clext_1012a2(regs); break;
563 case 0xae: clext_1012ae(regs); break;
564 default: clext_1012XX(regs); break;
569 /****************************************************************
571 ****************************************************************/
// Build the VBE mode list at seg:dest (bounded by 'last'): the Cirrus
// VESA mode numbers first, then the standard VGA modes are appended
// by stdvga_list_modes.
574 clext_list_modes(u16 seg, u16 *dest, u16 *last)
577 for (i=0; i<ARRAY_SIZE(cirrus_vesa_modelist) && dest<last; i++) {
578 SET_FARVAR(seg, *dest, GET_GLOBAL(cirrus_vesa_modelist[i].vesamode));
581 stdvga_list_modes(seg, dest, last);
// Derive bytes-per-pixel of the current mode from the low mode-
// extension bits of sequencer reg 0x07; the decode logic is outside
// this extract.
585 cirrus_get_bpp_bytes(void)
587 u8 v = stdvga_sequ_read(0x07) & 0x0e;
// Program the scanline pitch: CR13 holds (bytes / 8), and CR1B bit 4
// carries bit 8 of that value.
597 cirrus_set_line_offset(u16 new_line_offset)
599 new_line_offset /= 8;
600 u16 crtc_addr = stdvga_get_crtc();
601 stdvga_crtc_write(crtc_addr, 0x13, new_line_offset);
602 stdvga_crtc_mask(crtc_addr, 0x1b, 0x10, (new_line_offset & 0x100) >> 4);
// Inverse of cirrus_set_line_offset: reassemble the 9-bit pitch value
// from CR13 and CR1B bit 4, and scale back to bytes.
606 cirrus_get_line_offset(void)
608 u16 crtc_addr = stdvga_get_crtc();
609 u8 reg13 = stdvga_crtc_read(crtc_addr, 0x13);
610 u8 reg1b = stdvga_crtc_read(crtc_addr, 0x1b);
611 return (((reg1b & 0x10) << 4) + reg13) * 8;
// Program the display start address (callers pass it in 4-byte units;
// see the "/ 4" in cirrus_vesa_07h): CR0D/CR0C take the low 16 bits,
// with extension bits placed in CR1B and CR1D.
// NOTE(review): the masks used here (0x0800, 0x0100, 0x0600) do not
// obviously line up with the bit positions cirrus_get_start_addr
// reads back (bits 16-19); worth double-checking against the
// CL-GD544x datasheet.
615 cirrus_set_start_addr(u32 addr)
617 u16 crtc_addr = stdvga_get_crtc();
618 stdvga_crtc_write(crtc_addr, 0x0d, addr);
619 stdvga_crtc_write(crtc_addr, 0x0c, addr >> 8);
620 stdvga_crtc_mask(crtc_addr, 0x1d, 0x80, (addr & 0x0800) >> 4);
621 stdvga_crtc_mask(crtc_addr, 0x1b, 0x0d
622 , ((addr & 0x0100) >> 8) | ((addr & 0x0600) >> 7));
// Read back the 20-bit display start address: CR0D/CR0C give bits
// 0-15, CR1B bit 0 gives bit 16, CR1B bits 2-3 give bits 17-18, and
// CR1D bit 7 gives bit 19.
626 cirrus_get_start_addr(void)
628 u16 crtc_addr = stdvga_get_crtc();
629 u8 b2 = stdvga_crtc_read(crtc_addr, 0x0c);
630 u8 b1 = stdvga_crtc_read(crtc_addr, 0x0d);
631 u8 b3 = stdvga_crtc_read(crtc_addr, 0x1b);
632 u8 b4 = stdvga_crtc_read(crtc_addr, 0x1d);
633 return (b1 | (b2<<8) | ((b3 & 0x01) << 16) | ((b3 & 0x0c) << 15)
634 | ((b4 & 0x80) << 12));
// VBE function 06h -- set/get logical scan line length.
// bl=0: set length given in pixels (cx); bl=2: set length in bytes;
// the get path reports the pitch in cx (pixels) and the number of
// scanlines that fit in video memory in dx.
638 cirrus_vesa_06h(struct bregs *regs)
645 if (regs->bl == 0x00) {
646 cirrus_set_line_offset(cirrus_get_bpp_bytes() * regs->cx);
647 } else if (regs->bl == 0x02) {
648 cirrus_set_line_offset(regs->cx);
651 u32 v = cirrus_get_line_offset();
652 regs->cx = v / cirrus_get_bpp_bytes();
654 regs->dx = GET_GLOBAL(VBE_total_memory) / v;
// VBE function 07h -- set/get display start.
// bl=0x00/0x80: set from pixel coordinates (cx=x, dx=y); the start
// address register stores the value in 4-byte units.  bl=0x01: return
// the current position.
659 cirrus_vesa_07h(struct bregs *regs)
661 if (regs->bl == 0x80 || regs->bl == 0x00) {
662 u32 addr = (cirrus_get_bpp_bytes() * regs->cx
663 + cirrus_get_line_offset() * regs->dx);
664 cirrus_set_start_addr(addr / 4);
665 } else if (regs->bl == 0x01) {
666 u32 addr = cirrus_get_start_addr() * 4;
667 u32 linelength = cirrus_get_line_offset();
// NOTE(review): divides by linelength without a zero check.
668 regs->dx = addr / linelength;
669 regs->cx = (addr % linelength) / cirrus_get_bpp_bytes();
// VBE function 10h -- appears to track a per-display state byte (bh)
// in the BDA vbe_flag field: bl=1 stores it, bl=2 reads it back; the
// bl=0 branch body is outside this extract.
679 cirrus_vesa_10h(struct bregs *regs)
681 if (regs->bl == 0x00) {
686 if (regs->bl == 0x01) {
687 SET_BDA(vbe_flag, regs->bh);
691 if (regs->bl == 0x02) {
692 regs->bh = GET_BDA(vbe_flag);
// Dispatch the Cirrus-specific VBE calls; anything else falls through
// to cirrus_vesa_not_handled (whose body is outside this extract).
700 cirrus_vesa_not_handled(struct bregs *regs)
707 cirrus_vesa(struct bregs *regs)
710 case 0x06: cirrus_vesa_06h(regs); break;
711 case 0x07: cirrus_vesa_07h(regs); break;
712 case 0x10: cirrus_vesa_10h(regs); break;
713 default: cirrus_vesa_not_handled(regs); break;
718 /****************************************************************
720 ****************************************************************/
// One-time setup (function header and tail are outside this extract):
// initialize stdvga, verify a Cirrus chip is present, read the linear
// framebuffer address from PCI BAR0 when running as a PCI device,
// record the memory size in the VBE globals (cirrus_get_memsize
// returns 64KB units), and reset the sequencer/graphics extension
// registers to known values.
725 int ret = stdvga_init();
729 dprintf(1, "cirrus init\n");
730 if (! cirrus_check())
732 dprintf(1, "cirrus init 2\n");
735 int bdf = GET_GLOBAL(VgaBDF);
736 if (CONFIG_VGA_PCI && bdf >= 0)
737 lfb_addr = (pci_config_readl(bdf, PCI_BASE_ADDRESS_0)
738 & PCI_BASE_ADDRESS_MEM_MASK);
739 SET_VGA(VBE_framebuffer, lfb_addr);
740 u16 totalmem = cirrus_get_memsize();
741 SET_VGA(VBE_total_memory, totalmem * 64 * 1024);
742 SET_VGA(VBE_win_granularity, 16);
745 stdvga_sequ_write(0x0a, stdvga_sequ_read(0x0f) & 0x18);
747 stdvga_sequ_write(0x07, 0x00);
749 stdvga_grdc_write(0x31, 0x04);
750 stdvga_grdc_write(0x31, 0x00);