From 439229c7cb97f6c4cddd3965c3e9d2b8319fe83c Mon Sep 17 00:00:00 2001
From: Markus Armbruster
Date: Fri, 4 Nov 2011 10:38:29 +0100
Subject: console: Fix rendering of VGA underline

vga_putcharxy()'s underline code sets font_data to 0xffff instead of
0xff.  vga_putcharxy() then reads dmask16[0xffff >> 4] and
dmask4[0xffff >> 6].  In practice, these out-of-bounds subscripts
"only" put a few crap bits into the display surface.  For 32 bit
pixels, there's no array access.  font_data's extra bits go straight
into the display surface.

Broken when commit 6d6f7c28 implemented underline.

Spotted by Coverity.

Signed-off-by: Markus Armbruster
Signed-off-by: Anthony Liguori
---
 console.c | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/console.c b/console.c
index e43de92c00..f6fe44195b 100644
--- a/console.c
+++ b/console.c
@@ -467,7 +467,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask16[(font_data >> 4)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask16[(font_data >> 0) & 0xf] & xorcol) ^ bgcol;
@@ -480,7 +480,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask4[(font_data >> 6)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask4[(font_data >> 4) & 3] & xorcol) ^ bgcol;
@@ -493,7 +493,7 @@ static void vga_putcharxy(DisplayState *ds, int x, int y, int ch,
         for(i = 0; i < FONT_HEIGHT; i++) {
             font_data = *font_ptr++;
             if (t_attrib->uline && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (-((font_data >> 7)) & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (-((font_data >> 6) & 1) & xorcol) ^ bgcol;
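
To see how far past the tables the buggy value reaches, here is a
minimal standalone C sketch (not part of the patch; it assumes
dmask16[] has 16 entries and dmask4[] has 4, as the nibble and
2-bit subscripts in vga_putcharxy() imply):

    /* Standalone sketch: index arithmetic for the underline bug.
     * Assumption: dmask16[] holds 16 entries (4-bit index) and
     * dmask4[] holds 4 entries (2-bit index). */
    #include <stdio.h>

    int main(void)
    {
        unsigned font_data = 0xFFFF;    /* buggy underline value */

        /* 0xFFFF >> 4 == 4095 and 0xFFFF >> 6 == 1023, both far
         * outside the valid index ranges. */
        printf("dmask16 index %u (valid 0..15)\n", font_data >> 4);
        printf("dmask4  index %u (valid 0..3)\n",  font_data >> 6);

        font_data = 0xFF;               /* fixed value: all 8 pixels set */

        /* 0xFF >> 4 == 15 and 0xFF >> 6 == 3: the all-ones table
         * entries, drawing a solid underline row. */
        printf("dmask16 index %u\n", font_data >> 4);
        printf("dmask4  index %u\n",  font_data >> 6);
        return 0;
    }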