| author | j_mayer <j_mayer@c046a42c-6fe2-441c-8c8c-71466251a162> | 2007-10-04 00:51:58 +0000 |
|---|---|---|
| committer | j_mayer <j_mayer@c046a42c-6fe2-441c-8c8c-71466251a162> | 2007-10-04 00:51:58 +0000 |
| commit | d63001d11434fc6bf217255b51f625a75d05fb35 (patch) | |
| tree | f9c98fc52c58140c19c77ff12c4089c215a22e76 /target-ppc/op_helper_mem.h | |
| parent | 064034211a65bb602a32ccee18d92109eb2cd656 (diff) | |
Make the PowerPC cache line size implementation-dependent.
Implement a tunable dcbz cache line size for the PowerPC 970.
Make the hardware reset vector implementation-dependent.
git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@3321 c046a42c-6fe2-441c-8c8c-71466251a162
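In practical terms, dcbz zeroes one data cache line at the effective address, and with this change the line size comes from the CPU model (env->dcache_line_size) instead of a build-time constant, with the 970's HID5 able to force a 32-byte dcbz. The following standalone sketch is purely illustrative: the buffer, function name, and explicit alignment are assumptions made here for a self-contained example, whereas the real helper below does the same zeroing with unrolled glue(stl, MEMSUFFIX) stores.

#include <stdint.h>
#include <string.h>

/* Illustrative only: zero the whole data cache line containing 'addr'.
 * 'ram' and 'dcbz_zero_line' are hypothetical names; the real helper in
 * this patch performs the same zeroing with unrolled 32-bit guest stores
 * and takes the line size from env->dcache_line_size. */
static void dcbz_zero_line(uint8_t *ram, uint64_t addr, unsigned dcache_line_size)
{
    uint64_t line = addr & ~(uint64_t)(dcache_line_size - 1);

    memset(ram + line, 0, dcache_line_size);
}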
Diffstat (limited to 'target-ppc/op_helper_mem.h')
-rw-r--r-- | target-ppc/op_helper_mem.h | 102 |
1 file changed, 98 insertions, 4 deletions
diff --git a/target-ppc/op_helper_mem.h b/target-ppc/op_helper_mem.h
index e8cca09e92..f5cb6970c4 100644
--- a/target-ppc/op_helper_mem.h
+++ b/target-ppc/op_helper_mem.h
@@ -252,8 +252,9 @@ void glue(do_icbi, MEMSUFFIX) (void)
      * do the load "by hand".
      */
     tmp = glue(ldl, MEMSUFFIX)((uint32_t)T0);
-    T0 &= ~(ICACHE_LINE_SIZE - 1);
-    tb_invalidate_page_range((uint32_t)T0, (uint32_t)(T0 + ICACHE_LINE_SIZE));
+    T0 &= ~(env->icache_line_size - 1);
+    tb_invalidate_page_range((uint32_t)T0,
+                             (uint32_t)(T0 + env->icache_line_size));
 }
 
 #if defined(TARGET_PPC64)
@@ -266,8 +267,101 @@ void glue(do_icbi_64, MEMSUFFIX) (void)
      * do the load "by hand".
      */
     tmp = glue(ldq, MEMSUFFIX)((uint64_t)T0);
-    T0 &= ~(ICACHE_LINE_SIZE - 1);
-    tb_invalidate_page_range((uint64_t)T0, (uint64_t)(T0 + ICACHE_LINE_SIZE));
+    T0 &= ~(env->icache_line_size - 1);
+    tb_invalidate_page_range((uint64_t)T0,
+                             (uint64_t)(T0 + env->icache_line_size));
+}
+#endif
+
+void glue(do_dcbz, MEMSUFFIX) (void)
+{
+    int dcache_line_size = env->dcache_line_size;
+
+    /* XXX: should be 970 specific (?) */
+    if (((env->spr[SPR_970_HID5] >> 7) & 0x3) == 1)
+        dcache_line_size = 32;
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
+    glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
+    if (dcache_line_size >= 64) {
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
+        glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
+        if (dcache_line_size >= 128) {
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
+            glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
+        }
+    }
+}
+
+#if defined(TARGET_PPC64)
+void glue(do_dcbz_64, MEMSUFFIX) (void)
+{
+    int dcache_line_size = env->dcache_line_size;
+
+    /* XXX: should be 970 specific (?) */
+    if (((env->spr[SPR_970_HID5] >> 7) & 0x3) == 1)
+        dcache_line_size = 32;
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
+    glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
+    if (dcache_line_size >= 64) {
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
+        glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
+        if (dcache_line_size >= 128) {
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
+            glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
+        }
+    }
 }
 #endif
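One detail worth calling out in the new helpers is the line-size selection: the CPU model supplies env->dcache_line_size, but on the 970 a HID5 field (bits 7..8 as read by this patch) can force dcbz to operate on 32 bytes. A small sketch of that selection, with the function name chosen here purely for illustration:

#include <stdint.h>

/* Hypothetical helper mirroring the check in do_dcbz above: if the 970's
 * HID5 dcbz-size field (bits 7..8, encoding as read by this patch) is 1,
 * dcbz only zeroes 32 bytes; otherwise the CPU model's line size is used. */
static int effective_dcbz_line_size(uint32_t hid5, int dcache_line_size)
{
    if (((hid5 >> 7) & 0x3) == 1) {
        return 32;
    }
    return dcache_line_size;
}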