| author | Stuart Brady <sdbrady@ntlworld.com> | 2009-07-12 20:09:40 +0100 |
|---|---|---|
| committer | Anthony Liguori <aliguori@us.ibm.com> | 2009-07-16 17:28:50 -0500 |
| commit | 9f6839d4b1721524a10c77768f73697cf1724ce5 (patch) | |
| tree | 8b2fdff39f976a5fa84fcf017d41f288e0fe7830 | |
| parent | 1db6947dafa7f33a309130ccbf461748adac6da0 (diff) | |
Remove dead i386 assembly code from softmmu_header.h
This patch removes dead i386 assembly code from softmmu_header.h.
The code is conditional on ASM_SOFTMMU, which is never defined.
Optimisation for the fast path is already handled by tcg_out_qemu_ld()
and tcg_out_qemu_st(), so there seems to be little need for this code.
Signed-off-by: Stuart Brady <stuart.brady@gmail.com>
Signed-off-by: Anthony Liguori <aliguori@us.ibm.com>
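The assembly being deleted open-codes the standard softmmu TLB fast path that the generic C macros further down in softmmu_header.h (and, at translation time, tcg_out_qemu_ld()/tcg_out_qemu_st() in the i386 TCG backend) already provide: index the per-MMU-mode TLB with address bits, compare the stored tag, then either load through the cached guest-to-host address offset or fall back to the slow-path MMU helper. The sketch below is purely illustrative, with simplified types and a hypothetical slow_ldl_mmu() standing in for the glue()-generated __ld*_mmu helper; it is not the actual QEMU code.

```c
#include <stdint.h>

/* Illustrative constants; the real values come from the target configuration. */
#define CPU_TLB_SIZE     256
#define TARGET_PAGE_BITS 12
#define TARGET_PAGE_MASK (~((1u << TARGET_PAGE_BITS) - 1))
#define DATA_SIZE        4
#define NB_MMU_MODES     2

typedef struct {
    uint32_t addr_read;    /* tag: page address the entry is valid for */
    uint32_t addr_write;
    uint32_t addr_code;
    uintptr_t addend;      /* guest address + addend == host address */
} tlb_entry_t;

/* Stand-in for env->tlb_table[][]; the removed asm reaches it through the
   env pointer kept in %ebp plus an offsetof() constant. */
extern tlb_entry_t tlb_table[NB_MMU_MODES][CPU_TLB_SIZE];

/* Hypothetical slow-path helper, standing in for the __ld*_mmu routine. */
uint32_t slow_ldl_mmu(uint32_t addr, int mmu_idx);

static inline uint32_t ldl_fastpath(uint32_t addr, int mmu_idx)
{
    int idx = (addr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1);
    tlb_entry_t *e = &tlb_table[mmu_idx][idx];

    /* Tag compare: the (DATA_SIZE - 1) bits also push accesses that are not
       naturally aligned (and so might cross a page) onto the slow path. */
    if (e->addr_read == (addr & (TARGET_PAGE_MASK | (DATA_SIZE - 1)))) {
        return *(uint32_t *)(uintptr_t)(addr + e->addend);   /* TLB hit */
    }
    return slow_ldl_mmu(addr, mmu_idx);                      /* TLB miss */
}
```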
-rw-r--r-- | softmmu_header.h | 146
1 files changed, 0 insertions, 146 deletions
diff --git a/softmmu_header.h b/softmmu_header.h
index ae737ff9da..6a36e01dc1 100644
--- a/softmmu_header.h
+++ b/softmmu_header.h
@@ -69,150 +69,6 @@
 #define ADDR_READ addr_read
 #endif
 
-#if (DATA_SIZE <= 4) && (TARGET_LONG_BITS == 32) && defined(__i386__) && \
-    (ACCESS_TYPE < NB_MMU_MODES) && defined(ASM_SOFTMMU)
-
-static inline RES_TYPE glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr)
-{
-    int res;
-
-    asm volatile ("movl %1, %%edx\n"
-                  "movl %1, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %1, %%eax\n"
-                  "je 1f\n"
-                  "movl %6, %%edx\n"
-                  "call %7\n"
-                  "movl %%eax, %0\n"
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 12(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movzbl (%%eax), %0\n"
-#elif DATA_SIZE == 2
-                  "movzwl (%%eax), %0\n"
-#elif DATA_SIZE == 4
-                  "movl (%%eax), %0\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  : "=r" (res)
-                  : "r" (ptr),
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_read)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__ld, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-    return res;
-}
-
-#if DATA_SIZE <= 2
-static inline int glue(glue(lds, SUFFIX), MEMSUFFIX)(target_ulong ptr)
-{
-    int res;
-
-    asm volatile ("movl %1, %%edx\n"
-                  "movl %1, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %1, %%eax\n"
-                  "je 1f\n"
-                  "movl %6, %%edx\n"
-                  "call %7\n"
-#if DATA_SIZE == 1
-                  "movsbl %%al, %0\n"
-#elif DATA_SIZE == 2
-                  "movswl %%ax, %0\n"
-#else
-#error unsupported size
-#endif
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 12(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movsbl (%%eax), %0\n"
-#elif DATA_SIZE == 2
-                  "movswl (%%eax), %0\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  : "=r" (res)
-                  : "r" (ptr),
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_read)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__ld, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-    return res;
-}
-#endif
-
-static inline void glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE v)
-{
-    asm volatile ("movl %0, %%edx\n"
-                  "movl %0, %%eax\n"
-                  "shrl %3, %%edx\n"
-                  "andl %4, %%eax\n"
-                  "andl %2, %%edx\n"
-                  "leal %5(%%edx, %%ebp), %%edx\n"
-                  "cmpl (%%edx), %%eax\n"
-                  "movl %0, %%eax\n"
-                  "je 1f\n"
-#if DATA_SIZE == 1
-                  "movzbl %b1, %%edx\n"
-#elif DATA_SIZE == 2
-                  "movzwl %w1, %%edx\n"
-#elif DATA_SIZE == 4
-                  "movl %1, %%edx\n"
-#else
-#error unsupported size
-#endif
-                  "movl %6, %%ecx\n"
-                  "call %7\n"
-                  "jmp 2f\n"
-                  "1:\n"
-                  "addl 8(%%edx), %%eax\n"
-#if DATA_SIZE == 1
-                  "movb %b1, (%%eax)\n"
-#elif DATA_SIZE == 2
-                  "movw %w1, (%%eax)\n"
-#elif DATA_SIZE == 4
-                  "movl %1, (%%eax)\n"
-#else
-#error unsupported size
-#endif
-                  "2:\n"
-                  :
-                  : "r" (ptr),
-#if DATA_SIZE == 1
-                    "q" (v),
-#else
-                    "r" (v),
-#endif
-                    "i" ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS),
-                    "i" (TARGET_PAGE_MASK | (DATA_SIZE - 1)),
-                    "m" (*(uint32_t *)offsetof(CPUState, tlb_table[CPU_MMU_INDEX][0].addr_write)),
-                    "i" (CPU_MMU_INDEX),
-                    "m" (*(uint8_t *)&glue(glue(__st, SUFFIX), MMUSUFFIX))
-                  : "%eax", "%ecx", "%edx", "memory", "cc");
-}
-
-#else
-
 /* generic load/store macros */
 
 static inline RES_TYPE glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr)
@@ -283,8 +139,6 @@ static inline void glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE
 
 #endif /* ACCESS_TYPE != (NB_MMU_MODES + 1) */
 
-#endif /* !asm */
-
 #if ACCESS_TYPE != (NB_MMU_MODES + 1)
 
 #if DATA_SIZE == 8
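One detail of the removed fast path worth noting: the shrl %3 / andl %2 pair does not compute a TLB index and then scale it by the entry size; it shifts the address right by (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS) and masks with ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS), which yields the byte offset of the TLB entry directly, ready for the leal that adds it to the env pointer kept in %ebp. A small sketch of that identity, using illustrative constants (the real values depend on the target and host configuration):

```c
#include <assert.h>
#include <stdint.h>

/* Illustrative values: 4 KiB target pages, 16-byte TLB entries, 256 entries. */
#define TARGET_PAGE_BITS   12
#define CPU_TLB_ENTRY_BITS 4
#define CPU_TLB_SIZE       256

/* Byte offset of the TLB entry, as computed by the removed "shrl; andl". */
static uint32_t tlb_offset_fused(uint32_t addr)
{
    return (addr >> (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS)) &
           ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
}

/* The same offset computed the obvious way: index first, then scale. */
static uint32_t tlb_offset_naive(uint32_t addr)
{
    uint32_t idx = (addr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1);
    return idx << CPU_TLB_ENTRY_BITS;
}

int main(void)
{
    /* Spot-check the equivalence across the 32-bit address space. */
    for (uint64_t a = 0; a <= UINT32_MAX; a += 0x11111) {
        assert(tlb_offset_fused((uint32_t)a) == tlb_offset_naive((uint32_t)a));
    }
    return 0;
}
```

Folding the scaling into the mask saves an instruction on the hot path; it is essentially the same address-hashing sequence the TCG backends emit inline, which is why dropping this hand-written version loses nothing.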