Diffstat (limited to 'tests/tcg/xtensa/test_lsc.S')
-rw-r--r--  tests/tcg/xtensa/test_lsc.S | 170
1 file changed, 157 insertions(+), 13 deletions(-)
diff --git a/tests/tcg/xtensa/test_lsc.S b/tests/tcg/xtensa/test_lsc.S
index 0578bf19e7..348822bdd3 100644
--- a/tests/tcg/xtensa/test_lsc.S
+++ b/tests/tcg/xtensa/test_lsc.S
@@ -1,4 +1,5 @@
 #include "macros.inc"
+#include "fpu.h"
 
 test_suite lsc
 
@@ -9,9 +10,14 @@ test lsi
     wsr     a2, cpenable
 
     movi    a2, 1f
-    lsi     f0, a2, 0
     lsi     f1, a2, 4
+#if DFPU
+    lsi     f2, a2, 8
+    lsip    f0, a2, 8
+#else
+    lsi     f0, a2, 0
     lsiu    f2, a2, 8
+#endif
     movi    a3, 1f + 8
     assert  eq, a2, a3
     rfr     a2, f0
@@ -34,13 +40,18 @@ test ssi
     movi    a2, 1f
     movi    a3, 0x40800000
     wfr     f3, a3
-    ssi     f3, a2, 0
     movi    a3, 0x40a00000
     wfr     f4, a3
-    ssi     f4, a2, 4
     movi    a3, 0x40c00000
     wfr     f5, a3
+    ssi     f4, a2, 4
+#if DFPU
+    ssi     f5, a2, 8
+    ssip    f3, a2, 8
+#else
+    ssi     f3, a2, 0
     ssiu    f5, a2, 8
+#endif
     movi    a3, 1f + 8
     assert  eq, a2, a3
     l32i    a4, a2, -8
@@ -62,11 +73,16 @@ test_end
 test lsx
     movi    a2, 1f
     movi    a3, 0
+    movi    a4, 4
+    movi    a5, 8
+    lsx     f7, a2, a4
+#if DFPU
+    lsx     f8, a2, a5
+    lsxp    f6, a2, a5
+#else
     lsx     f6, a2, a3
-    movi    a3, 4
-    lsx     f7, a2, a3
-    movi    a3, 8
-    lsxu    f8, a2, a3
+    lsxu    f8, a2, a5
+#endif
     movi    a3, 1f + 8
     assert  eq, a2, a3
     rfr     a2, f6
@@ -87,18 +103,23 @@ test_end
 
 test ssx
     movi    a2, 1f
-    movi    a3, 0
     movi    a4, 0x41200000
     wfr     f9, a4
-    ssx     f9, a2, a3
-    movi    a3, 4
     movi    a4, 0x41300000
     wfr     f10, a4
-    ssx     f10, a2, a3
-    movi    a3, 8
     movi    a4, 0x41400000
     wfr     f11, a4
-    ssxu    f11, a2, a3
+    movi    a3, 0
+    movi    a4, 4
+    movi    a5, 8
+    ssx     f10, a2, a4
+#if DFPU
+    ssx     f11, a2, a5
+    ssxp    f9, a2, a5
+#else
+    ssx     f9, a2, a3
+    ssxu    f11, a2, a5
+#endif
     movi    a3, 1f + 8
     assert  eq, a2, a3
     l32i    a4, a2, -8
@@ -119,4 +140,127 @@ test_end
 
 #endif
 
+#if XCHAL_HAVE_DFP
+
+#if XCHAL_HAVE_BE
+#define F64_HIGH_OFF 0
+#else
+#define F64_HIGH_OFF 4
+#endif
+
+.macro movdf fr, hi, lo
+    movi    a2, \hi
+    movi    a3, \lo
+    wfrd    \fr, a2, a3
+.endm
+
+test ldi
+    movi    a2, 1
+    wsr     a2, cpenable
+
+    movi    a2, 1f
+    ldi     f1, a2, 8
+    ldi     f2, a2, 16
+    ldip    f0, a2, 16
+    movi    a3, 1f + 16
+    assert  eq, a2, a3
+    rfrd    a2, f0
+    movi    a3, 0x3ff00000
+    assert  eq, a2, a3
+    rfrd    a2, f1
+    movi    a3, 0x40000000
+    assert  eq, a2, a3
+    rfrd    a2, f2
+    movi    a3, 0x40080000
+    assert  eq, a2, a3
+.data
+    .align  8
+1:
+.double 1, 2, 3
+.text
+test_end
+
+test sdi
+    movdf   f3, 0x40800000, 0
+    movdf   f4, 0x40a00000, 0
+    movdf   f5, 0x40c00000, 0
+    movi    a2, 1f
+    sdi     f4, a2, 8
+    sdi     f5, a2, 16
+    sdip    f3, a2, 16
+    movi    a3, 1f + 16
+    assert  eq, a2, a3
+    l32i    a4, a2, -16 + F64_HIGH_OFF
+    movi    a3, 0x40800000
+    assert  eq, a4, a3
+    l32i    a4, a2, -8 + F64_HIGH_OFF
+    movi    a3, 0x40a00000
+    assert  eq, a4, a3
+    l32i    a4, a2, F64_HIGH_OFF
+    movi    a3, 0x40c00000
+    assert  eq, a4, a3
+.data
+    .align  8
+1:
+.double 0, 0, 0
+.text
+test_end
+
+test ldx
+    movi    a2, 1f
+    movi    a3, 0
+    movi    a4, 8
+    movi    a5, 16
+    ldx     f7, a2, a4
+    ldx     f8, a2, a5
+    ldxp    f6, a2, a5
+    movi    a3, 1f + 16
+    assert  eq, a2, a3
+    rfrd    a2, f6
+    movi    a3, 0x401c0000
+    assert  eq, a2, a3
+    rfrd    a2, f7
+    movi    a3, 0x40200000
+    assert  eq, a2, a3
+    rfrd    a2, f8
+    movi    a3, 0x40220000
+    assert  eq, a2, a3
+.data
+    .align  8
+1:
+.double 7, 8, 9
+.text
+test_end
+
+test sdx
+    movdf   f9, 0x41200000, 0
+    movdf   f10, 0x41300000, 0
+    movdf   f11, 0x41400000, 0
+    movi    a2, 1f
+    movi    a3, 0
+    movi    a4, 8
+    movi    a5, 16
+    sdx     f10, a2, a4
+    sdx     f11, a2, a5
+    sdxp    f9, a2, a5
+    movi    a3, 1f + 16
+    assert  eq, a2, a3
+    l32i    a4, a2, -16 + F64_HIGH_OFF
+    movi    a3, 0x41200000
+    assert  eq, a4, a3
+    l32i    a4, a2, -8 + F64_HIGH_OFF
+    movi    a3, 0x41300000
+    assert  eq, a4, a3
+    l32i    a4, a2, F64_HIGH_OFF
+    movi    a3, 0x41400000
+    assert  eq, a4, a3
+.data
+    .align  8
+1:
+.double 0, 0, 0
+.text
+test_end
+
+#endif
+
 test_suite_end