author     Richard Henderson <rth@twiddle.net>  2013-01-04 16:39:28 -0800
committer  Blue Swirl <blauwirbel@gmail.com>    2013-01-12 12:24:07 +0000
commit     7db2145a6826b14efceb8dd64bfe6ad8647072eb (patch)
tree       982400542735620040552a54e264f006409d6a58 /include
parent     cdfe2851c6c33c133bd90c2643cb0486c0f5b325 (diff)
bswap: Add host endian unaligned access functions
Signed-off-by: Richard Henderson <rth@twiddle.net>
Signed-off-by: Blue Swirl <blauwirbel@gmail.com>
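The patch introduces suffix-less ld*_p/st*_p accessors that read and write host-endian values at arbitrary alignment via fixed-size memcpy. As a rough standalone sketch of that idiom (not part of the patch; the demo_* names, buffer, and values are invented for illustration):

#include <stdint.h>
#include <string.h>
#include <assert.h>

/* Same technique as the patch: a fixed-size memcpy that the compiler
   can lower to a single unaligned load/store where the host allows it. */
static uint32_t demo_ldl_p(const void *ptr)
{
    uint32_t r;
    memcpy(&r, ptr, sizeof(r));
    return r;
}

static void demo_stl_p(void *ptr, uint32_t v)
{
    memcpy(ptr, &v, sizeof(v));
}

int main(void)
{
    uint8_t demo_buf[8] = { 0 };

    /* Offset 1 is deliberately misaligned for a 4-byte access; a plain
       *(uint32_t *) dereference here would be undefined behaviour and
       may trap on strict-alignment hosts.  The memcpy form is always
       well-defined. */
    demo_stl_p(demo_buf + 1, 0x12345678);
    assert(demo_ldl_p(demo_buf + 1) == 0x12345678);
    return 0;
}

Compiled with optimization, these memcpy calls reduce to ordinary load/store instructions on hosts with unaligned access support, which is the point the comment inside the patch below makes.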
Diffstat (limited to 'include')
-rw-r--r--  include/qemu/bswap.h | 51
1 file changed, 50 insertions(+), 1 deletion(-)
diff --git a/include/qemu/bswap.h b/include/qemu/bswap.h
index b2a8f94bb4..381554b5b1 100644
--- a/include/qemu/bswap.h
+++ b/include/qemu/bswap.h
@@ -226,6 +226,8 @@ static inline uint32_t qemu_bswap_len(uint32_t value, int len)
return bswap32(value) >> (32 - 8 * len);
}

+/* Unions for reinterpreting between floats and integers. */
+
typedef union {
float32 f;
uint32_t l;
@@ -309,7 +311,7 @@ typedef union {
* q: 64 bits
*
* endian is:
- * (empty): 8 bit access
+ * (empty): host endian
* be : big endian
* le : little endian
*/
@@ -328,6 +330,53 @@ static inline void stb_p(void *ptr, int v)
*(uint8_t *)ptr = v;
}

+/* Any compiler worth its salt will turn these memcpy into native unaligned
+ operations. Thus we don't need to play games with packed attributes, or
+ inline byte-by-byte stores. */
+
+static inline int lduw_p(const void *ptr)
+{
+ uint16_t r;
+ memcpy(&r, ptr, sizeof(r));
+ return r;
+}
+
+static inline int ldsw_p(const void *ptr)
+{
+ int16_t r;
+ memcpy(&r, ptr, sizeof(r));
+ return r;
+}
+
+static inline void stw_p(void *ptr, uint16_t v)
+{
+ memcpy(ptr, &v, sizeof(v));
+}
+
+static inline int ldl_p(const void *ptr)
+{
+ int32_t r;
+ memcpy(&r, ptr, sizeof(r));
+ return r;
+}
+
+static inline void stl_p(void *ptr, uint32_t v)
+{
+ memcpy(ptr, &v, sizeof(v));
+}
+
+static inline uint64_t ldq_p(const void *ptr)
+{
+ uint64_t r;
+ memcpy(&r, ptr, sizeof(r));
+ return r;
+}
+
+static inline void stq_p(void *ptr, uint64_t v)
+{
+ memcpy(ptr, &v, sizeof(v));
+}
+
/* NOTE: on arm, putting 2 in /proc/sys/debug/alignment so that the
kernel handles unaligned load/stores may give better results, but
it is a system wide setting : bad */
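For context, a hedged sketch of how a caller might use the new suffix-less accessors once this header is included (the function name and pointer here are invented for illustration, not taken from the patch):

/* Hypothetical caller, assuming the qemu/bswap.h from this patch. */
#include "qemu/bswap.h"

static int demo_roundtrip(uint8_t *p)
{
    stw_p(p, 0xBEEF);     /* host-endian 16-bit store, alignment-safe */
    return lduw_p(p);     /* host-endian 16-bit load; yields 0xBEEF */
}

Because the suffix-less forms use host byte order, a value stored with stw_p always reads back unchanged through lduw_p regardless of the host's endianness; the be/le variants documented in the header are the ones that pin down a specific byte order.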