author     Ralf Baechle <ralf@linux-mips.org>  2000-10-02 20:46:06 +0000
committer  Ralf Baechle <ralf@linux-mips.org>  2000-10-02 20:46:06 +0000
commit     08e47c5800543e2a36ef2551ababa353ee247e02 (patch)
tree       53edbe9e15eb35c275400f07f11ed92e548adf26 /include/asm-mips/unaligned.h
parent     269ef0b8beaf5f3672030f4ac67de66e4a591c9b (diff)
Fix the unaligned access macros.
Diffstat (limited to 'include/asm-mips/unaligned.h')
-rw-r--r--  include/asm-mips/unaligned.h  166
1 files changed, 81 insertions, 85 deletions
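
For context, a minimal usage sketch of the two macros this patch reworks. The packet-buffer scenario, the byte offset and the helper names below are invented for illustration; only get_unaligned() and put_unaligned() come from the patched header (pulled in through <asm/unaligned.h>).

#include <asm/unaligned.h>

/*
 * Read/write a 32-bit sequence number that sits at byte offset 6 of a
 * packet buffer and is therefore not 4-byte aligned.  A plain lw/sw
 * would raise an address-error exception that the kernel then has to
 * fix up in software; get_unaligned()/put_unaligned() instead expand
 * to the ulw/usw based helpers added in the patch below.
 */
static unsigned int read_seq(unsigned char *buf)
{
	return get_unaligned((unsigned int *)(buf + 6));	/* -> __ldl_u() */
}

static void write_seq(unsigned char *buf, unsigned int seq)
{
	put_unaligned(seq, (unsigned int *)(buf + 6));		/* -> __stl_u() */
}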
diff --git a/include/asm-mips/unaligned.h b/include/asm-mips/unaligned.h
index 622e1e977..772cef1c3 100644
--- a/include/asm-mips/unaligned.h
+++ b/include/asm-mips/unaligned.h
@@ -1,10 +1,10 @@
-/* $Id$
- *
+/*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 1996, 1999 by Ralf Baechle
+ * Copyright (C) 1996, 1999, 2000 by Ralf Baechle
+ * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
*/
#ifndef _ASM_UNALIGNED_H
#define _ASM_UNALIGNED_H
@@ -13,133 +13,129 @@ extern void __get_unaligned_bad_length(void);
extern void __put_unaligned_bad_length(void);
/*
- * Load quad unaligned.
+ * Load double unaligned.
+ *
+ * This could have been implemented in plain C like IA64 but egcs 1.0.3a
+ * inflates this to 23 instructions ...
*/
-extern __inline__ unsigned long ldq_u(const unsigned long long * __addr)
+extern inline unsigned long long __ldq_u(const unsigned long long * __addr)
{
unsigned long long __res;
- __asm__("uld\t%0,(%1)"
- :"=&r" (__res)
- :"r" (__addr));
+ __asm__("ulw\t%0, %1\n\t"
+ "ulw\t%D0, 4+%1"
+ : "=&r" (__res)
+ : "m" (*__addr));
return __res;
}
/*
- * Load long unaligned.
+ * Load word unaligned.
*/
-extern __inline__ unsigned long ldl_u(const unsigned int * __addr)
+extern inline unsigned long __ldl_u(const unsigned int * __addr)
{
unsigned long __res;
- __asm__("ulw\t%0,(%1)"
- :"=&r" (__res)
- :"r" (__addr));
+ __asm__("ulw\t%0,%1"
+ : "=&r" (__res)
+ : "m" (*__addr));
return __res;
}
/*
- * Load word unaligned.
+ * Load halfword unaligned.
*/
-extern __inline__ unsigned long ldw_u(const unsigned short * __addr)
+extern inline unsigned long __ldw_u(const unsigned short * __addr)
{
unsigned long __res;
- __asm__("ulh\t%0,(%1)"
- :"=&r" (__res)
- :"r" (__addr));
+ __asm__("ulh\t%0,%1"
+ : "=&r" (__res)
+ : "m" (*__addr));
return __res;
}
/*
- * Store quad ununaligned.
+ * Store doubleword unaligned.
*/
-extern __inline__ void stq_u(unsigned long __val, unsigned long long * __addr)
+extern inline void __stq_u(unsigned long long __val, unsigned long long * __addr)
{
- __asm__ __volatile__(
- "usd\t%0,(%1)"
- : /* No results */
- :"r" (__val),
- "r" (__addr));
+ __asm__("usw\t%1, %0\n\t"
+ "usw\t%D1, 4+%0"
+ : "=m" (*__addr)
+ : "r" (__val));
}
/*
* Store long ununaligned.
*/
-extern __inline__ void stl_u(unsigned long __val, unsigned int * __addr)
+extern inline void __stl_u(unsigned long __val, unsigned int * __addr)
{
- __asm__ __volatile__(
- "usw\t%0,(%1)"
- : /* No results */
- :"r" (__val),
- "r" (__addr));
+ __asm__("usw\t%1, %0"
+ : "=m" (*__addr)
+ : "r" (__val));
}
/*
* Store word ununaligned.
*/
-extern __inline__ void stw_u(unsigned long __val, unsigned short * __addr)
-{
- __asm__ __volatile__(
- "ush\t%0,(%1)"
- : /* No results */
- :"r" (__val),
- "r" (__addr));
-}
-
-extern inline unsigned long __get_unaligned(const void *ptr, size_t size)
-{
- unsigned long val;
- switch (size) {
- case 1:
- val = *(const unsigned char *)ptr;
- break;
- case 2:
- val = ldw_u((const unsigned short *)ptr);
- break;
- case 4:
- val = ldl_u((const unsigned int *)ptr);
- break;
- case 8:
- val = ldq_u((const unsigned long long *)ptr);
- break;
- default:
- __get_unaligned_bad_length();
- break;
- }
- return val;
-}
-
-extern inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
+extern inline void __stw_u(unsigned long __val, unsigned short * __addr)
{
- switch (size) {
- case 1:
- *(unsigned char *)ptr = (val);
- break;
- case 2:
- stw_u(val, (unsigned short *)ptr);
- break;
- case 4:
- stl_u(val, (unsigned int *)ptr);
- break;
- case 8:
- stq_u(val, (unsigned long long *)ptr);
- break;
- default:
- __put_unaligned_bad_length();
- break;
- }
+ __asm__("ush\t%1, %0"
+ : "=m" (*__addr)
+ : "r" (__val));
}
/*
* The main single-value unaligned transfer routines.
*/
-#define get_unaligned(ptr) \
- ((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
-#define put_unaligned(x,ptr) \
- __put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
+#define get_unaligned(ptr) \
+({ \
+ __typeof__(*(ptr)) __val; \
+ \
+ switch (sizeof(*(ptr))) { \
+ case 1: \
+ __val = *(const unsigned char *)ptr; \
+ break; \
+ case 2: \
+ __val = __ldw_u((const unsigned short *)ptr); \
+ break; \
+ case 4: \
+ __val = __ldl_u((const unsigned int *)ptr); \
+ break; \
+ case 8: \
+ __val = __ldq_u((const unsigned long long *)ptr); \
+ break; \
+ default: \
+ __get_unaligned_bad_length(); \
+ break; \
+ } \
+ \
+ __val; \
+})
+
+#define put_unaligned(x,ptr) \
+do { \
+ switch (sizeof(*(ptr))) { \
+ case 1: \
+ *(unsigned char *)ptr = (x); \
+ break; \
+ case 2: \
+ __stw_u((x), (unsigned short *)ptr); \
+ break; \
+ case 4: \
+ __stl_u((x), (unsigned int *)ptr); \
+ break; \
+ case 8: \
+ __stq_u((x), (unsigned long long *)ptr); \
+ break; \
+ default: \
+ __put_unaligned_bad_length(); \
+ break; \
+ } \
+} while(0)
#endif /* _ASM_UNALIGNED_H */
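
The comment added to __ldq_u() notes that a plain-C implementation "like IA64" was rejected because egcs 1.0.3a inflated it to 23 instructions. For reference, a sketch of that plain-C style: route the access through a one-member packed struct so the compiler itself emits an unaligned-safe sequence. The names (__una_u32, __c_ldl_u, ...) are illustrative and are neither part of this header nor the actual IA64 code; the sketch only shows the technique being traded off against the hand-written asm.

/* Structs whose single member has byte alignment, so dereferencing a
 * pointer to them never assumes natural alignment. */
struct __una_u64 { unsigned long long x __attribute__((packed)); };
struct __una_u32 { unsigned int x __attribute__((packed)); };
struct __una_u16 { unsigned short x __attribute__((packed)); };

/* Load double unaligned, compiler-generated counterpart of __ldq_u(). */
static inline unsigned long long __c_ldq_u(const unsigned long long *addr)
{
	return ((const struct __una_u64 *)addr)->x;
}

/* Load word unaligned, compiler-generated counterpart of __ldl_u(). */
static inline unsigned long __c_ldl_u(const unsigned int *addr)
{
	return ((const struct __una_u32 *)addr)->x;
}

/* Store word unaligned, compiler-generated counterpart of __stl_u(). */
static inline void __c_stl_u(unsigned long val, unsigned int *addr)
{
	((struct __una_u32 *)addr)->x = val;
}

A newer compiler will typically turn each of these into the same lwl/lwr and swl/swr pairs that the ulw/usw assembler macros expand to; the header keeps the hand-written asm because of the egcs 1.0.3a code generation quoted in the comment.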