author    Ralf Baechle <ralf@linux-mips.org>  1999-02-15 02:15:32 +0000
committer Ralf Baechle <ralf@linux-mips.org>  1999-02-15 02:15:32 +0000
commit    86464aed71025541805e7b1515541aee89879e33 (patch)
tree      e01a457a4912a8553bc65524aa3125d51f29f810 /include/asm-arm/byteorder.h
parent    88f99939ecc6a95a79614574cb7d95ffccfc3466 (diff)
Merge with Linux 2.2.1.
Diffstat (limited to 'include/asm-arm/byteorder.h')
-rw-r--r--  include/asm-arm/byteorder.h  14
1 file changed, 13 insertions(+), 1 deletion(-)
diff --git a/include/asm-arm/byteorder.h b/include/asm-arm/byteorder.h
index 7b232d906..60a191cc3 100644
--- a/include/asm-arm/byteorder.h
+++ b/include/asm-arm/byteorder.h
@@ -3,7 +3,14 @@
 #include <asm/types.h>
 
-#ifdef __GNUC__
+#if defined(__GNUC__) && __GNUC__ == 2 && __GNUC_MINOR__ < 8
+
+/* Recent versions of GCC can open code the swaps at least as well
+   as we can write them by hand, so the "optimisations" here only
+   make sense for older compilers.  Worse, some versions of GCC
+   actually go wrong in the presence of the assembler versions.
+   We play it safe and only turn them on for compilers older than
+   GCC 2.8.0. */
+
 static __inline__ __const__ __u32 ___arch__swab32(__u32 x)
 {
@@ -33,6 +40,11 @@ static __inline__ __const__ __u16 ___arch__swab16(__u16 x)
 #endif /* __GNUC__ */
 
+#if !defined(__STRICT_ANSI__) || defined(__KERNEL__)
+# define __BYTEORDER_HAS_U64__
+# define __SWAB_64_THRU_32__
+#endif
+
 #include <linux/byteorder/little_endian.h>
 
 #endif
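
The first hunk is truncated before the body of ___arch__swab32(). For context, the pre-GCC-2.8 hand-coded path that the comment defends looked roughly like the following inline-assembler sketch (illustrative, reconstructed in the style of ARM kernel code of that era, not quoted from this commit): it swaps the four bytes of a word with a rotate, a mask, and XORs instead of relying on the compiler to open-code the swap.

/* Illustrative sketch only -- not quoted from this commit.  Swaps
   bytes ABCD -> DCBA using rotates, one mask, and XORs; this is the
   kind of hand "optimisation" the comment above is talking about. */
static __inline__ __const__ __u32 ___arch__swab32(__u32 x)
{
	__u32 t;
	__asm__(
		"eor\t%1, %0, %0, ror #16\n\t"	/* t = x ^ ror(x, 16) */
		"bic\t%1, %1, #0x00ff0000\n\t"	/* clear byte 2 of t  */
		"mov\t%0, %0, ror #8\n\t"	/* x = ror(x, 8)      */
		"eor\t%0, %0, %1, lsr #8"	/* x ^= t >> 8        */
		: "=r" (x), "=&r" (t)
		: "0" (x));
	return x;
}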
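
The second hunk's two macros steer the generic code pulled in by linux/byteorder/little_endian.h: __BYTEORDER_HAS_U64__ announces that 64-bit swaps are available, and __SWAB_64_THRU_32__ asks the generic header to synthesise them from the 32-bit swap rather than expecting an ___arch__swab64. The synthesised helper behaves like the sketch below (swab64_via_32 is a made-up name for illustration; the real machinery lives in the generic byteorder headers).

/* Sketch of the 64-bit swap that __SWAB_64_THRU_32__ requests:
   swap the bytes inside each 32-bit half, then exchange the halves.
   swab64_via_32 is a hypothetical name used only for illustration. */
static __inline__ __u64 swab64_via_32(__u64 x)
{
	__u32 hi = (__u32)(x >> 32);	/* upper word */
	__u32 lo = (__u32)(x);		/* lower word */
	return ((__u64)___arch__swab32(lo) << 32) | ___arch__swab32(hi);
}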