path: root/include/asm-alpha/byteorder.h
#ifndef _ALPHA_BYTEORDER_H
#define _ALPHA_BYTEORDER_H

#include <asm/types.h>

#ifdef __GNUC__

static __inline__ __const__ __u32 ___arch__swab32(__u32 x)
{
	__u64 t1, t2, t3;

	/* Break the final or's out of the block so that gcc can
	   schedule them at will.  Further, use add not or so that
	   we elide the sign extend gcc will put in because the
	   return type is not a long.  */

	__asm__(
	"insbl	%3,3,%1		# %1 = dd000000\n\t"
	"zapnot	%3,2,%2		# %2 = 0000cc00\n\t"
	"sll	%2,8,%2		# %2 = 00cc0000\n\t"
	"or	%2,%1,%1	# %1 = ddcc0000\n\t"
	"zapnot	%3,4,%2		# %2 = 00bb0000\n\t"
	"extbl	%3,3,%0		# %0 = 000000aa\n\t"
	"srl	%2,8,%2		# %2 = 0000bb00"
	: "=r"(t3), "=&r"(t1), "=&r"(t2)
	: "r"(x));

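	/* Added note: per the lane comments above, t3 = 000000aa,
	   t2 = 0000bb00 and t1 = ddcc0000 occupy disjoint byte lanes,
	   so the adds below combine them exactly as or's would,
	   yielding ddccbbaa.  */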
	return t3 + t2 + t1;
}

static __inline__ __const__ __u16 ___arch__swab16(__u16 x)
{
	__u64 t1, t2;

	__asm__(
	"insbl	%2,1,%1		# %1 = bb00\n\t"
	"extbl	%2,1,%0		# %0 = 00aa"
	: "=r"(t1), "=&r"(t2) : "r"(x));

	return t1 | t2;
}

#define __arch__swab32(x) ___arch__swab32(x)
#define __arch__swab16(x) ___arch__swab16(x)

#endif /* __GNUC__ */

#define __BYTEORDER_HAS_U64__

#include <linux/byteorder/little_endian.h>

#endif /* _ALPHA_BYTEORDER_H */
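
/*
 * Illustration only (not part of the original header): a portable C
 * sketch of what the assembly helpers above compute, assuming the
 * byte-swap semantics shown in their lane comments.  The names
 * swab32_c and swab16_c are hypothetical.
 *
 *	static __inline__ __u32 swab32_c(__u32 x)
 *	{
 *		return ((x & 0x000000ffU) << 24) |
 *		       ((x & 0x0000ff00U) <<  8) |
 *		       ((x & 0x00ff0000U) >>  8) |
 *		       ((x & 0xff000000U) >> 24);
 *	}
 *
 *	static __inline__ __u16 swab16_c(__u16 x)
 *	{
 *		return (__u16)((x << 8) | (x >> 8));
 *	}
 *
 * For example, swab32_c(0xaabbccdd) == 0xddccbbaa and
 * swab16_c(0xaabb) == 0xbbaa, matching the aa/bb/cc/dd comments
 * in the inline assembly.
 */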