path: root/include/asm-m68k/bitops.h
Diffstat (limited to 'include/asm-m68k/bitops.h')
-rw-r--r--  include/asm-m68k/bitops.h  231
1 files changed, 184 insertions, 47 deletions
diff --git a/include/asm-m68k/bitops.h b/include/asm-m68k/bitops.h
index 901fa48a0..b63496040 100644
--- a/include/asm-m68k/bitops.h
+++ b/include/asm-m68k/bitops.h
@@ -4,17 +4,14 @@
* Copyright 1992, Linus Torvalds.
*
* This file is subject to the terms and conditions of the GNU General Public
- * License. See the file README.legal in the main directory of this archive
+ * License. See the file COPYING in the main directory of this archive
* for more details.
*/
/*
* Require 68020 or better.
*
- * They don't use the standard m680x0 bit ordering.
- * Instead, the use the standard m680x0 bitfield ordering.
- *
- * Thus, bit 0 is the MSB of addr; bit 32 is the MSB of (addr+1).
+ * They use the standard big-endian m680x0 bit ordering.
*/
extern __inline__ int set_bit(int nr,void * vaddr)
@@ -22,7 +19,7 @@ extern __inline__ int set_bit(int nr,void * vaddr)
char retval;
__asm__ __volatile__ ("bfset %2@{%1:#1}; sne %0"
- : "=d" (retval) : "d" (nr), "a" (vaddr));
+ : "=d" (retval) : "d" (nr^31), "a" (vaddr));
return retval;
}
@@ -32,7 +29,7 @@ extern __inline__ int clear_bit(int nr, void * vaddr)
char retval;
__asm__ __volatile__ ("bfclr %2@{%1:#1}; sne %0"
- : "=d" (retval) : "d" (nr), "a" (vaddr));
+ : "=d" (retval) : "d" (nr^31), "a" (vaddr));
return retval;
}
@@ -42,61 +39,57 @@ extern __inline__ int change_bit(int nr, void * vaddr)
char retval;
__asm__ __volatile__ ("bfchg %2@{%1:#1}; sne %0"
- : "=d" (retval) : "d" (nr), "a" (vaddr));
+ : "=d" (retval) : "d" (nr^31), "a" (vaddr));
return retval;
}
extern __inline__ int test_bit(int nr, const void * vaddr)
{
- char retval;
-
- __asm__ __volatile__ ("bftst %2@{%1:#1}; sne %0"
- : "=d" (retval) : "d" (nr), "a" (vaddr));
-
- return retval;
+ return ((1UL << (nr & 31)) & (((const unsigned int *) vaddr)[nr >> 5])) != 0;
}
-extern inline int find_first_zero_bit(void * vaddr, unsigned size)
+extern __inline__ int find_first_zero_bit(void * vaddr, unsigned size)
{
- unsigned long res;
- unsigned long *p;
- unsigned long *addr = vaddr;
+ unsigned long *p = vaddr, *addr = vaddr;
+ unsigned long allones = ~0UL;
+ int res;
+ unsigned long num;
if (!size)
return 0;
- __asm__ __volatile__ (" moveq #-1,d0\n\t"
- "1:"
- " cmpl %1@+,d0\n\t"
- " bne 2f\n\t"
- " subql #1,%0\n\t"
- " bne 1b\n\t"
- " bra 5f\n\t"
- "2:"
- " movel %1@-,d0\n\t"
- " notl d0\n\t"
- " bfffo d0{#0,#0},%0\n\t"
- "5:"
- : "=d" (res), "=a" (p)
- : "0" ((size + 31) >> 5), "1" (addr)
- : "d0");
- return ((p - addr) << 5) + res;
-}
-
-static inline int find_next_zero_bit (void *vaddr, int size,
+
+ size = (size >> 5) + ((size & 31) > 0);
+ while (*p++ == allones)
+ {
+ if (--size == 0)
+ return (p - addr) << 5;
+ }
+
+ num = ~*--p;
+ __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
+ : "=d" (res) : "d" (num & -num));
+ return ((p - addr) << 5) + (res ^ 31);
+}
+
+extern __inline__ int find_next_zero_bit (void *vaddr, int size,
int offset)
{
unsigned long *addr = vaddr;
unsigned long *p = addr + (offset >> 5);
- int set = 0, bit = offset & 31, res;
+ int set = 0, bit = offset & 31UL, res;
+
+ if (offset >= size)
+ return size;
if (bit) {
+ unsigned long num = ~*p & (~0UL << bit);
+
/* Look for zero in first longword */
- __asm__("bfffo %1{#0,#0},%0"
- : "=d" (set)
- : "d" (~*p << bit));
- if (set < (32 - bit))
- return set + offset;
+ __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
+ : "=d" (res) : "d" (num & -num));
+ if (res < 32)
+ return (offset & ~31UL) + (res ^ 31);
set = 32 - bit;
p++;
}
@@ -109,12 +102,156 @@ static inline int find_next_zero_bit (void *vaddr, int size,
* ffz = Find First Zero in word. Undefined if no zero exists,
* so code should check against ~0UL first..
*/
-extern inline unsigned long ffz(unsigned long word)
+extern __inline__ unsigned long ffz(unsigned long word)
{
+ int res;
+
__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
- : "=d" (word)
- : "d" (~(word)));
- return word;
+ : "=d" (res) : "d" (~word & -~word));
+ return res ^ 31;
+}
+
+/* Bitmap functions for the minix filesystem */
+
+extern __inline__ int
+minix_find_first_zero_bit (const void *vaddr, unsigned size)
+{
+ const unsigned short *p = vaddr, *addr = vaddr;
+ int res;
+ unsigned short num;
+
+ if (!size)
+ return 0;
+
+ size = (size >> 4) + ((size & 15) > 0);
+ while (*p++ == 0xffff)
+ {
+ if (--size == 0)
+ return (p - addr) << 4;
+ }
+
+ num = ~*--p;
+ __asm__ __volatile__ ("bfffo %1{#16,#16},%0"
+ : "=d" (res) : "d" (num & -num));
+ return ((p - addr) << 4) + (res ^ 31);
+}
+
+extern __inline__ int
+minix_set_bit (int nr, void *vaddr)
+{
+ char retval;
+
+ __asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
+ : "=d" (retval) : "d" (nr^15), "m" (*(char *)vaddr));
+
+ return retval;
+}
+
+extern __inline__ int
+minix_clear_bit (int nr, void *vaddr)
+{
+ char retval;
+
+ __asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
+ : "=d" (retval) : "d" (nr^15), "m" (*(char *) vaddr));
+
+ return retval;
+}
+
+extern __inline__ int
+minix_test_bit (int nr, const void *vaddr)
+{
+ return ((1U << (nr & 15)) & (((const unsigned short *) vaddr)[nr >> 4])) != 0;
+}
+
+/* Bitmap functions for the ext2 filesystem. */
+
+extern __inline__ int
+ext2_set_bit (int nr, void *vaddr)
+{
+ char retval;
+
+ __asm__ __volatile__ ("bfset %2{%1,#1}; sne %0"
+ : "=d" (retval) : "d" (nr^7), "m" (*(char *) vaddr));
+
+ return retval;
+}
+
+extern __inline__ int
+ext2_clear_bit (int nr, void *vaddr)
+{
+ char retval;
+
+ __asm__ __volatile__ ("bfclr %2{%1,#1}; sne %0"
+ : "=d" (retval) : "d" (nr^7), "m" (*(char *) vaddr));
+
+ return retval;
+}
+
+extern __inline__ int
+ext2_test_bit (int nr, const void *vaddr)
+{
+ return ((1U << (nr & 7)) & (((const unsigned char *) vaddr)[nr >> 3])) != 0;
+}
+
+extern __inline__ int
+ext2_find_first_zero_bit (const void *vaddr, unsigned size)
+{
+ const unsigned long *p = vaddr, *addr = vaddr;
+ int res;
+
+ if (!size)
+ return 0;
+
+ size = (size >> 5) + ((size & 31) > 0);
+ while (*p++ == ~0UL)
+ {
+ if (--size == 0)
+ return (p - addr) << 5;
+ }
+
+ --p;
+ for (res = 0; res < 32; res++)
+ if (!ext2_test_bit (res, p))
+ break;
+ return (p - addr) * 32 + res;
+}
+
+extern __inline__ int
+ext2_find_next_zero_bit (const void *vaddr, unsigned size, unsigned offset)
+{
+ const unsigned long *addr = vaddr;
+ const unsigned long *p = addr + (offset >> 5);
+ int bit = offset & 31UL, res;
+
+ if (offset >= size)
+ return size;
+
+ if (bit) {
+ /* Look for zero in first longword */
+ for (res = bit; res < 32; res++)
+ if (!ext2_test_bit (res, p))
+ return (p - addr) * 32 + res;
+ p++;
+ }
+ /* No zero yet, search remaining full bytes for a zero */
+ res = ext2_find_first_zero_bit (p, size - 32 * (p - addr));
+ return (p - addr) * 32 + res;
+}
+
+/* Byte swapping. */
+
+extern __inline__ unsigned short
+swab16 (unsigned short val)
+{
+ return (val << 8) | (val >> 8);
+}
+
+extern __inline__ unsigned int
+swab32 (unsigned int val)
+{
+ __asm__ ("rolw #8,%0; swap %0; rolw #8,%0" : "=d" (val) : "0" (val));
+ return val;
}
#endif /* _M68K_BITOPS_H */
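
A note for readers without the 680x0 programmer's manual to hand: the bfset/bfclr/bfchg/bfffo bit-field instructions count offsets from the most-significant bit, while the kernel's generic bitops treat bit 0 as the least-significant bit of the first 32-bit word. That is what the nr^31 in the rewritten operands (and nr^15, nr^7 in the 16-bit minix and 8-bit ext2 variants) converts between. A minimal plain-C sketch of the mapping, with illustrative helper names that are not part of the patch, might look like this:

/* Illustrative only, not part of the patch: a plain-C model of the
 * bit numbering the rewritten inlines implement. */
#include <stdio.h>

/* Generic kernel convention: bit 0 is the LSB of the first 32-bit word
 * (this mirrors the new test_bit() above). */
static int model_test_bit(int nr, const void *vaddr)
{
	const unsigned int *p = vaddr;

	return (p[nr >> 5] >> (nr & 31)) & 1;
}

/* The bit-field instructions want an MSB-relative offset, which is why
 * the asm passes nr^31: within a word, offset = (nr & 31) ^ 31. */
static int model_bitfield_offset(int nr)
{
	return (nr & 31) ^ 31;
}

int main(void)
{
	unsigned int word[1] = { 0 };

	word[0] |= 1u << (0 & 31);	/* what set_bit(0, word) leaves behind */
	printf("%d %d\n", model_test_bit(0, word), model_bitfield_offset(0));
	/* prints "1 31": bit 0 is the LSB, i.e. bit-field offset 31 */
	return 0;
}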
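
Similarly, the ffz() comment above notes that the result is undefined when no zero bit exists, so callers are expected to compare the word against ~0UL first. A hedged, portable model of what the new bfffo sequence computes (isolate the lowest zero bit with ~word & -~word, take its MSB-relative position, flip it back with ^31), assuming 32-bit words as on m68k and using an illustrative helper name:

/* Illustrative only, not part of the patch: a portable model of ffz()
 * for 32-bit words, mirroring the bfffo-based implementation above. */
#include <stdio.h>

static unsigned model_ffz(unsigned int word)
{
	unsigned int num = ~word & -~word;	/* isolate the lowest zero bit of word */
	unsigned res = 0;

	/* Stand-in for bfffo: find the MSB-relative offset of the set bit.
	 * Like the asm version, undefined (here: it loops) if word == ~0u. */
	while (!(num & 0x80000000u)) {
		num <<= 1;
		res++;
	}
	return res ^ 31;	/* convert back to an LSB-relative bit number */
}

int main(void)
{
	unsigned int map = 0x0000ffffu;

	if (map != ~0u)		/* the check the ffz() comment asks for */
		printf("first zero bit: %u\n", model_ffz(map));	/* prints 16 */
	return 0;
}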