#ifndef _I386_BITOPS_H
#define _I386_BITOPS_H

/*
 * Copyright 1992, Linus Torvalds.
 */

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. The test_and_* operations return 0 if the
 * bit was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

#ifdef __SMP__
#define LOCK_PREFIX "lock ; "
#else
#define LOCK_PREFIX ""
#endif

/*
 * Some hacks to defeat gcc over-optimizations..
 */
struct __dummy { unsigned long a[100]; };
#define ADDR (*(struct __dummy *) addr)
#define CONST_ADDR (*(const struct __dummy *) addr)

extern __inline__ void set_bit(int nr, volatile void * addr)
{
	__asm__ __volatile__( LOCK_PREFIX
		"btsl %1,%0"
		:"=m" (ADDR)
		:"ir" (nr));
}

extern __inline__ void clear_bit(int nr, volatile void * addr)
{
	__asm__ __volatile__( LOCK_PREFIX
		"btrl %1,%0"
		:"=m" (ADDR)
		:"ir" (nr));
}

extern __inline__ void change_bit(int nr, volatile void * addr)
{
	__asm__ __volatile__( LOCK_PREFIX
		"btcl %1,%0"
		:"=m" (ADDR)
		:"ir" (nr));
}

extern __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	int oldbit;

	__asm__ __volatile__( LOCK_PREFIX
		"btsl %2,%1\n\tsbbl %0,%0"
		:"=r" (oldbit),"=m" (ADDR)
		:"ir" (nr));
	return oldbit;
}

extern __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	int oldbit;

	__asm__ __volatile__( LOCK_PREFIX
		"btrl %2,%1\n\tsbbl %0,%0"
		:"=r" (oldbit),"=m" (ADDR)
		:"ir" (nr));
	return oldbit;
}

extern __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	int oldbit;

	__asm__ __volatile__( LOCK_PREFIX
		"btcl %2,%1\n\tsbbl %0,%0"
		:"=r" (oldbit),"=m" (ADDR)
		:"ir" (nr));
	return oldbit;
}

/*
 * This routine doesn't need to be atomic.
 */
extern __inline__ int __constant_test_bit(int nr, const volatile void * addr)
{
	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}

extern __inline__ int __test_bit(int nr, volatile void * addr)
{
	int oldbit;

	__asm__ __volatile__(
		"btl %2,%1\n\tsbbl %0,%0"
		:"=r" (oldbit)
		:"m" (ADDR),"ir" (nr));
	return oldbit;
}

#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))

/*
 * Find-bit routines..
 */
extern __inline__ int find_first_zero_bit(void * addr, unsigned size)
{
	int res;

	if (!size)
		return 0;
	__asm__("cld\n\t"
		"movl $-1,%%eax\n\t"
		"xorl %%edx,%%edx\n\t"
		"repe; scasl\n\t"
		"je 1f\n\t"
		"xorl -4(%%edi),%%eax\n\t"
		"subl $4,%%edi\n\t"
		"bsfl %%eax,%%edx\n"
		"1:\tsubl %%ebx,%%edi\n\t"
		"shll $3,%%edi\n\t"
		"addl %%edi,%%edx"
		:"=d" (res)
		:"c" ((size + 31) >> 5), "D" (addr), "b" (addr)
		:"ax", "cx", "di");
	return res;
}

extern __inline__ int find_next_zero_bit (void * addr, int size, int offset)
{
	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
	int set = 0, bit = offset & 31, res;

	if (bit) {
		/*
		 * Look for a zero in the remaining bits of the first word
		 */
		__asm__("bsfl %1,%0\n\t"
			"jne 1f\n\t"
			"movl $32, %0\n"
			"1:"
			: "=r" (set)
			: "r" (~(*p >> bit)));
		if (set < (32 - bit))
			return set + offset;
		set = 32 - bit;
		p++;
	}
	/*
	 * No zero yet, search the remaining full words for a zero
	 */
	res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
	return (offset + set + res);
}
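/*
 * Illustrative sketch only, not part of the original interface: a
 * hypothetical caller that scans a bitmap of 'nbits' bits for a free
 * slot and claims it atomically ('claim_free_slot', 'bitmap' and
 * 'nbits' are made-up names).
 *
 *	static __inline__ int claim_free_slot(void * bitmap, unsigned nbits)
 *	{
 *		int bit;
 *
 *		do {
 *			bit = find_first_zero_bit(bitmap, nbits);
 *			if (bit >= (int) nbits)
 *				return -1;	(bitmap is full)
 *		} while (test_and_set_bit(bit, bitmap));
 *		return bit;
 *	}
 *
 * The retry loop is needed because another processor may set the same
 * bit between the (non-atomic) search and the atomic test_and_set_bit().
 */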
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 */
extern __inline__ unsigned long ffz(unsigned long word)
{
	__asm__("bsfl %1,%0"
		:"=r" (word)
		:"r" (~word));
	return word;
}

#ifdef __KERNEL__

#define ext2_set_bit			test_and_set_bit
#define ext2_clear_bit			test_and_clear_bit
#define ext2_test_bit			test_bit
#define ext2_find_first_zero_bit	find_first_zero_bit
#define ext2_find_next_zero_bit		find_next_zero_bit

/* Bitmap functions for the minix filesystem. */
#define minix_set_bit(nr,addr)			test_and_set_bit(nr,addr)
#define minix_clear_bit(nr,addr)		test_and_clear_bit(nr,addr)
#define minix_test_bit(nr,addr)			test_bit(nr,addr)
#define minix_find_first_zero_bit(addr,size)	find_first_zero_bit(addr,size)

#endif /* __KERNEL__ */

#endif /* _I386_BITOPS_H */
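/*
 * Illustrative sketch only, not part of the original header: guarding an
 * ffz() call against the all-ones word, as the comment above ffz()
 * requires ('first_free_in_word' is a made-up name).
 *
 *	static __inline__ int first_free_in_word(unsigned long word)
 *	{
 *		if (word == ~0UL)
 *			return -1;	(no zero bit in this word)
 *		return (int) ffz(word);
 *	}
 */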