author    Ralf Baechle <ralf@linux-mips.org>    2000-10-02 00:56:17 +0000
committer Ralf Baechle <ralf@linux-mips.org>    2000-10-02 00:56:17 +0000
commit    c0cfab394d66382b9d6fb262b866cea1acf7c94d
tree      cbf6c751918179143cfae2a3ab747da6926e4715
parent    43ab7d1ecdb1e3780d1fab3b9d9c4409135bda5c
Fix various bitops wreckage.
-rw-r--r--  include/asm-mips/bitops.h    187
-rw-r--r--  include/asm-mips/mipsregs.h   42
-rw-r--r--  include/asm-mips64/bitops.h   19
3 files changed, 118 insertions(+), 130 deletions(-)
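
For context before the diff: the patch replaces a pair of separate load_linked()/store_conditional() macros (deleted from mipsregs.h below) with single asm blocks built around the MIPS ll/sc instructions. As a minimal C-level model of the retry loop that the new set_bit() encodes, consider the sketch below; store_exclusive() is a hypothetical stand-in for the sc instruction, not a real kernel helper, and the whole point of the patch is that real code must keep ll and sc inside one asm block so the compiler cannot generate memory accesses in between.

/*
 * Illustrative model only -- store_exclusive() is a hypothetical
 * stand-in for sc: it stores 'val' to '*m' and returns nonzero only
 * if nothing else wrote the word since it was read.
 */
static inline void set_bit_model(int nr, volatile void *addr)
{
	volatile unsigned long *m = (volatile unsigned long *) addr + (nr >> 5);
	unsigned long mask = 1UL << (nr & 0x1f);
	unsigned long val;

	do {
		val = *m | mask;		/* ll: read current word, OR in the bit */
	} while (!store_exclusive(m, val));	/* sc: retry if the word changed since the read */
}
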
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 326294ab2..7aa1c9d8e 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -1,10 +1,10 @@
-/* $Id: bitops.h,v 1.7 1999/08/19 22:56:33 ralf Exp $
- *
+/*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (c) 1994 - 1997, 1999 Ralf Baechle (ralf@gnu.org)
+ * Copyright (c) 1994 - 1997, 1999, 2000 Ralf Baechle (ralf@gnu.org)
+ * Copyright (c) 2000 Silicon Graphics, Inc.
*/
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H
@@ -40,14 +40,14 @@
* elements. With respect to a future 64 bit implementation it is
* wrong to use long *. Use u32 * or int *.
*/
-extern __inline__ void set_bit(int nr, void *addr);
-extern __inline__ void clear_bit(int nr, void *addr);
-extern __inline__ void change_bit(int nr, void *addr);
-extern __inline__ int test_and_set_bit(int nr, void *addr);
-extern __inline__ int test_and_clear_bit(int nr, void *addr);
-extern __inline__ int test_and_change_bit(int nr, void *addr);
-
-extern __inline__ int test_bit(int nr, const void *addr);
+extern __inline__ void set_bit(int nr, volatile void *addr);
+extern __inline__ void clear_bit(int nr, volatile void *addr);
+extern __inline__ void change_bit(int nr, volatile void *addr);
+extern __inline__ int test_and_set_bit(int nr, volatile void *addr);
+extern __inline__ int test_and_clear_bit(int nr, volatile void *addr);
+extern __inline__ int test_and_change_bit(int nr, volatile void *addr);
+
+extern __inline__ int test_bit(int nr, volatile void *addr);
#ifndef __MIPSEB__
extern __inline__ int find_first_zero_bit (void *addr, unsigned size);
#endif
@@ -63,90 +63,115 @@ extern __inline__ unsigned long ffz(unsigned long word);
* interrupt friendly
*/
-/*
- * The following functions will only work for the R4000!
- */
-
-extern __inline__ void set_bit(int nr, void *addr)
+extern __inline__ void
+set_bit(int nr, volatile void *addr)
{
- int mask, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- } while (!store_conditional(addr, mw|mask));
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp;
+
+ __asm__ __volatile__(
+ "1:\tll\t%0, %1\t\t# set_bit\n\t"
+ "or\t%0, %2\n\t"
+ "sc\t%0, %1\n\t"
+ "beqz\t%0, 1b"
+ :"=&r" (temp), "=m" (*m)
+ :"ir" (1UL << (nr & 0x1f)), "m" (*m));
}
-extern __inline__ void clear_bit(int nr, void *addr)
+extern __inline__ void
+clear_bit(int nr, volatile void *addr)
{
- int mask, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- }
- while (!store_conditional(addr, mw & ~mask));
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp;
+
+ __asm__ __volatile__(
+ "1:\tll\t%0, %1\t\t# clear_bit\n\t"
+ "and\t%0, %2\n\t"
+ "sc\t%0, %1\n\t"
+ "beqz\t%0, 1b\n\t"
+ :"=&r" (temp), "=m" (*m)
+ :"ir" (~(1UL << (nr & 0x1f))), "m" (*m));
}
-extern __inline__ void change_bit(int nr, void *addr)
+extern __inline__ void
+change_bit(int nr, volatile void *addr)
{
- int mask, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- } while (!store_conditional(addr, mw ^ mask));
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp;
+
+ __asm__ __volatile__(
+ "1:\tll\t%0, %1\t\t# change_bit\n\t"
+ "xor\t%0, %2\n\t"
+ "sc\t%0, %1\n\t"
+ "beqz\t%0, 1b"
+ :"=&r" (temp), "=m" (*m)
+ :"ir" (1UL << (nr & 0x1f)), "m" (*m));
}
-extern __inline__ int test_and_set_bit(int nr, void *addr)
+extern __inline__ int
+test_and_set_bit(int nr, volatile void *addr)
{
- int mask, retval, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- retval = (mask & mw) != 0;
- } while (!store_conditional(addr, mw|mask));
-
- return retval;
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp, res;
+
+ __asm__ __volatile__(
+ ".set\tnoreorder\t\t# test_and_set_bit\n"
+ "1:\tll\t%0, %1\n\t"
+ "or\t%2, %0, %3\n\t"
+ "sc\t%2, %1\n\t"
+ "beqz\t%2, 1b\n\t"
+ " and\t%2, %0, %3\n\t"
+ ".set\treorder"
+ :"=&r" (temp), "=m" (*m), "=&r" (res)
+ :"r" (1UL << (nr & 0x1f)), "m" (*m));
+
+ return res != 0;
}
-extern __inline__ int test_and_clear_bit(int nr, void *addr)
+extern __inline__ int
+test_and_clear_bit(int nr, volatile void *addr)
{
- int mask, retval, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- retval = (mask & mw) != 0;
- }
- while (!store_conditional(addr, mw & ~mask));
-
- return retval;
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp, res;
+
+ __asm__ __volatile__(
+ ".set\tnoreorder\t\t# test_and_clear_bit\n"
+ "1:\tll\t%0, %1\n\t"
+ "or\t%2, %0, %3\n\t"
+ "xor\t%2, %3\n\t"
+ "sc\t%2, %1\n\t"
+ "beqz\t%2, 1b\n\t"
+ " and\t%2, %0, %3\n\t"
+ ".set\treorder"
+ :"=&r" (temp), "=m" (*m), "=&r" (res)
+ :"r" (1UL << (nr & 0x1f)), "m" (*m));
+
+ return res != 0;
}
-extern __inline__ int test_and_change_bit(int nr, void *addr)
+extern __inline__ int
+test_and_change_bit(int nr, volatile void *addr)
{
- int mask, retval, mw;
-
- addr += ((nr >> 3) & ~3);
- mask = 1 << (nr & 0x1f);
- do {
- mw = load_linked(addr);
- retval = (mask & mw) != 0;
- } while (!store_conditional(addr, mw ^ mask));
-
- return retval;
+ unsigned long *m = ((unsigned long *) addr) + (nr >> 5);
+ unsigned long temp, res;
+
+ __asm__ __volatile__(
+ ".set\tnoreorder\t\t# test_and_change_bit\n"
+ "1:\tll\t%0, %1\n\t"
+ "xor\t%2, %0, %3\n\t"
+ "sc\t%2, %1\n\t"
+ "beqz\t%2, 1b\n\t"
+ " and\t%2, %0, %3\n\t"
+ ".set\treorder"
+ :"=&r" (temp), "=m" (*m), "=&r" (res)
+ :"r" (1UL << (nr & 0x1f)), "m" (*m));
+
+ return res != 0;
}
#else /* MIPS I */
-extern __inline__ void set_bit(int nr, void * addr)
+extern __inline__ void set_bit(int nr, volatile void * addr)
{
int mask;
int *a = addr;
@@ -159,7 +184,7 @@ extern __inline__ void set_bit(int nr, void * addr)
__bi_restore_flags(flags);
}
-extern __inline__ void clear_bit(int nr, void * addr)
+extern __inline__ void clear_bit(int nr, volatile void * addr)
{
int mask;
int *a = addr;
@@ -172,7 +197,7 @@ extern __inline__ void clear_bit(int nr, void * addr)
__bi_restore_flags(flags);
}
-extern __inline__ void change_bit(int nr, void * addr)
+extern __inline__ void change_bit(int nr, volatile void * addr)
{
int mask;
int *a = addr;
@@ -185,7 +210,7 @@ extern __inline__ void change_bit(int nr, void * addr)
__bi_restore_flags(flags);
}
-extern __inline__ int test_and_set_bit(int nr, void * addr)
+extern __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
int mask, retval;
int *a = addr;
@@ -201,7 +226,7 @@ extern __inline__ int test_and_set_bit(int nr, void * addr)
return retval;
}
-extern __inline__ int test_and_clear_bit(int nr, void * addr)
+extern __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
int mask, retval;
int *a = addr;
@@ -217,7 +242,7 @@ extern __inline__ int test_and_clear_bit(int nr, void * addr)
return retval;
}
-extern __inline__ int test_and_change_bit(int nr, void * addr)
+extern __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
int mask, retval;
int *a = addr;
@@ -240,7 +265,7 @@ extern __inline__ int test_and_change_bit(int nr, void * addr)
#endif /* MIPS I */
-extern __inline__ int test_bit(int nr, const void *addr)
+extern __inline__ int test_bit(int nr, volatile void *addr)
{
return ((1UL << (nr & 31)) & (((const unsigned int *) addr)[nr >> 5])) != 0;
}
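
The MIPS I hunks above only add the volatile qualifier; the fallback strategy itself is unchanged. Since MIPS I has no ll/sc, atomicity on a uniprocessor is obtained by masking local interrupts around a plain read-modify-write, roughly as in this hedged sketch (the irq helpers here are hypothetical names; the file's own __bi_save_flags()/__bi_restore_flags() wrappers are only partially visible in the context lines):

/* Hypothetical sketch of the MIPS I path, not the file's exact code. */
static inline void set_bit_mips1(int nr, volatile void *addr)
{
	volatile int *a = (volatile int *) addr + (nr >> 5);
	int mask = 1 << (nr & 0x1f);
	unsigned long flags;

	save_and_disable_irqs(flags);	/* hypothetical helper */
	*a |= mask;			/* plain RMW, safe while irqs are off (UP only) */
	restore_irqs(flags);		/* hypothetical helper */
}
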
diff --git a/include/asm-mips/mipsregs.h b/include/asm-mips/mipsregs.h
index f7a29a81f..11f8e68d5 100644
--- a/include/asm-mips/mipsregs.h
+++ b/include/asm-mips/mipsregs.h
@@ -1,10 +1,10 @@
-/* $Id: mipsregs.h,v 1.6 1999/07/26 19:42:43 harald Exp $
- *
+/*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 1994, 1995, 1996, 1997 by Ralf Baechle
+ * Copyright (C) 1994, 1995, 1996, 1997, 2000 by Ralf Baechle
+ * Copyright (C) 2000 Silicon Graphics, Inc.
* Modified for further R[236]000 support by Paul M. Antoine, 1996.
*/
#ifndef __ASM_MIPS_MIPSREGS_H
@@ -186,42 +186,6 @@ __BUILD_SET_CP0(config,CP0_CONFIG)
#endif /* defined (_LANGUAGE_ASSEMBLY) */
/*
- * Inline code for use of the ll and sc instructions
- *
- * FIXME: This instruction is only available on MIPS ISA >=2.
- * Since these operations are only being used for atomic operations
- * the easiest workaround for the R[23]00 is to disable interrupts.
- * This fails for R3000 SMP machines which use that many different
- * technologies as replacement that it is difficult to create even
- * just a hook for for all machines to hook into. The only good
- * thing is that there is currently no R3000 SMP machine on the
- * Linux/MIPS target list ...
- */
-#define load_linked(addr) \
-({ \
- unsigned int __res; \
- \
- __asm__ __volatile__( \
- "ll\t%0,(%1)" \
- : "=r" (__res) \
- : "r" ((unsigned long) (addr))); \
- \
- __res; \
-})
-
-#define store_conditional(addr,value) \
-({ \
- int __res; \
- \
- __asm__ __volatile__( \
- "sc\t%0,(%2)" \
- : "=r" (__res) \
- : "0" (value), "r" (addr)); \
- \
- __res; \
-})
-
-/*
* Bitfields in the R4xx0 cp0 status register
*/
#define ST0_IE 0x00000001
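
Deleting load_linked() and store_conditional() is the heart of the fix. With ll and sc issued from two separate asm statements, the compiler is free to schedule its own loads, stores, or register spills between them, and a stray store can clear the LL/SC reservation, so the sc may keep failing (livelock) or the loop may silently race. Roughly, the broken shape was:

	do {
		mw = load_linked(addr);			/* asm statement #1: ll */
		/* compiler-generated spills/reloads may legally land here,
		 * breaking the reservation established by ll */
	} while (!store_conditional(addr, mw | mask));	/* asm statement #2: sc */

The replacement in bitops.h keeps the entire ll/or/sc/beqz sequence inside one asm block, so nothing can be scheduled into the critical window.
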
diff --git a/include/asm-mips64/bitops.h b/include/asm-mips64/bitops.h
index 1e829c71b..a6ff41cc0 100644
--- a/include/asm-mips64/bitops.h
+++ b/include/asm-mips64/bitops.h
@@ -1,11 +1,10 @@
-/* $Id: bitops.h,v 1.3 1999/08/20 21:59:08 ralf Exp $
- *
+/*
* This file is subject to the terms and conditions of the GNU General Public
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (c) 1994 - 1999 Ralf Baechle (ralf@gnu.org)
- * Copyright (c) 1999 Silicon Graphics, Inc.
+ * Copyright (c) 1994, 95, 96, 97, 98, 99, 2000 Ralf Baechle
+ * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
*/
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H
@@ -27,7 +26,7 @@
*/
extern __inline__ void
-set_bit(unsigned long nr, void *addr)
+set_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp;
@@ -42,7 +41,7 @@ set_bit(unsigned long nr, void *addr)
}
extern __inline__ void
-clear_bit(unsigned long nr, void *addr)
+clear_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp;
@@ -57,7 +56,7 @@ clear_bit(unsigned long nr, void *addr)
}
extern __inline__ void
-change_bit(unsigned long nr, void *addr)
+change_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp;
@@ -72,7 +71,7 @@ change_bit(unsigned long nr, void *addr)
}
extern __inline__ unsigned long
-test_and_set_bit(unsigned long nr, void *addr)
+test_and_set_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp, res;
@@ -92,7 +91,7 @@ test_and_set_bit(unsigned long nr, void *addr)
}
extern __inline__ unsigned long
-test_and_clear_bit(unsigned long nr, void *addr)
+test_and_clear_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp, res;
@@ -113,7 +112,7 @@ test_and_clear_bit(unsigned long nr, void *addr)
}
extern __inline__ unsigned long
-test_and_change_bit(unsigned long nr, void *addr)
+test_and_change_bit(unsigned long nr, volatile void *addr)
{
unsigned long *m = ((unsigned long *) addr) + (nr >> 6);
unsigned long temp, res;
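
A short usage sketch (illustrative, not from the patch): these operations act on a flat array of unsigned long, and on mips64 each word holds 64 bits, which is why the hunks above index with nr >> 6 instead of the 32-bit variant's nr >> 5.

static unsigned long map[2];	/* a 128-bit bitmap, for illustration */

void bitmap_example(void)
{
	set_bit(0, map);			/* bit 0 of word 0 */
	if (test_and_set_bit(70, map))		/* bit 6 of word 1 */
		;				/* bit was already set */
	clear_bit(70, map);
}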