summary | refs | log | tree | commit | diff | stats
path: root/include/asm-ia64/delay.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-ia64/delay.h')
-rw-r--r--  include/asm-ia64/delay.h  90
1 file changed, 90 insertions(+), 0 deletions(-)
diff --git a/include/asm-ia64/delay.h b/include/asm-ia64/delay.h
new file mode 100644
index 000000000..cca4ecdf6
--- /dev/null
+++ b/include/asm-ia64/delay.h
@@ -0,0 +1,90 @@
+#ifndef _ASM_IA64_DELAY_H
+#define _ASM_IA64_DELAY_H
+
+/*
+ * Delay routines using a pre-computed "cycles/usec" value.
+ *
+ * Copyright (C) 1998, 1999 Hewlett-Packard Co
+ * Copyright (C) 1998, 1999 David Mosberger-Tang <davidm@hpl.hp.com>
+ * Copyright (C) 1999 VA Linux Systems
+ * Copyright (C) 1999 Walt Drummond <drummond@valinux.com>
+ * Copyright (C) 1999 Asit Mallick <asit.k.mallick@intel.com>
+ * Copyright (C) 1999 Don Dugger <don.dugger@intel.com>
+ */
+
+#include <linux/config.h>
+#include <linux/kernel.h>
+#include <linux/sched.h>
+
+#include <asm/processor.h>
+
/*
 * Write 'val' into the Interval Timer Match control register (cr.itm).
 * The trailing srlz.d (data serialize) guarantees the new match value
 * is architecturally in effect before the function returns.
 */
extern __inline__ void
ia64_set_itm (unsigned long val)
{
	__asm__ __volatile__("mov cr.itm=%0;; srlz.d;;" :: "r"(val) : "memory");
}
+
/*
 * Read back the current Interval Timer Match value (cr.itm).
 * The srlz.d serializes the control-register access; the "memory"
 * clobber keeps the compiler from moving the read across this point.
 */
extern __inline__ unsigned long
ia64_get_itm (void)
{
	unsigned long itm_val;

	__asm__ __volatile__("mov %0=cr.itm;; srlz.d;;" : "=r"(itm_val) :: "memory");
	return itm_val;
}
+
/*
 * Program the Interval Timer Vector control register (cr.itv).
 * Bits 0-7 carry the interrupt vector, bit 16 is the mask bit;
 * any non-zero 'masked' argument means "timer interrupt masked".
 */
extern __inline__ void
ia64_set_itv (unsigned char vector, unsigned char masked)
{
	/* normalize to 0/1 — same effect as clamping values > 1 down to 1 */
	masked = !!masked;

	__asm__ __volatile__("mov cr.itv=%0;; srlz.d;;"
			     :: "r"((masked << 16) | vector) : "memory");
}
+
/*
 * Set the Interval Time Counter application register (ar.itc) to 'val',
 * serializing with srlz.d so the new count is in effect on return.
 */
extern __inline__ void
ia64_set_itc (unsigned long val)
{
	__asm__ __volatile__("mov ar.itc=%0;; srlz.d;;" :: "r"(val) : "memory");
}
+
/*
 * Sample the free-running Interval Time Counter (ar.itc).
 * No serialize is issued here (unlike the cr.* accessors above); the
 * "memory" clobber prevents the compiler from caching or reordering
 * the read, so back-to-back calls really re-read the counter.
 */
extern __inline__ unsigned long
ia64_get_itc (void)
{
	unsigned long itc_val;

	__asm__ __volatile__("mov %0=ar.itc" : "=r"(itc_val) :: "memory");
	return itc_val;
}
+
/*
 * Spin for 'loops' iterations using the hardware Loop Count application
 * register (ar.lc) and the br.cloop counted-loop branch.  ar.lc is
 * saved on entry and restored on exit so callers' loop state survives.
 *
 * NOTE(review): correctness relies on GCC keeping the four volatile
 * asm statements in program order (volatile asms are not reordered
 * with respect to each other).
 */
extern __inline__ void
__delay (unsigned long loops)
{
	unsigned long saved_ar_lc;

	/* 'loops' is unsigned, so this is simply "loops == 0" */
	if (loops < 1)
		return;

	__asm__ __volatile__("mov %0=ar.lc;;" : "=r"(saved_ar_lc));
	/* br.cloop executes ar.lc+1 times, hence the "- 1" */
	__asm__ __volatile__("mov ar.lc=%0;;" :: "r"(loops - 1));
	__asm__ __volatile__("1:\tbr.cloop.sptk.few 1b;;");
	__asm__ __volatile__("mov ar.lc=%0" :: "r"(saved_ar_lc));
}
+
/*
 * Busy-wait for (at least) 'usecs' microseconds.
 *
 * Normal build: convert microseconds to ITC cycles using the per-CPU
 * calibration value and poll ar.itc until that many cycles elapse.
 * The unsigned subtraction "itc - start" stays correct across counter
 * wrap-around.
 *
 * SoftSDV simulator build: the ITC is not usable, so fall back to a
 * crude uncalibrated decrement loop.
 */
extern __inline__ void
udelay (unsigned long usecs)
{
#ifdef CONFIG_IA64_SOFTSDV_HACKS
	while (usecs--)
		;
#else
	unsigned long start = ia64_get_itc();
	/* NOTE(review): assumes my_cpu_data.cyc_per_usec was calibrated
	   at boot — defined elsewhere in the kernel, not visible here. */
	unsigned long cycles = usecs*my_cpu_data.cyc_per_usec;

	while (ia64_get_itc() - start < cycles)
		/* skip */;
#endif /* CONFIG_IA64_SOFTSDV_HACKS */
}
+
+#endif /* _ASM_IA64_DELAY_H */