1 #ifndef _ASM_IA64_DELAY_H
2 #define _ASM_IA64_DELAY_H
/*
 * Delay routines using a pre-computed "cycles/usec" value.
 *
 * Copyright (C) 1998, 1999 Hewlett-Packard Co
 * Copyright (C) 1998, 1999 David Mosberger-Tang <davidm@hpl.hp.com>
 * Copyright (C) 1999 VA Linux Systems
 * Copyright (C) 1999 Walt Drummond <drummond@valinux.com>
 * Copyright (C) 1999 Asit Mallick <asit.k.mallick@intel.com>
 * Copyright (C) 1999 Don Dugger <don.dugger@intel.com>
 */
15 #include <linux/config.h>
16 #include <linux/kernel.h>
17 #include <linux/sched.h>
18 #include <linux/compiler.h>
20 #include <asm/processor.h>
/*
 * Write VAL to the interval timer match register (cr.itm).  The
 * trailing srlz.d serializes the control-register write so it is
 * guaranteed visible before any subsequent data access.
 */
static __inline__ void
ia64_set_itm (unsigned long val)
{
	__asm__ __volatile__("mov cr.itm=%0;; srlz.d;;" :: "r"(val) : "memory");
}
/*
 * Read the current value of the interval timer match register (cr.itm).
 * srlz.d serializes the control-register access before returning.
 */
static __inline__ unsigned long
ia64_get_itm (void)
{
	unsigned long result;

	__asm__ __volatile__("mov %0=cr.itm;; srlz.d;;" : "=r"(result) :: "memory");
	return result;
}
/*
 * Write VAL to the interval timer vector register (cr.itv), which
 * selects the interrupt vector (and masking) for timer interrupts.
 * srlz.d serializes the write.
 */
static __inline__ void
ia64_set_itv (unsigned long val)
{
	__asm__ __volatile__("mov cr.itv=%0;; srlz.d;;" :: "r"(val) : "memory");
}
/*
 * Write VAL to the interval time counter (ar.itc), the free-running
 * cycle counter that udelay() measures against.  srlz.d serializes
 * the write.
 */
static __inline__ void
ia64_set_itc (unsigned long val)
{
	__asm__ __volatile__("mov ar.itc=%0;; srlz.d;;" :: "r"(val) : "memory");
}
/*
 * Read the interval time counter (ar.itc).
 *
 * NOTE(review): the CONFIG_ITANIUM re-read loop appears to work around
 * an early-Itanium (Merced) erratum under which an ITC read can
 * transiently return -1 in its low 32 bits; the value is re-read until
 * it looks sane.  Confirm against the processor errata documentation.
 */
static __inline__ unsigned long
ia64_get_itc (void)
{
	unsigned long result;

	__asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
#ifdef CONFIG_ITANIUM
	while (unlikely((__s32) result == -1))
		__asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
#endif
	return result;
}
/*
 * Spin for LOOPS iterations using the hardware loop-count register
 * (ar.lc) to drive a counted branch (br.cloop).  The caller's ar.lc
 * is saved and restored around the loop.
 *
 * The early return for loops < 1 is essential: ar.lc is loaded with
 * loops - 1, so loops == 0 would wrap to ULONG_MAX and spin nearly
 * forever.
 */
static __inline__ void
__delay (unsigned long loops)
{
	unsigned long saved_ar_lc;

	if (loops < 1)
		return;

	__asm__ __volatile__("mov %0=ar.lc;;" : "=r"(saved_ar_lc));
	__asm__ __volatile__("mov ar.lc=%0;;" :: "r"(loops - 1));
	__asm__ __volatile__("1:\tbr.cloop.sptk.few 1b;;");
	__asm__ __volatile__("mov ar.lc=%0" :: "r"(saved_ar_lc));
}
76 static __inline__
void
77 udelay (unsigned long usecs
)
79 unsigned long start
= ia64_get_itc();
80 unsigned long cycles
= usecs
*local_cpu_data
->cyc_per_usec
;
82 while (ia64_get_itc() - start
< cycles
)
86 #endif /* _ASM_IA64_DELAY_H */