1 #ifndef _ASM_IA64_DELAY_H
2 #define _ASM_IA64_DELAY_H
3 
4 /*
5  * Delay routines using a pre-computed "cycles/usec" value.
6  *
7  * Copyright (C) 1998, 1999 Hewlett-Packard Co
8  * Copyright (C) 1998, 1999 David Mosberger-Tang <davidm@hpl.hp.com>
9  * Copyright (C) 1999 VA Linux Systems
10  * Copyright (C) 1999 Walt Drummond <drummond@valinux.com>
11  * Copyright (C) 1999 Asit Mallick <asit.k.mallick@intel.com>
12  * Copyright (C) 1999 Don Dugger <don.dugger@intel.com>
13  */
14 
15 #include <linux/config.h>
16 #include <linux/kernel.h>
17 #include <linux/sched.h>
18 
19 #include <asm/processor.h>
20 
/*
 * Write VAL to the interval timer match register (cr.itm), then issue a
 * data serialize (srlz.d) so the update is guaranteed visible before any
 * subsequent dependent access.  The "memory" clobber keeps the compiler
 * from reordering memory operations across the register write.
 */
static __inline__ void
ia64_set_itm (unsigned long val)
{
	__asm__ __volatile__("mov cr.itm=%0;; srlz.d;;" :: "r"(val) : "memory");
}
26 
/*
 * Read and return the current value of the interval timer match
 * register (cr.itm).  A data serialize (srlz.d) follows the read; the
 * "memory" clobber prevents the compiler from caching/reordering memory
 * accesses around it.
 */
static __inline__ unsigned long
ia64_get_itm (void)
{
	unsigned long result;

	__asm__ __volatile__("mov %0=cr.itm;; srlz.d;;" : "=r"(result) :: "memory");
	return result;
}
35 
/*
 * Write VAL to the interval timer vector register (cr.itv), then issue
 * a data serialize (srlz.d) so the new vector/mask setting takes effect
 * before any subsequent dependent access.
 */
static __inline__ void
ia64_set_itv (unsigned long val)
{
	__asm__ __volatile__("mov cr.itv=%0;; srlz.d;;" :: "r"(val) : "memory");
}
41 
/*
 * Write VAL to the interval time counter (ar.itc), serialized with
 * srlz.d.  Used to (re)set the free-running cycle counter, e.g. when
 * synchronizing the ITC across CPUs.
 */
static __inline__ void
ia64_set_itc (unsigned long val)
{
	__asm__ __volatile__("mov ar.itc=%0;; srlz.d;;" :: "r"(val) : "memory");
}
47 
/*
 * Read and return the current value of the free-running interval time
 * counter (ar.itc).
 *
 * On Itanium (Merced) the read is retried while the low 32 bits come
 * back as all-ones — NOTE(review): this looks like a workaround for an
 * ITC read erratum on that CPU; the retry is expected to be taken
 * essentially never, hence the __builtin_expect(..., 0) hint.
 */
static __inline__ unsigned long
ia64_get_itc (void)
{
	unsigned long result;

	__asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
#ifdef CONFIG_ITANIUM
	while (__builtin_expect ((__s32) result == -1, 0))
		__asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
#endif
	return result;
}
60 
/*
 * Spin for LOOPS iterations using the hardware loop count register
 * (ar.lc) and a counted-loop branch (br.cloop).
 *
 * ar.lc is saved on entry and restored on exit since it is a
 * callee-visible application register that other code may be using.
 * LOOPS - 1 is loaded because br.cloop executes the loop body
 * ar.lc + 1 times.  A request of 0 loops returns immediately.
 */
static __inline__ void
__delay (unsigned long loops)
{
        unsigned long saved_ar_lc;

	if (loops < 1)
		return;

	__asm__ __volatile__("mov %0=ar.lc;;" : "=r"(saved_ar_lc));
	__asm__ __volatile__("mov ar.lc=%0;;" :: "r"(loops - 1));
        __asm__ __volatile__("1:\tbr.cloop.sptk.few 1b;;");
	__asm__ __volatile__("mov ar.lc=%0" :: "r"(saved_ar_lc));
}
74 
75 static __inline__ void
udelay(unsigned long usecs)76 udelay (unsigned long usecs)
77 {
78 	unsigned long start = ia64_get_itc();
79 	unsigned long cycles = usecs*local_cpu_data->cyc_per_usec;
80 
81 	while (ia64_get_itc() - start < cycles)
82 		/* skip */;
83 }
84 
85 static __inline__ void
ndelay(unsigned long nsecs)86 ndelay (unsigned long nsecs)
87 {
88 	unsigned long start = ia64_get_itc();
89 	unsigned long cycles = nsecs*local_cpu_data->cyc_per_usec/1000;
90 
91 	while (ia64_get_itc() - start < cycles)
92 		/* skip */;
93 }
94 
95 #endif /* _ASM_IA64_DELAY_H */
96