1 /* Copyright (C) 2003-2022 Free Software Foundation, Inc.
2    This file is part of the GNU C Library.
3 
4    The GNU C Library is free software; you can redistribute it and/or
5    modify it under the terms of the GNU Lesser General Public
6    License as published by the Free Software Foundation; either
7    version 2.1 of the License, or (at your option) any later version.
8 
9    The GNU C Library is distributed in the hope that it will be useful,
10    but WITHOUT ANY WARRANTY; without even the implied warranty of
11    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12    Lesser General Public License for more details.
13 
14    You should have received a copy of the GNU Lesser General Public
15    License along with the GNU C Library.  If not, see
16    <https://www.gnu.org/licenses/>.  */
17 
#ifndef _ATOMIC_MACHINE_H
#define _ATOMIC_MACHINE_H	1

/* Full memory barrier via the legacy GCC __sync builtin.  */
#define atomic_full_barrier() __sync_synchronize ()

/* No native 64-bit atomic operations on this target.  */
#define __HAVE_64B_ATOMICS 0
/* Tell the generic atomics machinery not to rely on the __atomic_*
   compiler builtins wholesale; the macros defined below supply the
   operations this target supports.  */
#define USE_ATOMIC_COMPILER_BUILTINS 0
25 
/* We use the compiler atomic load and store builtins as the generic
   defines are not atomic.  In particular, we need to use compare and
   exchange for stores as the implementation is synthesized.  */

/* Deliberately left without a definition: a surviving reference to it
   produces a link-time error.  For supported access sizes the check
   below is a compile-time constant false and the call is removed.  */
void __atomic_link_error (void);

/* Reject atomic loads/stores on objects that are not 1, 2, or 4 bytes
   wide.  MEM is parenthesized at every use so argument expressions
   such as P + 1 expand correctly, and the body is wrapped in
   do { } while (0) so the macro is a single statement with no
   dangling-else hazard.  */
#define __atomic_check_size_ls(mem) \
 do									    \
   {									    \
     if (sizeof (*(mem)) != 1 && sizeof (*(mem)) != 2			    \
	 && sizeof (*(mem)) != 4)					    \
       __atomic_link_error ();						    \
   }									    \
 while (0)
33 
/* Atomic loads via the compiler builtin.  The size check turns an
   unsupported width into a link failure instead of a silently
   non-atomic access.  */
#define atomic_load_relaxed(mem) \
 ({ __atomic_check_size_ls((mem));                                           \
    __atomic_load_n ((mem), __ATOMIC_RELAXED); })
#define atomic_load_acquire(mem) \
 ({ __atomic_check_size_ls((mem));                                           \
    __atomic_load_n ((mem), __ATOMIC_ACQUIRE); })

/* Atomic stores via the compiler builtin; same size restriction as
   the loads above.  Statement macros, hence do/while (0).  */
#define atomic_store_relaxed(mem, val) \
 do {                                                                        \
   __atomic_check_size_ls((mem));                                            \
   __atomic_store_n ((mem), (val), __ATOMIC_RELAXED);                        \
 } while (0)
#define atomic_store_release(mem, val) \
 do {                                                                        \
   __atomic_check_size_ls((mem));                                            \
   __atomic_store_n ((mem), (val), __ATOMIC_RELEASE);                        \
 } while (0)
51 
/* The only hardware-level primitive provided here is compare and
   exchange (below), so atomic exchange is synthesized from CAS.
   NOTE(review): the original comment questioned whether this is
   correct; it appears consistent with only CAS being defined.  */
#define ATOMIC_EXCHANGE_USES_CAS 1

/* Semantics of the kernel CAS helper invoked below:
     prev = *addr;
     if (prev == old)
       *addr = new;
     return prev; */

/* Use the kernel atomic light weight syscalls on hppa.  */
/* Entry offset of the kernel light-weight-syscall gateway, branched
   to through space register %sr2.  */
#define _LWS "0xb0"
/* LWS function number selecting 32-bit compare and swap.  */
#define _LWS_CAS "0"
/* Registers the LWS call may modify beyond the declared operands.
   Note r31 is the link register.  */
#define _LWS_CLOBBER "r1", "r23", "r22", "r20", "r31", "memory"
/* String constant for -EAGAIN.  */
#define _ASM_EAGAIN "-11"
/* String constant for -EDEADLOCK.  */
#define _ASM_EDEADLOCK "-45"
69 
/* The only basic operation needed is compare and exchange.  The mem
   pointer must be word aligned.  We no longer loop on deadlock.

   Flow of the asm below:
     0: ble   ...      branch external to the LWS gateway (_LWS above);
                       %r31 receives the return address.
        ldi   ...      delay slot: load LWS function number _LWS_CAS
                       into %r20.
   On return the kernel has left the old value of *mem in %r28
   (lws_ret) and a status code in %r21 (lws_errno):
        cmpiclr,<> ... nullify the following branch unless
                       lws_errno == -EAGAIN, so that ...
        b,n 0b         ... the operation is retried only on -EAGAIN.
        cmpclr,=   ... nullify the next instruction when lws_errno
                       is 0 (success); otherwise ...
        iitlbp     ... execute this instruction to fault on any other
                       error (e.g. -EDEADLOCK).  NOTE(review): iitlbp
                       is presumably privileged/illegal in user mode,
                       turning an unexpected errno into a trap —
                       confirm against the PA-RISC manual.
   The result is the previous value of *mem; equality with OLDVAL
   means the exchange took place.  */
#define atomic_compare_and_exchange_val_acq(mem, newval, oldval)	\
  ({									\
     register long lws_errno asm("r21");				\
     register unsigned long lws_ret asm("r28");				\
     register unsigned long lws_mem asm("r26") = (unsigned long)(mem);	\
     register unsigned long lws_old asm("r25") = (unsigned long)(oldval);\
     register unsigned long lws_new asm("r24") = (unsigned long)(newval);\
     __asm__ __volatile__(						\
	"0:					\n\t"			\
	"ble	" _LWS "(%%sr2, %%r0)		\n\t"			\
	"ldi	" _LWS_CAS ", %%r20		\n\t"			\
	"cmpiclr,<> " _ASM_EAGAIN ", %%r21, %%r0\n\t"			\
	"b,n 0b					\n\t"			\
	"cmpclr,= %%r0, %%r21, %%r0		\n\t"			\
	"iitlbp %%r0,(%%sr0, %%r0)		\n\t"			\
	: "=r" (lws_ret), "=r" (lws_errno)				\
	: "r" (lws_mem), "r" (lws_old), "r" (lws_new)			\
	: _LWS_CLOBBER							\
     );									\
									\
     (__typeof (oldval)) lws_ret;					\
   })
94 
/* Atomically set *MEM to NEWVAL if it equals OLDVAL, with acquire
   semantics.  Evaluates to 0 when the exchange happened and nonzero
   when the value at MEM differed from OLDVAL.
   Fixes: MEM and OLDVAL are now parenthesized in the expansion, so
   argument expressions such as P + 1 or conditional expressions do
   not mis-parse (e.g. `ret != a ? b : c' previously grouped as
   `(ret != a) ? b : c').  */
#define atomic_compare_and_exchange_bool_acq(mem, newval, oldval)	\
  ({									\
     __typeof__ (*(mem)) ret;						\
     ret = atomic_compare_and_exchange_val_acq(mem, newval, oldval);	\
     /* Return 1 if it was already acquired.  */			\
     (ret != (oldval));							\
   })
102 
103 #endif
104 /* _ATOMIC_MACHINE_H */
105