/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2015 Regents of the University of California
 */

#ifndef _ASM_RISCV_ASM_H
#define _ASM_RISCV_ASM_H

#ifdef __ASSEMBLY__
#define __ASM_STR(x)	x
#else
#define __ASM_STR(x)	#x
#endif
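
/*
 * __ASM_STR() above lets the definitions below be shared between assembly
 * sources (where the token is used as-is) and C sources (where it is
 * stringified for pasting into inline asm templates).
 */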

#if __riscv_xlen == 64
#define __REG_SEL(a, b)	__ASM_STR(a)
#elif __riscv_xlen == 32
#define __REG_SEL(a, b)	__ASM_STR(b)
#else
#error "Unexpected __riscv_xlen"
#endif

#define REG_L		__REG_SEL(ld, lw)
#define REG_S		__REG_SEL(sd, sw)
#define REG_SC		__REG_SEL(sc.d, sc.w)
#define REG_AMOSWAP_AQ	__REG_SEL(amoswap.d.aq, amoswap.w.aq)
#define REG_ASM		__REG_SEL(.dword, .word)
#define SZREG		__REG_SEL(8, 4)
#define LGREG		__REG_SEL(3, 2)
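
/*
 * Usage sketch (illustrative only): in C the selectors expand to string
 * literals, so they can be pasted into inline asm templates, e.g.
 *
 *	unsigned long val;
 *	asm volatile (REG_L " %0, 0(%1)" : "=r" (val) : "r" (ptr));
 *
 * where ptr is a placeholder pointer variable. In assembly sources,
 * REG_L/REG_S are used directly as native-width (SZREG bytes, log2 LGREG)
 * load/store mnemonics.
 */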

#if __SIZEOF_POINTER__ == 8
#ifdef __ASSEMBLY__
#define RISCV_PTR		.dword
#define RISCV_SZPTR		8
#define RISCV_LGPTR		3
#else
#define RISCV_PTR		".dword"
#define RISCV_SZPTR		"8"
#define RISCV_LGPTR		"3"
#endif
#elif __SIZEOF_POINTER__ == 4
#ifdef __ASSEMBLY__
#define RISCV_PTR		.word
#define RISCV_SZPTR		4
#define RISCV_LGPTR		2
#else
#define RISCV_PTR		".word"
#define RISCV_SZPTR		"4"
#define RISCV_LGPTR		"2"
#endif
#else
#error "Unexpected __SIZEOF_POINTER__"
#endif
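
/*
 * Usage sketch (illustrative only, placeholder symbol names): RISCV_PTR
 * emits a native pointer-sized data item, e.g. for building a table of
 * code addresses in assembly:
 *
 *	.align	RISCV_LGPTR
 *	my_table:
 *		RISCV_PTR	handler_one
 *		RISCV_PTR	handler_two
 */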

#if (__SIZEOF_INT__ == 4)
#define RISCV_INT		__ASM_STR(.word)
#define RISCV_SZINT		__ASM_STR(4)
#define RISCV_LGINT		__ASM_STR(2)
#else
#error "Unexpected __SIZEOF_INT__"
#endif

#if (__SIZEOF_SHORT__ == 2)
#define RISCV_SHORT		__ASM_STR(.half)
#define RISCV_SZSHORT		__ASM_STR(2)
#define RISCV_LGSHORT		__ASM_STR(1)
#else
#error "Unexpected __SIZEOF_SHORT__"
#endif

#ifdef __ASSEMBLY__
#include <asm/asm-offsets.h>

/* Common assembly source macros */

/*
 * NOP sequence
 */
.macro	nops, num
	.rept	\num
	nop
	.endr
.endm
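
/*
 * Usage sketch (illustrative only): "nops 4" expands to four consecutive
 * nop instructions, e.g. for padding a patchable code site to a fixed
 * length.
 */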

#ifdef CONFIG_SMP
#ifdef CONFIG_32BIT
#define PER_CPU_OFFSET_SHIFT 2
#else
#define PER_CPU_OFFSET_SHIFT 3
#endif

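/*
 * asm_per_cpu computes the address of this CPU's instance of a per-CPU
 * symbol: it loads the CPU number from the task's thread_info (via tp),
 * indexes the __per_cpu_offset[] table (SZREG-byte entries, hence
 * PER_CPU_OFFSET_SHIFT) and adds that offset to the symbol's link address.
 *
 * Usage sketch (illustrative only, my_percpu_var is a placeholder):
 *
 *	asm_per_cpu a0, my_percpu_var, t0
 *	REG_L a1, 0(a0)
 */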
.macro asm_per_cpu dst sym tmp
	REG_L \tmp, TASK_TI_CPU_NUM(tp)
	slli  \tmp, \tmp, PER_CPU_OFFSET_SHIFT
	la    \dst, __per_cpu_offset
	add   \dst, \dst, \tmp
	REG_L \tmp, 0(\dst)
	la    \dst, \sym
	add   \dst, \dst, \tmp
.endm
#else /* CONFIG_SMP */
.macro asm_per_cpu dst sym tmp
	la    \dst, \sym
.endm
#endif /* CONFIG_SMP */

	/* save all GPs except x1 ~ x5 (ra, sp, gp, tp, t0) into the pt_regs frame at sp */
	.macro save_from_x6_to_x31
	REG_S x6,  PT_T1(sp)
	REG_S x7,  PT_T2(sp)
	REG_S x8,  PT_S0(sp)
	REG_S x9,  PT_S1(sp)
	REG_S x10, PT_A0(sp)
	REG_S x11, PT_A1(sp)
	REG_S x12, PT_A2(sp)
	REG_S x13, PT_A3(sp)
	REG_S x14, PT_A4(sp)
	REG_S x15, PT_A5(sp)
	REG_S x16, PT_A6(sp)
	REG_S x17, PT_A7(sp)
	REG_S x18, PT_S2(sp)
	REG_S x19, PT_S3(sp)
	REG_S x20, PT_S4(sp)
	REG_S x21, PT_S5(sp)
	REG_S x22, PT_S6(sp)
	REG_S x23, PT_S7(sp)
	REG_S x24, PT_S8(sp)
	REG_S x25, PT_S9(sp)
	REG_S x26, PT_S10(sp)
	REG_S x27, PT_S11(sp)
	REG_S x28, PT_T3(sp)
	REG_S x29, PT_T4(sp)
	REG_S x30, PT_T5(sp)
	REG_S x31, PT_T6(sp)
	.endm

	/* restore all GPs except x1 ~ x5 (ra, sp, gp, tp, t0) from the pt_regs frame at sp */
	.macro restore_from_x6_to_x31
	REG_L x6,  PT_T1(sp)
	REG_L x7,  PT_T2(sp)
	REG_L x8,  PT_S0(sp)
	REG_L x9,  PT_S1(sp)
	REG_L x10, PT_A0(sp)
	REG_L x11, PT_A1(sp)
	REG_L x12, PT_A2(sp)
	REG_L x13, PT_A3(sp)
	REG_L x14, PT_A4(sp)
	REG_L x15, PT_A5(sp)
	REG_L x16, PT_A6(sp)
	REG_L x17, PT_A7(sp)
	REG_L x18, PT_S2(sp)
	REG_L x19, PT_S3(sp)
	REG_L x20, PT_S4(sp)
	REG_L x21, PT_S5(sp)
	REG_L x22, PT_S6(sp)
	REG_L x23, PT_S7(sp)
	REG_L x24, PT_S8(sp)
	REG_L x25, PT_S9(sp)
	REG_L x26, PT_S10(sp)
	REG_L x27, PT_S11(sp)
	REG_L x28, PT_T3(sp)
	REG_L x29, PT_T4(sp)
	REG_L x30, PT_T5(sp)
	REG_L x31, PT_T6(sp)
	.endm
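
/*
 * Usage sketch (illustrative only): both macros expect sp to already point
 * at a struct pt_regs frame (e.g. after "addi sp, sp, -(PT_SIZE_ON_STACK)")
 * and leave x1 ~ x5 to be saved/restored by the caller:
 *
 *	save_from_x6_to_x31
 *	...
 *	restore_from_x6_to_x31
 */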

#endif /* __ASSEMBLY__ */

#endif /* _ASM_RISCV_ASM_H */