/* memchr (str, ch, n) -- Return pointer to first occurrence of CH in the
   first N bytes of STR.
   For Motorola 68000.
   Copyright (C) 1999-2022 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library.  If not, see
   <https://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"

	TEXT
ENTRY(__memchr)
	/* Save the callee-saved registers we use.  */
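	/* On ColdFire, moveml does not support the predecrement
	   addressing mode, so the registers are pushed one at a time.  */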
#ifdef __mcoldfire__
	movel	R(d2),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (4)
	movel	R(d3),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (4)
	movel	R(d4),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (4)
	cfi_rel_offset (R(d2), 8)
	cfi_rel_offset (R(d3), 4)
	cfi_rel_offset (R(d4), 0)
#else
	moveml	R(d2)-R(d4),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (3*4)
	cfi_rel_offset (R(d2), 0)
	cfi_rel_offset (R(d3), 4)
	cfi_rel_offset (R(d4), 8)
#endif

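	/* The three saved registers plus the return address occupy 16
	   bytes of stack, so STR lives 16 bytes above SP, CH 20 bytes
	   above it (passed as an int whose low byte, on this big-endian
	   machine, is at offset 23) and N 24 bytes above it.  */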
	/* Get string pointer, character and length.  */
	movel	MEM_DISP(sp,16),R(a0)
	moveb	MEM_DISP(sp,23),R(d0)
	movel	MEM_DISP(sp,24),R(d4)

	/* Check whether at least four bytes are left to search.  */
#ifdef __mcoldfire__
	subql	#4,R(d4)
	bcs	L(L6)
	addql	#4,R(d4)
#else
	moveql	#4,R(d1)
	cmpl	R(d1),R(d4)
	bcs	L(L6)
#endif
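	/* ColdFire provides only the longword forms of most arithmetic
	   and logical instructions, which is why the __mcoldfire__ paths
	   throughout this file avoid the word-sized variants.  */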

	/* Distribute the character to all bytes of a longword.  */
	movel	R(d0),R(d1)
	lsll	#8,R(d1)
	moveb	R(d0),R(d1)
	movel	R(d1),R(d0)
	swap	R(d0)
	movew	R(d1),R(d0)
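	/* For example, with C = 0x63 ('?' marks bits that do not matter
	   yet):
	     movel/lsll:  d1 = ????6300
	     moveb:       d1 = ????6363
	     movel/swap:  d0 = 6363????
	     movew:       d0 = 63636363  */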

	/* First search for the character one byte at a time until the
	   pointer is aligned to a longword boundary.  */
	movel	R(a0),R(d1)
#ifdef __mcoldfire__
	andl	#3,R(d1)
#else
	andw	#3,R(d1)
#endif
	beq	L(L1)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)
	subql	#1,R(d4)
	beq	L(L7)

#ifdef __mcoldfire__
	subql	#3,R(d1)
#else
	subqw	#3,R(d1)
#endif
	beq	L(L1)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)
	subql	#1,R(d4)
	beq	L(L7)

#ifdef __mcoldfire__
	addql	#1,R(d1)
#else
	addqw	#1,R(d1)
#endif
	beq	L(L1)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)
	subql	#1,R(d4)
	beq	L(L7)

L(L1:)
	/* Load the magic bits.  Unlike the generic implementation we can
	   use the carry bit as the fourth hole.  */
	movel	#0xfefefeff,R(d3)

      /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
	 change any of the hole bits of LONGWORD.

	 1) Is this safe?  Will it catch all the zero bytes?
	 Suppose there is a byte with all zeros.  Any carry bits
	 propagating from its left will fall into the hole at its
	 least significant bit and stop.  Since there will be no
	 carry from its most significant bit, the LSB of the
	 byte to the left will be unchanged, and the zero will be
	 detected.

	 2) Is this worthwhile?  Will it ignore everything except
	 zero bytes?  Suppose every byte of LONGWORD has a bit set
	 somewhere.  There will be a carry into bit 8.  If bit 8
	 is set, this will carry into bit 16.  If bit 8 is clear,
	 one of bits 9-15 must be set, so there will be a carry
	 into bit 16.  Similarly, there will be a carry into bit
	 24.  If one of bits 24-31 is set, there will be a carry
	 into bit 32 (=carry flag), so all of the hole bits will
	 be changed.

	 3) But wait!  Aren't we looking for C, not zero?
	 Good point.  So what we do is XOR LONGWORD with a longword,
	 each of whose bytes is C.  This turns each byte that is C
	 into a zero.  */
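	/* Concretely, with C already XORed away: a longword with a zero
	   byte, say 0x00616263, gives 0x00616263 + 0xfefefeff =
	   0xff606162 with the carry flag clear, so the hole at bit 32
	   is unchanged and the hit is noticed.  A longword with no zero
	   byte, say 0x01010101, gives 0x01010101 + 0xfefefeff =
	   0x00000000 with carry set; XORing the original back in and
	   ORing with the magic bits yields 0xffffffff, which the final
	   add of 1 turns into zero: no hit.  */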

	/* Still at least 4 bytes to search?  */
	subql	#4,R(d4)
	bcs	L(L6)

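	/* The loop body below is unrolled once, so each pass through L2
	   examines two longwords.  */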
L(L2:)
	/* Get the longword in question.  */
	movel	MEM_POSTINC(a0),R(d1)
	/* XOR with the byte we search for.  */
	eorl	R(d0),R(d1)

	/* Add the magic value.  We get carry bits reported for each byte
	   which is not C.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit.  */
	bcc	L(L8)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word equals
	   C.  */
	bne	L(L8)

	/* Still at least 4 bytes to search?  */
	subql	#4,R(d4)
	bcs	L(L6)

	/* Get the longword in question.  */
	movel	MEM_POSTINC(a0),R(d1)
	/* XOR with the byte we search for.  */
	eorl	R(d0),R(d1)

	/* Add the magic value.  We get carry bits reported for each byte
	   which is not C.  */
	movel	R(d3),R(d2)
	addl	R(d1),R(d2)

	/* Check the fourth carry bit before it is clobbered by the next
	   XOR.  If it is not set we have a hit.  */
	bcc	L(L8)

	/* We are only interested in carry bits that change due to the
	   previous add, so remove original bits.  */
	eorl	R(d1),R(d2)

	/* Now test for the other three overflow bits.
	   Set all non-carry bits.  */
	orl	R(d3),R(d2)
	/* Add 1 to get zero if all carry bits were set.  */
	addql	#1,R(d2)

	/* If we don't get zero then at least one byte of the word equals
	   C.  */
	bne	L(L8)

	/* Still at least 4 bytes to search?  */
	subql	#4,R(d4)
	bcc	L(L2)

L(L6:)
	/* Search the remaining bytes (fewer than four) one at a time.  */
#ifdef __mcoldfire__
	addql	#4,R(d4)
#else
	andw	#3,R(d4)
#endif
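	/* Both paths leave the number of remaining bytes (0 to 3) in D4:
	   the ColdFire path undoes the subtraction that went negative,
	   while the masking relies on the loop only ever having
	   subtracted multiples of four from the count (only the low
	   word of D4 is valid here, so the m68k path sticks to
	   word-sized operations on it).  */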
	beq	L(L7)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)

#ifdef __mcoldfire__
	subql	#1,R(d4)
#else
	subqw	#1,R(d4)
#endif
	beq	L(L7)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)

#ifdef __mcoldfire__
	subql	#1,R(d4)
#else
	subqw	#1,R(d4)
#endif
	beq	L(L7)
	cmpb	MEM(a0),R(d0)
	beq	L(L9)

L(L7:)
	/* Return NULL.  */
	clrl	R(d0)
	movel	R(d0),R(a0)
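	/* The result is also placed in A0, for ABIs that return
	   pointers in an address register.  */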
#ifdef __mcoldfire__
	movel	MEM_POSTINC(sp),R(d4)
	cfi_remember_state
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d4))
	movel	MEM_POSTINC(sp),R(d3)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d3))
	movel	MEM_POSTINC(sp),R(d2)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d2))
#else
	moveml	MEM_POSTINC(sp),R(d2)-R(d4)
	cfi_remember_state
	cfi_adjust_cfa_offset (-3*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
#endif
	rts

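	/* The unwind state remembered above, in which the registers are
	   still described as saved on the stack, applies to the code
	   below.  */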
	cfi_restore_state
L(L8:)
	/* We have a hit.  Check to see which byte it was.  First
	   compensate for the autoincrement in the loop.  */
	subql	#4,R(a0)

	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)

	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)

	cmpb	MEM(a0),R(d0)
	beq	L(L9)
	addql	#1,R(a0)

	/* Otherwise the fourth byte must equal C.  */
L(L9:)
	movel	R(a0),R(d0)
#ifdef __mcoldfire__
	movel	MEM_POSTINC(sp),R(d4)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d4))
	movel	MEM_POSTINC(sp),R(d3)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d3))
	movel	MEM_POSTINC(sp),R(d2)
	cfi_adjust_cfa_offset (-4)
	cfi_restore (R(d2))
#else
	moveml	MEM_POSTINC(sp),R(d2)-R(d4)
	cfi_adjust_cfa_offset (-3*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
#endif
	rts
END(__memchr)

weak_alias (__memchr, memchr)
libc_hidden_builtin_def (memchr)