/* mc68020 __mpn_lshift -- Shift left a low-level natural-number integer.

Copyright (C) 1996-2022 Free Software Foundation, Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details.

You should have received a copy of the GNU Lesser General Public License
along with the GNU MP Library.  If not, see <https://www.gnu.org/licenses/>.  */

/*
  INPUT PARAMETERS
  res_ptr	(sp + 4)
  s_ptr		(sp + 8)
  s_size	(sp + 12)
  cnt		(sp + 16)
*/
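
/* For reference, this implements the usual GMP mpn entry point, roughly
   (a C sketch of the semantics, 32-bit limbs):

     mp_limb_t
     __mpn_lshift (mp_limb_t *res_ptr, const mp_limb_t *s_ptr,
                   mp_size_t s_size, unsigned int cnt)
     {
       mp_limb_t retval = s_ptr[s_size - 1] >> (32 - cnt);
       for (mp_size_t i = s_size - 1; i > 0; i--)
         res_ptr[i] = (s_ptr[i] << cnt) | (s_ptr[i - 1] >> (32 - cnt));
       res_ptr[0] = s_ptr[0] << cnt;
       return retval;
     }

   i.e. shift {s_ptr, s_size} left by cnt bits (0 < cnt < 32) and return
   the bits shifted out of the most significant limb.  */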

#include "sysdep.h"
#include "asm-syntax.h"

#define res_ptr a1
#define s_ptr a0
#define s_size d6
#define cnt d4

	TEXT
ENTRY(__mpn_lshift)

/* Save used registers on the stack.  */
	moveml	R(d2)-R(d6)/R(a2),MEM_PREDEC(sp)
	cfi_adjust_cfa_offset (6*4)
	cfi_rel_offset (R(d2), 0)
	cfi_rel_offset (R(d3), 4)
	cfi_rel_offset (R(d4), 8)
	cfi_rel_offset (R(d5), 12)
	cfi_rel_offset (R(d6), 16)
	cfi_rel_offset (R(a2), 20)

/* Copy the arguments to registers.  */
	movel	MEM_DISP(sp,28),R(res_ptr)
	movel	MEM_DISP(sp,32),R(s_ptr)
	movel	MEM_DISP(sp,36),R(s_size)
	movel	MEM_DISP(sp,40),R(cnt)
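/* The displacements 28..40 are the (sp + 4) .. (sp + 16) argument slots
   listed above, offset by the 24 bytes of registers just saved.  */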

	moveql	#1,R(d5)
	cmpl	R(d5),R(cnt)
	bne	L(Lnormal)
	cmpl	R(s_ptr),R(res_ptr)
	bls	L(Lspecial)		/* jump if s_ptr >= res_ptr */
#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(s_ptr,s_size,l,4),R(a2)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)
	lea	MEM_INDX(s_ptr,d0,l),R(a2)
#endif
	cmpl	R(res_ptr),R(a2)
	bls	L(Lspecial)		/* jump if res_ptr >= s_ptr + s_size */
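/* A shift count of exactly 1 is handled by the add-with-carry loop at
   Lspecial whenever the low-to-high pass is safe, i.e. when res_ptr is
   at or below s_ptr, or at or beyond the end of the source.  All other
   cases fall through to the general loop.  */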

L(Lnormal:)
	moveql	#32,R(d5)
	subl	R(cnt),R(d5)
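/* d5 = 32 - cnt: the complementary shift used below to bring down the
   bits that each limb carries into its more significant neighbour.  */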

#if (defined (__mc68020__) || defined (__NeXT__) || defined(mc68020))
	lea	MEM_INDX1(s_ptr,s_size,l,4),R(s_ptr)
	lea	MEM_INDX1(res_ptr,s_size,l,4),R(res_ptr)
#else /* not mc68020 */
	movel	R(s_size),R(d0)
	asll	#2,R(d0)
	addl	R(d0),R(s_ptr)
	addl	R(d0),R(res_ptr)
#endif
	movel	MEM_PREDEC(s_ptr),R(d2)
	movel	R(d2),R(d0)
	lsrl	R(d5),R(d0)		/* compute carry limb */

	lsll	R(cnt),R(d2)
	movel	R(d2),R(d1)
	subql	#1,R(s_size)
	beq	L(Lend)
	lsrl	#1,R(s_size)
	bcs	L(L1)
	subql	#1,R(s_size)
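/* The loop below is unrolled twice, alternating between d1 and d2 as
   the limb being assembled.  One limb has already been consumed above,
   and the remaining count has been halved for dbf; an odd remainder
   enters the loop at L1.  */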

L(Loop:)
	movel	MEM_PREDEC(s_ptr),R(d2)
	movel	R(d2),R(d3)
	lsrl	R(d5),R(d3)
	orl	R(d3),R(d1)
	movel	R(d1),MEM_PREDEC(res_ptr)
	lsll	R(cnt),R(d2)
L(L1:)
	movel	MEM_PREDEC(s_ptr),R(d1)
	movel	R(d1),R(d3)
	lsrl	R(d5),R(d3)
	orl	R(d3),R(d2)
	movel	R(d2),MEM_PREDEC(res_ptr)
	lsll	R(cnt),R(d1)

	dbf	R(s_size),L(Loop)
	subl	#0x10000,R(s_size)
	bcc	L(Loop)
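/* dbf decrements only the low word of s_size and stops at -1; the
   subl/bcc pair extends the count so that operands needing more than
   65536 iterations still work.  */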

L(Lend:)
	movel	R(d1),MEM_PREDEC(res_ptr) /* store least significant limb */

/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_remember_state
	cfi_adjust_cfa_offset (-6*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
	cfi_restore (R(d5))
	cfi_restore (R(d6))
	cfi_restore (R(a2))
	rts

/* We loop from least significant end of the arrays, which is only
   permissible if the source and destination don't overlap, since the
   function is documented to work for overlapping source and destination.  */

	cfi_restore_state
L(Lspecial:)
	clrl	R(d0)			/* initialize carry */
	eorw	#1,R(s_size)
	lsrl	#1,R(s_size)
	bcc	L(LL1)
	subql	#1,R(s_size)
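/* Shifting left by one bit is the same as adding the operand to itself,
   so this path uses addxl, which adds each limb to itself plus the X
   (extend) flag and thereby propagates the bit shifted out of one limb
   into the next.  The loop is unrolled twice; the eorw/lsrl sequence
   above turns s_size into the dbf loop count and sends an odd count in
   at LL1.  */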

L(LLoop:)
	movel	MEM_POSTINC(s_ptr),R(d2)
	addxl	R(d2),R(d2)
	movel	R(d2),MEM_POSTINC(res_ptr)
L(LL1:)
	movel	MEM_POSTINC(s_ptr),R(d2)
	addxl	R(d2),R(d2)
	movel	R(d2),MEM_POSTINC(res_ptr)

	dbf	R(s_size),L(LLoop)
	addxl	R(d0),R(d0)		/* save cy in lsb */
	subl	#0x10000,R(s_size)
	bcs	L(LLend)
	lsrl	#1,R(d0)		/* restore cy */
	bra	L(LLoop)
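/* When the 16-bit dbf counter expires, the pending X flag is parked in
   the low bit of d0 by the addxl above (before the subl clobbers it)
   and recovered by the lsrl before re-entering the loop; on the final
   exit that bit in d0 is the function's return value, the bit shifted
   out of the top limb.  */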

L(LLend:)
/* Restore used registers from stack frame.  */
	moveml	MEM_POSTINC(sp),R(d2)-R(d6)/R(a2)
	cfi_adjust_cfa_offset (-6*4)
	cfi_restore (R(d2))
	cfi_restore (R(d3))
	cfi_restore (R(d4))
	cfi_restore (R(d5))
	cfi_restore (R(d6))
	cfi_restore (R(a2))
	rts
END(__mpn_lshift)