1 /*
2  * Hitachi SCA HD64570 and HD64572 common driver for Linux
3  *
4  * Copyright (C) 1998-2003 Krzysztof Halasa <khc@pm.waw.pl>
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms of version 2 of the GNU General Public License
8  * as published by the Free Software Foundation.
9  *
10  * Sources of information:
11  *    Hitachi HD64570 SCA User's Manual
12  *    Hitachi HD64572 SCA-II User's Manual
13  *
14  * We use the following SCA memory map:
15  *
16  * Packet buffer descriptor rings - starting from winbase or win0base:
17  * rx_ring_buffers * sizeof(pkt_desc) = logical channel #0 RX ring
18  * tx_ring_buffers * sizeof(pkt_desc) = logical channel #0 TX ring
19  * rx_ring_buffers * sizeof(pkt_desc) = logical channel #1 RX ring (if used)
20  * tx_ring_buffers * sizeof(pkt_desc) = logical channel #1 TX ring (if used)
21  *
22  * Packet data buffers - starting from winbase + buff_offset:
23  * rx_ring_buffers * HDLC_MAX_MRU     = logical channel #0 RX buffers
24  * tx_ring_buffers * HDLC_MAX_MRU     = logical channel #0 TX buffers
 * rx_ring_buffers * HDLC_MAX_MRU     = logical channel #1 RX buffers (if used)
 * tx_ring_buffers * HDLC_MAX_MRU     = logical channel #1 TX buffers (if used)
27  */
28 
29 #include <linux/module.h>
30 #include <linux/kernel.h>
31 #include <linux/slab.h>
32 #include <linux/sched.h>
33 #include <linux/types.h>
34 #include <linux/fcntl.h>
35 #include <linux/interrupt.h>
36 #include <linux/in.h>
37 #include <linux/string.h>
38 #include <linux/errno.h>
39 #include <linux/init.h>
40 #include <linux/ioport.h>
41 
42 #include <asm/system.h>
43 #include <asm/bitops.h>
44 #include <asm/uaccess.h>
45 #include <asm/io.h>
46 
47 #include <linux/netdevice.h>
48 #include <linux/skbuff.h>
49 
50 #include <linux/hdlc.h>
51 
52 #if (!defined (__HD64570_H) && !defined (__HD64572_H)) || \
53     (defined (__HD64570_H) && defined (__HD64572_H))
54 #error Either hd64570.h or hd64572.h must be included
55 #endif
56 
57 #define get_msci(port)	  (phy_node(port) ?   MSCI1_OFFSET :   MSCI0_OFFSET)
58 #define get_dmac_rx(port) (phy_node(port) ? DMAC1RX_OFFSET : DMAC0RX_OFFSET)
59 #define get_dmac_tx(port) (phy_node(port) ? DMAC1TX_OFFSET : DMAC0TX_OFFSET)
60 
61 #define SCA_INTR_MSCI(node)    (node ? 0x10 : 0x01)
62 #define SCA_INTR_DMAC_RX(node) (node ? 0x20 : 0x02)
63 #define SCA_INTR_DMAC_TX(node) (node ? 0x40 : 0x04)
64 
65 #ifdef __HD64570_H /* HD64570 */
66 #define sca_outa(value, reg, card)	sca_outw(value, reg, card)
67 #define sca_ina(reg, card)		sca_inw(reg, card)
68 #define writea(value, ptr)		writew(value, ptr)
69 
70 #else /* HD64572 */
71 #define sca_outa(value, reg, card)	sca_outl(value, reg, card)
72 #define sca_ina(reg, card)		sca_inl(reg, card)
73 #define writea(value, ptr)		writel(value, ptr)
74 #endif
75 
/* Collapse the chip's interrupt status registers into a compact bitmask:
 * one SCA_INTR_MSCI/SCA_INTR_DMAC_RX/SCA_INTR_DMAC_TX bit per channel. */
static inline int sca_intr_status(card_t *card)
{
	u8 result = 0;

#ifdef __HD64570_H /* HD64570 */
	u8 isr0 = sca_in(ISR0, card); /* MSCI interrupt status */
	u8 isr1 = sca_in(ISR1, card); /* DMAC interrupt status */

	/* Low nibbles belong to channel 0, high nibbles to channel 1 */
	if (isr1 & 0x03) result |= SCA_INTR_DMAC_RX(0);
	if (isr1 & 0x0C) result |= SCA_INTR_DMAC_TX(0);
	if (isr1 & 0x30) result |= SCA_INTR_DMAC_RX(1);
	if (isr1 & 0xC0) result |= SCA_INTR_DMAC_TX(1);
	if (isr0 & 0x0F) result |= SCA_INTR_MSCI(0);
	if (isr0 & 0xF0) result |= SCA_INTR_MSCI(1);

#else /* HD64572 */
	/* The SCA-II packs all sources into a single 32-bit register */
	u32 isr0 = sca_inl(ISR0, card);

	if (isr0 & 0x0000000F) result |= SCA_INTR_DMAC_RX(0);
	if (isr0 & 0x000000F0) result |= SCA_INTR_DMAC_TX(0);
	if (isr0 & 0x00000F00) result |= SCA_INTR_DMAC_RX(1);
	if (isr0 & 0x0000F000) result |= SCA_INTR_DMAC_TX(1);
	if (isr0 & 0x003E0000) result |= SCA_INTR_MSCI(0);
	if (isr0 & 0x3E000000) result |= SCA_INTR_MSCI(1);

#endif /* HD64570 vs HD64572 */

	/* Also report TX completion (DSR_EOM pending in the DMA status
	 * register) even when no TX interrupt bit was latched, so finished
	 * transmit descriptors are always reaped. */
	if (!(result & SCA_INTR_DMAC_TX(0)))
		if (sca_in(DSR_TX(0), card) & DSR_EOM)
			result |= SCA_INTR_DMAC_TX(0);
	if (!(result & SCA_INTR_DMAC_TX(1)))
		if (sca_in(DSR_TX(1), card) & DSR_EOM)
			result |= SCA_INTR_DMAC_TX(1);

	return result;
}
112 
113 
114 
hdlc_to_port(hdlc_device * hdlc)115 static inline port_t* hdlc_to_port(hdlc_device *hdlc)
116 {
117 	return (port_t*)hdlc;
118 }
119 
120 
121 
dev_to_port(struct net_device * dev)122 static inline port_t* dev_to_port(struct net_device *dev)
123 {
124 	return hdlc_to_port(dev_to_hdlc(dev));
125 }
126 
127 
128 
/* Advance a descriptor index by one, wrapping at the ring size of the
 * selected (TX or RX) ring. */
static inline u16 next_desc(port_t *port, u16 desc, int transmit)
{
	card_t *card = port_to_card(port);
	u16 ring_size = transmit ? card->tx_ring_buffers
		: card->rx_ring_buffers;

	return (desc + 1) % ring_size;
}
134 
135 
136 
/* Map a (channel, ring, index) triple to the absolute descriptor number
 * in the global descriptor table described in the file header:
 * per channel the layout is [RX ring][TX ring]. */
static inline u16 desc_abs_number(port_t *port, u16 desc, int transmit)
{
	card_t *card = port_to_card(port);
	u16 rx_buffs = card->rx_ring_buffers;
	u16 tx_buffs = card->tx_ring_buffers;
	u16 base = log_node(port) * (rx_buffs + tx_buffs);

	if (transmit)
		base += rx_buffs;	/* TX ring follows the RX ring */
	/* Callers may pass desc == ring size ("X + 1" etc.) - wrap it. */
	desc %= (transmit ? tx_buffs : rx_buffs);
	return base + desc;
}
146 
147 
148 
/* Byte offset of a descriptor within SCA memory (from winbase/win0base). */
static inline u16 desc_offset(port_t *port, u16 desc, int transmit)
{
	/* Descriptor offset always fits in 16 bits */
	return desc_abs_number(port, desc, transmit) * sizeof(pkt_desc);
}
154 
155 
156 
/* Virtual address of a ring descriptor.  With PAGE0_ALWAYS_MAPPED the
 * descriptor table is reachable through the dedicated page-0 window;
 * otherwise the caller must have window page 0 selected (see openwin
 * calls in sca_init_sync_port/sca_rx/sca_xmit). */
static inline pkt_desc* desc_address(port_t *port, u16 desc, int transmit)
{
#ifdef PAGE0_ALWAYS_MAPPED
	return (pkt_desc*)(win0base(port_to_card(port))
			   + desc_offset(port, desc, transmit));
#else
	return (pkt_desc*)(winbase(port_to_card(port))
			   + desc_offset(port, desc, transmit));
#endif
}
167 
168 
169 
buffer_offset(port_t * port,u16 desc,int transmit)170 static inline u32 buffer_offset(port_t *port, u16 desc, int transmit)
171 {
172 	return port_to_card(port)->buff_offset +
173 		desc_abs_number(port, desc, transmit) * (u32)HDLC_MAX_MRU;
174 }
175 
176 
177 
/* Initialize both DMA rings (RX and TX) of one port: build the chained
 * descriptor lists in SCA memory and program the DMAC registers. */
static void sca_init_sync_port(port_t *port)
{
	card_t *card = port_to_card(port);
	int transmit, i;

	port->rxin = 0;
	port->txin = 0;
	port->txlast = 0;

#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	/* Descriptor table lives in page 0 of the memory window */
	openwin(card, 0);
#endif

	for (transmit = 0; transmit < 2; transmit++) {
		u16 dmac = transmit ? get_dmac_tx(port) : get_dmac_rx(port);
		u16 buffs = transmit ? card->tx_ring_buffers
			: card->rx_ring_buffers;

		/* Chain each descriptor to the next (wrap handled by
		 * desc_offset's modulo) and point it at its data buffer. */
		for (i = 0; i < buffs; i++) {
			pkt_desc* desc = desc_address(port, i, transmit);
			u16 chain_off = desc_offset(port, i + 1, transmit);
			u32 buff_off = buffer_offset(port, i, transmit);

			writea(chain_off, &desc->cp);
			writel(buff_off, &desc->bp);
			writew(0, &desc->len);
			writeb(0, &desc->stat);
		}

		/* DMA disable - to halt state */
		sca_out(0, transmit ? DSR_TX(phy_node(port)) :
			DSR_RX(phy_node(port)), card);
		/* software ABORT - to initial state */
		sca_out(DCR_ABORT, transmit ? DCR_TX(phy_node(port)) :
			DCR_RX(phy_node(port)), card);

#ifdef __HD64570_H
		sca_out(0, dmac + CPB, card); /* pointer base */
#endif
		/* current desc addr */
		sca_outa(desc_offset(port, 0, transmit), dmac + CDAL, card);
		/* RX: last descriptor marks the ring end; TX: empty ring
		 * (end == current) until sca_xmit advances EDAL. */
		if (!transmit)
			sca_outa(desc_offset(port, buffs - 1, transmit),
				 dmac + EDAL, card);
		else
			sca_outa(desc_offset(port, 0, transmit), dmac + EDAL,
				 card);

		/* clear frame end interrupt counter */
		sca_out(DCR_CLEAR_EOF, transmit ? DCR_TX(phy_node(port)) :
			DCR_RX(phy_node(port)), card);

		if (!transmit) { /* Receive */
			/* set buffer length */
			sca_outw(HDLC_MAX_MRU, dmac + BFLL, card);
			/* Chain mode, Multi-frame */
			sca_out(0x14, DMR_RX(phy_node(port)), card);
			/* frame-end and buffer-overflow interrupts */
			sca_out(DIR_EOME | DIR_BOFE, DIR_RX(phy_node(port)),
				card);
			/* DMA enable */
			sca_out(DSR_DE, DSR_RX(phy_node(port)), card);
		} else {	/* Transmit */
			/* Chain mode, Multi-frame */
			sca_out(0x14, DMR_TX(phy_node(port)), card);
			/* enable underflow interrupts */
			sca_out(DIR_BOFE, DIR_TX(phy_node(port)), card);
		}
	}
}
247 
248 
249 
250 /* MSCI interrupt service */
/* MSCI interrupt service: acknowledge and account TX underruns. */
static inline void sca_msci_intr(port_t *port)
{
	u16 msci = get_msci(port);
	card_t* card = port_to_card(port);
	u8 stat = sca_in(msci + ST1, card); /* read MSCI ST1 status */

	/* Reset MSCI TX underrun status bit (write-1-to-clear) */
	sca_out(stat & ST1_UDRN, msci + ST1, card);

	if (stat & ST1_UDRN) {
		port->hdlc.stats.tx_errors++; /* TX Underrun error detected */
		port->hdlc.stats.tx_fifo_errors++;
	}
}
265 
266 
267 
/* Copy one received frame out of SCA memory into a fresh skb and hand
 * it to the network stack.  Drops the frame (stats only) if no skb can
 * be allocated. */
static inline void sca_rx(card_t *card, port_t *port, pkt_desc *desc, u16 rxin)
{
	struct sk_buff *skb;
	u16 len;
	u32 buff;
#ifndef ALL_PAGES_ALWAYS_MAPPED
	u32 maxlen;
	u8 page;
#endif

	len = readw(&desc->len);
	skb = dev_alloc_skb(len);
	if (!skb) {
		port->hdlc.stats.rx_dropped++;
		return;
	}

	buff = buffer_offset(port, rxin, 0);
#ifndef ALL_PAGES_ALWAYS_MAPPED
	/* The data buffer may straddle a window-page boundary; split the
	 * copy across two openwin() selections if so. */
	page = buff / winsize(card);
	buff = buff % winsize(card);
	maxlen = winsize(card) - buff;

	openwin(card, page);

	if (len > maxlen) {
		memcpy_fromio(skb->data, winbase(card) + buff, maxlen);
		openwin(card, page + 1);
		memcpy_fromio(skb->data + maxlen, winbase(card), len - maxlen);
	} else
#endif
	memcpy_fromio(skb->data, winbase(card) + buff, len);

#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	/* select pkt_desc table page back */
	openwin(card, 0);
#endif
	skb_put(skb, len);
#ifdef DEBUG_PKT
	printk(KERN_DEBUG "%s RX(%i):", hdlc_to_name(&port->hdlc), skb->len);
	debug_frame(skb);
#endif
	port->hdlc.stats.rx_packets++;
	port->hdlc.stats.rx_bytes += skb->len;
	skb->mac.raw = skb->data;
	skb->dev = hdlc_to_dev(&port->hdlc);
	skb->dev->last_rx = jiffies;
	skb->protocol = hdlc_type_trans(skb, hdlc_to_dev(&port->hdlc));
	netif_rx(skb);
}
318 
319 
320 
321 /* Receive DMA interrupt service */
/* Receive DMA interrupt service: walk the RX ring from rxin up to the
 * DMAC's current descriptor, deliver good frames, account errors, and
 * recycle each descriptor by advancing the ring-end (EDAL) pointer. */
static inline void sca_rx_intr(port_t *port)
{
	u16 dmac = get_dmac_rx(port);
	card_t *card = port_to_card(port);
	u8 stat = sca_in(DSR_RX(phy_node(port)), card); /* read DMA Status */
	struct net_device_stats *stats = &port->hdlc.stats;

	/* Reset DSR status bits */
	sca_out((stat & (DSR_EOT | DSR_EOM | DSR_BOF | DSR_COF)) | DSR_DWE,
		DSR_RX(phy_node(port)), card);

	if (stat & DSR_BOF)
		stats->rx_over_errors++; /* Dropped one or more frames */

	while (1) {
		u32 desc_off = desc_offset(port, port->rxin, 0);
		pkt_desc *desc;
		u32 cda = sca_ina(dmac + CDAL, card);

		/* Stop once we reach the descriptor the DMAC is filling */
		if ((cda >= desc_off) && (cda < desc_off + sizeof(pkt_desc)))
			break;	/* No frame received */

		desc = desc_address(port, port->rxin, 0);
		stat = readb(&desc->stat);
		if (!(stat & ST_RX_EOM))
			port->rxpart = 1; /* partial frame received */
		else if ((stat & ST_ERROR_MASK) || port->rxpart) {
			/* A fragment-terminating descriptor or any error
			 * status invalidates the whole frame */
			stats->rx_errors++;
			if (stat & ST_RX_OVERRUN) stats->rx_fifo_errors++;
			else if ((stat & (ST_RX_SHORT | ST_RX_ABORT |
					  ST_RX_RESBIT)) || port->rxpart)
				stats->rx_frame_errors++;
			else if (stat & ST_RX_CRC) stats->rx_crc_errors++;
			if (stat & ST_RX_EOM)
				port->rxpart = 0; /* received last fragment */
		} else
			sca_rx(card, port, desc, port->rxin);

		/* Set new error descriptor address (give descriptor back) */
		sca_outa(desc_off, dmac + EDAL, card);
		port->rxin = next_desc(port, port->rxin, 0);
	}

	/* make sure RX DMA is enabled */
	sca_out(DSR_DE, DSR_RX(phy_node(port)), card);
}
368 
369 
370 
371 /* Transmit DMA interrupt service */
/* Transmit DMA interrupt service: reap completed TX descriptors
 * (txlast .. DMAC current), update stats, free ring slots, and wake
 * the queue.  Runs under port->lock against sca_xmit. */
static inline void sca_tx_intr(port_t *port)
{
	u16 dmac = get_dmac_tx(port);
	card_t* card = port_to_card(port);
	u8 stat;

	spin_lock(&port->lock);

	stat = sca_in(DSR_TX(phy_node(port)), card); /* read DMA Status */

	/* Reset DSR status bits */
	sca_out((stat & (DSR_EOT | DSR_EOM | DSR_BOF | DSR_COF)) | DSR_DWE,
		DSR_TX(phy_node(port)), card);

	while (1) {
		pkt_desc *desc;

		u32 desc_off = desc_offset(port, port->txlast, 1);
		u32 cda = sca_ina(dmac + CDAL, card);
		/* Stop at the descriptor the DMAC is still working on */
		if ((cda >= desc_off) && (cda < desc_off + sizeof(pkt_desc)))
			break;	/* Transmitter is/will_be sending this frame */

		desc = desc_address(port, port->txlast, 1);
		port->hdlc.stats.tx_packets++;
		port->hdlc.stats.tx_bytes += readw(&desc->len);
		writeb(0, &desc->stat);	/* Free descriptor */
		port->txlast = next_desc(port, port->txlast, 1);
	}

	netif_wake_queue(hdlc_to_dev(&port->hdlc));
	spin_unlock(&port->lock);
}
404 
405 
406 
/* Top-level card interrupt handler: repeatedly decode the status mask
 * and dispatch MSCI/RX-DMA/TX-DMA service routines for each port until
 * no interrupt sources remain. */
static void sca_intr(int irq, void* dev_id, struct pt_regs *regs)
{
	card_t *card = dev_id;
	int i;
	u8 stat;

#ifndef ALL_PAGES_ALWAYS_MAPPED
	/* Service routines switch window pages; save the current one */
	u8 page = sca_get_page(card);
#endif

	while((stat = sca_intr_status(card)) != 0) {
		for (i = 0; i < 2; i++) {
			port_t *port = get_port(card, i);
			if (port) {
				if (stat & SCA_INTR_MSCI(i))
					sca_msci_intr(port);

				if (stat & SCA_INTR_DMAC_RX(i))
					sca_rx_intr(port);

				if (stat & SCA_INTR_DMAC_TX(i))
					sca_tx_intr(port);
			}
		}
	}

#ifndef ALL_PAGES_ALWAYS_MAPPED
	openwin(card, page);		/* Restore original page */
#endif
}
437 
438 
439 
/* Program the baud-rate generator and loopback mode for a port.
 * Picks the BR (power-of-two divisor) / TMC (time constant) pair that
 * best approximates the requested clock rate, then writes back the
 * rate actually achieved into port->settings.clock_rate. */
static void sca_set_port(port_t *port)
{
	card_t* card = port_to_card(port);
	u16 msci = get_msci(port);
	u8 md2 = sca_in(msci + MD2, card);
	unsigned int tmc, br = 10, brv = 1024;


	if (port->settings.clock_rate > 0) {
		/* Try lower br for better accuracy*/
		do {
			br--;
			brv >>= 1; /* brv = 2^9 = 512 max in specs */

			/* Baud Rate = CLOCK_BASE / TMC / 2^BR */
			tmc = CLOCK_BASE / brv / port->settings.clock_rate;
		}while (br > 1 && tmc <= 128);

		if (tmc < 1) {
			tmc = 1;
			br = 0;	/* For baud=CLOCK_BASE we use tmc=1 br=0 */
			brv = 1;
		} else if (tmc > 255)
			tmc = 256; /* tmc=0 means 256 - low baud rates */

		/* Report the rate actually programmed, not the request */
		port->settings.clock_rate = CLOCK_BASE / brv / tmc;
	} else {
		br = 9; /* Minimum clock rate */
		tmc = 256;	/* 8bit = 0 */
		port->settings.clock_rate = CLOCK_BASE / (256 * 512);
	}

	/* Keep existing clock-source bits, replace only the BRG divisor */
	port->rxs = (port->rxs & ~CLK_BRG_MASK) | br;
	port->txs = (port->txs & ~CLK_BRG_MASK) | br;
	port->tmc = tmc;

	/* baud divisor - time constant*/
#ifdef __HD64570_H
	sca_out(port->tmc, msci + TMC, card);
#else
	/* SCA-II has separate RX/TX time-constant registers */
	sca_out(port->tmc, msci + TMCR, card);
	sca_out(port->tmc, msci + TMCT, card);
#endif

	/* Set BRG bits */
	sca_out(port->rxs, msci + RXS, card);
	sca_out(port->txs, msci + TXS, card);

	if (port->settings.loopback)
		md2 |= MD2_LOOPBACK;
	else
		md2 &= ~MD2_LOOPBACK;

	sca_out(md2, msci + MD2, card);

}
496 
497 
498 
/* Bring a channel up: program MSCI mode/CRC/encoding registers, FIFO
 * thresholds and interrupt enables, restore the BRG settings, then
 * enable TX/RX and start the queue. */
static void sca_open(hdlc_device *hdlc)
{
	port_t *port = hdlc_to_port(hdlc);
	card_t* card = port_to_card(port);
	u16 msci = get_msci(port);
	u8 md0, md2;

	/* Line encoding -> MD2 */
	switch(port->encoding) {
	case ENCODING_NRZ:	md2 = MD2_NRZ;		break;
	case ENCODING_NRZI:	md2 = MD2_NRZI;		break;
	case ENCODING_FM_MARK:	md2 = MD2_FM_MARK;	break;
	case ENCODING_FM_SPACE:	md2 = MD2_FM_SPACE;	break;
	default:		md2 = MD2_MANCHESTER;
	}

	if (port->settings.loopback)
		md2 |= MD2_LOOPBACK;

	/* CRC mode -> MD0 (HDLC mode always on) */
	switch(port->parity) {
	case PARITY_CRC16_PR0:	     md0 = MD0_HDLC | MD0_CRC_16_0;  break;
	case PARITY_CRC16_PR1:	     md0 = MD0_HDLC | MD0_CRC_16;    break;
#ifdef __HD64570_H
	case PARITY_CRC16_PR0_CCITT: md0 = MD0_HDLC | MD0_CRC_ITU_0; break;
#else
	case PARITY_CRC32_PR1_CCITT: md0 = MD0_HDLC | MD0_CRC_ITU32; break;
#endif
	case PARITY_CRC16_PR1_CCITT: md0 = MD0_HDLC | MD0_CRC_ITU;   break;
	default:		     md0 = MD0_HDLC | MD0_CRC_NONE;
	}

	sca_out(CMD_RESET, msci + CMD, card);
	sca_out(md0, msci + MD0, card);
	sca_out(0x00, msci + MD1, card); /* no address field check */
	sca_out(md2, msci + MD2, card);
	sca_out(0x7E, msci + IDL, card); /* flag character 0x7E */
#ifdef __HD64570_H
	sca_out(CTL_IDLE, msci + CTL, card);
#else
	/* Skip the rest of underrun frame */
	sca_out(CTL_IDLE | CTL_URCT | CTL_URSKP, msci + CTL, card);
#endif

#ifdef __HD64570_H
	/* Allow at least 8 bytes before requesting RX DMA operation */
	/* TX with higher priority and possibly with shorter transfers */
	sca_out(0x07, msci + RRC, card); /* +1=RXRDY/DMA activation condition*/
	sca_out(0x10, msci + TRC0, card); /* = TXRDY/DMA activation condition*/
	sca_out(0x14, msci + TRC1, card); /* +1=TXRDY/DMA deactiv condition */
#else
	sca_out(0x0F, msci + RNR, card); /* +1=RX DMA activation condition */
	sca_out(0x3C, msci + TFS, card); /* +1 = TX start */
	sca_out(0x38, msci + TCR, card); /* =Critical TX DMA activ condition */
	sca_out(0x38, msci + TNR0, card); /* =TX DMA activation condition */
	sca_out(0x3F, msci + TNR1, card); /* +1=TX DMA deactivation condition*/
#endif

/* We're using the following interrupts:
   - TXINT (DMAC completed all transmisions, underrun or DCD change)
   - all DMA interrupts
*/

#ifdef __HD64570_H
	/* MSCI TX INT IRQ enable */
	sca_out(IE0_TXINT, msci + IE0, card);
	sca_out(IE1_UDRN, msci + IE1, card); /* TX underrun -> TXINT */
	sca_out(sca_in(IER0, card) | (phy_node(port) ? 0x80 : 0x08),
		IER0, card);
	/* DMA IRQ enable */
	sca_out(sca_in(IER1, card) | (phy_node(port) ? 0xF0 : 0x0F),
		IER1, card);
#else
	/* MSCI TX INT IRQ enable */
	sca_outl(IE0_TXINT | IE0_UDRN, msci + IE0, card);
	/* DMA & MSCI IRQ enable */
	sca_outl(sca_inl(IER0, card) |
		 (phy_node(port) ? 0x02006600 : 0x00020066), IER0, card);
#endif

#ifdef __HD64570_H
	sca_out(port->tmc, msci + TMC, card); /* Restore registers */
#else
	sca_out(port->tmc, msci + TMCR, card);
	sca_out(port->tmc, msci + TMCT, card);
#endif
	sca_out(port->rxs, msci + RXS, card);
	sca_out(port->txs, msci + TXS, card);
	sca_out(CMD_TX_ENABLE, msci + CMD, card);
	sca_out(CMD_RX_ENABLE, msci + CMD, card);

	netif_start_queue(hdlc_to_dev(hdlc));
}
590 
591 
592 
/* Bring a channel down: stop the TX queue, then reset the MSCI
 * channel (which stops both directions). */
static void sca_close(hdlc_device *hdlc)
{
	port_t *port = hdlc_to_port(hdlc);
	card_t *card = port_to_card(port);

	netif_stop_queue(hdlc_to_dev(hdlc));
	sca_out(CMD_RESET, get_msci(port) + CMD, card);
}
601 
602 
603 
/* Validate and store the requested line encoding and CRC ("parity")
 * mode.  Returns 0 on success, -EINVAL if either value is unsupported
 * by this chip variant. */
static int sca_attach(hdlc_device *hdlc, unsigned short encoding,
		      unsigned short parity)
{
	port_t *port = hdlc_to_port(hdlc);

	switch (encoding) {
	case ENCODING_NRZ:
	case ENCODING_NRZI:
	case ENCODING_FM_MARK:
	case ENCODING_FM_SPACE:
	case ENCODING_MANCHESTER:
		break;
	default:
		return -EINVAL;
	}

	switch (parity) {
	case PARITY_NONE:
	case PARITY_CRC16_PR0:
	case PARITY_CRC16_PR1:
#ifdef __HD64570_H
	case PARITY_CRC16_PR0_CCITT:	/* HD64570 supports CRC16 PR0 */
#else
	case PARITY_CRC32_PR1_CCITT:	/* HD64572 supports CRC32 instead */
#endif
	case PARITY_CRC16_PR1_CCITT:
		break;
	default:
		return -EINVAL;
	}

	port->encoding = encoding;
	port->parity = parity;
	return 0;
}
629 
630 
631 
632 #ifdef DEBUG_RINGS
/* Debug helper: dump RX/TX ring pointers, every descriptor's status
 * byte, and the MSCI register set to the kernel log. */
static void sca_dump_rings(hdlc_device *hdlc)
{
	port_t *port = hdlc_to_port(hdlc);
	card_t *card = port_to_card(port);
	u16 cnt;
#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	u8 page;
#endif

#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	/* Descriptors are read via the page-0 window; save current page */
	page = sca_get_page(card);
	openwin(card, 0);
#endif

	printk(KERN_DEBUG "RX ring: CDA=%u EDA=%u DSR=%02X in=%u %sactive",
	       sca_ina(get_dmac_rx(port) + CDAL, card),
	       sca_ina(get_dmac_rx(port) + EDAL, card),
	       sca_in(DSR_RX(phy_node(port)), card), port->rxin,
	       sca_in(DSR_RX(phy_node(port)), card) & DSR_DE?"":"in");
	for (cnt = 0; cnt < port_to_card(port)->rx_ring_buffers; cnt++)
		printk(" %02X", readb(&(desc_address(port, cnt, 0)->stat)));

	printk("\n" KERN_DEBUG "TX ring: CDA=%u EDA=%u DSR=%02X in=%u "
	       "last=%u %sactive",
	       sca_ina(get_dmac_tx(port) + CDAL, card),
	       sca_ina(get_dmac_tx(port) + EDAL, card),
	       sca_in(DSR_TX(phy_node(port)), card), port->txin, port->txlast,
	       sca_in(DSR_TX(phy_node(port)), card) & DSR_DE ? "" : "in");

	for (cnt = 0; cnt < port_to_card(port)->tx_ring_buffers; cnt++)
		printk(" %02X", readb(&(desc_address(port, cnt, 1)->stat)));
	printk("\n");

	printk(KERN_DEBUG "MSCI: MD: %02x %02x %02x, "
	       "ST: %02x %02x %02x %02x"
#ifdef __HD64572_H
	       " %02x"
#endif
	       ", FST: %02x CST: %02x %02x\n",
	       sca_in(get_msci(port) + MD0, card),
	       sca_in(get_msci(port) + MD1, card),
	       sca_in(get_msci(port) + MD2, card),
	       sca_in(get_msci(port) + ST0, card),
	       sca_in(get_msci(port) + ST1, card),
	       sca_in(get_msci(port) + ST2, card),
	       sca_in(get_msci(port) + ST3, card),
#ifdef __HD64572_H
	       sca_in(get_msci(port) + ST4, card), /* SCA-II extra status */
#endif
	       sca_in(get_msci(port) + FST, card),
	       sca_in(get_msci(port) + CST0, card),
	       sca_in(get_msci(port) + CST1, card));

#ifdef __HD64572_H
	printk(KERN_DEBUG "ILAR: %02x\n", sca_in(ILAR, card));
#endif

#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	openwin(card, page); /* Restore original page */
#endif
}
694 #endif /* DEBUG_RINGS */
695 
696 
697 
/* hard_start_xmit: copy the frame into the TX ring's data buffer,
 * publish the descriptor, advance EDAL and kick TX DMA.  Returns 0 on
 * success or 1 to ask the stack to requeue (ring full). */
static int sca_xmit(struct sk_buff *skb, struct net_device *dev)
{
	hdlc_device *hdlc = dev_to_hdlc(dev);
	port_t *port = hdlc_to_port(hdlc);
	card_t *card = port_to_card(port);
	pkt_desc *desc;
	u32 buff, len;
#ifndef ALL_PAGES_ALWAYS_MAPPED
	u8 page;
	u32 maxlen;
#endif

	spin_lock_irq(&port->lock);

	desc = desc_address(port, port->txin + 1, 1);
	if (readb(&desc->stat)) { /* allow 1 packet gap */
		/* should never happen - previous xmit should stop queue */
#ifdef DEBUG_PKT
		printk(KERN_DEBUG "%s: transmitter buffer full\n", dev->name);
#endif
		netif_stop_queue(dev);
		spin_unlock_irq(&port->lock);
		return 1;	/* request packet to be queued */
	}

#ifdef DEBUG_PKT
	printk(KERN_DEBUG "%s TX(%i):", hdlc_to_name(hdlc), skb->len);
	debug_frame(skb);
#endif

	desc = desc_address(port, port->txin, 1);
	buff = buffer_offset(port, port->txin, 1);
	len = skb->len;
#ifndef ALL_PAGES_ALWAYS_MAPPED
	/* The data buffer may straddle a window-page boundary; split the
	 * copy across two openwin() selections if so. */
	page = buff / winsize(card);
	buff = buff % winsize(card);
	maxlen = winsize(card) - buff;

	openwin(card, page);
	if (len > maxlen) {
		memcpy_toio(winbase(card) + buff, skb->data, maxlen);
		openwin(card, page + 1);
		memcpy_toio(winbase(card), skb->data + maxlen, len - maxlen);
	}
	else
#endif
		memcpy_toio(winbase(card) + buff, skb->data, len);

#if !defined(PAGE0_ALWAYS_MAPPED) && !defined(ALL_PAGES_ALWAYS_MAPPED)
	openwin(card, 0);	/* select pkt_desc table page back */
#endif
	/* Publish length first, then mark descriptor ready (EOM) */
	writew(len, &desc->len);
	writeb(ST_TX_EOM, &desc->stat);
	dev->trans_start = jiffies;

	port->txin = next_desc(port, port->txin, 1);
	/* Move the ring-end pointer past the new frame */
	sca_outa(desc_offset(port, port->txin, 1),
		 get_dmac_tx(port) + EDAL, card);

	sca_out(DSR_DE, DSR_TX(phy_node(port)), card); /* Enable TX DMA */

	/* Stop the queue if the next-but-one slot is still in flight */
	desc = desc_address(port, port->txin + 1, 1);
	if (readb(&desc->stat)) /* allow 1 packet gap */
		netif_stop_queue(hdlc_to_dev(&port->hdlc));

	spin_unlock_irq(&port->lock);

	dev_kfree_skb(skb);
	return 0;
}
768 
769 
770 
771 #ifdef NEED_DETECT_RAM
/* Probe how much on-board RAM is actually present: write a distinct
 * pattern to every 32-bit word (end to start), then read back from the
 * start until the first mismatch.  Returns the usable size in bytes.
 * NOTE(review): assumes ramsize > 0 - a zero size would underflow the
 * first do-while iteration; callers appear to pass the board's nominal
 * RAM size. */
static u32 __devinit sca_detect_ram(card_t *card, u8 *rambase, u32 ramsize)
{
	/* Round RAM size to 32 bits, fill from end to start */
	u32 i = ramsize &= ~3;

#ifndef ALL_PAGES_ALWAYS_MAPPED
	u32 size = winsize(card);

	openwin(card, (i - 4) / size); /* select last window */
#endif
	do {
		i -= 4;
#ifndef ALL_PAGES_ALWAYS_MAPPED
		/* Crossed into the previous window page - switch to it */
		if ((i + 4) % size == 0)
			openwin(card, i / size);
		writel(i ^ 0x12345678, rambase + i % size);
#else
		writel(i ^ 0x12345678, rambase + i);
#endif
	}while (i > 0);

	/* Verify ascending; first mismatch marks the end of real RAM */
	for (i = 0; i < ramsize ; i += 4) {
#ifndef ALL_PAGES_ALWAYS_MAPPED
		if (i % size == 0)
			openwin(card, i / size);

		if (readl(rambase + i % size) != (i ^ 0x12345678))
			break;
#else
		if (readl(rambase + i) != (i ^ 0x12345678))
			break;
#endif
	}

	return i;
}
808 #endif /* NEED_DETECT_RAM */
809 
810 
811 
sca_init(card_t * card,int wait_states)812 static void __devinit sca_init(card_t *card, int wait_states)
813 {
814 	sca_out(wait_states, WCRL, card); /* Wait Control */
815 	sca_out(wait_states, WCRM, card);
816 	sca_out(wait_states, WCRH, card);
817 
818 	sca_out(0, DMER, card);	/* DMA Master disable */
819 	sca_out(0x03, PCR, card); /* DMA priority */
820 	sca_out(0, IER1, card);	/* DMA interrupt disable */
821 	sca_out(0, DSR_RX(0), card); /* DMA disable - to halt state */
822 	sca_out(0, DSR_TX(0), card);
823 	sca_out(0, DSR_RX(1), card);
824 	sca_out(0, DSR_TX(1), card);
825 	sca_out(DMER_DME, DMER, card); /* DMA Master enable */
826 }
827