/*
 * arch/ppc/kernel/ppc4xx_dma.c
 *
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2003 MontaVista Software Inc.
 *
 * Cleaned by Matt Porter <mporter@mvista.com>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute  it and/or modify it
 * under  the terms of  the GNU General  Public License as published by the
 * Free Software Foundation;  either version 2 of the  License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the  GNU General Public License along
 * with this program; if not, write  to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>

#include <asm/system.h>
#include <asm/io.h>
#include <asm/ppc4xx_dma.h>

ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}

void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	switch (dmanr) {
		case 0:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMASAH0, (u32)(src_addr >> 32));
#endif
			mtdcr(DCRN_DMASA0, (u32)src_addr);
			break;
		case 1:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMASAH1, (u32)(src_addr >> 32));
#endif
			mtdcr(DCRN_DMASA1, (u32)src_addr);
			break;
		case 2:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMASAH2, (u32)(src_addr >> 32));
#endif
			mtdcr(DCRN_DMASA2, (u32)src_addr);
			break;
		case 3:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMASAH3, (u32)(src_addr >> 32));
#endif
			mtdcr(DCRN_DMASA3, (u32)src_addr);
			break;
		default:
			if (dmanr >= MAX_PPC4xx_DMA_CHANNELS)
				printk("set_src_addr: bad channel: %d\n", dmanr);
	}
}

void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	switch (dmanr) {
		case 0:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMADAH0, (u32)(dst_addr >> 32));
#endif
			mtdcr(DCRN_DMADA0, (u32)dst_addr);
			break;
		case 1:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMADAH1, (u32)(dst_addr >> 32));
#endif
			mtdcr(DCRN_DMADA1, (u32)dst_addr);
			break;
		case 2:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMADAH2, (u32)(dst_addr >> 32));
#endif
			mtdcr(DCRN_DMADA2, (u32)dst_addr);
			break;
		case 3:
#ifdef PPC4xx_DMA_64BIT
			mtdcr(DCRN_DMADAH3, (u32)(dst_addr >> 32));
#endif
			mtdcr(DCRN_DMADA3, (u32)dst_addr);
			break;
		default:
			if (dmanr >= MAX_PPC4xx_DMA_CHANNELS)
				printk("set_dst_addr: bad channel: %d\n", dmanr);
	}
}

void
ppc4xx_enable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
	unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
				       DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
				       DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
				       DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("enable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (p_dma_ch->in_use) {
		printk("enable_dma: channel %d in use\n", dmanr);
		return;
	}

	if (p_dma_ch->mode == DMA_MODE_READ) {
		/* peripheral to memory */
		ppc4xx_set_src_addr(dmanr, 0);
		ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
	} else if (p_dma_ch->mode == DMA_MODE_WRITE) {
		/* memory to peripheral */
		ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
		ppc4xx_set_dst_addr(dmanr, 0);
	}

	/* for other xfer modes, the addresses are already set */
	switch (dmanr) {
		case 0:
			control = mfdcr(DCRN_DMACR0);
			break;
		case 1:
			control = mfdcr(DCRN_DMACR1);
			break;
		case 2:
			control = mfdcr(DCRN_DMACR2);
			break;
		case 3:
			control = mfdcr(DCRN_DMACR3);
			break;
		default:
			printk("enable_dma: bad channel: %d\n", dmanr);
	}

	control &= ~(DMA_TM_MASK | DMA_TD);	/* clear all mode bits */
	if (p_dma_ch->mode == DMA_MODE_MM) {
		/* software initiated memory to memory */
		control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
	}

	switch (dmanr) {
		case 0:
			mtdcr(DCRN_DMACR0, control);
			break;
		case 1:
			mtdcr(DCRN_DMACR1, control);
			break;
		case 2:
			mtdcr(DCRN_DMACR2, control);
			break;
		case 3:
			mtdcr(DCRN_DMACR3, control);
			break;
		default:
			printk("enable_dma: bad channel: %d\n", dmanr);
	}

	/*
	 * Clear the CS, TS, RI bits for the channel from DMASR.  This
	 * has been observed to happen correctly only after the mode and
	 * ETD/DCE bits in DMACRx are set above.  Must do this before
	 * enabling the channel.
	 */

	mtdcr(DCRN_DMASR, status_bits[dmanr]);

	/*
	 * For device-paced transfers, Terminal Count Enable apparently
	 * must be on, and this must be turned on after the mode, etc.
	 * bits are cleared above (at least on Redwood-6).
	 */

	if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
	    (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
		control |= DMA_TCE_ENABLE;

	/*
	 * Now enable the channel.
	 */

	control |= (p_dma_ch->mode | DMA_CE_ENABLE);

	switch (dmanr) {
		case 0:
			mtdcr(DCRN_DMACR0, control);
			break;
		case 1:
			mtdcr(DCRN_DMACR1, control);
			break;
		case 2:
			mtdcr(DCRN_DMACR2, control);
			break;
		case 3:
			mtdcr(DCRN_DMACR3, control);
			break;
		default:
			printk("enable_dma: bad channel: %d\n", dmanr);
	}

	p_dma_ch->in_use = 1;
}

void
ppc4xx_disable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("disable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (!p_dma_ch->in_use) {
		printk("disable_dma: channel %d not in use\n", dmanr);
		return;
	}

	switch (dmanr) {
	case 0:
		control = mfdcr(DCRN_DMACR0);
		control &= ~DMA_CE_ENABLE;
		mtdcr(DCRN_DMACR0, control);
		break;
	case 1:
		control = mfdcr(DCRN_DMACR1);
		control &= ~DMA_CE_ENABLE;
		mtdcr(DCRN_DMACR1, control);
		break;
	case 2:
		control = mfdcr(DCRN_DMACR2);
		control &= ~DMA_CE_ENABLE;
		mtdcr(DCRN_DMACR2, control);
		break;
	case 3:
		control = mfdcr(DCRN_DMACR3);
		control &= ~DMA_CE_ENABLE;
		mtdcr(DCRN_DMACR3, control);
		break;
	default:
		printk("disable_dma: bad channel: %d\n", dmanr);
	}

	p_dma_ch->in_use = 0;
}
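
/*
 * Example usage (illustrative sketch only, not part of the original
 * driver): a single-buffer, memory-to-peripheral transfer with this API.
 * The channel number "chan", physical buffer address "buf_phys" and byte
 * count "len" below are hypothetical placeholders.
 *
 *	ppc4xx_set_dma_mode(chan, DMA_MODE_WRITE);	memory to peripheral
 *	ppc4xx_set_dma_addr(chan, buf_phys);		saved, programmed by enable_dma()
 *	ppc4xx_set_dma_count(chan, len);		bytes, multiple of the bus width
 *	ppc4xx_enable_dma(chan);
 *	... wait for completion, e.g. poll ppc4xx_get_dma_status() or take
 *	    the channel interrupt ...
 *	ppc4xx_disable_dma(chan);
 */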

/*
 * Sets the dma mode for single DMA transfers only.
 * For scatter/gather transfers, the mode is passed to the
 * alloc_dma_handle() function as one of the parameters.
 *
 * The mode is simply saved and used later.  This allows
 * the driver to call set_dma_mode() and set_dma_addr() in
 * any order.
 *
 * Valid mode values are:
 *
 * DMA_MODE_READ          peripheral to memory
 * DMA_MODE_WRITE         memory to peripheral
 * DMA_MODE_MM            memory to memory
 * DMA_MODE_MM_DEVATSRC   device-paced memory to memory, device at src
 * DMA_MODE_MM_DEVATDST   device-paced memory to memory, device at dst
 */
int
ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dma_mode: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->mode = mode;

	return DMA_STATUS_GOOD;
}

/*
 * Sets the DMA Count register. Note that 'count' is in bytes.
 * However, the DMA Count register counts the number of "transfers",
 * where each transfer is equal to the bus width.  Thus, count
 * MUST be a multiple of the bus width.
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: set_dma_count count 0x%x bus width %d\n",
			       count, p_dma_ch->pwidth);
	}
#endif

	count = count >> p_dma_ch->shift;
	switch (dmanr) {
	case 0:
		mtdcr(DCRN_DMACT0, count);
		break;
	case 1:
		mtdcr(DCRN_DMACT1, count);
		break;
	case 2:
		mtdcr(DCRN_DMACT2, count);
		break;
	case 3:
		mtdcr(DCRN_DMACT3, count);
		break;
	default:
		printk("ppc4xx_set_dma_count: bad channel: %d\n", dmanr);
	}
}
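
/*
 * Worked example (illustrative): with a 32-bit peripheral width (PW_32,
 * so shift == 2) a request of count = 1024 bytes is programmed into
 * DMACTx as 1024 >> 2 = 256 transfers.  A count of 1023 bytes would
 * violate the multiple-of-bus-width requirement described above.
 */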

/*
 *   Returns the number of bytes left to be transferred.
 *   After a DMA transfer, this should return zero.
 *   Reading this while a DMA transfer is still in progress will return
 *   unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	switch (dmanr) {
	case 0:
		count = mfdcr(DCRN_DMACT0);
		break;
	case 1:
		count = mfdcr(DCRN_DMACT1);
		break;
	case 2:
		count = mfdcr(DCRN_DMACT2);
		break;
	case 3:
		count = mfdcr(DCRN_DMACT3);
		break;
	default:
		printk("ppc4xx_get_dma_residue: bad channel: %d\n", dmanr);
		return 0;
	}

	return (count << p_dma_ch->shift);
}
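
/*
 * Example (illustrative sketch): once a transfer has been started, a
 * driver can confirm completion by checking that the residue has reached
 * zero ("chan" is a hypothetical channel number):
 *
 *	if (ppc4xx_get_dma_residue(chan) == 0)
 *		... transfer finished, buffer may be reused ...
 *
 * As noted above, the value is unpredictable while a transfer is still
 * in progress.
 */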

/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
#ifdef DEBUG_4xxDMA
	{
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1))
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3))
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7))
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			       src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
	} else {
		ppc4xx_set_src_addr(dmanr, src_dma_addr);
		ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
	}
}
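
/*
 * Example usage (illustrative sketch only): a software-initiated memory to
 * memory copy.  "chan", "src_phys", "dst_phys" and "len" are hypothetical
 * placeholders.
 *
 *	ppc4xx_set_dma_mode(chan, DMA_MODE_MM);
 *	ppc4xx_set_dma_addr2(chan, src_phys, dst_phys);
 *	ppc4xx_set_dma_count(chan, len);
 *	ppc4xx_enable_dma(chan);
 */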

/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	switch (dmanr) {
	case 0:
		control = mfdcr(DCRN_DMACR0);
		control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
		mtdcr(DCRN_DMACR0, control);
		break;
	case 1:
		control = mfdcr(DCRN_DMACR1);
		control |= DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR1, control);
		break;
	case 2:
		control = mfdcr(DCRN_DMACR2);
		control |= DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR2, control);
		break;
	case 3:
		control = mfdcr(DCRN_DMACR3);
		control |= DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR3, control);
		break;
	default:
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 1;
	return DMA_STATUS_GOOD;
}

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	switch (dmanr) {
	case 0:
		control = mfdcr(DCRN_DMACR0);
		control &= ~DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
		mtdcr(DCRN_DMACR0, control);
		break;
	case 1:
		control = mfdcr(DCRN_DMACR1);
		control &= ~DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR1, control);
		break;
	case 2:
		control = mfdcr(DCRN_DMACR2);
		control &= ~DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR2, control);
		break;
	case 3:
		control = mfdcr(DCRN_DMACR3);
		control &= ~DMA_CIE_ENABLE;
		mtdcr(DCRN_DMACR3, control);
		break;
	default:
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 0;
	return DMA_STATUS_GOOD;
}

/*
 * Configures a DMA channel, including the peripheral bus width, whether a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be set up by the
 * boot code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it is recommended that it be
 * called from platform-specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Set up the control register based on the values passed to
	 * us in p_init.  Then, overwrite the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	switch (dmanr) {
	case 0:
		/* clear all polarity signals and then "or" in new signal levels */
		polarity &= ~GET_DMA_POLARITY(0);
		polarity |= p_dma_ch->polarity;
#if DCRN_POL > 0
		mtdcr(DCRN_POL, polarity);
#endif
		mtdcr(DCRN_DMACR0, control);
		break;
	case 1:
		polarity &= ~GET_DMA_POLARITY(1);
		polarity |= p_dma_ch->polarity;
#if DCRN_POL > 0
		mtdcr(DCRN_POL, polarity);
#endif
		mtdcr(DCRN_DMACR1, control);
		break;
	case 2:
		polarity &= ~GET_DMA_POLARITY(2);
		polarity |= p_dma_ch->polarity;
#if DCRN_POL > 0
		mtdcr(DCRN_POL, polarity);
#endif
		mtdcr(DCRN_DMACR2, control);
		break;
	case 3:
		polarity &= ~GET_DMA_POLARITY(3);
		polarity |= p_dma_ch->polarity;
#if DCRN_POL > 0
		mtdcr(DCRN_POL, polarity);
#endif
		mtdcr(DCRN_DMACR3, control);
		break;
	default:
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 *   Since the DMA count register takes the number of "transfers",
	 *   we need to divide the count sent to us in certain
	 *   functions by the appropriate number.  It so happens that our
	 *   right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
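
/*
 * Example (illustrative sketch of platform init code, using channel
 * structure fields referenced elsewhere in this file; the channel number
 * "chan" and the field values are hypothetical and board specific):
 *
 *	ppc_dma_ch_t cfg;
 *
 *	memset(&cfg, 0, sizeof(cfg));
 *	cfg.mode = DMA_MODE_READ;	peripheral to memory
 *	cfg.pwidth = PW_32;		32-bit peripheral
 *	cfg.polarity = 0;		default DMAReq/DMAAck polarity
 *	cfg.int_enable = 0;
 *	if (ppc4xx_init_dma_channel(chan, &cfg) != DMA_STATUS_GOOD)
 *		... handle the error ...
 */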

/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	switch (dmanr) {
	case 0:
		p_dma_ch->polarity = polarity & GET_DMA_POLARITY(0);
		control = mfdcr(DCRN_DMACR0);
		break;
	case 1:
		p_dma_ch->polarity = polarity & GET_DMA_POLARITY(1);
		control = mfdcr(DCRN_DMACR1);
		break;
	case 2:
		p_dma_ch->polarity = polarity & GET_DMA_POLARITY(2);
		control = mfdcr(DCRN_DMACR2);
		break;
	case 3:
		p_dma_ch->polarity = polarity & GET_DMA_POLARITY(3);
		control = mfdcr(DCRN_DMACR3);
		break;
	default:
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * The initial priority is set up by the hardware init function; this
 * function can be used to change the priority of a channel dynamically.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	switch (dmanr) {
	case 0:
		control = mfdcr(DCRN_DMACR0);
		control |= SET_DMA_PRIORITY(priority);
		mtdcr(DCRN_DMACR0, control);
		break;
	case 1:
		control = mfdcr(DCRN_DMACR1);
		control |= SET_DMA_PRIORITY(priority);
		mtdcr(DCRN_DMACR1, control);
		break;
	case 2:
		control = mfdcr(DCRN_DMACR2);
		control |= SET_DMA_PRIORITY(priority);
		mtdcr(DCRN_DMACR2, control);
		break;
	case 3:
		control = mfdcr(DCRN_DMACR3);
		control |= SET_DMA_PRIORITY(priority);
		mtdcr(DCRN_DMACR3, control);
		break;
	default:
		printk("ppc4xx_set_channel_priority: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel.  This
 * assumes that boot code or some other init code that knows the hardware
 * configuration has already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * The function returns 0 on error.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	switch (dmanr) {
	case 0:
		control = mfdcr(DCRN_DMACR0);
		break;
	case 1:
		control = mfdcr(DCRN_DMACR1);
		break;
	case 2:
		control = mfdcr(DCRN_DMACR2);
		break;
	case 3:
		control = mfdcr(DCRN_DMACR3);
		break;
	default:
		printk("ppc4xx_get_peripheral_width: bad channel: %d\n", dmanr);
		return 0;
	}
	return (GET_DMA_PW(control));
}
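
/*
 * Example (illustrative): since PW_8..PW_64 encode as 0..3, the value
 * returned above can be turned into the byte alignment that the count and
 * address restrictions elsewhere in this file are expressed in ("chan" is
 * a hypothetical channel number):
 *
 *	unsigned int pw = ppc4xx_get_peripheral_width(chan);
 *	unsigned int align = 1 << pw;	1, 2, 4 or 8 bytes
 */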


EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);