
/arch/ppc/syslib/ppc4xx_dma.c

https://bitbucket.org/evzijst/gittest
/*
 * arch/ppc/syslib/ppc4xx_dma.c
 *
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute  it and/or modify it
 * under  the terms of  the GNU General  Public License as published by the
 * Free Software Foundation;  either version 2 of the  License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the  GNU General Public License along
 * with this program; if not, write  to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>

#include <asm/system.h>
#include <asm/io.h>
#include <asm/ppc4xx_dma.h>

ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];

int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}

void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}

void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}

void
ppc4xx_enable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
	unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
				       DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
				       DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
				       DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};

	/* validate the channel number before touching its state */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("enable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (p_dma_ch->in_use) {
		printk("enable_dma: channel %d in use\n", dmanr);
		return;
	}

	if (p_dma_ch->mode == DMA_MODE_READ) {
		/* peripheral to memory */
		ppc4xx_set_src_addr(dmanr, 0);
		ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
	} else if (p_dma_ch->mode == DMA_MODE_WRITE) {
		/* memory to peripheral */
		ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
		ppc4xx_set_dst_addr(dmanr, 0);
	}

	/* for other xfer modes, the addresses are already set */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	control &= ~(DMA_TM_MASK | DMA_TD);	/* clear all mode bits */
	if (p_dma_ch->mode == DMA_MODE_MM) {
		/* software initiated memory to memory */
		control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
	}

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/*
	 * Clear the CS, TS, RI bits for the channel from DMASR.  This
	 * has been observed to happen correctly only after the mode and
	 * ETD/DCE bits in DMACRx are set above.  Must do this before
	 * enabling the channel.
	 */

	mtdcr(DCRN_DMASR, status_bits[dmanr]);

	/*
	 * For device-paced transfers, Terminal Count Enable apparently
	 * must be on, and this must be turned on after the mode, etc.
	 * bits are cleared above (at least on Redwood-6).
	 */

	if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
	    (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
		control |= DMA_TCE_ENABLE;

	/*
	 * Now enable the channel.
	 */

	control |= (p_dma_ch->mode | DMA_CE_ENABLE);

	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 1;
}

void
ppc4xx_disable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* validate the channel number before touching its state */
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("disable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (!p_dma_ch->in_use) {
		printk("disable_dma: channel %d not in use\n", dmanr);
		return;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CE_ENABLE;
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 0;
}

/*
 * Sets the dma mode for single DMA transfers only.
 * For scatter/gather transfers, the mode is passed to the
 * alloc_dma_handle() function as one of the parameters.
 *
 * The mode is simply saved and used later.  This allows
 * the driver to call set_dma_mode() and set_dma_addr() in
 * any order.
 *
 * Valid mode values are:
 *
 * DMA_MODE_READ          peripheral to memory
 * DMA_MODE_WRITE         memory to peripheral
 * DMA_MODE_MM            memory to memory
 * DMA_MODE_MM_DEVATSRC   device-paced memory to memory, device at src
 * DMA_MODE_MM_DEVATDST   device-paced memory to memory, device at dst
 */
int
ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dma_mode: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->mode = mode;

	return DMA_STATUS_GOOD;
}

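/*
 * Illustrative sketch (editor's addition, not part of the original driver):
 * a caller might program a single memory-to-peripheral transfer roughly as
 * follows.  The channel number, physical buffer address, and length are
 * hypothetical placeholders; error handling is elided.
 */
#if 0	/* example only -- not compiled */
static void example_single_write(unsigned int ch, phys_addr_t buf,
				 unsigned int len)
{
	ppc4xx_set_dma_mode(ch, DMA_MODE_WRITE);	/* memory to peripheral */
	ppc4xx_set_dma_addr(ch, buf);			/* memory-side address */
	ppc4xx_set_dma_count(ch, len);			/* length in bytes */
	ppc4xx_enable_dma(ch);				/* start the transfer */
	/* ... wait for completion, e.g. via the channel interrupt ... */
	ppc4xx_disable_dma(ch);
}
#endif
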
/*
 * Sets the DMA Count register. Note that 'count' is in bytes.
 * However, the DMA Count register counts the number of "transfers",
 * where each transfer is equal to the bus width.  Thus, count
 * MUST be a multiple of the bus width.
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: set_dma_count count 0x%x bus width %d\n",
			     count, p_dma_ch->pwidth);
	}
#endif

	count = count >> p_dma_ch->shift;

	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
}

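/*
 * Worked example (editor's addition): for a channel configured with a
 * 32-bit peripheral width (PW_32, shift == 2), a 1024-byte buffer is
 * programmed as 1024 >> 2 == 256 bus transfers in the DMA Count register.
 * A count that is not a multiple of the bus width loses its remainder in
 * the shift, which is why the DEBUG_4xxDMA check above warns about it.
 */
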
/*
 *   Returns the number of bytes left to be transferred.
 *   After a DMA transfer, this should return zero.
 *   Reading this while a DMA transfer is still in progress will return
 *   unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));

	return (count << p_dma_ch->shift);
}

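/*
 * Illustrative sketch (editor's addition): once a transfer has completed
 * (for example after the channel interrupt fired), the residue can serve as
 * a sanity check that the whole buffer was moved.  The channel number is a
 * hypothetical placeholder.
 */
#if 0	/* example only -- not compiled */
static int example_check_transfer(unsigned int ch)
{
	/* residue is reported in bytes; zero means everything was moved */
	if (ppc4xx_get_dma_residue(ch) != 0)
		return -EIO;
	return 0;
}
#endif
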
/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
			case PW_8:
				break;
			case PW_16:
				if (((unsigned) src_dma_addr & 0x1) ||
						((unsigned) dst_dma_addr & 0x1)
				   )
					error = 1;
				break;
			case PW_32:
				if (((unsigned) src_dma_addr & 0x3) ||
						((unsigned) dst_dma_addr & 0x3)
				   )
					error = 1;
				break;
			case PW_64:
				if (((unsigned) src_dma_addr & 0x7) ||
						((unsigned) dst_dma_addr & 0x7)
				   )
					error = 1;
				break;
			default:
				printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
						p_dma_ch->pwidth);
				return;
		}
		if (error)
			printk
				("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
				 src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}

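/*
 * Illustrative sketch (editor's addition): a software-initiated memory to
 * memory copy uses ppc4xx_set_dma_addr2() together with DMA_MODE_MM.  The
 * addresses and length are hypothetical placeholders.
 */
#if 0	/* example only -- not compiled */
static void example_mem_to_mem(unsigned int ch, phys_addr_t src,
			       phys_addr_t dst, unsigned int len)
{
	ppc4xx_set_dma_mode(ch, DMA_MODE_MM);
	ppc4xx_set_dma_addr2(ch, src, dst);	/* program both addresses */
	ppc4xx_set_dma_count(ch, len);		/* length in bytes */
	ppc4xx_enable_dma(ch);
}
#endif
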
/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 1;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 0;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

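/*
 * Illustrative sketch (editor's addition): drivers that want the channel
 * interrupt typically turn it on once, before programming transfers (and,
 * for scatter/gather, before ppc4xx_alloc_dma_handle() as noted above).
 * The channel number is a hypothetical placeholder.
 */
#if 0	/* example only -- not compiled */
static int example_use_channel_interrupt(unsigned int ch)
{
	int status;

	status = ppc4xx_enable_dma_interrupt(ch);
	if (status != DMA_STATUS_GOOD)
		return status;

	/* ... program and enable the transfer as usual ... */
	return DMA_STATUS_GOOD;
}
#endif
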
/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be set up by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it is recommended that it be
 * called from platform-specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Set up the control register based on the values passed to
	 * us in p_init.  Then, overwrite the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 *   Since the DMA count register takes the number of "transfers",
	 *   we need to divide the count sent to us in certain
	 *   functions by the appropriate number.  It so happens that our
	 *   right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}

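/*
 * Illustrative sketch (editor's addition): platform init code might describe
 * a channel like this before handing it to ppc4xx_init_dma_channel().  Only
 * a few representative ppc_dma_ch_t fields are shown; the full set consumed
 * by SET_DMA_CONTROL is defined in <asm/ppc4xx_dma.h>, and the values below
 * are hypothetical.
 */
#if 0	/* example only -- not compiled */
static int example_board_dma_init(void)
{
	ppc_dma_ch_t p_init;

	memset(&p_init, 0, sizeof(p_init));
	p_init.polarity = 0;		/* default DMAReq/DMAAck polarity */
	p_init.pwidth = PW_32;		/* 32-bit peripheral */
	p_init.int_enable = 0;		/* no interrupt at init time */

	return ppc4xx_init_dma_channel(0, &p_init);
}
#endif
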
/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * Although the priority is normally set up by the hardware init function,
 * this function can be used to change the priority of a channel
 * dynamically.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that the boot code or some other init code that knows the hardware
 * configuration has already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 *   The function returns DMA_STATUS_BAD_CHANNEL for an invalid channel.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	return (GET_DMA_PW(control));
}

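/*
 * Worked example (editor's addition): the PW_* encodings double as the count
 * shift, so for a valid channel the bus width in bytes is 1 << pw -- e.g. a
 * channel reporting PW_32 moves 1 << 2 == 4 bytes per transfer.
 */
#if 0	/* example only -- not compiled */
static unsigned int example_width_in_bytes(unsigned int ch)
{
	return 1 << ppc4xx_get_peripheral_width(ch);
}
#endif
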
/*
 * Clears the channel status bits
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}

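/*
 * Illustrative sketch (editor's addition): a completion handler can inspect
 * DMASR via ppc4xx_get_dma_status() and then acknowledge the channel's bits.
 * The per-channel bit layout mirrors the ">> dmanr" shift used above; the
 * channel number is a hypothetical placeholder.
 */
#if 0	/* example only -- not compiled */
static int example_ack_channel(unsigned int ch)
{
	unsigned int status = ppc4xx_get_dma_status();

	if (status & (DMA_CH0_ERR >> ch))
		printk(KERN_ERR "DMA channel %d error\n", ch);

	/* clear the CS/TS/error bits for this channel */
	return ppc4xx_clr_dma_status(ch);
}
#endif
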
/*
 * Enables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_enable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Disables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_disable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Sets the burst size (number of peripheral widths) for the channel
 * (BSIZ bits in the control/count register)
 * must be one of:
 *    DMA_CTC_BSIZ_2
 *    DMA_CTC_BSIZ_4
 *    DMA_CTC_BSIZ_8
 *    DMA_CTC_BSIZ_16
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) & ~DMA_CTC_BSIZ_MSK;
	ctc |= (bsize & DMA_CTC_BSIZ_MSK);
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

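/*
 * Illustrative sketch (editor's addition): enabling bursting is a two-step
 * configuration, and for scatter/gather it must happen before
 * ppc4xx_alloc_dma_handle(), as the notes above explain.  The channel number
 * and burst size are hypothetical placeholders.
 */
#if 0	/* example only -- not compiled */
static int example_configure_burst(unsigned int ch)
{
	int status;

	status = ppc4xx_set_burst_size(ch, DMA_CTC_BSIZ_8);
	if (status != DMA_STATUS_GOOD)
		return status;

	return ppc4xx_enable_burst(ch);
}
#endif
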
EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);
EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);