/*
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */
21
22#include <linux/config.h>
23#include <linux/kernel.h>
24#include <linux/mm.h>
25#include <linux/miscdevice.h>
26#include <linux/init.h>
27#include <linux/module.h>
28
29#include <asm/system.h>
30#include <asm/io.h>
7c3dbbe9 31#include <asm/dma.h>
1da177e4
LT
32#include <asm/ppc4xx_dma.h>
33
/* Per-channel software state (mode, saved address, in_use, ...),
 * indexed by channel number.  Exported at the bottom of this file. */
ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
35
/*
 * Return the raw contents of the DMA status register (DMASR), which
 * holds the per-channel completion/terminal-count/error bits.
 */
int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}
41
/*
 * Program the source address DCR for a channel with a physical address.
 *
 * NOTE(review): the register stride here is dmanr*2, while DMACRx/DMACTx
 * accesses elsewhere in this file use dmanr*0x8 — presumably the SA/SAH
 * DCRs are packed differently; confirm against the DCR map in
 * asm/ppc4xx_dma.h.
 *
 * NOTE(review): in the PPC4xx_DMA_64BIT case only the high 32 bits of
 * src_addr are written here — verify where the low word is programmed.
 */
void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}
56
/*
 * Program the destination address DCR for a channel with a physical
 * address.  Mirror image of ppc4xx_set_src_addr(); the same review
 * notes apply (dmanr*2 stride, and only the high 32 bits written in
 * the PPC4xx_DMA_64BIT case — confirm against the DCR map).
 */
void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}
71
72void
73ppc4xx_enable_dma(unsigned int dmanr)
74{
75 unsigned int control;
76 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
77 unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
78 DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
79 DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
80 DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};
81
82 if (p_dma_ch->in_use) {
83 printk("enable_dma: channel %d in use\n", dmanr);
84 return;
85 }
86
87 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
88 printk("enable_dma: bad channel: %d\n", dmanr);
89 return;
90 }
91
92 if (p_dma_ch->mode == DMA_MODE_READ) {
93 /* peripheral to memory */
94 ppc4xx_set_src_addr(dmanr, 0);
95 ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
96 } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
97 /* memory to peripheral */
98 ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
99 ppc4xx_set_dst_addr(dmanr, 0);
100 }
101
102 /* for other xfer modes, the addresses are already set */
103 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
104
105 control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
106 if (p_dma_ch->mode == DMA_MODE_MM) {
107 /* software initiated memory to memory */
108 control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
109 }
110
111 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
112
113 /*
114 * Clear the CS, TS, RI bits for the channel from DMASR. This
115 * has been observed to happen correctly only after the mode and
116 * ETD/DCE bits in DMACRx are set above. Must do this before
117 * enabling the channel.
118 */
119
120 mtdcr(DCRN_DMASR, status_bits[dmanr]);
121
122 /*
123 * For device-paced transfers, Terminal Count Enable apparently
124 * must be on, and this must be turned on after the mode, etc.
125 * bits are cleared above (at least on Redwood-6).
126 */
127
128 if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
129 (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
130 control |= DMA_TCE_ENABLE;
131
132 /*
133 * Now enable the channel.
134 */
135
136 control |= (p_dma_ch->mode | DMA_CE_ENABLE);
137
138 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
139
140 p_dma_ch->in_use = 1;
141}
142
143void
144ppc4xx_disable_dma(unsigned int dmanr)
145{
146 unsigned int control;
147 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
148
149 if (!p_dma_ch->in_use) {
150 printk("disable_dma: channel %d not in use\n", dmanr);
151 return;
152 }
153
154 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
155 printk("disable_dma: bad channel: %d\n", dmanr);
156 return;
157 }
158
159 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
160 control &= ~DMA_CE_ENABLE;
161 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
162
163 p_dma_ch->in_use = 0;
164}
165
166/*
167 * Sets the dma mode for single DMA transfers only.
168 * For scatter/gather transfers, the mode is passed to the
169 * alloc_dma_handle() function as one of the parameters.
170 *
171 * The mode is simply saved and used later. This allows
172 * the driver to call set_dma_mode() and set_dma_addr() in
173 * any order.
174 *
175 * Valid mode values are:
176 *
177 * DMA_MODE_READ peripheral to memory
178 * DMA_MODE_WRITE memory to peripheral
179 * DMA_MODE_MM memory to memory
180 * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
181 * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
182 */
183int
184ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
185{
186 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
187
188 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
189 printk("set_dma_mode: bad channel 0x%x\n", dmanr);
190 return DMA_STATUS_BAD_CHANNEL;
191 }
192
193 p_dma_ch->mode = mode;
194
195 return DMA_STATUS_GOOD;
196}
197
198/*
199 * Sets the DMA Count register. Note that 'count' is in bytes.
200 * However, the DMA Count register counts the number of "transfers",
201 * where each transfer is equal to the bus width. Thus, count
202 * MUST be a multiple of the bus width.
203 */
204void
205ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
206{
207 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
208
209#ifdef DEBUG_4xxDMA
210 {
211 int error = 0;
212 switch (p_dma_ch->pwidth) {
213 case PW_8:
214 break;
215 case PW_16:
216 if (count & 0x1)
217 error = 1;
218 break;
219 case PW_32:
220 if (count & 0x3)
221 error = 1;
222 break;
223 case PW_64:
224 if (count & 0x7)
225 error = 1;
226 break;
227 default:
228 printk("set_dma_count: invalid bus width: 0x%x\n",
229 p_dma_ch->pwidth);
230 return;
231 }
232 if (error)
233 printk
234 ("Warning: set_dma_count count 0x%x bus width %d\n",
235 count, p_dma_ch->pwidth);
236 }
237#endif
238
239 count = count >> p_dma_ch->shift;
240
241 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
242}
243
244/*
245 * Returns the number of bytes left to be transfered.
246 * After a DMA transfer, this should return zero.
247 * Reading this while a DMA transfer is still in progress will return
248 * unpredictable results.
249 */
250int
251ppc4xx_get_dma_residue(unsigned int dmanr)
252{
253 unsigned int count;
254 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
255
256 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
257 printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
258 return DMA_STATUS_BAD_CHANNEL;
259 }
260
261 count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));
262
263 return (count << p_dma_ch->shift);
264}
265
/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma(), which programs
 * it into the source or destination register depending on the mode.
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* warn if the address is not aligned to the bus width */
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}
313
/*
 * Sets both DMA addresses for a memory to memory transfer, writing
 * them straight to the source/destination address registers.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		/* warn if either address is not aligned to the bus width */
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1)
			    )
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3)
			    )
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7)
			    )
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk
			    ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			     src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}
368
369/*
370 * Enables the channel interrupt.
371 *
372 * If performing a scatter/gatter transfer, this function
373 * MUST be called before calling alloc_dma_handle() and building
374 * the sgl list. Otherwise, interrupts will not be enabled, if
375 * they were previously disabled.
376 */
377int
378ppc4xx_enable_dma_interrupt(unsigned int dmanr)
379{
380 unsigned int control;
381 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
382
383 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
384 printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
385 return DMA_STATUS_BAD_CHANNEL;
386 }
387
388 p_dma_ch->int_enable = 1;
389
390 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
391 control |= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
392 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
393
394 return DMA_STATUS_GOOD;
395}
396
397/*
398 * Disables the channel interrupt.
399 *
400 * If performing a scatter/gatter transfer, this function
401 * MUST be called before calling alloc_dma_handle() and building
402 * the sgl list. Otherwise, interrupts will not be disabled, if
403 * they were previously enabled.
404 */
405int
406ppc4xx_disable_dma_interrupt(unsigned int dmanr)
407{
408 unsigned int control;
409 ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
410
411 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
412 printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
413 return DMA_STATUS_BAD_CHANNEL;
414 }
415
416 p_dma_ch->int_enable = 0;
417
418 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
419 control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
420 mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
421
422 return DMA_STATUS_GOOD;
423}
424
/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* NOTE(review): DMA_MODE_READ/DMA_MODE_WRITE appear to be global
	 * variables on 4xx (not constants) and are (re)initialized on
	 * every call here — confirm against asm/ppc4xx_dma.h. */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* not every 4xx variant has a polarity DCR */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init.  Then, over-write the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8		0
	 *   PW_16		1
	 *   PW_32		2
	 *   PW_64		3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number.  It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
499
/*
 * This function returns the channel configuration: the cached software
 * state for the channel, refreshed with the live polarity and DMACRx
 * control-register fields.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* start from the cached per-channel software state */
	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

	/* not every 4xx variant has a polarity DCR */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	/* decode the live control-register fields */
	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}
543
/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is setup by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* NOTE(review): an invalid priority only triggers this warning;
	 * the value is still programmed into the register below —
	 * confirm whether an early return was intended. */
	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	/* NOTE(review): the new priority is OR-ed in without first
	 * clearing the old priority bits, so a channel's priority can
	 * effectively only gain bits, never lose them — verify against
	 * the DMACRx field layout whether a masked read-modify-write
	 * was intended. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
579
580/*
581 * Returns the width of the peripheral attached to this channel. This assumes
582 * that someone who knows the hardware configuration, boot code or some other
583 * init code, already set the width.
584 *
585 * The return value is one of:
586 * PW_8
587 * PW_16
588 * PW_32
589 * PW_64
590 *
591 * The function returns 0 on error.
592 */
593unsigned int
594ppc4xx_get_peripheral_width(unsigned int dmanr)
595{
596 unsigned int control;
597
598 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
599 printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
600 return DMA_STATUS_BAD_CHANNEL;
601 }
602
603 control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
604
605 return (GET_DMA_PW(control));
606}
607
/*
 * Clears the channel status bits (error / complete / stopped) in DMASR.
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* Shifting the channel-0 mask right by dmanr selects the same
	 * bits for channel dmanr — assumes the per-channel ERR/CS/TS
	 * bits sit in adjacent bit positions in DMASR; TODO confirm
	 * against the DMASR layout in asm/ppc4xx_dma.h. */
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}
621
28cd1d17 622#ifdef CONFIG_PPC4xx_EDMA
1da177e4
LT
623/*
624 * Enables the burst on the channel (BTEN bit in the control/count register)
625 * Note:
626 * For scatter/gather dma, this function MUST be called before the
627 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
628 * sgl list and used as each sgl element is added.
629 */
630int
631ppc4xx_enable_burst(unsigned int dmanr)
632{
633 unsigned int ctc;
634 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
635 printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
636 return DMA_STATUS_BAD_CHANNEL;
637 }
638 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
639 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
640 return DMA_STATUS_GOOD;
641}
642/*
643 * Disables the burst on the channel (BTEN bit in the control/count register)
644 * Note:
645 * For scatter/gather dma, this function MUST be called before the
646 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
647 * sgl list and used as each sgl element is added.
648 */
649int
650ppc4xx_disable_burst(unsigned int dmanr)
651{
652 unsigned int ctc;
653 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
654 printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
655 return DMA_STATUS_BAD_CHANNEL;
656 }
657 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
658 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
659 return DMA_STATUS_GOOD;
660}
661/*
662 * Sets the burst size (number of peripheral widths) for the channel
663 * (BSIZ bits in the control/count register))
664 * must be one of:
665 * DMA_CTC_BSIZ_2
666 * DMA_CTC_BSIZ_4
667 * DMA_CTC_BSIZ_8
668 * DMA_CTC_BSIZ_16
669 * Note:
670 * For scatter/gather dma, this function MUST be called before the
671 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
672 * sgl list and used as each sgl element is added.
673 */
674int
675ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
676{
677 unsigned int ctc;
678 if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
679 printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
680 return DMA_STATUS_BAD_CHANNEL;
681 }
682 ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
683 ctc |= (bsize & DMA_CTC_BSIZ_MSK);
684 mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
685 return DMA_STATUS_GOOD;
686}
687
28cd1d17
MP
/* burst helpers exist only with the enhanced DMA engine support */
EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */
692
1da177e4
LT
/* public 4xx DMA core API, usable from modules */
EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);
28cd1d17 711
This page took 0.110607 seconds and 5 git commands to generate.