/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 **************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of the specified
 *       DMA channel. This stops any descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                if (strncmp(device_id, "BFIN_UART", 9) == 0)
                        dma_ch[channel].regs->peripheral_map |=
                                (channel - CH_UART2_RX + 0xC);
                else
                        dma_ch[channel].regs->peripheral_map |=
                                (channel - CH_UART2_RX + 0x6);
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Note: a channel must be requested with request_dma() before any
         * descriptor/channel operations are performed on it.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);
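
/*
 * Illustrative usage sketch (hypothetical driver code, not part of this API):
 * per the note above, a channel must be requested before it is programmed.
 * The channel name, callback, buffer and length below are assumptions made
 * only for the sake of the example.
 *
 *	static irqreturn_t my_rx_done(int irq, void *dev_id)
 *	{
 *		clear_dma_irqstat(CH_SPORT0_RX);
 *		return IRQ_HANDLED;
 *	}
 *
 *	static int my_rx_setup(void *rx_buf, unsigned short rx_len)
 *	{
 *		if (request_dma(CH_SPORT0_RX, "MY_DRIVER") < 0)
 *			return -EBUSY;
 *		set_dma_callback(CH_SPORT0_RX, my_rx_done, NULL);
 *		set_dma_start_addr(CH_SPORT0_RX, (unsigned long)rx_buf);
 *		set_dma_x_count(CH_SPORT0_RX, rx_len);
 *		set_dma_x_modify(CH_SPORT0_RX, 1);
 *		set_dma_config(CH_SPORT0_RX, cfg);	(cfg built with set_bfin_dma_config(), below)
 *		enable_dma(CH_SPORT0_RX);
 *		return 0;
 *	}
 */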

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);
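
/*
 * Matching teardown for the request sketch above (channel name again
 * hypothetical): free_dma() itself disables the channel, clears the
 * descriptor state and releases any interrupt handler installed via
 * set_dma_callback(), so a driver normally only has to call:
 *
 *	free_dma(CH_SPORT0_RX);
 */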

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE)
                return 0;
        else
                return 1;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specified DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *      Set the Start Address register for the specified DMA channel.
 *      This function can be used for register-based DMA to set up the
 *      start address.
 *      addr: starting address of the DMA data to be transferred
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
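
/*
 * Example (illustrative sketch only): a driver would normally build the
 * configuration word with the helper above and hand it to set_dma_config():
 *
 *	unsigned short cfg;
 *
 *	cfg = set_bfin_dma_config(1, 0, 1, 0, 1, 0);
 *	set_dma_config(channel, cfg);
 *
 * Following the shifts above, that call places 1 in bit 1 (direction),
 * 1 in bits 2-3 (width), 0 in bit 4 (dma_mode), 0 in bit 5 (syncmode),
 * 1 in bits 6-7 (intr_mode) and 0 in bits 12 and up (flow_mode), i.e.
 * cfg == 0x0046.  The DMAEN bit (bit 0) is set separately by enable_dma().
 * The meanings attached to these numeric argument values are assumptions
 * made for the example; the real mode macros live in <asm/dma.h>.
 */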

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        /* DMA_DONE and DMA_ERR are write-1-to-clear bits */
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
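
/*
 * Worked example of the split above (numbers are illustrative only): for
 * size = 0x28000, bulk = (0x28000 >> 16) << 16 = 0x20000 and rest = 0x8000.
 * The 0x20000-byte bulk part is copied first by __dma_memcpy(), which picks
 * 2-D MDMA with 1024-byte rows because that part is larger than 64K; the
 * 0x8000-byte remainder then goes out as a plain 1-D transfer starting at
 * dest + 0x20000 / src + 0x20000.
 */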

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);
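
/*
 * Illustrative usage sketch for the dma_outsb()/dma_insb() style helpers
 * (the buffer and the peripheral register address below are hypothetical).
 * The memory-side pointer steps by one element per transfer, while the
 * peripheral side is programmed with an X_MODIFY of 0 so every element is
 * written to, or read from, the same fixed address:
 *
 *	static u8 tx_buf[64];
 *
 *	dma_outsb(MY_DEVICE_FIFO_ADDR, tx_buf, sizeof(tx_buf));
 */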

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);