/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/* Remove unused code that is neither exported nor called internally */
#define REMOVE_DEAD_CODE

/**************************************************************************
 * Global Variables
 ***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of a specific
 *       DMA channel. This stops any descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{

        pr_debug("request_dma() : BEGIN \n");
        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* Note: callers must request a channel here before performing any
         * descriptor or register operations on it.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);
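
/*
 * Typical channel bring-up (illustrative sketch only, not used by this
 * file): request the channel, then attach a completion handler.  The
 * channel number CH_SPORT0_RX and the handler below are assumptions for
 * the example; dma_interrupt_t is assumed to follow the usual irq
 * handler signature.
 *
 *     static irqreturn_t my_dma_handler(int irq, void *dev_id)
 *     {
 *             clear_dma_irqstat(CH_SPORT0_RX);
 *             return IRQ_HANDLED;
 *     }
 *
 *     static int my_dma_setup(void)
 *     {
 *             int ret = request_dma(CH_SPORT0_RX, "my_driver");
 *             if (ret < 0)
 *                     return ret;
 *             return set_dma_callback(CH_SPORT0_RX, my_dma_handler, NULL);
 *     }
 */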

void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE)
                return 0;
        else
                return 1;
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *      Set the Start Address register for the specific DMA channel.
 *      This function can be used for register-based DMA to set up the
 *      start address.
 *      addr: starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);
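
/*
 * Register-based (non-descriptor) setup sketch, not code used by this
 * file: program the address, count, and stride, then write the config
 * word and enable the channel.  The channel and buffer below are
 * illustrative assumptions; an x_modify of 2 matches 16-bit elements.
 *
 *     set_dma_start_addr(CH_SPORT0_RX, (unsigned long)buf);
 *     set_dma_x_count(CH_SPORT0_RX, nr_samples);
 *     set_dma_x_modify(CH_SPORT0_RX, 2);
 *     set_dma_config(CH_SPORT0_RX, WNR | WDSIZE_16 | DI_EN);
 *     enable_dma(CH_SPORT0_RX);
 */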

unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | RESTART);
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);
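
/*
 * set_bfin_dma_config() packs the standard CONFIG fields: bit 1 is the
 * direction (WNR), bits 2-3 the word size, bit 4 the 2D mode, bits 6-7
 * the interrupt select, and bits 12-14 the flow mode, with RESTART set.
 * A sketch of building a 16-bit memory-write autobuffer config, assuming
 * the DIR_x, FLOW_x, INTR_x, DIMENSION_x and DATA_SIZE_x encodings from
 * <asm/dma.h> (the channel is also an assumption):
 *
 *     unsigned short cfg;
 *
 *     cfg = set_bfin_dma_config(DIR_WRITE, FLOW_AUTO, INTR_ON_BUF,
 *                               DIMENSION_LINEAR, DATA_SIZE_16);
 *     set_dma_config(CH_SPORT0_RX, cfg);
 */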

void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);
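
/*
 * Descriptor (scatter-gather) sketch, not code used by this file: build
 * an array of struct dmasg descriptors and hand it to set_dma_sg(),
 * which loads NEXT_DESC_PTR and folds nr_sg into the NDSIZE field of
 * CONFIG.  The field names follow the struct dmasg layout in <asm/dma.h>
 * and, like the channel and buffers, are assumptions for the example.
 *
 *     static struct dmasg desc[2];
 *
 *     desc[0].start_addr = (unsigned long)buf0;
 *     desc[0].x_count = 512;
 *     desc[0].x_modify = 1;
 *     desc[0].next_desc_addr = (unsigned long)&desc[1];
 *     (fill desc[1] the same way, pointing back at desc[0] for a ring)
 *
 *     set_dma_sg(CH_SPORT0_RX, desc, 2);
 *     enable_dma(CH_SPORT0_RX);
 */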

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE and DMA_ERR bits in the channel's interrupt
 *      status register (these bits are write-1-to-clear), acknowledging
 *      the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);
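
/*
 * A completion handler would typically check and then acknowledge the
 * per-channel status, e.g. (the channel number is an assumption):
 *
 *     unsigned short stat = get_dma_curr_irqstat(CH_SPORT0_RX);
 *
 *     if (stat & DMA_ERR)
 *             printk(KERN_ERR "DMA error on channel\n");
 *     clear_dma_irqstat(CH_SPORT0_RX);
 */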

/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);
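
/*
 * The CURR_* accessors above can be used to poll the progress of a
 * register-mode transfer, e.g. waiting for a one-dimensional stop-mode
 * transfer to drain (busy-waiting like this only makes sense for short
 * transfers; the channel is an assumption):
 *
 *     while (get_dma_curr_xcount(CH_SPORT0_RX) != 0)
 *             cpu_relax();
 */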

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);
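
/*
 * dma_memcpy() hands any multiple-of-64 KiB bulk to __dma_memcpy() as a
 * single (2D) transfer and then copies the remainder with a second call.
 * The copy runs with interrupts disabled and busy-waits on MDMA stream 0,
 * so it suits cases where a CPU memcpy() is undesirable rather than being
 * a general drop-in replacement.  Usage sketch (buffer names are
 * placeholders):
 *
 *     dma_memcpy(dst_buf, src_buf, 4096);
 */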

void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);
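
/*
 * dma_insb()/dma_outsb() (and the 16/32-bit variants below) move 'len'
 * elements between a fixed peripheral address and an incrementing memory
 * buffer using MDMA stream 0, busy-waiting with interrupts disabled.
 * Usage sketch, where DEV_FIFO_ADDR is a placeholder for a real device
 * data-register address:
 *
 *     unsigned char rx[64];
 *
 *     dma_insb(DEV_FIFO_ADDR, rx, sizeof(rx));
 */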

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);