/*
 * include/asm-ppc/ppc4xx_dma.h
 *
 * IBM PPC4xx DMA engine library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up a bit more, Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#ifndef __ASMPPC_PPC4xx_DMA_H
#define __ASMPPC_PPC4xx_DMA_H

#include <linux/config.h>
#include <linux/types.h>

#include <asm/ibm4xx.h>

#ifdef __KERNEL__
#define MAX_PPC4xx_DMA_CHANNELS 4

/* in arch/ppc/kernel/setup.c -- Cort */
extern unsigned long DMA_MODE_WRITE, DMA_MODE_READ;
/*
 * Function return status codes
 * These values are used to indicate whether or not the function
 * call was successful, or a bad/invalid parameter was passed.
 */
#define DMA_STATUS_GOOD 0
#define DMA_STATUS_BAD_CHANNEL 1
#define DMA_STATUS_BAD_HANDLE 2
#define DMA_STATUS_BAD_MODE 3
#define DMA_STATUS_NULL_POINTER 4
#define DMA_STATUS_OUT_OF_MEMORY 5
#define DMA_STATUS_SGL_LIST_EMPTY 6
#define DMA_STATUS_GENERAL_ERROR 7
#define DMA_STATUS_CHANNEL_NOTFREE 8

#define DMA_CHANNEL_BUSY 0x80000000
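/*
 * Illustrative sketch (added; not part of the original header): callers
 * are expected to compare the library's return value against these codes.
 * The channel number and configuration below are placeholders.
 *
 *	ppc_dma_ch_t cfg = { 0 };
 *	int rc;
 *
 *	// ... fill in cfg ...
 *	rc = ppc4xx_init_dma_channel(0, &cfg);
 *	if (rc != DMA_STATUS_GOOD)
 *		printk("DMA channel init failed, status %d\n", rc);
 */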
/*
 * These indicate status as returned from the DMA Status Register.
 */
#define DMA_STATUS_NO_ERROR 0
#define DMA_STATUS_CS 1 /* Count Status */
#define DMA_STATUS_TS 2 /* Transfer Status */
#define DMA_STATUS_DMA_ERROR 3 /* DMA Error Occurred */
#define DMA_STATUS_DMA_BUSY 4 /* The channel is busy */

/*
 * DMA Channel Control Registers
 */
#ifdef CONFIG_44x
#define PPC4xx_DMA_64BIT
#define DMA_CR_OFFSET 1
#else
#define DMA_CR_OFFSET 0
#endif
#define DMA_CE_ENABLE (1<<31) /* DMA Channel Enable */
#define SET_DMA_CE_ENABLE(x) (((x)&0x1)<<31)
#define GET_DMA_CE_ENABLE(x) (((x)&DMA_CE_ENABLE)>>31)

#define DMA_CIE_ENABLE (1<<30) /* DMA Channel Interrupt Enable */
#define SET_DMA_CIE_ENABLE(x) (((x)&0x1)<<30)
#define GET_DMA_CIE_ENABLE(x) (((x)&DMA_CIE_ENABLE)>>30)

#define DMA_TD (1<<29)
#define SET_DMA_TD(x) (((x)&0x1)<<29)
#define GET_DMA_TD(x) (((x)&DMA_TD)>>29)

#define DMA_PL (1<<28) /* Peripheral Location */
#define SET_DMA_PL(x) (((x)&0x1)<<28)
#define GET_DMA_PL(x) (((x)&DMA_PL)>>28)

#define EXTERNAL_PERIPHERAL 0
#define INTERNAL_PERIPHERAL 1

#define SET_DMA_PW(x) (((x)&0x3)<<(26-DMA_CR_OFFSET)) /* Peripheral Width */
#define DMA_PW_MASK SET_DMA_PW(3)
/* FIXME: Add PW_128 support for 440GP DMA block */
#define GET_DMA_PW(x) (((x)&DMA_PW_MASK)>>(26-DMA_CR_OFFSET))

#define DMA_DAI (1<<(25-DMA_CR_OFFSET)) /* Destination Address Increment */
#define SET_DMA_DAI(x) (((x)&0x1)<<(25-DMA_CR_OFFSET))

#define DMA_SAI (1<<(24-DMA_CR_OFFSET)) /* Source Address Increment */
#define SET_DMA_SAI(x) (((x)&0x1)<<(24-DMA_CR_OFFSET))

#define DMA_BEN (1<<(23-DMA_CR_OFFSET)) /* Buffer Enable */
#define SET_DMA_BEN(x) (((x)&0x1)<<(23-DMA_CR_OFFSET))

#define SET_DMA_TM(x) (((x)&0x3)<<(21-DMA_CR_OFFSET)) /* Transfer Mode */
#define DMA_TM_MASK SET_DMA_TM(3)
#define TM_PERIPHERAL 0 /* Peripheral */
#define TM_RESERVED 1 /* Reserved */
#define TM_S_MM 2 /* Memory to Memory */
#define TM_D_MM 3 /* Device Paced Memory to Memory */
#define GET_DMA_TM(x) (((x)&DMA_TM_MASK)>>(21-DMA_CR_OFFSET))

#define SET_DMA_PSC(x) (((x)&0x3)<<(19-DMA_CR_OFFSET)) /* Peripheral Setup Cycles */
#define DMA_PSC_MASK SET_DMA_PSC(3)
#define GET_DMA_PSC(x) (((x)&DMA_PSC_MASK)>>(19-DMA_CR_OFFSET))

#define SET_DMA_PWC(x) (((x)&0x3F)<<(13-DMA_CR_OFFSET)) /* Peripheral Wait Cycles */
#define DMA_PWC_MASK SET_DMA_PWC(0x3F)
#define GET_DMA_PWC(x) (((x)&DMA_PWC_MASK)>>(13-DMA_CR_OFFSET))

#define SET_DMA_PHC(x) (((x)&0x7)<<(10-DMA_CR_OFFSET)) /* Peripheral Hold Cycles */
#define DMA_PHC_MASK SET_DMA_PHC(0x7)
#define GET_DMA_PHC(x) (((x)&DMA_PHC_MASK)>>(10-DMA_CR_OFFSET))

#define DMA_ETD_OUTPUT (1<<(9-DMA_CR_OFFSET)) /* EOT pin is a TC output */
#define SET_DMA_ETD(x) (((x)&0x1)<<(9-DMA_CR_OFFSET))

#define DMA_TCE_ENABLE (1<<(8-DMA_CR_OFFSET))
#define SET_DMA_TCE(x) (((x)&0x1)<<(8-DMA_CR_OFFSET))

#define DMA_DEC (1<<(2)) /* Address Decrement */
#define SET_DMA_DEC(x) (((x)&0x1)<<2)
#define GET_DMA_DEC(x) (((x)&DMA_DEC)>>2)
/*
 * These modes are defined in a way that makes it possible to
 * simply "or" in the value in the control register.
 */

#define DMA_MODE_MM (SET_DMA_TM(TM_S_MM)) /* memory to memory */

/* Device-paced memory to memory, */
/* device is at source address */
#define DMA_MODE_MM_DEVATSRC (DMA_TD | SET_DMA_TM(TM_D_MM))

/* Device-paced memory to memory, */
/* device is at destination address */
#define DMA_MODE_MM_DEVATDST (SET_DMA_TM(TM_D_MM))
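/*
 * Illustrative sketch (added; not part of the original header): since these
 * modes are plain control-register bit patterns, a driver can OR them into
 * a control word it builds itself, or pass them to ppc4xx_set_dma_mode()
 * declared below.  The channel number is a placeholder.
 *
 *	uint32_t control = DMA_CE_ENABLE | DMA_MODE_MM_DEVATSRC;
 *
 *	if (ppc4xx_set_dma_mode(0, DMA_MODE_MM) != DMA_STATUS_GOOD)
 *		return -EINVAL;
 */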
#define SET_DMA_PREFETCH(x) (((x)&0x3)<<(4-DMA_CR_OFFSET)) /* Memory Read Prefetch */
#define DMA_PREFETCH_MASK SET_DMA_PREFETCH(3)
#define PREFETCH_1 0 /* Prefetch 1 Double Word */
#define GET_DMA_PREFETCH(x) (((x)&DMA_PREFETCH_MASK)>>(4-DMA_CR_OFFSET))

#define DMA_PCE (1<<(3-DMA_CR_OFFSET)) /* Parity Check Enable */
#define SET_DMA_PCE(x) (((x)&0x1)<<(3-DMA_CR_OFFSET))
#define GET_DMA_PCE(x) (((x)&DMA_PCE)>>(3-DMA_CR_OFFSET))
#define DMA_ECE_ENABLE (1<<5)
#define SET_DMA_ECE(x) (((x)&0x1)<<5)
#define GET_DMA_ECE(x) (((x)&DMA_ECE_ENABLE)>>5)

#define DMA_TCD_DISABLE (1<<4)
#define SET_DMA_TCD(x) (((x)&0x1)<<4)
#define GET_DMA_TCD(x) (((x)&DMA_TCD_DISABLE)>>4)
typedef uint32_t sgl_handle_t;

#ifdef CONFIG_PPC4xx_EDMA

#define SGL_LIST_SIZE 4096
#define DMA_PPC4xx_SIZE SGL_LIST_SIZE

#define SET_DMA_PRIORITY(x) (((x)&0x3)<<(6-DMA_CR_OFFSET)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK SET_DMA_PRIORITY(3)
#define PRIORITY_LOW 0
#define PRIORITY_MID_LOW 1
#define PRIORITY_MID_HIGH 2
#define PRIORITY_HIGH 3
#define GET_DMA_PRIORITY(x) (((x)&DMA_PRIORITY_MASK)>>(6-DMA_CR_OFFSET))
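/*
 * Illustrative sketch (added; not part of the original header): these
 * PRIORITY_* values are presumably what ppc4xx_set_channel_priority(),
 * declared below, expects as its second argument.
 *
 *	ppc4xx_set_channel_priority(0, PRIORITY_MID_HIGH);
 */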
/*
 * DMA Polarity Configuration Register
 */
#define DMAReq_ActiveLow(chan) (1<<(31-(chan*3)))
#define DMAAck_ActiveLow(chan) (1<<(30-(chan*3)))
#define EOT_ActiveLow(chan) (1<<(29-(chan*3))) /* End of Transfer */
/*
 * DMA Sleep Mode Register
 */
#define SLEEP_MODE_ENABLE (1<<21)
/*
 * DMA Status Register
 */
#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_IN_DMA_REQ0 (1<<19) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<18)
#define DMA_IN_DMA_REQ2 (1<<17)
#define DMA_IN_DMA_REQ3 (1<<16)

#define DMA_EXT_DMA_REQ0 (1<<15) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<14)
#define DMA_EXT_DMA_REQ2 (1<<13)
#define DMA_EXT_DMA_REQ3 (1<<12)

#define DMA_CH0_BUSY (1<<11) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<10)
#define DMA_CH2_BUSY (1<<9)
#define DMA_CH3_BUSY (1<<8)

#define DMA_SG0 (1<<7) /* DMA Channel 0 Scatter/Gather in progress */
#define DMA_SG1 (1<<6)
#define DMA_SG2 (1<<5)
#define DMA_SG3 (1<<4)
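/*
 * Illustrative sketch (added; not part of the original header), assuming
 * ppc4xx_get_dma_status() (declared below) returns this register: the
 * per-channel completion and error bits can then be tested directly.
 *
 *	int status = ppc4xx_get_dma_status();
 *
 *	if (status & DMA_CH0_ERR)
 *		printk("DMA channel 0 error\n");
 *	else if (status & DMA_CS0)
 *		complete_transfer();	// hypothetical driver callback
 */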
/* DMA Channel Count Register */
#define DMA_CTC_BTEN (1<<23) /* Burst Enable/Disable bit */
#define DMA_CTC_BSIZ_MSK (3<<21) /* Mask of the Burst size bits */
#define DMA_CTC_BSIZ_2 (0)
#define DMA_CTC_BSIZ_4 (1<<21)
#define DMA_CTC_BSIZ_8 (2<<21)
#define DMA_CTC_BSIZ_16 (3<<21)
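/*
 * Illustrative sketch (added; not part of the original header): the burst
 * helpers declared below take a channel number, and the size argument is
 * assumed to be one of the DMA_CTC_BSIZ_* encodings above.
 *
 *	ppc4xx_enable_burst(0);
 *	ppc4xx_set_burst_size(0, DMA_CTC_BSIZ_16);
 */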
/*
 * DMA SG Command Register
 */
#define SSG_ENABLE(chan) (1<<(31-chan)) /* Start Scatter Gather */
#define SSG_MASK_ENABLE(chan) (1<<(15-chan)) /* Enable writing to SSG0 bit */
/*
 * DMA Scatter/Gather Descriptor Bit fields
 */
#define SG_LINK (1<<31) /* Link */
#define SG_TCI_ENABLE (1<<29) /* Enable Terminal Count Interrupt */
#define SG_ETI_ENABLE (1<<28) /* Enable End of Transfer Interrupt */
#define SG_ERI_ENABLE (1<<27) /* Enable Error Interrupt */
#define SG_COUNT_MASK 0xFFFF /* Count Field */
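/*
 * Illustrative sketch (added; not part of the original header) of a
 * scatter/gather control/count word built from the bits above: link to a
 * following descriptor, interrupt on terminal count, and a byte count
 * masked to the count field.  "len" is a placeholder.
 *
 *	uint32_t sg_ctrl = SG_LINK | SG_TCI_ENABLE | (len & SG_COUNT_MASK);
 */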
#define SET_DMA_CONTROL \
 (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
 SET_DMA_BEN(p_init->buffer_enable) | /* buffer enable */ \
 SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
 SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
 SET_DMA_PL(p_init->pl) | /* peripheral location */ \
 SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
 SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
 SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
 SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
 SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
 SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
 SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
 SET_DMA_PREFETCH(p_init->pf) /* read prefetch */)
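/*
 * Illustrative sketch (added; not part of the original header):
 * SET_DMA_CONTROL expands against a local pointer named "p_init", so it is
 * meant to be used where a ppc_dma_ch_t * of that name is in scope.
 * build_control() below is a hypothetical helper, not part of this API.
 *
 *	static uint32_t build_control(ppc_dma_ch_t *p_init)
 *	{
 *		return SET_DMA_CONTROL;
 *	}
 */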
#define GET_DMA_POLARITY(chan) (DMAReq_ActiveLow(chan) | DMAAck_ActiveLow(chan) | EOT_ActiveLow(chan))

#elif defined(CONFIG_STB03xxx) /* stb03xxx */

#define DMA_PPC4xx_SIZE 4096
/*
 * DMA Status Register
 */

#define SET_DMA_PRIORITY(x) (((x)&0x00800001)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK 0x00800001
#define PRIORITY_LOW 0x00000000
#define PRIORITY_MID_LOW 0x00000001
#define PRIORITY_MID_HIGH 0x00800000
#define PRIORITY_HIGH 0x00800001
#define GET_DMA_PRIORITY(x) (((((x)&DMA_PRIORITY_MASK) &0x00800000) >> 22 ) | (((x)&DMA_PRIORITY_MASK) &0x00000001))
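/*
 * Worked example (added for illustration): the priority is split across
 * bits 0 and 23 of the register, and GET_DMA_PRIORITY() folds it back into
 * the range 0-3:
 *
 *	GET_DMA_PRIORITY(PRIORITY_LOW)      == 0
 *	GET_DMA_PRIORITY(PRIORITY_MID_LOW)  == 1
 *	GET_DMA_PRIORITY(PRIORITY_MID_HIGH) == 2
 *	GET_DMA_PRIORITY(PRIORITY_HIGH)     == 3
 */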
#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_CT0 (1<<19) /* Chained transfer */

#define DMA_IN_DMA_REQ0 (1<<18) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<17)
#define DMA_IN_DMA_REQ2 (1<<16)
#define DMA_IN_DMA_REQ3 (1<<15)

#define DMA_EXT_DMA_REQ0 (1<<14) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<13)
#define DMA_EXT_DMA_REQ2 (1<<12)
#define DMA_EXT_DMA_REQ3 (1<<11)

#define DMA_CH0_BUSY (1<<10) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<9)
#define DMA_CH2_BUSY (1<<8)
#define DMA_CH3_BUSY (1<<7)

#define DMA_CT1 (1<<6) /* Chained transfer */
#define DMA_CT2 (1<<5)
#define DMA_CT3 (1<<4)
#define DMA_CH_ENABLE (1<<7)
#define SET_DMA_CH(x) (((x)&0x1)<<7)
#define GET_DMA_CH(x) (((x)&DMA_CH_ENABLE)>>7)
/* STBx25xxx dma unique */
/*
 * Enable a device port on a DMA channel
 * (for example, external peripheral 0 on DMA channel 1);
 * see the illustrative sketch after these defines.
 */
#define SERIAL2_XMIT 7
#define SERIAL2_RECV 6
#define SERIAL1_XMIT 3
#define SERIAL1_RECV 2
#define SERIAL0_XMIT 1
#define SERIAL0_RECV 0
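/*
 * Illustrative sketch (added; not part of the original header), assuming
 * these identifiers are what ppc4xx_map_dma_port() (declared below) expects
 * as its ocp_dma argument; the channel numbers here are placeholders.
 *
 *	ppc4xx_map_dma_port(2, SERIAL0_RECV, 0);
 */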
/*
 * Bit 30 must be one for Redwoods, otherwise transfers may receive errors.
 */
#define DMA_CR_MB0 0x2
#define SET_DMA_CONTROL \
 (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
 SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
 SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
 SET_DMA_PL(p_init->pl) | /* peripheral location */ \
 SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
 SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
 SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
 SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
 SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
 SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
 SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
 SET_DMA_TCD(p_init->tcd_disable) | /* TC chain mode disable */ \
 SET_DMA_ECE(p_init->ece_enable) | /* ECE chain mode enable */ \
 SET_DMA_CH(p_init->ch_enable) | /* chain enable */ \
 DMA_CR_MB0 /* must be one */)
#define GET_DMA_POLARITY(chan) chan

#endif

typedef struct {
	unsigned short in_use;	/* set when channel is being used, clr when
				 * available */
	/*
	 * Valid polarity settings:
	 *   DMAReq_ActiveLow(n)
	 *   DMAAck_ActiveLow(n)
	 *   EOT_ActiveLow(n)
	 *
	 *   n is 0 to max dma chans
	 */
	unsigned int polarity;
	char buffer_enable;	/* Boolean: buffer enable */
	char tce_enable;	/* Boolean: terminal count enable */
	char etd_output;	/* Boolean: eot pin is a tc output */
	char pce;		/* Boolean: parity check enable */
	/*
	 * Peripheral location:
	 * INTERNAL_PERIPHERAL (UART0 on the 405GP)
	 * EXTERNAL_PERIPHERAL
	 */
	char pl;		/* internal/external peripheral */
	/*
	 * Valid pwidth settings:
	 */
	char pwidth;		/* peripheral/bus width */

	char dai;		/* Boolean: dst address increment */
	char sai;		/* Boolean: src address increment */
	/*
	 * Valid psc settings: 0-3
	 */
	unsigned int psc;	/* Peripheral Setup Cycles */

	/*
	 * Valid pwc settings:
	 */
	unsigned int pwc;	/* Peripheral Wait Cycles */

	/*
	 * Valid phc settings:
	 */
	unsigned int phc;	/* Peripheral Hold Cycles */

	/*
	 * Valid cp (channel priority) settings:
	 */
	unsigned int cp;	/* channel priority */

	/*
	 * Valid pf (memory read prefetch) settings:
	 */
	unsigned int pf;	/* memory read prefetch */
	/*
	 * Boolean: channel interrupt enable
	 * NOTE: for sgl transfers, only the last descriptor will be set up to
	 * interrupt.
	 */
	char int_enable;
	char shift;		/* easy access to byte_count shift, based on */
				/* the width of the channel */

	uint32_t control;	/* channel control word */

	/* These variables are used ONLY in single dma transfers */
	unsigned int mode;	/* transfer mode */
	char ce;		/* channel enable */
#ifdef CONFIG_STB03xxx
	char ch_enable;		/* chain enable */
	char tcd_disable;	/* TC chain mode disable */
	char ece_enable;	/* ECE chain mode enable */
#endif
	char td;		/* transfer direction */

	char int_on_final_sg;	/* for scatter/gather - only interrupt on last sg */
} ppc_dma_ch_t;
/*
 * PPC44x DMA implementations have a slightly different
 * descriptor layout.  Probably moved about due to the
 * change to 64-bit addresses and link pointer.  I don't
 * know why they didn't just leave control_count after
 * the dst_addr.
 */
#ifdef PPC4xx_DMA_64BIT
typedef struct {
	uint32_t control;
	uint32_t control_count;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	phys_addr_t next;
} ppc_sgl_t;
#else
typedef struct {
	uint32_t control;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	uint32_t control_count;
	uint32_t next;
} ppc_sgl_t;
#endif
typedef struct {
	uint32_t control;	/* channel ctrl word; loaded from each descriptor */
	uint32_t sgl_control;	/* LK, TCI, ETI, and ERI bits in sgl descriptor */
	dma_addr_t dma_addr;	/* dma (physical) address of this list */
	ppc_sgl_t *phead;
	dma_addr_t phead_dma;
	ppc_sgl_t *ptail;
	dma_addr_t ptail_dma;
} sgl_list_info_t;
typedef struct {
	phys_addr_t *src_addr;
	phys_addr_t *dst_addr;
	phys_addr_t dma_src_addr;
	phys_addr_t dma_dst_addr;
} pci_alloc_desc_t;
extern ppc_dma_ch_t dma_channels[];

/*
 * The DMA API is in ppc4xx_dma.c and ppc4xx_sgdma.c
 */
extern int ppc4xx_init_dma_channel(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_get_channel_config(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_set_channel_priority(unsigned int, unsigned int);
extern unsigned int ppc4xx_get_peripheral_width(unsigned int);
extern void ppc4xx_set_sg_addr(int, phys_addr_t);
extern int ppc4xx_add_dma_sgl(sgl_handle_t, phys_addr_t, phys_addr_t, unsigned int);
extern void ppc4xx_enable_dma_sgl(sgl_handle_t);
extern void ppc4xx_disable_dma_sgl(sgl_handle_t);
extern int ppc4xx_get_dma_sgl_residue(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_delete_dma_sgl_element(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
extern void ppc4xx_free_dma_handle(sgl_handle_t);
extern int ppc4xx_get_dma_status(void);
extern int ppc4xx_enable_burst(unsigned int);
extern int ppc4xx_disable_burst(unsigned int);
extern int ppc4xx_set_burst_size(unsigned int, unsigned int);
extern void ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr);
extern void ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr);
extern void ppc4xx_enable_dma(unsigned int dmanr);
extern void ppc4xx_disable_dma(unsigned int dmanr);
extern void ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count);
extern int ppc4xx_get_dma_residue(unsigned int dmanr);
extern void ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
				 phys_addr_t dst_dma_addr);
extern int ppc4xx_enable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_disable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_clr_dma_status(unsigned int dmanr);
extern int ppc4xx_map_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_disable_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode);
/* These are in kernel/dma.c: */

/* reserve a DMA channel */
extern int request_dma(unsigned int dmanr, const char *device_id);

/* release it again */
extern void free_dma(unsigned int dmanr);
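/*
 * Illustrative end-to-end sketch (added; not part of the original header)
 * of a single memory-to-memory transfer using the calls above.  The channel
 * number, src_phys/dst_phys/nbytes, and the 0-on-success convention assumed
 * for request_dma() are placeholders/assumptions.
 *
 *	unsigned int chan = 0;
 *	ppc_dma_ch_t cfg = { 0 };
 *
 *	cfg.dai = 1;
 *	cfg.sai = 1;
 *
 *	if (request_dma(chan, "my-driver") == 0 &&
 *	    ppc4xx_init_dma_channel(chan, &cfg) == DMA_STATUS_GOOD) {
 *		ppc4xx_set_dma_mode(chan, DMA_MODE_MM);
 *		ppc4xx_set_dma_addr2(chan, src_phys, dst_phys);
 *		ppc4xx_set_dma_count(chan, nbytes);
 *		ppc4xx_enable_dma(chan);
 *		// ... wait for completion via status bits or interrupt ...
 *		ppc4xx_disable_dma(chan);
 *		free_dma(chan);
 *	}
 *
 * A scatter/gather transfer would instead go through
 * ppc4xx_alloc_dma_handle(), ppc4xx_add_dma_sgl(), ppc4xx_enable_dma_sgl()
 * and ppc4xx_free_dma_handle().
 */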
#endif /* __KERNEL__ */
#endif /* __ASMPPC_PPC4xx_DMA_H */