#ifndef __PLAT_DMA_H
#define __PLAT_DMA_H

#define DMAC_REG(x)	(*((volatile u32 *)(DMAC_REGS_VIRT + (x))))

#define DCSR(n)		DMAC_REG((n) << 2)
#define DALGN		DMAC_REG(0x00a0)	/* DMA Alignment Register */
#define DINT		DMAC_REG(0x00f0)	/* DMA Interrupt Register */
#define DDADR(n)	DMAC_REG(0x0200 + ((n) << 4))
#define DSADR(n)	DMAC_REG(0x0204 + ((n) << 4))
#define DTADR(n)	DMAC_REG(0x0208 + ((n) << 4))
#define DCMD(n)		DMAC_REG(0x020c + ((n) << 4))
#define DRCMR(n)	DMAC_REG((((n) < 64) ? 0x0100 : 0x1100) + \
				 (((n) & 0x3f) << 2))

#define DCSR_RUN	(1 << 31)	/* Run Bit (read / write) */
#define DCSR_NODESC	(1 << 30)	/* No-Descriptor Fetch (read / write) */
#define DCSR_STOPIRQEN	(1 << 29)	/* Stop Interrupt Enable (read / write) */
#define DCSR_REQPEND	(1 << 8)	/* Request Pending (read-only) */
#define DCSR_STOPSTATE	(1 << 3)	/* Stop State (read-only) */
#define DCSR_ENDINTR	(1 << 2)	/* End Interrupt (read / write) */
#define DCSR_STARTINTR	(1 << 1)	/* Start Interrupt (read / write) */
#define DCSR_BUSERR	(1 << 0)	/* Bus Error Interrupt (read / write) */

#define DCSR_EORIRQEN	(1 << 28)	/* End of Receive Interrupt Enable (R/W) */
#define DCSR_EORJMPEN	(1 << 27)	/* Jump to next descriptor on EOR */
#define DCSR_EORSTOPEN	(1 << 26)	/* STOP on an EOR */
#define DCSR_SETCMPST	(1 << 25)	/* Set Descriptor Compare Status */
#define DCSR_CLRCMPST	(1 << 24)	/* Clear Descriptor Compare Status */
#define DCSR_CMPST	(1 << 10)	/* The Descriptor Compare Status */
#define DCSR_EORINTR	(1 << 9)	/* The end of Receive */

#define DRCMR_MAPVLD	(1 << 7)	/* Map Valid (read / write) */
#define DRCMR_CHLNUM	0x1f		/* mask for Channel Number (read / write) */

#define DDADR_DESCADDR	0xfffffff0	/* Address of next descriptor (mask) */
#define DDADR_STOP	(1 << 0)	/* Stop (read / write) */

#define DCMD_INCSRCADDR	(1 << 31)	/* Source Address Increment Setting. */
#define DCMD_INCTRGADDR	(1 << 30)	/* Target Address Increment Setting. */
#define DCMD_FLOWSRC	(1 << 29)	/* Flow Control by the source. */
#define DCMD_FLOWTRG	(1 << 28)	/* Flow Control by the target. */
#define DCMD_STARTIRQEN	(1 << 22)	/* Start Interrupt Enable */
#define DCMD_ENDIRQEN	(1 << 21)	/* End Interrupt Enable */
#define DCMD_ENDIAN	(1 << 18)	/* Device Endian-ness. */
#define DCMD_BURST8	(1 << 16)	/* 8 byte burst */
#define DCMD_BURST16	(2 << 16)	/* 16 byte burst */
#define DCMD_BURST32	(3 << 16)	/* 32 byte burst */
#define DCMD_WIDTH1	(1 << 14)	/* 1 byte width */
#define DCMD_WIDTH2	(2 << 14)	/* 2 byte width (HalfWord) */
#define DCMD_WIDTH4	(3 << 14)	/* 4 byte width (Word) */
#define DCMD_LENGTH	0x01fff		/* length mask (max = 8K - 1) */

/*
 * Descriptor structure for PXA's DMA engine
 * Note: this structure must always be aligned to a 16-byte boundary.
 */
typedef struct pxa_dma_desc {
	volatile u32 ddadr;	/* Points to the next descriptor + flags */
	volatile u32 dsadr;	/* DSADR value for the current transfer */
	volatile u32 dtadr;	/* DTADR value for the current transfer */
	volatile u32 dcmd;	/* DCMD value for the current transfer */
} pxa_dma_desc;

typedef enum {
	DMA_PRIO_HIGH = 0,
	DMA_PRIO_MEDIUM = 1,
	DMA_PRIO_LOW = 2
} pxa_dma_prio;

/*
 * DMA registration
 */

int __init pxa_init_dma(int irq, int num_ch);

int pxa_request_dma(char *name,
		    pxa_dma_prio prio,
		    void (*irq_handler)(int, void *),
		    void *data);

void pxa_free_dma(int dma_ch);
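/*
 * Example: a minimal sketch of driving a single memory-to-memory transfer
 * through this legacy interface, using no-descriptor-fetch mode.  The
 * handler "my_dma_irq", the wrapper "my_start_transfer", the "my-device"
 * name and the src_phys/dst_phys/len arguments are hypothetical
 * placeholders; the block is kept out of the build with "#if 0" and is
 * illustrative only.  The channel is released later with pxa_free_dma().
 */
#if 0
static void my_dma_irq(int channel, void *data)
{
	u32 dcsr = DCSR(channel);

	/* Writing the interrupt status bits back clears them. */
	DCSR(channel) = dcsr & (DCSR_ENDINTR | DCSR_STARTINTR | DCSR_BUSERR);
}

static int my_start_transfer(u32 src_phys, u32 dst_phys, u32 len)
{
	/* Claim a free channel at high priority. */
	int ch = pxa_request_dma("my-device", DMA_PRIO_HIGH, my_dma_irq, NULL);

	if (ch < 0)
		return ch;

	/*
	 * Memory-to-memory: no DRCMR mapping and no flow-control bits.
	 * With DCSR_NODESC set, DSADR/DTADR/DCMD are programmed directly
	 * instead of being fetched from a pxa_dma_desc chain.
	 */
	DCSR(ch)  = 0;
	DSADR(ch) = src_phys;
	DTADR(ch) = dst_phys;
	DCMD(ch)  = DCMD_INCSRCADDR | DCMD_INCTRGADDR | DCMD_ENDIRQEN |
		    DCMD_BURST32 | DCMD_WIDTH4 | (len & DCMD_LENGTH);
	DCSR(ch)  = DCSR_RUN | DCSR_NODESC;

	return ch;
}
#endif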
/*
 * Cooperation with pxa_dma + dmaengine while there remains at least one pxa
 * driver not converted to dmaengine.
 */
#if defined(CONFIG_PXA_DMA)
extern int pxad_toggle_reserved_channel(int legacy_channel);
#else
static inline int pxad_toggle_reserved_channel(int legacy_channel)
{
	return 0;
}
#endif

extern void __init pxa2xx_set_dmac_info(int nb_channels, int nb_requestors);

#endif /* __PLAT_DMA_H */