2 * linux/arch/arm/mach-pnx4008/dma.c
4 * PNX4008 DMA registration and IRQ dispatching
7 * Copyright: MontaVista Software Inc. (c) 2005
9 * Based on the code from Nicolas Pitre
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License version 2 as
13 * published by the Free Software Foundation.
16 #include <linux/module.h>
17 #include <linux/init.h>
18 #include <linux/kernel.h>
19 #include <linux/interrupt.h>
20 #include <linux/errno.h>
21 #include <linux/err.h>
22 #include <linux/dma-mapping.h>
23 #include <linux/clk.h>
25 #include <asm/system.h>
27 #include <asm/hardware.h>
29 #include <asm/dma-mapping.h>
31 #include <asm/mach/dma.h>
32 #include <asm/arch/clock.h>
/*
 * Per-channel state for the PNX4008 DMA controller, indexed by channel
 * number.  NOTE(review): several members used elsewhere in this file
 * (name, data, ll_dma) sit on lines elided from this view.
 */
34 static struct dma_channel {
/* client callback: (channel, cause bits, client data, regs) */
36 void (*irq_handler) (int, int, void *, struct pt_regs *);
/* head of the channel's scatter/linked-list descriptor chain */
38 struct pnx4008_dma_ll *ll;
42 } dma_channels[MAX_DMA_CHANNELS];
/*
 * Pool of pnx4008_dma_ll descriptors carved out of one coherent DMA
 * allocation; free entries are chained through their own first word.
 * NOTE(review): the pool's members (vaddr, cur, dma_addr, count) are on
 * lines elided from this view — see their uses below.
 */
44 static struct ll_pool {
/* protects the pool free list in pnx4008_{alloc,free}_ll_entry() */
51 static spinlock_t ll_lock = SPIN_LOCK_UNLOCKED;
/*
 * Pop one linked-list descriptor from the coherent pool.
 * On success the descriptor is zeroed and its bus address is stored in
 * *ll_dma; returns the virtual address (NULL when the pool is nearly
 * exhausted — a small reserve is kept, see the count check below).
 */
53 struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
55 struct pnx4008_dma_ll *ll = NULL;
58 spin_lock_irqsave(&ll_lock, flags);
59 if (ll_pool.count > 4) { /* can give one more */
/* free-list pop: cur points at the head entry */
60 ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
/* bus address = pool base DMA address + offset within the pool */
61 *ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
/* advance the free-list head to the entry's stored next pointer */
62 *(void **)ll_pool.cur = **(void ***)ll_pool.cur;
63 memset(ll, 0, sizeof(*ll));
66 spin_unlock_irqrestore(&ll_lock, flags);
71 EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);
/*
 * Return one descriptor to the pool.  Rejects pointers outside the
 * 0x4000-byte pool region, releases any client buffer attached via
 * DMA_BUFFER_ALLOCATED, then pushes the entry back on the free list.
 */
73 void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
/* sanity: entry must lie within the pool's 0x4000-byte region */
78 if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
79 printk(KERN_ERR "Trying to free entry not allocated by DMA\n");
/* let the client free its attached data buffer first */
83 if (ll->flags & DMA_BUFFER_ALLOCATED)
84 ll->free(ll->alloc_data);
86 spin_lock_irqsave(&ll_lock, flags);
/* free-list push: store current head in the entry, make it the head */
87 *(long *)ll = *(long *)ll_pool.cur;
88 *(long *)ll_pool.cur = (long)ll;
90 spin_unlock_irqrestore(&ll_lock, flags);
94 EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);
/*
 * Release an entire descriptor chain.  NOTE(review): the loop that walks
 * ll->next / next_dma is on lines elided from this view; only the
 * per-entry free call is visible here.
 */
96 void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
98 struct pnx4008_dma_ll *ptr;
104 pnx4008_free_ll_entry(ll, ll_dma);
111 EXPORT_SYMBOL_GPL(pnx4008_free_ll);
/* number of currently requested channels; gates the DMA clock below */
113 static int dma_channels_requested = 0;
/*
 * First-user bring-up: on the 0 -> 1 transition of the request count,
 * enable the "dma_ck" clock and enable the DMA controller.
 * NOTE(review): the clk_get() error check (IS_ERR) and clk_put() appear
 * to be on lines elided from this view — confirm against full source.
 */
115 static inline void dma_increment_usage(void)
117 if (!dma_channels_requested++) {
118 struct clk *clk = clk_get(0, "dma_ck");
120 clk_set_rate(clk, 1);
/* -1/-1 leaves AHB endianness settings untouched; 1 = enable */
123 pnx4008_config_dma(-1, -1, 1);
/*
 * Last-user teardown: on the 1 -> 0 transition of the request count,
 * gate the "dma_ck" clock and disable the DMA controller.
 * NOTE(review): clk error handling is elided from this view.
 */
126 static inline void dma_decrement_usage(void)
128 if (!--dma_channels_requested) {
129 struct clk *clk = clk_get(0, "dma_ck");
131 clk_set_rate(clk, 0);
/* -1/-1 leaves AHB endianness settings untouched; 0 = disable */
134 pnx4008_config_dma(-1, -1, 0);
/* serializes access to channel bookkeeping and DMAC registers */
139 static spinlock_t dma_lock = SPIN_LOCK_UNLOCKED;
/* take dma_lock with local IRQs disabled (not usable from hard IRQ
 * context that may already hold it) */
141 static inline void pnx4008_dma_lock(void)
143 spin_lock_irq(&dma_lock);
/* release dma_lock and unconditionally re-enable local IRQs */
146 static inline void pnx4008_dma_unlock(void)
148 spin_unlock_irq(&dma_lock);
/* true when c is a usable channel index: 0 <= c < MAX_DMA_CHANNELS */
151 #define VALID_CHANNEL(c) (((c) >= 0) && ((c) < MAX_DMA_CHANNELS))
/*
 * Claim a DMA channel.  ch == -1 means "any free channel"; otherwise the
 * specific channel is requested.  Scans from the highest channel down
 * (higher index appears to be lower priority on this controller —
 * NOTE(review): confirm priority ordering against the DMAC datasheet).
 * Registers the client's irq_handler/data and turns on the controller
 * clock for the first user via dma_increment_usage().
 */
153 int pnx4008_request_channel(char *name, int ch,
154 void (*irq_handler) (int, int, void *,
155 struct pt_regs *), void *data)
159 /* basic sanity checks */
160 if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
165 /* try grabbing a DMA channel with the requested priority */
166 for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
/* a NULL name marks the slot free */
167 if (!dma_channels[i].name && (ch == -1 || ch == i)) {
174 dma_increment_usage();
175 dma_channels[i].name = name;
176 dma_channels[i].irq_handler = irq_handler;
177 dma_channels[i].data = data;
178 dma_channels[i].ll = NULL;
179 dma_channels[i].ll_dma = 0;
181 printk(KERN_WARNING "No more available DMA channels for %s\n",
186 pnx4008_dma_unlock();
190 EXPORT_SYMBOL_GPL(pnx4008_request_channel);
/*
 * Release a previously requested channel: free its descriptor chain,
 * drop the controller usage count (gating the clock for the last user),
 * and mark the slot free by clearing its name.
 * Warns on double-free of an already-free channel.
 */
192 void pnx4008_free_channel(int ch)
194 if (!dma_channels[ch].name) {
196 "%s: trying to free channel %d which is already freed\n",
202 pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
203 dma_channels[ch].ll = NULL;
204 dma_decrement_usage();
206 dma_channels[ch].name = NULL;
207 pnx4008_dma_unlock();
210 EXPORT_SYMBOL_GPL(pnx4008_free_channel);
/*
 * Program the global DMAC_CONFIG register: AHB master 1/2 endianness
 * (ahb_m1_be / ahb_m2_be; a negative value leaves a field untouched)
 * and the controller enable bit.  Only the bit-clear halves of each
 * read-modify-write are visible here; the set halves and the parameter
 * dispatch are on elided lines.
 */
212 int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
214 unsigned long dma_cfg = __raw_readl(DMAC_CONFIG)
218 dma_cfg &= ~(1 << 1);
229 dma_cfg &= ~(1 << 2);
240 dma_cfg &= ~(1 << 0);
250 __raw_writel(dma_cfg, DMAC_CONFIG);
251 pnx4008_dma_unlock();
256 EXPORT_SYMBOL_GPL(pnx4008_config_dma);
/*
 * Encode a pnx4008_dma_ch_ctrl structure into the raw Cx_CONTROL
 * register value (*ctrl).  Each switch validates one field and ORs its
 * bit(s) into *ctrl (the set/error arms are on elided lines); the burst
 * sizes are converted from byte counts to log2 register encodings, and
 * the transfer size is range-checked against the 11-bit field.
 * Returns 0 on success, negative errno on an invalid field.
 */
258 int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
261 int i = 0, dbsize, sbsize, err = 0;
263 if (!ctrl || !ch_ctrl) {
270 switch (ch_ctrl->tc_mask) {
282 switch (ch_ctrl->cacheable) {
293 switch (ch_ctrl->bufferable) {
304 switch (ch_ctrl->priv_mode) {
315 switch (ch_ctrl->di) {
326 switch (ch_ctrl->si) {
337 switch (ch_ctrl->dest_ahb1) {
348 switch (ch_ctrl->src_ahb1) {
359 switch (ch_ctrl->dwidth) {
376 switch (ch_ctrl->swidth) {
/* compute log2 of the destination burst size */
393 dbsize = ch_ctrl->dbsize;
394 while (!(dbsize & 1)) {
/* NOTE(review): condition looks truncated in this view — expected a
 * power-of-two check such as (dbsize != 1 << i); verify upstream. */
398 if (ch_ctrl->dbsize != 1 || i > 8 || i == 1) {
/* compute log2 of the source burst size */
406 sbsize = ch_ctrl->sbsize;
407 while (!(sbsize & 1)) {
/* NOTE(review): same suspected truncation as the dbsize check above */
411 if (ch_ctrl->sbsize != 1 || i > 8 || i == 1) {
/* transfer size occupies an 11-bit field */
419 if (ch_ctrl->tr_size > 0x7ff) {
424 *ctrl |= ch_ctrl->tr_size & 0x7ff;
430 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);
/*
 * Inverse of pnx4008_dma_pack_control(): decode a raw Cx_CONTROL value
 * into a pnx4008_dma_ch_ctrl structure.  The right-shifts between field
 * extractions are on elided lines; each visible line reads the field
 * currently sitting in the low bits of ctrl.
 */
432 int pnx4008_dma_parse_control(unsigned long ctrl,
433 struct pnx4008_dma_ch_ctrl * ch_ctrl)
442 ch_ctrl->tr_size = ctrl & 0x7ff;
/* burst sizes: register encodes log2, except encoding 0 => 1 byte */
445 ch_ctrl->sbsize = 1 << (ctrl & 7);
446 if (ch_ctrl->sbsize > 1)
447 ch_ctrl->sbsize <<= 1;
450 ch_ctrl->dbsize = 1 << (ctrl & 7);
451 if (ch_ctrl->dbsize > 1)
452 ch_ctrl->dbsize <<= 1;
457 ch_ctrl->swidth = WIDTH_BYTE;
460 ch_ctrl->swidth = WIDTH_HWORD;
463 ch_ctrl->swidth = WIDTH_WORD;
473 ch_ctrl->dwidth = WIDTH_BYTE;
476 ch_ctrl->dwidth = WIDTH_HWORD;
479 ch_ctrl->dwidth = WIDTH_WORD;
/* remaining single-bit flags, low bit first after each elided shift */
487 ch_ctrl->src_ahb1 = ctrl & 1;
490 ch_ctrl->dest_ahb1 = ctrl & 1;
493 ch_ctrl->si = ctrl & 1;
496 ch_ctrl->di = ctrl & 1;
499 ch_ctrl->priv_mode = ctrl & 1;
502 ch_ctrl->bufferable = ctrl & 1;
505 ch_ctrl->cacheable = ctrl & 1;
508 ch_ctrl->tc_mask = ctrl & 1;
514 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);
/*
 * Encode a pnx4008_dma_ch_config structure into the raw Cx_CONFIG
 * register value (*cfg).  Field validation switches mirror
 * pnx4008_dma_pack_control(); the peripheral numbers are written into
 * their 5-bit fields at the end.  Returns 0 or negative errno.
 */
516 int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
521 if (!cfg || !ch_cfg) {
528 switch (ch_cfg->halt) {
539 switch (ch_cfg->active) {
550 switch (ch_cfg->lock) {
561 switch (ch_cfg->itc) {
572 switch (ch_cfg->ie) {
583 switch (ch_cfg->flow_cntrl) {
599 case FC_PER2PER_DPER:
611 case FC_PER2PER_SPER:
/* destination peripheral number: 5-bit field at bits 10:6 */
619 *cfg &= ~(0x1f << 6);
620 *cfg |= ((ch_cfg->dest_per & 0x1f) << 6);
/* source peripheral number: 5-bit field at bits 5:1 */
622 *cfg &= ~(0x1f << 1);
623 *cfg |= ((ch_cfg->src_per & 0x1f) << 1);
629 EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);
/*
 * Inverse of pnx4008_dma_pack_config(): decode a raw Cx_CONFIG value
 * into a pnx4008_dma_ch_config structure.  As in parse_control, the
 * right-shifts between extractions are on elided lines.
 */
631 int pnx4008_dma_parse_config(unsigned long cfg,
632 struct pnx4008_dma_ch_config * ch_cfg)
643 ch_cfg->src_per = cfg & 0x1f;
646 ch_cfg->dest_per = cfg & 0x1f;
/* 3-bit flow-control field -> enum (case labels elided) */
651 ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
654 ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
657 ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
660 ch_cfg->flow_cntrl = FC_PER2PER_DMA;
663 ch_cfg->flow_cntrl = FC_PER2PER_DPER;
666 ch_cfg->flow_cntrl = FC_MEM2PER_PER;
669 ch_cfg->flow_cntrl = FC_PER2MEM_PER;
672 ch_cfg->flow_cntrl = FC_PER2PER_SPER;
/* remaining single-bit flags */
676 ch_cfg->ie = cfg & 1;
679 ch_cfg->itc = cfg & 1;
682 ch_cfg->lock = cfg & 1;
685 ch_cfg->active = cfg & 1;
688 ch_cfg->halt = cfg & 1;
694 EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);
/*
 * Split a transfer whose tr_size exceeds the 11-bit hardware limit
 * (0x7FF) into a chain of linked-list entries hanging off the head
 * config.  Two paths: config->is_ll == 0 builds a fresh chain, else the
 * new entries are spliced in front of the existing chain (ll_old /
 * ll_dma_old are re-linked at the tail).  The last entry gets the
 * remainder length and keeps the TC-interrupt bit; all earlier entries
 * have bit 31 (TC interrupt) masked off via & 0x7fffffff.
 * NOTE(review): large parts of both loops (allocation error handling,
 * count/next updates) are on elided lines.
 */
696 void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
697 struct pnx4008_dma_ch_ctrl * ctrl)
699 int new_len = ctrl->tr_size, num_entries = 0;
700 int old_len = new_len;
701 int src_width, dest_width, count = 1;
/* element widths in bytes, derived from the packed width enums */
703 switch (ctrl->swidth) {
717 switch (ctrl->dwidth) {
/* grow num_entries until each piece fits in the 11-bit length field */
731 while (new_len > 0x7FF) {
733 new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
735 if (num_entries != 0) {
736 struct pnx4008_dma_ll *ll = NULL;
737 config->ch_ctrl &= ~0x7ff;
738 config->ch_ctrl |= new_len;
739 if (!config->is_ll) {
741 while (num_entries) {
744 pnx4008_alloc_ll_entry(&config->
749 pnx4008_alloc_ll_entry(&ll->
/* each entry advances the addresses by one chunk */
757 src_width * new_len * count;
759 ll->src_addr = config->src_addr;
763 dest_width * new_len * count;
765 ll->dest_addr = config->dest_addr;
766 ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
/* existing chain: remember old head, splice new entries before it */
773 struct pnx4008_dma_ll *ll_old = config->ll;
774 unsigned long ll_dma_old = config->ll_dma;
775 while (num_entries) {
778 pnx4008_alloc_ll_entry(&config->
783 pnx4008_alloc_ll_entry(&ll->
791 src_width * new_len * count;
793 ll->src_addr = config->src_addr;
797 dest_width * new_len * count;
799 ll->dest_addr = config->dest_addr;
800 ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
806 ll->next_dma = ll_dma_old;
809 /* adjust last length/tc */
810 ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
811 ll->ch_ctrl |= old_len - new_len * (count - 1);
812 config->ch_ctrl &= 0x7fffffff;
816 EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);
/*
 * Same splitting logic as pnx4008_dma_split_head_entry(), but applied
 * to one entry (cur_ll) inside an existing linked list rather than the
 * head config: oversized tr_size is divided into equal chunks, new
 * entries are chained after cur_ll, and the original successor
 * (ll_old / ll_dma_old) is re-attached at the tail.  The last entry
 * carries the remainder and the TC-interrupt bit.
 * NOTE(review): loop interiors are partially elided, as in the head
 * variant above.
 */
818 void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
819 struct pnx4008_dma_ch_ctrl * ctrl)
821 int new_len = ctrl->tr_size, num_entries = 0;
822 int old_len = new_len;
823 int src_width, dest_width, count = 1;
/* element widths in bytes */
825 switch (ctrl->swidth) {
839 switch (ctrl->dwidth) {
/* find the smallest split where each chunk fits in 11 bits */
853 while (new_len > 0x7FF) {
855 new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
857 if (num_entries != 0) {
858 struct pnx4008_dma_ll *ll = NULL;
859 cur_ll->ch_ctrl &= ~0x7ff;
860 cur_ll->ch_ctrl |= new_len;
862 while (num_entries) {
865 pnx4008_alloc_ll_entry(&cur_ll->
870 pnx4008_alloc_ll_entry(&ll->
878 src_width * new_len * count;
880 ll->src_addr = cur_ll->src_addr;
884 dest_width * new_len * count;
886 ll->dest_addr = cur_ll->dest_addr;
886 ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
/* splice path: keep cur_ll's old successor for re-attachment */
894 struct pnx4008_dma_ll *ll_old = cur_ll->next;
895 unsigned long ll_dma_old = cur_ll->next_dma;
896 while (num_entries) {
899 pnx4008_alloc_ll_entry(&cur_ll->
904 pnx4008_alloc_ll_entry(&ll->
912 src_width * new_len * count;
914 ll->src_addr = cur_ll->src_addr;
918 dest_width * new_len * count;
920 ll->dest_addr = cur_ll->dest_addr;
921 ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
928 ll->next_dma = ll_dma_old;
931 /* adjust last length/tc */
932 ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
933 ll->ch_ctrl |= old_len - new_len * (count - 1);
934 cur_ll->ch_ctrl &= 0x7fffffff;
938 EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);
/*
 * Write a channel's full register set (src/dest addresses, LLI pointer,
 * control, config) from a pnx4008_dma_config under dma_lock.  The LLI
 * register gets the chain's bus address for linked-list transfers, 0
 * for single-block transfers.  Fails for invalid/unclaimed channels.
 */
940 int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
942 if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
946 __raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
947 __raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));
950 __raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
952 __raw_writel(0, DMAC_Cx_LLI(ch));
954 __raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
955 __raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
956 pnx4008_dma_unlock();
962 EXPORT_SYMBOL_GPL(pnx4008_config_channel);
/*
 * Read back a channel's current register state into *config under
 * dma_lock; is_ll is inferred from a non-zero LLI register.
 * Fails for invalid/unclaimed channels or a NULL config.
 */
964 int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
966 if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
970 config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
971 config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));
973 config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
974 config->is_ll = config->ll_dma ? 1 : 0;
976 config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
977 config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
978 pnx4008_dma_unlock();
983 EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);
/*
 * Start a channel: read-modify-write its CONFIG register under dma_lock
 * (the enable-bit OR is on an elided line between the read and write).
 */
985 int pnx4008_dma_ch_enable(int ch)
987 unsigned long ch_cfg;
989 if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
993 ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
995 __raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
996 pnx4008_dma_unlock();
1001 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);
/*
 * Stop a channel: read-modify-write its CONFIG register under dma_lock
 * (the enable-bit clear is on an elided line between the read and write).
 */
1003 int pnx4008_dma_ch_disable(int ch)
1005 unsigned long ch_cfg;
1007 if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
1011 ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
1013 __raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
1014 pnx4008_dma_unlock();
1019 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);
/*
 * Query whether a channel is running: reads CONFIG under dma_lock and
 * (on an elided line) returns its enable bit.  Negative errno for
 * invalid/unclaimed channels.
 */
1021 int pnx4008_dma_ch_enabled(int ch)
1023 unsigned long ch_cfg;
1025 if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
1029 ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
1030 pnx4008_dma_unlock();
1035 EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);
/*
 * Shared interrupt handler for all DMA channels.  Snapshots the three
 * status registers, then for each signalling channel builds a cause mask
 * (DMA_ERR_INT / DMA_TC_INT), dispatches to the registered client
 * handler (warning on spurious IRQs for unclaimed channels), and acks
 * the per-channel TC and error bits.
 */
1037 static irqreturn_t dma_irq_handler(int irq, void *dev_id, struct pt_regs *regs)
1040 unsigned long dint = __raw_readl(DMAC_INT_STAT);
1041 unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
1042 unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
1043 unsigned long i_bit;
1045 for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
1048 struct dma_channel *channel = &dma_channels[i];
1050 if (channel->name && channel->irq_handler) {
1054 cause |= DMA_ERR_INT;
1056 cause |= DMA_TC_INT;
/* deliver (channel, cause, client data, regs) to the client */
1057 channel->irq_handler(i, cause, channel->data,
1061 * IRQ for an unregistered DMA channel
1064 "spurious IRQ for DMA channel %d\n", i);
/* acknowledge this channel's TC and error interrupts */
1067 __raw_writel(i_bit, DMAC_INT_TC_CLEAR);
1069 __raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
/*
 * Boot-time init: claim the DMA interrupt, carve a 0x4000-byte coherent
 * region into the ll_pool free list (entries chained through their own
 * first word, last entry wrapping back to the pool base), and enable
 * the controller via DMAC_CONFIG.  On allocation failure the IRQ is
 * released again.
 */
1075 static int __init pnx4008_dma_init(void)
1079 ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
1081 printk(KERN_CRIT "Wow! Can't register IRQ for DMA\n");
1085 ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
1086 ll_pool.cur = ll_pool.vaddr =
1087 dma_alloc_coherent(NULL, ll_pool.count * sizeof(struct pnx4008_dma_ll),
1088 &ll_pool.dma_addr, GFP_KERNEL);
1090 if (!ll_pool.vaddr) {
1092 free_irq(DMA_INT, NULL);
/* link each entry to the next; entry i's first word -> entry i+1 */
1096 for (i = 0; i < ll_pool.count - 1; i++) {
1097 void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
1098 *addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
/* last entry points back at the pool base, closing the list */
1100 *(long *)(ll_pool.vaddr +
1101 (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
1102 (long)ll_pool.vaddr;
/* bit 0 of DMAC_CONFIG enables the controller */
1104 __raw_writel(1, DMAC_CONFIG);
1109 arch_initcall(pnx4008_dma_init);