1 // SPDX-License-Identifier: GPL-2.0
2 /* Wrapper for DMA channel allocator that updates DMA client muxing.
3  * Copyright 2004-2007, Axis Communications AB
4  */
5 
6 #include <linux/kernel.h>
7 #include <linux/module.h>
8 #include <linux/errno.h>
9 
10 #include <asm/dma.h>
11 #include <arch/svinto.h>
12 #include <arch/system.h>
13 
14 /* Macro to access ETRAX 100 registers */
15 #define SETS(var, reg, field, val) var = (var & ~IO_MASK_(reg##_, field##_)) | \
16 					  IO_STATE_(reg##_, field##_, _##val)
17 
18 
19 static char used_dma_channels[MAX_DMA_CHANNELS];
20 static const char * used_dma_channels_users[MAX_DMA_CHANNELS];
21 
cris_request_dma(unsigned int dmanr,const char * device_id,unsigned options,enum dma_owner owner)22 int cris_request_dma(unsigned int dmanr, const char * device_id,
23 		     unsigned options, enum dma_owner owner)
24 {
25 	unsigned long flags;
26 	unsigned long int gens;
27 	int fail = -EINVAL;
28 
29 	if (dmanr >= MAX_DMA_CHANNELS) {
30 		printk(KERN_CRIT "cris_request_dma: invalid DMA channel %u\n", dmanr);
31 		return -EINVAL;
32 	}
33 
34 	local_irq_save(flags);
35 	if (used_dma_channels[dmanr]) {
36 		local_irq_restore(flags);
37 		if (options & DMA_VERBOSE_ON_ERROR) {
38 			printk(KERN_CRIT "Failed to request DMA %i for %s, already allocated by %s\n", dmanr, device_id, used_dma_channels_users[dmanr]);
39 		}
40 		if (options & DMA_PANIC_ON_ERROR) {
41 			panic("request_dma error!");
42 		}
43 		return -EBUSY;
44 	}
45 
46 	gens = genconfig_shadow;
47 
48 	switch(owner)
49 	{
50 	case dma_eth:
51 		if ((dmanr != NETWORK_TX_DMA_NBR) &&
52 		    (dmanr != NETWORK_RX_DMA_NBR)) {
53 			printk(KERN_CRIT "Invalid DMA channel for eth\n");
54 			goto bail;
55 		}
56 		break;
57 	case dma_ser0:
58 		if (dmanr == SER0_TX_DMA_NBR) {
59 			SETS(gens, R_GEN_CONFIG, dma6, serial0);
60 		} else if (dmanr == SER0_RX_DMA_NBR) {
61 			SETS(gens, R_GEN_CONFIG, dma7, serial0);
62 		} else {
63 			printk(KERN_CRIT "Invalid DMA channel for ser0\n");
64 			goto bail;
65 		}
66 		break;
67 	case dma_ser1:
68 		if (dmanr == SER1_TX_DMA_NBR) {
69 			SETS(gens, R_GEN_CONFIG, dma8, serial1);
70 		} else if (dmanr == SER1_RX_DMA_NBR) {
71 			SETS(gens, R_GEN_CONFIG, dma9, serial1);
72 		} else {
73 			printk(KERN_CRIT "Invalid DMA channel for ser1\n");
74 			goto bail;
75 		}
76 		break;
77 	case dma_ser2:
78 		if (dmanr == SER2_TX_DMA_NBR) {
79 			SETS(gens, R_GEN_CONFIG, dma2, serial2);
80 		} else if (dmanr == SER2_RX_DMA_NBR) {
81 			SETS(gens, R_GEN_CONFIG, dma3, serial2);
82 		} else {
83 			printk(KERN_CRIT "Invalid DMA channel for ser2\n");
84 			goto bail;
85 		}
86 		break;
87 	case dma_ser3:
88 		if (dmanr == SER3_TX_DMA_NBR) {
89 			SETS(gens, R_GEN_CONFIG, dma4, serial3);
90 		} else if (dmanr == SER3_RX_DMA_NBR) {
91 			SETS(gens, R_GEN_CONFIG, dma5, serial3);
92 		} else {
93 			printk(KERN_CRIT "Invalid DMA channel for ser3\n");
94 			goto bail;
95 		}
96 		break;
97 	case dma_ata:
98 		if (dmanr == ATA_TX_DMA_NBR) {
99 			SETS(gens, R_GEN_CONFIG, dma2, ata);
100 		} else if (dmanr == ATA_RX_DMA_NBR) {
101 			SETS(gens, R_GEN_CONFIG, dma3, ata);
102 		} else {
103 			printk(KERN_CRIT "Invalid DMA channel for ata\n");
104 			goto bail;
105 		}
106 		break;
107 	case dma_ext0:
108 		if (dmanr == EXTDMA0_TX_DMA_NBR) {
109 			SETS(gens, R_GEN_CONFIG, dma4, extdma0);
110 		} else if (dmanr == EXTDMA0_RX_DMA_NBR) {
111 			SETS(gens, R_GEN_CONFIG, dma5, extdma0);
112 		} else {
113 			printk(KERN_CRIT "Invalid DMA channel for ext0\n");
114 			goto bail;
115 		}
116 		break;
117 	case dma_ext1:
118 		if (dmanr == EXTDMA1_TX_DMA_NBR) {
119 			SETS(gens, R_GEN_CONFIG, dma6, extdma1);
120 		} else if (dmanr == EXTDMA1_RX_DMA_NBR) {
121 			SETS(gens, R_GEN_CONFIG, dma7, extdma1);
122 		} else {
123 			printk(KERN_CRIT "Invalid DMA channel for ext1\n");
124 			goto bail;
125 		}
126 		break;
127 	case dma_int6:
128 		if (dmanr == MEM2MEM_RX_DMA_NBR) {
129 			SETS(gens, R_GEN_CONFIG, dma7, intdma6);
130 		} else {
131 			printk(KERN_CRIT "Invalid DMA channel for int6\n");
132 			goto bail;
133 		}
134 		break;
135 	case dma_int7:
136 		if (dmanr == MEM2MEM_TX_DMA_NBR) {
137 			SETS(gens, R_GEN_CONFIG, dma6, intdma7);
138 		} else {
139 			printk(KERN_CRIT "Invalid DMA channel for int7\n");
140 			goto bail;
141 		}
142 		break;
143 	case dma_usb:
144 		if (dmanr == USB_TX_DMA_NBR) {
145 			SETS(gens, R_GEN_CONFIG, dma8, usb);
146 		} else if (dmanr == USB_RX_DMA_NBR) {
147 			SETS(gens, R_GEN_CONFIG, dma9, usb);
148 		} else {
149 			printk(KERN_CRIT "Invalid DMA channel for usb\n");
150 			goto bail;
151 		}
152 		break;
153 	case dma_scsi0:
154 		if (dmanr == SCSI0_TX_DMA_NBR) {
155 			SETS(gens, R_GEN_CONFIG, dma2, scsi0);
156 		} else if (dmanr == SCSI0_RX_DMA_NBR) {
157 			SETS(gens, R_GEN_CONFIG, dma3, scsi0);
158 		} else {
159 			printk(KERN_CRIT "Invalid DMA channel for scsi0\n");
160 			goto bail;
161 		}
162 		break;
163 	case dma_scsi1:
164 		if (dmanr == SCSI1_TX_DMA_NBR) {
165 			SETS(gens, R_GEN_CONFIG, dma4, scsi1);
166 		} else if (dmanr == SCSI1_RX_DMA_NBR) {
167 			SETS(gens, R_GEN_CONFIG, dma5, scsi1);
168 		} else {
169 			printk(KERN_CRIT "Invalid DMA channel for scsi1\n");
170 			goto bail;
171 		}
172 		break;
173 	case dma_par0:
174 		if (dmanr == PAR0_TX_DMA_NBR) {
175 			SETS(gens, R_GEN_CONFIG, dma2, par0);
176 		} else if (dmanr == PAR0_RX_DMA_NBR) {
177 			SETS(gens, R_GEN_CONFIG, dma3, par0);
178 		} else {
179 			printk(KERN_CRIT "Invalid DMA channel for par0\n");
180 			goto bail;
181 		}
182 		break;
183 	case dma_par1:
184 		if (dmanr == PAR1_TX_DMA_NBR) {
185 			SETS(gens, R_GEN_CONFIG, dma4, par1);
186 		} else if (dmanr == PAR1_RX_DMA_NBR) {
187 			SETS(gens, R_GEN_CONFIG, dma5, par1);
188 		} else {
189 			printk(KERN_CRIT "Invalid DMA channel for par1\n");
190 			goto bail;
191 		}
192 		break;
193 	default:
194 		printk(KERN_CRIT "Invalid DMA owner.\n");
195 		goto bail;
196 	}
197 
198 	used_dma_channels[dmanr] = 1;
199 	used_dma_channels_users[dmanr] = device_id;
200 
201 	{
202 		volatile int i;
203 		genconfig_shadow = gens;
204 		*R_GEN_CONFIG = genconfig_shadow;
205 		/* Wait 12 cycles before doing any DMA command */
206 		for(i = 6; i > 0; i--)
207 			nop();
208 	}
209 	fail = 0;
210  bail:
211 	local_irq_restore(flags);
212 	return fail;
213 }
214 
cris_free_dma(unsigned int dmanr,const char * device_id)215 void cris_free_dma(unsigned int dmanr, const char * device_id)
216 {
217 	unsigned long flags;
218 	if (dmanr >= MAX_DMA_CHANNELS) {
219 		printk(KERN_CRIT "cris_free_dma: invalid DMA channel %u\n", dmanr);
220 		return;
221 	}
222 
223 	local_irq_save(flags);
224 	if (!used_dma_channels[dmanr]) {
225 		printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated\n", dmanr);
226 	} else if (device_id != used_dma_channels_users[dmanr]) {
227 		printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated by device\n", dmanr);
228 	} else {
229 		switch(dmanr)
230 		{
231 		case 0:
232 			*R_DMA_CH0_CMD = IO_STATE(R_DMA_CH0_CMD, cmd, reset);
233 			while (IO_EXTRACT(R_DMA_CH0_CMD, cmd, *R_DMA_CH0_CMD) ==
234 			       IO_STATE_VALUE(R_DMA_CH0_CMD, cmd, reset));
235 			break;
236 		case 1:
237 			*R_DMA_CH1_CMD = IO_STATE(R_DMA_CH1_CMD, cmd, reset);
238 			while (IO_EXTRACT(R_DMA_CH1_CMD, cmd, *R_DMA_CH1_CMD) ==
239 			       IO_STATE_VALUE(R_DMA_CH1_CMD, cmd, reset));
240 			break;
241 		case 2:
242 			*R_DMA_CH2_CMD = IO_STATE(R_DMA_CH2_CMD, cmd, reset);
243 			while (IO_EXTRACT(R_DMA_CH2_CMD, cmd, *R_DMA_CH2_CMD) ==
244 			       IO_STATE_VALUE(R_DMA_CH2_CMD, cmd, reset));
245 			break;
246 		case 3:
247 			*R_DMA_CH3_CMD = IO_STATE(R_DMA_CH3_CMD, cmd, reset);
248 			while (IO_EXTRACT(R_DMA_CH3_CMD, cmd, *R_DMA_CH3_CMD) ==
249 			       IO_STATE_VALUE(R_DMA_CH3_CMD, cmd, reset));
250 			break;
251 		case 4:
252 			*R_DMA_CH4_CMD = IO_STATE(R_DMA_CH4_CMD, cmd, reset);
253 			while (IO_EXTRACT(R_DMA_CH4_CMD, cmd, *R_DMA_CH4_CMD) ==
254 			       IO_STATE_VALUE(R_DMA_CH4_CMD, cmd, reset));
255 			break;
256 		case 5:
257 			*R_DMA_CH5_CMD = IO_STATE(R_DMA_CH5_CMD, cmd, reset);
258 			while (IO_EXTRACT(R_DMA_CH5_CMD, cmd, *R_DMA_CH5_CMD) ==
259 			       IO_STATE_VALUE(R_DMA_CH5_CMD, cmd, reset));
260 			break;
261 		case 6:
262 			*R_DMA_CH6_CMD = IO_STATE(R_DMA_CH6_CMD, cmd, reset);
263 			while (IO_EXTRACT(R_DMA_CH6_CMD, cmd, *R_DMA_CH6_CMD) ==
264 			       IO_STATE_VALUE(R_DMA_CH6_CMD, cmd, reset));
265 			break;
266 		case 7:
267 			*R_DMA_CH7_CMD = IO_STATE(R_DMA_CH7_CMD, cmd, reset);
268 			while (IO_EXTRACT(R_DMA_CH7_CMD, cmd, *R_DMA_CH7_CMD) ==
269 			       IO_STATE_VALUE(R_DMA_CH7_CMD, cmd, reset));
270 			break;
271 		case 8:
272 			*R_DMA_CH8_CMD = IO_STATE(R_DMA_CH8_CMD, cmd, reset);
273 			while (IO_EXTRACT(R_DMA_CH8_CMD, cmd, *R_DMA_CH8_CMD) ==
274 			       IO_STATE_VALUE(R_DMA_CH8_CMD, cmd, reset));
275 			break;
276 		case 9:
277 			*R_DMA_CH9_CMD = IO_STATE(R_DMA_CH9_CMD, cmd, reset);
278 			while (IO_EXTRACT(R_DMA_CH9_CMD, cmd, *R_DMA_CH9_CMD) ==
279 			       IO_STATE_VALUE(R_DMA_CH9_CMD, cmd, reset));
280 			break;
281 		}
282 		used_dma_channels[dmanr] = 0;
283 	}
284 	local_irq_restore(flags);
285 }
286 
287 EXPORT_SYMBOL(cris_request_dma);
288 EXPORT_SYMBOL(cris_free_dma);
289