1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (C) STMicroelectronics 2018
4 * Author: Christophe Kerello <christophe.kerello@st.com>
5 */
6
7 #include <linux/clk.h>
8 #include <linux/dmaengine.h>
9 #include <linux/dma-mapping.h>
10 #include <linux/errno.h>
11 #include <linux/interrupt.h>
12 #include <linux/iopoll.h>
13 #include <linux/module.h>
14 #include <linux/mtd/rawnand.h>
15 #include <linux/pinctrl/consumer.h>
16 #include <linux/platform_device.h>
17 #include <linux/reset.h>
18
19 /* Bad block marker length */
20 #define FMC2_BBM_LEN 2
21
22 /* ECC step size */
23 #define FMC2_ECC_STEP_SIZE 512
24
25 /* BCHDSRx registers length */
26 #define FMC2_BCHDSRS_LEN 20
27
28 /* HECCR length */
29 #define FMC2_HECCR_LEN 4
30
/* Max requests done for an 8k nand page size */
32 #define FMC2_MAX_SG 16
33
34 /* Max chip enable */
35 #define FMC2_MAX_CE 2
36
37 /* Max ECC buffer length */
38 #define FMC2_MAX_ECC_BUF_LEN (FMC2_BCHDSRS_LEN * FMC2_MAX_SG)
39
40 #define FMC2_TIMEOUT_US 1000
41 #define FMC2_TIMEOUT_MS 1000
42
43 /* Timings */
44 #define FMC2_THIZ 1
45 #define FMC2_TIO 8000
46 #define FMC2_TSYNC 3000
47 #define FMC2_PCR_TIMING_MASK 0xf
48 #define FMC2_PMEM_PATT_TIMING_MASK 0xff
49
50 /* FMC2 Controller Registers */
51 #define FMC2_BCR1 0x0
52 #define FMC2_PCR 0x80
53 #define FMC2_SR 0x84
54 #define FMC2_PMEM 0x88
55 #define FMC2_PATT 0x8c
56 #define FMC2_HECCR 0x94
57 #define FMC2_ISR 0x184
58 #define FMC2_ICR 0x188
59 #define FMC2_CSQCR 0x200
60 #define FMC2_CSQCFGR1 0x204
61 #define FMC2_CSQCFGR2 0x208
62 #define FMC2_CSQCFGR3 0x20c
63 #define FMC2_CSQAR1 0x210
64 #define FMC2_CSQAR2 0x214
65 #define FMC2_CSQIER 0x220
66 #define FMC2_CSQISR 0x224
67 #define FMC2_CSQICR 0x228
68 #define FMC2_CSQEMSR 0x230
69 #define FMC2_BCHIER 0x250
70 #define FMC2_BCHISR 0x254
71 #define FMC2_BCHICR 0x258
72 #define FMC2_BCHPBR1 0x260
73 #define FMC2_BCHPBR2 0x264
74 #define FMC2_BCHPBR3 0x268
75 #define FMC2_BCHPBR4 0x26c
76 #define FMC2_BCHDSR0 0x27c
77 #define FMC2_BCHDSR1 0x280
78 #define FMC2_BCHDSR2 0x284
79 #define FMC2_BCHDSR3 0x288
80 #define FMC2_BCHDSR4 0x28c
81
82 /* Register: FMC2_BCR1 */
83 #define FMC2_BCR1_FMC2EN BIT(31)
84
85 /* Register: FMC2_PCR */
86 #define FMC2_PCR_PWAITEN BIT(1)
87 #define FMC2_PCR_PBKEN BIT(2)
88 #define FMC2_PCR_PWID_MASK GENMASK(5, 4)
89 #define FMC2_PCR_PWID(x) (((x) & 0x3) << 4)
90 #define FMC2_PCR_PWID_BUSWIDTH_8 0
91 #define FMC2_PCR_PWID_BUSWIDTH_16 1
92 #define FMC2_PCR_ECCEN BIT(6)
93 #define FMC2_PCR_ECCALG BIT(8)
94 #define FMC2_PCR_TCLR_MASK GENMASK(12, 9)
95 #define FMC2_PCR_TCLR(x) (((x) & 0xf) << 9)
96 #define FMC2_PCR_TCLR_DEFAULT 0xf
97 #define FMC2_PCR_TAR_MASK GENMASK(16, 13)
98 #define FMC2_PCR_TAR(x) (((x) & 0xf) << 13)
99 #define FMC2_PCR_TAR_DEFAULT 0xf
100 #define FMC2_PCR_ECCSS_MASK GENMASK(19, 17)
101 #define FMC2_PCR_ECCSS(x) (((x) & 0x7) << 17)
102 #define FMC2_PCR_ECCSS_512 1
103 #define FMC2_PCR_ECCSS_2048 3
104 #define FMC2_PCR_BCHECC BIT(24)
105 #define FMC2_PCR_WEN BIT(25)
106
107 /* Register: FMC2_SR */
108 #define FMC2_SR_NWRF BIT(6)
109
110 /* Register: FMC2_PMEM */
111 #define FMC2_PMEM_MEMSET(x) (((x) & 0xff) << 0)
112 #define FMC2_PMEM_MEMWAIT(x) (((x) & 0xff) << 8)
113 #define FMC2_PMEM_MEMHOLD(x) (((x) & 0xff) << 16)
114 #define FMC2_PMEM_MEMHIZ(x) (((x) & 0xff) << 24)
115 #define FMC2_PMEM_DEFAULT 0x0a0a0a0a
116
117 /* Register: FMC2_PATT */
118 #define FMC2_PATT_ATTSET(x) (((x) & 0xff) << 0)
119 #define FMC2_PATT_ATTWAIT(x) (((x) & 0xff) << 8)
120 #define FMC2_PATT_ATTHOLD(x) (((x) & 0xff) << 16)
121 #define FMC2_PATT_ATTHIZ(x) (((x) & 0xff) << 24)
122 #define FMC2_PATT_DEFAULT 0x0a0a0a0a
123
124 /* Register: FMC2_ISR */
125 #define FMC2_ISR_IHLF BIT(1)
126
127 /* Register: FMC2_ICR */
128 #define FMC2_ICR_CIHLF BIT(1)
129
130 /* Register: FMC2_CSQCR */
131 #define FMC2_CSQCR_CSQSTART BIT(0)
132
133 /* Register: FMC2_CSQCFGR1 */
134 #define FMC2_CSQCFGR1_CMD2EN BIT(1)
135 #define FMC2_CSQCFGR1_DMADEN BIT(2)
136 #define FMC2_CSQCFGR1_ACYNBR(x) (((x) & 0x7) << 4)
137 #define FMC2_CSQCFGR1_CMD1(x) (((x) & 0xff) << 8)
138 #define FMC2_CSQCFGR1_CMD2(x) (((x) & 0xff) << 16)
139 #define FMC2_CSQCFGR1_CMD1T BIT(24)
140 #define FMC2_CSQCFGR1_CMD2T BIT(25)
141
142 /* Register: FMC2_CSQCFGR2 */
143 #define FMC2_CSQCFGR2_SQSDTEN BIT(0)
144 #define FMC2_CSQCFGR2_RCMD2EN BIT(1)
145 #define FMC2_CSQCFGR2_DMASEN BIT(2)
146 #define FMC2_CSQCFGR2_RCMD1(x) (((x) & 0xff) << 8)
147 #define FMC2_CSQCFGR2_RCMD2(x) (((x) & 0xff) << 16)
148 #define FMC2_CSQCFGR2_RCMD1T BIT(24)
149 #define FMC2_CSQCFGR2_RCMD2T BIT(25)
150
151 /* Register: FMC2_CSQCFGR3 */
152 #define FMC2_CSQCFGR3_SNBR(x) (((x) & 0x1f) << 8)
153 #define FMC2_CSQCFGR3_AC1T BIT(16)
154 #define FMC2_CSQCFGR3_AC2T BIT(17)
155 #define FMC2_CSQCFGR3_AC3T BIT(18)
156 #define FMC2_CSQCFGR3_AC4T BIT(19)
157 #define FMC2_CSQCFGR3_AC5T BIT(20)
158 #define FMC2_CSQCFGR3_SDT BIT(21)
159 #define FMC2_CSQCFGR3_RAC1T BIT(22)
160 #define FMC2_CSQCFGR3_RAC2T BIT(23)
161
162 /* Register: FMC2_CSQCAR1 */
163 #define FMC2_CSQCAR1_ADDC1(x) (((x) & 0xff) << 0)
164 #define FMC2_CSQCAR1_ADDC2(x) (((x) & 0xff) << 8)
165 #define FMC2_CSQCAR1_ADDC3(x) (((x) & 0xff) << 16)
166 #define FMC2_CSQCAR1_ADDC4(x) (((x) & 0xff) << 24)
167
168 /* Register: FMC2_CSQCAR2 */
169 #define FMC2_CSQCAR2_ADDC5(x) (((x) & 0xff) << 0)
170 #define FMC2_CSQCAR2_NANDCEN(x) (((x) & 0x3) << 10)
171 #define FMC2_CSQCAR2_SAO(x) (((x) & 0xffff) << 16)
172
173 /* Register: FMC2_CSQIER */
174 #define FMC2_CSQIER_TCIE BIT(0)
175
176 /* Register: FMC2_CSQICR */
177 #define FMC2_CSQICR_CLEAR_IRQ GENMASK(4, 0)
178
179 /* Register: FMC2_CSQEMSR */
180 #define FMC2_CSQEMSR_SEM GENMASK(15, 0)
181
182 /* Register: FMC2_BCHIER */
183 #define FMC2_BCHIER_DERIE BIT(1)
184 #define FMC2_BCHIER_EPBRIE BIT(4)
185
186 /* Register: FMC2_BCHICR */
187 #define FMC2_BCHICR_CLEAR_IRQ GENMASK(4, 0)
188
189 /* Register: FMC2_BCHDSR0 */
190 #define FMC2_BCHDSR0_DUE BIT(0)
191 #define FMC2_BCHDSR0_DEF BIT(1)
192 #define FMC2_BCHDSR0_DEN_MASK GENMASK(7, 4)
193 #define FMC2_BCHDSR0_DEN_SHIFT 4
194
195 /* Register: FMC2_BCHDSR1 */
196 #define FMC2_BCHDSR1_EBP1_MASK GENMASK(12, 0)
197 #define FMC2_BCHDSR1_EBP2_MASK GENMASK(28, 16)
198 #define FMC2_BCHDSR1_EBP2_SHIFT 16
199
200 /* Register: FMC2_BCHDSR2 */
201 #define FMC2_BCHDSR2_EBP3_MASK GENMASK(12, 0)
202 #define FMC2_BCHDSR2_EBP4_MASK GENMASK(28, 16)
203 #define FMC2_BCHDSR2_EBP4_SHIFT 16
204
205 /* Register: FMC2_BCHDSR3 */
206 #define FMC2_BCHDSR3_EBP5_MASK GENMASK(12, 0)
207 #define FMC2_BCHDSR3_EBP6_MASK GENMASK(28, 16)
208 #define FMC2_BCHDSR3_EBP6_SHIFT 16
209
210 /* Register: FMC2_BCHDSR4 */
211 #define FMC2_BCHDSR4_EBP7_MASK GENMASK(12, 0)
212 #define FMC2_BCHDSR4_EBP8_MASK GENMASK(28, 16)
213 #define FMC2_BCHDSR4_EBP8_SHIFT 16
214
215 enum stm32_fmc2_ecc {
216 FMC2_ECC_HAM = 1,
217 FMC2_ECC_BCH4 = 4,
218 FMC2_ECC_BCH8 = 8
219 };
220
221 enum stm32_fmc2_irq_state {
222 FMC2_IRQ_UNKNOWN = 0,
223 FMC2_IRQ_BCH,
224 FMC2_IRQ_SEQ
225 };
226
227 struct stm32_fmc2_timings {
228 u8 tclr;
229 u8 tar;
230 u8 thiz;
231 u8 twait;
232 u8 thold_mem;
233 u8 tset_mem;
234 u8 thold_att;
235 u8 tset_att;
236 };
237
238 struct stm32_fmc2_nand {
239 struct nand_chip chip;
240 struct stm32_fmc2_timings timings;
241 int ncs;
242 int cs_used[FMC2_MAX_CE];
243 };
244
static inline struct stm32_fmc2_nand *to_fmc2_nand(struct nand_chip *chip)
246 {
247 return container_of(chip, struct stm32_fmc2_nand, chip);
248 }
249
250 struct stm32_fmc2_nfc {
251 struct nand_controller base;
252 struct stm32_fmc2_nand nand;
253 struct device *dev;
254 void __iomem *io_base;
255 void __iomem *data_base[FMC2_MAX_CE];
256 void __iomem *cmd_base[FMC2_MAX_CE];
257 void __iomem *addr_base[FMC2_MAX_CE];
258 phys_addr_t io_phys_addr;
259 phys_addr_t data_phys_addr[FMC2_MAX_CE];
260 struct clk *clk;
261 u8 irq_state;
262
263 struct dma_chan *dma_tx_ch;
264 struct dma_chan *dma_rx_ch;
265 struct dma_chan *dma_ecc_ch;
266 struct sg_table dma_data_sg;
267 struct sg_table dma_ecc_sg;
268 u8 *ecc_buf;
269 int dma_ecc_len;
270
271 struct completion complete;
272 struct completion dma_data_complete;
273 struct completion dma_ecc_complete;
274
275 u8 cs_assigned;
276 int cs_sel;
277 };
278
static inline struct stm32_fmc2_nfc *to_stm32_nfc(struct nand_controller *base)
280 {
281 return container_of(base, struct stm32_fmc2_nfc, base);
282 }
283
284 /* Timings configuration */
static void stm32_fmc2_timings_init(struct nand_chip *chip)
286 {
287 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
288 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
289 struct stm32_fmc2_timings *timings = &nand->timings;
290 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
291 u32 pmem, patt;
292
293 /* Set tclr/tar timings */
294 pcr &= ~FMC2_PCR_TCLR_MASK;
295 pcr |= FMC2_PCR_TCLR(timings->tclr);
296 pcr &= ~FMC2_PCR_TAR_MASK;
297 pcr |= FMC2_PCR_TAR(timings->tar);
298
299 /* Set tset/twait/thold/thiz timings in common bank */
300 pmem = FMC2_PMEM_MEMSET(timings->tset_mem);
301 pmem |= FMC2_PMEM_MEMWAIT(timings->twait);
302 pmem |= FMC2_PMEM_MEMHOLD(timings->thold_mem);
303 pmem |= FMC2_PMEM_MEMHIZ(timings->thiz);
304
/* Set tset/twait/thold/thiz timings in attribute bank */
306 patt = FMC2_PATT_ATTSET(timings->tset_att);
307 patt |= FMC2_PATT_ATTWAIT(timings->twait);
308 patt |= FMC2_PATT_ATTHOLD(timings->thold_att);
309 patt |= FMC2_PATT_ATTHIZ(timings->thiz);
310
311 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
312 writel_relaxed(pmem, fmc2->io_base + FMC2_PMEM);
313 writel_relaxed(patt, fmc2->io_base + FMC2_PATT);
314 }
315
316 /* Controller configuration */
static void stm32_fmc2_setup(struct nand_chip *chip)
318 {
319 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
320 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
321
322 /* Configure ECC algorithm (default configuration is Hamming) */
323 pcr &= ~FMC2_PCR_ECCALG;
324 pcr &= ~FMC2_PCR_BCHECC;
325 if (chip->ecc.strength == FMC2_ECC_BCH8) {
326 pcr |= FMC2_PCR_ECCALG;
327 pcr |= FMC2_PCR_BCHECC;
328 } else if (chip->ecc.strength == FMC2_ECC_BCH4) {
329 pcr |= FMC2_PCR_ECCALG;
330 }
331
332 /* Set buswidth */
333 pcr &= ~FMC2_PCR_PWID_MASK;
334 if (chip->options & NAND_BUSWIDTH_16)
335 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
336
337 /* Set ECC sector size */
338 pcr &= ~FMC2_PCR_ECCSS_MASK;
339 pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);
340
341 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
342 }
343
344 /* Select target */
static int stm32_fmc2_select_chip(struct nand_chip *chip, int chipnr)
346 {
347 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
348 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
349 struct dma_slave_config dma_cfg;
350 int ret;
351
352 if (nand->cs_used[chipnr] == fmc2->cs_sel)
353 return 0;
354
355 fmc2->cs_sel = nand->cs_used[chipnr];
356
357 /* FMC2 setup routine */
358 stm32_fmc2_setup(chip);
359
360 /* Apply timings */
361 stm32_fmc2_timings_init(chip);
362
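	/*
	 * Both data DMA channels target the data register of the newly
	 * selected chip-select: src_addr and dst_addr point to the same
	 * window, only one direction is used per transfer.
	 */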
363 if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
364 memset(&dma_cfg, 0, sizeof(dma_cfg));
365 dma_cfg.src_addr = fmc2->data_phys_addr[fmc2->cs_sel];
366 dma_cfg.dst_addr = fmc2->data_phys_addr[fmc2->cs_sel];
367 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
368 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
369 dma_cfg.src_maxburst = 32;
370 dma_cfg.dst_maxburst = 32;
371
372 ret = dmaengine_slave_config(fmc2->dma_tx_ch, &dma_cfg);
373 if (ret) {
374 dev_err(fmc2->dev, "tx DMA engine slave config failed\n");
375 return ret;
376 }
377
378 ret = dmaengine_slave_config(fmc2->dma_rx_ch, &dma_cfg);
379 if (ret) {
380 dev_err(fmc2->dev, "rx DMA engine slave config failed\n");
381 return ret;
382 }
383 }
384
385 if (fmc2->dma_ecc_ch) {
386 /*
387 * Hamming: we read HECCR register
388 * BCH4/BCH8: we read BCHDSRSx registers
389 */
390 memset(&dma_cfg, 0, sizeof(dma_cfg));
391 dma_cfg.src_addr = fmc2->io_phys_addr;
392 dma_cfg.src_addr += chip->ecc.strength == FMC2_ECC_HAM ?
393 FMC2_HECCR : FMC2_BCHDSR0;
394 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
395
396 ret = dmaengine_slave_config(fmc2->dma_ecc_ch, &dma_cfg);
397 if (ret) {
398 dev_err(fmc2->dev, "ECC DMA engine slave config failed\n");
399 return ret;
400 }
401
402 /* Calculate ECC length needed for one sector */
403 fmc2->dma_ecc_len = chip->ecc.strength == FMC2_ECC_HAM ?
404 FMC2_HECCR_LEN : FMC2_BCHDSRS_LEN;
405 }
406
407 return 0;
408 }
409
410 /* Set bus width to 16-bit or 8-bit */
static void stm32_fmc2_set_buswidth_16(struct stm32_fmc2_nfc *fmc2, bool set)
412 {
413 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
414
415 pcr &= ~FMC2_PCR_PWID_MASK;
416 if (set)
417 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
418 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
419 }
420
421 /* Enable/disable ECC */
static void stm32_fmc2_set_ecc(struct stm32_fmc2_nfc *fmc2, bool enable)
423 {
424 u32 pcr = readl(fmc2->io_base + FMC2_PCR);
425
426 pcr &= ~FMC2_PCR_ECCEN;
427 if (enable)
428 pcr |= FMC2_PCR_ECCEN;
429 writel(pcr, fmc2->io_base + FMC2_PCR);
430 }
431
/* Enable irq sources when the sequencer is used */
static inline void stm32_fmc2_enable_seq_irq(struct stm32_fmc2_nfc *fmc2)
434 {
435 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
436
437 csqier |= FMC2_CSQIER_TCIE;
438
439 fmc2->irq_state = FMC2_IRQ_SEQ;
440
441 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
442 }
443
/* Disable irq sources when the sequencer is used */
static inline void stm32_fmc2_disable_seq_irq(struct stm32_fmc2_nfc *fmc2)
446 {
447 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
448
449 csqier &= ~FMC2_CSQIER_TCIE;
450
451 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
452
453 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
454 }
455
/* Clear irq sources when the sequencer is used */
static inline void stm32_fmc2_clear_seq_irq(struct stm32_fmc2_nfc *fmc2)
458 {
459 writel_relaxed(FMC2_CSQICR_CLEAR_IRQ, fmc2->io_base + FMC2_CSQICR);
460 }
461
/* Enable irq sources when BCH is used */
static inline void stm32_fmc2_enable_bch_irq(struct stm32_fmc2_nfc *fmc2,
					     int mode)
465 {
466 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
467
468 if (mode == NAND_ECC_WRITE)
469 bchier |= FMC2_BCHIER_EPBRIE;
470 else
471 bchier |= FMC2_BCHIER_DERIE;
472
473 fmc2->irq_state = FMC2_IRQ_BCH;
474
475 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
476 }
477
/* Disable irq sources when BCH is used */
static inline void stm32_fmc2_disable_bch_irq(struct stm32_fmc2_nfc *fmc2)
480 {
481 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
482
483 bchier &= ~FMC2_BCHIER_DERIE;
484 bchier &= ~FMC2_BCHIER_EPBRIE;
485
486 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
487
488 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
489 }
490
/* Clear irq sources when BCH is used */
static inline void stm32_fmc2_clear_bch_irq(struct stm32_fmc2_nfc *fmc2)
493 {
494 writel_relaxed(FMC2_BCHICR_CLEAR_IRQ, fmc2->io_base + FMC2_BCHICR);
495 }
496
497 /*
 * Enable ECC logic and reset the syndrome/parity bits previously calculated.
 * Syndrome/parity bits are cleared by setting the ECCEN bit to 0.
500 */
static void stm32_fmc2_hwctl(struct nand_chip *chip, int mode)
502 {
503 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
504
505 stm32_fmc2_set_ecc(fmc2, false);
506
507 if (chip->ecc.strength != FMC2_ECC_HAM) {
508 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
509
510 if (mode == NAND_ECC_WRITE)
511 pcr |= FMC2_PCR_WEN;
512 else
513 pcr &= ~FMC2_PCR_WEN;
514 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
515
516 reinit_completion(&fmc2->complete);
517 stm32_fmc2_clear_bch_irq(fmc2);
518 stm32_fmc2_enable_bch_irq(fmc2, mode);
519 }
520
521 stm32_fmc2_set_ecc(fmc2, true);
522 }
523
524 /*
525 * ECC Hamming calculation
526 * ECC is 3 bytes for 512 bytes of data (supports error correction up to
527 * max of 1-bit)
528 */
static inline void stm32_fmc2_ham_set_ecc(const u32 ecc_sta, u8 *ecc)
530 {
531 ecc[0] = ecc_sta;
532 ecc[1] = ecc_sta >> 8;
533 ecc[2] = ecc_sta >> 16;
534 }
535
static int stm32_fmc2_ham_calculate(struct nand_chip *chip, const u8 *data,
				    u8 *ecc)
538 {
539 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
540 u32 sr, heccr;
541 int ret;
542
543 ret = readl_relaxed_poll_timeout(fmc2->io_base + FMC2_SR,
544 sr, sr & FMC2_SR_NWRF, 10,
545 FMC2_TIMEOUT_MS);
546 if (ret) {
547 dev_err(fmc2->dev, "ham timeout\n");
548 return ret;
549 }
550
551 heccr = readl_relaxed(fmc2->io_base + FMC2_HECCR);
552
553 stm32_fmc2_ham_set_ecc(heccr, ecc);
554
555 /* Disable ECC */
556 stm32_fmc2_set_ecc(fmc2, false);
557
558 return 0;
559 }
560
static int stm32_fmc2_ham_correct(struct nand_chip *chip, u8 *dat,
				  u8 *read_ecc, u8 *calc_ecc)
563 {
564 u8 bit_position = 0, b0, b1, b2;
565 u32 byte_addr = 0, b;
566 u32 i, shifting = 1;
567
/* Indicate which bit and byte are faulty (if any) */
569 b0 = read_ecc[0] ^ calc_ecc[0];
570 b1 = read_ecc[1] ^ calc_ecc[1];
571 b2 = read_ecc[2] ^ calc_ecc[2];
572 b = b0 | (b1 << 8) | (b2 << 16);
573
574 /* No errors */
575 if (likely(!b))
576 return 0;
577
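	/*
	 * Decode the 24-bit syndrome two bits at a time: for a single-bit
	 * error each pair is either 01 or 10, and a 10 pair sets the
	 * corresponding position bit. The first three pairs encode the bit
	 * position within the byte, the next nine pairs the byte offset in
	 * the 512-byte sector; any other pair value is uncorrectable.
	 */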
578 /* Calculate bit position */
579 for (i = 0; i < 3; i++) {
580 switch (b % 4) {
581 case 2:
582 bit_position += shifting;
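			/* fall through */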
583 case 1:
584 break;
585 default:
586 return -EBADMSG;
587 }
588 shifting <<= 1;
589 b >>= 2;
590 }
591
592 /* Calculate byte position */
593 shifting = 1;
594 for (i = 0; i < 9; i++) {
595 switch (b % 4) {
596 case 2:
597 byte_addr += shifting;
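			/* fall through */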
598 case 1:
599 break;
600 default:
601 return -EBADMSG;
602 }
603 shifting <<= 1;
604 b >>= 2;
605 }
606
607 /* Flip the bit */
608 dat[byte_addr] ^= (1 << bit_position);
609
610 return 1;
611 }
612
613 /*
614 * ECC BCH calculation and correction
615 * ECC is 7/13 bytes for 512 bytes of data (supports error correction up to
616 * max of 4-bit/8-bit)
617 */
static int stm32_fmc2_bch_calculate(struct nand_chip *chip, const u8 *data,
				    u8 *ecc)
620 {
621 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
622 u32 bchpbr;
623
624 /* Wait until the BCH code is ready */
625 if (!wait_for_completion_timeout(&fmc2->complete,
626 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
627 dev_err(fmc2->dev, "bch timeout\n");
628 stm32_fmc2_disable_bch_irq(fmc2);
629 return -ETIMEDOUT;
630 }
631
632 /* Read parity bits */
633 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR1);
634 ecc[0] = bchpbr;
635 ecc[1] = bchpbr >> 8;
636 ecc[2] = bchpbr >> 16;
637 ecc[3] = bchpbr >> 24;
638
639 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR2);
640 ecc[4] = bchpbr;
641 ecc[5] = bchpbr >> 8;
642 ecc[6] = bchpbr >> 16;
643
644 if (chip->ecc.strength == FMC2_ECC_BCH8) {
645 ecc[7] = bchpbr >> 24;
646
647 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR3);
648 ecc[8] = bchpbr;
649 ecc[9] = bchpbr >> 8;
650 ecc[10] = bchpbr >> 16;
651 ecc[11] = bchpbr >> 24;
652
653 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR4);
654 ecc[12] = bchpbr;
655 }
656
657 /* Disable ECC */
658 stm32_fmc2_set_ecc(fmc2, false);
659
660 return 0;
661 }
662
663 /* BCH algorithm correction */
static int stm32_fmc2_bch_decode(int eccsize, u8 *dat, u32 *ecc_sta)
665 {
666 u32 bchdsr0 = ecc_sta[0];
667 u32 bchdsr1 = ecc_sta[1];
668 u32 bchdsr2 = ecc_sta[2];
669 u32 bchdsr3 = ecc_sta[3];
670 u32 bchdsr4 = ecc_sta[4];
671 u16 pos[8];
672 int i, den;
673 unsigned int nb_errs = 0;
674
675 /* No errors found */
676 if (likely(!(bchdsr0 & FMC2_BCHDSR0_DEF)))
677 return 0;
678
679 /* Too many errors detected */
680 if (unlikely(bchdsr0 & FMC2_BCHDSR0_DUE))
681 return -EBADMSG;
682
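	/*
	 * Each BCHDSRx register packs two 13-bit error bit positions (EBPx);
	 * only the first "den" entries reported by BCHDSR0 are valid.
	 */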
683 pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
684 pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
685 pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
686 pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
687 pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
688 pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
689 pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
690 pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;
691
692 den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
693 for (i = 0; i < den; i++) {
694 if (pos[i] < eccsize * 8) {
695 change_bit(pos[i], (unsigned long *)dat);
696 nb_errs++;
697 }
698 }
699
700 return nb_errs;
701 }
702
static int stm32_fmc2_bch_correct(struct nand_chip *chip, u8 *dat,
				  u8 *read_ecc, u8 *calc_ecc)
705 {
706 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
707 u32 ecc_sta[5];
708
/* Wait until the decoding error flags are available */
710 if (!wait_for_completion_timeout(&fmc2->complete,
711 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
712 dev_err(fmc2->dev, "bch timeout\n");
713 stm32_fmc2_disable_bch_irq(fmc2);
714 return -ETIMEDOUT;
715 }
716
717 ecc_sta[0] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR0);
718 ecc_sta[1] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR1);
719 ecc_sta[2] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR2);
720 ecc_sta[3] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR3);
721 ecc_sta[4] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR4);
722
723 /* Disable ECC */
724 stm32_fmc2_set_ecc(fmc2, false);
725
726 return stm32_fmc2_bch_decode(chip->ecc.size, dat, ecc_sta);
727 }
728
static int stm32_fmc2_read_page(struct nand_chip *chip, u8 *buf,
				int oob_required, int page)
731 {
732 struct mtd_info *mtd = nand_to_mtd(chip);
733 int ret, i, s, stat, eccsize = chip->ecc.size;
734 int eccbytes = chip->ecc.bytes;
735 int eccsteps = chip->ecc.steps;
736 int eccstrength = chip->ecc.strength;
737 u8 *p = buf;
738 u8 *ecc_calc = chip->ecc.calc_buf;
739 u8 *ecc_code = chip->ecc.code_buf;
740 unsigned int max_bitflips = 0;
741
742 ret = nand_read_page_op(chip, page, 0, NULL, 0);
743 if (ret)
744 return ret;
745
746 for (i = mtd->writesize + FMC2_BBM_LEN, s = 0; s < eccsteps;
747 s++, i += eccbytes, p += eccsize) {
748 chip->ecc.hwctl(chip, NAND_ECC_READ);
749
750 /* Read the nand page sector (512 bytes) */
751 ret = nand_change_read_column_op(chip, s * eccsize, p,
752 eccsize, false);
753 if (ret)
754 return ret;
755
756 /* Read the corresponding ECC bytes */
757 ret = nand_change_read_column_op(chip, i, ecc_code,
758 eccbytes, false);
759 if (ret)
760 return ret;
761
762 /* Correct the data */
763 stat = chip->ecc.correct(chip, p, ecc_code, ecc_calc);
764 if (stat == -EBADMSG)
765 /* Check for empty pages with bitflips */
766 stat = nand_check_erased_ecc_chunk(p, eccsize,
767 ecc_code, eccbytes,
768 NULL, 0,
769 eccstrength);
770
771 if (stat < 0) {
772 mtd->ecc_stats.failed++;
773 } else {
774 mtd->ecc_stats.corrected += stat;
775 max_bitflips = max_t(unsigned int, max_bitflips, stat);
776 }
777 }
778
779 /* Read oob */
780 if (oob_required) {
781 ret = nand_change_read_column_op(chip, mtd->writesize,
782 chip->oob_poi, mtd->oobsize,
783 false);
784 if (ret)
785 return ret;
786 }
787
788 return max_bitflips;
789 }
790
791 /* Sequencer read/write configuration */
static void stm32_fmc2_rw_page_init(struct nand_chip *chip, int page,
				    int raw, bool write_data)
794 {
795 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
796 struct mtd_info *mtd = nand_to_mtd(chip);
797 u32 csqcfgr1, csqcfgr2, csqcfgr3;
798 u32 csqar1, csqar2;
799 u32 ecc_offset = mtd->writesize + FMC2_BBM_LEN;
800 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
801
802 if (write_data)
803 pcr |= FMC2_PCR_WEN;
804 else
805 pcr &= ~FMC2_PCR_WEN;
806 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
807
808 /*
809 * - Set Program Page/Page Read command
810 * - Enable DMA request data
811 * - Set timings
812 */
813 csqcfgr1 = FMC2_CSQCFGR1_DMADEN | FMC2_CSQCFGR1_CMD1T;
814 if (write_data)
815 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_SEQIN);
816 else
817 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_READ0) |
818 FMC2_CSQCFGR1_CMD2EN |
819 FMC2_CSQCFGR1_CMD2(NAND_CMD_READSTART) |
820 FMC2_CSQCFGR1_CMD2T;
821
822 /*
823 * - Set Random Data Input/Random Data Read command
824 * - Enable the sequencer to access the Spare data area
825 * - Enable DMA request status decoding for read
826 * - Set timings
827 */
828 if (write_data)
829 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDIN);
830 else
831 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDOUT) |
832 FMC2_CSQCFGR2_RCMD2EN |
833 FMC2_CSQCFGR2_RCMD2(NAND_CMD_RNDOUTSTART) |
834 FMC2_CSQCFGR2_RCMD1T |
835 FMC2_CSQCFGR2_RCMD2T;
836 if (!raw) {
837 csqcfgr2 |= write_data ? 0 : FMC2_CSQCFGR2_DMASEN;
838 csqcfgr2 |= FMC2_CSQCFGR2_SQSDTEN;
839 }
840
841 /*
842 * - Set the number of sectors to be written
843 * - Set timings
844 */
845 csqcfgr3 = FMC2_CSQCFGR3_SNBR(chip->ecc.steps - 1);
846 if (write_data) {
847 csqcfgr3 |= FMC2_CSQCFGR3_RAC2T;
848 if (chip->options & NAND_ROW_ADDR_3)
849 csqcfgr3 |= FMC2_CSQCFGR3_AC5T;
850 else
851 csqcfgr3 |= FMC2_CSQCFGR3_AC4T;
852 }
853
854 /*
 * Set the first four address cycles
856 * Byte 1 and byte 2 => column, we start at 0x0
857 * Byte 3 and byte 4 => page
858 */
859 csqar1 = FMC2_CSQCAR1_ADDC3(page);
860 csqar1 |= FMC2_CSQCAR1_ADDC4(page >> 8);
861
862 /*
863 * - Set chip enable number
864 * - Set ECC byte offset in the spare area
865 * - Calculate the number of address cycles to be issued
866 * - Set byte 5 of address cycle if needed
867 */
868 csqar2 = FMC2_CSQCAR2_NANDCEN(fmc2->cs_sel);
869 if (chip->options & NAND_BUSWIDTH_16)
870 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset >> 1);
871 else
872 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset);
873 if (chip->options & NAND_ROW_ADDR_3) {
874 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(5);
875 csqar2 |= FMC2_CSQCAR2_ADDC5(page >> 16);
876 } else {
877 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(4);
878 }
879
880 writel_relaxed(csqcfgr1, fmc2->io_base + FMC2_CSQCFGR1);
881 writel_relaxed(csqcfgr2, fmc2->io_base + FMC2_CSQCFGR2);
882 writel_relaxed(csqcfgr3, fmc2->io_base + FMC2_CSQCFGR3);
883 writel_relaxed(csqar1, fmc2->io_base + FMC2_CSQAR1);
884 writel_relaxed(csqar2, fmc2->io_base + FMC2_CSQAR2);
885 }
886
static void stm32_fmc2_dma_callback(void *arg)
888 {
889 complete((struct completion *)arg);
890 }
891
892 /* Read/write data from/to a page */
static int stm32_fmc2_xfer(struct nand_chip *chip, const u8 *buf,
			   int raw, bool write_data)
895 {
896 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
897 struct dma_async_tx_descriptor *desc_data, *desc_ecc;
898 struct scatterlist *sg;
899 struct dma_chan *dma_ch = fmc2->dma_rx_ch;
900 enum dma_data_direction dma_data_dir = DMA_FROM_DEVICE;
901 enum dma_transfer_direction dma_transfer_dir = DMA_DEV_TO_MEM;
902 u32 csqcr = readl_relaxed(fmc2->io_base + FMC2_CSQCR);
903 int eccsteps = chip->ecc.steps;
904 int eccsize = chip->ecc.size;
905 const u8 *p = buf;
906 int s, ret;
907
908 /* Configure DMA data */
909 if (write_data) {
910 dma_data_dir = DMA_TO_DEVICE;
911 dma_transfer_dir = DMA_MEM_TO_DEV;
912 dma_ch = fmc2->dma_tx_ch;
913 }
914
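	/* One scatterlist entry per 512-byte ECC sector of the page */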
915 for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
916 sg_set_buf(sg, p, eccsize);
917 p += eccsize;
918 }
919
920 ret = dma_map_sg(fmc2->dev, fmc2->dma_data_sg.sgl,
921 eccsteps, dma_data_dir);
922 if (ret < 0)
923 return ret;
924
925 desc_data = dmaengine_prep_slave_sg(dma_ch, fmc2->dma_data_sg.sgl,
926 eccsteps, dma_transfer_dir,
927 DMA_PREP_INTERRUPT);
928 if (!desc_data) {
929 ret = -ENOMEM;
930 goto err_unmap_data;
931 }
932
933 reinit_completion(&fmc2->dma_data_complete);
934 reinit_completion(&fmc2->complete);
935 desc_data->callback = stm32_fmc2_dma_callback;
936 desc_data->callback_param = &fmc2->dma_data_complete;
937 ret = dma_submit_error(dmaengine_submit(desc_data));
938 if (ret)
939 goto err_unmap_data;
940
941 dma_async_issue_pending(dma_ch);
942
943 if (!write_data && !raw) {
944 /* Configure DMA ECC status */
945 p = fmc2->ecc_buf;
946 for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
947 sg_set_buf(sg, p, fmc2->dma_ecc_len);
948 p += fmc2->dma_ecc_len;
949 }
950
951 ret = dma_map_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
952 eccsteps, dma_data_dir);
953 if (ret < 0)
954 goto err_unmap_data;
955
956 desc_ecc = dmaengine_prep_slave_sg(fmc2->dma_ecc_ch,
957 fmc2->dma_ecc_sg.sgl,
958 eccsteps, dma_transfer_dir,
959 DMA_PREP_INTERRUPT);
960 if (!desc_ecc) {
961 ret = -ENOMEM;
962 goto err_unmap_ecc;
963 }
964
965 reinit_completion(&fmc2->dma_ecc_complete);
966 desc_ecc->callback = stm32_fmc2_dma_callback;
967 desc_ecc->callback_param = &fmc2->dma_ecc_complete;
968 ret = dma_submit_error(dmaengine_submit(desc_ecc));
969 if (ret)
970 goto err_unmap_ecc;
971
972 dma_async_issue_pending(fmc2->dma_ecc_ch);
973 }
974
975 stm32_fmc2_clear_seq_irq(fmc2);
976 stm32_fmc2_enable_seq_irq(fmc2);
977
978 /* Start the transfer */
979 csqcr |= FMC2_CSQCR_CSQSTART;
980 writel_relaxed(csqcr, fmc2->io_base + FMC2_CSQCR);
981
982 /* Wait end of sequencer transfer */
983 if (!wait_for_completion_timeout(&fmc2->complete,
984 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
985 dev_err(fmc2->dev, "seq timeout\n");
986 stm32_fmc2_disable_seq_irq(fmc2);
987 dmaengine_terminate_all(dma_ch);
988 if (!write_data && !raw)
989 dmaengine_terminate_all(fmc2->dma_ecc_ch);
990 ret = -ETIMEDOUT;
991 goto err_unmap_ecc;
992 }
993
994 /* Wait DMA data transfer completion */
995 if (!wait_for_completion_timeout(&fmc2->dma_data_complete,
996 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
997 dev_err(fmc2->dev, "data DMA timeout\n");
998 dmaengine_terminate_all(dma_ch);
999 ret = -ETIMEDOUT;
1000 }
1001
1002 /* Wait DMA ECC transfer completion */
1003 if (!write_data && !raw) {
1004 if (!wait_for_completion_timeout(&fmc2->dma_ecc_complete,
1005 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
1006 dev_err(fmc2->dev, "ECC DMA timeout\n");
1007 dmaengine_terminate_all(fmc2->dma_ecc_ch);
1008 ret = -ETIMEDOUT;
1009 }
1010 }
1011
1012 err_unmap_ecc:
1013 if (!write_data && !raw)
1014 dma_unmap_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
1015 eccsteps, dma_data_dir);
1016
1017 err_unmap_data:
1018 dma_unmap_sg(fmc2->dev, fmc2->dma_data_sg.sgl, eccsteps, dma_data_dir);
1019
1020 return ret;
1021 }
1022
static int stm32_fmc2_sequencer_write(struct nand_chip *chip,
				      const u8 *buf, int oob_required,
				      int page, int raw)
1026 {
1027 struct mtd_info *mtd = nand_to_mtd(chip);
1028 int ret;
1029
1030 /* Configure the sequencer */
1031 stm32_fmc2_rw_page_init(chip, page, raw, true);
1032
1033 /* Write the page */
1034 ret = stm32_fmc2_xfer(chip, buf, raw, true);
1035 if (ret)
1036 return ret;
1037
1038 /* Write oob */
1039 if (oob_required) {
1040 ret = nand_change_write_column_op(chip, mtd->writesize,
1041 chip->oob_poi, mtd->oobsize,
1042 false);
1043 if (ret)
1044 return ret;
1045 }
1046
1047 return nand_prog_page_end_op(chip);
1048 }
1049
static int stm32_fmc2_sequencer_write_page(struct nand_chip *chip,
					   const u8 *buf,
					   int oob_required,
					   int page)
1054 {
1055 int ret;
1056
1057 /* Select the target */
1058 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1059 if (ret)
1060 return ret;
1061
1062 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, false);
1063 }
1064
static int stm32_fmc2_sequencer_write_page_raw(struct nand_chip *chip,
					       const u8 *buf,
					       int oob_required,
					       int page)
1069 {
1070 int ret;
1071
1072 /* Select the target */
1073 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1074 if (ret)
1075 return ret;
1076
1077 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, true);
1078 }
1079
1080 /* Get a status indicating which sectors have errors */
static inline u16 stm32_fmc2_get_mapping_status(struct stm32_fmc2_nfc *fmc2)
1082 {
1083 u32 csqemsr = readl_relaxed(fmc2->io_base + FMC2_CSQEMSR);
1084
1085 return csqemsr & FMC2_CSQEMSR_SEM;
1086 }
1087
static int stm32_fmc2_sequencer_correct(struct nand_chip *chip, u8 *dat,
					u8 *read_ecc, u8 *calc_ecc)
1090 {
1091 struct mtd_info *mtd = nand_to_mtd(chip);
1092 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1093 int eccbytes = chip->ecc.bytes;
1094 int eccsteps = chip->ecc.steps;
1095 int eccstrength = chip->ecc.strength;
1096 int i, s, eccsize = chip->ecc.size;
1097 u32 *ecc_sta = (u32 *)fmc2->ecc_buf;
1098 u16 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1099 unsigned int max_bitflips = 0;
1100
1101 for (i = 0, s = 0; s < eccsteps; s++, i += eccbytes, dat += eccsize) {
1102 int stat = 0;
1103
1104 if (eccstrength == FMC2_ECC_HAM) {
1105 /* Ecc_sta = FMC2_HECCR */
1106 if (sta_map & BIT(s)) {
1107 stm32_fmc2_ham_set_ecc(*ecc_sta, &calc_ecc[i]);
1108 stat = stm32_fmc2_ham_correct(chip, dat,
1109 &read_ecc[i],
1110 &calc_ecc[i]);
1111 }
1112 ecc_sta++;
1113 } else {
1114 /*
1115 * Ecc_sta[0] = FMC2_BCHDSR0
1116 * Ecc_sta[1] = FMC2_BCHDSR1
1117 * Ecc_sta[2] = FMC2_BCHDSR2
1118 * Ecc_sta[3] = FMC2_BCHDSR3
1119 * Ecc_sta[4] = FMC2_BCHDSR4
1120 */
1121 if (sta_map & BIT(s))
1122 stat = stm32_fmc2_bch_decode(eccsize, dat,
1123 ecc_sta);
1124 ecc_sta += 5;
1125 }
1126
1127 if (stat == -EBADMSG)
1128 /* Check for empty pages with bitflips */
1129 stat = nand_check_erased_ecc_chunk(dat, eccsize,
1130 &read_ecc[i],
1131 eccbytes,
1132 NULL, 0,
1133 eccstrength);
1134
1135 if (stat < 0) {
1136 mtd->ecc_stats.failed++;
1137 } else {
1138 mtd->ecc_stats.corrected += stat;
1139 max_bitflips = max_t(unsigned int, max_bitflips, stat);
1140 }
1141 }
1142
1143 return max_bitflips;
1144 }
1145
static int stm32_fmc2_sequencer_read_page(struct nand_chip *chip, u8 *buf,
					  int oob_required, int page)
1148 {
1149 struct mtd_info *mtd = nand_to_mtd(chip);
1150 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1151 u8 *ecc_calc = chip->ecc.calc_buf;
1152 u8 *ecc_code = chip->ecc.code_buf;
1153 u16 sta_map;
1154 int ret;
1155
1156 /* Select the target */
1157 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1158 if (ret)
1159 return ret;
1160
1161 /* Configure the sequencer */
1162 stm32_fmc2_rw_page_init(chip, page, 0, false);
1163
1164 /* Read the page */
1165 ret = stm32_fmc2_xfer(chip, buf, 0, false);
1166 if (ret)
1167 return ret;
1168
1169 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1170
/* Check if errors happened */
1172 if (likely(!sta_map)) {
1173 if (oob_required)
1174 return nand_change_read_column_op(chip, mtd->writesize,
1175 chip->oob_poi,
1176 mtd->oobsize, false);
1177
1178 return 0;
1179 }
1180
1181 /* Read oob */
1182 ret = nand_change_read_column_op(chip, mtd->writesize,
1183 chip->oob_poi, mtd->oobsize, false);
1184 if (ret)
1185 return ret;
1186
1187 ret = mtd_ooblayout_get_eccbytes(mtd, ecc_code, chip->oob_poi, 0,
1188 chip->ecc.total);
1189 if (ret)
1190 return ret;
1191
1192 /* Correct data */
1193 return chip->ecc.correct(chip, buf, ecc_code, ecc_calc);
1194 }
1195
static int stm32_fmc2_sequencer_read_page_raw(struct nand_chip *chip, u8 *buf,
					      int oob_required, int page)
1198 {
1199 struct mtd_info *mtd = nand_to_mtd(chip);
1200 int ret;
1201
1202 /* Select the target */
1203 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1204 if (ret)
1205 return ret;
1206
1207 /* Configure the sequencer */
1208 stm32_fmc2_rw_page_init(chip, page, 1, false);
1209
1210 /* Read the page */
1211 ret = stm32_fmc2_xfer(chip, buf, 1, false);
1212 if (ret)
1213 return ret;
1214
1215 /* Read oob */
1216 if (oob_required)
1217 return nand_change_read_column_op(chip, mtd->writesize,
1218 chip->oob_poi, mtd->oobsize,
1219 false);
1220
1221 return 0;
1222 }
1223
static irqreturn_t stm32_fmc2_irq(int irq, void *dev_id)
1225 {
1226 struct stm32_fmc2_nfc *fmc2 = (struct stm32_fmc2_nfc *)dev_id;
1227
1228 if (fmc2->irq_state == FMC2_IRQ_SEQ)
1229 /* Sequencer is used */
1230 stm32_fmc2_disable_seq_irq(fmc2);
1231 else if (fmc2->irq_state == FMC2_IRQ_BCH)
1232 /* BCH is used */
1233 stm32_fmc2_disable_bch_irq(fmc2);
1234
1235 complete(&fmc2->complete);
1236
1237 return IRQ_HANDLED;
1238 }
1239
static void stm32_fmc2_read_data(struct nand_chip *chip, void *buf,
				 unsigned int len, bool force_8bit)
1242 {
1243 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1244 void __iomem *io_addr_r = fmc2->data_base[fmc2->cs_sel];
1245
1246 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1247 /* Reconfigure bus width to 8-bit */
1248 stm32_fmc2_set_buswidth_16(fmc2, false);
1249
1250 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1251 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1252 *(u8 *)buf = readb_relaxed(io_addr_r);
1253 buf += sizeof(u8);
1254 len -= sizeof(u8);
1255 }
1256
1257 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1258 len >= sizeof(u16)) {
1259 *(u16 *)buf = readw_relaxed(io_addr_r);
1260 buf += sizeof(u16);
1261 len -= sizeof(u16);
1262 }
1263 }
1264
1265 /* Buf is aligned */
1266 while (len >= sizeof(u32)) {
1267 *(u32 *)buf = readl_relaxed(io_addr_r);
1268 buf += sizeof(u32);
1269 len -= sizeof(u32);
1270 }
1271
1272 /* Read remaining bytes */
1273 if (len >= sizeof(u16)) {
1274 *(u16 *)buf = readw_relaxed(io_addr_r);
1275 buf += sizeof(u16);
1276 len -= sizeof(u16);
1277 }
1278
1279 if (len)
1280 *(u8 *)buf = readb_relaxed(io_addr_r);
1281
1282 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1283 /* Reconfigure bus width to 16-bit */
1284 stm32_fmc2_set_buswidth_16(fmc2, true);
1285 }
1286
static void stm32_fmc2_write_data(struct nand_chip *chip, const void *buf,
				  unsigned int len, bool force_8bit)
1289 {
1290 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1291 void __iomem *io_addr_w = fmc2->data_base[fmc2->cs_sel];
1292
1293 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1294 /* Reconfigure bus width to 8-bit */
1295 stm32_fmc2_set_buswidth_16(fmc2, false);
1296
1297 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1298 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1299 writeb_relaxed(*(u8 *)buf, io_addr_w);
1300 buf += sizeof(u8);
1301 len -= sizeof(u8);
1302 }
1303
1304 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1305 len >= sizeof(u16)) {
1306 writew_relaxed(*(u16 *)buf, io_addr_w);
1307 buf += sizeof(u16);
1308 len -= sizeof(u16);
1309 }
1310 }
1311
1312 /* Buf is aligned */
1313 while (len >= sizeof(u32)) {
1314 writel_relaxed(*(u32 *)buf, io_addr_w);
1315 buf += sizeof(u32);
1316 len -= sizeof(u32);
1317 }
1318
1319 /* Write remaining bytes */
1320 if (len >= sizeof(u16)) {
1321 writew_relaxed(*(u16 *)buf, io_addr_w);
1322 buf += sizeof(u16);
1323 len -= sizeof(u16);
1324 }
1325
1326 if (len)
1327 writeb_relaxed(*(u8 *)buf, io_addr_w);
1328
1329 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1330 /* Reconfigure bus width to 16-bit */
1331 stm32_fmc2_set_buswidth_16(fmc2, true);
1332 }
1333
static int stm32_fmc2_waitrdy(struct nand_chip *chip, unsigned long timeout_ms)
1335 {
1336 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1337 const struct nand_sdr_timings *timings;
1338 u32 isr, sr;
1339
/* Check that there are no pending requests to the NAND flash */
1341 if (readl_relaxed_poll_timeout_atomic(fmc2->io_base + FMC2_SR, sr,
1342 sr & FMC2_SR_NWRF, 1,
1343 FMC2_TIMEOUT_US))
1344 dev_warn(fmc2->dev, "Waitrdy timeout\n");
1345
/* Wait tWB before the R/B# signal goes low */
1347 timings = nand_get_sdr_timings(&chip->data_interface);
1348 ndelay(PSEC_TO_NSEC(timings->tWB_max));
1349
1350 /* R/B# signal is low, clear high level flag */
1351 writel_relaxed(FMC2_ICR_CIHLF, fmc2->io_base + FMC2_ICR);
1352
/* Wait until the R/B# signal is high */
1354 return readl_relaxed_poll_timeout_atomic(fmc2->io_base + FMC2_ISR,
1355 isr, isr & FMC2_ISR_IHLF,
1356 5, 1000 * timeout_ms);
1357 }
1358
static int stm32_fmc2_exec_op(struct nand_chip *chip,
			      const struct nand_operation *op,
			      bool check_only)
1362 {
1363 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1364 const struct nand_op_instr *instr = NULL;
1365 unsigned int op_id, i;
1366 int ret;
1367
1368 ret = stm32_fmc2_select_chip(chip, op->cs);
1369 if (ret)
1370 return ret;
1371
1372 if (check_only)
1373 return ret;
1374
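	/*
	 * Issue each instruction by accessing the command, address or data
	 * window of the selected chip-select.
	 */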
1375 for (op_id = 0; op_id < op->ninstrs; op_id++) {
1376 instr = &op->instrs[op_id];
1377
1378 switch (instr->type) {
1379 case NAND_OP_CMD_INSTR:
1380 writeb_relaxed(instr->ctx.cmd.opcode,
1381 fmc2->cmd_base[fmc2->cs_sel]);
1382 break;
1383
1384 case NAND_OP_ADDR_INSTR:
1385 for (i = 0; i < instr->ctx.addr.naddrs; i++)
1386 writeb_relaxed(instr->ctx.addr.addrs[i],
1387 fmc2->addr_base[fmc2->cs_sel]);
1388 break;
1389
1390 case NAND_OP_DATA_IN_INSTR:
1391 stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
1392 instr->ctx.data.len,
1393 instr->ctx.data.force_8bit);
1394 break;
1395
1396 case NAND_OP_DATA_OUT_INSTR:
1397 stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
1398 instr->ctx.data.len,
1399 instr->ctx.data.force_8bit);
1400 break;
1401
1402 case NAND_OP_WAITRDY_INSTR:
1403 ret = stm32_fmc2_waitrdy(chip,
1404 instr->ctx.waitrdy.timeout_ms);
1405 break;
1406 }
1407 }
1408
1409 return ret;
1410 }
1411
1412 /* Controller initialization */
static void stm32_fmc2_init(struct stm32_fmc2_nfc *fmc2)
1414 {
1415 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
1416 u32 bcr1 = readl_relaxed(fmc2->io_base + FMC2_BCR1);
1417
1418 /* Set CS used to undefined */
1419 fmc2->cs_sel = -1;
1420
1421 /* Enable wait feature and nand flash memory bank */
1422 pcr |= FMC2_PCR_PWAITEN;
1423 pcr |= FMC2_PCR_PBKEN;
1424
1425 /* Set buswidth to 8 bits mode for identification */
1426 pcr &= ~FMC2_PCR_PWID_MASK;
1427
1428 /* ECC logic is disabled */
1429 pcr &= ~FMC2_PCR_ECCEN;
1430
1431 /* Default mode */
1432 pcr &= ~FMC2_PCR_ECCALG;
1433 pcr &= ~FMC2_PCR_BCHECC;
1434 pcr &= ~FMC2_PCR_WEN;
1435
1436 /* Set default ECC sector size */
1437 pcr &= ~FMC2_PCR_ECCSS_MASK;
1438 pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);
1439
1440 /* Set default tclr/tar timings */
1441 pcr &= ~FMC2_PCR_TCLR_MASK;
1442 pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
1443 pcr &= ~FMC2_PCR_TAR_MASK;
1444 pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);
1445
1446 /* Enable FMC2 controller */
1447 bcr1 |= FMC2_BCR1_FMC2EN;
1448
1449 writel_relaxed(bcr1, fmc2->io_base + FMC2_BCR1);
1450 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
1451 writel_relaxed(FMC2_PMEM_DEFAULT, fmc2->io_base + FMC2_PMEM);
1452 writel_relaxed(FMC2_PATT_DEFAULT, fmc2->io_base + FMC2_PATT);
1453 }
1454
1455 /* Controller timings */
static void stm32_fmc2_calc_timings(struct nand_chip *chip,
				    const struct nand_sdr_timings *sdrt)
1458 {
1459 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1460 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
1461 struct stm32_fmc2_timings *tims = &nand->timings;
1462 unsigned long hclk = clk_get_rate(fmc2->clk);
1463 unsigned long hclkp = NSEC_PER_SEC / (hclk / 1000);
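	/* hclkp is the FMC2 clock period in ps, the unit of the SDR timings */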
1464 unsigned long timing, tar, tclr, thiz, twait;
1465 unsigned long tset_mem, tset_att, thold_mem, thold_att;
1466
1467 tar = max_t(unsigned long, hclkp, sdrt->tAR_min);
1468 timing = DIV_ROUND_UP(tar, hclkp) - 1;
1469 tims->tar = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);
1470
1471 tclr = max_t(unsigned long, hclkp, sdrt->tCLR_min);
1472 timing = DIV_ROUND_UP(tclr, hclkp) - 1;
1473 tims->tclr = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);
1474
1475 tims->thiz = FMC2_THIZ;
1476 thiz = (tims->thiz + 1) * hclkp;
1477
1478 /*
1479 * tWAIT > tRP
1480 * tWAIT > tWP
1481 * tWAIT > tREA + tIO
1482 */
1483 twait = max_t(unsigned long, hclkp, sdrt->tRP_min);
1484 twait = max_t(unsigned long, twait, sdrt->tWP_min);
1485 twait = max_t(unsigned long, twait, sdrt->tREA_max + FMC2_TIO);
1486 timing = DIV_ROUND_UP(twait, hclkp);
1487 tims->twait = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1488
1489 /*
1490 * tSETUP_MEM > tCS - tWAIT
1491 * tSETUP_MEM > tALS - tWAIT
1492 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
1493 */
1494 tset_mem = hclkp;
1495 if (sdrt->tCS_min > twait && (tset_mem < sdrt->tCS_min - twait))
1496 tset_mem = sdrt->tCS_min - twait;
1497 if (sdrt->tALS_min > twait && (tset_mem < sdrt->tALS_min - twait))
1498 tset_mem = sdrt->tALS_min - twait;
1499 if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1500 (tset_mem < sdrt->tDS_min - (twait - thiz)))
1501 tset_mem = sdrt->tDS_min - (twait - thiz);
1502 timing = DIV_ROUND_UP(tset_mem, hclkp);
1503 tims->tset_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1504
1505 /*
1506 * tHOLD_MEM > tCH
1507 * tHOLD_MEM > tREH - tSETUP_MEM
1508 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
1509 */
1510 thold_mem = max_t(unsigned long, hclkp, sdrt->tCH_min);
1511 if (sdrt->tREH_min > tset_mem &&
1512 (thold_mem < sdrt->tREH_min - tset_mem))
1513 thold_mem = sdrt->tREH_min - tset_mem;
1514 if ((sdrt->tRC_min > tset_mem + twait) &&
1515 (thold_mem < sdrt->tRC_min - (tset_mem + twait)))
1516 thold_mem = sdrt->tRC_min - (tset_mem + twait);
1517 if ((sdrt->tWC_min > tset_mem + twait) &&
1518 (thold_mem < sdrt->tWC_min - (tset_mem + twait)))
1519 thold_mem = sdrt->tWC_min - (tset_mem + twait);
1520 timing = DIV_ROUND_UP(thold_mem, hclkp);
1521 tims->thold_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1522
1523 /*
1524 * tSETUP_ATT > tCS - tWAIT
1525 * tSETUP_ATT > tCLS - tWAIT
1526 * tSETUP_ATT > tALS - tWAIT
1527 * tSETUP_ATT > tRHW - tHOLD_MEM
1528 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
1529 */
1530 tset_att = hclkp;
1531 if (sdrt->tCS_min > twait && (tset_att < sdrt->tCS_min - twait))
1532 tset_att = sdrt->tCS_min - twait;
1533 if (sdrt->tCLS_min > twait && (tset_att < sdrt->tCLS_min - twait))
1534 tset_att = sdrt->tCLS_min - twait;
1535 if (sdrt->tALS_min > twait && (tset_att < sdrt->tALS_min - twait))
1536 tset_att = sdrt->tALS_min - twait;
1537 if (sdrt->tRHW_min > thold_mem &&
1538 (tset_att < sdrt->tRHW_min - thold_mem))
1539 tset_att = sdrt->tRHW_min - thold_mem;
1540 if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1541 (tset_att < sdrt->tDS_min - (twait - thiz)))
1542 tset_att = sdrt->tDS_min - (twait - thiz);
1543 timing = DIV_ROUND_UP(tset_att, hclkp);
1544 tims->tset_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1545
1546 /*
1547 * tHOLD_ATT > tALH
1548 * tHOLD_ATT > tCH
1549 * tHOLD_ATT > tCLH
1550 * tHOLD_ATT > tCOH
1551 * tHOLD_ATT > tDH
1552 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
1553 * tHOLD_ATT > tADL - tSETUP_MEM
1554 * tHOLD_ATT > tWH - tSETUP_MEM
1555 * tHOLD_ATT > tWHR - tSETUP_MEM
1556 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
1557 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
1558 */
1559 thold_att = max_t(unsigned long, hclkp, sdrt->tALH_min);
1560 thold_att = max_t(unsigned long, thold_att, sdrt->tCH_min);
1561 thold_att = max_t(unsigned long, thold_att, sdrt->tCLH_min);
1562 thold_att = max_t(unsigned long, thold_att, sdrt->tCOH_min);
1563 thold_att = max_t(unsigned long, thold_att, sdrt->tDH_min);
1564 if ((sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC > tset_mem) &&
1565 (thold_att < sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem))
1566 thold_att = sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem;
1567 if (sdrt->tADL_min > tset_mem &&
1568 (thold_att < sdrt->tADL_min - tset_mem))
1569 thold_att = sdrt->tADL_min - tset_mem;
1570 if (sdrt->tWH_min > tset_mem &&
1571 (thold_att < sdrt->tWH_min - tset_mem))
1572 thold_att = sdrt->tWH_min - tset_mem;
1573 if (sdrt->tWHR_min > tset_mem &&
1574 (thold_att < sdrt->tWHR_min - tset_mem))
1575 thold_att = sdrt->tWHR_min - tset_mem;
1576 if ((sdrt->tRC_min > tset_att + twait) &&
1577 (thold_att < sdrt->tRC_min - (tset_att + twait)))
1578 thold_att = sdrt->tRC_min - (tset_att + twait);
1579 if ((sdrt->tWC_min > tset_att + twait) &&
1580 (thold_att < sdrt->tWC_min - (tset_att + twait)))
1581 thold_att = sdrt->tWC_min - (tset_att + twait);
1582 timing = DIV_ROUND_UP(thold_att, hclkp);
1583 tims->thold_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1584 }
1585
static int stm32_fmc2_setup_interface(struct nand_chip *chip, int chipnr,
				      const struct nand_data_interface *conf)
1588 {
1589 const struct nand_sdr_timings *sdrt;
1590
1591 sdrt = nand_get_sdr_timings(conf);
1592 if (IS_ERR(sdrt))
1593 return PTR_ERR(sdrt);
1594
1595 if (chipnr == NAND_DATA_IFACE_CHECK_ONLY)
1596 return 0;
1597
1598 stm32_fmc2_calc_timings(chip, sdrt);
1599
1600 /* Apply timings */
1601 stm32_fmc2_timings_init(chip);
1602
1603 return 0;
1604 }
1605
1606 /* DMA configuration */
static int stm32_fmc2_dma_setup(struct stm32_fmc2_nfc *fmc2)
1608 {
1609 int ret;
1610
1611 fmc2->dma_tx_ch = dma_request_slave_channel(fmc2->dev, "tx");
1612 fmc2->dma_rx_ch = dma_request_slave_channel(fmc2->dev, "rx");
1613 fmc2->dma_ecc_ch = dma_request_slave_channel(fmc2->dev, "ecc");
1614
1615 if (!fmc2->dma_tx_ch || !fmc2->dma_rx_ch || !fmc2->dma_ecc_ch) {
1616 dev_warn(fmc2->dev, "DMAs not defined in the device tree, polling mode is used\n");
1617 return 0;
1618 }
1619
1620 ret = sg_alloc_table(&fmc2->dma_ecc_sg, FMC2_MAX_SG, GFP_KERNEL);
1621 if (ret)
1622 return ret;
1623
1624 /* Allocate a buffer to store ECC status registers */
1625 fmc2->ecc_buf = devm_kzalloc(fmc2->dev, FMC2_MAX_ECC_BUF_LEN,
1626 GFP_KERNEL);
1627 if (!fmc2->ecc_buf)
1628 return -ENOMEM;
1629
1630 ret = sg_alloc_table(&fmc2->dma_data_sg, FMC2_MAX_SG, GFP_KERNEL);
1631 if (ret)
1632 return ret;
1633
1634 init_completion(&fmc2->dma_data_complete);
1635 init_completion(&fmc2->dma_ecc_complete);
1636
1637 return 0;
1638 }
1639
1640 /* NAND callbacks setup */
static void stm32_fmc2_nand_callbacks_setup(struct nand_chip *chip)
1642 {
1643 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1644
1645 /*
1646 * Specific callbacks to read/write a page depending on
1647 * the mode (polling/sequencer) and the algo used (Hamming, BCH).
1648 */
1649 if (fmc2->dma_tx_ch && fmc2->dma_rx_ch && fmc2->dma_ecc_ch) {
1650 /* DMA => use sequencer mode callbacks */
1651 chip->ecc.correct = stm32_fmc2_sequencer_correct;
1652 chip->ecc.write_page = stm32_fmc2_sequencer_write_page;
1653 chip->ecc.read_page = stm32_fmc2_sequencer_read_page;
1654 chip->ecc.write_page_raw = stm32_fmc2_sequencer_write_page_raw;
1655 chip->ecc.read_page_raw = stm32_fmc2_sequencer_read_page_raw;
1656 } else {
1657 /* No DMA => use polling mode callbacks */
1658 chip->ecc.hwctl = stm32_fmc2_hwctl;
1659 if (chip->ecc.strength == FMC2_ECC_HAM) {
1660 /* Hamming is used */
1661 chip->ecc.calculate = stm32_fmc2_ham_calculate;
1662 chip->ecc.correct = stm32_fmc2_ham_correct;
1663 chip->ecc.options |= NAND_ECC_GENERIC_ERASED_CHECK;
1664 } else {
1665 /* BCH is used */
1666 chip->ecc.calculate = stm32_fmc2_bch_calculate;
1667 chip->ecc.correct = stm32_fmc2_bch_correct;
1668 chip->ecc.read_page = stm32_fmc2_read_page;
1669 }
1670 }
1671
1672 /* Specific configurations depending on the algo used */
1673 if (chip->ecc.strength == FMC2_ECC_HAM)
1674 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 4 : 3;
1675 else if (chip->ecc.strength == FMC2_ECC_BCH8)
1676 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 14 : 13;
1677 else
1678 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 8 : 7;
1679 }
1680
1681 /* FMC2 layout */
static int stm32_fmc2_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
					 struct mtd_oob_region *oobregion)
1684 {
1685 struct nand_chip *chip = mtd_to_nand(mtd);
1686 struct nand_ecc_ctrl *ecc = &chip->ecc;
1687
1688 if (section)
1689 return -ERANGE;
1690
1691 oobregion->length = ecc->total;
1692 oobregion->offset = FMC2_BBM_LEN;
1693
1694 return 0;
1695 }
1696
static int stm32_fmc2_nand_ooblayout_free(struct mtd_info *mtd, int section,
					  struct mtd_oob_region *oobregion)
1699 {
1700 struct nand_chip *chip = mtd_to_nand(mtd);
1701 struct nand_ecc_ctrl *ecc = &chip->ecc;
1702
1703 if (section)
1704 return -ERANGE;
1705
1706 oobregion->length = mtd->oobsize - ecc->total - FMC2_BBM_LEN;
1707 oobregion->offset = ecc->total + FMC2_BBM_LEN;
1708
1709 return 0;
1710 }
1711
1712 static const struct mtd_ooblayout_ops stm32_fmc2_nand_ooblayout_ops = {
1713 .ecc = stm32_fmc2_nand_ooblayout_ecc,
1714 .free = stm32_fmc2_nand_ooblayout_free,
1715 };
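
/*
 * Worked example (8-bit bus, 2048-byte page, 64-byte OOB, BCH8): 4 steps of
 * 512 bytes need 4 * 13 = 52 ECC bytes placed at offset 2 (after the BBM),
 * leaving 64 - 2 - 52 = 10 free OOB bytes at offset 54.
 */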
1716
1717 /* FMC2 caps */
static int stm32_fmc2_calc_ecc_bytes(int step_size, int strength)
1719 {
1720 /* Hamming */
1721 if (strength == FMC2_ECC_HAM)
1722 return 4;
1723
1724 /* BCH8 */
1725 if (strength == FMC2_ECC_BCH8)
1726 return 14;
1727
1728 /* BCH4 */
1729 return 8;
1730 }
1731
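/*
 * ECC capabilities for a single step size of 512 bytes, used by
 * nand_ecc_choose_conf() in stm32_fmc2_attach_chip() to pick a strength
 * that fits in the available OOB area.
 */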
1732 NAND_ECC_CAPS_SINGLE(stm32_fmc2_ecc_caps, stm32_fmc2_calc_ecc_bytes,
1733 FMC2_ECC_STEP_SIZE,
1734 FMC2_ECC_HAM, FMC2_ECC_BCH4, FMC2_ECC_BCH8);
1735
1736 /* FMC2 controller ops */
static int stm32_fmc2_attach_chip(struct nand_chip *chip)
1738 {
1739 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1740 struct mtd_info *mtd = nand_to_mtd(chip);
1741 int ret;
1742
1743 /*
1744 * Only NAND_ECC_HW mode is actually supported
1745 * Hamming => ecc.strength = 1
1746 * BCH4 => ecc.strength = 4
1747 * BCH8 => ecc.strength = 8
1748 * ECC sector size = 512
1749 */
1750 if (chip->ecc.mode != NAND_ECC_HW) {
1751 dev_err(fmc2->dev, "nand_ecc_mode is not well defined in the DT\n");
1752 return -EINVAL;
1753 }
1754
1755 ret = nand_ecc_choose_conf(chip, &stm32_fmc2_ecc_caps,
1756 mtd->oobsize - FMC2_BBM_LEN);
1757 if (ret) {
1758 dev_err(fmc2->dev, "no valid ECC settings set\n");
1759 return ret;
1760 }
1761
1762 if (mtd->writesize / chip->ecc.size > FMC2_MAX_SG) {
1763 dev_err(fmc2->dev, "nand page size is not supported\n");
1764 return -EINVAL;
1765 }
1766
1767 if (chip->bbt_options & NAND_BBT_USE_FLASH)
1768 chip->bbt_options |= NAND_BBT_NO_OOB;
1769
1770 /* NAND callbacks setup */
1771 stm32_fmc2_nand_callbacks_setup(chip);
1772
1773 /* Define ECC layout */
1774 mtd_set_ooblayout(mtd, &stm32_fmc2_nand_ooblayout_ops);
1775
1776 /* Configure bus width to 16-bit */
1777 if (chip->options & NAND_BUSWIDTH_16)
1778 stm32_fmc2_set_buswidth_16(fmc2, true);
1779
1780 return 0;
1781 }
1782
1783 static const struct nand_controller_ops stm32_fmc2_nand_controller_ops = {
1784 .attach_chip = stm32_fmc2_attach_chip,
1785 .exec_op = stm32_fmc2_exec_op,
1786 .setup_data_interface = stm32_fmc2_setup_interface,
1787 };
1788
1789 /* FMC2 probe */
static int stm32_fmc2_parse_child(struct stm32_fmc2_nfc *fmc2,
				  struct device_node *dn)
1792 {
1793 struct stm32_fmc2_nand *nand = &fmc2->nand;
1794 u32 cs;
1795 int ret, i;
1796
1797 if (!of_get_property(dn, "reg", &nand->ncs))
1798 return -EINVAL;
1799
1800 nand->ncs /= sizeof(u32);
1801 if (!nand->ncs) {
1802 dev_err(fmc2->dev, "invalid reg property size\n");
1803 return -EINVAL;
1804 }
1805
1806 for (i = 0; i < nand->ncs; i++) {
1807 ret = of_property_read_u32_index(dn, "reg", i, &cs);
1808 if (ret) {
1809 dev_err(fmc2->dev, "could not retrieve reg property: %d\n",
1810 ret);
1811 return ret;
1812 }
1813
1814 if (cs > FMC2_MAX_CE) {
1815 dev_err(fmc2->dev, "invalid reg value: %d\n", cs);
1816 return -EINVAL;
1817 }
1818
1819 if (fmc2->cs_assigned & BIT(cs)) {
1820 dev_err(fmc2->dev, "cs already assigned: %d\n", cs);
1821 return -EINVAL;
1822 }
1823
1824 fmc2->cs_assigned |= BIT(cs);
1825 nand->cs_used[i] = cs;
1826 }
1827
1828 nand_set_flash_node(&nand->chip, dn);
1829
1830 return 0;
1831 }

static int stm32_fmc2_parse_dt(struct stm32_fmc2_nfc *fmc2)
{
	struct device_node *dn = fmc2->dev->of_node;
	struct device_node *child;
	int nchips = of_get_child_count(dn);
	int ret = 0;

	if (!nchips) {
		dev_err(fmc2->dev, "NAND chip not defined\n");
		return -EINVAL;
	}

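	/* Only a single NAND chip child node is supported */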
	if (nchips > 1) {
		dev_err(fmc2->dev, "too many NAND chips defined\n");
		return -EINVAL;
	}

	for_each_child_of_node(dn, child) {
		ret = stm32_fmc2_parse_child(fmc2, child);
		if (ret < 0) {
			of_node_put(child);
			return ret;
		}
	}

	return ret;
}

static int stm32_fmc2_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct reset_control *rstc;
	struct stm32_fmc2_nfc *fmc2;
	struct stm32_fmc2_nand *nand;
	struct resource *res;
	struct mtd_info *mtd;
	struct nand_chip *chip;
	int chip_cs, mem_region, ret, irq;

	fmc2 = devm_kzalloc(dev, sizeof(*fmc2), GFP_KERNEL);
	if (!fmc2)
		return -ENOMEM;

	fmc2->dev = dev;
	nand_controller_init(&fmc2->base);
	fmc2->base.ops = &stm32_fmc2_nand_controller_ops;

	ret = stm32_fmc2_parse_dt(fmc2);
	if (ret)
		return ret;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	fmc2->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(fmc2->io_base))
		return PTR_ERR(fmc2->io_base);

	fmc2->io_phys_addr = res->start;

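	/*
	 * Memory region 0 holds the FMC2 registers. Each assigned chip
	 * select then uses three regions, in order: data, command and
	 * address spaces.
	 */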
	for (chip_cs = 0, mem_region = 1; chip_cs < FMC2_MAX_CE;
	     chip_cs++, mem_region += 3) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))
			continue;

		res = platform_get_resource(pdev, IORESOURCE_MEM, mem_region);
		fmc2->data_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->data_base[chip_cs]))
			return PTR_ERR(fmc2->data_base[chip_cs]);

		fmc2->data_phys_addr[chip_cs] = res->start;

		res = platform_get_resource(pdev, IORESOURCE_MEM,
					    mem_region + 1);
		fmc2->cmd_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->cmd_base[chip_cs]))
			return PTR_ERR(fmc2->cmd_base[chip_cs]);

		res = platform_get_resource(pdev, IORESOURCE_MEM,
					    mem_region + 2);
		fmc2->addr_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->addr_base[chip_cs]))
			return PTR_ERR(fmc2->addr_base[chip_cs]);
	}

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		if (irq != -EPROBE_DEFER)
			dev_err(dev, "missing or invalid IRQ\n");
		return irq;
	}

	ret = devm_request_irq(dev, irq, stm32_fmc2_irq, 0,
			       dev_name(dev), fmc2);
	if (ret) {
		dev_err(dev, "failed to request irq\n");
		return ret;
	}

	init_completion(&fmc2->complete);

	fmc2->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(fmc2->clk))
		return PTR_ERR(fmc2->clk);

	ret = clk_prepare_enable(fmc2->clk);
	if (ret) {
		dev_err(dev, "cannot enable the clock\n");
		return ret;
	}

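	/* If a reset line is provided, toggle it to start from a clean state */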
	rstc = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rstc)) {
		reset_control_assert(rstc);
		reset_control_deassert(rstc);
	}

	/* DMA setup */
	ret = stm32_fmc2_dma_setup(fmc2);
	if (ret)
		goto err_release_dma;

	/* FMC2 init routine */
	stm32_fmc2_init(fmc2);

	nand = &fmc2->nand;
	chip = &nand->chip;
	mtd = nand_to_mtd(chip);
	mtd->dev.parent = dev;

	chip->controller = &fmc2->base;
	chip->options |= NAND_BUSWIDTH_AUTO | NAND_NO_SUBPAGE_WRITE |
			 NAND_USE_BOUNCE_BUFFER;

	/* Default ECC settings */
	chip->ecc.mode = NAND_ECC_HW;
	chip->ecc.size = FMC2_ECC_STEP_SIZE;
	chip->ecc.strength = FMC2_ECC_BCH8;

	/* Scan to find existence of the device */
	ret = nand_scan(chip, nand->ncs);
	if (ret)
		goto err_release_dma;

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret)
		goto err_nand_cleanup;

	platform_set_drvdata(pdev, fmc2);

	return 0;

err_nand_cleanup:
	nand_cleanup(chip);

err_release_dma:
	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);

	return ret;
}

static int stm32_fmc2_remove(struct platform_device *pdev)
{
	struct stm32_fmc2_nfc *fmc2 = platform_get_drvdata(pdev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;

	nand_release(&nand->chip);

	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);

	return 0;
}

static int __maybe_unused stm32_fmc2_suspend(struct device *dev)
{
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);

	clk_disable_unprepare(fmc2->clk);

	pinctrl_pm_select_sleep_state(dev);

	return 0;
}

static int __maybe_unused stm32_fmc2_resume(struct device *dev)
{
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;
	int chip_cs, ret;

	pinctrl_pm_select_default_state(dev);

	ret = clk_prepare_enable(fmc2->clk);
	if (ret) {
		dev_err(dev, "cannot enable the clock\n");
		return ret;
	}

	stm32_fmc2_init(fmc2);

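	/* Reset the NAND chip on every assigned chip select */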
	for (chip_cs = 0; chip_cs < FMC2_MAX_CE; chip_cs++) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))
			continue;

		nand_reset(&nand->chip, chip_cs);
	}

	return 0;
}

static SIMPLE_DEV_PM_OPS(stm32_fmc2_pm_ops, stm32_fmc2_suspend,
			 stm32_fmc2_resume);

static const struct of_device_id stm32_fmc2_match[] = {
	{.compatible = "st,stm32mp15-fmc2"},
	{}
};
MODULE_DEVICE_TABLE(of, stm32_fmc2_match);

static struct platform_driver stm32_fmc2_driver = {
	.probe = stm32_fmc2_probe,
	.remove = stm32_fmc2_remove,
	.driver = {
		.name = "stm32_fmc2_nand",
		.of_match_table = stm32_fmc2_match,
		.pm = &stm32_fmc2_pm_ops,
	},
};
module_platform_driver(stm32_fmc2_driver);

MODULE_ALIAS("platform:stm32_fmc2_nand");
MODULE_AUTHOR("Christophe Kerello <christophe.kerello@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 FMC2 NAND driver");
MODULE_LICENSE("GPL v2");