// SPDX-License-Identifier: GPL-2.0
//
// Driver for AT91 USART Controllers as SPI
//
// Copyright (C) 2018 Microchip Technology Inc.
//
// Author: Radu Pirea <radu.pirea@microchip.com>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/gpio/consumer.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <linux/spi/spi.h>

#define US_CR 0x00
#define US_MR 0x04
#define US_IER 0x08
#define US_IDR 0x0C
#define US_CSR 0x14
#define US_RHR 0x18
#define US_THR 0x1C
#define US_BRGR 0x20
#define US_VERSION 0xFC

#define US_CR_RSTRX BIT(2)
#define US_CR_RSTTX BIT(3)
#define US_CR_RXEN BIT(4)
#define US_CR_RXDIS BIT(5)
#define US_CR_TXEN BIT(6)
#define US_CR_TXDIS BIT(7)

#define US_MR_SPI_HOST 0x0E
#define US_MR_CHRL GENMASK(7, 6)
#define US_MR_CPHA BIT(8)
#define US_MR_CPOL BIT(16)
#define US_MR_CLKO BIT(18)
#define US_MR_WRDBT BIT(20)
#define US_MR_LOOP BIT(15)

#define US_IR_RXRDY BIT(0)
#define US_IR_TXRDY BIT(1)
#define US_IR_OVRE BIT(5)

#define US_BRGR_SIZE BIT(16)

#define US_MIN_CLK_DIV 0x06
#define US_MAX_CLK_DIV BIT(16)

#define US_RESET (US_CR_RSTRX | US_CR_RSTTX)
#define US_DISABLE (US_CR_RXDIS | US_CR_TXDIS)
#define US_ENABLE (US_CR_RXEN | US_CR_TXEN)
#define US_OVRE_RXRDY_IRQS (US_IR_OVRE | US_IR_RXRDY)

#define US_INIT \
        (US_MR_SPI_HOST | US_MR_CHRL | US_MR_CLKO | US_MR_WRDBT)
#define US_DMA_MIN_BYTES 16
#define US_DMA_TIMEOUT (msecs_to_jiffies(1000))

/* Register access macros */
#define at91_usart_spi_readl(port, reg) \
        readl_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writel(port, reg, value) \
        writel_relaxed((value), (port)->regs + US_##reg)

#define at91_usart_spi_readb(port, reg) \
        readb_relaxed((port)->regs + US_##reg)
#define at91_usart_spi_writeb(port, reg, value) \
        writeb_relaxed((value), (port)->regs + US_##reg)

struct at91_usart_spi {
        struct platform_device *mpdev;
        struct spi_transfer *current_transfer;
        void __iomem *regs;
        struct device *dev;
        struct clk *clk;

        struct completion xfer_completion;

        /* Used in the interrupt handler to protect data reads */
        spinlock_t lock;

        phys_addr_t phybase;

        int irq;
        unsigned int current_tx_remaining_bytes;
        unsigned int current_rx_remaining_bytes;

        u32 spi_clk;
        u32 status;

        bool xfer_failed;
        bool use_dma;
};

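/*
 * Completion callback of the RX DMA descriptor: the whole transfer has been
 * received, so re-enable the RXRDY interrupt, mark RX as finished and wake up
 * the thread waiting in at91_usart_spi_transfer_one().
 */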
static void dma_callback(void *data)
{
        struct spi_controller *ctlr = data;
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

        at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
        aus->current_rx_remaining_bytes = 0;
        complete(&aus->xfer_completion);
}

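/*
 * Use DMA only when the channels were set up successfully and the transfer is
 * long enough to be worth it; shorter transfers are handled by PIO.
 */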
static bool at91_usart_spi_can_dma(struct spi_controller *ctrl,
                                   struct spi_device *spi,
                                   struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);

        return aus->use_dma && xfer->len >= US_DMA_MIN_BYTES;
}

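/*
 * Request the "tx" and "rx" DMA channels of the parent USART device and
 * configure them for single-byte accesses to the THR/RHR data registers.
 * DMA is only used (aus->use_dma) once both channels are configured.
 */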
static int at91_usart_spi_configure_dma(struct spi_controller *ctlr,
                                        struct at91_usart_spi *aus)
{
        struct dma_slave_config slave_config;
        struct device *dev = &aus->mpdev->dev;
        phys_addr_t phybase = aus->phybase;
        dma_cap_mask_t mask;
        int err = 0;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        ctlr->dma_tx = dma_request_chan(dev, "tx");
        if (IS_ERR(ctlr->dma_tx)) {
                err = PTR_ERR(ctlr->dma_tx);
                goto at91_usart_spi_error_clear;
        }

        ctlr->dma_rx = dma_request_chan(dev, "rx");
        if (IS_ERR(ctlr->dma_rx)) {
                err = PTR_ERR(ctlr->dma_rx);
                goto at91_usart_spi_error;
        }

        slave_config.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        slave_config.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        slave_config.dst_addr = (dma_addr_t)phybase + US_THR;
        slave_config.src_addr = (dma_addr_t)phybase + US_RHR;
        slave_config.src_maxburst = 1;
        slave_config.dst_maxburst = 1;
        slave_config.device_fc = false;

        slave_config.direction = DMA_DEV_TO_MEM;
        if (dmaengine_slave_config(ctlr->dma_rx, &slave_config)) {
                dev_err(&ctlr->dev,
                        "failed to configure rx dma channel\n");
                err = -EINVAL;
                goto at91_usart_spi_error;
        }

        slave_config.direction = DMA_MEM_TO_DEV;
        if (dmaengine_slave_config(ctlr->dma_tx, &slave_config)) {
                dev_err(&ctlr->dev,
                        "failed to configure tx dma channel\n");
                err = -EINVAL;
                goto at91_usart_spi_error;
        }

        aus->use_dma = true;
        return 0;

at91_usart_spi_error:
        if (!IS_ERR_OR_NULL(ctlr->dma_tx))
                dma_release_channel(ctlr->dma_tx);
        if (!IS_ERR_OR_NULL(ctlr->dma_rx))
                dma_release_channel(ctlr->dma_rx);
        ctlr->dma_tx = NULL;
        ctlr->dma_rx = NULL;

at91_usart_spi_error_clear:
        return err;
}

static void at91_usart_spi_release_dma(struct spi_controller *ctlr)
{
        if (ctlr->dma_rx)
                dma_release_channel(ctlr->dma_rx);
        if (ctlr->dma_tx)
                dma_release_channel(ctlr->dma_tx);
}

static void at91_usart_spi_stop_dma(struct spi_controller *ctlr)
{
        if (ctlr->dma_rx)
                dmaengine_terminate_all(ctlr->dma_rx);
        if (ctlr->dma_tx)
                dmaengine_terminate_all(ctlr->dma_tx);
}

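/*
 * Map the transfer onto the RX and TX DMA channels. The RXRDY interrupt is
 * masked while DMA is in progress; completion is signalled by dma_callback()
 * on the RX channel. On any error the interrupt is restored and the caller
 * falls back to PIO.
 */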
static int at91_usart_spi_dma_transfer(struct spi_controller *ctlr,
                                       struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
        struct dma_chan *rxchan = ctlr->dma_rx;
        struct dma_chan *txchan = ctlr->dma_tx;
        struct dma_async_tx_descriptor *rxdesc;
        struct dma_async_tx_descriptor *txdesc;
        dma_cookie_t cookie;

        /* Disable RX interrupt */
        at91_usart_spi_writel(aus, IDR, US_IR_RXRDY);

        rxdesc = dmaengine_prep_slave_sg(rxchan,
                                         xfer->rx_sg.sgl,
                                         xfer->rx_sg.nents,
                                         DMA_DEV_TO_MEM,
                                         DMA_PREP_INTERRUPT |
                                         DMA_CTRL_ACK);
        if (!rxdesc)
                goto at91_usart_spi_err_dma;

        txdesc = dmaengine_prep_slave_sg(txchan,
                                         xfer->tx_sg.sgl,
                                         xfer->tx_sg.nents,
                                         DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT |
                                         DMA_CTRL_ACK);
        if (!txdesc)
                goto at91_usart_spi_err_dma;

        rxdesc->callback = dma_callback;
        rxdesc->callback_param = ctlr;

        cookie = rxdesc->tx_submit(rxdesc);
        if (dma_submit_error(cookie))
                goto at91_usart_spi_err_dma;

        cookie = txdesc->tx_submit(txdesc);
        if (dma_submit_error(cookie))
                goto at91_usart_spi_err_dma;

        rxchan->device->device_issue_pending(rxchan);
        txchan->device->device_issue_pending(txchan);

        return 0;

at91_usart_spi_err_dma:
        /* Re-enable the RX interrupt if something failed and fall back to PIO */
        at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
        at91_usart_spi_stop_dma(ctlr);

        return -ENOMEM;
}

static unsigned long at91_usart_spi_dma_timeout(struct at91_usart_spi *aus)
{
        return wait_for_completion_timeout(&aus->xfer_completion,
                                           US_DMA_TIMEOUT);
}

static inline u32 at91_usart_spi_tx_ready(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_TXRDY;
}

static inline u32 at91_usart_spi_rx_ready(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_RXRDY;
}

static inline u32 at91_usart_spi_check_overrun(struct at91_usart_spi *aus)
{
        return aus->status & US_IR_OVRE;
}

static inline u32 at91_usart_spi_read_status(struct at91_usart_spi *aus)
{
        aus->status = at91_usart_spi_readl(aus, CSR);
        return aus->status;
}

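/* PIO TX: push the next byte into THR once the transmitter is ready. */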
static inline void at91_usart_spi_tx(struct at91_usart_spi *aus)
{
        unsigned int len = aus->current_transfer->len;
        unsigned int remaining = aus->current_tx_remaining_bytes;
        const u8 *tx_buf = aus->current_transfer->tx_buf;

        if (!remaining)
                return;

        if (at91_usart_spi_tx_ready(aus)) {
                at91_usart_spi_writeb(aus, THR, tx_buf[len - remaining]);
                aus->current_tx_remaining_bytes--;
        }
}

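/* PIO RX: pull one received byte out of RHR (called from the IRQ handler). */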
static inline void at91_usart_spi_rx(struct at91_usart_spi *aus)
{
        int len = aus->current_transfer->len;
        int remaining = aus->current_rx_remaining_bytes;
        u8 *rx_buf = aus->current_transfer->rx_buf;

        if (!remaining)
                return;

        rx_buf[len - remaining] = at91_usart_spi_readb(aus, RHR);
        aus->current_rx_remaining_bytes--;
}

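/*
 * Program the baud rate generator. The divider is rounded up so the resulting
 * serial clock never exceeds the requested transfer speed.
 */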
static inline void
at91_usart_spi_set_xfer_speed(struct at91_usart_spi *aus,
                              struct spi_transfer *xfer)
{
        at91_usart_spi_writel(aus, BRGR,
                              DIV_ROUND_UP(aus->spi_clk, xfer->speed_hz));
}

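/*
 * IRQ handler: an overrun aborts the current transfer and masks further RX
 * interrupts, otherwise a pending byte is read out in PIO mode.
 */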
static irqreturn_t at91_usart_spi_interrupt(int irq, void *dev_id)
{
        struct spi_controller *controller = dev_id;
        struct at91_usart_spi *aus = spi_controller_get_devdata(controller);

        spin_lock(&aus->lock);
        at91_usart_spi_read_status(aus);

        if (at91_usart_spi_check_overrun(aus)) {
                aus->xfer_failed = true;
                at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
                spin_unlock(&aus->lock);
                return IRQ_HANDLED;
        }

        if (at91_usart_spi_rx_ready(aus)) {
                at91_usart_spi_rx(aus);
                spin_unlock(&aus->lock);
                return IRQ_HANDLED;
        }

        spin_unlock(&aus->lock);

        return IRQ_NONE;
}

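/*
 * Compute the mode register value (clock polarity/phase, loopback) for this
 * device and cache it in spi->controller_state; it is written to the hardware
 * in at91_usart_spi_prepare_message().
 */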
static int at91_usart_spi_setup(struct spi_device *spi)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(spi->controller);
        u32 *ausd = spi->controller_state;
        unsigned int mr = at91_usart_spi_readl(aus, MR);

        if (spi->mode & SPI_CPOL)
                mr |= US_MR_CPOL;
        else
                mr &= ~US_MR_CPOL;

        if (spi->mode & SPI_CPHA)
                mr |= US_MR_CPHA;
        else
                mr &= ~US_MR_CPHA;

        if (spi->mode & SPI_LOOP)
                mr |= US_MR_LOOP;
        else
                mr &= ~US_MR_LOOP;

        if (!ausd) {
                ausd = kzalloc(sizeof(*ausd), GFP_KERNEL);
                if (!ausd)
                        return -ENOMEM;

                spi->controller_state = ausd;
        }

        *ausd = mr;

        dev_dbg(&spi->dev,
                "setup: bpw %u mode 0x%x -> mr %d %08x\n",
                spi->bits_per_word, spi->mode, spi_get_chipselect(spi, 0), mr);

        return 0;
}

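/*
 * Per-transfer handler: program the clock divider, then either run the
 * transfer through DMA and wait for its completion, or shuttle the bytes by
 * PIO (TX here, RX from the interrupt handler) until everything has been
 * exchanged or an overrun has been flagged.
 */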
static int at91_usart_spi_transfer_one(struct spi_controller *ctlr,
                                       struct spi_device *spi,
                                       struct spi_transfer *xfer)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
        unsigned long dma_timeout = 0;
        int ret = 0;

        at91_usart_spi_set_xfer_speed(aus, xfer);
        aus->xfer_failed = false;
        aus->current_transfer = xfer;
        aus->current_tx_remaining_bytes = xfer->len;
        aus->current_rx_remaining_bytes = xfer->len;

        while ((aus->current_tx_remaining_bytes ||
                aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
                reinit_completion(&aus->xfer_completion);
                if (at91_usart_spi_can_dma(ctlr, spi, xfer) && !ret) {
                        ret = at91_usart_spi_dma_transfer(ctlr, xfer);
                        if (ret)
                                continue;

                        dma_timeout = at91_usart_spi_dma_timeout(aus);

                        if (WARN_ON(dma_timeout == 0)) {
                                dev_err(&spi->dev, "DMA transfer timeout\n");
                                return -EIO;
                        }
                        aus->current_tx_remaining_bytes = 0;
                } else {
                        at91_usart_spi_read_status(aus);
                        at91_usart_spi_tx(aus);
                }

                cpu_relax();
        }

        if (aus->xfer_failed) {
                dev_err(aus->dev, "Overrun!\n");
                return -EIO;
        }

        return 0;
}

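/*
 * Enable the transceiver and the RX/overrun interrupts and load the mode
 * register value cached by at91_usart_spi_setup() for the addressed device.
 */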
static int at91_usart_spi_prepare_message(struct spi_controller *ctlr,
                                          struct spi_message *message)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);
        struct spi_device *spi = message->spi;
        u32 *ausd = spi->controller_state;

        at91_usart_spi_writel(aus, CR, US_ENABLE);
        at91_usart_spi_writel(aus, IER, US_OVRE_RXRDY_IRQS);
        at91_usart_spi_writel(aus, MR, *ausd);

        return 0;
}

static int at91_usart_spi_unprepare_message(struct spi_controller *ctlr,
                                            struct spi_message *message)
{
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

        at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
        at91_usart_spi_writel(aus, IDR, US_OVRE_RXRDY_IRQS);

        return 0;
}

static void at91_usart_spi_cleanup(struct spi_device *spi)
{
        u32 *ausd = spi->controller_state;

        spi->controller_state = NULL;
        kfree(ausd);
}

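/*
 * Put the USART in SPI host mode (8-bit characters, clock driven on SCK) and
 * leave the transceiver reset and disabled until a message is prepared.
 */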
static void at91_usart_spi_init(struct at91_usart_spi *aus)
{
        at91_usart_spi_writel(aus, MR, US_INIT);
        at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
}

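/*
 * Claim the optional "cs" GPIOs described in the device tree; only a failing
 * request is treated as an error.
 */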
static int at91_usart_gpio_setup(struct platform_device *pdev)
{
        struct gpio_descs *cs_gpios;

        cs_gpios = devm_gpiod_get_array_optional(&pdev->dev, "cs", GPIOD_OUT_LOW);

        return PTR_ERR_OR_ZERO(cs_gpios);
}

static int at91_usart_spi_probe(struct platform_device *pdev)
{
        struct resource *regs;
        struct spi_controller *controller;
        struct at91_usart_spi *aus;
        struct clk *clk;
        int irq;
        int ret;

        regs = platform_get_resource(to_platform_device(pdev->dev.parent),
                                     IORESOURCE_MEM, 0);
        if (!regs)
                return -EINVAL;

        irq = platform_get_irq(to_platform_device(pdev->dev.parent), 0);
        if (irq < 0)
                return irq;

        clk = devm_clk_get(pdev->dev.parent, "usart");
        if (IS_ERR(clk))
                return PTR_ERR(clk);

        ret = -ENOMEM;
        controller = spi_alloc_host(&pdev->dev, sizeof(*aus));
        if (!controller)
                goto at91_usart_spi_probe_fail;

        ret = at91_usart_gpio_setup(pdev);
        if (ret)
                goto at91_usart_spi_probe_fail;

        controller->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LOOP | SPI_CS_HIGH;
        controller->dev.of_node = pdev->dev.parent->of_node;
        controller->bits_per_word_mask = SPI_BPW_MASK(8);
        controller->setup = at91_usart_spi_setup;
        controller->flags = SPI_CONTROLLER_MUST_RX | SPI_CONTROLLER_MUST_TX;
        controller->transfer_one = at91_usart_spi_transfer_one;
        controller->prepare_message = at91_usart_spi_prepare_message;
        controller->unprepare_message = at91_usart_spi_unprepare_message;
        controller->can_dma = at91_usart_spi_can_dma;
        controller->cleanup = at91_usart_spi_cleanup;
        controller->max_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
                                                US_MIN_CLK_DIV);
        controller->min_speed_hz = DIV_ROUND_UP(clk_get_rate(clk),
                                                US_MAX_CLK_DIV);
        platform_set_drvdata(pdev, controller);

        aus = spi_controller_get_devdata(controller);

        aus->dev = &pdev->dev;
        aus->regs = devm_ioremap_resource(&pdev->dev, regs);
        if (IS_ERR(aus->regs)) {
                ret = PTR_ERR(aus->regs);
                goto at91_usart_spi_probe_fail;
        }

        aus->irq = irq;
        aus->clk = clk;

        ret = devm_request_irq(&pdev->dev, irq, at91_usart_spi_interrupt, 0,
                               dev_name(&pdev->dev), controller);
        if (ret)
                goto at91_usart_spi_probe_fail;

        ret = clk_prepare_enable(clk);
        if (ret)
                goto at91_usart_spi_probe_fail;

        aus->spi_clk = clk_get_rate(clk);
        at91_usart_spi_init(aus);

        aus->phybase = regs->start;

        aus->mpdev = to_platform_device(pdev->dev.parent);

        ret = at91_usart_spi_configure_dma(controller, aus);
        if (ret)
                goto at91_usart_fail_dma;

        spin_lock_init(&aus->lock);
        init_completion(&aus->xfer_completion);

        ret = devm_spi_register_controller(&pdev->dev, controller);
        if (ret)
                goto at91_usart_fail_register_controller;

        dev_info(&pdev->dev,
                 "AT91 USART SPI Controller version 0x%x at %pa (irq %d)\n",
                 at91_usart_spi_readl(aus, VERSION),
                 &regs->start, irq);

        return 0;

at91_usart_fail_register_controller:
        at91_usart_spi_release_dma(controller);
at91_usart_fail_dma:
        clk_disable_unprepare(clk);
at91_usart_spi_probe_fail:
        spi_controller_put(controller);
        return ret;
}

__maybe_unused static int at91_usart_spi_runtime_suspend(struct device *dev)
{
        struct spi_controller *ctlr = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

        clk_disable_unprepare(aus->clk);
        pinctrl_pm_select_sleep_state(dev);

        return 0;
}

__maybe_unused static int at91_usart_spi_runtime_resume(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);

        pinctrl_pm_select_default_state(dev);

        return clk_prepare_enable(aus->clk);
}

__maybe_unused static int at91_usart_spi_suspend(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        int ret;

        ret = spi_controller_suspend(ctrl);
        if (ret)
                return ret;

        if (!pm_runtime_suspended(dev))
                at91_usart_spi_runtime_suspend(dev);

        return 0;
}

__maybe_unused static int at91_usart_spi_resume(struct device *dev)
{
        struct spi_controller *ctrl = dev_get_drvdata(dev);
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctrl);
        int ret;

        if (!pm_runtime_suspended(dev)) {
                ret = at91_usart_spi_runtime_resume(dev);
                if (ret)
                        return ret;
        }

        at91_usart_spi_init(aus);

        return spi_controller_resume(ctrl);
}

static void at91_usart_spi_remove(struct platform_device *pdev)
{
        struct spi_controller *ctlr = platform_get_drvdata(pdev);
        struct at91_usart_spi *aus = spi_controller_get_devdata(ctlr);

        at91_usart_spi_release_dma(ctlr);
        clk_disable_unprepare(aus->clk);
}

static const struct dev_pm_ops at91_usart_spi_pm_ops = {
        SET_SYSTEM_SLEEP_PM_OPS(at91_usart_spi_suspend, at91_usart_spi_resume)
        SET_RUNTIME_PM_OPS(at91_usart_spi_runtime_suspend,
                           at91_usart_spi_runtime_resume, NULL)
};

static struct platform_driver at91_usart_spi_driver = {
        .driver = {
                .name = "at91_usart_spi",
                .pm = &at91_usart_spi_pm_ops,
        },
        .probe = at91_usart_spi_probe,
        .remove_new = at91_usart_spi_remove,
};

module_platform_driver(at91_usart_spi_driver);

MODULE_DESCRIPTION("Microchip AT91 USART SPI Controller driver");
MODULE_AUTHOR("Radu Pirea <radu.pirea@microchip.com>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:at91_usart_spi");