2 * STMicroelectronics STM32 SPI Controller driver (master mode only)
4 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
5 * Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics.
7 * License terms: GPL V2.0.
9 * spi_stm32 driver is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License version 2 as published by
11 * the Free Software Foundation.
13 * spi_stm32 driver is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
18 * You should have received a copy of the GNU General Public License along with
19 * spi_stm32 driver. If not, see <http://www.gnu.org/licenses/>.
21 #include <linux/debugfs.h>
22 #include <linux/clk.h>
23 #include <linux/delay.h>
24 #include <linux/dmaengine.h>
25 #include <linux/gpio.h>
26 #include <linux/interrupt.h>
27 #include <linux/iopoll.h>
28 #include <linux/module.h>
29 #include <linux/of_platform.h>
30 #include <linux/reset.h>
31 #include <linux/spi/spi.h>
#define DRIVER_NAME "spi_stm32"

/* STM32 SPI controller register offsets */
#define STM32_SPI_CR1		0x00
#define STM32_SPI_CR2		0x04
#define STM32_SPI_CFG1		0x08
#define STM32_SPI_CFG2		0x0C
#define STM32_SPI_IER		0x10
#define STM32_SPI_SR		0x14
#define STM32_SPI_IFCR		0x18
#define STM32_SPI_TXDR		0x20
#define STM32_SPI_RXDR		0x30
#define STM32_SPI_I2SCFGR	0x50

/* STM32_SPI_CR1 bit fields */
#define SPI_CR1_SPE		BIT(0)
#define SPI_CR1_MASRX		BIT(8)
#define SPI_CR1_CSTART		BIT(9)
#define SPI_CR1_CSUSP		BIT(10)
#define SPI_CR1_HDDIR		BIT(11)
#define SPI_CR1_SSI		BIT(12)

/* STM32_SPI_CR2 bit fields */
#define SPI_CR2_TSIZE_SHIFT	0
#define SPI_CR2_TSIZE		GENMASK(15, 0)

/* STM32_SPI_CFG1 bit fields */
#define SPI_CFG1_DSIZE_SHIFT	0
#define SPI_CFG1_DSIZE		GENMASK(4, 0)
#define SPI_CFG1_FTHLV_SHIFT	5
#define SPI_CFG1_FTHLV		GENMASK(8, 5)
#define SPI_CFG1_RXDMAEN	BIT(14)
#define SPI_CFG1_TXDMAEN	BIT(15)
#define SPI_CFG1_MBR_SHIFT	28
#define SPI_CFG1_MBR		GENMASK(30, 28)
#define SPI_CFG1_MBR_MIN	0
#define SPI_CFG1_MBR_MAX	(GENMASK(30, 28) >> 28)

/* STM32_SPI_CFG2 bit fields */
#define SPI_CFG2_MIDI_SHIFT	4
#define SPI_CFG2_MIDI		GENMASK(7, 4)
#define SPI_CFG2_COMM_SHIFT	17
#define SPI_CFG2_COMM		GENMASK(18, 17)
#define SPI_CFG2_SP_SHIFT	19
#define SPI_CFG2_SP		GENMASK(21, 19)
#define SPI_CFG2_MASTER		BIT(22)
#define SPI_CFG2_LSBFRST	BIT(23)
#define SPI_CFG2_CPHA		BIT(24)
#define SPI_CFG2_CPOL		BIT(25)
#define SPI_CFG2_SSM		BIT(26)
#define SPI_CFG2_AFCNTR		BIT(31)

/* STM32_SPI_IER bit fields */
#define SPI_IER_RXPIE		BIT(0)
#define SPI_IER_TXPIE		BIT(1)
#define SPI_IER_DXPIE		BIT(2)
#define SPI_IER_EOTIE		BIT(3)
#define SPI_IER_TXTFIE		BIT(4)
#define SPI_IER_OVRIE		BIT(6)
#define SPI_IER_MODFIE		BIT(9)
#define SPI_IER_ALL		GENMASK(10, 0)

/* STM32_SPI_SR bit fields */
#define SPI_SR_RXP		BIT(0)
#define SPI_SR_TXP		BIT(1)
#define SPI_SR_EOT		BIT(3)
#define SPI_SR_OVR		BIT(6)
#define SPI_SR_MODF		BIT(9)
#define SPI_SR_SUSP		BIT(11)
#define SPI_SR_RXPLVL_SHIFT	13
#define SPI_SR_RXPLVL		GENMASK(14, 13)
#define SPI_SR_RXWNE		BIT(15)

/* STM32_SPI_IFCR bit fields */
#define SPI_IFCR_ALL		GENMASK(11, 3)

/* STM32_SPI_I2SCFGR bit fields */
#define SPI_I2SCFGR_I2SMOD	BIT(0)

/* SPI Master Baud Rate min/max divisor */
#define SPI_MBR_DIV_MIN		(2 << SPI_CFG1_MBR_MIN)
#define SPI_MBR_DIV_MAX		(2 << SPI_CFG1_MBR_MAX)

/* SPI Communication mode */
#define SPI_FULL_DUPLEX		0
#define SPI_SIMPLEX_TX		1
#define SPI_SIMPLEX_RX		2
#define SPI_HALF_DUPLEX		3

#define SPI_1HZ_NS		1000000000
125 * struct stm32_spi - private data of the SPI controller
126 * @dev: driver model representation of the controller
127 * @master: controller master interface
128 * @base: virtual memory area
129 * @clk: hw kernel clock feeding the SPI clock generator
130 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
131 * @rst: SPI controller reset line
132 * @lock: prevent I/O concurrent access
133 * @irq: SPI controller interrupt line
134 * @fifo_size: size of the embedded fifo in bytes
135 * @cur_midi: master inter-data idleness in ns
136 * @cur_speed: speed configured in Hz
137 * @cur_bpw: number of bits in a single SPI data frame
138 * @cur_fthlv: fifo threshold level (data frames in a single data packet)
139 * @cur_comm: SPI communication mode
140 * @cur_xferlen: current transfer length in bytes
141 * @cur_usedma: boolean to know if dma is used in current transfer
142 * @tx_buf: data to be written, or NULL
143 * @rx_buf: data to be read, or NULL
144 * @tx_len: number of data to be written in bytes
145 * @rx_len: number of data to be read in bytes
146 * @dma_tx: dma channel for TX transfer
147 * @dma_rx: dma channel for RX transfer
148 * @phys_addr: SPI registers physical base address
152 struct spi_master
*master
;
156 struct reset_control
*rst
;
157 spinlock_t lock
; /* prevent I/O concurrent access */
159 unsigned int fifo_size
;
161 unsigned int cur_midi
;
162 unsigned int cur_speed
;
163 unsigned int cur_bpw
;
164 unsigned int cur_fthlv
;
165 unsigned int cur_comm
;
166 unsigned int cur_xferlen
;
173 struct dma_chan
*dma_tx
;
174 struct dma_chan
*dma_rx
;
175 dma_addr_t phys_addr
;
178 static inline void stm32_spi_set_bits(struct stm32_spi
*spi
,
179 u32 offset
, u32 bits
)
181 writel_relaxed(readl_relaxed(spi
->base
+ offset
) | bits
,
185 static inline void stm32_spi_clr_bits(struct stm32_spi
*spi
,
186 u32 offset
, u32 bits
)
188 writel_relaxed(readl_relaxed(spi
->base
+ offset
) & ~bits
,
193 * stm32_spi_get_fifo_size - Return fifo size
194 * @spi: pointer to the spi controller data structure
196 static int stm32_spi_get_fifo_size(struct stm32_spi
*spi
)
201 spin_lock_irqsave(&spi
->lock
, flags
);
203 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_SPE
);
205 while (readl_relaxed(spi
->base
+ STM32_SPI_SR
) & SPI_SR_TXP
)
206 writeb_relaxed(++count
, spi
->base
+ STM32_SPI_TXDR
);
208 stm32_spi_clr_bits(spi
, STM32_SPI_CR1
, SPI_CR1_SPE
);
210 spin_unlock_irqrestore(&spi
->lock
, flags
);
212 dev_dbg(spi
->dev
, "%d x 8-bit fifo size\n", count
);
218 * stm32_spi_get_bpw_mask - Return bits per word mask
219 * @spi: pointer to the spi controller data structure
221 static int stm32_spi_get_bpw_mask(struct stm32_spi
*spi
)
226 spin_lock_irqsave(&spi
->lock
, flags
);
229 * The most significant bit at DSIZE bit field is reserved when the
230 * maximum data size of periperal instances is limited to 16-bit
232 stm32_spi_set_bits(spi
, STM32_SPI_CFG1
, SPI_CFG1_DSIZE
);
234 cfg1
= readl_relaxed(spi
->base
+ STM32_SPI_CFG1
);
235 max_bpw
= (cfg1
& SPI_CFG1_DSIZE
) >> SPI_CFG1_DSIZE_SHIFT
;
238 spin_unlock_irqrestore(&spi
->lock
, flags
);
240 dev_dbg(spi
->dev
, "%d-bit maximum data frame\n", max_bpw
);
242 return SPI_BPW_RANGE_MASK(4, max_bpw
);
246 * stm32_spi_prepare_mbr - Determine SPI_CFG1.MBR value
247 * @spi: pointer to the spi controller data structure
248 * @speed_hz: requested speed
250 * Return SPI_CFG1.MBR value in case of success or -EINVAL
252 static int stm32_spi_prepare_mbr(struct stm32_spi
*spi
, u32 speed_hz
)
256 div
= DIV_ROUND_UP(spi
->clk_rate
, speed_hz
);
259 * SPI framework set xfer->speed_hz to master->max_speed_hz if
260 * xfer->speed_hz is greater than master->max_speed_hz, and it returns
261 * an error when xfer->speed_hz is lower than master->min_speed_hz, so
262 * no need to check it there.
263 * However, we need to ensure the following calculations.
265 if ((div
< SPI_MBR_DIV_MIN
) &&
266 (div
> SPI_MBR_DIV_MAX
))
269 /* Determine the first power of 2 greater than or equal to div */
273 mbrdiv
= fls(div
) - 1;
275 spi
->cur_speed
= spi
->clk_rate
/ (1 << mbrdiv
);
281 * stm32_spi_prepare_fthlv - Determine FIFO threshold level
282 * @spi: pointer to the spi controller data structure
284 static u32
stm32_spi_prepare_fthlv(struct stm32_spi
*spi
)
286 u32 fthlv
, half_fifo
;
288 /* data packet should not exceed 1/2 of fifo space */
289 half_fifo
= (spi
->fifo_size
/ 2);
291 if (spi
->cur_bpw
<= 8)
293 else if (spi
->cur_bpw
<= 16)
294 fthlv
= half_fifo
/ 2;
296 fthlv
= half_fifo
/ 4;
298 /* align packet size with data registers access */
299 if (spi
->cur_bpw
> 8)
300 fthlv
-= (fthlv
% 2); /* multiple of 2 */
302 fthlv
-= (fthlv
% 4); /* multiple of 4 */
308 * stm32_spi_write_txfifo - Write bytes in Transmit Data Register
309 * @spi: pointer to the spi controller data structure
311 * Read from tx_buf depends on remaining bytes to avoid to read beyond
314 static void stm32_spi_write_txfifo(struct stm32_spi
*spi
)
316 while ((spi
->tx_len
> 0) &&
317 (readl_relaxed(spi
->base
+ STM32_SPI_SR
) & SPI_SR_TXP
)) {
318 u32 offs
= spi
->cur_xferlen
- spi
->tx_len
;
320 if (spi
->tx_len
>= sizeof(u32
)) {
321 const u32
*tx_buf32
= (const u32
*)(spi
->tx_buf
+ offs
);
323 writel_relaxed(*tx_buf32
, spi
->base
+ STM32_SPI_TXDR
);
324 spi
->tx_len
-= sizeof(u32
);
325 } else if (spi
->tx_len
>= sizeof(u16
)) {
326 const u16
*tx_buf16
= (const u16
*)(spi
->tx_buf
+ offs
);
328 writew_relaxed(*tx_buf16
, spi
->base
+ STM32_SPI_TXDR
);
329 spi
->tx_len
-= sizeof(u16
);
331 const u8
*tx_buf8
= (const u8
*)(spi
->tx_buf
+ offs
);
333 writeb_relaxed(*tx_buf8
, spi
->base
+ STM32_SPI_TXDR
);
334 spi
->tx_len
-= sizeof(u8
);
338 dev_dbg(spi
->dev
, "%s: %d bytes left\n", __func__
, spi
->tx_len
);
342 * stm32_spi_read_rxfifo - Read bytes in Receive Data Register
343 * @spi: pointer to the spi controller data structure
345 * Write in rx_buf depends on remaining bytes to avoid to write beyond
348 static void stm32_spi_read_rxfifo(struct stm32_spi
*spi
, bool flush
)
350 u32 sr
= readl_relaxed(spi
->base
+ STM32_SPI_SR
);
351 u32 rxplvl
= (sr
& SPI_SR_RXPLVL
) >> SPI_SR_RXPLVL_SHIFT
;
353 while ((spi
->rx_len
> 0) &&
354 ((sr
& SPI_SR_RXP
) ||
355 (flush
&& ((sr
& SPI_SR_RXWNE
) || (rxplvl
> 0))))) {
356 u32 offs
= spi
->cur_xferlen
- spi
->rx_len
;
358 if ((spi
->rx_len
>= sizeof(u32
)) ||
359 (flush
&& (sr
& SPI_SR_RXWNE
))) {
360 u32
*rx_buf32
= (u32
*)(spi
->rx_buf
+ offs
);
362 *rx_buf32
= readl_relaxed(spi
->base
+ STM32_SPI_RXDR
);
363 spi
->rx_len
-= sizeof(u32
);
364 } else if ((spi
->rx_len
>= sizeof(u16
)) ||
365 (flush
&& (rxplvl
>= 2 || spi
->cur_bpw
> 8))) {
366 u16
*rx_buf16
= (u16
*)(spi
->rx_buf
+ offs
);
368 *rx_buf16
= readw_relaxed(spi
->base
+ STM32_SPI_RXDR
);
369 spi
->rx_len
-= sizeof(u16
);
371 u8
*rx_buf8
= (u8
*)(spi
->rx_buf
+ offs
);
373 *rx_buf8
= readb_relaxed(spi
->base
+ STM32_SPI_RXDR
);
374 spi
->rx_len
-= sizeof(u8
);
377 sr
= readl_relaxed(spi
->base
+ STM32_SPI_SR
);
378 rxplvl
= (sr
& SPI_SR_RXPLVL
) >> SPI_SR_RXPLVL_SHIFT
;
381 dev_dbg(spi
->dev
, "%s%s: %d bytes left\n", __func__
,
382 flush
? "(flush)" : "", spi
->rx_len
);
386 * stm32_spi_enable - Enable SPI controller
387 * @spi: pointer to the spi controller data structure
389 * SPI data transfer is enabled but spi_ker_ck is idle.
390 * SPI_CFG1 and SPI_CFG2 are now write protected.
392 static void stm32_spi_enable(struct stm32_spi
*spi
)
394 dev_dbg(spi
->dev
, "enable controller\n");
396 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_SPE
);
400 * stm32_spi_disable - Disable SPI controller
401 * @spi: pointer to the spi controller data structure
403 * RX-Fifo is flushed when SPI controller is disabled. To prevent any data
404 * loss, use stm32_spi_read_rxfifo(flush) to read the remaining bytes in
407 static void stm32_spi_disable(struct stm32_spi
*spi
)
412 dev_dbg(spi
->dev
, "disable controller\n");
414 spin_lock_irqsave(&spi
->lock
, flags
);
416 cr1
= readl_relaxed(spi
->base
+ STM32_SPI_CR1
);
418 if (!(cr1
& SPI_CR1_SPE
)) {
419 spin_unlock_irqrestore(&spi
->lock
, flags
);
423 /* Wait on EOT or suspend the flow */
424 if (readl_relaxed_poll_timeout_atomic(spi
->base
+ STM32_SPI_SR
,
425 sr
, !(sr
& SPI_SR_EOT
),
427 if (cr1
& SPI_CR1_CSTART
) {
428 writel_relaxed(cr1
| SPI_CR1_CSUSP
,
429 spi
->base
+ STM32_SPI_CR1
);
430 if (readl_relaxed_poll_timeout_atomic(
431 spi
->base
+ STM32_SPI_SR
,
432 sr
, !(sr
& SPI_SR_SUSP
),
435 "Suspend request timeout\n");
439 if (!spi
->cur_usedma
&& spi
->rx_buf
&& (spi
->rx_len
> 0))
440 stm32_spi_read_rxfifo(spi
, true);
442 if (spi
->cur_usedma
&& spi
->tx_buf
)
443 dmaengine_terminate_all(spi
->dma_tx
);
444 if (spi
->cur_usedma
&& spi
->rx_buf
)
445 dmaengine_terminate_all(spi
->dma_rx
);
447 stm32_spi_clr_bits(spi
, STM32_SPI_CR1
, SPI_CR1_SPE
);
449 stm32_spi_clr_bits(spi
, STM32_SPI_CFG1
, SPI_CFG1_TXDMAEN
|
452 /* Disable interrupts and clear status flags */
453 writel_relaxed(0, spi
->base
+ STM32_SPI_IER
);
454 writel_relaxed(SPI_IFCR_ALL
, spi
->base
+ STM32_SPI_IFCR
);
456 spin_unlock_irqrestore(&spi
->lock
, flags
);
460 * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
462 * If the current transfer size is greater than fifo size, use DMA.
464 static bool stm32_spi_can_dma(struct spi_master
*master
,
465 struct spi_device
*spi_dev
,
466 struct spi_transfer
*transfer
)
468 struct stm32_spi
*spi
= spi_master_get_devdata(master
);
470 dev_dbg(spi
->dev
, "%s: %s\n", __func__
,
471 (transfer
->len
> spi
->fifo_size
) ? "true" : "false");
473 return (transfer
->len
> spi
->fifo_size
);
477 * stm32_spi_irq - Interrupt handler for SPI controller events
478 * @irq: interrupt line
479 * @dev_id: SPI controller master interface
481 static irqreturn_t
stm32_spi_irq(int irq
, void *dev_id
)
483 struct spi_master
*master
= dev_id
;
484 struct stm32_spi
*spi
= spi_master_get_devdata(master
);
489 spin_lock_irqsave(&spi
->lock
, flags
);
491 sr
= readl_relaxed(spi
->base
+ STM32_SPI_SR
);
492 ier
= readl_relaxed(spi
->base
+ STM32_SPI_IER
);
495 /* EOTIE is triggered on EOT, SUSP and TXC events. */
498 * When TXTF is set, DXPIE and TXPIE are cleared. So in case of
499 * Full-Duplex, need to poll RXP event to know if there are remaining
500 * data, before disabling SPI.
502 if (spi
->rx_buf
&& !spi
->cur_usedma
)
506 dev_dbg(spi
->dev
, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
508 spin_unlock_irqrestore(&spi
->lock
, flags
);
512 if (sr
& SPI_SR_SUSP
) {
513 dev_warn(spi
->dev
, "Communication suspended\n");
514 if (!spi
->cur_usedma
&& (spi
->rx_buf
&& (spi
->rx_len
> 0)))
515 stm32_spi_read_rxfifo(spi
, false);
518 if (sr
& SPI_SR_MODF
) {
519 dev_warn(spi
->dev
, "Mode fault: transfer aborted\n");
523 if (sr
& SPI_SR_OVR
) {
524 dev_warn(spi
->dev
, "Overrun: received value discarded\n");
525 if (!spi
->cur_usedma
&& (spi
->rx_buf
&& (spi
->rx_len
> 0)))
526 stm32_spi_read_rxfifo(spi
, false);
529 if (sr
& SPI_SR_EOT
) {
530 if (!spi
->cur_usedma
&& (spi
->rx_buf
&& (spi
->rx_len
> 0)))
531 stm32_spi_read_rxfifo(spi
, true);
536 if (!spi
->cur_usedma
&& (spi
->tx_buf
&& (spi
->tx_len
> 0)))
537 stm32_spi_write_txfifo(spi
);
540 if (!spi
->cur_usedma
&& (spi
->rx_buf
&& (spi
->rx_len
> 0)))
541 stm32_spi_read_rxfifo(spi
, false);
543 writel_relaxed(mask
, spi
->base
+ STM32_SPI_IFCR
);
545 spin_unlock_irqrestore(&spi
->lock
, flags
);
548 spi_finalize_current_transfer(master
);
549 stm32_spi_disable(spi
);
556 * stm32_spi_setup - setup device chip select
558 static int stm32_spi_setup(struct spi_device
*spi_dev
)
562 if (!gpio_is_valid(spi_dev
->cs_gpio
)) {
563 dev_err(&spi_dev
->dev
, "%d is not a valid gpio\n",
568 dev_dbg(&spi_dev
->dev
, "%s: set gpio%d output %s\n", __func__
,
570 (spi_dev
->mode
& SPI_CS_HIGH
) ? "low" : "high");
572 ret
= gpio_direction_output(spi_dev
->cs_gpio
,
573 !(spi_dev
->mode
& SPI_CS_HIGH
));
579 * stm32_spi_prepare_msg - set up the controller to transfer a single message
581 static int stm32_spi_prepare_msg(struct spi_master
*master
,
582 struct spi_message
*msg
)
584 struct stm32_spi
*spi
= spi_master_get_devdata(master
);
585 struct spi_device
*spi_dev
= msg
->spi
;
586 struct device_node
*np
= spi_dev
->dev
.of_node
;
588 u32 cfg2_clrb
= 0, cfg2_setb
= 0;
590 /* SPI slave device may need time between data frames */
592 if (np
&& !of_property_read_u32(np
, "st,spi-midi-ns", &spi
->cur_midi
))
593 dev_dbg(spi
->dev
, "%dns inter-data idleness\n", spi
->cur_midi
);
595 if (spi_dev
->mode
& SPI_CPOL
)
596 cfg2_setb
|= SPI_CFG2_CPOL
;
598 cfg2_clrb
|= SPI_CFG2_CPOL
;
600 if (spi_dev
->mode
& SPI_CPHA
)
601 cfg2_setb
|= SPI_CFG2_CPHA
;
603 cfg2_clrb
|= SPI_CFG2_CPHA
;
605 if (spi_dev
->mode
& SPI_LSB_FIRST
)
606 cfg2_setb
|= SPI_CFG2_LSBFRST
;
608 cfg2_clrb
|= SPI_CFG2_LSBFRST
;
610 dev_dbg(spi
->dev
, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n",
611 spi_dev
->mode
& SPI_CPOL
,
612 spi_dev
->mode
& SPI_CPHA
,
613 spi_dev
->mode
& SPI_LSB_FIRST
,
614 spi_dev
->mode
& SPI_CS_HIGH
);
616 spin_lock_irqsave(&spi
->lock
, flags
);
618 if (cfg2_clrb
|| cfg2_setb
)
620 (readl_relaxed(spi
->base
+ STM32_SPI_CFG2
) &
621 ~cfg2_clrb
) | cfg2_setb
,
622 spi
->base
+ STM32_SPI_CFG2
);
624 spin_unlock_irqrestore(&spi
->lock
, flags
);
630 * stm32_spi_dma_cb - dma callback
632 * DMA callback is called when the transfer is complete or when an error
633 * occurs. If the transfer is complete, EOT flag is raised.
635 static void stm32_spi_dma_cb(void *data
)
637 struct stm32_spi
*spi
= data
;
641 spin_lock_irqsave(&spi
->lock
, flags
);
643 sr
= readl_relaxed(spi
->base
+ STM32_SPI_SR
);
645 spin_unlock_irqrestore(&spi
->lock
, flags
);
647 if (!(sr
& SPI_SR_EOT
)) {
648 dev_warn(spi
->dev
, "DMA callback (sr=0x%08x)\n", sr
);
650 spi_finalize_current_transfer(spi
->master
);
651 stm32_spi_disable(spi
);
656 * stm32_spi_dma_config - configure dma slave channel depending on current
657 * transfer bits_per_word.
659 static void stm32_spi_dma_config(struct stm32_spi
*spi
,
660 struct dma_slave_config
*dma_conf
,
661 enum dma_transfer_direction dir
)
663 enum dma_slave_buswidth buswidth
;
666 if (spi
->cur_bpw
<= 8)
667 buswidth
= DMA_SLAVE_BUSWIDTH_1_BYTE
;
668 else if (spi
->cur_bpw
<= 16)
669 buswidth
= DMA_SLAVE_BUSWIDTH_2_BYTES
;
671 buswidth
= DMA_SLAVE_BUSWIDTH_4_BYTES
;
673 /* Valid for DMA Half or Full Fifo threshold */
674 if (spi
->cur_fthlv
== 2)
677 maxburst
= spi
->cur_fthlv
;
679 memset(dma_conf
, 0, sizeof(struct dma_slave_config
));
680 dma_conf
->direction
= dir
;
681 if (dma_conf
->direction
== DMA_DEV_TO_MEM
) { /* RX */
682 dma_conf
->src_addr
= spi
->phys_addr
+ STM32_SPI_RXDR
;
683 dma_conf
->src_addr_width
= buswidth
;
684 dma_conf
->src_maxburst
= maxburst
;
686 dev_dbg(spi
->dev
, "Rx DMA config buswidth=%d, maxburst=%d\n",
688 } else if (dma_conf
->direction
== DMA_MEM_TO_DEV
) { /* TX */
689 dma_conf
->dst_addr
= spi
->phys_addr
+ STM32_SPI_TXDR
;
690 dma_conf
->dst_addr_width
= buswidth
;
691 dma_conf
->dst_maxburst
= maxburst
;
693 dev_dbg(spi
->dev
, "Tx DMA config buswidth=%d, maxburst=%d\n",
699 * stm32_spi_transfer_one_irq - transfer a single spi_transfer using
702 * It must returns 0 if the transfer is finished or 1 if the transfer is still
705 static int stm32_spi_transfer_one_irq(struct stm32_spi
*spi
)
710 /* Enable the interrupts relative to the current communication mode */
711 if (spi
->tx_buf
&& spi
->rx_buf
) /* Full Duplex */
712 ier
|= SPI_IER_DXPIE
;
713 else if (spi
->tx_buf
) /* Half-Duplex TX dir or Simplex TX */
714 ier
|= SPI_IER_TXPIE
;
715 else if (spi
->rx_buf
) /* Half-Duplex RX dir or Simplex RX */
716 ier
|= SPI_IER_RXPIE
;
718 /* Enable the interrupts relative to the end of transfer */
719 ier
|= SPI_IER_EOTIE
| SPI_IER_TXTFIE
| SPI_IER_OVRIE
| SPI_IER_MODFIE
;
721 spin_lock_irqsave(&spi
->lock
, flags
);
723 stm32_spi_enable(spi
);
725 /* Be sure to have data in fifo before starting data transfer */
727 stm32_spi_write_txfifo(spi
);
729 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_CSTART
);
731 writel_relaxed(ier
, spi
->base
+ STM32_SPI_IER
);
733 spin_unlock_irqrestore(&spi
->lock
, flags
);
739 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
741 * It must returns 0 if the transfer is finished or 1 if the transfer is still
744 static int stm32_spi_transfer_one_dma(struct stm32_spi
*spi
,
745 struct spi_transfer
*xfer
)
747 struct dma_slave_config tx_dma_conf
, rx_dma_conf
;
748 struct dma_async_tx_descriptor
*tx_dma_desc
, *rx_dma_desc
;
752 spin_lock_irqsave(&spi
->lock
, flags
);
756 stm32_spi_dma_config(spi
, &rx_dma_conf
, DMA_DEV_TO_MEM
);
757 dmaengine_slave_config(spi
->dma_rx
, &rx_dma_conf
);
759 /* Enable Rx DMA request */
760 stm32_spi_set_bits(spi
, STM32_SPI_CFG1
, SPI_CFG1_RXDMAEN
);
762 rx_dma_desc
= dmaengine_prep_slave_sg(
763 spi
->dma_rx
, xfer
->rx_sg
.sgl
,
765 rx_dma_conf
.direction
,
768 rx_dma_desc
->callback
= stm32_spi_dma_cb
;
769 rx_dma_desc
->callback_param
= spi
;
774 stm32_spi_dma_config(spi
, &tx_dma_conf
, DMA_MEM_TO_DEV
);
775 dmaengine_slave_config(spi
->dma_tx
, &tx_dma_conf
);
777 tx_dma_desc
= dmaengine_prep_slave_sg(
778 spi
->dma_tx
, xfer
->tx_sg
.sgl
,
780 tx_dma_conf
.direction
,
783 if (spi
->cur_comm
== SPI_SIMPLEX_TX
) {
784 tx_dma_desc
->callback
= stm32_spi_dma_cb
;
785 tx_dma_desc
->callback_param
= spi
;
789 if ((spi
->tx_buf
&& !tx_dma_desc
) ||
790 (spi
->rx_buf
&& !rx_dma_desc
))
794 if (dma_submit_error(dmaengine_submit(rx_dma_desc
))) {
795 dev_err(spi
->dev
, "Rx DMA submit failed\n");
798 /* Enable Rx DMA channel */
799 dma_async_issue_pending(spi
->dma_rx
);
803 if (dma_submit_error(dmaengine_submit(tx_dma_desc
))) {
804 dev_err(spi
->dev
, "Tx DMA submit failed\n");
805 goto dma_submit_error
;
807 /* Enable Tx DMA channel */
808 dma_async_issue_pending(spi
->dma_tx
);
810 /* Enable Tx DMA request */
811 stm32_spi_set_bits(spi
, STM32_SPI_CFG1
, SPI_CFG1_TXDMAEN
);
814 /* Enable the interrupts relative to the end of transfer */
815 ier
|= SPI_IER_EOTIE
| SPI_IER_TXTFIE
| SPI_IER_OVRIE
| SPI_IER_MODFIE
;
816 writel_relaxed(ier
, spi
->base
+ STM32_SPI_IER
);
818 stm32_spi_enable(spi
);
820 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_CSTART
);
822 spin_unlock_irqrestore(&spi
->lock
, flags
);
828 dmaengine_terminate_all(spi
->dma_rx
);
831 stm32_spi_clr_bits(spi
, STM32_SPI_CFG1
, SPI_CFG1_RXDMAEN
);
833 spin_unlock_irqrestore(&spi
->lock
, flags
);
835 dev_info(spi
->dev
, "DMA issue: fall back to irq transfer\n");
837 return stm32_spi_transfer_one_irq(spi
);
841 * stm32_spi_transfer_one_setup - common setup to transfer a single
842 * spi_transfer either using DMA or
845 static int stm32_spi_transfer_one_setup(struct stm32_spi
*spi
,
846 struct spi_device
*spi_dev
,
847 struct spi_transfer
*transfer
)
850 u32 cfg1_clrb
= 0, cfg1_setb
= 0, cfg2_clrb
= 0, cfg2_setb
= 0;
854 spin_lock_irqsave(&spi
->lock
, flags
);
856 if (spi
->cur_bpw
!= transfer
->bits_per_word
) {
859 spi
->cur_bpw
= transfer
->bits_per_word
;
860 bpw
= spi
->cur_bpw
- 1;
862 cfg1_clrb
|= SPI_CFG1_DSIZE
;
863 cfg1_setb
|= (bpw
<< SPI_CFG1_DSIZE_SHIFT
) & SPI_CFG1_DSIZE
;
865 spi
->cur_fthlv
= stm32_spi_prepare_fthlv(spi
);
866 fthlv
= spi
->cur_fthlv
- 1;
868 cfg1_clrb
|= SPI_CFG1_FTHLV
;
869 cfg1_setb
|= (fthlv
<< SPI_CFG1_FTHLV_SHIFT
) & SPI_CFG1_FTHLV
;
872 if (spi
->cur_speed
!= transfer
->speed_hz
) {
875 /* Update spi->cur_speed with real clock speed */
876 mbr
= stm32_spi_prepare_mbr(spi
, transfer
->speed_hz
);
882 transfer
->speed_hz
= spi
->cur_speed
;
884 cfg1_clrb
|= SPI_CFG1_MBR
;
885 cfg1_setb
|= ((u32
)mbr
<< SPI_CFG1_MBR_SHIFT
) & SPI_CFG1_MBR
;
888 if (cfg1_clrb
|| cfg1_setb
)
889 writel_relaxed((readl_relaxed(spi
->base
+ STM32_SPI_CFG1
) &
890 ~cfg1_clrb
) | cfg1_setb
,
891 spi
->base
+ STM32_SPI_CFG1
);
893 mode
= SPI_FULL_DUPLEX
;
894 if (spi_dev
->mode
& SPI_3WIRE
) { /* MISO/MOSI signals shared */
896 * SPI_3WIRE and xfer->tx_buf != NULL and xfer->rx_buf != NULL
897 * is forbidden und unvalidated by SPI subsystem so depending
898 * on the valid buffer, we can determine the direction of the
901 mode
= SPI_HALF_DUPLEX
;
902 if (!transfer
->tx_buf
)
903 stm32_spi_clr_bits(spi
, STM32_SPI_CR1
, SPI_CR1_HDDIR
);
904 else if (!transfer
->rx_buf
)
905 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_HDDIR
);
907 if (!transfer
->tx_buf
)
908 mode
= SPI_SIMPLEX_RX
;
909 else if (!transfer
->rx_buf
)
910 mode
= SPI_SIMPLEX_TX
;
912 if (spi
->cur_comm
!= mode
) {
913 spi
->cur_comm
= mode
;
915 cfg2_clrb
|= SPI_CFG2_COMM
;
916 cfg2_setb
|= (mode
<< SPI_CFG2_COMM_SHIFT
) & SPI_CFG2_COMM
;
919 cfg2_clrb
|= SPI_CFG2_MIDI
;
920 if ((transfer
->len
> 1) && (spi
->cur_midi
> 0)) {
921 u32 sck_period_ns
= DIV_ROUND_UP(SPI_1HZ_NS
, spi
->cur_speed
);
922 u32 midi
= min((u32
)DIV_ROUND_UP(spi
->cur_midi
, sck_period_ns
),
923 (u32
)SPI_CFG2_MIDI
>> SPI_CFG2_MIDI_SHIFT
);
925 dev_dbg(spi
->dev
, "period=%dns, midi=%d(=%dns)\n",
926 sck_period_ns
, midi
, midi
* sck_period_ns
);
928 cfg2_setb
|= (midi
<< SPI_CFG2_MIDI_SHIFT
) & SPI_CFG2_MIDI
;
931 if (cfg2_clrb
|| cfg2_setb
)
932 writel_relaxed((readl_relaxed(spi
->base
+ STM32_SPI_CFG2
) &
933 ~cfg2_clrb
) | cfg2_setb
,
934 spi
->base
+ STM32_SPI_CFG2
);
936 if (spi
->cur_bpw
<= 8)
937 nb_words
= transfer
->len
;
938 else if (spi
->cur_bpw
<= 16)
939 nb_words
= DIV_ROUND_UP(transfer
->len
* 8, 16);
941 nb_words
= DIV_ROUND_UP(transfer
->len
* 8, 32);
942 nb_words
<<= SPI_CR2_TSIZE_SHIFT
;
944 if (nb_words
<= SPI_CR2_TSIZE
) {
945 writel_relaxed(nb_words
, spi
->base
+ STM32_SPI_CR2
);
951 spi
->cur_xferlen
= transfer
->len
;
953 dev_dbg(spi
->dev
, "transfer communication mode set to %d\n",
956 "data frame of %d-bit, data packet of %d data frames\n",
957 spi
->cur_bpw
, spi
->cur_fthlv
);
958 dev_dbg(spi
->dev
, "speed set to %dHz\n", spi
->cur_speed
);
959 dev_dbg(spi
->dev
, "transfer of %d bytes (%d data frames)\n",
960 spi
->cur_xferlen
, nb_words
);
961 dev_dbg(spi
->dev
, "dma %s\n",
962 (spi
->cur_usedma
) ? "enabled" : "disabled");
965 spin_unlock_irqrestore(&spi
->lock
, flags
);
971 * stm32_spi_transfer_one - transfer a single spi_transfer
973 * It must return 0 if the transfer is finished or 1 if the transfer is still
976 static int stm32_spi_transfer_one(struct spi_master
*master
,
977 struct spi_device
*spi_dev
,
978 struct spi_transfer
*transfer
)
980 struct stm32_spi
*spi
= spi_master_get_devdata(master
);
983 spi
->tx_buf
= transfer
->tx_buf
;
984 spi
->rx_buf
= transfer
->rx_buf
;
985 spi
->tx_len
= spi
->tx_buf
? transfer
->len
: 0;
986 spi
->rx_len
= spi
->rx_buf
? transfer
->len
: 0;
988 spi
->cur_usedma
= stm32_spi_can_dma(master
, spi_dev
, transfer
);
990 ret
= stm32_spi_transfer_one_setup(spi
, spi_dev
, transfer
);
992 dev_err(spi
->dev
, "SPI transfer setup failed\n");
997 return stm32_spi_transfer_one_dma(spi
, transfer
);
999 return stm32_spi_transfer_one_irq(spi
);
/**
 * stm32_spi_unprepare_msg - relax the hardware
 * @master: controller master interface
 * @msg: message that has been transferred
 *
 * Normally, if TSIZE has been configured, we should relax the hardware at the
 * reception of the EOT interrupt. But in case of error, EOT will not be
 * raised. So the subsystem unprepare_message call allows us to properly
 * complete the transfer from an hardware point of view.
 */
static int stm32_spi_unprepare_msg(struct spi_master *master,
				   struct spi_message *msg)
{
	struct stm32_spi *spi = spi_master_get_devdata(master);

	stm32_spi_disable(spi);

	return 0;
}
1021 * stm32_spi_config - Configure SPI controller as SPI master
1023 static int stm32_spi_config(struct stm32_spi
*spi
)
1025 unsigned long flags
;
1027 spin_lock_irqsave(&spi
->lock
, flags
);
1029 /* Ensure I2SMOD bit is kept cleared */
1030 stm32_spi_clr_bits(spi
, STM32_SPI_I2SCFGR
, SPI_I2SCFGR_I2SMOD
);
1033 * - SS input value high
1034 * - transmitter half duplex direction
1035 * - automatic communication suspend when RX-Fifo is full
1037 stm32_spi_set_bits(spi
, STM32_SPI_CR1
, SPI_CR1_SSI
|
1042 * - Set the master mode (default Motorola mode)
1043 * - Consider 1 master/n slaves configuration and
1044 * SS input value is determined by the SSI bit
1045 * - keep control of all associated GPIOs
1047 stm32_spi_set_bits(spi
, STM32_SPI_CFG2
, SPI_CFG2_MASTER
|
1051 spin_unlock_irqrestore(&spi
->lock
, flags
);
1056 static const struct of_device_id stm32_spi_of_match
[] = {
1057 { .compatible
= "st,stm32h7-spi", },
1060 MODULE_DEVICE_TABLE(of
, stm32_spi_of_match
);
1062 static int stm32_spi_probe(struct platform_device
*pdev
)
1064 struct spi_master
*master
;
1065 struct stm32_spi
*spi
;
1066 struct resource
*res
;
1069 master
= spi_alloc_master(&pdev
->dev
, sizeof(struct stm32_spi
));
1071 dev_err(&pdev
->dev
, "spi master allocation failed\n");
1074 platform_set_drvdata(pdev
, master
);
1076 spi
= spi_master_get_devdata(master
);
1077 spi
->dev
= &pdev
->dev
;
1078 spi
->master
= master
;
1079 spin_lock_init(&spi
->lock
);
1081 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
1082 spi
->base
= devm_ioremap_resource(&pdev
->dev
, res
);
1083 if (IS_ERR(spi
->base
)) {
1084 ret
= PTR_ERR(spi
->base
);
1085 goto err_master_put
;
1087 spi
->phys_addr
= (dma_addr_t
)res
->start
;
1089 spi
->irq
= platform_get_irq(pdev
, 0);
1090 if (spi
->irq
<= 0) {
1091 dev_err(&pdev
->dev
, "no irq: %d\n", spi
->irq
);
1093 goto err_master_put
;
1095 ret
= devm_request_threaded_irq(&pdev
->dev
, spi
->irq
, NULL
,
1096 stm32_spi_irq
, IRQF_ONESHOT
,
1097 pdev
->name
, master
);
1099 dev_err(&pdev
->dev
, "irq%d request failed: %d\n", spi
->irq
,
1101 goto err_master_put
;
1104 spi
->clk
= devm_clk_get(&pdev
->dev
, 0);
1105 if (IS_ERR(spi
->clk
)) {
1106 ret
= PTR_ERR(spi
->clk
);
1107 dev_err(&pdev
->dev
, "clk get failed: %d\n", ret
);
1108 goto err_master_put
;
1111 ret
= clk_prepare_enable(spi
->clk
);
1113 dev_err(&pdev
->dev
, "clk enable failed: %d\n", ret
);
1114 goto err_master_put
;
1116 spi
->clk_rate
= clk_get_rate(spi
->clk
);
1117 if (!spi
->clk_rate
) {
1118 dev_err(&pdev
->dev
, "clk rate = 0\n");
1120 goto err_master_put
;
1123 spi
->rst
= devm_reset_control_get(&pdev
->dev
, NULL
);
1124 if (!IS_ERR(spi
->rst
)) {
1125 reset_control_assert(spi
->rst
);
1127 reset_control_deassert(spi
->rst
);
1130 spi
->fifo_size
= stm32_spi_get_fifo_size(spi
);
1132 ret
= stm32_spi_config(spi
);
1134 dev_err(&pdev
->dev
, "controller configuration failed: %d\n",
1136 goto err_clk_disable
;
1139 master
->dev
.of_node
= pdev
->dev
.of_node
;
1140 master
->auto_runtime_pm
= true;
1141 master
->bus_num
= pdev
->id
;
1142 master
->mode_bits
= SPI_MODE_3
| SPI_CS_HIGH
| SPI_LSB_FIRST
|
1143 SPI_3WIRE
| SPI_LOOP
;
1144 master
->bits_per_word_mask
= stm32_spi_get_bpw_mask(spi
);
1145 master
->max_speed_hz
= spi
->clk_rate
/ SPI_MBR_DIV_MIN
;
1146 master
->min_speed_hz
= spi
->clk_rate
/ SPI_MBR_DIV_MAX
;
1147 master
->setup
= stm32_spi_setup
;
1148 master
->prepare_message
= stm32_spi_prepare_msg
;
1149 master
->transfer_one
= stm32_spi_transfer_one
;
1150 master
->unprepare_message
= stm32_spi_unprepare_msg
;
1152 spi
->dma_tx
= dma_request_slave_channel(spi
->dev
, "tx");
1154 dev_warn(&pdev
->dev
, "failed to request tx dma channel\n");
1156 master
->dma_tx
= spi
->dma_tx
;
1158 spi
->dma_rx
= dma_request_slave_channel(spi
->dev
, "rx");
1160 dev_warn(&pdev
->dev
, "failed to request rx dma channel\n");
1162 master
->dma_rx
= spi
->dma_rx
;
1164 if (spi
->dma_tx
|| spi
->dma_rx
)
1165 master
->can_dma
= stm32_spi_can_dma
;
1167 ret
= devm_spi_register_master(&pdev
->dev
, master
);
1169 dev_err(&pdev
->dev
, "spi master registration failed: %d\n",
1171 goto err_dma_release
;
1174 if (!master
->cs_gpios
) {
1175 dev_err(&pdev
->dev
, "no CS gpios available\n");
1177 goto err_dma_release
;
1180 for (i
= 0; i
< master
->num_chipselect
; i
++) {
1181 if (!gpio_is_valid(master
->cs_gpios
[i
])) {
1182 dev_err(&pdev
->dev
, "%i is not a valid gpio\n",
1183 master
->cs_gpios
[i
]);
1185 goto err_dma_release
;
1188 ret
= devm_gpio_request(&pdev
->dev
, master
->cs_gpios
[i
],
1191 dev_err(&pdev
->dev
, "can't get CS gpio %i\n",
1192 master
->cs_gpios
[i
]);
1193 goto err_dma_release
;
1197 dev_info(&pdev
->dev
, "driver initialized\n");
1203 dma_release_channel(spi
->dma_tx
);
1205 dma_release_channel(spi
->dma_rx
);
1207 clk_disable_unprepare(spi
->clk
);
1209 spi_master_put(master
);
1214 static int stm32_spi_remove(struct platform_device
*pdev
)
1216 struct spi_master
*master
= platform_get_drvdata(pdev
);
1217 struct stm32_spi
*spi
= spi_master_get_devdata(master
);
1219 stm32_spi_disable(spi
);
1222 dma_release_channel(master
->dma_tx
);
1224 dma_release_channel(master
->dma_rx
);
1226 clk_disable_unprepare(spi
->clk
);
#ifdef CONFIG_PM_SLEEP
/* System sleep: quiesce the SPI core, then gate the kernel clock. */
static int stm32_spi_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	ret = spi_master_suspend(master);
	if (ret)
		return ret;

	clk_disable_unprepare(spi->clk);

	return ret;
}

/* System resume: re-enable the clock before waking the SPI core. */
static int stm32_spi_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	ret = clk_prepare_enable(spi->clk);
	if (ret)
		return ret;

	ret = spi_master_resume(master);
	if (ret)
		clk_disable_unprepare(spi->clk);

	return ret;
}
#endif

static SIMPLE_DEV_PM_OPS(stm32_spi_pm_ops,
			 stm32_spi_suspend, stm32_spi_resume);
1267 static struct platform_driver stm32_spi_driver
= {
1268 .probe
= stm32_spi_probe
,
1269 .remove
= stm32_spi_remove
,
1271 .name
= DRIVER_NAME
,
1272 .pm
= &stm32_spi_pm_ops
,
1273 .of_match_table
= stm32_spi_of_match
,
1277 module_platform_driver(stm32_spi_driver
);
1279 MODULE_ALIAS("platform:" DRIVER_NAME
);
1280 MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
1281 MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
1282 MODULE_LICENSE("GPL v2");