/*
 * Copyright (c) 2015 MediaTek Inc.
 * Author: Leilk Liu <leilk.liu@mediatek.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/ioport.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/platform_data/spi-mt65xx.h>
#include <linux/pm_runtime.h>
#include <linux/spi/spi.h>
/* SPI controller register offsets */
#define SPI_CFG0_REG                      0x0000
#define SPI_CFG1_REG                      0x0004
#define SPI_TX_SRC_REG                    0x0008
#define SPI_RX_DST_REG                    0x000c
#define SPI_TX_DATA_REG                   0x0010
#define SPI_RX_DATA_REG                   0x0014
#define SPI_CMD_REG                       0x0018
#define SPI_STATUS0_REG                   0x001c
#define SPI_PAD_SEL_REG                   0x0024

/* SPI_CFG0_REG: clock/chip-select timing fields */
#define SPI_CFG0_SCK_HIGH_OFFSET          0
#define SPI_CFG0_SCK_LOW_OFFSET           8
#define SPI_CFG0_CS_HOLD_OFFSET           16
#define SPI_CFG0_CS_SETUP_OFFSET          24

/* SPI_CFG1_REG: cs-idle time and packet length/loop fields */
#define SPI_CFG1_CS_IDLE_OFFSET           0
#define SPI_CFG1_PACKET_LOOP_OFFSET       8
#define SPI_CFG1_PACKET_LENGTH_OFFSET     16
#define SPI_CFG1_GET_TICK_DLY_OFFSET      30

#define SPI_CFG1_CS_IDLE_MASK             0xff
#define SPI_CFG1_PACKET_LOOP_MASK         0xff00
#define SPI_CFG1_PACKET_LENGTH_MASK       0x3ff0000

/* SPI_CMD_REG bits */
#define SPI_CMD_ACT                       BIT(0)
#define SPI_CMD_RESUME                    BIT(1)
#define SPI_CMD_RST                       BIT(2)
#define SPI_CMD_PAUSE_EN                  BIT(4)
#define SPI_CMD_DEASSERT                  BIT(5)
#define SPI_CMD_CPHA                      BIT(8)
#define SPI_CMD_CPOL                      BIT(9)
#define SPI_CMD_RX_DMA                    BIT(10)
#define SPI_CMD_TX_DMA                    BIT(11)
#define SPI_CMD_TXMSBF                    BIT(12)
#define SPI_CMD_RXMSBF                    BIT(13)
#define SPI_CMD_RX_ENDIAN                 BIT(14)
#define SPI_CMD_TX_ENDIAN                 BIT(15)
#define SPI_CMD_FINISH_IE                 BIT(16)
#define SPI_CMD_PAUSE_IE                  BIT(17)

/* highest legal value for the "mediatek,pad-select" DT property */
#define MT8173_SPI_MAX_PAD_SEL            3

/* SPI_STATUS0_REG: pause-interrupt status bit */
#define MTK_SPI_PAUSE_INT_STATUS          0x2

/* driver state machine values (mdata->state) */
#define MTK_SPI_IDLE                      0
#define MTK_SPI_PAUSED                    1

#define MTK_SPI_MAX_FIFO_SIZE             32
#define MTK_SPI_PACKET_SIZE               1024
78 struct mtk_spi_compatible
{
80 /* Must explicitly send dummy Tx bytes to do Rx only transfer */
88 struct clk
*spi_clk
, *parent_clk
;
89 struct spi_transfer
*cur_transfer
;
91 struct scatterlist
*tx_sgl
, *rx_sgl
;
92 u32 tx_sgl_len
, rx_sgl_len
;
93 const struct mtk_spi_compatible
*dev_comp
;
96 static const struct mtk_spi_compatible mt6589_compat
;
97 static const struct mtk_spi_compatible mt8135_compat
;
98 static const struct mtk_spi_compatible mt8173_compat
= {
104 * A piece of default chip info unless the platform
107 static const struct mtk_chip_config mtk_default_chip_info
= {
112 static const struct of_device_id mtk_spi_of_match
[] = {
113 { .compatible
= "mediatek,mt6589-spi", .data
= (void *)&mt6589_compat
},
114 { .compatible
= "mediatek,mt8135-spi", .data
= (void *)&mt8135_compat
},
115 { .compatible
= "mediatek,mt8173-spi", .data
= (void *)&mt8173_compat
},
118 MODULE_DEVICE_TABLE(of
, mtk_spi_of_match
);
120 static void mtk_spi_reset(struct mtk_spi
*mdata
)
124 /* set the software reset bit in SPI_CMD_REG. */
125 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
126 reg_val
|= SPI_CMD_RST
;
127 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
129 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
130 reg_val
&= ~SPI_CMD_RST
;
131 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
134 static void mtk_spi_config(struct mtk_spi
*mdata
,
135 struct mtk_chip_config
*chip_config
)
139 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
141 /* set the mlsbx and mlsbtx */
142 if (chip_config
->tx_mlsb
)
143 reg_val
|= SPI_CMD_TXMSBF
;
145 reg_val
&= ~SPI_CMD_TXMSBF
;
146 if (chip_config
->rx_mlsb
)
147 reg_val
|= SPI_CMD_RXMSBF
;
149 reg_val
&= ~SPI_CMD_RXMSBF
;
151 /* set the tx/rx endian */
152 #ifdef __LITTLE_ENDIAN
153 reg_val
&= ~SPI_CMD_TX_ENDIAN
;
154 reg_val
&= ~SPI_CMD_RX_ENDIAN
;
156 reg_val
|= SPI_CMD_TX_ENDIAN
;
157 reg_val
|= SPI_CMD_RX_ENDIAN
;
160 /* set finish and pause interrupt always enable */
161 reg_val
|= SPI_CMD_FINISH_IE
| SPI_CMD_PAUSE_IE
;
163 /* disable dma mode */
164 reg_val
&= ~(SPI_CMD_TX_DMA
| SPI_CMD_RX_DMA
);
166 /* disable deassert mode */
167 reg_val
&= ~SPI_CMD_DEASSERT
;
169 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
172 if (mdata
->dev_comp
->need_pad_sel
)
173 writel(mdata
->pad_sel
, mdata
->base
+ SPI_PAD_SEL_REG
);
176 static int mtk_spi_prepare_hardware(struct spi_master
*master
)
178 struct spi_transfer
*trans
;
179 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
180 struct spi_message
*msg
= master
->cur_msg
;
182 trans
= list_first_entry(&msg
->transfers
, struct spi_transfer
,
184 if (!trans
->cs_change
) {
185 mdata
->state
= MTK_SPI_IDLE
;
186 mtk_spi_reset(mdata
);
192 static int mtk_spi_prepare_message(struct spi_master
*master
,
193 struct spi_message
*msg
)
197 struct mtk_chip_config
*chip_config
;
198 struct spi_device
*spi
= msg
->spi
;
199 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
201 cpha
= spi
->mode
& SPI_CPHA
? 1 : 0;
202 cpol
= spi
->mode
& SPI_CPOL
? 1 : 0;
204 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
206 reg_val
|= SPI_CMD_CPHA
;
208 reg_val
&= ~SPI_CMD_CPHA
;
210 reg_val
|= SPI_CMD_CPOL
;
212 reg_val
&= ~SPI_CMD_CPOL
;
213 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
215 chip_config
= spi
->controller_data
;
217 chip_config
= (void *)&mtk_default_chip_info
;
218 spi
->controller_data
= chip_config
;
220 mtk_spi_config(mdata
, chip_config
);
225 static void mtk_spi_set_cs(struct spi_device
*spi
, bool enable
)
228 struct mtk_spi
*mdata
= spi_master_get_devdata(spi
->master
);
230 reg_val
= readl(mdata
->base
+ SPI_CMD_REG
);
232 reg_val
|= SPI_CMD_PAUSE_EN
;
234 reg_val
&= ~SPI_CMD_PAUSE_EN
;
235 writel(reg_val
, mdata
->base
+ SPI_CMD_REG
);
238 static void mtk_spi_prepare_transfer(struct spi_master
*master
,
239 struct spi_transfer
*xfer
)
241 u32 spi_clk_hz
, div
, sck_time
, cs_time
, reg_val
= 0;
242 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
244 spi_clk_hz
= clk_get_rate(mdata
->spi_clk
);
245 if (xfer
->speed_hz
< spi_clk_hz
/ 2)
246 div
= DIV_ROUND_UP(spi_clk_hz
, xfer
->speed_hz
);
250 sck_time
= (div
+ 1) / 2;
251 cs_time
= sck_time
* 2;
253 reg_val
|= (((sck_time
- 1) & 0xff) << SPI_CFG0_SCK_HIGH_OFFSET
);
254 reg_val
|= (((sck_time
- 1) & 0xff) << SPI_CFG0_SCK_LOW_OFFSET
);
255 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG0_CS_HOLD_OFFSET
);
256 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG0_CS_SETUP_OFFSET
);
257 writel(reg_val
, mdata
->base
+ SPI_CFG0_REG
);
259 reg_val
= readl(mdata
->base
+ SPI_CFG1_REG
);
260 reg_val
&= ~SPI_CFG1_CS_IDLE_MASK
;
261 reg_val
|= (((cs_time
- 1) & 0xff) << SPI_CFG1_CS_IDLE_OFFSET
);
262 writel(reg_val
, mdata
->base
+ SPI_CFG1_REG
);
265 static void mtk_spi_setup_packet(struct spi_master
*master
)
267 u32 packet_size
, packet_loop
, reg_val
;
268 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
270 packet_size
= min_t(u32
, mdata
->xfer_len
, MTK_SPI_PACKET_SIZE
);
271 packet_loop
= mdata
->xfer_len
/ packet_size
;
273 reg_val
= readl(mdata
->base
+ SPI_CFG1_REG
);
274 reg_val
&= ~(SPI_CFG1_PACKET_LENGTH_MASK
| SPI_CFG1_PACKET_LOOP_MASK
);
275 reg_val
|= (packet_size
- 1) << SPI_CFG1_PACKET_LENGTH_OFFSET
;
276 reg_val
|= (packet_loop
- 1) << SPI_CFG1_PACKET_LOOP_OFFSET
;
277 writel(reg_val
, mdata
->base
+ SPI_CFG1_REG
);
280 static void mtk_spi_enable_transfer(struct spi_master
*master
)
283 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
285 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
286 if (mdata
->state
== MTK_SPI_IDLE
)
289 cmd
|= SPI_CMD_RESUME
;
290 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
293 static int mtk_spi_get_mult_delta(u32 xfer_len
)
297 if (xfer_len
> MTK_SPI_PACKET_SIZE
)
298 mult_delta
= xfer_len
% MTK_SPI_PACKET_SIZE
;
305 static void mtk_spi_update_mdata_len(struct spi_master
*master
)
308 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
310 if (mdata
->tx_sgl_len
&& mdata
->rx_sgl_len
) {
311 if (mdata
->tx_sgl_len
> mdata
->rx_sgl_len
) {
312 mult_delta
= mtk_spi_get_mult_delta(mdata
->rx_sgl_len
);
313 mdata
->xfer_len
= mdata
->rx_sgl_len
- mult_delta
;
314 mdata
->rx_sgl_len
= mult_delta
;
315 mdata
->tx_sgl_len
-= mdata
->xfer_len
;
317 mult_delta
= mtk_spi_get_mult_delta(mdata
->tx_sgl_len
);
318 mdata
->xfer_len
= mdata
->tx_sgl_len
- mult_delta
;
319 mdata
->tx_sgl_len
= mult_delta
;
320 mdata
->rx_sgl_len
-= mdata
->xfer_len
;
322 } else if (mdata
->tx_sgl_len
) {
323 mult_delta
= mtk_spi_get_mult_delta(mdata
->tx_sgl_len
);
324 mdata
->xfer_len
= mdata
->tx_sgl_len
- mult_delta
;
325 mdata
->tx_sgl_len
= mult_delta
;
326 } else if (mdata
->rx_sgl_len
) {
327 mult_delta
= mtk_spi_get_mult_delta(mdata
->rx_sgl_len
);
328 mdata
->xfer_len
= mdata
->rx_sgl_len
- mult_delta
;
329 mdata
->rx_sgl_len
= mult_delta
;
333 static void mtk_spi_setup_dma_addr(struct spi_master
*master
,
334 struct spi_transfer
*xfer
)
336 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
339 writel(xfer
->tx_dma
, mdata
->base
+ SPI_TX_SRC_REG
);
341 writel(xfer
->rx_dma
, mdata
->base
+ SPI_RX_DST_REG
);
344 static int mtk_spi_fifo_transfer(struct spi_master
*master
,
345 struct spi_device
*spi
,
346 struct spi_transfer
*xfer
)
349 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
351 mdata
->cur_transfer
= xfer
;
352 mdata
->xfer_len
= xfer
->len
;
353 mtk_spi_prepare_transfer(master
, xfer
);
354 mtk_spi_setup_packet(master
);
357 cnt
= xfer
->len
/ 4 + 1;
360 iowrite32_rep(mdata
->base
+ SPI_TX_DATA_REG
, xfer
->tx_buf
, cnt
);
362 mtk_spi_enable_transfer(master
);
367 static int mtk_spi_dma_transfer(struct spi_master
*master
,
368 struct spi_device
*spi
,
369 struct spi_transfer
*xfer
)
372 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
374 mdata
->tx_sgl
= NULL
;
375 mdata
->rx_sgl
= NULL
;
376 mdata
->tx_sgl_len
= 0;
377 mdata
->rx_sgl_len
= 0;
378 mdata
->cur_transfer
= xfer
;
380 mtk_spi_prepare_transfer(master
, xfer
);
382 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
384 cmd
|= SPI_CMD_TX_DMA
;
386 cmd
|= SPI_CMD_RX_DMA
;
387 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
390 mdata
->tx_sgl
= xfer
->tx_sg
.sgl
;
392 mdata
->rx_sgl
= xfer
->rx_sg
.sgl
;
395 xfer
->tx_dma
= sg_dma_address(mdata
->tx_sgl
);
396 mdata
->tx_sgl_len
= sg_dma_len(mdata
->tx_sgl
);
399 xfer
->rx_dma
= sg_dma_address(mdata
->rx_sgl
);
400 mdata
->rx_sgl_len
= sg_dma_len(mdata
->rx_sgl
);
403 mtk_spi_update_mdata_len(master
);
404 mtk_spi_setup_packet(master
);
405 mtk_spi_setup_dma_addr(master
, xfer
);
406 mtk_spi_enable_transfer(master
);
411 static int mtk_spi_transfer_one(struct spi_master
*master
,
412 struct spi_device
*spi
,
413 struct spi_transfer
*xfer
)
415 if (master
->can_dma(master
, spi
, xfer
))
416 return mtk_spi_dma_transfer(master
, spi
, xfer
);
418 return mtk_spi_fifo_transfer(master
, spi
, xfer
);
421 static bool mtk_spi_can_dma(struct spi_master
*master
,
422 struct spi_device
*spi
,
423 struct spi_transfer
*xfer
)
425 return xfer
->len
> MTK_SPI_MAX_FIFO_SIZE
;
428 static irqreturn_t
mtk_spi_interrupt(int irq
, void *dev_id
)
430 u32 cmd
, reg_val
, cnt
;
431 struct spi_master
*master
= dev_id
;
432 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
433 struct spi_transfer
*trans
= mdata
->cur_transfer
;
435 reg_val
= readl(mdata
->base
+ SPI_STATUS0_REG
);
436 if (reg_val
& MTK_SPI_PAUSE_INT_STATUS
)
437 mdata
->state
= MTK_SPI_PAUSED
;
439 mdata
->state
= MTK_SPI_IDLE
;
441 if (!master
->can_dma(master
, master
->cur_msg
->spi
, trans
)) {
443 if (mdata
->xfer_len
% 4)
444 cnt
= mdata
->xfer_len
/ 4 + 1;
446 cnt
= mdata
->xfer_len
/ 4;
447 ioread32_rep(mdata
->base
+ SPI_RX_DATA_REG
,
450 spi_finalize_current_transfer(master
);
455 trans
->tx_dma
+= mdata
->xfer_len
;
457 trans
->rx_dma
+= mdata
->xfer_len
;
459 if (mdata
->tx_sgl
&& (mdata
->tx_sgl_len
== 0)) {
460 mdata
->tx_sgl
= sg_next(mdata
->tx_sgl
);
462 trans
->tx_dma
= sg_dma_address(mdata
->tx_sgl
);
463 mdata
->tx_sgl_len
= sg_dma_len(mdata
->tx_sgl
);
466 if (mdata
->rx_sgl
&& (mdata
->rx_sgl_len
== 0)) {
467 mdata
->rx_sgl
= sg_next(mdata
->rx_sgl
);
469 trans
->rx_dma
= sg_dma_address(mdata
->rx_sgl
);
470 mdata
->rx_sgl_len
= sg_dma_len(mdata
->rx_sgl
);
474 if (!mdata
->tx_sgl
&& !mdata
->rx_sgl
) {
475 /* spi disable dma */
476 cmd
= readl(mdata
->base
+ SPI_CMD_REG
);
477 cmd
&= ~SPI_CMD_TX_DMA
;
478 cmd
&= ~SPI_CMD_RX_DMA
;
479 writel(cmd
, mdata
->base
+ SPI_CMD_REG
);
481 spi_finalize_current_transfer(master
);
485 mtk_spi_update_mdata_len(master
);
486 mtk_spi_setup_packet(master
);
487 mtk_spi_setup_dma_addr(master
, trans
);
488 mtk_spi_enable_transfer(master
);
493 static int mtk_spi_probe(struct platform_device
*pdev
)
495 struct spi_master
*master
;
496 struct mtk_spi
*mdata
;
497 const struct of_device_id
*of_id
;
498 struct resource
*res
;
501 master
= spi_alloc_master(&pdev
->dev
, sizeof(*mdata
));
503 dev_err(&pdev
->dev
, "failed to alloc spi master\n");
507 master
->auto_runtime_pm
= true;
508 master
->dev
.of_node
= pdev
->dev
.of_node
;
509 master
->mode_bits
= SPI_CPOL
| SPI_CPHA
;
511 master
->set_cs
= mtk_spi_set_cs
;
512 master
->prepare_transfer_hardware
= mtk_spi_prepare_hardware
;
513 master
->prepare_message
= mtk_spi_prepare_message
;
514 master
->transfer_one
= mtk_spi_transfer_one
;
515 master
->can_dma
= mtk_spi_can_dma
;
517 of_id
= of_match_node(mtk_spi_of_match
, pdev
->dev
.of_node
);
519 dev_err(&pdev
->dev
, "failed to probe of_node\n");
524 mdata
= spi_master_get_devdata(master
);
525 mdata
->dev_comp
= of_id
->data
;
526 if (mdata
->dev_comp
->must_tx
)
527 master
->flags
= SPI_MASTER_MUST_TX
;
529 if (mdata
->dev_comp
->need_pad_sel
) {
530 ret
= of_property_read_u32(pdev
->dev
.of_node
,
531 "mediatek,pad-select",
534 dev_err(&pdev
->dev
, "failed to read pad select: %d\n",
539 if (mdata
->pad_sel
> MT8173_SPI_MAX_PAD_SEL
) {
540 dev_err(&pdev
->dev
, "wrong pad-select: %u\n",
547 platform_set_drvdata(pdev
, master
);
549 res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
552 dev_err(&pdev
->dev
, "failed to determine base address\n");
556 mdata
->base
= devm_ioremap_resource(&pdev
->dev
, res
);
557 if (IS_ERR(mdata
->base
)) {
558 ret
= PTR_ERR(mdata
->base
);
562 irq
= platform_get_irq(pdev
, 0);
564 dev_err(&pdev
->dev
, "failed to get irq (%d)\n", irq
);
569 if (!pdev
->dev
.dma_mask
)
570 pdev
->dev
.dma_mask
= &pdev
->dev
.coherent_dma_mask
;
572 ret
= devm_request_irq(&pdev
->dev
, irq
, mtk_spi_interrupt
,
573 IRQF_TRIGGER_NONE
, dev_name(&pdev
->dev
), master
);
575 dev_err(&pdev
->dev
, "failed to register irq (%d)\n", ret
);
579 mdata
->spi_clk
= devm_clk_get(&pdev
->dev
, "spi-clk");
580 if (IS_ERR(mdata
->spi_clk
)) {
581 ret
= PTR_ERR(mdata
->spi_clk
);
582 dev_err(&pdev
->dev
, "failed to get spi-clk: %d\n", ret
);
586 mdata
->parent_clk
= devm_clk_get(&pdev
->dev
, "parent-clk");
587 if (IS_ERR(mdata
->parent_clk
)) {
588 ret
= PTR_ERR(mdata
->parent_clk
);
589 dev_err(&pdev
->dev
, "failed to get parent-clk: %d\n", ret
);
593 ret
= clk_prepare_enable(mdata
->spi_clk
);
595 dev_err(&pdev
->dev
, "failed to enable spi_clk (%d)\n", ret
);
599 ret
= clk_set_parent(mdata
->spi_clk
, mdata
->parent_clk
);
601 dev_err(&pdev
->dev
, "failed to clk_set_parent (%d)\n", ret
);
602 goto err_disable_clk
;
605 clk_disable_unprepare(mdata
->spi_clk
);
607 pm_runtime_enable(&pdev
->dev
);
609 ret
= devm_spi_register_master(&pdev
->dev
, master
);
611 dev_err(&pdev
->dev
, "failed to register master (%d)\n", ret
);
618 clk_disable_unprepare(mdata
->spi_clk
);
620 spi_master_put(master
);
625 static int mtk_spi_remove(struct platform_device
*pdev
)
627 struct spi_master
*master
= platform_get_drvdata(pdev
);
628 struct mtk_spi
*mdata
= spi_master_get_devdata(master
);
630 pm_runtime_disable(&pdev
->dev
);
632 mtk_spi_reset(mdata
);
633 clk_disable_unprepare(mdata
->spi_clk
);
634 spi_master_put(master
);
#ifdef CONFIG_PM_SLEEP
/*
 * System suspend: quiesce the SPI core first; gate the clock only if
 * runtime PM has not already done so.
 */
static int mtk_spi_suspend(struct device *dev)
{
	int ret;
	struct spi_master *master = dev_get_drvdata(dev);
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	ret = spi_master_suspend(master);
	if (ret)
		return ret;

	if (!pm_runtime_suspended(dev))
		clk_disable_unprepare(mdata->spi_clk);

	return ret;
}

/*
 * System resume: mirror of mtk_spi_suspend — re-enable the clock when
 * the device was runtime-active, then resume the SPI core. On core
 * resume failure the clock is gated again to stay balanced.
 */
static int mtk_spi_resume(struct device *dev)
{
	int ret;
	struct spi_master *master = dev_get_drvdata(dev);
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	if (!pm_runtime_suspended(dev)) {
		ret = clk_prepare_enable(mdata->spi_clk);
		if (ret < 0) {
			dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
			return ret;
		}
	}

	ret = spi_master_resume(master);
	if (ret < 0)
		clk_disable_unprepare(mdata->spi_clk);

	return ret;
}
#endif /* CONFIG_PM_SLEEP */
#ifdef CONFIG_PM
/* Runtime suspend: gate the functional clock. */
static int mtk_spi_runtime_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	clk_disable_unprepare(mdata->spi_clk);

	return 0;
}

/* Runtime resume: ungate the functional clock. */
static int mtk_spi_runtime_resume(struct device *dev)
{
	int ret;
	struct spi_master *master = dev_get_drvdata(dev);
	struct mtk_spi *mdata = spi_master_get_devdata(master);

	ret = clk_prepare_enable(mdata->spi_clk);
	if (ret < 0) {
		dev_err(dev, "failed to enable spi_clk (%d)\n", ret);
		return ret;
	}

	return 0;
}
#endif /* CONFIG_PM */
705 static const struct dev_pm_ops mtk_spi_pm
= {
706 SET_SYSTEM_SLEEP_PM_OPS(mtk_spi_suspend
, mtk_spi_resume
)
707 SET_RUNTIME_PM_OPS(mtk_spi_runtime_suspend
,
708 mtk_spi_runtime_resume
, NULL
)
711 static struct platform_driver mtk_spi_driver
= {
715 .of_match_table
= mtk_spi_of_match
,
717 .probe
= mtk_spi_probe
,
718 .remove
= mtk_spi_remove
,
721 module_platform_driver(mtk_spi_driver
);
723 MODULE_DESCRIPTION("MTK SPI Controller driver");
724 MODULE_AUTHOR("Leilk Liu <leilk.liu@mediatek.com>");
725 MODULE_LICENSE("GPL v2");
726 MODULE_ALIAS("platform:mtk-spi");