/*
 * SPI bus driver for CSR SiRFprimaII
 *
 * Copyright (c) 2011 Cambridge Silicon Radio Limited, a CSR plc group company.
 *
 * Licensed under GPLv2 or later.
 */
9 #include <linux/module.h>
10 #include <linux/kernel.h>
11 #include <linux/slab.h>
12 #include <linux/clk.h>
13 #include <linux/completion.h>
14 #include <linux/interrupt.h>
17 #include <linux/bitops.h>
18 #include <linux/err.h>
19 #include <linux/platform_device.h>
20 #include <linux/of_gpio.h>
21 #include <linux/spi/spi.h>
22 #include <linux/spi/spi_bitbang.h>
23 #include <linux/dmaengine.h>
24 #include <linux/dma-direction.h>
25 #include <linux/dma-mapping.h>
27 #define DRIVER_NAME "sirfsoc_spi"
29 #define SIRFSOC_SPI_CTRL 0x0000
30 #define SIRFSOC_SPI_CMD 0x0004
31 #define SIRFSOC_SPI_TX_RX_EN 0x0008
32 #define SIRFSOC_SPI_INT_EN 0x000C
33 #define SIRFSOC_SPI_INT_STATUS 0x0010
34 #define SIRFSOC_SPI_TX_DMA_IO_CTRL 0x0100
35 #define SIRFSOC_SPI_TX_DMA_IO_LEN 0x0104
36 #define SIRFSOC_SPI_TXFIFO_CTRL 0x0108
37 #define SIRFSOC_SPI_TXFIFO_LEVEL_CHK 0x010C
38 #define SIRFSOC_SPI_TXFIFO_OP 0x0110
39 #define SIRFSOC_SPI_TXFIFO_STATUS 0x0114
40 #define SIRFSOC_SPI_TXFIFO_DATA 0x0118
41 #define SIRFSOC_SPI_RX_DMA_IO_CTRL 0x0120
42 #define SIRFSOC_SPI_RX_DMA_IO_LEN 0x0124
43 #define SIRFSOC_SPI_RXFIFO_CTRL 0x0128
44 #define SIRFSOC_SPI_RXFIFO_LEVEL_CHK 0x012C
45 #define SIRFSOC_SPI_RXFIFO_OP 0x0130
46 #define SIRFSOC_SPI_RXFIFO_STATUS 0x0134
47 #define SIRFSOC_SPI_RXFIFO_DATA 0x0138
48 #define SIRFSOC_SPI_DUMMY_DELAY_CTL 0x0144
50 /* SPI CTRL register defines */
51 #define SIRFSOC_SPI_SLV_MODE BIT(16)
52 #define SIRFSOC_SPI_CMD_MODE BIT(17)
53 #define SIRFSOC_SPI_CS_IO_OUT BIT(18)
54 #define SIRFSOC_SPI_CS_IO_MODE BIT(19)
55 #define SIRFSOC_SPI_CLK_IDLE_STAT BIT(20)
56 #define SIRFSOC_SPI_CS_IDLE_STAT BIT(21)
57 #define SIRFSOC_SPI_TRAN_MSB BIT(22)
58 #define SIRFSOC_SPI_DRV_POS_EDGE BIT(23)
59 #define SIRFSOC_SPI_CS_HOLD_TIME BIT(24)
60 #define SIRFSOC_SPI_CLK_SAMPLE_MODE BIT(25)
61 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_8 (0 << 26)
62 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_12 (1 << 26)
63 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_16 (2 << 26)
64 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_32 (3 << 26)
65 #define SIRFSOC_SPI_CMD_BYTE_NUM(x) ((x & 3) << 28)
66 #define SIRFSOC_SPI_ENA_AUTO_CLR BIT(30)
67 #define SIRFSOC_SPI_MUL_DAT_MODE BIT(31)
69 /* Interrupt Enable */
70 #define SIRFSOC_SPI_RX_DONE_INT_EN BIT(0)
71 #define SIRFSOC_SPI_TX_DONE_INT_EN BIT(1)
72 #define SIRFSOC_SPI_RX_OFLOW_INT_EN BIT(2)
73 #define SIRFSOC_SPI_TX_UFLOW_INT_EN BIT(3)
74 #define SIRFSOC_SPI_RX_IO_DMA_INT_EN BIT(4)
75 #define SIRFSOC_SPI_TX_IO_DMA_INT_EN BIT(5)
76 #define SIRFSOC_SPI_RXFIFO_FULL_INT_EN BIT(6)
77 #define SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN BIT(7)
78 #define SIRFSOC_SPI_RXFIFO_THD_INT_EN BIT(8)
79 #define SIRFSOC_SPI_TXFIFO_THD_INT_EN BIT(9)
80 #define SIRFSOC_SPI_FRM_END_INT_EN BIT(10)
82 #define SIRFSOC_SPI_INT_MASK_ALL 0x1FFF
84 /* Interrupt status */
85 #define SIRFSOC_SPI_RX_DONE BIT(0)
86 #define SIRFSOC_SPI_TX_DONE BIT(1)
87 #define SIRFSOC_SPI_RX_OFLOW BIT(2)
88 #define SIRFSOC_SPI_TX_UFLOW BIT(3)
89 #define SIRFSOC_SPI_RX_IO_DMA BIT(4)
90 #define SIRFSOC_SPI_RX_FIFO_FULL BIT(6)
91 #define SIRFSOC_SPI_TXFIFO_EMPTY BIT(7)
92 #define SIRFSOC_SPI_RXFIFO_THD_REACH BIT(8)
93 #define SIRFSOC_SPI_TXFIFO_THD_REACH BIT(9)
94 #define SIRFSOC_SPI_FRM_END BIT(10)
97 #define SIRFSOC_SPI_RX_EN BIT(0)
98 #define SIRFSOC_SPI_TX_EN BIT(1)
99 #define SIRFSOC_SPI_CMD_TX_EN BIT(2)
101 #define SIRFSOC_SPI_IO_MODE_SEL BIT(0)
102 #define SIRFSOC_SPI_RX_DMA_FLUSH BIT(2)
105 #define SIRFSOC_SPI_FIFO_RESET BIT(0)
106 #define SIRFSOC_SPI_FIFO_START BIT(1)
109 #define SIRFSOC_SPI_FIFO_WIDTH_BYTE (0 << 0)
110 #define SIRFSOC_SPI_FIFO_WIDTH_WORD (1 << 0)
111 #define SIRFSOC_SPI_FIFO_WIDTH_DWORD (2 << 0)
114 #define SIRFSOC_SPI_FIFO_LEVEL_MASK 0xFF
115 #define SIRFSOC_SPI_FIFO_FULL BIT(8)
116 #define SIRFSOC_SPI_FIFO_EMPTY BIT(9)
118 /* 256 bytes rx/tx FIFO */
119 #define SIRFSOC_SPI_FIFO_SIZE 256
120 #define SIRFSOC_SPI_DAT_FRM_LEN_MAX (64 * 1024)
122 #define SIRFSOC_SPI_FIFO_SC(x) ((x) & 0x3F)
123 #define SIRFSOC_SPI_FIFO_LC(x) (((x) & 0x3F) << 10)
124 #define SIRFSOC_SPI_FIFO_HC(x) (((x) & 0x3F) << 20)
125 #define SIRFSOC_SPI_FIFO_THD(x) (((x) & 0xFF) << 2)
/*
 * Use DMA only when the rx/tx buffers and the transfer length are all
 * 4-byte aligned (a limitation of the DMA controller) and the transfer
 * fits within the 2-page dummy buffer used for unidirectional transfers.
 */
#define ALIGNED(x) (!((u32)(x) & 0x3))
#define IS_DMA_VALID(x) ((x) && ALIGNED((x)->tx_buf) && ALIGNED((x)->rx_buf) && \
	ALIGNED((x)->len) && ((x)->len < 2 * PAGE_SIZE))

#define SIRFSOC_MAX_CMD_BYTES	4
139 struct spi_bitbang bitbang
;
140 struct completion rx_done
;
141 struct completion tx_done
;
144 u32 ctrl_freq
; /* SPI controller clock speed */
147 /* rx & tx bufs from the spi_transfer */
151 /* place received word into rx buffer */
152 void (*rx_word
) (struct sirfsoc_spi
*);
153 /* get word from tx buffer for sending */
154 void (*tx_word
) (struct sirfsoc_spi
*);
156 /* number of words left to be tranmitted/received */
157 unsigned int left_tx_word
;
158 unsigned int left_rx_word
;
160 /* rx & tx DMA channels */
161 struct dma_chan
*rx_chan
;
162 struct dma_chan
*tx_chan
;
163 dma_addr_t src_start
;
164 dma_addr_t dst_start
;
166 int word_width
; /* in bytes */
169 * if tx size is not more than 4 and rx size is NULL, use
177 static void spi_sirfsoc_rx_word_u8(struct sirfsoc_spi
*sspi
)
182 data
= readl(sspi
->base
+ SIRFSOC_SPI_RXFIFO_DATA
);
189 sspi
->left_rx_word
--;
192 static void spi_sirfsoc_tx_word_u8(struct sirfsoc_spi
*sspi
)
195 const u8
*tx
= sspi
->tx
;
202 writel(data
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_DATA
);
203 sspi
->left_tx_word
--;
206 static void spi_sirfsoc_rx_word_u16(struct sirfsoc_spi
*sspi
)
211 data
= readl(sspi
->base
+ SIRFSOC_SPI_RXFIFO_DATA
);
218 sspi
->left_rx_word
--;
221 static void spi_sirfsoc_tx_word_u16(struct sirfsoc_spi
*sspi
)
224 const u16
*tx
= sspi
->tx
;
231 writel(data
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_DATA
);
232 sspi
->left_tx_word
--;
235 static void spi_sirfsoc_rx_word_u32(struct sirfsoc_spi
*sspi
)
240 data
= readl(sspi
->base
+ SIRFSOC_SPI_RXFIFO_DATA
);
247 sspi
->left_rx_word
--;
251 static void spi_sirfsoc_tx_word_u32(struct sirfsoc_spi
*sspi
)
254 const u32
*tx
= sspi
->tx
;
261 writel(data
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_DATA
);
262 sspi
->left_tx_word
--;
265 static irqreturn_t
spi_sirfsoc_irq(int irq
, void *dev_id
)
267 struct sirfsoc_spi
*sspi
= dev_id
;
268 u32 spi_stat
= readl(sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
269 if (sspi
->tx_by_cmd
&& (spi_stat
& SIRFSOC_SPI_FRM_END
)) {
270 complete(&sspi
->tx_done
);
271 writel(0x0, sspi
->base
+ SIRFSOC_SPI_INT_EN
);
272 writel(SIRFSOC_SPI_INT_MASK_ALL
,
273 sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
277 /* Error Conditions */
278 if (spi_stat
& SIRFSOC_SPI_RX_OFLOW
||
279 spi_stat
& SIRFSOC_SPI_TX_UFLOW
) {
280 complete(&sspi
->tx_done
);
281 complete(&sspi
->rx_done
);
282 writel(0x0, sspi
->base
+ SIRFSOC_SPI_INT_EN
);
283 writel(SIRFSOC_SPI_INT_MASK_ALL
,
284 sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
287 if (spi_stat
& SIRFSOC_SPI_TXFIFO_EMPTY
)
288 complete(&sspi
->tx_done
);
289 while (!(readl(sspi
->base
+ SIRFSOC_SPI_INT_STATUS
) &
290 SIRFSOC_SPI_RX_IO_DMA
))
292 complete(&sspi
->rx_done
);
293 writel(0x0, sspi
->base
+ SIRFSOC_SPI_INT_EN
);
294 writel(SIRFSOC_SPI_INT_MASK_ALL
,
295 sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
/* dmaengine completion callback: wake the completion passed as @data. */
static void spi_sirfsoc_dma_fini_callback(void *data)
{
	struct completion *dma_complete = data;

	complete(dma_complete);
}
307 static int spi_sirfsoc_cmd_transfer(struct spi_device
*spi
,
308 struct spi_transfer
*t
)
310 struct sirfsoc_spi
*sspi
;
311 int timeout
= t
->len
* 10;
314 sspi
= spi_master_get_devdata(spi
->master
);
315 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
316 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
317 memcpy(&cmd
, sspi
->tx
, t
->len
);
318 if (sspi
->word_width
== 1 && !(spi
->mode
& SPI_LSB_FIRST
))
319 cmd
= cpu_to_be32(cmd
) >>
320 ((SIRFSOC_MAX_CMD_BYTES
- t
->len
) * 8);
321 if (sspi
->word_width
== 2 && t
->len
== 4 &&
322 (!(spi
->mode
& SPI_LSB_FIRST
)))
323 cmd
= ((cmd
& 0xffff) << 16) | (cmd
>> 16);
324 writel(cmd
, sspi
->base
+ SIRFSOC_SPI_CMD
);
325 writel(SIRFSOC_SPI_FRM_END_INT_EN
,
326 sspi
->base
+ SIRFSOC_SPI_INT_EN
);
327 writel(SIRFSOC_SPI_CMD_TX_EN
,
328 sspi
->base
+ SIRFSOC_SPI_TX_RX_EN
);
329 if (wait_for_completion_timeout(&sspi
->tx_done
, timeout
) == 0) {
330 dev_err(&spi
->dev
, "cmd transfer timeout\n");
337 static void spi_sirfsoc_dma_transfer(struct spi_device
*spi
,
338 struct spi_transfer
*t
)
340 struct sirfsoc_spi
*sspi
;
341 struct dma_async_tx_descriptor
*rx_desc
, *tx_desc
;
342 int timeout
= t
->len
* 10;
344 sspi
= spi_master_get_devdata(spi
->master
);
345 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
346 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
347 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
348 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
349 writel(0, sspi
->base
+ SIRFSOC_SPI_INT_EN
);
350 writel(SIRFSOC_SPI_INT_MASK_ALL
, sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
351 if (sspi
->left_tx_word
< SIRFSOC_SPI_DAT_FRM_LEN_MAX
) {
352 writel(readl(sspi
->base
+ SIRFSOC_SPI_CTRL
) |
353 SIRFSOC_SPI_ENA_AUTO_CLR
| SIRFSOC_SPI_MUL_DAT_MODE
,
354 sspi
->base
+ SIRFSOC_SPI_CTRL
);
355 writel(sspi
->left_tx_word
- 1,
356 sspi
->base
+ SIRFSOC_SPI_TX_DMA_IO_LEN
);
357 writel(sspi
->left_tx_word
- 1,
358 sspi
->base
+ SIRFSOC_SPI_RX_DMA_IO_LEN
);
360 writel(readl(sspi
->base
+ SIRFSOC_SPI_CTRL
),
361 sspi
->base
+ SIRFSOC_SPI_CTRL
);
362 writel(0, sspi
->base
+ SIRFSOC_SPI_TX_DMA_IO_LEN
);
363 writel(0, sspi
->base
+ SIRFSOC_SPI_RX_DMA_IO_LEN
);
365 sspi
->dst_start
= dma_map_single(&spi
->dev
, sspi
->rx
, t
->len
,
366 (t
->tx_buf
!= t
->rx_buf
) ?
367 DMA_FROM_DEVICE
: DMA_BIDIRECTIONAL
);
368 rx_desc
= dmaengine_prep_slave_single(sspi
->rx_chan
,
369 sspi
->dst_start
, t
->len
, DMA_DEV_TO_MEM
,
370 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
371 rx_desc
->callback
= spi_sirfsoc_dma_fini_callback
;
372 rx_desc
->callback_param
= &sspi
->rx_done
;
374 sspi
->src_start
= dma_map_single(&spi
->dev
, (void *)sspi
->tx
, t
->len
,
375 (t
->tx_buf
!= t
->rx_buf
) ?
376 DMA_TO_DEVICE
: DMA_BIDIRECTIONAL
);
377 tx_desc
= dmaengine_prep_slave_single(sspi
->tx_chan
,
378 sspi
->src_start
, t
->len
, DMA_MEM_TO_DEV
,
379 DMA_PREP_INTERRUPT
| DMA_CTRL_ACK
);
380 tx_desc
->callback
= spi_sirfsoc_dma_fini_callback
;
381 tx_desc
->callback_param
= &sspi
->tx_done
;
383 dmaengine_submit(tx_desc
);
384 dmaengine_submit(rx_desc
);
385 dma_async_issue_pending(sspi
->tx_chan
);
386 dma_async_issue_pending(sspi
->rx_chan
);
387 writel(SIRFSOC_SPI_RX_EN
| SIRFSOC_SPI_TX_EN
,
388 sspi
->base
+ SIRFSOC_SPI_TX_RX_EN
);
389 if (wait_for_completion_timeout(&sspi
->rx_done
, timeout
) == 0) {
390 dev_err(&spi
->dev
, "transfer timeout\n");
391 dmaengine_terminate_all(sspi
->rx_chan
);
393 sspi
->left_rx_word
= 0;
395 * we only wait tx-done event if transferring by DMA. for PIO,
396 * we get rx data by writing tx data, so if rx is done, tx has
399 if (wait_for_completion_timeout(&sspi
->tx_done
, timeout
) == 0) {
400 dev_err(&spi
->dev
, "transfer timeout\n");
401 dmaengine_terminate_all(sspi
->tx_chan
);
403 dma_unmap_single(&spi
->dev
, sspi
->src_start
, t
->len
, DMA_TO_DEVICE
);
404 dma_unmap_single(&spi
->dev
, sspi
->dst_start
, t
->len
, DMA_FROM_DEVICE
);
405 /* TX, RX FIFO stop */
406 writel(0, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
407 writel(0, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
408 if (sspi
->left_tx_word
>= SIRFSOC_SPI_DAT_FRM_LEN_MAX
)
409 writel(0, sspi
->base
+ SIRFSOC_SPI_TX_RX_EN
);
412 static void spi_sirfsoc_pio_transfer(struct spi_device
*spi
,
413 struct spi_transfer
*t
)
415 struct sirfsoc_spi
*sspi
;
416 int timeout
= t
->len
* 10;
418 sspi
= spi_master_get_devdata(spi
->master
);
420 writel(SIRFSOC_SPI_FIFO_RESET
,
421 sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
422 writel(SIRFSOC_SPI_FIFO_RESET
,
423 sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
424 writel(SIRFSOC_SPI_FIFO_START
,
425 sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
426 writel(SIRFSOC_SPI_FIFO_START
,
427 sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
428 writel(0, sspi
->base
+ SIRFSOC_SPI_INT_EN
);
429 writel(SIRFSOC_SPI_INT_MASK_ALL
,
430 sspi
->base
+ SIRFSOC_SPI_INT_STATUS
);
431 writel(readl(sspi
->base
+ SIRFSOC_SPI_CTRL
) |
432 SIRFSOC_SPI_MUL_DAT_MODE
| SIRFSOC_SPI_ENA_AUTO_CLR
,
433 sspi
->base
+ SIRFSOC_SPI_CTRL
);
434 writel(min(sspi
->left_tx_word
, (u32
)(256 / sspi
->word_width
))
435 - 1, sspi
->base
+ SIRFSOC_SPI_TX_DMA_IO_LEN
);
436 writel(min(sspi
->left_rx_word
, (u32
)(256 / sspi
->word_width
))
437 - 1, sspi
->base
+ SIRFSOC_SPI_RX_DMA_IO_LEN
);
438 while (!((readl(sspi
->base
+ SIRFSOC_SPI_TXFIFO_STATUS
)
439 & SIRFSOC_SPI_FIFO_FULL
)) && sspi
->left_tx_word
)
441 writel(SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN
|
442 SIRFSOC_SPI_TX_UFLOW_INT_EN
|
443 SIRFSOC_SPI_RX_OFLOW_INT_EN
|
444 SIRFSOC_SPI_RX_IO_DMA_INT_EN
,
445 sspi
->base
+ SIRFSOC_SPI_INT_EN
);
446 writel(SIRFSOC_SPI_RX_EN
| SIRFSOC_SPI_TX_EN
,
447 sspi
->base
+ SIRFSOC_SPI_TX_RX_EN
);
448 if (!wait_for_completion_timeout(&sspi
->tx_done
, timeout
) ||
449 !wait_for_completion_timeout(&sspi
->rx_done
, timeout
)) {
450 dev_err(&spi
->dev
, "transfer timeout\n");
453 while (!((readl(sspi
->base
+ SIRFSOC_SPI_RXFIFO_STATUS
)
454 & SIRFSOC_SPI_FIFO_EMPTY
)) && sspi
->left_rx_word
)
456 writel(0, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
457 writel(0, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
458 } while (sspi
->left_tx_word
!= 0 || sspi
->left_rx_word
!= 0);
461 static int spi_sirfsoc_transfer(struct spi_device
*spi
, struct spi_transfer
*t
)
463 struct sirfsoc_spi
*sspi
;
464 sspi
= spi_master_get_devdata(spi
->master
);
466 sspi
->tx
= t
->tx_buf
? t
->tx_buf
: sspi
->dummypage
;
467 sspi
->rx
= t
->rx_buf
? t
->rx_buf
: sspi
->dummypage
;
468 sspi
->left_tx_word
= sspi
->left_rx_word
= t
->len
/ sspi
->word_width
;
469 reinit_completion(&sspi
->rx_done
);
470 reinit_completion(&sspi
->tx_done
);
472 * in the transfer, if transfer data using command register with rx_buf
473 * null, just fill command data into command register and wait for its
477 spi_sirfsoc_cmd_transfer(spi
, t
);
478 else if (IS_DMA_VALID(t
))
479 spi_sirfsoc_dma_transfer(spi
, t
);
481 spi_sirfsoc_pio_transfer(spi
, t
);
483 return t
->len
- sspi
->left_rx_word
* sspi
->word_width
;
486 static void spi_sirfsoc_chipselect(struct spi_device
*spi
, int value
)
488 struct sirfsoc_spi
*sspi
= spi_master_get_devdata(spi
->master
);
490 if (sspi
->chipselect
[spi
->chip_select
] == 0) {
491 u32 regval
= readl(sspi
->base
+ SIRFSOC_SPI_CTRL
);
493 case BITBANG_CS_ACTIVE
:
494 if (spi
->mode
& SPI_CS_HIGH
)
495 regval
|= SIRFSOC_SPI_CS_IO_OUT
;
497 regval
&= ~SIRFSOC_SPI_CS_IO_OUT
;
499 case BITBANG_CS_INACTIVE
:
500 if (spi
->mode
& SPI_CS_HIGH
)
501 regval
&= ~SIRFSOC_SPI_CS_IO_OUT
;
503 regval
|= SIRFSOC_SPI_CS_IO_OUT
;
506 writel(regval
, sspi
->base
+ SIRFSOC_SPI_CTRL
);
508 int gpio
= sspi
->chipselect
[spi
->chip_select
];
510 case BITBANG_CS_ACTIVE
:
511 gpio_direction_output(gpio
,
512 spi
->mode
& SPI_CS_HIGH
? 1 : 0);
514 case BITBANG_CS_INACTIVE
:
515 gpio_direction_output(gpio
,
516 spi
->mode
& SPI_CS_HIGH
? 0 : 1);
523 spi_sirfsoc_setup_transfer(struct spi_device
*spi
, struct spi_transfer
*t
)
525 struct sirfsoc_spi
*sspi
;
526 u8 bits_per_word
= 0;
529 u32 txfifo_ctrl
, rxfifo_ctrl
;
530 u32 fifo_size
= SIRFSOC_SPI_FIFO_SIZE
/ 4;
532 sspi
= spi_master_get_devdata(spi
->master
);
534 bits_per_word
= (t
) ? t
->bits_per_word
: spi
->bits_per_word
;
535 hz
= t
&& t
->speed_hz
? t
->speed_hz
: spi
->max_speed_hz
;
537 regval
= (sspi
->ctrl_freq
/ (2 * hz
)) - 1;
538 if (regval
> 0xFFFF || regval
< 0) {
539 dev_err(&spi
->dev
, "Speed %d not supported\n", hz
);
543 switch (bits_per_word
) {
545 regval
|= SIRFSOC_SPI_TRAN_DAT_FORMAT_8
;
546 sspi
->rx_word
= spi_sirfsoc_rx_word_u8
;
547 sspi
->tx_word
= spi_sirfsoc_tx_word_u8
;
551 regval
|= (bits_per_word
== 12) ?
552 SIRFSOC_SPI_TRAN_DAT_FORMAT_12
:
553 SIRFSOC_SPI_TRAN_DAT_FORMAT_16
;
554 sspi
->rx_word
= spi_sirfsoc_rx_word_u16
;
555 sspi
->tx_word
= spi_sirfsoc_tx_word_u16
;
558 regval
|= SIRFSOC_SPI_TRAN_DAT_FORMAT_32
;
559 sspi
->rx_word
= spi_sirfsoc_rx_word_u32
;
560 sspi
->tx_word
= spi_sirfsoc_tx_word_u32
;
566 sspi
->word_width
= DIV_ROUND_UP(bits_per_word
, 8);
567 txfifo_ctrl
= SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE
/ 2) |
569 rxfifo_ctrl
= SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE
/ 2) |
572 if (!(spi
->mode
& SPI_CS_HIGH
))
573 regval
|= SIRFSOC_SPI_CS_IDLE_STAT
;
574 if (!(spi
->mode
& SPI_LSB_FIRST
))
575 regval
|= SIRFSOC_SPI_TRAN_MSB
;
576 if (spi
->mode
& SPI_CPOL
)
577 regval
|= SIRFSOC_SPI_CLK_IDLE_STAT
;
580 * Data should be driven at least 1/2 cycle before the fetch edge
581 * to make sure that data gets stable at the fetch edge.
583 if (((spi
->mode
& SPI_CPOL
) && (spi
->mode
& SPI_CPHA
)) ||
584 (!(spi
->mode
& SPI_CPOL
) && !(spi
->mode
& SPI_CPHA
)))
585 regval
&= ~SIRFSOC_SPI_DRV_POS_EDGE
;
587 regval
|= SIRFSOC_SPI_DRV_POS_EDGE
;
589 writel(SIRFSOC_SPI_FIFO_SC(fifo_size
- 2) |
590 SIRFSOC_SPI_FIFO_LC(fifo_size
/ 2) |
591 SIRFSOC_SPI_FIFO_HC(2),
592 sspi
->base
+ SIRFSOC_SPI_TXFIFO_LEVEL_CHK
);
593 writel(SIRFSOC_SPI_FIFO_SC(2) |
594 SIRFSOC_SPI_FIFO_LC(fifo_size
/ 2) |
595 SIRFSOC_SPI_FIFO_HC(fifo_size
- 2),
596 sspi
->base
+ SIRFSOC_SPI_RXFIFO_LEVEL_CHK
);
597 writel(txfifo_ctrl
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_CTRL
);
598 writel(rxfifo_ctrl
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_CTRL
);
600 if (t
&& t
->tx_buf
&& !t
->rx_buf
&& (t
->len
<= SIRFSOC_MAX_CMD_BYTES
)) {
601 regval
|= (SIRFSOC_SPI_CMD_BYTE_NUM((t
->len
- 1)) |
602 SIRFSOC_SPI_CMD_MODE
);
603 sspi
->tx_by_cmd
= true;
605 regval
&= ~SIRFSOC_SPI_CMD_MODE
;
606 sspi
->tx_by_cmd
= false;
609 * set spi controller in RISC chipselect mode, we are controlling CS by
610 * software BITBANG_CS_ACTIVE and BITBANG_CS_INACTIVE.
612 regval
|= SIRFSOC_SPI_CS_IO_MODE
;
613 writel(regval
, sspi
->base
+ SIRFSOC_SPI_CTRL
);
615 if (IS_DMA_VALID(t
)) {
616 /* Enable DMA mode for RX, TX */
617 writel(0, sspi
->base
+ SIRFSOC_SPI_TX_DMA_IO_CTRL
);
618 writel(SIRFSOC_SPI_RX_DMA_FLUSH
,
619 sspi
->base
+ SIRFSOC_SPI_RX_DMA_IO_CTRL
);
621 /* Enable IO mode for RX, TX */
622 writel(SIRFSOC_SPI_IO_MODE_SEL
,
623 sspi
->base
+ SIRFSOC_SPI_TX_DMA_IO_CTRL
);
624 writel(SIRFSOC_SPI_IO_MODE_SEL
,
625 sspi
->base
+ SIRFSOC_SPI_RX_DMA_IO_CTRL
);
631 static int spi_sirfsoc_setup(struct spi_device
*spi
)
633 if (!spi
->max_speed_hz
)
636 return spi_sirfsoc_setup_transfer(spi
, NULL
);
639 static int spi_sirfsoc_probe(struct platform_device
*pdev
)
641 struct sirfsoc_spi
*sspi
;
642 struct spi_master
*master
;
643 struct resource
*mem_res
;
644 int num_cs
, cs_gpio
, irq
;
648 ret
= of_property_read_u32(pdev
->dev
.of_node
,
649 "sirf,spi-num-chipselects", &num_cs
);
651 dev_err(&pdev
->dev
, "Unable to get chip select number\n");
655 master
= spi_alloc_master(&pdev
->dev
,
656 sizeof(*sspi
) + sizeof(int) * num_cs
);
658 dev_err(&pdev
->dev
, "Unable to allocate SPI master\n");
661 platform_set_drvdata(pdev
, master
);
662 sspi
= spi_master_get_devdata(master
);
664 master
->num_chipselect
= num_cs
;
666 for (i
= 0; i
< master
->num_chipselect
; i
++) {
667 cs_gpio
= of_get_named_gpio(pdev
->dev
.of_node
, "cs-gpios", i
);
669 dev_err(&pdev
->dev
, "can't get cs gpio from DT\n");
674 sspi
->chipselect
[i
] = cs_gpio
;
676 continue; /* use cs from spi controller */
678 ret
= gpio_request(cs_gpio
, DRIVER_NAME
);
682 if (sspi
->chipselect
[i
] > 0)
683 gpio_free(sspi
->chipselect
[i
]);
685 dev_err(&pdev
->dev
, "fail to request cs gpios\n");
690 mem_res
= platform_get_resource(pdev
, IORESOURCE_MEM
, 0);
691 sspi
->base
= devm_ioremap_resource(&pdev
->dev
, mem_res
);
692 if (IS_ERR(sspi
->base
)) {
693 ret
= PTR_ERR(sspi
->base
);
697 irq
= platform_get_irq(pdev
, 0);
702 ret
= devm_request_irq(&pdev
->dev
, irq
, spi_sirfsoc_irq
, 0,
707 sspi
->bitbang
.master
= master
;
708 sspi
->bitbang
.chipselect
= spi_sirfsoc_chipselect
;
709 sspi
->bitbang
.setup_transfer
= spi_sirfsoc_setup_transfer
;
710 sspi
->bitbang
.txrx_bufs
= spi_sirfsoc_transfer
;
711 sspi
->bitbang
.master
->setup
= spi_sirfsoc_setup
;
712 master
->bus_num
= pdev
->id
;
713 master
->mode_bits
= SPI_CPOL
| SPI_CPHA
| SPI_LSB_FIRST
| SPI_CS_HIGH
;
714 master
->bits_per_word_mask
= SPI_BPW_MASK(8) | SPI_BPW_MASK(12) |
715 SPI_BPW_MASK(16) | SPI_BPW_MASK(32);
716 sspi
->bitbang
.master
->dev
.of_node
= pdev
->dev
.of_node
;
718 /* request DMA channels */
719 sspi
->rx_chan
= dma_request_slave_channel(&pdev
->dev
, "rx");
720 if (!sspi
->rx_chan
) {
721 dev_err(&pdev
->dev
, "can not allocate rx dma channel\n");
725 sspi
->tx_chan
= dma_request_slave_channel(&pdev
->dev
, "tx");
726 if (!sspi
->tx_chan
) {
727 dev_err(&pdev
->dev
, "can not allocate tx dma channel\n");
732 sspi
->clk
= clk_get(&pdev
->dev
, NULL
);
733 if (IS_ERR(sspi
->clk
)) {
734 ret
= PTR_ERR(sspi
->clk
);
737 clk_prepare_enable(sspi
->clk
);
738 sspi
->ctrl_freq
= clk_get_rate(sspi
->clk
);
740 init_completion(&sspi
->rx_done
);
741 init_completion(&sspi
->tx_done
);
743 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
744 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
745 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
746 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
747 /* We are not using dummy delay between command and data */
748 writel(0, sspi
->base
+ SIRFSOC_SPI_DUMMY_DELAY_CTL
);
750 sspi
->dummypage
= kmalloc(2 * PAGE_SIZE
, GFP_KERNEL
);
751 if (!sspi
->dummypage
) {
756 ret
= spi_bitbang_start(&sspi
->bitbang
);
760 dev_info(&pdev
->dev
, "registerred, bus number = %d\n", master
->bus_num
);
764 kfree(sspi
->dummypage
);
766 clk_disable_unprepare(sspi
->clk
);
769 dma_release_channel(sspi
->tx_chan
);
771 dma_release_channel(sspi
->rx_chan
);
773 spi_master_put(master
);
778 static int spi_sirfsoc_remove(struct platform_device
*pdev
)
780 struct spi_master
*master
;
781 struct sirfsoc_spi
*sspi
;
784 master
= platform_get_drvdata(pdev
);
785 sspi
= spi_master_get_devdata(master
);
787 spi_bitbang_stop(&sspi
->bitbang
);
788 for (i
= 0; i
< master
->num_chipselect
; i
++) {
789 if (sspi
->chipselect
[i
] > 0)
790 gpio_free(sspi
->chipselect
[i
]);
792 kfree(sspi
->dummypage
);
793 clk_disable_unprepare(sspi
->clk
);
795 dma_release_channel(sspi
->rx_chan
);
796 dma_release_channel(sspi
->tx_chan
);
797 spi_master_put(master
);
801 #ifdef CONFIG_PM_SLEEP
802 static int spi_sirfsoc_suspend(struct device
*dev
)
804 struct spi_master
*master
= dev_get_drvdata(dev
);
805 struct sirfsoc_spi
*sspi
= spi_master_get_devdata(master
);
808 ret
= spi_master_suspend(master
);
812 clk_disable(sspi
->clk
);
816 static int spi_sirfsoc_resume(struct device
*dev
)
818 struct spi_master
*master
= dev_get_drvdata(dev
);
819 struct sirfsoc_spi
*sspi
= spi_master_get_devdata(master
);
821 clk_enable(sspi
->clk
);
822 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
823 writel(SIRFSOC_SPI_FIFO_RESET
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
824 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_RXFIFO_OP
);
825 writel(SIRFSOC_SPI_FIFO_START
, sspi
->base
+ SIRFSOC_SPI_TXFIFO_OP
);
827 return spi_master_resume(master
);
static SIMPLE_DEV_PM_OPS(spi_sirfsoc_pm_ops, spi_sirfsoc_suspend,
			 spi_sirfsoc_resume);
834 static const struct of_device_id spi_sirfsoc_of_match
[] = {
835 { .compatible
= "sirf,prima2-spi", },
836 { .compatible
= "sirf,marco-spi", },
839 MODULE_DEVICE_TABLE(of
, spi_sirfsoc_of_match
);
841 static struct platform_driver spi_sirfsoc_driver
= {
844 .owner
= THIS_MODULE
,
845 .pm
= &spi_sirfsoc_pm_ops
,
846 .of_match_table
= spi_sirfsoc_of_match
,
848 .probe
= spi_sirfsoc_probe
,
849 .remove
= spi_sirfsoc_remove
,
851 module_platform_driver(spi_sirfsoc_driver
);
852 MODULE_DESCRIPTION("SiRF SoC SPI master driver");
853 MODULE_AUTHOR("Zhiwu Song <Zhiwu.Song@csr.com>");
854 MODULE_AUTHOR("Barry Song <Baohua.Song@csr.com>");
855 MODULE_LICENSE("GPL v2");