/*
 * SPI bus driver for CSR SiRFprimaII
 *
 * Copyright (c) 2011 Cambridge Silicon Radio Limited, a CSR plc group company.
 *
 * Licensed under GPLv2 or later.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/of_gpio.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi_bitbang.h>
#include <linux/dmaengine.h>
#include <linux/dma-direction.h>
#include <linux/dma-mapping.h>

#define DRIVER_NAME "sirfsoc_spi"

#define SIRFSOC_SPI_CTRL 0x0000
#define SIRFSOC_SPI_CMD 0x0004
#define SIRFSOC_SPI_TX_RX_EN 0x0008
#define SIRFSOC_SPI_INT_EN 0x000C
#define SIRFSOC_SPI_INT_STATUS 0x0010
#define SIRFSOC_SPI_TX_DMA_IO_CTRL 0x0100
#define SIRFSOC_SPI_TX_DMA_IO_LEN 0x0104
#define SIRFSOC_SPI_TXFIFO_CTRL 0x0108
#define SIRFSOC_SPI_TXFIFO_LEVEL_CHK 0x010C
#define SIRFSOC_SPI_TXFIFO_OP 0x0110
#define SIRFSOC_SPI_TXFIFO_STATUS 0x0114
#define SIRFSOC_SPI_TXFIFO_DATA 0x0118
#define SIRFSOC_SPI_RX_DMA_IO_CTRL 0x0120
#define SIRFSOC_SPI_RX_DMA_IO_LEN 0x0124
#define SIRFSOC_SPI_RXFIFO_CTRL 0x0128
#define SIRFSOC_SPI_RXFIFO_LEVEL_CHK 0x012C
#define SIRFSOC_SPI_RXFIFO_OP 0x0130
#define SIRFSOC_SPI_RXFIFO_STATUS 0x0134
#define SIRFSOC_SPI_RXFIFO_DATA 0x0138
#define SIRFSOC_SPI_DUMMY_DELAY_CTL 0x0144

/* SPI CTRL register defines */
#define SIRFSOC_SPI_SLV_MODE BIT(16)
#define SIRFSOC_SPI_CMD_MODE BIT(17)
#define SIRFSOC_SPI_CS_IO_OUT BIT(18)
#define SIRFSOC_SPI_CS_IO_MODE BIT(19)
#define SIRFSOC_SPI_CLK_IDLE_STAT BIT(20)
#define SIRFSOC_SPI_CS_IDLE_STAT BIT(21)
#define SIRFSOC_SPI_TRAN_MSB BIT(22)
#define SIRFSOC_SPI_DRV_POS_EDGE BIT(23)
#define SIRFSOC_SPI_CS_HOLD_TIME BIT(24)
#define SIRFSOC_SPI_CLK_SAMPLE_MODE BIT(25)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_8 (0 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_12 (1 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_16 (2 << 26)
#define SIRFSOC_SPI_TRAN_DAT_FORMAT_32 (3 << 26)
#define SIRFSOC_SPI_CMD_BYTE_NUM(x) (((x) & 3) << 28)
#define SIRFSOC_SPI_ENA_AUTO_CLR BIT(30)
#define SIRFSOC_SPI_MUL_DAT_MODE BIT(31)

/* Interrupt Enable */
#define SIRFSOC_SPI_RX_DONE_INT_EN BIT(0)
#define SIRFSOC_SPI_TX_DONE_INT_EN BIT(1)
#define SIRFSOC_SPI_RX_OFLOW_INT_EN BIT(2)
#define SIRFSOC_SPI_TX_UFLOW_INT_EN BIT(3)
#define SIRFSOC_SPI_RX_IO_DMA_INT_EN BIT(4)
#define SIRFSOC_SPI_TX_IO_DMA_INT_EN BIT(5)
#define SIRFSOC_SPI_RXFIFO_FULL_INT_EN BIT(6)
#define SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN BIT(7)
#define SIRFSOC_SPI_RXFIFO_THD_INT_EN BIT(8)
#define SIRFSOC_SPI_TXFIFO_THD_INT_EN BIT(9)
#define SIRFSOC_SPI_FRM_END_INT_EN BIT(10)

#define SIRFSOC_SPI_INT_MASK_ALL 0x1FFF

/* Interrupt status */
#define SIRFSOC_SPI_RX_DONE BIT(0)
#define SIRFSOC_SPI_TX_DONE BIT(1)
#define SIRFSOC_SPI_RX_OFLOW BIT(2)
#define SIRFSOC_SPI_TX_UFLOW BIT(3)
#define SIRFSOC_SPI_RX_IO_DMA BIT(4)
#define SIRFSOC_SPI_RX_FIFO_FULL BIT(6)
#define SIRFSOC_SPI_TXFIFO_EMPTY BIT(7)
#define SIRFSOC_SPI_RXFIFO_THD_REACH BIT(8)
#define SIRFSOC_SPI_TXFIFO_THD_REACH BIT(9)
#define SIRFSOC_SPI_FRM_END BIT(10)

/* TX RX enable */
#define SIRFSOC_SPI_RX_EN BIT(0)
#define SIRFSOC_SPI_TX_EN BIT(1)
#define SIRFSOC_SPI_CMD_TX_EN BIT(2)

#define SIRFSOC_SPI_IO_MODE_SEL BIT(0)
#define SIRFSOC_SPI_RX_DMA_FLUSH BIT(2)

/* FIFO OPs */
#define SIRFSOC_SPI_FIFO_RESET BIT(0)
#define SIRFSOC_SPI_FIFO_START BIT(1)

/* FIFO CTRL */
#define SIRFSOC_SPI_FIFO_WIDTH_BYTE (0 << 0)
#define SIRFSOC_SPI_FIFO_WIDTH_WORD (1 << 0)
#define SIRFSOC_SPI_FIFO_WIDTH_DWORD (2 << 0)

/* FIFO Status */
#define SIRFSOC_SPI_FIFO_LEVEL_MASK 0xFF
#define SIRFSOC_SPI_FIFO_FULL BIT(8)
#define SIRFSOC_SPI_FIFO_EMPTY BIT(9)

/* 256 bytes rx/tx FIFO */
#define SIRFSOC_SPI_FIFO_SIZE 256
#define SIRFSOC_SPI_DAT_FRM_LEN_MAX (64 * 1024)

#define SIRFSOC_SPI_FIFO_SC(x) ((x) & 0x3F)
#define SIRFSOC_SPI_FIFO_LC(x) (((x) & 0x3F) << 10)
#define SIRFSOC_SPI_FIFO_HC(x) (((x) & 0x3F) << 20)
#define SIRFSOC_SPI_FIFO_THD(x) (((x) & 0xFF) << 2)
/*
 * Only when the rx/tx buffers and the transfer length are 4-byte aligned
 * can we use DMA, due to a limitation of the DMA controller.
 */

#define ALIGNED(x) (!((u32)(x) & 0x3))
#define IS_DMA_VALID(x) (x && ALIGNED(x->tx_buf) && ALIGNED(x->rx_buf) && \
	ALIGNED(x->len) && (x->len < 2 * PAGE_SIZE))

#define SIRFSOC_MAX_CMD_BYTES 4

struct sirfsoc_spi {
	struct spi_bitbang bitbang;
	struct completion rx_done;
	struct completion tx_done;

	void __iomem *base;
	u32 ctrl_freq; /* SPI controller clock speed */
	struct clk *clk;

	/* rx & tx bufs from the spi_transfer */
	const void *tx;
	void *rx;

	/* place received word into rx buffer */
	void (*rx_word) (struct sirfsoc_spi *);
	/* get word from tx buffer for sending */
	void (*tx_word) (struct sirfsoc_spi *);

	/* number of words left to be transmitted/received */
	unsigned int left_tx_word;
	unsigned int left_rx_word;

	/* rx & tx DMA channels */
	struct dma_chan *rx_chan;
	struct dma_chan *tx_chan;
	dma_addr_t src_start;
	dma_addr_t dst_start;
	void *dummypage;
	int word_width; /* in bytes */

	/*
	 * if the tx length is no more than 4 bytes and there is no rx
	 * buffer, transfer the data through the command register
	 */
	bool tx_by_cmd;

	int chipselect[0];
};

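/*
 * PIO helpers: move a single word between the spi_transfer buffers and the
 * RX/TX FIFO data registers, for 8-, 16- and 32-bit word widths.
 */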
static void spi_sirfsoc_rx_word_u8(struct sirfsoc_spi *sspi)
{
	u32 data;
	u8 *rx = sspi->rx;

	data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);

	if (rx) {
		*rx++ = (u8) data;
		sspi->rx = rx;
	}

	sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u8(struct sirfsoc_spi *sspi)
{
	u32 data = 0;
	const u8 *tx = sspi->tx;

	if (tx) {
		data = *tx++;
		sspi->tx = tx;
	}

	writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
	sspi->left_tx_word--;
}

static void spi_sirfsoc_rx_word_u16(struct sirfsoc_spi *sspi)
{
	u32 data;
	u16 *rx = sspi->rx;

	data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);

	if (rx) {
		*rx++ = (u16) data;
		sspi->rx = rx;
	}

	sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u16(struct sirfsoc_spi *sspi)
{
	u32 data = 0;
	const u16 *tx = sspi->tx;

	if (tx) {
		data = *tx++;
		sspi->tx = tx;
	}

	writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
	sspi->left_tx_word--;
}

static void spi_sirfsoc_rx_word_u32(struct sirfsoc_spi *sspi)
{
	u32 data;
	u32 *rx = sspi->rx;

	data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);

	if (rx) {
		*rx++ = (u32) data;
		sspi->rx = rx;
	}

	sspi->left_rx_word--;
}

static void spi_sirfsoc_tx_word_u32(struct sirfsoc_spi *sspi)
{
	u32 data = 0;
	const u32 *tx = sspi->tx;

	if (tx) {
		data = *tx++;
		sspi->tx = tx;
	}

	writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
	sspi->left_tx_word--;
}

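/*
 * Interrupt handler: a command-mode transfer completes on frame end; an RX
 * overflow or TX underflow aborts the transfer; otherwise TXFIFO_EMPTY and
 * RX_IO_DMA mark tx/rx completion. In every case the interrupt enables are
 * cleared and the status bits are acked before returning.
 */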
static irqreturn_t spi_sirfsoc_irq(int irq, void *dev_id)
{
	struct sirfsoc_spi *sspi = dev_id;
	u32 spi_stat = readl(sspi->base + SIRFSOC_SPI_INT_STATUS);
	if (sspi->tx_by_cmd && (spi_stat & SIRFSOC_SPI_FRM_END)) {
		complete(&sspi->tx_done);
		writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
		writel(SIRFSOC_SPI_INT_MASK_ALL,
				sspi->base + SIRFSOC_SPI_INT_STATUS);
		return IRQ_HANDLED;
	}

	/* Error Conditions */
	if (spi_stat & SIRFSOC_SPI_RX_OFLOW ||
			spi_stat & SIRFSOC_SPI_TX_UFLOW) {
		complete(&sspi->tx_done);
		complete(&sspi->rx_done);
		writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
		writel(SIRFSOC_SPI_INT_MASK_ALL,
				sspi->base + SIRFSOC_SPI_INT_STATUS);
		return IRQ_HANDLED;
	}
	if (spi_stat & SIRFSOC_SPI_TXFIFO_EMPTY)
		complete(&sspi->tx_done);
	while (!(readl(sspi->base + SIRFSOC_SPI_INT_STATUS) &
			SIRFSOC_SPI_RX_IO_DMA))
		cpu_relax();
	complete(&sspi->rx_done);
	writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
	writel(SIRFSOC_SPI_INT_MASK_ALL,
			sspi->base + SIRFSOC_SPI_INT_STATUS);

	return IRQ_HANDLED;
}

static void spi_sirfsoc_dma_fini_callback(void *data)
{
	struct completion *dma_complete = data;

	complete(dma_complete);
}

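/*
 * Command-mode transfer: pack up to SIRFSOC_MAX_CMD_BYTES of tx data into
 * the CMD register (swapped as needed for MSB-first modes), enable command
 * TX and wait for the frame-end interrupt.
 */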
static int spi_sirfsoc_cmd_transfer(struct spi_device *spi,
	struct spi_transfer *t)
{
	struct sirfsoc_spi *sspi;
	int timeout = t->len * 10;
	u32 cmd;

	sspi = spi_master_get_devdata(spi->master);
	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	memcpy(&cmd, sspi->tx, t->len);
	if (sspi->word_width == 1 && !(spi->mode & SPI_LSB_FIRST))
		cmd = cpu_to_be32(cmd) >>
			((SIRFSOC_MAX_CMD_BYTES - t->len) * 8);
	if (sspi->word_width == 2 && t->len == 4 &&
			(!(spi->mode & SPI_LSB_FIRST)))
		cmd = ((cmd & 0xffff) << 16) | (cmd >> 16);
	writel(cmd, sspi->base + SIRFSOC_SPI_CMD);
	writel(SIRFSOC_SPI_FRM_END_INT_EN,
		sspi->base + SIRFSOC_SPI_INT_EN);
	writel(SIRFSOC_SPI_CMD_TX_EN,
		sspi->base + SIRFSOC_SPI_TX_RX_EN);
	if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
		dev_err(&spi->dev, "cmd transfer timeout\n");
		return 0;
	}

	return t->len;
}

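/*
 * DMA transfer: map the rx/tx buffers, prepare and submit slave
 * descriptors on both channels, then wait for the rx and tx completions
 * (terminating the channels on timeout) before unmapping the buffers and
 * stopping the FIFOs.
 */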
static void spi_sirfsoc_dma_transfer(struct spi_device *spi,
	struct spi_transfer *t)
{
	struct sirfsoc_spi *sspi;
	struct dma_async_tx_descriptor *rx_desc, *tx_desc;
	int timeout = t->len * 10;

	sspi = spi_master_get_devdata(spi->master);
	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
	writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);
	if (sspi->left_tx_word < SIRFSOC_SPI_DAT_FRM_LEN_MAX) {
		writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
			SIRFSOC_SPI_ENA_AUTO_CLR | SIRFSOC_SPI_MUL_DAT_MODE,
			sspi->base + SIRFSOC_SPI_CTRL);
		writel(sspi->left_tx_word - 1,
				sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
		writel(sspi->left_tx_word - 1,
				sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
	} else {
		writel(readl(sspi->base + SIRFSOC_SPI_CTRL),
			sspi->base + SIRFSOC_SPI_CTRL);
		writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
		writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
	}
	sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len,
					(t->tx_buf != t->rx_buf) ?
					DMA_FROM_DEVICE : DMA_BIDIRECTIONAL);
	rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
		sspi->dst_start, t->len, DMA_DEV_TO_MEM,
		DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	rx_desc->callback = spi_sirfsoc_dma_fini_callback;
	rx_desc->callback_param = &sspi->rx_done;

	sspi->src_start = dma_map_single(&spi->dev, (void *)sspi->tx, t->len,
					(t->tx_buf != t->rx_buf) ?
					DMA_TO_DEVICE : DMA_BIDIRECTIONAL);
	tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
		sspi->src_start, t->len, DMA_MEM_TO_DEV,
		DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	tx_desc->callback = spi_sirfsoc_dma_fini_callback;
	tx_desc->callback_param = &sspi->tx_done;

	dmaengine_submit(tx_desc);
	dmaengine_submit(rx_desc);
	dma_async_issue_pending(sspi->tx_chan);
	dma_async_issue_pending(sspi->rx_chan);
	writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
			sspi->base + SIRFSOC_SPI_TX_RX_EN);
	if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
		dev_err(&spi->dev, "transfer timeout\n");
		dmaengine_terminate_all(sspi->rx_chan);
	} else
		sspi->left_rx_word = 0;
	/*
	 * we only wait for the tx-done event when transferring by DMA; for
	 * PIO, rx data is produced by clocking out tx data, so when rx is
	 * done, tx has finished earlier
	 */
	if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
		dev_err(&spi->dev, "transfer timeout\n");
		dmaengine_terminate_all(sspi->tx_chan);
	}
	dma_unmap_single(&spi->dev, sspi->src_start, t->len, DMA_TO_DEVICE);
	dma_unmap_single(&spi->dev, sspi->dst_start, t->len, DMA_FROM_DEVICE);
	/* TX, RX FIFO stop */
	writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
	writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	if (sspi->left_tx_word >= SIRFSOC_SPI_DAT_FRM_LEN_MAX)
		writel(0, sspi->base + SIRFSOC_SPI_TX_RX_EN);
}

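/*
 * PIO transfer: fill the TX FIFO from the tx buffer, enable the controller
 * and wait for the completions signalled by the interrupt handler, then
 * drain the RX FIFO; loop until all words have been transferred.
 */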
static void spi_sirfsoc_pio_transfer(struct spi_device *spi,
		struct spi_transfer *t)
{
	struct sirfsoc_spi *sspi;
	int timeout = t->len * 10;

	sspi = spi_master_get_devdata(spi->master);
	do {
		writel(SIRFSOC_SPI_FIFO_RESET,
			sspi->base + SIRFSOC_SPI_RXFIFO_OP);
		writel(SIRFSOC_SPI_FIFO_RESET,
			sspi->base + SIRFSOC_SPI_TXFIFO_OP);
		writel(SIRFSOC_SPI_FIFO_START,
			sspi->base + SIRFSOC_SPI_RXFIFO_OP);
		writel(SIRFSOC_SPI_FIFO_START,
			sspi->base + SIRFSOC_SPI_TXFIFO_OP);
		writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
		writel(SIRFSOC_SPI_INT_MASK_ALL,
			sspi->base + SIRFSOC_SPI_INT_STATUS);
		writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
			SIRFSOC_SPI_MUL_DAT_MODE | SIRFSOC_SPI_ENA_AUTO_CLR,
			sspi->base + SIRFSOC_SPI_CTRL);
		writel(min(sspi->left_tx_word, (u32)(256 / sspi->word_width))
				- 1, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
		writel(min(sspi->left_rx_word, (u32)(256 / sspi->word_width))
				- 1, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
		while (!((readl(sspi->base + SIRFSOC_SPI_TXFIFO_STATUS)
			& SIRFSOC_SPI_FIFO_FULL)) && sspi->left_tx_word)
			sspi->tx_word(sspi);
		writel(SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN |
			SIRFSOC_SPI_TX_UFLOW_INT_EN |
			SIRFSOC_SPI_RX_OFLOW_INT_EN |
			SIRFSOC_SPI_RX_IO_DMA_INT_EN,
			sspi->base + SIRFSOC_SPI_INT_EN);
		writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
			sspi->base + SIRFSOC_SPI_TX_RX_EN);
		if (!wait_for_completion_timeout(&sspi->tx_done, timeout) ||
			!wait_for_completion_timeout(&sspi->rx_done, timeout)) {
			dev_err(&spi->dev, "transfer timeout\n");
			break;
		}
		while (!((readl(sspi->base + SIRFSOC_SPI_RXFIFO_STATUS)
			& SIRFSOC_SPI_FIFO_EMPTY)) && sspi->left_rx_word)
			sspi->rx_word(sspi);
		writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
		writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	} while (sspi->left_tx_word != 0 || sspi->left_rx_word != 0);
}

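/*
 * bitbang txrx_bufs hook: choose the command-register, DMA or PIO path for
 * this spi_transfer and return the number of bytes actually transferred.
 */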
static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
{
	struct sirfsoc_spi *sspi;
	sspi = spi_master_get_devdata(spi->master);

	sspi->tx = t->tx_buf ? t->tx_buf : sspi->dummypage;
	sspi->rx = t->rx_buf ? t->rx_buf : sspi->dummypage;
	sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
	reinit_completion(&sspi->rx_done);
	reinit_completion(&sspi->tx_done);
	/*
	 * if the data goes out through the command register (no rx_buf),
	 * just fill the command register and wait for the transfer to
	 * complete.
	 */
	if (sspi->tx_by_cmd)
		spi_sirfsoc_cmd_transfer(spi, t);
	else if (IS_DMA_VALID(t))
		spi_sirfsoc_dma_transfer(spi, t);
	else
		spi_sirfsoc_pio_transfer(spi, t);

	return t->len - sspi->left_rx_word * sspi->word_width;
}

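/*
 * bitbang chipselect hook: drive CS through the controller's CS_IO_OUT bit
 * when the chipselect[] entry is 0, otherwise through the CS GPIO stored
 * there, honouring SPI_CS_HIGH.
 */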
static void spi_sirfsoc_chipselect(struct spi_device *spi, int value)
{
	struct sirfsoc_spi *sspi = spi_master_get_devdata(spi->master);

	if (sspi->chipselect[spi->chip_select] == 0) {
		u32 regval = readl(sspi->base + SIRFSOC_SPI_CTRL);
		switch (value) {
		case BITBANG_CS_ACTIVE:
			if (spi->mode & SPI_CS_HIGH)
				regval |= SIRFSOC_SPI_CS_IO_OUT;
			else
				regval &= ~SIRFSOC_SPI_CS_IO_OUT;
			break;
		case BITBANG_CS_INACTIVE:
			if (spi->mode & SPI_CS_HIGH)
				regval &= ~SIRFSOC_SPI_CS_IO_OUT;
			else
				regval |= SIRFSOC_SPI_CS_IO_OUT;
			break;
		}
		writel(regval, sspi->base + SIRFSOC_SPI_CTRL);
	} else {
		int gpio = sspi->chipselect[spi->chip_select];
		switch (value) {
		case BITBANG_CS_ACTIVE:
			gpio_direction_output(gpio,
					spi->mode & SPI_CS_HIGH ? 1 : 0);
			break;
		case BITBANG_CS_INACTIVE:
			gpio_direction_output(gpio,
					spi->mode & SPI_CS_HIGH ? 0 : 1);
			break;
		}
	}
}

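/*
 * Program the clock divider, word width, mode bits and FIFO thresholds for
 * a transfer (or for the device defaults when t is NULL), and select
 * command, DMA or IO mode for the data path.
 */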
static int
spi_sirfsoc_setup_transfer(struct spi_device *spi, struct spi_transfer *t)
{
	struct sirfsoc_spi *sspi;
	u8 bits_per_word = 0;
	int hz = 0;
	u32 regval;
	u32 txfifo_ctrl, rxfifo_ctrl;
	u32 fifo_size = SIRFSOC_SPI_FIFO_SIZE / 4;

	sspi = spi_master_get_devdata(spi->master);

	bits_per_word = (t) ? t->bits_per_word : spi->bits_per_word;
	hz = t && t->speed_hz ? t->speed_hz : spi->max_speed_hz;

	regval = (sspi->ctrl_freq / (2 * hz)) - 1;
	if (regval > 0xFFFF || regval < 0) {
		dev_err(&spi->dev, "Speed %d not supported\n", hz);
		return -EINVAL;
	}

	switch (bits_per_word) {
	case 8:
		regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_8;
		sspi->rx_word = spi_sirfsoc_rx_word_u8;
		sspi->tx_word = spi_sirfsoc_tx_word_u8;
		break;
	case 12:
	case 16:
		regval |= (bits_per_word == 12) ?
			SIRFSOC_SPI_TRAN_DAT_FORMAT_12 :
			SIRFSOC_SPI_TRAN_DAT_FORMAT_16;
		sspi->rx_word = spi_sirfsoc_rx_word_u16;
		sspi->tx_word = spi_sirfsoc_tx_word_u16;
		break;
	case 32:
		regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_32;
		sspi->rx_word = spi_sirfsoc_rx_word_u32;
		sspi->tx_word = spi_sirfsoc_tx_word_u32;
		break;
	default:
		BUG();
	}

	sspi->word_width = DIV_ROUND_UP(bits_per_word, 8);
	txfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
			sspi->word_width;
	rxfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
			sspi->word_width;

	if (!(spi->mode & SPI_CS_HIGH))
		regval |= SIRFSOC_SPI_CS_IDLE_STAT;
	if (!(spi->mode & SPI_LSB_FIRST))
		regval |= SIRFSOC_SPI_TRAN_MSB;
	if (spi->mode & SPI_CPOL)
		regval |= SIRFSOC_SPI_CLK_IDLE_STAT;

	/*
	 * Data should be driven at least 1/2 cycle before the fetch edge
	 * to make sure that data is stable at the fetch edge.
	 */
	if (((spi->mode & SPI_CPOL) && (spi->mode & SPI_CPHA)) ||
	    (!(spi->mode & SPI_CPOL) && !(spi->mode & SPI_CPHA)))
		regval &= ~SIRFSOC_SPI_DRV_POS_EDGE;
	else
		regval |= SIRFSOC_SPI_DRV_POS_EDGE;

	writel(SIRFSOC_SPI_FIFO_SC(fifo_size - 2) |
			SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
			SIRFSOC_SPI_FIFO_HC(2),
		sspi->base + SIRFSOC_SPI_TXFIFO_LEVEL_CHK);
	writel(SIRFSOC_SPI_FIFO_SC(2) |
			SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
			SIRFSOC_SPI_FIFO_HC(fifo_size - 2),
		sspi->base + SIRFSOC_SPI_RXFIFO_LEVEL_CHK);
	writel(txfifo_ctrl, sspi->base + SIRFSOC_SPI_TXFIFO_CTRL);
	writel(rxfifo_ctrl, sspi->base + SIRFSOC_SPI_RXFIFO_CTRL);

	if (t && t->tx_buf && !t->rx_buf && (t->len <= SIRFSOC_MAX_CMD_BYTES)) {
		regval |= (SIRFSOC_SPI_CMD_BYTE_NUM((t->len - 1)) |
				SIRFSOC_SPI_CMD_MODE);
		sspi->tx_by_cmd = true;
	} else {
		regval &= ~SIRFSOC_SPI_CMD_MODE;
		sspi->tx_by_cmd = false;
	}
	/*
	 * put the SPI controller in RISC chipselect mode: CS is controlled
	 * by software via BITBANG_CS_ACTIVE and BITBANG_CS_INACTIVE.
	 */
	regval |= SIRFSOC_SPI_CS_IO_MODE;
	writel(regval, sspi->base + SIRFSOC_SPI_CTRL);

	if (IS_DMA_VALID(t)) {
		/* Enable DMA mode for RX, TX */
		writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
		writel(SIRFSOC_SPI_RX_DMA_FLUSH,
			sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
	} else {
		/* Enable IO mode for RX, TX */
		writel(SIRFSOC_SPI_IO_MODE_SEL,
			sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
		writel(SIRFSOC_SPI_IO_MODE_SEL,
			sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
	}

	return 0;
}

static int spi_sirfsoc_setup(struct spi_device *spi)
{
	if (!spi->max_speed_hz)
		return -EINVAL;

	return spi_sirfsoc_setup_transfer(spi, NULL);
}

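/*
 * Probe: read the chipselect layout from the device tree (an entry of 0
 * selects the controller's own CS line, any other value is a CS GPIO), map
 * the registers, request the IRQ and DMA channels, then register the
 * bitbang master.
 */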
static int spi_sirfsoc_probe(struct platform_device *pdev)
{
	struct sirfsoc_spi *sspi;
	struct spi_master *master;
	struct resource *mem_res;
	int num_cs, cs_gpio, irq;
	int i;
	int ret;

	ret = of_property_read_u32(pdev->dev.of_node,
			"sirf,spi-num-chipselects", &num_cs);
	if (ret < 0) {
		dev_err(&pdev->dev, "Unable to get chip select number\n");
		goto err_cs;
	}

	master = spi_alloc_master(&pdev->dev,
			sizeof(*sspi) + sizeof(int) * num_cs);
	if (!master) {
		dev_err(&pdev->dev, "Unable to allocate SPI master\n");
		return -ENOMEM;
	}
	platform_set_drvdata(pdev, master);
	sspi = spi_master_get_devdata(master);

	master->num_chipselect = num_cs;

	for (i = 0; i < master->num_chipselect; i++) {
		cs_gpio = of_get_named_gpio(pdev->dev.of_node, "cs-gpios", i);
		if (cs_gpio < 0) {
			dev_err(&pdev->dev, "can't get cs gpio from DT\n");
			ret = -ENODEV;
			goto free_master;
		}

		sspi->chipselect[i] = cs_gpio;
		if (cs_gpio == 0)
			continue; /* use cs from spi controller */

		ret = gpio_request(cs_gpio, DRIVER_NAME);
		if (ret) {
			while (i > 0) {
				i--;
				if (sspi->chipselect[i] > 0)
					gpio_free(sspi->chipselect[i]);
			}
			dev_err(&pdev->dev, "fail to request cs gpios\n");
			goto free_master;
		}
	}

	mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	sspi->base = devm_ioremap_resource(&pdev->dev, mem_res);
	if (IS_ERR(sspi->base)) {
		ret = PTR_ERR(sspi->base);
		goto free_master;
	}

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		ret = -ENXIO;
		goto free_master;
	}
	ret = devm_request_irq(&pdev->dev, irq, spi_sirfsoc_irq, 0,
				DRIVER_NAME, sspi);
	if (ret)
		goto free_master;

	sspi->bitbang.master = master;
	sspi->bitbang.chipselect = spi_sirfsoc_chipselect;
	sspi->bitbang.setup_transfer = spi_sirfsoc_setup_transfer;
	sspi->bitbang.txrx_bufs = spi_sirfsoc_transfer;
	sspi->bitbang.master->setup = spi_sirfsoc_setup;
	master->bus_num = pdev->id;
	master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST | SPI_CS_HIGH;
	master->bits_per_word_mask = SPI_BPW_MASK(8) | SPI_BPW_MASK(12) |
					SPI_BPW_MASK(16) | SPI_BPW_MASK(32);
	sspi->bitbang.master->dev.of_node = pdev->dev.of_node;

	/* request DMA channels */
	sspi->rx_chan = dma_request_slave_channel(&pdev->dev, "rx");
	if (!sspi->rx_chan) {
		dev_err(&pdev->dev, "can not allocate rx dma channel\n");
		ret = -ENODEV;
		goto free_master;
	}
	sspi->tx_chan = dma_request_slave_channel(&pdev->dev, "tx");
	if (!sspi->tx_chan) {
		dev_err(&pdev->dev, "can not allocate tx dma channel\n");
		ret = -ENODEV;
		goto free_rx_dma;
	}

	sspi->clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(sspi->clk)) {
		ret = PTR_ERR(sspi->clk);
		goto free_tx_dma;
	}
	clk_prepare_enable(sspi->clk);
	sspi->ctrl_freq = clk_get_rate(sspi->clk);

	init_completion(&sspi->rx_done);
	init_completion(&sspi->tx_done);

	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
	writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
	/* We are not using dummy delay between command and data */
	writel(0, sspi->base + SIRFSOC_SPI_DUMMY_DELAY_CTL);

	sspi->dummypage = kmalloc(2 * PAGE_SIZE, GFP_KERNEL);
	if (!sspi->dummypage) {
		ret = -ENOMEM;
		goto free_clk;
	}

	ret = spi_bitbang_start(&sspi->bitbang);
	if (ret)
		goto free_dummypage;

	dev_info(&pdev->dev, "registered, bus number = %d\n", master->bus_num);

	return 0;
free_dummypage:
	kfree(sspi->dummypage);
free_clk:
	clk_disable_unprepare(sspi->clk);
	clk_put(sspi->clk);
free_tx_dma:
	dma_release_channel(sspi->tx_chan);
free_rx_dma:
	dma_release_channel(sspi->rx_chan);
free_master:
	spi_master_put(master);
err_cs:
	return ret;
}

777
778 static int spi_sirfsoc_remove(struct platform_device *pdev)
779 {
780 struct spi_master *master;
781 struct sirfsoc_spi *sspi;
782 int i;
783
784 master = platform_get_drvdata(pdev);
785 sspi = spi_master_get_devdata(master);
786
787 spi_bitbang_stop(&sspi->bitbang);
788 for (i = 0; i < master->num_chipselect; i++) {
789 if (sspi->chipselect[i] > 0)
790 gpio_free(sspi->chipselect[i]);
791 }
792 kfree(sspi->dummypage);
793 clk_disable_unprepare(sspi->clk);
794 clk_put(sspi->clk);
795 dma_release_channel(sspi->rx_chan);
796 dma_release_channel(sspi->tx_chan);
797 spi_master_put(master);
798 return 0;
799 }
800
801 #ifdef CONFIG_PM_SLEEP
802 static int spi_sirfsoc_suspend(struct device *dev)
803 {
804 struct spi_master *master = dev_get_drvdata(dev);
805 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
806 int ret;
807
808 ret = spi_master_suspend(master);
809 if (ret)
810 return ret;
811
812 clk_disable(sspi->clk);
813 return 0;
814 }
815
816 static int spi_sirfsoc_resume(struct device *dev)
817 {
818 struct spi_master *master = dev_get_drvdata(dev);
819 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
820
821 clk_enable(sspi->clk);
822 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
823 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
824 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
825 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
826
827 return spi_master_resume(master);
828 }
829 #endif
830
831 static SIMPLE_DEV_PM_OPS(spi_sirfsoc_pm_ops, spi_sirfsoc_suspend,
832 spi_sirfsoc_resume);
833
834 static const struct of_device_id spi_sirfsoc_of_match[] = {
835 { .compatible = "sirf,prima2-spi", },
836 { .compatible = "sirf,marco-spi", },
837 {}
838 };
839 MODULE_DEVICE_TABLE(of, spi_sirfsoc_of_match);
840
841 static struct platform_driver spi_sirfsoc_driver = {
842 .driver = {
843 .name = DRIVER_NAME,
844 .owner = THIS_MODULE,
845 .pm = &spi_sirfsoc_pm_ops,
846 .of_match_table = spi_sirfsoc_of_match,
847 },
848 .probe = spi_sirfsoc_probe,
849 .remove = spi_sirfsoc_remove,
850 };
851 module_platform_driver(spi_sirfsoc_driver);
852 MODULE_DESCRIPTION("SiRF SoC SPI master driver");
853 MODULE_AUTHOR("Zhiwu Song <Zhiwu.Song@csr.com>");
854 MODULE_AUTHOR("Barry Song <Baohua.Song@csr.com>");
855 MODULE_LICENSE("GPL v2");