/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *
 * License terms: GNU General Public License (GPL), version 2
 */

#include "i2c-stm32.h"
/* Functions for DMA support */
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
					    dma_addr_t phy_addr,
					    u32 txdr_offset,
					    u32 rxdr_offset)
{
	struct stm32_i2c_dma *dma;
	struct dma_slave_config dma_sconfig;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return ERR_PTR(-ENOMEM);

	/* Request and configure I2C TX dma channel */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		dev_dbg(dev, "can't request DMA tx channel\n");
		ret = PTR_ERR(dma->chan_tx);
		goto fail_al;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.dst_addr = phy_addr + txdr_offset;
	dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.dst_maxburst = 1;
	dma_sconfig.direction = DMA_MEM_TO_DEV;
	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure tx channel\n");
		goto fail_tx;
	}

	/* Request and configure I2C RX dma channel */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		dev_err(dev, "can't request DMA rx channel\n");
		ret = PTR_ERR(dma->chan_rx);
		goto fail_tx;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.src_addr = phy_addr + rxdr_offset;
	dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.src_maxburst = 1;
	dma_sconfig.direction = DMA_DEV_TO_MEM;
	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure rx channel\n");
		goto fail_rx;
	}

	init_completion(&dma->dma_complete);

	dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return dma;

fail_rx:
	dma_release_channel(dma->chan_rx);
fail_tx:
	dma_release_channel(dma->chan_tx);
fail_al:
	devm_kfree(dev, dma);
	dev_info(dev, "can't use DMA\n");

	return ERR_PTR(ret);
}
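
/*
 * Illustrative usage sketch (not part of the driver): a bus driver's probe
 * path could request the channels like this and fall back to interrupt-only
 * mode when DMA is unavailable. The function name and the 0x28/0x24
 * TXDR/RXDR offsets below are placeholder assumptions, not values taken
 * from this file.
 */
static __maybe_unused struct stm32_i2c_dma *
stm32_i2c_dma_setup_sketch(struct device *dev, dma_addr_t phy_addr)
{
	struct stm32_i2c_dma *dma;

	dma = stm32_i2c_dma_request(dev, phy_addr, 0x28, 0x24);
	if (IS_ERR(dma)) {
		/* A real caller would propagate -EPROBE_DEFER here */
		return NULL;	/* continue without DMA */
	}

	return dma;
}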

void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
	dma->dma_buf = 0;
	dma->dma_len = 0;

	dma_release_channel(dma->chan_tx);
	dma->chan_tx = NULL;

	dma_release_channel(dma->chan_rx);
	dma->chan_rx = NULL;

	dma->chan_using = NULL;
}

int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
			    bool rd_wr, u32 len, u8 *buf,
			    dma_async_tx_callback callback,
			    void *dma_async_param)
{
	struct dma_async_tx_descriptor *txdesc;
	struct device *chan_dev;
	int ret;

	if (rd_wr) {
		dma->chan_using = dma->chan_rx;
		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
		dma->dma_data_dir = DMA_FROM_DEVICE;
	} else {
		dma->chan_using = dma->chan_tx;
		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
		dma->dma_data_dir = DMA_TO_DEVICE;
	}

	dma->dma_len = len;
	chan_dev = dma->chan_using->device->dev;

	dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
				      dma->dma_data_dir);
	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
		dev_err(dev, "DMA mapping failed\n");
		return -EINVAL;
	}

	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
					     dma->dma_len,
					     dma->dma_transfer_dir,
					     DMA_PREP_INTERRUPT);
	if (!txdesc) {
		dev_err(dev, "Not able to get desc for DMA xfer\n");
		ret = -EINVAL;
		goto err;
	}

	reinit_completion(&dma->dma_complete);

	txdesc->callback = callback;
	txdesc->callback_param = dma_async_param;
	ret = dma_submit_error(dmaengine_submit(txdesc));
	if (ret < 0) {
		dev_err(dev, "DMA submit failed\n");
		goto err;
	}

	dma_async_issue_pending(dma->chan_using);

	return 0;

err:
	dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
			 dma->dma_data_dir);
	return ret;
}
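
/*
 * Illustrative usage sketch (not part of the driver): a typical caller
 * prepares the transfer, lets the I2C controller's TX/RX DMA requests start
 * it, waits on dma->dma_complete from its completion callback, then unmaps
 * the buffer. The stm32_i2c_dma_sketch_* names and the timeout handling
 * below are assumptions for the sketch, not code from this file.
 */
static void stm32_i2c_dma_sketch_callback(void *arg)
{
	struct stm32_i2c_dma *dma = arg;

	complete(&dma->dma_complete);
}

static int __maybe_unused stm32_i2c_dma_sketch_xfer(struct device *dev,
						    struct stm32_i2c_dma *dma,
						    bool rd_wr, u32 len,
						    u8 *buf,
						    unsigned long timeout)
{
	int ret;

	ret = stm32_i2c_prep_dma_xfer(dev, dma, rd_wr, len, buf,
				      stm32_i2c_dma_sketch_callback, dma);
	if (ret)
		return ret;

	/*
	 * The transfer only runs once the caller enables the controller's
	 * DMA requests; this sketch just waits for the callback to fire
	 * within the caller-supplied timeout (in jiffies).
	 */
	if (!wait_for_completion_timeout(&dma->dma_complete, timeout)) {
		dmaengine_terminate_all(dma->chan_using);
		ret = -ETIMEDOUT;
	}

	dma_unmap_single(dma->chan_using->device->dev, dma->dma_buf,
			 dma->dma_len, dma->dma_data_dir);

	return ret;
}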