dmaengine.h

/*
 * The contents of this file are private to DMA engine drivers, and are not
 * part of the API to be used by DMA engine users.
 */
#ifndef DMAENGINE_H
#define DMAENGINE_H

#include <linux/bug.h>
#include <linux/dmaengine.h>

/**
 * dma_cookie_init - initialize the cookies for a DMA channel
 * @chan: dma channel to initialize
 */
static inline void dma_cookie_init(struct dma_chan *chan)
{
	chan->cookie = DMA_MIN_COOKIE;
	chan->completed_cookie = DMA_MIN_COOKIE;
}
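
/*
 * Example (illustrative sketch, not part of this header): a driver would
 * typically call dma_cookie_init() from its ->device_alloc_chan_resources()
 * hook before the channel is used; the "foo" names are hypothetical:
 *
 *	static int foo_alloc_chan_resources(struct dma_chan *chan)
 *	{
 *		struct foo_chan *fc = to_foo_chan(chan);
 *
 *		foo_alloc_descriptors(fc);
 *		dma_cookie_init(chan);
 *		return 0;
 *	}
 */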

/**
 * dma_cookie_assign - assign a DMA engine cookie to the descriptor
 * @tx: descriptor needing cookie
 *
 * Assign a unique non-zero per-channel cookie to the descriptor.
 * Note: caller is expected to hold a lock to prevent concurrency.
 */
static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx)
{
	struct dma_chan *chan = tx->chan;
	dma_cookie_t cookie;

	cookie = chan->cookie + 1;
	if (cookie < DMA_MIN_COOKIE)
		cookie = DMA_MIN_COOKIE;
	tx->cookie = chan->cookie = cookie;

	return cookie;
}
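
/*
 * Example (illustrative sketch, not part of this header): dma_cookie_assign()
 * is normally called from a driver's ->tx_submit() implementation with the
 * channel lock held; the "foo" names are hypothetical:
 *
 *	static dma_cookie_t foo_tx_submit(struct dma_async_tx_descriptor *tx)
 *	{
 *		struct foo_chan *fc = to_foo_chan(tx->chan);
 *		struct foo_desc *fd = to_foo_desc(tx);
 *		unsigned long flags;
 *		dma_cookie_t cookie;
 *
 *		spin_lock_irqsave(&fc->lock, flags);
 *		cookie = dma_cookie_assign(tx);
 *		list_add_tail(&fd->node, &fc->pending);
 *		spin_unlock_irqrestore(&fc->lock, flags);
 *
 *		return cookie;
 *	}
 */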

/**
 * dma_cookie_complete - complete a descriptor
 * @tx: descriptor to complete
 *
 * Mark this descriptor complete by updating the channel's completed
 * cookie marker. Zero the descriptor's cookie to prevent accidental
 * repeated completions.
 *
 * Note: caller is expected to hold a lock to prevent concurrency.
 */
static inline void dma_cookie_complete(struct dma_async_tx_descriptor *tx)
{
	BUG_ON(tx->cookie < DMA_MIN_COOKIE);
	tx->chan->completed_cookie = tx->cookie;
	tx->cookie = 0;
}
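
/*
 * Example (illustrative sketch, not part of this header): dma_cookie_complete()
 * is typically called from the driver's interrupt handler or completion
 * tasklet, under the same lock used in ->tx_submit(); the "foo" names are
 * hypothetical:
 *
 *	spin_lock_irqsave(&fc->lock, flags);
 *	fd = list_first_entry(&fc->active, struct foo_desc, node);
 *	dma_cookie_complete(&fd->tx);
 *	list_move_tail(&fd->node, &fc->completed);
 *	spin_unlock_irqrestore(&fc->lock, flags);
 */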

/**
 * dma_cookie_status - report cookie status
 * @chan: dma channel
 * @cookie: cookie we are interested in
 * @state: dma_tx_state structure to return last/used cookies
 *
 * Report the status of the cookie, filling in the state structure if
 * non-NULL. No locking is required.
 */
static inline enum dma_status dma_cookie_status(struct dma_chan *chan,
	dma_cookie_t cookie, struct dma_tx_state *state)
{
	dma_cookie_t used, complete;

	used = chan->cookie;
	complete = chan->completed_cookie;
	barrier();
	if (state) {
		state->last = complete;
		state->used = used;
		state->residue = 0;
	}
	return dma_async_is_complete(cookie, complete, used);
}

/**
 * dma_set_residue - report the remaining byte count for a transaction
 * @state: dma_tx_state structure to fill in, may be NULL
 * @residue: number of bytes not yet transferred
 *
 * Record @residue in @state if @state is non-NULL. No locking is required.
 */
static inline void dma_set_residue(struct dma_tx_state *state, u32 residue)
{
	if (state)
		state->residue = residue;
}
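
/*
 * Example (illustrative sketch, not part of this header): a driver's
 * ->device_tx_status() hook usually starts with dma_cookie_status() and only
 * computes a residue for transactions that are still in flight; the "foo"
 * names are hypothetical:
 *
 *	static enum dma_status foo_tx_status(struct dma_chan *chan,
 *		dma_cookie_t cookie, struct dma_tx_state *state)
 *	{
 *		enum dma_status ret;
 *
 *		ret = dma_cookie_status(chan, cookie, state);
 *		if (ret == DMA_COMPLETE)
 *			return ret;
 *
 *		dma_set_residue(state, foo_bytes_left(chan, cookie));
 *		return ret;
 *	}
 */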

/**
 * struct dmaengine_desc_callback - cached descriptor completion callback
 * @callback: completion callback taking only the parameter
 * @callback_result: completion callback also taking a dmaengine_result
 * @callback_param: parameter passed to either callback
 */
struct dmaengine_desc_callback {
	dma_async_tx_callback callback;
	dma_async_tx_callback_result callback_result;
	void *callback_param;
};

/**
 * dmaengine_desc_get_callback - get the passed in callback function
 * @tx: tx descriptor
 * @cb: temp struct to hold the callback info
 *
 * Fill the passed-in cb struct with the callback information from the
 * tx descriptor.
 * No locking is required.
 */
static inline void
dmaengine_desc_get_callback(struct dma_async_tx_descriptor *tx,
			    struct dmaengine_desc_callback *cb)
{
	cb->callback = tx->callback;
	cb->callback_result = tx->callback_result;
	cb->callback_param = tx->callback_param;
}

/**
 * dmaengine_desc_callback_invoke - call the callback function in cb struct
 * @cb: temp struct that is holding the callback info
 * @result: transaction result
 *
 * Call the callback function provided in the cb struct with the parameter
 * in the cb struct.
 * Locking is dependent on the driver.
 */
static inline void
dmaengine_desc_callback_invoke(struct dmaengine_desc_callback *cb,
			       const struct dmaengine_result *result)
{
	struct dmaengine_result dummy_result = {
		.result = DMA_TRANS_NOERROR,
		.residue = 0
	};

	if (cb->callback_result) {
		if (!result)
			result = &dummy_result;
		cb->callback_result(cb->callback_param, result);
	} else if (cb->callback) {
		cb->callback(cb->callback_param);
	}
}
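
/*
 * Example (illustrative sketch, not part of this header): a driver that
 * detects a failed transfer can pass a result describing the error and the
 * number of bytes left untransferred; foo_bytes_left() is hypothetical:
 *
 *	struct dmaengine_result res = {
 *		.result = DMA_TRANS_READ_FAILED,
 *		.residue = foo_bytes_left(chan, fd->tx.cookie),
 *	};
 *
 *	dmaengine_desc_callback_invoke(&cb, &res);
 */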

/**
 * dmaengine_desc_get_callback_invoke - get the callback from the tx
 *	descriptor and then immediately call it
 * @tx: dma async tx descriptor
 * @result: transaction result
 *
 * Call dmaengine_desc_get_callback() and dmaengine_desc_callback_invoke()
 * in a single function since no work is necessary in between for the driver.
 * Locking is dependent on the driver.
 */
static inline void
dmaengine_desc_get_callback_invoke(struct dma_async_tx_descriptor *tx,
				   const struct dmaengine_result *result)
{
	struct dmaengine_desc_callback cb;

	dmaengine_desc_get_callback(tx, &cb);
	dmaengine_desc_callback_invoke(&cb, result);
}

/**
 * dmaengine_desc_callback_valid - verify the callback is valid in cb
 * @cb: callback info struct
 *
 * Return true if a callback is set in @cb, false otherwise.
 * No locking is required.
 */
static inline bool
dmaengine_desc_callback_valid(struct dmaengine_desc_callback *cb)
{
	return (cb->callback) ? true : false;
}
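
/*
 * Example (illustrative sketch, not part of this header): a completion
 * tasklet will often capture the callback under the channel lock, recycle the
 * descriptor, and only invoke the callback after the lock has been dropped;
 * the "foo" names are hypothetical:
 *
 *	struct dmaengine_desc_callback cb;
 *
 *	spin_lock_irqsave(&fc->lock, flags);
 *	dma_cookie_complete(&fd->tx);
 *	dmaengine_desc_get_callback(&fd->tx, &cb);
 *	list_move_tail(&fd->node, &fc->free);
 *	spin_unlock_irqrestore(&fc->lock, flags);
 *
 *	if (dmaengine_desc_callback_valid(&cb))
 *		dmaengine_desc_callback_invoke(&cb, NULL);
 */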

#endif