/* SPDX-License-Identifier: BSD-3-Clause
 *
 * Copyright(c) 2016 Intel Corporation. All rights reserved.
 *
 * Author: Liam Girdwood <liam.r.girdwood@linux.intel.com>
 *         Keyon Jie <yang.jie@linux.intel.com>
 */

/**
 * \file xtos/include/sof/lib/dma.h
 * \brief DMA Drivers definition
 * \author Liam Girdwood <liam.r.girdwood@linux.intel.com>
 * \author Keyon Jie <yang.jie@linux.intel.com>
 */

#ifndef __SOF_LIB_DMA_H__
#define __SOF_LIB_DMA_H__

#include <platform/lib/dma.h>
#include <rtos/atomic.h>
#include <rtos/bit.h>
#include <rtos/alloc.h>
#include <sof/lib/io.h>
#include <sof/lib/memory.h>
#include <rtos/sof.h>
#include <rtos/spinlock.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#ifdef __ZEPHYR__
#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>
#endif

struct comp_buffer;

/** \addtogroup sof_dma_drivers DMA Drivers
 *  DMA Drivers API specification.
 *  @{
 */

/* DMA direction bitmasks used to define DMA copy direction */
#define DMA_DIR_MEM_TO_MEM	BIT(0) /**< local memory copy */
#define DMA_DIR_HMEM_TO_LMEM	BIT(1) /**< host memory to local mem copy */
#define DMA_DIR_LMEM_TO_HMEM	BIT(2) /**< local mem to host mem copy */
#define DMA_DIR_MEM_TO_DEV	BIT(3) /**< local mem to dev copy */
#define DMA_DIR_DEV_TO_MEM	BIT(4) /**< dev to local mem copy */
#define DMA_DIR_DEV_TO_DEV	BIT(5) /**< dev to dev copy */

/* DMA capabilities bitmasks used to define the type of DMA */
#define DMA_CAP_HDA		BIT(0) /**< HDA DMA */
#define DMA_CAP_GP_LP		BIT(1) /**< GP LP DMA */
#define DMA_CAP_GP_HP		BIT(2) /**< GP HP DMA */
#define DMA_CAP_BT		BIT(3) /**< BT DMA */
#define DMA_CAP_SP		BIT(4) /**< SP DMA */
#define DMA_CAP_DMIC		BIT(5) /**< ACP DMA DMIC */
#define DMA_CAP_SP_VIRTUAL	BIT(6) /**< SP VIRTUAL DMA */
#define DMA_CAP_HS_VIRTUAL	BIT(7) /**< HS VIRTUAL DMA */

/* DMA dev type bitmasks used to define the type of DMA */

#define DMA_DEV_HOST		BIT(0) /**< connectable to host */
#define DMA_DEV_HDA		BIT(1) /**< connectable to HD/A link */
#define DMA_DEV_SSP		BIT(2) /**< connectable to SSP fifo */
#define DMA_DEV_DMIC		BIT(3) /**< connectable to DMIC fifo */
#define DMA_DEV_SSI		BIT(4) /**< connectable to SSI / SPI fifo */
#define DMA_DEV_ALH		BIT(5) /**< connectable to ALH link */
#define DMA_DEV_SAI		BIT(6) /**< connectable to SAI fifo */
#define DMA_DEV_ESAI		BIT(7) /**< connectable to ESAI fifo */
#define DMA_DEV_BT		BIT(8) /**< connectable to ACP BT I2S */
#define DMA_DEV_SP		BIT(9) /**< connectable to ACP SP I2S */
#define DMA_DEV_AFE_MEMIF	BIT(10) /**< connectable to AFE fifo */
#define DMA_DEV_SP_VIRTUAL	BIT(11) /**< connectable to ACP SP VIRTUAL I2S */
#define DMA_DEV_HS_VIRTUAL	BIT(12) /**< connectable to ACP HS VIRTUAL I2S */

/* DMA access privilege flag */
#define DMA_ACCESS_EXCLUSIVE	1
#define DMA_ACCESS_SHARED	0

/* DMA copy flags */
#define DMA_COPY_BLOCKING	BIT(0)
#define DMA_COPY_ONE_SHOT	BIT(1)
/* Status returned from a DMA callback handler to tell the DMA driver which
 * action to perform next.
 */
enum dma_cb_status {
	DMA_CB_STATUS_RELOAD = 0,
	DMA_CB_STATUS_END,
};

/* DMA interrupt commands */
enum dma_irq_cmd {
	DMA_IRQ_STATUS_GET = 0,
	DMA_IRQ_CLEAR,
	DMA_IRQ_MASK,
	DMA_IRQ_UNMASK
};

#define DMA_CHAN_INVALID	0xFFFFFFFF
#define DMA_CORE_INVALID	0xFFFFFFFF

/* Attributes have been ported to Zephyr. This condition is necessary until
 * CONFIG_SOF_ZEPHYR_STRICT_HEADERS is fully supported.
 */
#ifndef CONFIG_ZEPHYR_NATIVE_DRIVERS
/* DMA attributes */
#define DMA_ATTR_BUFFER_ALIGNMENT		0
#define DMA_ATTR_COPY_ALIGNMENT			1
#define DMA_ATTR_BUFFER_ADDRESS_ALIGNMENT	2
#define DMA_ATTR_BUFFER_PERIOD_COUNT		3
#endif

struct dma;

/**
 * \brief Element of SG list (as array item).
 */
struct dma_sg_elem {
	uint32_t src;	/**< source address */
	uint32_t dest;	/**< destination address */
	uint32_t size;	/**< size (in bytes) */
};

/**
 * \brief Data used in DMA callbacks.
 */
struct dma_cb_data {
	struct dma_chan_data *channel;
	struct dma_sg_elem elem;
	enum dma_cb_status status;
};
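
/* Example: a copy-complete handler consuming struct dma_cb_data. This is an
 * illustrative sketch only: handlers are normally attached through the
 * notifier API (see the programming flow below), and the callback signature
 * and the last_period() helper are assumptions, not part of this header.
 *
 *	static void my_dma_cb(void *arg, enum notify_id type, void *data)
 *	{
 *		struct dma_cb_data *next = data;
 *
 *		// reload to keep the transfer cyclic, end on the last period
 *		next->status = last_period(arg) ? DMA_CB_STATUS_END
 *						: DMA_CB_STATUS_RELOAD;
 *	}
 */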

/**
 * \brief SG elem array.
 */
struct dma_sg_elem_array {
	uint32_t count;			/**< number of elements in elems */
	struct dma_sg_elem *elems;	/**< elements */
};

/* DMA physical SG params */
struct dma_sg_config {
	uint32_t src_width;	/* in bytes */
	uint32_t dest_width;	/* in bytes */
	uint32_t burst_elems;
	uint32_t direction;
	uint32_t src_dev;
	uint32_t dest_dev;
	uint32_t cyclic;	/* circular buffer */
	uint64_t period;
	struct dma_sg_elem_array elem_array;	/* array of dma_sg elems */
	bool scatter;
	bool irq_disabled;
	/* true if configured DMA channel is the scheduling source */
	bool is_scheduling_source;
};
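
/* Example: filling a one-shot host-to-local configuration with a single SG
 * element. An illustrative sketch only: host_addr, local_addr and the element
 * size are placeholders, and real users usually build elem_array with
 * dma_sg_alloc() declared below.
 *
 *	struct dma_sg_elem elem = {
 *		.src = host_addr,
 *		.dest = local_addr,
 *		.size = 4096,
 *	};
 *	struct dma_sg_config config = {
 *		.src_width = 4,		// bytes per source transfer
 *		.dest_width = 4,	// bytes per destination transfer
 *		.direction = DMA_DIR_HMEM_TO_LMEM,
 *		.cyclic = 0,		// one-shot, not a circular buffer
 *		.elem_array = { .count = 1, .elems = &elem },
 *	};
 */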

struct dma_chan_status {
	uint32_t state;
	uint32_t flags;
	uint32_t w_pos;
	uint32_t r_pos;
	uint32_t timestamp;

	/* dma position info for ipc4 */
	void *ipc_posn_data;
};

/* DMA operations */
struct dma_ops {

	struct dma_chan_data *(*channel_get)(struct dma *dma,
					     unsigned int req_channel);
	void (*channel_put)(struct dma_chan_data *channel);

	int (*start)(struct dma_chan_data *channel);
	int (*stop)(struct dma_chan_data *channel);
	int (*stop_delayed)(struct dma_chan_data *channel);
	int (*copy)(struct dma_chan_data *channel, int bytes, uint32_t flags);
	int (*pause)(struct dma_chan_data *channel);
	int (*release)(struct dma_chan_data *channel);
	int (*status)(struct dma_chan_data *channel,
		      struct dma_chan_status *status, uint8_t direction);

	int (*set_config)(struct dma_chan_data *channel,
			  struct dma_sg_config *config);

	int (*probe)(struct dma *dma);
	int (*remove)(struct dma *dma);

	int (*get_data_size)(struct dma_chan_data *channel, uint32_t *avail,
			     uint32_t *free);

	int (*get_attribute)(struct dma *dma, uint32_t type, uint32_t *value);

	int (*interrupt)(struct dma_chan_data *channel, enum dma_irq_cmd cmd);
};

/* DMA platform data */
struct dma_plat_data {
	uint32_t id;
	uint32_t dir;	/* bitmask of supported copy directions */
	uint32_t caps;	/* bitmask of supported capabilities */
	uint32_t devs;	/* bitmask of supported devs */
	uint32_t base;
	uint32_t channels;
	int irq;
	const char *irq_name;
	uint32_t chan_size;
	const void *drv_plat_data;
#ifdef __ZEPHYR__
	uint32_t period_count;
#endif
};

struct dma {
	struct dma_plat_data plat_data;
	struct k_spinlock lock;	/**< locking mechanism */
	int sref;		/**< simple ref counter, guarded by lock */
	const struct dma_ops *ops;
	atomic_t num_channels_busy;	/* number of busy channels */
	struct dma_chan_data *chan;	/* channels array */
#ifdef __ZEPHYR__
	const struct device *z_dev;	/* Zephyr driver */
#endif
	void *priv_data;
};

struct dma_chan_data {
	struct dma *dma;

	uint32_t status;
	uint32_t direction;
	uint32_t desc_count;
	uint32_t index;
	uint32_t core;
	uint64_t period;	/* DMA channel's transfer period in us */
	/* true if this DMA channel is the scheduling source */
	bool is_scheduling_source;

	/* device specific data set by the device that requests the DMA channel */
	void *dev_data;

	void *priv_data;
};

struct dma_info {
	struct dma *dma_array;
	size_t num_dmas;
};

struct audio_stream;
typedef int (*dma_process_func)(const struct audio_stream __sparse_cache *source,
				uint32_t ioffset, struct audio_stream __sparse_cache *sink,
				uint32_t ooffset, uint32_t frames);

/**
 * \brief API to initialize the platform DMA controllers.
 *
 * \param[in] sof Pointer to firmware main context.
 */
int dmac_init(struct sof *sof);

/**
 * \brief API to request a platform DMAC.
 *
 * Users can request a DMAC based on dev type, copy direction, capabilities
 * and access privilege.
 * For exclusive access, a DMAC with no channels draining is returned.
 * For shared access, the DMAC with the least number of channels draining is
 * returned.
 */
struct dma *dma_get(uint32_t dir, uint32_t caps, uint32_t dev, uint32_t flags);
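
/* Example: requesting a shared low-power GP DMAC for host transfers. An
 * illustrative sketch: the capability and device masks that actually match
 * depend on the platform's DMA controllers.
 *
 *	struct dma *dma = dma_get(DMA_DIR_HMEM_TO_LMEM, DMA_CAP_GP_LP,
 *				  DMA_DEV_HOST, DMA_ACCESS_SHARED);
 *	if (!dma)
 *		return -ENODEV;
 */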

/**
 * \brief API to release a platform DMAC.
 *
 * \param[in] dma DMAC to release.
 */
void dma_put(struct dma *dma);

#define dma_set_drvdata(dma, data) \
	((dma)->priv_data = (data))
#define dma_get_drvdata(dma) \
	((dma)->priv_data)
#define dma_base(dma) \
	((dma)->plat_data.base)
#define dma_irq(dma) \
	((dma)->plat_data.irq)
#define dma_irq_name(dma) \
	((dma)->plat_data.irq_name)
#define dma_chan_size(dma) \
	((dma)->plat_data.chan_size)
#define dma_chan_base(dma, chan) \
	((dma)->plat_data.base + (chan) * (dma)->plat_data.chan_size)
#define dma_chan_get_data(chan) \
	((chan)->priv_data)
#define dma_chan_set_data(chan, data) \
	((chan)->priv_data = (data))

/* DMA API
 * Programming flow is:
 *
 * 1) dma_channel_get()
 * 2) notifier_register()
 * 3) dma_set_config()
 * 4) dma_start()
 *   ... DMA now running ...
 * 5) dma_stop()
 * 6) dma_stop_delayed()
 * 7) dma_channel_put()
 */
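
/* Example: the flow above end to end, using the legacy wrappers declared
 * below. An illustrative sketch: error handling is minimal, the notifier
 * callback is omitted and `config` is a filled struct dma_sg_config.
 *
 *	struct dma_chan_data *chan;
 *	int ret;
 *
 *	chan = dma_channel_get_legacy(dma, 0);	// request channel index 0
 *	if (!chan)
 *		return -EBUSY;
 *
 *	ret = dma_set_config_legacy(chan, &config);
 *	if (ret < 0)
 *		goto out;
 *
 *	ret = dma_start_legacy(chan);
 *	// ... DMA now running, move data with dma_copy_legacy() ...
 *	ret = dma_stop_legacy(chan);
 * out:
 *	dma_channel_put_legacy(chan);
 */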

static inline struct dma_chan_data *dma_channel_get_legacy(struct dma *dma,
							   int req_channel)
{
	if (!dma || !dma->ops || !dma->ops->channel_get)
		return NULL;

	struct dma_chan_data *chan = dma->ops->channel_get(dma, req_channel);

	return chan;
}

static inline void dma_channel_put_legacy(struct dma_chan_data *channel)
{
	channel->dma->ops->channel_put(channel);
}

static inline int dma_start_legacy(struct dma_chan_data *channel)
{
	return channel->dma->ops->start(channel);
}

static inline int dma_stop_legacy(struct dma_chan_data *channel)
{
	if (channel->dma->ops->stop)
		return channel->dma->ops->stop(channel);

	return 0;
}

static inline int dma_stop_delayed_legacy(struct dma_chan_data *channel)
{
	if (channel->dma->ops->stop_delayed)
		return channel->dma->ops->stop_delayed(channel);

	return 0;
}

/** \defgroup sof_dma_copy_func static int dma_copy (struct dma_chan_data * channel, int bytes, uint32_t flags)
 *
 * This function is in a separate subgroup to solve a name clash with
 * struct dma_copy {}
 * @{
 */
static inline int dma_copy_legacy(struct dma_chan_data *channel, int bytes,
				  uint32_t flags)
{
	return channel->dma->ops->copy(channel, bytes, flags);
}
/** @} */

static inline int dma_pause_legacy(struct dma_chan_data *channel)
{
	if (channel->dma->ops->pause)
		return channel->dma->ops->pause(channel);

	return 0;
}

static inline int dma_release_legacy(struct dma_chan_data *channel)
{
	if (channel->dma->ops->release)
		return channel->dma->ops->release(channel);

	return 0;
}

static inline int dma_status_legacy(struct dma_chan_data *channel,
				    struct dma_chan_status *status, uint8_t direction)
{
	return channel->dma->ops->status(channel, status, direction);
}

static inline int dma_set_config_legacy(struct dma_chan_data *channel,
					struct dma_sg_config *config)
{
	return channel->dma->ops->set_config(channel, config);
}

static inline int dma_probe_legacy(struct dma *dma)
{
	return dma->ops->probe(dma);
}

static inline int dma_remove_legacy(struct dma *dma)
{
	return dma->ops->remove(dma);
}

static inline int dma_get_data_size_legacy(struct dma_chan_data *channel,
					   uint32_t *avail, uint32_t *free)
{
	return channel->dma->ops->get_data_size(channel, avail, free);
}

static inline int dma_get_attribute_legacy(struct dma *dma, uint32_t type,
					   uint32_t *value)
{
	return dma->ops->get_attribute(dma, type, value);
}

static inline int dma_interrupt_legacy(struct dma_chan_data *channel,
				       enum dma_irq_cmd cmd)
{
	return channel->dma->ops->interrupt(channel, cmd);
}

/* DMA hardware register operations */
static inline uint32_t dma_reg_read(struct dma *dma, uint32_t reg)
{
	return io_reg_read(dma_base(dma) + reg);
}

static inline uint16_t dma_reg_read16(struct dma *dma, uint32_t reg)
{
	return io_reg_read16(dma_base(dma) + reg);
}

static inline void dma_reg_write(struct dma *dma, uint32_t reg, uint32_t value)
{
	io_reg_write(dma_base(dma) + reg, value);
}

static inline void dma_reg_write16(struct dma *dma, uint32_t reg,
				   uint16_t value)
{
	io_reg_write16(dma_base(dma) + reg, value);
}

static inline void dma_reg_update_bits(struct dma *dma, uint32_t reg,
				       uint32_t mask, uint32_t value)
{
	io_reg_update_bits(dma_base(dma) + reg, mask, value);
}

static inline uint32_t dma_chan_reg_read(struct dma_chan_data *channel,
					 uint32_t reg)
{
	return io_reg_read(dma_chan_base(channel->dma, channel->index) + reg);
}

static inline uint16_t dma_chan_reg_read16(struct dma_chan_data *channel,
					   uint32_t reg)
{
	return io_reg_read16(dma_chan_base(channel->dma, channel->index) + reg);
}

static inline void dma_chan_reg_write(struct dma_chan_data *channel,
				      uint32_t reg, uint32_t value)
{
	io_reg_write(dma_chan_base(channel->dma, channel->index) + reg, value);
}

static inline void dma_chan_reg_write16(struct dma_chan_data *channel,
					uint32_t reg, uint16_t value)
{
	io_reg_write16(dma_chan_base(channel->dma, channel->index) + reg,
		       value);
}

static inline void dma_chan_reg_update_bits(struct dma_chan_data *channel,
					    uint32_t reg, uint32_t mask,
					    uint32_t value)
{
	io_reg_update_bits(dma_chan_base(channel->dma, channel->index) + reg,
			   mask, value);
}

static inline void dma_chan_reg_update_bits16(struct dma_chan_data *channel,
					      uint32_t reg, uint16_t mask,
					      uint16_t value)
{
	io_reg_update_bits16(dma_chan_base(channel->dma, channel->index) + reg,
			     mask, value);
}

static inline bool dma_is_scheduling_source(struct dma_chan_data *channel)
{
	return channel->is_scheduling_source;
}

static inline void dma_sg_init(struct dma_sg_elem_array *ea)
{
	ea->count = 0;
	ea->elems = NULL;
}

int dma_sg_alloc(struct dma_sg_elem_array *ea,
		 enum mem_zone zone,
		 uint32_t direction,
		 uint32_t buffer_count, uint32_t buffer_bytes,
		 uintptr_t dma_buffer_addr, uintptr_t external_addr);

void dma_sg_free(struct dma_sg_elem_array *ea);
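
/* Example: building and releasing a two-period SG list that renders local
 * audio to a host buffer. An illustrative sketch: SOF_MEM_ZONE_RUNTIME is
 * assumed as the allocation zone here, and the buffer addresses and period
 * size are placeholders.
 *
 *	struct dma_sg_elem_array ea;
 *	int ret;
 *
 *	dma_sg_init(&ea);
 *	ret = dma_sg_alloc(&ea, SOF_MEM_ZONE_RUNTIME, DMA_DIR_LMEM_TO_HMEM,
 *			   2, period_bytes, dma_buffer_addr, host_addr);
 *	if (ret < 0)
 *		return ret;
 *	// ... use ea as dma_sg_config.elem_array ...
 *	dma_sg_free(&ea);
 */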

/**
 * \brief Get the total size of an SG buffer.
 *
 * \param ea Array of SG elements.
 * \return Size of the buffer.
 */
static inline uint32_t dma_sg_get_size(struct dma_sg_elem_array *ea)
{
	uint32_t size = 0;
	uint32_t i;

	for (i = 0; i < ea->count; i++)
		size += ea->elems[i].size;

	return size;
}

struct audio_stream;
typedef void (*dma_process)(const struct audio_stream *,
			    struct audio_stream *, uint32_t);

/* copies data from DMA buffer using provided processing function */
int dma_buffer_copy_from(struct comp_buffer __sparse_cache *source,
			 struct comp_buffer __sparse_cache *sink,
			 dma_process_func process, uint32_t source_bytes);

/* copies data to DMA buffer using provided processing function */
int dma_buffer_copy_to(struct comp_buffer __sparse_cache *source,
		       struct comp_buffer __sparse_cache *sink,
		       dma_process_func process, uint32_t sink_bytes);
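
/* Example: draining one period from a DAI's DMA buffer into a pipeline
 * buffer. An illustrative sketch: `converter` stands for whatever
 * dma_process_func the caller selected (e.g. a PCM format converter) and
 * the buffer pointers are placeholders.
 *
 *	ret = dma_buffer_copy_from(dma_buf, pipeline_buf, converter,
 *				   period_bytes);
 *	if (ret < 0)
 *		return ret;
 */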

/* generic DMA DSP <-> Host copier */

struct dma_copy {
	struct dma_chan_data *chan;
	struct dma *dmac;
};

/* init dma copy context */
int dma_copy_new(struct dma_copy *dc);

/* free dma copy context resources */
static inline void dma_copy_free(struct dma_copy *dc)
{
	dma_channel_put_legacy(dc->chan);
}

/* DMA copy data from host to DSP */
int dma_copy_from_host(struct dma_copy *dc, struct dma_sg_config *host_sg,
		       int32_t host_offset, void *local_ptr, int32_t size);
int dma_copy_from_host_nowait(struct dma_copy *dc,
			      struct dma_sg_config *host_sg,
			      int32_t host_offset, void *local_ptr,
			      int32_t size);

/* DMA copy data from DSP to host */
int dma_copy_to_host(struct dma_copy *dc, struct dma_sg_config *host_sg,
		     int32_t host_offset, void *local_ptr, int32_t size);
int dma_copy_to_host_nowait(struct dma_copy *dc, struct dma_sg_config *host_sg,
			    int32_t host_offset, void *local_ptr, int32_t size);

int dma_copy_set_stream_tag(struct dma_copy *dc, uint32_t stream_tag);
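
/* Example: lifetime of a host <-> DSP copier. An illustrative sketch: the
 * stream tag and the host scatter-gather configuration normally arrive from
 * the host via IPC, and error handling is abbreviated.
 *
 *	struct dma_copy dc;
 *	int ret;
 *
 *	ret = dma_copy_new(&dc);
 *	if (ret < 0)
 *		return ret;
 *
 *	ret = dma_copy_set_stream_tag(&dc, stream_tag);
 *	if (!ret)
 *		ret = dma_copy_to_host(&dc, &host_sg, 0, local_buf, size);
 *
 *	dma_copy_free(&dc);
 */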

static inline const struct dma_info *dma_info_get(void)
{
	return sof_get()->dma_info;
}

/** @}*/

#endif /* __SOF_LIB_DMA_H__ */