/* SPDX-License-Identifier: BSD-3-Clause
 *
 * Copyright(c) 2016 Intel Corporation. All rights reserved.
 *
 * Author: Liam Girdwood <liam.r.girdwood@linux.intel.com>
 *         Keyon Jie <yang.jie@linux.intel.com>
 */

/**
 * \file include/sof/lib/dma.h
 * \brief DMA Drivers definition
 * \author Liam Girdwood <liam.r.girdwood@linux.intel.com>
 * \author Keyon Jie <yang.jie@linux.intel.com>
 */

#ifndef __SOF_LIB_DMA_H__
#define __SOF_LIB_DMA_H__

#include <platform/lib/dma.h>
#include <sof/atomic.h>
#include <sof/bit.h>
#include <sof/lib/alloc.h>
#include <sof/lib/io.h>
#include <sof/lib/memory.h>
#include <sof/sof.h>
#include <sof/spinlock.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct comp_buffer;

/** \addtogroup sof_dma_drivers DMA Drivers
 *  DMA Drivers API specification.
 *  @{
 */

/* DMA direction bitmasks used to define DMA copy direction */
#define DMA_DIR_MEM_TO_MEM	BIT(0) /**< local memory copy */
#define DMA_DIR_HMEM_TO_LMEM	BIT(1) /**< host memory to local mem copy */
#define DMA_DIR_LMEM_TO_HMEM	BIT(2) /**< local mem to host mem copy */
#define DMA_DIR_MEM_TO_DEV	BIT(3) /**< local mem to dev copy */
#define DMA_DIR_DEV_TO_MEM	BIT(4) /**< dev to local mem copy */
#define DMA_DIR_DEV_TO_DEV	BIT(5) /**< dev to dev copy */
/* DMA capabilities bitmasks used to define the type of DMA */
#define DMA_CAP_HDA	BIT(0) /**< HDA DMA */
#define DMA_CAP_GP_LP	BIT(1) /**< GP LP DMA */
#define DMA_CAP_GP_HP	BIT(2) /**< GP HP DMA */
#define DMA_CAP_BT	BIT(3) /**< BT I2S DMA */
#define DMA_CAP_SP	BIT(4) /**< SP DMA */
#define DMA_CAP_DMIC	BIT(5) /**< ACP DMA DMIC */

/* DMA dev type bitmasks used to define the device types a DMA can connect to */

#define DMA_DEV_HOST	BIT(0) /**< connectable to host */
#define DMA_DEV_HDA	BIT(1) /**< connectable to HD/A link */
#define DMA_DEV_SSP	BIT(2) /**< connectable to SSP fifo */
#define DMA_DEV_DMIC	BIT(3) /**< connectable to DMIC fifo */
#define DMA_DEV_SSI	BIT(4) /**< connectable to SSI / SPI fifo */
#define DMA_DEV_ALH	BIT(5) /**< connectable to ALH link */
#define DMA_DEV_SAI	BIT(6) /**< connectable to SAI fifo */
#define DMA_DEV_ESAI	BIT(7) /**< connectable to ESAI fifo */
#define DMA_DEV_BT	BIT(8) /**< connectable to ACP BT I2S */
#define DMA_DEV_SP	BIT(9) /**< connectable to ACP SP I2S */

/* DMA access privilege flag */
#define DMA_ACCESS_EXCLUSIVE	1
#define DMA_ACCESS_SHARED	0

/* DMA copy flags */
#define DMA_COPY_BLOCKING	BIT(0)
#define DMA_COPY_ONE_SHOT	BIT(1)
/* Set by the callback handler to tell the DMA driver whether to reload
 * the current transfer or end it.
 */
enum dma_cb_status {
	DMA_CB_STATUS_RELOAD = 0,
	DMA_CB_STATUS_END,
};

/* DMA interrupt commands */
enum dma_irq_cmd {
	DMA_IRQ_STATUS_GET = 0,
	DMA_IRQ_CLEAR,
	DMA_IRQ_MASK,
	DMA_IRQ_UNMASK
};

#define DMA_CHAN_INVALID	0xFFFFFFFF
#define DMA_CORE_INVALID	0xFFFFFFFF

/* DMA attributes */
#define DMA_ATTR_BUFFER_ALIGNMENT		0
#define DMA_ATTR_COPY_ALIGNMENT			1
#define DMA_ATTR_BUFFER_ADDRESS_ALIGNMENT	2
#define DMA_ATTR_BUFFER_PERIOD_COUNT		3
struct dma;

/**
 * \brief Element of SG list (as array item).
 */
struct dma_sg_elem {
	uint32_t src;	/**< source address */
	uint32_t dest;	/**< destination address */
	uint32_t size;	/**< size (in bytes) */
};

/**
 * \brief Data used in DMA callbacks.
 */
struct dma_cb_data {
	struct dma_chan_data *channel;
	struct dma_sg_elem elem;
	enum dma_cb_status status;
};
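
/**
 * Example: a copy-completion handler consuming struct dma_cb_data.
 * A minimal sketch: registration goes through the notifier API (see the
 * programming flow below) and the exact callback signature is defined by
 * sof/lib/notifier.h; only the handler body is illustrated here.
 * \code
 * static void dma_done_cb(void *arg, enum notify_id type, void *data)
 * {
 *	struct dma_cb_data *next = data;
 *
 *	// ... consume next->elem.size bytes from the buffer ...
 *
 *	// stop the channel after this period instead of reloading
 *	next->status = DMA_CB_STATUS_END;
 * }
 * \endcode
 */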

/**
 * \brief SG elem array.
 */
struct dma_sg_elem_array {
	uint32_t count;			/**< number of elements in elems */
	struct dma_sg_elem *elems;	/**< elements */
};

/* DMA physical SG params */
struct dma_sg_config {
	uint32_t src_width;	/* in bytes */
	uint32_t dest_width;	/* in bytes */
	uint32_t burst_elems;
	uint32_t direction;
	uint32_t src_dev;
	uint32_t dest_dev;
	uint32_t cyclic;	/* circular buffer */
	uint64_t period;
	struct dma_sg_elem_array elem_array;	/* array of dma_sg elems */
	bool scatter;
	bool irq_disabled;
	/* true if configured DMA channel is the scheduling source */
	bool is_scheduling_source;
};
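
/**
 * Example: describe a cyclic mem-to-dev transfer feeding a DAI FIFO.
 * A minimal sketch; widths, burst size and the element array contents
 * are platform and topology dependent.
 * \code
 * struct dma_sg_config config = {
 *	.direction = DMA_DIR_MEM_TO_DEV,
 *	.src_width = 4,		// bytes per source read
 *	.dest_width = 4,	// bytes per destination write
 *	.burst_elems = 8,
 *	.cyclic = 1,		// circular buffer, reload on completion
 * };
 *
 * dma_sg_init(&config.elem_array);
 * // populate config.elem_array (e.g. with dma_sg_alloc()), then pass
 * // the whole config to dma_set_config().
 * \endcode
 */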

struct dma_chan_status {
	uint32_t state;
	uint32_t flags;
	uint32_t w_pos;
	uint32_t r_pos;
	uint32_t timestamp;
};

/* DMA operations */
struct dma_ops {

	struct dma_chan_data *(*channel_get)(struct dma *dma,
					     unsigned int req_channel);
	void (*channel_put)(struct dma_chan_data *channel);

	int (*start)(struct dma_chan_data *channel);
	int (*stop)(struct dma_chan_data *channel);
	int (*copy)(struct dma_chan_data *channel, int bytes, uint32_t flags);
	int (*pause)(struct dma_chan_data *channel);
	int (*release)(struct dma_chan_data *channel);
	int (*status)(struct dma_chan_data *channel,
		      struct dma_chan_status *status, uint8_t direction);

	int (*set_config)(struct dma_chan_data *channel,
			  struct dma_sg_config *config);

	int (*pm_context_restore)(struct dma *dma);
	int (*pm_context_store)(struct dma *dma);

	int (*probe)(struct dma *dma);
	int (*remove)(struct dma *dma);

	int (*get_data_size)(struct dma_chan_data *channel, uint32_t *avail,
			     uint32_t *free);

	int (*get_attribute)(struct dma *dma, uint32_t type, uint32_t *value);

	int (*interrupt)(struct dma_chan_data *channel, enum dma_irq_cmd cmd);
};

/* DMA platform data */
struct dma_plat_data {
	uint32_t id;
	uint32_t dir;	/* bitmask of supported copy directions */
	uint32_t caps;	/* bitmask of supported capabilities */
	uint32_t devs;	/* bitmask of supported devs */
	uint32_t base;
	uint32_t channels;
	int irq;
	const char *irq_name;
	uint32_t chan_size;
	const void *drv_plat_data;
};

struct dma {
	struct dma_plat_data plat_data;
	spinlock_t lock;	/**< locking mechanism */
	int sref;		/**< simple ref counter, guarded by lock */
	const struct dma_ops *ops;
	atomic_t num_channels_busy;	/* number of busy channels */
	struct dma_chan_data *chan;	/* channels array */
	void *priv_data;
};

struct dma_chan_data {
	struct dma *dma;

	uint32_t status;
	uint32_t direction;
	uint32_t desc_count;
	uint32_t index;
	uint32_t core;
	uint64_t period;	/* DMA channel's transfer period in us */
	/* true if this DMA channel is the scheduling source */
	bool is_scheduling_source;

	void *priv_data;
};

struct dma_info {
	struct dma *dma_array;
	size_t num_dmas;
};

struct audio_stream;
typedef int (*dma_process_func)(const struct audio_stream *source,
				uint32_t ioffset, struct audio_stream *sink,
				uint32_t ooffset, uint32_t frames);

/**
 * \brief API to initialize the platform DMA controllers.
 *
 * \param[in] sof Pointer to firmware main context.
 */
int dmac_init(struct sof *sof);

/**
 * \brief API to request a platform DMAC.
 *
 * Users can request a DMAC based on dev type, copy direction, capabilities
 * and access privilege.
 * For exclusive access, returns a DMAC with no channels draining.
 * For shared access, returns the DMAC with the least number of channels
 * draining.
 */
struct dma *dma_get(uint32_t dir, uint32_t caps, uint32_t dev, uint32_t flags);
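
/**
 * Example: request a shared general-purpose DMAC for SSP playback.
 * A minimal sketch; the capability and device masks that make sense
 * here are platform dependent.
 * \code
 * struct dma *dma = dma_get(DMA_DIR_MEM_TO_DEV,
 *			     DMA_CAP_GP_LP | DMA_CAP_GP_HP,
 *			     DMA_DEV_SSP, DMA_ACCESS_SHARED);
 * if (!dma)
 *	return -ENODEV; // no DMAC matched the constraints
 * \endcode
 */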

/**
 * \brief API to release a platform DMAC.
 *
 * \param[in] dma DMAC to release.
 */
void dma_put(struct dma *dma);

#define dma_set_drvdata(dma, data) \
	((dma)->priv_data = data)
#define dma_get_drvdata(dma) \
	((dma)->priv_data)
#define dma_base(dma) \
	((dma)->plat_data.base)
#define dma_irq(dma) \
	((dma)->plat_data.irq)
#define dma_irq_name(dma) \
	((dma)->plat_data.irq_name)
#define dma_chan_size(dma) \
	((dma)->plat_data.chan_size)
#define dma_chan_base(dma, chan) \
	((dma)->plat_data.base + (chan) * (dma)->plat_data.chan_size)
#define dma_chan_get_data(chan) \
	((chan)->priv_data)
#define dma_chan_set_data(chan, data) \
	((chan)->priv_data = data)
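
/**
 * Example: stash controller-private state from a driver's probe() hook.
 * A minimal sketch; struct acme_dma_pdata is a hypothetical driver type
 * and the rzalloc() zone/caps arguments follow sof/lib/alloc.h.
 * \code
 * struct acme_dma_pdata *pdata = rzalloc(SOF_MEM_ZONE_RUNTIME, 0,
 *					  SOF_MEM_CAPS_RAM, sizeof(*pdata));
 *
 * dma_set_drvdata(dma, pdata);
 * // later, e.g. in start(): pdata = dma_get_drvdata(dma);
 * \endcode
 */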

/* DMA API
 * Programming flow is :-
 *
 * 1) dma_channel_get()
 * 2) notifier_register()
 * 3) dma_set_config()
 * 4) dma_start()
 *   ... DMA now running ...
 * 5) dma_stop()
 * 6) dma_channel_put()
 */
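
/**
 * Example: the flow above for a playback channel. A minimal sketch with
 * abbreviated error handling; NOTIFIER_ID_DMA_COPY and the callback are
 * per sof/lib/notifier.h and the dma_done_cb() sketch shown earlier.
 * \code
 * struct dma_chan_data *chan = dma_channel_get(dma, 0);
 *
 * if (!chan)
 *	return -ENODEV;
 *
 * notifier_register(dev, chan, NOTIFIER_ID_DMA_COPY, dma_done_cb, 0);
 * if (dma_set_config(chan, &config) < 0)
 *	goto error;
 * dma_start(chan);
 * // ... DMA now running ...
 * dma_stop(chan);
 * dma_channel_put(chan);
 * \endcode
 */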

static inline struct dma_chan_data *dma_channel_get(struct dma *dma,
						    int req_channel)
{
	if (!dma || !dma->ops || !dma->ops->channel_get)
		return NULL;

	return dma->ops->channel_get(dma, req_channel);
}

static inline void dma_channel_put(struct dma_chan_data *channel)
{
	channel->dma->ops->channel_put(channel);
}

static inline int dma_start(struct dma_chan_data *channel)
{
	return channel->dma->ops->start(channel);
}

static inline int dma_stop(struct dma_chan_data *channel)
{
	return channel->dma->ops->stop(channel);
}

/** \defgroup sof_dma_copy_func static int dma_copy (struct dma_chan_data * channel, int bytes, uint32_t flags)
 *
 * This function is in a separate subgroup to solve a name clash with
 * struct dma_copy {}
 * @{
 */
static inline int dma_copy(struct dma_chan_data *channel, int bytes,
			   uint32_t flags)
{
	return channel->dma->ops->copy(channel, bytes, flags);
}
/** @} */

static inline int dma_pause(struct dma_chan_data *channel)
{
	return channel->dma->ops->pause(channel);
}

static inline int dma_release(struct dma_chan_data *channel)
{
	return channel->dma->ops->release(channel);
}

static inline int dma_status(struct dma_chan_data *channel,
			     struct dma_chan_status *status, uint8_t direction)
{
	return channel->dma->ops->status(channel, status, direction);
}

static inline int dma_set_config(struct dma_chan_data *channel,
				 struct dma_sg_config *config)
{
	return channel->dma->ops->set_config(channel, config);
}

static inline int dma_pm_context_restore(struct dma *dma)
{
	return dma->ops->pm_context_restore(dma);
}

static inline int dma_pm_context_store(struct dma *dma)
{
	return dma->ops->pm_context_store(dma);
}

static inline int dma_probe(struct dma *dma)
{
	return dma->ops->probe(dma);
}

static inline int dma_remove(struct dma *dma)
{
	return dma->ops->remove(dma);
}

static inline int dma_get_data_size(struct dma_chan_data *channel,
				    uint32_t *avail, uint32_t *free)
{
	return channel->dma->ops->get_data_size(channel, avail, free);
}

static inline int dma_get_attribute(struct dma *dma, uint32_t type,
				    uint32_t *value)
{
	return dma->ops->get_attribute(dma, type, value);
}
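
/**
 * Example: query the controller's required buffer alignment before
 * allocating a DMA buffer. A minimal sketch; PLATFORM_DCACHE_ALIGN is
 * an assumed platform fallback for drivers without get_attribute().
 * \code
 * uint32_t align;
 *
 * if (dma_get_attribute(dma, DMA_ATTR_BUFFER_ALIGNMENT, &align) < 0)
 *	align = PLATFORM_DCACHE_ALIGN; // assumed safe default
 * \endcode
 */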

static inline int dma_interrupt(struct dma_chan_data *channel,
				enum dma_irq_cmd cmd)
{
	return channel->dma->ops->interrupt(channel, cmd);
}

/* DMA hardware register operations */
static inline uint32_t dma_reg_read(struct dma *dma, uint32_t reg)
{
	return io_reg_read(dma_base(dma) + reg);
}

static inline uint16_t dma_reg_read16(struct dma *dma, uint32_t reg)
{
	return io_reg_read16(dma_base(dma) + reg);
}

static inline void dma_reg_write(struct dma *dma, uint32_t reg, uint32_t value)
{
	io_reg_write(dma_base(dma) + reg, value);
}

static inline void dma_reg_write16(struct dma *dma, uint32_t reg,
				   uint16_t value)
{
	io_reg_write16(dma_base(dma) + reg, value);
}

static inline void dma_reg_update_bits(struct dma *dma, uint32_t reg,
				       uint32_t mask, uint32_t value)
{
	io_reg_update_bits(dma_base(dma) + reg, mask, value);
}

static inline uint32_t dma_chan_reg_read(struct dma_chan_data *channel,
					 uint32_t reg)
{
	return io_reg_read(dma_chan_base(channel->dma, channel->index) + reg);
}

static inline uint16_t dma_chan_reg_read16(struct dma_chan_data *channel,
					   uint32_t reg)
{
	return io_reg_read16(dma_chan_base(channel->dma, channel->index) + reg);
}

static inline void dma_chan_reg_write(struct dma_chan_data *channel,
				      uint32_t reg, uint32_t value)
{
	io_reg_write(dma_chan_base(channel->dma, channel->index) + reg, value);
}

static inline void dma_chan_reg_write16(struct dma_chan_data *channel,
					uint32_t reg, uint16_t value)
{
	io_reg_write16(dma_chan_base(channel->dma, channel->index) + reg,
		       value);
}

static inline void dma_chan_reg_update_bits(struct dma_chan_data *channel,
					    uint32_t reg, uint32_t mask,
					    uint32_t value)
{
	io_reg_update_bits(dma_chan_base(channel->dma, channel->index) + reg,
			   mask, value);
}

static inline void dma_chan_reg_update_bits16(struct dma_chan_data *channel,
					      uint32_t reg, uint16_t mask,
					      uint16_t value)
{
	io_reg_update_bits16(dma_chan_base(channel->dma, channel->index) + reg,
			     mask, value);
}
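
/**
 * Example: enable a channel via its control register. A minimal sketch;
 * ACME_DMA_CH_CTRL and ACME_DMA_CH_EN are hypothetical, real offsets and
 * bit layouts come from the platform DMA driver.
 * \code
 * #define ACME_DMA_CH_CTRL	0x00
 * #define ACME_DMA_CH_EN	BIT(0)
 *
 * dma_chan_reg_update_bits(channel, ACME_DMA_CH_CTRL,
 *			    ACME_DMA_CH_EN, ACME_DMA_CH_EN);
 * \endcode
 */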

static inline bool dma_is_scheduling_source(struct dma_chan_data *channel)
{
	return channel->is_scheduling_source;
}

static inline void dma_sg_init(struct dma_sg_elem_array *ea)
{
	ea->count = 0;
	ea->elems = NULL;
}

int dma_sg_alloc(struct dma_sg_elem_array *ea,
		 enum mem_zone zone,
		 uint32_t direction,
		 uint32_t buffer_count, uint32_t buffer_bytes,
		 uintptr_t dma_buffer_addr, uintptr_t external_addr);

void dma_sg_free(struct dma_sg_elem_array *ea);
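
/**
 * Example: build a two-period host transfer element array. A minimal
 * sketch; the zone argument follows enum mem_zone from sof/lib/alloc.h
 * and period_bytes/host_addr are caller specific.
 * \code
 * struct dma_sg_elem_array ea;
 *
 * dma_sg_init(&ea);
 * if (dma_sg_alloc(&ea, SOF_MEM_ZONE_RUNTIME, DMA_DIR_HMEM_TO_LMEM,
 *		    2, period_bytes, (uintptr_t)local_buf, host_addr) < 0)
 *	return -ENOMEM;
 * // ... use ea as dma_sg_config.elem_array ...
 * dma_sg_free(&ea);
 * \endcode
 */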

/**
 * \brief Get the total size of SG buffer
 *
 * \param ea Array of SG elements.
 * \return Size of the buffer.
 */
static inline uint32_t dma_sg_get_size(struct dma_sg_elem_array *ea)
{
	int i;
	uint32_t size = 0;

	for (i = 0; i < ea->count; i++)
		size += ea->elems[i].size;

	return size;
}

struct audio_stream;
typedef void (*dma_process)(const struct audio_stream *,
			    struct audio_stream *, uint32_t);

/* copies data from DMA buffer using provided processing function */
int dma_buffer_copy_from(struct comp_buffer *source, struct comp_buffer *sink,
			 dma_process_func process, uint32_t source_bytes);

/* copies data to DMA buffer using provided processing function */
int dma_buffer_copy_to(struct comp_buffer *source, struct comp_buffer *sink,
		       dma_process_func process, uint32_t sink_bytes);
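
/**
 * Example: drain one period from a DMA buffer into a component buffer.
 * A minimal sketch; audio_stream_copy() (sof/audio/audio_stream.h) is
 * assumed as the processing function since it matches dma_process_func.
 * \code
 * int ret = dma_buffer_copy_from(dma_buf, comp_buf, audio_stream_copy,
 *				  period_bytes);
 * \endcode
 */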

/* generic DMA DSP <-> Host copier */

struct dma_copy {
	struct dma_chan_data *chan;
	struct dma *dmac;
};

/* init dma copy context */
int dma_copy_new(struct dma_copy *dc);

/* free dma copy context resources */
static inline void dma_copy_free(struct dma_copy *dc)
{
	dma_channel_put(dc->chan);
}

/* DMA copy data from host to DSP */
int dma_copy_from_host(struct dma_copy *dc, struct dma_sg_config *host_sg,
		       int32_t host_offset, void *local_ptr, int32_t size);
int dma_copy_from_host_nowait(struct dma_copy *dc,
			      struct dma_sg_config *host_sg,
			      int32_t host_offset, void *local_ptr,
			      int32_t size);

/* DMA copy data from DSP to host */
int dma_copy_to_host_nowait(struct dma_copy *dc, struct dma_sg_config *host_sg,
			    int32_t host_offset, void *local_ptr, int32_t size);

int dma_copy_set_stream_tag(struct dma_copy *dc, uint32_t stream_tag);
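
/**
 * Example: push trace data from DSP to host with the generic copier.
 * A minimal sketch; the stream tag and host SG list come from the host
 * side via IPC.
 * \code
 * struct dma_copy dc;
 *
 * if (dma_copy_new(&dc) < 0)
 *	return -ENODEV;
 * dma_copy_set_stream_tag(&dc, stream_tag);
 * dma_copy_to_host_nowait(&dc, &host_sg, host_offset, local_buf, bytes);
 * \endcode
 */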

static inline const struct dma_info *dma_info_get(void)
{
	return sof_get()->dma_info;
}

/** @}*/

#endif /* __SOF_LIB_DMA_H__ */