// SPDX-License-Identifier: BSD-3-Clause
//
// Copyright(c) 2016 Intel Corporation. All rights reserved.
//
// Author: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//         Keyon Jie <yang.jie@linux.intel.com>

#include <sof/audio/buffer.h>
#include <sof/audio/component_ext.h>
#include <sof/audio/format.h>
#include <sof/audio/pipeline.h>
#include <sof/common.h>
#include <sof/debug/panic.h>
#include <sof/drivers/interrupt.h>
#include <sof/ipc/msg.h>
#include <sof/drivers/timer.h>
#include <sof/lib/alloc.h>
#include <sof/lib/cache.h>
#include <sof/lib/dai.h>
#include <sof/lib/memory.h>
#include <sof/lib/notifier.h>
#include <sof/lib/uuid.h>
#include <sof/list.h>
#include <sof/string.h>
#include <sof/ut.h>
#include <sof/trace/trace.h>
#include <ipc/dai.h>
#include <ipc/stream.h>
#include <ipc/topology.h>
#include <user/trace.h>
#include <errno.h>
#include <stddef.h>
#include <stdint.h>

static const struct comp_driver comp_dai;

/* c2b00d27-ffbc-4150-a51a-245c79c5e54b */
DECLARE_SOF_RT_UUID("dai", dai_comp_uuid, 0xc2b00d27, 0xffbc, 0x4150,
		 0xa5, 0x1a, 0x24, 0x5c, 0x79, 0xc5, 0xe5, 0x4b);

DECLARE_TR_CTX(dai_comp_tr, SOF_UUID(dai_comp_uuid), LOG_LEVEL_INFO);

static void dai_atomic_trigger(void *arg, enum notify_id type, void *data);

/* Assign DAI to a group */
int dai_assign_group(struct comp_dev *dev, uint32_t group_id)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	if (dd->group) {
		if (dd->group->group_id != group_id) {
			comp_err(dev, "dai_assign_group(), DAI already in group %d, requested %d",
				 dd->group->group_id, group_id);
			return -EINVAL;
		}

		/* No need to re-assign to the same group, do nothing */
		return 0;
	}

	dd->group = dai_group_get(group_id, DAI_CREAT);
	if (!dd->group) {
		comp_err(dev, "dai_assign_group(), failed to assign group %d",
			 group_id);
		return -EINVAL;
	}

	comp_dbg(dev, "dai_assign_group(), group %d num %d",
		 group_id, dd->group->num_dais);

	/* Register for the atomic trigger event */
	notifier_register(dev, dd->group, NOTIFIER_ID_DAI_TRIGGER,
			  dai_atomic_trigger, 0);

	return 0;
}

/* called by the DMA driver every time a descriptor has completed */
static void dai_dma_cb(void *arg, enum notify_id type, void *data)
{
	struct dma_cb_data *next = data;
	struct comp_dev *dev = arg;
	struct dai_data *dd = comp_get_drvdata(dev);
	uint32_t bytes = next->elem.size;
	struct comp_buffer *source;
	struct comp_buffer *sink;
	void *buffer_ptr;
	int ret;

	comp_dbg(dev, "dai_dma_cb()");

	next->status = DMA_CB_STATUS_RELOAD;

	/* stop dma copy for pause/stop/xrun */
	if (dev->state != COMP_STATE_ACTIVE || dd->xrun) {
		/* stop the DAI */
		dai_trigger(dd->dai, COMP_TRIGGER_STOP, dev->direction);

		/* tell DMA not to reload */
		next->status = DMA_CB_STATUS_END;
	}

	/* is our pipeline handling an XRUN? */
	if (dd->xrun) {
		/* make sure we only play back silence during an XRUN */
		if (dev->direction == SOF_IPC_STREAM_PLAYBACK)
			/* fill buffer with silence */
			buffer_zero(dd->dma_buffer);

		return;
	}

	if (dev->direction == SOF_IPC_STREAM_PLAYBACK) {
		ret = dma_buffer_copy_to(dd->local_buffer, dd->dma_buffer,
					 dd->process, bytes);

		buffer_ptr = dd->local_buffer->stream.r_ptr;
	} else {
		ret = dma_buffer_copy_from(dd->dma_buffer, dd->local_buffer,
					   dd->process, bytes);

		buffer_ptr = dd->local_buffer->stream.w_ptr;
	}

	/* assert that the dma_buffer_copy succeeded */
	if (ret < 0) {
		source = dev->direction == SOF_IPC_STREAM_PLAYBACK ?
					dd->local_buffer : dd->dma_buffer;
		sink = dev->direction == SOF_IPC_STREAM_PLAYBACK ?
					dd->dma_buffer : dd->local_buffer;
		comp_err(dev, "dai_dma_cb() dma buffer copy failed, dir %d bytes %d avail %d free %d",
			 dev->direction, bytes,
			 audio_stream_get_avail_samples(&source->stream) *
				audio_stream_frame_bytes(&source->stream),
			 audio_stream_get_free_samples(&sink->stream) *
				audio_stream_frame_bytes(&sink->stream));
		return;
	}

	/* update host position (byte offset) for drivers */
	dev->position += bytes;
	if (dd->dai_pos) {
		dd->dai_pos_blks += bytes;
		*dd->dai_pos = dd->dai_pos_blks +
			       (char *)buffer_ptr -
			       (char *)dd->dma_buffer->stream.addr;
	}
}

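/* create the DAI component: allocate the device and private data, look up the
 * DAI instance from the IPC index and request a shared-access DMA matching the
 * DAI's capabilities and direction
 */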
static struct comp_dev *dai_new(const struct comp_driver *drv,
				struct comp_ipc_config *config,
				void *spec)
{
	struct comp_dev *dev;
	struct ipc_config_dai *dai = spec;
	struct dai_data *dd;
	uint32_t dir, caps, dma_dev;

	comp_cl_dbg(&comp_dai, "dai_new()");

	dev = comp_alloc(drv, sizeof(*dev));
	if (!dev)
		return NULL;
	dev->ipc_config = *config;

	dd = rzalloc(SOF_MEM_ZONE_RUNTIME_SHARED, 0, SOF_MEM_CAPS_RAM, sizeof(*dd));
	if (!dd) {
		rfree(dev);
		return NULL;
	}

	comp_set_drvdata(dev, dd);

	dd->dai = dai_get(dai->type, dai->dai_index, DAI_CREAT);
	if (!dd->dai) {
		comp_cl_err(&comp_dai, "dai_new(): dai_get() failed to create DAI.");
		goto error;
	}
	dd->ipc_config = *dai;

	/* request GP LP DMA with shared access privilege */
	dir = dai->direction == SOF_IPC_STREAM_PLAYBACK ?
			DMA_DIR_MEM_TO_DEV : DMA_DIR_DEV_TO_MEM;

	caps = dai_get_info(dd->dai, DAI_INFO_DMA_CAPS);
	dma_dev = dai_get_info(dd->dai, DAI_INFO_DMA_DEV);

	dd->dma = dma_get(dir, caps, dma_dev, DMA_ACCESS_SHARED);
	if (!dd->dma) {
		comp_cl_err(&comp_dai, "dai_new(): dma_get() failed to get shared access to DMA.");
		goto error;
	}

	dma_sg_init(&dd->config.elem_array);
	dd->dai_pos = NULL;
	dd->dai_pos_blks = 0;
	dd->xrun = 0;
	dd->chan = NULL;

	dev->state = COMP_STATE_READY;
	return dev;

error:
	rfree(dd);
	rfree(dev);
	return NULL;
}

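/* free the DAI component: unregister the group and DMA notifiers, release the
 * DMA channel, DMA controller and DAI instance, then free the private data and
 * the device itself
 */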
static void dai_free(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	if (dd->group) {
		notifier_unregister(dev, dd->group, NOTIFIER_ID_DAI_TRIGGER);
		dai_group_put(dd->group);
	}

	if (dd->chan) {
		notifier_unregister(dev, dd->chan, NOTIFIER_ID_DMA_COPY);
		dma_channel_put(dd->chan);
	}

	dma_put(dd->dma);

	dai_put(dd->dai);

	if (dd->dai_spec_config)
		rfree(dd->dai_spec_config);

	rfree(dd);
	rfree(dev);
}

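/* fetch the hardware stream parameters from the DAI driver and report the
 * frame format this component will actually present on its pipeline side
 */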
static int dai_comp_get_hw_params(struct comp_dev *dev,
				  struct sof_ipc_stream_params *params,
				  int dir)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	int ret = 0;

	comp_dbg(dev, "dai_hw_params()");

	/* fetch hw dai stream params */
	ret = dai_get_hw_params(dd->dai, params, dir);
	if (ret < 0) {
		comp_err(dev, "dai_comp_get_hw_params(): dai_get_hw_params failed ret %d",
			 ret);
		return ret;
	}

	/* dai_comp_get_hw_params() fetches the hardware dai parameters, which
	 * are then propagated back through the pipeline, so that any component
	 * can convert specific stream parameters. Here, we overwrite the
	 * frame_fmt hardware parameter as the DAI component is able to convert
	 * streams with different frame_fmt's (using the pcm converter).
	 */
	params->frame_fmt = dev->ipc_config.frame_fmt;

	return 0;
}

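/* apply the verified stream parameters to the DAI hardware */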
static int dai_comp_hw_params(struct comp_dev *dev,
			      struct sof_ipc_stream_params *params)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	int ret;

	comp_dbg(dev, "dai_comp_hw_params()");

	/* configure hw dai stream params */
	ret = dai_hw_params(dd->dai, params);
	if (ret < 0) {
		comp_err(dev, "dai_comp_hw_params(): dai_hw_params failed ret %d",
			 ret);
		return ret;
	}

	return 0;
}

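/* verify that the requested PCM rate and channel count are compatible with the
 * DAI hardware parameters, then derive the component period frame count
 */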
static int dai_verify_params(struct comp_dev *dev,
			     struct sof_ipc_stream_params *params)
{
	struct sof_ipc_stream_params hw_params;

	dai_comp_get_hw_params(dev, &hw_params, params->direction);

	/* check whether the pcm parameters match the hardware DAI parameters
	 * set during dai_set_config(). A hardware parameter equal to 0 means
	 * that it can vary, so any value is acceptable. We do not check the
	 * format parameter, because the DAI is able to change the format using
	 * pcm_converter functions.
	 */
	if (hw_params.rate && hw_params.rate != params->rate) {
		comp_err(dev, "dai_verify_params(): pcm rate parameter %d does not match hardware rate %d",
			 params->rate, hw_params.rate);
		return -EINVAL;
	}

	if (hw_params.channels && hw_params.channels != params->channels) {
		comp_err(dev, "dai_verify_params(): pcm channels parameter %d does not match hardware channels %d",
			 params->channels, hw_params.channels);
		return -EINVAL;
	}

	/* set component period frames */
	component_set_period_frames(dev, params->rate);

	return 0;
}

/* set component audio SSP and DMA configuration */
static int dai_playback_params(struct comp_dev *dev, uint32_t period_bytes,
			       uint32_t period_count)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	struct dma_sg_config *config = &dd->config;
	uint32_t local_fmt = dd->local_buffer->stream.frame_fmt;
	uint32_t dma_fmt = dd->dma_buffer->stream.frame_fmt;
	uint32_t fifo;
	int err;

	/* set processing function */
	dd->process = pcm_get_conversion_function(local_fmt, dma_fmt);

	if (!dd->process) {
		comp_err(dev, "dai_playback_params(): converter function NULL: local fmt %d dma fmt %d\n",
			 local_fmt, dma_fmt);
		return -EINVAL;
	}

	/* set up DMA configuration */
	config->direction = DMA_DIR_MEM_TO_DEV;
	config->src_width = get_sample_bytes(dma_fmt);
	config->dest_width = config->src_width;
	config->cyclic = 1;
	config->irq_disabled = pipeline_is_timer_driven(dev->pipeline);
	config->dest_dev = dai_get_handshake(dd->dai, dev->direction,
					     dd->stream_id);
	config->is_scheduling_source = comp_is_scheduling_source(dev);
	config->period = dev->pipeline->period;

	comp_info(dev, "dai_playback_params() dest_dev = %d stream_id = %d src_width = %d dest_width = %d",
		  config->dest_dev, dd->stream_id,
		  config->src_width, config->dest_width);

	if (!config->elem_array.elems) {
		fifo = dai_get_fifo(dd->dai, dev->direction,
				    dd->stream_id);

		comp_info(dev, "dai_playback_params() fifo 0x%x", fifo);

		err = dma_sg_alloc(&config->elem_array, SOF_MEM_ZONE_RUNTIME,
				   config->direction,
				   period_count,
				   period_bytes,
				   (uintptr_t)(dd->dma_buffer->stream.addr),
				   fifo);
		if (err < 0) {
			comp_err(dev, "dai_playback_params(): dma_sg_alloc() for period_count %d period_bytes %d failed with err = %d",
				 period_count, period_bytes, err);
			return err;
		}
	}

	return 0;
}

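/* set up the capture (DEV_TO_MEM) DMA configuration and the PCM conversion
 * from the DMA buffer format to the local buffer format
 */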
static int dai_capture_params(struct comp_dev *dev, uint32_t period_bytes,
			      uint32_t period_count)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	struct dma_sg_config *config = &dd->config;
	uint32_t local_fmt = dd->local_buffer->stream.frame_fmt;
	uint32_t dma_fmt = dd->dma_buffer->stream.frame_fmt;
	uint32_t fifo;
	int err;

	/* set processing function */
	dd->process = pcm_get_conversion_function(dma_fmt, local_fmt);

	if (!dd->process) {
		comp_err(dev, "dai_capture_params(): converter function NULL: local fmt %d dma fmt %d\n",
			 local_fmt, dma_fmt);
		return -EINVAL;
	}

	/* set up DMA configuration */
	config->direction = DMA_DIR_DEV_TO_MEM;
	config->cyclic = 1;
	config->irq_disabled = pipeline_is_timer_driven(dev->pipeline);
	config->src_dev = dai_get_handshake(dd->dai, dev->direction,
					    dd->stream_id);
	config->is_scheduling_source = comp_is_scheduling_source(dev);
	config->period = dev->pipeline->period;

	/* TODO: make this code platform-specific or move it to a driver callback */
	if (dai_get_info(dd->dai, DAI_INFO_TYPE) == SOF_DAI_INTEL_DMIC) {
		/* For DMIC the DMA src and dest widths should always be 4 bytes
		 * due to the 32 bit FIFO packer. Setting the width to 2 bytes
		 * for 16 bit format would result in recording at double rate.
		 */
		config->src_width = 4;
		config->dest_width = 4;
	} else {
		config->src_width = get_sample_bytes(dma_fmt);
		config->dest_width = config->src_width;
	}

	comp_info(dev, "dai_capture_params() src_dev = %d stream_id = %d src_width = %d dest_width = %d",
		  config->src_dev, dd->stream_id,
		  config->src_width, config->dest_width);

	if (!config->elem_array.elems) {
		fifo = dai_get_fifo(dd->dai, dev->direction,
				    dd->stream_id);

		comp_info(dev, "dai_capture_params() fifo 0x%x", fifo);

		err = dma_sg_alloc(&config->elem_array, SOF_MEM_ZONE_RUNTIME,
				   config->direction,
				   period_count,
				   period_bytes,
				   (uintptr_t)(dd->dma_buffer->stream.addr),
				   fifo);
		if (err < 0) {
			comp_err(dev, "dai_capture_params(): dma_sg_alloc() for period_count %d period_bytes %d failed with err = %d",
				 period_count, period_bytes, err);
			return err;
		}
	}

	return 0;
}

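/* main stream params handler: verify and apply the PCM params, pick the local
 * buffer, size the DMA buffer from the DMA attributes and period size, then
 * build the playback or capture DMA configuration
 */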
static int dai_params(struct comp_dev *dev,
		      struct sof_ipc_stream_params *params)
{
	struct sof_ipc_stream_params hw_params = *params;
	struct dai_data *dd = comp_get_drvdata(dev);
	uint32_t frame_size;
	uint32_t period_count;
	uint32_t period_bytes;
	uint32_t buffer_size;
	uint32_t addr_align;
	uint32_t align;
	int err;

	comp_dbg(dev, "dai_params()");

	/* configure dai_data first */
	err = ipc_dai_data_config(dev);
	if (err < 0)
		return err;

	err = dai_verify_params(dev, params);
	if (err < 0) {
		comp_err(dev, "dai_params(): pcm params verification failed.");
		return -EINVAL;
	}

	/* params verification passed, so now configure hw dai stream params */
	err = dai_comp_hw_params(dev, params);
	if (err < 0) {
		comp_err(dev, "dai_params(): dai_comp_hw_params failed err %d", err);
		return err;
	}

	if (dev->direction == SOF_IPC_STREAM_PLAYBACK)
		dd->local_buffer = list_first_item(&dev->bsource_list,
						   struct comp_buffer,
						   sink_list);
	else
		dd->local_buffer = list_first_item(&dev->bsink_list,
						   struct comp_buffer,
						   source_list);

	/* check if already configured */
	if (dev->state == COMP_STATE_PREPARE) {
		comp_info(dev, "dai_params() component has already been configured.");
		return 0;
	}

	/* params can only be set in the READY (init) state */
	if (dev->state != COMP_STATE_READY) {
		comp_err(dev, "dai_params(): Component is in state %d, expected COMP_STATE_READY.",
			 dev->state);
		return -EINVAL;
	}

	err = dma_get_attribute(dd->dma, DMA_ATTR_BUFFER_ADDRESS_ALIGNMENT,
				&addr_align);
	if (err < 0) {
		comp_err(dev, "dai_params(): could not get dma buffer address alignment, err = %d",
			 err);
		return err;
	}

	err = dma_get_attribute(dd->dma, DMA_ATTR_BUFFER_ALIGNMENT, &align);
	if (err < 0 || !align) {
		comp_err(dev, "dai_params(): could not get valid dma buffer alignment, err = %d, align = %u",
			 err, align);
		return -EINVAL;
	}

	err = dma_get_attribute(dd->dma, DMA_ATTR_BUFFER_PERIOD_COUNT,
				&period_count);
	if (err < 0 || !period_count) {
		comp_err(dev, "dai_params(): could not get valid dma buffer period count, err = %d, period_count = %u",
			 err, period_count);
		return -EINVAL;
	}

	/* calculate frame size */
	frame_size = get_frame_bytes(dev->ipc_config.frame_fmt,
				     dd->local_buffer->stream.channels);

	/* calculate period size */
	period_bytes = dev->frames * frame_size;
	if (!period_bytes) {
		comp_err(dev, "dai_params(): invalid period_bytes.");
		return -EINVAL;
	}

	dd->period_bytes = period_bytes;

	/* calculate DMA buffer size */
	buffer_size = ALIGN_UP(period_count * period_bytes, align);

	/* alloc DMA buffer or change its size if it already exists */
	if (dd->dma_buffer) {
		err = buffer_set_size(dd->dma_buffer, buffer_size);
		if (err < 0) {
			comp_err(dev, "dai_params(): buffer_set_size() failed, buffer_size = %u",
				 buffer_size);
			return err;
		}
	} else {
		dd->dma_buffer = buffer_alloc(buffer_size, SOF_MEM_CAPS_DMA,
					      addr_align);
		if (!dd->dma_buffer) {
			comp_err(dev, "dai_params(): failed to alloc dma buffer");
			return -ENOMEM;
		}

		/*
		 * The dma_buffer should refer to the hardware dai parameters.
		 * Here, we overwrite the frame_fmt hardware parameter as the
		 * DAI component is able to convert streams with different
		 * frame_fmt's (using the pcm converter).
		 */
		hw_params.frame_fmt = dev->ipc_config.frame_fmt;
		buffer_set_params(dd->dma_buffer, &hw_params,
				  BUFFER_UPDATE_FORCE);
	}

	return dev->direction == SOF_IPC_STREAM_PLAYBACK ?
		dai_playback_params(dev, period_bytes, period_count) :
		dai_capture_params(dev, period_bytes, period_count);
}

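/* resolve the DMA channel from the DAI-specific config, acquire it and
 * register the DMA copy completion callback
 */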
static int dai_config_prepare(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	int channel = 0;

	/* cannot configure DAI while active */
	if (dev->state == COMP_STATE_ACTIVE) {
		comp_info(dev, "dai_config_prepare(): Component is in active state.");
		return 0;
	}

	if (!dd->dai_spec_config) {
		comp_err(dev, "dai specific config is not set yet!");
		return -EINVAL;
	}

	if (dd->chan) {
		comp_info(dev, "dai_config_prepare(): dma channel index %d already configured",
			  dd->chan->index);
		return 0;
	}

	channel = dai_config_dma_channel(dev, dd->dai_spec_config);
	comp_info(dev, "dai_config_prepare(), channel = %d", channel);

	/* an invalid channel means the DAI config has not assigned one yet */
	if (channel == DMA_CHAN_INVALID) {
		comp_err(dev, "dai_config is not set yet!");
		return -EINVAL;
	}

	/* allocate DMA channel */
	dd->chan = dma_channel_get(dd->dma, channel);
	if (!dd->chan) {
		comp_err(dev, "dai_config_prepare(): dma_channel_get() failed");
		dd->chan = NULL;
		return -EIO;
	}

	comp_info(dev, "dai_config_prepare(): newly configured dma channel index %d",
		  dd->chan->index);

	/* setup callback */
	notifier_register(dev, dd->chan, NOTIFIER_ID_DMA_COPY,
			  dai_dma_cb, 0);

	return 0;
}

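/* release the DMA channel and its copy callback when the component is not
 * active, so a following DAI config can acquire a fresh channel
 */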
static void dai_config_reset(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	/* cannot configure DAI while active */
	if (dev->state == COMP_STATE_ACTIVE) {
		comp_info(dev, "dai_config_reset(): Component is in active state. Ignore resetting");
		return;
	}

	/* release the allocated DMA channel first */
	if (dd->chan) {
		/* remove callback before the channel is put and cleared */
		notifier_unregister(dev, dd->chan,
				    NOTIFIER_ID_DMA_COPY);

		dma_channel_put(dd->chan);
		dd->chan = NULL;
	}
}

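/* prepare the component for streaming: make sure a DMA channel and SG element
 * list exist, clear the DMA buffer to avoid pops and apply the DMA config
 * (skipped when recovering from an XRUN)
 */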
static int dai_prepare(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	int ret = 0;

	comp_info(dev, "dai_prepare()");

	ret = dai_config_prepare(dev);
	if (ret < 0)
		return ret;

	ret = comp_set_state(dev, COMP_TRIGGER_PREPARE);
	if (ret < 0)
		return ret;

	if (ret == COMP_STATUS_STATE_ALREADY_SET)
		return PPL_STATUS_PATH_STOP;

	dev->position = 0;

	if (!dd->chan) {
		comp_err(dev, "dai_prepare(): Missing dd->chan.");
		comp_set_state(dev, COMP_TRIGGER_RESET);
		return -EINVAL;
	}

	if (!dd->config.elem_array.elems) {
		comp_err(dev, "dai_prepare(): Missing dd->config.elem_array.elems.");
		comp_set_state(dev, COMP_TRIGGER_RESET);
		return -EINVAL;
	}

	/* clear dma buffer to avoid pop noise */
	buffer_zero(dd->dma_buffer);

	/* DMA reconfig is not required when handling an XRUN */
	if (dd->xrun) {
		/* after prepare, we have recovered from the xrun */
		dd->xrun = 0;
		return ret;
	}

	ret = dma_set_config(dd->chan, &dd->config);
	if (ret < 0)
		comp_set_state(dev, COMP_TRIGGER_RESET);

	return ret;
}

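/* reset the component: drop the DMA channel and SG list, free the DMA buffer
 * and clear all position and xrun state
 */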
static int dai_reset(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	struct dma_sg_config *config = &dd->config;

	comp_info(dev, "dai_reset()");

	dai_config_reset(dev);

	dma_sg_free(&config->elem_array);

	if (dd->dma_buffer) {
		buffer_free(dd->dma_buffer);
		dd->dma_buffer = NULL;
	}

	dd->dai_pos_blks = 0;
	if (dd->dai_pos)
		*dd->dai_pos = 0;
	dd->dai_pos = NULL;
	dd->wallclock = 0;
	dev->position = 0;
	dd->xrun = 0;
	comp_set_state(dev, COMP_TRIGGER_RESET);

	return 0;
}

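/* latch the wallclock and stream position when the stream (re)starts, used
 * later for position and timestamp reporting
 */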
static void dai_update_start_position(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	/* update starting wallclock */
	platform_dai_wallclock(dev, &dd->wallclock);

	/* update start position */
	dd->start_position = dev->position;
}

/* used to pass standard and bespoke commands (with data) to the component */
static int dai_comp_trigger_internal(struct comp_dev *dev, int cmd)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	int ret;

	comp_dbg(dev, "dai_comp_trigger_internal(), command = %u", cmd);

	ret = comp_set_state(dev, cmd);
	if (ret < 0)
		return ret;

	if (ret == COMP_STATUS_STATE_ALREADY_SET)
		return PPL_STATUS_PATH_STOP;

	switch (cmd) {
	case COMP_TRIGGER_START:
		comp_dbg(dev, "dai_comp_trigger_internal(), START");

		/* only start the DAI if we are not handling an XRUN */
		if (dd->xrun == 0) {
			ret = dma_start(dd->chan);
			if (ret < 0)
				return ret;
			/* start the DAI */
			dai_trigger(dd->dai, cmd, dev->direction);
		} else {
			dd->xrun = 0;
		}

		dai_update_start_position(dev);
		break;
	case COMP_TRIGGER_RELEASE:
		/* before release, clear the buffer contents so that no stale
		 * data is sent out after release. This is only done in capture
		 * mode.
		 */
		if (dev->direction == SOF_IPC_STREAM_CAPTURE)
			buffer_zero(dd->dma_buffer);

		/* only start the DAI if we are not handling an XRUN */
		if (dd->xrun == 0) {
			/* recover valid start position */
			ret = dma_release(dd->chan);
			if (ret < 0)
				return ret;

			/* start the DAI */
			dai_trigger(dd->dai, cmd, dev->direction);
			ret = dma_start(dd->chan);
			if (ret < 0)
				return ret;
		} else {
			dd->xrun = 0;
		}

		dai_update_start_position(dev);
		break;
	case COMP_TRIGGER_XRUN:
		comp_info(dev, "dai_comp_trigger_internal(), XRUN");
		dd->xrun = 1;

		COMPILER_FALLTHROUGH;
	case COMP_TRIGGER_STOP:
		comp_dbg(dev, "dai_comp_trigger_internal(), STOP");
/*
 * Some platforms cannot simply disable the DMA channel during a transfer,
 * because doing so hangs the whole DMA controller. Therefore, stop the DMA
 * first and let the DAI drain the FIFO in order to stop the channel as soon
 * as possible.
 */
#if CONFIG_DMA_SUSPEND_DRAIN
		ret = dma_stop(dd->chan);
		dai_trigger(dd->dai, cmd, dev->direction);
#else
		dai_trigger(dd->dai, cmd, dev->direction);
		ret = dma_stop(dd->chan);
#endif
		break;
	case COMP_TRIGGER_PAUSE:
		comp_dbg(dev, "dai_comp_trigger_internal(), PAUSE");
		ret = dma_pause(dd->chan);
		dai_trigger(dd->dai, cmd, dev->direction);
		break;
	default:
		break;
	}

	return ret;
}

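/* trigger entry point: ungrouped DAIs are triggered immediately, while grouped
 * DAIs wait until every member of the group has received the same command and
 * are then triggered atomically through the group notifier
 */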
static int dai_comp_trigger(struct comp_dev *dev, int cmd)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	struct dai_group *group = dd->group;
	uint32_t irq_flags;
	int ret = 0;

	/* DAI not in a group, use normal trigger */
	if (!group) {
		comp_dbg(dev, "dai_comp_trigger(), non-atomic trigger");
		return dai_comp_trigger_internal(dev, cmd);
	}

	/* DAI is grouped, so only trigger when the entire group is ready */

	if (!group->trigger_counter) {
		/* First DAI to receive the trigger command,
		 * prepare for atomic trigger
		 */
		comp_dbg(dev, "dai_comp_trigger(), begin atomic trigger for group %d",
			 group->group_id);
		group->trigger_cmd = cmd;
		group->trigger_counter = group->num_dais - 1;
	} else if (group->trigger_cmd != cmd) {
		/* Already processing a different trigger command */
		comp_err(dev, "dai_comp_trigger(), already processing atomic trigger");
		ret = -EAGAIN;
	} else {
		/* Count down the number of remaining DAIs required
		 * to receive the trigger command before atomic trigger
		 * takes place
		 */
		group->trigger_counter--;
		comp_dbg(dev, "dai_comp_trigger(), trigger counter %d, group %d",
			 group->trigger_counter, group->group_id);

		if (!group->trigger_counter) {
			/* The counter has reached 0, which means
			 * all DAIs have received the same trigger command
			 * and we may begin the actual trigger process
			 * synchronously.
			 */

			irq_local_disable(irq_flags);
			notifier_event(group, NOTIFIER_ID_DAI_TRIGGER,
				       BIT(cpu_get_id()), NULL, 0);
			irq_local_enable(irq_flags);

			/* return error of last trigger */
			ret = group->trigger_ret;
		}
	}

	return ret;
}

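/* NOTIFIER_ID_DAI_TRIGGER handler: runs the stored group trigger command on
 * each member DAI in atomic context
 */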
static void dai_atomic_trigger(void *arg, enum notify_id type, void *data)
{
	struct comp_dev *dev = arg;
	struct dai_data *dd = comp_get_drvdata(dev);
	struct dai_group *group = dd->group;

	/* Atomic context set by the last DAI to receive trigger command */
	group->trigger_ret = dai_comp_trigger_internal(dev, group->trigger_cmd);
}

/* report xrun occurrence */
static void dai_report_xrun(struct comp_dev *dev, uint32_t bytes)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	if (dev->direction == SOF_IPC_STREAM_PLAYBACK) {
		comp_err(dev, "dai_report_xrun(): underrun due to no data available");
		comp_underrun(dev, dd->local_buffer, bytes);
	} else {
		comp_err(dev, "dai_report_xrun(): overrun due to no space available");
		comp_overrun(dev, dd->local_buffer, bytes);
	}
}

/* copy and process stream data from source to sink buffers */
static int dai_copy(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	uint32_t dma_fmt = dd->dma_buffer->stream.frame_fmt;
	const uint32_t sampling = get_sample_bytes(dma_fmt);
	struct comp_buffer *buf = dd->local_buffer;
	uint32_t avail_bytes = 0;
	uint32_t free_bytes = 0;
	uint32_t copy_bytes = 0;
	uint32_t src_samples;
	uint32_t sink_samples;
	uint32_t samples;
	int ret = 0;
	uint32_t flags = 0;

	comp_dbg(dev, "dai_copy()");

	/* get data sizes from DMA */
	ret = dma_get_data_size(dd->chan, &avail_bytes, &free_bytes);
	if (ret < 0) {
		dai_report_xrun(dev, 0);
		return ret;
	}

	buffer_lock(buf, &flags);

	/* calculate minimum size to copy */
	if (dev->direction == SOF_IPC_STREAM_PLAYBACK) {
		src_samples = audio_stream_get_avail_samples(&buf->stream);
		sink_samples = free_bytes / sampling;
		samples = MIN(src_samples, sink_samples);
	} else {
		src_samples = avail_bytes / sampling;
		sink_samples = audio_stream_get_free_samples(&buf->stream);
		samples = MIN(src_samples, sink_samples);
	}

	/* limit bytes per copy to one period for the whole pipeline
	 * in order to avoid a high load spike
	 */
	samples = MIN(samples, dd->period_bytes / sampling);

	copy_bytes = samples * sampling;

	buffer_unlock(buf, flags);

	comp_dbg(dev, "dai_copy(), dir: %d copy_bytes= 0x%x, frames= %d",
		 dev->direction, copy_bytes,
		 samples / buf->stream.channels);

	/* check for the possibility of a glitch */
	if (dev->direction == SOF_IPC_STREAM_PLAYBACK &&
	    copy_bytes + avail_bytes < dd->period_bytes)
		comp_warn(dev, "dai_copy(): Copy_bytes %d + avail bytes %d < period bytes %d, possible glitch",
			  copy_bytes, avail_bytes, dd->period_bytes);
	else if (dev->direction == SOF_IPC_STREAM_CAPTURE &&
		 copy_bytes + free_bytes < dd->period_bytes)
		comp_warn(dev, "dai_copy(): Copy_bytes %d + free bytes %d < period bytes %d, possible glitch",
			  copy_bytes, free_bytes, dd->period_bytes);

	/* return if nothing to copy */
	if (!copy_bytes) {
		comp_warn(dev, "dai_copy(): nothing to copy");
		return 0;
	}

	ret = dma_copy(dd->chan, copy_bytes, 0);
	if (ret < 0) {
		dai_report_xrun(dev, copy_bytes);
		return ret;
	}

	return ret;
}

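/* report the stream position (byte offset) and the wallclock captured at
 * stream start to the host
 */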
static int dai_position(struct comp_dev *dev, struct sof_ipc_stream_posn *posn)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	/* TODO: improve accuracy by adding current DMA position */
	posn->dai_posn = dev->position;

	/* set stream start wallclock */
	posn->wallclock = dd->wallclock;

	return 0;
}

/**
 * \brief Get DAI parameters and configure timestamping
 * \param[in, out] dev DAI device.
 * \return Error code.
 *
 * This function retrieves various DAI parameters such as type, direction, index, and DMA
 * controller information that are needed when configuring HW timestamping. Note that the
 * DAI must be prepared before this function is used (for the DMA information). If not, an
 * error is returned.
 */
static int dai_ts_config(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);
	struct timestamp_cfg *cfg = &dd->ts_config;
	struct ipc_config_dai *dai = &dd->ipc_config;

	comp_dbg(dev, "dai_ts_config()");
	if (!dd->chan) {
		comp_err(dev, "dai_ts_config(), No DMA channel information");
		return -EINVAL;
	}

	cfg->type = dd->dai->drv->type;
	cfg->direction = dai->direction;
	cfg->index = dd->dai->index;
	cfg->dma_id = dd->dma->plat_data.id;
	cfg->dma_chan_index = dd->chan->index;
	cfg->dma_chan_count = dd->dma->plat_data.channels;
	if (!dd->dai->drv->ts_ops.ts_config)
		return -ENXIO;

	return dd->dai->drv->ts_ops.ts_config(dd->dai, cfg);
}

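/* thin wrappers around the DAI driver's ts_ops: start, stop and read HW
 * timestamping, returning -ENXIO when the driver does not implement them
 */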
static int dai_ts_start(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	comp_dbg(dev, "dai_ts_start()");
	if (!dd->dai->drv->ts_ops.ts_start)
		return -ENXIO;

	return dd->dai->drv->ts_ops.ts_start(dd->dai, &dd->ts_config);
}

static int dai_ts_stop(struct comp_dev *dev)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	comp_dbg(dev, "dai_ts_stop()");
	if (!dd->dai->drv->ts_ops.ts_stop)
		return -ENXIO;

	return dd->dai->drv->ts_ops.ts_stop(dd->dai, &dd->ts_config);
}

static int dai_ts_get(struct comp_dev *dev, struct timestamp_data *tsd)
{
	struct dai_data *dd = comp_get_drvdata(dev);

	comp_dbg(dev, "dai_ts_get()");
	if (!dd->dai->drv->ts_ops.ts_get)
		return -ENXIO;

	return dd->dai->drv->ts_ops.ts_get(dd->dai, &dd->ts_config, tsd);
}

static const struct comp_driver comp_dai = {
	.type	= SOF_COMP_DAI,
	.uid	= SOF_RT_UUID(dai_comp_uuid),
	.tctx	= &dai_comp_tr,
	.ops	= {
		.create			= dai_new,
		.free			= dai_free,
		.params			= dai_params,
		.dai_get_hw_params	= dai_comp_get_hw_params,
		.trigger		= dai_comp_trigger,
		.copy			= dai_copy,
		.prepare		= dai_prepare,
		.reset			= dai_reset,
		.dai_config		= dai_config,
		.position		= dai_position,
		.dai_ts_config		= dai_ts_config,
		.dai_ts_start		= dai_ts_start,
		.dai_ts_stop		= dai_ts_stop,
		.dai_ts_get		= dai_ts_get,
	},
};

static SHARED_DATA struct comp_driver_info comp_dai_info = {
	.drv = &comp_dai,
};

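/* register the DAI component driver with the component framework */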
UT_STATIC void sys_comp_dai_init(void)
{
	comp_register(platform_shared_get(&comp_dai_info,
					  sizeof(comp_dai_info)));
}

DECLARE_MODULE(sys_comp_dai_init);