xref: /linux/sound/soc/sof/intel/hda-stream.c (revision a8e7ef3cec99ba2487110e01d77a8a278593b3e9)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
11 //          Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <sound/hdaudio_ext.h>
19 #include <sound/hda_register.h>
20 #include <sound/sof.h>
21 #include <trace/events/sof_intel.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "../ipc4-priv.h"
25 #include "hda.h"
26 
27 int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS;
28 module_param_named(position_quirk, sof_hda_position_quirk, int, 0444);
29 MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk");
30 EXPORT_SYMBOL_NS(sof_hda_position_quirk, "SND_SOC_SOF_INTEL_HDA_COMMON");
31 
32 #define HDA_LTRP_GB_VALUE_US	95
33 
34 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
35 {
36 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
37 		return "Playback";
38 	else
39 		return "Capture";
40 }
41 
42 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
43 {
44 	struct snd_soc_pcm_runtime *rtd;
45 
46 	if (hstream->substream)
47 		rtd = snd_soc_substream_to_rtd(hstream->substream);
48 	else if (hstream->cstream)
49 		rtd = hstream->cstream->private_data;
50 	else
51 		/* Non audio DMA user, like dma-trace */
52 		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
53 				 hda_hstream_direction_str(hstream),
54 				 hstream->stream_tag);
55 
56 	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
57 			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
58 			 hstream->stream_tag);
59 }
60 
/*
 * hda_setup_bdle - program Buffer Descriptor List Entries for one segment
 *
 * Fills BDL entries starting at *bdlp to describe @size bytes of @dmab
 * beginning at @offset. Entries are split on scatter-gather chunk
 * boundaries and, when the controller requires it, on 4K boundaries.
 * The IOC (interrupt on completion) flag is set only on the last entry
 * of the segment, and only when @ioc is non-zero.
 *
 * *bdlp is advanced past the last entry written and hstream->frags is
 * incremented per entry. Returns the updated buffer offset, or -EINVAL
 * if HDA_DSP_MAX_BDL_ENTRIES would be exceeded.
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		/* the controller supports only a bounded number of BDLEs */
		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr (BDL entries are little endian) */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}
107 
/*
 * hda_dsp_stream_setup_bdl - set up the Buffer Descriptor List (BDL) for a
 * host memory transfer. The BDL describes the location of the individual
 * buffers and is little endian.
 *
 * When hstream->period_bytes is 0 (firmware/library loading), a synthetic
 * period size is derived from the buffer so that at least two BDLEs are
 * produced, satisfying the HDA requirement that LVI >= 1.
 *
 * Returns the total number of bytes described (the final offset) on
 * success, or a negative error code from hda_setup_bdle().
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes: %#x, bufsize: %#x\n", period_bytes,
		hstream->bufsize);

	if (!period_bytes) {
		unsigned int chunk_size;

		chunk_size = snd_sgbuf_get_chunk_size(dmab, 0, hstream->bufsize);

		period_bytes = hstream->bufsize;

		/*
		 * HDA spec demands that the LVI value must be at least one
		 * before the DMA operation can begin. This means that there
		 * must be at least two BDLE present for the transfer.
		 *
		 * If the buffer is not a single continuous area then the
		 * hda_setup_bdle() will create multiple BDLEs for each segment.
		 * If the memory is a single continuous area, force it to be
		 * split into two 'periods', otherwise the transfer will be
		 * split to multiple BDLE for each chunk in hda_setup_bdle()
		 *
		 * Note: period_bytes == 0 can only happen for firmware or
		 * library loading. The data size is 4K aligned, which ensures
		 * that the second chunk's start address will be 128-byte
		 * aligned.
		 */
		if (chunk_size == hstream->bufsize)
			period_bytes /= 2;
	}

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods: %d\n", periods);

	/* a non-multiple buffer gets one extra, shorter, trailing period */
	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if don't use position IPC
	 * and period_wakeup needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}
186 
/*
 * hda_dsp_stream_spib_config - enable/disable SPIB (Software Position In
 * Buffer) for a stream and program its value.
 *
 * @enable: written, shifted by the stream index, into the SPBFCCTL enable
 *          bit for this stream
 * @size:   value written to the stream's SPIB register
 *
 * Returns 0 on success, -EINVAL if the SPIB capability BAR is not mapped.
 */
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	/* one enable bit per stream index */
	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hstream->spib_addr, size);

	return 0;
}
211 
/*
 * _hda_dsp_stream_get - claim the next unused host stream for @direction.
 *
 * Walks the bus stream list under reg_lock and takes the first stream that
 * matches @direction, is not already opened, and whose host DMA channel is
 * not reserved. When @pair is true, a stream whose link DMA channel is
 * busy is skipped and the link channel of the chosen stream is claimed as
 * well (link_locked).
 *
 * On pre-ACE hardware, DMI link L1 entry is disabled when the stream is
 * not flagged as L1-compatible (workaround for host DMA xruns during
 * pause/release in capture).
 *
 * Returns the claimed stream, or NULL if none is free.
 */
static struct hdac_ext_stream *
_hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags, bool pair)
{
	const struct sof_intel_dsp_desc *chip_info =  get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			/* for a pair, the link DMA channel must be idle too */
			if (pair && hext_stream->link_locked)
				continue;

			s->opened = true;

			if (pair)
				hext_stream->link_locked = true;

			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n", snd_pcm_direction_name(direction));
		return hext_stream;
	}

	/* record the requester's flags; read back in _hda_dsp_stream_put() */
	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}
273 
/* Claim an unused host stream (host DMA channel only). */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	return _hda_dsp_stream_get(sdev, direction, flags, false);
}
279 
/* Claim an unused host stream together with its link DMA channel. */
struct hdac_ext_stream *
hda_dsp_stream_pair_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	return _hda_dsp_stream_get(sdev, direction, flags, true);
}
285 
286 /* free a stream */
287 static int _hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag, bool pair)
288 {
289 	const struct sof_intel_dsp_desc *chip_info =  get_chip_info(sdev->pdata);
290 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
291 	struct hdac_bus *bus = sof_to_bus(sdev);
292 	struct sof_intel_hda_stream *hda_stream;
293 	struct hdac_ext_stream *hext_stream;
294 	struct hdac_ext_stream *link_stream;
295 	struct hdac_stream *s;
296 	bool dmi_l1_enable = true;
297 	bool found = false;
298 
299 	spin_lock_irq(&bus->reg_lock);
300 
301 	/*
302 	 * close stream matching the stream tag and check if there are any open streams
303 	 * that are DMI L1 incompatible.
304 	 */
305 	list_for_each_entry(s, &bus->stream_list, list) {
306 		hext_stream = stream_to_hdac_ext_stream(s);
307 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);
308 
309 		if (!s->opened)
310 			continue;
311 
312 		if (s->direction == direction && s->stream_tag == stream_tag) {
313 			s->opened = false;
314 			found = true;
315 			if (pair)
316 				link_stream = hext_stream;
317 		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
318 			dmi_l1_enable = false;
319 		}
320 	}
321 
322 	spin_unlock_irq(&bus->reg_lock);
323 
324 	/* Enable DMI L1 if permitted */
325 	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
326 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
327 					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
328 		hda->l1_disabled = false;
329 	}
330 
331 	if (!found) {
332 		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
333 			__func__, stream_tag);
334 		return -ENODEV;
335 	}
336 
337 	if (pair)
338 		snd_hdac_ext_stream_release(link_stream, HDAC_EXT_STREAM_TYPE_LINK);
339 
340 	return 0;
341 }
342 
/* Release a host stream claimed with hda_dsp_stream_get(). */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	return _hda_dsp_stream_put(sdev, direction, stream_tag, false);
}
347 
/* Release a host stream and its link DMA channel (hda_dsp_stream_pair_get()). */
int hda_dsp_stream_pair_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	return _hda_dsp_stream_put(sdev, direction, stream_tag, true);
}
352 
/*
 * hda_dsp_stream_reset - run the stream descriptor reset handshake.
 *
 * Asserts SRST (CRST bit in SDxCTL), waits for the hardware to report it
 * set, then deasserts it and waits for the hardware to report it clear.
 * Returns 0 on success, -ETIMEDOUT if either phase times out.
 */
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}
391 
/*
 * hda_dsp_stream_trigger - start/stop the host DMA for a stream.
 *
 * START enables the stream's interrupt in INTCTL, sets RUN + the
 * interrupt-enable bits in SDxCTL and polls until the hardware reports
 * the DMA running. STOP/SUSPEND clears them, polls until the DMA has
 * stopped, then clears SDxSTS and the INTCTL bit.
 *
 * PAUSE_RELEASE/PAUSE_PUSH are handled like START/STOP only in DSP-less
 * mode; with the DSP in the pipeline they are a no-op at host DMA level.
 *
 * Returns 0 (or a non-negative poll result) on success, -EINVAL for an
 * unknown command, or a negative timeout error from the register poll.
 */
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		/* wait for the hardware to acknowledge the RUN bit */
		ret = snd_sof_dsp_read_poll_timeout(sdev,
					HDA_DSP_HDA_BAR,
					sd_offset, run,
					((run & dma_start) == dma_start),
					HDA_DSP_REG_POLL_INTERVAL_US,
					HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		/* wait for the DMA to actually stop before touching status */
		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						sd_offset, run,
						!(run & dma_start),
						HDA_DSP_REG_POLL_INTERVAL_US,
						HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			/* clear any latched stream status bits */
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}
477 
/*
 * hda_dsp_iccmax_stream_hw_params - minimal recommended programming for an
 * ICCMAX stream.
 *
 * Programs the BDL, cyclic buffer length and LVI for @hext_stream backed
 * by @dmab, decouples host and link DMA, sets the LTRP guardband and
 * starts the DMA. @params is unused in this minimal path.
 *
 * Returns 0 on success, -ENODEV for a missing stream/buffer, or the
 * error from BDL setup.
 */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_stream *hstream;
	int sd_offset;
	int ret;
	u32 mask;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = 0x1 << hstream->index;

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	/* reset the DMA position report in the position buffer */
	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}
553 
554 /*
555  * prepare for common hdac registers settings, for both code loader
556  * and normal stream.
557  */
558 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
559 			     struct hdac_ext_stream *hext_stream,
560 			     struct snd_dma_buffer *dmab,
561 			     struct snd_pcm_hw_params *params)
562 {
563 	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
564 	struct hdac_bus *bus = sof_to_bus(sdev);
565 	struct hdac_stream *hstream;
566 	int sd_offset, ret;
567 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
568 	u32 mask;
569 	u32 run;
570 
571 	if (!hext_stream) {
572 		dev_err(sdev->dev, "error: no stream available\n");
573 		return -ENODEV;
574 	}
575 
576 	if (!dmab) {
577 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
578 		return -ENODEV;
579 	}
580 
581 	hstream = &hext_stream->hstream;
582 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
583 	mask = BIT(hstream->index);
584 
585 	/* decouple host and link DMA if the DSP is used */
586 	if (!sdev->dspless_mode_selected)
587 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
588 					mask, mask);
589 
590 	/* clear stream status */
591 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
592 				SOF_HDA_CL_DMA_SD_INT_MASK |
593 				SOF_HDA_SD_CTL_DMA_START, 0);
594 
595 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
596 					    sd_offset, run,
597 					    !(run & dma_start),
598 					    HDA_DSP_REG_POLL_INTERVAL_US,
599 					    HDA_DSP_STREAM_RUN_TIMEOUT);
600 
601 	if (ret < 0) {
602 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
603 
604 		dev_err(sdev->dev,
605 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
606 			__func__, stream_name ? stream_name : "unknown stream");
607 		kfree(stream_name);
608 		return ret;
609 	}
610 
611 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
612 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
613 				SOF_HDA_CL_DMA_SD_INT_MASK,
614 				SOF_HDA_CL_DMA_SD_INT_MASK);
615 
616 	/* stream reset */
617 	ret = hda_dsp_stream_reset(sdev, hstream);
618 	if (ret < 0)
619 		return ret;
620 
621 	if (hstream->posbuf)
622 		*hstream->posbuf = 0;
623 
624 	/* reset BDL address */
625 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
626 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
627 			  0x0);
628 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
629 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
630 			  0x0);
631 
632 	/* clear stream status */
633 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
634 				SOF_HDA_CL_DMA_SD_INT_MASK |
635 				SOF_HDA_SD_CTL_DMA_START, 0);
636 
637 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
638 					    sd_offset, run,
639 					    !(run & dma_start),
640 					    HDA_DSP_REG_POLL_INTERVAL_US,
641 					    HDA_DSP_STREAM_RUN_TIMEOUT);
642 
643 	if (ret < 0) {
644 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
645 
646 		dev_err(sdev->dev,
647 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
648 			__func__, stream_name ? stream_name : "unknown stream");
649 		kfree(stream_name);
650 		return ret;
651 	}
652 
653 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
654 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
655 				SOF_HDA_CL_DMA_SD_INT_MASK,
656 				SOF_HDA_CL_DMA_SD_INT_MASK);
657 
658 	hstream->frags = 0;
659 
660 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
661 	if (ret < 0) {
662 		dev_err(sdev->dev, "error: set up of BDL failed\n");
663 		return ret;
664 	}
665 
666 	/* program stream tag to set up stream descriptor for DMA */
667 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
668 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
669 				hstream->stream_tag <<
670 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
671 
672 	/* program cyclic buffer length */
673 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
674 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
675 			  hstream->bufsize);
676 
677 	/*
678 	 * Recommended hardware programming sequence for HDAudio DMA format
679 	 * on earlier platforms - this is not needed on newer platforms
680 	 *
681 	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
682 	 *    for corresponding stream index before the time of writing
683 	 *    format to SDxFMT register.
684 	 * 2. Write SDxFMT
685 	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
686 	 *    enable decoupled mode
687 	 */
688 
689 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
690 		/* couple host and link DMA, disable DSP features */
691 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
692 					mask, 0);
693 
694 	/* program stream format */
695 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
696 				sd_offset +
697 				SOF_HDA_ADSP_REG_SD_FORMAT,
698 				0xffff, hstream->format_val);
699 
700 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
701 		/* decouple host and link DMA, enable DSP features */
702 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
703 					mask, mask);
704 
705 	/* program last valid index */
706 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
707 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
708 				0xffff, (hstream->frags - 1));
709 
710 	/* program BDL address */
711 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
712 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
713 			  (u32)hstream->bdl.addr);
714 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
715 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
716 			  upper_32_bits(hstream->bdl.addr));
717 
718 	/* enable position buffer, if needed */
719 	if (bus->use_posbuf && bus->posbuf.addr &&
720 	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
721 	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
722 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
723 				  upper_32_bits(bus->posbuf.addr));
724 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
725 				  (u32)bus->posbuf.addr |
726 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
727 	}
728 
729 	/* set interrupt enable bits */
730 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
731 				SOF_HDA_CL_DMA_SD_INT_MASK,
732 				SOF_HDA_CL_DMA_SD_INT_MASK);
733 
734 	/* read FIFO size */
735 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
736 		hstream->fifo_size =
737 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
738 					 sd_offset +
739 					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
740 		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
741 		hstream->fifo_size += 1;
742 	} else {
743 		hstream->fifo_size = 0;
744 	}
745 
746 	return ret;
747 }
748 
/*
 * hda_dsp_stream_hw_free - undo hw_params for a PCM substream.
 *
 * Resets the stream descriptor, re-couples host and link DMA (DSP mode
 * only, and only when the link channel is idle), disables SPIB and
 * detaches the substream from the hdac_stream.
 *
 * Returns 0 on success or the error from the stream reset.
 */
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							 struct hdac_ext_stream,
							 hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		/* PPCTL is shared; serialize against other stream updates */
		guard(spinlock_irq)(&bus->reg_lock);

		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
781 
/*
 * hda_dsp_check_stream_irq - irq handler check for stream interrupts.
 *
 * Reads INTSTS under reg_lock and reports true whenever the register is
 * readable (anything other than all-ones, which indicates the device is
 * inaccessible, e.g. in a low power state).
 */
bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called at irq thread, so use spin_lock_irq */
	guard(spinlock_irq)(&bus->reg_lock);

	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if Register inaccessible, ignore it.*/
	if (status != 0xffffffff)
		ret = true;

	return ret;
}
EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, "SND_SOC_SOF_INTEL_HDA_COMMON");
802 
803 static void
804 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
805 {
806 	u64 buffer_size = hstream->bufsize;
807 	u64 prev_pos, pos, num_bytes;
808 
809 	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
810 	pos = hda_dsp_stream_get_position(hstream, direction, false);
811 
812 	if (pos < prev_pos)
813 		num_bytes = (buffer_size - prev_pos) +  pos;
814 	else
815 		num_bytes = pos - prev_pos;
816 
817 	hstream->curr_pos += num_bytes;
818 }
819 
/*
 * hda_dsp_stream_check - service per-stream interrupt status.
 *
 * For every opened stream whose bit is set in @status, reads and
 * write-clears SDxSTS. For running streams with a buffer-complete
 * indication: streams with no substream/cstream (code loading, data
 * transfers) get their completion signalled; PCM streams notify ALSA of
 * an elapsed period (only when position IPC is not used); compressed
 * streams get their transferred-bytes accounting updated and the
 * fragment-elapsed notification.
 *
 * Returns true if any stream had status to handle.
 */
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			/* write-1-to-clear the latched status bits */
			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			if (!s->running)
				continue;
			if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;
			if (!s->substream && !s->cstream) {
				/*
				 * when no substream is found, the DMA may used for code loading
				 * or data transfers which can rely on wait_for_completion()
				 */
				struct sof_intel_hda_stream *hda_stream;
				struct hdac_ext_stream *hext_stream;

				hext_stream = stream_to_hdac_ext_stream(s);
				hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
							  hext_stream);

				complete(&hda_stream->ioc);
				continue;
			}

			/* Inform ALSA only if the IPC position is not used */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}
868 
/*
 * hda_dsp_stream_threaded_handler - threaded irq handler for stream and
 * RIRB (codec response) interrupts.
 *
 * Re-reads INTSTS and services streams/RIRB while activity persists,
 * bounded to 10 iterations to cover interrupts missed due to unsolicited
 * codec responses. Always returns IRQ_HANDLED.
 */
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		/* guard released at the end of each loop iteration */
		guard(spinlock_irq)(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN) {
			active |= hda_codec_check_rirb_status(sdev);
		}
	}

	return IRQ_HANDLED;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, "SND_SOC_SOF_INTEL_HDA_COMMON");
898 
/*
 * hda_dsp_stream_init - discover and allocate all host streams.
 *
 * Reads the playback/capture stream counts from GCAP, allocates the DMA
 * position buffer and CORB/RIRB ring buffer, then creates one
 * sof_intel_hda_stream per stream with its PPHC/PPLC, SPIB/FIFO and
 * stream-descriptor MMIO addresses, a BDL DMA buffer and a slot in the
 * position buffer. Capture streams occupy the first indices, playback
 * the rest, per the HDA register layout.
 *
 * NOTE(review): on mid-function errors the already-allocated DMA buffers
 * are not freed here; presumably the caller unwinds via
 * hda_dsp_stream_free() — confirm against the caller.
 *
 * Returns 0 on success, -EINVAL for out-of-range stream counts,
 * -ENOMEM on allocation failure.
 */
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture and playback streams */
	for (i = 0; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;
		/* completed by hda_dsp_stream_check() for loading streams */
		init_completion(&hda_stream->ioc);

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->opened = false;
		hstream->running = false;

		/* capture streams come first, tags are 1-based per direction */
		if (i < num_capture) {
			hstream->stream_tag = i + 1;
			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
		} else {
			hstream->stream_tag = i - num_capture + 1;
			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
		}

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		/* each stream owns an 8-byte slot in the position buffer */
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	/* store stream count from GCAP required for CHAIN_DMA */
	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
		struct sof_ipc4_fw_data *ipc4_data = sdev->private;

		ipc4_data->num_playback_streams = num_playback;
		ipc4_data->num_capture_streams = num_capture;
	}

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_init, "SND_SOC_SOF_INTEL_HDA_COMMON");
1036 
1037 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
1038 {
1039 	struct hdac_bus *bus = sof_to_bus(sdev);
1040 	struct hdac_stream *s, *_s;
1041 	struct hdac_ext_stream *hext_stream;
1042 	struct sof_intel_hda_stream *hda_stream;
1043 
1044 	/* free position buffer */
1045 	if (bus->posbuf.area)
1046 		snd_dma_free_pages(&bus->posbuf);
1047 
1048 	/* free CORB/RIRB buffer - only used for HDaudio codecs */
1049 	if (bus->rb.area)
1050 		snd_dma_free_pages(&bus->rb);
1051 
1052 	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
1053 		/* TODO: decouple */
1054 
1055 		/* free bdl buffer */
1056 		if (s->bdl.area)
1057 			snd_dma_free_pages(&s->bdl);
1058 		list_del(&s->list);
1059 		hext_stream = stream_to_hdac_ext_stream(s);
1060 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
1061 					  hext_stream);
1062 		devm_kfree(sdev->dev, hda_stream);
1063 	}
1064 }
1065 EXPORT_SYMBOL_NS(hda_dsp_stream_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
1066 
/**
 * hda_dsp_stream_get_position - read the current DMA position of a stream
 * @hstream: HDaudio stream
 * @direction: SNDRV_PCM_STREAM_PLAYBACK or SNDRV_PCM_STREAM_CAPTURE
 * @can_sleep: true when the calling context allows usleep_range(); only
 *	       used by the Skylake legacy capture workaround
 *
 * The position source is selected by the sof_hda_position_quirk module
 * parameter: DPIB registers, DPIB DDR updates (position buffer), or the
 * legacy Skylake mix of both. Returns 0 for an unsupported quirk value
 * or when the raw position is outside the buffer size.
 */
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback, the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture stream, we need more workaround to fix the
			 * position incorrect issue:
			 *
			 * 1. Wait at least 20us before reading position buffer after
			 * the interrupt generated(IOC), to make sure position update
			 * happens on frame boundary i.e. 20.833uSec for 48KHz.
			 * 2. Perform a dummy Read to DPIB register to flush DMA
			 * position value.
			 * 3. Read the DMA Position from posbuf. Now the readback
			 * value should be >= period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			/* dummy read, return value intentionally discarded */
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	/* clamp out-of-range readings to 0 */
	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, "SND_SOC_SOF_INTEL_HDA_COMMON");
1153 
/* combine two 32-bit register halves into a single 64-bit value */
#define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l))
1155 
1156 /**
1157  * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
1158  * @sdev: SOF device
1159  * @component: ASoC component
1160  * @substream: PCM substream
1161  *
1162  * Returns the raw Linear Link Position value
1163  */
1164 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
1165 			   struct snd_soc_component *component,
1166 			   struct snd_pcm_substream *substream)
1167 {
1168 	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
1169 	struct snd_soc_pcm_runtime *be_rtd = NULL;
1170 	struct hdac_ext_stream *hext_stream;
1171 	struct snd_soc_dai *cpu_dai;
1172 	struct snd_soc_dpcm *dpcm;
1173 	u32 llp_l, llp_u;
1174 
1175 	/*
1176 	 * The LLP needs to be read from the Link DMA used for this FE as it is
1177 	 * allowed to use any combination of Link and Host channels
1178 	 */
1179 	for_each_dpcm_be(rtd, substream->stream, dpcm) {
1180 		if (dpcm->fe != rtd)
1181 			continue;
1182 
1183 		be_rtd = dpcm->be;
1184 	}
1185 
1186 	if (!be_rtd)
1187 		return 0;
1188 
1189 	cpu_dai = snd_soc_rtd_to_cpu(be_rtd, 0);
1190 	if (!cpu_dai)
1191 		return 0;
1192 
1193 	hext_stream = snd_soc_dai_get_dma_data(cpu_dai, substream);
1194 	if (!hext_stream)
1195 		return 0;
1196 
1197 	/*
1198 	 * The pplc_addr have been calculated during probe in
1199 	 * hda_dsp_stream_init():
1200 	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1201 	 *	       SOF_HDA_PPLC_BASE +
1202 	 *	       SOF_HDA_PPLC_MULTI * total_stream +
1203 	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
1204 	 *
1205 	 * Use this pre-calculated address to avoid repeated re-calculation.
1206 	 */
1207 	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
1208 	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);
1209 
1210 	/* Compensate the LLP counter with the saved offset */
1211 	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
1212 		return merge_u64(llp_u, llp_l) -
1213 		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);
1214 
1215 	return merge_u64(llp_u, llp_l);
1216 }
1217 EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1218 
1219 /**
1220  * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
1221  * @sdev: SOF device
1222  * @component: ASoC component
1223  * @substream: PCM substream
1224  *
1225  * Returns the raw Linear Link Position value
1226  */
1227 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
1228 			   struct snd_soc_component *component,
1229 			   struct snd_pcm_substream *substream)
1230 {
1231 	struct hdac_stream *hstream = substream->runtime->private_data;
1232 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1233 	u32 ldp_l, ldp_u;
1234 
1235 	/*
1236 	 * The pphc_addr have been calculated during probe in
1237 	 * hda_dsp_stream_init():
1238 	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1239 	 *	       SOF_HDA_PPHC_BASE +
1240 	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
1241 	 *
1242 	 * Use this pre-calculated address to avoid repeated re-calculation.
1243 	 */
1244 	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
1245 	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);
1246 
1247 	return ((u64)ldp_u << 32) | ldp_l;
1248 }
1249 EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1250 
/**
 * hda_data_stream_prepare - reserve and configure an HDA stream for data transfer
 * @dev: device whose drvdata is the SOF device
 * @format: stream format value, stored as the stream's format_val
 * @size: DMA buffer size in bytes
 * @dmab: DMA buffer descriptor; allocated here unless @persistent_buffer
 *	  is true and @dmab->area is already populated
 * @persistent_buffer: when true, reuse an already-allocated @dmab
 * @direction: SNDRV_PCM_STREAM_PLAYBACK or SNDRV_PCM_STREAM_CAPTURE
 * @is_iccmax: use the ICCMAX flavor of the hw_params setup (no SPIB config)
 * @pair: reserve the stream together with its paired stream
 *
 * Returns the prepared hdac_ext_stream on success, or an ERR_PTR() on
 * failure (the stream is released and any buffer allocated here freed).
 *
 * NOTE(review): on a hw_params failure the out_free path frees @dmab
 * even when @persistent_buffer is set and the buffer pre-existed —
 * confirm callers expect to re-allocate in that case.
 */
struct hdac_ext_stream *
hda_data_stream_prepare(struct device *dev, unsigned int format, unsigned int size,
			struct snd_dma_buffer *dmab, bool persistent_buffer, int direction,
			bool is_iccmax, bool pair)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	int ret;

	if (pair)
		hext_stream = hda_dsp_stream_pair_get(sdev, direction, 0);
	else
		hext_stream = hda_dsp_stream_get(sdev, direction, 0);

	if (!hext_stream) {
		dev_err(sdev->dev, "%s: no stream available\n", __func__);
		return ERR_PTR(-ENODEV);
	}
	hstream = &hext_stream->hstream;
	hstream->substream = NULL;

	/*
	 * Allocate DMA buffer if it is temporary or if the buffer is intended
	 * to be persistent but not yet allocated.
	 * We cannot rely solely on !dmab->area as caller might use a struct on
	 * stack (when it is temporary) without clearing it to 0.
	 */
	if (!persistent_buffer || !dmab->area) {
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV_SG, dev, size, dmab);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: memory alloc failed: %d\n",
				__func__, ret);
			goto out_put;
		}
	}

	hstream->period_bytes = 0; /* initialize period_bytes */
	hstream->format_val = format;
	hstream->bufsize = size;

	if (is_iccmax) {
		ret = hda_dsp_iccmax_stream_hw_params(sdev, hext_stream, dmab, NULL);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: iccmax stream prepare failed: %d\n",
				__func__, ret);
			goto out_free;
		}
	} else {
		ret = hda_dsp_stream_hw_params(sdev, hext_stream, dmab, NULL);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: hdac prepare failed: %d\n", __func__, ret);
			goto out_free;
		}
		hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_ENABLE, size);
	}

	return hext_stream;

out_free:
	snd_dma_free_pages(dmab);
	dmab->area = NULL;
	dmab->bytes = 0;
	hstream->bufsize = 0;
	hstream->format_val = 0;
out_put:
	if (pair)
		hda_dsp_stream_pair_put(sdev, direction, hstream->stream_tag);
	else
		hda_dsp_stream_put(sdev, direction, hstream->stream_tag);
	return ERR_PTR(ret);
}
EXPORT_SYMBOL_NS(hda_data_stream_prepare, "SND_SOC_SOF_INTEL_HDA_COMMON");
1324 
1325 int hda_data_stream_cleanup(struct device *dev, struct snd_dma_buffer *dmab,
1326 			    bool persistent_buffer, struct hdac_ext_stream *hext_stream, bool pair)
1327 {
1328 	struct snd_sof_dev *sdev =  dev_get_drvdata(dev);
1329 	struct hdac_stream *hstream = hdac_stream(hext_stream);
1330 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
1331 	int ret = 0;
1332 
1333 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
1334 		ret = hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
1335 	else
1336 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
1337 					SOF_HDA_SD_CTL_DMA_START, 0);
1338 
1339 	if (pair)
1340 		hda_dsp_stream_pair_put(sdev, hstream->direction, hstream->stream_tag);
1341 	else
1342 		hda_dsp_stream_put(sdev, hstream->direction, hstream->stream_tag);
1343 
1344 	hstream->running = 0;
1345 	hstream->substream = NULL;
1346 
1347 	/* reset BDL address */
1348 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
1349 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 0);
1350 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
1351 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 0);
1352 
1353 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset, 0);
1354 
1355 	if (!persistent_buffer) {
1356 		snd_dma_free_pages(dmab);
1357 		dmab->area = NULL;
1358 		dmab->bytes = 0;
1359 		hstream->bufsize = 0;
1360 		hstream->format_val = 0;
1361 	}
1362 
1363 	return ret;
1364 }
1365 EXPORT_SYMBOL_NS(hda_data_stream_cleanup, "SND_SOC_SOF_INTEL_HDA_COMMON");
1366