// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//          Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include <trace/events/sof_intel.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

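/*
 * LTR guard band programmed into the vendor-specific LTRP register while the
 * firmware is booting, per the hardware recommendation (value in microseconds).
 */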
#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = asoc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * set up the BDL entries for one segment of a stream buffer
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC only when the position is not reported over IPC
	 * and period wakeups are needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

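/*
 * Enable or disable the SPIB (Software Position In Buffer) capability for a
 * stream and program its SPIB value, which the hardware treats as the
 * application position within the ring buffer. Callers disable it with
 * HDA_DSP_SPIB_DISABLE and a size of 0 (see hda_dsp_stream_hw_free()).
 */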
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hext_stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE))
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);

	return hext_stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (dmi_l1_enable)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

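/*
 * Reset the stream DMA engine: assert the stream reset bit, poll until the
 * hardware reports it has entered reset, then clear the bit and poll until
 * the stream is back out of reset.
 */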
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

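/*
 * Start or stop the stream DMA engine for an ALSA trigger command and poll
 * the RUN bit until the hardware confirms the new state. The per-stream
 * interrupt is enabled in SOF_HDA_INTCTL on start and disabled again on stop.
 */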
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
					HDA_DSP_HDA_BAR,
					sd_offset, run,
					((run & dma_start) == dma_start),
					HDA_DSP_REG_POLL_INTERVAL_US,
					HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						sd_offset, run,
						!(run & dma_start),
						HDA_DSP_REG_POLL_INTERVAL_US,
						HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

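/*
 * ICCMAX streams are used by the firmware boot flow on platforms that require
 * it. Only the minimum programming needed to start DMA is done here: the BDL,
 * buffer length and last valid index are set up and DMA is started, without a
 * stream reset or stream tag/format programming.
 */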
/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 mask = 0x1 << hstream->index;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_hdac_chip_updateb(bus, VS_LTRP, HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * Program the common HDA stream registers; used for both the code loader
 * and normal PCM streams.
 */
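/*
 * Overall flow: decouple host and link DMA, stop and reset the stream, set up
 * the BDL, program the stream descriptor (stream tag, buffer length, format,
 * last valid index and BDL address), enable the position buffer if it is not
 * enabled yet, and finally unmask the stream interrupts.
 */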
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	if (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK) {
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);
	}

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	if (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK) {
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);
	}

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							 struct hdac_ext_stream,
							 hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << hstream->index;
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!hext_stream->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}

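/*
 * Quick check run from the interrupt handler: the interrupt is considered
 * ours unless INTSTS reads back as 0xffffffff, which indicates that the
 * controller registers are not accessible (for example while the device is
 * powered down), in which case it is ignored.
 */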
bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called from the IRQ thread, so use spin_lock_irq() */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if the register is inaccessible, ignore the interrupt */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

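/*
 * Advance the compressed stream byte counter: compute how many bytes the DMA
 * has moved since the last update from the ring-buffer position, taking a
 * possible wrap-around of the cyclic buffer into account.
 */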
static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

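/*
 * Acknowledge and handle the per-stream interrupt status bits: SD_STS is
 * cleared for every signalled stream; on buffer completion, PCM streams are
 * reported to ALSA via snd_sof_pcm_period_elapsed() when the position is not
 * delivered over IPC, and compressed streams always get a fragment-elapsed
 * notification. Returns true if any opened stream had its status bit set.
 */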
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/*
				 * Clearing the interrupt status here ensures
				 * that no interrupt gets masked after the RIRB
				 * wp is read in snd_hdac_bus_update_rirb.
				 */
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

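/*
 * Discover the stream configuration from GCAP, allocate the DMA position
 * buffer (and the CORB/RIRB ring buffer when SND_SOC_SOF_HDA is enabled), and
 * create the capture and playback hdac_stream objects with their BDL buffers.
 */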
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

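	/*
	 * GCAP (HDA spec): bits 15:12 hold the number of output (playback)
	 * streams and bits 11:8 the number of input (capture) streams, so for
	 * example gcap = 0x9701 decodes to 9 playback and 7 capture streams.
	 */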
	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hext_stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hext_stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &hext_stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
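		/*
		 * Each stream has an 8-byte slot in the DMA position buffer;
		 * the hardware writes the current buffer position into the
		 * first 32 bits of that slot.
		 */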
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		/* we always have DSP support */
		hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hext_stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hext_stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &hext_stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

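/*
 * Release everything allocated by hda_dsp_stream_init(): the position buffer,
 * the CORB/RIRB ring buffer and the per-stream BDL buffers and stream objects.
 */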
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}

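/*
 * Return the current DMA position of @hstream inside the cyclic buffer, read
 * either from the DPIB register or from the DMA position buffer depending on
 * sof_hda_position_quirk. Note that, as used in this file (see
 * hda_dsp_compr_bytes_transferred()), the value is a byte offset even though
 * the return type is snd_pcm_uframes_t.
 */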
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB register reads and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It is not clear, for example, why the flow differs between
		 * capture and playback; the only information that matters is
		 * which traffic class is used, and since all SOF-enabled
		 * platforms support only VC0, the work-around was likely
		 * unnecessary and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture streams we need an additional workaround
			 * to fix incorrect position reports:
			 *
			 * 1. Wait at least 20us before reading the position
			 * buffer after the interrupt (IOC) is generated, to
			 * make sure the position update happens on a frame
			 * boundary, i.e. 20.833us for 48KHz.
			 * 2. Perform a dummy read of the DPIB register to
			 * flush the DMA position value.
			 * 3. Read the DMA position from the posbuf. The value
			 * read back should now be >= the period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * This is the recommended option when VC1 traffic is disabled.
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While it is not needed on SOF platforms, it is kept for
		 * consistency and debugging.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}