// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//          Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//          Rander Wang <rander.wang@intel.com>
//          Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

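/*
 * Return a kasprintf()-allocated description of the stream for debug
 * messages; the caller is responsible for kfree()-ing it. May return
 * NULL on allocation failure.
 */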
static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = asoc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * set up one of the BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE must not cross a 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 hstream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "%s: period_bytes:0x%x\n", __func__, period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "%s: periods:%d\n", __func__, periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if the position IPC is not used
	 * and period wakeup is needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

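/*
 * Enable or disable the Software Position In Buffer (SPIB) capability
 * for the stream and program the SPIB value.
 */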
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hext_stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE))
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);

	return hext_stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (dmi_l1_enable)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

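/*
 * Reset the stream: assert the stream reset bit (SOF_STREAM_SD_OFFSET_CRST),
 * wait for the hardware to acknowledge it, then release the bit and wait for
 * the stream to come out of reset.
 */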
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

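/*
 * Start or stop the stream DMA for the given ALSA trigger command and
 * poll SD_CTL until the DMA RUN bit reflects the requested state.
 */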
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 mask = 0x1 << hstream->index;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_hdac_chip_updateb(bus, VS_LTRP, HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare the common HDA stream register settings, for both the code loader
 * and normal streams.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	if (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK) {
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);
	}

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	if (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK) {
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);
	}

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

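/*
 * Reset the stream, re-couple the host and link DMA if the link side is
 * idle, and disable SPIB for the stream.
 */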
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							    struct hdac_ext_stream,
							    hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << hstream->index;
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!hext_stream->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}

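/*
 * Check whether stream interrupts need handling: read INTSTS and return
 * true unless the register reads back as all-ones (inaccessible).
 */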
bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called from an irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

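/*
 * Update the cumulative byte counter of a compress stream, accounting
 * for wrap-around of the cyclic host buffer.
 */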
static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

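/*
 * Handle per-stream interrupts: clear the stream status bits and, for
 * completed buffer segments, notify ALSA via the PCM period elapsed or
 * compress fragment elapsed callbacks. Returns true if any opened stream
 * had a pending interrupt status bit.
 */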
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* Inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

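/*
 * Threaded interrupt handler: services stream completions and, when the
 * HDA codec support is built in, checks and clears the RIRB interrupt.
 */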
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/*
				 * Clearing the interrupt status here ensures
				 * that no interrupt gets masked after the RIRB
				 * wp is read in snd_hdac_bus_update_rirb.
				 */
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

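/*
 * Allocate and initialize the host DMA streams (capture streams first,
 * then playback) according to the stream counts reported in GCAP, and
 * allocate the position buffer and per-stream BDL buffers.
 */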
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hext_stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hext_stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &hext_stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		/* we always have DSP support */
		hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hext_stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hext_stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &hext_stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

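/*
 * Free the position buffer, the CORB/RIRB ring buffer and the per-stream
 * BDL buffers, and remove the streams from the bus stream list.
 */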
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}

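/*
 * Return the current DMA position of the stream, using the method selected
 * by sof_hda_position_quirk (DPIB registers, the position buffer in DDR, or
 * the legacy Skylake flow).
 */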
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback, the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For a capture stream, additional workarounds are
			 * needed to fix the incorrect position issue:
			 *
			 * 1. Wait at least 20us before reading the position
			 *    buffer after the interrupt is generated (IOC), to
			 *    make sure the position update happens on a frame
			 *    boundary, i.e. 20.833us for 48KHz.
			 * 2. Perform a dummy read of the DPIB register to
			 *    flush the DMA position value.
			 * 3. Read the DMA position from the posbuf. Now the
			 *    readback value should be >= the period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
