// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <linux/pm_runtime.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

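/*
 * LTR guardband value (in microseconds) written to the vendor-specific LTRP
 * register while the firmware is booting (see the ICCMAX stream setup below).
 */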
#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = asoc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * program one or more BDL entries for a region of the stream buffer
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *stream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		stream->frags++;
		offset += chunk;

		dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
			 stream->frags, chunk);
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *stream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = stream->period_bytes;
	dev_dbg(sdev->dev, "%s: period_bytes:0x%x\n", __func__, period_bytes);
	if (!period_bytes)
		period_bytes = stream->bufsize;

	periods = stream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "%s: periods:%d\n", __func__, periods);

	remain = stream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
	offset = 0;
	stream->frags = 0;

	/*
	 * set IOC if the DSP does not report the position over IPC and
	 * period wakeups are needed.
	 */
	ioc = hda->no_ipc_position ? !stream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						stream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

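/*
 * Enable or disable the SPIB (Software Position In Buffer) capability for a
 * stream and program the SPIB register with the given value.
 */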
int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, stream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(stream,
						  struct sof_intel_hda_stream,
						  hda_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios.
	 */
	if (!(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE))
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);

	return stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream, hda_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (dmi_l1_enable)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);

	if (!found) {
		dev_dbg(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

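/*
 * Start or stop the host DMA of a stream in response to an ALSA trigger
 * command.
 */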
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *stream, int cmd)
{
	struct hdac_stream *hstream = &stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_START:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	u32 mask;
	int ret;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = 0x1 << hstream->index;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_hdac_chip_updateb(bus, VS_LTRP, HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * Program the common HDA stream registers; used by both the code loader
 * and normal PCM streams.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset;
	int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 val, mask;
	u32 run;

	if (!stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);

	/* decouple host and link DMA */
	mask = 0x1 << hstream->index;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x1);
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if (val & 0x1)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: stream reset failed\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
				0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       sd_offset);
		if ((val & 0x1) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	/* couple host and link DMA, disable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_CL_SD_FORMAT,
				0xffff, hstream->format_val);

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer */
	if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

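/*
 * Couple the host and link DMA again if the link DMA channel is idle and
 * detach the substream from the host stream.
 */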
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *stream = substream->runtime->private_data;
	struct hdac_ext_stream *link_dev = container_of(stream,
							struct hdac_ext_stream,
							hstream);
	struct hdac_bus *bus = sof_to_bus(sdev);
	u32 mask = 0x1 << stream->index;

	spin_lock_irq(&bus->reg_lock);
	/* couple host and link DMA if link DMA channel is idle */
	if (!link_dev->link_locked)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
					SOF_HDA_REG_PP_PPCTL, mask, 0);
	spin_unlock_irq(&bus->reg_lock);

	stream->substream = NULL;

	return 0;
}

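/*
 * Check the stream interrupt status. Returns false only when the registers
 * read back as all ones, i.e. the controller is not accessible.
 */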
bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called at irq thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_hdac_chip_readl(bus, INTSTS);
	dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

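/*
 * Update hstream->curr_pos with the number of bytes transferred since the
 * previous update, accounting for wrap-around of the DMA position within
 * the ring buffer.
 */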
static void
hda_dsp_set_bytes_transferred(struct hdac_stream *hstream, u64 buffer_size)
{
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = snd_hdac_stream_get_pos_posbuf(hstream);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

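/*
 * Handle the per-stream interrupt status bits: clear SD_STS for each
 * signalled stream and notify the PCM or compress core of elapsed periods
 * or fragments. Returns true if any opened stream had status to handle.
 */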
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = snd_hdac_stream_readb(s, SD_STS);

			dev_vdbg(bus->dev, "stream %d status 0x%x\n",
				 s->index, sd_status);

			snd_hdac_stream_writeb(s, SD_STS, sd_status);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_set_bytes_transferred(s,
					s->cstream->runtime->buffer_size);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

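/*
 * Threaded interrupt handler: processes the stream interrupts and, when the
 * HDA codec support is enabled, the RIRB (codec response) interrupts.
 */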
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	u32 rirb_status;
#endif
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_hdac_chip_readl(bus, INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
		if (status & AZX_INT_CTRL_EN) {
			rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
			if (rirb_status & RIRB_INT_MASK) {
				/*
				 * Clearing the interrupt status here ensures
				 * that no interrupt gets masked after the RIRB
				 * wp is read in snd_hdac_bus_update_rirb.
				 */
				snd_hdac_chip_writeb(bus, RIRBSTS,
						     RIRB_INT_MASK);
				active = true;
				if (rirb_status & RIRB_INT_RESPONSE)
					snd_hdac_bus_update_rirb(bus);
			}
		}
#endif
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

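/*
 * Discover the host streams from GCAP, allocate the DMA position buffer,
 * the CORB/RIRB ring buffer and the per-stream BDL buffers, and register
 * all capture and playback streams with the HDA bus.
 */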
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* mem alloc for the CORB/RIRB ringbuffers */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}
#endif

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		stream = &hda_stream->hda_stream;

		/* we always have DSP support */
		stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

		stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
			SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
			SOF_HDA_PPLC_INTERVAL * i;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream = &stream->hstream;
		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

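/*
 * Free the position buffer, the CORB/RIRB ring buffer and the per-stream
 * BDL buffers allocated in hda_dsp_stream_init().
 */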
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

#if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
	/* free CORB/RIRB ringbuffer */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);
#endif

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(stream, struct sof_intel_hda_stream,
					  hda_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}