1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license. When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2025 Intel Corporation.
7 //
8
9 /*
10 * Hardware interface for SoundWire BPT support with HDA DMA
11 */
12
13 #include <sound/hdaudio_ext.h>
14 #include <sound/hda-mlink.h>
15 #include <sound/hda-sdw-bpt.h>
16 #include <sound/sof.h>
17 #include <sound/sof/ipc4/header.h>
18 #include "../ops.h"
19 #include "../sof-priv.h"
20 #include "../ipc4-priv.h"
21 #include "hda.h"
22
23 #define BPT_FREQUENCY 192000 /* The max rate defined in rate_bits[] hdac_device.c */
24 #define BPT_MULTIPLIER ((BPT_FREQUENCY / 48000) - 1)
25 #define BPT_CHAIN_DMA_FIFO_MS 10
26 /*
27 * This routine is directly inspired by sof_ipc4_chain_dma_trigger(),
28 * with major simplifications since there are no pipelines defined
29 * and no dependency on ALSA hw_params
30 */
chain_dma_trigger(struct snd_sof_dev * sdev,unsigned int stream_tag,int direction,int state)31 static int chain_dma_trigger(struct snd_sof_dev *sdev, unsigned int stream_tag,
32 int direction, int state)
33 {
34 struct sof_ipc4_fw_data *ipc4_data = sdev->private;
35 bool allocate, enable, set_fifo_size;
36 struct sof_ipc4_msg msg = {{ 0 }};
37 int dma_id;
38
39 if (sdev->pdata->ipc_type != SOF_IPC_TYPE_4)
40 return -EOPNOTSUPP;
41
42 switch (state) {
43 case SOF_IPC4_PIPE_RUNNING: /* Allocate and start the chain */
44 allocate = true;
45 enable = true;
46 set_fifo_size = true;
47 break;
48 case SOF_IPC4_PIPE_PAUSED: /* Stop the chain */
49 allocate = true;
50 enable = false;
51 set_fifo_size = false;
52 break;
53 case SOF_IPC4_PIPE_RESET: /* Deallocate chain resources and remove the chain */
54 allocate = false;
55 enable = false;
56 set_fifo_size = false;
57 break;
58 default:
59 dev_err(sdev->dev, "Unexpected state %d", state);
60 return -EINVAL;
61 }
62
63 msg.primary = SOF_IPC4_MSG_TYPE_SET(SOF_IPC4_GLB_CHAIN_DMA);
64 msg.primary |= SOF_IPC4_MSG_DIR(SOF_IPC4_MSG_REQUEST);
65 msg.primary |= SOF_IPC4_MSG_TARGET(SOF_IPC4_FW_GEN_MSG);
66
67 /* for BPT/BRA we can use the same stream tag for host and link */
68 dma_id = stream_tag - 1;
69 if (direction == SNDRV_PCM_STREAM_CAPTURE)
70 dma_id += ipc4_data->num_playback_streams;
71
72 msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_HOST_ID(dma_id);
73 msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_LINK_ID(dma_id);
74
75 /* For BPT/BRA we use 32 bits so SCS is not set */
76
77 /* CHAIN DMA needs at least 2ms */
78 if (set_fifo_size)
79 msg.extension |= SOF_IPC4_GLB_EXT_CHAIN_DMA_FIFO_SIZE(BPT_FREQUENCY / 1000 *
80 BPT_CHAIN_DMA_FIFO_MS *
81 sizeof(u32));
82
83 if (allocate)
84 msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ALLOCATE_MASK;
85
86 if (enable)
87 msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ENABLE_MASK;
88
89 return sof_ipc_tx_message_no_reply(sdev->ipc, &msg, 0);
90 }
91
/*
 * Allocate and program one host DMA stream for BPT transfers.
 *
 * On success *sdw_bpt_stream points to the prepared stream and dmab_bdl
 * holds the buffer descriptor list allocated by hda_cl_prepare().
 * When the DSP is in use, the host and link DMA halves are decoupled and
 * the link half is reset and programmed as well.
 * Returns 0 on success or a negative error code.
 */
static int hda_sdw_bpt_dma_prepare(struct device *dev, struct hdac_ext_stream **sdw_bpt_stream,
				   struct snd_dma_buffer *dmab_bdl, u32 bpt_num_bytes,
				   unsigned int num_channels, int direction)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_ext_stream *bpt_stream;
	unsigned int format = HDA_CL_STREAM_FORMAT;

	/*
	 * the baseline format needs to be adjusted to
	 * bandwidth requirements
	 */
	format |= (num_channels - 1);
	format |= BPT_MULTIPLIER << AC_FMT_MULT_SHIFT;

	dev_dbg(dev, "direction %d format_val %#x\n", direction, format);

	bpt_stream = hda_cl_prepare(dev, format, bpt_num_bytes, dmab_bdl, false, direction, false);
	if (IS_ERR(bpt_stream)) {
		dev_err(sdev->dev, "%s: SDW BPT DMA prepare failed: dir %d\n",
			__func__, direction);
		return PTR_ERR(bpt_stream);
	}
	*sdw_bpt_stream = bpt_stream;

	if (!sdev->dspless_mode_selected) {
		struct hdac_stream *hstream;
		u32 mask;

		/* decouple host and link DMA if the DSP is used */
		hstream = &bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, mask);

		/* reset must complete before the link half is programmed */
		snd_hdac_ext_stream_reset(bpt_stream);

		snd_hdac_ext_stream_setup(bpt_stream, format);
	}

	/* only playback streams need their tag mapped on the SoundWire link */
	if (hdac_stream(bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_set_stream_id(hlink, stream_tag);
	}
	return 0;
}
144
/*
 * Undo hda_sdw_bpt_dma_prepare(): release the DMA/BDL resources, clear the
 * link stream-tag mapping for playback, reset the chain DMA in the firmware
 * and re-couple the host and link DMA halves when the DSP is in use.
 *
 * Returns the error from hda_cl_cleanup() if it fails; chain DMA reset
 * failures are only logged (best-effort teardown).
 */
static int hda_sdw_bpt_dma_deprepare(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream,
				     struct snd_dma_buffer *dmab_bdl)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_stream *hstream;
	u32 mask;
	int ret;

	ret = hda_cl_cleanup(sdev->dev, dmab_bdl, true, sdw_bpt_stream);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: SDW BPT DMA cleanup failed\n",
			__func__);
		return ret;
	}

	/* mirror of the playback-only mapping done in prepare */
	if (hdac_stream(sdw_bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(sdw_bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_clear_stream_id(hlink, stream_tag);
	}

	if (!sdev->dspless_mode_selected) {
		/* Release CHAIN_DMA resources */
		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_RESET);
		if (ret < 0)
			dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_RESET failed: %d\n",
				__func__, ret);

		/* couple host and link DMA */
		hstream = &sdw_bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, 0);
	}

	return 0;
}
189
hda_sdw_bpt_dma_enable(struct device * dev,struct hdac_ext_stream * sdw_bpt_stream)190 static int hda_sdw_bpt_dma_enable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
191 {
192 struct snd_sof_dev *sdev = dev_get_drvdata(dev);
193 int ret;
194
195 ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_START);
196 if (ret < 0)
197 dev_err(sdev->dev, "%s: SDW BPT DMA trigger start failed\n", __func__);
198
199 if (!sdev->dspless_mode_selected) {
200 /* the chain DMA needs to be programmed before the DMAs */
201 ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
202 hdac_stream(sdw_bpt_stream)->direction,
203 SOF_IPC4_PIPE_RUNNING);
204 if (ret < 0) {
205 dev_err(sdev->dev, "%s: chain_dma_trigger failed: %d\n",
206 __func__, ret);
207 hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
208 return ret;
209 }
210 snd_hdac_ext_stream_start(sdw_bpt_stream);
211 }
212
213 return ret;
214 }
215
hda_sdw_bpt_dma_disable(struct device * dev,struct hdac_ext_stream * sdw_bpt_stream)216 static int hda_sdw_bpt_dma_disable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
217 {
218 struct snd_sof_dev *sdev = dev_get_drvdata(dev);
219 int ret;
220
221 if (!sdev->dspless_mode_selected) {
222 snd_hdac_ext_stream_clear(sdw_bpt_stream);
223
224 ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
225 hdac_stream(sdw_bpt_stream)->direction,
226 SOF_IPC4_PIPE_PAUSED);
227 if (ret < 0)
228 dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_PAUSED failed: %d\n",
229 __func__, ret);
230 }
231
232 ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
233 if (ret < 0)
234 dev_err(sdev->dev, "%s: SDW BPT DMA trigger stop failed\n", __func__);
235
236 return ret;
237 }
238
/*
 * Prepare the TX and RX BPT streams for a SoundWire link and map their
 * channels/stream tags in the PCMSyCM registers (TX on PDI0, RX on PDI1).
 *
 * Channel counts are derived from the requested DMA bandwidth at the BPT
 * rate with 32-bit samples. On any failure, everything already set up is
 * torn down before returning the error.
 */
int hda_sdw_bpt_open(struct device *dev, int link_id, struct hdac_ext_stream **bpt_tx_stream,
		     struct snd_dma_buffer *dmab_tx_bdl, u32 bpt_tx_num_bytes,
		     u32 tx_dma_bandwidth, struct hdac_ext_stream **bpt_rx_stream,
		     struct snd_dma_buffer *dmab_rx_bdl, u32 bpt_rx_num_bytes,
		     u32 rx_dma_bandwidth)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	unsigned int num_channels_tx;
	unsigned int num_channels_rx;
	int ret1;
	int ret;

	/* bandwidth (bits/s) -> channel count at BPT_FREQUENCY, 32 bits/sample */
	num_channels_tx = DIV_ROUND_UP(tx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_tx_stream, dmab_tx_bdl, bpt_tx_num_bytes,
				      num_channels_tx, SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for TX: %d\n",
			__func__, ret);
		return ret;
	}

	num_channels_rx = DIV_ROUND_UP(rx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_rx_stream, dmab_rx_bdl, bpt_rx_num_bytes,
				      num_channels_rx, SNDRV_PCM_STREAM_CAPTURE);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for RX: %d\n",
			__func__, ret);

		/* unwind the TX prepare; errors are only logged */
		ret1 = hda_sdw_bpt_dma_deprepare(dev, *bpt_tx_stream, dmab_tx_bdl);
		if (ret1 < 0)
			dev_err(dev, "%s: hda_sdw_bpt_dma_deprepare failed for TX: %d\n",
				__func__, ret1);
		return ret;
	}

	/* we need to map the channels in PCMSyCM registers */
	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     0, /* cpu_dai->id -> PDI0 */
					     GENMASK(num_channels_tx - 1, 0),
					     hdac_stream(*bpt_tx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for TX: %d\n",
			__func__, ret);
		goto close;
	}

	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     1, /* cpu_dai->id -> PDI1 */
					     GENMASK(num_channels_rx - 1, 0),
					     hdac_stream(*bpt_rx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_CAPTURE);
	if (!ret)
		return 0;

	dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for RX: %d\n",
		__func__, ret);

close:
	/* both streams are prepared at this point, tear both down */
	ret1 = hda_sdw_bpt_close(dev, *bpt_tx_stream, dmab_tx_bdl, *bpt_rx_stream, dmab_rx_bdl);
	if (ret1 < 0)
		dev_err(dev, "%s: hda_sdw_bpt_close failed: %d\n",
			__func__, ret1);

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_open, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");
308
hda_sdw_bpt_send_async(struct device * dev,struct hdac_ext_stream * bpt_tx_stream,struct hdac_ext_stream * bpt_rx_stream)309 int hda_sdw_bpt_send_async(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
310 struct hdac_ext_stream *bpt_rx_stream)
311 {
312 int ret1;
313 int ret;
314
315 ret = hda_sdw_bpt_dma_enable(dev, bpt_tx_stream);
316 if (ret < 0) {
317 dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for TX: %d\n",
318 __func__, ret);
319 return ret;
320 }
321
322 ret = hda_sdw_bpt_dma_enable(dev, bpt_rx_stream);
323 if (ret < 0) {
324 dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for RX: %d\n",
325 __func__, ret);
326
327 ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
328 if (ret1 < 0)
329 dev_err(dev, "%s: hda_sdw_bpt_dma_disable failed for TX: %d\n",
330 __func__, ret1);
331 }
332
333 return ret;
334 }
335 EXPORT_SYMBOL_NS(hda_sdw_bpt_send_async, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");
336
337 /*
338 * 3s is several orders of magnitude larger than what is needed for a
339 * typical firmware download.
340 */
341 #define HDA_BPT_IOC_TIMEOUT_MS 3000
342
/*
 * Wait for a previously started BPT transfer to complete.
 *
 * For each direction (TX first, then RX): wait for the stream's IOC
 * completion, then poll the DMA position until it returns to zero to make
 * sure the DMA is fully flushed. Both DMAs are disabled on exit regardless
 * of success or failure.
 *
 * Returns 0 on success, -ETIMEDOUT on completion/flush timeout, or the
 * first DMA-disable error when the wait itself succeeded.
 */
int hda_sdw_bpt_wait(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
		     struct hdac_ext_stream *bpt_rx_stream)
{
	struct sof_intel_hda_stream *hda_tx_stream;
	struct sof_intel_hda_stream *hda_rx_stream;
	snd_pcm_uframes_t tx_position;
	snd_pcm_uframes_t rx_position;
	unsigned long time_tx_left;
	unsigned long time_rx_left;
	int ret = 0;
	int ret1;
	int i;

	hda_tx_stream = container_of(bpt_tx_stream, struct sof_intel_hda_stream, hext_stream);
	hda_rx_stream = container_of(bpt_rx_stream, struct sof_intel_hda_stream, hext_stream);

	time_tx_left = wait_for_completion_timeout(&hda_tx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_tx_left) {
		/* report the stalled position for diagnostics */
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		dev_err(dev, "%s: SDW BPT TX DMA did not complete: %ld\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
	i = 0;
	do {
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		usleep_range(1000, 1010);
		i++;
		/* ~1ms per iteration, so the bound is roughly the IOC timeout */
	} while (tx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (tx_position) {
		dev_err(dev, "%s: SDW BPT TX DMA position %ld was not cleared\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* the wait should be minimal here */
	time_rx_left = wait_for_completion_timeout(&hda_rx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_rx_left) {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		dev_err(dev, "%s: SDW BPT RX DMA did not complete: %ld\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
	i = 0;
	do {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		usleep_range(1000, 1010);
		i++;
	} while (rx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (rx_position) {
		dev_err(dev, "%s: SDW BPT RX DMA position %ld was not cleared\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

dma_disable:
	/* always disable both DMAs; keep the first error encountered */
	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_rx_stream);
	if (!ret)
		ret = ret1;

	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
	if (!ret)
		ret = ret1;

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_wait, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");
424
hda_sdw_bpt_close(struct device * dev,struct hdac_ext_stream * bpt_tx_stream,struct snd_dma_buffer * dmab_tx_bdl,struct hdac_ext_stream * bpt_rx_stream,struct snd_dma_buffer * dmab_rx_bdl)425 int hda_sdw_bpt_close(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
426 struct snd_dma_buffer *dmab_tx_bdl, struct hdac_ext_stream *bpt_rx_stream,
427 struct snd_dma_buffer *dmab_rx_bdl)
428 {
429 int ret;
430 int ret1;
431
432 ret = hda_sdw_bpt_dma_deprepare(dev, bpt_rx_stream, dmab_rx_bdl);
433
434 ret1 = hda_sdw_bpt_dma_deprepare(dev, bpt_tx_stream, dmab_tx_bdl);
435 if (!ret)
436 ret = ret1;
437
438 return ret;
439 }
440 EXPORT_SYMBOL_NS(hda_sdw_bpt_close, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");
441
442 MODULE_LICENSE("Dual BSD/GPL");
443 MODULE_DESCRIPTION("SOF helpers for HDaudio SoundWire BPT");
444 MODULE_IMPORT_NS("SND_SOC_SOF_INTEL_HDA_COMMON");
445 MODULE_IMPORT_NS("SND_SOC_SOF_HDA_MLINK");
446