xref: /OK3568_Linux_fs/kernel/sound/soc/intel/skylake/skl-topology.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-only
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  *  skl-topology.c - Implements Platform component ALSA controls/widget
4*4882a593Smuzhiyun  *  handlers.
5*4882a593Smuzhiyun  *
6*4882a593Smuzhiyun  *  Copyright (C) 2014-2015 Intel Corp
7*4882a593Smuzhiyun  *  Author: Jeeja KP <jeeja.kp@intel.com>
8*4882a593Smuzhiyun  *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
9*4882a593Smuzhiyun  */
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun #include <linux/slab.h>
12*4882a593Smuzhiyun #include <linux/types.h>
13*4882a593Smuzhiyun #include <linux/firmware.h>
14*4882a593Smuzhiyun #include <linux/uuid.h>
15*4882a593Smuzhiyun #include <sound/intel-nhlt.h>
16*4882a593Smuzhiyun #include <sound/soc.h>
17*4882a593Smuzhiyun #include <sound/soc-acpi.h>
18*4882a593Smuzhiyun #include <sound/soc-topology.h>
19*4882a593Smuzhiyun #include <uapi/sound/snd_sst_tokens.h>
20*4882a593Smuzhiyun #include <uapi/sound/skl-tplg-interface.h>
21*4882a593Smuzhiyun #include "skl-sst-dsp.h"
22*4882a593Smuzhiyun #include "skl-sst-ipc.h"
23*4882a593Smuzhiyun #include "skl-topology.h"
24*4882a593Smuzhiyun #include "skl.h"
25*4882a593Smuzhiyun #include "../common/sst-dsp.h"
26*4882a593Smuzhiyun #include "../common/sst-dsp-priv.h"
27*4882a593Smuzhiyun 
28*4882a593Smuzhiyun #define SKL_CH_FIXUP_MASK		(1 << 0)
29*4882a593Smuzhiyun #define SKL_RATE_FIXUP_MASK		(1 << 1)
30*4882a593Smuzhiyun #define SKL_FMT_FIXUP_MASK		(1 << 2)
31*4882a593Smuzhiyun #define SKL_IN_DIR_BIT_MASK		BIT(0)
32*4882a593Smuzhiyun #define SKL_PIN_COUNT_MASK		GENMASK(7, 4)
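/*
 * Descriptive note: the three *_FIXUP_MASK bits select which pcm attributes
 * (channels, rate, sample format) a fixup should touch, while
 * SKL_IN_DIR_BIT_MASK and SKL_PIN_COUNT_MASK presumably decode a packed
 * module pin-config word (bit 0 = direction, bits 7:4 = pin count) - an
 * assumption based on the mask layout, not something stated in this section.
 */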
33*4882a593Smuzhiyun 
34*4882a593Smuzhiyun static const int mic_mono_list[] = {
35*4882a593Smuzhiyun 0, 1, 2, 3,
36*4882a593Smuzhiyun };
37*4882a593Smuzhiyun static const int mic_stereo_list[][SKL_CH_STEREO] = {
38*4882a593Smuzhiyun {0, 1}, {0, 2}, {0, 3}, {1, 2}, {1, 3}, {2, 3},
39*4882a593Smuzhiyun };
40*4882a593Smuzhiyun static const int mic_trio_list[][SKL_CH_TRIO] = {
41*4882a593Smuzhiyun {0, 1, 2}, {0, 1, 3}, {0, 2, 3}, {1, 2, 3},
42*4882a593Smuzhiyun };
43*4882a593Smuzhiyun static const int mic_quatro_list[][SKL_CH_QUATRO] = {
44*4882a593Smuzhiyun {0, 1, 2, 3},
45*4882a593Smuzhiyun };
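/*
 * Descriptive note: the tables above enumerate every way of picking 1, 2, 3
 * or 4 microphones out of a 4-mic DMIC array; the chosen row is presumably
 * used when programming the mic-select channel map (usage inferred, not
 * shown in this section).
 */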
46*4882a593Smuzhiyun 
47*4882a593Smuzhiyun #define CHECK_HW_PARAMS(ch, freq, bps, prm_ch, prm_freq, prm_bps) \
48*4882a593Smuzhiyun 	((ch == prm_ch) && (bps == prm_bps) && (freq == prm_freq))
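/*
 * CHECK_HW_PARAMS() above evaluates to true only when the runtime channel
 * count, sample rate and bit depth all match the candidate pipe format they
 * are compared against.
 */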
49*4882a593Smuzhiyun 
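/*
 * skl_tplg_d0i3_get()/skl_tplg_d0i3_put() below reference-count the D0i3
 * capability of the currently active streams; the power-management code
 * presumably consults these counters to pick the deepest D0i3 state that is
 * still allowed (usage inferred, not shown in this section).
 */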
50*4882a593Smuzhiyun void skl_tplg_d0i3_get(struct skl_dev *skl, enum d0i3_capability caps)
51*4882a593Smuzhiyun {
52*4882a593Smuzhiyun 	struct skl_d0i3_data *d0i3 =  &skl->d0i3;
53*4882a593Smuzhiyun 
54*4882a593Smuzhiyun 	switch (caps) {
55*4882a593Smuzhiyun 	case SKL_D0I3_NONE:
56*4882a593Smuzhiyun 		d0i3->non_d0i3++;
57*4882a593Smuzhiyun 		break;
58*4882a593Smuzhiyun 
59*4882a593Smuzhiyun 	case SKL_D0I3_STREAMING:
60*4882a593Smuzhiyun 		d0i3->streaming++;
61*4882a593Smuzhiyun 		break;
62*4882a593Smuzhiyun 
63*4882a593Smuzhiyun 	case SKL_D0I3_NON_STREAMING:
64*4882a593Smuzhiyun 		d0i3->non_streaming++;
65*4882a593Smuzhiyun 		break;
66*4882a593Smuzhiyun 	}
67*4882a593Smuzhiyun }
68*4882a593Smuzhiyun 
69*4882a593Smuzhiyun void skl_tplg_d0i3_put(struct skl_dev *skl, enum d0i3_capability caps)
70*4882a593Smuzhiyun {
71*4882a593Smuzhiyun 	struct skl_d0i3_data *d0i3 =  &skl->d0i3;
72*4882a593Smuzhiyun 
73*4882a593Smuzhiyun 	switch (caps) {
74*4882a593Smuzhiyun 	case SKL_D0I3_NONE:
75*4882a593Smuzhiyun 		d0i3->non_d0i3--;
76*4882a593Smuzhiyun 		break;
77*4882a593Smuzhiyun 
78*4882a593Smuzhiyun 	case SKL_D0I3_STREAMING:
79*4882a593Smuzhiyun 		d0i3->streaming--;
80*4882a593Smuzhiyun 		break;
81*4882a593Smuzhiyun 
82*4882a593Smuzhiyun 	case SKL_D0I3_NON_STREAMING:
83*4882a593Smuzhiyun 		d0i3->non_streaming--;
84*4882a593Smuzhiyun 		break;
85*4882a593Smuzhiyun 	}
86*4882a593Smuzhiyun }
87*4882a593Smuzhiyun 
88*4882a593Smuzhiyun /*
89*4882a593Smuzhiyun  * The SKL DSP driver models only a few DAPM widget types and ignores the
90*4882a593Smuzhiyun  * rest. This helper checks whether the SKL driver handles a given widget type
91*4882a593Smuzhiyun  */
92*4882a593Smuzhiyun static int is_skl_dsp_widget_type(struct snd_soc_dapm_widget *w,
93*4882a593Smuzhiyun 				  struct device *dev)
94*4882a593Smuzhiyun {
95*4882a593Smuzhiyun 	if (w->dapm->dev != dev)
96*4882a593Smuzhiyun 		return false;
97*4882a593Smuzhiyun 
98*4882a593Smuzhiyun 	switch (w->id) {
99*4882a593Smuzhiyun 	case snd_soc_dapm_dai_link:
100*4882a593Smuzhiyun 	case snd_soc_dapm_dai_in:
101*4882a593Smuzhiyun 	case snd_soc_dapm_aif_in:
102*4882a593Smuzhiyun 	case snd_soc_dapm_aif_out:
103*4882a593Smuzhiyun 	case snd_soc_dapm_dai_out:
104*4882a593Smuzhiyun 	case snd_soc_dapm_switch:
105*4882a593Smuzhiyun 	case snd_soc_dapm_output:
106*4882a593Smuzhiyun 	case snd_soc_dapm_mux:
107*4882a593Smuzhiyun 
108*4882a593Smuzhiyun 		return false;
109*4882a593Smuzhiyun 	default:
110*4882a593Smuzhiyun 		return true;
111*4882a593Smuzhiyun 	}
112*4882a593Smuzhiyun }
113*4882a593Smuzhiyun 
114*4882a593Smuzhiyun static void skl_dump_mconfig(struct skl_dev *skl, struct skl_module_cfg *mcfg)
115*4882a593Smuzhiyun {
116*4882a593Smuzhiyun 	struct skl_module_iface *iface = &mcfg->module->formats[mcfg->fmt_idx];
117*4882a593Smuzhiyun 
118*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Dumping config\n");
119*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Input Format:\n");
120*4882a593Smuzhiyun 	dev_dbg(skl->dev, "channels = %d\n", iface->inputs[0].fmt.channels);
121*4882a593Smuzhiyun 	dev_dbg(skl->dev, "s_freq = %d\n", iface->inputs[0].fmt.s_freq);
122*4882a593Smuzhiyun 	dev_dbg(skl->dev, "ch_cfg = %d\n", iface->inputs[0].fmt.ch_cfg);
123*4882a593Smuzhiyun 	dev_dbg(skl->dev, "valid bit depth = %d\n",
124*4882a593Smuzhiyun 				iface->inputs[0].fmt.valid_bit_depth);
125*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Output Format:\n");
126*4882a593Smuzhiyun 	dev_dbg(skl->dev, "channels = %d\n", iface->outputs[0].fmt.channels);
127*4882a593Smuzhiyun 	dev_dbg(skl->dev, "s_freq = %d\n", iface->outputs[0].fmt.s_freq);
128*4882a593Smuzhiyun 	dev_dbg(skl->dev, "valid bit depth = %d\n",
129*4882a593Smuzhiyun 				iface->outputs[0].fmt.valid_bit_depth);
130*4882a593Smuzhiyun 	dev_dbg(skl->dev, "ch_cfg = %d\n", iface->outputs[0].fmt.ch_cfg);
131*4882a593Smuzhiyun }
132*4882a593Smuzhiyun 
133*4882a593Smuzhiyun static void skl_tplg_update_chmap(struct skl_module_fmt *fmt, int chs)
134*4882a593Smuzhiyun {
135*4882a593Smuzhiyun 	int slot_map = 0xFFFFFFFF;
136*4882a593Smuzhiyun 	int start_slot = 0;
137*4882a593Smuzhiyun 	int i;
138*4882a593Smuzhiyun 
139*4882a593Smuzhiyun 	for (i = 0; i < chs; i++) {
140*4882a593Smuzhiyun 		/*
141*4882a593Smuzhiyun 		 * For 2 channels with starting slot as 0, slot map will
142*4882a593Smuzhiyun 		 * look like 0xFFFFFF10.
143*4882a593Smuzhiyun 		 */
144*4882a593Smuzhiyun 		slot_map &= (~(0xF << (4 * i)) | (start_slot << (4 * i)));
145*4882a593Smuzhiyun 		start_slot++;
146*4882a593Smuzhiyun 	}
147*4882a593Smuzhiyun 	fmt->ch_map = slot_map;
148*4882a593Smuzhiyun }
149*4882a593Smuzhiyun 
150*4882a593Smuzhiyun static void skl_tplg_update_params(struct skl_module_fmt *fmt,
151*4882a593Smuzhiyun 			struct skl_pipe_params *params, int fixup)
152*4882a593Smuzhiyun {
153*4882a593Smuzhiyun 	if (fixup & SKL_RATE_FIXUP_MASK)
154*4882a593Smuzhiyun 		fmt->s_freq = params->s_freq;
155*4882a593Smuzhiyun 	if (fixup & SKL_CH_FIXUP_MASK) {
156*4882a593Smuzhiyun 		fmt->channels = params->ch;
157*4882a593Smuzhiyun 		skl_tplg_update_chmap(fmt, fmt->channels);
158*4882a593Smuzhiyun 	}
159*4882a593Smuzhiyun 	if (fixup & SKL_FMT_FIXUP_MASK) {
160*4882a593Smuzhiyun 		fmt->valid_bit_depth = skl_get_bit_depth(params->s_fmt);
161*4882a593Smuzhiyun 
162*4882a593Smuzhiyun 		/*
163*4882a593Smuzhiyun 		 * 16 bit data uses a 16 bit container whereas 24 bit data is
164*4882a593Smuzhiyun 		 * in a 32 bit container, so update the bit depth accordingly
165*4882a593Smuzhiyun 		 */
166*4882a593Smuzhiyun 		switch (fmt->valid_bit_depth) {
167*4882a593Smuzhiyun 		case SKL_DEPTH_16BIT:
168*4882a593Smuzhiyun 			fmt->bit_depth = fmt->valid_bit_depth;
169*4882a593Smuzhiyun 			break;
170*4882a593Smuzhiyun 
171*4882a593Smuzhiyun 		default:
172*4882a593Smuzhiyun 			fmt->bit_depth = SKL_DEPTH_32BIT;
173*4882a593Smuzhiyun 			break;
174*4882a593Smuzhiyun 		}
175*4882a593Smuzhiyun 	}
176*4882a593Smuzhiyun 
177*4882a593Smuzhiyun }
178*4882a593Smuzhiyun 
179*4882a593Smuzhiyun /*
180*4882a593Smuzhiyun  * A pipeline may have modules which impact the pcm parameters, like SRC,
181*4882a593Smuzhiyun  * channel converter, format converter.
182*4882a593Smuzhiyun  * We need to calculate the output params by applying the 'fixup'.
183*4882a593Smuzhiyun  * The topology tells the driver which type of fixup to apply by
184*4882a593Smuzhiyun  * supplying the fixup mask, and based on that we calculate the output.
185*4882a593Smuzhiyun  *
186*4882a593Smuzhiyun  * For an FE, the pcm hw_params is the source/target format; the same
187*4882a593Smuzhiyun  * applies to a BE when its hw_params is invoked.
188*4882a593Smuzhiyun  * Based on the FE/BE pipeline and the direction, we calculate the input
189*4882a593Smuzhiyun  * and output fixups and then apply them to the module.
190*4882a593Smuzhiyun  */
191*4882a593Smuzhiyun static void skl_tplg_update_params_fixup(struct skl_module_cfg *m_cfg,
192*4882a593Smuzhiyun 		struct skl_pipe_params *params, bool is_fe)
193*4882a593Smuzhiyun {
194*4882a593Smuzhiyun 	int in_fixup, out_fixup;
195*4882a593Smuzhiyun 	struct skl_module_fmt *in_fmt, *out_fmt;
196*4882a593Smuzhiyun 
197*4882a593Smuzhiyun 	/* Fixups will be applied to pin 0 only */
198*4882a593Smuzhiyun 	in_fmt = &m_cfg->module->formats[m_cfg->fmt_idx].inputs[0].fmt;
199*4882a593Smuzhiyun 	out_fmt = &m_cfg->module->formats[m_cfg->fmt_idx].outputs[0].fmt;
200*4882a593Smuzhiyun 
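	/*
	 * Worked example (illustrative): for a playback FE module whose
	 * params_fixup and converter masks both contain SKL_CH_FIXUP_MASK,
	 * in_fixup keeps the channel bit while out_fixup (= ~converter &
	 * params_fixup) drops it, so only the input format gets the new
	 * channel count and the output keeps its topology-defined one.
	 */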
201*4882a593Smuzhiyun 	if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
202*4882a593Smuzhiyun 		if (is_fe) {
203*4882a593Smuzhiyun 			in_fixup = m_cfg->params_fixup;
204*4882a593Smuzhiyun 			out_fixup = (~m_cfg->converter) &
205*4882a593Smuzhiyun 					m_cfg->params_fixup;
206*4882a593Smuzhiyun 		} else {
207*4882a593Smuzhiyun 			out_fixup = m_cfg->params_fixup;
208*4882a593Smuzhiyun 			in_fixup = (~m_cfg->converter) &
209*4882a593Smuzhiyun 					m_cfg->params_fixup;
210*4882a593Smuzhiyun 		}
211*4882a593Smuzhiyun 	} else {
212*4882a593Smuzhiyun 		if (is_fe) {
213*4882a593Smuzhiyun 			out_fixup = m_cfg->params_fixup;
214*4882a593Smuzhiyun 			in_fixup = (~m_cfg->converter) &
215*4882a593Smuzhiyun 					m_cfg->params_fixup;
216*4882a593Smuzhiyun 		} else {
217*4882a593Smuzhiyun 			in_fixup = m_cfg->params_fixup;
218*4882a593Smuzhiyun 			out_fixup = (~m_cfg->converter) &
219*4882a593Smuzhiyun 					m_cfg->params_fixup;
220*4882a593Smuzhiyun 		}
221*4882a593Smuzhiyun 	}
222*4882a593Smuzhiyun 
223*4882a593Smuzhiyun 	skl_tplg_update_params(in_fmt, params, in_fixup);
224*4882a593Smuzhiyun 	skl_tplg_update_params(out_fmt, params, out_fixup);
225*4882a593Smuzhiyun }
226*4882a593Smuzhiyun 
227*4882a593Smuzhiyun /*
228*4882a593Smuzhiyun  * A module needs input and output buffers, which depend on the pcm
229*4882a593Smuzhiyun  * params, so once we have calculated the params we need to do the
230*4882a593Smuzhiyun  * buffer calculation as well.
231*4882a593Smuzhiyun  */
232*4882a593Smuzhiyun static void skl_tplg_update_buffer_size(struct skl_dev *skl,
233*4882a593Smuzhiyun 				struct skl_module_cfg *mcfg)
234*4882a593Smuzhiyun {
235*4882a593Smuzhiyun 	int multiplier = 1;
236*4882a593Smuzhiyun 	struct skl_module_fmt *in_fmt, *out_fmt;
237*4882a593Smuzhiyun 	struct skl_module_res *res;
238*4882a593Smuzhiyun 
239*4882a593Smuzhiyun 	/* Since the fixup is applied to pin 0 only, ibs and obs need to
240*4882a593Smuzhiyun 	 * change for pin 0 only
241*4882a593Smuzhiyun 	 */
242*4882a593Smuzhiyun 	res = &mcfg->module->resources[mcfg->res_idx];
243*4882a593Smuzhiyun 	in_fmt = &mcfg->module->formats[mcfg->fmt_idx].inputs[0].fmt;
244*4882a593Smuzhiyun 	out_fmt = &mcfg->module->formats[mcfg->fmt_idx].outputs[0].fmt;
245*4882a593Smuzhiyun 
246*4882a593Smuzhiyun 	if (mcfg->m_type == SKL_MODULE_TYPE_SRCINT)
247*4882a593Smuzhiyun 		multiplier = 5;
248*4882a593Smuzhiyun 
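	/*
	 * Worked example (illustrative): a 48 kHz, 2 channel stream in a
	 * 32 bit container gives DIV_ROUND_UP(48000, 1000) * 2 * 4 =
	 * 384 bytes of buffer per 1 ms, times 5 for a sample rate converter.
	 */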
249*4882a593Smuzhiyun 	res->ibs = DIV_ROUND_UP(in_fmt->s_freq, 1000) *
250*4882a593Smuzhiyun 			in_fmt->channels * (in_fmt->bit_depth >> 3) *
251*4882a593Smuzhiyun 			multiplier;
252*4882a593Smuzhiyun 
253*4882a593Smuzhiyun 	res->obs = DIV_ROUND_UP(out_fmt->s_freq, 1000) *
254*4882a593Smuzhiyun 			out_fmt->channels * (out_fmt->bit_depth >> 3) *
255*4882a593Smuzhiyun 			multiplier;
256*4882a593Smuzhiyun }
257*4882a593Smuzhiyun 
258*4882a593Smuzhiyun static u8 skl_tplg_be_dev_type(int dev_type)
259*4882a593Smuzhiyun {
260*4882a593Smuzhiyun 	int ret;
261*4882a593Smuzhiyun 
262*4882a593Smuzhiyun 	switch (dev_type) {
263*4882a593Smuzhiyun 	case SKL_DEVICE_BT:
264*4882a593Smuzhiyun 		ret = NHLT_DEVICE_BT;
265*4882a593Smuzhiyun 		break;
266*4882a593Smuzhiyun 
267*4882a593Smuzhiyun 	case SKL_DEVICE_DMIC:
268*4882a593Smuzhiyun 		ret = NHLT_DEVICE_DMIC;
269*4882a593Smuzhiyun 		break;
270*4882a593Smuzhiyun 
271*4882a593Smuzhiyun 	case SKL_DEVICE_I2S:
272*4882a593Smuzhiyun 		ret = NHLT_DEVICE_I2S;
273*4882a593Smuzhiyun 		break;
274*4882a593Smuzhiyun 
275*4882a593Smuzhiyun 	default:
276*4882a593Smuzhiyun 		ret = NHLT_DEVICE_INVALID;
277*4882a593Smuzhiyun 		break;
278*4882a593Smuzhiyun 	}
279*4882a593Smuzhiyun 
280*4882a593Smuzhiyun 	return ret;
281*4882a593Smuzhiyun }
282*4882a593Smuzhiyun 
283*4882a593Smuzhiyun static int skl_tplg_update_be_blob(struct snd_soc_dapm_widget *w,
284*4882a593Smuzhiyun 						struct skl_dev *skl)
285*4882a593Smuzhiyun {
286*4882a593Smuzhiyun 	struct skl_module_cfg *m_cfg = w->priv;
287*4882a593Smuzhiyun 	int link_type, dir;
288*4882a593Smuzhiyun 	u32 ch, s_freq, s_fmt;
289*4882a593Smuzhiyun 	struct nhlt_specific_cfg *cfg;
290*4882a593Smuzhiyun 	u8 dev_type = skl_tplg_be_dev_type(m_cfg->dev_type);
291*4882a593Smuzhiyun 	int fmt_idx = m_cfg->fmt_idx;
292*4882a593Smuzhiyun 	struct skl_module_iface *m_iface = &m_cfg->module->formats[fmt_idx];
293*4882a593Smuzhiyun 
294*4882a593Smuzhiyun 	/* check if we already have blob */
295*4882a593Smuzhiyun 	if (m_cfg->formats_config.caps_size > 0)
296*4882a593Smuzhiyun 		return 0;
297*4882a593Smuzhiyun 
298*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Applying default cfg blob\n");
299*4882a593Smuzhiyun 	switch (m_cfg->dev_type) {
300*4882a593Smuzhiyun 	case SKL_DEVICE_DMIC:
301*4882a593Smuzhiyun 		link_type = NHLT_LINK_DMIC;
302*4882a593Smuzhiyun 		dir = SNDRV_PCM_STREAM_CAPTURE;
303*4882a593Smuzhiyun 		s_freq = m_iface->inputs[0].fmt.s_freq;
304*4882a593Smuzhiyun 		s_fmt = m_iface->inputs[0].fmt.bit_depth;
305*4882a593Smuzhiyun 		ch = m_iface->inputs[0].fmt.channels;
306*4882a593Smuzhiyun 		break;
307*4882a593Smuzhiyun 
308*4882a593Smuzhiyun 	case SKL_DEVICE_I2S:
309*4882a593Smuzhiyun 		link_type = NHLT_LINK_SSP;
310*4882a593Smuzhiyun 		if (m_cfg->hw_conn_type == SKL_CONN_SOURCE) {
311*4882a593Smuzhiyun 			dir = SNDRV_PCM_STREAM_PLAYBACK;
312*4882a593Smuzhiyun 			s_freq = m_iface->outputs[0].fmt.s_freq;
313*4882a593Smuzhiyun 			s_fmt = m_iface->outputs[0].fmt.bit_depth;
314*4882a593Smuzhiyun 			ch = m_iface->outputs[0].fmt.channels;
315*4882a593Smuzhiyun 		} else {
316*4882a593Smuzhiyun 			dir = SNDRV_PCM_STREAM_CAPTURE;
317*4882a593Smuzhiyun 			s_freq = m_iface->inputs[0].fmt.s_freq;
318*4882a593Smuzhiyun 			s_fmt = m_iface->inputs[0].fmt.bit_depth;
319*4882a593Smuzhiyun 			ch = m_iface->inputs[0].fmt.channels;
320*4882a593Smuzhiyun 		}
321*4882a593Smuzhiyun 		break;
322*4882a593Smuzhiyun 
323*4882a593Smuzhiyun 	default:
324*4882a593Smuzhiyun 		return -EINVAL;
325*4882a593Smuzhiyun 	}
326*4882a593Smuzhiyun 
327*4882a593Smuzhiyun 	/* update the blob based on virtual bus_id and default params */
328*4882a593Smuzhiyun 	cfg = skl_get_ep_blob(skl, m_cfg->vbus_id, link_type,
329*4882a593Smuzhiyun 					s_fmt, ch, s_freq, dir, dev_type);
330*4882a593Smuzhiyun 	if (cfg) {
331*4882a593Smuzhiyun 		m_cfg->formats_config.caps_size = cfg->size;
332*4882a593Smuzhiyun 		m_cfg->formats_config.caps = (u32 *) &cfg->caps;
333*4882a593Smuzhiyun 	} else {
334*4882a593Smuzhiyun 		dev_err(skl->dev, "Blob NULL for id %x type %d dirn %d\n",
335*4882a593Smuzhiyun 					m_cfg->vbus_id, link_type, dir);
336*4882a593Smuzhiyun 		dev_err(skl->dev, "PCM: ch %d, freq %d, fmt %d\n",
337*4882a593Smuzhiyun 					ch, s_freq, s_fmt);
338*4882a593Smuzhiyun 		return -EIO;
339*4882a593Smuzhiyun 	}
340*4882a593Smuzhiyun 
341*4882a593Smuzhiyun 	return 0;
342*4882a593Smuzhiyun }
343*4882a593Smuzhiyun 
344*4882a593Smuzhiyun static void skl_tplg_update_module_params(struct snd_soc_dapm_widget *w,
345*4882a593Smuzhiyun 							struct skl_dev *skl)
346*4882a593Smuzhiyun {
347*4882a593Smuzhiyun 	struct skl_module_cfg *m_cfg = w->priv;
348*4882a593Smuzhiyun 	struct skl_pipe_params *params = m_cfg->pipe->p_params;
349*4882a593Smuzhiyun 	int p_conn_type = m_cfg->pipe->conn_type;
350*4882a593Smuzhiyun 	bool is_fe;
351*4882a593Smuzhiyun 
352*4882a593Smuzhiyun 	if (!m_cfg->params_fixup)
353*4882a593Smuzhiyun 		return;
354*4882a593Smuzhiyun 
355*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Mconfig for widget=%s BEFORE updation\n",
356*4882a593Smuzhiyun 				w->name);
357*4882a593Smuzhiyun 
358*4882a593Smuzhiyun 	skl_dump_mconfig(skl, m_cfg);
359*4882a593Smuzhiyun 
360*4882a593Smuzhiyun 	if (p_conn_type == SKL_PIPE_CONN_TYPE_FE)
361*4882a593Smuzhiyun 		is_fe = true;
362*4882a593Smuzhiyun 	else
363*4882a593Smuzhiyun 		is_fe = false;
364*4882a593Smuzhiyun 
365*4882a593Smuzhiyun 	skl_tplg_update_params_fixup(m_cfg, params, is_fe);
366*4882a593Smuzhiyun 	skl_tplg_update_buffer_size(skl, m_cfg);
367*4882a593Smuzhiyun 
368*4882a593Smuzhiyun 	dev_dbg(skl->dev, "Mconfig for widget=%s AFTER updation\n",
369*4882a593Smuzhiyun 				w->name);
370*4882a593Smuzhiyun 
371*4882a593Smuzhiyun 	skl_dump_mconfig(skl, m_cfg);
372*4882a593Smuzhiyun }
373*4882a593Smuzhiyun 
374*4882a593Smuzhiyun /*
375*4882a593Smuzhiyun  * Some modules can have multiple params set from user controls, which
376*4882a593Smuzhiyun  * need to be set after the module is initialized. If the set_param flag
377*4882a593Smuzhiyun  * is set, the module params are sent after the module is initialized.
378*4882a593Smuzhiyun  */
379*4882a593Smuzhiyun static int skl_tplg_set_module_params(struct snd_soc_dapm_widget *w,
380*4882a593Smuzhiyun 						struct skl_dev *skl)
381*4882a593Smuzhiyun {
382*4882a593Smuzhiyun 	int i, ret;
383*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
384*4882a593Smuzhiyun 	const struct snd_kcontrol_new *k;
385*4882a593Smuzhiyun 	struct soc_bytes_ext *sb;
386*4882a593Smuzhiyun 	struct skl_algo_data *bc;
387*4882a593Smuzhiyun 	struct skl_specific_cfg *sp_cfg;
388*4882a593Smuzhiyun 
389*4882a593Smuzhiyun 	if (mconfig->formats_config.caps_size > 0 &&
390*4882a593Smuzhiyun 		mconfig->formats_config.set_params == SKL_PARAM_SET) {
391*4882a593Smuzhiyun 		sp_cfg = &mconfig->formats_config;
392*4882a593Smuzhiyun 		ret = skl_set_module_params(skl, sp_cfg->caps,
393*4882a593Smuzhiyun 					sp_cfg->caps_size,
394*4882a593Smuzhiyun 					sp_cfg->param_id, mconfig);
395*4882a593Smuzhiyun 		if (ret < 0)
396*4882a593Smuzhiyun 			return ret;
397*4882a593Smuzhiyun 	}
398*4882a593Smuzhiyun 
399*4882a593Smuzhiyun 	for (i = 0; i < w->num_kcontrols; i++) {
400*4882a593Smuzhiyun 		k = &w->kcontrol_news[i];
401*4882a593Smuzhiyun 		if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
402*4882a593Smuzhiyun 			sb = (void *) k->private_value;
403*4882a593Smuzhiyun 			bc = (struct skl_algo_data *)sb->dobj.private;
404*4882a593Smuzhiyun 
405*4882a593Smuzhiyun 			if (bc->set_params == SKL_PARAM_SET) {
406*4882a593Smuzhiyun 				ret = skl_set_module_params(skl,
407*4882a593Smuzhiyun 						(u32 *)bc->params, bc->size,
408*4882a593Smuzhiyun 						bc->param_id, mconfig);
409*4882a593Smuzhiyun 				if (ret < 0)
410*4882a593Smuzhiyun 					return ret;
411*4882a593Smuzhiyun 			}
412*4882a593Smuzhiyun 		}
413*4882a593Smuzhiyun 	}
414*4882a593Smuzhiyun 
415*4882a593Smuzhiyun 	return 0;
416*4882a593Smuzhiyun }
417*4882a593Smuzhiyun 
418*4882a593Smuzhiyun /*
419*4882a593Smuzhiyun  * Some module params can be set from user controls and are required at
420*4882a593Smuzhiyun  * module init time. Such a param is identified by its set_param flag
421*4882a593Smuzhiyun  * being SKL_PARAM_INIT, in which case it is sent as part of the module
422*4882a593Smuzhiyun  * init data.
423*4882a593Smuzhiyun  */
424*4882a593Smuzhiyun static int skl_tplg_set_module_init_data(struct snd_soc_dapm_widget *w)
425*4882a593Smuzhiyun {
426*4882a593Smuzhiyun 	const struct snd_kcontrol_new *k;
427*4882a593Smuzhiyun 	struct soc_bytes_ext *sb;
428*4882a593Smuzhiyun 	struct skl_algo_data *bc;
429*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
430*4882a593Smuzhiyun 	int i;
431*4882a593Smuzhiyun 
432*4882a593Smuzhiyun 	for (i = 0; i < w->num_kcontrols; i++) {
433*4882a593Smuzhiyun 		k = &w->kcontrol_news[i];
434*4882a593Smuzhiyun 		if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
435*4882a593Smuzhiyun 			sb = (struct soc_bytes_ext *)k->private_value;
436*4882a593Smuzhiyun 			bc = (struct skl_algo_data *)sb->dobj.private;
437*4882a593Smuzhiyun 
438*4882a593Smuzhiyun 			if (bc->set_params != SKL_PARAM_INIT)
439*4882a593Smuzhiyun 				continue;
440*4882a593Smuzhiyun 
441*4882a593Smuzhiyun 			mconfig->formats_config.caps = (u32 *)bc->params;
442*4882a593Smuzhiyun 			mconfig->formats_config.caps_size = bc->size;
443*4882a593Smuzhiyun 
444*4882a593Smuzhiyun 			break;
445*4882a593Smuzhiyun 		}
446*4882a593Smuzhiyun 	}
447*4882a593Smuzhiyun 
448*4882a593Smuzhiyun 	return 0;
449*4882a593Smuzhiyun }
450*4882a593Smuzhiyun 
451*4882a593Smuzhiyun static int skl_tplg_module_prepare(struct skl_dev *skl, struct skl_pipe *pipe,
452*4882a593Smuzhiyun 		struct snd_soc_dapm_widget *w, struct skl_module_cfg *mcfg)
453*4882a593Smuzhiyun {
454*4882a593Smuzhiyun 	switch (mcfg->dev_type) {
455*4882a593Smuzhiyun 	case SKL_DEVICE_HDAHOST:
456*4882a593Smuzhiyun 		return skl_pcm_host_dma_prepare(skl->dev, pipe->p_params);
457*4882a593Smuzhiyun 
458*4882a593Smuzhiyun 	case SKL_DEVICE_HDALINK:
459*4882a593Smuzhiyun 		return skl_pcm_link_dma_prepare(skl->dev, pipe->p_params);
460*4882a593Smuzhiyun 	}
461*4882a593Smuzhiyun 
462*4882a593Smuzhiyun 	return 0;
463*4882a593Smuzhiyun }
464*4882a593Smuzhiyun 
465*4882a593Smuzhiyun /*
466*4882a593Smuzhiyun  * Inside a pipe instance, we can have various modules. These modules need
467*4882a593Smuzhiyun  * to be instantiated in the DSP via the INIT_MODULE IPC, which is done by
468*4882a593Smuzhiyun  * the skl_init_module() routine, so invoke that for all modules in a pipeline
469*4882a593Smuzhiyun  */
470*4882a593Smuzhiyun static int
471*4882a593Smuzhiyun skl_tplg_init_pipe_modules(struct skl_dev *skl, struct skl_pipe *pipe)
472*4882a593Smuzhiyun {
473*4882a593Smuzhiyun 	struct skl_pipe_module *w_module;
474*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
475*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
476*4882a593Smuzhiyun 	u8 cfg_idx;
477*4882a593Smuzhiyun 	int ret = 0;
478*4882a593Smuzhiyun 
479*4882a593Smuzhiyun 	list_for_each_entry(w_module, &pipe->w_list, node) {
480*4882a593Smuzhiyun 		guid_t *uuid_mod;
481*4882a593Smuzhiyun 		w = w_module->w;
482*4882a593Smuzhiyun 		mconfig = w->priv;
483*4882a593Smuzhiyun 
484*4882a593Smuzhiyun 		/* check if module ids are populated */
485*4882a593Smuzhiyun 		if (mconfig->id.module_id < 0) {
486*4882a593Smuzhiyun 			dev_err(skl->dev,
487*4882a593Smuzhiyun 					"module %pUL id not populated\n",
488*4882a593Smuzhiyun 					(guid_t *)mconfig->guid);
489*4882a593Smuzhiyun 			return -EIO;
490*4882a593Smuzhiyun 		}
491*4882a593Smuzhiyun 
492*4882a593Smuzhiyun 		cfg_idx = mconfig->pipe->cur_config_idx;
493*4882a593Smuzhiyun 		mconfig->fmt_idx = mconfig->mod_cfg[cfg_idx].fmt_idx;
494*4882a593Smuzhiyun 		mconfig->res_idx = mconfig->mod_cfg[cfg_idx].res_idx;
495*4882a593Smuzhiyun 
496*4882a593Smuzhiyun 		if (mconfig->module->loadable && skl->dsp->fw_ops.load_mod) {
497*4882a593Smuzhiyun 			ret = skl->dsp->fw_ops.load_mod(skl->dsp,
498*4882a593Smuzhiyun 				mconfig->id.module_id, mconfig->guid);
499*4882a593Smuzhiyun 			if (ret < 0)
500*4882a593Smuzhiyun 				return ret;
501*4882a593Smuzhiyun 
502*4882a593Smuzhiyun 			mconfig->m_state = SKL_MODULE_LOADED;
503*4882a593Smuzhiyun 		}
504*4882a593Smuzhiyun 
505*4882a593Smuzhiyun 		/* prepare the DMA if the module is gateway cpr */
506*4882a593Smuzhiyun 		ret = skl_tplg_module_prepare(skl, pipe, w, mconfig);
507*4882a593Smuzhiyun 		if (ret < 0)
508*4882a593Smuzhiyun 			return ret;
509*4882a593Smuzhiyun 
510*4882a593Smuzhiyun 		/* if the BE blob is NULL, update it with the default blob */
511*4882a593Smuzhiyun 		skl_tplg_update_be_blob(w, skl);
512*4882a593Smuzhiyun 
513*4882a593Smuzhiyun 		/*
514*4882a593Smuzhiyun 		 * apply fix/conversion to module params based on
515*4882a593Smuzhiyun 		 * FE/BE params
516*4882a593Smuzhiyun 		 */
517*4882a593Smuzhiyun 		skl_tplg_update_module_params(w, skl);
518*4882a593Smuzhiyun 		uuid_mod = (guid_t *)mconfig->guid;
519*4882a593Smuzhiyun 		mconfig->id.pvt_id = skl_get_pvt_id(skl, uuid_mod,
520*4882a593Smuzhiyun 						mconfig->id.instance_id);
521*4882a593Smuzhiyun 		if (mconfig->id.pvt_id < 0)
522*4882a593Smuzhiyun 			return ret;
523*4882a593Smuzhiyun 		skl_tplg_set_module_init_data(w);
524*4882a593Smuzhiyun 
525*4882a593Smuzhiyun 		ret = skl_dsp_get_core(skl->dsp, mconfig->core_id);
526*4882a593Smuzhiyun 		if (ret < 0) {
527*4882a593Smuzhiyun 			dev_err(skl->dev, "Failed to wake up core %d ret=%d\n",
528*4882a593Smuzhiyun 						mconfig->core_id, ret);
529*4882a593Smuzhiyun 			return ret;
530*4882a593Smuzhiyun 		}
531*4882a593Smuzhiyun 
532*4882a593Smuzhiyun 		ret = skl_init_module(skl, mconfig);
533*4882a593Smuzhiyun 		if (ret < 0) {
534*4882a593Smuzhiyun 			skl_put_pvt_id(skl, uuid_mod, &mconfig->id.pvt_id);
535*4882a593Smuzhiyun 			goto err;
536*4882a593Smuzhiyun 		}
537*4882a593Smuzhiyun 
538*4882a593Smuzhiyun 		ret = skl_tplg_set_module_params(w, skl);
539*4882a593Smuzhiyun 		if (ret < 0)
540*4882a593Smuzhiyun 			goto err;
541*4882a593Smuzhiyun 	}
542*4882a593Smuzhiyun 
543*4882a593Smuzhiyun 	return 0;
544*4882a593Smuzhiyun err:
545*4882a593Smuzhiyun 	skl_dsp_put_core(skl->dsp, mconfig->core_id);
546*4882a593Smuzhiyun 	return ret;
547*4882a593Smuzhiyun }
548*4882a593Smuzhiyun 
549*4882a593Smuzhiyun static int skl_tplg_unload_pipe_modules(struct skl_dev *skl,
550*4882a593Smuzhiyun 	 struct skl_pipe *pipe)
551*4882a593Smuzhiyun {
552*4882a593Smuzhiyun 	int ret = 0;
553*4882a593Smuzhiyun 	struct skl_pipe_module *w_module;
554*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
555*4882a593Smuzhiyun 
556*4882a593Smuzhiyun 	list_for_each_entry(w_module, &pipe->w_list, node) {
557*4882a593Smuzhiyun 		guid_t *uuid_mod;
558*4882a593Smuzhiyun 		mconfig  = w_module->w->priv;
559*4882a593Smuzhiyun 		uuid_mod = (guid_t *)mconfig->guid;
560*4882a593Smuzhiyun 
561*4882a593Smuzhiyun 		if (mconfig->module->loadable && skl->dsp->fw_ops.unload_mod &&
562*4882a593Smuzhiyun 			mconfig->m_state > SKL_MODULE_UNINIT) {
563*4882a593Smuzhiyun 			ret = skl->dsp->fw_ops.unload_mod(skl->dsp,
564*4882a593Smuzhiyun 						mconfig->id.module_id);
565*4882a593Smuzhiyun 			if (ret < 0)
566*4882a593Smuzhiyun 				return -EIO;
567*4882a593Smuzhiyun 		}
568*4882a593Smuzhiyun 		skl_put_pvt_id(skl, uuid_mod, &mconfig->id.pvt_id);
569*4882a593Smuzhiyun 
570*4882a593Smuzhiyun 		ret = skl_dsp_put_core(skl->dsp, mconfig->core_id);
571*4882a593Smuzhiyun 		if (ret < 0) {
572*4882a593Smuzhiyun 			/* don't return; continue with other modules */
573*4882a593Smuzhiyun 			dev_err(skl->dev, "Failed to sleep core %d ret=%d\n",
574*4882a593Smuzhiyun 				mconfig->core_id, ret);
575*4882a593Smuzhiyun 		}
576*4882a593Smuzhiyun 	}
577*4882a593Smuzhiyun 
578*4882a593Smuzhiyun 	/* no modules to unload in this path, so return */
579*4882a593Smuzhiyun 	return ret;
580*4882a593Smuzhiyun }
581*4882a593Smuzhiyun 
582*4882a593Smuzhiyun static bool skl_tplg_is_multi_fmt(struct skl_dev *skl, struct skl_pipe *pipe)
583*4882a593Smuzhiyun {
584*4882a593Smuzhiyun 	struct skl_pipe_fmt *cur_fmt;
585*4882a593Smuzhiyun 	struct skl_pipe_fmt *next_fmt;
586*4882a593Smuzhiyun 	int i;
587*4882a593Smuzhiyun 
588*4882a593Smuzhiyun 	if (pipe->nr_cfgs <= 1)
589*4882a593Smuzhiyun 		return false;
590*4882a593Smuzhiyun 
591*4882a593Smuzhiyun 	if (pipe->conn_type != SKL_PIPE_CONN_TYPE_FE)
592*4882a593Smuzhiyun 		return true;
593*4882a593Smuzhiyun 
594*4882a593Smuzhiyun 	for (i = 0; i < pipe->nr_cfgs - 1; i++) {
595*4882a593Smuzhiyun 		if (pipe->direction == SNDRV_PCM_STREAM_PLAYBACK) {
596*4882a593Smuzhiyun 			cur_fmt = &pipe->configs[i].out_fmt;
597*4882a593Smuzhiyun 			next_fmt = &pipe->configs[i + 1].out_fmt;
598*4882a593Smuzhiyun 		} else {
599*4882a593Smuzhiyun 			cur_fmt = &pipe->configs[i].in_fmt;
600*4882a593Smuzhiyun 			next_fmt = &pipe->configs[i + 1].in_fmt;
601*4882a593Smuzhiyun 		}
602*4882a593Smuzhiyun 
603*4882a593Smuzhiyun 		if (!CHECK_HW_PARAMS(cur_fmt->channels, cur_fmt->freq,
604*4882a593Smuzhiyun 				     cur_fmt->bps,
605*4882a593Smuzhiyun 				     next_fmt->channels,
606*4882a593Smuzhiyun 				     next_fmt->freq,
607*4882a593Smuzhiyun 				     next_fmt->bps))
608*4882a593Smuzhiyun 			return true;
609*4882a593Smuzhiyun 	}
610*4882a593Smuzhiyun 
611*4882a593Smuzhiyun 	return false;
612*4882a593Smuzhiyun }
613*4882a593Smuzhiyun 
614*4882a593Smuzhiyun /*
615*4882a593Smuzhiyun  * Here, we select pipe format based on the pipe type and pipe
616*4882a593Smuzhiyun  * direction to determine the current config index for the pipeline.
617*4882a593Smuzhiyun  * The config index is then used to select proper module resources.
618*4882a593Smuzhiyun  * Intermediate pipes currently have a fixed format hence we select the
619*4882a593Smuzhiyun  * 0th configuration by default for such pipes.
620*4882a593Smuzhiyun  */
621*4882a593Smuzhiyun static int
622*4882a593Smuzhiyun skl_tplg_get_pipe_config(struct skl_dev *skl, struct skl_module_cfg *mconfig)
623*4882a593Smuzhiyun {
624*4882a593Smuzhiyun 	struct skl_pipe *pipe = mconfig->pipe;
625*4882a593Smuzhiyun 	struct skl_pipe_params *params = pipe->p_params;
626*4882a593Smuzhiyun 	struct skl_path_config *pconfig = &pipe->configs[0];
627*4882a593Smuzhiyun 	struct skl_pipe_fmt *fmt = NULL;
628*4882a593Smuzhiyun 	bool in_fmt = false;
629*4882a593Smuzhiyun 	int i;
630*4882a593Smuzhiyun 
631*4882a593Smuzhiyun 	if (pipe->nr_cfgs == 0) {
632*4882a593Smuzhiyun 		pipe->cur_config_idx = 0;
633*4882a593Smuzhiyun 		return 0;
634*4882a593Smuzhiyun 	}
635*4882a593Smuzhiyun 
636*4882a593Smuzhiyun 	if (skl_tplg_is_multi_fmt(skl, pipe)) {
637*4882a593Smuzhiyun 		pipe->cur_config_idx = pipe->pipe_config_idx;
638*4882a593Smuzhiyun 		pipe->memory_pages = pconfig->mem_pages;
639*4882a593Smuzhiyun 		dev_dbg(skl->dev, "found pipe config idx:%d\n",
640*4882a593Smuzhiyun 			pipe->cur_config_idx);
641*4882a593Smuzhiyun 		return 0;
642*4882a593Smuzhiyun 	}
643*4882a593Smuzhiyun 
644*4882a593Smuzhiyun 	if (pipe->conn_type == SKL_PIPE_CONN_TYPE_NONE) {
645*4882a593Smuzhiyun 		dev_dbg(skl->dev, "No conn_type detected, take 0th config\n");
646*4882a593Smuzhiyun 		pipe->cur_config_idx = 0;
647*4882a593Smuzhiyun 		pipe->memory_pages = pconfig->mem_pages;
648*4882a593Smuzhiyun 
649*4882a593Smuzhiyun 		return 0;
650*4882a593Smuzhiyun 	}
651*4882a593Smuzhiyun 
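	/*
	 * An FE playback pipe or a BE capture pipe receives data at the
	 * negotiated hw_params, so match those params against the input
	 * format of each config; otherwise match against the output format.
	 */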
652*4882a593Smuzhiyun 	if ((pipe->conn_type == SKL_PIPE_CONN_TYPE_FE &&
653*4882a593Smuzhiyun 	     pipe->direction == SNDRV_PCM_STREAM_PLAYBACK) ||
654*4882a593Smuzhiyun 	     (pipe->conn_type == SKL_PIPE_CONN_TYPE_BE &&
655*4882a593Smuzhiyun 	     pipe->direction == SNDRV_PCM_STREAM_CAPTURE))
656*4882a593Smuzhiyun 		in_fmt = true;
657*4882a593Smuzhiyun 
658*4882a593Smuzhiyun 	for (i = 0; i < pipe->nr_cfgs; i++) {
659*4882a593Smuzhiyun 		pconfig = &pipe->configs[i];
660*4882a593Smuzhiyun 		if (in_fmt)
661*4882a593Smuzhiyun 			fmt = &pconfig->in_fmt;
662*4882a593Smuzhiyun 		else
663*4882a593Smuzhiyun 			fmt = &pconfig->out_fmt;
664*4882a593Smuzhiyun 
665*4882a593Smuzhiyun 		if (CHECK_HW_PARAMS(params->ch, params->s_freq, params->s_fmt,
666*4882a593Smuzhiyun 				    fmt->channels, fmt->freq, fmt->bps)) {
667*4882a593Smuzhiyun 			pipe->cur_config_idx = i;
668*4882a593Smuzhiyun 			pipe->memory_pages = pconfig->mem_pages;
669*4882a593Smuzhiyun 			dev_dbg(skl->dev, "Using pipe config: %d\n", i);
670*4882a593Smuzhiyun 
671*4882a593Smuzhiyun 			return 0;
672*4882a593Smuzhiyun 		}
673*4882a593Smuzhiyun 	}
674*4882a593Smuzhiyun 
675*4882a593Smuzhiyun 	dev_err(skl->dev, "Invalid pipe config: %d %d %d for pipe: %d\n",
676*4882a593Smuzhiyun 		params->ch, params->s_freq, params->s_fmt, pipe->ppl_id);
677*4882a593Smuzhiyun 	return -EINVAL;
678*4882a593Smuzhiyun }
679*4882a593Smuzhiyun 
680*4882a593Smuzhiyun /*
681*4882a593Smuzhiyun  * A mixer module represents a pipeline. So in the Pre-PMU event of the mixer
682*4882a593Smuzhiyun  * we need to create the pipeline. We do the following:
683*4882a593Smuzhiyun  *   - Create the pipeline
684*4882a593Smuzhiyun  *   - Initialize the modules in pipeline
685*4882a593Smuzhiyun  *   - finally bind all modules together
686*4882a593Smuzhiyun  */
687*4882a593Smuzhiyun static int skl_tplg_mixer_dapm_pre_pmu_event(struct snd_soc_dapm_widget *w,
688*4882a593Smuzhiyun 							struct skl_dev *skl)
689*4882a593Smuzhiyun {
690*4882a593Smuzhiyun 	int ret;
691*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
692*4882a593Smuzhiyun 	struct skl_pipe_module *w_module;
693*4882a593Smuzhiyun 	struct skl_pipe *s_pipe = mconfig->pipe;
694*4882a593Smuzhiyun 	struct skl_module_cfg *src_module = NULL, *dst_module, *module;
695*4882a593Smuzhiyun 	struct skl_module_deferred_bind *modules;
696*4882a593Smuzhiyun 
697*4882a593Smuzhiyun 	ret = skl_tplg_get_pipe_config(skl, mconfig);
698*4882a593Smuzhiyun 	if (ret < 0)
699*4882a593Smuzhiyun 		return ret;
700*4882a593Smuzhiyun 
701*4882a593Smuzhiyun 	/*
702*4882a593Smuzhiyun 	 * Create a list of modules for pipe.
703*4882a593Smuzhiyun 	 * This list contains modules from source to sink
704*4882a593Smuzhiyun 	 */
705*4882a593Smuzhiyun 	ret = skl_create_pipeline(skl, mconfig->pipe);
706*4882a593Smuzhiyun 	if (ret < 0)
707*4882a593Smuzhiyun 		return ret;
708*4882a593Smuzhiyun 
709*4882a593Smuzhiyun 	/* Init all pipe modules from source to sink */
710*4882a593Smuzhiyun 	ret = skl_tplg_init_pipe_modules(skl, s_pipe);
711*4882a593Smuzhiyun 	if (ret < 0)
712*4882a593Smuzhiyun 		return ret;
713*4882a593Smuzhiyun 
714*4882a593Smuzhiyun 	/* Bind modules from source to sink */
715*4882a593Smuzhiyun 	list_for_each_entry(w_module, &s_pipe->w_list, node) {
716*4882a593Smuzhiyun 		dst_module = w_module->w->priv;
717*4882a593Smuzhiyun 
718*4882a593Smuzhiyun 		if (src_module == NULL) {
719*4882a593Smuzhiyun 			src_module = dst_module;
720*4882a593Smuzhiyun 			continue;
721*4882a593Smuzhiyun 		}
722*4882a593Smuzhiyun 
723*4882a593Smuzhiyun 		ret = skl_bind_modules(skl, src_module, dst_module);
724*4882a593Smuzhiyun 		if (ret < 0)
725*4882a593Smuzhiyun 			return ret;
726*4882a593Smuzhiyun 
727*4882a593Smuzhiyun 		src_module = dst_module;
728*4882a593Smuzhiyun 	}
729*4882a593Smuzhiyun 
730*4882a593Smuzhiyun 	/*
731*4882a593Smuzhiyun 	 * When the destination module is initialized, check for these modules
732*4882a593Smuzhiyun 	 * in deferred bind list. If found, bind them.
733*4882a593Smuzhiyun 	 */
734*4882a593Smuzhiyun 	list_for_each_entry(w_module, &s_pipe->w_list, node) {
735*4882a593Smuzhiyun 		if (list_empty(&skl->bind_list))
736*4882a593Smuzhiyun 			break;
737*4882a593Smuzhiyun 
738*4882a593Smuzhiyun 		list_for_each_entry(modules, &skl->bind_list, node) {
739*4882a593Smuzhiyun 			module = w_module->w->priv;
740*4882a593Smuzhiyun 			if (modules->dst == module)
741*4882a593Smuzhiyun 				skl_bind_modules(skl, modules->src,
742*4882a593Smuzhiyun 							modules->dst);
743*4882a593Smuzhiyun 		}
744*4882a593Smuzhiyun 	}
745*4882a593Smuzhiyun 
746*4882a593Smuzhiyun 	return 0;
747*4882a593Smuzhiyun }
748*4882a593Smuzhiyun 
749*4882a593Smuzhiyun static int skl_fill_sink_instance_id(struct skl_dev *skl, u32 *params,
750*4882a593Smuzhiyun 				int size, struct skl_module_cfg *mcfg)
751*4882a593Smuzhiyun {
752*4882a593Smuzhiyun 	int i, pvt_id;
753*4882a593Smuzhiyun 
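	/*
	 * For KPB post-bind params the topology supplies (module id,
	 * instance id) pairs; replace each instance id with the private id
	 * allocated at init time so the firmware sees the runtime instance.
	 */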
754*4882a593Smuzhiyun 	if (mcfg->m_type == SKL_MODULE_TYPE_KPB) {
755*4882a593Smuzhiyun 		struct skl_kpb_params *kpb_params =
756*4882a593Smuzhiyun 				(struct skl_kpb_params *)params;
757*4882a593Smuzhiyun 		struct skl_mod_inst_map *inst = kpb_params->u.map;
758*4882a593Smuzhiyun 
759*4882a593Smuzhiyun 		for (i = 0; i < kpb_params->num_modules; i++) {
760*4882a593Smuzhiyun 			pvt_id = skl_get_pvt_instance_id_map(skl, inst->mod_id,
761*4882a593Smuzhiyun 								inst->inst_id);
762*4882a593Smuzhiyun 			if (pvt_id < 0)
763*4882a593Smuzhiyun 				return -EINVAL;
764*4882a593Smuzhiyun 
765*4882a593Smuzhiyun 			inst->inst_id = pvt_id;
766*4882a593Smuzhiyun 			inst++;
767*4882a593Smuzhiyun 		}
768*4882a593Smuzhiyun 	}
769*4882a593Smuzhiyun 
770*4882a593Smuzhiyun 	return 0;
771*4882a593Smuzhiyun }
772*4882a593Smuzhiyun /*
773*4882a593Smuzhiyun  * Some modules require params to be set after the module is bound to
774*4882a593Smuzhiyun  * all of its connected pins.
775*4882a593Smuzhiyun  *
776*4882a593Smuzhiyun  * The module provider sets the set_param flag for such modules and we
777*4882a593Smuzhiyun  * send the params after binding.
778*4882a593Smuzhiyun  */
779*4882a593Smuzhiyun static int skl_tplg_set_module_bind_params(struct snd_soc_dapm_widget *w,
780*4882a593Smuzhiyun 			struct skl_module_cfg *mcfg, struct skl_dev *skl)
781*4882a593Smuzhiyun {
782*4882a593Smuzhiyun 	int i, ret;
783*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
784*4882a593Smuzhiyun 	const struct snd_kcontrol_new *k;
785*4882a593Smuzhiyun 	struct soc_bytes_ext *sb;
786*4882a593Smuzhiyun 	struct skl_algo_data *bc;
787*4882a593Smuzhiyun 	struct skl_specific_cfg *sp_cfg;
788*4882a593Smuzhiyun 	u32 *params;
789*4882a593Smuzhiyun 
790*4882a593Smuzhiyun 	/*
791*4882a593Smuzhiyun 	 * check that all out/in pins are in the bind state;
792*4882a593Smuzhiyun 	 * if so, set the module params
793*4882a593Smuzhiyun 	 */
794*4882a593Smuzhiyun 	for (i = 0; i < mcfg->module->max_output_pins; i++) {
795*4882a593Smuzhiyun 		if (mcfg->m_out_pin[i].pin_state != SKL_PIN_BIND_DONE)
796*4882a593Smuzhiyun 			return 0;
797*4882a593Smuzhiyun 	}
798*4882a593Smuzhiyun 
799*4882a593Smuzhiyun 	for (i = 0; i < mcfg->module->max_input_pins; i++) {
800*4882a593Smuzhiyun 		if (mcfg->m_in_pin[i].pin_state != SKL_PIN_BIND_DONE)
801*4882a593Smuzhiyun 			return 0;
802*4882a593Smuzhiyun 	}
803*4882a593Smuzhiyun 
804*4882a593Smuzhiyun 	if (mconfig->formats_config.caps_size > 0 &&
805*4882a593Smuzhiyun 		mconfig->formats_config.set_params == SKL_PARAM_BIND) {
806*4882a593Smuzhiyun 		sp_cfg = &mconfig->formats_config;
807*4882a593Smuzhiyun 		ret = skl_set_module_params(skl, sp_cfg->caps,
808*4882a593Smuzhiyun 					sp_cfg->caps_size,
809*4882a593Smuzhiyun 					sp_cfg->param_id, mconfig);
810*4882a593Smuzhiyun 		if (ret < 0)
811*4882a593Smuzhiyun 			return ret;
812*4882a593Smuzhiyun 	}
813*4882a593Smuzhiyun 
814*4882a593Smuzhiyun 	for (i = 0; i < w->num_kcontrols; i++) {
815*4882a593Smuzhiyun 		k = &w->kcontrol_news[i];
816*4882a593Smuzhiyun 		if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
817*4882a593Smuzhiyun 			sb = (void *) k->private_value;
818*4882a593Smuzhiyun 			bc = (struct skl_algo_data *)sb->dobj.private;
819*4882a593Smuzhiyun 
820*4882a593Smuzhiyun 			if (bc->set_params == SKL_PARAM_BIND) {
821*4882a593Smuzhiyun 				params = kmemdup(bc->params, bc->max, GFP_KERNEL);
822*4882a593Smuzhiyun 				if (!params)
823*4882a593Smuzhiyun 					return -ENOMEM;
824*4882a593Smuzhiyun 
825*4882a593Smuzhiyun 				skl_fill_sink_instance_id(skl, params, bc->max,
826*4882a593Smuzhiyun 								mconfig);
827*4882a593Smuzhiyun 
828*4882a593Smuzhiyun 				ret = skl_set_module_params(skl, params,
829*4882a593Smuzhiyun 						bc->max, bc->param_id, mconfig);
830*4882a593Smuzhiyun 				kfree(params);
831*4882a593Smuzhiyun 
832*4882a593Smuzhiyun 				if (ret < 0)
833*4882a593Smuzhiyun 					return ret;
834*4882a593Smuzhiyun 			}
835*4882a593Smuzhiyun 		}
836*4882a593Smuzhiyun 	}
837*4882a593Smuzhiyun 
838*4882a593Smuzhiyun 	return 0;
839*4882a593Smuzhiyun }
840*4882a593Smuzhiyun 
841*4882a593Smuzhiyun static int skl_get_module_id(struct skl_dev *skl, guid_t *uuid)
842*4882a593Smuzhiyun {
843*4882a593Smuzhiyun 	struct uuid_module *module;
844*4882a593Smuzhiyun 
845*4882a593Smuzhiyun 	list_for_each_entry(module, &skl->uuid_list, list) {
846*4882a593Smuzhiyun 		if (guid_equal(uuid, &module->uuid))
847*4882a593Smuzhiyun 			return module->id;
848*4882a593Smuzhiyun 	}
849*4882a593Smuzhiyun 
850*4882a593Smuzhiyun 	return -EINVAL;
851*4882a593Smuzhiyun }
852*4882a593Smuzhiyun 
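/*
 * Rewrite UUID-keyed KPB bind params into (module id, instance id) pairs
 * once the module ids are known, replacing bc->params with the resized
 * buffer.
 */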
853*4882a593Smuzhiyun static int skl_tplg_find_moduleid_from_uuid(struct skl_dev *skl,
854*4882a593Smuzhiyun 					const struct snd_kcontrol_new *k)
855*4882a593Smuzhiyun {
856*4882a593Smuzhiyun 	struct soc_bytes_ext *sb = (void *) k->private_value;
857*4882a593Smuzhiyun 	struct skl_algo_data *bc = (struct skl_algo_data *)sb->dobj.private;
858*4882a593Smuzhiyun 	struct skl_kpb_params *uuid_params, *params;
859*4882a593Smuzhiyun 	struct hdac_bus *bus = skl_to_bus(skl);
860*4882a593Smuzhiyun 	int i, size, module_id;
861*4882a593Smuzhiyun 
862*4882a593Smuzhiyun 	if (bc->set_params == SKL_PARAM_BIND && bc->max) {
863*4882a593Smuzhiyun 		uuid_params = (struct skl_kpb_params *)bc->params;
864*4882a593Smuzhiyun 		size = struct_size(params, u.map, uuid_params->num_modules);
865*4882a593Smuzhiyun 
866*4882a593Smuzhiyun 		params = devm_kzalloc(bus->dev, size, GFP_KERNEL);
867*4882a593Smuzhiyun 		if (!params)
868*4882a593Smuzhiyun 			return -ENOMEM;
869*4882a593Smuzhiyun 
870*4882a593Smuzhiyun 		params->num_modules = uuid_params->num_modules;
871*4882a593Smuzhiyun 
872*4882a593Smuzhiyun 		for (i = 0; i < uuid_params->num_modules; i++) {
873*4882a593Smuzhiyun 			module_id = skl_get_module_id(skl,
874*4882a593Smuzhiyun 				&uuid_params->u.map_uuid[i].mod_uuid);
875*4882a593Smuzhiyun 			if (module_id < 0) {
876*4882a593Smuzhiyun 				devm_kfree(bus->dev, params);
877*4882a593Smuzhiyun 				return -EINVAL;
878*4882a593Smuzhiyun 			}
879*4882a593Smuzhiyun 
880*4882a593Smuzhiyun 			params->u.map[i].mod_id = module_id;
881*4882a593Smuzhiyun 			params->u.map[i].inst_id =
882*4882a593Smuzhiyun 				uuid_params->u.map_uuid[i].inst_id;
883*4882a593Smuzhiyun 		}
884*4882a593Smuzhiyun 
885*4882a593Smuzhiyun 		devm_kfree(bus->dev, bc->params);
886*4882a593Smuzhiyun 		bc->params = (char *)params;
887*4882a593Smuzhiyun 		bc->max = size;
888*4882a593Smuzhiyun 	}
889*4882a593Smuzhiyun 
890*4882a593Smuzhiyun 	return 0;
891*4882a593Smuzhiyun }
892*4882a593Smuzhiyun 
893*4882a593Smuzhiyun /*
894*4882a593Smuzhiyun  * Retrieve the module id from UUID mentioned in the
895*4882a593Smuzhiyun  * post bind params
896*4882a593Smuzhiyun  */
897*4882a593Smuzhiyun void skl_tplg_add_moduleid_in_bind_params(struct skl_dev *skl,
898*4882a593Smuzhiyun 				struct snd_soc_dapm_widget *w)
899*4882a593Smuzhiyun {
900*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
901*4882a593Smuzhiyun 	int i;
902*4882a593Smuzhiyun 
903*4882a593Smuzhiyun 	/*
904*4882a593Smuzhiyun 	 * Post bind params are used only for KPB
905*4882a593Smuzhiyun 	 * to set copier instances to drain the data
906*4882a593Smuzhiyun 	 * in fast mode
907*4882a593Smuzhiyun 	 */
908*4882a593Smuzhiyun 	if (mconfig->m_type != SKL_MODULE_TYPE_KPB)
909*4882a593Smuzhiyun 		return;
910*4882a593Smuzhiyun 
911*4882a593Smuzhiyun 	for (i = 0; i < w->num_kcontrols; i++)
912*4882a593Smuzhiyun 		if ((w->kcontrol_news[i].access &
913*4882a593Smuzhiyun 			SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) &&
914*4882a593Smuzhiyun 			(skl_tplg_find_moduleid_from_uuid(skl,
915*4882a593Smuzhiyun 			&w->kcontrol_news[i]) < 0))
916*4882a593Smuzhiyun 			dev_err(skl->dev,
917*4882a593Smuzhiyun 				"%s: invalid kpb post bind params\n",
918*4882a593Smuzhiyun 				__func__);
919*4882a593Smuzhiyun }
920*4882a593Smuzhiyun 
921*4882a593Smuzhiyun static int skl_tplg_module_add_deferred_bind(struct skl_dev *skl,
922*4882a593Smuzhiyun 	struct skl_module_cfg *src, struct skl_module_cfg *dst)
923*4882a593Smuzhiyun {
924*4882a593Smuzhiyun 	struct skl_module_deferred_bind *m_list, *modules;
925*4882a593Smuzhiyun 	int i;
926*4882a593Smuzhiyun 
927*4882a593Smuzhiyun 	/* only supported for modules with static pin connections */
928*4882a593Smuzhiyun 	for (i = 0; i < dst->module->max_input_pins; i++) {
929*4882a593Smuzhiyun 		struct skl_module_pin *pin = &dst->m_in_pin[i];
930*4882a593Smuzhiyun 
931*4882a593Smuzhiyun 		if (pin->is_dynamic)
932*4882a593Smuzhiyun 			continue;
933*4882a593Smuzhiyun 
934*4882a593Smuzhiyun 		if ((pin->id.module_id  == src->id.module_id) &&
935*4882a593Smuzhiyun 			(pin->id.instance_id  == src->id.instance_id)) {
936*4882a593Smuzhiyun 
937*4882a593Smuzhiyun 			if (!list_empty(&skl->bind_list)) {
938*4882a593Smuzhiyun 				list_for_each_entry(modules, &skl->bind_list, node) {
939*4882a593Smuzhiyun 					if (modules->src == src && modules->dst == dst)
940*4882a593Smuzhiyun 						return 0;
941*4882a593Smuzhiyun 				}
942*4882a593Smuzhiyun 			}
943*4882a593Smuzhiyun 
944*4882a593Smuzhiyun 			m_list = kzalloc(sizeof(*m_list), GFP_KERNEL);
945*4882a593Smuzhiyun 			if (!m_list)
946*4882a593Smuzhiyun 				return -ENOMEM;
947*4882a593Smuzhiyun 
948*4882a593Smuzhiyun 			m_list->src = src;
949*4882a593Smuzhiyun 			m_list->dst = dst;
950*4882a593Smuzhiyun 
951*4882a593Smuzhiyun 			list_add(&m_list->node, &skl->bind_list);
952*4882a593Smuzhiyun 		}
953*4882a593Smuzhiyun 	}
954*4882a593Smuzhiyun 
955*4882a593Smuzhiyun 	return 0;
956*4882a593Smuzhiyun }
957*4882a593Smuzhiyun 
958*4882a593Smuzhiyun static int skl_tplg_bind_sinks(struct snd_soc_dapm_widget *w,
959*4882a593Smuzhiyun 				struct skl_dev *skl,
960*4882a593Smuzhiyun 				struct snd_soc_dapm_widget *src_w,
961*4882a593Smuzhiyun 				struct skl_module_cfg *src_mconfig)
962*4882a593Smuzhiyun {
963*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
964*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *sink = NULL, *next_sink = NULL;
965*4882a593Smuzhiyun 	struct skl_module_cfg *sink_mconfig;
966*4882a593Smuzhiyun 	int ret;
967*4882a593Smuzhiyun 
968*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_sink_path(w, p) {
969*4882a593Smuzhiyun 		if (!p->connect)
970*4882a593Smuzhiyun 			continue;
971*4882a593Smuzhiyun 
972*4882a593Smuzhiyun 		dev_dbg(skl->dev,
973*4882a593Smuzhiyun 			"%s: src widget=%s\n", __func__, w->name);
974*4882a593Smuzhiyun 		dev_dbg(skl->dev,
975*4882a593Smuzhiyun 			"%s: sink widget=%s\n", __func__, p->sink->name);
976*4882a593Smuzhiyun 
977*4882a593Smuzhiyun 		next_sink = p->sink;
978*4882a593Smuzhiyun 
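		/*
		 * If the immediate sink is not a SKL DSP widget (e.g. a
		 * switch or mux), recurse further down the DAPM graph until
		 * a DSP widget is reached.
		 */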
979*4882a593Smuzhiyun 		if (!is_skl_dsp_widget_type(p->sink, skl->dev))
980*4882a593Smuzhiyun 			return skl_tplg_bind_sinks(p->sink, skl, src_w, src_mconfig);
981*4882a593Smuzhiyun 
982*4882a593Smuzhiyun 		/*
983*4882a593Smuzhiyun 		 * Here we check widgets in the sink pipelines; they can be
984*4882a593Smuzhiyun 		 * of any widget type and we are only interested in the ones
985*4882a593Smuzhiyun 		 * handled by the SKL driver, so check that first
986*4882a593Smuzhiyun 		 */
987*4882a593Smuzhiyun 		if ((p->sink->priv != NULL) &&
988*4882a593Smuzhiyun 				is_skl_dsp_widget_type(p->sink, skl->dev)) {
989*4882a593Smuzhiyun 
990*4882a593Smuzhiyun 			sink = p->sink;
991*4882a593Smuzhiyun 			sink_mconfig = sink->priv;
992*4882a593Smuzhiyun 
993*4882a593Smuzhiyun 			/*
994*4882a593Smuzhiyun 			 * Modules other than a PGA leaf can be connected
995*4882a593Smuzhiyun 			 * directly or via a switch to a module in another
996*4882a593Smuzhiyun 			 * pipeline, e.g. the reference path.
997*4882a593Smuzhiyun 			 * When the path is enabled, the dst module that needs
998*4882a593Smuzhiyun 			 * to be bound may not be initialized yet. If it is not
999*4882a593Smuzhiyun 			 * initialized, add the pair to the deferred bind list
1000*4882a593Smuzhiyun 			 * and, once the dst module is initialized, bind this
1001*4882a593Smuzhiyun 			 * module to the dst module from the deferred list.
1002*4882a593Smuzhiyun 			 */
1003*4882a593Smuzhiyun 			if (((src_mconfig->m_state == SKL_MODULE_INIT_DONE)
1004*4882a593Smuzhiyun 				&& (sink_mconfig->m_state == SKL_MODULE_UNINIT))) {
1005*4882a593Smuzhiyun 
1006*4882a593Smuzhiyun 				ret = skl_tplg_module_add_deferred_bind(skl,
1007*4882a593Smuzhiyun 						src_mconfig, sink_mconfig);
1008*4882a593Smuzhiyun 
1009*4882a593Smuzhiyun 				if (ret < 0)
1010*4882a593Smuzhiyun 					return ret;
1011*4882a593Smuzhiyun 
1012*4882a593Smuzhiyun 			}
1013*4882a593Smuzhiyun 
1014*4882a593Smuzhiyun 
1015*4882a593Smuzhiyun 			if (src_mconfig->m_state == SKL_MODULE_UNINIT ||
1016*4882a593Smuzhiyun 				sink_mconfig->m_state == SKL_MODULE_UNINIT)
1017*4882a593Smuzhiyun 				continue;
1018*4882a593Smuzhiyun 
1019*4882a593Smuzhiyun 			/* Bind source to sink, mixin is always source */
1020*4882a593Smuzhiyun 			ret = skl_bind_modules(skl, src_mconfig, sink_mconfig);
1021*4882a593Smuzhiyun 			if (ret)
1022*4882a593Smuzhiyun 				return ret;
1023*4882a593Smuzhiyun 
1024*4882a593Smuzhiyun 			/* set module params after bind */
1025*4882a593Smuzhiyun 			skl_tplg_set_module_bind_params(src_w,
1026*4882a593Smuzhiyun 					src_mconfig, skl);
1027*4882a593Smuzhiyun 			skl_tplg_set_module_bind_params(sink,
1028*4882a593Smuzhiyun 					sink_mconfig, skl);
1029*4882a593Smuzhiyun 
1030*4882a593Smuzhiyun 			/* Start sinks pipe first */
1031*4882a593Smuzhiyun 			if (sink_mconfig->pipe->state != SKL_PIPE_STARTED) {
1032*4882a593Smuzhiyun 				if (sink_mconfig->pipe->conn_type !=
1033*4882a593Smuzhiyun 							SKL_PIPE_CONN_TYPE_FE)
1034*4882a593Smuzhiyun 					ret = skl_run_pipe(skl,
1035*4882a593Smuzhiyun 							sink_mconfig->pipe);
1036*4882a593Smuzhiyun 				if (ret)
1037*4882a593Smuzhiyun 					return ret;
1038*4882a593Smuzhiyun 			}
1039*4882a593Smuzhiyun 		}
1040*4882a593Smuzhiyun 	}
1041*4882a593Smuzhiyun 
1042*4882a593Smuzhiyun 	if (!sink && next_sink)
1043*4882a593Smuzhiyun 		return skl_tplg_bind_sinks(next_sink, skl, src_w, src_mconfig);
1044*4882a593Smuzhiyun 
1045*4882a593Smuzhiyun 	return 0;
1046*4882a593Smuzhiyun }
1047*4882a593Smuzhiyun 
1048*4882a593Smuzhiyun /*
1049*4882a593Smuzhiyun  * A PGA represents a module in a pipeline. So in the Pre-PMU event of PGA
1050*4882a593Smuzhiyun  * we need to do the following:
1051*4882a593Smuzhiyun  *   - Bind to sink pipeline
1052*4882a593Smuzhiyun  *      Since the sink pipes can be running and we don't get mixer event on
1053*4882a593Smuzhiyun  *      connect for already running mixer, we need to find the sink pipes
1054*4882a593Smuzhiyun  *      here and bind to them. This way dynamic connect works.
1055*4882a593Smuzhiyun  *   - Start sink pipeline, if not running
1056*4882a593Smuzhiyun  *   - Then run current pipe
1057*4882a593Smuzhiyun  */
1058*4882a593Smuzhiyun static int skl_tplg_pga_dapm_pre_pmu_event(struct snd_soc_dapm_widget *w,
1059*4882a593Smuzhiyun 							struct skl_dev *skl)
1060*4882a593Smuzhiyun {
1061*4882a593Smuzhiyun 	struct skl_module_cfg *src_mconfig;
1062*4882a593Smuzhiyun 	int ret = 0;
1063*4882a593Smuzhiyun 
1064*4882a593Smuzhiyun 	src_mconfig = w->priv;
1065*4882a593Smuzhiyun 
1066*4882a593Smuzhiyun 	/*
1067*4882a593Smuzhiyun 	 * find which sink it is connected to, bind with the sink,
1068*4882a593Smuzhiyun 	 * if sink is not started, start sink pipe first, then start
1069*4882a593Smuzhiyun 	 * this pipe
1070*4882a593Smuzhiyun 	 */
1071*4882a593Smuzhiyun 	ret = skl_tplg_bind_sinks(w, skl, w, src_mconfig);
1072*4882a593Smuzhiyun 	if (ret)
1073*4882a593Smuzhiyun 		return ret;
1074*4882a593Smuzhiyun 
1075*4882a593Smuzhiyun 	/* Start source pipe last after starting all sinks */
1076*4882a593Smuzhiyun 	if (src_mconfig->pipe->conn_type != SKL_PIPE_CONN_TYPE_FE)
1077*4882a593Smuzhiyun 		return skl_run_pipe(skl, src_mconfig->pipe);
1078*4882a593Smuzhiyun 
1079*4882a593Smuzhiyun 	return 0;
1080*4882a593Smuzhiyun }
1081*4882a593Smuzhiyun 
1082*4882a593Smuzhiyun static struct snd_soc_dapm_widget *skl_get_src_dsp_widget(
1083*4882a593Smuzhiyun 		struct snd_soc_dapm_widget *w, struct skl_dev *skl)
1084*4882a593Smuzhiyun {
1085*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
1086*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *src_w = NULL;
1087*4882a593Smuzhiyun 
1088*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_source_path(w, p) {
1089*4882a593Smuzhiyun 		src_w = p->source;
1090*4882a593Smuzhiyun 		if (!p->connect)
1091*4882a593Smuzhiyun 			continue;
1092*4882a593Smuzhiyun 
1093*4882a593Smuzhiyun 		dev_dbg(skl->dev, "sink widget=%s\n", w->name);
1094*4882a593Smuzhiyun 		dev_dbg(skl->dev, "src widget=%s\n", p->source->name);
1095*4882a593Smuzhiyun 
1096*4882a593Smuzhiyun 		/*
1097*4882a593Smuzhiyun 		 * Here we check widgets in the source pipelines; they can be
1098*4882a593Smuzhiyun 		 * of any widget type and we are only interested in the ones
1099*4882a593Smuzhiyun 		 * handled by the SKL driver, so check that first
1100*4882a593Smuzhiyun 		 */
1101*4882a593Smuzhiyun 		if ((p->source->priv != NULL) &&
1102*4882a593Smuzhiyun 				is_skl_dsp_widget_type(p->source, skl->dev)) {
1103*4882a593Smuzhiyun 			return p->source;
1104*4882a593Smuzhiyun 		}
1105*4882a593Smuzhiyun 	}
1106*4882a593Smuzhiyun 
1107*4882a593Smuzhiyun 	if (src_w != NULL)
1108*4882a593Smuzhiyun 		return skl_get_src_dsp_widget(src_w, skl);
1109*4882a593Smuzhiyun 
1110*4882a593Smuzhiyun 	return NULL;
1111*4882a593Smuzhiyun }
1112*4882a593Smuzhiyun 
1113*4882a593Smuzhiyun /*
1114*4882a593Smuzhiyun  * In the Post-PMU event of the mixer we need to do the following:
1115*4882a593Smuzhiyun  *   - Check if this pipe is running
1116*4882a593Smuzhiyun  *   - if not, then
1117*4882a593Smuzhiyun  *	- bind this pipeline to its source pipeline
1118*4882a593Smuzhiyun  *	  if source pipe is already running, this means it is a dynamic
1119*4882a593Smuzhiyun  *	  connection and we need to bind only to that pipe
1120*4882a593Smuzhiyun  *	- start this pipeline
1121*4882a593Smuzhiyun  */
1122*4882a593Smuzhiyun static int skl_tplg_mixer_dapm_post_pmu_event(struct snd_soc_dapm_widget *w,
1123*4882a593Smuzhiyun 							struct skl_dev *skl)
1124*4882a593Smuzhiyun {
1125*4882a593Smuzhiyun 	int ret = 0;
1126*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *source, *sink;
1127*4882a593Smuzhiyun 	struct skl_module_cfg *src_mconfig, *sink_mconfig;
1128*4882a593Smuzhiyun 	int src_pipe_started = 0;
1129*4882a593Smuzhiyun 
1130*4882a593Smuzhiyun 	sink = w;
1131*4882a593Smuzhiyun 	sink_mconfig = sink->priv;
1132*4882a593Smuzhiyun 
1133*4882a593Smuzhiyun 	/*
1134*4882a593Smuzhiyun 	 * If the source pipe is already started, the source was driving
1135*4882a593Smuzhiyun 	 * another sink before this sink got connected. Since the source is
1136*4882a593Smuzhiyun 	 * started, bind this sink to the source and start this pipe.
1137*4882a593Smuzhiyun 	 */
1138*4882a593Smuzhiyun 	source = skl_get_src_dsp_widget(w, skl);
1139*4882a593Smuzhiyun 	if (source != NULL) {
1140*4882a593Smuzhiyun 		src_mconfig = source->priv;
1141*4882a593Smuzhiyun 		sink_mconfig = sink->priv;
1142*4882a593Smuzhiyun 		src_pipe_started = 1;
1143*4882a593Smuzhiyun 
1144*4882a593Smuzhiyun 		/*
1145*4882a593Smuzhiyun 		 * Check the pipe state; if the source pipe is not started,
1146*4882a593Smuzhiyun 		 * there is no need to bind or start this pipe here.
1147*4882a593Smuzhiyun 		 */
1148*4882a593Smuzhiyun 		if (src_mconfig->pipe->state != SKL_PIPE_STARTED)
1149*4882a593Smuzhiyun 			src_pipe_started = 0;
1150*4882a593Smuzhiyun 	}
1151*4882a593Smuzhiyun 
1152*4882a593Smuzhiyun 	if (src_pipe_started) {
1153*4882a593Smuzhiyun 		ret = skl_bind_modules(skl, src_mconfig, sink_mconfig);
1154*4882a593Smuzhiyun 		if (ret)
1155*4882a593Smuzhiyun 			return ret;
1156*4882a593Smuzhiyun 
1157*4882a593Smuzhiyun 		/* set module params after bind */
1158*4882a593Smuzhiyun 		skl_tplg_set_module_bind_params(source, src_mconfig, skl);
1159*4882a593Smuzhiyun 		skl_tplg_set_module_bind_params(sink, sink_mconfig, skl);
1160*4882a593Smuzhiyun 
1161*4882a593Smuzhiyun 		if (sink_mconfig->pipe->conn_type != SKL_PIPE_CONN_TYPE_FE)
1162*4882a593Smuzhiyun 			ret = skl_run_pipe(skl, sink_mconfig->pipe);
1163*4882a593Smuzhiyun 	}
1164*4882a593Smuzhiyun 
1165*4882a593Smuzhiyun 	return ret;
1166*4882a593Smuzhiyun }
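
/*
 * Note: as implemented above, this handler only acts when the upstream
 * source pipe is already in SKL_PIPE_STARTED state (the dynamic
 * connection case, e.g. a second FE attaching to a BE that is already
 * streaming). It then binds the new sink to the running source,
 * re-applies the bind-time module params and, unless the sink pipe is
 * FE-connected, runs the sink pipe. If the source pipe is not started,
 * nothing is done here.
 */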
1167*4882a593Smuzhiyun 
1168*4882a593Smuzhiyun /*
1169*4882a593Smuzhiyun  * In the Pre-PMD event of the mixer we need to do the following:
1170*4882a593Smuzhiyun  *   - Stop the pipe
1171*4882a593Smuzhiyun  *   - find the source connections and remove them from the dapm_path_list
1172*4882a593Smuzhiyun  *   - unbind from the source pipelines if still connected
1173*4882a593Smuzhiyun  */
1174*4882a593Smuzhiyun static int skl_tplg_mixer_dapm_pre_pmd_event(struct snd_soc_dapm_widget *w,
1175*4882a593Smuzhiyun 							struct skl_dev *skl)
1176*4882a593Smuzhiyun {
1177*4882a593Smuzhiyun 	struct skl_module_cfg *src_mconfig, *sink_mconfig;
1178*4882a593Smuzhiyun 	int ret = 0, i;
1179*4882a593Smuzhiyun 
1180*4882a593Smuzhiyun 	sink_mconfig = w->priv;
1181*4882a593Smuzhiyun 
1182*4882a593Smuzhiyun 	/* Stop the pipe */
1183*4882a593Smuzhiyun 	ret = skl_stop_pipe(skl, sink_mconfig->pipe);
1184*4882a593Smuzhiyun 	if (ret)
1185*4882a593Smuzhiyun 		return ret;
1186*4882a593Smuzhiyun 
1187*4882a593Smuzhiyun 	for (i = 0; i < sink_mconfig->module->max_input_pins; i++) {
1188*4882a593Smuzhiyun 		if (sink_mconfig->m_in_pin[i].pin_state == SKL_PIN_BIND_DONE) {
1189*4882a593Smuzhiyun 			src_mconfig = sink_mconfig->m_in_pin[i].tgt_mcfg;
1190*4882a593Smuzhiyun 			if (!src_mconfig)
1191*4882a593Smuzhiyun 				continue;
1192*4882a593Smuzhiyun 
1193*4882a593Smuzhiyun 			ret = skl_unbind_modules(skl,
1194*4882a593Smuzhiyun 						src_mconfig, sink_mconfig);
1195*4882a593Smuzhiyun 		}
1196*4882a593Smuzhiyun 	}
1197*4882a593Smuzhiyun 
1198*4882a593Smuzhiyun 	return ret;
1199*4882a593Smuzhiyun }
1200*4882a593Smuzhiyun 
1201*4882a593Smuzhiyun /*
1202*4882a593Smuzhiyun  * In the Post-PMD event of the mixer we need to do the following:
1203*4882a593Smuzhiyun  *   - Unbind the modules within the pipeline
1204*4882a593Smuzhiyun  *   - Delete the pipeline (the modules do not need to be deleted
1205*4882a593Smuzhiyun  *     explicitly; deleting the pipeline is enough here)
1206*4882a593Smuzhiyun  */
1207*4882a593Smuzhiyun static int skl_tplg_mixer_dapm_post_pmd_event(struct snd_soc_dapm_widget *w,
1208*4882a593Smuzhiyun 							struct skl_dev *skl)
1209*4882a593Smuzhiyun {
1210*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
1211*4882a593Smuzhiyun 	struct skl_pipe_module *w_module;
1212*4882a593Smuzhiyun 	struct skl_module_cfg *src_module = NULL, *dst_module;
1213*4882a593Smuzhiyun 	struct skl_pipe *s_pipe = mconfig->pipe;
1214*4882a593Smuzhiyun 	struct skl_module_deferred_bind *modules, *tmp;
1215*4882a593Smuzhiyun 
1216*4882a593Smuzhiyun 	if (s_pipe->state == SKL_PIPE_INVALID)
1217*4882a593Smuzhiyun 		return -EINVAL;
1218*4882a593Smuzhiyun 
1219*4882a593Smuzhiyun 	list_for_each_entry(w_module, &s_pipe->w_list, node) {
1220*4882a593Smuzhiyun 		if (list_empty(&skl->bind_list))
1221*4882a593Smuzhiyun 			break;
1222*4882a593Smuzhiyun 
1223*4882a593Smuzhiyun 		src_module = w_module->w->priv;
1224*4882a593Smuzhiyun 
1225*4882a593Smuzhiyun 		list_for_each_entry_safe(modules, tmp, &skl->bind_list, node) {
1226*4882a593Smuzhiyun 			/*
1227*4882a593Smuzhiyun 			 * When the destination module is deleted, unbind the
1228*4882a593Smuzhiyun 			 * modules from the deferred bind list.
1229*4882a593Smuzhiyun 			 */
1230*4882a593Smuzhiyun 			if (modules->dst == src_module) {
1231*4882a593Smuzhiyun 				skl_unbind_modules(skl, modules->src,
1232*4882a593Smuzhiyun 						modules->dst);
1233*4882a593Smuzhiyun 			}
1234*4882a593Smuzhiyun 
1235*4882a593Smuzhiyun 			/*
1236*4882a593Smuzhiyun 			 * When the source module is deleted, remove this entry
1237*4882a593Smuzhiyun 			 * from the deferred bind list.
1238*4882a593Smuzhiyun 			 */
1239*4882a593Smuzhiyun 			if (modules->src == src_module) {
1240*4882a593Smuzhiyun 				list_del(&modules->node);
1241*4882a593Smuzhiyun 				modules->src = NULL;
1242*4882a593Smuzhiyun 				modules->dst = NULL;
1243*4882a593Smuzhiyun 				kfree(modules);
1244*4882a593Smuzhiyun 			}
1245*4882a593Smuzhiyun 		}
1246*4882a593Smuzhiyun 	}
1247*4882a593Smuzhiyun 
1248*4882a593Smuzhiyun 	list_for_each_entry(w_module, &s_pipe->w_list, node) {
1249*4882a593Smuzhiyun 		dst_module = w_module->w->priv;
1250*4882a593Smuzhiyun 
1251*4882a593Smuzhiyun 		if (src_module == NULL) {
1252*4882a593Smuzhiyun 			src_module = dst_module;
1253*4882a593Smuzhiyun 			continue;
1254*4882a593Smuzhiyun 		}
1255*4882a593Smuzhiyun 
1256*4882a593Smuzhiyun 		skl_unbind_modules(skl, src_module, dst_module);
1257*4882a593Smuzhiyun 		src_module = dst_module;
1258*4882a593Smuzhiyun 	}
1259*4882a593Smuzhiyun 
1260*4882a593Smuzhiyun 	skl_delete_pipe(skl, mconfig->pipe);
1261*4882a593Smuzhiyun 
1262*4882a593Smuzhiyun 	list_for_each_entry(w_module, &s_pipe->w_list, node) {
1263*4882a593Smuzhiyun 		src_module = w_module->w->priv;
1264*4882a593Smuzhiyun 		src_module->m_state = SKL_MODULE_UNINIT;
1265*4882a593Smuzhiyun 	}
1266*4882a593Smuzhiyun 
1267*4882a593Smuzhiyun 	return skl_tplg_unload_pipe_modules(skl, s_pipe);
1268*4882a593Smuzhiyun }
1269*4882a593Smuzhiyun 
1270*4882a593Smuzhiyun /*
1271*4882a593Smuzhiyun  * In the Post-PMD event of the PGA we need to do the following:
1272*4882a593Smuzhiyun  *   - Stop the pipeline
1273*4882a593Smuzhiyun  *   - If a source pipe is connected, unbind from the source pipelines
1274*4882a593Smuzhiyun  */
1275*4882a593Smuzhiyun static int skl_tplg_pga_dapm_post_pmd_event(struct snd_soc_dapm_widget *w,
1276*4882a593Smuzhiyun 							struct skl_dev *skl)
1277*4882a593Smuzhiyun {
1278*4882a593Smuzhiyun 	struct skl_module_cfg *src_mconfig, *sink_mconfig;
1279*4882a593Smuzhiyun 	int ret = 0, i;
1280*4882a593Smuzhiyun 
1281*4882a593Smuzhiyun 	src_mconfig = w->priv;
1282*4882a593Smuzhiyun 
1283*4882a593Smuzhiyun 	/* Stop the pipe since this is a mixin module */
1284*4882a593Smuzhiyun 	ret = skl_stop_pipe(skl, src_mconfig->pipe);
1285*4882a593Smuzhiyun 	if (ret)
1286*4882a593Smuzhiyun 		return ret;
1287*4882a593Smuzhiyun 
1288*4882a593Smuzhiyun 	for (i = 0; i < src_mconfig->module->max_output_pins; i++) {
1289*4882a593Smuzhiyun 		if (src_mconfig->m_out_pin[i].pin_state == SKL_PIN_BIND_DONE) {
1290*4882a593Smuzhiyun 			sink_mconfig = src_mconfig->m_out_pin[i].tgt_mcfg;
1291*4882a593Smuzhiyun 			if (!sink_mconfig)
1292*4882a593Smuzhiyun 				continue;
1293*4882a593Smuzhiyun 			/*
1294*4882a593Smuzhiyun 			 * This is a connector; if a path is found, the unbind
1295*4882a593Smuzhiyun 			 * between source and sink has not happened yet
1296*4882a593Smuzhiyun 			 */
1297*4882a593Smuzhiyun 			ret = skl_unbind_modules(skl, src_mconfig,
1298*4882a593Smuzhiyun 							sink_mconfig);
1299*4882a593Smuzhiyun 		}
1300*4882a593Smuzhiyun 	}
1301*4882a593Smuzhiyun 
1302*4882a593Smuzhiyun 	return ret;
1303*4882a593Smuzhiyun }
1304*4882a593Smuzhiyun 
1305*4882a593Smuzhiyun /*
1306*4882a593Smuzhiyun  * In modelling, we assume there will be ONLY one mixer in a pipeline. If a
1307*4882a593Smuzhiyun  * second one is required, it is created as another pipe entity.
1308*4882a593Smuzhiyun  * The mixer is responsible for pipe management and represents a pipeline
1309*4882a593Smuzhiyun  * instance.
1310*4882a593Smuzhiyun  */
1311*4882a593Smuzhiyun static int skl_tplg_mixer_event(struct snd_soc_dapm_widget *w,
1312*4882a593Smuzhiyun 				struct snd_kcontrol *k, int event)
1313*4882a593Smuzhiyun {
1314*4882a593Smuzhiyun 	struct snd_soc_dapm_context *dapm = w->dapm;
1315*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(dapm->dev);
1316*4882a593Smuzhiyun 
1317*4882a593Smuzhiyun 	switch (event) {
1318*4882a593Smuzhiyun 	case SND_SOC_DAPM_PRE_PMU:
1319*4882a593Smuzhiyun 		return skl_tplg_mixer_dapm_pre_pmu_event(w, skl);
1320*4882a593Smuzhiyun 
1321*4882a593Smuzhiyun 	case SND_SOC_DAPM_POST_PMU:
1322*4882a593Smuzhiyun 		return skl_tplg_mixer_dapm_post_pmu_event(w, skl);
1323*4882a593Smuzhiyun 
1324*4882a593Smuzhiyun 	case SND_SOC_DAPM_PRE_PMD:
1325*4882a593Smuzhiyun 		return skl_tplg_mixer_dapm_pre_pmd_event(w, skl);
1326*4882a593Smuzhiyun 
1327*4882a593Smuzhiyun 	case SND_SOC_DAPM_POST_PMD:
1328*4882a593Smuzhiyun 		return skl_tplg_mixer_dapm_post_pmd_event(w, skl);
1329*4882a593Smuzhiyun 	}
1330*4882a593Smuzhiyun 
1331*4882a593Smuzhiyun 	return 0;
1332*4882a593Smuzhiyun }
1333*4882a593Smuzhiyun 
1334*4882a593Smuzhiyun /*
1335*4882a593Smuzhiyun  * In modelling, we assume the rest of the modules in the pipeline are PGAs.
1336*4882a593Smuzhiyun  * But we are only interested in the last PGA (leaf PGA) in a pipeline, to
1337*4882a593Smuzhiyun  * disconnect from the sink while it is running (two-FE-to-one-BE or
1338*4882a593Smuzhiyun  * one-FE-to-two-BE scenarios).
1339*4882a593Smuzhiyun  */
1340*4882a593Smuzhiyun static int skl_tplg_pga_event(struct snd_soc_dapm_widget *w,
1341*4882a593Smuzhiyun 			struct snd_kcontrol *k, int event)
1342*4882a593Smuzhiyun 
1343*4882a593Smuzhiyun {
1344*4882a593Smuzhiyun 	struct snd_soc_dapm_context *dapm = w->dapm;
1345*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(dapm->dev);
1346*4882a593Smuzhiyun 
1347*4882a593Smuzhiyun 	switch (event) {
1348*4882a593Smuzhiyun 	case SND_SOC_DAPM_PRE_PMU:
1349*4882a593Smuzhiyun 		return skl_tplg_pga_dapm_pre_pmu_event(w, skl);
1350*4882a593Smuzhiyun 
1351*4882a593Smuzhiyun 	case SND_SOC_DAPM_POST_PMD:
1352*4882a593Smuzhiyun 		return skl_tplg_pga_dapm_post_pmd_event(w, skl);
1353*4882a593Smuzhiyun 	}
1354*4882a593Smuzhiyun 
1355*4882a593Smuzhiyun 	return 0;
1356*4882a593Smuzhiyun }
1357*4882a593Smuzhiyun 
1358*4882a593Smuzhiyun static int skl_tplg_multi_config_set_get(struct snd_kcontrol *kcontrol,
1359*4882a593Smuzhiyun 					 struct snd_ctl_elem_value *ucontrol,
1360*4882a593Smuzhiyun 					 bool is_set)
1361*4882a593Smuzhiyun {
1362*4882a593Smuzhiyun 	struct snd_soc_component *component =
1363*4882a593Smuzhiyun 		snd_soc_kcontrol_component(kcontrol);
1364*4882a593Smuzhiyun 	struct hdac_bus *bus = snd_soc_component_get_drvdata(component);
1365*4882a593Smuzhiyun 	struct skl_dev *skl = bus_to_skl(bus);
1366*4882a593Smuzhiyun 	struct skl_pipeline *ppl;
1367*4882a593Smuzhiyun 	struct skl_pipe *pipe = NULL;
1368*4882a593Smuzhiyun 	struct soc_enum *ec = (struct soc_enum *)kcontrol->private_value;
1369*4882a593Smuzhiyun 	u32 *pipe_id;
1370*4882a593Smuzhiyun 
1371*4882a593Smuzhiyun 	if (!ec)
1372*4882a593Smuzhiyun 		return -EINVAL;
1373*4882a593Smuzhiyun 
1374*4882a593Smuzhiyun 	if (is_set && ucontrol->value.enumerated.item[0] > ec->items)
1375*4882a593Smuzhiyun 		return -EINVAL;
1376*4882a593Smuzhiyun 
1377*4882a593Smuzhiyun 	pipe_id = ec->dobj.private;
1378*4882a593Smuzhiyun 
1379*4882a593Smuzhiyun 	list_for_each_entry(ppl, &skl->ppl_list, node) {
1380*4882a593Smuzhiyun 		if (ppl->pipe->ppl_id == *pipe_id) {
1381*4882a593Smuzhiyun 			pipe = ppl->pipe;
1382*4882a593Smuzhiyun 			break;
1383*4882a593Smuzhiyun 		}
1384*4882a593Smuzhiyun 	}
1385*4882a593Smuzhiyun 	if (!pipe)
1386*4882a593Smuzhiyun 		return -EIO;
1387*4882a593Smuzhiyun 
1388*4882a593Smuzhiyun 	if (is_set)
1389*4882a593Smuzhiyun 		pipe->pipe_config_idx = ucontrol->value.enumerated.item[0];
1390*4882a593Smuzhiyun 	else
1391*4882a593Smuzhiyun 		ucontrol->value.enumerated.item[0]  =  pipe->pipe_config_idx;
1392*4882a593Smuzhiyun 
1393*4882a593Smuzhiyun 	return 0;
1394*4882a593Smuzhiyun }
1395*4882a593Smuzhiyun 
1396*4882a593Smuzhiyun static int skl_tplg_multi_config_get(struct snd_kcontrol *kcontrol,
1397*4882a593Smuzhiyun 				     struct snd_ctl_elem_value *ucontrol)
1398*4882a593Smuzhiyun {
1399*4882a593Smuzhiyun 	return skl_tplg_multi_config_set_get(kcontrol, ucontrol, false);
1400*4882a593Smuzhiyun }
1401*4882a593Smuzhiyun 
1402*4882a593Smuzhiyun static int skl_tplg_multi_config_set(struct snd_kcontrol *kcontrol,
1403*4882a593Smuzhiyun 				     struct snd_ctl_elem_value *ucontrol)
1404*4882a593Smuzhiyun {
1405*4882a593Smuzhiyun 	return skl_tplg_multi_config_set_get(kcontrol, ucontrol, true);
1406*4882a593Smuzhiyun }
1407*4882a593Smuzhiyun 
1408*4882a593Smuzhiyun static int skl_tplg_multi_config_get_dmic(struct snd_kcontrol *kcontrol,
1409*4882a593Smuzhiyun 					  struct snd_ctl_elem_value *ucontrol)
1410*4882a593Smuzhiyun {
1411*4882a593Smuzhiyun 	return skl_tplg_multi_config_set_get(kcontrol, ucontrol, false);
1412*4882a593Smuzhiyun }
1413*4882a593Smuzhiyun 
1414*4882a593Smuzhiyun static int skl_tplg_multi_config_set_dmic(struct snd_kcontrol *kcontrol,
1415*4882a593Smuzhiyun 					  struct snd_ctl_elem_value *ucontrol)
1416*4882a593Smuzhiyun {
1417*4882a593Smuzhiyun 	return skl_tplg_multi_config_set_get(kcontrol, ucontrol, true);
1418*4882a593Smuzhiyun }
1419*4882a593Smuzhiyun 
1420*4882a593Smuzhiyun static int skl_tplg_tlv_control_get(struct snd_kcontrol *kcontrol,
1421*4882a593Smuzhiyun 			unsigned int __user *data, unsigned int size)
1422*4882a593Smuzhiyun {
1423*4882a593Smuzhiyun 	struct soc_bytes_ext *sb =
1424*4882a593Smuzhiyun 			(struct soc_bytes_ext *)kcontrol->private_value;
1425*4882a593Smuzhiyun 	struct skl_algo_data *bc = (struct skl_algo_data *)sb->dobj.private;
1426*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1427*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
1428*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(w->dapm->dev);
1429*4882a593Smuzhiyun 
1430*4882a593Smuzhiyun 	if (w->power)
1431*4882a593Smuzhiyun 		skl_get_module_params(skl, (u32 *)bc->params,
1432*4882a593Smuzhiyun 				      bc->size, bc->param_id, mconfig);
1433*4882a593Smuzhiyun 
1434*4882a593Smuzhiyun 	/* decrement size for TLV header */
1435*4882a593Smuzhiyun 	size -= 2 * sizeof(u32);
1436*4882a593Smuzhiyun 
1437*4882a593Smuzhiyun 	/* check size as we don't want to send kernel data */
1438*4882a593Smuzhiyun 	if (size > bc->max)
1439*4882a593Smuzhiyun 		size = bc->max;
1440*4882a593Smuzhiyun 
1441*4882a593Smuzhiyun 	if (bc->params) {
1442*4882a593Smuzhiyun 		if (copy_to_user(data, &bc->param_id, sizeof(u32)))
1443*4882a593Smuzhiyun 			return -EFAULT;
1444*4882a593Smuzhiyun 		if (copy_to_user(data + 1, &size, sizeof(u32)))
1445*4882a593Smuzhiyun 			return -EFAULT;
1446*4882a593Smuzhiyun 		if (copy_to_user(data + 2, bc->params, size))
1447*4882a593Smuzhiyun 			return -EFAULT;
1448*4882a593Smuzhiyun 	}
1449*4882a593Smuzhiyun 
1450*4882a593Smuzhiyun 	return 0;
1451*4882a593Smuzhiyun }
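
/*
 * Sketch of the TLV layout assumed by the get handler above (the field
 * names here are descriptive only):
 *
 *   data[0]  : param_id (u32)
 *   data[1]  : payload size in bytes (u32), clamped to bc->max
 *   data[2..]: parameter payload copied from bc->params
 *
 * The size passed in by the TLV core includes this two-word header,
 * which is why 2 * sizeof(u32) is subtracted before clamping.
 */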
1452*4882a593Smuzhiyun 
1453*4882a593Smuzhiyun #define SKL_PARAM_VENDOR_ID 0xff
1454*4882a593Smuzhiyun 
1455*4882a593Smuzhiyun static int skl_tplg_tlv_control_set(struct snd_kcontrol *kcontrol,
1456*4882a593Smuzhiyun 			const unsigned int __user *data, unsigned int size)
1457*4882a593Smuzhiyun {
1458*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1459*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
1460*4882a593Smuzhiyun 	struct soc_bytes_ext *sb =
1461*4882a593Smuzhiyun 			(struct soc_bytes_ext *)kcontrol->private_value;
1462*4882a593Smuzhiyun 	struct skl_algo_data *ac = (struct skl_algo_data *)sb->dobj.private;
1463*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(w->dapm->dev);
1464*4882a593Smuzhiyun 
1465*4882a593Smuzhiyun 	if (ac->params) {
1466*4882a593Smuzhiyun 		if (size > ac->max)
1467*4882a593Smuzhiyun 			return -EINVAL;
1468*4882a593Smuzhiyun 		ac->size = size;
1469*4882a593Smuzhiyun 
1470*4882a593Smuzhiyun 		if (copy_from_user(ac->params, data, size))
1471*4882a593Smuzhiyun 			return -EFAULT;
1472*4882a593Smuzhiyun 
1473*4882a593Smuzhiyun 		if (w->power)
1474*4882a593Smuzhiyun 			return skl_set_module_params(skl,
1475*4882a593Smuzhiyun 						(u32 *)ac->params, ac->size,
1476*4882a593Smuzhiyun 						ac->param_id, mconfig);
1477*4882a593Smuzhiyun 	}
1478*4882a593Smuzhiyun 
1479*4882a593Smuzhiyun 	return 0;
1480*4882a593Smuzhiyun }
1481*4882a593Smuzhiyun 
1482*4882a593Smuzhiyun static int skl_tplg_mic_control_get(struct snd_kcontrol *kcontrol,
1483*4882a593Smuzhiyun 		struct snd_ctl_elem_value *ucontrol)
1484*4882a593Smuzhiyun {
1485*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1486*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
1487*4882a593Smuzhiyun 	struct soc_enum *ec = (struct soc_enum *)kcontrol->private_value;
1488*4882a593Smuzhiyun 	u32 ch_type = *((u32 *)ec->dobj.private);
1489*4882a593Smuzhiyun 
1490*4882a593Smuzhiyun 	if (mconfig->dmic_ch_type == ch_type)
1491*4882a593Smuzhiyun 		ucontrol->value.enumerated.item[0] =
1492*4882a593Smuzhiyun 					mconfig->dmic_ch_combo_index;
1493*4882a593Smuzhiyun 	else
1494*4882a593Smuzhiyun 		ucontrol->value.enumerated.item[0] = 0;
1495*4882a593Smuzhiyun 
1496*4882a593Smuzhiyun 	return 0;
1497*4882a593Smuzhiyun }
1498*4882a593Smuzhiyun 
1499*4882a593Smuzhiyun static int skl_fill_mic_sel_params(struct skl_module_cfg *mconfig,
1500*4882a593Smuzhiyun 	struct skl_mic_sel_config *mic_cfg, struct device *dev)
1501*4882a593Smuzhiyun {
1502*4882a593Smuzhiyun 	struct skl_specific_cfg *sp_cfg = &mconfig->formats_config;
1503*4882a593Smuzhiyun 
1504*4882a593Smuzhiyun 	sp_cfg->caps_size = sizeof(struct skl_mic_sel_config);
1505*4882a593Smuzhiyun 	sp_cfg->set_params = SKL_PARAM_SET;
1506*4882a593Smuzhiyun 	sp_cfg->param_id = 0x00;
1507*4882a593Smuzhiyun 	if (!sp_cfg->caps) {
1508*4882a593Smuzhiyun 		sp_cfg->caps = devm_kzalloc(dev, sp_cfg->caps_size, GFP_KERNEL);
1509*4882a593Smuzhiyun 		if (!sp_cfg->caps)
1510*4882a593Smuzhiyun 			return -ENOMEM;
1511*4882a593Smuzhiyun 	}
1512*4882a593Smuzhiyun 
1513*4882a593Smuzhiyun 	mic_cfg->mic_switch = SKL_MIC_SEL_SWITCH;
1514*4882a593Smuzhiyun 	mic_cfg->flags = 0;
1515*4882a593Smuzhiyun 	memcpy(sp_cfg->caps, mic_cfg, sp_cfg->caps_size);
1516*4882a593Smuzhiyun 
1517*4882a593Smuzhiyun 	return 0;
1518*4882a593Smuzhiyun }
1519*4882a593Smuzhiyun 
1520*4882a593Smuzhiyun static int skl_tplg_mic_control_set(struct snd_kcontrol *kcontrol,
1521*4882a593Smuzhiyun 			struct snd_ctl_elem_value *ucontrol)
1522*4882a593Smuzhiyun {
1523*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1524*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = w->priv;
1525*4882a593Smuzhiyun 	struct skl_mic_sel_config mic_cfg = {0};
1526*4882a593Smuzhiyun 	struct soc_enum *ec = (struct soc_enum *)kcontrol->private_value;
1527*4882a593Smuzhiyun 	u32 ch_type = *((u32 *)ec->dobj.private);
1528*4882a593Smuzhiyun 	const int *list;
1529*4882a593Smuzhiyun 	u8 in_ch, out_ch, index;
1530*4882a593Smuzhiyun 
1531*4882a593Smuzhiyun 	mconfig->dmic_ch_type = ch_type;
1532*4882a593Smuzhiyun 	mconfig->dmic_ch_combo_index = ucontrol->value.enumerated.item[0];
1533*4882a593Smuzhiyun 
1534*4882a593Smuzhiyun 	/* enum control index 0 is INVALID, so no channels to be set */
1535*4882a593Smuzhiyun 	if (mconfig->dmic_ch_combo_index == 0)
1536*4882a593Smuzhiyun 		return 0;
1537*4882a593Smuzhiyun 
1538*4882a593Smuzhiyun 	/* No valid channel selection map for index 0, so offset by 1 */
1539*4882a593Smuzhiyun 	index = mconfig->dmic_ch_combo_index - 1;
1540*4882a593Smuzhiyun 
1541*4882a593Smuzhiyun 	switch (ch_type) {
1542*4882a593Smuzhiyun 	case SKL_CH_MONO:
1543*4882a593Smuzhiyun 		if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_mono_list))
1544*4882a593Smuzhiyun 			return -EINVAL;
1545*4882a593Smuzhiyun 
1546*4882a593Smuzhiyun 		list = &mic_mono_list[index];
1547*4882a593Smuzhiyun 		break;
1548*4882a593Smuzhiyun 
1549*4882a593Smuzhiyun 	case SKL_CH_STEREO:
1550*4882a593Smuzhiyun 		if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_stereo_list))
1551*4882a593Smuzhiyun 			return -EINVAL;
1552*4882a593Smuzhiyun 
1553*4882a593Smuzhiyun 		list = mic_stereo_list[index];
1554*4882a593Smuzhiyun 		break;
1555*4882a593Smuzhiyun 
1556*4882a593Smuzhiyun 	case SKL_CH_TRIO:
1557*4882a593Smuzhiyun 		if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_trio_list))
1558*4882a593Smuzhiyun 			return -EINVAL;
1559*4882a593Smuzhiyun 
1560*4882a593Smuzhiyun 		list = mic_trio_list[index];
1561*4882a593Smuzhiyun 		break;
1562*4882a593Smuzhiyun 
1563*4882a593Smuzhiyun 	case SKL_CH_QUATRO:
1564*4882a593Smuzhiyun 		if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_quatro_list))
1565*4882a593Smuzhiyun 			return -EINVAL;
1566*4882a593Smuzhiyun 
1567*4882a593Smuzhiyun 		list = mic_quatro_list[index];
1568*4882a593Smuzhiyun 		break;
1569*4882a593Smuzhiyun 
1570*4882a593Smuzhiyun 	default:
1571*4882a593Smuzhiyun 		dev_err(w->dapm->dev,
1572*4882a593Smuzhiyun 				"Invalid channel %d for mic_select module\n",
1573*4882a593Smuzhiyun 				ch_type);
1574*4882a593Smuzhiyun 		return -EINVAL;
1575*4882a593Smuzhiyun 
1576*4882a593Smuzhiyun 	}
1577*4882a593Smuzhiyun 
1578*4882a593Smuzhiyun 	/* the channel type enum maps to the number of channels for that type */
1579*4882a593Smuzhiyun 	for (out_ch = 0; out_ch < ch_type; out_ch++) {
1580*4882a593Smuzhiyun 		in_ch = list[out_ch];
1581*4882a593Smuzhiyun 		mic_cfg.blob[out_ch][in_ch] = SKL_DEFAULT_MIC_SEL_GAIN;
1582*4882a593Smuzhiyun 	}
1583*4882a593Smuzhiyun 
1584*4882a593Smuzhiyun 	return skl_fill_mic_sel_params(mconfig, &mic_cfg, w->dapm->dev);
1585*4882a593Smuzhiyun }
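
/*
 * Worked example (illustrative values): for a stereo selection with
 * dmic_ch_combo_index == 3, index becomes 2 and mic_stereo_list[2] is
 * {0, 3}. The loop above then sets blob[0][0] and blob[1][3] to
 * SKL_DEFAULT_MIC_SEL_GAIN, i.e. output channel 0 is fed from input
 * channel 0 and output channel 1 from input channel 3; all other blob
 * entries stay zero.
 */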
1586*4882a593Smuzhiyun 
1587*4882a593Smuzhiyun /*
1588*4882a593Smuzhiyun  * Fill the DMA id for host and link. In case of a passthrough
1589*4882a593Smuzhiyun  * pipeline, the same pipeline has both the host and the link side,
1590*4882a593Smuzhiyun  * so copy the link and host parameters based on dev_type
1591*4882a593Smuzhiyun  */
1592*4882a593Smuzhiyun static void skl_tplg_fill_dma_id(struct skl_module_cfg *mcfg,
1593*4882a593Smuzhiyun 				struct skl_pipe_params *params)
1594*4882a593Smuzhiyun {
1595*4882a593Smuzhiyun 	struct skl_pipe *pipe = mcfg->pipe;
1596*4882a593Smuzhiyun 
1597*4882a593Smuzhiyun 	if (pipe->passthru) {
1598*4882a593Smuzhiyun 		switch (mcfg->dev_type) {
1599*4882a593Smuzhiyun 		case SKL_DEVICE_HDALINK:
1600*4882a593Smuzhiyun 			pipe->p_params->link_dma_id = params->link_dma_id;
1601*4882a593Smuzhiyun 			pipe->p_params->link_index = params->link_index;
1602*4882a593Smuzhiyun 			pipe->p_params->link_bps = params->link_bps;
1603*4882a593Smuzhiyun 			break;
1604*4882a593Smuzhiyun 
1605*4882a593Smuzhiyun 		case SKL_DEVICE_HDAHOST:
1606*4882a593Smuzhiyun 			pipe->p_params->host_dma_id = params->host_dma_id;
1607*4882a593Smuzhiyun 			pipe->p_params->host_bps = params->host_bps;
1608*4882a593Smuzhiyun 			break;
1609*4882a593Smuzhiyun 
1610*4882a593Smuzhiyun 		default:
1611*4882a593Smuzhiyun 			break;
1612*4882a593Smuzhiyun 		}
1613*4882a593Smuzhiyun 		pipe->p_params->s_fmt = params->s_fmt;
1614*4882a593Smuzhiyun 		pipe->p_params->ch = params->ch;
1615*4882a593Smuzhiyun 		pipe->p_params->s_freq = params->s_freq;
1616*4882a593Smuzhiyun 		pipe->p_params->stream = params->stream;
1617*4882a593Smuzhiyun 		pipe->p_params->format = params->format;
1618*4882a593Smuzhiyun 
1619*4882a593Smuzhiyun 	} else {
1620*4882a593Smuzhiyun 		memcpy(pipe->p_params, params, sizeof(*params));
1621*4882a593Smuzhiyun 	}
1622*4882a593Smuzhiyun }
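
/*
 * Note: for a passthrough pipe the host and link sides share one
 * pipeline, so only the DMA-related fields are copied per dev_type
 * above (link_dma_id/link_index/link_bps for SKL_DEVICE_HDALINK,
 * host_dma_id/host_bps for SKL_DEVICE_HDAHOST), while the common stream
 * parameters (s_fmt, ch, s_freq, stream, format) are always copied.
 * A non-passthrough pipe simply takes the whole skl_pipe_params.
 */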
1623*4882a593Smuzhiyun 
1624*4882a593Smuzhiyun /*
1625*4882a593Smuzhiyun  * The FE params are passed by hw_params of the DAI.
1626*4882a593Smuzhiyun  * On hw_params, the params are stored in the gateway module of the FE and
1627*4882a593Smuzhiyun  * we need to calculate the format for the DSP module configuration; that
1628*4882a593Smuzhiyun  * conversion is done here
1629*4882a593Smuzhiyun  */
1630*4882a593Smuzhiyun int skl_tplg_update_pipe_params(struct device *dev,
1631*4882a593Smuzhiyun 			struct skl_module_cfg *mconfig,
1632*4882a593Smuzhiyun 			struct skl_pipe_params *params)
1633*4882a593Smuzhiyun {
1634*4882a593Smuzhiyun 	struct skl_module_res *res;
1635*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(dev);
1636*4882a593Smuzhiyun 	struct skl_module_fmt *format = NULL;
1637*4882a593Smuzhiyun 	u8 cfg_idx = mconfig->pipe->cur_config_idx;
1638*4882a593Smuzhiyun 
1639*4882a593Smuzhiyun 	res = &mconfig->module->resources[mconfig->res_idx];
1640*4882a593Smuzhiyun 	skl_tplg_fill_dma_id(mconfig, params);
1641*4882a593Smuzhiyun 	mconfig->fmt_idx = mconfig->mod_cfg[cfg_idx].fmt_idx;
1642*4882a593Smuzhiyun 	mconfig->res_idx = mconfig->mod_cfg[cfg_idx].res_idx;
1643*4882a593Smuzhiyun 
1644*4882a593Smuzhiyun 	if (skl->nr_modules)
1645*4882a593Smuzhiyun 		return 0;
1646*4882a593Smuzhiyun 
1647*4882a593Smuzhiyun 	if (params->stream == SNDRV_PCM_STREAM_PLAYBACK)
1648*4882a593Smuzhiyun 		format = &mconfig->module->formats[mconfig->fmt_idx].inputs[0].fmt;
1649*4882a593Smuzhiyun 	else
1650*4882a593Smuzhiyun 		format = &mconfig->module->formats[mconfig->fmt_idx].outputs[0].fmt;
1651*4882a593Smuzhiyun 
1652*4882a593Smuzhiyun 	/* set the hw_params */
1653*4882a593Smuzhiyun 	format->s_freq = params->s_freq;
1654*4882a593Smuzhiyun 	format->channels = params->ch;
1655*4882a593Smuzhiyun 	format->valid_bit_depth = skl_get_bit_depth(params->s_fmt);
1656*4882a593Smuzhiyun 
1657*4882a593Smuzhiyun 	/*
1658*4882a593Smuzhiyun 	 * 16 bit uses a 16 bit container, whereas 24 bit is carried in a
1659*4882a593Smuzhiyun 	 * 32 bit container, so update the bit depth accordingly
1660*4882a593Smuzhiyun 	 */
1661*4882a593Smuzhiyun 	switch (format->valid_bit_depth) {
1662*4882a593Smuzhiyun 	case SKL_DEPTH_16BIT:
1663*4882a593Smuzhiyun 		format->bit_depth = format->valid_bit_depth;
1664*4882a593Smuzhiyun 		break;
1665*4882a593Smuzhiyun 
1666*4882a593Smuzhiyun 	case SKL_DEPTH_24BIT:
1667*4882a593Smuzhiyun 	case SKL_DEPTH_32BIT:
1668*4882a593Smuzhiyun 		format->bit_depth = SKL_DEPTH_32BIT;
1669*4882a593Smuzhiyun 		break;
1670*4882a593Smuzhiyun 
1671*4882a593Smuzhiyun 	default:
1672*4882a593Smuzhiyun 		dev_err(dev, "Invalid bit depth %x for pipe\n",
1673*4882a593Smuzhiyun 				format->valid_bit_depth);
1674*4882a593Smuzhiyun 		return -EINVAL;
1675*4882a593Smuzhiyun 	}
1676*4882a593Smuzhiyun 
1677*4882a593Smuzhiyun 	if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1678*4882a593Smuzhiyun 		res->ibs = (format->s_freq / 1000) *
1679*4882a593Smuzhiyun 				(format->channels) *
1680*4882a593Smuzhiyun 				(format->bit_depth >> 3);
1681*4882a593Smuzhiyun 	} else {
1682*4882a593Smuzhiyun 		res->obs = (format->s_freq / 1000) *
1683*4882a593Smuzhiyun 				(format->channels) *
1684*4882a593Smuzhiyun 				(format->bit_depth >> 3);
1685*4882a593Smuzhiyun 	}
1686*4882a593Smuzhiyun 
1687*4882a593Smuzhiyun 	return 0;
1688*4882a593Smuzhiyun }
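
/*
 * Worked example of the IBS/OBS math above (illustrative numbers):
 * a 48 kHz, 2-channel, 24-bit playback stream gets valid_bit_depth = 24,
 * which lands in a 32-bit container, so
 *
 *   res->ibs = (48000 / 1000) * 2 * (32 >> 3) = 48 * 2 * 4 = 384
 *
 * bytes, i.e. the amount of data for 1 ms of audio.
 */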
1689*4882a593Smuzhiyun 
1690*4882a593Smuzhiyun /*
1691*4882a593Smuzhiyun  * Query the module config for the FE DAI
1692*4882a593Smuzhiyun  * This is used to find the hw_params set for that DAI and apply to FE
1693*4882a593Smuzhiyun  * pipeline
1694*4882a593Smuzhiyun  */
1695*4882a593Smuzhiyun struct skl_module_cfg *
1696*4882a593Smuzhiyun skl_tplg_fe_get_cpr_module(struct snd_soc_dai *dai, int stream)
1697*4882a593Smuzhiyun {
1698*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
1699*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p = NULL;
1700*4882a593Smuzhiyun 
1701*4882a593Smuzhiyun 	if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
1702*4882a593Smuzhiyun 		w = dai->playback_widget;
1703*4882a593Smuzhiyun 		snd_soc_dapm_widget_for_each_sink_path(w, p) {
1704*4882a593Smuzhiyun 			if (p->connect && p->sink->power &&
1705*4882a593Smuzhiyun 				!is_skl_dsp_widget_type(p->sink, dai->dev))
1706*4882a593Smuzhiyun 				continue;
1707*4882a593Smuzhiyun 
1708*4882a593Smuzhiyun 			if (p->sink->priv) {
1709*4882a593Smuzhiyun 				dev_dbg(dai->dev, "set params for %s\n",
1710*4882a593Smuzhiyun 						p->sink->name);
1711*4882a593Smuzhiyun 				return p->sink->priv;
1712*4882a593Smuzhiyun 			}
1713*4882a593Smuzhiyun 		}
1714*4882a593Smuzhiyun 	} else {
1715*4882a593Smuzhiyun 		w = dai->capture_widget;
1716*4882a593Smuzhiyun 		snd_soc_dapm_widget_for_each_source_path(w, p) {
1717*4882a593Smuzhiyun 			if (p->connect && p->source->power &&
1718*4882a593Smuzhiyun 				!is_skl_dsp_widget_type(p->source, dai->dev))
1719*4882a593Smuzhiyun 				continue;
1720*4882a593Smuzhiyun 
1721*4882a593Smuzhiyun 			if (p->source->priv) {
1722*4882a593Smuzhiyun 				dev_dbg(dai->dev, "set params for %s\n",
1723*4882a593Smuzhiyun 						p->source->name);
1724*4882a593Smuzhiyun 				return p->source->priv;
1725*4882a593Smuzhiyun 			}
1726*4882a593Smuzhiyun 		}
1727*4882a593Smuzhiyun 	}
1728*4882a593Smuzhiyun 
1729*4882a593Smuzhiyun 	return NULL;
1730*4882a593Smuzhiyun }
1731*4882a593Smuzhiyun 
1732*4882a593Smuzhiyun static struct skl_module_cfg *skl_get_mconfig_pb_cpr(
1733*4882a593Smuzhiyun 		struct snd_soc_dai *dai, struct snd_soc_dapm_widget *w)
1734*4882a593Smuzhiyun {
1735*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
1736*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = NULL;
1737*4882a593Smuzhiyun 
1738*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_source_path(w, p) {
1739*4882a593Smuzhiyun 		if (w->endpoints[SND_SOC_DAPM_DIR_OUT] > 0) {
1740*4882a593Smuzhiyun 			if (p->connect &&
1741*4882a593Smuzhiyun 				    (p->sink->id == snd_soc_dapm_aif_out) &&
1742*4882a593Smuzhiyun 				    p->source->priv) {
1743*4882a593Smuzhiyun 				mconfig = p->source->priv;
1744*4882a593Smuzhiyun 				return mconfig;
1745*4882a593Smuzhiyun 			}
1746*4882a593Smuzhiyun 			mconfig = skl_get_mconfig_pb_cpr(dai, p->source);
1747*4882a593Smuzhiyun 			if (mconfig)
1748*4882a593Smuzhiyun 				return mconfig;
1749*4882a593Smuzhiyun 		}
1750*4882a593Smuzhiyun 	}
1751*4882a593Smuzhiyun 	return mconfig;
1752*4882a593Smuzhiyun }
1753*4882a593Smuzhiyun 
1754*4882a593Smuzhiyun static struct skl_module_cfg *skl_get_mconfig_cap_cpr(
1755*4882a593Smuzhiyun 		struct snd_soc_dai *dai, struct snd_soc_dapm_widget *w)
1756*4882a593Smuzhiyun {
1757*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
1758*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig = NULL;
1759*4882a593Smuzhiyun 
1760*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_sink_path(w, p) {
1761*4882a593Smuzhiyun 		if (w->endpoints[SND_SOC_DAPM_DIR_IN] > 0) {
1762*4882a593Smuzhiyun 			if (p->connect &&
1763*4882a593Smuzhiyun 				    (p->source->id == snd_soc_dapm_aif_in) &&
1764*4882a593Smuzhiyun 				    p->sink->priv) {
1765*4882a593Smuzhiyun 				mconfig = p->sink->priv;
1766*4882a593Smuzhiyun 				return mconfig;
1767*4882a593Smuzhiyun 			}
1768*4882a593Smuzhiyun 			mconfig = skl_get_mconfig_cap_cpr(dai, p->sink);
1769*4882a593Smuzhiyun 			if (mconfig)
1770*4882a593Smuzhiyun 				return mconfig;
1771*4882a593Smuzhiyun 		}
1772*4882a593Smuzhiyun 	}
1773*4882a593Smuzhiyun 	return mconfig;
1774*4882a593Smuzhiyun }
1775*4882a593Smuzhiyun 
1776*4882a593Smuzhiyun struct skl_module_cfg *
1777*4882a593Smuzhiyun skl_tplg_be_get_cpr_module(struct snd_soc_dai *dai, int stream)
1778*4882a593Smuzhiyun {
1779*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
1780*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
1781*4882a593Smuzhiyun 
1782*4882a593Smuzhiyun 	if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
1783*4882a593Smuzhiyun 		w = dai->playback_widget;
1784*4882a593Smuzhiyun 		mconfig = skl_get_mconfig_pb_cpr(dai, w);
1785*4882a593Smuzhiyun 	} else {
1786*4882a593Smuzhiyun 		w = dai->capture_widget;
1787*4882a593Smuzhiyun 		mconfig = skl_get_mconfig_cap_cpr(dai, w);
1788*4882a593Smuzhiyun 	}
1789*4882a593Smuzhiyun 	return mconfig;
1790*4882a593Smuzhiyun }
1791*4882a593Smuzhiyun 
1792*4882a593Smuzhiyun static u8 skl_tplg_be_link_type(int dev_type)
1793*4882a593Smuzhiyun {
1794*4882a593Smuzhiyun 	int ret;
1795*4882a593Smuzhiyun 
1796*4882a593Smuzhiyun 	switch (dev_type) {
1797*4882a593Smuzhiyun 	case SKL_DEVICE_BT:
1798*4882a593Smuzhiyun 		ret = NHLT_LINK_SSP;
1799*4882a593Smuzhiyun 		break;
1800*4882a593Smuzhiyun 
1801*4882a593Smuzhiyun 	case SKL_DEVICE_DMIC:
1802*4882a593Smuzhiyun 		ret = NHLT_LINK_DMIC;
1803*4882a593Smuzhiyun 		break;
1804*4882a593Smuzhiyun 
1805*4882a593Smuzhiyun 	case SKL_DEVICE_I2S:
1806*4882a593Smuzhiyun 		ret = NHLT_LINK_SSP;
1807*4882a593Smuzhiyun 		break;
1808*4882a593Smuzhiyun 
1809*4882a593Smuzhiyun 	case SKL_DEVICE_HDALINK:
1810*4882a593Smuzhiyun 		ret = NHLT_LINK_HDA;
1811*4882a593Smuzhiyun 		break;
1812*4882a593Smuzhiyun 
1813*4882a593Smuzhiyun 	default:
1814*4882a593Smuzhiyun 		ret = NHLT_LINK_INVALID;
1815*4882a593Smuzhiyun 		break;
1816*4882a593Smuzhiyun 	}
1817*4882a593Smuzhiyun 
1818*4882a593Smuzhiyun 	return ret;
1819*4882a593Smuzhiyun }
1820*4882a593Smuzhiyun 
1821*4882a593Smuzhiyun /*
1822*4882a593Smuzhiyun  * Fill the BE gateway parameters
1823*4882a593Smuzhiyun  * The BE gateway expects a blob of parameters which are kept in the ACPI
1824*4882a593Smuzhiyun  * NHLT blob, so query the blob for interface type (i2s/pdm) and instance.
1825*4882a593Smuzhiyun  * The port can have multiple settings so pick based on the PCM
1826*4882a593Smuzhiyun  * parameters
1827*4882a593Smuzhiyun  */
1828*4882a593Smuzhiyun static int skl_tplg_be_fill_pipe_params(struct snd_soc_dai *dai,
1829*4882a593Smuzhiyun 				struct skl_module_cfg *mconfig,
1830*4882a593Smuzhiyun 				struct skl_pipe_params *params)
1831*4882a593Smuzhiyun {
1832*4882a593Smuzhiyun 	struct nhlt_specific_cfg *cfg;
1833*4882a593Smuzhiyun 	struct skl_dev *skl = get_skl_ctx(dai->dev);
1834*4882a593Smuzhiyun 	int link_type = skl_tplg_be_link_type(mconfig->dev_type);
1835*4882a593Smuzhiyun 	u8 dev_type = skl_tplg_be_dev_type(mconfig->dev_type);
1836*4882a593Smuzhiyun 
1837*4882a593Smuzhiyun 	skl_tplg_fill_dma_id(mconfig, params);
1838*4882a593Smuzhiyun 
1839*4882a593Smuzhiyun 	if (link_type == NHLT_LINK_HDA)
1840*4882a593Smuzhiyun 		return 0;
1841*4882a593Smuzhiyun 
1842*4882a593Smuzhiyun 	/* update the blob based on virtual bus_id */
1843*4882a593Smuzhiyun 	cfg = skl_get_ep_blob(skl, mconfig->vbus_id, link_type,
1844*4882a593Smuzhiyun 					params->s_fmt, params->ch,
1845*4882a593Smuzhiyun 					params->s_freq, params->stream,
1846*4882a593Smuzhiyun 					dev_type);
1847*4882a593Smuzhiyun 	if (cfg) {
1848*4882a593Smuzhiyun 		mconfig->formats_config.caps_size = cfg->size;
1849*4882a593Smuzhiyun 		mconfig->formats_config.caps = (u32 *) &cfg->caps;
1850*4882a593Smuzhiyun 	} else {
1851*4882a593Smuzhiyun 		dev_err(dai->dev, "Blob NULL for id %x type %d dirn %d\n",
1852*4882a593Smuzhiyun 					mconfig->vbus_id, link_type,
1853*4882a593Smuzhiyun 					params->stream);
1854*4882a593Smuzhiyun 		dev_err(dai->dev, "PCM: ch %d, freq %d, fmt %d\n",
1855*4882a593Smuzhiyun 				 params->ch, params->s_freq, params->s_fmt);
1856*4882a593Smuzhiyun 		return -EINVAL;
1857*4882a593Smuzhiyun 	}
1858*4882a593Smuzhiyun 
1859*4882a593Smuzhiyun 	return 0;
1860*4882a593Smuzhiyun }
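
/*
 * Note: for non-HDA links the NHLT endpoint blob is looked up by
 * virtual bus id, link type (SSP/DMIC) and the current PCM parameters
 * (format, channel count, rate, direction, device type). On a match the
 * blob is attached to the module's formats_config (caps/caps_size),
 * presumably to be sent to the firmware along with the module
 * configuration. HDA links need no such blob and return early above.
 */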
1861*4882a593Smuzhiyun 
1862*4882a593Smuzhiyun static int skl_tplg_be_set_src_pipe_params(struct snd_soc_dai *dai,
1863*4882a593Smuzhiyun 				struct snd_soc_dapm_widget *w,
1864*4882a593Smuzhiyun 				struct skl_pipe_params *params)
1865*4882a593Smuzhiyun {
1866*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
1867*4882a593Smuzhiyun 	int ret = -EIO;
1868*4882a593Smuzhiyun 
1869*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_source_path(w, p) {
1870*4882a593Smuzhiyun 		if (p->connect && is_skl_dsp_widget_type(p->source, dai->dev) &&
1871*4882a593Smuzhiyun 						p->source->priv) {
1872*4882a593Smuzhiyun 
1873*4882a593Smuzhiyun 			ret = skl_tplg_be_fill_pipe_params(dai,
1874*4882a593Smuzhiyun 						p->source->priv, params);
1875*4882a593Smuzhiyun 			if (ret < 0)
1876*4882a593Smuzhiyun 				return ret;
1877*4882a593Smuzhiyun 		} else {
1878*4882a593Smuzhiyun 			ret = skl_tplg_be_set_src_pipe_params(dai,
1879*4882a593Smuzhiyun 						p->source, params);
1880*4882a593Smuzhiyun 			if (ret < 0)
1881*4882a593Smuzhiyun 				return ret;
1882*4882a593Smuzhiyun 		}
1883*4882a593Smuzhiyun 	}
1884*4882a593Smuzhiyun 
1885*4882a593Smuzhiyun 	return ret;
1886*4882a593Smuzhiyun }
1887*4882a593Smuzhiyun 
1888*4882a593Smuzhiyun static int skl_tplg_be_set_sink_pipe_params(struct snd_soc_dai *dai,
1889*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w, struct skl_pipe_params *params)
1890*4882a593Smuzhiyun {
1891*4882a593Smuzhiyun 	struct snd_soc_dapm_path *p;
1892*4882a593Smuzhiyun 	int ret = -EIO;
1893*4882a593Smuzhiyun 
1894*4882a593Smuzhiyun 	snd_soc_dapm_widget_for_each_sink_path(w, p) {
1895*4882a593Smuzhiyun 		if (p->connect && is_skl_dsp_widget_type(p->sink, dai->dev) &&
1896*4882a593Smuzhiyun 						p->sink->priv) {
1897*4882a593Smuzhiyun 
1898*4882a593Smuzhiyun 			ret = skl_tplg_be_fill_pipe_params(dai,
1899*4882a593Smuzhiyun 						p->sink->priv, params);
1900*4882a593Smuzhiyun 			if (ret < 0)
1901*4882a593Smuzhiyun 				return ret;
1902*4882a593Smuzhiyun 		} else {
1903*4882a593Smuzhiyun 			ret = skl_tplg_be_set_sink_pipe_params(
1904*4882a593Smuzhiyun 						dai, p->sink, params);
1905*4882a593Smuzhiyun 			if (ret < 0)
1906*4882a593Smuzhiyun 				return ret;
1907*4882a593Smuzhiyun 		}
1908*4882a593Smuzhiyun 	}
1909*4882a593Smuzhiyun 
1910*4882a593Smuzhiyun 	return ret;
1911*4882a593Smuzhiyun }
1912*4882a593Smuzhiyun 
1913*4882a593Smuzhiyun /*
1914*4882a593Smuzhiyun  * BE hw_params can be source parameters (capture) or sink parameters
1915*4882a593Smuzhiyun  * (playback). Based on the direction we need to walk either the source
1916*4882a593Smuzhiyun  * list or the sink list and set the pipeline parameters
1917*4882a593Smuzhiyun  */
1918*4882a593Smuzhiyun int skl_tplg_be_update_params(struct snd_soc_dai *dai,
1919*4882a593Smuzhiyun 				struct skl_pipe_params *params)
1920*4882a593Smuzhiyun {
1921*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
1922*4882a593Smuzhiyun 
1923*4882a593Smuzhiyun 	if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1924*4882a593Smuzhiyun 		w = dai->playback_widget;
1925*4882a593Smuzhiyun 
1926*4882a593Smuzhiyun 		return skl_tplg_be_set_src_pipe_params(dai, w, params);
1927*4882a593Smuzhiyun 
1928*4882a593Smuzhiyun 	} else {
1929*4882a593Smuzhiyun 		w = dai->capture_widget;
1930*4882a593Smuzhiyun 
1931*4882a593Smuzhiyun 		return skl_tplg_be_set_sink_pipe_params(dai, w, params);
1932*4882a593Smuzhiyun 	}
1933*4882a593Smuzhiyun 
1934*4882a593Smuzhiyun 	return 0;
1935*4882a593Smuzhiyun }
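
/*
 * Note: the direction-dependent walk above follows the DAPM graph
 * orientation: for playback the BE DAI widget sits at the sink end of
 * the path, so its pipe params are found by walking source paths; for
 * capture the BE DAI widget is the source, so sink paths are walked
 * instead.
 */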
1936*4882a593Smuzhiyun 
1937*4882a593Smuzhiyun static const struct snd_soc_tplg_widget_events skl_tplg_widget_ops[] = {
1938*4882a593Smuzhiyun 	{SKL_MIXER_EVENT, skl_tplg_mixer_event},
1939*4882a593Smuzhiyun 	{SKL_VMIXER_EVENT, skl_tplg_mixer_event},
1940*4882a593Smuzhiyun 	{SKL_PGA_EVENT, skl_tplg_pga_event},
1941*4882a593Smuzhiyun };
1942*4882a593Smuzhiyun 
1943*4882a593Smuzhiyun static const struct snd_soc_tplg_bytes_ext_ops skl_tlv_ops[] = {
1944*4882a593Smuzhiyun 	{SKL_CONTROL_TYPE_BYTE_TLV, skl_tplg_tlv_control_get,
1945*4882a593Smuzhiyun 					skl_tplg_tlv_control_set},
1946*4882a593Smuzhiyun };
1947*4882a593Smuzhiyun 
1948*4882a593Smuzhiyun static const struct snd_soc_tplg_kcontrol_ops skl_tplg_kcontrol_ops[] = {
1949*4882a593Smuzhiyun 	{
1950*4882a593Smuzhiyun 		.id = SKL_CONTROL_TYPE_MIC_SELECT,
1951*4882a593Smuzhiyun 		.get = skl_tplg_mic_control_get,
1952*4882a593Smuzhiyun 		.put = skl_tplg_mic_control_set,
1953*4882a593Smuzhiyun 	},
1954*4882a593Smuzhiyun 	{
1955*4882a593Smuzhiyun 		.id = SKL_CONTROL_TYPE_MULTI_IO_SELECT,
1956*4882a593Smuzhiyun 		.get = skl_tplg_multi_config_get,
1957*4882a593Smuzhiyun 		.put = skl_tplg_multi_config_set,
1958*4882a593Smuzhiyun 	},
1959*4882a593Smuzhiyun 	{
1960*4882a593Smuzhiyun 		.id = SKL_CONTROL_TYPE_MULTI_IO_SELECT_DMIC,
1961*4882a593Smuzhiyun 		.get = skl_tplg_multi_config_get_dmic,
1962*4882a593Smuzhiyun 		.put = skl_tplg_multi_config_set_dmic,
1963*4882a593Smuzhiyun 	}
1964*4882a593Smuzhiyun };
1965*4882a593Smuzhiyun 
1966*4882a593Smuzhiyun static int skl_tplg_fill_pipe_cfg(struct device *dev,
1967*4882a593Smuzhiyun 			struct skl_pipe *pipe, u32 tkn,
1968*4882a593Smuzhiyun 			u32 tkn_val, int conf_idx, int dir)
1969*4882a593Smuzhiyun {
1970*4882a593Smuzhiyun 	struct skl_pipe_fmt *fmt;
1971*4882a593Smuzhiyun 	struct skl_path_config *config;
1972*4882a593Smuzhiyun 
1973*4882a593Smuzhiyun 	switch (dir) {
1974*4882a593Smuzhiyun 	case SKL_DIR_IN:
1975*4882a593Smuzhiyun 		fmt = &pipe->configs[conf_idx].in_fmt;
1976*4882a593Smuzhiyun 		break;
1977*4882a593Smuzhiyun 
1978*4882a593Smuzhiyun 	case SKL_DIR_OUT:
1979*4882a593Smuzhiyun 		fmt = &pipe->configs[conf_idx].out_fmt;
1980*4882a593Smuzhiyun 		break;
1981*4882a593Smuzhiyun 
1982*4882a593Smuzhiyun 	default:
1983*4882a593Smuzhiyun 		dev_err(dev, "Invalid direction: %d\n", dir);
1984*4882a593Smuzhiyun 		return -EINVAL;
1985*4882a593Smuzhiyun 	}
1986*4882a593Smuzhiyun 
1987*4882a593Smuzhiyun 	config = &pipe->configs[conf_idx];
1988*4882a593Smuzhiyun 
1989*4882a593Smuzhiyun 	switch (tkn) {
1990*4882a593Smuzhiyun 	case SKL_TKN_U32_CFG_FREQ:
1991*4882a593Smuzhiyun 		fmt->freq = tkn_val;
1992*4882a593Smuzhiyun 		break;
1993*4882a593Smuzhiyun 
1994*4882a593Smuzhiyun 	case SKL_TKN_U8_CFG_CHAN:
1995*4882a593Smuzhiyun 		fmt->channels = tkn_val;
1996*4882a593Smuzhiyun 		break;
1997*4882a593Smuzhiyun 
1998*4882a593Smuzhiyun 	case SKL_TKN_U8_CFG_BPS:
1999*4882a593Smuzhiyun 		fmt->bps = tkn_val;
2000*4882a593Smuzhiyun 		break;
2001*4882a593Smuzhiyun 
2002*4882a593Smuzhiyun 	case SKL_TKN_U32_PATH_MEM_PGS:
2003*4882a593Smuzhiyun 		config->mem_pages = tkn_val;
2004*4882a593Smuzhiyun 		break;
2005*4882a593Smuzhiyun 
2006*4882a593Smuzhiyun 	default:
2007*4882a593Smuzhiyun 		dev_err(dev, "Invalid token config: %d\n", tkn);
2008*4882a593Smuzhiyun 		return -EINVAL;
2009*4882a593Smuzhiyun 	}
2010*4882a593Smuzhiyun 
2011*4882a593Smuzhiyun 	return 0;
2012*4882a593Smuzhiyun }
2013*4882a593Smuzhiyun 
2014*4882a593Smuzhiyun static int skl_tplg_fill_pipe_tkn(struct device *dev,
2015*4882a593Smuzhiyun 			struct skl_pipe *pipe, u32 tkn,
2016*4882a593Smuzhiyun 			u32 tkn_val)
2017*4882a593Smuzhiyun {
2018*4882a593Smuzhiyun 
2019*4882a593Smuzhiyun 	switch (tkn) {
2020*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_CONN_TYPE:
2021*4882a593Smuzhiyun 		pipe->conn_type = tkn_val;
2022*4882a593Smuzhiyun 		break;
2023*4882a593Smuzhiyun 
2024*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_PRIORITY:
2025*4882a593Smuzhiyun 		pipe->pipe_priority = tkn_val;
2026*4882a593Smuzhiyun 		break;
2027*4882a593Smuzhiyun 
2028*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_MEM_PGS:
2029*4882a593Smuzhiyun 		pipe->memory_pages = tkn_val;
2030*4882a593Smuzhiyun 		break;
2031*4882a593Smuzhiyun 
2032*4882a593Smuzhiyun 	case SKL_TKN_U32_PMODE:
2033*4882a593Smuzhiyun 		pipe->lp_mode = tkn_val;
2034*4882a593Smuzhiyun 		break;
2035*4882a593Smuzhiyun 
2036*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_DIRECTION:
2037*4882a593Smuzhiyun 		pipe->direction = tkn_val;
2038*4882a593Smuzhiyun 		break;
2039*4882a593Smuzhiyun 
2040*4882a593Smuzhiyun 	case SKL_TKN_U32_NUM_CONFIGS:
2041*4882a593Smuzhiyun 		pipe->nr_cfgs = tkn_val;
2042*4882a593Smuzhiyun 		break;
2043*4882a593Smuzhiyun 
2044*4882a593Smuzhiyun 	default:
2045*4882a593Smuzhiyun 		dev_err(dev, "Token not handled %d\n", tkn);
2046*4882a593Smuzhiyun 		return -EINVAL;
2047*4882a593Smuzhiyun 	}
2048*4882a593Smuzhiyun 
2049*4882a593Smuzhiyun 	return 0;
2050*4882a593Smuzhiyun }
2051*4882a593Smuzhiyun 
2052*4882a593Smuzhiyun /*
2053*4882a593Smuzhiyun  * Add pipeline by parsing the relevant tokens
2054*4882a593Smuzhiyun  * If the pipe already exists, point mconfig at it and return -EEXIST.
2055*4882a593Smuzhiyun  */
2056*4882a593Smuzhiyun static int skl_tplg_add_pipe(struct device *dev,
2057*4882a593Smuzhiyun 		struct skl_module_cfg *mconfig, struct skl_dev *skl,
2058*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem)
2059*4882a593Smuzhiyun {
2060*4882a593Smuzhiyun 	struct skl_pipeline *ppl;
2061*4882a593Smuzhiyun 	struct skl_pipe *pipe;
2062*4882a593Smuzhiyun 	struct skl_pipe_params *params;
2063*4882a593Smuzhiyun 
2064*4882a593Smuzhiyun 	list_for_each_entry(ppl, &skl->ppl_list, node) {
2065*4882a593Smuzhiyun 		if (ppl->pipe->ppl_id == tkn_elem->value) {
2066*4882a593Smuzhiyun 			mconfig->pipe = ppl->pipe;
2067*4882a593Smuzhiyun 			return -EEXIST;
2068*4882a593Smuzhiyun 		}
2069*4882a593Smuzhiyun 	}
2070*4882a593Smuzhiyun 
2071*4882a593Smuzhiyun 	ppl = devm_kzalloc(dev, sizeof(*ppl), GFP_KERNEL);
2072*4882a593Smuzhiyun 	if (!ppl)
2073*4882a593Smuzhiyun 		return -ENOMEM;
2074*4882a593Smuzhiyun 
2075*4882a593Smuzhiyun 	pipe = devm_kzalloc(dev, sizeof(*pipe), GFP_KERNEL);
2076*4882a593Smuzhiyun 	if (!pipe)
2077*4882a593Smuzhiyun 		return -ENOMEM;
2078*4882a593Smuzhiyun 
2079*4882a593Smuzhiyun 	params = devm_kzalloc(dev, sizeof(*params), GFP_KERNEL);
2080*4882a593Smuzhiyun 	if (!params)
2081*4882a593Smuzhiyun 		return -ENOMEM;
2082*4882a593Smuzhiyun 
2083*4882a593Smuzhiyun 	pipe->p_params = params;
2084*4882a593Smuzhiyun 	pipe->ppl_id = tkn_elem->value;
2085*4882a593Smuzhiyun 	INIT_LIST_HEAD(&pipe->w_list);
2086*4882a593Smuzhiyun 
2087*4882a593Smuzhiyun 	ppl->pipe = pipe;
2088*4882a593Smuzhiyun 	list_add(&ppl->node, &skl->ppl_list);
2089*4882a593Smuzhiyun 
2090*4882a593Smuzhiyun 	mconfig->pipe = pipe;
2091*4882a593Smuzhiyun 	mconfig->pipe->state = SKL_PIPE_INVALID;
2092*4882a593Smuzhiyun 
2093*4882a593Smuzhiyun 	return 0;
2094*4882a593Smuzhiyun }
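
/*
 * Note: several widgets in a topology can carry the same pipe id token.
 * The first widget parsed here allocates the skl_pipeline/skl_pipe pair
 * and adds it to skl->ppl_list; later widgets just get mconfig->pipe
 * pointed at the existing pipe and see -EEXIST, which the caller can
 * treat as "pipe already known".
 */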
2095*4882a593Smuzhiyun 
2096*4882a593Smuzhiyun static int skl_tplg_get_uuid(struct device *dev, guid_t *guid,
2097*4882a593Smuzhiyun 	      struct snd_soc_tplg_vendor_uuid_elem *uuid_tkn)
2098*4882a593Smuzhiyun {
2099*4882a593Smuzhiyun 	if (uuid_tkn->token == SKL_TKN_UUID) {
2100*4882a593Smuzhiyun 		import_guid(guid, uuid_tkn->uuid);
2101*4882a593Smuzhiyun 		return 0;
2102*4882a593Smuzhiyun 	}
2103*4882a593Smuzhiyun 
2104*4882a593Smuzhiyun 	dev_err(dev, "Not a UUID token %d\n", uuid_tkn->token);
2105*4882a593Smuzhiyun 
2106*4882a593Smuzhiyun 	return -EINVAL;
2107*4882a593Smuzhiyun }
2108*4882a593Smuzhiyun 
2109*4882a593Smuzhiyun static int skl_tplg_fill_pin(struct device *dev,
2110*4882a593Smuzhiyun 			struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2111*4882a593Smuzhiyun 			struct skl_module_pin *m_pin,
2112*4882a593Smuzhiyun 			int pin_index)
2113*4882a593Smuzhiyun {
2114*4882a593Smuzhiyun 	int ret;
2115*4882a593Smuzhiyun 
2116*4882a593Smuzhiyun 	switch (tkn_elem->token) {
2117*4882a593Smuzhiyun 	case SKL_TKN_U32_PIN_MOD_ID:
2118*4882a593Smuzhiyun 		m_pin[pin_index].id.module_id = tkn_elem->value;
2119*4882a593Smuzhiyun 		break;
2120*4882a593Smuzhiyun 
2121*4882a593Smuzhiyun 	case SKL_TKN_U32_PIN_INST_ID:
2122*4882a593Smuzhiyun 		m_pin[pin_index].id.instance_id = tkn_elem->value;
2123*4882a593Smuzhiyun 		break;
2124*4882a593Smuzhiyun 
2125*4882a593Smuzhiyun 	case SKL_TKN_UUID:
2126*4882a593Smuzhiyun 		ret = skl_tplg_get_uuid(dev, &m_pin[pin_index].id.mod_uuid,
2127*4882a593Smuzhiyun 			(struct snd_soc_tplg_vendor_uuid_elem *)tkn_elem);
2128*4882a593Smuzhiyun 		if (ret < 0)
2129*4882a593Smuzhiyun 			return ret;
2130*4882a593Smuzhiyun 
2131*4882a593Smuzhiyun 		break;
2132*4882a593Smuzhiyun 
2133*4882a593Smuzhiyun 	default:
2134*4882a593Smuzhiyun 		dev_err(dev, "%d Not a pin token\n", tkn_elem->token);
2135*4882a593Smuzhiyun 		return -EINVAL;
2136*4882a593Smuzhiyun 	}
2137*4882a593Smuzhiyun 
2138*4882a593Smuzhiyun 	return 0;
2139*4882a593Smuzhiyun }
2140*4882a593Smuzhiyun 
2141*4882a593Smuzhiyun /*
2142*4882a593Smuzhiyun  * Parse for pin config specific tokens to fill up the
2143*4882a593Smuzhiyun  * module private data
2144*4882a593Smuzhiyun  */
2145*4882a593Smuzhiyun static int skl_tplg_fill_pins_info(struct device *dev,
2146*4882a593Smuzhiyun 		struct skl_module_cfg *mconfig,
2147*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2148*4882a593Smuzhiyun 		int dir, int pin_count)
2149*4882a593Smuzhiyun {
2150*4882a593Smuzhiyun 	int ret;
2151*4882a593Smuzhiyun 	struct skl_module_pin *m_pin;
2152*4882a593Smuzhiyun 
2153*4882a593Smuzhiyun 	switch (dir) {
2154*4882a593Smuzhiyun 	case SKL_DIR_IN:
2155*4882a593Smuzhiyun 		m_pin = mconfig->m_in_pin;
2156*4882a593Smuzhiyun 		break;
2157*4882a593Smuzhiyun 
2158*4882a593Smuzhiyun 	case SKL_DIR_OUT:
2159*4882a593Smuzhiyun 		m_pin = mconfig->m_out_pin;
2160*4882a593Smuzhiyun 		break;
2161*4882a593Smuzhiyun 
2162*4882a593Smuzhiyun 	default:
2163*4882a593Smuzhiyun 		dev_err(dev, "Invalid direction value\n");
2164*4882a593Smuzhiyun 		return -EINVAL;
2165*4882a593Smuzhiyun 	}
2166*4882a593Smuzhiyun 
2167*4882a593Smuzhiyun 	ret = skl_tplg_fill_pin(dev, tkn_elem, m_pin, pin_count);
2168*4882a593Smuzhiyun 	if (ret < 0)
2169*4882a593Smuzhiyun 		return ret;
2170*4882a593Smuzhiyun 
2171*4882a593Smuzhiyun 	m_pin[pin_count].in_use = false;
2172*4882a593Smuzhiyun 	m_pin[pin_count].pin_state = SKL_PIN_UNBIND;
2173*4882a593Smuzhiyun 
2174*4882a593Smuzhiyun 	return 0;
2175*4882a593Smuzhiyun }
2176*4882a593Smuzhiyun 
2177*4882a593Smuzhiyun /*
2178*4882a593Smuzhiyun  * Fill up input/output module config format based
2179*4882a593Smuzhiyun  * on the direction
2180*4882a593Smuzhiyun  */
2181*4882a593Smuzhiyun static int skl_tplg_fill_fmt(struct device *dev,
2182*4882a593Smuzhiyun 		struct skl_module_fmt *dst_fmt,
2183*4882a593Smuzhiyun 		u32 tkn, u32 value)
2184*4882a593Smuzhiyun {
2185*4882a593Smuzhiyun 	switch (tkn) {
2186*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH:
2187*4882a593Smuzhiyun 		dst_fmt->channels  = value;
2188*4882a593Smuzhiyun 		break;
2189*4882a593Smuzhiyun 
2190*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_FREQ:
2191*4882a593Smuzhiyun 		dst_fmt->s_freq = value;
2192*4882a593Smuzhiyun 		break;
2193*4882a593Smuzhiyun 
2194*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_BIT_DEPTH:
2195*4882a593Smuzhiyun 		dst_fmt->bit_depth = value;
2196*4882a593Smuzhiyun 		break;
2197*4882a593Smuzhiyun 
2198*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_SIZE:
2199*4882a593Smuzhiyun 		dst_fmt->valid_bit_depth = value;
2200*4882a593Smuzhiyun 		break;
2201*4882a593Smuzhiyun 
2202*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_CONFIG:
2203*4882a593Smuzhiyun 		dst_fmt->ch_cfg = value;
2204*4882a593Smuzhiyun 		break;
2205*4882a593Smuzhiyun 
2206*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_INTERLEAVE:
2207*4882a593Smuzhiyun 		dst_fmt->interleaving_style = value;
2208*4882a593Smuzhiyun 		break;
2209*4882a593Smuzhiyun 
2210*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_TYPE:
2211*4882a593Smuzhiyun 		dst_fmt->sample_type = value;
2212*4882a593Smuzhiyun 		break;
2213*4882a593Smuzhiyun 
2214*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_MAP:
2215*4882a593Smuzhiyun 		dst_fmt->ch_map = value;
2216*4882a593Smuzhiyun 		break;
2217*4882a593Smuzhiyun 
2218*4882a593Smuzhiyun 	default:
2219*4882a593Smuzhiyun 		dev_err(dev, "Invalid token %d\n", tkn);
2220*4882a593Smuzhiyun 		return -EINVAL;
2221*4882a593Smuzhiyun 	}
2222*4882a593Smuzhiyun 
2223*4882a593Smuzhiyun 	return 0;
2224*4882a593Smuzhiyun }
2225*4882a593Smuzhiyun 
2226*4882a593Smuzhiyun static int skl_tplg_widget_fill_fmt(struct device *dev,
2227*4882a593Smuzhiyun 		struct skl_module_iface *fmt,
2228*4882a593Smuzhiyun 		u32 tkn, u32 val, u32 dir, int fmt_idx)
2229*4882a593Smuzhiyun {
2230*4882a593Smuzhiyun 	struct skl_module_fmt *dst_fmt;
2231*4882a593Smuzhiyun 
2232*4882a593Smuzhiyun 	if (!fmt)
2233*4882a593Smuzhiyun 		return -EINVAL;
2234*4882a593Smuzhiyun 
2235*4882a593Smuzhiyun 	switch (dir) {
2236*4882a593Smuzhiyun 	case SKL_DIR_IN:
2237*4882a593Smuzhiyun 		dst_fmt = &fmt->inputs[fmt_idx].fmt;
2238*4882a593Smuzhiyun 		break;
2239*4882a593Smuzhiyun 
2240*4882a593Smuzhiyun 	case SKL_DIR_OUT:
2241*4882a593Smuzhiyun 		dst_fmt = &fmt->outputs[fmt_idx].fmt;
2242*4882a593Smuzhiyun 		break;
2243*4882a593Smuzhiyun 
2244*4882a593Smuzhiyun 	default:
2245*4882a593Smuzhiyun 		dev_err(dev, "Invalid direction: %d\n", dir);
2246*4882a593Smuzhiyun 		return -EINVAL;
2247*4882a593Smuzhiyun 	}
2248*4882a593Smuzhiyun 
2249*4882a593Smuzhiyun 	return skl_tplg_fill_fmt(dev, dst_fmt, tkn, val);
2250*4882a593Smuzhiyun }
2251*4882a593Smuzhiyun 
2252*4882a593Smuzhiyun static void skl_tplg_fill_pin_dynamic_val(
2253*4882a593Smuzhiyun 		struct skl_module_pin *mpin, u32 pin_count, u32 value)
2254*4882a593Smuzhiyun {
2255*4882a593Smuzhiyun 	int i;
2256*4882a593Smuzhiyun 
2257*4882a593Smuzhiyun 	for (i = 0; i < pin_count; i++)
2258*4882a593Smuzhiyun 		mpin[i].is_dynamic = value;
2259*4882a593Smuzhiyun }
2260*4882a593Smuzhiyun 
2261*4882a593Smuzhiyun /*
2262*4882a593Smuzhiyun  * The resource table in the manifest has pin-specific resources
2263*4882a593Smuzhiyun  * like the pin index and the pin buffer size
2264*4882a593Smuzhiyun  */
2265*4882a593Smuzhiyun static int skl_tplg_manifest_pin_res_tkn(struct device *dev,
2266*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2267*4882a593Smuzhiyun 		struct skl_module_res *res, int pin_idx, int dir)
2268*4882a593Smuzhiyun {
2269*4882a593Smuzhiyun 	struct skl_module_pin_resources *m_pin;
2270*4882a593Smuzhiyun 
2271*4882a593Smuzhiyun 	switch (dir) {
2272*4882a593Smuzhiyun 	case SKL_DIR_IN:
2273*4882a593Smuzhiyun 		m_pin = &res->input[pin_idx];
2274*4882a593Smuzhiyun 		break;
2275*4882a593Smuzhiyun 
2276*4882a593Smuzhiyun 	case SKL_DIR_OUT:
2277*4882a593Smuzhiyun 		m_pin = &res->output[pin_idx];
2278*4882a593Smuzhiyun 		break;
2279*4882a593Smuzhiyun 
2280*4882a593Smuzhiyun 	default:
2281*4882a593Smuzhiyun 		dev_err(dev, "Invalid pin direction: %d\n", dir);
2282*4882a593Smuzhiyun 		return -EINVAL;
2283*4882a593Smuzhiyun 	}
2284*4882a593Smuzhiyun 
2285*4882a593Smuzhiyun 	switch (tkn_elem->token) {
2286*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_RES_PIN_ID:
2287*4882a593Smuzhiyun 		m_pin->pin_index = tkn_elem->value;
2288*4882a593Smuzhiyun 		break;
2289*4882a593Smuzhiyun 
2290*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_PIN_BUF:
2291*4882a593Smuzhiyun 		m_pin->buf_size = tkn_elem->value;
2292*4882a593Smuzhiyun 		break;
2293*4882a593Smuzhiyun 
2294*4882a593Smuzhiyun 	default:
2295*4882a593Smuzhiyun 		dev_err(dev, "Invalid token: %d\n", tkn_elem->token);
2296*4882a593Smuzhiyun 		return -EINVAL;
2297*4882a593Smuzhiyun 	}
2298*4882a593Smuzhiyun 
2299*4882a593Smuzhiyun 	return 0;
2300*4882a593Smuzhiyun }
2301*4882a593Smuzhiyun 
2302*4882a593Smuzhiyun /*
2303*4882a593Smuzhiyun  * Fill module specific resources from the manifest's resource
2304*4882a593Smuzhiyun  * table, such as CPC, DMA buffer size, memory pages, OBS and IBS.
2305*4882a593Smuzhiyun  */
skl_tplg_fill_res_tkn(struct device * dev,struct snd_soc_tplg_vendor_value_elem * tkn_elem,struct skl_module_res * res,int pin_idx,int dir)2306*4882a593Smuzhiyun static int skl_tplg_fill_res_tkn(struct device *dev,
2307*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2308*4882a593Smuzhiyun 		struct skl_module_res *res,
2309*4882a593Smuzhiyun 		int pin_idx, int dir)
2310*4882a593Smuzhiyun {
2311*4882a593Smuzhiyun 	int ret, tkn_count = 0;
2312*4882a593Smuzhiyun 
2313*4882a593Smuzhiyun 	if (!res)
2314*4882a593Smuzhiyun 		return -EINVAL;
2315*4882a593Smuzhiyun 
2316*4882a593Smuzhiyun 	switch (tkn_elem->token) {
2317*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_DMA_SIZE:
2318*4882a593Smuzhiyun 		res->dma_buffer_size = tkn_elem->value;
2319*4882a593Smuzhiyun 		break;
2320*4882a593Smuzhiyun 
2321*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_CPC:
2322*4882a593Smuzhiyun 		res->cpc = tkn_elem->value;
2323*4882a593Smuzhiyun 		break;
2324*4882a593Smuzhiyun 
2325*4882a593Smuzhiyun 	case SKL_TKN_U32_MEM_PAGES:
2326*4882a593Smuzhiyun 		res->is_pages = tkn_elem->value;
2327*4882a593Smuzhiyun 		break;
2328*4882a593Smuzhiyun 
2329*4882a593Smuzhiyun 	case SKL_TKN_U32_OBS:
2330*4882a593Smuzhiyun 		res->obs = tkn_elem->value;
2331*4882a593Smuzhiyun 		break;
2332*4882a593Smuzhiyun 
2333*4882a593Smuzhiyun 	case SKL_TKN_U32_IBS:
2334*4882a593Smuzhiyun 		res->ibs = tkn_elem->value;
2335*4882a593Smuzhiyun 		break;
2336*4882a593Smuzhiyun 
2337*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_RES_PIN_ID:
2338*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_PIN_BUF:
2339*4882a593Smuzhiyun 		ret = skl_tplg_manifest_pin_res_tkn(dev, tkn_elem, res,
2340*4882a593Smuzhiyun 						    pin_idx, dir);
2341*4882a593Smuzhiyun 		if (ret < 0)
2342*4882a593Smuzhiyun 			return ret;
2343*4882a593Smuzhiyun 		break;
2344*4882a593Smuzhiyun 
2345*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_CPS:
2346*4882a593Smuzhiyun 	case SKL_TKN_U32_MAX_MCPS:
2347*4882a593Smuzhiyun 		/* ignore unused tokens */
2348*4882a593Smuzhiyun 		break;
2349*4882a593Smuzhiyun 
2350*4882a593Smuzhiyun 	default:
2351*4882a593Smuzhiyun 		dev_err(dev, "Not a res type token: %d\n", tkn_elem->token);
2352*4882a593Smuzhiyun 		return -EINVAL;
2353*4882a593Smuzhiyun 
2354*4882a593Smuzhiyun 	}
2355*4882a593Smuzhiyun 	tkn_count++;
2356*4882a593Smuzhiyun 
2357*4882a593Smuzhiyun 	return tkn_count;
2358*4882a593Smuzhiyun }
2359*4882a593Smuzhiyun 
2360*4882a593Smuzhiyun /*
2361*4882a593Smuzhiyun  * Parse tokens to fill up the module private data
2362*4882a593Smuzhiyun  */
skl_tplg_get_token(struct device * dev,struct snd_soc_tplg_vendor_value_elem * tkn_elem,struct skl_dev * skl,struct skl_module_cfg * mconfig)2363*4882a593Smuzhiyun static int skl_tplg_get_token(struct device *dev,
2364*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2365*4882a593Smuzhiyun 		struct skl_dev *skl, struct skl_module_cfg *mconfig)
2366*4882a593Smuzhiyun {
2367*4882a593Smuzhiyun 	int tkn_count = 0;
2368*4882a593Smuzhiyun 	int ret;
2369*4882a593Smuzhiyun 	static int is_pipe_exists;
2370*4882a593Smuzhiyun 	static int pin_index, dir, conf_idx;
2371*4882a593Smuzhiyun 	struct skl_module_iface *iface = NULL;
2372*4882a593Smuzhiyun 	struct skl_module_res *res = NULL;
2373*4882a593Smuzhiyun 	int res_idx = mconfig->res_idx;
2374*4882a593Smuzhiyun 	int fmt_idx = mconfig->fmt_idx;
2375*4882a593Smuzhiyun 
2376*4882a593Smuzhiyun 	/*
2377*4882a593Smuzhiyun 	 * If the manifest contains no module descriptions, store all
2378*4882a593Smuzhiyun 	 * the module data at index 0.
2379*4882a593Smuzhiyun 	 * res_idx and fmt_idx default to 0.
2380*4882a593Smuzhiyun 	 */
2381*4882a593Smuzhiyun 	if (skl->nr_modules == 0) {
2382*4882a593Smuzhiyun 		res = &mconfig->module->resources[res_idx];
2383*4882a593Smuzhiyun 		iface = &mconfig->module->formats[fmt_idx];
2384*4882a593Smuzhiyun 	}
2385*4882a593Smuzhiyun 
2386*4882a593Smuzhiyun 	if (tkn_elem->token > SKL_TKN_MAX)
2387*4882a593Smuzhiyun 		return -EINVAL;
2388*4882a593Smuzhiyun 
2389*4882a593Smuzhiyun 	switch (tkn_elem->token) {
2390*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_QUEUE_COUNT:
2391*4882a593Smuzhiyun 		mconfig->module->max_input_pins = tkn_elem->value;
2392*4882a593Smuzhiyun 		break;
2393*4882a593Smuzhiyun 
2394*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_QUEUE_COUNT:
2395*4882a593Smuzhiyun 		mconfig->module->max_output_pins = tkn_elem->value;
2396*4882a593Smuzhiyun 		break;
2397*4882a593Smuzhiyun 
2398*4882a593Smuzhiyun 	case SKL_TKN_U8_DYN_IN_PIN:
2399*4882a593Smuzhiyun 		if (!mconfig->m_in_pin)
2400*4882a593Smuzhiyun 			mconfig->m_in_pin =
2401*4882a593Smuzhiyun 				devm_kcalloc(dev, MAX_IN_QUEUE,
2402*4882a593Smuzhiyun 					     sizeof(*mconfig->m_in_pin),
2403*4882a593Smuzhiyun 					     GFP_KERNEL);
2404*4882a593Smuzhiyun 		if (!mconfig->m_in_pin)
2405*4882a593Smuzhiyun 			return -ENOMEM;
2406*4882a593Smuzhiyun 
2407*4882a593Smuzhiyun 		skl_tplg_fill_pin_dynamic_val(mconfig->m_in_pin, MAX_IN_QUEUE,
2408*4882a593Smuzhiyun 					      tkn_elem->value);
2409*4882a593Smuzhiyun 		break;
2410*4882a593Smuzhiyun 
2411*4882a593Smuzhiyun 	case SKL_TKN_U8_DYN_OUT_PIN:
2412*4882a593Smuzhiyun 		if (!mconfig->m_out_pin)
2413*4882a593Smuzhiyun 			mconfig->m_out_pin =
2414*4882a593Smuzhiyun 				devm_kcalloc(dev, MAX_OUT_QUEUE,
2415*4882a593Smuzhiyun 					     sizeof(*mconfig->m_out_pin),
2416*4882a593Smuzhiyun 					     GFP_KERNEL);
2417*4882a593Smuzhiyun 		if (!mconfig->m_out_pin)
2418*4882a593Smuzhiyun 			return -ENOMEM;
2419*4882a593Smuzhiyun 
2420*4882a593Smuzhiyun 		skl_tplg_fill_pin_dynamic_val(mconfig->m_out_pin, MAX_OUT_QUEUE,
2421*4882a593Smuzhiyun 					      tkn_elem->value);
2422*4882a593Smuzhiyun 		break;
2423*4882a593Smuzhiyun 
2424*4882a593Smuzhiyun 	case SKL_TKN_U8_TIME_SLOT:
2425*4882a593Smuzhiyun 		mconfig->time_slot = tkn_elem->value;
2426*4882a593Smuzhiyun 		break;
2427*4882a593Smuzhiyun 
2428*4882a593Smuzhiyun 	case SKL_TKN_U8_CORE_ID:
2429*4882a593Smuzhiyun 		mconfig->core_id = tkn_elem->value;
2430*4882a593Smuzhiyun 		break;
2431*4882a593Smuzhiyun 
2432*4882a593Smuzhiyun 	case SKL_TKN_U8_MOD_TYPE:
2433*4882a593Smuzhiyun 		mconfig->m_type = tkn_elem->value;
2434*4882a593Smuzhiyun 		break;
2435*4882a593Smuzhiyun 
2436*4882a593Smuzhiyun 	case SKL_TKN_U8_DEV_TYPE:
2437*4882a593Smuzhiyun 		mconfig->dev_type = tkn_elem->value;
2438*4882a593Smuzhiyun 		break;
2439*4882a593Smuzhiyun 
2440*4882a593Smuzhiyun 	case SKL_TKN_U8_HW_CONN_TYPE:
2441*4882a593Smuzhiyun 		mconfig->hw_conn_type = tkn_elem->value;
2442*4882a593Smuzhiyun 		break;
2443*4882a593Smuzhiyun 
2444*4882a593Smuzhiyun 	case SKL_TKN_U16_MOD_INST_ID:
2445*4882a593Smuzhiyun 		mconfig->id.instance_id = tkn_elem->value;
2447*4882a593Smuzhiyun 		break;
2448*4882a593Smuzhiyun 
2449*4882a593Smuzhiyun 	case SKL_TKN_U32_MEM_PAGES:
2450*4882a593Smuzhiyun 	case SKL_TKN_U32_MAX_MCPS:
2451*4882a593Smuzhiyun 	case SKL_TKN_U32_OBS:
2452*4882a593Smuzhiyun 	case SKL_TKN_U32_IBS:
2453*4882a593Smuzhiyun 		ret = skl_tplg_fill_res_tkn(dev, tkn_elem, res, pin_index, dir);
2454*4882a593Smuzhiyun 		if (ret < 0)
2455*4882a593Smuzhiyun 			return ret;
2456*4882a593Smuzhiyun 
2457*4882a593Smuzhiyun 		break;
2458*4882a593Smuzhiyun 
2459*4882a593Smuzhiyun 	case SKL_TKN_U32_VBUS_ID:
2460*4882a593Smuzhiyun 		mconfig->vbus_id = tkn_elem->value;
2461*4882a593Smuzhiyun 		break;
2462*4882a593Smuzhiyun 
2463*4882a593Smuzhiyun 	case SKL_TKN_U32_PARAMS_FIXUP:
2464*4882a593Smuzhiyun 		mconfig->params_fixup = tkn_elem->value;
2465*4882a593Smuzhiyun 		break;
2466*4882a593Smuzhiyun 
2467*4882a593Smuzhiyun 	case SKL_TKN_U32_CONVERTER:
2468*4882a593Smuzhiyun 		mconfig->converter = tkn_elem->value;
2469*4882a593Smuzhiyun 		break;
2470*4882a593Smuzhiyun 
2471*4882a593Smuzhiyun 	case SKL_TKN_U32_D0I3_CAPS:
2472*4882a593Smuzhiyun 		mconfig->d0i3_caps = tkn_elem->value;
2473*4882a593Smuzhiyun 		break;
2474*4882a593Smuzhiyun 
2475*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_ID:
2476*4882a593Smuzhiyun 		ret = skl_tplg_add_pipe(dev,
2477*4882a593Smuzhiyun 				mconfig, skl, tkn_elem);
2478*4882a593Smuzhiyun 
2479*4882a593Smuzhiyun 		if (ret < 0) {
2480*4882a593Smuzhiyun 			if (ret == -EEXIST) {
2481*4882a593Smuzhiyun 				is_pipe_exists = 1;
2482*4882a593Smuzhiyun 				break;
2483*4882a593Smuzhiyun 			}
2484*4882a593Smuzhiyun 			return ret;
2485*4882a593Smuzhiyun 		}
2486*4882a593Smuzhiyun 
2487*4882a593Smuzhiyun 		break;
2488*4882a593Smuzhiyun 
2489*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_CONFIG_ID:
2490*4882a593Smuzhiyun 		conf_idx = tkn_elem->value;
2491*4882a593Smuzhiyun 		break;
2492*4882a593Smuzhiyun 
2493*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_CONN_TYPE:
2494*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_PRIORITY:
2495*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_MEM_PGS:
2496*4882a593Smuzhiyun 	case SKL_TKN_U32_PMODE:
2497*4882a593Smuzhiyun 	case SKL_TKN_U32_PIPE_DIRECTION:
2498*4882a593Smuzhiyun 	case SKL_TKN_U32_NUM_CONFIGS:
2499*4882a593Smuzhiyun 		if (is_pipe_exists) {
2500*4882a593Smuzhiyun 			ret = skl_tplg_fill_pipe_tkn(dev, mconfig->pipe,
2501*4882a593Smuzhiyun 					tkn_elem->token, tkn_elem->value);
2502*4882a593Smuzhiyun 			if (ret < 0)
2503*4882a593Smuzhiyun 				return ret;
2504*4882a593Smuzhiyun 		}
2505*4882a593Smuzhiyun 
2506*4882a593Smuzhiyun 		break;
2507*4882a593Smuzhiyun 
2508*4882a593Smuzhiyun 	case SKL_TKN_U32_PATH_MEM_PGS:
2509*4882a593Smuzhiyun 	case SKL_TKN_U32_CFG_FREQ:
2510*4882a593Smuzhiyun 	case SKL_TKN_U8_CFG_CHAN:
2511*4882a593Smuzhiyun 	case SKL_TKN_U8_CFG_BPS:
2512*4882a593Smuzhiyun 		if (mconfig->pipe->nr_cfgs) {
2513*4882a593Smuzhiyun 			ret = skl_tplg_fill_pipe_cfg(dev, mconfig->pipe,
2514*4882a593Smuzhiyun 					tkn_elem->token, tkn_elem->value,
2515*4882a593Smuzhiyun 					conf_idx, dir);
2516*4882a593Smuzhiyun 			if (ret < 0)
2517*4882a593Smuzhiyun 				return ret;
2518*4882a593Smuzhiyun 		}
2519*4882a593Smuzhiyun 		break;
2520*4882a593Smuzhiyun 
2521*4882a593Smuzhiyun 	case SKL_TKN_CFG_MOD_RES_ID:
2522*4882a593Smuzhiyun 		mconfig->mod_cfg[conf_idx].res_idx = tkn_elem->value;
2523*4882a593Smuzhiyun 		break;
2524*4882a593Smuzhiyun 
2525*4882a593Smuzhiyun 	case SKL_TKN_CFG_MOD_FMT_ID:
2526*4882a593Smuzhiyun 		mconfig->mod_cfg[conf_idx].fmt_idx = tkn_elem->value;
2527*4882a593Smuzhiyun 		break;
2528*4882a593Smuzhiyun 
2529*4882a593Smuzhiyun 	/*
2530*4882a593Smuzhiyun 	 * The SKL_TKN_U32_DIR_PIN_COUNT token carries both the direction
2531*4882a593Smuzhiyun 	 * and the pin index: bit 0 holds the direction and bits 7:4 the
2532*4882a593Smuzhiyun 	 * pin index.
2533*4882a593Smuzhiyun 	 */
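	/*
	 * Illustrative decode (value made up, not from a real topology):
	 * 0x31 -> dir = 0x31 & BIT(0) = 1 (SKL_DIR_OUT),
	 *         pin_index = (0x31 & GENMASK(7, 4)) >> 4 = 3.
	 */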
2534*4882a593Smuzhiyun 	case SKL_TKN_U32_DIR_PIN_COUNT:
2535*4882a593Smuzhiyun 		dir = tkn_elem->value & SKL_IN_DIR_BIT_MASK;
2536*4882a593Smuzhiyun 		pin_index = (tkn_elem->value &
2537*4882a593Smuzhiyun 			SKL_PIN_COUNT_MASK) >> 4;
2538*4882a593Smuzhiyun 
2539*4882a593Smuzhiyun 		break;
2540*4882a593Smuzhiyun 
2541*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH:
2542*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_FREQ:
2543*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_BIT_DEPTH:
2544*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_SIZE:
2545*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_CONFIG:
2546*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_INTERLEAVE:
2547*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_TYPE:
2548*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_MAP:
2549*4882a593Smuzhiyun 		ret = skl_tplg_widget_fill_fmt(dev, iface, tkn_elem->token,
2550*4882a593Smuzhiyun 				tkn_elem->value, dir, pin_index);
2551*4882a593Smuzhiyun 
2552*4882a593Smuzhiyun 		if (ret < 0)
2553*4882a593Smuzhiyun 			return ret;
2554*4882a593Smuzhiyun 
2555*4882a593Smuzhiyun 		break;
2556*4882a593Smuzhiyun 
2557*4882a593Smuzhiyun 	case SKL_TKN_U32_PIN_MOD_ID:
2558*4882a593Smuzhiyun 	case SKL_TKN_U32_PIN_INST_ID:
2559*4882a593Smuzhiyun 	case SKL_TKN_UUID:
2560*4882a593Smuzhiyun 		ret = skl_tplg_fill_pins_info(dev,
2561*4882a593Smuzhiyun 				mconfig, tkn_elem, dir,
2562*4882a593Smuzhiyun 				pin_index);
2563*4882a593Smuzhiyun 		if (ret < 0)
2564*4882a593Smuzhiyun 			return ret;
2565*4882a593Smuzhiyun 
2566*4882a593Smuzhiyun 		break;
2567*4882a593Smuzhiyun 
2568*4882a593Smuzhiyun 	case SKL_TKN_U32_CAPS_SIZE:
2569*4882a593Smuzhiyun 		mconfig->formats_config.caps_size =
2570*4882a593Smuzhiyun 			tkn_elem->value;
2571*4882a593Smuzhiyun 
2572*4882a593Smuzhiyun 		break;
2573*4882a593Smuzhiyun 
2574*4882a593Smuzhiyun 	case SKL_TKN_U32_CAPS_SET_PARAMS:
2575*4882a593Smuzhiyun 		mconfig->formats_config.set_params =
2576*4882a593Smuzhiyun 				tkn_elem->value;
2577*4882a593Smuzhiyun 		break;
2578*4882a593Smuzhiyun 
2579*4882a593Smuzhiyun 	case SKL_TKN_U32_CAPS_PARAMS_ID:
2580*4882a593Smuzhiyun 		mconfig->formats_config.param_id =
2581*4882a593Smuzhiyun 				tkn_elem->value;
2582*4882a593Smuzhiyun 		break;
2583*4882a593Smuzhiyun 
2584*4882a593Smuzhiyun 	case SKL_TKN_U32_PROC_DOMAIN:
2585*4882a593Smuzhiyun 		mconfig->domain =
2586*4882a593Smuzhiyun 			tkn_elem->value;
2587*4882a593Smuzhiyun 
2588*4882a593Smuzhiyun 		break;
2589*4882a593Smuzhiyun 
2590*4882a593Smuzhiyun 	case SKL_TKN_U32_DMA_BUF_SIZE:
2591*4882a593Smuzhiyun 		mconfig->dma_buffer_size = tkn_elem->value;
2592*4882a593Smuzhiyun 		break;
2593*4882a593Smuzhiyun 
2594*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_PIN_TYPE:
2595*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_PIN_TYPE:
2596*4882a593Smuzhiyun 	case SKL_TKN_U8_CONN_TYPE:
2597*4882a593Smuzhiyun 		break;
2598*4882a593Smuzhiyun 
2599*4882a593Smuzhiyun 	default:
2600*4882a593Smuzhiyun 		dev_err(dev, "Token %d not handled\n",
2601*4882a593Smuzhiyun 				tkn_elem->token);
2602*4882a593Smuzhiyun 		return -EINVAL;
2603*4882a593Smuzhiyun 	}
2604*4882a593Smuzhiyun 
2605*4882a593Smuzhiyun 	tkn_count++;
2606*4882a593Smuzhiyun 
2607*4882a593Smuzhiyun 	return tkn_count;
2608*4882a593Smuzhiyun }
2609*4882a593Smuzhiyun 
2610*4882a593Smuzhiyun /*
2611*4882a593Smuzhiyun  * Parse the vendor array for specific tokens to construct
2612*4882a593Smuzhiyun  * module private data
2613*4882a593Smuzhiyun  */
skl_tplg_get_tokens(struct device * dev,char * pvt_data,struct skl_dev * skl,struct skl_module_cfg * mconfig,int block_size)2614*4882a593Smuzhiyun static int skl_tplg_get_tokens(struct device *dev,
2615*4882a593Smuzhiyun 		char *pvt_data,	struct skl_dev *skl,
2616*4882a593Smuzhiyun 		struct skl_module_cfg *mconfig, int block_size)
2617*4882a593Smuzhiyun {
2618*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_array *array;
2619*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_value_elem *tkn_elem;
2620*4882a593Smuzhiyun 	int tkn_count = 0, ret;
2621*4882a593Smuzhiyun 	int off = 0, tuple_size = 0;
2622*4882a593Smuzhiyun 	bool is_module_guid = true;
2623*4882a593Smuzhiyun 
2624*4882a593Smuzhiyun 	if (block_size <= 0)
2625*4882a593Smuzhiyun 		return -EINVAL;
2626*4882a593Smuzhiyun 
2627*4882a593Smuzhiyun 	while (tuple_size < block_size) {
2628*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)(pvt_data + off);
2629*4882a593Smuzhiyun 
2630*4882a593Smuzhiyun 		off += array->size;
2631*4882a593Smuzhiyun 
2632*4882a593Smuzhiyun 		switch (array->type) {
2633*4882a593Smuzhiyun 		case SND_SOC_TPLG_TUPLE_TYPE_STRING:
2634*4882a593Smuzhiyun 			dev_warn(dev, "no string tokens expected for skl tplg\n");
2635*4882a593Smuzhiyun 			continue;
2636*4882a593Smuzhiyun 
2637*4882a593Smuzhiyun 		case SND_SOC_TPLG_TUPLE_TYPE_UUID:
2638*4882a593Smuzhiyun 			if (is_module_guid) {
2639*4882a593Smuzhiyun 				ret = skl_tplg_get_uuid(dev, (guid_t *)mconfig->guid,
2640*4882a593Smuzhiyun 							array->uuid);
2641*4882a593Smuzhiyun 				is_module_guid = false;
2642*4882a593Smuzhiyun 			} else {
2643*4882a593Smuzhiyun 				ret = skl_tplg_get_token(dev, array->value, skl,
2644*4882a593Smuzhiyun 							 mconfig);
2645*4882a593Smuzhiyun 			}
2646*4882a593Smuzhiyun 
2647*4882a593Smuzhiyun 			if (ret < 0)
2648*4882a593Smuzhiyun 				return ret;
2649*4882a593Smuzhiyun 
2650*4882a593Smuzhiyun 			tuple_size += sizeof(*array->uuid);
2651*4882a593Smuzhiyun 
2652*4882a593Smuzhiyun 			continue;
2653*4882a593Smuzhiyun 
2654*4882a593Smuzhiyun 		default:
2655*4882a593Smuzhiyun 			tkn_elem = array->value;
2656*4882a593Smuzhiyun 			tkn_count = 0;
2657*4882a593Smuzhiyun 			break;
2658*4882a593Smuzhiyun 		}
2659*4882a593Smuzhiyun 
2660*4882a593Smuzhiyun 		while (tkn_count <= (array->num_elems - 1)) {
2661*4882a593Smuzhiyun 			ret = skl_tplg_get_token(dev, tkn_elem,
2662*4882a593Smuzhiyun 					skl, mconfig);
2663*4882a593Smuzhiyun 
2664*4882a593Smuzhiyun 			if (ret < 0)
2665*4882a593Smuzhiyun 				return ret;
2666*4882a593Smuzhiyun 
2667*4882a593Smuzhiyun 			tkn_count = tkn_count + ret;
2668*4882a593Smuzhiyun 			tkn_elem++;
2669*4882a593Smuzhiyun 		}
2670*4882a593Smuzhiyun 
2671*4882a593Smuzhiyun 		tuple_size += tkn_count * sizeof(*tkn_elem);
2672*4882a593Smuzhiyun 	}
2673*4882a593Smuzhiyun 
2674*4882a593Smuzhiyun 	return off;
2675*4882a593Smuzhiyun }
2676*4882a593Smuzhiyun 
2677*4882a593Smuzhiyun /*
2678*4882a593Smuzhiyun  * Every data block is preceded by a descriptor giving the number
2679*4882a593Smuzhiyun  * of data blocks, the type of the block and its size.
2680*4882a593Smuzhiyun  */
skl_tplg_get_desc_blocks(struct device * dev,struct snd_soc_tplg_vendor_array * array)2681*4882a593Smuzhiyun static int skl_tplg_get_desc_blocks(struct device *dev,
2682*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_array *array)
2683*4882a593Smuzhiyun {
2684*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_value_elem *tkn_elem;
2685*4882a593Smuzhiyun 
2686*4882a593Smuzhiyun 	tkn_elem = array->value;
2687*4882a593Smuzhiyun 
2688*4882a593Smuzhiyun 	switch (tkn_elem->token) {
2689*4882a593Smuzhiyun 	case SKL_TKN_U8_NUM_BLOCKS:
2690*4882a593Smuzhiyun 	case SKL_TKN_U8_BLOCK_TYPE:
2691*4882a593Smuzhiyun 	case SKL_TKN_U16_BLOCK_SIZE:
2692*4882a593Smuzhiyun 		return tkn_elem->value;
2693*4882a593Smuzhiyun 
2694*4882a593Smuzhiyun 	default:
2695*4882a593Smuzhiyun 		dev_err(dev, "Invalid descriptor token %d\n", tkn_elem->token);
2696*4882a593Smuzhiyun 		break;
2697*4882a593Smuzhiyun 	}
2698*4882a593Smuzhiyun 
2699*4882a593Smuzhiyun 	return -EINVAL;
2700*4882a593Smuzhiyun }
2701*4882a593Smuzhiyun 
2702*4882a593Smuzhiyun /* Functions to parse private data from configuration file format v4 */
2703*4882a593Smuzhiyun 
2704*4882a593Smuzhiyun /*
2705*4882a593Smuzhiyun  * Add pipeline from topology binary into driver pipeline list
2706*4882a593Smuzhiyun  *
2707*4882a593Smuzhiyun  * If it has already been added, return that instance;
2708*4882a593Smuzhiyun  * otherwise create a new instance and add it to the driver list.
2709*4882a593Smuzhiyun  */
skl_tplg_add_pipe_v4(struct device * dev,struct skl_module_cfg * mconfig,struct skl_dev * skl,struct skl_dfw_v4_pipe * dfw_pipe)2710*4882a593Smuzhiyun static int skl_tplg_add_pipe_v4(struct device *dev,
2711*4882a593Smuzhiyun 			struct skl_module_cfg *mconfig, struct skl_dev *skl,
2712*4882a593Smuzhiyun 			struct skl_dfw_v4_pipe *dfw_pipe)
2713*4882a593Smuzhiyun {
2714*4882a593Smuzhiyun 	struct skl_pipeline *ppl;
2715*4882a593Smuzhiyun 	struct skl_pipe *pipe;
2716*4882a593Smuzhiyun 	struct skl_pipe_params *params;
2717*4882a593Smuzhiyun 
2718*4882a593Smuzhiyun 	list_for_each_entry(ppl, &skl->ppl_list, node) {
2719*4882a593Smuzhiyun 		if (ppl->pipe->ppl_id == dfw_pipe->pipe_id) {
2720*4882a593Smuzhiyun 			mconfig->pipe = ppl->pipe;
2721*4882a593Smuzhiyun 			return 0;
2722*4882a593Smuzhiyun 		}
2723*4882a593Smuzhiyun 	}
2724*4882a593Smuzhiyun 
2725*4882a593Smuzhiyun 	ppl = devm_kzalloc(dev, sizeof(*ppl), GFP_KERNEL);
2726*4882a593Smuzhiyun 	if (!ppl)
2727*4882a593Smuzhiyun 		return -ENOMEM;
2728*4882a593Smuzhiyun 
2729*4882a593Smuzhiyun 	pipe = devm_kzalloc(dev, sizeof(*pipe), GFP_KERNEL);
2730*4882a593Smuzhiyun 	if (!pipe)
2731*4882a593Smuzhiyun 		return -ENOMEM;
2732*4882a593Smuzhiyun 
2733*4882a593Smuzhiyun 	params = devm_kzalloc(dev, sizeof(*params), GFP_KERNEL);
2734*4882a593Smuzhiyun 	if (!params)
2735*4882a593Smuzhiyun 		return -ENOMEM;
2736*4882a593Smuzhiyun 
2737*4882a593Smuzhiyun 	pipe->ppl_id = dfw_pipe->pipe_id;
2738*4882a593Smuzhiyun 	pipe->memory_pages = dfw_pipe->memory_pages;
2739*4882a593Smuzhiyun 	pipe->pipe_priority = dfw_pipe->pipe_priority;
2740*4882a593Smuzhiyun 	pipe->conn_type = dfw_pipe->conn_type;
2741*4882a593Smuzhiyun 	pipe->state = SKL_PIPE_INVALID;
2742*4882a593Smuzhiyun 	pipe->p_params = params;
2743*4882a593Smuzhiyun 	INIT_LIST_HEAD(&pipe->w_list);
2744*4882a593Smuzhiyun 
2745*4882a593Smuzhiyun 	ppl->pipe = pipe;
2746*4882a593Smuzhiyun 	list_add(&ppl->node, &skl->ppl_list);
2747*4882a593Smuzhiyun 
2748*4882a593Smuzhiyun 	mconfig->pipe = pipe;
2749*4882a593Smuzhiyun 
2750*4882a593Smuzhiyun 	return 0;
2751*4882a593Smuzhiyun }
2752*4882a593Smuzhiyun 
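/*
 * Copy the pin connection data from the v4 firmware description into
 * the driver's module pin array and mark every pin as unused/unbound.
 */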
skl_fill_module_pin_info_v4(struct skl_dfw_v4_module_pin * dfw_pin,struct skl_module_pin * m_pin,bool is_dynamic,int max_pin)2753*4882a593Smuzhiyun static void skl_fill_module_pin_info_v4(struct skl_dfw_v4_module_pin *dfw_pin,
2754*4882a593Smuzhiyun 					struct skl_module_pin *m_pin,
2755*4882a593Smuzhiyun 					bool is_dynamic, int max_pin)
2756*4882a593Smuzhiyun {
2757*4882a593Smuzhiyun 	int i;
2758*4882a593Smuzhiyun 
2759*4882a593Smuzhiyun 	for (i = 0; i < max_pin; i++) {
2760*4882a593Smuzhiyun 		m_pin[i].id.module_id = dfw_pin[i].module_id;
2761*4882a593Smuzhiyun 		m_pin[i].id.instance_id = dfw_pin[i].instance_id;
2762*4882a593Smuzhiyun 		m_pin[i].in_use = false;
2763*4882a593Smuzhiyun 		m_pin[i].is_dynamic = is_dynamic;
2764*4882a593Smuzhiyun 		m_pin[i].pin_state = SKL_PIN_UNBIND;
2765*4882a593Smuzhiyun 	}
2766*4882a593Smuzhiyun }
2767*4882a593Smuzhiyun 
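/*
 * Copy the per pin audio format descriptors from the v4 firmware
 * description into the driver's pin format array.
 */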
skl_tplg_fill_fmt_v4(struct skl_module_pin_fmt * dst_fmt,struct skl_dfw_v4_module_fmt * src_fmt,int pins)2768*4882a593Smuzhiyun static void skl_tplg_fill_fmt_v4(struct skl_module_pin_fmt *dst_fmt,
2769*4882a593Smuzhiyun 				 struct skl_dfw_v4_module_fmt *src_fmt,
2770*4882a593Smuzhiyun 				 int pins)
2771*4882a593Smuzhiyun {
2772*4882a593Smuzhiyun 	int i;
2773*4882a593Smuzhiyun 
2774*4882a593Smuzhiyun 	for (i = 0; i < pins; i++) {
2775*4882a593Smuzhiyun 		dst_fmt[i].fmt.channels  = src_fmt[i].channels;
2776*4882a593Smuzhiyun 		dst_fmt[i].fmt.s_freq = src_fmt[i].freq;
2777*4882a593Smuzhiyun 		dst_fmt[i].fmt.bit_depth = src_fmt[i].bit_depth;
2778*4882a593Smuzhiyun 		dst_fmt[i].fmt.valid_bit_depth = src_fmt[i].valid_bit_depth;
2779*4882a593Smuzhiyun 		dst_fmt[i].fmt.ch_cfg = src_fmt[i].ch_cfg;
2780*4882a593Smuzhiyun 		dst_fmt[i].fmt.ch_map = src_fmt[i].ch_map;
2781*4882a593Smuzhiyun 		dst_fmt[i].fmt.interleaving_style =
2782*4882a593Smuzhiyun 						src_fmt[i].interleaving_style;
2783*4882a593Smuzhiyun 		dst_fmt[i].fmt.sample_type = src_fmt[i].sample_type;
2784*4882a593Smuzhiyun 	}
2785*4882a593Smuzhiyun }
2786*4882a593Smuzhiyun 
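/*
 * Parse a widget's private data laid out in the v4 format, where the
 * data is a single struct skl_dfw_v4_module instead of vendor tuples.
 */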
skl_tplg_get_pvt_data_v4(struct snd_soc_tplg_dapm_widget * tplg_w,struct skl_dev * skl,struct device * dev,struct skl_module_cfg * mconfig)2787*4882a593Smuzhiyun static int skl_tplg_get_pvt_data_v4(struct snd_soc_tplg_dapm_widget *tplg_w,
2788*4882a593Smuzhiyun 				    struct skl_dev *skl, struct device *dev,
2789*4882a593Smuzhiyun 				    struct skl_module_cfg *mconfig)
2790*4882a593Smuzhiyun {
2791*4882a593Smuzhiyun 	struct skl_dfw_v4_module *dfw =
2792*4882a593Smuzhiyun 				(struct skl_dfw_v4_module *)tplg_w->priv.data;
2793*4882a593Smuzhiyun 	int ret;
2794*4882a593Smuzhiyun 
2795*4882a593Smuzhiyun 	dev_dbg(dev, "Parsing Skylake v4 widget topology data\n");
2796*4882a593Smuzhiyun 
2797*4882a593Smuzhiyun 	ret = guid_parse(dfw->uuid, (guid_t *)mconfig->guid);
2798*4882a593Smuzhiyun 	if (ret)
2799*4882a593Smuzhiyun 		return ret;
2800*4882a593Smuzhiyun 	mconfig->id.module_id = -1;
2801*4882a593Smuzhiyun 	mconfig->id.instance_id = dfw->instance_id;
2802*4882a593Smuzhiyun 	mconfig->module->resources[0].cpc = dfw->max_mcps / 1000;
2803*4882a593Smuzhiyun 	mconfig->module->resources[0].ibs = dfw->ibs;
2804*4882a593Smuzhiyun 	mconfig->module->resources[0].obs = dfw->obs;
2805*4882a593Smuzhiyun 	mconfig->core_id = dfw->core_id;
2806*4882a593Smuzhiyun 	mconfig->module->max_input_pins = dfw->max_in_queue;
2807*4882a593Smuzhiyun 	mconfig->module->max_output_pins = dfw->max_out_queue;
2808*4882a593Smuzhiyun 	mconfig->module->loadable = dfw->is_loadable;
2809*4882a593Smuzhiyun 	skl_tplg_fill_fmt_v4(mconfig->module->formats[0].inputs, dfw->in_fmt,
2810*4882a593Smuzhiyun 			     MAX_IN_QUEUE);
2811*4882a593Smuzhiyun 	skl_tplg_fill_fmt_v4(mconfig->module->formats[0].outputs, dfw->out_fmt,
2812*4882a593Smuzhiyun 			     MAX_OUT_QUEUE);
2813*4882a593Smuzhiyun 
2814*4882a593Smuzhiyun 	mconfig->params_fixup = dfw->params_fixup;
2815*4882a593Smuzhiyun 	mconfig->converter = dfw->converter;
2816*4882a593Smuzhiyun 	mconfig->m_type = dfw->module_type;
2817*4882a593Smuzhiyun 	mconfig->vbus_id = dfw->vbus_id;
2818*4882a593Smuzhiyun 	mconfig->module->resources[0].is_pages = dfw->mem_pages;
2819*4882a593Smuzhiyun 
2820*4882a593Smuzhiyun 	ret = skl_tplg_add_pipe_v4(dev, mconfig, skl, &dfw->pipe);
2821*4882a593Smuzhiyun 	if (ret)
2822*4882a593Smuzhiyun 		return ret;
2823*4882a593Smuzhiyun 
2824*4882a593Smuzhiyun 	mconfig->dev_type = dfw->dev_type;
2825*4882a593Smuzhiyun 	mconfig->hw_conn_type = dfw->hw_conn_type;
2826*4882a593Smuzhiyun 	mconfig->time_slot = dfw->time_slot;
2827*4882a593Smuzhiyun 	mconfig->formats_config.caps_size = dfw->caps.caps_size;
2828*4882a593Smuzhiyun 
2829*4882a593Smuzhiyun 	mconfig->m_in_pin = devm_kcalloc(dev,
2830*4882a593Smuzhiyun 				MAX_IN_QUEUE, sizeof(*mconfig->m_in_pin),
2831*4882a593Smuzhiyun 				GFP_KERNEL);
2832*4882a593Smuzhiyun 	if (!mconfig->m_in_pin)
2833*4882a593Smuzhiyun 		return -ENOMEM;
2834*4882a593Smuzhiyun 
2835*4882a593Smuzhiyun 	mconfig->m_out_pin = devm_kcalloc(dev,
2836*4882a593Smuzhiyun 				MAX_OUT_QUEUE, sizeof(*mconfig->m_out_pin),
2837*4882a593Smuzhiyun 				GFP_KERNEL);
2838*4882a593Smuzhiyun 	if (!mconfig->m_out_pin)
2839*4882a593Smuzhiyun 		return -ENOMEM;
2840*4882a593Smuzhiyun 
2841*4882a593Smuzhiyun 	skl_fill_module_pin_info_v4(dfw->in_pin, mconfig->m_in_pin,
2842*4882a593Smuzhiyun 				    dfw->is_dynamic_in_pin,
2843*4882a593Smuzhiyun 				    mconfig->module->max_input_pins);
2844*4882a593Smuzhiyun 	skl_fill_module_pin_info_v4(dfw->out_pin, mconfig->m_out_pin,
2845*4882a593Smuzhiyun 				    dfw->is_dynamic_out_pin,
2846*4882a593Smuzhiyun 				    mconfig->module->max_output_pins);
2847*4882a593Smuzhiyun 
2848*4882a593Smuzhiyun 	if (mconfig->formats_config.caps_size) {
2849*4882a593Smuzhiyun 		mconfig->formats_config.set_params = dfw->caps.set_params;
2850*4882a593Smuzhiyun 		mconfig->formats_config.param_id = dfw->caps.param_id;
2851*4882a593Smuzhiyun 		mconfig->formats_config.caps =
2852*4882a593Smuzhiyun 		devm_kzalloc(dev, mconfig->formats_config.caps_size,
2853*4882a593Smuzhiyun 			     GFP_KERNEL);
2854*4882a593Smuzhiyun 		if (!mconfig->formats_config.caps)
2855*4882a593Smuzhiyun 			return -ENOMEM;
2856*4882a593Smuzhiyun 		memcpy(mconfig->formats_config.caps, dfw->caps.caps,
2857*4882a593Smuzhiyun 		       dfw->caps.caps_size);
2858*4882a593Smuzhiyun 	}
2859*4882a593Smuzhiyun 
2860*4882a593Smuzhiyun 	return 0;
2861*4882a593Smuzhiyun }
2862*4882a593Smuzhiyun 
2863*4882a593Smuzhiyun /*
2864*4882a593Smuzhiyun  * Parse the private data for the token and corresponding value.
2865*4882a593Smuzhiyun  * The private data can contain multiple data blocks: the whole data is
2866*4882a593Smuzhiyun  * preceded by a descriptor for the number of blocks, and each block by
2867*4882a593Smuzhiyun  * descriptors for its type and size.
2868*4882a593Smuzhiyun  */
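/*
 * Sketch of the private data layout as parsed below (block count and
 * sizes depend on the topology binary, shown only for illustration):
 *
 *   [NUM_DATA_BLOCKS descriptor]
 *   [BLOCK_TYPE descriptor][BLOCK_SIZE descriptor][block data]
 *   ... repeated per data block ...
 *
 * A block of type SKL_TYPE_TUPLE is parsed as vendor tuples; any other
 * block type is copied verbatim into formats_config.caps.
 */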
skl_tplg_get_pvt_data(struct snd_soc_tplg_dapm_widget * tplg_w,struct skl_dev * skl,struct device * dev,struct skl_module_cfg * mconfig)2869*4882a593Smuzhiyun static int skl_tplg_get_pvt_data(struct snd_soc_tplg_dapm_widget *tplg_w,
2870*4882a593Smuzhiyun 				struct skl_dev *skl, struct device *dev,
2871*4882a593Smuzhiyun 				struct skl_module_cfg *mconfig)
2872*4882a593Smuzhiyun {
2873*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_array *array;
2874*4882a593Smuzhiyun 	int num_blocks, block_size, block_type, off = 0;
2875*4882a593Smuzhiyun 	char *data;
2876*4882a593Smuzhiyun 	int ret;
2877*4882a593Smuzhiyun 
2878*4882a593Smuzhiyun 	/*
2879*4882a593Smuzhiyun 	 * v4 configuration files have a valid UUID at the start of
2880*4882a593Smuzhiyun 	 * the widget's private data.
2881*4882a593Smuzhiyun 	 */
2882*4882a593Smuzhiyun 	if (uuid_is_valid((char *)tplg_w->priv.data))
2883*4882a593Smuzhiyun 		return skl_tplg_get_pvt_data_v4(tplg_w, skl, dev, mconfig);
2884*4882a593Smuzhiyun 
2885*4882a593Smuzhiyun 	/* Read the NUM_DATA_BLOCKS descriptor */
2886*4882a593Smuzhiyun 	array = (struct snd_soc_tplg_vendor_array *)tplg_w->priv.data;
2887*4882a593Smuzhiyun 	ret = skl_tplg_get_desc_blocks(dev, array);
2888*4882a593Smuzhiyun 	if (ret < 0)
2889*4882a593Smuzhiyun 		return ret;
2890*4882a593Smuzhiyun 	num_blocks = ret;
2891*4882a593Smuzhiyun 
2892*4882a593Smuzhiyun 	off += array->size;
2893*4882a593Smuzhiyun 	/* Read the BLOCK_TYPE and BLOCK_SIZE descriptor */
2894*4882a593Smuzhiyun 	while (num_blocks > 0) {
2895*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)
2896*4882a593Smuzhiyun 				(tplg_w->priv.data + off);
2897*4882a593Smuzhiyun 
2898*4882a593Smuzhiyun 		ret = skl_tplg_get_desc_blocks(dev, array);
2899*4882a593Smuzhiyun 
2900*4882a593Smuzhiyun 		if (ret < 0)
2901*4882a593Smuzhiyun 			return ret;
2902*4882a593Smuzhiyun 		block_type = ret;
2903*4882a593Smuzhiyun 		off += array->size;
2904*4882a593Smuzhiyun 
2905*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)
2906*4882a593Smuzhiyun 			(tplg_w->priv.data + off);
2907*4882a593Smuzhiyun 
2908*4882a593Smuzhiyun 		ret = skl_tplg_get_desc_blocks(dev, array);
2909*4882a593Smuzhiyun 
2910*4882a593Smuzhiyun 		if (ret < 0)
2911*4882a593Smuzhiyun 			return ret;
2912*4882a593Smuzhiyun 		block_size = ret;
2913*4882a593Smuzhiyun 		off += array->size;
2914*4882a593Smuzhiyun 
2915*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)
2916*4882a593Smuzhiyun 			(tplg_w->priv.data + off);
2917*4882a593Smuzhiyun 
2918*4882a593Smuzhiyun 		data = (tplg_w->priv.data + off);
2919*4882a593Smuzhiyun 
2920*4882a593Smuzhiyun 		if (block_type == SKL_TYPE_TUPLE) {
2921*4882a593Smuzhiyun 			ret = skl_tplg_get_tokens(dev, data,
2922*4882a593Smuzhiyun 					skl, mconfig, block_size);
2923*4882a593Smuzhiyun 
2924*4882a593Smuzhiyun 			if (ret < 0)
2925*4882a593Smuzhiyun 				return ret;
2926*4882a593Smuzhiyun 
2927*4882a593Smuzhiyun 			--num_blocks;
2928*4882a593Smuzhiyun 		} else {
2929*4882a593Smuzhiyun 			if (mconfig->formats_config.caps_size > 0)
2930*4882a593Smuzhiyun 				memcpy(mconfig->formats_config.caps, data,
2931*4882a593Smuzhiyun 					mconfig->formats_config.caps_size);
2932*4882a593Smuzhiyun 			--num_blocks;
2933*4882a593Smuzhiyun 			ret = mconfig->formats_config.caps_size;
2934*4882a593Smuzhiyun 		}
2935*4882a593Smuzhiyun 		off += ret;
2936*4882a593Smuzhiyun 	}
2937*4882a593Smuzhiyun 
2938*4882a593Smuzhiyun 	return 0;
2939*4882a593Smuzhiyun }
2940*4882a593Smuzhiyun 
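/*
 * Reset the pin, pipe and module state of a widget that belongs to
 * this component so that the path can be set up again from scratch.
 */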
skl_clear_pin_config(struct snd_soc_component * component,struct snd_soc_dapm_widget * w)2941*4882a593Smuzhiyun static void skl_clear_pin_config(struct snd_soc_component *component,
2942*4882a593Smuzhiyun 				struct snd_soc_dapm_widget *w)
2943*4882a593Smuzhiyun {
2944*4882a593Smuzhiyun 	int i;
2945*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
2946*4882a593Smuzhiyun 	struct skl_pipe *pipe;
2947*4882a593Smuzhiyun 
2948*4882a593Smuzhiyun 	if (!strncmp(w->dapm->component->name, component->name,
2949*4882a593Smuzhiyun 					strlen(component->name))) {
2950*4882a593Smuzhiyun 		mconfig = w->priv;
2951*4882a593Smuzhiyun 		pipe = mconfig->pipe;
2952*4882a593Smuzhiyun 		for (i = 0; i < mconfig->module->max_input_pins; i++) {
2953*4882a593Smuzhiyun 			mconfig->m_in_pin[i].in_use = false;
2954*4882a593Smuzhiyun 			mconfig->m_in_pin[i].pin_state = SKL_PIN_UNBIND;
2955*4882a593Smuzhiyun 		}
2956*4882a593Smuzhiyun 		for (i = 0; i < mconfig->module->max_output_pins; i++) {
2957*4882a593Smuzhiyun 			mconfig->m_out_pin[i].in_use = false;
2958*4882a593Smuzhiyun 			mconfig->m_out_pin[i].pin_state = SKL_PIN_UNBIND;
2959*4882a593Smuzhiyun 		}
2960*4882a593Smuzhiyun 		pipe->state = SKL_PIPE_INVALID;
2961*4882a593Smuzhiyun 		mconfig->m_state = SKL_MODULE_UNINIT;
2962*4882a593Smuzhiyun 	}
2963*4882a593Smuzhiyun }
2964*4882a593Smuzhiyun 
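/*
 * Walk all DSP widgets on the card, reset their pin/pipe/module state
 * and clear the DSP module usage counts.
 */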
skl_cleanup_resources(struct skl_dev * skl)2965*4882a593Smuzhiyun void skl_cleanup_resources(struct skl_dev *skl)
2966*4882a593Smuzhiyun {
2967*4882a593Smuzhiyun 	struct snd_soc_component *soc_component = skl->component;
2968*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
2969*4882a593Smuzhiyun 	struct snd_soc_card *card;
2970*4882a593Smuzhiyun 
2971*4882a593Smuzhiyun 	if (soc_component == NULL)
2972*4882a593Smuzhiyun 		return;
2973*4882a593Smuzhiyun 
2974*4882a593Smuzhiyun 	card = soc_component->card;
2975*4882a593Smuzhiyun 	if (!card || !card->instantiated)
2976*4882a593Smuzhiyun 		return;
2977*4882a593Smuzhiyun 
2978*4882a593Smuzhiyun 	list_for_each_entry(w, &card->widgets, list) {
2979*4882a593Smuzhiyun 		if (is_skl_dsp_widget_type(w, skl->dev) && w->priv != NULL)
2980*4882a593Smuzhiyun 			skl_clear_pin_config(soc_component, w);
2981*4882a593Smuzhiyun 	}
2982*4882a593Smuzhiyun 
2983*4882a593Smuzhiyun 	skl_clear_module_cnt(skl->dsp);
2984*4882a593Smuzhiyun }
2985*4882a593Smuzhiyun 
2986*4882a593Smuzhiyun /*
2987*4882a593Smuzhiyun  * Topology core widget load callback
2988*4882a593Smuzhiyun  *
2989*4882a593Smuzhiyun  * This is used to save the private data for each widget, which gives the
2990*4882a593Smuzhiyun  * driver information about the module and pipeline parameters the DSP
2991*4882a593Smuzhiyun  * firmware expects, such as IDs, resource values and formats.
2992*4882a593Smuzhiyun  */
skl_tplg_widget_load(struct snd_soc_component * cmpnt,int index,struct snd_soc_dapm_widget * w,struct snd_soc_tplg_dapm_widget * tplg_w)2993*4882a593Smuzhiyun static int skl_tplg_widget_load(struct snd_soc_component *cmpnt, int index,
2994*4882a593Smuzhiyun 				struct snd_soc_dapm_widget *w,
2995*4882a593Smuzhiyun 				struct snd_soc_tplg_dapm_widget *tplg_w)
2996*4882a593Smuzhiyun {
2997*4882a593Smuzhiyun 	int ret;
2998*4882a593Smuzhiyun 	struct hdac_bus *bus = snd_soc_component_get_drvdata(cmpnt);
2999*4882a593Smuzhiyun 	struct skl_dev *skl = bus_to_skl(bus);
3000*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
3001*4882a593Smuzhiyun 
3002*4882a593Smuzhiyun 	if (!tplg_w->priv.size)
3003*4882a593Smuzhiyun 		goto bind_event;
3004*4882a593Smuzhiyun 
3005*4882a593Smuzhiyun 	mconfig = devm_kzalloc(bus->dev, sizeof(*mconfig), GFP_KERNEL);
3006*4882a593Smuzhiyun 
3007*4882a593Smuzhiyun 	if (!mconfig)
3008*4882a593Smuzhiyun 		return -ENOMEM;
3009*4882a593Smuzhiyun 
3010*4882a593Smuzhiyun 	if (skl->nr_modules == 0) {
3011*4882a593Smuzhiyun 		mconfig->module = devm_kzalloc(bus->dev,
3012*4882a593Smuzhiyun 				sizeof(*mconfig->module), GFP_KERNEL);
3013*4882a593Smuzhiyun 		if (!mconfig->module)
3014*4882a593Smuzhiyun 			return -ENOMEM;
3015*4882a593Smuzhiyun 	}
3016*4882a593Smuzhiyun 
3017*4882a593Smuzhiyun 	w->priv = mconfig;
3018*4882a593Smuzhiyun 
3019*4882a593Smuzhiyun 	/*
3020*4882a593Smuzhiyun 	 * The module binary can be loaded later, so default the module id
3021*4882a593Smuzhiyun 	 * to -1; it is queried when the module is loaded for a use case.
3022*4882a593Smuzhiyun 	 */
3023*4882a593Smuzhiyun 	mconfig->id.module_id = -1;
3024*4882a593Smuzhiyun 
3025*4882a593Smuzhiyun 	/* Parse private data for tuples */
3026*4882a593Smuzhiyun 	ret = skl_tplg_get_pvt_data(tplg_w, skl, bus->dev, mconfig);
3027*4882a593Smuzhiyun 	if (ret < 0)
3028*4882a593Smuzhiyun 		return ret;
3029*4882a593Smuzhiyun 
3030*4882a593Smuzhiyun 	skl_debug_init_module(skl->debugfs, w, mconfig);
3031*4882a593Smuzhiyun 
3032*4882a593Smuzhiyun bind_event:
3033*4882a593Smuzhiyun 	if (tplg_w->event_type == 0) {
3034*4882a593Smuzhiyun 		dev_dbg(bus->dev, "ASoC: No event handler required\n");
3035*4882a593Smuzhiyun 		return 0;
3036*4882a593Smuzhiyun 	}
3037*4882a593Smuzhiyun 
3038*4882a593Smuzhiyun 	ret = snd_soc_tplg_widget_bind_event(w, skl_tplg_widget_ops,
3039*4882a593Smuzhiyun 					ARRAY_SIZE(skl_tplg_widget_ops),
3040*4882a593Smuzhiyun 					tplg_w->event_type);
3041*4882a593Smuzhiyun 
3042*4882a593Smuzhiyun 	if (ret) {
3043*4882a593Smuzhiyun 		dev_err(bus->dev, "%s: No matching event handlers found for %d\n",
3044*4882a593Smuzhiyun 					__func__, tplg_w->event_type);
3045*4882a593Smuzhiyun 		return -EINVAL;
3046*4882a593Smuzhiyun 	}
3047*4882a593Smuzhiyun 
3048*4882a593Smuzhiyun 	return 0;
3049*4882a593Smuzhiyun }
3050*4882a593Smuzhiyun 
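/*
 * Allocate the driver side data for a bytes (algo) control and copy
 * the default parameter blob from the topology private data.
 */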
skl_init_algo_data(struct device * dev,struct soc_bytes_ext * be,struct snd_soc_tplg_bytes_control * bc)3051*4882a593Smuzhiyun static int skl_init_algo_data(struct device *dev, struct soc_bytes_ext *be,
3052*4882a593Smuzhiyun 					struct snd_soc_tplg_bytes_control *bc)
3053*4882a593Smuzhiyun {
3054*4882a593Smuzhiyun 	struct skl_algo_data *ac;
3055*4882a593Smuzhiyun 	struct skl_dfw_algo_data *dfw_ac =
3056*4882a593Smuzhiyun 				(struct skl_dfw_algo_data *)bc->priv.data;
3057*4882a593Smuzhiyun 
3058*4882a593Smuzhiyun 	ac = devm_kzalloc(dev, sizeof(*ac), GFP_KERNEL);
3059*4882a593Smuzhiyun 	if (!ac)
3060*4882a593Smuzhiyun 		return -ENOMEM;
3061*4882a593Smuzhiyun 
3062*4882a593Smuzhiyun 	/* Fill private data */
3063*4882a593Smuzhiyun 	ac->max = dfw_ac->max;
3064*4882a593Smuzhiyun 	ac->param_id = dfw_ac->param_id;
3065*4882a593Smuzhiyun 	ac->set_params = dfw_ac->set_params;
3066*4882a593Smuzhiyun 	ac->size = dfw_ac->max;
3067*4882a593Smuzhiyun 
3068*4882a593Smuzhiyun 	if (ac->max) {
3069*4882a593Smuzhiyun 		ac->params = devm_kzalloc(dev, ac->max, GFP_KERNEL);
3070*4882a593Smuzhiyun 		if (!ac->params)
3071*4882a593Smuzhiyun 			return -ENOMEM;
3072*4882a593Smuzhiyun 
3073*4882a593Smuzhiyun 		memcpy(ac->params, dfw_ac->params, ac->max);
3074*4882a593Smuzhiyun 	}
3075*4882a593Smuzhiyun 
3076*4882a593Smuzhiyun 	be->dobj.private  = ac;
3077*4882a593Smuzhiyun 	return 0;
3078*4882a593Smuzhiyun }
3079*4882a593Smuzhiyun 
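/*
 * Copy an enum control's topology private data so it is available to
 * the control's get/put handlers at runtime.
 */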
skl_init_enum_data(struct device * dev,struct soc_enum * se,struct snd_soc_tplg_enum_control * ec)3080*4882a593Smuzhiyun static int skl_init_enum_data(struct device *dev, struct soc_enum *se,
3081*4882a593Smuzhiyun 				struct snd_soc_tplg_enum_control *ec)
3082*4882a593Smuzhiyun {
3083*4882a593Smuzhiyun 
3084*4882a593Smuzhiyun 	void *data;
3085*4882a593Smuzhiyun 
3086*4882a593Smuzhiyun 	if (ec->priv.size) {
3087*4882a593Smuzhiyun 		data = devm_kzalloc(dev, ec->priv.size, GFP_KERNEL);
3088*4882a593Smuzhiyun 		if (!data)
3089*4882a593Smuzhiyun 			return -ENOMEM;
3090*4882a593Smuzhiyun 		memcpy(data, ec->priv.data, ec->priv.size);
3091*4882a593Smuzhiyun 		se->dobj.private = data;
3092*4882a593Smuzhiyun 	}
3093*4882a593Smuzhiyun 
3094*4882a593Smuzhiyun 	return 0;
3095*4882a593Smuzhiyun 
3096*4882a593Smuzhiyun }
3097*4882a593Smuzhiyun 
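/*
 * Topology control load callback: attach driver private data to bytes
 * and enum controls and make the DMIC configuration enums read only.
 */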
skl_tplg_control_load(struct snd_soc_component * cmpnt,int index,struct snd_kcontrol_new * kctl,struct snd_soc_tplg_ctl_hdr * hdr)3098*4882a593Smuzhiyun static int skl_tplg_control_load(struct snd_soc_component *cmpnt,
3099*4882a593Smuzhiyun 				int index,
3100*4882a593Smuzhiyun 				struct snd_kcontrol_new *kctl,
3101*4882a593Smuzhiyun 				struct snd_soc_tplg_ctl_hdr *hdr)
3102*4882a593Smuzhiyun {
3103*4882a593Smuzhiyun 	struct soc_bytes_ext *sb;
3104*4882a593Smuzhiyun 	struct snd_soc_tplg_bytes_control *tplg_bc;
3105*4882a593Smuzhiyun 	struct snd_soc_tplg_enum_control *tplg_ec;
3106*4882a593Smuzhiyun 	struct hdac_bus *bus  = snd_soc_component_get_drvdata(cmpnt);
3107*4882a593Smuzhiyun 	struct soc_enum *se;
3108*4882a593Smuzhiyun 
3109*4882a593Smuzhiyun 	switch (hdr->ops.info) {
3110*4882a593Smuzhiyun 	case SND_SOC_TPLG_CTL_BYTES:
3111*4882a593Smuzhiyun 		tplg_bc = container_of(hdr,
3112*4882a593Smuzhiyun 				struct snd_soc_tplg_bytes_control, hdr);
3113*4882a593Smuzhiyun 		if (kctl->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
3114*4882a593Smuzhiyun 			sb = (struct soc_bytes_ext *)kctl->private_value;
3115*4882a593Smuzhiyun 			if (tplg_bc->priv.size)
3116*4882a593Smuzhiyun 				return skl_init_algo_data(
3117*4882a593Smuzhiyun 						bus->dev, sb, tplg_bc);
3118*4882a593Smuzhiyun 		}
3119*4882a593Smuzhiyun 		break;
3120*4882a593Smuzhiyun 
3121*4882a593Smuzhiyun 	case SND_SOC_TPLG_CTL_ENUM:
3122*4882a593Smuzhiyun 		tplg_ec = container_of(hdr,
3123*4882a593Smuzhiyun 				struct snd_soc_tplg_enum_control, hdr);
3124*4882a593Smuzhiyun 		if (kctl->access & SNDRV_CTL_ELEM_ACCESS_READ) {
3125*4882a593Smuzhiyun 			se = (struct soc_enum *)kctl->private_value;
3126*4882a593Smuzhiyun 			if (tplg_ec->priv.size)
3127*4882a593Smuzhiyun 				skl_init_enum_data(bus->dev, se, tplg_ec);
3128*4882a593Smuzhiyun 		}
3129*4882a593Smuzhiyun 
3130*4882a593Smuzhiyun 		/*
3131*4882a593Smuzhiyun 		 * now that the control initializations are done, remove
3132*4882a593Smuzhiyun 		 * write permission for the DMIC configuration enums to
3133*4882a593Smuzhiyun 		 * avoid conflicts between NHLT settings and user interaction
3134*4882a593Smuzhiyun 		 */
3135*4882a593Smuzhiyun 
3136*4882a593Smuzhiyun 		if (hdr->ops.get == SKL_CONTROL_TYPE_MULTI_IO_SELECT_DMIC)
3137*4882a593Smuzhiyun 			kctl->access = SNDRV_CTL_ELEM_ACCESS_READ;
3138*4882a593Smuzhiyun 
3139*4882a593Smuzhiyun 		break;
3140*4882a593Smuzhiyun 
3141*4882a593Smuzhiyun 	default:
3142*4882a593Smuzhiyun 		dev_dbg(bus->dev, "Control load not supported %d:%d:%d\n",
3143*4882a593Smuzhiyun 			hdr->ops.get, hdr->ops.put, hdr->ops.info);
3144*4882a593Smuzhiyun 		break;
3145*4882a593Smuzhiyun 	}
3146*4882a593Smuzhiyun 
3147*4882a593Smuzhiyun 	return 0;
3148*4882a593Smuzhiyun }
3149*4882a593Smuzhiyun 
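/*
 * Parse a manifest string token; currently only the loadable library
 * names are passed as strings.
 */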
skl_tplg_fill_str_mfest_tkn(struct device * dev,struct snd_soc_tplg_vendor_string_elem * str_elem,struct skl_dev * skl)3150*4882a593Smuzhiyun static int skl_tplg_fill_str_mfest_tkn(struct device *dev,
3151*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_string_elem *str_elem,
3152*4882a593Smuzhiyun 		struct skl_dev *skl)
3153*4882a593Smuzhiyun {
3154*4882a593Smuzhiyun 	int tkn_count = 0;
3155*4882a593Smuzhiyun 	static int ref_count;
3156*4882a593Smuzhiyun 
3157*4882a593Smuzhiyun 	switch (str_elem->token) {
3158*4882a593Smuzhiyun 	case SKL_TKN_STR_LIB_NAME:
3159*4882a593Smuzhiyun 		if (ref_count > skl->lib_count - 1) {
3160*4882a593Smuzhiyun 			ref_count = 0;
3161*4882a593Smuzhiyun 			return -EINVAL;
3162*4882a593Smuzhiyun 		}
3163*4882a593Smuzhiyun 
3164*4882a593Smuzhiyun 		strscpy(skl->lib_info[ref_count].name,
3165*4882a593Smuzhiyun 			str_elem->string,
3166*4882a593Smuzhiyun 			ARRAY_SIZE(skl->lib_info[ref_count].name));
3167*4882a593Smuzhiyun 		ref_count++;
3168*4882a593Smuzhiyun 		break;
3169*4882a593Smuzhiyun 
3170*4882a593Smuzhiyun 	default:
3171*4882a593Smuzhiyun 		dev_err(dev, "Not a string token %d\n", str_elem->token);
3172*4882a593Smuzhiyun 		break;
3173*4882a593Smuzhiyun 	}
3174*4882a593Smuzhiyun 	tkn_count++;
3175*4882a593Smuzhiyun 
3176*4882a593Smuzhiyun 	return tkn_count;
3177*4882a593Smuzhiyun }
3178*4882a593Smuzhiyun 
skl_tplg_get_str_tkn(struct device * dev,struct snd_soc_tplg_vendor_array * array,struct skl_dev * skl)3179*4882a593Smuzhiyun static int skl_tplg_get_str_tkn(struct device *dev,
3180*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_array *array,
3181*4882a593Smuzhiyun 		struct skl_dev *skl)
3182*4882a593Smuzhiyun {
3183*4882a593Smuzhiyun 	int tkn_count = 0, ret;
3184*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_string_elem *str_elem;
3185*4882a593Smuzhiyun 
3186*4882a593Smuzhiyun 	str_elem = (struct snd_soc_tplg_vendor_string_elem *)array->value;
3187*4882a593Smuzhiyun 	while (tkn_count < array->num_elems) {
3188*4882a593Smuzhiyun 		ret = skl_tplg_fill_str_mfest_tkn(dev, str_elem, skl);
3189*4882a593Smuzhiyun 		str_elem++;
3190*4882a593Smuzhiyun 
3191*4882a593Smuzhiyun 		if (ret < 0)
3192*4882a593Smuzhiyun 			return ret;
3193*4882a593Smuzhiyun 
3194*4882a593Smuzhiyun 		tkn_count = tkn_count + ret;
3195*4882a593Smuzhiyun 	}
3196*4882a593Smuzhiyun 
3197*4882a593Smuzhiyun 	return tkn_count;
3198*4882a593Smuzhiyun }
3199*4882a593Smuzhiyun 
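/*
 * Fill a pin interface format from the manifest: the interface pin id
 * is handled here, all other format tokens are passed on to
 * skl_tplg_fill_fmt().
 */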
skl_tplg_manifest_fill_fmt(struct device * dev,struct skl_module_iface * fmt,struct snd_soc_tplg_vendor_value_elem * tkn_elem,u32 dir,int fmt_idx)3200*4882a593Smuzhiyun static int skl_tplg_manifest_fill_fmt(struct device *dev,
3201*4882a593Smuzhiyun 		struct skl_module_iface *fmt,
3202*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3203*4882a593Smuzhiyun 		u32 dir, int fmt_idx)
3204*4882a593Smuzhiyun {
3205*4882a593Smuzhiyun 	struct skl_module_pin_fmt *dst_fmt;
3206*4882a593Smuzhiyun 	struct skl_module_fmt *mod_fmt;
3207*4882a593Smuzhiyun 	int ret;
3208*4882a593Smuzhiyun 
3209*4882a593Smuzhiyun 	if (!fmt)
3210*4882a593Smuzhiyun 		return -EINVAL;
3211*4882a593Smuzhiyun 
3212*4882a593Smuzhiyun 	switch (dir) {
3213*4882a593Smuzhiyun 	case SKL_DIR_IN:
3214*4882a593Smuzhiyun 		dst_fmt = &fmt->inputs[fmt_idx];
3215*4882a593Smuzhiyun 		break;
3216*4882a593Smuzhiyun 
3217*4882a593Smuzhiyun 	case SKL_DIR_OUT:
3218*4882a593Smuzhiyun 		dst_fmt = &fmt->outputs[fmt_idx];
3219*4882a593Smuzhiyun 		break;
3220*4882a593Smuzhiyun 
3221*4882a593Smuzhiyun 	default:
3222*4882a593Smuzhiyun 		dev_err(dev, "Invalid direction: %d\n", dir);
3223*4882a593Smuzhiyun 		return -EINVAL;
3224*4882a593Smuzhiyun 	}
3225*4882a593Smuzhiyun 
3226*4882a593Smuzhiyun 	mod_fmt = &dst_fmt->fmt;
3227*4882a593Smuzhiyun 
3228*4882a593Smuzhiyun 	switch (tkn_elem->token) {
3229*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_INTF_PIN_ID:
3230*4882a593Smuzhiyun 		dst_fmt->id = tkn_elem->value;
3231*4882a593Smuzhiyun 		break;
3232*4882a593Smuzhiyun 
3233*4882a593Smuzhiyun 	default:
3234*4882a593Smuzhiyun 		ret = skl_tplg_fill_fmt(dev, mod_fmt, tkn_elem->token,
3235*4882a593Smuzhiyun 					tkn_elem->value);
3236*4882a593Smuzhiyun 		if (ret < 0)
3237*4882a593Smuzhiyun 			return ret;
3238*4882a593Smuzhiyun 		break;
3239*4882a593Smuzhiyun 	}
3240*4882a593Smuzhiyun 
3241*4882a593Smuzhiyun 	return 0;
3242*4882a593Smuzhiyun }
3243*4882a593Smuzhiyun 
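/*
 * Fill the per module properties (pin types, queue counts and the
 * number of resource/interface configurations) from manifest tokens.
 */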
skl_tplg_fill_mod_info(struct device * dev,struct snd_soc_tplg_vendor_value_elem * tkn_elem,struct skl_module * mod)3244*4882a593Smuzhiyun static int skl_tplg_fill_mod_info(struct device *dev,
3245*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3246*4882a593Smuzhiyun 		struct skl_module *mod)
3247*4882a593Smuzhiyun {
3248*4882a593Smuzhiyun 
3249*4882a593Smuzhiyun 	if (!mod)
3250*4882a593Smuzhiyun 		return -EINVAL;
3251*4882a593Smuzhiyun 
3252*4882a593Smuzhiyun 	switch (tkn_elem->token) {
3253*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_PIN_TYPE:
3254*4882a593Smuzhiyun 		mod->input_pin_type = tkn_elem->value;
3255*4882a593Smuzhiyun 		break;
3256*4882a593Smuzhiyun 
3257*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_PIN_TYPE:
3258*4882a593Smuzhiyun 		mod->output_pin_type = tkn_elem->value;
3259*4882a593Smuzhiyun 		break;
3260*4882a593Smuzhiyun 
3261*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_QUEUE_COUNT:
3262*4882a593Smuzhiyun 		mod->max_input_pins = tkn_elem->value;
3263*4882a593Smuzhiyun 		break;
3264*4882a593Smuzhiyun 
3265*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_QUEUE_COUNT:
3266*4882a593Smuzhiyun 		mod->max_output_pins = tkn_elem->value;
3267*4882a593Smuzhiyun 		break;
3268*4882a593Smuzhiyun 
3269*4882a593Smuzhiyun 	case SKL_TKN_MM_U8_NUM_RES:
3270*4882a593Smuzhiyun 		mod->nr_resources = tkn_elem->value;
3271*4882a593Smuzhiyun 		break;
3272*4882a593Smuzhiyun 
3273*4882a593Smuzhiyun 	case SKL_TKN_MM_U8_NUM_INTF:
3274*4882a593Smuzhiyun 		mod->nr_interfaces = tkn_elem->value;
3275*4882a593Smuzhiyun 		break;
3276*4882a593Smuzhiyun 
3277*4882a593Smuzhiyun 	default:
3278*4882a593Smuzhiyun 		dev_err(dev, "Invalid mod info token %d\n", tkn_elem->token);
3279*4882a593Smuzhiyun 		return -EINVAL;
3280*4882a593Smuzhiyun 	}
3281*4882a593Smuzhiyun 
3282*4882a593Smuzhiyun 	return 0;
3283*4882a593Smuzhiyun }
3284*4882a593Smuzhiyun 
3285*4882a593Smuzhiyun 
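/*
 * Parse a single integer valued manifest token. The module, resource,
 * interface and pin indices seen earlier are kept in static variables
 * and select where subsequent values are stored.
 */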
skl_tplg_get_int_tkn(struct device * dev,struct snd_soc_tplg_vendor_value_elem * tkn_elem,struct skl_dev * skl)3286*4882a593Smuzhiyun static int skl_tplg_get_int_tkn(struct device *dev,
3287*4882a593Smuzhiyun 		struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3288*4882a593Smuzhiyun 		struct skl_dev *skl)
3289*4882a593Smuzhiyun {
3290*4882a593Smuzhiyun 	int tkn_count = 0, ret;
3291*4882a593Smuzhiyun 	static int mod_idx, res_val_idx, intf_val_idx, dir, pin_idx;
3292*4882a593Smuzhiyun 	struct skl_module_res *res = NULL;
3293*4882a593Smuzhiyun 	struct skl_module_iface *fmt = NULL;
3294*4882a593Smuzhiyun 	struct skl_module *mod = NULL;
3295*4882a593Smuzhiyun 	static struct skl_astate_param *astate_table;
3296*4882a593Smuzhiyun 	static int astate_cfg_idx, count;
3297*4882a593Smuzhiyun 	int i;
3298*4882a593Smuzhiyun 	size_t size;
3299*4882a593Smuzhiyun 
3300*4882a593Smuzhiyun 	if (skl->modules) {
3301*4882a593Smuzhiyun 		mod = skl->modules[mod_idx];
3302*4882a593Smuzhiyun 		res = &mod->resources[res_val_idx];
3303*4882a593Smuzhiyun 		fmt = &mod->formats[intf_val_idx];
3304*4882a593Smuzhiyun 	}
3305*4882a593Smuzhiyun 
3306*4882a593Smuzhiyun 	switch (tkn_elem->token) {
3307*4882a593Smuzhiyun 	case SKL_TKN_U32_LIB_COUNT:
3308*4882a593Smuzhiyun 		skl->lib_count = tkn_elem->value;
3309*4882a593Smuzhiyun 		break;
3310*4882a593Smuzhiyun 
3311*4882a593Smuzhiyun 	case SKL_TKN_U8_NUM_MOD:
3312*4882a593Smuzhiyun 		skl->nr_modules = tkn_elem->value;
3313*4882a593Smuzhiyun 		skl->modules = devm_kcalloc(dev, skl->nr_modules,
3314*4882a593Smuzhiyun 				sizeof(*skl->modules), GFP_KERNEL);
3315*4882a593Smuzhiyun 		if (!skl->modules)
3316*4882a593Smuzhiyun 			return -ENOMEM;
3317*4882a593Smuzhiyun 
3318*4882a593Smuzhiyun 		for (i = 0; i < skl->nr_modules; i++) {
3319*4882a593Smuzhiyun 			skl->modules[i] = devm_kzalloc(dev,
3320*4882a593Smuzhiyun 					sizeof(struct skl_module), GFP_KERNEL);
3321*4882a593Smuzhiyun 			if (!skl->modules[i])
3322*4882a593Smuzhiyun 				return -ENOMEM;
3323*4882a593Smuzhiyun 		}
3324*4882a593Smuzhiyun 		break;
3325*4882a593Smuzhiyun 
3326*4882a593Smuzhiyun 	case SKL_TKN_MM_U8_MOD_IDX:
3327*4882a593Smuzhiyun 		mod_idx = tkn_elem->value;
3328*4882a593Smuzhiyun 		break;
3329*4882a593Smuzhiyun 
3330*4882a593Smuzhiyun 	case SKL_TKN_U32_ASTATE_COUNT:
3331*4882a593Smuzhiyun 		if (astate_table != NULL) {
3332*4882a593Smuzhiyun 			dev_err(dev, "More than one entry for A-State count\n");
3333*4882a593Smuzhiyun 			return -EINVAL;
3334*4882a593Smuzhiyun 		}
3335*4882a593Smuzhiyun 
3336*4882a593Smuzhiyun 		if (tkn_elem->value > SKL_MAX_ASTATE_CFG) {
3337*4882a593Smuzhiyun 			dev_err(dev, "Invalid A-State count %d\n",
3338*4882a593Smuzhiyun 				tkn_elem->value);
3339*4882a593Smuzhiyun 			return -EINVAL;
3340*4882a593Smuzhiyun 		}
3341*4882a593Smuzhiyun 
3342*4882a593Smuzhiyun 		size = struct_size(skl->cfg.astate_cfg, astate_table,
3343*4882a593Smuzhiyun 				   tkn_elem->value);
3344*4882a593Smuzhiyun 		skl->cfg.astate_cfg = devm_kzalloc(dev, size, GFP_KERNEL);
3345*4882a593Smuzhiyun 		if (!skl->cfg.astate_cfg)
3346*4882a593Smuzhiyun 			return -ENOMEM;
3347*4882a593Smuzhiyun 
3348*4882a593Smuzhiyun 		astate_table = skl->cfg.astate_cfg->astate_table;
3349*4882a593Smuzhiyun 		count = skl->cfg.astate_cfg->count = tkn_elem->value;
3350*4882a593Smuzhiyun 		break;
3351*4882a593Smuzhiyun 
3352*4882a593Smuzhiyun 	case SKL_TKN_U32_ASTATE_IDX:
3353*4882a593Smuzhiyun 		if (tkn_elem->value >= count) {
3354*4882a593Smuzhiyun 			dev_err(dev, "Invalid A-State index %d\n",
3355*4882a593Smuzhiyun 				tkn_elem->value);
3356*4882a593Smuzhiyun 			return -EINVAL;
3357*4882a593Smuzhiyun 		}
3358*4882a593Smuzhiyun 
3359*4882a593Smuzhiyun 		astate_cfg_idx = tkn_elem->value;
3360*4882a593Smuzhiyun 		break;
3361*4882a593Smuzhiyun 
3362*4882a593Smuzhiyun 	case SKL_TKN_U32_ASTATE_KCPS:
3363*4882a593Smuzhiyun 		astate_table[astate_cfg_idx].kcps = tkn_elem->value;
3364*4882a593Smuzhiyun 		break;
3365*4882a593Smuzhiyun 
3366*4882a593Smuzhiyun 	case SKL_TKN_U32_ASTATE_CLK_SRC:
3367*4882a593Smuzhiyun 		astate_table[astate_cfg_idx].clk_src = tkn_elem->value;
3368*4882a593Smuzhiyun 		break;
3369*4882a593Smuzhiyun 
3370*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_PIN_TYPE:
3371*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_PIN_TYPE:
3372*4882a593Smuzhiyun 	case SKL_TKN_U8_IN_QUEUE_COUNT:
3373*4882a593Smuzhiyun 	case SKL_TKN_U8_OUT_QUEUE_COUNT:
3374*4882a593Smuzhiyun 	case SKL_TKN_MM_U8_NUM_RES:
3375*4882a593Smuzhiyun 	case SKL_TKN_MM_U8_NUM_INTF:
3376*4882a593Smuzhiyun 		ret = skl_tplg_fill_mod_info(dev, tkn_elem, mod);
3377*4882a593Smuzhiyun 		if (ret < 0)
3378*4882a593Smuzhiyun 			return ret;
3379*4882a593Smuzhiyun 		break;
3380*4882a593Smuzhiyun 
3381*4882a593Smuzhiyun 	case SKL_TKN_U32_DIR_PIN_COUNT:
3382*4882a593Smuzhiyun 		dir = tkn_elem->value & SKL_IN_DIR_BIT_MASK;
3383*4882a593Smuzhiyun 		pin_idx = (tkn_elem->value & SKL_PIN_COUNT_MASK) >> 4;
3384*4882a593Smuzhiyun 		break;
3385*4882a593Smuzhiyun 
3386*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_RES_ID:
3387*4882a593Smuzhiyun 		if (!res)
3388*4882a593Smuzhiyun 			return -EINVAL;
3389*4882a593Smuzhiyun 
3390*4882a593Smuzhiyun 		res->id = tkn_elem->value;
3391*4882a593Smuzhiyun 		res_val_idx = tkn_elem->value;
3392*4882a593Smuzhiyun 		break;
3393*4882a593Smuzhiyun 
3394*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_FMT_ID:
3395*4882a593Smuzhiyun 		if (!fmt)
3396*4882a593Smuzhiyun 			return -EINVAL;
3397*4882a593Smuzhiyun 
3398*4882a593Smuzhiyun 		fmt->fmt_idx = tkn_elem->value;
3399*4882a593Smuzhiyun 		intf_val_idx = tkn_elem->value;
3400*4882a593Smuzhiyun 		break;
3401*4882a593Smuzhiyun 
3402*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_CPS:
3403*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_DMA_SIZE:
3404*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_CPC:
3405*4882a593Smuzhiyun 	case SKL_TKN_U32_MEM_PAGES:
3406*4882a593Smuzhiyun 	case SKL_TKN_U32_OBS:
3407*4882a593Smuzhiyun 	case SKL_TKN_U32_IBS:
3408*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_RES_PIN_ID:
3409*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_PIN_BUF:
3410*4882a593Smuzhiyun 		ret = skl_tplg_fill_res_tkn(dev, tkn_elem, res, pin_idx, dir);
3411*4882a593Smuzhiyun 		if (ret < 0)
3412*4882a593Smuzhiyun 			return ret;
3413*4882a593Smuzhiyun 
3414*4882a593Smuzhiyun 		break;
3415*4882a593Smuzhiyun 
3416*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_NUM_IN_FMT:
3417*4882a593Smuzhiyun 		if (!res)
3418*4882a593Smuzhiyun 			return -EINVAL;
3419*4882a593Smuzhiyun 
3420*4882a593Smuzhiyun 		res->nr_input_pins = tkn_elem->value;
3421*4882a593Smuzhiyun 		break;
3422*4882a593Smuzhiyun 
3423*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_NUM_OUT_FMT:
3424*4882a593Smuzhiyun 		if (!res)
3425*4882a593Smuzhiyun 			return -EINVAL;
3426*4882a593Smuzhiyun 
3427*4882a593Smuzhiyun 		res->nr_output_pins = tkn_elem->value;
3428*4882a593Smuzhiyun 		break;
3429*4882a593Smuzhiyun 
3430*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH:
3431*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_FREQ:
3432*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_BIT_DEPTH:
3433*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_SIZE:
3434*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_CONFIG:
3435*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_INTERLEAVE:
3436*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_SAMPLE_TYPE:
3437*4882a593Smuzhiyun 	case SKL_TKN_U32_FMT_CH_MAP:
3438*4882a593Smuzhiyun 	case SKL_TKN_MM_U32_INTF_PIN_ID:
3439*4882a593Smuzhiyun 		ret = skl_tplg_manifest_fill_fmt(dev, fmt, tkn_elem,
3440*4882a593Smuzhiyun 						 dir, pin_idx);
3441*4882a593Smuzhiyun 		if (ret < 0)
3442*4882a593Smuzhiyun 			return ret;
3443*4882a593Smuzhiyun 		break;
3444*4882a593Smuzhiyun 
3445*4882a593Smuzhiyun 	default:
3446*4882a593Smuzhiyun 		dev_err(dev, "Not a manifest token %d\n", tkn_elem->token);
3447*4882a593Smuzhiyun 		return -EINVAL;
3448*4882a593Smuzhiyun 	}
3449*4882a593Smuzhiyun 	tkn_count++;
3450*4882a593Smuzhiyun 
3451*4882a593Smuzhiyun 	return tkn_count;
3452*4882a593Smuzhiyun }
3453*4882a593Smuzhiyun 
3454*4882a593Smuzhiyun /*
3455*4882a593Smuzhiyun  * Fill the manifest structure by parsing the tokens based on the
3456*4882a593Smuzhiyun  * type.
3457*4882a593Smuzhiyun  */
3458*4882a593Smuzhiyun static int skl_tplg_get_manifest_tkn(struct device *dev,
3459*4882a593Smuzhiyun 		char *pvt_data, struct skl_dev *skl,
3460*4882a593Smuzhiyun 		int block_size)
3461*4882a593Smuzhiyun {
3462*4882a593Smuzhiyun 	int tkn_count = 0, ret;
3463*4882a593Smuzhiyun 	int off = 0, tuple_size = 0;
3464*4882a593Smuzhiyun 	u8 uuid_index = 0;
3465*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_array *array;
3466*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_value_elem *tkn_elem;
3467*4882a593Smuzhiyun 
3468*4882a593Smuzhiyun 	if (block_size <= 0)
3469*4882a593Smuzhiyun 		return -EINVAL;
3470*4882a593Smuzhiyun 
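	/*
	 * Walk the vendor arrays in the private data block: string and UUID
	 * tuples are consumed whole, while value tuples are handed element by
	 * element to skl_tplg_get_int_tkn(). tuple_size tracks how much of
	 * block_size has been consumed; off tracks the raw byte offset of the
	 * next array.
	 */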
3471*4882a593Smuzhiyun 	while (tuple_size < block_size) {
3472*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)(pvt_data + off);
3473*4882a593Smuzhiyun 		off += array->size;
3474*4882a593Smuzhiyun 		switch (array->type) {
3475*4882a593Smuzhiyun 		case SND_SOC_TPLG_TUPLE_TYPE_STRING:
3476*4882a593Smuzhiyun 			ret = skl_tplg_get_str_tkn(dev, array, skl);
3477*4882a593Smuzhiyun 
3478*4882a593Smuzhiyun 			if (ret < 0)
3479*4882a593Smuzhiyun 				return ret;
3480*4882a593Smuzhiyun 			tkn_count = ret;
3481*4882a593Smuzhiyun 
3482*4882a593Smuzhiyun 			tuple_size += tkn_count *
3483*4882a593Smuzhiyun 				sizeof(struct snd_soc_tplg_vendor_string_elem);
3484*4882a593Smuzhiyun 			continue;
3485*4882a593Smuzhiyun 
3486*4882a593Smuzhiyun 		case SND_SOC_TPLG_TUPLE_TYPE_UUID:
3487*4882a593Smuzhiyun 			if (array->uuid->token != SKL_TKN_UUID) {
3488*4882a593Smuzhiyun 				dev_err(dev, "Not an UUID token: %d\n",
3489*4882a593Smuzhiyun 					array->uuid->token);
3490*4882a593Smuzhiyun 				return -EINVAL;
3491*4882a593Smuzhiyun 			}
3492*4882a593Smuzhiyun 			if (uuid_index >= skl->nr_modules) {
3493*4882a593Smuzhiyun 				dev_err(dev, "Too many UUID tokens\n");
3494*4882a593Smuzhiyun 				return -EINVAL;
3495*4882a593Smuzhiyun 			}
3496*4882a593Smuzhiyun 			import_guid(&skl->modules[uuid_index++]->uuid,
3497*4882a593Smuzhiyun 				    array->uuid->uuid);
3498*4882a593Smuzhiyun 
3499*4882a593Smuzhiyun 			tuple_size += sizeof(*array->uuid);
3500*4882a593Smuzhiyun 			continue;
3501*4882a593Smuzhiyun 
3502*4882a593Smuzhiyun 		default:
3503*4882a593Smuzhiyun 			tkn_elem = array->value;
3504*4882a593Smuzhiyun 			tkn_count = 0;
3505*4882a593Smuzhiyun 			break;
3506*4882a593Smuzhiyun 		}
3507*4882a593Smuzhiyun 
3508*4882a593Smuzhiyun 		while (tkn_count < array->num_elems) {
3509*4882a593Smuzhiyun 			ret = skl_tplg_get_int_tkn(dev,
3510*4882a593Smuzhiyun 					tkn_elem, skl);
3511*4882a593Smuzhiyun 			if (ret < 0)
3512*4882a593Smuzhiyun 				return ret;
3513*4882a593Smuzhiyun 
3514*4882a593Smuzhiyun 			tkn_count = tkn_count + ret;
3515*4882a593Smuzhiyun 			tkn_elem++;
3516*4882a593Smuzhiyun 		}
3517*4882a593Smuzhiyun 		tuple_size += (tkn_count * sizeof(*tkn_elem));
3518*4882a593Smuzhiyun 		tkn_count = 0;
3519*4882a593Smuzhiyun 	}
3520*4882a593Smuzhiyun 
3521*4882a593Smuzhiyun 	return off;
3522*4882a593Smuzhiyun }
3523*4882a593Smuzhiyun 
3524*4882a593Smuzhiyun /*
3525*4882a593Smuzhiyun  * Parse the manifest private data for tokens. Each private data block is
3526*4882a593Smuzhiyun  * preceded by descriptors giving the type and size of the data block.
3527*4882a593Smuzhiyun  */
3528*4882a593Smuzhiyun static int skl_tplg_get_manifest_data(struct snd_soc_tplg_manifest *manifest,
3529*4882a593Smuzhiyun 			struct device *dev, struct skl_dev *skl)
3530*4882a593Smuzhiyun {
3531*4882a593Smuzhiyun 	struct snd_soc_tplg_vendor_array *array;
3532*4882a593Smuzhiyun 	int num_blocks, block_size = 0, block_type, off = 0;
3533*4882a593Smuzhiyun 	char *data;
3534*4882a593Smuzhiyun 	int ret;
3535*4882a593Smuzhiyun 
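	/*
	 * Layout assumed by the parsing below: a NUM_DATA_BLOCKS descriptor,
	 * followed for each block by a BLOCK_TYPE descriptor, a BLOCK_SIZE
	 * descriptor and then the block payload itself.
	 */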
3536*4882a593Smuzhiyun 	/* Read the NUM_DATA_BLOCKS descriptor */
3537*4882a593Smuzhiyun 	array = (struct snd_soc_tplg_vendor_array *)manifest->priv.data;
3538*4882a593Smuzhiyun 	ret = skl_tplg_get_desc_blocks(dev, array);
3539*4882a593Smuzhiyun 	if (ret < 0)
3540*4882a593Smuzhiyun 		return ret;
3541*4882a593Smuzhiyun 	num_blocks = ret;
3542*4882a593Smuzhiyun 
3543*4882a593Smuzhiyun 	off += array->size;
3544*4882a593Smuzhiyun 	/* Read the BLOCK_TYPE and BLOCK_SIZE descriptor */
3545*4882a593Smuzhiyun 	while (num_blocks > 0) {
3546*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)
3547*4882a593Smuzhiyun 				(manifest->priv.data + off);
3548*4882a593Smuzhiyun 		ret = skl_tplg_get_desc_blocks(dev, array);
3549*4882a593Smuzhiyun 
3550*4882a593Smuzhiyun 		if (ret < 0)
3551*4882a593Smuzhiyun 			return ret;
3552*4882a593Smuzhiyun 		block_type = ret;
3553*4882a593Smuzhiyun 		off += array->size;
3554*4882a593Smuzhiyun 
3555*4882a593Smuzhiyun 		array = (struct snd_soc_tplg_vendor_array *)
3556*4882a593Smuzhiyun 			(manifest->priv.data + off);
3557*4882a593Smuzhiyun 
3558*4882a593Smuzhiyun 		ret = skl_tplg_get_desc_blocks(dev, array);
3559*4882a593Smuzhiyun 
3560*4882a593Smuzhiyun 		if (ret < 0)
3561*4882a593Smuzhiyun 			return ret;
3562*4882a593Smuzhiyun 		block_size = ret;
3563*4882a593Smuzhiyun 		off += array->size;
3564*4882a593Smuzhiyun 
3568*4882a593Smuzhiyun 		data = (manifest->priv.data + off);
3569*4882a593Smuzhiyun 
3570*4882a593Smuzhiyun 		if (block_type == SKL_TYPE_TUPLE) {
3571*4882a593Smuzhiyun 			ret = skl_tplg_get_manifest_tkn(dev, data, skl,
3572*4882a593Smuzhiyun 					block_size);
3573*4882a593Smuzhiyun 
3574*4882a593Smuzhiyun 			if (ret < 0)
3575*4882a593Smuzhiyun 				return ret;
3576*4882a593Smuzhiyun 
3577*4882a593Smuzhiyun 			--num_blocks;
3578*4882a593Smuzhiyun 		} else {
3579*4882a593Smuzhiyun 			return -EINVAL;
3580*4882a593Smuzhiyun 		}
3581*4882a593Smuzhiyun 		off += ret;
3582*4882a593Smuzhiyun 	}
3583*4882a593Smuzhiyun 
3584*4882a593Smuzhiyun 	return 0;
3585*4882a593Smuzhiyun }
3586*4882a593Smuzhiyun 
3587*4882a593Smuzhiyun static int skl_manifest_load(struct snd_soc_component *cmpnt, int index,
3588*4882a593Smuzhiyun 				struct snd_soc_tplg_manifest *manifest)
3589*4882a593Smuzhiyun {
3590*4882a593Smuzhiyun 	struct hdac_bus *bus = snd_soc_component_get_drvdata(cmpnt);
3591*4882a593Smuzhiyun 	struct skl_dev *skl = bus_to_skl(bus);
3592*4882a593Smuzhiyun 
3593*4882a593Smuzhiyun 	/* proceed only if we have private data defined */
3594*4882a593Smuzhiyun 	if (manifest->priv.size == 0)
3595*4882a593Smuzhiyun 		return 0;
3596*4882a593Smuzhiyun 
3597*4882a593Smuzhiyun 	skl_tplg_get_manifest_data(manifest, bus->dev, skl);
3598*4882a593Smuzhiyun 
3599*4882a593Smuzhiyun 	if (skl->lib_count > SKL_MAX_LIB) {
3600*4882a593Smuzhiyun 		dev_err(bus->dev, "Exceeding max Library count. Got:%d\n",
3601*4882a593Smuzhiyun 					skl->lib_count);
3602*4882a593Smuzhiyun 		return  -EINVAL;
3603*4882a593Smuzhiyun 	}
3604*4882a593Smuzhiyun 
3605*4882a593Smuzhiyun 	return 0;
3606*4882a593Smuzhiyun }
3607*4882a593Smuzhiyun 
3608*4882a593Smuzhiyun static void skl_tplg_complete(struct snd_soc_component *component)
3609*4882a593Smuzhiyun {
3610*4882a593Smuzhiyun 	struct snd_soc_dobj *dobj;
3611*4882a593Smuzhiyun 	struct snd_soc_acpi_mach *mach =
3612*4882a593Smuzhiyun 		dev_get_platdata(component->card->dev);
3613*4882a593Smuzhiyun 	int i;
3614*4882a593Smuzhiyun 
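	/*
	 * Pre-select the DMIC multi-config enum entry whose text contains
	 * "c<dmic_num>", so the control defaults to the DMIC channel count
	 * advertised in the machine parameters (mach_params.dmic_num).
	 */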
3615*4882a593Smuzhiyun 	list_for_each_entry(dobj, &component->dobj_list, list) {
3616*4882a593Smuzhiyun 		struct snd_kcontrol *kcontrol = dobj->control.kcontrol;
3617*4882a593Smuzhiyun 		struct soc_enum *se;
3618*4882a593Smuzhiyun 		char **texts;
3619*4882a593Smuzhiyun 		char chan_text[4];
3620*4882a593Smuzhiyun 
3621*4882a593Smuzhiyun 		if (dobj->type != SND_SOC_DOBJ_ENUM || !kcontrol ||
3622*4882a593Smuzhiyun 		    kcontrol->put != skl_tplg_multi_config_set_dmic)
3623*4882a593Smuzhiyun 			continue;
3624*4882a593Smuzhiyun 
3625*4882a593Smuzhiyun 		se = (struct soc_enum *)kcontrol->private_value;
3626*4882a593Smuzhiyun 		texts = dobj->control.dtexts;
3627*4882a593Smuzhiyun 		snprintf(chan_text, sizeof(chan_text), "c%d",
			 mach->mach_params.dmic_num);
3628*4882a593Smuzhiyun 
3629*4882a593Smuzhiyun 		for (i = 0; i < se->items; i++) {
3630*4882a593Smuzhiyun 			struct snd_ctl_elem_value val = {};
3631*4882a593Smuzhiyun 
3632*4882a593Smuzhiyun 			if (strstr(texts[i], chan_text)) {
3633*4882a593Smuzhiyun 				val.value.enumerated.item[0] = i;
3634*4882a593Smuzhiyun 				kcontrol->put(kcontrol, &val);
3635*4882a593Smuzhiyun 			}
3636*4882a593Smuzhiyun 		}
3637*4882a593Smuzhiyun 	}
3638*4882a593Smuzhiyun }
3639*4882a593Smuzhiyun 
3640*4882a593Smuzhiyun static struct snd_soc_tplg_ops skl_tplg_ops = {
3641*4882a593Smuzhiyun 	.widget_load = skl_tplg_widget_load,
3642*4882a593Smuzhiyun 	.control_load = skl_tplg_control_load,
3643*4882a593Smuzhiyun 	.bytes_ext_ops = skl_tlv_ops,
3644*4882a593Smuzhiyun 	.bytes_ext_ops_count = ARRAY_SIZE(skl_tlv_ops),
3645*4882a593Smuzhiyun 	.io_ops = skl_tplg_kcontrol_ops,
3646*4882a593Smuzhiyun 	.io_ops_count = ARRAY_SIZE(skl_tplg_kcontrol_ops),
3647*4882a593Smuzhiyun 	.manifest = skl_manifest_load,
3648*4882a593Smuzhiyun 	.dai_load = skl_dai_load,
3649*4882a593Smuzhiyun 	.complete = skl_tplg_complete,
3650*4882a593Smuzhiyun };
3651*4882a593Smuzhiyun 
3652*4882a593Smuzhiyun /*
3653*4882a593Smuzhiyun  * A pipe can have multiple modules, each of which is also a DAPM widget.
3654*4882a593Smuzhiyun  * While managing a pipeline we need the list of all the widgets in that
3655*4882a593Smuzhiyun  * pipeline, so skl_tplg_create_pipe_widget_list() collects the SKL-type
3656*4882a593Smuzhiyun  * widgets belonging to each pipe.
3657*4882a593Smuzhiyun  */
3658*4882a593Smuzhiyun static int skl_tplg_create_pipe_widget_list(struct snd_soc_component *component)
3659*4882a593Smuzhiyun {
3660*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
3661*4882a593Smuzhiyun 	struct skl_module_cfg *mcfg = NULL;
3662*4882a593Smuzhiyun 	struct skl_pipe_module *p_module = NULL;
3663*4882a593Smuzhiyun 	struct skl_pipe *pipe;
3664*4882a593Smuzhiyun 
3665*4882a593Smuzhiyun 	list_for_each_entry(w, &component->card->widgets, list) {
3666*4882a593Smuzhiyun 		if (is_skl_dsp_widget_type(w, component->dev) && w->priv) {
3667*4882a593Smuzhiyun 			mcfg = w->priv;
3668*4882a593Smuzhiyun 			pipe = mcfg->pipe;
3669*4882a593Smuzhiyun 
3670*4882a593Smuzhiyun 			p_module = devm_kzalloc(component->dev,
3671*4882a593Smuzhiyun 						sizeof(*p_module), GFP_KERNEL);
3672*4882a593Smuzhiyun 			if (!p_module)
3673*4882a593Smuzhiyun 				return -ENOMEM;
3674*4882a593Smuzhiyun 
3675*4882a593Smuzhiyun 			p_module->w = w;
3676*4882a593Smuzhiyun 			list_add_tail(&p_module->node, &pipe->w_list);
3677*4882a593Smuzhiyun 		}
3678*4882a593Smuzhiyun 	}
3679*4882a593Smuzhiyun 
3680*4882a593Smuzhiyun 	return 0;
3681*4882a593Smuzhiyun }
3682*4882a593Smuzhiyun 
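/*
 * A pipe that contains both a host (HDA) copier and a link-side device
 * module is marked as a passthrough pipe; otherwise passthru is cleared.
 */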
3683*4882a593Smuzhiyun static void skl_tplg_set_pipe_type(struct skl_dev *skl, struct skl_pipe *pipe)
3684*4882a593Smuzhiyun {
3685*4882a593Smuzhiyun 	struct skl_pipe_module *w_module;
3686*4882a593Smuzhiyun 	struct snd_soc_dapm_widget *w;
3687*4882a593Smuzhiyun 	struct skl_module_cfg *mconfig;
3688*4882a593Smuzhiyun 	bool host_found = false, link_found = false;
3689*4882a593Smuzhiyun 
3690*4882a593Smuzhiyun 	list_for_each_entry(w_module, &pipe->w_list, node) {
3691*4882a593Smuzhiyun 		w = w_module->w;
3692*4882a593Smuzhiyun 		mconfig = w->priv;
3693*4882a593Smuzhiyun 
3694*4882a593Smuzhiyun 		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
3695*4882a593Smuzhiyun 			host_found = true;
3696*4882a593Smuzhiyun 		else if (mconfig->dev_type != SKL_DEVICE_NONE)
3697*4882a593Smuzhiyun 			link_found = true;
3698*4882a593Smuzhiyun 	}
3699*4882a593Smuzhiyun 
3700*4882a593Smuzhiyun 	pipe->passthru = host_found && link_found;
3704*4882a593Smuzhiyun }
3705*4882a593Smuzhiyun 
3706*4882a593Smuzhiyun /*
3707*4882a593Smuzhiyun  * SKL topology init routine
3708*4882a593Smuzhiyun  */
3709*4882a593Smuzhiyun int skl_tplg_init(struct snd_soc_component *component, struct hdac_bus *bus)
3710*4882a593Smuzhiyun {
3711*4882a593Smuzhiyun 	int ret;
3712*4882a593Smuzhiyun 	const struct firmware *fw;
3713*4882a593Smuzhiyun 	struct skl_dev *skl = bus_to_skl(bus);
3714*4882a593Smuzhiyun 	struct skl_pipeline *ppl;
3715*4882a593Smuzhiyun 
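	/*
	 * Topology binary lookup order: the machine-specific skl->tplg_name,
	 * then "<drv_name>-tplg.bin", and finally the generic "dfw_sst.bin"
	 * fallback.
	 */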
3716*4882a593Smuzhiyun 	ret = request_firmware(&fw, skl->tplg_name, bus->dev);
3717*4882a593Smuzhiyun 	if (ret < 0) {
3718*4882a593Smuzhiyun 		char alt_tplg_name[64];
3719*4882a593Smuzhiyun 
3720*4882a593Smuzhiyun 		snprintf(alt_tplg_name, sizeof(alt_tplg_name), "%s-tplg.bin",
3721*4882a593Smuzhiyun 			 skl->mach->drv_name);
3722*4882a593Smuzhiyun 		dev_info(bus->dev, "tplg fw %s load failed with %d, trying alternative tplg name %s",
3723*4882a593Smuzhiyun 			 skl->tplg_name, ret, alt_tplg_name);
3724*4882a593Smuzhiyun 
3725*4882a593Smuzhiyun 		ret = request_firmware(&fw, alt_tplg_name, bus->dev);
3726*4882a593Smuzhiyun 		if (!ret)
3727*4882a593Smuzhiyun 			goto component_load;
3728*4882a593Smuzhiyun 
3729*4882a593Smuzhiyun 		dev_info(bus->dev, "tplg %s failed with %d, falling back to dfw_sst.bin",
3730*4882a593Smuzhiyun 			 alt_tplg_name, ret);
3731*4882a593Smuzhiyun 
3732*4882a593Smuzhiyun 		ret = request_firmware(&fw, "dfw_sst.bin", bus->dev);
3733*4882a593Smuzhiyun 		if (ret < 0) {
3734*4882a593Smuzhiyun 			dev_err(bus->dev, "Fallback tplg fw %s load failed with %d\n",
3735*4882a593Smuzhiyun 					"dfw_sst.bin", ret);
3736*4882a593Smuzhiyun 			return ret;
3737*4882a593Smuzhiyun 		}
3738*4882a593Smuzhiyun 	}
3739*4882a593Smuzhiyun 
3740*4882a593Smuzhiyun component_load:
3741*4882a593Smuzhiyun 
3742*4882a593Smuzhiyun 	/*
3743*4882a593Smuzhiyun 	 * The complete tplg for SKL is loaded as index 0, we don't use
3744*4882a593Smuzhiyun 	 * any other index
3745*4882a593Smuzhiyun 	 */
3746*4882a593Smuzhiyun 	ret = snd_soc_tplg_component_load(component, &skl_tplg_ops, fw, 0);
3747*4882a593Smuzhiyun 	if (ret < 0) {
3748*4882a593Smuzhiyun 		dev_err(bus->dev, "tplg component load failed%d\n", ret);
3749*4882a593Smuzhiyun 		goto err;
3750*4882a593Smuzhiyun 	}
3751*4882a593Smuzhiyun 
3752*4882a593Smuzhiyun 	ret = skl_tplg_create_pipe_widget_list(component);
3753*4882a593Smuzhiyun 	if (ret < 0) {
3754*4882a593Smuzhiyun 		dev_err(bus->dev, "tplg create pipe widget list failed%d\n",
3755*4882a593Smuzhiyun 				ret);
3756*4882a593Smuzhiyun 		goto err;
3757*4882a593Smuzhiyun 	}
3758*4882a593Smuzhiyun 
3759*4882a593Smuzhiyun 	list_for_each_entry(ppl, &skl->ppl_list, node)
3760*4882a593Smuzhiyun 		skl_tplg_set_pipe_type(skl, ppl->pipe);
3761*4882a593Smuzhiyun 
3762*4882a593Smuzhiyun err:
3763*4882a593Smuzhiyun 	release_firmware(fw);
3764*4882a593Smuzhiyun 	return ret;
3765*4882a593Smuzhiyun }
3766*4882a593Smuzhiyun 
3767*4882a593Smuzhiyun void skl_tplg_exit(struct snd_soc_component *component, struct hdac_bus *bus)
3768*4882a593Smuzhiyun {
3769*4882a593Smuzhiyun 	struct skl_dev *skl = bus_to_skl(bus);
3770*4882a593Smuzhiyun 	struct skl_pipeline *ppl, *tmp;
3771*4882a593Smuzhiyun 
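	/* Drop the driver's pipeline bookkeeping before unloading the topology */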
3772*4882a593Smuzhiyun 	list_for_each_entry_safe(ppl, tmp, &skl->ppl_list, node)
3773*4882a593Smuzhiyun 		list_del(&ppl->node);
3774*4882a593Smuzhiyun 
3775*4882a593Smuzhiyun 	/* clean up topology */
3776*4882a593Smuzhiyun 	snd_soc_tplg_component_remove(component, SND_SOC_TPLG_INDEX_ALL);
3777*4882a593Smuzhiyun }
3778