Lines Matching refs:skl

55 void skl_tplg_d0i3_get(struct skl *skl, enum d0i3_capability caps) in skl_tplg_d0i3_get() argument
57 struct skl_d0i3_data *d0i3 = &skl->skl_sst->d0i3; in skl_tplg_d0i3_get()
74 void skl_tplg_d0i3_put(struct skl *skl, enum d0i3_capability caps) in skl_tplg_d0i3_put() argument
76 struct skl_d0i3_data *d0i3 = &skl->skl_sst->d0i3; in skl_tplg_d0i3_put()
116 static bool skl_is_pipe_mem_avail(struct skl *skl, in skl_is_pipe_mem_avail() argument
119 struct skl_sst *ctx = skl->skl_sst; in skl_is_pipe_mem_avail()
121 if (skl->resource.mem + mconfig->pipe->memory_pages > in skl_is_pipe_mem_avail()
122 skl->resource.max_mem) { in skl_is_pipe_mem_avail()
129 skl->resource.max_mem, skl->resource.mem); in skl_is_pipe_mem_avail()
141 static void skl_tplg_alloc_pipe_mem(struct skl *skl, in skl_tplg_alloc_pipe_mem() argument
144 skl->resource.mem += mconfig->pipe->memory_pages; in skl_tplg_alloc_pipe_mem()
155 static bool skl_is_pipe_mcps_avail(struct skl *skl, in skl_is_pipe_mcps_avail() argument
158 struct skl_sst *ctx = skl->skl_sst; in skl_is_pipe_mcps_avail()
162 if (skl->resource.mcps + res->cps > skl->resource.max_mcps) { in skl_is_pipe_mcps_avail()
168 skl->resource.max_mcps, skl->resource.mcps); in skl_is_pipe_mcps_avail()
175 static void skl_tplg_alloc_pipe_mcps(struct skl *skl, in skl_tplg_alloc_pipe_mcps() argument
181 skl->resource.mcps += res->cps; in skl_tplg_alloc_pipe_mcps()
188 skl_tplg_free_pipe_mcps(struct skl *skl, struct skl_module_cfg *mconfig) in skl_tplg_free_pipe_mcps() argument
194 skl->resource.mcps -= res->cps; in skl_tplg_free_pipe_mcps()
201 skl_tplg_free_pipe_mem(struct skl *skl, struct skl_module_cfg *mconfig) in skl_tplg_free_pipe_mem() argument
203 skl->resource.mem -= mconfig->pipe->memory_pages; in skl_tplg_free_pipe_mem()
384 struct skl *skl = get_skl_ctx(ctx->dev); in skl_tplg_update_be_blob() local
423 cfg = skl_get_ep_blob(skl, m_cfg->vbus_id, link_type, in skl_tplg_update_be_blob()
566 skl_tplg_init_pipe_modules(struct skl *skl, struct skl_pipe *pipe) in skl_tplg_init_pipe_modules() argument
571 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_init_pipe_modules()
582 dev_err(skl->skl_sst->dev, in skl_tplg_init_pipe_modules()
593 if (!skl_is_pipe_mcps_avail(skl, mconfig)) in skl_tplg_init_pipe_modules()
637 skl_tplg_alloc_pipe_mcps(skl, mconfig); in skl_tplg_init_pipe_modules()
690 skl_tplg_get_pipe_config(struct skl *skl, struct skl_module_cfg *mconfig) in skl_tplg_get_pipe_config() argument
692 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_get_pipe_config()
750 struct skl *skl) in skl_tplg_mixer_dapm_pre_pmu_event() argument
757 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_mixer_dapm_pre_pmu_event()
760 ret = skl_tplg_get_pipe_config(skl, mconfig); in skl_tplg_mixer_dapm_pre_pmu_event()
765 if (!skl_is_pipe_mcps_avail(skl, mconfig)) in skl_tplg_mixer_dapm_pre_pmu_event()
768 if (!skl_is_pipe_mem_avail(skl, mconfig)) in skl_tplg_mixer_dapm_pre_pmu_event()
779 skl_tplg_alloc_pipe_mem(skl, mconfig); in skl_tplg_mixer_dapm_pre_pmu_event()
780 skl_tplg_alloc_pipe_mcps(skl, mconfig); in skl_tplg_mixer_dapm_pre_pmu_event()
783 ret = skl_tplg_init_pipe_modules(skl, s_pipe); in skl_tplg_mixer_dapm_pre_pmu_event()
808 if (list_empty(&skl->bind_list)) in skl_tplg_mixer_dapm_pre_pmu_event()
811 list_for_each_entry(modules, &skl->bind_list, node) { in skl_tplg_mixer_dapm_pre_pmu_event()
916 static int skl_tplg_module_add_deferred_bind(struct skl *skl, in skl_tplg_module_add_deferred_bind() argument
932 if (!list_empty(&skl->bind_list)) { in skl_tplg_module_add_deferred_bind()
933 list_for_each_entry(modules, &skl->bind_list, node) { in skl_tplg_module_add_deferred_bind()
946 list_add(&m_list->node, &skl->bind_list); in skl_tplg_module_add_deferred_bind()
954 struct skl *skl, in skl_tplg_bind_sinks() argument
961 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_bind_sinks()
974 return skl_tplg_bind_sinks(p->sink, skl, src_w, src_mconfig); in skl_tplg_bind_sinks()
1000 ret = skl_tplg_module_add_deferred_bind(skl, in skl_tplg_bind_sinks()
1035 return skl_tplg_bind_sinks(next_sink, skl, src_w, src_mconfig); in skl_tplg_bind_sinks()
1051 struct skl *skl) in skl_tplg_pga_dapm_pre_pmu_event() argument
1054 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_pga_dapm_pre_pmu_event()
1064 ret = skl_tplg_bind_sinks(w, skl, w, src_mconfig); in skl_tplg_pga_dapm_pre_pmu_event()
1076 struct snd_soc_dapm_widget *w, struct skl *skl) in skl_get_src_dsp_widget() argument
1080 struct skl_sst *ctx = skl->skl_sst; in skl_get_src_dsp_widget()
1102 return skl_get_src_dsp_widget(src_w, skl); in skl_get_src_dsp_widget()
1117 struct skl *skl) in skl_tplg_mixer_dapm_post_pmu_event() argument
1122 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_mixer_dapm_post_pmu_event()
1133 source = skl_get_src_dsp_widget(w, skl); in skl_tplg_mixer_dapm_post_pmu_event()
1170 struct skl *skl) in skl_tplg_mixer_dapm_pre_pmd_event() argument
1174 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_mixer_dapm_pre_pmd_event()
1206 struct skl *skl) in skl_tplg_mixer_dapm_post_pmd_event() argument
1211 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_mixer_dapm_post_pmd_event()
1218 skl_tplg_free_pipe_mcps(skl, mconfig); in skl_tplg_mixer_dapm_post_pmd_event()
1219 skl_tplg_free_pipe_mem(skl, mconfig); in skl_tplg_mixer_dapm_post_pmd_event()
1222 if (list_empty(&skl->bind_list)) in skl_tplg_mixer_dapm_post_pmd_event()
1227 list_for_each_entry_safe(modules, tmp, &skl->bind_list, node) { in skl_tplg_mixer_dapm_post_pmd_event()
1254 skl_tplg_free_pipe_mcps(skl, dst_module); in skl_tplg_mixer_dapm_post_pmd_event()
1281 struct skl *skl) in skl_tplg_pga_dapm_post_pmd_event() argument
1285 struct skl_sst *ctx = skl->skl_sst; in skl_tplg_pga_dapm_post_pmd_event()
1321 struct skl *skl = get_skl_ctx(dapm->dev); in skl_tplg_mixer_event() local
1325 return skl_tplg_mixer_dapm_pre_pmu_event(w, skl); in skl_tplg_mixer_event()
1328 return skl_tplg_mixer_dapm_post_pmu_event(w, skl); in skl_tplg_mixer_event()
1331 return skl_tplg_mixer_dapm_pre_pmd_event(w, skl); in skl_tplg_mixer_event()
1334 return skl_tplg_mixer_dapm_post_pmd_event(w, skl); in skl_tplg_mixer_event()
1351 struct skl *skl = get_skl_ctx(dapm->dev); in skl_tplg_pga_event() local
1355 return skl_tplg_pga_dapm_pre_pmu_event(w, skl); in skl_tplg_pga_event()
1358 return skl_tplg_pga_dapm_post_pmd_event(w, skl); in skl_tplg_pga_event()
1372 struct skl *skl = get_skl_ctx(w->dapm->dev); in skl_tplg_tlv_control_get() local
1375 skl_get_module_params(skl->skl_sst, (u32 *)bc->params, in skl_tplg_tlv_control_get()
1407 struct skl *skl = get_skl_ctx(w->dapm->dev); in skl_tplg_tlv_control_set() local
1428 return skl_set_module_params(skl->skl_sst, in skl_tplg_tlv_control_set()
1589 struct skl *skl = get_skl_ctx(dev); in skl_tplg_update_pipe_params() local
1597 if (skl->nr_modules) in skl_tplg_update_pipe_params()
1786 struct skl *skl = get_skl_ctx(dai->dev); in skl_tplg_be_fill_pipe_params() local
1796 cfg = skl_get_ep_blob(skl, mconfig->vbus_id, link_type, in skl_tplg_be_fill_pipe_params()
2000 struct skl_module_cfg *mconfig, struct skl *skl, in skl_tplg_add_pipe() argument
2007 list_for_each_entry(ppl, &skl->ppl_list, node) { in skl_tplg_add_pipe()
2031 list_add(&ppl->node, &skl->ppl_list); in skl_tplg_add_pipe()
2302 struct skl *skl, struct skl_module_cfg *mconfig) in skl_tplg_get_token() argument
2318 if (skl->nr_modules == 0) { in skl_tplg_get_token()
2410 mconfig, skl, tkn_elem); in skl_tplg_get_token()
2547 char *pvt_data, struct skl *skl, in skl_tplg_get_tokens() argument
2585 skl, mconfig); in skl_tplg_get_tokens()
2632 struct skl *skl, struct device *dev, in skl_tplg_get_pvt_data() argument
2677 skl, mconfig, block_size); in skl_tplg_get_pvt_data()
2720 void skl_cleanup_resources(struct skl *skl) in skl_cleanup_resources() argument
2722 struct skl_sst *ctx = skl->skl_sst; in skl_cleanup_resources()
2723 struct snd_soc_platform *soc_platform = skl->platform; in skl_cleanup_resources()
2734 skl->resource.mem = 0; in skl_cleanup_resources()
2735 skl->resource.mcps = 0; in skl_cleanup_resources()
2758 struct skl *skl = ebus_to_skl(ebus); in skl_tplg_widget_load() local
2770 if (skl->nr_modules == 0) { in skl_tplg_widget_load()
2786 ret = skl_tplg_get_pvt_data(tplg_w, skl, bus->dev, mconfig); in skl_tplg_widget_load()
2790 skl_debug_init_module(skl->debugfs, w, mconfig); in skl_tplg_widget_load()
2903 struct skl *skl) in skl_tplg_fill_str_mfest_tkn() argument
2910 if (ref_count > skl->skl_sst->lib_count - 1) { in skl_tplg_fill_str_mfest_tkn()
2915 strncpy(skl->skl_sst->lib_info[ref_count].name, in skl_tplg_fill_str_mfest_tkn()
2917 ARRAY_SIZE(skl->skl_sst->lib_info[ref_count].name)); in skl_tplg_fill_str_mfest_tkn()
2932 struct skl *skl) in skl_tplg_get_str_tkn() argument
2939 ret = skl_tplg_fill_str_mfest_tkn(dev, str_elem, skl); in skl_tplg_get_str_tkn()
3039 struct skl *skl) in skl_tplg_get_int_tkn() argument
3048 if (skl->modules) { in skl_tplg_get_int_tkn()
3049 mod = skl->modules[mod_idx]; in skl_tplg_get_int_tkn()
3056 skl->skl_sst->lib_count = tkn_elem->value; in skl_tplg_get_int_tkn()
3060 skl->nr_modules = tkn_elem->value; in skl_tplg_get_int_tkn()
3061 skl->modules = devm_kcalloc(dev, skl->nr_modules, in skl_tplg_get_int_tkn()
3062 sizeof(*skl->modules), GFP_KERNEL); in skl_tplg_get_int_tkn()
3063 if (!skl->modules) in skl_tplg_get_int_tkn()
3066 for (i = 0; i < skl->nr_modules; i++) { in skl_tplg_get_int_tkn()
3067 skl->modules[i] = devm_kzalloc(dev, in skl_tplg_get_int_tkn()
3069 if (!skl->modules[i]) in skl_tplg_get_int_tkn()
3163 struct skl *skl, in skl_tplg_get_manifest_uuid() argument
3170 mod = skl->modules[ref_count]; in skl_tplg_get_manifest_uuid()
3186 char *pvt_data, struct skl *skl, in skl_tplg_get_manifest_tkn() argument
3202 ret = skl_tplg_get_str_tkn(dev, array, skl); in skl_tplg_get_manifest_tkn()
3213 ret = skl_tplg_get_manifest_uuid(dev, skl, array->uuid); in skl_tplg_get_manifest_tkn()
3228 tkn_elem, skl); in skl_tplg_get_manifest_tkn()
3247 struct device *dev, struct skl *skl) in skl_tplg_get_manifest_data() argument
3289 ret = skl_tplg_get_manifest_tkn(dev, data, skl, in skl_tplg_get_manifest_data()
3310 struct skl *skl = ebus_to_skl(ebus); in skl_manifest_load() local
3316 skl_tplg_get_manifest_data(manifest, bus->dev, skl); in skl_manifest_load()
3318 if (skl->skl_sst->lib_count > SKL_MAX_LIB) { in skl_manifest_load()
3320 skl->skl_sst->lib_count); in skl_manifest_load()
3368 static void skl_tplg_set_pipe_type(struct skl *skl, struct skl_pipe *pipe) in skl_tplg_set_pipe_type() argument
3403 struct skl *skl = ebus_to_skl(ebus); in skl_tplg_init() local
3406 ret = request_firmware(&fw, skl->tplg_name, bus->dev); in skl_tplg_init()
3409 skl->tplg_name, ret); in skl_tplg_init()
3430 skl->resource.max_mcps = SKL_MAX_MCPS; in skl_tplg_init()
3431 skl->resource.max_mem = SKL_FW_MAX_MEM; in skl_tplg_init()
3433 skl->tplg = fw; in skl_tplg_init()
3438 list_for_each_entry(ppl, &skl->ppl_list, node) in skl_tplg_init()
3439 skl_tplg_set_pipe_type(skl, ppl->pipe); in skl_tplg_init()