
Searched refs:contexts (Results 1 – 25 of 35) sorted by relevance


/drivers/net/wireless/intel/iwlwifi/dvm/
main.c:613  priv->contexts[i].ctxid = i; in iwl_init_context()
615 priv->contexts[IWL_RXON_CTX_BSS].always_active = true; in iwl_init_context()
616 priv->contexts[IWL_RXON_CTX_BSS].is_active = true; in iwl_init_context()
617 priv->contexts[IWL_RXON_CTX_BSS].rxon_cmd = REPLY_RXON; in iwl_init_context()
618 priv->contexts[IWL_RXON_CTX_BSS].rxon_timing_cmd = REPLY_RXON_TIMING; in iwl_init_context()
619 priv->contexts[IWL_RXON_CTX_BSS].rxon_assoc_cmd = REPLY_RXON_ASSOC; in iwl_init_context()
620 priv->contexts[IWL_RXON_CTX_BSS].qos_cmd = REPLY_QOS_PARAM; in iwl_init_context()
621 priv->contexts[IWL_RXON_CTX_BSS].ap_sta_id = IWL_AP_ID; in iwl_init_context()
622 priv->contexts[IWL_RXON_CTX_BSS].wep_key_cmd = REPLY_WEPKEY; in iwl_init_context()
623 priv->contexts[IWL_RXON_CTX_BSS].bcast_sta_id = IWLAGN_BROADCAST_ID; in iwl_init_context()
[all …]
rxon.c:306  priv->contexts[IWL_RXON_CTX_BSS].vif && in iwl_send_rxon_timing()
307 priv->contexts[IWL_RXON_CTX_BSS].vif->bss_conf.beacon_int) { in iwl_send_rxon_timing()
309 priv->contexts[IWL_RXON_CTX_BSS].timing.beacon_interval; in iwl_send_rxon_timing()
313 priv->contexts[IWL_RXON_CTX_PAN].vif && in iwl_send_rxon_timing()
314 priv->contexts[IWL_RXON_CTX_PAN].vif->bss_conf.beacon_int && in iwl_send_rxon_timing()
318 priv->contexts[IWL_RXON_CTX_PAN].timing.beacon_interval; in iwl_send_rxon_timing()
392 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwl_set_tx_power()
528 ctx_bss = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_set_pan_params()
529 ctx_pan = &priv->contexts[IWL_RXON_CTX_PAN]; in iwlagn_set_pan_params()
896 struct iwl_rxon_context *ctx = &priv->contexts[ctxid]; in iwl_print_rx_config_cmd()
dev.h:722  struct iwl_rxon_context contexts[NUM_IWL_RXON_CTX]; member
904 for (ctx = &priv->contexts[IWL_RXON_CTX_BSS]; \
905 ctx < &priv->contexts[NUM_IWL_RXON_CTX]; ctx++) \
916 return iwl_is_associated_ctx(&priv->contexts[ctxid]); in iwl_is_associated()
mac80211.c:344  if (priv->contexts[IWL_RXON_CTX_BSS].vif != vif) in iwlagn_mac_set_rekey_data()
364 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_mac_suspend()
428 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_mac_resume()
527 if (resume_data.valid && priv->contexts[IWL_RXON_CTX_BSS].vif) { in iwlagn_mac_resume()
949 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_mac_channel_switch()
1023 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwl_chswitch_done()
devices.c:397  struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwl5000_hw_channel_switch()
558 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwl6000_hw_channel_switch()
scan.c:620  struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_request_scan()
741 priv->contexts[IWL_RXON_CTX_BSS].active.flags & in iwlagn_request_scan()
rx.c:57  struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_rx_csa()
570 if (priv->contexts[IWL_RXON_CTX_BSS].active.filter_flags & in iwlagn_set_decrypted_flag()
lib.c:586  struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_fill_txpower_mode()
1043 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_suspend()
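
Note: the iwlwifi DVM hits above all revolve around one data structure, a fixed-size array priv->contexts[NUM_IWL_RXON_CTX] (dev.h:722) indexed by RXON context ID and walked by the iterator at dev.h:904-905. Below is a minimal sketch of that pattern, with the struct reduced to the fields visible in the hits (the real struct iwl_rxon_context carries far more state) and the init logic simplified:

    #include <linux/types.h>

    /* Sketch only: mirrors the array-of-contexts layout seen above. */
    enum iwl_rxon_context_id {
        IWL_RXON_CTX_BSS,
        IWL_RXON_CTX_PAN,
        NUM_IWL_RXON_CTX
    };

    struct iwl_rxon_context {
        enum iwl_rxon_context_id ctxid;
        bool always_active;
        bool is_active;
        /* per-context command IDs, station IDs, timing, vif, ... */
    };

    struct iwl_priv {
        struct iwl_rxon_context contexts[NUM_IWL_RXON_CTX];
    };

    static void init_rxon_contexts(struct iwl_priv *priv)
    {
        struct iwl_rxon_context *ctx;
        int i;

        for (i = 0; i < NUM_IWL_RXON_CTX; i++)
            priv->contexts[i].ctxid = i;

        /* Same shape as the dev.h:904-905 iterator: walk every slot. */
        for (ctx = &priv->contexts[IWL_RXON_CTX_BSS];
             ctx < &priv->contexts[NUM_IWL_RXON_CTX]; ctx++)
            ctx->is_active = (ctx->ctxid == IWL_RXON_CTX_BSS);
    }

Most call sites above simply take &priv->contexts[IWL_RXON_CTX_BSS]; only the PAN-aware paths such as iwl_send_rxon_timing() and iwlagn_set_pan_params() also touch the IWL_RXON_CTX_PAN slot.
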
/drivers/gpu/drm/tegra/
uapi.c:56  xa_for_each(&file->contexts, id, context) in tegra_drm_uapi_close_file()
62 xa_destroy(&file->contexts); in tegra_drm_uapi_close_file()
109 err = xa_alloc(&fpriv->contexts, &args->context, context, XA_LIMIT(1, U32_MAX), in tegra_drm_ioctl_channel_open()
141 context = xa_load(&fpriv->contexts, args->context); in tegra_drm_ioctl_channel_close()
147 xa_erase(&fpriv->contexts, args->context); in tegra_drm_ioctl_channel_close()
169 context = xa_load(&fpriv->contexts, args->context); in tegra_drm_ioctl_channel_map()
260 context = xa_load(&fpriv->contexts, args->context); in tegra_drm_ioctl_channel_unmap()
uapi.h:23  struct xarray contexts; member
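
Note: the Tegra DRM hits show the per-file struct xarray contexts (uapi.h:23) used as a handle table: xa_alloc() hands out a channel context ID on open, xa_load() resolves it in later ioctls, xa_erase() drops it on close, and xa_for_each()/xa_destroy() tear everything down when the file is released. A hedged sketch of that lifecycle with stand-in names (my_file, my_context) and most error handling trimmed; xa_alloc() requires the xarray to be initialised with an allocation flag:

    #include <linux/xarray.h>
    #include <linux/slab.h>

    struct my_context { u32 id; };           /* stand-in for the driver's context */

    struct my_file {
        struct xarray contexts;              /* ID -> context, IDs handed out by the xarray */
    };

    static void my_file_open(struct my_file *fpriv)
    {
        xa_init_flags(&fpriv->contexts, XA_FLAGS_ALLOC);
    }

    static int my_channel_open(struct my_file *fpriv, u32 *out_id)
    {
        struct my_context *context = kzalloc(sizeof(*context), GFP_KERNEL);
        int err;

        if (!context)
            return -ENOMEM;

        /* Pick an unused ID in [1, U32_MAX] and store the context under it. */
        err = xa_alloc(&fpriv->contexts, out_id, context,
                       XA_LIMIT(1, U32_MAX), GFP_KERNEL);
        if (err)
            kfree(context);
        return err;
    }

    static int my_channel_close(struct my_file *fpriv, u32 id)
    {
        struct my_context *context = xa_load(&fpriv->contexts, id);

        if (!context)
            return -EINVAL;

        xa_erase(&fpriv->contexts, id);
        kfree(context);
        return 0;
    }

    static void my_file_close(struct my_file *fpriv)
    {
        struct my_context *context;
        unsigned long id;

        xa_for_each(&fpriv->contexts, id, context)
            kfree(context);
        xa_destroy(&fpriv->contexts);
    }
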
/drivers/media/platform/omap3isp/
ispcsi2.c:220  struct isp_csi2_ctx_cfg *ctx = &csi2->contexts[0]; in csi2_set_outaddr()
250 struct isp_csi2_ctx_cfg *ctx = &csi2->contexts[ctxnum]; in csi2_ctx_enable()
561 if (csi2->contexts[0].enabled || csi2->ctrl.if_enable) in csi2_configure()
597 csi2->contexts[0].format_id = csi2_ctx_map_format(csi2); in csi2_configure()
600 csi2->contexts[0].data_offset = 0; in csi2_configure()
602 csi2->contexts[0].data_offset = csi2->video_out.bpl_value; in csi2_configure()
611 csi2->contexts[0].eof_enabled = 1; in csi2_configure()
612 csi2->contexts[0].eol_enabled = 1; in csi2_configure()
621 csi2_ctx_config(isp, csi2, &csi2->contexts[0]); in csi2_configure()
783 csi2_isr_ctx(csi2, &csi2->contexts[0]); in omap3isp_csi2_isr()
ispcsi2.h:137  struct isp_csi2_ctx_cfg contexts[ISP_CSI2_MAX_CTX_NUM + 1]; member
/drivers/staging/media/omap4iss/
iss_csi2.c:254  struct iss_csi2_ctx_cfg *ctx = &csi2->contexts[0]; in csi2_set_outaddr()
283 struct iss_csi2_ctx_cfg *ctx = &csi2->contexts[ctxnum]; in csi2_ctx_enable()
538 if (csi2->contexts[0].enabled || csi2->ctrl.if_enable) in csi2_configure()
568 csi2->contexts[0].format_id = csi2_ctx_map_format(csi2); in csi2_configure()
571 csi2->contexts[0].data_offset = 0; in csi2_configure()
573 csi2->contexts[0].data_offset = csi2->video_out.bpl_value; in csi2_configure()
582 csi2->contexts[0].eof_enabled = 1; in csi2_configure()
583 csi2->contexts[0].eol_enabled = 1; in csi2_configure()
592 csi2_ctx_config(csi2, &csi2->contexts[0]); in csi2_configure()
780 csi2_isr_ctx(csi2, &csi2->contexts[0]); in omap4iss_csi2_isr()
iss_csi2.h:139  struct iss_csi2_ctx_cfg contexts[ISS_CSI2_MAX_CTX_NUM + 1]; member
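
Note: both CSI-2 receivers above (OMAP3 ISP and OMAP4 ISS) keep a small fixed array of per-context configuration (ispcsi2.h:137 and iss_csi2.h:139) and, in the paths shown, program only context 0. A simplified model of that layout, keeping just the fields visible in the hits; the array bound and the packed/unpacked condition are illustrative, not the drivers' actual values:

    #include <linux/types.h>

    #define CSI2_MAX_CTX_NUM 7               /* illustrative bound, not the drivers' macro */

    struct csi2_ctx_cfg {
        unsigned int ctxnum;
        bool enabled;
        unsigned int format_id;
        unsigned int data_offset;
        bool eof_enabled;
        bool eol_enabled;
    };

    struct csi2_device {
        struct csi2_ctx_cfg contexts[CSI2_MAX_CTX_NUM + 1];
    };

    /* Mirrors the csi2_configure() steps above: set up context 0 only. */
    static void csi2_configure_ctx0(struct csi2_device *csi2,
                                    unsigned int format_id,
                                    unsigned int bytes_per_line,
                                    bool packed)
    {
        struct csi2_ctx_cfg *ctx = &csi2->contexts[0];

        ctx->format_id = format_id;
        ctx->data_offset = packed ? 0 : bytes_per_line;
        ctx->eof_enabled = true;
        ctx->eol_enabled = true;
    }
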
/drivers/gpu/drm/i915/
i915_sysfs.c:181  spin_lock(&i915->gem.contexts.lock); in i915_l3_read()
186 spin_unlock(&i915->gem.contexts.lock); in i915_l3_read()
214 spin_lock(&i915->gem.contexts.lock); in i915_l3_write()
227 list_for_each_entry(ctx, &i915->gem.contexts.list, link) in i915_l3_write()
230 spin_unlock(&i915->gem.contexts.lock); in i915_l3_write()
Kconfig.profile:96  the GPU, we allow the innocent contexts also on the system to quiesce.
/drivers/gpu/drm/i915/selftests/
i915_vma.c:108  struct list_head *contexts) in create_vmas() argument
116 list_for_each_entry(ctx, contexts, link) { in create_vmas()
157 LIST_HEAD(contexts); in igt_vma_create()
182 list_move(&ctx->link, &contexts); in igt_vma_create()
185 err = create_vmas(i915, &objects, &contexts); in igt_vma_create()
195 list_for_each_entry_safe(ctx, cn, &contexts, link) { in igt_vma_create()
205 err = create_vmas(i915, &objects, &contexts); in igt_vma_create()
207 list_for_each_entry_safe(ctx, cn, &contexts, link) { in igt_vma_create()
i915_request.c:273  struct i915_gem_context **contexts; member
355 t->contexts[order[n] % t->ncontexts]; in __igt_breadcrumbs_smoketest()
467 t.contexts = kcalloc(t.ncontexts, sizeof(*t.contexts), GFP_KERNEL); in mock_breadcrumbs_smoketest()
468 if (!t.contexts) { in mock_breadcrumbs_smoketest()
474 t.contexts[n] = mock_context(t.engine->i915, "mock"); in mock_breadcrumbs_smoketest()
475 if (!t.contexts[n]) { in mock_breadcrumbs_smoketest()
521 if (!t.contexts[n]) in mock_breadcrumbs_smoketest()
523 mock_context_close(t.contexts[n]); in mock_breadcrumbs_smoketest()
525 kfree(t.contexts); in mock_breadcrumbs_smoketest()
1626 smoke[0].contexts = kcalloc(smoke[0].ncontexts, in live_breadcrumbs_smoketest()
[all …]
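
Note: the breadcrumbs smoketests above follow the usual selftest recipe for a batch of contexts: kcalloc() an array of pointers, fill each slot, index into it pseudo-randomly while the test runs, and on teardown close only the slots that were actually populated before kfree()ing the array. A sketch of the setup/teardown halves; mock_ctx, mock_ctx_create() and mock_ctx_close() are hypothetical stand-ins for the selftests' own types and helpers:

    #include <linux/slab.h>

    struct mock_ctx;                         /* opaque stand-in context type */

    struct smoketest {
        unsigned int ncontexts;
        struct mock_ctx **contexts;
    };

    struct mock_ctx *mock_ctx_create(void);  /* hypothetical constructor */
    void mock_ctx_close(struct mock_ctx *ctx);

    static int smoketest_setup(struct smoketest *t)
    {
        unsigned int n;

        t->contexts = kcalloc(t->ncontexts, sizeof(*t->contexts), GFP_KERNEL);
        if (!t->contexts)
            return -ENOMEM;

        for (n = 0; n < t->ncontexts; n++) {
            t->contexts[n] = mock_ctx_create();
            if (!t->contexts[n])
                return -ENOMEM;              /* teardown below tolerates the hole */
        }
        return 0;
    }

    static void smoketest_teardown(struct smoketest *t)
    {
        unsigned int n;

        for (n = 0; n < t->ncontexts; n++) {
            if (!t->contexts[n])
                continue;
            mock_ctx_close(t->contexts[n]);
        }
        kfree(t->contexts);
    }
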
/drivers/net/ethernet/marvell/octeontx2/
Kconfig:29  NPA Aura/Pool contexts.
/drivers/gpu/drm/i915/gem/selftests/
mock_context.c:73  init_contexts(&i915->gem.contexts); in mock_init_contexts()
/drivers/gpu/drm/i915/gem/
i915_gem_context.c:1001  spin_lock(&ctx->i915->gem.contexts.lock); in i915_gem_context_release()
1003 spin_unlock(&ctx->i915->gem.contexts.lock); in i915_gem_context_release()
1436 init_contexts(&i915->gem.contexts); in i915_gem_init__contexts()
1456 spin_lock(&i915->gem.contexts.lock); in gem_context_register()
1457 list_add_tail(&ctx->link, &i915->gem.contexts.list); in gem_context_register()
1458 spin_unlock(&i915->gem.contexts.lock); in gem_context_register()
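
Note: in i915 itself the device-wide bookkeeping behind i915->gem.contexts is a plain list protected by a spinlock: gem_context_register() adds contexts with list_add_tail() under the lock, i915_gem_context_release() removes them under the same lock, and readers such as the sysfs L3-remap code walk the list with list_for_each_entry(). A sketch of that lock-plus-list idiom with stand-in type names:

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct ctx_tracker {                     /* stand-in for i915->gem.contexts */
        spinlock_t lock;
        struct list_head list;
    };

    struct my_gem_context {
        struct list_head link;               /* chained onto ctx_tracker.list */
        int remap_slice;                     /* example per-context state a walker might set */
    };

    static void init_contexts(struct ctx_tracker *t)
    {
        spin_lock_init(&t->lock);
        INIT_LIST_HEAD(&t->list);
    }

    static void context_register(struct ctx_tracker *t, struct my_gem_context *ctx)
    {
        spin_lock(&t->lock);
        list_add_tail(&ctx->link, &t->list);
        spin_unlock(&t->lock);
    }

    static void context_release(struct ctx_tracker *t, struct my_gem_context *ctx)
    {
        spin_lock(&t->lock);
        list_del(&ctx->link);
        spin_unlock(&t->lock);
    }

    static void mark_all_for_remap(struct ctx_tracker *t)
    {
        struct my_gem_context *ctx;

        spin_lock(&t->lock);
        list_for_each_entry(ctx, &t->list, link)
            ctx->remap_slice = 1;            /* as the i915_l3_write() walk does per context */
        spin_unlock(&t->lock);
    }
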
/drivers/misc/ocxl/
sysfs.c:92  __ATTR_RO(contexts),
/drivers/gpu/drm/
drm_ioc32.c:550  u32 contexts; member
565 res.contexts = compat_ptr(res32.contexts); in compat_drm_resctx()
drm_context.c:346  if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx))) in drm_legacy_resctx()
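
Note: the two legacy DRM hits are the 32-bit compat path for the context-resource ioctl: the compat struct carries the user pointer to the output array as a plain u32 (drm_ioc32.c:550), compat_ptr() widens it back into a __user pointer, and the native handler then copy_to_user()s each context entry into that array (drm_context.c:346). A hedged sketch of the conversion with stand-in struct and function names, copying bare IDs instead of the real struct drm_ctx:

    #include <linux/compat.h>
    #include <linux/uaccess.h>

    struct my_ctx_res32 {                    /* 32-bit userspace layout (stand-in) */
        int count;
        u32 contexts;                        /* user pointer squeezed into a u32 */
    };

    struct my_ctx_res {                      /* native layout (stand-in) */
        int count;
        unsigned int __user *contexts;
    };

    static void my_compat_resctx(const struct my_ctx_res32 *res32,
                                 struct my_ctx_res *res)
    {
        res->count = res32->count;
        /* Widen the 32-bit user pointer back into a proper __user pointer. */
        res->contexts = compat_ptr(res32->contexts);
    }

    static int my_copy_ctx_ids(struct my_ctx_res *res, unsigned int nr)
    {
        unsigned int i;

        for (i = 0; i < nr; i++) {
            unsigned int id = i;             /* placeholder for the real context ID */

            if (copy_to_user(&res->contexts[i], &id, sizeof(id)))
                return -EFAULT;
        }
        return 0;
    }
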
/drivers/gpu/drm/i915/gt/
selftest_execlists.c:3476  struct i915_gem_context **contexts; member
3487 return smoke->contexts[i915_prandom_u32_max_state(smoke->ncontext, in smoke_context()
3657 smoke.contexts = kmalloc_array(smoke.ncontext, in live_preempt_smoke()
3658 sizeof(*smoke.contexts), in live_preempt_smoke()
3660 if (!smoke.contexts) in live_preempt_smoke()
3687 smoke.contexts[n] = kernel_context(smoke.gt->i915, NULL); in live_preempt_smoke()
3688 if (!smoke.contexts[n]) in live_preempt_smoke()
3707 if (!smoke.contexts[n]) in live_preempt_smoke()
3709 kernel_context_close(smoke.contexts[n]); in live_preempt_smoke()
3715 kfree(smoke.contexts); in live_preempt_smoke()
