
Searched refs:context (Results 1 – 25 of 1248) sorted by relevance


/drivers/misc/vmw_vmci/
vmci_context.c
44 static void ctx_signal_notify(struct vmci_ctx *context) in ctx_signal_notify() argument
46 *context->notify = true; in ctx_signal_notify()
49 static void ctx_clear_notify(struct vmci_ctx *context) in ctx_clear_notify() argument
51 *context->notify = false; in ctx_clear_notify()
58 static void ctx_clear_notify_call(struct vmci_ctx *context) in ctx_clear_notify_call() argument
60 if (context->pending_datagrams == 0 && in ctx_clear_notify_call()
61 vmci_handle_arr_get_size(context->pending_doorbell_array) == 0) in ctx_clear_notify_call()
62 ctx_clear_notify(context); in ctx_clear_notify_call()
69 void vmci_ctx_check_signal_notify(struct vmci_ctx *context) in vmci_ctx_check_signal_notify() argument
71 spin_lock(&context->lock); in vmci_ctx_check_signal_notify()
[all …]
vmci_route.c
42 if (VMCI_INVALID_ID == dst->context) in vmci_route()
46 if (VMCI_HYPERVISOR_CONTEXT_ID == dst->context) { in vmci_route()
64 if (VMCI_HOST_CONTEXT_ID == src->context) in vmci_route()
75 if (VMCI_INVALID_ID == src->context && in vmci_route()
77 src->context = vmci_get_context_id(); in vmci_route()
85 if (VMCI_HOST_CONTEXT_ID == dst->context) { in vmci_route()
94 if (src->context == VMCI_HYPERVISOR_CONTEXT_ID) { in vmci_route()
113 if (VMCI_INVALID_ID == src->context) in vmci_route()
114 src->context = vmci_get_context_id(); in vmci_route()
130 if (VMCI_INVALID_ID == src->context) { in vmci_route()
[all …]
vmci_host.c
85 struct vmci_ctx *context; member
143 vmci_ctx_destroy(vmci_host_dev->context); in vmci_host_close()
144 vmci_host_dev->context = NULL; in vmci_host_close()
168 struct vmci_ctx *context; in vmci_host_poll() local
176 context = vmci_host_dev->context; in vmci_host_poll()
180 poll_wait(filp, &context->host_context.wait_queue, in vmci_host_poll()
183 spin_lock(&context->lock); in vmci_host_poll()
184 if (context->pending_datagrams > 0 || in vmci_host_poll()
186 context->pending_doorbell_array) > 0) { in vmci_host_poll()
189 spin_unlock(&context->lock); in vmci_host_poll()
[all …]
vmci_context.h
129 void vmci_ctx_destroy(struct vmci_ctx *context);
131 bool vmci_ctx_supports_host_qp(struct vmci_ctx *context);
133 int vmci_ctx_dequeue_datagram(struct vmci_ctx *context,
137 void vmci_ctx_put(struct vmci_ctx *context);
147 int vmci_ctx_qp_create(struct vmci_ctx *context, struct vmci_handle handle);
148 int vmci_ctx_qp_destroy(struct vmci_ctx *context, struct vmci_handle handle);
149 bool vmci_ctx_qp_exists(struct vmci_ctx *context, struct vmci_handle handle);
151 void vmci_ctx_check_signal_notify(struct vmci_ctx *context);
152 void vmci_ctx_unset_notify(struct vmci_ctx *context);
167 static inline u32 vmci_ctx_get_id(struct vmci_ctx *context) in vmci_ctx_get_id() argument
[all …]
/drivers/gpu/drm/etnaviv/
etnaviv_mmu.c
16 static void etnaviv_context_unmap(struct etnaviv_iommu_context *context, in etnaviv_context_unmap() argument
29 unmapped_page = context->global->ops->unmap(context, iova, in etnaviv_context_unmap()
39 static int etnaviv_context_map(struct etnaviv_iommu_context *context, in etnaviv_context_map() argument
55 ret = context->global->ops->map(context, iova, paddr, pgsize, in etnaviv_context_map()
67 etnaviv_context_unmap(context, orig_iova, orig_size - size); in etnaviv_context_map()
72 static int etnaviv_iommu_map(struct etnaviv_iommu_context *context, u32 iova, in etnaviv_iommu_map() argument
79 if (!context || !sgt) in etnaviv_iommu_map()
88 ret = etnaviv_context_map(context, da, pa, bytes, prot); in etnaviv_iommu_map()
98 etnaviv_context_unmap(context, iova, da - iova); in etnaviv_iommu_map()
102 static void etnaviv_iommu_unmap(struct etnaviv_iommu_context *context, u32 iova, in etnaviv_iommu_unmap() argument
[all …]
etnaviv_iommu.c
28 to_v1_context(struct etnaviv_iommu_context *context) in to_v1_context() argument
30 return container_of(context, struct etnaviv_iommuv1_context, base); in to_v1_context()
33 static void etnaviv_iommuv1_free(struct etnaviv_iommu_context *context) in etnaviv_iommuv1_free() argument
35 struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); in etnaviv_iommuv1_free()
37 drm_mm_takedown(&context->mm); in etnaviv_iommuv1_free()
39 dma_free_wc(context->global->dev, PT_SIZE, v1_context->pgtable_cpu, in etnaviv_iommuv1_free()
42 context->global->v1.shared_context = NULL; in etnaviv_iommuv1_free()
47 static int etnaviv_iommuv1_map(struct etnaviv_iommu_context *context, in etnaviv_iommuv1_map() argument
51 struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); in etnaviv_iommuv1_map()
62 static size_t etnaviv_iommuv1_unmap(struct etnaviv_iommu_context *context, in etnaviv_iommuv1_unmap() argument
[all …]
etnaviv_iommu_v2.c
42 to_v2_context(struct etnaviv_iommu_context *context) in to_v2_context() argument
44 return container_of(context, struct etnaviv_iommuv2_context, base); in to_v2_context()
47 static void etnaviv_iommuv2_free(struct etnaviv_iommu_context *context) in etnaviv_iommuv2_free() argument
49 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_free()
52 drm_mm_takedown(&context->mm); in etnaviv_iommuv2_free()
56 dma_free_wc(context->global->dev, SZ_4K, in etnaviv_iommuv2_free()
61 dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu, in etnaviv_iommuv2_free()
64 clear_bit(v2_context->id, context->global->v2.pta_alloc); in etnaviv_iommuv2_free()
92 static int etnaviv_iommuv2_map(struct etnaviv_iommu_context *context, in etnaviv_iommuv2_map() argument
96 struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); in etnaviv_iommuv2_map()
[all …]
/drivers/net/ethernet/mellanox/mlx4/
en_resources.c
42 int user_prio, struct mlx4_qp_context *context) in mlx4_en_fill_qp_context() argument
47 memset(context, 0, sizeof(*context)); in mlx4_en_fill_qp_context()
48 context->flags = cpu_to_be32(7 << 16 | rss << MLX4_RSS_QPC_FLAG_OFFSET); in mlx4_en_fill_qp_context()
49 context->pd = cpu_to_be32(mdev->priv_pdn); in mlx4_en_fill_qp_context()
50 context->mtu_msgmax = 0xff; in mlx4_en_fill_qp_context()
52 context->rq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4); in mlx4_en_fill_qp_context()
54 context->sq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4); in mlx4_en_fill_qp_context()
56 context->params2 |= cpu_to_be32(MLX4_QP_BIT_FPP); in mlx4_en_fill_qp_context()
59 context->sq_size_stride = ilog2(TXBB_SIZE) - 4; in mlx4_en_fill_qp_context()
61 context->usr_page = cpu_to_be32(mlx4_to_hw_uar_index(mdev->dev, in mlx4_en_fill_qp_context()
[all …]
/drivers/usb/image/
microtek.c
187 MTS_DEBUG("transfer = 0x%x context = 0x%x\n",(int)transfer,(int)context ); \
188 …MTS_DEBUG("status = 0x%x data-length = 0x%x sent = 0x%x\n",transfer->status,(int)context->data_len…
189 mts_debug_dump(context->instance);\
204 struct mts_transfer_context* context = (struct mts_transfer_context*)transfer->context; \
381 context->instance->usb_dev, in mts_int_submit_urb()
386 context in mts_int_submit_urb()
392 set_host_byte(context->srb, DID_ERROR); in mts_int_submit_urb()
403 if ( likely(context->final_callback != NULL) ) in mts_transfer_cleanup()
404 context->final_callback(context->srb); in mts_transfer_cleanup()
411 context->srb->result &= MTS_SCSI_ERR_MASK; in mts_transfer_done()
[all …]
/drivers/infiniband/hw/hns/
hns_roce_restrack.c
13 struct hns_roce_v2_cq_context *context) in hns_roce_fill_cq() argument
16 roce_get_field(context->byte_4_pg_ceqn, in hns_roce_fill_cq()
22 roce_get_field(context->byte_4_pg_ceqn, in hns_roce_fill_cq()
28 roce_get_field(context->byte_8_cqn, in hns_roce_fill_cq()
34 roce_get_field(context->byte_16_hop_addr, in hns_roce_fill_cq()
41 roce_get_field(context->byte_28_cq_pi, in hns_roce_fill_cq()
48 roce_get_field(context->byte_32_cq_ci, in hns_roce_fill_cq()
55 roce_get_field(context->byte_56_cqe_period_maxcnt, in hns_roce_fill_cq()
62 roce_get_field(context->byte_56_cqe_period_maxcnt, in hns_roce_fill_cq()
68 roce_get_field(context->byte_52_cqe_cnt, in hns_roce_fill_cq()
[all …]
hns_roce_cmd.c
96 struct hns_roce_cmd_context *context = in hns_roce_cmd_event() local
97 &hr_dev->cmd.context[token % hr_dev->cmd.max_cmds]; in hns_roce_cmd_event()
99 if (token != context->token) in hns_roce_cmd_event()
102 context->result = (status == HNS_ROCE_CMD_SUCCESS) ? 0 : (-EIO); in hns_roce_cmd_event()
103 context->out_param = out_param; in hns_roce_cmd_event()
104 complete(&context->done); in hns_roce_cmd_event()
114 struct hns_roce_cmd_context *context; in __hns_roce_cmd_mbox_wait() local
120 context = &cmd->context[cmd->free_head]; in __hns_roce_cmd_mbox_wait()
121 context->token += cmd->token_mask + 1; in __hns_roce_cmd_mbox_wait()
122 cmd->free_head = context->next; in __hns_roce_cmd_mbox_wait()
[all …]
hns_roce_hw_v1.c
2473 struct hns_roce_qp_context *context, in hns_roce_v1_qp_modify() argument
2541 memcpy(mailbox->buf, context, sizeof(*context)); in hns_roce_v1_qp_modify()
2579 struct hns_roce_sqp_context *context; in hns_roce_v1_m_sqp() local
2587 context = kzalloc(sizeof(*context), GFP_KERNEL); in hns_roce_v1_m_sqp()
2588 if (!context) in hns_roce_v1_m_sqp()
2596 roce_set_field(context->qp1c_bytes_4, in hns_roce_v1_m_sqp()
2600 roce_set_field(context->qp1c_bytes_4, in hns_roce_v1_m_sqp()
2604 roce_set_field(context->qp1c_bytes_4, QP1C_BYTES_4_PD_M, in hns_roce_v1_m_sqp()
2607 context->sq_rq_bt_l = cpu_to_le32(dma_handle); in hns_roce_v1_m_sqp()
2608 roce_set_field(context->qp1c_bytes_12, in hns_roce_v1_m_sqp()
[all …]
/drivers/pci/hotplug/
acpiphp_glue.c
50 static void hotplug_event(u32 type, struct acpiphp_context *context);
61 struct acpiphp_context *context; in acpiphp_init_context() local
63 context = kzalloc(sizeof(*context), GFP_KERNEL); in acpiphp_init_context()
64 if (!context) in acpiphp_init_context()
67 context->refcount = 1; in acpiphp_init_context()
68 context->hp.notify = acpiphp_hotplug_notify; in acpiphp_init_context()
69 context->hp.fixup = acpiphp_post_dock_fixup; in acpiphp_init_context()
70 acpi_set_hp_context(adev, &context->hp); in acpiphp_init_context()
71 return context; in acpiphp_init_context()
82 struct acpiphp_context *context; in acpiphp_get_context() local
[all …]
/drivers/gpu/drm/i915/selftests/
i915_syncmap.c
163 static int check_one(struct i915_syncmap **sync, u64 context, u32 seqno) in check_one() argument
167 err = i915_syncmap_set(sync, context, seqno); in check_one()
173 context, (*sync)->height, (*sync)->prefix); in check_one()
179 context); in check_one()
193 if (!i915_syncmap_is_later(sync, context, seqno)) { in check_one()
195 context, seqno); in check_one()
217 u64 context = i915_prandom_u64_state(&prng); in igt_syncmap_one() local
225 err = check_one(&sync, context, in igt_syncmap_one()
238 static int check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno) in check_leaf() argument
242 err = i915_syncmap_set(sync, context, seqno); in check_leaf()
[all …]
/drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/
dce110_clk_mgr.c
92 uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context) in dce110_get_min_vblank_time_us() argument
97 for (j = 0; j < context->stream_count; j++) { in dce110_get_min_vblank_time_us()
98 struct dc_stream_state *stream = context->streams[j]; in dce110_get_min_vblank_time_us()
120 const struct dc_state *context, in dce110_fill_display_configs() argument
126 for (j = 0; j < context->stream_count; j++) { in dce110_fill_display_configs()
129 const struct dc_stream_state *stream = context->streams[j]; in dce110_fill_display_configs()
135 if (stream == context->res_ctx.pipe_ctx[k].stream) { in dce110_fill_display_configs()
136 pipe_ctx = &context->res_ctx.pipe_ctx[k]; in dce110_fill_display_configs()
174 struct dc_state *context) in dce11_pplib_apply_display_requirements() argument
176 struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg; in dce11_pplib_apply_display_requirements()
[all …]
/drivers/gpu/drm/
drm_lock.c
47 static int drm_lock_take(struct drm_lock_data *lock_data, unsigned int context);
60 unsigned int context) in drm_lock_take() argument
71 new = context | _DRM_LOCK_HELD | in drm_lock_take()
79 if (_DRM_LOCKING_CONTEXT(old) == context) { in drm_lock_take()
81 if (context != DRM_KERNEL_CONTEXT) { in drm_lock_take()
83 context); in drm_lock_take()
89 if ((_DRM_LOCKING_CONTEXT(new)) == context && (new & _DRM_LOCK_HELD)) { in drm_lock_take()
109 unsigned int context) in drm_lock_transfer() argument
117 new = context | _DRM_LOCK_HELD; in drm_lock_transfer()
124 unsigned int context) in drm_legacy_lock_free() argument
[all …]
/drivers/base/regmap/
regmap-i2c.c
15 static int regmap_smbus_byte_reg_read(void *context, unsigned int reg, in regmap_smbus_byte_reg_read() argument
18 struct device *dev = context; in regmap_smbus_byte_reg_read()
34 static int regmap_smbus_byte_reg_write(void *context, unsigned int reg, in regmap_smbus_byte_reg_write() argument
37 struct device *dev = context; in regmap_smbus_byte_reg_write()
51 static int regmap_smbus_word_reg_read(void *context, unsigned int reg, in regmap_smbus_word_reg_read() argument
54 struct device *dev = context; in regmap_smbus_word_reg_read()
70 static int regmap_smbus_word_reg_write(void *context, unsigned int reg, in regmap_smbus_word_reg_write() argument
73 struct device *dev = context; in regmap_smbus_word_reg_write()
87 static int regmap_smbus_word_read_swapped(void *context, unsigned int reg, in regmap_smbus_word_read_swapped() argument
90 struct device *dev = context; in regmap_smbus_word_read_swapped()
[all …]
regmap-spmi.c
16 static int regmap_spmi_base_read(void *context, in regmap_spmi_base_read() argument
26 err = spmi_register_read(context, addr++, val++); in regmap_spmi_base_read()
31 static int regmap_spmi_base_gather_write(void *context, in regmap_spmi_base_gather_write() argument
46 err = spmi_register_zero_write(context, *data); in regmap_spmi_base_gather_write()
56 err = spmi_register_write(context, addr, *data); in regmap_spmi_base_gather_write()
69 static int regmap_spmi_base_write(void *context, const void *data, in regmap_spmi_base_write() argument
73 return regmap_spmi_base_gather_write(context, data, 1, data + 1, in regmap_spmi_base_write()
105 static int regmap_spmi_ext_read(void *context, in regmap_spmi_ext_read() argument
124 err = spmi_ext_register_read(context, addr, val, len); in regmap_spmi_ext_read()
136 err = spmi_ext_register_readl(context, addr, val, len); in regmap_spmi_ext_read()
[all …]
/drivers/gpu/drm/amd/display/dc/core/
dc.c
770 struct dc_state *context) in disable_all_writeback_pipes_for_stream() argument
780 void apply_ctx_interdependent_lock(struct dc *dc, struct dc_state *context, struct dc_stream_state … in apply_ctx_interdependent_lock() argument
786 dc->hwss.interdependent_update_lock(dc, context, lock); in apply_ctx_interdependent_lock()
789 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; in apply_ctx_interdependent_lock()
802 static void disable_dangling_plane(struct dc *dc, struct dc_state *context) in disable_dangling_plane() argument
818 for (j = 0; j < context->stream_count; j++) { in disable_dangling_plane()
819 if (old_stream == context->streams[j]) { in disable_dangling_plane()
850 struct dc_state *context) in disable_vbios_mode_if_required() argument
860 pipe = &context->res_ctx.pipe_ctx[i]; in disable_vbios_mode_if_required()
909 static void wait_for_no_pipes_pending(struct dc *dc, struct dc_state *context) in wait_for_no_pipes_pending() argument
[all …]
/drivers/media/usb/as102/
as10x_cmd_cfg.c
34 sizeof(pcmd->body.context.req)); in as10x_cmd_get_context()
37 pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT); in as10x_cmd_get_context()
38 pcmd->body.context.req.tag = cpu_to_le16(tag); in as10x_cmd_get_context()
39 pcmd->body.context.req.type = cpu_to_le16(GET_CONTEXT_DATA); in as10x_cmd_get_context()
45 sizeof(pcmd->body.context.req) in as10x_cmd_get_context()
48 sizeof(prsp->body.context.rsp) in as10x_cmd_get_context()
63 *pvalue = le32_to_cpu((__force __le32)prsp->body.context.rsp.reg_val.u.value32); in as10x_cmd_get_context()
90 sizeof(pcmd->body.context.req)); in as10x_cmd_set_context()
93 pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT); in as10x_cmd_set_context()
95 pcmd->body.context.req.reg_val.u.value32 = (__force u32)cpu_to_le32(value); in as10x_cmd_set_context()
[all …]
/drivers/gpu/drm/amd/display/dc/dce/
dce_clk_mgr.c
184 static uint32_t get_max_pixel_clock_for_all_paths(struct dc_state *context) in get_max_pixel_clock_for_all_paths() argument
190 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; in get_max_pixel_clock_for_all_paths()
215 struct dc_state *context) in dce_get_required_clocks_state() argument
220 int max_pix_clk = get_max_pixel_clock_for_all_paths(context); in dce_get_required_clocks_state()
227 if (context->bw_ctx.bw.dce.dispclk_khz > in dce_get_required_clocks_state()
237 < context->bw_ctx.bw.dce.dispclk_khz) in dce_get_required_clocks_state()
492 const struct dc_state *context, in dce110_fill_display_configs() argument
498 for (j = 0; j < context->stream_count; j++) { in dce110_fill_display_configs()
501 const struct dc_stream_state *stream = context->streams[j]; in dce110_fill_display_configs()
507 if (stream == context->res_ctx.pipe_ctx[k].stream) { in dce110_fill_display_configs()
[all …]
/drivers/staging/wfx/
debug.c
244 struct dbgfs_hif_msg *context = file->private_data; in wfx_send_hif_msg_write() local
245 struct wfx_dev *wdev = context->wdev; in wfx_send_hif_msg_write()
248 if (completion_done(&context->complete)) { in wfx_send_hif_msg_write()
258 memset(context->reply, 0xFF, sizeof(context->reply)); in wfx_send_hif_msg_write()
266 context->ret = wfx_cmd_send(wdev, request, context->reply, in wfx_send_hif_msg_write()
267 sizeof(context->reply), false); in wfx_send_hif_msg_write()
270 complete(&context->complete); in wfx_send_hif_msg_write()
277 struct dbgfs_hif_msg *context = file->private_data; in wfx_send_hif_msg_read() local
280 if (count > sizeof(context->reply)) in wfx_send_hif_msg_read()
282 ret = wait_for_completion_interruptible(&context->complete); in wfx_send_hif_msg_read()
[all …]
/drivers/infiniband/hw/hfi1/
chip.c
1061 unsigned int context, u64 err_status);
1064 unsigned int context, u64 err_status);
1066 unsigned int context, u64 err_status);
1219 u64 (*rw_cntr)(const struct cntr_entry *, void *context, int vl,
1412 void *context, int vl, int mode, u64 data) in dev_access_u32_csr() argument
1414 struct hfi1_devdata *dd = context; in dev_access_u32_csr()
1429 void *context, int idx, int mode, u64 data) in access_sde_err_cnt() argument
1431 struct hfi1_devdata *dd = (struct hfi1_devdata *)context; in access_sde_err_cnt()
1439 void *context, int idx, int mode, u64 data) in access_sde_int_cnt() argument
1441 struct hfi1_devdata *dd = (struct hfi1_devdata *)context; in access_sde_int_cnt()
[all …]
/drivers/gpu/drm/amd/display/dc/dcn30/
dcn30_resource.c
1449 struct dc *dc, struct dc_state *context, in dcn30_populate_dml_pipes_from_context() argument
1453 struct resource_context *res_ctx = &context->res_ctx; in dcn30_populate_dml_pipes_from_context()
1455 dcn20_populate_dml_pipes_from_context(dc, context, pipes); in dcn30_populate_dml_pipes_from_context()
1582 struct dc_state *context, in dcn30_set_mcif_arb_params() argument
1587 struct display_mode_lib *dml = &context->bw_ctx.dml; in dcn30_set_mcif_arb_params()
1595 if (!context->res_ctx.pipe_ctx[i].stream) in dcn30_set_mcif_arb_params()
1599 …struct dc_writeback_info *writeback_info = &context->res_ctx.pipe_ctx[i].stream->writeback_info[j]; in dcn30_set_mcif_arb_params()
1605 wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[dwb_pipe]; in dcn30_set_mcif_arb_params()
1617 …wb_arb_params->time_per_pixel = (1000000 << 6) / context->res_ctx.pipe_ctx[i].stream->phy_pix_clk;… in dcn30_set_mcif_arb_params()
1903 struct dc_state *context, in dcn30_find_split_pipe() argument
[all …]
/drivers/gpu/drm/amd/display/dc/dcn20/
dcn20_resource.c
1652 enum dc_status dcn20_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc… in dcn20_build_mapped_resource() argument
1655 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); in dcn20_build_mapped_resource()
2007 struct dc *dc, struct dc_state *context, display_e2e_pipe_params_st *pipes) argument
2011 struct resource_context *res_ctx = &context->res_ctx;
2426 struct dc_state *context, argument
2438 if (!context->res_ctx.pipe_ctx[i].stream)
2442 if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].wb_enabled == false)
2446 wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[dwb_pipe];
2448 …if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.out_format == dwb_scaler_mod…
2449 …if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.output_depth == DWB_OUTPUT_P…
[all …]
