/drivers/media/platform/mtk-vcodec/venc/ |
D | venc_h264_if.c |
    157  static inline u32 h264_read_reg(struct venc_h264_inst *inst, u32 addr)  in h264_read_reg() argument
    159          return readl(inst->hw_base + addr);  in h264_read_reg()
    162  static unsigned int h264_get_profile(struct venc_h264_inst *inst,  in h264_get_profile() argument
    173          mtk_vcodec_err(inst, "unsupported CONSTRAINED_BASELINE");  in h264_get_profile()
    176          mtk_vcodec_err(inst, "unsupported EXTENDED");  in h264_get_profile()
    179          mtk_vcodec_debug(inst, "unsupported profile %d", profile);  in h264_get_profile()
    184  static unsigned int h264_get_level(struct venc_h264_inst *inst,  in h264_get_level() argument
    189          mtk_vcodec_err(inst, "unsupported 1B");  in h264_get_level()
    218          mtk_vcodec_debug(inst, "unsupported level %d", level);  in h264_get_level()
    223  static void h264_enc_free_work_buf(struct venc_h264_inst *inst)  in h264_enc_free_work_buf() argument
    [all …]
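The MediaTek encoder register helpers listed above are thin wrappers around readl() on the instance's mapped register base. A minimal sketch of that accessor pattern, with a made-up structure name (the real driver keeps far more state in venc_h264_inst):

    #include <linux/io.h>
    #include <linux/types.h>

    /* Trimmed-down instance: just the mapped register window. */
    struct demo_enc_inst {
            void __iomem *hw_base;  /* e.g. obtained from devm_ioremap_resource() */
    };

    static inline u32 demo_read_reg(struct demo_enc_inst *inst, u32 addr)
    {
            return readl(inst->hw_base + addr);     /* 32-bit MMIO read at hw_base + addr */
    }

    static inline void demo_write_reg(struct demo_enc_inst *inst, u32 addr, u32 val)
    {
            writel(val, inst->hw_base + addr);      /* note: value first, address second */
    }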
|
D | venc_vp8_if.c |
    145  static inline u32 vp8_enc_read_reg(struct venc_vp8_inst *inst, u32 addr)  in vp8_enc_read_reg() argument
    147          return readl(inst->hw_base + addr);  in vp8_enc_read_reg()
    150  static void vp8_enc_free_work_buf(struct venc_vp8_inst *inst)  in vp8_enc_free_work_buf() argument
    154          mtk_vcodec_debug_enter(inst);  in vp8_enc_free_work_buf()
    158          if (inst->work_bufs[i].size == 0)  in vp8_enc_free_work_buf()
    160          mtk_vcodec_mem_free(inst->ctx, &inst->work_bufs[i]);  in vp8_enc_free_work_buf()
    163          mtk_vcodec_debug_leave(inst);  in vp8_enc_free_work_buf()
    166  static int vp8_enc_alloc_work_buf(struct venc_vp8_inst *inst)  in vp8_enc_alloc_work_buf() argument
    170          struct venc_vp8_vpu_buf *wb = inst->vsi->work_bufs;  in vp8_enc_alloc_work_buf()
    172          mtk_vcodec_debug_enter(inst);  in vp8_enc_alloc_work_buf()
    [all …]
|
/drivers/media/platform/mtk-vcodec/vdec/ |
D | vdec_vp9_if.c |
    210  static bool vp9_is_sf_ref_fb(struct vdec_vp9_inst *inst, struct vdec_fb *fb)  in vp9_is_sf_ref_fb() argument
    213          struct vdec_vp9_vsi *vsi = inst->vsi;  in vp9_is_sf_ref_fb()
    223                  *inst, void *addr)  in vp9_rm_from_fb_use_list()
    228          list_for_each_entry(node, &inst->fb_use_list, list) {  in vp9_rm_from_fb_use_list()
    232                          &inst->available_fb_node_list);  in vp9_rm_from_fb_use_list()
    239  static void vp9_add_to_fb_free_list(struct vdec_vp9_inst *inst,  in vp9_add_to_fb_free_list() argument
    245          node = list_first_entry_or_null(&inst->available_fb_node_list,  in vp9_add_to_fb_free_list()
    250          list_move_tail(&node->list, &inst->fb_free_list);  in vp9_add_to_fb_free_list()
    253          mtk_vcodec_debug(inst, "No free fb node");  in vp9_add_to_fb_free_list()
    265  static void vp9_ref_cnt_fb(struct vdec_vp9_inst *inst, int *idx,  in vp9_ref_cnt_fb() argument
    [all …]
|
D | vdec_vp8_if.c |
    177  static void get_hw_reg_base(struct vdec_vp8_inst *inst)  in get_hw_reg_base() argument
    179          inst->reg_base.top = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_TOP);  in get_hw_reg_base()
    180          inst->reg_base.cm = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_CM);  in get_hw_reg_base()
    181          inst->reg_base.hwd = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_HWD);  in get_hw_reg_base()
    182          inst->reg_base.sys = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_SYS);  in get_hw_reg_base()
    183          inst->reg_base.misc = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_MISC);  in get_hw_reg_base()
    184          inst->reg_base.ld = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_LD);  in get_hw_reg_base()
    185          inst->reg_base.hwb = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_HWB);  in get_hw_reg_base()
    188  static void write_hw_segmentation_data(struct vdec_vp8_inst *inst)  in write_hw_segmentation_data() argument
    193          void __iomem *cm = inst->reg_base.cm;  in write_hw_segmentation_data()
    [all …]
|
D | vdec_h264_if.c |
    144  static int allocate_predication_buf(struct vdec_h264_inst *inst)  in allocate_predication_buf() argument
    148          inst->pred_buf.size = BUF_PREDICTION_SZ;  in allocate_predication_buf()
    149          err = mtk_vcodec_mem_alloc(inst->ctx, &inst->pred_buf);  in allocate_predication_buf()
    151          mtk_vcodec_err(inst, "failed to allocate ppl buf");  in allocate_predication_buf()
    155          inst->vsi->pred_buf_dma = inst->pred_buf.dma_addr;  in allocate_predication_buf()
    159  static void free_predication_buf(struct vdec_h264_inst *inst)  in free_predication_buf() argument
    163          mtk_vcodec_debug_enter(inst);  in free_predication_buf()
    165          inst->vsi->pred_buf_dma = 0;  in free_predication_buf()
    166          mem = &inst->pred_buf;  in free_predication_buf()
    168          mtk_vcodec_mem_free(inst->ctx, mem);  in free_predication_buf()
    [all …]
|
/drivers/media/platform/qcom/venus/ |
D | vdec.c |
    106  find_format(struct venus_inst *inst, u32 pixfmt, u32 type)  in find_format() argument
    121                  !venus_helper_check_codec(inst, fmt[i].pixfmt))  in find_format()
    128  find_format_by_index(struct venus_inst *inst, unsigned int index, u32 type)  in find_format_by_index() argument
    149                  !venus_helper_check_codec(inst, fmt[i].pixfmt))  in find_format_by_index()
    156  vdec_try_fmt_common(struct venus_inst *inst, struct v4l2_format *f)  in vdec_try_fmt_common() argument
    166          fmt = find_format(inst, pixmp->pixelformat, f->type);  in vdec_try_fmt_common()
    174          fmt = find_format(inst, pixmp->pixelformat, f->type);  in vdec_try_fmt_common()
    179          pixmp->width = clamp(pixmp->width, inst->cap_width.min,  in vdec_try_fmt_common()
    180                               inst->cap_width.max);  in vdec_try_fmt_common()
    181          pixmp->height = clamp(pixmp->height, inst->cap_height.min,  in vdec_try_fmt_common()
    [all …]
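vdec_try_fmt_common() above bounds the requested width and height to the instance's capability range with clamp(). A tiny sketch of that step, using invented limits rather than the Venus firmware capabilities:

    #include <linux/kernel.h>       /* clamp() */
    #include <linux/types.h>

    struct demo_cap {
            u32 min;
            u32 max;
    };

    /* Illustrative limits; the real driver derives these from the firmware. */
    static const struct demo_cap demo_cap_width  = { .min = 96, .max = 4096 };
    static const struct demo_cap demo_cap_height = { .min = 96, .max = 2160 };

    /* Bound a requested resolution to the advertised range, as try_fmt does. */
    static void demo_try_resolution(u32 *width, u32 *height)
    {
            *width  = clamp(*width,  demo_cap_width.min,  demo_cap_width.max);
            *height = clamp(*height, demo_cap_height.min, demo_cap_height.max);
    }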
|
D | venc.c |
    91   find_format(struct venus_inst *inst, u32 pixfmt, u32 type)  in find_format() argument
    106                  !venus_helper_check_codec(inst, fmt[i].pixfmt))  in find_format()
    113  find_format_by_index(struct venus_inst *inst, unsigned int index, u32 type)  in find_format_by_index() argument
    134                  !venus_helper_check_codec(inst, fmt[i].pixfmt))  in find_format_by_index()
    254          struct venus_inst *inst = to_inst(file);  in venc_enum_fmt() local
    257          fmt = find_format_by_index(inst, f->index, f->type);  in venc_enum_fmt()
    270  venc_try_fmt_common(struct venus_inst *inst, struct v4l2_format *f)  in venc_try_fmt_common() argument
    280          fmt = find_format(inst, pixmp->pixelformat, f->type);  in venc_try_fmt_common()
    288          fmt = find_format(inst, pixmp->pixelformat, f->type);  in venc_try_fmt_common()
    293          pixmp->width = clamp(pixmp->width, inst->cap_width.min,  in venc_try_fmt_common()
    [all …]
|
D | hfi.c |
    174  static int wait_session_msg(struct venus_inst *inst)  in wait_session_msg() argument
    178          ret = wait_for_completion_timeout(&inst->done, TIMEOUT);  in wait_session_msg()
    182          if (inst->error != HFI_ERR_NONE)  in wait_session_msg()
    188  int hfi_session_create(struct venus_inst *inst, const struct hfi_inst_ops *ops)  in hfi_session_create() argument
    190          struct venus_core *core = inst->core;  in hfi_session_create()
    195          inst->state = INST_UNINIT;  in hfi_session_create()
    196          init_completion(&inst->done);  in hfi_session_create()
    197          inst->ops = ops;  in hfi_session_create()
    200          list_add_tail(&inst->list, &core->instances);  in hfi_session_create()
    208  int hfi_session_init(struct venus_inst *inst, u32 pixfmt)  in hfi_session_init() argument
    [all …]
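wait_session_msg() above is the usual completion pattern: the caller sleeps in wait_for_completion_timeout() until the firmware response handler signals inst->done, then inspects the error code that handler recorded. A rough sketch of the pattern with invented names and an assumed one-second timeout (the real TIMEOUT value is not shown in the listing):

    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>
    #include <linux/types.h>

    #define DEMO_TIMEOUT    msecs_to_jiffies(1000) /* illustrative, not the driver's value */

    struct demo_inst {
            struct completion done; /* init_completion(&inst->done) before first use */
            u32 error;              /* filled in by the response handler */
    };

    /* Response path: record the firmware status and wake the waiter. */
    static void demo_msg_received(struct demo_inst *inst, u32 err)
    {
            inst->error = err;
            complete(&inst->done);
    }

    /* Command path: block until the response handler runs or the timeout expires. */
    static int demo_wait_session_msg(struct demo_inst *inst)
    {
            unsigned long left;

            left = wait_for_completion_timeout(&inst->done, DEMO_TIMEOUT);
            if (!left)
                    return -ETIMEDOUT;      /* no reply within the timeout */

            if (inst->error)
                    return -EIO;            /* firmware reported an error */

            return 0;
    }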
|
D | helpers.c |
    37   bool venus_helper_check_codec(struct venus_inst *inst, u32 v4l2_pixfmt)  in venus_helper_check_codec() argument
    39           struct venus_core *core = inst->core;  in venus_helper_check_codec()
    40           u32 session_type = inst->session_type;  in venus_helper_check_codec()
    86   static int intbufs_set_buffer(struct venus_inst *inst, u32 type)  in intbufs_set_buffer() argument
    88           struct venus_core *core = inst->core;  in intbufs_set_buffer()
    96           ret = venus_helper_get_bufreq(inst, type, &bufreq);  in intbufs_set_buffer()
    127          ret = hfi_session_set_buffers(inst, &bd);  in intbufs_set_buffer()
    133          list_add_tail(&buf->list, &inst->internalbufs);  in intbufs_set_buffer()
    145  static int intbufs_unset_buffers(struct venus_inst *inst)  in intbufs_unset_buffers() argument
    151          list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {  in intbufs_unset_buffers()
    [all …]
|
D | hfi.h |
    99           void (*buf_done)(struct venus_inst *inst, unsigned int buf_type,
    102          void (*event_notify)(struct venus_inst *inst, u32 event,
    112          int (*session_init)(struct venus_inst *inst, u32 session_type,
    114          int (*session_end)(struct venus_inst *inst);
    115          int (*session_abort)(struct venus_inst *inst);
    116          int (*session_flush)(struct venus_inst *inst, u32 flush_mode);
    117          int (*session_start)(struct venus_inst *inst);
    118          int (*session_stop)(struct venus_inst *inst);
    119          int (*session_continue)(struct venus_inst *inst);
    120          int (*session_etb)(struct venus_inst *inst, struct hfi_frame_data *fd);
    [all …]
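hfi.h above declares the per-session entry points as a table of function pointers, so the core can drive different HFI backends through a single interface. A generic sketch of that ops-table pattern, with invented names rather than the real venus structures:

    #include <linux/errno.h>
    #include <linux/types.h>

    struct demo_inst;

    /* Session operations implemented by a backend and called through the table. */
    struct demo_session_ops {
            int (*session_init)(struct demo_inst *inst, u32 session_type);
            int (*session_start)(struct demo_inst *inst);
            int (*session_stop)(struct demo_inst *inst);
            int (*session_end)(struct demo_inst *inst);
    };

    struct demo_inst {
            const struct demo_session_ops *ops;     /* bound at session-create time */
    };

    /* Core-side wrapper: dispatch without knowing which backend is bound. */
    static int demo_session_start(struct demo_inst *inst)
    {
            if (!inst->ops || !inst->ops->session_start)
                    return -EINVAL;

            return inst->ops->session_start(inst);
    }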
|
D | venc_ctrls.c |
    35           struct venus_inst *inst = ctrl_to_inst(ctrl);  in venc_op_s_ctrl() local
    36           struct venc_controls *ctr = &inst->controls.enc;  in venc_op_s_ctrl()
    130  int venc_ctrl_init(struct venus_inst *inst)  in venc_ctrl_init() argument
    134          ret = v4l2_ctrl_handler_init(&inst->ctrl_handler, 27);  in venc_ctrl_init()
    138          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    145          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    150          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    157          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    162          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    173          v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &venc_ctrl_ops,  in venc_ctrl_init()
    [all …]
|
D | hfi_msgs.c |
    25   static void event_seq_changed(struct venus_core *core, struct venus_inst *inst,  in event_seq_changed() argument
    35           inst->error = HFI_ERR_NONE;  in event_seq_changed()
    42           inst->error = HFI_ERR_SESSION_INVALID_PARAMETER;  in event_seq_changed()
    50           inst->error = HFI_ERR_SESSION_INSUFFICIENT_RESOURCES;  in event_seq_changed()
    79           inst->ops->event_notify(inst, EVT_SYS_EVENT_CHANGE, &event);  in event_seq_changed()
    83                                   struct venus_inst *inst,  in event_release_buffer_ref() argument
    97           inst->error = HFI_ERR_NONE;  in event_release_buffer_ref()
    98           inst->ops->event_notify(inst, EVT_SYS_EVENT_CHANGE, &event);  in event_release_buffer_ref()
    114  event_session_error(struct venus_core *core, struct venus_inst *inst,  in event_session_error() argument
    122          if (!inst)  in event_session_error()
    [all …]
|
D | vdec_ctrls.c |
    23           struct venus_inst *inst = ctrl_to_inst(ctrl);  in vdec_op_s_ctrl() local
    24           struct vdec_controls *ctr = &inst->controls.dec;  in vdec_op_s_ctrl()
    48           struct venus_inst *inst = ctrl_to_inst(ctrl);  in vdec_op_g_volatile_ctrl() local
    49           struct vdec_controls *ctr = &inst->controls.dec;  in vdec_op_g_volatile_ctrl()
    58           ret = hfi_session_get_property(inst, ptype, &hprop);  in vdec_op_g_volatile_ctrl()
    65           ret = hfi_session_get_property(inst, ptype, &hprop);  in vdec_op_g_volatile_ctrl()
    74           ctrl->val = inst->num_output_bufs;  in vdec_op_g_volatile_ctrl()
    88   int vdec_ctrl_init(struct venus_inst *inst)  in vdec_ctrl_init() argument
    93           ret = v4l2_ctrl_handler_init(&inst->ctrl_handler, 7);  in vdec_ctrl_init()
    97           ctrl = v4l2_ctrl_new_std_menu(&inst->ctrl_handler, &vdec_ctrl_ops,  in vdec_ctrl_init()
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
D | nv25.c |
    37                             &chan->inst);  in nv25_gr_chan_new()
    41           nvkm_kmap(chan->inst);  in nv25_gr_chan_new()
    42           nvkm_wo32(chan->inst, 0x0028, 0x00000001 | (chan->chid << 24));  in nv25_gr_chan_new()
    43           nvkm_wo32(chan->inst, 0x035c, 0xffff0000);  in nv25_gr_chan_new()
    44           nvkm_wo32(chan->inst, 0x03c0, 0x0fff0000);  in nv25_gr_chan_new()
    45           nvkm_wo32(chan->inst, 0x03c4, 0x0fff0000);  in nv25_gr_chan_new()
    46           nvkm_wo32(chan->inst, 0x049c, 0x00000101);  in nv25_gr_chan_new()
    47           nvkm_wo32(chan->inst, 0x04b0, 0x00000111);  in nv25_gr_chan_new()
    48           nvkm_wo32(chan->inst, 0x04c8, 0x00000080);  in nv25_gr_chan_new()
    49           nvkm_wo32(chan->inst, 0x04cc, 0xffff0000);  in nv25_gr_chan_new()
    [all …]
|
D | nv35.c |
    37                             &chan->inst);  in nv35_gr_chan_new()
    41           nvkm_kmap(chan->inst);  in nv35_gr_chan_new()
    42           nvkm_wo32(chan->inst, 0x0028, 0x00000001 | (chan->chid << 24));  in nv35_gr_chan_new()
    43           nvkm_wo32(chan->inst, 0x040c, 0x00000101);  in nv35_gr_chan_new()
    44           nvkm_wo32(chan->inst, 0x0420, 0x00000111);  in nv35_gr_chan_new()
    45           nvkm_wo32(chan->inst, 0x0424, 0x00000060);  in nv35_gr_chan_new()
    46           nvkm_wo32(chan->inst, 0x0440, 0x00000080);  in nv35_gr_chan_new()
    47           nvkm_wo32(chan->inst, 0x0444, 0xffff0000);  in nv35_gr_chan_new()
    48           nvkm_wo32(chan->inst, 0x0448, 0x00000001);  in nv35_gr_chan_new()
    49           nvkm_wo32(chan->inst, 0x045c, 0x44400000);  in nv35_gr_chan_new()
    [all …]
|
D | nv34.c |
    37                             &chan->inst);  in nv34_gr_chan_new()
    41           nvkm_kmap(chan->inst);  in nv34_gr_chan_new()
    42           nvkm_wo32(chan->inst, 0x0028, 0x00000001 | (chan->chid << 24));  in nv34_gr_chan_new()
    43           nvkm_wo32(chan->inst, 0x040c, 0x01000101);  in nv34_gr_chan_new()
    44           nvkm_wo32(chan->inst, 0x0420, 0x00000111);  in nv34_gr_chan_new()
    45           nvkm_wo32(chan->inst, 0x0424, 0x00000060);  in nv34_gr_chan_new()
    46           nvkm_wo32(chan->inst, 0x0440, 0x00000080);  in nv34_gr_chan_new()
    47           nvkm_wo32(chan->inst, 0x0444, 0xffff0000);  in nv34_gr_chan_new()
    48           nvkm_wo32(chan->inst, 0x0448, 0x00000001);  in nv34_gr_chan_new()
    49           nvkm_wo32(chan->inst, 0x045c, 0x44400000);  in nv34_gr_chan_new()
    [all …]
|
D | nv2a.c |
    37                             &chan->inst);  in nv2a_gr_chan_new()
    41           nvkm_kmap(chan->inst);  in nv2a_gr_chan_new()
    42           nvkm_wo32(chan->inst, 0x0000, 0x00000001 | (chan->chid << 24));  in nv2a_gr_chan_new()
    43           nvkm_wo32(chan->inst, 0x033c, 0xffff0000);  in nv2a_gr_chan_new()
    44           nvkm_wo32(chan->inst, 0x03a0, 0x0fff0000);  in nv2a_gr_chan_new()
    45           nvkm_wo32(chan->inst, 0x03a4, 0x0fff0000);  in nv2a_gr_chan_new()
    46           nvkm_wo32(chan->inst, 0x047c, 0x00000101);  in nv2a_gr_chan_new()
    47           nvkm_wo32(chan->inst, 0x0490, 0x00000111);  in nv2a_gr_chan_new()
    48           nvkm_wo32(chan->inst, 0x04a8, 0x44400000);  in nv2a_gr_chan_new()
    50           nvkm_wo32(chan->inst, i, 0x00030303);  in nv2a_gr_chan_new()
    [all …]
|
D | nv30.c |
    38                             &chan->inst);  in nv30_gr_chan_new()
    42           nvkm_kmap(chan->inst);  in nv30_gr_chan_new()
    43           nvkm_wo32(chan->inst, 0x0028, 0x00000001 | (chan->chid << 24));  in nv30_gr_chan_new()
    44           nvkm_wo32(chan->inst, 0x0410, 0x00000101);  in nv30_gr_chan_new()
    45           nvkm_wo32(chan->inst, 0x0424, 0x00000111);  in nv30_gr_chan_new()
    46           nvkm_wo32(chan->inst, 0x0428, 0x00000060);  in nv30_gr_chan_new()
    47           nvkm_wo32(chan->inst, 0x0444, 0x00000080);  in nv30_gr_chan_new()
    48           nvkm_wo32(chan->inst, 0x0448, 0xffff0000);  in nv30_gr_chan_new()
    49           nvkm_wo32(chan->inst, 0x044c, 0x00000001);  in nv30_gr_chan_new()
    50           nvkm_wo32(chan->inst, 0x0460, 0x44400000);  in nv30_gr_chan_new()
    [all …]
|
D | nv04.c |
    445  nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)  in nv04_gr_set_ctx1() argument
    450          tmp = nvkm_rd32(device, 0x700000 + inst);  in nv04_gr_set_ctx1()
    453          nvkm_wr32(device, 0x700000 + inst, tmp);  in nv04_gr_set_ctx1()
    460  nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value)  in nv04_gr_set_ctx_val() argument
    465          ctx1 = nvkm_rd32(device, 0x700000 + inst);  in nv04_gr_set_ctx_val()
    469          tmp = nvkm_rd32(device, 0x70000c + inst);  in nv04_gr_set_ctx_val()
    472          nvkm_wr32(device, 0x70000c + inst, tmp);  in nv04_gr_set_ctx_val()
    504          nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24);  in nv04_gr_set_ctx_val()
    508  nv04_gr_mthd_set_operation(struct nvkm_device *device, u32 inst, u32 data)  in nv04_gr_mthd_set_operation() argument
    516          nv04_gr_set_ctx1(device, inst, 0x00038000, data << 15);  in nv04_gr_mthd_set_operation()
    [all …]
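nv04_gr_set_ctx1() above reads a context word at 0x700000 + inst, applies a mask/value pair and writes it back: a masked read-modify-write. A compilable toy of just that step, with the nvkm register accessors replaced by a plain pointer (so this is the pattern, not the nouveau helpers):

    #include <stdint.h>
    #include <stdio.h>

    /* Clear the bits selected by 'mask', then OR in 'value' (which must fit the mask). */
    static void demo_set_ctx1(uint32_t *ctx, uint32_t mask, uint32_t value)
    {
            uint32_t tmp = *ctx;    /* read  */

            tmp &= ~mask;           /* clear the field */
            tmp |= value;           /* insert the new bits */
            *ctx = tmp;             /* write back */
    }

    int main(void)
    {
            uint32_t ctx1 = 0x00000000;

            /* Mirrors the set_operation call: place 'data' (= 3) into bits 15..17. */
            demo_set_ctx1(&ctx1, 0x00038000, 3u << 15);
            printf("ctx1 = 0x%08x\n", ctx1);        /* prints ctx1 = 0x00018000 */
            return 0;
    }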
|
D | nv20.c |
    21           u32 inst = nvkm_memory_addr(chan->inst);  in nv20_gr_chan_init() local
    24           nvkm_wo32(gr->ctxtab, chan->chid * 4, inst >> 4);  in nv20_gr_chan_init()
    35           u32 inst = nvkm_memory_addr(chan->inst);  in nv20_gr_chan_fini() local
    42           nvkm_wr32(device, 0x400784, inst >> 4);  in nv20_gr_chan_fini()
    63           nvkm_memory_del(&chan->inst);  in nv20_gr_chan_dtor()
    91                             &chan->inst);  in nv20_gr_chan_new()
    95           nvkm_kmap(chan->inst);  in nv20_gr_chan_new()
    96           nvkm_wo32(chan->inst, 0x0000, 0x00000001 | (chan->chid << 24));  in nv20_gr_chan_new()
    97           nvkm_wo32(chan->inst, 0x033c, 0xffff0000);  in nv20_gr_chan_new()
    98           nvkm_wo32(chan->inst, 0x03a0, 0x0fff0000);  in nv20_gr_chan_new()
    [all …]
|
/drivers/gpu/drm/amd/amdgpu/ |
D | soc15_common.h |
    50   #define SOC15_REG_OFFSET(ip, inst, reg) (0 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG0 +…  argument
    51           … (1 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG1 + reg : \
    52           … (2 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG2 + reg : \
    53           … (3 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG3 + reg : \
    54                   (ip##_BASE__INST##inst##_SEG4 + reg)))))
    59   #define RREG32_SOC15(ip, inst, reg) \  argument
    60           RREG32( (0 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG0 + reg : \
    61                   (1 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG1 + reg : \
    62                   (2 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG2 + reg : \
    63                   (3 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG3 + reg : \
    [all …]
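SOC15_REG_OFFSET() and RREG32_SOC15() above paste the ip, inst and reg tokens together and pick a segment base according to the register's _BASE_IDX before adding the offset. A compilable toy of the same token-pasting and base-index selection, with invented IP and register names instead of the real amdgpu headers:

    #include <stdio.h>

    /* Invented segment bases for a fictional "DEMO" IP block, instance 0. */
    #define DEMO_BASE__INST0_SEG0   0x00001000
    #define DEMO_BASE__INST0_SEG1   0x00020000

    /* Each register carries its offset and which segment (base index) it lives in. */
    #define regFOO                  0x0010
    #define regFOO_BASE_IDX         0
    #define regBAR                  0x0004
    #define regBAR_BASE_IDX         1

    /* Same shape as SOC15_REG_OFFSET: paste ip/inst/reg tokens, select by BASE_IDX. */
    #define DEMO_REG_OFFSET(ip, inst, reg)                                  \
            (0 == reg##_BASE_IDX ? ip##_BASE__INST##inst##_SEG0 + reg :     \
             (ip##_BASE__INST##inst##_SEG1 + reg))

    int main(void)
    {
            printf("FOO at 0x%08x\n", (unsigned)DEMO_REG_OFFSET(DEMO, 0, regFOO)); /* 0x00001010 */
            printf("BAR at 0x%08x\n", (unsigned)DEMO_REG_OFFSET(DEMO, 0, regBAR)); /* 0x00020004 */
            return 0;
    }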
|
/drivers/gpu/drm/vc4/ |
D | vc4_validate_shaders.c |
    116  raddr_add_a_to_live_reg_index(uint64_t inst)  in raddr_add_a_to_live_reg_index() argument
    118          uint32_t sig = QPU_GET_FIELD(inst, QPU_SIG);  in raddr_add_a_to_live_reg_index()
    119          uint32_t add_a = QPU_GET_FIELD(inst, QPU_ADD_A);  in raddr_add_a_to_live_reg_index()
    120          uint32_t raddr_a = QPU_GET_FIELD(inst, QPU_RADDR_A);  in raddr_add_a_to_live_reg_index()
    121          uint32_t raddr_b = QPU_GET_FIELD(inst, QPU_RADDR_B);  in raddr_add_a_to_live_reg_index()
    187          uint64_t inst = validation_state->shader[validation_state->ip];  in check_tmu_write() local
    189                  QPU_GET_FIELD(inst, QPU_WADDR_MUL) :  in check_tmu_write()
    190                  QPU_GET_FIELD(inst, QPU_WADDR_ADD));  in check_tmu_write()
    191          uint32_t raddr_a = QPU_GET_FIELD(inst, QPU_RADDR_A);  in check_tmu_write()
    192          uint32_t raddr_b = QPU_GET_FIELD(inst, QPU_RADDR_B);  in check_tmu_write()
    [all …]
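The vc4 shader validator above pulls the signal, write-address and read-address fields out of each 64-bit QPU instruction with QPU_GET_FIELD(). A generic mask-and-shift sketch of that extraction; the field positions below are invented for illustration and are not the real QPU encoding:

    #include <stdint.h>
    #include <stdio.h>

    /* Describe a field by its shift and width packed into one descriptor value. */
    #define DEMO_FIELD(shift, bits)         ((uint64_t)(shift) << 8 | (bits))
    #define DEMO_FIELD_SHIFT(f)             ((uint32_t)((f) >> 8))
    #define DEMO_FIELD_BITS(f)              ((uint32_t)((f) & 0xff))

    /* Invented instruction fields: a 4-bit signal at bit 60, a 6-bit raddr_a at bit 18. */
    #define DEMO_SIG                        DEMO_FIELD(60, 4)
    #define DEMO_RADDR_A                    DEMO_FIELD(18, 6)

    static uint32_t demo_get_field(uint64_t inst, uint64_t field)
    {
            uint32_t shift = DEMO_FIELD_SHIFT(field);
            uint64_t mask = (1ULL << DEMO_FIELD_BITS(field)) - 1;

            return (uint32_t)((inst >> shift) & mask);      /* shift down, mask off */
    }

    int main(void)
    {
            uint64_t inst = (0xaULL << 60) | (0x25ULL << 18);

            printf("sig=%u raddr_a=%u\n",
                   (unsigned)demo_get_field(inst, DEMO_SIG),
                   (unsigned)demo_get_field(inst, DEMO_RADDR_A));       /* sig=10 raddr_a=37 */
            return 0;
    }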
|
/drivers/phy/samsung/ |
D | phy-exynos4x12-usb2.c |
    168  static void exynos4x12_isol(struct samsung_usb2_phy_instance *inst, bool on)  in exynos4x12_isol() argument
    170          struct samsung_usb2_phy_driver *drv = inst->drv;  in exynos4x12_isol()
    174          switch (inst->cfg->id) {  in exynos4x12_isol()
    195  static void exynos4x12_setup_clk(struct samsung_usb2_phy_instance *inst)  in exynos4x12_setup_clk() argument
    197          struct samsung_usb2_phy_driver *drv = inst->drv;  in exynos4x12_setup_clk()
    211  static void exynos4x12_phy_pwr(struct samsung_usb2_phy_instance *inst, bool on)  in exynos4x12_phy_pwr() argument
    213          struct samsung_usb2_phy_driver *drv = inst->drv;  in exynos4x12_phy_pwr()
    219          switch (inst->cfg->id) {  in exynos4x12_phy_pwr()
    263  static void exynos4x12_power_on_int(struct samsung_usb2_phy_instance *inst)  in exynos4x12_power_on_int() argument
    265          if (inst->int_cnt++ > 0)  in exynos4x12_power_on_int()
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
D | gpfifogf100.c |
    77           struct nvkm_gpuobj *inst = chan->base.inst;  in gf100_fifo_gpfifo_engine_fini() local
    96           nvkm_kmap(inst);  in gf100_fifo_gpfifo_engine_fini()
    97           nvkm_wo32(inst, offset + 0x00, 0x00000000);  in gf100_fifo_gpfifo_engine_fini()
    98           nvkm_wo32(inst, offset + 0x04, 0x00000000);  in gf100_fifo_gpfifo_engine_fini()
    99           nvkm_done(inst);  in gf100_fifo_gpfifo_engine_fini()
    111          struct nvkm_gpuobj *inst = chan->base.inst;  in gf100_fifo_gpfifo_engine_init() local
    115          nvkm_kmap(inst);  in gf100_fifo_gpfifo_engine_init()
    116          nvkm_wo32(inst, offset + 0x00, lower_32_bits(addr) | 4);  in gf100_fifo_gpfifo_engine_init()
    117          nvkm_wo32(inst, offset + 0x04, upper_32_bits(addr));  in gf100_fifo_gpfifo_engine_init()
    118          nvkm_done(inst);  in gf100_fifo_gpfifo_engine_init()
    [all …]
|
/drivers/crypto/qat/qat_common/ |
D | qat_crypto.c |
    62   void qat_crypto_put_instance(struct qat_crypto_instance *inst)  in qat_crypto_put_instance() argument
    64           atomic_dec(&inst->refctr);  in qat_crypto_put_instance()
    65           adf_dev_put(inst->accel_dev);  in qat_crypto_put_instance()
    70           struct qat_crypto_instance *inst, *tmp;  in qat_crypto_free_instances() local
    73           list_for_each_entry_safe(inst, tmp, &accel_dev->crypto_list, list) {  in qat_crypto_free_instances()
    74                   for (i = 0; i < atomic_read(&inst->refctr); i++)  in qat_crypto_free_instances()
    75                           qat_crypto_put_instance(inst);  in qat_crypto_free_instances()
    77                   if (inst->sym_tx)  in qat_crypto_free_instances()
    78                           adf_remove_ring(inst->sym_tx);  in qat_crypto_free_instances()
    80                   if (inst->sym_rx)  in qat_crypto_free_instances()
    [all …]
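qat_crypto_put_instance() above drops one reference on the crypto instance and releases its hold on the accelerator device, and qat_crypto_free_instances() drains any references still outstanding at teardown. A stripped-down sketch of that get/put pairing, with invented names and stubbed device helpers; the get side is assumed to mirror the put side shown in the listing:

    #include <linux/atomic.h>

    struct demo_accel_dev { int dummy; };

    /* Stubs standing in for the real device get/put helpers (illustrative only). */
    static inline void demo_dev_get(struct demo_accel_dev *dev) { (void)dev; }
    static inline void demo_dev_put(struct demo_accel_dev *dev) { (void)dev; }

    struct demo_crypto_instance {
            atomic_t refctr;                /* users currently holding this instance */
            struct demo_accel_dev *accel_dev;
    };

    /* Take a reference: pin both the instance and its parent accel device. */
    static void demo_crypto_get_instance(struct demo_crypto_instance *inst)
    {
            atomic_inc(&inst->refctr);
            demo_dev_get(inst->accel_dev);
    }

    /* Drop a reference: the exact mirror of the get path. */
    static void demo_crypto_put_instance(struct demo_crypto_instance *inst)
    {
            atomic_dec(&inst->refctr);
            demo_dev_put(inst->accel_dev);
    }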
|