
Searched refs:ctx (Results 1 – 25 of 531) sorted by relevance


/drivers/gpu/drm/nouveau/nvkm/engine/gr/
ctxnv50.c
169 static void nv50_gr_construct_mmio(struct nvkm_grctx *ctx);
170 static void nv50_gr_construct_xfer1(struct nvkm_grctx *ctx);
171 static void nv50_gr_construct_xfer2(struct nvkm_grctx *ctx);
176 nv50_grctx_generate(struct nvkm_grctx *ctx) in nv50_grctx_generate() argument
178 cp_set (ctx, STATE, RUNNING); in nv50_grctx_generate()
179 cp_set (ctx, XFER_SWITCH, ENABLE); in nv50_grctx_generate()
181 cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save); in nv50_grctx_generate()
182 cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save); in nv50_grctx_generate()
184 cp_name(ctx, cp_check_load); in nv50_grctx_generate()
185 cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load); in nv50_grctx_generate()
[all …]
ctxnv40.c
159 nv40_gr_construct_general(struct nvkm_grctx *ctx) in nv40_gr_construct_general() argument
161 struct nvkm_device *device = ctx->device; in nv40_gr_construct_general()
164 cp_ctx(ctx, 0x4000a4, 1); in nv40_gr_construct_general()
165 gr_def(ctx, 0x4000a4, 0x00000008); in nv40_gr_construct_general()
166 cp_ctx(ctx, 0x400144, 58); in nv40_gr_construct_general()
167 gr_def(ctx, 0x400144, 0x00000001); in nv40_gr_construct_general()
168 cp_ctx(ctx, 0x400314, 1); in nv40_gr_construct_general()
169 gr_def(ctx, 0x400314, 0x00000000); in nv40_gr_construct_general()
170 cp_ctx(ctx, 0x400400, 10); in nv40_gr_construct_general()
171 cp_ctx(ctx, 0x400480, 10); in nv40_gr_construct_general()
[all …]
/drivers/staging/skein/
skein_base.c
23 int skein_256_init(struct skein_256_ctx *ctx, size_t hash_bit_len) in skein_256_init() argument
31 ctx->h.hash_bit_len = hash_bit_len; /* output hash bit count */ in skein_256_init()
35 memcpy(ctx->x, SKEIN_256_IV_256, sizeof(ctx->x)); in skein_256_init()
38 memcpy(ctx->x, SKEIN_256_IV_224, sizeof(ctx->x)); in skein_256_init()
41 memcpy(ctx->x, SKEIN_256_IV_160, sizeof(ctx->x)); in skein_256_init()
44 memcpy(ctx->x, SKEIN_256_IV_128, sizeof(ctx->x)); in skein_256_init()
53 skein_start_new_type(ctx, CFG_FINAL); in skein_256_init()
65 memset(ctx->x, 0, sizeof(ctx->x)); in skein_256_init()
66 skein_256_process_block(ctx, cfg.b, 1, SKEIN_CFG_STR_LEN); in skein_256_init()
71 skein_start_new_type(ctx, MSG); /* T0=0, T1= MSG type */ in skein_256_init()
[all …]
/drivers/misc/cxl/
context.c
37 int cxl_context_init(struct cxl_context *ctx, struct cxl_afu *afu, bool master, in cxl_context_init() argument
42 spin_lock_init(&ctx->sste_lock); in cxl_context_init()
43 ctx->afu = afu; in cxl_context_init()
44 ctx->master = master; in cxl_context_init()
45 ctx->pid = ctx->glpid = NULL; /* Set in start work ioctl */ in cxl_context_init()
46 mutex_init(&ctx->mapping_lock); in cxl_context_init()
47 ctx->mapping = mapping; in cxl_context_init()
56 i = cxl_alloc_sst(ctx); in cxl_context_init()
60 INIT_WORK(&ctx->fault_work, cxl_handle_fault); in cxl_context_init()
62 init_waitqueue_head(&ctx->wq); in cxl_context_init()
[all …]
api.c
25 struct cxl_context *ctx; in cxl_dev_context_init() local
32 ctx = cxl_context_alloc(); in cxl_dev_context_init()
33 if (IS_ERR(ctx)) { in cxl_dev_context_init()
34 rc = PTR_ERR(ctx); in cxl_dev_context_init()
38 ctx->kernelapi = true; in cxl_dev_context_init()
54 rc = cxl_context_init(ctx, afu, false, mapping); in cxl_dev_context_init()
58 return ctx; in cxl_dev_context_init()
63 kfree(ctx); in cxl_dev_context_init()
75 int cxl_release_context(struct cxl_context *ctx) in cxl_release_context() argument
77 if (ctx->status >= STARTED) in cxl_release_context()
[all …]
/drivers/gpu/drm/panel/
panel-samsung-s6e8aa0.c
128 static int s6e8aa0_clear_error(struct s6e8aa0 *ctx) in s6e8aa0_clear_error() argument
130 int ret = ctx->error; in s6e8aa0_clear_error()
132 ctx->error = 0; in s6e8aa0_clear_error()
136 static void s6e8aa0_dcs_write(struct s6e8aa0 *ctx, const void *data, size_t len) in s6e8aa0_dcs_write() argument
138 struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev); in s6e8aa0_dcs_write()
141 if (ctx->error < 0) in s6e8aa0_dcs_write()
146 dev_err(ctx->dev, "error %zd writing dcs seq: %*ph\n", ret, in s6e8aa0_dcs_write()
148 ctx->error = ret; in s6e8aa0_dcs_write()
152 static int s6e8aa0_dcs_read(struct s6e8aa0 *ctx, u8 cmd, void *data, size_t len) in s6e8aa0_dcs_read() argument
154 struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev); in s6e8aa0_dcs_read()
[all …]
panel-samsung-ld9040.c
118 static int ld9040_clear_error(struct ld9040 *ctx) in ld9040_clear_error() argument
120 int ret = ctx->error; in ld9040_clear_error()
122 ctx->error = 0; in ld9040_clear_error()
126 static int ld9040_spi_write_word(struct ld9040 *ctx, u16 data) in ld9040_spi_write_word() argument
128 struct spi_device *spi = to_spi_device(ctx->dev); in ld9040_spi_write_word()
141 static void ld9040_dcs_write(struct ld9040 *ctx, const u8 *data, size_t len) in ld9040_dcs_write() argument
145 if (ctx->error < 0 || len == 0) in ld9040_dcs_write()
148 dev_dbg(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data); in ld9040_dcs_write()
149 ret = ld9040_spi_write_word(ctx, *data); in ld9040_dcs_write()
153 ret = ld9040_spi_write_word(ctx, *data | 0x100); in ld9040_dcs_write()
[all …]
/drivers/media/platform/coda/
coda-bit.c
43 static void coda_free_bitstream_buffer(struct coda_ctx *ctx);
66 static void coda_command_async(struct coda_ctx *ctx, int cmd) in coda_command_async() argument
68 struct coda_dev *dev = ctx->dev; in coda_command_async()
73 coda_write(dev, ctx->bit_stream_param, in coda_command_async()
75 coda_write(dev, ctx->frm_dis_flg, in coda_command_async()
76 CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx)); in coda_command_async()
77 coda_write(dev, ctx->frame_mem_ctrl, in coda_command_async()
79 coda_write(dev, ctx->workbuf.paddr, CODA_REG_BIT_WORK_BUF_ADDR); in coda_command_async()
89 coda_write(dev, ctx->idx, CODA_REG_BIT_RUN_INDEX); in coda_command_async()
90 coda_write(dev, ctx->params.codec_mode, CODA_REG_BIT_RUN_COD_STD); in coda_command_async()
[all …]
/drivers/gpu/drm/exynos/
exynos7_drm_decon.c
90 struct decon_context *ctx = crtc->ctx; in decon_wait_for_vblank() local
92 if (ctx->suspended) in decon_wait_for_vblank()
95 atomic_set(&ctx->wait_vsync_event, 1); in decon_wait_for_vblank()
101 if (!wait_event_timeout(ctx->wait_vsync_queue, in decon_wait_for_vblank()
102 !atomic_read(&ctx->wait_vsync_event), in decon_wait_for_vblank()
109 struct decon_context *ctx = crtc->ctx; in decon_clear_channels() local
116 u32 val = readl(ctx->regs + WINCON(win)); in decon_clear_channels()
120 writel(val, ctx->regs + WINCON(win)); in decon_clear_channels()
127 decon_wait_for_vblank(ctx->crtc); in decon_clear_channels()
130 static int decon_ctx_initialize(struct decon_context *ctx, in decon_ctx_initialize() argument
[all …]
exynos_drm_fimd.c
243 struct fimd_context *ctx = crtc->ctx; in fimd_enable_vblank() local
246 if (ctx->suspended) in fimd_enable_vblank()
249 if (!test_and_set_bit(0, &ctx->irq_flags)) { in fimd_enable_vblank()
250 val = readl(ctx->regs + VIDINTCON0); in fimd_enable_vblank()
254 if (ctx->i80_if) { in fimd_enable_vblank()
267 writel(val, ctx->regs + VIDINTCON0); in fimd_enable_vblank()
275 struct fimd_context *ctx = crtc->ctx; in fimd_disable_vblank() local
278 if (ctx->suspended) in fimd_disable_vblank()
281 if (test_and_clear_bit(0, &ctx->irq_flags)) { in fimd_disable_vblank()
282 val = readl(ctx->regs + VIDINTCON0); in fimd_disable_vblank()
[all …]
exynos5433_drm_decon.c
80 static inline void decon_set_bits(struct decon_context *ctx, u32 reg, u32 mask, in decon_set_bits() argument
83 val = (val & mask) | (readl(ctx->addr + reg) & ~mask); in decon_set_bits()
84 writel(val, ctx->addr + reg); in decon_set_bits()
89 struct decon_context *ctx = crtc->ctx; in decon_enable_vblank() local
92 if (test_bit(BIT_SUSPENDED, &ctx->flags)) in decon_enable_vblank()
95 if (!test_and_set_bit(BIT_IRQS_ENABLED, &ctx->flags)) { in decon_enable_vblank()
97 if (ctx->out_type & IFTYPE_I80) in decon_enable_vblank()
102 writel(val, ctx->addr + DECON_VIDINTCON0); in decon_enable_vblank()
110 struct decon_context *ctx = crtc->ctx; in decon_disable_vblank() local
112 if (test_bit(BIT_SUSPENDED, &ctx->flags)) in decon_disable_vblank()
[all …]
/drivers/media/platform/s5p-mfc/
s5p_mfc_opr_v5.c
37 static int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_dec_temp_buffers_v5() argument
39 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_dec_temp_buffers_v5()
43 ctx->dsc.size = buf_size->dsc; in s5p_mfc_alloc_dec_temp_buffers_v5()
44 ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, dev->bank1, &ctx->dsc); in s5p_mfc_alloc_dec_temp_buffers_v5()
50 BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1)); in s5p_mfc_alloc_dec_temp_buffers_v5()
51 memset(ctx->dsc.virt, 0, ctx->dsc.size); in s5p_mfc_alloc_dec_temp_buffers_v5()
58 static void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_release_dec_desc_buffer_v5() argument
60 s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->dsc); in s5p_mfc_release_dec_desc_buffer_v5()
64 static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_codec_buffers_v5() argument
66 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_codec_buffers_v5()
[all …]
s5p_mfc_dec.c
224 static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx) in s5p_mfc_ctx_ready() argument
227 if (ctx->src_queue_cnt >= 1 && ctx->state == MFCINST_GOT_INST) in s5p_mfc_ctx_ready()
230 if (ctx->src_queue_cnt >= 1 && in s5p_mfc_ctx_ready()
231 ctx->state == MFCINST_RUNNING && in s5p_mfc_ctx_ready()
232 ctx->dst_queue_cnt >= ctx->pb_count) in s5p_mfc_ctx_ready()
235 if (ctx->state == MFCINST_FINISHING && in s5p_mfc_ctx_ready()
236 ctx->dst_queue_cnt >= ctx->pb_count) in s5p_mfc_ctx_ready()
239 if (ctx->src_queue_cnt >= 1 && in s5p_mfc_ctx_ready()
240 ctx->state == MFCINST_HEAD_PARSED && in s5p_mfc_ctx_ready()
241 ctx->capture_state == QUEUE_BUFS_MMAPED) in s5p_mfc_ctx_ready()
[all …]
s5p_mfc_opr_v6.c
49 static int s5p_mfc_alloc_dec_temp_buffers_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_dec_temp_buffers_v6() argument
57 static void s5p_mfc_release_dec_desc_buffer_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_release_dec_desc_buffer_v6() argument
63 static int s5p_mfc_alloc_codec_buffers_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_codec_buffers_v6() argument
65 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_codec_buffers_v6()
69 mb_width = MB_WIDTH(ctx->img_width); in s5p_mfc_alloc_codec_buffers_v6()
70 mb_height = MB_HEIGHT(ctx->img_height); in s5p_mfc_alloc_codec_buffers_v6()
72 if (ctx->type == MFCINST_DECODER) { in s5p_mfc_alloc_codec_buffers_v6()
74 ctx->luma_size, ctx->chroma_size, ctx->mv_size); in s5p_mfc_alloc_codec_buffers_v6()
75 mfc_debug(2, "Totals bufs: %d\n", ctx->total_dpb_count); in s5p_mfc_alloc_codec_buffers_v6()
76 } else if (ctx->type == MFCINST_ENCODER) { in s5p_mfc_alloc_codec_buffers_v6()
[all …]
/drivers/base/regmap/
regmap-mmio.c
33 void (*reg_write)(struct regmap_mmio_context *ctx,
35 unsigned int (*reg_read)(struct regmap_mmio_context *ctx,
81 static void regmap_mmio_write8(struct regmap_mmio_context *ctx, in regmap_mmio_write8() argument
85 writeb(val, ctx->regs + reg); in regmap_mmio_write8()
88 static void regmap_mmio_write16le(struct regmap_mmio_context *ctx, in regmap_mmio_write16le() argument
92 writew(val, ctx->regs + reg); in regmap_mmio_write16le()
95 static void regmap_mmio_write16be(struct regmap_mmio_context *ctx, in regmap_mmio_write16be() argument
99 iowrite16be(val, ctx->regs + reg); in regmap_mmio_write16be()
102 static void regmap_mmio_write32le(struct regmap_mmio_context *ctx, in regmap_mmio_write32le() argument
106 writel(val, ctx->regs + reg); in regmap_mmio_write32le()
[all …]
/drivers/media/platform/ti-vpe/
cal.c
70 #define ctx_dbg(level, ctx, fmt, arg...) \ argument
71 v4l2_dbg(level, debug, &ctx->v4l2_dev, fmt, ##arg)
72 #define ctx_info(ctx, fmt, arg...) \ argument
73 v4l2_info(&ctx->v4l2_dev, fmt, ##arg)
74 #define ctx_err(ctx, fmt, arg...) \ argument
75 v4l2_err(&ctx->v4l2_dev, fmt, ##arg)
261 struct cal_ctx *ctx[CAL_NUM_CONTEXT]; member
320 static const struct cal_fmt *find_format_by_pix(struct cal_ctx *ctx, in find_format_by_pix() argument
326 for (k = 0; k < ctx->num_active_fmt; k++) { in find_format_by_pix()
327 fmt = ctx->active_fmt[k]; in find_format_by_pix()
[all …]
/drivers/auxdisplay/
img-ascii-lcd.c
34 void (*update)(struct img_ascii_lcd_ctx *ctx);
71 static void boston_update(struct img_ascii_lcd_ctx *ctx) in boston_update() argument
76 val = *((u64 *)&ctx->curr[0]); in boston_update()
77 __raw_writeq(val, ctx->base); in boston_update()
79 val = *((u32 *)&ctx->curr[0]); in boston_update()
80 __raw_writel(val, ctx->base); in boston_update()
81 val = *((u32 *)&ctx->curr[4]); in boston_update()
82 __raw_writel(val, ctx->base + 4); in boston_update()
97 static void malta_update(struct img_ascii_lcd_ctx *ctx) in malta_update() argument
102 for (i = 0; i < ctx->cfg->num_chars; i++) { in malta_update()
[all …]
/drivers/crypto/qat/qat_common/
qat_asym_algs.c
176 } ctx; member
189 struct device *dev = &GET_DEV(req->ctx.dh->inst->accel_dev); in qat_dh_cb()
197 dma_free_coherent(dev, req->ctx.dh->p_size, in qat_dh_cb()
201 req->ctx.dh->p_size, DMA_TO_DEVICE); in qat_dh_cb()
204 areq->dst_len = req->ctx.dh->p_size; in qat_dh_cb()
209 dma_free_coherent(dev, req->ctx.dh->p_size, req->dst_align, in qat_dh_cb()
212 dma_unmap_single(dev, req->out.dh.r, req->ctx.dh->p_size, in qat_dh_cb()
260 struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm); in qat_dh_compute_value() local
261 struct qat_crypto_instance *inst = ctx->inst; in qat_dh_compute_value()
269 if (unlikely(!ctx->xa)) in qat_dh_compute_value()
[all …]
/drivers/media/platform/sti/hva/
hva-v4l2.c
92 static void set_default_params(struct hva_ctx *ctx) in set_default_params() argument
94 struct hva_frameinfo *frameinfo = &ctx->frameinfo; in set_default_params()
95 struct hva_streaminfo *streaminfo = &ctx->streaminfo; in set_default_params()
112 ctx->colorspace = V4L2_COLORSPACE_REC709; in set_default_params()
113 ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT; in set_default_params()
114 ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT; in set_default_params()
115 ctx->quantization = V4L2_QUANTIZATION_DEFAULT; in set_default_params()
117 ctx->max_stream_size = estimated_stream_size(streaminfo->width, in set_default_params()
121 static const struct hva_enc *hva_find_encoder(struct hva_ctx *ctx, in hva_find_encoder() argument
125 struct hva_dev *hva = ctx_to_hdev(ctx); in hva_find_encoder()
[all …]
/drivers/gpu/drm/
drm_modeset_lock.c
81 struct drm_modeset_acquire_ctx *ctx; in drm_modeset_lock_all() local
84 ctx = kzalloc(sizeof(*ctx), GFP_KERNEL | __GFP_NOFAIL); in drm_modeset_lock_all()
85 if (WARN_ON(!ctx)) in drm_modeset_lock_all()
90 drm_modeset_acquire_init(ctx, 0); in drm_modeset_lock_all()
93 ret = drm_modeset_lock_all_ctx(dev, ctx); in drm_modeset_lock_all()
96 drm_modeset_backoff(ctx); in drm_modeset_lock_all()
100 drm_modeset_acquire_fini(ctx); in drm_modeset_lock_all()
101 kfree(ctx); in drm_modeset_lock_all()
111 config->acquire_ctx = ctx; in drm_modeset_lock_all()
134 struct drm_modeset_acquire_ctx *ctx = config->acquire_ctx; in drm_modeset_unlock_all() local
[all …]
/drivers/net/usb/
cdc_ncm.c
65 static void cdc_ncm_tx_timeout_start(struct cdc_ncm_ctx *ctx);
107 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_get_ethtool_stats() local
112 p = (char *)ctx + cdc_ncm_gstrings_stats[i].stat_offset; in cdc_ncm_get_ethtool_stats()
149 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_rx_max() local
154 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_RX, le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize)); in cdc_ncm_check_rx_max()
159 le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize), min); in cdc_ncm_check_rx_max()
172 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_tx_max() local
176 min = ctx->max_datagram_size + ctx->max_ndp_size + sizeof(struct usb_cdc_ncm_nth16); in cdc_ncm_check_tx_max()
177 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_TX, le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize)); in cdc_ncm_check_tx_max()
192 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_show_min_tx_pkt() local
[all …]
/drivers/net/wireless/intel/iwlwifi/dvm/
rxon.c
39 struct iwl_rxon_context *ctx) in iwl_connection_init_rx_config() argument
41 memset(&ctx->staging, 0, sizeof(ctx->staging)); in iwl_connection_init_rx_config()
43 if (!ctx->vif) { in iwl_connection_init_rx_config()
44 ctx->staging.dev_type = ctx->unused_devtype; in iwl_connection_init_rx_config()
46 switch (ctx->vif->type) { in iwl_connection_init_rx_config()
48 ctx->staging.dev_type = ctx->ap_devtype; in iwl_connection_init_rx_config()
52 ctx->staging.dev_type = ctx->station_devtype; in iwl_connection_init_rx_config()
53 ctx->staging.filter_flags = RXON_FILTER_ACCEPT_GRP_MSK; in iwl_connection_init_rx_config()
57 ctx->staging.dev_type = ctx->ibss_devtype; in iwl_connection_init_rx_config()
58 ctx->staging.flags = RXON_FLG_SHORT_PREAMBLE_MSK; in iwl_connection_init_rx_config()
[all …]
/drivers/hwmon/
pwm-fan.c
41 static int __set_pwm(struct pwm_fan_ctx *ctx, unsigned long pwm) in __set_pwm() argument
47 pwm_get_args(ctx->pwm, &pargs); in __set_pwm()
49 mutex_lock(&ctx->lock); in __set_pwm()
50 if (ctx->pwm_value == pwm) in __set_pwm()
54 ret = pwm_config(ctx->pwm, duty, pargs.period); in __set_pwm()
59 pwm_disable(ctx->pwm); in __set_pwm()
61 if (ctx->pwm_value == 0) { in __set_pwm()
62 ret = pwm_enable(ctx->pwm); in __set_pwm()
67 ctx->pwm_value = pwm; in __set_pwm()
69 mutex_unlock(&ctx->lock); in __set_pwm()
[all …]
xgene-hwmon.c
138 static int xgene_hwmon_pcc_rd(struct xgene_hwmon_dev *ctx, u32 *msg) in xgene_hwmon_pcc_rd() argument
140 struct acpi_pcct_shared_memory *generic_comm_base = ctx->pcc_comm_addr; in xgene_hwmon_pcc_rd()
145 mutex_lock(&ctx->rd_mutex); in xgene_hwmon_pcc_rd()
146 init_completion(&ctx->rd_complete); in xgene_hwmon_pcc_rd()
147 ctx->resp_pending = true; in xgene_hwmon_pcc_rd()
151 cpu_to_le32(PCC_SIGNATURE_MASK | ctx->mbox_idx)); in xgene_hwmon_pcc_rd()
167 rc = mbox_send_message(ctx->mbox_chan, msg); in xgene_hwmon_pcc_rd()
169 dev_err(ctx->dev, "Mailbox send error %d\n", rc); in xgene_hwmon_pcc_rd()
172 if (!wait_for_completion_timeout(&ctx->rd_complete, in xgene_hwmon_pcc_rd()
173 usecs_to_jiffies(ctx->usecs_lat))) { in xgene_hwmon_pcc_rd()
[all …]
/drivers/gpu/drm/radeon/
atom.c
56 struct atom_context *ctx; member
66 static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params);
67 int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params);
103 static uint32_t atom_iio_execute(struct atom_context *ctx, int base, in atom_iio_execute() argument
106 struct radeon_device *rdev = ctx->card->dev->dev_private; in atom_iio_execute()
115 temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute()
120 (void)ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute()
121 ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp); in atom_iio_execute()
161 ((ctx-> in atom_iio_execute()
178 static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr, in atom_get_src_int() argument
[all …]