/drivers/irqchip/

irq-crossbar.c
    47   static struct crossbar_device *cb;  variable
    51   writel(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]);  in crossbar_writel()
    56   writew(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]);  in crossbar_writew()
    61   writeb(cb_no, cb->crossbar_base + cb->register_offsets[irq_no]);  in crossbar_writeb()
    88   raw_spin_lock(&cb->lock);  in allocate_gic_irq()
    89   for (i = cb->int_max - 1; i >= 0; i--) {  in allocate_gic_irq()
    90   if (cb->irq_map[i] == IRQ_FREE) {  in allocate_gic_irq()
    91   cb->irq_map[i] = hwirq;  in allocate_gic_irq()
    95   raw_spin_unlock(&cb->lock);  in allocate_gic_irq()
    108  cb->irq_map[i] = IRQ_FREE;  in allocate_gic_irq()
    [all …]
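The matches above come from two places: the register writers, which program one routing register per GIC input, and allocate_gic_irq(), which scans cb->irq_map[] for an IRQ_FREE slot under cb->lock. A condensed sketch of that allocate-then-program flow follows; the struct layout and the IRQ_FREE value are illustrative assumptions, not the driver's exact definitions.

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/spinlock.h>

    #define IRQ_FREE	(-1)		/* assumed marker for an unused map slot */

    struct crossbar_device_sketch {
            raw_spinlock_t lock;
            int int_max;			/* number of GIC inputs behind the crossbar */
            int *irq_map;			/* which hwirq (if any) owns each GIC input */
            void __iomem *crossbar_base;
            int *register_offsets;		/* per-GIC-input routing register offset */
    };

    /* Claim a free GIC input for hwirq and program the routing register. */
    static int crossbar_route(struct crossbar_device_sketch *cb, int hwirq)
    {
            int i;

            raw_spin_lock(&cb->lock);
            for (i = cb->int_max - 1; i >= 0; i--) {
                    if (cb->irq_map[i] == IRQ_FREE) {
                            cb->irq_map[i] = hwirq;
                            break;
                    }
            }
            raw_spin_unlock(&cb->lock);

            if (i < 0)
                    return -ENODEV;

            /* Tell the crossbar to deliver input 'hwirq' on GIC input 'i'. */
            writel(hwirq, cb->crossbar_base + cb->register_offsets[i]);
            return i;
    }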
/drivers/net/phy/

mdio-mux.c
    44   struct mdio_mux_child_bus *cb = bus->priv;  in mdio_mux_read()  local
    45   struct mdio_mux_parent_bus *pb = cb->parent;  in mdio_mux_read()
    49   r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data);  in mdio_mux_read()
    53   pb->current_child = cb->bus_number;  in mdio_mux_read()
    68   struct mdio_mux_child_bus *cb = bus->priv;  in mdio_mux_write()  local
    69   struct mdio_mux_parent_bus *pb = cb->parent;  in mdio_mux_write()
    74   r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data);  in mdio_mux_write()
    78   pb->current_child = cb->bus_number;  in mdio_mux_write()
    100  struct mdio_mux_child_bus *cb;  in mdio_mux_init()  local
    142  cb = devm_kzalloc(dev, sizeof(*cb), GFP_KERNEL);  in mdio_mux_init()
    [all …]
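mdio_mux_read() and mdio_mux_write() share one shape: look up the child's cb from bus->priv, ask the parent's switch_fn to steer the mux to that child, remember the selection in pb->current_child, then forward the access to the parent mii_bus. A sketch of the read path; the two struct definitions are simplified stand-ins for the ones private to mdio-mux.c, and locking is omitted.

    #include <linux/phy.h>

    struct mux_parent_sketch {
            struct mii_bus *mii_bus;	/* the real, hardware-backed bus */
            int current_child;
            void *switch_data;
            int (*switch_fn)(int current_child, int desired_child, void *data);
    };

    struct mux_child_sketch {
            struct mux_parent_sketch *parent;
            int bus_number;			/* which mux port this child bus is */
    };

    static int mux_read_sketch(struct mii_bus *bus, int phy_id, int regnum)
    {
            struct mux_child_sketch *cb = bus->priv;
            struct mux_parent_sketch *pb = cb->parent;
            int r;

            /* Steer the mux to this child before touching the parent bus. */
            r = pb->switch_fn(pb->current_child, cb->bus_number, pb->switch_data);
            if (r)
                    return r;
            pb->current_child = cb->bus_number;

            /* Forward the register read on the now correctly routed parent. */
            return pb->mii_bus->read(pb->mii_bus, phy_id, regnum);
    }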
/drivers/mfd/

lm3533-ctrlbank.c
    33   static inline u8 lm3533_ctrlbank_get_reg(struct lm3533_ctrlbank *cb, u8 base)  in lm3533_ctrlbank_get_reg()  argument
    35   return base + cb->id;  in lm3533_ctrlbank_get_reg()
    38   int lm3533_ctrlbank_enable(struct lm3533_ctrlbank *cb)  in lm3533_ctrlbank_enable()  argument
    43   dev_dbg(cb->dev, "%s - %d\n", __func__, cb->id);  in lm3533_ctrlbank_enable()
    45   mask = 1 << cb->id;  in lm3533_ctrlbank_enable()
    46   ret = lm3533_update(cb->lm3533, LM3533_REG_CTRLBANK_ENABLE,  in lm3533_ctrlbank_enable()
    49   dev_err(cb->dev, "failed to enable ctrlbank %d\n", cb->id);  in lm3533_ctrlbank_enable()
    55   int lm3533_ctrlbank_disable(struct lm3533_ctrlbank *cb)  in lm3533_ctrlbank_disable()  argument
    60   dev_dbg(cb->dev, "%s - %d\n", __func__, cb->id);  in lm3533_ctrlbank_disable()
    62   mask = 1 << cb->id;  in lm3533_ctrlbank_disable()
    [all …]
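Two conventions show up in every lm3533-ctrlbank helper: a per-bank register is a common base plus cb->id, and the shared enable register is updated with a mask of 1 << cb->id. A minimal sketch of both; the register value and the update hook below are assumptions for illustration, not the LM3533 register map.

    #include <linux/types.h>

    #define REG_CTRLBANK_ENABLE	0x27	/* hypothetical address of the shared enable register */

    struct ctrlbank_sketch {
            int id;					/* control bank index */
            int (*update)(u8 reg, u8 val, u8 mask);	/* hypothetical read-modify-write helper */
    };

    /* Per-bank register address: common base plus the bank id. */
    static inline u8 ctrlbank_reg(const struct ctrlbank_sketch *cb, u8 base)
    {
            return base + cb->id;
    }

    /* Enable or disable one bank by toggling its bit in the shared register. */
    static int ctrlbank_set_enabled(const struct ctrlbank_sketch *cb, bool enable)
    {
            u8 mask = 1 << cb->id;

            return cb->update(REG_CTRLBANK_ENABLE, enable ? mask : 0, mask);
    }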
/drivers/misc/mei/

amthif.c
    104  struct mei_cl_cb *cb;  in mei_amthif_read_start()  local
    106  cb = mei_cl_enqueue_ctrl_wr_cb(cl, mei_cl_mtu(cl), MEI_FOP_READ, fp);  in mei_amthif_read_start()
    107  if (!cb)  in mei_amthif_read_start()
    113  cl->fp = cb->fp;  in mei_amthif_read_start()
    128  struct mei_cl_cb *cb;  in mei_amthif_run_next_cmd()  local
    135  cb = list_first_entry_or_null(&dev->amthif_cmd_list.list,  in mei_amthif_run_next_cmd()
    136  typeof(*cb), list);  in mei_amthif_run_next_cmd()
    137  if (!cb) {  in mei_amthif_run_next_cmd()
    143  list_del_init(&cb->list);  in mei_amthif_run_next_cmd()
    145  cl->fp = cb->fp;  in mei_amthif_run_next_cmd()
    [all …]
interrupt.c
    42   struct mei_cl_cb *cb, *next;  in mei_irq_compl_handler()  local
    45   list_for_each_entry_safe(cb, next, &compl_list->list, list) {  in mei_irq_compl_handler()
    46   cl = cb->cl;  in mei_irq_compl_handler()
    47   list_del_init(&cb->list);  in mei_irq_compl_handler()
    51   mei_amthif_complete(cl, cb);  in mei_irq_compl_handler()
    53   mei_cl_complete(cl, cb);  in mei_irq_compl_handler()
    104  struct mei_cl_cb *cb;  in mei_cl_irq_read_msg()  local
    107  cb = list_first_entry_or_null(&cl->rd_pending, struct mei_cl_cb, list);  in mei_cl_irq_read_msg()
    108  if (!cb) {  in mei_cl_irq_read_msg()
    113  cb = mei_cl_alloc_cb(cl, mei_cl_mtu(cl), MEI_FOP_READ, cl->fp);  in mei_cl_irq_read_msg()
    [all …]
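mei_irq_compl_handler() walks the completion list with list_for_each_entry_safe() precisely because each cb is unlinked (and may be freed by its completion handler) during the walk. The same drain-and-dispatch loop in isolation; the complete() hook below is a generic stand-in for mei_cl_complete()/mei_amthif_complete().

    #include <linux/list.h>

    struct compl_cb_sketch {
            struct list_head list;
            void (*complete)(struct compl_cb_sketch *cb);	/* stand-in completion hook */
    };

    static void drain_completions(struct list_head *compl_list)
    {
            struct compl_cb_sketch *cb, *next;

            /* The _safe iterator tolerates cb being removed inside the loop body. */
            list_for_each_entry_safe(cb, next, compl_list, list) {
                    list_del_init(&cb->list);
                    cb->complete(cb);	/* may free cb; 'next' was already saved */
            }
    }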
client.c
    342  void mei_io_cb_free(struct mei_cl_cb *cb)  in mei_io_cb_free()  argument
    344  if (cb == NULL)  in mei_io_cb_free()
    347  list_del(&cb->list);  in mei_io_cb_free()
    348  kfree(cb->buf.data);  in mei_io_cb_free()
    349  kfree(cb);  in mei_io_cb_free()
    365  struct mei_cl_cb *cb;  in mei_io_cb_init()  local
    367  cb = kzalloc(sizeof(struct mei_cl_cb), GFP_KERNEL);  in mei_io_cb_init()
    368  if (!cb)  in mei_io_cb_init()
    371  INIT_LIST_HEAD(&cb->list);  in mei_io_cb_init()
    372  cb->fp = fp;  in mei_io_cb_init()
    [all …]
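mei_io_cb_init() and mei_io_cb_free() bracket the life of a callback block: zero-allocate it, initialise its list head, record the originating file pointer, and on free unlink it and release the payload buffer before the cb itself. A stripped-down sketch; the field names follow the listing, everything else is simplified.

    #include <linux/list.h>
    #include <linux/slab.h>

    struct io_cb_sketch {
            struct list_head list;
            const void *fp;			/* file pointer that originated the request */
            struct { void *data; } buf;	/* payload buffer */
    };

    static struct io_cb_sketch *io_cb_init(const void *fp)
    {
            struct io_cb_sketch *cb;

            cb = kzalloc(sizeof(*cb), GFP_KERNEL);
            if (!cb)
                    return NULL;

            INIT_LIST_HEAD(&cb->list);	/* makes a later list_del() safe even if never queued */
            cb->fp = fp;
            return cb;
    }

    static void io_cb_free(struct io_cb_sketch *cb)
    {
            if (!cb)
                    return;

            list_del(&cb->list);		/* unlink (harmless on a self-linked head) */
            kfree(cb->buf.data);
            kfree(cb);
    }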
/drivers/misc/sgi-gru/

gru_instructions.h
    22   extern int gru_check_status_proc(void *cb);
    23   extern int gru_wait_proc(void *cb);
    24   extern void gru_wait_abort_proc(void *cb);
    79   unsigned long cb;  member
    359  static inline void gru_vload_phys(void *cb, unsigned long gpa,  in gru_vload_phys()  argument
    362  struct gru_instruction *ins = (struct gru_instruction *)cb;  in gru_vload_phys()
    371  static inline void gru_vstore_phys(void *cb, unsigned long gpa,  in gru_vstore_phys()  argument
    374  struct gru_instruction *ins = (struct gru_instruction *)cb;  in gru_vstore_phys()
    383  static inline void gru_vload(void *cb, unsigned long mem_addr,  in gru_vload()  argument
    387  struct gru_instruction *ins = (struct gru_instruction *)cb;  in gru_vload()
    [all …]
grukservices.c
    267  static int gru_get_cpu_resources(int dsr_bytes, void **cb, void **dsr)  in gru_get_cpu_resources()  argument
    276  *cb = bs->kernel_cb + lcpu * GRU_HANDLE_STRIDE;  in gru_get_cpu_resources()
    284  static void gru_free_cpu_resources(void *cb, void *dsr)  in gru_free_cpu_resources()  argument
    371  void gru_lock_async_resource(unsigned long han, void **cb, void **dsr)  in gru_lock_async_resource()  argument
    379  if (cb)  in gru_lock_async_resource()
    380  *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE;  in gru_lock_async_resource()
    399  int gru_get_cb_exception_detail(void *cb,  in gru_get_cb_exception_detail()  argument
    418  off = cb - kgts->ts_gru->gs_gru_base_vaddr;  in gru_get_cb_exception_detail()
    424  cbrnum = thread_cbr_number(kgts, get_cb_number(cb));  in gru_get_cb_exception_detail()
    425  cbe = get_cbe(GRUBASE(cb), cbrnum);  in gru_get_cb_exception_detail()
    [all …]
/drivers/gpu/drm/i915/

i915_sw_fence.c
    238  struct dma_fence_cb *cb = (struct dma_fence_cb *)data;  in timer_i915_sw_fence_wake()  local
    241  cb->dma->ops->get_driver_name(cb->dma),  in timer_i915_sw_fence_wake()
    242  cb->dma->ops->get_timeline_name(cb->dma),  in timer_i915_sw_fence_wake()
    243  cb->dma->seqno);  in timer_i915_sw_fence_wake()
    244  fence_put(cb->dma);  in timer_i915_sw_fence_wake()
    245  cb->dma = NULL;  in timer_i915_sw_fence_wake()
    247  i915_sw_fence_commit(cb->fence);  in timer_i915_sw_fence_wake()
    248  cb->timer.function = NULL;  in timer_i915_sw_fence_wake()
    253  struct dma_fence_cb *cb = container_of(data, typeof(*cb), base);  in dma_i915_sw_fence_wake()  local
    255  del_timer_sync(&cb->timer);  in dma_i915_sw_fence_wake()
    [all …]
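dma_i915_sw_fence_wake() only receives a pointer to the embedded callback member and recovers the surrounding state (a timer plus the software fence to commit) with container_of(). The pattern in isolation, with made-up wrapper names; this tree still uses the pre-rename fence/fence_cb types.

    #include <linux/kernel.h>	/* container_of() */
    #include <linux/list.h>
    #include <linux/timer.h>

    struct fence;			/* opaque for this sketch */

    struct fence_cb_sketch {
            struct list_head node;
    };

    struct wake_cb_sketch {
            struct fence_cb_sketch base;	/* the member handed to the fence core */
            struct timer_list timer;	/* timeout fallback armed by the caller */
            void *owner;			/* whatever must be completed on wake-up */
    };

    static void wake_cb_func(struct fence *f, struct fence_cb_sketch *cb)
    {
            /* Recover the wrapper from the embedded member we registered. */
            struct wake_cb_sketch *wcb = container_of(cb, struct wake_cb_sketch, base);

            del_timer_sync(&wcb->timer);
            /* ... commit/complete wcb->owner here ... */
    }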
/drivers/dma/

dmaengine.h
    106  struct dmaengine_desc_callback *cb)  in dmaengine_desc_get_callback()  argument
    108  cb->callback = tx->callback;  in dmaengine_desc_get_callback()
    109  cb->callback_result = tx->callback_result;  in dmaengine_desc_get_callback()
    110  cb->callback_param = tx->callback_param;  in dmaengine_desc_get_callback()
    123  dmaengine_desc_callback_invoke(struct dmaengine_desc_callback *cb,  in dmaengine_desc_callback_invoke()  argument
    131  if (cb->callback_result) {  in dmaengine_desc_callback_invoke()
    134  cb->callback_result(cb->callback_param, result);  in dmaengine_desc_callback_invoke()
    135  } else if (cb->callback) {  in dmaengine_desc_callback_invoke()
    136  cb->callback(cb->callback_param);  in dmaengine_desc_callback_invoke()
    154  struct dmaengine_desc_callback cb;  in dmaengine_desc_get_callback_invoke()  local
    [all …]
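The dmaengine helpers first snapshot the descriptor's callback triple into a local struct and only then invoke it, preferring the result-carrying variant; that way the callback can still run safely after the descriptor itself has been recycled. A compact sketch of the snapshot-then-invoke idiom with the types reduced to their essentials.

    struct dma_result_sketch {
            int result;
    };

    struct desc_callback_sketch {
            void (*callback)(void *param);
            void (*callback_result)(void *param, const struct dma_result_sketch *res);
            void *callback_param;
    };

    struct desc_sketch {			/* stand-in for the transfer descriptor */
            struct desc_callback_sketch cbs;
    };

    /* Copy the callback out while the descriptor is still guaranteed valid... */
    static inline void desc_get_callback(const struct desc_sketch *tx,
                                         struct desc_callback_sketch *cb)
    {
            *cb = tx->cbs;
    }

    /* ...and invoke the snapshot later, after the descriptor may be reused. */
    static inline void desc_callback_invoke(const struct desc_callback_sketch *cb,
                                            const struct dma_result_sketch *result)
    {
            if (cb->callback_result)
                    cb->callback_result(cb->callback_param, result);
            else if (cb->callback)
                    cb->callback(cb->callback_param);
    }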
/drivers/net/wireless/marvell/mwifiex/

util.h
    55   struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb;  in MWIFIEX_SKB_RXCB()  local
    57   BUILD_BUG_ON(sizeof(struct mwifiex_cb) > sizeof(skb->cb));  in MWIFIEX_SKB_RXCB()
    58   return &cb->rx_info;  in MWIFIEX_SKB_RXCB()
    63   struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb;  in MWIFIEX_SKB_TXCB()  local
    65   return &cb->tx_info;  in MWIFIEX_SKB_TXCB()
    71   struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb;  in mwifiex_store_mapping()  local
    73   memcpy(&cb->dma_mapping, mapping, sizeof(*mapping));  in mwifiex_store_mapping()
    79   struct mwifiex_cb *cb = (struct mwifiex_cb *)skb->cb;  in mwifiex_get_mapping()  local
    81   memcpy(mapping, &cb->dma_mapping, sizeof(*mapping));  in mwifiex_get_mapping()
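MWIFIEX_SKB_RXCB()/TXCB() overlay the driver's own struct mwifiex_cb on the 48-byte skb->cb[] scratch area, with a BUILD_BUG_ON() guaranteeing the overlay fits. The general recipe, using a made-up per-packet struct rather than mwifiex's:

    #include <linux/bug.h>
    #include <linux/skbuff.h>

    /* Hypothetical per-packet driver state kept inside the skb itself. */
    struct my_pkt_cb {
            u32 flags;
            dma_addr_t dma_mapping;
    };

    static inline struct my_pkt_cb *my_pkt_cb(struct sk_buff *skb)
    {
            /* Fails the build if the private struct outgrows skb->cb[]. */
            BUILD_BUG_ON(sizeof(struct my_pkt_cb) > sizeof(skb->cb));

            return (struct my_pkt_cb *)skb->cb;
    }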
/drivers/s390/net/

smsgiucv.c
    71   struct smsg_callback *cb;  in smsg_message_pending()  local
    94   list_for_each_entry(cb, &smsg_list, list)  in smsg_message_pending()
    95   if (strncmp(buffer + 8, cb->prefix, cb->len) == 0) {  in smsg_message_pending()
    96   cb->callback(sender, buffer + 8);  in smsg_message_pending()
    107  struct smsg_callback *cb;  in smsg_register_callback()  local
    109  cb = kmalloc(sizeof(struct smsg_callback), GFP_KERNEL);  in smsg_register_callback()
    110  if (!cb)  in smsg_register_callback()
    112  cb->prefix = prefix;  in smsg_register_callback()
    113  cb->len = strlen(prefix);  in smsg_register_callback()
    114  cb->callback = callback;  in smsg_register_callback()
    [all …]
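smsg_register_callback() just appends a prefix, its length, and a handler to a global list; smsg_message_pending() then hands each incoming message to every callback whose prefix matches via strncmp(). A sketch of that prefix registry with the IUCV plumbing and locking stripped away.

    #include <linux/errno.h>
    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    struct prefix_cb_sketch {
            struct list_head list;
            const char *prefix;
            int len;
            void (*callback)(const char *from, char *msg);
    };

    static LIST_HEAD(prefix_cb_list);

    static int register_prefix_cb(const char *prefix,
                                  void (*callback)(const char *from, char *msg))
    {
            struct prefix_cb_sketch *cb;

            cb = kmalloc(sizeof(*cb), GFP_KERNEL);
            if (!cb)
                    return -ENOMEM;

            cb->prefix = prefix;
            cb->len = strlen(prefix);
            cb->callback = callback;
            list_add_tail(&cb->list, &prefix_cb_list);
            return 0;
    }

    static void dispatch_message(const char *from, char *msg)
    {
            struct prefix_cb_sketch *cb;

            list_for_each_entry(cb, &prefix_cb_list, list)
                    if (strncmp(msg, cb->prefix, cb->len) == 0)
                            cb->callback(from, msg);
    }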
/drivers/isdn/gigaset/

ser-gigaset.c
    107  struct cmdbuf_t *cb, *tcb;  in send_cb()  local
    113  cb = cs->cmdbuf;  in send_cb()
    114  if (!cb)  in send_cb()
    117  if (cb->len) {  in send_cb()
    119  sent = tty->ops->write(tty, cb->buf + cb->offset, cb->len);  in send_cb()
    126  cb->offset += sent;  in send_cb()
    127  cb->len -= sent;  in send_cb()
    129  sent, cb->len, cs->cmdbytes);  in send_cb()
    132  while (cb && !cb->len) {  in send_cb()
    135  tcb = cb;  in send_cb()
    [all …]
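send_cb() has to cope with tty writes that accept fewer bytes than offered: it advances cb->offset, shrinks cb->len, and only frees a buffer and moves to the next one once len reaches zero. The core of that drain loop, sketched with queue locking left out; do_write() stands in for tty->ops->write().

    #include <linux/slab.h>

    struct cmdbuf_sketch {
            struct cmdbuf_sketch *next;
            unsigned char *buf;
            int offset;			/* bytes already handed to the tty */
            int len;			/* bytes still waiting to be written */
    };

    /* Stand-in for tty->ops->write(); may consume fewer bytes than asked. */
    extern int do_write(const unsigned char *data, int count);

    static struct cmdbuf_sketch *send_one(struct cmdbuf_sketch *cb)
    {
            if (cb->len) {
                    int sent = do_write(cb->buf + cb->offset, cb->len);

                    if (sent < 0)
                            return cb;	/* error: keep the buffer, retry later */

                    cb->offset += sent;
                    cb->len -= sent;
            }

            /* Release every fully drained buffer and advance to the next one. */
            while (cb && !cb->len) {
                    struct cmdbuf_sketch *tcb = cb;

                    cb = cb->next;
                    kfree(tcb);
            }

            return cb;			/* new head of the command queue */
    }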
/drivers/dma-buf/

fence.c
    241  int fence_add_callback(struct fence *fence, struct fence_cb *cb,  in fence_add_callback()  argument
    252  INIT_LIST_HEAD(&cb->node);  in fence_add_callback()
    272  cb->func = func;  in fence_add_callback()
    273  list_add_tail(&cb->node, &fence->cb_list);  in fence_add_callback()
    275  INIT_LIST_HEAD(&cb->node);  in fence_add_callback()
    323  fence_remove_callback(struct fence *fence, struct fence_cb *cb)  in fence_remove_callback()  argument
    330  ret = !list_empty(&cb->node);  in fence_remove_callback()
    332  list_del_init(&cb->node);  in fence_remove_callback()
    350  fence_default_wait_cb(struct fence *fence, struct fence_cb *cb)  in fence_default_wait_cb()  argument
    353  container_of(cb, struct default_wait_cb, base);  in fence_default_wait_cb()
    [all …]
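fence_add_callback() queues cb->node on the fence's cb_list, or leaves the node empty and returns -ENOENT if the fence has already signalled; fence_remove_callback() reports via list_empty() whether the callback was still pending when it was cancelled. Typical caller usage, sketched against the old fence_* names used in this tree (renamed dma_fence_* in later kernels):

    #include <linux/errno.h>
    #include <linux/fence.h>	/* <linux/dma-fence.h> in newer kernels */

    struct waiter_sketch {
            struct fence_cb cb;
            /* ... whatever state must be kicked when the fence signals ... */
    };

    static void waiter_wake(struct fence *f, struct fence_cb *cb)
    {
            struct waiter_sketch *w = container_of(cb, struct waiter_sketch, cb);

            /* ... fence signalled: complete whatever w represents ... */
            (void)w;
    }

    static void arm_waiter(struct fence *f, struct waiter_sketch *w)
    {
            int ret = fence_add_callback(f, &w->cb, waiter_wake);

            /* Already signalled: the callback will never fire, so run it now. */
            if (ret == -ENOENT)
                    waiter_wake(f, &w->cb);
    }

    static bool disarm_waiter(struct fence *f, struct waiter_sketch *w)
    {
            /* True means the callback was still pending and is now cancelled. */
            return fence_remove_callback(f, &w->cb);
    }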
fence-array.c
    24   static void fence_array_cb_func(struct fence *f, struct fence_cb *cb);
    36   static void fence_array_cb_func(struct fence *f, struct fence_cb *cb)  in fence_array_cb_func()  argument
    39   container_of(cb, struct fence_array_cb, cb);  in fence_array_cb_func()
    50   struct fence_array_cb *cb = (void *)(&array[1]);  in fence_array_enable_signaling()  local
    54   cb[i].array = array;  in fence_array_enable_signaling()
    64   if (fence_add_callback(array->fences[i], &cb[i].cb,  in fence_array_enable_signaling()
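fence_array_enable_signaling() finds its per-fence callback slots at (void *)(&array[1]) because the fence_array and its N struct fence_array_cb entries were carved out of a single allocation, with the cb array placed directly behind the array object. A sketch of that single-allocation layout with simplified names:

    #include <linux/slab.h>

    struct item_cb_sketch {
            void *owner;			/* points back at the containing array */
    };

    struct item_array_sketch {
            unsigned int num;
            /* num struct item_cb_sketch slots follow in the same allocation */
    };

    static struct item_array_sketch *item_array_create(unsigned int num)
    {
            struct item_array_sketch *array;

            array = kzalloc(sizeof(*array) + num * sizeof(struct item_cb_sketch),
                            GFP_KERNEL);
            if (!array)
                    return NULL;

            array->num = num;
            return array;
    }

    static struct item_cb_sketch *item_array_cbs(struct item_array_sketch *array)
    {
            /* &array[1] is the first byte past the header: the cb slots. */
            return (struct item_cb_sketch *)&array[1];
    }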
/drivers/infiniband/hw/nes/

nes_mgt.c
    54   struct nes_rskb_cb *cb;  in nes_replenish_mgt_rq()  local
    81   cb = (struct nes_rskb_cb *)&skb->cb[0];  in nes_replenish_mgt_rq()
    82   cb->busaddr = bus_address;  in nes_replenish_mgt_rq()
    83   cb->maplen = mgtvnic->nesvnic->max_frame_size;  in nes_replenish_mgt_rq()
    139  struct nes_rskb_cb *cb;  in nes_mgt_free_skb()  local
    141  cb = (struct nes_rskb_cb *)&skb->cb[0];  in nes_mgt_free_skb()
    142  pci_unmap_single(nesdev->pcidev, cb->busaddr, cb->maplen, dir);  in nes_mgt_free_skb()
    143  cb->busaddr = 0;  in nes_mgt_free_skb()
    176  struct nes_rskb_cb *cb = (struct nes_rskb_cb *)&skb->cb[0];  in nes_get_seq()  local
    177  struct iphdr *iph = (struct iphdr *)(cb->data_start + ETH_HLEN);  in nes_get_seq()
    [all …]
/drivers/net/ethernet/hisilicon/hns/

hnae.c
    40   static int hnae_alloc_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)  in hnae_alloc_buffer()  argument
    48   cb->priv = p;  in hnae_alloc_buffer()
    49   cb->page_offset = 0;  in hnae_alloc_buffer()
    50   cb->reuse_flag = 0;  in hnae_alloc_buffer()
    51   cb->buf = page_address(p);  in hnae_alloc_buffer()
    52   cb->length = hnae_page_size(ring);  in hnae_alloc_buffer()
    53   cb->type = DESC_TYPE_PAGE;  in hnae_alloc_buffer()
    58   static void hnae_free_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb)  in hnae_free_buffer()  argument
    60   if (cb->type == DESC_TYPE_SKB)  in hnae_free_buffer()
    61   dev_kfree_skb_any((struct sk_buff *)cb->priv);  in hnae_free_buffer()
    [all …]
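hnae_desc_cb tags each ring buffer with how it was obtained (DESC_TYPE_PAGE vs DESC_TYPE_SKB), so hnae_free_buffer() knows whether to drop a page or free an skb. A sketch of that tag-the-allocation idea; the enum values and the page-free path here are illustrative, not the hns driver's exact handling.

    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/mm.h>
    #include <linux/skbuff.h>

    enum buf_type_sketch { BUF_TYPE_SKB, BUF_TYPE_PAGE };

    struct desc_cb_sketch {
            void *priv;			/* struct page * or struct sk_buff * */
            void *buf;			/* CPU address of the data area */
            int length;
            enum buf_type_sketch type;	/* remembers how to free priv */
    };

    static int alloc_rx_buffer(struct desc_cb_sketch *cb, int size)
    {
            struct page *p = alloc_page(GFP_ATOMIC);

            if (!p)
                    return -ENOMEM;

            cb->priv = p;
            cb->buf = page_address(p);
            cb->length = size;
            cb->type = BUF_TYPE_PAGE;
            return 0;
    }

    static void free_buffer(struct desc_cb_sketch *cb)
    {
            if (cb->type == BUF_TYPE_SKB)
                    dev_kfree_skb_any(cb->priv);
            else
                    put_page(cb->priv);
    }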
/drivers/media/tuners/

tuner-simple.c
    260  unsigned *frequency, u8 *config, u8 *cb)  in simple_config_lookup()  argument
    276  *cb = t_params->ranges[i].cb;  in simple_config_lookup()
    281  i, *config, *cb);  in simple_config_lookup()
    289  u8 *config, u8 *cb, unsigned int rf)  in simple_set_rf_input()  argument
    297  *cb |= 0x08;  in simple_set_rf_input()
    300  *cb &= ~0x08;  in simple_set_rf_input()
    307  *cb |= 0x01;  in simple_set_rf_input()
    310  *cb &= ~0x01;  in simple_set_rf_input()
    321  u8 *config, u8 *cb)  in simple_std_setup()  argument
    332  *cb &= ~0x03;  in simple_std_setup()
    [all …]
/drivers/misc/

enclosure.c
    125  struct enclosure_component_callbacks *cb)  in enclosure_register()  argument
    133  BUG_ON(!cb);  in enclosure_register()
    142  edev->cb = cb;  in enclosure_register()
    187  edev->cb = &enclosure_null_callbacks;  in enclosure_unregister()
    451  if (edev->cb->show_id)  in id_show()
    452  return edev->cb->show_id(edev, buf);  in id_show()
    494  if (edev->cb->get_fault)  in get_component_fault()
    495  edev->cb->get_fault(edev, ecomp);  in get_component_fault()
    507  if (edev->cb->set_fault)  in set_component_fault()
    508  edev->cb->set_fault(edev, ecomp, val);  in set_component_fault()
    [all …]
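The enclosure core treats every entry in edev->cb as optional: each accessor checks that the backend registered that particular hook before calling it, and enclosure_unregister() swaps in a table of all-NULL callbacks so edev->cb itself never needs a NULL test. A sketch of that optional-ops pattern with shortened names:

    struct encl_cb_sketch {
            void (*get_fault)(void *edev, void *component);
            void (*set_fault)(void *edev, void *component, int val);
    };

    struct encl_dev_sketch {
            const struct encl_cb_sketch *cb;
    };

    static const struct encl_cb_sketch encl_null_cb;	/* every hook NULL */

    static void component_set_fault(struct encl_dev_sketch *edev,
                                    void *component, int val)
    {
            /* Hooks are optional: call only what the backend provided. */
            if (edev->cb->set_fault)
                    edev->cb->set_fault(edev, component, val);
    }

    static void encl_unregister(struct encl_dev_sketch *edev)
    {
            /* Keep ->cb pointing at something valid after the backend is gone. */
            edev->cb = &encl_null_cb;
    }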
/drivers/mcb/

mcb-parse.c
    29   struct chameleon_bar *cb,  in chameleon_parse_bdd()  argument
    36   struct chameleon_bar *cb,  in chameleon_parse_gdd()  argument
    76   dev_mapbase = cb[mdev->bar].addr;  in chameleon_parse_gdd()
    116  struct chameleon_bar *cb, int bar_count)  in chameleon_parse_bar()  argument
    125  cb[i].addr = readl(p);  in chameleon_parse_bar()
    126  cb[i].size = readl(p + 4);  in chameleon_parse_bar()
    133  struct chameleon_bar **cb)  in chameleon_get_bar()  argument
    171  *cb = c;  in chameleon_get_bar()
    180  struct chameleon_bar *cb;  in chameleon_parse_cells()  local
    212  bar_count = chameleon_get_bar(&p, mapbase, &cb);  in chameleon_parse_cells()
    [all …]
/drivers/media/mmc/siano/

smssdio.c
    133  struct smscore_buffer_t *cb;  in smssdio_interrupt()  local
    150  cb = smscore_getbuffer(smsdev->coredev);  in smssdio_interrupt()
    151  if (!cb) {  in smssdio_interrupt()
    157  cb->p,  in smssdio_interrupt()
    165  hdr = cb->p;  in smssdio_interrupt()
    168  smsdev->split_cb = cb;  in smssdio_interrupt()
    177  cb = smsdev->split_cb;  in smssdio_interrupt()
    178  hdr = cb->p;  in smssdio_interrupt()
    188  buffer = cb->p + (hdr->msg_length - size);  in smssdio_interrupt()
    201  smscore_putbuffer(smsdev->coredev, cb);  in smssdio_interrupt()
    [all …]
/drivers/infiniband/core/

iwpm_msg.c
    348  int iwpm_register_pid_cb(struct sk_buff *skb, struct netlink_callback *cb)  in iwpm_register_pid_cb()  argument
    359  if (iwpm_parse_nlmsg(cb, IWPM_NLA_RREG_PID_MAX,  in iwpm_register_pid_cb()
    386  iwpm_user_pid = cb->nlh->nlmsg_pid;  in iwpm_register_pid_cb()
    387  atomic_set(&echo_nlmsg_seq, cb->nlh->nlmsg_seq);  in iwpm_register_pid_cb()
    414  int iwpm_add_mapping_cb(struct sk_buff *skb, struct netlink_callback *cb)  in iwpm_add_mapping_cb()  argument
    425  if (iwpm_parse_nlmsg(cb, IWPM_NLA_RMANAGE_MAPPING_MAX,  in iwpm_add_mapping_cb()
    429  atomic_set(&echo_nlmsg_seq, cb->nlh->nlmsg_seq);  in iwpm_add_mapping_cb()
    487  struct netlink_callback *cb)  in iwpm_add_and_query_mapping_cb()  argument
    499  if (iwpm_parse_nlmsg(cb, IWPM_NLA_RQUERY_MAPPING_MAX,  in iwpm_add_and_query_mapping_cb()
    502  atomic_set(&echo_nlmsg_seq, cb->nlh->nlmsg_seq);  in iwpm_add_and_query_mapping_cb()
    [all …]
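Each iwpm_*_cb() handler is invoked with a struct netlink_callback and pulls the sender's port id and sequence number out of cb->nlh so that replies can be addressed and matched to the request. A minimal sketch of reading those two fields; attribute validation (iwpm_parse_nlmsg()) is elided.

    #include <linux/netlink.h>
    #include <linux/types.h>

    struct reply_ctx_sketch {
            u32 peer_pid;			/* netlink port id to reply to */
            u32 echo_seq;			/* sequence number to echo back */
    };

    static int example_nl_cb(struct netlink_callback *cb,
                             struct reply_ctx_sketch *ctx)
    {
            /* The request header carries the sender's port id and sequence. */
            ctx->peer_pid = cb->nlh->nlmsg_pid;
            ctx->echo_seq = cb->nlh->nlmsg_seq;

            /* ... parse attributes and send the reply to ctx->peer_pid ... */
            return 0;
    }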
/drivers/gpu/drm/amd/scheduler/

gpu_scheduler.c
    35   static void amd_sched_process_job(struct fence *f, struct fence_cb *cb);
    221  static void amd_sched_entity_wakeup(struct fence *f, struct fence_cb *cb)  in amd_sched_entity_wakeup()  argument
    224  container_of(cb, struct amd_sched_entity, cb);  in amd_sched_entity_wakeup()
    230  static void amd_sched_entity_clear_dep(struct fence *f, struct fence_cb *cb)  in amd_sched_entity_clear_dep()  argument
    233  container_of(cb, struct amd_sched_entity, cb);  in amd_sched_entity_clear_dep()
    260  if (!fence_add_callback(fence, &entity->cb,  in amd_sched_entity_add_dependency_cb()
    269  if (!fence_add_callback(entity->dependency, &entity->cb,  in amd_sched_entity_add_dependency_cb()
    354  static void amd_sched_job_finish_cb(struct fence *f, struct fence_cb *cb)  in amd_sched_job_finish_cb()  argument
    356  struct amd_sched_job *job = container_of(cb, struct amd_sched_job,  in amd_sched_job_finish_cb()
    388  if (fence_remove_callback(s_job->s_fence->parent, &s_job->s_fence->cb)) {  in amd_sched_hw_job_reset()
    [all …]
/drivers/video/backlight/

lm3533_bl.c
    32   struct lm3533_ctrlbank cb;  member
    53   return lm3533_ctrlbank_set_brightness(&bl->cb, (u8)brightness);  in lm3533_bl_update_status()
    62   ret = lm3533_ctrlbank_get_brightness(&bl->cb, &val);  in lm3533_bl_get_brightness()
    199  ret = lm3533_ctrlbank_get_pwm(&bl->cb, &val);  in show_pwm()
    217  ret = lm3533_ctrlbank_set_pwm(&bl->cb, val);  in store_pwm()
    265  ret = lm3533_ctrlbank_set_max_current(&bl->cb, pdata->max_current);  in lm3533_bl_setup()
    269  return lm3533_ctrlbank_set_pwm(&bl->cb, pdata->pwm);  in lm3533_bl_setup()
    305  bl->cb.lm3533 = lm3533;  in lm3533_bl_probe()
    306  bl->cb.id = lm3533_bl_get_ctrlbank_id(bl);  in lm3533_bl_probe()
    307  bl->cb.dev = NULL; /* until registered */  in lm3533_bl_probe()
    [all …]
/drivers/net/ethernet/intel/

e100.c
    502  struct cb {  struct
    524  struct cb *next, *prev;  argument
    584  struct cb *cbs;
    585  struct cb *cb_to_use;
    586  struct cb *cb_to_send;
    587  struct cb *cb_to_clean;
    873  int (*cb_prepare)(struct nic *, struct cb *, struct sk_buff *))  in e100_exec_cb()  argument
    875  struct cb *cb;  in e100_exec_cb()  local
    886  cb = nic->cb_to_use;  in e100_exec_cb()
    887  nic->cb_to_use = cb->next;  in e100_exec_cb()
    [all …]
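e100's struct cb is a DMA command block kept on a circular list, and the nic tracks cursors into that ring: cb_to_use (next free block), cb_to_send (next to hand to hardware), and cb_to_clean (oldest completion to reap). A sketch of claiming the next free block from such a ring; the fields are trimmed and the real driver also locks the ring and checks hardware ownership bits.

    struct cb_sketch {
            struct cb_sketch *next, *prev;	/* circularly linked command blocks */
            int in_use;			/* stand-in for the real status word */
    };

    struct nic_sketch {
            struct cb_sketch *cb_to_use;	/* next free block */
            struct cb_sketch *cb_to_clean;	/* oldest block still owned by HW */
            unsigned int cbs_avail;		/* free blocks remaining in the ring */
    };

    /* Claim the next command block, or NULL if the ring is exhausted. */
    static struct cb_sketch *claim_cb(struct nic_sketch *nic)
    {
            struct cb_sketch *cb = nic->cb_to_use;

            if (!nic->cbs_avail)
                    return NULL;

            nic->cb_to_use = cb->next;	/* advance the cursor around the ring */
            nic->cbs_avail--;
            cb->in_use = 1;
            return cb;
    }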