
Searched refs:irqs (Results 1 – 25 of 163) sorted by relevance


/drivers/vfio/platform/
vfio_platform_irq.c
57 if (!(vdev->irqs[index].flags & VFIO_IRQ_INFO_MASKABLE)) in vfio_platform_set_irq_mask()
64 return vfio_virqfd_enable((void *) &vdev->irqs[index], in vfio_platform_set_irq_mask()
67 &vdev->irqs[index].mask, fd); in vfio_platform_set_irq_mask()
69 vfio_virqfd_disable(&vdev->irqs[index].mask); in vfio_platform_set_irq_mask()
74 vfio_platform_mask(&vdev->irqs[index]); in vfio_platform_set_irq_mask()
80 vfio_platform_mask(&vdev->irqs[index]); in vfio_platform_set_irq_mask()
117 if (!(vdev->irqs[index].flags & VFIO_IRQ_INFO_MASKABLE)) in vfio_platform_set_irq_unmask()
124 return vfio_virqfd_enable((void *) &vdev->irqs[index], in vfio_platform_set_irq_unmask()
127 &vdev->irqs[index].unmask, in vfio_platform_set_irq_unmask()
130 vfio_virqfd_disable(&vdev->irqs[index].unmask); in vfio_platform_set_irq_unmask()
[all …]
/drivers/staging/fsl-mc/bus/
fsl-mc-allocator.c
437 struct fsl_mc_device_irq **irqs = NULL; in fsl_mc_allocate_irqs() local
441 if (WARN_ON(mc_dev->irqs)) in fsl_mc_allocate_irqs()
463 irqs = devm_kzalloc(&mc_dev->dev, irq_count * sizeof(irqs[0]), in fsl_mc_allocate_irqs()
465 if (!irqs) in fsl_mc_allocate_irqs()
476 irqs[i] = to_fsl_mc_irq(resource); in fsl_mc_allocate_irqs()
479 WARN_ON(irqs[i]->mc_dev); in fsl_mc_allocate_irqs()
480 irqs[i]->mc_dev = mc_dev; in fsl_mc_allocate_irqs()
481 irqs[i]->dev_irq_index = i; in fsl_mc_allocate_irqs()
484 mc_dev->irqs = irqs; in fsl_mc_allocate_irqs()
489 irqs[i]->mc_dev = NULL; in fsl_mc_allocate_irqs()
[all …]
/drivers/misc/cxl/
irq.c
144 irq_off = hwirq - ctx->irqs.offset[r]; in cxl_irq_afu()
145 range = ctx->irqs.range[r]; in cxl_irq_afu()
263 if ((rc = cxl_ops->alloc_irq_ranges(&ctx->irqs, ctx->afu->adapter, in afu_allocate_irqs()
269 ctx->irqs.offset[0] = ctx->afu->native->psl_hwirq; in afu_allocate_irqs()
270 ctx->irqs.range[0] = 1; in afu_allocate_irqs()
284 for (i = 0; i < ctx->irqs.range[r]; i++) { in afu_allocate_irqs()
304 cxl_ops->release_irq_ranges(&ctx->irqs, ctx->afu->adapter); in afu_allocate_irqs()
319 hwirq = ctx->irqs.offset[r]; in afu_register_hwirqs()
320 for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) { in afu_register_hwirqs()
361 hwirq = ctx->irqs.offset[r]; in afu_release_irqs()
[all …]
guest.c
307 static int guest_alloc_irq_ranges(struct cxl_irq_ranges *irqs, in guest_alloc_irq_ranges() argument
312 memset(irqs, 0, sizeof(struct cxl_irq_ranges)); in guest_alloc_irq_ranges()
324 irqs->offset[i] = irq; in guest_alloc_irq_ranges()
325 irqs->range[i] = try; in guest_alloc_irq_ranges()
335 irq_free_range(adapter, irqs->offset[i], irqs->range[i]); in guest_alloc_irq_ranges()
340 static void guest_release_irq_ranges(struct cxl_irq_ranges *irqs, in guest_release_irq_ranges() argument
347 irq_free_range(adapter, irqs->offset[i], irqs->range[i]); in guest_release_irq_ranges()
389 hwirq = ctx->irqs.offset[r]; in disable_afu_irqs()
390 for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) { in disable_afu_irqs()
405 hwirq = ctx->irqs.offset[r]; in enable_afu_irqs()
[all …]
api.c
92 range = ctx->irqs.range[r]; in cxl_find_afu_irq()
94 return ctx->irqs.offset[r] + num; in cxl_find_afu_irq()
181 cxl_ops->release_irq_ranges(&ctx->irqs, ctx->afu->adapter); in cxl_free_afu_irqs()
482 int cxl_set_max_irqs_per_process(struct pci_dev *dev, int irqs) in cxl_set_max_irqs_per_process() argument
488 if (irqs > afu->adapter->user_irqs) in cxl_set_max_irqs_per_process()
492 afu->adapter->user_irqs = irqs; in cxl_set_max_irqs_per_process()
493 afu->irqs_max = irqs; in cxl_set_max_irqs_per_process()
native.c
563 ctx->elem->ivte_offsets[r] = cpu_to_be16(ctx->irqs.offset[r]); in update_ivtes_directed()
564 ctx->elem->ivte_ranges[r] = cpu_to_be16(ctx->irqs.range[r]); in update_ivtes_directed()
614 if (ctx->irqs.range[0] == 0) { in attach_afu_directed()
615 ctx->irqs.offset[0] = ctx->afu->native->psl_hwirq; in attach_afu_directed()
616 ctx->irqs.range[0] = 1; in attach_afu_directed()
702 (((u64)ctx->irqs.offset[0] & 0xffff) << 48) | in update_ivtes_dedicated()
703 (((u64)ctx->irqs.offset[1] & 0xffff) << 32) | in update_ivtes_dedicated()
704 (((u64)ctx->irqs.offset[2] & 0xffff) << 16) | in update_ivtes_dedicated()
705 ((u64)ctx->irqs.offset[3] & 0xffff)); in update_ivtes_dedicated()
707 (((u64)ctx->irqs.range[0] & 0xffff) << 48) | in update_ivtes_dedicated()
[all …]
/drivers/irqchip/
irq-nvic.c
90 unsigned int irqs, i, ret, numbanks; in nvic_of_init() local
102 irqs = numbanks * 32; in nvic_of_init()
103 if (irqs > NVIC_MAX_IRQ) in nvic_of_init()
104 irqs = NVIC_MAX_IRQ; in nvic_of_init()
107 irq_domain_add_linear(node, irqs, &nvic_irq_domain_ops, NULL); in nvic_of_init()
142 for (i = 0; i < irqs; i += 4) in nvic_of_init()
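The nvic_of_init() hits above show a common controller-probe pattern: derive the interrupt count from the number of 32-interrupt register banks, clamp it to the controller's ceiling, then size the linear IRQ domain from the result. Below is a minimal stand-alone C sketch of just that sizing step; the bank count and the NVIC_MAX_IRQ value are illustrative, not taken from the driver.

#include <stdio.h>

#define NVIC_MAX_IRQ 240                        /* illustrative ceiling for this sketch */

/* 32 interrupts per register bank, clamped to the controller maximum. */
static unsigned int nvic_irq_count(unsigned int numbanks)
{
        unsigned int irqs = numbanks * 32;

        if (irqs > NVIC_MAX_IRQ)
                irqs = NVIC_MAX_IRQ;
        return irqs;
}

int main(void)
{
        printf("%u\n", nvic_irq_count(8));      /* 8 banks -> 256, clamped to 240 */
        return 0;
}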
/drivers/pci/hotplug/
cpqphp_ctrl.c
1542 res_lists.irqs = NULL; in board_added()
2389 struct irq_mapping irqs; in configure_new_function() local
2469 if (!resources->irqs) { in configure_new_function()
2470 irqs.barber_pole = 0; in configure_new_function()
2471 irqs.interrupt[0] = 0; in configure_new_function()
2472 irqs.interrupt[1] = 0; in configure_new_function()
2473 irqs.interrupt[2] = 0; in configure_new_function()
2474 irqs.interrupt[3] = 0; in configure_new_function()
2475 irqs.valid_INT = 0; in configure_new_function()
2477 irqs.barber_pole = resources->irqs->barber_pole; in configure_new_function()
[all …]
/drivers/pci/host/
vmd.c
76 struct vmd_irq_list *irqs; member
95 struct vmd_irq_list *irqs) in index_from_irqs() argument
97 return irqs - vmd->irqs; in index_from_irqs()
186 return &vmd->irqs[0]; in vmd_next_irq()
190 if (vmd->irqs[i].count < vmd->irqs[best].count) in vmd_next_irq()
192 vmd->irqs[best].count++; in vmd_next_irq()
195 return &vmd->irqs[best]; in vmd_next_irq()
647 struct vmd_irq_list *irqs = data; in vmd_irq() local
651 list_for_each_entry_rcu(vmdirq, &irqs->irq_list, node) in vmd_irq()
693 vmd->irqs = devm_kcalloc(&dev->dev, vmd->msix_count, sizeof(*vmd->irqs), in vmd_probe()
[all …]
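The vmd.c hits keep an array of per-vector lists, recover a list's index by pointer subtraction (irqs - vmd->irqs), and have vmd_next_irq() pick the least-used list before bumping its count. A small stand-alone sketch of those two ideas, with a stripped-down struct standing in for struct vmd_irq_list:

#include <stddef.h>
#include <stdio.h>

/* Stand-in for struct vmd_irq_list: only the usage counter matters here. */
struct irq_list {
        unsigned int count;
};

/* Recover an element's index from its address, as index_from_irqs() does. */
static size_t index_from_irqs(const struct irq_list *base, const struct irq_list *elem)
{
        return elem - base;
}

/* Pick the least-loaded list, the core of the vmd_next_irq() distribution. */
static struct irq_list *next_irq(struct irq_list *irqs, size_t n)
{
        size_t i, best = 0;

        for (i = 1; i < n; i++)
                if (irqs[i].count < irqs[best].count)
                        best = i;
        irqs[best].count++;
        return &irqs[best];
}

int main(void)
{
        struct irq_list lists[4] = { { 2 }, { 0 }, { 1 }, { 0 } };
        struct irq_list *picked = next_irq(lists, 4);

        printf("picked index %zu\n", index_from_irqs(lists, picked));   /* prints 1 */
        return 0;
}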
/drivers/net/wan/
z85230.c
713 struct z8530_irqhandler *irqs; in z8530_interrupt() local
737 irqs=dev->chanA.irqs; in z8530_interrupt()
742 irqs->rx(&dev->chanA); in z8530_interrupt()
744 irqs->tx(&dev->chanA); in z8530_interrupt()
746 irqs->status(&dev->chanA); in z8530_interrupt()
749 irqs=dev->chanB.irqs; in z8530_interrupt()
754 irqs->rx(&dev->chanB); in z8530_interrupt()
756 irqs->tx(&dev->chanB); in z8530_interrupt()
758 irqs->status(&dev->chanB); in z8530_interrupt()
801 c->irqs = &z8530_sync; in z8530_sync_open()
[all …]
/drivers/perf/
arm_pmu.c
625 int i, irq, irqs; in cpu_pmu_free_irq() local
629 irqs = min(pmu_device->num_resources, num_possible_cpus()); in cpu_pmu_free_irq()
637 for (i = 0; i < irqs; ++i) { in cpu_pmu_free_irq()
654 int i, err, irq, irqs; in cpu_pmu_request_irq() local
661 irqs = min(pmu_device->num_resources, num_possible_cpus()); in cpu_pmu_request_irq()
662 if (irqs < 1) { in cpu_pmu_request_irq()
680 for (i = 0; i < irqs; ++i) { in cpu_pmu_request_irq()
696 if (irq_set_affinity(irq, cpumask_of(cpu)) && irqs > 1) { in cpu_pmu_request_irq()
922 int *irqs, i = 0; in of_pmu_irq_cfg() local
926 irqs = kcalloc(pdev->num_resources, sizeof(*irqs), GFP_KERNEL); in of_pmu_irq_cfg()
[all …]
/drivers/ssb/
driver_gpio.c
128 unsigned long irqs = (val ^ pol) & mask; in ssb_gpio_irq_chipco_handler() local
131 if (!irqs) in ssb_gpio_irq_chipco_handler()
134 for_each_set_bit(gpio, &irqs, bus->gpio.ngpio) in ssb_gpio_irq_chipco_handler()
136 ssb_chipco_gpio_polarity(chipco, irqs, val & irqs); in ssb_gpio_irq_chipco_handler()
325 unsigned long irqs = (val ^ pol) & mask; in ssb_gpio_irq_extif_handler() local
328 if (!irqs) in ssb_gpio_irq_extif_handler()
331 for_each_set_bit(gpio, &irqs, bus->gpio.ngpio) in ssb_gpio_irq_extif_handler()
333 ssb_extif_gpio_polarity(extif, irqs, val & irqs); in ssb_gpio_irq_extif_handler()
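Both GPIO handlers above compute irqs = (val ^ pol) & mask: a pin has fired when its current level differs from the programmed polarity and its interrupt is enabled, and the handler then walks the set bits. A stand-alone sketch of that edge-detect step; a plain bit loop stands in for for_each_set_bit, and the polarity rewrite is only described in a comment.

#include <stdint.h>
#include <stdio.h>

/* Report which enabled GPIO lines toggled, given level, polarity and mask registers. */
static void handle_gpio_irqs(uint32_t val, uint32_t pol, uint32_t mask)
{
        uint32_t irqs = (val ^ pol) & mask;
        unsigned int gpio;

        if (!irqs)
                return;                         /* no enabled pin toggled: spurious */

        for (gpio = 0; gpio < 32; gpio++)
                if (irqs & (1u << gpio))
                        printf("gpio %u fired\n", gpio);

        /*
         * The real handlers finish by re-programming the polarity of the fired
         * pins to their current level so the next transition triggers again.
         */
}

int main(void)
{
        handle_gpio_irqs(0x05, 0x01, 0x0f);     /* reports gpio 2 */
        return 0;
}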
/drivers/gpu/drm/i915/
intel_guc_loader.c
103 int irqs; in guc_interrupts_release() local
106 irqs = _MASKED_FIELD(GFX_FORWARD_VBLANK_MASK, GFX_FORWARD_VBLANK_NEVER); in guc_interrupts_release()
107 irqs |= _MASKED_BIT_DISABLE(GFX_INTERRUPT_STEERING); in guc_interrupts_release()
109 I915_WRITE(RING_MODE_GEN7(engine), irqs); in guc_interrupts_release()
120 int irqs; in guc_interrupts_capture() local
124 irqs = _MASKED_BIT_ENABLE(GFX_INTERRUPT_STEERING); in guc_interrupts_capture()
126 I915_WRITE(RING_MODE_GEN7(engine), irqs); in guc_interrupts_capture()
129 irqs = GT_RENDER_USER_INTERRUPT << GEN8_RCS_IRQ_SHIFT | in guc_interrupts_capture()
132 I915_WRITE(GUC_BCS_RCS_IER, ~irqs); in guc_interrupts_capture()
133 I915_WRITE(GUC_VCS2_VCS1_IER, ~irqs); in guc_interrupts_capture()
[all …]
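The guc_interrupts_capture()/guc_interrupts_release() hits rely on the i915 masked-register write idiom: the upper 16 bits of the written word select which bits the write affects and the lower 16 carry their new values, which is what _MASKED_FIELD, _MASKED_BIT_ENABLE and _MASKED_BIT_DISABLE build. A stand-alone sketch of that encoding; the macro names are re-created here and the bit position is illustrative.

#include <stdio.h>

#define MASKED_FIELD(mask, value)   (((unsigned int)(mask) << 16) | (unsigned int)(value))
#define MASKED_BIT_ENABLE(bit)      MASKED_FIELD((bit), (bit))
#define MASKED_BIT_DISABLE(bit)     MASKED_FIELD((bit), 0)

#define GFX_INTERRUPT_STEERING      (1u << 14)  /* illustrative bit position */

int main(void)
{
        unsigned int irqs = MASKED_BIT_ENABLE(GFX_INTERRUPT_STEERING);

        printf("enable:  0x%08x\n", irqs);                                  /* 0x40004000 */
        printf("disable: 0x%08x\n", MASKED_BIT_DISABLE(GFX_INTERRUPT_STEERING)); /* 0x40000000 */
        return 0;
}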
/drivers/input/serio/
i8042-sparcio.h
62 unsigned int irq = kbd->archdata.irqs[0]; in sparc_i8042_probe()
64 irq = op->archdata.irqs[0]; in sparc_i8042_probe()
72 unsigned int irq = ms->archdata.irqs[0]; in sparc_i8042_probe()
74 irq = op->archdata.irqs[0]; in sparc_i8042_probe()
/drivers/pci/pcie/
portdrv_core.c
200 static int init_service_irqs(struct pci_dev *dev, int *irqs, int mask) in init_service_irqs() argument
216 if (!pcie_port_enable_msix(dev, irqs, mask)) in init_service_irqs()
229 irqs[i] = irq; in init_service_irqs()
230 irqs[PCIE_PORT_SERVICE_VC_SHIFT] = -1; in init_service_irqs()
361 int irqs[PCIE_PORT_DEVICE_MAXSERVICES]; in pcie_port_device_register() local
381 status = init_service_irqs(dev, irqs, capabilities); in pcie_port_device_register()
395 if (!pcie_device_init(dev, service, irqs[i])) in pcie_port_device_register()
/drivers/bcma/
driver_gpio.c
109 unsigned long irqs = (val ^ pol) & mask; in bcma_gpio_irq_handler() local
112 if (!irqs) in bcma_gpio_irq_handler()
115 for_each_set_bit(gpio, &irqs, gc->ngpio) in bcma_gpio_irq_handler()
117 bcma_chipco_gpio_polarity(cc, irqs, val & irqs); in bcma_gpio_irq_handler()
/drivers/fpga/
socfpga.c
264 static void socfpga_fpga_enable_irqs(struct socfpga_fpga_priv *priv, u32 irqs) in socfpga_fpga_enable_irqs() argument
270 socfpga_fpga_writel(priv, SOCFPGA_FPGMGR_GPIO_INT_POL_OFST, irqs); in socfpga_fpga_enable_irqs()
273 socfpga_fpga_writel(priv, SOCFPGA_FPGMGR_GPIO_PORTA_EOI_OFST, irqs); in socfpga_fpga_enable_irqs()
279 socfpga_fpga_writel(priv, SOCFPGA_FPGMGR_GPIO_INTEN_OFST, irqs); in socfpga_fpga_enable_irqs()
290 u32 irqs, st; in socfpga_fpga_isr() local
294 irqs = socfpga_fpga_raw_readl(priv, SOCFPGA_FPGMGR_GPIO_INTSTAT_OFST); in socfpga_fpga_isr()
296 socfpga_fpga_raw_writel(priv, SOCFPGA_FPGMGR_GPIO_PORTA_EOI_OFST, irqs); in socfpga_fpga_isr()
/drivers/base/regmap/
regmap-irq.c
53 return &data->chip->irqs[irq]; in irq_to_regmap_irq()
361 if (data->status_buf[chip->irqs[i].reg_offset / in regmap_irq_thread()
362 map->reg_stride] & chip->irqs[i].mask) { in regmap_irq_thread()
429 if (chip->irqs[i].reg_offset % map->reg_stride) in regmap_add_irq_chip()
431 if (chip->irqs[i].reg_offset / map->reg_stride >= in regmap_add_irq_chip()
512 d->mask_buf_def[chip->irqs[i].reg_offset / map->reg_stride] in regmap_add_irq_chip()
513 |= chip->irqs[i].mask; in regmap_add_irq_chip()
594 reg = chip->irqs[i].type_reg_offset / map->reg_stride; in regmap_add_irq_chip()
595 d->type_buf_def[reg] |= chip->irqs[i].type_rising_mask | in regmap_add_irq_chip()
596 chip->irqs[i].type_falling_mask; in regmap_add_irq_chip()
[all …]
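The regmap-irq hits describe each interrupt by a status-register offset plus a bit mask, and the threaded handler tests status_buf[reg_offset / reg_stride] & mask for every entry. A stand-alone sketch of that dispatch loop; these are plain C structs for illustration, not the regmap API.

#include <stdint.h>
#include <stdio.h>

struct irq_desc {
        unsigned int reg_offset;   /* address offset of this IRQ's status register */
        uint32_t mask;             /* bit(s) within that register */
};

/* Walk the IRQ table and report entries whose status bit is set. */
static void dispatch(const uint32_t *status_buf, unsigned int reg_stride,
                     const struct irq_desc *irqs, unsigned int num_irqs)
{
        unsigned int i;

        for (i = 0; i < num_irqs; i++)
                if (status_buf[irqs[i].reg_offset / reg_stride] & irqs[i].mask)
                        printf("irq %u fired\n", i);
}

int main(void)
{
        const struct irq_desc irqs[] = {
                { .reg_offset = 0, .mask = 1u << 0 },
                { .reg_offset = 0, .mask = 1u << 3 },
                { .reg_offset = 4, .mask = 1u << 1 },
        };
        const uint32_t status_buf[] = { 0x08, 0x02 };   /* two status registers, stride 4 */

        dispatch(status_buf, 4, irqs, 3);               /* reports irqs 1 and 2 */
        return 0;
}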
/drivers/mtd/nand/
cafe_nand.c
107 uint32_t irqs = cafe_readl(cafe, NAND_IRQ); in cafe_device_ready() local
109 cafe_writel(cafe, irqs, NAND_IRQ); in cafe_device_ready()
112 result?"":" not", irqs, cafe_readl(cafe, NAND_IRQ), in cafe_device_ready()
284 uint32_t irqs; in cafe_nand_cmdfunc() local
287 irqs = cafe_readl(cafe, NAND_IRQ); in cafe_nand_cmdfunc()
288 if (irqs & doneint) in cafe_nand_cmdfunc()
292 cafe_dev_dbg(&cafe->pdev->dev, "Wait for ready, IRQ %x\n", irqs); in cafe_nand_cmdfunc()
297 command, 500000-c, irqs, cafe_readl(cafe, NAND_IRQ)); in cafe_nand_cmdfunc()
339 uint32_t irqs = cafe_readl(cafe, NAND_IRQ); in cafe_nand_interrupt() local
340 cafe_writel(cafe, irqs & ~0x90000000, NAND_IRQ); in cafe_nand_interrupt()
[all …]
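The cafe_nand.c hits read NAND_IRQ and then write the value straight back, the usual write-back-to-acknowledge idiom, with the interrupt handler masking out the top bits it leaves pending (irqs & ~0x90000000). A stand-alone sketch that simulates a write-1-to-clear status register to show the effect; the register contents and layout are mocked, only the idiom matches the listing.

#include <stdio.h>

static unsigned int nand_irq_reg = 0x90000005u;     /* mocked status register contents */

static unsigned int read_irq(void)      { return nand_irq_reg; }
static void write_irq(unsigned int v)   { nand_irq_reg &= ~v; }   /* write-1-to-clear */

int main(void)
{
        unsigned int irqs = read_irq();

        /* Acknowledge everything except the bits the handler wants to keep pending. */
        write_irq(irqs & ~0x90000000u);
        printf("before 0x%08x, after 0x%08x\n", irqs, read_irq());
        return 0;
}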
/drivers/mfd/
mc13xxx-core.c
133 if (irq < 0 || irq >= ARRAY_SIZE(mc13xxx->irqs)) in mc13xxx_irq_status()
427 for (i = 0; i < ARRAY_SIZE(mc13xxx->irqs); i++) { in mc13xxx_common_init()
428 mc13xxx->irqs[i].reg_offset = i / MC13XXX_IRQ_PER_REG; in mc13xxx_common_init()
429 mc13xxx->irqs[i].mask = BIT(i % MC13XXX_IRQ_PER_REG); in mc13xxx_common_init()
440 mc13xxx->irq_chip.irqs = mc13xxx->irqs; in mc13xxx_common_init()
441 mc13xxx->irq_chip.num_irqs = ARRAY_SIZE(mc13xxx->irqs); in mc13xxx_common_init()
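The mc13xxx_common_init() hits fill a regmap-style IRQ table from a flat interrupt index, packing MC13XXX_IRQ_PER_REG bits into each status register: reg_offset = i / IRQ_PER_REG and mask = BIT(i % IRQ_PER_REG). A stand-alone sketch of that derivation; the 24 bits per register and the table size are illustrative values for the sketch.

#include <stdio.h>

#define IRQ_PER_REG  24                 /* illustrative bits per status register */
#define NUM_IRQS     46                 /* illustrative table size */

struct irq_entry {
        unsigned int reg_offset;
        unsigned int mask;
};

int main(void)
{
        struct irq_entry irqs[NUM_IRQS];
        unsigned int i;

        /* Same derivation as the mc13xxx_common_init() loop above. */
        for (i = 0; i < NUM_IRQS; i++) {
                irqs[i].reg_offset = i / IRQ_PER_REG;
                irqs[i].mask = 1u << (i % IRQ_PER_REG);
        }

        printf("irq 30 -> register %u, mask 0x%08x\n",
               irqs[30].reg_offset, irqs[30].mask);     /* register 1, bit 6 */
        return 0;
}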
sec-irq.c
384 .irqs = s2mps11_irqs,
393 .irqs = s2mps14_irqs, \
417 .irqs = s2mpu02_irqs,
427 .irqs = s5m8767_irqs,
437 .irqs = s5m8763_irqs,
/drivers/usb/host/
u132-hcd.c
515 unsigned long irqs; in u132_hcd_giveback_urb() local
518 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
523 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
532 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_giveback_urb()
554 unsigned long irqs; in u132_hcd_abandon_urb() local
557 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
562 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
571 spin_unlock_irqrestore(&endp->queue_lock.slock, irqs); in u132_hcd_abandon_urb()
1860 unsigned long irqs; in create_endpoint_and_queue_int() local
1869 spin_lock_irqsave(&endp->queue_lock.slock, irqs); in create_endpoint_and_queue_int()
[all …]
/drivers/iio/accel/
mma9551.c
58 int irqs[MMA9551_GPIO_COUNT]; member
355 if (irq == data->irqs[i]) { in mma9551_event_handler()
363 "irq triggered on unused line %d\n", data->irqs[3]); in mma9551_event_handler()
432 data->irqs[i] = ret; in mma9551_gpio_probe()
433 ret = devm_request_threaded_irq(dev, data->irqs[i], in mma9551_gpio_probe()
438 dev_err(dev, "request irq %d failed\n", data->irqs[i]); in mma9551_gpio_probe()
443 desc_to_gpio(gpio), data->irqs[i]); in mma9551_gpio_probe()
/drivers/pinctrl/sh-pfc/
core.c
36 unsigned int *irqs = NULL; in sh_pfc_map_resources() local
68 irqs = devm_kzalloc(pfc->dev, num_irqs * sizeof(*irqs), in sh_pfc_map_resources()
70 if (irqs == NULL) in sh_pfc_map_resources()
74 pfc->irqs = irqs; in sh_pfc_map_resources()
88 *irqs++ = platform_get_irq(pdev, i); in sh_pfc_map_resources()
/drivers/power/supply/
lp8788-charger.c
112 struct lp8788_chg_irq irqs[LP8788_MAX_CHG_IRQS]; member
462 if (pchg->irqs[i].virq == virq) { in lp8788_find_irq_id()
463 *id = pchg->irqs[i].which; in lp8788_find_irq_id()
529 pchg->irqs[nr_irq].virq = virq; in lp8788_set_irqs()
530 pchg->irqs[nr_irq].which = i; in lp8788_set_irqs()
547 free_irq(pchg->irqs[i].virq, pchg); in lp8788_set_irqs()
588 irq = pchg->irqs[i].virq; in lp8788_irq_unregister()
