/drivers/irqchip/
irq-armada-370-xp.c
     94  irq_hw_number_t hwirq = irqd_to_hwirq(d);  in armada_370_xp_irq_mask() local
     96  if (!is_percpu_irq(hwirq))  in armada_370_xp_irq_mask()
     97  writel(hwirq, main_int_base +  in armada_370_xp_irq_mask()
    100  writel(hwirq, per_cpu_int_base +  in armada_370_xp_irq_mask()
    106  irq_hw_number_t hwirq = irqd_to_hwirq(d);  in armada_370_xp_irq_unmask() local
    108  if (!is_percpu_irq(hwirq))  in armada_370_xp_irq_unmask()
    109  writel(hwirq, main_int_base +  in armada_370_xp_irq_unmask()
    112  writel(hwirq, per_cpu_int_base +  in armada_370_xp_irq_unmask()
    120  int hwirq;  in armada_370_xp_alloc_msi() local
    123  hwirq = find_first_zero_bit(&msi_used, PCI_MSI_DOORBELL_NR);  in armada_370_xp_alloc_msi()
    [all …]
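The armada_370_xp_alloc_msi() hits above show the usual way an MSI controller hands out hwirqs: a bitmap records which doorbell slots are taken and find_first_zero_bit() picks the next free one. A minimal sketch of that allocator, assuming a 32-slot doorbell; the demo_* names are invented for illustration and are not the driver's own:

    #include <linux/bitops.h>
    #include <linux/errno.h>
    #include <linux/spinlock.h>

    #define DEMO_NR_MSI 32                  /* assumed doorbell size for this sketch */

    static DEFINE_SPINLOCK(demo_msi_lock);
    static unsigned long demo_msi_used;     /* one bit per allocated MSI hwirq */

    /* Return the lowest free hwirq, or -ENOSPC when the doorbell is full. */
    static int demo_alloc_msi_hwirq(void)
    {
            int hwirq;

            spin_lock(&demo_msi_lock);
            hwirq = find_first_zero_bit(&demo_msi_used, DEMO_NR_MSI);
            if (hwirq < DEMO_NR_MSI)
                    set_bit(hwirq, &demo_msi_used);
            else
                    hwirq = -ENOSPC;
            spin_unlock(&demo_msi_lock);

            return hwirq;
    }

Freeing reverses this with clear_bit() under the same lock, which is essentially what the matching teardown paths in these drivers do.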
irq-or1k-pic.c
     32  mtspr(SPR_PICMR, mfspr(SPR_PICMR) & ~(1UL << data->hwirq));  in or1k_pic_mask()
     37  mtspr(SPR_PICMR, mfspr(SPR_PICMR) | (1UL << data->hwirq));  in or1k_pic_unmask()
     42  mtspr(SPR_PICSR, (1UL << data->hwirq));  in or1k_pic_ack()
     47  mtspr(SPR_PICMR, mfspr(SPR_PICMR) & ~(1UL << data->hwirq));  in or1k_pic_mask_ack()
     48  mtspr(SPR_PICSR, (1UL << data->hwirq));  in or1k_pic_mask_ack()
     59  mtspr(SPR_PICSR, mfspr(SPR_PICSR) & ~(1UL << data->hwirq));  in or1k_pic_or1200_ack()
     64  mtspr(SPR_PICMR, mfspr(SPR_PICMR) & ~(1UL << data->hwirq));  in or1k_pic_or1200_mask_ack()
     65  mtspr(SPR_PICSR, mfspr(SPR_PICSR) & ~(1UL << data->hwirq));  in or1k_pic_or1200_mask_ack()
    107  int hwirq;  in pic_get_irq() local
    109  hwirq = ffs(mfspr(SPR_PICSR) >> first);  in pic_get_irq()
    [all …]
irq-mmp.c
     70  int hwirq;  in icu_mask_ack_irq() local
     73  hwirq = d->irq - data->virq_base;  in icu_mask_ack_irq()
     75  r = readl_relaxed(mmp_icu_base + (hwirq << 2));  in icu_mask_ack_irq()
     78  writel_relaxed(r, mmp_icu_base + (hwirq << 2));  in icu_mask_ack_irq()
     82  && (hwirq == data->clr_mfp_hwirq))  in icu_mask_ack_irq()
     85  r = readl_relaxed(data->reg_mask) | (1 << hwirq);  in icu_mask_ack_irq()
     94  int hwirq;  in icu_mask_irq() local
     97  hwirq = d->irq - data->virq_base;  in icu_mask_irq()
     99  r = readl_relaxed(mmp_icu_base + (hwirq << 2));  in icu_mask_irq()
    102  writel_relaxed(r, mmp_icu_base + (hwirq << 2));  in icu_mask_irq()
    [all …]
irq-vf610-mscm-ir.c
     92  irq_hw_number_t hwirq = data->hwirq;  in vf610_mscm_ir_enable() local
     96  irsprc = readw_relaxed(chip_data->mscm_ir_base + MSCM_IRSPRC(hwirq));  in vf610_mscm_ir_enable()
    102  chip_data->mscm_ir_base + MSCM_IRSPRC(hwirq));  in vf610_mscm_ir_enable()
    109  irq_hw_number_t hwirq = data->hwirq;  in vf610_mscm_ir_disable() local
    112  writew_relaxed(0x0, chip_data->mscm_ir_base + MSCM_IRSPRC(hwirq));  in vf610_mscm_ir_disable()
    132  irq_hw_number_t hwirq;  in vf610_mscm_ir_domain_alloc() local
    142  hwirq = fwspec->param[0];  in vf610_mscm_ir_domain_alloc()
    144  irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,  in vf610_mscm_ir_domain_alloc()
    166  unsigned long *hwirq,  in vf610_mscm_ir_domain_translate() argument
    171  *hwirq = fwspec->param[0];  in vf610_mscm_ir_domain_translate()
irq-xtensa-mx.c
     70  unsigned int mask = 1u << d->hwirq;  in xtensa_mx_irq_mask()
     74  set_er(1u << (xtensa_get_ext_irq_no(d->hwirq) -  in xtensa_mx_irq_mask()
     85  unsigned int mask = 1u << d->hwirq;  in xtensa_mx_irq_unmask()
     89  set_er(1u << (xtensa_get_ext_irq_no(d->hwirq) -  in xtensa_mx_irq_unmask()
    100  variant_irq_enable(d->hwirq);  in xtensa_mx_irq_enable()
    107  variant_irq_disable(d->hwirq);  in xtensa_mx_irq_disable()
    112  set_sr(1 << d->hwirq, intclear);  in xtensa_mx_irq_ack()
    117  set_sr(1 << d->hwirq, intset);  in xtensa_mx_irq_retrigger()
    126  set_er(mask, MIROUT(d->hwirq - HW_IRQ_MX_BASE));  in xtensa_mx_irq_set_affinity()
irq-imx-gpcv2.c
     88  unsigned int idx = d->hwirq / 32;  in imx_gpcv2_irq_set_wake()
     95  mask = 1 << d->hwirq % 32;  in imx_gpcv2_irq_set_wake()
    116  reg = cd->gpc_base + cd->cpu2wakeup + d->hwirq / 32 * 4;  in imx_gpcv2_irq_unmask()
    118  val &= ~(1 << d->hwirq % 32);  in imx_gpcv2_irq_unmask()
    132  reg = cd->gpc_base + cd->cpu2wakeup + d->hwirq / 32 * 4;  in imx_gpcv2_irq_mask()
    134  val |= 1 << (d->hwirq % 32);  in imx_gpcv2_irq_mask()
    156  unsigned long *hwirq,  in imx_gpcv2_domain_translate() argument
    167  *hwirq = fwspec->param[1];  in imx_gpcv2_domain_translate()
    181  irq_hw_number_t hwirq;  in imx_gpcv2_domain_alloc() local
    186  err = imx_gpcv2_domain_translate(domain, fwspec, &hwirq, &type);  in imx_gpcv2_domain_alloc()
    [all …]
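The irq-imx-gpcv2.c lines above show the standard word/bit split for a flat hwirq space: the register is selected by hwirq / 32 (times a 4-byte stride) and the bit inside it by hwirq % 32; irq-clps711x.c further down does the same with 16-bit-wide mask registers. A small stand-alone C sketch of that arithmetic, where the IMR_BASE offset and the interrupt count are made-up values for illustration only:

    #include <stdint.h>
    #include <stdio.h>

    #define IMR_BASE 0x30u        /* hypothetical offset of the first mask register */
    #define NR_IRQS  128u         /* hypothetical number of hardware interrupts */

    int main(void)
    {
            for (uint32_t hwirq = 0; hwirq < NR_IRQS; hwirq += 37) {
                    uint32_t reg = IMR_BASE + (hwirq / 32) * 4;  /* 32 IRQs per 32-bit register */
                    uint32_t bit = hwirq % 32;                   /* bit position inside that register */

                    printf("hwirq %3u -> register 0x%02x, mask 0x%08x\n",
                           hwirq, reg, 1u << bit);
            }
            return 0;
    }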
irq-mtk-sysirq.c
     32  irq_hw_number_t hwirq = data->hwirq;  in mtk_sysirq_set_type() local
     38  offset = hwirq & 0x1f;  in mtk_sysirq_set_type()
     39  reg_index = hwirq >> 5;  in mtk_sysirq_set_type()
     72  unsigned long *hwirq,  in mtk_sysirq_domain_translate() argument
     83  *hwirq = fwspec->param[1];  in mtk_sysirq_domain_translate()
     95  irq_hw_number_t hwirq;  in mtk_sysirq_domain_alloc() local
    106  hwirq = fwspec->param[1];  in mtk_sysirq_domain_alloc()
    108  irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,  in mtk_sysirq_domain_alloc()
irq-bcm2835.c
    103  writel_relaxed(HWIRQ_BIT(d->hwirq), intc.disable[HWIRQ_BANK(d->hwirq)]);  in armctrl_mask_irq()
    108  writel_relaxed(HWIRQ_BIT(d->hwirq), intc.enable[HWIRQ_BANK(d->hwirq)]);  in armctrl_unmask_irq()
    242  u32 hwirq;  in bcm2835_handle_irq() local
    244  while ((hwirq = get_next_armctrl_hwirq()) != ~0)  in bcm2835_handle_irq()
    245  handle_IRQ(irq_linear_revmap(intc.domain, hwirq), regs);  in bcm2835_handle_irq()
    250  u32 hwirq;  in bcm2836_chained_handle_irq() local
    252  while ((hwirq = get_next_armctrl_hwirq()) != ~0)  in bcm2836_chained_handle_irq()
    253  generic_handle_irq(irq_linear_revmap(intc.domain, hwirq));  in bcm2836_chained_handle_irq()
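The bcm2835/bcm2836 handlers above loop on a "next pending hwirq" register read and convert each hardware number to its Linux interrupt with irq_linear_revmap() before dispatching it. A sketch of that chained-handler shape, assuming a hypothetical controller: demo_domain and demo_next_pending_hwirq() are placeholders, and only the revmap-and-dispatch loop mirrors the listing:

    #include <linux/irqdesc.h>
    #include <linux/irqdomain.h>

    static struct irq_domain *demo_domain;      /* set up at probe time (not shown) */

    /* Placeholder: a real driver reads its pending register here and
     * returns a hardware interrupt number, or ~0 when nothing is pending. */
    static u32 demo_next_pending_hwirq(void)
    {
            return ~0;
    }

    static void demo_chained_handle_irq(struct irq_desc *desc)
    {
            u32 hwirq;

            /* Translate each pending hwirq to its virq and dispatch it. */
            while ((hwirq = demo_next_pending_hwirq()) != ~0)
                    generic_handle_irq(irq_linear_revmap(demo_domain, hwirq));
    }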
irq-crossbar.c
     79  irq_hw_number_t hwirq)  in allocate_gic_irq() argument
     91  cb->irq_map[i] = hwirq;  in allocate_gic_irq()
    110  cb->write(i, hwirq);  in allocate_gic_irq()
    119  irq_hw_number_t hwirq;  in crossbar_domain_alloc() local
    127  hwirq = fwspec->param[1];  in crossbar_domain_alloc()
    128  if ((hwirq + nr_irqs) > cb->max_crossbar_sources)  in crossbar_domain_alloc()
    132  int err = allocate_gic_irq(d, virq + i, hwirq + i);  in crossbar_domain_alloc()
    137  irq_domain_set_hwirq_and_chip(d, virq + i, hwirq + i,  in crossbar_domain_alloc()
    166  cb->irq_map[d->hwirq] = IRQ_FREE;  in crossbar_domain_free()
    167  cb->write(d->hwirq, cb->safe_map);  in crossbar_domain_free()
    [all …]
irq-vt8500.c
     87  void __iomem *stat_reg = base + VT8500_ICIS + (d->hwirq < 32 ? 0 : 4);  in vt8500_irq_mask()
     91  edge = readb(base + VT8500_ICDC + d->hwirq) & VT8500_EDGE;  in vt8500_irq_mask()
     95  status |= (1 << (d->hwirq & 0x1f));  in vt8500_irq_mask()
     98  dctr = readb(base + VT8500_ICDC + d->hwirq);  in vt8500_irq_mask()
    100  writeb(dctr, base + VT8500_ICDC + d->hwirq);  in vt8500_irq_mask()
    110  dctr = readb(base + VT8500_ICDC + d->hwirq);  in vt8500_irq_unmask()
    112  writeb(dctr, base + VT8500_ICDC + d->hwirq);  in vt8500_irq_unmask()
    121  dctr = readb(base + VT8500_ICDC + d->hwirq);  in vt8500_irq_set_type()
    140  writeb(dctr, base + VT8500_ICDC + d->hwirq);  in vt8500_irq_set_type()
irq-xtensa-pic.c
     46  cached_irq_mask &= ~(1 << d->hwirq);  in xtensa_irq_mask()
     52  cached_irq_mask |= 1 << d->hwirq;  in xtensa_irq_unmask()
     58  variant_irq_enable(d->hwirq);  in xtensa_irq_enable()
     65  variant_irq_disable(d->hwirq);  in xtensa_irq_disable()
     70  set_sr(1 << d->hwirq, intclear);  in xtensa_irq_ack()
     75  set_sr(1 << d->hwirq, intset);  in xtensa_irq_retrigger()
irq-nvic.c
     44  nvic_handle_irq(irq_hw_number_t hwirq, struct pt_regs *regs)  in nvic_handle_irq() argument
     46  unsigned int irq = irq_linear_revmap(nvic_irq_domain, hwirq);  in nvic_handle_irq()
     53  unsigned long *hwirq, unsigned int *type)  in nvic_irq_domain_translate() argument
     57  *hwirq = fwspec->param[0];  in nvic_irq_domain_translate()
     66  irq_hw_number_t hwirq;  in nvic_irq_domain_alloc() local
     70  ret = nvic_irq_domain_translate(domain, fwspec, &hwirq, &type);  in nvic_irq_domain_alloc()
     75  irq_map_generic_chip(domain, virq + i, hwirq + i);  in nvic_irq_domain_alloc()
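irq-nvic.c, like the mtk-sysirq, tegra, imx-gpcv2 and vf610 entries, splits hierarchical allocation into a translate step that pulls the hwirq out of the firmware specifier and an alloc step that walks virq + i / hwirq + i. A minimal sketch of a one-cell translate callback, assuming the first cell is the hwirq and no trigger type is encoded (the demo_ names are not from any of these drivers):

    #include <linux/errno.h>
    #include <linux/irq.h>
    #include <linux/irqdomain.h>

    /* One DT cell: cell 0 is the hwirq, no trigger information encoded. */
    static int demo_domain_translate(struct irq_domain *d,
                                     struct irq_fwspec *fwspec,
                                     unsigned long *hwirq,
                                     unsigned int *type)
    {
            if (fwspec->param_count < 1)
                    return -EINVAL;

            *hwirq = fwspec->param[0];
            *type = IRQ_TYPE_NONE;
            return 0;
    }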
irq-gic-v2m.c
    112  msg->data = data->hwirq;  in gicv2m_compose_msi_msg()
    129  irq_hw_number_t hwirq)  in gicv2m_irq_gic_domain_alloc() argument
    139  fwspec.param[1] = hwirq - 32;  in gicv2m_irq_gic_domain_alloc()
    155  static void gicv2m_unalloc_msi(struct v2m_data *v2m, unsigned int hwirq)  in gicv2m_unalloc_msi() argument
    159  pos = hwirq - v2m->spi_start;  in gicv2m_unalloc_msi()
    161  pr_err("Failed to teardown msi. Invalid hwirq %d\n", hwirq);  in gicv2m_unalloc_msi()
    174  int hwirq, offset, err = 0;  in gicv2m_irq_domain_alloc() local
    190  hwirq = v2m->spi_start + offset;  in gicv2m_irq_domain_alloc()
    192  err = gicv2m_irq_gic_domain_alloc(domain, virq, hwirq);  in gicv2m_irq_domain_alloc()
    194  gicv2m_unalloc_msi(v2m, hwirq);  in gicv2m_irq_domain_alloc()
    [all …]
irq-keystone.c
     70  kirq->mask |= BIT(d->hwirq);  in keystone_irq_setmask()
     71  dev_dbg(kirq->dev, "mask %lu [%x]\n", d->hwirq, kirq->mask);  in keystone_irq_setmask()
     78  kirq->mask &= ~BIT(d->hwirq);  in keystone_irq_unmask()
     79  dev_dbg(kirq->dev, "unmask %lu [%x]\n", d->hwirq, kirq->mask);  in keystone_irq_unmask()
    208  int hwirq;  in keystone_irq_remove() local
    212  for (hwirq = 0; hwirq < KEYSTONE_N_IRQ; hwirq++)  in keystone_irq_remove()
    213  irq_dispose_mapping(irq_find_mapping(kirq->irqd, hwirq));  in keystone_irq_remove()
irq-sun4i.c
    138  u32 hwirq;  in sun4i_handle_irq() local
    150  hwirq = readl(sun4i_irq_base + SUN4I_IRQ_VECTOR_REG) >> 2;  in sun4i_handle_irq()
    151  if (hwirq == 0 &&  in sun4i_handle_irq()
    156  handle_domain_irq(sun4i_irq_domain, hwirq, regs);  in sun4i_handle_irq()
    157  hwirq = readl(sun4i_irq_base + SUN4I_IRQ_VECTOR_REG) >> 2;  in sun4i_handle_irq()
    158  } while (hwirq != 0);  in sun4i_handle_irq()
irq-clps711x.c
     97  irq_hw_number_t hwirq = irqd_to_hwirq(d);  in clps711x_intc_eoi() local
     99  writel_relaxed(0, clps711x_intc->base + clps711x_irqs[hwirq].eoi);  in clps711x_intc_eoi()
    104  irq_hw_number_t hwirq = irqd_to_hwirq(d);  in clps711x_intc_mask() local
    105  void __iomem *intmr = clps711x_intc->intmr[hwirq / 16];  in clps711x_intc_mask()
    109  tmp &= ~(1 << (hwirq % 16));  in clps711x_intc_mask()
    115  irq_hw_number_t hwirq = irqd_to_hwirq(d);  in clps711x_intc_unmask() local
    116  void __iomem *intmr = clps711x_intc->intmr[hwirq / 16];  in clps711x_intc_unmask()
    120  tmp |= 1 << (hwirq % 16);  in clps711x_intc_unmask()
irq-bcm7038-l1.c
    136  int hwirq;  in bcm7038_l1_irq_handle() local
    143  for_each_set_bit(hwirq, &pending, IRQS_PER_WORD) {  in bcm7038_l1_irq_handle()
    145  base + hwirq));  in bcm7038_l1_irq_handle()
    155  u32 word = d->hwirq / IRQS_PER_WORD;  in __bcm7038_l1_unmask()
    156  u32 mask = BIT(d->hwirq % IRQS_PER_WORD);  in __bcm7038_l1_unmask()
    166  u32 word = d->hwirq / IRQS_PER_WORD;  in __bcm7038_l1_mask()
    167  u32 mask = BIT(d->hwirq % IRQS_PER_WORD);  in __bcm7038_l1_mask()
    180  __bcm7038_l1_unmask(d, intc->affinity[d->hwirq]);  in bcm7038_l1_unmask()
    190  __bcm7038_l1_mask(d, intc->affinity[d->hwirq]);  in bcm7038_l1_mask()
    200  irq_hw_number_t hw = d->hwirq;  in bcm7038_l1_set_affinity()
irq-tegra.c
     96  mask = BIT(d->hwirq % 32);  in tegra_ictlr_write_mask()
    127  u32 irq = d->hwirq;  in tegra_set_wake()
    226  unsigned long *hwirq,  in tegra_ictlr_domain_translate() argument
    237  *hwirq = fwspec->param[1];  in tegra_ictlr_domain_translate()
    252  irq_hw_number_t hwirq;  in tegra_ictlr_domain_alloc() local
    260  hwirq = fwspec->param[1];  in tegra_ictlr_domain_alloc()
    261  if (hwirq >= (num_ictlrs * 32))  in tegra_ictlr_domain_alloc()
    265  int ictlr = (hwirq + i) / 32;  in tegra_ictlr_domain_alloc()
    267  irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,  in tegra_ictlr_domain_alloc()
/drivers/pci/host/
pci-xgene-msi.c
    135  static u32 hwirq_to_reg_set(unsigned long hwirq)  in hwirq_to_reg_set() argument
    137  return (hwirq / (NR_HW_IRQS * IRQS_PER_IDX));  in hwirq_to_reg_set()
    140  static u32 hwirq_to_group(unsigned long hwirq)  in hwirq_to_group() argument
    142  return (hwirq % NR_HW_IRQS);  in hwirq_to_group()
    145  static u32 hwirq_to_msi_data(unsigned long hwirq)  in hwirq_to_msi_data() argument
    147  return ((hwirq / NR_HW_IRQS) % IRQS_PER_IDX);  in hwirq_to_msi_data()
    153  u32 reg_set = hwirq_to_reg_set(data->hwirq);  in xgene_compose_msi_msg()
    154  u32 group = hwirq_to_group(data->hwirq);  in xgene_compose_msi_msg()
    159  msg->data = hwirq_to_msi_data(data->hwirq);  in xgene_compose_msi_msg()
    171  static int hwirq_to_cpu(unsigned long hwirq)  in hwirq_to_cpu() argument
    [all …]
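The pci-xgene-msi.c helpers above decompose one flat MSI hwirq into a register-set index, a group, and an MSI data value with plain div/mod arithmetic, and xgene_compose_msi_msg() then packs those pieces into the message. The same formulas in a runnable stand-alone check; the NR_HW_IRQS and IRQS_PER_IDX values here are examples only, not the driver's constants:

    #include <stdio.h>

    #define NR_HW_IRQS   16u   /* example value for this sketch */
    #define IRQS_PER_IDX 32u   /* example value for this sketch */

    static unsigned int hwirq_to_reg_set(unsigned long hwirq)
    {
            return hwirq / (NR_HW_IRQS * IRQS_PER_IDX);
    }

    static unsigned int hwirq_to_group(unsigned long hwirq)
    {
            return hwirq % NR_HW_IRQS;
    }

    static unsigned int hwirq_to_msi_data(unsigned long hwirq)
    {
            return (hwirq / NR_HW_IRQS) % IRQS_PER_IDX;
    }

    int main(void)
    {
            unsigned long hwirq = 777;

            printf("hwirq %lu -> reg_set %u, group %u, msi_data %u\n",
                   hwirq, hwirq_to_reg_set(hwirq),
                   hwirq_to_group(hwirq), hwirq_to_msi_data(hwirq));
            return 0;
    }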
/drivers/vfio/platform/
vfio_platform_irq.c
     33  disable_irq_nosync(irq_ctx->hwirq);  in vfio_platform_mask()
     93  enable_irq(irq_ctx->hwirq);  in vfio_platform_unmask()
    159  disable_irq_nosync(irq_ctx->hwirq);  in vfio_automasked_irq_handler()
    188  irq_clear_status_flags(irq->hwirq, IRQ_NOAUTOEN);  in vfio_set_trigger()
    189  free_irq(irq->hwirq, irq);  in vfio_set_trigger()
    199  irq->hwirq, vdev->name);  in vfio_set_trigger()
    211  irq_set_status_flags(irq->hwirq, IRQ_NOAUTOEN);  in vfio_set_trigger()
    212  ret = request_irq(irq->hwirq, handler, 0, irq->name, irq);  in vfio_set_trigger()
    221  enable_irq(irq->hwirq);  in vfio_set_trigger()
    252  handler(irq->hwirq, irq);  in vfio_platform_set_irq_trigger()
    [all …]
/drivers/misc/cxl/
irq.c
    224  irq_hw_number_t hwirq = irqd_to_hwirq(irq_get_irq_data(irq));  in cxl_irq_afu() local
    230  irq_off = hwirq - ctx->irqs.offset[r];  in cxl_irq_afu()
    240  ctx->pe, irq, hwirq);  in cxl_irq_afu()
    244  trace_cxl_afu_irq(ctx, afu_irq, irq, hwirq);  in cxl_irq_afu()
    246  afu_irq, ctx->pe, irq, hwirq);  in cxl_irq_afu()
    262  unsigned int cxl_map_irq(struct cxl *adapter, irq_hw_number_t hwirq,  in cxl_map_irq() argument
    269  virq = irq_create_mapping(NULL, hwirq);  in cxl_map_irq()
    275  cxl_setup_irq(adapter, hwirq, virq);  in cxl_map_irq()
    277  pr_devel("hwirq %#lx mapped to virq %u\n", hwirq, virq);  in cxl_map_irq()
    300  int hwirq, virq;  in cxl_register_one_irq() local
    [all …]
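cxl_map_irq() above shows the map-then-use pattern: irq_create_mapping() (with a NULL, i.e. default, domain) turns the hardware interrupt number into a Linux virq, which is what later gets handed to request_irq(). A hedged sketch of that sequence; demo_handler, the "demo" name and the cookie are inventions of this sketch, not cxl code:

    #include <linux/errno.h>
    #include <linux/interrupt.h>
    #include <linux/irqdomain.h>
    #include <linux/printk.h>

    static irqreturn_t demo_handler(int irq, void *data)
    {
            return IRQ_HANDLED;
    }

    /* Map a hwirq into the default domain, then hook a handler on the virq. */
    static int demo_map_and_request(irq_hw_number_t hwirq, void *cookie)
    {
            unsigned int virq = irq_create_mapping(NULL, hwirq);

            if (!virq)
                    return -ENOMEM;

            pr_devel("hwirq %#lx mapped to virq %u\n", hwirq, virq);

            return request_irq(virq, demo_handler, 0, "demo", cookie);
    }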
/drivers/gpio/
gpio-ts5500.c
     41  u8 hwirq;  member
    282  return priv->hwirq;  in ts5500_gpio_to_irq()
    293  if (priv->hwirq == 7)  in ts5500_enable_irq()
    295  else if (priv->hwirq == 6)  in ts5500_enable_irq()
    297  else if (priv->hwirq == 1)  in ts5500_enable_irq()
    311  if (priv->hwirq == 7)  in ts5500_disable_irq()
    313  else if (priv->hwirq == 6)  in ts5500_disable_irq()
    315  else if (priv->hwirq == 1)  in ts5500_disable_irq()
    318  dev_err(priv->gpio_chip.dev, "invalid hwirq %d\n", priv->hwirq);  in ts5500_disable_irq()
    344  priv->hwirq = res->start;  in ts5500_dio_probe()
    [all …]
gpio-sa1100.c
     81  mask = BIT(d->hwirq);  in sa1100_gpio_type()
    109  GEDR = BIT(d->hwirq);  in sa1100_gpio_ack()
    114  unsigned int mask = BIT(d->hwirq);  in sa1100_gpio_mask()
    124  unsigned int mask = BIT(d->hwirq);  in sa1100_gpio_unmask()
    135  PWER |= BIT(d->hwirq);  in sa1100_gpio_wake()
    137  PWER &= ~BIT(d->hwirq);  in sa1100_gpio_wake()
    154  unsigned int irq, irq_hw_number_t hwirq)  in sa1100_gpio_irqdomain_map() argument
gpio-dwapb.c
    136  int hwirq = fls(irq_status) - 1;  in dwapb_do_irq() local
    137  int gpio_irq = irq_find_mapping(gpio->domain, hwirq);  in dwapb_do_irq()
    140  irq_status &= ~BIT(hwirq);  in dwapb_do_irq()
    144  dwapb_toggle_trigger(gpio, hwirq);  in dwapb_do_irq()
    171  val |= BIT(d->hwirq);  in dwapb_irq_enable()
    186  val &= ~BIT(d->hwirq);  in dwapb_irq_disable()
    219  int bit = d->hwirq;  in dwapb_irq_set_type()
    301  unsigned int hwirq, ngpio = gc->ngpio;  in dwapb_configure_irqs() local
    369  for (hwirq = 0 ; hwirq < ngpio ; hwirq++)  in dwapb_configure_irqs()
    370  irq_create_mapping(gpio->domain, hwirq);  in dwapb_configure_irqs()
    [all …]
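The dwapb_do_irq() lines above drain a pending-status word from the top down: fls() gives the highest set bit, that bit index is the hwirq, and the bit is cleared once the mapped interrupt has been handled. The same loop in stand-alone form, with a printf standing in for the per-hwirq dispatch:

    #include <stdint.h>
    #include <stdio.h>

    /* Userspace stand-in for the kernel's fls(): index of the highest set bit,
     * 1-based, 0 when no bit is set. */
    static int fls_u32(uint32_t x)
    {
            return x ? 32 - __builtin_clz(x) : 0;
    }

    int main(void)
    {
            uint32_t irq_status = 0x0000900a;   /* example pending mask */

            while (irq_status) {
                    int hwirq = fls_u32(irq_status) - 1;   /* highest pending hwirq */

                    printf("dispatch hwirq %d\n", hwirq);
                    irq_status &= ~(1u << hwirq);          /* ack: clear the handled bit */
            }
            return 0;
    }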
/drivers/pinctrl/samsung/
pinctrl-s3c24xx.c
    172  int index = bank->eint_offset + data->hwirq;  in s3c24xx_eint_type()
    195  s3c24xx_eint_set_function(d, bank, data->hwirq);  in s3c24xx_eint_type()
    207  int parent_irq = eint_data->parents[data->hwirq];  in s3c2410_eint0_3_ack()
    218  int parent_irq = eint_data->parents[data->hwirq];  in s3c2410_eint0_3_mask()
    229  int parent_irq = eint_data->parents[data->hwirq];  in s3c2410_eint0_3_unmask()
    250  virq = irq_linear_revmap(eint_data->domains[data->hwirq], data->hwirq);  in s3c2410_demux_eint0_3()
    264  unsigned long bitval = 1UL << data->hwirq;  in s3c2412_eint0_3_ack()
    275  mask |= (1UL << data->hwirq);  in s3c2412_eint0_3_mask()
    286  mask &= ~(1UL << data->hwirq);  in s3c2412_eint0_3_unmask()
    308  virq = irq_linear_revmap(eint_data->domains[data->hwirq], data->hwirq);  in s3c2412_demux_eint0_3()
    [all …]