/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ih.c |
      41  int amdgpu_ih_ring_init(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,   in amdgpu_ih_ring_init() argument
      50  ih->ring_size = ring_size;   in amdgpu_ih_ring_init()
      51  ih->ptr_mask = ih->ring_size - 1;   in amdgpu_ih_ring_init()
      52  ih->rptr = 0;   in amdgpu_ih_ring_init()
      53  ih->use_bus_addr = use_bus_addr;   in amdgpu_ih_ring_init()
      58  if (ih->ring)   in amdgpu_ih_ring_init()
      64  ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,   in amdgpu_ih_ring_init()
      66  if (ih->ring == NULL)   in amdgpu_ih_ring_init()
      69  ih->gpu_addr = dma_addr;   in amdgpu_ih_ring_init()
      70  ih->wptr_addr = dma_addr + ih->ring_size;   in amdgpu_ih_ring_init()
     [all …]
|
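The amdgpu_ih.c hits show the usual power-of-two ring bookkeeping: ptr_mask is ring_size - 1, the read pointer wraps by masking, and the coherent allocation reserves 8 extra bytes after the ring so the write pointer can live right behind it (wptr_addr = dma_addr + ring_size). Below is a minimal, self-contained sketch of just the mask-based wrap; the struct and function names are invented for the example and this is not the amdgpu implementation.

    /*
     * Toy version of the power-of-two ring arithmetic seen above:
     * ptr_mask = ring_size - 1 lets the read pointer wrap without a
     * division or modulo.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct toy_ih_ring {
            uint32_t ring_size;   /* bytes, must be a power of two */
            uint32_t ptr_mask;    /* ring_size - 1 */
            uint32_t rptr;        /* byte offset of the next entry to read */
    };

    static void toy_ring_init(struct toy_ih_ring *ih, uint32_t ring_size)
    {
            ih->ring_size = ring_size;
            ih->ptr_mask = ring_size - 1;   /* only valid for powers of two */
            ih->rptr = 0;
    }

    static uint32_t toy_ring_advance(struct toy_ih_ring *ih, uint32_t bytes)
    {
            ih->rptr = (ih->rptr + bytes) & ih->ptr_mask;   /* wrap by masking */
            return ih->rptr;
    }

    int main(void)
    {
            struct toy_ih_ring ih;

            toy_ring_init(&ih, 4096);
            toy_ring_advance(&ih, 4092);
            /* 4092 + 16 wraps to 12 in a 4096-byte ring */
            printf("rptr after wrap: %u\n", (unsigned)toy_ring_advance(&ih, 16));
            return 0;
    }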
D | vega10_ih.c |
      51  if (adev->irq.ih.ring_size) {   in vega10_ih_init_register_offset()
      52  ih_regs = &adev->irq.ih.ih_regs;   in vega10_ih_init_register_offset()
      97  struct amdgpu_ih_ring *ih,   in vega10_ih_toggle_ring_interrupts() argument
     103  ih_regs = &ih->ih_regs;   in vega10_ih_toggle_ring_interrupts()
     109  if (ih == &adev->irq.ih)   in vega10_ih_toggle_ring_interrupts()
     121  ih->enabled = true;   in vega10_ih_toggle_ring_interrupts()
     126  ih->enabled = false;   in vega10_ih_toggle_ring_interrupts()
     127  ih->rptr = 0;   in vega10_ih_toggle_ring_interrupts()
     143  struct amdgpu_ih_ring *ih[] = {&adev->irq.ih, &adev->irq.ih1, &adev->irq.ih2};   in vega10_ih_toggle_interrupts() local
     147  for (i = 0; i < ARRAY_SIZE(ih); i++) {   in vega10_ih_toggle_interrupts()
     [all …]
|
D | vega20_ih.c |
      54  if (adev->irq.ih.ring_size) {   in vega20_ih_init_register_offset()
      55  ih_regs = &adev->irq.ih.ih_regs;   in vega20_ih_init_register_offset()
     100  struct amdgpu_ih_ring *ih,   in vega20_ih_toggle_ring_interrupts() argument
     106  ih_regs = &ih->ih_regs;   in vega20_ih_toggle_ring_interrupts()
     113  if (ih == &adev->irq.ih)   in vega20_ih_toggle_ring_interrupts()
     125  ih->enabled = true;   in vega20_ih_toggle_ring_interrupts()
     130  ih->enabled = false;   in vega20_ih_toggle_ring_interrupts()
     131  ih->rptr = 0;   in vega20_ih_toggle_ring_interrupts()
     147  struct amdgpu_ih_ring *ih[] = {&adev->irq.ih, &adev->irq.ih1, &adev->irq.ih2};   in vega20_ih_toggle_interrupts() local
     151  for (i = 0; i < ARRAY_SIZE(ih); i++) {   in vega20_ih_toggle_interrupts()
     [all …]
|
D | navi10_ih.c |
      53  if (adev->irq.ih.ring_size) {   in navi10_ih_init_register_offset()
      54  ih_regs = &adev->irq.ih.ih_regs;   in navi10_ih_init_register_offset()
     153  struct amdgpu_ih_ring *ih,   in navi10_ih_toggle_ring_interrupts() argument
     159  ih_regs = &ih->ih_regs;   in navi10_ih_toggle_ring_interrupts()
     164  if (ih == &adev->irq.ih)   in navi10_ih_toggle_ring_interrupts()
     175  ih->enabled = true;   in navi10_ih_toggle_ring_interrupts()
     180  ih->enabled = false;   in navi10_ih_toggle_ring_interrupts()
     181  ih->rptr = 0;   in navi10_ih_toggle_ring_interrupts()
     197  struct amdgpu_ih_ring *ih[] = {&adev->irq.ih, &adev->irq.ih1, &adev->irq.ih2};   in navi10_ih_toggle_interrupts() local
     201  for (i = 0; i < ARRAY_SIZE(ih); i++) {   in navi10_ih_toggle_interrupts()
     [all …]
|
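The vega10/vega20/navi10 hits all show the same enable/disable helper: walk a fixed array of the three rings {ih, ih1, ih2}, skip rings that were never allocated (ring_size == 0), and reset rptr when turning a ring off. A small standalone sketch of that loop pattern, with toy types rather than the driver's:

    /*
     * Toy version of the toggle loop: iterate a fixed set of rings,
     * skip unallocated ones, clear the read pointer on disable.
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct toy_ring {
            uint32_t ring_size;
            uint32_t rptr;
            bool enabled;
    };

    #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

    static void toy_toggle_interrupts(struct toy_ring *r0, struct toy_ring *r1,
                                      struct toy_ring *r2, bool enable)
    {
            struct toy_ring *ih[] = { r0, r1, r2 };
            size_t i;

            for (i = 0; i < ARRAY_SIZE(ih); i++) {
                    if (!ih[i]->ring_size)          /* ring never set up: skip */
                            continue;
                    ih[i]->enabled = enable;
                    if (!enable)
                            ih[i]->rptr = 0;        /* restart from the top next time */
            }
    }

    int main(void)
    {
            struct toy_ring a = { 4096, 12, true }, b = { 0, 0, false }, c = { 256, 4, true };

            toy_toggle_interrupts(&a, &b, &c, false);
            return (int)(a.rptr + b.rptr + c.rptr);   /* 0: active rings were reset */
    }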
D | tonga_ih.c |
      67  adev->irq.ih.enabled = true;   in tonga_ih_enable_interrupts()
      87  adev->irq.ih.enabled = false;   in tonga_ih_disable_interrupts()
      88  adev->irq.ih.rptr = 0;   in tonga_ih_disable_interrupts()
     105  struct amdgpu_ih_ring *ih = &adev->irq.ih;   in tonga_ih_irq_init() local
     123  WREG32(mmIH_RB_BASE, ih->gpu_addr >> 8);   in tonga_ih_irq_init()
     125  rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);   in tonga_ih_irq_init()
     138  WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));   in tonga_ih_irq_init()
     139  WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);   in tonga_ih_irq_init()
     146  if (adev->irq.ih.use_doorbell) {   in tonga_ih_irq_init()
     148  OFFSET, adev->irq.ih.doorbell_index);   in tonga_ih_irq_init()
     [all …]
|
D | si_ih.c |
      44  adev->irq.ih.enabled = true;   in si_ih_enable_interrupts()
      58  adev->irq.ih.enabled = false;   in si_ih_disable_interrupts()
      59  adev->irq.ih.rptr = 0;   in si_ih_disable_interrupts()
      64  struct amdgpu_ih_ring *ih = &adev->irq.ih;   in si_ih_irq_init() local
      76  WREG32(IH_RB_BASE, adev->irq.ih.gpu_addr >> 8);   in si_ih_irq_init()
      77  rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);   in si_ih_irq_init()
      84  WREG32(IH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));   in si_ih_irq_init()
      85  WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);   in si_ih_irq_init()
     108  struct amdgpu_ih_ring *ih)   in si_ih_get_wptr() argument
     112  wptr = le32_to_cpu(*ih->wptr_cpu);   in si_ih_get_wptr()
     [all …]
|
D | cik_ih.c |
      69  adev->irq.ih.enabled = true;   in cik_ih_enable_interrupts()
      91  adev->irq.ih.enabled = false;   in cik_ih_disable_interrupts()
      92  adev->irq.ih.rptr = 0;   in cik_ih_disable_interrupts()
     108  struct amdgpu_ih_ring *ih = &adev->irq.ih;   in cik_ih_irq_init() local
     126  WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);   in cik_ih_irq_init()
     127  rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);   in cik_ih_irq_init()
     136  WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));   in cik_ih_irq_init()
     137  WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);   in cik_ih_irq_init()
     189  struct amdgpu_ih_ring *ih)   in cik_ih_get_wptr() argument
     193  wptr = le32_to_cpu(*ih->wptr_cpu);   in cik_ih_get_wptr()
     [all …]
|
D | amdgpu_ih.h |
      76  u32 (*get_wptr)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
      77  void (*decode_iv)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,
      79  void (*set_rptr)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
      82  #define amdgpu_ih_get_wptr(adev, ih) (adev)->irq.ih_funcs->get_wptr((adev), (ih))   argument
      84  (adev)->irq.ih_funcs->decode_iv((adev), (ih), (iv))
      85  #define amdgpu_ih_set_rptr(adev, ih) (adev)->irq.ih_funcs->set_rptr((adev), (ih))   argument
      87  int amdgpu_ih_ring_init(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,
      89  void amdgpu_ih_ring_fini(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
      90  void amdgpu_ih_ring_write(struct amdgpu_ih_ring *ih, const uint32_t *iv,
      93  struct amdgpu_ih_ring *ih);
     [all …]
|
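The amdgpu_ih.h hits show the dispatch pattern: per-chip callbacks (get_wptr, decode_iv, set_rptr) collected in an ops structure, with thin macros that forward to whichever implementation the device installed. A minimal sketch of that function-pointer-table idiom follows; every name in it is invented for the example and it is not the amdgpu API.

    /*
     * Toy ops table: the "device" carries a pointer to a set of
     * callbacks, and wrapper macros forward through it.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct toy_device;
    struct toy_ih_ring { uint32_t rptr; uint32_t wptr; };

    struct toy_ih_funcs {
            uint32_t (*get_wptr)(struct toy_device *dev, struct toy_ih_ring *ih);
            void     (*set_rptr)(struct toy_device *dev, struct toy_ih_ring *ih);
    };

    struct toy_device {
            const struct toy_ih_funcs *ih_funcs;
            struct toy_ih_ring ih;
    };

    #define toy_ih_get_wptr(dev, ih) ((dev)->ih_funcs->get_wptr((dev), (ih)))
    #define toy_ih_set_rptr(dev, ih) ((dev)->ih_funcs->set_rptr((dev), (ih)))

    /* One concrete backend, standing in for a per-ASIC implementation. */
    static uint32_t chip_a_get_wptr(struct toy_device *dev, struct toy_ih_ring *ih)
    {
            (void)dev;
            return ih->wptr;
    }

    static void chip_a_set_rptr(struct toy_device *dev, struct toy_ih_ring *ih)
    {
            (void)dev;
            printf("rptr written back: %u\n", (unsigned)ih->rptr);
    }

    static const struct toy_ih_funcs chip_a_ih_funcs = {
            .get_wptr = chip_a_get_wptr,
            .set_rptr = chip_a_set_rptr,
    };

    int main(void)
    {
            struct toy_device dev = { .ih_funcs = &chip_a_ih_funcs, .ih = { 0, 32 } };

            dev.ih.rptr = toy_ih_get_wptr(&dev, &dev.ih);  /* consume everything */
            toy_ih_set_rptr(&dev, &dev.ih);
            return 0;
    }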
D | iceland_ih.c |
      69  adev->irq.ih.enabled = true;   in iceland_ih_enable_interrupts()
      91  adev->irq.ih.enabled = false;   in iceland_ih_disable_interrupts()
      92  adev->irq.ih.rptr = 0;   in iceland_ih_disable_interrupts()
     108  struct amdgpu_ih_ring *ih = &adev->irq.ih;   in iceland_ih_irq_init() local
     127  WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);   in iceland_ih_irq_init()
     129  rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);   in iceland_ih_irq_init()
     138  WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));   in iceland_ih_irq_init()
     139  WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);   in iceland_ih_irq_init()
     191  struct amdgpu_ih_ring *ih)   in iceland_ih_get_wptr() argument
     195  wptr = le32_to_cpu(*ih->wptr_cpu);   in iceland_ih_get_wptr()
     [all …]
|
D | cz_ih.c |
      69  adev->irq.ih.enabled = true;   in cz_ih_enable_interrupts()
      91  adev->irq.ih.enabled = false;   in cz_ih_disable_interrupts()
      92  adev->irq.ih.rptr = 0;   in cz_ih_disable_interrupts()
     108  struct amdgpu_ih_ring *ih = &adev->irq.ih;   in cz_ih_irq_init() local
     127  WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);   in cz_ih_irq_init()
     129  rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);   in cz_ih_irq_init()
     138  WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));   in cz_ih_irq_init()
     139  WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);   in cz_ih_irq_init()
     191  struct amdgpu_ih_ring *ih)   in cz_ih_get_wptr() argument
     195  wptr = le32_to_cpu(*ih->wptr_cpu);   in cz_ih_get_wptr()
     [all …]
|
D | amdgpu_irq.c |
     192  ret = amdgpu_ih_process(adev, &adev->irq.ih);   in amdgpu_irq_handler()
     388  amdgpu_ih_ring_fini(adev, &adev->irq.ih);   in amdgpu_irq_fini_hw()
     486  struct amdgpu_ih_ring *ih)   in amdgpu_irq_dispatch() argument
     488  u32 ring_index = ih->rptr >> 2;   in amdgpu_irq_dispatch()
     495  entry.ih = ih;   in amdgpu_irq_dispatch()
     496  entry.iv_entry = (const uint32_t *)&ih->ring[ring_index];   in amdgpu_irq_dispatch()
     499  trace_amdgpu_iv(ih - &adev->irq.ih, &entry);   in amdgpu_irq_dispatch()
|
D | amdgpu_irq.h |
      47  struct amdgpu_ih_ring *ih;   member
      92  struct amdgpu_ih_ring ih, ih1, ih2, ih_soft;   member
     112  struct amdgpu_ih_ring *ih);
|
D | amdgpu_trace.h |
      77  TP_PROTO(unsigned ih, struct amdgpu_iv_entry *iv),
      78  TP_ARGS(ih, iv),
      80  __field(unsigned, ih)
      92  __entry->ih = ih;
     108  __entry->ih, __entry->client_id, __entry->src_id,
|
D | amdgpu_doorbell.h |
      57  uint32_t ih;   member
|
D | vega10_reg_init.c |
      74  adev->doorbell_index.ih = AMDGPU_DOORBELL64_IH;   in vega10_doorbell_index_init()
|
D | vega20_reg_init.c |
      80  adev->doorbell_index.ih = AMDGPU_VEGA20_DOORBELL_IH;   in vega20_doorbell_index_init()
|
/drivers/gpu/drm/amd/display/amdgpu_dm/ |
D | amdgpu_dm_irq.c |
     100  void (*ih)(void *),   in init_handler_common_data()
     104  hcd->handler = ih;   in init_handler_common_data()
     131  void *ih,   in remove_irq_handler() argument
     163  if (ih == handler->handler) {   in remove_irq_handler()
     183  ih, int_params->irq_source, int_params->int_context);   in remove_irq_handler()
     239  void (*ih)(void *))   in validate_irq_registration_params()
     241  if (NULL == int_params || NULL == ih) {   in validate_irq_registration_params()
     302  void (*ih)(void *),   in amdgpu_dm_irq_register_interrupt()
     310  if (false == validate_irq_registration_params(int_params, ih))   in amdgpu_dm_irq_register_interrupt()
     319  init_handler_common_data(handler_data, ih, handler_args, &adev->dm);   in amdgpu_dm_irq_register_interrupt()
     [all …]
|
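In the display-manager IRQ hits, `ih` is a callback function pointer rather than a ring: registration validates and stores it, and removal finds the entry whose stored handler compares equal to the given pointer. A small self-contained sketch of register/remove keyed by callback pointer; the fixed-size table and all names are invented for illustration.

    /*
     * Toy handler table: entries are identified by their callback
     * pointer, so removal walks the table comparing pointers.
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    typedef void (*toy_ih_cb)(void *arg);

    struct toy_handler {
            toy_ih_cb handler;
            void *arg;
            bool used;
    };

    static struct toy_handler table[8];

    static bool toy_register(toy_ih_cb ih, void *arg)
    {
            size_t i;

            if (!ih)                        /* mirrors the NULL check in the hits */
                    return false;
            for (i = 0; i < 8; i++) {
                    if (!table[i].used) {
                            table[i].handler = ih;
                            table[i].arg = arg;
                            table[i].used = true;
                            return true;
                    }
            }
            return false;
    }

    static bool toy_remove(toy_ih_cb ih)
    {
            size_t i;

            for (i = 0; i < 8; i++) {
                    if (table[i].used && table[i].handler == ih) {  /* match by pointer */
                            table[i].used = false;
                            return true;
                    }
            }
            return false;
    }

    static void blink(void *arg) { (void)arg; }

    int main(void)
    {
            toy_register(blink, NULL);
            printf("removed: %d\n", toy_remove(blink));   /* prints 1 */
            return 0;
    }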
D | amdgpu_dm_irq.h |
      68  void (*ih)(void *),
|
/drivers/net/ethernet/sgi/ |
D | ioc3-eth.c |
     302  struct iphdr *ih;   in ioc3_tcpudp_checksum() local
     322  ih = (struct iphdr *)((char *)eh + ETH_HLEN);   in ioc3_tcpudp_checksum()
     323  if (ip_is_fragment(ih))   in ioc3_tcpudp_checksum()
     326  proto = ih->protocol;   in ioc3_tcpudp_checksum()
     332  (ih->tot_len - (ih->ihl << 2)) +   in ioc3_tcpudp_checksum()
     333  htons((u16)ih->protocol) +   in ioc3_tcpudp_checksum()
     334  (ih->saddr >> 16) + (ih->saddr & 0xffff) +   in ioc3_tcpudp_checksum()
     335  (ih->daddr >> 16) + (ih->daddr & 0xffff);   in ioc3_tcpudp_checksum()
     998  const struct iphdr *ih = ip_hdr(skb);   in ioc3_start_xmit() local
     999  const int proto = ntohs(ih->protocol);   in ioc3_start_xmit()
     [all …]
|
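Here `ih` is the IP header, and the visible lines assemble the TCP/UDP pseudo-header contribution to the checksum: payload length (tot_len minus the header length ihl*4), the protocol, and the source/destination addresses split into 16-bit halves, with fragments skipped. The sketch below shows that pseudo-header partial sum with carry folding; for clarity it works on host-order values, whereas the driver operates on the wire-format header, so the byte-order handling differs from the real code.

    /*
     * Pseudo-header partial sum (host order) with 16-bit carry folding.
     * Illustrative only; not the ioc3 driver's checksum path.
     */
    #include <stdint.h>
    #include <stdio.h>

    static uint16_t pseudo_hdr_sum(uint32_t saddr, uint32_t daddr,
                                   uint8_t proto, uint16_t payload_len)
    {
            uint32_t sum = 0;

            sum += saddr >> 16;
            sum += saddr & 0xffff;
            sum += daddr >> 16;
            sum += daddr & 0xffff;
            sum += proto;           /* protocol occupies the low byte of its word */
            sum += payload_len;

            while (sum >> 16)       /* fold carries back into the low 16 bits */
                    sum = (sum & 0xffff) + (sum >> 16);
            return (uint16_t)sum;
    }

    int main(void)
    {
            /* 192.0.2.1 -> 192.0.2.2, TCP (6), 40 payload bytes */
            printf("0x%04x\n", pseudo_hdr_sum(0xc0000201u, 0xc0000202u, 6, 40));
            return 0;
    }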
/drivers/crypto/cavium/nitrox/ |
D | nitrox_reqmgr.c |
     450  sr->instr.ih.value = 0;   in nitrox_process_se_request()
     451  sr->instr.ih.s.g = 1;   in nitrox_process_se_request()
     452  sr->instr.ih.s.gsz = sr->in.sgmap_cnt;   in nitrox_process_se_request()
     453  sr->instr.ih.s.ssz = sr->out.sgmap_cnt;   in nitrox_process_se_request()
     454  sr->instr.ih.s.fsz = FDATA_SIZE + sizeof(struct gphdr);   in nitrox_process_se_request()
     455  sr->instr.ih.s.tlen = sr->instr.ih.s.fsz + sr->in.total_bytes;   in nitrox_process_se_request()
     456  sr->instr.ih.bev = cpu_to_be64(sr->instr.ih.value);   in nitrox_process_se_request()
|
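In the nitrox hits, `ih` is a 64-bit instruction header filled through named bitfields (`.s.*`) and then read back as a whole word (`.value`) and byte-swapped for the device. The sketch below shows that union-of-bitfields idiom; the field layout, widths, and names here are invented for the example and do not match the Cavium hardware format (bitfield ordering is also compiler-defined, which is why real drivers pin it down explicitly).

    /*
     * Toy "bitfield view + raw 64-bit view" union: fill fields via .s,
     * read the packed word via .value before byte-swapping for HW.
     */
    #include <stdint.h>
    #include <stdio.h>

    union toy_instr_hdr {
            uint64_t value;
            struct {
                    uint64_t g    : 1;   /* gather mode */
                    uint64_t gsz  : 14;  /* gather list size */
                    uint64_t ssz  : 14;  /* scatter list size */
                    uint64_t fsz  : 11;  /* fixed data size */
                    uint64_t tlen : 16;  /* total length */
                    uint64_t rsvd : 8;
            } s;
    };

    int main(void)
    {
            union toy_instr_hdr ih = { .value = 0 };

            ih.s.g = 1;
            ih.s.gsz = 3;
            ih.s.ssz = 2;
            ih.s.fsz = 24;
            ih.s.tlen = ih.s.fsz + 512;

            /* A driver would now convert ih.value to big-endian for the device. */
            printf("raw header: 0x%016llx\n", (unsigned long long)ih.value);
            return 0;
    }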
/drivers/gpu/drm/radeon/ |
D | r600.c |
     3315  rdev->ih.ring_obj = NULL;   in r600_init()
     3473  rdev->ih.ring_size = ring_size;   in r600_ih_ring_init()
     3474  rdev->ih.ptr_mask = rdev->ih.ring_size - 1;   in r600_ih_ring_init()
     3475  rdev->ih.rptr = 0;   in r600_ih_ring_init()
     3483  if (rdev->ih.ring_obj == NULL) {   in r600_ih_ring_alloc()
     3484  r = radeon_bo_create(rdev, rdev->ih.ring_size,   in r600_ih_ring_alloc()
     3487  NULL, NULL, &rdev->ih.ring_obj);   in r600_ih_ring_alloc()
     3492  r = radeon_bo_reserve(rdev->ih.ring_obj, false);   in r600_ih_ring_alloc()
     3495  r = radeon_bo_pin(rdev->ih.ring_obj,   in r600_ih_ring_alloc()
     3497  &rdev->ih.gpu_addr);   in r600_ih_ring_alloc()
     [all …]
|
D | si.c |
     5928  rdev->ih.enabled = true;   in si_enable_interrupts()
     5943  rdev->ih.enabled = false;   in si_disable_interrupts()
     5944  rdev->ih.rptr = 0;   in si_disable_interrupts()
     6010  WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);   in si_irq_init()
     6011  rb_bufsz = order_base_2(rdev->ih.ring_size / 4);   in si_irq_init()
     6063  if (!rdev->ih.enabled) {   in si_irq_set()
     6225  wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);   in si_get_ih_wptr()
     6226  rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;   in si_get_ih_wptr()
     6231  return (wptr & rdev->ih.ptr_mask);   in si_get_ih_wptr()
     6259  if (!rdev->ih.enabled || rdev->shutdown)   in si_irq_process()
     [all …]
|
D | evergreen.c |
     4506  if (!rdev->ih.enabled) {   in evergreen_irq_set()
     4693  wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);   in evergreen_get_ih_wptr()
     4694  rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;   in evergreen_get_ih_wptr()
     4699  return (wptr & rdev->ih.ptr_mask);   in evergreen_get_ih_wptr()
     4719  if (!rdev->ih.enabled || rdev->shutdown)   in evergreen_irq_process()
     4726  if (atomic_xchg(&rdev->ih.lock, 1))   in evergreen_irq_process()
     4729  rptr = rdev->ih.rptr;   in evergreen_irq_process()
     4741  src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;   in evergreen_irq_process()
     4742  src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;   in evergreen_irq_process()
     4912  rptr &= rdev->ih.ptr_mask;   in evergreen_irq_process()
     [all …]
|
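The radeon si.c/evergreen.c hits show the two halves of ring consumption: get_ih_wptr masks the hardware write pointer (and on overflow bumps rptr past the oldest entry, as in the "(wptr + 16) & ptr_mask" lines), and irq_process walks from rptr to wptr, pulling src_id/src_data out of the first dwords of each 16-byte IV entry and masking rptr on every step. Below is a self-contained consume-loop sketch of that walk with a toy ring; it is not the radeon code and omits locking and overflow recovery.

    /*
     * Toy IV consume loop: 16-byte entries, src_id/src_data in the
     * first two dwords, rptr wraps by masking with ptr_mask.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define RING_BYTES 64                      /* power of two */
    #define PTR_MASK   (RING_BYTES - 1)

    static uint32_t ring[RING_BYTES / 4];

    static uint32_t process(uint32_t rptr, uint32_t wptr)
    {
            while (rptr != wptr) {
                    uint32_t ring_index = rptr / 4;
                    uint32_t src_id   = ring[ring_index] & 0xff;
                    uint32_t src_data = ring[ring_index + 1] & 0xfffffff;

                    printf("IV at %2u: src_id=%u src_data=0x%x\n",
                           (unsigned)rptr, (unsigned)src_id, (unsigned)src_data);

                    rptr += 16;                /* each IV entry is four dwords */
                    rptr &= PTR_MASK;          /* wrap around the ring */
            }
            return rptr;                       /* written back as the new rptr */
    }

    int main(void)
    {
            ring[0] = 0x2a;  ring[1] = 0x1234;   /* fake entry at offset 0 */
            ring[4] = 0x07;  ring[5] = 0x0042;   /* fake entry at offset 16 */
            process(0, 32);
            return 0;
    }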
/drivers/cpufreq/ |
D | pmac64-cpufreq.c |
     484  u64 max_freq, min_freq, ih, il;   in g5_pm72_cpufreq_init() local
     580  ih = *((u32 *)(eeprom + 0x10));   in g5_pm72_cpufreq_init()
     584  if (il == ih) {   in g5_pm72_cpufreq_init()
     591  if (ih != 0 && il != 0)   in g5_pm72_cpufreq_init()
     592  min_freq = (max_freq * il) / ih;   in g5_pm72_cpufreq_init()
|
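In the cpufreq hit, `ih` and `il` are two calibration words read from EEPROM, and the low operating point is derived as min_freq = (max_freq * il) / ih, with zero and il == ih guarded. A tiny sketch of that integer scaling follows; the fallback chosen for the degenerate cases and the example numbers are assumptions for illustration, not taken from the driver.

    /*
     * Toy EEPROM ratio scaling: low frequency = max_freq * il / ih,
     * with degenerate inputs treated here as "single speed".
     */
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t low_freq_from_eeprom(uint64_t max_freq, uint32_t il, uint32_t ih)
    {
            if (ih == 0 || il == 0 || il == ih)
                    return max_freq;                 /* assumed fallback for the sketch */
            return (max_freq * il) / ih;             /* integer ratio, rounds down */
    }

    int main(void)
    {
            /* e.g. a 2.7 GHz part with il/ih = 2/3 -> 1.8 GHz low point */
            printf("%llu\n",
                   (unsigned long long)low_freq_from_eeprom(2700000000ULL, 2, 3));
            return 0;
    }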
/drivers/iommu/intel/ |
D | svm.c |
     201  unsigned long pages, int ih)   in __flush_svm_range_dev() argument
     208  qi_flush_piotlb(sdev->iommu, sdev->did, svm->pasid, address, pages, ih);   in __flush_svm_range_dev()
     218  unsigned long pages, int ih)   in intel_flush_svm_range_dev() argument
     226  __flush_svm_range_dev(svm, sdev, start, align >> VTD_PAGE_SHIFT, ih);   in intel_flush_svm_range_dev()
     232  unsigned long pages, int ih)   in intel_flush_svm_range() argument
     238  intel_flush_svm_range_dev(svm, sdev, address, pages, ih);   in intel_flush_svm_range()
|