/drivers/net/ethernet/mellanox/mlx5/core/sf/ |
D | hw_table.c
     14  u8 allocated: 1;  member
     84  if (!hwc->sfs[i].allocated && free_idx == -1) {  in mlx5_sf_hw_table_id_alloc()
     89  if (hwc->sfs[i].allocated && hwc->sfs[i].usr_sfnum == usr_sfnum)  in mlx5_sf_hw_table_id_alloc()
     97  hwc->sfs[free_idx].allocated = true;  in mlx5_sf_hw_table_id_alloc()
    106  hwc->sfs[id].allocated = false;  in mlx5_sf_hw_table_id_free()
    173  hwc->sfs[idx].allocated = false;  in mlx5_sf_hw_table_hwc_sf_free()
    195  hwc->sfs[id].allocated = false;  in mlx5_sf_hw_table_sf_deferred_free()
    209  if (hwc->sfs[i].allocated)  in mlx5_sf_hw_table_hwc_dealloc_all()
    328  if (sf_hw->allocated && sf_hw->pending_delete)  in mlx5_sf_hw_vhca_event()
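The hw_table.c hits outline a classic fixed-size slot allocator keyed on an allocated bit: scan for the first free slot, reject a duplicate user SF number, then mark the chosen slot. A minimal standalone sketch of that pattern (illustrative names, not the mlx5 code itself):

#include <stdbool.h>
#include <stddef.h>

struct sf_slot {
	unsigned int usr_sfnum;
	bool allocated;
};

/* Return the claimed index, or -1 if the number is already in use
 * or the table is full. */
static int slot_alloc(struct sf_slot *sfs, size_t n, unsigned int usr_sfnum)
{
	int free_idx = -1;
	size_t i;

	for (i = 0; i < n; i++) {
		if (!sfs[i].allocated && free_idx == -1)
			free_idx = (int)i;	/* remember first free slot */
		if (sfs[i].allocated && sfs[i].usr_sfnum == usr_sfnum)
			return -1;		/* duplicate user number */
	}
	if (free_idx == -1)
		return -1;			/* table exhausted */
	sfs[free_idx].usr_sfnum = usr_sfnum;
	sfs[free_idx].allocated = true;
	return free_idx;
}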
|
/drivers/powercap/ |
D | powercap_sys.c
    387  bool allocated;  in powercap_release()  local
    393  allocated = power_zone->allocated;  in powercap_release()
    403  if (allocated)  in powercap_release()
    410  allocated = control_type->allocated;  in powercap_release()
    415  if (allocated)  in powercap_release()
    507  power_zone->allocated = true;  in powercap_register_zone()
    579  if (power_zone->allocated)  in powercap_register_zone()
    620  control_type->allocated = true;  in powercap_register_control_type()
    629  if (control_type->allocated)  in powercap_register_control_type()
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vce.c
    692  uint32_t handle, uint32_t *allocated)  in amdgpu_vce_validate_handle()  argument
    712  *allocated |= 1 << i;  in amdgpu_vce_validate_handle()
    734  uint32_t allocated = 0;  in amdgpu_vce_ring_parse_cs()  local
    817  &allocated);  in amdgpu_vce_ring_parse_cs()
    834  allocated |= 1 << session_idx;  in amdgpu_vce_ring_parse_cs()
    836  } else if (!(allocated & (1 << session_idx))) {  in amdgpu_vce_ring_parse_cs()
    936  if (allocated & ~created) {  in amdgpu_vce_ring_parse_cs()
    947  tmp = allocated;  in amdgpu_vce_ring_parse_cs()
    969  uint32_t allocated = 0;  in amdgpu_vce_ring_parse_cs_vm()  local
    987  &allocated);  in amdgpu_vce_ring_parse_cs_vm()
    [all …]
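The amdgpu_vce hits use allocated as a per-parse bitmask of VCE session slots; the allocated & ~created test (line 936) appears to flag slots that were claimed during this parse but never saw a create command. A rough standalone illustration of that mask bookkeeping (not the driver code):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical illustration: track up to 32 sessions with bitmasks, then
 * report any slot that was allocated during parsing but never created. */
int main(void)
{
	uint32_t allocated = 0, created = 0;
	int session_idx;

	allocated |= 1u << 3;		/* slot 3 claimed while validating a handle */
	allocated |= 1u << 7;		/* slot 7 claimed as well */
	created   |= 1u << 3;		/* but only slot 3 saw a create command */

	if (allocated & ~created) {	/* same shape as the check on line 936 */
		for (session_idx = 0; session_idx < 32; session_idx++)
			if ((allocated & ~created) & (1u << session_idx))
				printf("session %d allocated but never created\n",
				       session_idx);
	}
	return 0;
}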
|
/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_packet_manager.c
     94  if (WARN_ON(pm->allocated))  in pm_allocate_runlist_ib()
    113  pm->allocated = true;  in pm_allocate_runlist_ib()
    273  pm->allocated = false;  in pm_init()
    419  if (pm->allocated) {  in pm_release_ib()
    421  pm->allocated = false;  in pm_release_ib()
    434  if (!pm->allocated) {  in pm_debugfs_runlist()
|
/drivers/comedi/ |
D | comedi_buf.c
    434  unsigned int allocated = comedi_buf_write_n_allocated(s);  in comedi_buf_write_free()  local
    436  if (nbytes > allocated)  in comedi_buf_write_free()
    437  nbytes = allocated;  in comedi_buf_write_free()
    539  unsigned int allocated;  in comedi_buf_read_free()  local
    547  allocated = comedi_buf_read_n_allocated(async);  in comedi_buf_read_free()
    548  if (nbytes > allocated)  in comedi_buf_read_free()
    549  nbytes = allocated;  in comedi_buf_read_free()
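The comedi_buf hits show the usual clamp: a caller may ask to free more bytes than it previously reserved, so nbytes is capped at the current allocated count. A tiny sketch of the same guard (hypothetical counter, not the comedi API):

/* Never release more bytes than were actually reserved in the ring. */
static unsigned int buf_write_free(unsigned int *write_alloc_count,
				   unsigned int nbytes)
{
	unsigned int allocated = *write_alloc_count;

	if (nbytes > allocated)
		nbytes = allocated;	/* clamp to what was reserved */
	*write_alloc_count -= nbytes;
	return nbytes;			/* bytes actually freed */
}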
|
/drivers/gpu/drm/radeon/ |
D | radeon_vce.c
    519  uint32_t handle, bool *allocated)  in radeon_vce_validate_handle()  argument
    523  *allocated = false;  in radeon_vce_validate_handle()
    541  *allocated = true;  in radeon_vce_validate_handle()
    559  bool destroyed = false, created = false, allocated = false;  in radeon_vce_cs_parse()  local
    584  &allocated);  in radeon_vce_cs_parse()
    595  if (!allocated) {  in radeon_vce_cs_parse()
    667  if (allocated && !created) {  in radeon_vce_cs_parse()
    673  if ((!r && destroyed) || (r && allocated)) {  in radeon_vce_cs_parse()
|
/drivers/irqchip/ |
D | irq-mips-cpu.c
    180  DECLARE_BITMAP(allocated, 2);
    191  hwirq = find_first_zero_bit(state->allocated, 2);  in mips_cpu_ipi_alloc()
    194  bitmap_set(state->allocated, hwirq, 1);  in mips_cpu_ipi_alloc()
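Here allocated is a two-entry bitmap of IPI slots, searched with find_first_zero_bit() and claimed with bitmap_set(). A rough equivalent using a plain unsigned long (names and scope are illustrative only):

static unsigned long ipi_allocated;	/* stands in for DECLARE_BITMAP(allocated, 2) */

static int ipi_alloc(void)
{
	int hwirq;

	for (hwirq = 0; hwirq < 2; hwirq++) {
		if (!(ipi_allocated & (1UL << hwirq))) {
			ipi_allocated |= 1UL << hwirq;	/* bitmap_set(allocated, hwirq, 1) */
			return hwirq;
		}
	}
	return -1;				/* both IPI slots in use */
}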
|
/drivers/net/ethernet/marvell/octeontx2/nic/ |
D | otx2_flows.c
     77  int ent, allocated = 0;  in otx2_alloc_mcam_entries()  local
     99  while (allocated < count) {  in otx2_alloc_mcam_entries()
    105  req->count = (count - allocated) > NPC_MAX_NONCONTIG_ENTRIES ?  in otx2_alloc_mcam_entries()
    106  NPC_MAX_NONCONTIG_ENTRIES : count - allocated;  in otx2_alloc_mcam_entries()
    124  flow_cfg->flow_ent[ent + allocated] = rsp->entry_list[ent];  in otx2_alloc_mcam_entries()
    126  allocated += rsp->count;  in otx2_alloc_mcam_entries()
    140  if (allocated)  in otx2_alloc_mcam_entries()
    141  sort(&flow_cfg->flow_ent[0], allocated,  in otx2_alloc_mcam_entries()
    147  flow_cfg->max_flows = allocated;  in otx2_alloc_mcam_entries()
    149  if (allocated) {  in otx2_alloc_mcam_entries()
    [all …]
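In otx2_alloc_mcam_entries(), allocated counts how many MCAM entries have been obtained so far; the loop keeps requesting the remainder, capped at NPC_MAX_NONCONTIG_ENTRIES per request, and copies each response at offset allocated. A simplified sketch of that chunked loop (request_entries() and MAX_PER_REQ are stand-ins, not the otx2 mailbox API):

#define MAX_PER_REQ 64	/* stand-in for NPC_MAX_NONCONTIG_ENTRIES */

/* Hypothetical backend: hands out up to 'want' entry IDs and returns how
 * many it actually granted (possibly fewer, or 0 when exhausted). */
extern int request_entries(unsigned int *out, unsigned int want);

static int alloc_entries(unsigned int *ids, unsigned int count)
{
	unsigned int allocated = 0;

	while (allocated < count) {
		unsigned int want = count - allocated;
		int got;

		if (want > MAX_PER_REQ)
			want = MAX_PER_REQ;
		got = request_entries(ids + allocated, want);
		if (got <= 0)
			break;			/* pool ran dry; keep what we have */
		allocated += (unsigned int)got;
	}
	return (int)allocated;			/* may be less than count */
}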
|
/drivers/gpu/drm/i915/ |
D | i915_buddy.c
    321  LIST_HEAD(allocated);  in i915_buddy_alloc_range()
    371  list_add_tail(&block->link, &allocated);  in i915_buddy_alloc_range()
    385  list_splice_tail(&allocated, blocks);  in i915_buddy_alloc_range()
    401  i915_buddy_free_list(mm, &allocated);  in i915_buddy_alloc_range()
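i915_buddy_alloc_range() gathers blocks on a local allocated list, splices them onto the caller's list only if the whole range succeeds, and frees the partial list on error. A plain-C sketch of that all-or-nothing shape (malloc stands in for buddy blocks; not the i915 code):

#include <stdlib.h>

struct block { struct block *next; };

static int alloc_range(struct block **out, int nblocks)
{
	struct block *allocated = NULL;		/* local list, like LIST_HEAD(allocated) */
	int i;

	for (i = 0; i < nblocks; i++) {
		struct block *b = malloc(sizeof(*b));

		if (!b) {			/* error path: undo the partial work */
			while (allocated) {
				struct block *tmp = allocated->next;
				free(allocated);
				allocated = tmp;
			}
			return -1;
		}
		b->next = allocated;		/* collect on the local list */
		allocated = b;
	}
	*out = allocated;			/* "splice" the whole list to the caller */
	return 0;
}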
|
/drivers/dma/ |
D | moxart-dma.c
    141  bool allocated;  member
    355  ch->allocated = 1;  in moxart_alloc_chan_resources()
    368  ch->allocated = 0;  in moxart_free_chan_resources()
    532  if (!ch->allocated)  in moxart_dma_interrupt()
    601  ch->allocated = 0;  in moxart_probe()
|
/drivers/dma/qcom/ |
D | hidma.h
     29  atomic_t allocated; /* if this channel is allocated */  member
     86  bool allocated;  member
|
D | hidma_ll.c
    126  if (atomic_read(&tre->allocated) != true) {  in hidma_ll_free()
    131  atomic_set(&tre->allocated, 0);  in hidma_ll_free()
    146  if (atomic_add_unless(&lldev->trepool[i].allocated, 1, 1))  in hidma_ll_request()
    612  if (atomic_read(&tre->allocated) != true) {  in hidma_ll_set_transfer_params()
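hidma_ll_request() claims a descriptor with atomic_add_unless(&tre->allocated, 1, 1), i.e. it bumps the flag only if it is not already 1, and hidma_ll_free() releases it with atomic_set(..., 0). A C11-atomics sketch of the same claim/release protocol (illustrative, not the driver):

#include <stdatomic.h>
#include <stdbool.h>

struct tre { atomic_int allocated; };

static bool tre_claim(struct tre *t)
{
	int expected = 0;
	/* 0 -> 1 exactly once; fails if another thread already owns it */
	return atomic_compare_exchange_strong(&t->allocated, &expected, 1);
}

static void tre_release(struct tre *t)
{
	atomic_store(&t->allocated, 0);
}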
|
/drivers/firmware/tegra/ |
D | bpmp.c
    231  clear_bit(index, bpmp->threaded.allocated);  in tegra_bpmp_channel_read()
    270  index = find_first_zero_bit(bpmp->threaded.allocated, count);  in tegra_bpmp_write_threaded()
    283  set_bit(index, bpmp->threaded.allocated);  in tegra_bpmp_write_threaded()
    296  clear_bit(index, bpmp->threaded.allocated);  in tegra_bpmp_write_threaded()
    710  bpmp->threaded.allocated = devm_kzalloc(&pdev->dev, size, GFP_KERNEL);  in tegra_bpmp_probe()
    711  if (!bpmp->threaded.allocated)  in tegra_bpmp_probe()
|
/drivers/usb/gadget/function/ |
D | u_serial.c
    492  int *allocated)  in gs_free_requests()  argument
    500  if (allocated)  in gs_free_requests()
    501  (*allocated)--;  in gs_free_requests()
    507  int *allocated)  in gs_alloc_requests()  argument
    511  int n = allocated ? QUEUE_SIZE - *allocated : QUEUE_SIZE;  in gs_alloc_requests()
    523  if (allocated)  in gs_alloc_requests()
    524  (*allocated)++;  in gs_alloc_requests()
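In gs_alloc_requests() the optional allocated counter tells the function how many requests already exist, so it only tops the queue up to QUEUE_SIZE and keeps the caller's count in sync; gs_free_requests() decrements it. A simplified sketch of that top-up logic (alloc_one_request() and the QUEUE_SIZE value are hypothetical stand-ins):

#define QUEUE_SIZE 16

/* Hypothetical allocator: returns nonzero on success. */
extern int alloc_one_request(void);

static int alloc_requests(int *allocated)
{
	int n = allocated ? QUEUE_SIZE - *allocated : QUEUE_SIZE;
	int i;

	for (i = 0; i < n; i++) {
		if (!alloc_one_request())
			return -1;
		if (allocated)
			(*allocated)++;	/* keep the caller's count in sync */
	}
	return 0;
}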
|
/drivers/net/ethernet/broadcom/bnxt/ |
D | bnxt_hwrm.c
     84  ctx->allocated = BNXT_HWRM_DMA_SIZE - BNXT_HWRM_CTX_OFFSET;  in __hwrm_req_init()
    205  ctx->allocated = BNXT_HWRM_DMA_SIZE - BNXT_HWRM_CTX_OFFSET;  in hwrm_req_replace()
    739  max_offset = BNXT_HWRM_DMA_SIZE - ctx->allocated;  in hwrm_req_dma_slice()
    745  ctx->allocated = end - addr;  in hwrm_req_dma_slice()
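These hits suggest allocated tracks how much of a request's fixed DMA workspace remains for slices: it starts at BNXT_HWRM_DMA_SIZE - BNXT_HWRM_CTX_OFFSET and shrinks as hwrm_req_dma_slice() carves pieces out. A deliberately simplified model of that accounting (not the actual bnxt layout or API):

#include <stdint.h>
#include <stddef.h>

#define DMA_SIZE   4096		/* stand-in for BNXT_HWRM_DMA_SIZE */
#define CTX_OFFSET 256		/* stand-in for BNXT_HWRM_CTX_OFFSET */

struct req_ctx {
	uint8_t buf[DMA_SIZE];
	size_t allocated;	/* bytes still available for slices */
};

static void ctx_init(struct req_ctx *ctx)
{
	ctx->allocated = DMA_SIZE - CTX_OFFSET;
}

static void *ctx_slice(struct req_ctx *ctx, size_t size)
{
	size_t end = CTX_OFFSET + ctx->allocated;	/* current end of free area */

	if (size > ctx->allocated)
		return NULL;				/* workspace exhausted */
	ctx->allocated -= size;				/* slice taken from the tail */
	return ctx->buf + (end - size);
}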
|
D | bnxt_hwrm.h
     36  u32 allocated;  member
|
/drivers/media/usb/uvc/ |
D | uvc_queue.c
    388  int allocated;  in uvc_queue_allocated()  local
    391  allocated = vb2_is_busy(&queue->queue);  in uvc_queue_allocated()
    394  return allocated;  in uvc_queue_allocated()
|
/drivers/base/ |
D | swnode.c
     28  unsigned int allocated:1;  member
    765  if (swnode->allocated)  in software_node_release()
    779  unsigned int allocated)  in swnode_register()  argument
    822  swnode->allocated = allocated;  in swnode_register()
|
/drivers/net/ethernet/mellanox/mlx4/ |
D | resource_tracker.c
    315  int allocated, free, reserved, guaranteed, from_free;  in mlx4_grant_resource()  local
    322  allocated = (port > 0) ?  in mlx4_grant_resource()
    323  res_alloc->allocated[(port - 1) *  in mlx4_grant_resource()
    325  res_alloc->allocated[slave];  in mlx4_grant_resource()
    332  if (allocated + count > res_alloc->quota[slave]) {  in mlx4_grant_resource()
    335  allocated, res_alloc->quota[slave]);  in mlx4_grant_resource()
    339  if (allocated + count <= guaranteed) {  in mlx4_grant_resource()
    344  if (guaranteed - allocated > 0)  in mlx4_grant_resource()
    345  from_free = count - (guaranteed - allocated);  in mlx4_grant_resource()
    362  res_alloc->allocated[(port - 1) *  in mlx4_grant_resource()
    [all …]
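mlx4_grant_resource() compares the slave's current allocated count against its quota and its guaranteed reservation: a grant that fits under guaranteed costs the shared free pool nothing, otherwise only the spill beyond the reservation is charged to it. A worked sketch of that arithmetic with illustrative values:

/* Example: quota = 100, guaranteed = 40, allocated = 30, count = 25
 *   30 + 25 <= 100                  -> within quota, grant allowed
 *   30 + 25 >  40                   -> spills past the guaranteed reservation
 *   from_free = 25 - (40 - 30) = 15 -> 15 units charged to the shared pool
 */
static int grant_resource(int quota, int guaranteed, int *allocated,
			  int *free_pool, int count)
{
	int from_free = count;

	if (*allocated + count > quota)
		return -1;			/* would exceed this slave's quota */

	if (*allocated + count <= guaranteed)
		from_free = 0;			/* fully covered by the reservation */
	else if (guaranteed - *allocated > 0)
		from_free = count - (guaranteed - *allocated);

	if (from_free > *free_pool)
		return -1;			/* shared pool cannot cover the spill */

	*free_pool -= from_free;
	*allocated += count;
	return 0;
}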
|
/drivers/net/can/usb/ |
D | ucan.c
    263  bool allocated;  member
    343  up->context_array[i].allocated = false;  in ucan_alloc_context_array()
    366  if (!up->context_array[i].allocated) {  in ucan_alloc_context()
    369  up->context_array[i].allocated = true;  in ucan_alloc_context()
    397  if (ctx->allocated) {  in ucan_release_context()
    398  ctx->allocated = false;  in ucan_release_context()
|
/drivers/soc/qcom/ |
D | smem.c
    110  __le32 allocated;  member
    409  if (entry->allocated)  in qcom_smem_alloc_global()
    425  entry->allocated = cpu_to_le32(1);  in qcom_smem_alloc_global()
    494  if (!entry->allocated)  in qcom_smem_get_global()
|
/drivers/md/ |
D | dm-cache-policy-smq.c
     44  bool allocated:1;  member
    723  e->allocated = true;  in init_entry()
    749  BUG_ON(e->allocated);  in alloc_particular_entry()
    761  BUG_ON(!e->allocated);  in free_entry()
    764  e->allocated = false;  in free_entry()
   1169  BUG_ON(!e->allocated);  in mark_pending()
   1588  if (!e->allocated)  in smq_invalidate_mapping()
   1603  if (!e->allocated)  in smq_get_hint()
|
/drivers/usb/host/ |
D | xhci-mtk.h
    100  bool allocated;  member
|
/drivers/nvdimm/ |
D | namespace_devs.c
    952  resource_size_t allocated = 0, available = 0;  in __size_store()  local
   1008  allocated += nvdimm_allocated_dpa(ndd, &label_id);  in __size_store()
   1012  if (val > available + allocated)  in __size_store()
   1015  if (val == allocated)  in __size_store()
   1019  allocated = div_u64(allocated, nd_region->ndr_mappings);  in __size_store()
   1020  if (val < allocated)  in __size_store()
   1022  allocated - val);  in __size_store()
   1024  rc = grow_dpa_allocation(nd_region, &label_id, val - allocated);  in __size_store()
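In __size_store() the allocated total is summed across the region's DIMMs, checked against available + allocated, divided per mapping, and then only the difference between the requested size and allocated is shrunk or grown. A simplified single-mapping sketch of that decision (shrink_allocation()/grow_allocation() are hypothetical stand-ins):

#include <stdint.h>

/* Hypothetical helpers standing in for the shrink/grow DPA operations;
 * only the decision logic mirrors the hits above. */
extern int shrink_allocation(uint64_t bytes);
extern int grow_allocation(uint64_t bytes);

static int resize(uint64_t val, uint64_t allocated, uint64_t available)
{
	if (val > available + allocated)
		return -1;			/* more than the region can ever hold */
	if (val == allocated)
		return 0;			/* nothing to do */
	if (val < allocated)
		return shrink_allocation(allocated - val);
	return grow_allocation(val - allocated);
}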
|
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/ |
D | msgbuf.c
    278  atomic_t allocated;  member
    343  if (array[*idx].allocated.counter == 0)  in brcmf_msgbuf_alloc_pktid()
    344  if (atomic_cmpxchg(&array[*idx].allocated, 0, 1) == 0)  in brcmf_msgbuf_alloc_pktid()
    377  if (pktids->array[idx].allocated.counter) {  in brcmf_msgbuf_get_pktid()
    383  pktid->allocated.counter = 0;  in brcmf_msgbuf_get_pktid()
    404  if (array[count].allocated.counter) {  in brcmf_msgbuf_release_array()
|