/kernel/linux/linux-5.10/net/sctp/ |
D | inqueue.c |
      44  struct sctp_chunk *chunk, *tmp;    in sctp_inq_free() local
      47  list_for_each_entry_safe(chunk, tmp, &queue->in_chunk_list, list) {    in sctp_inq_free()
      48  list_del_init(&chunk->list);    in sctp_inq_free()
      49  sctp_chunk_free(chunk);    in sctp_inq_free()
      64  void sctp_inq_push(struct sctp_inq *q, struct sctp_chunk *chunk)    in sctp_inq_push() argument
      67  if (chunk->rcvr->dead) {    in sctp_inq_push()
      68  sctp_chunk_free(chunk);    in sctp_inq_push()
      77  list_add_tail(&chunk->list, &q->in_chunk_list);    in sctp_inq_push()
      78  if (chunk->asoc)    in sctp_inq_push()
      79  chunk->asoc->stats.ipackets++;    in sctp_inq_push()
      [all …]

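The inqueue.c hits show the standard kernel idiom for tearing a list down while walking it: list_for_each_entry_safe() keeps a lookahead cursor so the current node can be unlinked and freed without breaking the traversal. A minimal sketch of the same pattern, with the hypothetical my_chunk standing in for struct sctp_chunk:

```c
#include <linux/list.h>
#include <linux/slab.h>

/* Hypothetical stand-in for struct sctp_chunk. */
struct my_chunk {
    struct list_head list;
    /* payload ... */
};

/* Free every element of a list. The _safe variant caches the next
 * node in 'tmp' before the loop body runs, so list_del_init() and
 * kfree() on 'chunk' cannot corrupt the walk. */
static void my_queue_free(struct list_head *head)
{
    struct my_chunk *chunk, *tmp;

    list_for_each_entry_safe(chunk, tmp, head, list) {
        list_del_init(&chunk->list);
        kfree(chunk);
    }
}
```
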
D | chunk.c |
      60  struct sctp_chunk *chunk;    in sctp_datamsg_free() local
      65  list_for_each_entry(chunk, &msg->chunks, frag_list)    in sctp_datamsg_free()
      66  sctp_chunk_free(chunk);    in sctp_datamsg_free()
      76  struct sctp_chunk *chunk;    in sctp_datamsg_destroy() local
      83  chunk = list_entry(pos, struct sctp_chunk, frag_list);    in sctp_datamsg_destroy()
      86  sctp_chunk_put(chunk);    in sctp_datamsg_destroy()
      90  asoc = chunk->asoc;    in sctp_datamsg_destroy()
      92  sent = chunk->has_tsn ? SCTP_DATA_SENT : SCTP_DATA_UNSENT;    in sctp_datamsg_destroy()
      96  ev = sctp_ulpevent_make_send_failed(asoc, chunk, sent,    in sctp_datamsg_destroy()
     104  ev = sctp_ulpevent_make_send_failed_event(asoc, chunk,    in sctp_datamsg_destroy()
      [all …]

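Where inqueue.c frees chunks outright, sctp_datamsg_destroy() only drops a reference with sctp_chunk_put() and lets the last holder do the actual freeing. A sketch of that put side, using the generic refcount_t API rather than SCTP's private counter; my_frag is a hypothetical stand-in:

```c
#include <linux/refcount.h>
#include <linux/slab.h>

/* Hypothetical fragment carrying its own reference count, the role
 * struct sctp_chunk's counter plays for sctp_chunk_put(). */
struct my_frag {
    refcount_t refcnt;
    /* payload ... */
};

/* Drop one reference; whoever drops the last one frees the object. */
static void my_frag_put(struct my_frag *frag)
{
    if (refcount_dec_and_test(&frag->refcnt))
        kfree(frag);
}
```
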
D | output.c |
      46  struct sctp_chunk *chunk);
      48  struct sctp_chunk *chunk);
      50  struct sctp_chunk *chunk);
      52  struct sctp_chunk *chunk,
     121  struct sctp_chunk *chunk = sctp_get_ecne_prepend(asoc);    in sctp_packet_config() local
     123  if (chunk)    in sctp_packet_config()
     124  sctp_packet_append_chunk(packet, chunk);    in sctp_packet_config()
     161  struct sctp_chunk *chunk, *tmp;    in sctp_packet_free() local
     165  list_for_each_entry_safe(chunk, tmp, &packet->chunk_list, list) {    in sctp_packet_free()
     166  list_del_init(&chunk->list);    in sctp_packet_free()
      [all …]

D | outqueue.c |
     210  struct sctp_chunk *chunk, *tmp;    in __sctp_outq_teardown() local
     216  chunk = list_entry(lchunk, struct sctp_chunk,    in __sctp_outq_teardown()
     219  sctp_chunk_fail(chunk, q->error);    in __sctp_outq_teardown()
     220  sctp_chunk_free(chunk);    in __sctp_outq_teardown()
     227  chunk = list_entry(lchunk, struct sctp_chunk,    in __sctp_outq_teardown()
     229  sctp_chunk_fail(chunk, q->error);    in __sctp_outq_teardown()
     230  sctp_chunk_free(chunk);    in __sctp_outq_teardown()
     236  chunk = list_entry(lchunk, struct sctp_chunk,    in __sctp_outq_teardown()
     238  sctp_chunk_fail(chunk, q->error);    in __sctp_outq_teardown()
     239  sctp_chunk_free(chunk);    in __sctp_outq_teardown()
      [all …]

D | sm_statefuns.c |
      54  struct sctp_chunk *chunk,
      57  struct sctp_chunk *chunk,
      62  const struct sctp_chunk *chunk);
      66  const struct sctp_chunk *chunk,
      90  static struct sctp_sackhdr *sctp_sm_pull_sack(struct sctp_chunk *chunk);
     142  struct sctp_chunk *chunk);
     159  static inline bool sctp_chunk_length_valid(struct sctp_chunk *chunk,    in sctp_chunk_length_valid() argument
     162  __u16 chunk_length = ntohs(chunk->chunk_hdr->length);    in sctp_chunk_length_valid()
     165  if (unlikely(chunk->pdiscard))    in sctp_chunk_length_valid()
     174  static inline bool sctp_err_chunk_valid(struct sctp_chunk *chunk)    in sctp_err_chunk_valid() argument
      [all …]

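sctp_chunk_length_valid() byte-swaps the on-wire length with ntohs() and rejects anything shorter than the minimum the state function needs. A hedged sketch of that check against a generic big-endian chunk header; my_chunk_hdr mirrors the type/flags/length layout of the real sctp_chunkhdr:

```c
#include <linux/types.h>
#include <asm/byteorder.h>

/* Generic on-wire chunk header with a big-endian length field. */
struct my_chunk_hdr {
    __u8   type;
    __u8   flags;
    __be16 length;
};

/* The wire length covers the header itself, so a well-formed chunk
 * can never claim fewer bytes than its handler requires. */
static bool my_chunk_length_valid(const struct my_chunk_hdr *hdr,
                                  __u16 required_length)
{
    return ntohs(hdr->length) >= required_length;
}
```
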
D | sm_make_chunk.c |
      67  static void *sctp_addto_param(struct sctp_chunk *chunk, int len,
      73  struct sctp_chunk *chunk = skb_shinfo(skb)->destructor_arg;    in sctp_control_release_owner() local
      75  if (chunk->shkey) {    in sctp_control_release_owner()
      76  struct sctp_shared_key *shkey = chunk->shkey;    in sctp_control_release_owner()
      77  struct sctp_association *asoc = chunk->asoc;    in sctp_control_release_owner()
      93  sctp_auth_shkey_release(chunk->shkey);    in sctp_control_release_owner()
      97  static void sctp_control_set_owner_w(struct sctp_chunk *chunk)    in sctp_control_set_owner_w() argument
      99  struct sctp_association *asoc = chunk->asoc;    in sctp_control_set_owner_w()
     100  struct sk_buff *skb = chunk->skb;    in sctp_control_set_owner_w()
     109  if (chunk->auth) {    in sctp_control_set_owner_w()
      [all …]

/kernel/linux/linux-5.10/mm/ |
D | percpu-vm.c |
      12  static struct page *pcpu_chunk_page(struct pcpu_chunk *chunk,    in pcpu_chunk_page() argument
      16  WARN_ON(chunk->immutable);    in pcpu_chunk_page()
      18  return vmalloc_to_page((void *)pcpu_chunk_addr(chunk, cpu, page_idx));    in pcpu_chunk_page()
      53  static void pcpu_free_pages(struct pcpu_chunk *chunk,    in pcpu_free_pages() argument
      81  static int pcpu_alloc_pages(struct pcpu_chunk *chunk,    in pcpu_alloc_pages() argument
     126  static void pcpu_pre_unmap_flush(struct pcpu_chunk *chunk,    in pcpu_pre_unmap_flush() argument
     130  pcpu_chunk_addr(chunk, pcpu_low_unit_cpu, page_start),    in pcpu_pre_unmap_flush()
     131  pcpu_chunk_addr(chunk, pcpu_high_unit_cpu, page_end));    in pcpu_pre_unmap_flush()
     152  static void pcpu_unmap_pages(struct pcpu_chunk *chunk,    in pcpu_unmap_pages() argument
     162  page = pcpu_chunk_page(chunk, cpu, i);    in pcpu_unmap_pages()
      [all …]

D | percpu.c |
     213  static bool pcpu_addr_in_chunk(struct pcpu_chunk *chunk, void *addr)    in pcpu_addr_in_chunk() argument
     217  if (!chunk)    in pcpu_addr_in_chunk()
     220  start_addr = chunk->base_addr + chunk->start_offset;    in pcpu_addr_in_chunk()
     221  end_addr = chunk->base_addr + chunk->nr_pages * PAGE_SIZE -    in pcpu_addr_in_chunk()
     222  chunk->end_offset;    in pcpu_addr_in_chunk()
     240  static int pcpu_chunk_slot(const struct pcpu_chunk *chunk)    in pcpu_chunk_slot() argument
     242  const struct pcpu_block_md *chunk_md = &chunk->chunk_md;    in pcpu_chunk_slot()
     244  if (chunk->free_bytes < PCPU_MIN_ALLOC_SIZE ||    in pcpu_chunk_slot()
     273  static unsigned long pcpu_chunk_addr(struct pcpu_chunk *chunk,    in pcpu_chunk_addr() argument
     276  return (unsigned long)chunk->base_addr +    in pcpu_chunk_addr()
      [all …]

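pcpu_addr_in_chunk() tests a half-open range whose ends are trimmed by start_offset and end_offset, because the first and last pages of a chunk may be only partially usable. The shape of that test, with my_chunk as an assumed mirror of the fields involved:

```c
#include <linux/mm.h>    /* PAGE_SIZE */
#include <linux/types.h>

/* Assumed mirror of the pcpu_chunk fields the range check touches. */
struct my_chunk {
    void *base_addr;
    int nr_pages;
    int start_offset;    /* unusable bytes at the front */
    int end_offset;      /* unusable bytes at the back */
};

static bool my_addr_in_chunk(struct my_chunk *chunk, void *addr)
{
    void *start, *end;

    if (!chunk)
        return false;

    start = chunk->base_addr + chunk->start_offset;
    end = chunk->base_addr + chunk->nr_pages * PAGE_SIZE -
          chunk->end_offset;

    return addr >= start && addr < end;    /* half-open: [start, end) */
}
```
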
D | percpu-km.c |
      35  static int pcpu_populate_chunk(struct pcpu_chunk *chunk,    in pcpu_populate_chunk() argument
      41  static void pcpu_depopulate_chunk(struct pcpu_chunk *chunk,    in pcpu_depopulate_chunk() argument
      51  struct pcpu_chunk *chunk;    in pcpu_create_chunk() local
      56  chunk = pcpu_alloc_chunk(type, gfp);    in pcpu_create_chunk()
      57  if (!chunk)    in pcpu_create_chunk()
      62  pcpu_free_chunk(chunk);    in pcpu_create_chunk()
      67  pcpu_set_page_chunk(nth_page(pages, i), chunk);    in pcpu_create_chunk()
      69  chunk->data = pages;    in pcpu_create_chunk()
      70  chunk->base_addr = page_address(pages);    in pcpu_create_chunk()
      73  pcpu_chunk_populated(chunk, 0, nr_pages);    in pcpu_create_chunk()
      [all …]

D | percpu-stats.c |
      35  struct pcpu_chunk *chunk;    in find_max_nr_alloc() local
      42  list_for_each_entry(chunk, &pcpu_chunk_list(type)[slot],    in find_max_nr_alloc()
      45  chunk->nr_alloc);    in find_max_nr_alloc()
      56  static void chunk_map_stats(struct seq_file *m, struct pcpu_chunk *chunk,    in chunk_map_stats() argument
      59  struct pcpu_block_md *chunk_md = &chunk->chunk_md;    in chunk_map_stats()
      73  last_alloc = find_last_bit(chunk->alloc_map,    in chunk_map_stats()
      74  pcpu_chunk_map_bits(chunk) -    in chunk_map_stats()
      75  chunk->end_offset / PCPU_MIN_ALLOC_SIZE - 1);    in chunk_map_stats()
      76  last_alloc = test_bit(last_alloc, chunk->alloc_map) ?    in chunk_map_stats()
      80  start = chunk->start_offset / PCPU_MIN_ALLOC_SIZE;    in chunk_map_stats()
      [all …]

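chunk_map_stats() finds the last allocated unit by scanning the allocation bitmap backwards with find_last_bit(), then re-testing the result because find_last_bit() signals "no bit set" by returning the full size. A minimal demonstration of that contract:

```c
#include <linux/bitops.h>    /* find_last_bit(), test_bit() */

/* Return the index of the highest set bit, or -1 if the bitmap is
 * empty; find_last_bit() reports "none" by returning nbits itself. */
static long my_last_alloc(const unsigned long *alloc_map,
                          unsigned long nbits)
{
    unsigned long last = find_last_bit(alloc_map, nbits);

    return last == nbits ? -1 : (long)last;
}
```
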
D | percpu-internal.h |
     102  static inline int pcpu_chunk_nr_blocks(struct pcpu_chunk *chunk)    in pcpu_chunk_nr_blocks() argument
     104  return chunk->nr_pages * PAGE_SIZE / PCPU_BITMAP_BLOCK_SIZE;    in pcpu_chunk_nr_blocks()
     126  static inline int pcpu_chunk_map_bits(struct pcpu_chunk *chunk)    in pcpu_chunk_map_bits() argument
     128  return pcpu_nr_pages_to_map_bits(chunk->nr_pages);    in pcpu_chunk_map_bits()
     132  static inline enum pcpu_chunk_type pcpu_chunk_type(struct pcpu_chunk *chunk)    in pcpu_chunk_type() argument
     134  if (chunk->obj_cgroups)    in pcpu_chunk_type()
     145  static inline enum pcpu_chunk_type pcpu_chunk_type(struct pcpu_chunk *chunk)    in pcpu_chunk_type() argument
     199  static inline void pcpu_stats_area_alloc(struct pcpu_chunk *chunk, size_t size)    in pcpu_stats_area_alloc() argument
     212  chunk->nr_alloc++;    in pcpu_stats_area_alloc()
     213  chunk->max_alloc_size = max(chunk->max_alloc_size, size);    in pcpu_stats_area_alloc()
      [all …]

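Both sizing helpers here are pure arithmetic: one allocation-map bit tracks one PCPU_MIN_ALLOC_SIZE unit, and a block covers PCPU_BITMAP_BLOCK_SIZE bytes. A worked instance under the assumption of 4 KiB pages, a 4-byte minimum allocation unit, and one block per page (typical defaults, not taken from any particular config):

```c
/* Assumed constants for the worked example. */
#define MY_PAGE_SIZE            4096
#define MY_MIN_ALLOC_SIZE       4
#define MY_BITMAP_BLOCK_SIZE    MY_PAGE_SIZE

/* 4096 / 4096 = 1 block per page. */
static int my_chunk_nr_blocks(int nr_pages)
{
    return nr_pages * MY_PAGE_SIZE / MY_BITMAP_BLOCK_SIZE;
}

/* 4096 / 4 = 1024 allocation-map bits per page. */
static int my_chunk_map_bits(int nr_pages)
{
    return nr_pages * (MY_PAGE_SIZE / MY_MIN_ALLOC_SIZE);
}
```
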
/kernel/linux/linux-5.10/drivers/s390/cio/ |
D | itcw.c |
     182  void *chunk;    in itcw_init() local
     194  chunk = fit_chunk(&start, end, sizeof(struct itcw), 1, 0);    in itcw_init()
     195  if (IS_ERR(chunk))    in itcw_init()
     196  return chunk;    in itcw_init()
     197  itcw = chunk;    in itcw_init()
     210  chunk = fit_chunk(&start, end, sizeof(struct tcw), 64, 0);    in itcw_init()
     211  if (IS_ERR(chunk))    in itcw_init()
     212  return chunk;    in itcw_init()
     213  itcw->tcw = chunk;    in itcw_init()
     218  chunk = fit_chunk(&start, end, sizeof(struct tcw), 64, 0);    in itcw_init()
      [all …]

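itcw_init() carves its sub-structures out of one caller-supplied buffer by repeatedly calling fit_chunk() with whatever alignment the next structure needs (1, then 64 above). A sketch of such a carving helper, assuming it only has to bump an aligned cursor and report overflow the way itcw_init() consumes it, via IS_ERR():

```c
#include <linux/err.h>
#include <linux/kernel.h>    /* ALIGN() */

/* Carve 'len' bytes at 'align' alignment out of [*start, end);
 * advance the cursor on success, return ERR_PTR(-ENOSPC) when the
 * request no longer fits. */
static void *my_fit_chunk(unsigned long *start, unsigned long end,
                          size_t len, unsigned int align)
{
    unsigned long addr = ALIGN(*start, align);

    if (addr + len > end)
        return ERR_PTR(-ENOSPC);

    *start = addr + len;
    return (void *)addr;
}
```
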
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/ |
D | icm.c |
      55  static void mlx4_free_icm_pages(struct mlx4_dev *dev, struct mlx4_icm_chunk *chunk)    in mlx4_free_icm_pages() argument
      59  if (chunk->nsg > 0)    in mlx4_free_icm_pages()
      60  dma_unmap_sg(&dev->persist->pdev->dev, chunk->sg, chunk->npages,    in mlx4_free_icm_pages()
      63  for (i = 0; i < chunk->npages; ++i)    in mlx4_free_icm_pages()
      64  __free_pages(sg_page(&chunk->sg[i]),    in mlx4_free_icm_pages()
      65  get_order(chunk->sg[i].length));    in mlx4_free_icm_pages()
      68  static void mlx4_free_icm_coherent(struct mlx4_dev *dev, struct mlx4_icm_chunk *chunk)    in mlx4_free_icm_coherent() argument
      72  for (i = 0; i < chunk->npages; ++i)    in mlx4_free_icm_coherent()
      74  chunk->buf[i].size,    in mlx4_free_icm_coherent()
      75  chunk->buf[i].addr,    in mlx4_free_icm_coherent()
      [all …]

D | icm.h |
      74  struct mlx4_icm_chunk *chunk;    member
     100  iter->chunk = list_empty(&icm->chunk_list) ?    in mlx4_icm_first()
     108  return !iter->chunk;    in mlx4_icm_last()
     113  if (++iter->page_idx >= iter->chunk->nsg) {    in mlx4_icm_next()
     114  if (iter->chunk->list.next == &iter->icm->chunk_list) {    in mlx4_icm_next()
     115  iter->chunk = NULL;    in mlx4_icm_next()
     119  iter->chunk = list_entry(iter->chunk->list.next,    in mlx4_icm_next()
     127  if (iter->chunk->coherent)    in mlx4_icm_addr()
     128  return iter->chunk->buf[iter->page_idx].dma_addr;    in mlx4_icm_addr()
     130  return sg_dma_address(&iter->chunk->sg[iter->page_idx]);    in mlx4_icm_addr()
      [all …]

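mlx4_icm_next() flattens a two-level walk, pages within a chunk and chunks within a circular list, into one iterator that terminates by setting chunk to NULL when the list wraps back to its head. The same mechanics, sketched with assumed minimal types:

```c
#include <linux/list.h>

/* Assumed minimal mirrors of the mlx4 types the iterator touches. */
struct my_icm_chunk {
    struct list_head list;
    int nsg;                       /* mapped entries in this chunk */
};

struct my_icm {
    struct list_head chunk_list;   /* circular list of chunks */
};

struct my_icm_iter {
    struct my_icm *icm;
    struct my_icm_chunk *chunk;    /* NULL means iteration done */
    int page_idx;
};

/* Step to the next page; hop to the next chunk when the current one
 * is exhausted, and stop once the list wraps around to its head. */
static void my_icm_next(struct my_icm_iter *iter)
{
    if (++iter->page_idx >= iter->chunk->nsg) {
        if (iter->chunk->list.next == &iter->icm->chunk_list) {
            iter->chunk = NULL;
            return;
        }
        iter->chunk = list_entry(iter->chunk->list.next,
                                 struct my_icm_chunk, list);
        iter->page_idx = 0;
    }
}
```
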
/kernel/linux/linux-5.10/kernel/ |
D | audit_tree.c |
      42  struct audit_chunk *chunk;    member
     128  static void free_chunk(struct audit_chunk *chunk)    in free_chunk() argument
     132  for (i = 0; i < chunk->count; i++) {    in free_chunk()
     133  if (chunk->owners[i].owner)    in free_chunk()
     134  put_tree(chunk->owners[i].owner);    in free_chunk()
     136  kfree(chunk);    in free_chunk()
     139  void audit_put_chunk(struct audit_chunk *chunk)    in audit_put_chunk() argument
     141  if (atomic_long_dec_and_test(&chunk->refs))    in audit_put_chunk()
     142  free_chunk(chunk);    in audit_put_chunk()
     147  struct audit_chunk *chunk = container_of(rcu, struct audit_chunk, head);    in __put_chunk() local
      [all …]

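audit_put_chunk() frees only when atomic_long_dec_and_test() reports the last reference gone, and __put_chunk() shows the standard RCU-callback pattern: recover the enclosing object from its embedded rcu_head with container_of(). The callback half, sketched with a hypothetical my_chunk:

```c
#include <linux/kernel.h>    /* container_of() */
#include <linux/rcupdate.h>
#include <linux/slab.h>

struct my_chunk {
    struct rcu_head head;
    /* owners, counts ... */
};

/* RCU callback: 'rcu' points at the embedded rcu_head, so step back
 * out to the containing object before freeing it. */
static void my_chunk_rcu_free(struct rcu_head *rcu)
{
    struct my_chunk *chunk = container_of(rcu, struct my_chunk, head);

    kfree(chunk);
}

/* Release side, once readers can no longer find the chunk:
 *     call_rcu(&chunk->head, my_chunk_rcu_free);
 */
```
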
/kernel/linux/linux-5.10/lib/ |
D | genalloc.c |
      38  static inline size_t chunk_size(const struct gen_pool_chunk *chunk)    in chunk_size() argument
      40  return chunk->end_addr - chunk->start_addr + 1;    in chunk_size()
     186  struct gen_pool_chunk *chunk;    in gen_pool_add_owner() local
     191  chunk = vzalloc_node(nbytes, nid);    in gen_pool_add_owner()
     192  if (unlikely(chunk == NULL))    in gen_pool_add_owner()
     195  chunk->phys_addr = phys;    in gen_pool_add_owner()
     196  chunk->start_addr = virt;    in gen_pool_add_owner()
     197  chunk->end_addr = virt + size - 1;    in gen_pool_add_owner()
     198  chunk->owner = owner;    in gen_pool_add_owner()
     199  atomic_long_set(&chunk->avail, size);    in gen_pool_add_owner()
      [all …]

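gen_pool chunks store an inclusive end_addr (virt + size - 1), which is why chunk_size() adds the 1 back. Keeping the end inclusive also lets a chunk reach the very top of the address space without the end marker overflowing to zero. The arithmetic, isolated:

```c
#include <linux/types.h>

/* Inclusive-end convention: a chunk spanning [0x1000, 0x1fff] is
 * 0x1fff - 0x1000 + 1 = 0x1000 bytes long. */
static size_t my_chunk_size(unsigned long start_addr,
                            unsigned long end_addr)
{
    return end_addr - start_addr + 1;
}
```
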
/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/ |
D | i40iw_pble.c |
      53  static void i40iw_free_vmalloc_mem(struct i40iw_hw *hw, struct i40iw_chunk *chunk);
      63  struct i40iw_chunk *chunk;    in i40iw_destroy_pble_pool() local
      68  chunk = list_entry(clist, struct i40iw_chunk, list);    in i40iw_destroy_pble_pool()
      69  if (chunk->type == I40IW_VMALLOC)    in i40iw_destroy_pble_pool()
      70  i40iw_free_vmalloc_mem(dev->hw, chunk);    in i40iw_destroy_pble_pool()
      71  kfree(chunk);    in i40iw_destroy_pble_pool()
     138  struct i40iw_chunk *chunk = info->chunk;    in add_sd_direct() local
     151  chunk->type = I40IW_DMA_COHERENT;    in add_sd_direct()
     155  chunk->size = info->pages << I40IW_HMC_PAGED_BP_SHIFT;    in add_sd_direct()
     156  chunk->vaddr = ((u8 *)sd_entry->u.bp.addr.va + offset);    in add_sd_direct()
      [all …]

/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/steering/ |
D | dr_icm_pool.c |
     179  static int dr_icm_chunk_ste_init(struct mlx5dr_icm_chunk *chunk)    in dr_icm_chunk_ste_init() argument
     181  struct mlx5dr_icm_bucket *bucket = chunk->bucket;    in dr_icm_chunk_ste_init()
     183  chunk->ste_arr = kvzalloc(bucket->num_of_entries *    in dr_icm_chunk_ste_init()
     184  sizeof(chunk->ste_arr[0]), GFP_KERNEL);    in dr_icm_chunk_ste_init()
     185  if (!chunk->ste_arr)    in dr_icm_chunk_ste_init()
     188  chunk->hw_ste_arr = kvzalloc(bucket->num_of_entries *    in dr_icm_chunk_ste_init()
     190  if (!chunk->hw_ste_arr)    in dr_icm_chunk_ste_init()
     193  chunk->miss_list = kvmalloc(bucket->num_of_entries *    in dr_icm_chunk_ste_init()
     194  sizeof(chunk->miss_list[0]), GFP_KERNEL);    in dr_icm_chunk_ste_init()
     195  if (!chunk->miss_list)    in dr_icm_chunk_ste_init()
      [all …]

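dr_icm_chunk_ste_init() sizes three arrays by the same entry count and must either get all of them or none. The usual kernel shape for that is goto-based unwinding, freeing exactly what already succeeded; a sketch with a hypothetical container and caller-supplied element sizes:

```c
#include <linux/errno.h>
#include <linux/mm.h>    /* kvzalloc(), kvmalloc(), kvfree() */

/* Hypothetical holder for the three per-chunk arrays. */
struct my_chunk_arrays {
    void *ste_arr;
    void *hw_ste_arr;
    void *miss_list;
};

static int my_chunk_arrays_init(struct my_chunk_arrays *a, size_t n,
                                size_t ste_sz, size_t hw_ste_sz,
                                size_t miss_sz)
{
    a->ste_arr = kvzalloc(n * ste_sz, GFP_KERNEL);
    if (!a->ste_arr)
        return -ENOMEM;

    a->hw_ste_arr = kvzalloc(n * hw_ste_sz, GFP_KERNEL);
    if (!a->hw_ste_arr)
        goto err_free_ste;

    a->miss_list = kvmalloc(n * miss_sz, GFP_KERNEL);
    if (!a->miss_list)
        goto err_free_hw_ste;

    return 0;

err_free_hw_ste:    /* unwind in reverse allocation order */
    kvfree(a->hw_ste_arr);
err_free_ste:
    kvfree(a->ste_arr);
    return -ENOMEM;
}
```
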
/kernel/linux/linux-5.10/include/net/sctp/ |
D | sm.h |
     158  __u8 sctp_get_chunk_type(struct sctp_chunk *chunk);
     176  const struct sctp_chunk *chunk,
     179  const struct sctp_chunk *chunk);
     181  const struct sctp_chunk *chunk);
     184  const struct sctp_chunk *chunk);
     197  const struct sctp_chunk *chunk);
     199  const struct sctp_chunk *chunk);
     202  const struct sctp_chunk *chunk);
     203  int sctp_init_cause(struct sctp_chunk *chunk, __be16 cause, size_t paylen);
     205  const struct sctp_chunk *chunk,
      [all …]

/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/ |
D | mthca_memfree.c |
      64  static void mthca_free_icm_pages(struct mthca_dev *dev, struct mthca_icm_chunk *chunk)    in mthca_free_icm_pages() argument
      68  if (chunk->nsg > 0)    in mthca_free_icm_pages()
      69  pci_unmap_sg(dev->pdev, chunk->mem, chunk->npages,    in mthca_free_icm_pages()
      72  for (i = 0; i < chunk->npages; ++i)    in mthca_free_icm_pages()
      73  __free_pages(sg_page(&chunk->mem[i]),    in mthca_free_icm_pages()
      74  get_order(chunk->mem[i].length));    in mthca_free_icm_pages()
      77  static void mthca_free_icm_coherent(struct mthca_dev *dev, struct mthca_icm_chunk *chunk)    in mthca_free_icm_coherent() argument
      81  for (i = 0; i < chunk->npages; ++i) {    in mthca_free_icm_coherent()
      82  dma_free_coherent(&dev->pdev->dev, chunk->mem[i].length,    in mthca_free_icm_coherent()
      83  lowmem_page_address(sg_page(&chunk->mem[i])),    in mthca_free_icm_coherent()
      [all …]

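Both this driver and mlx4's icm.c above release high-order page allocations with __free_pages(page, get_order(len)): get_order() maps a byte count to the smallest page order covering it, so a 16 KiB scatterlist entry frees an order-2 allocation on 4 KiB pages. The non-coherent loop, reduced to its essentials:

```c
#include <asm/page.h>          /* get_order() */
#include <linux/gfp.h>         /* __free_pages() */
#include <linux/scatterlist.h>

/* Free every page run referenced by a scatterlist; each entry's
 * length tells get_order() how large the original allocation was. */
static void my_free_sg_pages(struct scatterlist *sg, int npages)
{
    int i;

    for (i = 0; i < npages; ++i)
        __free_pages(sg_page(&sg[i]), get_order(sg[i].length));
}
```
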
/kernel/linux/linux-5.10/drivers/gpu/drm/panel/ |
D | panel-samsung-s6e63m0-dsi.c |
      42  int chunk;    in s6e63m0_dsi_dcs_write() local
      52  chunk = remain;    in s6e63m0_dsi_dcs_write()
      55  if (chunk > S6E63M0_DSI_MAX_CHUNK)    in s6e63m0_dsi_dcs_write()
      56  chunk = S6E63M0_DSI_MAX_CHUNK;    in s6e63m0_dsi_dcs_write()
      57  ret = mipi_dsi_dcs_write(dsi, cmd, seqp, chunk);    in s6e63m0_dsi_dcs_write()
      62  cmdwritten += chunk;    in s6e63m0_dsi_dcs_write()
      63  seqp += chunk;    in s6e63m0_dsi_dcs_write()
      66  chunk = remain - cmdwritten;    in s6e63m0_dsi_dcs_write()
      67  if (chunk > S6E63M0_DSI_MAX_CHUNK)    in s6e63m0_dsi_dcs_write()
      68  chunk = S6E63M0_DSI_MAX_CHUNK;    in s6e63m0_dsi_dcs_write()
      [all …]

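The panel driver pushes a long DCS sequence out in pieces no larger than S6E63M0_DSI_MAX_CHUNK, advancing a cursor until everything is written. The generic shape of that loop, with the hypothetical my_write() standing in for mipi_dsi_dcs_write() and its per-transfer limit:

```c
#include <linux/kernel.h>    /* min_t() */
#include <linux/types.h>

int my_write(const u8 *buf, size_t len);    /* assumed transport */

static int my_chunked_write(const u8 *seq, size_t len, size_t max_chunk)
{
    size_t written = 0;

    while (written < len) {
        size_t chunk = min_t(size_t, len - written, max_chunk);
        int ret = my_write(seq + written, chunk);

        if (ret < 0)
            return ret;    /* propagate transport errors */

        written += chunk;
    }

    return 0;
}
```
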
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/ |
D | nouveau_dmem.c |
      95  struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);    in page_to_drm() local
      97  return chunk->drm;    in page_to_drm()
     102  struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);    in nouveau_dmem_page_addr() local
     104  chunk->pagemap.range.start;    in nouveau_dmem_page_addr()
     106  return chunk->bo->offset + off;    in nouveau_dmem_page_addr()
     111  struct nouveau_dmem_chunk *chunk = nouveau_page_to_chunk(page);    in nouveau_dmem_page_free() local
     112  struct nouveau_dmem *dmem = chunk->drm->dmem;    in nouveau_dmem_page_free()
     118  WARN_ON(!chunk->callocated);    in nouveau_dmem_page_free()
     119  chunk->callocated--;    in nouveau_dmem_page_free()
     229  struct nouveau_dmem_chunk *chunk;    in nouveau_dmem_chunk_alloc() local
      [all …]

/kernel/linux/linux-5.10/arch/arm64/crypto/ |
D | crct10dif-ce-glue.c |
      41  unsigned int chunk = length;    in crct10dif_update_pmull_p8() local
      43  if (chunk > SZ_4K + CRC_T10DIF_PMULL_CHUNK_SIZE)    in crct10dif_update_pmull_p8()
      44  chunk = SZ_4K;    in crct10dif_update_pmull_p8()
      47  *crc = crc_t10dif_pmull_p8(*crc, data, chunk);    in crct10dif_update_pmull_p8()
      49  data += chunk;    in crct10dif_update_pmull_p8()
      50  length -= chunk;    in crct10dif_update_pmull_p8()
      66  unsigned int chunk = length;    in crct10dif_update_pmull_p64() local
      68  if (chunk > SZ_4K + CRC_T10DIF_PMULL_CHUNK_SIZE)    in crct10dif_update_pmull_p64()
      69  chunk = SZ_4K;    in crct10dif_update_pmull_p64()
      72  *crc = crc_t10dif_pmull_p64(*crc, data, chunk);    in crct10dif_update_pmull_p64()
      [all …]

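Both PMULL variants cap each pass near SZ_4K so the window between kernel_neon_begin() and kernel_neon_end(), during which preemption is disabled, stays short; the `SZ_4K + CRC_T10DIF_PMULL_CHUNK_SIZE` test merely avoids splitting off a uselessly small tail. A simplified sketch of the loop, with my_crc_pmull() as the assumed SIMD primitive:

```c
#include <asm/neon.h>        /* kernel_neon_begin()/kernel_neon_end() */
#include <linux/kernel.h>    /* min_t() */
#include <linux/sizes.h>     /* SZ_4K */
#include <linux/types.h>

u16 my_crc_pmull(u16 crc, const u8 *data, size_t len);    /* assumed */

static u16 my_crc_update(u16 crc, const u8 *data, size_t length)
{
    while (length) {
        size_t chunk = min_t(size_t, length, SZ_4K);

        /* NEON registers may only be used inside this window;
         * small chunks bound the preemption-off time. */
        kernel_neon_begin();
        crc = my_crc_pmull(crc, data, chunk);
        kernel_neon_end();

        data += chunk;
        length -= chunk;
    }

    return crc;
}
```
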
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/isp/kernels/s3a/s3a_1.0/ |
D | ia_css_s3a.host.c |
     339  int out_width, out_height, chunk, rest, kmax, y, x, k, elm_start, elm, ofs;    in ia_css_s3a_vmem_decode() local
     354  chunk = ISP_VEC_NELEMS >> host_stats->grid.deci_factor_log2;    in ia_css_s3a_vmem_decode()
     355  chunk = max(chunk, 1);    in ia_css_s3a_vmem_decode()
     362  kmax = (rest > chunk) ? chunk : rest;    in ia_css_s3a_vmem_decode()
     367  hi[elm + chunk * 0], lo[elm + chunk * 0]);    in ia_css_s3a_vmem_decode()
     369  hi[elm + chunk * 1], lo[elm + chunk * 1]);    in ia_css_s3a_vmem_decode()
     371  hi[elm + chunk * 2], lo[elm + chunk * 2]);    in ia_css_s3a_vmem_decode()
     373  hi[elm + chunk * 3], lo[elm + chunk * 3]);    in ia_css_s3a_vmem_decode()
     375  hi[elm + chunk * 4], lo[elm + chunk * 4]);    in ia_css_s3a_vmem_decode()
     377  hi[elm + chunk * 5], lo[elm + chunk * 5]);    in ia_css_s3a_vmem_decode()
      [all …]

/kernel/linux/linux-5.10/drivers/gpu/drm/qxl/ |
D | qxl_image.c |
      38  struct qxl_drm_chunk *chunk;    in qxl_allocate_chunk() local
      41  chunk = kmalloc(sizeof(struct qxl_drm_chunk), GFP_KERNEL);    in qxl_allocate_chunk()
      42  if (!chunk)    in qxl_allocate_chunk()
      45  ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo);    in qxl_allocate_chunk()
      47  kfree(chunk);    in qxl_allocate_chunk()
      51  list_add_tail(&chunk->head, &image->chunk_list);    in qxl_allocate_chunk()
      88  struct qxl_drm_chunk *chunk, *tmp;    in qxl_image_free_objects() local
      90  list_for_each_entry_safe(chunk, tmp, &dimage->chunk_list, head) {    in qxl_image_free_objects()
      91  qxl_bo_unref(&chunk->bo);    in qxl_image_free_objects()
      92  kfree(chunk);    in qxl_image_free_objects()
      [all …]