/drivers/android/ |
D | binder_alloc.c |
      61  static size_t binder_alloc_buffer_size(struct binder_alloc *alloc,    in binder_alloc_buffer_size() argument
      64  if (list_is_last(&buffer->entry, &alloc->buffers))    in binder_alloc_buffer_size()
      65  return alloc->buffer + alloc->buffer_size - buffer->user_data;    in binder_alloc_buffer_size()
      69  static void binder_insert_free_buffer(struct binder_alloc *alloc,    in binder_insert_free_buffer() argument
      72  struct rb_node **p = &alloc->free_buffers.rb_node;    in binder_insert_free_buffer()
      80  new_buffer_size = binder_alloc_buffer_size(alloc, new_buffer);    in binder_insert_free_buffer()
      84  alloc->pid, new_buffer_size, new_buffer);    in binder_insert_free_buffer()
      91  buffer_size = binder_alloc_buffer_size(alloc, buffer);    in binder_insert_free_buffer()
      99  rb_insert_color(&new_buffer->rb_node, &alloc->free_buffers);    in binder_insert_free_buffer()
     103  struct binder_alloc *alloc, struct binder_buffer *new_buffer)    in binder_insert_allocated_buffer_locked() argument
    [all …]
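Aside: the excerpt above shows that binder never stores a buffer's size at all. A binder_buffer records only where it starts; binder_alloc_buffer_size() derives the size from the start of the next buffer in the list, or from the end of the mapped region when the buffer is last. A minimal userspace model of that arithmetic (all names here are made up; only the computation mirrors the driver):

#include <stddef.h>
#include <stdio.h>

struct buf {
	size_t start;   /* offset of this buffer in the mapping */
};

/* Buffers sorted by start offset; the last one extends to pool_size. */
static size_t buf_size(const struct buf *bufs, size_t n, size_t i,
		       size_t pool_size)
{
	if (i == n - 1)                         /* the list_is_last() case */
		return pool_size - bufs[i].start;
	return bufs[i + 1].start - bufs[i].start;
}

int main(void)
{
	struct buf bufs[] = { { 0 }, { 128 }, { 512 } };

	/* Prints 128, 384, and (4096 - 512) = 3584 bytes respectively. */
	for (size_t i = 0; i < 3; i++)
		printf("buffer %zu: %zu bytes\n", i, buf_size(bufs, 3, i, 4096));
	return 0;
}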
|
D | binder_alloc_selftest.c |
      92  static bool check_buffer_pages_allocated(struct binder_alloc *alloc,    in check_buffer_pages_allocated() argument
     103  page_index = (page_addr - (uintptr_t)alloc->buffer) / PAGE_SIZE;    in check_buffer_pages_allocated()
     104  if (!alloc->pages[page_index].page_ptr ||    in check_buffer_pages_allocated()
     105  !list_empty(&alloc->pages[page_index].lru)) {    in check_buffer_pages_allocated()
     107  alloc->pages[page_index].page_ptr ?    in check_buffer_pages_allocated()
     115  static void binder_selftest_alloc_buf(struct binder_alloc *alloc,    in binder_selftest_alloc_buf() argument
     122  buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0);    in binder_selftest_alloc_buf()
     124  !check_buffer_pages_allocated(alloc, buffers[i],    in binder_selftest_alloc_buf()
     132  static void binder_selftest_free_buf(struct binder_alloc *alloc,    in binder_selftest_free_buf() argument
     139  binder_alloc_free_buf(alloc, buffers[seq[i]]);    in binder_selftest_free_buf()
    [all …]
|
D | binder_alloc.h |
      70  struct binder_alloc *alloc;    member
     118  void binder_selftest_alloc(struct binder_alloc *alloc);
     120  static inline void binder_selftest_alloc(struct binder_alloc *alloc) {}    in binder_selftest_alloc() argument
     125  struct binder_buffer *binder_alloc_new_buf(struct binder_alloc *alloc,
     130  void binder_alloc_init(struct binder_alloc *alloc);
     133  void binder_alloc_vma_close(struct binder_alloc *alloc);
     135  binder_alloc_prepare_to_free(struct binder_alloc *alloc,
     137  void binder_alloc_free_buf(struct binder_alloc *alloc,
     139  int binder_alloc_mmap_handler(struct binder_alloc *alloc,
     141  void binder_alloc_deferred_release(struct binder_alloc *alloc);
    [all …]
|
D | binder_internal.h |
     456  struct binder_alloc alloc;    member
     470  binder_proc_entry(struct binder_alloc *alloc)    in binder_proc_entry() argument
     472  return container_of(alloc, struct binder_proc, alloc);    in binder_proc_entry()
     482  binder_alloc_to_proc_wrap(struct binder_alloc *alloc)    in binder_alloc_to_proc_wrap() argument
     484  return binder_proc_wrap_entry(binder_proc_entry(alloc));    in binder_alloc_to_proc_wrap()
     487  static inline void binder_alloc_lock_init(struct binder_alloc *alloc)    in binder_alloc_lock_init() argument
     489  spin_lock_init(&binder_alloc_to_proc_wrap(alloc)->lock);    in binder_alloc_lock_init()
     492  static inline void binder_alloc_lock(struct binder_alloc *alloc)    in binder_alloc_lock() argument
     494  spin_lock(&binder_alloc_to_proc_wrap(alloc)->lock);    in binder_alloc_lock()
     497  static inline void binder_alloc_unlock(struct binder_alloc *alloc)    in binder_alloc_unlock() argument
    [all …]
|
D | binder_trace.h |
     314  TP_PROTO(struct binder_alloc *alloc, bool allocate,
     316  TP_ARGS(alloc, allocate, start, end),
     324  __entry->proc = alloc->pid;
     326  __entry->offset = start - (uintptr_t)alloc->buffer;
     335  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
     336  TP_ARGS(alloc, page_index),
     342  __entry->proc = alloc->pid;
     350  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
     351  TP_ARGS(alloc, page_index));
     354  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
    [all …]
|
/drivers/infiniband/hw/cxgb4/ |
D | id_table.c |
      44  u32 c4iw_id_alloc(struct c4iw_id_table *alloc)    in c4iw_id_alloc() argument
      49  spin_lock_irqsave(&alloc->lock, flags);    in c4iw_id_alloc()
      51  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);    in c4iw_id_alloc()
      52  if (obj >= alloc->max)    in c4iw_id_alloc()
      53  obj = find_first_zero_bit(alloc->table, alloc->max);    in c4iw_id_alloc()
      55  if (obj < alloc->max) {    in c4iw_id_alloc()
      56  if (alloc->flags & C4IW_ID_TABLE_F_RANDOM)    in c4iw_id_alloc()
      57  alloc->last += prandom_u32() % RANDOM_SKIP;    in c4iw_id_alloc()
      59  alloc->last = obj + 1;    in c4iw_id_alloc()
      60  if (alloc->last >= alloc->max)    in c4iw_id_alloc()
    [all …]
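c4iw_id_alloc() is a classic wrap-around bitmap allocator: scan for a zero bit starting where the last allocation left off, wrap to the beginning if the scan runs off the end, and remember the next starting point (optionally randomized via C4IW_ID_TABLE_F_RANDOM). mthca_alloc() under /drivers/infiniband/hw/mthca/ below reuses the same scan. A self-contained sketch of that loop, with a naive stand-in for find_next_zero_bit()/find_first_zero_bit() and otherwise hypothetical names:

#include <stdbool.h>
#include <stdio.h>

#define ID_MAX 8

struct id_table {
	bool used[ID_MAX];      /* stands in for the kernel bitmap    */
	unsigned int last;      /* next position to start scanning at */
};

static unsigned int next_zero(const bool *t, unsigned int max, unsigned int from)
{
	for (unsigned int i = from; i < max; i++)
		if (!t[i])
			return i;
	return max;                     /* "not found", as in the kernel */
}

static int id_alloc(struct id_table *t)
{
	unsigned int obj = next_zero(t->used, ID_MAX, t->last);

	if (obj >= ID_MAX)              /* ran off the end: wrap around  */
		obj = next_zero(t->used, ID_MAX, 0);
	if (obj >= ID_MAX)
		return -1;              /* table full */

	t->used[obj] = true;
	t->last = (obj + 1) % ID_MAX;   /* resume after the last winner  */
	return (int)obj;
}

int main(void)
{
	struct id_table t = { 0 };

	for (int i = 0; i < 10; i++)
		printf("got id %d\n", id_alloc(&t));  /* 0..7, then -1 */
	return 0;
}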
|
/drivers/infiniband/hw/mthca/ |
D | mthca_allocator.c |
      40  u32 mthca_alloc(struct mthca_alloc *alloc)    in mthca_alloc() argument
      45  spin_lock_irqsave(&alloc->lock, flags);    in mthca_alloc()
      47  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);    in mthca_alloc()
      48  if (obj >= alloc->max) {    in mthca_alloc()
      49  alloc->top = (alloc->top + alloc->max) & alloc->mask;    in mthca_alloc()
      50  obj = find_first_zero_bit(alloc->table, alloc->max);    in mthca_alloc()
      53  if (obj < alloc->max) {    in mthca_alloc()
      54  set_bit(obj, alloc->table);    in mthca_alloc()
      55  obj |= alloc->top;    in mthca_alloc()
      59  spin_unlock_irqrestore(&alloc->lock, flags);    in mthca_alloc()
    [all …]
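mthca_alloc() adds one twist to the same scan: whenever the search wraps (line 49), a "top" prefix is rotated and ORed into the returned handle (line 55), so a recycled low index still yields a different handle from its previous life, presumably to help catch use of stale handles. A toy demonstration of the rotation, with made-up sizes (8 bitmap entries in a 5-bit handle space):

#include <stdint.h>
#include <stdio.h>

#define MAX_OBJ     8           /* bitmap entries (power of two)       */
#define HANDLE_MASK 0x1f        /* 5-bit handles: prefix cycles through
				 * four generations before repeating   */

int main(void)
{
	uint32_t top = 0;

	/* Each time the scan wraps, rotate the prefix ORed into handles. */
	for (int wrap = 0; wrap < 5; wrap++) {
		printf("generation %d: index 3 -> handle 0x%02x\n",
		       wrap, top | 3);
		top = (top + MAX_OBJ) & HANDLE_MASK;
	}
	return 0;   /* prints 0x03, 0x0b, 0x13, 0x1b, then 0x03 again */
}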
|
D | mthca_uar.c |
      40  uar->index = mthca_alloc(&dev->uar_table.alloc);    in mthca_uar_alloc()
      51  mthca_free(&dev->uar_table.alloc, uar->index);    in mthca_uar_free()
      58  ret = mthca_alloc_init(&dev->uar_table.alloc,    in mthca_init_uar_table()
      67  mthca_alloc_cleanup(&dev->uar_table.alloc);    in mthca_init_uar_table()
      77  mthca_alloc_cleanup(&dev->uar_table.alloc);    in mthca_cleanup_uar_table()
|
D | mthca_pd.c |
      46  pd->pd_num = mthca_alloc(&dev->pd_table.alloc);    in mthca_pd_alloc()
      56  mthca_free(&dev->pd_table.alloc, pd->pd_num);    in mthca_pd_alloc()
      66  mthca_free(&dev->pd_table.alloc, pd->pd_num);    in mthca_pd_free()
      71  return mthca_alloc_init(&dev->pd_table.alloc,    in mthca_init_pd_table()
      80  mthca_alloc_cleanup(&dev->pd_table.alloc);    in mthca_cleanup_pd_table()
|
D | mthca_dev.h |
     194  struct mthca_alloc alloc;    member
     200  struct mthca_alloc alloc;    member
     226  struct mthca_alloc alloc;    member
     239  struct mthca_alloc alloc;    member
     246  struct mthca_alloc alloc;    member
     253  struct mthca_alloc alloc;    member
     269  struct mthca_alloc alloc;    member
     274  struct mthca_alloc alloc;    member
     413  u32 mthca_alloc(struct mthca_alloc *alloc);
     414  void mthca_free(struct mthca_alloc *alloc, u32 obj);
    [all …]
|
D | mthca_mcg.c |
     148  index = mthca_alloc(&dev->mcg_table.alloc);    in mthca_multicast_attach()
     206  mthca_free(&dev->mcg_table.alloc, index);    in mthca_multicast_attach()
     286  mthca_free(&dev->mcg_table.alloc, amgm_index_to_free);    in mthca_multicast_detach()
     305  mthca_free(&dev->mcg_table.alloc, index);    in mthca_multicast_detach()
     320  err = mthca_alloc_init(&dev->mcg_table.alloc,    in mthca_init_mcg_table()
     334  mthca_alloc_cleanup(&dev->mcg_table.alloc);    in mthca_cleanup_mcg_table()
|
D | mthca_av.c |
     172  index = mthca_alloc(&dev->av_table.alloc);    in mthca_create_ah()
     247  mthca_free(&dev->av_table.alloc,    in mthca_destroy_ah()
     333  err = mthca_alloc_init(&dev->av_table.alloc,    in mthca_init_av_table()
     363  mthca_alloc_cleanup(&dev->av_table.alloc);    in mthca_init_av_table()
     375  mthca_alloc_cleanup(&dev->av_table.alloc);    in mthca_cleanup_av_table()
|
/drivers/char/agp/ |
D | compat_ioctl.c |
     155  struct agp_allocate32 alloc;    in compat_agpioc_allocate_wrap() local
     158  if (copy_from_user(&alloc, arg, sizeof(alloc)))    in compat_agpioc_allocate_wrap()
     161  memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type);    in compat_agpioc_allocate_wrap()
     166  alloc.key = memory->key;    in compat_agpioc_allocate_wrap()
     167  alloc.physical = memory->physical;    in compat_agpioc_allocate_wrap()
     169  if (copy_to_user(arg, &alloc, sizeof(alloc))) {    in compat_agpioc_allocate_wrap()
|
D | frontend.c |
     875  struct agp_allocate alloc;    in agpioc_allocate_wrap() local
     878  if (copy_from_user(&alloc, arg, sizeof(struct agp_allocate)))    in agpioc_allocate_wrap()
     881  if (alloc.type >= AGP_USER_TYPES)    in agpioc_allocate_wrap()
     884  memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type);    in agpioc_allocate_wrap()
     889  alloc.key = memory->key;    in agpioc_allocate_wrap()
     890  alloc.physical = memory->physical;    in agpioc_allocate_wrap()
     892  if (copy_to_user(arg, &alloc, sizeof(struct agp_allocate))) {    in agpioc_allocate_wrap()
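Both agpioc_allocate_wrap() and its compat variant above follow the canonical ioctl round-trip: copy the argument struct in, validate it, do the allocation, write the results (key, physical) back into the struct, and copy it out, unwinding the allocation if that final copy fails. Sketched in userspace, with memcpy() standing in for copy_from_user()/copy_to_user() and entirely hypothetical request/memory types:

#include <errno.h>
#include <stdlib.h>
#include <string.h>

struct alloc_req { unsigned int pg_count, type, key; };
struct memory    { unsigned int key; };

#define USER_TYPES 4

static struct memory *allocate_memory(unsigned int pg_count, unsigned int type)
{
	(void)pg_count; (void)type;
	struct memory *m = malloc(sizeof(*m));
	if (m)
		m->key = 42;
	return m;
}

static int ioctl_allocate(void *arg)
{
	struct alloc_req req;
	struct memory *memory;

	memcpy(&req, arg, sizeof(req));         /* copy_from_user()        */
	if (req.type >= USER_TYPES)             /* validate before using   */
		return -EINVAL;

	memory = allocate_memory(req.pg_count, req.type);
	if (!memory)
		return -ENOMEM;

	req.key = memory->key;                  /* report results back     */
	memcpy(arg, &req, sizeof(req));         /* copy_to_user(); the real
						 * handler frees `memory`
						 * if this copy faults, and
						 * otherwise tracks it      */
	return 0;
}

int main(void)
{
	struct alloc_req req = { .pg_count = 16, .type = 0 };
	return ioctl_allocate(&req) ? 1 : 0;
}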
|
/drivers/thunderbolt/ |
D | lc.c |
     316  u32 val, alloc;    in tb_lc_dp_sink_available() local
     329  alloc = val & TB_LC_SNK_ALLOCATION_SNK0_MASK;    in tb_lc_dp_sink_available()
     330  if (!alloc || alloc == TB_LC_SNK_ALLOCATION_SNK0_CM)    in tb_lc_dp_sink_available()
     333  alloc = (val & TB_LC_SNK_ALLOCATION_SNK1_MASK) >>    in tb_lc_dp_sink_available()
     335  if (!alloc || alloc == TB_LC_SNK_ALLOCATION_SNK1_CM)    in tb_lc_dp_sink_available()
|
/drivers/xen/xenbus/ |
D | xenbus_comms.c |
     211  void *alloc;    in process_msg() member
     264  state.alloc = kmalloc(len, GFP_NOIO | __GFP_HIGH);    in process_msg()
     265  if (!state.alloc)    in process_msg()
     271  state.body = state.alloc;    in process_msg()
     319  state.alloc = NULL;    in process_msg()
     325  kfree(state.alloc);    in process_msg()
     326  state.alloc = NULL;    in process_msg()
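The state.alloc juggling in process_msg() is an ownership-handoff idiom: state.alloc names the memory the state machine still owns, it is cleared once the message body is handed off (line 319), and the common cleanup path kfree()s it unconditionally, relying on kfree(NULL) being a no-op. The same shape in portable C, with a hypothetical deliver() step and free(NULL) playing the kfree(NULL) role:

#include <stdlib.h>

struct msg_state {
	void *alloc;            /* buffer this state machine still owns */
	char *body;
};

static int deliver(char *body) { free(body); return 0; }  /* takes ownership */

static int process_one(size_t len, int fail_midway)
{
	struct msg_state state = { 0 };
	int err = 0;

	state.alloc = malloc(len);
	if (!state.alloc)
		return -1;
	state.body = state.alloc;

	if (fail_midway) {
		err = -1;
		goto out;       /* we still own the buffer: cleanup frees it */
	}

	deliver(state.body);    /* ownership handed off ...                  */
	state.alloc = NULL;     /* ... so cleanup must not free it           */
out:
	free(state.alloc);      /* safe either way: free(NULL) is a no-op    */
	state.alloc = NULL;
	return err;
}

int main(void)
{
	process_one(64, 0);
	process_one(64, 1);
	return 0;
}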
|
/drivers/md/bcache/ |
D | alloc.c |
     569  struct bkey *alloc)    in pick_data_bucket() argument
     585  if (!ret->sectors_free && KEY_PTRS(alloc)) {    in pick_data_bucket()
     587  bkey_copy(&ret->key, alloc);    in pick_data_bucket()
     588  bkey_init(alloc);    in pick_data_bucket()
     615  BKEY_PADDED(key) alloc;    in bch_alloc_sectors()
     625  bkey_init(&alloc.key);    in bch_alloc_sectors()
     628  while (!(b = pick_data_bucket(c, k, write_point, &alloc.key))) {    in bch_alloc_sectors()
     635  if (bch_bucket_alloc_set(c, watermark, &alloc.key, wait))    in bch_alloc_sectors()
     646  if (KEY_PTRS(&alloc.key))    in bch_alloc_sectors()
     647  bkey_put(c, &alloc.key);    in bch_alloc_sectors()
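bch_alloc_sectors() shows the allocate-outside-the-lock retry pattern: pick_data_bucket() consumes a pre-filled spare key (alloc.key) when it needs one, the slow path refills the spare with the lock dropped (bch_bucket_alloc_set() may sleep), and a leftover spare is released afterwards (the KEY_PTRS()/bkey_put() pair). A generalized sketch of that loop under those assumptions, with hypothetical names:

#include <pthread.h>
#include <stdlib.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static void *cached;                    /* resource available under the lock */

/* Consumes *spare (and clears it) if one is needed, like bkey_copy()/bkey_init(). */
static void *try_pick(void **spare)
{
	if (!cached && *spare) {
		cached = *spare;
		*spare = NULL;
	}
	return cached;
}

static void *alloc_resource(void)
{
	void *spare = NULL, *res;

	for (;;) {
		pthread_mutex_lock(&lock);
		res = try_pick(&spare);
		pthread_mutex_unlock(&lock);
		if (res)
			break;
		spare = malloc(64);     /* slow path, done outside the lock  */
		if (!spare)
			return NULL;
	}
	if (spare)                      /* raced: the spare never got used   */
		free(spare);
	return res;
}

int main(void) { return alloc_resource() ? 0 : 1; }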
|
/drivers/dax/ |
D | bus.c |
     780  struct resource *alloc;    in alloc_dev_dax_range() local
     799  alloc = __request_region(res, start, size, dev_name(dev), 0);    in alloc_dev_dax_range()
     800  if (!alloc) {    in alloc_dev_dax_range()
     819  .start = alloc->start,    in alloc_dev_dax_range()
     820  .end = alloc->end,    in alloc_dev_dax_range()
     825  &alloc->start, &alloc->end);    in alloc_dev_dax_range()
     968  resource_size_t alloc = 0;    in dev_dax_resize() local
    1001  alloc = min(res->start - dax_region->res.start, to_alloc);    in dev_dax_resize()
    1002  rc = alloc_dev_dax_range(dev_dax, dax_region->res.start, alloc);    in dev_dax_resize()
    1006  alloc = 0;    in dev_dax_resize()
    [all …]
|
/drivers/firmware/efi/libstub/ |
D | Makefile |
      94  STUBCOPY_FLAGS-$(CONFIG_X86) += --rename-section .bss=.bss.efistub,load,alloc
     104  --rename-section .bss=.bss.efistub,load,alloc
     124  STUBCOPY_FLAGS-$(CONFIG_ARM64) += --prefix-alloc-sections=.init \
     131  STUBCOPY_FLAGS-$(CONFIG_RISCV) += --prefix-alloc-sections=.init \
|
/drivers/net/ethernet/ |
D | ec_bhf.c |
     117  u8 *alloc;    member
     341  buf->alloc = dma_alloc_coherent(dev, buf->alloc_len, &buf->alloc_phys,    in ec_bhf_alloc_dma_mem()
     343  if (buf->alloc == NULL) {    in ec_bhf_alloc_dma_mem()
     349  buf->buf = buf->alloc + (buf->buf_phys - buf->alloc_phys);    in ec_bhf_alloc_dma_mem()
     426  dma_free_coherent(dev, priv->rx_buf.alloc_len, priv->rx_buf.alloc,    in ec_bhf_open()
     444  priv->tx_buf.alloc, priv->tx_buf.alloc_phys);    in ec_bhf_stop()
     446  priv->rx_buf.alloc, priv->rx_buf.alloc_phys);    in ec_bhf_stop()
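Line 349 above is the standard trick for carving an aligned sub-buffer out of an over-sized DMA-coherent allocation: align the bus (physical) address, then advance the CPU pointer by the same byte offset so both views still name the same memory. A small worked example with a made-up bus address:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

int main(void)
{
	uint8_t backing[4096 + 256];            /* over-allocated block      */
	uint8_t *alloc = backing;               /* CPU address from the      */
	uintptr_t alloc_phys = 0x10000004;      /* allocator, plus its "bus" */
						/* address (made up here)    */
	uintptr_t buf_phys = ALIGN_UP(alloc_phys, 256);   /* 0x10000100     */

	/* Same offset from the start keeps the CPU and bus views in sync. */
	uint8_t *buf = alloc + (buf_phys - alloc_phys);

	assert(buf - alloc == (ptrdiff_t)(buf_phys - alloc_phys));
	return 0;
}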
|
/drivers/dma/dw-edma/ |
D | dw-edma-core.c |
     654  u32 alloc, off_alloc;    in dw_edma_channel_setup() local
     672  alloc = wr_alloc;    in dw_edma_channel_setup()
     678  alloc = rd_alloc;    in dw_edma_channel_setup()
     683  for (j = 0; (alloc || dw->nr_irqs == 1) && j < cnt; j++, i++) {    in dw_edma_channel_setup()
     711  pos = off_alloc + (j % alloc);    in dw_edma_channel_setup()
     773  static inline void dw_edma_dec_irq_alloc(int *nr_irqs, u32 *alloc, u16 cnt)    in dw_edma_dec_irq_alloc() argument
     775  if (*nr_irqs && *alloc < cnt) {    in dw_edma_dec_irq_alloc()
     776  (*alloc)++;    in dw_edma_dec_irq_alloc()
     781  static inline void dw_edma_add_irq_mask(u32 *mask, u32 alloc, u16 cnt)    in dw_edma_add_irq_mask() argument
     783  while (*mask * alloc < cnt)    in dw_edma_add_irq_mask()
|
/drivers/iommu/ |
D | ioasid.c |
      65  .alloc = default_alloc,
     119  return (a->free == b->free) && (a->alloc == b->alloc);    in use_same_ops()
     324  id = active_allocator->ops->alloc(min, max, adata);    in ioasid_alloc()
|
/drivers/gpu/host1x/ |
D | cdma.c |
      74  struct iova *alloc;    in host1x_pushbuffer_init() local
      99  alloc = alloc_iova(&host1x->iova, size >> shift,    in host1x_pushbuffer_init()
     101  if (!alloc) {    in host1x_pushbuffer_init()
     106  pb->dma = iova_dma_addr(&host1x->iova, alloc);    in host1x_pushbuffer_init()
     127  __free_iova(&host1x->iova, alloc);    in host1x_pushbuffer_init()
|
D | job.c |
     201  struct iova *alloc;    in pin_job() local
     234  alloc = alloc_iova(&host->iova, gather_size >> shift,    in pin_job()
     236  if (!alloc) {    in pin_job()
     242  iova_dma_addr(&host->iova, alloc),    in pin_job()
     245  __free_iova(&host->iova, alloc);    in pin_job()
     251  phys_addr = iova_dma_addr(&host->iova, alloc);    in pin_job()
|
/drivers/gpu/drm/ttm/ |
D | ttm_resource.c |
      36  if (!man->func || !man->func->alloc)    in ttm_resource_alloc()
      39  return man->func->alloc(man, bo, place, res);    in ttm_resource_alloc()
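ttm_resource_alloc() is a thin guarded dispatcher (the same shape as the ioasid ops call above): check that the resource manager actually provides an ops table and an alloc hook before calling through the function pointer. Reduced to a standalone sketch with hypothetical manager types:

#include <errno.h>
#include <stddef.h>
#include <stdio.h>

struct manager;

struct manager_funcs {
	int (*alloc)(struct manager *man, size_t size);
};

struct manager {
	const struct manager_funcs *func;   /* may be NULL, hooks optional */
};

static int resource_alloc(struct manager *man, size_t size)
{
	if (!man->func || !man->func->alloc)
		return -EINVAL;
	return man->func->alloc(man, size);
}

static int dummy_alloc(struct manager *man, size_t size)
{
	(void)man;
	printf("allocating %zu bytes\n", size);
	return 0;
}

static const struct manager_funcs dummy_funcs = { .alloc = dummy_alloc };

int main(void)
{
	struct manager man = { .func = &dummy_funcs };
	return resource_alloc(&man, 4096);
}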
|