
Searched refs:alloc (Results 1 – 25 of 156) sorted by relevance


/drivers/android/
binder_alloc.c
66 static size_t binder_alloc_buffer_size(struct binder_alloc *alloc, in binder_alloc_buffer_size() argument
69 if (list_is_last(&buffer->entry, &alloc->buffers)) in binder_alloc_buffer_size()
70 return alloc->buffer + alloc->buffer_size - buffer->user_data; in binder_alloc_buffer_size()
74 static void binder_insert_free_buffer(struct binder_alloc *alloc, in binder_insert_free_buffer() argument
77 struct rb_node **p = &alloc->free_buffers.rb_node; in binder_insert_free_buffer()
85 new_buffer_size = binder_alloc_buffer_size(alloc, new_buffer); in binder_insert_free_buffer()
89 alloc->pid, new_buffer_size, new_buffer); in binder_insert_free_buffer()
96 buffer_size = binder_alloc_buffer_size(alloc, buffer); in binder_insert_free_buffer()
104 rb_insert_color(&new_buffer->rb_node, &alloc->free_buffers); in binder_insert_free_buffer()
108 struct binder_alloc *alloc, struct binder_buffer *new_buffer) in binder_insert_allocated_buffer_locked() argument
[all …]
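
The binder_alloc.c hits above show a size-ordered rbtree walk: binder_insert_free_buffer() computes each buffer's size and descends left or right until it finds the leaf slot, then links and rebalances. A minimal sketch of the same pattern, assuming the kernel rbtree API and a hypothetical my_buffer type keyed directly by size:

#include <linux/rbtree.h>
#include <linux/types.h>

struct my_buffer {
	struct rb_node rb_node;
	size_t size;			/* sort key */
};

static void insert_free_buffer(struct rb_root *root, struct my_buffer *new)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent = NULL;

	while (*p) {
		struct my_buffer *buf;

		parent = *p;
		buf = rb_entry(parent, struct my_buffer, rb_node);

		/* Descend by size; ties go right so equal sizes coexist. */
		if (new->size < buf->size)
			p = &(*p)->rb_left;
		else
			p = &(*p)->rb_right;
	}
	rb_link_node(&new->rb_node, parent, p);
	rb_insert_color(&new->rb_node, root);	/* rebalance */
}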
binder_alloc_selftest.c
101 static bool check_buffer_pages_allocated(struct binder_alloc *alloc, in check_buffer_pages_allocated() argument
112 page_index = (page_addr - alloc->buffer) / PAGE_SIZE; in check_buffer_pages_allocated()
113 if (!alloc->pages[page_index].page_ptr || in check_buffer_pages_allocated()
114 !list_empty(&alloc->pages[page_index].lru)) { in check_buffer_pages_allocated()
116 alloc->pages[page_index].page_ptr ? in check_buffer_pages_allocated()
124 static void binder_selftest_alloc_buf(struct binder_alloc *alloc, in binder_selftest_alloc_buf() argument
131 buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0); in binder_selftest_alloc_buf()
133 !check_buffer_pages_allocated(alloc, buffers[i], in binder_selftest_alloc_buf()
141 static void binder_selftest_free_buf(struct binder_alloc *alloc, in binder_selftest_free_buf() argument
148 binder_alloc_free_buf(alloc, buffers[seq[i]]); in binder_selftest_free_buf()
[all …]
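
binder_alloc_selftest.c drives the allocator end to end: it allocates buffers of varying sizes, verifies with check_buffer_pages_allocated() that every backing page is present and not on the shrinker LRU, then frees them in orders given by a seq array so different interleavings get exercised. A stripped-down sketch of the loop shape, with the binder entry points replaced by hypothetical my_alloc()/my_free():

#include <linux/types.h>

#define BUFFER_NUM	5	/* illustrative count */

/* Hypothetical stand-ins for binder_alloc_new_buf() and
 * binder_alloc_free_buf(). */
void *my_alloc(size_t size);
void my_free(void *buf);

static void selftest_alloc_free(const size_t *sizes, const int *seq)
{
	void *buffers[BUFFER_NUM];
	int i;

	for (i = 0; i < BUFFER_NUM; i++)
		buffers[i] = my_alloc(sizes[i]);

	/* Free in seq order, not allocation order, so freeing patterns
	 * other than FIFO/LIFO are covered. */
	for (i = 0; i < BUFFER_NUM; i++)
		my_free(buffers[seq[i]]);
}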
binder_alloc.h
74 struct binder_alloc *alloc; member
118 void binder_selftest_alloc(struct binder_alloc *alloc);
120 static inline void binder_selftest_alloc(struct binder_alloc *alloc) {} in binder_selftest_alloc() argument
125 extern struct binder_buffer *binder_alloc_new_buf(struct binder_alloc *alloc,
130 extern void binder_alloc_init(struct binder_alloc *alloc);
132 extern void binder_alloc_vma_close(struct binder_alloc *alloc);
134 binder_alloc_prepare_to_free(struct binder_alloc *alloc,
136 extern void binder_alloc_free_buf(struct binder_alloc *alloc,
138 extern int binder_alloc_mmap_handler(struct binder_alloc *alloc,
140 extern void binder_alloc_deferred_release(struct binder_alloc *alloc);
[all …]
binder_trace.h
298 TP_PROTO(struct binder_alloc *alloc, bool allocate,
300 TP_ARGS(alloc, allocate, start, end),
308 __entry->proc = alloc->pid;
310 __entry->offset = start - alloc->buffer;
319 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
320 TP_ARGS(alloc, page_index),
326 __entry->proc = alloc->pid;
334 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
335 TP_ARGS(alloc, page_index));
338 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
[all …]
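
The binder_trace.h matches are tracepoint definitions: TP_PROTO/TP_ARGS declare the probe signature, and the remaining macros copy fields out of the binder_alloc when the event fires. A minimal sketch of the TRACE_EVENT shape with a hypothetical event name; in a real trace header this sits between the TRACE_SYSTEM boilerplate and the final include of trace/define_trace.h:

#include <linux/tracepoint.h>

TRACE_EVENT(my_alloc_page,
	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
	TP_ARGS(alloc, page_index),

	TP_STRUCT__entry(
		__field(int, proc)
		__field(size_t, page_index)
	),

	TP_fast_assign(
		__entry->proc = alloc->pid;	/* owning process, as above */
		__entry->page_index = page_index;
	),

	TP_printk("proc=%d page_index=%zu",
		  __entry->proc, __entry->page_index)
);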
binder.c
567 struct binder_alloc alloc; member
2255 binder_alloc_copy_from_buffer(&proc->alloc, object, buffer, in binder_get_object()
2325 binder_alloc_copy_from_buffer(&proc->alloc, &object_offset, in binder_validate_ptr()
2407 binder_alloc_copy_from_buffer(&proc->alloc, &last_obj_offset, in binder_validate_fixup()
2441 binder_alloc_copy_from_buffer(&proc->alloc, &object_offset, in binder_transaction_buffer_release()
2557 binder_alloc_copy_from_buffer(&proc->alloc, in binder_transaction_buffer_release()
2797 binder_alloc_copy_from_buffer(&target_proc->alloc, in binder_translate_fd_array()
2803 binder_alloc_copy_to_buffer(&target_proc->alloc, in binder_translate_fd_array()
2818 binder_alloc_copy_from_buffer(&target_proc->alloc, in binder_translate_fd_array()
2872 binder_alloc_copy_to_buffer(&target_proc->alloc, b, buffer_offset, in binder_fixup_parent()
[all …]
/drivers/infiniband/hw/cxgb4/
id_table.c
44 u32 c4iw_id_alloc(struct c4iw_id_table *alloc) in c4iw_id_alloc() argument
49 spin_lock_irqsave(&alloc->lock, flags); in c4iw_id_alloc()
51 obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last); in c4iw_id_alloc()
52 if (obj >= alloc->max) in c4iw_id_alloc()
53 obj = find_first_zero_bit(alloc->table, alloc->max); in c4iw_id_alloc()
55 if (obj < alloc->max) { in c4iw_id_alloc()
56 if (alloc->flags & C4IW_ID_TABLE_F_RANDOM) in c4iw_id_alloc()
57 alloc->last += prandom_u32() % RANDOM_SKIP; in c4iw_id_alloc()
59 alloc->last = obj + 1; in c4iw_id_alloc()
60 if (alloc->last >= alloc->max) in c4iw_id_alloc()
[all …]
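
c4iw_id_alloc() is a spinlock-protected bitmap allocator: it searches forward from the last handed-out position, wraps to the start when it runs off the end, and optionally randomizes the next starting point to make ID reuse less predictable. A self-contained sketch of the round-robin core, using the kernel bitmap helpers (the C4IW_ID_TABLE_F_RANDOM skip is omitted):

#include <linux/bitmap.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct id_table {
	spinlock_t lock;
	unsigned long *table;	/* one bit per ID; set = in use */
	u32 max;		/* number of IDs */
	u32 last;		/* next search start, round robin */
};

/* Returns a free ID, or t->max if the table is exhausted. */
static u32 id_alloc(struct id_table *t)
{
	unsigned long flags;
	u32 obj;

	spin_lock_irqsave(&t->lock, flags);

	obj = find_next_zero_bit(t->table, t->max, t->last);
	if (obj >= t->max)
		obj = find_first_zero_bit(t->table, t->max);	/* wrap */

	if (obj < t->max) {
		set_bit(obj, t->table);
		t->last = obj + 1;
		if (t->last >= t->max)
			t->last = 0;
	}

	spin_unlock_irqrestore(&t->lock, flags);
	return obj;
}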
/drivers/infiniband/hw/mthca/
mthca_allocator.c
40 u32 mthca_alloc(struct mthca_alloc *alloc) in mthca_alloc() argument
45 spin_lock_irqsave(&alloc->lock, flags); in mthca_alloc()
47 obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last); in mthca_alloc()
48 if (obj >= alloc->max) { in mthca_alloc()
49 alloc->top = (alloc->top + alloc->max) & alloc->mask; in mthca_alloc()
50 obj = find_first_zero_bit(alloc->table, alloc->max); in mthca_alloc()
53 if (obj < alloc->max) { in mthca_alloc()
54 set_bit(obj, alloc->table); in mthca_alloc()
55 obj |= alloc->top; in mthca_alloc()
59 spin_unlock_irqrestore(&alloc->lock, flags); in mthca_alloc()
[all …]
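
mthca_alloc() follows the same bitmap scheme but additionally ORs a rotating top prefix into the returned handle: each time the search wraps, the prefix advances (masked to the handle width), so a recycled bitmap slot still produces a distinct object number across generations. The difference from the cxgb4 variant, condensed from the snippet above with comments added:

	obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);
	if (obj >= alloc->max) {
		/* Wrapped: advance the generation prefix, then rescan. */
		alloc->top = (alloc->top + alloc->max) & alloc->mask;
		obj = find_first_zero_bit(alloc->table, alloc->max);
	}

	if (obj < alloc->max) {
		set_bit(obj, alloc->table);
		obj |= alloc->top;	/* handle = generation | slot */
	}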
mthca_uar.c
40 uar->index = mthca_alloc(&dev->uar_table.alloc); in mthca_uar_alloc()
51 mthca_free(&dev->uar_table.alloc, uar->index); in mthca_uar_free()
58 ret = mthca_alloc_init(&dev->uar_table.alloc, in mthca_init_uar_table()
67 mthca_alloc_cleanup(&dev->uar_table.alloc); in mthca_init_uar_table()
77 mthca_alloc_cleanup(&dev->uar_table.alloc); in mthca_cleanup_uar_table()
mthca_pd.c
46 pd->pd_num = mthca_alloc(&dev->pd_table.alloc); in mthca_pd_alloc()
56 mthca_free(&dev->pd_table.alloc, pd->pd_num); in mthca_pd_alloc()
66 mthca_free(&dev->pd_table.alloc, pd->pd_num); in mthca_pd_free()
71 return mthca_alloc_init(&dev->pd_table.alloc, in mthca_init_pd_table()
80 mthca_alloc_cleanup(&dev->pd_table.alloc); in mthca_cleanup_pd_table()
mthca_dev.h
195 struct mthca_alloc alloc; member
201 struct mthca_alloc alloc; member
227 struct mthca_alloc alloc; member
240 struct mthca_alloc alloc; member
247 struct mthca_alloc alloc; member
254 struct mthca_alloc alloc; member
270 struct mthca_alloc alloc; member
275 struct mthca_alloc alloc; member
414 u32 mthca_alloc(struct mthca_alloc *alloc);
415 void mthca_free(struct mthca_alloc *alloc, u32 obj);
[all …]
mthca_mcg.c
148 index = mthca_alloc(&dev->mcg_table.alloc); in mthca_multicast_attach()
206 mthca_free(&dev->mcg_table.alloc, index); in mthca_multicast_attach()
286 mthca_free(&dev->mcg_table.alloc, amgm_index_to_free); in mthca_multicast_detach()
305 mthca_free(&dev->mcg_table.alloc, index); in mthca_multicast_detach()
320 err = mthca_alloc_init(&dev->mcg_table.alloc, in mthca_init_mcg_table()
334 mthca_alloc_cleanup(&dev->mcg_table.alloc); in mthca_cleanup_mcg_table()
mthca_av.c
172 index = mthca_alloc(&dev->av_table.alloc); in mthca_create_ah()
247 mthca_free(&dev->av_table.alloc, in mthca_destroy_ah()
336 err = mthca_alloc_init(&dev->av_table.alloc, in mthca_init_av_table()
366 mthca_alloc_cleanup(&dev->av_table.alloc); in mthca_init_av_table()
378 mthca_alloc_cleanup(&dev->av_table.alloc); in mthca_cleanup_av_table()
mthca_srq.c
229 srq->srqn = mthca_alloc(&dev->srq_table.alloc); in mthca_alloc_srq()
314 mthca_free(&dev->srq_table.alloc, srq->srqn); in mthca_alloc_srq()
360 mthca_free(&dev->srq_table.alloc, srq->srqn); in mthca_free_srq()
674 err = mthca_alloc_init(&dev->srq_table.alloc, in mthca_init_srq_table()
684 mthca_alloc_cleanup(&dev->srq_table.alloc); in mthca_init_srq_table()
695 mthca_alloc_cleanup(&dev->srq_table.alloc); in mthca_cleanup_srq_table()
/drivers/char/agp/
compat_ioctl.c
151 struct agp_allocate32 alloc; in compat_agpioc_allocate_wrap() local
154 if (copy_from_user(&alloc, arg, sizeof(alloc))) in compat_agpioc_allocate_wrap()
157 memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type); in compat_agpioc_allocate_wrap()
162 alloc.key = memory->key; in compat_agpioc_allocate_wrap()
163 alloc.physical = memory->physical; in compat_agpioc_allocate_wrap()
165 if (copy_to_user(arg, &alloc, sizeof(alloc))) { in compat_agpioc_allocate_wrap()
frontend.c
877 struct agp_allocate alloc; in agpioc_allocate_wrap() local
880 if (copy_from_user(&alloc, arg, sizeof(struct agp_allocate))) in agpioc_allocate_wrap()
883 if (alloc.type >= AGP_USER_TYPES) in agpioc_allocate_wrap()
886 memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type); in agpioc_allocate_wrap()
891 alloc.key = memory->key; in agpioc_allocate_wrap()
892 alloc.physical = memory->physical; in agpioc_allocate_wrap()
894 if (copy_to_user(arg, &alloc, sizeof(struct agp_allocate))) { in agpioc_allocate_wrap()
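
Both AGP entry points follow the standard ioctl round trip: copy the request struct in from user space, validate the untrusted fields, perform the allocation, write the results into the struct, and copy it back out, undoing the allocation if the copy-out faults. A minimal sketch of that shape; the request struct, bound, and backend functions here are hypothetical stand-ins:

#include <linux/uaccess.h>
#include <linux/errno.h>
#include <linux/types.h>

#define MY_USER_TYPES	4		/* hypothetical bound on type */

struct my_alloc_req {
	u32 pg_count;	/* in: pages requested */
	u32 type;	/* in: allocation type */
	u32 key;	/* out: handle for later ioctls */
};

struct my_memory { u32 key; };

/* Hypothetical backend pair. */
struct my_memory *my_allocate_memory(u32 pg_count, u32 type);
void my_free_memory(struct my_memory *mem);

static int my_allocate_ioctl(void __user *arg)
{
	struct my_alloc_req req;
	struct my_memory *mem;

	if (copy_from_user(&req, arg, sizeof(req)))
		return -EFAULT;

	if (req.type >= MY_USER_TYPES)	/* validate untrusted input */
		return -EINVAL;

	mem = my_allocate_memory(req.pg_count, req.type);
	if (!mem)
		return -ENOMEM;

	req.key = mem->key;		/* report the handle back */

	if (copy_to_user(arg, &req, sizeof(req))) {
		my_free_memory(mem);	/* undo on failed copy-out */
		return -EFAULT;
	}
	return 0;
}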
/drivers/xen/xenbus/
xenbus_comms.c
219 void *alloc; in process_msg() member
272 state.alloc = kmalloc(len, GFP_NOIO | __GFP_HIGH); in process_msg()
273 if (!state.alloc) in process_msg()
279 state.body = state.alloc; in process_msg()
327 state.alloc = NULL; in process_msg()
333 kfree(state.alloc); in process_msg()
334 state.alloc = NULL; in process_msg()
/drivers/md/bcache/
alloc.c
547 struct bkey *alloc) in pick_data_bucket() argument
563 if (!ret->sectors_free && KEY_PTRS(alloc)) { in pick_data_bucket()
565 bkey_copy(&ret->key, alloc); in pick_data_bucket()
566 bkey_init(alloc); in pick_data_bucket()
589 BKEY_PADDED(key) alloc; in bch_alloc_sectors()
599 bkey_init(&alloc.key); in bch_alloc_sectors()
602 while (!(b = pick_data_bucket(c, k, write_point, &alloc.key))) { in bch_alloc_sectors()
609 if (bch_bucket_alloc_set(c, watermark, &alloc.key, 1, wait)) in bch_alloc_sectors()
620 if (KEY_PTRS(&alloc.key)) in bch_alloc_sectors()
621 bkey_put(c, &alloc.key); in bch_alloc_sectors()
/drivers/uwb/
drp-ie.c
299 const struct uwb_drp_alloc *alloc; in uwb_drp_ie_to_bm() local
308 alloc = &drp_ie->allocs[cnt]; in uwb_drp_ie_to_bm()
309 zone_bm = le16_to_cpu(alloc->zone_bm); in uwb_drp_ie_to_bm()
310 mas_bm = le16_to_cpu(alloc->mas_bm); in uwb_drp_ie_to_bm()
/drivers/net/ethernet/
ec_bhf.c
126 u8 *alloc; member
350 buf->alloc = dma_alloc_coherent(dev, buf->alloc_len, &buf->alloc_phys, in ec_bhf_alloc_dma_mem()
352 if (buf->alloc == NULL) { in ec_bhf_alloc_dma_mem()
358 buf->buf = buf->alloc + (buf->buf_phys - buf->alloc_phys); in ec_bhf_alloc_dma_mem()
435 dma_free_coherent(dev, priv->rx_buf.alloc_len, priv->rx_buf.alloc, in ec_bhf_open()
453 priv->tx_buf.alloc, priv->tx_buf.alloc_phys); in ec_bhf_stop()
455 priv->rx_buf.alloc, priv->rx_buf.alloc_phys); in ec_bhf_stop()
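
ec_bhf keeps two views of one coherent DMA region: the raw allocation (alloc/alloc_phys, sized with extra slack) and an aligned window inside it (buf/buf_phys). The aligned CPU pointer is recovered by applying the device-address offset to the virtual base, since both mappings share the same layout. A sketch of that arithmetic, assuming the caller supplies the alignment:

#include <linux/dma-mapping.h>
#include <linux/kernel.h>
#include <linux/errno.h>

struct dma_view {
	u8 *alloc;		/* raw CPU pointer */
	dma_addr_t alloc_phys;	/* raw device address */
	size_t alloc_len;
	u8 *buf;		/* aligned CPU pointer */
	dma_addr_t buf_phys;	/* aligned device address */
};

static int alloc_aligned_dma(struct device *dev, struct dma_view *b,
			     size_t len, size_t align)
{
	b->alloc_len = len + align - 1;	/* pad so an aligned window fits */
	b->alloc = dma_alloc_coherent(dev, b->alloc_len, &b->alloc_phys,
				      GFP_KERNEL);
	if (!b->alloc)
		return -ENOMEM;

	b->buf_phys = ALIGN(b->alloc_phys, align);
	/* The CPU mapping mirrors the device addresses, so the same
	 * offset locates the aligned window virtually. */
	b->buf = b->alloc + (b->buf_phys - b->alloc_phys);
	return 0;
}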
/drivers/gpu/host1x/
cdma.c
75 struct iova *alloc; in host1x_pushbuffer_init() local
100 alloc = alloc_iova(&host1x->iova, size >> shift, in host1x_pushbuffer_init()
102 if (!alloc) { in host1x_pushbuffer_init()
107 pb->dma = iova_dma_addr(&host1x->iova, alloc); in host1x_pushbuffer_init()
128 __free_iova(&host1x->iova, alloc); in host1x_pushbuffer_init()
job.c
214 struct iova *alloc; in pin_job() local
231 alloc = alloc_iova(&host->iova, gather_size >> shift, in pin_job()
233 if (!alloc) { in pin_job()
239 iova_dma_addr(&host->iova, alloc), in pin_job()
242 __free_iova(&host->iova, alloc); in pin_job()
248 iova_dma_addr(&host->iova, alloc); in pin_job()
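
Both host1x call sites reserve IOVA space with alloc_iova(), convert the reservation to a bus address with iova_dma_addr(), and give it back with __free_iova() on teardown or error. A minimal sketch of that lifecycle, assuming an initialized iova_domain whose granule shift and limit pfn the caller passes in:

#include <linux/iova.h>

/* Reserve size bytes of IOVA space; returns the bus address and stores
 * the reservation in *out, or returns 0 on failure. */
static dma_addr_t reserve_iova_range(struct iova_domain *iovad, size_t size,
				     unsigned long shift,
				     unsigned long limit_pfn,
				     struct iova **out)
{
	struct iova *alloc;

	alloc = alloc_iova(iovad, size >> shift, limit_pfn, true);
	if (!alloc)
		return 0;

	*out = alloc;
	return iova_dma_addr(iovad, alloc);
}

static void release_iova_range(struct iova_domain *iovad, struct iova *alloc)
{
	__free_iova(iovad, alloc);	/* return the range to the domain */
}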
/drivers/base/
component.c
32 size_t alloc; member
223 if (match->alloc == num) in component_match_realloc()
236 match->alloc = num; in component_match_realloc()
269 if (match->num == match->alloc) { in component_match_add_release()
270 size_t new_size = match->alloc + 16; in component_match_add_release()
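
component.c tracks the match array with separate num (used) and alloc (capacity) fields and grows the backing store by a fixed chunk of 16 when they meet, trading a little slack for fewer reallocations. A generic sketch of the pattern with krealloc(); the chunk size and element type are illustrative:

#include <linux/slab.h>
#include <linux/errno.h>

struct match_array {
	size_t alloc;	/* capacity, in elements */
	size_t num;	/* elements in use */
	void **elems;
};

static int match_array_add(struct match_array *m, void *elem)
{
	if (m->num == m->alloc) {
		size_t new_size = m->alloc + 16;	/* grow by chunk */
		void **n = krealloc(m->elems, new_size * sizeof(*n),
				    GFP_KERNEL);

		if (!n)
			return -ENOMEM;	/* old array still valid */
		m->elems = n;
		m->alloc = new_size;
	}
	m->elems[m->num++] = elem;
	return 0;
}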
/drivers/scsi/arm/
queue.c
73 queue->alloc = q = kmalloc(sizeof(QE_t) * nqueues, GFP_KERNEL); in queue_initialise()
82 return queue->alloc != NULL; in queue_initialise()
94 kfree(queue->alloc); in queue_free()
/drivers/misc/mic/scif/
scif_rma.c
199 struct scif_allocmsg *alloc = &window->alloc_handle; in scif_destroy_incomplete_window() local
204 err = wait_event_timeout(alloc->allocwq, in scif_destroy_incomplete_window()
205 alloc->state != OP_IN_PROGRESS, in scif_destroy_incomplete_window()
211 if (alloc->state == OP_COMPLETED) { in scif_destroy_incomplete_window()
763 struct scif_allocmsg *alloc = &window->alloc_handle; in scif_send_alloc_request() local
766 alloc->state = OP_IN_PROGRESS; in scif_send_alloc_request()
767 init_waitqueue_head(&alloc->allocwq); in scif_send_alloc_request()
789 struct scif_allocmsg *alloc = &window->alloc_handle; in scif_prep_remote_window() local
804 err = wait_event_timeout(alloc->allocwq, in scif_prep_remote_window()
805 alloc->state != OP_IN_PROGRESS, in scif_prep_remote_window()
[all …]
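
The SCIF paths implement an asynchronous allocation handshake: the requester sets alloc_handle.state to OP_IN_PROGRESS, initializes the allocwq waitqueue, sends the request, and later blocks in wait_event_timeout() until the peer moves the state on. A minimal sketch of the waiting side; the state enum here is illustrative:

#include <linux/wait.h>
#include <linux/errno.h>

enum alloc_state { OP_IN_PROGRESS, OP_COMPLETED, OP_FAILED };

struct alloc_msg {
	wait_queue_head_t allocwq;
	enum alloc_state state;
};

static int wait_for_remote_alloc(struct alloc_msg *alloc, long timeout)
{
	long left;

	/* wait_event_timeout() returns 0 if the timeout elapsed with the
	 * condition still false, otherwise the remaining jiffies. */
	left = wait_event_timeout(alloc->allocwq,
				  alloc->state != OP_IN_PROGRESS,
				  timeout);
	if (!left)
		return -ETIMEDOUT;

	return alloc->state == OP_COMPLETED ? 0 : -ENOMEM;
}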
/drivers/media/rc/
rc-main.c
184 rc_map->alloc = roundup_pow_of_two(size * sizeof(struct rc_map_table)); in ir_create_table()
185 rc_map->size = rc_map->alloc / sizeof(struct rc_map_table); in ir_create_table()
186 rc_map->scan = kmalloc(rc_map->alloc, GFP_KERNEL); in ir_create_table()
194 rc_map->size, rc_map->alloc); in ir_create_table()
225 unsigned int oldalloc = rc_map->alloc; in ir_resize_table()
232 if (rc_map->alloc >= IR_TAB_MAX_SIZE) in ir_resize_table()
256 rc_map->alloc = newalloc; in ir_resize_table()
257 rc_map->size = rc_map->alloc / sizeof(struct rc_map_table); in ir_resize_table()
443 rc_map->size, rc_map->alloc); in ir_setkeytable()
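
ir_create_table() sizes the scancode table by rounding the byte count up to a power of two and then derives the entry count back from it, so alloc (bytes) and size (entries) can never disagree, and ir_resize_table() can grow the table later while keeping that invariant. A sketch of the sizing, with an illustrative entry type standing in for struct rc_map_table:

#include <linux/log2.h>
#include <linux/slab.h>
#include <linux/errno.h>

struct my_map_entry {	/* stand-in for struct rc_map_table */
	u32 scancode;
	u32 keycode;
};

struct my_map {
	unsigned int alloc;	/* capacity in bytes, power of two */
	unsigned int size;	/* capacity in entries, derived */
	struct my_map_entry *scan;
};

static int my_map_init(struct my_map *map, size_t entries)
{
	map->alloc = roundup_pow_of_two(entries * sizeof(*map->scan));
	map->size = map->alloc / sizeof(*map->scan);
	map->scan = kmalloc(map->alloc, GFP_KERNEL);
	return map->scan ? 0 : -ENOMEM;
}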
