
Searched refs:slab (Results 1 – 25 of 166) sorted by relevance


/external/rust/crates/slab/tests/
slab.rs:1 extern crate slab;
3 use slab::*;
7 let mut slab = Slab::new(); in insert_get_remove_one() localVariable
8 assert!(slab.is_empty()); in insert_get_remove_one()
10 let key = slab.insert(10); in insert_get_remove_one()
12 assert_eq!(slab[key], 10); in insert_get_remove_one()
13 assert_eq!(slab.get(key), Some(&10)); in insert_get_remove_one()
14 assert!(!slab.is_empty()); in insert_get_remove_one()
15 assert!(slab.contains(key)); in insert_get_remove_one()
17 assert_eq!(slab.remove(key), 10); in insert_get_remove_one()
[all …]
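These hits come from the slab crate's own test suite. A minimal, self-contained sketch of the same insert/get/remove round trip (assuming slab 0.4, as the Cargo.toml hits further down indicate) would be:

```rust
use slab::Slab;

fn main() {
    let mut slab = Slab::new();
    assert!(slab.is_empty());

    // insert() returns a usize key used for later lookup and removal.
    let key = slab.insert(10);
    assert_eq!(slab[key], 10);
    assert_eq!(slab.get(key), Some(&10));
    assert!(slab.contains(key));

    // remove() hands the value back and frees the slot for reuse.
    assert_eq!(slab.remove(key), 10);
    assert!(slab.is_empty());
}
```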
/external/mesa3d/src/gallium/drivers/nouveau/
nouveau_mm.c:53 mm_slab_alloc(struct mm_slab *slab) in mm_slab_alloc() argument
57 if (slab->free == 0) in mm_slab_alloc()
60 for (i = 0; i < (slab->count + 31) / 32; ++i) { in mm_slab_alloc()
61 b = ffs(slab->bits[i]) - 1; in mm_slab_alloc()
64 assert(n < slab->count); in mm_slab_alloc()
65 slab->free--; in mm_slab_alloc()
66 slab->bits[i] &= ~(1 << b); in mm_slab_alloc()
74 mm_slab_free(struct mm_slab *slab, int i) in mm_slab_free() argument
76 assert(i < slab->count); in mm_slab_free()
77 slab->bits[i / 32] |= 1 << (i % 32); in mm_slab_free()
[all …]
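The nouveau hits show a bitmap-backed slab: mm_slab_alloc scans 32-bit words with ffs() for a set bit (a free slot), clears it, and decrements the free count, while mm_slab_free sets the bit again. A rough Rust rendering of that bookkeeping (types and names are illustrative, not Mesa's) might look like:

```rust
/// Minimal bitmap slab: one bit per slot, 1 = free, 0 = in use.
struct BitmapSlab {
    bits: Vec<u32>,
    count: usize,
    free: usize,
}

impl BitmapSlab {
    fn new(count: usize) -> Self {
        let words = (count + 31) / 32;
        let mut bits = vec![u32::MAX; words];
        // Clear the padding bits past `count` so they are never handed out.
        for i in count..words * 32 {
            bits[i / 32] &= !(1 << (i % 32));
        }
        BitmapSlab { bits, count, free: count }
    }

    /// Find the first set bit (a free slot), clear it, and return its index.
    fn alloc(&mut self) -> Option<usize> {
        if self.free == 0 {
            return None;
        }
        for (i, word) in self.bits.iter_mut().enumerate() {
            if *word != 0 {
                let b = word.trailing_zeros() as usize; // plays the role of ffs() - 1
                *word &= !(1 << b);
                self.free -= 1;
                let n = i * 32 + b;
                debug_assert!(n < self.count);
                return Some(n);
            }
        }
        None
    }

    /// Mark slot `i` free again by setting its bit.
    fn free_slot(&mut self, i: usize) {
        assert!(i < self.count);
        self.bits[i / 32] |= 1 << (i % 32);
        self.free += 1;
    }
}
```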
/external/rust/crates/slab/
README.md:5 [![Crates.io](https://img.shields.io/crates/v/slab.svg?maxAge=2592000)](https://crates.io/crates/sl…
6 …ild Status](https://travis-ci.org/carllerche/slab.svg?branch=master)](https://travis-ci.org/carlle…
8 [Documentation](https://docs.rs/slab/0.4.2/slab/)
12 To use `slab`, first add this to your `Cargo.toml`:
16 slab = "0.4.2"
22 extern crate slab;
24 use slab::Slab;
26 let mut slab = Slab::new();
28 let hello = slab.insert("hello");
29 let world = slab.insert("world");
[all …]
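The README hits outline the crate's basic usage. Completing that fragment into a runnable program (assuming `slab = "0.4"` in Cargo.toml, per the manifest hits below):

```rust
use slab::Slab;

fn main() {
    let mut slab = Slab::new();

    // Each insert() returns a plain usize key into the slab's storage.
    let hello = slab.insert("hello");
    let world = slab.insert("world");

    assert_eq!(slab[hello], "hello");
    assert_eq!(slab[world], "world");

    // iter() yields (key, &value) pairs for the occupied slots.
    for (key, value) in slab.iter() {
        println!("{} -> {}", key, value);
    }
}
```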
Cargo.toml.orig:3 name = "slab"
17 documentation = "https://docs.rs/slab/0.4.2/slab/"
18 homepage = "https://github.com/carllerche/slab"
19 repository = "https://github.com/carllerche/slab"
21 keywords = ["slab", "allocator"]
Cargo.toml:14 name = "slab"
18 homepage = "https://github.com/carllerche/slab"
19 documentation = "https://docs.rs/slab/0.4.2/slab/"
21 keywords = ["slab", "allocator"]
24 repository = "https://github.com/carllerche/slab"
METADATA:1 name: "slab"
6 value: "https://crates.io/crates/slab"
10 value: "https://static.crates.io/crates/slab/slab-0.4.2.crate"
Android.bp:24 crate_name: "slab",
35 crate_name: "slab",
57 crate_name: "slab",
58 srcs: ["tests/slab.rs"],
/external/mesa3d/src/gallium/auxiliary/pipebuffer/
pb_bufmgr_slab.c:62 struct pb_slab *slab; member
193 struct pb_slab *slab = buf->slab; in pb_slab_buffer_destroy() local
194 struct pb_slab_manager *mgr = slab->mgr; in pb_slab_buffer_destroy()
204 list_addtail(list, &slab->freeBuffers); in pb_slab_buffer_destroy()
205 slab->numFree++; in pb_slab_buffer_destroy()
207 if (slab->head.next == &slab->head) in pb_slab_buffer_destroy()
208 list_addtail(&slab->head, &mgr->slabs); in pb_slab_buffer_destroy()
211 if (slab->numFree == slab->numBuffers) { in pb_slab_buffer_destroy()
212 list = &slab->head; in pb_slab_buffer_destroy()
214 pb_unmap(slab->bo); in pb_slab_buffer_destroy()
[all …]
pb_slab.c:56 struct pb_slab *slab = entry->slab; in pb_slab_reclaim() local
59 list_add(&entry->head, &slab->free); in pb_slab_reclaim()
60 slab->num_free++; in pb_slab_reclaim()
63 if (!slab->head.next) { in pb_slab_reclaim()
65 list_addtail(&slab->head, &group->slabs); in pb_slab_reclaim()
68 if (slab->num_free >= slab->num_entries) { in pb_slab_reclaim()
69 list_del(&slab->head); in pb_slab_reclaim()
70 slabs->slab_free(slabs->priv, slab); in pb_slab_reclaim()
103 struct pb_slab *slab; in pb_slab_alloc() local
123 slab = LIST_ENTRY(struct pb_slab, group->slabs.next, head); in pb_slab_alloc()
[all …]
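Both pipebuffer files implement the same reclaim policy: a freed buffer goes back on its slab's free list, a slab that had been full is re-linked into the manager's slab list, and once every entry in a slab is free the whole slab is released. A deliberately simplified Rust model of the release-when-empty part (it leaves out the intrusive-list re-linking) could be:

```rust
struct Slab {
    num_entries: usize,
    num_free: usize,
}

struct Group {
    /// Slabs owned by this size bucket.
    slabs: Vec<Slab>,
}

/// Return one entry to `slabs[slab_idx]`: bump its free count and, once
/// every entry is free again, drop the slab entirely (the C code calls
/// slabs->slab_free() / pb_unmap() at that point).
fn reclaim(group: &mut Group, slab_idx: usize) {
    let slab = &mut group.slabs[slab_idx];
    slab.num_free += 1;
    if slab.num_free == slab.num_entries {
        group.slabs.remove(slab_idx);
    }
}

fn main() {
    let mut group = Group { slabs: vec![Slab { num_entries: 2, num_free: 1 }] };
    reclaim(&mut group, 0); // the last entry comes back, so the slab is released
    assert!(group.slabs.is_empty());
}
```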
/external/jemalloc_new/include/jemalloc/internal/
arena_inlines_b.h:23 if (unlikely(!alloc_ctx->slab)) { in arena_prof_tctx_get()
43 if (unlikely(!alloc_ctx->slab)) { in arena_prof_tctx_set()
168 bool slab; in arena_dalloc_no_tcache() local
170 true, &szind, &slab); in arena_dalloc_no_tcache()
177 assert(slab == extent_slab_get(extent)); in arena_dalloc_no_tcache()
180 if (likely(slab)) { in arena_dalloc_no_tcache()
201 bool slab; in arena_dalloc() local
205 slab = alloc_ctx->slab; in arena_dalloc()
210 (uintptr_t)ptr, true, &szind, &slab); in arena_dalloc()
219 assert(slab == extent_slab_get(extent)); in arena_dalloc()
[all …]
rtree.h:281 rtree_leaf_elm_t *elm, bool slab) { in rtree_leaf_elm_slab_write() argument
287 (((uintptr_t)0x1 << LG_VADDR) - 1)) | ((uintptr_t)slab); in rtree_leaf_elm_slab_write()
290 atomic_store_b(&elm->le_slab, slab, ATOMIC_RELEASE); in rtree_leaf_elm_slab_write()
296 extent_t *extent, szind_t szind, bool slab) { in rtree_leaf_elm_write() argument
300 ((uintptr_t)slab); in rtree_leaf_elm_write()
303 rtree_leaf_elm_slab_write(tsdn, rtree, elm, slab); in rtree_leaf_elm_write()
315 rtree_leaf_elm_t *elm, szind_t szind, bool slab) { in rtree_leaf_elm_szind_slab_update() argument
316 assert(!slab || szind < NBINS); in rtree_leaf_elm_szind_slab_update()
322 rtree_leaf_elm_slab_write(tsdn, rtree, elm, slab); in rtree_leaf_elm_szind_slab_update()
397 extent_t *extent, szind_t szind, bool slab) { in rtree_write() argument
[all …]
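The rtree hits fold the per-extent slab flag into the same word as the extent pointer: mask the address down to LG_VADDR bits, then OR the flag in. A generic Rust sketch of that low-bit tagging trick (not jemalloc's actual le_bits layout) is:

```rust
/// Pack a boolean tag into the low bit of an aligned address and pull it
/// back out. This works whenever valid addresses are at least 2-byte
/// aligned, so bit 0 of a real pointer is always zero.
fn pack(addr: usize, slab: bool) -> usize {
    debug_assert_eq!(addr & 1, 0, "address must be aligned");
    addr | (slab as usize)
}

fn unpack(word: usize) -> (usize, bool) {
    (word & !1, word & 1 == 1)
}

fn main() {
    let addr = 0x7f00_0000_usize; // stand-in for an aligned extent address
    assert_eq!(unpack(pack(addr, true)), (addr, true));
    assert_eq!(unpack(pack(addr, false)), (addr, false));
}
```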
/external/jemalloc_new/src/
arena.c:56 static void arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
58 static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
224 arena_slab_reg_alloc(extent_t *slab, const bin_info_t *bin_info) { in arena_slab_reg_alloc() argument
226 arena_slab_data_t *slab_data = extent_slab_data_get(slab); in arena_slab_reg_alloc()
229 assert(extent_nfree_get(slab) > 0); in arena_slab_reg_alloc()
233 ret = (void *)((uintptr_t)extent_addr_get(slab) + in arena_slab_reg_alloc()
235 extent_nfree_dec(slab); in arena_slab_reg_alloc()
243 arena_slab_regind(extent_t *slab, szind_t binind, const void *ptr) { in arena_slab_regind() argument
247 assert((uintptr_t)ptr >= (uintptr_t)extent_addr_get(slab)); in arena_slab_regind()
248 assert((uintptr_t)ptr < (uintptr_t)extent_past_get(slab)); in arena_slab_regind()
[all …]
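arena_slab_reg_alloc hands out an address computed from the slab base plus a region index times the region size and decrements the slab's free count, and arena_slab_regind recovers the region index from a pointer inside the slab. The arithmetic, sketched in Rust with made-up field names:

```rust
/// A slab of `nregs` fixed-size regions starting at `base`.
struct SlabLayout {
    base: usize,
    reg_size: usize,
    nregs: usize,
}

impl SlabLayout {
    /// Address of region `regind`: slab base plus index times region size.
    fn reg_addr(&self, regind: usize) -> usize {
        assert!(regind < self.nregs);
        self.base + regind * self.reg_size
    }

    /// Inverse mapping: the pointer must lie inside the slab, and the
    /// index is the offset from the base divided by the region size.
    fn regind(&self, ptr: usize) -> usize {
        assert!(ptr >= self.base && ptr < self.base + self.nregs * self.reg_size);
        (ptr - self.base) / self.reg_size
    }
}

fn main() {
    let slab = SlabLayout { base: 0x10000, reg_size: 64, nregs: 128 };
    let p = slab.reg_addr(5);
    assert_eq!(slab.regind(p), 5);
}
```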
extent.c:111 size_t usize, size_t pad, size_t alignment, bool slab, szind_t szind,
520 size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit) { in extents_alloc() argument
527 new_addr, size, pad, alignment, slab, szind, zero, commit, false); in extents_alloc()
695 rtree_leaf_elm_t *elm_b, extent_t *extent, szind_t szind, bool slab) { in extent_rtree_write_acquired() argument
696 rtree_leaf_elm_write(tsdn, &extents_rtree, elm_a, extent, szind, slab); in extent_rtree_write_acquired()
699 slab); in extent_rtree_write_acquired()
771 bool slab = extent_slab_get(extent); in extent_register_impl() local
772 extent_rtree_write_acquired(tsdn, elm_a, elm_b, extent, szind, slab); in extent_register_impl()
773 if (slab) { in extent_register_impl()
866 void *new_addr, size_t size, size_t pad, size_t alignment, bool slab, in extent_recycle_extract() argument
[all …]
android_je_iterate.c:44 bool slab; in je_malloc_iterate() local
45 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, ptr, true, &szind, &slab); in je_malloc_iterate()
46 if (slab) { in je_malloc_iterate()
/external/rust/crates/tokio/src/util/
slab.rs:180 let mut slab = Slab { in new() localVariable
188 for page in &mut slab.pages { in new()
203 slab in new()
657 let mut slab = Slab::<Foo>::new(); in insert_remove() localVariable
658 let alloc = slab.allocator(); in insert_remove()
668 assert_eq!(1, slab.get(addr1).unwrap().id.load(SeqCst)); in insert_remove()
669 assert_eq!(2, slab.get(addr2).unwrap().id.load(SeqCst)); in insert_remove()
673 assert_eq!(1, slab.get(addr1).unwrap().id.load(SeqCst)); in insert_remove()
679 assert_eq!(3, slab.get(addr3).unwrap().id.load(SeqCst)); in insert_remove()
684 slab.compact(); in insert_remove()
[all …]
/external/jemalloc_new/test/unit/
slab.c:8 extent_t slab; in TEST_BEGIN() local
10 extent_init(&slab, NULL, mallocx(bin_info->slab_size, in TEST_BEGIN()
13 assert_ptr_not_null(extent_addr_get(&slab), in TEST_BEGIN()
16 void *reg = (void *)((uintptr_t)extent_addr_get(&slab) + in TEST_BEGIN()
18 assert_zu_eq(arena_slab_regind(&slab, binind, reg), in TEST_BEGIN()
23 free(extent_addr_get(&slab)); in TEST_BEGIN()
/external/mesa3d/src/gallium/winsys/radeon/drm/
radeon_drm_bo.c:81 for (num_idle = 0; num_idle < bo->u.slab.num_fences; ++num_idle) { in radeon_bo_is_busy()
82 if (radeon_real_bo_is_busy(bo->u.slab.fences[num_idle])) { in radeon_bo_is_busy()
86 radeon_bo_reference(&bo->u.slab.fences[num_idle], NULL); in radeon_bo_is_busy()
88 memmove(&bo->u.slab.fences[0], &bo->u.slab.fences[num_idle], in radeon_bo_is_busy()
89 (bo->u.slab.num_fences - num_idle) * sizeof(bo->u.slab.fences[0])); in radeon_bo_is_busy()
90 bo->u.slab.num_fences -= num_idle; in radeon_bo_is_busy()
111 while (bo->u.slab.num_fences) { in radeon_bo_wait_idle()
113 radeon_bo_reference(&fence, bo->u.slab.fences[0]); in radeon_bo_wait_idle()
120 if (bo->u.slab.num_fences && fence == bo->u.slab.fences[0]) { in radeon_bo_wait_idle()
121 radeon_bo_reference(&bo->u.slab.fences[0], NULL); in radeon_bo_wait_idle()
[all …]
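The radeon hits keep a per-slab-buffer fence list and compact it in place: count the leading fences that are already idle, drop their references, and memmove the remaining fences to the front. In Rust, Vec::drain expresses the same front-compaction; the sketch below uses a stand-in `Fence` type and busy check, not the winsys API:

```rust
struct Fence {
    signalled: bool,
}

fn fence_is_busy(f: &Fence) -> bool {
    !f.signalled
}

/// Drop the leading fences that are no longer busy and shift the rest to
/// the front, like the memmove in radeon_bo_is_busy. Returns whether any
/// busy fence remains.
fn compact_fences(fences: &mut Vec<Fence>) -> bool {
    let num_idle = fences.iter().take_while(|f| !fence_is_busy(f)).count();
    fences.drain(..num_idle); // releases the idle fences
    !fences.is_empty()
}

fn main() {
    let mut fences = vec![
        Fence { signalled: true },
        Fence { signalled: true },
        Fence { signalled: false },
    ];
    assert!(compact_fences(&mut fences));
    assert_eq!(fences.len(), 1);
}
```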
radeon_drm_cs.c:316 real_idx = radeon_lookup_or_add_real_buffer(cs, bo->u.slab.real); in radeon_lookup_or_add_slab_buffer()
340 item->u.slab.real_idx = real_idx; in radeon_lookup_or_add_slab_buffer()
377 index = cs->csc->slab_buffers[index].u.slab.real_idx; in radeon_drm_cs_add_buffer()
530 for (unsigned src = 0; src < bo->u.slab.num_fences; ++src) { in radeon_bo_slab_fence()
531 if (bo->u.slab.fences[src]->num_cs_references) { in radeon_bo_slab_fence()
532 bo->u.slab.fences[dst] = bo->u.slab.fences[src]; in radeon_bo_slab_fence()
535 radeon_bo_reference(&bo->u.slab.fences[src], NULL); in radeon_bo_slab_fence()
538 bo->u.slab.num_fences = dst; in radeon_bo_slab_fence()
541 if (bo->u.slab.num_fences >= bo->u.slab.max_fences) { in radeon_bo_slab_fence()
542 unsigned new_max_fences = bo->u.slab.max_fences + 1; in radeon_bo_slab_fence()
[all …]
/external/mesa3d/src/gallium/winsys/amdgpu/drm/
amdgpu_bo.c:369 real = bo->u.slab.real; in amdgpu_bo_map()
411 real = bo->bo ? bo : bo->u.slab.real; in amdgpu_bo_unmap()
621 bo = container_of(entry, bo, u.slab.entry); in amdgpu_bo_can_reclaim_slab()
652 RADEON_FLAG_ENCRYPTED), &bo->u.slab.entry); in amdgpu_bo_slab_destroy()
656 0), &bo->u.slab.entry); in amdgpu_bo_slab_destroy()
670 struct amdgpu_slab *slab = CALLOC_STRUCT(amdgpu_slab); in amdgpu_bo_slab_alloc() local
676 if (!slab) in amdgpu_bo_slab_alloc()
704 slab->buffer = amdgpu_winsys_bo(amdgpu_bo_create(ws, in amdgpu_bo_slab_alloc()
707 if (!slab->buffer) in amdgpu_bo_slab_alloc()
710 slab->base.num_entries = slab->buffer->base.size / entry_size; in amdgpu_bo_slab_alloc()
[all …]
amdgpu_bo.h:75 } slab; member
147 void amdgpu_bo_slab_free(void *priv, struct pb_slab *slab);
156 struct amdgpu_slab *amdgpu_slab(struct pb_slab *slab) in amdgpu_slab() argument
158 return (struct amdgpu_slab *)slab; in amdgpu_slab()
/external/rust/crates/tokio/src/io/driver/
mod.rs:18 use crate::util::slab::{self, Slab};
72 pub(super) io_dispatch: slab::Allocator<ScheduledIo>,
119 let slab = Slab::new(); in new() localVariable
120 let allocator = slab.allocator(); in new()
126 resources: Some(slab), in new()
183 let addr = slab::Address::from_usize(ADDRESS.unpack(token.0)); in dispatch()
213 if let Some(mut slab) = resources { in drop()
214 slab.for_each(|io| { in drop()
324 ) -> io::Result<slab::Ref<ScheduledIo>> { in add_source()
/external/skqp/src/compute/hs/vk/bench/
main.c:79 uint32_t * const slab = ALLOCA_MACRO(slab_size); in hs_transpose_slabs_u32() local
84 memcpy(slab,vout_h,slab_size); in hs_transpose_slabs_u32()
88 vout_h[col * hs_height + row] = slab[row * hs_width + col]; in hs_transpose_slabs_u32()
104 uint64_t * const slab = ALLOCA_MACRO(slab_size); in hs_transpose_slabs_u64() local
109 memcpy(slab,vout_h,slab_size); in hs_transpose_slabs_u64()
113 vout_h[col * hs_height + row] = slab[row * hs_width + col]; in hs_transpose_slabs_u64()
533 uint32_t const slab_size = hs_target->config.slab.height << hs_target->config.slab.width_log2; in main()
1078 1u<<hs_target->config.slab.width_log2, in main()
1079 hs_target->config.slab.height, in main()
1092 hs_debug_u32(1u<<hs_target->config.slab.width_log2, in main()
[all …]
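Both sorting benches (this Vulkan one and the OpenCL one below) untangle sorted output by transposing each width x height slab: copy the slab to scratch, then write element (row, col) back at (col, row). The index swap, written once in Rust instead of separate u32/u64 variants:

```rust
/// Transpose one `width x height` slab in place within `out`, mirroring
/// `vout_h[col * height + row] = slab[row * width + col]` from the bench.
fn transpose_slab<T: Copy>(out: &mut [T], width: usize, height: usize) {
    assert_eq!(out.len(), width * height);
    let slab: Vec<T> = out.to_vec(); // scratch copy, like the memcpy
    for row in 0..height {
        for col in 0..width {
            out[col * height + row] = slab[row * width + col];
        }
    }
}

fn main() {
    let mut data = vec![1, 2, 3, 4, 5, 6]; // 3 rows x 2 cols, row-major
    transpose_slab(&mut data, 2, 3);
    assert_eq!(data, vec![1, 3, 5, 2, 4, 6]);
}
```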
/external/google-fonts/zilla-slab/
METADATA:4 name: "zilla-slab"
7 …"A contemporary slab serif, based on Typotheque's Tesla, it is constructed with smooth curves and …
12 value: "https://github.com/mozilla/zilla-slab"
/external/skqp/src/compute/hs/cl/bench/
main.c:119 uint32_t * const slab = ALLOCA_MACRO(slab_size); in hs_transpose_slabs_u32() local
124 memcpy(slab,vout_h,slab_size); in hs_transpose_slabs_u32()
128 vout_h[col * hs_height + row] = slab[row * hs_width + col]; in hs_transpose_slabs_u32()
144 uint64_t * const slab = ALLOCA_MACRO(slab_size); in hs_transpose_slabs_u64() local
149 memcpy(slab,vout_h,slab_size); in hs_transpose_slabs_u64()
153 vout_h[col * hs_height + row] = slab[row * hs_width + col]; in hs_transpose_slabs_u64()
693 uint32_t const kpb = hs_target->config.slab.height << hs_target->config.slab.width_log2; in main()
750 1 << hs_target->config.slab.width_log2, in main()
751 hs_target->config.slab.height, in main()
/external/mesa3d/src/amd/vulkan/
radv_shader.c:788 list_for_each_entry(struct radv_shader_slab, slab, &device->shader_slabs, slabs) { in radv_alloc_shader_memory()
790 list_for_each_entry(struct radv_shader_variant, s, &slab->shaders, slab_list) { in radv_alloc_shader_memory()
792 shader->bo = slab->bo; in radv_alloc_shader_memory()
796 return slab->ptr + offset; in radv_alloc_shader_memory()
800 if (offset <= slab->size && slab->size - offset >= shader->code_size) { in radv_alloc_shader_memory()
801 shader->bo = slab->bo; in radv_alloc_shader_memory()
803 list_addtail(&shader->slab_list, &slab->shaders); in radv_alloc_shader_memory()
805 return slab->ptr + offset; in radv_alloc_shader_memory()
810 struct radv_shader_slab *slab = calloc(1, sizeof(struct radv_shader_slab)); in radv_alloc_shader_memory() local
812 slab->size = MAX2(256 * 1024, shader->code_size); in radv_alloc_shader_memory()
[all …]
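The radv hits implement a simple first-fit policy for shader code: walk the existing slabs looking for a gap large enough for the new shader, and only if none fits allocate a fresh slab of at least 256 KiB (or the shader size, whichever is larger). A condensed Rust sketch of that policy; the slab and placement types here are stand-ins, not radv's:

```rust
const MIN_SLAB_SIZE: usize = 256 * 1024;

struct ShaderSlab {
    size: usize,
    /// (offset, length) of shaders already placed, kept sorted by offset.
    used: Vec<(usize, usize)>,
}

impl ShaderSlab {
    /// First fit within this slab: find an offset where `len` bytes fit
    /// between existing shaders or after the last one.
    fn try_place(&mut self, len: usize) -> Option<usize> {
        let mut offset = 0;
        for &(start, used_len) in &self.used {
            if start - offset >= len {
                break; // gap before this shader is big enough
            }
            offset = start + used_len;
        }
        if offset + len <= self.size {
            self.used.push((offset, len));
            self.used.sort_by_key(|&(o, _)| o);
            return Some(offset);
        }
        None
    }
}

/// Walk existing slabs; if nothing fits, grow the list with a new slab of
/// MAX2(256 KiB, code_size), as the radv_alloc_shader_memory hits show.
fn alloc_shader_memory(slabs: &mut Vec<ShaderSlab>, code_size: usize) -> (usize, usize) {
    for (i, slab) in slabs.iter_mut().enumerate() {
        if let Some(offset) = slab.try_place(code_size) {
            return (i, offset);
        }
    }
    let mut slab = ShaderSlab { size: MIN_SLAB_SIZE.max(code_size), used: Vec::new() };
    let offset = slab.try_place(code_size).expect("fresh slab always fits");
    slabs.push(slab);
    (slabs.len() - 1, offset)
}

fn main() {
    let mut slabs = Vec::new();
    assert_eq!(alloc_shader_memory(&mut slabs, 4096), (0, 0));
    let (_, offset2) = alloc_shader_memory(&mut slabs, 8192);
    assert_eq!(offset2, 4096);
}
```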
