Searched refs:offset (Results 1 – 25 of 32) sorted by relevance

/lib/
radix-tree.c
97 unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK; in radix_tree_descend() local
98 void __rcu **entry = rcu_dereference_raw(parent->slots[offset]); in radix_tree_descend()
101 return offset; in radix_tree_descend()
110 int offset) in tag_set() argument
112 __set_bit(offset, node->tags[tag]); in tag_set()
116 int offset) in tag_clear() argument
118 __clear_bit(offset, node->tags[tag]); in tag_clear()
122 int offset) in tag_get() argument
124 return test_bit(offset, node->tags[tag]); in tag_get()
190 unsigned long offset) in radix_tree_find_next_bit() argument
[all …]
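
The hit at line 97 shows the core of the radix tree descent: the slot to follow at a node is the search index shifted down by the node's shift and masked to the per-node fanout. A minimal userspace sketch of that arithmetic, assuming the typical 6-bit fanout (64 slots per node); the real RADIX_TREE_MAP_SHIFT is configuration dependent:

#include <stdio.h>

#define MAP_SHIFT 6                         /* assumed: 64 slots per node */
#define MAP_MASK  ((1UL << MAP_SHIFT) - 1)

int main(void)
{
        unsigned long index = 0x12345;
        unsigned int shift;

        /* Descend a 3-level tree, printing the slot offset chosen at each
         * level, i.e. (index >> shift) & MAP_MASK as in radix_tree_descend(). */
        for (shift = 2 * MAP_SHIFT; ; shift -= MAP_SHIFT) {
                unsigned int offset = (index >> shift) & MAP_MASK;

                printf("shift %2u -> slot offset %u\n", shift, offset);
                if (shift == 0)
                        break;
        }
        return 0;
}
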
generic-radix-tree.c
54 void *__genradix_ptr(struct __genradix *radix, size_t offset) in __genradix_ptr() argument
60 if (ilog2(offset) >= genradix_depth_shift(level)) in __genradix_ptr()
71 n = n->children[offset >> genradix_depth_shift(level)]; in __genradix_ptr()
72 offset &= genradix_depth_size(level) - 1; in __genradix_ptr()
75 return &n->data[offset]; in __genradix_ptr()
104 void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset, in __genradix_ptr_alloc() argument
118 if (n && ilog2(offset) < genradix_depth_shift(level)) in __genradix_ptr_alloc()
139 &n->children[offset >> genradix_depth_shift(level)]; in __genradix_ptr_alloc()
140 offset &= genradix_depth_size(level) - 1; in __genradix_ptr_alloc()
158 return &n->data[offset]; in __genradix_ptr_alloc()
[all …]
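
__genradix_ptr() resolves a byte offset level by level: the high bits select a child, the low bits carry down to the next level, and whatever remains indexes into the leaf. A rough userspace model of that split, assuming 4 KiB nodes holding 8-byte child pointers; the constants are illustrative rather than the kernel's exact genradix geometry:

#include <stdio.h>

#define NODE_SHIFT 12                       /* assumed 4 KiB nodes */
#define ARY_SHIFT  (NODE_SHIFT - 3)         /* assumed: children per node = node size / 8 */

/* Bytes covered below a node at the given level, expressed as a shift. */
static unsigned int depth_shift(unsigned int level)
{
        return NODE_SHIFT + level * ARY_SHIFT;
}

int main(void)
{
        size_t offset = 0x123456;           /* byte offset being looked up */
        unsigned int level = 2;             /* assumed tree depth */

        while (level--) {
                size_t child = offset >> depth_shift(level);

                printf("level %u: child %zu\n", level, child);
                offset &= ((size_t)1 << depth_shift(level)) - 1;
        }
        printf("offset within leaf: %#zx\n", offset);
        return 0;
}
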
pci_iomap.c
30 unsigned long offset, in pci_iomap_range() argument
37 if (len <= offset || !start) in pci_iomap_range()
39 len -= offset; in pci_iomap_range()
40 start += offset; in pci_iomap_range()
70 unsigned long offset, in pci_iomap_wc_range() argument
81 if (len <= offset || !start) in pci_iomap_wc_range()
84 len -= offset; in pci_iomap_wc_range()
85 start += offset; in pci_iomap_wc_range()
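
pci_iomap_range() and pci_iomap_wc_range() narrow the BAR window the same way before remapping it: bail out if the offset is at or past the end of the resource (or the BAR has no address), otherwise advance the start and shrink the remaining length. The same bounds pattern in a self-contained form; the helper name below is illustrative, not a kernel API:

#include <stdio.h>
#include <stdbool.h>

/* Narrow a [start, start + len) window by offset, mirroring the checks the
 * pci_iomap_*_range() helpers make before calling ioremap. */
static bool narrow_range(unsigned long *start, unsigned long *len,
                         unsigned long offset)
{
        if (*len <= offset || !*start)
                return false;
        *len -= offset;
        *start += offset;
        return true;
}

int main(void)
{
        unsigned long start = 0xfe000000, len = 0x1000;

        if (narrow_range(&start, &len, 0x800))
                printf("window: %#lx, %lu bytes\n", start, len);
        if (!narrow_range(&start, &len, 0x1000))
                printf("offset past end of resource, rejected\n");
        return 0;
}
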
xarray.c
84 unsigned int offset, xa_mark_t mark) in node_get_mark() argument
86 return test_bit(offset, node_marks(node, mark)); in node_get_mark()
90 static inline bool node_set_mark(struct xa_node *node, unsigned int offset, in node_set_mark() argument
93 return __test_and_set_bit(offset, node_marks(node, mark)); in node_set_mark()
97 static inline bool node_clear_mark(struct xa_node *node, unsigned int offset, in node_clear_mark() argument
100 return __test_and_clear_bit(offset, node_marks(node, mark)); in node_clear_mark()
153 static void xas_move_index(struct xa_state *xas, unsigned long offset) in xas_move_index() argument
157 xas->xa_index += offset << shift; in xas_move_index()
203 unsigned int offset = get_offset(xas->xa_index, node); in xas_descend() local
204 void *entry = xa_entry(xas->xa, node, offset); in xas_descend()
[all …]
find_bit.c
68 unsigned long offset) in find_next_bit() argument
70 return _find_next_bit(addr, NULL, size, offset, 0UL); in find_next_bit()
77 unsigned long offset) in find_next_zero_bit() argument
79 return _find_next_bit(addr, NULL, size, offset, ~0UL); in find_next_zero_bit()
87 unsigned long offset) in find_next_and_bit() argument
89 return _find_next_bit(addr1, addr2, size, offset, 0UL); in find_next_and_bit()
200 long size, unsigned long offset) in find_next_zero_bit_le() argument
202 return _find_next_bit_le(addr, NULL, size, offset, ~0UL); in find_next_zero_bit_le()
209 long size, unsigned long offset) in find_next_bit_le() argument
211 return _find_next_bit_le(addr, NULL, size, offset, 0UL); in find_next_bit_le()
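
All of the wrappers shown above feed one core routine: find_next_bit() returns the first set bit at or after offset, and find_next_zero_bit() gets the same behaviour for clear bits by inverting the words. A naive bit-at-a-time userspace equivalent (the kernel's _find_next_bit() scans a word at a time, and the helper name here is made up):

#include <stdio.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))

/* Index of the first set bit >= offset, or size if there is none. */
static unsigned long my_find_next_bit(const unsigned long *addr,
                                      unsigned long size, unsigned long offset)
{
        unsigned long i;

        for (i = offset; i < size; i++)
                if (addr[i / BITS_PER_LONG] & (1UL << (i % BITS_PER_LONG)))
                        return i;
        return size;
}

int main(void)
{
        unsigned long map[2] = { 0x10UL, 0x1UL };  /* bit 4 of word 0, bit 0 of word 1 */
        unsigned long size = 2 * BITS_PER_LONG;
        unsigned long bit;

        for (bit = my_find_next_bit(map, size, 0); bit < size;
             bit = my_find_next_bit(map, size, bit + 1))
                printf("bit %lu is set\n", bit);
        return 0;
}
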
scatterlist.c
390 unsigned int n_pages, unsigned int offset, in __sg_alloc_table_from_pages() argument
432 chunk_size = ((j - cur_page) << PAGE_SHIFT) - offset; in __sg_alloc_table_from_pages()
434 min_t(unsigned long, size, chunk_size), offset); in __sg_alloc_table_from_pages()
436 offset = 0; in __sg_alloc_table_from_pages()
465 unsigned int n_pages, unsigned int offset, in sg_alloc_table_from_pages() argument
468 return __sg_alloc_table_from_pages(sgt, pages, n_pages, offset, size, in sg_alloc_table_from_pages()
614 return PAGE_ALIGN(sg->offset + sg->length) >> PAGE_SHIFT; in sg_page_count()
638 return PAGE_ALIGN(sg->offset + sg_dma_len(sg)) >> PAGE_SHIFT; in sg_dma_page_count()
695 miter->__offset = miter->piter.sg_pgoffset ? 0 : sg->offset; in sg_miter_get_next_page()
698 miter->__remaining = sg->offset + sg->length - in sg_miter_get_next_page()
[all …]
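
sg_page_count() and sg_dma_page_count() turn an element's (offset, length) pair into a page count by rounding offset + length up to the next page boundary and shifting down by PAGE_SHIFT. The arithmetic in isolation, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12                       /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

/* Pages spanned by a buffer that starts `offset` bytes into its first page. */
static unsigned long page_count(unsigned long offset, unsigned long length)
{
        return PAGE_ALIGN(offset + length) >> PAGE_SHIFT;
}

int main(void)
{
        printf("%lu\n", page_count(0, 4096));    /* 1: exactly one page */
        printf("%lu\n", page_count(100, 4096));  /* 2: spills into a second page */
        printf("%lu\n", page_count(4000, 200));  /* 2: small buffer straddling a boundary */
        return 0;
}
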
packing.c
11 static int get_le_offset(int offset) in get_le_offset() argument
15 closest_multiple_of_4 = (offset / 4) * 4; in get_le_offset()
16 offset -= closest_multiple_of_4; in get_le_offset()
17 return closest_multiple_of_4 + (3 - offset); in get_le_offset()
20 static int get_reverse_lsw32_offset(int offset, size_t len) in get_reverse_lsw32_offset() argument
25 word_index = offset / 4; in get_reverse_lsw32_offset()
27 offset -= closest_multiple_of_4; in get_reverse_lsw32_offset()
29 return word_index * 4 + offset; in get_reverse_lsw32_offset()
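
get_le_offset() leaves a byte in the same 4-byte word but mirrors its position within that word, which is what byte-swapping each 32-bit word does to addresses. A tiny standalone restatement of the mapping shown above:

#include <stdio.h>

/* Position of a byte after every 32-bit word is byte-swapped: same word,
 * mirrored offset within it, as in get_le_offset() above. */
static int le_offset(int offset)
{
        int word_start = (offset / 4) * 4;

        return word_start + (3 - (offset - word_start));
}

int main(void)
{
        int i;

        for (i = 0; i < 8; i++)
                printf("offset %d -> %d\n", i, le_offset(i));
        return 0;
}

Offsets 0..3 map to 3..0 and 4..7 map to 7..4, so packed fields can be addressed consistently regardless of which byte order the device expects.
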
devres.c
25 static void __iomem *__devm_ioremap(struct device *dev, resource_size_t offset, in __devm_ioremap() argument
37 addr = ioremap(offset, size); in __devm_ioremap()
40 addr = ioremap_nocache(offset, size); in __devm_ioremap()
43 addr = ioremap_wc(offset, size); in __devm_ioremap()
64 void __iomem *devm_ioremap(struct device *dev, resource_size_t offset, in devm_ioremap() argument
67 return __devm_ioremap(dev, offset, size, DEVM_IOREMAP); in devm_ioremap()
80 void __iomem *devm_ioremap_nocache(struct device *dev, resource_size_t offset, in devm_ioremap_nocache() argument
83 return __devm_ioremap(dev, offset, size, DEVM_IOREMAP_NC); in devm_ioremap_nocache()
95 void __iomem *devm_ioremap_wc(struct device *dev, resource_size_t offset, in devm_ioremap_wc() argument
98 return __devm_ioremap(dev, offset, size, DEVM_IOREMAP_WC); in devm_ioremap_wc()
iov_iter.c
156 static size_t copy_page_to_iter_iovec(struct page *page, size_t offset, size_t bytes, in copy_page_to_iter_iovec() argument
179 from = kaddr + offset; in copy_page_to_iter_iovec()
202 offset = from - kaddr; in copy_page_to_iter_iovec()
210 from = kaddr + offset; in copy_page_to_iter_iovec()
240 static size_t copy_page_from_iter_iovec(struct page *page, size_t offset, size_t bytes, in copy_page_from_iter_iovec() argument
263 to = kaddr + offset; in copy_page_from_iter_iovec()
286 offset = to - kaddr; in copy_page_from_iter_iovec()
294 to = kaddr + offset; in copy_page_from_iter_iovec()
338 if (unlikely(p->offset + p->len != i->iov_offset)) in sanity()
353 pipe->bufs[idx].offset, in sanity()
[all …]
stackdepot.c
54 u32 offset : STACK_ALLOC_OFFSET_BITS; member
132 stack->handle.offset = depot_offset >> STACK_ALLOC_ALIGN; in depot_alloc_stack()
202 size_t offset = parts.offset << STACK_ALLOC_ALIGN; in stack_depot_fetch() local
203 struct stack_record *stack = slab + offset; in stack_depot_fetch()
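
The stack depot squeezes a slab-relative offset into a narrow handle bitfield by storing it pre-shifted: depot_alloc_stack() records offset >> STACK_ALLOC_ALIGN and stack_depot_fetch() shifts it back. A generic sketch of that packing with made-up field widths; the real STACK_ALLOC_* constants and handle layout differ:

#include <stdio.h>
#include <stdint.h>

#define ALIGN_SHIFT 4                       /* assumed: offsets are 16-byte aligned */
#define OFFSET_BITS 16                      /* assumed field width, not the kernel's */

struct handle {
        uint32_t offset : OFFSET_BITS;      /* aligned offset, stored shifted */
        uint32_t slab   : 32 - OFFSET_BITS; /* which slab the record lives in */
};

int main(void)
{
        size_t real_offset = 0x12340;       /* must be a multiple of 1 << ALIGN_SHIFT */
        struct handle h = { .offset = real_offset >> ALIGN_SHIFT, .slab = 7 };
        size_t fetched = (size_t)h.offset << ALIGN_SHIFT;

        printf("stored %#zx, fetched back %#zx from slab %u\n",
               real_offset, fetched, (unsigned int)h.slab);
        return 0;
}
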
sbitmap.c
291 static inline void emit_byte(struct seq_file *m, unsigned int offset, u8 byte) in emit_byte() argument
293 if ((offset & 0xf) == 0) { in emit_byte()
294 if (offset != 0) in emit_byte()
296 seq_printf(m, "%08x:", offset); in emit_byte()
298 if ((offset & 0x1) == 0) in emit_byte()
307 unsigned int offset = 0; in sbitmap_bitmap_show() local
320 emit_byte(m, offset, byte); in sbitmap_bitmap_show()
323 offset++; in sbitmap_bitmap_show()
330 emit_byte(m, offset, byte); in sbitmap_bitmap_show()
331 offset++; in sbitmap_bitmap_show()
[all …]
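
emit_byte() uses the running offset only for layout: every sixteenth byte starts a new address-prefixed line and every second byte gets a separating space, which is the familiar hexdump shape sbitmap_bitmap_show() prints. The same formatting over stdio:

#include <stdio.h>
#include <string.h>

/* Hexdump-style output driven by a byte offset, as in emit_byte() above:
 * a new "xxxxxxxx:" line every 16 bytes, a space every 2 bytes. */
static void emit_byte(unsigned int offset, unsigned char byte)
{
        if ((offset & 0xf) == 0) {
                if (offset != 0)
                        putchar('\n');
                printf("%08x:", offset);
        }
        if ((offset & 0x1) == 0)
                putchar(' ');
        printf("%02x", byte);
}

int main(void)
{
        const char *msg = "sbitmap-style hexdump formatting demo";
        unsigned int i;

        for (i = 0; i < strlen(msg); i++)
                emit_byte(i, (unsigned char)msg[i]);
        putchar('\n');
        return 0;
}
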
ts_kmp.c
45 unsigned int i, q = 0, text_len, consumed = state->offset; in kmp_find()
63 state->offset = consumed + i + 1; in kmp_find()
64 return state->offset - kmp->pattern_len; in kmp_find()
error-inject.c
63 unsigned long entry, offset = 0, size = 0; in populate_error_injection_list() local
70 !kallsyms_lookup_size_offset(entry, &size, &offset)) { in populate_error_injection_list()
sg_split.c
89 out_sg->offset += split->skip_sg0; in sg_split_phys()
92 out_sg->offset = 0; in sg_split_phys()
decompress_unlzma.c
394 int offset; in process_bit1() local
460 offset = 0; in process_bit1()
470 offset = 1 << LZMA_LEN_NUM_LOW_BITS; in process_bit1()
475 offset = ((1 << LZMA_LEN_NUM_LOW_BITS) in process_bit1()
482 len += offset; in process_bit1()
test_kasan.c
348 unsigned long offset; in kfree_via_page() local
358 offset = offset_in_page(ptr); in kfree_via_page()
359 kfree(page_address(page) + offset); in kfree_via_page()
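
kfree_via_page() rebuilds the pointer it was given from two pieces: the page the allocation lives in and offset_in_page(ptr), which is just the low PAGE_SHIFT bits of the address. A userspace model of that arithmetic, assuming 4 KiB pages and using a plain mask where the kernel would go through virt_to_page()/page_address():

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12                       /* assumed 4 KiB pages */
#define PAGE_SIZE  ((uintptr_t)1 << PAGE_SHIFT)

int main(void)
{
        uintptr_t ptr = 0x12345678;                     /* stand-in for a kmalloc'ed address */
        uintptr_t page_base = ptr & ~(PAGE_SIZE - 1);   /* like page_address(virt_to_page(ptr)) */
        uintptr_t offset = ptr & (PAGE_SIZE - 1);       /* like offset_in_page(ptr) */

        /* page_base + offset reconstructs the original pointer, which is
         * what kfree_via_page() hands back to kfree(). */
        printf("%#lx == %#lx\n", (unsigned long)ptr,
               (unsigned long)(page_base + offset));
        return 0;
}
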
/lib/lzo/
lzo1x_decompress_safe.c
79 size_t offset; in lzo1x_decompress_safe() local
86 offset = ip - ip_last; in lzo1x_decompress_safe()
87 if (unlikely(offset > MAX_255_COUNT)) in lzo1x_decompress_safe()
90 offset = (offset << 8) - offset; in lzo1x_decompress_safe()
91 t += offset + 15 + *ip++; in lzo1x_decompress_safe()
147 size_t offset; in lzo1x_decompress_safe() local
154 offset = ip - ip_last; in lzo1x_decompress_safe()
155 if (unlikely(offset > MAX_255_COUNT)) in lzo1x_decompress_safe()
158 offset = (offset << 8) - offset; in lzo1x_decompress_safe()
159 t += offset + 31 + *ip++; in lzo1x_decompress_safe()
[all …]
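
The LZO hit at lines 86-91 finishes a run length that was extended with zero bytes: each 0x00 byte in the stream stands for another 255, so the zero-byte count (offset, measured as ip - ip_last) is folded in as (offset << 8) - offset, which is just offset * 255, before the final non-zero byte and the format's bias are added. A quick check of that identity:

#include <assert.h>
#include <stdio.h>

int main(void)
{
        size_t offset;

        /* (offset << 8) - offset == offset * 255: the per-zero-byte increment
         * used when LZO extends a length with 0x00 bytes. */
        for (offset = 0; offset < 1000; offset++)
                assert(((offset << 8) - offset) == offset * 255);

        /* e.g. 3 zero bytes, final byte 0x12, bias 15 as at line 91 above */
        printf("t += %zu\n", ((size_t)3 << 8) - 3 + 15 + 0x12);
        return 0;
}
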
/lib/842/
842_decompress.c
165 u64 index, offset, total = round_down(p->out - p->ostart, 8); in __do_index() local
172 offset = index * size; in __do_index()
184 if (offset >= pos) in __do_index()
187 offset += section; in __do_index()
190 if (offset + size > total) { in __do_index()
192 (unsigned long)offset, (unsigned long)total); in __do_index()
201 (unsigned long)(index * size), (unsigned long)offset, in __do_index()
203 (unsigned long)beN_to_cpu(&p->ostart[offset], size)); in __do_index()
205 memcpy(p->out, &p->ostart[offset], size); in __do_index()
/lib/zstd/
zstd_opt.h
142 …tPrice(seqStore_t *seqStorePtr, U32 litLength, const BYTE *literals, U32 offset, U32 matchLength, … in ZSTD_getPrice() argument
146 BYTE const offCode = (BYTE)ZSTD_highbit32(offset + 1); in ZSTD_getPrice()
165 …atePrice(seqStore_t *seqStorePtr, U32 litLength, const BYTE *literals, U32 offset, U32 matchLength) in ZSTD_updatePrice() argument
184 BYTE const offCode = (BYTE)ZSTD_highbit32(offset + 1); in ZSTD_updatePrice()
426 U32 offset, rep[ZSTD_REP_NUM]; in ZSTD_compressBlock_opt_generic() local
636 offset = opt[cur].off; in ZSTD_compressBlock_opt_generic()
640 best_off = offset; in ZSTD_compressBlock_opt_generic()
657 offset = opt[cur].off; in ZSTD_compressBlock_opt_generic()
661 if (offset > ZSTD_REP_MOVE_OPT) { in ZSTD_compressBlock_opt_generic()
664 rep[0] = offset - ZSTD_REP_MOVE_OPT; in ZSTD_compressBlock_opt_generic()
[all …]
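
Both ZSTD_getPrice() and ZSTD_updatePrice() collapse an offset to its offset code with ZSTD_highbit32(offset + 1), the index of the highest set bit. A portable stand-in for that reduction; the assumption is a compiler that provides __builtin_clz (GCC or Clang):

#include <stdio.h>

/* Highest set bit of a non-zero 32-bit value, as ZSTD_highbit32() computes it. */
static unsigned int highbit32(unsigned int v)
{
        return 31 - (unsigned int)__builtin_clz(v);
}

int main(void)
{
        unsigned int offsets[] = { 0, 1, 3, 4, 100, 65535 };
        unsigned int i;

        for (i = 0; i < sizeof(offsets) / sizeof(offsets[0]); i++)
                printf("offset %u -> offCode %u\n",
                       offsets[i], highbit32(offsets[i] + 1));
        return 0;
}

Adding 1 before the bit scan keeps the argument non-zero, so the result is always defined.
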
decompress.c
864 size_t offset; member
888 const BYTE *match = oLitEnd - sequence.offset; in ZSTD_execSequenceLast7()
908 if (sequence.offset > (size_t)(oLitEnd - base)) { in ZSTD_execSequenceLast7()
910 if (sequence.offset > (size_t)(oLitEnd - vBase)) in ZSTD_execSequenceLast7()
957 size_t offset; in ZSTD_decodeSequence() local
959 offset = 0; in ZSTD_decodeSequence()
961 offset = OF_base[ofCode] + BIT_readBitsFast(&seqState->DStream, ofBits); /* <= (ZSTD_WINDOWLOG_MA… in ZSTD_decodeSequence()
967 offset += (llCode == 0); in ZSTD_decodeSequence()
968 if (offset) { in ZSTD_decodeSequence()
969 size_t temp = (offset == 3) ? seqState->prevOffset[0] - 1 : seqState->prevOffset[offset]; in ZSTD_decodeSequence()
[all …]
compress.c
577 ofCodeTable[u] = (BYTE)ZSTD_highbit32(sequences[u].offset); in ZSTD_seqToCodes()
772 BIT_addBits(&blockStream, sequences[nbSeq - 1].offset, extraBits); in ZSTD_compressSequences_internal()
775 BIT_addBits(&blockStream, sequences[nbSeq - 1].offset >> extraBits, ofBits - extraBits); in ZSTD_compressSequences_internal()
777 BIT_addBits(&blockStream, sequences[nbSeq - 1].offset, ofCodeTable[nbSeq - 1]); in ZSTD_compressSequences_internal()
807 BIT_addBits(&blockStream, sequences[n].offset, extraBits); in ZSTD_compressSequences_internal()
810 BIT_addBits(&blockStream, sequences[n].offset >> extraBits, ofBits - extraBits); /* 31 */ in ZSTD_compressSequences_internal()
812 BIT_addBits(&blockStream, sequences[n].offset, ofBits); /* 31 */ in ZSTD_compressSequences_internal()
875 seqStorePtr->sequences[0].offset = offsetCode + 1; in ZSTD_storeSeq()
1048 U32 offset; in ZSTD_compressBlock_fast_generic() local
1054 offset = (U32)(ip - match); in ZSTD_compressBlock_fast_generic()
[all …]
/lib/lz4/
lz4_decompress.c
124 size_t offset; in LZ4_decompress_generic() local
162 offset = LZ4_readLE16(ip); in LZ4_decompress_generic()
164 match = op - offset; in LZ4_decompress_generic()
169 (offset >= 8) && in LZ4_decompress_generic()
278 offset = LZ4_readLE16(ip); in LZ4_decompress_generic()
280 match = op - offset; in LZ4_decompress_generic()
300 LZ4_write32(op, (U32)offset); in LZ4_decompress_generic()
394 if (unlikely(offset < 8)) { in LZ4_decompress_generic()
399 match += inc32table[offset]; in LZ4_decompress_generic()
401 match -= dec64table[offset]; in LZ4_decompress_generic()
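
In LZ4_decompress_generic() the match distance arrives as a little-endian 16-bit field and is subtracted from the output pointer to locate the match; when the offset is smaller than the copy length, the source overlaps bytes that are still being written, which is why the slow path (and the inc32table/dec64table trickery) exists. A minimal sketch of that back-reference copy using a plain byte loop:

#include <stdio.h>

/* Read a 16-bit little-endian value, as LZ4_readLE16() does. */
static unsigned int read_le16(const unsigned char *p)
{
        return (unsigned int)p[0] | ((unsigned int)p[1] << 8);
}

int main(void)
{
        unsigned char out[64] = "abc";               /* already-decoded output */
        unsigned char *op = out + 3;                 /* current output position */
        const unsigned char enc_offset[2] = { 3, 0 };
        unsigned int offset = read_le16(enc_offset); /* match starts 3 bytes back */
        unsigned char *match = op - offset;
        size_t len = 7;

        /* Overlapping copy: with offset < len the source pointer walks into
         * bytes this loop has just produced, so copy one byte at a time. */
        while (len--)
                *op++ = *match++;

        printf("%s\n", out);                         /* prints "abcabcabca" */
        return 0;
}
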
/lib/zlib_inflate/
inflate.c
634 state->offset = (unsigned)this.val; in zlib_inflate()
641 state->offset += BITS(state->extra); in zlib_inflate()
645 if (state->offset > state->dmax) { in zlib_inflate()
651 if (state->offset > state->whave + out - left) { in zlib_inflate()
661 if (state->offset > copy) { /* copy from window */ in zlib_inflate()
662 copy = state->offset - copy; in zlib_inflate()
672 from = put - state->offset; in zlib_inflate()
inflate.h
93 unsigned offset; /* distance back to copy string from */ member
/lib/xz/
xz_dec_lzma2.c
322 size_t offset = dict->pos - dist - 1; in dict_get() local
325 offset += dict->end; in dict_get()
327 return dict->full > 0 ? dict->buf[offset] : 0; in dict_get()
587 uint32_t offset; in lzma_literal() local
597 offset = 0x100; in lzma_literal()
600 match_bit = match_byte & offset; in lzma_literal()
602 i = offset + match_bit + symbol; in lzma_literal()
606 offset &= match_bit; in lzma_literal()
609 offset &= ~match_bit; in lzma_literal()
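
dict_get() at line 322 treats the LZMA dictionary as a ring buffer: the byte written dist + 1 positions ago sits at pos - dist - 1, and when that subtraction runs off the front of the window it wraps by adding the buffer end. A self-contained lookback with the same wrap logic; the buffer size and contents below are made up:

#include <stdio.h>
#include <string.h>

#define DICT_END 16                         /* assumed ring buffer size */

static unsigned char buf[DICT_END];
static size_t pos;                          /* next write position */

static void dict_put(unsigned char b)
{
        buf[pos++] = b;
        if (pos == DICT_END)
                pos = 0;
}

/* Byte written `dist + 1` positions ago, wrapping as dict_get() does. */
static unsigned char dict_get(size_t dist)
{
        size_t offset = pos - dist - 1;

        if (dist >= pos)                    /* subtraction wrapped: add the end back */
                offset += DICT_END;
        return buf[offset];
}

int main(void)
{
        const char *s = "ring buffer lookback";
        size_t i;

        for (i = 0; i < strlen(s); i++)
                dict_put((unsigned char)s[i]);

        printf("%c%c%c\n", dict_get(2), dict_get(1), dict_get(0));  /* "ack" */
        printf("%c\n", dict_get(5));        /* 'o': lookback wraps past the buffer start */
        return 0;
}
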
