Searched refs:tail (Results 1 – 25 of 49) sorted by relevance

/arch/arm64/kernel/
perf_callchain.c
23 user_backtrace(struct frame_tail __user *tail, in user_backtrace() argument
31 if (!access_ok(tail, sizeof(buftail))) in user_backtrace()
35 err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); in user_backtrace()
49 if (tail >= buftail.fp) in user_backtrace()
71 compat_user_backtrace(struct compat_frame_tail __user *tail, in compat_user_backtrace() argument
78 if (!access_ok(tail, sizeof(buftail))) in compat_user_backtrace()
82 err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); in compat_user_backtrace()
94 if (tail + 1 >= (struct compat_frame_tail __user *) in compat_user_backtrace()
114 struct frame_tail __user *tail; in perf_callchain_user() local
116 tail = (struct frame_tail __user *)regs->regs[29]; in perf_callchain_user()
[all …]
/arch/arm/kernel/
perf_callchain.c
34 user_backtrace(struct frame_tail __user *tail, in user_backtrace() argument
40 if (!access_ok(tail, sizeof(buftail))) in user_backtrace()
44 err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); in user_backtrace()
56 if (tail + 1 >= buftail.fp) in user_backtrace()
65 struct frame_tail __user *tail; in perf_callchain_user() local
72 tail = (struct frame_tail __user *)regs->ARM_fp - 1; in perf_callchain_user()
75 tail && !((unsigned long)tail & 0x3)) in perf_callchain_user()
76 tail = user_backtrace(tail, entry); in perf_callchain_user()
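
Both perf_callchain.c groups of hits walk a user-space stack by chasing frame records: each candidate record is copied in with __copy_from_user_inatomic() and frame pointers that do not move strictly toward higher addresses are rejected (the tail >= buftail.fp and tail + 1 >= buftail.fp checks). A minimal userspace sketch of that walk, with an illustrative frame_tail layout rather than the kernel's exact definition:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative AAPCS-style frame record; not the kernel's struct. */
    struct frame_tail {
        struct frame_tail *fp;   /* previous frame record        */
        uintptr_t lr;            /* return address of this frame */
    };

    static void walk_frames(struct frame_tail *tail, int max_depth)
    {
        while (tail && max_depth--) {
            printf("pc: %#lx\n", (unsigned long)tail->lr);
            /* Reject loops and backwards walks, as the kernel checks do. */
            if (tail->fp <= tail)
                break;
            tail = tail->fp;
        }
    }

    int main(void)
    {
        struct frame_tail frames[3] = {
            { &frames[1], 0x1000 },
            { &frames[2], 0x2000 },
            { NULL,       0x3000 },
        };

        walk_frames(&frames[0], 16);
        return 0;
    }
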
/arch/x86/kernel/
static_call.c
70 static void __static_call_validate(void *insn, bool tail) in __static_call_validate() argument
74 if (tail) { in __static_call_validate()
91 static inline enum insn_type __sc_insn(bool null, bool tail) in __sc_insn() argument
103 return 2*tail + null; in __sc_insn()
106 void arch_static_call_transform(void *site, void *tramp, void *func, bool tail) in arch_static_call_transform() argument
116 __static_call_validate(site, tail); in arch_static_call_transform()
117 __static_call_transform(site, __sc_insn(!func, tail), func, false); in arch_static_call_transform()
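
The __sc_insn() hit is a branchless two-bit encoding: tail selects the jump forms and null the disabled forms, so 2*tail + null indexes all four cases. A compilable sketch of that table; the enum ordering follows what the excerpt implies:

    #include <stdio.h>
    #include <stdbool.h>

    enum insn_type {
        CALL = 0,  /* !tail, !null: site calls func            */
        NOP  = 1,  /* !tail,  null: disabled call site         */
        JMP  = 2,  /*  tail, !null: tail-call site jumps       */
        RET  = 3,  /*  tail,  null: disabled tail-call returns */
    };

    static enum insn_type sc_insn(bool null, bool tail)
    {
        return 2 * tail + null;   /* branchless index into the table */
    }

    int main(void)
    {
        printf("%d %d %d %d\n",
               sc_insn(false, false),   /* CALL */
               sc_insn(true,  false),   /* NOP  */
               sc_insn(false, true),    /* JMP  */
               sc_insn(true,  true));   /* RET  */
        return 0;
    }
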
/arch/x86/platform/geode/
alix.c
139 const char *tail; in alix_present() local
157 tail = p + alix_sig_len; in alix_present()
158 if ((tail[0] == '2' || tail[0] == '3' || tail[0] == '6')) { in alix_present()
/arch/powerpc/platforms/pseries/
of_helpers.c
22 const char *tail; in pseries_of_derive_parent() local
25 tail = kbasename(path) - 1; in pseries_of_derive_parent()
31 if (tail > path) { in pseries_of_derive_parent()
32 parent_path = kstrndup(path, tail - path, GFP_KERNEL); in pseries_of_derive_parent()
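
The of_helpers.c hits derive a device-tree node's parent path by pointing tail at the '/' before the last component (kbasename(path) - 1) and duplicating everything before it. A userspace sketch with strrchr() standing in for the kernel's kbasename(), assuming a well-formed absolute path:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static char *derive_parent(const char *path)
    {
        const char *tail = strrchr(path, '/');   /* kbasename(path) - 1 */

        if (!tail)
            return NULL;
        /* Non-root parent: everything before the final '/'. */
        return tail > path ? strndup(path, (size_t)(tail - path))
                           : strdup("/");
    }

    int main(void)
    {
        char *parent = derive_parent("/cpus/cpu@0");

        printf("%s\n", parent);   /* /cpus */
        free(parent);
        return 0;
    }
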
/arch/powerpc/crypto/
aes-spe-glue.c
324 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_encrypt() local
325 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_encrypt()
333 if (tail) { in ppc_xts_encrypt()
336 req->cryptlen - tail, req->iv); in ppc_xts_encrypt()
341 if (err || !tail) in ppc_xts_encrypt()
345 memcpy(b[1], b[0], tail); in ppc_xts_encrypt()
346 scatterwalk_map_and_copy(b[0], req->src, offset + AES_BLOCK_SIZE, tail, 0); in ppc_xts_encrypt()
353 scatterwalk_map_and_copy(b[0], req->dst, offset, AES_BLOCK_SIZE + tail, 1); in ppc_xts_encrypt()
362 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_decrypt() local
363 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_decrypt()
[all …]
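
The ppc_xts_encrypt() hits are XTS ciphertext stealing: encrypt all full blocks first (the req->cryptlen - tail pass), then let the short final block steal the leading tail bytes of the last full block's ciphertext (lines 345-353 above). A sketch of that byte shuffling with an identity function standing in for the tweaked cipher, so it shows only the data movement, not real XTS:

    #include <stdio.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Identity stand-in for the tweaked block cipher. */
    static void cipher_block(unsigned char *blk)
    {
        (void)blk;
    }

    /* Caller guarantees len >= AES_BLOCK_SIZE, as the kernel code does. */
    static void xts_encrypt_sketch(unsigned char *dst,
                                   const unsigned char *src, size_t len)
    {
        size_t tail = len % AES_BLOCK_SIZE;            /* ragged bytes  */
        size_t offset = len - tail - AES_BLOCK_SIZE;   /* last full blk */
        unsigned char b[2][AES_BLOCK_SIZE];
        size_t i;

        /* First pass: every full block. */
        for (i = 0; i + AES_BLOCK_SIZE <= len; i += AES_BLOCK_SIZE) {
            memcpy(dst + i, src + i, AES_BLOCK_SIZE);
            cipher_block(dst + i);
        }
        if (!tail)
            return;

        /* Steal: the last full block's ciphertext donates its leading
         * tail bytes as the final short block; the ragged plaintext
         * takes their place and the rebuilt block is encrypted again. */
        memcpy(b[0], dst + offset, AES_BLOCK_SIZE);    /* last ciphertext */
        memcpy(b[1], b[0], tail);                      /* stolen bytes    */
        memcpy(b[0], src + offset + AES_BLOCK_SIZE, tail);
        cipher_block(b[0]);
        memcpy(dst + offset, b[0], AES_BLOCK_SIZE);    /* new last block  */
        memcpy(dst + offset + AES_BLOCK_SIZE, b[1], tail);
    }

    int main(void)
    {
        unsigned char in[37], out[37];

        memset(in, 'x', sizeof(in));
        xts_encrypt_sketch(out, in, sizeof(in));       /* 2 full + 5 tail */
        printf("wrote %zu bytes\n", sizeof(out));
        return 0;
    }
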
crct10dif-vpmsum_glue.c
30 unsigned int tail; in crct10dif_vpmsum() local
55 tail = len & VMX_ALIGN_MASK; in crct10dif_vpmsum()
56 if (tail) { in crct10dif_vpmsum()
58 crc = crc_t10dif_generic(crc, p, tail); in crct10dif_vpmsum()
crc32c-vpmsum_glue.c
26 unsigned int tail; in crc32c_vpmsum() local
48 tail = len & VMX_ALIGN_MASK; in crc32c_vpmsum()
49 if (tail) { in crc32c_vpmsum()
51 crc = __crc32c_le(crc, p, tail); in crc32c_vpmsum()
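
Both vpmsum glue files split the buffer the same way: the vector routine consumes the aligned body and the ragged len & VMX_ALIGN_MASK tail falls back to the generic byte-at-a-time CRC. A sketch under the assumption VMX_ALIGN = 16, with a toy update standing in for both the accelerated and the generic routines:

    #include <stdint.h>
    #include <stdio.h>

    #define VMX_ALIGN       16U
    #define VMX_ALIGN_MASK  (VMX_ALIGN - 1)

    /* Toy stand-in for crc_t10dif_generic()/__crc32c_le(). */
    static uint32_t crc_generic(uint32_t crc, const uint8_t *p, size_t len)
    {
        while (len--)
            crc = (crc << 1) ^ *p++;
        return crc;
    }

    static uint32_t crc_vpmsum(uint32_t crc, const uint8_t *p, size_t len)
    {
        size_t tail = len & VMX_ALIGN_MASK;   /* ragged remainder      */
        size_t body = len - tail;             /* multiple of VMX_ALIGN */

        /* The accelerated vpmsum routine would consume 'body' here. */
        crc = crc_generic(crc, p, body);
        if (tail)                             /* generic fallback      */
            crc = crc_generic(crc, p + body, tail);
        return crc;
    }

    int main(void)
    {
        static const uint8_t buf[37] = { 1, 2, 3 };

        printf("crc: %#x\n", crc_vpmsum(0, buf, sizeof(buf)));
        return 0;
    }
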
/arch/arm64/crypto/
aes-ce-ccm-glue.c
182 u32 tail = walk->nbytes % AES_BLOCK_SIZE; in ccm_crypt_fallback() local
187 if (nbytes == walk->total && tail > 0) { in ccm_crypt_fallback()
189 tail = 0; in ccm_crypt_fallback()
211 err = skcipher_walk_done(walk, tail); in ccm_crypt_fallback()
246 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt() local
249 tail = 0; in ccm_encrypt()
254 walk.nbytes - tail, ctx->key_enc, in ccm_encrypt()
258 err = skcipher_walk_done(&walk, tail); in ccm_encrypt()
304 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_decrypt() local
307 tail = 0; in ccm_decrypt()
[all …]
aes-neonbs-glue.c
277 int tail = req->cryptlen % (8 * AES_BLOCK_SIZE); in __xts_crypt() local
290 if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) { in __xts_crypt()
303 tail = 0; in __xts_crypt()
344 if (err || likely(!tail)) in __xts_crypt()
352 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in __xts_crypt()
/arch/alpha/lib/
clear_user.S
44 beq $1, $tail # .. e1 :
58 $tail:
59 bne $2, 1f # e1 : is there a tail to do?
74 and $1, 7, $2 # e1 : number of bytes in tail
/arch/sparc/kernel/
signal_32.c
230 void __user *tail; in setup_frame() local
251 tail = sf + 1; in setup_frame()
259 __siginfo_fpu_t __user *fp = tail; in setup_frame()
260 tail += sizeof(*fp); in setup_frame()
267 __siginfo_rwin_t __user *rwp = tail; in setup_frame()
268 tail += sizeof(*rwp); in setup_frame()
325 void __user *tail; in setup_rt_frame() local
343 tail = sf + 1; in setup_rt_frame()
355 __siginfo_fpu_t __user *fp = tail; in setup_rt_frame()
356 tail += sizeof(*fp); in setup_rt_frame()
[all …]
signal32.c
356 void __user *tail; in setup_frame32() local
385 tail = (sf + 1); in setup_frame32()
410 __siginfo_fpu_t __user *fp = tail; in setup_frame32()
411 tail += sizeof(*fp); in setup_frame32()
418 __siginfo_rwin_t __user *rwp = tail; in setup_frame32()
419 tail += sizeof(*rwp); in setup_frame32()
491 void __user *tail; in setup_rt_frame32() local
519 tail = (sf + 1); in setup_rt_frame32()
544 __siginfo_fpu_t __user *fp = tail; in setup_rt_frame32()
545 tail += sizeof(*fp); in setup_rt_frame32()
[all …]
signal_64.c
355 void __user *tail; in setup_rt_frame() local
380 tail = (sf + 1); in setup_rt_frame()
386 __siginfo_fpu_t __user *fpu_save = tail; in setup_rt_frame()
387 tail += sizeof(__siginfo_fpu_t); in setup_rt_frame()
394 __siginfo_rwin_t __user *rwin_save = tail; in setup_rt_frame()
395 tail += sizeof(__siginfo_rwin_t); in setup_rt_frame()
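
All of the sparc setup_*frame() hits use tail as a bump pointer: it starts just past the fixed signal frame (sf + 1) and each optional save area, FPU state or register windows, is carved off in turn. A userspace sketch with invented area types and sizes; the real code stores through __user pointers with copy_to_user():

    #include <stdio.h>

    struct sigframe   { long regs[32]; };    /* fixed part, invented size */
    struct fpu_state  { double f[32]; };
    struct rwin_state { long wins[16]; };

    int main(void)
    {
        static long stack[512];
        struct sigframe *sf = (struct sigframe *)stack;
        void *tail = sf + 1;                 /* first byte past the frame */

        struct fpu_state *fp = tail;         /* optional FPU save area    */
        tail = (unsigned char *)tail + sizeof(*fp);

        struct rwin_state *rwp = tail;       /* optional window save area */
        tail = (unsigned char *)tail + sizeof(*rwp);

        printf("frame %p, fpu %p, rwin %p, end %p\n",
               (void *)sf, (void *)fp, (void *)rwp, tail);
        return 0;
    }
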
/arch/s390/kernel/
perf_cpum_sf.c
75 unsigned long *tail; /* last sample-data-block-table */ member
197 unsigned long *new, *tail, *tail_prev = NULL; in realloc_sampling_buffer() local
199 if (!sfb->sdbt || !sfb->tail) in realloc_sampling_buffer()
202 if (!is_link_entry(sfb->tail)) in realloc_sampling_buffer()
210 tail = sfb->tail; in realloc_sampling_buffer()
215 if (sfb->sdbt != get_next_sdbt(tail)) { in realloc_sampling_buffer()
220 (unsigned long)tail); in realloc_sampling_buffer()
228 if (require_table_link(tail)) { in realloc_sampling_buffer()
236 *tail = (unsigned long)(void *) new + 1; in realloc_sampling_buffer()
237 tail_prev = tail; in realloc_sampling_buffer()
[all …]
/arch/um/drivers/
line.c
43 n = line->head - line->tail; in write_room()
98 line->tail = line->buffer; in buffer_data()
104 end = line->buffer + LINE_BUFSIZE - line->tail; in buffer_data()
107 memcpy(line->tail, buf, len); in buffer_data()
108 line->tail += len; in buffer_data()
112 memcpy(line->tail, buf, end); in buffer_data()
115 line->tail = line->buffer + len - end; in buffer_data()
134 if ((line->buffer == NULL) || (line->head == line->tail)) in flush_buffer()
137 if (line->tail < line->head) { in flush_buffer()
157 count = line->tail - line->head; in flush_buffer()
[all …]
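
The um line driver's head/tail form a classic ring buffer: tail is the write position, head the read position, and a write that hits the end wraps with two memcpys, exactly as in buffer_data() above. A minimal sketch of that wrapping write (the driver checks write_room() first; this sketch assumes the data fits):

    #include <stdio.h>
    #include <string.h>

    #define LINE_BUFSIZE 8   /* shrunk for illustration */

    struct line {
        char buffer[LINE_BUFSIZE];
        char *head;          /* next byte to consume */
        char *tail;          /* next byte to fill    */
    };

    static void buffer_data(struct line *line, const char *buf, size_t len)
    {
        size_t end = line->buffer + LINE_BUFSIZE - line->tail;

        if (len <= end) {                    /* fits before the end   */
            memcpy(line->tail, buf, len);
            line->tail += len;
        } else {                             /* split around the wrap */
            memcpy(line->tail, buf, end);
            memcpy(line->buffer, buf + end, len - end);
            line->tail = line->buffer + len - end;
        }
    }

    int main(void)
    {
        struct line l;

        l.head = l.tail = l.buffer;
        buffer_data(&l, "abcdef", 6);
        buffer_data(&l, "ghij", 4);          /* wraps: "ij" at the front */
        printf("%.8s\n", l.buffer);          /* ijcdefgh */
        return 0;
    }
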
/arch/sparc/boot/
piggyback.c
184 int image, tail; in main() local
255 if ((tail = open(argv[4], O_RDONLY)) < 0) in main()
257 while ((i = read(tail, buffer, 1024)) > 0) in main()
262 if (close(tail) < 0) in main()
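
piggyback's tail is just a file descriptor: the tail image is concatenated onto the output through a bounce buffer. A sketch of the same loop with the error handling compressed to exits; file names come from the command line here rather than the tool's fixed argv slots:

    #include <fcntl.h>
    #include <stdlib.h>
    #include <unistd.h>

    static void append_tail(int image, const char *path)
    {
        char buffer[1024];
        ssize_t i;
        int tail;

        if ((tail = open(path, O_RDONLY)) < 0)
            exit(1);
        while ((i = read(tail, buffer, sizeof(buffer))) > 0)
            if (write(image, buffer, (size_t)i) != i)
                exit(1);
        if (close(tail) < 0)
            exit(1);
    }

    int main(int argc, char **argv)
    {
        int image;

        if (argc != 3)
            return 1;
        if ((image = open(argv[1], O_WRONLY | O_APPEND)) < 0)
            return 1;
        append_tail(image, argv[2]);
        return close(image) < 0;
    }
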
/arch/arm/crypto/
aes-ce-glue.c
401 u8 __aligned(8) tail[AES_BLOCK_SIZE]; in ctr_encrypt()
412 ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx), in ctr_encrypt()
415 crypto_xor_cpy(tdst, tsrc, tail, nbytes); in ctr_encrypt()
449 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt() local
460 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_encrypt()
476 tail = 0; in xts_encrypt()
493 if (err || likely(!tail)) in xts_encrypt()
500 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_encrypt()
521 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt() local
532 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_decrypt()
[all …]
aes-neonbs-glue.c
350 int tail = req->cryptlen % AES_BLOCK_SIZE; in __xts_crypt() local
359 if (unlikely(tail)) { in __xts_crypt()
365 req->cryptlen - tail, req->iv); in __xts_crypt()
377 int reorder_last_tweak = !encrypt && tail > 0; in __xts_crypt()
393 if (err || likely(!tail)) in __xts_crypt()
399 memcpy(buf + AES_BLOCK_SIZE, buf, tail); in __xts_crypt()
400 scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0); in __xts_crypt()
412 AES_BLOCK_SIZE + tail, 1); in __xts_crypt()
/arch/arm64/kvm/hyp/nvhe/
page_alloc.c
191 struct hyp_page *tail = p + i; in hyp_split_page() local
193 tail->order = 0; in hyp_split_page()
194 hyp_set_page_refcounted(tail); in hyp_split_page()
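
hyp_split_page() converts one order-N page into 2^N independent order-0 pages by walking the constituent struct hyp_page entries and marking each as refcounted. A freestanding sketch; the hyp_page fields here are a guess at the shape, not the real layout:

    #include <stdio.h>

    struct hyp_page {
        unsigned short order;
        unsigned short refcount;
    };

    static void split_page(struct hyp_page *p)
    {
        unsigned short order = p->order;
        unsigned int i;

        p->order = 0;                       /* head page becomes order-0 */
        for (i = 1; i < (1u << order); i++) {
            struct hyp_page *tail = p + i;  /* i-th constituent page */

            tail->order = 0;
            tail->refcount = 1;             /* hyp_set_page_refcounted() */
        }
    }

    int main(void)
    {
        struct hyp_page pages[4] = { { 2, 1 } };  /* one order-2 page */

        split_page(pages);
        printf("page 3: order=%u refcount=%u\n",
               pages[3].order, pages[3].refcount);
        return 0;
    }
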
/arch/arm/tools/
syscallnr.sh
11 grep -E "^[0-9A-Fa-fXx]+[[:space:]]+" "$in" | sort -n | tail -n1 | (
/arch/arm64/lib/
strlen.S
146 L(tail):
177 bne L(tail)
187 b L(tail)
/arch/sh/mm/
pmb.c
658 struct pmb_entry *tail; in pmb_merge() local
663 tail = head->link; in pmb_merge()
664 while (tail) { in pmb_merge()
665 span += tail->size; in pmb_merge()
673 if (!tail->link) in pmb_merge()
676 tail = tail->link; in pmb_merge()
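
pmb_merge() walks the link chain from head to its last entry, accumulating size into span so it knows how large a single merged mapping must be. A generic sketch of that walk with pmb_entry reduced to the two fields the excerpt uses:

    #include <stdio.h>

    struct pmb_entry {
        unsigned long size;
        struct pmb_entry *link;  /* next entry in the chain */
    };

    static unsigned long chain_span(struct pmb_entry *head)
    {
        unsigned long span = head->size;
        struct pmb_entry *tail = head->link;

        while (tail) {           /* leaves tail at the last entry */
            span += tail->size;
            if (!tail->link)
                break;
            tail = tail->link;
        }
        return span;
    }

    int main(void)
    {
        struct pmb_entry c = { 16, NULL };
        struct pmb_entry b = { 32, &c };
        struct pmb_entry a = { 64, &b };

        printf("span: %lu\n", chain_span(&a));  /* 112 */
        return 0;
    }
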
/arch/arm/mach-omap1/
ams-delta-fiq-handler.S
196 ldr r10, [r9, #BUF_TAIL_OFFSET] @ get buffer tail offset
202 add r12, r12, r10, LSL #2 @ calculate buffer tail address
204 str r8, [r12] @ append it to the buffer tail
206 add r10, r10, #1 @ increment buffer tail offset
/arch/riscv/kernel/
head.S
180 tail smp_callin
254 tail .Lsecondary_park
330 tail start_kernel
358 tail secondary_start_common
