#include <string.h>
#include <stdint.h>

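/* Under LOSCFG_KERNEL_LMS (LiteOS's Lite Memory Sanitizer), this
 * body is built as __memset with address sanitization disabled;
 * presumably an instrumented wrapper checks the destination range
 * and then forwards to it. */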
#ifdef LOSCFG_KERNEL_LMS
__attribute__((no_sanitize_address)) void *__memset(void *dest, int c, size_t n)
#else
void *memset(void *dest, int c, size_t n)
#endif
{
	unsigned char *s = dest;
	size_t k;

	/* Fill head and tail with minimal branching. Each
	 * conditional ensures that all the subsequently used
	 * offsets are well-defined and in the dest region. */
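	/* The head and tail writes may overlap; e.g. for n == 7 the
	 * head stores hit s[0..3] and the tail stores hit s[6..3],
	 * so s[3] is written twice rather than branching on the
	 * exact length. */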

	if (!n) return dest;
	s[0] = c;
	s[n-1] = c;
	if (n <= 2) return dest;
	s[1] = c;
	s[2] = c;
	s[n-2] = c;
	s[n-3] = c;
	if (n <= 6) return dest;
	s[3] = c;
	s[n-4] = c;
	if (n <= 8) return dest;

	/* Advance pointer to align it at a 4-byte boundary,
	 * and truncate n to a multiple of 4. The previous code
	 * already took care of any head/tail that get cut off
	 * by the alignment. */

	k = -(uintptr_t)s & 3;
	s += k;
	n -= k;
	n &= -4;
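	/* -(uintptr_t)s & 3 is the distance (0..3) to the next
	 * 4-byte boundary (an address ending in binary ...01 gives
	 * k == 3), and n &= -4 clears the low two bits, rounding n
	 * down to a multiple of 4. */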

#ifdef __GNUC__
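	/* __may_alias__ exempts these types from strict-aliasing
	 * rules, so the word-sized stores below may legally alias
	 * the unsigned char destination. */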
	typedef uint32_t __attribute__((__may_alias__)) u32;
	typedef uint64_t __attribute__((__may_alias__)) u64;

	u32 c32 = ((u32)-1)/255 * (unsigned char)c;
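	/* ((u32)-1)/255 == 0x01010101, so c32 is the fill byte
	 * replicated into all four byte lanes; e.g. c == 0xab
	 * yields c32 == 0xabababab. */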

	/* In preparation to fill 32 bytes at a time, aligned on
	 * an 8-byte boundary, fill head/tail up to 28 bytes each.
	 * As in the initial byte-based head/tail fill, each
	 * conditional below ensures that the subsequent offsets
	 * are valid (e.g. !(n<=24) implies n>=28, since n is now
	 * a multiple of 4). */

	*(u32 *)(s+0) = c32;
	*(u32 *)(s+n-4) = c32;
	if (n <= 8) return dest;
	*(u32 *)(s+4) = c32;
	*(u32 *)(s+8) = c32;
	*(u32 *)(s+n-12) = c32;
	*(u32 *)(s+n-8) = c32;
	if (n <= 24) return dest;
	*(u32 *)(s+12) = c32;
	*(u32 *)(s+16) = c32;
	*(u32 *)(s+20) = c32;
	*(u32 *)(s+24) = c32;
	*(u32 *)(s+n-28) = c32;
	*(u32 *)(s+n-24) = c32;
	*(u32 *)(s+n-20) = c32;
	*(u32 *)(s+n-16) = c32;

	/* Align to a multiple of 8 so we can fill 64 bits at a time,
	 * and avoid writing the same bytes twice as much as is
	 * practical without introducing additional branching. */

	k = 24 + ((uintptr_t)s & 4);
	s += k;
	n -= k;
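	/* s is 4-byte aligned here, so ((uintptr_t)s & 4) is the only
	 * bit left deciding 8-byte alignment: k is 24 when s is
	 * already 8-aligned and 28 otherwise, and every byte skipped
	 * was filled by the u32 stores above. */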

	/* If this loop is reached, 28 tail bytes have already been
	 * filled, so any remainder when n drops below 32 can be
	 * safely ignored. */

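	/* c64 duplicates the 32-bit pattern into both halves of a
	 * 64-bit word; e.g. c32 == 0xabababab gives
	 * c64 == 0xabababababababab. */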
	u64 c64 = c32 | ((u64)c32 << 32);
	for (; n >= 32; n-=32, s+=32) {
		*(u64 *)(s+0) = c64;
		*(u64 *)(s+8) = c64;
		*(u64 *)(s+16) = c64;
		*(u64 *)(s+24) = c64;
	}
#else
	/* Pure C fallback with no aliasing violations. */
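	/* n was rounded down to a multiple of 4 above; the up-to-3
	 * tail bytes cut off by that rounding were already set by
	 * the initial byte-wise tail fill. */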
	for (; n; n--, s++) *s = c;
#endif

	return dest;
}