
Searched refs:U64 (Results 1 – 25 of 196) sorted by relevance


/external/zstd/contrib/linux-kernel/
mem.h 36 typedef uint64_t U64; typedef
50 MEM_STATIC U64 MEM_read64(const void* memPtr);
55 MEM_STATIC void MEM_write64(void* memPtr, U64 value);
61 MEM_STATIC U64 MEM_readLE64(const void* memPtr);
67 MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64);
72 MEM_STATIC U64 MEM_readBE64(const void* memPtr);
76 MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64);
81 MEM_STATIC U64 MEM_swap64(U64 in);
118 MEM_STATIC U64 MEM_read64(const void *memPtr) in MEM_read64()
120 return get_unaligned((const U64 *)memPtr); in MEM_read64()
[all …]
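
The kernel-tree copy of mem.h shown above declares the 64-bit accessors and implements MEM_read64() via get_unaligned(). As a point of reference, a minimal portable sketch of the same helper (hypothetical demo_* names, not the zstd sources) does the unaligned load through memcpy, which compilers turn into a single move where the target allows it:

#include <stdint.h>
#include <string.h>

typedef uint64_t U64;

/* unaligned 64-bit load: memcpy imposes no alignment requirement */
static U64 demo_read64(const void* memPtr)
{
    U64 val;
    memcpy(&val, memPtr, sizeof(val));
    return val;
}

/* unaligned 64-bit store, the mirror of the above */
static void demo_write64(void* memPtr, U64 value)
{
    memcpy(memPtr, &value, sizeof(value));
}
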
/external/zstd/lib/common/
mem.h 58 typedef uint64_t U64; typedef
78 typedef unsigned long long U64;
94 MEM_STATIC U64 MEM_read64(const void* memPtr);
99 MEM_STATIC void MEM_write64(void* memPtr, U64 value);
105 MEM_STATIC U64 MEM_readLE64(const void* memPtr);
111 MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64);
116 MEM_STATIC U64 MEM_readBE64(const void* memPtr);
120 MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64);
125 MEM_STATIC U64 MEM_swap64(U64 in);
168 MEM_STATIC U64 MEM_read64(const void* memPtr) { return *(const U64*) memPtr; } in MEM_read64()
[all …]
xxhash.c 108 static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; } in XXH_read64()
114 typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign;
117 static U64 XXH_read64(const void* ptr) { return ((const unalign*)ptr)->u64; } in XXH_read64()
132 static U64 XXH_read64(const void* memPtr) in XXH_read64()
134 U64 val; in XXH_read64()
175 static U64 XXH_swap64 (U64 x) in XXH_swap64()
224 FORCE_INLINE_TEMPLATE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment a… in XXH_readLE64_align()
229 return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr); in XXH_readLE64_align()
232 FORCE_INLINE_TEMPLATE U64 XXH_readLE64(const void* ptr, XXH_endianess endian) in XXH_readLE64()
237 static U64 XXH_readBE64(const void* ptr) in XXH_readBE64()
[all …]
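
The bundled xxhash.c above reads 64-bit words with one of three strategies (plain dereference, a packed union, or memcpy) and byte-swaps the result on big-endian targets via XXH_swap64(). A generic shift-and-mask swap, roughly what the fallback path computes when no compiler builtin such as __builtin_bswap64 is used (hypothetical name, illustration only):

#include <stdint.h>

static uint64_t demo_swap64(uint64_t x)
{
    /* move each byte to its mirrored position */
    return ((x << 56) & 0xff00000000000000ULL) |
           ((x << 40) & 0x00ff000000000000ULL) |
           ((x << 24) & 0x0000ff0000000000ULL) |
           ((x <<  8) & 0x000000ff00000000ULL) |
           ((x >>  8) & 0x00000000ff000000ULL) |
           ((x >> 24) & 0x0000000000ff0000ULL) |
           ((x >> 40) & 0x000000000000ff00ULL) |
           ((x >> 56) & 0x00000000000000ffULL);
}
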
/external/angle/src/common/third_party/xxhash/
xxhash.c 592 typedef uint64_t U64; typedef
595 typedef unsigned long long U64; typedef
603 static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; } in XXH_read64()
609 typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
610 static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; } in XXH_read64()
618 static U64 XXH_read64(const void* memPtr) in XXH_read64()
620 U64 val; in XXH_read64()
632 static U64 XXH_swap64 (U64 x) in XXH_swap64()
645 FORCE_INLINE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment align) in XXH_readLE64_align()
650 return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr); in XXH_readLE64_align()
[all …]
/external/lz4/lib/
xxhash.c 592 typedef uint64_t U64; typedef
595 typedef unsigned long long U64; typedef
603 static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; } in XXH_read64()
609 typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
610 static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; } in XXH_read64()
618 static U64 XXH_read64(const void* memPtr) in XXH_read64()
620 U64 val; in XXH_read64()
632 static U64 XXH_swap64 (U64 x) in XXH_swap64()
645 FORCE_INLINE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment align) in XXH_readLE64_align()
650 return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr); in XXH_readLE64_align()
[all …]
/external/vulkan-validation-layers/layers/
xxhash.c 528 typedef uint64_t U64; typedef
530 …typedef unsigned long long U64; /* if your compiler doesn't support unsigned long long, replace … typedef
538 static U64 XXH_read64(const void* memPtr) { return *(const U64*) memPtr; } in XXH_read64()
544 typedef union { U32 u32; U64 u64; } __attribute__((packed)) unalign64;
545 static U64 XXH_read64(const void* ptr) { return ((const unalign64*)ptr)->u64; } in XXH_read64()
553 static U64 XXH_read64(const void* memPtr) in XXH_read64()
555 U64 val; in XXH_read64()
567 static U64 XXH_swap64 (U64 x) in XXH_swap64()
580 FORCE_INLINE U64 XXH_readLE64_align(const void* ptr, XXH_endianess endian, XXH_alignment align) in XXH_readLE64_align()
585 return endian==XXH_littleEndian ? *(const U64*)ptr : XXH_swap64(*(const U64*)ptr); in XXH_readLE64_align()
[all …]
/external/lz4/programs/
util.h 63 typedef uint64_t U64; typedef
71 typedef unsigned long long U64;
150 UTIL_STATIC U64 UTIL_getSpanTimeMicro(UTIL_time_t clockStart, UTIL_time_t clockEnd) in UTIL_getSpanTimeMicro()
161 UTIL_STATIC U64 UTIL_getSpanTimeNano(UTIL_time_t clockStart, UTIL_time_t clockEnd) in UTIL_getSpanTimeNano()
176 typedef U64 UTIL_time_t;
178 UTIL_STATIC U64 UTIL_getSpanTimeMicro(UTIL_time_t clockStart, UTIL_time_t clockEnd) in UTIL_getSpanTimeMicro()
186 return (((clockEnd - clockStart) * (U64)rate.numer) / ((U64)rate.denom)) / 1000ULL; in UTIL_getSpanTimeMicro()
188 UTIL_STATIC U64 UTIL_getSpanTimeNano(UTIL_time_t clockStart, UTIL_time_t clockEnd) in UTIL_getSpanTimeNano()
196 return ((clockEnd - clockStart) * (U64)rate.numer) / ((U64)rate.denom); in UTIL_getSpanTimeNano()
222 UTIL_STATIC U64 UTIL_getSpanTimeMicro(UTIL_time_t begin, UTIL_time_t end) in UTIL_getSpanTimeMicro()
[all …]
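
UTIL_getSpanTimeMicro/Nano above come in several platform-specific flavors (the mach_timebase rate.numer/denom scaling is visible in the snippet). A rough sketch of the POSIX clock_gettime() flavor, assuming UTIL_time_t is a struct timespec (hypothetical name, not the lz4 sources):

#include <stdint.h>
#include <time.h>

typedef uint64_t U64;

/* elapsed nanoseconds between two clock_gettime() samples; the unsigned
 * arithmetic stays correct even when end.tv_nsec < begin.tv_nsec */
static U64 demo_spanTimeNano(struct timespec begin, struct timespec end)
{
    U64 nano = (U64)(end.tv_sec - begin.tv_sec) * 1000000000ULL;
    nano += (U64)end.tv_nsec - (U64)begin.tv_nsec;
    return nano;
}
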
bench.c 223 { U64 fastestC = (U64)(-1LL), fastestD = (U64)(-1LL); in BMK_benchMem()
224 U64 const crcOrig = XXH64(srcBuffer, srcSize, 0); in BMK_benchMem()
226 U64 const maxTime = (g_nbSeconds * TIMELOOP_NANOSEC) + 100; in BMK_benchMem()
229 U64 totalCTime=0, totalDTime=0; in BMK_benchMem()
264 { U64 const clockSpan = UTIL_clockSpanNano(clockStart); in BMK_benchMem()
308 { U64 const clockSpan = UTIL_clockSpanNano(clockStart); in BMK_benchMem()
329 { U64 const crcCheck = XXH64(resultBuffer, srcSize, 0); in BMK_benchMem()
374 static size_t BMK_findMaxMem(U64 requiredMem) in BMK_findMaxMem()
434 U64 fileSize = UTIL_getFileSize(fileNamesTable[n]); in BMK_loadFiles()
464 U64 const totalSizeToLoad = UTIL_getTotalFileSize(fileNamesTable, nbFiles); in BMK_benchFileTable()
[all …]
/external/llvm-project/compiler-rt/test/builtins/Unit/
atomic_test.c 120 #define U64(value) ((uint64_t)(value)) macro
151 a64 = U64(value); in set_a_values()
161 b64 = U64(value); in set_b_values()
201 if (__atomic_load_8(&a64, model) != U64(V + m)) in test_loads()
245 __atomic_store_8(&a64, U64(V + m), model); in test_stores()
246 if (a64 != U64(V + m)) in test_stores()
302 if (__atomic_exchange_8(&a64, U64(V + m + 1), model) != U64(V + m)) in test_exchanges()
388 if (__atomic_compare_exchange_8(&a64, &b64, U64(V + m1), m_succ, m_fail)) in test_compare_exchanges()
390 if (a64 != U64(ONES) || b64 != U64(ONES)) in test_compare_exchanges()
392 if (!__atomic_compare_exchange_8(&a64, &b64, U64(V + m1), m_succ, m_fail)) in test_compare_exchanges()
[all …]
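
The U64() macro above is just a uint64_t cast used to build expected values for the sized atomic helpers (__atomic_load_8 and friends) that the test exercises. A minimal usage sketch with the equivalent generic GCC/Clang builtins __atomic_store_n/__atomic_load_n, assuming a GCC-compatible compiler (illustration only, not part of the test):

#include <stdint.h>
#include <stdio.h>

#define U64(value) ((uint64_t)(value))

static uint64_t a64;

int main(void)
{
    __atomic_store_n(&a64, U64(42), __ATOMIC_SEQ_CST);      /* atomic 64-bit store */
    uint64_t v = __atomic_load_n(&a64, __ATOMIC_SEQ_CST);   /* atomic 64-bit load  */
    printf("a64 = %llu\n", (unsigned long long)v);           /* prints 42 */
    return 0;
}
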
/external/zstd/lib/compress/
zstd_compress_internal.h 191 U64 hashPower; /* Used to compute the rolling hash.
568 return _BitScanForward64( &r, (U64)val ) ? (unsigned)(r >> 3) : 0; in ZSTD_NbCommonBytes()
571 return (__builtin_ctzll((U64)val) >> 3); in ZSTD_NbCommonBytes()
581 return DeBruijnBytePos[((U64)((val & -(long long)val) * 0x0218A392CDABBD3FULL)) >> 58]; in ZSTD_NbCommonBytes()
604 return _BitScanReverse64(&r, (U64)val) ? (unsigned)(r >> 3) : 0; in ZSTD_NbCommonBytes()
684 static const U64 prime5bytes = 889523592379ULL;
685 static size_t ZSTD_hash5(U64 u, U32 h) { return (size_t)(((u << (64-40)) * prime5bytes) >> (64-h))… in ZSTD_hash5()
688 static const U64 prime6bytes = 227718039650203ULL;
689 static size_t ZSTD_hash6(U64 u, U32 h) { return (size_t)(((u << (64-48)) * prime6bytes) >> (64-h))… in ZSTD_hash6()
692 static const U64 prime7bytes = 58295818150454627ULL;
[all …]
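
ZSTD_hash5/ZSTD_hash6 above hash the low 5 or 6 bytes of a 64-bit word by shifting them into the top bits, multiplying by a large odd constant, and keeping the top h bits of the product. A standalone restatement of the 5-byte variant using the prime visible in the snippet (the demo_ name is hypothetical):

#include <stddef.h>
#include <stdint.h>

static const uint64_t prime5bytes = 889523592379ULL;

/* hash the low 5 bytes of u into an h-bit value */
static size_t demo_hash5(uint64_t u, unsigned h)
{
    return (size_t)(((u << (64 - 40)) * prime5bytes) >> (64 - h));
}
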
fse_compress.c 418 { U64 const vStepLog = 62 - tableLog; in FSE_normalizeM2()
419 U64 const mid = (1ULL << (vStepLog-1)) - 1; in FSE_normalizeM2()
420 U64 const rStep = ZSTD_div64((((U64)1<<vStepLog) * ToDistribute) + mid, (U32)total); /* scale on… in FSE_normalizeM2()
421 U64 tmpTotal = mid; in FSE_normalizeM2()
424 U64 const end = tmpTotal + (count[s] * rStep); in FSE_normalizeM2()
449 U64 const scale = 62 - tableLog; in FSE_normalizeCount()
450 U64 const step = ZSTD_div64((U64)1<<62, (U32)total); /* <== here, one division ! */ in FSE_normalizeCount()
451 U64 const vStep = 1ULL<<(scale-20); in FSE_normalizeCount()
467 U64 restToBeat = vStep * rtbTable[proba]; in FSE_normalizeCount()
468 proba += (count[s]*step) - ((U64)proba<<scale) > restToBeat; in FSE_normalizeCount()
zstd_ldm.c 61 static U32 ZSTD_ldm_getSmallHash(U64 value, U32 numBits) in ZSTD_ldm_getSmallHash()
70 static U32 ZSTD_ldm_getChecksum(U64 hash, U32 numBitsToDiscard) in ZSTD_ldm_getChecksum()
82 static U32 ZSTD_ldm_getTag(U64 hash, U32 hbits, U32 numTagBits) in ZSTD_ldm_getTag()
123 U64 const rollingHash, in ZSTD_ldm_makeEntryAndInsertByTag()
221 static U64 ZSTD_ldm_fillLdmHashTable(ldmState_t* state, in ZSTD_ldm_fillLdmHashTable()
222 U64 lastHash, const BYTE* lastHashed, in ZSTD_ldm_fillLdmHashTable()
226 U64 rollingHash = lastHash; in ZSTD_ldm_fillLdmHashTable()
247 U64 startingHash = ZSTD_rollingHash_compute(ip, params->minMatchLength); in ZSTD_ldm_fillHashTable()
277 U64 const hashPower = ldmState->hashPower; in ZSTD_ldm_generateSequences_internal()
299 U64 rollingHash = 0; in ZSTD_ldm_generateSequences_internal()
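
The long-distance matcher above maintains a rolling hash (the rollingHash and hashPower fields) so each new window position can be re-hashed in O(1). The zstd constants and character offsets are not reproduced here; the sketch below is a generic Rabin-Karp style rolling hash showing the same mechanism (hypothetical names and prime):

#include <stddef.h>
#include <stdint.h>

#define DEMO_PRIME 0x9E3779B185EBCA87ULL     /* arbitrary odd 64-bit multiplier */

/* hashPower = DEMO_PRIME^(windowLen-1), precomputed once per window length */
static uint64_t demo_hashPower(size_t windowLen)
{
    uint64_t p = 1;
    for (size_t i = 1; i < windowLen; i++) p *= DEMO_PRIME;
    return p;
}

/* hash of the initial window buf[0..windowLen-1] */
static uint64_t demo_hashInit(const uint8_t* buf, size_t windowLen)
{
    uint64_t h = 0;
    for (size_t i = 0; i < windowLen; i++) h = h * DEMO_PRIME + buf[i];
    return h;
}

/* slide the window one byte: drop `out`, append `in` */
static uint64_t demo_hashRoll(uint64_t h, uint8_t out, uint8_t in, uint64_t hashPower)
{
    h -= (uint64_t)out * hashPower;          /* remove the oldest byte's term */
    return h * DEMO_PRIME + in;              /* shift remaining terms, add new byte */
}
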
/external/tensorflow/tensorflow/compiler/tf2xla/kernels/
random_ops_util.cc 26 ConvertElementType(ConvertElementType(seed0, xla::U32), xla::U64); in GetU64FromS32Seeds()
28 ConvertElementType(ConvertElementType(seed1, xla::U32), xla::U64); in GetU64FromS32Seeds()
30 (u64_seed1 << ConstantR0WithType(seed0.builder(), xla::U64, 32)); in GetU64FromS32Seeds()
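
GetU64FromS32Seeds above widens two 32-bit seeds to xla::U64 and combines them, with seed0 in the low word and seed1 shifted into the high word. The plain C equivalent of that bit layout (hypothetical helper name):

#include <stdint.h>

static uint64_t demo_u64_from_two_s32(int32_t seed0, int32_t seed1)
{
    uint64_t lo = (uint32_t)seed0;   /* cast via uint32_t to avoid sign extension */
    uint64_t hi = (uint32_t)seed1;
    return lo | (hi << 32);          /* seed1 lands in the high 32 bits */
}
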
stateful_random_ops.cc 78 case xla::U64: in StatefulRngUniform()
99 case xla::U64: in StatefulRngUniformFullInt()
143 xla::Reshape(xla::Slice(var, {0}, {kStateSize}, {1}), {}), xla::U64); in StateAndKeyFromVariable()
146 xla::U64); in StateAndKeyFromVariable()
151 BitcastConvertType(xla::Slice(var, {0}, {kStateSize}, {1}), xla::U64); in StateAndKeyFromVariable()
154 xla::U64), in StateAndKeyFromVariable()
414 delta = delta * ConstantR0WithType(delta.builder(), xla::U64, 256); in IncreaseCounter()
463 delta = BitcastConvertType(delta, xla::U64); in Compile()
/external/rust/crates/protobuf/src/reflect/
value.rs 37 ReflectValueRef::U64(v) => ReflectValueRef::U64(v), in as_ref_copy()
58 ReflectValueRef::U64(*self) in as_ref()
145 U64(u64), enumerator
173 ReflectValueRef::U64(v) => v != 0, in is_non_zero()
repeated.rs 115 U64(&'a [u64]), enumerator
132 ReflectRepeatedRef::U64(ref r) => r.len(), in len()
149 ReflectRepeatedRef::U64(ref r) => ReflectValueRef::U64(r[index]), in get()
/external/rust/crates/bindgen/src/ir/
int.rs 70 U64, enumerator
97 WChar | U32 | U64 | U128 => false, in is_signed()
117 U64 | I64 => 8, in known_size()
/external/zstd/programs/
util.h 146 U64 UTIL_getFileSizeStat(const stat_t* statbuf);
168 #define UTIL_FILESIZE_UNKNOWN ((U64)(-1))
169 U64 UTIL_getFileSize(const char* infilename);
170 U64 UTIL_getTotalFileSize(const char* const * fileNamesTable, unsigned nbFiles);
benchzstd.c 74 static const U64 g_refreshRate = SEC_TO_MICRO / 6;
331 U64 totalDSize64 = 0; in BMK_benchMemAdvancedNoAlloc()
334 U64 const fSize64 = ZSTD_findDecompressedSize(srcPtr, fileSizes[fileNb]); in BMK_benchMemAdvancedNoAlloc()
340 assert((U64)decodedSize == totalDSize64); /* check overflow */ in BMK_benchMemAdvancedNoAlloc()
393 { U64 const crcOrig = (adv->mode == BMK_decodeOnly) ? 0 : XXH64(srcBuffer, srcSize, 0); in BMK_benchMemAdvancedNoAlloc()
453 … newResult.cSpeed = (U64)((double)srcSize * TIMELOOP_NANOSEC / cResult.nanoSecPerRun); in BMK_benchMemAdvancedNoAlloc()
477 U64 const newDSpeed = (U64)((double)srcSize * TIMELOOP_NANOSEC / dResult.nanoSecPerRun); in BMK_benchMemAdvancedNoAlloc()
497 U64 const crcCheck = XXH64(resultBuffer, srcSize, 0); in BMK_benchMemAdvancedNoAlloc()
727 static size_t BMK_findMaxMem(U64 requiredMem) in BMK_findMaxMem()
756 U64 fileSize = UTIL_getFileSize(fileNamesTable[n]); /* last file may be shortened */ in BMK_loadFiles()
[all …]
dibio.c 60 static const U64 g_refreshRate = SEC_TO_MICRO / 6;
118 U64 const chunkSize = targetChunkSize ? MIN(targetChunkSize, fs64) : fs64; in DiB_loadFiles()
228 U64 totalSizeToLoad;
244 U64 const fileSize = UTIL_getFileSize(fileNamesTable[n]); in DiB_fileStats()
245 U64 const srcSize = (fileSize == UTIL_FILESIZE_UNKNOWN) ? 0 : fileSize; in DiB_fileStats()
247 U64 const chunkToLoad = chunkSize ? MIN(chunkSize, srcSize) : srcSize; in DiB_fileStats()
/external/rust/cxx/syntax/
atom.rs 12 U64, enumerator
38 "u64" => Some(U64), in from_str()
69 U64 => "u64", in as_ref()
/external/tensorflow/tensorflow/compiler/xla/client/lib/
prng.cc 124 XlaOp const32 = ConstantR0WithType(builder, U64, 32); in Uint64ToUint32s()
133 return ConvertElementType(u32s[0], U64) | in Uint32sToUint64()
134 ShiftLeft(ConvertElementType(u32s[1], U64), in Uint32sToUint64()
135 ConstantR0WithType(builder, U64, 32)); in Uint32sToUint64()
143 auto u64_shape = ShapeUtil::MakeShape(U64, shape.dimensions()); in GetThreeFryInputsAndUpdatedState()
285 ConvertElementType(x, U64) * ConstantR0<uint64>(x.builder(), k); in Philox4x32()
318 XlaOp key0 = ConvertElementType(key[0], U64); in ScramblePhiloxKey()
319 XlaOp key1 = ConvertElementType(key[1], U64); in ScramblePhiloxKey()
380 XlaOp iota = Iota(builder, U64, n); in GetPhiloxInputsAndUpdatedState()
474 (value_type == F64 && bit_type == U64)); in ConvertRandomBitsToUniformFloatingPoint()
[all …]
/external/skqp/third_party/skcms/src/
Transform_inl.h 191 SI U64 swap_endian_16x4(const U64& rgba) { in swap_endian_16x4()
437 SI void gather_48(const uint8_t* p, I32 ix, U64* v) { in gather_48()
449 *v = U64{ in gather_48()
453 *v = U64{ in gather_48()
550 U64 rgb; in sample_clut_16()
751 U64 px = load<U64>(rgba); in exec_ops()
791 U64 px = swap_endian_16x4(load<U64>(rgba)); in exec_ops()
830 U64 px = load<U64>(rgba); in exec_ops()
1072 U64 px = cast<U64>(to_fixed(r * 65535)) << 0 in exec_ops()
1073 | cast<U64>(to_fixed(g * 65535)) << 16 in exec_ops()
[all …]
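
The exec_ops() snippets above load and store 16-bit-per-channel pixels as U64 lanes, with each channel scaled to 0..65535 and shifted into its own 16-bit field. A scalar sketch of that packing step, assuming the blue and alpha lanes follow at bits 32 and 48 (skcms itself works on SIMD vectors and rounds via to_fixed(); names here are hypothetical):

#include <stdint.h>

/* pack four [0,1] float channels into one 16-bit-per-channel pixel */
static uint64_t demo_pack_rgba16(float r, float g, float b, float a)
{
    uint64_t R = (uint64_t)(r * 65535.0f + 0.5f);
    uint64_t G = (uint64_t)(g * 65535.0f + 0.5f);
    uint64_t B = (uint64_t)(b * 65535.0f + 0.5f);
    uint64_t A = (uint64_t)(a * 65535.0f + 0.5f);
    return (R << 0) | (G << 16) | (B << 32) | (A << 48);
}
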
/external/zstd/tests/
longmatch.c 61 U64 compressed = 0; in main()
62 const U64 toCompress = ((U64)1) << 33; in main()
/external/zstd/zlibWrapper/examples/
zwrapbench.c 200 { U64 fastestC = (U64)(-1LL), fastestD = (U64)(-1LL); in BMK_benchMem()
201 U64 const crcOrig = XXH64(srcBuffer, srcSize, 0); in BMK_benchMem()
203 U64 const maxTime = (g_nbIterations * TIMELOOP_MICROSEC) + 100; in BMK_benchMem()
204 U64 totalCTime=0, totalDTime=0; in BMK_benchMem()
216 U64 clockLoop = g_nbIterations ? TIMELOOP_MICROSEC : 1; in BMK_benchMem()
364 { U64 const clockSpan = UTIL_clockSpanMicro(clockStart); in BMK_benchMem()
506 { U64 const clockSpan = UTIL_clockSpanMicro(clockStart); in BMK_benchMem()
519 { U64 const crcCheck = XXH64(resultBuffer, srcSize, 0); in BMK_benchMem()
566 static size_t BMK_findMaxMem(U64 requiredMem) in BMK_findMaxMem()
685 U64 fileSize = UTIL_getFileSize(fileNamesTable[n]); in BMK_loadFiles()
[all …]
