Searched refs:highest_significant_bit (Results 1 – 2 of 2) sorted by relevance
1198  const int highest_significant_bit = 63 - CountLeadingZeros(mantissa); in FPRound() local
1199  int shift = highest_significant_bit - mbits; in FPRound()
1213  if (shift > (highest_significant_bit + 1)) { in FPRound()
1231  mantissa &= ~(UINT64_C(1) << highest_significant_bit); in FPRound()
107  const int highest_significant_bit = 63 - CountLeadingZeros(src); in UFixedToDouble() local
108  const int64_t exponent = highest_significant_bit - fbits; in UFixedToDouble()
134  const int highest_significant_bit = 63 - CountLeadingZeros(src); in UFixedToFloat() local
135  const int32_t exponent = highest_significant_bit - fbits; in UFixedToFloat()
163  const int highest_significant_bit = 63 - CountLeadingZeros(src); in UFixedToFloat16() local
164  const int16_t exponent = highest_significant_bit - fbits; in UFixedToFloat16()