// SPDX-License-Identifier: GPL-2.0

#include <linux/bitops.h>
#include <linux/math.h>
#include <linux/string.h>
#include <linux/unaligned.h>

#ifdef CONFIG_VALGRIND
#include <valgrind/memcheck.h>
#endif

#include "errcode.h"
#include "varint.h"

/*
 * Varint format: the total encoded length is stored in unary in the low bits
 * of the first byte - (length - 1) one bits followed by a zero bit - with the
 * value occupying the remaining bits, little endian.  A first byte of 255
 * means the following 8 bytes hold the value as an unaligned little endian
 * u64, for 9 bytes total.
 */

/**
 * bch2_varint_encode - encode a variable length integer
 * @out: destination to encode to
 * @v: unsigned integer to encode
 * Returns: size in bytes of the encoded integer - at most 9 bytes
 */
int bch2_varint_encode(u8 *out, u64 v)
{
	unsigned bits = fls64(v|1);
	unsigned bytes = DIV_ROUND_UP(bits, 7);
	__le64 v_le;

	if (likely(bytes < 9)) {
		/*
		 * Make room for the unary length prefix, then set the low
		 * (bytes - 1) bits; bit (bytes - 1) stays clear to terminate
		 * the prefix:
		 */
		v <<= bytes;
		v |= ~(~0 << (bytes - 1));
		v_le = cpu_to_le64(v);
		memcpy(out, &v_le, bytes);
	} else {
		*out++ = 255;
		bytes = 9;
		put_unaligned_le64(v, out);
	}

	return bytes;
}

/**
 * bch2_varint_decode - decode a variable length integer
 * @in: varint to decode
 * @end: end of buffer to decode from
 * @out: on success, decoded integer
 * Returns: size in bytes of the decoded integer - or a negative error code on
 * failure (would have read past the end of the buffer)
 */
int bch2_varint_decode(const u8 *in, const u8 *end, u64 *out)
{
	/* Encoded length is the position of the first zero bit, plus one: */
	unsigned bytes = likely(in < end)
		? ffz(*in & 255) + 1
		: 1;
	u64 v;

	if (unlikely(in + bytes > end))
		return -BCH_ERR_varint_decode_error;

	if (likely(bytes < 9)) {
		__le64 v_le = 0;

		memcpy(&v_le, in, bytes);
		v = le64_to_cpu(v_le);
		v >>= bytes;
	} else {
		v = get_unaligned_le64(++in);
	}

	*out = v;
	return bytes;
}

/**
 * bch2_varint_encode_fast - fast version of bch2_varint_encode
 * @out: destination to encode to
 * @v: unsigned integer to encode
 * Returns: size in bytes of the encoded integer - at most 9 bytes
 *
 * This version assumes it's always safe to write 8 bytes to @out, even if the
 * encoded integer would be smaller.
 */
int bch2_varint_encode_fast(u8 *out, u64 v)
{
	unsigned bits = fls64(v|1);
	unsigned bytes = DIV_ROUND_UP(bits, 7);

	if (likely(bytes < 9)) {
		v <<= bytes;
		v |= ~(~0U << (bytes - 1));
	} else {
		*out++ = 255;
		bytes = 9;
	}

	put_unaligned_le64(v, out);
	return bytes;
}

/**
 * bch2_varint_decode_fast - fast version of bch2_varint_decode
 * @in: varint to decode
 * @end: end of buffer to decode from
 * @out: on success, decoded integer
 * Returns: size in bytes of the decoded integer - or a negative error code on
 * failure (would have read past the end of the buffer)
 *
 * This version assumes that it is safe to read at most 8 bytes past the end of
 * @end (we still return an error if the varint extends past @end).
 */
int bch2_varint_decode_fast(const u8 *in, const u8 *end, u64 *out)
{
#ifdef CONFIG_VALGRIND
	/*
	 * We read 8 bytes unconditionally, which may extend past the
	 * initialized part of the buffer; suppress the false positive:
	 */
	VALGRIND_MAKE_MEM_DEFINED(in, 8);
#endif
	u64 v = get_unaligned_le64(in);
	unsigned bytes = ffz(*in) + 1;

	if (unlikely(in + bytes > end))
		return -BCH_ERR_varint_decode_error;

	if (likely(bytes < 9)) {
		/*
		 * Drop the length prefix, then keep only the 7 * bytes value
		 * bits; the rest belong to whatever follows in the buffer:
		 */
		v >>= bytes;
		v &= ~(~0ULL << (7 * bytes));
	} else {
		v = get_unaligned_le64(++in);
	}

	*out = v;
	return bytes;
}
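
/*
 * A minimal round-trip sketch of how the slow encode/decode pair might be
 * exercised; varint_roundtrip_demo() is purely illustrative, not part of the
 * bcachefs API, and is compiled out.  It only assumes the usual kernel
 * helpers (ARRAY_SIZE, BUG_ON, U64_MAX):
 */
#if 0
static void varint_roundtrip_demo(void)
{
	static const u64 samples[] = { 0, 1, 127, 128, 300, U64_MAX };
	u8 buf[16];
	unsigned i;

	for (i = 0; i < ARRAY_SIZE(samples); i++) {
		u64 decoded;
		int enc_len = bch2_varint_encode(buf, samples[i]);
		int dec_len = bch2_varint_decode(buf, buf + enc_len, &decoded);

		/* e.g. 300 encodes as 0xb1 0x04: 2 byte length prefix, value 300 */
		BUG_ON(dec_len != enc_len);
		BUG_ON(decoded != samples[i]);
	}
}
#endif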