// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Unit tests and benchmarks for the CRC library functions
 *
 * Copyright 2024 Google LLC
 *
 * Author: Eric Biggers <ebiggers@google.com>
 */
#include <kunit/run-in-irq-context.h>
#include <kunit/test.h>
#include <linux/crc7.h>
#include <linux/crc16.h>
#include <linux/crc-t10dif.h>
#include <linux/crc32.h>
#include <linux/crc32c.h>
#include <linux/crc64.h>
#include <linux/prandom.h>
#include <linux/vmalloc.h>

/* Fixed PRNG seed so that test failures are reproducible. */
#define CRC_KUNIT_SEED			42
/* Maximum message length exercised by the correctness tests. */
#define CRC_KUNIT_MAX_LEN		16384
/* Number of random (init_crc, offset, len) cases tried per CRC variant. */
#define CRC_KUNIT_NUM_TEST_ITERS	1000

/* Shared pseudo-random state, seeded in crc_suite_init(). */
static struct rnd_state rng;
/* Random test data; vmalloc'ed so it is followed by a guard page. */
static u8 *test_buffer;
static size_t test_buflen;

/**
 * struct crc_variant - describes a CRC variant
 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
 *	polynomial coefficients in each byte), false if it's a "big endian" CRC
 *	(natural mapping between bits and polynomial coefficients in each byte)
 * @poly: The generator polynomial with the highest-order term omitted.
 *	  Bit-reversed if @le is true.
 * @func: The function to compute a CRC.  The type signature uses u64 so that it
 *	  can fit any CRC up to CRC-64.  The CRC is passed in, and is expected
 *	  to be returned in, the least significant bits of the u64.  The
 *	  function is expected to *not* invert the CRC at the beginning and end.
 */
struct crc_variant {
	int bits;
	bool le;
	u64 poly;
	u64 (*func)(u64 crc, const u8 *p, size_t len);
};

/* Return the next pseudo-random 32-bit value from the shared RNG stream. */
static u32 rand32(void)
{
	return prandom_u32_state(&rng);
}

/* Return the next pseudo-random 64-bit value (two rand32() draws). */
static u64 rand64(void)
{
	u32 n = rand32();

	return ((u64)n << 32) | rand32();
}

/* Mask covering the low v->bits bits, i.e. every valid bit of the CRC. */
static u64 crc_mask(const struct crc_variant *v)
{
	return (u64)-1 >> (64 - v->bits);
}

/* Reference implementation of any CRC variant: one message bit at a time. */
static u64 crc_ref(const struct crc_variant *v,
		   u64 crc, const u8 *p, size_t len)
{
	size_t i, j;

	for (i = 0; i < len; i++) {
		for (j = 0; j < 8; j++) {
			if (v->le) {
				/*
				 * LSB-first: XOR the next message bit into
				 * bit 0, then shift right, folding in the
				 * (bit-reversed) polynomial if the bit
				 * shifted out was 1.
				 */
				crc ^= (p[i] >> j) & 1;
				crc = (crc >> 1) ^ ((crc & 1) ? v->poly : 0);
			} else {
				/*
				 * MSB-first: XOR the next message bit into
				 * the top CRC bit, then shift left, folding
				 * in the polynomial if the top bit was 1.
				 * crc_mask() keeps the value within @bits.
				 */
				crc ^= (u64)((p[i] >> (7 - j)) & 1) <<
				       (v->bits - 1);
				if (crc & (1ULL << (v->bits - 1)))
					crc = ((crc << 1) ^ v->poly) &
					      crc_mask(v);
				else
					crc <<= 1;
			}
		}
	}
	return crc;
}

static int crc_suite_init(struct kunit_suite *suite)
{
	/*
	 * Allocate the test buffer using vmalloc() with a page-aligned length
	 * so that it is immediately followed by a guard page.  This allows
	 * buffer overreads to be detected, even in assembly code.
	 */
	test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
	test_buffer = vmalloc(test_buflen);
	if (!test_buffer)
		return -ENOMEM;

	/* Deterministic seed => deterministic data and test cases. */
	prandom_seed_state(&rng, CRC_KUNIT_SEED);
	prandom_bytes_state(&rng, test_buffer, test_buflen);
	return 0;
}

static void crc_suite_exit(struct kunit_suite *suite)
{
	vfree(test_buffer);
	test_buffer = NULL;
}

/* Generate a random initial CRC, biased toward the edge cases 0 and all-1s. */
static u64 generate_random_initial_crc(const struct crc_variant *v)
{
	switch (rand32() % 4) {
	case 0:
		return 0;
	case 1:
		return crc_mask(v); /* All 1 bits */
	default:
		return rand64() & crc_mask(v);
	}
}

/* Generate a random length, preferring small lengths.
*/ 127 static size_t generate_random_length(size_t max_length) 128 { 129 size_t len; 130 131 switch (rand32() % 3) { 132 case 0: 133 len = rand32() % 128; 134 break; 135 case 1: 136 len = rand32() % 3072; 137 break; 138 default: 139 len = rand32(); 140 break; 141 } 142 return len % (max_length + 1); 143 } 144 145 #define IRQ_TEST_DATA_LEN 512 146 #define IRQ_TEST_NUM_BUFFERS 3 /* matches max concurrency level */ 147 148 struct crc_irq_test_state { 149 const struct crc_variant *v; 150 u64 initial_crc; 151 u64 expected_crcs[IRQ_TEST_NUM_BUFFERS]; 152 atomic_t seqno; 153 }; 154 155 /* 156 * Compute the CRC of one of the test messages and verify that it matches the 157 * expected CRC from @state->expected_crcs. To increase the chance of detecting 158 * problems, cycle through multiple messages. 159 */ 160 static bool crc_irq_test_func(void *state_) 161 { 162 struct crc_irq_test_state *state = state_; 163 const struct crc_variant *v = state->v; 164 u32 i = (u32)atomic_inc_return(&state->seqno) % IRQ_TEST_NUM_BUFFERS; 165 u64 actual_crc = v->func(state->initial_crc, 166 &test_buffer[i * IRQ_TEST_DATA_LEN], 167 IRQ_TEST_DATA_LEN); 168 169 return actual_crc == state->expected_crcs[i]; 170 } 171 172 /* 173 * Test that if CRCs are computed in task, softirq, and hardirq context 174 * concurrently, then all results are as expected. 175 */ 176 static void crc_interrupt_context_test(struct kunit *test, 177 const struct crc_variant *v) 178 { 179 struct crc_irq_test_state state = { 180 .v = v, 181 .initial_crc = generate_random_initial_crc(v), 182 }; 183 184 for (int i = 0; i < IRQ_TEST_NUM_BUFFERS; i++) { 185 state.expected_crcs[i] = crc_ref( 186 v, state.initial_crc, 187 &test_buffer[i * IRQ_TEST_DATA_LEN], IRQ_TEST_DATA_LEN); 188 } 189 190 kunit_run_irq_test(test, crc_irq_test_func, 100000, &state); 191 } 192 193 /* Test that v->func gives the same CRCs as a reference implementation. 
 */
static void crc_test(struct kunit *test, const struct crc_variant *v)
{
	size_t i;

	for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
		u64 init_crc, expected_crc, actual_crc;
		size_t len, offset;

		init_crc = generate_random_initial_crc(v);
		len = generate_random_length(CRC_KUNIT_MAX_LEN);

		/* Generate a random offset. */
		if (rand32() % 2 == 0) {
			/* Use a random alignment mod 64 */
			offset = rand32() % 64;
			/* Clamp so that offset + len stays within the buffer */
			offset = min(offset, CRC_KUNIT_MAX_LEN - len);
		} else {
			/* Go up to the guard page, to catch buffer overreads */
			offset = test_buflen - len;
		}

		if (rand32() % 8 == 0)
			/* Refresh the data occasionally. */
			prandom_bytes_state(&rng, &test_buffer[offset], len);

		/*
		 * Compute the CRC, and verify that it equals the CRC computed
		 * by a simple bit-at-a-time reference implementation.
		 */
		expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len);
		actual_crc = v->func(init_crc, &test_buffer[offset], len);
		KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
				    "Wrong result with len=%zu offset=%zu",
				    len, offset);
	}

	/* Also verify results when CRCs are computed in IRQ context. */
	crc_interrupt_context_test(test, v);
}

/*
 * Benchmark @crc_func over a range of message lengths and report MB/s.
 * __always_inline so each caller gets a direct call to its crc_func.
 */
static __always_inline void
crc_benchmark(struct kunit *test,
	      u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
{
	static const size_t lens_to_test[] = {
		1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
	};
	size_t len, i, j, num_iters;
	/*
	 * The CRC value that this function computes in a series of calls to
	 * crc_func is never actually used, so use volatile to ensure that the
	 * computations are done as intended and don't all get optimized out.
	 */
	volatile u64 crc = 0;
	u64 t;

	if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
		kunit_skip(test, "not enabled");

	/* warm-up */
	for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
		crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);

	for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
		len = lens_to_test[i];
		KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
		/* ~10 MB of data per length; +128 amortizes per-call cost. */
		num_iters = 10000000 / (len + 128);
		/* Disable preemption so the timed region isn't interrupted. */
		preempt_disable();
		t = ktime_get_ns();
		for (j = 0; j < num_iters; j++)
			crc = crc_func(crc, test_buffer, len);
		t = ktime_get_ns() - t;
		preempt_enable();
		/* bytes/ns * 1000 == MB/s */
		kunit_info(test, "len=%zu: %llu MB/s\n",
			   len, div64_u64((u64)len * num_iters * 1000, t));
	}
}

#if IS_REACHABLE(CONFIG_CRC7)
static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	/*
	 * crc7_be() left-aligns the 7-bit CRC in a u8, whereas the test wants a
	 * right-aligned CRC (in a u64).  Convert between the conventions.
	 */
	return crc7_be(crc << 1, p, len) >> 1;
}

static const struct crc_variant crc_variant_crc7_be = {
	.bits = 7,
	.poly = 0x9,
	.func = crc7_be_wrapper,
};

static void crc7_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc7_be);
}

static void crc7_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc7_be_wrapper);
}
#endif /* CONFIG_CRC7 */

#if IS_REACHABLE(CONFIG_CRC16)
/* crc16() already matches the test convention; just widen to u64. */
static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc16(crc, p, len);
}

static const struct crc_variant crc_variant_crc16 = {
	.bits = 16,
	.le = true,
	.poly = 0xa001,
	.func = crc16_wrapper,
};

static void crc16_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc16);
}

static void crc16_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc16_wrapper);
}
#endif /* CONFIG_CRC16 */

#if IS_REACHABLE(CONFIG_CRC_T10DIF)
/* Adapts crc_t10dif_update() to the u64-based test signature. */
static u64 crc_t10dif_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc_t10dif_update(crc, p, len);
}

static const struct crc_variant crc_variant_crc_t10dif = {
	.bits = 16,
	.le = false,
	.poly = 0x8bb7,
	.func = crc_t10dif_wrapper,
};

static void crc_t10dif_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc_t10dif);
}

static void crc_t10dif_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc_t10dif_wrapper);
}
#endif /* CONFIG_CRC_T10DIF */

#if IS_REACHABLE(CONFIG_CRC32)

/* crc32_le */

static u64 crc32_le_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_le(crc, p, len);
}

static const struct crc_variant crc_variant_crc32_le = {
	.bits = 32,
	.le = true,
	.poly = 0xedb88320,
	.func = crc32_le_wrapper,
};

static void crc32_le_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_le);
}

static void crc32_le_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_le_wrapper);
}

/* crc32_be */

static u64 crc32_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_be(crc, p, len);
}

static const struct crc_variant crc_variant_crc32_be = {
	.bits = 32,
	.le = false,
	.poly = 0x04c11db7,
	.func = crc32_be_wrapper,
};

static void crc32_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_be);
}

static void crc32_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_be_wrapper);
}

/* crc32c */

static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32c(crc, p, len);
}

static const struct crc_variant crc_variant_crc32c = {
	.bits = 32,
	.le = true,
	.poly = 0x82f63b78,
	.func = crc32c_wrapper,
};

static void crc32c_test(struct kunit *test)
{
	crc_test(test,
		 &crc_variant_crc32c);
}

static void crc32c_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32c_wrapper);
}
#endif /* CONFIG_CRC32 */

#if IS_REACHABLE(CONFIG_CRC64)

/* crc64_be */

static u64 crc64_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc64_be(crc, p, len);
}

static const struct crc_variant crc_variant_crc64_be = {
	.bits = 64,
	.le = false,
	.poly = 0x42f0e1eba9ea3693,
	.func = crc64_be_wrapper,
};

static void crc64_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_be);
}

static void crc64_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_be_wrapper);
}

/* crc64_nvme */

static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
{
	/* The inversions that crc64_nvme() does have to be undone here. */
	return ~crc64_nvme(~crc, p, len);
}

static const struct crc_variant crc_variant_crc64_nvme = {
	.bits = 64,
	.le = true,
	.poly = 0x9a6c9329ac4bc9b5,
	.func = crc64_nvme_wrapper,
};

static void crc64_nvme_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_nvme);
}

static void crc64_nvme_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_nvme_wrapper);
}
#endif /* CONFIG_CRC64 */

/* Each configured CRC variant gets a correctness test and a benchmark. */
static struct kunit_case crc_test_cases[] = {
#if IS_REACHABLE(CONFIG_CRC7)
	KUNIT_CASE(crc7_be_test),
	KUNIT_CASE(crc7_be_benchmark),
#endif
#if IS_REACHABLE(CONFIG_CRC16)
	KUNIT_CASE(crc16_test),
	KUNIT_CASE(crc16_benchmark),
#endif
#if IS_REACHABLE(CONFIG_CRC_T10DIF)
	KUNIT_CASE(crc_t10dif_test),
	KUNIT_CASE(crc_t10dif_benchmark),
#endif
#if IS_REACHABLE(CONFIG_CRC32)
	KUNIT_CASE(crc32_le_test),
	KUNIT_CASE(crc32_le_benchmark),
	KUNIT_CASE(crc32_be_test),
	KUNIT_CASE(crc32_be_benchmark),
	KUNIT_CASE(crc32c_test),
	KUNIT_CASE(crc32c_benchmark),
#endif
#if IS_REACHABLE(CONFIG_CRC64)
	KUNIT_CASE(crc64_be_test),
	KUNIT_CASE(crc64_be_benchmark),
	KUNIT_CASE(crc64_nvme_test),
	KUNIT_CASE(crc64_nvme_benchmark),
#endif
	{},	/* sentinel: marks the end of the case list */
};

static struct kunit_suite crc_test_suite = {
	.name = "crc",
	.test_cases = crc_test_cases,
	.suite_init = crc_suite_init,	/* allocates and fills test_buffer */
	.suite_exit = crc_suite_exit,	/* frees test_buffer */
};
kunit_test_suite(crc_test_suite);

MODULE_DESCRIPTION("Unit tests and benchmarks for the CRC library functions");
MODULE_LICENSE("GPL");