1
// SPDX-License-Identifier: GPL-2.0-or-later
2
/*
3
* Unit tests and benchmarks for the CRC library functions
4
*
5
* Copyright 2024 Google LLC
6
*
7
* Author: Eric Biggers <[email protected]>
8
*/
9
#include <kunit/run-in-irq-context.h>
10
#include <kunit/test.h>
11
#include <linux/crc7.h>
12
#include <linux/crc16.h>
13
#include <linux/crc-t10dif.h>
14
#include <linux/crc32.h>
15
#include <linux/crc32c.h>
16
#include <linux/crc64.h>
17
#include <linux/prandom.h>
18
#include <linux/vmalloc.h>
19
20
/* Fixed PRNG seed so that test failures are reproducible. */
#define CRC_KUNIT_SEED 42
/* Maximum message length exercised by the correctness tests. */
#define CRC_KUNIT_MAX_LEN 16384
/* Number of random (initial_crc, offset, len) cases tried per CRC variant. */
#define CRC_KUNIT_NUM_TEST_ITERS 1000

/* Deterministic PRNG state shared by all tests in this suite. */
static struct rnd_state rng;
/* Random test data; allocated in crc_suite_init(), freed in crc_suite_exit(). */
static u8 *test_buffer;
static size_t test_buflen;
/**
 * struct crc_variant - describes a CRC variant
 * @bits: Number of bits in the CRC, 1 <= @bits <= 64.
 * @le: true if it's a "little endian" CRC (reversed mapping between bits and
 *	polynomial coefficients in each byte), false if it's a "big endian" CRC
 *	(natural mapping between bits and polynomial coefficients in each byte)
 * @poly: The generator polynomial with the highest-order term omitted.
 *	  Bit-reversed if @le is true.
 * @func: The function to compute a CRC.  The type signature uses u64 so that it
 *	  can fit any CRC up to CRC-64.  The CRC is passed in, and is expected
 *	  to be returned in, the least significant bits of the u64.  The
 *	  function is expected to *not* invert the CRC at the beginning and end.
 */
struct crc_variant {
	int bits;
	bool le;	/* defaults to false (big endian) in static initializers */
	u64 poly;
	u64 (*func)(u64 crc, const u8 *p, size_t len);
};
static u32 rand32(void)
49
{
50
return prandom_u32_state(&rng);
51
}
52
53
/* Return 64 pseudorandom bits, assembled from two 32-bit PRNG draws. */
static u64 rand64(void)
{
	u64 hi, lo;

	/* The first draw fills the high half, matching the PRNG stream order. */
	hi = rand32();
	lo = rand32();
	return (hi << 32) | lo;
}
static u64 crc_mask(const struct crc_variant *v)
61
{
62
return (u64)-1 >> (64 - v->bits);
63
}
64
65
/*
 * Reference implementation of any CRC variant: simple bit-at-a-time polynomial
 * division.  Slow but easy to verify by inspection; it serves as the oracle
 * that the optimized library implementations are checked against.
 */
static u64 crc_ref(const struct crc_variant *v,
		   u64 crc, const u8 *p, size_t len)
{
	size_t i, j;

	for (i = 0; i < len; i++) {
		for (j = 0; j < 8; j++) {
			if (v->le) {
				/*
				 * Little endian: consume data bits LSB-first.
				 * Mix the next data bit into the CRC's LSB,
				 * then do one division step: shift right and,
				 * if the bit shifted out was 1, xor in the
				 * (bit-reversed) polynomial.
				 */
				crc ^= (p[i] >> j) & 1;
				crc = (crc >> 1) ^ ((crc & 1) ? v->poly : 0);
			} else {
				/*
				 * Big endian: consume data bits MSB-first,
				 * working at the top bit of the @bits-wide
				 * CRC; mask keeps the CRC within @bits bits.
				 */
				crc ^= (u64)((p[i] >> (7 - j)) & 1) <<
				       (v->bits - 1);
				if (crc & (1ULL << (v->bits - 1)))
					crc = ((crc << 1) ^ v->poly) &
					      crc_mask(v);
				else
					crc <<= 1;
			}
		}
	}
	return crc;
}
/*
 * Suite-wide setup: allocate the test data buffer and fill it with
 * deterministic pseudorandom bytes.  Returns 0 on success or -ENOMEM.
 */
static int crc_suite_init(struct kunit_suite *suite)
{
	/*
	 * Allocate the test buffer using vmalloc() with a page-aligned length
	 * so that it is immediately followed by a guard page. This allows
	 * buffer overreads to be detected, even in assembly code.
	 */
	test_buflen = round_up(CRC_KUNIT_MAX_LEN, PAGE_SIZE);
	test_buffer = vmalloc(test_buflen);
	if (!test_buffer)
		return -ENOMEM;

	/* Fixed seed, so any failure is reproducible. */
	prandom_seed_state(&rng, CRC_KUNIT_SEED);
	prandom_bytes_state(&rng, test_buffer, test_buflen);
	return 0;
}
/* Suite-wide teardown: free the test data buffer allocated by crc_suite_init(). */
static void crc_suite_exit(struct kunit_suite *suite)
{
	vfree(test_buffer);
	test_buffer = NULL;
}
/* Generate a random initial CRC. */
114
static u64 generate_random_initial_crc(const struct crc_variant *v)
115
{
116
switch (rand32() % 4) {
117
case 0:
118
return 0;
119
case 1:
120
return crc_mask(v); /* All 1 bits */
121
default:
122
return rand64() & crc_mask(v);
123
}
124
}
125
126
/* Generate a random length, preferring small lengths. */
127
static size_t generate_random_length(size_t max_length)
128
{
129
size_t len;
130
131
switch (rand32() % 3) {
132
case 0:
133
len = rand32() % 128;
134
break;
135
case 1:
136
len = rand32() % 3072;
137
break;
138
default:
139
len = rand32();
140
break;
141
}
142
return len % (max_length + 1);
143
}
144
145
/* Length of each message CRC'd by the interrupt-context test. */
#define IRQ_TEST_DATA_LEN 512
#define IRQ_TEST_NUM_BUFFERS 3 /* matches max concurrency level */

/* Shared state for the interrupt-context test; see crc_irq_test_func(). */
struct crc_irq_test_state {
	const struct crc_variant *v;	/* CRC variant under test */
	u64 initial_crc;		/* initial CRC fed to every computation */
	u64 expected_crcs[IRQ_TEST_NUM_BUFFERS]; /* reference CRC per buffer */
	atomic_t seqno;			/* round-robin buffer selector */
};
/*
156
* Compute the CRC of one of the test messages and verify that it matches the
157
* expected CRC from @state->expected_crcs. To increase the chance of detecting
158
* problems, cycle through multiple messages.
159
*/
160
static bool crc_irq_test_func(void *state_)
161
{
162
struct crc_irq_test_state *state = state_;
163
const struct crc_variant *v = state->v;
164
u32 i = (u32)atomic_inc_return(&state->seqno) % IRQ_TEST_NUM_BUFFERS;
165
u64 actual_crc = v->func(state->initial_crc,
166
&test_buffer[i * IRQ_TEST_DATA_LEN],
167
IRQ_TEST_DATA_LEN);
168
169
return actual_crc == state->expected_crcs[i];
170
}
171
172
/*
173
* Test that if CRCs are computed in task, softirq, and hardirq context
174
* concurrently, then all results are as expected.
175
*/
176
static void crc_interrupt_context_test(struct kunit *test,
177
const struct crc_variant *v)
178
{
179
struct crc_irq_test_state state = {
180
.v = v,
181
.initial_crc = generate_random_initial_crc(v),
182
};
183
184
for (int i = 0; i < IRQ_TEST_NUM_BUFFERS; i++) {
185
state.expected_crcs[i] = crc_ref(
186
v, state.initial_crc,
187
&test_buffer[i * IRQ_TEST_DATA_LEN], IRQ_TEST_DATA_LEN);
188
}
189
190
kunit_run_irq_test(test, crc_irq_test_func, 100000, &state);
191
}
192
193
/*
 * Test that v->func gives the same CRCs as a reference implementation.
 * Runs CRC_KUNIT_NUM_TEST_ITERS random cases varying the initial CRC, the
 * message length, and the buffer offset, then finishes with the
 * interrupt-context test.
 */
static void crc_test(struct kunit *test, const struct crc_variant *v)
{
	size_t i;

	for (i = 0; i < CRC_KUNIT_NUM_TEST_ITERS; i++) {
		u64 init_crc, expected_crc, actual_crc;
		size_t len, offset;

		init_crc = generate_random_initial_crc(v);
		len = generate_random_length(CRC_KUNIT_MAX_LEN);

		/* Generate a random offset. */
		if (rand32() % 2 == 0) {
			/* Use a random alignment mod 64 */
			offset = rand32() % 64;
			/* Clamp so offset+len stays within the buffer. */
			offset = min(offset, CRC_KUNIT_MAX_LEN - len);
		} else {
			/* Go up to the guard page, to catch buffer overreads */
			offset = test_buflen - len;
		}

		if (rand32() % 8 == 0)
			/* Refresh the data occasionally. */
			prandom_bytes_state(&rng, &test_buffer[offset], len);

		/*
		 * Compute the CRC, and verify that it equals the CRC computed
		 * by a simple bit-at-a-time reference implementation.
		 */
		expected_crc = crc_ref(v, init_crc, &test_buffer[offset], len);
		actual_crc = v->func(init_crc, &test_buffer[offset], len);
		KUNIT_EXPECT_EQ_MSG(test, expected_crc, actual_crc,
				    "Wrong result with len=%zu offset=%zu",
				    len, offset);
	}

	crc_interrupt_context_test(test, v);
}
/*
 * Benchmark @crc_func over a range of message lengths and log the measured
 * throughput.  Skipped unless CONFIG_CRC_BENCHMARK is enabled.
 * NOTE(review): __always_inline presumably lets the compiler turn the
 * indirect @crc_func call into a direct call per wrapper — confirm intent.
 */
static __always_inline void
crc_benchmark(struct kunit *test,
	      u64 (*crc_func)(u64 crc, const u8 *p, size_t len))
{
	static const size_t lens_to_test[] = {
		1, 16, 64, 127, 128, 200, 256, 511, 512, 1024, 3173, 4096, 16384,
	};
	size_t len, i, j, num_iters;
	/*
	 * The CRC value that this function computes in a series of calls to
	 * crc_func is never actually used, so use volatile to ensure that the
	 * computations are done as intended and don't all get optimized out.
	 */
	volatile u64 crc = 0;
	u64 t;

	if (!IS_ENABLED(CONFIG_CRC_BENCHMARK))
		kunit_skip(test, "not enabled");

	/* warm-up */
	for (i = 0; i < 10000000; i += CRC_KUNIT_MAX_LEN)
		crc = crc_func(crc, test_buffer, CRC_KUNIT_MAX_LEN);

	for (i = 0; i < ARRAY_SIZE(lens_to_test); i++) {
		len = lens_to_test[i];
		KUNIT_ASSERT_LE(test, len, CRC_KUNIT_MAX_LEN);
		/*
		 * Scale iterations inversely with length so each length runs
		 * for a similar total time; the +128 keeps the iteration
		 * count bounded for very short messages.
		 */
		num_iters = 10000000 / (len + 128);
		preempt_disable();
		t = ktime_get_ns();
		for (j = 0; j < num_iters; j++)
			crc = crc_func(crc, test_buffer, len);
		t = ktime_get_ns() - t;
		preempt_enable();
		/* bytes * 1000 / ns == MB/s */
		kunit_info(test, "len=%zu: %llu MB/s\n",
			   len, div64_u64((u64)len * num_iters * 1000, t));
	}
}
/* crc7_be */

static u64 crc7_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	/*
	 * crc7_be() left-aligns the 7-bit CRC in a u8, whereas the test wants a
	 * right-aligned CRC (in a u64). Convert between the conventions.
	 */
	return crc7_be(crc << 1, p, len) >> 1;
}

/* 7-bit big-endian CRC (.le omitted, so false). */
static const struct crc_variant crc_variant_crc7_be = {
	.bits = 7,
	.poly = 0x9,
	.func = crc7_be_wrapper,
};

/* Correctness test for crc7_be(). */
static void crc7_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc7_be);
}

/* Benchmark for crc7_be(). */
static void crc7_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc7_be_wrapper);
}
/* crc16 */

/* Adapt crc16() to the common u64 test signature. */
static u64 crc16_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc16(crc, p, len);
}

static const struct crc_variant crc_variant_crc16 = {
	.bits = 16,
	.le = true,
	.poly = 0xa001,	/* bit-reversed polynomial, since .le is true */
	.func = crc16_wrapper,
};

/* Correctness test for crc16(). */
static void crc16_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc16);
}

/* Benchmark for crc16(). */
static void crc16_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc16_wrapper);
}
/* crc_t10dif */

/* Adapt crc_t10dif_update() to the common u64 test signature. */
static u64 crc_t10dif_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc_t10dif_update(crc, p, len);
}

static const struct crc_variant crc_variant_crc_t10dif = {
	.bits = 16,
	.le = false,
	.poly = 0x8bb7,
	.func = crc_t10dif_wrapper,
};

/* Correctness test for crc_t10dif_update(). */
static void crc_t10dif_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc_t10dif);
}

/* Benchmark for crc_t10dif_update(). */
static void crc_t10dif_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc_t10dif_wrapper);
}
/* crc32_le */

/* Adapt crc32_le() to the common u64 test signature. */
static u64 crc32_le_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_le(crc, p, len);
}

static const struct crc_variant crc_variant_crc32_le = {
	.bits = 32,
	.le = true,
	.poly = 0xedb88320,	/* bit-reversed polynomial, since .le is true */
	.func = crc32_le_wrapper,
};

/* Correctness test for crc32_le(). */
static void crc32_le_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_le);
}

/* Benchmark for crc32_le(). */
static void crc32_le_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_le_wrapper);
}
/* crc32_be */

/* Adapt crc32_be() to the common u64 test signature. */
static u64 crc32_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32_be(crc, p, len);
}

static const struct crc_variant crc_variant_crc32_be = {
	.bits = 32,
	.le = false,
	.poly = 0x04c11db7,	/* natural (non-reversed) polynomial */
	.func = crc32_be_wrapper,
};

/* Correctness test for crc32_be(). */
static void crc32_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32_be);
}

/* Benchmark for crc32_be(). */
static void crc32_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32_be_wrapper);
}
/* crc32c */

/* Adapt crc32c() to the common u64 test signature. */
static u64 crc32c_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc32c(crc, p, len);
}

static const struct crc_variant crc_variant_crc32c = {
	.bits = 32,
	.le = true,
	.poly = 0x82f63b78,	/* bit-reversed polynomial, since .le is true */
	.func = crc32c_wrapper,
};

/* Correctness test for crc32c(). */
static void crc32c_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc32c);
}

/* Benchmark for crc32c(). */
static void crc32c_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc32c_wrapper);
}
/* crc64_be */

/* Adapt crc64_be() to the common u64 test signature. */
static u64 crc64_be_wrapper(u64 crc, const u8 *p, size_t len)
{
	return crc64_be(crc, p, len);
}

static const struct crc_variant crc_variant_crc64_be = {
	.bits = 64,
	.le = false,
	.poly = 0x42f0e1eba9ea3693,	/* natural (non-reversed) polynomial */
	.func = crc64_be_wrapper,
};

/* Correctness test for crc64_be(). */
static void crc64_be_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_be);
}

/* Benchmark for crc64_be(). */
static void crc64_be_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_be_wrapper);
}
/* crc64_nvme */

static u64 crc64_nvme_wrapper(u64 crc, const u8 *p, size_t len)
{
	/*
	 * The inversions that crc64_nvme() does have to be undone here,
	 * since crc_ref() does not invert the CRC at the beginning and end.
	 */
	return ~crc64_nvme(~crc, p, len);
}

static const struct crc_variant crc_variant_crc64_nvme = {
	.bits = 64,
	.le = true,
	.poly = 0x9a6c9329ac4bc9b5,	/* bit-reversed polynomial, since .le is true */
	.func = crc64_nvme_wrapper,
};

/* Correctness test for crc64_nvme(). */
static void crc64_nvme_test(struct kunit *test)
{
	crc_test(test, &crc_variant_crc64_nvme);
}

/* Benchmark for crc64_nvme(). */
static void crc64_nvme_benchmark(struct kunit *test)
{
	crc_benchmark(test, crc64_nvme_wrapper);
}
/*
 * For each supported CRC variant: the correctness test followed by the
 * benchmark (the benchmark is skipped unless CONFIG_CRC_BENCHMARK is set).
 */
static struct kunit_case crc_test_cases[] = {
	KUNIT_CASE(crc7_be_test),
	KUNIT_CASE(crc7_be_benchmark),
	KUNIT_CASE(crc16_test),
	KUNIT_CASE(crc16_benchmark),
	KUNIT_CASE(crc_t10dif_test),
	KUNIT_CASE(crc_t10dif_benchmark),
	KUNIT_CASE(crc32_le_test),
	KUNIT_CASE(crc32_le_benchmark),
	KUNIT_CASE(crc32_be_test),
	KUNIT_CASE(crc32_be_benchmark),
	KUNIT_CASE(crc32c_test),
	KUNIT_CASE(crc32c_benchmark),
	KUNIT_CASE(crc64_be_test),
	KUNIT_CASE(crc64_be_benchmark),
	KUNIT_CASE(crc64_nvme_test),
	KUNIT_CASE(crc64_nvme_benchmark),
	{},
};
static struct kunit_suite crc_test_suite = {
	.name = "crc",
	.test_cases = crc_test_cases,
	.suite_init = crc_suite_init,	/* allocates and fills test_buffer */
	.suite_exit = crc_suite_exit,	/* frees test_buffer */
};
kunit_test_suite(crc_test_suite);

MODULE_DESCRIPTION("Unit tests and benchmarks for the CRC library functions");
MODULE_LICENSE("GPL");