1 // SPDX-License-Identifier: GPL-2.0-only
/*
 * Testsuite for BPF interpreter and BPF JIT compiler
 *
 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
 */
8 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/filter.h>
13 #include <linux/bpf.h>
14 #include <linux/skbuff.h>
15 #include <linux/netdevice.h>
16 #include <linux/if_vlan.h>
17 #include <linux/random.h>
18 #include <linux/highmem.h>
19 #include <linux/sched.h>
21 /* General test specific settings */
22 #define MAX_SUBTESTS 3
23 #define MAX_TESTRUNS 1000
26 #define MAX_K 0xffffFFFF
28 /* Few constants used to init test 'skb' */
30 #define SKB_MARK 0x1234aaaa
31 #define SKB_HASH 0x1234aaab
32 #define SKB_QUEUE_MAP 123
33 #define SKB_VLAN_TCI 0xffff
34 #define SKB_VLAN_PRESENT 1
35 #define SKB_DEV_IFINDEX 577
36 #define SKB_DEV_TYPE 588
38 /* Redefine REGs to make tests less verbose */
49 #define R10 BPF_REG_10
51 /* Flags that can be passed to test cases */
52 #define FLAG_NO_DATA BIT(0)
53 #define FLAG_EXPECTED_FAIL BIT(1)
54 #define FLAG_SKB_FRAG BIT(2)
55 #define FLAG_VERIFIER_ZEXT BIT(3)
58 CLASSIC = BIT(6), /* Old BPF instructions only. */
59 INTERNAL = BIT(7), /* Extended instruction set. */
62 #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
67 struct sock_filter insns[MAX_INSNS];
68 struct bpf_insn insns_int[MAX_INSNS];
80 int (*fill_helper)(struct bpf_test *self);
81 int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
82 __u8 frag_data[MAX_DATA];
83 int stack_depth; /* for eBPF only, since tests don't call verifier */
84 int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
87 /* Large test cases need separate allocation and fill handler. */
89 static int bpf_fill_maxinsns1(struct bpf_test *self)
91 unsigned int len = BPF_MAXINSNS;
92 struct sock_filter *insn;
96 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
100 for (i = 0; i < len; i++, k--)
101 insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
103 self->u.ptr.insns = insn;
104 self->u.ptr.len = len;
109 static int bpf_fill_maxinsns2(struct bpf_test *self)
111 unsigned int len = BPF_MAXINSNS;
112 struct sock_filter *insn;
115 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
119 for (i = 0; i < len; i++)
120 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
122 self->u.ptr.insns = insn;
123 self->u.ptr.len = len;
128 static int bpf_fill_maxinsns3(struct bpf_test *self)
130 unsigned int len = BPF_MAXINSNS;
131 struct sock_filter *insn;
132 struct rnd_state rnd;
135 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
139 prandom_seed_state(&rnd, 3141592653589793238ULL);
141 for (i = 0; i < len - 1; i++) {
142 __u32 k = prandom_u32_state(&rnd);
144 insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
147 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
149 self->u.ptr.insns = insn;
150 self->u.ptr.len = len;
155 static int bpf_fill_maxinsns4(struct bpf_test *self)
157 unsigned int len = BPF_MAXINSNS + 1;
158 struct sock_filter *insn;
161 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
165 for (i = 0; i < len; i++)
166 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
168 self->u.ptr.insns = insn;
169 self->u.ptr.len = len;
174 static int bpf_fill_maxinsns5(struct bpf_test *self)
176 unsigned int len = BPF_MAXINSNS;
177 struct sock_filter *insn;
180 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
184 insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
186 for (i = 1; i < len - 1; i++)
187 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
189 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
191 self->u.ptr.insns = insn;
192 self->u.ptr.len = len;
197 static int bpf_fill_maxinsns6(struct bpf_test *self)
199 unsigned int len = BPF_MAXINSNS;
200 struct sock_filter *insn;
203 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
207 for (i = 0; i < len - 1; i++)
208 insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
209 SKF_AD_VLAN_TAG_PRESENT);
211 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
213 self->u.ptr.insns = insn;
214 self->u.ptr.len = len;
219 static int bpf_fill_maxinsns7(struct bpf_test *self)
221 unsigned int len = BPF_MAXINSNS;
222 struct sock_filter *insn;
225 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
229 for (i = 0; i < len - 4; i++)
230 insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
233 insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
234 insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
236 insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
237 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
239 self->u.ptr.insns = insn;
240 self->u.ptr.len = len;
245 static int bpf_fill_maxinsns8(struct bpf_test *self)
247 unsigned int len = BPF_MAXINSNS;
248 struct sock_filter *insn;
249 int i, jmp_off = len - 3;
251 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
255 insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
257 for (i = 1; i < len - 1; i++)
258 insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
260 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
262 self->u.ptr.insns = insn;
263 self->u.ptr.len = len;
268 static int bpf_fill_maxinsns9(struct bpf_test *self)
270 unsigned int len = BPF_MAXINSNS;
271 struct bpf_insn *insn;
274 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
278 insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
279 insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
280 insn[2] = BPF_EXIT_INSN();
282 for (i = 3; i < len - 2; i++)
283 insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
285 insn[len - 2] = BPF_EXIT_INSN();
286 insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
288 self->u.ptr.insns = insn;
289 self->u.ptr.len = len;
294 static int bpf_fill_maxinsns10(struct bpf_test *self)
296 unsigned int len = BPF_MAXINSNS, hlen = len - 2;
297 struct bpf_insn *insn;
300 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
304 for (i = 0; i < hlen / 2; i++)
305 insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
306 for (i = hlen - 1; i > hlen / 2; i--)
307 insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
309 insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
310 insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
311 insn[hlen + 1] = BPF_EXIT_INSN();
313 self->u.ptr.insns = insn;
314 self->u.ptr.len = len;
319 static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
322 struct sock_filter *insn;
326 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
330 rlen = (len % plen) - 1;
332 for (i = 0; i + plen < len; i += plen)
333 for (j = 0; j < plen; j++)
334 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
336 for (j = 0; j < rlen; j++)
337 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
340 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
342 self->u.ptr.insns = insn;
343 self->u.ptr.len = len;
348 static int bpf_fill_maxinsns11(struct bpf_test *self)
350 /* Hits 70 passes on x86_64 and triggers NOPs padding. */
351 return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
354 static int bpf_fill_maxinsns12(struct bpf_test *self)
356 unsigned int len = BPF_MAXINSNS;
357 struct sock_filter *insn;
360 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
364 insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
366 for (i = 1; i < len - 1; i++)
367 insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
369 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
371 self->u.ptr.insns = insn;
372 self->u.ptr.len = len;
377 static int bpf_fill_maxinsns13(struct bpf_test *self)
379 unsigned int len = BPF_MAXINSNS;
380 struct sock_filter *insn;
383 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
387 for (i = 0; i < len - 3; i++)
388 insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
390 insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
391 insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
392 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
394 self->u.ptr.insns = insn;
395 self->u.ptr.len = len;
static int bpf_fill_ja(struct bpf_test *self)
{
	/* Hits exactly 11 passes on x86_64 JIT. */
	return __bpf_fill_ja(self, 12, 9);
}
406 static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
408 unsigned int len = BPF_MAXINSNS;
409 struct sock_filter *insn;
412 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
416 for (i = 0; i < len - 1; i += 2) {
417 insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
418 insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
419 SKF_AD_OFF + SKF_AD_CPU);
422 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
424 self->u.ptr.insns = insn;
425 self->u.ptr.len = len;
430 static int __bpf_fill_stxdw(struct bpf_test *self, int size)
432 unsigned int len = BPF_MAXINSNS;
433 struct bpf_insn *insn;
436 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
440 insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
441 insn[1] = BPF_ST_MEM(size, R10, -40, 42);
443 for (i = 2; i < len - 2; i++)
444 insn[i] = BPF_STX_XADD(size, R10, R0, -40);
446 insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
447 insn[len - 1] = BPF_EXIT_INSN();
449 self->u.ptr.insns = insn;
450 self->u.ptr.len = len;
451 self->stack_depth = 40;
456 static int bpf_fill_stxw(struct bpf_test *self)
458 return __bpf_fill_stxdw(self, BPF_W);
461 static int bpf_fill_stxdw(struct bpf_test *self)
463 return __bpf_fill_stxdw(self, BPF_DW);
466 static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
468 struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
470 memcpy(insns, tmp, sizeof(tmp));
475 * Branch conversion tests. Complex operations can expand to a lot
476 * of instructions when JITed. This in turn may cause jump offsets
477 * to overflow the field size of the native instruction, triggering
478 * a branch conversion mechanism in some JITs.
480 static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
482 struct bpf_insn *insns;
483 int len = S16_MAX + 5;
486 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
490 i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
491 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
492 insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
493 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
494 insns[i++] = BPF_EXIT_INSN();
496 while (i < len - 1) {
497 static const int ops[] = {
498 BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
499 BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
501 int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
504 insns[i++] = BPF_ALU32_REG(op, R0, R1);
506 insns[i++] = BPF_ALU64_REG(op, R0, R1);
509 insns[i++] = BPF_EXIT_INSN();
510 self->u.ptr.insns = insns;
511 self->u.ptr.len = len;
517 /* Branch taken by runtime decision */
518 static int bpf_fill_max_jmp_taken(struct bpf_test *self)
520 return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
523 /* Branch not taken by runtime decision */
524 static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
526 return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
529 /* Branch always taken, known at JIT time */
530 static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
532 return __bpf_fill_max_jmp(self, BPF_JGE, 0);
535 /* Branch never taken, known at JIT time */
536 static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
538 return __bpf_fill_max_jmp(self, BPF_JLT, 0);
541 /* ALU result computation used in tests */
542 static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
566 if (v2 > 0 && v1 > S64_MAX)
567 *res |= ~0ULL << (64 - v2);
581 *res = div64_u64(v1, v2);
586 div64_u64_rem(v1, v2, res);
592 /* Test an ALU shift operation for all valid shift values */
593 static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
596 static const s64 regs[] = {
597 0x0123456789abcdefLL, /* dword > 0, word < 0 */
598 0xfedcba9876543210LL, /* dowrd < 0, word > 0 */
599 0xfedcba0198765432LL, /* dowrd < 0, word < 0 */
600 0x0123458967abcdefLL, /* dword > 0, word > 0 */
602 int bits = alu32 ? 32 : 64;
603 int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
604 struct bpf_insn *insn;
608 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
612 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
614 for (k = 0; k < ARRAY_SIZE(regs); k++) {
617 i += __bpf_ld_imm64(&insn[i], R3, reg);
619 for (imm = 0; imm < bits; imm++) {
622 /* Perform operation */
623 insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
624 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
627 insn[i++] = BPF_ALU32_IMM(op, R1, imm);
629 insn[i++] = BPF_ALU32_REG(op, R1, R2);
635 __bpf_alu_result(&val, reg, imm, op);
639 insn[i++] = BPF_ALU64_IMM(op, R1, imm);
641 insn[i++] = BPF_ALU64_REG(op, R1, R2);
642 __bpf_alu_result(&val, reg, imm, op);
646 * When debugging a JIT that fails this test, one
647 * can write the immediate value to R0 here to find
648 * out which operand values that fail.
651 /* Load reference and check the result */
652 i += __bpf_ld_imm64(&insn[i], R4, val);
653 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
654 insn[i++] = BPF_EXIT_INSN();
658 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
659 insn[i++] = BPF_EXIT_INSN();
661 self->u.ptr.insns = insn;
662 self->u.ptr.len = len;
668 static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
670 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
673 static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
675 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
678 static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
680 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
683 static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
685 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
688 static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
690 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
693 static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
695 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
698 static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
700 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
703 static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
705 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
708 static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
710 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
713 static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
715 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
718 static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
720 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
723 static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
725 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
729 * Test an ALU register shift operation for all valid shift values
730 * for the case when the source and destination are the same.
732 static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
735 int bits = alu32 ? 32 : 64;
736 int len = 3 + 6 * bits;
737 struct bpf_insn *insn;
741 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
745 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
747 for (val = 0; val < bits; val++) {
750 /* Perform operation */
751 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
753 insn[i++] = BPF_ALU32_REG(op, R1, R1);
755 insn[i++] = BPF_ALU64_REG(op, R1, R1);
757 /* Compute the reference result */
758 __bpf_alu_result(&res, val, val, op);
761 i += __bpf_ld_imm64(&insn[i], R2, res);
763 /* Check the actual result */
764 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
765 insn[i++] = BPF_EXIT_INSN();
768 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
769 insn[i++] = BPF_EXIT_INSN();
771 self->u.ptr.insns = insn;
772 self->u.ptr.len = len;
778 static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
780 return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
783 static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
785 return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
788 static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
790 return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
793 static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
795 return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
798 static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
800 return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
803 static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
805 return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
809 * Common operand pattern generator for exhaustive power-of-two magnitudes
810 * tests. The block size parameters can be adjusted to increase/reduce the
811 * number of combinatons tested and thereby execution speed and memory
815 static inline s64 value(int msb, int delta, int sign)
817 return sign * (1LL << msb) + delta;
820 static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
821 int dbits, int sbits, int block1, int block2,
822 int (*emit)(struct bpf_test*, void*,
823 struct bpf_insn*, s64, s64))
825 static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
826 struct bpf_insn *insns;
827 int di, si, bt, db, sb;
832 /* Total number of iterations for the two pattern */
833 count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
834 count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
836 /* Compute the maximum number of insns and allocate the buffer */
837 len = extra + count * (*emit)(self, arg, NULL, 0, 0);
838 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
842 /* Add head instruction(s) */
843 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
846 * Pattern 1: all combinations of power-of-two magnitudes and sign,
847 * and with a block of contiguous values around each magnitude.
849 for (di = 0; di < dbits - 1; di++) /* Dst magnitudes */
850 for (si = 0; si < sbits - 1; si++) /* Src magnitudes */
851 for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
852 for (db = -(block1 / 2);
853 db < (block1 + 1) / 2; db++)
854 for (sb = -(block1 / 2);
855 sb < (block1 + 1) / 2; sb++) {
858 dst = value(di, db, sgn[k][0]);
859 src = value(si, sb, sgn[k][1]);
860 i += (*emit)(self, arg,
865 * Pattern 2: all combinations for a larger block of values
866 * for each power-of-two magnitude and sign, where the magnitude is
867 * the same for both operands.
869 for (bt = 0; bt < max(dbits, sbits) - 1; bt++) /* Magnitude */
870 for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
871 for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
872 for (sb = -(block2 / 2);
873 sb < (block2 + 1) / 2; sb++) {
876 dst = value(bt % dbits, db, sgn[k][0]);
877 src = value(bt % sbits, sb, sgn[k][1]);
878 i += (*emit)(self, arg, &insns[i],
882 /* Append tail instructions */
883 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
884 insns[i++] = BPF_EXIT_INSN();
887 self->u.ptr.insns = insns;
/*
 * Block size parameters used in pattern tests below. Tune as needed to
 * increase/reduce the number of combinations tested, see following examples.
 *  block   values per operand MSB
 * ----------------------------------------
 *     0    none
 *     1    (1 << MSB)
 *     2    (1 << MSB) + [-1, 0]
 *     3    (1 << MSB) + [-1, 0, 1]
 */
#define PATTERN_BLOCK1	1
#define PATTERN_BLOCK2	5

/* Number of test runs for a pattern test */
#define NR_PATTERN_RUNS	1
910 * Exhaustive tests of ALU operations for all combinations of power-of-two
911 * magnitudes of the operands, both for positive and negative values. The
912 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
913 * emit different code depending on the magnitude of the immediate value.
915 static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
916 struct bpf_insn *insns, s64 dst, s64 imm)
918 int op = *(int *)arg;
925 if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
926 i += __bpf_ld_imm64(&insns[i], R1, dst);
927 i += __bpf_ld_imm64(&insns[i], R3, res);
928 insns[i++] = BPF_ALU64_IMM(op, R1, imm);
929 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
930 insns[i++] = BPF_EXIT_INSN();
936 static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
937 struct bpf_insn *insns, s64 dst, s64 imm)
939 int op = *(int *)arg;
946 if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
947 i += __bpf_ld_imm64(&insns[i], R1, dst);
948 i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
949 insns[i++] = BPF_ALU32_IMM(op, R1, imm);
950 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
951 insns[i++] = BPF_EXIT_INSN();
957 static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
958 struct bpf_insn *insns, s64 dst, s64 src)
960 int op = *(int *)arg;
967 if (__bpf_alu_result(&res, dst, src, op)) {
968 i += __bpf_ld_imm64(&insns[i], R1, dst);
969 i += __bpf_ld_imm64(&insns[i], R2, src);
970 i += __bpf_ld_imm64(&insns[i], R3, res);
971 insns[i++] = BPF_ALU64_REG(op, R1, R2);
972 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
973 insns[i++] = BPF_EXIT_INSN();
979 static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
980 struct bpf_insn *insns, s64 dst, s64 src)
982 int op = *(int *)arg;
989 if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
990 i += __bpf_ld_imm64(&insns[i], R1, dst);
991 i += __bpf_ld_imm64(&insns[i], R2, src);
992 i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
993 insns[i++] = BPF_ALU32_REG(op, R1, R2);
994 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
995 insns[i++] = BPF_EXIT_INSN();
1001 static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
1003 return __bpf_fill_pattern(self, &op, 64, 32,
1004 PATTERN_BLOCK1, PATTERN_BLOCK2,
1005 &__bpf_emit_alu64_imm);
1008 static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
1010 return __bpf_fill_pattern(self, &op, 64, 32,
1011 PATTERN_BLOCK1, PATTERN_BLOCK2,
1012 &__bpf_emit_alu32_imm);
1015 static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
1017 return __bpf_fill_pattern(self, &op, 64, 64,
1018 PATTERN_BLOCK1, PATTERN_BLOCK2,
1019 &__bpf_emit_alu64_reg);
1022 static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
1024 return __bpf_fill_pattern(self, &op, 64, 64,
1025 PATTERN_BLOCK1, PATTERN_BLOCK2,
1026 &__bpf_emit_alu32_reg);
1029 /* ALU64 immediate operations */
1030 static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
1032 return __bpf_fill_alu64_imm(self, BPF_MOV);
1035 static int bpf_fill_alu64_and_imm(struct bpf_test *self)
1037 return __bpf_fill_alu64_imm(self, BPF_AND);
1040 static int bpf_fill_alu64_or_imm(struct bpf_test *self)
1042 return __bpf_fill_alu64_imm(self, BPF_OR);
1045 static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
1047 return __bpf_fill_alu64_imm(self, BPF_XOR);
1050 static int bpf_fill_alu64_add_imm(struct bpf_test *self)
1052 return __bpf_fill_alu64_imm(self, BPF_ADD);
1055 static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
1057 return __bpf_fill_alu64_imm(self, BPF_SUB);
1060 static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
1062 return __bpf_fill_alu64_imm(self, BPF_MUL);
1065 static int bpf_fill_alu64_div_imm(struct bpf_test *self)
1067 return __bpf_fill_alu64_imm(self, BPF_DIV);
1070 static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
1072 return __bpf_fill_alu64_imm(self, BPF_MOD);
1075 /* ALU32 immediate operations */
1076 static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
1078 return __bpf_fill_alu32_imm(self, BPF_MOV);
1081 static int bpf_fill_alu32_and_imm(struct bpf_test *self)
1083 return __bpf_fill_alu32_imm(self, BPF_AND);
1086 static int bpf_fill_alu32_or_imm(struct bpf_test *self)
1088 return __bpf_fill_alu32_imm(self, BPF_OR);
1091 static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
1093 return __bpf_fill_alu32_imm(self, BPF_XOR);
1096 static int bpf_fill_alu32_add_imm(struct bpf_test *self)
1098 return __bpf_fill_alu32_imm(self, BPF_ADD);
1101 static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
1103 return __bpf_fill_alu32_imm(self, BPF_SUB);
1106 static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
1108 return __bpf_fill_alu32_imm(self, BPF_MUL);
1111 static int bpf_fill_alu32_div_imm(struct bpf_test *self)
1113 return __bpf_fill_alu32_imm(self, BPF_DIV);
1116 static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
1118 return __bpf_fill_alu32_imm(self, BPF_MOD);
1121 /* ALU64 register operations */
1122 static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
1124 return __bpf_fill_alu64_reg(self, BPF_MOV);
1127 static int bpf_fill_alu64_and_reg(struct bpf_test *self)
1129 return __bpf_fill_alu64_reg(self, BPF_AND);
1132 static int bpf_fill_alu64_or_reg(struct bpf_test *self)
1134 return __bpf_fill_alu64_reg(self, BPF_OR);
1137 static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
1139 return __bpf_fill_alu64_reg(self, BPF_XOR);
1142 static int bpf_fill_alu64_add_reg(struct bpf_test *self)
1144 return __bpf_fill_alu64_reg(self, BPF_ADD);
1147 static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
1149 return __bpf_fill_alu64_reg(self, BPF_SUB);
1152 static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
1154 return __bpf_fill_alu64_reg(self, BPF_MUL);
1157 static int bpf_fill_alu64_div_reg(struct bpf_test *self)
1159 return __bpf_fill_alu64_reg(self, BPF_DIV);
1162 static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
1164 return __bpf_fill_alu64_reg(self, BPF_MOD);
1167 /* ALU32 register operations */
1168 static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
1170 return __bpf_fill_alu32_reg(self, BPF_MOV);
1173 static int bpf_fill_alu32_and_reg(struct bpf_test *self)
1175 return __bpf_fill_alu32_reg(self, BPF_AND);
1178 static int bpf_fill_alu32_or_reg(struct bpf_test *self)
1180 return __bpf_fill_alu32_reg(self, BPF_OR);
1183 static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
1185 return __bpf_fill_alu32_reg(self, BPF_XOR);
1188 static int bpf_fill_alu32_add_reg(struct bpf_test *self)
1190 return __bpf_fill_alu32_reg(self, BPF_ADD);
1193 static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
1195 return __bpf_fill_alu32_reg(self, BPF_SUB);
1198 static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
1200 return __bpf_fill_alu32_reg(self, BPF_MUL);
1203 static int bpf_fill_alu32_div_reg(struct bpf_test *self)
1205 return __bpf_fill_alu32_reg(self, BPF_DIV);
1208 static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
1210 return __bpf_fill_alu32_reg(self, BPF_MOD);
1214 * Test JITs that implement complex ALU operations as function
1215 * calls, and must re-arrange operands for argument passing.
1217 static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
1219 int len = 2 + 10 * 10;
1220 struct bpf_insn *insns;
1226 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1230 /* Operand and result values according to operation */
1234 dst = 0x7edcba9876543210ULL;
1237 if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1240 __bpf_alu_result(&res, dst, imm, op);
1245 /* Check all operand registers */
1246 for (rd = R0; rd <= R9; rd++) {
1247 i += __bpf_ld_imm64(&insns[i], rd, dst);
1250 insns[i++] = BPF_ALU32_IMM(op, rd, imm);
1252 insns[i++] = BPF_ALU64_IMM(op, rd, imm);
1254 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
1255 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1256 insns[i++] = BPF_EXIT_INSN();
1258 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1259 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
1260 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1261 insns[i++] = BPF_EXIT_INSN();
1264 insns[i++] = BPF_MOV64_IMM(R0, 1);
1265 insns[i++] = BPF_EXIT_INSN();
1267 self->u.ptr.insns = insns;
1268 self->u.ptr.len = len;
1274 /* ALU64 K registers */
1275 static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
1277 return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
1280 static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
1282 return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
1285 static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
1287 return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
1290 static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
1292 return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
1295 static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
1297 return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
1300 static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
1302 return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
1305 static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
1307 return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
1310 static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
1312 return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
1315 static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
1317 return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
1320 static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
1322 return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
1325 static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
1327 return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
1330 static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
1332 return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
1335 /* ALU32 K registers */
1336 static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
1338 return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
1341 static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
1343 return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
1346 static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
1348 return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
1351 static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
1353 return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
1356 static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
1358 return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
1361 static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
1363 return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
1366 static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
1368 return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
1371 static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
1373 return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
1376 static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
1378 return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
1381 static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
1383 return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
1386 static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
1388 return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
1391 static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
1393 return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
1397 * Test JITs that implement complex ALU operations as function
1398 * calls, and must re-arrange operands for argument passing.
1400 static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
1402 int len = 2 + 10 * 10 * 12;
1403 u64 dst, src, res, same;
1404 struct bpf_insn *insns;
1408 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1412 /* Operand and result values according to operation */
1417 dst = 0x7edcba9876543210ULL;
1418 src = 0x0123456789abcdefULL;
1421 if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1424 __bpf_alu_result(&res, dst, src, op);
1425 __bpf_alu_result(&same, src, src, op);
1432 /* Check all combinations of operand registers */
1433 for (rd = R0; rd <= R9; rd++) {
1434 for (rs = R0; rs <= R9; rs++) {
1435 u64 val = rd == rs ? same : res;
1437 i += __bpf_ld_imm64(&insns[i], rd, dst);
1438 i += __bpf_ld_imm64(&insns[i], rs, src);
1441 insns[i++] = BPF_ALU32_REG(op, rd, rs);
1443 insns[i++] = BPF_ALU64_REG(op, rd, rs);
1445 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
1446 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1447 insns[i++] = BPF_EXIT_INSN();
1449 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1450 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
1451 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1452 insns[i++] = BPF_EXIT_INSN();
1456 insns[i++] = BPF_MOV64_IMM(R0, 1);
1457 insns[i++] = BPF_EXIT_INSN();
1459 self->u.ptr.insns = insns;
1460 self->u.ptr.len = len;
1466 /* ALU64 X register combinations */
/* Each handler delegates to __bpf_fill_alu_reg_pairs with alu32 = false */
1467 static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
1469 return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
1472 static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
1474 return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
1477 static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
1479 return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
1482 static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
1484 return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
1487 static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
1489 return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
1492 static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
1494 return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
1497 static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
1499 return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
1502 static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
1504 return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
1507 static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
1509 return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
1512 static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
1514 return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
1517 static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
1519 return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
1522 static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
1524 return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
1527 /* ALU32 X register combinations */
/* Each handler delegates to __bpf_fill_alu_reg_pairs with alu32 = true */
1528 static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
1530 return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
1533 static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
1535 return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
1538 static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
1540 return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
1543 static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
1545 return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
1548 static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
1550 return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
1553 static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
1555 return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
1558 static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
1560 return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
1563 static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
1565 return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
1568 static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
1570 return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
1573 static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
1575 return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
1578 static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
1580 return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
1583 static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
1585 return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
1589 * Exhaustive tests of atomic operations for all power-of-two operand
1590 * magnitudes, both for positive and negative values.
/*
 * NOTE(review): emits one 64-bit atomic test case. R1 = dst is stored
 * at R10-8, the atomic op applies R2 = src, then three checks run:
 * the memory word against the precomputed result R3, the source
 * register against the expected fetch value R4, and R0 against R5 to
 * verify R0 was preserved (keep). Each failed check exits immediately.
 */
1593 static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
1594 struct bpf_insn *insns, s64 dst, s64 src)
1596 int op = *(int *)arg;
1597 u64 keep, fetch, res;
/* Expected memory result for the ALU part of the atomic op */
1608 __bpf_alu_result(&res, dst, src, BPF_OP(op));
1611 keep = 0x0123456789abcdefULL;
1617 i += __bpf_ld_imm64(&insns[i], R0, keep);
1618 i += __bpf_ld_imm64(&insns[i], R1, dst);
1619 i += __bpf_ld_imm64(&insns[i], R2, src);
1620 i += __bpf_ld_imm64(&insns[i], R3, res);
1621 i += __bpf_ld_imm64(&insns[i], R4, fetch);
1622 i += __bpf_ld_imm64(&insns[i], R5, keep);
1624 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1625 insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
1626 insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
1628 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1629 insns[i++] = BPF_EXIT_INSN();
1631 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1632 insns[i++] = BPF_EXIT_INSN();
1634 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1635 insns[i++] = BPF_EXIT_INSN();
/*
 * 32-bit counterpart of __bpf_emit_atomic64: operands are truncated to
 * u32, the value lives at R10-4, and BPF_W-width atomics are used.
 */
1640 static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
1641 struct bpf_insn *insns, s64 dst, s64 src)
1643 int op = *(int *)arg;
1644 u64 keep, fetch, res;
/* Expected 32-bit result; dst/src truncated to their low words */
1655 __bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
1658 keep = 0x0123456789abcdefULL;
1664 i += __bpf_ld_imm64(&insns[i], R0, keep);
1665 i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1666 i += __bpf_ld_imm64(&insns[i], R2, src);
1667 i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
1668 i += __bpf_ld_imm64(&insns[i], R4, fetch);
1669 i += __bpf_ld_imm64(&insns[i], R5, keep);
1671 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1672 insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
1673 insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
1675 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1676 insns[i++] = BPF_EXIT_INSN();
1678 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1679 insns[i++] = BPF_EXIT_INSN();
1681 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1682 insns[i++] = BPF_EXIT_INSN();
/*
 * Emit a 64-bit BPF_CMPXCHG test case. First pass: R0 = ~dst does not
 * match the stored value dst, so memory must be unchanged and R0 must
 * receive the old value. Second pass: R0 now holds dst, so the exchange
 * succeeds and memory must contain src. Failures exit with __LINE__.
 */
1687 static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
1688 struct bpf_insn *insns, s64 dst, s64 src)
1695 i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1696 i += __bpf_ld_imm64(&insns[i], R1, dst);
1697 i += __bpf_ld_imm64(&insns[i], R2, src);
1699 /* Result unsuccessful */
1700 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1701 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1702 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
/* Memory must still contain the original dst value */
1704 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
1705 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1706 insns[i++] = BPF_EXIT_INSN();
/* R0 must have been loaded with the old value from memory */
1708 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1709 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1710 insns[i++] = BPF_EXIT_INSN();
1712 /* Result successful */
1713 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1714 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
/* Memory must now contain the new value src */
1716 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
1717 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1718 insns[i++] = BPF_EXIT_INSN();
/* R0 must hold the previous memory value, i.e. dst */
1720 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1721 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1722 insns[i++] = BPF_EXIT_INSN();
/*
 * 32-bit counterpart of __bpf_emit_cmpxchg64, using BPF_W width at
 * R10-4. An explicit BPF_ZEXT_REG(R0) follows each cmpxchg to mimic
 * the zero-extension the verifier would normally insert.
 */
1727 static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
1728 struct bpf_insn *insns, s64 dst, s64 src)
1735 i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1736 i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1737 i += __bpf_ld_imm64(&insns[i], R2, src);
1739 /* Result unsuccessful */
1740 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1741 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1742 insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
1743 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1745 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
1746 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1747 insns[i++] = BPF_EXIT_INSN();
1749 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1750 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1751 insns[i++] = BPF_EXIT_INSN();
1753 /* Result successful */
/* Reload R0 with the matching value so the exchange succeeds */
1754 i += __bpf_ld_imm64(&insns[i], R0, dst);
1755 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1756 insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
1757 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1759 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
1760 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1761 insns[i++] = BPF_EXIT_INSN();
1763 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1764 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1765 insns[i++] = BPF_EXIT_INSN();
/* Drive the 64-bit atomic emitter over the operand magnitude pattern */
1770 static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1772 return __bpf_fill_pattern(self, &op, 64, 64,
1774 &__bpf_emit_atomic64);
/* Drive the 32-bit atomic emitter over the operand magnitude pattern */
1777 static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1779 return __bpf_fill_pattern(self, &op, 64, 64,
1781 &__bpf_emit_atomic32);
1784 /* 64-bit atomic operations */
/* Each handler delegates to __bpf_fill_atomic64 with the atomic op code */
1785 static int bpf_fill_atomic64_add(struct bpf_test *self)
1787 return __bpf_fill_atomic64(self, BPF_ADD);
1790 static int bpf_fill_atomic64_and(struct bpf_test *self)
1792 return __bpf_fill_atomic64(self, BPF_AND);
1795 static int bpf_fill_atomic64_or(struct bpf_test *self)
1797 return __bpf_fill_atomic64(self, BPF_OR);
1800 static int bpf_fill_atomic64_xor(struct bpf_test *self)
1802 return __bpf_fill_atomic64(self, BPF_XOR);
1805 static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
1807 return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
1810 static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
1812 return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
1815 static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
1817 return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
1820 static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
1822 return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
1825 static int bpf_fill_atomic64_xchg(struct bpf_test *self)
1827 return __bpf_fill_atomic64(self, BPF_XCHG);
/* CMPXCHG has its own emitter since it checks R0 exchange semantics */
1830 static int bpf_fill_cmpxchg64(struct bpf_test *self)
1832 return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1833 &__bpf_emit_cmpxchg64);
1836 /* 32-bit atomic operations */
/* Each handler delegates to __bpf_fill_atomic32 with the atomic op code */
1837 static int bpf_fill_atomic32_add(struct bpf_test *self)
1839 return __bpf_fill_atomic32(self, BPF_ADD);
1842 static int bpf_fill_atomic32_and(struct bpf_test *self)
1844 return __bpf_fill_atomic32(self, BPF_AND);
1847 static int bpf_fill_atomic32_or(struct bpf_test *self)
1849 return __bpf_fill_atomic32(self, BPF_OR);
1852 static int bpf_fill_atomic32_xor(struct bpf_test *self)
1854 return __bpf_fill_atomic32(self, BPF_XOR);
1857 static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
1859 return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
1862 static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
1864 return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
1867 static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
1869 return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
1872 static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
1874 return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
1877 static int bpf_fill_atomic32_xchg(struct bpf_test *self)
1879 return __bpf_fill_atomic32(self, BPF_XCHG);
/* CMPXCHG has its own emitter since it checks R0 exchange semantics */
1882 static int bpf_fill_cmpxchg32(struct bpf_test *self)
1884 return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1885 &__bpf_emit_cmpxchg32);
1889 * Test JITs that implement ATOMIC operations as function calls or
1890 * other primitives, and must re-arrange operands for argument passing.
/*
 * NOTE(review): for every (rd, rs) pair in R0..R9, rd is used as the
 * memory base (copied from R10) and rs as the operand register. After
 * the atomic op, R0, rs, rd and the memory word are each checked,
 * with extensive aliasing special-cases (rd == rs, rd or rs == R0)
 * where a register serves two roles and one check subsumes another.
 * Failing checks load __LINE__ into R0 and exit; R0 = 1 means success.
 */
1892 static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
1894 struct bpf_insn *insn;
1895 int len = 2 + 34 * 10 * 10;
1899 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
1903 /* Operand and memory values */
1904 if (width == BPF_DW) {
1905 mem = 0x0123456789abcdefULL;
1906 upd = 0xfedcba9876543210ULL;
1907 } else { /* BPF_W */
1912 /* Memory updated according to operation */
1921 __bpf_alu_result(&res, mem, upd, BPF_OP(op));
1924 /* Test all operand registers */
1925 for (rd = R0; rd <= R9; rd++) {
1926 for (rs = R0; rs <= R9; rs++) {
1929 /* Initialize value in memory */
1930 i += __bpf_ld_imm64(&insn[i], R0, mem);
1931 insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
1933 /* Initialize registers in order */
1934 i += __bpf_ld_imm64(&insn[i], R0, ~mem);
1935 i += __bpf_ld_imm64(&insn[i], rs, upd);
1936 insn[i++] = BPF_MOV64_REG(rd, R10);
1938 /* Perform atomic operation */
1939 insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
/* Mimic the zero-extension the verifier inserts for 32-bit cmpxchg */
1940 if (op == BPF_CMPXCHG && width == BPF_W)
1941 insn[i++] = BPF_ZEXT_REG(R0);
1943 /* Check R0 register value */
1944 if (op == BPF_CMPXCHG)
1945 cmp = mem; /* Expect value from memory */
1946 else if (R0 == rd || R0 == rs)
1947 cmp = 0; /* Aliased, checked below */
1949 cmp = ~mem; /* Expect value to be preserved */
1951 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1953 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1954 insn[i++] = BPF_EXIT_INSN();
/* Also verify the high half of R0 */
1955 insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
1956 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1958 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1959 insn[i++] = BPF_EXIT_INSN();
1962 /* Check source register value */
1963 if (rs == R0 && op == BPF_CMPXCHG)
1964 src = 0; /* Aliased with R0, checked above */
1965 else if (rs == rd && (op == BPF_CMPXCHG ||
1967 src = 0; /* Aliased with rd, checked below */
1968 else if (op == BPF_CMPXCHG)
1969 src = upd; /* Expect value to be preserved */
1970 else if (op & BPF_FETCH)
1971 src = mem; /* Expect fetched value from mem */
1973 src = upd; /* Expect value to be preserved */
1975 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1977 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1978 insn[i++] = BPF_EXIT_INSN();
1979 insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
1980 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1982 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1983 insn[i++] = BPF_EXIT_INSN();
1986 /* Check destination register value */
1987 if (!(rd == R0 && op == BPF_CMPXCHG) &&
1988 !(rd == rs && (op & BPF_FETCH))) {
/* rd should still hold the frame pointer copy */
1989 insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
1990 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1991 insn[i++] = BPF_EXIT_INSN();
1994 /* Check value in memory */
1995 if (rs != rd) { /* No aliasing */
1996 i += __bpf_ld_imm64(&insn[i], R1, res);
1997 } else if (op == BPF_XCHG) { /* Aliased, XCHG */
1998 insn[i++] = BPF_MOV64_REG(R1, R10);
1999 } else if (op == BPF_CMPXCHG) { /* Aliased, CMPXCHG */
2000 i += __bpf_ld_imm64(&insn[i], R1, mem);
2001 } else { /* Aliased, ALU oper */
2002 i += __bpf_ld_imm64(&insn[i], R1, mem);
2003 insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
2006 insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
2007 if (width == BPF_DW)
2008 insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
2009 else /* width == BPF_W */
2010 insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
2011 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
2012 insn[i++] = BPF_EXIT_INSN();
/* All register pairs passed */
2016 insn[i++] = BPF_MOV64_IMM(R0, 1);
2017 insn[i++] = BPF_EXIT_INSN();
2019 self->u.ptr.insns = insn;
2020 self->u.ptr.len = i;
2026 /* 64-bit atomic register tests */
/* Each handler delegates to __bpf_fill_atomic_reg_pairs at BPF_DW width */
2027 static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
2029 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
2032 static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
2034 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
2037 static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
2039 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
2042 static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
2044 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
2047 static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
2049 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
2052 static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
2054 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
2057 static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
2059 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
2062 static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
2064 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
2067 static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
2069 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
2072 static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
2074 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
2077 /* 32-bit atomic register tests */
/* Each handler delegates to __bpf_fill_atomic_reg_pairs at BPF_W width */
2078 static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
2080 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
2083 static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
2085 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
2088 static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
2090 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
2093 static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
2095 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
2098 static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
2100 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
2103 static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
2105 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
2108 static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
2110 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
2113 static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
2115 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
2118 static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
2120 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
2123 static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
2125 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
2129 * Test the two-instruction 64-bit immediate load operation for all
2130 * power-of-two magnitudes of the immediate operand. For each MSB, a block
2131 * of immediate values centered around the power-of-two MSB are tested,
2132 * both for positive and negative values. The test is designed to verify
2133 * the operation for JITs that emit different code depending on the magnitude
2134 * of the immediate value. This is often the case if the native instruction
2135 * immediate field width is narrower than 32 bits.
2137 static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
2139 int block = 64; /* Increase for more tests per MSB position */
2140 int len = 3 + 8 * 63 * block * 2;
2141 struct bpf_insn *insn;
2145 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2149 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2151 for (bit = 0; bit <= 62; bit++) {
2152 for (adj = -block / 2; adj < block / 2; adj++) {
2153 for (sign = -1; sign <= 1; sign += 2) {
2154 s64 imm = sign * ((1LL << bit) + adj);
2156 /* Perform operation */
2157 i += __bpf_ld_imm64(&insn[i], R1, imm);
2159 /* Load reference */
/* Build the same 64-bit value from two 32-bit halves in R2 */
2160 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2161 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
2163 insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2164 insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
/* Mismatch leaves R0 = 0 (set in the prologue) and exits */
2167 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2168 insn[i++] = BPF_EXIT_INSN();
2173 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2174 insn[i++] = BPF_EXIT_INSN();
2176 self->u.ptr.insns = insn;
2177 self->u.ptr.len = len;
2184 * Test the two-instruction 64-bit immediate load operation for different
2185 * combinations of bytes. Each byte in the 64-bit word is constructed as
2186 * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2187 * All patterns (base1, mask1) and (base2, mask2) bytes are tested.
2189 static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
2193 struct bpf_insn *insn;
2194 int len = 3 + 8 * BIT(8);
2199 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2203 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
/* Each bit of 'pattern' selects which (base, mask) pair forms that byte */
2205 for (pattern = 0; pattern < BIT(8); pattern++) {
2208 for (index = 0; index < 8; index++) {
2211 if (pattern & BIT(index))
2212 byte = (base1 & mask1) | (rand & ~mask1);
2214 byte = (base2 & mask2) | (rand & ~mask2);
2215 imm = (imm << 8) | byte;
2218 /* Update our LCG */
/* Numerical Recipes LCG constants; deterministic across runs */
2219 rand = rand * 1664525 + 1013904223;
2221 /* Perform operation */
2222 i += __bpf_ld_imm64(&insn[i], R1, imm);
2224 /* Load reference */
2225 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2226 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
2227 insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2228 insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
/* Mismatch leaves R0 = 0 (set in the prologue) and exits */
2231 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2232 insn[i++] = BPF_EXIT_INSN();
2235 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2236 insn[i++] = BPF_EXIT_INSN();
2238 self->u.ptr.insns = insn;
2239 self->u.ptr.len = len;
/* Alternating 0x00/0xff bytes (checkerboard) */
2245 static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
2247 return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
/* Bytes with fixed low/high bits, mixing positive and negative values */
2250 static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
2252 return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
/* Positive bytes mixed with fully random bytes */
2255 static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
2257 return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
/* Negative bytes mixed with fully random bytes */
2260 static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
2262 return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
2266 * Exhaustive tests of JMP operations for all combinations of power-of-two
2267 * magnitudes of the operands, both for positive and negative values. The
2268 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2269 * emit different code depending on the magnitude of the immediate value.
/*
 * Reference model: evaluate the jump condition 'op' on v1, v2 in C.
 * Unsigned ops compare the bit patterns as u64. NOTE(review): only the
 * JGT/JGE/JLT/JLE arms are visible in this chunk — the equality, JSET
 * and signed arms presumably live in elided switch cases; verify
 * against the full source.
 */
2272 static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2282 return (u64)v1 > (u64)v2;
2284 return (u64)v1 >= (u64)v2;
2286 return (u64)v1 < (u64)v2;
2288 return (u64)v1 <= (u64)v2;
/*
 * Emit one JMP-immediate test: preload R0 with the expected truth value
 * from the reference model, perform the conditional jump, and exit.
 * The JA/EXIT pair arranges distinct fall-through vs taken paths.
 */
2301 static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
2302 struct bpf_insn *insns, s64 dst, s64 imm)
2304 int op = *(int *)arg;
/* Immediate is sign-extended from 32 bits, hence the (s32) cast */
2307 bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
2310 insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
2312 i += __bpf_ld_imm64(&insns[i], R1, dst);
2313 insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
2315 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2316 insns[i++] = BPF_EXIT_INSN();
/*
 * JMP32 variant of __bpf_emit_jmp_imm: the reference model compares
 * only the low 32 bits of dst against the 32-bit immediate.
 */
2324 static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
2325 struct bpf_insn *insns, s64 dst, s64 imm)
2327 int op = *(int *)arg;
2330 bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
2333 i += __bpf_ld_imm64(&insns[i], R1, dst);
2334 insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
2336 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2337 insns[i++] = BPF_EXIT_INSN();
/*
 * Register-operand variant: both operands are loaded as full 64-bit
 * values into R1/R2 before the conditional JMP_REG is performed.
 */
2345 static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
2346 struct bpf_insn *insns, s64 dst, s64 src)
2348 int op = *(int *)arg;
2351 bool match = __bpf_match_jmp_cond(dst, src, op);
2354 i += __bpf_ld_imm64(&insns[i], R1, dst);
2355 i += __bpf_ld_imm64(&insns[i], R2, src);
2356 insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
2358 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2359 insns[i++] = BPF_EXIT_INSN();
/*
 * JMP32 register variant: the reference model truncates both operands
 * to 32 bits, matching BPF_JMP32_REG semantics.
 */
2367 static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
2368 struct bpf_insn *insns, s64 dst, s64 src)
2370 int op = *(int *)arg;
2373 bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
2376 i += __bpf_ld_imm64(&insns[i], R1, dst);
2377 i += __bpf_ld_imm64(&insns[i], R2, src);
2378 insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
2380 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2381 insns[i++] = BPF_EXIT_INSN();
/* Drive each jump emitter over the operand magnitude patterns.
 * Immediate variants use 32-bit second operands; register variants 64-bit. */
2389 static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
2391 return __bpf_fill_pattern(self, &op, 64, 32,
2392 PATTERN_BLOCK1, PATTERN_BLOCK2,
2393 &__bpf_emit_jmp_imm);
2396 static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
2398 return __bpf_fill_pattern(self, &op, 64, 32,
2399 PATTERN_BLOCK1, PATTERN_BLOCK2,
2400 &__bpf_emit_jmp32_imm);
2403 static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
2405 return __bpf_fill_pattern(self, &op, 64, 64,
2406 PATTERN_BLOCK1, PATTERN_BLOCK2,
2407 &__bpf_emit_jmp_reg);
2410 static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
2412 return __bpf_fill_pattern(self, &op, 64, 64,
2413 PATTERN_BLOCK1, PATTERN_BLOCK2,
2414 &__bpf_emit_jmp32_reg);
2417 /* JMP immediate tests */
/* Each handler delegates to __bpf_fill_jmp_imm with the jump op code */
2418 static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
2420 return __bpf_fill_jmp_imm(self, BPF_JSET);
2423 static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
2425 return __bpf_fill_jmp_imm(self, BPF_JEQ);
2428 static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
2430 return __bpf_fill_jmp_imm(self, BPF_JNE);
2433 static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
2435 return __bpf_fill_jmp_imm(self, BPF_JGT);
2438 static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
2440 return __bpf_fill_jmp_imm(self, BPF_JGE);
2443 static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
2445 return __bpf_fill_jmp_imm(self, BPF_JLT);
2448 static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
2450 return __bpf_fill_jmp_imm(self, BPF_JLE);
2453 static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
2455 return __bpf_fill_jmp_imm(self, BPF_JSGT);
2458 static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
2460 return __bpf_fill_jmp_imm(self, BPF_JSGE);
2463 static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
2465 return __bpf_fill_jmp_imm(self, BPF_JSLT);
2468 static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
2470 return __bpf_fill_jmp_imm(self, BPF_JSLE);
2473 /* JMP32 immediate tests */
/* Each handler delegates to __bpf_fill_jmp32_imm with the jump op code */
2474 static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
2476 return __bpf_fill_jmp32_imm(self, BPF_JSET);
2479 static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
2481 return __bpf_fill_jmp32_imm(self, BPF_JEQ);
2484 static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
2486 return __bpf_fill_jmp32_imm(self, BPF_JNE);
2489 static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
2491 return __bpf_fill_jmp32_imm(self, BPF_JGT);
2494 static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
2496 return __bpf_fill_jmp32_imm(self, BPF_JGE);
2499 static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
2501 return __bpf_fill_jmp32_imm(self, BPF_JLT);
2504 static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
2506 return __bpf_fill_jmp32_imm(self, BPF_JLE);
2509 static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
2511 return __bpf_fill_jmp32_imm(self, BPF_JSGT);
2514 static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
2516 return __bpf_fill_jmp32_imm(self, BPF_JSGE);
2519 static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
2521 return __bpf_fill_jmp32_imm(self, BPF_JSLT);
2524 static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
2526 return __bpf_fill_jmp32_imm(self, BPF_JSLE);
2529 /* JMP register tests */
/* Each handler delegates to __bpf_fill_jmp_reg with the jump op code */
2530 static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
2532 return __bpf_fill_jmp_reg(self, BPF_JSET);
2535 static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
2537 return __bpf_fill_jmp_reg(self, BPF_JEQ);
2540 static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
2542 return __bpf_fill_jmp_reg(self, BPF_JNE);
2545 static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
2547 return __bpf_fill_jmp_reg(self, BPF_JGT);
2550 static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
2552 return __bpf_fill_jmp_reg(self, BPF_JGE);
2555 static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
2557 return __bpf_fill_jmp_reg(self, BPF_JLT);
2560 static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
2562 return __bpf_fill_jmp_reg(self, BPF_JLE);
2565 static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
2567 return __bpf_fill_jmp_reg(self, BPF_JSGT);
2570 static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
2572 return __bpf_fill_jmp_reg(self, BPF_JSGE);
2575 static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
2577 return __bpf_fill_jmp_reg(self, BPF_JSLT);
2580 static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
2582 return __bpf_fill_jmp_reg(self, BPF_JSLE);
2585 /* JMP32 register tests */
/* Each handler delegates to __bpf_fill_jmp32_reg with the jump op code */
2586 static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
2588 return __bpf_fill_jmp32_reg(self, BPF_JSET);
2591 static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
2593 return __bpf_fill_jmp32_reg(self, BPF_JEQ);
2596 static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
2598 return __bpf_fill_jmp32_reg(self, BPF_JNE);
2601 static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
2603 return __bpf_fill_jmp32_reg(self, BPF_JGT);
2606 static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
2608 return __bpf_fill_jmp32_reg(self, BPF_JGE);
2611 static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
2613 return __bpf_fill_jmp32_reg(self, BPF_JLT);
2616 static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
2618 return __bpf_fill_jmp32_reg(self, BPF_JLE);
2621 static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
2623 return __bpf_fill_jmp32_reg(self, BPF_JSGT);
2626 static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
2628 return __bpf_fill_jmp32_reg(self, BPF_JSGE);
2631 static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
2633 return __bpf_fill_jmp32_reg(self, BPF_JSLT);
2636 static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
2638 return __bpf_fill_jmp32_reg(self, BPF_JSLE);
2642 * Set up a sequence of staggered jumps, forwards and backwards with
2643 * increasing offset. This tests the conversion of relative jumps to
2644 * JITed native jumps. On some architectures, for example MIPS, a large
2645 * PC-relative jump offset may overflow the immediate field of the native
2646 * conditional branch instruction, triggering a conversion to use an
2647 * absolute jump instead. Since this changes the jump offsets, another
2648 * offset computation pass is necessary, and that may in turn trigger
2649 * another branch conversion. This jump sequence is particularly nasty
2652 * The sequence generation is parameterized by size and jump type.
2653 * The size must be even, and the expected result is always size + 1.
2654 * Below is an example with size=8 and result=9.
2656 * ________________________Start
2660 * ,------- JMP +4 * 3______________Preamble: 4 insns
2661 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2663 * | | JMP +7 * 3 ------------------------.
2664 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
2665 * | | | R0 = 6 | | |
2666 * | | | JMP +5 * 3 ------------------. | |
2667 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
2668 * | | | | R0 = 4 | | | | |
2669 * | | | | JMP +3 * 3 ------------. | | | |
2670 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
2671 * | | | | | R0 = 2 | | | | | | |
2672 * | | | | | JMP +1 * 3 ------. | | | | | |
2673 * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
2674 * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
2675 * | | | | | JMP -2 * 3 ---' | | | | | | |
2676 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
2677 * | | | | | | R0 = 3 | | | | | |
2678 * | | | | | | JMP -4 * 3 ---------' | | | | |
2679 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
2680 * | | | | | | | R0 = 5 | | | |
2681 * | | | | | | | JMP -6 * 3 ---------------' | | |
2682 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
2683 * | | | | | | | | R0 = 7 | |
2684 * | | Error | | | JMP -8 * 3 ---------------------' |
2685 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2686 * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
2687 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2691 /* The maximum size parameter */
2692 #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2694 /* We use a reduced number of iterations to get a reasonable execution time */
2695 #define NR_STAGGERED_JMP_RUNS 10
/*
 * Build the staggered-jump sequence described in the comment above.
 * 'jmp' is the template conditional jump (its .off is patched per
 * position); r1/r2 preload the operand registers the template compares.
 * The expected result (size + 1) is read back from self->test[0].result.
 */
2697 static int __bpf_fill_staggered_jumps(struct bpf_test *self,
2698 const struct bpf_insn *jmp,
/* Recover the sequence size from the pre-set expected result */
2701 int size = self->test[0].result - 1;
2702 int len = 4 + 3 * (size + 1);
2703 struct bpf_insn *insns;
2706 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
/* Preamble: R0 counter, operand registers, jump into the middle */
2711 insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2712 insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
2713 insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
2714 insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
/* Each 3-insn group: guard check, counter update, staggered jump */
2717 for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
2718 struct bpf_insn *ins = &insns[4 + 3 * ind];
2725 ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
2726 3 * (size - ind) + 1);
2727 ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
/* Copy the template jump and patch its relative offset */
2729 ins[2].off = 3 * (off - 1);
2733 insns[len - 1] = BPF_EXIT_INSN();
2735 self->u.ptr.insns = insns;
2736 self->u.ptr.len = len;
2741 /* 64-bit unconditional jump */
2742 static int bpf_fill_staggered_ja(struct bpf_test *self)
2744 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
2746 return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
2749 /* 64-bit immediate jumps */
/* r1 is chosen so the template condition is always true at runtime */
2750 static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
2752 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
2754 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2757 static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
2759 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
2761 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2764 static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
2766 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
2768 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2771 static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
2773 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
2775 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2778 static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
2780 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
2782 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2785 static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
2787 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
2789 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2792 static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
2794 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
2796 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2799 static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
2801 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
2803 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2806 static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
2808 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
2810 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2813 static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
2815 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
2817 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2820 static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
2822 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
2824 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2827 /* 64-bit register jumps */
/* r1/r2 are chosen so the template condition is always true at runtime */
2828 static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
2830 struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
2832 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2835 static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
2837 struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
2839 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2842 static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
2844 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
2846 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2849 static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
2851 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
2853 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2856 static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
2858 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
2860 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2863 static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
2865 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
2867 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2870 static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
2872 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
2874 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2877 static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
2879 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
2881 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2884 static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
2886 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
2888 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2891 static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
2893 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
2895 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2898 static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
2900 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
2902 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2905 /* 32-bit immediate jumps */
2906 static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2908 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2910 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2913 static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2915 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2917 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2920 static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2922 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2924 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2927 static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2929 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2931 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2934 static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2936 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2938 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2941 static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2943 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2945 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2948 static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2950 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2952 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2955 static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2957 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2959 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2962 static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2964 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2966 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2969 static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2971 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2973 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2976 static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2978 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
2980 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2983 /* 32-bit register jumps */
2984 static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
2986 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
2988 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2991 static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
2993 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
2995 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2998 static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
3000 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
3002 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
3005 static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
3007 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
3009 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
3012 static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
3014 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
3016 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3019 static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
3021 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
3023 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
3026 static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
3028 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
3030 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3033 static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
3035 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
3037 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
3040 static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
3042 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
3044 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
3047 static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
3049 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
3051 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
3054 static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
3056 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
3058 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
3062 static struct bpf_test tests[] = {
3066 BPF_STMT(BPF_LD | BPF_IMM, 1),
3067 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3068 BPF_STMT(BPF_LD | BPF_IMM, 2),
3069 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3070 BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
3071 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3072 BPF_STMT(BPF_LD | BPF_LEN, 0),
3073 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3074 BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
3075 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
3076 BPF_STMT(BPF_RET | BPF_A, 0)
3079 { 10, 20, 30, 40, 50 },
3080 { { 2, 10 }, { 3, 20 }, { 4, 30 } },
3085 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3086 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3087 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3088 BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3091 { 10, 20, 30, 40, 50 },
3092 { { 1, 2 }, { 3, 6 }, { 4, 8 } },
3097 BPF_STMT(BPF_LD | BPF_IMM, 1),
3098 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3099 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3100 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3101 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3102 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3103 BPF_STMT(BPF_RET | BPF_A, 0)
3105 CLASSIC | FLAG_NO_DATA,
3107 { { 0, 0xfffffffd } }
3112 BPF_STMT(BPF_LD | BPF_IMM, 8),
3113 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3114 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3115 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3116 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3117 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3118 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3119 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3120 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3121 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3122 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3123 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3124 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3125 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3126 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3127 BPF_STMT(BPF_RET | BPF_A, 0)
3129 CLASSIC | FLAG_NO_DATA,
3131 { { 0, 0x20000000 } }
3136 BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3137 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3138 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3139 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3140 BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3141 BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3142 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3143 BPF_STMT(BPF_RET | BPF_A, 0)
3145 CLASSIC | FLAG_NO_DATA,
3147 { { 0, 0x800000ff }, { 1, 0x800000ff } },
3152 BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3153 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3154 BPF_STMT(BPF_RET | BPF_K, 0),
3155 BPF_STMT(BPF_RET | BPF_K, 1),
3164 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3165 BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3166 BPF_STMT(BPF_RET | BPF_K, 1)
3170 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3175 BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3176 BPF_STMT(BPF_RET | BPF_K, 1)
3180 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3185 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3186 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3187 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3188 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3189 BPF_STMT(BPF_RET | BPF_A, 0)
3193 { { 1, 0 }, { 2, 3 } },
3198 BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3199 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3200 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3201 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3202 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3203 BPF_STMT(BPF_RET | BPF_A, 0)
3207 { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3212 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3213 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3214 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3215 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3216 BPF_STMT(BPF_RET | BPF_A, 0)
3219 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3220 { { 15, 0 }, { 16, 3 } },
3225 BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3226 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3227 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3228 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3229 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3230 BPF_STMT(BPF_RET | BPF_A, 0)
3233 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3234 { { 14, 0 }, { 15, 1 }, { 17, 3 } },
3239 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3240 SKF_AD_OFF + SKF_AD_PKTTYPE),
3241 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3242 BPF_STMT(BPF_RET | BPF_K, 1),
3243 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3244 SKF_AD_OFF + SKF_AD_PKTTYPE),
3245 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3246 BPF_STMT(BPF_RET | BPF_K, 1),
3247 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3248 SKF_AD_OFF + SKF_AD_PKTTYPE),
3249 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3250 BPF_STMT(BPF_RET | BPF_K, 1),
3251 BPF_STMT(BPF_RET | BPF_A, 0)
3255 { { 1, 3 }, { 10, 3 } },
3260 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3261 SKF_AD_OFF + SKF_AD_MARK),
3262 BPF_STMT(BPF_RET | BPF_A, 0)
3266 { { 1, SKB_MARK}, { 10, SKB_MARK} },
3271 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3272 SKF_AD_OFF + SKF_AD_RXHASH),
3273 BPF_STMT(BPF_RET | BPF_A, 0)
3277 { { 1, SKB_HASH}, { 10, SKB_HASH} },
3282 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3283 SKF_AD_OFF + SKF_AD_QUEUE),
3284 BPF_STMT(BPF_RET | BPF_A, 0)
3288 { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3293 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3294 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3295 BPF_STMT(BPF_RET | BPF_K, 0),
3296 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3297 SKF_AD_OFF + SKF_AD_PROTOCOL),
3298 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3299 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3300 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3301 BPF_STMT(BPF_RET | BPF_K, 0),
3302 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3303 BPF_STMT(BPF_RET | BPF_A, 0)
3307 { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3312 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3313 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3314 BPF_STMT(BPF_RET | BPF_A, 0)
3319 { 1, SKB_VLAN_TCI },
3320 { 10, SKB_VLAN_TCI }
3324 "LD_VLAN_TAG_PRESENT",
3326 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3327 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3328 BPF_STMT(BPF_RET | BPF_A, 0)
3333 { 1, SKB_VLAN_PRESENT },
3334 { 10, SKB_VLAN_PRESENT }
3340 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3341 SKF_AD_OFF + SKF_AD_IFINDEX),
3342 BPF_STMT(BPF_RET | BPF_A, 0)
3346 { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3351 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3352 SKF_AD_OFF + SKF_AD_HATYPE),
3353 BPF_STMT(BPF_RET | BPF_A, 0)
3357 { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3362 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3363 SKF_AD_OFF + SKF_AD_CPU),
3364 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3365 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3366 SKF_AD_OFF + SKF_AD_CPU),
3367 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3368 BPF_STMT(BPF_RET | BPF_A, 0)
3372 { { 1, 0 }, { 10, 0 } },
3377 BPF_STMT(BPF_LDX | BPF_IMM, 2),
3378 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3379 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3380 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3381 SKF_AD_OFF + SKF_AD_NLATTR),
3382 BPF_STMT(BPF_RET | BPF_A, 0)
3386 { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3388 { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3390 { { 4, 0 }, { 20, 6 } },
3395 BPF_STMT(BPF_LD | BPF_IMM, 2),
3396 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3397 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3398 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3399 BPF_STMT(BPF_LD | BPF_IMM, 2),
3400 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3401 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3402 BPF_STMT(BPF_LD | BPF_IMM, 2),
3403 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3404 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3405 BPF_STMT(BPF_LD | BPF_IMM, 2),
3406 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3407 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3408 BPF_STMT(BPF_LD | BPF_IMM, 2),
3409 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3410 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3411 BPF_STMT(BPF_LD | BPF_IMM, 2),
3412 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3413 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3414 BPF_STMT(BPF_LD | BPF_IMM, 2),
3415 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3416 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3417 BPF_STMT(BPF_LD | BPF_IMM, 2),
3418 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3419 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3420 BPF_STMT(BPF_RET | BPF_A, 0)
3424 { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3426 { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3428 { { 4, 0 }, { 20, 10 } },
3433 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3434 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3435 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3436 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3437 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3438 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3439 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3440 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3441 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3442 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3443 BPF_STMT(BPF_RET | BPF_A, 0)
3446 /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
3447 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3448 * id 9737, seq 1, length 64
3450 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3451 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3453 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3454 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3455 { { 30, 0 }, { 100, 42 } },
3460 BPF_STMT(BPF_LD | BPF_IMM, 10),
3461 BPF_STMT(BPF_LDX | BPF_IMM, 300),
3462 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3463 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3464 BPF_STMT(BPF_RET | BPF_A, 0)
3468 { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3473 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3474 BPF_STMT(BPF_LD | BPF_IMM, 2),
3475 BPF_STMT(BPF_ALU | BPF_RSH, 1),
3476 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3477 BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3478 BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3479 BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3480 BPF_STMT(BPF_STX, 15), /* M3 = len */
3481 BPF_STMT(BPF_LDX | BPF_MEM, 1),
3482 BPF_STMT(BPF_LD | BPF_MEM, 2),
3483 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3484 BPF_STMT(BPF_LDX | BPF_MEM, 15),
3485 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3486 BPF_STMT(BPF_RET | BPF_A, 0)
3490 { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3495 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3496 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3497 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3498 BPF_STMT(BPF_RET | BPF_K, 1),
3499 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3503 { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3508 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3509 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3510 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3511 BPF_STMT(BPF_RET | BPF_K, 1),
3512 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3516 { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3519 "JGE (jt 0), test 1",
3521 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3522 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3523 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3524 BPF_STMT(BPF_RET | BPF_K, 1),
3525 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3529 { { 2, 0 }, { 3, 1 }, { 4, 1 } },
3532 "JGE (jt 0), test 2",
3534 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3535 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3536 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3537 BPF_STMT(BPF_RET | BPF_K, 1),
3538 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3542 { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3547 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3548 BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3549 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3550 BPF_STMT(BPF_RET | BPF_K, 10),
3551 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3552 BPF_STMT(BPF_RET | BPF_K, 20),
3553 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3554 BPF_STMT(BPF_RET | BPF_K, 30),
3555 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3556 BPF_STMT(BPF_RET | BPF_K, 40),
3557 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3561 { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3566 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3567 BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3568 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3569 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3570 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3571 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3572 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3573 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3574 BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3575 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3576 BPF_STMT(BPF_RET | BPF_K, 10),
3577 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3578 BPF_STMT(BPF_RET | BPF_K, 20),
3579 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3580 BPF_STMT(BPF_RET | BPF_K, 30),
3581 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3582 BPF_STMT(BPF_RET | BPF_K, 30),
3583 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3584 BPF_STMT(BPF_RET | BPF_K, 30),
3585 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3586 BPF_STMT(BPF_RET | BPF_K, 30),
3587 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3588 BPF_STMT(BPF_RET | BPF_K, 30),
3589 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3592 { 0, 0xAA, 0x55, 1 },
3593 { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3598 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3599 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3600 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3601 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3602 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3603 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3604 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3605 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3606 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3607 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3608 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3609 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3610 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3611 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3612 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3613 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3614 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3615 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3616 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3617 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3618 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3619 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3620 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3621 BPF_STMT(BPF_RET | BPF_K, 0),
3624 /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3625 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3626 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3627 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3629 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3630 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3632 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3633 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3634 0x0a, 0x01, 0x01, 0x95, /* ip src */
3635 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3637 0x00, 0x16 /* dst port */ },
3638 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3643 /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3644 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3645 * (len > 115 or len < 30000000000)' -d
3647 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3648 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3649 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3650 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3651 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3652 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3653 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3654 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3655 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3656 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3657 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3658 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3659 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3660 BPF_STMT(BPF_ST, 1),
3661 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3662 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3663 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3664 BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3665 BPF_STMT(BPF_LD | BPF_MEM, 1),
3666 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3667 BPF_STMT(BPF_ST, 5),
3668 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3669 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3670 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3671 BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3672 BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3673 BPF_STMT(BPF_LD | BPF_MEM, 5),
3674 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3675 BPF_STMT(BPF_LD | BPF_LEN, 0),
3676 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3677 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3678 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3679 BPF_STMT(BPF_RET | BPF_K, 0),
3682 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3683 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3685 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3686 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3687 0x0a, 0x01, 0x01, 0x95, /* ip src */
3688 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3690 0x00, 0x16 /* dst port */ },
3691 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3696 /* check that uninitialized X and A contain zeros */
3697 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3698 BPF_STMT(BPF_RET | BPF_A, 0)
3707 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3708 BPF_ALU64_IMM(BPF_ADD, R1, 2),
3709 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3710 BPF_ALU64_REG(BPF_SUB, R1, R2),
3711 BPF_ALU64_IMM(BPF_ADD, R1, -1),
3712 BPF_ALU64_IMM(BPF_MUL, R1, 3),
3713 BPF_ALU64_REG(BPF_MOV, R0, R1),
3718 { { 0, 0xfffffffd } }
3723 BPF_ALU64_IMM(BPF_MOV, R0, -1),
3724 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3725 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3726 BPF_ALU64_REG(BPF_MUL, R1, R2),
3727 BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3729 BPF_ALU64_IMM(BPF_MOV, R0, 1),
3739 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3740 BPF_ALU32_IMM(BPF_MOV, R1, -1),
3741 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3742 BPF_ALU64_REG(BPF_MUL, R1, R2),
3743 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3744 BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3746 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3756 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3757 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3758 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3759 BPF_ALU32_REG(BPF_MUL, R1, R2),
3760 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3761 BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3763 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3771 /* Have to test all register combinations, since
3772 * JITing of different registers will produce
3773 * different asm code.
3777 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3778 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3779 BPF_ALU64_IMM(BPF_MOV, R2, 2),
3780 BPF_ALU64_IMM(BPF_MOV, R3, 3),
3781 BPF_ALU64_IMM(BPF_MOV, R4, 4),
3782 BPF_ALU64_IMM(BPF_MOV, R5, 5),
3783 BPF_ALU64_IMM(BPF_MOV, R6, 6),
3784 BPF_ALU64_IMM(BPF_MOV, R7, 7),
3785 BPF_ALU64_IMM(BPF_MOV, R8, 8),
3786 BPF_ALU64_IMM(BPF_MOV, R9, 9),
3787 BPF_ALU64_IMM(BPF_ADD, R0, 20),
3788 BPF_ALU64_IMM(BPF_ADD, R1, 20),
3789 BPF_ALU64_IMM(BPF_ADD, R2, 20),
3790 BPF_ALU64_IMM(BPF_ADD, R3, 20),
3791 BPF_ALU64_IMM(BPF_ADD, R4, 20),
3792 BPF_ALU64_IMM(BPF_ADD, R5, 20),
3793 BPF_ALU64_IMM(BPF_ADD, R6, 20),
3794 BPF_ALU64_IMM(BPF_ADD, R7, 20),
3795 BPF_ALU64_IMM(BPF_ADD, R8, 20),
3796 BPF_ALU64_IMM(BPF_ADD, R9, 20),
3797 BPF_ALU64_IMM(BPF_SUB, R0, 10),
3798 BPF_ALU64_IMM(BPF_SUB, R1, 10),
3799 BPF_ALU64_IMM(BPF_SUB, R2, 10),
3800 BPF_ALU64_IMM(BPF_SUB, R3, 10),
3801 BPF_ALU64_IMM(BPF_SUB, R4, 10),
3802 BPF_ALU64_IMM(BPF_SUB, R5, 10),
3803 BPF_ALU64_IMM(BPF_SUB, R6, 10),
3804 BPF_ALU64_IMM(BPF_SUB, R7, 10),
3805 BPF_ALU64_IMM(BPF_SUB, R8, 10),
3806 BPF_ALU64_IMM(BPF_SUB, R9, 10),
3807 BPF_ALU64_REG(BPF_ADD, R0, R0),
3808 BPF_ALU64_REG(BPF_ADD, R0, R1),
3809 BPF_ALU64_REG(BPF_ADD, R0, R2),
3810 BPF_ALU64_REG(BPF_ADD, R0, R3),
3811 BPF_ALU64_REG(BPF_ADD, R0, R4),
3812 BPF_ALU64_REG(BPF_ADD, R0, R5),
3813 BPF_ALU64_REG(BPF_ADD, R0, R6),
3814 BPF_ALU64_REG(BPF_ADD, R0, R7),
3815 BPF_ALU64_REG(BPF_ADD, R0, R8),
3816 BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3817 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3819 BPF_ALU64_REG(BPF_ADD, R1, R0),
3820 BPF_ALU64_REG(BPF_ADD, R1, R1),
3821 BPF_ALU64_REG(BPF_ADD, R1, R2),
3822 BPF_ALU64_REG(BPF_ADD, R1, R3),
3823 BPF_ALU64_REG(BPF_ADD, R1, R4),
3824 BPF_ALU64_REG(BPF_ADD, R1, R5),
3825 BPF_ALU64_REG(BPF_ADD, R1, R6),
3826 BPF_ALU64_REG(BPF_ADD, R1, R7),
3827 BPF_ALU64_REG(BPF_ADD, R1, R8),
3828 BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3829 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3831 BPF_ALU64_REG(BPF_ADD, R2, R0),
3832 BPF_ALU64_REG(BPF_ADD, R2, R1),
3833 BPF_ALU64_REG(BPF_ADD, R2, R2),
3834 BPF_ALU64_REG(BPF_ADD, R2, R3),
3835 BPF_ALU64_REG(BPF_ADD, R2, R4),
3836 BPF_ALU64_REG(BPF_ADD, R2, R5),
3837 BPF_ALU64_REG(BPF_ADD, R2, R6),
3838 BPF_ALU64_REG(BPF_ADD, R2, R7),
3839 BPF_ALU64_REG(BPF_ADD, R2, R8),
3840 BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3841 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3843 BPF_ALU64_REG(BPF_ADD, R3, R0),
3844 BPF_ALU64_REG(BPF_ADD, R3, R1),
3845 BPF_ALU64_REG(BPF_ADD, R3, R2),
3846 BPF_ALU64_REG(BPF_ADD, R3, R3),
3847 BPF_ALU64_REG(BPF_ADD, R3, R4),
3848 BPF_ALU64_REG(BPF_ADD, R3, R5),
3849 BPF_ALU64_REG(BPF_ADD, R3, R6),
3850 BPF_ALU64_REG(BPF_ADD, R3, R7),
3851 BPF_ALU64_REG(BPF_ADD, R3, R8),
3852 BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3853 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3855 BPF_ALU64_REG(BPF_ADD, R4, R0),
3856 BPF_ALU64_REG(BPF_ADD, R4, R1),
3857 BPF_ALU64_REG(BPF_ADD, R4, R2),
3858 BPF_ALU64_REG(BPF_ADD, R4, R3),
3859 BPF_ALU64_REG(BPF_ADD, R4, R4),
3860 BPF_ALU64_REG(BPF_ADD, R4, R5),
3861 BPF_ALU64_REG(BPF_ADD, R4, R6),
3862 BPF_ALU64_REG(BPF_ADD, R4, R7),
3863 BPF_ALU64_REG(BPF_ADD, R4, R8),
3864 BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3865 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3867 BPF_ALU64_REG(BPF_ADD, R5, R0),
3868 BPF_ALU64_REG(BPF_ADD, R5, R1),
3869 BPF_ALU64_REG(BPF_ADD, R5, R2),
3870 BPF_ALU64_REG(BPF_ADD, R5, R3),
3871 BPF_ALU64_REG(BPF_ADD, R5, R4),
3872 BPF_ALU64_REG(BPF_ADD, R5, R5),
3873 BPF_ALU64_REG(BPF_ADD, R5, R6),
3874 BPF_ALU64_REG(BPF_ADD, R5, R7),
3875 BPF_ALU64_REG(BPF_ADD, R5, R8),
3876 BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3877 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3879 BPF_ALU64_REG(BPF_ADD, R6, R0),
3880 BPF_ALU64_REG(BPF_ADD, R6, R1),
3881 BPF_ALU64_REG(BPF_ADD, R6, R2),
3882 BPF_ALU64_REG(BPF_ADD, R6, R3),
3883 BPF_ALU64_REG(BPF_ADD, R6, R4),
3884 BPF_ALU64_REG(BPF_ADD, R6, R5),
3885 BPF_ALU64_REG(BPF_ADD, R6, R6),
3886 BPF_ALU64_REG(BPF_ADD, R6, R7),
3887 BPF_ALU64_REG(BPF_ADD, R6, R8),
3888 BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3889 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3891 BPF_ALU64_REG(BPF_ADD, R7, R0),
3892 BPF_ALU64_REG(BPF_ADD, R7, R1),
3893 BPF_ALU64_REG(BPF_ADD, R7, R2),
3894 BPF_ALU64_REG(BPF_ADD, R7, R3),
3895 BPF_ALU64_REG(BPF_ADD, R7, R4),
3896 BPF_ALU64_REG(BPF_ADD, R7, R5),
3897 BPF_ALU64_REG(BPF_ADD, R7, R6),
3898 BPF_ALU64_REG(BPF_ADD, R7, R7),
3899 BPF_ALU64_REG(BPF_ADD, R7, R8),
3900 BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3901 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3903 BPF_ALU64_REG(BPF_ADD, R8, R0),
3904 BPF_ALU64_REG(BPF_ADD, R8, R1),
3905 BPF_ALU64_REG(BPF_ADD, R8, R2),
3906 BPF_ALU64_REG(BPF_ADD, R8, R3),
3907 BPF_ALU64_REG(BPF_ADD, R8, R4),
3908 BPF_ALU64_REG(BPF_ADD, R8, R5),
3909 BPF_ALU64_REG(BPF_ADD, R8, R6),
3910 BPF_ALU64_REG(BPF_ADD, R8, R7),
3911 BPF_ALU64_REG(BPF_ADD, R8, R8),
3912 BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3913 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3915 BPF_ALU64_REG(BPF_ADD, R9, R0),
3916 BPF_ALU64_REG(BPF_ADD, R9, R1),
3917 BPF_ALU64_REG(BPF_ADD, R9, R2),
3918 BPF_ALU64_REG(BPF_ADD, R9, R3),
3919 BPF_ALU64_REG(BPF_ADD, R9, R4),
3920 BPF_ALU64_REG(BPF_ADD, R9, R5),
3921 BPF_ALU64_REG(BPF_ADD, R9, R6),
3922 BPF_ALU64_REG(BPF_ADD, R9, R7),
3923 BPF_ALU64_REG(BPF_ADD, R9, R8),
3924 BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3925 BPF_ALU64_REG(BPF_MOV, R0, R9),
/* ALU32 ADD register-chaining test: seed R0..R9 with 20,1..9, bump
 * R1..R9 by 10, then fold every register into each accumulator in turn,
 * checking the running totals (155, 456, 1358, ...) with JEQ after each
 * round. Exercises 32-bit ADD on all register pairs (JIT coverage).
 * NOTE(review): entry header and the failure-exit lines between JEQs are
 * elided in this extract (numbering gaps, e.g. 3963 -> 3965) --
 * presumably BPF_EXIT_INSN(); confirm against upstream lib/test_bpf.c.
 */
3935 BPF_ALU32_IMM(BPF_MOV, R0, 20),
3936 BPF_ALU32_IMM(BPF_MOV, R1, 1),
3937 BPF_ALU32_IMM(BPF_MOV, R2, 2),
3938 BPF_ALU32_IMM(BPF_MOV, R3, 3),
3939 BPF_ALU32_IMM(BPF_MOV, R4, 4),
3940 BPF_ALU32_IMM(BPF_MOV, R5, 5),
3941 BPF_ALU32_IMM(BPF_MOV, R6, 6),
3942 BPF_ALU32_IMM(BPF_MOV, R7, 7),
3943 BPF_ALU32_IMM(BPF_MOV, R8, 8),
3944 BPF_ALU32_IMM(BPF_MOV, R9, 9),
3945 BPF_ALU64_IMM(BPF_ADD, R1, 10),
3946 BPF_ALU64_IMM(BPF_ADD, R2, 10),
3947 BPF_ALU64_IMM(BPF_ADD, R3, 10),
3948 BPF_ALU64_IMM(BPF_ADD, R4, 10),
3949 BPF_ALU64_IMM(BPF_ADD, R5, 10),
3950 BPF_ALU64_IMM(BPF_ADD, R6, 10),
3951 BPF_ALU64_IMM(BPF_ADD, R7, 10),
3952 BPF_ALU64_IMM(BPF_ADD, R8, 10),
3953 BPF_ALU64_IMM(BPF_ADD, R9, 10),
3954 BPF_ALU32_REG(BPF_ADD, R0, R1),
3955 BPF_ALU32_REG(BPF_ADD, R0, R2),
3956 BPF_ALU32_REG(BPF_ADD, R0, R3),
3957 BPF_ALU32_REG(BPF_ADD, R0, R4),
3958 BPF_ALU32_REG(BPF_ADD, R0, R5),
3959 BPF_ALU32_REG(BPF_ADD, R0, R6),
3960 BPF_ALU32_REG(BPF_ADD, R0, R7),
3961 BPF_ALU32_REG(BPF_ADD, R0, R8),
3962 BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3963 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3965 BPF_ALU32_REG(BPF_ADD, R1, R0),
3966 BPF_ALU32_REG(BPF_ADD, R1, R1),
3967 BPF_ALU32_REG(BPF_ADD, R1, R2),
3968 BPF_ALU32_REG(BPF_ADD, R1, R3),
3969 BPF_ALU32_REG(BPF_ADD, R1, R4),
3970 BPF_ALU32_REG(BPF_ADD, R1, R5),
3971 BPF_ALU32_REG(BPF_ADD, R1, R6),
3972 BPF_ALU32_REG(BPF_ADD, R1, R7),
3973 BPF_ALU32_REG(BPF_ADD, R1, R8),
3974 BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3975 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3977 BPF_ALU32_REG(BPF_ADD, R2, R0),
3978 BPF_ALU32_REG(BPF_ADD, R2, R1),
3979 BPF_ALU32_REG(BPF_ADD, R2, R2),
3980 BPF_ALU32_REG(BPF_ADD, R2, R3),
3981 BPF_ALU32_REG(BPF_ADD, R2, R4),
3982 BPF_ALU32_REG(BPF_ADD, R2, R5),
3983 BPF_ALU32_REG(BPF_ADD, R2, R6),
3984 BPF_ALU32_REG(BPF_ADD, R2, R7),
3985 BPF_ALU32_REG(BPF_ADD, R2, R8),
3986 BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3987 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3989 BPF_ALU32_REG(BPF_ADD, R3, R0),
3990 BPF_ALU32_REG(BPF_ADD, R3, R1),
3991 BPF_ALU32_REG(BPF_ADD, R3, R2),
3992 BPF_ALU32_REG(BPF_ADD, R3, R3),
3993 BPF_ALU32_REG(BPF_ADD, R3, R4),
3994 BPF_ALU32_REG(BPF_ADD, R3, R5),
3995 BPF_ALU32_REG(BPF_ADD, R3, R6),
3996 BPF_ALU32_REG(BPF_ADD, R3, R7),
3997 BPF_ALU32_REG(BPF_ADD, R3, R8),
3998 BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3999 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
4001 BPF_ALU32_REG(BPF_ADD, R4, R0),
4002 BPF_ALU32_REG(BPF_ADD, R4, R1),
4003 BPF_ALU32_REG(BPF_ADD, R4, R2),
4004 BPF_ALU32_REG(BPF_ADD, R4, R3),
4005 BPF_ALU32_REG(BPF_ADD, R4, R4),
4006 BPF_ALU32_REG(BPF_ADD, R4, R5),
4007 BPF_ALU32_REG(BPF_ADD, R4, R6),
4008 BPF_ALU32_REG(BPF_ADD, R4, R7),
4009 BPF_ALU32_REG(BPF_ADD, R4, R8),
4010 BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
4011 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
4013 BPF_ALU32_REG(BPF_ADD, R5, R0),
4014 BPF_ALU32_REG(BPF_ADD, R5, R1),
4015 BPF_ALU32_REG(BPF_ADD, R5, R2),
4016 BPF_ALU32_REG(BPF_ADD, R5, R3),
4017 BPF_ALU32_REG(BPF_ADD, R5, R4),
4018 BPF_ALU32_REG(BPF_ADD, R5, R5),
4019 BPF_ALU32_REG(BPF_ADD, R5, R6),
4020 BPF_ALU32_REG(BPF_ADD, R5, R7),
4021 BPF_ALU32_REG(BPF_ADD, R5, R8),
4022 BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
4023 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
4025 BPF_ALU32_REG(BPF_ADD, R6, R0),
4026 BPF_ALU32_REG(BPF_ADD, R6, R1),
4027 BPF_ALU32_REG(BPF_ADD, R6, R2),
4028 BPF_ALU32_REG(BPF_ADD, R6, R3),
4029 BPF_ALU32_REG(BPF_ADD, R6, R4),
4030 BPF_ALU32_REG(BPF_ADD, R6, R5),
4031 BPF_ALU32_REG(BPF_ADD, R6, R6),
4032 BPF_ALU32_REG(BPF_ADD, R6, R7),
4033 BPF_ALU32_REG(BPF_ADD, R6, R8),
4034 BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
4035 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
4037 BPF_ALU32_REG(BPF_ADD, R7, R0),
4038 BPF_ALU32_REG(BPF_ADD, R7, R1),
4039 BPF_ALU32_REG(BPF_ADD, R7, R2),
4040 BPF_ALU32_REG(BPF_ADD, R7, R3),
4041 BPF_ALU32_REG(BPF_ADD, R7, R4),
4042 BPF_ALU32_REG(BPF_ADD, R7, R5),
4043 BPF_ALU32_REG(BPF_ADD, R7, R6),
4044 BPF_ALU32_REG(BPF_ADD, R7, R7),
4045 BPF_ALU32_REG(BPF_ADD, R7, R8),
4046 BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
4047 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
4049 BPF_ALU32_REG(BPF_ADD, R8, R0),
4050 BPF_ALU32_REG(BPF_ADD, R8, R1),
4051 BPF_ALU32_REG(BPF_ADD, R8, R2),
4052 BPF_ALU32_REG(BPF_ADD, R8, R3),
4053 BPF_ALU32_REG(BPF_ADD, R8, R4),
4054 BPF_ALU32_REG(BPF_ADD, R8, R5),
4055 BPF_ALU32_REG(BPF_ADD, R8, R6),
4056 BPF_ALU32_REG(BPF_ADD, R8, R7),
4057 BPF_ALU32_REG(BPF_ADD, R8, R8),
4058 BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
4059 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
4061 BPF_ALU32_REG(BPF_ADD, R9, R0),
4062 BPF_ALU32_REG(BPF_ADD, R9, R1),
4063 BPF_ALU32_REG(BPF_ADD, R9, R2),
4064 BPF_ALU32_REG(BPF_ADD, R9, R3),
4065 BPF_ALU32_REG(BPF_ADD, R9, R4),
4066 BPF_ALU32_REG(BPF_ADD, R9, R5),
4067 BPF_ALU32_REG(BPF_ADD, R9, R6),
4068 BPF_ALU32_REG(BPF_ADD, R9, R7),
4069 BPF_ALU32_REG(BPF_ADD, R9, R8),
4070 BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
4071 BPF_ALU32_REG(BPF_MOV, R0, R9),
4078 { /* Mainly checking JIT here. */
/* ALU64 SUB register/immediate chaining test: seed R0..R9 with 0..9,
 * subtract every other register (and an immediate 10) from each register
 * in turn, then NEG R0 and fold the rest back in. Exercises 64-bit SUB
 * on all register pairings for JIT code generation.
 * NOTE(review): the entry's name string, ".u.insns_int = {" wrapper and
 * trailing result rows are elided from this extract (numbering gaps) --
 * confirm the full entry against upstream lib/test_bpf.c.
 */
4081 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4082 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4083 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4084 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4085 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4086 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4087 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4088 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4089 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4090 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4091 BPF_ALU64_REG(BPF_SUB, R0, R0),
4092 BPF_ALU64_REG(BPF_SUB, R0, R1),
4093 BPF_ALU64_REG(BPF_SUB, R0, R2),
4094 BPF_ALU64_REG(BPF_SUB, R0, R3),
4095 BPF_ALU64_REG(BPF_SUB, R0, R4),
4096 BPF_ALU64_REG(BPF_SUB, R0, R5),
4097 BPF_ALU64_REG(BPF_SUB, R0, R6),
4098 BPF_ALU64_REG(BPF_SUB, R0, R7),
4099 BPF_ALU64_REG(BPF_SUB, R0, R8),
4100 BPF_ALU64_REG(BPF_SUB, R0, R9),
4101 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4102 BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4104 BPF_ALU64_REG(BPF_SUB, R1, R0),
4105 BPF_ALU64_REG(BPF_SUB, R1, R2),
4106 BPF_ALU64_REG(BPF_SUB, R1, R3),
4107 BPF_ALU64_REG(BPF_SUB, R1, R4),
4108 BPF_ALU64_REG(BPF_SUB, R1, R5),
4109 BPF_ALU64_REG(BPF_SUB, R1, R6),
4110 BPF_ALU64_REG(BPF_SUB, R1, R7),
4111 BPF_ALU64_REG(BPF_SUB, R1, R8),
4112 BPF_ALU64_REG(BPF_SUB, R1, R9),
4113 BPF_ALU64_IMM(BPF_SUB, R1, 10),
4114 BPF_ALU64_REG(BPF_SUB, R2, R0),
4115 BPF_ALU64_REG(BPF_SUB, R2, R1),
4116 BPF_ALU64_REG(BPF_SUB, R2, R3),
4117 BPF_ALU64_REG(BPF_SUB, R2, R4),
4118 BPF_ALU64_REG(BPF_SUB, R2, R5),
4119 BPF_ALU64_REG(BPF_SUB, R2, R6),
4120 BPF_ALU64_REG(BPF_SUB, R2, R7),
4121 BPF_ALU64_REG(BPF_SUB, R2, R8),
4122 BPF_ALU64_REG(BPF_SUB, R2, R9),
4123 BPF_ALU64_IMM(BPF_SUB, R2, 10),
4124 BPF_ALU64_REG(BPF_SUB, R3, R0),
4125 BPF_ALU64_REG(BPF_SUB, R3, R1),
4126 BPF_ALU64_REG(BPF_SUB, R3, R2),
4127 BPF_ALU64_REG(BPF_SUB, R3, R4),
4128 BPF_ALU64_REG(BPF_SUB, R3, R5),
4129 BPF_ALU64_REG(BPF_SUB, R3, R6),
4130 BPF_ALU64_REG(BPF_SUB, R3, R7),
4131 BPF_ALU64_REG(BPF_SUB, R3, R8),
4132 BPF_ALU64_REG(BPF_SUB, R3, R9),
4133 BPF_ALU64_IMM(BPF_SUB, R3, 10),
4134 BPF_ALU64_REG(BPF_SUB, R4, R0),
4135 BPF_ALU64_REG(BPF_SUB, R4, R1),
4136 BPF_ALU64_REG(BPF_SUB, R4, R2),
4137 BPF_ALU64_REG(BPF_SUB, R4, R3),
4138 BPF_ALU64_REG(BPF_SUB, R4, R5),
4139 BPF_ALU64_REG(BPF_SUB, R4, R6),
4140 BPF_ALU64_REG(BPF_SUB, R4, R7),
4141 BPF_ALU64_REG(BPF_SUB, R4, R8),
4142 BPF_ALU64_REG(BPF_SUB, R4, R9),
4143 BPF_ALU64_IMM(BPF_SUB, R4, 10),
4144 BPF_ALU64_REG(BPF_SUB, R5, R0),
4145 BPF_ALU64_REG(BPF_SUB, R5, R1),
4146 BPF_ALU64_REG(BPF_SUB, R5, R2),
4147 BPF_ALU64_REG(BPF_SUB, R5, R3),
4148 BPF_ALU64_REG(BPF_SUB, R5, R4),
4149 BPF_ALU64_REG(BPF_SUB, R5, R6),
4150 BPF_ALU64_REG(BPF_SUB, R5, R7),
4151 BPF_ALU64_REG(BPF_SUB, R5, R8),
4152 BPF_ALU64_REG(BPF_SUB, R5, R9),
4153 BPF_ALU64_IMM(BPF_SUB, R5, 10),
4154 BPF_ALU64_REG(BPF_SUB, R6, R0),
4155 BPF_ALU64_REG(BPF_SUB, R6, R1),
4156 BPF_ALU64_REG(BPF_SUB, R6, R2),
4157 BPF_ALU64_REG(BPF_SUB, R6, R3),
4158 BPF_ALU64_REG(BPF_SUB, R6, R4),
4159 BPF_ALU64_REG(BPF_SUB, R6, R5),
4160 BPF_ALU64_REG(BPF_SUB, R6, R7),
4161 BPF_ALU64_REG(BPF_SUB, R6, R8),
4162 BPF_ALU64_REG(BPF_SUB, R6, R9),
4163 BPF_ALU64_IMM(BPF_SUB, R6, 10),
4164 BPF_ALU64_REG(BPF_SUB, R7, R0),
4165 BPF_ALU64_REG(BPF_SUB, R7, R1),
4166 BPF_ALU64_REG(BPF_SUB, R7, R2),
4167 BPF_ALU64_REG(BPF_SUB, R7, R3),
4168 BPF_ALU64_REG(BPF_SUB, R7, R4),
4169 BPF_ALU64_REG(BPF_SUB, R7, R5),
4170 BPF_ALU64_REG(BPF_SUB, R7, R6),
4171 BPF_ALU64_REG(BPF_SUB, R7, R8),
4172 BPF_ALU64_REG(BPF_SUB, R7, R9),
4173 BPF_ALU64_IMM(BPF_SUB, R7, 10),
4174 BPF_ALU64_REG(BPF_SUB, R8, R0),
4175 BPF_ALU64_REG(BPF_SUB, R8, R1),
4176 BPF_ALU64_REG(BPF_SUB, R8, R2),
4177 BPF_ALU64_REG(BPF_SUB, R8, R3),
4178 BPF_ALU64_REG(BPF_SUB, R8, R4),
4179 BPF_ALU64_REG(BPF_SUB, R8, R5),
4180 BPF_ALU64_REG(BPF_SUB, R8, R6),
4181 BPF_ALU64_REG(BPF_SUB, R8, R7),
4182 BPF_ALU64_REG(BPF_SUB, R8, R9),
4183 BPF_ALU64_IMM(BPF_SUB, R8, 10),
4184 BPF_ALU64_REG(BPF_SUB, R9, R0),
4185 BPF_ALU64_REG(BPF_SUB, R9, R1),
4186 BPF_ALU64_REG(BPF_SUB, R9, R2),
4187 BPF_ALU64_REG(BPF_SUB, R9, R3),
4188 BPF_ALU64_REG(BPF_SUB, R9, R4),
4189 BPF_ALU64_REG(BPF_SUB, R9, R5),
4190 BPF_ALU64_REG(BPF_SUB, R9, R6),
4191 BPF_ALU64_REG(BPF_SUB, R9, R7),
4192 BPF_ALU64_REG(BPF_SUB, R9, R8),
4193 BPF_ALU64_IMM(BPF_SUB, R9, 10),
4194 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4195 BPF_ALU64_IMM(BPF_NEG, R0, 0),
4196 BPF_ALU64_REG(BPF_SUB, R0, R1),
4197 BPF_ALU64_REG(BPF_SUB, R0, R2),
4198 BPF_ALU64_REG(BPF_SUB, R0, R3),
4199 BPF_ALU64_REG(BPF_SUB, R0, R4),
4200 BPF_ALU64_REG(BPF_SUB, R0, R5),
4201 BPF_ALU64_REG(BPF_SUB, R0, R6),
4202 BPF_ALU64_REG(BPF_SUB, R0, R7),
4203 BPF_ALU64_REG(BPF_SUB, R0, R8),
4204 BPF_ALU64_REG(BPF_SUB, R0, R9),
4211 { /* Mainly checking JIT here. */
/* Verifies that "r SUB r" and "r XOR r" both zero a register: each pair
 * of self-SUB/self-XOR is followed by a JEQ comparing the two zeroed
 * registers; interleaved MOVs with arbitrary values make sure the JIT
 * does not reuse stale register contents. Ends by selecting R0 = 1 on
 * success, 0 on the fall-through failure path.
 * NOTE(review): lines elided between JEQs (numbering gaps) presumably
 * held BPF_EXIT_INSN() -- confirm against upstream lib/test_bpf.c.
 */
4214 BPF_ALU64_REG(BPF_SUB, R0, R0),
4215 BPF_ALU64_REG(BPF_XOR, R1, R1),
4216 BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4218 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4219 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4220 BPF_ALU64_REG(BPF_SUB, R1, R1),
4221 BPF_ALU64_REG(BPF_XOR, R2, R2),
4222 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4224 BPF_ALU64_REG(BPF_SUB, R2, R2),
4225 BPF_ALU64_REG(BPF_XOR, R3, R3),
4226 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4227 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4228 BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4230 BPF_ALU64_REG(BPF_SUB, R3, R3),
4231 BPF_ALU64_REG(BPF_XOR, R4, R4),
4232 BPF_ALU64_IMM(BPF_MOV, R2, 1),
4233 BPF_ALU64_IMM(BPF_MOV, R5, -1),
4234 BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4236 BPF_ALU64_REG(BPF_SUB, R4, R4),
4237 BPF_ALU64_REG(BPF_XOR, R5, R5),
4238 BPF_ALU64_IMM(BPF_MOV, R3, 1),
4239 BPF_ALU64_IMM(BPF_MOV, R7, -1),
4240 BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4242 BPF_ALU64_IMM(BPF_MOV, R5, 1),
4243 BPF_ALU64_REG(BPF_SUB, R5, R5),
4244 BPF_ALU64_REG(BPF_XOR, R6, R6),
4245 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4246 BPF_ALU64_IMM(BPF_MOV, R8, -1),
4247 BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4249 BPF_ALU64_REG(BPF_SUB, R6, R6),
4250 BPF_ALU64_REG(BPF_XOR, R7, R7),
4251 BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4253 BPF_ALU64_REG(BPF_SUB, R7, R7),
4254 BPF_ALU64_REG(BPF_XOR, R8, R8),
4255 BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4257 BPF_ALU64_REG(BPF_SUB, R8, R8),
4258 BPF_ALU64_REG(BPF_XOR, R9, R9),
4259 BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4261 BPF_ALU64_REG(BPF_SUB, R9, R9),
4262 BPF_ALU64_REG(BPF_XOR, R0, R0),
4263 BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4265 BPF_ALU64_REG(BPF_SUB, R1, R1),
4266 BPF_ALU64_REG(BPF_XOR, R0, R0),
4267 BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4268 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4270 BPF_ALU64_IMM(BPF_MOV, R0, 1),
4277 { /* Mainly checking JIT here. */
/* ALU64 MUL chaining test: multiply accumulators through all registers
 * and immediates, then verify both the low and high 32-bit halves of
 * the 64-bit products via RSH/LSH/ARSH shifting. Expected result
 * 0x35d97ef2 per the trailing result row.
 * NOTE(review): elided lines between checks (numbering gaps) presumably
 * held BPF_EXIT_INSN() -- confirm against upstream lib/test_bpf.c.
 */
4280 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4281 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4282 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4283 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4284 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4285 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4286 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4287 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4288 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4289 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4290 BPF_ALU64_REG(BPF_MUL, R0, R0),
4291 BPF_ALU64_REG(BPF_MUL, R0, R1),
4292 BPF_ALU64_REG(BPF_MUL, R0, R2),
4293 BPF_ALU64_REG(BPF_MUL, R0, R3),
4294 BPF_ALU64_REG(BPF_MUL, R0, R4),
4295 BPF_ALU64_REG(BPF_MUL, R0, R5),
4296 BPF_ALU64_REG(BPF_MUL, R0, R6),
4297 BPF_ALU64_REG(BPF_MUL, R0, R7),
4298 BPF_ALU64_REG(BPF_MUL, R0, R8),
4299 BPF_ALU64_REG(BPF_MUL, R0, R9),
4300 BPF_ALU64_IMM(BPF_MUL, R0, 10),
4301 BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
4303 BPF_ALU64_REG(BPF_MUL, R1, R0),
4304 BPF_ALU64_REG(BPF_MUL, R1, R2),
4305 BPF_ALU64_REG(BPF_MUL, R1, R3),
4306 BPF_ALU64_REG(BPF_MUL, R1, R4),
4307 BPF_ALU64_REG(BPF_MUL, R1, R5),
4308 BPF_ALU64_REG(BPF_MUL, R1, R6),
4309 BPF_ALU64_REG(BPF_MUL, R1, R7),
4310 BPF_ALU64_REG(BPF_MUL, R1, R8),
4311 BPF_ALU64_REG(BPF_MUL, R1, R9),
4312 BPF_ALU64_IMM(BPF_MUL, R1, 10),
4313 BPF_ALU64_REG(BPF_MOV, R2, R1),
4314 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4315 BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4317 BPF_ALU64_IMM(BPF_LSH, R1, 32),
4318 BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4319 BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4321 BPF_ALU64_REG(BPF_MUL, R2, R0),
4322 BPF_ALU64_REG(BPF_MUL, R2, R1),
4323 BPF_ALU64_REG(BPF_MUL, R2, R3),
4324 BPF_ALU64_REG(BPF_MUL, R2, R4),
4325 BPF_ALU64_REG(BPF_MUL, R2, R5),
4326 BPF_ALU64_REG(BPF_MUL, R2, R6),
4327 BPF_ALU64_REG(BPF_MUL, R2, R7),
4328 BPF_ALU64_REG(BPF_MUL, R2, R8),
4329 BPF_ALU64_REG(BPF_MUL, R2, R9),
4330 BPF_ALU64_IMM(BPF_MUL, R2, 10),
4331 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4332 BPF_ALU64_REG(BPF_MOV, R0, R2),
4337 { { 0, 0x35d97ef2 } }
4339 { /* Mainly checking JIT here. */
/* Fill all registers with all-ones, then overwrite each with a 64-bit
 * MOV immediate 0 and sum them plus 0xfefe: confirms MOV64-imm fully
 * clears the previous 64-bit contents (expected result is 0xfefe).
 */
4342 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4343 BPF_MOV64_REG(R1, R0),
4344 BPF_MOV64_REG(R2, R1),
4345 BPF_MOV64_REG(R3, R2),
4346 BPF_MOV64_REG(R4, R3),
4347 BPF_MOV64_REG(R5, R4),
4348 BPF_MOV64_REG(R6, R5),
4349 BPF_MOV64_REG(R7, R6),
4350 BPF_MOV64_REG(R8, R7),
4351 BPF_MOV64_REG(R9, R8),
4352 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4353 BPF_ALU64_IMM(BPF_MOV, R1, 0),
4354 BPF_ALU64_IMM(BPF_MOV, R2, 0),
4355 BPF_ALU64_IMM(BPF_MOV, R3, 0),
4356 BPF_ALU64_IMM(BPF_MOV, R4, 0),
4357 BPF_ALU64_IMM(BPF_MOV, R5, 0),
4358 BPF_ALU64_IMM(BPF_MOV, R6, 0),
4359 BPF_ALU64_IMM(BPF_MOV, R7, 0),
4360 BPF_ALU64_IMM(BPF_MOV, R8, 0),
4361 BPF_ALU64_IMM(BPF_MOV, R9, 0),
4362 BPF_ALU64_REG(BPF_ADD, R0, R0),
4363 BPF_ALU64_REG(BPF_ADD, R0, R1),
4364 BPF_ALU64_REG(BPF_ADD, R0, R2),
4365 BPF_ALU64_REG(BPF_ADD, R0, R3),
4366 BPF_ALU64_REG(BPF_ADD, R0, R4),
4367 BPF_ALU64_REG(BPF_ADD, R0, R5),
4368 BPF_ALU64_REG(BPF_ADD, R0, R6),
4369 BPF_ALU64_REG(BPF_ADD, R0, R7),
4370 BPF_ALU64_REG(BPF_ADD, R0, R8),
4371 BPF_ALU64_REG(BPF_ADD, R0, R9),
4372 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4379 { /* Mainly checking JIT here. */
/* Same shape as the previous entry but using 32-bit MOV immediates:
 * confirms ALU32 MOV zero-extends into the upper 32 bits (so the later
 * 64-bit ADDs see clean zeros and the sum is 0xfefe).
 */
4382 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4383 BPF_MOV64_REG(R1, R0),
4384 BPF_MOV64_REG(R2, R1),
4385 BPF_MOV64_REG(R3, R2),
4386 BPF_MOV64_REG(R4, R3),
4387 BPF_MOV64_REG(R5, R4),
4388 BPF_MOV64_REG(R6, R5),
4389 BPF_MOV64_REG(R7, R6),
4390 BPF_MOV64_REG(R8, R7),
4391 BPF_MOV64_REG(R9, R8),
4392 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4393 BPF_ALU32_IMM(BPF_MOV, R1, 0),
4394 BPF_ALU32_IMM(BPF_MOV, R2, 0),
4395 BPF_ALU32_IMM(BPF_MOV, R3, 0),
4396 BPF_ALU32_IMM(BPF_MOV, R4, 0),
4397 BPF_ALU32_IMM(BPF_MOV, R5, 0),
4398 BPF_ALU32_IMM(BPF_MOV, R6, 0),
4399 BPF_ALU32_IMM(BPF_MOV, R7, 0),
4400 BPF_ALU32_IMM(BPF_MOV, R8, 0),
4401 BPF_ALU32_IMM(BPF_MOV, R9, 0),
4402 BPF_ALU64_REG(BPF_ADD, R0, R0),
4403 BPF_ALU64_REG(BPF_ADD, R0, R1),
4404 BPF_ALU64_REG(BPF_ADD, R0, R2),
4405 BPF_ALU64_REG(BPF_ADD, R0, R3),
4406 BPF_ALU64_REG(BPF_ADD, R0, R4),
4407 BPF_ALU64_REG(BPF_ADD, R0, R5),
4408 BPF_ALU64_REG(BPF_ADD, R0, R6),
4409 BPF_ALU64_REG(BPF_ADD, R0, R7),
4410 BPF_ALU64_REG(BPF_ADD, R0, R8),
4411 BPF_ALU64_REG(BPF_ADD, R0, R9),
4412 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4419 { /* Mainly checking JIT here. */
/* Same shape again but zeroing via the two-slot LD_IMM64 instruction:
 * confirms BPF_LD | BPF_DW | BPF_IMM loads replace the full 64-bit
 * register contents (sum comes out as 0xfefe).
 */
4422 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4423 BPF_MOV64_REG(R1, R0),
4424 BPF_MOV64_REG(R2, R1),
4425 BPF_MOV64_REG(R3, R2),
4426 BPF_MOV64_REG(R4, R3),
4427 BPF_MOV64_REG(R5, R4),
4428 BPF_MOV64_REG(R6, R5),
4429 BPF_MOV64_REG(R7, R6),
4430 BPF_MOV64_REG(R8, R7),
4431 BPF_MOV64_REG(R9, R8),
4432 BPF_LD_IMM64(R0, 0x0LL),
4433 BPF_LD_IMM64(R1, 0x0LL),
4434 BPF_LD_IMM64(R2, 0x0LL),
4435 BPF_LD_IMM64(R3, 0x0LL),
4436 BPF_LD_IMM64(R4, 0x0LL),
4437 BPF_LD_IMM64(R5, 0x0LL),
4438 BPF_LD_IMM64(R6, 0x0LL),
4439 BPF_LD_IMM64(R7, 0x0LL),
4440 BPF_LD_IMM64(R8, 0x0LL),
4441 BPF_LD_IMM64(R9, 0x0LL),
4442 BPF_ALU64_REG(BPF_ADD, R0, R0),
4443 BPF_ALU64_REG(BPF_ADD, R0, R1),
4444 BPF_ALU64_REG(BPF_ADD, R0, R2),
4445 BPF_ALU64_REG(BPF_ADD, R0, R3),
4446 BPF_ALU64_REG(BPF_ADD, R0, R4),
4447 BPF_ALU64_REG(BPF_ADD, R0, R5),
4448 BPF_ALU64_REG(BPF_ADD, R0, R6),
4449 BPF_ALU64_REG(BPF_ADD, R0, R7),
4450 BPF_ALU64_REG(BPF_ADD, R0, R8),
4451 BPF_ALU64_REG(BPF_ADD, R0, R9),
4452 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
/* ALU64 DIV/MOD immediate test fragment: (11 - 1) / (2 ^ 3) == 10,
 * then 10 % 3 == 1; falls through to MOV R0, -1 on failure.
 * NOTE(review): this entry's header and trailer are elided from this
 * extract; the gaps after each JEQ presumably held BPF_EXIT_INSN() --
 * confirm against upstream lib/test_bpf.c.
 */
4462 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4463 BPF_ALU64_IMM(BPF_ADD, R0, -1),
4464 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4465 BPF_ALU64_IMM(BPF_XOR, R2, 3),
4466 BPF_ALU64_REG(BPF_DIV, R0, R2),
4467 BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4469 BPF_ALU64_IMM(BPF_MOD, R0, 3),
4470 BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4472 BPF_ALU64_IMM(BPF_MOV, R0, -1),
4480 "INT: shifts by register",
/* Register-operand shift tests: 32-bit RSH of a negative value,
 * 64-bit LSH/ARSH round-trips, shifting a register by itself, and a
 * 32-bit LSH -- each result pinned with a JEQ. Falls through to
 * MOV R0, -1 on any failure.
 * NOTE(review): lines elided between checks (numbering gaps)
 * presumably held BPF_EXIT_INSN() -- confirm upstream.
 */
4482 BPF_MOV64_IMM(R0, -1234),
4483 BPF_MOV64_IMM(R1, 1),
4484 BPF_ALU32_REG(BPF_RSH, R0, R1),
4485 BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4487 BPF_MOV64_IMM(R2, 1),
4488 BPF_ALU64_REG(BPF_LSH, R0, R2),
4489 BPF_MOV32_IMM(R4, -1234),
4490 BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4492 BPF_ALU64_IMM(BPF_AND, R4, 63),
4493 BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
4494 BPF_MOV64_IMM(R3, 47),
4495 BPF_ALU64_REG(BPF_ARSH, R0, R3),
4496 BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4498 BPF_MOV64_IMM(R2, 1),
4499 BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4500 BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4502 BPF_MOV64_IMM(R4, 4),
4503 BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4504 BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4506 BPF_MOV64_IMM(R4, 5),
4507 BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4508 BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4510 BPF_MOV64_IMM(R0, -1),
4519 "INT: 32-bit context pointer word order and zero-extension",
/* R1 is the context pointer on entry; checks its low 32 bits are
 * non-zero and, after RSH 32, that the high 32 bits are zero --
 * i.e. pointers are zero-extended on 32-bit platforms.
 */
4521 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4522 BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
4523 BPF_ALU64_IMM(BPF_RSH, R1, 32),
4524 BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
4525 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4534 "check: missing ret",
/* Four classic-BPF negative tests: each program must be REJECTED by
 * the checker (FLAG_EXPECTED_FAIL, expected_errcode -EINVAL) --
 * missing final RET, unconditional divide-by-zero, a seccomp-only
 * LDX|W|ABS opcode, and an out-of-range M[] spill slot (index 16).
 */
4536 BPF_STMT(BPF_LD | BPF_IMM, 1),
4538 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4541 .fill_helper = NULL,
4542 .expected_errcode = -EINVAL,
4547 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4548 BPF_STMT(BPF_RET | BPF_K, 0)
4550 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4553 .fill_helper = NULL,
4554 .expected_errcode = -EINVAL,
4557 "check: unknown insn",
4559 /* seccomp insn, rejected in socket filter */
4560 BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4561 BPF_STMT(BPF_RET | BPF_K, 0)
4563 CLASSIC | FLAG_EXPECTED_FAIL,
4566 .fill_helper = NULL,
4567 .expected_errcode = -EINVAL,
4570 "check: out of range spill/fill",
4572 BPF_STMT(BPF_STX, 16),
4573 BPF_STMT(BPF_RET | BPF_K, 0)
4575 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4578 .fill_helper = NULL,
4579 .expected_errcode = -EINVAL,
/* Classic-BPF jump-ladder test: long runs of LD|H|ABS loads broken up
 * by JGE/JEQ jumps with large offsets, run against the Ethernet/IP/UDP
 * packet fixture below. Exercises branch-offset handling in the
 * interpreter/JIT rather than any particular match result.
 * NOTE(review): this entry's name string and trailer are elided from
 * this extract -- confirm against upstream lib/test_bpf.c.
 */
4584 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4585 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4586 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4587 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4588 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4589 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4590 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4591 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4592 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4593 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4594 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4595 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4596 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4597 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4598 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4599 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4600 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4601 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4602 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4603 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4604 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4605 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4606 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4607 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4608 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4609 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4610 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4611 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4612 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4613 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4614 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4615 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4616 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4617 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4618 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4619 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4620 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4621 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4622 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4623 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4624 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4625 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4626 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4627 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4628 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4629 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4630 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4631 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4632 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4633 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4634 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4635 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4636 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4637 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4638 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4639 BPF_STMT(BPF_RET | BPF_A, 0),
4640 BPF_STMT(BPF_RET | BPF_A, 0),
/* Packet fixture: Ethernet header, minimal IPv4 header (proto 17/UDP),
 * UDP header, then 0xcc padding bytes.
 */
4643 { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4644 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4646 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4647 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4648 0xc0, 0xa8, 0x33, 0x01,
4649 0xc0, 0xa8, 0x33, 0x02,
4652 0x00, 0x14, 0x00, 0x00,
4653 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4654 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4655 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4656 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4657 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4658 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4659 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4660 0xcc, 0xcc, 0xcc, 0xcc },
/* Two more checker-rejection entries: RET X with X never initialized,
 * and LDX|IMM followed by RET X -- both must fail with -EINVAL.
 * NOTE(review): the first entry's name string is elided from this
 * extract -- confirm against upstream lib/test_bpf.c.
 */
4666 BPF_STMT(BPF_RET | BPF_X, 0),
4668 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4671 .fill_helper = NULL,
4672 .expected_errcode = -EINVAL,
4675 "check: LDX + RET X",
4677 BPF_STMT(BPF_LDX | BPF_IMM, 42),
4678 BPF_STMT(BPF_RET | BPF_X, 0),
4680 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4683 .fill_helper = NULL,
4684 .expected_errcode = -EINVAL,
4686 { /* Mainly checking JIT here. */
4687 "M[]: alt STX + LDX",
/* Walks all 16 classic-BPF scratch slots M[0..15]: store X, load it
 * back, copy to A, increment, copy back to X, and repeat for the next
 * slot -- so a broken slot shows up as a wrong final A. Returns A.
 */
4689 BPF_STMT(BPF_LDX | BPF_IMM, 100),
4690 BPF_STMT(BPF_STX, 0),
4691 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4692 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4693 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4694 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4695 BPF_STMT(BPF_STX, 1),
4696 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4697 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4698 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4699 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4700 BPF_STMT(BPF_STX, 2),
4701 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4702 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4703 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4704 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4705 BPF_STMT(BPF_STX, 3),
4706 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4707 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4708 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4709 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4710 BPF_STMT(BPF_STX, 4),
4711 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4712 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4713 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4714 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4715 BPF_STMT(BPF_STX, 5),
4716 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4717 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4718 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4719 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4720 BPF_STMT(BPF_STX, 6),
4721 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4722 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4723 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4724 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4725 BPF_STMT(BPF_STX, 7),
4726 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4727 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4728 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4729 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4730 BPF_STMT(BPF_STX, 8),
4731 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4732 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4733 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4734 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4735 BPF_STMT(BPF_STX, 9),
4736 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4737 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4738 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4739 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4740 BPF_STMT(BPF_STX, 10),
4741 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4742 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4743 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4744 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4745 BPF_STMT(BPF_STX, 11),
4746 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4747 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4748 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4749 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4750 BPF_STMT(BPF_STX, 12),
4751 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4752 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4753 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4754 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4755 BPF_STMT(BPF_STX, 13),
4756 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4757 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4758 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4759 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4760 BPF_STMT(BPF_STX, 14),
4761 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4762 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4763 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4764 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4765 BPF_STMT(BPF_STX, 15),
4766 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4767 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4768 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4769 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4770 BPF_STMT(BPF_RET | BPF_A, 0),
4772 CLASSIC | FLAG_NO_DATA,
4776 { /* Mainly checking JIT here. */
4777 "M[]: full STX + full LDX",
/* Stores a distinct constant into each of the 16 scratch slots, then
 * loads them all back and sums them in A; expected sum 0x2a5a5e5 per
 * the result row. Detects slots that alias or get clobbered.
 */
4779 BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4780 BPF_STMT(BPF_STX, 0),
4781 BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4782 BPF_STMT(BPF_STX, 1),
4783 BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4784 BPF_STMT(BPF_STX, 2),
4785 BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4786 BPF_STMT(BPF_STX, 3),
4787 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4788 BPF_STMT(BPF_STX, 4),
4789 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4790 BPF_STMT(BPF_STX, 5),
4791 BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4792 BPF_STMT(BPF_STX, 6),
4793 BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4794 BPF_STMT(BPF_STX, 7),
4795 BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4796 BPF_STMT(BPF_STX, 8),
4797 BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4798 BPF_STMT(BPF_STX, 9),
4799 BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4800 BPF_STMT(BPF_STX, 10),
4801 BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4802 BPF_STMT(BPF_STX, 11),
4803 BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4804 BPF_STMT(BPF_STX, 12),
4805 BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4806 BPF_STMT(BPF_STX, 13),
4807 BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4808 BPF_STMT(BPF_STX, 14),
4809 BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4810 BPF_STMT(BPF_STX, 15),
4811 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4812 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4813 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4814 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4815 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4816 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4817 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4818 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4819 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4820 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4821 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4822 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4823 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4824 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4825 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4826 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4827 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4828 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4829 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4830 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4831 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4832 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4833 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4834 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4835 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4836 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4837 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4838 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4839 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4840 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4841 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4842 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4843 BPF_STMT(BPF_RET | BPF_A, 0),
4845 CLASSIC | FLAG_NO_DATA,
4847 { { 0, 0x2a5a5e5 } },
4850 "check: SKF_AD_MAX",
/* Ancillary-load boundary tests: loading SKF_AD_OFF + SKF_AD_MAX must
 * be rejected by the checker (-EINVAL); the next entry loads just below
 * SKF_AD_OFF, which passes the checker but fails at runtime.
 */
4852 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4853 SKF_AD_OFF + SKF_AD_MAX),
4854 BPF_STMT(BPF_RET | BPF_A, 0),
4856 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4859 .fill_helper = NULL,
4860 .expected_errcode = -EINVAL,
4862 { /* Passes checker but fails during runtime. */
4863 "LD [SKF_AD_OFF-1]",
4865 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4867 BPF_STMT(BPF_RET | BPF_K, 1),
4874 "load 64-bit immediate",
/* Splits 0x567800001234 into high/low 32-bit halves via RSH and
 * LSH+RSH and checks each half, then verifies RSH 32 of 0x1ffffffff
 * yields 1 (the returned success value).
 */
4876 BPF_LD_IMM64(R1, 0x567800001234LL),
4877 BPF_MOV64_REG(R2, R1),
4878 BPF_MOV64_REG(R3, R2),
4879 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4880 BPF_ALU64_IMM(BPF_LSH, R3, 32),
4881 BPF_ALU64_IMM(BPF_RSH, R3, 32),
4882 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4883 BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4885 BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4887 BPF_LD_IMM64(R0, 0x1ffffffffLL),
4888 BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4895 /* BPF_ALU | BPF_MOV | BPF_X */
/* Systematic MOV tests (register and immediate, 32- and 64-bit):
 * small values, UINT_MAX, negative immediates -- checking that ALU32
 * MOV zero-extends while ALU64 MOV sign-extends the immediate.
 * NOTE(review): entry trailers (.result rows, flags) are partially
 * elided in this extract -- confirm against upstream lib/test_bpf.c.
 */
4897 "ALU_MOV_X: dst = 2",
4899 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4900 BPF_ALU32_REG(BPF_MOV, R0, R1),
4908 "ALU_MOV_X: dst = 4294967295",
4910 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4911 BPF_ALU32_REG(BPF_MOV, R0, R1),
4916 { { 0, 4294967295U } },
4919 "ALU64_MOV_X: dst = 2",
4921 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4922 BPF_ALU64_REG(BPF_MOV, R0, R1),
4930 "ALU64_MOV_X: dst = 4294967295",
4932 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4933 BPF_ALU64_REG(BPF_MOV, R0, R1),
4938 { { 0, 4294967295U } },
4940 /* BPF_ALU | BPF_MOV | BPF_K */
4942 "ALU_MOV_K: dst = 2",
4944 BPF_ALU32_IMM(BPF_MOV, R0, 2),
4952 "ALU_MOV_K: dst = 4294967295",
4954 BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4959 { { 0, 4294967295U } },
4962 "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4964 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4965 BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4966 BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4967 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4968 BPF_MOV32_IMM(R0, 2),
4970 BPF_MOV32_IMM(R0, 1),
4978 "ALU_MOV_K: small negative",
4980 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4988 "ALU_MOV_K: small negative zero extension",
4990 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4991 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4999 "ALU_MOV_K: large negative",
5001 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5006 { { 0, -123456789 } }
5009 "ALU_MOV_K: large negative zero extension",
5011 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5012 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5020 "ALU64_MOV_K: dst = 2",
5022 BPF_ALU64_IMM(BPF_MOV, R0, 2),
5030 "ALU64_MOV_K: dst = 2147483647",
5032 BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
5037 { { 0, 2147483647 } },
5040 "ALU64_OR_K: dst = 0x0",
5042 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5043 BPF_LD_IMM64(R3, 0x0),
5044 BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
5045 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5046 BPF_MOV32_IMM(R0, 2),
5048 BPF_MOV32_IMM(R0, 1),
5056 "ALU64_MOV_K: dst = -1",
5058 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5059 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5060 BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
5061 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5062 BPF_MOV32_IMM(R0, 2),
5064 BPF_MOV32_IMM(R0, 1),
5072 "ALU64_MOV_K: small negative",
5074 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5082 "ALU64_MOV_K: small negative sign extension",
5084 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5085 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5090 { { 0, 0xffffffff } }
5093 "ALU64_MOV_K: large negative",
5095 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5100 { { 0, -123456789 } }
5103 "ALU64_MOV_K: large negative sign extension",
5105 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5106 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5111 { { 0, 0xffffffff } }
5113 /* BPF_ALU | BPF_ADD | BPF_X */
/* Systematic ADD tests (register and immediate, 32- and 64-bit):
 * identity adds, 32-bit wraparound to 0, 64-bit carry past 2^32, and
 * immediates at sign boundaries (0x7fffffff, 0x80000000, 0x80008000).
 * NOTE(review): entry trailers are partially elided in this extract and
 * the final entry continues past it -- confirm against upstream
 * lib/test_bpf.c.
 */
5115 "ALU_ADD_X: 1 + 2 = 3",
5117 BPF_LD_IMM64(R0, 1),
5118 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5119 BPF_ALU32_REG(BPF_ADD, R0, R1),
5127 "ALU_ADD_X: 1 + 4294967294 = 4294967295",
5129 BPF_LD_IMM64(R0, 1),
5130 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5131 BPF_ALU32_REG(BPF_ADD, R0, R1),
5136 { { 0, 4294967295U } },
5139 "ALU_ADD_X: 2 + 4294967294 = 0",
5141 BPF_LD_IMM64(R0, 2),
5142 BPF_LD_IMM64(R1, 4294967294U),
5143 BPF_ALU32_REG(BPF_ADD, R0, R1),
5144 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5145 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5147 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5155 "ALU64_ADD_X: 1 + 2 = 3",
5157 BPF_LD_IMM64(R0, 1),
5158 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5159 BPF_ALU64_REG(BPF_ADD, R0, R1),
5167 "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5169 BPF_LD_IMM64(R0, 1),
5170 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5171 BPF_ALU64_REG(BPF_ADD, R0, R1),
5176 { { 0, 4294967295U } },
5179 "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5181 BPF_LD_IMM64(R0, 2),
5182 BPF_LD_IMM64(R1, 4294967294U),
5183 BPF_LD_IMM64(R2, 4294967296ULL),
5184 BPF_ALU64_REG(BPF_ADD, R0, R1),
5185 BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5186 BPF_MOV32_IMM(R0, 0),
5188 BPF_MOV32_IMM(R0, 1),
5195 /* BPF_ALU | BPF_ADD | BPF_K */
5197 "ALU_ADD_K: 1 + 2 = 3",
5199 BPF_LD_IMM64(R0, 1),
5200 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5208 "ALU_ADD_K: 3 + 0 = 3",
5210 BPF_LD_IMM64(R0, 3),
5211 BPF_ALU32_IMM(BPF_ADD, R0, 0),
5219 "ALU_ADD_K: 1 + 4294967294 = 4294967295",
5221 BPF_LD_IMM64(R0, 1),
5222 BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5227 { { 0, 4294967295U } },
5230 "ALU_ADD_K: 4294967294 + 2 = 0",
5232 BPF_LD_IMM64(R0, 4294967294U),
5233 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5234 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5235 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5237 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5245 "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5247 BPF_LD_IMM64(R2, 0x0),
5248 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5249 BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5250 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5251 BPF_MOV32_IMM(R0, 2),
5253 BPF_MOV32_IMM(R0, 1),
5261 "ALU_ADD_K: 0 + 0xffff = 0xffff",
5263 BPF_LD_IMM64(R2, 0x0),
5264 BPF_LD_IMM64(R3, 0xffff),
5265 BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5266 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5267 BPF_MOV32_IMM(R0, 2),
5269 BPF_MOV32_IMM(R0, 1),
5277 "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5279 BPF_LD_IMM64(R2, 0x0),
5280 BPF_LD_IMM64(R3, 0x7fffffff),
5281 BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5282 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5283 BPF_MOV32_IMM(R0, 2),
5285 BPF_MOV32_IMM(R0, 1),
5293 "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5295 BPF_LD_IMM64(R2, 0x0),
5296 BPF_LD_IMM64(R3, 0x80000000),
5297 BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5298 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5299 BPF_MOV32_IMM(R0, 2),
5301 BPF_MOV32_IMM(R0, 1),
5309 "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5311 BPF_LD_IMM64(R2, 0x0),
5312 BPF_LD_IMM64(R3, 0x80008000),
5313 BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5314 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5315 BPF_MOV32_IMM(R0, 2),
5317 BPF_MOV32_IMM(R0, 1),
5325 "ALU64_ADD_K: 1 + 2 = 3",
5327 BPF_LD_IMM64(R0, 1),
5328 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5336 "ALU64_ADD_K: 3 + 0 = 3",
5338 BPF_LD_IMM64(R0, 3),
5339 BPF_ALU64_IMM(BPF_ADD, R0, 0),
5347 "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5349 BPF_LD_IMM64(R0, 1),
5350 BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5355 { { 0, 2147483647 } },
5358 "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5360 BPF_LD_IMM64(R0, 4294967294U),
5361 BPF_LD_IMM64(R1, 4294967296ULL),
5362 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5363 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5364 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5366 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5374 "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5376 BPF_LD_IMM64(R0, 2147483646),
5377 BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5385 "ALU64_ADD_K: 1 + 0 = 1",
5387 BPF_LD_IMM64(R2, 0x1),
5388 BPF_LD_IMM64(R3, 0x1),
5389 BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5390 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5391 BPF_MOV32_IMM(R0, 2),
5393 BPF_MOV32_IMM(R0, 1),
5401 "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5403 BPF_LD_IMM64(R2, 0x0),
5404 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5405 BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5406 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5407 BPF_MOV32_IMM(R0, 2),
5409 BPF_MOV32_IMM(R0, 1),
5417 "ALU64_ADD_K: 0 + 0xffff = 0xffff",
5419 BPF_LD_IMM64(R2, 0x0),
5420 BPF_LD_IMM64(R3, 0xffff),
5421 BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5422 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5423 BPF_MOV32_IMM(R0, 2),
5425 BPF_MOV32_IMM(R0, 1),
5433 "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5435 BPF_LD_IMM64(R2, 0x0),
5436 BPF_LD_IMM64(R3, 0x7fffffff),
5437 BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5438 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5439 BPF_MOV32_IMM(R0, 2),
5441 BPF_MOV32_IMM(R0, 1),
5449 "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5451 BPF_LD_IMM64(R2, 0x0),
5452 BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5453 BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5454 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5455 BPF_MOV32_IMM(R0, 2),
5457 BPF_MOV32_IMM(R0, 1),
5465 "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5467 BPF_LD_IMM64(R2, 0x0),
5468 BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5469 BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5470 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5471 BPF_MOV32_IMM(R0, 2),
5473 BPF_MOV32_IMM(R0, 1),
5480 /* BPF_ALU | BPF_SUB | BPF_X */
5482 "ALU_SUB_X: 3 - 1 = 2",
5484 BPF_LD_IMM64(R0, 3),
5485 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5486 BPF_ALU32_REG(BPF_SUB, R0, R1),
5494 "ALU_SUB_X: 4294967295 - 4294967294 = 1",
5496 BPF_LD_IMM64(R0, 4294967295U),
5497 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5498 BPF_ALU32_REG(BPF_SUB, R0, R1),
5506 "ALU64_SUB_X: 3 - 1 = 2",
5508 BPF_LD_IMM64(R0, 3),
5509 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5510 BPF_ALU64_REG(BPF_SUB, R0, R1),
5518 "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5520 BPF_LD_IMM64(R0, 4294967295U),
5521 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5522 BPF_ALU64_REG(BPF_SUB, R0, R1),
5529 /* BPF_ALU | BPF_SUB | BPF_K */
5531 "ALU_SUB_K: 3 - 1 = 2",
5533 BPF_LD_IMM64(R0, 3),
5534 BPF_ALU32_IMM(BPF_SUB, R0, 1),
5542 "ALU_SUB_K: 3 - 0 = 3",
5544 BPF_LD_IMM64(R0, 3),
5545 BPF_ALU32_IMM(BPF_SUB, R0, 0),
5553 "ALU_SUB_K: 4294967295 - 4294967294 = 1",
5555 BPF_LD_IMM64(R0, 4294967295U),
5556 BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5564 "ALU64_SUB_K: 3 - 1 = 2",
5566 BPF_LD_IMM64(R0, 3),
5567 BPF_ALU64_IMM(BPF_SUB, R0, 1),
5575 "ALU64_SUB_K: 3 - 0 = 3",
5577 BPF_LD_IMM64(R0, 3),
5578 BPF_ALU64_IMM(BPF_SUB, R0, 0),
5586 "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5588 BPF_LD_IMM64(R0, 4294967294U),
5589 BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5597 "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
5599 BPF_LD_IMM64(R0, 2147483646),
5600 BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5607 /* BPF_ALU | BPF_MUL | BPF_X */
5609 "ALU_MUL_X: 2 * 3 = 6",
5611 BPF_LD_IMM64(R0, 2),
5612 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5613 BPF_ALU32_REG(BPF_MUL, R0, R1),
5621 "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5623 BPF_LD_IMM64(R0, 2),
5624 BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5625 BPF_ALU32_REG(BPF_MUL, R0, R1),
5630 { { 0, 0xFFFFFFF0 } },
5633 "ALU_MUL_X: -1 * -1 = 1",
5635 BPF_LD_IMM64(R0, -1),
5636 BPF_ALU32_IMM(BPF_MOV, R1, -1),
5637 BPF_ALU32_REG(BPF_MUL, R0, R1),
5645 "ALU64_MUL_X: 2 * 3 = 6",
5647 BPF_LD_IMM64(R0, 2),
5648 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5649 BPF_ALU64_REG(BPF_MUL, R0, R1),
5657 "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5659 BPF_LD_IMM64(R0, 1),
5660 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5661 BPF_ALU64_REG(BPF_MUL, R0, R1),
5666 { { 0, 2147483647 } },
5669 "ALU64_MUL_X: 64x64 multiply, low word",
5671 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5672 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5673 BPF_ALU64_REG(BPF_MUL, R0, R1),
5678 { { 0, 0xe5618cf0 } }
5681 "ALU64_MUL_X: 64x64 multiply, high word",
5683 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5684 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5685 BPF_ALU64_REG(BPF_MUL, R0, R1),
5686 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5691 { { 0, 0x2236d88f } }
5693 /* BPF_ALU | BPF_MUL | BPF_K */
5695 "ALU_MUL_K: 2 * 3 = 6",
5697 BPF_LD_IMM64(R0, 2),
5698 BPF_ALU32_IMM(BPF_MUL, R0, 3),
5706 "ALU_MUL_K: 3 * 1 = 3",
5708 BPF_LD_IMM64(R0, 3),
5709 BPF_ALU32_IMM(BPF_MUL, R0, 1),
5717 "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5719 BPF_LD_IMM64(R0, 2),
5720 BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5725 { { 0, 0xFFFFFFF0 } },
5728 "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5730 BPF_LD_IMM64(R2, 0x1),
5731 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5732 BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5733 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5734 BPF_MOV32_IMM(R0, 2),
5736 BPF_MOV32_IMM(R0, 1),
5744 "ALU64_MUL_K: 2 * 3 = 6",
5746 BPF_LD_IMM64(R0, 2),
5747 BPF_ALU64_IMM(BPF_MUL, R0, 3),
5755 "ALU64_MUL_K: 3 * 1 = 3",
5757 BPF_LD_IMM64(R0, 3),
5758 BPF_ALU64_IMM(BPF_MUL, R0, 1),
5766 "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5768 BPF_LD_IMM64(R0, 1),
5769 BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5774 { { 0, 2147483647 } },
5777 "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5779 BPF_LD_IMM64(R0, 1),
5780 BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5785 { { 0, -2147483647 } },
5788 "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5790 BPF_LD_IMM64(R2, 0x1),
5791 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5792 BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5793 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5794 BPF_MOV32_IMM(R0, 2),
5796 BPF_MOV32_IMM(R0, 1),
5804 "ALU64_MUL_K: 64x32 multiply, low word",
5806 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5807 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5812 { { 0, 0xe242d208 } }
5815 "ALU64_MUL_K: 64x32 multiply, high word",
5817 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5818 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5819 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5824 { { 0, 0xc28f5c28 } }
5826 /* BPF_ALU | BPF_DIV | BPF_X */
5828 "ALU_DIV_X: 6 / 2 = 3",
5830 BPF_LD_IMM64(R0, 6),
5831 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5832 BPF_ALU32_REG(BPF_DIV, R0, R1),
5840 "ALU_DIV_X: 4294967295 / 4294967295 = 1",
5842 BPF_LD_IMM64(R0, 4294967295U),
5843 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5844 BPF_ALU32_REG(BPF_DIV, R0, R1),
5852 "ALU64_DIV_X: 6 / 2 = 3",
5854 BPF_LD_IMM64(R0, 6),
5855 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5856 BPF_ALU64_REG(BPF_DIV, R0, R1),
5864 "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5866 BPF_LD_IMM64(R0, 2147483647),
5867 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5868 BPF_ALU64_REG(BPF_DIV, R0, R1),
5876 "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5878 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5879 BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5880 BPF_LD_IMM64(R3, 0x0000000000000001LL),
5881 BPF_ALU64_REG(BPF_DIV, R2, R4),
5882 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5883 BPF_MOV32_IMM(R0, 2),
5885 BPF_MOV32_IMM(R0, 1),
5892 /* BPF_ALU | BPF_DIV | BPF_K */
5894 "ALU_DIV_K: 6 / 2 = 3",
5896 BPF_LD_IMM64(R0, 6),
5897 BPF_ALU32_IMM(BPF_DIV, R0, 2),
5905 "ALU_DIV_K: 3 / 1 = 3",
5907 BPF_LD_IMM64(R0, 3),
5908 BPF_ALU32_IMM(BPF_DIV, R0, 1),
5916 "ALU_DIV_K: 4294967295 / 4294967295 = 1",
5918 BPF_LD_IMM64(R0, 4294967295U),
5919 BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
5927 "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
5929 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5930 BPF_LD_IMM64(R3, 0x1UL),
5931 BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
5932 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5933 BPF_MOV32_IMM(R0, 2),
5935 BPF_MOV32_IMM(R0, 1),
5943 "ALU64_DIV_K: 6 / 2 = 3",
5945 BPF_LD_IMM64(R0, 6),
5946 BPF_ALU64_IMM(BPF_DIV, R0, 2),
5954 "ALU64_DIV_K: 3 / 1 = 3",
5956 BPF_LD_IMM64(R0, 3),
5957 BPF_ALU64_IMM(BPF_DIV, R0, 1),
5965 "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
5967 BPF_LD_IMM64(R0, 2147483647),
5968 BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
5976 "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5978 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5979 BPF_LD_IMM64(R3, 0x0000000000000001LL),
5980 BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
5981 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5982 BPF_MOV32_IMM(R0, 2),
5984 BPF_MOV32_IMM(R0, 1),
5991 /* BPF_ALU | BPF_MOD | BPF_X */
5993 "ALU_MOD_X: 3 % 2 = 1",
5995 BPF_LD_IMM64(R0, 3),
5996 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5997 BPF_ALU32_REG(BPF_MOD, R0, R1),
6005 "ALU_MOD_X: 4294967295 % 4294967293 = 2",
6007 BPF_LD_IMM64(R0, 4294967295U),
6008 BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
6009 BPF_ALU32_REG(BPF_MOD, R0, R1),
6017 "ALU64_MOD_X: 3 % 2 = 1",
6019 BPF_LD_IMM64(R0, 3),
6020 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6021 BPF_ALU64_REG(BPF_MOD, R0, R1),
6029 "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6031 BPF_LD_IMM64(R0, 2147483647),
6032 BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
6033 BPF_ALU64_REG(BPF_MOD, R0, R1),
6040 /* BPF_ALU | BPF_MOD | BPF_K */
6042 "ALU_MOD_K: 3 % 2 = 1",
6044 BPF_LD_IMM64(R0, 3),
6045 BPF_ALU32_IMM(BPF_MOD, R0, 2),
6053 "ALU_MOD_K: 3 % 1 = 0",
6055 BPF_LD_IMM64(R0, 3),
6056 BPF_ALU32_IMM(BPF_MOD, R0, 1),
6064 "ALU_MOD_K: 4294967295 % 4294967293 = 2",
6066 BPF_LD_IMM64(R0, 4294967295U),
6067 BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
6075 "ALU64_MOD_K: 3 % 2 = 1",
6077 BPF_LD_IMM64(R0, 3),
6078 BPF_ALU64_IMM(BPF_MOD, R0, 2),
6086 "ALU64_MOD_K: 3 % 1 = 0",
6088 BPF_LD_IMM64(R0, 3),
6089 BPF_ALU64_IMM(BPF_MOD, R0, 1),
6097 "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6099 BPF_LD_IMM64(R0, 2147483647),
6100 BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6107 /* BPF_ALU | BPF_AND | BPF_X */
6109 "ALU_AND_X: 3 & 2 = 2",
6111 BPF_LD_IMM64(R0, 3),
6112 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6113 BPF_ALU32_REG(BPF_AND, R0, R1),
6121 "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6123 BPF_LD_IMM64(R0, 0xffffffff),
6124 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6125 BPF_ALU32_REG(BPF_AND, R0, R1),
6130 { { 0, 0xffffffff } },
6133 "ALU64_AND_X: 3 & 2 = 2",
6135 BPF_LD_IMM64(R0, 3),
6136 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6137 BPF_ALU64_REG(BPF_AND, R0, R1),
6145 "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6147 BPF_LD_IMM64(R0, 0xffffffff),
6148 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6149 BPF_ALU64_REG(BPF_AND, R0, R1),
6154 { { 0, 0xffffffff } },
6156 /* BPF_ALU | BPF_AND | BPF_K */
6158 "ALU_AND_K: 3 & 2 = 2",
6160 BPF_LD_IMM64(R0, 3),
6161 BPF_ALU32_IMM(BPF_AND, R0, 2),
6169 "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6171 BPF_LD_IMM64(R0, 0xffffffff),
6172 BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6177 { { 0, 0xffffffff } },
6180 "ALU_AND_K: Small immediate",
6182 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6183 BPF_ALU32_IMM(BPF_AND, R0, 15),
6191 "ALU_AND_K: Large immediate",
6193 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6194 BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6199 { { 0, 0xa1b2c3d4 } }
6202 "ALU_AND_K: Zero extension",
6204 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6205 BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6206 BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6207 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6208 BPF_MOV32_IMM(R0, 2),
6210 BPF_MOV32_IMM(R0, 1),
6218 "ALU64_AND_K: 3 & 2 = 2",
6220 BPF_LD_IMM64(R0, 3),
6221 BPF_ALU64_IMM(BPF_AND, R0, 2),
6229 "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6231 BPF_LD_IMM64(R0, 0xffffffff),
6232 BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6237 { { 0, 0xffffffff } },
6240 "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6242 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6243 BPF_LD_IMM64(R3, 0x0000000000000000LL),
6244 BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6245 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6246 BPF_MOV32_IMM(R0, 2),
6248 BPF_MOV32_IMM(R0, 1),
6256 "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6258 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6259 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6260 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6261 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6262 BPF_MOV32_IMM(R0, 2),
6264 BPF_MOV32_IMM(R0, 1),
6272 "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6274 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6275 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6276 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6277 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6278 BPF_MOV32_IMM(R0, 2),
6280 BPF_MOV32_IMM(R0, 1),
6288 "ALU64_AND_K: Sign extension 1",
6290 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6291 BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6292 BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6293 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6294 BPF_MOV32_IMM(R0, 2),
6296 BPF_MOV32_IMM(R0, 1),
6304 "ALU64_AND_K: Sign extension 2",
6306 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6307 BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6308 BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6309 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6310 BPF_MOV32_IMM(R0, 2),
6312 BPF_MOV32_IMM(R0, 1),
6319 /* BPF_ALU | BPF_OR | BPF_X */
6321 "ALU_OR_X: 1 | 2 = 3",
6323 BPF_LD_IMM64(R0, 1),
6324 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6325 BPF_ALU32_REG(BPF_OR, R0, R1),
6333 "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6335 BPF_LD_IMM64(R0, 0),
6336 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6337 BPF_ALU32_REG(BPF_OR, R0, R1),
6342 { { 0, 0xffffffff } },
6345 "ALU64_OR_X: 1 | 2 = 3",
6347 BPF_LD_IMM64(R0, 1),
6348 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6349 BPF_ALU64_REG(BPF_OR, R0, R1),
6357 "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6359 BPF_LD_IMM64(R0, 0),
6360 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6361 BPF_ALU64_REG(BPF_OR, R0, R1),
6366 { { 0, 0xffffffff } },
6368 /* BPF_ALU | BPF_OR | BPF_K */
6370 "ALU_OR_K: 1 | 2 = 3",
6372 BPF_LD_IMM64(R0, 1),
6373 BPF_ALU32_IMM(BPF_OR, R0, 2),
6381 "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
6383 BPF_LD_IMM64(R0, 0),
6384 BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6389 { { 0, 0xffffffff } },
6392 "ALU_OR_K: Small immediate",
6394 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6395 BPF_ALU32_IMM(BPF_OR, R0, 1),
6400 { { 0, 0x01020305 } }
6403 "ALU_OR_K: Large immediate",
6405 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6406 BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6411 { { 0, 0xa1b2c3d4 } }
6414 "ALU_OR_K: Zero extension",
6416 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6417 BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6418 BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6419 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6420 BPF_MOV32_IMM(R0, 2),
6422 BPF_MOV32_IMM(R0, 1),
6430 "ALU64_OR_K: 1 | 2 = 3",
6432 BPF_LD_IMM64(R0, 1),
6433 BPF_ALU64_IMM(BPF_OR, R0, 2),
6441 "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
6443 BPF_LD_IMM64(R0, 0),
6444 BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6449 { { 0, 0xffffffff } },
6452 "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6454 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6455 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6456 BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6457 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6458 BPF_MOV32_IMM(R0, 2),
6460 BPF_MOV32_IMM(R0, 1),
6468 "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6470 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6471 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6472 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6473 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6474 BPF_MOV32_IMM(R0, 2),
6476 BPF_MOV32_IMM(R0, 1),
6484 "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
6486 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6487 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6488 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6489 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6490 BPF_MOV32_IMM(R0, 2),
6492 BPF_MOV32_IMM(R0, 1),
6500 "ALU64_OR_K: Sign extension 1",
6502 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6503 BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6504 BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6505 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6506 BPF_MOV32_IMM(R0, 2),
6508 BPF_MOV32_IMM(R0, 1),
6516 "ALU64_OR_K: Sign extension 2",
6518 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6519 BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6520 BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6521 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6522 BPF_MOV32_IMM(R0, 2),
6524 BPF_MOV32_IMM(R0, 1),
6531 /* BPF_ALU | BPF_XOR | BPF_X */
6533 "ALU_XOR_X: 5 ^ 6 = 3",
6535 BPF_LD_IMM64(R0, 5),
6536 BPF_ALU32_IMM(BPF_MOV, R1, 6),
6537 BPF_ALU32_REG(BPF_XOR, R0, R1),
6545 "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6547 BPF_LD_IMM64(R0, 1),
6548 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6549 BPF_ALU32_REG(BPF_XOR, R0, R1),
6554 { { 0, 0xfffffffe } },
6557 "ALU64_XOR_X: 5 ^ 6 = 3",
6559 BPF_LD_IMM64(R0, 5),
6560 BPF_ALU32_IMM(BPF_MOV, R1, 6),
6561 BPF_ALU64_REG(BPF_XOR, R0, R1),
6569 "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6571 BPF_LD_IMM64(R0, 1),
6572 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6573 BPF_ALU64_REG(BPF_XOR, R0, R1),
6578 { { 0, 0xfffffffe } },
6580 /* BPF_ALU | BPF_XOR | BPF_K */
6582 "ALU_XOR_K: 5 ^ 6 = 3",
6584 BPF_LD_IMM64(R0, 5),
6585 BPF_ALU32_IMM(BPF_XOR, R0, 6),
6593 "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6595 BPF_LD_IMM64(R0, 1),
6596 BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6601 { { 0, 0xfffffffe } },
6604 "ALU_XOR_K: Small immediate",
6606 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6607 BPF_ALU32_IMM(BPF_XOR, R0, 15),
6612 { { 0, 0x0102030b } }
6615 "ALU_XOR_K: Large immediate",
6617 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6618 BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6623 { { 0, 0x5e4d3c2b } }
6626 "ALU_XOR_K: Zero extension",
6628 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6629 BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6630 BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6631 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6632 BPF_MOV32_IMM(R0, 2),
6634 BPF_MOV32_IMM(R0, 1),
6642 "ALU64_XOR_K: 5 ^ 6 = 3",
6644 BPF_LD_IMM64(R0, 5),
6645 BPF_ALU64_IMM(BPF_XOR, R0, 6),
6653 "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6655 BPF_LD_IMM64(R0, 1),
6656 BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6661 { { 0, 0xfffffffe } },
6664 "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6666 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6667 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6668 BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6669 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6670 BPF_MOV32_IMM(R0, 2),
6672 BPF_MOV32_IMM(R0, 1),
6680 "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6682 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6683 BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6684 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6685 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6686 BPF_MOV32_IMM(R0, 2),
6688 BPF_MOV32_IMM(R0, 1),
6696 "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
6698 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6699 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6700 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6701 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6702 BPF_MOV32_IMM(R0, 2),
6704 BPF_MOV32_IMM(R0, 1),
6712 "ALU64_XOR_K: Sign extension 1",
6714 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6715 BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6716 BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6717 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6718 BPF_MOV32_IMM(R0, 2),
6720 BPF_MOV32_IMM(R0, 1),
6728 "ALU64_XOR_K: Sign extension 2",
6730 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6731 BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6732 BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6733 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6734 BPF_MOV32_IMM(R0, 2),
6736 BPF_MOV32_IMM(R0, 1),
6743 /* BPF_ALU | BPF_LSH | BPF_X */
6745 "ALU_LSH_X: 1 << 1 = 2",
6747 BPF_LD_IMM64(R0, 1),
6748 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6749 BPF_ALU32_REG(BPF_LSH, R0, R1),
6757 "ALU_LSH_X: 1 << 31 = 0x80000000",
6759 BPF_LD_IMM64(R0, 1),
6760 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6761 BPF_ALU32_REG(BPF_LSH, R0, R1),
6766 { { 0, 0x80000000 } },
6769 "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6771 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6772 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6773 BPF_ALU32_REG(BPF_LSH, R0, R1),
6778 { { 0, 0x45678000 } }
6781 "ALU64_LSH_X: 1 << 1 = 2",
6783 BPF_LD_IMM64(R0, 1),
6784 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6785 BPF_ALU64_REG(BPF_LSH, R0, R1),
6793 "ALU64_LSH_X: 1 << 31 = 0x80000000",
6795 BPF_LD_IMM64(R0, 1),
6796 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6797 BPF_ALU64_REG(BPF_LSH, R0, R1),
6802 { { 0, 0x80000000 } },
6805 "ALU64_LSH_X: Shift < 32, low word",
6807 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6808 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6809 BPF_ALU64_REG(BPF_LSH, R0, R1),
6814 { { 0, 0xbcdef000 } }
6817 "ALU64_LSH_X: Shift < 32, high word",
6819 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6820 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6821 BPF_ALU64_REG(BPF_LSH, R0, R1),
6822 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6827 { { 0, 0x3456789a } }
6830 "ALU64_LSH_X: Shift > 32, low word",
6832 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6833 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6834 BPF_ALU64_REG(BPF_LSH, R0, R1),
6842 "ALU64_LSH_X: Shift > 32, high word",
6844 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6845 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6846 BPF_ALU64_REG(BPF_LSH, R0, R1),
6847 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6852 { { 0, 0x9abcdef0 } }
6855 "ALU64_LSH_X: Shift == 32, low word",
6857 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6858 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6859 BPF_ALU64_REG(BPF_LSH, R0, R1),
6867 "ALU64_LSH_X: Shift == 32, high word",
6869 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6870 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6871 BPF_ALU64_REG(BPF_LSH, R0, R1),
6872 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6877 { { 0, 0x89abcdef } }
6880 "ALU64_LSH_X: Zero shift, low word",
6882 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6883 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6884 BPF_ALU64_REG(BPF_LSH, R0, R1),
6889 { { 0, 0x89abcdef } }
6892 "ALU64_LSH_X: Zero shift, high word",
6894 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6895 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6896 BPF_ALU64_REG(BPF_LSH, R0, R1),
6897 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6902 { { 0, 0x01234567 } }
6904 /* BPF_ALU | BPF_LSH | BPF_K */
6906 "ALU_LSH_K: 1 << 1 = 2",
6908 BPF_LD_IMM64(R0, 1),
6909 BPF_ALU32_IMM(BPF_LSH, R0, 1),
6917 "ALU_LSH_K: 1 << 31 = 0x80000000",
6919 BPF_LD_IMM64(R0, 1),
6920 BPF_ALU32_IMM(BPF_LSH, R0, 31),
6925 { { 0, 0x80000000 } },
6928 "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
6930 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6931 BPF_ALU32_IMM(BPF_LSH, R0, 12),
6936 { { 0, 0x45678000 } }
6939 "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
6941 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6942 BPF_ALU32_IMM(BPF_LSH, R0, 0),
6947 { { 0, 0x12345678 } }
6950 "ALU64_LSH_K: 1 << 1 = 2",
6952 BPF_LD_IMM64(R0, 1),
6953 BPF_ALU64_IMM(BPF_LSH, R0, 1),
6961 "ALU64_LSH_K: 1 << 31 = 0x80000000",
6963 BPF_LD_IMM64(R0, 1),
6964 BPF_ALU64_IMM(BPF_LSH, R0, 31),
6969 { { 0, 0x80000000 } },
6972 "ALU64_LSH_K: Shift < 32, low word",
6974 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6975 BPF_ALU64_IMM(BPF_LSH, R0, 12),
6980 { { 0, 0xbcdef000 } }
6983 "ALU64_LSH_K: Shift < 32, high word",
6985 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6986 BPF_ALU64_IMM(BPF_LSH, R0, 12),
6987 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6992 { { 0, 0x3456789a } }
6995 "ALU64_LSH_K: Shift > 32, low word",
6997 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6998 BPF_ALU64_IMM(BPF_LSH, R0, 36),
7006 "ALU64_LSH_K: Shift > 32, high word",
7008 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7009 BPF_ALU64_IMM(BPF_LSH, R0, 36),
7010 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7015 { { 0, 0x9abcdef0 } }
7018 "ALU64_LSH_K: Shift == 32, low word",
7020 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7021 BPF_ALU64_IMM(BPF_LSH, R0, 32),
7029 "ALU64_LSH_K: Shift == 32, high word",
7031 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7032 BPF_ALU64_IMM(BPF_LSH, R0, 32),
7033 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7038 { { 0, 0x89abcdef } }
7041 "ALU64_LSH_K: Zero shift",
7043 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7044 BPF_ALU64_IMM(BPF_LSH, R0, 0),
7049 { { 0, 0x89abcdef } }
7051 /* BPF_ALU | BPF_RSH | BPF_X */
7053 "ALU_RSH_X: 2 >> 1 = 1",
7055 BPF_LD_IMM64(R0, 2),
7056 BPF_ALU32_IMM(BPF_MOV, R1, 1),
7057 BPF_ALU32_REG(BPF_RSH, R0, R1),
7065 "ALU_RSH_X: 0x80000000 >> 31 = 1",
7067 BPF_LD_IMM64(R0, 0x80000000),
7068 BPF_ALU32_IMM(BPF_MOV, R1, 31),
7069 BPF_ALU32_REG(BPF_RSH, R0, R1),
7077 "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7079 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7080 BPF_ALU32_IMM(BPF_MOV, R1, 20),
7081 BPF_ALU32_REG(BPF_RSH, R0, R1),
7089 "ALU64_RSH_X: 2 >> 1 = 1",
7091 BPF_LD_IMM64(R0, 2),
7092 BPF_ALU32_IMM(BPF_MOV, R1, 1),
7093 BPF_ALU64_REG(BPF_RSH, R0, R1),
7101 "ALU64_RSH_X: 0x80000000 >> 31 = 1",
7103 BPF_LD_IMM64(R0, 0x80000000),
7104 BPF_ALU32_IMM(BPF_MOV, R1, 31),
7105 BPF_ALU64_REG(BPF_RSH, R0, R1),
7113 "ALU64_RSH_X: Shift < 32, low word",
7115 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7116 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7117 BPF_ALU64_REG(BPF_RSH, R0, R1),
7122 { { 0, 0x56789abc } }
7125 "ALU64_RSH_X: Shift < 32, high word",
7127 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7128 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7129 BPF_ALU64_REG(BPF_RSH, R0, R1),
7130 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7135 { { 0, 0x00081234 } }
7138 "ALU64_RSH_X: Shift > 32, low word",
7140 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7141 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7142 BPF_ALU64_REG(BPF_RSH, R0, R1),
7147 { { 0, 0x08123456 } }
7150 "ALU64_RSH_X: Shift > 32, high word",
7152 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7153 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7154 BPF_ALU64_REG(BPF_RSH, R0, R1),
7155 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7163 "ALU64_RSH_X: Shift == 32, low word",
7165 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7166 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7167 BPF_ALU64_REG(BPF_RSH, R0, R1),
7172 { { 0, 0x81234567 } }
7175 "ALU64_RSH_X: Shift == 32, high word",
7177 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7178 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7179 BPF_ALU64_REG(BPF_RSH, R0, R1),
7180 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7188 "ALU64_RSH_X: Zero shift, low word",
7190 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7191 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7192 BPF_ALU64_REG(BPF_RSH, R0, R1),
7197 { { 0, 0x89abcdef } }
7200 "ALU64_RSH_X: Zero shift, high word",
7202 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7203 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7204 BPF_ALU64_REG(BPF_RSH, R0, R1),
7205 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7210 { { 0, 0x81234567 } }
7212 /* BPF_ALU | BPF_RSH | BPF_K */
7214 "ALU_RSH_K: 2 >> 1 = 1",
7216 BPF_LD_IMM64(R0, 2),
7217 BPF_ALU32_IMM(BPF_RSH, R0, 1),
7225 "ALU_RSH_K: 0x80000000 >> 31 = 1",
7227 BPF_LD_IMM64(R0, 0x80000000),
7228 BPF_ALU32_IMM(BPF_RSH, R0, 31),
7236 "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7238 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7239 BPF_ALU32_IMM(BPF_RSH, R0, 20),
7247 "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7249 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7250 BPF_ALU32_IMM(BPF_RSH, R0, 0),
7255 { { 0, 0x12345678 } }
7258 "ALU64_RSH_K: 2 >> 1 = 1",
7260 BPF_LD_IMM64(R0, 2),
7261 BPF_ALU64_IMM(BPF_RSH, R0, 1),
7269 "ALU64_RSH_K: 0x80000000 >> 31 = 1",
7271 BPF_LD_IMM64(R0, 0x80000000),
7272 BPF_ALU64_IMM(BPF_RSH, R0, 31),
7280 "ALU64_RSH_K: Shift < 32, low word",
7282 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7283 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7288 { { 0, 0x56789abc } }
7291 "ALU64_RSH_K: Shift < 32, high word",
7293 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7294 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7295 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7300 { { 0, 0x00081234 } }
7303 "ALU64_RSH_K: Shift > 32, low word",
7305 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7306 BPF_ALU64_IMM(BPF_RSH, R0, 36),
7311 { { 0, 0x08123456 } }
7314 "ALU64_RSH_K: Shift > 32, high word",
7316 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7317 BPF_ALU64_IMM(BPF_RSH, R0, 36),
7318 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7326 "ALU64_RSH_K: Shift == 32, low word",
7328 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7329 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7334 { { 0, 0x81234567 } }
7337 "ALU64_RSH_K: Shift == 32, high word",
7339 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7340 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7341 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7349 "ALU64_RSH_K: Zero shift",
7351 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7352 BPF_ALU64_IMM(BPF_RSH, R0, 0),
7357 { { 0, 0x89abcdef } }
7359 /* BPF_ALU | BPF_ARSH | BPF_X */
7361 "ALU32_ARSH_X: -1234 >> 7 = -10",
7363 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7364 BPF_ALU32_IMM(BPF_MOV, R1, 7),
7365 BPF_ALU32_REG(BPF_ARSH, R0, R1),
7373 "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7375 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7376 BPF_ALU32_IMM(BPF_MOV, R1, 40),
7377 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7382 { { 0, 0xffff00ff } },
7385 "ALU64_ARSH_X: Shift < 32, low word",
7387 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7388 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7389 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7394 { { 0, 0x56789abc } }
7397 "ALU64_ARSH_X: Shift < 32, high word",
7399 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7400 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7401 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7402 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7407 { { 0, 0xfff81234 } }
7410 "ALU64_ARSH_X: Shift > 32, low word",
7412 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7413 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7414 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7419 { { 0, 0xf8123456 } }
7422 "ALU64_ARSH_X: Shift > 32, high word",
7424 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7425 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7426 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7427 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7435 "ALU64_ARSH_X: Shift == 32, low word",
7437 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7438 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7439 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7444 { { 0, 0x81234567 } }
7447 "ALU64_ARSH_X: Shift == 32, high word",
7449 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7450 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7451 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7452 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7460 "ALU64_ARSH_X: Zero shift, low word",
7462 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7463 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7464 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7469 { { 0, 0x89abcdef } }
7472 "ALU64_ARSH_X: Zero shift, high word",
7474 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7475 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7476 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7477 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7482 { { 0, 0x81234567 } }
7484 /* BPF_ALU | BPF_ARSH | BPF_K */
7486 "ALU32_ARSH_K: -1234 >> 7 = -10",
7488 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7489 BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7497 "ALU32_ARSH_K: -1234 >> 0 = -1234",
7499 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7500 BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7508 "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7510 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7511 BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7516 { { 0, 0xffff00ff } },
7519 "ALU64_ARSH_K: Shift < 32, low word",
7521 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7522 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7527 { { 0, 0x56789abc } }
7530 "ALU64_ARSH_K: Shift < 32, high word",
7532 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7533 BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7534 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7539 { { 0, 0xfff81234 } }
7542 "ALU64_ARSH_K: Shift > 32, low word",
7544 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7545 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7550 { { 0, 0xf8123456 } }
7553 "ALU64_ARSH_K: Shift > 32, high word",
7555 BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7556 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7557 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7565 "ALU64_ARSH_K: Shift == 32, low word",
7567 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7568 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7573 { { 0, 0x81234567 } }
7576 "ALU64_ARSH_K: Shift == 32, high word",
7578 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7579 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7580 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7588 "ALU64_ARSH_K: Zero shift",
7590 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7591 BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7596 { { 0, 0x89abcdef } }
7598 /* BPF_ALU | BPF_NEG */
7600 "ALU_NEG: -(3) = -3",
7602 BPF_ALU32_IMM(BPF_MOV, R0, 3),
7603 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7611 "ALU_NEG: -(-3) = 3",
7613 BPF_ALU32_IMM(BPF_MOV, R0, -3),
7614 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7622 "ALU64_NEG: -(3) = -3",
7624 BPF_LD_IMM64(R0, 3),
7625 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7633 "ALU64_NEG: -(-3) = 3",
7635 BPF_LD_IMM64(R0, -3),
7636 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7643 /* BPF_ALU | BPF_END | BPF_FROM_BE */
7645 "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7647 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7648 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7653 { { 0, cpu_to_be16(0xcdef) } },
7656 "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7658 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7659 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7660 BPF_ALU64_REG(BPF_MOV, R1, R0),
7661 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7662 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7667 { { 0, cpu_to_be32(0x89abcdef) } },
7670 "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7672 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7673 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7678 { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7681 "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7683 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7684 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7685 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7690 { { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7692 /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7694 "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7696 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7697 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7702 { { 0, cpu_to_be16(0x3210) } },
7705 "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7707 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7708 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7709 BPF_ALU64_REG(BPF_MOV, R1, R0),
7710 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7711 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7716 { { 0, cpu_to_be32(0x76543210) } },
7719 "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7721 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7722 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7727 { { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7730 "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7732 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7733 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7734 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7739 { { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7741 /* BPF_ALU | BPF_END | BPF_FROM_LE */
7743 "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7745 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7746 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7751 { { 0, cpu_to_le16(0xcdef) } },
7754 "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7756 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7757 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7758 BPF_ALU64_REG(BPF_MOV, R1, R0),
7759 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7760 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7765 { { 0, cpu_to_le32(0x89abcdef) } },
7768 "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7770 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7771 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7776 { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7779 "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7781 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7782 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7783 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7788 { { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7790 /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7792 "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7794 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7795 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7800 { { 0, cpu_to_le16(0x3210) } },
7803 "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
7805 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7806 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7807 BPF_ALU64_REG(BPF_MOV, R1, R0),
7808 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7809 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7814 { { 0, cpu_to_le32(0x76543210) } },
7817 "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
7819 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7820 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7825 { { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
7828 "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
7830 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7831 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7832 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7837 { { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
7839 /* BPF_LDX_MEM B/H/W/DW */
7841 "BPF_LDX_MEM | BPF_B",
7843 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7844 BPF_LD_IMM64(R2, 0x0000000000000008ULL),
7845 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7847 BPF_LDX_MEM(BPF_B, R0, R10, -1),
7849 BPF_LDX_MEM(BPF_B, R0, R10, -8),
7851 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7852 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7861 "BPF_LDX_MEM | BPF_B, MSB set",
7863 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7864 BPF_LD_IMM64(R2, 0x0000000000000088ULL),
7865 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7867 BPF_LDX_MEM(BPF_B, R0, R10, -1),
7869 BPF_LDX_MEM(BPF_B, R0, R10, -8),
7871 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7872 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7881 "BPF_LDX_MEM | BPF_H",
7883 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7884 BPF_LD_IMM64(R2, 0x0000000000000708ULL),
7885 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7887 BPF_LDX_MEM(BPF_H, R0, R10, -2),
7889 BPF_LDX_MEM(BPF_H, R0, R10, -8),
7891 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7892 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7901 "BPF_LDX_MEM | BPF_H, MSB set",
7903 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7904 BPF_LD_IMM64(R2, 0x0000000000008788ULL),
7905 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7907 BPF_LDX_MEM(BPF_H, R0, R10, -2),
7909 BPF_LDX_MEM(BPF_H, R0, R10, -8),
7911 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7912 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7921 "BPF_LDX_MEM | BPF_W",
7923 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7924 BPF_LD_IMM64(R2, 0x0000000005060708ULL),
7925 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7927 BPF_LDX_MEM(BPF_W, R0, R10, -4),
7929 BPF_LDX_MEM(BPF_W, R0, R10, -8),
7931 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7932 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7941 "BPF_LDX_MEM | BPF_W, MSB set",
7943 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7944 BPF_LD_IMM64(R2, 0x0000000085868788ULL),
7945 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7947 BPF_LDX_MEM(BPF_W, R0, R10, -4),
7949 BPF_LDX_MEM(BPF_W, R0, R10, -8),
7951 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7952 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7960 /* BPF_STX_MEM B/H/W/DW */
7962 "BPF_STX_MEM | BPF_B",
7964 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7965 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
7966 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
7967 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7969 BPF_STX_MEM(BPF_B, R10, R2, -1),
7971 BPF_STX_MEM(BPF_B, R10, R2, -8),
7973 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7974 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7975 BPF_ALU64_IMM(BPF_MOV, R0, 0),
7984 "BPF_STX_MEM | BPF_B, MSB set",
7986 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7987 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
7988 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
7989 BPF_STX_MEM(BPF_DW, R10, R1, -8),
7991 BPF_STX_MEM(BPF_B, R10, R2, -1),
7993 BPF_STX_MEM(BPF_B, R10, R2, -8),
7995 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7996 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7997 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8006 "BPF_STX_MEM | BPF_H",
8008 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8009 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8010 BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
8011 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8013 BPF_STX_MEM(BPF_H, R10, R2, -2),
8015 BPF_STX_MEM(BPF_H, R10, R2, -8),
8017 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8018 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8019 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8028 "BPF_STX_MEM | BPF_H, MSB set",
8030 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8031 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8032 BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
8033 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8035 BPF_STX_MEM(BPF_H, R10, R2, -2),
8037 BPF_STX_MEM(BPF_H, R10, R2, -8),
8039 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8040 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8041 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8050 "BPF_STX_MEM | BPF_W",
8052 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8053 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8054 BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
8055 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8057 BPF_STX_MEM(BPF_W, R10, R2, -4),
8059 BPF_STX_MEM(BPF_W, R10, R2, -8),
8061 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8062 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8063 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8072 "BPF_STX_MEM | BPF_W, MSB set",
8074 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8075 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8076 BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
8077 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8079 BPF_STX_MEM(BPF_W, R10, R2, -4),
8081 BPF_STX_MEM(BPF_W, R10, R2, -8),
8083 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8084 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8085 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8093 /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
8095 "ST_MEM_B: Store/Load byte: max negative",
8097 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8098 BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8099 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8108 "ST_MEM_B: Store/Load byte: max positive",
8110 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8111 BPF_ST_MEM(BPF_H, R10, -40, 0x7f),
8112 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8121 "STX_MEM_B: Store/Load byte: max negative",
8123 BPF_LD_IMM64(R0, 0),
8124 BPF_LD_IMM64(R1, 0xffLL),
8125 BPF_STX_MEM(BPF_B, R10, R1, -40),
8126 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8135 "ST_MEM_H: Store/Load half word: max negative",
8137 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8138 BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8139 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8148 "ST_MEM_H: Store/Load half word: max positive",
8150 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8151 BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8152 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8161 "STX_MEM_H: Store/Load half word: max negative",
8163 BPF_LD_IMM64(R0, 0),
8164 BPF_LD_IMM64(R1, 0xffffLL),
8165 BPF_STX_MEM(BPF_H, R10, R1, -40),
8166 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8175 "ST_MEM_W: Store/Load word: max negative",
8177 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8178 BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8179 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8184 { { 0, 0xffffffff } },
8188 "ST_MEM_W: Store/Load word: max positive",
8190 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8191 BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8192 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8197 { { 0, 0x7fffffff } },
8201 "STX_MEM_W: Store/Load word: max negative",
8203 BPF_LD_IMM64(R0, 0),
8204 BPF_LD_IMM64(R1, 0xffffffffLL),
8205 BPF_STX_MEM(BPF_W, R10, R1, -40),
8206 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8211 { { 0, 0xffffffff } },
8215 "ST_MEM_DW: Store/Load double word: max negative",
8217 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8218 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8219 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8224 { { 0, 0xffffffff } },
8228 "ST_MEM_DW: Store/Load double word: max negative 2",
8230 BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8231 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8232 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8233 BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8234 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8235 BPF_MOV32_IMM(R0, 2),
8237 BPF_MOV32_IMM(R0, 1),
8246 "ST_MEM_DW: Store/Load double word: max positive",
8248 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8249 BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8250 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8255 { { 0, 0x7fffffff } },
8259 "STX_MEM_DW: Store/Load double word: max negative",
8261 BPF_LD_IMM64(R0, 0),
8262 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8263 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8264 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8269 { { 0, 0xffffffff } },
8273 "STX_MEM_DW: Store double word: first word in memory",
8275 BPF_LD_IMM64(R0, 0),
8276 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8277 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8278 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8284 { { 0, 0x01234567 } },
8286 { { 0, 0x89abcdef } },
8291 "STX_MEM_DW: Store double word: second word in memory",
8293 BPF_LD_IMM64(R0, 0),
8294 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8295 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8296 BPF_LDX_MEM(BPF_W, R0, R10, -36),
8302 { { 0, 0x89abcdef } },
8304 { { 0, 0x01234567 } },
8308 /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
8310 "STX_XADD_W: X + 1 + 1 + 1 + ...",
8315 .fill_helper = bpf_fill_stxw,
8318 "STX_XADD_DW: X + 1 + 1 + 1 + ...",
8323 .fill_helper = bpf_fill_stxdw,
8326 * Exhaustive tests of atomic operation variants.
8327 * Individual tests are expanded from template macros for all
8328 * combinations of ALU operation, word size and fetching.
/* For a 32-bit (BPF_W) atomic operation, OR a recognizable poison pattern
 * into the upper half of the 64-bit source register (see its use in the
 * BPF_ATOMIC_OP_TEST* macros below). A correct implementation must ignore
 * bits beyond the operation width, so the poison must not leak into either
 * the memory operand or the fetched result. For BPF_DW the full register
 * is significant, hence no poison is applied.
 */
8330 #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8332 #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result) \
8334 "BPF_ATOMIC | " #width ", " #op ": Test: " \
8335 #old " " #logic " " #update " = " #result, \
8337 BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)), \
8338 BPF_ST_MEM(width, R10, -40, old), \
8339 BPF_ATOMIC_OP(width, op, R10, R5, -40), \
8340 BPF_LDX_MEM(width, R0, R10, -40), \
8341 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8342 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8343 BPF_ALU64_REG(BPF_OR, R0, R1), \
8348 { { 0, result } }, \
8349 .stack_depth = 40, \
8351 #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
8353 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
8354 #old " " #logic " " #update " = " #result, \
8356 BPF_ALU64_REG(BPF_MOV, R1, R10), \
8357 BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
8358 BPF_ST_MEM(BPF_W, R10, -40, old), \
8359 BPF_ATOMIC_OP(width, op, R10, R0, -40), \
8360 BPF_ALU64_REG(BPF_MOV, R0, R10), \
8361 BPF_ALU64_REG(BPF_SUB, R0, R1), \
8362 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8363 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8364 BPF_ALU64_REG(BPF_OR, R0, R1), \
8370 .stack_depth = 40, \
8372 #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
8374 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
8375 #old " " #logic " " #update " = " #result, \
8377 BPF_ALU64_REG(BPF_MOV, R0, R10), \
8378 BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
8379 BPF_ST_MEM(width, R10, -40, old), \
8380 BPF_ATOMIC_OP(width, op, R10, R1, -40), \
8381 BPF_ALU64_REG(BPF_SUB, R0, R10), \
8382 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8383 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8384 BPF_ALU64_REG(BPF_OR, R0, R1), \
8390 .stack_depth = 40, \
8392 #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
8394 "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
8395 #old " " #logic " " #update " = " #result, \
8397 BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)), \
8398 BPF_ST_MEM(width, R10, -40, old), \
8399 BPF_ATOMIC_OP(width, op, R10, R3, -40), \
8400 BPF_ALU32_REG(BPF_MOV, R0, R3), \
8405 { { 0, (op) & BPF_FETCH ? old : update } }, \
8406 .stack_depth = 40, \
8408 /* BPF_ATOMIC | BPF_W: BPF_ADD */
8409 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8410 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8411 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8412 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8413 /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
8414 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8415 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8416 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8417 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8418 /* BPF_ATOMIC | BPF_DW: BPF_ADD */
8419 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8420 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8421 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8422 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8423 /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
8424 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8425 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8426 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8427 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8428 /* BPF_ATOMIC | BPF_W: BPF_AND */
8429 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8430 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8431 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8432 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8433 /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
8434 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8435 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8436 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8437 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8438 /* BPF_ATOMIC | BPF_DW: BPF_AND */
8439 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8440 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8441 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8442 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8443 /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
8444 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8445 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8446 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8447 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8448 /* BPF_ATOMIC | BPF_W: BPF_OR */
8449 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8450 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8451 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8452 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8453 /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
8454 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8455 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8456 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8457 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8458 /* BPF_ATOMIC | BPF_DW: BPF_OR */
8459 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8460 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8461 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8462 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8463 /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
8464 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8465 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8466 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8467 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8468 /* BPF_ATOMIC | BPF_W: BPF_XOR */
8469 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8470 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8471 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8472 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8473 /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
8474 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8475 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8476 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8477 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8478 /* BPF_ATOMIC | BPF_DW: BPF_XOR */
8479 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8480 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8481 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8482 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8483 /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
8484 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8485 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8486 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8487 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8488 /* BPF_ATOMIC | BPF_W: BPF_XCHG */
8489 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8490 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8491 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8492 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8493 /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
8494 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8495 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8496 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8497 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8498 #undef BPF_ATOMIC_POISON
8499 #undef BPF_ATOMIC_OP_TEST1
8500 #undef BPF_ATOMIC_OP_TEST2
8501 #undef BPF_ATOMIC_OP_TEST3
8502 #undef BPF_ATOMIC_OP_TEST4
8503 /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
8505 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
8507 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8508 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8509 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8510 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8515 { { 0, 0x01234567 } },
8519 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
8521 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8522 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8523 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8524 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8525 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8530 { { 0, 0x89abcdef } },
8534 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
8536 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8537 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8538 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8539 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8544 { { 0, 0x01234567 } },
8548 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
8550 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8551 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8552 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8553 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8554 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8559 { { 0, 0x01234567 } },
8563 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
8565 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8566 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8567 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8568 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8569 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8570 BPF_ALU32_REG(BPF_MOV, R0, R3),
8575 { { 0, 0x89abcdef } },
8578 /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
8580 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
8582 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8583 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8584 BPF_ALU64_REG(BPF_MOV, R0, R1),
8585 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8586 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8587 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8588 BPF_ALU64_REG(BPF_SUB, R0, R1),
8597 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
8599 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8600 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8601 BPF_ALU64_REG(BPF_MOV, R0, R1),
8602 BPF_STX_MEM(BPF_DW, R10, R0, -40),
8603 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8604 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8605 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8606 BPF_ALU64_REG(BPF_SUB, R0, R2),
8615 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
8617 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8618 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8619 BPF_ALU64_REG(BPF_MOV, R0, R1),
8620 BPF_ALU64_IMM(BPF_ADD, R0, 1),
8621 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8622 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8623 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8624 BPF_ALU64_REG(BPF_SUB, R0, R1),
8633 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
8635 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8636 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8637 BPF_ALU64_REG(BPF_MOV, R0, R1),
8638 BPF_ALU64_IMM(BPF_ADD, R0, 1),
8639 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8640 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8641 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8642 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8643 BPF_ALU64_REG(BPF_SUB, R0, R1),
8652 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
8654 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8655 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8656 BPF_ALU64_REG(BPF_MOV, R0, R1),
8657 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8658 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8659 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8660 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8661 BPF_ALU64_REG(BPF_SUB, R0, R2),
8669 /* BPF_JMP32 | BPF_JEQ | BPF_K */
8671 "JMP32_JEQ_K: Small immediate",
8673 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8674 BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
8675 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
8676 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8684 "JMP32_JEQ_K: Large immediate",
8686 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8687 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
8688 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
8689 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8697 "JMP32_JEQ_K: negative immediate",
8699 BPF_ALU32_IMM(BPF_MOV, R0, -123),
8700 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
8701 BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
8702 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8709 /* BPF_JMP32 | BPF_JEQ | BPF_X */
8713 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8714 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8715 BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
8716 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8717 BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
8718 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8725 /* BPF_JMP32 | BPF_JNE | BPF_K */
8727 "JMP32_JNE_K: Small immediate",
8729 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8730 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
8731 BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
8732 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8740 "JMP32_JNE_K: Large immediate",
8742 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8743 BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
8744 BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
8745 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8753 "JMP32_JNE_K: negative immediate",
8755 BPF_ALU32_IMM(BPF_MOV, R0, -123),
8756 BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
8757 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
8758 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8765 /* BPF_JMP32 | BPF_JNE | BPF_X */
8769 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8770 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8771 BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
8772 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8773 BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
8774 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8781 /* BPF_JMP32 | BPF_JSET | BPF_K */
8783 "JMP32_JSET_K: Small immediate",
8785 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8786 BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
8787 BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
8788 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8796 "JMP32_JSET_K: Large immediate",
8798 BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
8799 BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
8800 BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
8801 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8806 { { 0, 0x40000000 } }
8809 "JMP32_JSET_K: negative immediate",
8811 BPF_ALU32_IMM(BPF_MOV, R0, -123),
8812 BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
8813 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8820 /* BPF_JMP32 | BPF_JSET | BPF_X */
8824 BPF_ALU32_IMM(BPF_MOV, R0, 8),
8825 BPF_ALU32_IMM(BPF_MOV, R1, 7),
8826 BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
8827 BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
8828 BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
8829 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8836 /* BPF_JMP32 | BPF_JGT | BPF_K */
8838 "JMP32_JGT_K: Small immediate",
8840 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8841 BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
8842 BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
8843 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8851 "JMP32_JGT_K: Large immediate",
8853 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8854 BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
8855 BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
8856 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8861 { { 0, 0xfffffffe } }
8863 /* BPF_JMP32 | BPF_JGT | BPF_X */
8867 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8868 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8869 BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
8870 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8871 BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
8872 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8877 { { 0, 0xfffffffe } }
8879 /* BPF_JMP32 | BPF_JGE | BPF_K */
8881 "JMP32_JGE_K: Small immediate",
8883 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8884 BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
8885 BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
8886 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8894 "JMP32_JGE_K: Large immediate",
8896 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8897 BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
8898 BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
8899 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8904 { { 0, 0xfffffffe } }
8906 /* BPF_JMP32 | BPF_JGE | BPF_X */
8910 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8911 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8912 BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
8913 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
8914 BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
8915 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8920 { { 0, 0xfffffffe } }
8922 /* BPF_JMP32 | BPF_JLT | BPF_K */
8924 "JMP32_JLT_K: Small immediate",
8926 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8927 BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
8928 BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
8929 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8937 "JMP32_JLT_K: Large immediate",
8939 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8940 BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
8941 BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
8942 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8947 { { 0, 0xfffffffe } }
8949 /* BPF_JMP32 | BPF_JLT | BPF_X */
8953 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8954 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8955 BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
8956 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8957 BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
8958 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8963 { { 0, 0xfffffffe } }
8965 /* BPF_JMP32 | BPF_JLE | BPF_K */
8967 "JMP32_JLE_K: Small immediate",
8969 BPF_ALU32_IMM(BPF_MOV, R0, 123),
8970 BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
8971 BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
8972 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8980 "JMP32_JLE_K: Large immediate",
8982 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8983 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
8984 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
8985 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8990 { { 0, 0xfffffffe } }
8992 /* BPF_JMP32 | BPF_JLE | BPF_X */
8996 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8997 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8998 BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
8999 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9000 BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
9001 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9006 { { 0, 0xfffffffe } }
9008 /* BPF_JMP32 | BPF_JSGT | BPF_K */
9010 "JMP32_JSGT_K: Small immediate",
9012 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9013 BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
9014 BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
9015 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9023 "JMP32_JSGT_K: Large immediate",
9025 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9026 BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
9027 BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
9028 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9033 { { 0, -12345678 } }
9035 /* BPF_JMP32 | BPF_JSGT | BPF_X */
9039 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9040 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9041 BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
9042 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9043 BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
9044 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9049 { { 0, -12345678 } }
9051 /* BPF_JMP32 | BPF_JSGE | BPF_K */
9053 "JMP32_JSGE_K: Small immediate",
9055 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9056 BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
9057 BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
9058 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9066 "JMP32_JSGE_K: Large immediate",
9068 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9069 BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
9070 BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
9071 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9076 { { 0, -12345678 } }
9078 /* BPF_JMP32 | BPF_JSGE | BPF_X */
9082 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9083 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9084 BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
9085 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9086 BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
9087 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9092 { { 0, -12345678 } }
9094 /* BPF_JMP32 | BPF_JSLT | BPF_K */
9096 "JMP32_JSLT_K: Small immediate",
9098 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9099 BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
9100 BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
9101 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9109 "JMP32_JSLT_K: Large immediate",
9111 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9112 BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
9113 BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
9114 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9119 { { 0, -12345678 } }
9121 /* BPF_JMP32 | BPF_JSLT | BPF_X */
9125 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9126 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9127 BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
9128 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9129 BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
9130 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9135 { { 0, -12345678 } }
9137 /* BPF_JMP32 | BPF_JSLE | BPF_K */
9139 "JMP32_JSLE_K: Small immediate",
9141 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9142 BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
9143 BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
9144 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9152 "JMP32_JSLE_K: Large immediate",
9154 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9155 BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
9156 BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
9157 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9162 { { 0, -12345678 } }
9164 /* BPF_JMP32 | BPF_JSLE | BPF_X */
9168 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9169 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9170 BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9171 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9172 BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9173 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9178 { { 0, -12345678 } }
9180 /* BPF_JMP | BPF_EXIT */
9184 BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
9186 BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
9192 /* BPF_JMP | BPF_JA */
9194 "JMP_JA: Unconditional jump: if (true) return 1",
9196 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9197 BPF_JMP_IMM(BPF_JA, 0, 0, 1),
9199 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9206 /* BPF_JMP | BPF_JSLT | BPF_K */
9208 "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9210 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9211 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9212 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9214 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9222 "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9224 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9225 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9226 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9228 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9235 /* BPF_JMP | BPF_JSGT | BPF_K */
9237 "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9239 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9240 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9241 BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
9243 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9251 "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9253 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9254 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9255 BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
9257 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9264 /* BPF_JMP | BPF_JSLE | BPF_K */
9266 "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9268 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9269 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9270 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9272 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9280 "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9282 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9283 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9284 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9286 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9294 "JMP_JSLE_K: Signed jump: value walk 1",
9296 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9297 BPF_LD_IMM64(R1, 3),
9298 BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
9299 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9300 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9301 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9302 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9303 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9304 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9305 BPF_EXIT_INSN(), /* bad exit */
9306 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9314 "JMP_JSLE_K: Signed jump: value walk 2",
9316 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9317 BPF_LD_IMM64(R1, 3),
9318 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9319 BPF_ALU64_IMM(BPF_SUB, R1, 2),
9320 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9321 BPF_ALU64_IMM(BPF_SUB, R1, 2),
9322 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9323 BPF_EXIT_INSN(), /* bad exit */
9324 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9331 /* BPF_JMP | BPF_JSGE | BPF_K */
9333 "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9335 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9336 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9337 BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
9339 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9347 "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9349 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9350 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9351 BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
9353 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9361 "JMP_JSGE_K: Signed jump: value walk 1",
9363 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9364 BPF_LD_IMM64(R1, -3),
9365 BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
9366 BPF_ALU64_IMM(BPF_ADD, R1, 1),
9367 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9368 BPF_ALU64_IMM(BPF_ADD, R1, 1),
9369 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9370 BPF_ALU64_IMM(BPF_ADD, R1, 1),
9371 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9372 BPF_EXIT_INSN(), /* bad exit */
9373 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9381 "JMP_JSGE_K: Signed jump: value walk 2",
9383 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9384 BPF_LD_IMM64(R1, -3),
9385 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9386 BPF_ALU64_IMM(BPF_ADD, R1, 2),
9387 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9388 BPF_ALU64_IMM(BPF_ADD, R1, 2),
9389 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9390 BPF_EXIT_INSN(), /* bad exit */
9391 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9398 /* BPF_JMP | BPF_JGT | BPF_K */
9400 "JMP_JGT_K: if (3 > 2) return 1",
9402 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9403 BPF_LD_IMM64(R1, 3),
9404 BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
9406 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9414 "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
9416 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9417 BPF_LD_IMM64(R1, -1),
9418 BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
9420 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9427 /* BPF_JMP | BPF_JLT | BPF_K */
9429 "JMP_JLT_K: if (2 < 3) return 1",
9431 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9432 BPF_LD_IMM64(R1, 2),
9433 BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
9435 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9443 "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
9445 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9446 BPF_LD_IMM64(R1, 1),
9447 BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
9449 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9456 /* BPF_JMP | BPF_JGE | BPF_K */
9458 "JMP_JGE_K: if (3 >= 2) return 1",
9460 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9461 BPF_LD_IMM64(R1, 3),
9462 BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
9464 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9471 /* BPF_JMP | BPF_JLE | BPF_K */
9473 "JMP_JLE_K: if (2 <= 3) return 1",
9475 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9476 BPF_LD_IMM64(R1, 2),
9477 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9479 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9486 /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
9488 "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
9490 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9491 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9493 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9494 BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
9495 BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
9503 "JMP_JGE_K: if (3 >= 3) return 1",
9505 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9506 BPF_LD_IMM64(R1, 3),
9507 BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
9509 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9516 /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
9518 "JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
9520 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9521 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9523 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9524 BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
9525 BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
9533 "JMP_JLE_K: if (3 <= 3) return 1",
9535 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9536 BPF_LD_IMM64(R1, 3),
9537 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9539 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9546 /* BPF_JMP | BPF_JNE | BPF_K */
9548 "JMP_JNE_K: if (3 != 2) return 1",
9550 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9551 BPF_LD_IMM64(R1, 3),
9552 BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
9554 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9561 /* BPF_JMP | BPF_JEQ | BPF_K */
9563 "JMP_JEQ_K: if (3 == 3) return 1",
9565 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9566 BPF_LD_IMM64(R1, 3),
9567 BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
9569 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9576 /* BPF_JMP | BPF_JSET | BPF_K */
9578 "JMP_JSET_K: if (0x3 & 0x2) return 1",
9580 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9581 BPF_LD_IMM64(R1, 3),
9582 BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
9584 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9592 "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
9594 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9595 BPF_LD_IMM64(R1, 3),
9596 BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
9598 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9605 /* BPF_JMP | BPF_JSGT | BPF_X */
9607 "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
9609 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9610 BPF_LD_IMM64(R1, -1),
9611 BPF_LD_IMM64(R2, -2),
9612 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9614 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9622 "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
9624 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9625 BPF_LD_IMM64(R1, -1),
9626 BPF_LD_IMM64(R2, -1),
9627 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9629 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9636 /* BPF_JMP | BPF_JSLT | BPF_X */
9638 "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
9640 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9641 BPF_LD_IMM64(R1, -1),
9642 BPF_LD_IMM64(R2, -2),
9643 BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
9645 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9653 "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
9655 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9656 BPF_LD_IMM64(R1, -1),
9657 BPF_LD_IMM64(R2, -1),
9658 BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
9660 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9667 /* BPF_JMP | BPF_JSGE | BPF_X */
9669 "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
9671 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9672 BPF_LD_IMM64(R1, -1),
9673 BPF_LD_IMM64(R2, -2),
9674 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9676 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9684 "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
9686 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9687 BPF_LD_IMM64(R1, -1),
9688 BPF_LD_IMM64(R2, -1),
9689 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9691 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9698 /* BPF_JMP | BPF_JSLE | BPF_X */
9700 "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
9702 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9703 BPF_LD_IMM64(R1, -1),
9704 BPF_LD_IMM64(R2, -2),
9705 BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
9707 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9715 "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
9717 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9718 BPF_LD_IMM64(R1, -1),
9719 BPF_LD_IMM64(R2, -1),
9720 BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
9722 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9729 /* BPF_JMP | BPF_JGT | BPF_X */
9731 "JMP_JGT_X: if (3 > 2) return 1",
9733 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9734 BPF_LD_IMM64(R1, 3),
9735 BPF_LD_IMM64(R2, 2),
9736 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9738 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9746 "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
9748 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9749 BPF_LD_IMM64(R1, -1),
9750 BPF_LD_IMM64(R2, 1),
9751 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9753 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9760 /* BPF_JMP | BPF_JLT | BPF_X */
9762 "JMP_JLT_X: if (2 < 3) return 1",
9764 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9765 BPF_LD_IMM64(R1, 3),
9766 BPF_LD_IMM64(R2, 2),
9767 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9769 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9777 "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
9779 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9780 BPF_LD_IMM64(R1, -1),
9781 BPF_LD_IMM64(R2, 1),
9782 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9784 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9791 /* BPF_JMP | BPF_JGE | BPF_X */
9793 "JMP_JGE_X: if (3 >= 2) return 1",
9795 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9796 BPF_LD_IMM64(R1, 3),
9797 BPF_LD_IMM64(R2, 2),
9798 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9800 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9808 "JMP_JGE_X: if (3 >= 3) return 1",
9810 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9811 BPF_LD_IMM64(R1, 3),
9812 BPF_LD_IMM64(R2, 3),
9813 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9815 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9822 /* BPF_JMP | BPF_JLE | BPF_X */
9824 "JMP_JLE_X: if (2 <= 3) return 1",
9826 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9827 BPF_LD_IMM64(R1, 3),
9828 BPF_LD_IMM64(R2, 2),
9829 BPF_JMP_REG(BPF_JLE, R2, R1, 1),
9831 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9839 "JMP_JLE_X: if (3 <= 3) return 1",
9841 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9842 BPF_LD_IMM64(R1, 3),
9843 BPF_LD_IMM64(R2, 3),
9844 BPF_JMP_REG(BPF_JLE, R1, R2, 1),
9846 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9854 /* Mainly testing JIT + imm64 here. */
9855 "JMP_JGE_X: ldimm64 test 1",
9857 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9858 BPF_LD_IMM64(R1, 3),
9859 BPF_LD_IMM64(R2, 2),
9860 BPF_JMP_REG(BPF_JGE, R1, R2, 2),
9861 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9862 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9867 { { 0, 0xeeeeeeeeU } },
9870 "JMP_JGE_X: ldimm64 test 2",
9872 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9873 BPF_LD_IMM64(R1, 3),
9874 BPF_LD_IMM64(R2, 2),
9875 BPF_JMP_REG(BPF_JGE, R1, R2, 0),
9876 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9881 { { 0, 0xffffffffU } },
9884 "JMP_JGE_X: ldimm64 test 3",
9886 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9887 BPF_LD_IMM64(R1, 3),
9888 BPF_LD_IMM64(R2, 2),
9889 BPF_JMP_REG(BPF_JGE, R1, R2, 4),
9890 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9891 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9899 "JMP_JLE_X: ldimm64 test 1",
9901 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9902 BPF_LD_IMM64(R1, 3),
9903 BPF_LD_IMM64(R2, 2),
9904 BPF_JMP_REG(BPF_JLE, R2, R1, 2),
9905 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9906 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9911 { { 0, 0xeeeeeeeeU } },
9914 "JMP_JLE_X: ldimm64 test 2",
9916 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9917 BPF_LD_IMM64(R1, 3),
9918 BPF_LD_IMM64(R2, 2),
9919 BPF_JMP_REG(BPF_JLE, R2, R1, 0),
9920 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9925 { { 0, 0xffffffffU } },
9928 "JMP_JLE_X: ldimm64 test 3",
9930 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9931 BPF_LD_IMM64(R1, 3),
9932 BPF_LD_IMM64(R2, 2),
9933 BPF_JMP_REG(BPF_JLE, R2, R1, 4),
9934 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9935 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9942 /* BPF_JMP | BPF_JNE | BPF_X */
9944 "JMP_JNE_X: if (3 != 2) return 1",
9946 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9947 BPF_LD_IMM64(R1, 3),
9948 BPF_LD_IMM64(R2, 2),
9949 BPF_JMP_REG(BPF_JNE, R1, R2, 1),
9951 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9958 /* BPF_JMP | BPF_JEQ | BPF_X */
9960 "JMP_JEQ_X: if (3 == 3) return 1",
9962 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9963 BPF_LD_IMM64(R1, 3),
9964 BPF_LD_IMM64(R2, 3),
9965 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
9967 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9974 /* BPF_JMP | BPF_JSET | BPF_X */
9976 "JMP_JSET_X: if (0x3 & 0x2) return 1",
9978 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9979 BPF_LD_IMM64(R1, 3),
9980 BPF_LD_IMM64(R2, 2),
9981 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9983 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9991 "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
9993 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9994 BPF_LD_IMM64(R1, 3),
9995 BPF_LD_IMM64(R2, 0xffffffff),
9996 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9998 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10006 "JMP_JA: Jump, gap, jump, ...",
10008 CLASSIC | FLAG_NO_DATA,
10010 { { 0, 0xababcbac } },
10011 .fill_helper = bpf_fill_ja,
10013 { /* Mainly checking JIT here. */
10014 "BPF_MAXINSNS: Maximum possible literals",
10016 CLASSIC | FLAG_NO_DATA,
10018 { { 0, 0xffffffff } },
10019 .fill_helper = bpf_fill_maxinsns1,
10021 { /* Mainly checking JIT here. */
10022 "BPF_MAXINSNS: Single literal",
10024 CLASSIC | FLAG_NO_DATA,
10026 { { 0, 0xfefefefe } },
10027 .fill_helper = bpf_fill_maxinsns2,
10029 { /* Mainly checking JIT here. */
10030 "BPF_MAXINSNS: Run/add until end",
10032 CLASSIC | FLAG_NO_DATA,
10034 { { 0, 0x947bf368 } },
10035 .fill_helper = bpf_fill_maxinsns3,
10038 "BPF_MAXINSNS: Too many instructions",
10040 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
10043 .fill_helper = bpf_fill_maxinsns4,
10044 .expected_errcode = -EINVAL,
10046 { /* Mainly checking JIT here. */
10047 "BPF_MAXINSNS: Very long jump",
10049 CLASSIC | FLAG_NO_DATA,
10051 { { 0, 0xabababab } },
10052 .fill_helper = bpf_fill_maxinsns5,
10054 { /* Mainly checking JIT here. */
10055 "BPF_MAXINSNS: Ctx heavy transformations",
10060 { 1, SKB_VLAN_PRESENT },
10061 { 10, SKB_VLAN_PRESENT }
10063 .fill_helper = bpf_fill_maxinsns6,
10065 { /* Mainly checking JIT here. */
10066 "BPF_MAXINSNS: Call heavy transformations",
10068 CLASSIC | FLAG_NO_DATA,
10070 { { 1, 0 }, { 10, 0 } },
10071 .fill_helper = bpf_fill_maxinsns7,
10073 { /* Mainly checking JIT here. */
10074 "BPF_MAXINSNS: Jump heavy test",
10076 CLASSIC | FLAG_NO_DATA,
10078 { { 0, 0xffffffff } },
10079 .fill_helper = bpf_fill_maxinsns8,
10081 { /* Mainly checking JIT here. */
10082 "BPF_MAXINSNS: Very long jump backwards",
10084 INTERNAL | FLAG_NO_DATA,
10086 { { 0, 0xcbababab } },
10087 .fill_helper = bpf_fill_maxinsns9,
10089 { /* Mainly checking JIT here. */
10090 "BPF_MAXINSNS: Edge hopping nuthouse",
10092 INTERNAL | FLAG_NO_DATA,
10094 { { 0, 0xabababac } },
10095 .fill_helper = bpf_fill_maxinsns10,
10098 "BPF_MAXINSNS: Jump, gap, jump, ...",
10100 CLASSIC | FLAG_NO_DATA,
10102 { { 0, 0xababcbac } },
10103 .fill_helper = bpf_fill_maxinsns11,
10106 "BPF_MAXINSNS: jump over MSH",
10108 CLASSIC | FLAG_EXPECTED_FAIL,
10109 { 0xfa, 0xfb, 0xfc, 0xfd, },
10110 { { 4, 0xabababab } },
10111 .fill_helper = bpf_fill_maxinsns12,
10112 .expected_errcode = -EINVAL,
10115 "BPF_MAXINSNS: exec all MSH",
10118 { 0xfa, 0xfb, 0xfc, 0xfd, },
10119 { { 4, 0xababab83 } },
10120 .fill_helper = bpf_fill_maxinsns13,
10123 "BPF_MAXINSNS: ld_abs+get_processor_id",
10128 .fill_helper = bpf_fill_ld_abs_get_processor_id,
10131 * LD_IND / LD_ABS on fragmented SKBs
10134 "LD_IND byte frag",
10136 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10137 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10138 BPF_STMT(BPF_RET | BPF_A, 0x0),
10140 CLASSIC | FLAG_SKB_FRAG,
10144 0x42, 0x00, 0x00, 0x00,
10145 0x43, 0x44, 0x00, 0x00,
10146 0x21, 0x07, 0x19, 0x83,
10150 "LD_IND halfword frag",
10152 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10153 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10154 BPF_STMT(BPF_RET | BPF_A, 0x0),
10156 CLASSIC | FLAG_SKB_FRAG,
10158 { {0x40, 0x4344} },
10160 0x42, 0x00, 0x00, 0x00,
10161 0x43, 0x44, 0x00, 0x00,
10162 0x21, 0x07, 0x19, 0x83,
10166 "LD_IND word frag",
10168 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10169 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10170 BPF_STMT(BPF_RET | BPF_A, 0x0),
10172 CLASSIC | FLAG_SKB_FRAG,
10174 { {0x40, 0x21071983} },
10176 0x42, 0x00, 0x00, 0x00,
10177 0x43, 0x44, 0x00, 0x00,
10178 0x21, 0x07, 0x19, 0x83,
10182 "LD_IND halfword mixed head/frag",
10184 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10185 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10186 BPF_STMT(BPF_RET | BPF_A, 0x0),
10188 CLASSIC | FLAG_SKB_FRAG,
10189 { [0x3e] = 0x25, [0x3f] = 0x05, },
10190 { {0x40, 0x0519} },
10191 .frag_data = { 0x19, 0x82 },
10194 "LD_IND word mixed head/frag",
10196 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10197 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10198 BPF_STMT(BPF_RET | BPF_A, 0x0),
10200 CLASSIC | FLAG_SKB_FRAG,
10201 { [0x3e] = 0x25, [0x3f] = 0x05, },
10202 { {0x40, 0x25051982} },
10203 .frag_data = { 0x19, 0x82 },
10206 "LD_ABS byte frag",
10208 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10209 BPF_STMT(BPF_RET | BPF_A, 0x0),
10211 CLASSIC | FLAG_SKB_FRAG,
10215 0x42, 0x00, 0x00, 0x00,
10216 0x43, 0x44, 0x00, 0x00,
10217 0x21, 0x07, 0x19, 0x83,
10221 "LD_ABS halfword frag",
10223 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10224 BPF_STMT(BPF_RET | BPF_A, 0x0),
10226 CLASSIC | FLAG_SKB_FRAG,
10228 { {0x40, 0x4344} },
10230 0x42, 0x00, 0x00, 0x00,
10231 0x43, 0x44, 0x00, 0x00,
10232 0x21, 0x07, 0x19, 0x83,
10236 "LD_ABS word frag",
10238 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10239 BPF_STMT(BPF_RET | BPF_A, 0x0),
10241 CLASSIC | FLAG_SKB_FRAG,
10243 { {0x40, 0x21071983} },
10245 0x42, 0x00, 0x00, 0x00,
10246 0x43, 0x44, 0x00, 0x00,
10247 0x21, 0x07, 0x19, 0x83,
10251 "LD_ABS halfword mixed head/frag",
10253 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10254 BPF_STMT(BPF_RET | BPF_A, 0x0),
10256 CLASSIC | FLAG_SKB_FRAG,
10257 { [0x3e] = 0x25, [0x3f] = 0x05, },
10258 { {0x40, 0x0519} },
10259 .frag_data = { 0x19, 0x82 },
10262 "LD_ABS word mixed head/frag",
10264 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10265 BPF_STMT(BPF_RET | BPF_A, 0x0),
10267 CLASSIC | FLAG_SKB_FRAG,
10268 { [0x3e] = 0x25, [0x3f] = 0x05, },
10269 { {0x40, 0x25051982} },
10270 .frag_data = { 0x19, 0x82 },
10273 * LD_IND / LD_ABS on non fragmented SKBs
10277 * this tests that the JIT/interpreter correctly resets X
10278 * before using it in an LD_IND instruction.
10280 "LD_IND byte default X",
10282 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10283 BPF_STMT(BPF_RET | BPF_A, 0x0),
10290 "LD_IND byte positive offset",
10292 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10293 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10294 BPF_STMT(BPF_RET | BPF_A, 0x0),
10297 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10301 "LD_IND byte negative offset",
10303 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10304 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10305 BPF_STMT(BPF_RET | BPF_A, 0x0),
10308 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10312 "LD_IND byte positive offset, all ff",
10314 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10315 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10316 BPF_STMT(BPF_RET | BPF_A, 0x0),
10319 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10323 "LD_IND byte positive offset, out of bounds",
10325 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10326 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10327 BPF_STMT(BPF_RET | BPF_A, 0x0),
10330 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10334 "LD_IND byte negative offset, out of bounds",
10336 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10337 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10338 BPF_STMT(BPF_RET | BPF_A, 0x0),
10341 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10345 "LD_IND byte negative offset, multiple calls",
10347 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10348 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10349 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10350 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10351 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10352 BPF_STMT(BPF_RET | BPF_A, 0x0),
10355 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10356 { {0x40, 0x82 }, },
10359 "LD_IND halfword positive offset",
10361 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10362 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
10363 BPF_STMT(BPF_RET | BPF_A, 0x0),
10367 [0x1c] = 0xaa, [0x1d] = 0x55,
10368 [0x1e] = 0xbb, [0x1f] = 0x66,
10369 [0x20] = 0xcc, [0x21] = 0x77,
10370 [0x22] = 0xdd, [0x23] = 0x88,
10372 { {0x40, 0xdd88 } },
10375 "LD_IND halfword negative offset",
10377 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10378 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
10379 BPF_STMT(BPF_RET | BPF_A, 0x0),
10383 [0x1c] = 0xaa, [0x1d] = 0x55,
10384 [0x1e] = 0xbb, [0x1f] = 0x66,
10385 [0x20] = 0xcc, [0x21] = 0x77,
10386 [0x22] = 0xdd, [0x23] = 0x88,
10388 { {0x40, 0xbb66 } },
10391 "LD_IND halfword unaligned",
10393 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10394 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10395 BPF_STMT(BPF_RET | BPF_A, 0x0),
10399 [0x1c] = 0xaa, [0x1d] = 0x55,
10400 [0x1e] = 0xbb, [0x1f] = 0x66,
10401 [0x20] = 0xcc, [0x21] = 0x77,
10402 [0x22] = 0xdd, [0x23] = 0x88,
10404 { {0x40, 0x66cc } },
10407 "LD_IND halfword positive offset, all ff",
10409 BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
10410 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10411 BPF_STMT(BPF_RET | BPF_A, 0x0),
10414 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10415 { {0x40, 0xffff } },
10418 "LD_IND halfword positive offset, out of bounds",
10420 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10421 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10422 BPF_STMT(BPF_RET | BPF_A, 0x0),
10425 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10429 "LD_IND halfword negative offset, out of bounds",
10431 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10432 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
10433 BPF_STMT(BPF_RET | BPF_A, 0x0),
10436 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10440 "LD_IND word positive offset",
10442 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10443 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
10444 BPF_STMT(BPF_RET | BPF_A, 0x0),
10448 [0x1c] = 0xaa, [0x1d] = 0x55,
10449 [0x1e] = 0xbb, [0x1f] = 0x66,
10450 [0x20] = 0xcc, [0x21] = 0x77,
10451 [0x22] = 0xdd, [0x23] = 0x88,
10452 [0x24] = 0xee, [0x25] = 0x99,
10453 [0x26] = 0xff, [0x27] = 0xaa,
10455 { {0x40, 0xee99ffaa } },
10458 "LD_IND word negative offset",
10460 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10461 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
10462 BPF_STMT(BPF_RET | BPF_A, 0x0),
10466 [0x1c] = 0xaa, [0x1d] = 0x55,
10467 [0x1e] = 0xbb, [0x1f] = 0x66,
10468 [0x20] = 0xcc, [0x21] = 0x77,
10469 [0x22] = 0xdd, [0x23] = 0x88,
10470 [0x24] = 0xee, [0x25] = 0x99,
10471 [0x26] = 0xff, [0x27] = 0xaa,
10473 { {0x40, 0xaa55bb66 } },
10476 "LD_IND word unaligned (addr & 3 == 2)",
10478 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10479 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10480 BPF_STMT(BPF_RET | BPF_A, 0x0),
10484 [0x1c] = 0xaa, [0x1d] = 0x55,
10485 [0x1e] = 0xbb, [0x1f] = 0x66,
10486 [0x20] = 0xcc, [0x21] = 0x77,
10487 [0x22] = 0xdd, [0x23] = 0x88,
10488 [0x24] = 0xee, [0x25] = 0x99,
10489 [0x26] = 0xff, [0x27] = 0xaa,
10491 { {0x40, 0xbb66cc77 } },
10494 "LD_IND word unaligned (addr & 3 == 1)",
10496 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10497 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
10498 BPF_STMT(BPF_RET | BPF_A, 0x0),
10502 [0x1c] = 0xaa, [0x1d] = 0x55,
10503 [0x1e] = 0xbb, [0x1f] = 0x66,
10504 [0x20] = 0xcc, [0x21] = 0x77,
10505 [0x22] = 0xdd, [0x23] = 0x88,
10506 [0x24] = 0xee, [0x25] = 0x99,
10507 [0x26] = 0xff, [0x27] = 0xaa,
10509 { {0x40, 0x55bb66cc } },
10512 "LD_IND word unaligned (addr & 3 == 3)",
10514 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10515 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
10516 BPF_STMT(BPF_RET | BPF_A, 0x0),
10520 [0x1c] = 0xaa, [0x1d] = 0x55,
10521 [0x1e] = 0xbb, [0x1f] = 0x66,
10522 [0x20] = 0xcc, [0x21] = 0x77,
10523 [0x22] = 0xdd, [0x23] = 0x88,
10524 [0x24] = 0xee, [0x25] = 0x99,
10525 [0x26] = 0xff, [0x27] = 0xaa,
10527 { {0x40, 0x66cc77dd } },
10530 "LD_IND word positive offset, all ff",
10532 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10533 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10534 BPF_STMT(BPF_RET | BPF_A, 0x0),
10537 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10538 { {0x40, 0xffffffff } },
10541 "LD_IND word positive offset, out of bounds",
10543 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10544 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10545 BPF_STMT(BPF_RET | BPF_A, 0x0),
10548 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10552 "LD_IND word negative offset, out of bounds",
10554 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10555 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
10556 BPF_STMT(BPF_RET | BPF_A, 0x0),
10559 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10565 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
10566 BPF_STMT(BPF_RET | BPF_A, 0x0),
10570 [0x1c] = 0xaa, [0x1d] = 0x55,
10571 [0x1e] = 0xbb, [0x1f] = 0x66,
10572 [0x20] = 0xcc, [0x21] = 0x77,
10573 [0x22] = 0xdd, [0x23] = 0x88,
10574 [0x24] = 0xee, [0x25] = 0x99,
10575 [0x26] = 0xff, [0x27] = 0xaa,
10580 "LD_ABS byte positive offset, all ff",
10582 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10583 BPF_STMT(BPF_RET | BPF_A, 0x0),
10586 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10590 "LD_ABS byte positive offset, out of bounds",
10592 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10593 BPF_STMT(BPF_RET | BPF_A, 0x0),
10596 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10600 "LD_ABS byte negative offset, out of bounds load",
10602 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
10603 BPF_STMT(BPF_RET | BPF_A, 0x0),
10605 CLASSIC | FLAG_EXPECTED_FAIL,
10606 .expected_errcode = -EINVAL,
10609 "LD_ABS byte negative offset, in bounds",
10611 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10612 BPF_STMT(BPF_RET | BPF_A, 0x0),
10615 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10616 { {0x40, 0x82 }, },
10619 "LD_ABS byte negative offset, out of bounds",
10621 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10622 BPF_STMT(BPF_RET | BPF_A, 0x0),
10625 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10629 "LD_ABS byte negative offset, multiple calls",
10631 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
10632 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
10633 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
10634 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10635 BPF_STMT(BPF_RET | BPF_A, 0x0),
10638 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10639 { {0x40, 0x82 }, },
10644 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
10645 BPF_STMT(BPF_RET | BPF_A, 0x0),
10649 [0x1c] = 0xaa, [0x1d] = 0x55,
10650 [0x1e] = 0xbb, [0x1f] = 0x66,
10651 [0x20] = 0xcc, [0x21] = 0x77,
10652 [0x22] = 0xdd, [0x23] = 0x88,
10653 [0x24] = 0xee, [0x25] = 0x99,
10654 [0x26] = 0xff, [0x27] = 0xaa,
10656 { {0x40, 0xdd88 } },
10659 "LD_ABS halfword unaligned",
10661 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
10662 BPF_STMT(BPF_RET | BPF_A, 0x0),
10666 [0x1c] = 0xaa, [0x1d] = 0x55,
10667 [0x1e] = 0xbb, [0x1f] = 0x66,
10668 [0x20] = 0xcc, [0x21] = 0x77,
10669 [0x22] = 0xdd, [0x23] = 0x88,
10670 [0x24] = 0xee, [0x25] = 0x99,
10671 [0x26] = 0xff, [0x27] = 0xaa,
10673 { {0x40, 0x99ff } },
10676 "LD_ABS halfword positive offset, all ff",
10678 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
10679 BPF_STMT(BPF_RET | BPF_A, 0x0),
10682 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10683 { {0x40, 0xffff } },
10686 "LD_ABS halfword positive offset, out of bounds",
10688 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10689 BPF_STMT(BPF_RET | BPF_A, 0x0),
10692 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10696 "LD_ABS halfword negative offset, out of bounds load",
10698 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
10699 BPF_STMT(BPF_RET | BPF_A, 0x0),
10701 CLASSIC | FLAG_EXPECTED_FAIL,
10702 .expected_errcode = -EINVAL,
10705 "LD_ABS halfword negative offset, in bounds",
10707 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10708 BPF_STMT(BPF_RET | BPF_A, 0x0),
10711 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10712 { {0x40, 0x1982 }, },
10715 "LD_ABS halfword negative offset, out of bounds",
10717 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10718 BPF_STMT(BPF_RET | BPF_A, 0x0),
10721 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10727 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
10728 BPF_STMT(BPF_RET | BPF_A, 0x0),
10732 [0x1c] = 0xaa, [0x1d] = 0x55,
10733 [0x1e] = 0xbb, [0x1f] = 0x66,
10734 [0x20] = 0xcc, [0x21] = 0x77,
10735 [0x22] = 0xdd, [0x23] = 0x88,
10736 [0x24] = 0xee, [0x25] = 0x99,
10737 [0x26] = 0xff, [0x27] = 0xaa,
10739 { {0x40, 0xaa55bb66 } },
10742 "LD_ABS word unaligned (addr & 3 == 2)",
10744 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
10745 BPF_STMT(BPF_RET | BPF_A, 0x0),
10749 [0x1c] = 0xaa, [0x1d] = 0x55,
10750 [0x1e] = 0xbb, [0x1f] = 0x66,
10751 [0x20] = 0xcc, [0x21] = 0x77,
10752 [0x22] = 0xdd, [0x23] = 0x88,
10753 [0x24] = 0xee, [0x25] = 0x99,
10754 [0x26] = 0xff, [0x27] = 0xaa,
10756 { {0x40, 0xdd88ee99 } },
10759 "LD_ABS word unaligned (addr & 3 == 1)",
10761 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
10762 BPF_STMT(BPF_RET | BPF_A, 0x0),
10766 [0x1c] = 0xaa, [0x1d] = 0x55,
10767 [0x1e] = 0xbb, [0x1f] = 0x66,
10768 [0x20] = 0xcc, [0x21] = 0x77,
10769 [0x22] = 0xdd, [0x23] = 0x88,
10770 [0x24] = 0xee, [0x25] = 0x99,
10771 [0x26] = 0xff, [0x27] = 0xaa,
10773 { {0x40, 0x77dd88ee } },
10776 "LD_ABS word unaligned (addr & 3 == 3)",
10778 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
10779 BPF_STMT(BPF_RET | BPF_A, 0x0),
10783 [0x1c] = 0xaa, [0x1d] = 0x55,
10784 [0x1e] = 0xbb, [0x1f] = 0x66,
10785 [0x20] = 0xcc, [0x21] = 0x77,
10786 [0x22] = 0xdd, [0x23] = 0x88,
10787 [0x24] = 0xee, [0x25] = 0x99,
10788 [0x26] = 0xff, [0x27] = 0xaa,
10790 { {0x40, 0x88ee99ff } },
10793 "LD_ABS word positive offset, all ff",
10795 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
10796 BPF_STMT(BPF_RET | BPF_A, 0x0),
10799 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10800 { {0x40, 0xffffffff } },
10803 "LD_ABS word positive offset, out of bounds",
10805 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
10806 BPF_STMT(BPF_RET | BPF_A, 0x0),
10809 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10813 "LD_ABS word negative offset, out of bounds load",
10815 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
10816 BPF_STMT(BPF_RET | BPF_A, 0x0),
10818 CLASSIC | FLAG_EXPECTED_FAIL,
10819 .expected_errcode = -EINVAL,
10822 "LD_ABS word negative offset, in bounds",
10824 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10825 BPF_STMT(BPF_RET | BPF_A, 0x0),
10828 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10829 { {0x40, 0x25051982 }, },
10832 "LD_ABS word negative offset, out of bounds",
10834 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10835 BPF_STMT(BPF_RET | BPF_A, 0x0),
10838 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10842 "LDX_MSH standalone, preserved A",
10844 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10845 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10846 BPF_STMT(BPF_RET | BPF_A, 0x0),
10849 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10850 { {0x40, 0xffeebbaa }, },
10853 "LDX_MSH standalone, preserved A 2",
10855 BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
10856 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10857 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
10858 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10859 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
10860 BPF_STMT(BPF_RET | BPF_A, 0x0),
10863 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10864 { {0x40, 0x175e9d63 }, },
10867 "LDX_MSH standalone, test result 1",
10869 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10870 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10871 BPF_STMT(BPF_MISC | BPF_TXA, 0),
10872 BPF_STMT(BPF_RET | BPF_A, 0x0),
10875 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10876 { {0x40, 0x14 }, },
10879 "LDX_MSH standalone, test result 2",
10881 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10882 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10883 BPF_STMT(BPF_MISC | BPF_TXA, 0),
10884 BPF_STMT(BPF_RET | BPF_A, 0x0),
10887 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10888 { {0x40, 0x24 }, },
10891 "LDX_MSH standalone, negative offset",
10893 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10894 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
10895 BPF_STMT(BPF_MISC | BPF_TXA, 0),
10896 BPF_STMT(BPF_RET | BPF_A, 0x0),
10899 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10903 "LDX_MSH standalone, negative offset 2",
10905 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10906 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
10907 BPF_STMT(BPF_MISC | BPF_TXA, 0),
10908 BPF_STMT(BPF_RET | BPF_A, 0x0),
10911 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10912 { {0x40, 0x24 }, },
10915 "LDX_MSH standalone, out of bounds",
10917 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10918 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
10919 BPF_STMT(BPF_MISC | BPF_TXA, 0),
10920 BPF_STMT(BPF_RET | BPF_A, 0x0),
10923 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10927 * verify that the interpreter or JIT correctly sets A and X
10938 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10939 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
10940 BPF_STMT(BPF_RET | BPF_A, 0x0),
10942 CLASSIC | FLAG_NO_DATA,
10953 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
10954 BPF_STMT(BPF_RET | BPF_A, 0x0),
10956 CLASSIC | FLAG_NO_DATA,
10968 BPF_STMT(BPF_LD | BPF_IMM, 0x66),
10969 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
10970 BPF_STMT(BPF_RET | BPF_A, 0x0),
10972 CLASSIC | FLAG_NO_DATA,
10983 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
10984 BPF_STMT(BPF_RET | BPF_A, 0x0),
10986 CLASSIC | FLAG_NO_DATA,
10998 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10999 BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
11000 BPF_STMT(BPF_RET | BPF_A, 0x0),
11002 CLASSIC | FLAG_NO_DATA,
11013 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
11014 BPF_STMT(BPF_RET | BPF_A, 0x0),
11016 CLASSIC | FLAG_NO_DATA,
11025 * A = A / X ; this halts the filter execution if X is 0
11028 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11029 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
11030 BPF_STMT(BPF_RET | BPF_K, 0x42),
11032 CLASSIC | FLAG_NO_DATA,
11043 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
11044 BPF_STMT(BPF_RET | BPF_A, 0x0),
11046 CLASSIC | FLAG_NO_DATA,
11055 * A = A mod X ; this halts the filter execution if X is 0
11058 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11059 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
11060 BPF_STMT(BPF_RET | BPF_K, 0x42),
11062 CLASSIC | FLAG_NO_DATA,
11073 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
11074 BPF_STMT(BPF_RET | BPF_A, 0x0),
11076 CLASSIC | FLAG_NO_DATA,
11081 "JMP EQ default A",
11088 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11089 BPF_STMT(BPF_RET | BPF_K, 0x42),
11090 BPF_STMT(BPF_RET | BPF_K, 0x66),
11092 CLASSIC | FLAG_NO_DATA,
11097 "JMP EQ default X",
11105 BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11106 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11107 BPF_STMT(BPF_RET | BPF_K, 0x42),
11108 BPF_STMT(BPF_RET | BPF_K, 0x66),
11110 CLASSIC | FLAG_NO_DATA,
11114 /* Checking interpreter vs JIT wrt signed extended imms. */
11116 "JNE signed compare, test 1",
11118 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11119 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11120 BPF_MOV64_REG(R2, R1),
11121 BPF_ALU64_REG(BPF_AND, R2, R3),
11122 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11123 BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11124 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11132 "JNE signed compare, test 2",
11134 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11135 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11136 BPF_MOV64_REG(R2, R1),
11137 BPF_ALU64_REG(BPF_AND, R2, R3),
11138 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11139 BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11140 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11148 "JNE signed compare, test 3",
11150 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11151 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11152 BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11153 BPF_MOV64_REG(R2, R1),
11154 BPF_ALU64_REG(BPF_AND, R2, R3),
11155 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11156 BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11157 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11165 "JNE signed compare, test 4",
11167 BPF_LD_IMM64(R1, -17104896),
11168 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11169 BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11170 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11178 "JNE signed compare, test 5",
11180 BPF_LD_IMM64(R1, 0xfefb0000),
11181 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11182 BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11183 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11191 "JNE signed compare, test 6",
11193 BPF_LD_IMM64(R1, 0x7efb0000),
11194 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11195 BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11196 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11204 "JNE signed compare, test 7",
11206 BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11207 BPF_STMT(BPF_MISC | BPF_TAX, 0),
11208 BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11209 BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11210 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11211 BPF_STMT(BPF_RET | BPF_K, 1),
11212 BPF_STMT(BPF_RET | BPF_K, 2),
11214 CLASSIC | FLAG_NO_DATA,
11218 /* BPF_LDX_MEM with operand aliasing */
11220 "LDX_MEM_B: operand register aliasing",
11222 BPF_ST_MEM(BPF_B, R10, -8, 123),
11223 BPF_MOV64_REG(R0, R10),
11224 BPF_LDX_MEM(BPF_B, R0, R0, -8),
11233 "LDX_MEM_H: operand register aliasing",
11235 BPF_ST_MEM(BPF_H, R10, -8, 12345),
11236 BPF_MOV64_REG(R0, R10),
11237 BPF_LDX_MEM(BPF_H, R0, R0, -8),
11246 "LDX_MEM_W: operand register aliasing",
11248 BPF_ST_MEM(BPF_W, R10, -8, 123456789),
11249 BPF_MOV64_REG(R0, R10),
11250 BPF_LDX_MEM(BPF_W, R0, R0, -8),
11255 { { 0, 123456789 } },
11259 "LDX_MEM_DW: operand register aliasing",
11261 BPF_LD_IMM64(R1, 0x123456789abcdefULL),
11262 BPF_STX_MEM(BPF_DW, R10, R1, -8),
11263 BPF_MOV64_REG(R0, R10),
11264 BPF_LDX_MEM(BPF_DW, R0, R0, -8),
11265 BPF_ALU64_REG(BPF_SUB, R0, R1),
11266 BPF_MOV64_REG(R1, R0),
11267 BPF_ALU64_IMM(BPF_RSH, R1, 32),
11268 BPF_ALU64_REG(BPF_OR, R0, R1),
11277 * Register (non-)clobbering tests for the case where a JIT implements
11278 * complex ALU or ATOMIC operations via function calls. If so, the
11279 * function call must be transparent to the eBPF registers. The JIT
11280 * must therefore save and restore relevant registers across the call.
11281 * The following tests check that the eBPF registers retain their
11282 * values after such an operation. Mainly intended for complex ALU
11283 * and atomic operation, but we run it for all. You never know...
11285 * Note that each operation should be tested twice with different
11286 * destinations, to check preservation for all registers.
11288 #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src) \
11290 #alu "_" #op " to " #dst ": no clobbering", \
11292 BPF_ALU64_IMM(BPF_MOV, R0, R0), \
11293 BPF_ALU64_IMM(BPF_MOV, R1, R1), \
11294 BPF_ALU64_IMM(BPF_MOV, R2, R2), \
11295 BPF_ALU64_IMM(BPF_MOV, R3, R3), \
11296 BPF_ALU64_IMM(BPF_MOV, R4, R4), \
11297 BPF_ALU64_IMM(BPF_MOV, R5, R5), \
11298 BPF_ALU64_IMM(BPF_MOV, R6, R6), \
11299 BPF_ALU64_IMM(BPF_MOV, R7, R7), \
11300 BPF_ALU64_IMM(BPF_MOV, R8, R8), \
11301 BPF_ALU64_IMM(BPF_MOV, R9, R9), \
11302 BPF_##alu(BPF_ ##op, dst, src), \
11303 BPF_ALU32_IMM(BPF_MOV, dst, dst), \
11304 BPF_JMP_IMM(BPF_JNE, R0, R0, 10), \
11305 BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
11306 BPF_JMP_IMM(BPF_JNE, R2, R2, 8), \
11307 BPF_JMP_IMM(BPF_JNE, R3, R3, 7), \
11308 BPF_JMP_IMM(BPF_JNE, R4, R4, 6), \
11309 BPF_JMP_IMM(BPF_JNE, R5, R5, 5), \
11310 BPF_JMP_IMM(BPF_JNE, R6, R6, 4), \
11311 BPF_JMP_IMM(BPF_JNE, R7, R7, 3), \
11312 BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
11313 BPF_JMP_IMM(BPF_JNE, R9, R9, 1), \
11314 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
11321 /* ALU64 operations, register clobbering */
11322 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11323 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11324 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11325 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11326 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11327 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11328 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11329 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11330 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11331 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11332 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11333 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11334 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11335 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11336 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11337 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11338 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11339 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11340 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11341 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11342 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11343 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11344 /* ALU32 immediate operations, register clobbering */
11345 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11346 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11347 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11348 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11349 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11350 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11351 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11352 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11353 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
11354 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
11355 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
11356 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
11357 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
11358 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
11359 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
11360 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
11361 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
11362 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
11363 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
11364 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
11365 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
11366 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
11367 /* ALU64 register operations, register clobbering */
11368 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
11369 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
11370 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
11371 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
11372 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
11373 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
11374 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
11375 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
11376 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
11377 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
11378 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
11379 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
11380 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
11381 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
11382 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
11383 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
11384 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
11385 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
11386 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
11387 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
11388 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
11389 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
11390 /* ALU32 register operations, register clobbering */
11391 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
11392 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
11393 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
11394 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
11395 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
11396 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
11397 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
11398 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
11399 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
11400 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
11401 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
11402 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
11403 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
11404 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
11405 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
11406 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
11407 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
11408 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
11409 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
11410 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
11411 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
11412 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
11413 #undef BPF_TEST_CLOBBER_ALU
11414 #define BPF_TEST_CLOBBER_ATOMIC(width, op) \
11416 "Atomic_" #width " " #op ": no clobbering", \
11418 BPF_ALU64_IMM(BPF_MOV, R0, 0), \
11419 BPF_ALU64_IMM(BPF_MOV, R1, 1), \
11420 BPF_ALU64_IMM(BPF_MOV, R2, 2), \
11421 BPF_ALU64_IMM(BPF_MOV, R3, 3), \
11422 BPF_ALU64_IMM(BPF_MOV, R4, 4), \
11423 BPF_ALU64_IMM(BPF_MOV, R5, 5), \
11424 BPF_ALU64_IMM(BPF_MOV, R6, 6), \
11425 BPF_ALU64_IMM(BPF_MOV, R7, 7), \
11426 BPF_ALU64_IMM(BPF_MOV, R8, 8), \
11427 BPF_ALU64_IMM(BPF_MOV, R9, 9), \
11428 BPF_ST_MEM(width, R10, -8, \
11429 (op) == BPF_CMPXCHG ? 0 : \
11430 (op) & BPF_FETCH ? 1 : 0), \
11431 BPF_ATOMIC_OP(width, op, R10, R1, -8), \
11432 BPF_JMP_IMM(BPF_JNE, R0, 0, 10), \
11433 BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
11434 BPF_JMP_IMM(BPF_JNE, R2, 2, 8), \
11435 BPF_JMP_IMM(BPF_JNE, R3, 3, 7), \
11436 BPF_JMP_IMM(BPF_JNE, R4, 4, 6), \
11437 BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
11438 BPF_JMP_IMM(BPF_JNE, R6, 6, 4), \
11439 BPF_JMP_IMM(BPF_JNE, R7, 7, 3), \
11440 BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
11441 BPF_JMP_IMM(BPF_JNE, R9, 9, 1), \
11442 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
11448 .stack_depth = 8, \
11450 /* 64-bit atomic operations, register clobbering */
11451 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
11452 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
11453 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
11454 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
11455 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
11456 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
11457 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
11458 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
11459 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
11460 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
11461 /* 32-bit atomic operations, register clobbering */
11462 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
11463 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
11464 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
11465 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
11466 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
11467 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
11468 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
11469 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
11470 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
11471 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
11472 #undef BPF_TEST_CLOBBER_ATOMIC
11473 /* Checking that ALU32 src is not zero extended in place */
11474 #define BPF_ALU32_SRC_ZEXT(op) \
11476 "ALU32_" #op "_X: src preserved in zext", \
11478 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
11479 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
11480 BPF_ALU64_REG(BPF_MOV, R0, R1), \
11481 BPF_ALU32_REG(BPF_##op, R2, R1), \
11482 BPF_ALU64_REG(BPF_SUB, R0, R1), \
11483 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11484 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
11485 BPF_ALU64_REG(BPF_OR, R0, R1), \
11492 BPF_ALU32_SRC_ZEXT(MOV),
11493 BPF_ALU32_SRC_ZEXT(AND),
11494 BPF_ALU32_SRC_ZEXT(OR),
11495 BPF_ALU32_SRC_ZEXT(XOR),
11496 BPF_ALU32_SRC_ZEXT(ADD),
11497 BPF_ALU32_SRC_ZEXT(SUB),
11498 BPF_ALU32_SRC_ZEXT(MUL),
11499 BPF_ALU32_SRC_ZEXT(DIV),
11500 BPF_ALU32_SRC_ZEXT(MOD),
11501 #undef BPF_ALU32_SRC_ZEXT
11502 /* Checking that ATOMIC32 src is not zero extended in place */
11503 #define BPF_ATOMIC32_SRC_ZEXT(op) \
11505 "ATOMIC_W_" #op ": src preserved in zext", \
11507 BPF_LD_IMM64(R0, 0x0123456789acbdefULL), \
11508 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11509 BPF_ST_MEM(BPF_W, R10, -4, 0), \
11510 BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4), \
11511 BPF_ALU64_REG(BPF_SUB, R0, R1), \
11512 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11513 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
11514 BPF_ALU64_REG(BPF_OR, R0, R1), \
11520 .stack_depth = 8, \
11522 BPF_ATOMIC32_SRC_ZEXT(ADD),
11523 BPF_ATOMIC32_SRC_ZEXT(AND),
11524 BPF_ATOMIC32_SRC_ZEXT(OR),
11525 BPF_ATOMIC32_SRC_ZEXT(XOR),
11526 #undef BPF_ATOMIC32_SRC_ZEXT
11527 /* Checking that CMPXCHG32 src is not zero extended in place */
11529 "ATOMIC_W_CMPXCHG: src preserved in zext",
11531 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
11532 BPF_ALU64_REG(BPF_MOV, R2, R1),
11533 BPF_ALU64_REG(BPF_MOV, R0, 0),
11534 BPF_ST_MEM(BPF_W, R10, -4, 0),
11535 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
11536 BPF_ALU64_REG(BPF_SUB, R1, R2),
11537 BPF_ALU64_REG(BPF_MOV, R2, R1),
11538 BPF_ALU64_IMM(BPF_RSH, R2, 32),
11539 BPF_ALU64_REG(BPF_OR, R1, R2),
11540 BPF_ALU64_REG(BPF_MOV, R0, R1),
11548 /* Checking that JMP32 immediate src is not zero extended in place */
11549 #define BPF_JMP32_IMM_ZEXT(op) \
11551 "JMP32_" #op "_K: operand preserved in zext", \
11553 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11554 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11555 BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
11556 BPF_JMP_A(0), /* Nop */ \
11557 BPF_ALU64_REG(BPF_SUB, R0, R1), \
11558 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11559 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
11560 BPF_ALU64_REG(BPF_OR, R0, R1), \
11567 BPF_JMP32_IMM_ZEXT(JEQ),
11568 BPF_JMP32_IMM_ZEXT(JNE),
11569 BPF_JMP32_IMM_ZEXT(JSET),
11570 BPF_JMP32_IMM_ZEXT(JGT),
11571 BPF_JMP32_IMM_ZEXT(JGE),
11572 BPF_JMP32_IMM_ZEXT(JLT),
11573 BPF_JMP32_IMM_ZEXT(JLE),
11574 BPF_JMP32_IMM_ZEXT(JSGT),
11575 BPF_JMP32_IMM_ZEXT(JSGE),
11576 BPF_JMP32_IMM_ZEXT(JSGT),
11577 BPF_JMP32_IMM_ZEXT(JSLT),
11578 BPF_JMP32_IMM_ZEXT(JSLE),
11579 #undef BPF_JMP2_IMM_ZEXT
11580 /* Checking that JMP32 dst & src are not zero extended in place */
11581 #define BPF_JMP32_REG_ZEXT(op) \
11583 "JMP32_" #op "_X: operands preserved in zext", \
11585 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11586 BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
11587 BPF_ALU64_REG(BPF_MOV, R2, R0), \
11588 BPF_ALU64_REG(BPF_MOV, R3, R1), \
11589 BPF_JMP32_IMM(BPF_##op, R0, R1, 1), \
11590 BPF_JMP_A(0), /* Nop */ \
11591 BPF_ALU64_REG(BPF_SUB, R0, R2), \
11592 BPF_ALU64_REG(BPF_SUB, R1, R3), \
11593 BPF_ALU64_REG(BPF_OR, R0, R1), \
11594 BPF_ALU64_REG(BPF_MOV, R1, R0), \
11595 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
11596 BPF_ALU64_REG(BPF_OR, R0, R1), \
11603 BPF_JMP32_REG_ZEXT(JEQ),
11604 BPF_JMP32_REG_ZEXT(JNE),
11605 BPF_JMP32_REG_ZEXT(JSET),
11606 BPF_JMP32_REG_ZEXT(JGT),
11607 BPF_JMP32_REG_ZEXT(JGE),
11608 BPF_JMP32_REG_ZEXT(JLT),
11609 BPF_JMP32_REG_ZEXT(JLE),
11610 BPF_JMP32_REG_ZEXT(JSGT),
11611 BPF_JMP32_REG_ZEXT(JSGE),
11612 BPF_JMP32_REG_ZEXT(JSGT),
11613 BPF_JMP32_REG_ZEXT(JSLT),
11614 BPF_JMP32_REG_ZEXT(JSLE),
11615 #undef BPF_JMP2_REG_ZEXT
11616 /* ALU64 K register combinations */
11618 "ALU64_MOV_K: registers",
11623 .fill_helper = bpf_fill_alu64_mov_imm_regs,
11626 "ALU64_AND_K: registers",
11631 .fill_helper = bpf_fill_alu64_and_imm_regs,
11634 "ALU64_OR_K: registers",
11639 .fill_helper = bpf_fill_alu64_or_imm_regs,
11642 "ALU64_XOR_K: registers",
11647 .fill_helper = bpf_fill_alu64_xor_imm_regs,
11650 "ALU64_LSH_K: registers",
11655 .fill_helper = bpf_fill_alu64_lsh_imm_regs,
11658 "ALU64_RSH_K: registers",
11663 .fill_helper = bpf_fill_alu64_rsh_imm_regs,
11666 "ALU64_ARSH_K: registers",
11671 .fill_helper = bpf_fill_alu64_arsh_imm_regs,
11674 "ALU64_ADD_K: registers",
11679 .fill_helper = bpf_fill_alu64_add_imm_regs,
11682 "ALU64_SUB_K: registers",
11687 .fill_helper = bpf_fill_alu64_sub_imm_regs,
11690 "ALU64_MUL_K: registers",
11695 .fill_helper = bpf_fill_alu64_mul_imm_regs,
11698 "ALU64_DIV_K: registers",
11703 .fill_helper = bpf_fill_alu64_div_imm_regs,
11706 "ALU64_MOD_K: registers",
11711 .fill_helper = bpf_fill_alu64_mod_imm_regs,
11713 /* ALU32 K registers */
11715 "ALU32_MOV_K: registers",
11720 .fill_helper = bpf_fill_alu32_mov_imm_regs,
11723 "ALU32_AND_K: registers",
11728 .fill_helper = bpf_fill_alu32_and_imm_regs,
11731 "ALU32_OR_K: registers",
11736 .fill_helper = bpf_fill_alu32_or_imm_regs,
11739 "ALU32_XOR_K: registers",
11744 .fill_helper = bpf_fill_alu32_xor_imm_regs,
11747 "ALU32_LSH_K: registers",
11752 .fill_helper = bpf_fill_alu32_lsh_imm_regs,
11755 "ALU32_RSH_K: registers",
11760 .fill_helper = bpf_fill_alu32_rsh_imm_regs,
11763 "ALU32_ARSH_K: registers",
11768 .fill_helper = bpf_fill_alu32_arsh_imm_regs,
11771 "ALU32_ADD_K: registers",
11776 .fill_helper = bpf_fill_alu32_add_imm_regs,
11779 "ALU32_SUB_K: registers",
11784 .fill_helper = bpf_fill_alu32_sub_imm_regs,
11787 "ALU32_MUL_K: registers",
11792 .fill_helper = bpf_fill_alu32_mul_imm_regs,
11795 "ALU32_DIV_K: registers",
11800 .fill_helper = bpf_fill_alu32_div_imm_regs,
11803 "ALU32_MOD_K: registers",
11808 .fill_helper = bpf_fill_alu32_mod_imm_regs,
11810 /* ALU64 X register combinations */
11812 "ALU64_MOV_X: register combinations",
11817 .fill_helper = bpf_fill_alu64_mov_reg_pairs,
11820 "ALU64_AND_X: register combinations",
11825 .fill_helper = bpf_fill_alu64_and_reg_pairs,
11828 "ALU64_OR_X: register combinations",
11833 .fill_helper = bpf_fill_alu64_or_reg_pairs,
11836 "ALU64_XOR_X: register combinations",
11841 .fill_helper = bpf_fill_alu64_xor_reg_pairs,
11844 "ALU64_LSH_X: register combinations",
11849 .fill_helper = bpf_fill_alu64_lsh_reg_pairs,
11852 "ALU64_RSH_X: register combinations",
11857 .fill_helper = bpf_fill_alu64_rsh_reg_pairs,
11860 "ALU64_ARSH_X: register combinations",
11865 .fill_helper = bpf_fill_alu64_arsh_reg_pairs,
11868 "ALU64_ADD_X: register combinations",
11873 .fill_helper = bpf_fill_alu64_add_reg_pairs,
11876 "ALU64_SUB_X: register combinations",
11881 .fill_helper = bpf_fill_alu64_sub_reg_pairs,
11884 "ALU64_MUL_X: register combinations",
11889 .fill_helper = bpf_fill_alu64_mul_reg_pairs,
11892 "ALU64_DIV_X: register combinations",
11897 .fill_helper = bpf_fill_alu64_div_reg_pairs,
11900 "ALU64_MOD_X: register combinations",
11905 .fill_helper = bpf_fill_alu64_mod_reg_pairs,
11907 /* ALU32 X register combinations */
11909 "ALU32_MOV_X: register combinations",
11914 .fill_helper = bpf_fill_alu32_mov_reg_pairs,
11917 "ALU32_AND_X: register combinations",
11922 .fill_helper = bpf_fill_alu32_and_reg_pairs,
11925 "ALU32_OR_X: register combinations",
11930 .fill_helper = bpf_fill_alu32_or_reg_pairs,
11933 "ALU32_XOR_X: register combinations",
11938 .fill_helper = bpf_fill_alu32_xor_reg_pairs,
11941 "ALU32_LSH_X: register combinations",
11946 .fill_helper = bpf_fill_alu32_lsh_reg_pairs,
11949 "ALU32_RSH_X: register combinations",
11954 .fill_helper = bpf_fill_alu32_rsh_reg_pairs,
11957 "ALU32_ARSH_X: register combinations",
11962 .fill_helper = bpf_fill_alu32_arsh_reg_pairs,
11965 "ALU32_ADD_X: register combinations",
11970 .fill_helper = bpf_fill_alu32_add_reg_pairs,
11973 "ALU32_SUB_X: register combinations",
11978 .fill_helper = bpf_fill_alu32_sub_reg_pairs,
11981 "ALU32_MUL_X: register combinations",
11986 .fill_helper = bpf_fill_alu32_mul_reg_pairs,
11989 "ALU32_DIV_X: register combinations",
11994 .fill_helper = bpf_fill_alu32_div_reg_pairs,
11997 "ALU32_MOD_X register combinations",
12002 .fill_helper = bpf_fill_alu32_mod_reg_pairs,
12004 /* Exhaustive test of ALU64 shift operations */
12006 "ALU64_LSH_K: all shift values",
12008 INTERNAL | FLAG_NO_DATA,
12011 .fill_helper = bpf_fill_alu64_lsh_imm,
12014 "ALU64_RSH_K: all shift values",
12016 INTERNAL | FLAG_NO_DATA,
12019 .fill_helper = bpf_fill_alu64_rsh_imm,
12022 "ALU64_ARSH_K: all shift values",
12024 INTERNAL | FLAG_NO_DATA,
12027 .fill_helper = bpf_fill_alu64_arsh_imm,
12030 "ALU64_LSH_X: all shift values",
12032 INTERNAL | FLAG_NO_DATA,
12035 .fill_helper = bpf_fill_alu64_lsh_reg,
12038 "ALU64_RSH_X: all shift values",
12040 INTERNAL | FLAG_NO_DATA,
12043 .fill_helper = bpf_fill_alu64_rsh_reg,
12046 "ALU64_ARSH_X: all shift values",
12048 INTERNAL | FLAG_NO_DATA,
12051 .fill_helper = bpf_fill_alu64_arsh_reg,
12053 /* Exhaustive test of ALU32 shift operations */
12055 "ALU32_LSH_K: all shift values",
12057 INTERNAL | FLAG_NO_DATA,
12060 .fill_helper = bpf_fill_alu32_lsh_imm,
12063 "ALU32_RSH_K: all shift values",
12065 INTERNAL | FLAG_NO_DATA,
12068 .fill_helper = bpf_fill_alu32_rsh_imm,
12071 "ALU32_ARSH_K: all shift values",
12073 INTERNAL | FLAG_NO_DATA,
12076 .fill_helper = bpf_fill_alu32_arsh_imm,
12079 "ALU32_LSH_X: all shift values",
12081 INTERNAL | FLAG_NO_DATA,
12084 .fill_helper = bpf_fill_alu32_lsh_reg,
12087 "ALU32_RSH_X: all shift values",
12089 INTERNAL | FLAG_NO_DATA,
12092 .fill_helper = bpf_fill_alu32_rsh_reg,
12095 "ALU32_ARSH_X: all shift values",
12097 INTERNAL | FLAG_NO_DATA,
12100 .fill_helper = bpf_fill_alu32_arsh_reg,
12103 * Exhaustive test of ALU64 shift operations when
12104 * source and destination register are the same.
12107 "ALU64_LSH_X: all shift values with the same register",
12109 INTERNAL | FLAG_NO_DATA,
12112 .fill_helper = bpf_fill_alu64_lsh_same_reg,
12115 "ALU64_RSH_X: all shift values with the same register",
12117 INTERNAL | FLAG_NO_DATA,
12120 .fill_helper = bpf_fill_alu64_rsh_same_reg,
12123 "ALU64_ARSH_X: all shift values with the same register",
12125 INTERNAL | FLAG_NO_DATA,
12128 .fill_helper = bpf_fill_alu64_arsh_same_reg,
12131 * Exhaustive test of ALU32 shift operations when
12132 * source and destination register are the same.
12135 "ALU32_LSH_X: all shift values with the same register",
12137 INTERNAL | FLAG_NO_DATA,
12140 .fill_helper = bpf_fill_alu32_lsh_same_reg,
12143 "ALU32_RSH_X: all shift values with the same register",
12145 INTERNAL | FLAG_NO_DATA,
12148 .fill_helper = bpf_fill_alu32_rsh_same_reg,
12151 "ALU32_ARSH_X: all shift values with the same register",
12153 INTERNAL | FLAG_NO_DATA,
12156 .fill_helper = bpf_fill_alu32_arsh_same_reg,
12158 /* ALU64 immediate magnitudes */
12160 "ALU64_MOV_K: all immediate value magnitudes",
12162 INTERNAL | FLAG_NO_DATA,
12165 .fill_helper = bpf_fill_alu64_mov_imm,
12166 .nr_testruns = NR_PATTERN_RUNS,
12169 "ALU64_AND_K: all immediate value magnitudes",
12171 INTERNAL | FLAG_NO_DATA,
12174 .fill_helper = bpf_fill_alu64_and_imm,
12175 .nr_testruns = NR_PATTERN_RUNS,
12178 "ALU64_OR_K: all immediate value magnitudes",
12180 INTERNAL | FLAG_NO_DATA,
12183 .fill_helper = bpf_fill_alu64_or_imm,
12184 .nr_testruns = NR_PATTERN_RUNS,
12187 "ALU64_XOR_K: all immediate value magnitudes",
12189 INTERNAL | FLAG_NO_DATA,
12192 .fill_helper = bpf_fill_alu64_xor_imm,
12193 .nr_testruns = NR_PATTERN_RUNS,
12196 "ALU64_ADD_K: all immediate value magnitudes",
12198 INTERNAL | FLAG_NO_DATA,
12201 .fill_helper = bpf_fill_alu64_add_imm,
12202 .nr_testruns = NR_PATTERN_RUNS,
12205 "ALU64_SUB_K: all immediate value magnitudes",
12207 INTERNAL | FLAG_NO_DATA,
12210 .fill_helper = bpf_fill_alu64_sub_imm,
12211 .nr_testruns = NR_PATTERN_RUNS,
12214 "ALU64_MUL_K: all immediate value magnitudes",
12216 INTERNAL | FLAG_NO_DATA,
12219 .fill_helper = bpf_fill_alu64_mul_imm,
12220 .nr_testruns = NR_PATTERN_RUNS,
12223 "ALU64_DIV_K: all immediate value magnitudes",
12225 INTERNAL | FLAG_NO_DATA,
12228 .fill_helper = bpf_fill_alu64_div_imm,
12229 .nr_testruns = NR_PATTERN_RUNS,
12232 "ALU64_MOD_K: all immediate value magnitudes",
12234 INTERNAL | FLAG_NO_DATA,
12237 .fill_helper = bpf_fill_alu64_mod_imm,
12238 .nr_testruns = NR_PATTERN_RUNS,
12240 /* ALU32 immediate magnitudes */
12242 "ALU32_MOV_K: all immediate value magnitudes",
12244 INTERNAL | FLAG_NO_DATA,
12247 .fill_helper = bpf_fill_alu32_mov_imm,
12248 .nr_testruns = NR_PATTERN_RUNS,
12251 "ALU32_AND_K: all immediate value magnitudes",
12253 INTERNAL | FLAG_NO_DATA,
12256 .fill_helper = bpf_fill_alu32_and_imm,
12257 .nr_testruns = NR_PATTERN_RUNS,
12260 "ALU32_OR_K: all immediate value magnitudes",
12262 INTERNAL | FLAG_NO_DATA,
12265 .fill_helper = bpf_fill_alu32_or_imm,
12266 .nr_testruns = NR_PATTERN_RUNS,
12269 "ALU32_XOR_K: all immediate value magnitudes",
12271 INTERNAL | FLAG_NO_DATA,
12274 .fill_helper = bpf_fill_alu32_xor_imm,
12275 .nr_testruns = NR_PATTERN_RUNS,
12278 "ALU32_ADD_K: all immediate value magnitudes",
12280 INTERNAL | FLAG_NO_DATA,
12283 .fill_helper = bpf_fill_alu32_add_imm,
12284 .nr_testruns = NR_PATTERN_RUNS,
12287 "ALU32_SUB_K: all immediate value magnitudes",
12289 INTERNAL | FLAG_NO_DATA,
12292 .fill_helper = bpf_fill_alu32_sub_imm,
12293 .nr_testruns = NR_PATTERN_RUNS,
12296 "ALU32_MUL_K: all immediate value magnitudes",
12298 INTERNAL | FLAG_NO_DATA,
12301 .fill_helper = bpf_fill_alu32_mul_imm,
12302 .nr_testruns = NR_PATTERN_RUNS,
12305 "ALU32_DIV_K: all immediate value magnitudes",
12307 INTERNAL | FLAG_NO_DATA,
12310 .fill_helper = bpf_fill_alu32_div_imm,
12311 .nr_testruns = NR_PATTERN_RUNS,
12314 "ALU32_MOD_K: all immediate value magnitudes",
12316 INTERNAL | FLAG_NO_DATA,
12319 .fill_helper = bpf_fill_alu32_mod_imm,
12320 .nr_testruns = NR_PATTERN_RUNS,
12322 /* ALU64 register magnitudes */
12324 "ALU64_MOV_X: all register value magnitudes",
12326 INTERNAL | FLAG_NO_DATA,
12329 .fill_helper = bpf_fill_alu64_mov_reg,
12330 .nr_testruns = NR_PATTERN_RUNS,
12333 "ALU64_AND_X: all register value magnitudes",
12335 INTERNAL | FLAG_NO_DATA,
12338 .fill_helper = bpf_fill_alu64_and_reg,
12339 .nr_testruns = NR_PATTERN_RUNS,
12342 "ALU64_OR_X: all register value magnitudes",
12344 INTERNAL | FLAG_NO_DATA,
12347 .fill_helper = bpf_fill_alu64_or_reg,
12348 .nr_testruns = NR_PATTERN_RUNS,
12351 "ALU64_XOR_X: all register value magnitudes",
12353 INTERNAL | FLAG_NO_DATA,
12356 .fill_helper = bpf_fill_alu64_xor_reg,
12357 .nr_testruns = NR_PATTERN_RUNS,
12360 "ALU64_ADD_X: all register value magnitudes",
12362 INTERNAL | FLAG_NO_DATA,
12365 .fill_helper = bpf_fill_alu64_add_reg,
12366 .nr_testruns = NR_PATTERN_RUNS,
12369 "ALU64_SUB_X: all register value magnitudes",
12371 INTERNAL | FLAG_NO_DATA,
12374 .fill_helper = bpf_fill_alu64_sub_reg,
12375 .nr_testruns = NR_PATTERN_RUNS,
12378 "ALU64_MUL_X: all register value magnitudes",
12380 INTERNAL | FLAG_NO_DATA,
12383 .fill_helper = bpf_fill_alu64_mul_reg,
12384 .nr_testruns = NR_PATTERN_RUNS,
12387 "ALU64_DIV_X: all register value magnitudes",
12389 INTERNAL | FLAG_NO_DATA,
12392 .fill_helper = bpf_fill_alu64_div_reg,
12393 .nr_testruns = NR_PATTERN_RUNS,
12396 "ALU64_MOD_X: all register value magnitudes",
12398 INTERNAL | FLAG_NO_DATA,
12401 .fill_helper = bpf_fill_alu64_mod_reg,
12402 .nr_testruns = NR_PATTERN_RUNS,
12404 /* ALU32 register magnitudes */
12406 "ALU32_MOV_X: all register value magnitudes",
12408 INTERNAL | FLAG_NO_DATA,
12411 .fill_helper = bpf_fill_alu32_mov_reg,
12412 .nr_testruns = NR_PATTERN_RUNS,
12415 "ALU32_AND_X: all register value magnitudes",
12417 INTERNAL | FLAG_NO_DATA,
12420 .fill_helper = bpf_fill_alu32_and_reg,
12421 .nr_testruns = NR_PATTERN_RUNS,
12424 "ALU32_OR_X: all register value magnitudes",
12426 INTERNAL | FLAG_NO_DATA,
12429 .fill_helper = bpf_fill_alu32_or_reg,
12430 .nr_testruns = NR_PATTERN_RUNS,
12433 "ALU32_XOR_X: all register value magnitudes",
12435 INTERNAL | FLAG_NO_DATA,
12438 .fill_helper = bpf_fill_alu32_xor_reg,
12439 .nr_testruns = NR_PATTERN_RUNS,
12442 "ALU32_ADD_X: all register value magnitudes",
12444 INTERNAL | FLAG_NO_DATA,
12447 .fill_helper = bpf_fill_alu32_add_reg,
12448 .nr_testruns = NR_PATTERN_RUNS,
12451 "ALU32_SUB_X: all register value magnitudes",
12453 INTERNAL | FLAG_NO_DATA,
12456 .fill_helper = bpf_fill_alu32_sub_reg,
12457 .nr_testruns = NR_PATTERN_RUNS,
12460 "ALU32_MUL_X: all register value magnitudes",
12462 INTERNAL | FLAG_NO_DATA,
12465 .fill_helper = bpf_fill_alu32_mul_reg,
12466 .nr_testruns = NR_PATTERN_RUNS,
12469 "ALU32_DIV_X: all register value magnitudes",
12471 INTERNAL | FLAG_NO_DATA,
12474 .fill_helper = bpf_fill_alu32_div_reg,
12475 .nr_testruns = NR_PATTERN_RUNS,
12478 "ALU32_MOD_X: all register value magnitudes",
12480 INTERNAL | FLAG_NO_DATA,
12483 .fill_helper = bpf_fill_alu32_mod_reg,
12484 .nr_testruns = NR_PATTERN_RUNS,
12486 /* LD_IMM64 immediate magnitudes and byte patterns */
12488 "LD_IMM64: all immediate value magnitudes",
12490 INTERNAL | FLAG_NO_DATA,
12493 .fill_helper = bpf_fill_ld_imm64_magn,
12496 "LD_IMM64: checker byte patterns",
12498 INTERNAL | FLAG_NO_DATA,
12501 .fill_helper = bpf_fill_ld_imm64_checker,
12504 "LD_IMM64: random positive and zero byte patterns",
12506 INTERNAL | FLAG_NO_DATA,
12509 .fill_helper = bpf_fill_ld_imm64_pos_zero,
12512 "LD_IMM64: random negative and zero byte patterns",
12514 INTERNAL | FLAG_NO_DATA,
12517 .fill_helper = bpf_fill_ld_imm64_neg_zero,
12520 "LD_IMM64: random positive and negative byte patterns",
12522 INTERNAL | FLAG_NO_DATA,
12525 .fill_helper = bpf_fill_ld_imm64_pos_neg,
12527 /* 64-bit ATOMIC register combinations */
12529 "ATOMIC_DW_ADD: register combinations",
12534 .fill_helper = bpf_fill_atomic64_add_reg_pairs,
12538 "ATOMIC_DW_AND: register combinations",
12543 .fill_helper = bpf_fill_atomic64_and_reg_pairs,
12547 "ATOMIC_DW_OR: register combinations",
12552 .fill_helper = bpf_fill_atomic64_or_reg_pairs,
12556 "ATOMIC_DW_XOR: register combinations",
12561 .fill_helper = bpf_fill_atomic64_xor_reg_pairs,
12565 "ATOMIC_DW_ADD_FETCH: register combinations",
12570 .fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
12574 "ATOMIC_DW_AND_FETCH: register combinations",
12579 .fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
12583 "ATOMIC_DW_OR_FETCH: register combinations",
12588 .fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
12592 "ATOMIC_DW_XOR_FETCH: register combinations",
12597 .fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
12601 "ATOMIC_DW_XCHG: register combinations",
12606 .fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
12610 "ATOMIC_DW_CMPXCHG: register combinations",
12615 .fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
12618 /* 32-bit ATOMIC register combinations */
12620 "ATOMIC_W_ADD: register combinations",
12625 .fill_helper = bpf_fill_atomic32_add_reg_pairs,
12629 "ATOMIC_W_AND: register combinations",
12634 .fill_helper = bpf_fill_atomic32_and_reg_pairs,
12638 "ATOMIC_W_OR: register combinations",
12643 .fill_helper = bpf_fill_atomic32_or_reg_pairs,
12647 "ATOMIC_W_XOR: register combinations",
12652 .fill_helper = bpf_fill_atomic32_xor_reg_pairs,
12656 "ATOMIC_W_ADD_FETCH: register combinations",
12661 .fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
12665 "ATOMIC_W_AND_FETCH: register combinations",
12670 .fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
12674 "ATOMIC_W_OR_FETCH: register combinations",
12679 .fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
12683 "ATOMIC_W_XOR_FETCH: register combinations",
12688 .fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
12692 "ATOMIC_W_XCHG: register combinations",
12697 .fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
12701 "ATOMIC_W_CMPXCHG: register combinations",
12706 .fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
12709 /* 64-bit ATOMIC magnitudes */
12711 "ATOMIC_DW_ADD: all operand magnitudes",
12713 INTERNAL | FLAG_NO_DATA,
12716 .fill_helper = bpf_fill_atomic64_add,
12718 .nr_testruns = NR_PATTERN_RUNS,
12721 "ATOMIC_DW_AND: all operand magnitudes",
12723 INTERNAL | FLAG_NO_DATA,
12726 .fill_helper = bpf_fill_atomic64_and,
12728 .nr_testruns = NR_PATTERN_RUNS,
12731 "ATOMIC_DW_OR: all operand magnitudes",
12733 INTERNAL | FLAG_NO_DATA,
12736 .fill_helper = bpf_fill_atomic64_or,
12738 .nr_testruns = NR_PATTERN_RUNS,
12741 "ATOMIC_DW_XOR: all operand magnitudes",
12743 INTERNAL | FLAG_NO_DATA,
12746 .fill_helper = bpf_fill_atomic64_xor,
12748 .nr_testruns = NR_PATTERN_RUNS,
12751 "ATOMIC_DW_ADD_FETCH: all operand magnitudes",
12753 INTERNAL | FLAG_NO_DATA,
12756 .fill_helper = bpf_fill_atomic64_add_fetch,
12758 .nr_testruns = NR_PATTERN_RUNS,
12761 "ATOMIC_DW_AND_FETCH: all operand magnitudes",
12763 INTERNAL | FLAG_NO_DATA,
12766 .fill_helper = bpf_fill_atomic64_and_fetch,
12768 .nr_testruns = NR_PATTERN_RUNS,
12771 "ATOMIC_DW_OR_FETCH: all operand magnitudes",
12773 INTERNAL | FLAG_NO_DATA,
12776 .fill_helper = bpf_fill_atomic64_or_fetch,
12778 .nr_testruns = NR_PATTERN_RUNS,
12781 "ATOMIC_DW_XOR_FETCH: all operand magnitudes",
12783 INTERNAL | FLAG_NO_DATA,
12786 .fill_helper = bpf_fill_atomic64_xor_fetch,
12788 .nr_testruns = NR_PATTERN_RUNS,
12791 "ATOMIC_DW_XCHG: all operand magnitudes",
12793 INTERNAL | FLAG_NO_DATA,
12796 .fill_helper = bpf_fill_atomic64_xchg,
12798 .nr_testruns = NR_PATTERN_RUNS,
12801 "ATOMIC_DW_CMPXCHG: all operand magnitudes",
12803 INTERNAL | FLAG_NO_DATA,
12806 .fill_helper = bpf_fill_cmpxchg64,
12808 .nr_testruns = NR_PATTERN_RUNS,
12810 /* 64-bit atomic magnitudes */
12812 "ATOMIC_W_ADD: all operand magnitudes",
12814 INTERNAL | FLAG_NO_DATA,
12817 .fill_helper = bpf_fill_atomic32_add,
12819 .nr_testruns = NR_PATTERN_RUNS,
12822 "ATOMIC_W_AND: all operand magnitudes",
12824 INTERNAL | FLAG_NO_DATA,
12827 .fill_helper = bpf_fill_atomic32_and,
12829 .nr_testruns = NR_PATTERN_RUNS,
12832 "ATOMIC_W_OR: all operand magnitudes",
12834 INTERNAL | FLAG_NO_DATA,
12837 .fill_helper = bpf_fill_atomic32_or,
12839 .nr_testruns = NR_PATTERN_RUNS,
12842 "ATOMIC_W_XOR: all operand magnitudes",
12844 INTERNAL | FLAG_NO_DATA,
12847 .fill_helper = bpf_fill_atomic32_xor,
12849 .nr_testruns = NR_PATTERN_RUNS,
12852 "ATOMIC_W_ADD_FETCH: all operand magnitudes",
12854 INTERNAL | FLAG_NO_DATA,
12857 .fill_helper = bpf_fill_atomic32_add_fetch,
12859 .nr_testruns = NR_PATTERN_RUNS,
12862 "ATOMIC_W_AND_FETCH: all operand magnitudes",
12864 INTERNAL | FLAG_NO_DATA,
12867 .fill_helper = bpf_fill_atomic32_and_fetch,
12869 .nr_testruns = NR_PATTERN_RUNS,
12872 "ATOMIC_W_OR_FETCH: all operand magnitudes",
12874 INTERNAL | FLAG_NO_DATA,
12877 .fill_helper = bpf_fill_atomic32_or_fetch,
12879 .nr_testruns = NR_PATTERN_RUNS,
12882 "ATOMIC_W_XOR_FETCH: all operand magnitudes",
12884 INTERNAL | FLAG_NO_DATA,
12887 .fill_helper = bpf_fill_atomic32_xor_fetch,
12889 .nr_testruns = NR_PATTERN_RUNS,
12892 "ATOMIC_W_XCHG: all operand magnitudes",
12894 INTERNAL | FLAG_NO_DATA,
12897 .fill_helper = bpf_fill_atomic32_xchg,
12899 .nr_testruns = NR_PATTERN_RUNS,
12902 "ATOMIC_W_CMPXCHG: all operand magnitudes",
12904 INTERNAL | FLAG_NO_DATA,
12907 .fill_helper = bpf_fill_cmpxchg32,
12909 .nr_testruns = NR_PATTERN_RUNS,
12911 /* JMP immediate magnitudes */
12913 "JMP_JSET_K: all immediate value magnitudes",
12915 INTERNAL | FLAG_NO_DATA,
12918 .fill_helper = bpf_fill_jmp_jset_imm,
12919 .nr_testruns = NR_PATTERN_RUNS,
12922 "JMP_JEQ_K: all immediate value magnitudes",
12924 INTERNAL | FLAG_NO_DATA,
12927 .fill_helper = bpf_fill_jmp_jeq_imm,
12928 .nr_testruns = NR_PATTERN_RUNS,
12931 "JMP_JNE_K: all immediate value magnitudes",
12933 INTERNAL | FLAG_NO_DATA,
12936 .fill_helper = bpf_fill_jmp_jne_imm,
12937 .nr_testruns = NR_PATTERN_RUNS,
12940 "JMP_JGT_K: all immediate value magnitudes",
12942 INTERNAL | FLAG_NO_DATA,
12945 .fill_helper = bpf_fill_jmp_jgt_imm,
12946 .nr_testruns = NR_PATTERN_RUNS,
12949 "JMP_JGE_K: all immediate value magnitudes",
12951 INTERNAL | FLAG_NO_DATA,
12954 .fill_helper = bpf_fill_jmp_jge_imm,
12955 .nr_testruns = NR_PATTERN_RUNS,
12958 "JMP_JLT_K: all immediate value magnitudes",
12960 INTERNAL | FLAG_NO_DATA,
12963 .fill_helper = bpf_fill_jmp_jlt_imm,
12964 .nr_testruns = NR_PATTERN_RUNS,
12967 "JMP_JLE_K: all immediate value magnitudes",
12969 INTERNAL | FLAG_NO_DATA,
12972 .fill_helper = bpf_fill_jmp_jle_imm,
12973 .nr_testruns = NR_PATTERN_RUNS,
12976 "JMP_JSGT_K: all immediate value magnitudes",
12978 INTERNAL | FLAG_NO_DATA,
12981 .fill_helper = bpf_fill_jmp_jsgt_imm,
12982 .nr_testruns = NR_PATTERN_RUNS,
12985 "JMP_JSGE_K: all immediate value magnitudes",
12987 INTERNAL | FLAG_NO_DATA,
12990 .fill_helper = bpf_fill_jmp_jsge_imm,
12991 .nr_testruns = NR_PATTERN_RUNS,
12994 "JMP_JSLT_K: all immediate value magnitudes",
12996 INTERNAL | FLAG_NO_DATA,
12999 .fill_helper = bpf_fill_jmp_jslt_imm,
13000 .nr_testruns = NR_PATTERN_RUNS,
13003 "JMP_JSLE_K: all immediate value magnitudes",
13005 INTERNAL | FLAG_NO_DATA,
13008 .fill_helper = bpf_fill_jmp_jsle_imm,
13009 .nr_testruns = NR_PATTERN_RUNS,
13011 /* JMP register magnitudes */
13013 "JMP_JSET_X: all register value magnitudes",
13015 INTERNAL | FLAG_NO_DATA,
13018 .fill_helper = bpf_fill_jmp_jset_reg,
13019 .nr_testruns = NR_PATTERN_RUNS,
13022 "JMP_JEQ_X: all register value magnitudes",
13024 INTERNAL | FLAG_NO_DATA,
13027 .fill_helper = bpf_fill_jmp_jeq_reg,
13028 .nr_testruns = NR_PATTERN_RUNS,
13031 "JMP_JNE_X: all register value magnitudes",
13033 INTERNAL | FLAG_NO_DATA,
13036 .fill_helper = bpf_fill_jmp_jne_reg,
13037 .nr_testruns = NR_PATTERN_RUNS,
13040 "JMP_JGT_X: all register value magnitudes",
13042 INTERNAL | FLAG_NO_DATA,
13045 .fill_helper = bpf_fill_jmp_jgt_reg,
13046 .nr_testruns = NR_PATTERN_RUNS,
13049 "JMP_JGE_X: all register value magnitudes",
13051 INTERNAL | FLAG_NO_DATA,
13054 .fill_helper = bpf_fill_jmp_jge_reg,
13055 .nr_testruns = NR_PATTERN_RUNS,
13058 "JMP_JLT_X: all register value magnitudes",
13060 INTERNAL | FLAG_NO_DATA,
13063 .fill_helper = bpf_fill_jmp_jlt_reg,
13064 .nr_testruns = NR_PATTERN_RUNS,
13067 "JMP_JLE_X: all register value magnitudes",
13069 INTERNAL | FLAG_NO_DATA,
13072 .fill_helper = bpf_fill_jmp_jle_reg,
13073 .nr_testruns = NR_PATTERN_RUNS,
13076 "JMP_JSGT_X: all register value magnitudes",
13078 INTERNAL | FLAG_NO_DATA,
13081 .fill_helper = bpf_fill_jmp_jsgt_reg,
13082 .nr_testruns = NR_PATTERN_RUNS,
13085 "JMP_JSGE_X: all register value magnitudes",
13087 INTERNAL | FLAG_NO_DATA,
13090 .fill_helper = bpf_fill_jmp_jsge_reg,
13091 .nr_testruns = NR_PATTERN_RUNS,
13094 "JMP_JSLT_X: all register value magnitudes",
13096 INTERNAL | FLAG_NO_DATA,
13099 .fill_helper = bpf_fill_jmp_jslt_reg,
13100 .nr_testruns = NR_PATTERN_RUNS,
13103 "JMP_JSLE_X: all register value magnitudes",
13105 INTERNAL | FLAG_NO_DATA,
13108 .fill_helper = bpf_fill_jmp_jsle_reg,
13109 .nr_testruns = NR_PATTERN_RUNS,
13111 /* JMP32 immediate magnitudes */
13113 "JMP32_JSET_K: all immediate value magnitudes",
13115 INTERNAL | FLAG_NO_DATA,
13118 .fill_helper = bpf_fill_jmp32_jset_imm,
13119 .nr_testruns = NR_PATTERN_RUNS,
13122 "JMP32_JEQ_K: all immediate value magnitudes",
13124 INTERNAL | FLAG_NO_DATA,
13127 .fill_helper = bpf_fill_jmp32_jeq_imm,
13128 .nr_testruns = NR_PATTERN_RUNS,
13131 "JMP32_JNE_K: all immediate value magnitudes",
13133 INTERNAL | FLAG_NO_DATA,
13136 .fill_helper = bpf_fill_jmp32_jne_imm,
13137 .nr_testruns = NR_PATTERN_RUNS,
13140 "JMP32_JGT_K: all immediate value magnitudes",
13142 INTERNAL | FLAG_NO_DATA,
13145 .fill_helper = bpf_fill_jmp32_jgt_imm,
13146 .nr_testruns = NR_PATTERN_RUNS,
13149 "JMP32_JGE_K: all immediate value magnitudes",
13151 INTERNAL | FLAG_NO_DATA,
13154 .fill_helper = bpf_fill_jmp32_jge_imm,
13155 .nr_testruns = NR_PATTERN_RUNS,
13158 "JMP32_JLT_K: all immediate value magnitudes",
13160 INTERNAL | FLAG_NO_DATA,
13163 .fill_helper = bpf_fill_jmp32_jlt_imm,
13164 .nr_testruns = NR_PATTERN_RUNS,
13167 "JMP32_JLE_K: all immediate value magnitudes",
13169 INTERNAL | FLAG_NO_DATA,
13172 .fill_helper = bpf_fill_jmp32_jle_imm,
13173 .nr_testruns = NR_PATTERN_RUNS,
13176 "JMP32_JSGT_K: all immediate value magnitudes",
13178 INTERNAL | FLAG_NO_DATA,
13181 .fill_helper = bpf_fill_jmp32_jsgt_imm,
13182 .nr_testruns = NR_PATTERN_RUNS,
13185 "JMP32_JSGE_K: all immediate value magnitudes",
13187 INTERNAL | FLAG_NO_DATA,
13190 .fill_helper = bpf_fill_jmp32_jsge_imm,
13191 .nr_testruns = NR_PATTERN_RUNS,
13194 "JMP32_JSLT_K: all immediate value magnitudes",
13196 INTERNAL | FLAG_NO_DATA,
13199 .fill_helper = bpf_fill_jmp32_jslt_imm,
13200 .nr_testruns = NR_PATTERN_RUNS,
13203 "JMP32_JSLE_K: all immediate value magnitudes",
13205 INTERNAL | FLAG_NO_DATA,
13208 .fill_helper = bpf_fill_jmp32_jsle_imm,
13209 .nr_testruns = NR_PATTERN_RUNS,
13211 /* JMP32 register magnitudes */
13213 "JMP32_JSET_X: all register value magnitudes",
13215 INTERNAL | FLAG_NO_DATA,
13218 .fill_helper = bpf_fill_jmp32_jset_reg,
13219 .nr_testruns = NR_PATTERN_RUNS,
13222 "JMP32_JEQ_X: all register value magnitudes",
13224 INTERNAL | FLAG_NO_DATA,
13227 .fill_helper = bpf_fill_jmp32_jeq_reg,
13228 .nr_testruns = NR_PATTERN_RUNS,
13231 "JMP32_JNE_X: all register value magnitudes",
13233 INTERNAL | FLAG_NO_DATA,
13236 .fill_helper = bpf_fill_jmp32_jne_reg,
13237 .nr_testruns = NR_PATTERN_RUNS,
13240 "JMP32_JGT_X: all register value magnitudes",
13242 INTERNAL | FLAG_NO_DATA,
13245 .fill_helper = bpf_fill_jmp32_jgt_reg,
13246 .nr_testruns = NR_PATTERN_RUNS,
13249 "JMP32_JGE_X: all register value magnitudes",
13251 INTERNAL | FLAG_NO_DATA,
13254 .fill_helper = bpf_fill_jmp32_jge_reg,
13255 .nr_testruns = NR_PATTERN_RUNS,
13258 "JMP32_JLT_X: all register value magnitudes",
13260 INTERNAL | FLAG_NO_DATA,
13263 .fill_helper = bpf_fill_jmp32_jlt_reg,
13264 .nr_testruns = NR_PATTERN_RUNS,
13267 "JMP32_JLE_X: all register value magnitudes",
13269 INTERNAL | FLAG_NO_DATA,
13272 .fill_helper = bpf_fill_jmp32_jle_reg,
13273 .nr_testruns = NR_PATTERN_RUNS,
13276 "JMP32_JSGT_X: all register value magnitudes",
13278 INTERNAL | FLAG_NO_DATA,
13281 .fill_helper = bpf_fill_jmp32_jsgt_reg,
13282 .nr_testruns = NR_PATTERN_RUNS,
13285 "JMP32_JSGE_X: all register value magnitudes",
13287 INTERNAL | FLAG_NO_DATA,
13290 .fill_helper = bpf_fill_jmp32_jsge_reg,
13291 .nr_testruns = NR_PATTERN_RUNS,
13294 "JMP32_JSLT_X: all register value magnitudes",
13296 INTERNAL | FLAG_NO_DATA,
13299 .fill_helper = bpf_fill_jmp32_jslt_reg,
13300 .nr_testruns = NR_PATTERN_RUNS,
13303 "JMP32_JSLE_X: all register value magnitudes",
13305 INTERNAL | FLAG_NO_DATA,
13308 .fill_helper = bpf_fill_jmp32_jsle_reg,
13309 .nr_testruns = NR_PATTERN_RUNS,
13311 /* Conditional jumps with constant decision */
13313 "JMP_JSET_K: imm = 0 -> never taken",
13315 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13316 BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13317 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13320 INTERNAL | FLAG_NO_DATA,
13325 "JMP_JLT_K: imm = 0 -> never taken",
13327 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13328 BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13329 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13332 INTERNAL | FLAG_NO_DATA,
13337 "JMP_JGE_K: imm = 0 -> always taken",
13339 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13340 BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13341 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13344 INTERNAL | FLAG_NO_DATA,
13349 "JMP_JGT_K: imm = 0xffffffff -> never taken",
13351 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13352 BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13353 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13356 INTERNAL | FLAG_NO_DATA,
13361 "JMP_JLE_K: imm = 0xffffffff -> always taken",
13363 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13364 BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
13365 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13368 INTERNAL | FLAG_NO_DATA,
13373 "JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
13375 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13376 BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
13377 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13380 INTERNAL | FLAG_NO_DATA,
13385 "JMP32_JSGE_K: imm = -0x80000000 -> always taken",
13387 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13388 BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
13389 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13392 INTERNAL | FLAG_NO_DATA,
13397 "JMP32_JSLT_K: imm = -0x80000000 -> never taken",
13399 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13400 BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
13401 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13404 INTERNAL | FLAG_NO_DATA,
13409 "JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
13411 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13412 BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
13413 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13416 INTERNAL | FLAG_NO_DATA,
13421 "JMP_JEQ_X: dst = src -> always taken",
13423 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13424 BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
13425 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13428 INTERNAL | FLAG_NO_DATA,
13433 "JMP_JGE_X: dst = src -> always taken",
13435 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13436 BPF_JMP_REG(BPF_JGE, R1, R1, 1),
13437 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13440 INTERNAL | FLAG_NO_DATA,
13445 "JMP_JLE_X: dst = src -> always taken",
13447 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13448 BPF_JMP_REG(BPF_JLE, R1, R1, 1),
13449 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13452 INTERNAL | FLAG_NO_DATA,
13457 "JMP_JSGE_X: dst = src -> always taken",
13459 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13460 BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
13461 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13464 INTERNAL | FLAG_NO_DATA,
13469 "JMP_JSLE_X: dst = src -> always taken",
13471 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13472 BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
13473 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13476 INTERNAL | FLAG_NO_DATA,
13481 "JMP_JNE_X: dst = src -> never taken",
13483 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13484 BPF_JMP_REG(BPF_JNE, R1, R1, 1),
13485 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13488 INTERNAL | FLAG_NO_DATA,
13493 "JMP_JGT_X: dst = src -> never taken",
13495 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13496 BPF_JMP_REG(BPF_JGT, R1, R1, 1),
13497 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13500 INTERNAL | FLAG_NO_DATA,
13505 "JMP_JLT_X: dst = src -> never taken",
13507 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13508 BPF_JMP_REG(BPF_JLT, R1, R1, 1),
13509 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13512 INTERNAL | FLAG_NO_DATA,
13517 "JMP_JSGT_X: dst = src -> never taken",
13519 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13520 BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
13521 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13524 INTERNAL | FLAG_NO_DATA,
13529 "JMP_JSLT_X: dst = src -> never taken",
13531 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13532 BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
13533 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13536 INTERNAL | FLAG_NO_DATA,
13540 /* Short relative jumps */
13542 "Short relative jump: offset=0",
13544 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13545 BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
13547 BPF_ALU32_IMM(BPF_MOV, R0, -1),
13549 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13554 "Short relative jump: offset=1",
13556 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13557 BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
13558 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13560 BPF_ALU32_IMM(BPF_MOV, R0, -1),
13562 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13567 "Short relative jump: offset=2",
13569 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13570 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
13571 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13572 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13574 BPF_ALU32_IMM(BPF_MOV, R0, -1),
13576 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13581 "Short relative jump: offset=3",
13583 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13584 BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
13585 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13586 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13587 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13589 BPF_ALU32_IMM(BPF_MOV, R0, -1),
13591 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13596 "Short relative jump: offset=4",
13598 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13599 BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
13600 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13601 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13602 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13603 BPF_ALU32_IMM(BPF_ADD, R0, 1),
13605 BPF_ALU32_IMM(BPF_MOV, R0, -1),
13607 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13611 /* Conditional branch conversions */
13613 "Long conditional jump: taken at runtime",
13615 INTERNAL | FLAG_NO_DATA,
13618 .fill_helper = bpf_fill_max_jmp_taken,
13621 "Long conditional jump: not taken at runtime",
13623 INTERNAL | FLAG_NO_DATA,
13626 .fill_helper = bpf_fill_max_jmp_not_taken,
13629 "Long conditional jump: always taken, known at JIT time",
13631 INTERNAL | FLAG_NO_DATA,
13634 .fill_helper = bpf_fill_max_jmp_always_taken,
13637 "Long conditional jump: never taken, known at JIT time",
13639 INTERNAL | FLAG_NO_DATA,
13642 .fill_helper = bpf_fill_max_jmp_never_taken,
13644 /* Staggered jump sequences, immediate */
13646 "Staggered jumps: JMP_JA",
13648 INTERNAL | FLAG_NO_DATA,
13650 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13651 .fill_helper = bpf_fill_staggered_ja,
13652 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13655 "Staggered jumps: JMP_JEQ_K",
13657 INTERNAL | FLAG_NO_DATA,
13659 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13660 .fill_helper = bpf_fill_staggered_jeq_imm,
13661 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13664 "Staggered jumps: JMP_JNE_K",
13666 INTERNAL | FLAG_NO_DATA,
13668 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13669 .fill_helper = bpf_fill_staggered_jne_imm,
13670 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13673 "Staggered jumps: JMP_JSET_K",
13675 INTERNAL | FLAG_NO_DATA,
13677 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13678 .fill_helper = bpf_fill_staggered_jset_imm,
13679 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13682 "Staggered jumps: JMP_JGT_K",
13684 INTERNAL | FLAG_NO_DATA,
13686 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13687 .fill_helper = bpf_fill_staggered_jgt_imm,
13688 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13691 "Staggered jumps: JMP_JGE_K",
13693 INTERNAL | FLAG_NO_DATA,
13695 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13696 .fill_helper = bpf_fill_staggered_jge_imm,
13697 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13700 "Staggered jumps: JMP_JLT_K",
13702 INTERNAL | FLAG_NO_DATA,
13704 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13705 .fill_helper = bpf_fill_staggered_jlt_imm,
13706 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13709 "Staggered jumps: JMP_JLE_K",
13711 INTERNAL | FLAG_NO_DATA,
13713 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13714 .fill_helper = bpf_fill_staggered_jle_imm,
13715 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13718 "Staggered jumps: JMP_JSGT_K",
13720 INTERNAL | FLAG_NO_DATA,
13722 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13723 .fill_helper = bpf_fill_staggered_jsgt_imm,
13724 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13727 "Staggered jumps: JMP_JSGE_K",
13729 INTERNAL | FLAG_NO_DATA,
13731 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13732 .fill_helper = bpf_fill_staggered_jsge_imm,
13733 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13736 "Staggered jumps: JMP_JSLT_K",
13738 INTERNAL | FLAG_NO_DATA,
13740 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13741 .fill_helper = bpf_fill_staggered_jslt_imm,
13742 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13745 "Staggered jumps: JMP_JSLE_K",
13747 INTERNAL | FLAG_NO_DATA,
13749 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13750 .fill_helper = bpf_fill_staggered_jsle_imm,
13751 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13753 /* Staggered jump sequences, register */
13755 "Staggered jumps: JMP_JEQ_X",
13757 INTERNAL | FLAG_NO_DATA,
13759 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13760 .fill_helper = bpf_fill_staggered_jeq_reg,
13761 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13764 "Staggered jumps: JMP_JNE_X",
13766 INTERNAL | FLAG_NO_DATA,
13768 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13769 .fill_helper = bpf_fill_staggered_jne_reg,
13770 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13773 "Staggered jumps: JMP_JSET_X",
13775 INTERNAL | FLAG_NO_DATA,
13777 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13778 .fill_helper = bpf_fill_staggered_jset_reg,
13779 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13782 "Staggered jumps: JMP_JGT_X",
13784 INTERNAL | FLAG_NO_DATA,
13786 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13787 .fill_helper = bpf_fill_staggered_jgt_reg,
13788 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13791 "Staggered jumps: JMP_JGE_X",
13793 INTERNAL | FLAG_NO_DATA,
13795 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13796 .fill_helper = bpf_fill_staggered_jge_reg,
13797 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13800 "Staggered jumps: JMP_JLT_X",
13802 INTERNAL | FLAG_NO_DATA,
13804 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13805 .fill_helper = bpf_fill_staggered_jlt_reg,
13806 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13809 "Staggered jumps: JMP_JLE_X",
13811 INTERNAL | FLAG_NO_DATA,
13813 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13814 .fill_helper = bpf_fill_staggered_jle_reg,
13815 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13818 "Staggered jumps: JMP_JSGT_X",
13820 INTERNAL | FLAG_NO_DATA,
13822 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13823 .fill_helper = bpf_fill_staggered_jsgt_reg,
13824 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13827 "Staggered jumps: JMP_JSGE_X",
13829 INTERNAL | FLAG_NO_DATA,
13831 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13832 .fill_helper = bpf_fill_staggered_jsge_reg,
13833 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13836 "Staggered jumps: JMP_JSLT_X",
13838 INTERNAL | FLAG_NO_DATA,
13840 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13841 .fill_helper = bpf_fill_staggered_jslt_reg,
13842 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13845 "Staggered jumps: JMP_JSLE_X",
13847 INTERNAL | FLAG_NO_DATA,
13849 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13850 .fill_helper = bpf_fill_staggered_jsle_reg,
13851 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13853 /* Staggered jump sequences, JMP32 immediate */
13855 "Staggered jumps: JMP32_JEQ_K",
13857 INTERNAL | FLAG_NO_DATA,
13859 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13860 .fill_helper = bpf_fill_staggered_jeq32_imm,
13861 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13864 "Staggered jumps: JMP32_JNE_K",
13866 INTERNAL | FLAG_NO_DATA,
13868 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13869 .fill_helper = bpf_fill_staggered_jne32_imm,
13870 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13873 "Staggered jumps: JMP32_JSET_K",
13875 INTERNAL | FLAG_NO_DATA,
13877 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13878 .fill_helper = bpf_fill_staggered_jset32_imm,
13879 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13882 "Staggered jumps: JMP32_JGT_K",
13884 INTERNAL | FLAG_NO_DATA,
13886 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13887 .fill_helper = bpf_fill_staggered_jgt32_imm,
13888 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13891 "Staggered jumps: JMP32_JGE_K",
13893 INTERNAL | FLAG_NO_DATA,
13895 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13896 .fill_helper = bpf_fill_staggered_jge32_imm,
13897 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13900 "Staggered jumps: JMP32_JLT_K",
13902 INTERNAL | FLAG_NO_DATA,
13904 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13905 .fill_helper = bpf_fill_staggered_jlt32_imm,
13906 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13909 "Staggered jumps: JMP32_JLE_K",
13911 INTERNAL | FLAG_NO_DATA,
13913 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13914 .fill_helper = bpf_fill_staggered_jle32_imm,
13915 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13918 "Staggered jumps: JMP32_JSGT_K",
13920 INTERNAL | FLAG_NO_DATA,
13922 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13923 .fill_helper = bpf_fill_staggered_jsgt32_imm,
13924 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13927 "Staggered jumps: JMP32_JSGE_K",
13929 INTERNAL | FLAG_NO_DATA,
13931 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13932 .fill_helper = bpf_fill_staggered_jsge32_imm,
13933 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13936 "Staggered jumps: JMP32_JSLT_K",
13938 INTERNAL | FLAG_NO_DATA,
13940 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13941 .fill_helper = bpf_fill_staggered_jslt32_imm,
13942 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13945 "Staggered jumps: JMP32_JSLE_K",
13947 INTERNAL | FLAG_NO_DATA,
13949 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13950 .fill_helper = bpf_fill_staggered_jsle32_imm,
13951 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13953 /* Staggered jump sequences, JMP32 register */
13955 "Staggered jumps: JMP32_JEQ_X",
13957 INTERNAL | FLAG_NO_DATA,
13959 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13960 .fill_helper = bpf_fill_staggered_jeq32_reg,
13961 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13964 "Staggered jumps: JMP32_JNE_X",
13966 INTERNAL | FLAG_NO_DATA,
13968 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13969 .fill_helper = bpf_fill_staggered_jne32_reg,
13970 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13973 "Staggered jumps: JMP32_JSET_X",
13975 INTERNAL | FLAG_NO_DATA,
13977 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13978 .fill_helper = bpf_fill_staggered_jset32_reg,
13979 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13982 "Staggered jumps: JMP32_JGT_X",
13984 INTERNAL | FLAG_NO_DATA,
13986 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13987 .fill_helper = bpf_fill_staggered_jgt32_reg,
13988 .nr_testruns = NR_STAGGERED_JMP_RUNS,
13991 "Staggered jumps: JMP32_JGE_X",
13993 INTERNAL | FLAG_NO_DATA,
13995 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13996 .fill_helper = bpf_fill_staggered_jge32_reg,
13997 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14000 "Staggered jumps: JMP32_JLT_X",
14002 INTERNAL | FLAG_NO_DATA,
14004 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14005 .fill_helper = bpf_fill_staggered_jlt32_reg,
14006 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14009 "Staggered jumps: JMP32_JLE_X",
14011 INTERNAL | FLAG_NO_DATA,
14013 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14014 .fill_helper = bpf_fill_staggered_jle32_reg,
14015 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14018 "Staggered jumps: JMP32_JSGT_X",
14020 INTERNAL | FLAG_NO_DATA,
14022 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14023 .fill_helper = bpf_fill_staggered_jsgt32_reg,
14024 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14027 "Staggered jumps: JMP32_JSGE_X",
14029 INTERNAL | FLAG_NO_DATA,
14031 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14032 .fill_helper = bpf_fill_staggered_jsge32_reg,
14033 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14036 "Staggered jumps: JMP32_JSLT_X",
14038 INTERNAL | FLAG_NO_DATA,
14040 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14041 .fill_helper = bpf_fill_staggered_jslt32_reg,
14042 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14045 "Staggered jumps: JMP32_JSLE_X",
14047 INTERNAL | FLAG_NO_DATA,
14049 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14050 .fill_helper = bpf_fill_staggered_jsle32_reg,
14051 .nr_testruns = NR_STAGGERED_JMP_RUNS,
/* Dummy net_device used to back skb->dev for ancillary-data loads. */
14055 static struct net_device dev;
/*
 * populate_skb - allocate an skb and fill it with a test payload.
 *
 * Copies @size bytes of @buf into a freshly allocated skb, then seeds
 * the skb metadata (mark, hash, queue mapping, VLAN fields, dev) with
 * the SKB_* test constants so BPF ancillary loads see known values.
 * Rejects payloads that don't fit MAX_DATA.
 * NOTE(review): error paths (NULL returns) are not visible in this
 * excerpt — confirm against the full source.
 */
14057 static struct sk_buff *populate_skb(char *buf, int size)
14059 struct sk_buff *skb;
14061 if (size >= MAX_DATA)
14064 skb = alloc_skb(MAX_DATA, GFP_KERNEL);
14068 __skb_put_data(skb, buf, size);
14070 /* Initialize a fake skb with test pattern. */
14071 skb_reset_mac_header(skb);
14072 skb->protocol = htons(ETH_P_IP);
14073 skb->pkt_type = SKB_TYPE;
14074 skb->mark = SKB_MARK;
14075 skb->hash = SKB_HASH;
14076 skb->queue_mapping = SKB_QUEUE_MAP;
14077 skb->vlan_tci = SKB_VLAN_TCI;
14078 skb->vlan_present = SKB_VLAN_PRESENT;
14079 skb->vlan_proto = htons(ETH_P_IP);
14080 dev_net_set(&dev, &init_net);
/* NOTE(review): skb->dev is presumably set to &dev just before these
 * dereferences; the assignment is not visible in this excerpt.
 */
14082 skb->dev->ifindex = SKB_DEV_IFINDEX;
14083 skb->dev->type = SKB_DEV_TYPE;
14084 skb_set_network_header(skb, min(size, ETH_HLEN));
/*
 * generate_test_data - build the runtime input for one sub-test.
 *
 * Returns NULL for FLAG_NO_DATA tests (the program takes no context);
 * otherwise builds an skb sized per test->test[sub].data_size. When
 * FLAG_SKB_FRAG is set, one page-backed fragment filled with
 * test->frag_data is appended so skb_frag-reading helpers can be
 * exercised.
 */
14089 static void *generate_test_data(struct bpf_test *test, int sub)
14091 struct sk_buff *skb;
14094 if (test->aux & FLAG_NO_DATA)
14097 /* Test case expects an skb, so populate one. Various
14098 * subtests generate skbs of different sizes based on
14101 skb = populate_skb(test->data, test->test[sub].data_size);
14105 if (test->aux & FLAG_SKB_FRAG) {
14107 * when the test requires a fragmented skb, add a
14108 * single fragment to the skb, filled with
14113 page = alloc_page(GFP_KERNEL);
14116 goto err_kfree_skb;
14120 goto err_free_page;
14121 memcpy(ptr, test->frag_data, MAX_DATA);
14123 skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
/*
 * release_test_data - free whatever generate_test_data() produced.
 * FLAG_NO_DATA tests allocated nothing, so there is nothing to free.
 */
14135 static void release_test_data(const struct bpf_test *test, void *data)
14137 if (test->aux & FLAG_NO_DATA)
/*
 * filter_length - number of instructions in test program @which.
 *
 * Fill-helper tests record their length explicitly; static tests are
 * measured by scanning the fixed-size insns array backwards for the
 * last non-zero instruction.
 */
14143 static int filter_length(int which)
14145 struct sock_filter *fp;
14148 if (tests[which].fill_helper)
14149 return tests[which].u.ptr.len;
14151 fp = tests[which].u.insns;
14152 for (len = MAX_INSNS - 1; len > 0; --len)
14153 if (fp[len].code != 0 || fp[len].k != 0)
/*
 * filter_pointer - address of the instruction array for test @which,
 * whether dynamically filled (u.ptr.insns) or statically declared
 * (u.insns).
 */
14159 static void *filter_pointer(int which)
14161 if (tests[which].fill_helper)
14162 return tests[which].u.ptr.insns;
14164 return tests[which].u.insns;
/*
 * generate_filter - turn test @which into a runnable bpf_prog.
 *
 * CLASSIC tests go through bpf_prog_create() (which runs the classic
 * BPF checker); FLAG_EXPECTED_FAIL tests treat checker rejection with
 * the expected errcode as success. INTERNAL (eBPF) tests bypass the
 * verifier entirely: the prog is allocated, the instructions are
 * memcpy'd in, and stack depth / zext flags are set manually before
 * bpf_prog_select_runtime() picks interpreter or JIT.
 * @err receives the error code on failure.
 */
14167 static struct bpf_prog *generate_filter(int which, int *err)
14169 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14170 unsigned int flen = filter_length(which);
14171 void *fptr = filter_pointer(which);
14172 struct sock_fprog_kern fprog;
14173 struct bpf_prog *fp;
14175 switch (test_type) {
14177 fprog.filter = fptr;
14180 *err = bpf_prog_create(&fp, &fprog);
14181 if (tests[which].aux & FLAG_EXPECTED_FAIL) {
14182 if (*err == tests[which].expected_errcode) {
14184 /* Verifier rejected filter as expected. */
14188 pr_cont("UNEXPECTED_PASS\n");
14189 /* Verifier didn't reject the test that's
14190 * bad enough, just return!
14197 pr_cont("FAIL to prog_create err=%d len=%d\n",
14204 fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
14206 pr_cont("UNEXPECTED_FAIL no memory left\n");
14212 /* Type doesn't really matter here as long as it's not unspec. */
14213 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14214 memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
/* Tests skip the in-kernel verifier, so depth/zext must be set by hand. */
14215 fp->aux->stack_depth = tests[which].stack_depth;
14216 fp->aux->verifier_zext = !!(tests[which].aux &
14217 FLAG_VERIFIER_ZEXT);
14219 /* We cannot error here as we don't need type compatibility
14222 fp = bpf_prog_select_runtime(fp, err);
14224 pr_cont("FAIL to select_runtime err=%d\n", *err);
/*
 * release_filter - free a program made by generate_filter(). CLASSIC
 * programs must go through bpf_prog_destroy(); the INTERNAL path
 * (not visible here) frees the raw allocation.
 */
14234 static void release_filter(struct bpf_prog *fp, int which)
14236 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14238 switch (test_type) {
14240 bpf_prog_destroy(fp);
/*
 * __run_one - execute @fp on @data @runs times, reporting the average
 * per-run wall time in nanoseconds via @duration. Returns the result
 * of the last program run.
 */
14248 static int __run_one(const struct bpf_prog *fp, const void *data,
14249 int runs, u64 *duration)
14255 start = ktime_get_ns();
14257 for (i = 0; i < runs; i++)
14258 ret = bpf_prog_run(fp, data);
14260 finish = ktime_get_ns();
/* Average over all iterations; do_div for 64-bit division on 32-bit. */
14263 *duration = finish - start;
14264 do_div(*duration, runs);
/*
 * run_one - run every sub-test of @test against program @fp.
 *
 * Uses the test's custom nr_testruns if set (capped at MAX_TESTRUNS),
 * otherwise MAX_TESTRUNS. Compares each sub-test's return value with
 * the expected result and counts mismatches in err_cnt.
 */
14269 static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
14271 int err_cnt = 0, i, runs = MAX_TESTRUNS;
14273 if (test->nr_testruns)
14274 runs = min(test->nr_testruns, MAX_TESTRUNS);
14276 for (i = 0; i < MAX_SUBTESTS; i++) {
14282 * NOTE: Several sub-tests may be present, in which case
14283 * a zero {data_size, result} tuple indicates the end of
14284 * the sub-test array. The first test is always run,
14285 * even if both data_size and result happen to be zero.
14288 test->test[i].data_size == 0 &&
14289 test->test[i].result == 0)
14292 data = generate_test_data(test, i);
14293 if (!data && !(test->aux & FLAG_NO_DATA)) {
14294 pr_cont("data generation failed ");
14298 ret = __run_one(fp, data, runs, &duration);
14299 release_test_data(test, data);
14301 if (ret == test->test[i].result) {
/* Passing sub-tests print their average runtime in ns. */
14302 pr_cont("%lld ", duration);
14304 pr_cont("ret %d != %d ", ret,
14305 test->test[i].result);
/* Module parameters selecting which tests to run:
 * test_name  - run only the test with this description,
 * test_id    - run only this test index (-1 = all),
 * test_range - run only indices within [lo, hi].
 */
14313 static char test_name[64];
14314 module_param_string(test_name, test_name, sizeof(test_name), 0);
14316 static int test_id = -1;
14317 module_param(test_id, int, 0);
14319 static int test_range[2] = { 0, INT_MAX };
14320 module_param_array(test_range, int, NULL, 0);
/* exclude_test - true if @test_id falls outside the requested range. */
14322 static bool exclude_test(int test_id)
14324 return test_id < test_range[0] || test_id > test_range[1];
/*
 * build_test_skb - construct a GSO skb with page fragments and a
 * frag_list for the skb_segment() test.
 *
 * Two skbs are built, each with 8 bytes of linear data (head_frag set
 * via dev_alloc_skb) plus one 64-byte page fragment; skb[1] is then
 * chained onto skb[0] via frag_list and skb[0]'s GSO metadata is set
 * (TCPv4 + DODGY, gso_size 1448, gso_segs 0). Length/truesize of
 * skb[0] are adjusted to cover the chained skb. Error-unwind labels
 * free the pages on failure (cleanup paths partly outside this view).
 */
14327 static __init struct sk_buff *build_test_skb(void)
14329 u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
14330 struct sk_buff *skb[2];
14331 struct page *page[2];
14332 int i, data_size = 8;
14334 for (i = 0; i < 2; i++) {
14335 page[i] = alloc_page(GFP_KERNEL);
14343 /* this will set skb[i]->head_frag */
14344 skb[i] = dev_alloc_skb(headroom + data_size);
14352 skb_reserve(skb[i], headroom);
14353 skb_put(skb[i], data_size);
14354 skb[i]->protocol = htons(ETH_P_IP);
14355 skb_reset_network_header(skb[i]);
14356 skb_set_mac_header(skb[i], -ETH_HLEN);
14358 skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
14359 // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
/* Mark skb[0] as a dodgy GSO packet so skb_segment() must resegment. */
14363 skb_shinfo(skb[0])->gso_size = 1448;
14364 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
14365 skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
14366 skb_shinfo(skb[0])->gso_segs = 0;
14367 skb_shinfo(skb[0])->frag_list = skb[1];
14368 skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
14370 /* adjust skb[0]'s len */
14371 skb[0]->len += skb[1]->len;
14372 skb[0]->data_len += skb[1]->data_len;
14373 skb[0]->truesize += skb[1]->truesize;
/* Error unwind: free pages allocated above. */
14378 __free_page(page[1]);
14382 __free_page(page[0]);
/*
 * build_test_skb_linear_no_head_frag - construct a GSO frag_list skb
 * whose members are purely linear (kmalloc'ed heads, head_frag = 0),
 * for the second skb_segment() test case.
 */
14387 static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
14389 unsigned int alloc_size = 2000;
14390 unsigned int headroom = 102, doffset = 72, data_size = 1308;
14391 struct sk_buff *skb[2];
14394 /* skbs linked in a frag_list, both with linear data, with head_frag=0
14395 * (data allocated by kmalloc), both have tcp data of 1308 bytes
14396 * (total payload is 2616 bytes).
14397 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
14399 for (i = 0; i < 2; i++) {
14400 skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
14408 skb[i]->protocol = htons(ETH_P_IPV6);
14409 skb_reserve(skb[i], headroom);
14410 skb_put(skb[i], doffset + data_size);
14411 skb_reset_network_header(skb[i]);
14413 skb_reset_mac_header(skb[i]);
14415 skb_set_mac_header(skb[i], -ETH_HLEN);
/* Pull the 72-byte v6+tcp header so data points at the payload. */
14416 __skb_pull(skb[i], doffset);
14420 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
14421 * reduced gso_size.
14423 skb_shinfo(skb[0])->gso_size = 1288;
14424 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
14425 skb_shinfo(skb[0])->gso_segs = 0;
14426 skb_shinfo(skb[0])->frag_list = skb[1];
14428 /* adjust skb[0]'s len */
14429 skb[0]->len += skb[1]->len;
14430 skb[0]->data_len += skb[1]->len;
14431 skb[0]->truesize += skb[1]->truesize;
/* One skb_segment() test: a builder for the input skb and the netdev
 * feature set under which segmentation is attempted.
 */
14441 struct skb_segment_test {
14443 struct sk_buff *(*build_skb)(void);
14444 netdev_features_t features;
/* Table of skb_segment() scenarios driven by test_skb_segment(). */
14447 static struct skb_segment_test skb_segment_tests[] __initconst = {
14449 .descr = "gso_with_rx_frags",
14450 .build_skb = build_test_skb,
14451 .features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
14452 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
14455 .descr = "gso_linear_no_head_frag",
14456 .build_skb = build_test_skb_linear_no_head_frag,
14457 .features = NETIF_F_SG | NETIF_F_FRAGLIST |
14458 NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
14459 NETIF_F_LLTX_BIT | NETIF_F_GRO |
14460 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
14461 NETIF_F_HW_VLAN_STAG_TX_BIT
/*
 * test_skb_segment_single - build the test skb and run skb_segment()
 * on it with the test's feature mask. Success is skb_segment()
 * returning a valid segment list (which is then freed).
 */
14465 static __init int test_skb_segment_single(const struct skb_segment_test *test)
14467 struct sk_buff *skb, *segs;
14470 skb = test->build_skb();
14472 pr_info("%s: failed to build_test_skb", __func__);
14476 segs = skb_segment(skb, test->features);
14477 if (!IS_ERR(segs)) {
14478 kfree_skb_list(segs);
/*
 * test_skb_segment - run all skb_segment() test cases (honoring the
 * test_range module parameter) and print a pass/fail summary.
 * Returns 0 if all ran clean, -EINVAL otherwise.
 */
14486 static __init int test_skb_segment(void)
14488 int i, err_cnt = 0, pass_cnt = 0;
14490 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
14491 const struct skb_segment_test *test = &skb_segment_tests[i];
14494 if (exclude_test(i))
14497 pr_info("#%d %s ", i, test->descr);
14499 if (test_skb_segment_single(test)) {
14508 pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
14509 pass_cnt, err_cnt);
14510 return err_cnt ? -EINVAL : 0;
/*
 * test_bpf - main driver for the BPF instruction tests.
 *
 * For each (non-excluded) entry in tests[]: run the fill helper if
 * any, generate the program, report whether it was JITed, run all
 * sub-tests, then release the program and any helper-allocated
 * instructions. Prints a summary including the JIT'ed/run counts.
 */
14513 static __init int test_bpf(void)
14515 int i, err_cnt = 0, pass_cnt = 0;
14516 int jit_cnt = 0, run_cnt = 0;
14518 for (i = 0; i < ARRAY_SIZE(tests); i++) {
14519 struct bpf_prog *fp;
14523 if (exclude_test(i))
14526 pr_info("#%d %s ", i, tests[i].descr);
14528 if (tests[i].fill_helper &&
14529 tests[i].fill_helper(&tests[i]) < 0) {
14530 pr_cont("FAIL to prog_fill\n");
14534 fp = generate_filter(i, &err);
/* Helper-built insns are owned here; free them regardless of result. */
14536 if (tests[i].fill_helper) {
14537 kfree(tests[i].u.ptr.insns);
14538 tests[i].u.ptr.insns = NULL;
14550 pr_cont("jited:%u ", fp->jited);
14556 err = run_one(fp, &tests[i]);
14557 release_filter(fp, i);
14560 pr_cont("FAIL (%d times)\n", err);
14568 pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14569 pass_cnt, err_cnt, jit_cnt, run_cnt);
14571 return err_cnt ? -EINVAL : 0;
/* Descriptor for one tail call test program (see tail_call_tests[]). */
14574 struct tail_call_test {
14576 struct bpf_insn insns[MAX_INSNS];
14582 /* Flags that can be passed to tail call test cases */
14583 #define FLAG_NEED_STATE BIT(0)
14584 #define FLAG_RESULT_IN_STATE BIT(1)
14587 * Magic marker used in test snippets for tail calls below.
14588 * BPF_LD/MOV to R2 and R2 with this immediate value is replaced
14589 * with the proper values by the test runner.
14591 #define TAIL_CALL_MARKER 0x7a11ca11
14593 /* Special offset to indicate a NULL call target */
14594 #define TAIL_CALL_NULL 0x7fff
14596 /* Special offset to indicate an out-of-range index */
14597 #define TAIL_CALL_INVALID 0x7ffe
/* Emits: load prog-array ptr into R2, index into R3, then tail call.
 * The marker immediates are patched by prepare_tail_call_tests().
 */
14599 #define TAIL_CALL(offset) \
14600 BPF_LD_IMM64(R2, TAIL_CALL_MARKER), \
14601 BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
14602 offset, TAIL_CALL_MARKER), \
14603 BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
14606 * A test function to be called from a BPF program, clobbering a lot of
14607 * CPU registers in the process. A JITed BPF program calling this function
14608 * must save and restore any caller-saved registers it uses for internal
14609 * state, for example the current tail call count.
14611 BPF_CALL_1(bpf_test_func, u64, arg)
/* NOTE(review): locals a..h are declared in lines not shown here;
 * the snprintf merely forces their (register-clobbering) use.
 */
14623 return snprintf(buf, sizeof(buf),
14624 "%ld %lu %lx %ld %lu %lx %ld %lu %x",
14625 a, b, c, d, e, f, g, h, (int)arg);
/* Private helper-function ID, one past the last real BPF helper. */
14627 #define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
14630 * Tail call tests. Each test case may call any other test in the table,
14631 * including itself, specified as a relative index offset from the calling
14632 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
14633 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
14634 * results in a target index that is out of range.
/* Table of tail call test programs. Entries may tail-call each other
 * by relative index (see the header comment above); state-based tests
 * count invocations in a shared u32 passed as the program context.
 */
14636 static struct tail_call_test tail_call_tests[] = {
14640 BPF_ALU64_REG(BPF_MOV, R0, R1),
14641 BPF_ALU64_IMM(BPF_ADD, R0, 1),
14649 BPF_ALU64_IMM(BPF_ADD, R1, 2),
14651 BPF_ALU64_IMM(BPF_MOV, R0, -1),
14659 BPF_ALU64_IMM(BPF_ADD, R1, 3),
14661 BPF_ALU64_IMM(BPF_MOV, R0, -1),
14669 BPF_ALU64_IMM(BPF_ADD, R1, 4),
14671 BPF_ALU64_IMM(BPF_MOV, R0, -1),
/* Self tail call until the kernel's MAX_TAIL_CALL_CNT limit trips. */
14677 "Tail call error path, max count reached",
14679 BPF_LDX_MEM(BPF_W, R2, R1, 0),
14680 BPF_ALU64_IMM(BPF_ADD, R2, 1),
14681 BPF_STX_MEM(BPF_W, R1, R2, 0),
14685 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14686 .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
/* Calls into kernel helpers between tail calls; verifies that the
 * JIT preserves the tail call count across external calls.
 */
14689 "Tail call count preserved across function calls",
14691 BPF_LDX_MEM(BPF_W, R2, R1, 0),
14692 BPF_ALU64_IMM(BPF_ADD, R2, 1),
14693 BPF_STX_MEM(BPF_W, R1, R2, 0),
14694 BPF_STX_MEM(BPF_DW, R10, R1, -8),
14695 BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
14696 BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
14697 BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
14698 BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
14699 BPF_CALL_REL(BPF_FUNC_jiffies64),
14700 BPF_CALL_REL(BPF_FUNC_test_func),
14701 BPF_LDX_MEM(BPF_DW, R1, R10, -8),
14702 BPF_ALU32_REG(BPF_MOV, R0, R1),
14707 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14708 .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
/* Tail call to a NULL prog-array slot: must fall through once/run. */
14711 "Tail call error path, NULL target",
14713 BPF_LDX_MEM(BPF_W, R2, R1, 0),
14714 BPF_ALU64_IMM(BPF_ADD, R2, 1),
14715 BPF_STX_MEM(BPF_W, R1, R2, 0),
14716 TAIL_CALL(TAIL_CALL_NULL),
14719 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14720 .result = MAX_TESTRUNS,
/* Tail call with index >= max_entries: must also fall through. */
14723 "Tail call error path, index out of range",
14725 BPF_LDX_MEM(BPF_W, R2, R1, 0),
14726 BPF_ALU64_IMM(BPF_ADD, R2, 1),
14727 BPF_STX_MEM(BPF_W, R1, R2, 0),
14728 TAIL_CALL(TAIL_CALL_INVALID),
14731 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14732 .result = MAX_TESTRUNS,
/*
 * destroy_tail_call_tests - free every program in the tail call
 * prog-array built by prepare_tail_call_tests(); NULL slots (programs
 * that failed to build, and the sentinel entry) are skipped.
 */
14736 static void __init destroy_tail_call_tests(struct bpf_array *progs)
14740 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
14741 if (progs->ptrs[i])
14742 bpf_prog_free(progs->ptrs[i]);
/*
 * prepare_tail_call_tests - build the prog array for the tail call
 * tests.
 *
 * Allocates a bpf_array with one extra (NULL) slot, builds one eBPF
 * program per tail_call_tests[] entry, and patches every marker
 * instruction at load time:
 *  - BPF_LD_IMM64 with TAIL_CALL_MARKER -> address of the prog array,
 *  - BPF_MOV|K with TAIL_CALL_MARKER    -> resolved tail call index
 *    (relative offset, NULL sentinel, or out-of-range index),
 *  - pseudo BPF_CALL                    -> absolute kernel helper
 *    address, or a NOP jump if the address is out of BPF call range.
 * On success *pprogs holds the array; on failure everything built so
 * far is torn down.
 */
14746 static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
14748 int ntests = ARRAY_SIZE(tail_call_tests);
14749 struct bpf_array *progs;
14752 /* Allocate the table of programs to be used for tall calls */
14753 progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
14758 /* Create all eBPF programs and populate the table */
14759 for (which = 0; which < ntests; which++) {
14760 struct tail_call_test *test = &tail_call_tests[which];
14761 struct bpf_prog *fp;
14764 /* Compute the number of program instructions */
14765 for (len = 0; len < MAX_INSNS; len++) {
14766 struct bpf_insn *insn = &test->insns[len];
/* BPF_LD_IMM64 occupies two insn slots; skip its second half. */
14768 if (len < MAX_INSNS - 1 &&
14769 insn->code == (BPF_LD | BPF_DW | BPF_IMM))
14771 if (insn->code == 0)
14775 /* Allocate and initialize the program */
14776 fp = bpf_prog_alloc(bpf_prog_size(len), 0);
14781 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14782 fp->aux->stack_depth = test->stack_depth;
14783 memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
14785 /* Relocate runtime tail call offsets and addresses */
14786 for (i = 0; i < len; i++) {
14787 struct bpf_insn *insn = &fp->insnsi[i];
14790 switch (insn->code) {
14791 case BPF_LD | BPF_DW | BPF_IMM:
14792 if (insn->imm != TAIL_CALL_MARKER)
/* 64-bit array pointer split across the two imm32 halves. */
14794 insn[0].imm = (u32)(long)progs;
14795 insn[1].imm = ((u64)(long)progs) >> 32;
14798 case BPF_ALU | BPF_MOV | BPF_K:
14799 if (insn->imm != TAIL_CALL_MARKER)
14801 if (insn->off == TAIL_CALL_NULL)
14802 insn->imm = ntests;
14803 else if (insn->off == TAIL_CALL_INVALID)
14804 insn->imm = ntests + 1;
14806 insn->imm = which + insn->off;
14810 case BPF_JMP | BPF_CALL:
14811 if (insn->src_reg != BPF_PSEUDO_CALL)
14813 switch (insn->imm) {
14814 case BPF_FUNC_get_numa_node_id:
14815 addr = (long)&numa_node_id;
14817 case BPF_FUNC_ktime_get_ns:
14818 addr = (long)&ktime_get_ns;
14820 case BPF_FUNC_ktime_get_boot_ns:
14821 addr = (long)&ktime_get_boot_fast_ns;
14823 case BPF_FUNC_ktime_get_coarse_ns:
14824 addr = (long)&ktime_get_coarse_ns;
14826 case BPF_FUNC_jiffies64:
14827 addr = (long)&get_jiffies_64;
14829 case BPF_FUNC_test_func:
14830 addr = (long)&bpf_test_func;
14836 *insn = BPF_EMIT_CALL(addr);
/* If the helper is unreachable from __bpf_call_base, NOP it out. */
14837 if ((long)__bpf_call_base + insn->imm != addr)
14838 *insn = BPF_JMP_A(0); /* Skip: NOP */
14843 fp = bpf_prog_select_runtime(fp, &err);
14847 progs->ptrs[which] = fp;
14850 /* The last entry contains a NULL program pointer */
14851 progs->map.max_entries = ntests + 1;
14860 destroy_tail_call_tests(progs);
/*
 * test_tail_calls - run every tail call test program against the
 * prepared prog array and print a pass/fail + JIT summary.
 * FLAG_NEED_STATE tests pass a shared counter as program context;
 * FLAG_RESULT_IN_STATE tests read the result back from that counter
 * rather than from the program's return value.
 */
14864 static __init int test_tail_calls(struct bpf_array *progs)
14866 int i, err_cnt = 0, pass_cnt = 0;
14867 int jit_cnt = 0, run_cnt = 0;
14869 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
14870 struct tail_call_test *test = &tail_call_tests[i];
14871 struct bpf_prog *fp = progs->ptrs[i];
14878 if (exclude_test(i))
14881 pr_info("#%d %s ", i, test->descr);
14886 pr_cont("jited:%u ", fp->jited);
14892 if (test->flags & FLAG_NEED_STATE)
14894 ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
14895 if (test->flags & FLAG_RESULT_IN_STATE)
14897 if (ret == test->result) {
14898 pr_cont("%lld PASS", duration);
14901 pr_cont("ret %d != %d FAIL", ret, test->result);
14906 pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14907 __func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
14909 return err_cnt ? -EINVAL : 0;
/* Module parameter selecting which suite runs:
 * "test_bpf", "test_tail_calls", or "test_skb_segment" (empty = all).
 */
14912 static char test_suite[32];
14913 module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
/*
 * find_test_index - look up @test_name in the table belonging to the
 * selected test_suite and return its index (negative paths not
 * visible in this excerpt).
 */
14915 static __init int find_test_index(const char *test_name)
14919 if (!strcmp(test_suite, "test_bpf")) {
14920 for (i = 0; i < ARRAY_SIZE(tests); i++) {
14921 if (!strcmp(tests[i].descr, test_name))
14926 if (!strcmp(test_suite, "test_tail_calls")) {
14927 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
14928 if (!strcmp(tail_call_tests[i].descr, test_name))
14933 if (!strcmp(test_suite, "test_skb_segment")) {
14934 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
14935 if (!strcmp(skb_segment_tests[i].descr, test_name))
/*
 * prepare_test_range - reconcile the test_id / test_name / test_range
 * module parameters into a single validated [lo, hi] index range for
 * the selected suite. test_id and test_name each collapse the range
 * to a single index; an explicit test_range is bounds-checked.
 */
14943 static __init int prepare_test_range(void)
14947 if (!strcmp(test_suite, "test_bpf"))
14948 valid_range = ARRAY_SIZE(tests);
14949 else if (!strcmp(test_suite, "test_tail_calls"))
14950 valid_range = ARRAY_SIZE(tail_call_tests);
14951 else if (!strcmp(test_suite, "test_skb_segment"))
14952 valid_range = ARRAY_SIZE(skb_segment_tests);
14956 if (test_id >= 0) {
14958 * if a test_id was specified, use test_range to
14959 * cover only that test.
14961 if (test_id >= valid_range) {
14962 pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
14967 test_range[0] = test_id;
14968 test_range[1] = test_id;
14969 } else if (*test_name) {
14971 * if a test_name was specified, find it and setup
14972 * test_range to cover only that test.
14974 int idx = find_test_index(test_name);
14977 pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
14978 test_name, test_suite);
14981 test_range[0] = idx;
14982 test_range[1] = idx;
14983 } else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
14985 * check that the supplied test_range is valid.
14987 if (test_range[0] < 0 || test_range[1] >= valid_range) {
14988 pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
14993 if (test_range[1] < test_range[0]) {
14994 pr_err("test_bpf: test_range is ending before it starts.\n");
/*
 * test_bpf_init - module entry point.
 *
 * Validates test_suite, defaults it to "test_bpf" when only a test
 * selector was given, resolves the test range, then runs the selected
 * suite(s): BPF instruction tests, tail call tests, and/or
 * skb_segment tests. Returns the first failing suite's error.
 */
15002 static int __init test_bpf_init(void)
15004 struct bpf_array *progs = NULL;
15007 if (strlen(test_suite) &&
15008 strcmp(test_suite, "test_bpf") &&
15009 strcmp(test_suite, "test_tail_calls") &&
15010 strcmp(test_suite, "test_skb_segment")) {
15011 pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
15016 * if test_suite is not specified, but test_id, test_name or test_range
15017 * is specified, set 'test_bpf' as the default test suite.
15019 if (!strlen(test_suite) &&
15020 (test_id != -1 || strlen(test_name) ||
15021 (test_range[0] != 0 || test_range[1] != INT_MAX))) {
15022 pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
15023 strscpy(test_suite, "test_bpf", sizeof(test_suite));
15026 ret = prepare_test_range();
15030 if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
15036 if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
15037 ret = prepare_tail_call_tests(&progs);
15040 ret = test_tail_calls(progs);
/* Prog array and its programs are torn down whether or not tests passed. */
15041 destroy_tail_call_tests(progs);
15046 if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
15047 return test_skb_segment();
/* Module exit: nothing to tear down — all state is released at init. */
15052 static void __exit test_bpf_exit(void)
15056 module_init(test_bpf_init);
15057 module_exit(test_bpf_exit);
15059 MODULE_LICENSE("GPL");